Columns:
  text           string    lengths 89 to 104k
  code_tokens    list
  avg_line_len   float64   7.91 to 980
  score          float64   0 to 630
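The avg_line_len column presumably holds the mean characters per line of each text sample; the exact definition is not given in this dump. Below is a minimal sketch of how such a statistic could be recomputed, assuming the rows are loaded into a pandas DataFrame named df (the helper name, the DataFrame, and the treatment of blank lines are illustrative assumptions, not part of the dataset):

import pandas as pd

def avg_line_len(source: str) -> float:
    # Mean character count per non-empty line of a code sample.
    # Whether the original column skips blank lines is an assumption here.
    lines = [ln for ln in source.splitlines() if ln.strip()]
    return sum(len(ln) for ln in lines) / len(lines) if lines else 0.0

# Hypothetical usage against rows shaped like the columns above.
df = pd.DataFrame({"text": ["def f():\n    return 1\n"]})
df["avg_line_len"] = df["text"].map(avg_line_len)
print(df["avg_line_len"].iloc[0])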
def version():
    """Return version string."""
    with io.open('pgmagick/_version.py') as input_file:
        for line in input_file:
            if line.startswith('__version__'):
                return ast.parse(line).body[0].value.s
[ "def", "version", "(", ")", ":", "with", "io", ".", "open", "(", "'pgmagick/_version.py'", ")", "as", "input_file", ":", "for", "line", "in", "input_file", ":", "if", "line", ".", "startswith", "(", "'__version__'", ")", ":", "return", "ast", ".", "parse", "(", "line", ")", ".", "body", "[", "0", "]", ".", "value", ".", "s" ]
38.666667
11.666667
def imshow_z(data, name):
    """2D color plot of the quasiparticle weight as a function of interaction and doping"""
    zmes = pick_flat_z(data)
    plt.figure()
    plt.imshow(zmes.T, origin='lower', \
               extent=[data['doping'].min(), data['doping'].max(), \
                       0, data['u_int'].max()], aspect=.16)
    plt.colorbar()
    plt.xlabel('$n$', fontsize=20)
    plt.ylabel('$U/D$', fontsize=20)
    plt.savefig(name+'_imshow.png', dpi=300, format='png',
                transparent=False, bbox_inches='tight', pad_inches=0.05)
[ "def", "imshow_z", "(", "data", ",", "name", ")", ":", "zmes", "=", "pick_flat_z", "(", "data", ")", "plt", ".", "figure", "(", ")", "plt", ".", "imshow", "(", "zmes", ".", "T", ",", "origin", "=", "'lower'", ",", "extent", "=", "[", "data", "[", "'doping'", "]", ".", "min", "(", ")", ",", "data", "[", "'doping'", "]", ".", "max", "(", ")", ",", "0", ",", "data", "[", "'u_int'", "]", ".", "max", "(", ")", "]", ",", "aspect", "=", ".16", ")", "plt", ".", "colorbar", "(", ")", "plt", ".", "xlabel", "(", "'$n$'", ",", "fontsize", "=", "20", ")", "plt", ".", "ylabel", "(", "'$U/D$'", ",", "fontsize", "=", "20", ")", "plt", ".", "savefig", "(", "name", "+", "'_imshow.png'", ",", "dpi", "=", "300", ",", "format", "=", "'png'", ",", "transparent", "=", "False", ",", "bbox_inches", "=", "'tight'", ",", "pad_inches", "=", "0.05", ")" ]
35.533333
16.133333
def _change_splitlevel(self, ttype, value):
    """Get the new split level (increase, decrease or remain equal)"""
    # parenthesis increase/decrease a level
    if ttype is T.Punctuation and value == '(':
        return 1
    elif ttype is T.Punctuation and value == ')':
        return -1
    elif ttype not in T.Keyword:  # if normal token return
        return 0

    # Everything after here is ttype = T.Keyword
    # Also to note, once entered an If statement you are done and basically
    # returning
    unified = value.upper()

    # three keywords begin with CREATE, but only one of them is DDL
    # DDL Create though can contain more words such as "or replace"
    if ttype is T.Keyword.DDL and unified.startswith('CREATE'):
        self._is_create = True
        return 0

    # can have nested declare inside of being...
    if unified == 'DECLARE' and self._is_create and self._begin_depth == 0:
        self._in_declare = True
        return 1

    if unified == 'BEGIN':
        self._begin_depth += 1
        if self._is_create:
            # FIXME(andi): This makes no sense.
            return 1
        return 0

    # Should this respect a preceding BEGIN?
    # In CASE ... WHEN ... END this results in a split level -1.
    # Would having multiple CASE WHEN END and a Assignment Operator
    # cause the statement to cut off prematurely?
    if unified == 'END':
        self._begin_depth = max(0, self._begin_depth - 1)
        return -1

    if (unified in ('IF', 'FOR', 'WHILE') and
            self._is_create and self._begin_depth > 0):
        return 1

    if unified in ('END IF', 'END FOR', 'END WHILE'):
        return -1

    # Default
    return 0
[ "def", "_change_splitlevel", "(", "self", ",", "ttype", ",", "value", ")", ":", "# parenthesis increase/decrease a level", "if", "ttype", "is", "T", ".", "Punctuation", "and", "value", "==", "'('", ":", "return", "1", "elif", "ttype", "is", "T", ".", "Punctuation", "and", "value", "==", "')'", ":", "return", "-", "1", "elif", "ttype", "not", "in", "T", ".", "Keyword", ":", "# if normal token return", "return", "0", "# Everything after here is ttype = T.Keyword", "# Also to note, once entered an If statement you are done and basically", "# returning", "unified", "=", "value", ".", "upper", "(", ")", "# three keywords begin with CREATE, but only one of them is DDL", "# DDL Create though can contain more words such as \"or replace\"", "if", "ttype", "is", "T", ".", "Keyword", ".", "DDL", "and", "unified", ".", "startswith", "(", "'CREATE'", ")", ":", "self", ".", "_is_create", "=", "True", "return", "0", "# can have nested declare inside of being...", "if", "unified", "==", "'DECLARE'", "and", "self", ".", "_is_create", "and", "self", ".", "_begin_depth", "==", "0", ":", "self", ".", "_in_declare", "=", "True", "return", "1", "if", "unified", "==", "'BEGIN'", ":", "self", ".", "_begin_depth", "+=", "1", "if", "self", ".", "_is_create", ":", "# FIXME(andi): This makes no sense.", "return", "1", "return", "0", "# Should this respect a preceding BEGIN?", "# In CASE ... WHEN ... END this results in a split level -1.", "# Would having multiple CASE WHEN END and a Assignment Operator", "# cause the statement to cut off prematurely?", "if", "unified", "==", "'END'", ":", "self", ".", "_begin_depth", "=", "max", "(", "0", ",", "self", ".", "_begin_depth", "-", "1", ")", "return", "-", "1", "if", "(", "unified", "in", "(", "'IF'", ",", "'FOR'", ",", "'WHILE'", ")", "and", "self", ".", "_is_create", "and", "self", ".", "_begin_depth", ">", "0", ")", ":", "return", "1", "if", "unified", "in", "(", "'END IF'", ",", "'END FOR'", ",", "'END WHILE'", ")", ":", "return", "-", "1", "# Default", "return", "0" ]
35.27451
21.196078
def all_settings(self, uppercase_keys=False):
    """Return all settings as a `dict`."""
    d = {}
    for k in self.all_keys(uppercase_keys):
        d[k] = self.get(k)
    return d
[ "def", "all_settings", "(", "self", ",", "uppercase_keys", "=", "False", ")", ":", "d", "=", "{", "}", "for", "k", "in", "self", ".", "all_keys", "(", "uppercase_keys", ")", ":", "d", "[", "k", "]", "=", "self", ".", "get", "(", "k", ")", "return", "d" ]
24.75
19
def revoke_user_token(self, user_id):
    """
    Revoke user token
    Erases user token on file forcing them to re-login and obtain a new one.

    :param user_id: int
    :return:
    """
    user = self.get(user_id)
    user._token = None
    self.save(user)
[ "def", "revoke_user_token", "(", "self", ",", "user_id", ")", ":", "user", "=", "self", ".", "get", "(", "user_id", ")", "user", ".", "_token", "=", "None", "self", ".", "save", "(", "user", ")" ]
28.8
13.4
def _set_matplotlib_default_backend():
    """
    matplotlib will try to print to a display if it is available, but don't want
    to run it in interactive mode. we tried setting the backend to 'Agg'' before
    importing, but it was still resulting in issues. we replace the existing
    backend with 'agg' in the default matplotlibrc. This is a hack until we can
    find a better solution
    """
    if _matplotlib_installed():
        import matplotlib
        matplotlib.use('Agg', force=True)
        config = matplotlib.matplotlib_fname()
        if os.access(config, os.W_OK):
            with file_transaction(config) as tx_out_file:
                with open(config) as in_file, open(tx_out_file, "w") as out_file:
                    for line in in_file:
                        if line.split(":")[0].strip() == "backend":
                            out_file.write("backend: agg\n")
                        else:
                            out_file.write(line)
[ "def", "_set_matplotlib_default_backend", "(", ")", ":", "if", "_matplotlib_installed", "(", ")", ":", "import", "matplotlib", "matplotlib", ".", "use", "(", "'Agg'", ",", "force", "=", "True", ")", "config", "=", "matplotlib", ".", "matplotlib_fname", "(", ")", "if", "os", ".", "access", "(", "config", ",", "os", ".", "W_OK", ")", ":", "with", "file_transaction", "(", "config", ")", "as", "tx_out_file", ":", "with", "open", "(", "config", ")", "as", "in_file", ",", "open", "(", "tx_out_file", ",", "\"w\"", ")", "as", "out_file", ":", "for", "line", "in", "in_file", ":", "if", "line", ".", "split", "(", "\":\"", ")", "[", "0", "]", ".", "strip", "(", ")", "==", "\"backend\"", ":", "out_file", ".", "write", "(", "\"backend: agg\\n\"", ")", "else", ":", "out_file", ".", "write", "(", "line", ")" ]
47.8
16.4
def getParameter(self, objID, param):
    """getParameter(string, string) -> string

    Returns the value of the given parameter for the given objID
    """
    self._connection._beginMessage(
        self._cmdGetID, tc.VAR_PARAMETER, objID, 1 + 4 + len(param))
    self._connection._packString(param)
    result = self._connection._checkResult(
        self._cmdGetID, tc.VAR_PARAMETER, objID)
    return result.readString()
[ "def", "getParameter", "(", "self", ",", "objID", ",", "param", ")", ":", "self", ".", "_connection", ".", "_beginMessage", "(", "self", ".", "_cmdGetID", ",", "tc", ".", "VAR_PARAMETER", ",", "objID", ",", "1", "+", "4", "+", "len", "(", "param", ")", ")", "self", ".", "_connection", ".", "_packString", "(", "param", ")", "result", "=", "self", ".", "_connection", ".", "_checkResult", "(", "self", ".", "_cmdGetID", ",", "tc", ".", "VAR_PARAMETER", ",", "objID", ")", "return", "result", ".", "readString", "(", ")" ]
41.090909
12
def DOMDebugger_setDOMBreakpoint(self, nodeId, type):
    """
    Function path: DOMDebugger.setDOMBreakpoint
        Domain: DOMDebugger
        Method name: setDOMBreakpoint

    Parameters:
        Required arguments:
            'nodeId' (type: DOM.NodeId) -> Identifier of the node to set breakpoint on.
            'type' (type: DOMBreakpointType) -> Type of the operation to stop upon.
    No return value.

    Description: Sets breakpoint on particular operation with DOM.
    """
    subdom_funcs = self.synchronous_command('DOMDebugger.setDOMBreakpoint',
                                            nodeId=nodeId, type=type)
    return subdom_funcs
[ "def", "DOMDebugger_setDOMBreakpoint", "(", "self", ",", "nodeId", ",", "type", ")", ":", "subdom_funcs", "=", "self", ".", "synchronous_command", "(", "'DOMDebugger.setDOMBreakpoint'", ",", "nodeId", "=", "nodeId", ",", "type", "=", "type", ")", "return", "subdom_funcs" ]
33.411765
20.352941
def parse_include(self, start):
    """
    Extract include from text based on start position of token

    Returns
    -------
    (end, incl_path)
    - end: last char in include
    - incl_path: Resolved path to include
    """
    # Seek back to start of line
    i = start
    while i:
        if self.text[i] == '\n':
            i += 1
            break
        i -= 1
    line_start = i

    # check that there is no unexpected text before the include
    if not (self.text[line_start:start] == "" or self.text[line_start:start].isspace()):
        self.env.msg.fatal(
            "Unexpected text before include",
            messages.SourceRef(line_start, start-1, filename=self.path)
        )

    # Capture include contents
    inc_regex = re.compile(r'`include\s+("([^\r\n]+)"|<([^\r\n]+)>)')
    m_inc = inc_regex.match(self.text, start)
    if m_inc is None:
        self.env.msg.fatal(
            "Invalid usage of include directive",
            messages.SourceRef(start, start+7, filename=self.path)
        )
    incl_path_raw = m_inc.group(2) or m_inc.group(3)
    end = m_inc.end(0)-1
    path_start = m_inc.start(1)

    #[^\r\n]*?\r?\n
    # Check that only comments follow
    tail_regex = re.compile(r'(?:[ \t]*/\*[^\r\n]*?\*/)*[ \t]*(?://[^\r\n]*?|/\*[^\r\n]*?)?\r?\n')
    if not tail_regex.match(self.text, end+1):
        tail_capture_regex = re.compile(r'[^\r\n]*?\r?\n')
        m = tail_capture_regex.match(self.text, end+1)
        self.env.msg.fatal(
            "Unexpected text after include",
            messages.SourceRef(end+1, m.end(0)-1, filename=self.path)
        )

    # Resolve include path.
    if os.path.isabs(incl_path_raw):
        incl_path = incl_path_raw
    else:
        # Search include paths first.
        for search_path in self.search_paths:
            incl_path = os.path.join(search_path, incl_path_raw)
            if os.path.isfile(incl_path):
                # found match!
                break
        else:
            # Otherwise, assume it is relative to the current file
            incl_path = os.path.join(os.path.dirname(self.path), incl_path_raw)
            if not os.path.isfile(incl_path):
                self.env.msg.fatal(
                    "Could not find '%s' in include search paths" % incl_path_raw,
                    messages.SourceRef(path_start, end, filename=self.path)
                )

    # Check if path has already been referenced before
    incl_ref = self.incl_ref
    while incl_ref:
        if os.path.samefile(incl_path, incl_ref.path):
            self.env.msg.fatal(
                "Include of '%s' results in a circular reference" % incl_path_raw,
                messages.SourceRef(path_start, end, filename=self.path)
            )
        incl_ref = incl_ref.parent

    return(end, incl_path)
[ "def", "parse_include", "(", "self", ",", "start", ")", ":", "# Seek back to start of line", "i", "=", "start", "while", "i", ":", "if", "self", ".", "text", "[", "i", "]", "==", "'\\n'", ":", "i", "+=", "1", "break", "i", "-=", "1", "line_start", "=", "i", "# check that there is no unexpected text before the include", "if", "not", "(", "self", ".", "text", "[", "line_start", ":", "start", "]", "==", "\"\"", "or", "self", ".", "text", "[", "line_start", ":", "start", "]", ".", "isspace", "(", ")", ")", ":", "self", ".", "env", ".", "msg", ".", "fatal", "(", "\"Unexpected text before include\"", ",", "messages", ".", "SourceRef", "(", "line_start", ",", "start", "-", "1", ",", "filename", "=", "self", ".", "path", ")", ")", "# Capture include contents", "inc_regex", "=", "re", ".", "compile", "(", "r'`include\\s+(\"([^\\r\\n]+)\"|<([^\\r\\n]+)>)'", ")", "m_inc", "=", "inc_regex", ".", "match", "(", "self", ".", "text", ",", "start", ")", "if", "m_inc", "is", "None", ":", "self", ".", "env", ".", "msg", ".", "fatal", "(", "\"Invalid usage of include directive\"", ",", "messages", ".", "SourceRef", "(", "start", ",", "start", "+", "7", ",", "filename", "=", "self", ".", "path", ")", ")", "incl_path_raw", "=", "m_inc", ".", "group", "(", "2", ")", "or", "m_inc", ".", "group", "(", "3", ")", "end", "=", "m_inc", ".", "end", "(", "0", ")", "-", "1", "path_start", "=", "m_inc", ".", "start", "(", "1", ")", "#[^\\r\\n]*?\\r?\\n", "# Check that only comments follow", "tail_regex", "=", "re", ".", "compile", "(", "r'(?:[ \\t]*/\\*[^\\r\\n]*?\\*/)*[ \\t]*(?://[^\\r\\n]*?|/\\*[^\\r\\n]*?)?\\r?\\n'", ")", "if", "not", "tail_regex", ".", "match", "(", "self", ".", "text", ",", "end", "+", "1", ")", ":", "tail_capture_regex", "=", "re", ".", "compile", "(", "r'[^\\r\\n]*?\\r?\\n'", ")", "m", "=", "tail_capture_regex", ".", "match", "(", "self", ".", "text", ",", "end", "+", "1", ")", "self", ".", "env", ".", "msg", ".", "fatal", "(", "\"Unexpected text after include\"", ",", "messages", ".", "SourceRef", "(", "end", "+", "1", ",", "m", ".", "end", "(", "0", ")", "-", "1", ",", "filename", "=", "self", ".", "path", ")", ")", "# Resolve include path.", "if", "os", ".", "path", ".", "isabs", "(", "incl_path_raw", ")", ":", "incl_path", "=", "incl_path_raw", "else", ":", "# Search include paths first.", "for", "search_path", "in", "self", ".", "search_paths", ":", "incl_path", "=", "os", ".", "path", ".", "join", "(", "search_path", ",", "incl_path_raw", ")", "if", "os", ".", "path", ".", "isfile", "(", "incl_path", ")", ":", "# found match!", "break", "else", ":", "# Otherwise, assume it is relative to the current file", "incl_path", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "self", ".", "path", ")", ",", "incl_path_raw", ")", "if", "not", "os", ".", "path", ".", "isfile", "(", "incl_path", ")", ":", "self", ".", "env", ".", "msg", ".", "fatal", "(", "\"Could not find '%s' in include search paths\"", "%", "incl_path_raw", ",", "messages", ".", "SourceRef", "(", "path_start", ",", "end", ",", "filename", "=", "self", ".", "path", ")", ")", "# Check if path has already been referenced before", "incl_ref", "=", "self", ".", "incl_ref", "while", "incl_ref", ":", "if", "os", ".", "path", ".", "samefile", "(", "incl_path", ",", "incl_ref", ".", "path", ")", ":", "self", ".", "env", ".", "msg", ".", "fatal", "(", "\"Include of '%s' results in a circular reference\"", "%", "incl_path_raw", ",", "messages", ".", "SourceRef", "(", "path_start", ",", "end", ",", "filename", 
"=", "self", ".", "path", ")", ")", "incl_ref", "=", "incl_ref", ".", "parent", "return", "(", "end", ",", "incl_path", ")" ]
38.24359
19.371795
async def setRemoteDescription(self, sessionDescription): """ Changes the remote description associated with the connection. :param: sessionDescription: An :class:`RTCSessionDescription` created from information received over the signaling channel. """ # parse and validate description description = sdp.SessionDescription.parse(sessionDescription.sdp) description.type = sessionDescription.type self.__validate_description(description, is_local=False) # apply description trackEvents = [] for i, media in enumerate(description.media): self.__seenMids.add(media.rtp.muxId) if media.kind in ['audio', 'video']: # find transceiver transceiver = None for t in self.__transceivers: if t.kind == media.kind and t.mid in [None, media.rtp.muxId]: transceiver = t if transceiver is None: transceiver = self.__createTransceiver(direction='recvonly', kind=media.kind) if transceiver.mid is None: transceiver._set_mid(media.rtp.muxId) transceiver._set_mline_index(i) # negotiate codecs common = filter_preferred_codecs( find_common_codecs(CODECS[media.kind], media.rtp.codecs), transceiver._preferred_codecs) assert len(common) transceiver._codecs = common transceiver._headerExtensions = find_common_header_extensions( HEADER_EXTENSIONS[media.kind], media.rtp.headerExtensions) # configure transport iceTransport = transceiver._transport.transport add_remote_candidates(iceTransport, media) self.__remoteDtls[transceiver] = media.dtls self.__remoteIce[transceiver] = media.ice # configure direction direction = reverse_direction(media.direction) if description.type in ['answer', 'pranswer']: transceiver._currentDirection = direction else: transceiver._offerDirection = direction # create remote stream track if direction in ['recvonly', 'sendrecv'] and not transceiver.receiver._track: transceiver.receiver._track = RemoteStreamTrack(kind=media.kind) trackEvents.append(RTCTrackEvent( receiver=transceiver.receiver, track=transceiver.receiver._track, transceiver=transceiver, )) elif media.kind == 'application': if not self.__sctp: self.__createSctpTransport() if self.__sctp.mid is None: self.__sctp.mid = media.rtp.muxId self.__sctp_mline_index = i # configure sctp if media.profile == 'DTLS/SCTP': self._sctpLegacySdp = True self.__sctpRemotePort = int(media.fmt[0]) else: self._sctpLegacySdp = False self.__sctpRemotePort = media.sctp_port self.__sctpRemoteCaps = media.sctpCapabilities # configure transport iceTransport = self.__sctp.transport.transport add_remote_candidates(iceTransport, media) self.__remoteDtls[self.__sctp] = media.dtls self.__remoteIce[self.__sctp] = media.ice # remove bundled transports bundle = next((x for x in description.group if x.semantic == 'BUNDLE'), None) if bundle and bundle.items: # find main media stream masterMid = bundle.items[0] masterTransport = None for transceiver in self.__transceivers: if transceiver.mid == masterMid: masterTransport = transceiver._transport break if self.__sctp and self.__sctp.mid == masterMid: masterTransport = self.__sctp.transport # replace transport for bundled media oldTransports = set() slaveMids = bundle.items[1:] for transceiver in self.__transceivers: if transceiver.mid in slaveMids and not transceiver._bundled: oldTransports.add(transceiver._transport) transceiver.receiver.setTransport(masterTransport) transceiver.sender.setTransport(masterTransport) transceiver._bundled = True transceiver._transport = masterTransport if self.__sctp and self.__sctp.mid in slaveMids: oldTransports.add(self.__sctp.transport) self.__sctp.setTransport(masterTransport) 
self.__sctp._bundled = True # stop and discard old ICE transports for dtlsTransport in oldTransports: await dtlsTransport.stop() await dtlsTransport.transport.stop() self.__iceTransports.discard(dtlsTransport.transport) self.__updateIceGatheringState() self.__updateIceConnectionState() # FIXME: in aiortc 1.0.0 emit RTCTrackEvent directly for event in trackEvents: self.emit('track', event.track) # connect asyncio.ensure_future(self.__connect()) # update signaling state if description.type == 'offer': self.__setSignalingState('have-remote-offer') elif description.type == 'answer': self.__setSignalingState('stable') # replace description if description.type == 'answer': self.__currentRemoteDescription = description self.__pendingRemoteDescription = None else: self.__pendingRemoteDescription = description
[ "async", "def", "setRemoteDescription", "(", "self", ",", "sessionDescription", ")", ":", "# parse and validate description", "description", "=", "sdp", ".", "SessionDescription", ".", "parse", "(", "sessionDescription", ".", "sdp", ")", "description", ".", "type", "=", "sessionDescription", ".", "type", "self", ".", "__validate_description", "(", "description", ",", "is_local", "=", "False", ")", "# apply description", "trackEvents", "=", "[", "]", "for", "i", ",", "media", "in", "enumerate", "(", "description", ".", "media", ")", ":", "self", ".", "__seenMids", ".", "add", "(", "media", ".", "rtp", ".", "muxId", ")", "if", "media", ".", "kind", "in", "[", "'audio'", ",", "'video'", "]", ":", "# find transceiver", "transceiver", "=", "None", "for", "t", "in", "self", ".", "__transceivers", ":", "if", "t", ".", "kind", "==", "media", ".", "kind", "and", "t", ".", "mid", "in", "[", "None", ",", "media", ".", "rtp", ".", "muxId", "]", ":", "transceiver", "=", "t", "if", "transceiver", "is", "None", ":", "transceiver", "=", "self", ".", "__createTransceiver", "(", "direction", "=", "'recvonly'", ",", "kind", "=", "media", ".", "kind", ")", "if", "transceiver", ".", "mid", "is", "None", ":", "transceiver", ".", "_set_mid", "(", "media", ".", "rtp", ".", "muxId", ")", "transceiver", ".", "_set_mline_index", "(", "i", ")", "# negotiate codecs", "common", "=", "filter_preferred_codecs", "(", "find_common_codecs", "(", "CODECS", "[", "media", ".", "kind", "]", ",", "media", ".", "rtp", ".", "codecs", ")", ",", "transceiver", ".", "_preferred_codecs", ")", "assert", "len", "(", "common", ")", "transceiver", ".", "_codecs", "=", "common", "transceiver", ".", "_headerExtensions", "=", "find_common_header_extensions", "(", "HEADER_EXTENSIONS", "[", "media", ".", "kind", "]", ",", "media", ".", "rtp", ".", "headerExtensions", ")", "# configure transport", "iceTransport", "=", "transceiver", ".", "_transport", ".", "transport", "add_remote_candidates", "(", "iceTransport", ",", "media", ")", "self", ".", "__remoteDtls", "[", "transceiver", "]", "=", "media", ".", "dtls", "self", ".", "__remoteIce", "[", "transceiver", "]", "=", "media", ".", "ice", "# configure direction", "direction", "=", "reverse_direction", "(", "media", ".", "direction", ")", "if", "description", ".", "type", "in", "[", "'answer'", ",", "'pranswer'", "]", ":", "transceiver", ".", "_currentDirection", "=", "direction", "else", ":", "transceiver", ".", "_offerDirection", "=", "direction", "# create remote stream track", "if", "direction", "in", "[", "'recvonly'", ",", "'sendrecv'", "]", "and", "not", "transceiver", ".", "receiver", ".", "_track", ":", "transceiver", ".", "receiver", ".", "_track", "=", "RemoteStreamTrack", "(", "kind", "=", "media", ".", "kind", ")", "trackEvents", ".", "append", "(", "RTCTrackEvent", "(", "receiver", "=", "transceiver", ".", "receiver", ",", "track", "=", "transceiver", ".", "receiver", ".", "_track", ",", "transceiver", "=", "transceiver", ",", ")", ")", "elif", "media", ".", "kind", "==", "'application'", ":", "if", "not", "self", ".", "__sctp", ":", "self", ".", "__createSctpTransport", "(", ")", "if", "self", ".", "__sctp", ".", "mid", "is", "None", ":", "self", ".", "__sctp", ".", "mid", "=", "media", ".", "rtp", ".", "muxId", "self", ".", "__sctp_mline_index", "=", "i", "# configure sctp", "if", "media", ".", "profile", "==", "'DTLS/SCTP'", ":", "self", ".", "_sctpLegacySdp", "=", "True", "self", ".", "__sctpRemotePort", "=", "int", "(", "media", ".", "fmt", "[", "0", "]", ")", "else", ":", 
"self", ".", "_sctpLegacySdp", "=", "False", "self", ".", "__sctpRemotePort", "=", "media", ".", "sctp_port", "self", ".", "__sctpRemoteCaps", "=", "media", ".", "sctpCapabilities", "# configure transport", "iceTransport", "=", "self", ".", "__sctp", ".", "transport", ".", "transport", "add_remote_candidates", "(", "iceTransport", ",", "media", ")", "self", ".", "__remoteDtls", "[", "self", ".", "__sctp", "]", "=", "media", ".", "dtls", "self", ".", "__remoteIce", "[", "self", ".", "__sctp", "]", "=", "media", ".", "ice", "# remove bundled transports", "bundle", "=", "next", "(", "(", "x", "for", "x", "in", "description", ".", "group", "if", "x", ".", "semantic", "==", "'BUNDLE'", ")", ",", "None", ")", "if", "bundle", "and", "bundle", ".", "items", ":", "# find main media stream", "masterMid", "=", "bundle", ".", "items", "[", "0", "]", "masterTransport", "=", "None", "for", "transceiver", "in", "self", ".", "__transceivers", ":", "if", "transceiver", ".", "mid", "==", "masterMid", ":", "masterTransport", "=", "transceiver", ".", "_transport", "break", "if", "self", ".", "__sctp", "and", "self", ".", "__sctp", ".", "mid", "==", "masterMid", ":", "masterTransport", "=", "self", ".", "__sctp", ".", "transport", "# replace transport for bundled media", "oldTransports", "=", "set", "(", ")", "slaveMids", "=", "bundle", ".", "items", "[", "1", ":", "]", "for", "transceiver", "in", "self", ".", "__transceivers", ":", "if", "transceiver", ".", "mid", "in", "slaveMids", "and", "not", "transceiver", ".", "_bundled", ":", "oldTransports", ".", "add", "(", "transceiver", ".", "_transport", ")", "transceiver", ".", "receiver", ".", "setTransport", "(", "masterTransport", ")", "transceiver", ".", "sender", ".", "setTransport", "(", "masterTransport", ")", "transceiver", ".", "_bundled", "=", "True", "transceiver", ".", "_transport", "=", "masterTransport", "if", "self", ".", "__sctp", "and", "self", ".", "__sctp", ".", "mid", "in", "slaveMids", ":", "oldTransports", ".", "add", "(", "self", ".", "__sctp", ".", "transport", ")", "self", ".", "__sctp", ".", "setTransport", "(", "masterTransport", ")", "self", ".", "__sctp", ".", "_bundled", "=", "True", "# stop and discard old ICE transports", "for", "dtlsTransport", "in", "oldTransports", ":", "await", "dtlsTransport", ".", "stop", "(", ")", "await", "dtlsTransport", ".", "transport", ".", "stop", "(", ")", "self", ".", "__iceTransports", ".", "discard", "(", "dtlsTransport", ".", "transport", ")", "self", ".", "__updateIceGatheringState", "(", ")", "self", ".", "__updateIceConnectionState", "(", ")", "# FIXME: in aiortc 1.0.0 emit RTCTrackEvent directly", "for", "event", "in", "trackEvents", ":", "self", ".", "emit", "(", "'track'", ",", "event", ".", "track", ")", "# connect", "asyncio", ".", "ensure_future", "(", "self", ".", "__connect", "(", ")", ")", "# update signaling state", "if", "description", ".", "type", "==", "'offer'", ":", "self", ".", "__setSignalingState", "(", "'have-remote-offer'", ")", "elif", "description", ".", "type", "==", "'answer'", ":", "self", ".", "__setSignalingState", "(", "'stable'", ")", "# replace description", "if", "description", ".", "type", "==", "'answer'", ":", "self", ".", "__currentRemoteDescription", "=", "description", "self", ".", "__pendingRemoteDescription", "=", "None", "else", ":", "self", ".", "__pendingRemoteDescription", "=", "description" ]
44.147059
17.705882
def users_identity(self, **kwargs) -> SlackResponse:
    """Get a user's identity."""
    self._validate_xoxp_token()
    return self.api_call("users.identity", http_verb="GET", params=kwargs)
[ "def", "users_identity", "(", "self", ",", "*", "*", "kwargs", ")", "->", "SlackResponse", ":", "self", ".", "_validate_xoxp_token", "(", ")", "return", "self", ".", "api_call", "(", "\"users.identity\"", ",", "http_verb", "=", "\"GET\"", ",", "params", "=", "kwargs", ")" ]
50.25
13.75
def drawBackground(self, painter, rect):
    """
    Draws the background of the scene using painter.

    :param      painter | <QPainter>
                rect    | <QRectF>
    """
    if (self._rebuildRequired):
        self.rebuild()

    super(XCalendarScene, self).drawBackground(painter, rect)

    palette = self.palette()

    # draw custom options
    if ('curr_date' in self._buildData):
        clr = palette.color(QPalette.Highlight)
        clr.setAlpha(40)
        painter.setBrush(clr)
        painter.setPen(Qt.NoPen)
        painter.drawRect(self._buildData['curr_date'])
        painter.setBrush(Qt.NoBrush)

    if ('today' in self._buildData):
        painter.setPen(Qt.NoPen)
        clr = palette.color(QPalette.AlternateBase)
        clr.setAlpha(120)
        painter.setBrush(clr)
        painter.drawRect(self._buildData['today'])
        painter.setBrush(Qt.NoBrush)

    # draw the grid
    painter.setPen(palette.color(QPalette.Mid))
    painter.drawLines(self._buildData.get('grid', []))

    # draw text fields
    painter.setPen(palette.color(QPalette.Text))
    for data in self._buildData.get('regular_text', []):
        painter.drawText(*data)

    # draw mid text fields
    painter.setPen(palette.color(QPalette.Mid))
    for data in self._buildData.get('mid_text', []):
        painter.drawText(*data)
[ "def", "drawBackground", "(", "self", ",", "painter", ",", "rect", ")", ":", "if", "(", "self", ".", "_rebuildRequired", ")", ":", "self", ".", "rebuild", "(", ")", "super", "(", "XCalendarScene", ",", "self", ")", ".", "drawBackground", "(", "painter", ",", "rect", ")", "palette", "=", "self", ".", "palette", "(", ")", "# draw custom options\r", "if", "(", "'curr_date'", "in", "self", ".", "_buildData", ")", ":", "clr", "=", "palette", ".", "color", "(", "QPalette", ".", "Highlight", ")", "clr", ".", "setAlpha", "(", "40", ")", "painter", ".", "setBrush", "(", "clr", ")", "painter", ".", "setPen", "(", "Qt", ".", "NoPen", ")", "painter", ".", "drawRect", "(", "self", ".", "_buildData", "[", "'curr_date'", "]", ")", "painter", ".", "setBrush", "(", "Qt", ".", "NoBrush", ")", "if", "(", "'today'", "in", "self", ".", "_buildData", ")", ":", "painter", ".", "setPen", "(", "Qt", ".", "NoPen", ")", "clr", "=", "palette", ".", "color", "(", "QPalette", ".", "AlternateBase", ")", "clr", ".", "setAlpha", "(", "120", ")", "painter", ".", "setBrush", "(", "clr", ")", "painter", ".", "drawRect", "(", "self", ".", "_buildData", "[", "'today'", "]", ")", "painter", ".", "setBrush", "(", "Qt", ".", "NoBrush", ")", "# draw the grid\r", "painter", ".", "setPen", "(", "palette", ".", "color", "(", "QPalette", ".", "Mid", ")", ")", "painter", ".", "drawLines", "(", "self", ".", "_buildData", ".", "get", "(", "'grid'", ",", "[", "]", ")", ")", "# draw text fields\r", "painter", ".", "setPen", "(", "palette", ".", "color", "(", "QPalette", ".", "Text", ")", ")", "for", "data", "in", "self", ".", "_buildData", ".", "get", "(", "'regular_text'", ",", "[", "]", ")", ":", "painter", ".", "drawText", "(", "*", "data", ")", "# draw mid text fields\r", "painter", ".", "setPen", "(", "palette", ".", "color", "(", "QPalette", ".", "Mid", ")", ")", "for", "data", "in", "self", ".", "_buildData", ".", "get", "(", "'mid_text'", ",", "[", "]", ")", ":", "painter", ".", "drawText", "(", "*", "data", ")" ]
35.113636
13.227273
def require_dataset(self, name, shape, dtype=None, exact=False, **kwargs):
    """Obtain an array, creating if it doesn't exist. Other `kwargs` are as per
    :func:`zarr.hierarchy.Group.create_dataset`.

    Parameters
    ----------
    name : string
        Array name.
    shape : int or tuple of ints
        Array shape.
    dtype : string or dtype, optional
        NumPy dtype.
    exact : bool, optional
        If True, require `dtype` to match exactly. If false, require
        `dtype` can be cast from array dtype.

    """
    return self._write_op(self._require_dataset_nosync, name, shape=shape,
                          dtype=dtype, exact=exact, **kwargs)
[ "def", "require_dataset", "(", "self", ",", "name", ",", "shape", ",", "dtype", "=", "None", ",", "exact", "=", "False", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "_write_op", "(", "self", ".", "_require_dataset_nosync", ",", "name", ",", "shape", "=", "shape", ",", "dtype", "=", "dtype", ",", "exact", "=", "exact", ",", "*", "*", "kwargs", ")" ]
35.95
20.2
def process_equations(key, value, fmt, meta):
    """Processes the attributed equations."""

    if key == 'Math' and len(value) == 3:

        # Process the equation
        eq = _process_equation(value, fmt)

        # Get the attributes and label
        attrs = eq['attrs']
        label = attrs[0]

        if eq['is_unreferenceable']:
            attrs[0] = ''  # The label isn't needed outside this function

        # Context-dependent output
        if eq['is_unnumbered']:  # Unnumbered is also unreferenceable
            return None

        elif fmt in ['latex', 'beamer']:
            return RawInline('tex',
                             r'\begin{equation}%s\end{equation}'%value[-1])

        elif fmt in ('html', 'html5') and LABEL_PATTERN.match(label):
            # Present equation and its number in a span
            text = str(references[label])
            outerspan = RawInline('html',
                                  '<span %s style="display: inline-block; '
                                  'position: relative; width: 100%%">'%(''\
                                  if eq['is_unreferenceable'] \
                                  else 'id="%s"'%label))
            innerspan = RawInline('html',
                                  '<span style="position: absolute; '
                                  'right: 0em; top: %s; line-height:0; '
                                  'text-align: right">' %
                                  ('0' if text.startswith('$') and
                                   text.endswith('$') else '50%',))
            num = Math({"t":"InlineMath"}, '(%s)' % text[1:-1]) \
              if text.startswith('$') and text.endswith('$') \
              else Str('(%s)' % text)
            endspans = RawInline('html', '</span></span>')
            return [outerspan, AttrMath(*value), innerspan, num, endspans]

        elif fmt == 'docx':
            # As per http://officeopenxml.com/WPhyperlink.php
            bookmarkstart = \
                RawInline('openxml',
                          '<w:bookmarkStart w:id="0" w:name="%s"/><w:r><w:t>'
                          %label)
            bookmarkend = \
                RawInline('openxml', '</w:t></w:r><w:bookmarkEnd w:id="0"/>')
            return [bookmarkstart, AttrMath(*value), bookmarkend]

    return None
[ "def", "process_equations", "(", "key", ",", "value", ",", "fmt", ",", "meta", ")", ":", "if", "key", "==", "'Math'", "and", "len", "(", "value", ")", "==", "3", ":", "# Process the equation", "eq", "=", "_process_equation", "(", "value", ",", "fmt", ")", "# Get the attributes and label", "attrs", "=", "eq", "[", "'attrs'", "]", "label", "=", "attrs", "[", "0", "]", "if", "eq", "[", "'is_unreferenceable'", "]", ":", "attrs", "[", "0", "]", "=", "''", "# The label isn't needed outside this function", "# Context-dependent output", "if", "eq", "[", "'is_unnumbered'", "]", ":", "# Unnumbered is also unreferenceable", "return", "None", "elif", "fmt", "in", "[", "'latex'", ",", "'beamer'", "]", ":", "return", "RawInline", "(", "'tex'", ",", "r'\\begin{equation}%s\\end{equation}'", "%", "value", "[", "-", "1", "]", ")", "elif", "fmt", "in", "(", "'html'", ",", "'html5'", ")", "and", "LABEL_PATTERN", ".", "match", "(", "label", ")", ":", "# Present equation and its number in a span", "text", "=", "str", "(", "references", "[", "label", "]", ")", "outerspan", "=", "RawInline", "(", "'html'", ",", "'<span %s style=\"display: inline-block; '", "'position: relative; width: 100%%\">'", "%", "(", "''", "if", "eq", "[", "'is_unreferenceable'", "]", "else", "'id=\"%s\"'", "%", "label", ")", ")", "innerspan", "=", "RawInline", "(", "'html'", ",", "'<span style=\"position: absolute; '", "'right: 0em; top: %s; line-height:0; '", "'text-align: right\">'", "%", "(", "'0'", "if", "text", ".", "startswith", "(", "'$'", ")", "and", "text", ".", "endswith", "(", "'$'", ")", "else", "'50%'", ",", ")", ")", "num", "=", "Math", "(", "{", "\"t\"", ":", "\"InlineMath\"", "}", ",", "'(%s)'", "%", "text", "[", "1", ":", "-", "1", "]", ")", "if", "text", ".", "startswith", "(", "'$'", ")", "and", "text", ".", "endswith", "(", "'$'", ")", "else", "Str", "(", "'(%s)'", "%", "text", ")", "endspans", "=", "RawInline", "(", "'html'", ",", "'</span></span>'", ")", "return", "[", "outerspan", ",", "AttrMath", "(", "*", "value", ")", ",", "innerspan", ",", "num", ",", "endspans", "]", "elif", "fmt", "==", "'docx'", ":", "# As per http://officeopenxml.com/WPhyperlink.php", "bookmarkstart", "=", "RawInline", "(", "'openxml'", ",", "'<w:bookmarkStart w:id=\"0\" w:name=\"%s\"/><w:r><w:t>'", "%", "label", ")", "bookmarkend", "=", "RawInline", "(", "'openxml'", ",", "'</w:t></w:r><w:bookmarkEnd w:id=\"0\"/>'", ")", "return", "[", "bookmarkstart", ",", "AttrMath", "(", "*", "value", ")", ",", "bookmarkend", "]", "return", "None" ]
44.745098
18.72549
def align(time, time2, magnitude, magnitude2, error, error2):
    """Synchronizes the light-curves in the two different bands.

    Returns
    -------
    aligned_time
    aligned_magnitude
    aligned_magnitude2
    aligned_error
    aligned_error2
    """
    error = np.zeros(time.shape) if error is None else error
    error2 = np.zeros(time2.shape) if error2 is None else error2

    # this asume that the first series is the short one
    sserie = pd.DataFrame({"mag": magnitude, "error": error}, index=time)
    lserie = pd.DataFrame({"mag": magnitude2, "error": error2}, index=time2)

    # if the second serie is logest then revert
    if len(time) > len(time2):
        sserie, lserie = lserie, sserie

    # make the merge
    merged = sserie.join(lserie, how="inner", rsuffix='2')

    # recreate columns
    new_time = merged.index.values
    new_mag, new_mag2 = merged.mag.values, merged.mag2.values
    new_error, new_error2 = merged.error.values, merged.error2.values

    if len(time) > len(time2):
        new_mag, new_mag2 = new_mag2, new_mag
        new_error, new_error2 = new_error2, new_error

    return new_time, new_mag, new_mag2, new_error, new_error2
[ "def", "align", "(", "time", ",", "time2", ",", "magnitude", ",", "magnitude2", ",", "error", ",", "error2", ")", ":", "error", "=", "np", ".", "zeros", "(", "time", ".", "shape", ")", "if", "error", "is", "None", "else", "error", "error2", "=", "np", ".", "zeros", "(", "time2", ".", "shape", ")", "if", "error2", "is", "None", "else", "error2", "# this asume that the first series is the short one", "sserie", "=", "pd", ".", "DataFrame", "(", "{", "\"mag\"", ":", "magnitude", ",", "\"error\"", ":", "error", "}", ",", "index", "=", "time", ")", "lserie", "=", "pd", ".", "DataFrame", "(", "{", "\"mag\"", ":", "magnitude2", ",", "\"error\"", ":", "error2", "}", ",", "index", "=", "time2", ")", "# if the second serie is logest then revert", "if", "len", "(", "time", ")", ">", "len", "(", "time2", ")", ":", "sserie", ",", "lserie", "=", "lserie", ",", "sserie", "# make the merge", "merged", "=", "sserie", ".", "join", "(", "lserie", ",", "how", "=", "\"inner\"", ",", "rsuffix", "=", "'2'", ")", "# recreate columns", "new_time", "=", "merged", ".", "index", ".", "values", "new_mag", ",", "new_mag2", "=", "merged", ".", "mag", ".", "values", ",", "merged", ".", "mag2", ".", "values", "new_error", ",", "new_error2", "=", "merged", ".", "error", ".", "values", ",", "merged", ".", "error2", ".", "values", "if", "len", "(", "time", ")", ">", "len", "(", "time2", ")", ":", "new_mag", ",", "new_mag2", "=", "new_mag2", ",", "new_mag", "new_error", ",", "new_error2", "=", "new_error2", ",", "new_error", "return", "new_time", ",", "new_mag", ",", "new_mag2", ",", "new_error", ",", "new_error2" ]
30.131579
23.473684
def _new_pool(self, scheme, host, port):
    """
    Create a new :class:`ConnectionPool` based on host, port and scheme.

    This method is used to actually create the connection pools handed out
    by :meth:`connection_from_url` and companion methods. It is intended
    to be overridden for customization.
    """
    pool_cls = pool_classes_by_scheme[scheme]
    kwargs = self.connection_pool_kw
    if scheme == 'http':
        kwargs = self.connection_pool_kw.copy()
        for kw in SSL_KEYWORDS:
            kwargs.pop(kw, None)

    return pool_cls(host, port, **kwargs)
[ "def", "_new_pool", "(", "self", ",", "scheme", ",", "host", ",", "port", ")", ":", "pool_cls", "=", "pool_classes_by_scheme", "[", "scheme", "]", "kwargs", "=", "self", ".", "connection_pool_kw", "if", "scheme", "==", "'http'", ":", "kwargs", "=", "self", ".", "connection_pool_kw", ".", "copy", "(", ")", "for", "kw", "in", "SSL_KEYWORDS", ":", "kwargs", ".", "pop", "(", "kw", ",", "None", ")", "return", "pool_cls", "(", "host", ",", "port", ",", "*", "*", "kwargs", ")" ]
38.6875
14.9375
def _check_default_index(items, default_index):
    '''Check that the default is in the list, and not empty'''
    num_items = len(items)
    if default_index is not None and not isinstance(default_index, int):
        raise TypeError("The default index ({}) is not an integer".format(default_index))
    if default_index is not None and default_index >= num_items:
        raise ValueError("The default index ({}) >= length of the list ({})".format(default_index, num_items))
    if default_index is not None and default_index < 0:
        raise ValueError("The default index ({}) < 0.".format(default_index))
    if default_index is not None and not items[default_index]:
        raise ValueError("The default index ({}) points to an empty item.".format(default_index))
[ "def", "_check_default_index", "(", "items", ",", "default_index", ")", ":", "num_items", "=", "len", "(", "items", ")", "if", "default_index", "is", "not", "None", "and", "not", "isinstance", "(", "default_index", ",", "int", ")", ":", "raise", "TypeError", "(", "\"The default index ({}) is not an integer\"", ".", "format", "(", "default_index", ")", ")", "if", "default_index", "is", "not", "None", "and", "default_index", ">=", "num_items", ":", "raise", "ValueError", "(", "\"The default index ({}) >= length of the list ({})\"", ".", "format", "(", "default_index", ",", "num_items", ")", ")", "if", "default_index", "is", "not", "None", "and", "default_index", "<", "0", ":", "raise", "ValueError", "(", "\"The default index ({}) < 0.\"", ".", "format", "(", "default_index", ")", ")", "if", "default_index", "is", "not", "None", "and", "not", "items", "[", "default_index", "]", ":", "raise", "ValueError", "(", "\"The default index ({}) points to an empty item.\"", ".", "format", "(", "default_index", ")", ")" ]
69.181818
31.727273
def density_between_circular_annuli_in_angular_units(self, inner_annuli_radius, outer_annuli_radius):
    """Calculate the mass between two circular annuli and compute the density by dividing by the annuli surface area.

    The value returned by the mass integral is dimensionless, therefore the density between annuli is returned in \
    units of inverse radius squared. A conversion factor can be specified to convert this to a physical value \
    (e.g. the critical surface mass density).

    Parameters
    -----------
    inner_annuli_radius : float
        The radius of the inner annulus outside of which the density are estimated.
    outer_annuli_radius : float
        The radius of the outer annulus inside of which the density is estimated.
    """
    annuli_area = (np.pi * outer_annuli_radius ** 2.0) - (np.pi * inner_annuli_radius ** 2.0)
    return (self.mass_within_circle_in_units(radius=outer_annuli_radius) -
            self.mass_within_circle_in_units(radius=inner_annuli_radius)) \
        / annuli_area
[ "def", "density_between_circular_annuli_in_angular_units", "(", "self", ",", "inner_annuli_radius", ",", "outer_annuli_radius", ")", ":", "annuli_area", "=", "(", "np", ".", "pi", "*", "outer_annuli_radius", "**", "2.0", ")", "-", "(", "np", ".", "pi", "*", "inner_annuli_radius", "**", "2.0", ")", "return", "(", "self", ".", "mass_within_circle_in_units", "(", "radius", "=", "outer_annuli_radius", ")", "-", "self", ".", "mass_within_circle_in_units", "(", "radius", "=", "inner_annuli_radius", ")", ")", "/", "annuli_area" ]
57.052632
32.736842
def estimate_reduce(interface, state, label, inp):
    """Estimate the cluster centers for each cluster."""
    centers = {}
    for i, c in inp:
        centers[i] = c if i not in centers else state['update'](centers[i], c)
    out = interface.output(0)
    for i, c in centers.items():
        out.add(i, state['finalize'](c))
[ "def", "estimate_reduce", "(", "interface", ",", "state", ",", "label", ",", "inp", ")", ":", "centers", "=", "{", "}", "for", "i", ",", "c", "in", "inp", ":", "centers", "[", "i", "]", "=", "c", "if", "i", "not", "in", "centers", "else", "state", "[", "'update'", "]", "(", "centers", "[", "i", "]", ",", "c", ")", "out", "=", "interface", ".", "output", "(", "0", ")", "for", "i", ",", "c", "in", "centers", ".", "items", "(", ")", ":", "out", ".", "add", "(", "i", ",", "state", "[", "'finalize'", "]", "(", "c", ")", ")" ]
35.666667
16.777778
def generateRevision(self):
    """
    Generates the revision file for this builder.
    """
    revpath = self.sourcePath()
    if not os.path.exists(revpath):
        return

    # determine the revision location
    revfile = os.path.join(revpath, self.revisionFilename())
    mode = ''

    # test for svn revision
    try:
        args = ['svn', 'info', revpath]
        proc = subprocess.Popen(args, stdout=subprocess.PIPE)
        mode = 'svn'
    except WindowsError:
        try:
            args = ['git', 'rev-parse', 'HEAD', revpath]
            proc = subprocess.Popen(args, stdout=subprocess.PIPE)
            mode = 'git'
        except WindowsError:
            return

    # process SVN revision
    rev = None
    if mode == 'svn':
        for line in proc.stdout:
            data = re.match('^Revision: (\d+)', line)
            if data:
                rev = int(data.group(1))
                break

    if rev is not None:
        try:
            f = open(revfile, 'w')
            f.write('__revision__ = {0}\n'.format(rev))
            f.close()
        except IOError:
            pass
[ "def", "generateRevision", "(", "self", ")", ":", "revpath", "=", "self", ".", "sourcePath", "(", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "revpath", ")", ":", "return", "# determine the revision location", "revfile", "=", "os", ".", "path", ".", "join", "(", "revpath", ",", "self", ".", "revisionFilename", "(", ")", ")", "mode", "=", "''", "# test for svn revision", "try", ":", "args", "=", "[", "'svn'", ",", "'info'", ",", "revpath", "]", "proc", "=", "subprocess", ".", "Popen", "(", "args", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", "mode", "=", "'svn'", "except", "WindowsError", ":", "try", ":", "args", "=", "[", "'git'", ",", "'rev-parse'", ",", "'HEAD'", ",", "revpath", "]", "proc", "=", "subprocess", ".", "Popen", "(", "args", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", "mode", "=", "'git'", "except", "WindowsError", ":", "return", "# process SVN revision", "rev", "=", "None", "if", "mode", "==", "'svn'", ":", "for", "line", "in", "proc", ".", "stdout", ":", "data", "=", "re", ".", "match", "(", "'^Revision: (\\d+)'", ",", "line", ")", "if", "data", ":", "rev", "=", "int", "(", "data", ".", "group", "(", "1", ")", ")", "break", "if", "rev", "is", "not", "None", ":", "try", ":", "f", "=", "open", "(", "revfile", ",", "'w'", ")", "f", ".", "write", "(", "'__revision__ = {0}\\n'", ".", "format", "(", "rev", ")", ")", "f", ".", "close", "(", ")", "except", "IOError", ":", "pass" ]
29.682927
16.463415
async def _handle_container_timeout(self, container_id, timeout):
    """
    Check timeout with docker stats
    :param container_id:
    :param timeout: in seconds (cpu time)
    """
    try:
        docker_stats = await self._docker_interface.get_stats(container_id)
        source = AsyncIteratorWrapper(docker_stats)
        nano_timeout = timeout * (10 ** 9)
        async for upd in source:
            if upd is None:
                await self._kill_it_with_fire(container_id)
            self._logger.debug("%i", upd['cpu_stats']['cpu_usage']['total_usage'])
            if upd['cpu_stats']['cpu_usage']['total_usage'] > nano_timeout:
                self._logger.info("Killing container %s as it used %i CPU seconds (max was %i)",
                                  container_id,
                                  int(upd['cpu_stats']['cpu_usage']['total_usage'] / (10 ** 9)),
                                  timeout)
                await self._kill_it_with_fire(container_id)
                return
    except asyncio.CancelledError:
        pass
    except:
        self._logger.exception("Exception in _handle_container_timeout")
[ "async", "def", "_handle_container_timeout", "(", "self", ",", "container_id", ",", "timeout", ")", ":", "try", ":", "docker_stats", "=", "await", "self", ".", "_docker_interface", ".", "get_stats", "(", "container_id", ")", "source", "=", "AsyncIteratorWrapper", "(", "docker_stats", ")", "nano_timeout", "=", "timeout", "*", "(", "10", "**", "9", ")", "async", "for", "upd", "in", "source", ":", "if", "upd", "is", "None", ":", "await", "self", ".", "_kill_it_with_fire", "(", "container_id", ")", "self", ".", "_logger", ".", "debug", "(", "\"%i\"", ",", "upd", "[", "'cpu_stats'", "]", "[", "'cpu_usage'", "]", "[", "'total_usage'", "]", ")", "if", "upd", "[", "'cpu_stats'", "]", "[", "'cpu_usage'", "]", "[", "'total_usage'", "]", ">", "nano_timeout", ":", "self", ".", "_logger", ".", "info", "(", "\"Killing container %s as it used %i CPU seconds (max was %i)\"", ",", "container_id", ",", "int", "(", "upd", "[", "'cpu_stats'", "]", "[", "'cpu_usage'", "]", "[", "'total_usage'", "]", "/", "(", "10", "**", "9", ")", ")", ",", "timeout", ")", "await", "self", ".", "_kill_it_with_fire", "(", "container_id", ")", "return", "except", "asyncio", ".", "CancelledError", ":", "pass", "except", ":", "self", ".", "_logger", ".", "exception", "(", "\"Exception in _handle_container_timeout\"", ")" ]
49.695652
22.565217
async def popHiveKey(self, path):
    '''
    Remove and return the value of a key in the cell default hive
    '''
    perm = ('hive:pop',) + path
    self.user.allowed(perm)
    return await self.cell.hive.pop(path)
[ "async", "def", "popHiveKey", "(", "self", ",", "path", ")", ":", "perm", "=", "(", "'hive:pop'", ",", ")", "+", "path", "self", ".", "user", ".", "allowed", "(", "perm", ")", "return", "await", "self", ".", "cell", ".", "hive", ".", "pop", "(", "path", ")" ]
44.2
12.6
def update_coordinates(self, new_coordinates):
    """
    new_coordinates : dict
    """
    for k, v in new_coordinates.items():
        if k in self.coordinates:
            self.coordinates[k] = v

    for svertex in self.spawn_list:
        verts = tuple([self.coordinates.get(ch, None) for ch in svertex.channels])
        if len(svertex.channels) == 1:
            # This means a histogram
            svertex.update_position(verts[0], None)
        else:
            svertex.update_position(verts[0], verts[1])

    self.callback(Event(Event.BASE_GATE_CHANGED))
[ "def", "update_coordinates", "(", "self", ",", "new_coordinates", ")", ":", "for", "k", ",", "v", "in", "new_coordinates", ".", "items", "(", ")", ":", "if", "k", "in", "self", ".", "coordinates", ":", "self", ".", "coordinates", "[", "k", "]", "=", "v", "for", "svertex", "in", "self", ".", "spawn_list", ":", "verts", "=", "tuple", "(", "[", "self", ".", "coordinates", ".", "get", "(", "ch", ",", "None", ")", "for", "ch", "in", "svertex", ".", "channels", "]", ")", "if", "len", "(", "svertex", ".", "channels", ")", "==", "1", ":", "# This means a histogram", "svertex", ".", "update_position", "(", "verts", "[", "0", "]", ",", "None", ")", "else", ":", "svertex", ".", "update_position", "(", "verts", "[", "0", "]", ",", "verts", "[", "1", "]", ")", "self", ".", "callback", "(", "Event", "(", "Event", ".", "BASE_GATE_CHANGED", ")", ")" ]
37.1875
15.5625
def add_comment(self, comment):
    """
    Add a comment to the database.

    Args:
        comment (hotdoc.core.Comment): comment to add
    """
    if not comment:
        return

    self.__comments[comment.name] = comment
    self.comment_added_signal(self, comment)
[ "def", "add_comment", "(", "self", ",", "comment", ")", ":", "if", "not", "comment", ":", "return", "self", ".", "__comments", "[", "comment", ".", "name", "]", "=", "comment", "self", ".", "comment_added_signal", "(", "self", ",", "comment", ")" ]
24.75
15.75
def manage_file(name, sfn, ret, source, source_sum, user, group, mode, attrs, saltenv, backup, makedirs=False, template=None, # pylint: disable=W0613 show_changes=True, contents=None, dir_mode=None, follow_symlinks=True, skip_verify=False, keep_mode=False, encoding=None, encoding_errors='strict', seuser=None, serole=None, setype=None, serange=None, **kwargs): ''' Checks the destination against what was retrieved with get_managed and makes the appropriate modifications (if necessary). name location to place the file sfn location of cached file on the minion This is the path to the file stored on the minion. This file is placed on the minion using cp.cache_file. If the hash sum of that file matches the source_sum, we do not transfer the file to the minion again. This file is then grabbed and if it has template set, it renders the file to be placed into the correct place on the system using salt.files.utils.copyfile() ret The initial state return data structure. Pass in ``None`` to use the default structure. source file reference on the master source_sum sum hash for source user user owner group group owner backup backup_mode attrs attributes to be set on file: '' means remove all of them .. versionadded:: 2018.3.0 makedirs make directories if they do not exist template format of templating show_changes Include diff in state return contents: contents to be placed in the file dir_mode mode for directories created with makedirs skip_verify : False If ``True``, hash verification of remote file sources (``http://``, ``https://``, ``ftp://``) will be skipped, and the ``source_hash`` argument will be ignored. .. versionadded:: 2016.3.0 keep_mode : False If ``True``, and the ``source`` is a file from the Salt fileserver (or a local file on the minion), the mode of the destination file will be set to the mode of the source file. .. note:: keep_mode does not work with salt-ssh. As a consequence of how the files are transferred to the minion, and the inability to connect back to the master with salt-ssh, salt is unable to stat the file as it exists on the fileserver and thus cannot mirror the mode on the salt-ssh minion encoding If specified, then the specified encoding will be used. Otherwise, the file will be encoded using the system locale (usually UTF-8). See https://docs.python.org/3/library/codecs.html#standard-encodings for the list of available encodings. .. versionadded:: 2017.7.0 encoding_errors : 'strict' Default is ```'strict'```. See https://docs.python.org/2/library/codecs.html#codec-base-classes for the error handling schemes. .. versionadded:: 2017.7.0 seuser selinux user attribute .. versionadded:: Neon serange selinux range attribute .. versionadded:: Neon setype selinux type attribute .. versionadded:: Neon serange selinux range attribute .. versionadded:: Neon CLI Example: .. code-block:: bash salt '*' file.manage_file /etc/httpd/conf.d/httpd.conf '' '{}' salt://http/httpd.conf '{hash_type: 'md5', 'hsum': <md5sum>}' root root '755' '' base '' .. 
versionchanged:: 2014.7.0 ``follow_symlinks`` option added ''' name = os.path.expanduser(name) if not ret: ret = {'name': name, 'changes': {}, 'comment': '', 'result': True} # Ensure that user-provided hash string is lowercase if source_sum and ('hsum' in source_sum): source_sum['hsum'] = source_sum['hsum'].lower() if source: if not sfn: # File is not present, cache it sfn = __salt__['cp.cache_file'](source, saltenv) if not sfn: return _error( ret, 'Source file \'{0}\' not found'.format(source)) htype = source_sum.get('hash_type', __opts__['hash_type']) # Recalculate source sum now that file has been cached source_sum = { 'hash_type': htype, 'hsum': get_hash(sfn, form=htype) } if keep_mode: if _urlparse(source).scheme in ('salt', 'file', ''): try: mode = __salt__['cp.stat_file'](source, saltenv=saltenv, octal=True) except Exception as exc: log.warning('Unable to stat %s: %s', sfn, exc) # Check changes if the target file exists if os.path.isfile(name) or os.path.islink(name): if os.path.islink(name) and follow_symlinks: real_name = os.path.realpath(name) else: real_name = name # Only test the checksums on files with managed contents if source and not (not follow_symlinks and os.path.islink(real_name)): name_sum = get_hash(real_name, source_sum.get('hash_type', __opts__['hash_type'])) else: name_sum = None # Check if file needs to be replaced if source and (name_sum is None or source_sum.get('hsum', __opts__['hash_type']) != name_sum): if not sfn: sfn = __salt__['cp.cache_file'](source, saltenv) if not sfn: return _error( ret, 'Source file \'{0}\' not found'.format(source)) # If the downloaded file came from a non salt server or local # source, and we are not skipping checksum verification, then # verify that it matches the specified checksum. if not skip_verify \ and _urlparse(source).scheme != 'salt': dl_sum = get_hash(sfn, source_sum['hash_type']) if dl_sum != source_sum['hsum']: ret['comment'] = ( 'Specified {0} checksum for {1} ({2}) does not match ' 'actual checksum ({3}). 
If the \'source_hash\' value ' 'refers to a remote file with multiple possible ' 'matches, then it may be necessary to set ' '\'source_hash_name\'.'.format( source_sum['hash_type'], source, source_sum['hsum'], dl_sum ) ) ret['result'] = False return ret # Print a diff equivalent to diff -u old new if __salt__['config.option']('obfuscate_templates'): ret['changes']['diff'] = '<Obfuscated Template>' elif not show_changes: ret['changes']['diff'] = '<show_changes=False>' else: try: ret['changes']['diff'] = get_diff( real_name, sfn, show_filenames=False) except CommandExecutionError as exc: ret['changes']['diff'] = exc.strerror # Pre requisites are met, and the file needs to be replaced, do it try: salt.utils.files.copyfile(sfn, real_name, __salt__['config.backup_mode'](backup), __opts__['cachedir']) except IOError as io_error: __clean_tmp(sfn) return _error( ret, 'Failed to commit change: {0}'.format(io_error)) if contents is not None: # Write the static contents to a temporary file tmp = salt.utils.files.mkstemp(prefix=salt.utils.files.TEMPFILE_PREFIX, text=True) if salt.utils.platform.is_windows(): contents = os.linesep.join( _splitlines_preserving_trailing_newline(contents)) with salt.utils.files.fopen(tmp, 'wb') as tmp_: if encoding: log.debug('File will be encoded with %s', encoding) tmp_.write(contents.encode(encoding=encoding, errors=encoding_errors)) else: tmp_.write(salt.utils.stringutils.to_bytes(contents)) try: differences = get_diff( real_name, tmp, show_filenames=False, show_changes=show_changes, template=True) except CommandExecutionError as exc: ret.setdefault('warnings', []).append( 'Failed to detect changes to file: {0}'.format(exc.strerror) ) differences = '' if differences: ret['changes']['diff'] = differences # Pre requisites are met, the file needs to be replaced, do it try: salt.utils.files.copyfile(tmp, real_name, __salt__['config.backup_mode'](backup), __opts__['cachedir']) except IOError as io_error: __clean_tmp(tmp) return _error( ret, 'Failed to commit change: {0}'.format(io_error)) __clean_tmp(tmp) # Check for changing symlink to regular file here if os.path.islink(name) and not follow_symlinks: if not sfn: sfn = __salt__['cp.cache_file'](source, saltenv) if not sfn: return _error( ret, 'Source file \'{0}\' not found'.format(source)) # If the downloaded file came from a non salt server source verify # that it matches the intended sum value if not skip_verify and _urlparse(source).scheme != 'salt': dl_sum = get_hash(sfn, source_sum['hash_type']) if dl_sum != source_sum['hsum']: ret['comment'] = ( 'Specified {0} checksum for {1} ({2}) does not match ' 'actual checksum ({3})'.format( source_sum['hash_type'], name, source_sum['hsum'], dl_sum ) ) ret['result'] = False return ret try: salt.utils.files.copyfile(sfn, name, __salt__['config.backup_mode'](backup), __opts__['cachedir']) except IOError as io_error: __clean_tmp(sfn) return _error( ret, 'Failed to commit change: {0}'.format(io_error)) ret['changes']['diff'] = \ 'Replace symbolic link with regular file' if salt.utils.platform.is_windows(): # This function resides in win_file.py and will be available # on Windows. 
The local function will be overridden # pylint: disable=E1120,E1121,E1123 ret = check_perms( path=name, ret=ret, owner=kwargs.get('win_owner'), grant_perms=kwargs.get('win_perms'), deny_perms=kwargs.get('win_deny_perms'), inheritance=kwargs.get('win_inheritance', True), reset=kwargs.get('win_perms_reset', False)) # pylint: enable=E1120,E1121,E1123 else: ret, _ = check_perms(name, ret, user, group, mode, attrs, follow_symlinks, seuser=seuser, serole=serole, setype=setype, serange=serange) if ret['changes']: ret['comment'] = 'File {0} updated'.format( salt.utils.data.decode(name) ) elif not ret['changes'] and ret['result']: ret['comment'] = 'File {0} is in the correct state'.format( salt.utils.data.decode(name) ) if sfn: __clean_tmp(sfn) return ret else: # target file does not exist contain_dir = os.path.dirname(name) def _set_mode_and_make_dirs(name, dir_mode, mode, user, group): # check for existence of windows drive letter if salt.utils.platform.is_windows(): drive, _ = os.path.splitdrive(name) if drive and not os.path.exists(drive): __clean_tmp(sfn) return _error(ret, '{0} drive not present'.format(drive)) if dir_mode is None and mode is not None: # Add execute bit to each nonzero digit in the mode, if # dir_mode was not specified. Otherwise, any # directories created with makedirs_() below can't be # listed via a shell. mode_list = [x for x in six.text_type(mode)][-3:] for idx in range(len(mode_list)): if mode_list[idx] != '0': mode_list[idx] = six.text_type(int(mode_list[idx]) | 1) dir_mode = ''.join(mode_list) if salt.utils.platform.is_windows(): # This function resides in win_file.py and will be available # on Windows. The local function will be overridden # pylint: disable=E1120,E1121,E1123 makedirs_( path=name, owner=kwargs.get('win_owner'), grant_perms=kwargs.get('win_perms'), deny_perms=kwargs.get('win_deny_perms'), inheritance=kwargs.get('win_inheritance', True), reset=kwargs.get('win_perms_reset', False)) # pylint: enable=E1120,E1121,E1123 else: makedirs_(name, user=user, group=group, mode=dir_mode) if source: # Apply the new file if not sfn: sfn = __salt__['cp.cache_file'](source, saltenv) if not sfn: return _error( ret, 'Source file \'{0}\' not found'.format(source)) # If the downloaded file came from a non salt server source verify # that it matches the intended sum value if not skip_verify \ and _urlparse(source).scheme != 'salt': dl_sum = get_hash(sfn, source_sum['hash_type']) if dl_sum != source_sum['hsum']: ret['comment'] = ( 'Specified {0} checksum for {1} ({2}) does not match ' 'actual checksum ({3})'.format( source_sum['hash_type'], name, source_sum['hsum'], dl_sum ) ) ret['result'] = False return ret # It is a new file, set the diff accordingly ret['changes']['diff'] = 'New file' if not os.path.isdir(contain_dir): if makedirs: _set_mode_and_make_dirs(name, dir_mode, mode, user, group) else: __clean_tmp(sfn) # No changes actually made ret['changes'].pop('diff', None) return _error(ret, 'Parent directory not present') else: # source != True if not os.path.isdir(contain_dir): if makedirs: _set_mode_and_make_dirs(name, dir_mode, mode, user, group) else: __clean_tmp(sfn) # No changes actually made ret['changes'].pop('diff', None) return _error(ret, 'Parent directory not present') # Create the file, user rw-only if mode will be set to prevent # a small security race problem before the permissions are set with salt.utils.files.set_umask(0o077 if mode else None): # Create a new file when test is False and source is None if contents is None: if not __opts__['test']: if 
touch(name): ret['changes']['new'] = 'file {0} created'.format(name) ret['comment'] = 'Empty file' else: return _error( ret, 'Empty file {0} not created'.format(name) ) else: if not __opts__['test']: if touch(name): ret['changes']['diff'] = 'New file' else: return _error( ret, 'File {0} not created'.format(name) ) if contents is not None: # Write the static contents to a temporary file tmp = salt.utils.files.mkstemp(prefix=salt.utils.files.TEMPFILE_PREFIX, text=True) with salt.utils.files.fopen(tmp, 'wb') as tmp_: if encoding: if salt.utils.platform.is_windows(): contents = os.linesep.join( _splitlines_preserving_trailing_newline(contents)) log.debug('File will be encoded with %s', encoding) tmp_.write(contents.encode(encoding=encoding, errors=encoding_errors)) else: tmp_.write(salt.utils.stringutils.to_bytes(contents)) # Copy into place salt.utils.files.copyfile(tmp, name, __salt__['config.backup_mode'](backup), __opts__['cachedir']) __clean_tmp(tmp) # Now copy the file contents if there is a source file elif sfn: salt.utils.files.copyfile(sfn, name, __salt__['config.backup_mode'](backup), __opts__['cachedir']) __clean_tmp(sfn) # This is a new file, if no mode specified, use the umask to figure # out what mode to use for the new file. if mode is None and not salt.utils.platform.is_windows(): # Get current umask mask = salt.utils.files.get_umask() # Calculate the mode value that results from the umask mode = oct((0o777 ^ mask) & 0o666) if salt.utils.platform.is_windows(): # This function resides in win_file.py and will be available # on Windows. The local function will be overridden # pylint: disable=E1120,E1121,E1123 ret = check_perms( path=name, ret=ret, owner=kwargs.get('win_owner'), grant_perms=kwargs.get('win_perms'), deny_perms=kwargs.get('win_deny_perms'), inheritance=kwargs.get('win_inheritance', True), reset=kwargs.get('win_perms_reset', False)) # pylint: enable=E1120,E1121,E1123 else: ret, _ = check_perms(name, ret, user, group, mode, attrs, seuser=seuser, serole=serole, setype=setype, serange=serange) if not ret['comment']: ret['comment'] = 'File ' + name + ' updated' if __opts__['test']: ret['comment'] = 'File ' + name + ' not updated' elif not ret['changes'] and ret['result']: ret['comment'] = 'File ' + name + ' is in the correct state' if sfn: __clean_tmp(sfn) return ret
[ "def", "manage_file", "(", "name", ",", "sfn", ",", "ret", ",", "source", ",", "source_sum", ",", "user", ",", "group", ",", "mode", ",", "attrs", ",", "saltenv", ",", "backup", ",", "makedirs", "=", "False", ",", "template", "=", "None", ",", "# pylint: disable=W0613", "show_changes", "=", "True", ",", "contents", "=", "None", ",", "dir_mode", "=", "None", ",", "follow_symlinks", "=", "True", ",", "skip_verify", "=", "False", ",", "keep_mode", "=", "False", ",", "encoding", "=", "None", ",", "encoding_errors", "=", "'strict'", ",", "seuser", "=", "None", ",", "serole", "=", "None", ",", "setype", "=", "None", ",", "serange", "=", "None", ",", "*", "*", "kwargs", ")", ":", "name", "=", "os", ".", "path", ".", "expanduser", "(", "name", ")", "if", "not", "ret", ":", "ret", "=", "{", "'name'", ":", "name", ",", "'changes'", ":", "{", "}", ",", "'comment'", ":", "''", ",", "'result'", ":", "True", "}", "# Ensure that user-provided hash string is lowercase", "if", "source_sum", "and", "(", "'hsum'", "in", "source_sum", ")", ":", "source_sum", "[", "'hsum'", "]", "=", "source_sum", "[", "'hsum'", "]", ".", "lower", "(", ")", "if", "source", ":", "if", "not", "sfn", ":", "# File is not present, cache it", "sfn", "=", "__salt__", "[", "'cp.cache_file'", "]", "(", "source", ",", "saltenv", ")", "if", "not", "sfn", ":", "return", "_error", "(", "ret", ",", "'Source file \\'{0}\\' not found'", ".", "format", "(", "source", ")", ")", "htype", "=", "source_sum", ".", "get", "(", "'hash_type'", ",", "__opts__", "[", "'hash_type'", "]", ")", "# Recalculate source sum now that file has been cached", "source_sum", "=", "{", "'hash_type'", ":", "htype", ",", "'hsum'", ":", "get_hash", "(", "sfn", ",", "form", "=", "htype", ")", "}", "if", "keep_mode", ":", "if", "_urlparse", "(", "source", ")", ".", "scheme", "in", "(", "'salt'", ",", "'file'", ",", "''", ")", ":", "try", ":", "mode", "=", "__salt__", "[", "'cp.stat_file'", "]", "(", "source", ",", "saltenv", "=", "saltenv", ",", "octal", "=", "True", ")", "except", "Exception", "as", "exc", ":", "log", ".", "warning", "(", "'Unable to stat %s: %s'", ",", "sfn", ",", "exc", ")", "# Check changes if the target file exists", "if", "os", ".", "path", ".", "isfile", "(", "name", ")", "or", "os", ".", "path", ".", "islink", "(", "name", ")", ":", "if", "os", ".", "path", ".", "islink", "(", "name", ")", "and", "follow_symlinks", ":", "real_name", "=", "os", ".", "path", ".", "realpath", "(", "name", ")", "else", ":", "real_name", "=", "name", "# Only test the checksums on files with managed contents", "if", "source", "and", "not", "(", "not", "follow_symlinks", "and", "os", ".", "path", ".", "islink", "(", "real_name", ")", ")", ":", "name_sum", "=", "get_hash", "(", "real_name", ",", "source_sum", ".", "get", "(", "'hash_type'", ",", "__opts__", "[", "'hash_type'", "]", ")", ")", "else", ":", "name_sum", "=", "None", "# Check if file needs to be replaced", "if", "source", "and", "(", "name_sum", "is", "None", "or", "source_sum", ".", "get", "(", "'hsum'", ",", "__opts__", "[", "'hash_type'", "]", ")", "!=", "name_sum", ")", ":", "if", "not", "sfn", ":", "sfn", "=", "__salt__", "[", "'cp.cache_file'", "]", "(", "source", ",", "saltenv", ")", "if", "not", "sfn", ":", "return", "_error", "(", "ret", ",", "'Source file \\'{0}\\' not found'", ".", "format", "(", "source", ")", ")", "# If the downloaded file came from a non salt server or local", "# source, and we are not skipping checksum verification, then", "# verify that it matches the specified checksum.", 
"if", "not", "skip_verify", "and", "_urlparse", "(", "source", ")", ".", "scheme", "!=", "'salt'", ":", "dl_sum", "=", "get_hash", "(", "sfn", ",", "source_sum", "[", "'hash_type'", "]", ")", "if", "dl_sum", "!=", "source_sum", "[", "'hsum'", "]", ":", "ret", "[", "'comment'", "]", "=", "(", "'Specified {0} checksum for {1} ({2}) does not match '", "'actual checksum ({3}). If the \\'source_hash\\' value '", "'refers to a remote file with multiple possible '", "'matches, then it may be necessary to set '", "'\\'source_hash_name\\'.'", ".", "format", "(", "source_sum", "[", "'hash_type'", "]", ",", "source", ",", "source_sum", "[", "'hsum'", "]", ",", "dl_sum", ")", ")", "ret", "[", "'result'", "]", "=", "False", "return", "ret", "# Print a diff equivalent to diff -u old new", "if", "__salt__", "[", "'config.option'", "]", "(", "'obfuscate_templates'", ")", ":", "ret", "[", "'changes'", "]", "[", "'diff'", "]", "=", "'<Obfuscated Template>'", "elif", "not", "show_changes", ":", "ret", "[", "'changes'", "]", "[", "'diff'", "]", "=", "'<show_changes=False>'", "else", ":", "try", ":", "ret", "[", "'changes'", "]", "[", "'diff'", "]", "=", "get_diff", "(", "real_name", ",", "sfn", ",", "show_filenames", "=", "False", ")", "except", "CommandExecutionError", "as", "exc", ":", "ret", "[", "'changes'", "]", "[", "'diff'", "]", "=", "exc", ".", "strerror", "# Pre requisites are met, and the file needs to be replaced, do it", "try", ":", "salt", ".", "utils", ".", "files", ".", "copyfile", "(", "sfn", ",", "real_name", ",", "__salt__", "[", "'config.backup_mode'", "]", "(", "backup", ")", ",", "__opts__", "[", "'cachedir'", "]", ")", "except", "IOError", "as", "io_error", ":", "__clean_tmp", "(", "sfn", ")", "return", "_error", "(", "ret", ",", "'Failed to commit change: {0}'", ".", "format", "(", "io_error", ")", ")", "if", "contents", "is", "not", "None", ":", "# Write the static contents to a temporary file", "tmp", "=", "salt", ".", "utils", ".", "files", ".", "mkstemp", "(", "prefix", "=", "salt", ".", "utils", ".", "files", ".", "TEMPFILE_PREFIX", ",", "text", "=", "True", ")", "if", "salt", ".", "utils", ".", "platform", ".", "is_windows", "(", ")", ":", "contents", "=", "os", ".", "linesep", ".", "join", "(", "_splitlines_preserving_trailing_newline", "(", "contents", ")", ")", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "tmp", ",", "'wb'", ")", "as", "tmp_", ":", "if", "encoding", ":", "log", ".", "debug", "(", "'File will be encoded with %s'", ",", "encoding", ")", "tmp_", ".", "write", "(", "contents", ".", "encode", "(", "encoding", "=", "encoding", ",", "errors", "=", "encoding_errors", ")", ")", "else", ":", "tmp_", ".", "write", "(", "salt", ".", "utils", ".", "stringutils", ".", "to_bytes", "(", "contents", ")", ")", "try", ":", "differences", "=", "get_diff", "(", "real_name", ",", "tmp", ",", "show_filenames", "=", "False", ",", "show_changes", "=", "show_changes", ",", "template", "=", "True", ")", "except", "CommandExecutionError", "as", "exc", ":", "ret", ".", "setdefault", "(", "'warnings'", ",", "[", "]", ")", ".", "append", "(", "'Failed to detect changes to file: {0}'", ".", "format", "(", "exc", ".", "strerror", ")", ")", "differences", "=", "''", "if", "differences", ":", "ret", "[", "'changes'", "]", "[", "'diff'", "]", "=", "differences", "# Pre requisites are met, the file needs to be replaced, do it", "try", ":", "salt", ".", "utils", ".", "files", ".", "copyfile", "(", "tmp", ",", "real_name", ",", "__salt__", "[", "'config.backup_mode'", "]", "(", 
"backup", ")", ",", "__opts__", "[", "'cachedir'", "]", ")", "except", "IOError", "as", "io_error", ":", "__clean_tmp", "(", "tmp", ")", "return", "_error", "(", "ret", ",", "'Failed to commit change: {0}'", ".", "format", "(", "io_error", ")", ")", "__clean_tmp", "(", "tmp", ")", "# Check for changing symlink to regular file here", "if", "os", ".", "path", ".", "islink", "(", "name", ")", "and", "not", "follow_symlinks", ":", "if", "not", "sfn", ":", "sfn", "=", "__salt__", "[", "'cp.cache_file'", "]", "(", "source", ",", "saltenv", ")", "if", "not", "sfn", ":", "return", "_error", "(", "ret", ",", "'Source file \\'{0}\\' not found'", ".", "format", "(", "source", ")", ")", "# If the downloaded file came from a non salt server source verify", "# that it matches the intended sum value", "if", "not", "skip_verify", "and", "_urlparse", "(", "source", ")", ".", "scheme", "!=", "'salt'", ":", "dl_sum", "=", "get_hash", "(", "sfn", ",", "source_sum", "[", "'hash_type'", "]", ")", "if", "dl_sum", "!=", "source_sum", "[", "'hsum'", "]", ":", "ret", "[", "'comment'", "]", "=", "(", "'Specified {0} checksum for {1} ({2}) does not match '", "'actual checksum ({3})'", ".", "format", "(", "source_sum", "[", "'hash_type'", "]", ",", "name", ",", "source_sum", "[", "'hsum'", "]", ",", "dl_sum", ")", ")", "ret", "[", "'result'", "]", "=", "False", "return", "ret", "try", ":", "salt", ".", "utils", ".", "files", ".", "copyfile", "(", "sfn", ",", "name", ",", "__salt__", "[", "'config.backup_mode'", "]", "(", "backup", ")", ",", "__opts__", "[", "'cachedir'", "]", ")", "except", "IOError", "as", "io_error", ":", "__clean_tmp", "(", "sfn", ")", "return", "_error", "(", "ret", ",", "'Failed to commit change: {0}'", ".", "format", "(", "io_error", ")", ")", "ret", "[", "'changes'", "]", "[", "'diff'", "]", "=", "'Replace symbolic link with regular file'", "if", "salt", ".", "utils", ".", "platform", ".", "is_windows", "(", ")", ":", "# This function resides in win_file.py and will be available", "# on Windows. 
The local function will be overridden", "# pylint: disable=E1120,E1121,E1123", "ret", "=", "check_perms", "(", "path", "=", "name", ",", "ret", "=", "ret", ",", "owner", "=", "kwargs", ".", "get", "(", "'win_owner'", ")", ",", "grant_perms", "=", "kwargs", ".", "get", "(", "'win_perms'", ")", ",", "deny_perms", "=", "kwargs", ".", "get", "(", "'win_deny_perms'", ")", ",", "inheritance", "=", "kwargs", ".", "get", "(", "'win_inheritance'", ",", "True", ")", ",", "reset", "=", "kwargs", ".", "get", "(", "'win_perms_reset'", ",", "False", ")", ")", "# pylint: enable=E1120,E1121,E1123", "else", ":", "ret", ",", "_", "=", "check_perms", "(", "name", ",", "ret", ",", "user", ",", "group", ",", "mode", ",", "attrs", ",", "follow_symlinks", ",", "seuser", "=", "seuser", ",", "serole", "=", "serole", ",", "setype", "=", "setype", ",", "serange", "=", "serange", ")", "if", "ret", "[", "'changes'", "]", ":", "ret", "[", "'comment'", "]", "=", "'File {0} updated'", ".", "format", "(", "salt", ".", "utils", ".", "data", ".", "decode", "(", "name", ")", ")", "elif", "not", "ret", "[", "'changes'", "]", "and", "ret", "[", "'result'", "]", ":", "ret", "[", "'comment'", "]", "=", "'File {0} is in the correct state'", ".", "format", "(", "salt", ".", "utils", ".", "data", ".", "decode", "(", "name", ")", ")", "if", "sfn", ":", "__clean_tmp", "(", "sfn", ")", "return", "ret", "else", ":", "# target file does not exist", "contain_dir", "=", "os", ".", "path", ".", "dirname", "(", "name", ")", "def", "_set_mode_and_make_dirs", "(", "name", ",", "dir_mode", ",", "mode", ",", "user", ",", "group", ")", ":", "# check for existence of windows drive letter", "if", "salt", ".", "utils", ".", "platform", ".", "is_windows", "(", ")", ":", "drive", ",", "_", "=", "os", ".", "path", ".", "splitdrive", "(", "name", ")", "if", "drive", "and", "not", "os", ".", "path", ".", "exists", "(", "drive", ")", ":", "__clean_tmp", "(", "sfn", ")", "return", "_error", "(", "ret", ",", "'{0} drive not present'", ".", "format", "(", "drive", ")", ")", "if", "dir_mode", "is", "None", "and", "mode", "is", "not", "None", ":", "# Add execute bit to each nonzero digit in the mode, if", "# dir_mode was not specified. Otherwise, any", "# directories created with makedirs_() below can't be", "# listed via a shell.", "mode_list", "=", "[", "x", "for", "x", "in", "six", ".", "text_type", "(", "mode", ")", "]", "[", "-", "3", ":", "]", "for", "idx", "in", "range", "(", "len", "(", "mode_list", ")", ")", ":", "if", "mode_list", "[", "idx", "]", "!=", "'0'", ":", "mode_list", "[", "idx", "]", "=", "six", ".", "text_type", "(", "int", "(", "mode_list", "[", "idx", "]", ")", "|", "1", ")", "dir_mode", "=", "''", ".", "join", "(", "mode_list", ")", "if", "salt", ".", "utils", ".", "platform", ".", "is_windows", "(", ")", ":", "# This function resides in win_file.py and will be available", "# on Windows. 
The local function will be overridden", "# pylint: disable=E1120,E1121,E1123", "makedirs_", "(", "path", "=", "name", ",", "owner", "=", "kwargs", ".", "get", "(", "'win_owner'", ")", ",", "grant_perms", "=", "kwargs", ".", "get", "(", "'win_perms'", ")", ",", "deny_perms", "=", "kwargs", ".", "get", "(", "'win_deny_perms'", ")", ",", "inheritance", "=", "kwargs", ".", "get", "(", "'win_inheritance'", ",", "True", ")", ",", "reset", "=", "kwargs", ".", "get", "(", "'win_perms_reset'", ",", "False", ")", ")", "# pylint: enable=E1120,E1121,E1123", "else", ":", "makedirs_", "(", "name", ",", "user", "=", "user", ",", "group", "=", "group", ",", "mode", "=", "dir_mode", ")", "if", "source", ":", "# Apply the new file", "if", "not", "sfn", ":", "sfn", "=", "__salt__", "[", "'cp.cache_file'", "]", "(", "source", ",", "saltenv", ")", "if", "not", "sfn", ":", "return", "_error", "(", "ret", ",", "'Source file \\'{0}\\' not found'", ".", "format", "(", "source", ")", ")", "# If the downloaded file came from a non salt server source verify", "# that it matches the intended sum value", "if", "not", "skip_verify", "and", "_urlparse", "(", "source", ")", ".", "scheme", "!=", "'salt'", ":", "dl_sum", "=", "get_hash", "(", "sfn", ",", "source_sum", "[", "'hash_type'", "]", ")", "if", "dl_sum", "!=", "source_sum", "[", "'hsum'", "]", ":", "ret", "[", "'comment'", "]", "=", "(", "'Specified {0} checksum for {1} ({2}) does not match '", "'actual checksum ({3})'", ".", "format", "(", "source_sum", "[", "'hash_type'", "]", ",", "name", ",", "source_sum", "[", "'hsum'", "]", ",", "dl_sum", ")", ")", "ret", "[", "'result'", "]", "=", "False", "return", "ret", "# It is a new file, set the diff accordingly", "ret", "[", "'changes'", "]", "[", "'diff'", "]", "=", "'New file'", "if", "not", "os", ".", "path", ".", "isdir", "(", "contain_dir", ")", ":", "if", "makedirs", ":", "_set_mode_and_make_dirs", "(", "name", ",", "dir_mode", ",", "mode", ",", "user", ",", "group", ")", "else", ":", "__clean_tmp", "(", "sfn", ")", "# No changes actually made", "ret", "[", "'changes'", "]", ".", "pop", "(", "'diff'", ",", "None", ")", "return", "_error", "(", "ret", ",", "'Parent directory not present'", ")", "else", ":", "# source != True", "if", "not", "os", ".", "path", ".", "isdir", "(", "contain_dir", ")", ":", "if", "makedirs", ":", "_set_mode_and_make_dirs", "(", "name", ",", "dir_mode", ",", "mode", ",", "user", ",", "group", ")", "else", ":", "__clean_tmp", "(", "sfn", ")", "# No changes actually made", "ret", "[", "'changes'", "]", ".", "pop", "(", "'diff'", ",", "None", ")", "return", "_error", "(", "ret", ",", "'Parent directory not present'", ")", "# Create the file, user rw-only if mode will be set to prevent", "# a small security race problem before the permissions are set", "with", "salt", ".", "utils", ".", "files", ".", "set_umask", "(", "0o077", "if", "mode", "else", "None", ")", ":", "# Create a new file when test is False and source is None", "if", "contents", "is", "None", ":", "if", "not", "__opts__", "[", "'test'", "]", ":", "if", "touch", "(", "name", ")", ":", "ret", "[", "'changes'", "]", "[", "'new'", "]", "=", "'file {0} created'", ".", "format", "(", "name", ")", "ret", "[", "'comment'", "]", "=", "'Empty file'", "else", ":", "return", "_error", "(", "ret", ",", "'Empty file {0} not created'", ".", "format", "(", "name", ")", ")", "else", ":", "if", "not", "__opts__", "[", "'test'", "]", ":", "if", "touch", "(", "name", ")", ":", "ret", "[", "'changes'", "]", "[", "'diff'", "]", "=", "'New file'", 
"else", ":", "return", "_error", "(", "ret", ",", "'File {0} not created'", ".", "format", "(", "name", ")", ")", "if", "contents", "is", "not", "None", ":", "# Write the static contents to a temporary file", "tmp", "=", "salt", ".", "utils", ".", "files", ".", "mkstemp", "(", "prefix", "=", "salt", ".", "utils", ".", "files", ".", "TEMPFILE_PREFIX", ",", "text", "=", "True", ")", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "tmp", ",", "'wb'", ")", "as", "tmp_", ":", "if", "encoding", ":", "if", "salt", ".", "utils", ".", "platform", ".", "is_windows", "(", ")", ":", "contents", "=", "os", ".", "linesep", ".", "join", "(", "_splitlines_preserving_trailing_newline", "(", "contents", ")", ")", "log", ".", "debug", "(", "'File will be encoded with %s'", ",", "encoding", ")", "tmp_", ".", "write", "(", "contents", ".", "encode", "(", "encoding", "=", "encoding", ",", "errors", "=", "encoding_errors", ")", ")", "else", ":", "tmp_", ".", "write", "(", "salt", ".", "utils", ".", "stringutils", ".", "to_bytes", "(", "contents", ")", ")", "# Copy into place", "salt", ".", "utils", ".", "files", ".", "copyfile", "(", "tmp", ",", "name", ",", "__salt__", "[", "'config.backup_mode'", "]", "(", "backup", ")", ",", "__opts__", "[", "'cachedir'", "]", ")", "__clean_tmp", "(", "tmp", ")", "# Now copy the file contents if there is a source file", "elif", "sfn", ":", "salt", ".", "utils", ".", "files", ".", "copyfile", "(", "sfn", ",", "name", ",", "__salt__", "[", "'config.backup_mode'", "]", "(", "backup", ")", ",", "__opts__", "[", "'cachedir'", "]", ")", "__clean_tmp", "(", "sfn", ")", "# This is a new file, if no mode specified, use the umask to figure", "# out what mode to use for the new file.", "if", "mode", "is", "None", "and", "not", "salt", ".", "utils", ".", "platform", ".", "is_windows", "(", ")", ":", "# Get current umask", "mask", "=", "salt", ".", "utils", ".", "files", ".", "get_umask", "(", ")", "# Calculate the mode value that results from the umask", "mode", "=", "oct", "(", "(", "0o777", "^", "mask", ")", "&", "0o666", ")", "if", "salt", ".", "utils", ".", "platform", ".", "is_windows", "(", ")", ":", "# This function resides in win_file.py and will be available", "# on Windows. 
The local function will be overridden", "# pylint: disable=E1120,E1121,E1123", "ret", "=", "check_perms", "(", "path", "=", "name", ",", "ret", "=", "ret", ",", "owner", "=", "kwargs", ".", "get", "(", "'win_owner'", ")", ",", "grant_perms", "=", "kwargs", ".", "get", "(", "'win_perms'", ")", ",", "deny_perms", "=", "kwargs", ".", "get", "(", "'win_deny_perms'", ")", ",", "inheritance", "=", "kwargs", ".", "get", "(", "'win_inheritance'", ",", "True", ")", ",", "reset", "=", "kwargs", ".", "get", "(", "'win_perms_reset'", ",", "False", ")", ")", "# pylint: enable=E1120,E1121,E1123", "else", ":", "ret", ",", "_", "=", "check_perms", "(", "name", ",", "ret", ",", "user", ",", "group", ",", "mode", ",", "attrs", ",", "seuser", "=", "seuser", ",", "serole", "=", "serole", ",", "setype", "=", "setype", ",", "serange", "=", "serange", ")", "if", "not", "ret", "[", "'comment'", "]", ":", "ret", "[", "'comment'", "]", "=", "'File '", "+", "name", "+", "' updated'", "if", "__opts__", "[", "'test'", "]", ":", "ret", "[", "'comment'", "]", "=", "'File '", "+", "name", "+", "' not updated'", "elif", "not", "ret", "[", "'changes'", "]", "and", "ret", "[", "'result'", "]", ":", "ret", "[", "'comment'", "]", "=", "'File '", "+", "name", "+", "' is in the correct state'", "if", "sfn", ":", "__clean_tmp", "(", "sfn", ")", "return", "ret" ]
39.254826
21.158301
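The Salt manage_file entry above decides whether to replace a target file by comparing a freshly computed hash against the expected source_sum['hsum']. Below is a minimal, self-contained sketch of just that checksum step, using hashlib in place of Salt's get_hash helper; the function names and the example path/digest are illustrative assumptions, not part of Salt's API.

# Standalone sketch of the checksum check manage_file performs before
# replacing a target file (hashlib stands in for Salt's get_hash helper).
import hashlib

def _file_hash(path, hash_type='sha256', chunk_size=65536):
    # Stream the file so large targets do not have to fit in memory.
    h = hashlib.new(hash_type)
    with open(path, 'rb') as fh:
        for chunk in iter(lambda: fh.read(chunk_size), b''):
            h.update(chunk)
    return h.hexdigest()

def needs_replacement(target_path, source_sum):
    # Mirrors the comparison above: lowercase the expected digest and
    # replace the file only when the digests differ.
    expected = source_sum['hsum'].lower()
    actual = _file_hash(target_path, source_sum.get('hash_type', 'sha256'))
    return actual != expected

# Example call (hypothetical path and digest):
# needs_replacement('/etc/motd', {'hash_type': 'sha256', 'hsum': 'abc123...'})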
def _login(self, username, password):
    '''Authenticates a TissueMAPS user.

    Parameters
    ----------
    username: str
        name
    password: str
        password
    '''
    logger.debug('login in as user "%s"' % username)
    url = self._build_url('/auth')
    payload = {'username': username, 'password': password}
    res = self._session.post(url, json=payload)
    res.raise_for_status()
    self._access_token = res.json()['access_token']
    self._session.headers.update(
        {'Authorization': 'JWT %s' % self._access_token}
    )
[ "def", "_login", "(", "self", ",", "username", ",", "password", ")", ":", "logger", ".", "debug", "(", "'login in as user \"%s\"'", "%", "username", ")", "url", "=", "self", ".", "_build_url", "(", "'/auth'", ")", "payload", "=", "{", "'username'", ":", "username", ",", "'password'", ":", "password", "}", "res", "=", "self", ".", "_session", ".", "post", "(", "url", ",", "json", "=", "payload", ")", "res", ".", "raise_for_status", "(", ")", "self", ".", "_access_token", "=", "res", ".", "json", "(", ")", "[", "'access_token'", "]", "self", ".", "_session", ".", "headers", ".", "update", "(", "{", "'Authorization'", ":", "'JWT %s'", "%", "self", ".", "_access_token", "}", ")" ]
31.736842
17.421053
def serialize_seeds(seeds, block):
    """
    Serialize the seeds in peer instruction XBlock to xml

    Args:
        seeds (lxml.etree.Element): The <seeds> XML element.
        block (PeerInstructionXBlock): The XBlock with configuration to serialize.

    Returns:
        None
    """
    for seed_dict in block.seeds:
        seed = etree.SubElement(seeds, 'seed')
        # options in xml starts with 1
        seed.set('option', unicode(seed_dict.get('answer', 0) + 1))
        seed.text = seed_dict.get('rationale', '')
[ "def", "serialize_seeds", "(", "seeds", ",", "block", ")", ":", "for", "seed_dict", "in", "block", ".", "seeds", ":", "seed", "=", "etree", ".", "SubElement", "(", "seeds", ",", "'seed'", ")", "# options in xml starts with 1", "seed", ".", "set", "(", "'option'", ",", "unicode", "(", "seed_dict", ".", "get", "(", "'answer'", ",", "0", ")", "+", "1", ")", ")", "seed", ".", "text", "=", "seed_dict", ".", "get", "(", "'rationale'", ",", "''", ")" ]
32.125
19
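To make the output of serialize_seeds concrete, here is a hedged standalone rerun of the same lxml pattern; the XBlock is replaced by a plain list of seed dicts, and str stands in for the Python 2 unicode built-in used above.

# Standalone illustration of the <seed> serialization above; the XBlock is
# replaced with a plain list of seed dicts so the snippet runs on its own.
from lxml import etree

seeds_elem = etree.Element('seeds')
seed_dicts = [                       # stand-in for block.seeds
    {'answer': 1, 'rationale': 'Momentum is conserved.'},
    {'answer': 0, 'rationale': 'Energy is lost to friction.'},
]

for seed_dict in seed_dicts:
    seed = etree.SubElement(seeds_elem, 'seed')
    # answers are 0-based internally but 1-based in the XML
    seed.set('option', str(seed_dict.get('answer', 0) + 1))
    seed.text = seed_dict.get('rationale', '')

print(etree.tostring(seeds_elem, pretty_print=True).decode())
# <seeds>
#   <seed option="2">Momentum is conserved.</seed>
#   <seed option="1">Energy is lost to friction.</seed>
# </seeds>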
def manufacturer(self):
    """Returns the name of the manufacturer of the device.

    Args:
      self (JLinkDeviceInfo): the ``JLinkDeviceInfo`` instance

    Returns:
      Manufacturer name.
    """
    buf = ctypes.cast(self.sManu, ctypes.c_char_p).value
    return buf.decode() if buf else None
[ "def", "manufacturer", "(", "self", ")", ":", "buf", "=", "ctypes", ".", "cast", "(", "self", ".", "sManu", ",", "ctypes", ".", "c_char_p", ")", ".", "value", "return", "buf", ".", "decode", "(", ")", "if", "buf", "else", "None" ]
29.363636
19.090909
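The manufacturer property above boils down to a ctypes cast-and-decode over a fixed character buffer. A self-contained sketch of that idiom follows, with a locally created buffer standing in for the sManu field of the J-Link device-info struct.

# The cast-and-decode idiom used above, demonstrated on a local buffer
# instead of the sManu field of the J-Link device-info struct.
import ctypes

raw = ctypes.create_string_buffer(b'SEGGER')    # stand-in for self.sManu
buf = ctypes.cast(raw, ctypes.c_char_p).value   # -> b'SEGGER' (or None)
manufacturer = buf.decode() if buf else None
print(manufacturer)                             # 'SEGGER'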
def is_expired(self):
    """
    Indicates if connection has expired.
    """
    if time.time() - self.last_ping > HB_PING_TIME:
        self.ping()

    return (time.time() - self.last_pong) > HB_PING_TIME + HB_PONG_TIME
[ "def", "is_expired", "(", "self", ")", ":", "if", "time", ".", "time", "(", ")", "-", "self", ".", "last_ping", ">", "HB_PING_TIME", ":", "self", ".", "ping", "(", ")", "return", "(", "time", ".", "time", "(", ")", "-", "self", ".", "last_pong", ")", ">", "HB_PING_TIME", "+", "HB_PONG_TIME" ]
37.666667
21
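A runnable sketch of the heartbeat bookkeeping is_expired assumes: last_ping/last_pong timestamps plus the two interval constants. The constant values and the Heartbeat class below are illustrative assumptions, not the library's definitions.

# Minimal heartbeat tracker using the same expiry rule as is_expired above.
# HB_PING_TIME / HB_PONG_TIME values are illustrative assumptions.
import time

HB_PING_TIME = 15   # re-ping if no ping was sent in the last 15 s
HB_PONG_TIME = 10   # allow 10 extra seconds for the pong to arrive

class Heartbeat(object):
    def __init__(self):
        self.last_ping = time.time()
        self.last_pong = time.time()

    def ping(self):
        self.last_ping = time.time()

    def is_expired(self):
        # Refresh the ping if it is stale, then judge expiry on the pong age.
        if time.time() - self.last_ping > HB_PING_TIME:
            self.ping()
        return (time.time() - self.last_pong) > HB_PING_TIME + HB_PONG_TIME

hb = Heartbeat()
print(hb.is_expired())   # False right after construction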
def get_licenses(self):
    """
    :calls: `GET /licenses <https://developer.github.com/v3/licenses/#list-all-licenses>`_
    :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.License.License`
    """
    url_parameters = dict()
    return github.PaginatedList.PaginatedList(
        github.License.License,
        self.__requester,
        "/licenses",
        url_parameters
    )
[ "def", "get_licenses", "(", "self", ")", ":", "url_parameters", "=", "dict", "(", ")", "return", "github", ".", "PaginatedList", ".", "PaginatedList", "(", "github", ".", "License", ".", "License", ",", "self", ".", "__requester", ",", "\"/licenses\"", ",", "url_parameters", ")" ]
31.214286
21.5
def to_dict(self, in_dict=None):
    """
    Turn the Namespace and sub Namespaces back into a native python dictionary.

    :param in_dict: Do not use, for self recursion
    :return: python dictionary of this Namespace
    """
    in_dict = in_dict if in_dict else self
    out_dict = dict()
    for k, v in in_dict.items():
        if isinstance(v, Namespace):
            v = v.to_dict()
        out_dict[k] = v
    return out_dict
[ "def", "to_dict", "(", "self", ",", "in_dict", "=", "None", ")", ":", "in_dict", "=", "in_dict", "if", "in_dict", "else", "self", "out_dict", "=", "dict", "(", ")", "for", "k", ",", "v", "in", "in_dict", ".", "items", "(", ")", ":", "if", "isinstance", "(", "v", ",", "Namespace", ")", ":", "v", "=", "v", ".", "to_dict", "(", ")", "out_dict", "[", "k", "]", "=", "v", "return", "out_dict" ]
31.866667
11.733333
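A self-contained sketch of the recursive conversion in to_dict, with a minimal dict-backed stand-in for the Namespace class (the real class is assumed to behave like a mapping whose values may themselves be Namespaces).

# Minimal Namespace stand-in to show how the recursive to_dict above unwinds
# nested namespaces back into plain dicts.
class Namespace(dict):
    def to_dict(self, in_dict=None):
        in_dict = in_dict if in_dict else self
        out_dict = dict()
        for k, v in in_dict.items():
            if isinstance(v, Namespace):
                v = v.to_dict()
            out_dict[k] = v
        return out_dict

ns = Namespace(db=Namespace(host='localhost', port=5432), debug=True)
print(ns.to_dict())
# {'db': {'host': 'localhost', 'port': 5432}, 'debug': True}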
def get_load(jid):
    '''
    Included for API consistency
    '''
    options = _get_options(ret=None)
    _response = _request("GET", options['url'] + options['db'] + '/' + jid)
    if 'error' in _response:
        log.error('Unable to get JID "%s" : "%s"', jid, _response)
        return {}
    return {_response['id']: _response}
[ "def", "get_load", "(", "jid", ")", ":", "options", "=", "_get_options", "(", "ret", "=", "None", ")", "_response", "=", "_request", "(", "\"GET\"", ",", "options", "[", "'url'", "]", "+", "options", "[", "'db'", "]", "+", "'/'", "+", "jid", ")", "if", "'error'", "in", "_response", ":", "log", ".", "error", "(", "'Unable to get JID \"%s\" : \"%s\"'", ",", "jid", ",", "_response", ")", "return", "{", "}", "return", "{", "_response", "[", "'id'", "]", ":", "_response", "}" ]
32.5
19.7
def transform_data(from_client, from_project, from_logstore, from_time,
                   to_time=None, to_client=None, to_project=None, to_logstore=None,
                   shard_list=None,
                   config=None,
                   batch_size=None, compress=None,
                   cg_name=None, c_name=None,
                   cg_heartbeat_interval=None, cg_data_fetch_interval=None,
                   cg_in_order=None, cg_worker_pool_size=None
                   ):
    """
    transform data from one logstore to another one (could be the same or in different region), the time is log received time on server side.
    """
    if not config:
        logger.info("transform_data: config is not configured, use copy data by default.")
        return copy_data(from_client, from_project, from_logstore, from_time, to_time=to_time,
                         to_client=to_client, to_project=to_project, to_logstore=to_logstore,
                         shard_list=shard_list,
                         batch_size=batch_size, compress=compress)

    to_client = to_client or from_client

    # increase the timeout to 2 min at least
    from_client.timeout = max(from_client.timeout, 120)
    to_client.timeout = max(to_client.timeout, 120)
    to_project = to_project or from_project
    to_logstore = to_logstore or from_logstore

    if not cg_name:
        # batch mode
        to_time = to_time or "end"
        cpu_count = multiprocessing.cpu_count() * 2
        shards = from_client.list_shards(from_project, from_logstore).get_shards_info()
        current_shards = [str(shard['shardID']) for shard in shards]
        target_shards = _parse_shard_list(shard_list, current_shards)
        worker_size = min(cpu_count, len(target_shards))

        result = dict()
        total_count = 0
        total_removed = 0
        with ProcessPoolExecutor(max_workers=worker_size) as pool:
            futures = [pool.submit(transform_worker, from_client, from_project, from_logstore, shard,
                                   from_time, to_time, config,
                                   to_client, to_project, to_logstore,
                                   batch_size=batch_size, compress=compress)
                       for shard in target_shards]

            for future in as_completed(futures):
                if future.exception():
                    logger.error("get error when transforming data: {0}".format(future.exception()))
                else:
                    partition, count, removed, processed, failed = future.result()
                    total_count += count
                    total_removed += removed
                    if count:
                        result[partition] = {"total_count": count, "transformed": processed,
                                             "removed": removed, "failed": failed}

        return LogResponse({}, {"total_count": total_count, "shards": result})
    else:
        # consumer group mode
        c_name = c_name or "transform_data_{0}".format(multiprocessing.current_process().pid)
        cg_heartbeat_interval = cg_heartbeat_interval or 20
        cg_data_fetch_interval = cg_data_fetch_interval or 2
        cg_in_order = False if cg_in_order is None else cg_in_order
        cg_worker_pool_size = cg_worker_pool_size or 3

        option = LogHubConfig(from_client._endpoint, from_client._accessKeyId, from_client._accessKey,
                              from_project, from_logstore, cg_name, c_name,
                              cursor_position=CursorPosition.SPECIAL_TIMER_CURSOR,
                              cursor_start_time=from_time,
                              cursor_end_time=to_time,
                              heartbeat_interval=cg_heartbeat_interval,
                              data_fetch_interval=cg_data_fetch_interval,
                              in_order=cg_in_order,
                              worker_pool_size=cg_worker_pool_size)

        TransformDataConsumer.set_transform_options(config, to_client, to_project, to_logstore)

        result = {"total_count": 0, "shards": {}}
        l = RLock()

        def status_updator(shard_id, count=0, removed=0, processed=0, failed=0):
            logger.info("status update is called, shard: {0}, count: {1}, removed: {2}, processed: {3}, failed: {4}".format(shard_id, count, removed, processed, failed))
            with l:
                result["total_count"] += count
                if shard_id in result["shards"]:
                    data = result["shards"][shard_id]
                    result["shards"][shard_id] = {"total_count": data["total_count"] + count,
                                                  "transformed": data["transformed"] + processed,
                                                  "removed": data["removed"] + removed,
                                                  "failed": data["failed"] + failed}
                else:
                    result["shards"][shard_id] = {"total_count": count, "transformed": processed,
                                                  "removed": removed, "failed": failed}

        worker = ConsumerWorker(TransformDataConsumer, consumer_option=option, args=(status_updator, ))
        worker.start()

        try:
            while worker.is_alive():
                worker.join(timeout=60)
            logger.info("transform_data: worker exit unexpected, try to shutdown it")
            worker.shutdown()
        except KeyboardInterrupt:
            logger.info("transform_data: *** try to exit **** ")
            print("try to stop transforming data.")
            worker.shutdown()
            worker.join(timeout=120)

        return LogResponse({}, result)
[ "def", "transform_data", "(", "from_client", ",", "from_project", ",", "from_logstore", ",", "from_time", ",", "to_time", "=", "None", ",", "to_client", "=", "None", ",", "to_project", "=", "None", ",", "to_logstore", "=", "None", ",", "shard_list", "=", "None", ",", "config", "=", "None", ",", "batch_size", "=", "None", ",", "compress", "=", "None", ",", "cg_name", "=", "None", ",", "c_name", "=", "None", ",", "cg_heartbeat_interval", "=", "None", ",", "cg_data_fetch_interval", "=", "None", ",", "cg_in_order", "=", "None", ",", "cg_worker_pool_size", "=", "None", ")", ":", "if", "not", "config", ":", "logger", ".", "info", "(", "\"transform_data: config is not configured, use copy data by default.\"", ")", "return", "copy_data", "(", "from_client", ",", "from_project", ",", "from_logstore", ",", "from_time", ",", "to_time", "=", "to_time", ",", "to_client", "=", "to_client", ",", "to_project", "=", "to_project", ",", "to_logstore", "=", "to_logstore", ",", "shard_list", "=", "shard_list", ",", "batch_size", "=", "batch_size", ",", "compress", "=", "compress", ")", "to_client", "=", "to_client", "or", "from_client", "# increase the timeout to 2 min at least", "from_client", ".", "timeout", "=", "max", "(", "from_client", ".", "timeout", ",", "120", ")", "to_client", ".", "timeout", "=", "max", "(", "to_client", ".", "timeout", ",", "120", ")", "to_project", "=", "to_project", "or", "from_project", "to_logstore", "=", "to_logstore", "or", "from_logstore", "if", "not", "cg_name", ":", "# batch mode", "to_time", "=", "to_time", "or", "\"end\"", "cpu_count", "=", "multiprocessing", ".", "cpu_count", "(", ")", "*", "2", "shards", "=", "from_client", ".", "list_shards", "(", "from_project", ",", "from_logstore", ")", ".", "get_shards_info", "(", ")", "current_shards", "=", "[", "str", "(", "shard", "[", "'shardID'", "]", ")", "for", "shard", "in", "shards", "]", "target_shards", "=", "_parse_shard_list", "(", "shard_list", ",", "current_shards", ")", "worker_size", "=", "min", "(", "cpu_count", ",", "len", "(", "target_shards", ")", ")", "result", "=", "dict", "(", ")", "total_count", "=", "0", "total_removed", "=", "0", "with", "ProcessPoolExecutor", "(", "max_workers", "=", "worker_size", ")", "as", "pool", ":", "futures", "=", "[", "pool", ".", "submit", "(", "transform_worker", ",", "from_client", ",", "from_project", ",", "from_logstore", ",", "shard", ",", "from_time", ",", "to_time", ",", "config", ",", "to_client", ",", "to_project", ",", "to_logstore", ",", "batch_size", "=", "batch_size", ",", "compress", "=", "compress", ")", "for", "shard", "in", "target_shards", "]", "for", "future", "in", "as_completed", "(", "futures", ")", ":", "if", "future", ".", "exception", "(", ")", ":", "logger", ".", "error", "(", "\"get error when transforming data: {0}\"", ".", "format", "(", "future", ".", "exception", "(", ")", ")", ")", "else", ":", "partition", ",", "count", ",", "removed", ",", "processed", ",", "failed", "=", "future", ".", "result", "(", ")", "total_count", "+=", "count", "total_removed", "+=", "removed", "if", "count", ":", "result", "[", "partition", "]", "=", "{", "\"total_count\"", ":", "count", ",", "\"transformed\"", ":", "processed", ",", "\"removed\"", ":", "removed", ",", "\"failed\"", ":", "failed", "}", "return", "LogResponse", "(", "{", "}", ",", "{", "\"total_count\"", ":", "total_count", ",", "\"shards\"", ":", "result", "}", ")", "else", ":", "# consumer group mode", "c_name", "=", "c_name", "or", "\"transform_data_{0}\"", ".", "format", "(", "multiprocessing", 
".", "current_process", "(", ")", ".", "pid", ")", "cg_heartbeat_interval", "=", "cg_heartbeat_interval", "or", "20", "cg_data_fetch_interval", "=", "cg_data_fetch_interval", "or", "2", "cg_in_order", "=", "False", "if", "cg_in_order", "is", "None", "else", "cg_in_order", "cg_worker_pool_size", "=", "cg_worker_pool_size", "or", "3", "option", "=", "LogHubConfig", "(", "from_client", ".", "_endpoint", ",", "from_client", ".", "_accessKeyId", ",", "from_client", ".", "_accessKey", ",", "from_project", ",", "from_logstore", ",", "cg_name", ",", "c_name", ",", "cursor_position", "=", "CursorPosition", ".", "SPECIAL_TIMER_CURSOR", ",", "cursor_start_time", "=", "from_time", ",", "cursor_end_time", "=", "to_time", ",", "heartbeat_interval", "=", "cg_heartbeat_interval", ",", "data_fetch_interval", "=", "cg_data_fetch_interval", ",", "in_order", "=", "cg_in_order", ",", "worker_pool_size", "=", "cg_worker_pool_size", ")", "TransformDataConsumer", ".", "set_transform_options", "(", "config", ",", "to_client", ",", "to_project", ",", "to_logstore", ")", "result", "=", "{", "\"total_count\"", ":", "0", ",", "\"shards\"", ":", "{", "}", "}", "l", "=", "RLock", "(", ")", "def", "status_updator", "(", "shard_id", ",", "count", "=", "0", ",", "removed", "=", "0", ",", "processed", "=", "0", ",", "failed", "=", "0", ")", ":", "logger", ".", "info", "(", "\"status update is called, shard: {0}, count: {1}, removed: {2}, processed: {3}, failed: {4}\"", ".", "format", "(", "shard_id", ",", "count", ",", "removed", ",", "processed", ",", "failed", ")", ")", "with", "l", ":", "result", "[", "\"total_count\"", "]", "+=", "count", "if", "shard_id", "in", "result", "[", "\"shards\"", "]", ":", "data", "=", "result", "[", "\"shards\"", "]", "[", "shard_id", "]", "result", "[", "\"shards\"", "]", "[", "shard_id", "]", "=", "{", "\"total_count\"", ":", "data", "[", "\"total_count\"", "]", "+", "count", ",", "\"transformed\"", ":", "data", "[", "\"transformed\"", "]", "+", "processed", ",", "\"removed\"", ":", "data", "[", "\"removed\"", "]", "+", "removed", ",", "\"failed\"", ":", "data", "[", "\"failed\"", "]", "+", "failed", "}", "else", ":", "result", "[", "\"shards\"", "]", "[", "shard_id", "]", "=", "{", "\"total_count\"", ":", "count", ",", "\"transformed\"", ":", "processed", ",", "\"removed\"", ":", "removed", ",", "\"failed\"", ":", "failed", "}", "worker", "=", "ConsumerWorker", "(", "TransformDataConsumer", ",", "consumer_option", "=", "option", ",", "args", "=", "(", "status_updator", ",", ")", ")", "worker", ".", "start", "(", ")", "try", ":", "while", "worker", ".", "is_alive", "(", ")", ":", "worker", ".", "join", "(", "timeout", "=", "60", ")", "logger", ".", "info", "(", "\"transform_data: worker exit unexpected, try to shutdown it\"", ")", "worker", ".", "shutdown", "(", ")", "except", "KeyboardInterrupt", ":", "logger", ".", "info", "(", "\"transform_data: *** try to exit **** \"", ")", "print", "(", "\"try to stop transforming data.\"", ")", "worker", ".", "shutdown", "(", ")", "worker", ".", "join", "(", "timeout", "=", "120", ")", "return", "LogResponse", "(", "{", "}", ",", "result", ")" ]
48.818182
28.690909
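The batch branch of transform_data fans shards out to a process pool and folds each worker's (shard, count, removed, processed, failed) result into one summary. Below is a standalone sketch of that fan-out/aggregate pattern with a dummy worker in place of transform_worker, so no Aliyun Log client is needed; the dummy numbers are invented.

# The ProcessPoolExecutor fan-out/aggregation pattern used by the batch
# branch above, with a dummy per-shard worker standing in for transform_worker.
from concurrent.futures import ProcessPoolExecutor, as_completed

def dummy_worker(shard):
    # Pretend each shard yields (shard, count, removed, processed, failed).
    return shard, 100, 5, 95, 0

def run(target_shards):
    result, total_count, total_removed = {}, 0, 0
    with ProcessPoolExecutor(max_workers=min(4, len(target_shards))) as pool:
        futures = [pool.submit(dummy_worker, shard) for shard in target_shards]
        for future in as_completed(futures):
            if future.exception():
                continue  # the real code logs the error and keeps going
            shard, count, removed, processed, failed = future.result()
            total_count += count
            total_removed += removed
            if count:
                result[shard] = {"total_count": count, "transformed": processed,
                                 "removed": removed, "failed": failed}
    return {"total_count": total_count, "shards": result}

if __name__ == '__main__':
    print(run(['0', '1', '2']))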
def grid_prep(self):
    """ prepare grid-based parameterizations
    """
    if len(self.grid_props) == 0:
        return

    if self.grid_geostruct is None:
        self.logger.warn("grid_geostruct is None,"\
                         " using ExpVario with contribution=1 and a=(max(delc,delr)*10")
        dist = 10 * float(max(self.m.dis.delr.array.max(),
                              self.m.dis.delc.array.max()))
        v = pyemu.geostats.ExpVario(contribution=1.0,a=dist)
        self.grid_geostruct = pyemu.geostats.GeoStruct(variograms=v)
[ "def", "grid_prep", "(", "self", ")", ":", "if", "len", "(", "self", ".", "grid_props", ")", "==", "0", ":", "return", "if", "self", ".", "grid_geostruct", "is", "None", ":", "self", ".", "logger", ".", "warn", "(", "\"grid_geostruct is None,\"", "\" using ExpVario with contribution=1 and a=(max(delc,delr)*10\"", ")", "dist", "=", "10", "*", "float", "(", "max", "(", "self", ".", "m", ".", "dis", ".", "delr", ".", "array", ".", "max", "(", ")", ",", "self", ".", "m", ".", "dis", ".", "delc", ".", "array", ".", "max", "(", ")", ")", ")", "v", "=", "pyemu", ".", "geostats", ".", "ExpVario", "(", "contribution", "=", "1.0", ",", "a", "=", "dist", ")", "self", ".", "grid_geostruct", "=", "pyemu", ".", "geostats", ".", "GeoStruct", "(", "variograms", "=", "v", ")" ]
41.357143
20.857143
def select(self, crit, axis=0):
    """
    Return data corresponding to axis labels matching criteria.

    .. deprecated:: 0.21.0
        Use df.loc[df.index.map(crit)] to select via labels

    Parameters
    ----------
    crit : function
        To be called on each index (label). Should return True or False
    axis : int

    Returns
    -------
    selection : same type as caller
    """
    warnings.warn("'select' is deprecated and will be removed in a "
                  "future release. You can use "
                  ".loc[labels.map(crit)] as a replacement",
                  FutureWarning, stacklevel=2)

    axis = self._get_axis_number(axis)
    axis_name = self._get_axis_name(axis)
    axis_values = self._get_axis(axis)

    if len(axis_values) > 0:
        new_axis = axis_values[
            np.asarray([bool(crit(label)) for label in axis_values])]
    else:
        new_axis = axis_values

    return self.reindex(**{axis_name: new_axis})
[ "def", "select", "(", "self", ",", "crit", ",", "axis", "=", "0", ")", ":", "warnings", ".", "warn", "(", "\"'select' is deprecated and will be removed in a \"", "\"future release. You can use \"", "\".loc[labels.map(crit)] as a replacement\"", ",", "FutureWarning", ",", "stacklevel", "=", "2", ")", "axis", "=", "self", ".", "_get_axis_number", "(", "axis", ")", "axis_name", "=", "self", ".", "_get_axis_name", "(", "axis", ")", "axis_values", "=", "self", ".", "_get_axis", "(", "axis", ")", "if", "len", "(", "axis_values", ")", ">", "0", ":", "new_axis", "=", "axis_values", "[", "np", ".", "asarray", "(", "[", "bool", "(", "crit", "(", "label", ")", ")", "for", "label", "in", "axis_values", "]", ")", "]", "else", ":", "new_axis", "=", "axis_values", "return", "self", ".", "reindex", "(", "*", "*", "{", "axis_name", ":", "new_axis", "}", ")" ]
31.515152
19.878788
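The deprecation warning in select points at the label-mapping replacement. A short runnable comparison of the old call and the recommended .loc form follows, assuming pandas is installed; the sample frame is made up.

# The replacement the deprecation warning above recommends, side by side
# with what select(crit) used to do (keep labels for which crit(label) is True).
import pandas as pd

df = pd.DataFrame({'value': [1, 2, 3]}, index=['apple', 'banana', 'avocado'])
crit = lambda label: label.startswith('a')

# old (deprecated):   df.select(crit)
# recommended form:
print(df.loc[df.index.map(crit)])
#          value
# apple        1
# avocado      3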
def create_object(container, portal_type, **data):
    """Creates an object slug

    :returns: The new created content object
    :rtype: object
    """

    if "id" in data:
        # always omit the id as senaite LIMS generates a proper one
        id = data.pop("id")
        logger.warn("Passed in ID '{}' omitted! Senaite LIMS "
                    "generates a proper ID for you" .format(id))

    try:
        # Special case for ARs
        # => return immediately w/o update
        if portal_type == "AnalysisRequest":
            obj = create_analysisrequest(container, **data)
            # Omit values which are already set through the helper
            data = u.omit(data, "SampleType", "Analyses")
            # Set the container as the client, as the AR lives in it
            data["Client"] = container
        # Standard content creation
        else:
            # we want just a minimun viable object and set the data later
            obj = api.create(container, portal_type)
            # obj = api.create(container, portal_type, **data)
    except Unauthorized:
        fail(401, "You are not allowed to create this content")

    # Update the object with the given data, but omit the id
    try:
        update_object_with_data(obj, data)
    except APIError:
        # Failure in creation process, delete the invalid object
        container.manage_delObjects(obj.id)
        # reraise the error
        raise

    return obj
[ "def", "create_object", "(", "container", ",", "portal_type", ",", "*", "*", "data", ")", ":", "if", "\"id\"", "in", "data", ":", "# always omit the id as senaite LIMS generates a proper one", "id", "=", "data", ".", "pop", "(", "\"id\"", ")", "logger", ".", "warn", "(", "\"Passed in ID '{}' omitted! Senaite LIMS \"", "\"generates a proper ID for you\"", ".", "format", "(", "id", ")", ")", "try", ":", "# Special case for ARs", "# => return immediately w/o update", "if", "portal_type", "==", "\"AnalysisRequest\"", ":", "obj", "=", "create_analysisrequest", "(", "container", ",", "*", "*", "data", ")", "# Omit values which are already set through the helper", "data", "=", "u", ".", "omit", "(", "data", ",", "\"SampleType\"", ",", "\"Analyses\"", ")", "# Set the container as the client, as the AR lives in it", "data", "[", "\"Client\"", "]", "=", "container", "# Standard content creation", "else", ":", "# we want just a minimun viable object and set the data later", "obj", "=", "api", ".", "create", "(", "container", ",", "portal_type", ")", "# obj = api.create(container, portal_type, **data)", "except", "Unauthorized", ":", "fail", "(", "401", ",", "\"You are not allowed to create this content\"", ")", "# Update the object with the given data, but omit the id", "try", ":", "update_object_with_data", "(", "obj", ",", "data", ")", "except", "APIError", ":", "# Failure in creation process, delete the invalid object", "container", ".", "manage_delObjects", "(", "obj", ".", "id", ")", "# reraise the error", "raise", "return", "obj" ]
34.463415
20.170732
def reset(self):
    """ Reset this Task to a clean state prior to execution. """

    logger.debug('Resetting task {0}'.format(self.name))

    self.stdout_file = os.tmpfile()
    self.stderr_file = os.tmpfile()
    self.stdout = ""
    self.stderr = ""

    self.started_at = None
    self.completed_at = None
    self.successful = None

    self.terminate_sent = False
    self.kill_sent = False
    self.remote_failure = False
[ "def", "reset", "(", "self", ")", ":", "logger", ".", "debug", "(", "'Resetting task {0}'", ".", "format", "(", "self", ".", "name", ")", ")", "self", ".", "stdout_file", "=", "os", ".", "tmpfile", "(", ")", "self", ".", "stderr_file", "=", "os", ".", "tmpfile", "(", ")", "self", ".", "stdout", "=", "\"\"", "self", ".", "stderr", "=", "\"\"", "self", ".", "started_at", "=", "None", "self", ".", "completed_at", "=", "None", "self", ".", "successful", "=", "None", "self", ".", "terminate_sent", "=", "False", "self", ".", "kill_sent", "=", "False", "self", ".", "remote_failure", "=", "False" ]
25.666667
18.111111
def get_old_sha(diff_part):
    """
    Returns the SHA for the original file that was changed in a diff part.
    """
    r = re.compile(r'index ([a-fA-F\d]*)')
    return r.search(diff_part).groups()[0]
[ "def", "get_old_sha", "(", "diff_part", ")", ":", "r", "=", "re", ".", "compile", "(", "r'index ([a-fA-F\\d]*)'", ")", "return", "r", ".", "search", "(", "diff_part", ")", ".", "groups", "(", ")", "[", "0", "]" ]
33.166667
8.5
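A tiny runnable example of the get_old_sha regex against a typical git diff header; the SHAs in the sample are made up.

# The same 'index <old>..<new>' extraction applied to a made-up diff fragment.
import re

diff_part = (
    "diff --git a/setup.py b/setup.py\n"
    "index 3f9a2b1..7c04d5e 100644\n"
    "--- a/setup.py\n"
    "+++ b/setup.py\n"
)

r = re.compile(r'index ([a-fA-F\d]*)')
print(r.search(diff_part).groups()[0])   # '3f9a2b1' - the pre-change blob SHA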
def main():
    """Command line interface for the ``coloredlogs`` program."""
    actions = []
    try:
        # Parse the command line arguments.
        options, arguments = getopt.getopt(sys.argv[1:], 'cdh', [
            'convert', 'to-html', 'demo', 'help',
        ])
        # Map command line options to actions.
        for option, value in options:
            if option in ('-c', '--convert', '--to-html'):
                actions.append(functools.partial(convert_command_output, *arguments))
                arguments = []
            elif option in ('-d', '--demo'):
                actions.append(demonstrate_colored_logging)
            elif option in ('-h', '--help'):
                usage(__doc__)
                return
            else:
                assert False, "Programming error: Unhandled option!"
        if not actions:
            usage(__doc__)
            return
    except Exception as e:
        warning("Error: %s", e)
        sys.exit(1)
    for function in actions:
        function()
[ "def", "main", "(", ")", ":", "actions", "=", "[", "]", "try", ":", "# Parse the command line arguments.", "options", ",", "arguments", "=", "getopt", ".", "getopt", "(", "sys", ".", "argv", "[", "1", ":", "]", ",", "'cdh'", ",", "[", "'convert'", ",", "'to-html'", ",", "'demo'", ",", "'help'", ",", "]", ")", "# Map command line options to actions.", "for", "option", ",", "value", "in", "options", ":", "if", "option", "in", "(", "'-c'", ",", "'--convert'", ",", "'--to-html'", ")", ":", "actions", ".", "append", "(", "functools", ".", "partial", "(", "convert_command_output", ",", "*", "arguments", ")", ")", "arguments", "=", "[", "]", "elif", "option", "in", "(", "'-d'", ",", "'--demo'", ")", ":", "actions", ".", "append", "(", "demonstrate_colored_logging", ")", "elif", "option", "in", "(", "'-h'", ",", "'--help'", ")", ":", "usage", "(", "__doc__", ")", "return", "else", ":", "assert", "False", ",", "\"Programming error: Unhandled option!\"", "if", "not", "actions", ":", "usage", "(", "__doc__", ")", "return", "except", "Exception", "as", "e", ":", "warning", "(", "\"Error: %s\"", ",", "e", ")", "sys", ".", "exit", "(", "1", ")", "for", "function", "in", "actions", ":", "function", "(", ")" ]
35.571429
16.821429
def get_user(self, username):
    """Get user information.

    :param str username: User to get info on.
    """
    r = self._query_('/users/%s' % username, 'GET')
    result = User(r.json())
    return result
[ "def", "get_user", "(", "self", ",", "username", ")", ":", "r", "=", "self", ".", "_query_", "(", "'/users/%s'", "%", "username", ",", "'GET'", ")", "result", "=", "User", "(", "r", ".", "json", "(", ")", ")", "return", "result" ]
22.8
18.3
def _setup_output_metrics(self, engine):
    """Helper method to setup metrics to log
    """
    metrics = {}
    if self.metric_names is not None:
        for name in self.metric_names:
            if name not in engine.state.metrics:
                warnings.warn("Provided metric name '{}' is missing "
                              "in engine's state metrics: {}".format(name,
                                                                     list(engine.state.metrics.keys())))
                continue
            metrics[name] = engine.state.metrics[name]

    if self.output_transform is not None:
        output_dict = self.output_transform(engine.state.output)

        if not isinstance(output_dict, dict):
            output_dict = {"output": output_dict}

        metrics.update({name: value for name, value in output_dict.items()})
    return metrics
[ "def", "_setup_output_metrics", "(", "self", ",", "engine", ")", ":", "metrics", "=", "{", "}", "if", "self", ".", "metric_names", "is", "not", "None", ":", "for", "name", "in", "self", ".", "metric_names", ":", "if", "name", "not", "in", "engine", ".", "state", ".", "metrics", ":", "warnings", ".", "warn", "(", "\"Provided metric name '{}' is missing \"", "\"in engine's state metrics: {}\"", ".", "format", "(", "name", ",", "list", "(", "engine", ".", "state", ".", "metrics", ".", "keys", "(", ")", ")", ")", ")", "continue", "metrics", "[", "name", "]", "=", "engine", ".", "state", ".", "metrics", "[", "name", "]", "if", "self", ".", "output_transform", "is", "not", "None", ":", "output_dict", "=", "self", ".", "output_transform", "(", "engine", ".", "state", ".", "output", ")", "if", "not", "isinstance", "(", "output_dict", ",", "dict", ")", ":", "output_dict", "=", "{", "\"output\"", ":", "output_dict", "}", "metrics", ".", "update", "(", "{", "name", ":", "value", "for", "name", ",", "value", "in", "output_dict", ".", "items", "(", ")", "}", ")", "return", "metrics" ]
42.2
20.25
def get(cls, user_id, client_id):
    """Get RemoteAccount object for user.

    :param user_id: User id
    :param client_id: Client id.
    :returns: A :class:`invenio_oauthclient.models.RemoteAccount` instance.
    """
    return cls.query.filter_by(
        user_id=user_id,
        client_id=client_id,
    ).first()
[ "def", "get", "(", "cls", ",", "user_id", ",", "client_id", ")", ":", "return", "cls", ".", "query", ".", "filter_by", "(", "user_id", "=", "user_id", ",", "client_id", "=", "client_id", ",", ")", ".", "first", "(", ")" ]
31.545455
13.363636
def set_verbosity(v):
    """Sets the logging verbosity.

    Causes all messages of level <= v to be logged,
    and all messages of level > v to be silently discarded.

    Args:
      v: int|str, the verbosity level as an integer or string. Legal string values
          are those that can be coerced to an integer as well as case-insensitive
          'debug', 'info', 'warning', 'error', and 'fatal'.
    """
    try:
        new_level = int(v)
    except ValueError:
        new_level = converter.ABSL_NAMES[v.upper()]
    FLAGS.verbosity = new_level
[ "def", "set_verbosity", "(", "v", ")", ":", "try", ":", "new_level", "=", "int", "(", "v", ")", "except", "ValueError", ":", "new_level", "=", "converter", ".", "ABSL_NAMES", "[", "v", ".", "upper", "(", ")", "]", "FLAGS", ".", "verbosity", "=", "new_level" ]
31.9375
21.5
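The docstring of set_verbosity says both integers and level names are accepted. A standalone sketch of that int-or-name coercion with a local name table follows; the real mapping lives in absl's converter.ABSL_NAMES, so the values below are assumptions.

# Int-or-name coercion as described in the docstring above; the name table is
# a local assumption standing in for absl's converter.ABSL_NAMES.
ABSL_NAMES = {'DEBUG': 1, 'INFO': 0, 'WARNING': -1, 'ERROR': -2, 'FATAL': -3}

def coerce_verbosity(v):
    try:
        return int(v)                 # '1' or 1 both end up as 1
    except ValueError:
        return ABSL_NAMES[v.upper()]  # 'info', 'Info', 'INFO' all accepted

print(coerce_verbosity('1'), coerce_verbosity('debug'), coerce_verbosity('ERROR'))
# 1 1 -2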
def _audience_condition_deserializer(obj_dict):
    """ Deserializer defining how dict objects need to be decoded for audience conditions.

    Args:
        obj_dict: Dict representing one audience condition.

    Returns:
        List consisting of condition key with corresponding value, type and match.
    """
    return [
        obj_dict.get('name'),
        obj_dict.get('value'),
        obj_dict.get('type'),
        obj_dict.get('match')
    ]
[ "def", "_audience_condition_deserializer", "(", "obj_dict", ")", ":", "return", "[", "obj_dict", ".", "get", "(", "'name'", ")", ",", "obj_dict", ".", "get", "(", "'value'", ")", ",", "obj_dict", ".", "get", "(", "'type'", ")", ",", "obj_dict", ".", "get", "(", "'match'", ")", "]" ]
26.933333
21.933333
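Input and output of _audience_condition_deserializer, spelled out on a sample condition dict; the field values are illustrative Optimizely-style data, not taken from a real datafile.

# What the deserializer above does to a single audience-condition dict;
# the sample values are illustrative.
obj_dict = {
    'name': 'device_type',
    'value': 'iphone',
    'type': 'custom_attribute',
    'match': 'exact',
}

condition = [
    obj_dict.get('name'),
    obj_dict.get('value'),
    obj_dict.get('type'),
    obj_dict.get('match'),
]
print(condition)   # ['device_type', 'iphone', 'custom_attribute', 'exact']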
def getMaskArray(self, signature):
    """ Returns the appropriate StaticMask array for the image. """
    if signature in self.masklist:
        mask = self.masklist[signature]
    else:
        mask = None
    return mask
[ "def", "getMaskArray", "(", "self", ",", "signature", ")", ":", "if", "signature", "in", "self", ".", "masklist", ":", "mask", "=", "self", ".", "masklist", "[", "signature", "]", "else", ":", "mask", "=", "None", "return", "mask" ]
34.571429
11
def detach_usb_device(self, id_p, done):
    """Notification that a VM is going to detach (@a done = @c false) or has
    already detached (@a done = @c true) the given USB device.
    When the @a done = @c true request is completed, the VM process will
    get a :py:func:`IInternalSessionControl.on_usb_device_detach`
    notification.

    In the @a done = @c true case, the server must run its own filters
    and filters of all VMs but this one on the detached device
    as if it were just attached to the host computer.

    in id_p of type str

    in done of type bool

    """
    if not isinstance(id_p, basestring):
        raise TypeError("id_p can only be an instance of type basestring")
    if not isinstance(done, bool):
        raise TypeError("done can only be an instance of type bool")
    self._call("detachUSBDevice",
               in_p=[id_p, done])
[ "def", "detach_usb_device", "(", "self", ",", "id_p", ",", "done", ")", ":", "if", "not", "isinstance", "(", "id_p", ",", "basestring", ")", ":", "raise", "TypeError", "(", "\"id_p can only be an instance of type basestring\"", ")", "if", "not", "isinstance", "(", "done", ",", "bool", ")", ":", "raise", "TypeError", "(", "\"done can only be an instance of type bool\"", ")", "self", ".", "_call", "(", "\"detachUSBDevice\"", ",", "in_p", "=", "[", "id_p", ",", "done", "]", ")" ]
42.363636
20.227273
def _check_consumer(self):
    """
    Validates the :attr:`.consumer`.
    """
    # 'magic' using _kwarg method
    # pylint:disable=no-member
    if not self.consumer.key:
        raise ConfigError(
            'Consumer key not specified for provider {0}!'.format(
                self.name))

    if not self.consumer.secret:
        raise ConfigError(
            'Consumer secret not specified for provider {0}!'.format(
                self.name))
[ "def", "_check_consumer", "(", "self", ")", ":", "# 'magic' using _kwarg method", "# pylint:disable=no-member", "if", "not", "self", ".", "consumer", ".", "key", ":", "raise", "ConfigError", "(", "'Consumer key not specified for provider {0}!'", ".", "format", "(", "self", ".", "name", ")", ")", "if", "not", "self", ".", "consumer", ".", "secret", ":", "raise", "ConfigError", "(", "'Consumer secret not specified for provider {0}!'", ".", "format", "(", "self", ".", "name", ")", ")" ]
30.8125
13.4375
def to_meme(self): """Return motif formatted in MEME format Returns ------- m : str String of motif in MEME format. """ motif_id = self.id.replace(" ", "_") m = "MOTIF %s\n" % motif_id m += "BL MOTIF %s width=0 seqs=0\n"% motif_id m += "letter-probability matrix: alength= 4 w= %s nsites= %s E= 0\n" % (len(self), np.sum(self.pfm[0])) m +="\n".join(["\t".join(["%s" % x for x in row]) for row in self.pwm]) return m
[ "def", "to_meme", "(", "self", ")", ":", "motif_id", "=", "self", ".", "id", ".", "replace", "(", "\" \"", ",", "\"_\"", ")", "m", "=", "\"MOTIF %s\\n\"", "%", "motif_id", "m", "+=", "\"BL MOTIF %s width=0 seqs=0\\n\"", "%", "motif_id", "m", "+=", "\"letter-probability matrix: alength= 4 w= %s nsites= %s E= 0\\n\"", "%", "(", "len", "(", "self", ")", ",", "np", ".", "sum", "(", "self", ".", "pfm", "[", "0", "]", ")", ")", "m", "+=", "\"\\n\"", ".", "join", "(", "[", "\"\\t\"", ".", "join", "(", "[", "\"%s\"", "%", "x", "for", "x", "in", "row", "]", ")", "for", "row", "in", "self", ".", "pwm", "]", ")", "return", "m" ]
36.642857
20.714286
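A hedged sketch of the same MEME text assembly for a bare position weight matrix, without the Motif class; pwm_to_meme and the toy PWM/count rows below are illustrative values, not part of the original API.

import numpy as np

def pwm_to_meme(motif_id, pwm, pfm):
    # pwm: list of [A, C, G, T] probability rows; pfm: matching count rows.
    out = "MOTIF %s\n" % motif_id
    out += "BL MOTIF %s width=0 seqs=0\n" % motif_id
    out += "letter-probability matrix: alength= 4 w= %s nsites= %s E= 0\n" % (
        len(pwm), int(np.sum(pfm[0])))
    out += "\n".join(["\t".join(["%s" % x for x in row]) for row in pwm])
    return out

pwm = [[0.25, 0.25, 0.25, 0.25], [0.9, 0.0, 0.1, 0.0]]
pfm = [[5, 5, 5, 5], [18, 0, 2, 0]]
print(pwm_to_meme("toy_motif", pwm, pfm))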
def extract(self, high_bit, low_bit): """ Operation extract - A cheap hack is implemented: a copy of self is returned if (high_bit - low_bit + 1 == self.bits), which is a ValueSet instance. Otherwise a StridedInterval is returned. :param high_bit: :param low_bit: :return: A ValueSet or a StridedInterval """ if high_bit - low_bit + 1 == self.bits: return self.copy() if ('global' in self._regions and len(self._regions.keys()) > 1) or \ len(self._regions.keys()) > 0: si_ret = StridedInterval.top(high_bit - low_bit + 1) else: if 'global' in self._regions: si = self._regions['global'] si_ret = si.extract(high_bit, low_bit) else: si_ret = StridedInterval.empty(high_bit - low_bit + 1) return si_ret
[ "def", "extract", "(", "self", ",", "high_bit", ",", "low_bit", ")", ":", "if", "high_bit", "-", "low_bit", "+", "1", "==", "self", ".", "bits", ":", "return", "self", ".", "copy", "(", ")", "if", "(", "'global'", "in", "self", ".", "_regions", "and", "len", "(", "self", ".", "_regions", ".", "keys", "(", ")", ")", ">", "1", ")", "or", "len", "(", "self", ".", "_regions", ".", "keys", "(", ")", ")", ">", "0", ":", "si_ret", "=", "StridedInterval", ".", "top", "(", "high_bit", "-", "low_bit", "+", "1", ")", "else", ":", "if", "'global'", "in", "self", ".", "_regions", ":", "si", "=", "self", ".", "_regions", "[", "'global'", "]", "si_ret", "=", "si", ".", "extract", "(", "high_bit", ",", "low_bit", ")", "else", ":", "si_ret", "=", "StridedInterval", ".", "empty", "(", "high_bit", "-", "low_bit", "+", "1", ")", "return", "si_ret" ]
31.714286
23.071429
def update_membership(self, contact, group): ''' input: gdata ContactEntry and GroupEntry objects ''' if not contact: log.debug('Not updating membership for EMPTY contact.') return None _uid = contact.email[0].address _gtitle = group.title.text for contact_group in contact.group_membership_info: if contact_group.href == group.get_id(): log.warn( ' ... {} already a member of {}.'.format(_uid, _gtitle)) return contact log.debug('Adding {} to group {}'.format(_uid, _gtitle)) membership = self.api.contacts.data.GroupMembershipInfo( href=group.id.text) contact.group_membership_info.append(membership) contact = self.api.update(contact) return contact
[ "def", "update_membership", "(", "self", ",", "contact", ",", "group", ")", ":", "if", "not", "contact", ":", "log", ".", "debug", "(", "'Not updating membership for EMPTY contact.'", ")", "return", "None", "_uid", "=", "contact", ".", "email", "[", "0", "]", ".", "address", "_gtitle", "=", "group", ".", "title", ".", "text", "for", "contact_group", "in", "contact", ".", "group_membership_info", ":", "if", "contact_group", ".", "href", "==", "group", ".", "get_id", "(", ")", ":", "log", ".", "warn", "(", "' ... {} already a member of {}.'", ".", "format", "(", "_uid", ",", "_gtitle", ")", ")", "return", "contact", "log", ".", "debug", "(", "'Adding {} to group {}'", ".", "format", "(", "_uid", ",", "_gtitle", ")", ")", "membership", "=", "self", ".", "api", ".", "contacts", ".", "data", ".", "GroupMembershipInfo", "(", "href", "=", "group", ".", "id", ".", "text", ")", "contact", ".", "group_membership_info", ".", "append", "(", "membership", ")", "contact", "=", "self", ".", "api", ".", "update", "(", "contact", ")", "return", "contact" ]
37.681818
18.681818
def _handle_inventory(self, inventory_path): """ Scan inventory. As Ansible is a big mess without any kind of preconceived notion of design, there are several (and I use that word lightly) different ways inventory_path can be handled: - a non-executable file: handled as a Ansible 'hosts' file. - an executable file: handled as a dynamic inventory file. - a directory: scanned for Ansible 'hosts' and dynamic inventory files. """ self.log.debug("Determining type of inventory_path {}".format(inventory_path)) if os.path.isfile(inventory_path) and \ util.is_executable(inventory_path): # It's a file and it's executable. Handle as dynamic inventory script self.log.debug("{} is a executable. Handle as dynamic inventory script".format(inventory_path)) self._parse_dyn_inventory(inventory_path) elif os.path.isfile(inventory_path): # Static inventory hosts file self.log.debug("{} is a file. Handle as static inventory file".format(inventory_path)) self._parse_hosts_inventory(inventory_path) elif os.path.isdir(inventory_path): # Directory self.log.debug("{} is a dir. Just try most files to see if they happen to be inventory files".format(inventory_path)) # Don't parse folder as inventory if it is a .git or group/host_vars if any(os.path.basename(inventory_path) == name for name in ['.git', 'group_vars', 'host_vars']): return # Scan directory for fname in os.listdir(inventory_path): # Skip files that end with certain extensions or characters if any(fname.endswith(ext) for ext in ["~", ".orig", ".bak", ".ini", ".cfg", ".retry", ".pyc", ".pyo", ".gitignore"]): continue self._handle_inventory(os.path.join(inventory_path, fname)) else: raise IOError("Invalid inventory file / dir: '{0}'".format(inventory_path))
[ "def", "_handle_inventory", "(", "self", ",", "inventory_path", ")", ":", "self", ".", "log", ".", "debug", "(", "\"Determining type of inventory_path {}\"", ".", "format", "(", "inventory_path", ")", ")", "if", "os", ".", "path", ".", "isfile", "(", "inventory_path", ")", "and", "util", ".", "is_executable", "(", "inventory_path", ")", ":", "# It's a file and it's executable. Handle as dynamic inventory script", "self", ".", "log", ".", "debug", "(", "\"{} is a executable. Handle as dynamic inventory script\"", ".", "format", "(", "inventory_path", ")", ")", "self", ".", "_parse_dyn_inventory", "(", "inventory_path", ")", "elif", "os", ".", "path", ".", "isfile", "(", "inventory_path", ")", ":", "# Static inventory hosts file", "self", ".", "log", ".", "debug", "(", "\"{} is a file. Handle as static inventory file\"", ".", "format", "(", "inventory_path", ")", ")", "self", ".", "_parse_hosts_inventory", "(", "inventory_path", ")", "elif", "os", ".", "path", ".", "isdir", "(", "inventory_path", ")", ":", "# Directory", "self", ".", "log", ".", "debug", "(", "\"{} is a dir. Just try most files to see if they happen to be inventory files\"", ".", "format", "(", "inventory_path", ")", ")", "# Don't parse folder as inventory if it is a .git or group/host_vars", "if", "any", "(", "os", ".", "path", ".", "basename", "(", "inventory_path", ")", "==", "name", "for", "name", "in", "[", "'.git'", ",", "'group_vars'", ",", "'host_vars'", "]", ")", ":", "return", "# Scan directory", "for", "fname", "in", "os", ".", "listdir", "(", "inventory_path", ")", ":", "# Skip files that end with certain extensions or characters", "if", "any", "(", "fname", ".", "endswith", "(", "ext", ")", "for", "ext", "in", "[", "\"~\"", ",", "\".orig\"", ",", "\".bak\"", ",", "\".ini\"", ",", "\".cfg\"", ",", "\".retry\"", ",", "\".pyc\"", ",", "\".pyo\"", ",", "\".gitignore\"", "]", ")", ":", "continue", "self", ".", "_handle_inventory", "(", "os", ".", "path", ".", "join", "(", "inventory_path", ",", "fname", ")", ")", "else", ":", "raise", "IOError", "(", "\"Invalid inventory file / dir: '{0}'\"", ".", "format", "(", "inventory_path", ")", ")" ]
54.157895
29.789474
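The dispatch in _handle_inventory reduces to three os.path checks; this trimmed, hypothetical sketch returns a tag instead of invoking the parsers (classify_inventory is not part of the original code, and os.access stands in for util.is_executable).

import os

def classify_inventory(path):
    # Executable file -> dynamic inventory; plain file -> static hosts file;
    # directory -> caller would recurse over its entries; anything else is an error.
    if os.path.isfile(path) and os.access(path, os.X_OK):
        return 'dynamic'
    if os.path.isfile(path):
        return 'static'
    if os.path.isdir(path):
        return 'directory'
    raise IOError("Invalid inventory file / dir: '{0}'".format(path))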
def in_dateheure(objet, pattern): """ abstractSearch in a date-time datetime.datetime (cf abstractRender.dateheure) """ if objet: pattern = re.sub(" ", '', pattern) objet_str = abstractRender.dateheure(objet) return bool(re.search(pattern, objet_str)) return False

[ "def", "in_dateheure", "(", "objet", ",", "pattern", ")", ":", "if", "objet", ":", "pattern", "=", "re", ".", "sub", "(", "\" \"", ",", "''", ",", "pattern", ")", "objet_str", "=", "abstractRender", ".", "dateheure", "(", "objet", ")", "return", "bool", "(", "re", ".", "search", "(", "pattern", ",", "objet_str", ")", ")", "return", "False" ]
46.142857
12.142857
def get_dc_inventory(pbclient, dc=None): ''' gets inventory of one data center''' if pbclient is None: raise ValueError("argument 'pbclient' must not be None") if dc is None: raise ValueError("argument 'dc' must not be None") dc_inv = [] # inventory list to return dcid = dc['id'] # dc_data contains dc specific columns dc_data = [dcid, dc['properties']['name'], dc['properties']['location']] # first get the servers # this will build a hash to relate volumes to servers later # depth 3 is enough to get into volume/nic level plus details servers = pbclient.list_servers(dcid, 3) print("found %i servers in data center %s" % (len(servers['items']), dc['properties']['name'])) if verbose > 2: print(str(servers)) # this will build a hash to relate volumes to servers later bound_vols = dict() # hash volume-to-server relations for server in servers['items']: if verbose > 2: print("SERVER: %s" % str(server)) serverid = server['id'] # server_data contains server specific columns for later output server_data = [ server['type'], serverid, server['properties']['name'], server['metadata']['state'] ] # OS is determined by boot device (volume||cdrom), not a server property. # Might even be unspecified bootOS = "NONE" bootdev = server['properties']['bootVolume'] if bootdev is None: bootdev = server['properties']['bootCdrom'] print("server %s has boot device %s" % (serverid, "CDROM")) if bootdev is None: print("server %s has NO boot device" % (serverid)) else: bootOS = bootdev['properties']['licenceType'] server_data += [bootOS, server['properties']['cores'], server['properties']['ram']] server_vols = server['entities']['volumes']['items'] n_volumes = len(server_vols) total_disk = 0 licence_type = "" for vol in server_vols: total_disk += vol['properties']['size'] licence_type = str(vol['properties']['licenceType']) bound_vols[vol['id']] = serverid if verbose: print("volume %s is connected to %s w/ OS %s" % ( vol['id'], bound_vols[vol['id']], licence_type)) server_nics = server['entities']['nics']['items'] n_nics = len(server_nics) server_data += [ n_nics, n_volumes, total_disk, "", server['metadata']['createdDate'], server['metadata']['lastModifiedDate'] ] dc_inv.append(dc_data + server_data) # end for(servers) # and now the volumes... volumes = pbclient.list_volumes(dcid, 2) # depth 2 gives max. details for volume in volumes['items']: if verbose > 2: print("VOLUME: %s" % str(volume)) volid = volume['id'] vol_data = [ volume['type'], volid, volume['properties']['name'], volume['metadata']['state'], volume['properties']['licenceType'], "", "", "", "", volume['properties']['size'] ] connect = 'NONE' if volid in bound_vols: connect = bound_vols[volid] vol_data += [ connect, volume['metadata']['createdDate'], volume['metadata']['lastModifiedDate'] ] dc_inv.append(dc_data + vol_data) # end for(volumes) return dc_inv
[ "def", "get_dc_inventory", "(", "pbclient", ",", "dc", "=", "None", ")", ":", "if", "pbclient", "is", "None", ":", "raise", "ValueError", "(", "\"argument 'pbclient' must not be None\"", ")", "if", "dc", "is", "None", ":", "raise", "ValueError", "(", "\"argument 'dc' must not be None\"", ")", "dc_inv", "=", "[", "]", "# inventory list to return", "dcid", "=", "dc", "[", "'id'", "]", "# dc_data contains dc specific columns", "dc_data", "=", "[", "dcid", ",", "dc", "[", "'properties'", "]", "[", "'name'", "]", ",", "dc", "[", "'properties'", "]", "[", "'location'", "]", "]", "# first get the servers", "# this will build a hash to relate volumes to servers later", "# depth 3 is enough to get into volume/nic level plus details", "servers", "=", "pbclient", ".", "list_servers", "(", "dcid", ",", "3", ")", "print", "(", "\"found %i servers in data center %s\"", "%", "(", "len", "(", "servers", "[", "'items'", "]", ")", ",", "dc", "[", "'properties'", "]", "[", "'name'", "]", ")", ")", "if", "verbose", ">", "2", ":", "print", "(", "str", "(", "servers", ")", ")", "# this will build a hash to relate volumes to servers later", "bound_vols", "=", "dict", "(", ")", "# hash volume-to-server relations", "for", "server", "in", "servers", "[", "'items'", "]", ":", "if", "verbose", ">", "2", ":", "print", "(", "\"SERVER: %s\"", "%", "str", "(", "server", ")", ")", "serverid", "=", "server", "[", "'id'", "]", "# server_data contains server specific columns for later output", "server_data", "=", "[", "server", "[", "'type'", "]", ",", "serverid", ",", "server", "[", "'properties'", "]", "[", "'name'", "]", ",", "server", "[", "'metadata'", "]", "[", "'state'", "]", "]", "# OS is determined by boot device (volume||cdrom), not a server property.", "# Might even be unspecified", "bootOS", "=", "\"NONE\"", "bootdev", "=", "server", "[", "'properties'", "]", "[", "'bootVolume'", "]", "if", "bootdev", "is", "None", ":", "bootdev", "=", "server", "[", "'properties'", "]", "[", "'bootCdrom'", "]", "print", "(", "\"server %s has boot device %s\"", "%", "(", "serverid", ",", "\"CDROM\"", ")", ")", "if", "bootdev", "is", "None", ":", "print", "(", "\"server %s has NO boot device\"", "%", "(", "serverid", ")", ")", "else", ":", "bootOS", "=", "bootdev", "[", "'properties'", "]", "[", "'licenceType'", "]", "server_data", "+=", "[", "bootOS", ",", "server", "[", "'properties'", "]", "[", "'cores'", "]", ",", "server", "[", "'properties'", "]", "[", "'ram'", "]", "]", "server_vols", "=", "server", "[", "'entities'", "]", "[", "'volumes'", "]", "[", "'items'", "]", "n_volumes", "=", "len", "(", "server_vols", ")", "total_disk", "=", "0", "licence_type", "=", "\"\"", "for", "vol", "in", "server_vols", ":", "total_disk", "+=", "vol", "[", "'properties'", "]", "[", "'size'", "]", "licence_type", "=", "str", "(", "vol", "[", "'properties'", "]", "[", "'licenceType'", "]", ")", "bound_vols", "[", "vol", "[", "'id'", "]", "]", "=", "serverid", "if", "verbose", ":", "print", "(", "\"volume %s is connected to %s w/ OS %s\"", "%", "(", "vol", "[", "'id'", "]", ",", "bound_vols", "[", "vol", "[", "'id'", "]", "]", ",", "licence_type", ")", ")", "server_nics", "=", "server", "[", "'entities'", "]", "[", "'nics'", "]", "[", "'items'", "]", "n_nics", "=", "len", "(", "server_nics", ")", "server_data", "+=", "[", "n_nics", ",", "n_volumes", ",", "total_disk", ",", "\"\"", ",", "server", "[", "'metadata'", "]", "[", "'createdDate'", "]", ",", "server", "[", "'metadata'", "]", "[", "'lastModifiedDate'", "]", "]", "dc_inv", ".", 
"append", "(", "dc_data", "+", "server_data", ")", "# end for(servers)", "# and now the volumes...", "volumes", "=", "pbclient", ".", "list_volumes", "(", "dcid", ",", "2", ")", "# depth 2 gives max. details", "for", "volume", "in", "volumes", "[", "'items'", "]", ":", "if", "verbose", ">", "2", ":", "print", "(", "\"VOLUME: %s\"", "%", "str", "(", "volume", ")", ")", "volid", "=", "volume", "[", "'id'", "]", "vol_data", "=", "[", "volume", "[", "'type'", "]", ",", "volid", ",", "volume", "[", "'properties'", "]", "[", "'name'", "]", ",", "volume", "[", "'metadata'", "]", "[", "'state'", "]", ",", "volume", "[", "'properties'", "]", "[", "'licenceType'", "]", ",", "\"\"", ",", "\"\"", ",", "\"\"", ",", "\"\"", ",", "volume", "[", "'properties'", "]", "[", "'size'", "]", "]", "connect", "=", "'NONE'", "if", "volid", "in", "bound_vols", ":", "connect", "=", "bound_vols", "[", "volid", "]", "vol_data", "+=", "[", "connect", ",", "volume", "[", "'metadata'", "]", "[", "'createdDate'", "]", ",", "volume", "[", "'metadata'", "]", "[", "'lastModifiedDate'", "]", "]", "dc_inv", ".", "append", "(", "dc_data", "+", "vol_data", ")", "# end for(volumes)", "return", "dc_inv" ]
42.886076
19.341772
def find_enclosing_bracket_left(self, left_ch, right_ch, start_pos=None): """ Find the left bracket enclosing current position. Return the relative position to the cursor position. When `start_pos` is given, don't look past the position. """ if self.current_char == left_ch: return 0 if start_pos is None: start_pos = 0 else: start_pos = max(0, start_pos) stack = 1 # Look backward. for i in range(self.cursor_position - 1, start_pos - 1, -1): c = self.text[i] if c == right_ch: stack += 1 elif c == left_ch: stack -= 1 if stack == 0: return i - self.cursor_position
[ "def", "find_enclosing_bracket_left", "(", "self", ",", "left_ch", ",", "right_ch", ",", "start_pos", "=", "None", ")", ":", "if", "self", ".", "current_char", "==", "left_ch", ":", "return", "0", "if", "start_pos", "is", "None", ":", "start_pos", "=", "0", "else", ":", "start_pos", "=", "max", "(", "0", ",", "start_pos", ")", "stack", "=", "1", "# Look backward.", "for", "i", "in", "range", "(", "self", ".", "cursor_position", "-", "1", ",", "start_pos", "-", "1", ",", "-", "1", ")", ":", "c", "=", "self", ".", "text", "[", "i", "]", "if", "c", "==", "right_ch", ":", "stack", "+=", "1", "elif", "c", "==", "left_ch", ":", "stack", "-=", "1", "if", "stack", "==", "0", ":", "return", "i", "-", "self", ".", "cursor_position" ]
27.321429
19.892857
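The backward scan is easy to exercise in isolation; this standalone sketch drops the Document class and takes the text and cursor position directly (a hypothetical rewrite for illustration, not the prompt_toolkit API).

def enclosing_bracket_left(text, cursor_position, left_ch='(', right_ch=')'):
    # Walk backward from the cursor, counting closers; the first position where
    # the counter reaches zero is the unmatched opening bracket.
    if cursor_position < len(text) and text[cursor_position] == left_ch:
        return 0
    stack = 1
    for i in range(cursor_position - 1, -1, -1):
        c = text[i]
        if c == right_ch:
            stack += 1
        elif c == left_ch:
            stack -= 1
            if stack == 0:
                return i - cursor_position
    return None

assert enclosing_bracket_left("f(a, (b))", 7) == -2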
def from_time( year=None, month=None, day=None, hours=None, minutes=None, seconds=None, microseconds=None, timezone=None ): """Convenience wrapper to take a series of date/time elements and return a WMI time of the form `yyyymmddHHMMSS.mmmmmm+UUU`. All elements may be int, string or omitted altogether. If omitted, they will be replaced in the output string by a series of stars of the appropriate length. :param year: The year element of the date/time :param month: The month element of the date/time :param day: The day element of the date/time :param hours: The hours element of the date/time :param minutes: The minutes element of the date/time :param seconds: The seconds element of the date/time :param microseconds: The microseconds element of the date/time :param timezone: The timezone element of the date/time :returns: A WMI datetime string of the form: `yyyymmddHHMMSS.mmmmmm+UUU` """ def str_or_stars(i, length): if i is None: return "*" * length else: return str(i).rjust(length, "0") wmi_time = "" wmi_time += str_or_stars(year, 4) wmi_time += str_or_stars(month, 2) wmi_time += str_or_stars(day, 2) wmi_time += str_or_stars(hours, 2) wmi_time += str_or_stars(minutes, 2) wmi_time += str_or_stars(seconds, 2) wmi_time += "." wmi_time += str_or_stars(microseconds, 6) if timezone is None: wmi_time += "+" else: try: int(timezone) except ValueError: wmi_time += "+" else: if timezone >= 0: wmi_time += "+" else: wmi_time += "-" timezone = abs(timezone) wmi_time += str_or_stars(timezone, 3) return wmi_time
[ "def", "from_time", "(", "year", "=", "None", ",", "month", "=", "None", ",", "day", "=", "None", ",", "hours", "=", "None", ",", "minutes", "=", "None", ",", "seconds", "=", "None", ",", "microseconds", "=", "None", ",", "timezone", "=", "None", ")", ":", "def", "str_or_stars", "(", "i", ",", "length", ")", ":", "if", "i", "is", "None", ":", "return", "\"*\"", "*", "length", "else", ":", "return", "str", "(", "i", ")", ".", "rjust", "(", "length", ",", "\"0\"", ")", "wmi_time", "=", "\"\"", "wmi_time", "+=", "str_or_stars", "(", "year", ",", "4", ")", "wmi_time", "+=", "str_or_stars", "(", "month", ",", "2", ")", "wmi_time", "+=", "str_or_stars", "(", "day", ",", "2", ")", "wmi_time", "+=", "str_or_stars", "(", "hours", ",", "2", ")", "wmi_time", "+=", "str_or_stars", "(", "minutes", ",", "2", ")", "wmi_time", "+=", "str_or_stars", "(", "seconds", ",", "2", ")", "wmi_time", "+=", "\".\"", "wmi_time", "+=", "str_or_stars", "(", "microseconds", ",", "6", ")", "if", "timezone", "is", "None", ":", "wmi_time", "+=", "\"+\"", "else", ":", "try", ":", "int", "(", "timezone", ")", "except", "ValueError", ":", "wmi_time", "+=", "\"+\"", "else", ":", "if", "timezone", ">=", "0", ":", "wmi_time", "+=", "\"+\"", "else", ":", "wmi_time", "+=", "\"-\"", "timezone", "=", "abs", "(", "timezone", ")", "wmi_time", "+=", "str_or_stars", "(", "timezone", ",", "3", ")", "return", "wmi_time" ]
36.163265
17.755102
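Since from_time has no external dependencies, a usage example pins down the wildcard padding; the sample values are arbitrary and this assumes the from_time definition above is in scope.

# Fully specified timestamp: every field rendered, timezone right-padded to 3 digits.
assert from_time(year=2024, month=1, day=15, hours=10, minutes=30, seconds=0,
                 microseconds=0, timezone=60) == '20240115103000.000000+060'
# Omitted fields become stars of the appropriate width, and a missing timezone is just '+'.
assert from_time(year=2024, month=1) == '202401********.******+'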
def cut_into_parts(self): # pylint: disable=too-many-branches, too-many-locals, too-many-statements """Cut conf into part for scheduler dispatch. Basically it provides a set of host/services for each scheduler that have no dependencies between them :return: None """ # User must have set a spare if he needed one logger.info("Splitting the configuration into parts:") nb_parts = 0 for realm in self.realms: no_spare_schedulers = realm.schedulers if not no_spare_schedulers: if realm.potential_schedulers: no_spare_schedulers = [realm.potential_schedulers[0]] nb_schedulers = len(no_spare_schedulers) nb_parts += nb_schedulers if nb_schedulers: logger.info(" %d scheduler(s) for the realm %s", nb_schedulers, realm.get_name()) else: logger.warning(" no scheduler for the realm %s", realm.get_name()) if nb_parts == 0: nb_parts = 1 # We create dummy configurations for schedulers: # they are clone of the master configuration but without hosts and # services (because they are splitted between these configurations) logger.info("Splitting the configuration into %d parts...", nb_parts) self.parts = {} for part_index in range(0, nb_parts): self.parts[part_index] = Config() # Now we copy all properties of conf into the new ones for prop, entry in sorted(list(Config.properties.items())): # Do not copy the configuration instance id nor name! if prop in ['instance_id', 'config_name']: continue # Only the one that are managed and used if entry.managed and not isinstance(entry, UnusedProp): val = getattr(self, prop, None) setattr(self.parts[part_index], prop, val) # Set the cloned configuration name self.parts[part_index].config_name = "%s (%d)" % (self.config_name, part_index) logger.debug("- cloning configuration: %s -> %s", self.parts[part_index].config_name, self.parts[part_index]) # Copy the configuration objects lists. We need a deepcopy because each configuration # will have some new groups... but we create a new uuid self.parts[part_index].uuid = get_a_new_object_id() types_creations = self.__class__.types_creations for o_type in types_creations: (_, clss, inner_property, _, clonable) = types_creations[o_type] if not clonable: logger.debug(" . do not clone: %s", inner_property) continue # todo: Indeed contactgroups should be managed like hostgroups... if inner_property in ['hostgroups', 'servicegroups']: new_groups = [] for group in getattr(self, inner_property): new_groups.append(group.copy_shell()) setattr(self.parts[part_index], inner_property, clss(new_groups)) elif inner_property in ['hosts', 'services']: setattr(self.parts[part_index], inner_property, clss([])) else: setattr(self.parts[part_index], inner_property, getattr(self, inner_property)) logger.debug(" . cloned %s: %s -> %s", inner_property, getattr(self, inner_property), getattr(self.parts[part_index], inner_property)) # The elements of the others conf will be tag here self.parts[part_index].other_elements = {} # No scheduler has yet accepted the configuration self.parts[part_index].is_assigned = False self.parts[part_index].scheduler_link = None self.parts[part_index].push_flavor = '' # Once parts got created, the current configuration has some 'parts' # self.parts is the configuration split into parts for the schedulers # Just create packs. 
There can be numerous ones # In pack we've got hosts and service and packs are in the realms logger.debug("Creating packs for realms...") self.create_packs() # Once packs got created, all the realms have some 'packs' logger.info("Realms:") for realm in self.realms: logger.info(" - realm: %s", realm) for idx in realm.packs: logger.info(" - pack: %s / %d hosts (%s)", idx, len(realm.packs[idx]), ','.join([self.hosts[host_id].get_name() for host_id in realm.packs[idx]])) # We have packs for realms and elements into configurations, let's merge this... logger.info("Realms:") offset = 0 for realm in self.realms: logger.info(" Realm: %s", realm) for idx in realm.packs: logger.info(" - pack: %s / %d hosts", idx, len(realm.packs[idx])) if not realm.packs[idx]: logger.info(" - no hosts are declared in this realm pack.") # continue try: instance_id = self.parts[idx + offset].instance_id for host_id in realm.packs[idx]: host = self.hosts[host_id] self.parts[idx + offset].hosts.add_item(host) for service_id in host.services: service = self.services[service_id] self.parts[idx + offset].services.add_item(service) # Now the conf can be linked with the realm realm.parts.update({instance_id: self.parts[idx + offset]}) # offset += 1 except KeyError: logger.info(" - no configuration part is affected " "because of mismatching hosts packs / schedulers count. " "Probably too much schedulers for the hosts count!") offset += len(realm.packs) del realm.packs # We've nearly have hosts and services. Now we want real hosts (Class) # And we want groups too for part_index in self.parts: cfg = self.parts[part_index] # Fill host groups for ori_hg in self.hostgroups: hostgroup = cfg.hostgroups.find_by_name(ori_hg.get_name()) mbrs_id = [] for host in ori_hg.members: if host != '': mbrs_id.append(host) for host in cfg.hosts: if host.uuid in mbrs_id: hostgroup.members.append(host.uuid) # And also relink the hosts with the valid hostgroups for host in cfg.hosts: orig_hgs = host.hostgroups nhgs = [] for ohg_id in orig_hgs: ohg = self.hostgroups[ohg_id] nhg = cfg.hostgroups.find_by_name(ohg.get_name()) nhgs.append(nhg.uuid) host.hostgroups = nhgs # Fill servicegroup for ori_sg in self.servicegroups: servicegroup = cfg.servicegroups.find_by_name(ori_sg.get_name()) mbrs = ori_sg.members mbrs_id = [] for service in mbrs: if service != '': mbrs_id.append(service) for service in cfg.services: if service.uuid in mbrs_id: servicegroup.members.append(service.uuid) # And also relink the services with the valid servicegroups for host in cfg.services: orig_hgs = host.servicegroups nhgs = [] for ohg_id in orig_hgs: ohg = self.servicegroups[ohg_id] nhg = cfg.servicegroups.find_by_name(ohg.get_name()) nhgs.append(nhg.uuid) host.servicegroups = nhgs # Now we fill other_elements by host (service are with their host # so they are not tagged) logger.info("Configuration parts:") for part_index in self.parts: for host in self.parts[part_index].hosts: for j in [j for j in self.parts if j != part_index]: # So other than i self.parts[part_index].other_elements[host.get_name()] = part_index logger.info("- part: %d - %s, %d hosts", part_index, self.parts[part_index], len(self.parts[part_index].hosts))
[ "def", "cut_into_parts", "(", "self", ")", ":", "# pylint: disable=too-many-branches, too-many-locals, too-many-statements", "# User must have set a spare if he needed one", "logger", ".", "info", "(", "\"Splitting the configuration into parts:\"", ")", "nb_parts", "=", "0", "for", "realm", "in", "self", ".", "realms", ":", "no_spare_schedulers", "=", "realm", ".", "schedulers", "if", "not", "no_spare_schedulers", ":", "if", "realm", ".", "potential_schedulers", ":", "no_spare_schedulers", "=", "[", "realm", ".", "potential_schedulers", "[", "0", "]", "]", "nb_schedulers", "=", "len", "(", "no_spare_schedulers", ")", "nb_parts", "+=", "nb_schedulers", "if", "nb_schedulers", ":", "logger", ".", "info", "(", "\" %d scheduler(s) for the realm %s\"", ",", "nb_schedulers", ",", "realm", ".", "get_name", "(", ")", ")", "else", ":", "logger", ".", "warning", "(", "\" no scheduler for the realm %s\"", ",", "realm", ".", "get_name", "(", ")", ")", "if", "nb_parts", "==", "0", ":", "nb_parts", "=", "1", "# We create dummy configurations for schedulers:", "# they are clone of the master configuration but without hosts and", "# services (because they are splitted between these configurations)", "logger", ".", "info", "(", "\"Splitting the configuration into %d parts...\"", ",", "nb_parts", ")", "self", ".", "parts", "=", "{", "}", "for", "part_index", "in", "range", "(", "0", ",", "nb_parts", ")", ":", "self", ".", "parts", "[", "part_index", "]", "=", "Config", "(", ")", "# Now we copy all properties of conf into the new ones", "for", "prop", ",", "entry", "in", "sorted", "(", "list", "(", "Config", ".", "properties", ".", "items", "(", ")", ")", ")", ":", "# Do not copy the configuration instance id nor name!", "if", "prop", "in", "[", "'instance_id'", ",", "'config_name'", "]", ":", "continue", "# Only the one that are managed and used", "if", "entry", ".", "managed", "and", "not", "isinstance", "(", "entry", ",", "UnusedProp", ")", ":", "val", "=", "getattr", "(", "self", ",", "prop", ",", "None", ")", "setattr", "(", "self", ".", "parts", "[", "part_index", "]", ",", "prop", ",", "val", ")", "# Set the cloned configuration name", "self", ".", "parts", "[", "part_index", "]", ".", "config_name", "=", "\"%s (%d)\"", "%", "(", "self", ".", "config_name", ",", "part_index", ")", "logger", ".", "debug", "(", "\"- cloning configuration: %s -> %s\"", ",", "self", ".", "parts", "[", "part_index", "]", ".", "config_name", ",", "self", ".", "parts", "[", "part_index", "]", ")", "# Copy the configuration objects lists. We need a deepcopy because each configuration", "# will have some new groups... but we create a new uuid", "self", ".", "parts", "[", "part_index", "]", ".", "uuid", "=", "get_a_new_object_id", "(", ")", "types_creations", "=", "self", ".", "__class__", ".", "types_creations", "for", "o_type", "in", "types_creations", ":", "(", "_", ",", "clss", ",", "inner_property", ",", "_", ",", "clonable", ")", "=", "types_creations", "[", "o_type", "]", "if", "not", "clonable", ":", "logger", ".", "debug", "(", "\" . 
do not clone: %s\"", ",", "inner_property", ")", "continue", "# todo: Indeed contactgroups should be managed like hostgroups...", "if", "inner_property", "in", "[", "'hostgroups'", ",", "'servicegroups'", "]", ":", "new_groups", "=", "[", "]", "for", "group", "in", "getattr", "(", "self", ",", "inner_property", ")", ":", "new_groups", ".", "append", "(", "group", ".", "copy_shell", "(", ")", ")", "setattr", "(", "self", ".", "parts", "[", "part_index", "]", ",", "inner_property", ",", "clss", "(", "new_groups", ")", ")", "elif", "inner_property", "in", "[", "'hosts'", ",", "'services'", "]", ":", "setattr", "(", "self", ".", "parts", "[", "part_index", "]", ",", "inner_property", ",", "clss", "(", "[", "]", ")", ")", "else", ":", "setattr", "(", "self", ".", "parts", "[", "part_index", "]", ",", "inner_property", ",", "getattr", "(", "self", ",", "inner_property", ")", ")", "logger", ".", "debug", "(", "\" . cloned %s: %s -> %s\"", ",", "inner_property", ",", "getattr", "(", "self", ",", "inner_property", ")", ",", "getattr", "(", "self", ".", "parts", "[", "part_index", "]", ",", "inner_property", ")", ")", "# The elements of the others conf will be tag here", "self", ".", "parts", "[", "part_index", "]", ".", "other_elements", "=", "{", "}", "# No scheduler has yet accepted the configuration", "self", ".", "parts", "[", "part_index", "]", ".", "is_assigned", "=", "False", "self", ".", "parts", "[", "part_index", "]", ".", "scheduler_link", "=", "None", "self", ".", "parts", "[", "part_index", "]", ".", "push_flavor", "=", "''", "# Once parts got created, the current configuration has some 'parts'", "# self.parts is the configuration split into parts for the schedulers", "# Just create packs. There can be numerous ones", "# In pack we've got hosts and service and packs are in the realms", "logger", ".", "debug", "(", "\"Creating packs for realms...\"", ")", "self", ".", "create_packs", "(", ")", "# Once packs got created, all the realms have some 'packs'", "logger", ".", "info", "(", "\"Realms:\"", ")", "for", "realm", "in", "self", ".", "realms", ":", "logger", ".", "info", "(", "\" - realm: %s\"", ",", "realm", ")", "for", "idx", "in", "realm", ".", "packs", ":", "logger", ".", "info", "(", "\" - pack: %s / %d hosts (%s)\"", ",", "idx", ",", "len", "(", "realm", ".", "packs", "[", "idx", "]", ")", ",", "','", ".", "join", "(", "[", "self", ".", "hosts", "[", "host_id", "]", ".", "get_name", "(", ")", "for", "host_id", "in", "realm", ".", "packs", "[", "idx", "]", "]", ")", ")", "# We have packs for realms and elements into configurations, let's merge this...", "logger", ".", "info", "(", "\"Realms:\"", ")", "offset", "=", "0", "for", "realm", "in", "self", ".", "realms", ":", "logger", ".", "info", "(", "\" Realm: %s\"", ",", "realm", ")", "for", "idx", "in", "realm", ".", "packs", ":", "logger", ".", "info", "(", "\" - pack: %s / %d hosts\"", ",", "idx", ",", "len", "(", "realm", ".", "packs", "[", "idx", "]", ")", ")", "if", "not", "realm", ".", "packs", "[", "idx", "]", ":", "logger", ".", "info", "(", "\" - no hosts are declared in this realm pack.\"", ")", "# continue", "try", ":", "instance_id", "=", "self", ".", "parts", "[", "idx", "+", "offset", "]", ".", "instance_id", "for", "host_id", "in", "realm", ".", "packs", "[", "idx", "]", ":", "host", "=", "self", ".", "hosts", "[", "host_id", "]", "self", ".", "parts", "[", "idx", "+", "offset", "]", ".", "hosts", ".", "add_item", "(", "host", ")", "for", "service_id", "in", "host", ".", "services", ":", "service", "=", "self", ".", 
"services", "[", "service_id", "]", "self", ".", "parts", "[", "idx", "+", "offset", "]", ".", "services", ".", "add_item", "(", "service", ")", "# Now the conf can be linked with the realm", "realm", ".", "parts", ".", "update", "(", "{", "instance_id", ":", "self", ".", "parts", "[", "idx", "+", "offset", "]", "}", ")", "# offset += 1", "except", "KeyError", ":", "logger", ".", "info", "(", "\" - no configuration part is affected \"", "\"because of mismatching hosts packs / schedulers count. \"", "\"Probably too much schedulers for the hosts count!\"", ")", "offset", "+=", "len", "(", "realm", ".", "packs", ")", "del", "realm", ".", "packs", "# We've nearly have hosts and services. Now we want real hosts (Class)", "# And we want groups too", "for", "part_index", "in", "self", ".", "parts", ":", "cfg", "=", "self", ".", "parts", "[", "part_index", "]", "# Fill host groups", "for", "ori_hg", "in", "self", ".", "hostgroups", ":", "hostgroup", "=", "cfg", ".", "hostgroups", ".", "find_by_name", "(", "ori_hg", ".", "get_name", "(", ")", ")", "mbrs_id", "=", "[", "]", "for", "host", "in", "ori_hg", ".", "members", ":", "if", "host", "!=", "''", ":", "mbrs_id", ".", "append", "(", "host", ")", "for", "host", "in", "cfg", ".", "hosts", ":", "if", "host", ".", "uuid", "in", "mbrs_id", ":", "hostgroup", ".", "members", ".", "append", "(", "host", ".", "uuid", ")", "# And also relink the hosts with the valid hostgroups", "for", "host", "in", "cfg", ".", "hosts", ":", "orig_hgs", "=", "host", ".", "hostgroups", "nhgs", "=", "[", "]", "for", "ohg_id", "in", "orig_hgs", ":", "ohg", "=", "self", ".", "hostgroups", "[", "ohg_id", "]", "nhg", "=", "cfg", ".", "hostgroups", ".", "find_by_name", "(", "ohg", ".", "get_name", "(", ")", ")", "nhgs", ".", "append", "(", "nhg", ".", "uuid", ")", "host", ".", "hostgroups", "=", "nhgs", "# Fill servicegroup", "for", "ori_sg", "in", "self", ".", "servicegroups", ":", "servicegroup", "=", "cfg", ".", "servicegroups", ".", "find_by_name", "(", "ori_sg", ".", "get_name", "(", ")", ")", "mbrs", "=", "ori_sg", ".", "members", "mbrs_id", "=", "[", "]", "for", "service", "in", "mbrs", ":", "if", "service", "!=", "''", ":", "mbrs_id", ".", "append", "(", "service", ")", "for", "service", "in", "cfg", ".", "services", ":", "if", "service", ".", "uuid", "in", "mbrs_id", ":", "servicegroup", ".", "members", ".", "append", "(", "service", ".", "uuid", ")", "# And also relink the services with the valid servicegroups", "for", "host", "in", "cfg", ".", "services", ":", "orig_hgs", "=", "host", ".", "servicegroups", "nhgs", "=", "[", "]", "for", "ohg_id", "in", "orig_hgs", ":", "ohg", "=", "self", ".", "servicegroups", "[", "ohg_id", "]", "nhg", "=", "cfg", ".", "servicegroups", ".", "find_by_name", "(", "ohg", ".", "get_name", "(", ")", ")", "nhgs", ".", "append", "(", "nhg", ".", "uuid", ")", "host", ".", "servicegroups", "=", "nhgs", "# Now we fill other_elements by host (service are with their host", "# so they are not tagged)", "logger", ".", "info", "(", "\"Configuration parts:\"", ")", "for", "part_index", "in", "self", ".", "parts", ":", "for", "host", "in", "self", ".", "parts", "[", "part_index", "]", ".", "hosts", ":", "for", "j", "in", "[", "j", "for", "j", "in", "self", ".", "parts", "if", "j", "!=", "part_index", "]", ":", "# So other than i", "self", ".", "parts", "[", "part_index", "]", ".", "other_elements", "[", "host", ".", "get_name", "(", ")", "]", "=", "part_index", "logger", ".", "info", "(", "\"- part: %d - %s, %d hosts\"", ",", "part_index", ",", "self", 
".", "parts", "[", "part_index", "]", ",", "len", "(", "self", ".", "parts", "[", "part_index", "]", ".", "hosts", ")", ")" ]
47.711957
21.570652
def update_all(self, *args, **kwargs): """Updates all objects with details given if they match a set of conditions supplied. This method forwards filters and updates directly to the repository. It does not instantiate entities and it does not trigger Entity callbacks or validations. Update values can be specified either as a dict, or keyword arguments. Returns the number of objects matched (which may not be equal to the number of objects updated if some rows already have the new value). """ updated_item_count = 0 repository = repo_factory.get_repository(self._entity_cls) try: updated_item_count = repository.update_all(self._criteria, *args, **kwargs) except Exception: # FIXME Log Exception raise return updated_item_count
[ "def", "update_all", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "updated_item_count", "=", "0", "repository", "=", "repo_factory", ".", "get_repository", "(", "self", ".", "_entity_cls", ")", "try", ":", "updated_item_count", "=", "repository", ".", "update_all", "(", "self", ".", "_criteria", ",", "*", "args", ",", "*", "*", "kwargs", ")", "except", "Exception", ":", "# FIXME Log Exception", "raise", "return", "updated_item_count" ]
38.818182
27.909091
def update(self, obj, set_fields = None, unset_fields = None, update_obj = True): """ We return the result of the save method (updates are not yet implemented here). """ if set_fields: if isinstance(set_fields,(list,tuple)): set_attributes = {} for key in set_fields: try: set_attributes[key] = get_value(obj,key) except KeyError: pass else: set_attributes = set_fields else: set_attributes = {} if unset_fields: unset_attributes = unset_fields else: unset_attributes = [] self.call_hook('before_update',obj,set_attributes,unset_attributes) if update_obj: for key,value in set_attributes.items(): set_value(obj,key,value) for key in unset_attributes: delete_value(obj,key) return self.save(obj,call_hook = False)
[ "def", "update", "(", "self", ",", "obj", ",", "set_fields", "=", "None", ",", "unset_fields", "=", "None", ",", "update_obj", "=", "True", ")", ":", "if", "set_fields", ":", "if", "isinstance", "(", "set_fields", ",", "(", "list", ",", "tuple", ")", ")", ":", "set_attributes", "=", "{", "}", "for", "key", "in", "set_fields", ":", "try", ":", "set_attributes", "[", "key", "]", "=", "get_value", "(", "obj", ",", "key", ")", "except", "KeyError", ":", "pass", "else", ":", "set_attributes", "=", "set_fields", "else", ":", "set_attributes", "=", "{", "}", "if", "unset_fields", ":", "unset_attributes", "=", "unset_fields", "else", ":", "unset_attributes", "=", "[", "]", "self", ".", "call_hook", "(", "'before_update'", ",", "obj", ",", "set_attributes", ",", "unset_attributes", ")", "if", "update_obj", ":", "for", "key", ",", "value", "in", "set_attributes", ".", "items", "(", ")", ":", "set_value", "(", "obj", ",", "key", ",", "value", ")", "for", "key", "in", "unset_attributes", ":", "delete_value", "(", "obj", ",", "key", ")", "return", "self", ".", "save", "(", "obj", ",", "call_hook", "=", "False", ")" ]
33.933333
16.333333
def _make_postfixes_2( words_layer ): ''' Provides some post-fixes after the disambiguation. ''' for word_dict in words_layer: for analysis in word_dict[ANALYSIS]: analysis[FORM] = re.sub( '(Sg|Pl)([123])', '\\1 \\2', analysis[FORM] ) return words_layer
[ "def", "_make_postfixes_2", "(", "words_layer", ")", ":", "for", "word_dict", "in", "words_layer", ":", "for", "analysis", "in", "word_dict", "[", "ANALYSIS", "]", ":", "analysis", "[", "FORM", "]", "=", "re", ".", "sub", "(", "'(Sg|Pl)([123])'", ",", "'\\\\1 \\\\2'", ",", "analysis", "[", "FORM", "]", ")", "return", "words_layer" ]
46.333333
15.666667
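The regex itself is easy to check in isolation; it splits a fused number/person morphological tag, and other forms pass through untouched. A small self-contained check:

import re
# 'Sg3' -> 'Sg 3', 'Pl1' -> 'Pl 1'; unrelated text is left as-is.
assert re.sub('(Sg|Pl)([123])', '\\1 \\2', 'Sg3') == 'Sg 3'
assert re.sub('(Sg|Pl)([123])', '\\1 \\2', 'Pl1 v') == 'Pl 1 v'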
def most_confused(self, min_val:int=1, slice_size:int=1)->Collection[Tuple[str,str,int]]: "Sorted descending list of largest non-diagonal entries of confusion matrix, presented as actual, predicted, number of occurrences." cm = self.confusion_matrix(slice_size=slice_size) np.fill_diagonal(cm, 0) res = [(self.data.classes[i],self.data.classes[j],cm[i,j]) for i,j in zip(*np.where(cm>=min_val))] return sorted(res, key=itemgetter(2), reverse=True)
[ "def", "most_confused", "(", "self", ",", "min_val", ":", "int", "=", "1", ",", "slice_size", ":", "int", "=", "1", ")", "->", "Collection", "[", "Tuple", "[", "str", ",", "str", ",", "int", "]", "]", ":", "cm", "=", "self", ".", "confusion_matrix", "(", "slice_size", "=", "slice_size", ")", "np", ".", "fill_diagonal", "(", "cm", ",", "0", ")", "res", "=", "[", "(", "self", ".", "data", ".", "classes", "[", "i", "]", ",", "self", ".", "data", ".", "classes", "[", "j", "]", ",", "cm", "[", "i", ",", "j", "]", ")", "for", "i", ",", "j", "in", "zip", "(", "*", "np", ".", "where", "(", "cm", ">=", "min_val", ")", ")", "]", "return", "sorted", "(", "res", ",", "key", "=", "itemgetter", "(", "2", ")", ",", "reverse", "=", "True", ")" ]
71
33.571429
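Without the fastai interpretation object, the core of most_confused is just an off-diagonal scan of a confusion matrix; a numpy-only sketch with made-up classes (most_confused_from_cm and the sample matrix are illustrative, not the fastai API).

import numpy as np
from operator import itemgetter

def most_confused_from_cm(cm, classes, min_val=1):
    # Zero the diagonal, then collect (actual, predicted, count) for every
    # remaining cell at or above min_val, sorted by count descending.
    cm = cm.copy()
    np.fill_diagonal(cm, 0)
    res = [(classes[i], classes[j], int(cm[i, j]))
           for i, j in zip(*np.where(cm >= min_val))]
    return sorted(res, key=itemgetter(2), reverse=True)

cm = np.array([[50, 3, 0],
               [1, 40, 7],
               [0, 2, 45]])
print(most_confused_from_cm(cm, ['cat', 'dog', 'bird']))
# [('dog', 'bird', 7), ('cat', 'dog', 3), ('bird', 'dog', 2), ('dog', 'cat', 1)]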
def get_fld2val(self, name, vals): """Describe summary statistics for a list of numbers.""" if vals: return self._init_fld2val_stats(name, vals) return self._init_fld2val_null(name)
[ "def", "get_fld2val", "(", "self", ",", "name", ",", "vals", ")", ":", "if", "vals", ":", "return", "self", ".", "_init_fld2val_stats", "(", "name", ",", "vals", ")", "return", "self", ".", "_init_fld2val_null", "(", "name", ")" ]
42.6
9.8
def _tupleload(l: Loader, value, type_) -> Tuple: """ This loads into something like Tuple[int,str] """ if HAS_TUPLEARGS: args = type_.__args__ else: args = type_.__tuple_params__ if len(args) == 2 and args[1] == ...: # Tuple[something, ...] return tuple(l.load(i, args[0]) for i in value) else: # Tuple[something, something, somethingelse] if l.failonextra and len(value) > len(args): raise TypedloadValueError('Value is too long for type %s' % type_, value=value, type_=type_) elif len(value) < len(args): raise TypedloadValueError('Value is too short for type %s' % type_, value=value, type_=type_) return tuple(l.load(v, t, annotation=Annotation(AnnotationType.INDEX, i)) for i, (v, t) in enumerate(zip(value, args)))
[ "def", "_tupleload", "(", "l", ":", "Loader", ",", "value", ",", "type_", ")", "->", "Tuple", ":", "if", "HAS_TUPLEARGS", ":", "args", "=", "type_", ".", "__args__", "else", ":", "args", "=", "type_", ".", "__tuple_params__", "if", "len", "(", "args", ")", "==", "2", "and", "args", "[", "1", "]", "==", "...", ":", "# Tuple[something, ...]", "return", "tuple", "(", "l", ".", "load", "(", "i", ",", "args", "[", "0", "]", ")", "for", "i", "in", "value", ")", "else", ":", "# Tuple[something, something, somethingelse]", "if", "l", ".", "failonextra", "and", "len", "(", "value", ")", ">", "len", "(", "args", ")", ":", "raise", "TypedloadValueError", "(", "'Value is too long for type %s'", "%", "type_", ",", "value", "=", "value", ",", "type_", "=", "type_", ")", "elif", "len", "(", "value", ")", "<", "len", "(", "args", ")", ":", "raise", "TypedloadValueError", "(", "'Value is too short for type %s'", "%", "type_", ",", "value", "=", "value", ",", "type_", "=", "type_", ")", "return", "tuple", "(", "l", ".", "load", "(", "v", ",", "t", ",", "annotation", "=", "Annotation", "(", "AnnotationType", ".", "INDEX", ",", "i", ")", ")", "for", "i", ",", "(", "v", ",", "t", ")", "in", "enumerate", "(", "zip", "(", "value", ",", "args", ")", ")", ")" ]
47.411765
24
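A hedged standalone illustration of the two tuple cases the loader distinguishes, using typing.get_args and the annotation types as simple converters instead of recursive loading; load_tuple is a sketch, not the typedload API, and it omits the error context the real Loader attaches.

from typing import Tuple, get_args

def load_tuple(value, type_):
    # Homogeneous Tuple[X, ...] versus fixed-arity Tuple[X, Y, ...].
    args = get_args(type_)
    if len(args) == 2 and args[1] is Ellipsis:
        return tuple(args[0](v) for v in value)
    if len(value) != len(args):
        raise ValueError('Length mismatch for %s' % (type_,))
    return tuple(t(v) for v, t in zip(value, args))

assert load_tuple(['1', '2', '3'], Tuple[int, ...]) == (1, 2, 3)
assert load_tuple(['1', 'x'], Tuple[int, str]) == (1, 'x')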
def plotMDS(data, theOrders, theLabels, theColors, theAlphas, theSizes, theMarkers, options): """Plot the MDS data. :param data: the data to plot (MDS values). :param theOrders: the order of the populations to plot. :param theLabels: the names of the populations to plot. :param theColors: the colors of the populations to plot. :param theAlphas: the alpha value for the populations to plot. :param theSizes: the sizes of the markers for each population to plot. :param theMarkers: the type of marker for each population to plot. :param options: the options. :type data: list of numpy.array :type theOrders: list :type theLabels: list :type theColors: list :type theAlphas: list :type theSizes: list :type theMarkers: list :type options: argparse.Namespace """ # Do the import import matplotlib as mpl if options.format != "X11" and mpl.get_backend() != "agg": mpl.use("Agg") import matplotlib.pyplot as plt if options.format != "X11": plt.ioff() fig = plt.figure() try: fig.subplots_adjust(right=options.adjust_right, left=options.adjust_left, bottom=options.adjust_bottom, top=options.adjust_top) except ValueError as e: raise ProgramError(e) ax = fig.add_subplot(111) # Setting the axis ax.xaxis.set_ticks_position("bottom") ax.yaxis.set_ticks_position("left") ax.spines["top"].set_visible(False) ax.spines["right"].set_visible(False) ax.spines["bottom"].set_position(("outward", 9)) ax.spines["left"].set_position(("outward", 9)) # The plot plotObject = [] labels = [] for i, index in enumerate(theOrders): try: tmp, = ax.plot(data[0][i], data[1][i], theMarkers[i], color=theColors[i], mec=theColors[i], markersize=theSizes[i], alpha=theAlphas[i]) except ValueError as e: msg = "Problem with markers: %(e)s" % locals() raise ProgramError(msg) plotObject.append(tmp) labels.append(index) # The legend prop = mpl.font_manager.FontProperties(size=options.legend_size) leg = ax.legend(plotObject, labels, loc=options.legend_position, numpoints=1, fancybox=True, prop=prop, ncol=options.legend_ncol) leg.get_frame().set_alpha(0.5) # The title and XY labels ax.set_title(options.title, fontsize=options.title_fontsize, weight="bold") ax.set_xlabel(options.xlabel, fontsize=options.label_fontsize) ax.set_ylabel(options.ylabel, fontsize=options.label_fontsize) # Changing the size of the tick labels for tick in ax.yaxis.get_major_ticks() + ax.xaxis.get_major_ticks(): tick.label.set_fontsize(options.axis_fontsize) if options.format == "X11": # Show the plot plt.show() else: fileName = options.out + "." + options.format try: plt.savefig(fileName, dpi=300) except IOError: msg = "%(fileName)s: can't write file" % locals() raise ProgramError(msg) except ValueError as e: colorError = False for errorMsg in str(e).split("\n"): if errorMsg.startswith("to_rgb"): colorError = True if colorError: msg = "problem with the population colors" raise ProgramError(msg) else: print str(e)
[ "def", "plotMDS", "(", "data", ",", "theOrders", ",", "theLabels", ",", "theColors", ",", "theAlphas", ",", "theSizes", ",", "theMarkers", ",", "options", ")", ":", "# Do the import", "import", "matplotlib", "as", "mpl", "if", "options", ".", "format", "!=", "\"X11\"", "and", "mpl", ".", "get_backend", "(", ")", "!=", "\"agg\"", ":", "mpl", ".", "use", "(", "\"Agg\"", ")", "import", "matplotlib", ".", "pyplot", "as", "plt", "if", "options", ".", "format", "!=", "\"X11\"", ":", "plt", ".", "ioff", "(", ")", "fig", "=", "plt", ".", "figure", "(", ")", "try", ":", "fig", ".", "subplots_adjust", "(", "right", "=", "options", ".", "adjust_right", ",", "left", "=", "options", ".", "adjust_left", ",", "bottom", "=", "options", ".", "adjust_bottom", ",", "top", "=", "options", ".", "adjust_top", ")", "except", "ValueError", "as", "e", ":", "raise", "ProgramError", "(", "e", ")", "ax", "=", "fig", ".", "add_subplot", "(", "111", ")", "# Setting the axis", "ax", ".", "xaxis", ".", "set_ticks_position", "(", "\"bottom\"", ")", "ax", ".", "yaxis", ".", "set_ticks_position", "(", "\"left\"", ")", "ax", ".", "spines", "[", "\"top\"", "]", ".", "set_visible", "(", "False", ")", "ax", ".", "spines", "[", "\"right\"", "]", ".", "set_visible", "(", "False", ")", "ax", ".", "spines", "[", "\"bottom\"", "]", ".", "set_position", "(", "(", "\"outward\"", ",", "9", ")", ")", "ax", ".", "spines", "[", "\"left\"", "]", ".", "set_position", "(", "(", "\"outward\"", ",", "9", ")", ")", "# The plot", "plotObject", "=", "[", "]", "labels", "=", "[", "]", "for", "i", ",", "index", "in", "enumerate", "(", "theOrders", ")", ":", "try", ":", "tmp", ",", "=", "ax", ".", "plot", "(", "data", "[", "0", "]", "[", "i", "]", ",", "data", "[", "1", "]", "[", "i", "]", ",", "theMarkers", "[", "i", "]", ",", "color", "=", "theColors", "[", "i", "]", ",", "mec", "=", "theColors", "[", "i", "]", ",", "markersize", "=", "theSizes", "[", "i", "]", ",", "alpha", "=", "theAlphas", "[", "i", "]", ")", "except", "ValueError", "as", "e", ":", "msg", "=", "\"Problem with markers: %(e)s\"", "%", "locals", "(", ")", "raise", "ProgramError", "(", "msg", ")", "plotObject", ".", "append", "(", "tmp", ")", "labels", ".", "append", "(", "index", ")", "# The legend", "prop", "=", "mpl", ".", "font_manager", ".", "FontProperties", "(", "size", "=", "options", ".", "legend_size", ")", "leg", "=", "ax", ".", "legend", "(", "plotObject", ",", "labels", ",", "loc", "=", "options", ".", "legend_position", ",", "numpoints", "=", "1", ",", "fancybox", "=", "True", ",", "prop", "=", "prop", ",", "ncol", "=", "options", ".", "legend_ncol", ")", "leg", ".", "get_frame", "(", ")", ".", "set_alpha", "(", "0.5", ")", "# The title and XY labels", "ax", ".", "set_title", "(", "options", ".", "title", ",", "fontsize", "=", "options", ".", "title_fontsize", ",", "weight", "=", "\"bold\"", ")", "ax", ".", "set_xlabel", "(", "options", ".", "xlabel", ",", "fontsize", "=", "options", ".", "label_fontsize", ")", "ax", ".", "set_ylabel", "(", "options", ".", "ylabel", ",", "fontsize", "=", "options", ".", "label_fontsize", ")", "# Changing the size of the tick labels", "for", "tick", "in", "ax", ".", "yaxis", ".", "get_major_ticks", "(", ")", "+", "ax", ".", "xaxis", ".", "get_major_ticks", "(", ")", ":", "tick", ".", "label", ".", "set_fontsize", "(", "options", ".", "axis_fontsize", ")", "if", "options", ".", "format", "==", "\"X11\"", ":", "# Show the plot", "plt", ".", "show", "(", ")", "else", ":", "fileName", "=", "options", ".", "out", "+", "\".\"", "+", 
"options", ".", "format", "try", ":", "plt", ".", "savefig", "(", "fileName", ",", "dpi", "=", "300", ")", "except", "IOError", ":", "msg", "=", "\"%(fileName)s: can't write file\"", "%", "locals", "(", ")", "raise", "ProgramError", "(", "msg", ")", "except", "ValueError", "as", "e", ":", "colorError", "=", "False", "for", "errorMsg", "in", "str", "(", "e", ")", ".", "split", "(", "\"\\n\"", ")", ":", "if", "errorMsg", ".", "startswith", "(", "\"to_rgb\"", ")", ":", "colorError", "=", "True", "if", "colorError", ":", "msg", "=", "\"problem with the population colors\"", "raise", "ProgramError", "(", "msg", ")", "else", ":", "print", "str", "(", "e", ")" ]
35.383838
17.747475
def unload(self, core): """http://wiki.apache.org/solr/CoreAdmin#head-f5055a885932e2c25096a8856de840b06764d143""" params = { 'action': 'UNLOAD', 'core': core, } return self._get_url(self.url, params=params)
[ "def", "unload", "(", "self", ",", "core", ")", ":", "params", "=", "{", "'action'", ":", "'UNLOAD'", ",", "'core'", ":", "core", ",", "}", "return", "self", ".", "_get_url", "(", "self", ".", "url", ",", "params", "=", "params", ")" ]
36.571429
15.285714
def is_extracted(self, file_path): """ Check if the data file is already extracted. """ if os.path.isdir(file_path): self.chatbot.logger.info('File is already extracted') return True return False
[ "def", "is_extracted", "(", "self", ",", "file_path", ")", ":", "if", "os", ".", "path", ".", "isdir", "(", "file_path", ")", ":", "self", ".", "chatbot", ".", "logger", ".", "info", "(", "'File is already extracted'", ")", "return", "True", "return", "False" ]
28
13.777778
def _set_gre_dscp(self, v, load=False): """ Setter method for gre_dscp, mapped from YANG variable /interface/tunnel/gre_dscp (uint32) If this variable is read-only (config: false) in the source YANG file, then _set_gre_dscp is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_gre_dscp() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0 .. 63']}), is_leaf=True, yang_name="gre-dscp", rest_name="dscp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Tunnel dscp range 0 to 63', u'alt-name': u'dscp', u'cli-full-no': None}}, namespace='urn:brocade.com:mgmt:brocade-gre-vxlan', defining_module='brocade-gre-vxlan', yang_type='uint32', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """gre_dscp must be of a type compatible with uint32""", 'defined-type': "uint32", 'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0 .. 63']}), is_leaf=True, yang_name="gre-dscp", rest_name="dscp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Tunnel dscp range 0 to 63', u'alt-name': u'dscp', u'cli-full-no': None}}, namespace='urn:brocade.com:mgmt:brocade-gre-vxlan', defining_module='brocade-gre-vxlan', yang_type='uint32', is_config=True)""", }) self.__gre_dscp = t if hasattr(self, '_set'): self._set()
[ "def", "_set_gre_dscp", "(", "self", ",", "v", ",", "load", "=", "False", ")", ":", "if", "hasattr", "(", "v", ",", "\"_utype\"", ")", ":", "v", "=", "v", ".", "_utype", "(", "v", ")", "try", ":", "t", "=", "YANGDynClass", "(", "v", ",", "base", "=", "RestrictedClassType", "(", "base_type", "=", "RestrictedClassType", "(", "base_type", "=", "long", ",", "restriction_dict", "=", "{", "'range'", ":", "[", "'0..4294967295'", "]", "}", ",", "int_size", "=", "32", ")", ",", "restriction_dict", "=", "{", "'range'", ":", "[", "u'0 .. 63'", "]", "}", ")", ",", "is_leaf", "=", "True", ",", "yang_name", "=", "\"gre-dscp\"", ",", "rest_name", "=", "\"dscp\"", ",", "parent", "=", "self", ",", "path_helper", "=", "self", ".", "_path_helper", ",", "extmethods", "=", "self", ".", "_extmethods", ",", "register_paths", "=", "True", ",", "extensions", "=", "{", "u'tailf-common'", ":", "{", "u'cli-full-command'", ":", "None", ",", "u'info'", ":", "u'Tunnel dscp range 0 to 63'", ",", "u'alt-name'", ":", "u'dscp'", ",", "u'cli-full-no'", ":", "None", "}", "}", ",", "namespace", "=", "'urn:brocade.com:mgmt:brocade-gre-vxlan'", ",", "defining_module", "=", "'brocade-gre-vxlan'", ",", "yang_type", "=", "'uint32'", ",", "is_config", "=", "True", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "raise", "ValueError", "(", "{", "'error-string'", ":", "\"\"\"gre_dscp must be of a type compatible with uint32\"\"\"", ",", "'defined-type'", ":", "\"uint32\"", ",", "'generated-type'", ":", "\"\"\"YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0 .. 63']}), is_leaf=True, yang_name=\"gre-dscp\", rest_name=\"dscp\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Tunnel dscp range 0 to 63', u'alt-name': u'dscp', u'cli-full-no': None}}, namespace='urn:brocade.com:mgmt:brocade-gre-vxlan', defining_module='brocade-gre-vxlan', yang_type='uint32', is_config=True)\"\"\"", ",", "}", ")", "self", ".", "__gre_dscp", "=", "t", "if", "hasattr", "(", "self", ",", "'_set'", ")", ":", "self", ".", "_set", "(", ")" ]
88.545455
41.681818
def ingest_memory(self, memory): """ Transform the memory into bytes :param memory: Compose memory definition. (1g, 24k) :type memory: memory string or integer :return: The memory in bytes :rtype: int """ def lshift(num, shift): return num << shift def rshift(num, shift): return num >> shift if isinstance(memory, int): # Memory was specified as an integer, meaning it is in bytes memory = '{}b'.format(memory) bit_shift = { 'g': {'func': lshift, 'shift': 30}, 'm': {'func': lshift, 'shift': 20}, 'k': {'func': lshift, 'shift': 10}, 'b': {'func': rshift, 'shift': 0} } unit = memory[-1] number = int(memory[:-1]) return bit_shift[unit]['func'](number, bit_shift[unit]['shift'])
[ "def", "ingest_memory", "(", "self", ",", "memory", ")", ":", "def", "lshift", "(", "num", ",", "shift", ")", ":", "return", "num", "<<", "shift", "def", "rshift", "(", "num", ",", "shift", ")", ":", "return", "num", ">>", "shift", "if", "isinstance", "(", "memory", ",", "int", ")", ":", "# Memory was specified as an integer, meaning it is in bytes", "memory", "=", "'{}b'", ".", "format", "(", "memory", ")", "bit_shift", "=", "{", "'g'", ":", "{", "'func'", ":", "lshift", ",", "'shift'", ":", "30", "}", ",", "'m'", ":", "{", "'func'", ":", "lshift", ",", "'shift'", ":", "20", "}", ",", "'k'", ":", "{", "'func'", ":", "lshift", ",", "'shift'", ":", "10", "}", ",", "'b'", ":", "{", "'func'", ":", "rshift", ",", "'shift'", ":", "0", "}", "}", "unit", "=", "memory", "[", "-", "1", "]", "number", "=", "int", "(", "memory", "[", ":", "-", "1", "]", ")", "return", "bit_shift", "[", "unit", "]", "[", "'func'", "]", "(", "number", ",", "bit_shift", "[", "unit", "]", "[", "'shift'", "]", ")" ]
31.107143
15.107143
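A minimal usage sketch for ingest_memory above (not part of the source row): the method never reads self, so None is passed as a stand-in, and the expected values follow directly from the bit-shift table.

# assuming ingest_memory from the row above is in scope as a plain function
print(ingest_memory(None, '1g'))   # 1073741824  (1 << 30)
print(ingest_memory(None, '24k'))  # 24576       (24 << 10)
print(ingest_memory(None, 2048))   # 2048        (bare integers are treated as bytes)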
def venn2_unweighted(subsets, set_labels=('A', 'B'), set_colors=('r', 'g'), alpha=0.4, normalize_to=1.0, subset_areas=(1, 1, 1), ax=None, subset_label_formatter=None):
    '''
    The version of venn2 without area-weighting.
    It is implemented as a wrapper around venn2. Namely, venn2 is invoked as usual, but with all subset areas
    set to 1. The subset labels are then replaced in the resulting diagram with the provided subset sizes.

    The parameters are all the same as those of venn2.
    In addition there is a subset_areas parameter, which specifies the actual subset areas.
    (it is (1, 1, 1) by default. You are free to change it, within reason).
    '''
    v = venn2(subset_areas, set_labels, set_colors, alpha, normalize_to, ax)
    # Now rename the labels
    if subset_label_formatter is None:
        subset_label_formatter = str
    subset_ids = ['10', '01', '11']
    if isinstance(subsets, dict):
        subsets = [subsets.get(t, 0) for t in subset_ids]
    elif len(subsets) == 2:
        subsets = compute_venn2_subsets(*subsets)
    for n, id in enumerate(subset_ids):
        lbl = v.get_label_by_id(id)
        if lbl is not None:
            lbl.set_text(subset_label_formatter(subsets[n]))
    return v
[ "def", "venn2_unweighted", "(", "subsets", ",", "set_labels", "=", "(", "'A'", ",", "'B'", ")", ",", "set_colors", "=", "(", "'r'", ",", "'g'", ")", ",", "alpha", "=", "0.4", ",", "normalize_to", "=", "1.0", ",", "subset_areas", "=", "(", "1", ",", "1", ",", "1", ")", ",", "ax", "=", "None", ",", "subset_label_formatter", "=", "None", ")", ":", "v", "=", "venn2", "(", "subset_areas", ",", "set_labels", ",", "set_colors", ",", "alpha", ",", "normalize_to", ",", "ax", ")", "# Now rename the labels", "if", "subset_label_formatter", "is", "None", ":", "subset_label_formatter", "=", "str", "subset_ids", "=", "[", "'10'", ",", "'01'", ",", "'11'", "]", "if", "isinstance", "(", "subsets", ",", "dict", ")", ":", "subsets", "=", "[", "subsets", ".", "get", "(", "t", ",", "0", ")", "for", "t", "in", "subset_ids", "]", "elif", "len", "(", "subsets", ")", "==", "2", ":", "subsets", "=", "compute_venn2_subsets", "(", "*", "subsets", ")", "for", "n", ",", "id", "in", "enumerate", "(", "subset_ids", ")", ":", "lbl", "=", "v", ".", "get_label_by_id", "(", "id", ")", "if", "lbl", "is", "not", "None", ":", "lbl", ".", "set_text", "(", "subset_label_formatter", "(", "subsets", "[", "n", "]", ")", ")", "return", "v" ]
50.916667
26.666667
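A hedged usage sketch for venn2_unweighted; the matplotlib_venn import is an assumption about where the function above lives, and the two sets are purely illustrative.

import matplotlib.pyplot as plt
from matplotlib_venn import venn2_unweighted  # assumed packaging of the function above

# Two example sets; their subset counts are drawn onto equal-area (unweighted) circles.
venn2_unweighted([set('abcde'), set('cdefgh')], set_labels=('Left', 'Right'))
plt.show()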
def f_get_results(self, fast_access=False, copy=True): """ Returns a dictionary containing the full result names as keys and the corresponding result objects or result data items as values. :param fast_access: Determines whether the result objects or their values are returned in the dictionary. Works only for results if they contain a single item with the name of the result. :param copy: Whether the original dictionary or a shallow copy is returned. If you want the real dictionary please do not modify it at all! Not Copying and fast access do not work at the same time! Raises ValueError if fast access is true and copy false. :return: Dictionary containing the results. :raises: ValueError """ return self._return_item_dictionary(self._results, fast_access, copy)
[ "def", "f_get_results", "(", "self", ",", "fast_access", "=", "False", ",", "copy", "=", "True", ")", ":", "return", "self", ".", "_return_item_dictionary", "(", "self", ".", "_results", ",", "fast_access", ",", "copy", ")" ]
37.625
27.458333
def _get_dependencies_from_kwargs(self, args): """ Parse keyed arguments """ if not isinstance(args, dict): raise TypeError('"kwargs" must be a dictionary') dependency_names = set() for arg in args.values(): new_names = self._check_arg(arg) dependency_names.update(new_names) return dependency_names
[ "def", "_get_dependencies_from_kwargs", "(", "self", ",", "args", ")", ":", "if", "not", "isinstance", "(", "args", ",", "dict", ")", ":", "raise", "TypeError", "(", "'\"kwargs\" must be a dictionary'", ")", "dependency_names", "=", "set", "(", ")", "for", "arg", "in", "args", ".", "values", "(", ")", ":", "new_names", "=", "self", ".", "_check_arg", "(", "arg", ")", "dependency_names", ".", "update", "(", "new_names", ")", "return", "dependency_names" ]
36.7
10.2
def compose_dynamic_tree(src, target_tree_alias=None, parent_tree_item_alias=None, include_trees=None):
    """Returns a structure describing a dynamic sitetree.
    The structure can be built from various sources.

    :param str|iterable src: If a string is passed to `src`, it'll be treated as the name of an app,
        from where one wants to import sitetrees definitions.

        `src` can be an iterable of tree definitions (see `sitetree.toolbox.tree()` and `item()` functions).

    :param str|unicode target_tree_alias: Static tree alias to attach items from dynamic trees to.

    :param str|unicode parent_tree_item_alias: Tree item alias from a static tree to attach items from dynamic trees to.

    :param list include_trees: Sitetree aliases to filter `src`.

    :rtype: dict
    """
    def result(sitetrees=src):
        if include_trees is not None:
            sitetrees = [tree for tree in sitetrees if tree.alias in include_trees]

        return {
            'app': src,
            'sitetrees': sitetrees,
            'tree': target_tree_alias,
            'parent_item': parent_tree_item_alias}

    if isinstance(src, six.string_types):
        # Considered to be an application name.
        try:
            module = import_app_sitetree_module(src)
            return None if module is None else result(getattr(module, 'sitetrees', None))
        except ImportError as e:
            if settings.DEBUG:
                warnings.warn('Unable to register dynamic sitetree(s) for `%s` application: %s. ' % (src, e))
            return None

    return result()
[ "def", "compose_dynamic_tree", "(", "src", ",", "target_tree_alias", "=", "None", ",", "parent_tree_item_alias", "=", "None", ",", "include_trees", "=", "None", ")", ":", "def", "result", "(", "sitetrees", "=", "src", ")", ":", "if", "include_trees", "is", "not", "None", ":", "sitetrees", "=", "[", "tree", "for", "tree", "in", "sitetrees", "if", "tree", ".", "alias", "in", "include_trees", "]", "return", "{", "'app'", ":", "src", ",", "'sitetrees'", ":", "sitetrees", ",", "'tree'", ":", "target_tree_alias", ",", "'parent_item'", ":", "parent_tree_item_alias", "}", "if", "isinstance", "(", "src", ",", "six", ".", "string_types", ")", ":", "# Considered to be an application name.", "try", ":", "module", "=", "import_app_sitetree_module", "(", "src", ")", "return", "None", "if", "module", "is", "None", "else", "result", "(", "getattr", "(", "module", ",", "'sitetrees'", ",", "None", ")", ")", "except", "ImportError", "as", "e", ":", "if", "settings", ".", "DEBUG", ":", "warnings", ".", "warn", "(", "'Unable to register dynamic sitetree(s) for `%s` application: %s. '", "%", "(", "src", ",", "e", ")", ")", "return", "None", "return", "result", "(", ")" ]
40.947368
29.078947
def parse_hpo_phenotypes(hpo_lines): """Parse hpo phenotypes Group the genes that a phenotype is associated to in 'genes' Args: hpo_lines(iterable(str)): A file handle to the hpo phenotypes file Returns: hpo_terms(dict): A dictionary with hpo_ids as keys and terms as values { <hpo_id>: { 'hpo_id':str, 'description': str, 'hgnc_symbols': list(str), # [<hgnc_symbol>, ...] } } """ hpo_terms = {} LOG.info("Parsing hpo phenotypes...") for index, line in enumerate(hpo_lines): if index > 0 and len(line) > 0: hpo_info = parse_hpo_phenotype(line) hpo_term = hpo_info['hpo_id'] hgnc_symbol = hpo_info['hgnc_symbol'] if hpo_term in hpo_terms: hpo_terms[hpo_term]['hgnc_symbols'].append(hgnc_symbol) else: hpo_terms[hpo_term] = { 'hpo_id':hpo_term, 'description': hpo_info['description'], 'hgnc_symbols': [hgnc_symbol] } LOG.info("Parsing done.") return hpo_terms
[ "def", "parse_hpo_phenotypes", "(", "hpo_lines", ")", ":", "hpo_terms", "=", "{", "}", "LOG", ".", "info", "(", "\"Parsing hpo phenotypes...\"", ")", "for", "index", ",", "line", "in", "enumerate", "(", "hpo_lines", ")", ":", "if", "index", ">", "0", "and", "len", "(", "line", ")", ">", "0", ":", "hpo_info", "=", "parse_hpo_phenotype", "(", "line", ")", "hpo_term", "=", "hpo_info", "[", "'hpo_id'", "]", "hgnc_symbol", "=", "hpo_info", "[", "'hgnc_symbol'", "]", "if", "hpo_term", "in", "hpo_terms", ":", "hpo_terms", "[", "hpo_term", "]", "[", "'hgnc_symbols'", "]", ".", "append", "(", "hgnc_symbol", ")", "else", ":", "hpo_terms", "[", "hpo_term", "]", "=", "{", "'hpo_id'", ":", "hpo_term", ",", "'description'", ":", "hpo_info", "[", "'description'", "]", ",", "'hgnc_symbols'", ":", "[", "hgnc_symbol", "]", "}", "LOG", ".", "info", "(", "\"Parsing done.\"", ")", "return", "hpo_terms" ]
32.5
17.722222
def load_zae(file_obj, resolver=None, **kwargs): """ Load a ZAE file, which is just a zipped DAE file. Parameters ------------- file_obj : file object Contains ZAE data resolver : trimesh.visual.Resolver Resolver to load additional assets kwargs : dict Passed to load_collada Returns ------------ loaded : dict Results of loading """ # a dict, {file name : file object} archive = util.decompress(file_obj, file_type='zip') # load the first file with a .dae extension file_name = next(i for i in archive.keys() if i.lower().endswith('.dae')) # a resolver so the loader can load textures / etc resolver = visual.resolvers.ZipResolver(archive) # run the regular collada loader loaded = load_collada(archive[file_name], resolver=resolver, **kwargs) return loaded
[ "def", "load_zae", "(", "file_obj", ",", "resolver", "=", "None", ",", "*", "*", "kwargs", ")", ":", "# a dict, {file name : file object}", "archive", "=", "util", ".", "decompress", "(", "file_obj", ",", "file_type", "=", "'zip'", ")", "# load the first file with a .dae extension", "file_name", "=", "next", "(", "i", "for", "i", "in", "archive", ".", "keys", "(", ")", "if", "i", ".", "lower", "(", ")", ".", "endswith", "(", "'.dae'", ")", ")", "# a resolver so the loader can load textures / etc", "resolver", "=", "visual", ".", "resolvers", ".", "ZipResolver", "(", "archive", ")", "# run the regular collada loader", "loaded", "=", "load_collada", "(", "archive", "[", "file_name", "]", ",", "resolver", "=", "resolver", ",", "*", "*", "kwargs", ")", "return", "loaded" ]
26.771429
16.257143
def check_requires_python(requires_python):
    # type: (Optional[str]) -> bool
    """
    Check if the python version in use matches the `requires_python` specifier.

    Returns `True` if the version of python in use matches the requirement.
    Returns `False` if the version of python in use does not match the requirement.

    Raises an InvalidSpecifier if `requires_python` has an invalid format.
    """
    if requires_python is None:
        # The package provides no information
        return True
    requires_python_specifier = specifiers.SpecifierSet(requires_python)

    # We only use major.minor.micro
    python_version = version.parse('{0}.{1}.{2}'.format(*sys.version_info[:3]))
    return python_version in requires_python_specifier
[ "def", "check_requires_python", "(", "requires_python", ")", ":", "# type: (Optional[str]) -> bool", "if", "requires_python", "is", "None", ":", "# The package provides no information", "return", "True", "requires_python_specifier", "=", "specifiers", ".", "SpecifierSet", "(", "requires_python", ")", "# We only use major.minor.micro", "python_version", "=", "version", ".", "parse", "(", "'{0}.{1}.{2}'", ".", "format", "(", "*", "sys", ".", "version_info", "[", ":", "3", "]", ")", ")", "return", "python_version", "in", "requires_python_specifier" ]
39.052632
21.894737
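A short sketch of check_requires_python behaviour, assuming the function above is importable (it relies on packaging's specifiers and version helpers); the results shown are for a Python 3 interpreter.

print(check_requires_python(None))     # True  -- no constraint declared
print(check_requires_python('>=2.7'))  # True
print(check_requires_python('<3.0'))   # False under Python 3
try:
    check_requires_python('not a spec')
except Exception as exc:               # packaging raises InvalidSpecifier here
    print(type(exc).__name__)          # InvalidSpecifier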
def raw_separation(self,mag_1,mag_2,steps=10000): """ Calculate the separation in magnitude-magnitude space between points and isochrone. Uses a dense sampling of the isochrone and calculates the metric distance from any isochrone sample point. Parameters: ----------- mag_1 : The magnitude of the test points in the first band mag_2 : The magnitude of the test points in the second band steps : Number of steps to sample the isochrone Returns: -------- sep : Minimum separation between test points and isochrone sample """ # http://stackoverflow.com/q/12653120/ mag_1 = np.array(mag_1,copy=False,ndmin=1) mag_2 = np.array(mag_2,copy=False,ndmin=1) init,pdf,act,iso_mag_1,iso_mag_2 = self.sample(mass_steps=steps) iso_mag_1+=self.distance_modulus iso_mag_2+=self.distance_modulus iso_cut = (iso_mag_1<np.max(mag_1))&(iso_mag_1>np.min(mag_1)) | \ (iso_mag_2<np.max(mag_2))&(iso_mag_2>np.min(mag_2)) iso_mag_1 = iso_mag_1[iso_cut] iso_mag_2 = iso_mag_2[iso_cut] dist_mag_1 = mag_1[:,np.newaxis]-iso_mag_1 dist_mag_2 = mag_2[:,np.newaxis]-iso_mag_2 return np.min(np.sqrt(dist_mag_1**2 + dist_mag_2**2),axis=1)
[ "def", "raw_separation", "(", "self", ",", "mag_1", ",", "mag_2", ",", "steps", "=", "10000", ")", ":", "# http://stackoverflow.com/q/12653120/", "mag_1", "=", "np", ".", "array", "(", "mag_1", ",", "copy", "=", "False", ",", "ndmin", "=", "1", ")", "mag_2", "=", "np", ".", "array", "(", "mag_2", ",", "copy", "=", "False", ",", "ndmin", "=", "1", ")", "init", ",", "pdf", ",", "act", ",", "iso_mag_1", ",", "iso_mag_2", "=", "self", ".", "sample", "(", "mass_steps", "=", "steps", ")", "iso_mag_1", "+=", "self", ".", "distance_modulus", "iso_mag_2", "+=", "self", ".", "distance_modulus", "iso_cut", "=", "(", "iso_mag_1", "<", "np", ".", "max", "(", "mag_1", ")", ")", "&", "(", "iso_mag_1", ">", "np", ".", "min", "(", "mag_1", ")", ")", "|", "(", "iso_mag_2", "<", "np", ".", "max", "(", "mag_2", ")", ")", "&", "(", "iso_mag_2", ">", "np", ".", "min", "(", "mag_2", ")", ")", "iso_mag_1", "=", "iso_mag_1", "[", "iso_cut", "]", "iso_mag_2", "=", "iso_mag_2", "[", "iso_cut", "]", "dist_mag_1", "=", "mag_1", "[", ":", ",", "np", ".", "newaxis", "]", "-", "iso_mag_1", "dist_mag_2", "=", "mag_2", "[", ":", ",", "np", ".", "newaxis", "]", "-", "iso_mag_2", "return", "np", ".", "min", "(", "np", ".", "sqrt", "(", "dist_mag_1", "**", "2", "+", "dist_mag_2", "**", "2", ")", ",", "axis", "=", "1", ")" ]
41.15625
24.3125
def plot_sector_exposures_gross(gross_exposures, sector_dict=None, ax=None): """ Plots output of compute_sector_exposures as area charts Parameters ---------- gross_exposures : arrays Arrays of gross sector exposures (output of compute_sector_exposures). sector_dict : dict or OrderedDict Dictionary of all sectors - See full description in compute_sector_exposures """ if ax is None: ax = plt.gca() if sector_dict is None: sector_names = SECTORS.values() else: sector_names = sector_dict.values() color_list = plt.cm.gist_rainbow(np.linspace(0, 1, 11)) ax.stackplot(gross_exposures[0].index, gross_exposures, labels=sector_names, colors=color_list, alpha=0.8, baseline='zero') ax.axhline(0, color='k', linestyle='-') ax.set(title='Gross exposure to sectors', ylabel='Proportion of gross exposure \n in sectors') return ax
[ "def", "plot_sector_exposures_gross", "(", "gross_exposures", ",", "sector_dict", "=", "None", ",", "ax", "=", "None", ")", ":", "if", "ax", "is", "None", ":", "ax", "=", "plt", ".", "gca", "(", ")", "if", "sector_dict", "is", "None", ":", "sector_names", "=", "SECTORS", ".", "values", "(", ")", "else", ":", "sector_names", "=", "sector_dict", ".", "values", "(", ")", "color_list", "=", "plt", ".", "cm", ".", "gist_rainbow", "(", "np", ".", "linspace", "(", "0", ",", "1", ",", "11", ")", ")", "ax", ".", "stackplot", "(", "gross_exposures", "[", "0", "]", ".", "index", ",", "gross_exposures", ",", "labels", "=", "sector_names", ",", "colors", "=", "color_list", ",", "alpha", "=", "0.8", ",", "baseline", "=", "'zero'", ")", "ax", ".", "axhline", "(", "0", ",", "color", "=", "'k'", ",", "linestyle", "=", "'-'", ")", "ax", ".", "set", "(", "title", "=", "'Gross exposure to sectors'", ",", "ylabel", "=", "'Proportion of gross exposure \\n in sectors'", ")", "return", "ax" ]
29.71875
21.34375
def write(url, content, **args): """Put an object into a ftps URL.""" with FTPSResource(url, **args) as resource: resource.write(content)
[ "def", "write", "(", "url", ",", "content", ",", "*", "*", "args", ")", ":", "with", "FTPSResource", "(", "url", ",", "*", "*", "args", ")", "as", "resource", ":", "resource", ".", "write", "(", "content", ")" ]
37.5
6
def spin_z(particles, index):
    """Generates the spin_z projection operator for a system of N=particles spins
    and for the selected spin index, where index=0..N-1"""
    mat = np.zeros((2**particles, 2**particles))
    for i in range(2**particles):
        ispin = btest(i, index)
        if ispin == 1:
            mat[i, i] = 1
        else:
            mat[i, i] = -1
    return 1/2.*mat
[ "def", "spin_z", "(", "particles", ",", "index", ")", ":", "mat", "=", "np", ".", "zeros", "(", "(", "2", "**", "particles", ",", "2", "**", "particles", ")", ")", "for", "i", "in", "range", "(", "2", "**", "particles", ")", ":", "ispin", "=", "btest", "(", "i", ",", "index", ")", "if", "ispin", "==", "1", ":", "mat", "[", "i", ",", "i", "]", "=", "1", "else", ":", "mat", "[", "i", ",", "i", "]", "=", "-", "1", "return", "1", "/", "2.", "*", "mat" ]
32.333333
14.166667
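A quick check of spin_z for two spins; btest is assumed to be the module's bit-test helper, so a small stand-in is defined here.

import numpy as np

def btest(i, index):
    # stand-in for the bit-test helper spin_z relies on: 1 if bit `index` of i is set
    return (i >> index) & 1

# S_z for the first spin of a two-spin system is diagonal with entries +-1/2.
print(np.diag(spin_z(2, 0)))  # [-0.5  0.5 -0.5  0.5]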
def convert_filename(txtfilename, outdir='.'): """Convert a .TXT filename to a Therion .TH filename""" return os.path.join(outdir, os.path.basename(txtfilename)).rsplit('.', 1)[0] + '.th'
[ "def", "convert_filename", "(", "txtfilename", ",", "outdir", "=", "'.'", ")", ":", "return", "os", ".", "path", ".", "join", "(", "outdir", ",", "os", ".", "path", ".", "basename", "(", "txtfilename", ")", ")", ".", "rsplit", "(", "'.'", ",", "1", ")", "[", "0", "]", "+", "'.th'" ]
64.333333
18
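An illustrative call of convert_filename on a made-up survey file name (POSIX-style paths), assuming the function above is in scope.

print(convert_filename('/data/survey/CAVE01.TXT'))                    # ./CAVE01.th
print(convert_filename('/data/survey/CAVE01.TXT', outdir='therion'))  # therion/CAVE01.th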
def determine_inst(i_info, param_str, command):
    """Determine the instance-id of the target instance.

    Inspect the number of instance-ids collected and take the
    appropriate action: exit if no ids, return if single id,
    and call user_picklist function if multiple ids exist.

    Args:
        i_info (dict): information and details for instances.
        param_str (str): the title to display in the listing.
        command (str): command specified on the command line.

    Returns:
        tar_inst (str): the AWS instance-id of the target.

    Raises:
        SystemExit: if no instances match the parameters specified.

    """
    qty_instances = len(i_info)
    if not qty_instances:
        print("No instances found with parameters: {}".format(param_str))
        sys.exit(1)

    if qty_instances > 1:
        print("{} instances match these parameters:".format(qty_instances))
        tar_idx = user_picklist(i_info, command)
    else:
        tar_idx = 0

    tar_inst = i_info[tar_idx]['id']
    print("{0}{3}ing{1} instance id {2}{4}{1}".
          format(C_STAT[command], C_NORM, C_TI, command, tar_inst))
    return (tar_inst, tar_idx)
[ "def", "determine_inst", "(", "i_info", ",", "param_str", ",", "command", ")", ":", "qty_instances", "=", "len", "(", "i_info", ")", "if", "not", "qty_instances", ":", "print", "(", "\"No instances found with parameters: {}\"", ".", "format", "(", "param_str", ")", ")", "sys", ".", "exit", "(", "1", ")", "if", "qty_instances", ">", "1", ":", "print", "(", "\"{} instances match these parameters:\"", ".", "format", "(", "qty_instances", ")", ")", "tar_idx", "=", "user_picklist", "(", "i_info", ",", "command", ")", "else", ":", "tar_idx", "=", "0", "tar_inst", "=", "i_info", "[", "tar_idx", "]", "[", "'id'", "]", "print", "(", "\"{0}{3}ing{1} instance id {2}{4}{1}\"", ".", "format", "(", "C_STAT", "[", "command", "]", ",", "C_NORM", ",", "C_TI", ",", "command", ",", "tar_inst", ")", ")", "return", "(", "tar_inst", ",", "tar_idx", ")" ]
35.40625
21.8125
def receptive_field(self, X, identities, max_len=10, threshold=0.9, batch_size=1): """ Calculate the receptive field of the SOM on some data. The receptive field is the common ending of all sequences which lead to the activation of a given BMU. If a SOM is well-tuned to specific sequences, it will have longer receptive fields, and therefore gives a better description of the dynamics of a given system. Parameters ---------- X : numpy array Input data. identities : list A list of symbolic identities associated with each input. We expect this list to be as long as the input data. max_len : int, optional, default 10 The maximum length to attempt to find. Raising this increases memory use. threshold : float, optional, default .9 The threshold at which we consider a receptive field valid. If at least this proportion of the sequences of a neuron have the same suffix, that suffix is counted as acquired by the SOM. batch_size : int, optional, default 1 The batch size to use in prediction Returns ------- receptive_fields : dict A dictionary mapping from the neuron id to the found sequences for that neuron. The sequences are represented as lists of symbols from identities. """ receptive_fields = defaultdict(list) predictions = self.predict(X, batch_size) if len(predictions) != len(identities): raise ValueError("X and identities are not the same length: " "{0} and {1}".format(len(X), len(identities))) for idx, p in enumerate(predictions.tolist()): receptive_fields[p].append(identities[idx+1 - max_len:idx+1]) rec = {} for k, v in receptive_fields.items(): # if there's only one sequence, we don't know # anything abouw how salient it is. seq = [] if len(v) <= 1: continue else: for x in reversed(list(zip(*v))): x = Counter(x) if x.most_common(1)[0][1] / sum(x.values()) > threshold: seq.append(x.most_common(1)[0][0]) else: rec[k] = seq break return rec
[ "def", "receptive_field", "(", "self", ",", "X", ",", "identities", ",", "max_len", "=", "10", ",", "threshold", "=", "0.9", ",", "batch_size", "=", "1", ")", ":", "receptive_fields", "=", "defaultdict", "(", "list", ")", "predictions", "=", "self", ".", "predict", "(", "X", ",", "batch_size", ")", "if", "len", "(", "predictions", ")", "!=", "len", "(", "identities", ")", ":", "raise", "ValueError", "(", "\"X and identities are not the same length: \"", "\"{0} and {1}\"", ".", "format", "(", "len", "(", "X", ")", ",", "len", "(", "identities", ")", ")", ")", "for", "idx", ",", "p", "in", "enumerate", "(", "predictions", ".", "tolist", "(", ")", ")", ":", "receptive_fields", "[", "p", "]", ".", "append", "(", "identities", "[", "idx", "+", "1", "-", "max_len", ":", "idx", "+", "1", "]", ")", "rec", "=", "{", "}", "for", "k", ",", "v", "in", "receptive_fields", ".", "items", "(", ")", ":", "# if there's only one sequence, we don't know", "# anything abouw how salient it is.", "seq", "=", "[", "]", "if", "len", "(", "v", ")", "<=", "1", ":", "continue", "else", ":", "for", "x", "in", "reversed", "(", "list", "(", "zip", "(", "*", "v", ")", ")", ")", ":", "x", "=", "Counter", "(", "x", ")", "if", "x", ".", "most_common", "(", "1", ")", "[", "0", "]", "[", "1", "]", "/", "sum", "(", "x", ".", "values", "(", ")", ")", ">", "threshold", ":", "seq", ".", "append", "(", "x", ".", "most_common", "(", "1", ")", "[", "0", "]", "[", "0", "]", ")", "else", ":", "rec", "[", "k", "]", "=", "seq", "break", "return", "rec" ]
37.558824
20.588235
def mscoco_generator(data_dir, tmp_dir, training, how_many, start_from=0, eos_list=None, vocab_filename=None): """Image generator for MSCOCO captioning problem with token-wise captions. Args: data_dir: path to the data directory. tmp_dir: path to temporary storage directory. training: a Boolean; if true, we use the train set, otherwise the test set. how_many: how many images and labels to generate. start_from: from which image to start. eos_list: optional list of end of sentence tokens, otherwise use default value `1`. vocab_filename: file within `tmp_dir` to read vocabulary from. Yields: A dictionary representing the images with the following fields: * image/encoded: the string encoding the image as JPEG, * image/format: the string "jpeg" representing image format, * image/class/label: a list of integers representing the caption, * image/height: an integer representing the height, * image/width: an integer representing the width. Every field is actually a list of the corresponding type. """ eos_list = [1] if eos_list is None else eos_list def get_vocab(): """Get vocab for caption text encoder.""" if data_dir is not None and vocab_filename is not None: vocab_filepath = os.path.join(data_dir, vocab_filename) if tf.gfile.Exists(vocab_filepath): tf.logging.info("Found vocab file: %s", vocab_filepath) vocab_symbolizer = text_encoder.SubwordTextEncoder(vocab_filepath) return vocab_symbolizer else: raise ValueError("Vocab file does not exist: %s" % vocab_filepath) return None vocab_symbolizer = get_vocab() _get_mscoco(tmp_dir) caption_filepath = ( _MSCOCO_TRAIN_CAPTION_FILE if training else _MSCOCO_EVAL_CAPTION_FILE) caption_filepath = os.path.join(tmp_dir, caption_filepath) prefix = _MSCOCO_TRAIN_PREFIX if training else _MSCOCO_EVAL_PREFIX caption_file = io.open(caption_filepath) caption_json = json.load(caption_file) # Dictionary from image_id to ((filename, height, width), captions). image_dict = {} for image in caption_json["images"]: image_dict[image["id"]] = [(image["file_name"], image["height"], image["width"]), []] annotations = caption_json["annotations"] annotation_count = len(annotations) image_count = len(image_dict) tf.logging.info("Processing %d images and %d labels\n" % (image_count, annotation_count)) for annotation in annotations: image_id = annotation["image_id"] image_dict[image_id][1].append(annotation["caption"]) data = list(image_dict.values())[start_from:start_from + how_many] random.shuffle(data) for image_info, labels in data: image_filename = image_info[0] image_filepath = os.path.join(tmp_dir, prefix, image_filename) with tf.gfile.Open(image_filepath, "rb") as f: encoded_image_data = f.read() height, width = image_info[1], image_info[2] for label in labels: if vocab_filename is None or vocab_symbolizer is None: label = [ord(c) for c in label] + eos_list else: label = vocab_symbolizer.encode(label) + eos_list yield { "image/encoded": [encoded_image_data], "image/format": ["jpeg"], "image/class/label": label, "image/height": [height], "image/width": [width] }
[ "def", "mscoco_generator", "(", "data_dir", ",", "tmp_dir", ",", "training", ",", "how_many", ",", "start_from", "=", "0", ",", "eos_list", "=", "None", ",", "vocab_filename", "=", "None", ")", ":", "eos_list", "=", "[", "1", "]", "if", "eos_list", "is", "None", "else", "eos_list", "def", "get_vocab", "(", ")", ":", "\"\"\"Get vocab for caption text encoder.\"\"\"", "if", "data_dir", "is", "not", "None", "and", "vocab_filename", "is", "not", "None", ":", "vocab_filepath", "=", "os", ".", "path", ".", "join", "(", "data_dir", ",", "vocab_filename", ")", "if", "tf", ".", "gfile", ".", "Exists", "(", "vocab_filepath", ")", ":", "tf", ".", "logging", ".", "info", "(", "\"Found vocab file: %s\"", ",", "vocab_filepath", ")", "vocab_symbolizer", "=", "text_encoder", ".", "SubwordTextEncoder", "(", "vocab_filepath", ")", "return", "vocab_symbolizer", "else", ":", "raise", "ValueError", "(", "\"Vocab file does not exist: %s\"", "%", "vocab_filepath", ")", "return", "None", "vocab_symbolizer", "=", "get_vocab", "(", ")", "_get_mscoco", "(", "tmp_dir", ")", "caption_filepath", "=", "(", "_MSCOCO_TRAIN_CAPTION_FILE", "if", "training", "else", "_MSCOCO_EVAL_CAPTION_FILE", ")", "caption_filepath", "=", "os", ".", "path", ".", "join", "(", "tmp_dir", ",", "caption_filepath", ")", "prefix", "=", "_MSCOCO_TRAIN_PREFIX", "if", "training", "else", "_MSCOCO_EVAL_PREFIX", "caption_file", "=", "io", ".", "open", "(", "caption_filepath", ")", "caption_json", "=", "json", ".", "load", "(", "caption_file", ")", "# Dictionary from image_id to ((filename, height, width), captions).", "image_dict", "=", "{", "}", "for", "image", "in", "caption_json", "[", "\"images\"", "]", ":", "image_dict", "[", "image", "[", "\"id\"", "]", "]", "=", "[", "(", "image", "[", "\"file_name\"", "]", ",", "image", "[", "\"height\"", "]", ",", "image", "[", "\"width\"", "]", ")", ",", "[", "]", "]", "annotations", "=", "caption_json", "[", "\"annotations\"", "]", "annotation_count", "=", "len", "(", "annotations", ")", "image_count", "=", "len", "(", "image_dict", ")", "tf", ".", "logging", ".", "info", "(", "\"Processing %d images and %d labels\\n\"", "%", "(", "image_count", ",", "annotation_count", ")", ")", "for", "annotation", "in", "annotations", ":", "image_id", "=", "annotation", "[", "\"image_id\"", "]", "image_dict", "[", "image_id", "]", "[", "1", "]", ".", "append", "(", "annotation", "[", "\"caption\"", "]", ")", "data", "=", "list", "(", "image_dict", ".", "values", "(", ")", ")", "[", "start_from", ":", "start_from", "+", "how_many", "]", "random", ".", "shuffle", "(", "data", ")", "for", "image_info", ",", "labels", "in", "data", ":", "image_filename", "=", "image_info", "[", "0", "]", "image_filepath", "=", "os", ".", "path", ".", "join", "(", "tmp_dir", ",", "prefix", ",", "image_filename", ")", "with", "tf", ".", "gfile", ".", "Open", "(", "image_filepath", ",", "\"rb\"", ")", "as", "f", ":", "encoded_image_data", "=", "f", ".", "read", "(", ")", "height", ",", "width", "=", "image_info", "[", "1", "]", ",", "image_info", "[", "2", "]", "for", "label", "in", "labels", ":", "if", "vocab_filename", "is", "None", "or", "vocab_symbolizer", "is", "None", ":", "label", "=", "[", "ord", "(", "c", ")", "for", "c", "in", "label", "]", "+", "eos_list", "else", ":", "label", "=", "vocab_symbolizer", ".", "encode", "(", "label", ")", "+", "eos_list", "yield", "{", "\"image/encoded\"", ":", "[", "encoded_image_data", "]", ",", "\"image/format\"", ":", "[", "\"jpeg\"", "]", ",", "\"image/class/label\"", ":", "label", ",", 
"\"image/height\"", ":", "[", "height", "]", ",", "\"image/width\"", ":", "[", "width", "]", "}" ]
42.216867
17.156627
def get_accounts(self, **params): """https://developers.coinbase.com/api/v2#list-accounts""" response = self._get('v2', 'accounts', params=params) return self._make_api_object(response, Account)
[ "def", "get_accounts", "(", "self", ",", "*", "*", "params", ")", ":", "response", "=", "self", ".", "_get", "(", "'v2'", ",", "'accounts'", ",", "params", "=", "params", ")", "return", "self", ".", "_make_api_object", "(", "response", ",", "Account", ")" ]
53.75
10.75
def should_stop_early(self) -> bool: """ Returns true if improvement has stopped for long enough. """ if self._patience is None: return False else: return self._epochs_with_no_improvement >= self._patience
[ "def", "should_stop_early", "(", "self", ")", "->", "bool", ":", "if", "self", ".", "_patience", "is", "None", ":", "return", "False", "else", ":", "return", "self", ".", "_epochs_with_no_improvement", ">=", "self", ".", "_patience" ]
32.75
13.25
def _init_client(self, from_archive=False): """Init client""" return AskbotClient(self.url, self.archive, from_archive)
[ "def", "_init_client", "(", "self", ",", "from_archive", "=", "False", ")", ":", "return", "AskbotClient", "(", "self", ".", "url", ",", "self", ".", "archive", ",", "from_archive", ")" ]
33.25
17
def mBank_set_iph_id(transactions, tag, tag_dict, *args): """ mBank Collect uses ID IPH to distinguish between virtual accounts, adding iph_id may be helpful in further processing """ matches = iph_id_re.search(tag_dict[tag.slug]) if matches: # pragma no branch tag_dict['iph_id'] = matches.groupdict()['iph_id'] return tag_dict
[ "def", "mBank_set_iph_id", "(", "transactions", ",", "tag", ",", "tag_dict", ",", "*", "args", ")", ":", "matches", "=", "iph_id_re", ".", "search", "(", "tag_dict", "[", "tag", ".", "slug", "]", ")", "if", "matches", ":", "# pragma no branch", "tag_dict", "[", "'iph_id'", "]", "=", "matches", ".", "groupdict", "(", ")", "[", "'iph_id'", "]", "return", "tag_dict" ]
32.454545
17.727273
def pystr(self, min_chars=None, max_chars=20):
        """
        Generates a random string of upper and lowercase letters.
        :type min_chars: int
        :type max_chars: int
        :return: String. Random string of random length between min and max characters.
        """
        if min_chars is None:
            return "".join(self.random_letters(length=max_chars))
        else:
            assert (
                max_chars >= min_chars), "Maximum length must be greater than or equal to minimum length"
            return "".join(
                self.random_letters(
                    length=self.generator.random.randint(min_chars, max_chars),
                ),
            )
[ "def", "pystr", "(", "self", ",", "min_chars", "=", "None", ",", "max_chars", "=", "20", ")", ":", "if", "min_chars", "is", "None", ":", "return", "\"\"", ".", "join", "(", "self", ".", "random_letters", "(", "length", "=", "max_chars", ")", ")", "else", ":", "assert", "(", "max_chars", ">=", "min_chars", ")", ",", "\"Maximum length must be greater than or equal to minium length\"", "return", "\"\"", ".", "join", "(", "self", ".", "random_letters", "(", "length", "=", "self", ".", "generator", ".", "random", ".", "randint", "(", "min_chars", ",", "max_chars", ")", ",", ")", ",", ")" ]
39.588235
20.411765
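Usage through Faker's public API, which is assumed to expose the pystr provider method above; outputs are random, so the values shown are only examples.

from faker import Faker

fake = Faker()
print(fake.pystr())                          # e.g. 'QbYtGpnYJmlsMhlXxEjT' (20 letters)
print(fake.pystr(min_chars=5, max_chars=8))  # between 5 and 8 random letters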
def _to_bel_lines_footer(graph) -> Iterable[str]: """Iterate the lines of a BEL graph's corresponding BEL script's footer. :param pybel.BELGraph graph: A BEL graph """ unqualified_edges_to_serialize = [ (u, v, d) for u, v, d in graph.edges(data=True) if d[RELATION] in UNQUALIFIED_EDGES and EVIDENCE not in d ] isolated_nodes_to_serialize = [ node for node in graph if not graph.pred[node] and not graph.succ[node] ] if unqualified_edges_to_serialize or isolated_nodes_to_serialize: yield '###############################################\n' yield 'SET Citation = {"PubMed","Added by PyBEL","29048466"}' yield 'SET SupportingText = "{}"'.format(PYBEL_AUTOEVIDENCE) for u, v, data in unqualified_edges_to_serialize: yield '{} {} {}'.format(u.as_bel(), data[RELATION], v.as_bel()) for node in isolated_nodes_to_serialize: yield node.as_bel() yield 'UNSET SupportingText' yield 'UNSET Citation'
[ "def", "_to_bel_lines_footer", "(", "graph", ")", "->", "Iterable", "[", "str", "]", ":", "unqualified_edges_to_serialize", "=", "[", "(", "u", ",", "v", ",", "d", ")", "for", "u", ",", "v", ",", "d", "in", "graph", ".", "edges", "(", "data", "=", "True", ")", "if", "d", "[", "RELATION", "]", "in", "UNQUALIFIED_EDGES", "and", "EVIDENCE", "not", "in", "d", "]", "isolated_nodes_to_serialize", "=", "[", "node", "for", "node", "in", "graph", "if", "not", "graph", ".", "pred", "[", "node", "]", "and", "not", "graph", ".", "succ", "[", "node", "]", "]", "if", "unqualified_edges_to_serialize", "or", "isolated_nodes_to_serialize", ":", "yield", "'###############################################\\n'", "yield", "'SET Citation = {\"PubMed\",\"Added by PyBEL\",\"29048466\"}'", "yield", "'SET SupportingText = \"{}\"'", ".", "format", "(", "PYBEL_AUTOEVIDENCE", ")", "for", "u", ",", "v", ",", "data", "in", "unqualified_edges_to_serialize", ":", "yield", "'{} {} {}'", ".", "format", "(", "u", ".", "as_bel", "(", ")", ",", "data", "[", "RELATION", "]", ",", "v", ".", "as_bel", "(", ")", ")", "for", "node", "in", "isolated_nodes_to_serialize", ":", "yield", "node", ".", "as_bel", "(", ")", "yield", "'UNSET SupportingText'", "yield", "'UNSET Citation'" ]
34.233333
21.2
def cache(self, obj):
        '''
        Store an object in the cache (this allows temporarily assigning a new cache
        for exploring the DB without affecting the stored version)
        '''
        # Check cache path exists for current obj
        write_path = os.path.join( self.cache_path, obj.org_id )
        if not os.path.exists( write_path ):
            mkdir_p( write_path )

        with open(os.path.join( write_path, obj.id ), 'wb') as f:
            pickle.dump( obj, f )

        # Add to localstore (keep track of numbers of objects, etc.)
        self.add_to_localstore(obj)
        self.add_to_names(obj)
[ "def", "cache", "(", "self", ",", "obj", ")", ":", "# Check cache path exists for current obj", "write_path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "cache_path", ",", "obj", ".", "org_id", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "write_path", ")", ":", "mkdir_p", "(", "write_path", ")", "with", "open", "(", "os", ".", "path", ".", "join", "(", "write_path", ",", "obj", ".", "id", ")", ",", "'wb'", ")", "as", "f", ":", "pickle", ".", "dump", "(", "obj", ",", "f", ")", "# Add to localstore (keep track of numbers of objects, etc.)", "self", ".", "add_to_localstore", "(", "obj", ")", "self", ".", "add_to_names", "(", "obj", ")" ]
39
20.875
def set_setting(key, value, qsettings=None): """Set value to QSettings based on key in InaSAFE scope. :param key: Unique key for setting. :type key: basestring :param value: Value to be saved. :type value: QVariant :param qsettings: A custom QSettings to use. If it's not defined, it will use the default one. :type qsettings: qgis.PyQt.QtCore.QSettings """ full_key = '%s/%s' % (APPLICATION_NAME, key) set_general_setting(full_key, value, qsettings)
[ "def", "set_setting", "(", "key", ",", "value", ",", "qsettings", "=", "None", ")", ":", "full_key", "=", "'%s/%s'", "%", "(", "APPLICATION_NAME", ",", "key", ")", "set_general_setting", "(", "full_key", ",", "value", ",", "qsettings", ")" ]
32.466667
15.6
def to_json(self, include_id: bool = False) -> Mapping[str, str]: """Return the most useful entries as a dictionary. :param include_id: If true, includes the model identifier """ result = { 'keyword': self.keyword, 'name': self.name, 'version': self.version, } if self.url: result['url'] = self.url else: result['pattern'] = self.pattern if include_id: result['id'] = self.id return result
[ "def", "to_json", "(", "self", ",", "include_id", ":", "bool", "=", "False", ")", "->", "Mapping", "[", "str", ",", "str", "]", ":", "result", "=", "{", "'keyword'", ":", "self", ".", "keyword", ",", "'name'", ":", "self", ".", "name", ",", "'version'", ":", "self", ".", "version", ",", "}", "if", "self", ".", "url", ":", "result", "[", "'url'", "]", "=", "self", ".", "url", "else", ":", "result", "[", "'pattern'", "]", "=", "self", ".", "pattern", "if", "include_id", ":", "result", "[", "'id'", "]", "=", "self", ".", "id", "return", "result" ]
26.85
18.4
def jhk_to_imag(jmag,hmag,kmag): '''Converts given J, H, Ks mags to an I magnitude value. Parameters ---------- jmag,hmag,kmag : float 2MASS J, H, Ks mags of the object. Returns ------- float The converted I band magnitude. ''' return convert_constants(jmag,hmag,kmag, IJHK, IJH, IJK, IHK, IJ, IH, IK)
[ "def", "jhk_to_imag", "(", "jmag", ",", "hmag", ",", "kmag", ")", ":", "return", "convert_constants", "(", "jmag", ",", "hmag", ",", "kmag", ",", "IJHK", ",", "IJH", ",", "IJK", ",", "IHK", ",", "IJ", ",", "IH", ",", "IK", ")" ]
20.285714
22.47619
def load(self, path): """Load a set of constructs into the CLIPS data base. Constructs can be in text or binary format. The Python equivalent of the CLIPS load command. """ try: self._load_binary(path) except CLIPSError: self._load_text(path)
[ "def", "load", "(", "self", ",", "path", ")", ":", "try", ":", "self", ".", "_load_binary", "(", "path", ")", "except", "CLIPSError", ":", "self", ".", "_load_text", "(", "path", ")" ]
25.5
18.333333
def group_permissions(permissions):
    """
    Groups a permissions list

    Returns a dictionary, with permission types as keys and sets of
    entities with access to the resource as values, e.g.:
    {
        'organisation_id': {
            'org1': set(['rw', 'r', 'w']),
            'org2': set(['-']),
            'org3': set(['r', 'w']),
        },
        'all': set(['r'])
    }
    'org1' has 'rw' access to the resource, 'org2' is denied access and
    'org3' has 'r' & 'w' access (the same as 'rw').

    Note that 'rw' will always result in 'rw', 'r' & 'w' in the set to
    make checks easier.

    If present in the resource's permissions, the 'all' permission type
    is an exception in that its value is just a set instead of a
    dictionary.

    :param permissions: a list of permissions
    :returns: defaultdict
    """
    groups = defaultdict(lambda: defaultdict(set))

    for p in sorted(permissions, key=itemgetter('type')):
        permission_set = groups[p['type']][p.get('value')]
        permission_set.add(p['permission'])
        if p['permission'] == 'rw':
            permission_set.update({'r', 'w'})

    # the 'all' permission type always has None as the value
    groups['all'] = groups['all'][None]

    return groups
[ "def", "group_permissions", "(", "permissions", ")", ":", "groups", "=", "defaultdict", "(", "lambda", ":", "defaultdict", "(", "set", ")", ")", "for", "p", "in", "sorted", "(", "permissions", ",", "key", "=", "itemgetter", "(", "'type'", ")", ")", ":", "permission_set", "=", "groups", "[", "p", "[", "'type'", "]", "]", "[", "p", ".", "get", "(", "'value'", ")", "]", "permission_set", ".", "add", "(", "p", "[", "'permission'", "]", ")", "if", "p", "[", "'permission'", "]", "==", "'rw'", ":", "permission_set", ".", "update", "(", "{", "'r'", ",", "'w'", "}", ")", "# the 'all' permission type always has None as the value", "groups", "[", "'all'", "]", "=", "groups", "[", "'all'", "]", "[", "None", "]", "return", "groups" ]
32.051282
21.025641
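A small sketch of group_permissions on a hand-made permission list, assuming the function above (with its defaultdict and itemgetter imports) is in scope; the field names mirror the docstring.

permissions = [
    {'type': 'organisation_id', 'value': 'org1', 'permission': 'rw'},
    {'type': 'organisation_id', 'value': 'org2', 'permission': '-'},
    {'type': 'all', 'value': None, 'permission': 'r'},
]
groups = group_permissions(permissions)
print(groups['organisation_id']['org1'])  # {'rw', 'r', 'w'}
print(groups['organisation_id']['org2'])  # {'-'}
print(groups['all'])                      # {'r'}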
def _unknown_data_size_handler(self, cfg, irsb, irsb_addr, stmt_idx, data_addr, max_size): # pylint:disable=unused-argument """ Return the maximum number of bytes until a potential pointer or a potential sequence is found. :param angr.analyses.CFG cfg: The control flow graph. :param pyvex.IRSB irsb: The IRSB object. :param int irsb_addr: Address of the block. :param int stmt_idx: Statement ID. :param int data_addr: Address of the data in memory. :param int max_size: Maximum size possible. :return: A 2-tuple of data type and size. :rtype: tuple """ sequence_offset = None for offset in range(1, max_size): if self._is_sequence(cfg, data_addr + offset, 5): # a potential sequence is found sequence_offset = offset break if sequence_offset is not None: if self.project.arch.bits == 32: max_size = min(max_size, sequence_offset) elif self.project.arch.bits == 64: max_size = min(max_size, sequence_offset + 5) # high 5 bytes might be all zeros... ptr_size = cfg.project.arch.bytes size = None for offset in range(1, max_size - ptr_size + 1): ptr = self.fast_memory_load(data_addr + offset, ptr_size, int, endness=cfg.project.arch.memory_endness) if self._is_pointer(cfg, ptr): size = offset break if size is not None: return "unknown", size elif sequence_offset is not None: return "unknown", sequence_offset else: return None, None
[ "def", "_unknown_data_size_handler", "(", "self", ",", "cfg", ",", "irsb", ",", "irsb_addr", ",", "stmt_idx", ",", "data_addr", ",", "max_size", ")", ":", "# pylint:disable=unused-argument", "sequence_offset", "=", "None", "for", "offset", "in", "range", "(", "1", ",", "max_size", ")", ":", "if", "self", ".", "_is_sequence", "(", "cfg", ",", "data_addr", "+", "offset", ",", "5", ")", ":", "# a potential sequence is found", "sequence_offset", "=", "offset", "break", "if", "sequence_offset", "is", "not", "None", ":", "if", "self", ".", "project", ".", "arch", ".", "bits", "==", "32", ":", "max_size", "=", "min", "(", "max_size", ",", "sequence_offset", ")", "elif", "self", ".", "project", ".", "arch", ".", "bits", "==", "64", ":", "max_size", "=", "min", "(", "max_size", ",", "sequence_offset", "+", "5", ")", "# high 5 bytes might be all zeros...", "ptr_size", "=", "cfg", ".", "project", ".", "arch", ".", "bytes", "size", "=", "None", "for", "offset", "in", "range", "(", "1", ",", "max_size", "-", "ptr_size", "+", "1", ")", ":", "ptr", "=", "self", ".", "fast_memory_load", "(", "data_addr", "+", "offset", ",", "ptr_size", ",", "int", ",", "endness", "=", "cfg", ".", "project", ".", "arch", ".", "memory_endness", ")", "if", "self", ".", "_is_pointer", "(", "cfg", ",", "ptr", ")", ":", "size", "=", "offset", "break", "if", "size", "is", "not", "None", ":", "return", "\"unknown\"", ",", "size", "elif", "sequence_offset", "is", "not", "None", ":", "return", "\"unknown\"", ",", "sequence_offset", "else", ":", "return", "None", ",", "None" ]
37.931818
20.886364
def save(self, data, xparent=None):
        """
        Saves the Python data to XML and returns the generated element.

        :param      data     | <variant>
                    xparent  | <xml.etree.ElementTree.Element> || None

        :return     <xml.etree.ElementTree.Element>
        """
        if xparent is not None:
            elem = ElementTree.SubElement(xparent, 'list')
        else:
            elem = ElementTree.Element('list')

        for item in data:
            XmlDataIO.toXml(item, elem)

        return elem
[ "def", "save", "(", "self", ",", "data", ",", "xparent", "=", "None", ")", ":", "if", "xparent", "is", "not", "None", ":", "elem", "=", "ElementTree", ".", "SubElement", "(", "xparent", ",", "'list'", ")", "else", ":", "elem", "=", "ElementTree", ".", "Element", "(", "'list'", ")", "for", "item", "in", "data", ":", "XmlDataIO", ".", "toXml", "(", "item", ",", "elem", ")", "return", "elem" ]
28.277778
16.277778
def validate(self, validation_instances, metrics, iteration=None): ''' Evaluate this model on `validation_instances` during training and output a report. :param validation_instances: The data to use to validate the model. :type validation_instances: list(instance.Instance) :param metrics: Functions like those found in the `metrics` module for quantifying the performance of the learner. :type metrics: list(function) :param iteration: A label (anything with a sensible `str()` conversion) identifying the current iteration in output. ''' if not validation_instances or not metrics: return {} split_id = 'val%s' % iteration if iteration is not None else 'val' train_results = evaluate.evaluate(self, validation_instances, metrics=metrics, split_id=split_id) output.output_results(train_results, split_id) return train_results
[ "def", "validate", "(", "self", ",", "validation_instances", ",", "metrics", ",", "iteration", "=", "None", ")", ":", "if", "not", "validation_instances", "or", "not", "metrics", ":", "return", "{", "}", "split_id", "=", "'val%s'", "%", "iteration", "if", "iteration", "is", "not", "None", "else", "'val'", "train_results", "=", "evaluate", ".", "evaluate", "(", "self", ",", "validation_instances", ",", "metrics", "=", "metrics", ",", "split_id", "=", "split_id", ")", "output", ".", "output_results", "(", "train_results", ",", "split_id", ")", "return", "train_results" ]
45.363636
26.090909
def _process_sasl_response(self, stream, element):
        """Process incoming <sasl:response/> element.

        [receiving entity only]
        """
        if not self.authenticator:
            logger.debug("Unexpected SASL response")
            return False

        content = element.text.encode("us-ascii")
        ret = self.authenticator.response(a2b_base64(content))
        if isinstance(ret, sasl.Success):
            element = ElementTree.Element(SUCCESS_TAG)
            element.text = ret.encode()
        elif isinstance(ret, sasl.Challenge):
            element = ElementTree.Element(CHALLENGE_TAG)
            element.text = ret.encode()
        else:
            element = ElementTree.Element(FAILURE_TAG)
            ElementTree.SubElement(element, SASL_QNP + ret.reason)

        stream.write_element(element)

        if isinstance(ret, sasl.Success):
            self._handle_auth_success(stream, ret)
        elif isinstance(ret, sasl.Failure):
            raise SASLAuthenticationFailed("SASL authentication failed: {0!r}"
                                           .format(ret.reason))
        return True
[ "def", "_process_sasl_response", "(", "self", ",", "stream", ",", "element", ")", ":", "if", "not", "self", ".", "authenticator", ":", "logger", ".", "debug", "(", "\"Unexpected SASL response\"", ")", "return", "False", "content", "=", "element", ".", "text", ".", "encode", "(", "\"us-ascii\"", ")", "ret", "=", "self", ".", "authenticator", ".", "response", "(", "a2b_base64", "(", "content", ")", ")", "if", "isinstance", "(", "ret", ",", "sasl", ".", "Success", ")", ":", "element", "=", "ElementTree", ".", "Element", "(", "SUCCESS_TAG", ")", "element", ".", "text", "=", "ret", ".", "encode", "(", ")", "elif", "isinstance", "(", "ret", ",", "sasl", ".", "Challenge", ")", ":", "element", "=", "ElementTree", ".", "Element", "(", "CHALLENGE_TAG", ")", "element", ".", "text", "=", "ret", ".", "encode", "(", ")", "else", ":", "element", "=", "ElementTree", ".", "Element", "(", "FAILURE_TAG", ")", "ElementTree", ".", "SubElement", "(", "element", ",", "SASL_QNP", "+", "ret", ".", "reason", ")", "stream", ".", "write_element", "(", "element", ")", "if", "isinstance", "(", "ret", ",", "sasl", ".", "Success", ")", ":", "self", ".", "_handle_auth_success", "(", "stream", ",", "ret", ")", "elif", "isinstance", "(", "ret", ",", "sasl", ".", "Failure", ")", ":", "raise", "SASLAuthenticationFailed", "(", "\"SASL authentication failed: {0!r}\"", ".", "format", "(", "ret", ".", "reson", ")", ")", "return", "True" ]
38.62069
16
def _add_file_mask(self, start, method_str, method): """Adds a raw file mask for dynamic requests. Parameters ---------- start : string The URL prefix that must be matched to perform this request. method_str : string The HTTP method for which to trigger the request. method : function(esrh, args) The function to execute to perform the request. The function takes two arguments. esrh is the QuickServerRequestHandler object that called the function. args is a map containing the arguments to the request (i.e., the rest of the URL as path segment array 'paths', a map of all query fields / flags 'query', the fragment string 'fragment', and if the method was a POST the JSON form content 'post'). The function must return a file object containing the response (preferably BytesIO). If the result is None no response body is sent. In this case make sure to send an appropriate error code. """ fm = self._f_mask.get(method_str, []) fm.append((start, method)) fm.sort(key=lambda k: len(k[0]), reverse=True) self._f_mask[method_str] = fm self._f_argc[method_str] = None
[ "def", "_add_file_mask", "(", "self", ",", "start", ",", "method_str", ",", "method", ")", ":", "fm", "=", "self", ".", "_f_mask", ".", "get", "(", "method_str", ",", "[", "]", ")", "fm", ".", "append", "(", "(", "start", ",", "method", ")", ")", "fm", ".", "sort", "(", "key", "=", "lambda", "k", ":", "len", "(", "k", "[", "0", "]", ")", ",", "reverse", "=", "True", ")", "self", ".", "_f_mask", "[", "method_str", "]", "=", "fm", "self", ".", "_f_argc", "[", "method_str", "]", "=", "None" ]
45.75
22.821429
def usufyToTextExport(d, fPath=None): """ Workaround to export to a .txt file or to show the information. Args: ----- d: Data to export. fPath: File path for the output file. If None was provided, it will assume that it has to print it. Returns: -------- unicode: It sometimes returns a unicode representation of the Sheet received. """ # Manual check... if d == []: return "+------------------+\n| No data found... |\n+------------------+" import pyexcel as pe import pyexcel.ext.text as text if fPath == None: isTerminal = True else: isTerminal = False try: oldData = get_data(fPath) except: # No information has been recovered oldData = {"OSRFramework":[]} # Generating the new tabular data tabularData = _generateTabularData(d, {"OSRFramework":[[]]}, True, canUnicode=False) # The tabular data contains a dict representing the whole book and we need only the sheet!! sheet = pe.Sheet(tabularData["OSRFramework"]) sheet.name = "Profiles recovered (" + getCurrentStrDatetime() +")." # Defining the headers sheet.name_columns_by_row(0) text.TABLEFMT = "grid" try: with open(fPath, "w") as oF: oF.write(str(sheet)) except Exception as e: # If a fPath was not provided... We will only print the info: return unicode(sheet)
[ "def", "usufyToTextExport", "(", "d", ",", "fPath", "=", "None", ")", ":", "# Manual check...", "if", "d", "==", "[", "]", ":", "return", "\"+------------------+\\n| No data found... |\\n+------------------+\"", "import", "pyexcel", "as", "pe", "import", "pyexcel", ".", "ext", ".", "text", "as", "text", "if", "fPath", "==", "None", ":", "isTerminal", "=", "True", "else", ":", "isTerminal", "=", "False", "try", ":", "oldData", "=", "get_data", "(", "fPath", ")", "except", ":", "# No information has been recovered", "oldData", "=", "{", "\"OSRFramework\"", ":", "[", "]", "}", "# Generating the new tabular data", "tabularData", "=", "_generateTabularData", "(", "d", ",", "{", "\"OSRFramework\"", ":", "[", "[", "]", "]", "}", ",", "True", ",", "canUnicode", "=", "False", ")", "# The tabular data contains a dict representing the whole book and we need only the sheet!!", "sheet", "=", "pe", ".", "Sheet", "(", "tabularData", "[", "\"OSRFramework\"", "]", ")", "sheet", ".", "name", "=", "\"Profiles recovered (\"", "+", "getCurrentStrDatetime", "(", ")", "+", "\").\"", "# Defining the headers", "sheet", ".", "name_columns_by_row", "(", "0", ")", "text", ".", "TABLEFMT", "=", "\"grid\"", "try", ":", "with", "open", "(", "fPath", ",", "\"w\"", ")", "as", "oF", ":", "oF", ".", "write", "(", "str", "(", "sheet", ")", ")", "except", "Exception", "as", "e", ":", "# If a fPath was not provided... We will only print the info:", "return", "unicode", "(", "sheet", ")" ]
28.836735
22.714286