idx
int64 0
63k
| question
stringlengths 61
4.03k
| target
stringlengths 6
1.23k
|
---|---|---|
1,100 |
def getTCPportConnStatus(self, ipv4=True, ipv6=True, include_listen=False,
                         **kwargs):
    """Return a dict mapping TCP connection status -> endpoint count."""
    counts = {}
    result = self.getStats(tcp=True, udp=False,
                           include_listen=include_listen,
                           ipv4=ipv4, ipv6=ipv6, **kwargs)
    for entry in result['stats']:
        if entry is None:
            continue
        key = entry[8].lower()
        counts[key] = counts.get(key, 0) + 1
    return counts
|
Returns the number of TCP endpoints discriminated by status .
|
1,101 |
def getTCPportConnCount(self, ipv4=True, ipv6=True, resolve_ports=False,
                        **kwargs):
    """Return TCP connection counts for each local port.

    @param ipv4:          Include IPv4 endpoints.
    @param ipv6:          Include IPv6 endpoints.
    @param resolve_ports: Resolve port numbers to service names.
    @return: dict mapping local port -> count of ESTABLISHED connections.
    """
    port_dict = {}
    result = self.getStats(tcp=True, udp=False, include_listen=False,
                           ipv4=ipv4, ipv6=ipv6,
                           resolve_ports=resolve_ports, **kwargs)
    for stat in result['stats']:
        if stat[8] == 'ESTABLISHED':
            # BUG FIX: original did port_dict.get(5, 0), looking up the
            # literal key 5 instead of the local port stat[5], so every
            # count restarted from 0.
            port_dict[stat[5]] = port_dict.get(stat[5], 0) + 1
    return port_dict
|
Returns TCP connection counts for each local port .
|
1,102 |
def accuracy_helper(egg, match='exact', distance='euclidean', features=None):
    """Compute proportion of words recalled, averaged across lists."""
    def acc(lst):
        # Unique non-negative entries in the recall matrix row are the
        # correctly recalled items.
        return len([i for i in np.unique(lst) if i >= 0]) / (egg.list_length)

    opts = dict(match=match, distance=distance, features=features)
    # FIX: compare strings with ==, not 'is'; identity comparison of str
    # literals is an implementation detail (SyntaxWarning on py3.8+).
    if match == 'exact':
        opts.update({'features': 'item'})
    recmat = recall_matrix(egg, **opts)
    if match in ['exact', 'best']:
        result = [acc(lst) for lst in recmat]
    elif match == 'smooth':
        result = np.mean(recmat, axis=1)
    else:
        raise ValueError('Match must be set to exact, best or smooth.')
    return np.nanmean(result, axis=0)
|
Computes proportion of words recalled
|
1,103 |
def _connect(self):
    """Establish connection to PostgreSQL Database."""
    if self._connParams:
        # Connection parameters supplied explicitly.
        self._conn = psycopg2.connect(**self._connParams)
    else:
        # Empty DSN: fall back to libpq defaults / environment variables.
        self._conn = psycopg2.connect('')
    try:
        ver_str = self._conn.get_parameter_status('server_version')
    except AttributeError:
        # Older psycopg2 without get_parameter_status(); ask the server.
        ver_str = self.getParam('server_version')
    self._version = util.SoftwareVersion(ver_str)
|
Establish connection to PostgreSQL Database .
|
1,104 |
def _createStatsDict ( self , headers , rows ) : dbstats = { } for row in rows : dbstats [ row [ 0 ] ] = dict ( zip ( headers [ 1 : ] , row [ 1 : ] ) ) return dbstats
|
Utility method that returns database stats as a nested dictionary .
|
1,105 |
def _createTotalsDict ( self , headers , rows ) : totals = [ sum ( col ) for col in zip ( * rows ) [ 1 : ] ] return dict ( zip ( headers [ 1 : ] , totals ) )
|
Utility method that returns totals for database statistics .
|
1,106 |
def _simpleQuery(self, query):
    """Execute a simple query which returns a single value.

    @param query: SQL query string.
    @return: first column of the first row, parsed by util.parse_value.
    """
    cur = self._conn.cursor()
    cur.execute(query)
    row = cur.fetchone()
    return util.parse_value(row[0])
|
Executes simple query which returns a single column .
|
1,107 |
def getConnectionStats(self):
    """Return dict mapping database name -> number of connections.

    NOTE(review): the original called cur.execute() with no statement,
    which raises TypeError at runtime; the query below restores the
    obvious intent (per-database backend counts) -- confirm upstream.
    """
    cur = self._conn.cursor()
    cur.execute("SELECT datname, numbackends FROM pg_stat_database;")
    rows = cur.fetchall()
    if rows:
        return dict(rows)
    else:
        return {}
|
Returns dictionary with number of connections for each database .
|
1,108 |
def getDatabaseStats(self):
    """Return block-read, transaction and tuple stats for each database.

    @return: dict with keys 'databases' (per-db nested stats) and
             'totals' (column sums across databases).
    """
    headers = ('datname', 'numbackends', 'xact_commit', 'xact_rollback',
               'blks_read', 'blks_hit', 'tup_returned', 'tup_fetched',
               'tup_inserted', 'tup_updated', 'tup_deleted', 'disk_size')
    cur = self._conn.cursor()
    # All columns but the synthetic last one come straight from
    # pg_stat_database; 'disk_size' is computed via pg_database_size().
    cur.execute("SELECT %s, pg_database_size(datname) FROM pg_stat_database;"
                % ",".join(headers[:-1]))
    rows = cur.fetchall()
    dbstats = self._createStatsDict(headers, rows)
    totals = self._createTotalsDict(headers, rows)
    return {'databases': dbstats, 'totals': totals}
|
Returns database block read transaction and tuple stats for each database .
|
1,109 |
def getLockStatsMode(self):
    """Return the number of active locks discriminated by lock mode.

    @return: {'all': {mode: count}, 'wait': {mode: count}}
    """
    # Pre-seed every known lock mode with 0 so absent modes still appear.
    info_dict = {'all': dict(zip(self.lockModes, (0,) * len(self.lockModes))),
                 'wait': dict(zip(self.lockModes, (0,) * len(self.lockModes)))}
    cur = self._conn.cursor()
    # TRIM(mode, 'Lock') strips the 'Lock' suffix from pg_locks.mode so
    # the keys match self.lockModes.
    cur.execute("SELECT TRIM(mode, 'Lock'), granted, COUNT(*) FROM pg_locks "
                "GROUP BY TRIM(mode, 'Lock'), granted;")
    rows = cur.fetchall()
    for (mode, granted, cnt) in rows:
        info_dict['all'][mode] += cnt
        if not granted:
            # Locks that are not granted are lock waits.
            info_dict['wait'][mode] += cnt
    return info_dict
|
Returns the number of active locks discriminated by lock mode .
|
1,110 |
def getLockStatsDB(self):
    """Return the number of active locks discriminated by database."""
    stats = {'all': {}, 'wait': {}}
    cursor = self._conn.cursor()
    cursor.execute("SELECT d.datname, l.granted, COUNT(*) FROM pg_database d "
                   "JOIN pg_locks l ON d.oid=l.database "
                   "GROUP BY d.datname, l.granted;")
    for (db, granted, cnt) in cursor.fetchall():
        stats['all'][db] = stats['all'].get(db, 0) + cnt
        if not granted:
            stats['wait'][db] = stats['wait'].get(db, 0) + cnt
    return stats
|
Returns the number of active locks discriminated by database .
|
1,111 |
def getBgWriterStats(self):
    """Return Global Background Writer and Checkpoint Activity stats.

    @return: the pg_stat_bgwriter row as a dict, or {} before PG 8.3.
    """
    info_dict = {}
    # pg_stat_bgwriter first appeared in PostgreSQL 8.3.
    if self.checkVersion('8.3'):
        cur = self._conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
        cur.execute("SELECT * FROM pg_stat_bgwriter")
        info_dict = cur.fetchone()
    return info_dict
|
Returns Global Background Writer and Checkpoint Activity stats .
|
1,112 |
def _connect(self):
    """Establish connection to MySQL Database."""
    if not self._connParams:
        # No explicit parameters: rely on MySQLdb defaults.
        self._conn = MySQLdb.connect('')
    else:
        self._conn = MySQLdb.connect(**self._connParams)
|
Establish connection to MySQL Database .
|
1,113 |
def getStorageEngines(self):
    """Return list of supported storage engines (lower-cased names).

    NOTE(review): the original called cur.execute() with no statement,
    which raises TypeError; "SHOW STORAGE ENGINES" matches the row layout
    read below (name, support) -- confirm upstream.
    """
    cur = self._conn.cursor()
    cur.execute("SHOW STORAGE ENGINES")
    rows = cur.fetchall()
    if rows:
        return [row[0].lower() for row in rows
                if row[1] in ['YES', 'DEFAULT']]
    else:
        return []
|
Returns list of supported storage engines .
|
1,114 |
def getParams(self):
    """Return dictionary of all run-time parameters."""
    cursor = self._conn.cursor()
    cursor.execute("SHOW GLOBAL VARIABLES")
    return dict(
        (row[0], util.parse_value(row[1]))
        for row in cursor.fetchall()
    )
|
Returns dictionary of all run - time parameters .
|
1,115 |
def getProcessStatus(self):
    """Return number of processes discriminated by state.

    NOTE(review): the original called cur.execute() with no statement;
    "SHOW FULL PROCESSLIST" provides the State column read as row[6] --
    confirm upstream.
    """
    info_dict = {}
    cur = self._conn.cursor()
    cur.execute("SHOW FULL PROCESSLIST")
    rows = cur.fetchall()
    if rows:
        for row in rows:
            if row[6] == '':
                state = 'idle'
            elif row[6] is None:
                state = 'other'
            else:
                state = str(row[6]).replace(' ', '_').lower()
            info_dict[state] = info_dict.get(state, 0) + 1
    return info_dict
|
Returns number of processes discriminated by state .
|
1,116 |
def getProcessDatabase(self):
    """Return number of processes discriminated by database name.

    NOTE(review): the original called cur.execute() with no statement;
    "SHOW FULL PROCESSLIST" provides the db column read as row[3] --
    confirm upstream.
    """
    info_dict = {}
    cur = self._conn.cursor()
    cur.execute("SHOW FULL PROCESSLIST")
    rows = cur.fetchall()
    if rows:
        for row in rows:
            db = row[3]
            info_dict[db] = info_dict.get(db, 0) + 1
    return info_dict
|
Returns number of processes discriminated by database name .
|
1,117 |
def getDatabases(self):
    """Return list of databases.

    NOTE(review): the original called cur.execute() with no statement;
    "SHOW DATABASES" matches the single-column rows read below --
    confirm upstream.
    """
    cur = self._conn.cursor()
    cur.execute("SHOW DATABASES")
    rows = cur.fetchall()
    if rows:
        return [row[0] for row in rows]
    else:
        return []
|
Returns list of databases .
|
1,118 |
def _retrieve(self):
    """Query Apache Tomcat Server Status Page in XML format.

    @return: ElementTree object parsed from the response.
    """
    url = "%s://%s:%d/manager/status" % (self._proto, self._host, self._port)
    params = {'XML': 'true'}
    response = util.get_url(url, self._user, self._password, params)
    return ElementTree.XML(response)
|
Query Apache Tomcat Server Status Page in XML format and return the result as an ElementTree object .
|
1,119 |
def getMemoryStats(self):
    """Return JVM Memory Stats for Apache Tomcat Server."""
    if self._statusxml is None:
        # Fetch the status XML lazily on first use.
        self.initStats()
    memstats = {}
    node = self._statusxml.find('jvm/memory')
    if node is not None:
        for attr, raw in node.items():
            memstats[attr] = util.parse_value(raw)
    return memstats
|
Return JVM Memory Stats for Apache Tomcat Server .
|
1,120 |
def getConnectorStats(self):
    """Return dictionary of Connector Stats for Apache Tomcat Server.

    @return: {port: {'proto': str, 'threadInfo': {...}, 'requestInfo': {...}}}
    """
    if self._statusxml is None:
        self.initStats()
    connnodes = self._statusxml.findall('connector')
    connstats = {}
    if connnodes:
        for connnode in connnodes:
            namestr = connnode.get('name')
            if namestr is not None:
                # Connector names look like "proto-port", e.g. "http-8080".
                mobj = re.match('(.*)-(\d+)', namestr)
                if mobj:
                    proto = mobj.group(1)
                    port = int(mobj.group(2))
                    connstats[port] = {'proto': proto}
                    for tag in ('threadInfo', 'requestInfo'):
                        stats = {}
                        node = connnode.find(tag)
                        if node is not None:
                            for (key, val) in node.items():
                                if re.search('Time$', key):
                                    # *Time attributes are in ms; convert to s.
                                    stats[key] = float(val) / 1000.0
                                else:
                                    stats[key] = util.parse_value(val)
                        if stats:
                            connstats[port][tag] = stats
    return connstats
|
Return dictionary of Connector Stats for Apache Tomcat Server .
|
1,121 |
def load(filepath, update=True):
    """Load eggs, fried eggs, and example data.

    @param filepath: path to an .egg/.fegg file, or one of the example
        dataset names 'automatic'/'example', 'manual', 'naturalistic'.
    @param update: passed through to load_egg for generic .egg files.
    @raises ValueError: if the file cannot be identified.
    """
    if filepath == 'automatic' or filepath == 'example':
        fpath = os.path.dirname(os.path.abspath(__file__)) + '/data/automatic.egg'
        return load_egg(fpath)
    elif filepath == 'manual':
        fpath = os.path.dirname(os.path.abspath(__file__)) + '/data/manual.egg'
        return load_egg(fpath, update=False)
    elif filepath == 'naturalistic':
        fpath = os.path.dirname(os.path.abspath(__file__)) + '/data/naturalistic.egg'
        # BUG FIX: the original computed fpath here but fell through
        # without returning, so load('naturalistic') returned None.
        # TODO(review): confirm whether update=False is wanted here.
        return load_egg(fpath)
    elif filepath.split('.')[-1] == 'egg':
        return load_egg(filepath, update=update)
    elif filepath.split('.')[-1] == 'fegg':
        return load_fegg(filepath, update=False)
    else:
        raise ValueError('Could not load file.')
|
Loads eggs , fried eggs , and example data
|
1,122 |
def load_example_data(dataset='automatic'):
    """Load example data.

    @param dataset: one of 'automatic', 'manual', 'naturalistic'.
    @return: the cracked example Egg.
    """
    assert dataset in ['automatic', 'manual', 'naturalistic'], "Dataset can only be automatic, manual, or naturalistic"
    fpath = os.path.dirname(os.path.abspath(__file__)) + '/data/' + dataset + '.egg'
    if dataset == 'naturalistic':
        egg = Egg(**dd.io.load(fpath))
    else:
        try:
            # Newer example files are plain pickles.
            with open(fpath, 'rb') as handle:
                egg = pickle.load(handle)
        except Exception:
            # FIX: narrowed the original bare except so KeyboardInterrupt
            # and SystemExit are no longer swallowed. Fall back to the
            # HDF5 (deepdish) layout for older example files.
            f = dd.io.load(fpath)
            egg = Egg(pres=f['pres'], rec=f['rec'], dist_funcs=f['dist_funcs'],
                      subjgroup=f['subjgroup'], subjname=f['subjname'],
                      listgroup=f['listgroup'], listname=f['listname'],
                      date_created=f['date_created'])
    return egg.crack()
|
Loads example data
|
1,123 |
def spsolve(A, b):
    """Solve the sparse linear system Ax = b; b may be a vector or matrix."""
    x = UmfpackLU(A).solve(b)
    # A single-column 2-D right-hand side flattens to a 1-D result.
    return x.ravel() if (b.ndim == 2 and b.shape[1] == 1) else x
|
Solve the sparse linear system Ax = b where b may be a vector or a matrix .
|
1,124 |
def solve(self, b):
    """Solve the linear equation A x = b for x.

    @param b: dense or sparse right-hand side (vector or matrix).
    @return: dense solution with the same trailing shape as b.
    @raises ValueError: if b's leading dimension does not match A.
    """
    if isspmatrix(b):
        b = b.toarray()
    if b.shape[0] != self._A.shape[1]:
        raise ValueError("Shape of b is not compatible with that of A")
    # Work on a 2-D view so vectors and matrices share one code path.
    b_arr = asarray(b, dtype=self._A.dtype).reshape(b.shape[0], -1)
    x = np.zeros((self._A.shape[0], b_arr.shape[1]), dtype=self._A.dtype)
    for j in range(b_arr.shape[1]):
        # autoTranspose handles UMFPACK's column-major convention.
        x[:, j] = self.umf.solve(UMFPACK_A, self._A, b_arr[:, j],
                                 autoTranspose=True)
    return x.reshape((self._A.shape[0],) + b.shape[1:])
|
Solve linear equation A x = b for x
|
1,125 |
def solve_sparse(self, B):
    """Solve A X = B, where B and X are sparse matrices.

    @param B: sparse right-hand side matrix.
    @return: sparse solution, columns stacked with hstack.
    """
    B = B.tocsc()
    cols = list()
    # FIX: use range() instead of the py2-only xrange() for py3 support.
    for j in range(B.shape[1]):
        col = self.solve(B[:, j])
        cols.append(csc_matrix(col))
    return hstack(cols)
|
Solve linear equation of the form A X = B . Where B and X are sparse matrices .
|
1,126 |
def recall_matrix(egg, match='exact', distance='euclidean', features=None):
    """Compute recall matrix given presented and recalled word lists."""
    if match in ['best', 'smooth']:
        if not features:
            # Infer matchable features from the first presented item.
            features = [k for k, v in egg.pres.loc[0][0].values[0].items()
                        if k != 'item']
        if not features:
            # BUG FIX: original did raise('...'), which raises a TypeError
            # ("exceptions must derive from BaseException") and masks the
            # intended message.
            raise ValueError(
                'No features found. Cannot match with best or smooth strategy')
        if not isinstance(features, list):
            features = [features]
    if match == 'exact':
        features = ['item']
        return _recmat_exact(egg.pres, egg.rec, features)
    else:
        return _recmat_smooth(egg.pres, egg.rec, features, distance, match)
|
Computes recall matrix given list of presented and list of recalled words
|
1,127 |
def _connect(self):
    """Connect to Asterisk Manager Interface.

    @raises Exception: if the connection cannot be established.
    """
    try:
        if sys.version_info[:2] >= (2, 6):
            self._conn = telnetlib.Telnet(self._amihost, self._amiport,
                                          connTimeout)
        else:
            # Telnet() grew a timeout argument only in Python 2.6.
            self._conn = telnetlib.Telnet(self._amihost, self._amiport)
    except Exception:
        # FIX: narrowed the original bare except so KeyboardInterrupt and
        # SystemExit are not converted into a connection error.
        raise Exception("Connection to Asterisk Manager Interface on "
                        "host %s and port %s failed."
                        % (self._amihost, self._amiport))
|
Connect to Asterisk Manager Interface .
|
1,128 |
def _getGreeting(self):
    """Read and parse the AMI greeting banner.

    Determines and stores the Manager Interface version in
    self._ami_version; raises Exception if the banner is unrecognized.
    """
    greeting = self._conn.read_until("\r\n", connTimeout)
    mobj = re.match('Asterisk Call Manager\/([\d\.]+)\s*$', greeting)
    if mobj:
        self._ami_version = util.SoftwareVersion(mobj.group(1))
    else:
        raise Exception("Asterisk Manager Interface version cannot be determined.")
|
Read and parse Asterisk Manager Interface Greeting to determine and set Manager Interface version .
|
1,129 |
def _initAsteriskVersion(self):
    """Query AMI for the Asterisk version; sets self._asterisk_version.

    CLI Command - core show version
    """
    # AMI >= 1.0 renamed the CLI command to "core show version".
    if self._ami_version > util.SoftwareVersion('1.0'):
        cmd = "core show version"
    else:
        cmd = "show version"
    cmdresp = self.executeCommand(cmd)
    mobj = re.match('Asterisk\s*(SVN-branch-|\s)(\d+(\.\d+)*)', cmdresp)
    if mobj:
        self._asterisk_version = util.SoftwareVersion(mobj.group(2))
    else:
        raise Exception('Asterisk version cannot be determined.')
|
Query Asterisk Manager Interface for Asterisk Version to configure system for compatibility with multiple versions . CLI Command - core show version
|
1,130 |
def _login(self):
    """Login to Asterisk Manager Interface."""
    self._sendAction("login", (
        ("Username", self._amiuser),
        ("Secret", self._amipass),
        ("Events", "off"),
    ))
    resp = self._getResponse()
    if resp.get("Response") != "Success":
        raise Exception("Authentication to Asterisk Manager Interface Failed.")
    return True
|
Login to Asterisk Manager Interface .
|
1,131 |
def _initModuleList(self):
    """Query AMI to initialize the internal set of loaded modules.

    CLI Command - core show modules
    """
    if self.checkVersion('1.4'):
        cmd = "module show"
    else:
        cmd = "show modules"
    cmdresp = self.executeCommand(cmd)
    self._modules = set()
    # Skip the header line and the trailing summary line.
    for line in cmdresp.splitlines()[1:-1]:
        mobj = re.match('\s*(\S+)\s', line)
        if mobj:
            self._modules.add(mobj.group(1).lower())
|
Query Asterisk Manager Interface to initialize internal list of loaded modules . CLI Command - core show modules
|
1,132 |
def _initApplicationList(self):
    """Query AMI to initialize the internal set of available applications.

    CLI Command - core show applications
    """
    if self.checkVersion('1.4'):
        cmd = "core show applications"
    else:
        cmd = "show applications"
    output = self.executeCommand(cmd)
    self._applications = set()
    # First and last lines are header/summary; application rows end the
    # name with a colon.
    for row in output.splitlines()[1:-1]:
        found = re.match('\s*(\S+):', row)
        if found:
            self._applications.add(found.group(1).lower())
|
Query Asterisk Manager Interface to initialize internal list of available applications . CLI Command - core show applications
|
1,133 |
def _initChannelTypesList(self):
    """Query AMI to initialize the internal set of supported channel types.

    CLI Command - core show channeltypes
    """
    if self.checkVersion('1.4'):
        cmd = "core show channeltypes"
    else:
        cmd = "show channeltypes"
    output = self.executeCommand(cmd)
    self._chantypes = set()
    # The first two lines are table headers.
    for row in output.splitlines()[2:]:
        found = re.match('\s*(\S+)\s+.*\s+(yes|no)\s+', row)
        if found:
            self._chantypes.add(found.group(1).lower())
|
Query Asterisk Manager Interface to initialize internal list of supported channel types . CLI Command - core show channeltypes
|
1,134 |
def hasModule(self, mod):
    """Return True if mod is among the loaded modules."""
    if self._modules is None:
        # Populate the module cache lazily on first use.
        self._initModuleList()
    return mod in self._modules
|
Returns True if mod is among the loaded modules .
|
1,135 |
def hasApplication(self, app):
    """Return True if app is among the available applications."""
    if self._applications is None:
        # Populate the application cache lazily on first use.
        self._initApplicationList()
    return app in self._applications
|
Returns True if app is among the available applications .
|
1,136 |
def hasChannelType(self, chan):
    """Return True if chan is among the supported channel types."""
    if self._chantypes is None:
        # Populate the channel-type cache lazily on first use.
        self._initChannelTypesList()
    return chan in self._chantypes
|
Returns True if chan is among the supported channel types .
|
1,137 |
def getCodecList(self):
    """Query AMI for defined codecs.

    CLI Command - core show codecs
    @return: dict mapping codec name -> (type, description).
    """
    if self.checkVersion('1.4'):
        cmd = "core show codecs"
    else:
        cmd = "show codecs"
    cmdresp = self.executeCommand(cmd)
    info_dict = {}
    for line in cmdresp.splitlines():
        mobj = re.match('\s*(\d+)\s+\((.+)\)\s+\((.+)\)\s+(\w+)\s+(\w+)\s+\((.+)\)$', line)
        if mobj:
            info_dict[mobj.group(5)] = (mobj.group(4), mobj.group(6))
    return info_dict
|
Query Asterisk Manager Interface for defined codecs . CLI Command - core show codecs
|
1,138 |
def getChannelStats(self, chantypes=('dahdi', 'zap', 'sip', 'iax2', 'local')):
    """Query AMI for Channel Stats.

    CLI Command - core show channels
    @param chantypes: channel technology prefixes to count.
    @return: dict of per-channel-type counts plus call/channel totals.
    """
    if self.checkVersion('1.4'):
        cmd = "core show channels"
    else:
        cmd = "show channels"
    cmdresp = self.executeCommand(cmd)
    info_dict = {}
    for chanstr in chantypes:
        chan = chanstr.lower()
        if chan in ('zap', 'dahdi'):
            # Zap and DAHDI are counted together under 'dahdi'; 'mix'
            # counts DAHDI pseudo channels.
            info_dict['dahdi'] = 0
            info_dict['mix'] = 0
        else:
            info_dict[chan] = 0
    for k in ('active_calls', 'active_channels', 'calls_processed'):
        info_dict[k] = 0
    # Channel lines look like "SIP/peer-0000", i.e. "<tech>/<id>".
    regexstr = ('(%s)\/(\w+)' % '|'.join(chantypes))
    for line in cmdresp.splitlines():
        mobj = re.match(regexstr, line, re.IGNORECASE)
        if mobj:
            chan_type = mobj.group(1).lower()
            chan_id = mobj.group(2).lower()
            if chan_type == 'dahdi' or chan_type == 'zap':
                if chan_id == 'pseudo':
                    info_dict['mix'] += 1
                else:
                    info_dict['dahdi'] += 1
            else:
                info_dict[chan_type] += 1
            continue
        # Summary lines: "<N> active channel(s)/call(s)/calls processed".
        mobj = re.match('(\d+)\s+(active channel|active call|calls processed)', line, re.IGNORECASE)
        if mobj:
            if mobj.group(2) == 'active channel':
                info_dict['active_channels'] = int(mobj.group(1))
            elif mobj.group(2) == 'active call':
                info_dict['active_calls'] = int(mobj.group(1))
            elif mobj.group(2) == 'calls processed':
                info_dict['calls_processed'] = int(mobj.group(1))
            continue
    return info_dict
|
Query Asterisk Manager Interface for Channel Stats . CLI Command - core show channels
|
1,139 |
def getConferenceStats(self):
    """Query AMI for Conference Room Stats.

    CLI Command - meetme list
    @return: dict with active conference and user counts, or None if the
             conference (MeetMe) module is not available.
    """
    if not self.hasConference():
        return None
    if self.checkVersion('1.6'):
        cmd = "meetme list"
    else:
        cmd = "meetme"
    cmdresp = self.executeCommand(cmd)
    info_dict = dict(active_conferences=0, conference_users=0)
    for line in cmdresp.splitlines():
        # NOTE(review): the pattern requires a literal leading '0' in the
        # party-count column and captures only the remaining digits --
        # confirm against actual "meetme list" output.
        mobj = re.match('\w+\s+0(\d+)\s', line)
        if mobj:
            info_dict['active_conferences'] += 1
            info_dict['conference_users'] += int(mobj.group(1))
    return info_dict
|
Query Asterisk Manager Interface for Conference Room Stats . CLI Command - meetme list
|
1,140 |
def getVoicemailStats(self):
    """Query AMI for Voicemail Stats.

    CLI Command - voicemail show users
    @return: dict of account/message counters, or None if the voicemail
             module is not available.
    """
    if not self.hasVoicemail():
        return None
    if self.checkVersion('1.4'):
        cmd = "voicemail show users"
    else:
        cmd = "show voicemail users"
    cmdresp = self.executeCommand(cmd)
    info_dict = dict(accounts=0, avg_messages=0, max_messages=0,
                     total_messages=0)
    for line in cmdresp.splitlines():
        # Each user row ends with the number of new messages.
        mobj = re.match('\w+\s+\w+\s+.*\s+(\d+)\s*$', line)
        if mobj:
            msgs = int(mobj.group(1))
            info_dict['accounts'] += 1
            info_dict['total_messages'] += msgs
            if msgs > info_dict['max_messages']:
                info_dict['max_messages'] = msgs
    if info_dict['accounts'] > 0:
        # float() keeps the average exact under Python 2 integer division.
        info_dict['avg_messages'] = (float(info_dict['total_messages'])
                                     / info_dict['accounts'])
    return info_dict
|
Query Asterisk Manager Interface for Voicemail Stats . CLI Command - voicemail show users
|
1,141 |
def getTrunkStats(self, trunkList):
    """Query AMI for Trunk Stats.

    CLI Command - core show channels
    @param trunkList: list of tuples; either (name, pattern) or
        (name, pattern, min, max), where the pattern's named group 'num'
        must fall within [min, max] for the channel to be counted.
    @return: dict mapping trunk name -> active channel count.
    """
    re_list = []
    info_dict = {}
    for filt in trunkList:
        info_dict[filt[0]] = 0
        re_list.append(re.compile(filt[1], re.IGNORECASE))
    if self.checkVersion('1.4'):
        cmd = "core show channels"
    else:
        cmd = "show channels"
    cmdresp = self.executeCommand(cmd)
    for line in cmdresp.splitlines():
        for idx in range(len(re_list)):
            recomp = re_list[idx]
            trunkid = trunkList[idx][0]
            mobj = recomp.match(line)
            if mobj:
                if len(trunkList[idx]) == 2:
                    # Simple filter: every matching line counts.
                    info_dict[trunkid] += 1
                    continue
                elif len(trunkList[idx]) == 4:
                    # Ranged filter: count only if 'num' is within bounds.
                    num = mobj.groupdict().get('num')
                    if num is not None:
                        (vmin, vmax) = trunkList[idx][2:4]
                        if int(num) >= int(vmin) and int(num) <= int(vmax):
                            info_dict[trunkid] += 1
                            continue
    return info_dict
|
Query Asterisk Manager Interface for Trunk Stats . CLI Command - core show channels
|
1,142 |
def getFaxStatsCounters(self):
    """Query AMI for Fax Stats.

    CLI Command - fax show stats
    @return: nested dict {context: {counter: int}}, or None when fax
             support is unavailable.
    """
    if not self.hasFax():
        return None
    info_dict = {}
    cmdresp = self.executeCommand('fax show stats')
    ctxt = 'general'
    for section in cmdresp.strip().split('\n\n')[1:]:
        i = 0
        for line in section.splitlines():
            mobj = re.match('(\S.*\S)\s*:\s*(\d+)\s*$', line)
            if mobj:
                # FIX: dict.has_key() was removed in Python 3; use 'in'.
                if ctxt not in info_dict:
                    info_dict[ctxt] = {}
                info_dict[ctxt][mobj.group(1).lower()] = int(mobj.group(2).lower())
            elif i == 0:
                # The first non-counter line of a section names the context.
                ctxt = line.strip().lower()
            i += 1
    return info_dict
|
Query Asterisk Manager Interface for Fax Stats . CLI Command - fax show stats
|
1,143 |
def getFaxStatsSessions(self):
    """Query AMI for Fax Session Stats.

    CLI Command - fax show sessions
    @return: dict with 'total' plus per-type/operation/state counters,
             or None when fax support is unavailable.
    """
    if not self.hasFax():
        return None
    info_dict = {}
    info_dict['total'] = 0
    fax_types = ('g.711', 't.38')
    fax_operations = ('send', 'recv')
    fax_states = ('uninitialized', 'initialized', 'open', 'active',
                  'inactive', 'complete', 'unknown',)
    info_dict['type'] = dict([(k, 0) for k in fax_types])
    info_dict['operation'] = dict([(k, 0) for k in fax_operations])
    info_dict['state'] = dict([(k, 0) for k in fax_states])
    cmdresp = self.executeCommand('fax show sessions')
    sections = cmdresp.strip().split('\n\n')
    if len(sections) >= 3:
        # BUG FIX: the original iterated sections[1][1:], i.e. over the
        # *characters* of the section string; iterate its lines instead,
        # skipping the header line.
        for line in sections[1].splitlines()[1:]:
            cols = re.split('\s\s+', line)
            if len(cols) == 7:
                info_dict['total'] += 1
                if cols[3].lower() in fax_types:
                    info_dict['type'][cols[3].lower()] += 1
                if cols[4] == 'receive':
                    info_dict['operation']['recv'] += 1
                elif cols[4] == 'send':
                    info_dict['operation']['send'] += 1
                if cols[5].lower() in fax_states:
                    info_dict['state'][cols[5].lower()] += 1
    return info_dict
|
Query Asterisk Manager Interface for Fax Stats . CLI Command - fax show sessions
|
1,144 |
def simulate_list(nwords=16, nrec=10, ncats=4):
    """A function to simulate a list.

    NOTE(review): the visible body builds the sampled word pool but never
    returns or uses it (and ignores nwords/nrec/ncats) -- the snippet
    appears truncated; confirm against the original source.
    """
    wp = pd.read_csv('data/cut_wordpool.csv')
    # Pick one of the 16 wordpool groups at random and sample 16 words.
    wp = wp[wp['GROUP'] == np.random.choice(list(range(16)), 1)[0]].sample(16)
    # Attach a random RGB color to every word.
    wp['COLOR'] = [[int(np.random.rand() * 255) for i in range(3)]
                   for i in range(16)]
|
A function to simulate a list
|
1,145 |
def engineIncluded(self, name):
    """Return True if the storage engine name is included in graphs."""
    engines = self._engines
    if engines is None:
        # Fetch and cache the engine list on first use.
        engines = self._dbconn.getStorageEngines()
        self._engines = engines
    return self.envCheckFilter('engine', name) and name in engines
|
Utility method to check if a storage engine is included in graphs .
|
1,146 |
def getSpaceUse(self):
    """Get disk space usage per mounted filesystem.

    @return: dict keyed by mount point with device/type/usage details.
    @raises Exception: if the df command cannot be executed.
    """
    stats = {}
    try:
        out = subprocess.Popen([dfCmd, "-Pk"],
                               stdout=subprocess.PIPE).communicate()[0]
    except (OSError, ValueError):
        # FIX: narrowed the original bare except to the errors Popen
        # actually raises, so unrelated failures are not masked.
        raise Exception('Execution of command %s failed.' % dfCmd)
    lines = out.splitlines()
    if len(lines) > 1:
        for line in lines[1:]:
            fsstats = {}
            cols = line.split()
            fsstats['device'] = cols[0]
            fsstats['type'] = self._fstypeDict[cols[5]]
            # df -Pk reports 1K blocks; convert to bytes.
            fsstats['total'] = 1024 * int(cols[1])
            fsstats['inuse'] = 1024 * int(cols[2])
            fsstats['avail'] = 1024 * int(cols[3])
            fsstats['inuse_pcent'] = int(cols[4][:-1])
            stats[cols[5]] = fsstats
    return stats
|
Get disk space usage .
|
1,147 |
def connect(self, host, port):
    """Connect via a RS-485 to Ethernet adapter."""
    conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    conn.connect((host, port))
    # Keep separate buffered file objects for reading and writing.
    self._reader = conn.makefile(mode='rb')
    self._writer = conn.makefile(mode='wb')
|
Connects via a RS - 485 to Ethernet adapter .
|
1,148 |
def send_key(self, key):
    """Send a key press by queueing its event frame."""
    _LOGGER.info('Queueing key %s', key)
    self._send_queue.put({'frame': self._get_key_event_frame(key)})
|
Sends a key .
|
1,149 |
def states(self):
    """Return a list containing the enabled states."""
    state_list = []
    for state in States:
        # Bit-test each state flag against the current state mask.
        if state.value & self._states != 0:
            state_list.append(state)
    # A flashing FILTER indicator means the filter runs at low speed.
    if (self._flashing_states & States.FILTER) != 0:
        state_list.append(States.FILTER_LOW_SPEED)
    return state_list
|
Returns a set containing the enabled states .
|
1,150 |
def get_state(self, state):
    """Return True if the specified state is enabled.

    Pending (queued) commands take precedence over the reported state.
    """
    # Check the send queue first: a queued command's desired value is
    # what the caller will observe once the command is applied.
    for data in list(self._send_queue.queue):
        desired_states = data['desired_states']
        for desired_state in desired_states:
            if desired_state['state'] == state:
                return desired_state['enabled']
    if state == States.FILTER_LOW_SPEED:
        # Low-speed filter is signalled by a flashing FILTER indicator.
        return (States.FILTER.value & self._flashing_states) != 0
    return (state.value & self._states) != 0
|
Returns True if the specified state is enabled .
|
1,151 |
def trace(function, *args, **k):
    """Trace the beginning and end of a call when the doTrace global is True."""
    if doTrace:
        print("> " + function.__name__, args, k)
    result = function(*args, **k)
    if doTrace:
        # Log the same call signature together with its result.
        print("< " + function.__name__, args, k, "->", result)
    return result
|
Decorates a function by tracing the begining and end of the function execution if doTrace global is True
|
1,152 |
def chol(A):
    """Return the lower-triangular Cholesky factor of a symmetric
    positive-definite matrix A."""
    A = np.array(A)
    assert A.shape[0] == A.shape[1], "Input matrix must be square"
    n = len(A)
    L = [[0.0] * n for _ in range(n)]
    for i in range(n):
        for j in range(i + 1):
            # Partial dot product of the rows computed so far.
            partial = sum(L[i][k] * L[j][k] for k in range(j))
            if i == j:
                L[i][j] = (A[i][i] - partial) ** 0.5
            else:
                L[i][j] = 1.0 / L[j][j] * (A[i][j] - partial)
    return np.array(L)
|
Calculate the lower triangular matrix of the Cholesky decomposition of a symmetric positive - definite matrix .
|
1,153 |
def get(self, uri, params=None):
    """Make a GET request to the OpenDNS Investigate API on the given URI.

    @param uri: path relative to Investigate.BASE_URL.
    @param params: optional dict of query parameters.
    """
    # FIX: replaced the mutable default argument `params={}` with a
    # per-call fresh dict (same observable behavior for all callers).
    if params is None:
        params = {}
    return self._session.get(urljoin(Investigate.BASE_URL, uri),
                             params=params, headers=self._auth_header,
                             proxies=self.proxies)
|
A generic method to make GET requests to the OpenDNS Investigate API on the given URI .
|
1,154 |
def post(self, uri, params=None, data=None):
    """Make a POST request to the OpenDNS Investigate API on the given URI.

    @param uri: path relative to Investigate.BASE_URL.
    @param params: optional dict of query parameters.
    @param data: optional dict of form data.
    """
    # FIX: replaced the mutable default arguments `params={}`/`data={}`
    # with per-call fresh dicts (same observable behavior for callers).
    if params is None:
        params = {}
    if data is None:
        data = {}
    return self._session.post(urljoin(Investigate.BASE_URL, uri),
                              params=params, data=data,
                              headers=self._auth_header,
                              proxies=self.proxies)
|
A generic method to make POST requests to the OpenDNS Investigate API on the given URI .
|
1,155 |
def categorization(self, domains, labels=False):
    """Get the domain status and categorization of a domain or list of
    domains.

    @param domains: a single domain string or a list of domains.
    @param labels: if True, return categorizations in human-readable form.
    """
    # FIX: use isinstance() instead of 'type(...) is', so str/list
    # subclasses are accepted too.
    if isinstance(domains, str):
        return self._get_categorization(domains, labels)
    elif isinstance(domains, list):
        return self._post_categorization(domains, labels)
    else:
        raise Investigate.DOMAIN_ERR
|
Get the domain status and categorization of a domain or list of domains . domains can be either a single domain or a list of domains . Setting labels to True will give back categorizations in human - readable form .
|
1,156 |
def cooccurrences(self, domain):
    """Get the cooccurrences of the given domain."""
    return self.get_parse(self._uris["cooccurrences"].format(domain))
|
Get the cooccurrences of the given domain .
|
1,157 |
def related(self, domain):
    """Get the related domains of the given domain."""
    return self.get_parse(self._uris["related"].format(domain))
|
Get the related domains of the given domain .
|
1,158 |
def security(self, domain):
    """Get the Security Information for the given domain."""
    return self.get_parse(self._uris["security"].format(domain))
|
Get the Security Information for the given domain .
|
1,159 |
def domain_whois(self, domain):
    """Get whois information for a domain."""
    return self.get_parse(self._uris["whois_domain"].format(domain))
|
Gets whois information for a domain
|
1,160 |
def domain_whois_history(self, domain, limit=None):
    """Get whois history for a domain."""
    params = {} if limit is None else {'limit': limit}
    uri = self._uris["whois_domain_history"].format(domain)
    return self.get_parse(uri, params)
|
Gets whois history for a domain
|
1,161 |
def ns_whois(self, nameservers, limit=DEFAULT_LIMIT, offset=DEFAULT_OFFSET,
             sort_field=DEFAULT_SORT):
    """Get the domains that have been registered with a nameserver or
    nameservers.

    @param nameservers: a single nameserver string or a list of them.
    """
    if not isinstance(nameservers, list):
        # Single nameserver: embed it directly in the URI path.
        uri = self._uris["whois_ns"].format(nameservers)
        params = {'limit': limit, 'offset': offset, 'sortField': sort_field}
    else:
        # Multiple nameservers: pass them as a comma-separated parameter.
        # NOTE(review): the parameter name 'emailList' looks odd for
        # nameservers -- confirm against the Investigate API reference.
        uri = self._uris["whois_ns"].format('')
        params = {'emailList': ','.join(nameservers), 'limit': limit,
                  'offset': offset, 'sortField': sort_field}
    resp_json = self.get_parse(uri, params=params)
    return resp_json
|
Gets the domains that have been registered with a nameserver or nameservers
|
1,162 |
def search(self, pattern, start=None, limit=None, include_category=None):
    """Search for domains that match a given pattern.

    @param pattern: search pattern (URL-quoted into the URI).
    @param start: window start; a datetime.timedelta relative to now
        (default 30 days) or an absolute datetime.datetime.
    @param limit: optional maximum number of results.
    @param include_category: optional bool; include category data.
    @raises Investigate.SEARCH_ERR: if start has an unsupported type.
    """
    params = dict()
    if start is None:
        start = datetime.timedelta(days=30)
    if isinstance(start, datetime.timedelta):
        # Convert "now - delta" to epoch milliseconds.
        # NOTE(review): mixes utcnow() with time.mktime(), which
        # interprets the tuple in *local* time -- confirm the intended
        # timezone handling.
        params['start'] = int(time.mktime((datetime.datetime.utcnow() - start).timetuple()) * 1000)
    elif isinstance(start, datetime.datetime):
        params['start'] = int(time.mktime(start.timetuple()) * 1000)
    else:
        raise Investigate.SEARCH_ERR
    if limit is not None and isinstance(limit, int):
        params['limit'] = limit
    if include_category is not None and isinstance(include_category, bool):
        params['includeCategory'] = str(include_category).lower()
    uri = self._uris['search'].format(quote_plus(pattern))
    return self.get_parse(uri, params)
|
Searches for domains that match a given pattern
|
1,163 |
def samples(self, anystring, limit=None, offset=None, sortby=None):
    """Return an object representing the samples identified by the input
    domain, IP, or URL."""
    query = {'limit': limit, 'offset': offset, 'sortby': sortby}
    return self.get_parse(self._uris['samples'].format(anystring), query)
|
Return an object representing the samples identified by the input domain IP or URL
|
1,164 |
def sample(self, hash, limit=None, offset=None):
    """Return an object representing the sample identified by the input
    hash, or an empty object if that sample is not found."""
    # NOTE: parameter name 'hash' shadows the builtin; kept unchanged for
    # backward compatibility with keyword callers.
    query = {'limit': limit, 'offset': offset}
    return self.get_parse(self._uris['sample'].format(hash), query)
|
Return an object representing the sample identified by the input hash or an empty object if that sample is not found
|
1,165 |
def as_for_ip(self, ip):
    """Get the AS information for a given IP address."""
    if not Investigate.IP_PATTERN.match(ip):
        raise Investigate.IP_ERR
    return self.get_parse(self._uris["as_for_ip"].format(ip))
|
Gets the AS information for a given IP address .
|
1,166 |
def prefixes_for_asn(self, asn):
    """Get the CIDR and geolocation information associated with an ASN."""
    return self.get_parse(self._uris["prefixes_for_asn"].format(asn))
|
Gets the AS information for a given ASN . Return the CIDR and geolocation associated with the AS .
|
1,167 |
def acosh(x):
    """Inverse hyperbolic cosine, propagating uncertainty when given an
    UncertainFunction."""
    if isinstance(x, UncertainFunction):
        return UncertainFunction(np.arccosh(x._mcpts))
    return np.arccosh(x)
|
Inverse hyperbolic cosine
|
1,168 |
def asinh(x):
    """Inverse hyperbolic sine, propagating uncertainty when given an
    UncertainFunction."""
    if isinstance(x, UncertainFunction):
        return UncertainFunction(np.arcsinh(x._mcpts))
    return np.arcsinh(x)
|
Inverse hyperbolic sine
|
1,169 |
def atanh(x):
    """Inverse hyperbolic tangent, propagating uncertainty when given an
    UncertainFunction."""
    if isinstance(x, UncertainFunction):
        return UncertainFunction(np.arctanh(x._mcpts))
    return np.arctanh(x)
|
Inverse hyperbolic tangent
|
1,170 |
def degrees(x):
    """Convert radians to degrees, propagating uncertainty when given an
    UncertainFunction."""
    if isinstance(x, UncertainFunction):
        return UncertainFunction(np.degrees(x._mcpts))
    return np.degrees(x)
|
Convert radians to degrees
|
1,171 |
def fabs(x):
    """Absolute value function, propagating uncertainty when given an
    UncertainFunction."""
    if isinstance(x, UncertainFunction):
        return UncertainFunction(np.fabs(x._mcpts))
    return np.fabs(x)
|
Absolute value function
|
1,172 |
def hypot(x, y):
    """Calculate the hypotenuse given two legs of a right triangle.

    Propagates uncertainty if either argument is an UncertainFunction.
    """
    # BUG FIX: the original tested isinstance(x, ...) twice; the second
    # test must inspect y, otherwise an uncertain y with a plain x fell
    # through to the purely numeric branch.
    if isinstance(x, UncertainFunction) or isinstance(y, UncertainFunction):
        ufx = to_uncertain_func(x)
        ufy = to_uncertain_func(y)
        mcpts = np.hypot(ufx._mcpts, ufy._mcpts)
        return UncertainFunction(mcpts)
    else:
        return np.hypot(x, y)
|
Calculate the hypotenuse given two legs of a right triangle
|
1,173 |
def log10(x):
    """Base-10 logarithm, propagating uncertainty when needed."""
    if not isinstance(x, UncertainFunction):
        return np.log10(x)
    return UncertainFunction(np.log10(x._mcpts))
|
Base - 10 logarithm
|
1,174 |
def radians(x):
    """Convert degrees to radians, propagating uncertainty when needed."""
    if not isinstance(x, UncertainFunction):
        return np.radians(x)
    return UncertainFunction(np.radians(x._mcpts))
|
Convert degrees to radians
|
1,175 |
def sqrt(x):
    """Square-root function, propagating uncertainty when needed."""
    if not isinstance(x, UncertainFunction):
        return np.sqrt(x)
    return UncertainFunction(np.sqrt(x._mcpts))
|
Square - root function
|
1,176 |
def trunc(x):
    """Truncate the values to the integer value without rounding."""
    if not isinstance(x, UncertainFunction):
        return np.trunc(x)
    return UncertainFunction(np.trunc(x._mcpts))
|
Truncate the values to the integer value without rounding
|
1,177 |
def var(self):
    """Variance of the Monte Carlo sample about its mean."""
    deviations = self._mcpts - self.mean
    return np.mean(deviations ** 2)
|
Variance value as a result of an uncertainty calculation
|
1,178 |
def load_hat(self, path):
    """Load the hat image at *path*, converting BGRA to RGBA channel order.

    Raises ValueError when the file cannot be read by OpenCV.
    """
    hat = cv2.imread(path, cv2.IMREAD_UNCHANGED)
    if hat is None:
        raise ValueError('No hat image found at `{}`'.format(path))
    # cv2 loads BGRA; swap the blue and red planes for RGBA consumers.
    blue, green, red, alpha = cv2.split(hat)
    return cv2.merge((red, green, blue, alpha))
|
Loads the hat from a picture at path .
|
1,179 |
def find_faces(self, image, draw_box=False):
    """Detect faces in *image* with the haar cascade.

    When *draw_box* is True, a green rectangle is drawn around each
    detected face directly on *image*.
    """
    gray = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)
    faces = self.cascade.detectMultiScale(
        gray, scaleFactor=1.3, minNeighbors=5, minSize=(50, 50), flags=0)
    if draw_box:
        for left, top, width, height in faces:
            cv2.rectangle(image, (left, top),
                          (left + width, top + height), (0, 255, 0), 2)
    return faces
|
Uses a haarcascade to detect faces inside an image .
|
1,180 |
# Marks the object as changed: optionally logs a printf-style debug message
# (message % args), always logs a 'changed' event, then propagates the change
# upward — to the parent tracked object when one exists, otherwise (when the
# root object is itself a Mutable) to the SQLAlchemy Mutable machinery via
# super().  NOTE(review): statement order matters (logging before
# propagation) and the explicit super(TrackedObject, self) call depends on
# the class hierarchy outside this view — left byte-identical.
def changed ( self , message = None , * args ) : if message is not None : self . logger . debug ( '%s: %s' , self . _repr ( ) , message % args ) self . logger . debug ( '%s: changed' , self . _repr ( ) ) if self . parent is not None : self . parent . changed ( ) elif isinstance ( self , Mutable ) : super ( TrackedObject , self ) . changed ( )
|
Marks the object as changed .
|
1,181 |
def register(cls, origin_type):
    """Decorator factory for mutation tracker registration.

    The returned decorator maps *origin_type* to the decorated tracked
    type in the class-level type mapping and returns the type unchanged.
    """
    def _register(tracked_type):
        cls._type_mapping[origin_type] = tracked_type
        return tracked_type
    return _register
|
Decorator for mutation tracker registration .
|
1,182 |
def convert(cls, obj, parent):
    """Convert *obj* to its registered tracked type, if one exists.

    Unregistered types are returned unchanged; converted objects get
    their ``parent`` attribute set to *parent*.
    """
    tracked_cls = cls._type_mapping.get(type(obj))
    if tracked_cls is None:
        return obj
    tracked = tracked_cls(obj)
    tracked.parent = parent
    return tracked
|
Converts objects to registered tracked types
|
1,183 |
def convert_items(self, items):
    """Lazily yield (key, converted value) pairs for 2-tuple iterators."""
    for key, value in items:
        yield key, self.convert(value, self)
|
Generator like convert_iterable but for 2 - tuple iterators .
|
1,184 |
def convert_mapping(self, mapping):
    """Track either a dict or an iterable of 2-tuples via convert_items."""
    pairs = iteritems(mapping) if isinstance(mapping, dict) else mapping
    return self.convert_items(pairs)
|
Convenience method to track either a dict or a 2 - tuple iterator .
|
1,185 |
def md2rst(md_lines):
    """Only converts headers"""
    # Map markdown header depth (number of '#') to the reST underline char.
    underline_for_level = {1: '=', 2: '-', 3: '~'}
    for line in md_lines:
        if not line.startswith('#'):
            yield line
            continue
        hashes, text = line.split(' ', 1)
        yield text
        yield underline_for_level[len(hashes)] * len(text)
|
Only converts headers
|
1,186 |
def aslist(generator):
    """Function decorator to transform a generator into a list"""
    def materialize(*args, **kwargs):
        return list(generator(*args, **kwargs))
    return materialize
|
Function decorator to transform a generator into a list
|
1,187 |
def coerce(cls, key, value):
    """Convert plain dictionaries/lists to their nested-mutable types.

    None and already-coerced values pass through unchanged; dicts and
    lists are delegated to the nested mutable containers; anything else
    falls back to the parent class's coerce.

    Bug fix: the fallback used ``super(cls)``, which creates an *unbound*
    super object — attribute lookup on it raises AttributeError, so the
    parent coerce was never reached. ``super(cls, cls)`` produces the
    bound super object needed for classmethod dispatch.
    """
    if value is None:
        return value
    if isinstance(value, cls):
        return value
    if isinstance(value, dict):
        return NestedMutableDict.coerce(key, value)
    if isinstance(value, list):
        return NestedMutableList.coerce(key, value)
    return super(cls, cls).coerce(key, value)
|
Convert plain dictionary to NestedMutable .
|
1,188 |
def is_mod_function(mod, fun):
    """Checks if a function in a module was declared in that module."""
    if not inspect.isfunction(fun):
        return False
    return inspect.getmodule(fun) == mod
|
Checks if a function in a module was declared in that module .
|
1,189 |
def is_mod_class(mod, cls):
    """Checks if a class in a module was declared in that module."""
    if not inspect.isclass(cls):
        return False
    return inspect.getmodule(cls) == mod
|
Checks if a class in a module was declared in that module .
|
1,190 |
def list_functions(mod_name):
    """Lists all functions declared in the module named *mod_name*."""
    module = sys.modules[mod_name]
    names = []
    for obj in module.__dict__.values():
        if is_mod_function(module, obj):
            names.append(obj.__name__)
    return names
|
Lists all functions declared in a module .
|
1,191 |
def list_classes(mod_name):
    """Lists all classes declared in the module named *mod_name*."""
    module = sys.modules[mod_name]
    names = []
    for obj in module.__dict__.values():
        if is_mod_class(module, obj):
            names.append(obj.__name__)
    return names
|
Lists all classes declared in a module .
|
1,192 |
def get_linenumbers(functions, module, searchstr='def {}(image):\n'):
    """Returns a dictionary which maps function names to line numbers.

    Each name in *functions* is located by searching *module*'s source
    for ``searchstr`` formatted with the name; names that cannot be
    found map to 0 (with a warning printed).
    """
    source_lines = inspect.getsourcelines(module)[0]
    numbers = {}
    for name in functions:
        needle = searchstr.format(name)
        try:
            numbers[name] = source_lines.index(needle) + 1
        except ValueError:
            print(r'Can not find `{}`'.format(needle))
            numbers[name] = 0
    return numbers
|
Returns a dictionary which maps function names to line numbers .
|
1,193 |
# Formats a cvloop.functions docstring (plus, for classes, the __init__
# docstring appended after a SEPARATOR marker) into markdown-ish lines for a
# notebook cell.  Walks the lines with two state flags: argblock is set once
# an 'Args:' line is seen, returnblock once 'Returns:' is seen; Args entries
# ('name: description') become bullet items only when the module-level
# GENERATE_ARGS flag is on, while Returns content is passed through.
# NOTE(review): the elif chain is order-dependent (SEPARATOR before
# Args/Returns before the flag-based cases) — left byte-identical.
def format_doc ( fun ) : SEPARATOR = '=============================' func = cvloop . functions . __dict__ [ fun ] doc_lines = [ '{}' . format ( l ) . strip ( ) for l in func . __doc__ . split ( '\n' ) ] if hasattr ( func , '__init__' ) : doc_lines . append ( SEPARATOR ) doc_lines += [ '{}' . format ( l ) . strip ( ) for l in func . __init__ . __doc__ . split ( '\n' ) ] mod_lines = [ ] argblock = False returnblock = False for line in doc_lines : if line == SEPARATOR : mod_lines . append ( '\n#### `{}.__init__(...)`:\n\n' . format ( fun ) ) elif 'Args:' in line : argblock = True if GENERATE_ARGS : mod_lines . append ( '**{}**\n' . format ( line ) ) elif 'Returns:' in line : returnblock = True mod_lines . append ( '\n**{}**' . format ( line ) ) elif not argblock and not returnblock : mod_lines . append ( '{}\n' . format ( line ) ) elif argblock and not returnblock and ':' in line : if GENERATE_ARGS : mod_lines . append ( '- *{}:* {}\n' . format ( * line . split ( ':' ) ) ) elif returnblock : mod_lines . append ( line ) else : mod_lines . append ( '{}\n' . format ( line ) ) return mod_lines
|
Formats the documentation in a nicer way and for notebook cells .
|
1,194 |
# Entry point: builds the cvloop.functions example notebook.  Starts from a
# skeleton notebook dict (nbformat 4) with an intro markdown cell, then for
# every class and function declared in cvloop.functions appends a
# description cell (linking to its source line) and a code cell, and finally
# writes the notebook as JSON to the path given in sys.argv[1].
# NOTE(review): assumes sys.argv[1] is provided; the large literal dict is
# data the notebook format depends on — left byte-identical.
def main ( ) : notebook = { 'cells' : [ { 'cell_type' : 'markdown' , 'metadata' : { } , 'source' : [ '# cvloop functions\n\n' , 'This notebook shows an overview over all cvloop ' , 'functions provided in the [`cvloop.functions` module](' , 'https://github.com/shoeffner/cvloop/blob/' , 'develop/cvloop/functions.py).' ] } , ] , 'nbformat' : 4 , 'nbformat_minor' : 1 , 'metadata' : { 'language_info' : { 'codemirror_mode' : { 'name' : 'ipython' , 'version' : 3 } , 'file_extension' : '.py' , 'mimetype' : 'text/x-python' , 'name' : 'python' , 'nbconvert_exporter' : 'python' , 'pygments_lexer' : 'ipython3' , 'version' : '3.5.1+' } } } classes = list_classes ( 'cvloop.functions' ) functions = list_functions ( 'cvloop.functions' ) line_numbers_cls = get_linenumbers ( classes , cvloop . functions , 'class {}:\n' ) line_numbers = get_linenumbers ( functions , cvloop . functions ) for cls in classes : line_number = line_numbers_cls [ cls ] notebook [ 'cells' ] . append ( create_description_cell ( cls , line_number ) ) notebook [ 'cells' ] . append ( create_code_cell ( cls , isclass = True ) ) for func in functions : line_number = line_numbers [ func ] notebook [ 'cells' ] . append ( create_description_cell ( func , line_number ) ) notebook [ 'cells' ] . append ( create_code_cell ( func ) ) with open ( sys . argv [ 1 ] , 'w' ) as nfile : json . dump ( notebook , nfile , indent = 4 )
|
Main function creates the cvloop . functions example notebook .
|
1,195 |
def prepare_axes(axes, title, size, cmap=None):
    """Prepares an axes object for clean plotting.

    Sets limits/aspect, hides the axis, titles the plot (appending the
    cmap name when *cmap* is a string), and attaches an AxesImage seeded
    with random data.  Returns the AxesImage, or None when *axes* is None.
    """
    if axes is None:
        return None
    height, width = size[0], size[1]
    axes.set_xlim([0, width])
    axes.set_ylim([height, 0])
    axes.set_aspect('equal')
    axes.axis('off')
    if isinstance(cmap, str):
        title = '{} (cmap: {})'.format(title, cmap)
    axes.set_title(title)
    axes_image = image.AxesImage(axes, cmap=cmap,
                                 extent=(0, width, height, 0))
    # Seed with random noise so the image object has valid data to show.
    axes_image.set_data(np.random.random((height, width, 3)))
    axes.add_image(axes_image)
    return axes_image
|
Prepares an axes object for clean plotting .
|
1,196 |
def connect_event_handlers(self):
    """Connects the close and pause event handlers to the figure canvas."""
    canvas = self.figure.canvas
    canvas.mpl_connect('close_event', self.evt_release)
    canvas.mpl_connect('pause_event', self.evt_toggle_pause)
|
Connects event handlers to the figure .
|
1,197 |
def evt_toggle_pause(self, *args):
    """Pauses and resumes the video source.

    A None timer on the event source means it is currently stopped, so
    the source is started; otherwise it is stopped.
    """
    stopped = self.event_source._timer is None
    if stopped:
        self.event_source.start()
    else:
        self.event_source.stop()
|
Pauses and resumes the video source .
|
1,198 |
def print_info(self, capture):
    """Prints information about the unprocessed image.

    Reads one frame from *capture* (bumping frame_offset) and reports
    its dimensions, channel count, value range and dtype; prints a
    fallback message when no frame can be read.
    """
    self.frame_offset += 1
    success, frame = capture.read()
    if not success:
        print('No source found.')
        return
    channels = frame.shape[2] if len(frame.shape) > 2 else 1
    print('Capture Information')
    print('\tDimensions (HxW): {}x{}'.format(*frame.shape[0:2]))
    print('\tColor channels: {}'.format(channels))
    print('\tColor range: {}-{}'.format(np.min(frame), np.max(frame)))
    print('\tdtype: {}'.format(frame.dtype))
|
Prints information about the unprocessed image .
|
1,199 |
def determine_size(self, capture):
    """Determines the (height, width) of the image source.

    Prefers the capture's get() properties; otherwise reads one frame
    (bumping frame_offset) and uses its shape.  Falls back to 480x640
    when no size can be determined.

    Bug fix: the frame-reading fallback ran unconditionally, so a falsy
    *capture* (e.g. None) crashed with AttributeError on capture.read()
    instead of returning the default size; it is now guarded.
    """
    width = 640
    height = 480
    if capture and hasattr(capture, 'get'):
        width = capture.get(cv2.CAP_PROP_FRAME_WIDTH)
        height = capture.get(cv2.CAP_PROP_FRAME_HEIGHT)
    elif capture:
        self.frame_offset += 1
        ret, frame = capture.read()
        if ret:
            width = frame.shape[1]
            height = frame.shape[0]
    return (int(height), int(width))
|
Determines the height and width of the image source .
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.