Dataset columns: idx ( int64 , 0 to 63k ) , question ( string , 61 to 4.03k characters ) , target ( string , 6 to 1.23k characters )
2,000
def plot_log_histogram ( df , palette , title , histnorm = "" ) : data = [ go . Histogram ( x = np . log10 ( df . loc [ df [ "dataset" ] == d , "lengths" ] ) , opacity = 0.4 , name = d , histnorm = histnorm , marker = dict ( color = c ) ) for d , c in zip ( df [ "dataset" ] . unique ( ) , palette ) ] xtickvals = [ 10 ** i for i in range ( 10 ) if not 10 ** i > 10 * np . amax ( df [ "lengths" ] ) ] html = plotly . offline . plot ( { "data" : data , "layout" : go . Layout ( barmode = 'overlay' , title = title , xaxis = dict ( tickvals = np . log10 ( xtickvals ) , ticktext = xtickvals ) ) } , output_type = "div" , show_link = False ) fig = go . Figure ( { "data" : data , "layout" : go . Layout ( barmode = 'overlay' , title = title , xaxis = dict ( tickvals = np . log10 ( xtickvals ) , ticktext = xtickvals ) ) } ) return html , fig
Plot overlaid histograms with log transformation of lengths . Return both html and fig for png .
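A minimal usage sketch (my addition, not part of the dataset): it assumes a pandas DataFrame with "dataset" and "lengths" columns plus the numpy/plotly imports the function already relies on.

import numpy as np
import pandas as pd
import plotly
import plotly.graph_objs as go

# hypothetical toy data: two datasets with different length scales
df = pd.DataFrame({
    "dataset": ["a"] * 1000 + ["b"] * 1000,
    "lengths": np.concatenate([np.random.lognormal(7, 1, 1000),
                               np.random.lognormal(8, 1, 1000)]),
})
html, fig = plot_log_histogram(df, palette=["#1f77b4", "#ff7f0e"], title="Read lengths")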
2,001
def get_file ( db_folder , file_name ) : if not os . path . isdir ( db_folder ) : return file_name = file_name . lower ( ) . strip ( ) for cand_name in os . listdir ( db_folder ) : if cand_name . lower ( ) . strip ( ) == file_name : return os . path . join ( db_folder , cand_name )
Glob for the poor : case - insensitive lookup of a file name inside a folder .
2,002
def parse ( db_folder , out_folder ) : stru_dat = get_file ( db_folder , 'CroStru.dat' ) data_tad = get_file ( db_folder , 'CroBank.tad' ) data_dat = get_file ( db_folder , 'CroBank.dat' ) if None in [ stru_dat , data_tad , data_dat ] : raise CronosException ( "Not all database files are present." ) meta , tables = parse_structure ( stru_dat ) for table in tables : if table [ 'abbr' ] == 'FL' and table [ 'name' ] == 'Files' : continue fh = open ( make_csv_file_name ( meta , table , out_folder ) , 'w' ) columns = table . get ( 'columns' ) writer = csv . writer ( fh ) writer . writerow ( [ encode_cell ( c [ 'name' ] ) for c in columns ] ) for row in parse_data ( data_tad , data_dat , table . get ( 'id' ) , columns ) : writer . writerow ( [ encode_cell ( c ) for c in row ] ) fh . close ( )
Parse a cronos database .
2,003
def encode1 ( self ) : data_uri = b64encode ( open ( self . path , 'rb' ) . read ( ) ) . decode ( 'utf-8' ) . replace ( '\n' , '' ) return '<img src="data:image/png;base64,{0}">' . format ( data_uri )
Return the base64 encoding of the figure file inserted in an html image tag .
2,004
def encode2 ( self ) : buf = BytesIO ( ) self . fig . savefig ( buf , format = 'png' , bbox_inches = 'tight' , dpi = 100 ) buf . seek ( 0 ) string = b64encode ( buf . read ( ) ) return '<img src="data:image/png;base64,{0}">' . format ( urlquote ( string ) )
Return the base64 encoding of the fig attribute inserted in an html image tag .
2,005
def loadFromDisk ( self , calculation ) : suffixes = { 'Isotropic' : 'iso' , 'Circular Dichroism (R-L)' : 'cd' , 'Right Polarized (R)' : 'r' , 'Left Polarized (L)' : 'l' , 'Linear Dichroism (V-H)' : 'ld' , 'Vertical Polarized (V)' : 'v' , 'Horizontal Polarized (H)' : 'h' , } self . raw = list ( ) for spectrumName in self . toPlot : suffix = suffixes [ spectrumName ] path = '{}_{}.spec' . format ( calculation . baseName , suffix ) try : data = np . loadtxt ( path , skiprows = 5 ) except ( OSError , IOError ) as e : raise e rows , columns = data . shape if calculation . experiment in [ 'XAS' , 'XPS' , 'XES' ] : xMin = calculation . xMin xMax = calculation . xMax xNPoints = calculation . xNPoints if calculation . experiment == 'XES' : x = np . linspace ( xMin , xMax , xNPoints + 1 ) x = x [ : : - 1 ] y = data [ : , 2 ] y = y / np . abs ( y . max ( ) ) else : x = np . linspace ( xMin , xMax , xNPoints + 1 ) y = data [ : , 2 : : 2 ] . flatten ( ) spectrum = Spectrum1D ( x , y ) spectrum . name = spectrumName if len ( suffix ) > 2 : spectrum . shortName = suffix . title ( ) else : spectrum . shortName = suffix . upper ( ) if calculation . experiment in [ 'XAS' , ] : spectrum . xLabel = 'Absorption Energy (eV)' elif calculation . experiment in [ 'XPS' , ] : spectrum . xLabel = 'Binding Energy (eV)' elif calculation . experiment in [ 'XES' , ] : spectrum . xLabel = 'Emission Energy (eV)' spectrum . yLabel = 'Intensity (a.u.)' self . broadenings = { 'gaussian' : ( calculation . xGaussian , ) , } else : xMin = calculation . xMin xMax = calculation . xMax xNPoints = calculation . xNPoints yMin = calculation . yMin yMax = calculation . yMax yNPoints = calculation . yNPoints x = np . linspace ( xMin , xMax , xNPoints + 1 ) y = np . linspace ( yMin , yMax , yNPoints + 1 ) z = data [ : , 2 : : 2 ] spectrum = Spectrum2D ( x , y , z ) spectrum . name = spectrumName if len ( suffix ) > 2 : spectrum . shortName = suffix . title ( ) else : spectrum . shortName = suffix . upper ( ) spectrum . xLabel = 'Incident Energy (eV)' spectrum . yLabel = 'Energy Transfer (eV)' self . broadenings = { 'gaussian' : ( calculation . xGaussian , calculation . yGaussian ) , } self . raw . append ( spectrum ) self . process ( )
Read the spectra from the files generated by Quanty and store them as a list of spectrum objects .
2,006
def updateResultsView ( self , index ) : flags = ( QItemSelectionModel . Clear | QItemSelectionModel . Rows | QItemSelectionModel . Select ) self . resultsView . selectionModel ( ) . select ( index , flags ) self . resultsView . resizeColumnsToContents ( ) self . resultsView . setFocus ( )
Update the selection to contain only the result specified by the index . This should be the last index of the model . Finally update the context menu .
2,007
def updatePlotWidget ( self ) : pw = self . getPlotWidget ( ) pw . reset ( ) results = self . resultsModel . getCheckedItems ( ) for result in results : if isinstance ( result , ExperimentalData ) : spectrum = result . spectra [ 'Expt' ] spectrum . legend = '{}-{}' . format ( result . index , 'Expt' ) spectrum . xLabel = 'X' spectrum . yLabel = 'Y' spectrum . plot ( plotWidget = pw ) else : if len ( results ) > 1 and result . experiment in [ 'RIXS' , ] : continue for spectrum in result . spectra . processed : spectrum . legend = '{}-{}' . format ( result . index , spectrum . shortName ) if spectrum . name in result . spectra . toPlotChecked : spectrum . plot ( plotWidget = pw )
Updating the plotting widget should not require any information about the current state of the widget .
2,008
def row ( self ) : if self . parent is not None : children = self . parent . getChildren ( ) return children . index ( self ) else : return 0
Return the row of this item within its parent .
2,009
def parent ( self , index ) : childItem = self . item ( index ) parentItem = childItem . parent if parentItem == self . rootItem : parentIndex = QModelIndex ( ) else : parentIndex = self . createIndex ( parentItem . row ( ) , 0 , parentItem ) return parentIndex
Return the index of the parent for a given index of the child . Unfortunately the name of the method has to be parent even though a more verbose name like parentIndex would avoid confusion about what parent actually is - an index or an item .
2,010
def setData ( self , index , value , role ) : if not index . isValid ( ) : return False item = self . item ( index ) column = index . column ( ) if role == Qt . EditRole : items = list ( ) items . append ( item ) if self . sync : parentIndex = self . parent ( index ) for sibling in self . siblings ( parentIndex ) : siblingNode = self . item ( sibling ) for child in siblingNode . children : if child . getItemData ( 0 ) == item . getItemData ( 0 ) : items . append ( child ) for item in items : columnData = str ( item . getItemData ( column ) ) if columnData and columnData != value : try : item . setItemData ( column , float ( value ) ) except ValueError : return False else : return False elif role == Qt . CheckStateRole : item . setCheckState ( value ) if value == Qt . Unchecked or value == Qt . Checked : state = value self . itemCheckStateChanged . emit ( index , state ) self . dataChanged . emit ( index , index ) return True
Set the role data for the item at index to value .
2,011
def flags ( self , index ) : activeFlags = ( Qt . ItemIsEnabled | Qt . ItemIsSelectable | Qt . ItemIsUserCheckable ) item = self . item ( index ) column = index . column ( ) if column > 0 and not item . childCount ( ) : activeFlags = activeFlags | Qt . ItemIsEditable return activeFlags
Return the active flags for the given index . Add editable flag to items other than the first column .
2,012
def _getModelData ( self , modelData , parentItem = None ) : if parentItem is None : parentItem = self . rootItem for item in parentItem . getChildren ( ) : key = item . getItemData ( 0 ) if item . childCount ( ) : modelData [ key ] = odict ( ) self . _getModelData ( modelData [ key ] , item ) else : if isinstance ( item . getItemData ( 2 ) , float ) : modelData [ key ] = [ item . getItemData ( 1 ) , item . getItemData ( 2 ) ] else : modelData [ key ] = item . getItemData ( 1 )
Recursively collect the data contained in the model into modelData .
2,013
def _contextMenu ( self , pos ) : menu = QMenu ( self ) menu . addAction ( self . _zoomBackAction ) plotArea = self . getWidgetHandle ( ) globalPosition = plotArea . mapToGlobal ( pos ) menu . exec_ ( globalPosition )
Handle plot area customContextMenuRequested signal .
2,014
def convolve_fft ( array , kernel ) : array = np . asarray ( array , dtype = np . complex128 ) kernel = np . asarray ( kernel , dtype = np . complex128 ) if array . ndim != kernel . ndim : raise ValueError ( "Image and kernel must have same number of " "dimensions" ) array_shape = array . shape kernel_shape = kernel . shape new_shape = np . array ( array_shape ) + np . array ( kernel_shape ) array_slices = [ ] kernel_slices = [ ] for ( new_dimsize , array_dimsize , kernel_dimsize ) in zip ( new_shape , array_shape , kernel_shape ) : center = new_dimsize - ( new_dimsize + 1 ) // 2 array_slices += [ slice ( center - array_dimsize // 2 , center + ( array_dimsize + 1 ) // 2 ) ] kernel_slices += [ slice ( center - kernel_dimsize // 2 , center + ( kernel_dimsize + 1 ) // 2 ) ] array_slices = tuple ( array_slices ) kernel_slices = tuple ( kernel_slices ) if not np . all ( new_shape == array_shape ) : big_array = np . zeros ( new_shape , dtype = np . complex128 ) big_array [ array_slices ] = array else : big_array = array if not np . all ( new_shape == kernel_shape ) : big_kernel = np . zeros ( new_shape , dtype = np . complex128 ) big_kernel [ kernel_slices ] = kernel else : big_kernel = kernel array_fft = np . fft . fftn ( big_array ) kernel_fft = np . fft . fftn ( np . fft . ifftshift ( big_kernel ) ) rifft = np . fft . ifftn ( array_fft * kernel_fft ) return rifft [ array_slices ] . real
Convolve an array with a kernel using FFT . Implementation based on the convolve_fft function from astropy .
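A quick sanity check (my own sketch, not from the dataset): convolving a unit impulse should reproduce the kernel, centered at the impulse.

import numpy as np
a = np.zeros(9)
a[4] = 1.0                      # unit impulse at the center
k = np.array([1.0, 2.0, 1.0])   # small smoothing kernel
out = convolve_fft(a, k)        # expected ~ [0, 0, 0, 1, 2, 1, 0, 0, 0]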
2,015
def diagonalize ( self ) : self . eigvals , self . eigvecs = np . linalg . eig ( ( self . tensor . transpose ( ) + self . tensor ) / 2.0 ) self . eigvals = np . diag ( np . dot ( np . dot ( self . eigvecs . transpose ( ) , self . tensor ) , self . eigvecs ) )
Diagonalize the tensor .
2,016
def _skip_lines ( self , n ) : for i in range ( n ) : self . line = next ( self . output ) return self . line
Skip a number of lines from the output .
2,017
def _parse_tensor ( self , indices = False ) : if indices : self . line = self . _skip_lines ( 1 ) tensor = np . zeros ( ( 3 , 3 ) ) for i in range ( 3 ) : tokens = self . line . split ( ) if indices : tensor [ i ] [ 0 ] = float ( tokens [ 1 ] ) tensor [ i ] [ 1 ] = float ( tokens [ 2 ] ) tensor [ i ] [ 2 ] = float ( tokens [ 3 ] ) else : tensor [ i ] [ 0 ] = float ( tokens [ 0 ] ) tensor [ i ] [ 1 ] = float ( tokens [ 1 ] ) tensor [ i ] [ 2 ] = float ( tokens [ 2 ] ) self . line = self . _skip_lines ( 1 ) return tensor
Parse a tensor .
2,018
def __validate ( self , target , value , oldvalue , initiator ) : if value == oldvalue : return value if self . allow_null and value is None : return value if self . check_value ( value ) : return value else : if self . throw_exception : if self . message : self . message = self . message . format ( field = self . field , new_value = value , old_value = oldvalue , key = initiator . key ) raise ValidateError ( self . message ) else : raise ValidateError ( 'Value %s from column %s is not valid' % ( value , initiator . key ) ) return oldvalue
Method executed when the event set is triggered .
2,019
def __create_event ( self ) : if not event . contains ( self . field , 'set' , self . __validate ) : event . listen ( self . field , 'set' , self . __validate , retval = True )
Create an SQLAlchemy event listener for the set event on a particular column .
2,020
def stop ( self ) : if event . contains ( self . field , 'set' , self . __validate ) : event . remove ( self . field , 'set' , self . __validate )
Remove the listener to stop the validation
2,021
def start ( self ) : if not event . contains ( self . field , 'set' , self . __validate ) : self . __create_event ( )
Restart the listener
2,022
def nhapDaiHan ( self , cucSo , gioiTinh ) : for cung in self . thapNhiCung : khoangCach = khoangCachCung ( cung . cungSo , self . cungMenh , gioiTinh ) cung . daiHan ( cucSo + khoangCach * 10 ) return self
Assign the major fortune periods ( đại hạn ) to the twelve palaces .
2,023
def nt2aa ( ntseq ) : nt2num = { 'A' : 0 , 'C' : 1 , 'G' : 2 , 'T' : 3 , 'a' : 0 , 'c' : 1 , 'g' : 2 , 't' : 3 } aa_dict = 'KQE*TPASRRG*ILVLNHDYTPASSRGCILVFKQE*TPASRRGWMLVLNHDYTPASSRGCILVF' return '' . join ( [ aa_dict [ nt2num [ ntseq [ i ] ] + 4 * nt2num [ ntseq [ i + 1 ] ] + 16 * nt2num [ ntseq [ i + 2 ] ] ] for i in range ( 0 , len ( ntseq ) , 3 ) if i + 2 < len ( ntseq ) ] )
Translate a nucleotide sequence into an amino acid sequence .
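A worked example (my addition): each codon indexes into the 64-character lookup string as nt1 + 4*nt2 + 16*nt3.

nt2aa('ATGGCC')   # 'MA'  (ATG -> Met: 0 + 4*3 + 16*2 = 44 -> 'M'; GCC -> Ala)
nt2aa('TGA')      # '*'   (stop codon)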
2,024
def nt2codon_rep ( ntseq ) : nt2num = { 'A' : 0 , 'C' : 1 , 'G' : 2 , 'T' : 3 , 'a' : 0 , 'c' : 1 , 'g' : 2 , 't' : 3 } codon_rep = '\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9\xaa\xab\xac\xad\xae\xaf\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf' return '' . join ( [ codon_rep [ nt2num [ ntseq [ i ] ] + 4 * nt2num [ ntseq [ i + 1 ] ] + 16 * nt2num [ ntseq [ i + 2 ] ] ] for i in range ( 0 , len ( ntseq ) , 3 ) if i + 2 < len ( ntseq ) ] )
Represent nucleotide sequence by sequence of codon symbols .
2,025
def cutR_seq ( seq , cutR , max_palindrome ) : complement_dict = { 'A' : 'T' , 'C' : 'G' , 'G' : 'C' , 'T' : 'A' } if cutR < max_palindrome : seq = seq + '' . join ( [ complement_dict [ nt ] for nt in seq [ cutR - max_palindrome : ] ] [ : : - 1 ] ) else : seq = seq [ : len ( seq ) - cutR + max_palindrome ] return seq
Cut genomic sequence from the right .
2,026
def cutL_seq ( seq , cutL , max_palindrome ) : complement_dict = { 'A' : 'T' , 'C' : 'G' , 'G' : 'C' , 'T' : 'A' } if cutL < max_palindrome : seq = '' . join ( [ complement_dict [ nt ] for nt in seq [ : max_palindrome - cutL ] ] [ : : - 1 ] ) + seq else : seq = seq [ cutL - max_palindrome : ] return seq
Cut genomic sequence from the left .
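Worked examples for both cut functions (my addition): when the cut is smaller than the maximum palindrome length, the reverse complement of the flanking nucleotides is added; otherwise the sequence is trimmed.

cutR_seq('CACAGT', 0, 2)   # 'CACAGTAC'  (appends the reverse complement of 'GT')
cutR_seq('CACAGT', 3, 2)   # 'CACAG'     (net deletion of 3 - 2 = 1 nucleotide)
cutL_seq('CACAGT', 0, 2)   # 'TGCACAGT'  (prepends the reverse complement of 'CA')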
2,027
def generate_sub_codons_left ( codons_dict ) : sub_codons_left = { } for aa in codons_dict . keys ( ) : sub_codons_left [ aa ] = list ( set ( [ x [ 0 ] for x in codons_dict [ aa ] ] + [ x [ : 2 ] for x in codons_dict [ aa ] ] ) ) return sub_codons_left
Generate the sub_codons_left dictionary of codon prefixes .
2,028
def calc_steady_state_dist ( R ) : w , v = np . linalg . eig ( R ) for i in range ( 4 ) : if np . abs ( w [ i ] - 1 ) < 1e-8 : return np . real ( v [ : , i ] / np . sum ( v [ : , i ] ) ) return - 1
Calculate the steady state distribution of a 4 - state Markov transition matrix .
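A usage sketch (my addition, assuming the convention that R is column-stochastic, since np.linalg.eig returns right eigenvectors):

import numpy as np
R = np.array([[0.80, 0.20, 0.10, 0.10],
              [0.10, 0.60, 0.10, 0.10],
              [0.05, 0.10, 0.70, 0.10],
              [0.05, 0.10, 0.10, 0.70]])   # each column sums to 1
pi = calc_steady_state_dist(R)
assert np.allclose(R.dot(pi), pi) and np.isclose(pi.sum(), 1.0)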
2,029
def rnd_ins_seq ( ins_len , C_R , CP_first_nt ) : nt2num = { 'A' : 0 , 'C' : 1 , 'G' : 2 , 'T' : 3 } num2nt = 'ACGT' if ins_len == 0 : return '' seq = num2nt [ CP_first_nt . searchsorted ( np . random . random ( ) ) ] ins_len += - 1 while ins_len > 0 : seq += num2nt [ C_R [ nt2num [ seq [ - 1 ] ] , : ] . searchsorted ( np . random . random ( ) ) ] ins_len += - 1 return seq
Generate a random insertion nucleotide sequence of length ins_len .
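A usage sketch (my addition): CP_first_nt is a cumulative distribution over the first nucleotide and C_R holds cumulative transition rows indexed by the previous nucleotide, both sampled via searchsorted; the uniform values here are illustrative only.

import numpy as np
CP_first_nt = np.cumsum([0.25, 0.25, 0.25, 0.25])   # cumulative first-nt probabilities
C_R = np.tile(CP_first_nt, (4, 1))                  # cumulative transition rows
rnd_ins_seq(5, C_R, CP_first_nt)                    # e.g. 'GGGGC' -- a random 5-nt insertion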
2,030
def update_rates ( self ) : source , created = RateSource . objects . get_or_create ( name = self . get_source_name ( ) ) source . base_currency = self . get_base_currency ( ) source . save ( ) for currency , value in six . iteritems ( self . get_rates ( ) ) : try : rate = Rate . objects . get ( source = source , currency = currency ) except Rate . DoesNotExist : rate = Rate ( source = source , currency = currency ) rate . value = value rate . save ( )
Creates or updates rates for a source
2,031
def showDosHeaderData ( peInstance ) : dosFields = peInstance . dosHeader . getFields ( ) print "[+] IMAGE_DOS_HEADER values:\n" for field in dosFields : if isinstance ( dosFields [ field ] , datatypes . Array ) : print "%s = %d elements" % ( field , len ( dosFields [ field ] ) ) counter = 0 for element in dosFields [ field ] : print "[%d] 0x%08x" % ( counter , element . value ) counter += 1 else : print "%s = 0x%08x" % ( field , dosFields [ field ] . value )
Prints IMAGE_DOS_HEADER fields .
2,032
def showFileHeaderData ( peInstance ) : fileHeaderFields = peInstance . ntHeaders . fileHeader . getFields ( ) print "[+] IMAGE_FILE_HEADER values:\n" for field in fileHeaderFields : print "%s = 0x%08x" % ( field , fileHeaderFields [ field ] . value )
Prints IMAGE_FILE_HEADER fields .
2,033
def showOptionalHeaderData ( peInstance ) : print "[+] IMAGE_OPTIONAL_HEADER:\n" ohFields = peInstance . ntHeaders . optionalHeader . getFields ( ) for field in ohFields : if not isinstance ( ohFields [ field ] , datadirs . DataDirectory ) : print "%s = 0x%08x" % ( field , ohFields [ field ] . value )
Prints IMAGE_OPTIONAL_HEADER fields .
2,034
def showDataDirectoriesData ( peInstance ) : print "[+] Data directories:\n" dirs = peInstance . ntHeaders . optionalHeader . dataDirectory counter = 1 for dir in dirs : print "[%d] %s - RVA: 0x%08x - Size: 0x%08x" % ( counter , dir . name . value , dir . rva . value , dir . size . value ) counter += 1
Prints the DATA_DIRECTORY fields .
2,035
def showSectionsHeaders ( peInstance ) : print "[+] Sections information:\n" print "Number of sections: %d" % peInstance . ntHeaders . fileHeader . numberOfSections . value for section in peInstance . sectionHeaders : fields = section . getFields ( ) for field in fields : if isinstance ( fields [ field ] , datatypes . String ) : fmt = "%s = %s" else : fmt = "%s = 0x%08x" print fmt % ( field , fields [ field ] . value ) print "\n"
Prints IMAGE_SECTION_HEADER for every section present in the file .
2,036
def showImports ( peInstance ) : iidEntries = peInstance . ntHeaders . optionalHeader . dataDirectory [ consts . IMPORT_DIRECTORY ] . info if iidEntries : for iidEntry in iidEntries : fields = iidEntry . getFields ( ) print "module: %s" % iidEntry . metaData . moduleName . value for field in fields : print "%s -> %x" % ( field , fields [ field ] . value ) for iatEntry in iidEntry . iat : fields = iatEntry . getFields ( ) for field in fields : print "%s - %r" % ( field , fields [ field ] . value ) print "\n" else : print "The file does not have imported functions."
Shows imports information .
2,037
def showExports ( peInstance ) : exports = peInstance . ntHeaders . optionalHeader . dataDirectory [ consts . EXPORT_DIRECTORY ] . info if exports : exp_fields = exports . getFields ( ) for field in exp_fields : print "%s -> %x" % ( field , exp_fields [ field ] . value ) for entry in exports . exportTable : entry_fields = entry . getFields ( ) for field in entry_fields : print "%s -> %r" % ( field , entry_fields [ field ] . value ) else : print "The file does not have exported functions."
Shows exports information .
2,038
def getFields ( self ) : d = { } for i in self . _attrsList : key = i value = getattr ( self , i ) d [ key ] = value return d
Returns all the class attributes .
2,039
def calc_euler_tour ( g , start , end ) : even_g = nx . subgraph ( g , g . nodes ( ) ) . copy ( ) if end in even_g . neighbors ( start ) : even_g . remove_edge ( start , end ) comps = list ( nx . connected_components ( even_g ) ) if len ( comps ) == 1 : trail = list ( nx . eulerian_circuit ( even_g , start ) ) trail . append ( ( start , end ) ) elif len ( comps ) == 2 : subg1 = nx . subgraph ( even_g , comps [ 0 ] ) subg2 = nx . subgraph ( even_g , comps [ 1 ] ) start_subg , end_subg = ( subg1 , subg2 ) if start in subg1 . nodes ( ) else ( subg2 , subg1 ) trail = list ( nx . eulerian_circuit ( start_subg , start ) ) + [ ( start , end ) ] + list ( nx . eulerian_circuit ( end_subg , end ) ) else : raise Exception ( 'Unknown edge case with connected components of size {0}:\n{1}' . format ( len ( comps ) , comps ) ) else : even_g . add_edge ( start , end ) circ = list ( nx . eulerian_circuit ( even_g , start ) ) try : trail_start = circ . index ( ( start , end ) ) except : trail_start = circ . index ( ( end , start ) ) trail = circ [ trail_start + 1 : ] + circ [ : trail_start ] return trail
Calculates an Euler tour over the graph g from vertex start to vertex end . Assumes start and end are odd - degree vertices and that there are no other odd - degree vertices .
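A usage sketch (my addition): on a path graph the two endpoints are the only odd-degree vertices, so the trail covers every edge exactly once.

import networkx as nx
g = nx.path_graph(4)                 # vertices 0 and 3 have odd degree
trail = calc_euler_tour(g, 0, 3)     # e.g. [(0, 1), (1, 2), (2, 3)]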
2,040
def greedy_trails ( subg , odds , verbose ) : if verbose : print ( '\tCreating edge map' ) edges = defaultdict ( list ) for x , y in subg . edges ( ) : edges [ x ] . append ( y ) edges [ y ] . append ( x ) if verbose : print ( '\tSelecting trails' ) trails = [ ] for x in subg . nodes ( ) : if verbose > 2 : print ( '\t\tNode {0}' . format ( x ) ) while len ( edges [ x ] ) > 0 : y = edges [ x ] [ 0 ] trail = [ ( x , y ) ] edges [ x ] . remove ( y ) edges [ y ] . remove ( x ) while len ( edges [ y ] ) > 0 : x = y y = edges [ y ] [ 0 ] trail . append ( ( x , y ) ) edges [ x ] . remove ( y ) edges [ y ] . remove ( x ) trails . append ( trail ) return trails
Greedily select trails by extending each trail as far as possible before starting a new one .
2,041
def decompose_graph ( g , heuristic = 'tour' , max_odds = 20 , verbose = 0 ) : subgraphs = [ nx . subgraph ( g , x ) . copy ( ) for x in nx . connected_components ( g ) ] chains = [ ] num_subgraphs = len ( subgraphs ) step = 0 while num_subgraphs > 0 : if verbose : print ( 'Step #{0} ({1} subgraphs)' . format ( step , num_subgraphs ) ) for i in range ( num_subgraphs - 1 , - 1 , - 1 ) : subg = subgraphs [ i ] odds = [ x for x , y in dict ( nx . degree ( subg ) ) . items ( ) if y % 2 == 1 ] if verbose > 1 : if len ( odds ) == 0 : print ( '\t\tNo odds' ) elif len ( odds ) == 2 : print ( '\t\tExactly 2 odds' ) else : print ( '\t\t{0} odds' . format ( len ( odds ) ) ) if len ( odds ) == 0 : trails = [ list ( nx . eulerian_circuit ( subg ) ) ] elif len ( odds ) == 2 : trails = [ calc_euler_tour ( subg , odds [ 0 ] , odds [ 1 ] ) ] elif heuristic in [ 'min' , 'max' , 'median' , 'any' ] : trails = select_odd_degree_trail ( subg , odds , max_odds , heuristic , verbose ) elif heuristic == 'random' : trails = select_random_trail ( subg , verbose ) elif heuristic == 'mindegree' : trails = select_min_degree_trail ( subg , max_odds , verbose ) elif heuristic == 'ones' : trails = select_single_edge_trails ( subg , verbose ) elif heuristic == 'tour' : trails = pseudo_tour_trails ( subg , odds , verbose ) elif heuristic == 'greedy' : trails = greedy_trails ( subg , odds , verbose ) if verbose > 2 : print ( '\t\tTrails: {0}' . format ( len ( trails ) ) ) for trail in trails : subg . remove_edges_from ( trail ) chains . extend ( trails ) if subg . number_of_edges ( ) == 0 : del subgraphs [ i ] else : comps = list ( nx . connected_components ( subg ) ) if len ( comps ) > 1 : for x in comps : compg = nx . subgraph ( subg , x ) if compg . number_of_edges ( ) > 0 : subgraphs . append ( compg ) del subgraphs [ i ] num_subgraphs = len ( subgraphs ) step += 1 return chains
Decompose a graph into a set of non - overlapping trails .
2,042
def lerp ( self , a , t ) : return self . plus ( a . minus ( self ) . times ( t ) )
Lerp . Linear interpolation from self to a
2,043
def interpolate ( self , other , t ) : return Vertex ( self . pos . lerp ( other . pos , t ) , self . normal . lerp ( other . normal , t ) )
Create a new vertex between this vertex and other by linearly interpolating all properties using a parameter of t . Subclasses should override this to interpolate additional properties .
2,044
def splitPolygon ( self , polygon , coplanarFront , coplanarBack , front , back ) : COPLANAR = 0 FRONT = 1 BACK = 2 SPANNING = 3 polygonType = 0 vertexLocs = [ ] numVertices = len ( polygon . vertices ) for i in range ( numVertices ) : t = self . normal . dot ( polygon . vertices [ i ] . pos ) - self . w loc = - 1 if t < - Plane . EPSILON : loc = BACK elif t > Plane . EPSILON : loc = FRONT else : loc = COPLANAR polygonType |= loc vertexLocs . append ( loc ) if polygonType == COPLANAR : normalDotPlaneNormal = self . normal . dot ( polygon . plane . normal ) if normalDotPlaneNormal > 0 : coplanarFront . append ( polygon ) else : coplanarBack . append ( polygon ) elif polygonType == FRONT : front . append ( polygon ) elif polygonType == BACK : back . append ( polygon ) elif polygonType == SPANNING : f = [ ] b = [ ] for i in range ( numVertices ) : j = ( i + 1 ) % numVertices ti = vertexLocs [ i ] tj = vertexLocs [ j ] vi = polygon . vertices [ i ] vj = polygon . vertices [ j ] if ti != BACK : f . append ( vi ) if ti != FRONT : if ti != BACK : b . append ( vi . clone ( ) ) else : b . append ( vi ) if ( ti | tj ) == SPANNING : t = ( self . w - self . normal . dot ( vi . pos ) ) / self . normal . dot ( vj . pos . minus ( vi . pos ) ) v = vi . interpolate ( vj , t ) f . append ( v ) b . append ( v . clone ( ) ) if len ( f ) >= 3 : front . append ( Polygon ( f , polygon . shared ) ) if len ( b ) >= 3 : back . append ( Polygon ( b , polygon . shared ) )
Split polygon by this plane if needed then put the polygon or polygon fragments in the appropriate lists . Coplanar polygons go into either coplanarFront or coplanarBack depending on their orientation with respect to this plane . Polygons in front or in back of this plane go into either front or back
2,045
def invert ( self ) : for poly in self . polygons : poly . flip ( ) self . plane . flip ( ) if self . front : self . front . invert ( ) if self . back : self . back . invert ( ) temp = self . front self . front = self . back self . back = temp
Convert solid space to empty space and empty space to solid space .
2,046
def clipPolygons ( self , polygons ) : if not self . plane : return polygons [ : ] front = [ ] back = [ ] for poly in polygons : self . plane . splitPolygon ( poly , front , back , front , back ) if self . front : front = self . front . clipPolygons ( front ) if self . back : back = self . back . clipPolygons ( back ) else : back = [ ] front . extend ( back ) return front
Recursively remove all polygons in polygons that are inside this BSP tree .
2,047
def clipTo ( self , bsp ) : self . polygons = bsp . clipPolygons ( self . polygons ) if self . front : self . front . clipTo ( bsp ) if self . back : self . back . clipTo ( bsp )
Remove all polygons in this BSP tree that are inside the other BSP tree bsp .
2,048
def allPolygons ( self ) : polygons = self . polygons [ : ] if self . front : polygons . extend ( self . front . allPolygons ( ) ) if self . back : polygons . extend ( self . back . allPolygons ( ) ) return polygons
Return a list of all polygons in this BSP tree .
2,049
def get_rate ( currency ) : source = get_rate_source ( ) try : return Rate . objects . get ( source = source , currency = currency ) . value except Rate . DoesNotExist : raise CurrencyConversionException ( "Rate for %s in %s do not exists. " "Please run python manage.py update_rates" % ( currency , source . name ) )
Returns the rate from the default currency to currency .
2,050
def get_rate_source ( ) : backend = money_rates_settings . DEFAULT_BACKEND ( ) try : return RateSource . objects . get ( name = backend . get_source_name ( ) ) except RateSource . DoesNotExist : raise CurrencyConversionException ( "Rate for %s source do not exists. " "Please run python manage.py update_rates" % backend . get_source_name ( ) )
Get the default Rate Source and return it .
2,051
def base_convert_money ( amount , currency_from , currency_to ) : source = get_rate_source ( ) if source . base_currency != currency_from : rate_from = get_rate ( currency_from ) else : rate_from = Decimal ( 1 ) rate_to = get_rate ( currency_to ) if isinstance ( amount , float ) : amount = Decimal ( amount ) . quantize ( Decimal ( '.000001' ) ) return ( ( amount / rate_from ) * rate_to ) . quantize ( Decimal ( "1.00" ) )
Convert amount from currency_from to currency_to
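A worked example of the arithmetic (my addition; the rates are hypothetical): with base currency USD and rates EUR = 0.90 and GBP = 0.80 per USD, converting 90 EUR to GBP divides out the source rate and multiplies by the target rate.

from decimal import Decimal
amount, rate_from, rate_to = Decimal('90'), Decimal('0.90'), Decimal('0.80')
((amount / rate_from) * rate_to).quantize(Decimal('1.00'))   # Decimal('80.00')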
2,052
def convert_money ( amount , currency_from , currency_to ) : new_amount = base_convert_money ( amount , currency_from , currency_to ) return moneyed . Money ( new_amount , currency_to )
Convert amount from currency_from to currency_to and return a Money instance of the converted amount .
2,053
def format_date ( format_string = None , datetime_obj = None ) : datetime_obj = datetime_obj or datetime . now ( ) if format_string is None : seconds = int ( datetime_obj . strftime ( "%s" ) ) milliseconds = datetime_obj . microsecond // 1000 return str ( seconds * 1000 + milliseconds ) else : formatter = SimpleDateFormat ( format_string ) return formatter . format_datetime ( datetime_obj )
Format a datetime object with a Java SimpleDateFormat - like string .
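A usage note (my addition): with no format string the function falls back to epoch milliseconds; note that strftime("%s") is platform-dependent and uses the local timezone.

from datetime import datetime
format_date(datetime_obj=datetime(2020, 1, 1))   # e.g. '1577836800000' (epoch ms, timezone-dependent)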
2,054
def allZero ( buffer ) : allZero = True for byte in buffer : if byte != "\x00" : allZero = False break return allZero
Determine whether every byte in the buffer is zero .
2,055
def readAlignedString ( self , align = 4 ) : s = self . readString ( ) r = align - len ( s ) % align while r : s += self . data [ self . offset ] self . offset += 1 r -= 1 return s . rstrip ( "\x00" )
Reads an ASCII string aligned to the next align - bytes boundary .
2,056
def readAt ( self , offset , size ) : if offset > self . length : if self . log : print "Warning: Trying to read: %d bytes - only %d bytes left" % ( size , self . length - self . offset ) offset = self . length - self . offset tmpOff = self . tell ( ) self . setOffset ( offset ) r = self . read ( size ) self . setOffset ( tmpOff ) return r
Reads as many bytes indicated in the size parameter at the specific offset .
2,057
def send ( self , message , channel_name = None , fail_silently = False , options = None ) : if channel_name is None : channels = self . settings [ "CHANNELS" ] else : try : channels = { "__selected__" : self . settings [ "CHANNELS" ] [ channel_name ] } except KeyError : raise Exception ( "channels does not exist %s" , channel_name ) for _ , config in channels . items ( ) : if "_backend" not in config : raise ImproperlyConfigured ( "Specify the backend class in the channel configuration" ) backend = self . _load_backend ( config [ "_backend" ] ) config = deepcopy ( config ) del config [ "_backend" ] channel = backend ( ** config ) channel . send ( message , fail_silently = fail_silently , options = options )
Send a notification to channels
2,058
def request ( self , method , path , params = None , headers = None , cookies = None , data = None , json = None , allow_redirects = None , timeout = None ) : headers = headers or { } timeout = timeout if timeout is not None else self . _timeout allow_redirects = allow_redirects if allow_redirects is not None else self . _allow_redirects if self . _keep_alive and self . __session is None : self . __session = requests . Session ( ) if self . __session is not None and not self . _use_cookies : self . __session . cookies . clear ( ) address = self . _bake_address ( path ) req_headers = copy . deepcopy ( self . _additional_headers ) req_headers . update ( headers ) response = http . request ( method , address , session = self . __session , params = params , headers = req_headers , cookies = cookies , data = data , json = json , allow_redirects = allow_redirects , timeout = timeout ) if self . _auto_assert_ok : response . assert_ok ( ) return response
Prepares and sends an HTTP request . Returns the HTTPResponse object .
2,059
def load_genomic_CDR3_anchor_pos_and_functionality ( anchor_pos_file_name ) : anchor_pos_and_functionality = { } anchor_pos_file = open ( anchor_pos_file_name , 'r' ) first_line = True for line in anchor_pos_file : if first_line : first_line = False continue split_line = line . split ( ',' ) split_line = [ x . strip ( ) for x in split_line ] anchor_pos_and_functionality [ split_line [ 0 ] ] = [ int ( split_line [ 1 ] ) , split_line [ 2 ] . strip ( ) . strip ( '()' ) ] return anchor_pos_and_functionality
Read anchor position and functionality from file .
2,060
def generate_cutV_genomic_CDR3_segs ( self ) : max_palindrome = self . max_delV_palindrome self . cutV_genomic_CDR3_segs = [ ] for CDR3_V_seg in [ x [ 1 ] for x in self . genV ] : if len ( CDR3_V_seg ) < max_palindrome : self . cutV_genomic_CDR3_segs += [ cutR_seq ( CDR3_V_seg , 0 , len ( CDR3_V_seg ) ) ] else : self . cutV_genomic_CDR3_segs += [ cutR_seq ( CDR3_V_seg , 0 , max_palindrome ) ]
Add palindromic inserted nucleotides to germline V sequences . The maximum number of palindromic insertions are appended to the germline V segments so that delV can index directly for number of nucleotides to delete from a segment . Sets the attribute cutV_genomic_CDR3_segs .
2,061
def generate_cutJ_genomic_CDR3_segs ( self ) : max_palindrome = self . max_delJ_palindrome self . cutJ_genomic_CDR3_segs = [ ] for CDR3_J_seg in [ x [ 1 ] for x in self . genJ ] : if len ( CDR3_J_seg ) < max_palindrome : self . cutJ_genomic_CDR3_segs += [ cutL_seq ( CDR3_J_seg , 0 , len ( CDR3_J_seg ) ) ] else : self . cutJ_genomic_CDR3_segs += [ cutL_seq ( CDR3_J_seg , 0 , max_palindrome ) ]
Add palindromic inserted nucleotides to germline J sequences . The maximum number of palindromic insertions are appended to the germline J segments so that delJ can index directly for number of nucleotides to delete from a segment . Sets the attribute cutJ_genomic_CDR3_segs .
2,062
def generate_cutD_genomic_CDR3_segs ( self ) : max_palindrome_L = self . max_delDl_palindrome max_palindrome_R = self . max_delDr_palindrome self . cutD_genomic_CDR3_segs = [ ] for CDR3_D_seg in [ x [ 1 ] for x in self . genD ] : if len ( CDR3_D_seg ) < min ( max_palindrome_L , max_palindrome_R ) : self . cutD_genomic_CDR3_segs += [ cutR_seq ( cutL_seq ( CDR3_D_seg , 0 , len ( CDR3_D_seg ) ) , 0 , len ( CDR3_D_seg ) ) ] else : self . cutD_genomic_CDR3_segs += [ cutR_seq ( cutL_seq ( CDR3_D_seg , 0 , max_palindrome_L ) , 0 , max_palindrome_R ) ]
Add palindromic inserted nucleotides to germline D sequences . The maximum number of palindromic insertions are appended to the germline D segments so that delDl and delDr can index directly for number of nucleotides to delete from a segment . Sets the attribute cutD_genomic_CDR3_segs .
2,063
def verify_and_fill_address_paths_from_bip32key ( address_paths , master_key , network ) : assert network , network wallet_obj = Wallet . deserialize ( master_key , network = network ) address_paths_cleaned = [ ] for address_path in address_paths : path = address_path [ 'path' ] input_address = address_path [ 'address' ] child_wallet = wallet_obj . get_child_for_path ( path ) if child_wallet . to_address ( ) != input_address : err_msg = 'Client Side Verification Fail for %s on %s:\n%s != %s' % ( path , master_key , child_wallet . to_address ( ) , input_address , ) raise Exception ( err_msg ) pubkeyhex = child_wallet . get_public_key_hex ( compressed = True ) server_pubkeyhex = address_path . get ( 'public' ) if server_pubkeyhex and server_pubkeyhex != pubkeyhex : err_msg = 'Client Side Verification Fail for %s on %s:\n%s != %s' % ( path , master_key , pubkeyhex , server_pubkeyhex , ) raise Exception ( err_msg ) address_path_cleaned = { 'pub_address' : input_address , 'path' : path , 'pubkeyhex' : pubkeyhex , } if child_wallet . private_key : privkeyhex = child_wallet . get_private_key_hex ( ) address_path_cleaned [ 'wif' ] = child_wallet . export_to_wif ( ) address_path_cleaned [ 'privkeyhex' ] = privkeyhex address_paths_cleaned . append ( address_path_cleaned ) return address_paths_cleaned
Takes address paths and verifies their accuracy client - side .
2,064
def run ( self , lam , initial_values = None ) : if initial_values is not None : if self . k == 0 and self . trails is not None : betas , zs , us = initial_values else : betas , us = initial_values else : if self . k == 0 and self . trails is not None : betas = [ np . zeros ( self . num_nodes , dtype = 'double' ) for _ in self . bins ] zs = [ np . zeros ( self . breakpoints [ - 1 ] , dtype = 'double' ) for _ in self . bins ] us = [ np . zeros ( self . breakpoints [ - 1 ] , dtype = 'double' ) for _ in self . bins ] else : betas = [ np . zeros ( self . num_nodes , dtype = 'double' ) for _ in self . bins ] us = [ np . zeros ( self . Dk . shape [ 0 ] , dtype = 'double' ) for _ in self . bins ] for j , ( left , mid , right , trials , successes ) in enumerate ( self . bins ) : if self . bins_allowed is not None and j not in self . bins_allowed : continue if self . verbose > 2 : print ( '\tBin #{0} [{1},{2},{3}]' . format ( j , left , mid , right ) ) beta = betas [ j ] u = us [ j ] if self . k == 0 and self . trails is not None : z = zs [ j ] self . graphfl ( len ( beta ) , trials , successes , self . ntrails , self . trails , self . breakpoints , lam , self . alpha , self . inflate , self . max_steps , self . converge , beta , z , u ) else : self . graphtf ( len ( beta ) , trials , successes , lam , self . Dk . shape [ 0 ] , self . Dk . shape [ 1 ] , self . Dk . nnz , self . Dk . row . astype ( 'int32' ) , self . Dk . col . astype ( 'int32' ) , self . Dk . data . astype ( 'double' ) , self . max_steps , self . converge , beta , u ) beta = np . clip ( beta , 1e-12 , 1 - 1e-12 ) betas [ j ] = - np . log ( 1. / beta - 1. ) return ( betas , zs , us ) if self . k == 0 and self . trails is not None else ( betas , us )
Run the graph - fused logit lasso with a fixed lambda penalty .
2,065
def data_log_likelihood ( self , successes , trials , beta ) : return binom . logpmf ( successes , trials , 1.0 / ( 1 + np . exp ( - beta ) ) ) . sum ( )
Calculates the log - likelihood of a Polya tree bin given the beta values .
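For reference (my addition): the per-bin term is the binomial log-pmf evaluated at the logistic transform of beta; a quick numeric check at beta = 0 (p = 0.5):

from scipy.stats import binom
binom.logpmf(7, 10, 0.5)   # approx -2.144, i.e. log(C(10,7) * 0.5**10)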
2,066
def spawn_worker ( params ) : setup_logging ( params ) log . info ( "Adding worker: idx=%s\tconcurrency=%s\tresults=%s" , params . worker_index , params . concurrency , params . report ) worker = Worker ( params ) worker . start ( ) worker . join ( )
This method has to be a module - level function .
2,067
def create_plateaus ( data , edges , plateau_size , plateau_vals , plateaus = None ) : nodes = set ( edges . keys ( ) ) if plateaus is None : plateaus = [ ] for i in range ( len ( plateau_vals ) ) : if len ( nodes ) == 0 : break node = np . random . choice ( list ( nodes ) ) nodes . remove ( node ) plateau = [ node ] available = set ( edges [ node ] ) & nodes while len ( nodes ) > 0 and len ( available ) > 0 and len ( plateau ) < plateau_size : node = np . random . choice ( list ( available ) ) plateau . append ( node ) available |= nodes & set ( edges [ node ] ) available . remove ( node ) nodes -= set ( plateau ) plateaus . append ( set ( plateau ) ) for p , v in zip ( plateaus , plateau_vals ) : data [ np . array ( list ( p ) , dtype = int ) ] = v return plateaus
Creates plateaus of constant value in the data .
2,068
def pretty_str ( p , decimal_places = 2 , print_zero = True , label_columns = False ) : if len ( p . shape ) == 1 : return vector_str ( p , decimal_places , print_zero ) if len ( p . shape ) == 2 : return matrix_str ( p , decimal_places , print_zero , label_columns ) raise Exception ( 'Invalid array with shape {0}' . format ( p . shape ) )
Pretty - print a matrix or vector .
2,069
def matrix_str ( p , decimal_places = 2 , print_zero = True , label_columns = False ) : return '[{0}]' . format ( "\n " . join ( [ ( str ( i ) if label_columns else '' ) + vector_str ( a , decimal_places , print_zero ) for i , a in enumerate ( p ) ] ) )
Pretty - print the matrix .
2,070
def vector_str ( p , decimal_places = 2 , print_zero = True ) : style = '{0:.' + str ( decimal_places ) + 'f}' return '[{0}]' . format ( ", " . join ( [ ' ' if not print_zero and a == 0 else style . format ( a ) for a in p ] ) )
Pretty - print the vector values .
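A usage sketch (my addition) showing the zero-suppression and decimal control:

import numpy as np
vector_str(np.array([1.0, 0.0, 2.345]), decimal_places=1, print_zero=False)
# '[1.0,  , 2.3]'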
2,071
def nearly_unique ( arr , rel_tol = 1e-4 , verbose = 0 ) : results = np . array ( [ arr [ 0 ] ] ) for x in arr : if np . abs ( results - x ) . min ( ) > rel_tol : results = np . append ( results , x ) return results
Heuristic method to return the unique values of a numpy array within some precision .
2,072
def get_delta ( D , k ) : if k < 0 : raise Exception ( 'k must be at least 0th order.' ) result = D for i in range ( k ) : result = D . T . dot ( result ) if i % 2 == 0 else D . dot ( result ) return result
Calculate the k - th order trend filtering matrix given the oriented edge incidence matrix and the value of k .
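A worked example (my addition): for the chain graph 0 - 1 - 2 with oriented incidence matrix D, k = 1 yields D^T D, the second-difference (graph Laplacian) operator.

import numpy as np
from scipy.sparse import coo_matrix
D = coo_matrix(np.array([[-1, 1, 0],
                         [0, -1, 1]]))   # edges (0,1) and (1,2)
get_delta(D, 1).toarray()
# [[ 1, -1,  0],
#  [-1,  2, -1],
#  [ 0, -1,  1]]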
2,073
def decompose_delta ( deltak ) : if not isspmatrix_coo ( deltak ) : deltak = coo_matrix ( deltak ) dk_rows = deltak . shape [ 0 ] dk_rowbreaks = np . cumsum ( deltak . getnnz ( 1 ) , dtype = "int32" ) dk_cols = deltak . col . astype ( 'int32' ) dk_vals = deltak . data . astype ( 'double' ) return dk_rows , dk_rowbreaks , dk_cols , dk_vals
Decomposes the k - th order trend filtering matrix into a c - compatible set of arrays .
2,074
def hasMZSignature ( self , rd ) : rd . setOffset ( 0 ) sign = rd . read ( 2 ) if sign == "MZ" : return True return False
Check for MZ signature .
2,075
def hasPESignature ( self , rd ) : rd . setOffset ( 0 ) e_lfanew_offset = unpack ( "<L" , rd . readAt ( 0x3c , 4 ) ) [ 0 ] sign = rd . readAt ( e_lfanew_offset , 2 ) if sign == "PE" : return True return False
Check for PE signature .
2,076
def validate ( self ) : if self . dosHeader . e_magic . value != consts . MZ_SIGNATURE : raise excep . PEException ( "Invalid MZ signature. Found %d instead of %d." % ( self . dosHeader . e_magic . value , consts . MZ_SIGNATURE ) ) if self . dosHeader . e_lfanew . value > len ( self ) : raise excep . PEException ( "Invalid e_lfanew value. Probably not a PE file." ) if self . ntHeaders . signature . value != consts . PE_SIGNATURE : raise excep . PEException ( "Invalid PE signature. Found %d instead of %d." % ( self . ntHeaders . signature . value , consts . PE_SIGNATURE ) ) if self . ntHeaders . optionalHeader . numberOfRvaAndSizes . value > 0x10 : print excep . PEWarning ( "Suspicious value for NumberOfRvaAndSizes: %d." % self . ntHeaders . optionalHeader . numberOfRvaAndSizes . value )
Performs validations over some fields of the PE structure to determine if the loaded file has a valid PE format .
2,077
def readFile ( self , pathToFile ) : fd = open ( pathToFile , "rb" ) data = fd . read ( ) fd . close ( ) return data
Returns data from a file .
2,078
def _getPaddingDataToSectionOffset ( self ) : start = self . _getPaddingToSectionOffset ( ) end = self . sectionHeaders [ 0 ] . pointerToRawData . value - start return self . _data [ start : start + end ]
Returns the data between the last section header and the beginning of data from the first section .
2,079
def _getSignature ( self , readDataInstance , dataDirectoryInstance ) : signature = "" if readDataInstance is not None and dataDirectoryInstance is not None : securityDirectory = dataDirectoryInstance [ consts . SECURITY_DIRECTORY ] if ( securityDirectory . rva . value and securityDirectory . size . value ) : readDataInstance . setOffset ( self . getOffsetFromRva ( securityDirectory . rva . value ) ) signature = readDataInstance . read ( securityDirectory . size . value ) else : raise excep . InstanceErrorException ( "ReadData instance or DataDirectory instance not specified." ) return signature
Returns the digital signature within a digitally signed PE file .
2,080
def _getOverlay ( self , readDataInstance , sectionHdrsInstance ) : if readDataInstance is not None and sectionHdrsInstance is not None : try : offset = sectionHdrsInstance [ - 1 ] . pointerToRawData . value + sectionHdrsInstance [ - 1 ] . sizeOfRawData . value readDataInstance . setOffset ( offset ) except excep . WrongOffsetValueException : if self . _verbose : print "It seems that the file has no overlay data." else : raise excep . InstanceErrorException ( "ReadData instance or SectionHeaders instance not specified." ) return readDataInstance . data [ readDataInstance . offset : ]
Returns the overlay data from the PE file .
2,081
def getOffsetFromRva ( self , rva ) : offset = - 1 s = self . getSectionByRva ( rva ) if s != offset : offset = ( rva - self . sectionHeaders [ s ] . virtualAddress . value ) + self . sectionHeaders [ s ] . pointerToRawData . value else : offset = rva return offset
Converts an RVA to an offset .
2,082
def getRvaFromOffset ( self , offset ) : rva = - 1 s = self . getSectionByOffset ( offset ) if s != - 1 : rva = ( offset - self . sectionHeaders [ s ] . pointerToRawData . value ) + self . sectionHeaders [ s ] . virtualAddress . value return rva
Converts an offset to an RVA .
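A worked example of the two conversions (my addition; the section values are hypothetical):

# Section with virtualAddress = 0x1000 and pointerToRawData = 0x400:
#   offset = (0x1234 - 0x1000) + 0x400  = 0x634
#   rva    = (0x634  - 0x400)  + 0x1000 = 0x1234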
2,083
def getSectionByOffset ( self , offset ) : index = - 1 for i in range ( len ( self . sectionHeaders ) ) : if ( offset < self . sectionHeaders [ i ] . pointerToRawData . value + self . sectionHeaders [ i ] . sizeOfRawData . value ) : index = i break return index
Given an offset in the file tries to determine the section this offset belongs to .
2,084
def getSectionIndexByName ( self , name ) : index = - 1 if name : for i in range ( len ( self . sectionHeaders ) ) : if self . sectionHeaders [ i ] . name . value . find ( name ) >= 0 : index = i break return index
Given a string representing a section name tries to find the section index .
2,085
def getSectionByRva ( self , rva ) : index = - 1 if rva < self . sectionHeaders [ 0 ] . virtualAddress . value : return index for i in range ( len ( self . sectionHeaders ) ) : fa = self . ntHeaders . optionalHeader . fileAlignment . value prd = self . sectionHeaders [ i ] . pointerToRawData . value srd = self . sectionHeaders [ i ] . sizeOfRawData . value if len ( str ( self ) ) - self . _adjustFileAlignment ( prd , fa ) < srd : size = self . sectionHeaders [ i ] . misc . value else : size = max ( srd , self . sectionHeaders [ i ] . misc . value ) if ( self . sectionHeaders [ i ] . virtualAddress . value <= rva ) and rva < ( self . sectionHeaders [ i ] . virtualAddress . value + size ) : index = i break return index
Given a RVA in the file tries to determine the section this RVA belongs to .
2,086
def _getPaddingToSectionOffset ( self ) : return len ( str ( self . dosHeader ) + str ( self . dosStub ) + str ( self . ntHeaders ) + str ( self . sectionHeaders ) )
Returns the offset immediately after the last section header present in the PE file .
2,087
def fullLoad ( self ) : self . _parseDirectories ( self . ntHeaders . optionalHeader . dataDirectory , self . PE_TYPE )
Parse all the directories in the PE file .
2,088
def _fixPe ( self ) : sizeOfImage = 0 for sh in self . sectionHeaders : sizeOfImage += sh . misc . value self . ntHeaders . optionalHeader . sizeOfImage . value = self . _sectionAlignment ( sizeOfImage + 0x1000 )
Fixes the necessary fields in the PE file instance in order to create a valid PE32 . i . e . SizeOfImage .
2,089
def getDataAtRva ( self , rva , size ) : return self . getDataAtOffset ( self . getOffsetFromRva ( rva ) , size )
Gets binary data at a given RVA .
2,090
def getDataAtOffset ( self , offset , size ) : data = str ( self ) return data [ offset : offset + size ]
Gets binary data at a given offset .
2,091
def _parseDelayImportDirectory ( self , rva , size , magic = consts . PE32 ) : return self . getDataAtRva ( rva , size )
Parses the delay imports directory .
2,092
def _parseBoundImportDirectory ( self , rva , size , magic = consts . PE32 ) : data = self . getDataAtRva ( rva , size ) rd = utils . ReadData ( data ) boundImportDirectory = directories . ImageBoundImportDescriptor . parse ( rd ) for i in range ( len ( boundImportDirectory ) - 1 ) : if hasattr ( boundImportDirectory [ i ] , "forwarderRefsList" ) : if boundImportDirectory [ i ] . forwarderRefsList : for forwarderRefEntry in boundImportDirectory [ i ] . forwarderRefsList : offset = forwarderRefEntry . offsetModuleName . value forwarderRefEntry . moduleName = self . readStringAtRva ( offset + rva ) offset = boundImportDirectory [ i ] . offsetModuleName . value boundImportDirectory [ i ] . moduleName = self . readStringAtRva ( offset + rva ) return boundImportDirectory
Parses the bound import directory .
2,093
def _parseLoadConfigDirectory ( self , rva , size , magic = consts . PE32 ) : data = self . getDataAtRva ( rva , directories . ImageLoadConfigDirectory ( ) . sizeof ( ) ) rd = utils . ReadData ( data ) if magic == consts . PE32 : return directories . ImageLoadConfigDirectory . parse ( rd ) elif magic == consts . PE64 : return directories . ImageLoadConfigDirectory64 . parse ( rd ) else : raise excep . InvalidParameterException ( "Wrong magic" )
Parses IMAGE_LOAD_CONFIG_DIRECTORY .
2,094
def _parseTlsDirectory ( self , rva , size , magic = consts . PE32 ) : data = self . getDataAtRva ( rva , size ) rd = utils . ReadData ( data ) if magic == consts . PE32 : return directories . TLSDirectory . parse ( rd ) elif magic == consts . PE64 : return directories . TLSDirectory64 . parse ( rd ) else : raise excep . InvalidParameterException ( "Wrong magic" )
Parses the TLS directory .
2,095
def _parseRelocsDirectory ( self , rva , size , magic = consts . PE32 ) : data = self . getDataAtRva ( rva , size ) rd = utils . ReadData ( data ) relocsArray = directories . ImageBaseRelocation ( ) while rd . offset < size : relocEntry = directories . ImageBaseRelocationEntry . parse ( rd ) relocsArray . append ( relocEntry ) return relocsArray
Parses the relocation directory .
2,096
def get_addresses_on_both_chains ( wallet_obj , used = None , zero_balance = None ) : mpub = wallet_obj . serialize_b58 ( private = False ) wallet_name = get_blockcypher_walletname_from_mpub ( mpub = mpub , subchain_indices = [ 0 , 1 ] , ) wallet_addresses = get_wallet_addresses ( wallet_name = wallet_name , api_key = BLOCKCYPHER_API_KEY , is_hd_wallet = True , used = used , zero_balance = zero_balance , coin_symbol = coin_symbol_from_mkey ( mpub ) , ) verbose_print ( 'wallet_addresses:' ) verbose_print ( wallet_addresses ) if wallet_obj . private_key : master_key = wallet_obj . serialize_b58 ( private = True ) else : master_key = mpub chains_address_paths_cleaned = [ ] for chain in wallet_addresses [ 'chains' ] : if chain [ 'chain_addresses' ] : chain_address_paths = verify_and_fill_address_paths_from_bip32key ( address_paths = chain [ 'chain_addresses' ] , master_key = master_key , network = guess_network_from_mkey ( mpub ) , ) chain_address_paths_cleaned = { 'index' : chain [ 'index' ] , 'chain_addresses' : chain_address_paths , } chains_address_paths_cleaned . append ( chain_address_paths_cleaned ) return chains_address_paths_cleaned
Get addresses across both subchains based on the filter criteria passed in
2,097
def dump_all_keys_or_addrs ( wallet_obj ) : print_traversal_warning ( ) puts ( '\nDo you understand this warning?' ) if not confirm ( user_prompt = DEFAULT_PROMPT , default = False ) : puts ( colored . red ( 'Dump Cancelled!' ) ) return mpub = wallet_obj . serialize_b58 ( private = False ) if wallet_obj . private_key : desc_str = 'private keys' else : desc_str = 'addresses' puts ( 'Displaying Public Addresses Only' ) puts ( 'For Private Keys, please open bcwallet with your Master Private Key:\n' ) priv_to_display = '%s123...' % first4mprv_from_mpub ( mpub = mpub ) print_bcwallet_basic_priv_opening ( priv_to_display = priv_to_display ) puts ( 'How many %s (on each chain) do you want to dump?' % desc_str ) puts ( 'Enter "b" to go back.\n' ) num_keys = get_int ( user_prompt = DEFAULT_PROMPT , max_int = 10 ** 5 , default_input = '5' , show_default = True , quit_ok = True , ) if num_keys is False : return if wallet_obj . private_key : print_childprivkey_warning ( ) puts ( '-' * 70 ) for chain_int in ( 0 , 1 ) : for current in range ( 0 , num_keys ) : path = "m/%d/%d" % ( chain_int , current ) if current == 0 : if chain_int == 0 : print_external_chain ( ) print_key_path_header ( ) elif chain_int == 1 : print_internal_chain ( ) print_key_path_header ( ) child_wallet = wallet_obj . get_child_for_path ( path ) if wallet_obj . private_key : wif_to_use = child_wallet . export_to_wif ( ) else : wif_to_use = None print_path_info ( address = child_wallet . to_address ( ) , path = path , wif = wif_to_use , coin_symbol = coin_symbol_from_mkey ( mpub ) , ) puts ( colored . blue ( '\nYou can compare this output to bip32.org' ) )
Offline - enabled mechanism to dump addresses
2,098
def dump_selected_keys_or_addrs ( wallet_obj , used = None , zero_balance = None ) : if wallet_obj . private_key : content_str = 'private keys' else : content_str = 'addresses' if not USER_ONLINE : puts ( colored . red ( '\nInternet connection required, would you like to dump *all* %s instead?' % ( content_str , ) ) ) if confirm ( user_prompt = DEFAULT_PROMPT , default = True ) : dump_all_keys_or_addrs ( wallet_obj = wallet_obj ) else : return mpub = wallet_obj . serialize_b58 ( private = False ) if wallet_obj . private_key is None : puts ( 'Displaying Public Addresses Only' ) puts ( 'For Private Keys, please open bcwallet with your Master Private Key:\n' ) priv_to_display = '%s123...' % first4mprv_from_mpub ( mpub = mpub ) print_bcwallet_basic_priv_opening ( priv_to_display = priv_to_display ) chain_address_objs = get_addresses_on_both_chains ( wallet_obj = wallet_obj , used = used , zero_balance = zero_balance , ) if wallet_obj . private_key and chain_address_objs : print_childprivkey_warning ( ) addr_cnt = 0 for chain_address_obj in chain_address_objs : if chain_address_obj [ 'index' ] == 0 : print_external_chain ( ) elif chain_address_obj [ 'index' ] == 1 : print_internal_chain ( ) print_key_path_header ( ) for address_obj in chain_address_obj [ 'chain_addresses' ] : print_path_info ( address = address_obj [ 'pub_address' ] , wif = address_obj . get ( 'wif' ) , path = address_obj [ 'path' ] , coin_symbol = coin_symbol_from_mkey ( mpub ) , ) addr_cnt += 1 if addr_cnt : puts ( colored . blue ( '\nYou can compare this output to bip32.org' ) ) else : puts ( 'No matching %s in this subset. Would you like to dump *all* %s instead?' % ( content_str , content_str , ) ) if confirm ( user_prompt = DEFAULT_PROMPT , default = True ) : dump_all_keys_or_addrs ( wallet_obj = wallet_obj )
Works for both public key only or private key access
2,099
def dump_private_keys_or_addrs_chooser ( wallet_obj ) : if wallet_obj . private_key : puts ( 'Which private keys and addresses do you want?' ) else : puts ( 'Which addresses do you want?' ) with indent ( 2 ) : puts ( colored . cyan ( '1: Active - have funds to spend' ) ) puts ( colored . cyan ( '2: Spent - no funds to spend (because they have been spent)' ) ) puts ( colored . cyan ( '3: Unused - no funds to spend (because the address has never been used)' ) ) puts ( colored . cyan ( '0: All (works offline) - regardless of whether they have funds to spend (super advanced users only)' ) ) puts ( colored . cyan ( '\nb: Go Back\n' ) ) choice = choice_prompt ( user_prompt = DEFAULT_PROMPT , acceptable_responses = [ 0 , 1 , 2 , 3 ] , default_input = '1' , show_default = True , quit_ok = True , ) if choice is False : return if choice == '1' : return dump_selected_keys_or_addrs ( wallet_obj = wallet_obj , zero_balance = False , used = True ) elif choice == '2' : return dump_selected_keys_or_addrs ( wallet_obj = wallet_obj , zero_balance = True , used = True ) elif choice == '3' : return dump_selected_keys_or_addrs ( wallet_obj = wallet_obj , zero_balance = None , used = False ) elif choice == '0' : return dump_all_keys_or_addrs ( wallet_obj = wallet_obj )
Offline - enabled mechanism to dump everything