idx
int64 0
63k
| question
stringlengths 61
4.03k
| target
stringlengths 6
1.23k
|
---|---|---|
3,200 |
def _update_transforms(self):
    """Update the canvas's TransformSystem to correct for the current
    canvas size, framebuffer, and viewport.
    """
    if len(self._fb_stack) == 0:
        # No framebuffer pushed: let the TransformSystem use defaults.
        fb_size = fb_rect = None
    else:
        fb, origin, fb_size = self._fb_stack[-1]
        fb_rect = origin + fb_size
    if len(self._vp_stack) == 0:
        viewport = None
    else:
        viewport = self._vp_stack[-1]
    self.transforms.configure(viewport=viewport, fbo_size=fb_size,
                              fbo_rect=fb_rect)
|
Update the canvas's TransformSystem to correct for the current canvas size, framebuffer, and viewport.
|
3,201 |
def wrapping(self):
    """Texture wrapping mode.

    Returns a single mode when every axis uses the same wrapping,
    otherwise the full per-axis sequence.
    """
    modes = self._wrapping
    first = modes[0]
    if all(m == first for m in modes):
        return first
    return modes
|
Texture wrapping mode
|
3,202 |
def _resize(self, shape, format=None, internalformat=None):
    """Internal method for resize.

    Normalizes the requested shape, resolves the texture format and
    internalformat (inferring them from the last shape dimension when
    not given), validates consistency, and emits the GLIR SIZE command.
    """
    shape = self._normalize_shape(shape)

    if not self._resizable:
        raise RuntimeError("Texture is not resizable")

    # Determine format: infer from channel count, preferring the current
    # format when it is compatible; otherwise validate the given enum.
    if format is None:
        format = self._formats[shape[-1]]
        if self._format and self._inv_formats[self._format] == self._inv_formats[format]:
            format = self._format
    else:
        format = check_enum(format)

    # Same policy for the internalformat (may legitimately stay None).
    if internalformat is None:
        if self._internalformat and self._inv_internalformats[self._internalformat] == shape[-1]:
            internalformat = self._internalformat
    else:
        internalformat = check_enum(internalformat)

    # Validate format against the channel count.
    if format not in self._inv_formats:
        raise ValueError('Invalid texture format: %r.' % format)
    elif shape[-1] != self._inv_formats[format]:
        raise ValueError('Format does not match with given shape. '
                         '(format expects %d elements, data has %d)' %
                         (self._inv_formats[format], shape[-1]))

    # Validate internalformat against the channel count.
    if internalformat is None:
        pass
    elif internalformat not in self._inv_internalformats:
        raise ValueError(
            'Invalid texture internalformat: %r. Allowed formats: %r'
            % (internalformat, self._inv_internalformats))
    elif shape[-1] != self._inv_internalformats[internalformat]:
        raise ValueError('Internalformat does not match with given shape.')

    # Store and send the GLIR command that performs the GPU-side resize.
    self._shape = shape
    self._format = format
    self._internalformat = internalformat
    self._glir.command('SIZE', self._id, self._shape, self._format,
                       self._internalformat)
|
Internal method for resize .
|
3,203 |
def _set_data(self, data, offset=None, copy=False):
    """Internal method for set_data.

    Uploads *data* at *offset* (in texels), resizing the texture first
    when the whole texture is being replaced.
    """
    # Copy if requested, then normalize the array's shape.
    data = np.array(data, copy=copy)
    data = self._normalize_shape(data)

    # Resize when no offset is given, or when the upload covers the
    # entire texture anyway.
    if offset is None:
        self._resize(data.shape)
    elif all([i == 0 for i in offset]) and data.shape == self._shape:
        self._resize(data.shape)

    # Default offset is the origin in every dimension.
    offset = offset or tuple([0 for i in range(self._ndim)])
    assert len(offset) == self._ndim

    # Check that the data fits; the last (channel) dimension is excluded.
    for i in range(len(data.shape) - 1):
        if offset[i] + data.shape[i] > self._shape[i]:
            raise ValueError("Data is too large")

    self._glir.command('DATA', self._id, offset, data)
|
Internal method for set_data .
|
3,204 |
def get_free_region(self, width, height):
    """Get a free region of given size and allocate it.

    Returns (x, y, width, height) of the allocated region, or None when
    no space is left.  Uses a skyline-style packing over
    ``self._atlas_nodes`` (each node is an (x, y, width) triple).
    """
    best_height = best_width = np.inf
    best_index = -1
    # Find the node where the region sits lowest; ties broken by the
    # narrowest node width.
    for i in range(len(self._atlas_nodes)):
        y = self._fit(i, width, height)
        if y >= 0:
            node = self._atlas_nodes[i]
            if (y + height < best_height or
                    (y + height == best_height and node[2] < best_width)):
                best_height = y + height
                best_index = i
                best_width = node[2]
                region = node[0], y, width, height
    if best_index == -1:
        return None

    # Insert a new skyline node just above the allocated region.
    node = region[0], region[1] + height, width
    self._atlas_nodes.insert(best_index, node)

    # Shrink or delete the following nodes that the new node overlaps.
    i = best_index + 1
    while i < len(self._atlas_nodes):
        node = self._atlas_nodes[i]
        prev_node = self._atlas_nodes[i - 1]
        if node[0] < prev_node[0] + prev_node[2]:
            shrink = prev_node[0] + prev_node[2] - node[0]
            x, y, w = self._atlas_nodes[i]
            self._atlas_nodes[i] = x + shrink, y, w - shrink
            if self._atlas_nodes[i][2] <= 0:
                del self._atlas_nodes[i]
                i -= 1
            else:
                break
        else:
            break
        i += 1

    # Merge adjacent nodes that ended up at the same height.
    i = 0
    while i < len(self._atlas_nodes) - 1:
        node = self._atlas_nodes[i]
        next_node = self._atlas_nodes[i + 1]
        if node[1] == next_node[1]:
            self._atlas_nodes[i] = node[0], node[1], node[2] + next_node[2]
            del self._atlas_nodes[i + 1]
        else:
            i += 1
    return region
|
Get a free region of given size and allocate it
|
3,205 |
def _vector_or_scalar ( x , type = 'row' ) : if isinstance ( x , ( list , tuple ) ) : x = np . array ( x ) if isinstance ( x , np . ndarray ) : assert x . ndim == 1 if type == 'column' : x = x [ : , None ] return x
|
Convert an object to either a scalar or a row or column vector .
|
3,206 |
def _vector ( x , type = 'row' ) : if isinstance ( x , ( list , tuple ) ) : x = np . array ( x , dtype = np . float32 ) elif not isinstance ( x , np . ndarray ) : x = np . array ( [ x ] , dtype = np . float32 ) assert x . ndim == 1 if type == 'column' : x = x [ : , None ] return x
|
Convert an object to a row or column vector .
|
3,207 |
def smoothstep(edge0, edge1, x):
    """Perform smooth Hermite interpolation between 0 and 1 as *x*
    moves from *edge0* to *edge1* (clamped outside that range)."""
    t = np.clip((x - edge0) / (edge1 - edge0), 0.0, 1.0)
    return t * t * (3 - 2 * t)
|
performs smooth Hermite interpolation between 0 and 1 when edge0 < x < edge1 .
|
3,208 |
def _glsl_mix ( controls = None ) : assert ( controls [ 0 ] , controls [ - 1 ] ) == ( 0. , 1. ) ncolors = len ( controls ) assert ncolors >= 2 if ncolors == 2 : s = " return mix($color_0, $color_1, t);\n" else : s = "" for i in range ( ncolors - 1 ) : if i == 0 : ifs = 'if (t < %.6f)' % ( controls [ i + 1 ] ) elif i == ( ncolors - 2 ) : ifs = 'else' else : ifs = 'else if (t < %.6f)' % ( controls [ i + 1 ] ) adj_t = '(t - %s) / %s' % ( controls [ i ] , controls [ i + 1 ] - controls [ i ] ) s += ( "%s {\n return mix($color_%d, $color_%d, %s);\n} " % ( ifs , i , i + 1 , adj_t ) ) return "vec4 colormap(float t) {\n%s\n}" % s
|
Generate a GLSL template function from a given interpolation patterns and control points .
|
3,209 |
def get_colormap(name, *args, **kwargs):
    """Obtain a colormap.

    *name* may be a BaseColormap instance (returned as-is) or the string
    name of a registered colormap.  If the lookup yields a class, it is
    instantiated with the extra positional/keyword arguments.
    """
    if isinstance(name, BaseColormap):
        cmap = name
    else:
        if not isinstance(name, string_types):
            raise TypeError('colormap must be a Colormap or string name')
        if name not in _colormaps:
            raise KeyError('colormap name %s not found' % name)
        cmap = _colormaps[name]

    # Registered entries may be classes; instantiate them on demand.
    if inspect.isclass(cmap):
        cmap = cmap(*args, **kwargs)
    return cmap
|
Obtain a colormap
|
3,210 |
def visual_border_width(self):
    """The border width expressed in visual coordinates."""
    doc_to_visual = self.transforms.get_transform('document', 'visual')
    mapped = doc_to_visual.map([self.border_width, self.border_width, 0])
    zero = doc_to_visual.map([0, 0, 0])
    # Document y runs downward, so the y component changes sign.
    return [mapped[0] - zero[0], -(mapped[1] - zero[1])]
|
The border width in visual coordinates
|
3,211 |
def run(self, fig):
    """Run the exporter on the given figure.

    Rendering to a throwaway buffer forces matplotlib to fully lay out
    the figure before we crawl it.
    """
    fig.savefig(io.BytesIO(), format='png', dpi=fig.dpi)
    if self.close_mpl:
        # Close the figure so it does not pop up alongside the export.
        import matplotlib.pyplot as plt
        plt.close(fig)
    self.crawl_fig(fig)
|
Run the exporter on the given figure
|
3,212 |
def process_transform(transform, ax=None, data=None, return_trans=False,
                      force_trans=None):
    """Process the transform and convert data to figure or data coordinates.

    Returns a coordinate-system code ('data', 'axes', 'figure' or
    'display'), optionally followed by the transformed data and/or the
    residual transform, depending on *data* and *return_trans*.
    """
    if isinstance(transform, transforms.BlendedGenericTransform):
        warnings.warn("Blended transforms not yet supported. "
                      "Zoom behavior may not work as expected.")

    if force_trans is not None:
        # Re-express the data in the forced coordinate system.
        if data is not None:
            data = (transform - force_trans).transform(data)
        transform = force_trans

    code = "display"
    if ax is not None:
        # Find the first known coordinate system the transform passes
        # through, and strip it off to get the residual transform.
        for (c, trans) in [("data", ax.transData),
                           ("axes", ax.transAxes),
                           ("figure", ax.figure.transFigure),
                           ("display", transforms.IdentityTransform())]:
            if transform.contains_branch(trans):
                code, transform = (c, transform - trans)
                break

    if data is not None:
        if return_trans:
            return code, transform.transform(data), transform
        else:
            return code, transform.transform(data)
    else:
        if return_trans:
            return code, transform
        else:
            return code
|
Process the transform and convert data to figure or data coordinates
|
3,213 |
def crawl_fig(self, fig):
    """Crawl the figure and process all of its axes."""
    with self.renderer.draw_figure(fig=fig,
                                   props=utils.get_figure_properties(fig)):
        for ax in fig.axes:
            self.crawl_ax(ax)
|
Crawl the figure and process all axes
|
3,214 |
def crawl_ax(self, ax):
    """Crawl the axes and process all elements within."""
    with self.renderer.draw_axes(ax=ax,
                                 props=utils.get_axes_properties(ax)):
        for line in ax.lines:
            self.draw_line(ax, line)
        for text in ax.texts:
            self.draw_text(ax, text)
        # Axis labels and the title are Text objects too, but need their
        # type passed along and an axes-relative transform.
        for (text, ttp) in zip([ax.xaxis.label, ax.yaxis.label, ax.title],
                               ["xlabel", "ylabel", "title"]):
            if (hasattr(text, 'get_text') and text.get_text()):
                self.draw_text(ax, text, force_trans=ax.transAxes,
                               text_type=ttp)
        for artist in ax.artists:
            if isinstance(artist, matplotlib.text.Text):
                self.draw_text(ax, artist)
        for patch in ax.patches:
            self.draw_patch(ax, patch)
        for collection in ax.collections:
            self.draw_collection(ax, collection)
        for image in ax.images:
            self.draw_image(ax, image)
        legend = ax.get_legend()
        if legend is not None:
            props = utils.get_legend_properties(ax, legend)
            with self.renderer.draw_legend(legend=legend, props=props):
                if props['visible']:
                    self.crawl_legend(ax, legend)
|
Crawl the axes and process all elements within
|
3,215 |
def crawl_legend(self, ax, legend):
    """Recursively look through the objects in the legend's children and
    draw each supported element with an axes-relative transform.
    """
    legendElements = list(utils.iter_all_children(legend._legend_box,
                                                  skipContainers=True))
    legendElements.append(legend.legendPatch)
    for child in legendElements:
        # Push legend elements above everything else on the axes.
        child.set_zorder(1E6 + child.get_zorder())
        try:
            if isinstance(child, matplotlib.patches.Patch):
                self.draw_patch(ax, child, force_trans=ax.transAxes)
            elif isinstance(child, matplotlib.text.Text):
                # Skip the placeholder 'None' text matplotlib appends last.
                if not (child is legend.get_children()[-1]
                        and child.get_text() == 'None'):
                    self.draw_text(ax, child, force_trans=ax.transAxes)
            elif isinstance(child, matplotlib.lines.Line2D):
                self.draw_line(ax, child, force_trans=ax.transAxes)
            elif isinstance(child, matplotlib.collections.Collection):
                self.draw_collection(ax, child,
                                     force_pathtrans=ax.transAxes)
            else:
                # fix: message previously misspelled "impemented"
                warnings.warn("Legend element %s not implemented" % child)
        except NotImplementedError:
            warnings.warn("Legend element %s not implemented" % child)
|
Recursively look through objects in legend children
|
3,216 |
def draw_line(self, ax, line, force_trans=None):
    """Process a matplotlib line and call renderer.draw_marked_line."""
    coordinates, data = self.process_transform(line.get_transform(), ax,
                                               line.get_xydata(),
                                               force_trans=force_trans)
    linestyle = utils.get_line_style(line)
    # A None dasharray presumably marks an invisible line — draw markers
    # only in that case (TODO confirm against utils.get_line_style).
    if linestyle['dasharray'] is None:
        linestyle = None
    markerstyle = utils.get_marker_style(line)
    if (markerstyle['marker'] in ['None', 'none', None]
            or markerstyle['markerpath'][0].size == 0):
        markerstyle = None
    label = line.get_label()
    # Nothing to draw when both styles collapsed to None.
    if markerstyle or linestyle:
        self.renderer.draw_marked_line(data=data, coordinates=coordinates,
                                       linestyle=linestyle,
                                       markerstyle=markerstyle,
                                       label=label,
                                       mplobj=line)
|
Process a matplotlib line and call renderer . draw_line
|
3,217 |
def draw_text(self, ax, text, force_trans=None, text_type=None):
    """Process a matplotlib text object and call renderer.draw_text."""
    content = text.get_text()
    if content:  # skip empty text artists entirely
        transform = text.get_transform()
        position = text.get_position()
        coords, position = self.process_transform(transform, ax, position,
                                                  force_trans=force_trans)
        style = utils.get_text_style(text)
        self.renderer.draw_text(text=content, position=position,
                                coordinates=coords,
                                text_type=text_type,
                                style=style, mplobj=text)
|
Process a matplotlib text object and call renderer . draw_text
|
3,218 |
def draw_patch(self, ax, patch, force_trans=None):
    """Process a matplotlib patch object and call renderer.draw_path."""
    vertices, pathcodes = utils.SVG_path(patch.get_path())
    transform = patch.get_transform()
    coordinates, vertices = self.process_transform(transform, ax, vertices,
                                                   force_trans=force_trans)
    linestyle = utils.get_path_style(patch, fill=patch.get_fill())
    self.renderer.draw_path(data=vertices,
                            coordinates=coordinates,
                            pathcodes=pathcodes,
                            style=linestyle,
                            mplobj=patch)
|
Process a matplotlib patch object and call renderer . draw_path
|
3,219 |
def draw_collection(self, ax, collection,
                    force_pathtrans=None, force_offsettrans=None):
    """Process a matplotlib collection and call
    renderer.draw_path_collection.
    """
    (transform, transOffset, offsets, paths) = collection._prepare_points()

    offset_coords, offsets = self.process_transform(
        transOffset, ax, offsets, force_trans=force_offsettrans)
    path_coords = self.process_transform(
        transform, ax, force_trans=force_pathtrans)

    # Convert each path to SVG form, then map its vertices.
    processed_paths = [utils.SVG_path(path) for path in paths]
    processed_paths = [(self.process_transform(
        transform, ax, path[0], force_trans=force_pathtrans)[1], path[1])
        for path in processed_paths]

    path_transforms = collection.get_transforms()
    try:
        # Some matplotlib versions return transform objects rather than
        # matrices here — convert when possible.
        path_transforms = [t.get_matrix() for t in path_transforms]
    except AttributeError:
        pass

    styles = {'linewidth': collection.get_linewidths(),
              'facecolor': collection.get_facecolors(),
              'edgecolor': collection.get_edgecolors(),
              'alpha': collection._alpha,
              'zorder': collection.get_zorder()}

    # Whether offsets are applied before or after the path transform.
    offset_dict = {"data": "before", "screen": "after"}
    offset_order = offset_dict[collection.get_offset_position()]

    self.renderer.draw_path_collection(paths=processed_paths,
                                       path_coordinates=path_coords,
                                       path_transforms=path_transforms,
                                       offsets=offsets,
                                       offset_coordinates=offset_coords,
                                       offset_order=offset_order,
                                       styles=styles,
                                       mplobj=collection)
|
Process a matplotlib collection and call renderer . draw_collection
|
3,220 |
def draw_image(self, ax, image):
    """Process a matplotlib image object and call renderer.draw_image."""
    self.renderer.draw_image(imdata=utils.image_to_base64(image),
                             extent=image.get_extent(),
                             coordinates="data",
                             style={"alpha": image.get_alpha(),
                                    "zorder": image.get_zorder()},
                             mplobj=image)
|
Process a matplotlib image object and call renderer . draw_image
|
3,221 |
def draw_marked_line(self, data, coordinates, linestyle, markerstyle,
                     label, mplobj=None):
    """Draw a line that may also carry markers.

    Dispatches to draw_line and/or draw_markers depending on which
    style dicts are present.
    """
    has_line = linestyle is not None
    has_markers = markerstyle is not None
    if has_line:
        self.draw_line(data, coordinates, linestyle, label, mplobj)
    if has_markers:
        self.draw_markers(data, coordinates, markerstyle, label, mplobj)
|
Draw a line that also has markers .
|
3,222 |
def _iter_path_collection(paths, path_transforms, offsets, styles):
    """Build an iterator over the elements of the path collection.

    Each shorter sequence is cycled so that the zip yields exactly
    max(len(paths), len(offsets)) tuples of
    (path, transform, offset, edgecolor, linewidth, facecolor).
    """
    N = max(len(paths), len(offsets))
    # Default transform / colors when the collection supplies none.
    if not path_transforms:
        path_transforms = [np.eye(3)]
    edgecolor = styles['edgecolor']
    if np.size(edgecolor) == 0:
        edgecolor = ['none']
    facecolor = styles['facecolor']
    if np.size(facecolor) == 0:
        facecolor = ['none']
    elements = [paths, path_transforms, offsets,
                edgecolor, styles['linewidth'], facecolor]
    it = itertools
    return it.islice(py3k.zip(*py3k.map(it.cycle, elements)), N)
|
Build an iterator over the elements of the path collection
|
3,223 |
def draw_path(self, data, coordinates, pathcodes, style,
              offset=None, offset_coordinates="data", mplobj=None):
    """Draw a path.

    Abstract renderer hook: concrete renderer subclasses must override
    this method.
    """
    raise NotImplementedError()
|
Draw a path .
|
3,224 |
def from_times(cls, times, delta_t=DEFAULT_OBSERVATION_TIME):
    """Create a TimeMOC from an astropy.time.Time sequence.

    Each time becomes a one-microsecond-wide interval; the result is
    degraded to the order implied by *delta_t*.
    """
    # Times in JD converted to integer microsecond pixels.
    times_arr = np.asarray(times.jd * TimeMOC.DAY_MICRO_SEC, dtype=int)
    intervals_arr = np.vstack((times_arr, times_arr + 1)).T
    order = TimeMOC.time_resolution_to_order(delta_t)
    return TimeMOC(IntervalSet(intervals_arr)).degrade_to_order(order)
|
Create a TimeMOC from an astropy.time.Time
|
3,225 |
def from_time_ranges(cls, min_times, max_times,
                     delta_t=DEFAULT_OBSERVATION_TIME):
    """Create a TimeMOC from ranges defined by two astropy.time.Time
    sequences (inclusive bounds), degraded to the order implied by
    *delta_t*.
    """
    min_times_arr = np.asarray(min_times.jd * TimeMOC.DAY_MICRO_SEC,
                               dtype=int)
    max_times_arr = np.asarray(max_times.jd * TimeMOC.DAY_MICRO_SEC,
                               dtype=int)
    # +1 makes the upper bound inclusive at microsecond resolution.
    intervals_arr = np.vstack((min_times_arr, max_times_arr + 1)).T
    order = TimeMOC.time_resolution_to_order(delta_t)
    return TimeMOC(IntervalSet(intervals_arr)).degrade_to_order(order)
|
Create a TimeMOC from a range defined by two astropy . time . Time
|
3,226 |
def add_neighbours(self):
    """Add all the pixels at max order in the neighbourhood of the MOC.

    Widens every interval by one pixel at the current max order on both
    sides, clamped to the valid time range.
    """
    # Width of one pixel at max_order (in the internal time units).
    time_delta = 1 << (2 * (IntervalSet.HPY_MAX_ORDER - self.max_order))
    intervals_arr = self._interval_set._intervals
    intervals_arr[:, 0] = np.maximum(intervals_arr[:, 0] - time_delta, 0)
    # (1 << 58) - 1 is presumably the maximum encodable value — confirm
    # against IntervalSet.
    intervals_arr[:, 1] = np.minimum(intervals_arr[:, 1] + time_delta,
                                     (1 << 58) - 1)
    self._interval_set = IntervalSet(intervals_arr)
|
Add all the pixels at max order in the neighbourhood of the moc
|
3,227 |
def remove_neighbours(self):
    """Remove all the pixels at max order located at the bounds of the MOC.

    Shrinks every interval by one pixel at the current max order on both
    sides and drops intervals that become empty.
    """
    # Width of one pixel at max_order (in the internal time units).
    time_delta = 1 << (2 * (IntervalSet.HPY_MAX_ORDER - self.max_order))
    intervals_arr = self._interval_set._intervals
    intervals_arr[:, 0] = np.minimum(intervals_arr[:, 0] + time_delta,
                                     (1 << 58) - 1)
    intervals_arr[:, 1] = np.maximum(intervals_arr[:, 1] - time_delta, 0)
    # Keep only intervals that are still non-empty after shrinking.
    good_intervals = intervals_arr[:, 1] > intervals_arr[:, 0]
    self._interval_set = IntervalSet(intervals_arr[good_intervals])
|
Remove all the pixels at max order located at the bound of the moc
|
3,228 |
def intersection(self, another_moc, delta_t=DEFAULT_OBSERVATION_TIME):
    """Intersection between self and another TimeMOC.

    *delta_t* sets the time resolution of the operation: both MOCs are
    degraded to the corresponding order before intersecting.
    """
    order_op = TimeMOC.time_resolution_to_order(delta_t)
    self_degraded, moc_degraded = self._process_degradation(another_moc,
                                                            order_op)
    return super(TimeMOC, self_degraded).intersection(moc_degraded)
|
Intersection between self and moc . delta_t gives the possibility to the user to set a time resolution for performing the tmoc intersection
|
3,229 |
def total_duration(self):
    """Total duration covered by the temporal MOC, as a TimeDelta
    (plain 0 when the MOC is empty)."""
    if self._interval_set.empty():
        return 0
    total_time_us = sum(stop_time - start_time
                        for (start_time, stop_time)
                        in self._interval_set._intervals)
    # Internal units are microseconds; convert to seconds.
    return TimeDelta(total_time_us / 1e6, format='sec', scale='tdb')
|
Get the total duration covered by the temporal moc
|
3,230 |
def consistency(self):
    """Fraction of the [min_time, max_time] span that the MOC actually
    covers (total_duration divided by the full span)."""
    span_jd = (self.max_time - self.min_time).jd
    return self.total_duration.jd / span_jd
|
Get a percentage of fill between the min and max time the moc is defined .
|
3,231 |
def min_time(self):
    """The astropy.time.Time of the TMOC's first observation."""
    # Internal microsecond units converted back to Julian days.
    min_time = Time(self._interval_set.min / TimeMOC.DAY_MICRO_SEC,
                    format='jd', scale='tdb')
    return min_time
|
Get the ~astropy . time . Time time of the tmoc first observation
|
3,232 |
def max_time(self):
    """The astropy.time.Time of the TMOC's last observation."""
    # Internal microsecond units converted back to Julian days.
    max_time = Time(self._interval_set.max / TimeMOC.DAY_MICRO_SEC,
                    format='jd', scale='tdb')
    return max_time
|
Get the ~astropy . time . Time time of the tmoc last observation
|
3,233 |
def plot(self, title='TimeMoc', view=(None, None)):
    """Plot the TimeMoc in a time window.

    *view* optionally gives (start, stop) astropy Times restricting the
    plotted range; by default the MOC's own min/max times are used.
    """
    from matplotlib.colors import LinearSegmentedColormap
    import matplotlib.pyplot as plt

    if self._interval_set.empty():
        print('Nothing to print. This TimeMoc object is empty.')
        return

    # Cap the order so the raster below stays a reasonable size.
    plot_order = 15
    if self.max_order > plot_order:
        plotted_moc = self.degrade_to_order(plot_order)
    else:
        plotted_moc = self

    min_jd = plotted_moc.min_time.jd if not view[0] else view[0].jd
    max_jd = plotted_moc.max_time.jd if not view[1] else view[1].jd
    if max_jd < min_jd:
        raise ValueError("Invalid selection: max_jd = {0} must be > to min_jd = {1}".format(max_jd, min_jd))

    fig1 = plt.figure(figsize=(9.5, 5))
    ax = fig1.add_subplot(111)
    ax.set_xlabel('iso')
    ax.get_yaxis().set_visible(False)

    # Rasterize the coverage onto a fixed-size 1-D grid.
    size = 2000
    delta = (max_jd - min_jd) / size
    min_jd_time = min_jd

    ax.set_xticks([0, size])
    ax.set_xticklabels(Time([min_jd_time, max_jd], format='jd',
                            scale='tdb').iso, rotation=70)

    y = np.zeros(size)
    for (s_time_us, e_time_us) in plotted_moc._interval_set._intervals:
        s_index = int((s_time_us / TimeMOC.DAY_MICRO_SEC - min_jd_time) / delta)
        e_index = int((e_time_us / TimeMOC.DAY_MICRO_SEC - min_jd_time) / delta)
        y[s_index:(e_index + 1)] = 1.0

    # A fully-covered strip would render as a flat block; poke one hole
    # so the color scale still spans both values.
    if np.all(y):
        y[0] = 0

    # Repeat the 1-D strip vertically so imshow draws a visible band.
    z = np.tile(y, (int(size // 10), 1))

    plt.title(title)
    color_map = LinearSegmentedColormap.from_list('w2r',
                                                  ['#fffff0', '#aa0000'])
    color_map.set_under('w')
    color_map.set_bad('gray')
    plt.imshow(z, interpolation='bilinear', cmap=color_map)

    def on_mouse_motion(event):
        # Show the hovered time as a rotated label, replacing any
        # previously drawn one.
        for txt in ax.texts:
            txt.set_visible(False)
        text = ax.text(0, 0, "", va="bottom", ha="left")
        time = Time(event.xdata * delta + min_jd_time, format='jd',
                    scale='tdb')
        tx = '{0}'.format(time.iso)
        text.set_position((event.xdata - 50, 700))
        text.set_rotation(70)
        text.set_text(tx)

    cid = fig1.canvas.mpl_connect('motion_notify_event', on_mouse_motion)
    plt.show()
|
Plot the TimeMoc in a time window .
|
3,234 |
def handle(self, client, subhooks=()):
    """Handle a new update.

    Fetches fresh data, compares the extracted key for every subhook
    against the previously seen key, and returns (changed, data):
    (True, new_data) when anything changed, (False, None) otherwise.
    """
    new_data = self.fetch(client)
    changed = {}
    for subhook in (subhooks or [self.name]):
        new_key = self.extract_key(new_data, subhook)
        if new_key != self.previous_keys.get(subhook):
            changed[subhook] = new_key
    if not changed:
        return (False, None)
    logger.debug("Hook %s: data changed from %r to %r",
                 self.name, self.previous_keys, changed)
    self.previous_keys.update(changed)
    return (True, new_data)
|
Handle a new update .
|
3,235 |
def _load_char(self, char):
    """Build and store a glyph corresponding to an individual character.

    Renders the glyph bitmap into the texture atlas and records its
    size and normalized texture coordinates in the glyph dict.
    """
    assert isinstance(char, string_types) and len(char) == 1
    assert char not in self._glyphs
    # Load new glyph data from the font into self._glyphs.
    _load_glyph(self._font, char, self._glyphs)
    glyph = self._glyphs[char]
    bitmap = glyph['bitmap']

    # Pad the bitmap with `_spread` pixels on every side (presumably the
    # SDF spread — confirm against the renderer).
    data = np.zeros((bitmap.shape[0] + 2 * self._spread,
                     bitmap.shape[1] + 2 * self._spread), np.uint8)
    data[self._spread:-self._spread, self._spread:-self._spread] = bitmap

    height = data.shape[0] // self.ratio
    width = data.shape[1] // self.ratio
    # Reserve a 1px margin on each side so adjacent glyphs don't bleed.
    region = self._atlas.get_free_region(width + 2, height + 2)
    if region is None:
        raise RuntimeError('Cannot store glyph')
    x, y, w, h = region
    x, y, w, h = x + 1, y + 1, w - 2, h - 2

    self._renderer.render_to_texture(data, self._atlas, (x, y), (w, h))

    # Normalized texture coordinates of the stored glyph.
    u0 = x / float(self._atlas.shape[1])
    v0 = y / float(self._atlas.shape[0])
    u1 = (x + w) / float(self._atlas.shape[1])
    v1 = (y + h) / float(self._atlas.shape[0])
    texcoords = (u0, v0, u1, v1)
    glyph.update(dict(size=(w, h), texcoords=texcoords))
|
Build and store a glyph corresponding to an individual character
|
3,236 |
def get_font(self, face, bold=False, italic=False):
    """Get (and cache) the TextureFont for a face/bold/italic combination."""
    key = '%s-%s-%s' % (face, bold, italic)
    if key not in self._fonts:
        descriptor = dict(face=face, bold=bold, italic=italic)
        self._fonts[key] = TextureFont(descriptor, self._renderer)
    return self._fonts[key]
|
Get a font described by face and size
|
3,237 |
def fft_freqs(n_fft, fs):
    """Return the frequency-bin centers for an *n_fft*-point DFT of a
    signal sampled at rate *fs* (non-negative bins only)."""
    n_bins = n_fft // 2 + 1
    return np.arange(0, n_bins) / float(n_fft) * float(fs)
|
Return frequencies for DFT
|
3,238 |
def set_data(self, pos=None, color=None, width=None, connect=None,
             arrows=None):
    """Set the data used for this visual, including optional arrows."""
    if arrows is not None:
        # Remember the new arrow data and flag it for rebuild on draw.
        self._arrows = arrows
        self._arrows_changed = True
    LineVisual.set_data(self, pos, color, width, connect)
|
Set the data used for this visual
|
3,239 |
def strip_html(text):
    """Get rid of ugly twitter html.

    Expands leading @-mentions into a spoken "Replying to ..." prefix,
    replaces remaining '@' signs with spaces, and drops link tokens.
    """
    def reply_to(text):
        replying_to = []
        # fix: `message` was unbound when the text was empty or made up
        # entirely of @-mentions, raising UnboundLocalError below.
        message = []
        split_text = text.split()
        for index, token in enumerate(split_text):
            if token.startswith('@'):
                replying_to.append(token[1:])
            else:
                message = split_text[index:]
                break
        rply_msg = ""
        if len(replying_to) > 0:
            rply_msg = "Replying to "
            for token in replying_to[:-1]:
                rply_msg += token + ","
            if len(replying_to) > 1:
                rply_msg += 'and '
            rply_msg += replying_to[-1] + ". "
        return rply_msg + " ".join(message)

    text = reply_to(text)
    text = text.replace('@', ' ')
    return " ".join([token for token in text.split()
                     if ('http:' not in token) and ('https:' not in token)])
|
Get rid of ugly twitter html
|
3,240 |
def post_tweet(user_id, message, additional_params=None):
    """Helper function to post a tweet on behalf of *user_id*.

    *additional_params* optionally supplies extra query parameters
    merged into the status update request.
    """
    url = "https://api.twitter.com/1.1/statuses/update.json"
    params = {"status": message}
    # fix: default was a shared mutable dict (`additional_params={}`);
    # None is used as the sentinel instead, behavior is unchanged.
    if additional_params:
        params.update(additional_params)
    r = make_twitter_request(url, user_id, params, request_type='POST')
    print(r.text)
    return "Successfully posted a tweet {}".format(message)
|
Helper function to post a tweet
|
3,241 |
def make_twitter_request(url, user_id, params={}, request_type='GET'):
    """Generically make a request to the twitter API using a particular
    user's authorization.

    NOTE(review): returns None for request types other than GET/POST;
    the mutable default `params={}` is never mutated here but remains a
    latent pitfall — confirm before changing.
    """
    if request_type == "GET":
        return requests.get(url, auth=get_twitter_auth(user_id),
                            params=params)
    elif request_type == "POST":
        return requests.post(url, auth=get_twitter_auth(user_id),
                             params=params)
|
Generically make a request to twitter API using a particular user s authorization
|
3,242 |
def geo_search(user_id, search_location):
    """Free-form search for a location through the twitter geo API."""
    url = "https://api.twitter.com/1.1/geo/search.json"
    params = {"query": search_location}
    return make_twitter_request(url, user_id, params).json()
|
Search for a location - free form
|
3,243 |
def read_out_tweets(processed_tweets, speech_convertor=None):
    """Turn processed (user, text) tweet pairs into spoken-response
    strings, numbered from 1."""
    responses = []
    for position, (user, text) in enumerate(processed_tweets, start=1):
        responses.append("tweet number {num} by {user}. {text}.".format(
            num=position, user=user, text=text))
    return responses
|
Input: a list of processed tweets; output: a list of spoken responses.
|
3,244 |
def search_for_tweets_about(user_id, params):
    """Search the twitter API for tweets matching *params* and return
    the processed statuses."""
    url = "https://api.twitter.com/1.1/search/tweets.json"
    statuses = make_twitter_request(url, user_id, params).json()["statuses"]
    return process_tweets(statuses)
|
Search twitter API
|
3,245 |
def add_val(self, val):
    """Merge *val* (a dict) into the config and persist it.

    Raises
    ------
    ValueError
        If *val* is not a dict.
    """
    # fix: was `isinstance(val, type({}))` raising `ValueError(type({}))`
    # -- the exception now names the offending type instead of `dict`.
    if not isinstance(val, dict):
        raise ValueError("val must be a dict, got %r" % type(val))
    self.read()
    self.config.update(val)
    self.save()
|
add value in form of dict
|
3,246 |
def read_mesh(fname):
    """Read mesh data from file.

    Currently only Wavefront OBJ files (optionally gzipped) are
    supported.

    Raises
    ------
    ValueError
        If the format cannot be determined or is not supported.
    """
    fmt = op.splitext(fname)[1].lower()
    if fmt == '.gz':
        # Peel off the .gz to find the real extension, e.g. mesh.obj.gz.
        fmt = op.splitext(op.splitext(fname)[0])[1].lower()
    # fix: ('.obj') was a plain string, so the test was substring
    # containment ('' and '.o' also matched); a singleton tuple is meant.
    if fmt in ('.obj',):
        return WavefrontReader.read(fname)
    elif not fmt:
        # fix: previously tested the builtin `format` (always truthy),
        # making this branch unreachable; also repaired the message.
        raise ValueError('read_mesh could not determine format.')
    else:
        raise ValueError('read_mesh does not understand format %s.' % fmt)
|
Read mesh data from file .
|
3,247 |
def write_mesh(fname, vertices, faces, normals, texcoords, name='',
               format='obj', overwrite=False, reshape_faces=True):
    """Write mesh data to file (Wavefront OBJ only).

    Raises
    ------
    IOError
        If *fname* exists and *overwrite* is False.
    ValueError
        If *format* is not 'obj'.
    """
    if op.isfile(fname) and not overwrite:
        raise IOError('file "%s" exists, use overwrite=True' % fname)
    # fix: ('obj') was a plain string, so any substring of "obj"
    # (e.g. 'o', 'b') slipped through; a singleton tuple is meant.
    if format not in ('obj',):
        raise ValueError('Only "obj" format writing currently supported')
    WavefrontWriter.write(fname, vertices, faces, normals, texcoords,
                          name, reshape_faces)
|
Write mesh data to file .
|
3,248 |
def _parse_variables_from_code(self):
    """Parse uniforms, attributes, varyings and consts from the shader
    source code into ``self._code_variables``, then apply any pending
    variable assignments.
    """
    # Get one string of all the shader code.
    code = '\n\n'.join(self._shaders)

    # Strip '//'-style comments.
    # fix: re.M was previously passed positionally as the *count*
    # argument (re.M == 8), so only the first 8 comments were stripped.
    code = re.sub(r'(.*)(//.*)', r'\1', code, flags=re.M)

    # Declaration pattern; raw strings so the escapes are literal.
    var_regexp = (r"\s*VARIABLE\s+"
                  r"((highp|mediump|lowp)\s+)?"
                  r"(?P<type>\w+)\s+"
                  r"(?P<name>\w+)\s*"
                  r"(\[(?P<size>\d+)\])?"
                  r"(\s*\=\s*[0-9.]+)?"
                  r"\s*;")

    self._code_variables = {}
    for kind in ('uniform', 'attribute', 'varying', 'const'):
        regex = re.compile(var_regexp.replace('VARIABLE', kind),
                           flags=re.MULTILINE)
        for m in re.finditer(regex, code):
            gtype = m.group('type')
            size = int(m.group('size')) if m.group('size') else -1
            this_kind = kind
            if size >= 1:
                # Expand arrays into one entry per element.
                for i in range(size):
                    name = '%s[%d]' % (m.group('name'), i)
                    self._code_variables[name] = kind, gtype, name, -1
                this_kind = 'uniform_array'
            name = m.group('name')
            self._code_variables[name] = this_kind, gtype, name, size

    # Now that variables are known, try applying pending assignments.
    self._process_pending_variables()
|
Parse uniforms attributes and varyings from the source code .
|
3,249 |
def _process_pending_variables ( self ) : self . _pending_variables , pending = { } , self . _pending_variables for name , data in pending . items ( ) : self [ name ] = data
|
Try to apply the variables that were set but not known yet .
|
3,250 |
def draw(self, mode='triangles', indices=None, check_error=True):
    """Draw the attribute arrays in the specified mode.

    *indices* may be an IndexBuffer, or None to draw all vertices in
    order.  Raises on unknown modes, missing/mismatched attributes, or
    an invalid index type.
    """
    # Invalidate any cached buffer.
    self._buffer = None

    # Validate the draw mode.
    mode = check_enum(mode)
    if mode not in ['points', 'lines', 'line_strip', 'line_loop',
                    'triangles', 'triangle_strip', 'triangle_fan']:
        raise ValueError('Invalid draw mode: %r' % mode)

    # Warn about and discard variables that were set but never declared.
    for name in self._pending_variables:
        logger.warn('Variable %r is given but not known.' % name)
    self._pending_variables = {}

    # Collect attribute buffers and check that their sizes agree.
    attributes = [vbo for vbo in self._user_variables.values()
                  if isinstance(vbo, DataBuffer)]
    sizes = [a.size for a in attributes]
    if len(attributes) < 1:
        raise RuntimeError('Must have at least one attribute')
    if not all(s == sizes[0] for s in sizes[1:]):
        msg = '\n'.join(['%s: %s' % (str(a), a.size) for a in attributes])
        raise RuntimeError('All attributes must have the same size, got:\n'
                           '%s' % msg)

    # Associate the program's GLIR queue with the current canvas.
    canvas = get_current_canvas()
    assert canvas is not None
    canvas.context.glir.associate(self.glir)

    if isinstance(indices, IndexBuffer):
        # Indexed draw: also associate the index buffer's queue.
        canvas.context.glir.associate(indices.glir)
        logger.debug("Program drawing %r with index buffer" % mode)
        gltypes = {np.dtype(np.uint8): 'UNSIGNED_BYTE',
                   np.dtype(np.uint16): 'UNSIGNED_SHORT',
                   np.dtype(np.uint32): 'UNSIGNED_INT'}
        selection = indices.id, gltypes[indices.dtype], indices.size
        canvas.context.glir.command('DRAW', self._id, mode, selection)
    elif indices is None:
        # Non-indexed draw over the full attribute range.
        selection = 0, attributes[0].size
        logger.debug("Program drawing %r with %r" % (mode, selection))
        canvas.context.glir.command('DRAW', self._id, mode, selection)
    else:
        raise TypeError("Invalid index: %r (must be IndexBuffer)" %
                        indices)

    # Flush the queued GLIR commands.
    canvas.context.flush_commands()
|
Draw the attribute arrays in the specified mode .
|
3,251 |
def set_data(self, x=None, y=None, z=None, colors=None):
    """Update the surface-plot data.

    ``z`` must have shape ``(len(x), len(y))``; ``x``/``y`` give the grid
    coordinates along each axis.  Vertices are cached and only rebuilt when
    the grid size changes.
    """
    # A change in grid size invalidates the cached vertex array.
    if x is not None:
        if self._x is None or len(x) != len(self._x):
            self.__vertices = None
        self._x = x
    if y is not None:
        if self._y is None or len(y) != len(self._y):
            self.__vertices = None
        self._y = y
    if z is not None:
        if self._x is not None and z.shape[0] != len(self._x):
            raise TypeError('Z values must have shape (len(x), len(y))')
        if self._y is not None and z.shape[1] != len(self._y):
            raise TypeError('Z values must have shape (len(x), len(y))')
        self._z = z
        if (self.__vertices is not None and
                self._z.shape != self.__vertices.shape[:2]):
            self.__vertices = None
    if self._z is None:
        # Nothing to build until z data exists.
        return
    update_mesh = False
    new_vertices = False
    # Allocate the (rows, cols, 3) vertex grid and regenerate faces.
    if self.__vertices is None:
        new_vertices = True
        self.__vertices = np.empty((self._z.shape[0], self._z.shape[1], 3),
                                   dtype=np.float32)
        self.generate_faces()
        self.__meshdata.set_faces(self.__faces)
        update_mesh = True
    # Update x coordinates (column 0), broadcast down the rows.
    if new_vertices or x is not None:
        if x is None:
            if self._x is None:
                x = np.arange(self._z.shape[0])
            else:
                x = self._x
        self.__vertices[:, :, 0] = x.reshape(len(x), 1)
        update_mesh = True
    # Update y coordinates (column 1), broadcast across the columns.
    if new_vertices or y is not None:
        if y is None:
            if self._y is None:
                y = np.arange(self._z.shape[1])
            else:
                y = self._y
        self.__vertices[:, :, 1] = y.reshape(1, len(y))
        update_mesh = True
    if new_vertices or z is not None:
        self.__vertices[..., 2] = self._z
        update_mesh = True
    if colors is not None:
        self.__meshdata.set_vertex_colors(colors)
        update_mesh = True
    if update_mesh:
        self.__meshdata.set_vertices(
            self.__vertices.reshape(self.__vertices.shape[0] *
                                    self.__vertices.shape[1], 3))
        MeshVisual.set_data(self, meshdata=self.__meshdata)
|
Update the data in this surface plot .
|
3,252 |
def simplified(self):
    """A simplified representation of the same transformation (cached)."""
    if self._simplified is not None:
        return self._simplified
    # Build lazily on first access.
    self._simplified = SimplifiedChainTransform(self)
    return self._simplified
|
A simplified representation of the same transformation .
|
3,253 |
def imap(self, coords):
    """Inverse-map *coords* through every transform in the chain, in order."""
    result = coords
    for transform in self.transforms:
        result = transform.imap(result)
    return result
|
Inverse map coordinates
|
3,254 |
def append(self, tr):
    """Add transform *tr* to the end of this chain."""
    self.transforms.append(tr)
    # Propagate sub-transform changes to this chain.
    tr.changed.connect(self._subtr_changed)
    self._rebuild_shaders()
    self.update()
|
Add a new transform to the end of this chain .
|
3,255 |
def prepend(self, tr):
    """Add transform *tr* to the beginning of this chain."""
    self.transforms.insert(0, tr)
    # Propagate sub-transform changes to this chain.
    tr.changed.connect(self._subtr_changed)
    self._rebuild_shaders()
    self.update()
|
Add a new transform to the beginning of this chain .
|
3,256 |
def source_changed(self, event):
    """Regenerate the simplified chain after the source chain changed."""
    transforms = self._chain.transforms[:]
    if len(transforms) == 0:
        self.transforms = []
        return
    # If the change originated from a transform we already hold, the
    # structure is unchanged; just forward the event.
    if event is not None:
        for source in event.sources[::-1]:
            if source in self.transforms:
                self.update(event)
                return
    # Flatten nested non-dynamic chains into one flat list.
    new_chain = []
    while len(transforms) > 0:
        tr = transforms.pop(0)
        if isinstance(tr, ChainTransform) and not tr.dynamic:
            transforms = tr.transforms[:] + transforms
        else:
            new_chain.append(tr)
    # Repeatedly merge adjacent non-dynamic transforms whose product is not
    # itself a chain, until a full pass makes no merges.
    cont = True
    tr = new_chain
    while cont:
        new_tr = [tr[0]]
        cont = False
        for t2 in tr[1:]:
            t1 = new_tr[-1]
            pr = t1 * t2
            if (not t1.dynamic and not t2.dynamic and
                    not isinstance(pr, ChainTransform)):
                cont = True
                new_tr.pop()
                new_tr.append(pr)
            else:
                new_tr.append(t2)
        tr = new_tr
    self.transforms = tr
|
Generate a simplified chain by joining adjacent transforms .
|
3,257 |
def pack_iterable(messages):
    """Pack an iterable of messages in the TCP protocol format.

    Emits a 4-byte big-endian message count followed by each message packed
    with ``pack_string``; assumes *messages* supports ``len()``.
    """
    # NOTE(review): Python 2 code -- on Python 3 struct.pack returns bytes
    # while ''.join(...) returns str, so this concatenation raises TypeError.
    return pack_string(struct.pack('>l', len(messages)) +
                       ''.join(map(pack_string, messages)))
|
Pack an iterable of messages in the TCP protocol format
|
3,258 |
def hexify(message):
    """Render *message* with whitespace and non-printable chars as \\xNN escapes."""
    import string

    def _render(ch):
        # Whitespace is escaped too, so it is visible in debug output.
        if ch in '\n\r \t' or ch not in string.printable:
            return '\\x%02x' % ord(ch)
        return ch

    return ''.join(_render(ch) for ch in message)
|
Print out printable characters but others in hex
|
3,259 |
def clean(self):
    """Reset transient states left over from a previous session to 'queued'."""
    transient = ('paused', 'running', 'stopping', 'killing')
    for item in self.queue.values():
        if item['status'] in transient:
            item['status'] = 'queued'
            item['start'] = ''
            item['end'] = ''
|
Clean queue items from a previous session .
|
3,260 |
def clear(self):
    """Drop every finished ('done'/'failed') task and persist the queue."""
    finished = [key for key, entry in self.queue.items()
                if entry['status'] in ('done', 'failed')]
    for key in finished:
        del self.queue[key]
    self.write()
|
Remove all completed tasks from the queue .
|
3,261 |
def next(self):
    """Return the smallest key with status 'queued', or None if none exist."""
    queued = [key for key, entry in self.queue.items()
              if entry['status'] == 'queued']
    return min(queued) if queued else None
|
Get the next processable item of the queue .
|
3,262 |
def write(self):
    """Persist the current queue to disk so a later session can resume it.

    Failures (e.g. missing permissions) are reported but never raised,
    keeping the daemon alive on a best-effort basis.
    """
    queue_path = os.path.join(self.config_dir, 'queue')
    try:
        # Fix: use a context manager so the handle is always closed, and
        # cover open() itself -- the original let open() errors escape
        # despite printing a permissions hint for dump() errors only.
        with open(queue_path, 'wb+') as queue_file:
            pickle.dump(self.queue, queue_file, -1)
    except Exception:
        print('Error while writing to queue file. Wrong file permissions?')
|
Write the current queue to a file . We need this to continue an earlier session .
|
3,263 |
def add_new(self, command):
    """Insert *command* as a fresh queued entry and persist the queue.

    The given dict is stored (and mutated) directly.
    """
    key = self.next_key
    self.queue[key] = command
    command.update(status='queued', returncode='', stdout='',
                   stderr='', start='', end='')
    self.next_key = key + 1
    self.write()
|
Add a new entry to the queue .
|
3,264 |
def remove(self, key):
    """Delete *key* from the queue; return False when the key is unknown."""
    if key not in self.queue:
        return False
    del self.queue[key]
    self.write()
    return True
|
Remove a key from the queue return False if no such key exists .
|
3,265 |
def restart(self, key):
    """Re-queue a finished ('failed'/'done') entry; return success flag."""
    entry = self.queue.get(key)
    if entry is None or entry['status'] not in ('failed', 'done'):
        return False
    # Only command and path carry over; the rest is reset by add_new.
    self.add_new({'command': entry['command'], 'path': entry['path']})
    self.write()
    return True
|
Restart a previously finished entry .
|
3,266 |
def switch(self, first, second):
    """Swap two queue entries; both must exist and be 'queued'/'stashed'."""
    movable = ('queued', 'stashed')
    if (first not in self.queue or second not in self.queue
            or self.queue[first]['status'] not in movable
            or self.queue[second]['status'] not in movable):
        return False
    self.queue[first], self.queue[second] = (self.queue[second].copy(),
                                             self.queue[first].copy())
    self.write()
    return True
|
Switch two entries in the queue . Return False if an entry doesn t exist .
|
3,267 |
def receive_data(socket):
    """Read the daemon's full pickled reply from *socket* and return it.

    Reads until the peer closes, unpickles the payload, then closes the
    socket.  NOTE: pickle on socket data is only safe because the daemon
    socket is local and trusted.
    """
    chunks = []
    while True:
        packet = socket.recv(4096)
        if not packet:
            break
        chunks.append(packet)
    response = pickle.loads(b"".join(chunks))
    socket.close()
    return response
|
Receive an answer from the daemon and return the response .
|
3,268 |
def connect_socket(root_dir):
    """Connect to the pueue daemon's unix socket under *root_dir*.

    Exits the process with status 1 when the socket file is missing or the
    connection fails.
    """
    config_dir = os.path.join(root_dir, '.config/pueue')
    try:
        client = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        socket_path = os.path.join(config_dir, 'pueue.sock')
        if os.path.exists(socket_path):
            client.connect(socket_path)
        else:
            print("Socket doesn't exist")
            raise Exception
    except:
        # Any failure (including the explicit raise above) is fatal.
        print("Error connecting to socket. Make sure the daemon is running")
        sys.exit(1)
    return client
|
Connect to a daemon s socket .
|
3,269 |
def from_raw(conn, raw):
    """Build the proper Response subclass from a raw frame buffer.

    The first 4 bytes are the big-endian frame type; the remainder is the
    payload.
    """
    frame_type = struct.unpack('>l', raw[0:4])[0]
    message = raw[4:]
    if frame_type == FRAME_TYPE_MESSAGE:
        return Message(conn, frame_type, message)
    elif frame_type == FRAME_TYPE_RESPONSE:
        return Response(conn, frame_type, message)
    elif frame_type == FRAME_TYPE_ERROR:
        return Error(conn, frame_type, message)
    else:
        raise TypeError('Unknown frame type: %s' % frame_type)
|
Return a new response from a raw buffer
|
3,270 |
def pack(cls, data):
    """Frame *data*: 4-byte total length (payload + type word), frame type, payload."""
    header = struct.pack('>ll', len(data) + 4, cls.FRAME_TYPE)
    return header + data
|
Pack the provided data into a Response
|
3,271 |
def fin(self):
    """Send FIN for this message and mark it as processed."""
    self.connection.fin(self.id)
    self.processed = True
|
Indicate that this message is finished processing
|
3,272 |
def handle(self):
    """Context manager guaranteeing the message is FIN'd or REQ'd once.

    On exception, requeue (with backoff delay) and re-raise; on success,
    FIN.  Socket failures while replying close the connection instead.

    NOTE(review): Python 2 only -- the three-argument
    ``raise typ, value, trace`` form is a SyntaxError on Python 3.
    """
    try:
        yield self
    except:
        typ, value, trace = sys.exc_info()
        if not self.processed:
            try:
                # Requeue with the message's backoff delay.
                self.req(self.delay())
            except socket.error:
                self.connection.close()
        raise typ, value, trace
    else:
        if not self.processed:
            try:
                self.fin()
            except socket.error:
                self.connection.close()
|
Make sure this message gets either fin or req d
|
3,273 |
def find(cls, name):
    """Look up an NSQException subclass by its protocol *name*.

    The name-to-class mapping is built lazily on first use by scanning the
    ``exceptions`` module for NSQException subclasses that declare a
    ``name`` attribute.

    Raises
    ------
    TypeError
        If no exception class carries the requested name.
    """
    if not cls.mapping:
        # Populate the cache once.
        for _, obj in inspect.getmembers(exceptions):
            if inspect.isclass(obj):
                if issubclass(obj, exceptions.NSQException):
                    if hasattr(obj, 'name'):
                        cls.mapping[obj.name] = obj
    klass = cls.mapping.get(name)
    if klass is None:  # fixed: was `klass == None` (identity check for None)
        raise TypeError('No matching exception for %s' % name)
    return klass
|
Find the exception class by name
|
3,274 |
def exception(self):
    """Instantiate the exception matching this error frame.

    ``self.data`` is ``'<code> <message>'``; everything after the first
    space becomes the exception message.
    """
    code, _, message = self.data.partition(' ')
    return self.find(code)(message)
|
Return an instance of the corresponding exception
|
3,275 |
def parse_gdb_version(line):
    r"""Parse the gdb version from a gdb/mi console header line.

    The line looks like ``~"GNU gdb (GDB) 7.11.1\n"``; returns '' when the
    line does not match that shape.
    """
    if not (line.startswith('~"') and line.endswith(r'\n"')):
        return ''
    # Strip the '~"' prefix and the trailing '\n"' (3 chars), then take
    # the last space-separated token.
    parts = line[2:-3].rsplit(' ', 1)
    if len(parts) != 2:
        return ''
    digits = takewhile(lambda c: c.isdigit() or c == '.',
                       parts[1].lstrip('('))
    return ''.join(digits).strip('.')
|
r Parse the gdb version from the gdb header .
|
3,276 |
def spawn_gdb(pid, address=DFLT_ADDRESS, gdb='gdb', verbose=False, ctx=None,
              proc_iut=None):
    """Spawn gdb, attach it to *pid*, and drive it over a socketpair.

    Returns the GdbSocket ``error`` attribute after the asyncore loop ends
    and gdb has exited.
    """
    parent, child = socket.socketpair()
    # gdb speaks MI on the child end of the pair; we keep the parent end.
    proc = Popen([gdb, '--interpreter=mi', '-nx'], bufsize=0,
                 stdin=child, stdout=child, stderr=STDOUT)
    child.close()
    connections = {}
    gdb = GdbSocket(ctx, address, proc, proc_iut, parent, verbose,
                    connections)
    gdb.mi_command('-target-attach %d' % pid)
    # Bootstrap pdb-clone inside the attached process via gdb's python.
    gdb.cli_command('python import pdb_clone.bootstrappdb_gdb')
    asyncore.loop(map=connections)
    proc.wait()
    return gdb.error
|
Spawn gdb and attach to a process .
|
3,277 |
def attach_loop(argv):
    """Spawn *argv* as the program under test, then repeatedly attach to it.

    Uses the ``-X pdbhandler`` interpreter option when this Python supports
    it, otherwise falls back to attaching with gdb.  Returns the last
    error, or None.
    """
    # Probe whether this interpreter supports the pdbhandler -X option.
    p = Popen((sys.executable, '-X', 'pdbhandler', '-c',
               'import pdbhandler; pdbhandler.get_handler().host'),
              stdout=PIPE, stderr=STDOUT)
    p.wait()
    use_xoption = True if p.returncode == 0 else False
    args = [sys.executable]
    if use_xoption:
        args.extend(['-X', 'pdbhandler=localhost 7935 %d' % signal.SIGUSR2])
        args.extend(argv)
        proc = Popen(args)
    else:
        args.extend(argv)
        proc = Popen(args)
    ctx = Context()
    error = None
    # Randomized sleeps spread the attach points over the program's run.
    time.sleep(.5 + random.random())
    while not error and proc.poll() is None:
        if use_xoption:
            # Trigger the in-process handler, then talk to it over a socket.
            os.kill(proc.pid, signal.SIGUSR2)
            connections = {}
            dev_null = io.StringIO() if PY3 else StringIO.StringIO()
            asock = AttachSocketWithDetach(connections, stdout=dev_null)
            asock.create_socket(socket.AF_INET, socket.SOCK_STREAM)
            connect_process(asock, ctx, proc)
            asyncore.loop(map=connections)
        else:
            error = spawn_gdb(proc.pid, ctx=ctx, proc_iut=proc)
        time.sleep(random.random())
    # gdb dying because the debuggee exited is not a real failure.
    if error and gdb_terminated(error):
        error = None
    if proc.poll() is None:
        proc.terminate()
    else:
        print('pdb-attach: program under test return code:', proc.wait())
    result = str(ctx.result)
    if result:
        print(result)
    return error
|
Spawn the process then repeatedly attach to the process .
|
3,278 |
def skip(self):
    """Return True when the current py-pdb command was seen recently.

    A bounded history (last 30 lines) prevents re-attaching within the
    same loop of the debuggee.
    """
    line = self.line
    self.line = ''
    if line in self.lines:
        # NOTE(review): grouping inferred -- print the header once, then
        # one dot per skipped occurrence.
        if not self.skipping:
            self.skipping = True
            printflush('Skipping lines', end='')
        printflush('.', end='')
        return True
    elif line:
        self.lines.append(line)
        if len(self.lines) > 30:
            self.lines.popleft()
    return False
|
Skip this py - pdb command to avoid attaching within the same loop .
|
3,279 |
def rotate(self, log):
    """Archive *log* under a timestamped name and start a fresh empty log."""
    self.write(log, rotate=True)
    self.write({})
|
Move the current log to a new file with timestamp and create a new empty log file .
|
3,280 |
def write(self, log, rotate=False):
    """Write all finished entries of *log* to a compiled log file.

    With ``rotate=True`` the file name carries a timestamp suffix;
    otherwise ``queue.log`` is overwritten.  Entries without a returncode
    (still running/queued) are skipped.
    """
    if rotate:
        timestamp = time.strftime('-%Y%m%d-%H%M')
        logPath = os.path.join(self.log_dir, 'queue{}.log'.format(timestamp))
    else:
        logPath = os.path.join(self.log_dir, 'queue.log')
    # Always start from a clean file.
    if os.path.exists(logPath):
        os.remove(logPath)
    log_file = open(logPath, 'w')
    log_file.write('Pueue log for executed Commands: \n \n')
    for key, logentry in log.items():
        if logentry.get('returncode') is not None:
            try:
                returncode = logentry['returncode']
                # Colorize: green for success, red for failure.
                if returncode == 0:
                    returncode = Color('{autogreen}' +
                                       '{}'.format(returncode) +
                                       '{/autogreen}')
                else:
                    returncode = Color('{autored}' +
                                       '{}'.format(returncode) +
                                       '{/autored}')
                log_file.write(
                    Color('{autoyellow}' + 'Command #{} '.format(key) +
                          '{/autoyellow}') +
                    'exited with returncode {}: \n'.format(returncode) +
                    '"{}" \n'.format(logentry['command']))
                log_file.write('Path: {} \n'.format(logentry['path']))
                log_file.write('Start: {}, End: {} \n'.format(
                    logentry['start'], logentry['end']))
                if logentry['stderr']:
                    log_file.write(Color('{autored}Stderr output: '
                                         '{/autored}\n ') +
                                   logentry['stderr'])
                if len(logentry['stdout']) > 0:
                    log_file.write(Color('{autogreen}Stdout output: '
                                         '{/autogreen}\n ') +
                                   logentry['stdout'])
                log_file.write('\n')
            except Exception as a:
                # Best effort: keep writing the remaining entries.
                print('Failed while writing to log file. Wrong file permissions?')
                print('Exception: {}'.format(str(a)))
    log_file.close()
|
Write the output of all finished processes to a compiled log file .
|
3,281 |
def remove_old(self, max_log_time):
    """Delete rotated log files older than *max_log_time* seconds.

    Rotated files are named 'queue-YYYYmmdd-HHMM.log'.
    """
    paths = glob.glob('{}/queue-*'.format(self.log_dir))
    for log_file in (os.path.basename(p) for p in paths):
        stamp = os.path.splitext(log_file)[0].split('-', maxsplit=1)[1]
        written = datetime.strptime(stamp, '%Y%m%d-%H%M')
        age = (datetime.now() - written).total_seconds()
        if age > int(max_log_time):
            os.remove(os.path.join(self.log_dir, log_file))
|
Remove all logs which are older than the specified time .
|
3,282 |
def wrap(function, *args, **kwargs):
    """Call *function* and return its response when the status is 200.

    Non-200 responses and unexpected errors are raised as ClientException;
    a ClientException raised along the way passes through unchanged.
    """
    try:
        response = function(*args, **kwargs)
        logger.debug('Got %s: %s', response.status_code, response.content)
        if response.status_code != 200:
            raise ClientException(response.reason, response.content)
        return response
    except ClientException:
        raise
    except Exception as exc:
        raise ClientException(exc)
|
Wrap a function that returns a request with some exception handling
|
3,283 |
def json_wrap(function, *args, **kwargs):
    """Decode the JSON body of *function*'s response.

    Unwraps the 'data' envelope when present (an empty payload becomes
    True); any failure is raised as ClientException.
    """
    try:
        payload = json.loads(function(*args, **kwargs).content)
        if 'data' in payload:
            return payload['data'] or True
        return payload
    except Exception as exc:
        raise ClientException(exc)
|
Return the json content of a function that returns a request
|
3,284 |
def ok_check(function, *args, **kwargs):
    """Call *function* and verify the response body is 'ok' (case-insensitive).

    Returns the body on success; raises ClientException otherwise.
    """
    body = function(*args, **kwargs).content
    if body.lower() != 'ok':
        raise ClientException(body)
    return body
|
Ensure that the response body is OK
|
3,285 |
def get(self, path, *args, **kwargs):
    """GET *path* relative to the client host, merging default params.

    NOTE(review): uses ``basestring`` -- Python 2 only.
    """
    target = self._host.relative(path).utf8
    # Some url objects expose utf8 as a callable rather than a value.
    if not isinstance(target, basestring):
        target = target()
    # Client-wide parameters override per-call ones of the same name.
    params = kwargs.get('params', {})
    params.update(self._params)
    kwargs['params'] = params
    logger.debug('GET %s with %s, %s', target, args, kwargs)
    return requests.get(target, *args, **kwargs)
|
GET the provided endpoint
|
3,286 |
def compute_hash_info(fd, unit_size=None):
    """Return MediaFireHashInfo (file hash, unit hashes, size) for *fd*.

    Streams the file in HASH_CHUNK_SIZE_BYTES chunks computing a SHA-256
    over the whole content and, when *unit_size* is given, one digest per
    unit.  The stream position is rewound to the start afterwards.
    """
    logger.debug("compute_hash_info(%s, unit_size=%s)", fd, unit_size)
    # Determine total size, then rewind.
    fd.seek(0, os.SEEK_END)
    file_size = fd.tell()
    fd.seek(0, os.SEEK_SET)
    units = []
    unit_counter = 0
    file_hash = hashlib.sha256()
    unit_hash = hashlib.sha256()
    for chunk in iter(lambda: fd.read(HASH_CHUNK_SIZE_BYTES), b''):
        file_hash.update(chunk)
        unit_hash.update(chunk)
        unit_counter += len(chunk)
        # assumes unit_size is a multiple of the chunk size, so unit
        # boundaries align with reads -- TODO confirm
        if unit_size is not None and unit_counter == unit_size:
            units.append(unit_hash.hexdigest().lower())
            unit_counter = 0
            unit_hash = hashlib.sha256()
    if unit_size is not None and unit_counter > 0:
        # Trailing partial unit.
        units.append(unit_hash.hexdigest().lower())
    fd.seek(0, os.SEEK_SET)
    return MediaFireHashInfo(file=file_hash.hexdigest().lower(),
                             units=units,
                             size=file_size)
|
Get MediaFireHashInfo structure from the fd unit_size
|
3,287 |
def upload(self, fd, name=None, folder_key=None, filedrop_key=None,
           path=None, action_on_duplicate=None):
    """Upload *fd*; return an UploadResult.

    Picks the cheapest strategy -- no-op, instant, simple or resumable --
    based on the upload/check API response, and retries retriable
    failures up to UPLOAD_RETRY_COUNT times.
    """
    # Measure the stream, then rewind.
    fd.seek(0, os.SEEK_END)
    size = fd.tell()
    fd.seek(0, os.SEEK_SET)
    if size > UPLOAD_SIMPLE_LIMIT_BYTES:
        resumable = True
    else:
        resumable = False
    logger.debug("Calculating checksum")
    hash_info = compute_hash_info(fd)
    if hash_info.size != size:
        # The stream changed size while we were hashing it.
        raise ValueError("hash_info.size mismatch")
    upload_info = _UploadInfo(fd=fd, name=name, folder_key=folder_key,
                              hash_info=hash_info, size=size, path=path,
                              filedrop_key=filedrop_key,
                              action_on_duplicate=action_on_duplicate)
    check_result = self._upload_check(upload_info, resumable)
    upload_result = None
    upload_func = None
    folder_key = check_result.get('folder_key', None)
    if folder_key is not None:
        # The server resolved the path to a folder key; use that instead.
        upload_info.folder_key = folder_key
        upload_info.path = None
    if check_result['hash_exists'] == 'yes':
        if (check_result['in_folder'] == 'yes' and
                check_result['file_exists'] == 'yes'):
            different_hash = check_result.get('different_hash', 'no')
            if different_hash == 'no':
                # Identical file already in place: nothing to transfer.
                upload_func = self._upload_none
        if not upload_func:
            # Server already has the content: instant upload.
            upload_func = self._upload_instant
    if not upload_func:
        if resumable:
            resumable_upload_info = check_result['resumable_upload']
            # Re-hash per unit using the server-provided unit size.
            upload_info.hash_info = compute_hash_info(
                fd, int(resumable_upload_info['unit_size']))
            upload_func = self._upload_resumable
        else:
            upload_func = self._upload_simple
    retries = UPLOAD_RETRY_COUNT
    while retries > 0:
        try:
            upload_result = upload_func(upload_info, check_result)
        except (RetriableUploadError, MediaFireConnectionError):
            retries -= 1
            logger.exception("%s failed (%d retries left)",
                             upload_func.__name__, retries)
            # Refresh server-side state before retrying.
            check_result = self._upload_check(upload_info, resumable)
        except Exception:
            logger.exception("%s failed", upload_func)
            break
        else:
            break
    if upload_result is None:
        raise UploadError("Upload failed")
    return upload_result
|
Upload file returns UploadResult object
|
3,288 |
def _poll_upload(self, upload_key, action):
    """Poll upload/poll until a quickkey appears; return an UploadResult.

    Non-standard (short) upload keys are returned immediately as an empty
    result.
    """
    if len(upload_key) != UPLOAD_KEY_LENGTH:
        # Dummy key: nothing to poll for.
        return UploadResult(action=action, quickkey=None, hash_=None,
                            filename=None, size=None, created=None,
                            revision=None)
    quick_key = None
    while quick_key is None:
        poll_result = self._api.upload_poll(upload_key)
        doupload = poll_result['doupload']
        logger.debug("poll(%s): status=%d, description=%s, filename=%s,"
                     " result=%d", upload_key, int(doupload['status']),
                     doupload['description'], doupload['filename'],
                     int(doupload['result']))
        if int(doupload['result']) != 0:
            break
        if doupload['fileerror'] != '':
            logger.warning("poll(%s): fileerror=%d", upload_key,
                           int(doupload['fileerror']))
            break
        if int(doupload['status']) == STATUS_NO_MORE_REQUESTS:
            quick_key = doupload['quickkey']
        elif int(doupload['status']) == STATUS_UPLOAD_IN_PROGRESS:
            # Polling only starts after the transfer finished, so an
            # in-progress status is treated as a retriable server error.
            raise RetriableUploadError(
                "Invalid state transition ({})".format(
                    doupload['description']))
        else:
            time.sleep(UPLOAD_POLL_INTERVAL)
    return UploadResult(action=action, quickkey=doupload['quickkey'],
                        hash_=doupload['hash'],
                        filename=doupload['filename'],
                        size=doupload['size'],
                        created=doupload['created'],
                        revision=doupload['revision'])
|
Poll upload until quickkey is found
|
3,289 |
def _upload_none(self, upload_info, check_result):
    """No-op upload: an identical file already exists; report its quickkey."""
    return UploadResult(action=None,
                        quickkey=check_result['duplicate_quickkey'],
                        hash_=upload_info.hash_info.file,
                        filename=upload_info.name,
                        size=upload_info.size,
                        created=None,
                        revision=None)
|
Dummy upload function for when we don t actually upload
|
3,290 |
def _upload_instant(self, upload_info, _=None):
    """Instant upload (server already has the content); return UploadResult."""
    result = self._api.upload_instant(
        upload_info.name,
        upload_info.size,
        upload_info.hash_info.file,
        path=upload_info.path,
        folder_key=upload_info.folder_key,
        filedrop_key=upload_info.filedrop_key,
        action_on_duplicate=upload_info.action_on_duplicate)
    return UploadResult(action='upload/instant',
                        quickkey=result['quickkey'],
                        filename=result['filename'],
                        revision=result['new_device_revision'],
                        hash_=upload_info.hash_info.file,
                        size=upload_info.size,
                        created=None)
|
Instant upload and return quickkey
|
3,291 |
def _upload_simple(self, upload_info, _=None):
    """Single-request upload; poll afterwards for the quickkey."""
    upload_result = self._api.upload_simple(
        upload_info.fd,
        upload_info.name,
        folder_key=upload_info.folder_key,
        filedrop_key=upload_info.filedrop_key,
        path=upload_info.path,
        file_size=upload_info.size,
        file_hash=upload_info.hash_info.file,
        action_on_duplicate=upload_info.action_on_duplicate)
    logger.debug("upload_result: %s", upload_result)
    upload_key = upload_result['doupload']['key']
    return self._poll_upload(upload_key, 'upload/simple')
|
Simple upload and return quickkey
|
3,292 |
def _upload_resumable_all(self, upload_info, bitmap, number_of_units,
                          unit_size):
    """Upload every unit the server does not yet hold; return upload_key."""
    fd = upload_info.fd
    upload_key = None
    for unit_id in range(number_of_units):
        # The bitmap encodes which units the server already received.
        upload_status = decode_resumable_upload_bitmap(bitmap,
                                                       number_of_units)
        if upload_status[unit_id]:
            logger.debug("Skipping unit %d/%d - already uploaded",
                         unit_id + 1, number_of_units)
            continue
        logger.debug("Uploading unit %d/%d", unit_id + 1, number_of_units)
        offset = unit_id * unit_size
        # SubsetIO exposes just this unit's byte range as a file object.
        with SubsetIO(fd, offset, unit_size) as unit_fd:
            unit_info = _UploadUnitInfo(
                upload_info=upload_info,
                hash_=upload_info.hash_info.units[unit_id],
                fd=unit_fd,
                uid=unit_id)
            upload_result = self._upload_resumable_unit(unit_info)
            # The first unit's response yields the key used for polling.
            if upload_key is None:
                upload_key = upload_result['doupload']['key']
    return upload_key
|
Prepare and upload all resumable units and return upload_key
|
3,293 |
def _upload_resumable(self, upload_info, check_result):
    """Resumable upload: send units until the server reports all ready."""
    resumable_upload = check_result['resumable_upload']
    unit_size = int(resumable_upload['unit_size'])
    number_of_units = int(resumable_upload['number_of_units'])
    logger.debug("number_of_units=%s (expected %s)", number_of_units,
                 len(upload_info.hash_info.units))
    # hash_info was recomputed with the server's unit size by the caller.
    assert len(upload_info.hash_info.units) == number_of_units
    logger.debug("Preparing %d units * %d bytes", number_of_units,
                 unit_size)
    upload_key = None
    retries = UPLOAD_RETRY_COUNT
    all_units_ready = resumable_upload['all_units_ready'] == 'yes'
    bitmap = resumable_upload['bitmap']
    while not all_units_ready and retries > 0:
        upload_key = self._upload_resumable_all(upload_info, bitmap,
                                                number_of_units, unit_size)
        # Ask the server what it received and retry only the gaps.
        check_result = self._upload_check(upload_info, resumable=True)
        resumable_upload = check_result['resumable_upload']
        all_units_ready = resumable_upload['all_units_ready'] == 'yes'
        bitmap = resumable_upload['bitmap']
        if not all_units_ready:
            retries -= 1
            logger.debug("Some units failed to upload (%d retries left)",
                         retries)
    if not all_units_ready:
        raise UploadError("Could not upload all units")
    logger.debug("Upload complete, polling for status")
    return self._poll_upload(upload_key, 'upload/resumable')
|
Resumable upload and return quickkey
|
3,294 |
def reset(self):
    """Remove from sys.modules the modules imported by the debuggee.

    On the first call, only install self as an import hook; on later
    calls, purge every tracked module (and its submodules) and clear the
    tracking list.
    """
    if not self.hooked:
        # First call: register the hook and its sentinel path entry.
        self.hooked = True
        sys.path_hooks.append(self)
        sys.path.insert(0, self.PATH_ENTRY)
        return
    for modname in self:
        if modname in sys.modules:
            del sys.modules[modname]
        # Also drop any 'pkg.sub' entries belonging to a tracked package.
        submods = []
        for subm in sys.modules:
            if subm.startswith(modname + '.'):
                submods.append(subm)
        for subm in submods:
            del sys.modules[subm]
    self[:] = []
|
Remove from sys . modules the modules imported by the debuggee .
|
3,295 |
def get_func_lno(self, funcname):
    """Return the first line number of the last definition of *funcname*.

    *funcname* may be dotted (``Class.method``).  The module AST is walked
    once and the result cached in ``self.functions_firstlno``.

    Raises BdbSourceError when no such function exists in the module.
    """
    class FuncLineno(ast.NodeVisitor):
        # Yields (qualified_name, lineno) for every function definition,
        # qualifying names with the enclosing class chain.
        def __init__(self):
            self.clss = []

        def generic_visit(self, node):
            for child in ast.iter_child_nodes(node):
                for item in self.visit(child):
                    yield item

        def visit_ClassDef(self, node):
            self.clss.append(node.name)
            for item in self.generic_visit(node):
                yield item
            self.clss.pop()

        def visit_FunctionDef(self, node):
            name = '.'.join(itertools.chain(self.clss, [node.name]))
            yield name, node.lineno

    if self.functions_firstlno is None:
        self.functions_firstlno = {}
        for name, lineno in FuncLineno().visit(self.node):
            if (name not in self.functions_firstlno or
                    self.functions_firstlno[name] < lineno):
                # Keep the definition with the largest line number,
                # i.e. the last one in the file.
                self.functions_firstlno[name] = lineno
    try:
        return self.functions_firstlno[funcname]
    except KeyError:
        raise BdbSourceError('{}: function "{}" not found.'.format(
            self.filename, funcname))
|
The first line number of the last defined funcname function .
|
3,296 |
def get_actual_bp(self, lineno):
    """Return (code_firstlineno, actual_lineno) for the breakpoint nearest
    at or after *lineno*.

    Recurses into nested code objects to find the closest valid statement;
    raises BdbSourceError when *lineno* lies past the last valid statement.
    """
    def _distance(code, module_level=False):
        # Return (distance, (firstlineno, actual_lno)) for the best
        # candidate inside this code object, or None when there is none.
        subcodes = dict((c.co_firstlineno, c) for c in code.co_consts
                        if isinstance(c, types.CodeType) and
                        not c.co_name.startswith('<'))
        subcode_dist = None
        subcodes_flnos = sorted(subcodes)
        idx = bisect(subcodes_flnos, lineno)
        if idx != 0:
            # Consider the nested code object starting just before lineno.
            flno = subcodes_flnos[idx - 1]
            subcode_dist = _distance(subcodes[flno])
        code_lnos = sorted(code_line_numbers(code))
        # Drop the definition line itself, except at module level.
        if not module_level and len(code_lnos) > 1:
            code_lnos = code_lnos[1:]
        if lineno in code_lnos and lineno not in subcodes_flnos:
            return 0, (code.co_firstlineno, lineno)
        idx = bisect(code_lnos, lineno)
        if idx == len(code_lnos):
            return subcode_dist
        actual_lno = code_lnos[idx]
        dist = actual_lno - lineno
        if subcode_dist and subcode_dist[0] < dist:
            return subcode_dist
        if actual_lno not in subcodes_flnos:
            return dist, (code.co_firstlineno, actual_lno)
        else:
            # The candidate line starts a nested def: descend into it.
            return _distance(subcodes[actual_lno])

    if self.code:
        code_dist = _distance(self.code, module_level=True)
    if not self.code or not code_dist:
        raise BdbSourceError('{}: line {} is after the last '
                             'valid statement.'.format(self.filename,
                                                       lineno))
    return code_dist[1]
|
Get the actual breakpoint line number .
|
3,297 |
def get_breakpoints(self, lineno):
    """Return the breakpoints set at *lineno*, ordered by number."""
    try:
        # Translate to the effective breakpoint line for this module.
        firstlineno, actual_lno = self.bdb_module.get_actual_bp(lineno)
    except BdbSourceError:
        return []
    if firstlineno not in self:
        return []
    code_bps = self[firstlineno]
    if actual_lno not in code_bps:
        return []
    # Only breakpoints whose user-requested line matches exactly.
    return [bp for bp in sorted(code_bps[actual_lno],
                                key=attrgetter('number'))
            if bp.line == lineno]
|
Return the list of breakpoints set at lineno .
|
3,298 |
def settrace(self, do_set):
    """Install this debugger's trace function, or remove it when *do_set* is false."""
    sys.settrace(self.trace_dispatch if do_set else None)
|
Set or remove the trace function .
|
3,299 |
def restart(self):
    """Reset debugger state after the source code changed on disk."""
    _module_finder.reset()
    # Invalidate cached source lines so fresh code is shown.
    linecache.checkcache()
    for module_bpts in self.breakpoints.values():
        # Recompute breakpoint positions against the new sources.
        module_bpts.reset()
|
Restart the debugger after source code changes .
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.