idx: int64 (values 0–63k)
question: string (lengths 61–4.03k)
target: string (lengths 6–1.23k)
1,700
def setCmd ( self , cmd ) : cmd = cmd . upper ( ) if cmd not in VALID_COMMANDS : raise FrameError ( "The cmd '%s' is not valid! It must be one of '%s' (STOMP v%s)." % ( cmd , VALID_COMMANDS , STOMP_VERSION ) ) else : self . _cmd = cmd
Check that the cmd is valid; a FrameError will be raised if it's not.
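A minimal usage sketch, assuming the stomper package's Frame class (of which setCmd is the cmd property's setter):

import stomper

frame = stomper.Frame()
frame.cmd = 'send'  # normalised to 'SEND'; anything outside VALID_COMMANDS raises FrameError
frame.headers['destination'] = '/queue/demo'
frame.body = 'hello'
message = frame.pack()  # command line, sorted headers, body, NULL terminator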
1,701
def pack ( self ) : headers = '' . join ( [ '%s:%s\n' % ( f , v ) for f , v in sorted ( self . headers . items ( ) ) ] ) stomp_message = "%s\n%s\n%s%s\n" % ( self . _cmd , headers , self . body , NULL ) return stomp_message
Called to create a STOMP message from the internal values.
1,702
def unpack ( self , message ) : if not message : raise FrameError ( "Unpack error! The given message isn't valid '%s'!" % message ) msg = unpack_frame ( message ) self . cmd = msg [ 'cmd' ] self . headers = msg [ 'headers' ] self . body = msg [ 'body' ] return msg
Called to extract a STOMP message into this instance.
1,703
def react ( self , msg ) : returned = "" mtype = type ( msg ) if mtype in stringTypes : msg = unpack_frame ( msg ) elif mtype == dict : pass else : raise FrameError ( "Unknown message type '%s', I don't know what to do with this!" % mtype ) if msg [ 'cmd' ] in self . states : returned = self . states [ msg [ 'cmd' ] ] ( msg ) return returned
Called to provide a response to a message if needed.
1,704
def error ( self , msg ) : body = msg [ 'body' ] . replace ( NULL , '' ) brief_msg = "" if 'message' in msg [ 'headers' ] : brief_msg = msg [ 'headers' ] [ 'message' ] self . log . error ( "Received server error - message%s\n\n%s" % ( brief_msg , body ) ) returned = NO_RESPONSE_NEEDED if self . testing : returned = 'error' return returned
Called to handle an error message received from the server.
1,705
def receipt ( self , msg ) : body = msg [ 'body' ] . replace ( NULL , '' ) brief_msg = "" if 'receipt-id' in msg [ 'headers' ] : brief_msg = msg [ 'headers' ] [ 'receipt-id' ] self . log . info ( "Received server receipt message - receipt-id:%s\n\n%s" % ( brief_msg , body ) ) returned = NO_RESPONSE_NEEDED if self . testing : returned = 'receipt' return returned
Called to handle a receipt message received from the server.
1,706
def log_init ( level ) : log = logging . getLogger ( ) hdlr = logging . StreamHandler ( ) formatter = logging . Formatter ( '%(asctime)s %(name)s %(levelname)s %(message)s' ) hdlr . setFormatter ( formatter ) log . addHandler ( hdlr ) log . setLevel ( level )
Set up a logger that catches all channels and logs them to stdout. This is used to set up logging when testing.
1,707
def ack ( self , msg ) : print ( "Got a message:\n%s\n" % msg [ 'body' ] ) return super ( Pong , self ) . ack ( msg )
Override this to do some custom message handling.
1,708
def transaction_atomic_with_retry ( num_retries = 5 , backoff = 0.1 ) : @ wrapt . decorator def wrapper ( wrapped , instance , args , kwargs ) : num_tries = 0 exception = None while num_tries <= num_retries : try : with transaction . atomic ( ) : return wrapped ( * args , ** kwargs ) except db . utils . OperationalError as e : num_tries += 1 exception = e sleep ( backoff * num_tries ) raise exception return wrapper
A decorator that wraps the decorated method in an atomic transaction and retries the transaction a given number of times.
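A usage sketch of the decorator above; Item is a hypothetical Django model:

@transaction_atomic_with_retry(num_retries=3, backoff=0.2)
def update_quantity(item_id, delta):
    # Any db.utils.OperationalError (e.g. a deadlock) triggers a retry,
    # sleeping backoff * attempt between tries before finally re-raising.
    item = Item.objects.select_for_update().get(id=item_id)
    item.quantity += delta
    item.save(update_fields=['quantity'])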
1,709
def defer_entity_syncing ( wrapped , instance , args , kwargs ) : sync_entities . defer = True try : return wrapped ( * args , ** kwargs ) finally : sync_entities . defer = False model_objs = list ( sync_entities . buffer . values ( ) ) if None in sync_entities . buffer : model_objs = list ( ) if len ( sync_entities . buffer ) : sync_entities ( * model_objs ) sync_entities . buffer = { }
A decorator that can be used to defer the syncing of entities until after the method has been run. This is being introduced to help avoid deadlocks in the meantime, as we attempt to better understand why they are happening.
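A usage sketch; import_accounts and Account are hypothetical:

@defer_entity_syncing
def import_accounts(rows):
    # Each create would normally trigger an individual sync; with the
    # decorator applied, syncing is buffered and flushed once at the end.
    for row in rows:
        Account.objects.create(name=row['name'])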
1,710
def _get_super_entities_by_ctype ( model_objs_by_ctype , model_ids_to_sync , sync_all ) : super_entities_by_ctype = defaultdict ( lambda : defaultdict ( list ) ) for ctype , model_objs_for_ctype in model_objs_by_ctype . items ( ) : entity_config = entity_registry . entity_registry . get ( ctype . model_class ( ) ) super_entities = entity_config . get_super_entities ( model_objs_for_ctype , sync_all ) super_entities_by_ctype [ ctype ] = { ContentType . objects . get_for_model ( model_class , for_concrete_model = False ) : relationships for model_class , relationships in super_entities . items ( ) } for super_entity_ctype , relationships in super_entities_by_ctype [ ctype ] . items ( ) : for sub_entity_id , super_entity_id in relationships : model_ids_to_sync [ ctype ] . add ( sub_entity_id ) model_ids_to_sync [ super_entity_ctype ] . add ( super_entity_id ) return super_entities_by_ctype
Given model objects organized by content type and a dictionary of all model IDs that need to be synced, organize all super entity relationships that need to be synced.
1,711
def _get_model_objs_to_sync ( model_ids_to_sync , model_objs_map , sync_all ) : model_objs_to_sync = { } for ctype , model_ids_to_sync_for_ctype in model_ids_to_sync . items ( ) : model_qset = entity_registry . entity_registry . get ( ctype . model_class ( ) ) . queryset if not sync_all : model_objs_to_sync [ ctype ] = model_qset . filter ( id__in = model_ids_to_sync_for_ctype ) else : model_objs_to_sync [ ctype ] = [ model_objs_map [ ctype , model_id ] for model_id in model_ids_to_sync_for_ctype ] return model_objs_to_sync
Given the model IDs to sync, fetch all model objects to sync.
1,712
def sync_entities_watching ( instance ) : for entity_model , entity_model_getter in entity_registry . entity_watching [ instance . __class__ ] : model_objs = list ( entity_model_getter ( instance ) ) if model_objs : sync_entities ( * model_objs )
Syncs entities watching changes of a model instance.
1,713
def upsert_entity_kinds ( self , entity_kinds ) : unchanged_entity_kinds = { } if entity_kinds : unchanged_entity_kinds = { ( entity_kind . name , entity_kind . display_name ) : entity_kind for entity_kind in EntityKind . all_objects . extra ( where = [ '(name, display_name) IN %s' ] , params = [ tuple ( ( entity_kind . name , entity_kind . display_name ) for entity_kind in entity_kinds ) ] ) } changed_entity_kinds = [ entity_kind for entity_kind in entity_kinds if ( entity_kind . name , entity_kind . display_name ) not in unchanged_entity_kinds ] upserted_enitity_kinds = [ ] if changed_entity_kinds : list ( EntityKind . all_objects . all ( ) . select_for_update ( ) . values_list ( 'id' , flat = True ) ) upserted_enitity_kinds = manager_utils . bulk_upsert ( queryset = EntityKind . all_objects . filter ( name__in = [ entity_kind . name for entity_kind in changed_entity_kinds ] ) , model_objs = changed_entity_kinds , unique_fields = [ 'name' ] , update_fields = [ 'display_name' ] , return_upserts = True ) return upserted_enitity_kinds + list ( unchanged_entity_kinds . values ( ) )
Given a list of entity kinds, ensure they are synced properly to the database. Only changed entity kinds are upserted, but all entity kinds (changed and unchanged) are still returned.
1,714
def get_entity_kind ( self , model_obj ) : model_obj_ctype = ContentType . objects . get_for_model ( self . queryset . model ) return ( u'{0}.{1}' . format ( model_obj_ctype . app_label , model_obj_ctype . model ) , u'{0}' . format ( model_obj_ctype ) )
Returns a tuple for a kind name and kind display name of an entity. By default, uses the app_label and model of the model object's content type as the kind.
1,715
def register_entity ( self , entity_config ) : if not issubclass ( entity_config , EntityConfig ) : raise ValueError ( 'Must register entity config class of subclass EntityConfig' ) if entity_config . queryset is None : raise ValueError ( 'Entity config must define queryset' ) model = entity_config . queryset . model self . _entity_registry [ model ] = entity_config ( ) for watching_model , entity_model_getter in entity_config . watching : self . _entity_watching [ watching_model ] . append ( ( model , entity_model_getter ) )
Registers an entity config.
1,716
def start ( host = 'localhost' , port = 61613 , username = '' , password = '' ) : StompClientFactory . username = username StompClientFactory . password = password reactor . connectTCP ( host , port , StompClientFactory ( ) ) reactor . run ( )
Start the Twisted event loop, and the fun should begin...
1,717
def send ( self ) : self . log . info ( "Saying hello (%d)." % self . counter ) f = stomper . Frame ( ) f . unpack ( stomper . send ( DESTINATION , 'hello there (%d)' % self . counter ) ) self . counter += 1 self . transport . write ( f . pack ( ) )
Send out a hello message periodically.
1,718
def connectionMade ( self ) : cmd = stomper . connect ( self . username , self . password ) self . transport . write ( cmd )
Register with the stomp server.
1,719
def dataReceived ( self , data ) : self . stompBuffer . appendData ( data ) while True : msg = self . stompBuffer . getOneMessage ( ) if msg is None : break returned = self . react ( msg ) if returned : self . transport . write ( returned )
Use stompbuffer to determine when a complete message has been received.
1,720
def ack ( self , msg ) : self . log . info ( "receiverId <%s> Received: <%s> " % ( self . receiverId , msg [ 'body' ] ) ) return stomper . NO_REPONSE_NEEDED
Process the message and determine what to do with it.
1,721
def connectionMade ( self ) : cmd = self . sm . connect ( ) self . transport . write ( cmd )
Register with the stomp server.
1,722
def dataReceived ( self , data ) : msg = stomper . unpack_frame ( data ) returned = self . sm . react ( msg ) if returned : self . transport . write ( returned )
Data received; react to it and respond if needed.
1,723
def find_id_in_folder ( self , name , parent_folder_id = 0 ) : if name is None or len ( name ) == 0 : return parent_folder_id offset = 0 resp = self . get_folder_items ( parent_folder_id , limit = 1000 , offset = offset , fields_list = [ 'name' ] ) total = int ( resp [ 'total_count' ] ) while offset < total : found = self . __find_name ( resp , name ) if found is not None : return found offset += int ( len ( resp [ 'entries' ] ) ) resp = self . get_folder_items ( parent_folder_id , limit = 1000 , offset = offset , fields_list = [ 'name' ] ) return None
Find a folder or a file ID from its name inside a given folder.
1,724
def create_folder ( self , name , parent_folder_id = 0 ) : return self . __request ( "POST" , "folders" , data = { "name" : name , "parent" : { "id" : unicode ( parent_folder_id ) } } )
Create a folder.
1,725
def delete_folder ( self , folder_id , recursive = True ) : return self . __request ( "DELETE" , "folders/%s" % ( folder_id , ) , querystring = { 'recursive' : unicode ( recursive ) . lower ( ) } )
Delete an existing folder.
1,726
def get_folder_items ( self , folder_id , limit = 100 , offset = 0 , fields_list = None ) : qs = { "limit" : limit , "offset" : offset } if fields_list : qs [ 'fields' ] = ',' . join ( fields_list ) return self . __request ( "GET" , "folders/%s/items" % ( folder_id , ) , querystring = qs )
Get files and folders inside a given folder.
1,727
def upload_file ( self , name , folder_id , file_path ) : try : return self . __do_upload_file ( name , folder_id , file_path ) except BoxError , ex : if ex . status != 401 : raise return self . __do_upload_file ( name , folder_id , file_path )
Upload a file into a folder.
1,728
def upload_new_file_version ( self , name , folder_id , file_id , file_path ) : try : return self . __do_upload_file ( name , folder_id , file_path , file_id ) except BoxError , ex : if ex . status != 401 : raise return self . __do_upload_file ( name , folder_id , file_path , file_id )
Upload a new version of a file into a folder.
1,729
def chunk_upload_file ( self , name , folder_id , file_path , progress_callback = None , chunk_size = 1024 * 1024 * 1 ) : try : return self . __do_chunk_upload_file ( name , folder_id , file_path , progress_callback , chunk_size ) except BoxError , ex : if ex . status != 401 : raise return self . __do_chunk_upload_file ( name , folder_id , file_path , progress_callback , chunk_size )
Upload a file chunk by chunk.
1,730
def copy_file ( self , file_id , dest_folder_id ) : return self . __request ( "POST" , "/files/" + unicode ( file_id ) + "/copy" , data = { "parent" : { "id" : unicode ( dest_folder_id ) } } )
Copy a file to a new destination.
1,731
def ack ( self , msg ) : self . log . info ( "senderID:%s Received: %s " % ( self . senderID , msg [ 'body' ] ) ) return stomper . NO_REPONSE_NEEDED
Processes the received message. I don't need to generate an ack message.
1,732
def _clear ( self , pipe = None ) : redis = self . redis if pipe is None else pipe redis . delete ( self . key )
Helper for clear operations.
1,733
def _normalize_index ( self , index , pipe = None ) : pipe = self . redis if pipe is None else pipe len_self = self . __len__ ( pipe ) positive_index = index if index >= 0 else len_self + index return len_self , positive_index
Convert negative indexes into their positive equivalents.
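For illustration, assuming the underlying Redis list currently holds five items:

# self._normalize_index(2)   -> (5, 2)
# self._normalize_index(-1)  -> (5, 4)    # -1 + 5
# self._normalize_index(-7)  -> (5, -2)   # still negative; callers must bounds-check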
1,734
def _transaction ( self , fn , * extra_keys ) : results = [ ] def trans ( pipe ) : results . append ( fn ( pipe ) ) self . redis . transaction ( trans , self . key , * extra_keys ) return results [ 0 ]
Helper simplifying code within a watched transaction.
1,735
def recursive_path ( pack , path ) : matches = [ ] for root , _ , filenames in os . walk ( os . path . join ( pack , path ) ) : for filename in filenames : matches . append ( os . path . join ( root , filename ) [ len ( pack ) + 1 : ] ) return matches
Find paths recursively.
1,736
def nack ( messageid , subscriptionid , transactionid = None ) : header = 'subscription:%s\nmessage-id:%s' % ( subscriptionid , messageid ) if transactionid : header += '\ntransaction:%s' % transactionid return "NACK\n%s\n\n\x00\n" % header
STOMP negative acknowledge command.
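For example (the IDs are made up):

print(nack('message-0001', 'sub-0', transactionid='tx-1'))
# NACK
# subscription:sub-0
# message-id:message-0001
# transaction:tx-1
# (blank line, then the NULL terminator)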
1,737
def connect ( username , password , host , heartbeats = ( 0 , 0 ) ) : if len ( heartbeats ) != 2 : raise ValueError ( 'Invalid heartbeat %r' % heartbeats ) cx , cy = heartbeats return "CONNECT\naccept-version:1.1\nhost:%s\nheart-beat:%i,%i\nlogin:%s\npasscode:%s\n\n\x00\n" % ( host , cx , cy , username , password )
STOMP connect command.
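For example (credentials are made up):

print(connect('bob', 's3cret', 'localhost', heartbeats=(4000, 4000)))
# CONNECT
# accept-version:1.1
# host:localhost
# heart-beat:4000,4000
# login:bob
# passcode:s3cret
# (blank line, then the NULL terminator)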
1,738
def ack ( self , msg ) : message_id = msg [ 'headers' ] [ 'message-id' ] subscription = msg [ 'headers' ] [ 'subscription' ] transaction_id = None if 'transaction-id' in msg [ 'headers' ] : transaction_id = msg [ 'headers' ] [ 'transaction-id' ] return ack ( message_id , subscription , transaction_id )
Called when a MESSAGE has been received.
1,739
def getOneMessage ( self ) : ( mbytes , hbytes ) = self . _findMessageBytes ( self . buffer ) if not mbytes : return None msgdata = self . buffer [ : mbytes ] self . buffer = self . buffer [ mbytes : ] hdata = msgdata [ : hbytes ] elems = hdata . split ( '\n' ) cmd = elems . pop ( 0 ) headers = { } for e in elems : try : i = e . find ( ':' ) except ValueError : continue k = e [ : i ] . strip ( ) v = e [ i + 1 : ] . strip ( ) headers [ k ] = v body = msgdata [ hbytes + 2 : - 2 ] msg = { 'cmd' : cmd , 'headers' : headers , 'body' : body , } return msg
I pull one complete message off the buffer and return it decoded as a dict. If there is no complete message in the buffer, I return None.
1,740
def delete_entity_signal_handler ( sender , instance , ** kwargs ) : if instance . __class__ in entity_registry . entity_registry : Entity . all_objects . delete_for_obj ( instance )
Defines a signal handler for syncing an individual entity. Called when an entity is saved or deleted.
1,741
def save_entity_signal_handler ( sender , instance , ** kwargs ) : if instance . __class__ in entity_registry . entity_registry : sync_entities ( instance ) if instance . __class__ in entity_registry . entity_watching : sync_entities_watching ( instance )
Defines a signal handler for saving an entity. Syncs the entity to the entity mirror table.
1,742
def turn_on_syncing ( for_post_save = True , for_post_delete = True , for_m2m_changed = True , for_post_bulk_operation = False ) : if for_post_save : post_save . connect ( save_entity_signal_handler , dispatch_uid = 'save_entity_signal_handler' ) if for_post_delete : post_delete . connect ( delete_entity_signal_handler , dispatch_uid = 'delete_entity_signal_handler' ) if for_m2m_changed : m2m_changed . connect ( m2m_changed_entity_signal_handler , dispatch_uid = 'm2m_changed_entity_signal_handler' ) if for_post_bulk_operation : post_bulk_operation . connect ( bulk_operation_signal_handler , dispatch_uid = 'bulk_operation_signal_handler' )
Enables all of the signals for syncing entities. Everything is True by default except for the post_bulk_operation signal. This is because when any bulk operation occurs on any mirrored entity model, it results in every single entity being synced again. This is not desired behavior for the majority of users and should only be turned on explicitly.
1,743
def scan_elements ( self ) : for x in self . redis . sscan_iter ( self . key ) : yield self . _unpickle ( x )
Yield each of the elements from the collection without pulling them all into memory.
1,744
def places_within_radius ( self , place = None , latitude = None , longitude = None , radius = 0 , ** kwargs ) : kwargs [ 'withdist' ] = True kwargs [ 'withcoord' ] = True kwargs [ 'withhash' ] = False kwargs . setdefault ( 'sort' , 'ASC' ) unit = kwargs . setdefault ( 'unit' , 'km' ) if place is not None : response = self . redis . georadiusbymember ( self . key , self . _pickle ( place ) , radius , ** kwargs ) elif ( latitude is not None ) and ( longitude is not None ) : response = self . redis . georadius ( self . key , longitude , latitude , radius , ** kwargs ) else : raise ValueError ( 'Must specify place, or both latitude and longitude' ) ret = [ ] for item in response : ret . append ( { 'place' : self . _unpickle ( item [ 0 ] ) , 'distance' : item [ 1 ] , 'unit' : unit , 'latitude' : item [ 2 ] [ 1 ] , 'longitude' : item [ 2 ] [ 0 ] , } ) return ret
Return descriptions of the places stored in the collection that are within the circle specified by the given location and radius. A list of dicts will be returned.
1,745
def rotate ( self , n = 1 ) : if n == 0 : return def rotate_trans ( pipe ) : if self . writeback : self . _sync_helper ( pipe ) len_self = self . __len__ ( pipe ) steps = abs_n % len_self if forward : pipe . multi ( ) for __ in range ( steps ) : pipe . rpoplpush ( self . key , self . key ) else : for __ in range ( steps ) : pickled_value = pipe . lpop ( self . key ) pipe . rpush ( self . key , pickled_value ) forward = n >= 0 abs_n = abs ( n ) self . _transaction ( rotate_trans )
Rotate the deque n steps to the right. If n is negative, rotate to the left.
1,746
def is_sub_to_all ( self , * super_entities ) : if super_entities : if len ( super_entities ) == 1 : has_subset = EntityRelationship . objects . filter ( super_entity = super_entities [ 0 ] ) . values_list ( 'sub_entity' , flat = True ) else : has_subset = EntityRelationship . objects . filter ( super_entity__in = super_entities ) . values ( 'sub_entity' ) . annotate ( Count ( 'super_entity' ) ) . filter ( super_entity__count = len ( set ( super_entities ) ) ) . values_list ( 'sub_entity' , flat = True ) return self . filter ( id__in = has_subset ) else : return self
Given a list of super entities, return the entities that have those as a subset of their super entities.
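A usage sketch, assuming this method lives on the Entity manager/queryset and that team and org are previously fetched Entity instances:

# Entities that are below BOTH supers; with no arguments
# the queryset is returned unfiltered.
members = Entity.objects.is_sub_to_all(team, org)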
1,747
def is_sub_to_any ( self , * super_entities ) : if super_entities : return self . filter ( id__in = EntityRelationship . objects . filter ( super_entity__in = super_entities ) . values_list ( 'sub_entity' , flat = True ) ) else : return self
Given a list of super entities, return the entities that have super entities that intersect with those provided.
1,748
def is_sub_to_any_kind ( self , * super_entity_kinds ) : if super_entity_kinds : if len ( super_entity_kinds ) == 1 : entity_pks = EntityRelationship . objects . filter ( super_entity__entity_kind = super_entity_kinds [ 0 ] ) . select_related ( 'entity_kind' , 'sub_entity' ) . values_list ( 'sub_entity' , flat = True ) else : entity_pks = EntityRelationship . objects . filter ( super_entity__entity_kind__in = super_entity_kinds ) . select_related ( 'entity_kind' , 'sub_entity' ) . values_list ( 'sub_entity' , flat = True ) return self . filter ( pk__in = entity_pks ) else : return self
Find all entities that have super_entities of any of the specified kinds.
1,749
def get_for_obj ( self , entity_model_obj ) : return self . get ( entity_type = ContentType . objects . get_for_model ( entity_model_obj , for_concrete_model = False ) , entity_id = entity_model_obj . id )
Given a saved entity model object, return the associated entity.
1,750
def delete_for_obj ( self , entity_model_obj ) : return self . filter ( entity_type = ContentType . objects . get_for_model ( entity_model_obj , for_concrete_model = False ) , entity_id = entity_model_obj . id ) . delete ( force = True )
Delete the entities associated with a model object.
1,751
def all_entities ( self , is_active = True ) : return self . get_all_entities ( return_models = True , is_active = is_active )
Return all the entities in the group.
1,752
def add_entity ( self , entity , sub_entity_kind = None ) : membership = EntityGroupMembership . objects . create ( entity_group = self , entity = entity , sub_entity_kind = sub_entity_kind , ) return membership
Add an entity or sub-entity group to this EntityGroup.
1,753
def bulk_add_entities ( self , entities_and_kinds ) : memberships = [ EntityGroupMembership ( entity_group = self , entity = entity , sub_entity_kind = sub_entity_kind , ) for entity , sub_entity_kind in entities_and_kinds ] created = EntityGroupMembership . objects . bulk_create ( memberships ) return created
Add many entities and sub-entity groups to this EntityGroup.
1,754
def remove_entity ( self , entity , sub_entity_kind = None ) : EntityGroupMembership . objects . get ( entity_group = self , entity = entity , sub_entity_kind = sub_entity_kind , ) . delete ( )
Remove an entity or sub-entity group from this EntityGroup.
1,755
def bulk_remove_entities ( self , entities_and_kinds ) : criteria = [ Q ( entity = entity , sub_entity_kind = entity_kind ) for entity , entity_kind in entities_and_kinds ] criteria = reduce ( lambda q1 , q2 : q1 | q2 , criteria , Q ( ) ) EntityGroupMembership . objects . filter ( criteria , entity_group = self ) . delete ( )
Remove many entities and sub-entity groups from this EntityGroup.
1,756
def bulk_overwrite ( self , entities_and_kinds ) : EntityGroupMembership . objects . filter ( entity_group = self ) . delete ( ) return self . bulk_add_entities ( entities_and_kinds )
Overwrite the group to contain only the given entities and sub-entity groups.
1,757
def set_slug ( apps , schema_editor , class_name ) : Cls = apps . get_model ( 'spectator_events' , class_name ) for obj in Cls . objects . all ( ) : obj . slug = generate_slug ( obj . pk ) obj . save ( update_fields = [ 'slug' ] )
Create a slug for each Work already in the DB.
1,758
def convert_descriptor_and_rows ( self , descriptor , rows ) : primary_key = None schema = tableschema . Schema ( descriptor ) if len ( schema . primary_key ) == 1 : primary_key = schema . primary_key [ 0 ] elif len ( schema . primary_key ) > 1 : message = 'Multi-column primary keys are not supported' raise tableschema . exceptions . StorageError ( message ) data_rows = [ ] index_rows = [ ] jtstypes_map = { } for row in rows : values = [ ] index = None for field , value in zip ( schema . fields , row ) : try : if isinstance ( value , float ) and np . isnan ( value ) : value = None if value and field . type == 'integer' : value = int ( value ) value = field . cast_value ( value ) except tableschema . exceptions . CastError : value = json . loads ( value ) if value is None and field . type in ( 'number' , 'integer' ) : jtstypes_map [ field . name ] = 'number' value = np . NaN if field . name == primary_key : index = value else : values . append ( value ) data_rows . append ( tuple ( values ) ) index_rows . append ( index ) dtypes = [ ] for field in schema . fields : if field . name != primary_key : field_name = field . name if six . PY2 : field_name = field . name . encode ( 'utf-8' ) dtype = self . convert_type ( jtstypes_map . get ( field . name , field . type ) ) dtypes . append ( ( field_name , dtype ) ) index = None columns = schema . headers array = np . array ( data_rows , dtype = dtypes ) if primary_key : index_field = schema . get_field ( primary_key ) index_dtype = self . convert_type ( index_field . type ) index_class = pd . Index if index_field . type in [ 'datetime' , 'date' ] : index_class = pd . DatetimeIndex index = index_class ( index_rows , name = primary_key , dtype = index_dtype ) columns = filter ( lambda column : column != primary_key , schema . headers ) dataframe = pd . DataFrame ( array , index = index , columns = columns ) return dataframe
Convert a descriptor and rows to a Pandas DataFrame.
1,759
def convert_type ( self , type ) : mapping = { 'any' : np . dtype ( 'O' ) , 'array' : np . dtype ( list ) , 'boolean' : np . dtype ( bool ) , 'date' : np . dtype ( 'O' ) , 'datetime' : np . dtype ( 'datetime64[ns]' ) , 'duration' : np . dtype ( 'O' ) , 'geojson' : np . dtype ( 'O' ) , 'geopoint' : np . dtype ( 'O' ) , 'integer' : np . dtype ( int ) , 'number' : np . dtype ( float ) , 'object' : np . dtype ( dict ) , 'string' : np . dtype ( 'O' ) , 'time' : np . dtype ( 'O' ) , 'year' : np . dtype ( int ) , 'yearmonth' : np . dtype ( 'O' ) , } if type not in mapping : message = 'Type "%s" is not supported' % type raise tableschema . exceptions . StorageError ( message ) return mapping [ type ]
Convert a Table Schema type to a Pandas (NumPy) dtype.
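For example, given an instance (here called storage, hypothetically) of the class defining convert_type:

storage.convert_type('integer')   # -> np.dtype(int)
storage.convert_type('datetime')  # -> np.dtype('datetime64[ns]')
storage.convert_type('email')     # raises tableschema.exceptions.StorageError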
1,760
def restore_descriptor ( self , dataframe ) : fields = [ ] primary_key = None if dataframe . index . name : field_type = self . restore_type ( dataframe . index . dtype ) field = { 'name' : dataframe . index . name , 'type' : field_type , 'constraints' : { 'required' : True } , } fields . append ( field ) primary_key = dataframe . index . name for column , dtype in dataframe . dtypes . iteritems ( ) : sample = dataframe [ column ] . iloc [ 0 ] if len ( dataframe ) else None field_type = self . restore_type ( dtype , sample = sample ) field = { 'name' : column , 'type' : field_type } fields . append ( field ) descriptor = { } descriptor [ 'fields' ] = fields if primary_key : descriptor [ 'primaryKey' ] = primary_key return descriptor
Restore a Table Schema descriptor from a Pandas DataFrame.
1,761
def restore_row ( self , row , schema , pk ) : result = [ ] for field in schema . fields : if schema . primary_key and schema . primary_key [ 0 ] == field . name : if field . type == 'number' and np . isnan ( pk ) : pk = None if pk and field . type == 'integer' : pk = int ( pk ) result . append ( field . cast_value ( pk ) ) else : value = row [ field . name ] if field . type == 'number' and np . isnan ( value ) : value = None if value and field . type == 'integer' : value = int ( value ) elif field . type == 'datetime' : value = value . to_pydatetime ( ) result . append ( field . cast_value ( value ) ) return result
Restore a row from a Pandas record.
1,762
def restore_type ( self , dtype , sample = None ) : if pdc . is_bool_dtype ( dtype ) : return 'boolean' elif pdc . is_datetime64_any_dtype ( dtype ) : return 'datetime' elif pdc . is_integer_dtype ( dtype ) : return 'integer' elif pdc . is_numeric_dtype ( dtype ) : return 'number' if sample is not None : if isinstance ( sample , ( list , tuple ) ) : return 'array' elif isinstance ( sample , datetime . date ) : return 'date' elif isinstance ( sample , isodate . Duration ) : return 'duration' elif isinstance ( sample , dict ) : return 'object' elif isinstance ( sample , six . string_types ) : return 'string' elif isinstance ( sample , datetime . time ) : return 'time' return 'string'
Restore a Table Schema type from a Pandas dtype.
1,763
def domain_urlize ( value ) : parsed_uri = urlparse ( value ) domain = '{uri.netloc}' . format ( uri = parsed_uri ) if domain . startswith ( 'www.' ) : domain = domain [ 4 : ] return format_html ( '<a href="{}" rel="nofollow">{}</a>' , value , domain )
Returns an HTML link to the supplied URL, but only using the domain as the text. Strips 'www.' from the start of the domain if present.
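For example:

domain_urlize('https://www.example.org/some/page/')
# -> '<a href="https://www.example.org/some/page/" rel="nofollow">example.org</a>'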
1,764
def current_url_name ( context ) : url_name = False if context . request . resolver_match : url_name = "{}:{}" . format ( context . request . resolver_match . namespace , context . request . resolver_match . url_name ) return url_name
Returns the name of the current URL, namespaced, or False.
1,765
def most_read_creators_card ( num = 10 ) : if spectator_apps . is_enabled ( 'reading' ) : object_list = most_read_creators ( num = num ) object_list = chartify ( object_list , 'num_readings' , cutoff = 1 ) return { 'card_title' : 'Most read authors' , 'score_attr' : 'num_readings' , 'object_list' : object_list , }
Displays a card showing the Creators who have the most Readings associated with their Publications.
1,766
def most_visited_venues_card ( num = 10 ) : if spectator_apps . is_enabled ( 'events' ) : object_list = most_visited_venues ( num = num ) object_list = chartify ( object_list , 'num_visits' , cutoff = 1 ) return { 'card_title' : 'Most visited venues' , 'score_attr' : 'num_visits' , 'object_list' : object_list , }
Displays a card showing the Venues that have the most Events.
1,767
def has_urls ( self ) : "Handy for templates." if self . isbn_uk or self . isbn_us or self . official_url or self . notes_url : return True else : return False
Handy for templates.
1,768
def get_queryset ( self ) : "Reduce the number of queries and speed things up." qs = super ( ) . get_queryset ( ) qs = qs . select_related ( 'publication__series' ) . prefetch_related ( 'publication__roles__creator' ) return qs
Reduce the number of queries and speed things up.
1,769
def set_slug ( apps , schema_editor ) : Creator = apps . get_model ( 'spectator_core' , 'Creator' ) for c in Creator . objects . all ( ) : c . slug = generate_slug ( c . pk ) c . save ( update_fields = [ 'slug' ] )
Create a slug for each Creator already in the DB.
1,770
def forwards ( apps , schema_editor ) : Event = apps . get_model ( 'spectator_events' , 'Event' ) ClassicalWorkSelection = apps . get_model ( 'spectator_events' , 'ClassicalWorkSelection' ) DancePieceSelection = apps . get_model ( 'spectator_events' , 'DancePieceSelection' ) for event in Event . objects . all ( ) : for work in event . classicalworks . all ( ) : selection = ClassicalWorkSelection ( classical_work = work , event = event ) selection . save ( ) for piece in event . dancepieces . all ( ) : selection = DancePieceSelection ( dance_piece = piece , event = event ) selection . save ( )
Copy the ClassicalWork and DancePiece data to use the new through models.
1,771
def forwards ( apps , schema_editor ) : Event = apps . get_model ( 'spectator_events' , 'Event' ) for event in Event . objects . all ( ) : if event . venue is not None : event . venue_name = event . venue . name event . save ( )
Set the venue_name field of all Events that have a Venue.
1,772
def forwards ( apps , schema_editor ) : Event = apps . get_model ( 'spectator_events' , 'Event' ) for ev in Event . objects . filter ( kind = 'exhibition' ) : ev . kind = 'museum' ev . save ( )
Migrate all exhibition Events to the new museum Event kind.
1,773
def chartify ( qs , score_field , cutoff = 0 , ensure_chartiness = True ) : chart = [ ] position = 0 prev_obj = None for counter , obj in enumerate ( qs ) : score = getattr ( obj , score_field ) if score != getattr ( prev_obj , score_field , None ) : position = counter + 1 if cutoff is None or score > cutoff : obj . chart_position = position chart . append ( obj ) prev_obj = obj if ensure_chartiness and len ( chart ) > 0 : if getattr ( chart [ 0 ] , score_field ) == getattr ( chart [ - 1 ] , score_field ) : chart = [ ] return chart
Given a QuerySet, it will go through and add a chart_position property to each object, returning a list of the objects.
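A small worked example using stand-in objects (SimpleNamespace rather than model instances):

from types import SimpleNamespace

objs = [SimpleNamespace(name=n, num_readings=s)
        for n, s in [('a', 5), ('b', 5), ('c', 3), ('d', 1)]]
chart = chartify(objs, 'num_readings', cutoff=1)
# [(o.name, o.chart_position) for o in chart] -> [('a', 1), ('b', 1), ('c', 3)]
# 'd' falls at the cutoff and is dropped; tied scores share a position.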
1,774
def by_visits ( self , event_kind = None ) : qs = self . get_queryset ( ) if event_kind is not None : qs = qs . filter ( event__kind = event_kind ) qs = qs . annotate ( num_visits = Count ( 'event' ) ) . order_by ( '-num_visits' , 'name_sort' ) return qs
Gets Venues in order of how many Events have been held there. Adds a num_visits field to each one.
1,775
def by_views ( self , kind = None ) : qs = self . get_queryset ( ) if kind is not None : qs = qs . filter ( kind = kind ) qs = qs . annotate ( num_views = Count ( 'event' ) ) . order_by ( '-num_views' , 'title_sort' ) return qs
Gets Works in order of how many times they've been attached to Events.
1,776
def naturalize_person ( self , string ) : suffixes = [ 'Jr' , 'Jr.' , 'Sr' , 'Sr.' , 'I' , 'II' , 'III' , 'IV' , 'V' , ] suffixes = suffixes + [ s . lower ( ) for s in suffixes ] particles = [ 'Le' , 'La' , 'Von' , 'Van' , 'Du' , 'De' , ] surname = '' names = '' suffix = '' sort_string = string parts = string . split ( ' ' ) if parts [ - 1 ] in suffixes : suffix = parts [ - 1 ] parts = parts [ 0 : - 1 ] sort_string = ' ' . join ( parts ) if len ( parts ) > 1 : if parts [ - 2 ] in particles : parts = parts [ 0 : - 2 ] + [ ' ' . join ( parts [ - 2 : ] ) ] sort_string = '{}, {}' . format ( parts [ - 1 ] , ' ' . join ( parts [ : - 1 ] ) ) if suffix : sort_string = '{} {}' . format ( sort_string , suffix ) sort_string = self . _naturalize_numbers ( sort_string ) return sort_string
Attempt to make a version of the string that has the surname, if any, at the start.
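Some illustrative inputs and outputs, tracing the code above (naturalizer is a hypothetical instance of the class that defines this method):

naturalizer.naturalize_person('Bill Jones Jr.')  # -> 'Jones, Bill Jr.'
naturalizer.naturalize_person('John Le Carre')   # -> 'Le Carre, John'  (particle kept with the surname)
naturalizer.naturalize_person('Prince')          # -> 'Prince'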
1,777
def forward ( apps , schema_editor ) : Event = apps . get_model ( 'spectator_events' , 'Event' ) MovieSelection = apps . get_model ( 'spectator_events' , 'MovieSelection' ) PlaySelection = apps . get_model ( 'spectator_events' , 'PlaySelection' ) for event in Event . objects . all ( ) : if event . movie is not None : selection = MovieSelection ( event = event , movie = event . movie ) selection . save ( ) if event . play is not None : selection = PlaySelection ( event = event , play = event . play ) selection . save ( )
Copying data from the old Event.movie and Event.play ForeignKey fields into the new Event.movies and Event.plays ManyToManyFields.
1,778
def set_slug ( apps , schema_editor ) : Event = apps . get_model ( 'spectator_events' , 'Event' ) for e in Event . objects . all ( ) : e . slug = generate_slug ( e . pk ) e . save ( update_fields = [ 'slug' ] )
Create a slug for each Event already in the DB.
1,779
def page ( self , number , * args , ** kwargs ) : page = super ( ) . page ( number , * args , ** kwargs ) number = int ( number ) num_pages , body , tail , padding , margin = self . num_pages , self . body , self . tail , self . padding , self . margin main_range = list ( map ( int , [ math . floor ( number - body / 2.0 ) + 1 , math . floor ( number + body / 2.0 ) ] ) ) if main_range [ 0 ] < 1 : main_range = list ( map ( abs ( main_range [ 0 ] - 1 ) . __add__ , main_range ) ) if main_range [ 1 ] > num_pages : main_range = list ( map ( ( num_pages - main_range [ 1 ] ) . __add__ , main_range ) ) if main_range [ 0 ] <= tail + margin : leading = [ ] main_range = [ 1 , max ( body , min ( number + padding , main_range [ 1 ] ) ) ] main_range [ 0 ] = 1 else : leading = list ( range ( 1 , tail + 1 ) ) if self . align_left : trailing = [ ] else : if main_range [ 1 ] >= num_pages - ( tail + margin ) + 1 : trailing = [ ] if not leading : main_range = [ 1 , num_pages ] else : main_range = [ min ( num_pages - body + 1 , max ( number - padding , main_range [ 0 ] ) ) , num_pages ] else : trailing = list ( range ( num_pages - tail + 1 , num_pages + 1 ) ) main_range = [ max ( main_range [ 0 ] , 1 ) , min ( main_range [ 1 ] , num_pages ) ] page . main_range = list ( range ( main_range [ 0 ] , main_range [ 1 ] + 1 ) ) page . leading_range = leading page . trailing_range = trailing page . page_range = reduce ( lambda x , y : x + ( ( x and y ) and [ False ] ) + y , [ page . leading_range , page . main_range , page . trailing_range ] ) page . __class__ = DiggPage return page
Return a standard Page instance with custom digg-specific page ranges attached.
1,780
def version ( ) : namespace = { } with open ( os . path . join ( 'mrcfile' , 'version.py' ) ) as f : exec ( f . read ( ) , namespace ) return namespace [ '__version__' ]
Get the version number without importing the mrcfile package.
1,781
def get_event_kind ( self ) : slug = self . kwargs . get ( 'kind_slug' , None ) if slug is None : return None else : slugs_to_kinds = { v : k for k , v in Event . KIND_SLUGS . items ( ) } return slugs_to_kinds . get ( slug , None )
Unless we're on the front page we'll have a kind_slug, like 'movies'. We need to translate that into an event kind, like 'movie'.
1,782
def get_queryset ( self ) : "Restrict to a single kind of event, if any, and include Venue data." qs = super ( ) . get_queryset ( ) kind = self . get_event_kind ( ) if kind is not None : qs = qs . filter ( kind = kind ) qs = qs . select_related ( 'venue' ) return qs
Restrict to a single kind of event, if any, and include Venue data.
1,783
def get_work_kind ( self ) : slugs_to_kinds = { v : k for k , v in Work . KIND_SLUGS . items ( ) } return slugs_to_kinds . get ( self . kind_slug , None )
We'll have a kind_slug, like 'movies'. We need to translate that into a work kind, like 'movie'.
1,784
def get_countries ( self ) : qs = Venue . objects . values ( 'country' ) . exclude ( country = '' ) . distinct ( ) . order_by ( 'country' ) countries = [ ] for c in qs : countries . append ( { 'code' : c [ 'country' ] , 'name' : Venue . get_country_name ( c [ 'country' ] ) } ) return sorted ( countries , key = lambda k : k [ 'name' ] )
Returns a list of dicts, one per country that has at least one Venue in it.
1,785
def forwards ( apps , schema_editor ) : Work = apps . get_model ( 'spectator_events' , 'Work' ) for work in Work . objects . all ( ) : if not work . slug : work . slug = generate_slug ( work . pk ) work . save ( )
Re-save all the Works because something earlier didn't create their slugs.
1,786
def annual_event_counts_card ( kind = 'all' , current_year = None ) : if kind == 'all' : card_title = 'Events per year' else : card_title = '{} per year' . format ( Event . get_kind_name_plural ( kind ) ) return { 'card_title' : card_title , 'kind' : kind , 'years' : annual_event_counts ( kind = kind ) , 'current_year' : current_year }
Displays years and the number of events per year.
1,787
def event_list_tabs ( counts , current_kind , page_number = 1 ) : return { 'counts' : counts , 'current_kind' : current_kind , 'page_number' : page_number , 'event_kinds' : Event . get_kinds ( ) , 'event_kinds_data' : Event . get_kinds_data ( ) , }
Displays the tabs to different event_list pages.
1,788
def day_events_card ( date ) : d = date . strftime ( app_settings . DATE_FORMAT ) card_title = 'Events on {}' . format ( d ) return { 'card_title' : card_title , 'event_list' : day_events ( date = date ) , }
Displays Events that happened on the supplied date. date is a date object.
1,789
def most_seen_creators_card ( event_kind = None , num = 10 ) : object_list = most_seen_creators ( event_kind = event_kind , num = num ) object_list = chartify ( object_list , 'num_events' , cutoff = 1 ) return { 'card_title' : 'Most seen people/groups' , 'score_attr' : 'num_events' , 'object_list' : object_list , }
Displays a card showing the Creators that are associated with the most Events.
1,790
def most_seen_creators_by_works ( work_kind = None , role_name = None , num = 10 ) : return Creator . objects . by_works ( kind = work_kind , role_name = role_name ) [ : num ]
Returns a QuerySet of the Creators that are associated with the most Works.
1,791
def most_seen_creators_by_works_card ( work_kind = None , role_name = None , num = 10 ) : object_list = most_seen_creators_by_works ( work_kind = work_kind , role_name = role_name , num = num ) object_list = chartify ( object_list , 'num_works' , cutoff = 1 ) if role_name : creators_name = '{}s' . format ( role_name . capitalize ( ) ) else : creators_name = 'People/groups' if work_kind : works_name = Work . get_kind_name_plural ( work_kind ) . lower ( ) else : works_name = 'works' card_title = '{} with most {}' . format ( creators_name , works_name ) return { 'card_title' : card_title , 'score_attr' : 'num_works' , 'object_list' : object_list , }
Displays a card showing the Creators that are associated with the most Works.
1,792
def most_seen_works_card ( kind = None , num = 10 ) : object_list = most_seen_works ( kind = kind , num = num ) object_list = chartify ( object_list , 'num_views' , cutoff = 1 ) if kind : card_title = 'Most seen {}' . format ( Work . get_kind_name_plural ( kind ) . lower ( ) ) else : card_title = 'Most seen works' return { 'card_title' : card_title , 'score_attr' : 'num_views' , 'object_list' : object_list , 'name_attr' : 'title' , 'use_cite' : True , }
Displays a card showing the Works that are associated with the most Events.
1,793
def forwards ( apps , schema_editor ) : Movie = apps . get_model ( 'spectator_events' , 'Movie' ) Work = apps . get_model ( 'spectator_events' , 'Work' ) WorkRole = apps . get_model ( 'spectator_events' , 'WorkRole' ) WorkSelection = apps . get_model ( 'spectator_events' , 'WorkSelection' ) for m in Movie . objects . all ( ) : work = Work . objects . create ( kind = 'movie' , title = m . title , title_sort = m . title_sort , year = m . year , imdb_id = m . imdb_id ) for role in m . roles . all ( ) : WorkRole . objects . create ( creator = role . creator , work = work , role_name = role . role_name , role_order = role . role_order ) for selection in m . events . all ( ) : WorkSelection . objects . create ( event = selection . event , work = work , order = selection . order ) m . delete ( )
Change all Movie objects into Work objects, and their associated data into WorkRole and WorkSelection models, then delete the Movie.
1,794
def paginate_queryset ( self , queryset , page_size ) : paginator = self . get_paginator ( queryset , page_size , orphans = self . get_paginate_orphans ( ) , allow_empty_first_page = self . get_allow_empty ( ) , body = self . paginator_body , margin = self . paginator_margin , padding = self . paginator_padding , tail = self . paginator_tail , ) page_kwarg = self . page_kwarg page = self . kwargs . get ( page_kwarg ) or self . request . GET . get ( page_kwarg ) or 1 try : page_number = int ( page ) except ValueError : if page == 'last' : page_number = paginator . num_pages else : raise Http404 ( _ ( "Page is not 'last', nor can it be converted to an int." ) ) try : page = paginator . page ( page_number , softlimit = False ) return ( paginator , page , page . object_list , page . has_other_pages ( ) ) except InvalidPage as e : raise Http404 ( _ ( 'Invalid page (%(page_number)s): %(message)s' ) % { 'page_number' : page_number , 'message' : str ( e ) } )
Paginate the queryset if needed.
1,795
def day_publications ( date ) : readings = Reading . objects . filter ( start_date__lte = date ) . filter ( Q ( end_date__gte = date ) | Q ( end_date__isnull = True ) ) if readings : return Publication . objects . filter ( reading__in = readings ) . select_related ( 'series' ) . prefetch_related ( 'roles__creator' ) . distinct ( ) else : return Publication . objects . none ( )
Returns a QuerySet of Publications that were being read on date. date is a date object.
1,796
def day_publications_card ( date ) : d = date . strftime ( app_settings . DATE_FORMAT ) card_title = 'Reading on {}' . format ( d ) return { 'card_title' : card_title , 'publication_list' : day_publications ( date = date ) , }
Displays Publications that were being read on date. date is a date object.
1,797
def forwards ( apps , schema_editor ) : Event = apps . get_model ( 'spectator_events' , 'Event' ) for ev in Event . objects . filter ( kind = 'movie' ) : ev . kind = 'cinema' ev . save ( ) for ev in Event . objects . filter ( kind = 'play' ) : ev . kind = 'theatre' ev . save ( )
Change Events with kind 'movie' to 'cinema', and Events with kind 'play' to 'theatre'.
1,798
def get_env_variable ( var_name , default = None ) : try : return os . environ [ var_name ] except KeyError : if default is None : error_msg = "Set the %s environment variable" % var_name raise ImproperlyConfigured ( error_msg ) else : return default
Get the environment variable, or return the default; raise an exception if it's not set and no default is given.
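Typical use in a Django settings module, for example:

SECRET_KEY = get_env_variable('SECRET_KEY')             # raises ImproperlyConfigured if unset
DEBUG = get_env_variable('DEBUG', default='0') == '1'   # falls back to the default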
1,799
def forwards ( apps , schema_editor ) : Event = apps . get_model ( 'spectator_events' , 'Event' ) Work = apps . get_model ( 'spectator_events' , 'Work' ) WorkRole = apps . get_model ( 'spectator_events' , 'WorkRole' ) WorkSelection = apps . get_model ( 'spectator_events' , 'WorkSelection' ) for event in Event . objects . filter ( kind = 'museum' ) : work = Work . objects . create ( kind = 'exhibition' , title = event . title , title_sort = event . title_sort ) work . slug = generate_slug ( work . pk ) work . save ( ) WorkSelection . objects . create ( event = event , work = work ) for role in event . roles . all ( ) : WorkRole . objects . create ( creator = role . creator , work = work , role_name = role . role_name , role_order = role . role_order ) role . delete ( )
Having added the new 'exhibition' Work type, we're going to assume that every Event of type 'museum' should actually have one Exhibition attached.