idx
int64 0
63k
| question
stringlengths 61
4.03k
| target
stringlengths 6
1.23k
|
---|---|---|
61,700 |
def track_exception(self, type=None, value=None, tb=None, properties=None, measurements=None):
    """Send information about a single exception that occurred in the application.

    When no exception triple is supplied, falls back to ``sys.exc_info()``;
    if there is no active exception either, a placeholder exception is
    synthesized so a triple is always available.
    """
    if not type or not value or not tb:
        type, value, tb = sys.exc_info()
    if not type or not value or not tb:
        try:
            raise Exception(NULL_CONSTANT_STRING)
        except:
            type, value, tb = sys.exc_info()

    details = channel.contracts.ExceptionDetails()
    details.id = 1
    details.outer_id = 0
    details.type_name = type.__name__
    details.message = str(value)
    details.has_full_stack = True

    # Convert the traceback into StackFrame contracts, innermost frame first.
    for depth, (frame_file, frame_line, frame_func, _frame_text) in enumerate(traceback.extract_tb(tb)):
        frame = channel.contracts.StackFrame()
        frame.assembly = 'Unknown'
        frame.file_name = frame_file
        frame.level = depth
        frame.line = frame_line
        frame.method = frame_func
        details.parsed_stack.append(frame)
    details.parsed_stack.reverse()

    data = channel.contracts.ExceptionData()
    data.handled_at = 'UserCode'
    data.exceptions.append(details)
    if properties:
        data.properties = properties
    if measurements:
        data.measurements = measurements
    self.track(data, self._context)
|
Send information about a single exception that occurred in the application .
|
61,701 |
def track_event(self, name, properties=None, measurements=None):
    """Send information about a single event that has occurred in the context of the application."""
    event = channel.contracts.EventData()
    event.name = name or NULL_CONSTANT_STRING
    if properties:
        event.properties = properties
    if measurements:
        event.measurements = measurements
    self.track(event, self._context)
|
Send information about a single event that has occurred in the context of the application .
|
61,702 |
def track_metric(self, name, value, type=None, count=None, min=None, max=None, std_dev=None, properties=None):
    """Send information about a single metric data point that was captured for the application."""
    point = channel.contracts.DataPoint()
    point.name = name or NULL_CONSTANT_STRING
    point.value = value or 0
    point.kind = type or channel.contracts.DataPointType.aggregation
    point.count = count
    point.min = min
    point.max = max
    point.std_dev = std_dev

    data = channel.contracts.MetricData()
    data.metrics.append(point)
    if properties:
        data.properties = properties
    self.track(data, self._context)
|
Send information about a single metric data point that was captured for the application .
|
61,703 |
def track_trace(self, name, properties=None, severity=None):
    """Send a single trace statement."""
    data = channel.contracts.MessageData()
    data.message = name or NULL_CONSTANT_STRING
    if properties:
        data.properties = properties
    if severity is not None:
        # Map the Python logging level onto the contract's severity level.
        data.severity_level = channel.contracts.MessageData.PYTHON_LOGGING_LEVELS.get(severity)
    self.track(data, self._context)
|
Sends a single trace statement .
|
61,704 |
def track_request(self, name, url, success, start_time=None, duration=None,
                  response_code=None, http_method=None, properties=None,
                  measurements=None, request_id=None):
    """Send a single request that was captured for the application.

    :param name: name of the request
    :param url: the request URL
    :param success: whether the request succeeded
    :param start_time: ISO start time; defaults to now (UTC)
    :param duration: duration in milliseconds
    :param response_code: HTTP response code; defaults to '200'
    :param http_method: HTTP method; defaults to 'GET'
    :param request_id: unique id; a uuid4 is generated when omitted
    """
    data = channel.contracts.RequestData()
    data.id = request_id or str(uuid.uuid4())
    data.name = name
    data.url = url
    data.success = success
    data.start_time = start_time or datetime.datetime.utcnow().isoformat() + 'Z'
    data.duration = self.__ms_to_duration(duration)
    # BUG FIX: str(response_code) is always truthy (str(None) == 'None'), so
    # the original `str(response_code) or '200'` never applied the default and
    # sent the literal string 'None' for a missing code.
    data.response_code = str(response_code) if response_code is not None else '200'
    data.http_method = http_method or 'GET'
    if properties:
        data.properties = properties
    if measurements:
        data.measurements = measurements
    self.track(data, self._context)
|
Sends a single request that was captured for the application .
|
61,705 |
def track_dependency(self, name, data, type=None, target=None, duration=None,
                     success=None, result_code=None, properties=None,
                     measurements=None, dependency_id=None):
    """Send a single dependency telemetry that was captured for the application.

    :param name: name of the dependency call
    :param data: command/query executed (e.g. full URL or SQL statement)
    :param type: dependency type (e.g. HTTP, SQL)
    :param duration: duration in milliseconds
    :param result_code: result code of the call; defaults to '200'
    :param dependency_id: unique id; a uuid4 is generated when omitted
    """
    dependency_data = channel.contracts.RemoteDependencyData()
    dependency_data.id = dependency_id or str(uuid.uuid4())
    dependency_data.name = name
    dependency_data.data = data
    dependency_data.type = type
    dependency_data.target = target
    dependency_data.duration = self.__ms_to_duration(duration)
    dependency_data.success = success
    # BUG FIX: str(result_code) is always truthy (str(None) == 'None'), so the
    # original `str(result_code) or '200'` never applied the default and a
    # missing code was sent as the string 'None'.
    dependency_data.result_code = str(result_code) if result_code is not None else '200'
    if properties:
        dependency_data.properties = properties
    if measurements:
        dependency_data.measurements = measurements
    self.track(dependency_data, self._context)
|
Sends a single dependency telemetry that was captured for the application .
|
61,706 |
def dummy_client(reason):
    """Create a dummy telemetry client with a null sender.

    Even when we are not logging telemetry, callers that require a real
    TelemetryClient object still get one; everything it sends is discarded.
    ``reason`` is accepted for the caller's benefit and is not used here.
    """
    sender = applicationinsights.channel.NullSender()
    queue = applicationinsights.channel.SynchronousQueue(sender)
    channel = applicationinsights.channel.TelemetryChannel(None, queue)
    return applicationinsights.TelemetryClient(
        "00000000-0000-0000-0000-000000000000", channel)
|
Creates a dummy client so that even if we're not logging telemetry, we can still pass along a real object to the things that depend on it existing.
|
61,707 |
def enable(instrumentation_key, *args, **kwargs):
    """Enable automatic collection of unhandled exceptions.

    Installs an ``excepthook`` interceptor (once) and records the
    instrumentation key; multiple calls with different keys register
    each key for submission.
    """
    if not instrumentation_key:
        raise Exception('Instrumentation key was required but not provided')
    global original_excepthook
    global telemetry_channel
    telemetry_channel = kwargs.get('telemetry_channel')
    if not original_excepthook:
        # Only hook once; keep the original so it can still be invoked.
        original_excepthook = sys.excepthook
        sys.excepthook = intercept_excepthook
    if instrumentation_key not in enabled_instrumentation_keys:
        enabled_instrumentation_keys.append(instrumentation_key)
|
Enables the automatic collection of unhandled exceptions . Captured exceptions will be sent to the Application Insights service before being re - thrown . Multiple calls to this function with different instrumentation keys result in multiple instances being submitted one for each key .
|
61,708 |
def init_app(self, app):
    """Initialize the extension for the provided Flask application."""
    self._key = app.config.get(CONF_KEY) or getenv(CONF_KEY)
    if not self._key:
        # Without an instrumentation key there is nothing to set up.
        return
    self._endpoint_uri = app.config.get(CONF_ENDPOINT_URI)
    sender = AsynchronousSender(self._endpoint_uri)
    queue = AsynchronousQueue(sender)
    self._channel = TelemetryChannel(None, queue)
    self._init_request_logging(app)
    self._init_trace_logging(app)
    self._init_exception_logging(app)
|
Initializes the extension for the provided Flask application .
|
61,709 |
def _init_request_logging(self, app):
    """Set up request logging unless APPINSIGHTS_DISABLE_REQUEST_LOGGING is set in the Flask config."""
    if app.config.get(CONF_DISABLE_REQUEST_LOGGING, False):
        return
    self._requests_middleware = WSGIApplication(
        self._key, app.wsgi_app, telemetry_channel=self._channel)
    # Wrap the WSGI app so every request passes through the middleware.
    app.wsgi_app = self._requests_middleware
|
Sets up request logging unless APPINSIGHTS_DISABLE_REQUEST_LOGGING is set in the Flask config .
|
61,710 |
def _init_trace_logging(self, app):
    """Set up trace logging unless APPINSIGHTS_DISABLE_TRACE_LOGGING is set in the Flask config."""
    if app.config.get(CONF_DISABLE_TRACE_LOGGING, False):
        return
    self._trace_log_handler = LoggingHandler(
        self._key, telemetry_channel=self._channel)
    app.logger.addHandler(self._trace_log_handler)
|
Sets up trace logging unless APPINSIGHTS_DISABLE_TRACE_LOGGING is set in the Flask config .
|
61,711 |
def _init_exception_logging(self, app):
    """Set up exception logging unless APPINSIGHTS_DISABLE_EXCEPTION_LOGGING is set in the Flask config."""
    if app.config.get(CONF_DISABLE_EXCEPTION_LOGGING, False):
        return
    exception_telemetry_client = TelemetryClient(
        self._key, telemetry_channel=self._channel)

    @app.errorhandler(Exception)
    def exception_handler(exception):
        # HTTP exceptions are part of normal Flask flow; pass them through.
        if HTTPException and isinstance(exception, HTTPException):
            return exception
        # Re-raise so track_exception() can pick the traceback up from
        # sys.exc_info(); the finally clause re-throws for Flask to handle.
        try:
            raise exception
        except Exception:
            exception_telemetry_client.track_exception()
        finally:
            raise exception

    self._exception_telemetry_client = exception_telemetry_client
|
Sets up exception logging unless APPINSIGHTS_DISABLE_EXCEPTION_LOGGING is set in the Flask config .
|
61,712 |
def flush(self):
    """Flush the queued up telemetry to the service."""
    for component in (self._requests_middleware,
                      self._trace_log_handler,
                      self._exception_telemetry_client):
        if component:
            component.flush()
|
Flushes the queued up telemetry to the service .
|
61,713 |
def get(self):
    """Get a single item from the queue; return None when the queue is empty."""
    try:
        item = self._queue.get_nowait()
    except (Empty, PersistEmpty):
        return None
    if self._persistence_path:
        # A persisted queue requires an explicit acknowledgement.
        self._queue.task_done()
    return item
|
Gets a single item from the queue and returns it . If the queue is empty this method will return None .
|
61,714 |
def enable(instrumentation_key, *args, **kwargs):
    """Enable the Application Insights logging handler on the root logger.

    Multiple calls with different instrumentation keys install multiple
    handler instances; a repeated key replaces the earlier handler.
    """
    if not instrumentation_key:
        raise Exception('Instrumentation key was required but not provided')
    # Replace any handler previously installed for this key.
    if instrumentation_key in enabled_instrumentation_keys:
        logging.getLogger().removeHandler(
            enabled_instrumentation_keys[instrumentation_key])
    async_ = kwargs.pop('async_', False)
    endpoint = kwargs.pop('endpoint', None)
    telemetry_channel = kwargs.get('telemetry_channel')
    if telemetry_channel and async_:
        raise Exception('Incompatible arguments async_ and telemetry_channel')
    if telemetry_channel and endpoint:
        raise Exception('Incompatible arguments endpoint and telemetry_channel')
    if not telemetry_channel:
        if async_:
            sender, queue = AsynchronousSender, AsynchronousQueue
        else:
            sender, queue = SynchronousSender, SynchronousQueue
        kwargs['telemetry_channel'] = TelemetryChannel(queue=queue(sender(endpoint)))
    log_level = kwargs.pop('level', logging.INFO)
    handler = LoggingHandler(instrumentation_key, *args, **kwargs)
    handler.setLevel(log_level)
    enabled_instrumentation_keys[instrumentation_key] = handler
    logging.getLogger().addHandler(handler)
    return handler
|
Enables the Application Insights logging handler for the root logger for the supplied instrumentation key . Multiple calls to this function with different instrumentation keys result in multiple handler instances .
|
61,715 |
def start(self):
    """Start a new sender thread if one is not already running."""
    with self._lock_send_remaining_time:
        # A positive remaining time means a sender thread is already active.
        if self._send_remaining_time <= 0.0:
            # Clamp the interval to at least 100ms, and wait at least one
            # interval before the first send.
            interval = max(self._send_interval, 0.1)
            self._send_remaining_time = max(self._send_time, interval)
            worker = Thread(target=self._run)
            worker.daemon = True
            worker.start()
|
Starts a new sender thread if one is not already running
|
61,716 |
def device_initialize(self):
    """Device initializer that assigns special properties to all device context objects."""
    # Run the previously-registered initializer first, then overlay host info.
    existing_device_initialize(self)
    self.type = 'Other'
    self.id = platform.node()
    self.os_version = platform.version()
    self.locale = locale.getdefaultlocale()[0]
|
The device initializer used to assign special properties to all device context objects
|
61,717 |
def sign(message: bytes, sign_key: SignKey) -> Signature:
    """Sign the message and return the signature."""
    logger = logging.getLogger(__name__)
    logger.debug("Bls::sign: >>> message: %r, sign_key: %r", message, sign_key)
    c_instance = c_void_p()
    do_call('indy_crypto_bls_sign',
            message, len(message), sign_key.c_instance, byref(c_instance))
    res = Signature(c_instance)
    logger.debug("Bls::sign: <<< res: %r", res)
    return res
|
Signs the message and returns signature .
|
61,718 |
def verify(signature: Signature, message: bytes, ver_key: VerKey, gen: Generator) -> bool:
    """Verify the message signature; returns whether the signature is valid."""
    logger = logging.getLogger(__name__)
    logger.debug("Bls::verify: >>> signature: %r, message: %r, ver_key: %r, gen: %r",
                 signature, message, ver_key, gen)
    valid = c_bool()
    # NOTE(review): the native symbol is spelled 'bsl' here while sign() uses
    # 'bls' -- confirm against the library's exported names before changing it.
    do_call('indy_crypto_bsl_verify',
            signature.c_instance,
            message,
            len(message),
            ver_key.c_instance,
            gen.c_instance,
            byref(valid))
    res = valid
    logger.debug("Bls::verify: <<< res: %r", res)
    return res
|
Verifies the message signature and returns true if the signature is valid, false otherwise.
|
61,719 |
def verify_pop(pop: ProofOfPossession, ver_key: VerKey, gen: Generator) -> bool:
    """Verify the proof of possession; returns whether it is valid."""
    logger = logging.getLogger(__name__)
    logger.debug("Bls::verify_pop: >>> pop: %r, ver_key: %r, gen: %r", pop, ver_key, gen)
    valid = c_bool()
    # NOTE(review): symbol spelled 'bsl' (cf. 'bls' in sign) -- confirm
    # against the native exports before changing.
    do_call('indy_crypto_bsl_verify_pop',
            pop.c_instance, ver_key.c_instance, gen.c_instance, byref(valid))
    res = valid
    logger.debug("Bls::verify_pop: <<< res: %r", res)
    return res
|
Verifies the proof of possession and returns true if it is valid, false otherwise.
|
61,720 |
def verify_multi_sig(multi_sig: MultiSignature, message: bytes, ver_keys: [VerKey], gen: Generator) -> bool:
    """Verify the message multi-signature; returns whether it is valid."""
    logger = logging.getLogger(__name__)
    logger.debug("Bls::verify_multi_sig: >>> multi_sig: %r, message: %r, ver_keys: %r, gen: %r",
                 multi_sig, message, ver_keys, gen)
    # Marshal the verification key handles into a C array for the FFI call.
    ver_key_c_instances = (c_void_p * len(ver_keys))()
    for i, key in enumerate(ver_keys):
        ver_key_c_instances[i] = key.c_instance
    valid = c_bool()
    do_call('indy_crypto_bls_verify_multi_sig',
            multi_sig.c_instance, message, len(message),
            ver_key_c_instances, len(ver_keys), gen.c_instance, byref(valid))
    res = valid
    logger.debug("Bls::verify_multi_sig: <<< res: %r", res)
    return res
|
Verifies the message multi-signature and returns true if the signature is valid, false otherwise.
|
61,721 |
def get_urls(self):
    """Customize the modeladmin urls with the publish view."""
    custom = [
        url(
            r'^publish/([0-9]+)/$',
            self.admin_site.admin_view(self.publish_post),
            name='djangocms_blog_publish_article',
        ),
    ]
    # Custom routes must precede the default admin routes.
    return custom + super(PostAdmin, self).get_urls()
|
Customize the modeladmin urls
|
61,722 |
def publish_post(self, request, pk):
    """Admin view to publish a single post."""
    language = get_language_from_request(request, check_path=True)
    try:
        post = Post.objects.get(pk=int(pk))
        post.publish = True
        post.save()
        return HttpResponseRedirect(post.get_absolute_url(language))
    except Exception:
        # On any failure, bounce back to the referring page, or to the
        # latest-posts list when there is no referrer.
        try:
            return HttpResponseRedirect(request.META['HTTP_REFERER'])
        except KeyError:
            return HttpResponseRedirect(reverse('djangocms_blog:posts-latest'))
|
Admin view to publish a single post
|
61,723 |
def has_restricted_sites(self, request):
    """Whether the current user has permission on exactly one site."""
    restricted = self.get_restricted_sites(request)
    # Falsy queryset short-circuits; otherwise require a single site.
    return restricted and restricted.count() == 1
|
Whether the current user has permission on one site only
|
61,724 |
def get_restricted_sites(self, request):
    """The sites on which the user has permission.

    Returns an empty queryset for user models without ``get_sites``.
    """
    try:
        return request.user.get_sites()
    except AttributeError:
        return Site.objects.none()
|
The sites on which the user has permission.
|
61,725 |
def get_fieldsets(self, request, obj=None):
    """Customize the fieldsets according to the app settings."""
    app_config_default = self._app_config_select(request, obj)
    if app_config_default is None and request.method == 'GET':
        return super(PostAdmin, self).get_fieldsets(request, obj)
    config = obj.app_config if obj else app_config_default
    fsets = deepcopy(self._fieldsets)
    # Feature toggles come from the app config when present, else settings.
    if config:
        abstract = bool(config.use_abstract)
        placeholder = bool(config.use_placeholder)
        related = bool(config.use_related)
    else:
        abstract = get_setting('USE_ABSTRACT')
        placeholder = get_setting('USE_PLACEHOLDER')
        related = get_setting('USE_RELATED')
    if abstract:
        fsets[0][1]['fields'].append('abstract')
    if not placeholder:
        fsets[0][1]['fields'].append('post_text')
    if get_setting('MULTISITE') and not self.has_restricted_sites(request):
        fsets[1][1]['fields'][0].append('sites')
    if request.user.is_superuser:
        fsets[1][1]['fields'][0].append('author')
    if apps.is_installed('djangocms_blog.liveblog'):
        fsets[2][1]['fields'][2].append('enable_liveblog')
    filter_function = get_setting('ADMIN_POST_FIELDSET_FILTER')
    # NOTE(review): config.namespace is read here even though config may be
    # None when `related` comes from settings -- confirm this path cannot be
    # reached with a null config.
    if related and Post.objects.namespace(config.namespace).active_translations().exists():
        fsets[1][1]['fields'][0].append('related')
    if callable(filter_function):
        fsets = filter_function(fsets, request, obj=obj)
    return fsets
|
Customize the fieldsets according to the app settings
|
61,726 |
def save_model(self, request, obj, form, change):
    """Clear the menu cache when the menu structure changes."""
    if 'config.menu_structure' in form.changed_data:
        # Imported lazily to avoid a hard dependency at module import time.
        from menus.menu_pool import menu_pool
        menu_pool.clear(all=True)
    return super(BlogConfigAdmin, self).save_model(request, obj, form, change)
|
Clear menu cache when changing menu structure
|
61,727 |
def clean_slug(self):
    """Generate a valid slug in case the given one is taken."""
    base = self.cleaned_data.get('slug', '')
    lang = self.language_code
    if not base:
        base = slugify(self.cleaned_data.get('title', ''))
    qs = Post._default_manager.active_translations(lang).language(lang)
    taken = list(qs.values_list('translations__slug', flat=True))
    # Append an increasing numeric suffix until the slug is unique.
    candidate = base
    suffix = 1
    while candidate in taken:
        candidate = '%s-%s' % (base, suffix)
        suffix += 1
    return candidate
|
Generate a valid slug in case the given one is taken
|
61,728 |
def tagged(self, other_model=None, queryset=None):
    """Return a queryset of this model's items that share tags with the given model or queryset."""
    shared_tags = self._taglist(other_model, queryset)
    return self.get_queryset().filter(tags__in=shared_tags).distinct()
|
Restituisce una queryset di elementi del model taggati o con gli stessi tag di un model o un queryset
|
61,729 |
def _taglist(self, other_model=None, queryset=None):
    """Return a list of tag ids shared between the current model and the given model or queryset."""
    from taggit.models import TaggedItem
    restrict = None
    if queryset is not None:
        # Collect every tag appearing on the queryset's items.
        seen = set()
        for item in queryset.all():
            seen.update(item.tags.all())
        restrict = set([tag.id for tag in seen])
    elif other_model is not None:
        restrict = set(
            TaggedItem.objects.filter(
                content_type__model=other_model.__name__.lower()
            ).values_list('tag_id', flat=True))
    tags = set(
        TaggedItem.objects.filter(
            content_type__model=self.model.__name__.lower()
        ).values_list('tag_id', flat=True))
    if restrict is not None:
        tags = tags.intersection(restrict)
    return list(tags)
|
Restituisce una lista di id di tag comuni al model corrente e al model o queryset passati come argomento
|
61,730 |
def tag_list(self, other_model=None, queryset=None):
    """Return a queryset of tags shared between the current model and the given model or queryset."""
    from taggit.models import Tag
    return Tag.objects.filter(id__in=self._taglist(other_model, queryset))
|
Restituisce un queryset di tag comuni al model corrente e al model o queryset passati come argomento
|
61,731 |
def liveblog_connect(message, apphook, lang, post):
    """Connect a user to the channel group of the given post for the given language."""
    try:
        post = Post.objects.namespace(apphook).language(lang).active_translations(slug=post).get()
    except Post.DoesNotExist:
        message.reply_channel.send({
            'text': json.dumps({'error': 'no_post'}),
        })
        return
    Group(post.liveblog_group).add(message.reply_channel)
    message.reply_channel.send({"accept": True})
|
Connect users to the group of the given post according to the given language
|
61,732 |
def liveblog_disconnect(message, apphook, lang, post):
    """Disconnect a user from the channel group of the given post for the given language."""
    try:
        post = Post.objects.namespace(apphook).language(lang).active_translations(slug=post).get()
    except Post.DoesNotExist:
        message.reply_channel.send({
            'text': json.dumps({'error': 'no_post'}),
        })
        return
    Group(post.liveblog_group).discard(message.reply_channel)
|
Disconnect users to the group of the given post according to the given language
|
61,733 |
def video_in_option(self, param, profile='Day'):
    """Return a video input option value for the given profile."""
    if profile == 'Day':
        field = param
    else:
        field = '{}Options.{}'.format(profile, param)
    matches = [opt for opt in self.video_in_options.split()
               if '].{}='.format(field) in opt]
    return utils.pretty(matches[0])
|
Return video input option .
|
61,734 |
def _generate_token(self):
    """Create authentication to use with requests."""
    session = self.get_session()
    url = self.__base_url('magicBox.cgi?action=getMachineName')
    try:
        # Try HTTP basic auth first; fall back to digest auth on failure.
        auth = requests.auth.HTTPBasicAuth(self._user, self._password)
        req = session.get(url, auth=auth, timeout=self._timeout_default)
        if not req.ok:
            auth = requests.auth.HTTPDigestAuth(self._user, self._password)
            req = session.get(url, auth=auth, timeout=self._timeout_default)
        req.raise_for_status()
    except requests.RequestException as error:
        _LOGGER.error(error)
        raise CommError('Could not communicate with camera')
    # The camera answers 200 even for bad credentials; inspect the body.
    result = req.text.lower()
    if 'invalid' in result or 'error' in result:
        _LOGGER.error('Result from camera: %s',
                      req.text.strip().replace('\r\n', ': '))
        raise LoginError('Invalid credentials')
    return auth
|
Create authentication to use with requests.
|
61,735 |
def _set_name(self):
    """Set the device name and serial, or None when unavailable."""
    try:
        self._name = pretty(self.machine_name)
        self._serial = self.serial_number
    except AttributeError:
        # Device did not report a machine name / serial number.
        self._name = None
        self._serial = None
|
Set device name .
|
61,736 |
def to_unit(value, unit='B'):
    """Convert a byte count to the given unit.

    Returns a ``(scaled_value, unit)`` tuple for known units; for an
    unknown unit the (possibly float-converted) raw value is returned.
    """
    units = ['B', 'KB', 'MB', 'GB', 'TB']
    if not isinstance(value, (int, float)):
        value = float(value)
    if unit in units:
        scaled = value / 1024 ** units.index(unit)
        return round(scaled, PRECISION), unit
    return value
|
Convert bytes to the given unit.
|
61,737 |
def realtime_stream(self, channel=1, typeno=0, path_file=None):
    """Fetch the realtime stream; optionally copy it into a file.

    If the stream is redirected to a file, use the mplayer tool to
    visualize the video recording.
    """
    ret = self.command(
        'realmonitor.cgi?action=getStream&channel={0}&subtype={1}'.format(
            channel, typeno))
    if path_file:
        with open(path_file, 'wb') as out_file:
            shutil.copyfileobj(ret.raw, out_file)
    return ret.raw
|
If the stream is redirected to a file, use the mplayer tool to visualize the video recording.
|
61,738 |
def rtsp_url(self, channelno=None, typeno=None):
    """Return the RTSP streaming url."""
    channelno = 1 if channelno is None else channelno
    typeno = 0 if typeno is None else typeno
    cmd = 'cam/realmonitor?channel={0}&subtype={1}'.format(channelno, typeno)
    try:
        # Pull the configured RTSP port; omit it when not configured.
        port = ':' + [x.split('=')[1] for x in self.rtsp_config.split()
                      if x.startswith('table.RTSP.Port=')][0]
    except IndexError:
        port = ''
    return 'rtsp://{}:{}@{}{}/{}'.format(
        self._user, self._password, self._host, port, cmd)
|
Return RTSP streaming url
|
61,739 |
def mjpeg_url(self, channelno=None, typeno=None):
    """Return the MJPEG streaming url."""
    channelno = 0 if channelno is None else channelno
    typeno = 1 if typeno is None else typeno
    cmd = "mjpg/video.cgi?channel={0}&subtype={1}".format(channelno, typeno)
    return '{0}{1}'.format(self._base_url, cmd)
|
Return MJPEG streaming url
|
61,740 |
def scan_devices(self, subnet, timeout=None):
    """Scan for cameras in a range of ips; returns the ips found.

    ``subnet`` may be a bare network prefix (assumed /24) or CIDR notation.
    Each candidate address is probed from its own daemon-less thread.
    """
    # Hosts per octet for each supported prefix length.
    max_range = {16: 256, 24: 256, 25: 128, 27: 32, 28: 16, 29: 8, 30: 4, 31: 2}
    if "/" not in subnet:
        mask = 24
        network = subnet
    else:
        network, mask = subnet.split("/")
        mask = int(mask)
    if mask not in max_range:
        raise RuntimeError("Cannot determine the subnet mask!")
    network = network.rpartition(".")[0]
    if mask == 16:
        # /16 iterates over the last two octets.
        network = network.rpartition(".")[0]
        for seq1 in range(0, max_range[mask]):
            for seq2 in range(0, max_range[mask]):
                ipaddr = "{0}.{1}.{2}".format(network, seq1, seq2)
                threading.Thread(
                    target=self.__raw_scan, args=(ipaddr, timeout)).start()
    else:
        for seq1 in range(0, max_range[mask]):
            ipaddr = "{0}.{1}".format(network, seq1)
            threading.Thread(
                target=self.__raw_scan, args=(ipaddr, timeout)).start()
    return self.amcrest_ips
|
Scan cameras in a range of ips
|
61,741 |
def disallow(self, foreign, permission="active", account=None, threshold=None, **kwargs):
    """Remove additional access to an account by some other public key or account.

    :param str foreign: public key or account name whose authority is removed
    :param str permission: 'owner' or 'active'
    :param str account: account to modify (defaults to config["default_account"])
    :param int threshold: optionally set a new weight threshold
    :raises ValueError: on missing account, unknown permission, unknown
        foreign key/account, or when the change would be a no-op
    """
    if not account:
        if "default_account" in self.config:
            account = self.config["default_account"]
    if not account:
        raise ValueError("You need to provide an account")
    if permission not in ["owner", "active"]:
        # BUG FIX: the original message had a malformed quote ("'active").
        raise ValueError("Permission needs to be either 'owner' or 'active'")
    account = Account(account, blockchain_instance=self)
    authority = account[permission]
    # BUG FIX: bare `except:` clauses also swallowed SystemExit and
    # KeyboardInterrupt; narrowed to `except Exception`.
    try:
        # First interpret `foreign` as a public key ...
        pubkey = PublicKey(foreign, prefix=self.prefix)
        affected_items = list(
            filter(lambda x: x[0] == str(pubkey), authority["key_auths"]))
        authority["key_auths"] = list(
            filter(lambda x: x[0] != str(pubkey), authority["key_auths"]))
    except Exception:
        try:
            # ... otherwise as an account name.
            foreign_account = Account(foreign, blockchain_instance=self)
            affected_items = list(
                filter(lambda x: x[0] == foreign_account["id"],
                       authority["account_auths"]))
            authority["account_auths"] = list(
                filter(lambda x: x[0] != foreign_account["id"],
                       authority["account_auths"]))
        except Exception:
            raise ValueError("Unknown foreign account or unvalid public key")
    if not affected_items:
        raise ValueError("Changes nothing!")
    removed_weight = affected_items[0][1]
    if threshold:
        authority["weight_threshold"] = threshold
    # If the remaining weights cannot satisfy the threshold, lower it.
    try:
        self._test_weights_treshold(authority)
    except Exception:
        log.critical(
            "The account's threshold will be reduced by %d" % (removed_weight))
        authority["weight_threshold"] -= removed_weight
        self._test_weights_treshold(authority)
    op = operations.Account_update(**{
        "fee": {"amount": 0, "asset_id": "1.3.0"},
        "account": account["id"],
        permission: authority,
        "extensions": {},
    })
    if permission == "owner":
        return self.finalizeOp(op, account["name"], "owner", **kwargs)
    return self.finalizeOp(op, account["name"], "active", **kwargs)
|
Remove additional access to an account by some other public key or account .
|
61,742 |
def approvewitness(self, witnesses, account=None, **kwargs):
    """Approve one or more witnesses."""
    if not account:
        if "default_account" in self.config:
            account = self.config["default_account"]
    if not account:
        raise ValueError("You need to provide an account")
    account = Account(account, blockchain_instance=self)
    options = account["options"]
    if not isinstance(witnesses, (list, set, tuple)):
        witnesses = {witnesses}
    for witness in witnesses:
        witness = Witness(witness, blockchain_instance=self)
        options["votes"].append(witness["vote_id"])
    options["votes"] = list(set(options["votes"]))
    # Vote ids whose prefix equals 1 are witness votes.
    options["num_witness"] = len(
        [v for v in options["votes"] if float(v.split(":")[0]) == 1])
    op = operations.Account_update(**{
        "fee": {"amount": 0, "asset_id": "1.3.0"},
        "account": account["id"],
        "new_options": options,
        "extensions": {},
        "prefix": self.prefix,
    })
    return self.finalizeOp(op, account["name"], "active", **kwargs)
|
Approve a witness
|
61,743 |
def approvecommittee(self, committees, account=None, **kwargs):
    """Approve one or more committee members."""
    if not account:
        if "default_account" in self.config:
            account = self.config["default_account"]
    if not account:
        raise ValueError("You need to provide an account")
    account = Account(account, blockchain_instance=self)
    options = account["options"]
    if not isinstance(committees, (list, set, tuple)):
        committees = {committees}
    for committee in committees:
        committee = Committee(committee, blockchain_instance=self)
        options["votes"].append(committee["vote_id"])
    options["votes"] = list(set(options["votes"]))
    # Vote ids whose prefix equals 0 are committee votes.
    options["num_committee"] = len(
        [v for v in options["votes"] if float(v.split(":")[0]) == 0])
    op = operations.Account_update(**{
        "fee": {"amount": 0, "asset_id": "1.3.0"},
        "account": account["id"],
        "new_options": options,
        "extensions": {},
        "prefix": self.prefix,
    })
    return self.finalizeOp(op, account["name"], "active", **kwargs)
|
Approve a committee
|
61,744 |
def betting_market_rules_create(self, names, descriptions, account=None, **kwargs):
    """Create betting market rules.

    ``names`` and ``descriptions`` must be lists.
    """
    assert isinstance(names, list)
    assert isinstance(descriptions, list)
    if not account:
        if "default_account" in self.config:
            account = self.config["default_account"]
    if not account:
        raise ValueError("You need to provide an account")
    account = Account(account)
    op = operations.Betting_market_rules_create(**{
        "fee": {"amount": 0, "asset_id": "1.3.0"},
        "name": names,
        "description": descriptions,
        "prefix": self.prefix,
    })
    return self.finalizeOp(op, account["name"], "active", **kwargs)
|
Create betting market rules
|
61,745 |
def betting_market_rules_update(self, rules_id, names, descriptions, account=None, **kwargs):
    """Update existing betting market rules.

    ``names`` and ``descriptions`` must be lists.
    """
    assert isinstance(names, list)
    assert isinstance(descriptions, list)
    if not account:
        if "default_account" in self.config:
            account = self.config["default_account"]
    if not account:
        raise ValueError("You need to provide an account")
    account = Account(account)
    rule = Rule(rules_id)
    op = operations.Betting_market_rules_update(**{
        "fee": {"amount": 0, "asset_id": "1.3.0"},
        "betting_market_rules_id": rule["id"],
        "new_name": names,
        "new_description": descriptions,
        "prefix": self.prefix,
    })
    return self.finalizeOp(op, account["name"], "active", **kwargs)
|
Update betting market rules
|
61,746 |
def bet_place(self, betting_market_id, amount_to_bet, backer_multiplier, back_or_lay, account=None, **kwargs):
    """Place a bet on a betting market.

    ``amount_to_bet`` must be an ``Amount``; ``back_or_lay`` is either
    "back" or "lay".
    """
    from . import GRAPHENE_BETTING_ODDS_PRECISION
    assert isinstance(amount_to_bet, Amount)
    assert back_or_lay in ["back", "lay"]
    if not account:
        if "default_account" in self.config:
            account = self.config["default_account"]
    if not account:
        raise ValueError("You need to provide an account")
    account = Account(account)
    bm = BettingMarket(betting_market_id)
    # Odds are transmitted as a fixed-point integer.
    odds = int(backer_multiplier * GRAPHENE_BETTING_ODDS_PRECISION)
    op = operations.Bet_place(**{
        "fee": {"amount": 0, "asset_id": "1.3.0"},
        "bettor_id": account["id"],
        "betting_market_id": bm["id"],
        "amount_to_bet": amount_to_bet.json(),
        "backer_multiplier": odds,
        "back_or_lay": back_or_lay,
        "prefix": self.prefix,
    })
    return self.finalizeOp(op, account["name"], "active", **kwargs)
|
Place a bet
|
61,747 |
def bet_cancel(self, bet_to_cancel, account=None, **kwargs):
    """Cancel an existing bet."""
    if not account:
        if "default_account" in self.config:
            account = self.config["default_account"]
    if not account:
        raise ValueError("You need to provide an account")
    account = Account(account)
    bet = Bet(bet_to_cancel)
    op = operations.Bet_cancel(**{
        "fee": {"amount": 0, "asset_id": "1.3.0"},
        "bettor_id": account["id"],
        "bet_to_cancel": bet["id"],
        "prefix": self.prefix,
    })
    return self.finalizeOp(op, account["name"], "active", **kwargs)
|
Cancel a bet
|
61,748 |
def verbose(f):
    """Decorator that adds verbose flags and installs logging handlers."""
    @click.pass_context
    def new_func(ctx, *args, **kwargs):
        global log
        levels = ["critical", "error", "warn", "info", "debug"]
        verbosity = levels[int(min(ctx.obj.get("verbose", 0), 4))]
        log.setLevel(getattr(logging, verbosity.upper()))
        formatter = logging.Formatter(
            "%(asctime)s - %(name)s - %(levelname)s - %(message)s")
        ch = logging.StreamHandler()
        ch.setLevel(getattr(logging, verbosity.upper()))
        ch.setFormatter(formatter)
        log.addHandler(ch)
        # Verbosity above 4 additionally enables the grapheneapi logger,
        # above 8 also the graphenebase logger (note: rebinds the module
        # level `log` name, as the original did).
        if ctx.obj.get("verbose", 0) > 4:
            verbosity = levels[int(min(ctx.obj.get("verbose", 4) - 4, 4))]
            log = logging.getLogger("grapheneapi")
            log.setLevel(getattr(logging, verbosity.upper()))
            log.addHandler(ch)
        if ctx.obj.get("verbose", 0) > 8:
            verbosity = levels[int(min(ctx.obj.get("verbose", 8) - 8, 4))]
            log = logging.getLogger("graphenebase")
            log.setLevel(getattr(logging, verbosity.upper()))
            log.addHandler(ch)
        return ctx.invoke(f, *args, **kwargs)
    return update_wrapper(new_func, f)
|
Add verbose flags and add logging handlers
|
61,749 |
def offline(f):
    """Decorator providing ``ctx.peerplays`` as an offline PeerPlays instance
    (also mirrored on ``ctx.blockchain`` and the shared instance)."""
    @click.pass_context
    @verbose
    def wrapped(ctx, *args, **kwargs):
        ctx.obj["offline"] = True
        instance = PeerPlays(**ctx.obj)
        ctx.peerplays = instance
        ctx.blockchain = instance
        set_shared_peerplays_instance(instance)
        return ctx.invoke(f, *args, **kwargs)
    return update_wrapper(wrapped, f)
|
This decorator allows you to access ctx . peerplays which is an instance of PeerPlays with offline = True .
|
61,750 |
def configfile(f):
    """Decorator that parses the YAML file named by ``ctx.obj["configfile"]``
    and stores the resulting dictionary on ``ctx.config``."""
    @click.pass_context
    def new_func(ctx, *args, **kwargs):
        # BUG FIX: the original leaked the file handle; close it with a
        # context manager.  safe_load avoids constructing arbitrary Python
        # objects from the (potentially edited) config file.
        with open(ctx.obj["configfile"]) as fp:
            ctx.config = yaml.safe_load(fp)
        return ctx.invoke(f, *args, **kwargs)
    return update_wrapper(new_func, f)
|
This decorator will parse a configuration file in YAML format and store the dictionary in ctx . blockchain . config
|
61,751 |
def on_message(self, ws, reply, *args):
    """Websocket callback invoked for every received message.

    Parses the JSON payload; "notice" messages are either handed to
    ``process_notice`` (object updates) or dispatched to the registered
    event callback matching the subscription id.

    :raises ValueError: if the node did not return valid JSON
    """
    log.debug("Received message: %s" % str(reply))
    data = {}
    try:
        data = json.loads(reply, strict=False)
    except ValueError:
        raise ValueError("API node returned invalid format. Expected JSON!")
    if data.get("method") == "notice":
        id = data["params"][0]
        if id >= len(self.__events__):
            log.critical("Received an id that is out of range\n\n" + str(data))
            return
        if id == self.__events__.index("on_object"):
            # Object notifications may arrive flat or nested one level deep.
            for notice in data["params"][1]:
                try:
                    if "id" in notice:
                        self.process_notice(notice)
                    else:
                        for obj in notice:
                            if "id" in obj:
                                self.process_notice(obj)
                except Exception as e:
                    # BUG FIX: format_exc was referenced without calling it,
                    # which logged the function object instead of the traceback.
                    log.critical("Error in process_notice: {}\n\n{}".format(str(e), traceback.format_exc()))
        else:
            try:
                callbackname = self.__events__[id]
                log.info("Patching through to call %s" % callbackname)
                [getattr(self.events, callbackname)(x) for x in data["params"][1]]
            except Exception as e:
                log.critical("Error in {}: {}\n\n{}".format(callbackname, str(e), traceback.format_exc()))
|
This method is called by the websocket connection on every message that is received . If we receive a notice we hand over post - processing and signalling of events to process_notice .
|
61,752 |
def on_close(self, ws):
    """Websocket close callback: stop the keepalive thread if it is running."""
    log.debug("Closing WebSocket connection with {}".format(self.url))
    keepalive = self.keepalive
    if keepalive and keepalive.is_alive():
        # Signal the thread loop to exit, then wait for it.
        keepalive.do_run = False
        keepalive.join()
|
Called when websocket connection is closed
|
61,753 |
def run_forever(self):
    """Run the websocket app continuously.

    Rotates through the configured API node urls, reconnecting with a
    linear back-off (capped at 10 seconds) and raising
    ``NumRetriesReached`` once ``num_retries`` attempts are exhausted.
    """
    cnt = 0  # connection attempts so far
    while True:
        cnt += 1
        # Round-robin over the configured node urls.
        self.url = next(self.urls)
        log.debug("Trying to connect to node %s" % self.url)
        try:
            # WebSocketApp.run_forever() blocks until the connection drops.
            self.ws = websocket.WebSocketApp(
                self.url,
                on_message=self.on_message,
                on_error=self.on_error,
                on_close=self.on_close,
                on_open=self.on_open,
            )
            self.ws.run_forever()
        except websocket.WebSocketException as exc:
            if self.num_retries >= 0 and cnt > self.num_retries:
                raise NumRetriesReached()
            # Back off linearly, capping the wait at 10 seconds.
            sleeptime = (cnt - 1) * 2 if cnt < 10 else 10
            if sleeptime:
                log.warning(
                    "Lost connection to node during wsconnect(): %s (%d/%d) " % (self.url, cnt, self.num_retries)
                    + "Retrying in %d seconds" % sleeptime
                )
                time.sleep(sleeptime)
        except KeyboardInterrupt:
            self.ws.keep_running = False
            raise
        except Exception as e:
            # Log unexpected errors but keep the reconnect loop alive.
            log.critical("{}\n\n{}".format(str(e), traceback.format_exc()))
|
This method is used to run the websocket app continuously . It will execute callbacks as defined and try to stay connected with the provided APIs
|
61,754 |
def register_dataframe_method(method):
    """Register a function as a method attached to the Pandas DataFrame.

    The function's first argument receives the DataFrame; remaining
    arguments are forwarded from the call site.  Returns *method* so it
    remains usable as a plain function.
    """
    def inner(*args, **kwargs):
        class AccessorMethod(object):
            # CONSISTENCY FIX: expose the wrapped function's docstring on the
            # accessor class, matching register_series_method.
            __doc__ = method.__doc__

            def __init__(self, pandas_obj):
                self._obj = pandas_obj

            @wraps(method)
            def __call__(self, *args, **kwargs):
                return method(self._obj, *args, **kwargs)

        register_dataframe_accessor(method.__name__)(AccessorMethod)
        return method

    return inner()
|
Register a function as a method attached to the Pandas DataFrame .
|
61,755 |
def register_series_method(method):
    """Register a function as a method attached to the Pandas Series.

    The function's first argument receives the Series; remaining
    arguments are forwarded from the call site.  Returns *method*.
    """
    class _Accessor(object):
        __doc__ = method.__doc__

        def __init__(self, pandas_obj):
            self._obj = pandas_obj

        @wraps(method)
        def __call__(self, *args, **kwargs):
            return method(self._obj, *args, **kwargs)

    register_series_accessor(method.__name__)(_Accessor)
    return method
|
Register a function as a method attached to the Pandas Series .
|
61,756 |
def add_invites_to_user(cls, user, amount):
    """Increase *user*'s allocated invite count by *amount*.

    An allocation of ``-1`` is treated as a sentinel and left unchanged.
    """
    stat, _ = InvitationStat.objects.get_or_create(user=user)
    if stat.invites_allocated == -1:
        return
    stat.invites_allocated += amount
    stat.save()
|
Add the specified number of invites to current allocated total .
|
61,757 |
def add_invites(cls, amount):
    """Grant *amount* additional invites to every user."""
    users = get_user_model().objects.all()
    for user in users:
        cls.add_invites_to_user(user, amount)
|
Add invites for all users .
|
61,758 |
def topoff_user ( cls , user , amount ) : stat , _ = cls . objects . get_or_create ( user = user ) remaining = stat . invites_remaining ( ) if remaining != - 1 and remaining < amount : stat . invites_allocated += ( amount - remaining ) stat . save ( )
|
Ensure user has a minimum number of invites .
|
61,759 |
def topoff(cls, amount):
    """Ensure every user has at least *amount* invites remaining."""
    for user in get_user_model().objects.all():
        cls.topoff_user(user, amount)
|
Ensure all users have a minimum number of invites .
|
61,760 |
def align(self, alignment=None):
    """Advance the read position to the next *alignment*-byte boundary.

    When *alignment* is omitted it is derived from the dump's processor
    architecture: 8 bytes on AMD64, 4 otherwise.
    """
    if alignment is None:
        amd64 = self.reader.sysinfo.ProcessorArchitecture == PROCESSOR_ARCHITECTURE.AMD64
        alignment = 8 if amd64 else 4
    offset = self.current_position % alignment
    if offset != 0:
        # Seek forward relative to the current position (whence=1).
        self.seek((alignment - offset) % alignment, 1)
|
Repositions the current reader to match architecture alignment
|
61,761 |
def peek(self, length):
    """Return *length* bytes at the current position without advancing it.

    :raises Exception: if the read would cross the segment boundary
    """
    end = self.current_position + length
    if not self.current_segment.inrange(end):
        raise Exception('Would read over segment boundaries!')
    base = self.current_segment.start_address
    return self.current_segment.data[self.current_position - base:end - base]
|
Returns up to length bytes from the current memory segment
|
61,762 |
def read(self, size=-1):
    """Read *size* bytes from the current segment, advancing the position.

    With ``size == -1`` every remaining byte of the segment is returned,
    or ``None`` when nothing is left.

    :raises Exception: if *size* < -1 or the read crosses the segment end
    """
    if size < -1:
        raise Exception('You shouldnt be doing this')
    segment = self.current_segment
    base = segment.start_address
    if size == -1:
        # Drain the rest of the segment.
        if not segment.remaining_len(self.current_position):
            return None
        start = self.current_position
        self.current_position = segment.end_address
        return segment.data[start - base:]
    end = self.current_position + size
    if not segment.inrange(end):
        raise Exception('Would read over segment boundaries!')
    start = self.current_position
    self.current_position = end
    return segment.data[start - base:end - base]
|
Returns data bytes of size size from the current segment . If size is - 1 it returns all the remaining data bytes from memory segment
|
61,763 |
def read_int(self):
    """Read a signed little-endian integer: 8 bytes on AMD64, 4 otherwise."""
    width = 8 if self.reader.sysinfo.ProcessorArchitecture == PROCESSOR_ARCHITECTURE.AMD64 else 4
    return int.from_bytes(self.read(width), byteorder='little', signed=True)
|
Reads an integer. The size depends on the architecture: a 4-byte little-endian signed int on 32-bit systems, or an 8-byte little-endian signed int on 64-bit systems.
|
61,764 |
def read_uint(self):
    """Read an unsigned little-endian integer: 8 bytes on AMD64, 4 otherwise."""
    width = 8 if self.reader.sysinfo.ProcessorArchitecture == PROCESSOR_ARCHITECTURE.AMD64 else 4
    return int.from_bytes(self.read(width), byteorder='little', signed=False)
|
Reads an integer. The size depends on the architecture: a 4-byte little-endian unsigned int on 32-bit systems, or an 8-byte little-endian unsigned int on 64-bit systems.
|
61,765 |
def find(self, pattern):
    """Search the current memory segment for *pattern*.

    :return: absolute address of the first occurrence, or -1 when absent
    """
    pos = self.current_segment.data.find(pattern)
    if pos == -1:
        return -1
    # BUG FIX: the offset returned by data.find() is relative to the segment
    # start, so convert it with start_address (consistent with find_all),
    # not with the unrelated current_position.
    return pos + self.current_segment.start_address
|
Searches for a pattern in the current memory segment
|
61,766 |
def find_all(self, pattern):
    """Return the absolute addresses of every (possibly overlapping)
    occurrence of *pattern* in the current segment."""
    hits = []
    start = self.current_segment.data.find(pattern)
    while start != -1:
        hits.append(start + self.current_segment.start_address)
        # Resume one byte further so overlapping matches are found too.
        start = self.current_segment.data.find(pattern, start + 1)
    return hits
|
Searches for all occurrences of a pattern in the current memory segment returns all occurrences as a list
|
61,767 |
def find_global(self, pattern):
    """Search the whole process memory space for *pattern* (exhaustive!).

    :return: address of the first occurrence, or -1 when not found
    """
    hits = self.reader.search(pattern)
    return hits[0] if hits else -1
|
Searches for the pattern in the whole process memory space and returns the first occurrence . This is exhaustive!
|
61,768 |
def report_privilege_information():
    """Report all privilege information assigned to the current process."""
    privileges = get_privilege_information()
    print("found {0} privileges".format(privileges.count))
    for privilege in privileges:
        print(privilege)
|
Report all privilege information assigned to the current process .
|
61,769 |
async def handle(self):
    """Spawn one listener and one emitter task per configured channel and
    keep them running."""
    listeners = []
    for key, value in self.beat_config.items():
        listeners.append(asyncio.ensure_future(self.listener(key)))
    emitters = []
    for key, value in self.beat_config.items():
        emitters.append(asyncio.ensure_future(self.emitters(key, value)))
    # Both task groups loop forever; these waits keep the handler alive.
    await asyncio.wait(emitters)
    await asyncio.wait(listeners)
|
Listens on all the provided channels and handles the messages .
|
61,770 |
async def emitters(self, key, value):
    """Single-channel emitter: periodically send the configured message.

    *value* supplies ``schedule`` (a timedelta-like with
    ``total_seconds()``), the message ``type`` and the ``message`` body.
    """
    while True:
        await asyncio.sleep(value['schedule'].total_seconds())
        await self.channel_layer.send(key, {"type": value['type'], "message": value['message']})
|
Single - channel emitter
|
61,771 |
async def listener(self, channel):
    """Single-channel listener: forward each received message to the
    application instance queue for *channel*.

    :raises ValueError: if a message arrives without a truthy "type" key
    """
    while True:
        message = await self.channel_layer.receive(channel)
        if not message.get("type", None):
            raise ValueError("Worker received message with no type.")
        scope = {"type": "channel", "channel": channel}
        instance_queue = self.get_or_create_application_instance(channel, scope)
        await instance_queue.put(message)
|
Single - channel listener
|
61,772 |
def rating_count(obj):
    """Number of non-zero ratings submitted for *obj*."""
    content_type = ContentType.objects.get_for_model(obj)
    ratings = Rating.objects.filter(object_id=obj.pk, content_type=content_type)
    return ratings.exclude(rating=0).count()
|
Total amount of users who have submitted a positive rating for this object .
|
61,773 |
def set_pixel(self, x, y, value):
    """Set the pixel at (x, y) to OFF, GREEN, RED or YELLOW.

    Coordinates outside 0..7 are silently ignored.
    """
    if not (0 <= x <= 7 and 0 <= y <= 7):
        return
    position = y * 16 + x
    # Green and red LEDs occupy separate halves of each row register.
    self.set_led(position, 1 if value & GREEN > 0 else 0)
    self.set_led(position + 8, 1 if value & RED > 0 else 0)
|
Set pixel at position x y to the given value . X and Y should be values of 0 to 8 . Value should be OFF GREEN RED or YELLOW .
|
61,774 |
def set_bar(self, bar, value):
    """Set bar to the desired color.

    *bar* must be 0..23 (out-of-range values are ignored) and *value*
    should be OFF, GREEN, RED or YELLOW.
    """
    if bar < 0 or bar > 23:
        return
    # Map the bar number to a row (c) and an offset within the row (a);
    # bars 12..23 use the upper half of each row.
    c = (bar if bar < 12 else bar - 12) // 4
    a = bar % 4
    if bar >= 12:
        a += 4
    # The green LED sits 8 positions after the red one in each row.
    self.set_led(c * 16 + a + 8, 1 if value & GREEN > 0 else 0)
    self.set_led(c * 16 + a, 1 if value & RED > 0 else 0)
|
Set bar to desired color . Bar should be a value of 0 to 23 and value should be OFF GREEN RED or YELLOW .
|
61,775 |
def animate(self, images, delay=.25):
    """Display each image in *images* in order, pausing *delay* seconds
    after each frame."""
    for frame in images:
        self.set_image(frame)
        self.write_display()
        time.sleep(delay)
|
Displays each of the input images in order pausing for delay seconds after each image .
|
61,776 |
def set_pixel(self, x, y, value):
    """Set pixel (x, y) of the 8x16 matrix: 0 for off, non-zero for on.

    x must be 0..7 and y 0..15; anything outside is silently ignored.
    """
    if not (0 <= x <= 7 and 0 <= y <= 15):
        return
    # The x axis is mirrored in the LED wiring.
    self.set_led((7 - x) * 16 + y, value)
|
Set pixel at position x y to the given value . X and Y should be values of 0 to 7 and 0 to 15 resp . Value should be 0 for off and non - zero for on .
|
61,777 |
def set_image(self, image):
    """Copy an 8x16 PIL image into the display buffer.

    The image is converted to 1-bit; any non-zero pixel lights the LED.

    :raises ValueError: if the image is not exactly 8x16 pixels
    """
    imwidth, imheight = image.size
    if imwidth != 8 or imheight != 16:
        raise ValueError('Image must be an 8x16 pixels in size.')
    pix = image.convert('1').load()
    # BUG FIX: xrange is Python 2 only; range works on both 2 and 3.
    for x in range(8):
        for y in range(16):
            self.set_pixel(x, y, 0 if pix[(x, y)] == 0 else 1)
|
Set display buffer to Python Image Library image . Image will be converted to 1 bit color and non - zero color values will light the LEDs .
|
61,778 |
def horizontal_scroll(self, image, padding=True):
    """Return a list of 8x16 frames which, shown in order, make *image*
    appear to scroll horizontally across the LED matrix.

    With *padding* enabled, extra frames are added so the image scrolls
    fully in from one edge and out the other.
    """
    image_list = list()
    width = image.size[0]
    if padding:
        # Lead-in: a growing slice of the image slides in from the edge.
        for x in range(8):
            section = image.crop((0, 0, x, 16))
            display_section = self.create_blank_image()
            display_section.paste(section, (8 - x, 0, 8, 16))
            image_list.append(display_section)
    # Main pass: an 8-column window moves across the full image width.
    for x in range(8, width + 1):
        section = image.crop((x - 8, 0, x, 16))
        display_section = self.create_blank_image()
        display_section.paste(section, (0, 0, 8, 16))
        image_list.append(display_section)
    if padding:
        # Lead-out: the tail of the image slides off the opposite edge.
        for x in range(width - 7, width + 1):
            section = image.crop((x, 0, width, 16))
            display_section = self.create_blank_image()
            display_section.paste(section, (0, 0, 7 - (x - (width - 7)), 16))
            image_list.append(display_section)
    return image_list
|
Returns a list of images which appear to scroll from left to right across the input image when displayed on the LED matrix in order .
|
61,779 |
def vertical_scroll(self, image, padding=True):
    """Return a list of 8x16 frames which, shown in order, make *image*
    appear to scroll vertically down the LED matrix.

    With *padding* enabled, extra frames are added so the image scrolls
    fully in from one edge and out the other.
    """
    image_list = list()
    height = image.size[1]
    if padding:
        # Lead-in: a growing slice of the image slides in from the edge.
        for y in range(16):
            section = image.crop((0, 0, 8, y))
            display_section = self.create_blank_image()
            display_section.paste(section, (0, 16 - y, 8, 16))
            image_list.append(display_section)
    # Main pass: a 16-row window moves down the full image height.
    for y in range(16, height + 1):
        section = image.crop((0, y - 16, 8, y))
        display_section = self.create_blank_image()
        display_section.paste(section, (0, 0, 8, 16))
        image_list.append(display_section)
    if padding:
        # Lead-out: the tail of the image slides off the opposite edge.
        # NOTE(review): the "7 - ..." bound mirrors horizontal_scroll's
        # 8-column case; for a 16-row axis 15 would be expected — confirm.
        for y in range(height - 15, height + 1):
            section = image.crop((0, y, 8, height))
            display_section = self.create_blank_image()
            display_section.paste(section, (0, 0, 8, 7 - (y - (height - 15))))
            image_list.append(display_section)
    return image_list
|
Returns a list of images which appear to scroll from top to bottom down the input image when displayed on the LED matrix in order .
|
61,780 |
def print_number_str(self, value, justify_right=True):
    """Print a string of up to four numeric characters to the display.

    Period characters are not counted as digits; each one becomes a
    decimal point attached to the previous digit position.  Values that
    do not fit in four digits are shown as '----'.
    """
    # BUG FIX: str.translate(None, '.') is Python 2 only (TypeError on
    # Python 3); count the non-period characters portably instead.
    length = len(value.replace('.', ''))
    if length > 4:
        self.print_str('----')
        return
    pos = (4 - length) if justify_right else 0
    for ch in value:
        if ch == '.':
            # Attach the decimal point to the previous digit position.
            self.set_decimal(pos - 1, True)
        else:
            self.set_digit(pos, ch)
            pos += 1
|
Print a 4 character long string of numeric values to the display . This function is similar to print_str but will interpret periods not as characters but as decimal points associated with the previous character .
|
61,781 |
def print_float(self, value, decimal_digits=2, justify_right=True):
    """Print a numeric value with *decimal_digits* digits after the point.

    Negative values get a leading minus sign.
    """
    text = '{{0:0.{0}F}}'.format(decimal_digits).format(value)
    self.print_number_str(text, justify_right)
|
Print a numeric value to the display . If value is negative it will be printed with a leading minus sign . Decimal digits is the desired number of digits after the decimal point .
|
61,782 |
def set_left_colon(self, show_colon):
    """Turn the left colon on (show_colon True) or off (False).

    The colon occupies bits 0x04 and 0x08 of buffer[4].
    """
    mask = 0x04 | 0x08
    if show_colon:
        self.buffer[4] |= mask
    else:
        self.buffer[4] &= (~mask) & 0xFF
|
Turn the left colon on (show_colon True) or off (show_colon False). Only the large 1.2" 7-segment display has a left colon.
|
61,783 |
def print_number_str(self, value, justify_right=True):
    """Print a string of up to four numeric characters to the display.

    Period characters are not counted as digits; each one becomes a
    decimal point attached to the previous digit position.  Values that
    do not fit in four digits are shown as '----'.
    """
    digits = len(value) - value.count('.')
    if digits > 4:
        self.print_number_str('----')
        return
    pos = (4 - digits) if justify_right else 0
    for ch in value:
        if ch == '.':
            # Attach the decimal point to the previous digit position.
            self.set_decimal(pos - 1, True)
        else:
            self.set_digit(pos, ch)
            pos += 1
|
Print a 4 character long string of numeric values to the display . Characters in the string should be any supported character by set_digit or a decimal point . Decimal point characters will be associated with the previous character .
|
61,784 |
def begin(self):
    """Initialize driver with LEDs enabled and all turned off."""
    # Turn on the chip's internal oscillator (command write, empty payload).
    self._device.writeList(HT16K33_SYSTEM_SETUP | HT16K33_OSCILLATOR, [])
    self.set_blink(HT16K33_BLINK_OFF)
    self.set_brightness(15)  # full brightness
|
Initialize driver with LEDs enabled and all turned off .
|
61,785 |
def write_display(self):
    """Flush the in-memory buffer to the display hardware, one register
    per buffer byte."""
    for register, byte in enumerate(self.buffer):
        self._device.write8(register, byte)
|
Write display buffer to display hardware .
|
61,786 |
def clear(self):
    """Zero every byte of the display buffer (in place)."""
    self.buffer[:] = [0] * len(self.buffer)
|
Clear contents of display buffer .
|
61,787 |
def get_readonly_fields(self, request, obj=None):
    """On change views, also lock down the versioning bookkeeping fields."""
    if not obj:
        # Add view: nothing extra to protect.
        return self.readonly_fields
    return list(self.readonly_fields) + ['id', 'identity', 'is_current']
|
This is required so that when a subclass of VersionedAdmin defines its own readonly_fields, ours won't be undone.
|
61,788 |
def get_list_display(self, request):
    """Assemble the changelist columns, honouring the show-* toggles."""
    columns = list(super(VersionedAdmin, self).get_list_display(request))
    if self.list_display_show_identity:
        columns.insert(0, 'identity_shortener')
    if self.list_display_show_start_date:
        columns.append('version_start_date')
    if self.list_display_show_end_date:
        columns.append('version_end_date')
    columns.append('is_current')
    return columns
|
This method determines which fields go in the changelist
|
61,789 |
def get_list_filter(self, request):
    """Extend the changelist filters with version-aware ones."""
    filters = list(super(VersionedAdmin, self).get_list_filter(request))
    filters.append(('version_start_date', DateTimeFilter))
    filters.append(IsCurrentFilter)
    return filters
|
Adds versionable custom filtering ability to changelist
|
61,790 |
def restore(self, request, *args, **kwargs):
    """Admin view that restores a version from the change view and
    redirects back to the changelist with a success message."""
    paths = request.path_info.split('/')
    # The url looks like .../<app>/<model>/<object_id>/restore/..., so the
    # object id sits two segments before "restore".
    object_id_index = paths.index("restore") - 2
    object_id = paths[object_id_index]
    obj = super(VersionedAdmin, self).get_object(request, object_id)
    obj.restore()
    # Rebuild the changelist path (admin/<app>/<model>).
    admin_wordIndex = object_id_index - 3
    path = "/%s" % ("/".join(paths[admin_wordIndex:object_id_index]))
    opts = self.model._meta
    msg_dict = {
        'name': force_text(opts.verbose_name),
        'obj': format_html('<a href="{}">{}</a>', urlquote(request.path), obj),
    }
    msg = format_html(_('The {name} "{obj}" was restored successfully.'), **msg_dict)
    self.message_user(request, msg, messages.SUCCESS)
    return HttpResponseRedirect(path)
|
View for restoring object from change view
|
61,791 |
def will_not_clone(self, request, *args, **kwargs):
    """Handle "save but do not clone": run the change view for the object,
    then redirect back to the changelist."""
    segments = request.path_info.split('/')
    # The object id sits one segment before "will_not_clone" in the url.
    object_id_pos = segments.index("will_not_clone") - 1
    self.change_view(request, segments[object_id_pos])
    # Rebuild the changelist path (admin/<app>/<model>).
    admin_pos = object_id_pos - 3
    return HttpResponseRedirect('/' + '/'.join(segments[admin_pos:object_id_pos]))
|
Add save but not clone capability in the changeview
|
61,792 |
def exclude(self):
    """Merge VERSIONED_EXCLUDE with any exclude defined further up the MRO
    (no get_exclude hook exists, hence this descriptor override)."""
    parent_exclude = super(VersionedAdmin, self).exclude
    if parent_exclude is None:
        return self.VERSIONED_EXCLUDE
    return list(parent_exclude) + self.VERSIONED_EXCLUDE
|
Custom descriptor for exclude since there is no get_exclude method to be overridden
|
61,793 |
def get_object(self, request, object_id, from_field=None):
    """Fetch the admin object, cloning the latest version on ordinary POST
    updates; no clone happens for will_not_clone, delete or restore."""
    obj = super(VersionedAdmin, self).get_object(request, object_id)
    should_clone = (
        request.method == 'POST'
        and obj
        and obj.is_latest
        and 'will_not_clone' not in request.path
        and 'delete' not in request.path
        and 'restore' not in request.path
    )
    if should_clone:
        obj = obj.clone()
    return obj
|
Our implementation of get_object clones the object when it is being updated, does not clone when the "save but not clone" button is pushed, and at no other time calls clone.
|
61,794 |
def get_urls(self):
    """Prepend the custom will_not_clone and restore endpoints to the
    default admin urls."""
    custom = [
        url(r'^(.+)/will_not_clone/$', admin.site.admin_view(self.will_not_clone)),
        url(r'^(.+)/restore/$', admin.site.admin_view(self.restore)),
    ]
    return custom + super(VersionedAdmin, self).get_urls()
|
Appends the custom will_not_clone url to the admin site
|
61,795 |
def create_current_version_unique_identity_indexes(app_name, database=None):
    """Add partial unique indexes on the identity column of *app_name*'s
    versionable models, covering only current versions
    (``version_end_date IS NULL``).

    :param str app_name: app whose models receive the indexes
    :param str database: optional database alias
    :return: number of indexes created
    """
    indexes_created = 0
    connection = database_connection(database)
    with connection.cursor() as cursor:
        for model in versionable_models(app_name):
            # Skip unmanaged models; their schema is maintained elsewhere.
            if getattr(model._meta, 'managed', True):
                table_name = model._meta.db_table
                index_name = '%s_%s_identity_v_uniq' % (app_name, table_name)
                if not index_exists(cursor, index_name):
                    # Identifiers come from model metadata, not user input,
                    # so plain string interpolation is acceptable here.
                    cursor.execute("CREATE UNIQUE INDEX %s ON %s(%s) "
                                   "WHERE version_end_date IS NULL" % (index_name, table_name, 'identity'))
                    indexes_created += 1
    return indexes_created
|
Add partial unique indexes for the the identity column of versionable models .
|
61,796 |
def get_queryset(self):
    """Build a VersionedQuerySet, propagating the querytime restriction
    from a related-manager instance when one is present."""
    queryset = VersionedQuerySet(self.model, using=self._db)
    if hasattr(self, 'instance') and hasattr(self.instance, '_querytime'):
        queryset.querytime = self.instance._querytime
    return queryset
|
Returns a VersionedQuerySet capable of handling version time restrictions .
|
61,797 |
def next_version(self, object, relations_as_of='end'):
    """Return the next version of the given object.

    A version without an end date is already the latest and is returned
    as-is.

    :raises ObjectDoesNotExist: when no later version exists
    """
    if object.version_end_date is None:
        next = object
    else:
        # Earliest version that starts at or after this one's end.
        next = self.filter(
            Q(identity=object.identity),
            Q(version_start_date__gte=object.version_end_date)
        ).order_by('version_start_date').first()
        if not next:
            raise ObjectDoesNotExist("next_version couldn't find a next version of object " + str(object.identity))
    return self.adjust_version_as_of(next, relations_as_of)
|
Return the next version of the given object .
|
61,798 |
def previous_version(self, object, relations_as_of='end'):
    """Return the previous version of the given object.

    A version whose start equals its birth date is the first one and is
    returned as-is.

    :raises ObjectDoesNotExist: when no earlier version exists
    """
    if object.version_birth_date == object.version_start_date:
        previous = object
    else:
        # Latest version that ends at or before this one's start.
        previous = self.filter(
            Q(identity=object.identity),
            Q(version_end_date__lte=object.version_start_date)
        ).order_by('-version_end_date').first()
        if not previous:
            raise ObjectDoesNotExist("previous_version couldn't find a previous version of "
                                     "object " + str(object.identity))
    return self.adjust_version_as_of(previous, relations_as_of)
|
Return the previous version of the given object .
|
61,799 |
def current_version(self, object, relations_as_of=None, check_db=False):
    """Return the current version of the given object.

    When *check_db* is set (or the given version is already closed) the
    current version is looked up in the database instead.
    """
    if check_db or object.version_end_date is not None:
        current = self.current.filter(identity=object.identity).first()
    else:
        current = object
    return self.adjust_version_as_of(current, relations_as_of)
|
Return the current version of the given object .
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.