Dataset columns: idx (int64, 0 to 63k), question (string, 61 to 4.03k chars), target (string, 6 to 1.23k chars).
2,400
def query(self, constraint, sortby=None, typenames=None, maxrecords=10, startposition=0):
    if 'where' in constraint:
        query = self._get_repo_filter(Layer.objects).filter(is_valid=True).extra(
            where=[constraint['where']], params=constraint['values'])
    else:
        query = self._get_repo_filter(Layer.objects).filter(is_valid=True)
    total = query.count()
    if sortby is not None:
        if 'spatial' in sortby and sortby['spatial']:
            desc = False
            if sortby['order'] == 'DESC':
                desc = True
            query = query.all()
            return [str(total), sorted(
                query,
                key=lambda x: float(util.get_geometry_area(getattr(x, sortby['propertyname']))),
                reverse=desc,
            )[startposition:startposition + int(maxrecords)]]
        else:
            if sortby['order'] == 'DESC':
                pname = '-%s' % sortby['propertyname']
            else:
                pname = sortby['propertyname']
            return [str(total), query.order_by(pname)[startposition:startposition + int(maxrecords)]]
    else:
        return [str(total), query.all()[startposition:startposition + int(maxrecords)]]
Query records from the underlying repository.
2,401
def insert(self, resourcetype, source, insert_date=None):
    caller = inspect.stack()[1][3]
    if caller == 'transaction':
        hhclass = 'Layer'
        source = resourcetype
        resourcetype = resourcetype.csw_schema
    else:
        hhclass = 'Service'
        if resourcetype not in HYPERMAP_SERVICE_TYPES.keys():
            raise RuntimeError('Unsupported Service Type')
    return self._insert_or_update(resourcetype, source, mode='insert', hhclass=hhclass)
Insert a record into the repository
2,402
def _insert_or_update(self, resourcetype, source, mode='insert', hhclass='Service'):
    keywords = []
    if self.filter is not None:
        catalog = Catalog.objects.get(id=int(self.filter.split()[-1]))
    try:
        if hhclass == 'Layer':
            match = Layer.objects.filter(name=source.name, title=source.title,
                                         abstract=source.abstract, is_monitored=False)
            matches = match.all()
            if matches:
                if mode == 'insert':
                    raise RuntimeError('HHypermap error: Layer %d \'%s\' already exists' % (
                        matches[0].id, source.title))
                elif mode == 'update':
                    match.update(name=source.name, title=source.title,
                                 abstract=source.abstract, is_monitored=False,
                                 xml=source.xml, wkt_geometry=source.wkt_geometry,
                                 anytext=util.get_anytext([source.title, source.abstract, source.keywords_csv]))
            service = get_service(source.xml)
            res, keywords = create_layer_from_metadata_xml(resourcetype, source.xml,
                                                           monitor=False, service=service,
                                                           catalog=catalog)
            res.save()
            LOGGER.debug('Indexing layer with id %s on search engine' % res.uuid)
            index_layer(res.id, use_cache=True)
        else:
            if resourcetype == 'http://www.opengis.net/cat/csw/2.0.2':
                res = Endpoint(url=source, catalog=catalog)
            else:
                res = Service(type=HYPERMAP_SERVICE_TYPES[resourcetype], url=source, catalog=catalog)
            res.save()
            if keywords:
                for kw in keywords:
                    res.keywords.add(kw)
    except Exception as err:
        raise RuntimeError('HHypermap error: %s' % err)
    ids = []
    if hhclass == 'Layer':
        ids.append({'identifier': res.uuid, 'title': res.title})
    else:
        if resourcetype == 'http://www.opengis.net/cat/csw/2.0.2':
            for res in Endpoint.objects.filter(url=source).all():
                ids.append({'identifier': res.uuid, 'title': res.url})
        else:
            for res in Service.objects.filter(url=source).all():
                ids.append({'identifier': res.uuid, 'title': res.title})
    return ids
Insert or update a record in the repository
2,403
def delete(self, constraint):
    results = self._get_repo_filter(Service.objects).extra(
        where=[constraint['where']], params=constraint['values']).all()
    deleted = len(results)
    results.delete()
    return deleted
Delete a record from the repository
2,404
def check(func):
    def iCheck(request, *args, **kwargs):
        if not request.method == "POST":
            return HttpResponseBadRequest("Must be POST request.")
        follow = func(request, *args, **kwargs)
        if request.is_ajax():
            return HttpResponse('ok')
        try:
            if 'next' in request.GET:
                return HttpResponseRedirect(request.GET.get('next'))
            if 'next' in request.POST:
                return HttpResponseRedirect(request.POST.get('next'))
            return HttpResponseRedirect(follow.target.get_absolute_url())
        except (AttributeError, TypeError):
            if 'HTTP_REFERER' in request.META:
                return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/'))
            if follow:
                return HttpResponseServerError('"%s" object of type ``%s`` has no method ``get_absolute_url()``.' % (
                    unicode(follow.target), follow.target.__class__))
            return HttpResponseServerError('No follow object and `next` parameter found.')
    return iCheck
Check the permissions, HTTP method and login state.
2,405
def register(model, field_name=None, related_name=None, lookup_method_name='get_follows'):
    if model in registry:
        return
    registry.append(model)
    if not field_name:
        field_name = 'target_%s' % model._meta.module_name
    if not related_name:
        related_name = 'follow_%s' % model._meta.module_name
    field = ForeignKey(model, related_name=related_name, null=True, blank=True, db_index=True)
    field.contribute_to_class(Follow, field_name)
    setattr(model, lookup_method_name, get_followers_for_object)
    model_map[model] = [related_name, field_name]
This registers any model class to be follow-able.
2,406
def follow(user, obj):
    follow, created = Follow.objects.get_or_create(user, obj)
    return follow
Make a user follow an object
2,407
def unfollow(user, obj):
    try:
        follow = Follow.objects.get_follows(obj).get(user=user)
        follow.delete()
        return follow
    except Follow.DoesNotExist:
        pass
Make a user unfollow an object
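A minimal usage sketch of the two helpers above, assuming a model registered via register() (the Site model and user variable are illustrative, not from the source):
site = Site.objects.get(pk=1)
follow(user, site)      # creates the Follow link
unfollow(user, site)    # deletes it again; returns None if it never existed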
2,408
def create(self, user, obj, **kwargs):
    follow = Follow(user=user)
    follow.target = obj
    follow.save()
    return follow
Create a new follow link between a user and an object of a registered model type.
2,409
def get_or_create(self, user, obj, **kwargs):
    if not self.is_following(user, obj):
        return self.create(user, obj, **kwargs), True
    return self.get_follows(obj).get(user=user), False
Almost the same as FollowManager.objects.create, but it behaves like Django's normal get_or_create methods.
2,410
def is_following(self, user, obj):
    if isinstance(user, AnonymousUser):
        return False
    return 0 < self.get_follows(obj).filter(user=user).count()
Returns True if the user follows the object, otherwise False; anonymous users never follow anything.
2,411
def get_follows(self, model_or_obj_or_qs):
    fname = self.fname(model_or_obj_or_qs)
    if isinstance(model_or_obj_or_qs, QuerySet):
        return self.filter(**{'%s__in' % fname: model_or_obj_or_qs})
    if inspect.isclass(model_or_obj_or_qs):
        return self.exclude(**{fname: None})
    return self.filter(**{fname: model_or_obj_or_qs})
Returns all the followers of a model, an object, or a queryset.
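The three accepted argument kinds, as a short hypothetical sketch (Site is an assumed registered model):
Follow.objects.get_follows(Site)                    # follows of any Site (model class)
Follow.objects.get_follows(site)                    # follows of a single instance
Follow.objects.get_follows(Site.objects.all()[:5])  # follows of anything in a queryset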
2,412
def create_event_regressors(self, event_times_indices, covariates=None, durations=None):
    if covariates is None:
        covariates = np.ones(self.event_times_indices.shape)
    if durations is None:
        durations = np.ones(self.event_times_indices.shape)
    else:
        durations = np.round(durations * self.deconvolution_frequency).astype(int)
    mean_duration = np.mean(durations)
    regressors_for_event = np.zeros((self.deconvolution_interval_size, self.resampled_signal_size))
    for cov, eti, dur in zip(covariates, event_times_indices, durations):
        valid = True
        if eti < 0:
            self.logger.debug('deconv samples are starting before the data starts.')
            valid = False
        if eti + self.deconvolution_interval_size > self.resampled_signal_size:
            self.logger.debug('deconv samples are continuing after the data stops.')
            valid = False
        if eti > self.resampled_signal_size:
            self.logger.debug('event falls outside of the scope of the data.')
            valid = False
        if valid:
            this_event_design_matrix = np.diag(np.ones(self.deconvolution_interval_size)) * cov
            over_durations_dm = np.copy(this_event_design_matrix)
            if dur > 1:
                for d in np.arange(1, dur):
                    over_durations_dm[d:] += this_event_design_matrix[:-d]
                over_durations_dm /= mean_duration
            regressors_for_event[:, eti:int(eti + self.deconvolution_interval_size)] += over_durations_dm
    return regressors_for_event
create_event_regressors creates the part of the design matrix corresponding to one event type.
2,413
def regress(self, method='lstsq'):
    if method == 'lstsq':
        self.betas, residuals_sum, rank, s = LA.lstsq(self.design_matrix.T, self.resampled_signal.T)
        self.residuals = self.resampled_signal - self.predict_from_design_matrix(self.design_matrix)
    elif method == 'sm_ols':
        import statsmodels.api as sm
        assert self.resampled_signal.shape[0] == 1, \
            'signal input into statsmodels OLS cannot contain multiple signals at once, present shape %s' % str(self.resampled_signal.shape)
        model = sm.OLS(np.squeeze(self.resampled_signal), self.design_matrix.T)
        results = model.fit()
        self.betas = np.array(results.params).reshape((self.design_matrix.shape[0], self.resampled_signal.shape[0]))
        self.residuals = np.array(results.resid).reshape(self.resampled_signal.shape)
    self.logger.debug('performed %s regression on %s design_matrix and %s signal' % (
        method, str(self.design_matrix.shape), str(self.resampled_signal.shape)))
regress performs linear least squares regression of the design matrix on the data.
2,414
def predict_from_design_matrix(self, design_matrix):
    assert hasattr(self, 'betas'), 'no betas found, please run regression before prediction'
    assert design_matrix.shape[0] == self.betas.shape[0], \
        'designmatrix needs to have the same number of regressors as the betas already calculated'
    prediction = np.dot(self.betas.astype(np.float32).T, design_matrix.astype(np.float32))
    return prediction
predict_from_design_matrix predicts signals given a design matrix.
2,415
def resource_urls(request):
    url_parsed = urlparse(settings.SEARCH_URL)
    defaults = dict(
        APP_NAME=__description__,
        APP_VERSION=__version__,
        SITE_URL=settings.SITE_URL.rstrip('/'),
        SEARCH_TYPE=settings.SEARCH_TYPE,
        SEARCH_URL=settings.SEARCH_URL,
        SEARCH_IP='%s://%s:%s' % (url_parsed.scheme, url_parsed.hostname, url_parsed.port),
    )
    return defaults
Global values to pass to templates
2,416
def remove_service_checks(self, service_id):
    from hypermap.aggregator.models import Service
    service = Service.objects.get(id=service_id)
    service.check_set.all().delete()
    layer_to_process = service.layer_set.all()
    for layer in layer_to_process:
        layer.check_set.all().delete()
Remove all checks from a service.
2,417
def index_service(self, service_id):
    from hypermap.aggregator.models import Service
    service = Service.objects.get(id=service_id)
    if not service.is_valid:
        LOGGER.debug('Not indexing service with id %s in search engine as it is not valid' % service.id)
        return
    LOGGER.debug('Indexing service %s' % service.id)
    layer_to_process = service.layer_set.all()
    for layer in layer_to_process:
        if not settings.REGISTRY_SKIP_CELERY:
            index_layer(layer.id, use_cache=True)
        else:
            index_layer(layer.id)
Index a service in the search engine.
2,418
def index_layer(self, layer_id, use_cache=False):
    from hypermap.aggregator.models import Layer
    layer = Layer.objects.get(id=layer_id)
    if not layer.is_valid:
        LOGGER.debug('Not indexing or removing layer with id %s in search engine as it is not valid' % layer.id)
        unindex_layer(layer.id, use_cache)
        return
    if layer.was_deleted:
        LOGGER.debug('Not indexing or removing layer with id %s in search engine as was_deleted is true' % layer.id)
        unindex_layer(layer.id, use_cache)
        return
    if use_cache:
        LOGGER.debug('Caching layer with id %s for syncing with search engine' % layer.id)
        layers = cache.get('layers')
        if layers is None:
            layers = set([layer.id])
        else:
            layers.add(layer.id)
        cache.set('layers', layers)
        return
    if SEARCH_TYPE == 'solr':
        from hypermap.aggregator.solr import SolrHypermap
        LOGGER.debug('Syncing layer %s to solr' % layer.name)
        solrobject = SolrHypermap()
        success, message = solrobject.layer_to_solr(layer)
        if not settings.REGISTRY_SKIP_CELERY:
            if not success:
                self.update_state(state=states.FAILURE, meta=message)
                raise Ignore()
    elif SEARCH_TYPE == 'elasticsearch':
        from hypermap.aggregator.elasticsearch_client import ESHypermap
        LOGGER.debug('Syncing layer %s to es' % layer.name)
        esobject = ESHypermap()
        success, message = esobject.layer_to_es(layer)
        if not settings.REGISTRY_SKIP_CELERY:
            if not success:
                self.update_state(state=states.FAILURE, meta=message)
                raise Ignore()
Index a layer in the search backend. If use_cache is set, append it to the list; if it isn't, send the transaction right away. The cache needs memcached to be available.
2,419
def unindex_layers_with_issues(self, use_cache=False):
    from hypermap.aggregator.models import Issue, Layer, Service
    from django.contrib.contenttypes.models import ContentType
    layer_type = ContentType.objects.get_for_model(Layer)
    service_type = ContentType.objects.get_for_model(Service)
    for issue in Issue.objects.filter(content_type__pk=layer_type.id):
        unindex_layer(issue.content_object.id, use_cache)
    for issue in Issue.objects.filter(content_type__pk=service_type.id):
        for layer in issue.content_object.layer_set.all():
            unindex_layer(layer.id, use_cache)
Remove the index for layers in the search backend which are linked to an issue.
2,420
def unindex_layer(self, layer_id, use_cache=False):
    from hypermap.aggregator.models import Layer
    layer = Layer.objects.get(id=layer_id)
    if use_cache:
        LOGGER.debug('Caching layer with id %s for being removed from search engine' % layer.id)
        deleted_layers = cache.get('deleted_layers')
        if deleted_layers is None:
            deleted_layers = set([layer.id])
        else:
            deleted_layers.add(layer.id)
        cache.set('deleted_layers', deleted_layers)
        return
    if SEARCH_TYPE == 'solr':
        from hypermap.aggregator.solr import SolrHypermap
        LOGGER.debug('Removing layer %s from solr' % layer.id)
        try:
            solrobject = SolrHypermap()
            solrobject.remove_layer(layer.uuid)
        except Exception:
            LOGGER.error('Layer NOT correctly removed from Solr')
    elif SEARCH_TYPE == 'elasticsearch':
        pass
Remove the index for a layer in the search backend. If use_cache is set, append it to the list of removed layers; if it isn't, send the transaction right away.
2,421
def index_all_layers(self):
    from hypermap.aggregator.models import Layer
    if not settings.REGISTRY_SKIP_CELERY:
        layers_cache = set(Layer.objects.filter(is_valid=True).values_list('id', flat=True))
        deleted_layers_cache = set(Layer.objects.filter(is_valid=False).values_list('id', flat=True))
        cache.set('layers', layers_cache)
        cache.set('deleted_layers', deleted_layers_cache)
    else:
        for layer in Layer.objects.all():
            index_layer(layer.id)
Index all layers in the search engine.
2,422
def bbox2wktpolygon(bbox):
    try:
        minx = float(bbox[0])
        miny = float(bbox[1])
        maxx = float(bbox[2])
        maxy = float(bbox[3])
    except Exception:
        LOGGER.debug("Invalid bbox, setting it to a zero POLYGON")
        minx = 0
        miny = 0
        maxx = 0
        maxy = 0
    return 'POLYGON((%.2f %.2f, %.2f %.2f, %.2f %.2f, %.2f %.2f, %.2f %.2f))' % (
        minx, miny, minx, maxy, maxx, maxy, maxx, miny, minx, miny)
Return an OGC WKT Polygon of a simple bbox list.
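A quick round trip (values chosen for illustration):
bbox2wktpolygon(['-180', '-90', '180', '90'])
# -> 'POLYGON((-180.00 -90.00, -180.00 90.00, 180.00 90.00, 180.00 -90.00, -180.00 -90.00))'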
2,423
def gen_anytext(*args):
    bag = []
    for term in args:
        if term is not None:
            if isinstance(term, list):
                for term2 in term:
                    if term2 is not None:
                        bag.append(term2)
            else:
                bag.append(term)
    return ' '.join(bag)
Convenience function to create a bag of words for the anytext property.
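For example:
gen_anytext('roads', ['transport', None, 'osm'], None, 'Europe')
# -> 'roads transport osm Europe'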
2,424
def endpointlist_post_save(instance, *args, **kwargs):
    with open(instance.upload.file.name, mode='rb') as f:
        lines = f.readlines()
    for url in lines:
        if len(url) > 255:
            LOGGER.debug('Skipping this endpoint, as it is more than 255 characters: %s' % url)
        else:
            if Endpoint.objects.filter(url=url, catalog=instance.catalog).count() == 0:
                endpoint = Endpoint(url=url, endpoint_list=instance)
                endpoint.catalog = instance.catalog
                endpoint.save()
    if not settings.REGISTRY_SKIP_CELERY:
        update_endpoints.delay(instance.id)
    else:
        update_endpoints(instance.id)
Used to process the lines of the endpoint list.
2,425
def layer_pre_save(instance, *args, **kwargs):
    is_valid = True
    if not instance.service.type == 'Hypermap:WorldMap':
        if not instance.service.is_valid:
            is_valid = False
            LOGGER.debug('Layer with id %s is marked invalid because its service is invalid' % instance.id)
    if instance.bbox_x0 > -2 and instance.bbox_x1 < 2 and instance.bbox_y0 > -2 and instance.bbox_y1 < 2:
        is_valid = False
        LOGGER.debug('Layer with id %s is marked invalid because its extent is within (-2, -2, +2, +2)' % instance.id)
    instance.is_valid = is_valid
Used to check layer validity.
2,426
def layer_post_save(instance, *args, **kwargs):
    if instance.is_monitored and instance.service.is_monitored:
        if not settings.REGISTRY_SKIP_CELERY:
            check_layer.delay(instance.id)
        else:
            check_layer(instance.id)
    else:
        index_layer(instance.id)
Used to do a full layer check when saving it.
2,427
def update_layers(self):
    signals.post_save.disconnect(layer_post_save, sender=Layer)
    try:
        LOGGER.debug('Updating layers for service id %s' % self.id)
        if self.type == 'OGC:WMS':
            update_layers_wms(self)
        elif self.type == 'OGC:WMTS':
            update_layers_wmts(self)
        elif self.type == 'ESRI:ArcGIS:MapServer':
            update_layers_esri_mapserver(self)
        elif self.type == 'ESRI:ArcGIS:ImageServer':
            update_layers_esri_imageserver(self)
        elif self.type == 'Hypermap:WorldMapLegacy':
            update_layers_wm_legacy(self)
        elif self.type == 'Hypermap:WorldMap':
            update_layers_geonode_wm(self)
        elif self.type == 'Hypermap:WARPER':
            update_layers_warper(self)
    except Exception:
        LOGGER.error('Error updating layers for service %s' % self.uuid)
    signals.post_save.connect(layer_post_save, sender=Layer)
Update layers for a service.
2,428
def update_validity(self):
    if self.type == 'Hypermap:WorldMap':
        return
    signals.post_save.disconnect(service_post_save, sender=Service)
    try:
        is_valid = True
        if self.srs.filter(code__in=SUPPORTED_SRS).count() == 0:
            LOGGER.debug('Service with id %s is marked invalid because it is not exposed in SUPPORTED_SRS' % self.id)
            is_valid = False
        if self.type == 'OGC:WMTS':
            LOGGER.debug('Service with id %s is marked invalid because it is of type OGC:WMTS' % self.id)
            is_valid = False
        if 'noaa' in self.url.lower():
            LOGGER.debug('Service with id %s is marked invalid because it is from NOAA' % self.id)
            is_valid = False
        self.is_valid = is_valid
        self.save()
    except Exception:
        LOGGER.error('Error updating validity of the service!')
    signals.post_save.connect(service_post_save, sender=Service)
Update validity of a service.
2,429
def get_url_endpoint(self):
    endpoint = self.url
    if self.type not in ('Hypermap:WorldMap',):
        endpoint = 'registry/%s/layer/%s/map/wmts/1.0.0/WMTSCapabilities.xml' % (
            self.catalog.slug, self.id)
    return endpoint
Returns the Hypermap endpoint for a layer. This endpoint will be the WMTS MapProxy endpoint; only for WorldMap do we use the original endpoint.
2,430
def check_available(self):
    success = True
    start_time = datetime.datetime.utcnow()
    message = ''
    LOGGER.debug('Checking layer id %s' % self.id)
    signals.post_save.disconnect(layer_post_save, sender=Layer)
    try:
        self.update_thumbnail()
    except ValueError as err:
        if str(err).startswith("unknown url type:"):
            LOGGER.debug('Thumbnail can not be updated: %s' % str(err))
    except Exception as err:
        message = str(err)
        success = False
    signals.post_save.connect(layer_post_save, sender=Layer)
    end_time = datetime.datetime.utcnow()
    delta = end_time - start_time
    response_time = '%s.%s' % (delta.seconds, delta.microseconds)
    check = Check(content_object=self, success=success, response_time=response_time, message=message)
    check.save()
    LOGGER.debug('Layer checked in %s seconds, status is %s' % (response_time, success))
    return success, message
Check for availability of a layer and provide run metrics.
2,431
def _input_github_repo(url=None):
    if url is None:
        url = user_input('Input the URL of the GitHub repository '
                         'to use as a `trytravis` repository: ')
    url = url.strip()
    http_match = _HTTPS_REGEX.match(url)
    ssh_match = _SSH_REGEX.match(url)
    if not http_match and not ssh_match:
        raise RuntimeError('That URL doesn\'t look like a valid '
                           'GitHub URL. We expect something '
                           'of the form: `https://github.com/[USERNAME]/'
                           '[REPOSITORY]` or `ssh://[email protected]/'
                           '[USERNAME]/[REPOSITORY]')
    if http_match:
        _, name = http_match.groups()
    else:
        _, name = ssh_match.groups()
    if 'trytravis' not in name:
        raise RuntimeError('You must have `trytravis` in the name of your '
                           'repository. This is a security feature to reduce '
                           'chances of running git push -f on a repository '
                           'you don\'t mean to.')
    accept = user_input('Remember that `trytravis` will make commits on your '
                        'behalf to `%s`. Are you sure you wish to use this '
                        'repository? Type `y` or `yes` to accept: ' % url)
    if accept.lower() not in ['y', 'yes']:
        raise RuntimeError('Operation aborted by user.')
    if not os.path.isdir(config_dir):
        os.makedirs(config_dir)
    with open(os.path.join(config_dir, 'repo'), 'w+') as f:
        f.truncate()
        f.write(url)
    print('Repository saved successfully.')
Grabs input from the user and saves it as their trytravis target repo
2,432
def _load_github_repo():
    if 'TRAVIS' in os.environ:
        raise RuntimeError('Detected that we are running in Travis. '
                           'Stopping to prevent infinite loops.')
    try:
        with open(os.path.join(config_dir, 'repo'), 'r') as f:
            return f.read()
    except (OSError, IOError):
        raise RuntimeError('Could not find your repository. '
                           'Have you ran `trytravis --repo`?')
Loads the GitHub repository from the user's config.
2,433
def _submit_changes_to_github_repo(path, url):
    try:
        repo = git.Repo(path)
    except Exception:
        raise RuntimeError('Couldn\'t locate a repository at `%s`.' % path)
    commited = False
    try:
        try:
            repo.delete_remote('trytravis')
        except Exception:
            pass
        print('Adding a temporary remote to '
              '`%s`...' % url)
        remote = repo.create_remote('trytravis', url)
        print('Adding all local changes...')
        repo.git.add('--all')
        try:
            print('Committing local changes...')
            timestamp = datetime.datetime.now().isoformat()
            repo.git.commit(m='trytravis-' + timestamp)
            commited = True
        except git.exc.GitCommandError as e:
            if 'nothing to commit' in str(e):
                commited = False
            else:
                raise
        commit = repo.head.commit.hexsha
        committed_at = repo.head.commit.committed_datetime
        print('Pushing to `trytravis` remote...')
        remote.push(force=True)
    finally:
        if commited:
            print('Reverting to old state...')
            repo.git.reset('HEAD^')
        try:
            repo.delete_remote('trytravis')
        except Exception:
            pass
    return commit, committed_at
Temporarily commits local changes and submits them to the GitHub repository that the user has specified. Then reverts the changes to the git repository if a commit was necessary.
2,434
def _wait_for_travis_build(url, commit, committed_at):
    print('Waiting for a Travis build to appear '
          'for `%s` after `%s`...' % (commit, committed_at))
    import requests
    slug = _slug_from_url(url)
    start_time = time.time()
    build_id = None
    while time.time() - start_time < 60:
        with requests.get('https://api.travis-ci.org/repos/%s/builds' % slug,
                          headers=_travis_headers()) as r:
            if not r.ok:
                raise RuntimeError('Could not reach the Travis API '
                                   'endpoint. Additional information: '
                                   '%s' % str(r.content))
            commit_to_sha = {}
            json = r.json()
            for travis_commit in sorted(json['commits'], key=lambda x: x['committed_at']):
                travis_committed_at = datetime.datetime.strptime(
                    travis_commit['committed_at'], '%Y-%m-%dT%H:%M:%SZ').replace(tzinfo=utc)
                if travis_committed_at < committed_at:
                    continue
                commit_to_sha[travis_commit['id']] = travis_commit['sha']
            for build in json['builds']:
                if (build['commit_id'] in commit_to_sha and
                        commit_to_sha[build['commit_id']] == commit):
                    build_id = build['id']
                    print('Travis build id: `%d`' % build_id)
                    print('Travis build URL: `https://travis-ci.org/'
                          '%s/builds/%d`' % (slug, build_id))
        if build_id is not None:
            break
        time.sleep(3.0)
    else:
        raise RuntimeError('Timed out while waiting for a Travis build '
                           'to start. Is Travis configured for `%s`?' % url)
    return build_id
Waits for a Travis build to appear with the given commit SHA
2,435
def _watch_travis_build(build_id):
    import requests
    try:
        build_size = None
        running = True
        while running:
            with requests.get('https://api.travis-ci.org/builds/%d' % build_id,
                              headers=_travis_headers()) as r:
                json = r.json()
                if build_size is not None:
                    if build_size > 1:
                        sys.stdout.write('\r\x1b[%dA' % build_size)
                    else:
                        sys.stdout.write('\r')
                build_size = len(json['jobs'])
                running = False
                current_number = 1
                for job in json['jobs']:
                    color, state, is_running = _travis_job_state(job['state'])
                    if is_running:
                        running = True
                    platform = job['config']['os']
                    if platform == 'osx':
                        platform = ' osx '
                    env = job['config'].get('env', '')
                    sudo = 's' if job['config'].get('sudo', True) else 'c'
                    lang = job['config'].get('language', 'generic')
                    padding = ' ' * (len(str(build_size)) - len(str(current_number)))
                    number = str(current_number) + padding
                    current_number += 1
                    job_display = '#' + ' '.join([number, state, platform, sudo, lang, env])
                    print(color + job_display + colorama.Style.RESET_ALL)
            time.sleep(3.0)
    except KeyboardInterrupt:
        pass
Watches and progressively outputs information about a given Travis build
2,436
def _travis_job_state(state):
    if state in [None, 'queued', 'created', 'received']:
        return colorama.Fore.YELLOW, '*', True
    elif state in ['started', 'running']:
        return colorama.Fore.LIGHTYELLOW_EX, '*', True
    elif state == 'passed':
        return colorama.Fore.LIGHTGREEN_EX, 'P', False
    elif state == 'failed':
        return colorama.Fore.LIGHTRED_EX, 'X', False
    elif state == 'errored':
        return colorama.Fore.LIGHTRED_EX, '!', False
    elif state == 'canceled':
        return colorama.Fore.LIGHTBLACK_EX, 'X', False
    else:
        raise RuntimeError('unknown state: %s' % str(state))
Converts a Travis state into a state character, a color, and whether it's still running or stopped.
2,437
def _slug_from_url(url):
    http_match = _HTTPS_REGEX.match(url)
    ssh_match = _SSH_REGEX.match(url)
    if not http_match and not ssh_match:
        raise RuntimeError('Could not parse the URL (`%s`) '
                           'for your repository.' % url)
    if http_match:
        return '/'.join(http_match.groups())
    else:
        return '/'.join(ssh_match.groups())
Parses a project slug out of either an HTTPS or SSH URL.
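For instance, assuming _HTTPS_REGEX captures the username and repository name as its two groups (the URL is illustrative):
_slug_from_url('https://github.com/example-user/trytravis-demo')
# -> 'example-user/trytravis-demo'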
2,438
def main(argv=None):
    try:
        colorama.init()
        if argv is None:
            argv = sys.argv[1:]
        _main(argv)
    except RuntimeError as e:
        print(colorama.Fore.RED + 'ERROR: ' + str(e) + colorama.Style.RESET_ALL)
        sys.exit(1)
    else:
        sys.exit(0)
Main entry point when the user runs the trytravis command.
2,439
def csw_global_dispatch_by_catalog(request, catalog_slug):
    catalog = get_object_or_404(Catalog, slug=catalog_slug)
    if catalog:
        url = settings.SITE_URL.rstrip('/') + request.path.rstrip('/')
        return csw_global_dispatch(request, url=url, catalog_id=catalog.id)
pycsw wrapper for catalogs
2,440
def good_coords(coords):
    if len(coords) != 4:
        return False
    for coord in coords[0:3]:  # note: only the first three values are validated here
        try:
            num = float(coord)
            if math.isnan(num):
                return False
            if math.isinf(num):
                return False
        except ValueError:
            return False
    return True
Passed a string array; returns True if it looks like a valid four-value coordinate list.
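Quick behavior check:
good_coords(['-180', '-90', '180', '90'])  # True
good_coords(['-180', '-90', '180'])        # False: not four values
good_coords(['-180', 'nan', '180', '90'])  # False: NaN rejected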
2,441
def clear_es():
    ESHypermap.es.indices.delete(ESHypermap.index_name, ignore=[400, 404])
    LOGGER.debug('Elasticsearch: Index cleared')
Clear all indexes in the es core
2,442
def create_indices(catalog_slug):
    mapping = {
        "mappings": {
            "layer": {
                "properties": {
                    "layer_geoshape": {
                        "type": "geo_shape",
                        "tree": "quadtree",
                        "precision": REGISTRY_MAPPING_PRECISION
                    }
                }
            }
        }
    }
    ESHypermap.es.indices.create(catalog_slug, ignore=[400, 404], body=mapping)
Create ES core indices
2,443
def kill_process(procname, scriptname):
    import signal
    import subprocess
    p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
    out, err = p.communicate()
    for line in out.decode().splitlines():
        if procname in line and scriptname in line:
            pid = int(line.split()[1])
            info('Stopping %s %s %d' % (procname, scriptname, pid))
            os.kill(pid, signal.SIGKILL)
Kill WSGI processes that may be running in development.
2,444
def populate_initial_services():
    services_list = (
        ('Harvard WorldMap',
         'Harvard WorldMap open source web geospatial platform',
         'Hypermap:WorldMap',
         'http://worldmap.harvard.edu'),
        ('NYPL MapWarper',
         'The New York Public Library (NYPL) MapWarper web site',
         'Hypermap:WARPER',
         'http://maps.nypl.org/warper/maps'),
        ('Map Warper',
         'The MapWarper web site developed, hosted and maintained by Tim Waters',
         'Hypermap:WARPER',
         'http://mapwarper.net/maps'),
        ('WorldMap Warp',
         'The MapWarper instance part of the Harvard WorldMap project',
         'Hypermap:WARPER',
         'http://warp.worldmap.harvard.edu/maps'),
        ('WFP GeoNode',
         'World Food Programme GeoNode',
         'OGC:WMS',
         'http://geonode.wfp.org/geoserver/ows?'),
        ('NASA EARTHDATA',
         'NASA EARTHDATA, powered by EOSDIS',
         'OGC:WMTS',
         'http://map1.vis.earthdata.nasa.gov/wmts-geo/1.0.0/WMTSCapabilities.xml'),
    )
    esri_endpoint = 'https://gis.ngdc.noaa.gov/arcgis/rest/services'
    LOGGER.debug('*** Importing esri endpoint: %s' % esri_endpoint)
    create_services_from_endpoint(esri_endpoint)
    for service in services_list:
        LOGGER.debug('*** Importing %s' % service[0])
        service = Service(title=service[0], abstract=service[1], type=service[2], url=service[3])
        service.save()
Populate a freshly installed Hypermap instance with basic services.
2,445
def main():
    tcp_adapter = TcpAdapter("192.168.1.3", name="HASS", activate_source=False)
    hdmi_network = HDMINetwork(tcp_adapter)
    hdmi_network.start()
    while True:
        for d in hdmi_network.devices:
            _LOGGER.info("Device: %s", d)
        time.sleep(7)
For testing purposes.
2,446
def compare_hexdigests(digest1, digest2):
    digest1 = tuple([int(digest1[i:i + 2], 16) for i in range(0, 63, 2)])
    digest2 = tuple([int(digest2[i:i + 2], 16) for i in range(0, 63, 2)])
    bits = 0
    for i in range(32):
        bits += POPC[255 & digest1[i] ^ digest2[i]]
    return 128 - bits
Compute the difference in bits between digest1 and digest2. Returns -127 to 128; 128 is the same, -127 is different.
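A hedged usage sketch (assumes a Nilsimsa class in this module exposing update() and hexdigest(); the exact digests depend on the input data):
a, b = Nilsimsa(), Nilsimsa()
a.update(b'The quick brown fox jumps over the lazy dog')
b.update(b'The quick brown fox jumps over the lazy cat')
compare_hexdigests(a.hexdigest(), b.hexdigest())  # close to 128 for near-identical inputs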
2,447
def tran3(self, a, b, c, n):
    return (((TRAN[(a + n) & 255] ^ TRAN[b] * (n + n + 1)) + TRAN[c ^ TRAN[n]]) & 255)
Get the accumulator for a transition n between chars a, b, c.
2,448
def update(self, data):
    for character in data:
        if PY3:
            ch = character
        else:
            ch = ord(character)
        self.count += 1
        if self.lastch[1] > -1:
            self.acc[self.tran3(ch, self.lastch[0], self.lastch[1], 0)] += 1
        if self.lastch[2] > -1:
            self.acc[self.tran3(ch, self.lastch[0], self.lastch[2], 1)] += 1
            self.acc[self.tran3(ch, self.lastch[1], self.lastch[2], 2)] += 1
        if self.lastch[3] > -1:
            self.acc[self.tran3(ch, self.lastch[0], self.lastch[3], 3)] += 1
            self.acc[self.tran3(ch, self.lastch[1], self.lastch[3], 4)] += 1
            self.acc[self.tran3(ch, self.lastch[2], self.lastch[3], 5)] += 1
            self.acc[self.tran3(self.lastch[3], self.lastch[0], ch, 6)] += 1
            self.acc[self.tran3(self.lastch[3], self.lastch[2], ch, 7)] += 1
        self.lastch = [ch] + self.lastch[:3]
Add data to the running digest, increasing the accumulators for the 0-8 triplets formed by this char and the previous 0-3 chars.
2,449
def digest(self):
    total = 0
    if self.count == 3:
        total = 1
    elif self.count == 4:
        total = 4
    elif self.count > 4:
        total = 8 * self.count - 28
    threshold = total / 256
    code = [0] * 32
    for i in range(256):
        if self.acc[i] > threshold:
            code[i >> 3] += 1 << (i & 7)
    return code[::-1]
Get the digest of the data seen thus far as a list of bytes.
2,450
def from_file(self, filename):
    with open(filename, 'rb') as f:
        while True:
            data = f.read(10480)
            if not data:
                break
            self.update(data)
Update the running digest with the content of the named file.
2,451
def compare(self, otherdigest, ishex=False):
    bits = 0
    myd = self.digest()
    if ishex:
        otherdigest = tuple([int(otherdigest[i:i + 2], 16) for i in range(0, 63, 2)])
    for i in range(32):
        bits += POPC[255 & myd[i] ^ otherdigest[i]]
    return 128 - bits
Compute the difference in bits between our own digest and another. Returns -127 to 128; 128 is the same, -127 is different.
2,452
def jdout(api_response):
    try:
        output = json.dumps(api_response.cgx_content, indent=4)
    except (TypeError, ValueError, AttributeError):
        try:
            output = json.dumps(api_response, indent=4)
        except (TypeError, ValueError, AttributeError):
            output = api_response
    return output
JD Output function. Does quick pretty-printing of a CloudGenix Response body. This function returns a string instead of directly printing content.
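A short usage sketch; sdk.get.sites() is a hypothetical call that returns a CloudGenix response object carrying cgx_content:
resp = sdk.get.sites()
print(jdout(resp))  # pretty-printed JSON body; falls back to the raw object if not serializable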
2,453
def jdout_detailed(api_response, sensitive=False):
    try:
        output = "REQUEST: {0} {1}\n".format(api_response.request.method,
                                             api_response.request.path_url)
        output += "REQUEST HEADERS:\n"
        for key, value in api_response.request.headers.items():
            if key.lower() in ['cookie'] and not sensitive:
                cookie_list = value.split('; ')
                muted_cookie_list = []
                for cookie in cookie_list:
                    if cookie.lower().strip().startswith('auth_token='):
                        newcookie = cookie.strip()[:11] + "\"<SENSITIVE - NOT SHOWN BY DEFAULT>\""
                        muted_cookie_list.append(newcookie)
                    else:
                        muted_cookie_list.append(cookie)
                muted_value = "; ".join(muted_cookie_list)
                output += "\t{0}: {1}\n".format(key, muted_value)
            elif key.lower() in ['x-auth-token'] and not sensitive:
                output += "\t{0}: {1}\n".format(key, "<SENSITIVE - NOT SHOWN BY DEFAULT>")
            else:
                output += "\t{0}: {1}\n".format(key, value)
        if not api_response.request.body:
            output += "REQUEST BODY:\n{0}\n\n".format({})
        else:
            try:
                output += "REQUEST BODY:\n{0}\n\n".format(
                    json.dumps(json.loads(api_response.request.body), indent=4))
            except (TypeError, ValueError, AttributeError):
                output += "REQUEST BODY:\n{0}\n\n".format(jdout(api_response.request.body))
        output += "RESPONSE: {0} {1}\n".format(api_response.status_code, api_response.reason)
        output += "RESPONSE HEADERS:\n"
        for key, value in api_response.headers.items():
            output += "\t{0}: {1}\n".format(key, value)
        try:
            output += "RESPONSE DATA:\n{0}".format(json.dumps(api_response.cgx_content, indent=4))
        except (TypeError, ValueError, AttributeError):
            output += "RESPONSE DATA:\n{0}".format(json.dumps(json.loads(api_response.content), indent=4))
    except (TypeError, ValueError, AttributeError, UnicodeDecodeError):
        try:
            output = json.dumps(api_response, indent=4)
        except (TypeError, ValueError, AttributeError):
            output = api_response
    return output
JD Output Detailed function. Meant for quick DETAILED pretty-printing of CloudGenix Request and Response objects for troubleshooting. This function returns a string instead of directly printing content.
2,454
def notify_for_new_version(self):
    try:
        recommend_update = False
        update_check_resp = requests.get(self.update_info_url, timeout=3)
        web_version = update_check_resp.json()["info"]["version"]
        api_logger.debug("RETRIEVED_VERSION: %s", web_version)
        available_version = SDK_BUILD_REGEX.search(web_version).groupdict()
        current_version = SDK_BUILD_REGEX.search(self.version).groupdict()
        available_major = available_version.get('major')
        available_minor = available_version.get('minor')
        available_patch = available_version.get('patch')
        available_build = available_version.get('build')
        current_major = current_version.get('major')
        current_minor = current_version.get('minor')
        current_patch = current_version.get('patch')
        current_build = current_version.get('build')
        api_logger.debug("AVAILABLE_VERSION: %s", available_version)
        api_logger.debug("CURRENT_VERSION: %s", current_version)
        # note: groupdict() yields strings, so these comparisons are lexicographic
        if available_major > current_major:
            recommend_update = True
        elif available_major >= current_major and available_minor > current_minor:
            recommend_update = True
        elif available_major >= current_major and available_minor >= current_minor and \
                available_patch > current_patch:
            recommend_update = True
        api_logger.debug("NEED_UPDATE: %s", recommend_update)
        if recommend_update:
            sys.stderr.write("WARNING: CloudGenix Python SDK upgrade available. SDKs are typically deprecated 6 "
                             "months after release of a new version.\n"
                             "\tLatest Version: {0}\n"
                             "\tCurrent Version: {1}\n"
                             "\tFor more info, see 'https://github.com/cloudgenix/sdk-python'. Additionally, this "
                             "message can be suppressed by instantiating the API with API(update_check=False).\n\n"
                             "".format(web_version, self.version))
        return
    except Exception:
        return
Check for a new version of the SDK on API constructor instantiation. If a new version is found, print a notification to STDERR.
2,455
def ssl_verify(self, ssl_verify):
    self.verify = ssl_verify
    if isinstance(self.verify, bool):
        if self.verify:
            if os.name == 'nt':
                # Windows can't reopen an open temp file: write, close, clean up at exit.
                self._ca_verify_file_handle = temp_ca_bundle(delete=False)
                self._ca_verify_file_handle.write(BYTE_CA_BUNDLE)
                self._ca_verify_file_handle.flush()
                self.ca_verify_filename = self._ca_verify_file_handle.name
                self._ca_verify_file_handle.close()
            else:
                self._ca_verify_file_handle = temp_ca_bundle()
                self._ca_verify_file_handle.write(BYTE_CA_BUNDLE)
                self._ca_verify_file_handle.flush()
                self.ca_verify_filename = self._ca_verify_file_handle.name
            atexit.register(self._cleanup_ca_temp_file)
        else:
            urllib3.disable_warnings()
            self.ca_verify_filename = False
    else:
        self.ca_verify_filename = self.verify
    return
Modify SSL verification settings.
2,456
def modify_rest_retry(self, total=8, connect=None, read=None, redirect=None, status=None,
                      method_whitelist=urllib3.util.retry.Retry.DEFAULT_METHOD_WHITELIST,
                      status_forcelist=None, backoff_factor=0.705883,
                      raise_on_redirect=True, raise_on_status=True,
                      respect_retry_after_header=True, adapter_url="https://"):
    if status_forcelist is None:
        status_forcelist = (413, 429, 502, 503, 504)
    retry = urllib3.util.retry.Retry(total=total, connect=connect, read=read,
                                     redirect=redirect, status=status,
                                     method_whitelist=method_whitelist,
                                     status_forcelist=status_forcelist,
                                     backoff_factor=backoff_factor,
                                     raise_on_redirect=raise_on_redirect,
                                     raise_on_status=raise_on_status,
                                     respect_retry_after_header=respect_retry_after_header)
    adapter = requests.adapters.HTTPAdapter(max_retries=retry)
    self._session.mount(adapter_url, adapter)
    return
Modify retry parameters for the SDK's REST call object.
2,457
def set_debug(self, debuglevel):
    if isinstance(debuglevel, int):
        self._debuglevel = debuglevel
    if self._debuglevel == 1:
        logging.basicConfig(level=logging.INFO,
                            format="%(levelname)s [%(name)s.%(funcName)s:%(lineno)d] %(message)s")
        api_logger.setLevel(logging.INFO)
    elif self._debuglevel == 2:
        logging.basicConfig(level=logging.DEBUG,
                            format="%(levelname)s [%(name)s.%(funcName)s:%(lineno)d] %(message)s")
        requests.cookies.cookielib.debug = True
        api_logger.setLevel(logging.DEBUG)
    elif self._debuglevel >= 3:
        logging.basicConfig(level=logging.DEBUG,
                            format="%(levelname)s [%(name)s.%(funcName)s:%(lineno)d] %(message)s")
        requests.cookies.cookielib.debug = True
        api_logger.setLevel(logging.DEBUG)
        urllib3_logger = logging.getLogger("requests.packages.urllib3")
        urllib3_logger.setLevel(logging.DEBUG)
        urllib3_logger.propagate = True
    else:
        for handler in logging.root.handlers[:]:
            logging.root.removeHandler(handler)
        requests.cookies.cookielib.debug = False
        api_logger.setLevel(logging.WARNING)
    return
Change the debug level of the API
2,458
def _subclass_container(self):
    _parent_class = self

    class GetWrapper(Get):
        def __init__(self):
            self._parent_class = _parent_class

    class PostWrapper(Post):
        def __init__(self):
            self._parent_class = _parent_class

    class PutWrapper(Put):
        def __init__(self):
            self._parent_class = _parent_class

    class PatchWrapper(Patch):
        def __init__(self):
            self._parent_class = _parent_class

    class DeleteWrapper(Delete):
        def __init__(self):
            self._parent_class = _parent_class

    class InteractiveWrapper(Interactive):
        def __init__(self):
            self._parent_class = _parent_class

    return {"get": GetWrapper, "post": PostWrapper, "put": PutWrapper,
            "patch": PatchWrapper, "delete": DeleteWrapper,
            "interactive": InteractiveWrapper}
Call subclasses via a function to allow passing the parent namespace to subclasses.
2,459
def _cleanup_ca_temp_file(self):
    if os.name == 'nt':
        if isinstance(self.ca_verify_filename, (binary_type, text_type)):
            os.unlink(self.ca_verify_filename)
    else:
        self._ca_verify_file_handle.close()
Function to clean up the CA temp file for requests.
2,460
def parse_auth_token(self, auth_token):
    auth_token_cleaned = auth_token.split('-', 1)[1]
    auth_token_decoded = self.url_decode(auth_token_cleaned)
    auth_dict = {}
    for key_value in auth_token_decoded.split("&"):
        key_value_list = key_value.split("=")
        if len(key_value_list) == 2 and type(key_value_list[0]) in [text_type, binary_type]:
            auth_dict[key_value_list[0]] = key_value_list[1]
    return auth_dict
Break auth_token up into its constituent values.
2,461
def parse_region(self, login_response):
    auth_token = login_response.cgx_content['x_auth_token']
    auth_token_dict = self.parse_auth_token(auth_token)
    auth_region = auth_token_dict.get('region')
    return auth_region
Return the region from a successful login response.
2,462
def _catch_nonjson_streamresponse(rawresponse):
    try:
        response = json.loads(rawresponse)
    except (ValueError, TypeError):
        if rawresponse:
            response = {
                '_error': [
                    {
                        'message': 'Response not in JSON format.',
                        'data': rawresponse,
                    }
                ]
            }
        else:
            response = {}
    return response
Validate that a streamed response is JSON. Return a Python dictionary either way.
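Behavior sketch, verifiable from the code above:
_catch_nonjson_streamresponse('{"a": 1}')  # -> {'a': 1}
_catch_nonjson_streamresponse('<html>')    # -> {'_error': [{'message': 'Response not in JSON format.', 'data': '<html>'}]}
_catch_nonjson_streamresponse('')          # -> {}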
2,463
def url_decode(url):
    return re.compile('%([0-9a-fA-F]{2})', re.M).sub(
        lambda m: chr(int(m.group(1), 16)), url)
URL-decode function using a regex.
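For example:
url_decode('region%3Dus-west%26token%3Dabc123')
# -> 'region=us-west&token=abc123'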
2,464
def jcrop_css(css_url=None):
    if css_url is None:
        if current_app.config['AVATARS_SERVE_LOCAL']:
            css_url = url_for('avatars.static', filename='jcrop/css/jquery.Jcrop.min.css')
        else:
            css_url = 'https://cdn.jsdelivr.net/npm/[email protected]/css/jquery.Jcrop.min.css'
    return Markup('<link rel="stylesheet" href="%s">' % css_url)
Load the Jcrop CSS file.
2,465
def crop_box(endpoint=None, filename=None):
    crop_size = current_app.config['AVATARS_CROP_BASE_WIDTH']
    if endpoint is None or filename is None:
        url = url_for('avatars.static', filename='default/default_l.jpg')
    else:
        url = url_for(endpoint, filename=filename)
    return Markup('<img src="%s" id="crop-box" style="max-width: %dpx; display: block;">' % (url, crop_size))
Create a crop box.
2,466
def resize_avatar(self, img, base_width):
    w_percent = base_width / float(img.size[0])
    h_size = int(float(img.size[1]) * float(w_percent))
    img = img.resize((base_width, h_size), PIL.Image.ANTIALIAS)
    return img
Resize an avatar.
2,467
def save_avatar(self, image):
    path = current_app.config['AVATARS_SAVE_PATH']
    filename = uuid4().hex + '_raw.png'
    image.save(os.path.join(path, filename))
    return filename
Save an avatar as a raw image and return the new filename.
2,468
def get_image(self, string, width, height, pad=0):
    hex_digest_byte_list = self._string_to_byte_list(string)
    matrix = self._create_matrix(hex_digest_byte_list)
    return self._create_image(matrix, width, height, pad)
Byte representation of a PNG image
2,469
def _get_pastel_colour(self, lighten=127):
    def r():
        return random.randint(0, 128) + lighten
    return r(), r(), r()
Create a pastel colour as an RGB tuple.
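With the default lighten=127, each channel is random.randint(0, 128) + 127, so values land in 127..255 (painter is a hypothetical instance of the surrounding class):
r, g, b = painter._get_pastel_colour()
assert all(127 <= c <= 255 for c in (r, g, b))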
2,470
def _luminance(self, rgb):
    a = []
    for v in rgb:
        v = v / float(255)
        if v < 0.03928:
            result = v / 12.92
        else:
            result = math.pow((v + 0.055) / 1.055, 2.4)
        a.append(result)
    return a[0] * 0.2126 + a[1] * 0.7152 + a[2] * 0.0722
Determine the luminance of an RGB colour.
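This is the WCAG relative-luminance formula; the boundary colours give the expected extremes (painter as above):
painter._luminance((255, 255, 255))  # -> 1.0 (white)
painter._luminance((0, 0, 0))        # -> 0.0 (black)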
2,471
def _string_to_byte_list(self, data):
    bytes_length = 16
    m = self.digest()
    m.update(str.encode(data))
    hex_digest = m.hexdigest()
    # bytes_length doubles as the parse base (hex) here
    return list(int(hex_digest[num * 2:num * 2 + 2], bytes_length)
                for num in range(bytes_length))
Creates a hex digest of the input string used to create the image, if it isn't already hexadecimal.
2,472
def _create_image(self, matrix, width, height, pad):
    image = Image.new("RGB", (width + (pad * 2), height + (pad * 2)), self.bg_colour)
    image_draw = ImageDraw.Draw(image)
    block_width = float(width) / self.cols
    block_height = float(height) / self.rows
    for row, cols in enumerate(matrix):
        for col, cell in enumerate(cols):
            if cell:
                image_draw.rectangle((pad + col * block_width,
                                      pad + row * block_height,
                                      pad + (col + 1) * block_width - 1,
                                      pad + (row + 1) * block_height - 1),
                                     fill=self.fg_colour)
    stream = BytesIO()
    image.save(stream, format="png", optimize=True)
    return stream.getvalue()
Generates the PNG image as a byte string.
2,473
def city(self, value=None):
    if value is not None:
        try:
            value = str(value)
        except ValueError:
            raise ValueError('value {} need to be of type str for field `city`'.format(value))
        if ',' in value:
            raise ValueError('value should not contain a comma for field `city`')
    self._city = value
Corresponds to IDD Field city
2,474
def state_province_region(self, value=None):
    if value is not None:
        try:
            value = str(value)
        except ValueError:
            raise ValueError('value {} need to be of type str for field `state_province_region`'.format(value))
        if ',' in value:
            raise ValueError('value should not contain a comma for field `state_province_region`')
    self._state_province_region = value
Corresponds to IDD Field state_province_region
2,475
def country(self, value=None):
    if value is not None:
        try:
            value = str(value)
        except ValueError:
            raise ValueError('value {} need to be of type str for field `country`'.format(value))
        if ',' in value:
            raise ValueError('value should not contain a comma for field `country`')
    self._country = value
Corresponds to IDD Field country
2,476
def source(self, value=None):
    if value is not None:
        try:
            value = str(value)
        except ValueError:
            raise ValueError('value {} need to be of type str for field `source`'.format(value))
        if ',' in value:
            raise ValueError('value should not contain a comma for field `source`')
    self._source = value
Corresponds to IDD Field source
2,477
def wmo(self, value=None):
    if value is not None:
        try:
            value = str(value)
        except ValueError:
            raise ValueError('value {} need to be of type str for field `wmo`'.format(value))
        if ',' in value:
            raise ValueError('value should not contain a comma for field `wmo`')
    self._wmo = value
Corresponds to IDD Field wmo, usually a 6-digit field. Used as alpha in EnergyPlus.
2,478
def latitude(self, value=0.0):
    if value is not None:
        try:
            value = float(value)
        except ValueError:
            raise ValueError('value {} need to be of type float for field `latitude`'.format(value))
        if value < -90.0:
            raise ValueError('value need to be greater or equal -90.0 for field `latitude`')
        if value > 90.0:
            raise ValueError('value need to be smaller 90.0 for field `latitude`')
    self._latitude = value
Corresponds to IDD Field latitude
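These setters all follow the same coerce-then-validate pattern; a hedged sketch of the behavior, assuming loc is an instance of the surrounding class and these are plain method calls (in the source they may be bound as property setters):
loc.latitude('42.37')  # coerced to float and stored in self._latitude
loc.latitude(91.0)     # raises ValueError: outside the [-90.0, 90.0] range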
2,479
def longitude(self, value=0.0):
    if value is not None:
        try:
            value = float(value)
        except ValueError:
            raise ValueError('value {} need to be of type float for field `longitude`'.format(value))
        if value < -180.0:
            raise ValueError('value need to be greater or equal -180.0 for field `longitude`')
        if value > 180.0:
            raise ValueError('value need to be smaller 180.0 for field `longitude`')
    self._longitude = value
Corresponds to IDD Field longitude
2,480
def timezone(self, value=0.0):
    if value is not None:
        try:
            value = float(value)
        except ValueError:
            raise ValueError('value {} need to be of type float for field `timezone`'.format(value))
        if value < -12.0:
            raise ValueError('value need to be greater or equal -12.0 for field `timezone`')
        if value > 12.0:
            raise ValueError('value need to be smaller 12.0 for field `timezone`')
    self._timezone = value
Corresponds to IDD Field timezone. Time relative to GMT.
2,481
def elevation(self, value=0.0):
    if value is not None:
        try:
            value = float(value)
        except ValueError:
            raise ValueError('value {} need to be of type float for field `elevation`'.format(value))
        if value < -1000.0:
            raise ValueError('value need to be greater or equal -1000.0 for field `elevation`')
        if value >= 9999.9:
            raise ValueError('value need to be smaller 9999.9 for field `elevation`')
    self._elevation = value
Corresponds to IDD Field elevation
2,482
def title_of_design_condition(self, value=None):
    if value is not None:
        try:
            value = str(value)
        except ValueError:
            raise ValueError('value {} need to be of type str for field `title_of_design_condition`'.format(value))
        if ',' in value:
            raise ValueError('value should not contain a comma for field `title_of_design_condition`')
    self._title_of_design_condition = value
Corresponds to IDD Field title_of_design_condition
2,483
def unkown_field(self, value=None):
    if value is not None:
        try:
            value = str(value)
        except ValueError:
            raise ValueError('value {} need to be of type str for field `unkown_field`'.format(value))
        if ',' in value:
            raise ValueError('value should not contain a comma for field `unkown_field`')
    self._unkown_field = value
Corresponds to IDD Field unkown_field. Empty field in data.
2,484
def design_stat_heating(self, value="Heating"):
    if value is not None:
        try:
            value = str(value)
        except ValueError:
            raise ValueError('value {} need to be of type str for field `design_stat_heating`'.format(value))
        if ',' in value:
            raise ValueError('value should not contain a comma for field `design_stat_heating`')
        vals = set()
        vals.add("Heating")
        if value not in vals:
            raise ValueError('value {} is not an accepted value for field `design_stat_heating`'.format(value))
    self._design_stat_heating = value
Corresponds to IDD Field design_stat_heating
2,485
def coldestmonth(self, value=None):
    if value is not None:
        try:
            value = int(value)
        except ValueError:
            raise ValueError('value {} need to be of type int for field `coldestmonth`'.format(value))
        if value < 1:
            raise ValueError('value need to be greater or equal 1 for field `coldestmonth`')
        if value > 12:
            raise ValueError('value need to be smaller 12 for field `coldestmonth`')
    self._coldestmonth = value
Corresponds to IDD Field coldestmonth
2,486
def ws004c(self, value=None):
    if value is not None:
        try:
            value = float(value)
        except ValueError:
            raise ValueError('value {} need to be of type float for field `ws004c`'.format(value))
    self._ws004c = value
Corresponds to IDD Field ws004c
2,487
def db_ws004c(self, value=None):
    if value is not None:
        try:
            value = float(value)
        except ValueError:
            raise ValueError('value {} need to be of type float for field `db_ws004c`'.format(value))
    self._db_ws004c = value
Corresponds to IDD Field db_ws004c. Mean coincident dry-bulb temperature for the wind speed corresponding to 0.4% cumulative frequency for the coldest month.
2,488
def ws010c(self, value=None):
    if value is not None:
        try:
            value = float(value)
        except ValueError:
            raise ValueError('value {} need to be of type float for field `ws010c`'.format(value))
    self._ws010c = value
Corresponds to IDD Field ws010c. Wind speed corresponding to 1.0% cumulative frequency of occurrence for the coldest month.
2,489
def db_ws010c(self, value=None):
    if value is not None:
        try:
            value = float(value)
        except ValueError:
            raise ValueError('value {} need to be of type float for field `db_ws010c`'.format(value))
    self._db_ws010c = value
Corresponds to IDD Field db_ws010c. Mean coincident dry-bulb temperature for the wind speed corresponding to 1.0% cumulative frequency for the coldest month.
2,490
def ws_db996(self, value=None):
    if value is not None:
        try:
            value = float(value)
        except ValueError:
            raise ValueError('value {} need to be of type float for field `ws_db996`'.format(value))
    self._ws_db996 = value
Corresponds to IDD Field ws_db996. Mean wind speed coincident with the 99.6% dry-bulb temperature.
2,491
def design_stat_cooling(self, value="Cooling"):
    if value is not None:
        try:
            value = str(value)
        except ValueError:
            raise ValueError('value {} need to be of type str for field `design_stat_cooling`'.format(value))
        if ',' in value:
            raise ValueError('value should not contain a comma for field `design_stat_cooling`')
        vals = set()
        vals.add("Cooling")
        if value not in vals:
            raise ValueError('value {} is not an accepted value for field `design_stat_cooling`'.format(value))
    self._design_stat_cooling = value
Corresponds to IDD Field design_stat_cooling
2,492
def dbr(self, value=None):
    if value is not None:
        try:
            value = float(value)
        except ValueError:
            raise ValueError('value {} need to be of type float for field `dbr`'.format(value))
    self._dbr = value
Corresponds to IDD Field dbr. Daily temperature range for the hottest month.
2,493
def wb004(self, value=None):
    if value is not None:
        try:
            value = float(value)
        except ValueError:
            raise ValueError('value {} need to be of type float for field `wb004`'.format(value))
    self._wb004 = value
Corresponds to IDD Field wb004. Wet-bulb temperature corresponding to 0.4% annual cumulative frequency of occurrence.
2,494
def db_wb004(self, value=None):
    if value is not None:
        try:
            value = float(value)
        except ValueError:
            raise ValueError('value {} need to be of type float for field `db_wb004`'.format(value))
    self._db_wb004 = value
Corresponds to IDD Field db_wb004. Mean coincident dry-bulb temperature for the wet-bulb temperature corresponding to 0.4% annual cumulative frequency of occurrence.
2,495
def wb010(self, value=None):
    if value is not None:
        try:
            value = float(value)
        except ValueError:
            raise ValueError('value {} need to be of type float for field `wb010`'.format(value))
    self._wb010 = value
Corresponds to IDD Field wb010. Wet-bulb temperature corresponding to 1.0% annual cumulative frequency of occurrence.
2,496
def db_wb010(self, value=None):
    if value is not None:
        try:
            value = float(value)
        except ValueError:
            raise ValueError('value {} need to be of type float for field `db_wb010`'.format(value))
    self._db_wb010 = value
Corresponds to IDD Field db_wb010. Mean coincident dry-bulb temperature for the wet-bulb temperature corresponding to 1.0% annual cumulative frequency of occurrence.
2,497
def wb020(self, value=None):
    if value is not None:
        try:
            value = float(value)
        except ValueError:
            raise ValueError('value {} need to be of type float for field `wb020`'.format(value))
    self._wb020 = value
Corresponds to IDD Field wb020. Wet-bulb temperature corresponding to 2.0% annual cumulative frequency of occurrence.
2,498
def db_wb020(self, value=None):
    if value is not None:
        try:
            value = float(value)
        except ValueError:
            raise ValueError('value {} need to be of type float for field `db_wb020`'.format(value))
    self._db_wb020 = value
Corresponds to IDD Field db_wb020. Mean coincident dry-bulb temperature for the wet-bulb temperature corresponding to 2.0% annual cumulative frequency of occurrence.
2,499
def ws_db004(self, value=None):
    if value is not None:
        try:
            value = float(value)
        except ValueError:
            raise ValueError('value {} need to be of type float for field `ws_db004`'.format(value))
    self._ws_db004 = value
Corresponds to IDD Field ws_db004. Mean wind speed coincident with the 0.4% dry-bulb temperature.