text_prompt: string (100–17.7k characters)
code_prompt: string (7–9.86k characters)
<SYSTEM_TASK:> Returns contents of the manifest where environment variables <END_TASK> <USER_TASK:> Description: def _get_encrypted_manifest(self): """ Returns contents of the manifest where environment variables that are secret will be encrypted without modifying the existing state in memory which will remain unencrypted. """
    key = predix.config.get_crypt_key(self.manifest_key)
    f = Fernet(key)

    manifest = copy.deepcopy(self.manifest)
    for var in self.manifest['env'].keys():
        value = str(self.manifest['env'][var])
        manifest['env'][var] = f.encrypt(bytes(value, 'utf-8')).decode('utf-8')

    return manifest
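For illustration, a minimal sketch of the round trip: a value encrypted with the manifest key can be recovered later with the same Fernet key (the secret value here is hypothetical)::

    from cryptography.fernet import Fernet

    key = Fernet.generate_key()        # stands in for predix.config.get_crypt_key()
    f = Fernet(key)

    token = f.encrypt(b'my-client-secret').decode('utf-8')    # what gets written to manifest.yml
    plain = f.decrypt(token.encode('utf-8')).decode('utf-8')  # what an app reads back at runtime
    assert plain == 'my-client-secret'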
<SYSTEM_TASK:> Write manifest to disk. <END_TASK> <USER_TASK:> Description: def write_manifest(self, manifest_path=None, encrypted=None): """ Write manifest to disk. :param manifest_path: write to a different location :param encrypted: write with env data encrypted """
    manifest_path = manifest_path or self.manifest_path
    self.manifest['env']['PREDIXPY_VERSION'] = str(predix.version)

    with open(manifest_path, 'w') as output_file:
        if encrypted or self.encrypted:
            self.manifest['env']['PREDIXPY_ENCRYPTED'] = self.manifest_key
            content = self._get_encrypted_manifest()
        else:
            content = self.manifest  # shallow reference
            if 'PREDIXPY_ENCRYPTED' in content['env']:
                del(content['env']['PREDIXPY_ENCRYPTED'])

        yaml.safe_dump(content, output_file, default_flow_style=False,
                explicit_start=True)
        output_file.close()
<SYSTEM_TASK:> Add the given service to the manifest. <END_TASK> <USER_TASK:> Description: def add_service(self, service_name): """ Add the given service to the manifest. """
    if service_name not in self.manifest['services']:
        self.manifest['services'].append(service_name)
<SYSTEM_TASK:> Will load any environment variables found in the <END_TASK> <USER_TASK:> Description: def set_os_environ(self): """ Will load any environment variables found in the manifest file into the current process for use by applications. When apps run in cloud foundry this would happen automatically. """
    for key in self.manifest['env'].keys():
        os.environ[key] = str(self.manifest['env'][key])
<SYSTEM_TASK:> Return the client id that should have all the <END_TASK> <USER_TASK:> Description: def get_client_id(self): """ Return the client id that should have all the needed scopes and authorities for the services in this manifest. """
    self._client_id = predix.config.get_env_value(predix.app.Manifest, 'client_id')
    return self._client_id
<SYSTEM_TASK:> Return the client secret that should correspond with <END_TASK> <USER_TASK:> Description: def get_client_secret(self): """ Return the client secret that should correspond with the client id. """
    self._client_secret = predix.config.get_env_value(predix.app.Manifest, 'client_secret')
    return self._client_secret
<SYSTEM_TASK:> Returns an instance of the Time Series Service. <END_TASK> <USER_TASK:> Description: def get_timeseries(self, *args, **kwargs): """ Returns an instance of the Time Series Service. """
    import predix.data.timeseries
    ts = predix.data.timeseries.TimeSeries(*args, **kwargs)
    return ts
<SYSTEM_TASK:> Returns an instance of the Asset Control Service. <END_TASK> <USER_TASK:> Description: def get_acs(self): """ Returns an instance of the Asset Control Service. """
    import predix.security.acs
    acs = predix.security.acs.AccessControl()
    return acs
<SYSTEM_TASK:> Returns an instance of the Weather Service. <END_TASK> <USER_TASK:> Description: def get_weather(self): """ Returns an instance of the Weather Service. """
    import predix.data.weather
    weather = predix.data.weather.WeatherForecast()
    return weather
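Taken together, these accessors let application code pull pre-configured service clients off the manifest. A minimal usage sketch, assuming a manifest.yml in the default location whose env section already carries the UAA client and service details::

    import predix.app

    app = predix.app.Manifest()
    ts = app.get_timeseries()
    acs = app.get_acs()
    weather = app.get_weather()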
<SYSTEM_TASK:> Return the weather forecast for a given location. <END_TASK> <USER_TASK:> Description: def get_weather_forecast_days(self, latitude, longitude, days=1, frequency=1, reading_type=None): """ Return the weather forecast for a given location. :: results = ws.get_weather_forecast_days(lat, long) for w in results['hits']: print w['start_datetime_local'] print w['reading_type'], w['reading_value'] For description of reading types: https://graphical.weather.gov/xml/docs/elementInputNames.php """
    params = {}

    # Can get data from NWS1 or NWS3 representing 1-hr and 3-hr
    # intervals.
    if frequency not in [1, 3]:
        raise ValueError("Reading frequency must be 1 or 3")

    params['days'] = days
    params['source'] = 'NWS' + str(frequency)
    params['latitude'] = latitude
    params['longitude'] = longitude

    if reading_type:
        # url encoding will make spaces a + instead of %20, which service
        # interprets as an "and" search which is undesirable
        reading_type = reading_type.replace(' ', '%20')
        params['reading_type'] = urllib.quote_plus(reading_type)

    url = self.uri + '/v1/weather-forecast-days/'
    return self.service._get(url, params=params)
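A hedged usage example based on the docstring above; the coordinates are arbitrary and 'temperature' is assumed to be one of the NWS reading types linked in the docstring::

    import predix.data.weather

    ws = predix.data.weather.WeatherForecast()
    results = ws.get_weather_forecast_days(39.909, -105.105, days=1,
            frequency=3, reading_type='temperature')
    for w in results['hits']:
        print(w['start_datetime_local'], w['reading_type'], w['reading_value'])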
<SYSTEM_TASK:> Return the weather forecast for a given location for specific <END_TASK> <USER_TASK:> Description: def get_weather_forecast(self, latitude, longitude, start, end, frequency=1, reading_type=None): """ Return the weather forecast for a given location for specific datetime specified in UTC format. :: results = ws.get_weather_forecast(lat, long, start, end) for w in results['hits']: print w['start_datetime_local'] print w['reading_type'], '=', w['reading_value'] For description of reading types: https://graphical.weather.gov/xml/docs/elementInputNames.php """
    params = {}

    # Can get data from NWS1 or NWS3 representing 1-hr and 3-hr
    # intervals.
    if frequency not in [1, 3]:
        raise ValueError("Reading frequency must be 1 or 3")

    params['source'] = 'NWS' + str(frequency)
    params['latitude'] = latitude
    params['longitude'] = longitude
    params['start_datetime_utc'] = start
    params['end_datetime_utc'] = end

    if reading_type:
        # Not using urllib.quote_plus() because it uses a +, which is
        # interpreted by the service as an "and" instead of a space.
        reading_type = reading_type.replace(' ', '%20')
        params['reading_type'] = reading_type

    url = self.uri + '/v1/weather-forecast-datetime/'
    return self.service._get(url, params=params)
<SYSTEM_TASK:> Can generate a name based on the space, service name and plan. <END_TASK> <USER_TASK:> Description: def _generate_name(self, space, service_name, plan_name): """ Can generate a name based on the space, service name and plan. """
return str.join('-', [space, service_name, plan_name]).lower()
<SYSTEM_TASK:> Return a sensible configuration path for caching config <END_TASK> <USER_TASK:> Description: def _get_config_path(self): """ Return a sensible configuration path for caching config settings. """
    org = self.service.space.org.name
    space = self.service.space.name
    name = self.name
    return "~/.predix/%s/%s/%s.json" % (org, space, name)
<SYSTEM_TASK:> Create a Cloud Foundry service that has custom parameters. <END_TASK> <USER_TASK:> Description: def _create_service(self, parameters={}, **kwargs): """ Create a Cloud Foundry service that has custom parameters. """
logging.debug("_create_service()") logging.debug(str.join(',', [self.service_name, self.plan_name, self.name, str(parameters)])) return self.service.create_service(self.service_name, self.plan_name, self.name, parameters, **kwargs)
<SYSTEM_TASK:> Delete a Cloud Foundry service and any associations. <END_TASK> <USER_TASK:> Description: def _delete_service(self, service_only=False): """ Delete a Cloud Foundry service and any associations. """
    logging.debug('_delete_service()')
    return self.service.delete_service(self.service_name)
<SYSTEM_TASK:> Get a service key or create one if needed. <END_TASK> <USER_TASK:> Description: def _get_or_create_service_key(self): """ Get a service key or create one if needed. """
    keys = self.service._get_service_keys(self.name)
    for key in keys['resources']:
        if key['entity']['name'] == self.service_name:
            return self.service.get_service_key(self.name, self.service_name)

    self.service.create_service_key(self.name, self.service_name)
    return self.service.get_service_key(self.name, self.service_name)
<SYSTEM_TASK:> Will get configuration for the service from a service key. <END_TASK> <USER_TASK:> Description: def _get_service_config(self): """ Will get configuration for the service from a service key. """
    key = self._get_or_create_service_key()

    config = {}
    config['service_key'] = [{'name': self.name}]
    config.update(key['entity']['credentials'])

    return config
<SYSTEM_TASK:> Create the service. <END_TASK> <USER_TASK:> Description: def create(self, parameters={}, create_keys=True, **kwargs): """ Create the service. """
    # Create the service
    cs = self._create_service(parameters=parameters, **kwargs)

    # Create the service key to get config details and
    # store in local cache file.
    if create_keys:
        cfg = parameters
        cfg.update(self._get_service_config())
        self.settings.save(cfg)
<SYSTEM_TASK:> Returns a valid UAA instance for performing administrative functions <END_TASK> <USER_TASK:> Description: def _get_or_create_uaa(self, uaa): """ Returns a valid UAA instance for performing administrative functions on services. """
    if isinstance(uaa, predix.admin.uaa.UserAccountAuthentication):
        return uaa

    logging.debug("Initializing a new UAA")
    return predix.admin.uaa.UserAccountAuthentication()
<SYSTEM_TASK:> Grant the given client id all the scopes and authorities <END_TASK> <USER_TASK:> Description: def grant_client(self, client_id, publish=False, subscribe=False, publish_protocol=None, publish_topics=None, subscribe_topics=None, scope_prefix='predix-event-hub', **kwargs): """ Grant the given client id all the scopes and authorities needed to work with the eventhub service. """
    scopes = ['openid']
    authorities = ['uaa.resource']
    zone_id = self.get_zone_id()

    # always must be part of base user scope
    scopes.append('%s.zones.%s.user' % (scope_prefix, zone_id))
    authorities.append('%s.zones.%s.user' % (scope_prefix, zone_id))

    if publish_topics is not None or subscribe_topics is not None:
        raise Exception("multiple topics are not currently available in predix-py")

    if publish_topics is None:
        publish_topics = ['topic']

    if subscribe_topics is None:
        subscribe_topics = ['topic']

    if publish:
        # we are granting just the default topic
        if publish_protocol is None:
            scopes.append('%s.zones.%s.grpc.publish' % (scope_prefix, zone_id))
            authorities.append('%s.zones.%s.grpc.publish' % (scope_prefix, zone_id))
            scopes.append('%s.zones.%s.wss.publish' % (scope_prefix, zone_id))
            authorities.append('%s.zones.%s.wss.publish' % (scope_prefix, zone_id))
        else:
            scopes.append('%s.zones.%s.%s.publish' % (scope_prefix, zone_id, publish_protocol))
            authorities.append('%s.zones.%s.%s.publish' % (scope_prefix, zone_id, publish_protocol))

        # we are requesting multiple topics
        for topic in publish_topics:
            if publish_protocol is None:
                scopes.append('%s.zones.%s.%s.grpc.publish' % (scope_prefix, zone_id, topic))
                scopes.append('%s.zones.%s.%s.wss.publish' % (scope_prefix, zone_id, topic))
                scopes.append('%s.zones.%s.%s.user' % (scope_prefix, zone_id, topic))
                authorities.append('%s.zones.%s.%s.grpc.publish' % (scope_prefix, zone_id, topic))
                authorities.append('%s.zones.%s.%s.wss.publish' % (scope_prefix, zone_id, topic))
                authorities.append('%s.zones.%s.%s.user' % (scope_prefix, zone_id, topic))
            else:
                scopes.append('%s.zones.%s.%s.%s.publish' % (scope_prefix, zone_id, topic, publish_protocol))
                authorities.append('%s.zones.%s.%s.%s.publish' % (scope_prefix, zone_id, topic, publish_protocol))

    if subscribe:
        # we are granting just the default topic
        scopes.append('%s.zones.%s.grpc.subscribe' % (scope_prefix, zone_id))
        authorities.append('%s.zones.%s.grpc.subscribe' % (scope_prefix, zone_id))

        # we are requesting multiple topics
        for topic in subscribe_topics:
            scopes.append('%s.zones.%s.%s.grpc.subscribe' % (scope_prefix, zone_id, topic))
            authorities.append('%s.zones.%s.%s.grpc.subscribe' % (scope_prefix, zone_id, topic))

    self.service.uaa.uaac.update_client_grants(client_id, scope=scopes,
            authorities=authorities)

    return self.service.uaa.uaac.get_client(client_id)
<SYSTEM_TASK:> returns the publish grpc endpoint for ingestion. <END_TASK> <USER_TASK:> Description: def get_eventhub_host(self): """ returns the publish grpc endpoint for ingestion. """
    for protocol in self.service.settings.data['publish']['protocol_details']:
        if protocol['protocol'] == 'grpc':
            return protocol['uri'][0:protocol['uri'].index(':')]
<SYSTEM_TASK:> Returns the host address for an instance of Blob Store service from <END_TASK> <USER_TASK:> Description: def _get_host(self): """ Returns the host address for an instance of Blob Store service from environment inspection. """
    if 'VCAP_SERVICES' in os.environ:
        services = json.loads(os.getenv('VCAP_SERVICES'))
        host = services['predix-blobstore'][0]['credentials']['host']
    else:
        host = predix.config.get_env_value(self, 'host')

    # Protocol may not always be included in host setting
    if 'https://' not in host:
        host = 'https://' + host

    return host
<SYSTEM_TASK:> Returns the access key for an instance of Blob Store service from <END_TASK> <USER_TASK:> Description: def _get_access_key_id(self): """ Returns the access key for an instance of Blob Store service from environment inspection. """
    if 'VCAP_SERVICES' in os.environ:
        services = json.loads(os.getenv('VCAP_SERVICES'))
        return services['predix-blobstore'][0]['credentials']['access_key_id']
    else:
        return predix.config.get_env_value(self, 'access_key_id')
<SYSTEM_TASK:> This method is primarily for illustration and just calls the <END_TASK> <USER_TASK:> Description: def list_objects(self, bucket_name=None, **kwargs): """ This method is primarily for illustration and just calls the boto3 client implementation of list_objects but is a common task for first time Predix BlobStore users. """
    if not bucket_name:
        bucket_name = self.bucket_name

    return self.client.list_objects(Bucket=bucket_name, **kwargs)
<SYSTEM_TASK:> This method is primarily for illustration and just calls the <END_TASK> <USER_TASK:> Description: def upload_file(self, src_filepath, dest_filename=None, bucket_name=None, **kwargs): """ This method is primarily for illustration and just calls the boto3 client implementation of upload_file but is a common task for first time Predix BlobStore users. """
    if not bucket_name:
        bucket_name = self.bucket_name

    if not dest_filename:
        dest_filename = src_filepath

    return self.client.upload_file(src_filepath, bucket_name, dest_filename, **kwargs)
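A short sketch of typical use, assuming the blob store credentials are available via VCAP_SERVICES or the PredixPy environment variables, and that the client class is predix.data.blobstore.BlobStore; the file and key names are hypothetical::

    import predix.data.blobstore

    blobstore = predix.data.blobstore.BlobStore()
    blobstore.upload_file('./report.pdf', dest_filename='reports/report.pdf')

    # list_objects() returns the standard boto3 response shape
    for obj in blobstore.list_objects().get('Contents', []):
        print(obj['Key'], obj['Size'])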
<SYSTEM_TASK:> Reads the local cf CLI cache stored in the users <END_TASK> <USER_TASK:> Description: def _get_cloud_foundry_config(self): """ Reads the local cf CLI cache stored in the users home directory. """
    config = os.path.expanduser(self.config_file)
    if not os.path.exists(config):
        raise CloudFoundryLoginError('You must run `cf login` to authenticate')

    with open(config, "r") as data:
        return json.load(data)
<SYSTEM_TASK:> Returns the GUID for the organization currently targeted. <END_TASK> <USER_TASK:> Description: def get_organization_guid(self): """ Returns the GUID for the organization currently targeted. """
    if 'PREDIX_ORGANIZATION_GUID' in os.environ:
        return os.environ['PREDIX_ORGANIZATION_GUID']
    else:
        info = self._get_organization_info()
        for key in ('Guid', 'GUID'):
            if key in info.keys():
                return info[key]

        raise ValueError('Unable to determine cf organization guid')
<SYSTEM_TASK:> Returns the GUID for the space currently targeted. <END_TASK> <USER_TASK:> Description: def get_space_guid(self): """ Returns the GUID for the space currently targeted. Can be set by environment variable with PREDIX_SPACE_GUID. Can be determined by ~/.cf/config.json. """
    if 'PREDIX_SPACE_GUID' in os.environ:
        return os.environ['PREDIX_SPACE_GUID']
    else:
        info = self._get_space_info()
        for key in ('Guid', 'GUID'):
            if key in info.keys():
                return info[key]

        raise ValueError('Unable to determine cf space guid')
<SYSTEM_TASK:> Get the user's PredixPy manifest key. Generate and store one if not <END_TASK> <USER_TASK:> Description: def get_crypt_key(key_path): """ Get the user's PredixPy manifest key. Generate and store one if not yet generated. """
    key_path = os.path.expanduser(key_path)
    if os.path.exists(key_path):
        # Fernet keys are bytes, so read/write the key file in binary mode
        with open(key_path, 'rb') as data:
            key = data.read()
    else:
        key = Fernet.generate_key()
        with open(key_path, 'wb') as output:
            output.write(key)

    return key
<SYSTEM_TASK:> Return environment variable key to use for lookups within a <END_TASK> <USER_TASK:> Description: def get_env_key(obj, key=None): """ Return environment variable key to use for lookups within a namespace represented by the package name. For example, any varialbes for predix.security.uaa are stored as PREDIX_SECURITY_UAA_KEY """
return str.join('_', [obj.__module__.replace('.','_').upper(), key.upper()])
<SYSTEM_TASK:> Returns the environment variable value for the attribute of <END_TASK> <USER_TASK:> Description: def get_env_value(obj, attribute): """ Returns the environment variable value for the attribute of the given object. For example `get_env_value(predix.security.uaa, 'uri')` will return value of environment variable PREDIX_SECURITY_UAA_URI. """
    varname = get_env_key(obj, attribute)
    var = os.environ.get(varname)
    if not var:
        raise ValueError("%s must be set in your environment." % varname)

    return var
<SYSTEM_TASK:> Set the environment variable value for the attribute of the <END_TASK> <USER_TASK:> Description: def set_env_value(obj, attribute, value): """ Set the environment variable value for the attribute of the given object. For example, `set_env_value(predix.security.uaa, 'uri', 'http://...')` will set the environment variable PREDIX_SECURITY_UAA_URI to the given uri. """
    varname = get_env_key(obj, attribute)
    os.environ[varname] = value
    return varname
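A small sketch of the naming convention these helpers implement; the Demo class is purely hypothetical and exists only to supply a __module__ for the key lookup::

    import os
    import predix.config

    class Demo(object):
        pass

    Demo.__module__ = 'predix.security.uaa'   # pretend it lives in this package

    varname = predix.config.set_env_value(Demo, 'uri', 'https://uaa.example.com')
    print(varname)                                  # PREDIX_SECURITY_UAA_URI
    print(predix.config.get_env_value(Demo, 'uri')) # https://uaa.example.com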
<SYSTEM_TASK:> Returns the GUID for the service instance with <END_TASK> <USER_TASK:> Description: def get_instance_guid(self, service_name): """ Returns the GUID for the service instance with the given name. """
    summary = self.space.get_space_summary()
    for service in summary['services']:
        if service['name'] == service_name:
            return service['guid']

    raise ValueError("No service with name '%s' found." % (service_name))
<SYSTEM_TASK:> Return the service bindings for the service instance. <END_TASK> <USER_TASK:> Description: def _get_service_bindings(self, service_name): """ Return the service bindings for the service instance. """
    instance = self.get_instance(service_name)
    return self.api.get(instance['service_bindings_url'])
<SYSTEM_TASK:> Remove service bindings to applications. <END_TASK> <USER_TASK:> Description: def delete_service_bindings(self, service_name): """ Remove service bindings to applications. """
    instance = self.get_instance(service_name)
    return self.api.delete(instance['service_bindings_url'])
<SYSTEM_TASK:> Return the service keys for the given service. <END_TASK> <USER_TASK:> Description: def _get_service_keys(self, service_name): """ Return the service keys for the given service. """
    guid = self.get_instance_guid(service_name)
    uri = "/v2/service_instances/%s/service_keys" % (guid)
    return self.api.get(uri)
<SYSTEM_TASK:> Returns a flat list of the names of the service keys <END_TASK> <USER_TASK:> Description: def get_service_keys(self, service_name): """ Returns a flat list of the names of the service keys for the given service. """
    keys = []
    for key in self._get_service_keys(service_name)['resources']:
        keys.append(key['entity']['name'])

    return keys
<SYSTEM_TASK:> Returns the service key details. <END_TASK> <USER_TASK:> Description: def get_service_key(self, service_name, key_name): """ Returns the service key details. Similar to `cf service-key`. """
    for key in self._get_service_keys(service_name)['resources']:
        if key_name == key['entity']['name']:
            guid = key['metadata']['guid']
            uri = "/v2/service_keys/%s" % (guid)
            return self.api.get(uri)

    return None
<SYSTEM_TASK:> Create a service key for the given service. <END_TASK> <USER_TASK:> Description: def create_service_key(self, service_name, key_name): """ Create a service key for the given service. """
    if self.has_key(service_name, key_name):
        logging.warning("Reusing existing service key %s" % (key_name))
        return self.get_service_key(service_name, key_name)

    body = {
        'service_instance_guid': self.get_instance_guid(service_name),
        'name': key_name
    }

    return self.api.post('/v2/service_keys', body)
<SYSTEM_TASK:> Delete a service key for the given service. <END_TASK> <USER_TASK:> Description: def delete_service_key(self, service_name, key_name): """ Delete a service key for the given service. """
    key = self.get_service_key(service_name, key_name)
    logging.info("Deleting service key %s for service %s" % (key, service_name))
    return self.api.delete(key['metadata']['url'])
<SYSTEM_TASK:> Retrieves a service instance with the given name. <END_TASK> <USER_TASK:> Description: def get_instance(self, service_name): """ Retrieves a service instance with the given name. """
    for resource in self.space._get_instances():
        if resource['entity']['name'] == service_name:
            return resource['entity']
<SYSTEM_TASK:> Return the service plans available for a given service. <END_TASK> <USER_TASK:> Description: def get_service_plan_for_service(self, service_name): """ Return the service plans available for a given service. """
    services = self.get_services()
    for service in services['resources']:
        if service['entity']['label'] == service_name:
            response = self.api.get(service['entity']['service_plans_url'])
            return response['resources']
<SYSTEM_TASK:> Create a service instance. <END_TASK> <USER_TASK:> Description: def create_service(self, service_type, plan_name, service_name, params, async=False, **kwargs): """ Create a service instance. """
    if self.space.has_service_with_name(service_name):
        logging.warning("Service already exists with that name.")
        return self.get_instance(service_name)

    if self.space.has_service_of_type(service_type):
        logging.warning("Service type already exists.")

    guid = self.get_service_plan_guid(service_type, plan_name)
    if not guid:
        raise ValueError("No service plan named: %s" % (plan_name))

    body = {
        'name': service_name,
        'space_guid': self.space.guid,
        'service_plan_guid': guid,
        'parameters': params
    }

    uri = '/v2/service_instances?accepts_incomplete=true'
    if async:
        uri += '&async=true'

    return self.api.post(uri, body)
<SYSTEM_TASK:> Returns the URI endpoint for performing queries of a <END_TASK> <USER_TASK:> Description: def _get_query_uri(self): """ Returns the URI endpoint for performing queries of a Predix Time Series instance from environment inspection. """
    if 'VCAP_SERVICES' in os.environ:
        services = json.loads(os.getenv('VCAP_SERVICES'))
        predix_timeseries = services['predix-timeseries'][0]['credentials']
        return predix_timeseries['query']['uri'].partition('/v1')[0]
    else:
        return predix.config.get_env_value(self, 'query_uri')
<SYSTEM_TASK:> Returns the ZoneId for performing queries of a Predix <END_TASK> <USER_TASK:> Description: def _get_query_zone_id(self): """ Returns the ZoneId for performing queries of a Predix Time Series instance from environment inspection. """
    if 'VCAP_SERVICES' in os.environ:
        services = json.loads(os.getenv('VCAP_SERVICES'))
        predix_timeseries = services['predix-timeseries'][0]['credentials']
        return predix_timeseries['query']['zone-http-header-value']
    else:
        return predix.config.get_env_value(self, 'query_zone_id')
<SYSTEM_TASK:> Will make a direct REST call with the given json body payload to <END_TASK> <USER_TASK:> Description: def _get_datapoints(self, params): """ Will make a direct REST call with the given json body payload to get datapoints. """
    url = self.query_uri + '/v1/datapoints'
    return self.service._get(url, params=params)
<SYSTEM_TASK:> Convenience method that for simple single tag queries will <END_TASK> <USER_TASK:> Description: def get_values(self, *args, **kwargs): """ Convenience method that for simple single tag queries will return just the values to be iterated on. """
    if isinstance(args[0], list):
        raise ValueError("Can only get_values() for a single tag.")

    response = self.get_datapoints(*args, **kwargs)
    for value in response['tags'][0]['results'][0]['values']:
        yield [datetime.datetime.utcfromtimestamp(value[0] / 1000), value[1], value[2]]
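A hedged example of the convenience generator, assuming a Time Series instance configured from the environment; the tag name 'temp' is hypothetical::

    import predix.data.timeseries

    ts = predix.data.timeseries.TimeSeries()
    for dt, value, quality in ts.get_values('temp', start='1d-ago', limit=10):
        print(dt.isoformat(), value, quality)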
<SYSTEM_TASK:> Returns all of the datapoints that match the given query. <END_TASK> <USER_TASK:> Description: def get_datapoints(self, tags, start=None, end=None, order=None, limit=None, qualities=None, attributes=None, measurement=None, aggregations=None, post=False): """ Returns all of the datapoints that match the given query. - tags: list or string identifying the name/tag (ie. "temp") - start: data after this, absolute or relative (ie. '1w-ago' or 1494015972386) - end: data before this value - order: ascending (asc) or descending (desc) - limit: only return a few values (ie. 25) - qualities: data quality value (ie. [ts.GOOD, ts.UNCERTAIN]) - attributes: dictionary of key-values (ie. {'unit': 'mph'}) - measurement: tuple of operation and value (ie. ('gt', 30)) - aggregations: summary statistics on data results (ie. 'avg') - post: POST query instead of GET (caching implication) A few additional observations: - allow service to do most data validation - order is applied before limit so resultset will differ The returned results match what the service response is so you'll need to unpack it as appropriate. Oftentimes what you want for a simple single tag query will be: response['tags'][0]['results'][0]['values'] """
    params = {}

    # Documentation says start is required for GET but not POST, but
    # seems to be required all the time, so using sensible default.
    if not start:
        start = '1w-ago'
        logging.warning("Defaulting query for data with start date %s" % (start))

    # Start date can be absolute or relative, only certain legal values
    # but service will throw error if used improperly. (ms, s, mi, h, d,
    # w, mm, y). Relative dates must end in -ago.
    params['start'] = start

    # Docs say when making POST with a start that end must also be
    # specified, but this does not seem to be the case.
    if end:
        # MAINT: error when end < start which is handled by service
        params['end'] = end

    params['tags'] = []
    if not isinstance(tags, list):
        tags = [tags]

    for tag in tags:
        query = {}
        query['name'] = tag

        # Limit resultset with an integer value
        if limit:
            query['limit'] = int(limit)

        # Order must be 'asc' or 'desc' but will get sensible error
        # from service.
        if order:
            query['order'] = order

        # Filters are complex and support filtering by
        # quality, measurement, and attributes.
        filters = {}

        # Check for the quality of the datapoints
        if qualities is not None:
            if isinstance(qualities, int) or isinstance(qualities, str):
                qualities = [qualities]

            # Timeseries expects quality to be a string, not integer,
            # so coerce each into a string
            for i, quality in enumerate(qualities):
                qualities[i] = str(quality)

            filters['qualities'] = {"values": qualities}

        # Check for attributes on the datapoints, expected to be
        # a dictionary of key / value pairs that datapoints must match.
        if attributes is not None:
            if not isinstance(attributes, dict):
                raise ValueError("Attribute filters must be dictionary.")

            filters['attributes'] = attributes

        # Check for measurements that meet a given comparison operation
        # such as ge, gt, eq, ne, le, lt
        if measurement is not None:
            filters['measurements'] = {
                'condition': measurement[0],
                'values': measurement[1]
            }

        # If we found any filters add them to the query
        if filters:
            query['filters'] = filters

        # Handle any additional aggregations of dataset
        if aggregations is not None:
            if not isinstance(aggregations, list):
                aggregations = [aggregations]

            query['aggregations'] = []
            for aggregation in aggregations:
                query['aggregations'].append({
                    'sampling': {'datapoints': 1},
                    'type': aggregation
                })

        params['tags'].append(query)

    if post:
        return self._post_datapoints(params)
    else:
        return self._get_datapoints({"query": json.dumps(params)})
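For a fuller query, a sketch that combines filters and an aggregation, continuing with the ts instance from the previous sketch; the tag name, attribute, and threshold are illustrative only::

    response = ts.get_datapoints('temp',
            start='1w-ago',
            order='desc',
            limit=25,
            qualities=[ts.GOOD],
            attributes={'unit': 'mph'},
            measurement=('gt', 30),
            aggregations='avg')

    # Unpack the service response as described in the docstring above
    for tag in response['tags']:
        for result in tag['results']:
            print(result['values'])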
<SYSTEM_TASK:> Create a new websocket connection with proper headers. <END_TASK> <USER_TASK:> Description: def _create_connection(self): """ Create a new websocket connection with proper headers. """
logging.debug("Initializing new websocket connection.") headers = { 'Authorization': self.service._get_bearer_token(), 'Predix-Zone-Id': self.ingest_zone_id, 'Content-Type': 'application/json', } url = self.ingest_uri logging.debug("URL=" + str(url)) logging.debug("HEADERS=" + str(headers)) # Should consider connection pooling and longer timeouts return websocket.create_connection(url, header=headers)
<SYSTEM_TASK:> Reuse existing connection or create a new connection. <END_TASK> <USER_TASK:> Description: def _get_websocket(self, reuse=True): """ Reuse existing connection or create a new connection. """
    # Check if still connected
    if self.ws and reuse:
        if self.ws.connected:
            return self.ws

        logging.debug("Stale connection, reconnecting.")

    self.ws = self._create_connection()
    return self.ws
<SYSTEM_TASK:> Establish or reuse socket connection and send <END_TASK> <USER_TASK:> Description: def _send_to_timeseries(self, message): """ Establish or reuse socket connection and send the given message to the timeseries service. """
logging.debug("MESSAGE=" + str(message)) result = None try: ws = self._get_websocket() ws.send(json.dumps(message)) result = ws.recv() except (websocket.WebSocketConnectionClosedException, Exception) as e: logging.debug("Connection failed, will try again.") logging.debug(e) ws = self._get_websocket(reuse=False) ws.send(json.dumps(message)) result = ws.recv() logging.debug("RESULT=" + str(result)) return result
<SYSTEM_TASK:> This convenience method will execute the query passed in as is. For <END_TASK> <USER_TASK:> Description: def execute(self, statement, *args, **kwargs): """ This convenience method will execute the query passed in as is. For more complex functionality you may want to use the sqlalchemy engine directly, but this serves as an example implementation. :param select_query: SQL statement to execute that will identify the resultset of interest. """
    with self.engine.connect() as conn:
        s = sqlalchemy.sql.text(statement)
        return conn.execute(s, **kwargs)
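A minimal sketch of use, assuming an instance (here called db) whose SQLAlchemy engine is already bound to the PostgreSQL service; the table and values are hypothetical and bind parameters are passed as keyword arguments to the text statement::

    db.execute("CREATE TABLE IF NOT EXISTS visitors (name text)")
    db.execute("INSERT INTO visitors (name) VALUES (:name)", name='j12y')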
<SYSTEM_TASK:> Create a new temporary cloud foundry space for <END_TASK> <USER_TASK:> Description: def create_temp_space(): """ Create a new temporary cloud foundry space for a project. """
    # Truncating uuid to just take final 12 characters since space name
    # is used to name services and there is a 50 character limit on instance
    # names.
    # MAINT: hacky with possible collisions
    unique_name = str(uuid.uuid4()).split('-')[-1]

    admin = predix.admin.cf.spaces.Space()
    res = admin.create_space(unique_name)

    space = predix.admin.cf.spaces.Space(guid=res['metadata']['guid'],
            name=res['entity']['name'])
    space.target()

    return space
<SYSTEM_TASK:> Target the current space for any forthcoming Cloud Foundry <END_TASK> <USER_TASK:> Description: def target(self): """ Target the current space for any forthcoming Cloud Foundry operations. """
    # MAINT: I don't like this, but will deal later
    os.environ['PREDIX_SPACE_GUID'] = self.guid
    os.environ['PREDIX_SPACE_NAME'] = self.name
    os.environ['PREDIX_ORGANIZATION_GUID'] = self.org.guid
    os.environ['PREDIX_ORGANIZATION_NAME'] = self.org.name
<SYSTEM_TASK:> Return a flat list of the names for spaces in the organization. <END_TASK> <USER_TASK:> Description: def get_spaces(self): """ Return a flat list of the names for spaces in the organization. """
    self.spaces = []
    for resource in self._get_spaces()['resources']:
        self.spaces.append(resource['entity']['name'])

    return self.spaces
<SYSTEM_TASK:> Create a new space with the given name in the current target <END_TASK> <USER_TASK:> Description: def create_space(self, space_name, add_users=True): """ Create a new space with the given name in the current target organization. """
    body = {
        'name': space_name,
        'organization_guid': self.api.config.get_organization_guid()
    }

    # MAINT: may need to do this more generally later
    if add_users:
        space_users = []
        org_users = self.org.get_users()
        for org_user in org_users['resources']:
            guid = org_user['metadata']['guid']
            space_users.append(guid)

        body['manager_guids'] = space_users
        body['developer_guids'] = space_users

    return self.api.post('/v2/spaces', body)
<SYSTEM_TASK:> Delete the current space, or a space with the given name <END_TASK> <USER_TASK:> Description: def delete_space(self, name=None, guid=None): """ Delete the current space, or a space with the given name or guid. """
    if not guid:
        if name:
            spaces = self._get_spaces()
            for space in spaces['resources']:
                if space['entity']['name'] == name:
                    guid = space['metadata']['guid']
                    break

            if not guid:
                raise ValueError("Space with name %s not found." % (name))
        else:
            guid = self.guid

    logging.warning("Deleting space (%s) and all services." % (guid))
    return self.api.delete("/v2/spaces/%s" % (guid), params={'recursive': 'true'})
<SYSTEM_TASK:> Returns a flat list of the service names available <END_TASK> <USER_TASK:> Description: def get_services(self): """ Returns a flat list of the service names available from the marketplace for this space. """
    services = []
    for resource in self._get_services()['resources']:
        services.append(resource['entity']['label'])

    return services
<SYSTEM_TASK:> Returns the service instances activated in this space. <END_TASK> <USER_TASK:> Description: def _get_instances(self, page_number=None): """ Returns the service instances activated in this space. """
    instances = []
    uri = '/v2/spaces/%s/service_instances' % self.guid
    json_response = self.api.get(uri)
    instances += json_response['resources']

    while json_response['next_url'] is not None:
        json_response = self.api.get(json_response['next_url'])
        instances += json_response['resources']

    return instances
<SYSTEM_TASK:> Returns a flat list of the names of services created <END_TASK> <USER_TASK:> Description: def get_instances(self): """ Returns a flat list of the names of services created in this space. """
    services = []
    for resource in self._get_instances():
        services.append(resource['entity']['name'])

    return services
<SYSTEM_TASK:> Tests whether a service instance exists for the given <END_TASK> <USER_TASK:> Description: def has_service_of_type(self, service_type): """ Tests whether a service instance exists for the given service. """
    summary = self.get_space_summary()
    for instance in summary['services']:
        if 'service_plan' in instance:
            if service_type == instance['service_plan']['service']['label']:
                return True

    return False
<SYSTEM_TASK:> Remove all services and apps from the space. <END_TASK> <USER_TASK:> Description: def purge(self): """ Remove all services and apps from the space. Will leave the space itself, call delete_space() if you want to remove that too. Similar to `cf delete-space -f <space-name>`. """
logging.warning("Purging all services from space %s" % (self.name)) service = predix.admin.cf.services.Service() for service_name in self.get_instances(): service.purge(service_name) apps = predix.admin.cf.apps.App() for app_name in self.get_apps(): apps.delete_app(app_name)
<SYSTEM_TASK:> Create an instance of the PostgreSQL service with the typical starting <END_TASK> <USER_TASK:> Description: def create(self, max_wait=300, allocated_storage=None, encryption_at_rest=None, restore_to_time=None, **kwargs): """ Create an instance of the PostgreSQL service with the typical starting settings. :param max_wait: service is created asynchronously, so will only wait this number of seconds before giving up. :param allocated_storage: int for GBs to be allocated for storage :param encryption_at_rest: boolean for encrypting data that is stored :param restore_to_time: UTC date within recovery period for db backup to be used when initiating """
    # MAINT: Add these if there is demand for it and validated
    if allocated_storage or encryption_at_rest or restore_to_time:
        raise NotImplementedError()

    # Will need to wait for the service to be provisioned before can add
    # service keys and get env details.
    self.service.create(async=True, create_keys=False)

    while self._create_in_progress() and max_wait > 0:
        if max_wait % 5 == 0:
            logging.warning('Can take {}s for create to finish.'.format(max_wait))
        time.sleep(1)
        max_wait -= 1

    # Now get the service env (via service keys)
    cfg = self.service._get_service_config()
    self.service.settings.save(cfg)

    hostname = predix.config.get_env_key(self.use_class, 'hostname')
    os.environ[hostname] = self.service.settings.data['hostname']

    password = predix.config.get_env_key(self.use_class, 'password')
    os.environ[password] = self.service.settings.data['password']

    port = predix.config.get_env_key(self.use_class, 'port')
    os.environ[port] = str(self.service.settings.data['port'])

    username = predix.config.get_env_key(self.use_class, 'username')
    os.environ[username] = self.service.settings.data['username']

    uri = predix.config.get_env_key(self.use_class, 'uri')
    os.environ[uri] = self.service.settings.data['uri']
<SYSTEM_TASK:> Create an instance of the Analytics Framework Service with the <END_TASK> <USER_TASK:> Description: def create(self, asset, timeseries, client_id, client_secret, ui_client_id=None, ui_client_secret=None): """ Create an instance of the Analytics Framework Service with the typical starting settings. If not provided, will reuse the runtime client for the ui as well. """
    assert isinstance(asset, predix.admin.asset.Asset), \
        "Require an existing predix.admin.asset.Asset instance"
    assert isinstance(timeseries, predix.admin.timeseries.TimeSeries), \
        "Require an existing predix.admin.timeseries.TimeSeries instance"

    parameters = {
        'predixAssetZoneId': asset.get_zone_id(),
        'predixTimeseriesZoneId': timeseries.get_query_zone_id(),
        'runtimeClientId': client_id,
        'runtimeClientSecret': client_secret,
        'uiClientId': ui_client_id or client_id,
        'uiClientSecret': ui_client_secret or client_secret,
        'uiDomainPrefix': self.service.name,
    }

    self.service.create(parameters=parameters)
<SYSTEM_TASK:> Add to the manifest to make sure it is bound to the <END_TASK> <USER_TASK:> Description: def add_to_manifest(self, manifest): """ Add to the manifest to make sure it is bound to the application. """
    manifest.add_service(self.service.name)
    manifest.write_manifest()
<SYSTEM_TASK:> Returns the Predix Zone Id for the service that is a required <END_TASK> <USER_TASK:> Description: def _get_zone_id(self): """ Returns the Predix Zone Id for the service that is a required header in service calls. """
    if 'VCAP_SERVICES' in os.environ:
        services = json.loads(os.getenv('VCAP_SERVICES'))
        predix_asset = services['predix-asset'][0]['credentials']
        return predix_asset['zone']['http-header-value']
    else:
        return predix.config.get_env_value(self, 'zone_id')
<SYSTEM_TASK:> Returns a flat list of the names of collections in the asset <END_TASK> <USER_TASK:> Description: def get_collections(self): """ Returns a flat list of the names of collections in the asset service. .. ['wind-turbines', 'jet-engines'] """
    collections = []
    for result in self._get_collections():
        collections.append(result['collection'])

    return collections
<SYSTEM_TASK:> Returns a specific collection from the asset service with <END_TASK> <USER_TASK:> Description: def get_collection(self, collection, filter=None, fields=None, page_size=None): """ Returns a specific collection from the asset service with the given collection endpoint. Supports passing through parameters such as... - filters such as "name=Vesuvius" following GEL spec - fields such as "uri,description" comma delimited - page_size such as "100" (the default) """
    params = {}
    if filter:
        params['filter'] = filter
    if fields:
        params['fields'] = fields
    if page_size:
        params['pageSize'] = page_size

    uri = self.uri + '/v1' + collection
    return self.service._get(uri, params=params)
<SYSTEM_TASK:> Returns a new guid for use in posting a new asset to a collection. <END_TASK> <USER_TASK:> Description: def create_guid(self, collection=None): """ Returns a new guid for use in posting a new asset to a collection. """
    guid = str(uuid.uuid4())
    if collection:
        return str.join('/', [collection, guid])
    else:
        return guid
<SYSTEM_TASK:> Creates a new collection. This is mostly just transport layer <END_TASK> <USER_TASK:> Description: def post_collection(self, collection, body): """ Creates a new collection. This is mostly just transport layer and passes collection and body along. It presumes the body already has generated. The collection is *not* expected to have the id. """
assert isinstance(body, (list)), "POST requires body to be a list" assert collection.startswith('/'), "Collections must start with /" uri = self.uri + '/v1' + collection return self.service._post(uri, body)
<SYSTEM_TASK:> Updates an existing collection. <END_TASK> <USER_TASK:> Description: def put_collection(self, collection, body): """ Updates an existing collection. The collection being updated *is* expected to include the id. """
    uri = self.uri + '/v1' + collection
    return self.service._put(uri, body)
<SYSTEM_TASK:> Deletes an existing collection. <END_TASK> <USER_TASK:> Description: def delete_collection(self, collection): """ Deletes an existing collection. The collection being updated *is* expected to include the id. """
    uri = str.join('/', [self.uri, collection])
    return self.service._delete(uri)
<SYSTEM_TASK:> Will make specific updates to a record based on JSON Patch <END_TASK> <USER_TASK:> Description: def patch_collection(self, collection, changes): """ Will make specific updates to a record based on JSON Patch documentation. https://tools.ietf.org/html/rfc6902 the format of changes is something like:: [{ 'op': 'add', 'path': '/newfield', 'value': 'just added' }] """
    uri = str.join('/', [self.uri, collection])
    return self.service._patch(uri, changes)
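And a correspondingly small sketch of a JSON Patch update against a single record, reusing the asset client and guid from the collection sketch above; the field and value are hypothetical::

    changes = [
        {'op': 'replace', 'path': '/description', 'value': 'refurbished turbine'},
    ]
    asset.patch_collection(guid, changes)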
<SYSTEM_TASK:> Save an asset collection to the service. <END_TASK> <USER_TASK:> Description: def save(self, collection): """ Save an asset collection to the service. """
    assert isinstance(collection, predix.data.asset.AssetCollection), "Expected AssetCollection"

    collection.validate()
    self.put_collection(collection.uri, collection.__dict__)
<SYSTEM_TASK:> Populate a manifest file generated from details from the <END_TASK> <USER_TASK:> Description: def create_manifest_from_space(self): """ Populate a manifest file generated from details from the cloud foundry space environment. """
    space = predix.admin.cf.spaces.Space()
    summary = space.get_space_summary()
    for instance in summary['services']:
        service_type = instance['service_plan']['service']['label']
        name = instance['name']

        if service_type in self.supported:
            service = self.supported[service_type](name=name)
            service.add_to_manifest(self)
        elif service_type == 'us-weather-forecast':
            weather = predix.admin.weather.WeatherForecast(name=name)
            weather.add_to_manifest(self)
        else:
            logging.warning("Unsupported service type: %s" % service_type)
<SYSTEM_TASK:> Lock the manifest to the current organization and space regardless of <END_TASK> <USER_TASK:> Description: def lock_to_org_space(self): """ Lock the manifest to the current organization and space regardless of Cloud Foundry target. """
    self.add_env_var('PREDIX_ORGANIZATION_GUID', self.space.org.guid)
    self.add_env_var('PREDIX_ORGANIZATION_NAME', self.space.org.name)
    self.add_env_var('PREDIX_SPACE_GUID', self.space.guid)
    self.add_env_var('PREDIX_SPACE_NAME', self.space.name)
    self.write_manifest()
<SYSTEM_TASK:> Creates an instance of UAA Service. <END_TASK> <USER_TASK:> Description: def create_uaa(self, admin_secret, **kwargs): """ Creates an instance of UAA Service. :param admin_secret: The secret password for administering the service such as adding clients and users. """
    uaa = predix.admin.uaa.UserAccountAuthentication(**kwargs)
    if not uaa.exists():
        uaa.create(admin_secret, **kwargs)

    uaa.add_to_manifest(self)
    return uaa
<SYSTEM_TASK:> Create a client and add it to the manifest. <END_TASK> <USER_TASK:> Description: def create_client(self, client_id=None, client_secret=None, uaa=None): """ Create a client and add it to the manifest. :param client_id: The client id used to authenticate as a client in UAA. :param client_secret: The secret password used by a client to authenticate and generate a UAA token. :param uaa: The UAA to create client with """
    if not uaa:
        uaa = predix.admin.uaa.UserAccountAuthentication()

    # Client id and secret can be generated if not provided as arguments
    if not client_id:
        client_id = uaa._create_id()
    if not client_secret:
        client_secret = uaa._create_secret()

    uaa.create_client(client_id, client_secret)
    uaa.add_client_to_manifest(client_id, client_secret, self)
<SYSTEM_TASK:> Creates an instance of the Time Series Service. <END_TASK> <USER_TASK:> Description: def create_timeseries(self, **kwargs): """ Creates an instance of the Time Series Service. """
    ts = predix.admin.timeseries.TimeSeries(**kwargs)
    ts.create()

    client_id = self.get_client_id()
    if client_id:
        ts.grant_client(client_id)

    ts.add_to_manifest(self)
    return ts
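Taken together, the admin-side create_* helpers support a typical bootstrap flow. A hedged sketch, assuming these methods live on an admin manifest class here imported as predix.admin.app.Manifest and that `cf login` has already targeted an org and space; the admin secret is hypothetical::

    import predix.admin.app

    admin = predix.admin.app.Manifest()
    admin.create_uaa('my-admin-secret')
    admin.create_client()
    admin.create_timeseries()
    admin.create_blobstore()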
<SYSTEM_TASK:> Creates an instance of the BlobStore Service. <END_TASK> <USER_TASK:> Description: def create_blobstore(self, **kwargs): """ Creates an instance of the BlobStore Service. """
    blobstore = predix.admin.blobstore.BlobStore(**kwargs)
    blobstore.create()
    blobstore.add_to_manifest(self)
    return blobstore
<SYSTEM_TASK:> Creates an instance of the Logging Service. <END_TASK> <USER_TASK:> Description: def create_logstash(self, **kwargs): """ Creates an instance of the Logging Service. """
    logstash = predix.admin.logstash.Logging(**kwargs)
    logstash.create()
    logstash.add_to_manifest(self)

    logging.info('Install Kibana-Me-Logs application by following GitHub instructions')
    logging.info('git clone https://github.com/cloudfoundry-community/kibana-me-logs.git')

    return logstash
<SYSTEM_TASK:> Creates an instance of the Cache Service. <END_TASK> <USER_TASK:> Description: def create_cache(self, **kwargs): """ Creates an instance of the Cache Service. """
    cache = predix.admin.cache.Cache(**kwargs)
    cache.create(**kwargs)
    cache.add_to_manifest(self)
    return cache
<SYSTEM_TASK:> Returns a list of service names. Can return all services, just <END_TASK> <USER_TASK:> Description: def get_service_marketplace(self, available=True, unavailable=False, deprecated=False): """ Returns a list of service names. Can return all services, just those supported by PredixPy, or just those not yet supported by PredixPy. :param available: Return the services that are available in PredixPy. (Defaults to True) :param unavailable: Return the services that are not yet supported by PredixPy. (Defaults to False) :param deprecated: Return the services that are supported by PredixPy but no longer available. (True) """
    supported = set(self.supported.keys())
    all_services = set(self.space.get_services())

    results = set()
    if available:
        results.update(supported)
    if unavailable:
        results.update(all_services.difference(supported))
    if deprecated:
        results.update(supported.difference(all_services))

    return list(results)
<SYSTEM_TASK:> If we are in an app context we can authenticate immediately. <END_TASK> <USER_TASK:> Description: def _auto_authenticate(self): """ If we are in an app context we can authenticate immediately. """
    client_id = predix.config.get_env_value(predix.app.Manifest, 'client_id')
    client_secret = predix.config.get_env_value(predix.app.Manifest, 'client_secret')

    if client_id and client_secret:
        logging.info("Automatically authenticated as %s" % (client_id))
        self.uaa.authenticate(client_id, client_secret)
<SYSTEM_TASK:> Simple GET request for a given path. <END_TASK> <USER_TASK:> Description: def _get(self, uri, params=None, headers=None): """ Simple GET request for a given path. """
    if not headers:
        headers = self._get_headers()

    logging.debug("URI=" + str(uri))
    logging.debug("HEADERS=" + str(headers))

    response = self.session.get(uri, headers=headers, params=params)
    logging.debug("STATUS=" + str(response.status_code))

    if response.status_code == 200:
        return response.json()
    else:
        logging.error(b"ERROR=" + response.content)
        response.raise_for_status()
<SYSTEM_TASK:> Simple POST request for a given path. <END_TASK> <USER_TASK:> Description: def _post(self, uri, data): """ Simple POST request for a given path. """
    headers = self._get_headers()

    logging.debug("URI=" + str(uri))
    logging.debug("BODY=" + json.dumps(data))

    response = self.session.post(uri, headers=headers, data=json.dumps(data))
    if response.status_code in [200, 204]:
        try:
            return response.json()
        except ValueError:
            return "{}"
    else:
        logging.error(response.content)
        response.raise_for_status()
<SYSTEM_TASK:> Simple PUT operation for a given path. <END_TASK> <USER_TASK:> Description: def _put(self, uri, data): """ Simple PUT operation for a given path. """
    headers = self._get_headers()

    logging.debug("URI=" + str(uri))
    logging.debug("BODY=" + json.dumps(data))

    response = self.session.put(uri, headers=headers, data=json.dumps(data))
    if response.status_code in [201, 204]:
        return data
    else:
        logging.error(response.content)
        response.raise_for_status()
<SYSTEM_TASK:> Simple DELETE operation for a given path. <END_TASK> <USER_TASK:> Description: def _delete(self, uri): """ Simple DELETE operation for a given path. """
    headers = self._get_headers()
    response = self.session.delete(uri, headers=headers)

    # Will return a 204 on successful delete
    if response.status_code == 204:
        return response
    else:
        logging.error(response.content)
        response.raise_for_status()
<SYSTEM_TASK:> Simple PATCH operation for a given path. <END_TASK> <USER_TASK:> Description: def _patch(self, uri, data): """ Simple PATCH operation for a given path. The body is expected to list operations to perform to update the data. Operations include: - add - remove - replace - move - copy - test [ { "op": "test", "path": "/a/b/c", "value": "foo" }, ] """
    headers = self._get_headers()
    response = self.session.patch(uri, headers=headers, data=json.dumps(data))

    # Will return a 204 on successful patch
    if response.status_code == 204:
        return response
    else:
        logging.error(response.content)
        response.raise_for_status()
<SYSTEM_TASK:> Returns the full path that uniquely identifies <END_TASK> <USER_TASK:> Description: def _get_resource_uri(self, guid=None): """ Returns the full path that uniquely identifies the resource endpoint. """
    uri = self.uri + '/v1/resource'
    if guid:
        uri += '/' + urllib.quote_plus(guid)

    return uri
<SYSTEM_TASK:> Create new resources and associated attributes. <END_TASK> <USER_TASK:> Description: def _post_resource(self, body): """ Create new resources and associated attributes. Example: acs.post_resource([ { "resourceIdentifier": "masaya", "parents": [], "attributes": [ { "issuer": "default", "name": "country", "value": "Nicaragua" } ], } ]) The issuer is effectively a namespace, and in policy evaluations you identify an attribute by a specific namespace. Many examples provide a URL but it could be any arbitrary string. The body is a list, so many resources can be added at the same time. """
assert isinstance(body, (list)), "POST for requires body to be a list" uri = self._get_resource_uri() return self.service._post(uri, body)
<SYSTEM_TASK:> Remove a specific resource by its identifier. <END_TASK> <USER_TASK:> Description: def delete_resource(self, resource_id): """ Remove a specific resource by its identifier. """
    # resource_id could be a path such as '/asset/123' so quote
    uri = self._get_resource_uri(guid=resource_id)
    return self.service._delete(uri)
<SYSTEM_TASK:> Update a resource for the given resource id. The body is not <END_TASK> <USER_TASK:> Description: def _put_resource(self, resource_id, body): """ Update a resource for the given resource id. The body is not a list but a dictionary of a single resource. """
assert isinstance(body, (dict)), "PUT requires body to be a dict." # resource_id could be a path such as '/asset/123' so quote uri = self._get_resource_uri(guid=resource_id) return self.service._put(uri, body)
<SYSTEM_TASK:> Will add the given resource with a given identifier and attribute <END_TASK> <USER_TASK:> Description: def add_resource(self, resource_id, attributes, parents=[], issuer='default'): """ Will add the given resource with a given identifier and attribute dictionary. example/ add_resource('/asset/12', {'id': 12, 'manufacturer': 'GE'}) """
    # MAINT: consider test to avoid adding duplicate resource id
    assert isinstance(attributes, (dict)), "attributes expected to be dict"

    attrs = []
    for key in attributes.keys():
        attrs.append({
            'issuer': issuer,
            'name': key,
            'value': attributes[key]
        })

    body = {
        "resourceIdentifier": resource_id,
        "parents": parents,
        "attributes": attrs,
    }

    return self._put_resource(resource_id, body)
<SYSTEM_TASK:> Returns a specific subject by subject id. <END_TASK> <USER_TASK:> Description: def get_subject(self, subject_id): """ Returns a specific subject by subject id. """
    # subject_id could be a path such as '/user/j12y' so quote
    uri = self._get_subject_uri(guid=subject_id)
    return self.service._get(uri)
<SYSTEM_TASK:> Create new subjects and associated attributes. <END_TASK> <USER_TASK:> Description: def _post_subject(self, body): """ Create new subjects and associated attributes. Example: acs.post_subject([ { "subjectIdentifier": "/role/evangelist", "parents": [], "attributes": [ { "issuer": "default", "name": "role", "value": "developer evangelist", } ] } ]) The issuer is effectively a namespace, and in policy evaluations you identify an attribute by a specific namespace. Many examples provide a URL but it could be any arbitrary string. The body is a list, so many subjects can be added at the same time. """
assert isinstance(body, (list)), "POST requires body to be a list" uri = self._get_subject_uri() return self.service._post(uri, body)
<SYSTEM_TASK:> Remove a specific subject by its identifier. <END_TASK> <USER_TASK:> Description: def delete_subject(self, subject_id): """ Remove a specific subject by its identifier. """
    # subject_id could be a path such as '/role/analyst' so quote
    uri = self._get_subject_uri(guid=subject_id)
    return self.service._delete(uri)
<SYSTEM_TASK:> Update a subject for the given subject id. The body is not <END_TASK> <USER_TASK:> Description: def _put_subject(self, subject_id, body): """ Update a subject for the given subject id. The body is not a list but a dictionary of a single resource. """
assert isinstance(body, (dict)), "PUT requires body to be dict." # subject_id could be a path such as '/asset/123' so quote uri = self._get_subject_uri(guid=subject_id) return self.service._put(uri, body)
<SYSTEM_TASK:> Will add the given subject with a given identifier and attribute <END_TASK> <USER_TASK:> Description: def add_subject(self, subject_id, attributes, parents=[], issuer='default'): """ Will add the given subject with a given identifier and attribute dictionary. example/ add_subject('/user/j12y', {'username': 'j12y'}) """
    # MAINT: consider test to avoid adding duplicate subject id
    assert isinstance(attributes, (dict)), "attributes expected to be dict"

    attrs = []
    for key in attributes.keys():
        attrs.append({
            'issuer': issuer,
            'name': key,
            'value': attributes[key]
        })

    body = {
        "subjectIdentifier": subject_id,
        "parents": parents,
        "attributes": attrs,
    }

    return self._put_subject(subject_id, body)
<SYSTEM_TASK:> Tests whether or not the ACS service being monitored is alive. <END_TASK> <USER_TASK:> Description: def _get_monitoring_heartbeat(self): """ Tests whether or not the ACS service being monitored is alive. """
    target = self.uri + '/monitoring/heartbeat'
    response = self.session.get(target)
    return response