id: int32 (0 to 252k)
repo: string (lengths 7 to 55)
path: string (lengths 4 to 127)
func_name: string (lengths 1 to 88)
original_string: string (lengths 75 to 19.8k)
language: string (1 class)
code: string (lengths 75 to 19.8k)
code_tokens: sequence
docstring: string (lengths 3 to 17.3k)
docstring_tokens: sequence
sha: string (length 40)
url: string (lengths 87 to 242)
6,100
foremast/foremast
src/foremast/awslambda/api_gateway_event/api_gateway_event.py
APIGateway.create_resource
python
def create_resource(self, parent_id=""):
    """Create the specified resource.

    Args:
        parent_id (str): The resource ID of the parent resource in API Gateway

    """
    resource_name = self.trigger_settings.get('resource', '')
    resource_name = resource_name.replace('/', '')
    if not self.resource_id:
        created_resource = self.client.create_resource(
            restApiId=self.api_id, parentId=parent_id, pathPart=resource_name)
        self.resource_id = created_resource['id']
        self.log.info("Successfully created resource")
    else:
        self.log.info("Resource already exists. To update resource please delete existing resource: %s",
                      resource_name)
[ "def", "create_resource", "(", "self", ",", "parent_id", "=", "\"\"", ")", ":", "resource_name", "=", "self", ".", "trigger_settings", ".", "get", "(", "'resource'", ",", "''", ")", "resource_name", "=", "resource_name", ".", "replace", "(", "'/'", ",", "''", ")", "if", "not", "self", ".", "resource_id", ":", "created_resource", "=", "self", ".", "client", ".", "create_resource", "(", "restApiId", "=", "self", ".", "api_id", ",", "parentId", "=", "parent_id", ",", "pathPart", "=", "resource_name", ")", "self", ".", "resource_id", "=", "created_resource", "[", "'id'", "]", "self", ".", "log", ".", "info", "(", "\"Successfully created resource\"", ")", "else", ":", "self", ".", "log", ".", "info", "(", "\"Resource already exists. To update resource please delete existing resource: %s\"", ",", "resource_name", ")" ]
Create the specified resource. Args: parent_id (str): The resource ID of the parent resource in API Gateway
[ "Create", "the", "specified", "resource", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/api_gateway_event/api_gateway_event.py#L234-L249
6,101
foremast/foremast
src/foremast/awslambda/api_gateway_event/api_gateway_event.py
APIGateway.attach_method
python
def attach_method(self, resource_id):
    """Attach the defined method."""
    try:
        _response = self.client.put_method(
            restApiId=self.api_id,
            resourceId=resource_id,
            httpMethod=self.trigger_settings['method'],
            authorizationType="NONE",
            apiKeyRequired=False,
        )
        self.log.debug('Response for resource (%s) push authorization: %s', resource_id, _response)

        _response = self.client.put_method_response(
            restApiId=self.api_id,
            resourceId=resource_id,
            httpMethod=self.trigger_settings['method'],
            statusCode='200')
        self.log.debug('Response for resource (%s) no authorization: %s', resource_id, _response)

        self.log.info("Successfully attached method: %s", self.trigger_settings['method'])
    except botocore.exceptions.ClientError:
        self.log.info("Method %s already exists", self.trigger_settings['method'])
[ "def", "attach_method", "(", "self", ",", "resource_id", ")", ":", "try", ":", "_response", "=", "self", ".", "client", ".", "put_method", "(", "restApiId", "=", "self", ".", "api_id", ",", "resourceId", "=", "resource_id", ",", "httpMethod", "=", "self", ".", "trigger_settings", "[", "'method'", "]", ",", "authorizationType", "=", "\"NONE\"", ",", "apiKeyRequired", "=", "False", ",", ")", "self", ".", "log", ".", "debug", "(", "'Response for resource (%s) push authorization: %s'", ",", "resource_id", ",", "_response", ")", "_response", "=", "self", ".", "client", ".", "put_method_response", "(", "restApiId", "=", "self", ".", "api_id", ",", "resourceId", "=", "resource_id", ",", "httpMethod", "=", "self", ".", "trigger_settings", "[", "'method'", "]", ",", "statusCode", "=", "'200'", ")", "self", ".", "log", ".", "debug", "(", "'Response for resource (%s) no authorization: %s'", ",", "resource_id", ",", "_response", ")", "self", ".", "log", ".", "info", "(", "\"Successfully attached method: %s\"", ",", "self", ".", "trigger_settings", "[", "'method'", "]", ")", "except", "botocore", ".", "exceptions", ".", "ClientError", ":", "self", ".", "log", ".", "info", "(", "\"Method %s already exists\"", ",", "self", ".", "trigger_settings", "[", "'method'", "]", ")" ]
Attach the defined method.
[ "Attach", "the", "defined", "method", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/api_gateway_event/api_gateway_event.py#L251-L270
6,102
foremast/foremast
src/foremast/awslambda/api_gateway_event/api_gateway_event.py
APIGateway.setup_lambda_api
python
def setup_lambda_api(self):
    """A wrapper for all the steps needed to setup the integration."""
    self.create_resource(self.parent_id)
    self.attach_method(self.resource_id)
    self.add_lambda_integration()
    self.add_permission()
    self.create_api_deployment()
    self.create_api_key()
    self.update_api_mappings()
[ "def", "setup_lambda_api", "(", "self", ")", ":", "self", ".", "create_resource", "(", "self", ".", "parent_id", ")", "self", ".", "attach_method", "(", "self", ".", "resource_id", ")", "self", ".", "add_lambda_integration", "(", ")", "self", ".", "add_permission", "(", ")", "self", ".", "create_api_deployment", "(", ")", "self", ".", "create_api_key", "(", ")", "self", ".", "update_api_mappings", "(", ")" ]
A wrapper for all the steps needed to setup the integration.
[ "A", "wrapper", "for", "all", "the", "steps", "needed", "to", "setup", "the", "integration", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/api_gateway_event/api_gateway_event.py#L272-L280
6,103
foremast/foremast
src/foremast/dns/__main__.py
main
python
def main():
    """Run newer stuffs."""
    logging.basicConfig(format=LOGGING_FORMAT)
    log = logging.getLogger(__name__)

    parser = argparse.ArgumentParser()
    add_debug(parser)
    add_app(parser)
    add_env(parser)
    add_region(parser)
    add_properties(parser)
    parser.add_argument("--elb-subnet", help="Subnetnet type, e.g. external, internal", required=True)
    args = parser.parse_args()

    logging.getLogger(__package__.split('.')[0]).setLevel(args.debug)

    log.debug('Parsed arguments: %s', args)

    spinnakerapps = SpinnakerDns(
        app=args.app, env=args.env, region=args.region, prop_path=args.properties, elb_subnet=args.elb_subnet)
    spinnakerapps.create_elb_dns()
[ "def", "main", "(", ")", ":", "logging", ".", "basicConfig", "(", "format", "=", "LOGGING_FORMAT", ")", "log", "=", "logging", ".", "getLogger", "(", "__name__", ")", "parser", "=", "argparse", ".", "ArgumentParser", "(", ")", "add_debug", "(", "parser", ")", "add_app", "(", "parser", ")", "add_env", "(", "parser", ")", "add_region", "(", "parser", ")", "add_properties", "(", "parser", ")", "parser", ".", "add_argument", "(", "\"--elb-subnet\"", ",", "help", "=", "\"Subnetnet type, e.g. external, internal\"", ",", "required", "=", "True", ")", "args", "=", "parser", ".", "parse_args", "(", ")", "logging", ".", "getLogger", "(", "__package__", ".", "split", "(", "'.'", ")", "[", "0", "]", ")", ".", "setLevel", "(", "args", ".", "debug", ")", "log", ".", "debug", "(", "'Parsed arguments: %s'", ",", "args", ")", "spinnakerapps", "=", "SpinnakerDns", "(", "app", "=", "args", ".", "app", ",", "env", "=", "args", ".", "env", ",", "region", "=", "args", ".", "region", ",", "prop_path", "=", "args", ".", "properties", ",", "elb_subnet", "=", "args", ".", "elb_subnet", ")", "spinnakerapps", ".", "create_elb_dns", "(", ")" ]
Run newer stuffs.
[ "Run", "newer", "stuffs", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/dns/__main__.py#L28-L48
6,104
foremast/foremast
src/foremast/securitygroup/create_securitygroup.py
SpinnakerSecurityGroup._validate_cidr
python
def _validate_cidr(self, rule):
    """Validate the cidr block in a rule.

    Returns:
        True: Upon successful completion.

    Raises:
        SpinnakerSecurityGroupCreationFailed: CIDR definition is invalid or
            the network range is too wide.

    """
    try:
        network = ipaddress.IPv4Network(rule['app'])
    except (ipaddress.NetmaskValueError, ValueError) as error:
        raise SpinnakerSecurityGroupCreationFailed(error)

    self.log.debug('Validating CIDR: %s', network.exploded)

    return True
[ "def", "_validate_cidr", "(", "self", ",", "rule", ")", ":", "try", ":", "network", "=", "ipaddress", ".", "IPv4Network", "(", "rule", "[", "'app'", "]", ")", "except", "(", "ipaddress", ".", "NetmaskValueError", ",", "ValueError", ")", "as", "error", ":", "raise", "SpinnakerSecurityGroupCreationFailed", "(", "error", ")", "self", ".", "log", ".", "debug", "(", "'Validating CIDR: %s'", ",", "network", ".", "exploded", ")", "return", "True" ]
Validate the cidr block in a rule. Returns: True: Upon successful completion. Raises: SpinnakerSecurityGroupCreationFailed: CIDR definition is invalid or the network range is too wide.
[ "Validate", "the", "cidr", "block", "in", "a", "rule", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/securitygroup/create_securitygroup.py#L77-L94
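For context, a small standard-library sketch of the ipaddress behavior this validation leans on (illustrative only, not part of the dataset row above):

import ipaddress

# A well-formed CIDR block parses cleanly.
network = ipaddress.IPv4Network('10.0.0.0/24')
print(network.exploded)  # 10.0.0.0/24

# Host bits or a bad netmask raise ValueError/NetmaskValueError, which
# _validate_cidr re-raises as SpinnakerSecurityGroupCreationFailed.
try:
    ipaddress.IPv4Network('10.0.0.1/24')
except ValueError as error:
    print(error)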
6,105
foremast/foremast
src/foremast/securitygroup/create_securitygroup.py
SpinnakerSecurityGroup._process_rules
python
def _process_rules(self, rules):
    """Process rules into cidr and non-cidr lists.

    Args:
        rules (list): Allowed Security Group ports and protocols.

    Returns:
        (list, list): Security Group reference rules and custom CIDR rules.

    """
    cidr = []
    non_cidr = []

    for rule in rules:
        if '.' in rule['app']:
            self.log.debug('Custom CIDR rule: %s', rule)
            self._validate_cidr(rule)
            cidr.append(rule)
        else:
            self.log.debug('SG reference rule: %s', rule)
            non_cidr.append(rule)

    self.log.debug('Custom CIDR rules: %s', cidr)
    self.log.debug('SG reference rules: %s', non_cidr)
    return non_cidr, cidr
[ "def", "_process_rules", "(", "self", ",", "rules", ")", ":", "cidr", "=", "[", "]", "non_cidr", "=", "[", "]", "for", "rule", "in", "rules", ":", "if", "'.'", "in", "rule", "[", "'app'", "]", ":", "self", ".", "log", ".", "debug", "(", "'Custom CIDR rule: %s'", ",", "rule", ")", "self", ".", "_validate_cidr", "(", "rule", ")", "cidr", ".", "append", "(", "rule", ")", "else", ":", "self", ".", "log", ".", "debug", "(", "'SG reference rule: %s'", ",", "rule", ")", "non_cidr", ".", "append", "(", "rule", ")", "self", ".", "log", ".", "debug", "(", "'Custom CIDR rules: %s'", ",", "cidr", ")", "self", ".", "log", ".", "debug", "(", "'SG reference rules: %s'", ",", "non_cidr", ")", "return", "non_cidr", ",", "cidr" ]
Process rules into cidr and non-cidr lists. Args: rules (list): Allowed Security Group ports and protocols. Returns: (list, list): Security Group reference rules and custom CIDR rules.
[ "Process", "rules", "into", "cidr", "and", "non", "-", "cidr", "lists", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/securitygroup/create_securitygroup.py#L96-L119
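A minimal sketch of the split performed above, using hypothetical rule dicts shaped like the normalized rules create_ingress_rule returns; the only test is whether the 'app' value contains a dot:

rules = [
    {'app': '10.0.0.0/24', 'start_port': 443, 'end_port': 443, 'protocol': 'tcp'},
    {'app': 'otherapp', 'start_port': 8080, 'end_port': 8080, 'protocol': 'tcp'},
]
cidr = [rule for rule in rules if '.' in rule['app']]          # the 10.0.0.0/24 rule
non_cidr = [rule for rule in rules if '.' not in rule['app']]  # the 'otherapp' reference rule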
6,106
foremast/foremast
src/foremast/securitygroup/create_securitygroup.py
SpinnakerSecurityGroup.add_tags
python
def add_tags(self):
    """Add tags to security group.

    Returns:
        True: Upon successful completion.

    """
    session = boto3.session.Session(profile_name=self.env, region_name=self.region)
    resource = session.resource('ec2')
    group_id = get_security_group_id(self.app_name, self.env, self.region)
    security_group = resource.SecurityGroup(group_id)
    try:
        tag = security_group.create_tags(
            DryRun=False,
            Tags=[{
                'Key': 'app_group',
                'Value': self.group
            }, {
                'Key': 'app_name',
                'Value': self.app_name
            }])
        self.log.debug('Security group has been tagged: %s', tag)
    except botocore.exceptions.ClientError as error:
        self.log.warning(error)

    return True
[ "def", "add_tags", "(", "self", ")", ":", "session", "=", "boto3", ".", "session", ".", "Session", "(", "profile_name", "=", "self", ".", "env", ",", "region_name", "=", "self", ".", "region", ")", "resource", "=", "session", ".", "resource", "(", "'ec2'", ")", "group_id", "=", "get_security_group_id", "(", "self", ".", "app_name", ",", "self", ".", "env", ",", "self", ".", "region", ")", "security_group", "=", "resource", ".", "SecurityGroup", "(", "group_id", ")", "try", ":", "tag", "=", "security_group", ".", "create_tags", "(", "DryRun", "=", "False", ",", "Tags", "=", "[", "{", "'Key'", ":", "'app_group'", ",", "'Value'", ":", "self", ".", "group", "}", ",", "{", "'Key'", ":", "'app_name'", ",", "'Value'", ":", "self", ".", "app_name", "}", "]", ")", "self", ".", "log", ".", "debug", "(", "'Security group has been tagged: %s'", ",", "tag", ")", "except", "botocore", ".", "exceptions", ".", "ClientError", "as", "error", ":", "self", ".", "log", ".", "warning", "(", "error", ")", "return", "True" ]
Add tags to security group. Returns: True: Upon successful completion.
[ "Add", "tags", "to", "security", "group", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/securitygroup/create_securitygroup.py#L121-L146
6,107
foremast/foremast
src/foremast/securitygroup/create_securitygroup.py
SpinnakerSecurityGroup.add_cidr_rules
python
def add_cidr_rules(self, rules):
    """Add cidr rules to security group via boto.

    Args:
        rules (list): Allowed Security Group ports and protocols.

    Returns:
        True: Upon successful completion.

    Raises:
        SpinnakerSecurityGroupError: boto3 call failed to add CIDR block to
            Security Group.

    """
    session = boto3.session.Session(profile_name=self.env, region_name=self.region)
    client = session.client('ec2')

    group_id = get_security_group_id(self.app_name, self.env, self.region)
    for rule in rules:
        data = {
            'DryRun': False,
            'GroupId': group_id,
            'IpPermissions': [{
                'IpProtocol': rule['protocol'],
                'FromPort': rule['start_port'],
                'ToPort': rule['end_port'],
                'IpRanges': [{
                    'CidrIp': rule['app']
                }]
            }]
        }
        self.log.debug('Security Group rule: %s', data)

        try:
            client.authorize_security_group_ingress(**data)
        except botocore.exceptions.ClientError as error:
            if 'InvalidPermission.Duplicate' in str(error):
                self.log.debug('Duplicate rule exist, that is OK.')
            else:
                msg = 'Unable to add cidr rules to {}'.format(rule.get('app'))
                self.log.error(msg)
                raise SpinnakerSecurityGroupError(msg)
    return True
[ "def", "add_cidr_rules", "(", "self", ",", "rules", ")", ":", "session", "=", "boto3", ".", "session", ".", "Session", "(", "profile_name", "=", "self", ".", "env", ",", "region_name", "=", "self", ".", "region", ")", "client", "=", "session", ".", "client", "(", "'ec2'", ")", "group_id", "=", "get_security_group_id", "(", "self", ".", "app_name", ",", "self", ".", "env", ",", "self", ".", "region", ")", "for", "rule", "in", "rules", ":", "data", "=", "{", "'DryRun'", ":", "False", ",", "'GroupId'", ":", "group_id", ",", "'IpPermissions'", ":", "[", "{", "'IpProtocol'", ":", "rule", "[", "'protocol'", "]", ",", "'FromPort'", ":", "rule", "[", "'start_port'", "]", ",", "'ToPort'", ":", "rule", "[", "'end_port'", "]", ",", "'IpRanges'", ":", "[", "{", "'CidrIp'", ":", "rule", "[", "'app'", "]", "}", "]", "}", "]", "}", "self", ".", "log", ".", "debug", "(", "'Security Group rule: %s'", ",", "data", ")", "try", ":", "client", ".", "authorize_security_group_ingress", "(", "*", "*", "data", ")", "except", "botocore", ".", "exceptions", ".", "ClientError", "as", "error", ":", "if", "'InvalidPermission.Duplicate'", "in", "str", "(", "error", ")", ":", "self", ".", "log", ".", "debug", "(", "'Duplicate rule exist, that is OK.'", ")", "else", ":", "msg", "=", "'Unable to add cidr rules to {}'", ".", "format", "(", "rule", ".", "get", "(", "'app'", ")", ")", "self", ".", "log", ".", "error", "(", "msg", ")", "raise", "SpinnakerSecurityGroupError", "(", "msg", ")", "return", "True" ]
Add cidr rules to security group via boto. Args: rules (list): Allowed Security Group ports and protocols. Returns: True: Upon successful completion. Raises: SpinnakerSecurityGroupError: boto3 call failed to add CIDR block to Security Group.
[ "Add", "cidr", "rules", "to", "security", "group", "via", "boto", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/securitygroup/create_securitygroup.py#L148-L193
6,108
foremast/foremast
src/foremast/securitygroup/create_securitygroup.py
SpinnakerSecurityGroup.update_default_rules
python
def update_default_rules(self):
    """Concatinate application and global security group rules."""
    app_ingress = self.properties['security_group']['ingress']
    ingress = conservative_merger.merge(DEFAULT_SECURITYGROUP_RULES, app_ingress)
    resolved_ingress = self.resolve_self_references(ingress)
    self.log.info('Updated default rules:\n%s', ingress)
    return resolved_ingress
[ "def", "update_default_rules", "(", "self", ")", ":", "app_ingress", "=", "self", ".", "properties", "[", "'security_group'", "]", "[", "'ingress'", "]", "ingress", "=", "conservative_merger", ".", "merge", "(", "DEFAULT_SECURITYGROUP_RULES", ",", "app_ingress", ")", "resolved_ingress", "=", "self", ".", "resolve_self_references", "(", "ingress", ")", "self", ".", "log", ".", "info", "(", "'Updated default rules:\\n%s'", ",", "ingress", ")", "return", "resolved_ingress" ]
Concatinate application and global security group rules.
[ "Concatinate", "application", "and", "global", "security", "group", "rules", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/securitygroup/create_securitygroup.py#L202-L208
6,109
foremast/foremast
src/foremast/securitygroup/create_securitygroup.py
SpinnakerSecurityGroup._create_security_group
python
def _create_security_group(self, ingress):
    """Send a POST to spinnaker to create a new security group.

    Returns:
        boolean: True if created successfully

    """
    template_kwargs = {
        'app': self.app_name,
        'env': self.env,
        'region': self.region,
        'vpc': get_vpc_id(self.env, self.region),
        'description': self.properties['security_group']['description'],
        'ingress': ingress,
    }

    secgroup_json = get_template(
        template_file='infrastructure/securitygroup_data.json.j2', formats=self.generated, **template_kwargs)

    wait_for_task(secgroup_json)

    return True
[ "def", "_create_security_group", "(", "self", ",", "ingress", ")", ":", "template_kwargs", "=", "{", "'app'", ":", "self", ".", "app_name", ",", "'env'", ":", "self", ".", "env", ",", "'region'", ":", "self", ".", "region", ",", "'vpc'", ":", "get_vpc_id", "(", "self", ".", "env", ",", "self", ".", "region", ")", ",", "'description'", ":", "self", ".", "properties", "[", "'security_group'", "]", "[", "'description'", "]", ",", "'ingress'", ":", "ingress", ",", "}", "secgroup_json", "=", "get_template", "(", "template_file", "=", "'infrastructure/securitygroup_data.json.j2'", ",", "formats", "=", "self", ".", "generated", ",", "*", "*", "template_kwargs", ")", "wait_for_task", "(", "secgroup_json", ")", "return", "True" ]
Send a POST to spinnaker to create a new security group. Returns: boolean: True if created successfully
[ "Send", "a", "POST", "to", "spinnaker", "to", "create", "a", "new", "security", "group", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/securitygroup/create_securitygroup.py#L210-L230
6,110
foremast/foremast
src/foremast/securitygroup/create_securitygroup.py
SpinnakerSecurityGroup.create_security_group
python
def create_security_group(self):  # noqa
    """Send a POST to spinnaker to create or update a security group.

    Returns:
        boolean: True if created successfully

    Raises:
        ForemastConfigurationFileError: Missing environment configuration or
            misconfigured Security Group definition.

    """
    ingress_rules = []

    try:
        security_id = get_security_group_id(name=self.app_name, env=self.env, region=self.region)
    except (SpinnakerSecurityGroupError, AssertionError):
        self._create_security_group(ingress_rules)
    else:
        self.log.debug('Security Group ID %s found for %s.', security_id, self.app_name)

    try:
        ingress = self.update_default_rules()
    except KeyError:
        msg = 'Possible missing configuration for "{0}".'.format(self.env)
        self.log.error(msg)
        raise ForemastConfigurationFileError(msg)

    for app in ingress:
        rules = ingress[app]

        # Essentially we have two formats: simple, advanced
        # - simple: is just a list of ports
        # - advanced: selects ports ranges and protocols
        for rule in rules:
            ingress_rule = self.create_ingress_rule(app, rule)
            ingress_rules.append(ingress_rule)

    ingress_rules_no_cidr, ingress_rules_cidr = self._process_rules(ingress_rules)

    self._create_security_group(ingress_rules_no_cidr)

    # Append cidr rules
    self.add_cidr_rules(ingress_rules_cidr)

    # Tag security group
    self.add_tags()

    self.log.info('Successfully created %s security group', self.app_name)
    return True
[ "def", "create_security_group", "(", "self", ")", ":", "# noqa", "ingress_rules", "=", "[", "]", "try", ":", "security_id", "=", "get_security_group_id", "(", "name", "=", "self", ".", "app_name", ",", "env", "=", "self", ".", "env", ",", "region", "=", "self", ".", "region", ")", "except", "(", "SpinnakerSecurityGroupError", ",", "AssertionError", ")", ":", "self", ".", "_create_security_group", "(", "ingress_rules", ")", "else", ":", "self", ".", "log", ".", "debug", "(", "'Security Group ID %s found for %s.'", ",", "security_id", ",", "self", ".", "app_name", ")", "try", ":", "ingress", "=", "self", ".", "update_default_rules", "(", ")", "except", "KeyError", ":", "msg", "=", "'Possible missing configuration for \"{0}\".'", ".", "format", "(", "self", ".", "env", ")", "self", ".", "log", ".", "error", "(", "msg", ")", "raise", "ForemastConfigurationFileError", "(", "msg", ")", "for", "app", "in", "ingress", ":", "rules", "=", "ingress", "[", "app", "]", "# Essentially we have two formats: simple, advanced", "# - simple: is just a list of ports", "# - advanced: selects ports ranges and protocols", "for", "rule", "in", "rules", ":", "ingress_rule", "=", "self", ".", "create_ingress_rule", "(", "app", ",", "rule", ")", "ingress_rules", ".", "append", "(", "ingress_rule", ")", "ingress_rules_no_cidr", ",", "ingress_rules_cidr", "=", "self", ".", "_process_rules", "(", "ingress_rules", ")", "self", ".", "_create_security_group", "(", "ingress_rules_no_cidr", ")", "# Append cidr rules", "self", ".", "add_cidr_rules", "(", "ingress_rules_cidr", ")", "# Tag security group", "self", ".", "add_tags", "(", ")", "self", ".", "log", ".", "info", "(", "'Successfully created %s security group'", ",", "self", ".", "app_name", ")", "return", "True" ]
Send a POST to spinnaker to create or update a security group. Returns: boolean: True if created successfully Raises: ForemastConfigurationFileError: Missing environment configuration or misconfigured Security Group definition.
[ "Send", "a", "POST", "to", "spinnaker", "to", "create", "or", "update", "a", "security", "group", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/securitygroup/create_securitygroup.py#L232-L279
6,111
foremast/foremast
src/foremast/securitygroup/create_securitygroup.py
SpinnakerSecurityGroup.create_ingress_rule
python
def create_ingress_rule(self, app, rule):
    """Create a normalized ingress rule.

    Args:
        app (str): Application name
        rule (dict or int): Allowed Security Group ports and protocols.

    Returns:
        dict: Contains app, start_port, end_port, protocol, cross_account_env and cross_account_vpc_id

    """
    if isinstance(rule, dict):
        # Advanced
        start_port = rule.get('start_port')
        end_port = rule.get('end_port')
        protocol = rule.get('protocol', 'tcp')
        requested_cross_account = rule.get('env', self.env)
        if self.env == requested_cross_account:
            # We are trying to use cross-account security group settings within the same account
            # We should not allow this.
            cross_account_env = None
            cross_account_vpc_id = None
        else:
            cross_account_env = requested_cross_account
            cross_account_vpc_id = get_vpc_id(cross_account_env, self.region)
    else:
        start_port = rule
        end_port = rule
        protocol = 'tcp'
        cross_account_env = None
        cross_account_vpc_id = None

    created_rule = {
        'app': app,
        'start_port': start_port,
        'end_port': end_port,
        'protocol': protocol,
        'cross_account_env': cross_account_env,
        'cross_account_vpc_id': cross_account_vpc_id
    }
    self.log.debug('Normalized ingress rule: %s', created_rule)
    return created_rule
[ "def", "create_ingress_rule", "(", "self", ",", "app", ",", "rule", ")", ":", "if", "isinstance", "(", "rule", ",", "dict", ")", ":", "# Advanced", "start_port", "=", "rule", ".", "get", "(", "'start_port'", ")", "end_port", "=", "rule", ".", "get", "(", "'end_port'", ")", "protocol", "=", "rule", ".", "get", "(", "'protocol'", ",", "'tcp'", ")", "requested_cross_account", "=", "rule", ".", "get", "(", "'env'", ",", "self", ".", "env", ")", "if", "self", ".", "env", "==", "requested_cross_account", ":", "# We are trying to use cross-account security group settings within the same account", "# We should not allow this.", "cross_account_env", "=", "None", "cross_account_vpc_id", "=", "None", "else", ":", "cross_account_env", "=", "requested_cross_account", "cross_account_vpc_id", "=", "get_vpc_id", "(", "cross_account_env", ",", "self", ".", "region", ")", "else", ":", "start_port", "=", "rule", "end_port", "=", "rule", "protocol", "=", "'tcp'", "cross_account_env", "=", "None", "cross_account_vpc_id", "=", "None", "created_rule", "=", "{", "'app'", ":", "app", ",", "'start_port'", ":", "start_port", ",", "'end_port'", ":", "end_port", ",", "'protocol'", ":", "protocol", ",", "'cross_account_env'", ":", "cross_account_env", ",", "'cross_account_vpc_id'", ":", "cross_account_vpc_id", "}", "self", ".", "log", ".", "debug", "(", "'Normalized ingress rule: %s'", ",", "created_rule", ")", "return", "created_rule" ]
Create a normalized ingress rule. Args: app (str): Application name rule (dict or int): Allowed Security Group ports and protocols. Returns: dict: Contains app, start_port, end_port, protocol, cross_account_env and cross_account_vpc_id
[ "Create", "a", "normalized", "ingress", "rule", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/securitygroup/create_securitygroup.py#L281-L324
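An illustrative sketch of the two accepted rule shapes and the normalized output; the values are hypothetical, not taken from the repository:

# Simple format: a bare port number opens that single TCP port.
simple_rule = 8080

# Advanced format: explicit port range, protocol, and an optional cross-account env.
advanced_rule = {'start_port': 80, 'end_port': 443, 'protocol': 'tcp', 'env': 'prod'}

# Either shape is normalized into a dict like:
# {'app': 'otherapp', 'start_port': 8080, 'end_port': 8080, 'protocol': 'tcp',
#  'cross_account_env': None, 'cross_account_vpc_id': None}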
6,112
foremast/foremast
src/foremast/utils/awslambda.py
get_lambda_arn
python
def get_lambda_arn(app, account, region):
    """Get lambda ARN.

    Args:
        account (str): AWS account name.
        region (str): Region name, e.g. us-east-1
        app (str): Lambda function name

    Returns:
        str: ARN for requested lambda function

    """
    session = boto3.Session(profile_name=account, region_name=region)
    lambda_client = session.client('lambda')

    lambda_arn = None

    paginator = lambda_client.get_paginator('list_functions')
    for lambda_functions in paginator.paginate():
        for lambda_function in lambda_functions['Functions']:
            if lambda_function['FunctionName'] == app:
                lambda_arn = lambda_function['FunctionArn']
                LOG.debug("Lambda ARN for lambda function %s is %s.", app, lambda_arn)
                break
        if lambda_arn:
            break

    if not lambda_arn:
        LOG.fatal('Lambda function with name %s not found in %s %s', app, account, region)
        raise LambdaFunctionDoesNotExist(
            'Lambda function with name {0} not found in {1} {2}'.format(app, account, region))

    return lambda_arn
[ "def", "get_lambda_arn", "(", "app", ",", "account", ",", "region", ")", ":", "session", "=", "boto3", ".", "Session", "(", "profile_name", "=", "account", ",", "region_name", "=", "region", ")", "lambda_client", "=", "session", ".", "client", "(", "'lambda'", ")", "lambda_arn", "=", "None", "paginator", "=", "lambda_client", ".", "get_paginator", "(", "'list_functions'", ")", "for", "lambda_functions", "in", "paginator", ".", "paginate", "(", ")", ":", "for", "lambda_function", "in", "lambda_functions", "[", "'Functions'", "]", ":", "if", "lambda_function", "[", "'FunctionName'", "]", "==", "app", ":", "lambda_arn", "=", "lambda_function", "[", "'FunctionArn'", "]", "LOG", ".", "debug", "(", "\"Lambda ARN for lambda function %s is %s.\"", ",", "app", ",", "lambda_arn", ")", "break", "if", "lambda_arn", ":", "break", "if", "not", "lambda_arn", ":", "LOG", ".", "fatal", "(", "'Lambda function with name %s not found in %s %s'", ",", "app", ",", "account", ",", "region", ")", "raise", "LambdaFunctionDoesNotExist", "(", "'Lambda function with name {0} not found in {1} {2}'", ".", "format", "(", "app", ",", "account", ",", "region", ")", ")", "return", "lambda_arn" ]
Get lambda ARN. Args: account (str): AWS account name. region (str): Region name, e.g. us-east-1 app (str): Lambda function name Returns: str: ARN for requested lambda function
[ "Get", "lambda", "ARN", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/awslambda.py#L28-L60
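A hedged usage sketch, assuming a local AWS profile named 'dev' and an already deployed function; the function name and account id below are placeholders:

arn = get_lambda_arn(app='myappfunction', account='dev', region='us-east-1')
print(arn)  # e.g. arn:aws:lambda:us-east-1:123456789012:function:myappfunction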
6,113
foremast/foremast
src/foremast/utils/awslambda.py
get_lambda_alias_arn
python
def get_lambda_alias_arn(app, account, region):
    """Get lambda alias ARN. Assumes that account name is equal to alias name.

    Args:
        account (str): AWS account name.
        region (str): Region name, e.g. us-east-1
        app (str): Lambda function name

    Returns:
        str: ARN for requested lambda alias

    """
    session = boto3.Session(profile_name=account, region_name=region)
    lambda_client = session.client('lambda')

    lambda_aliases = lambda_client.list_aliases(FunctionName=app)

    matched_alias = None
    for alias in lambda_aliases['Aliases']:
        if alias['Name'] == account:
            lambda_alias_arn = alias['AliasArn']
            LOG.info('Found ARN for alias %s for function %s', account, app)
            matched_alias = lambda_alias_arn
            break
    else:
        fatal_message = 'Lambda alias {0} of function {1} not found'.format(account, app)
        LOG.fatal(fatal_message)
        raise LambdaAliasDoesNotExist(fatal_message)

    return matched_alias
[ "def", "get_lambda_alias_arn", "(", "app", ",", "account", ",", "region", ")", ":", "session", "=", "boto3", ".", "Session", "(", "profile_name", "=", "account", ",", "region_name", "=", "region", ")", "lambda_client", "=", "session", ".", "client", "(", "'lambda'", ")", "lambda_aliases", "=", "lambda_client", ".", "list_aliases", "(", "FunctionName", "=", "app", ")", "matched_alias", "=", "None", "for", "alias", "in", "lambda_aliases", "[", "'Aliases'", "]", ":", "if", "alias", "[", "'Name'", "]", "==", "account", ":", "lambda_alias_arn", "=", "alias", "[", "'AliasArn'", "]", "LOG", ".", "info", "(", "'Found ARN for alias %s for function %s'", ",", "account", ",", "app", ")", "matched_alias", "=", "lambda_alias_arn", "break", "else", ":", "fatal_message", "=", "'Lambda alias {0} of function {1} not found'", ".", "format", "(", "account", ",", "app", ")", "LOG", ".", "fatal", "(", "fatal_message", ")", "raise", "LambdaAliasDoesNotExist", "(", "fatal_message", ")", "return", "matched_alias" ]
Get lambda alias ARN. Assumes that account name is equal to alias name. Args: account (str): AWS account name. region (str): Region name, e.g. us-east-1 app (str): Lambda function name Returns: str: ARN for requested lambda alias
[ "Get", "lambda", "alias", "ARN", ".", "Assumes", "that", "account", "name", "is", "equal", "to", "alias", "name", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/awslambda.py#L63-L91
6,114
foremast/foremast
src/foremast/utils/awslambda.py
add_lambda_permissions
python
def add_lambda_permissions(function='',
                           statement_id='',
                           action='lambda:InvokeFunction',
                           principal='',
                           source_arn='',
                           env='',
                           region='us-east-1'):
    """Add permission to Lambda for the event trigger.

    Args:
        function (str): Lambda function name
        statement_id (str): IAM policy statement (principal) id
        action (str): Lambda action to allow
        principal (str): AWS principal to add permissions
        source_arn (str): ARN of the source of the event. Only needed for S3
        env (str): Environment/account of function
        region (str): AWS region of function

    """
    session = boto3.Session(profile_name=env, region_name=region)
    lambda_client = session.client('lambda')
    response_action = None

    prefixed_sid = FOREMAST_PREFIX + statement_id

    add_permissions_kwargs = {
        'FunctionName': function,
        'StatementId': prefixed_sid,
        'Action': action,
        'Principal': principal,
    }

    if source_arn:
        add_permissions_kwargs['SourceArn'] = source_arn

    try:
        lambda_client.add_permission(**add_permissions_kwargs)
        response_action = 'Add permission with Sid: {}'.format(prefixed_sid)
    except boto3.exceptions.botocore.exceptions.ClientError as error:
        LOG.debug('Add permission error: %s', error)
        response_action = "Did not add permissions"

    LOG.debug('Related StatementId (SID): %s', prefixed_sid)
    LOG.info(response_action)
[ "def", "add_lambda_permissions", "(", "function", "=", "''", ",", "statement_id", "=", "''", ",", "action", "=", "'lambda:InvokeFunction'", ",", "principal", "=", "''", ",", "source_arn", "=", "''", ",", "env", "=", "''", ",", "region", "=", "'us-east-1'", ")", ":", "session", "=", "boto3", ".", "Session", "(", "profile_name", "=", "env", ",", "region_name", "=", "region", ")", "lambda_client", "=", "session", ".", "client", "(", "'lambda'", ")", "response_action", "=", "None", "prefixed_sid", "=", "FOREMAST_PREFIX", "+", "statement_id", "add_permissions_kwargs", "=", "{", "'FunctionName'", ":", "function", ",", "'StatementId'", ":", "prefixed_sid", ",", "'Action'", ":", "action", ",", "'Principal'", ":", "principal", ",", "}", "if", "source_arn", ":", "add_permissions_kwargs", "[", "'SourceArn'", "]", "=", "source_arn", "try", ":", "lambda_client", ".", "add_permission", "(", "*", "*", "add_permissions_kwargs", ")", "response_action", "=", "'Add permission with Sid: {}'", ".", "format", "(", "prefixed_sid", ")", "except", "boto3", ".", "exceptions", ".", "botocore", ".", "exceptions", ".", "ClientError", "as", "error", ":", "LOG", ".", "debug", "(", "'Add permission error: %s'", ",", "error", ")", "response_action", "=", "\"Did not add permissions\"", "LOG", ".", "debug", "(", "'Related StatementId (SID): %s'", ",", "prefixed_sid", ")", "LOG", ".", "info", "(", "response_action", ")" ]
Add permission to Lambda for the event trigger. Args: function (str): Lambda function name statement_id (str): IAM policy statement (principal) id action (str): Lambda action to allow principal (str): AWS principal to add permissions source_arn (str): ARN of the source of the event. Only needed for S3 env (str): Environment/account of function region (str): AWS region of function
[ "Add", "permission", "to", "Lambda", "for", "the", "event", "trigger", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/awslambda.py#L94-L135
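A hedged example call for an S3-triggered function; the function name and bucket ARN are placeholders, and SourceArn is only passed because the event source is S3:

add_lambda_permissions(
    function='myappfunction',
    statement_id='s3_invoke',
    action='lambda:InvokeFunction',
    principal='s3.amazonaws.com',
    source_arn='arn:aws:s3:::my-trigger-bucket',
    env='dev',
    region='us-east-1')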
6,115
foremast/foremast
src/foremast/iam/resource_action.py
resource_action
python
def resource_action(client, action='', log_format='item: %(key)s', **kwargs):
    """Call _action_ using boto3 _client_ with _kwargs_.

    This is meant for _action_ methods that will create or implicitely prove a
    given Resource exists. The _log_failure_ flag is available for methods
    that should always succeed, but will occasionally fail due to unknown AWS
    issues.

    Args:
        client (botocore.client.IAM): boto3 client object.
        action (str): Client method to call.
        log_format (str): Generic log message format, 'Added' or 'Found' will
            be prepended depending on the scenario.
        prefix (str): Prefix word to use in successful INFO message.
        **kwargs: Keyword arguments to pass to _action_ method.

    Returns:
        dict: boto3 response.

    """
    result = None

    try:
        result = getattr(client, action)(**kwargs)
        LOG.info(log_format, kwargs)
    except botocore.exceptions.ClientError as error:
        error_code = error.response['Error']['Code']

        if error_code == 'AccessDenied':
            LOG.fatal(error)
            raise
        elif error_code == 'EntityAlreadyExists':
            LOG.info(' '.join(('Found', log_format)), kwargs)
        else:
            LOG.fatal(error)

    return result
[ "def", "resource_action", "(", "client", ",", "action", "=", "''", ",", "log_format", "=", "'item: %(key)s'", ",", "*", "*", "kwargs", ")", ":", "result", "=", "None", "try", ":", "result", "=", "getattr", "(", "client", ",", "action", ")", "(", "*", "*", "kwargs", ")", "LOG", ".", "info", "(", "log_format", ",", "kwargs", ")", "except", "botocore", ".", "exceptions", ".", "ClientError", "as", "error", ":", "error_code", "=", "error", ".", "response", "[", "'Error'", "]", "[", "'Code'", "]", "if", "error_code", "==", "'AccessDenied'", ":", "LOG", ".", "fatal", "(", "error", ")", "raise", "elif", "error_code", "==", "'EntityAlreadyExists'", ":", "LOG", ".", "info", "(", "' '", ".", "join", "(", "(", "'Found'", ",", "log_format", ")", ")", ",", "kwargs", ")", "else", ":", "LOG", ".", "fatal", "(", "error", ")", "return", "result" ]
Call _action_ using boto3 _client_ with _kwargs_. This is meant for _action_ methods that will create or implicitely prove a given Resource exists. The _log_failure_ flag is available for methods that should always succeed, but will occasionally fail due to unknown AWS issues. Args: client (botocore.client.IAM): boto3 client object. action (str): Client method to call. log_format (str): Generic log message format, 'Added' or 'Found' will be prepended depending on the scenario. prefix (str): Prefix word to use in successful INFO message. **kwargs: Keyword arguments to pass to _action_ method. Returns: dict: boto3 response.
[ "Call", "_action_", "using", "boto3", "_client_", "with", "_kwargs_", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/iam/resource_action.py#L24-L59
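A sketch of how this helper might be driven with a boto3 IAM client, assuming a local profile named 'dev' and a hypothetical role name; create_role raises an EntityAlreadyExists ClientError on reruns, which resource_action logs as 'Found ...' instead of failing:

import json

import boto3

iam_client = boto3.session.Session(profile_name='dev').client('iam')

trust_policy = json.dumps({
    'Version': '2012-10-17',
    'Statement': [{'Effect': 'Allow',
                   'Principal': {'Service': 'ec2.amazonaws.com'},
                   'Action': 'sts:AssumeRole'}],
})

resource_action(
    iam_client,
    action='create_role',
    log_format='Role: %(RoleName)s',
    RoleName='myapp_role',  # hypothetical role name
    AssumeRolePolicyDocument=trust_policy)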
6,116
foremast/foremast
src/foremast/elb/__main__.py
main
python
def main():
    """Entry point for ELB creation"""
    logging.basicConfig(format=LOGGING_FORMAT)

    parser = argparse.ArgumentParser(description='Example with non-optional arguments')
    add_debug(parser)
    add_app(parser)
    add_env(parser)
    add_region(parser)
    add_properties(parser)
    args = parser.parse_args()

    logging.getLogger(__package__.split('.')[0]).setLevel(args.debug)

    elb = SpinnakerELB(app=args.app, env=args.env, region=args.region, prop_path=args.properties)
    elb.create_elb()
[ "def", "main", "(", ")", ":", "logging", ".", "basicConfig", "(", "format", "=", "LOGGING_FORMAT", ")", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "'Example with non-optional arguments'", ")", "add_debug", "(", "parser", ")", "add_app", "(", "parser", ")", "add_env", "(", "parser", ")", "add_region", "(", "parser", ")", "add_properties", "(", "parser", ")", "args", "=", "parser", ".", "parse_args", "(", ")", "logging", ".", "getLogger", "(", "__package__", ".", "split", "(", "'.'", ")", "[", "0", "]", ")", ".", "setLevel", "(", "args", ".", "debug", ")", "elb", "=", "SpinnakerELB", "(", "app", "=", "args", ".", "app", ",", "env", "=", "args", ".", "env", ",", "region", "=", "args", ".", "region", ",", "prop_path", "=", "args", ".", "properties", ")", "elb", ".", "create_elb", "(", ")" ]
Entry point for ELB creation
[ "Entry", "point", "for", "ELB", "creation" ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/elb/__main__.py#L30-L47
6,117
foremast/foremast
src/foremast/elb/create_elb.py
SpinnakerELB.make_elb_json
python
def make_elb_json(self):
    """Render the JSON template with arguments.

    Returns:
        str: Rendered ELB template.

    """
    env = self.env
    region = self.region
    elb_settings = self.properties['elb']
    LOG.debug('Block ELB Settings:\n%s', pformat(elb_settings))

    health_settings = elb_settings['health']
    elb_subnet_purpose = elb_settings.get('subnet_purpose', 'internal')

    region_subnets = get_subnets(target='elb', purpose=elb_subnet_purpose, env=env, region=region)
    region_subnets.pop("subnet_ids", None)

    # CAVEAT: Setting the ELB to public, you must use a public subnet,
    # otherwise AWS complains about missing IGW on subnet.
    if elb_subnet_purpose == 'internal':
        is_internal = 'true'
    else:
        is_internal = 'false'

    target = elb_settings.get('target', 'HTTP:80/health')
    health = splay_health(target)

    listeners = format_listeners(elb_settings=elb_settings, env=self.env, region=region)

    idle_timeout = elb_settings.get('idle_timeout', None)
    access_log = elb_settings.get('access_log', {})
    connection_draining_timeout = elb_settings.get('connection_draining_timeout', None)

    security_groups = DEFAULT_ELB_SECURITYGROUPS[env]
    security_groups.append(self.app)
    security_groups.extend(self.properties['security_group']['elb_extras'])
    security_groups = remove_duplicate_sg(security_groups)

    template_kwargs = {
        'access_log': json.dumps(access_log),
        'app_name': self.app,
        'availability_zones': json.dumps(region_subnets),
        'connection_draining_timeout': json.dumps(connection_draining_timeout),
        'env': env,
        'hc_string': target,
        'health_interval': health_settings['interval'],
        'health_path': health.path,
        'health_port': health.port,
        'health_protocol': health.proto,
        'health_timeout': health_settings['timeout'],
        'healthy_threshold': health_settings['threshold'],
        'idle_timeout': json.dumps(idle_timeout),
        'isInternal': is_internal,
        'listeners': json.dumps(listeners),
        'region_zones': json.dumps(region_subnets[region]),
        'region': region,
        'security_groups': json.dumps(security_groups),
        'subnet_type': elb_subnet_purpose,
        'unhealthy_threshold': health_settings['unhealthy_threshold'],
        'vpc_id': get_vpc_id(env, region),
    }

    rendered_template = get_template(template_file='infrastructure/elb_data.json.j2', **template_kwargs)

    return rendered_template
[ "def", "make_elb_json", "(", "self", ")", ":", "env", "=", "self", ".", "env", "region", "=", "self", ".", "region", "elb_settings", "=", "self", ".", "properties", "[", "'elb'", "]", "LOG", ".", "debug", "(", "'Block ELB Settings:\\n%s'", ",", "pformat", "(", "elb_settings", ")", ")", "health_settings", "=", "elb_settings", "[", "'health'", "]", "elb_subnet_purpose", "=", "elb_settings", ".", "get", "(", "'subnet_purpose'", ",", "'internal'", ")", "region_subnets", "=", "get_subnets", "(", "target", "=", "'elb'", ",", "purpose", "=", "elb_subnet_purpose", ",", "env", "=", "env", ",", "region", "=", "region", ")", "region_subnets", ".", "pop", "(", "\"subnet_ids\"", ",", "None", ")", "# CAVEAT: Setting the ELB to public, you must use a public subnet,", "# otherwise AWS complains about missing IGW on subnet.", "if", "elb_subnet_purpose", "==", "'internal'", ":", "is_internal", "=", "'true'", "else", ":", "is_internal", "=", "'false'", "target", "=", "elb_settings", ".", "get", "(", "'target'", ",", "'HTTP:80/health'", ")", "health", "=", "splay_health", "(", "target", ")", "listeners", "=", "format_listeners", "(", "elb_settings", "=", "elb_settings", ",", "env", "=", "self", ".", "env", ",", "region", "=", "region", ")", "idle_timeout", "=", "elb_settings", ".", "get", "(", "'idle_timeout'", ",", "None", ")", "access_log", "=", "elb_settings", ".", "get", "(", "'access_log'", ",", "{", "}", ")", "connection_draining_timeout", "=", "elb_settings", ".", "get", "(", "'connection_draining_timeout'", ",", "None", ")", "security_groups", "=", "DEFAULT_ELB_SECURITYGROUPS", "[", "env", "]", "security_groups", ".", "append", "(", "self", ".", "app", ")", "security_groups", ".", "extend", "(", "self", ".", "properties", "[", "'security_group'", "]", "[", "'elb_extras'", "]", ")", "security_groups", "=", "remove_duplicate_sg", "(", "security_groups", ")", "template_kwargs", "=", "{", "'access_log'", ":", "json", ".", "dumps", "(", "access_log", ")", ",", "'app_name'", ":", "self", ".", "app", ",", "'availability_zones'", ":", "json", ".", "dumps", "(", "region_subnets", ")", ",", "'connection_draining_timeout'", ":", "json", ".", "dumps", "(", "connection_draining_timeout", ")", ",", "'env'", ":", "env", ",", "'hc_string'", ":", "target", ",", "'health_interval'", ":", "health_settings", "[", "'interval'", "]", ",", "'health_path'", ":", "health", ".", "path", ",", "'health_port'", ":", "health", ".", "port", ",", "'health_protocol'", ":", "health", ".", "proto", ",", "'health_timeout'", ":", "health_settings", "[", "'timeout'", "]", ",", "'healthy_threshold'", ":", "health_settings", "[", "'threshold'", "]", ",", "'idle_timeout'", ":", "json", ".", "dumps", "(", "idle_timeout", ")", ",", "'isInternal'", ":", "is_internal", ",", "'listeners'", ":", "json", ".", "dumps", "(", "listeners", ")", ",", "'region_zones'", ":", "json", ".", "dumps", "(", "region_subnets", "[", "region", "]", ")", ",", "'region'", ":", "region", ",", "'security_groups'", ":", "json", ".", "dumps", "(", "security_groups", ")", ",", "'subnet_type'", ":", "elb_subnet_purpose", ",", "'unhealthy_threshold'", ":", "health_settings", "[", "'unhealthy_threshold'", "]", ",", "'vpc_id'", ":", "get_vpc_id", "(", "env", ",", "region", ")", ",", "}", "rendered_template", "=", "get_template", "(", "template_file", "=", "'infrastructure/elb_data.json.j2'", ",", "*", "*", "template_kwargs", ")", "return", "rendered_template" ]
Render the JSON template with arguments.

Returns:
    str: Rendered ELB template.
[ "Render", "the", "JSON", "template", "with", "arguments", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/elb/create_elb.py#L47-L112
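The make_elb_json record above hands its health-check target string (default 'HTTP:80/health') to foremast's splay_health helper before filling the Jinja2 template. The snippet below is only a rough sketch of that split under assumed behavior; parse_target and HealthCheck are hypothetical names, not foremast's API.

from collections import namedtuple

# Hypothetical stand-in for splay_health(): split 'PROTO:port/path' into the
# pieces make_elb_json feeds into its template kwargs.
HealthCheck = namedtuple('HealthCheck', 'proto port path target')

def parse_target(target='HTTP:80/health'):
    proto, remainder = target.split(':', 1)
    port, _, path = remainder.partition('/')
    return HealthCheck(proto, int(port), '/' + path if path else '', target)

print(parse_target('HTTP:8080/healthcheck'))
# HealthCheck(proto='HTTP', port=8080, path='/healthcheck', target='HTTP:8080/healthcheck')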
6,118
foremast/foremast
src/foremast/elb/create_elb.py
SpinnakerELB.create_elb
def create_elb(self):
        """Create or Update the ELB after rendering JSON data from configs.

        Asserts that the ELB task was successful.

        """
        json_data = self.make_elb_json()
        LOG.debug('Block ELB JSON Data:\n%s', pformat(json_data))

        wait_for_task(json_data)

        self.add_listener_policy(json_data)
        self.add_backend_policy(json_data)
        self.configure_attributes(json_data)
python
def create_elb(self): """Create or Update the ELB after rendering JSON data from configs. Asserts that the ELB task was successful. """ json_data = self.make_elb_json() LOG.debug('Block ELB JSON Data:\n%s', pformat(json_data)) wait_for_task(json_data) self.add_listener_policy(json_data) self.add_backend_policy(json_data) self.configure_attributes(json_data)
[ "def", "create_elb", "(", "self", ")", ":", "json_data", "=", "self", ".", "make_elb_json", "(", ")", "LOG", ".", "debug", "(", "'Block ELB JSON Data:\\n%s'", ",", "pformat", "(", "json_data", ")", ")", "wait_for_task", "(", "json_data", ")", "self", ".", "add_listener_policy", "(", "json_data", ")", "self", ".", "add_backend_policy", "(", "json_data", ")", "self", ".", "configure_attributes", "(", "json_data", ")" ]
Create or Update the ELB after rendering JSON data from configs. Asserts that the ELB task was successful.
[ "Create", "or", "Update", "the", "ELB", "after", "rendering", "JSON", "data", "from", "configs", ".", "Asserts", "that", "the", "ELB", "task", "was", "successful", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/elb/create_elb.py#L114-L127
6,119
foremast/foremast
src/foremast/elb/create_elb.py
SpinnakerELB.add_listener_policy
def add_listener_policy(self, json_data):
        """Attaches listener policies to an ELB.

        Args:
            json_data (json): return data from ELB upsert

        """
        env = boto3.session.Session(profile_name=self.env, region_name=self.region)
        elbclient = env.client('elb')

        # create stickiness policy if set in configs
        stickiness = {}
        elb_settings = self.properties['elb']
        if elb_settings.get('ports'):
            ports = elb_settings['ports']
            for listener in ports:
                if listener.get("stickiness"):
                    stickiness = self.add_stickiness()
                    LOG.info('Stickiness Found: %s', stickiness)
                    break

        # Attach policies to created ELB
        for job in json.loads(json_data)['job']:
            for listener in job['listeners']:
                policies = []
                ext_port = listener['externalPort']
                if listener['listenerPolicies']:
                    policies.extend(listener['listenerPolicies'])
                if stickiness.get(ext_port):
                    policies.append(stickiness.get(ext_port))
                if policies:
                    LOG.info('Adding listener policies: %s', policies)
                    elbclient.set_load_balancer_policies_of_listener(
                        LoadBalancerName=self.app, LoadBalancerPort=ext_port, PolicyNames=policies)
python
def add_listener_policy(self, json_data): """Attaches listerner policies to an ELB Args: json_data (json): return data from ELB upsert """ env = boto3.session.Session(profile_name=self.env, region_name=self.region) elbclient = env.client('elb') # create stickiness policy if set in configs stickiness = {} elb_settings = self.properties['elb'] if elb_settings.get('ports'): ports = elb_settings['ports'] for listener in ports: if listener.get("stickiness"): stickiness = self.add_stickiness() LOG.info('Stickiness Found: %s', stickiness) break # Attach policies to created ELB for job in json.loads(json_data)['job']: for listener in job['listeners']: policies = [] ext_port = listener['externalPort'] if listener['listenerPolicies']: policies.extend(listener['listenerPolicies']) if stickiness.get(ext_port): policies.append(stickiness.get(ext_port)) if policies: LOG.info('Adding listener policies: %s', policies) elbclient.set_load_balancer_policies_of_listener( LoadBalancerName=self.app, LoadBalancerPort=ext_port, PolicyNames=policies)
[ "def", "add_listener_policy", "(", "self", ",", "json_data", ")", ":", "env", "=", "boto3", ".", "session", ".", "Session", "(", "profile_name", "=", "self", ".", "env", ",", "region_name", "=", "self", ".", "region", ")", "elbclient", "=", "env", ".", "client", "(", "'elb'", ")", "# create stickiness policy if set in configs", "stickiness", "=", "{", "}", "elb_settings", "=", "self", ".", "properties", "[", "'elb'", "]", "if", "elb_settings", ".", "get", "(", "'ports'", ")", ":", "ports", "=", "elb_settings", "[", "'ports'", "]", "for", "listener", "in", "ports", ":", "if", "listener", ".", "get", "(", "\"stickiness\"", ")", ":", "stickiness", "=", "self", ".", "add_stickiness", "(", ")", "LOG", ".", "info", "(", "'Stickiness Found: %s'", ",", "stickiness", ")", "break", "# Attach policies to created ELB", "for", "job", "in", "json", ".", "loads", "(", "json_data", ")", "[", "'job'", "]", ":", "for", "listener", "in", "job", "[", "'listeners'", "]", ":", "policies", "=", "[", "]", "ext_port", "=", "listener", "[", "'externalPort'", "]", "if", "listener", "[", "'listenerPolicies'", "]", ":", "policies", ".", "extend", "(", "listener", "[", "'listenerPolicies'", "]", ")", "if", "stickiness", ".", "get", "(", "ext_port", ")", ":", "policies", ".", "append", "(", "stickiness", ".", "get", "(", "ext_port", ")", ")", "if", "policies", ":", "LOG", ".", "info", "(", "'Adding listener policies: %s'", ",", "policies", ")", "elbclient", ".", "set_load_balancer_policies_of_listener", "(", "LoadBalancerName", "=", "self", ".", "app", ",", "LoadBalancerPort", "=", "ext_port", ",", "PolicyNames", "=", "policies", ")" ]
Attaches listener policies to an ELB.

Args:
    json_data (json): return data from ELB upsert
[ "Attaches", "listerner", "policies", "to", "an", "ELB" ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/elb/create_elb.py#L129-L161
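add_listener_policy above collects the configured listener policies plus any stickiness policy and applies them per external port. A minimal boto3 sketch of the same call, with placeholder profile, load balancer, and policy names (in foremast these come from the rendered ELB job data):

import boto3

session = boto3.session.Session(profile_name='dev', region_name='us-east-1')
elbclient = session.client('elb')

# Attach one listener policy to port 80 of a classic ELB named 'myapp'.
elbclient.set_load_balancer_policies_of_listener(
    LoadBalancerName='myapp',
    LoadBalancerPort=80,
    PolicyNames=['myapp-elb-80-300'])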
6,120
foremast/foremast
src/foremast/elb/create_elb.py
SpinnakerELB.add_backend_policy
def add_backend_policy(self, json_data):
        """Attaches backend server policies to an ELB

        Args:
            json_data (json): return data from ELB upsert

        """
        env = boto3.session.Session(profile_name=self.env, region_name=self.region)
        elbclient = env.client('elb')

        # Attach backend server policies to created ELB
        for job in json.loads(json_data)['job']:
            for listener in job['listeners']:
                instance_port = listener['internalPort']
                backend_policy_list = listener['backendPolicies']
                if backend_policy_list:
                    LOG.info('Adding backend server policies: %s', backend_policy_list)
                    elbclient.set_load_balancer_policies_for_backend_server(
                        LoadBalancerName=self.app, InstancePort=instance_port, PolicyNames=backend_policy_list)
python
def add_backend_policy(self, json_data): """Attaches backend server policies to an ELB Args: json_data (json): return data from ELB upsert """ env = boto3.session.Session(profile_name=self.env, region_name=self.region) elbclient = env.client('elb') # Attach backend server policies to created ELB for job in json.loads(json_data)['job']: for listener in job['listeners']: instance_port = listener['internalPort'] backend_policy_list = listener['backendPolicies'] if backend_policy_list: LOG.info('Adding backend server policies: %s', backend_policy_list) elbclient.set_load_balancer_policies_for_backend_server( LoadBalancerName=self.app, InstancePort=instance_port, PolicyNames=backend_policy_list)
[ "def", "add_backend_policy", "(", "self", ",", "json_data", ")", ":", "env", "=", "boto3", ".", "session", ".", "Session", "(", "profile_name", "=", "self", ".", "env", ",", "region_name", "=", "self", ".", "region", ")", "elbclient", "=", "env", ".", "client", "(", "'elb'", ")", "# Attach backend server policies to created ELB", "for", "job", "in", "json", ".", "loads", "(", "json_data", ")", "[", "'job'", "]", ":", "for", "listener", "in", "job", "[", "'listeners'", "]", ":", "instance_port", "=", "listener", "[", "'internalPort'", "]", "backend_policy_list", "=", "listener", "[", "'backendPolicies'", "]", "if", "backend_policy_list", ":", "LOG", ".", "info", "(", "'Adding backend server policies: %s'", ",", "backend_policy_list", ")", "elbclient", ".", "set_load_balancer_policies_for_backend_server", "(", "LoadBalancerName", "=", "self", ".", "app", ",", "InstancePort", "=", "instance_port", ",", "PolicyNames", "=", "backend_policy_list", ")" ]
Attaches backend server policies to an ELB

Args:
    json_data (json): return data from ELB upsert
[ "Attaches", "backend", "server", "policies", "to", "an", "ELB" ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/elb/create_elb.py#L163-L180
6,121
foremast/foremast
src/foremast/elb/create_elb.py
SpinnakerELB.add_stickiness
def add_stickiness(self):
        """ Adds stickiness policy to created ELB

        Returns:
            dict: A dict of stickiness policies and ports::

                example:
                {
                    80: "$policy_name"
                }

        """
        stickiness_dict = {}
        env = boto3.session.Session(profile_name=self.env, region_name=self.region)
        elbclient = env.client('elb')
        elb_settings = self.properties['elb']
        for listener in elb_settings.get('ports'):
            if listener.get("stickiness"):
                sticky_type = listener['stickiness']['type'].lower()
                externalport = int(listener['loadbalancer'].split(":")[-1])
                policyname_tmp = "{0}-{1}-{2}-{3}"
                if sticky_type == 'app':
                    cookiename = listener['stickiness']['cookie_name']
                    policy_key = cookiename.replace('.', '')
                    policyname = policyname_tmp.format(self.app, sticky_type, externalport, policy_key)
                    elbclient.create_app_cookie_stickiness_policy(
                        LoadBalancerName=self.app, PolicyName=policyname, CookieName=cookiename)
                    stickiness_dict[externalport] = policyname
                elif sticky_type == 'elb':
                    cookie_ttl = listener['stickiness'].get('cookie_ttl', None)
                    policyname = policyname_tmp.format(self.app, sticky_type, externalport, cookie_ttl)
                    if cookie_ttl:
                        elbclient.create_lb_cookie_stickiness_policy(
                            LoadBalancerName=self.app, PolicyName=policyname, CookieExpirationPeriod=cookie_ttl)
                    else:
                        elbclient.create_lb_cookie_stickiness_policy(LoadBalancerName=self.app, PolicyName=policyname)
                    stickiness_dict[externalport] = policyname

        return stickiness_dict
python
def add_stickiness(self): """ Adds stickiness policy to created ELB Returns: dict: A dict of stickiness policies and ports:: example: { 80: "$policy_name" } """ stickiness_dict = {} env = boto3.session.Session(profile_name=self.env, region_name=self.region) elbclient = env.client('elb') elb_settings = self.properties['elb'] for listener in elb_settings.get('ports'): if listener.get("stickiness"): sticky_type = listener['stickiness']['type'].lower() externalport = int(listener['loadbalancer'].split(":")[-1]) policyname_tmp = "{0}-{1}-{2}-{3}" if sticky_type == 'app': cookiename = listener['stickiness']['cookie_name'] policy_key = cookiename.replace('.', '') policyname = policyname_tmp.format(self.app, sticky_type, externalport, policy_key) elbclient.create_app_cookie_stickiness_policy( LoadBalancerName=self.app, PolicyName=policyname, CookieName=cookiename) stickiness_dict[externalport] = policyname elif sticky_type == 'elb': cookie_ttl = listener['stickiness'].get('cookie_ttl', None) policyname = policyname_tmp.format(self.app, sticky_type, externalport, cookie_ttl) if cookie_ttl: elbclient.create_lb_cookie_stickiness_policy( LoadBalancerName=self.app, PolicyName=policyname, CookieExpirationPeriod=cookie_ttl) else: elbclient.create_lb_cookie_stickiness_policy(LoadBalancerName=self.app, PolicyName=policyname) stickiness_dict[externalport] = policyname return stickiness_dict
[ "def", "add_stickiness", "(", "self", ")", ":", "stickiness_dict", "=", "{", "}", "env", "=", "boto3", ".", "session", ".", "Session", "(", "profile_name", "=", "self", ".", "env", ",", "region_name", "=", "self", ".", "region", ")", "elbclient", "=", "env", ".", "client", "(", "'elb'", ")", "elb_settings", "=", "self", ".", "properties", "[", "'elb'", "]", "for", "listener", "in", "elb_settings", ".", "get", "(", "'ports'", ")", ":", "if", "listener", ".", "get", "(", "\"stickiness\"", ")", ":", "sticky_type", "=", "listener", "[", "'stickiness'", "]", "[", "'type'", "]", ".", "lower", "(", ")", "externalport", "=", "int", "(", "listener", "[", "'loadbalancer'", "]", ".", "split", "(", "\":\"", ")", "[", "-", "1", "]", ")", "policyname_tmp", "=", "\"{0}-{1}-{2}-{3}\"", "if", "sticky_type", "==", "'app'", ":", "cookiename", "=", "listener", "[", "'stickiness'", "]", "[", "'cookie_name'", "]", "policy_key", "=", "cookiename", ".", "replace", "(", "'.'", ",", "''", ")", "policyname", "=", "policyname_tmp", ".", "format", "(", "self", ".", "app", ",", "sticky_type", ",", "externalport", ",", "policy_key", ")", "elbclient", ".", "create_app_cookie_stickiness_policy", "(", "LoadBalancerName", "=", "self", ".", "app", ",", "PolicyName", "=", "policyname", ",", "CookieName", "=", "cookiename", ")", "stickiness_dict", "[", "externalport", "]", "=", "policyname", "elif", "sticky_type", "==", "'elb'", ":", "cookie_ttl", "=", "listener", "[", "'stickiness'", "]", ".", "get", "(", "'cookie_ttl'", ",", "None", ")", "policyname", "=", "policyname_tmp", ".", "format", "(", "self", ".", "app", ",", "sticky_type", ",", "externalport", ",", "cookie_ttl", ")", "if", "cookie_ttl", ":", "elbclient", ".", "create_lb_cookie_stickiness_policy", "(", "LoadBalancerName", "=", "self", ".", "app", ",", "PolicyName", "=", "policyname", ",", "CookieExpirationPeriod", "=", "cookie_ttl", ")", "else", ":", "elbclient", ".", "create_lb_cookie_stickiness_policy", "(", "LoadBalancerName", "=", "self", ".", "app", ",", "PolicyName", "=", "policyname", ")", "stickiness_dict", "[", "externalport", "]", "=", "policyname", "return", "stickiness_dict" ]
Adds stickiness policy to created ELB

Returns:
    dict: A dict of stickiness policies and ports::

        example:
        {
            80: "$policy_name"
        }
[ "Adds", "stickiness", "policy", "to", "created", "ELB" ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/elb/create_elb.py#L182-L218
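The policy names built by add_stickiness follow the "{app}-{type}-{port}-{key}" pattern, where the key is either the app cookie name with dots stripped or the ELB cookie TTL. A small worked example with made-up values:

app_name = 'myapp'
sticky_type = 'elb'
external_port = 443
cookie_ttl = 300

policyname = "{0}-{1}-{2}-{3}".format(app_name, sticky_type, external_port, cookie_ttl)
print(policyname)  # myapp-elb-443-300

# The method above would then register it with the classic ELB API, roughly:
# elbclient.create_lb_cookie_stickiness_policy(
#     LoadBalancerName=app_name, PolicyName=policyname, CookieExpirationPeriod=cookie_ttl)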
6,122
foremast/foremast
src/foremast/elb/create_elb.py
SpinnakerELB.configure_attributes
def configure_attributes(self, json_data):
        """Configure load balancer attributes such as idle timeout, connection draining, etc

        Args:
            json_data (json): return data from ELB upsert

        """
        env = boto3.session.Session(profile_name=self.env, region_name=self.region)
        elbclient = env.client('elb')

        elb_settings = self.properties['elb']
        LOG.debug('Block ELB Settings Pre Configure Load Balancer Attributes:\n%s', pformat(elb_settings))

        # FIXME: Determine why 'job' is not being used
        # pylint: disable=unused-variable
        for job in json.loads(json_data)['job']:
            load_balancer_attributes = {
                'CrossZoneLoadBalancing': {
                    'Enabled': True
                },
                'AccessLog': {
                    'Enabled': False,
                },
                'ConnectionDraining': {
                    'Enabled': False,
                },
                'ConnectionSettings': {
                    'IdleTimeout': 60
                }
            }

            if elb_settings.get('connection_draining_timeout'):
                connection_draining_timeout = int(elb_settings['connection_draining_timeout'])
                LOG.info('Applying Custom Load Balancer Connection Draining Timeout: %d', connection_draining_timeout)
                load_balancer_attributes['ConnectionDraining'] = {
                    'Enabled': True,
                    'Timeout': connection_draining_timeout
                }

            if elb_settings.get('idle_timeout'):
                idle_timeout = int(elb_settings['idle_timeout'])
                LOG.info('Applying Custom Load Balancer Idle Timeout: %d', idle_timeout)
                load_balancer_attributes['ConnectionSettings'] = {'IdleTimeout': idle_timeout}

            if elb_settings.get('access_log'):
                access_log_bucket_name = elb_settings['access_log']['bucket_name']
                access_log_bucket_prefix = elb_settings['access_log']['bucket_prefix']
                access_log_emit_interval = int(elb_settings['access_log']['emit_interval'])
                LOG.info('Applying Custom Load Balancer Access Log: %s/%s every %d minutes', access_log_bucket_name,
                         access_log_bucket_prefix, access_log_emit_interval)
                load_balancer_attributes['AccessLog'] = {
                    'Enabled': True,
                    'S3BucketName': access_log_bucket_name,
                    'EmitInterval': access_log_emit_interval,
                    'S3BucketPrefix': access_log_bucket_prefix
                }

            LOG.info('Applying Load Balancer Attributes')
            LOG.debug('Load Balancer Attributes:\n%s', pformat(load_balancer_attributes))
            elbclient.modify_load_balancer_attributes(
                LoadBalancerName=self.app, LoadBalancerAttributes=load_balancer_attributes)
python
def configure_attributes(self, json_data): """Configure load balancer attributes such as idle timeout, connection draining, etc Args: json_data (json): return data from ELB upsert """ env = boto3.session.Session(profile_name=self.env, region_name=self.region) elbclient = env.client('elb') elb_settings = self.properties['elb'] LOG.debug('Block ELB Settings Pre Configure Load Balancer Attributes:\n%s', pformat(elb_settings)) # FIXME: Determine why 'job' is not being used # pylint: disable=unused-variable for job in json.loads(json_data)['job']: load_balancer_attributes = { 'CrossZoneLoadBalancing': { 'Enabled': True }, 'AccessLog': { 'Enabled': False, }, 'ConnectionDraining': { 'Enabled': False, }, 'ConnectionSettings': { 'IdleTimeout': 60 } } if elb_settings.get('connection_draining_timeout'): connection_draining_timeout = int(elb_settings['connection_draining_timeout']) LOG.info('Applying Custom Load Balancer Connection Draining Timeout: %d', connection_draining_timeout) load_balancer_attributes['ConnectionDraining'] = { 'Enabled': True, 'Timeout': connection_draining_timeout } if elb_settings.get('idle_timeout'): idle_timeout = int(elb_settings['idle_timeout']) LOG.info('Applying Custom Load Balancer Idle Timeout: %d', idle_timeout) load_balancer_attributes['ConnectionSettings'] = {'IdleTimeout': idle_timeout} if elb_settings.get('access_log'): access_log_bucket_name = elb_settings['access_log']['bucket_name'] access_log_bucket_prefix = elb_settings['access_log']['bucket_prefix'] access_log_emit_interval = int(elb_settings['access_log']['emit_interval']) LOG.info('Applying Custom Load Balancer Access Log: %s/%s every %d minutes', access_log_bucket_name, access_log_bucket_prefix, access_log_emit_interval) load_balancer_attributes['AccessLog'] = { 'Enabled': True, 'S3BucketName': access_log_bucket_name, 'EmitInterval': access_log_emit_interval, 'S3BucketPrefix': access_log_bucket_prefix } LOG.info('Applying Load Balancer Attributes') LOG.debug('Load Balancer Attributes:\n%s', pformat(load_balancer_attributes)) elbclient.modify_load_balancer_attributes( LoadBalancerName=self.app, LoadBalancerAttributes=load_balancer_attributes)
[ "def", "configure_attributes", "(", "self", ",", "json_data", ")", ":", "env", "=", "boto3", ".", "session", ".", "Session", "(", "profile_name", "=", "self", ".", "env", ",", "region_name", "=", "self", ".", "region", ")", "elbclient", "=", "env", ".", "client", "(", "'elb'", ")", "elb_settings", "=", "self", ".", "properties", "[", "'elb'", "]", "LOG", ".", "debug", "(", "'Block ELB Settings Pre Configure Load Balancer Attributes:\\n%s'", ",", "pformat", "(", "elb_settings", ")", ")", "# FIXME: Determine why 'job' is not being used", "# pylint: disable=unused-variable", "for", "job", "in", "json", ".", "loads", "(", "json_data", ")", "[", "'job'", "]", ":", "load_balancer_attributes", "=", "{", "'CrossZoneLoadBalancing'", ":", "{", "'Enabled'", ":", "True", "}", ",", "'AccessLog'", ":", "{", "'Enabled'", ":", "False", ",", "}", ",", "'ConnectionDraining'", ":", "{", "'Enabled'", ":", "False", ",", "}", ",", "'ConnectionSettings'", ":", "{", "'IdleTimeout'", ":", "60", "}", "}", "if", "elb_settings", ".", "get", "(", "'connection_draining_timeout'", ")", ":", "connection_draining_timeout", "=", "int", "(", "elb_settings", "[", "'connection_draining_timeout'", "]", ")", "LOG", ".", "info", "(", "'Applying Custom Load Balancer Connection Draining Timeout: %d'", ",", "connection_draining_timeout", ")", "load_balancer_attributes", "[", "'ConnectionDraining'", "]", "=", "{", "'Enabled'", ":", "True", ",", "'Timeout'", ":", "connection_draining_timeout", "}", "if", "elb_settings", ".", "get", "(", "'idle_timeout'", ")", ":", "idle_timeout", "=", "int", "(", "elb_settings", "[", "'idle_timeout'", "]", ")", "LOG", ".", "info", "(", "'Applying Custom Load Balancer Idle Timeout: %d'", ",", "idle_timeout", ")", "load_balancer_attributes", "[", "'ConnectionSettings'", "]", "=", "{", "'IdleTimeout'", ":", "idle_timeout", "}", "if", "elb_settings", ".", "get", "(", "'access_log'", ")", ":", "access_log_bucket_name", "=", "elb_settings", "[", "'access_log'", "]", "[", "'bucket_name'", "]", "access_log_bucket_prefix", "=", "elb_settings", "[", "'access_log'", "]", "[", "'bucket_prefix'", "]", "access_log_emit_interval", "=", "int", "(", "elb_settings", "[", "'access_log'", "]", "[", "'emit_interval'", "]", ")", "LOG", ".", "info", "(", "'Applying Custom Load Balancer Access Log: %s/%s every %d minutes'", ",", "access_log_bucket_name", ",", "access_log_bucket_prefix", ",", "access_log_emit_interval", ")", "load_balancer_attributes", "[", "'AccessLog'", "]", "=", "{", "'Enabled'", ":", "True", ",", "'S3BucketName'", ":", "access_log_bucket_name", ",", "'EmitInterval'", ":", "access_log_emit_interval", ",", "'S3BucketPrefix'", ":", "access_log_bucket_prefix", "}", "LOG", ".", "info", "(", "'Applying Load Balancer Attributes'", ")", "LOG", ".", "debug", "(", "'Load Balancer Attributes:\\n%s'", ",", "pformat", "(", "load_balancer_attributes", ")", ")", "elbclient", ".", "modify_load_balancer_attributes", "(", "LoadBalancerName", "=", "self", ".", "app", ",", "LoadBalancerAttributes", "=", "load_balancer_attributes", ")" ]
Configure load balancer attributes such as idle timeout, connection draining, etc

Args:
    json_data (json): return data from ELB upsert
[ "Configure", "load", "balancer", "attributes", "such", "as", "idle", "timeout", "connection", "draining", "etc" ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/elb/create_elb.py#L220-L276
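configure_attributes always starts from a default attribute block (cross-zone on, access log and draining off, 60 second idle timeout) and overrides pieces of it from the pipeline config. An illustrative payload in that shape, with invented values:

load_balancer_attributes = {
    'CrossZoneLoadBalancing': {'Enabled': True},
    'AccessLog': {'Enabled': False},
    'ConnectionDraining': {'Enabled': True, 'Timeout': 300},
    'ConnectionSettings': {'IdleTimeout': 90},
}

# The method above applies it with the classic ELB API:
# elbclient.modify_load_balancer_attributes(
#     LoadBalancerName='myapp', LoadBalancerAttributes=load_balancer_attributes)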
6,123
foremast/foremast
src/foremast/configs/__main__.py
main
def main():
    """Append Application Configurations to a given file in multiple formats."""
    logging.basicConfig(format=LOGGING_FORMAT)

    parser = argparse.ArgumentParser(description=main.__doc__)
    add_debug(parser)
    parser.add_argument('-o', '--output', required=True, help='Name of environment file to append to')
    parser.add_argument(
        '-g', '--git-short', metavar='GROUP/PROJECT', required=True, help='Short name for Git, e.g. forrest/core')
    parser.add_argument('-r', '--runway-dir', help='Runway directory with app.json files, requires --git-short')
    args = parser.parse_args()

    LOG.setLevel(args.debug)
    logging.getLogger(__package__.split('.')[0]).setLevel(args.debug)

    generated = gogoutils.Generator(*gogoutils.Parser(args.git_short).parse_url(), formats=APP_FORMATS)
    git_short = generated.gitlab()['main']

    if args.runway_dir:
        configs = process_runway_configs(runway_dir=args.runway_dir)
    else:
        configs = process_git_configs(git_short=git_short)

    write_variables(app_configs=configs, out_file=args.output, git_short=git_short)
python
def main(): """Append Application Configurations to a given file in multiple formats.""" logging.basicConfig(format=LOGGING_FORMAT) parser = argparse.ArgumentParser(description=main.__doc__) add_debug(parser) parser.add_argument('-o', '--output', required=True, help='Name of environment file to append to') parser.add_argument( '-g', '--git-short', metavar='GROUP/PROJECT', required=True, help='Short name for Git, e.g. forrest/core') parser.add_argument('-r', '--runway-dir', help='Runway directory with app.json files, requires --git-short') args = parser.parse_args() LOG.setLevel(args.debug) logging.getLogger(__package__.split('.')[0]).setLevel(args.debug) generated = gogoutils.Generator(*gogoutils.Parser(args.git_short).parse_url(), formats=APP_FORMATS) git_short = generated.gitlab()['main'] if args.runway_dir: configs = process_runway_configs(runway_dir=args.runway_dir) else: configs = process_git_configs(git_short=git_short) write_variables(app_configs=configs, out_file=args.output, git_short=git_short)
[ "def", "main", "(", ")", ":", "logging", ".", "basicConfig", "(", "format", "=", "LOGGING_FORMAT", ")", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "main", ".", "__doc__", ")", "add_debug", "(", "parser", ")", "parser", ".", "add_argument", "(", "'-o'", ",", "'--output'", ",", "required", "=", "True", ",", "help", "=", "'Name of environment file to append to'", ")", "parser", ".", "add_argument", "(", "'-g'", ",", "'--git-short'", ",", "metavar", "=", "'GROUP/PROJECT'", ",", "required", "=", "True", ",", "help", "=", "'Short name for Git, e.g. forrest/core'", ")", "parser", ".", "add_argument", "(", "'-r'", ",", "'--runway-dir'", ",", "help", "=", "'Runway directory with app.json files, requires --git-short'", ")", "args", "=", "parser", ".", "parse_args", "(", ")", "LOG", ".", "setLevel", "(", "args", ".", "debug", ")", "logging", ".", "getLogger", "(", "__package__", ".", "split", "(", "'.'", ")", "[", "0", "]", ")", ".", "setLevel", "(", "args", ".", "debug", ")", "generated", "=", "gogoutils", ".", "Generator", "(", "*", "gogoutils", ".", "Parser", "(", "args", ".", "git_short", ")", ".", "parse_url", "(", ")", ",", "formats", "=", "APP_FORMATS", ")", "git_short", "=", "generated", ".", "gitlab", "(", ")", "[", "'main'", "]", "if", "args", ".", "runway_dir", ":", "configs", "=", "process_runway_configs", "(", "runway_dir", "=", "args", ".", "runway_dir", ")", "else", ":", "configs", "=", "process_git_configs", "(", "git_short", "=", "git_short", ")", "write_variables", "(", "app_configs", "=", "configs", ",", "out_file", "=", "args", ".", "output", ",", "git_short", "=", "git_short", ")" ]
Append Application Configurations to a given file in multiple formats.
[ "Append", "Application", "Configurations", "to", "a", "given", "file", "in", "multiple", "formats", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/configs/__main__.py#L33-L56
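main() passes args.debug straight to LOG.setLevel, so add_debug (defined elsewhere in foremast and not shown in this record) presumably stores a logging level on that attribute. A hypothetical stand-in, purely to show the shape of such a flag; the real helper may differ:

import argparse
import logging

def add_debug(parser):
    # Assumption: store a logging level constant on args.debug.
    parser.add_argument(
        '-d', '--debug', action='store_const',
        const=logging.DEBUG, default=logging.INFO,
        help='Enable DEBUG logging')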
6,124
foremast/foremast
src/foremast/__main__.py
add_infra
def add_infra(subparsers):
    """Infrastructure subcommands."""
    infra_parser = subparsers.add_parser('infra', help=runner.prepare_infrastructure.__doc__)
    infra_parser.set_defaults(func=runner.prepare_infrastructure)
python
def add_infra(subparsers): """Infrastructure subcommands.""" infra_parser = subparsers.add_parser('infra', help=runner.prepare_infrastructure.__doc__) infra_parser.set_defaults(func=runner.prepare_infrastructure)
[ "def", "add_infra", "(", "subparsers", ")", ":", "infra_parser", "=", "subparsers", ".", "add_parser", "(", "'infra'", ",", "help", "=", "runner", ".", "prepare_infrastructure", ".", "__doc__", ")", "infra_parser", ".", "set_defaults", "(", "func", "=", "runner", ".", "prepare_infrastructure", ")" ]
Infrastructure subcommands.
[ "Infrastructure", "subcommands", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/__main__.py#L15-L18
6,125
foremast/foremast
src/foremast/__main__.py
add_pipeline
def add_pipeline(subparsers):
    """Pipeline subcommands."""
    pipeline_parser = subparsers.add_parser(
        'pipeline', help=add_pipeline.__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    pipeline_parser.set_defaults(func=pipeline_parser.print_help)

    pipeline_subparsers = pipeline_parser.add_subparsers(title='Pipelines')

    pipeline_full_parser = pipeline_subparsers.add_parser(
        'app', help=runner.prepare_app_pipeline.__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    pipeline_full_parser.set_defaults(func=runner.prepare_app_pipeline)

    pipeline_onetime_parser = pipeline_subparsers.add_parser(
        'onetime', help=runner.prepare_onetime_pipeline.__doc__,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    pipeline_onetime_parser.set_defaults(func=runner.prepare_onetime_pipeline)
    add_env(pipeline_onetime_parser)
python
def add_pipeline(subparsers): """Pipeline subcommands.""" pipeline_parser = subparsers.add_parser( 'pipeline', help=add_pipeline.__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter) pipeline_parser.set_defaults(func=pipeline_parser.print_help) pipeline_subparsers = pipeline_parser.add_subparsers(title='Pipelines') pipeline_full_parser = pipeline_subparsers.add_parser( 'app', help=runner.prepare_app_pipeline.__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter) pipeline_full_parser.set_defaults(func=runner.prepare_app_pipeline) pipeline_onetime_parser = pipeline_subparsers.add_parser( 'onetime', help=runner.prepare_onetime_pipeline.__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter) pipeline_onetime_parser.set_defaults(func=runner.prepare_onetime_pipeline) add_env(pipeline_onetime_parser)
[ "def", "add_pipeline", "(", "subparsers", ")", ":", "pipeline_parser", "=", "subparsers", ".", "add_parser", "(", "'pipeline'", ",", "help", "=", "add_pipeline", ".", "__doc__", ",", "formatter_class", "=", "argparse", ".", "ArgumentDefaultsHelpFormatter", ")", "pipeline_parser", ".", "set_defaults", "(", "func", "=", "pipeline_parser", ".", "print_help", ")", "pipeline_subparsers", "=", "pipeline_parser", ".", "add_subparsers", "(", "title", "=", "'Pipelines'", ")", "pipeline_full_parser", "=", "pipeline_subparsers", ".", "add_parser", "(", "'app'", ",", "help", "=", "runner", ".", "prepare_app_pipeline", ".", "__doc__", ",", "formatter_class", "=", "argparse", ".", "ArgumentDefaultsHelpFormatter", ")", "pipeline_full_parser", ".", "set_defaults", "(", "func", "=", "runner", ".", "prepare_app_pipeline", ")", "pipeline_onetime_parser", "=", "pipeline_subparsers", ".", "add_parser", "(", "'onetime'", ",", "help", "=", "runner", ".", "prepare_onetime_pipeline", ".", "__doc__", ",", "formatter_class", "=", "argparse", ".", "ArgumentDefaultsHelpFormatter", ")", "pipeline_onetime_parser", ".", "set_defaults", "(", "func", "=", "runner", ".", "prepare_onetime_pipeline", ")", "add_env", "(", "pipeline_onetime_parser", ")" ]
Pipeline subcommands.
[ "Pipeline", "subcommands", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/__main__.py#L21-L36
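The add_* helpers in records 6,124 through 6,128 all follow the same argparse pattern: register a subcommand, point set_defaults(func=...) at a runner, and optionally nest further subparsers. A hedged sketch of how such helpers are typically wired to a top-level parser; the prog name and the parse_args example here are assumptions, not foremast's actual __main__ wiring:

import argparse

from foremast.__main__ import add_infra, add_pipeline, add_rebuild  # assumed import path

parser = argparse.ArgumentParser(prog='foremast')
subparsers = parser.add_subparsers(title='Commands')

add_infra(subparsers)      # 'infra'
add_pipeline(subparsers)   # 'pipeline app' / 'pipeline onetime'
add_rebuild(subparsers)    # 'rebuild'

args = parser.parse_args(['pipeline', 'app'])
# args.func now points at runner.prepare_app_pipeline; the real CLI dispatches to it.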
6,126
foremast/foremast
src/foremast/__main__.py
add_rebuild
def add_rebuild(subparsers):
    """Rebuild Pipeline subcommands."""
    rebuild_parser = subparsers.add_parser(
        'rebuild', help=runner.rebuild_pipelines.__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    rebuild_parser.set_defaults(func=runner.rebuild_pipelines)
    rebuild_parser.add_argument('-a', '--all', action='store_true', help='Rebuild all Pipelines')
    rebuild_parser.add_argument(
        'project', nargs='?', default=os.getenv('REBUILD_PROJECT'),
        help='Project to rebuild, overrides $REBUILD_PROJECT')
python
def add_rebuild(subparsers): """Rebuild Pipeline subcommands.""" rebuild_parser = subparsers.add_parser( 'rebuild', help=runner.rebuild_pipelines.__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter) rebuild_parser.set_defaults(func=runner.rebuild_pipelines) rebuild_parser.add_argument('-a', '--all', action='store_true', help='Rebuild all Pipelines') rebuild_parser.add_argument( 'project', nargs='?', default=os.getenv('REBUILD_PROJECT'), help='Project to rebuild, overrides $REBUILD_PROJECT')
[ "def", "add_rebuild", "(", "subparsers", ")", ":", "rebuild_parser", "=", "subparsers", ".", "add_parser", "(", "'rebuild'", ",", "help", "=", "runner", ".", "rebuild_pipelines", ".", "__doc__", ",", "formatter_class", "=", "argparse", ".", "ArgumentDefaultsHelpFormatter", ")", "rebuild_parser", ".", "set_defaults", "(", "func", "=", "runner", ".", "rebuild_pipelines", ")", "rebuild_parser", ".", "add_argument", "(", "'-a'", ",", "'--all'", ",", "action", "=", "'store_true'", ",", "help", "=", "'Rebuild all Pipelines'", ")", "rebuild_parser", ".", "add_argument", "(", "'project'", ",", "nargs", "=", "'?'", ",", "default", "=", "os", ".", "getenv", "(", "'REBUILD_PROJECT'", ")", ",", "help", "=", "'Project to rebuild, overrides $REBUILD_PROJECT'", ")" ]
Rebuild Pipeline subcommands.
[ "Rebuild", "Pipeline", "subcommands", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/__main__.py#L39-L49
6,127
foremast/foremast
src/foremast/__main__.py
add_autoscaling
def add_autoscaling(subparsers):
    """Auto Scaling Group Policy subcommands."""
    autoscaling_parser = subparsers.add_parser(
        'autoscaling', help=runner.create_scaling_policy.__doc__,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    autoscaling_parser.set_defaults(func=runner.create_scaling_policy)
python
def add_autoscaling(subparsers): """Auto Scaling Group Policy subcommands.""" autoscaling_parser = subparsers.add_parser( 'autoscaling', help=runner.create_scaling_policy.__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter) autoscaling_parser.set_defaults(func=runner.create_scaling_policy)
[ "def", "add_autoscaling", "(", "subparsers", ")", ":", "autoscaling_parser", "=", "subparsers", ".", "add_parser", "(", "'autoscaling'", ",", "help", "=", "runner", ".", "create_scaling_policy", ".", "__doc__", ",", "formatter_class", "=", "argparse", ".", "ArgumentDefaultsHelpFormatter", ")", "autoscaling_parser", ".", "set_defaults", "(", "func", "=", "runner", ".", "create_scaling_policy", ")" ]
Auto Scaling Group Policy subcommands.
[ "Auto", "Scaling", "Group", "Policy", "subcommands", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/__main__.py#L52-L58
6,128
foremast/foremast
src/foremast/__main__.py
add_validate
def add_validate(subparsers):
    """Validate Spinnaker setup."""
    validate_parser = subparsers.add_parser(
        'validate', help=add_validate.__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    validate_parser.set_defaults(func=validate_parser.print_help)

    validate_subparsers = validate_parser.add_subparsers(title='Testers')

    validate_all_parser = validate_subparsers.add_parser(
        'all', help=validate.validate_all.__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    validate_all_parser.set_defaults(func=validate.validate_all)

    validate_gate_parser = validate_subparsers.add_parser(
        'gate', help=validate.validate_gate.__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    validate_gate_parser.set_defaults(func=validate.validate_gate)
python
def add_validate(subparsers): """Validate Spinnaker setup.""" validate_parser = subparsers.add_parser( 'validate', help=add_validate.__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter) validate_parser.set_defaults(func=validate_parser.print_help) validate_subparsers = validate_parser.add_subparsers(title='Testers') validate_all_parser = validate_subparsers.add_parser( 'all', help=validate.validate_all.__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter) validate_all_parser.set_defaults(func=validate.validate_all) validate_gate_parser = validate_subparsers.add_parser( 'gate', help=validate.validate_gate.__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter) validate_gate_parser.set_defaults(func=validate.validate_gate)
[ "def", "add_validate", "(", "subparsers", ")", ":", "validate_parser", "=", "subparsers", ".", "add_parser", "(", "'validate'", ",", "help", "=", "add_validate", ".", "__doc__", ",", "formatter_class", "=", "argparse", ".", "ArgumentDefaultsHelpFormatter", ")", "validate_parser", ".", "set_defaults", "(", "func", "=", "validate_parser", ".", "print_help", ")", "validate_subparsers", "=", "validate_parser", ".", "add_subparsers", "(", "title", "=", "'Testers'", ")", "validate_all_parser", "=", "validate_subparsers", ".", "add_parser", "(", "'all'", ",", "help", "=", "validate", ".", "validate_all", ".", "__doc__", ",", "formatter_class", "=", "argparse", ".", "ArgumentDefaultsHelpFormatter", ")", "validate_all_parser", ".", "set_defaults", "(", "func", "=", "validate", ".", "validate_all", ")", "validate_gate_parser", "=", "validate_subparsers", ".", "add_parser", "(", "'gate'", ",", "help", "=", "validate", ".", "validate_gate", ".", "__doc__", ",", "formatter_class", "=", "argparse", ".", "ArgumentDefaultsHelpFormatter", ")", "validate_gate_parser", ".", "set_defaults", "(", "func", "=", "validate", ".", "validate_gate", ")" ]
Validate Spinnaker setup.
[ "Validate", "Spinnaker", "setup", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/__main__.py#L61-L75
6,129
foremast/foremast
src/foremast/pipeline/create_pipeline.py
SpinnakerPipeline.get_existing_pipelines
def get_existing_pipelines(self):
        """Get existing pipeline configs for specific application.

        Returns:
            str: Pipeline config json

        """
        url = "{0}/applications/{1}/pipelineConfigs".format(API_URL, self.app_name)
        resp = requests.get(url, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT)

        assert resp.ok, 'Failed to lookup pipelines for {0}: {1}'.format(self.app_name, resp.text)

        return resp.json()
python
def get_existing_pipelines(self): """Get existing pipeline configs for specific application. Returns: str: Pipeline config json """ url = "{0}/applications/{1}/pipelineConfigs".format(API_URL, self.app_name) resp = requests.get(url, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT) assert resp.ok, 'Failed to lookup pipelines for {0}: {1}'.format(self.app_name, resp.text) return resp.json()
[ "def", "get_existing_pipelines", "(", "self", ")", ":", "url", "=", "\"{0}/applications/{1}/pipelineConfigs\"", ".", "format", "(", "API_URL", ",", "self", ".", "app_name", ")", "resp", "=", "requests", ".", "get", "(", "url", ",", "verify", "=", "GATE_CA_BUNDLE", ",", "cert", "=", "GATE_CLIENT_CERT", ")", "assert", "resp", ".", "ok", ",", "'Failed to lookup pipelines for {0}: {1}'", ".", "format", "(", "self", ".", "app_name", ",", "resp", ".", "text", ")", "return", "resp", ".", "json", "(", ")" ]
Get existing pipeline configs for specific application.

Returns:
    str: Pipeline config json
[ "Get", "existing", "pipeline", "configs", "for", "specific", "application", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/pipeline/create_pipeline.py#L148-L159
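get_existing_pipelines is a thin wrapper over Gate's pipelineConfigs endpoint. The same request outside the class, with a placeholder Gate URL and application name (the real method also passes foremast's GATE_CA_BUNDLE and GATE_CLIENT_CERT settings):

import requests

gate_url = 'https://gate.example.com'          # placeholder for API_URL
url = '{0}/applications/{1}/pipelineConfigs'.format(gate_url, 'myapp')

resp = requests.get(url)
resp.raise_for_status()                        # the method above uses assert resp.ok instead
existing_names = [pipeline['name'] for pipeline in resp.json()]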
6,130
foremast/foremast
src/foremast/pipeline/create_pipeline.py
SpinnakerPipeline.compare_with_existing
def compare_with_existing(self, region='us-east-1', onetime=False):
        """Compare desired pipeline with existing pipelines.

        Args:
            region (str): Region of desired pipeline.
            onetime (bool): Looks for different pipeline if Onetime

        Returns:
            str: pipeline_id if existing, None if not.

        """
        pipelines = self.get_existing_pipelines()
        pipeline_id = None
        found = False
        for pipeline in pipelines:
            correct_app_and_region = (pipeline['application'] == self.app_name) and (region in pipeline['name'])
            if onetime:
                onetime_str = "(onetime-{})".format(self.environments[0])
                if correct_app_and_region and onetime_str in pipeline['name']:
                    found = True
            elif correct_app_and_region:
                found = True

            if found:
                self.log.info('Existing pipeline found - %s', pipeline['name'])
                pipeline_id = pipeline['id']
                break
        else:
            self.log.info('No existing pipeline found')

        return pipeline_id
python
def compare_with_existing(self, region='us-east-1', onetime=False): """Compare desired pipeline with existing pipelines. Args: region (str): Region of desired pipeline. onetime (bool): Looks for different pipeline if Onetime Returns: str: pipeline_id if existing, empty string of not. """ pipelines = self.get_existing_pipelines() pipeline_id = None found = False for pipeline in pipelines: correct_app_and_region = (pipeline['application'] == self.app_name) and (region in pipeline['name']) if onetime: onetime_str = "(onetime-{})".format(self.environments[0]) if correct_app_and_region and onetime_str in pipeline['name']: found = True elif correct_app_and_region: found = True if found: self.log.info('Existing pipeline found - %s', pipeline['name']) pipeline_id = pipeline['id'] break else: self.log.info('No existing pipeline found') return pipeline_id
[ "def", "compare_with_existing", "(", "self", ",", "region", "=", "'us-east-1'", ",", "onetime", "=", "False", ")", ":", "pipelines", "=", "self", ".", "get_existing_pipelines", "(", ")", "pipeline_id", "=", "None", "found", "=", "False", "for", "pipeline", "in", "pipelines", ":", "correct_app_and_region", "=", "(", "pipeline", "[", "'application'", "]", "==", "self", ".", "app_name", ")", "and", "(", "region", "in", "pipeline", "[", "'name'", "]", ")", "if", "onetime", ":", "onetime_str", "=", "\"(onetime-{})\"", ".", "format", "(", "self", ".", "environments", "[", "0", "]", ")", "if", "correct_app_and_region", "and", "onetime_str", "in", "pipeline", "[", "'name'", "]", ":", "found", "=", "True", "elif", "correct_app_and_region", ":", "found", "=", "True", "if", "found", ":", "self", ".", "log", ".", "info", "(", "'Existing pipeline found - %s'", ",", "pipeline", "[", "'name'", "]", ")", "pipeline_id", "=", "pipeline", "[", "'id'", "]", "break", "else", ":", "self", ".", "log", ".", "info", "(", "'No existing pipeline found'", ")", "return", "pipeline_id" ]
Compare desired pipeline with existing pipelines.

Args:
    region (str): Region of desired pipeline.
    onetime (bool): Looks for different pipeline if Onetime

Returns:
    str: pipeline_id if existing, None if not.
[ "Compare", "desired", "pipeline", "with", "existing", "pipelines", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/pipeline/create_pipeline.py#L161-L191
6,131
foremast/foremast
src/foremast/pipeline/create_pipeline.py
SpinnakerPipeline.create_pipeline
def create_pipeline(self):
        """Main wrapper for pipeline creation.

        1. Runs clean_pipelines to clean up existing ones
        2. determines which environments the pipeline needs
        3. gets all subnets for template rendering
        4. Renders all of the pipeline blocks as defined in configs
        5. Runs post_pipeline to create pipeline

        """
        clean_pipelines(app=self.app_name, settings=self.settings)

        pipeline_envs = self.environments
        self.log.debug('Envs from pipeline.json: %s', pipeline_envs)

        regions_envs = collections.defaultdict(list)
        for env in pipeline_envs:
            for region in self.settings[env]['regions']:
                regions_envs[region].append(env)
        self.log.info('Environments and Regions for Pipelines:\n%s', json.dumps(regions_envs, indent=4))

        subnets = None
        pipelines = {}
        for region, envs in regions_envs.items():
            self.generated.data.update({
                'region': region,
            })

            # TODO: Overrides for an environment no longer makes sense. Need to
            # provide override for entire Region possibly.
            pipelines[region] = self.render_wrapper(region=region)

            previous_env = None
            for env in envs:
                self.generated.data.update({
                    'env': env,
                })

                pipeline_block_data = {
                    "env": env,
                    "generated": self.generated,
                    "previous_env": previous_env,
                    "region": region,
                    "settings": self.settings[env][region],
                    "pipeline_data": self.settings['pipeline'],
                }

                if self.settings['pipeline']['type'] in EC2_PIPELINE_TYPES:
                    if not subnets:
                        subnets = get_subnets()
                    try:
                        region_subnets = {region: subnets[env][region]}
                    except KeyError:
                        self.log.info('%s is not available for %s.', env, region)
                        continue
                    pipeline_block_data['region_subnets'] = region_subnets

                block = construct_pipeline_block(**pipeline_block_data)
                pipelines[region]['stages'].extend(json.loads(block))

                previous_env = env

        self.log.debug('Assembled Pipelines:\n%s', pformat(pipelines))

        for region, pipeline in pipelines.items():
            renumerate_stages(pipeline)
            self.post_pipeline(pipeline)

        return True
python
def create_pipeline(self): """Main wrapper for pipeline creation. 1. Runs clean_pipelines to clean up existing ones 2. determines which environments the pipeline needs 3. gets all subnets for template rendering 4. Renders all of the pipeline blocks as defined in configs 5. Runs post_pipeline to create pipeline """ clean_pipelines(app=self.app_name, settings=self.settings) pipeline_envs = self.environments self.log.debug('Envs from pipeline.json: %s', pipeline_envs) regions_envs = collections.defaultdict(list) for env in pipeline_envs: for region in self.settings[env]['regions']: regions_envs[region].append(env) self.log.info('Environments and Regions for Pipelines:\n%s', json.dumps(regions_envs, indent=4)) subnets = None pipelines = {} for region, envs in regions_envs.items(): self.generated.data.update({ 'region': region, }) # TODO: Overrides for an environment no longer makes sense. Need to # provide override for entire Region possibly. pipelines[region] = self.render_wrapper(region=region) previous_env = None for env in envs: self.generated.data.update({ 'env': env, }) pipeline_block_data = { "env": env, "generated": self.generated, "previous_env": previous_env, "region": region, "settings": self.settings[env][region], "pipeline_data": self.settings['pipeline'], } if self.settings['pipeline']['type'] in EC2_PIPELINE_TYPES: if not subnets: subnets = get_subnets() try: region_subnets = {region: subnets[env][region]} except KeyError: self.log.info('%s is not available for %s.', env, region) continue pipeline_block_data['region_subnets'] = region_subnets block = construct_pipeline_block(**pipeline_block_data) pipelines[region]['stages'].extend(json.loads(block)) previous_env = env self.log.debug('Assembled Pipelines:\n%s', pformat(pipelines)) for region, pipeline in pipelines.items(): renumerate_stages(pipeline) self.post_pipeline(pipeline) return True
[ "def", "create_pipeline", "(", "self", ")", ":", "clean_pipelines", "(", "app", "=", "self", ".", "app_name", ",", "settings", "=", "self", ".", "settings", ")", "pipeline_envs", "=", "self", ".", "environments", "self", ".", "log", ".", "debug", "(", "'Envs from pipeline.json: %s'", ",", "pipeline_envs", ")", "regions_envs", "=", "collections", ".", "defaultdict", "(", "list", ")", "for", "env", "in", "pipeline_envs", ":", "for", "region", "in", "self", ".", "settings", "[", "env", "]", "[", "'regions'", "]", ":", "regions_envs", "[", "region", "]", ".", "append", "(", "env", ")", "self", ".", "log", ".", "info", "(", "'Environments and Regions for Pipelines:\\n%s'", ",", "json", ".", "dumps", "(", "regions_envs", ",", "indent", "=", "4", ")", ")", "subnets", "=", "None", "pipelines", "=", "{", "}", "for", "region", ",", "envs", "in", "regions_envs", ".", "items", "(", ")", ":", "self", ".", "generated", ".", "data", ".", "update", "(", "{", "'region'", ":", "region", ",", "}", ")", "# TODO: Overrides for an environment no longer makes sense. Need to", "# provide override for entire Region possibly.", "pipelines", "[", "region", "]", "=", "self", ".", "render_wrapper", "(", "region", "=", "region", ")", "previous_env", "=", "None", "for", "env", "in", "envs", ":", "self", ".", "generated", ".", "data", ".", "update", "(", "{", "'env'", ":", "env", ",", "}", ")", "pipeline_block_data", "=", "{", "\"env\"", ":", "env", ",", "\"generated\"", ":", "self", ".", "generated", ",", "\"previous_env\"", ":", "previous_env", ",", "\"region\"", ":", "region", ",", "\"settings\"", ":", "self", ".", "settings", "[", "env", "]", "[", "region", "]", ",", "\"pipeline_data\"", ":", "self", ".", "settings", "[", "'pipeline'", "]", ",", "}", "if", "self", ".", "settings", "[", "'pipeline'", "]", "[", "'type'", "]", "in", "EC2_PIPELINE_TYPES", ":", "if", "not", "subnets", ":", "subnets", "=", "get_subnets", "(", ")", "try", ":", "region_subnets", "=", "{", "region", ":", "subnets", "[", "env", "]", "[", "region", "]", "}", "except", "KeyError", ":", "self", ".", "log", ".", "info", "(", "'%s is not available for %s.'", ",", "env", ",", "region", ")", "continue", "pipeline_block_data", "[", "'region_subnets'", "]", "=", "region_subnets", "block", "=", "construct_pipeline_block", "(", "*", "*", "pipeline_block_data", ")", "pipelines", "[", "region", "]", "[", "'stages'", "]", ".", "extend", "(", "json", ".", "loads", "(", "block", ")", ")", "previous_env", "=", "env", "self", ".", "log", ".", "debug", "(", "'Assembled Pipelines:\\n%s'", ",", "pformat", "(", "pipelines", ")", ")", "for", "region", ",", "pipeline", "in", "pipelines", ".", "items", "(", ")", ":", "renumerate_stages", "(", "pipeline", ")", "self", ".", "post_pipeline", "(", "pipeline", ")", "return", "True" ]
Main wrapper for pipeline creation.

1. Runs clean_pipelines to clean up existing ones
2. determines which environments the pipeline needs
3. gets all subnets for template rendering
4. Renders all of the pipeline blocks as defined in configs
5. Runs post_pipeline to create pipeline
[ "Main", "wrapper", "for", "pipeline", "creation", ".", "1", ".", "Runs", "clean_pipelines", "to", "clean", "up", "existing", "ones", "2", ".", "determines", "which", "environments", "the", "pipeline", "needs", "3", ".", "gets", "all", "subnets", "for", "template", "rendering", "4", ".", "Renders", "all", "of", "the", "pipeline", "blocks", "as", "defined", "in", "configs", "5", ".", "Runs", "post_pipeline", "to", "create", "pipeline" ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/pipeline/create_pipeline.py#L193-L259
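Step 2 of create_pipeline inverts the env to regions mapping from pipeline.json into region to envs so stages can be assembled per region. A runnable miniature of that grouping, with made-up settings:

import collections
import json

settings = {
    'dev': {'regions': ['us-east-1', 'us-west-2']},
    'stage': {'regions': ['us-east-1']},
}

regions_envs = collections.defaultdict(list)
for env in ['dev', 'stage']:
    for region in settings[env]['regions']:
        regions_envs[region].append(env)

print(json.dumps(regions_envs, indent=4))
# {"us-east-1": ["dev", "stage"], "us-west-2": ["dev"]}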
6,132
foremast/foremast
src/foremast/utils/lookups.py
ami_lookup
def ami_lookup(region='us-east-1', name='tomcat8'):
    """Look up AMI ID.

    Use _name_ to find AMI ID. If no ami_base_url or gitlab_token is provided,
    _name_ is returned as the ami id.

    Args:
        region (str): AWS Region to find AMI ID.
        name (str): Simple AMI base name to lookup.

    Returns:
        str: AMI ID for _name_ in _region_.

    """
    if AMI_JSON_URL:
        ami_dict = _get_ami_dict(AMI_JSON_URL)
        ami_id = ami_dict[region][name]
    elif GITLAB_TOKEN:
        warn_user('Use AMI_JSON_URL feature instead.')
        ami_contents = _get_ami_file(region=region)
        ami_dict = json.loads(ami_contents)
        ami_id = ami_dict[name]
    else:
        ami_id = name

    LOG.info('Using AMI: %s', ami_id)

    return ami_id
python
def ami_lookup(region='us-east-1', name='tomcat8'): """Look up AMI ID. Use _name_ to find AMI ID. If no ami_base_url or gitlab_token is provided, _name_ is returned as the ami id. Args: region (str): AWS Region to find AMI ID. name (str): Simple AMI base name to lookup. Returns: str: AMI ID for _name_ in _region_. """ if AMI_JSON_URL: ami_dict = _get_ami_dict(AMI_JSON_URL) ami_id = ami_dict[region][name] elif GITLAB_TOKEN: warn_user('Use AMI_JSON_URL feature instead.') ami_contents = _get_ami_file(region=region) ami_dict = json.loads(ami_contents) ami_id = ami_dict[name] else: ami_id = name LOG.info('Using AMI: %s', ami_id) return ami_id
[ "def", "ami_lookup", "(", "region", "=", "'us-east-1'", ",", "name", "=", "'tomcat8'", ")", ":", "if", "AMI_JSON_URL", ":", "ami_dict", "=", "_get_ami_dict", "(", "AMI_JSON_URL", ")", "ami_id", "=", "ami_dict", "[", "region", "]", "[", "name", "]", "elif", "GITLAB_TOKEN", ":", "warn_user", "(", "'Use AMI_JSON_URL feature instead.'", ")", "ami_contents", "=", "_get_ami_file", "(", "region", "=", "region", ")", "ami_dict", "=", "json", ".", "loads", "(", "ami_contents", ")", "ami_id", "=", "ami_dict", "[", "name", "]", "else", ":", "ami_id", "=", "name", "LOG", ".", "info", "(", "'Using AMI: %s'", ",", "ami_id", ")", "return", "ami_id" ]
Look up AMI ID.

Use _name_ to find AMI ID. If no ami_base_url or gitlab_token is provided,
_name_ is returned as the ami id.

Args:
    region (str): AWS Region to find AMI ID.
    name (str): Simple AMI base name to lookup.

Returns:
    str: AMI ID for _name_ in _region_.
[ "Look", "up", "AMI", "ID", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/lookups.py#L32-L59
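ami_lookup indexes the fetched document as ami_dict[region][name], so a JSON file served at AMI_JSON_URL is expected to be keyed by region and then by base name. An inferred example document (the AMI IDs are invented):

example_ami_json = {
    'us-east-1': {'tomcat8': 'ami-0123abcd'},
    'us-west-2': {'tomcat8': 'ami-0456efgh'},
}

assert example_ami_json['us-east-1']['tomcat8'].startswith('ami-')
# With neither AMI_JSON_URL nor GITLAB_TOKEN configured, ami_lookup() simply
# returns the base name ('tomcat8') unchanged.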
6,133
foremast/foremast
src/foremast/utils/lookups.py
_get_ami_file
def _get_ami_file(region='us-east-1'):
    """Get file from Gitlab.

    Args:
        region (str): AWS Region to find AMI ID.

    Returns:
        str: Contents in json format.

    """
    LOG.info("Getting AMI from Gitlab")
    lookup = FileLookup(git_short='devops/ansible')
    filename = 'scripts/{0}.json'.format(region)
    ami_contents = lookup.remote_file(filename=filename, branch='master')
    LOG.debug('AMI file contents in %s: %s', filename, ami_contents)
    return ami_contents
python
def _get_ami_file(region='us-east-1'): """Get file from Gitlab. Args: region (str): AWS Region to find AMI ID. Returns: str: Contents in json format. """ LOG.info("Getting AMI from Gitlab") lookup = FileLookup(git_short='devops/ansible') filename = 'scripts/{0}.json'.format(region) ami_contents = lookup.remote_file(filename=filename, branch='master') LOG.debug('AMI file contents in %s: %s', filename, ami_contents) return ami_contents
[ "def", "_get_ami_file", "(", "region", "=", "'us-east-1'", ")", ":", "LOG", ".", "info", "(", "\"Getting AMI from Gitlab\"", ")", "lookup", "=", "FileLookup", "(", "git_short", "=", "'devops/ansible'", ")", "filename", "=", "'scripts/{0}.json'", ".", "format", "(", "region", ")", "ami_contents", "=", "lookup", ".", "remote_file", "(", "filename", "=", "filename", ",", "branch", "=", "'master'", ")", "LOG", ".", "debug", "(", "'AMI file contents in %s: %s'", ",", "filename", ",", "ami_contents", ")", "return", "ami_contents" ]
Get file from Gitlab. Args: region (str): AWS Region to find AMI ID. Returns: str: Contents in json format.
[ "Get", "file", "from", "Gitlab", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/lookups.py#L62-L77
6,134
foremast/foremast
src/foremast/utils/lookups.py
_get_ami_dict
def _get_ami_dict(json_url): """Get ami from a web url. Args: json_url (str): URL of the JSON file with AMI information. Returns: dict: Contents in dictionary format. """ LOG.info("Getting AMI from %s", json_url) response = requests.get(json_url) assert response.ok, "Error getting ami info from {}".format(json_url) ami_dict = response.json() LOG.debug('AMI json contents: %s', ami_dict) return ami_dict
python
def _get_ami_dict(json_url): """Get ami from a web url. Args: json_url (str): URL of the JSON file with AMI information. Returns: dict: Contents in dictionary format. """ LOG.info("Getting AMI from %s", json_url) response = requests.get(json_url) assert response.ok, "Error getting ami info from {}".format(json_url) ami_dict = response.json() LOG.debug('AMI json contents: %s', ami_dict) return ami_dict
[ "def", "_get_ami_dict", "(", "json_url", ")", ":", "LOG", ".", "info", "(", "\"Getting AMI from %s\"", ",", "json_url", ")", "response", "=", "requests", ".", "get", "(", "json_url", ")", "assert", "response", ".", "ok", ",", "\"Error getting ami info from {}\"", ".", "format", "(", "json_url", ")", "ami_dict", "=", "response", ".", "json", "(", ")", "LOG", ".", "debug", "(", "'AMI json contents: %s'", ",", "ami_dict", ")", "return", "ami_dict" ]
Get ami from a web url. Args: json_url (str): URL of the JSON file with AMI information. Returns: dict: Contents in dictionary format.
[ "Get", "ami", "from", "a", "web", "url", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/lookups.py#L80-L95
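The JSON index consumed by _get_ami_dict is keyed by region and then by base name; a standalone sketch of the same fetch using requests (the URL and layout below are placeholders, not Foremast defaults):

    import requests

    json_url = 'https://example.com/ami-index.json'  # placeholder URL
    # Expected layout, as consumed by ami_lookup: {"us-east-1": {"tomcat8": "ami-0123..."}, ...}
    response = requests.get(json_url)
    assert response.ok, 'Error getting ami info from {}'.format(json_url)
    ami_dict = response.json()
    print(ami_dict.get('us-east-1', {}))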
6,135
foremast/foremast
src/foremast/utils/lookups.py
FileLookup.get_gitlab_project
def get_gitlab_project(self): """Get numerical GitLab Project ID. Returns: int: Project ID number. Raises: foremast.exceptions.GitLabApiError: GitLab responded with bad status code. """ self.server = gitlab.Gitlab(GIT_URL, private_token=GITLAB_TOKEN, api_version=4) project = self.server.projects.get(self.git_short) if not project: raise GitLabApiError('Could not get Project "{0}" from GitLab API.'.format(self.git_short)) self.project = project return self.project
python
def get_gitlab_project(self): """Get numerical GitLab Project ID. Returns: int: Project ID number. Raises: foremast.exceptions.GitLabApiError: GitLab responded with bad status code. """ self.server = gitlab.Gitlab(GIT_URL, private_token=GITLAB_TOKEN, api_version=4) project = self.server.projects.get(self.git_short) if not project: raise GitLabApiError('Could not get Project "{0}" from GitLab API.'.format(self.git_short)) self.project = project return self.project
[ "def", "get_gitlab_project", "(", "self", ")", ":", "self", ".", "server", "=", "gitlab", ".", "Gitlab", "(", "GIT_URL", ",", "private_token", "=", "GITLAB_TOKEN", ",", "api_version", "=", "4", ")", "project", "=", "self", ".", "server", ".", "projects", ".", "get", "(", "self", ".", "git_short", ")", "if", "not", "project", ":", "raise", "GitLabApiError", "(", "'Could not get Project \"{0}\" from GitLab API.'", ".", "format", "(", "self", ".", "git_short", ")", ")", "self", ".", "project", "=", "project", "return", "self", ".", "project" ]
Get numerical GitLab Project ID. Returns: int: Project ID number. Raises: foremast.exceptions.GitLabApiError: GitLab responded with bad status code.
[ "Get", "numerical", "GitLab", "Project", "ID", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/lookups.py#L121-L139
6,136
foremast/foremast
src/foremast/utils/lookups.py
FileLookup.local_file
def local_file(self, filename): """Read the local file in _self.runway_dir_. Args: filename (str): Name of file to retrieve relative to root of _runway_dir_. Returns: str: Contents of local file. Raises: FileNotFoundError: Requested file missing. """ LOG.info('Retrieving "%s" from "%s".', filename, self.runway_dir) file_contents = '' file_path = os.path.join(self.runway_dir, filename) try: with open(file_path, 'rt') as lookup_file: file_contents = lookup_file.read() except FileNotFoundError: LOG.warning('File missing "%s".', file_path) raise LOG.debug('Local file contents:\n%s', file_contents) return file_contents
python
def local_file(self, filename): """Read the local file in _self.runway_dir_. Args: filename (str): Name of file to retrieve relative to root of _runway_dir_. Returns: str: Contents of local file. Raises: FileNotFoundError: Requested file missing. """ LOG.info('Retrieving "%s" from "%s".', filename, self.runway_dir) file_contents = '' file_path = os.path.join(self.runway_dir, filename) try: with open(file_path, 'rt') as lookup_file: file_contents = lookup_file.read() except FileNotFoundError: LOG.warning('File missing "%s".', file_path) raise LOG.debug('Local file contents:\n%s', file_contents) return file_contents
[ "def", "local_file", "(", "self", ",", "filename", ")", ":", "LOG", ".", "info", "(", "'Retrieving \"%s\" from \"%s\".'", ",", "filename", ",", "self", ".", "runway_dir", ")", "file_contents", "=", "''", "file_path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "runway_dir", ",", "filename", ")", "try", ":", "with", "open", "(", "file_path", ",", "'rt'", ")", "as", "lookup_file", ":", "file_contents", "=", "lookup_file", ".", "read", "(", ")", "except", "FileNotFoundError", ":", "LOG", ".", "warning", "(", "'File missing \"%s\".'", ",", "file_path", ")", "raise", "LOG", ".", "debug", "(", "'Local file contents:\\n%s'", ",", "file_contents", ")", "return", "file_contents" ]
Read the local file in _self.runway_dir_. Args: filename (str): Name of file to retrieve relative to root of _runway_dir_. Returns: str: Contents of local file. Raises: FileNotFoundError: Requested file missing.
[ "Read", "the", "local", "file", "in", "_self", ".", "runway_dir_", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/lookups.py#L141-L169
6,137
foremast/foremast
src/foremast/utils/lookups.py
FileLookup.remote_file
def remote_file(self, branch='master', filename=''): """Read the remote file on Git Server. Args: branch (str): Git Branch to find file. filename (str): Name of file to retrieve relative to root of repository. Returns: str: Contents of remote file. Raises: FileNotFoundError: Requested file missing. """ LOG.info('Retrieving "%s" from "%s".', filename, self.git_short) file_contents = '' try: file_blob = self.project.files.get(file_path=filename, ref=branch) except gitlab.exceptions.GitlabGetError: file_blob = None LOG.debug('GitLab file response:\n%s', file_blob) if not file_blob: msg = 'Project "{0}" is missing file "{1}" in "{2}" branch.'.format(self.git_short, filename, branch) LOG.warning(msg) raise FileNotFoundError(msg) else: file_contents = b64decode(file_blob.content).decode() LOG.debug('Remote file contents:\n%s', file_contents) return file_contents
python
def remote_file(self, branch='master', filename=''): """Read the remote file on Git Server. Args: branch (str): Git Branch to find file. filename (str): Name of file to retrieve relative to root of repository. Returns: str: Contents of remote file. Raises: FileNotFoundError: Requested file missing. """ LOG.info('Retrieving "%s" from "%s".', filename, self.git_short) file_contents = '' try: file_blob = self.project.files.get(file_path=filename, ref=branch) except gitlab.exceptions.GitlabGetError: file_blob = None LOG.debug('GitLab file response:\n%s', file_blob) if not file_blob: msg = 'Project "{0}" is missing file "{1}" in "{2}" branch.'.format(self.git_short, filename, branch) LOG.warning(msg) raise FileNotFoundError(msg) else: file_contents = b64decode(file_blob.content).decode() LOG.debug('Remote file contents:\n%s', file_contents) return file_contents
[ "def", "remote_file", "(", "self", ",", "branch", "=", "'master'", ",", "filename", "=", "''", ")", ":", "LOG", ".", "info", "(", "'Retrieving \"%s\" from \"%s\".'", ",", "filename", ",", "self", ".", "git_short", ")", "file_contents", "=", "''", "try", ":", "file_blob", "=", "self", ".", "project", ".", "files", ".", "get", "(", "file_path", "=", "filename", ",", "ref", "=", "branch", ")", "except", "gitlab", ".", "exceptions", ".", "GitlabGetError", ":", "file_blob", "=", "None", "LOG", ".", "debug", "(", "'GitLab file response:\\n%s'", ",", "file_blob", ")", "if", "not", "file_blob", ":", "msg", "=", "'Project \"{0}\" is missing file \"{1}\" in \"{2}\" branch.'", ".", "format", "(", "self", ".", "git_short", ",", "filename", ",", "branch", ")", "LOG", ".", "warning", "(", "msg", ")", "raise", "FileNotFoundError", "(", "msg", ")", "else", ":", "file_contents", "=", "b64decode", "(", "file_blob", ".", "content", ")", ".", "decode", "(", ")", "LOG", ".", "debug", "(", "'Remote file contents:\\n%s'", ",", "file_contents", ")", "return", "file_contents" ]
Read the remote file on Git Server. Args: branch (str): Git Branch to find file. filename (str): Name of file to retrieve relative to root of repository. Returns: str: Contents of remote file. Raises: FileNotFoundError: Requested file missing.
[ "Read", "the", "remote", "file", "on", "Git", "Server", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/lookups.py#L171-L205
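Usage sketch for FileLookup.remote_file (hypothetical; assumes foremast.utils.lookups as the import path and that GIT_URL and GITLAB_TOKEN are configured so the GitLab client can authenticate):

    from foremast.utils.lookups import FileLookup  # assumed import path

    lookup = FileLookup(git_short='devops/ansible')  # GitLab group/project short name
    try:
        contents = lookup.remote_file(filename='scripts/us-east-1.json', branch='master')
    except FileNotFoundError:
        contents = '{}'  # file missing on that branch
    print(contents)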
6,138
foremast/foremast
src/foremast/utils/banners.py
banner
def banner(text, border='=', width=80): """Center _text_ in a banner _width_ wide with _border_ characters. Args: text (str): What to write in the banner border (str): Border character width (int): How long the border should be """ text_padding = '{0:^%d}' % (width) LOG.info(border * width) LOG.info(text_padding.format(text)) LOG.info(border * width)
python
def banner(text, border='=', width=80): """Center _text_ in a banner _width_ wide with _border_ characters. Args: text (str): What to write in the banner border (str): Border character width (int): How long the border should be """ text_padding = '{0:^%d}' % (width) LOG.info(border * width) LOG.info(text_padding.format(text)) LOG.info(border * width)
[ "def", "banner", "(", "text", ",", "border", "=", "'='", ",", "width", "=", "80", ")", ":", "text_padding", "=", "'{0:^%d}'", "%", "(", "width", ")", "LOG", ".", "info", "(", "border", "*", "width", ")", "LOG", ".", "info", "(", "text_padding", ".", "format", "(", "text", ")", ")", "LOG", ".", "info", "(", "border", "*", "width", ")" ]
Center _text_ in a banner _width_ wide with _border_ characters. Args: text (str): What to write in the banner border (str): Border character width (int): How long the border should be
[ "Center", "_text_", "in", "a", "banner", "_width_", "wide", "with", "_border_", "characters", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/banners.py#L30-L41
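banner() writes through the module logger at INFO, so a handler must be configured before anything appears; a minimal sketch assuming foremast.utils.banners as the import path:

    import logging
    from foremast.utils.banners import banner  # assumed import path

    logging.basicConfig(format='%(message)s', level=logging.INFO)
    # Logs a 40-character border, the centered title, then another border.
    banner('Creating ELB', border='*', width=40)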
6,139
foremast/foremast
src/foremast/utils/get_sns_topic_arn.py
get_sns_topic_arn
def get_sns_topic_arn(topic_name, account, region): """Get SNS topic ARN. Args: topic_name (str): Name of the topic to lookup. account (str): Environment, e.g. dev region (str): Region name, e.g. us-east-1 Returns: str: ARN for requested topic name """ if topic_name.count(':') == 5 and topic_name.startswith('arn:aws:sns:'): return topic_name session = boto3.Session(profile_name=account, region_name=region) sns_client = session.client('sns') topics = sns_client.list_topics()['Topics'] matched_topic = None for topic in topics: topic_arn = topic['TopicArn'] if topic_name == topic_arn.split(':')[-1]: matched_topic = topic_arn break else: LOG.critical("No topic with name %s found.", topic_name) raise SNSTopicNotFound('No topic with name {0} found'.format(topic_name)) return matched_topic
python
def get_sns_topic_arn(topic_name, account, region): """Get SNS topic ARN. Args: topic_name (str): Name of the topic to lookup. account (str): Environment, e.g. dev region (str): Region name, e.g. us-east-1 Returns: str: ARN for requested topic name """ if topic_name.count(':') == 5 and topic_name.startswith('arn:aws:sns:'): return topic_name session = boto3.Session(profile_name=account, region_name=region) sns_client = session.client('sns') topics = sns_client.list_topics()['Topics'] matched_topic = None for topic in topics: topic_arn = topic['TopicArn'] if topic_name == topic_arn.split(':')[-1]: matched_topic = topic_arn break else: LOG.critical("No topic with name %s found.", topic_name) raise SNSTopicNotFound('No topic with name {0} found'.format(topic_name)) return matched_topic
[ "def", "get_sns_topic_arn", "(", "topic_name", ",", "account", ",", "region", ")", ":", "if", "topic_name", ".", "count", "(", "':'", ")", "==", "5", "and", "topic_name", ".", "startswith", "(", "'arn:aws:sns:'", ")", ":", "return", "topic_name", "session", "=", "boto3", ".", "Session", "(", "profile_name", "=", "account", ",", "region_name", "=", "region", ")", "sns_client", "=", "session", ".", "client", "(", "'sns'", ")", "topics", "=", "sns_client", ".", "list_topics", "(", ")", "[", "'Topics'", "]", "matched_topic", "=", "None", "for", "topic", "in", "topics", ":", "topic_arn", "=", "topic", "[", "'TopicArn'", "]", "if", "topic_name", "==", "topic_arn", ".", "split", "(", "':'", ")", "[", "-", "1", "]", ":", "matched_topic", "=", "topic_arn", "break", "else", ":", "LOG", ".", "critical", "(", "\"No topic with name %s found.\"", ",", "topic_name", ")", "raise", "SNSTopicNotFound", "(", "'No topic with name {0} found'", ".", "format", "(", "topic_name", ")", ")", "return", "matched_topic" ]
Get SNS topic ARN. Args: topic_name (str): Name of the topic to lookup. account (str): Environment, e.g. dev region (str): Region name, e.g. us-east-1 Returns: str: ARN for requested topic name
[ "Get", "SNS", "topic", "ARN", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/get_sns_topic_arn.py#L11-L39
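Usage sketch for get_sns_topic_arn (hypothetical; assumes foremast.utils.get_sns_topic_arn as the import path and AWS credentials under a 'dev' boto3 profile):

    from foremast.utils.get_sns_topic_arn import get_sns_topic_arn  # assumed import path

    # A full ARN (arn:aws:sns: prefix with five colons) is returned unchanged, no API call.
    arn = get_sns_topic_arn('arn:aws:sns:us-east-1:123456789012:deploy-events',
                            account='dev', region='us-east-1')

    # A bare name triggers a boto3 list_topics() lookup; SNSTopicNotFound is raised on a miss.
    arn = get_sns_topic_arn('deploy-events', account='dev', region='us-east-1')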
6,140
foremast/foremast
src/foremast/slacknotify/slack_notification.py
SlackNotification.notify_slack_channel
def notify_slack_channel(self): """Post message to a defined Slack channel.""" message = get_template(template_file='slack/pipeline-prepare-ran.j2', info=self.info) if self.settings['pipeline']['notifications']['slack']: post_slack_message( message=message, channel=self.settings['pipeline']['notifications']['slack'], username='pipeline-bot', icon_emoji=':gear:')
python
def notify_slack_channel(self): """Post message to a defined Slack channel.""" message = get_template(template_file='slack/pipeline-prepare-ran.j2', info=self.info) if self.settings['pipeline']['notifications']['slack']: post_slack_message( message=message, channel=self.settings['pipeline']['notifications']['slack'], username='pipeline-bot', icon_emoji=':gear:')
[ "def", "notify_slack_channel", "(", "self", ")", ":", "message", "=", "get_template", "(", "template_file", "=", "'slack/pipeline-prepare-ran.j2'", ",", "info", "=", "self", ".", "info", ")", "if", "self", ".", "settings", "[", "'pipeline'", "]", "[", "'notifications'", "]", "[", "'slack'", "]", ":", "post_slack_message", "(", "message", "=", "message", ",", "channel", "=", "self", ".", "settings", "[", "'pipeline'", "]", "[", "'notifications'", "]", "[", "'slack'", "]", ",", "username", "=", "'pipeline-bot'", ",", "icon_emoji", "=", "':gear:'", ")" ]
Post message to a defined Slack channel.
[ "Post", "message", "to", "a", "defined", "Slack", "channel", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/slacknotify/slack_notification.py#L54-L63
6,141
foremast/foremast
src/foremast/utils/properties.py
get_properties
def get_properties(properties_file='raw.properties.json', env=None, region=None): """Get contents of _properties_file_ for the _env_. Args: properties_file (str): File name of `create-configs` JSON output. env (str): Environment to read optionally. region (str): Region to get specific configs for. Returns: dict: JSON loaded Application properties for _env_. None: Given _env_ was not found in `create-configs` JSON output. """ with open(properties_file, 'rt') as file_handle: properties = json.load(file_handle) env_properties = properties.get(env, properties) contents = env_properties.get(region, env_properties) LOG.debug('Found properties for %s:\n%s', env, contents) return contents
python
def get_properties(properties_file='raw.properties.json', env=None, region=None): """Get contents of _properties_file_ for the _env_. Args: properties_file (str): File name of `create-configs` JSON output. env (str): Environment to read optionally. region (str): Region to get specific configs for. Returns: dict: JSON loaded Application properties for _env_. None: Given _env_ was not found in `create-configs` JSON output. """ with open(properties_file, 'rt') as file_handle: properties = json.load(file_handle) env_properties = properties.get(env, properties) contents = env_properties.get(region, env_properties) LOG.debug('Found properties for %s:\n%s', env, contents) return contents
[ "def", "get_properties", "(", "properties_file", "=", "'raw.properties.json'", ",", "env", "=", "None", ",", "region", "=", "None", ")", ":", "with", "open", "(", "properties_file", ",", "'rt'", ")", "as", "file_handle", ":", "properties", "=", "json", ".", "load", "(", "file_handle", ")", "env_properties", "=", "properties", ".", "get", "(", "env", ",", "properties", ")", "contents", "=", "env_properties", ".", "get", "(", "region", ",", "env_properties", ")", "LOG", ".", "debug", "(", "'Found properties for %s:\\n%s'", ",", "env", ",", "contents", ")", "return", "contents" ]
Get contents of _properties_file_ for the _env_. Args: properties_file (str): File name of `create-configs` JSON output. env (str): Environment to read optionally. region (str): Region to get specific configs for. Returns: dict: JSON loaded Application properties for _env_. None: Given _env_ was not found in `create-configs` JSON output.
[ "Get", "contents", "of", "_properties_file_", "for", "the", "_env_", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/properties.py#L23-L42
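get_properties treats the env and region keys as optional, falling back to the wider document when either is missing; a hedged sketch using a hypothetical raw.properties.json written on the spot (foremast.utils.properties is the assumed import path):

    import json
    from foremast.utils.properties import get_properties  # assumed import path

    # Hypothetical create-configs output, written just for this sketch.
    sample = {'dev': {'us-east-1': {'app': {'app_name': 'coreforrest'}}}}
    with open('raw.properties.json', 'wt') as file_handle:
        json.dump(sample, file_handle)

    props = get_properties(env='dev', region='us-east-1')
    print(props['app']['app_name'])  # -> 'coreforrest'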
6,142
foremast/foremast
src/foremast/elb/destroy_elb/__main__.py
main
def main(): """Destroy any ELB related Resources.""" logging.basicConfig(format=LOGGING_FORMAT) parser = argparse.ArgumentParser(description=main.__doc__) add_debug(parser) add_app(parser) add_env(parser) add_region(parser) args = parser.parse_args() logging.getLogger(__package__.split('.')[0]).setLevel(args.debug) assert destroy_elb(**vars(args))
python
def main(): """Destroy any ELB related Resources.""" logging.basicConfig(format=LOGGING_FORMAT) parser = argparse.ArgumentParser(description=main.__doc__) add_debug(parser) add_app(parser) add_env(parser) add_region(parser) args = parser.parse_args() logging.getLogger(__package__.split('.')[0]).setLevel(args.debug) assert destroy_elb(**vars(args))
[ "def", "main", "(", ")", ":", "logging", ".", "basicConfig", "(", "format", "=", "LOGGING_FORMAT", ")", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "main", ".", "__doc__", ")", "add_debug", "(", "parser", ")", "add_app", "(", "parser", ")", "add_env", "(", "parser", ")", "add_region", "(", "parser", ")", "args", "=", "parser", ".", "parse_args", "(", ")", "logging", ".", "getLogger", "(", "__package__", ".", "split", "(", "'.'", ")", "[", "0", "]", ")", ".", "setLevel", "(", "args", ".", "debug", ")", "assert", "destroy_elb", "(", "*", "*", "vars", "(", "args", ")", ")" ]
Destroy any ELB related Resources.
[ "Destroy", "any", "ELB", "related", "Resources", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/elb/destroy_elb/__main__.py#L27-L40
6,143
foremast/foremast
src/foremast/utils/security_group.py
get_security_group_id
def get_security_group_id(name='', env='', region=''): """Get a security group ID. Args: name (str): Security Group name to find. env (str): Deployment environment to search. region (str): AWS Region to search. Returns: str: ID of Security Group, e.g. sg-xxxx. Raises: AssertionError: Call to Gate API was not successful. SpinnakerSecurityGroupError: Security Group _name_ was not found for _env_ in _region_. """ vpc_id = get_vpc_id(env, region) LOG.info('Find %s sg in %s [%s] in %s', name, env, region, vpc_id) url = '{0}/securityGroups/{1}/{2}/{3}?vpcId={4}'.format(API_URL, env, region, name, vpc_id) response = requests.get(url, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT) assert response.ok result = response.json() try: security_group_id = result['id'] except KeyError: msg = 'Security group ({0}) not found'.format(name) raise SpinnakerSecurityGroupError(msg) LOG.info('Found: %s', security_group_id) return security_group_id
python
def get_security_group_id(name='', env='', region=''): """Get a security group ID. Args: name (str): Security Group name to find. env (str): Deployment environment to search. region (str): AWS Region to search. Returns: str: ID of Security Group, e.g. sg-xxxx. Raises: AssertionError: Call to Gate API was not successful. SpinnakerSecurityGroupError: Security Group _name_ was not found for _env_ in _region_. """ vpc_id = get_vpc_id(env, region) LOG.info('Find %s sg in %s [%s] in %s', name, env, region, vpc_id) url = '{0}/securityGroups/{1}/{2}/{3}?vpcId={4}'.format(API_URL, env, region, name, vpc_id) response = requests.get(url, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT) assert response.ok result = response.json() try: security_group_id = result['id'] except KeyError: msg = 'Security group ({0}) not found'.format(name) raise SpinnakerSecurityGroupError(msg) LOG.info('Found: %s', security_group_id) return security_group_id
[ "def", "get_security_group_id", "(", "name", "=", "''", ",", "env", "=", "''", ",", "region", "=", "''", ")", ":", "vpc_id", "=", "get_vpc_id", "(", "env", ",", "region", ")", "LOG", ".", "info", "(", "'Find %s sg in %s [%s] in %s'", ",", "name", ",", "env", ",", "region", ",", "vpc_id", ")", "url", "=", "'{0}/securityGroups/{1}/{2}/{3}?vpcId={4}'", ".", "format", "(", "API_URL", ",", "env", ",", "region", ",", "name", ",", "vpc_id", ")", "response", "=", "requests", ".", "get", "(", "url", ",", "verify", "=", "GATE_CA_BUNDLE", ",", "cert", "=", "GATE_CLIENT_CERT", ")", "assert", "response", ".", "ok", "result", "=", "response", ".", "json", "(", ")", "try", ":", "security_group_id", "=", "result", "[", "'id'", "]", "except", "KeyError", ":", "msg", "=", "'Security group ({0}) not found'", ".", "format", "(", "name", ")", "raise", "SpinnakerSecurityGroupError", "(", "msg", ")", "LOG", ".", "info", "(", "'Found: %s'", ",", "security_group_id", ")", "return", "security_group_id" ]
Get a security group ID. Args: name (str): Security Group name to find. env (str): Deployment environment to search. region (str): AWS Region to search. Returns: str: ID of Security Group, e.g. sg-xxxx. Raises: AssertionError: Call to Gate API was not successful. SpinnakerSecurityGroupError: Security Group _name_ was not found for _env_ in _region_.
[ "Get", "a", "security", "group", "ID", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/security_group.py#L30-L63
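get_security_group_id needs a reachable Spinnaker Gate (API_URL plus certificates) and a resolvable VPC, so this is only a hedged sketch; the exception import path is assumed:

    from foremast.utils.security_group import get_security_group_id  # assumed import path
    from foremast.exceptions import SpinnakerSecurityGroupError  # assumed exception location

    try:
        sg_id = get_security_group_id(name='coreforrest', env='dev', region='us-east-1')
    except SpinnakerSecurityGroupError:
        sg_id = None  # security group does not exist yet
    print(sg_id)  # e.g. 'sg-xxxx' when found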
6,144
foremast/foremast
src/foremast/utils/security_group.py
remove_duplicate_sg
def remove_duplicate_sg(security_groups): """Removes duplicate Security Groups that share the same name alias Args: security_groups (list): A list of security group IDs to compare against SECURITYGROUP_REPLACEMENTS Returns: security_groups (list): A list of security groups with duplicate aliases removed """ for each_sg, duplicate_sg_name in SECURITYGROUP_REPLACEMENTS.items(): if each_sg in security_groups and duplicate_sg_name in security_groups: LOG.info('Duplicate SG found. Removing %s in favor of %s.', duplicate_sg_name, each_sg) security_groups.remove(duplicate_sg_name) return security_groups
python
def remove_duplicate_sg(security_groups): """Removes duplicate Security Groups that share the same name alias Args: security_groups (list): A list of security group IDs to compare against SECURITYGROUP_REPLACEMENTS Returns: security_groups (list): A list of security groups with duplicate aliases removed """ for each_sg, duplicate_sg_name in SECURITYGROUP_REPLACEMENTS.items(): if each_sg in security_groups and duplicate_sg_name in security_groups: LOG.info('Duplicate SG found. Removing %s in favor of %s.', duplicate_sg_name, each_sg) security_groups.remove(duplicate_sg_name) return security_groups
[ "def", "remove_duplicate_sg", "(", "security_groups", ")", ":", "for", "each_sg", ",", "duplicate_sg_name", "in", "SECURITYGROUP_REPLACEMENTS", ".", "items", "(", ")", ":", "if", "each_sg", "in", "security_groups", "and", "duplicate_sg_name", "in", "security_groups", ":", "LOG", ".", "info", "(", "'Duplicate SG found. Removing %s in favor of %s.'", ",", "duplicate_sg_name", ",", "each_sg", ")", "security_groups", ".", "remove", "(", "duplicate_sg_name", ")", "return", "security_groups" ]
Removes duplicate Security Groups that share the same name alias Args: security_groups (list): A list of security group IDs to compare against SECURITYGROUP_REPLACEMENTS Returns: security_groups (list): A list of security groups with duplicate aliases removed
[ "Removes", "duplicate", "Security", "Groups", "that", "share", "a", "same", "name", "alias" ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/security_group.py#L66-L80
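The dedup keys on the module-level SECURITYGROUP_REPLACEMENTS mapping; a standalone sketch of the same logic with a hypothetical mapping (the real one comes from Foremast's configuration):

    # Hypothetical replacement mapping: preferred name -> legacy alias to drop.
    SECURITYGROUP_REPLACEMENTS = {'sg_new_bastion': 'bastion_legacy'}

    def remove_duplicate_sg(security_groups):
        for each_sg, duplicate_sg_name in SECURITYGROUP_REPLACEMENTS.items():
            if each_sg in security_groups and duplicate_sg_name in security_groups:
                security_groups.remove(duplicate_sg_name)
        return security_groups

    print(remove_duplicate_sg(['sg_new_bastion', 'bastion_legacy', 'web']))
    # -> ['sg_new_bastion', 'web']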
6,145
foremast/foremast
src/foremast/utils/encoding.py
generate_encoded_user_data
def generate_encoded_user_data( env='dev', region='us-east-1', generated=None, group_name='', pipeline_type='', canary=False, ): r"""Generate base64 encoded User Data. Args: env (str): Deployment environment, e.g. dev, stage. region (str): AWS Region, e.g. us-east-1. generated (gogoutils.Generator): Generated naming formats. group_name (str): Application group name, e.g. core. pipeline_type (str): Type of Foremast Pipeline to configure. Returns: str: base64 encoded User Data script. #!/bin/bash export CLOUD_ENVIRONMENT=dev export CLOUD_ENVIRONMENT_C=dev export CLOUD_ENVIRONMENT_P=dev export CLOUD_ENVIRONMENT_S=dev export CLOUD_APP=coreforrest export CLOUD_APP_GROUP=forrest export CLOUD_STACK=forrest export EC2_REGION=us-east-1 export CLOUD_DOMAIN=dev.example.com printenv | grep 'CLOUD\|EC2' | awk '$0="export "$0'>> /etc/gogo/cloud_env """ # We need to handle the case of prodp and prods for different URL generation if env in ["prod", "prodp", "prods"]: env_c, env_p, env_s = "prod", "prodp", "prods" else: env_c, env_p, env_s = env, env, env user_data = get_template( template_file='infrastructure/user_data.sh.j2', env=env, env_c=env_c, env_p=env_p, env_s=env_s, region=region, app_name=generated.app_name(), group_name=group_name, pipeline_type=pipeline_type, canary=canary, formats=generated, ) return base64.b64encode(user_data.encode()).decode()
python
def generate_encoded_user_data( env='dev', region='us-east-1', generated=None, group_name='', pipeline_type='', canary=False, ): r"""Generate base64 encoded User Data. Args: env (str): Deployment environment, e.g. dev, stage. region (str): AWS Region, e.g. us-east-1. generated (gogoutils.Generator): Generated naming formats. group_name (str): Application group name, e.g. core. pipeline_type (str): Type of Foremast Pipeline to configure. Returns: str: base64 encoded User Data script. #!/bin/bash export CLOUD_ENVIRONMENT=dev export CLOUD_ENVIRONMENT_C=dev export CLOUD_ENVIRONMENT_P=dev export CLOUD_ENVIRONMENT_S=dev export CLOUD_APP=coreforrest export CLOUD_APP_GROUP=forrest export CLOUD_STACK=forrest export EC2_REGION=us-east-1 export CLOUD_DOMAIN=dev.example.com printenv | grep 'CLOUD\|EC2' | awk '$0="export "$0'>> /etc/gogo/cloud_env """ # We need to handle the case of prodp and prods for different URL generation if env in ["prod", "prodp", "prods"]: env_c, env_p, env_s = "prod", "prodp", "prods" else: env_c, env_p, env_s = env, env, env user_data = get_template( template_file='infrastructure/user_data.sh.j2', env=env, env_c=env_c, env_p=env_p, env_s=env_s, region=region, app_name=generated.app_name(), group_name=group_name, pipeline_type=pipeline_type, canary=canary, formats=generated, ) return base64.b64encode(user_data.encode()).decode()
[ "def", "generate_encoded_user_data", "(", "env", "=", "'dev'", ",", "region", "=", "'us-east-1'", ",", "generated", "=", "None", ",", "group_name", "=", "''", ",", "pipeline_type", "=", "''", ",", "canary", "=", "False", ",", ")", ":", "# We need to handle the case of prodp and prods for different URL generation", "if", "env", "in", "[", "\"prod\"", ",", "\"prodp\"", ",", "\"prods\"", "]", ":", "env_c", ",", "env_p", ",", "env_s", "=", "\"prod\"", ",", "\"prodp\"", ",", "\"prods\"", "else", ":", "env_c", ",", "env_p", ",", "env_s", "=", "env", ",", "env", ",", "env", "user_data", "=", "get_template", "(", "template_file", "=", "'infrastructure/user_data.sh.j2'", ",", "env", "=", "env", ",", "env_c", "=", "env_c", ",", "env_p", "=", "env_p", ",", "env_s", "=", "env_s", ",", "region", "=", "region", ",", "app_name", "=", "generated", ".", "app_name", "(", ")", ",", "group_name", "=", "group_name", ",", "pipeline_type", "=", "pipeline_type", ",", "canary", "=", "canary", ",", "formats", "=", "generated", ",", ")", "return", "base64", ".", "b64encode", "(", "user_data", ".", "encode", "(", ")", ")", ".", "decode", "(", ")" ]
r"""Generate base64 encoded User Data. Args: env (str): Deployment environment, e.g. dev, stage. region (str): AWS Region, e.g. us-east-1. generated (gogoutils.Generator): Generated naming formats. group_name (str): Application group nane, e.g. core. pipeline_type (str): Type of Foremast Pipeline to configure. Returns: str: base64 encoded User Data script. #!/bin/bash export CLOUD_ENVIRONMENT=dev export CLOUD_ENVIRONMENT_C=dev export CLOUD_ENVIRONMENT_P=dev export CLOUD_ENVIRONMENT_S=dev export CLOUD_APP=coreforrest export CLOUD_APP_GROUP=forrest export CLOUD_STACK=forrest export EC2_REGION=us-east-1 export CLOUD_DOMAIN=dev.example.com printenv | grep 'CLOUD\|EC2' | awk '$0="export "$0'>> /etc/gogo/cloud_env
[ "r", "Generate", "base64", "encoded", "User", "Data", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/encoding.py#L22-L74
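The helper returns plain base64, so decoding the result is a quick way to review the rendered User Data; a minimal sketch (the gogoutils Generator that Foremast passes as generated is omitted, and the sample string is a placeholder):

    import base64

    # encoded = generate_encoded_user_data(env='dev', region='us-east-1',
    #                                      generated=generated, group_name='forrest')
    encoded = 'IyEvYmluL2Jhc2gK'  # placeholder: base64 of "#!/bin/bash\n"
    print(base64.b64decode(encoded).decode())  # -> #!/bin/bash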
6,146
foremast/foremast
src/foremast/runner.py
prepare_infrastructure
def prepare_infrastructure(): """Entry point for preparing the infrastructure in a specific env.""" runner = ForemastRunner() runner.write_configs() runner.create_app() archaius = runner.configs[runner.env]['app']['archaius_enabled'] eureka = runner.configs[runner.env]['app']['eureka_enabled'] deploy_type = runner.configs['pipeline']['type'] if deploy_type not in ['s3', 'datapipeline']: runner.create_iam() # TODO: Refactor Archaius to be fully featured if archaius: runner.create_archaius() runner.create_secgroups() if eureka: LOG.info("Eureka Enabled, skipping ELB and DNS setup") elif deploy_type == "lambda": LOG.info("Lambda Enabled, skipping ELB and DNS setup") runner.create_awslambda() elif deploy_type == "s3": runner.create_s3app() elif deploy_type == 'datapipeline': runner.create_datapipeline() else: LOG.info("No Eureka, running ELB and DNS setup") runner.create_elb() runner.create_dns() runner.slack_notify() runner.cleanup()
python
def prepare_infrastructure(): """Entry point for preparing the infrastructure in a specific env.""" runner = ForemastRunner() runner.write_configs() runner.create_app() archaius = runner.configs[runner.env]['app']['archaius_enabled'] eureka = runner.configs[runner.env]['app']['eureka_enabled'] deploy_type = runner.configs['pipeline']['type'] if deploy_type not in ['s3', 'datapipeline']: runner.create_iam() # TODO: Refactor Archaius to be fully featured if archaius: runner.create_archaius() runner.create_secgroups() if eureka: LOG.info("Eureka Enabled, skipping ELB and DNS setup") elif deploy_type == "lambda": LOG.info("Lambda Enabled, skipping ELB and DNS setup") runner.create_awslambda() elif deploy_type == "s3": runner.create_s3app() elif deploy_type == 'datapipeline': runner.create_datapipeline() else: LOG.info("No Eureka, running ELB and DNS setup") runner.create_elb() runner.create_dns() runner.slack_notify() runner.cleanup()
[ "def", "prepare_infrastructure", "(", ")", ":", "runner", "=", "ForemastRunner", "(", ")", "runner", ".", "write_configs", "(", ")", "runner", ".", "create_app", "(", ")", "archaius", "=", "runner", ".", "configs", "[", "runner", ".", "env", "]", "[", "'app'", "]", "[", "'archaius_enabled'", "]", "eureka", "=", "runner", ".", "configs", "[", "runner", ".", "env", "]", "[", "'app'", "]", "[", "'eureka_enabled'", "]", "deploy_type", "=", "runner", ".", "configs", "[", "'pipeline'", "]", "[", "'type'", "]", "if", "deploy_type", "not", "in", "[", "'s3'", ",", "'datapipeline'", "]", ":", "runner", ".", "create_iam", "(", ")", "# TODO: Refactor Archaius to be fully featured", "if", "archaius", ":", "runner", ".", "create_archaius", "(", ")", "runner", ".", "create_secgroups", "(", ")", "if", "eureka", ":", "LOG", ".", "info", "(", "\"Eureka Enabled, skipping ELB and DNS setup\"", ")", "elif", "deploy_type", "==", "\"lambda\"", ":", "LOG", ".", "info", "(", "\"Lambda Enabled, skipping ELB and DNS setup\"", ")", "runner", ".", "create_awslambda", "(", ")", "elif", "deploy_type", "==", "\"s3\"", ":", "runner", ".", "create_s3app", "(", ")", "elif", "deploy_type", "==", "'datapipeline'", ":", "runner", ".", "create_datapipeline", "(", ")", "else", ":", "LOG", ".", "info", "(", "\"No Eureka, running ELB and DNS setup\"", ")", "runner", ".", "create_elb", "(", ")", "runner", ".", "create_dns", "(", ")", "runner", ".", "slack_notify", "(", ")", "runner", ".", "cleanup", "(", ")" ]
Entry point for preparing the infrastructure in a specific env.
[ "Entry", "point", "for", "preparing", "the", "infrastructure", "in", "a", "specific", "env", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/runner.py#L250-L283
6,147
foremast/foremast
src/foremast/runner.py
prepare_app_pipeline
def prepare_app_pipeline(): """Entry point for application setup and initial pipeline in Spinnaker.""" runner = ForemastRunner() runner.write_configs() runner.create_app() runner.create_pipeline() runner.cleanup()
python
def prepare_app_pipeline(): """Entry point for application setup and initial pipeline in Spinnaker.""" runner = ForemastRunner() runner.write_configs() runner.create_app() runner.create_pipeline() runner.cleanup()
[ "def", "prepare_app_pipeline", "(", ")", ":", "runner", "=", "ForemastRunner", "(", ")", "runner", ".", "write_configs", "(", ")", "runner", ".", "create_app", "(", ")", "runner", ".", "create_pipeline", "(", ")", "runner", ".", "cleanup", "(", ")" ]
Entry point for application setup and initial pipeline in Spinnaker.
[ "Entry", "point", "for", "application", "setup", "and", "initial", "pipeline", "in", "Spinnaker", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/runner.py#L286-L292
6,148
foremast/foremast
src/foremast/runner.py
prepare_onetime_pipeline
def prepare_onetime_pipeline(): """Entry point for single use pipeline setup in the defined app.""" runner = ForemastRunner() runner.write_configs() runner.create_pipeline(onetime=os.getenv('ENV')) runner.cleanup()
python
def prepare_onetime_pipeline(): """Entry point for single use pipeline setup in the defined app.""" runner = ForemastRunner() runner.write_configs() runner.create_pipeline(onetime=os.getenv('ENV')) runner.cleanup()
[ "def", "prepare_onetime_pipeline", "(", ")", ":", "runner", "=", "ForemastRunner", "(", ")", "runner", ".", "write_configs", "(", ")", "runner", ".", "create_pipeline", "(", "onetime", "=", "os", ".", "getenv", "(", "'ENV'", ")", ")", "runner", ".", "cleanup", "(", ")" ]
Entry point for single use pipeline setup in the defined app.
[ "Entry", "point", "for", "single", "use", "pipeline", "setup", "in", "the", "defined", "app", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/runner.py#L295-L300
6,149
foremast/foremast
src/foremast/runner.py
ForemastRunner.write_configs
def write_configs(self): """Generate the configurations needed for pipes.""" utils.banner("Generating Configs") if not self.runway_dir: app_configs = configs.process_git_configs(git_short=self.git_short) else: app_configs = configs.process_runway_configs(runway_dir=self.runway_dir) self.configs = configs.write_variables( app_configs=app_configs, out_file=self.raw_path, git_short=self.git_short)
python
def write_configs(self): """Generate the configurations needed for pipes.""" utils.banner("Generating Configs") if not self.runway_dir: app_configs = configs.process_git_configs(git_short=self.git_short) else: app_configs = configs.process_runway_configs(runway_dir=self.runway_dir) self.configs = configs.write_variables( app_configs=app_configs, out_file=self.raw_path, git_short=self.git_short)
[ "def", "write_configs", "(", "self", ")", ":", "utils", ".", "banner", "(", "\"Generating Configs\"", ")", "if", "not", "self", ".", "runway_dir", ":", "app_configs", "=", "configs", ".", "process_git_configs", "(", "git_short", "=", "self", ".", "git_short", ")", "else", ":", "app_configs", "=", "configs", ".", "process_runway_configs", "(", "runway_dir", "=", "self", ".", "runway_dir", ")", "self", ".", "configs", "=", "configs", ".", "write_variables", "(", "app_configs", "=", "app_configs", ",", "out_file", "=", "self", ".", "raw_path", ",", "git_short", "=", "self", ".", "git_short", ")" ]
Generate the configurations needed for pipes.
[ "Generate", "the", "configurations", "needed", "for", "pipes", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/runner.py#L72-L81
6,150
foremast/foremast
src/foremast/runner.py
ForemastRunner.create_app
def create_app(self): """Create the spinnaker application.""" utils.banner("Creating Spinnaker App") spinnakerapp = app.SpinnakerApp(app=self.app, email=self.email, project=self.group, repo=self.repo, pipeline_config=self.configs['pipeline']) spinnakerapp.create_app()
python
def create_app(self): """Create the spinnaker application.""" utils.banner("Creating Spinnaker App") spinnakerapp = app.SpinnakerApp(app=self.app, email=self.email, project=self.group, repo=self.repo, pipeline_config=self.configs['pipeline']) spinnakerapp.create_app()
[ "def", "create_app", "(", "self", ")", ":", "utils", ".", "banner", "(", "\"Creating Spinnaker App\"", ")", "spinnakerapp", "=", "app", ".", "SpinnakerApp", "(", "app", "=", "self", ".", "app", ",", "email", "=", "self", ".", "email", ",", "project", "=", "self", ".", "group", ",", "repo", "=", "self", ".", "repo", ",", "pipeline_config", "=", "self", ".", "configs", "[", "'pipeline'", "]", ")", "spinnakerapp", ".", "create_app", "(", ")" ]
Create the spinnaker application.
[ "Create", "the", "spinnaker", "application", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/runner.py#L83-L88
6,151
foremast/foremast
src/foremast/runner.py
ForemastRunner.create_iam
def create_iam(self): """Create IAM resources.""" utils.banner("Creating IAM") iam.create_iam_resources(env=self.env, app=self.app)
python
def create_iam(self): """Create IAM resources.""" utils.banner("Creating IAM") iam.create_iam_resources(env=self.env, app=self.app)
[ "def", "create_iam", "(", "self", ")", ":", "utils", ".", "banner", "(", "\"Creating IAM\"", ")", "iam", ".", "create_iam_resources", "(", "env", "=", "self", ".", "env", ",", "app", "=", "self", ".", "app", ")" ]
Create IAM resources.
[ "Create", "IAM", "resources", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/runner.py#L124-L127
6,152
foremast/foremast
src/foremast/runner.py
ForemastRunner.create_archaius
def create_archaius(self): """Create S3 bucket for Archaius.""" utils.banner("Creating S3") s3.init_properties(env=self.env, app=self.app)
python
def create_archaius(self): """Create S3 bucket for Archaius.""" utils.banner("Creating S3") s3.init_properties(env=self.env, app=self.app)
[ "def", "create_archaius", "(", "self", ")", ":", "utils", ".", "banner", "(", "\"Creating S3\"", ")", "s3", ".", "init_properties", "(", "env", "=", "self", ".", "env", ",", "app", "=", "self", ".", "app", ")" ]
Create S3 bucket for Archaius.
[ "Create", "S3", "bucket", "for", "Archaius", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/runner.py#L129-L132
6,153
foremast/foremast
src/foremast/runner.py
ForemastRunner.create_s3app
def create_s3app(self): """Create S3 infra for s3 applications""" utils.banner("Creating S3 App Infrastructure") primary_region = self.configs['pipeline']['primary_region'] s3obj = s3.S3Apps(app=self.app, env=self.env, region=self.region, prop_path=self.json_path, primary_region=primary_region) s3obj.create_bucket()
python
def create_s3app(self): """Create S3 infra for s3 applications""" utils.banner("Creating S3 App Infrastructure") primary_region = self.configs['pipeline']['primary_region'] s3obj = s3.S3Apps(app=self.app, env=self.env, region=self.region, prop_path=self.json_path, primary_region=primary_region) s3obj.create_bucket()
[ "def", "create_s3app", "(", "self", ")", ":", "utils", ".", "banner", "(", "\"Creating S3 App Infrastructure\"", ")", "primary_region", "=", "self", ".", "configs", "[", "'pipeline'", "]", "[", "'primary_region'", "]", "s3obj", "=", "s3", ".", "S3Apps", "(", "app", "=", "self", ".", "app", ",", "env", "=", "self", ".", "env", ",", "region", "=", "self", ".", "region", ",", "prop_path", "=", "self", ".", "json_path", ",", "primary_region", "=", "primary_region", ")", "s3obj", ".", "create_bucket", "(", ")" ]
Create S3 infra for s3 applications
[ "Create", "S3", "infra", "for", "s3", "applications" ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/runner.py#L134-L143
6,154
foremast/foremast
src/foremast/runner.py
ForemastRunner.deploy_s3app
def deploy_s3app(self): """Deploys artifacts contents to S3 bucket""" utils.banner("Deploying S3 App") primary_region = self.configs['pipeline']['primary_region'] s3obj = s3.S3Deployment( app=self.app, env=self.env, region=self.region, prop_path=self.json_path, artifact_path=self.artifact_path, artifact_version=self.artifact_version, primary_region=primary_region) s3obj.upload_artifacts()
python
def deploy_s3app(self): """Deploys artifacts contents to S3 bucket""" utils.banner("Deploying S3 App") primary_region = self.configs['pipeline']['primary_region'] s3obj = s3.S3Deployment( app=self.app, env=self.env, region=self.region, prop_path=self.json_path, artifact_path=self.artifact_path, artifact_version=self.artifact_version, primary_region=primary_region) s3obj.upload_artifacts()
[ "def", "deploy_s3app", "(", "self", ")", ":", "utils", ".", "banner", "(", "\"Deploying S3 App\"", ")", "primary_region", "=", "self", ".", "configs", "[", "'pipeline'", "]", "[", "'primary_region'", "]", "s3obj", "=", "s3", ".", "S3Deployment", "(", "app", "=", "self", ".", "app", ",", "env", "=", "self", ".", "env", ",", "region", "=", "self", ".", "region", ",", "prop_path", "=", "self", ".", "json_path", ",", "artifact_path", "=", "self", ".", "artifact_path", ",", "artifact_version", "=", "self", ".", "artifact_version", ",", "primary_region", "=", "primary_region", ")", "s3obj", ".", "upload_artifacts", "(", ")" ]
Deploys artifacts contents to S3 bucket
[ "Deploys", "artifacts", "contents", "to", "S3", "bucket" ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/runner.py#L145-L157
6,155
foremast/foremast
src/foremast/runner.py
ForemastRunner.promote_s3app
def promote_s3app(self): """promotes S3 deployment to LATEST""" utils.banner("Promoting S3 App") primary_region = self.configs['pipeline']['primary_region'] s3obj = s3.S3Deployment( app=self.app, env=self.env, region=self.region, prop_path=self.json_path, artifact_path=self.artifact_path, artifact_version=self.artifact_version, primary_region=primary_region) s3obj.promote_artifacts(promote_stage=self.promote_stage)
python
def promote_s3app(self): """promotes S3 deployment to LATEST""" utils.banner("Promoting S3 App") primary_region = self.configs['pipeline']['primary_region'] s3obj = s3.S3Deployment( app=self.app, env=self.env, region=self.region, prop_path=self.json_path, artifact_path=self.artifact_path, artifact_version=self.artifact_version, primary_region=primary_region) s3obj.promote_artifacts(promote_stage=self.promote_stage)
[ "def", "promote_s3app", "(", "self", ")", ":", "utils", ".", "banner", "(", "\"Promoting S3 App\"", ")", "primary_region", "=", "self", ".", "configs", "[", "'pipeline'", "]", "[", "'primary_region'", "]", "s3obj", "=", "s3", ".", "S3Deployment", "(", "app", "=", "self", ".", "app", ",", "env", "=", "self", ".", "env", ",", "region", "=", "self", ".", "region", ",", "prop_path", "=", "self", ".", "json_path", ",", "artifact_path", "=", "self", ".", "artifact_path", ",", "artifact_version", "=", "self", ".", "artifact_version", ",", "primary_region", "=", "primary_region", ")", "s3obj", ".", "promote_artifacts", "(", "promote_stage", "=", "self", ".", "promote_stage", ")" ]
promotes S3 deployment to LATEST
[ "promotes", "S3", "deployment", "to", "LATEST" ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/runner.py#L159-L171
6,156
foremast/foremast
src/foremast/runner.py
ForemastRunner.create_elb
def create_elb(self): """Create the ELB for the defined environment.""" utils.banner("Creating ELB") elbobj = elb.SpinnakerELB(app=self.app, env=self.env, region=self.region, prop_path=self.json_path) elbobj.create_elb()
python
def create_elb(self): """Create the ELB for the defined environment.""" utils.banner("Creating ELB") elbobj = elb.SpinnakerELB(app=self.app, env=self.env, region=self.region, prop_path=self.json_path) elbobj.create_elb()
[ "def", "create_elb", "(", "self", ")", ":", "utils", ".", "banner", "(", "\"Creating ELB\"", ")", "elbobj", "=", "elb", ".", "SpinnakerELB", "(", "app", "=", "self", ".", "app", ",", "env", "=", "self", ".", "env", ",", "region", "=", "self", ".", "region", ",", "prop_path", "=", "self", ".", "json_path", ")", "elbobj", ".", "create_elb", "(", ")" ]
Create the ELB for the defined environment.
[ "Create", "the", "ELB", "for", "the", "defined", "environment", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/runner.py#L191-L195
6,157
foremast/foremast
src/foremast/runner.py
ForemastRunner.create_dns
def create_dns(self): """Create DNS for the defined app and environment.""" utils.banner("Creating DNS") elb_subnet = self.configs[self.env]['elb']['subnet_purpose'] regions = self.configs[self.env]['regions'] failover = self.configs[self.env]['dns']['failover_dns'] primary_region = self.configs['pipeline']['primary_region'] regionspecific_dns = self.configs[self.env]['dns']['region_specific'] dnsobj = dns.SpinnakerDns( app=self.app, env=self.env, region=self.region, prop_path=self.json_path, elb_subnet=elb_subnet) if len(regions) > 1 and failover: dnsobj.create_elb_dns(regionspecific=True) dnsobj.create_failover_dns(primary_region=primary_region) else: if regionspecific_dns: dnsobj.create_elb_dns(regionspecific=True) if self.region == primary_region: dnsobj.create_elb_dns(regionspecific=False)
python
def create_dns(self): """Create DNS for the defined app and environment.""" utils.banner("Creating DNS") elb_subnet = self.configs[self.env]['elb']['subnet_purpose'] regions = self.configs[self.env]['regions'] failover = self.configs[self.env]['dns']['failover_dns'] primary_region = self.configs['pipeline']['primary_region'] regionspecific_dns = self.configs[self.env]['dns']['region_specific'] dnsobj = dns.SpinnakerDns( app=self.app, env=self.env, region=self.region, prop_path=self.json_path, elb_subnet=elb_subnet) if len(regions) > 1 and failover: dnsobj.create_elb_dns(regionspecific=True) dnsobj.create_failover_dns(primary_region=primary_region) else: if regionspecific_dns: dnsobj.create_elb_dns(regionspecific=True) if self.region == primary_region: dnsobj.create_elb_dns(regionspecific=False)
[ "def", "create_dns", "(", "self", ")", ":", "utils", ".", "banner", "(", "\"Creating DNS\"", ")", "elb_subnet", "=", "self", ".", "configs", "[", "self", ".", "env", "]", "[", "'elb'", "]", "[", "'subnet_purpose'", "]", "regions", "=", "self", ".", "configs", "[", "self", ".", "env", "]", "[", "'regions'", "]", "failover", "=", "self", ".", "configs", "[", "self", ".", "env", "]", "[", "'dns'", "]", "[", "'failover_dns'", "]", "primary_region", "=", "self", ".", "configs", "[", "'pipeline'", "]", "[", "'primary_region'", "]", "regionspecific_dns", "=", "self", ".", "configs", "[", "self", ".", "env", "]", "[", "'dns'", "]", "[", "'region_specific'", "]", "dnsobj", "=", "dns", ".", "SpinnakerDns", "(", "app", "=", "self", ".", "app", ",", "env", "=", "self", ".", "env", ",", "region", "=", "self", ".", "region", ",", "prop_path", "=", "self", ".", "json_path", ",", "elb_subnet", "=", "elb_subnet", ")", "if", "len", "(", "regions", ")", ">", "1", "and", "failover", ":", "dnsobj", ".", "create_elb_dns", "(", "regionspecific", "=", "True", ")", "dnsobj", ".", "create_failover_dns", "(", "primary_region", "=", "primary_region", ")", "else", ":", "if", "regionspecific_dns", ":", "dnsobj", ".", "create_elb_dns", "(", "regionspecific", "=", "True", ")", "if", "self", ".", "region", "==", "primary_region", ":", "dnsobj", ".", "create_elb_dns", "(", "regionspecific", "=", "False", ")" ]
Create DNS for the defined app and environment.
[ "Create", "DNS", "for", "the", "defined", "app", "and", "environment", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/runner.py#L197-L217
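The branching in ForemastRunner.create_dns can be hard to follow inline; a hypothetical standalone restatement (not Foremast's API) that returns which DNS records would be created for a given configuration:

    # Hypothetical restatement of the create_dns decision logic;
    # returns which DNS records would be created for a given configuration.
    def dns_actions(regions, failover, region_specific, region, primary_region):
        actions = []
        if len(regions) > 1 and failover:
            actions += ['elb_dns(region-specific)', 'failover_dns']
        else:
            if region_specific:
                actions.append('elb_dns(region-specific)')
            if region == primary_region:
                actions.append('elb_dns(global)')
        return actions

    print(dns_actions(['us-east-1', 'us-west-2'], True, False, 'us-east-1', 'us-east-1'))
    # -> ['elb_dns(region-specific)', 'failover_dns']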
6,158
foremast/foremast
src/foremast/runner.py
ForemastRunner.create_autoscaling_policy
def create_autoscaling_policy(self): """Create Scaling Policy for app in environment""" utils.banner("Creating Scaling Policy") policyobj = autoscaling_policy.AutoScalingPolicy( app=self.app, env=self.env, region=self.region, prop_path=self.json_path) policyobj.create_policy()
python
def create_autoscaling_policy(self): """Create Scaling Policy for app in environment""" utils.banner("Creating Scaling Policy") policyobj = autoscaling_policy.AutoScalingPolicy( app=self.app, env=self.env, region=self.region, prop_path=self.json_path) policyobj.create_policy()
[ "def", "create_autoscaling_policy", "(", "self", ")", ":", "utils", ".", "banner", "(", "\"Creating Scaling Policy\"", ")", "policyobj", "=", "autoscaling_policy", ".", "AutoScalingPolicy", "(", "app", "=", "self", ".", "app", ",", "env", "=", "self", ".", "env", ",", "region", "=", "self", ".", "region", ",", "prop_path", "=", "self", ".", "json_path", ")", "policyobj", ".", "create_policy", "(", ")" ]
Create Scaling Policy for app in environment
[ "Create", "Scaling", "Policy", "for", "app", "in", "environment" ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/runner.py#L219-L224
6,159
foremast/foremast
src/foremast/runner.py
ForemastRunner.create_datapipeline
def create_datapipeline(self): """Creates data pipeline and adds definition""" utils.banner("Creating Data Pipeline") dpobj = datapipeline.AWSDataPipeline(app=self.app, env=self.env, region=self.region, prop_path=self.json_path) dpobj.create_datapipeline() dpobj.set_pipeline_definition() if self.configs[self.env].get('datapipeline').get('activate_on_deploy'): dpobj.activate_pipeline()
python
def create_datapipeline(self): """Creates data pipeline and adds definition""" utils.banner("Creating Data Pipeline") dpobj = datapipeline.AWSDataPipeline(app=self.app, env=self.env, region=self.region, prop_path=self.json_path) dpobj.create_datapipeline() dpobj.set_pipeline_definition() if self.configs[self.env].get('datapipeline').get('activate_on_deploy'): dpobj.activate_pipeline()
[ "def", "create_datapipeline", "(", "self", ")", ":", "utils", ".", "banner", "(", "\"Creating Data Pipeline\"", ")", "dpobj", "=", "datapipeline", ".", "AWSDataPipeline", "(", "app", "=", "self", ".", "app", ",", "env", "=", "self", ".", "env", ",", "region", "=", "self", ".", "region", ",", "prop_path", "=", "self", ".", "json_path", ")", "dpobj", ".", "create_datapipeline", "(", ")", "dpobj", ".", "set_pipeline_definition", "(", ")", "if", "self", ".", "configs", "[", "self", ".", "env", "]", ".", "get", "(", "'datapipeline'", ")", ".", "get", "(", "'activate_on_deploy'", ")", ":", "dpobj", ".", "activate_pipeline", "(", ")" ]
Creates data pipeline and adds definition
[ "Creates", "data", "pipeline", "and", "adds", "definition" ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/runner.py#L226-L233
6,160
foremast/foremast
src/foremast/runner.py
ForemastRunner.slack_notify
def slack_notify(self): """Send out a slack notification.""" utils.banner("Sending slack notification") if self.env.startswith("prod"): notify = slacknotify.SlackNotification(app=self.app, env=self.env, prop_path=self.json_path) notify.post_message() else: LOG.info("No slack message sent, not production environment")
python
def slack_notify(self): """Send out a slack notification.""" utils.banner("Sending slack notification") if self.env.startswith("prod"): notify = slacknotify.SlackNotification(app=self.app, env=self.env, prop_path=self.json_path) notify.post_message() else: LOG.info("No slack message sent, not production environment")
[ "def", "slack_notify", "(", "self", ")", ":", "utils", ".", "banner", "(", "\"Sending slack notification\"", ")", "if", "self", ".", "env", ".", "startswith", "(", "\"prod\"", ")", ":", "notify", "=", "slacknotify", ".", "SlackNotification", "(", "app", "=", "self", ".", "app", ",", "env", "=", "self", ".", "env", ",", "prop_path", "=", "self", ".", "json_path", ")", "notify", ".", "post_message", "(", ")", "else", ":", "LOG", ".", "info", "(", "\"No slack message sent, not production environment\"", ")" ]
Send out a slack notification.
[ "Send", "out", "a", "slack", "notification", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/runner.py#L235-L243
6,161
foremast/foremast
src/foremast/args.py
add_debug
def add_debug(parser): """Add a `debug` flag to the _parser_.""" parser.add_argument( '-d', '--debug', action='store_const', const=logging.DEBUG, default=logging.INFO, help='Set DEBUG output')
python
def add_debug(parser): """Add a `debug` flag to the _parser_.""" parser.add_argument( '-d', '--debug', action='store_const', const=logging.DEBUG, default=logging.INFO, help='Set DEBUG output')
[ "def", "add_debug", "(", "parser", ")", ":", "parser", ".", "add_argument", "(", "'-d'", ",", "'--debug'", ",", "action", "=", "'store_const'", ",", "const", "=", "logging", ".", "DEBUG", ",", "default", "=", "logging", ".", "INFO", ",", "help", "=", "'Set DEBUG output'", ")" ]
Add a `debug` flag to the _parser_.
[ "Add", "a", "debug", "flag", "to", "the", "_parser_", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/args.py#L28-L31
6,162
foremast/foremast
src/foremast/args.py
add_env
def add_env(parser): """Add an `env` flag to the _parser_.""" parser.add_argument( '-e', '--env', choices=ENVS, default=os.getenv('ENV', default='dev'), help='Deploy environment, overrides $ENV')
python
def add_env(parser): """Add an `env` flag to the _parser_.""" parser.add_argument( '-e', '--env', choices=ENVS, default=os.getenv('ENV', default='dev'), help='Deploy environment, overrides $ENV')
[ "def", "add_env", "(", "parser", ")", ":", "parser", ".", "add_argument", "(", "'-e'", ",", "'--env'", ",", "choices", "=", "ENVS", ",", "default", "=", "os", ".", "getenv", "(", "'ENV'", ",", "default", "=", "'dev'", ")", ",", "help", "=", "'Deploy environment, overrides $ENV'", ")" ]
Add an `env` flag to the _parser_.
[ "Add", "an", "env", "flag", "to", "the", "_parser_", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/args.py#L34-L37
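A hedged usage sketch for the two argparse helpers above (add_debug and add_env). The import path foremast.args is inferred from the dataset's path column (src/foremast/args.py) and is an assumption, as is 'dev' being one of the configured ENVS; everything else is standard library.

# Illustrative only: wiring the helpers into a throwaway parser.
import argparse
import logging

from foremast.args import add_debug, add_env  # import path inferred, not confirmed

parser = argparse.ArgumentParser(description='Example CLI using the foremast argparse helpers')
add_debug(parser)  # adds -d/--debug, which stores logging.DEBUG when passed
add_env(parser)    # adds -e/--env with choices=ENVS, defaulting to $ENV or 'dev'

args = parser.parse_args(['--debug', '--env', 'dev'])  # 'dev' assumed to be in ENVS
logging.basicConfig(level=args.debug)
logging.getLogger(__name__).debug('Parsed arguments: %s', args)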
6,163
foremast/foremast
src/foremast/autoscaling_policy/__main__.py
main
def main(): """CLI entrypoint for scaling policy creation""" logging.basicConfig(format=LOGGING_FORMAT) log = logging.getLogger(__name__) parser = argparse.ArgumentParser() add_debug(parser) add_app(parser) add_properties(parser) add_env(parser) add_region(parser) args = parser.parse_args() logging.getLogger(__package__.split('.')[0]).setLevel(args.debug) log.debug('Parsed arguments: %s', args) asgpolicy = AutoScalingPolicy(app=args.app, prop_path=args.properties, env=args.env, region=args.region) asgpolicy.create_policy()
python
def main(): """CLI entrypoint for scaling policy creation""" logging.basicConfig(format=LOGGING_FORMAT) log = logging.getLogger(__name__) parser = argparse.ArgumentParser() add_debug(parser) add_app(parser) add_properties(parser) add_env(parser) add_region(parser) args = parser.parse_args() logging.getLogger(__package__.split('.')[0]).setLevel(args.debug) log.debug('Parsed arguments: %s', args) asgpolicy = AutoScalingPolicy(app=args.app, prop_path=args.properties, env=args.env, region=args.region) asgpolicy.create_policy()
[ "def", "main", "(", ")", ":", "logging", ".", "basicConfig", "(", "format", "=", "LOGGING_FORMAT", ")", "log", "=", "logging", ".", "getLogger", "(", "__name__", ")", "parser", "=", "argparse", ".", "ArgumentParser", "(", ")", "add_debug", "(", "parser", ")", "add_app", "(", "parser", ")", "add_properties", "(", "parser", ")", "add_env", "(", "parser", ")", "add_region", "(", "parser", ")", "args", "=", "parser", ".", "parse_args", "(", ")", "logging", ".", "getLogger", "(", "__package__", ".", "split", "(", "'.'", ")", "[", "0", "]", ")", ".", "setLevel", "(", "args", ".", "debug", ")", "log", ".", "debug", "(", "'Parsed arguments: %s'", ",", "args", ")", "asgpolicy", "=", "AutoScalingPolicy", "(", "app", "=", "args", ".", "app", ",", "prop_path", "=", "args", ".", "properties", ",", "env", "=", "args", ".", "env", ",", "region", "=", "args", ".", "region", ")", "asgpolicy", ".", "create_policy", "(", ")" ]
CLI entrypoint for scaling policy creation
[ "CLI", "entrypoint", "for", "scaling", "policy", "creation" ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/autoscaling_policy/__main__.py#L29-L48
6,164
foremast/foremast
src/foremast/awslambda/__main__.py
main
def main(): """Create Lambda events.""" logging.basicConfig(format=LOGGING_FORMAT) log = logging.getLogger(__name__) parser = argparse.ArgumentParser(description=main.__doc__) add_debug(parser) add_app(parser) add_env(parser) add_properties(parser) add_region(parser) args = parser.parse_args() logging.getLogger(__package__.split('.')[0]).setLevel(args.debug) log.debug('Parsed arguments: %s', args) lambda_function = LambdaFunction(app=args.app, env=args.env, region=args.region, prop_path=args.properties) lambda_function.create_lambda_function() lambda_event = LambdaEvent(app=args.app, env=args.env, region=args.region, prop_path=args.properties) lambda_event.create_lambda_events()
python
def main(): """Create Lambda events.""" logging.basicConfig(format=LOGGING_FORMAT) log = logging.getLogger(__name__) parser = argparse.ArgumentParser(description=main.__doc__) add_debug(parser) add_app(parser) add_env(parser) add_properties(parser) add_region(parser) args = parser.parse_args() logging.getLogger(__package__.split('.')[0]).setLevel(args.debug) log.debug('Parsed arguments: %s', args) lambda_function = LambdaFunction(app=args.app, env=args.env, region=args.region, prop_path=args.properties) lambda_function.create_lambda_function() lambda_event = LambdaEvent(app=args.app, env=args.env, region=args.region, prop_path=args.properties) lambda_event.create_lambda_events()
[ "def", "main", "(", ")", ":", "logging", ".", "basicConfig", "(", "format", "=", "LOGGING_FORMAT", ")", "log", "=", "logging", ".", "getLogger", "(", "__name__", ")", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "main", ".", "__doc__", ")", "add_debug", "(", "parser", ")", "add_app", "(", "parser", ")", "add_env", "(", "parser", ")", "add_properties", "(", "parser", ")", "add_region", "(", "parser", ")", "args", "=", "parser", ".", "parse_args", "(", ")", "logging", ".", "getLogger", "(", "__package__", ".", "split", "(", "'.'", ")", "[", "0", "]", ")", ".", "setLevel", "(", "args", ".", "debug", ")", "log", ".", "debug", "(", "'Parsed arguments: %s'", ",", "args", ")", "lambda_function", "=", "LambdaFunction", "(", "app", "=", "args", ".", "app", ",", "env", "=", "args", ".", "env", ",", "region", "=", "args", ".", "region", ",", "prop_path", "=", "args", ".", "properties", ")", "lambda_function", ".", "create_lambda_function", "(", ")", "lambda_event", "=", "LambdaEvent", "(", "app", "=", "args", ".", "app", ",", "env", "=", "args", ".", "env", ",", "region", "=", "args", ".", "region", ",", "prop_path", "=", "args", ".", "properties", ")", "lambda_event", ".", "create_lambda_events", "(", ")" ]
Create Lambda events.
[ "Create", "Lambda", "events", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/__main__.py#L29-L51
6,165
foremast/foremast
src/foremast/utils/slack.py
post_slack_message
def post_slack_message(message=None, channel=None, username=None, icon_emoji=None): """Format the message and post to the appropriate slack channel. Args: message (str): Message to post to slack channel (str): Desired channel. Must start with # """ LOG.debug('Slack Channel: %s\nSlack Message: %s', channel, message) slack = slacker.Slacker(SLACK_TOKEN) try: slack.chat.post_message(channel=channel, text=message, username=username, icon_emoji=icon_emoji) LOG.info('Message posted to %s', channel) except slacker.Error: LOG.info("error posted message to %s", channel)
python
def post_slack_message(message=None, channel=None, username=None, icon_emoji=None): """Format the message and post to the appropriate slack channel. Args: message (str): Message to post to slack channel (str): Desired channel. Must start with # """ LOG.debug('Slack Channel: %s\nSlack Message: %s', channel, message) slack = slacker.Slacker(SLACK_TOKEN) try: slack.chat.post_message(channel=channel, text=message, username=username, icon_emoji=icon_emoji) LOG.info('Message posted to %s', channel) except slacker.Error: LOG.info("error posted message to %s", channel)
[ "def", "post_slack_message", "(", "message", "=", "None", ",", "channel", "=", "None", ",", "username", "=", "None", ",", "icon_emoji", "=", "None", ")", ":", "LOG", ".", "debug", "(", "'Slack Channel: %s\\nSlack Message: %s'", ",", "channel", ",", "message", ")", "slack", "=", "slacker", ".", "Slacker", "(", "SLACK_TOKEN", ")", "try", ":", "slack", ".", "chat", ".", "post_message", "(", "channel", "=", "channel", ",", "text", "=", "message", ",", "username", "=", "username", ",", "icon_emoji", "=", "icon_emoji", ")", "LOG", ".", "info", "(", "'Message posted to %s'", ",", "channel", ")", "except", "slacker", ".", "Error", ":", "LOG", ".", "info", "(", "\"error posted message to %s\"", ",", "channel", ")" ]
Format the message and post to the appropriate slack channel. Args: message (str): Message to post to slack channel (str): Desired channel. Must start with #
[ "Format", "the", "message", "and", "post", "to", "the", "appropriate", "slack", "channel", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/slack.py#L26-L40
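A minimal, hypothetical call to post_slack_message. The import path foremast.utils.slack is inferred from the path column, a valid SLACK_TOKEN is assumed to be configured for foremast, and the channel, message, username, and emoji below are made up.

from foremast.utils.slack import post_slack_message  # import path inferred

post_slack_message(
    message='unicornforrest deployed to dev',  # hypothetical message text
    channel='#deployments',                    # must start with '#'
    username='foremast-bot',                   # hypothetical bot display name
    icon_emoji=':rocket:')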
6,166
foremast/foremast
src/foremast/dns/destroy_dns/destroy_dns.py
destroy_dns
def destroy_dns(app='', env='dev', **_): """Destroy DNS records. Args: app (str): Spinnaker Application name. env (str): Deployment environment. regions (str): AWS region. Returns: bool: True upon successful completion. """ client = boto3.Session(profile_name=env).client('route53') generated = get_details(app=app, env=env) record = generated.dns_elb() zone_ids = get_dns_zone_ids(env=env, facing='external') for zone_id in zone_ids: record_sets = client.list_resource_record_sets( HostedZoneId=zone_id, StartRecordName=record, StartRecordType='CNAME', MaxItems='1') for found_record in record_sets['ResourceRecordSets']: assert destroy_record(client=client, found_record=found_record, record=record, zone_id=zone_id) return True
python
def destroy_dns(app='', env='dev', **_): """Destroy DNS records. Args: app (str): Spinnaker Application name. env (str): Deployment environment. regions (str): AWS region. Returns: bool: True upon successful completion. """ client = boto3.Session(profile_name=env).client('route53') generated = get_details(app=app, env=env) record = generated.dns_elb() zone_ids = get_dns_zone_ids(env=env, facing='external') for zone_id in zone_ids: record_sets = client.list_resource_record_sets( HostedZoneId=zone_id, StartRecordName=record, StartRecordType='CNAME', MaxItems='1') for found_record in record_sets['ResourceRecordSets']: assert destroy_record(client=client, found_record=found_record, record=record, zone_id=zone_id) return True
[ "def", "destroy_dns", "(", "app", "=", "''", ",", "env", "=", "'dev'", ",", "*", "*", "_", ")", ":", "client", "=", "boto3", ".", "Session", "(", "profile_name", "=", "env", ")", ".", "client", "(", "'route53'", ")", "generated", "=", "get_details", "(", "app", "=", "app", ",", "env", "=", "env", ")", "record", "=", "generated", ".", "dns_elb", "(", ")", "zone_ids", "=", "get_dns_zone_ids", "(", "env", "=", "env", ",", "facing", "=", "'external'", ")", "for", "zone_id", "in", "zone_ids", ":", "record_sets", "=", "client", ".", "list_resource_record_sets", "(", "HostedZoneId", "=", "zone_id", ",", "StartRecordName", "=", "record", ",", "StartRecordType", "=", "'CNAME'", ",", "MaxItems", "=", "'1'", ")", "for", "found_record", "in", "record_sets", "[", "'ResourceRecordSets'", "]", ":", "assert", "destroy_record", "(", "client", "=", "client", ",", "found_record", "=", "found_record", ",", "record", "=", "record", ",", "zone_id", "=", "zone_id", ")", "return", "True" ]
Destroy DNS records. Args: app (str): Spinnaker Application name. env (str): Deployment environment. regions (str): AWS region. Returns: bool: True upon successful completion.
[ "Destroy", "DNS", "records", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/dns/destroy_dns/destroy_dns.py#L27-L52
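A hypothetical invocation of destroy_dns. The import path is inferred from src/foremast/dns/destroy_dns/destroy_dns.py, an AWS named profile matching the environment ('dev') with Route 53 access is assumed, and the application name is invented.

from foremast.dns.destroy_dns.destroy_dns import destroy_dns  # import path inferred

# Removes any external CNAME records matching the app's generated ELB DNS name.
destroy_dns(app='unicornforrest', env='dev')  # returns True on completion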
6,167
foremast/foremast
src/foremast/dns/destroy_dns/destroy_dns.py
destroy_record
def destroy_record(client=None, found_record=None, record='', zone_id=''): """Destroy an individual DNS record. Args: client (botocore.client.Route53): Route 53 boto3 client. found_record (dict): Route 53 record set:: {'Name': 'unicorn.forrest.dev.example.com.', 'ResourceRecords': [{'Value': 'internal-unicornforrest-1777489395.us-east-1.elb.amazonaws.com' }], 'TTL': 60, 'Type': 'CNAME'} record (str): Application DNS record name. e.g. zone_id (str): Route 53 Hosted Zone ID, e.g. /hostedzone/ZSVGJWJ979WQD. Returns: bool: True upon successful completion. """ LOG.debug('Found DNS record: %s', found_record) if found_record['Name'].strip('.') == record: dns_json = get_template(template_file='destroy/destroy_dns.json.j2', record=json.dumps(found_record)) dns_dict = json.loads(dns_json) client.change_resource_record_sets(HostedZoneId=zone_id, ChangeBatch=dns_dict) LOG.info('Destroyed "%s" in %s', found_record['Name'], zone_id) else: LOG.info('DNS record "%s" missing from %s.', record, zone_id) LOG.debug('Found someone else\'s record: %s', found_record['Name']) return True
python
def destroy_record(client=None, found_record=None, record='', zone_id=''): """Destroy an individual DNS record. Args: client (botocore.client.Route53): Route 53 boto3 client. found_record (dict): Route 53 record set:: {'Name': 'unicorn.forrest.dev.example.com.', 'ResourceRecords': [{'Value': 'internal-unicornforrest-1777489395.us-east-1.elb.amazonaws.com' }], 'TTL': 60, 'Type': 'CNAME'} record (str): Application DNS record name. e.g. zone_id (str): Route 53 Hosted Zone ID, e.g. /hostedzone/ZSVGJWJ979WQD. Returns: bool: True upon successful completion. """ LOG.debug('Found DNS record: %s', found_record) if found_record['Name'].strip('.') == record: dns_json = get_template(template_file='destroy/destroy_dns.json.j2', record=json.dumps(found_record)) dns_dict = json.loads(dns_json) client.change_resource_record_sets(HostedZoneId=zone_id, ChangeBatch=dns_dict) LOG.info('Destroyed "%s" in %s', found_record['Name'], zone_id) else: LOG.info('DNS record "%s" missing from %s.', record, zone_id) LOG.debug('Found someone else\'s record: %s', found_record['Name']) return True
[ "def", "destroy_record", "(", "client", "=", "None", ",", "found_record", "=", "None", ",", "record", "=", "''", ",", "zone_id", "=", "''", ")", ":", "LOG", ".", "debug", "(", "'Found DNS record: %s'", ",", "found_record", ")", "if", "found_record", "[", "'Name'", "]", ".", "strip", "(", "'.'", ")", "==", "record", ":", "dns_json", "=", "get_template", "(", "template_file", "=", "'destroy/destroy_dns.json.j2'", ",", "record", "=", "json", ".", "dumps", "(", "found_record", ")", ")", "dns_dict", "=", "json", ".", "loads", "(", "dns_json", ")", "client", ".", "change_resource_record_sets", "(", "HostedZoneId", "=", "zone_id", ",", "ChangeBatch", "=", "dns_dict", ")", "LOG", ".", "info", "(", "'Destroyed \"%s\" in %s'", ",", "found_record", "[", "'Name'", "]", ",", "zone_id", ")", "else", ":", "LOG", ".", "info", "(", "'DNS record \"%s\" missing from %s.'", ",", "record", ",", "zone_id", ")", "LOG", ".", "debug", "(", "'Found someone else\\'s record: %s'", ",", "found_record", "[", "'Name'", "]", ")", "return", "True" ]
Destroy an individual DNS record. Args: client (botocore.client.Route53): Route 53 boto3 client. found_record (dict): Route 53 record set:: {'Name': 'unicorn.forrest.dev.example.com.', 'ResourceRecords': [{'Value': 'internal-unicornforrest-1777489395.us-east-1.elb.amazonaws.com' }], 'TTL': 60, 'Type': 'CNAME'} record (str): Application DNS record name. e.g. zone_id (str): Route 53 Hosted Zone ID, e.g. /hostedzone/ZSVGJWJ979WQD. Returns: bool: True upon successful completion.
[ "Destroy", "an", "individual", "DNS", "record", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/dns/destroy_dns/destroy_dns.py#L55-L88
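A sketch of calling destroy_record directly, reusing the record shape and example values shown in the docstring above. An AWS profile named 'dev' is assumed, the zone ID is the docstring's own example, and the import path is inferred from the dataset's path column.

import boto3

from foremast.dns.destroy_dns.destroy_dns import destroy_record  # import path inferred

client = boto3.Session(profile_name='dev').client('route53')
found_record = {
    'Name': 'unicorn.forrest.dev.example.com.',
    'Type': 'CNAME',
    'TTL': 60,
    'ResourceRecords': [{'Value': 'internal-unicornforrest-1777489395.us-east-1.elb.amazonaws.com'}],
}
destroy_record(
    client=client,
    found_record=found_record,
    record='unicorn.forrest.dev.example.com',  # matched against Name with the trailing dot stripped
    zone_id='/hostedzone/ZSVGJWJ979WQD')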
6,168
foremast/foremast
src/foremast/awslambda/sns_event/sns_event.py
create_sns_event
def create_sns_event(app_name, env, region, rules): """Create SNS lambda event from rules. Args: app_name (str): name of the lambda function env (str): Environment/Account for lambda function region (str): AWS region of the lambda function rules (str): Trigger rules from the settings """ session = boto3.Session(profile_name=env, region_name=region) sns_client = session.client('sns') topic_name = rules.get('topic') lambda_alias_arn = get_lambda_alias_arn(app=app_name, account=env, region=region) topic_arn = get_sns_topic_arn(topic_name=topic_name, account=env, region=region) protocol = 'lambda' statement_id = '{}_sns_{}'.format(app_name, topic_name) principal = 'sns.amazonaws.com' add_lambda_permissions( function=lambda_alias_arn, statement_id=statement_id, action='lambda:InvokeFunction', principal=principal, source_arn=topic_arn, env=env, region=region) sns_client.subscribe(TopicArn=topic_arn, Protocol=protocol, Endpoint=lambda_alias_arn) LOG.debug("SNS Lambda event created") LOG.info("Created SNS event subscription on topic %s", topic_name)
python
def create_sns_event(app_name, env, region, rules): """Create SNS lambda event from rules. Args: app_name (str): name of the lambda function env (str): Environment/Account for lambda function region (str): AWS region of the lambda function rules (str): Trigger rules from the settings """ session = boto3.Session(profile_name=env, region_name=region) sns_client = session.client('sns') topic_name = rules.get('topic') lambda_alias_arn = get_lambda_alias_arn(app=app_name, account=env, region=region) topic_arn = get_sns_topic_arn(topic_name=topic_name, account=env, region=region) protocol = 'lambda' statement_id = '{}_sns_{}'.format(app_name, topic_name) principal = 'sns.amazonaws.com' add_lambda_permissions( function=lambda_alias_arn, statement_id=statement_id, action='lambda:InvokeFunction', principal=principal, source_arn=topic_arn, env=env, region=region) sns_client.subscribe(TopicArn=topic_arn, Protocol=protocol, Endpoint=lambda_alias_arn) LOG.debug("SNS Lambda event created") LOG.info("Created SNS event subscription on topic %s", topic_name)
[ "def", "create_sns_event", "(", "app_name", ",", "env", ",", "region", ",", "rules", ")", ":", "session", "=", "boto3", ".", "Session", "(", "profile_name", "=", "env", ",", "region_name", "=", "region", ")", "sns_client", "=", "session", ".", "client", "(", "'sns'", ")", "topic_name", "=", "rules", ".", "get", "(", "'topic'", ")", "lambda_alias_arn", "=", "get_lambda_alias_arn", "(", "app", "=", "app_name", ",", "account", "=", "env", ",", "region", "=", "region", ")", "topic_arn", "=", "get_sns_topic_arn", "(", "topic_name", "=", "topic_name", ",", "account", "=", "env", ",", "region", "=", "region", ")", "protocol", "=", "'lambda'", "statement_id", "=", "'{}_sns_{}'", ".", "format", "(", "app_name", ",", "topic_name", ")", "principal", "=", "'sns.amazonaws.com'", "add_lambda_permissions", "(", "function", "=", "lambda_alias_arn", ",", "statement_id", "=", "statement_id", ",", "action", "=", "'lambda:InvokeFunction'", ",", "principal", "=", "principal", ",", "source_arn", "=", "topic_arn", ",", "env", "=", "env", ",", "region", "=", "region", ")", "sns_client", ".", "subscribe", "(", "TopicArn", "=", "topic_arn", ",", "Protocol", "=", "protocol", ",", "Endpoint", "=", "lambda_alias_arn", ")", "LOG", ".", "debug", "(", "\"SNS Lambda event created\"", ")", "LOG", ".", "info", "(", "\"Created SNS event subscription on topic %s\"", ",", "topic_name", ")" ]
Create SNS lambda event from rules. Args: app_name (str): name of the lambda function env (str): Environment/Account for lambda function region (str): AWS region of the lambda function rules (str): Trigger rules from the settings
[ "Create", "SNS", "lambda", "event", "from", "rules", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/sns_event/sns_event.py#L27-L58
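A hypothetical rules payload for create_sns_event, based only on the rules.get('topic') lookup visible in the code. The topic, app, and account names are invented, the Lambda alias and SNS topic are assumed to already exist, and the import path is inferred from the path column.

from foremast.awslambda.sns_event.sns_event import create_sns_event  # import path inferred

rules = {'topic': 'example-topic'}  # hypothetical trigger settings entry
create_sns_event(app_name='unicornforrest', env='dev', region='us-east-1', rules=rules)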
6,169
foremast/foremast
src/foremast/awslambda/sns_event/destroy_sns_event/destroy_sns_event.py
destroy_sns_event
def destroy_sns_event(app_name, env, region): """ Destroy all Lambda SNS subscriptions. Args: app_name (str): name of the lambda function env (str): Environment/Account for lambda function region (str): AWS region of the lambda function Returns: boolean: True if subscription destroyed successfully """ session = boto3.Session(profile_name=env, region_name=region) sns_client = session.client('sns') lambda_subscriptions = get_sns_subscriptions(app_name=app_name, env=env, region=region) for subscription_arn in lambda_subscriptions: sns_client.unsubscribe(SubscriptionArn=subscription_arn) LOG.debug("Lambda SNS event deleted") return True
python
def destroy_sns_event(app_name, env, region): """ Destroy all Lambda SNS subscriptions. Args: app_name (str): name of the lambda function env (str): Environment/Account for lambda function region (str): AWS region of the lambda function Returns: boolean: True if subscription destroyed successfully """ session = boto3.Session(profile_name=env, region_name=region) sns_client = session.client('sns') lambda_subscriptions = get_sns_subscriptions(app_name=app_name, env=env, region=region) for subscription_arn in lambda_subscriptions: sns_client.unsubscribe(SubscriptionArn=subscription_arn) LOG.debug("Lambda SNS event deleted") return True
[ "def", "destroy_sns_event", "(", "app_name", ",", "env", ",", "region", ")", ":", "session", "=", "boto3", ".", "Session", "(", "profile_name", "=", "env", ",", "region_name", "=", "region", ")", "sns_client", "=", "session", ".", "client", "(", "'sns'", ")", "lambda_subscriptions", "=", "get_sns_subscriptions", "(", "app_name", "=", "app_name", ",", "env", "=", "env", ",", "region", "=", "region", ")", "for", "subscription_arn", "in", "lambda_subscriptions", ":", "sns_client", ".", "unsubscribe", "(", "SubscriptionArn", "=", "subscription_arn", ")", "LOG", ".", "debug", "(", "\"Lambda SNS event deleted\"", ")", "return", "True" ]
Destroy all Lambda SNS subscriptions. Args: app_name (str): name of the lambda function env (str): Environment/Account for lambda function region (str): AWS region of the lambda function Returns: boolean: True if subscription destroyed successfully
[ "Destroy", "all", "Lambda", "SNS", "subscriptions", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/sns_event/destroy_sns_event/destroy_sns_event.py#L27-L47
6,170
foremast/foremast
src/foremast/elb/destroy_elb/destroy_elb.py
destroy_elb
def destroy_elb(app='', env='dev', region='us-east-1', **_): """Destroy ELB Resources. Args: app (str): Spinnaker Application name. env (str): Deployment environment. region (str): AWS region. Returns: True upon successful completion. """ task_json = get_template( template_file='destroy/destroy_elb.json.j2', app=app, env=env, region=region, vpc=get_vpc_id(account=env, region=region)) wait_for_task(task_json) return True
python
def destroy_elb(app='', env='dev', region='us-east-1', **_): """Destroy ELB Resources. Args: app (str): Spinnaker Application name. env (str): Deployment environment. region (str): AWS region. Returns: True upon successful completion. """ task_json = get_template( template_file='destroy/destroy_elb.json.j2', app=app, env=env, region=region, vpc=get_vpc_id(account=env, region=region)) wait_for_task(task_json) return True
[ "def", "destroy_elb", "(", "app", "=", "''", ",", "env", "=", "'dev'", ",", "region", "=", "'us-east-1'", ",", "*", "*", "_", ")", ":", "task_json", "=", "get_template", "(", "template_file", "=", "'destroy/destroy_elb.json.j2'", ",", "app", "=", "app", ",", "env", "=", "env", ",", "region", "=", "region", ",", "vpc", "=", "get_vpc_id", "(", "account", "=", "env", ",", "region", "=", "region", ")", ")", "wait_for_task", "(", "task_json", ")", "return", "True" ]
Destroy ELB Resources. Args: app (str): Spinnaker Application name. env (str): Deployment environment. region (str): AWS region. Returns: True upon successful completion.
[ "Destroy", "ELB", "Resources", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/elb/destroy_elb/destroy_elb.py#L21-L41
6,171
foremast/foremast
src/foremast/pipeline/clean_pipelines.py
delete_pipeline
def delete_pipeline(app='', pipeline_name=''): """Delete _pipeline_name_ from _app_.""" safe_pipeline_name = normalize_pipeline_name(name=pipeline_name) LOG.warning('Deleting Pipeline: %s', safe_pipeline_name) url = '{host}/pipelines/{app}/{pipeline}'.format(host=API_URL, app=app, pipeline=safe_pipeline_name) response = requests.delete(url, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT) if not response.ok: LOG.debug('Delete response code: %d', response.status_code) if response.status_code == requests.status_codes.codes['method_not_allowed']: raise SpinnakerPipelineDeletionFailed('Failed to delete "{0}" from "{1}", ' 'possibly invalid Pipeline name.'.format(safe_pipeline_name, app)) else: LOG.debug('Pipeline missing, no delete required.') LOG.debug('Deleted "%s" Pipeline response:\n%s', safe_pipeline_name, response.text) return response.text
python
def delete_pipeline(app='', pipeline_name=''): """Delete _pipeline_name_ from _app_.""" safe_pipeline_name = normalize_pipeline_name(name=pipeline_name) LOG.warning('Deleting Pipeline: %s', safe_pipeline_name) url = '{host}/pipelines/{app}/{pipeline}'.format(host=API_URL, app=app, pipeline=safe_pipeline_name) response = requests.delete(url, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT) if not response.ok: LOG.debug('Delete response code: %d', response.status_code) if response.status_code == requests.status_codes.codes['method_not_allowed']: raise SpinnakerPipelineDeletionFailed('Failed to delete "{0}" from "{1}", ' 'possibly invalid Pipeline name.'.format(safe_pipeline_name, app)) else: LOG.debug('Pipeline missing, no delete required.') LOG.debug('Deleted "%s" Pipeline response:\n%s', safe_pipeline_name, response.text) return response.text
[ "def", "delete_pipeline", "(", "app", "=", "''", ",", "pipeline_name", "=", "''", ")", ":", "safe_pipeline_name", "=", "normalize_pipeline_name", "(", "name", "=", "pipeline_name", ")", "LOG", ".", "warning", "(", "'Deleting Pipeline: %s'", ",", "safe_pipeline_name", ")", "url", "=", "'{host}/pipelines/{app}/{pipeline}'", ".", "format", "(", "host", "=", "API_URL", ",", "app", "=", "app", ",", "pipeline", "=", "safe_pipeline_name", ")", "response", "=", "requests", ".", "delete", "(", "url", ",", "verify", "=", "GATE_CA_BUNDLE", ",", "cert", "=", "GATE_CLIENT_CERT", ")", "if", "not", "response", ".", "ok", ":", "LOG", ".", "debug", "(", "'Delete response code: %d'", ",", "response", ".", "status_code", ")", "if", "response", ".", "status_code", "==", "requests", ".", "status_codes", ".", "codes", "[", "'method_not_allowed'", "]", ":", "raise", "SpinnakerPipelineDeletionFailed", "(", "'Failed to delete \"{0}\" from \"{1}\", '", "'possibly invalid Pipeline name.'", ".", "format", "(", "safe_pipeline_name", ",", "app", ")", ")", "else", ":", "LOG", ".", "debug", "(", "'Pipeline missing, no delete required.'", ")", "LOG", ".", "debug", "(", "'Deleted \"%s\" Pipeline response:\\n%s'", ",", "safe_pipeline_name", ",", "response", ".", "text", ")", "return", "response", ".", "text" ]
Delete _pipeline_name_ from _app_.
[ "Delete", "_pipeline_name_", "from", "_app_", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/pipeline/clean_pipelines.py#L28-L47
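A hedged example of delete_pipeline. It assumes Foremast's API_URL, GATE_CA_BUNDLE, and GATE_CLIENT_CERT constants already point at a reachable Spinnaker Gate; the application and pipeline names follow the managed 'app_name [region]' convention but are otherwise made up.

from foremast.pipeline.clean_pipelines import delete_pipeline  # import path inferred

response_text = delete_pipeline(app='unicornforrest', pipeline_name='unicornforrest [us-west-2]')
print(response_text)  # raw Gate response body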
6,172
foremast/foremast
src/foremast/pipeline/clean_pipelines.py
clean_pipelines
def clean_pipelines(app='', settings=None): """Delete Pipelines for regions not defined in application.json files. For Pipelines named **app_name [region]**, _region_ will need to appear in at least one application.json file. All other names are assumed unamanaged. Args: app (str): Application name settings (dict): imported configuration settings Returns: True: Upon successful completion. Raises: SpinnakerPipelineCreationFailed: Missing application.json file from `create-configs`. """ pipelines = get_all_pipelines(app=app) envs = settings['pipeline']['env'] LOG.debug('Find Regions in: %s', envs) regions = set() for env in envs: try: regions.update(settings[env]['regions']) except KeyError: error_msg = 'Missing "{}/application-master-{}.json".'.format(RUNWAY_BASE_PATH, env) raise SpinnakerPipelineCreationFailed(error_msg) LOG.debug('Regions defined: %s', regions) for pipeline in pipelines: pipeline_name = pipeline['name'] try: region = check_managed_pipeline(name=pipeline_name, app_name=app) except ValueError: LOG.info('"%s" is not managed.', pipeline_name) continue LOG.debug('Check "%s" in defined Regions.', region) if region not in regions: delete_pipeline(app=app, pipeline_name=pipeline_name) return True
python
def clean_pipelines(app='', settings=None): """Delete Pipelines for regions not defined in application.json files. For Pipelines named **app_name [region]**, _region_ will need to appear in at least one application.json file. All other names are assumed unamanaged. Args: app (str): Application name settings (dict): imported configuration settings Returns: True: Upon successful completion. Raises: SpinnakerPipelineCreationFailed: Missing application.json file from `create-configs`. """ pipelines = get_all_pipelines(app=app) envs = settings['pipeline']['env'] LOG.debug('Find Regions in: %s', envs) regions = set() for env in envs: try: regions.update(settings[env]['regions']) except KeyError: error_msg = 'Missing "{}/application-master-{}.json".'.format(RUNWAY_BASE_PATH, env) raise SpinnakerPipelineCreationFailed(error_msg) LOG.debug('Regions defined: %s', regions) for pipeline in pipelines: pipeline_name = pipeline['name'] try: region = check_managed_pipeline(name=pipeline_name, app_name=app) except ValueError: LOG.info('"%s" is not managed.', pipeline_name) continue LOG.debug('Check "%s" in defined Regions.', region) if region not in regions: delete_pipeline(app=app, pipeline_name=pipeline_name) return True
[ "def", "clean_pipelines", "(", "app", "=", "''", ",", "settings", "=", "None", ")", ":", "pipelines", "=", "get_all_pipelines", "(", "app", "=", "app", ")", "envs", "=", "settings", "[", "'pipeline'", "]", "[", "'env'", "]", "LOG", ".", "debug", "(", "'Find Regions in: %s'", ",", "envs", ")", "regions", "=", "set", "(", ")", "for", "env", "in", "envs", ":", "try", ":", "regions", ".", "update", "(", "settings", "[", "env", "]", "[", "'regions'", "]", ")", "except", "KeyError", ":", "error_msg", "=", "'Missing \"{}/application-master-{}.json\".'", ".", "format", "(", "RUNWAY_BASE_PATH", ",", "env", ")", "raise", "SpinnakerPipelineCreationFailed", "(", "error_msg", ")", "LOG", ".", "debug", "(", "'Regions defined: %s'", ",", "regions", ")", "for", "pipeline", "in", "pipelines", ":", "pipeline_name", "=", "pipeline", "[", "'name'", "]", "try", ":", "region", "=", "check_managed_pipeline", "(", "name", "=", "pipeline_name", ",", "app_name", "=", "app", ")", "except", "ValueError", ":", "LOG", ".", "info", "(", "'\"%s\" is not managed.'", ",", "pipeline_name", ")", "continue", "LOG", ".", "debug", "(", "'Check \"%s\" in defined Regions.'", ",", "region", ")", "if", "region", "not", "in", "regions", ":", "delete_pipeline", "(", "app", "=", "app", ",", "pipeline_name", "=", "pipeline_name", ")", "return", "True" ]
Delete Pipelines for regions not defined in application.json files. For Pipelines named **app_name [region]**, _region_ will need to appear in at least one application.json file. All other names are assumed unamanaged. Args: app (str): Application name settings (dict): imported configuration settings Returns: True: Upon successful completion. Raises: SpinnakerPipelineCreationFailed: Missing application.json file from `create-configs`.
[ "Delete", "Pipelines", "for", "regions", "not", "defined", "in", "application", ".", "json", "files", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/pipeline/clean_pipelines.py#L50-L96
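A sketch of the settings structure clean_pipelines appears to expect, inferred from its settings['pipeline']['env'] and settings[env]['regions'] lookups; the environments and regions below are illustrative, and a reachable Spinnaker Gate is assumed.

from foremast.pipeline.clean_pipelines import clean_pipelines  # import path inferred

settings = {
    'pipeline': {'env': ['dev', 'stage']},
    'dev': {'regions': ['us-east-1']},
    'stage': {'regions': ['us-east-1', 'us-west-2']},
}
# Managed pipelines named for regions outside the set above would be deleted.
clean_pipelines(app='unicornforrest', settings=settings)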
6,173
foremast/foremast
src/foremast/consts.py
extract_formats
def extract_formats(config_handle): """Get application formats. See :class:`gogoutils.Formats` for available options. Args: config_handle (configparser.ConfigParser): Instance of configurations. Returns: dict: Formats in ``{$format_type: $format_pattern}``. """ configurations = dict(config_handle) formats = dict(configurations.get('formats', {})) return formats
python
def extract_formats(config_handle): """Get application formats. See :class:`gogoutils.Formats` for available options. Args: config_handle (configparser.ConfigParser): Instance of configurations. Returns: dict: Formats in ``{$format_type: $format_pattern}``. """ configurations = dict(config_handle) formats = dict(configurations.get('formats', {})) return formats
[ "def", "extract_formats", "(", "config_handle", ")", ":", "configurations", "=", "dict", "(", "config_handle", ")", "formats", "=", "dict", "(", "configurations", ".", "get", "(", "'formats'", ",", "{", "}", ")", ")", "return", "formats" ]
Get application formats. See :class:`gogoutils.Formats` for available options. Args: config_handle (configparser.ConfigParser): Instance of configurations. Returns: dict: Formats in ``{$format_type: $format_pattern}``.
[ "Get", "application", "formats", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/consts.py#L77-L91
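A minimal sketch of extract_formats with an in-memory configparser instance. The section name 'formats' comes from the code above; the keys and values are illustrative, and the import path foremast.consts is inferred from src/foremast/consts.py.

import configparser

from foremast.consts import extract_formats  # import path inferred

config = configparser.ConfigParser()
config.read_string("""
[formats]
domain = example.com
app = {project}{repo}
""")
print(extract_formats(config))  # {'domain': 'example.com', 'app': '{project}{repo}'}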
6,174
foremast/foremast
src/foremast/consts.py
load_dynamic_config
def load_dynamic_config(config_file=DEFAULT_DYNAMIC_CONFIG_FILE): """Load and parse dynamic config""" dynamic_configurations = {} # Insert config path so we can import it sys.path.insert(0, path.dirname(path.abspath(config_file))) try: config_module = __import__('config') dynamic_configurations = config_module.CONFIG except ImportError: # Provide a default if config not found LOG.error('ImportError: Unable to load dynamic config. Check config.py file imports!') return dynamic_configurations
python
def load_dynamic_config(config_file=DEFAULT_DYNAMIC_CONFIG_FILE): """Load and parse dynamic config""" dynamic_configurations = {} # Insert config path so we can import it sys.path.insert(0, path.dirname(path.abspath(config_file))) try: config_module = __import__('config') dynamic_configurations = config_module.CONFIG except ImportError: # Provide a default if config not found LOG.error('ImportError: Unable to load dynamic config. Check config.py file imports!') return dynamic_configurations
[ "def", "load_dynamic_config", "(", "config_file", "=", "DEFAULT_DYNAMIC_CONFIG_FILE", ")", ":", "dynamic_configurations", "=", "{", "}", "# Insert config path so we can import it", "sys", ".", "path", ".", "insert", "(", "0", ",", "path", ".", "dirname", "(", "path", ".", "abspath", "(", "config_file", ")", ")", ")", "try", ":", "config_module", "=", "__import__", "(", "'config'", ")", "dynamic_configurations", "=", "config_module", ".", "CONFIG", "except", "ImportError", ":", "# Provide a default if config not found", "LOG", ".", "error", "(", "'ImportError: Unable to load dynamic config. Check config.py file imports!'", ")", "return", "dynamic_configurations" ]
Load and parse dynamic config
[ "Load", "and", "parse", "dynamic", "config" ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/consts.py#L94-L108
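A hypothetical pairing of load_dynamic_config with a config module. The function inserts the directory of config_file onto sys.path and imports a module literally named config, so the CONFIG dict below is what such a file might contain; the path and keys are made up.

# --- contents of an illustrative /path/to/config.py ---
CONFIG = {
    'base': {
        'domain': 'example.com',
        'envs': 'dev,stage,prod',
    },
}

# --- caller side ---
from foremast.consts import load_dynamic_config  # import path inferred

dynamic = load_dynamic_config(config_file='/path/to/config.py')
print(dynamic.get('base', {}).get('domain'))  # 'example.com' if the import succeeds, otherwise the dict is empty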
6,175
foremast/foremast
src/foremast/consts.py
_remove_empty_entries
def _remove_empty_entries(entries): """Remove empty entries in a list""" valid_entries = [] for entry in set(entries): if entry: valid_entries.append(entry) return sorted(valid_entries)
python
def _remove_empty_entries(entries): """Remove empty entries in a list""" valid_entries = [] for entry in set(entries): if entry: valid_entries.append(entry) return sorted(valid_entries)
[ "def", "_remove_empty_entries", "(", "entries", ")", ":", "valid_entries", "=", "[", "]", "for", "entry", "in", "set", "(", "entries", ")", ":", "if", "entry", ":", "valid_entries", ".", "append", "(", "entry", ")", "return", "sorted", "(", "valid_entries", ")" ]
Remove empty entries in a list
[ "Remove", "empty", "entries", "in", "a", "list" ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/consts.py#L144-L150
6,176
foremast/foremast
src/foremast/consts.py
_convert_string_to_native
def _convert_string_to_native(value): """Convert a string to its native python type""" result = None try: result = ast.literal_eval(str(value)) except (SyntaxError, ValueError): # Likely a string result = value.split(',') return result
python
def _convert_string_to_native(value): """Convert a string to its native python type""" result = None try: result = ast.literal_eval(str(value)) except (SyntaxError, ValueError): # Likely a string result = value.split(',') return result
[ "def", "_convert_string_to_native", "(", "value", ")", ":", "result", "=", "None", "try", ":", "result", "=", "ast", ".", "literal_eval", "(", "str", "(", "value", ")", ")", "except", "(", "SyntaxError", ",", "ValueError", ")", ":", "# Likely a string", "result", "=", "value", ".", "split", "(", "','", ")", "return", "result" ]
Convert a string to its native python type
[ "Convert", "a", "string", "to", "its", "native", "python", "type" ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/consts.py#L153-L162
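A standalone sketch of the parsing behaviour _convert_string_to_native implements: ast.literal_eval handles native Python literals, and anything it rejects falls back to a comma split. The helper below mirrors the private function rather than importing it.

import ast

def convert_string_to_native(value):
    """Mirror of the private helper above, for illustration only."""
    try:
        return ast.literal_eval(str(value))
    except (SyntaxError, ValueError):
        return value.split(',')

print(convert_string_to_native("['sg_one', 'sg_two']"))  # -> ['sg_one', 'sg_two'] via literal_eval
print(convert_string_to_native('sg_one,sg_two'))         # -> ['sg_one', 'sg_two'] via the comma split
print(convert_string_to_native('42'))                    # -> 42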
6,177
foremast/foremast
src/foremast/consts.py
_generate_security_groups
def _generate_security_groups(config_key): """Read config file and generate security group dict by environment. Args: config_key (str): Configuration file key Returns: dict: of environments in {'env1': ['group1', 'group2']} format """ raw_default_groups = validate_key_values(CONFIG, 'base', config_key, default='') default_groups = _convert_string_to_native(raw_default_groups) LOG.debug('Default security group for %s is %s', config_key, default_groups) entries = {} for env in ENVS: entries[env] = [] if isinstance(default_groups, (list)): groups = _remove_empty_entries(default_groups) for env in entries: entries[env] = groups elif isinstance(default_groups, (dict)): entries.update(default_groups) LOG.debug('Generated security group: %s', entries) return entries
python
def _generate_security_groups(config_key): """Read config file and generate security group dict by environment. Args: config_key (str): Configuration file key Returns: dict: of environments in {'env1': ['group1', 'group2']} format """ raw_default_groups = validate_key_values(CONFIG, 'base', config_key, default='') default_groups = _convert_string_to_native(raw_default_groups) LOG.debug('Default security group for %s is %s', config_key, default_groups) entries = {} for env in ENVS: entries[env] = [] if isinstance(default_groups, (list)): groups = _remove_empty_entries(default_groups) for env in entries: entries[env] = groups elif isinstance(default_groups, (dict)): entries.update(default_groups) LOG.debug('Generated security group: %s', entries) return entries
[ "def", "_generate_security_groups", "(", "config_key", ")", ":", "raw_default_groups", "=", "validate_key_values", "(", "CONFIG", ",", "'base'", ",", "config_key", ",", "default", "=", "''", ")", "default_groups", "=", "_convert_string_to_native", "(", "raw_default_groups", ")", "LOG", ".", "debug", "(", "'Default security group for %s is %s'", ",", "config_key", ",", "default_groups", ")", "entries", "=", "{", "}", "for", "env", "in", "ENVS", ":", "entries", "[", "env", "]", "=", "[", "]", "if", "isinstance", "(", "default_groups", ",", "(", "list", ")", ")", ":", "groups", "=", "_remove_empty_entries", "(", "default_groups", ")", "for", "env", "in", "entries", ":", "entries", "[", "env", "]", "=", "groups", "elif", "isinstance", "(", "default_groups", ",", "(", "dict", ")", ")", ":", "entries", ".", "update", "(", "default_groups", ")", "LOG", ".", "debug", "(", "'Generated security group: %s'", ",", "entries", ")", "return", "entries" ]
Read config file and generate security group dict by environment. Args: config_key (str): Configuration file key Returns: dict: of environments in {'env1': ['group1', 'group2']} format
[ "Read", "config", "file", "and", "generate", "security", "group", "dict", "by", "environment", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/consts.py#L165-L190
6,178
foremast/foremast
src/foremast/datapipeline/datapipeline.py
AWSDataPipeline.create_datapipeline
def create_datapipeline(self): """Creates the data pipeline if it does not already exist Returns: dict: the response of the Boto3 command """ tags = [{"key": "app_group", "value": self.group}, {"key": "app_name", "value": self.app_name}] response = self.client.create_pipeline( name=self.datapipeline_data.get('name', self.app_name), uniqueId=self.app_name, description=self.datapipeline_data['description'], tags=tags) self.pipeline_id = response.get('pipelineId') LOG.debug(response) LOG.info("Successfully configured Data Pipeline - %s", self.app_name) return response
python
def create_datapipeline(self): """Creates the data pipeline if it does not already exist Returns: dict: the response of the Boto3 command """ tags = [{"key": "app_group", "value": self.group}, {"key": "app_name", "value": self.app_name}] response = self.client.create_pipeline( name=self.datapipeline_data.get('name', self.app_name), uniqueId=self.app_name, description=self.datapipeline_data['description'], tags=tags) self.pipeline_id = response.get('pipelineId') LOG.debug(response) LOG.info("Successfully configured Data Pipeline - %s", self.app_name) return response
[ "def", "create_datapipeline", "(", "self", ")", ":", "tags", "=", "[", "{", "\"key\"", ":", "\"app_group\"", ",", "\"value\"", ":", "self", ".", "group", "}", ",", "{", "\"key\"", ":", "\"app_name\"", ",", "\"value\"", ":", "self", ".", "app_name", "}", "]", "response", "=", "self", ".", "client", ".", "create_pipeline", "(", "name", "=", "self", ".", "datapipeline_data", ".", "get", "(", "'name'", ",", "self", ".", "app_name", ")", ",", "uniqueId", "=", "self", ".", "app_name", ",", "description", "=", "self", ".", "datapipeline_data", "[", "'description'", "]", ",", "tags", "=", "tags", ")", "self", ".", "pipeline_id", "=", "response", ".", "get", "(", "'pipelineId'", ")", "LOG", ".", "debug", "(", "response", ")", "LOG", ".", "info", "(", "\"Successfully configured Data Pipeline - %s\"", ",", "self", ".", "app_name", ")", "return", "response" ]
Creates the data pipeline if it does not already exist Returns: dict: the response of the Boto3 command
[ "Creates", "the", "data", "pipeline", "if", "it", "does", "not", "already", "exist" ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/datapipeline/datapipeline.py#L53-L70
6,179
foremast/foremast
src/foremast/datapipeline/datapipeline.py
AWSDataPipeline.set_pipeline_definition
def set_pipeline_definition(self): """Translates the json definition and puts it on created pipeline Returns: dict: the response of the Boto3 command """ if not self.pipeline_id: self.get_pipeline_id() json_def = self.datapipeline_data['json_definition'] try: pipelineobjects = translator.definition_to_api_objects(json_def) parameterobjects = translator.definition_to_api_parameters(json_def) parametervalues = translator.definition_to_parameter_values(json_def) except translator.PipelineDefinitionError as error: LOG.warning(error) raise DataPipelineDefinitionError response = self.client.put_pipeline_definition( pipelineId=self.pipeline_id, pipelineObjects=pipelineobjects, parameterObjects=parameterobjects, parameterValues=parametervalues) LOG.debug(response) LOG.info("Successfully applied pipeline definition") return response
python
def set_pipeline_definition(self): """Translates the json definition and puts it on created pipeline Returns: dict: the response of the Boto3 command """ if not self.pipeline_id: self.get_pipeline_id() json_def = self.datapipeline_data['json_definition'] try: pipelineobjects = translator.definition_to_api_objects(json_def) parameterobjects = translator.definition_to_api_parameters(json_def) parametervalues = translator.definition_to_parameter_values(json_def) except translator.PipelineDefinitionError as error: LOG.warning(error) raise DataPipelineDefinitionError response = self.client.put_pipeline_definition( pipelineId=self.pipeline_id, pipelineObjects=pipelineobjects, parameterObjects=parameterobjects, parameterValues=parametervalues) LOG.debug(response) LOG.info("Successfully applied pipeline definition") return response
[ "def", "set_pipeline_definition", "(", "self", ")", ":", "if", "not", "self", ".", "pipeline_id", ":", "self", ".", "get_pipeline_id", "(", ")", "json_def", "=", "self", ".", "datapipeline_data", "[", "'json_definition'", "]", "try", ":", "pipelineobjects", "=", "translator", ".", "definition_to_api_objects", "(", "json_def", ")", "parameterobjects", "=", "translator", ".", "definition_to_api_parameters", "(", "json_def", ")", "parametervalues", "=", "translator", ".", "definition_to_parameter_values", "(", "json_def", ")", "except", "translator", ".", "PipelineDefinitionError", "as", "error", ":", "LOG", ".", "warning", "(", "error", ")", "raise", "DataPipelineDefinitionError", "response", "=", "self", ".", "client", ".", "put_pipeline_definition", "(", "pipelineId", "=", "self", ".", "pipeline_id", ",", "pipelineObjects", "=", "pipelineobjects", ",", "parameterObjects", "=", "parameterobjects", ",", "parameterValues", "=", "parametervalues", ")", "LOG", ".", "debug", "(", "response", ")", "LOG", ".", "info", "(", "\"Successfully applied pipeline definition\"", ")", "return", "response" ]
Translates the json definition and puts it on created pipeline Returns: dict: the response of the Boto3 command
[ "Translates", "the", "json", "definition", "and", "puts", "it", "on", "created", "pipeline" ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/datapipeline/datapipeline.py#L72-L98
6,180
foremast/foremast
src/foremast/datapipeline/datapipeline.py
AWSDataPipeline.get_pipeline_id
def get_pipeline_id(self): """Finds the pipeline ID for configured pipeline""" all_pipelines = [] paginiator = self.client.get_paginator('list_pipelines') for page in paginiator.paginate(): all_pipelines.extend(page['pipelineIdList']) for pipeline in all_pipelines: if pipeline['name'] == self.datapipeline_data.get('name', self.app_name): self.pipeline_id = pipeline['id'] LOG.info("Pipeline ID Found") return LOG.info("Pipeline ID Not Found for %s", self.app_name)
python
def get_pipeline_id(self): """Finds the pipeline ID for configured pipeline""" all_pipelines = [] paginiator = self.client.get_paginator('list_pipelines') for page in paginiator.paginate(): all_pipelines.extend(page['pipelineIdList']) for pipeline in all_pipelines: if pipeline['name'] == self.datapipeline_data.get('name', self.app_name): self.pipeline_id = pipeline['id'] LOG.info("Pipeline ID Found") return LOG.info("Pipeline ID Not Found for %s", self.app_name)
[ "def", "get_pipeline_id", "(", "self", ")", ":", "all_pipelines", "=", "[", "]", "paginiator", "=", "self", ".", "client", ".", "get_paginator", "(", "'list_pipelines'", ")", "for", "page", "in", "paginiator", ".", "paginate", "(", ")", ":", "all_pipelines", ".", "extend", "(", "page", "[", "'pipelineIdList'", "]", ")", "for", "pipeline", "in", "all_pipelines", ":", "if", "pipeline", "[", "'name'", "]", "==", "self", ".", "datapipeline_data", ".", "get", "(", "'name'", ",", "self", ".", "app_name", ")", ":", "self", ".", "pipeline_id", "=", "pipeline", "[", "'id'", "]", "LOG", ".", "info", "(", "\"Pipeline ID Found\"", ")", "return", "LOG", ".", "info", "(", "\"Pipeline ID Not Found for %s\"", ",", "self", ".", "app_name", ")" ]
Finds the pipeline ID for configured pipeline
[ "Finds", "the", "pipeline", "ID", "for", "configured", "pipeline" ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/datapipeline/datapipeline.py#L100-L113
6,181
foremast/foremast
src/foremast/datapipeline/datapipeline.py
AWSDataPipeline.activate_pipeline
def activate_pipeline(self): """Activates a deployed pipeline, useful for OnDemand pipelines""" self.client.activate_pipeline(pipelineId=self.pipeline_id) LOG.info("Activated Pipeline %s", self.pipeline_id)
python
def activate_pipeline(self): """Activates a deployed pipeline, useful for OnDemand pipelines""" self.client.activate_pipeline(pipelineId=self.pipeline_id) LOG.info("Activated Pipeline %s", self.pipeline_id)
[ "def", "activate_pipeline", "(", "self", ")", ":", "self", ".", "client", ".", "activate_pipeline", "(", "pipelineId", "=", "self", ".", "pipeline_id", ")", "LOG", ".", "info", "(", "\"Activated Pipeline %s\"", ",", "self", ".", "pipeline_id", ")" ]
Activates a deployed pipeline, useful for OnDemand pipelines
[ "Activates", "a", "deployed", "pipeline", "useful", "for", "OnDemand", "pipelines" ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/datapipeline/datapipeline.py#L115-L118
6,182
foremast/foremast
src/foremast/utils/dns.py
get_dns_zone_ids
def get_dns_zone_ids(env='dev', facing='internal'): """Get Route 53 Hosted Zone IDs for _env_. Args: env (str): Deployment environment. facing (str): Type of ELB, external or internal. Returns: list: Hosted Zone IDs for _env_. Only *PrivateZone* when _facing_ is internal. """ client = boto3.Session(profile_name=env).client('route53') zones = client.list_hosted_zones_by_name(DNSName='.'.join([env, DOMAIN])) zone_ids = [] for zone in zones['HostedZones']: LOG.debug('Found Hosted Zone: %s', zone) if facing == 'external' or zone['Config']['PrivateZone']: LOG.info('Using %(Id)s for "%(Name)s", %(Config)s', zone) zone_ids.append(zone['Id']) LOG.debug('Zone IDs: %s', zone_ids) return zone_ids
python
def get_dns_zone_ids(env='dev', facing='internal'): """Get Route 53 Hosted Zone IDs for _env_. Args: env (str): Deployment environment. facing (str): Type of ELB, external or internal. Returns: list: Hosted Zone IDs for _env_. Only *PrivateZone* when _facing_ is internal. """ client = boto3.Session(profile_name=env).client('route53') zones = client.list_hosted_zones_by_name(DNSName='.'.join([env, DOMAIN])) zone_ids = [] for zone in zones['HostedZones']: LOG.debug('Found Hosted Zone: %s', zone) if facing == 'external' or zone['Config']['PrivateZone']: LOG.info('Using %(Id)s for "%(Name)s", %(Config)s', zone) zone_ids.append(zone['Id']) LOG.debug('Zone IDs: %s', zone_ids) return zone_ids
[ "def", "get_dns_zone_ids", "(", "env", "=", "'dev'", ",", "facing", "=", "'internal'", ")", ":", "client", "=", "boto3", ".", "Session", "(", "profile_name", "=", "env", ")", ".", "client", "(", "'route53'", ")", "zones", "=", "client", ".", "list_hosted_zones_by_name", "(", "DNSName", "=", "'.'", ".", "join", "(", "[", "env", ",", "DOMAIN", "]", ")", ")", "zone_ids", "=", "[", "]", "for", "zone", "in", "zones", "[", "'HostedZones'", "]", ":", "LOG", ".", "debug", "(", "'Found Hosted Zone: %s'", ",", "zone", ")", "if", "facing", "==", "'external'", "or", "zone", "[", "'Config'", "]", "[", "'PrivateZone'", "]", ":", "LOG", ".", "info", "(", "'Using %(Id)s for \"%(Name)s\", %(Config)s'", ",", "zone", ")", "zone_ids", ".", "append", "(", "zone", "[", "'Id'", "]", ")", "LOG", ".", "debug", "(", "'Zone IDs: %s'", ",", "zone_ids", ")", "return", "zone_ids" ]
Get Route 53 Hosted Zone IDs for _env_. Args: env (str): Deployment environment. facing (str): Type of ELB, external or internal. Returns: list: Hosted Zone IDs for _env_. Only *PrivateZone* when _facing_ is internal.
[ "Get", "Route", "53", "Hosted", "Zone", "IDs", "for", "_env_", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/dns.py#L31-L56
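A hypothetical lookup with get_dns_zone_ids. It assumes an AWS profile named 'dev', Route 53 zones named after env.DOMAIN as in the code above, and an import path inferred from src/foremast/utils/dns.py.

from foremast.utils.dns import get_dns_zone_ids  # import path inferred

internal_zone_ids = get_dns_zone_ids(env='dev', facing='internal')   # PrivateZone entries only
external_zone_ids = get_dns_zone_ids(env='dev', facing='external')   # every matching zone
print(internal_zone_ids, external_zone_ids)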
6,183
foremast/foremast
src/foremast/utils/dns.py
update_dns_zone_record
def update_dns_zone_record(env, zone_id, **kwargs): """Create a Route53 CNAME record in _env_ zone. Args: env (str): Deployment environment. zone_id (str): Route53 zone id. Keyword Args: dns_name (str): FQDN of application's dns entry to add/update. dns_name_aws (str): FQDN of AWS resource dns_ttl (int): DNS time-to-live (ttl) """ client = boto3.Session(profile_name=env).client('route53') response = {} hosted_zone_info = client.get_hosted_zone(Id=zone_id) zone_name = hosted_zone_info['HostedZone']['Name'].rstrip('.') dns_name = kwargs.get('dns_name') if dns_name and dns_name.endswith(zone_name): dns_name_aws = kwargs.get('dns_name_aws') # This is what will be added to DNS dns_json = get_template(template_file='infrastructure/dns_upsert.json.j2', **kwargs) LOG.info('Attempting to create DNS record %s (%s) in Hosted Zone %s (%s)', dns_name, dns_name_aws, zone_id, zone_name) try: response = client.change_resource_record_sets( HostedZoneId=zone_id, ChangeBatch=json.loads(dns_json), ) LOG.info('Upserted DNS record %s (%s) in Hosted Zone %s (%s)', dns_name, dns_name_aws, zone_id, zone_name) except botocore.exceptions.ClientError as error: LOG.info('Error creating DNS record %s (%s) in Hosted Zone %s (%s)', dns_name, dns_name_aws, zone_id, zone_name) LOG.debug(error) else: LOG.info('Skipping creating DNS record %s in non-matching Hosted Zone %s (%s)', dns_name, zone_id, zone_name) LOG.debug('Route53 JSON Response: \n%s', pformat(response))
python
def update_dns_zone_record(env, zone_id, **kwargs): """Create a Route53 CNAME record in _env_ zone. Args: env (str): Deployment environment. zone_id (str): Route53 zone id. Keyword Args: dns_name (str): FQDN of application's dns entry to add/update. dns_name_aws (str): FQDN of AWS resource dns_ttl (int): DNS time-to-live (ttl) """ client = boto3.Session(profile_name=env).client('route53') response = {} hosted_zone_info = client.get_hosted_zone(Id=zone_id) zone_name = hosted_zone_info['HostedZone']['Name'].rstrip('.') dns_name = kwargs.get('dns_name') if dns_name and dns_name.endswith(zone_name): dns_name_aws = kwargs.get('dns_name_aws') # This is what will be added to DNS dns_json = get_template(template_file='infrastructure/dns_upsert.json.j2', **kwargs) LOG.info('Attempting to create DNS record %s (%s) in Hosted Zone %s (%s)', dns_name, dns_name_aws, zone_id, zone_name) try: response = client.change_resource_record_sets( HostedZoneId=zone_id, ChangeBatch=json.loads(dns_json), ) LOG.info('Upserted DNS record %s (%s) in Hosted Zone %s (%s)', dns_name, dns_name_aws, zone_id, zone_name) except botocore.exceptions.ClientError as error: LOG.info('Error creating DNS record %s (%s) in Hosted Zone %s (%s)', dns_name, dns_name_aws, zone_id, zone_name) LOG.debug(error) else: LOG.info('Skipping creating DNS record %s in non-matching Hosted Zone %s (%s)', dns_name, zone_id, zone_name) LOG.debug('Route53 JSON Response: \n%s', pformat(response))
[ "def", "update_dns_zone_record", "(", "env", ",", "zone_id", ",", "*", "*", "kwargs", ")", ":", "client", "=", "boto3", ".", "Session", "(", "profile_name", "=", "env", ")", ".", "client", "(", "'route53'", ")", "response", "=", "{", "}", "hosted_zone_info", "=", "client", ".", "get_hosted_zone", "(", "Id", "=", "zone_id", ")", "zone_name", "=", "hosted_zone_info", "[", "'HostedZone'", "]", "[", "'Name'", "]", ".", "rstrip", "(", "'.'", ")", "dns_name", "=", "kwargs", ".", "get", "(", "'dns_name'", ")", "if", "dns_name", "and", "dns_name", ".", "endswith", "(", "zone_name", ")", ":", "dns_name_aws", "=", "kwargs", ".", "get", "(", "'dns_name_aws'", ")", "# This is what will be added to DNS", "dns_json", "=", "get_template", "(", "template_file", "=", "'infrastructure/dns_upsert.json.j2'", ",", "*", "*", "kwargs", ")", "LOG", ".", "info", "(", "'Attempting to create DNS record %s (%s) in Hosted Zone %s (%s)'", ",", "dns_name", ",", "dns_name_aws", ",", "zone_id", ",", "zone_name", ")", "try", ":", "response", "=", "client", ".", "change_resource_record_sets", "(", "HostedZoneId", "=", "zone_id", ",", "ChangeBatch", "=", "json", ".", "loads", "(", "dns_json", ")", ",", ")", "LOG", ".", "info", "(", "'Upserted DNS record %s (%s) in Hosted Zone %s (%s)'", ",", "dns_name", ",", "dns_name_aws", ",", "zone_id", ",", "zone_name", ")", "except", "botocore", ".", "exceptions", ".", "ClientError", "as", "error", ":", "LOG", ".", "info", "(", "'Error creating DNS record %s (%s) in Hosted Zone %s (%s)'", ",", "dns_name", ",", "dns_name_aws", ",", "zone_id", ",", "zone_name", ")", "LOG", ".", "debug", "(", "error", ")", "else", ":", "LOG", ".", "info", "(", "'Skipping creating DNS record %s in non-matching Hosted Zone %s (%s)'", ",", "dns_name", ",", "zone_id", ",", "zone_name", ")", "LOG", ".", "debug", "(", "'Route53 JSON Response: \\n%s'", ",", "pformat", "(", "response", ")", ")" ]
Create a Route53 CNAME record in _env_ zone. Args: env (str): Deployment environment. zone_id (str): Route53 zone id. Keyword Args: dns_name (str): FQDN of application's dns entry to add/update. dns_name_aws (str): FQDN of AWS resource dns_ttl (int): DNS time-to-live (ttl)
[ "Create", "a", "Route53", "CNAME", "record", "in", "_env_", "zone", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/dns.py#L59-L96
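A minimal usage sketch for the record above, assuming the function is importable as `foremast.utils.dns.update_dns_zone_record` (inferred from the record's path field); the profile name, hosted zone id, and DNS names are placeholders, and any extra keys required by the `dns_upsert.json.j2` template are omitted.

```python
# Sketch: upsert a CNAME for an app in a matching Route53 hosted zone.
# Requires AWS credentials for the named boto3 profile; all values are placeholders.
from foremast.utils.dns import update_dns_zone_record

update_dns_zone_record(
    'dev',                                   # env: boto3 profile name
    'Z1EXAMPLE',                             # zone_id: Route53 hosted zone id
    dns_name='myapp.dev.example.com',        # must end with the zone name or the call is skipped
    dns_name_aws='internal-myapp-123.us-east-1.elb.amazonaws.com',
    dns_ttl=60,
)
```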
6,184
foremast/foremast
src/foremast/utils/dns.py
find_existing_record
def find_existing_record(env, zone_id, dns_name, check_key=None, check_value=None): """Check if a specific DNS record exists. Args: env (str): Deployment environment. zone_id (str): Route53 zone id. dns_name (str): FQDN of application's dns entry to add/update. check_key(str): Key to look for in record. Example: "Type" check_value(str): Value to look for with check_key. Example: "CNAME" Returns: json: Found Record. Returns None if no record found """ client = boto3.Session(profile_name=env).client('route53') pager = client.get_paginator('list_resource_record_sets') existingrecord = None for rset in pager.paginate(HostedZoneId=zone_id): for record in rset['ResourceRecordSets']: if check_key: if record['Name'].rstrip('.') == dns_name and record.get(check_key) == check_value: LOG.info("Found existing record: %s", record) existingrecord = record break return existingrecord
python
def find_existing_record(env, zone_id, dns_name, check_key=None, check_value=None): """Check if a specific DNS record exists. Args: env (str): Deployment environment. zone_id (str): Route53 zone id. dns_name (str): FQDN of application's dns entry to add/update. check_key(str): Key to look for in record. Example: "Type" check_value(str): Value to look for with check_key. Example: "CNAME" Returns: json: Found Record. Returns None if no record found """ client = boto3.Session(profile_name=env).client('route53') pager = client.get_paginator('list_resource_record_sets') existingrecord = None for rset in pager.paginate(HostedZoneId=zone_id): for record in rset['ResourceRecordSets']: if check_key: if record['Name'].rstrip('.') == dns_name and record.get(check_key) == check_value: LOG.info("Found existing record: %s", record) existingrecord = record break return existingrecord
[ "def", "find_existing_record", "(", "env", ",", "zone_id", ",", "dns_name", ",", "check_key", "=", "None", ",", "check_value", "=", "None", ")", ":", "client", "=", "boto3", ".", "Session", "(", "profile_name", "=", "env", ")", ".", "client", "(", "'route53'", ")", "pager", "=", "client", ".", "get_paginator", "(", "'list_resource_record_sets'", ")", "existingrecord", "=", "None", "for", "rset", "in", "pager", ".", "paginate", "(", "HostedZoneId", "=", "zone_id", ")", ":", "for", "record", "in", "rset", "[", "'ResourceRecordSets'", "]", ":", "if", "check_key", ":", "if", "record", "[", "'Name'", "]", ".", "rstrip", "(", "'.'", ")", "==", "dns_name", "and", "record", ".", "get", "(", "check_key", ")", "==", "check_value", ":", "LOG", ".", "info", "(", "\"Found existing record: %s\"", ",", "record", ")", "existingrecord", "=", "record", "break", "return", "existingrecord" ]
Check if a specific DNS record exists. Args: env (str): Deployment environment. zone_id (str): Route53 zone id. dns_name (str): FQDN of application's dns entry to add/update. check_key(str): Key to look for in record. Example: "Type" check_value(str): Value to look for with check_key. Example: "CNAME" Returns: json: Found Record. Returns None if no record found
[ "Check", "if", "a", "specific", "DNS", "record", "exists", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/dns.py#L99-L123
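A short sketch of how the lookup above is typically used, assuming the same `foremast.utils.dns` module path; note that when `check_key` is left as `None` the inner guard never matches, so callers pass both `check_key` and `check_value`. The zone id and record name are placeholders.

```python
from foremast.utils.dns import find_existing_record

# Look for an existing CNAME before deciding whether DNS needs to change.
record = find_existing_record('dev', 'Z1EXAMPLE', 'myapp.dev.example.com',
                              check_key='Type', check_value='CNAME')
if record is None:
    print('no matching CNAME found')
else:
    print('found record:', record['Name'], record['Type'])
```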
6,185
foremast/foremast
src/foremast/utils/dns.py
delete_existing_cname
def delete_existing_cname(env, zone_id, dns_name): """Delete an existing CNAME record. This is used when updating to multi-region for deleting old records. The record can not just be upserted since it changes types. Args: env (str): Deployment environment. zone_id (str): Route53 zone id. dns_name (str): FQDN of application's dns entry to add/update. """ client = boto3.Session(profile_name=env).client('route53') startrecord = None newrecord_name = dns_name startrecord = find_existing_record(env, zone_id, newrecord_name, check_key='Type', check_value='CNAME') if startrecord: LOG.info("Deleting old record: %s", newrecord_name) _response = client.change_resource_record_sets( HostedZoneId=zone_id, ChangeBatch={'Changes': [{ 'Action': 'DELETE', 'ResourceRecordSet': startrecord }]}) LOG.debug('Response from deleting %s: %s', dns_name, _response)
python
def delete_existing_cname(env, zone_id, dns_name): """Delete an existing CNAME record. This is used when updating to multi-region for deleting old records. The record can not just be upserted since it changes types. Args: env (str): Deployment environment. zone_id (str): Route53 zone id. dns_name (str): FQDN of application's dns entry to add/update. """ client = boto3.Session(profile_name=env).client('route53') startrecord = None newrecord_name = dns_name startrecord = find_existing_record(env, zone_id, newrecord_name, check_key='Type', check_value='CNAME') if startrecord: LOG.info("Deleting old record: %s", newrecord_name) _response = client.change_resource_record_sets( HostedZoneId=zone_id, ChangeBatch={'Changes': [{ 'Action': 'DELETE', 'ResourceRecordSet': startrecord }]}) LOG.debug('Response from deleting %s: %s', dns_name, _response)
[ "def", "delete_existing_cname", "(", "env", ",", "zone_id", ",", "dns_name", ")", ":", "client", "=", "boto3", ".", "Session", "(", "profile_name", "=", "env", ")", ".", "client", "(", "'route53'", ")", "startrecord", "=", "None", "newrecord_name", "=", "dns_name", "startrecord", "=", "find_existing_record", "(", "env", ",", "zone_id", ",", "newrecord_name", ",", "check_key", "=", "'Type'", ",", "check_value", "=", "'CNAME'", ")", "if", "startrecord", ":", "LOG", ".", "info", "(", "\"Deleting old record: %s\"", ",", "newrecord_name", ")", "_response", "=", "client", ".", "change_resource_record_sets", "(", "HostedZoneId", "=", "zone_id", ",", "ChangeBatch", "=", "{", "'Changes'", ":", "[", "{", "'Action'", ":", "'DELETE'", ",", "'ResourceRecordSet'", ":", "startrecord", "}", "]", "}", ")", "LOG", ".", "debug", "(", "'Response from deleting %s: %s'", ",", "dns_name", ",", "_response", ")" ]
Delete an existing CNAME record. This is used when updating to multi-region for deleting old records. The record can not just be upserted since it changes types. Args: env (str): Deployment environment. zone_id (str): Route53 zone id. dns_name (str): FQDN of application's dns entry to add/update.
[ "Delete", "an", "existing", "CNAME", "record", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/dns.py#L126-L148
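A usage sketch for the helper above (module path inferred from the record's path field; the zone id and record name are placeholders); the call is a no-op when no matching CNAME exists.

```python
from foremast.utils.dns import delete_existing_cname

# Remove a stale CNAME so a typed alias record can be created in its place.
delete_existing_cname('dev', 'Z1EXAMPLE', 'myapp.dev.example.com')
```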
6,186
foremast/foremast
src/foremast/utils/dns.py
update_failover_dns_record
def update_failover_dns_record(env, zone_id, **kwargs): """Create a Failover Route53 alias record in _env_ zone. Args: env (str): Deployment environment. zone_id (str): Route53 zone id. Keyword Args: dns_name (str): FQDN of application's dns entry to add/update. dns_ttl (int): DNS time-to-live (ttl) elb_aws_dns (str): DNS A Record of ELB from AWS elb_dns_zone_id (str): Zone ID of ELB DNS failover_state (str): if the record is primary or secondary primary_region (str): Primary AWS region for DNS """ client = boto3.Session(profile_name=env).client('route53') response = {} hosted_zone_info = client.get_hosted_zone(Id=zone_id) zone_name = hosted_zone_info['HostedZone']['Name'].rstrip('.') dns_name = kwargs.get('dns_name') # Check that the primary record exists failover_state = kwargs.get('failover_state') if failover_state.lower() != 'primary': primary_record = find_existing_record(env, zone_id, dns_name, check_key='Failover', check_value='PRIMARY') if not primary_record: raise PrimaryDNSRecordNotFound("Primary Failover DNS record not found: {}".format(dns_name)) if dns_name and dns_name.endswith(zone_name): dns_json = get_template(template_file='infrastructure/dns_failover_upsert.json.j2', **kwargs) LOG.info('Attempting to create DNS Failover record %s (%s) in Hosted Zone %s (%s)', dns_name, kwargs['elb_aws_dns'], zone_id, zone_name) try: delete_existing_cname(env, zone_id, dns_name) response = client.change_resource_record_sets( HostedZoneId=zone_id, ChangeBatch=json.loads(dns_json), ) LOG.info('Upserted DNS Failover record %s (%s) in Hosted Zone %s (%s)', dns_name, kwargs['elb_aws_dns'], zone_id, zone_name) except botocore.exceptions.ClientError as error: LOG.info('Error creating DNS Failover record %s (%s) in Hosted Zone %s (%s)', dns_name, kwargs['elb_aws_dns'], zone_id, zone_name) LOG.debug(error) else: LOG.info('Skipping creating DNS record %s in non-matching Hosted Zone %s (%s)', dns_name, zone_id, zone_name) LOG.debug('Route53 JSON Response: \n%s', pformat(response))
python
def update_failover_dns_record(env, zone_id, **kwargs): """Create a Failover Route53 alias record in _env_ zone. Args: env (str): Deployment environment. zone_id (str): Route53 zone id. Keyword Args: dns_name (str): FQDN of application's dns entry to add/update. dns_ttl (int): DNS time-to-live (ttl) elb_aws_dns (str): DNS A Record of ELB from AWS elb_dns_zone_id (str): Zone ID of ELB DNS failover_state (str): if the record is primary or secondary primary_region (str): Primary AWS region for DNS """ client = boto3.Session(profile_name=env).client('route53') response = {} hosted_zone_info = client.get_hosted_zone(Id=zone_id) zone_name = hosted_zone_info['HostedZone']['Name'].rstrip('.') dns_name = kwargs.get('dns_name') # Check that the primary record exists failover_state = kwargs.get('failover_state') if failover_state.lower() != 'primary': primary_record = find_existing_record(env, zone_id, dns_name, check_key='Failover', check_value='PRIMARY') if not primary_record: raise PrimaryDNSRecordNotFound("Primary Failover DNS record not found: {}".format(dns_name)) if dns_name and dns_name.endswith(zone_name): dns_json = get_template(template_file='infrastructure/dns_failover_upsert.json.j2', **kwargs) LOG.info('Attempting to create DNS Failover record %s (%s) in Hosted Zone %s (%s)', dns_name, kwargs['elb_aws_dns'], zone_id, zone_name) try: delete_existing_cname(env, zone_id, dns_name) response = client.change_resource_record_sets( HostedZoneId=zone_id, ChangeBatch=json.loads(dns_json), ) LOG.info('Upserted DNS Failover record %s (%s) in Hosted Zone %s (%s)', dns_name, kwargs['elb_aws_dns'], zone_id, zone_name) except botocore.exceptions.ClientError as error: LOG.info('Error creating DNS Failover record %s (%s) in Hosted Zone %s (%s)', dns_name, kwargs['elb_aws_dns'], zone_id, zone_name) LOG.debug(error) else: LOG.info('Skipping creating DNS record %s in non-matching Hosted Zone %s (%s)', dns_name, zone_id, zone_name) LOG.debug('Route53 JSON Response: \n%s', pformat(response))
[ "def", "update_failover_dns_record", "(", "env", ",", "zone_id", ",", "*", "*", "kwargs", ")", ":", "client", "=", "boto3", ".", "Session", "(", "profile_name", "=", "env", ")", ".", "client", "(", "'route53'", ")", "response", "=", "{", "}", "hosted_zone_info", "=", "client", ".", "get_hosted_zone", "(", "Id", "=", "zone_id", ")", "zone_name", "=", "hosted_zone_info", "[", "'HostedZone'", "]", "[", "'Name'", "]", ".", "rstrip", "(", "'.'", ")", "dns_name", "=", "kwargs", ".", "get", "(", "'dns_name'", ")", "# Check that the primary record exists", "failover_state", "=", "kwargs", ".", "get", "(", "'failover_state'", ")", "if", "failover_state", ".", "lower", "(", ")", "!=", "'primary'", ":", "primary_record", "=", "find_existing_record", "(", "env", ",", "zone_id", ",", "dns_name", ",", "check_key", "=", "'Failover'", ",", "check_value", "=", "'PRIMARY'", ")", "if", "not", "primary_record", ":", "raise", "PrimaryDNSRecordNotFound", "(", "\"Primary Failover DNS record not found: {}\"", ".", "format", "(", "dns_name", ")", ")", "if", "dns_name", "and", "dns_name", ".", "endswith", "(", "zone_name", ")", ":", "dns_json", "=", "get_template", "(", "template_file", "=", "'infrastructure/dns_failover_upsert.json.j2'", ",", "*", "*", "kwargs", ")", "LOG", ".", "info", "(", "'Attempting to create DNS Failover record %s (%s) in Hosted Zone %s (%s)'", ",", "dns_name", ",", "kwargs", "[", "'elb_aws_dns'", "]", ",", "zone_id", ",", "zone_name", ")", "try", ":", "delete_existing_cname", "(", "env", ",", "zone_id", ",", "dns_name", ")", "response", "=", "client", ".", "change_resource_record_sets", "(", "HostedZoneId", "=", "zone_id", ",", "ChangeBatch", "=", "json", ".", "loads", "(", "dns_json", ")", ",", ")", "LOG", ".", "info", "(", "'Upserted DNS Failover record %s (%s) in Hosted Zone %s (%s)'", ",", "dns_name", ",", "kwargs", "[", "'elb_aws_dns'", "]", ",", "zone_id", ",", "zone_name", ")", "except", "botocore", ".", "exceptions", ".", "ClientError", "as", "error", ":", "LOG", ".", "info", "(", "'Error creating DNS Failover record %s (%s) in Hosted Zone %s (%s)'", ",", "dns_name", ",", "kwargs", "[", "'elb_aws_dns'", "]", ",", "zone_id", ",", "zone_name", ")", "LOG", ".", "debug", "(", "error", ")", "else", ":", "LOG", ".", "info", "(", "'Skipping creating DNS record %s in non-matching Hosted Zone %s (%s)'", ",", "dns_name", ",", "zone_id", ",", "zone_name", ")", "LOG", ".", "debug", "(", "'Route53 JSON Response: \\n%s'", ",", "pformat", "(", "response", ")", ")" ]
Create a Failover Route53 alias record in _env_ zone. Args: env (str): Deployment environment. zone_id (str): Route53 zone id. Keyword Args: dns_name (str): FQDN of application's dns entry to add/update. dns_ttl (int): DNS time-to-live (ttl) elb_aws_dns (str): DNS A Record of ELB from AWS elb_dns_zone_id (str): Zone ID of ELB DNS failover_state (str): if the record is primary or secondary primary_region (str): Primary AWS region for DNS
[ "Create", "a", "Failover", "Route53", "alias", "record", "in", "_env_", "zone", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/dns.py#L151-L198
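A sketch of the ordering the function above implies: the PRIMARY record has to exist before a SECONDARY upsert, otherwise `PrimaryDNSRecordNotFound` is raised. The module path, zone ids, and ELB names are assumptions, and the template may consume additional keys not shown here.

```python
from foremast.utils.dns import update_failover_dns_record

common = {
    'dns_name': 'myapp.example.com',       # must end with the hosted zone name
    'dns_ttl': 60,
    'elb_dns_zone_id': 'Z35SXDOTRQ7X7K',   # hosted zone id of the ELB alias target (placeholder)
    'primary_region': 'us-east-1',
}

# Upsert the primary record first...
update_failover_dns_record('prod', 'Z1EXAMPLE', failover_state='primary',
                           elb_aws_dns='elb-primary.us-east-1.elb.amazonaws.com', **common)
# ...then the secondary, which verifies that a PRIMARY failover record already exists.
update_failover_dns_record('prod', 'Z1EXAMPLE', failover_state='secondary',
                           elb_aws_dns='elb-secondary.us-west-2.elb.amazonaws.com', **common)
```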
6,187
foremast/foremast
src/foremast/awslambda/cloudwatch_log_event/cloudwatch_log_event.py
create_cloudwatch_log_event
def create_cloudwatch_log_event(app_name, env, region, rules): """Create cloudwatch log event for lambda from rules. Args: app_name (str): name of the lambda function env (str): Environment/Account for lambda function region (str): AWS region of the lambda function rules (str): Trigger rules from the settings """ session = boto3.Session(profile_name=env, region_name=region) cloudwatch_client = session.client('logs') log_group = rules.get('log_group') filter_name = rules.get('filter_name') filter_pattern = rules.get('filter_pattern') if not log_group: LOG.critical('Log group is required and no "log_group" is defined!') raise InvalidEventConfiguration('Log group is required and no "log_group" is defined!') if not filter_name: LOG.critical('Filter name is required and no filter_name is defined!') raise InvalidEventConfiguration('Filter name is required and no filter_name is defined!') if filter_pattern is None: LOG.critical('Filter pattern is required and no filter_pattern is defined!') raise InvalidEventConfiguration('Filter pattern is required and no filter_pattern is defined!') lambda_alias_arn = get_lambda_alias_arn(app=app_name, account=env, region=region) statement_id = '{}_cloudwatchlog_{}'.format(app_name, filter_name.replace(" ", "_")) principal = 'logs.{}.amazonaws.com'.format(region) account_id = get_env_credential(env=env)['accountId'] source_arn = "arn:aws:logs:{0}:{1}:log-group:{2}:*".format(region, account_id, log_group) add_lambda_permissions( function=lambda_alias_arn, statement_id=statement_id, action='lambda:InvokeFunction', principal=principal, source_arn=source_arn, env=env, region=region) cloudwatch_client.put_subscription_filter( logGroupName=log_group, filterName=filter_name, filterPattern=filter_pattern, destinationArn=lambda_alias_arn) LOG.info("Created Cloudwatch log event with filter: %s", filter_pattern)
python
def create_cloudwatch_log_event(app_name, env, region, rules): """Create cloudwatch log event for lambda from rules. Args: app_name (str): name of the lambda function env (str): Environment/Account for lambda function region (str): AWS region of the lambda function rules (str): Trigger rules from the settings """ session = boto3.Session(profile_name=env, region_name=region) cloudwatch_client = session.client('logs') log_group = rules.get('log_group') filter_name = rules.get('filter_name') filter_pattern = rules.get('filter_pattern') if not log_group: LOG.critical('Log group is required and no "log_group" is defined!') raise InvalidEventConfiguration('Log group is required and no "log_group" is defined!') if not filter_name: LOG.critical('Filter name is required and no filter_name is defined!') raise InvalidEventConfiguration('Filter name is required and no filter_name is defined!') if filter_pattern is None: LOG.critical('Filter pattern is required and no filter_pattern is defined!') raise InvalidEventConfiguration('Filter pattern is required and no filter_pattern is defined!') lambda_alias_arn = get_lambda_alias_arn(app=app_name, account=env, region=region) statement_id = '{}_cloudwatchlog_{}'.format(app_name, filter_name.replace(" ", "_")) principal = 'logs.{}.amazonaws.com'.format(region) account_id = get_env_credential(env=env)['accountId'] source_arn = "arn:aws:logs:{0}:{1}:log-group:{2}:*".format(region, account_id, log_group) add_lambda_permissions( function=lambda_alias_arn, statement_id=statement_id, action='lambda:InvokeFunction', principal=principal, source_arn=source_arn, env=env, region=region) cloudwatch_client.put_subscription_filter( logGroupName=log_group, filterName=filter_name, filterPattern=filter_pattern, destinationArn=lambda_alias_arn) LOG.info("Created Cloudwatch log event with filter: %s", filter_pattern)
[ "def", "create_cloudwatch_log_event", "(", "app_name", ",", "env", ",", "region", ",", "rules", ")", ":", "session", "=", "boto3", ".", "Session", "(", "profile_name", "=", "env", ",", "region_name", "=", "region", ")", "cloudwatch_client", "=", "session", ".", "client", "(", "'logs'", ")", "log_group", "=", "rules", ".", "get", "(", "'log_group'", ")", "filter_name", "=", "rules", ".", "get", "(", "'filter_name'", ")", "filter_pattern", "=", "rules", ".", "get", "(", "'filter_pattern'", ")", "if", "not", "log_group", ":", "LOG", ".", "critical", "(", "'Log group is required and no \"log_group\" is defined!'", ")", "raise", "InvalidEventConfiguration", "(", "'Log group is required and no \"log_group\" is defined!'", ")", "if", "not", "filter_name", ":", "LOG", ".", "critical", "(", "'Filter name is required and no filter_name is defined!'", ")", "raise", "InvalidEventConfiguration", "(", "'Filter name is required and no filter_name is defined!'", ")", "if", "filter_pattern", "is", "None", ":", "LOG", ".", "critical", "(", "'Filter pattern is required and no filter_pattern is defined!'", ")", "raise", "InvalidEventConfiguration", "(", "'Filter pattern is required and no filter_pattern is defined!'", ")", "lambda_alias_arn", "=", "get_lambda_alias_arn", "(", "app", "=", "app_name", ",", "account", "=", "env", ",", "region", "=", "region", ")", "statement_id", "=", "'{}_cloudwatchlog_{}'", ".", "format", "(", "app_name", ",", "filter_name", ".", "replace", "(", "\" \"", ",", "\"_\"", ")", ")", "principal", "=", "'logs.{}.amazonaws.com'", ".", "format", "(", "region", ")", "account_id", "=", "get_env_credential", "(", "env", "=", "env", ")", "[", "'accountId'", "]", "source_arn", "=", "\"arn:aws:logs:{0}:{1}:log-group:{2}:*\"", ".", "format", "(", "region", ",", "account_id", ",", "log_group", ")", "add_lambda_permissions", "(", "function", "=", "lambda_alias_arn", ",", "statement_id", "=", "statement_id", ",", "action", "=", "'lambda:InvokeFunction'", ",", "principal", "=", "principal", ",", "source_arn", "=", "source_arn", ",", "env", "=", "env", ",", "region", "=", "region", ")", "cloudwatch_client", ".", "put_subscription_filter", "(", "logGroupName", "=", "log_group", ",", "filterName", "=", "filter_name", ",", "filterPattern", "=", "filter_pattern", ",", "destinationArn", "=", "lambda_alias_arn", ")", "LOG", ".", "info", "(", "\"Created Cloudwatch log event with filter: %s\"", ",", "filter_pattern", ")" ]
Create cloudwatch log event for lambda from rules. Args: app_name (str): name of the lambda function env (str): Environment/Account for lambda function region (str): AWS region of the lambda function rules (str): Trigger rules from the settings
[ "Create", "cloudwatch", "log", "event", "for", "lambda", "from", "rules", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/cloudwatch_log_event/cloudwatch_log_event.py#L28-L75
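A usage sketch for the trigger above, assuming the module path mirrors the record's path field; the app, environment, and log group names are placeholders. Only `filter_pattern=None` is rejected, so an empty string can be used to subscribe to every log line.

```python
from foremast.awslambda.cloudwatch_log_event.cloudwatch_log_event import create_cloudwatch_log_event

rules = {
    'log_group': '/aws/lambda/upstreamapp',   # required
    'filter_name': 'error filter',            # required; spaces become underscores in the statement id
    'filter_pattern': 'ERROR',                # required; '' subscribes to all events
}
create_cloudwatch_log_event('myapp', 'dev', 'us-east-1', rules)
```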
6,188
foremast/foremast
src/foremast/autoscaling_policy/create_policy.py
AutoScalingPolicy.prepare_policy_template
def prepare_policy_template(self, scaling_type, period_sec, server_group): """Renders scaling policy templates based on configs and variables. After rendering, POSTs the json to Spinnaker for creation. Args: scaling_type (str): ``scale_up`` or ``scaling_down``. Type of policy period_sec (int): Period of time to look at metrics for determining scale server_group (str): The name of the server group to render template for """ template_kwargs = { 'app': self.app, 'env': self.env, 'region': self.region, 'server_group': server_group, 'period_sec': period_sec, 'scaling_policy': self.settings['asg']['scaling_policy'], } if scaling_type == 'scale_up': template_kwargs['operation'] = 'increase' template_kwargs['comparisonOperator'] = 'GreaterThanThreshold' template_kwargs['scalingAdjustment'] = 1 elif scaling_type == 'scale_down': cur_threshold = int(self.settings['asg']['scaling_policy']['threshold']) self.settings['asg']['scaling_policy']['threshold'] = floor(cur_threshold * 0.5) template_kwargs['operation'] = 'decrease' template_kwargs['comparisonOperator'] = 'LessThanThreshold' template_kwargs['scalingAdjustment'] = -1 rendered_template = get_template(template_file='infrastructure/autoscaling_policy.json.j2', **template_kwargs) self.log.info('Creating a %s policy in %s for %s', scaling_type, self.env, self.app) wait_for_task(rendered_template) self.log.info('Successfully created a %s policy in %s for %s', scaling_type, self.env, self.app)
python
def prepare_policy_template(self, scaling_type, period_sec, server_group): """Renders scaling policy templates based on configs and variables. After rendering, POSTs the json to Spinnaker for creation. Args: scaling_type (str): ``scale_up`` or ``scaling_down``. Type of policy period_sec (int): Period of time to look at metrics for determining scale server_group (str): The name of the server group to render template for """ template_kwargs = { 'app': self.app, 'env': self.env, 'region': self.region, 'server_group': server_group, 'period_sec': period_sec, 'scaling_policy': self.settings['asg']['scaling_policy'], } if scaling_type == 'scale_up': template_kwargs['operation'] = 'increase' template_kwargs['comparisonOperator'] = 'GreaterThanThreshold' template_kwargs['scalingAdjustment'] = 1 elif scaling_type == 'scale_down': cur_threshold = int(self.settings['asg']['scaling_policy']['threshold']) self.settings['asg']['scaling_policy']['threshold'] = floor(cur_threshold * 0.5) template_kwargs['operation'] = 'decrease' template_kwargs['comparisonOperator'] = 'LessThanThreshold' template_kwargs['scalingAdjustment'] = -1 rendered_template = get_template(template_file='infrastructure/autoscaling_policy.json.j2', **template_kwargs) self.log.info('Creating a %s policy in %s for %s', scaling_type, self.env, self.app) wait_for_task(rendered_template) self.log.info('Successfully created a %s policy in %s for %s', scaling_type, self.env, self.app)
[ "def", "prepare_policy_template", "(", "self", ",", "scaling_type", ",", "period_sec", ",", "server_group", ")", ":", "template_kwargs", "=", "{", "'app'", ":", "self", ".", "app", ",", "'env'", ":", "self", ".", "env", ",", "'region'", ":", "self", ".", "region", ",", "'server_group'", ":", "server_group", ",", "'period_sec'", ":", "period_sec", ",", "'scaling_policy'", ":", "self", ".", "settings", "[", "'asg'", "]", "[", "'scaling_policy'", "]", ",", "}", "if", "scaling_type", "==", "'scale_up'", ":", "template_kwargs", "[", "'operation'", "]", "=", "'increase'", "template_kwargs", "[", "'comparisonOperator'", "]", "=", "'GreaterThanThreshold'", "template_kwargs", "[", "'scalingAdjustment'", "]", "=", "1", "elif", "scaling_type", "==", "'scale_down'", ":", "cur_threshold", "=", "int", "(", "self", ".", "settings", "[", "'asg'", "]", "[", "'scaling_policy'", "]", "[", "'threshold'", "]", ")", "self", ".", "settings", "[", "'asg'", "]", "[", "'scaling_policy'", "]", "[", "'threshold'", "]", "=", "floor", "(", "cur_threshold", "*", "0.5", ")", "template_kwargs", "[", "'operation'", "]", "=", "'decrease'", "template_kwargs", "[", "'comparisonOperator'", "]", "=", "'LessThanThreshold'", "template_kwargs", "[", "'scalingAdjustment'", "]", "=", "-", "1", "rendered_template", "=", "get_template", "(", "template_file", "=", "'infrastructure/autoscaling_policy.json.j2'", ",", "*", "*", "template_kwargs", ")", "self", ".", "log", ".", "info", "(", "'Creating a %s policy in %s for %s'", ",", "scaling_type", ",", "self", ".", "env", ",", "self", ".", "app", ")", "wait_for_task", "(", "rendered_template", ")", "self", ".", "log", ".", "info", "(", "'Successfully created a %s policy in %s for %s'", ",", "scaling_type", ",", "self", ".", "env", ",", "self", ".", "app", ")" ]
Renders scaling policy templates based on configs and variables. After rendering, POSTs the json to Spinnaker for creation. Args: scaling_type (str): ``scale_up`` or ``scale_down``. Type of policy period_sec (int): Period of time to look at metrics for determining scale server_group (str): The name of the server group to render template for
[ "Renders", "scaling", "policy", "templates", "based", "on", "configs", "and", "variables", ".", "After", "rendering", "POSTs", "the", "json", "to", "Spinnaker", "for", "creation", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/autoscaling_policy/create_policy.py#L56-L88
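The `scale_down` branch above mutates the configured threshold before rendering; a tiny sketch of that arithmetic (the 75% figure is only an illustration):

```python
from math import floor

configured_threshold = 75                              # e.g. scale up above 75% of the chosen metric
scale_down_threshold = floor(int(configured_threshold) * 0.5)
assert scale_down_threshold == 37                      # scale_down triggers below half the value, floored
```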
6,189
foremast/foremast
src/foremast/autoscaling_policy/create_policy.py
AutoScalingPolicy.create_policy
def create_policy(self): """Wrapper function. Gets the server group, sets sane defaults, deletes existing policies, and then runs self.prepare_policy_template for scaling up and scaling down policies. This function acts as the main driver for the scaling policy creationprocess """ if not self.settings['asg']['scaling_policy']: self.log.info("No scaling policy found, skipping...") return server_group = self.get_server_group() # Find all existing and remove them scaling_policies = self.get_all_existing(server_group) for policy in scaling_policies: for subpolicy in policy: self.delete_existing_policy(subpolicy, server_group) if self.settings['asg']['scaling_policy']['period_minutes']: period_sec = int(self.settings['asg']['scaling_policy']['period_minutes']) * 60 else: period_sec = 1800 self.prepare_policy_template('scale_up', period_sec, server_group) if self.settings['asg']['scaling_policy'].get('scale_down', True): self.prepare_policy_template('scale_down', period_sec, server_group)
python
def create_policy(self): """Wrapper function. Gets the server group, sets sane defaults, deletes existing policies, and then runs self.prepare_policy_template for scaling up and scaling down policies. This function acts as the main driver for the scaling policy creationprocess """ if not self.settings['asg']['scaling_policy']: self.log.info("No scaling policy found, skipping...") return server_group = self.get_server_group() # Find all existing and remove them scaling_policies = self.get_all_existing(server_group) for policy in scaling_policies: for subpolicy in policy: self.delete_existing_policy(subpolicy, server_group) if self.settings['asg']['scaling_policy']['period_minutes']: period_sec = int(self.settings['asg']['scaling_policy']['period_minutes']) * 60 else: period_sec = 1800 self.prepare_policy_template('scale_up', period_sec, server_group) if self.settings['asg']['scaling_policy'].get('scale_down', True): self.prepare_policy_template('scale_down', period_sec, server_group)
[ "def", "create_policy", "(", "self", ")", ":", "if", "not", "self", ".", "settings", "[", "'asg'", "]", "[", "'scaling_policy'", "]", ":", "self", ".", "log", ".", "info", "(", "\"No scaling policy found, skipping...\"", ")", "return", "server_group", "=", "self", ".", "get_server_group", "(", ")", "# Find all existing and remove them", "scaling_policies", "=", "self", ".", "get_all_existing", "(", "server_group", ")", "for", "policy", "in", "scaling_policies", ":", "for", "subpolicy", "in", "policy", ":", "self", ".", "delete_existing_policy", "(", "subpolicy", ",", "server_group", ")", "if", "self", ".", "settings", "[", "'asg'", "]", "[", "'scaling_policy'", "]", "[", "'period_minutes'", "]", ":", "period_sec", "=", "int", "(", "self", ".", "settings", "[", "'asg'", "]", "[", "'scaling_policy'", "]", "[", "'period_minutes'", "]", ")", "*", "60", "else", ":", "period_sec", "=", "1800", "self", ".", "prepare_policy_template", "(", "'scale_up'", ",", "period_sec", ",", "server_group", ")", "if", "self", ".", "settings", "[", "'asg'", "]", "[", "'scaling_policy'", "]", ".", "get", "(", "'scale_down'", ",", "True", ")", ":", "self", ".", "prepare_policy_template", "(", "'scale_down'", ",", "period_sec", ",", "server_group", ")" ]
Wrapper function. Gets the server group, sets sane defaults, deletes existing policies, and then runs self.prepare_policy_template for scaling up and scaling down policies. This function acts as the main driver for the scaling policy creation process
[ "Wrapper", "function", ".", "Gets", "the", "server", "group", "sets", "sane", "defaults", "deletes", "existing", "policies", "and", "then", "runs", "self", ".", "prepare_policy_template", "for", "scaling", "up", "and", "scaling", "down", "policies", ".", "This", "function", "acts", "as", "the", "main", "driver", "for", "the", "scaling", "policy", "creation", "process" ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/autoscaling_policy/create_policy.py#L90-L115
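A sketch of the `settings['asg']['scaling_policy']` shape this method reads; keys consumed only by the rendered template (metric names, etc.) are deployment-specific and omitted here.

```python
# Minimal scaling_policy block exercised by create_policy(); values are illustrative.
settings = {
    'asg': {
        'scaling_policy': {
            'threshold': 75,        # used as-is for scale_up, halved and floored for scale_down
            'period_minutes': 30,   # a falsy value falls back to 1800 seconds
            'scale_down': True,     # set False to skip creating the scale_down policy
        },
    },
}
```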
6,190
foremast/foremast
src/foremast/autoscaling_policy/create_policy.py
AutoScalingPolicy.get_server_group
def get_server_group(self): """Finds the most recently deployed server group for the application. This is the server group that the scaling policy will be applied to. Returns: server_group (str): Name of the newest server group """ api_url = "{0}/applications/{1}".format(API_URL, self.app) response = requests.get(api_url, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT) for server_group in response.json()['clusters'][self.env]: return server_group['serverGroups'][-1]
python
def get_server_group(self): """Finds the most recently deployed server group for the application. This is the server group that the scaling policy will be applied to. Returns: server_group (str): Name of the newest server group """ api_url = "{0}/applications/{1}".format(API_URL, self.app) response = requests.get(api_url, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT) for server_group in response.json()['clusters'][self.env]: return server_group['serverGroups'][-1]
[ "def", "get_server_group", "(", "self", ")", ":", "api_url", "=", "\"{0}/applications/{1}\"", ".", "format", "(", "API_URL", ",", "self", ".", "app", ")", "response", "=", "requests", ".", "get", "(", "api_url", ",", "verify", "=", "GATE_CA_BUNDLE", ",", "cert", "=", "GATE_CLIENT_CERT", ")", "for", "server_group", "in", "response", ".", "json", "(", ")", "[", "'clusters'", "]", "[", "self", ".", "env", "]", ":", "return", "server_group", "[", "'serverGroups'", "]", "[", "-", "1", "]" ]
Finds the most recently deployed server group for the application. This is the server group that the scaling policy will be applied to. Returns: server_group (str): Name of the newest server group
[ "Finds", "the", "most", "recently", "deployed", "server", "group", "for", "the", "application", ".", "This", "is", "the", "server", "group", "that", "the", "scaling", "policy", "will", "be", "applied", "to", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/autoscaling_policy/create_policy.py#L117-L127
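A sketch of the (abbreviated) Spinnaker response shape the method above walks; cluster and server group names are placeholders.

```python
# get_server_group() returns the last serverGroups entry of the first cluster for the environment.
clusters_payload = {
    'clusters': {
        'dev': [
            {'serverGroups': ['myapp-v001', 'myapp-v002']},   # newest server group last
        ],
    },
}
newest = clusters_payload['clusters']['dev'][0]['serverGroups'][-1]
assert newest == 'myapp-v002'
```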
6,191
foremast/foremast
src/foremast/autoscaling_policy/create_policy.py
AutoScalingPolicy.delete_existing_policy
def delete_existing_policy(self, scaling_policy, server_group): """Given a scaling_policy and server_group, deletes the existing scaling_policy. Scaling policies need to be deleted instead of upserted for consistency. Args: scaling_policy (json): the scaling_policy json from Spinnaker that should be deleted server_group (str): the affected server_group """ self.log.info("Deleting policy %s on %s", scaling_policy['policyName'], server_group) delete_dict = { "application": self.app, "description": "Delete scaling policy", "job": [{ "policyName": scaling_policy['policyName'], "serverGroupName": server_group, "credentials": self.env, "region": self.region, "provider": "aws", "type": "deleteScalingPolicy", "user": "foremast-autoscaling-policy" }] } wait_for_task(json.dumps(delete_dict))
python
def delete_existing_policy(self, scaling_policy, server_group): """Given a scaling_policy and server_group, deletes the existing scaling_policy. Scaling policies need to be deleted instead of upserted for consistency. Args: scaling_policy (json): the scaling_policy json from Spinnaker that should be deleted server_group (str): the affected server_group """ self.log.info("Deleting policy %s on %s", scaling_policy['policyName'], server_group) delete_dict = { "application": self.app, "description": "Delete scaling policy", "job": [{ "policyName": scaling_policy['policyName'], "serverGroupName": server_group, "credentials": self.env, "region": self.region, "provider": "aws", "type": "deleteScalingPolicy", "user": "foremast-autoscaling-policy" }] } wait_for_task(json.dumps(delete_dict))
[ "def", "delete_existing_policy", "(", "self", ",", "scaling_policy", ",", "server_group", ")", ":", "self", ".", "log", ".", "info", "(", "\"Deleting policy %s on %s\"", ",", "scaling_policy", "[", "'policyName'", "]", ",", "server_group", ")", "delete_dict", "=", "{", "\"application\"", ":", "self", ".", "app", ",", "\"description\"", ":", "\"Delete scaling policy\"", ",", "\"job\"", ":", "[", "{", "\"policyName\"", ":", "scaling_policy", "[", "'policyName'", "]", ",", "\"serverGroupName\"", ":", "server_group", ",", "\"credentials\"", ":", "self", ".", "env", ",", "\"region\"", ":", "self", ".", "region", ",", "\"provider\"", ":", "\"aws\"", ",", "\"type\"", ":", "\"deleteScalingPolicy\"", ",", "\"user\"", ":", "\"foremast-autoscaling-policy\"", "}", "]", "}", "wait_for_task", "(", "json", ".", "dumps", "(", "delete_dict", ")", ")" ]
Given a scaling_policy and server_group, deletes the existing scaling_policy. Scaling policies need to be deleted instead of upserted for consistency. Args: scaling_policy (json): the scaling_policy json from Spinnaker that should be deleted server_group (str): the affected server_group
[ "Given", "a", "scaling_policy", "and", "server_group", "deletes", "the", "existing", "scaling_policy", ".", "Scaling", "policies", "need", "to", "be", "deleted", "instead", "of", "upserted", "for", "consistency", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/autoscaling_policy/create_policy.py#L129-L153
6,192
foremast/foremast
src/foremast/autoscaling_policy/create_policy.py
AutoScalingPolicy.get_all_existing
def get_all_existing(self, server_group): """Finds all existing scaling policies for an application Returns: scalingpolicies (list): List of all existing scaling policies for the application """ self.log.info("Checking for existing scaling policy") url = "{0}/applications/{1}/clusters/{2}/{1}/serverGroups".format(API_URL, self.app, self.env) response = requests.get(url, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT) assert response.ok, "Error looking for existing Autoscaling Policy for {0}: {1}".format(self.app, response.text) scalingpolicies = [] for servergroup in response.json(): if servergroup['scalingPolicies'] and servergroup['asg']['autoScalingGroupName'] == server_group: self.log.info("Found policies on %s", server_group) scalingpolicies.append(servergroup['scalingPolicies']) self.log.debug("Scaling policies: %s", scalingpolicies) return scalingpolicies
python
def get_all_existing(self, server_group): """Finds all existing scaling policies for an application Returns: scalingpolicies (list): List of all existing scaling policies for the application """ self.log.info("Checking for existing scaling policy") url = "{0}/applications/{1}/clusters/{2}/{1}/serverGroups".format(API_URL, self.app, self.env) response = requests.get(url, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT) assert response.ok, "Error looking for existing Autoscaling Policy for {0}: {1}".format(self.app, response.text) scalingpolicies = [] for servergroup in response.json(): if servergroup['scalingPolicies'] and servergroup['asg']['autoScalingGroupName'] == server_group: self.log.info("Found policies on %s", server_group) scalingpolicies.append(servergroup['scalingPolicies']) self.log.debug("Scaling policies: %s", scalingpolicies) return scalingpolicies
[ "def", "get_all_existing", "(", "self", ",", "server_group", ")", ":", "self", ".", "log", ".", "info", "(", "\"Checking for existing scaling policy\"", ")", "url", "=", "\"{0}/applications/{1}/clusters/{2}/{1}/serverGroups\"", ".", "format", "(", "API_URL", ",", "self", ".", "app", ",", "self", ".", "env", ")", "response", "=", "requests", ".", "get", "(", "url", ",", "verify", "=", "GATE_CA_BUNDLE", ",", "cert", "=", "GATE_CLIENT_CERT", ")", "assert", "response", ".", "ok", ",", "\"Error looking for existing Autoscaling Policy for {0}: {1}\"", ".", "format", "(", "self", ".", "app", ",", "response", ".", "text", ")", "scalingpolicies", "=", "[", "]", "for", "servergroup", "in", "response", ".", "json", "(", ")", ":", "if", "servergroup", "[", "'scalingPolicies'", "]", "and", "servergroup", "[", "'asg'", "]", "[", "'autoScalingGroupName'", "]", "==", "server_group", ":", "self", ".", "log", ".", "info", "(", "\"Found policies on %s\"", ",", "server_group", ")", "scalingpolicies", ".", "append", "(", "servergroup", "[", "'scalingPolicies'", "]", ")", "self", ".", "log", ".", "debug", "(", "\"Scaling policies: %s\"", ",", "scalingpolicies", ")", "return", "scalingpolicies" ]
Finds all existing scaling policies for an application Returns: scalingpolicies (list): List of all existing scaling policies for the application
[ "Finds", "all", "existing", "scaling", "policies", "for", "an", "application" ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/autoscaling_policy/create_policy.py#L155-L172
6,193
foremast/foremast
src/foremast/awslambda/cloudwatch_event/destroy_cloudwatch_event/destroy_cloudwatch_event.py
destroy_cloudwatch_event
def destroy_cloudwatch_event(app='', env='dev', region=''): """Destroy Cloudwatch event subscription. Args: app (str): Spinnaker Application name. env (str): Deployment environment. region (str): AWS region. Returns: bool: True upon successful completion. """ session = boto3.Session(profile_name=env, region_name=region) cloudwatch_client = session.client('events') event_rules = get_cloudwatch_event_rule(app_name=app, account=env, region=region) for rule in event_rules: cloudwatch_client.remove_targets(Rule=rule, Ids=[app]) return True
python
def destroy_cloudwatch_event(app='', env='dev', region=''): """Destroy Cloudwatch event subscription. Args: app (str): Spinnaker Application name. env (str): Deployment environment. region (str): AWS region. Returns: bool: True upon successful completion. """ session = boto3.Session(profile_name=env, region_name=region) cloudwatch_client = session.client('events') event_rules = get_cloudwatch_event_rule(app_name=app, account=env, region=region) for rule in event_rules: cloudwatch_client.remove_targets(Rule=rule, Ids=[app]) return True
[ "def", "destroy_cloudwatch_event", "(", "app", "=", "''", ",", "env", "=", "'dev'", ",", "region", "=", "''", ")", ":", "session", "=", "boto3", ".", "Session", "(", "profile_name", "=", "env", ",", "region_name", "=", "region", ")", "cloudwatch_client", "=", "session", ".", "client", "(", "'events'", ")", "event_rules", "=", "get_cloudwatch_event_rule", "(", "app_name", "=", "app", ",", "account", "=", "env", ",", "region", "=", "region", ")", "for", "rule", "in", "event_rules", ":", "cloudwatch_client", ".", "remove_targets", "(", "Rule", "=", "rule", ",", "Ids", "=", "[", "app", "]", ")", "return", "True" ]
Destroy Cloudwatch event subscription. Args: app (str): Spinnaker Application name. env (str): Deployment environment. region (str): AWS region. Returns: bool: True upon successful completion.
[ "Destroy", "Cloudwatch", "event", "subscription", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/cloudwatch_event/destroy_cloudwatch_event/destroy_cloudwatch_event.py#L26-L45
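A usage sketch for the teardown helper above, assuming the long module path implied by the record's path field; the app, environment, and region are placeholders.

```python
from foremast.awslambda.cloudwatch_event.destroy_cloudwatch_event.destroy_cloudwatch_event import (
    destroy_cloudwatch_event)

# Detach the Lambda target from every CloudWatch Event rule found for the app.
destroy_cloudwatch_event(app='myapp', env='dev', region='us-east-1')
```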
6,194
foremast/foremast
src/foremast/s3/s3apps.py
S3Apps.create_bucket
def create_bucket(self): """Create or update bucket based on app name.""" bucket_exists = self._bucket_exists() if self.s3props.get('shared_bucket_target'): if bucket_exists: LOG.info('App uses shared bucket - %s ', self.bucket) else: LOG.error("Shared bucket %s does not exist", self.bucket) raise S3SharedBucketNotFound else: if self.region == 'us-east-1': _response = self.s3client.create_bucket(ACL=self.s3props['bucket_acl'], Bucket=self.bucket) else: if not bucket_exists: _response = self.s3client.create_bucket(ACL=self.s3props['bucket_acl'], Bucket=self.bucket, CreateBucketConfiguration={ 'LocationConstraint': self.region}) else: _response = "bucket already exists, skipping create for non-standard region buckets." LOG.debug('Response creating bucket: %s', _response) LOG.info('%s - S3 Bucket Upserted', self.bucket) self._put_bucket_policy() self._put_bucket_website() self._put_bucket_logging() self._put_bucket_lifecycle() self._put_bucket_versioning() self._put_bucket_encryption() self._put_bucket_tagging()
python
def create_bucket(self): """Create or update bucket based on app name.""" bucket_exists = self._bucket_exists() if self.s3props.get('shared_bucket_target'): if bucket_exists: LOG.info('App uses shared bucket - %s ', self.bucket) else: LOG.error("Shared bucket %s does not exist", self.bucket) raise S3SharedBucketNotFound else: if self.region == 'us-east-1': _response = self.s3client.create_bucket(ACL=self.s3props['bucket_acl'], Bucket=self.bucket) else: if not bucket_exists: _response = self.s3client.create_bucket(ACL=self.s3props['bucket_acl'], Bucket=self.bucket, CreateBucketConfiguration={ 'LocationConstraint': self.region}) else: _response = "bucket already exists, skipping create for non-standard region buckets." LOG.debug('Response creating bucket: %s', _response) LOG.info('%s - S3 Bucket Upserted', self.bucket) self._put_bucket_policy() self._put_bucket_website() self._put_bucket_logging() self._put_bucket_lifecycle() self._put_bucket_versioning() self._put_bucket_encryption() self._put_bucket_tagging()
[ "def", "create_bucket", "(", "self", ")", ":", "bucket_exists", "=", "self", ".", "_bucket_exists", "(", ")", "if", "self", ".", "s3props", ".", "get", "(", "'shared_bucket_target'", ")", ":", "if", "bucket_exists", ":", "LOG", ".", "info", "(", "'App uses shared bucket - %s '", ",", "self", ".", "bucket", ")", "else", ":", "LOG", ".", "error", "(", "\"Shared bucket %s does not exist\"", ",", "self", ".", "bucket", ")", "raise", "S3SharedBucketNotFound", "else", ":", "if", "self", ".", "region", "==", "'us-east-1'", ":", "_response", "=", "self", ".", "s3client", ".", "create_bucket", "(", "ACL", "=", "self", ".", "s3props", "[", "'bucket_acl'", "]", ",", "Bucket", "=", "self", ".", "bucket", ")", "else", ":", "if", "not", "bucket_exists", ":", "_response", "=", "self", ".", "s3client", ".", "create_bucket", "(", "ACL", "=", "self", ".", "s3props", "[", "'bucket_acl'", "]", ",", "Bucket", "=", "self", ".", "bucket", ",", "CreateBucketConfiguration", "=", "{", "'LocationConstraint'", ":", "self", ".", "region", "}", ")", "else", ":", "_response", "=", "\"bucket already exists, skipping create for non-standard region buckets.\"", "LOG", ".", "debug", "(", "'Response creating bucket: %s'", ",", "_response", ")", "LOG", ".", "info", "(", "'%s - S3 Bucket Upserted'", ",", "self", ".", "bucket", ")", "self", ".", "_put_bucket_policy", "(", ")", "self", ".", "_put_bucket_website", "(", ")", "self", ".", "_put_bucket_logging", "(", ")", "self", ".", "_put_bucket_lifecycle", "(", ")", "self", ".", "_put_bucket_versioning", "(", ")", "self", ".", "_put_bucket_encryption", "(", ")", "self", ".", "_put_bucket_tagging", "(", ")" ]
Create or update bucket based on app name.
[ "Create", "or", "update", "bucket", "based", "on", "app", "name", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/s3/s3apps.py#L65-L92
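A sketch of the `s3props` keys exercised by `create_bucket()` and the helpers it calls; values are placeholders and the real foremast S3 settings block supports more options than shown.

```python
s3props = {
    'shared_bucket_target': None,   # truthy -> reuse an existing shared bucket instead of creating one
    'bucket_acl': 'private',
    'bucket_policy': {},            # falsy -> any existing bucket policy is deleted
    'website': {'enabled': False, 'index_suffix': 'index.html', 'error_document': 'error.html'},
    'cors': {'enabled': False, 'cors_rules': []},
}
```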
6,195
foremast/foremast
src/foremast/s3/s3apps.py
S3Apps._bucket_exists
def _bucket_exists(self): """Check if the bucket exists.""" try: self.s3client.get_bucket_location(Bucket=self.bucket) return True except ClientError as error: LOG.error(error) return False
python
def _bucket_exists(self): """Check if the bucket exists.""" try: self.s3client.get_bucket_location(Bucket=self.bucket) return True except ClientError as error: LOG.error(error) return False
[ "def", "_bucket_exists", "(", "self", ")", ":", "try", ":", "self", ".", "s3client", ".", "get_bucket_location", "(", "Bucket", "=", "self", ".", "bucket", ")", "return", "True", "except", "ClientError", "as", "error", ":", "LOG", ".", "error", "(", "error", ")", "return", "False" ]
Check if the bucket exists.
[ "Check", "if", "the", "bucket", "exists", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/s3/s3apps.py#L94-L101
6,196
foremast/foremast
src/foremast/s3/s3apps.py
S3Apps._put_bucket_policy
def _put_bucket_policy(self): """Attach a bucket policy to app bucket.""" if self.s3props['bucket_policy']: policy_str = json.dumps(self.s3props['bucket_policy']) _response = self.s3client.put_bucket_policy(Bucket=self.bucket, Policy=policy_str) else: _response = self.s3client.delete_bucket_policy(Bucket=self.bucket) LOG.debug('Response adding bucket policy: %s', _response) LOG.info('S3 Bucket Policy Attached')
python
def _put_bucket_policy(self): """Attach a bucket policy to app bucket.""" if self.s3props['bucket_policy']: policy_str = json.dumps(self.s3props['bucket_policy']) _response = self.s3client.put_bucket_policy(Bucket=self.bucket, Policy=policy_str) else: _response = self.s3client.delete_bucket_policy(Bucket=self.bucket) LOG.debug('Response adding bucket policy: %s', _response) LOG.info('S3 Bucket Policy Attached')
[ "def", "_put_bucket_policy", "(", "self", ")", ":", "if", "self", ".", "s3props", "[", "'bucket_policy'", "]", ":", "policy_str", "=", "json", ".", "dumps", "(", "self", ".", "s3props", "[", "'bucket_policy'", "]", ")", "_response", "=", "self", ".", "s3client", ".", "put_bucket_policy", "(", "Bucket", "=", "self", ".", "bucket", ",", "Policy", "=", "policy_str", ")", "else", ":", "_response", "=", "self", ".", "s3client", ".", "delete_bucket_policy", "(", "Bucket", "=", "self", ".", "bucket", ")", "LOG", ".", "debug", "(", "'Response adding bucket policy: %s'", ",", "_response", ")", "LOG", ".", "info", "(", "'S3 Bucket Policy Attached'", ")" ]
Attach a bucket policy to app bucket.
[ "Attach", "a", "bucket", "policy", "to", "app", "bucket", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/s3/s3apps.py#L103-L111
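A sketch of a `bucket_policy` value that the method above would serialize and attach; the account id and bucket ARN are placeholders.

```python
bucket_policy = {
    'Version': '2012-10-17',
    'Statement': [{
        'Sid': 'AllowGetObject',
        'Effect': 'Allow',
        'Principal': {'AWS': 'arn:aws:iam::123456789012:root'},
        'Action': 's3:GetObject',
        'Resource': 'arn:aws:s3:::myapp-bucket/*',
    }],
}
```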
6,197
foremast/foremast
src/foremast/s3/s3apps.py
S3Apps._put_bucket_website
def _put_bucket_website(self): """Configure static website on S3 bucket.""" if self.s3props['website']['enabled']: website_config = { 'ErrorDocument': { 'Key': self.s3props['website']['error_document'] }, 'IndexDocument': { 'Suffix': self.s3props['website']['index_suffix'] } } _response = self.s3client.put_bucket_website(Bucket=self.bucket, WebsiteConfiguration=website_config) self._put_bucket_cors() self._set_bucket_dns() else: _response = self.s3client.delete_bucket_website(Bucket=self.bucket) self._put_bucket_cors() LOG.debug('Response setting up S3 website: %s', _response) LOG.info('S3 website settings updated')
python
def _put_bucket_website(self): """Configure static website on S3 bucket.""" if self.s3props['website']['enabled']: website_config = { 'ErrorDocument': { 'Key': self.s3props['website']['error_document'] }, 'IndexDocument': { 'Suffix': self.s3props['website']['index_suffix'] } } _response = self.s3client.put_bucket_website(Bucket=self.bucket, WebsiteConfiguration=website_config) self._put_bucket_cors() self._set_bucket_dns() else: _response = self.s3client.delete_bucket_website(Bucket=self.bucket) self._put_bucket_cors() LOG.debug('Response setting up S3 website: %s', _response) LOG.info('S3 website settings updated')
[ "def", "_put_bucket_website", "(", "self", ")", ":", "if", "self", ".", "s3props", "[", "'website'", "]", "[", "'enabled'", "]", ":", "website_config", "=", "{", "'ErrorDocument'", ":", "{", "'Key'", ":", "self", ".", "s3props", "[", "'website'", "]", "[", "'error_document'", "]", "}", ",", "'IndexDocument'", ":", "{", "'Suffix'", ":", "self", ".", "s3props", "[", "'website'", "]", "[", "'index_suffix'", "]", "}", "}", "_response", "=", "self", ".", "s3client", ".", "put_bucket_website", "(", "Bucket", "=", "self", ".", "bucket", ",", "WebsiteConfiguration", "=", "website_config", ")", "self", ".", "_put_bucket_cors", "(", ")", "self", ".", "_set_bucket_dns", "(", ")", "else", ":", "_response", "=", "self", ".", "s3client", ".", "delete_bucket_website", "(", "Bucket", "=", "self", ".", "bucket", ")", "self", ".", "_put_bucket_cors", "(", ")", "LOG", ".", "debug", "(", "'Response setting up S3 website: %s'", ",", "_response", ")", "LOG", ".", "info", "(", "'S3 website settings updated'", ")" ]
Configure static website on S3 bucket.
[ "Configure", "static", "website", "on", "S3", "bucket", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/s3/s3apps.py#L113-L131
6,198
foremast/foremast
src/foremast/s3/s3apps.py
S3Apps._set_bucket_dns
def _set_bucket_dns(self): """Create CNAME for S3 endpoint.""" # Different regions have different s3 endpoint formats dotformat_regions = ["eu-west-2", "eu-central-1", "ap-northeast-2", "ap-south-1", "ca-central-1", "us-east-2"] if self.region in dotformat_regions: s3_endpoint = "{0}.s3-website.{1}.amazonaws.com".format(self.bucket, self.region) else: s3_endpoint = "{0}.s3-website-{1}.amazonaws.com".format(self.bucket, self.region) zone_ids = get_dns_zone_ids(env=self.env, facing="public") dns_kwargs = { 'dns_name': self.bucket, 'dns_name_aws': s3_endpoint, 'dns_ttl': self.properties['dns']['ttl'] } for zone_id in zone_ids: LOG.debug('zone_id: %s', zone_id) update_dns_zone_record(self.env, zone_id, **dns_kwargs) LOG.info("Created DNS %s for Bucket", self.bucket)
python
def _set_bucket_dns(self): """Create CNAME for S3 endpoint.""" # Different regions have different s3 endpoint formats dotformat_regions = ["eu-west-2", "eu-central-1", "ap-northeast-2", "ap-south-1", "ca-central-1", "us-east-2"] if self.region in dotformat_regions: s3_endpoint = "{0}.s3-website.{1}.amazonaws.com".format(self.bucket, self.region) else: s3_endpoint = "{0}.s3-website-{1}.amazonaws.com".format(self.bucket, self.region) zone_ids = get_dns_zone_ids(env=self.env, facing="public") dns_kwargs = { 'dns_name': self.bucket, 'dns_name_aws': s3_endpoint, 'dns_ttl': self.properties['dns']['ttl'] } for zone_id in zone_ids: LOG.debug('zone_id: %s', zone_id) update_dns_zone_record(self.env, zone_id, **dns_kwargs) LOG.info("Created DNS %s for Bucket", self.bucket)
[ "def", "_set_bucket_dns", "(", "self", ")", ":", "# Different regions have different s3 endpoint formats", "dotformat_regions", "=", "[", "\"eu-west-2\"", ",", "\"eu-central-1\"", ",", "\"ap-northeast-2\"", ",", "\"ap-south-1\"", ",", "\"ca-central-1\"", ",", "\"us-east-2\"", "]", "if", "self", ".", "region", "in", "dotformat_regions", ":", "s3_endpoint", "=", "\"{0}.s3-website.{1}.amazonaws.com\"", ".", "format", "(", "self", ".", "bucket", ",", "self", ".", "region", ")", "else", ":", "s3_endpoint", "=", "\"{0}.s3-website-{1}.amazonaws.com\"", ".", "format", "(", "self", ".", "bucket", ",", "self", ".", "region", ")", "zone_ids", "=", "get_dns_zone_ids", "(", "env", "=", "self", ".", "env", ",", "facing", "=", "\"public\"", ")", "dns_kwargs", "=", "{", "'dns_name'", ":", "self", ".", "bucket", ",", "'dns_name_aws'", ":", "s3_endpoint", ",", "'dns_ttl'", ":", "self", ".", "properties", "[", "'dns'", "]", "[", "'ttl'", "]", "}", "for", "zone_id", "in", "zone_ids", ":", "LOG", ".", "debug", "(", "'zone_id: %s'", ",", "zone_id", ")", "update_dns_zone_record", "(", "self", ".", "env", ",", "zone_id", ",", "*", "*", "dns_kwargs", ")", "LOG", ".", "info", "(", "\"Created DNS %s for Bucket\"", ",", "self", ".", "bucket", ")" ]
Create CNAME for S3 endpoint.
[ "Create", "CNAME", "for", "S3", "endpoint", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/s3/s3apps.py#L133-L152
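A standalone sketch of the website-endpoint naming rule applied above; the bucket and region values are placeholders.

```python
bucket, region = 'myapp.dev.example.com', 'eu-central-1'
dot_format_regions = {'eu-west-2', 'eu-central-1', 'ap-northeast-2',
                      'ap-south-1', 'ca-central-1', 'us-east-2'}
if region in dot_format_regions:
    endpoint = '{0}.s3-website.{1}.amazonaws.com'.format(bucket, region)
else:
    endpoint = '{0}.s3-website-{1}.amazonaws.com'.format(bucket, region)
assert endpoint == 'myapp.dev.example.com.s3-website.eu-central-1.amazonaws.com'
```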
6,199
foremast/foremast
src/foremast/s3/s3apps.py
S3Apps._put_bucket_cors
def _put_bucket_cors(self): """Adds bucket cors configuration.""" if self.s3props['cors']['enabled'] and self.s3props['website']['enabled']: cors_config = {} cors_rules = [] for each_rule in self.s3props['cors']['cors_rules']: cors_rules.append({ 'AllowedHeaders': each_rule['cors_headers'], 'AllowedMethods': each_rule['cors_methods'], 'AllowedOrigins': each_rule['cors_origins'], 'ExposeHeaders': each_rule['cors_expose_headers'], 'MaxAgeSeconds': each_rule['cors_max_age'] }) cors_config = { 'CORSRules': cors_rules } LOG.debug(cors_config) _response = self.s3client.put_bucket_cors(Bucket=self.bucket, CORSConfiguration=cors_config) else: _response = self.s3client.delete_bucket_cors(Bucket=self.bucket) LOG.debug('Response setting up S3 CORS: %s', _response) LOG.info('S3 CORS configuration updated')
python
def _put_bucket_cors(self): """Adds bucket cors configuration.""" if self.s3props['cors']['enabled'] and self.s3props['website']['enabled']: cors_config = {} cors_rules = [] for each_rule in self.s3props['cors']['cors_rules']: cors_rules.append({ 'AllowedHeaders': each_rule['cors_headers'], 'AllowedMethods': each_rule['cors_methods'], 'AllowedOrigins': each_rule['cors_origins'], 'ExposeHeaders': each_rule['cors_expose_headers'], 'MaxAgeSeconds': each_rule['cors_max_age'] }) cors_config = { 'CORSRules': cors_rules } LOG.debug(cors_config) _response = self.s3client.put_bucket_cors(Bucket=self.bucket, CORSConfiguration=cors_config) else: _response = self.s3client.delete_bucket_cors(Bucket=self.bucket) LOG.debug('Response setting up S3 CORS: %s', _response) LOG.info('S3 CORS configuration updated')
[ "def", "_put_bucket_cors", "(", "self", ")", ":", "if", "self", ".", "s3props", "[", "'cors'", "]", "[", "'enabled'", "]", "and", "self", ".", "s3props", "[", "'website'", "]", "[", "'enabled'", "]", ":", "cors_config", "=", "{", "}", "cors_rules", "=", "[", "]", "for", "each_rule", "in", "self", ".", "s3props", "[", "'cors'", "]", "[", "'cors_rules'", "]", ":", "cors_rules", ".", "append", "(", "{", "'AllowedHeaders'", ":", "each_rule", "[", "'cors_headers'", "]", ",", "'AllowedMethods'", ":", "each_rule", "[", "'cors_methods'", "]", ",", "'AllowedOrigins'", ":", "each_rule", "[", "'cors_origins'", "]", ",", "'ExposeHeaders'", ":", "each_rule", "[", "'cors_expose_headers'", "]", ",", "'MaxAgeSeconds'", ":", "each_rule", "[", "'cors_max_age'", "]", "}", ")", "cors_config", "=", "{", "'CORSRules'", ":", "cors_rules", "}", "LOG", ".", "debug", "(", "cors_config", ")", "_response", "=", "self", ".", "s3client", ".", "put_bucket_cors", "(", "Bucket", "=", "self", ".", "bucket", ",", "CORSConfiguration", "=", "cors_config", ")", "else", ":", "_response", "=", "self", ".", "s3client", ".", "delete_bucket_cors", "(", "Bucket", "=", "self", ".", "bucket", ")", "LOG", ".", "debug", "(", "'Response setting up S3 CORS: %s'", ",", "_response", ")", "LOG", ".", "info", "(", "'S3 CORS configuration updated'", ")" ]
Adds bucket cors configuration.
[ "Adds", "bucket", "cors", "configuration", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/s3/s3apps.py#L154-L175
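A sketch of one entry in `s3props['cors']['cors_rules']` as consumed above; CORS is only applied when both the cors and website blocks are enabled. Header, origin, and max-age values are placeholders.

```python
cors_rule = {
    'cors_headers': ['Authorization'],
    'cors_methods': ['GET', 'HEAD'],
    'cors_origins': ['https://example.com'],
    'cors_expose_headers': ['ETag'],
    'cors_max_age': 3000,
}
```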