| column | type |
|---|---|
| id | int32 (0–252k) |
| repo | string (7–55 chars) |
| path | string (4–127 chars) |
| func_name | string (1–88 chars) |
| original_string | string (75–19.8k chars) |
| language | string (1 class: `python`) |
| code | string (75–19.8k chars) |
| code_tokens | sequence |
| docstring | string (3–17.3k chars) |
| docstring_tokens | sequence |
| sha | string (40 chars) |
| url | string (87–242 chars) |

In every row, `code` is identical to `original_string`, `code_tokens` is its token sequence, and `docstring`/`docstring_tokens` repeat the docstring already embedded in the code, so each row below lists the unique fields only: id, repo, path, func_name, the code (with its docstring), sha, and url.
**6,000** | mfitzp/padua | padua/analysis.py | `modifiedaminoacids` | python

```python
def modifiedaminoacids(df):
    """
    Calculate the number of modified amino acids in the supplied ``DataFrame``.

    Returns the total of all modifications and the total for each amino acid
    individually, as an ``int`` and a ``dict`` of ``int``, keyed by amino acid,
    respectively.

    :param df: Pandas ``DataFrame`` containing processed data.
    :return: total_aas ``int`` the total number of all modified amino acids;
        quants ``dict`` of ``int`` keyed by amino acid, giving individual
        counts for each aa.
    """
    amino_acids = list(df['Amino acid'].values)
    aas = set(amino_acids)
    quants = {}
    for aa in aas:
        quants[aa] = amino_acids.count(aa)
    total_aas = len(amino_acids)
    return total_aas, quants
```
"""
Calculate the number of modified amino acids in supplied ``DataFrame``.
Returns the total of all modifications and the total for each amino acid individually, as an ``int`` and a
``dict`` of ``int``, keyed by amino acid, respectively.
:param df: Pandas ``DataFrame`` containing processed data.
:return: total_aas ``int`` the total number of all modified amino acids
quants ``dict`` of ``int`` keyed by amino acid, giving individual counts for each aa.
"""
amino_acids = list(df['Amino acid'].values)
aas = set(amino_acids)
quants = {}
for aa in aas:
quants[aa] = amino_acids.count(aa)
total_aas = len(amino_acids)
return total_aas, quants | [
"def",
"modifiedaminoacids",
"(",
"df",
")",
":",
"amino_acids",
"=",
"list",
"(",
"df",
"[",
"'Amino acid'",
"]",
".",
"values",
")",
"aas",
"=",
"set",
"(",
"amino_acids",
")",
"quants",
"=",
"{",
"}",
"for",
"aa",
"in",
"aas",
":",
"quants",
"[",
"aa",
"]",
"=",
"amino_acids",
".",
"count",
"(",
"aa",
")",
"total_aas",
"=",
"len",
"(",
"amino_acids",
")",
"return",
"total_aas",
",",
"quants"
] | Calculate the number of modified amino acids in supplied ``DataFrame``.
Returns the total of all modifications and the total for each amino acid individually, as an ``int`` and a
``dict`` of ``int``, keyed by amino acid, respectively.
:param df: Pandas ``DataFrame`` containing processed data.
:return: total_aas ``int`` the total number of all modified amino acids
quants ``dict`` of ``int`` keyed by amino acid, giving individual counts for each aa. | [
"Calculate",
"the",
"number",
"of",
"modified",
"amino",
"acids",
"in",
"supplied",
"DataFrame",
"."
] | 8b14bf4d2f895da6aea5d7885d409315bd303ec6 | https://github.com/mfitzp/padua/blob/8b14bf4d2f895da6aea5d7885d409315bd303ec6/padua/analysis.py#L312-L333 |
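
A minimal usage sketch (assuming padua is installed; the `'Amino acid'` column name is taken from the function body, and the data are invented):

```python
import pandas as pd
from padua.analysis import modifiedaminoacids

# One row per modification site
df = pd.DataFrame({'Amino acid': ['S', 'T', 'S', 'Y', 'S']})

total_aas, quants = modifiedaminoacids(df)
print(total_aas)  # 5
print(quants)     # {'S': 3, 'T': 1, 'Y': 1}
```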
**6,001** | mfitzp/padua | padua/process.py | `build_index_from_design` | python

```python
def build_index_from_design(df, design, remove_prefix=None, types=None, axis=1, auto_convert_numeric=True, unmatched_columns='index'):
    """
    Build a MultiIndex from a design table.

    Supply a table with column headings for the new multiindex
    and an index containing the labels to search for in the data.

    :param df:
    :param design:
    :param remove_prefix:
    :param types:
    :param axis:
    :param auto_convert_numeric:
    :param unmatched_columns: 'index' to append unmatched columns to the
        row index, 'drop' to remove them, or a falsey value to fill
        unmatched positions with None
    :return:
    """
    df = df.copy()
    if 'Label' not in design.index.names:
        design = design.set_index('Label')
    if remove_prefix is None:
        remove_prefix = []
    if type(remove_prefix) is str:
        remove_prefix = [remove_prefix]
    unmatched_for_index = []
    names = design.columns.values
    idx_levels = len(names)
    indexes = []
    # Convert numeric-only columns; except the index
    if auto_convert_numeric:
        design = design.apply(pd.to_numeric, errors="ignore")
        # The match columns are always strings, so the index must also be
        design.index = design.index.astype(str)
    # Apply type settings
    if types:
        for n, t in types.items():
            if n in design.columns.values:
                design[n] = design[n].astype(t)
    # Build the index
    for lo in df.columns.values:
        l = copy(lo)
        for s in remove_prefix:
            l = l.replace(s, '')
        # Remove leading/trailing spaces
        l = l.strip()
        # Convert to numeric if possible
        l = numeric(l)
        # Attempt to match to the labels
        try:
            # Index
            idx = design.loc[str(l)]
        except KeyError:
            if unmatched_columns:
                unmatched_for_index.append(lo)
            else:
                # No match, fill with None
                idx = tuple([None] * idx_levels)
                indexes.append(idx)
        else:
            # We have a matched row, store it
            idx = tuple(idx.values)
            indexes.append(idx)
    if axis == 0:
        df.index = pd.MultiIndex.from_tuples(indexes, names=names)
    else:
        # If using unmatched for index, append
        if unmatched_columns == 'index':
            df = df.set_index(unmatched_for_index, append=True)
        elif unmatched_columns == 'drop':
            df = df.drop(unmatched_for_index, axis=1)
        df.columns = pd.MultiIndex.from_tuples(indexes, names=names)
        df = df.sort_index(axis=1)
    return df
```
"""
Build a MultiIndex from a design table.
Supply with a table with column headings for the new multiindex
and a index containing the labels to search for in the data.
:param df:
:param design:
:param remove:
:param types:
:param axis:
:param auto_convert_numeric:
:return:
"""
df = df.copy()
if 'Label' not in design.index.names:
design = design.set_index('Label')
if remove_prefix is None:
remove_prefix = []
if type(remove_prefix) is str:
remove_prefix=[remove_prefix]
unmatched_for_index = []
names = design.columns.values
idx_levels = len(names)
indexes = []
# Convert numeric only columns_to_combine; except index
if auto_convert_numeric:
design = design.apply(pd.to_numeric, errors="ignore")
# The match columns are always strings, so the index must also be
design.index = design.index.astype(str)
# Apply type settings
if types:
for n, t in types.items():
if n in design.columns.values:
design[n] = design[n].astype(t)
# Build the index
for lo in df.columns.values:
l = copy(lo)
for s in remove_prefix:
l = l.replace(s, '')
# Remove trailing/forward spaces
l = l.strip()
# Convert to numeric if possible
l = numeric(l)
# Attempt to match to the labels
try:
# Index
idx = design.loc[str(l)]
except:
if unmatched_columns:
unmatched_for_index.append(lo)
else:
# No match, fill with None
idx = tuple([None] * idx_levels)
indexes.append(idx)
else:
# We have a matched row, store it
idx = tuple(idx.values)
indexes.append(idx)
if axis == 0:
df.index = pd.MultiIndex.from_tuples(indexes, names=names)
else:
# If using unmatched for index, append
if unmatched_columns == 'index':
df = df.set_index(unmatched_for_index, append=True)
elif unmatched_columns == 'drop':
df = df.drop(unmatched_for_index, axis=1)
df.columns = pd.MultiIndex.from_tuples(indexes, names=names)
df = df.sort_index(axis=1)
return df | [
"def",
"build_index_from_design",
"(",
"df",
",",
"design",
",",
"remove_prefix",
"=",
"None",
",",
"types",
"=",
"None",
",",
"axis",
"=",
"1",
",",
"auto_convert_numeric",
"=",
"True",
",",
"unmatched_columns",
"=",
"'index'",
")",
":",
"df",
"=",
"df",
".",
"copy",
"(",
")",
"if",
"'Label'",
"not",
"in",
"design",
".",
"index",
".",
"names",
":",
"design",
"=",
"design",
".",
"set_index",
"(",
"'Label'",
")",
"if",
"remove_prefix",
"is",
"None",
":",
"remove_prefix",
"=",
"[",
"]",
"if",
"type",
"(",
"remove_prefix",
")",
"is",
"str",
":",
"remove_prefix",
"=",
"[",
"remove_prefix",
"]",
"unmatched_for_index",
"=",
"[",
"]",
"names",
"=",
"design",
".",
"columns",
".",
"values",
"idx_levels",
"=",
"len",
"(",
"names",
")",
"indexes",
"=",
"[",
"]",
"# Convert numeric only columns_to_combine; except index",
"if",
"auto_convert_numeric",
":",
"design",
"=",
"design",
".",
"apply",
"(",
"pd",
".",
"to_numeric",
",",
"errors",
"=",
"\"ignore\"",
")",
"# The match columns are always strings, so the index must also be",
"design",
".",
"index",
"=",
"design",
".",
"index",
".",
"astype",
"(",
"str",
")",
"# Apply type settings",
"if",
"types",
":",
"for",
"n",
",",
"t",
"in",
"types",
".",
"items",
"(",
")",
":",
"if",
"n",
"in",
"design",
".",
"columns",
".",
"values",
":",
"design",
"[",
"n",
"]",
"=",
"design",
"[",
"n",
"]",
".",
"astype",
"(",
"t",
")",
"# Build the index",
"for",
"lo",
"in",
"df",
".",
"columns",
".",
"values",
":",
"l",
"=",
"copy",
"(",
"lo",
")",
"for",
"s",
"in",
"remove_prefix",
":",
"l",
"=",
"l",
".",
"replace",
"(",
"s",
",",
"''",
")",
"# Remove trailing/forward spaces",
"l",
"=",
"l",
".",
"strip",
"(",
")",
"# Convert to numeric if possible",
"l",
"=",
"numeric",
"(",
"l",
")",
"# Attempt to match to the labels",
"try",
":",
"# Index",
"idx",
"=",
"design",
".",
"loc",
"[",
"str",
"(",
"l",
")",
"]",
"except",
":",
"if",
"unmatched_columns",
":",
"unmatched_for_index",
".",
"append",
"(",
"lo",
")",
"else",
":",
"# No match, fill with None",
"idx",
"=",
"tuple",
"(",
"[",
"None",
"]",
"*",
"idx_levels",
")",
"indexes",
".",
"append",
"(",
"idx",
")",
"else",
":",
"# We have a matched row, store it",
"idx",
"=",
"tuple",
"(",
"idx",
".",
"values",
")",
"indexes",
".",
"append",
"(",
"idx",
")",
"if",
"axis",
"==",
"0",
":",
"df",
".",
"index",
"=",
"pd",
".",
"MultiIndex",
".",
"from_tuples",
"(",
"indexes",
",",
"names",
"=",
"names",
")",
"else",
":",
"# If using unmatched for index, append",
"if",
"unmatched_columns",
"==",
"'index'",
":",
"df",
"=",
"df",
".",
"set_index",
"(",
"unmatched_for_index",
",",
"append",
"=",
"True",
")",
"elif",
"unmatched_columns",
"==",
"'drop'",
":",
"df",
"=",
"df",
".",
"drop",
"(",
"unmatched_for_index",
",",
"axis",
"=",
"1",
")",
"df",
".",
"columns",
"=",
"pd",
".",
"MultiIndex",
".",
"from_tuples",
"(",
"indexes",
",",
"names",
"=",
"names",
")",
"df",
"=",
"df",
".",
"sort_index",
"(",
"axis",
"=",
"1",
")",
"return",
"df"
] | Build a MultiIndex from a design table.
Supply with a table with column headings for the new multiindex
and a index containing the labels to search for in the data.
:param df:
:param design:
:param remove:
:param types:
:param axis:
:param auto_convert_numeric:
:return: | [
"Build",
"a",
"MultiIndex",
"from",
"a",
"design",
"table",
"."
] | 8b14bf4d2f895da6aea5d7885d409315bd303ec6 | https://github.com/mfitzp/padua/blob/8b14bf4d2f895da6aea5d7885d409315bd303ec6/padua/process.py#L23-L111 |
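
A minimal usage sketch (assuming padua is installed): the design table needs a `Label` column matching the data column names, plus one column per level of the new index; the data here are invented:

```python
import pandas as pd
from padua.process import build_index_from_design

# Data columns carry sample labels; the design maps each label to index levels
df = pd.DataFrame([[1.0, 2.0, 3.0, 'a']],
                  columns=['WT1', 'WT2', 'KO1', 'Technical'])
design = pd.DataFrame({
    'Label': ['WT1', 'WT2', 'KO1'],
    'Group': ['WT', 'WT', 'KO'],
    'Replicate': [1, 2, 1],
})

dfi = build_index_from_design(df, design)
# 'Technical' is unmatched, so it is appended to the row index; the
# remaining columns get a (Group, Replicate) MultiIndex
print(dfi.columns.names)  # ['Group', 'Replicate']
```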
**6,002** | mfitzp/padua | padua/process.py | `build_index_from_labels` | python

```python
def build_index_from_labels(df, indices, remove_prefix=None, types=None, axis=1):
    """
    Build a MultiIndex from a list of labels and matching regex.

    Supply a list of index levels and a matching regex to extract
    each level from the sample labels.

    :param df:
    :param indices: Tuples of ('label', 'regex') matches
    :param remove_prefix: Strip these strings from labels before matching (e.g. headers)
    :param types: Dict mapping level names to a type callable applied to extracted values
    :param axis: Axis (1 = columns, 0 = rows)
    :return:
    """
    df = df.copy()
    if remove_prefix is None:
        remove_prefix = []
    if types is None:
        types = {}
    idx = [df.index, df.columns][axis]
    indexes = []
    for l in idx.get_level_values(0):
        for s in remove_prefix:
            l = l.replace(s + " ", '')
        ixr = []
        for n, m in indices:
            m = re.search(m, l)
            if m:
                r = m.group(1)
                if n in types:
                    # Map this value to a new type
                    r = types[n](r)
            else:
                r = None
            ixr.append(r)
        indexes.append(tuple(ixr))
    if axis == 0:
        df.index = pd.MultiIndex.from_tuples(indexes, names=[n for n, _ in indices])
    else:
        df.columns = pd.MultiIndex.from_tuples(indexes, names=[n for n, _ in indices])
    return df
```
"""
Build a MultiIndex from a list of labels and matching regex
Supply with a dictionary of Hierarchy levels and matching regex to
extract this level from the sample label
:param df:
:param indices: Tuples of indices ('label','regex') matches
:param strip: Strip these strings from labels before matching (e.g. headers)
:param axis=1: Axis (1 = columns, 0 = rows)
:return:
"""
df = df.copy()
if remove_prefix is None:
remove_prefix = []
if types is None:
types = {}
idx = [df.index, df.columns][axis]
indexes = []
for l in idx.get_level_values(0):
for s in remove_prefix:
l = l.replace(s+" ", '')
ixr = []
for n, m in indices:
m = re.search(m, l)
if m:
r = m.group(1)
if n in types:
# Map this value to a new type
r = types[n](r)
else:
r = None
ixr.append(r)
indexes.append( tuple(ixr) )
if axis == 0:
df.index = pd.MultiIndex.from_tuples(indexes, names=[n for n, _ in indices])
else:
df.columns = pd.MultiIndex.from_tuples(indexes, names=[n for n, _ in indices])
return df | [
"def",
"build_index_from_labels",
"(",
"df",
",",
"indices",
",",
"remove_prefix",
"=",
"None",
",",
"types",
"=",
"None",
",",
"axis",
"=",
"1",
")",
":",
"df",
"=",
"df",
".",
"copy",
"(",
")",
"if",
"remove_prefix",
"is",
"None",
":",
"remove_prefix",
"=",
"[",
"]",
"if",
"types",
"is",
"None",
":",
"types",
"=",
"{",
"}",
"idx",
"=",
"[",
"df",
".",
"index",
",",
"df",
".",
"columns",
"]",
"[",
"axis",
"]",
"indexes",
"=",
"[",
"]",
"for",
"l",
"in",
"idx",
".",
"get_level_values",
"(",
"0",
")",
":",
"for",
"s",
"in",
"remove_prefix",
":",
"l",
"=",
"l",
".",
"replace",
"(",
"s",
"+",
"\" \"",
",",
"''",
")",
"ixr",
"=",
"[",
"]",
"for",
"n",
",",
"m",
"in",
"indices",
":",
"m",
"=",
"re",
".",
"search",
"(",
"m",
",",
"l",
")",
"if",
"m",
":",
"r",
"=",
"m",
".",
"group",
"(",
"1",
")",
"if",
"n",
"in",
"types",
":",
"# Map this value to a new type",
"r",
"=",
"types",
"[",
"n",
"]",
"(",
"r",
")",
"else",
":",
"r",
"=",
"None",
"ixr",
".",
"append",
"(",
"r",
")",
"indexes",
".",
"append",
"(",
"tuple",
"(",
"ixr",
")",
")",
"if",
"axis",
"==",
"0",
":",
"df",
".",
"index",
"=",
"pd",
".",
"MultiIndex",
".",
"from_tuples",
"(",
"indexes",
",",
"names",
"=",
"[",
"n",
"for",
"n",
",",
"_",
"in",
"indices",
"]",
")",
"else",
":",
"df",
".",
"columns",
"=",
"pd",
".",
"MultiIndex",
".",
"from_tuples",
"(",
"indexes",
",",
"names",
"=",
"[",
"n",
"for",
"n",
",",
"_",
"in",
"indices",
"]",
")",
"return",
"df"
] | Build a MultiIndex from a list of labels and matching regex
Supply with a dictionary of Hierarchy levels and matching regex to
extract this level from the sample label
:param df:
:param indices: Tuples of indices ('label','regex') matches
:param strip: Strip these strings from labels before matching (e.g. headers)
:param axis=1: Axis (1 = columns, 0 = rows)
:return: | [
"Build",
"a",
"MultiIndex",
"from",
"a",
"list",
"of",
"labels",
"and",
"matching",
"regex"
] | 8b14bf4d2f895da6aea5d7885d409315bd303ec6 | https://github.com/mfitzp/padua/blob/8b14bf4d2f895da6aea5d7885d409315bd303ec6/padua/process.py#L114-L165 |
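
A minimal usage sketch (assuming padua is installed): each tuple supplies a level name and a regex with one capture group, applied to every column label:

```python
import pandas as pd
from padua.process import build_index_from_labels

df = pd.DataFrame([[1.0, 2.0]], columns=['WT_R1', 'KO_R2'])

# ('level name', regex with one capture group)
indices = [('Group', r'^(\w+)_'), ('Replicate', r'_R(\d+)$')]
dfi = build_index_from_labels(df, indices, types={'Replicate': int})
print(dfi.columns.tolist())  # [('WT', 1), ('KO', 2)]
```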
**6,003** | mfitzp/padua | padua/process.py | `combine_expression_columns` | python

```python
def combine_expression_columns(df, columns_to_combine, remove_combined=True):
    """
    Combine expression columns, calculating the mean of each pair of columns.

    :param df: Pandas dataframe
    :param columns_to_combine: A list of tuples containing the column names to combine
    :param remove_combined: If True, drop the source columns after combining
    :return:
    """
    df = df.copy()
    for ca, cb in columns_to_combine:
        df["%s_(x+y)/2_%s" % (ca, cb)] = (df[ca] + df[cb]) / 2
    if remove_combined:
        for ca, cb in columns_to_combine:
            df.drop([ca, cb], inplace=True, axis=1)
    return df
```
"""
Combine expression columns, calculating the mean for 2 columns
:param df: Pandas dataframe
:param columns_to_combine: A list of tuples containing the column names to combine
:return:
"""
df = df.copy()
for ca, cb in columns_to_combine:
df["%s_(x+y)/2_%s" % (ca, cb)] = (df[ca] + df[cb]) / 2
if remove_combined:
for ca, cb in columns_to_combine:
df.drop([ca, cb], inplace=True, axis=1)
return df | [
"def",
"combine_expression_columns",
"(",
"df",
",",
"columns_to_combine",
",",
"remove_combined",
"=",
"True",
")",
":",
"df",
"=",
"df",
".",
"copy",
"(",
")",
"for",
"ca",
",",
"cb",
"in",
"columns_to_combine",
":",
"df",
"[",
"\"%s_(x+y)/2_%s\"",
"%",
"(",
"ca",
",",
"cb",
")",
"]",
"=",
"(",
"df",
"[",
"ca",
"]",
"+",
"df",
"[",
"cb",
"]",
")",
"/",
"2",
"if",
"remove_combined",
":",
"for",
"ca",
",",
"cb",
"in",
"columns_to_combine",
":",
"df",
".",
"drop",
"(",
"[",
"ca",
",",
"cb",
"]",
",",
"inplace",
"=",
"True",
",",
"axis",
"=",
"1",
")",
"return",
"df"
] | Combine expression columns, calculating the mean for 2 columns
:param df: Pandas dataframe
:param columns_to_combine: A list of tuples containing the column names to combine
:return: | [
"Combine",
"expression",
"columns",
"calculating",
"the",
"mean",
"for",
"2",
"columns"
] | 8b14bf4d2f895da6aea5d7885d409315bd303ec6 | https://github.com/mfitzp/padua/blob/8b14bf4d2f895da6aea5d7885d409315bd303ec6/padua/process.py#L198-L218 |
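
A minimal usage sketch (assuming padua is installed; data invented):

```python
import pandas as pd
from padua.process import combine_expression_columns

df = pd.DataFrame({'A1': [2.0, 4.0], 'A2': [4.0, 8.0], 'B': [1.0, 1.0]})
dfc = combine_expression_columns(df, [('A1', 'A2')])
print(dfc.columns.tolist())           # ['B', 'A1_(x+y)/2_A2']
print(dfc['A1_(x+y)/2_A2'].tolist())  # [3.0, 6.0]
```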
**6,004** | mfitzp/padua | padua/process.py | `expand_side_table` | python

```python
def expand_side_table(df):
    """
    Perform the equivalent of 'expand side table' in Perseus by folding
    Multiplicity columns down onto duplicate rows.

    The id is remapped to UID___Multiplicity, which is different to Perseus
    behaviour, but prevents accidental matching of non-matching rows from
    occurring later in analysis.

    :param df:
    :return:
    """
    df = df.copy()
    idx = df.index.names
    df.reset_index(inplace=True)

    def strip_multiplicity(df):
        df.columns = [c[:-4] for c in df.columns]
        return df

    def strip_multiple(s):
        for sr in ['___1', '___2', '___3']:
            if s.endswith(sr):
                s = s[:-4]
        return s

    base = df.filter(regex=r'.*(?<!___\d)$')
    # Remove columns that will match stripped multiplicity columns
    for c in df.columns.values:
        if strip_multiple(c) != c and strip_multiple(c) in list(base.columns.values):
            base.drop(strip_multiple(c), axis=1, inplace=True)
    multi1 = df.filter(regex='^.*___1$')
    multi1 = strip_multiplicity(multi1)
    multi1['Multiplicity'] = '___1'
    multi1 = pd.concat([multi1, base], axis=1)
    multi2 = df.filter(regex='^.*___2$')
    multi2 = strip_multiplicity(multi2)
    multi2['Multiplicity'] = '___2'
    multi2 = pd.concat([multi2, base], axis=1)
    multi3 = df.filter(regex='^.*___3$')
    multi3 = strip_multiplicity(multi3)
    multi3['Multiplicity'] = '___3'
    multi3 = pd.concat([multi3, base], axis=1)
    df = pd.concat([multi1, multi2, multi3], axis=0)
    df['id'] = ["%s%s" % (a, b) for a, b in zip(df['id'], df['Multiplicity'])]
    if idx[0] is not None:
        df.set_index(idx, inplace=True)
    return df
```
"""
Perform equivalent of 'expand side table' in Perseus by folding
Multiplicity columns down onto duplicate rows
The id is remapped to UID___Multiplicity, which
is different to Perseus behaviour, but prevents accidental of
non-matching rows from occurring later in analysis.
:param df:
:return:
"""
df = df.copy()
idx = df.index.names
df.reset_index(inplace=True)
def strip_multiplicity(df):
df.columns = [c[:-4] for c in df.columns]
return df
def strip_multiple(s):
for sr in ['___1','___2','___3']:
if s.endswith(sr):
s = s[:-4]
return s
base = df.filter(regex='.*(?<!___\d)$')
# Remove columns that will match ripped multiplicity columns
for c in df.columns.values:
if strip_multiple(c) != c and strip_multiple(c) in list(base.columns.values):
base.drop(strip_multiple(c), axis=1, inplace=True)
multi1 = df.filter(regex='^.*___1$')
multi1 = strip_multiplicity(multi1)
multi1['Multiplicity'] = '___1'
multi1 = pd.concat([multi1, base], axis=1)
multi2 = df.filter(regex='^.*___2$')
multi2 = strip_multiplicity(multi2)
multi2['Multiplicity'] = '___2'
multi2 = pd.concat([multi2, base], axis=1)
multi3 = df.filter(regex='^.*___3$')
multi3 = strip_multiplicity(multi3)
multi3['Multiplicity'] = '___3'
multi3 = pd.concat([multi3, base], axis=1)
df = pd.concat([multi1, multi2, multi3], axis=0)
df['id'] = ["%s%s" % (a, b) for a, b in zip(df['id'], df['Multiplicity'])]
if idx[0] is not None:
df.set_index(idx, inplace=True)
return df | [
"def",
"expand_side_table",
"(",
"df",
")",
":",
"df",
"=",
"df",
".",
"copy",
"(",
")",
"idx",
"=",
"df",
".",
"index",
".",
"names",
"df",
".",
"reset_index",
"(",
"inplace",
"=",
"True",
")",
"def",
"strip_multiplicity",
"(",
"df",
")",
":",
"df",
".",
"columns",
"=",
"[",
"c",
"[",
":",
"-",
"4",
"]",
"for",
"c",
"in",
"df",
".",
"columns",
"]",
"return",
"df",
"def",
"strip_multiple",
"(",
"s",
")",
":",
"for",
"sr",
"in",
"[",
"'___1'",
",",
"'___2'",
",",
"'___3'",
"]",
":",
"if",
"s",
".",
"endswith",
"(",
"sr",
")",
":",
"s",
"=",
"s",
"[",
":",
"-",
"4",
"]",
"return",
"s",
"base",
"=",
"df",
".",
"filter",
"(",
"regex",
"=",
"'.*(?<!___\\d)$'",
")",
"# Remove columns that will match ripped multiplicity columns",
"for",
"c",
"in",
"df",
".",
"columns",
".",
"values",
":",
"if",
"strip_multiple",
"(",
"c",
")",
"!=",
"c",
"and",
"strip_multiple",
"(",
"c",
")",
"in",
"list",
"(",
"base",
".",
"columns",
".",
"values",
")",
":",
"base",
".",
"drop",
"(",
"strip_multiple",
"(",
"c",
")",
",",
"axis",
"=",
"1",
",",
"inplace",
"=",
"True",
")",
"multi1",
"=",
"df",
".",
"filter",
"(",
"regex",
"=",
"'^.*___1$'",
")",
"multi1",
"=",
"strip_multiplicity",
"(",
"multi1",
")",
"multi1",
"[",
"'Multiplicity'",
"]",
"=",
"'___1'",
"multi1",
"=",
"pd",
".",
"concat",
"(",
"[",
"multi1",
",",
"base",
"]",
",",
"axis",
"=",
"1",
")",
"multi2",
"=",
"df",
".",
"filter",
"(",
"regex",
"=",
"'^.*___2$'",
")",
"multi2",
"=",
"strip_multiplicity",
"(",
"multi2",
")",
"multi2",
"[",
"'Multiplicity'",
"]",
"=",
"'___2'",
"multi2",
"=",
"pd",
".",
"concat",
"(",
"[",
"multi2",
",",
"base",
"]",
",",
"axis",
"=",
"1",
")",
"multi3",
"=",
"df",
".",
"filter",
"(",
"regex",
"=",
"'^.*___3$'",
")",
"multi3",
"=",
"strip_multiplicity",
"(",
"multi3",
")",
"multi3",
"[",
"'Multiplicity'",
"]",
"=",
"'___3'",
"multi3",
"=",
"pd",
".",
"concat",
"(",
"[",
"multi3",
",",
"base",
"]",
",",
"axis",
"=",
"1",
")",
"df",
"=",
"pd",
".",
"concat",
"(",
"[",
"multi1",
",",
"multi2",
",",
"multi3",
"]",
",",
"axis",
"=",
"0",
")",
"df",
"[",
"'id'",
"]",
"=",
"[",
"\"%s%s\"",
"%",
"(",
"a",
",",
"b",
")",
"for",
"a",
",",
"b",
"in",
"zip",
"(",
"df",
"[",
"'id'",
"]",
",",
"df",
"[",
"'Multiplicity'",
"]",
")",
"]",
"if",
"idx",
"[",
"0",
"]",
"is",
"not",
"None",
":",
"df",
".",
"set_index",
"(",
"idx",
",",
"inplace",
"=",
"True",
")",
"return",
"df"
] | Perform equivalent of 'expand side table' in Perseus by folding
Multiplicity columns down onto duplicate rows
The id is remapped to UID___Multiplicity, which
is different to Perseus behaviour, but prevents accidental of
non-matching rows from occurring later in analysis.
:param df:
:return: | [
"Perform",
"equivalent",
"of",
"expand",
"side",
"table",
"in",
"Perseus",
"by",
"folding",
"Multiplicity",
"columns",
"down",
"onto",
"duplicate",
"rows"
] | 8b14bf4d2f895da6aea5d7885d409315bd303ec6 | https://github.com/mfitzp/padua/blob/8b14bf4d2f895da6aea5d7885d409315bd303ec6/padua/process.py#L221-L277 |
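
A hedged usage sketch (assuming padua is installed; the column layout mimics a MaxQuant-style table with `___1/___2/___3` multiplicity suffixes, and the values are invented):

```python
import pandas as pd
from padua.process import expand_side_table

df = pd.DataFrame({
    'id': [0, 1],
    'Intensity___1': [5.0, 6.0],
    'Intensity___2': [7.0, 8.0],
    'Intensity___3': [9.0, 10.0],
    'Protein': ['P1', 'P2'],
})

long_df = expand_side_table(df)
# Six rows: each original row repeated once per multiplicity, with the
# id remapped to UID___Multiplicity
print(long_df[['id', 'Multiplicity', 'Intensity']])
```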
**6,005** | mfitzp/padua | padua/process.py | `apply_experimental_design` | python

```python
def apply_experimental_design(df, f, prefix='Intensity '):
    """
    Load the experimental design template from MaxQuant and use it to apply
    the label names to the data columns.

    :param df:
    :param f: File path for the experimental design template
    :param prefix:
    :return: df
    """
    df = df.copy()
    edt = pd.read_csv(f, sep='\t', header=0)
    edt.set_index('Experiment', inplace=True)
    new_column_labels = []
    for l in df.columns.values:
        try:
            l = edt.loc[l.replace(prefix, '')]['Name']
        except (IndexError, KeyError):
            pass
        new_column_labels.append(l)
    df.columns = new_column_labels
    return df
```
"""
Load the experimental design template from MaxQuant and use it to apply the label names to the data columns.
:param df:
:param f: File path for the experimental design template
:param prefix:
:return: dt
"""
df = df.copy()
edt = pd.read_csv(f, sep='\t', header=0)
edt.set_index('Experiment', inplace=True)
new_column_labels = []
for l in df.columns.values:
try:
l = edt.loc[l.replace(prefix, '')]['Name']
except (IndexError, KeyError):
pass
new_column_labels.append(l)
df.columns = new_column_labels
return df | [
"def",
"apply_experimental_design",
"(",
"df",
",",
"f",
",",
"prefix",
"=",
"'Intensity '",
")",
":",
"df",
"=",
"df",
".",
"copy",
"(",
")",
"edt",
"=",
"pd",
".",
"read_csv",
"(",
"f",
",",
"sep",
"=",
"'\\t'",
",",
"header",
"=",
"0",
")",
"edt",
".",
"set_index",
"(",
"'Experiment'",
",",
"inplace",
"=",
"True",
")",
"new_column_labels",
"=",
"[",
"]",
"for",
"l",
"in",
"df",
".",
"columns",
".",
"values",
":",
"try",
":",
"l",
"=",
"edt",
".",
"loc",
"[",
"l",
".",
"replace",
"(",
"prefix",
",",
"''",
")",
"]",
"[",
"'Name'",
"]",
"except",
"(",
"IndexError",
",",
"KeyError",
")",
":",
"pass",
"new_column_labels",
".",
"append",
"(",
"l",
")",
"df",
".",
"columns",
"=",
"new_column_labels",
"return",
"df"
] | Load the experimental design template from MaxQuant and use it to apply the label names to the data columns.
:param df:
:param f: File path for the experimental design template
:param prefix:
:return: dt | [
"Load",
"the",
"experimental",
"design",
"template",
"from",
"MaxQuant",
"and",
"use",
"it",
"to",
"apply",
"the",
"label",
"names",
"to",
"the",
"data",
"columns",
"."
] | 8b14bf4d2f895da6aea5d7885d409315bd303ec6 | https://github.com/mfitzp/padua/blob/8b14bf4d2f895da6aea5d7885d409315bd303ec6/padua/process.py#L280-L306 |
**6,006** | mfitzp/padua | padua/process.py | `transform_expression_columns` | python

```python
def transform_expression_columns(df, fn=np.log2, prefix='Intensity '):
    """
    Apply a transformation to expression columns.

    Default is a log2 transform applied to expression columns beginning
    with 'Intensity'.

    :param df:
    :param fn: The transformation function to apply (default ``np.log2``)
    :param prefix: The column prefix for expression columns
    :return:
    """
    df = df.copy()
    mask = np.array([l.startswith(prefix) for l in df.columns.values])
    df.iloc[:, mask] = fn(df.iloc[:, mask])
    df.replace([np.inf, -np.inf], np.nan, inplace=True)
    return df
```
"""
Apply transformation to expression columns.
Default is log2 transform to expression columns beginning with Intensity
:param df:
:param prefix: The column prefix for expression columns
:return:
"""
df = df.copy()
mask = np.array([l.startswith(prefix) for l in df.columns.values])
df.iloc[:, mask] = fn(df.iloc[:, mask])
df.replace([np.inf, -np.inf], np.nan, inplace=True)
return df | [
"def",
"transform_expression_columns",
"(",
"df",
",",
"fn",
"=",
"np",
".",
"log2",
",",
"prefix",
"=",
"'Intensity '",
")",
":",
"df",
"=",
"df",
".",
"copy",
"(",
")",
"mask",
"=",
"np",
".",
"array",
"(",
"[",
"l",
".",
"startswith",
"(",
"prefix",
")",
"for",
"l",
"in",
"df",
".",
"columns",
".",
"values",
"]",
")",
"df",
".",
"iloc",
"[",
":",
",",
"mask",
"]",
"=",
"fn",
"(",
"df",
".",
"iloc",
"[",
":",
",",
"mask",
"]",
")",
"df",
".",
"replace",
"(",
"[",
"np",
".",
"inf",
",",
"-",
"np",
".",
"inf",
"]",
",",
"np",
".",
"nan",
",",
"inplace",
"=",
"True",
")",
"return",
"df"
] | Apply transformation to expression columns.
Default is log2 transform to expression columns beginning with Intensity
:param df:
:param prefix: The column prefix for expression columns
:return: | [
"Apply",
"transformation",
"to",
"expression",
"columns",
"."
] | 8b14bf4d2f895da6aea5d7885d409315bd303ec6 | https://github.com/mfitzp/padua/blob/8b14bf4d2f895da6aea5d7885d409315bd303ec6/padua/process.py#L309-L327 |
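
A minimal sketch (assuming padua is installed); note that `log2(0)` produces `-inf`, which the final `replace` call converts to `NaN`:

```python
import pandas as pd
from padua.process import transform_expression_columns

df = pd.DataFrame({'Intensity A': [1.0, 4.0, 0.0], 'Gene': ['x', 'y', 'z']})
dft = transform_expression_columns(df)
print(dft['Intensity A'].tolist())  # [0.0, 2.0, nan]
```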
**6,007** | mfitzp/padua | padua/process.py | `fold_columns_to_rows` | python

```python
def fold_columns_to_rows(df, levels_from=2):
    """
    Take levels from the columns and fold them down into the row index.

    This destroys the existing index; existing rows will appear as
    columns under the new column index.

    :param df:
    :param levels_from: The level (inclusive) from which the column index will be folded
    :return:
    """
    df = df.copy()
    df.reset_index(inplace=True, drop=True)  # Wipe out the current index
    df = df.T
    # Build all index combinations
    a = [list(set(df.index.get_level_values(i))) for i in range(0, levels_from)]
    combinations = list(itertools.product(*a))
    names = df.index.names[:levels_from]
    concats = []
    for c in combinations:
        try:
            dfcc = df.loc[c]
        except KeyError:
            continue
        else:
            # Silly pandas
            if len(dfcc.shape) == 1:
                continue
            dfcc.columns = pd.MultiIndex.from_tuples([c] * dfcc.shape[1], names=names)
            concats.append(dfcc)
    # Concatenate
    dfc = pd.concat(concats, axis=1)
    dfc.sort_index(axis=1, inplace=True)
    # Fix name if collapsed
    if dfc.index.name is None:
        dfc.index.name = df.index.names[-1]
    return dfc
```
"""
Take a levels from the columns and fold down into the row index.
This destroys the existing index; existing rows will appear as
columns under the new column index
:param df:
:param levels_from: The level (inclusive) from which column index will be folded
:return:
"""
df = df.copy()
df.reset_index(inplace=True, drop=True) # Wipe out the current index
df = df.T
# Build all index combinations
a = [list( set( df.index.get_level_values(i) ) ) for i in range(0, levels_from)]
combinations = list(itertools.product(*a))
names = df.index.names[:levels_from]
concats = []
for c in combinations:
try:
dfcc = df.loc[c]
except KeyError:
continue
else:
# Silly pandas
if len(dfcc.shape) == 1:
continue
dfcc.columns = pd.MultiIndex.from_tuples([c]*dfcc.shape[1], names=names)
concats.append(dfcc)
# Concatenate
dfc = pd.concat(concats, axis=1)
dfc.sort_index(axis=1, inplace=True)
# Fix name if collapsed
if dfc.index.name is None:
dfc.index.name = df.index.names[-1]
return dfc | [
"def",
"fold_columns_to_rows",
"(",
"df",
",",
"levels_from",
"=",
"2",
")",
":",
"df",
"=",
"df",
".",
"copy",
"(",
")",
"df",
".",
"reset_index",
"(",
"inplace",
"=",
"True",
",",
"drop",
"=",
"True",
")",
"# Wipe out the current index",
"df",
"=",
"df",
".",
"T",
"# Build all index combinations",
"a",
"=",
"[",
"list",
"(",
"set",
"(",
"df",
".",
"index",
".",
"get_level_values",
"(",
"i",
")",
")",
")",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"levels_from",
")",
"]",
"combinations",
"=",
"list",
"(",
"itertools",
".",
"product",
"(",
"*",
"a",
")",
")",
"names",
"=",
"df",
".",
"index",
".",
"names",
"[",
":",
"levels_from",
"]",
"concats",
"=",
"[",
"]",
"for",
"c",
"in",
"combinations",
":",
"try",
":",
"dfcc",
"=",
"df",
".",
"loc",
"[",
"c",
"]",
"except",
"KeyError",
":",
"continue",
"else",
":",
"# Silly pandas",
"if",
"len",
"(",
"dfcc",
".",
"shape",
")",
"==",
"1",
":",
"continue",
"dfcc",
".",
"columns",
"=",
"pd",
".",
"MultiIndex",
".",
"from_tuples",
"(",
"[",
"c",
"]",
"*",
"dfcc",
".",
"shape",
"[",
"1",
"]",
",",
"names",
"=",
"names",
")",
"concats",
".",
"append",
"(",
"dfcc",
")",
"# Concatenate",
"dfc",
"=",
"pd",
".",
"concat",
"(",
"concats",
",",
"axis",
"=",
"1",
")",
"dfc",
".",
"sort_index",
"(",
"axis",
"=",
"1",
",",
"inplace",
"=",
"True",
")",
"# Fix name if collapsed",
"if",
"dfc",
".",
"index",
".",
"name",
"is",
"None",
":",
"dfc",
".",
"index",
".",
"name",
"=",
"df",
".",
"index",
".",
"names",
"[",
"-",
"1",
"]",
"return",
"dfc"
] | Take a levels from the columns and fold down into the row index.
This destroys the existing index; existing rows will appear as
columns under the new column index
:param df:
:param levels_from: The level (inclusive) from which column index will be folded
:return: | [
"Take",
"a",
"levels",
"from",
"the",
"columns",
"and",
"fold",
"down",
"into",
"the",
"row",
"index",
".",
"This",
"destroys",
"the",
"existing",
"index",
";",
"existing",
"rows",
"will",
"appear",
"as",
"columns",
"under",
"the",
"new",
"column",
"index"
] | 8b14bf4d2f895da6aea5d7885d409315bd303ec6 | https://github.com/mfitzp/padua/blob/8b14bf4d2f895da6aea5d7885d409315bd303ec6/padua/process.py#L330-L377 |
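
A hedged sketch (assuming padua is installed): columns carry a three-level MultiIndex, the two outer levels are folded, and the innermost level becomes the row index:

```python
import pandas as pd
from padua.process import fold_columns_to_rows

cols = pd.MultiIndex.from_tuples(
    [('WT', 1, 'A'), ('WT', 1, 'B'), ('KO', 2, 'A'), ('KO', 2, 'B')],
    names=['Group', 'Rep', 'Measure'])
df = pd.DataFrame([[1, 2, 3, 4], [5, 6, 7, 8]], columns=cols)

folded = fold_columns_to_rows(df, levels_from=2)
print(folded.index.name)     # Measure
print(folded.columns.names)  # ['Group', 'Rep']
```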
**6,008** | ECRL/ecabc | ecabc/abc.py | `ABC.args` | python

```python
def args(self, args):
    '''Set additional arguments to be passed to the fitness function

    Args:
        args (dict): additional arguments
    '''
    self._args = args
    self._logger.log('debug', 'Args set to {}'.format(args))
```

sha `4e73125ff90bfeeae359a5ab1badba8894d70eaa` | https://github.com/ECRL/ecabc/blob/4e73125ff90bfeeae359a5ab1badba8894d70eaa/ecabc/abc.py#L147-L154
**6,009** | ECRL/ecabc | ecabc/abc.py | `ABC.minimize` | python

```python
def minimize(self, minimize):
    '''Configures the ABC to minimize fitness function return value or
    derived score

    Args:
        minimize (bool): if True, minimizes fitness function return value;
            if False, minimizes derived score
    '''
    self._minimize = minimize
    self._logger.log('debug', 'Minimize set to {}'.format(minimize))
```

sha `4e73125ff90bfeeae359a5ab1badba8894d70eaa` | https://github.com/ECRL/ecabc/blob/4e73125ff90bfeeae359a5ab1badba8894d70eaa/ecabc/abc.py#L165-L175
**6,010** | ECRL/ecabc | ecabc/abc.py | `ABC.num_employers` | python

```python
def num_employers(self, num_employers):
    '''Sets the number of employer bees; at least two are required

    Args:
        num_employers (int): number of employer bees
    '''
    if num_employers < 2:
        self._logger.log(
            'warn',
            'Two employers are needed: setting to two'
        )
        num_employers = 2
    self._num_employers = num_employers
    self._logger.log('debug', 'Number of employers set to {}'.format(
        num_employers
    ))
    self._limit = num_employers * len(self._value_ranges)
    self._logger.log('debug', 'Limit set to {}'.format(self._limit))
```

sha `4e73125ff90bfeeae359a5ab1badba8894d70eaa` | https://github.com/ECRL/ecabc/blob/4e73125ff90bfeeae359a5ab1badba8894d70eaa/ecabc/abc.py#L184-L202
**6,011** | ECRL/ecabc | ecabc/abc.py | `ABC.processes` | python

```python
def processes(self, processes):
    '''Set the number of concurrent processes the ABC will utilize for
    fitness function evaluation; if <= 1, a single process is used

    Args:
        processes (int): number of concurrent processes
    '''
    # Shut down any existing pool before resizing
    if self._processes > 1:
        self._pool.close()
        self._pool.join()
    self._processes = processes
    # Create a pool only when multiprocessing is requested
    if processes > 1:
        self._pool = multiprocessing.Pool(processes)
    else:
        self._pool = None
    self._logger.log('debug', 'Number of processes set to {}'.format(
        processes
    ))
```

sha `4e73125ff90bfeeae359a5ab1badba8894d70eaa` | https://github.com/ECRL/ecabc/blob/4e73125ff90bfeeae359a5ab1badba8894d70eaa/ecabc/abc.py#L268-L284
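
These setters are normally exercised through a short tuning loop. The sketch below is hedged: the `ABC(...)` constructor arguments and the `best_performer` accessor are assumptions inferred from the methods in this table (`_fitness_fxn`, `_value_ranges`, `create_employers`, `run_iteration`), and the fitness function is invented:

```python
from ecabc.abc import ABC

# Invented fitness function: distance from the origin (lower is better)
def fitness(values):
    return sum(v ** 2 for v in values)

# Constructor arguments are assumptions inferred from the setters above
colony = ABC(fitness_fxn=fitness,
             value_ranges=[('float', (-10.0, 10.0)),
                           ('float', (-10.0, 10.0))])
colony.num_employers = 10  # property setter shown above
colony.processes = 1       # evaluate in a single process
colony.create_employers()
for _ in range(50):
    colony.run_iteration()
print(colony.best_performer)  # assumed accessor for the best error/values
```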
**6,012** | ECRL/ecabc | ecabc/abc.py | `ABC.infer_process_count` | python

```python
def infer_process_count(self):
    '''Infers the number of CPU cores in the current system, sets the
    number of concurrent processes accordingly
    '''
    try:
        self.processes = multiprocessing.cpu_count()
    except NotImplementedError:
        self._logger.log(
            'error',
            'Could not infer CPU count, setting number of processes back to 4'
        )
        self.processes = 4
```

sha `4e73125ff90bfeeae359a5ab1badba8894d70eaa` | https://github.com/ECRL/ecabc/blob/4e73125ff90bfeeae359a5ab1badba8894d70eaa/ecabc/abc.py#L286-L298
**6,013** | ECRL/ecabc | ecabc/abc.py | `ABC.create_employers` | python

```python
def create_employers(self):
    '''Generate employer bees. This should be called directly after the
    ABC is initialized.
    '''
    self.__verify_ready(True)
    employers = []
    for i in range(self._num_employers):
        employer = EmployerBee(self.__gen_random_values())
        if self._processes <= 1:
            employer.error = self._fitness_fxn(
                employer.values, **self._args
            )
            employer.score = employer.get_score()
            if np.isnan(employer.score):
                self._logger.log('warn', 'NaN bee score: {}, {}'.format(
                    employer.id, employer.score
                ))
            self._logger.log('debug', 'Bee number {} created'.format(
                i + 1
            ))
            self.__update(employer.score, employer.values, employer.error)
        else:
            # Evaluate asynchronously; results are collected below
            employer.error = self._pool.apply_async(
                self._fitness_fxn,
                [employer.values],
                self._args
            )
            employers.append(employer)
        self._employers.append(employer)
    for idx, employer in enumerate(employers):
        try:
            employer.error = employer.error.get()
            employer.score = employer.get_score()
            if np.isnan(employer.score):
                self._logger.log('warn', 'NaN bee score: {}, {}'.format(
                    employer.id, employer.score
                ))
            self._logger.log('debug', 'Bee number {} created'.format(
                idx + 1
            ))
            self.__update(employer.score, employer.values, employer.error)
        except Exception as e:
            raise e
    self._logger.log('debug', 'Employer creation complete')
```

sha `4e73125ff90bfeeae359a5ab1badba8894d70eaa` | https://github.com/ECRL/ecabc/blob/4e73125ff90bfeeae359a5ab1badba8894d70eaa/ecabc/abc.py#L300-L344
**6,014** | ECRL/ecabc | ecabc/abc.py | `ABC.run_iteration` | python

```python
def run_iteration(self):
    '''Runs a single iteration of the ABC; employer phase -> probability
    calculation -> onlooker phase -> check positions
    '''
    self._employer_phase()
    self._calc_probability()
    self._onlooker_phase()
    self._check_positions()
```

sha `4e73125ff90bfeeae359a5ab1badba8894d70eaa` | https://github.com/ECRL/ecabc/blob/4e73125ff90bfeeae359a5ab1badba8894d70eaa/ecabc/abc.py#L346-L354
**6,015** | ECRL/ecabc | ecabc/abc.py | `ABC._calc_probability` | python

```python
def _calc_probability(self):
    '''Determines the probability that each bee will be chosen during the
    onlooker phase; also determines if a new best-performing bee is found
    '''
    self._logger.log('debug', 'Calculating bee probabilities')
    self.__verify_ready()
    self._total_score = 0
    for employer in self._employers:
        self._total_score += employer.score
        if self.__update(employer.score, employer.values, employer.error):
            self._logger.log(
                'info',
                'Update to best performer -'
                ' error: {} | score: {} | values: {}'.format(
                    employer.error,
                    employer.score,
                    employer.values
                )
            )
    for employer in self._employers:
        employer.calculate_probability(self._total_score)
```

sha `4e73125ff90bfeeae359a5ab1badba8894d70eaa` | https://github.com/ECRL/ecabc/blob/4e73125ff90bfeeae359a5ab1badba8894d70eaa/ecabc/abc.py#L377-L398
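
`EmployerBee.calculate_probability` is not shown in this table; assuming the standard ABC normalization (each bee's probability is its score divided by the colony's total score), the arithmetic works out like this:

```python
# Toy colony of three employer scores
scores = [2.0, 1.0, 1.0]
total_score = sum(scores)  # 4.0, accumulated as in the first loop above
probabilities = [s / total_score for s in scores]
print(probabilities)       # [0.5, 0.25, 0.25]
```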
**6,016** | ECRL/ecabc | ecabc/abc.py | `ABC._merge_bee` | python

```python
def _merge_bee(self, bee):
    '''Shifts a random value for a supplied bee in accordance with
    another random bee's value

    Args:
        bee (EmployerBee): supplied bee to merge

    Returns:
        tuple: (score of new position, values of new position, fitness
            function return value of new position)
    '''
    random_dimension = randint(0, len(self._value_ranges) - 1)
    second_bee = randint(0, self._num_employers - 1)
    while (bee.id == self._employers[second_bee].id):
        second_bee = randint(0, self._num_employers - 1)
    new_bee = deepcopy(bee)
    new_bee.values[random_dimension] = self.__onlooker.calculate_positions(
        new_bee.values[random_dimension],
        self._employers[second_bee].values[random_dimension],
        self._value_ranges[random_dimension]
    )
    fitness_score = new_bee.get_score(self._fitness_fxn(
        new_bee.values,
        **self._args
    ))
    return (fitness_score, new_bee.values, new_bee.error)
```

sha `4e73125ff90bfeeae359a5ab1badba8894d70eaa` | https://github.com/ECRL/ecabc/blob/4e73125ff90bfeeae359a5ab1badba8894d70eaa/ecabc/abc.py#L452-L478
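
`OnlookerBee.calculate_positions` is not included in this table. A hedged standalone sketch of what it plausibly does, following the standard ABC position update `v = x + phi * (x - x_k)` with `phi` drawn uniformly from [-1, 1] and the result clamped to the allowed range:

```python
import random

def calculate_positions(current, other, value_range):
    # Standard ABC update: v = x + phi * (x - x_k), phi ~ U(-1, 1);
    # the exact clamping/typing rules here are assumptions
    value_type, (lower, upper) = value_range
    new = current + random.uniform(-1, 1) * (current - other)
    new = min(max(new, lower), upper)  # keep the value inside its range
    return int(new) if value_type == 'int' else new

print(calculate_positions(5.0, 2.0, ('float', (0.0, 10.0))))
```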
**6,017** | ECRL/ecabc | ecabc/abc.py | `ABC._move_bee` | python

```python
def _move_bee(self, bee, new_values):
    '''Moves a bee to a new position if new fitness score is better than
    the bee's current fitness score

    Args:
        bee (EmployerBee): bee to move
        new_values (tuple): (new score, new values, new fitness function
            return value)
    '''
    score = np.nan_to_num(new_values[0])
    if bee.score > score:
        bee.failed_trials += 1
    else:
        bee.values = new_values[1]
        bee.score = score
        bee.error = new_values[2]
        bee.failed_trials = 0
        self._logger.log('debug', 'Bee assigned to new merged position')
```

sha `4e73125ff90bfeeae359a5ab1badba8894d70eaa` | https://github.com/ECRL/ecabc/blob/4e73125ff90bfeeae359a5ab1badba8894d70eaa/ecabc/abc.py#L480-L498
**6,018** | ECRL/ecabc | ecabc/abc.py | `ABC.__update` | python

```python
def __update(self, score, values, error):
    '''Update the best score and values if the given score is better than
    the current best score

    Args:
        score (float): new score to evaluate
        values (list): new value ranges to evaluate
        error (float): new fitness function return value to evaluate

    Returns:
        bool: True if new score is better, False otherwise
    '''
    if self._minimize:
        if self._best_score is None or score > self._best_score:
            self._best_score = score
            self._best_values = values.copy()
            self._best_error = error
            self._logger.log(
                'debug',
                'New best food source memorized: {}'.format(
                    self._best_error
                )
            )
            return True
    elif not self._minimize:
        if self._best_score is None or score < self._best_score:
            self._best_score = score
            self._best_values = values.copy()
            self._best_error = error
            self._logger.log(
                'debug',
                'New best food source memorized: {}'.format(
                    self._best_error
                )
            )
            return True
    return False
```
'''Update the best score and values if the given score is better than
the current best score
Args:
score (float): new score to evaluate
values (list): new value ranges to evaluate
error (float): new fitness function return value to evaluate
Returns:
bool: True if new score is better, False otherwise
'''
if self._minimize:
if self._best_score is None or score > self._best_score:
self._best_score = score
self._best_values = values.copy()
self._best_error = error
self._logger.log(
'debug',
'New best food source memorized: {}'.format(
self._best_error
)
)
return True
elif not self._minimize:
if self._best_score is None or score < self._best_score:
self._best_score = score
self._best_values = values.copy()
self._best_error = error
self._logger.log(
'debug',
'New best food source memorized: {}'.format(
self._best_error
)
)
return True
return False | [
"def",
"__update",
"(",
"self",
",",
"score",
",",
"values",
",",
"error",
")",
":",
"if",
"self",
".",
"_minimize",
":",
"if",
"self",
".",
"_best_score",
"is",
"None",
"or",
"score",
">",
"self",
".",
"_best_score",
":",
"self",
".",
"_best_score",
"=",
"score",
"self",
".",
"_best_values",
"=",
"values",
".",
"copy",
"(",
")",
"self",
".",
"_best_error",
"=",
"error",
"self",
".",
"_logger",
".",
"log",
"(",
"'debug'",
",",
"'New best food source memorized: {}'",
".",
"format",
"(",
"self",
".",
"_best_error",
")",
")",
"return",
"True",
"elif",
"not",
"self",
".",
"_minimize",
":",
"if",
"self",
".",
"_best_score",
"is",
"None",
"or",
"score",
"<",
"self",
".",
"_best_score",
":",
"self",
".",
"_best_score",
"=",
"score",
"self",
".",
"_best_values",
"=",
"values",
".",
"copy",
"(",
")",
"self",
".",
"_best_error",
"=",
"error",
"self",
".",
"_logger",
".",
"log",
"(",
"'debug'",
",",
"'New best food source memorized: {}'",
".",
"format",
"(",
"self",
".",
"_best_error",
")",
")",
"return",
"True",
"return",
"False"
] | Update the best score and values if the given score is better than
the current best score
Args:
score (float): new score to evaluate
values (list): new value ranges to evaluate
error (float): new fitness function return value to evaluate
Returns:
bool: True if new score is better, False otherwise | [
"Update",
"the",
"best",
"score",
"and",
"values",
"if",
"the",
"given",
"score",
"is",
"better",
"than",
"the",
"current",
"best",
"score"
] | 4e73125ff90bfeeae359a5ab1badba8894d70eaa | https://github.com/ECRL/ecabc/blob/4e73125ff90bfeeae359a5ab1badba8894d70eaa/ecabc/abc.py#L500-L537 |
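Since __update() is name-mangled and not callable from outside the class, here is a standalone sketch of the same memorization rule; the comparison directions mirror the source, where get_score() maps lower errors to higher scores under minimization:

def is_improvement(best_score, score, minimize=True):
    # Higher fitness score wins when minimizing (scores are 1 / (error + 1),
    # so a higher score means a lower error); lower score wins otherwise.
    if best_score is None:
        return True
    return score > best_score if minimize else score < best_score

assert is_improvement(None, 0.5)
assert is_improvement(0.5, 0.8)      # lower error -> higher score -> better
assert not is_improvement(0.5, 0.2)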
6,019 | ECRL/ecabc | ecabc/abc.py | ABC.__gen_random_values | def __gen_random_values(self):
'''Generate random values based on supplied value ranges
Returns:
list: random values, one per tunable variable
'''
values = []
if self._value_ranges is None:
self._logger.log(
'crit',
'Must set the type/range of possible values'
)
raise RuntimeError("Must set the type/range of possible values")
else:
for t in self._value_ranges:
if t[0] == 'int':
values.append(randint(t[1][0], t[1][1]))
elif t[0] == 'float':
values.append(np.random.uniform(t[1][0], t[1][1]))
else:
self._logger.log(
'crit',
'Value type must be either an `int` or a `float`'
)
raise RuntimeError(
'Value type must be either an `int` or a `float`'
)
return values | python | def __gen_random_values(self):
'''Generate random values based on supplied value ranges
Returns:
list: random values, one per tunable variable
'''
values = []
if self._value_ranges is None:
self._logger.log(
'crit',
'Must set the type/range of possible values'
)
raise RuntimeError("Must set the type/range of possible values")
else:
for t in self._value_ranges:
if t[0] == 'int':
values.append(randint(t[1][0], t[1][1]))
elif t[0] == 'float':
values.append(np.random.uniform(t[1][0], t[1][1]))
else:
self._logger.log(
'crit',
'Value type must be either an `int` or a `float`'
)
raise RuntimeError(
'Value type must be either an `int` or a `float`'
)
return values | [
"def",
"__gen_random_values",
"(",
"self",
")",
":",
"values",
"=",
"[",
"]",
"if",
"self",
".",
"_value_ranges",
"is",
"None",
":",
"self",
".",
"_logger",
".",
"log",
"(",
"'crit'",
",",
"'Must set the type/range of possible values'",
")",
"raise",
"RuntimeError",
"(",
"\"Must set the type/range of possible values\"",
")",
"else",
":",
"for",
"t",
"in",
"self",
".",
"_value_ranges",
":",
"if",
"t",
"[",
"0",
"]",
"==",
"'int'",
":",
"values",
".",
"append",
"(",
"randint",
"(",
"t",
"[",
"1",
"]",
"[",
"0",
"]",
",",
"t",
"[",
"1",
"]",
"[",
"1",
"]",
")",
")",
"elif",
"t",
"[",
"0",
"]",
"==",
"'float'",
":",
"values",
".",
"append",
"(",
"np",
".",
"random",
".",
"uniform",
"(",
"t",
"[",
"1",
"]",
"[",
"0",
"]",
",",
"t",
"[",
"1",
"]",
"[",
"1",
"]",
")",
")",
"else",
":",
"self",
".",
"_logger",
".",
"log",
"(",
"'crit'",
",",
"'Value type must be either an `int` or a `float`'",
")",
"raise",
"RuntimeError",
"(",
"'Value type must be either an `int` or a `float`'",
")",
"return",
"values"
] | Generate random values based on supplied value ranges
Returns:
list: random values, one per tunable variable | [
"Generate",
"random",
"values",
"based",
"on",
"supplied",
"value",
"ranges"
] | 4e73125ff90bfeeae359a5ab1badba8894d70eaa | https://github.com/ECRL/ecabc/blob/4e73125ff90bfeeae359a5ab1badba8894d70eaa/ecabc/abc.py#L539-L567 |
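A standalone sketch of the value-generation loop above; the ('type', (min, max)) tuples mirror the indexing the source performs, and the stdlib uniform stands in for np.random.uniform:

from random import randint, uniform

value_ranges = [('int', (0, 10)), ('float', (0.0, 1.0))]  # illustrative input
values = []
for kind, (low, high) in value_ranges:
    if kind == 'int':
        values.append(randint(low, high))
    elif kind == 'float':
        values.append(uniform(low, high))
    else:
        raise RuntimeError('Value type must be either an `int` or a `float`')
print(values)  # e.g. [7, 0.4236]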
6,020 | ECRL/ecabc | ecabc/abc.py | ABC.__verify_ready | def __verify_ready(self, creating=False):
'''Some cleanup, ensures that everything is set up properly to avoid
random errors during execution
Args:
creating (bool): True if currently creating employer bees, False
for checking all other operations
'''
if len(self._value_ranges) == 0:
self._logger.log(
'crit',
'Attribute value_ranges must have at least one value'
)
raise RuntimeWarning(
'Attribute value_ranges must have at least one value'
)
if len(self._employers) == 0 and creating is False:
self._logger.log('crit', 'Need to create employers')
raise RuntimeWarning('Need to create employers') | python | def __verify_ready(self, creating=False):
'''Some cleanup, ensures that everything is set up properly to avoid
random errors during execution
Args:
creating (bool): True if currently creating employer bees, False
for checking all other operations
'''
if len(self._value_ranges) == 0:
self._logger.log(
'crit',
'Attribute value_ranges must have at least one value'
)
raise RuntimeWarning(
'Attribute value_ranges must have at least one value'
)
if len(self._employers) == 0 and creating is False:
self._logger.log('crit', 'Need to create employers')
raise RuntimeWarning('Need to create employers') | [
"def",
"__verify_ready",
"(",
"self",
",",
"creating",
"=",
"False",
")",
":",
"if",
"len",
"(",
"self",
".",
"_value_ranges",
")",
"==",
"0",
":",
"self",
".",
"_logger",
".",
"log",
"(",
"'crit'",
",",
"'Attribute value_ranges must have at least one value'",
")",
"raise",
"RuntimeWarning",
"(",
"'Attribute value_ranges must have at least one value'",
")",
"if",
"len",
"(",
"self",
".",
"_employers",
")",
"==",
"0",
"and",
"creating",
"is",
"False",
":",
"self",
".",
"_logger",
".",
"log",
"(",
"'crit'",
",",
"'Need to create employers'",
")",
"raise",
"RuntimeWarning",
"(",
"'Need to create employers'",
")"
] | Some cleanup, ensures that everything is set up properly to avoid
random errors during execution
Args:
creating (bool): True if currently creating employer bees, False
for checking all other operations | [
"Some",
"cleanup",
"ensures",
"that",
"everything",
"is",
"set",
"up",
"properly",
"to",
"avoid",
"random",
"errors",
"during",
"execution"
] | 4e73125ff90bfeeae359a5ab1badba8894d70eaa | https://github.com/ECRL/ecabc/blob/4e73125ff90bfeeae359a5ab1badba8894d70eaa/ecabc/abc.py#L569-L588 |
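The two preconditions restated as a standalone sketch; as in the source, RuntimeWarning is raised rather than issued through the warnings module, so callers see an exception:

def verify_ready(value_ranges, employers, creating=False):
    # Both checks mirror the source's guard clauses.
    if len(value_ranges) == 0:
        raise RuntimeWarning('Attribute value_ranges must have at least one value')
    if len(employers) == 0 and creating is False:
        raise RuntimeWarning('Need to create employers')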
6,021 | ECRL/ecabc | ecabc/abc.py | ABC.import_settings | def import_settings(self, filename):
'''Import settings from a JSON file
Args:
filename (string): name of the file to import from
'''
if not os.path.isfile(filename):
self._logger.log(
'error',
'File: {} not found, continuing with default settings'.format(
filename
)
)
else:
with open(filename, 'r') as jsonFile:
data = json.load(jsonFile)
self._value_ranges = data['valueRanges']
self._best_values = data['best_values']
self._best_values = []
for index, value in enumerate(data['best_values']):
if self._value_ranges[index] == 'int':
self._best_values.append(int(value))
else:
self._best_values.append(float(value))
self.minimize = data['minimize']
self.num_employers = data['num_employers']
self._best_score = float(data['best_score'])
self.limit = data['limit'] | python | def import_settings(self, filename):
'''Import settings from a JSON file
Args:
filename (string): name of the file to import from
'''
if not os.path.isfile(filename):
self._logger.log(
'error',
'File: {} not found, continuing with default settings'.format(
filename
)
)
else:
with open(filename, 'r') as jsonFile:
data = json.load(jsonFile)
self._value_ranges = data['valueRanges']
self._best_values = data['best_values']
self._best_values = []
for index, value in enumerate(data['best_values']):
if self._value_ranges[index] == 'int':
self._best_values.append(int(value))
else:
self._best_values.append(float(value))
self.minimize = data['minimize']
self.num_employers = data['num_employers']
self._best_score = float(data['best_score'])
self.limit = data['limit'] | [
"def",
"import_settings",
"(",
"self",
",",
"filename",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"filename",
")",
":",
"self",
".",
"_logger",
".",
"log",
"(",
"'error'",
",",
"'File: {} not found, continuing with default settings'",
".",
"format",
"(",
"filename",
")",
")",
"else",
":",
"with",
"open",
"(",
"filename",
",",
"'r'",
")",
"as",
"jsonFile",
":",
"data",
"=",
"json",
".",
"load",
"(",
"jsonFile",
")",
"self",
".",
"_value_ranges",
"=",
"data",
"[",
"'valueRanges'",
"]",
"self",
".",
"_best_values",
"=",
"data",
"[",
"'best_values'",
"]",
"self",
".",
"_best_values",
"=",
"[",
"]",
"for",
"index",
",",
"value",
"in",
"enumerate",
"(",
"data",
"[",
"'best_values'",
"]",
")",
":",
"if",
"self",
".",
"_value_ranges",
"[",
"index",
"]",
"==",
"'int'",
":",
"self",
".",
"_best_values",
".",
"append",
"(",
"int",
"(",
"value",
")",
")",
"else",
":",
"self",
".",
"_best_values",
".",
"append",
"(",
"float",
"(",
"value",
")",
")",
"self",
".",
"minimize",
"=",
"data",
"[",
"'minimize'",
"]",
"self",
".",
"num_employers",
"=",
"data",
"[",
"'num_employers'",
"]",
"self",
".",
"_best_score",
"=",
"float",
"(",
"data",
"[",
"'best_score'",
"]",
")",
"self",
".",
"limit",
"=",
"data",
"[",
"'limit'",
"]"
] | Import settings from a JSON file
Args:
filename (string): name of the file to import from | [
"Import",
"settings",
"from",
"a",
"JSON",
"file"
] | 4e73125ff90bfeeae359a5ab1badba8894d70eaa | https://github.com/ECRL/ecabc/blob/4e73125ff90bfeeae359a5ab1badba8894d70eaa/ecabc/abc.py#L590-L618 |
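A sketch of the JSON file import_settings() reads; the key names come from the source, the values are illustrative. Note the type check compares each valueRanges entry directly to 'int', so this on-disk format stores bare type names rather than ('type', (min, max)) tuples:

import json

settings = {
    'valueRanges': ['int', 'float'],  # bare type names, per the lookup above
    'best_values': ['7', '0.42'],
    'minimize': True,
    'num_employers': 50,
    'best_score': '0.91',
    'limit': 20,
}
with open('abc_settings.json', 'w') as f:  # hypothetical filename
    json.dump(settings, f, indent=4)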
6,022 | ECRL/ecabc | ecabc/abc.py | ABC.save_settings | def save_settings(self, filename):
'''Save settings to a JSON file
Args:
filename (string): name of the file to save to
'''
data = dict()
data['valueRanges'] = self._value_ranges
data['best_values'] = [str(value) for value in self._best_values]
data['minimize'] = self._minimize
data['num_employers'] = self._num_employers
data['best_score'] = str(self._best_score)
data['limit'] = self._limit
data['best_error'] = self._best_error
with open(filename, 'w') as outfile:
json.dump(data, outfile, indent=4, sort_keys=True) | python | def save_settings(self, filename):
'''Save settings to a JSON file
Args:
filename (string): name of the file to save to
'''
data = dict()
data['valueRanges'] = self._value_ranges
data['best_values'] = [str(value) for value in self._best_values]
data['minimize'] = self._minimize
data['num_employers'] = self._num_employers
data['best_score'] = str(self._best_score)
data['limit'] = self._limit
data['best_error'] = self._best_error
with open(filename, 'w') as outfile:
json.dump(data, outfile, indent=4, sort_keys=True) | [
"def",
"save_settings",
"(",
"self",
",",
"filename",
")",
":",
"data",
"=",
"dict",
"(",
")",
"data",
"[",
"'valueRanges'",
"]",
"=",
"self",
".",
"_value_ranges",
"data",
"[",
"'best_values'",
"]",
"=",
"[",
"str",
"(",
"value",
")",
"for",
"value",
"in",
"self",
".",
"_best_values",
"]",
"data",
"[",
"'minimize'",
"]",
"=",
"self",
".",
"_minimize",
"data",
"[",
"'num_employers'",
"]",
"=",
"self",
".",
"_num_employers",
"data",
"[",
"'best_score'",
"]",
"=",
"str",
"(",
"self",
".",
"_best_score",
")",
"data",
"[",
"'limit'",
"]",
"=",
"self",
".",
"_limit",
"data",
"[",
"'best_error'",
"]",
"=",
"self",
".",
"_best_error",
"with",
"open",
"(",
"filename",
",",
"'w'",
")",
"as",
"outfile",
":",
"json",
".",
"dump",
"(",
"data",
",",
"outfile",
",",
"indent",
"=",
"4",
",",
"sort_keys",
"=",
"True",
")"
] | Save settings to a JSON file
Args:
filename (string): name of the file to save to | [
"Save",
"settings",
"to",
"a",
"JSON",
"file"
] | 4e73125ff90bfeeae359a5ab1badba8894d70eaa | https://github.com/ECRL/ecabc/blob/4e73125ff90bfeeae359a5ab1badba8894d70eaa/ecabc/abc.py#L620-L636 |
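The seven keys save_settings() writes, shown as the equivalent Python dict; best_values entries and best_score are stringified exactly as in the source, and sort_keys=True orders the keys alphabetically on disk. Note the saved valueRanges keeps whatever _value_ranges holds (tuples, per __gen_random_values), while import_settings above compares entries to bare 'int', which appears to be a round-trip mismatch in the source:

expected = {
    'valueRanges': [('int', (0, 10))],  # illustrative; serialized as lists
    'best_values': ['7'],               # str() applied per value
    'minimize': True,
    'num_employers': 50,
    'best_score': '0.91',               # str() applied
    'limit': 20,
    'best_error': 0.1,
}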
6,023 | ECRL/ecabc | ecabc/bees.py | EmployerBee.get_score | def get_score(self, error=None):
'''Calculate bee's fitness score given a value returned by the fitness
function
Args:
error (float): value returned by the fitness function
Returns:
float: derived fitness score
'''
if error is not None:
self.error = error
if self.error >= 0:
return 1 / (self.error + 1)
else:
return 1 + abs(self.error) | python | def get_score(self, error=None):
'''Calculate bee's fitness score given a value returned by the fitness
function
Args:
error (float): value returned by the fitness function
Returns:
float: derived fitness score
'''
if error is not None:
self.error = error
if self.error >= 0:
return 1 / (self.error + 1)
else:
return 1 + abs(self.error) | [
"def",
"get_score",
"(",
"self",
",",
"error",
"=",
"None",
")",
":",
"if",
"error",
"is",
"not",
"None",
":",
"self",
".",
"error",
"=",
"error",
"if",
"self",
".",
"error",
">=",
"0",
":",
"return",
"1",
"/",
"(",
"self",
".",
"error",
"+",
"1",
")",
"else",
":",
"return",
"1",
"+",
"abs",
"(",
"self",
".",
"error",
")"
] | Calculate bee's fitness score given a value returned by the fitness
function
Args:
error (float): value returned by the fitness function
Returns:
float: derived fitness score | [
"Calculate",
"bee",
"s",
"fitness",
"score",
"given",
"a",
"value",
"returned",
"by",
"the",
"fitness",
"function"
] | 4e73125ff90bfeeae359a5ab1badba8894d70eaa | https://github.com/ECRL/ecabc/blob/4e73125ff90bfeeae359a5ab1badba8894d70eaa/ecabc/bees.py#L40-L56 |
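The fitness transform used by get_score(), extracted for quick inspection; non-negative errors map into (0, 1], negative values map above 1:

def fitness(error):
    return 1 / (error + 1) if error >= 0 else 1 + abs(error)

print(fitness(0))   # 1.0
print(fitness(3))   # 0.25
print(fitness(-2))  # 3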
6,024 | foremast/foremast | src/foremast/dns/create_dns.py | SpinnakerDns.create_elb_dns | def create_elb_dns(self, regionspecific=False):
"""Create dns entries in route53.
Args:
regionspecific (bool): Whether the DNS entry should include the region
Returns:
str: Auto-generated DNS name for the Elastic Load Balancer.
"""
if regionspecific:
dns_elb = self.generated.dns()['elb_region']
else:
dns_elb = self.generated.dns()['elb']
dns_elb_aws = find_elb(name=self.app_name, env=self.env, region=self.region)
zone_ids = get_dns_zone_ids(env=self.env, facing=self.elb_subnet)
self.log.info('Updating Application URL: %s', dns_elb)
dns_kwargs = {
'dns_name': dns_elb,
'dns_name_aws': dns_elb_aws,
'dns_ttl': self.dns_ttl,
}
for zone_id in zone_ids:
self.log.debug('zone_id: %s', zone_id)
update_dns_zone_record(self.env, zone_id, **dns_kwargs)
return dns_elb | python | def create_elb_dns(self, regionspecific=False):
"""Create dns entries in route53.
Args:
regionspecific (bool): Whether the DNS entry should include the region
Returns:
str: Auto-generated DNS name for the Elastic Load Balancer.
"""
if regionspecific:
dns_elb = self.generated.dns()['elb_region']
else:
dns_elb = self.generated.dns()['elb']
dns_elb_aws = find_elb(name=self.app_name, env=self.env, region=self.region)
zone_ids = get_dns_zone_ids(env=self.env, facing=self.elb_subnet)
self.log.info('Updating Application URL: %s', dns_elb)
dns_kwargs = {
'dns_name': dns_elb,
'dns_name_aws': dns_elb_aws,
'dns_ttl': self.dns_ttl,
}
for zone_id in zone_ids:
self.log.debug('zone_id: %s', zone_id)
update_dns_zone_record(self.env, zone_id, **dns_kwargs)
return dns_elb | [
"def",
"create_elb_dns",
"(",
"self",
",",
"regionspecific",
"=",
"False",
")",
":",
"if",
"regionspecific",
":",
"dns_elb",
"=",
"self",
".",
"generated",
".",
"dns",
"(",
")",
"[",
"'elb_region'",
"]",
"else",
":",
"dns_elb",
"=",
"self",
".",
"generated",
".",
"dns",
"(",
")",
"[",
"'elb'",
"]",
"dns_elb_aws",
"=",
"find_elb",
"(",
"name",
"=",
"self",
".",
"app_name",
",",
"env",
"=",
"self",
".",
"env",
",",
"region",
"=",
"self",
".",
"region",
")",
"zone_ids",
"=",
"get_dns_zone_ids",
"(",
"env",
"=",
"self",
".",
"env",
",",
"facing",
"=",
"self",
".",
"elb_subnet",
")",
"self",
".",
"log",
".",
"info",
"(",
"'Updating Application URL: %s'",
",",
"dns_elb",
")",
"dns_kwargs",
"=",
"{",
"'dns_name'",
":",
"dns_elb",
",",
"'dns_name_aws'",
":",
"dns_elb_aws",
",",
"'dns_ttl'",
":",
"self",
".",
"dns_ttl",
",",
"}",
"for",
"zone_id",
"in",
"zone_ids",
":",
"self",
".",
"log",
".",
"debug",
"(",
"'zone_id: %s'",
",",
"zone_id",
")",
"update_dns_zone_record",
"(",
"self",
".",
"env",
",",
"zone_id",
",",
"*",
"*",
"dns_kwargs",
")",
"return",
"dns_elb"
] | Create dns entries in route53.
Args:
regionspecific (bool): Whether the DNS entry should include the region
Returns:
str: Auto-generated DNS name for the Elastic Load Balancer. | [
"Create",
"dns",
"entries",
"in",
"route53",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/dns/create_dns.py#L55-L85 |
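A usage sketch; the SpinnakerDns constructor arguments and import path are assumptions inferred from this entry's file path, since __init__ is not shown in this excerpt:

from foremast.dns.create_dns import SpinnakerDns  # assumed import path

dns = SpinnakerDns(app='myapp', env='dev', region='us-east-1',
                   elb_subnet='internal')          # assumed signature
elb_url = dns.create_elb_dns(regionspecific=True)  # region-qualified record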
6,025 | foremast/foremast | src/foremast/dns/create_dns.py | SpinnakerDns.create_failover_dns | def create_failover_dns(self, primary_region='us-east-1'):
"""Create dns entries in route53 for multiregion failover setups.
Args:
primary_region (str): primary AWS region for failover
Returns:
Auto-generated DNS name.
"""
dns_record = self.generated.dns()['global']
zone_ids = get_dns_zone_ids(env=self.env, facing=self.elb_subnet)
elb_dns_aws = find_elb(name=self.app_name, env=self.env, region=self.region)
elb_dns_zone_id = find_elb_dns_zone_id(name=self.app_name, env=self.env, region=self.region)
if primary_region in elb_dns_aws:
failover_state = 'PRIMARY'
else:
failover_state = 'SECONDARY'
self.log.info("%s set as %s record", elb_dns_aws, failover_state)
self.log.info('Updating Application Failover URL: %s', dns_record)
dns_kwargs = {
'dns_name': dns_record,
'elb_dns_zone_id': elb_dns_zone_id,
'elb_aws_dns': elb_dns_aws,
'dns_ttl': self.dns_ttl,
'failover_state': failover_state,
}
for zone_id in zone_ids:
self.log.debug('zone_id: %s', zone_id)
update_failover_dns_record(self.env, zone_id, **dns_kwargs)
return dns_record | python | def create_failover_dns(self, primary_region='us-east-1'):
"""Create dns entries in route53 for multiregion failover setups.
Args:
primary_region (str): primary AWS region for failover
Returns:
Auto-generated DNS name.
"""
dns_record = self.generated.dns()['global']
zone_ids = get_dns_zone_ids(env=self.env, facing=self.elb_subnet)
elb_dns_aws = find_elb(name=self.app_name, env=self.env, region=self.region)
elb_dns_zone_id = find_elb_dns_zone_id(name=self.app_name, env=self.env, region=self.region)
if primary_region in elb_dns_aws:
failover_state = 'PRIMARY'
else:
failover_state = 'SECONDARY'
self.log.info("%s set as %s record", elb_dns_aws, failover_state)
self.log.info('Updating Application Failover URL: %s', dns_record)
dns_kwargs = {
'dns_name': dns_record,
'elb_dns_zone_id': elb_dns_zone_id,
'elb_aws_dns': elb_dns_aws,
'dns_ttl': self.dns_ttl,
'failover_state': failover_state,
}
for zone_id in zone_ids:
self.log.debug('zone_id: %s', zone_id)
update_failover_dns_record(self.env, zone_id, **dns_kwargs)
return dns_record | [
"def",
"create_failover_dns",
"(",
"self",
",",
"primary_region",
"=",
"'us-east-1'",
")",
":",
"dns_record",
"=",
"self",
".",
"generated",
".",
"dns",
"(",
")",
"[",
"'global'",
"]",
"zone_ids",
"=",
"get_dns_zone_ids",
"(",
"env",
"=",
"self",
".",
"env",
",",
"facing",
"=",
"self",
".",
"elb_subnet",
")",
"elb_dns_aws",
"=",
"find_elb",
"(",
"name",
"=",
"self",
".",
"app_name",
",",
"env",
"=",
"self",
".",
"env",
",",
"region",
"=",
"self",
".",
"region",
")",
"elb_dns_zone_id",
"=",
"find_elb_dns_zone_id",
"(",
"name",
"=",
"self",
".",
"app_name",
",",
"env",
"=",
"self",
".",
"env",
",",
"region",
"=",
"self",
".",
"region",
")",
"if",
"primary_region",
"in",
"elb_dns_aws",
":",
"failover_state",
"=",
"'PRIMARY'",
"else",
":",
"failover_state",
"=",
"'SECONDARY'",
"self",
".",
"log",
".",
"info",
"(",
"\"%s set as %s record\"",
",",
"elb_dns_aws",
",",
"failover_state",
")",
"self",
".",
"log",
".",
"info",
"(",
"'Updating Application Failover URL: %s'",
",",
"dns_record",
")",
"dns_kwargs",
"=",
"{",
"'dns_name'",
":",
"dns_record",
",",
"'elb_dns_zone_id'",
":",
"elb_dns_zone_id",
",",
"'elb_aws_dns'",
":",
"elb_dns_aws",
",",
"'dns_ttl'",
":",
"self",
".",
"dns_ttl",
",",
"'failover_state'",
":",
"failover_state",
",",
"}",
"for",
"zone_id",
"in",
"zone_ids",
":",
"self",
".",
"log",
".",
"debug",
"(",
"'zone_id: %s'",
",",
"zone_id",
")",
"update_failover_dns_record",
"(",
"self",
".",
"env",
",",
"zone_id",
",",
"*",
"*",
"dns_kwargs",
")",
"return",
"dns_record"
] | Create dns entries in route53 for multiregion failover setups.
Args:
primary_region (str): primary AWS region for failover
Returns:
Auto-generated DNS name. | [
"Create",
"dns",
"entries",
"in",
"route53",
"for",
"multiregion",
"failover",
"setups",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/dns/create_dns.py#L87-L121 |
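The failover-state selection above, restated standalone: the record becomes PRIMARY only when the ELB's AWS DNS name contains the primary-region string:

def failover_state(elb_dns_aws, primary_region='us-east-1'):
    return 'PRIMARY' if primary_region in elb_dns_aws else 'SECONDARY'

print(failover_state('internal-myapp-1.us-east-1.elb.amazonaws.com'))  # PRIMARY
print(failover_state('internal-myapp-2.us-west-2.elb.amazonaws.com'))  # SECONDARY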
6,026 | foremast/foremast | src/foremast/elb/format_listeners.py | format_listeners | def format_listeners(elb_settings=None, env='dev', region='us-east-1'):
"""Format ELB Listeners into standard list.
Args:
elb_settings (dict): ELB settings including ELB Listeners to add,
e.g.::
# old
{
"certificate": null,
"i_port": 8080,
"lb_port": 80,
"subnet_purpose": "internal",
"target": "HTTP:8080/health"
}
# new
{
"ports": [
{
"instance": "HTTP:8080",
"loadbalancer": "HTTP:80"
},
{
"certificate": "cert_name",
"instance": "HTTP:8443",
"loadbalancer": "HTTPS:443"
}
],
"subnet_purpose": "internal",
"target": "HTTP:8080/health"
}
env (str): Environment to find the Account Number for.
Returns:
list: ELB Listeners formatted into dicts for Spinnaker::
[
{
'externalPort': 80,
'externalProtocol': 'HTTP',
'internalPort': 8080,
'internalProtocol': 'HTTP',
'sslCertificateId': None,
'listenerPolicies': [],
'backendPolicies': []
},
...
]
"""
LOG.debug('ELB settings:\n%s', elb_settings)
credential = get_env_credential(env=env)
account = credential['accountId']
listeners = []
if 'ports' in elb_settings:
for listener in elb_settings['ports']:
cert_name = format_cert_name(
env=env, region=region, account=account, certificate=listener.get('certificate', None))
lb_proto, lb_port = listener['loadbalancer'].split(':')
i_proto, i_port = listener['instance'].split(':')
listener_policies = listener.get('policies', [])
listener_policies += listener.get('listener_policies', [])
backend_policies = listener.get('backend_policies', [])
elb_data = {
'externalPort': int(lb_port),
'externalProtocol': lb_proto.upper(),
'internalPort': int(i_port),
'internalProtocol': i_proto.upper(),
'sslCertificateId': cert_name,
'listenerPolicies': listener_policies,
'backendPolicies': backend_policies,
}
listeners.append(elb_data)
else:
listener_policies = elb_settings.get('policies', [])
listener_policies += elb_settings.get('listener_policies', [])
backend_policies = elb_settings.get('backend_policies', [])
listeners = [{
'externalPort': int(elb_settings['lb_port']),
'externalProtocol': elb_settings['lb_proto'],
'internalPort': int(elb_settings['i_port']),
'internalProtocol': elb_settings['i_proto'],
'sslCertificateId': elb_settings['certificate'],
'listenerPolicies': listener_policies,
'backendPolicies': backend_policies,
}]
for listener in listeners:
LOG.info('ELB Listener:\n'
'loadbalancer %(externalProtocol)s:%(externalPort)d\n'
'instance %(internalProtocol)s:%(internalPort)d\n'
'certificate: %(sslCertificateId)s\n'
'listener_policies: %(listenerPolicies)s\n'
'backend_policies: %(backendPolicies)s', listener)
return listeners | python | def format_listeners(elb_settings=None, env='dev', region='us-east-1'):
"""Format ELB Listeners into standard list.
Args:
elb_settings (dict): ELB settings including ELB Listeners to add,
e.g.::
# old
{
"certificate": null,
"i_port": 8080,
"lb_port": 80,
"subnet_purpose": "internal",
"target": "HTTP:8080/health"
}
# new
{
"ports": [
{
"instance": "HTTP:8080",
"loadbalancer": "HTTP:80"
},
{
"certificate": "cert_name",
"instance": "HTTP:8443",
"loadbalancer": "HTTPS:443"
}
],
"subnet_purpose": "internal",
"target": "HTTP:8080/health"
}
env (str): Environment to find the Account Number for.
Returns:
list: ELB Listeners formatted into dicts for Spinnaker::
[
{
'externalPort': 80,
'externalProtocol': 'HTTP',
'internalPort': 8080,
'internalProtocol': 'HTTP',
'sslCertificateId': None,
'listenerPolicies': [],
'backendPolicies': []
},
...
]
"""
LOG.debug('ELB settings:\n%s', elb_settings)
credential = get_env_credential(env=env)
account = credential['accountId']
listeners = []
if 'ports' in elb_settings:
for listener in elb_settings['ports']:
cert_name = format_cert_name(
env=env, region=region, account=account, certificate=listener.get('certificate', None))
lb_proto, lb_port = listener['loadbalancer'].split(':')
i_proto, i_port = listener['instance'].split(':')
listener_policies = listener.get('policies', [])
listener_policies += listener.get('listener_policies', [])
backend_policies = listener.get('backend_policies', [])
elb_data = {
'externalPort': int(lb_port),
'externalProtocol': lb_proto.upper(),
'internalPort': int(i_port),
'internalProtocol': i_proto.upper(),
'sslCertificateId': cert_name,
'listenerPolicies': listener_policies,
'backendPolicies': backend_policies,
}
listeners.append(elb_data)
else:
listener_policies = elb_settings.get('policies', [])
listener_policies += elb_settings.get('listener_policies', [])
backend_policies = elb_settings.get('backend_policies', [])
listeners = [{
'externalPort': int(elb_settings['lb_port']),
'externalProtocol': elb_settings['lb_proto'],
'internalPort': int(elb_settings['i_port']),
'internalProtocol': elb_settings['i_proto'],
'sslCertificateId': elb_settings['certificate'],
'listenerPolicies': listener_policies,
'backendPolicies': backend_policies,
}]
for listener in listeners:
LOG.info('ELB Listener:\n'
'loadbalancer %(externalProtocol)s:%(externalPort)d\n'
'instance %(internalProtocol)s:%(internalPort)d\n'
'certificate: %(sslCertificateId)s\n'
'listener_policies: %(listenerPolicies)s\n'
'backend_policies: %(backendPolicies)s', listener)
return listeners | [
"def",
"format_listeners",
"(",
"elb_settings",
"=",
"None",
",",
"env",
"=",
"'dev'",
",",
"region",
"=",
"'us-east-1'",
")",
":",
"LOG",
".",
"debug",
"(",
"'ELB settings:\\n%s'",
",",
"elb_settings",
")",
"credential",
"=",
"get_env_credential",
"(",
"env",
"=",
"env",
")",
"account",
"=",
"credential",
"[",
"'accountId'",
"]",
"listeners",
"=",
"[",
"]",
"if",
"'ports'",
"in",
"elb_settings",
":",
"for",
"listener",
"in",
"elb_settings",
"[",
"'ports'",
"]",
":",
"cert_name",
"=",
"format_cert_name",
"(",
"env",
"=",
"env",
",",
"region",
"=",
"region",
",",
"account",
"=",
"account",
",",
"certificate",
"=",
"listener",
".",
"get",
"(",
"'certificate'",
",",
"None",
")",
")",
"lb_proto",
",",
"lb_port",
"=",
"listener",
"[",
"'loadbalancer'",
"]",
".",
"split",
"(",
"':'",
")",
"i_proto",
",",
"i_port",
"=",
"listener",
"[",
"'instance'",
"]",
".",
"split",
"(",
"':'",
")",
"listener_policies",
"=",
"listener",
".",
"get",
"(",
"'policies'",
",",
"[",
"]",
")",
"listener_policies",
"+=",
"listener",
".",
"get",
"(",
"'listener_policies'",
",",
"[",
"]",
")",
"backend_policies",
"=",
"listener",
".",
"get",
"(",
"'backend_policies'",
",",
"[",
"]",
")",
"elb_data",
"=",
"{",
"'externalPort'",
":",
"int",
"(",
"lb_port",
")",
",",
"'externalProtocol'",
":",
"lb_proto",
".",
"upper",
"(",
")",
",",
"'internalPort'",
":",
"int",
"(",
"i_port",
")",
",",
"'internalProtocol'",
":",
"i_proto",
".",
"upper",
"(",
")",
",",
"'sslCertificateId'",
":",
"cert_name",
",",
"'listenerPolicies'",
":",
"listener_policies",
",",
"'backendPolicies'",
":",
"backend_policies",
",",
"}",
"listeners",
".",
"append",
"(",
"elb_data",
")",
"else",
":",
"listener_policies",
"=",
"elb_settings",
".",
"get",
"(",
"'policies'",
",",
"[",
"]",
")",
"listener_policies",
"+=",
"elb_settings",
".",
"get",
"(",
"'listener_policies'",
",",
"[",
"]",
")",
"backend_policies",
"=",
"elb_settings",
".",
"get",
"(",
"'backend_policies'",
",",
"[",
"]",
")",
"listeners",
"=",
"[",
"{",
"'externalPort'",
":",
"int",
"(",
"elb_settings",
"[",
"'lb_port'",
"]",
")",
",",
"'externalProtocol'",
":",
"elb_settings",
"[",
"'lb_proto'",
"]",
",",
"'internalPort'",
":",
"int",
"(",
"elb_settings",
"[",
"'i_port'",
"]",
")",
",",
"'internalProtocol'",
":",
"elb_settings",
"[",
"'i_proto'",
"]",
",",
"'sslCertificateId'",
":",
"elb_settings",
"[",
"'certificate'",
"]",
",",
"'listenerPolicies'",
":",
"listener_policies",
",",
"'backendPolicies'",
":",
"backend_policies",
",",
"}",
"]",
"for",
"listener",
"in",
"listeners",
":",
"LOG",
".",
"info",
"(",
"'ELB Listener:\\n'",
"'loadbalancer %(externalProtocol)s:%(externalPort)d\\n'",
"'instance %(internalProtocol)s:%(internalPort)d\\n'",
"'certificate: %(sslCertificateId)s\\n'",
"'listener_policies: %(listenerPolicies)s\\n'",
"'backend_policies: %(backendPolicies)s'",
",",
"listener",
")",
"return",
"listeners"
] | Format ELB Listeners into standard list.
Args:
elb_settings (dict): ELB settings including ELB Listeners to add,
e.g.::
# old
{
"certificate": null,
"i_port": 8080,
"lb_port": 80,
"subnet_purpose": "internal",
"target": "HTTP:8080/health"
}
# new
{
"ports": [
{
"instance": "HTTP:8080",
"loadbalancer": "HTTP:80"
},
{
"certificate": "cert_name",
"instance": "HTTP:8443",
"loadbalancer": "HTTPS:443"
}
],
"subnet_purpose": "internal",
"target": "HTTP:8080/health"
}
env (str): Environment to find the Account Number for.
Returns:
list: ELB Listeners formatted into dicts for Spinnaker::
[
{
'externalPort': 80,
'externalProtocol': 'HTTP',
'internalPort': 8080,
'internalProtocol': 'HTTP',
'sslCertificateId': None,
'listenerPolicies': [],
'backendPolicies': []
},
...
] | [
"Format",
"ELB",
"Listeners",
"into",
"standard",
"list",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/elb/format_listeners.py#L26-L128 |
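A usage sketch with the "new" ports format from the docstring above; note the function calls get_env_credential(), so it needs a reachable credential lookup at run time:

from foremast.elb.format_listeners import format_listeners  # path per this entry

elb_settings = {
    'ports': [
        {'instance': 'HTTP:8080', 'loadbalancer': 'HTTP:80'},
        {'certificate': 'cert_name', 'instance': 'HTTP:8443',
         'loadbalancer': 'HTTPS:443'},
    ],
    'subnet_purpose': 'internal',
    'target': 'HTTP:8080/health',
}
listeners = format_listeners(elb_settings=elb_settings, env='dev',
                             region='us-east-1')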
6,027 | foremast/foremast | src/foremast/elb/format_listeners.py | format_cert_name | def format_cert_name(env='', account='', region='', certificate=None):
"""Format the SSL certificate name into ARN for ELB.
Args:
env (str): Account environment name
account (str): Account number for ARN
region (str): AWS Region.
certificate (str): Name of SSL certificate
Returns:
str: Fully qualified ARN for SSL certificate
None: Certificate is not desired
"""
cert_name = None
if certificate:
if certificate.startswith('arn'):
LOG.info("Full ARN provided...skipping lookup.")
cert_name = certificate
else:
generated_cert_name = generate_custom_cert_name(env, region, account, certificate)
if generated_cert_name:
LOG.info("Found generated certificate %s from template", generated_cert_name)
cert_name = generated_cert_name
else:
LOG.info("Using default certificate name logic")
cert_name = ('arn:aws:iam::{account}:server-certificate/{name}'.format(
account=account, name=certificate))
LOG.debug('Certificate name: %s', cert_name)
return cert_name | python | def format_cert_name(env='', account='', region='', certificate=None):
"""Format the SSL certificate name into ARN for ELB.
Args:
env (str): Account environment name
account (str): Account number for ARN
region (str): AWS Region.
certificate (str): Name of SSL certificate
Returns:
str: Fully qualified ARN for SSL certificate
None: Certificate is not desired
"""
cert_name = None
if certificate:
if certificate.startswith('arn'):
LOG.info("Full ARN provided...skipping lookup.")
cert_name = certificate
else:
generated_cert_name = generate_custom_cert_name(env, region, account, certificate)
if generated_cert_name:
LOG.info("Found generated certificate %s from template", generated_cert_name)
cert_name = generated_cert_name
else:
LOG.info("Using default certificate name logic")
cert_name = ('arn:aws:iam::{account}:server-certificate/{name}'.format(
account=account, name=certificate))
LOG.debug('Certificate name: %s', cert_name)
return cert_name | [
"def",
"format_cert_name",
"(",
"env",
"=",
"''",
",",
"account",
"=",
"''",
",",
"region",
"=",
"''",
",",
"certificate",
"=",
"None",
")",
":",
"cert_name",
"=",
"None",
"if",
"certificate",
":",
"if",
"certificate",
".",
"startswith",
"(",
"'arn'",
")",
":",
"LOG",
".",
"info",
"(",
"\"Full ARN provided...skipping lookup.\"",
")",
"cert_name",
"=",
"certificate",
"else",
":",
"generated_cert_name",
"=",
"generate_custom_cert_name",
"(",
"env",
",",
"region",
",",
"account",
",",
"certificate",
")",
"if",
"generated_cert_name",
":",
"LOG",
".",
"info",
"(",
"\"Found generated certificate %s from template\"",
",",
"generated_cert_name",
")",
"cert_name",
"=",
"generated_cert_name",
"else",
":",
"LOG",
".",
"info",
"(",
"\"Using default certificate name logic\"",
")",
"cert_name",
"=",
"(",
"'arn:aws:iam::{account}:server-certificate/{name}'",
".",
"format",
"(",
"account",
"=",
"account",
",",
"name",
"=",
"certificate",
")",
")",
"LOG",
".",
"debug",
"(",
"'Certificate name: %s'",
",",
"cert_name",
")",
"return",
"cert_name"
] | Format the SSL certificate name into ARN for ELB.
Args:
env (str): Account environment name
account (str): Account number for ARN
region (str): AWS Region.
certificate (str): Name of SSL certificate
Returns:
str: Fully qualified ARN for SSL certificate
None: Certificate is not desired | [
"Format",
"the",
"SSL",
"certificate",
"name",
"into",
"ARN",
"for",
"ELB",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/elb/format_listeners.py#L131-L161 |
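The three behaviors of format_cert_name(), sketched; the ARN and certificate names are illustrative:

from foremast.elb.format_listeners import format_cert_name  # path per this entry

format_cert_name(certificate=None)  # -> None: no certificate requested
format_cert_name(certificate='arn:aws:acm:us-east-1:123456789012:certificate/abc')
# -> returned unchanged: full ARNs skip the lookup entirely
format_cert_name(env='dev', account='123456789012', certificate='mycert')
# -> template lookup first, else falls back to
#    'arn:aws:iam::123456789012:server-certificate/mycert'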
6,028 | foremast/foremast | src/foremast/elb/format_listeners.py | generate_custom_cert_name | def generate_custom_cert_name(env='', region='', account='', certificate=None):
"""Generate a custom TLS Cert name based on a template.
Args:
env (str): Account environment name
region (str): AWS Region.
account (str): Account number for ARN.
certificate (str): Name of SSL certificate.
Returns:
str: Fully qualified ARN for SSL certificate.
None: Template doesn't exist.
"""
cert_name = None
template_kwargs = {'account': account, 'name': certificate}
# TODO: Investigate moving this to a remote API, then fallback to local file if unable to connect
try:
rendered_template = get_template(template_file='infrastructure/iam/tlscert_naming.json.j2', **template_kwargs)
tlscert_dict = json.loads(rendered_template)
except ForemastTemplateNotFound:
LOG.info('Unable to find TLS Cert Template...falling back to default logic...')
return cert_name
# TODO: Move to v1 method for check
try:
LOG.info("Attempting to find TLS Cert using TLS Cert Template v1 lookup...")
cert_name = tlscert_dict[env][certificate]
LOG.info("Found TLS certificate named %s under %s using TLS Cert Template v1", certificate, env)
except KeyError:
LOG.error("Unable to find TLS certificate named %s under %s using v1 TLS Cert Template.", certificate, env)
# TODO: Move variable to consts
# TODO: move to v2 method for check
tls_services = ['iam', 'acm']
if cert_name is None and all(service in tlscert_dict for service in tls_services):
LOG.info("Attempting to find TLS Cert using TLS Cert Template v2 lookup...")
if certificate in tlscert_dict['iam'][env]:
cert_name = tlscert_dict['iam'][env][certificate]
LOG.info("Found IAM TLS certificate named %s under %s using TLS Cert Template v2", certificate, env)
elif certificate in tlscert_dict['acm'][region][env]:
cert_name = tlscert_dict['acm'][region][env][certificate]
LOG.info("Found ACM TLS certificate named %s under %s in %s using TLS Cert Template v2", certificate, env,
region)
else:
LOG.error(
"Unable to find TLS certificate named %s under parent keys [ACM, IAM] %s in v2 TLS Cert Template.",
certificate, env)
return cert_name | python | def generate_custom_cert_name(env='', region='', account='', certificate=None):
"""Generate a custom TLS Cert name based on a template.
Args:
env (str): Account environment name
region (str): AWS Region.
account (str): Account number for ARN.
certificate (str): Name of SSL certificate.
Returns:
str: Fully qualified ARN for SSL certificate.
None: Template doesn't exist.
"""
cert_name = None
template_kwargs = {'account': account, 'name': certificate}
# TODO: Investigate moving this to a remote API, then fallback to local file if unable to connect
try:
rendered_template = get_template(template_file='infrastructure/iam/tlscert_naming.json.j2', **template_kwargs)
tlscert_dict = json.loads(rendered_template)
except ForemastTemplateNotFound:
LOG.info('Unable to find TLS Cert Template...falling back to default logic...')
return cert_name
# TODO: Move to v1 method for check
try:
LOG.info("Attempting to find TLS Cert using TLS Cert Template v1 lookup...")
cert_name = tlscert_dict[env][certificate]
LOG.info("Found TLS certificate named %s under %s using TLS Cert Template v1", certificate, env)
except KeyError:
LOG.error("Unable to find TLS certificate named %s under %s using v1 TLS Cert Template.", certificate, env)
# TODO: Move variable to consts
# TODO: move to v2 method for check
tls_services = ['iam', 'acm']
if cert_name is None and all(service in tlscert_dict for service in tls_services):
LOG.info("Attempting to find TLS Cert using TLS Cert Template v2 lookup...")
if certificate in tlscert_dict['iam'][env]:
cert_name = tlscert_dict['iam'][env][certificate]
LOG.info("Found IAM TLS certificate named %s under %s using TLS Cert Template v2", certificate, env)
elif certificate in tlscert_dict['acm'][region][env]:
cert_name = tlscert_dict['acm'][region][env][certificate]
LOG.info("Found ACM TLS certificate named %s under %s in %s using TLS Cert Template v2", certificate, env,
region)
else:
LOG.error(
"Unable to find TLS certificate named %s under parent keys [ACM, IAM] %s in v2 TLS Cert Template.",
certificate, env)
return cert_name | [
"def",
"generate_custom_cert_name",
"(",
"env",
"=",
"''",
",",
"region",
"=",
"''",
",",
"account",
"=",
"''",
",",
"certificate",
"=",
"None",
")",
":",
"cert_name",
"=",
"None",
"template_kwargs",
"=",
"{",
"'account'",
":",
"account",
",",
"'name'",
":",
"certificate",
"}",
"# TODO: Investigate moving this to a remote API, then fallback to local file if unable to connect",
"try",
":",
"rendered_template",
"=",
"get_template",
"(",
"template_file",
"=",
"'infrastructure/iam/tlscert_naming.json.j2'",
",",
"*",
"*",
"template_kwargs",
")",
"tlscert_dict",
"=",
"json",
".",
"loads",
"(",
"rendered_template",
")",
"except",
"ForemastTemplateNotFound",
":",
"LOG",
".",
"info",
"(",
"'Unable to find TLS Cert Template...falling back to default logic...'",
")",
"return",
"cert_name",
"# TODO: Move to v1 method for check",
"try",
":",
"LOG",
".",
"info",
"(",
"\"Attempting to find TLS Cert using TLS Cert Template v1 lookup...\"",
")",
"cert_name",
"=",
"tlscert_dict",
"[",
"env",
"]",
"[",
"certificate",
"]",
"LOG",
".",
"info",
"(",
"\"Found TLS certificate named %s under %s using TLS Cert Template v1\"",
",",
"certificate",
",",
"env",
")",
"except",
"KeyError",
":",
"LOG",
".",
"error",
"(",
"\"Unable to find TLS certificate named %s under %s using v1 TLS Cert Template.\"",
",",
"certificate",
",",
"env",
")",
"# TODO: Move variable to consts",
"# TODO: move to v2 method for check",
"tls_services",
"=",
"[",
"'iam'",
",",
"'acm'",
"]",
"if",
"cert_name",
"is",
"None",
"and",
"all",
"(",
"service",
"in",
"tlscert_dict",
"for",
"service",
"in",
"tls_services",
")",
":",
"LOG",
".",
"info",
"(",
"\"Attempting to find TLS Cert using TLS Cert Template v2 lookup...\"",
")",
"if",
"certificate",
"in",
"tlscert_dict",
"[",
"'iam'",
"]",
"[",
"env",
"]",
":",
"cert_name",
"=",
"tlscert_dict",
"[",
"'iam'",
"]",
"[",
"env",
"]",
"[",
"certificate",
"]",
"LOG",
".",
"info",
"(",
"\"Found IAM TLS certificate named %s under %s using TLS Cert Template v2\"",
",",
"certificate",
",",
"env",
")",
"elif",
"certificate",
"in",
"tlscert_dict",
"[",
"'acm'",
"]",
"[",
"region",
"]",
"[",
"env",
"]",
":",
"cert_name",
"=",
"tlscert_dict",
"[",
"'acm'",
"]",
"[",
"region",
"]",
"[",
"env",
"]",
"[",
"certificate",
"]",
"LOG",
".",
"info",
"(",
"\"Found ACM TLS certificate named %s under %s in %s using TLS Cert Template v2\"",
",",
"certificate",
",",
"env",
",",
"region",
")",
"else",
":",
"LOG",
".",
"error",
"(",
"\"Unable to find TLS certificate named %s under parent keys [ACM, IAM] %s in v2 TLS Cert Template.\"",
",",
"certificate",
",",
"env",
")",
"return",
"cert_name"
] | Generate a custom TLS Cert name based on a template.
Args:
env (str): Account environment name
region (str): AWS Region.
account (str): Account number for ARN.
certificate (str): Name of SSL certificate.
Returns:
str: Fully qualified ARN for SSL certificate.
None: Template doesn't exist. | [
"Generate",
"a",
"custom",
"TLS",
"Cert",
"name",
"based",
"on",
"a",
"template",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/elb/format_listeners.py#L164-L213 |
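The two rendered-template shapes the v1 and v2 lookups above can resolve, shown as Python dicts; the nesting follows the source's indexing, the ARNs are illustrative:

tlscert_v1 = {
    'dev': {'mycert': 'arn:aws:iam::123456789012:server-certificate/mycert'},
}
tlscert_v2 = {
    'iam': {'dev': {'mycert': 'arn:aws:iam::123456789012:server-certificate/mycert'}},
    'acm': {'us-east-1': {'dev': {
        'mycert': 'arn:aws:acm:us-east-1:123456789012:certificate/abc123'}}},
}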
6,029 | foremast/foremast | src/foremast/slacknotify/__main__.py | main | def main():
"""Send Slack notification to a configured channel."""
logging.basicConfig(format=LOGGING_FORMAT)
log = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
add_debug(parser)
add_app(parser)
add_env(parser)
add_properties(parser)
args = parser.parse_args()
logging.getLogger(__package__.split(".")[0]).setLevel(args.debug)
log.debug('Parsed arguements: %s', args)
if "prod" not in args.env:
log.info('No slack message sent, not a production environment')
else:
log.info("Sending slack message, production environment")
slacknotify = SlackNotification(app=args.app, env=args.env, prop_path=args.properties)
slacknotify.post_message() | python | def main():
"""Send Slack notification to a configured channel."""
logging.basicConfig(format=LOGGING_FORMAT)
log = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
add_debug(parser)
add_app(parser)
add_env(parser)
add_properties(parser)
args = parser.parse_args()
logging.getLogger(__package__.split(".")[0]).setLevel(args.debug)
log.debug('Parsed arguments: %s', args)
if "prod" not in args.env:
log.info('No slack message sent, not a production environment')
else:
log.info("Sending slack message, production environment")
slacknotify = SlackNotification(app=args.app, env=args.env, prop_path=args.properties)
slacknotify.post_message() | [
"def",
"main",
"(",
")",
":",
"logging",
".",
"basicConfig",
"(",
"format",
"=",
"LOGGING_FORMAT",
")",
"log",
"=",
"logging",
".",
"getLogger",
"(",
"__name__",
")",
"parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
")",
"add_debug",
"(",
"parser",
")",
"add_app",
"(",
"parser",
")",
"add_env",
"(",
"parser",
")",
"add_properties",
"(",
"parser",
")",
"args",
"=",
"parser",
".",
"parse_args",
"(",
")",
"logging",
".",
"getLogger",
"(",
"__package__",
".",
"split",
"(",
"\".\"",
")",
"[",
"0",
"]",
")",
".",
"setLevel",
"(",
"args",
".",
"debug",
")",
"log",
".",
"debug",
"(",
"'Parsed arguements: %s'",
",",
"args",
")",
"if",
"\"prod\"",
"not",
"in",
"args",
".",
"env",
":",
"log",
".",
"info",
"(",
"'No slack message sent, not a production environment'",
")",
"else",
":",
"log",
".",
"info",
"(",
"\"Sending slack message, production environment\"",
")",
"slacknotify",
"=",
"SlackNotification",
"(",
"app",
"=",
"args",
".",
"app",
",",
"env",
"=",
"args",
".",
"env",
",",
"prop_path",
"=",
"args",
".",
"properties",
")",
"slacknotify",
".",
"post_message",
"(",
")"
] | Send Slack notification to a configured channel. | [
"Send",
"Slack",
"notification",
"to",
"a",
"configured",
"channel",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/slacknotify/__main__.py#L28-L49 |
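The production gate above, restated standalone; note it is a substring test on the environment name, not an equality check:

def should_notify(env):
    return 'prod' in env

print(should_notify('prod'))   # True
print(should_notify('prodp'))  # True -- any env containing 'prod' notifies
print(should_notify('dev'))    # False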
6,030 | foremast/foremast | src/foremast/destroyer.py | main | def main(): # noqa
"""Attempt to fully destroy AWS Resources for a Spinnaker Application."""
logging.basicConfig(format=LOGGING_FORMAT)
parser = argparse.ArgumentParser(description=main.__doc__)
add_debug(parser)
add_app(parser)
args = parser.parse_args()
if args.debug == logging.DEBUG:
logging.getLogger(__package__.split('.')[0]).setLevel(args.debug)
else:
LOG.setLevel(args.debug)
for env in ENVS:
for region in REGIONS:
LOG.info('DESTROY %s:%s', env, region)
try:
destroy_dns(app=args.app, env=env)
except botocore.exceptions.ClientError as error:
LOG.warning('DNS issue for %s in %s: %s', env, region, error)
try:
destroy_elb(app=args.app, env=env, region=region)
except SpinnakerError:
pass
try:
destroy_iam(app=args.app, env=env)
except botocore.exceptions.ClientError as error:
LOG.warning('IAM issue for %s in %s: %s', env, region, error)
try:
destroy_s3(app=args.app, env=env)
except botocore.exceptions.ClientError as error:
LOG.warning('S3 issue for %s in %s: %s', env, region, error)
try:
destroy_sg(app=args.app, env=env, region=region)
except SpinnakerError:
pass
LOG.info('Destroyed %s:%s', env, region)
LOG.info('Destruction complete.') | python | def main(): # noqa
"""Attempt to fully destroy AWS Resources for a Spinnaker Application."""
logging.basicConfig(format=LOGGING_FORMAT)
parser = argparse.ArgumentParser(description=main.__doc__)
add_debug(parser)
add_app(parser)
args = parser.parse_args()
if args.debug == logging.DEBUG:
logging.getLogger(__package__.split('.')[0]).setLevel(args.debug)
else:
LOG.setLevel(args.debug)
for env in ENVS:
for region in REGIONS:
LOG.info('DESTROY %s:%s', env, region)
try:
destroy_dns(app=args.app, env=env)
except botocore.exceptions.ClientError as error:
LOG.warning('DNS issue for %s in %s: %s', env, region, error)
try:
destroy_elb(app=args.app, env=env, region=region)
except SpinnakerError:
pass
try:
destroy_iam(app=args.app, env=env)
except botocore.exceptions.ClientError as error:
LOG.warning('IAM issue for %s in %s: %s', env, region, error)
try:
destroy_s3(app=args.app, env=env)
except botocore.exceptions.ClientError as error:
LOG.warning('S3 issue for %s in %s: %s', env, region, error)
try:
destroy_sg(app=args.app, env=env, region=region)
except SpinnakerError:
pass
LOG.info('Destroyed %s:%s', env, region)
LOG.info('Destruction complete.') | [
"def",
"main",
"(",
")",
":",
"# noqa",
"logging",
".",
"basicConfig",
"(",
"format",
"=",
"LOGGING_FORMAT",
")",
"parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
"description",
"=",
"main",
".",
"__doc__",
")",
"add_debug",
"(",
"parser",
")",
"add_app",
"(",
"parser",
")",
"args",
"=",
"parser",
".",
"parse_args",
"(",
")",
"if",
"args",
".",
"debug",
"==",
"logging",
".",
"DEBUG",
":",
"logging",
".",
"getLogger",
"(",
"__package__",
".",
"split",
"(",
"'.'",
")",
"[",
"0",
"]",
")",
".",
"setLevel",
"(",
"args",
".",
"debug",
")",
"else",
":",
"LOG",
".",
"setLevel",
"(",
"args",
".",
"debug",
")",
"for",
"env",
"in",
"ENVS",
":",
"for",
"region",
"in",
"REGIONS",
":",
"LOG",
".",
"info",
"(",
"'DESTROY %s:%s'",
",",
"env",
",",
"region",
")",
"try",
":",
"destroy_dns",
"(",
"app",
"=",
"args",
".",
"app",
",",
"env",
"=",
"env",
")",
"except",
"botocore",
".",
"exceptions",
".",
"ClientError",
"as",
"error",
":",
"LOG",
".",
"warning",
"(",
"'DNS issue for %s in %s: %s'",
",",
"env",
",",
"region",
",",
"error",
")",
"try",
":",
"destroy_elb",
"(",
"app",
"=",
"args",
".",
"app",
",",
"env",
"=",
"env",
",",
"region",
"=",
"region",
")",
"except",
"SpinnakerError",
":",
"pass",
"try",
":",
"destroy_iam",
"(",
"app",
"=",
"args",
".",
"app",
",",
"env",
"=",
"env",
")",
"except",
"botocore",
".",
"exceptions",
".",
"ClientError",
"as",
"error",
":",
"LOG",
".",
"warning",
"(",
"'IAM issue for %s in %s: %s'",
",",
"env",
",",
"region",
",",
"error",
")",
"try",
":",
"destroy_s3",
"(",
"app",
"=",
"args",
".",
"app",
",",
"env",
"=",
"env",
")",
"except",
"botocore",
".",
"exceptions",
".",
"ClientError",
"as",
"error",
":",
"LOG",
".",
"warning",
"(",
"'S3 issue for %s in %s: %s'",
",",
"env",
",",
"region",
",",
"error",
")",
"try",
":",
"destroy_sg",
"(",
"app",
"=",
"args",
".",
"app",
",",
"env",
"=",
"env",
",",
"region",
"=",
"region",
")",
"except",
"SpinnakerError",
":",
"pass",
"LOG",
".",
"info",
"(",
"'Destroyed %s:%s'",
",",
"env",
",",
"region",
")",
"LOG",
".",
"info",
"(",
"'Destruction complete.'",
")"
] | Attempt to fully destroy AWS Resources for a Spinnaker Application. | [
"Attempt",
"to",
"fully",
"destroy",
"AWS",
"Resources",
"for",
"a",
"Spinnaker",
"Application",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/destroyer.py#L34-L79 |
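A condensed sketch of the destroy sweep above; the destroy_* keyword arguments are taken from the source, while the import path is an assumption and the env/region tuples are illustrative stand-ins for ENVS and REGIONS. Each call's exceptions would be handled as in the source:

from foremast.destroyer import (destroy_dns, destroy_elb, destroy_iam,
                                destroy_s3, destroy_sg)  # assumed import path

for env in ('dev', 'stage', 'prod'):
    for region in ('us-east-1', 'us-west-2'):
        destroy_dns(app='myapp', env=env)
        destroy_elb(app='myapp', env=env, region=region)
        destroy_iam(app='myapp', env=env)
        destroy_s3(app='myapp', env=env)
        destroy_sg(app='myapp', env=env, region=region)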
6,031 | foremast/foremast | src/foremast/pipeline/construct_pipeline_block.py | check_provider_healthcheck | def check_provider_healthcheck(settings, default_provider='Discovery'):
"""Set Provider Health Check when specified.
Returns:
collections.namedtuple: **ProviderHealthCheck** with attributes:
* providers (list): Providers set to use native Health Check.
* has_healthcheck (bool): If any native Health Checks requested.
"""
ProviderHealthCheck = collections.namedtuple('ProviderHealthCheck', ['providers', 'has_healthcheck'])
eureka_enabled = settings['app']['eureka_enabled']
providers = settings['asg']['provider_healthcheck']
LOG.debug('Template defined Health Check Providers: %s', providers)
health_check_providers = []
has_healthcheck = False
normalized_default_provider = default_provider.capitalize()
if eureka_enabled:
LOG.info('Eureka enabled, enabling default Provider Health Check: %s', normalized_default_provider)
for provider, active in providers.items():
if provider.lower() == normalized_default_provider.lower():
providers[provider] = True
LOG.debug('Override defined Provider Health Check: %s -> %s', active, providers[provider])
break
else:
LOG.debug('Adding default Provider Health Check: %s', normalized_default_provider)
providers[normalized_default_provider] = True
for provider, active in providers.items():
if active:
health_check_providers.append(provider.capitalize())
LOG.info('Provider healthchecks: %s', health_check_providers)
if health_check_providers:
has_healthcheck = True
return ProviderHealthCheck(providers=health_check_providers, has_healthcheck=has_healthcheck) | python | def check_provider_healthcheck(settings, default_provider='Discovery'):
"""Set Provider Health Check when specified.
Returns:
collections.namedtuple: **ProviderHealthCheck** with attributes:
* providers (list): Providers set to use native Health Check.
* has_healthcheck (bool): If any native Health Checks requested.
"""
ProviderHealthCheck = collections.namedtuple('ProviderHealthCheck', ['providers', 'has_healthcheck'])
eureka_enabled = settings['app']['eureka_enabled']
providers = settings['asg']['provider_healthcheck']
LOG.debug('Template defined Health Check Providers: %s', providers)
health_check_providers = []
has_healthcheck = False
normalized_default_provider = default_provider.capitalize()
if eureka_enabled:
LOG.info('Eureka enabled, enabling default Provider Health Check: %s', normalized_default_provider)
for provider, active in providers.items():
if provider.lower() == normalized_default_provider.lower():
providers[provider] = True
LOG.debug('Override defined Provider Health Check: %s -> %s', active, providers[provider])
break
else:
LOG.debug('Adding default Provider Health Check: %s', normalized_default_provider)
providers[normalized_default_provider] = True
for provider, active in providers.items():
if active:
health_check_providers.append(provider.capitalize())
LOG.info('Provider healthchecks: %s', health_check_providers)
if health_check_providers:
has_healthcheck = True
return ProviderHealthCheck(providers=health_check_providers, has_healthcheck=has_healthcheck) | [
"def",
"check_provider_healthcheck",
"(",
"settings",
",",
"default_provider",
"=",
"'Discovery'",
")",
":",
"ProviderHealthCheck",
"=",
"collections",
".",
"namedtuple",
"(",
"'ProviderHealthCheck'",
",",
"[",
"'providers'",
",",
"'has_healthcheck'",
"]",
")",
"eureka_enabled",
"=",
"settings",
"[",
"'app'",
"]",
"[",
"'eureka_enabled'",
"]",
"providers",
"=",
"settings",
"[",
"'asg'",
"]",
"[",
"'provider_healthcheck'",
"]",
"LOG",
".",
"debug",
"(",
"'Template defined Health Check Providers: %s'",
",",
"providers",
")",
"health_check_providers",
"=",
"[",
"]",
"has_healthcheck",
"=",
"False",
"normalized_default_provider",
"=",
"default_provider",
".",
"capitalize",
"(",
")",
"if",
"eureka_enabled",
":",
"LOG",
".",
"info",
"(",
"'Eureka enabled, enabling default Provider Health Check: %s'",
",",
"normalized_default_provider",
")",
"for",
"provider",
",",
"active",
"in",
"providers",
".",
"items",
"(",
")",
":",
"if",
"provider",
".",
"lower",
"(",
")",
"==",
"normalized_default_provider",
".",
"lower",
"(",
")",
":",
"providers",
"[",
"provider",
"]",
"=",
"True",
"LOG",
".",
"debug",
"(",
"'Override defined Provider Health Check: %s -> %s'",
",",
"active",
",",
"providers",
"[",
"provider",
"]",
")",
"break",
"else",
":",
"LOG",
".",
"debug",
"(",
"'Adding default Provider Health Check: %s'",
",",
"normalized_default_provider",
")",
"providers",
"[",
"normalized_default_provider",
"]",
"=",
"True",
"for",
"provider",
",",
"active",
"in",
"providers",
".",
"items",
"(",
")",
":",
"if",
"active",
":",
"health_check_providers",
".",
"append",
"(",
"provider",
".",
"capitalize",
"(",
")",
")",
"LOG",
".",
"info",
"(",
"'Provider healthchecks: %s'",
",",
"health_check_providers",
")",
"if",
"health_check_providers",
":",
"has_healthcheck",
"=",
"True",
"return",
"ProviderHealthCheck",
"(",
"providers",
"=",
"health_check_providers",
",",
"has_healthcheck",
"=",
"has_healthcheck",
")"
] | Set Provider Health Check when specified.
Returns:
collections.namedtuple: **ProviderHealthCheck** with attributes:
* providers (list): Providers set to use native Health Check.
* has_healthcheck (bool): If any native Health Checks requested. | [
"Set",
"Provider",
"Health",
"Check",
"when",
"specified",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/pipeline/construct_pipeline_block.py#L29-L70 |
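A minimal usage sketch; only the settings keys the function reads are populated, everything else is omitted:

from foremast.pipeline.construct_pipeline_block import check_provider_healthcheck

settings = {
    'app': {'eureka_enabled': True},
    'asg': {'provider_healthcheck': {'discovery': False}},
}
hc = check_provider_healthcheck(settings)
print(hc.providers)        # ['Discovery'] -- Eureka forces the default on
print(hc.has_healthcheck)  # True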
6,032 | foremast/foremast | src/foremast/pipeline/construct_pipeline_block.py | get_template_name | def get_template_name(env, pipeline_type):
"""Generates the correct template name based on pipeline type
Args:
env (str): environment to generate templates for
pipeline_type (str): Type of pipeline like ec2 or lambda
Returns:
str: Name of template
"""
pipeline_base = 'pipeline/pipeline'
template_name_format = '{pipeline_base}'
if env.startswith('prod'):
template_name_format = template_name_format + '_{env}'
else:
template_name_format = template_name_format + '_stages'
if pipeline_type != 'ec2':
template_name_format = template_name_format + '_{pipeline_type}'
template_name_format = template_name_format + '.json.j2'
template_name = template_name_format.format(pipeline_base=pipeline_base, env=env, pipeline_type=pipeline_type)
return template_name | python | def get_template_name(env, pipeline_type):
"""Generates the correct template name based on pipeline type
Args:
env (str): environment to generate templates for
pipeline_type (str): Type of pipeline like ec2 or lambda
Returns:
str: Name of template
"""
pipeline_base = 'pipeline/pipeline'
template_name_format = '{pipeline_base}'
if env.startswith('prod'):
template_name_format = template_name_format + '_{env}'
else:
template_name_format = template_name_format + '_stages'
if pipeline_type != 'ec2':
template_name_format = template_name_format + '_{pipeline_type}'
template_name_format = template_name_format + '.json.j2'
template_name = template_name_format.format(pipeline_base=pipeline_base, env=env, pipeline_type=pipeline_type)
return template_name | [
"def",
"get_template_name",
"(",
"env",
",",
"pipeline_type",
")",
":",
"pipeline_base",
"=",
"'pipeline/pipeline'",
"template_name_format",
"=",
"'{pipeline_base}'",
"if",
"env",
".",
"startswith",
"(",
"'prod'",
")",
":",
"template_name_format",
"=",
"template_name_format",
"+",
"'_{env}'",
"else",
":",
"template_name_format",
"=",
"template_name_format",
"+",
"'_stages'",
"if",
"pipeline_type",
"!=",
"'ec2'",
":",
"template_name_format",
"=",
"template_name_format",
"+",
"'_{pipeline_type}'",
"template_name_format",
"=",
"template_name_format",
"+",
"'.json.j2'",
"template_name",
"=",
"template_name_format",
".",
"format",
"(",
"pipeline_base",
"=",
"pipeline_base",
",",
"env",
"=",
"env",
",",
"pipeline_type",
"=",
"pipeline_type",
")",
"return",
"template_name"
] | Generates the correct template name based on pipeline type
Args:
env (str): environment to generate templates for
pipeline_type (str): Type of pipeline like ec2 or lambda
Returns:
str: Name of template | [
"Generates",
"the",
"correct",
"template",
"name",
"based",
"on",
"pipeline",
"type"
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/pipeline/construct_pipeline_block.py#L73-L96 |
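The template names the branches above produce, for a few inputs:

from foremast.pipeline.construct_pipeline_block import get_template_name

get_template_name('dev', 'ec2')       # 'pipeline/pipeline_stages.json.j2'
get_template_name('prod', 'ec2')      # 'pipeline/pipeline_prod.json.j2'
get_template_name('dev', 'lambda')    # 'pipeline/pipeline_stages_lambda.json.j2'
get_template_name('prods', 'lambda')  # 'pipeline/pipeline_prods_lambda.json.j2'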
6,033 | foremast/foremast | src/foremast/pipeline/construct_pipeline_block.py | ec2_pipeline_setup | def ec2_pipeline_setup(
generated=None,
project='',
settings=None,
env='',
pipeline_type='',
region='',
region_subnets=None,
):
"""Handles ec2 pipeline data setup
Args:
generated (gogoutils.Generator): Generated naming formats.
project (str): Group name of application
settings (dict): Environment settings from configurations.
env (str): Deploy environment name, e.g. dev, stage, prod.
pipeline_type (str): Type of Foremast Pipeline to configure.
region (str): AWS Region to deploy to.
region_subnets (dict): Subnets for a Region, e.g.
{'us-west-2': ['us-west-2a', 'us-west-2b', 'us-west-2c']}.
Returns:
dict: Updated settings to pass to templates for EC2 info
"""
data = copy.deepcopy(settings)
user_data = generate_encoded_user_data(
env=env,
region=region,
generated=generated,
group_name=project,
pipeline_type=pipeline_type,
)
# Use different variable to keep template simple
instance_security_groups = sorted(DEFAULT_EC2_SECURITYGROUPS[env])
instance_security_groups.append(generated.security_group_app)
instance_security_groups.extend(settings['security_group']['instance_extras'])
instance_security_groups = remove_duplicate_sg(instance_security_groups)
LOG.info('Instance security groups to attach: %s', instance_security_groups)
# check if scaling policy exists
if settings['asg']['scaling_policy']:
scalingpolicy = True
LOG.info('Found scaling policy')
else:
scalingpolicy = False
LOG.info('No scaling policy found')
if settings['app']['eureka_enabled']:
elb = []
else:
elb = [generated.elb_app]
LOG.info('Attaching the following ELB: %s', elb)
health_checks = check_provider_healthcheck(settings)
# Use EC2 Health Check for DEV or Eureka enabled
if env == 'dev' or settings['app']['eureka_enabled']:
data['asg'].update({'hc_type': 'EC2'})
LOG.info('Switching health check type to: EC2')
# Aggregate the default grace period, plus the exposed app_grace_period
# to allow per repo extension of asg healthcheck grace period
hc_grace_period = data['asg'].get('hc_grace_period')
app_grace_period = data['asg'].get('app_grace_period')
grace_period = hc_grace_period + app_grace_period
# TODO: Migrate the naming logic to an external library to make it easier
# to update in the future. Gogo-Utils looks like a good candidate
ssh_keypair = data['asg'].get('ssh_keypair', None)
if not ssh_keypair:
ssh_keypair = '{0}_{1}_default'.format(env, region)
LOG.info('SSH keypair (%s) used', ssh_keypair)
if settings['app']['canary']:
canary_user_data = generate_encoded_user_data(
env=env,
region=region,
generated=generated,
group_name=project,
canary=True,
)
data['app'].update({
'canary_encoded_user_data': canary_user_data,
})
data['asg'].update({
'hc_type': data['asg'].get('hc_type').upper(),
'hc_grace_period': grace_period,
'ssh_keypair': ssh_keypair,
'provider_healthcheck': json.dumps(health_checks.providers),
'enable_public_ips': json.dumps(settings['asg']['enable_public_ips']),
'has_provider_healthcheck': health_checks.has_healthcheck,
'asg_whitelist': ASG_WHITELIST,
})
data['app'].update({
'az_dict': json.dumps(region_subnets),
'encoded_user_data': user_data,
'instance_security_groups': json.dumps(instance_security_groups),
'elb': json.dumps(elb),
'scalingpolicy': scalingpolicy,
})
return data | python | def ec2_pipeline_setup(
generated=None,
project='',
settings=None,
env='',
pipeline_type='',
region='',
region_subnets=None,
):
"""Handles ec2 pipeline data setup
Args:
generated (gogoutils.Generator): Generated naming formats.
project (str): Group name of application
settings (dict): Environment settings from configurations.
env (str): Deploy environment name, e.g. dev, stage, prod.
pipeline_type (str): Type of Foremast Pipeline to configure.
region (str): AWS Region to deploy to.
region_subnets (dict): Subnets for a Region, e.g.
{'us-west-2': ['us-west-2a', 'us-west-2b', 'us-west-2c']}.
Returns:
dict: Updated settings to pass to templates for EC2 info
"""
data = copy.deepcopy(settings)
user_data = generate_encoded_user_data(
env=env,
region=region,
generated=generated,
group_name=project,
pipeline_type=pipeline_type,
)
# Use different variable to keep template simple
instance_security_groups = sorted(DEFAULT_EC2_SECURITYGROUPS[env])
instance_security_groups.append(generated.security_group_app)
instance_security_groups.extend(settings['security_group']['instance_extras'])
instance_security_groups = remove_duplicate_sg(instance_security_groups)
LOG.info('Instance security groups to attach: %s', instance_security_groups)
# check if scaling policy exists
if settings['asg']['scaling_policy']:
scalingpolicy = True
LOG.info('Found scaling policy')
else:
scalingpolicy = False
LOG.info('No scaling policy found')
if settings['app']['eureka_enabled']:
elb = []
else:
elb = [generated.elb_app]
LOG.info('Attaching the following ELB: %s', elb)
health_checks = check_provider_healthcheck(settings)
# Use EC2 Health Check for DEV or Eureka enabled
if env == 'dev' or settings['app']['eureka_enabled']:
data['asg'].update({'hc_type': 'EC2'})
LOG.info('Switching health check type to: EC2')
# Aggregate the default grace period, plus the exposed app_grace_period
# to allow per repo extension of asg healthcheck grace period
hc_grace_period = data['asg'].get('hc_grace_period')
app_grace_period = data['asg'].get('app_grace_period')
grace_period = hc_grace_period + app_grace_period
# TODO: Migrate the naming logic to an external library to make it easier
# to update in the future. Gogo-Utils looks like a good candidate
ssh_keypair = data['asg'].get('ssh_keypair', None)
if not ssh_keypair:
ssh_keypair = '{0}_{1}_default'.format(env, region)
LOG.info('SSH keypair (%s) used', ssh_keypair)
if settings['app']['canary']:
canary_user_data = generate_encoded_user_data(
env=env,
region=region,
generated=generated,
group_name=project,
canary=True,
)
data['app'].update({
'canary_encoded_user_data': canary_user_data,
})
data['asg'].update({
'hc_type': data['asg'].get('hc_type').upper(),
'hc_grace_period': grace_period,
'ssh_keypair': ssh_keypair,
'provider_healthcheck': json.dumps(health_checks.providers),
'enable_public_ips': json.dumps(settings['asg']['enable_public_ips']),
'has_provider_healthcheck': health_checks.has_healthcheck,
'asg_whitelist': ASG_WHITELIST,
})
data['app'].update({
'az_dict': json.dumps(region_subnets),
'encoded_user_data': user_data,
'instance_security_groups': json.dumps(instance_security_groups),
'elb': json.dumps(elb),
'scalingpolicy': scalingpolicy,
})
return data | [
"def",
"ec2_pipeline_setup",
"(",
"generated",
"=",
"None",
",",
"project",
"=",
"''",
",",
"settings",
"=",
"None",
",",
"env",
"=",
"''",
",",
"pipeline_type",
"=",
"''",
",",
"region",
"=",
"''",
",",
"region_subnets",
"=",
"None",
",",
")",
":",
"data",
"=",
"copy",
".",
"deepcopy",
"(",
"settings",
")",
"user_data",
"=",
"generate_encoded_user_data",
"(",
"env",
"=",
"env",
",",
"region",
"=",
"region",
",",
"generated",
"=",
"generated",
",",
"group_name",
"=",
"project",
",",
"pipeline_type",
"=",
"pipeline_type",
",",
")",
"# Use different variable to keep template simple",
"instance_security_groups",
"=",
"sorted",
"(",
"DEFAULT_EC2_SECURITYGROUPS",
"[",
"env",
"]",
")",
"instance_security_groups",
".",
"append",
"(",
"generated",
".",
"security_group_app",
")",
"instance_security_groups",
".",
"extend",
"(",
"settings",
"[",
"'security_group'",
"]",
"[",
"'instance_extras'",
"]",
")",
"instance_security_groups",
"=",
"remove_duplicate_sg",
"(",
"instance_security_groups",
")",
"LOG",
".",
"info",
"(",
"'Instance security groups to attach: %s'",
",",
"instance_security_groups",
")",
"# check if scaling policy exists",
"if",
"settings",
"[",
"'asg'",
"]",
"[",
"'scaling_policy'",
"]",
":",
"scalingpolicy",
"=",
"True",
"LOG",
".",
"info",
"(",
"'Found scaling policy'",
")",
"else",
":",
"scalingpolicy",
"=",
"False",
"LOG",
".",
"info",
"(",
"'No scaling policy found'",
")",
"if",
"settings",
"[",
"'app'",
"]",
"[",
"'eureka_enabled'",
"]",
":",
"elb",
"=",
"[",
"]",
"else",
":",
"elb",
"=",
"[",
"generated",
".",
"elb_app",
"]",
"LOG",
".",
"info",
"(",
"'Attaching the following ELB: %s'",
",",
"elb",
")",
"health_checks",
"=",
"check_provider_healthcheck",
"(",
"settings",
")",
"# Use EC2 Health Check for DEV or Eureka enabled",
"if",
"env",
"==",
"'dev'",
"or",
"settings",
"[",
"'app'",
"]",
"[",
"'eureka_enabled'",
"]",
":",
"data",
"[",
"'asg'",
"]",
".",
"update",
"(",
"{",
"'hc_type'",
":",
"'EC2'",
"}",
")",
"LOG",
".",
"info",
"(",
"'Switching health check type to: EC2'",
")",
"# Aggregate the default grace period, plus the exposed app_grace_period",
"# to allow per repo extension of asg healthcheck grace period",
"hc_grace_period",
"=",
"data",
"[",
"'asg'",
"]",
".",
"get",
"(",
"'hc_grace_period'",
")",
"app_grace_period",
"=",
"data",
"[",
"'asg'",
"]",
".",
"get",
"(",
"'app_grace_period'",
")",
"grace_period",
"=",
"hc_grace_period",
"+",
"app_grace_period",
"# TODO: Migrate the naming logic to an external library to make it easier",
"# to update in the future. Gogo-Utils looks like a good candidate",
"ssh_keypair",
"=",
"data",
"[",
"'asg'",
"]",
".",
"get",
"(",
"'ssh_keypair'",
",",
"None",
")",
"if",
"not",
"ssh_keypair",
":",
"ssh_keypair",
"=",
"'{0}_{1}_default'",
".",
"format",
"(",
"env",
",",
"region",
")",
"LOG",
".",
"info",
"(",
"'SSH keypair (%s) used'",
",",
"ssh_keypair",
")",
"if",
"settings",
"[",
"'app'",
"]",
"[",
"'canary'",
"]",
":",
"canary_user_data",
"=",
"generate_encoded_user_data",
"(",
"env",
"=",
"env",
",",
"region",
"=",
"region",
",",
"generated",
"=",
"generated",
",",
"group_name",
"=",
"project",
",",
"canary",
"=",
"True",
",",
")",
"data",
"[",
"'app'",
"]",
".",
"update",
"(",
"{",
"'canary_encoded_user_data'",
":",
"canary_user_data",
",",
"}",
")",
"data",
"[",
"'asg'",
"]",
".",
"update",
"(",
"{",
"'hc_type'",
":",
"data",
"[",
"'asg'",
"]",
".",
"get",
"(",
"'hc_type'",
")",
".",
"upper",
"(",
")",
",",
"'hc_grace_period'",
":",
"grace_period",
",",
"'ssh_keypair'",
":",
"ssh_keypair",
",",
"'provider_healthcheck'",
":",
"json",
".",
"dumps",
"(",
"health_checks",
".",
"providers",
")",
",",
"'enable_public_ips'",
":",
"json",
".",
"dumps",
"(",
"settings",
"[",
"'asg'",
"]",
"[",
"'enable_public_ips'",
"]",
")",
",",
"'has_provider_healthcheck'",
":",
"health_checks",
".",
"has_healthcheck",
",",
"'asg_whitelist'",
":",
"ASG_WHITELIST",
",",
"}",
")",
"data",
"[",
"'app'",
"]",
".",
"update",
"(",
"{",
"'az_dict'",
":",
"json",
".",
"dumps",
"(",
"region_subnets",
")",
",",
"'encoded_user_data'",
":",
"user_data",
",",
"'instance_security_groups'",
":",
"json",
".",
"dumps",
"(",
"instance_security_groups",
")",
",",
"'elb'",
":",
"json",
".",
"dumps",
"(",
"elb",
")",
",",
"'scalingpolicy'",
":",
"scalingpolicy",
",",
"}",
")",
"return",
"data"
] | Handles ec2 pipeline data setup
Args:
generated (gogoutils.Generator): Generated naming formats.
project (str): Group name of application
settings (dict): Environment settings from configurations.
env (str): Deploy environment name, e.g. dev, stage, prod.
pipeline_type (str): Type of Foremast Pipeline to configure.
region (str): AWS Region to deploy to.
region_subnets (dict): Subnets for a Region, e.g.
{'us-west-2': ['us-west-2a', 'us-west-2b', 'us-west-2c']}.
Returns:
dict: Updated settings to pass to templates for EC2 info | [
"Handles",
"ec2",
"pipeline",
"data",
"setup"
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/pipeline/construct_pipeline_block.py#L169-L275 |
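A hedged invocation sketch for ec2_pipeline_setup; in Foremast the generated and settings arguments come from its own plumbing (a gogoutils Generator and the merged application config), so the stand-ins below are assumptions:
from foremast.pipeline.construct_pipeline_block import ec2_pipeline_setup
# Hypothetical inputs: `settings` must carry the asg/app/security_group keys
# the function indexes into, and `generated` is a gogoutils.Generator.
data = ec2_pipeline_setup(
    generated=generated,
    project='forrest',
    settings=settings,
    env='stage',
    pipeline_type='ec2',
    region='us-east-1',
    region_subnets={'us-east-1': ['us-east-1a', 'us-east-1b', 'us-east-1c']},
)
data['asg']['hc_type']  # upper-cased, or forced to 'EC2' for dev/Eureka apps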
6,034 | foremast/foremast | src/foremast/pipeline/create_pipeline_manual.py | SpinnakerPipelineManual.create_pipeline | def create_pipeline(self):
"""Use JSON files to create Pipelines."""
pipelines = self.settings['pipeline']['pipeline_files']
self.log.info('Uploading manual Pipelines: %s', pipelines)
lookup = FileLookup(git_short=self.generated.gitlab()['main'], runway_dir=self.runway_dir)
for json_file in pipelines:
json_dict = lookup.json(filename=json_file)
json_dict.setdefault('application', self.app_name)
json_dict.setdefault('name', normalize_pipeline_name(name=json_file))
json_dict.setdefault('id', get_pipeline_id(app=json_dict['application'], name=json_dict['name']))
self.post_pipeline(json_dict)
return True | python | def create_pipeline(self):
"""Use JSON files to create Pipelines."""
pipelines = self.settings['pipeline']['pipeline_files']
self.log.info('Uploading manual Pipelines: %s', pipelines)
lookup = FileLookup(git_short=self.generated.gitlab()['main'], runway_dir=self.runway_dir)
for json_file in pipelines:
json_dict = lookup.json(filename=json_file)
json_dict.setdefault('application', self.app_name)
json_dict.setdefault('name', normalize_pipeline_name(name=json_file))
json_dict.setdefault('id', get_pipeline_id(app=json_dict['application'], name=json_dict['name']))
self.post_pipeline(json_dict)
return True | [
"def",
"create_pipeline",
"(",
"self",
")",
":",
"pipelines",
"=",
"self",
".",
"settings",
"[",
"'pipeline'",
"]",
"[",
"'pipeline_files'",
"]",
"self",
".",
"log",
".",
"info",
"(",
"'Uploading manual Pipelines: %s'",
",",
"pipelines",
")",
"lookup",
"=",
"FileLookup",
"(",
"git_short",
"=",
"self",
".",
"generated",
".",
"gitlab",
"(",
")",
"[",
"'main'",
"]",
",",
"runway_dir",
"=",
"self",
".",
"runway_dir",
")",
"for",
"json_file",
"in",
"pipelines",
":",
"json_dict",
"=",
"lookup",
".",
"json",
"(",
"filename",
"=",
"json_file",
")",
"json_dict",
".",
"setdefault",
"(",
"'application'",
",",
"self",
".",
"app_name",
")",
"json_dict",
".",
"setdefault",
"(",
"'name'",
",",
"normalize_pipeline_name",
"(",
"name",
"=",
"json_file",
")",
")",
"json_dict",
".",
"setdefault",
"(",
"'id'",
",",
"get_pipeline_id",
"(",
"app",
"=",
"json_dict",
"[",
"'application'",
"]",
",",
"name",
"=",
"json_dict",
"[",
"'name'",
"]",
")",
")",
"self",
".",
"post_pipeline",
"(",
"json_dict",
")",
"return",
"True"
] | Use JSON files to create Pipelines. | [
"Use",
"JSON",
"files",
"to",
"create",
"Pipelines",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/pipeline/create_pipeline_manual.py#L25-L42 |
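Driving SpinnakerPipelineManual.create_pipeline can only be sketched here, since its constructor is outside this record; the instance is assumed to expose the attributes the method reads (settings, generated, runway_dir, app_name):
# Hypothetical: `manual` is an already-built SpinnakerPipelineManual.
manual.settings['pipeline']['pipeline_files'] = ['deploy.json']
manual.create_pipeline()  # posts each file, defaulting application, name, and id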
6,035 | foremast/foremast | src/foremast/pipeline/__main__.py | main | def main():
"""Creates a pipeline in Spinnaker"""
logging.basicConfig(format=LOGGING_FORMAT)
log = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
add_debug(parser)
add_app(parser)
add_properties(parser)
parser.add_argument('-b', '--base', help='Base AMI name to use, e.g. fedora, tomcat')
parser.add_argument("--triggerjob", help="The jenkins job to monitor for pipeline triggering", required=True)
parser.add_argument('--onetime', required=False, choices=ENVS, help='Onetime deployment environment')
parser.add_argument(
'-t', '--type', dest='type', required=False, default='ec2', help='Deployment type, e.g. ec2, lambda')
args = parser.parse_args()
if args.base and '"' in args.base:
args.base = args.base.strip('"')
logging.getLogger(__package__.split('.')[0]).setLevel(args.debug)
log.debug('Parsed arguments: %s', args)
if args.onetime:
spinnakerapps = SpinnakerPipelineOnetime(
app=args.app, onetime=args.onetime, trigger_job=args.triggerjob, prop_path=args.properties, base=args.base)
spinnakerapps.create_pipeline()
else:
if args.type == "ec2":
spinnakerapps = SpinnakerPipeline(
app=args.app, trigger_job=args.triggerjob, prop_path=args.properties, base=args.base)
spinnakerapps.create_pipeline()
elif args.type == "lambda":
spinnakerapps = SpinnakerPipelineLambda(
app=args.app, trigger_job=args.triggerjob, prop_path=args.properties, base=args.base)
spinnakerapps.create_pipeline()
elif args.type == "s3":
spinnakerapps = SpinnakerPipelineS3(
app=args.app, trigger_job=args.triggerjob, prop_path=args.properties, base=args.base)
spinnakerapps.create_pipeline() | python | def main():
"""Creates a pipeline in Spinnaker"""
logging.basicConfig(format=LOGGING_FORMAT)
log = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
add_debug(parser)
add_app(parser)
add_properties(parser)
parser.add_argument('-b', '--base', help='Base AMI name to use, e.g. fedora, tomcat')
parser.add_argument("--triggerjob", help="The jenkins job to monitor for pipeline triggering", required=True)
parser.add_argument('--onetime', required=False, choices=ENVS, help='Onetime deployment environment')
parser.add_argument(
'-t', '--type', dest='type', required=False, default='ec2', help='Deployment type, e.g. ec2, lambda')
args = parser.parse_args()
if args.base and '"' in args.base:
args.base = args.base.strip('"')
logging.getLogger(__package__.split('.')[0]).setLevel(args.debug)
log.debug('Parsed arguments: %s', args)
if args.onetime:
spinnakerapps = SpinnakerPipelineOnetime(
app=args.app, onetime=args.onetime, trigger_job=args.triggerjob, prop_path=args.properties, base=args.base)
spinnakerapps.create_pipeline()
else:
if args.type == "ec2":
spinnakerapps = SpinnakerPipeline(
app=args.app, trigger_job=args.triggerjob, prop_path=args.properties, base=args.base)
spinnakerapps.create_pipeline()
elif args.type == "lambda":
spinnakerapps = SpinnakerPipelineLambda(
app=args.app, trigger_job=args.triggerjob, prop_path=args.properties, base=args.base)
spinnakerapps.create_pipeline()
elif args.type == "s3":
spinnakerapps = SpinnakerPipelineS3(
app=args.app, trigger_job=args.triggerjob, prop_path=args.properties, base=args.base)
spinnakerapps.create_pipeline() | [
"def",
"main",
"(",
")",
":",
"logging",
".",
"basicConfig",
"(",
"format",
"=",
"LOGGING_FORMAT",
")",
"log",
"=",
"logging",
".",
"getLogger",
"(",
"__name__",
")",
"parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
")",
"add_debug",
"(",
"parser",
")",
"add_app",
"(",
"parser",
")",
"add_properties",
"(",
"parser",
")",
"parser",
".",
"add_argument",
"(",
"'-b'",
",",
"'--base'",
",",
"help",
"=",
"'Base AMI name to use, e.g. fedora, tomcat'",
")",
"parser",
".",
"add_argument",
"(",
"\"--triggerjob\"",
",",
"help",
"=",
"\"The jenkins job to monitor for pipeline triggering\"",
",",
"required",
"=",
"True",
")",
"parser",
".",
"add_argument",
"(",
"'--onetime'",
",",
"required",
"=",
"False",
",",
"choices",
"=",
"ENVS",
",",
"help",
"=",
"'Onetime deployment environment'",
")",
"parser",
".",
"add_argument",
"(",
"'-t'",
",",
"'--type'",
",",
"dest",
"=",
"'type'",
",",
"required",
"=",
"False",
",",
"default",
"=",
"'ec2'",
",",
"help",
"=",
"'Deployment type, e.g. ec2, lambda'",
")",
"args",
"=",
"parser",
".",
"parse_args",
"(",
")",
"if",
"args",
".",
"base",
"and",
"'\"'",
"in",
"args",
".",
"base",
":",
"args",
".",
"base",
"=",
"args",
".",
"base",
".",
"strip",
"(",
"'\"'",
")",
"logging",
".",
"getLogger",
"(",
"__package__",
".",
"split",
"(",
"'.'",
")",
"[",
"0",
"]",
")",
".",
"setLevel",
"(",
"args",
".",
"debug",
")",
"log",
".",
"debug",
"(",
"'Parsed arguments: %s'",
",",
"args",
")",
"if",
"args",
".",
"onetime",
":",
"spinnakerapps",
"=",
"SpinnakerPipelineOnetime",
"(",
"app",
"=",
"args",
".",
"app",
",",
"onetime",
"=",
"args",
".",
"onetime",
",",
"trigger_job",
"=",
"args",
".",
"triggerjob",
",",
"prop_path",
"=",
"args",
".",
"properties",
",",
"base",
"=",
"args",
".",
"base",
")",
"spinnakerapps",
".",
"create_pipeline",
"(",
")",
"else",
":",
"if",
"args",
".",
"type",
"==",
"\"ec2\"",
":",
"spinnakerapps",
"=",
"SpinnakerPipeline",
"(",
"app",
"=",
"args",
".",
"app",
",",
"trigger_job",
"=",
"args",
".",
"triggerjob",
",",
"prop_path",
"=",
"args",
".",
"properties",
",",
"base",
"=",
"args",
".",
"base",
")",
"spinnakerapps",
".",
"create_pipeline",
"(",
")",
"elif",
"args",
".",
"type",
"==",
"\"lambda\"",
":",
"spinnakerapps",
"=",
"SpinnakerPipelineLambda",
"(",
"app",
"=",
"args",
".",
"app",
",",
"trigger_job",
"=",
"args",
".",
"triggerjob",
",",
"prop_path",
"=",
"args",
".",
"properties",
",",
"base",
"=",
"args",
".",
"base",
")",
"spinnakerapps",
".",
"create_pipeline",
"(",
")",
"elif",
"args",
".",
"type",
"==",
"\"s3\"",
":",
"spinnakerapps",
"=",
"SpinnakerPipelineS3",
"(",
"app",
"=",
"args",
".",
"app",
",",
"trigger_job",
"=",
"args",
".",
"triggerjob",
",",
"prop_path",
"=",
"args",
".",
"properties",
",",
"base",
"=",
"args",
".",
"base",
")",
"spinnakerapps",
".",
"create_pipeline",
"(",
")"
] | Creates a pipeline in Spinnaker | [
"Creates",
"a",
"pipeline",
"in",
"Spinnaker"
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/pipeline/__main__.py#L31-L71 |
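Because main() is an argparse console entry, it can be exercised by patching sys.argv; the flag names come straight from the parser above, while the program name and the defaults contributed by add_debug/add_app/add_properties are assumptions:
import sys
from unittest import mock
from foremast.pipeline.__main__ import main
argv = ['foremast-pipeline', '--app', 'myapp',
        '--triggerjob', 'myapp-master', '--type', 'lambda']
with mock.patch.object(sys, 'argv', argv):
    main()  # dispatches to SpinnakerPipelineLambda.create_pipeline()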
6,036 | foremast/foremast | src/foremast/configs/outputs.py | convert_ini | def convert_ini(config_dict):
"""Convert _config_dict_ into a list of INI formatted strings.
Args:
config_dict (dict): Configuration dictionary to be flattened.
Returns:
(list) Lines to be written to a file in the format of KEY1_KEY2=value.
"""
config_lines = []
for env, configs in sorted(config_dict.items()):
for resource, app_properties in sorted(configs.items()):
try:
for app_property, value in sorted(app_properties.items()):
variable = '{env}_{resource}_{app_property}'.format(
env=env, resource=resource, app_property=app_property).upper()
if isinstance(value, (dict, DeepChainMap)):
safe_value = "'{0}'".format(json.dumps(dict(value)))
else:
safe_value = json.dumps(value)
line = "{variable}={value}".format(variable=variable, value=safe_value)
LOG.debug('INI line: %s', line)
config_lines.append(line)
except AttributeError:
resource = resource.upper()
app_properties = "'{}'".format(json.dumps(app_properties))
line = '{0}={1}'.format(resource, app_properties)
LOG.debug('INI line: %s', line)
config_lines.append(line)
return config_lines | python | def convert_ini(config_dict):
"""Convert _config_dict_ into a list of INI formatted strings.
Args:
config_dict (dict): Configuration dictionary to be flattened.
Returns:
(list) Lines to be written to a file in the format of KEY1_KEY2=value.
"""
config_lines = []
for env, configs in sorted(config_dict.items()):
for resource, app_properties in sorted(configs.items()):
try:
for app_property, value in sorted(app_properties.items()):
variable = '{env}_{resource}_{app_property}'.format(
env=env, resource=resource, app_property=app_property).upper()
if isinstance(value, (dict, DeepChainMap)):
safe_value = "'{0}'".format(json.dumps(dict(value)))
else:
safe_value = json.dumps(value)
line = "{variable}={value}".format(variable=variable, value=safe_value)
LOG.debug('INI line: %s', line)
config_lines.append(line)
except AttributeError:
resource = resource.upper()
app_properties = "'{}'".format(json.dumps(app_properties))
line = '{0}={1}'.format(resource, app_properties)
LOG.debug('INI line: %s', line)
config_lines.append(line)
return config_lines | [
"def",
"convert_ini",
"(",
"config_dict",
")",
":",
"config_lines",
"=",
"[",
"]",
"for",
"env",
",",
"configs",
"in",
"sorted",
"(",
"config_dict",
".",
"items",
"(",
")",
")",
":",
"for",
"resource",
",",
"app_properties",
"in",
"sorted",
"(",
"configs",
".",
"items",
"(",
")",
")",
":",
"try",
":",
"for",
"app_property",
",",
"value",
"in",
"sorted",
"(",
"app_properties",
".",
"items",
"(",
")",
")",
":",
"variable",
"=",
"'{env}_{resource}_{app_property}'",
".",
"format",
"(",
"env",
"=",
"env",
",",
"resource",
"=",
"resource",
",",
"app_property",
"=",
"app_property",
")",
".",
"upper",
"(",
")",
"if",
"isinstance",
"(",
"value",
",",
"(",
"dict",
",",
"DeepChainMap",
")",
")",
":",
"safe_value",
"=",
"\"'{0}'\"",
".",
"format",
"(",
"json",
".",
"dumps",
"(",
"dict",
"(",
"value",
")",
")",
")",
"else",
":",
"safe_value",
"=",
"json",
".",
"dumps",
"(",
"value",
")",
"line",
"=",
"\"{variable}={value}\"",
".",
"format",
"(",
"variable",
"=",
"variable",
",",
"value",
"=",
"safe_value",
")",
"LOG",
".",
"debug",
"(",
"'INI line: %s'",
",",
"line",
")",
"config_lines",
".",
"append",
"(",
"line",
")",
"except",
"AttributeError",
":",
"resource",
"=",
"resource",
".",
"upper",
"(",
")",
"app_properties",
"=",
"\"'{}'\"",
".",
"format",
"(",
"json",
".",
"dumps",
"(",
"app_properties",
")",
")",
"line",
"=",
"'{0}={1}'",
".",
"format",
"(",
"resource",
",",
"app_properties",
")",
"LOG",
".",
"debug",
"(",
"'INI line: %s'",
",",
"line",
")",
"config_lines",
".",
"append",
"(",
"line",
")",
"return",
"config_lines"
] | Convert _config_dict_ into a list of INI formatted strings.
Args:
config_dict (dict): Configuration dictionary to be flattened.
Returns:
(list) Lines to be written to a file in the format of KEY1_KEY2=value. | [
"Convert",
"_config_dict_",
"into",
"a",
"list",
"of",
"INI",
"formatted",
"strings",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/configs/outputs.py#L29-L63 |
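convert_ini is a pure function, so its behavior can be read straight off the code above: scalar values are passed through json.dumps, while dict values are JSON-encoded and wrapped in single quotes:
from foremast.configs.outputs import convert_ini
convert_ini({'dev': {'elb': {'subnet_purpose': 'internal'}}})
# -> ['DEV_ELB_SUBNET_PURPOSE="internal"']
convert_ini({'dev': {'app': {'custom_tags': {'owner': 'ops'}}}})
# -> ['DEV_APP_CUSTOM_TAGS=\'{"owner": "ops"}\'']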
6,037 | foremast/foremast | src/foremast/configs/outputs.py | write_variables | def write_variables(app_configs=None, out_file='', git_short=''):
"""Append _application.json_ configs to _out_file_, .exports, and .json.
Variables are written in INI style, e.g. UPPER_CASE=value. The .exports file
contains 'export' prepended to each line for easy sourcing. The .json file
is a minified representation of the combined configurations.
Args:
app_configs (dict): Environment configurations from _application.json_
files, e.g. {'dev': {'elb': {'subnet_purpose': 'internal'}}}.
out_file (str): Name of INI file to append to.
git_short (str): Short name of Git repository, e.g. forrest/core.
Returns:
dict: Configuration equivalent to the JSON output.
"""
generated = gogoutils.Generator(*gogoutils.Parser(git_short).parse_url(), formats=APP_FORMATS)
json_configs = {}
for env, configs in app_configs.items():
if env != 'pipeline':
instance_profile = generated.iam()['profile']
rendered_configs = json.loads(
get_template(
'configs/configs.json.j2',
env=env,
app=generated.app_name(),
profile=instance_profile,
formats=generated))
json_configs[env] = dict(DeepChainMap(configs, rendered_configs))
region_list = configs.get('regions', rendered_configs['regions'])
json_configs[env]['regions'] = region_list # removes regions defined in templates but not configs.
for region in region_list:
region_config = json_configs[env][region]
json_configs[env][region] = dict(DeepChainMap(region_config, rendered_configs))
else:
default_pipeline_json = json.loads(get_template('configs/pipeline.json.j2', formats=generated))
json_configs['pipeline'] = dict(DeepChainMap(configs, default_pipeline_json))
LOG.debug('Compiled configs:\n%s', pformat(json_configs))
config_lines = convert_ini(json_configs)
with open(out_file, 'at') as jenkins_vars:
LOG.info('Appending variables to %s.', out_file)
jenkins_vars.write('\n'.join(config_lines))
with open(out_file + '.exports', 'wt') as export_vars:
LOG.info('Writing sourceable variables to %s.', export_vars.name)
export_vars.write('\n'.join('export {0}'.format(line) for line in config_lines))
with open(out_file + '.json', 'wt') as json_handle:
LOG.info('Writing JSON to %s.', json_handle.name)
LOG.debug('Total JSON dict:\n%s', json_configs)
json.dump(json_configs, json_handle)
return json_configs | python | def write_variables(app_configs=None, out_file='', git_short=''):
"""Append _application.json_ configs to _out_file_, .exports, and .json.
Variables are written in INI style, e.g. UPPER_CASE=value. The .exports file
contains 'export' prepended to each line for easy sourcing. The .json file
is a minified representation of the combined configurations.
Args:
app_configs (dict): Environment configurations from _application.json_
files, e.g. {'dev': {'elb': {'subnet_purpose': 'internal'}}}.
out_file (str): Name of INI file to append to.
git_short (str): Short name of Git repository, e.g. forrest/core.
Returns:
dict: Configuration equivalent to the JSON output.
"""
generated = gogoutils.Generator(*gogoutils.Parser(git_short).parse_url(), formats=APP_FORMATS)
json_configs = {}
for env, configs in app_configs.items():
if env != 'pipeline':
instance_profile = generated.iam()['profile']
rendered_configs = json.loads(
get_template(
'configs/configs.json.j2',
env=env,
app=generated.app_name(),
profile=instance_profile,
formats=generated))
json_configs[env] = dict(DeepChainMap(configs, rendered_configs))
region_list = configs.get('regions', rendered_configs['regions'])
json_configs[env]['regions'] = region_list # removes regions defined in templates but not configs.
for region in region_list:
region_config = json_configs[env][region]
json_configs[env][region] = dict(DeepChainMap(region_config, rendered_configs))
else:
default_pipeline_json = json.loads(get_template('configs/pipeline.json.j2', formats=generated))
json_configs['pipeline'] = dict(DeepChainMap(configs, default_pipeline_json))
LOG.debug('Compiled configs:\n%s', pformat(json_configs))
config_lines = convert_ini(json_configs)
with open(out_file, 'at') as jenkins_vars:
LOG.info('Appending variables to %s.', out_file)
jenkins_vars.write('\n'.join(config_lines))
with open(out_file + '.exports', 'wt') as export_vars:
LOG.info('Writing sourceable variables to %s.', export_vars.name)
export_vars.write('\n'.join('export {0}'.format(line) for line in config_lines))
with open(out_file + '.json', 'wt') as json_handle:
LOG.info('Writing JSON to %s.', json_handle.name)
LOG.debug('Total JSON dict:\n%s', json_configs)
json.dump(json_configs, json_handle)
return json_configs | [
"def",
"write_variables",
"(",
"app_configs",
"=",
"None",
",",
"out_file",
"=",
"''",
",",
"git_short",
"=",
"''",
")",
":",
"generated",
"=",
"gogoutils",
".",
"Generator",
"(",
"*",
"gogoutils",
".",
"Parser",
"(",
"git_short",
")",
".",
"parse_url",
"(",
")",
",",
"formats",
"=",
"APP_FORMATS",
")",
"json_configs",
"=",
"{",
"}",
"for",
"env",
",",
"configs",
"in",
"app_configs",
".",
"items",
"(",
")",
":",
"if",
"env",
"!=",
"'pipeline'",
":",
"instance_profile",
"=",
"generated",
".",
"iam",
"(",
")",
"[",
"'profile'",
"]",
"rendered_configs",
"=",
"json",
".",
"loads",
"(",
"get_template",
"(",
"'configs/configs.json.j2'",
",",
"env",
"=",
"env",
",",
"app",
"=",
"generated",
".",
"app_name",
"(",
")",
",",
"profile",
"=",
"instance_profile",
",",
"formats",
"=",
"generated",
")",
")",
"json_configs",
"[",
"env",
"]",
"=",
"dict",
"(",
"DeepChainMap",
"(",
"configs",
",",
"rendered_configs",
")",
")",
"region_list",
"=",
"configs",
".",
"get",
"(",
"'regions'",
",",
"rendered_configs",
"[",
"'regions'",
"]",
")",
"json_configs",
"[",
"env",
"]",
"[",
"'regions'",
"]",
"=",
"region_list",
"# removes regions defined in templates but not configs.",
"for",
"region",
"in",
"region_list",
":",
"region_config",
"=",
"json_configs",
"[",
"env",
"]",
"[",
"region",
"]",
"json_configs",
"[",
"env",
"]",
"[",
"region",
"]",
"=",
"dict",
"(",
"DeepChainMap",
"(",
"region_config",
",",
"rendered_configs",
")",
")",
"else",
":",
"default_pipeline_json",
"=",
"json",
".",
"loads",
"(",
"get_template",
"(",
"'configs/pipeline.json.j2'",
",",
"formats",
"=",
"generated",
")",
")",
"json_configs",
"[",
"'pipeline'",
"]",
"=",
"dict",
"(",
"DeepChainMap",
"(",
"configs",
",",
"default_pipeline_json",
")",
")",
"LOG",
".",
"debug",
"(",
"'Compiled configs:\\n%s'",
",",
"pformat",
"(",
"json_configs",
")",
")",
"config_lines",
"=",
"convert_ini",
"(",
"json_configs",
")",
"with",
"open",
"(",
"out_file",
",",
"'at'",
")",
"as",
"jenkins_vars",
":",
"LOG",
".",
"info",
"(",
"'Appending variables to %s.'",
",",
"out_file",
")",
"jenkins_vars",
".",
"write",
"(",
"'\\n'",
".",
"join",
"(",
"config_lines",
")",
")",
"with",
"open",
"(",
"out_file",
"+",
"'.exports'",
",",
"'wt'",
")",
"as",
"export_vars",
":",
"LOG",
".",
"info",
"(",
"'Writing sourceable variables to %s.'",
",",
"export_vars",
".",
"name",
")",
"export_vars",
".",
"write",
"(",
"'\\n'",
".",
"join",
"(",
"'export {0}'",
".",
"format",
"(",
"line",
")",
"for",
"line",
"in",
"config_lines",
")",
")",
"with",
"open",
"(",
"out_file",
"+",
"'.json'",
",",
"'wt'",
")",
"as",
"json_handle",
":",
"LOG",
".",
"info",
"(",
"'Writing JSON to %s.'",
",",
"json_handle",
".",
"name",
")",
"LOG",
".",
"debug",
"(",
"'Total JSON dict:\\n%s'",
",",
"json_configs",
")",
"json",
".",
"dump",
"(",
"json_configs",
",",
"json_handle",
")",
"return",
"json_configs"
] | Append _application.json_ configs to _out_file_, .exports, and .json.
Variables are written in INI style, e.g. UPPER_CASE=value. The .exports file
contains 'export' prepended to each line for easy sourcing. The .json file
is a minified representation of the combined configurations.
Args:
app_configs (dict): Environment configurations from _application.json_
files, e.g. {'dev': {'elb': {'subnet_purpose': 'internal'}}}.
out_file (str): Name of INI file to append to.
git_short (str): Short name of Git repository, e.g. forrest/core.
Returns:
dict: Configuration equivalent to the JSON output. | [
"Append",
"_application",
".",
"json_",
"configs",
"to",
"_out_file_",
".",
"exports",
"and",
".",
"json",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/configs/outputs.py#L66-L122 |
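A sketch for write_variables; it renders Foremast's bundled Jinja templates and writes out_file plus .exports and .json siblings, so the scratch path below is an assumption and git_short mirrors the docstring's forrest/core example:
from foremast.configs.outputs import write_variables
configs = {'dev': {'elb': {'subnet_purpose': 'internal'}}, 'pipeline': {}}
rendered = write_variables(app_configs=configs,
                           out_file='/tmp/foremast.properties',
                           git_short='forrest/core')
# Side effects: /tmp/foremast.properties is appended; .exports and .json are written.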
6,038 | foremast/foremast | src/foremast/utils/get_sns_subscriptions.py | get_sns_subscriptions | def get_sns_subscriptions(app_name, env, region):
"""List SNS lambda subscriptions.
Returns:
list: List of Lambda subscribed SNS ARNs.
"""
session = boto3.Session(profile_name=env, region_name=region)
sns_client = session.client('sns')
lambda_alias_arn = get_lambda_alias_arn(app=app_name, account=env, region=region)
lambda_subscriptions = []
subscriptions = sns_client.list_subscriptions()
for subscription in subscriptions['Subscriptions']:
if subscription['Protocol'] == "lambda" and subscription['Endpoint'] == lambda_alias_arn:
lambda_subscriptions.append(subscription['SubscriptionArn'])
if not lambda_subscriptions:
LOG.debug('SNS subscription for function %s not found', lambda_alias_arn)
return lambda_subscriptions | python | def get_sns_subscriptions(app_name, env, region):
"""List SNS lambda subscriptions.
Returns:
list: List of Lambda subscribed SNS ARNs.
"""
session = boto3.Session(profile_name=env, region_name=region)
sns_client = session.client('sns')
lambda_alias_arn = get_lambda_alias_arn(app=app_name, account=env, region=region)
lambda_subscriptions = []
subscriptions = sns_client.list_subscriptions()
for subscription in subscriptions['Subscriptions']:
if subscription['Protocol'] == "lambda" and subscription['Endpoint'] == lambda_alias_arn:
lambda_subscriptions.append(subscription['SubscriptionArn'])
if not lambda_subscriptions:
LOG.debug('SNS subscription for function %s not found', lambda_alias_arn)
return lambda_subscriptions | [
"def",
"get_sns_subscriptions",
"(",
"app_name",
",",
"env",
",",
"region",
")",
":",
"session",
"=",
"boto3",
".",
"Session",
"(",
"profile_name",
"=",
"env",
",",
"region_name",
"=",
"region",
")",
"sns_client",
"=",
"session",
".",
"client",
"(",
"'sns'",
")",
"lambda_alias_arn",
"=",
"get_lambda_alias_arn",
"(",
"app",
"=",
"app_name",
",",
"account",
"=",
"env",
",",
"region",
"=",
"region",
")",
"lambda_subscriptions",
"=",
"[",
"]",
"subscriptions",
"=",
"sns_client",
".",
"list_subscriptions",
"(",
")",
"for",
"subscription",
"in",
"subscriptions",
"[",
"'Subscriptions'",
"]",
":",
"if",
"subscription",
"[",
"'Protocol'",
"]",
"==",
"\"lambda\"",
"and",
"subscription",
"[",
"'Endpoint'",
"]",
"==",
"lambda_alias_arn",
":",
"lambda_subscriptions",
".",
"append",
"(",
"subscription",
"[",
"'SubscriptionArn'",
"]",
")",
"if",
"not",
"lambda_subscriptions",
":",
"LOG",
".",
"debug",
"(",
"'SNS subscription for function %s not found'",
",",
"lambda_alias_arn",
")",
"return",
"lambda_subscriptions"
] | List SNS lambda subscriptions.
Returns:
list: List of Lambda subscribed SNS ARNs. | [
"List",
"SNS",
"lambda",
"subscriptions",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/get_sns_subscriptions.py#L11-L33 |
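Calling get_sns_subscriptions is a single line, but the boto3.Session(profile_name=env) coupling means a local AWS profile named after the environment must exist; note too that list_subscriptions() is read once, so subscriptions beyond the first page are never inspected:
from foremast.utils.get_sns_subscriptions import get_sns_subscriptions
# Assumes an AWS profile literally named 'dev' and a deployed Lambda alias.
arns = get_sns_subscriptions(app_name='myapp', env='dev', region='us-east-1')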
6,039 | foremast/foremast | src/foremast/awslambda/cloudwatch_log_event/destroy_cloudwatch_log_event/destroy_cloudwatch_log_event.py | destroy_cloudwatch_log_event | def destroy_cloudwatch_log_event(app='', env='dev', region=''):
"""Destroy Cloudwatch log event.
Args:
app (str): Spinnaker Application name.
env (str): Deployment environment.
region (str): AWS region.
Returns:
bool: True upon successful completion.
"""
session = boto3.Session(profile_name=env, region_name=region)
cloudwatch_client = session.client('logs')
# FIXME: see below
# TODO: Log group name is required, where do we get it if it is not in application-master-env.json?
cloudwatch_client.delete_subscription_filter(logGroupName='/aws/lambda/awslimitchecker', filterName=app)
return True | python | def destroy_cloudwatch_log_event(app='', env='dev', region=''):
"""Destroy Cloudwatch log event.
Args:
app (str): Spinnaker Application name.
env (str): Deployment environment.
region (str): AWS region.
Returns:
bool: True upon successful completion.
"""
session = boto3.Session(profile_name=env, region_name=region)
cloudwatch_client = session.client('logs')
# FIXME: see below
# TODO: Log group name is required, where do we get it if it is not in application-master-env.json?
cloudwatch_client.delete_subscription_filter(logGroupName='/aws/lambda/awslimitchecker', filterName=app)
return True | [
"def",
"destroy_cloudwatch_log_event",
"(",
"app",
"=",
"''",
",",
"env",
"=",
"'dev'",
",",
"region",
"=",
"''",
")",
":",
"session",
"=",
"boto3",
".",
"Session",
"(",
"profile_name",
"=",
"env",
",",
"region_name",
"=",
"region",
")",
"cloudwatch_client",
"=",
"session",
".",
"client",
"(",
"'logs'",
")",
"# FIXME: see below",
"# TODO: Log group name is required, where do we get it if it is not in application-master-env.json?",
"cloudwatch_client",
".",
"delete_subscription_filter",
"(",
"logGroupName",
"=",
"'/aws/lambda/awslimitchecker'",
",",
"filterName",
"=",
"app",
")",
"return",
"True"
] | Destroy Cloudwatch log event.
Args:
app (str): Spinnaker Application name.
env (str): Deployment environment.
region (str): AWS region.
Returns:
bool: True upon successful completion. | [
"Destroy",
"Cloudwatch",
"log",
"event",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/cloudwatch_log_event/destroy_cloudwatch_log_event/destroy_cloudwatch_log_event.py#L24-L42 |
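A matching sketch for destroy_cloudwatch_log_event; as the FIXME/TODO in the record admits, the log group name is hard-coded, so the call only removes a subscription filter from /aws/lambda/awslimitchecker whatever app is passed:
# The import path below mirrors the record's file path; a shorter re-export may exist.
from foremast.awslambda.cloudwatch_log_event.destroy_cloudwatch_log_event.destroy_cloudwatch_log_event import destroy_cloudwatch_log_event
destroy_cloudwatch_log_event(app='myapp', env='dev', region='us-east-1')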
6,040 | foremast/foremast | src/foremast/app/create_app.py | SpinnakerApp.get_accounts | def get_accounts(self, provider='aws'):
"""Get Accounts added to Spinnaker.
Args:
provider (str): What provider to find accounts for.
Returns:
list: list of dicts of Spinnaker credentials matching _provider_.
Raises:
AssertionError: Failure getting accounts from Spinnaker.
"""
url = '{gate}/credentials'.format(gate=API_URL)
response = requests.get(url, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT)
assert response.ok, 'Failed to get accounts: {0}'.format(response.text)
all_accounts = response.json()
self.log.debug('Accounts in Spinnaker:\n%s', all_accounts)
filtered_accounts = []
for account in all_accounts:
if account['type'] == provider:
filtered_accounts.append(account)
if not filtered_accounts:
raise ForemastError('No Accounts matching {0}.'.format(provider))
return filtered_accounts | python | def get_accounts(self, provider='aws'):
"""Get Accounts added to Spinnaker.
Args:
provider (str): What provider to find accounts for.
Returns:
list: list of dicts of Spinnaker credentials matching _provider_.
Raises:
AssertionError: Failure getting accounts from Spinnaker.
"""
url = '{gate}/credentials'.format(gate=API_URL)
response = requests.get(url, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT)
assert response.ok, 'Failed to get accounts: {0}'.format(response.text)
all_accounts = response.json()
self.log.debug('Accounts in Spinnaker:\n%s', all_accounts)
filtered_accounts = []
for account in all_accounts:
if account['type'] == provider:
filtered_accounts.append(account)
if not filtered_accounts:
raise ForemastError('No Accounts matching {0}.'.format(provider))
return filtered_accounts | [
"def",
"get_accounts",
"(",
"self",
",",
"provider",
"=",
"'aws'",
")",
":",
"url",
"=",
"'{gate}/credentials'",
".",
"format",
"(",
"gate",
"=",
"API_URL",
")",
"response",
"=",
"requests",
".",
"get",
"(",
"url",
",",
"verify",
"=",
"GATE_CA_BUNDLE",
",",
"cert",
"=",
"GATE_CLIENT_CERT",
")",
"assert",
"response",
".",
"ok",
",",
"'Failed to get accounts: {0}'",
".",
"format",
"(",
"response",
".",
"text",
")",
"all_accounts",
"=",
"response",
".",
"json",
"(",
")",
"self",
".",
"log",
".",
"debug",
"(",
"'Accounts in Spinnaker:\\n%s'",
",",
"all_accounts",
")",
"filtered_accounts",
"=",
"[",
"]",
"for",
"account",
"in",
"all_accounts",
":",
"if",
"account",
"[",
"'type'",
"]",
"==",
"provider",
":",
"filtered_accounts",
".",
"append",
"(",
"account",
")",
"if",
"not",
"filtered_accounts",
":",
"raise",
"ForemastError",
"(",
"'No Accounts matching {0}.'",
".",
"format",
"(",
"provider",
")",
")",
"return",
"filtered_accounts"
] | Get Accounts added to Spinnaker.
Args:
provider (str): What provider to find accounts for.
Returns:
list: list of dicts of Spinnaker credentials matching _provider_.
Raises:
AssertionError: Failure getting accounts from Spinnaker. | [
"Get",
"Accounts",
"added",
"to",
"Spinnaker",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/app/create_app.py#L55-L82 |
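get_accounts queries Gate's /credentials endpoint and filters client-side; a sketch against an existing SpinnakerApp instance (construction happens outside this record) might look like:
# Hypothetical: `spinnaker_app` is an already-constructed SpinnakerApp.
aws_accounts = spinnaker_app.get_accounts()  # provider defaults to 'aws'
# Any provider string with no matching credentials raises ForemastError.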
6,041 | foremast/foremast | src/foremast/app/create_app.py | SpinnakerApp.create_app | def create_app(self):
"""Send a POST to spinnaker to create a new application with class variables.
Raises:
AssertionError: Application creation failed.
"""
self.appinfo['accounts'] = self.get_accounts()
self.log.debug('Pipeline Config\n%s', pformat(self.pipeline_config))
self.log.debug('App info:\n%s', pformat(self.appinfo))
jsondata = self.retrieve_template()
wait_for_task(jsondata)
self.log.info("Successfully created %s application", self.appname)
return jsondata | python | def create_app(self):
"""Send a POST to spinnaker to create a new application with class variables.
Raises:
AssertionError: Application creation failed.
"""
self.appinfo['accounts'] = self.get_accounts()
self.log.debug('Pipeline Config\n%s', pformat(self.pipeline_config))
self.log.debug('App info:\n%s', pformat(self.appinfo))
jsondata = self.retrieve_template()
wait_for_task(jsondata)
self.log.info("Successfully created %s application", self.appname)
return jsondata | [
"def",
"create_app",
"(",
"self",
")",
":",
"self",
".",
"appinfo",
"[",
"'accounts'",
"]",
"=",
"self",
".",
"get_accounts",
"(",
")",
"self",
".",
"log",
".",
"debug",
"(",
"'Pipeline Config\\n%s'",
",",
"pformat",
"(",
"self",
".",
"pipeline_config",
")",
")",
"self",
".",
"log",
".",
"debug",
"(",
"'App info:\\n%s'",
",",
"pformat",
"(",
"self",
".",
"appinfo",
")",
")",
"jsondata",
"=",
"self",
".",
"retrieve_template",
"(",
")",
"wait_for_task",
"(",
"jsondata",
")",
"self",
".",
"log",
".",
"info",
"(",
"\"Successfully created %s application\"",
",",
"self",
".",
"appname",
")",
"return",
"jsondata"
] | Send a POST to spinnaker to create a new application with class variables.
Raises:
AssertionError: Application creation failed. | [
"Send",
"a",
"POST",
"to",
"spinnaker",
"to",
"create",
"a",
"new",
"application",
"with",
"class",
"variables",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/app/create_app.py#L84-L97 |
6,042 | foremast/foremast | src/foremast/app/create_app.py | SpinnakerApp.retrieve_template | def retrieve_template(self):
"""Sets the instance links with pipeline_configs and then renders template files
Returns:
jsondata: A JSON object containing templates
"""
links = self.retrieve_instance_links()
self.log.debug('Links is \n%s', pformat(links))
self.pipeline_config['instance_links'].update(links)
jsondata = get_template(
template_file='infrastructure/app_data.json.j2',
appinfo=self.appinfo,
pipeline_config=self.pipeline_config,
formats=self.generated,
run_as_user=DEFAULT_RUN_AS_USER)
self.log.debug('jsondata is %s', pformat(jsondata))
return jsondata | python | def retrieve_template(self):
"""Sets the instance links with pipeline_configs and then renders template files
Returns:
jsondata: A JSON object containing templates
"""
links = self.retrieve_instance_links()
self.log.debug('Links is \n%s', pformat(links))
self.pipeline_config['instance_links'].update(links)
jsondata = get_template(
template_file='infrastructure/app_data.json.j2',
appinfo=self.appinfo,
pipeline_config=self.pipeline_config,
formats=self.generated,
run_as_user=DEFAULT_RUN_AS_USER)
self.log.debug('jsondata is %s', pformat(jsondata))
return jsondata | [
"def",
"retrieve_template",
"(",
"self",
")",
":",
"links",
"=",
"self",
".",
"retrieve_instance_links",
"(",
")",
"self",
".",
"log",
".",
"debug",
"(",
"'Links is \\n%s'",
",",
"pformat",
"(",
"links",
")",
")",
"self",
".",
"pipeline_config",
"[",
"'instance_links'",
"]",
".",
"update",
"(",
"links",
")",
"jsondata",
"=",
"get_template",
"(",
"template_file",
"=",
"'infrastructure/app_data.json.j2'",
",",
"appinfo",
"=",
"self",
".",
"appinfo",
",",
"pipeline_config",
"=",
"self",
".",
"pipeline_config",
",",
"formats",
"=",
"self",
".",
"generated",
",",
"run_as_user",
"=",
"DEFAULT_RUN_AS_USER",
")",
"self",
".",
"log",
".",
"debug",
"(",
"'jsondata is %s'",
",",
"pformat",
"(",
"jsondata",
")",
")",
"return",
"jsondata"
] | Sets the instance links with pipeline_configs and then renders template files
Returns:
jsondata: A JSON object containing templates | [
"Sets",
"the",
"instance",
"links",
"with",
"pipeline_configs",
"and",
"then",
"renders",
"template",
"files"
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/app/create_app.py#L99-L115 |
6,043 | foremast/foremast | src/foremast/app/create_app.py | SpinnakerApp.retrieve_instance_links | def retrieve_instance_links(self):
"""Appends on existing instance links
Returns:
instance_links: A dictionary containing all the instance links in LINKS and not in pipeline_config
"""
instance_links = {}
self.log.debug("LINKS IS %s", LINKS)
for key, value in LINKS.items():
if value not in self.pipeline_config['instance_links'].values():
instance_links[key] = value
return instance_links | python | def retrieve_instance_links(self):
"""Appends on existing instance links
Returns:
instance_links: A dictionary containing all the instance links in LINKS and not in pipeline_config
"""
instance_links = {}
self.log.debug("LINKS IS %s", LINKS)
for key, value in LINKS.items():
if value not in self.pipeline_config['instance_links'].values():
instance_links[key] = value
return instance_links | [
"def",
"retrieve_instance_links",
"(",
"self",
")",
":",
"instance_links",
"=",
"{",
"}",
"self",
".",
"log",
".",
"debug",
"(",
"\"LINKS IS %s\"",
",",
"LINKS",
")",
"for",
"key",
",",
"value",
"in",
"LINKS",
".",
"items",
"(",
")",
":",
"if",
"value",
"not",
"in",
"self",
".",
"pipeline_config",
"[",
"'instance_links'",
"]",
".",
"values",
"(",
")",
":",
"instance_links",
"[",
"key",
"]",
"=",
"value",
"return",
"instance_links"
] | Appends to existing instance links
Returns:
instance_links: A dictionary containing all the instance links in LINKS and not in pipeline_config | [
"Appends",
"on",
"existing",
"instance",
"links"
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/app/create_app.py#L117-L128 |
6,044 | foremast/foremast | src/foremast/utils/get_cloudwatch_event_rule.py | get_cloudwatch_event_rule | def get_cloudwatch_event_rule(app_name, account, region):
"""Get CloudWatch Event rule names."""
session = boto3.Session(profile_name=account, region_name=region)
cloudwatch_client = session.client('events')
lambda_alias_arn = get_lambda_alias_arn(app=app_name, account=account, region=region)
rule_names = cloudwatch_client.list_rule_names_by_target(TargetArn=lambda_alias_arn)
if rule_names['RuleNames']:
all_rules = rule_names['RuleNames']
else:
LOG.debug("No event rules found")
all_rules = []
return all_rules | python | def get_cloudwatch_event_rule(app_name, account, region):
"""Get CloudWatch Event rule names."""
session = boto3.Session(profile_name=account, region_name=region)
cloudwatch_client = session.client('events')
lambda_alias_arn = get_lambda_alias_arn(app=app_name, account=account, region=region)
rule_names = cloudwatch_client.list_rule_names_by_target(TargetArn=lambda_alias_arn)
if rule_names['RuleNames']:
all_rules = rule_names['RuleNames']
else:
LOG.debug("No event rules found")
all_rules = []
return all_rules | [
"def",
"get_cloudwatch_event_rule",
"(",
"app_name",
",",
"account",
",",
"region",
")",
":",
"session",
"=",
"boto3",
".",
"Session",
"(",
"profile_name",
"=",
"account",
",",
"region_name",
"=",
"region",
")",
"cloudwatch_client",
"=",
"session",
".",
"client",
"(",
"'events'",
")",
"lambda_alias_arn",
"=",
"get_lambda_alias_arn",
"(",
"app",
"=",
"app_name",
",",
"account",
"=",
"account",
",",
"region",
"=",
"region",
")",
"rule_names",
"=",
"cloudwatch_client",
".",
"list_rule_names_by_target",
"(",
"TargetArn",
"=",
"lambda_alias_arn",
")",
"if",
"rule_names",
"[",
"'RuleNames'",
"]",
":",
"all_rules",
"=",
"rule_names",
"[",
"'RuleNames'",
"]",
"else",
":",
"LOG",
".",
"debug",
"(",
"\"No event rules found\"",
")",
"all_rules",
"=",
"[",
"]",
"return",
"all_rules"
] | Get CloudWatch Event rule names. | [
"Get",
"CloudWatch",
"Event",
"rule",
"names",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/get_cloudwatch_event_rule.py#L11-L24 |
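Like the SNS helper earlier, get_cloudwatch_event_rule reduces to one hedged call that assumes an AWS profile named after the account and an existing Lambda alias:
from foremast.utils.get_cloudwatch_event_rule import get_cloudwatch_event_rule
rules = get_cloudwatch_event_rule(app_name='myapp', account='dev', region='us-east-1')
# Returns [] when the alias is not a target of any CloudWatch Events rule.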
6,045 | foremast/foremast | src/foremast/s3/s3deploy.py | S3Deployment.setup_pathing | def setup_pathing(self):
"""Format pathing for S3 deployments."""
self.s3_version_uri = self._path_formatter(self.version)
self.s3_latest_uri = self._path_formatter("LATEST")
self.s3_canary_uri = self._path_formatter("CANARY")
self.s3_alpha_uri = self._path_formatter("ALPHA")
self.s3_mirror_uri = self._path_formatter("MIRROR") | python | def setup_pathing(self):
"""Format pathing for S3 deployments."""
self.s3_version_uri = self._path_formatter(self.version)
self.s3_latest_uri = self._path_formatter("LATEST")
self.s3_canary_uri = self._path_formatter("CANARY")
self.s3_alpha_uri = self._path_formatter("ALPHA")
self.s3_mirror_uri = self._path_formatter("MIRROR") | [
"def",
"setup_pathing",
"(",
"self",
")",
":",
"self",
".",
"s3_version_uri",
"=",
"self",
".",
"_path_formatter",
"(",
"self",
".",
"version",
")",
"self",
".",
"s3_latest_uri",
"=",
"self",
".",
"_path_formatter",
"(",
"\"LATEST\"",
")",
"self",
".",
"s3_canary_uri",
"=",
"self",
".",
"_path_formatter",
"(",
"\"CANARY\"",
")",
"self",
".",
"s3_alpha_uri",
"=",
"self",
".",
"_path_formatter",
"(",
"\"ALPHA\"",
")",
"self",
".",
"s3_mirror_uri",
"=",
"self",
".",
"_path_formatter",
"(",
"\"MIRROR\"",
")"
] | Format pathing for S3 deployments. | [
"Format",
"pathing",
"for",
"S3",
"deployments",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/s3/s3deploy.py#L69-L75 |
6,046 | foremast/foremast | src/foremast/s3/s3deploy.py | S3Deployment._path_formatter | def _path_formatter(self, suffix):
"""Format the s3 path properly.
Args:
suffix (str): suffix to add on to an s3 path
Returns:
str: formatted path
"""
if suffix.lower() == "mirror":
path_items = [self.bucket, self.s3path]
else:
path_items = [self.bucket, self.s3path, suffix]
path = '/'.join(path_items)
s3_format = "s3://{}"
formatted_path = path.replace('//', '/') # removes configuration errors
full_path = s3_format.format(formatted_path)
return full_path | python | def _path_formatter(self, suffix):
"""Format the s3 path properly.
Args:
suffix (str): suffix to add on to an s3 path
Returns:
str: formatted path
"""
if suffix.lower() == "mirror":
path_items = [self.bucket, self.s3path]
else:
path_items = [self.bucket, self.s3path, suffix]
path = '/'.join(path_items)
s3_format = "s3://{}"
formatted_path = path.replace('//', '/') # removes configuration errors
full_path = s3_format.format(formatted_path)
return full_path | [
"def",
"_path_formatter",
"(",
"self",
",",
"suffix",
")",
":",
"if",
"suffix",
".",
"lower",
"(",
")",
"==",
"\"mirror\"",
":",
"path_items",
"=",
"[",
"self",
".",
"bucket",
",",
"self",
".",
"s3path",
"]",
"else",
":",
"path_items",
"=",
"[",
"self",
".",
"bucket",
",",
"self",
".",
"s3path",
",",
"suffix",
"]",
"path",
"=",
"'/'",
".",
"join",
"(",
"path_items",
")",
"s3_format",
"=",
"\"s3://{}\"",
"formatted_path",
"=",
"path",
".",
"replace",
"(",
"'//'",
",",
"'/'",
")",
"# removes configuration errors",
"full_path",
"=",
"s3_format",
".",
"format",
"(",
"formatted_path",
")",
"return",
"full_path"
] | Format the s3 path properly.
Args:
suffix (str): suffix to add on to an s3 path
Returns:
str: formatted path | [
"Format",
"the",
"s3",
"path",
"properly",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/s3/s3deploy.py#L77-L96 |
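Because _path_formatter only joins and cleans strings, its outputs can be checked exactly; a sketch that fakes the instance state (bypassing __init__ purely for illustration):
from foremast.s3.s3deploy import S3Deployment
deployer = S3Deployment.__new__(S3Deployment)  # skip __init__ for the sketch
deployer.bucket, deployer.s3path = 'mybucket', 'myapp/web'
assert deployer._path_formatter('v1.2.3') == 's3://mybucket/myapp/web/v1.2.3'
assert deployer._path_formatter('MIRROR') == 's3://mybucket/myapp/web'
# The '//' cleanup runs before the 's3://' prefix is added, so the URI scheme
# itself is never collapsed by the replace call.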
6,047 | foremast/foremast | src/foremast/s3/s3deploy.py | S3Deployment.upload_artifacts | def upload_artifacts(self):
"""Upload artifacts to S3 and copy to correct path depending on strategy."""
deploy_strategy = self.properties["deploy_strategy"]
mirror = False
if deploy_strategy == "mirror":
mirror = True
self._upload_artifacts_to_path(mirror=mirror)
if deploy_strategy == "highlander":
self._sync_to_uri(self.s3_latest_uri)
elif deploy_strategy == "canary":
self._sync_to_uri(self.s3_canary_uri)
elif deploy_strategy == "alpha":
self._sync_to_uri(self.s3_alpha_uri)
elif deploy_strategy == "mirror":
pass # Nothing extra needed for mirror deployments
else:
raise NotImplementedError | python | def upload_artifacts(self):
"""Upload artifacts to S3 and copy to correct path depending on strategy."""
deploy_strategy = self.properties["deploy_strategy"]
mirror = False
if deploy_strategy == "mirror":
mirror = True
self._upload_artifacts_to_path(mirror=mirror)
if deploy_strategy == "highlander":
self._sync_to_uri(self.s3_latest_uri)
elif deploy_strategy == "canary":
self._sync_to_uri(self.s3_canary_uri)
elif deploy_strategy == "alpha":
self._sync_to_uri(self.s3_alpha_uri)
elif deploy_strategy == "mirror":
pass # Nothing extra needed for mirror deployments
else:
raise NotImplementedError | [
"def",
"upload_artifacts",
"(",
"self",
")",
":",
"deploy_strategy",
"=",
"self",
".",
"properties",
"[",
"\"deploy_strategy\"",
"]",
"mirror",
"=",
"False",
"if",
"deploy_strategy",
"==",
"\"mirror\"",
":",
"mirror",
"=",
"True",
"self",
".",
"_upload_artifacts_to_path",
"(",
"mirror",
"=",
"mirror",
")",
"if",
"deploy_strategy",
"==",
"\"highlander\"",
":",
"self",
".",
"_sync_to_uri",
"(",
"self",
".",
"s3_latest_uri",
")",
"elif",
"deploy_strategy",
"==",
"\"canary\"",
":",
"self",
".",
"_sync_to_uri",
"(",
"self",
".",
"s3_canary_uri",
")",
"elif",
"deploy_strategy",
"==",
"\"alpha\"",
":",
"self",
".",
"_sync_to_uri",
"(",
"self",
".",
"s3_alpha_uri",
")",
"elif",
"deploy_strategy",
"==",
"\"mirror\"",
":",
"pass",
"# Nothing extra needed for mirror deployments",
"else",
":",
"raise",
"NotImplementedError"
] | Upload artifacts to S3 and copy to correct path depending on strategy. | [
"Upload",
"artifacts",
"to",
"S3",
"and",
"copy",
"to",
"correct",
"path",
"depending",
"on",
"strategy",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/s3/s3deploy.py#L98-L116 |
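upload_artifacts is the strategy dispatcher: every strategy uploads first (mirror flattens the destination), all but mirror then sync the fresh version to a stage URI, and an unknown strategy raises NotImplementedError. With a hypothetical deployer built elsewhere:
# deploy_strategy -> post-upload sync target:
#   highlander -> LATEST, canary -> CANARY, alpha -> ALPHA, mirror -> none
deployer.properties['deploy_strategy'] = 'canary'
deployer.upload_artifacts()  # uploads the version, then syncs the CANARY URI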
6,048 | foremast/foremast | src/foremast/s3/s3deploy.py | S3Deployment.promote_artifacts | def promote_artifacts(self, promote_stage='latest'):
"""Promote artifact version to dest.
Args:
promote_stage (string): Stage that is being promoted
"""
if promote_stage.lower() == 'alpha':
self._sync_to_uri(self.s3_canary_uri)
elif promote_stage.lower() == 'canary':
self._sync_to_uri(self.s3_latest_uri)
else:
self._sync_to_uri(self.s3_latest_uri) | python | def promote_artifacts(self, promote_stage='latest'):
"""Promote artifact version to dest.
Args:
promote_stage (string): Stage that is being promoted
"""
if promote_stage.lower() == 'alpha':
self._sync_to_uri(self.s3_canary_uri)
elif promote_stage.lower() == 'canary':
self._sync_to_uri(self.s3_latest_uri)
else:
self._sync_to_uri(self.s3_latest_uri) | [
"def",
"promote_artifacts",
"(",
"self",
",",
"promote_stage",
"=",
"'latest'",
")",
":",
"if",
"promote_stage",
".",
"lower",
"(",
")",
"==",
"'alpha'",
":",
"self",
".",
"_sync_to_uri",
"(",
"self",
".",
"s3_canary_uri",
")",
"elif",
"promote_stage",
".",
"lower",
"(",
")",
"==",
"'canary'",
":",
"self",
".",
"_sync_to_uri",
"(",
"self",
".",
"s3_latest_uri",
")",
"else",
":",
"self",
".",
"_sync_to_uri",
"(",
"self",
".",
"s3_latest_uri",
")"
] | Promote artifact version to dest.
Args:
promote_stage (string): Stage that is being promoted | [
"Promote",
"artifact",
"version",
"to",
"dest",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/s3/s3deploy.py#L118-L129 |
6,049 | foremast/foremast | src/foremast/s3/s3deploy.py | S3Deployment._get_upload_cmd | def _get_upload_cmd(self, mirror=False):
"""Generate the S3 CLI upload command
Args:
mirror (bool): If true, uses a flat directory structure instead of nesting under a version.
Returns:
str: The full CLI command to run.
"""
if mirror:
dest_uri = self.s3_mirror_uri
else:
dest_uri = self.s3_version_uri
cmd = 'aws s3 sync {} {} --delete --exact-timestamps --profile {}'.format(self.artifact_path,
dest_uri, self.env)
return cmd | python | def _get_upload_cmd(self, mirror=False):
"""Generate the S3 CLI upload command
Args:
mirror (bool): If true, uses a flat directory structure instead of nesting under a version.
Returns:
str: The full CLI command to run.
"""
if mirror:
dest_uri = self.s3_mirror_uri
else:
dest_uri = self.s3_version_uri
cmd = 'aws s3 sync {} {} --delete --exact-timestamps --profile {}'.format(self.artifact_path,
dest_uri, self.env)
return cmd | [
"def",
"_get_upload_cmd",
"(",
"self",
",",
"mirror",
"=",
"False",
")",
":",
"if",
"mirror",
":",
"dest_uri",
"=",
"self",
".",
"s3_mirror_uri",
"else",
":",
"dest_uri",
"=",
"self",
".",
"s3_version_uri",
"cmd",
"=",
"'aws s3 sync {} {} --delete --exact-timestamps --profile {}'",
".",
"format",
"(",
"self",
".",
"artifact_path",
",",
"dest_uri",
",",
"self",
".",
"env",
")",
"return",
"cmd"
] | Generate the S3 CLI upload command
Args:
mirror (bool): If true, uses a flat directory structure instead of nesting under a version.
Returns:
str: The full CLI command to run. | [
"Generate",
"the",
"S3",
"CLI",
"upload",
"command"
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/s3/s3deploy.py#L131-L147 |
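A quick sketch of the command string this helper produces; the attribute values below are illustrative assumptions, not taken from the record:
artifact_path = 'target/site'                        # illustrative
s3_version_uri = 's3://example-bucket/myapp/1.0.0'   # illustrative
env = 'dev'                                          # illustrative
cmd = 'aws s3 sync {} {} --delete --exact-timestamps --profile {}'.format(
    artifact_path, s3_version_uri, env)
# -> aws s3 sync target/site s3://example-bucket/myapp/1.0.0 --delete --exact-timestamps --profile dev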
6,050 | foremast/foremast | src/foremast/s3/s3deploy.py | S3Deployment._upload_artifacts_to_path | def _upload_artifacts_to_path(self, mirror=False):
"""Recursively upload directory contents to S3.
Args:
mirror (bool): If true, uses a flat directory structure instead of nesting under a version.
"""
if not self.artifact_path or not os.listdir(self.artifact_path):
raise S3ArtifactNotFound
uploaded = False
if self.s3props.get("content_metadata"):
LOG.info("Uploading in multiple parts to set metadata")
uploaded = self.content_metadata_uploads(mirror=mirror)
if not uploaded:
cmd = self._get_upload_cmd(mirror=mirror)
result = subprocess.run(cmd, check=True, shell=True, stdout=subprocess.PIPE)
LOG.debug("Upload Command Ouput: %s", result.stdout)
LOG.info("Uploaded artifacts to %s bucket", self.bucket) | python | def _upload_artifacts_to_path(self, mirror=False):
"""Recursively upload directory contents to S3.
Args:
mirror (bool): If true, uses a flat directory structure instead of nesting under a version.
"""
if not self.artifact_path or not os.listdir(self.artifact_path):
raise S3ArtifactNotFound
uploaded = False
if self.s3props.get("content_metadata"):
LOG.info("Uploading in multiple parts to set metadata")
uploaded = self.content_metadata_uploads(mirror=mirror)
if not uploaded:
cmd = self._get_upload_cmd(mirror=mirror)
result = subprocess.run(cmd, check=True, shell=True, stdout=subprocess.PIPE)
LOG.debug("Upload Command Ouput: %s", result.stdout)
LOG.info("Uploaded artifacts to %s bucket", self.bucket) | [
"def",
"_upload_artifacts_to_path",
"(",
"self",
",",
"mirror",
"=",
"False",
")",
":",
"if",
"not",
"os",
".",
"listdir",
"(",
"self",
".",
"artifact_path",
")",
"or",
"not",
"self",
".",
"artifact_path",
":",
"raise",
"S3ArtifactNotFound",
"uploaded",
"=",
"False",
"if",
"self",
".",
"s3props",
".",
"get",
"(",
"\"content_metadata\"",
")",
":",
"LOG",
".",
"info",
"(",
"\"Uploading in multiple parts to set metadata\"",
")",
"uploaded",
"=",
"self",
".",
"content_metadata_uploads",
"(",
"mirror",
"=",
"mirror",
")",
"if",
"not",
"uploaded",
":",
"cmd",
"=",
"self",
".",
"_get_upload_cmd",
"(",
"mirror",
"=",
"mirror",
")",
"result",
"=",
"subprocess",
".",
"run",
"(",
"cmd",
",",
"check",
"=",
"True",
",",
"shell",
"=",
"True",
",",
"stdout",
"=",
"subprocess",
".",
"PIPE",
")",
"LOG",
".",
"debug",
"(",
"\"Upload Command Ouput: %s\"",
",",
"result",
".",
"stdout",
")",
"LOG",
".",
"info",
"(",
"\"Uploaded artifacts to %s bucket\"",
",",
"self",
".",
"bucket",
")"
] | Recursively upload directory contents to S3.
Args:
mirror (bool): If true, uses a flat directory structure instead of nesting under a version. | [
"Recursively",
"upload",
"directory",
"contents",
"to",
"S3",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/s3/s3deploy.py#L149-L168 |
6,051 | foremast/foremast | src/foremast/s3/s3deploy.py | S3Deployment.content_metadata_uploads | def content_metadata_uploads(self, mirror=False):
"""Finds all specified encoded directories and uploads in multiple parts,
setting metadata for objects.
Args:
mirror (bool): If true, uses a flat directory structure instead of nesting under a version.
Returns:
bool: True if uploaded
"""
excludes_str = ''
includes_cmds = []
cmd_base = self._get_upload_cmd(mirror=mirror)
for content in self.s3props.get('content_metadata'):
full_path = os.path.join(self.artifact_path, content['path'])
if not os.listdir(full_path):
raise S3ArtifactNotFound
excludes_str += '--exclude "{}/*" '.format(content['path'])
include_cmd = '{} --exclude "*" --include "{}/*"'.format(cmd_base, content['path'])
include_cmd += ' --content-encoding {} --metadata-directive REPLACE'.format(content['content-encoding'])
includes_cmds.append(include_cmd)
exclude_cmd = '{} {}'.format(cmd_base, excludes_str)
result = subprocess.run(exclude_cmd, check=True, shell=True, stdout=subprocess.PIPE)
LOG.info("Uploaded files without metadata with command: %s", exclude_cmd)
LOG.debug("Upload Command Output: %s", result.stdout)
for include_cmd in includes_cmds:
result = subprocess.run(include_cmd, check=True, shell=True, stdout=subprocess.PIPE)
LOG.info("Uploaded files with metadata with command: %s", include_cmd)
LOG.debug("Upload Command Output: %s", result.stdout)
return True | python | def content_metadata_uploads(self, mirror=False):
"""Finds all specified encoded directories and uploads in multiple parts,
setting metadata for objects.
Args:
mirror (bool): If true, uses a flat directory structure instead of nesting under a version.
Returns:
bool: True if uploaded
"""
excludes_str = ''
includes_cmds = []
cmd_base = self._get_upload_cmd(mirror=mirror)
for content in self.s3props.get('content_metadata'):
full_path = os.path.join(self.artifact_path, content['path'])
if not os.listdir(full_path):
raise S3ArtifactNotFound
excludes_str += '--exclude "{}/*" '.format(content['path'])
include_cmd = '{} --exclude "*" --include "{}/*"'.format(cmd_base, content['path'])
include_cmd += ' --content-encoding {} --metadata-directive REPLACE'.format(content['content-encoding'])
includes_cmds.append(include_cmd)
exclude_cmd = '{} {}'.format(cmd_base, excludes_str)
result = subprocess.run(exclude_cmd, check=True, shell=True, stdout=subprocess.PIPE)
LOG.info("Uploaded files without metadata with command: %s", exclude_cmd)
LOG.debug("Upload Command Output: %s", result.stdout)
for include_cmd in includes_cmds:
result = subprocess.run(include_cmd, check=True, shell=True, stdout=subprocess.PIPE)
LOG.info("Uploaded files with metadata with command: %s", include_cmd)
LOG.debug("Upload Command Output: %s", result.stdout)
return True | [
"def",
"content_metadata_uploads",
"(",
"self",
",",
"mirror",
"=",
"False",
")",
":",
"excludes_str",
"=",
"''",
"includes_cmds",
"=",
"[",
"]",
"cmd_base",
"=",
"self",
".",
"_get_upload_cmd",
"(",
"mirror",
"=",
"mirror",
")",
"for",
"content",
"in",
"self",
".",
"s3props",
".",
"get",
"(",
"'content_metadata'",
")",
":",
"full_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"artifact_path",
",",
"content",
"[",
"'path'",
"]",
")",
"if",
"not",
"os",
".",
"listdir",
"(",
"full_path",
")",
":",
"raise",
"S3ArtifactNotFound",
"excludes_str",
"+=",
"'--exclude \"{}/*\" '",
".",
"format",
"(",
"content",
"[",
"'path'",
"]",
")",
"include_cmd",
"=",
"'{} --exclude \"*\", --include \"{}/*\"'",
".",
"format",
"(",
"cmd_base",
",",
"content",
"[",
"'path'",
"]",
")",
"include_cmd",
"+=",
"' --content-encoding {} --metadata-directive REPLACE'",
".",
"format",
"(",
"content",
"[",
"'content-encoding'",
"]",
")",
"includes_cmds",
".",
"append",
"(",
"include_cmd",
")",
"exclude_cmd",
"=",
"'{} {}'",
".",
"format",
"(",
"cmd_base",
",",
"excludes_str",
")",
"result",
"=",
"subprocess",
".",
"run",
"(",
"exclude_cmd",
",",
"check",
"=",
"True",
",",
"shell",
"=",
"True",
",",
"stdout",
"=",
"subprocess",
".",
"PIPE",
")",
"LOG",
".",
"info",
"(",
"\"Uploaded files without metadata with command: %s\"",
",",
"exclude_cmd",
")",
"LOG",
".",
"debug",
"(",
"\"Upload Command Output: %s\"",
",",
"result",
".",
"stdout",
")",
"for",
"include_cmd",
"in",
"includes_cmds",
":",
"result",
"=",
"subprocess",
".",
"run",
"(",
"include_cmd",
",",
"check",
"=",
"True",
",",
"shell",
"=",
"True",
",",
"stdout",
"=",
"subprocess",
".",
"PIPE",
")",
"LOG",
".",
"info",
"(",
"\"Uploaded files with metadata with command: %s\"",
",",
"include_cmd",
")",
"LOG",
".",
"debug",
"(",
"\"Upload Command Output: %s\"",
",",
"result",
".",
"stdout",
")",
"return",
"True"
] | Finds all specified encoded directories and uploads in multiple parts,
setting metadata for objects.
Args:
mirror (bool): If true, uses a flat directory structure instead of nesting under a version.
Returns:
bool: True if uploaded | [
"Finds",
"all",
"specified",
"encoded",
"directories",
"and",
"uploads",
"in",
"multiple",
"parts",
"setting",
"metadata",
"for",
"objects",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/s3/s3deploy.py#L170-L204 |
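A minimal standalone sketch of the exclude/include split above; the content_metadata entry is a hypothetical example of what the S3 block in pipeline.json might contain:
content_metadata = [{'path': 'gzipped', 'content-encoding': 'gzip'}]  # hypothetical
cmd_base = 'aws s3 sync target/site s3://example-bucket/myapp/1.0.0 --delete --exact-timestamps --profile dev'
excludes_str = ''
includes_cmds = []
for content in content_metadata:
    # first pass skips the encoded paths entirely
    excludes_str += '--exclude "{}/*" '.format(content['path'])
    # second pass re-uploads only the encoded path with its Content-Encoding set
    include_cmd = '{} --exclude "*" --include "{}/*"'.format(cmd_base, content['path'])
    include_cmd += ' --content-encoding {} --metadata-directive REPLACE'.format(content['content-encoding'])
    includes_cmds.append(include_cmd)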
6,052 | foremast/foremast | src/foremast/s3/s3deploy.py | S3Deployment._sync_to_uri | def _sync_to_uri(self, uri):
"""Copy and sync versioned directory to uri in S3.
Args:
uri (str): S3 URI to sync version to.
"""
cmd_cp = 'aws s3 cp {} {} --recursive --profile {}'.format(self.s3_version_uri, uri, self.env)
# AWS CLI sync does not work as expected bucket to bucket with exact timestamp sync.
cmd_sync = 'aws s3 sync {} {} --delete --exact-timestamps --profile {}'.format(
self.s3_version_uri, uri, self.env)
cp_result = subprocess.run(cmd_cp, check=True, shell=True, stdout=subprocess.PIPE)
LOG.debug("Copy to %s before sync output: %s", uri, cp_result.stdout)
LOG.info("Copied version %s to %s", self.version, uri)
sync_result = subprocess.run(cmd_sync, check=True, shell=True, stdout=subprocess.PIPE)
LOG.debug("Sync to %s command output: %s", uri, sync_result.stdout)
LOG.info("Synced version %s to %s", self.version, uri) | python | def _sync_to_uri(self, uri):
"""Copy and sync versioned directory to uri in S3.
Args:
uri (str): S3 URI to sync version to.
"""
cmd_cp = 'aws s3 cp {} {} --recursive --profile {}'.format(self.s3_version_uri, uri, self.env)
# AWS CLI sync does not work as expected bucket to bucket with exact timestamp sync.
cmd_sync = 'aws s3 sync {} {} --delete --exact-timestamps --profile {}'.format(
self.s3_version_uri, uri, self.env)
cp_result = subprocess.run(cmd_cp, check=True, shell=True, stdout=subprocess.PIPE)
LOG.debug("Copy to %s before sync output: %s", uri, cp_result.stdout)
LOG.info("Copied version %s to %s", self.version, uri)
sync_result = subprocess.run(cmd_sync, check=True, shell=True, stdout=subprocess.PIPE)
LOG.debug("Sync to %s command output: %s", uri, sync_result.stdout)
LOG.info("Synced version %s to %s", self.version, uri) | [
"def",
"_sync_to_uri",
"(",
"self",
",",
"uri",
")",
":",
"cmd_cp",
"=",
"'aws s3 cp {} {} --recursive --profile {}'",
".",
"format",
"(",
"self",
".",
"s3_version_uri",
",",
"uri",
",",
"self",
".",
"env",
")",
"# AWS CLI sync does not work as expected bucket to bucket with exact timestamp sync.",
"cmd_sync",
"=",
"'aws s3 sync {} {} --delete --exact-timestamps --profile {}'",
".",
"format",
"(",
"self",
".",
"s3_version_uri",
",",
"uri",
",",
"self",
".",
"env",
")",
"cp_result",
"=",
"subprocess",
".",
"run",
"(",
"cmd_cp",
",",
"check",
"=",
"True",
",",
"shell",
"=",
"True",
",",
"stdout",
"=",
"subprocess",
".",
"PIPE",
")",
"LOG",
".",
"debug",
"(",
"\"Copy to %s before sync output: %s\"",
",",
"uri",
",",
"cp_result",
".",
"stdout",
")",
"LOG",
".",
"info",
"(",
"\"Copied version %s to %s\"",
",",
"self",
".",
"version",
",",
"uri",
")",
"sync_result",
"=",
"subprocess",
".",
"run",
"(",
"cmd_sync",
",",
"check",
"=",
"True",
",",
"shell",
"=",
"True",
",",
"stdout",
"=",
"subprocess",
".",
"PIPE",
")",
"LOG",
".",
"debug",
"(",
"\"Sync to %s command output: %s\"",
",",
"uri",
",",
"sync_result",
".",
"stdout",
")",
"LOG",
".",
"info",
"(",
"\"Synced version %s to %s\"",
",",
"self",
".",
"version",
",",
"uri",
")"
] | Copy and sync versioned directory to uri in S3.
Args:
uri (str): S3 URI to sync version to. | [
"Copy",
"and",
"sync",
"versioned",
"directory",
"to",
"uri",
"in",
"S3",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/s3/s3deploy.py#L206-L223 |
6,053 | foremast/foremast | src/foremast/utils/vpc.py | get_vpc_id | def get_vpc_id(account, region):
"""Get VPC ID configured for ``account`` in ``region``.
Args:
account (str): AWS account name.
region (str): Region name, e.g. us-east-1.
Returns:
str: VPC ID for the requested ``account`` in ``region``.
Raises:
:obj:`foremast.exceptions.SpinnakerVPCIDNotFound`: VPC ID not found for
``account`` in ``region``.
:obj:`foremast.exceptions.SpinnakerVPCNotFound`: Spinnaker has no VPCs
configured.
"""
url = '{0}/networks/aws'.format(API_URL)
response = requests.get(url, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT)
if not response.ok:
raise SpinnakerVPCNotFound(response.text)
vpcs = response.json()
for vpc in vpcs:
LOG.debug('VPC: %(name)s, %(account)s, %(region)s => %(id)s', vpc)
if 'name' in vpc and all([vpc['name'] == 'vpc', vpc['account'] == account, vpc['region'] == region]):
LOG.info('Found VPC ID for %s in %s: %s', account, region, vpc['id'])
vpc_id = vpc['id']
break
else:
LOG.fatal('VPC list: %s', vpcs)
raise SpinnakerVPCIDNotFound('No VPC available for {0} [{1}].'.format(account, region))
return vpc_id | python | def get_vpc_id(account, region):
"""Get VPC ID configured for ``account`` in ``region``.
Args:
account (str): AWS account name.
region (str): Region name, e.g. us-east-1.
Returns:
str: VPC ID for the requested ``account`` in ``region``.
Raises:
:obj:`foremast.exceptions.SpinnakerVPCIDNotFound`: VPC ID not found for
``account`` in ``region``.
:obj:`foremast.exceptions.SpinnakerVPCNotFound`: Spinnaker has no VPCs
configured.
"""
url = '{0}/networks/aws'.format(API_URL)
response = requests.get(url, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT)
if not response.ok:
raise SpinnakerVPCNotFound(response.text)
vpcs = response.json()
for vpc in vpcs:
LOG.debug('VPC: %(name)s, %(account)s, %(region)s => %(id)s', vpc)
if 'name' in vpc and all([vpc['name'] == 'vpc', vpc['account'] == account, vpc['region'] == region]):
LOG.info('Found VPC ID for %s in %s: %s', account, region, vpc['id'])
vpc_id = vpc['id']
break
else:
LOG.fatal('VPC list: %s', vpcs)
raise SpinnakerVPCIDNotFound('No VPC available for {0} [{1}].'.format(account, region))
return vpc_id | [
"def",
"get_vpc_id",
"(",
"account",
",",
"region",
")",
":",
"url",
"=",
"'{0}/networks/aws'",
".",
"format",
"(",
"API_URL",
")",
"response",
"=",
"requests",
".",
"get",
"(",
"url",
",",
"verify",
"=",
"GATE_CA_BUNDLE",
",",
"cert",
"=",
"GATE_CLIENT_CERT",
")",
"if",
"not",
"response",
".",
"ok",
":",
"raise",
"SpinnakerVPCNotFound",
"(",
"response",
".",
"text",
")",
"vpcs",
"=",
"response",
".",
"json",
"(",
")",
"for",
"vpc",
"in",
"vpcs",
":",
"LOG",
".",
"debug",
"(",
"'VPC: %(name)s, %(account)s, %(region)s => %(id)s'",
",",
"vpc",
")",
"if",
"'name'",
"in",
"vpc",
"and",
"all",
"(",
"[",
"vpc",
"[",
"'name'",
"]",
"==",
"'vpc'",
",",
"vpc",
"[",
"'account'",
"]",
"==",
"account",
",",
"vpc",
"[",
"'region'",
"]",
"==",
"region",
"]",
")",
":",
"LOG",
".",
"info",
"(",
"'Found VPC ID for %s in %s: %s'",
",",
"account",
",",
"region",
",",
"vpc",
"[",
"'id'",
"]",
")",
"vpc_id",
"=",
"vpc",
"[",
"'id'",
"]",
"break",
"else",
":",
"LOG",
".",
"fatal",
"(",
"'VPC list: %s'",
",",
"vpcs",
")",
"raise",
"SpinnakerVPCIDNotFound",
"(",
"'No VPC available for {0} [{1}].'",
".",
"format",
"(",
"account",
",",
"region",
")",
")",
"return",
"vpc_id"
] | Get VPC ID configured for ``account`` in ``region``.
Args:
account (str): AWS account name.
region (str): Region name, e.g. us-east-1.
Returns:
str: VPC ID for the requested ``account`` in ``region``.
Raises:
:obj:`foremast.exceptions.SpinnakerVPCIDNotFound`: VPC ID not found for
``account`` in ``region``.
:obj:`foremast.exceptions.SpinnakerVPCNotFound`: Spinnaker has no VPCs
configured. | [
"Get",
"VPC",
"ID",
"configured",
"for",
"account",
"in",
"region",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/vpc.py#L27-L62 |
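A minimal usage sketch, assuming a reachable Spinnaker Gate endpoint and configured credentials; the account and region values are illustrative:
from foremast.utils.vpc import get_vpc_id
from foremast.exceptions import SpinnakerVPCIDNotFound

try:
    vpc_id = get_vpc_id('dev', 'us-east-1')  # e.g. 'vpc-0123456789abcdef0'
except SpinnakerVPCIDNotFound:
    vpc_id = None  # no VPC named 'vpc' for that account/region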
6,054 | foremast/foremast | src/foremast/utils/subnets.py | get_subnets | def get_subnets(
target='ec2',
purpose='internal',
env='',
region='', ):
"""Get all availability zones for a given target.
Args:
target (str): Type of subnets to look up (ec2 or elb).
purpose (str): Subnet purpose filter for collected subnet IDs, e.g. internal.
env (str): Environment to look up.
region (str): AWS Region to find Subnets for.
Returns:
az_dict: dictionary of availability zones, structured like
{ $region: [ $availabilityzones ] }
or
{ $account: $region: [ $availabilityzone] }
"""
account_az_dict = defaultdict(defaultdict)
subnet_id_dict = defaultdict(defaultdict)
subnet_url = '{0}/subnets/aws'.format(API_URL)
subnet_response = requests.get(subnet_url, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT)
if not subnet_response.ok:
raise SpinnakerTimeout(subnet_response.text)
subnet_list = subnet_response.json()
for subnet in subnet_list:
LOG.debug('Subnet: %(account)s\t%(region)s\t%(target)s\t%(vpcId)s\t' '%(availabilityZone)s', subnet)
if subnet.get('target', '') == target:
availability_zone = subnet['availabilityZone']
account = subnet['account']
subnet_region = subnet['region']
subnet_id = subnet['id']
try:
if availability_zone not in account_az_dict[account][subnet_region]:
account_az_dict[account][subnet_region].append(availability_zone)
except KeyError:
account_az_dict[account][subnet_region] = [availability_zone]
# get list of all subnet IDs with correct purpose
if subnet['purpose'] == purpose:
try:
subnet_id_dict[account][subnet_region].append(subnet_id)
except KeyError:
subnet_id_dict[account][subnet_region] = [subnet_id]
LOG.debug('%s regions: %s', account, list(account_az_dict[account].keys()))
if all([env, region]):
try:
region_dict = {region: account_az_dict[env][region]}
region_dict['subnet_ids'] = {region: subnet_id_dict[env][region]}
LOG.debug('Region dict: %s', region_dict)
return region_dict
except KeyError:
raise SpinnakerSubnetError(env=env, region=region)
LOG.debug('AZ dict:\n%s', pformat(dict(account_az_dict)))
return account_az_dict | python | def get_subnets(
target='ec2',
purpose='internal',
env='',
region='', ):
"""Get all availability zones for a given target.
Args:
target (str): Type of subnets to look up (ec2 or elb).
env (str): Environment to look up.
region (str): AWS Region to find Subnets for.
Returns:
az_dict: dictionary of availbility zones, structured like
{ $region: [ $avaibilityzones ] }
or
{ $account: $region: [ $availabilityzone] }
"""
account_az_dict = defaultdict(defaultdict)
subnet_id_dict = defaultdict(defaultdict)
subnet_url = '{0}/subnets/aws'.format(API_URL)
subnet_response = requests.get(subnet_url, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT)
if not subnet_response.ok:
raise SpinnakerTimeout(subnet_response.text)
subnet_list = subnet_response.json()
for subnet in subnet_list:
LOG.debug('Subnet: %(account)s\t%(region)s\t%(target)s\t%(vpcId)s\t' '%(availabilityZone)s', subnet)
if subnet.get('target', '') == target:
availability_zone = subnet['availabilityZone']
account = subnet['account']
subnet_region = subnet['region']
subnet_id = subnet['id']
try:
if availability_zone not in account_az_dict[account][subnet_region]:
account_az_dict[account][subnet_region].append(availability_zone)
except KeyError:
account_az_dict[account][subnet_region] = [availability_zone]
# get list of all subnet IDs with correct purpose
if subnet['purpose'] == purpose:
try:
subnet_id_dict[account][subnet_region].append(subnet_id)
except KeyError:
subnet_id_dict[account][subnet_region] = [subnet_id]
LOG.debug('%s regions: %s', account, list(account_az_dict[account].keys()))
if all([env, region]):
try:
region_dict = {region: account_az_dict[env][region]}
region_dict['subnet_ids'] = {region: subnet_id_dict[env][region]}
LOG.debug('Region dict: %s', region_dict)
return region_dict
except KeyError:
raise SpinnakerSubnetError(env=env, region=region)
LOG.debug('AZ dict:\n%s', pformat(dict(account_az_dict)))
return account_az_dict | [
"def",
"get_subnets",
"(",
"target",
"=",
"'ec2'",
",",
"purpose",
"=",
"'internal'",
",",
"env",
"=",
"''",
",",
"region",
"=",
"''",
",",
")",
":",
"account_az_dict",
"=",
"defaultdict",
"(",
"defaultdict",
")",
"subnet_id_dict",
"=",
"defaultdict",
"(",
"defaultdict",
")",
"subnet_url",
"=",
"'{0}/subnets/aws'",
".",
"format",
"(",
"API_URL",
")",
"subnet_response",
"=",
"requests",
".",
"get",
"(",
"subnet_url",
",",
"verify",
"=",
"GATE_CA_BUNDLE",
",",
"cert",
"=",
"GATE_CLIENT_CERT",
")",
"if",
"not",
"subnet_response",
".",
"ok",
":",
"raise",
"SpinnakerTimeout",
"(",
"subnet_response",
".",
"text",
")",
"subnet_list",
"=",
"subnet_response",
".",
"json",
"(",
")",
"for",
"subnet",
"in",
"subnet_list",
":",
"LOG",
".",
"debug",
"(",
"'Subnet: %(account)s\\t%(region)s\\t%(target)s\\t%(vpcId)s\\t'",
"'%(availabilityZone)s'",
",",
"subnet",
")",
"if",
"subnet",
".",
"get",
"(",
"'target'",
",",
"''",
")",
"==",
"target",
":",
"availability_zone",
"=",
"subnet",
"[",
"'availabilityZone'",
"]",
"account",
"=",
"subnet",
"[",
"'account'",
"]",
"subnet_region",
"=",
"subnet",
"[",
"'region'",
"]",
"subnet_id",
"=",
"subnet",
"[",
"'id'",
"]",
"try",
":",
"if",
"availability_zone",
"not",
"in",
"account_az_dict",
"[",
"account",
"]",
"[",
"subnet_region",
"]",
":",
"account_az_dict",
"[",
"account",
"]",
"[",
"subnet_region",
"]",
".",
"append",
"(",
"availability_zone",
")",
"except",
"KeyError",
":",
"account_az_dict",
"[",
"account",
"]",
"[",
"subnet_region",
"]",
"=",
"[",
"availability_zone",
"]",
"# get list of all subnet IDs with correct purpose",
"if",
"subnet",
"[",
"'purpose'",
"]",
"==",
"purpose",
":",
"try",
":",
"subnet_id_dict",
"[",
"account",
"]",
"[",
"subnet_region",
"]",
".",
"append",
"(",
"subnet_id",
")",
"except",
"KeyError",
":",
"subnet_id_dict",
"[",
"account",
"]",
"[",
"subnet_region",
"]",
"=",
"[",
"subnet_id",
"]",
"LOG",
".",
"debug",
"(",
"'%s regions: %s'",
",",
"account",
",",
"list",
"(",
"account_az_dict",
"[",
"account",
"]",
".",
"keys",
"(",
")",
")",
")",
"if",
"all",
"(",
"[",
"env",
",",
"region",
"]",
")",
":",
"try",
":",
"region_dict",
"=",
"{",
"region",
":",
"account_az_dict",
"[",
"env",
"]",
"[",
"region",
"]",
"}",
"region_dict",
"[",
"'subnet_ids'",
"]",
"=",
"{",
"region",
":",
"subnet_id_dict",
"[",
"env",
"]",
"[",
"region",
"]",
"}",
"LOG",
".",
"debug",
"(",
"'Region dict: %s'",
",",
"region_dict",
")",
"return",
"region_dict",
"except",
"KeyError",
":",
"raise",
"SpinnakerSubnetError",
"(",
"env",
"=",
"env",
",",
"region",
"=",
"region",
")",
"LOG",
".",
"debug",
"(",
"'AZ dict:\\n%s'",
",",
"pformat",
"(",
"dict",
"(",
"account_az_dict",
")",
")",
")",
"return",
"account_az_dict"
] | Get all availability zones for a given target.
Args:
target (str): Type of subnets to look up (ec2 or elb).
purpose (str): Subnet purpose filter for collected subnet IDs, e.g. internal.
env (str): Environment to look up.
region (str): AWS Region to find Subnets for.
Returns:
az_dict: dictionary of availability zones, structured like
{ $region: [ $availabilityzones ] }
or
{ $account: $region: [ $availabilityzone] } | [
"Get",
"all",
"availability",
"zones",
"for",
"a",
"given",
"target",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/subnets.py#L32-L93 |
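A usage sketch showing the shape of the return value when both env and region are supplied; assumes a live Gate endpoint, and all IDs shown are illustrative:
from foremast.utils.subnets import get_subnets

region_dict = get_subnets(target='ec2', purpose='internal', env='dev', region='us-east-1')
# Expected shape (values illustrative):
# {'us-east-1': ['us-east-1a', 'us-east-1b'],
#  'subnet_ids': {'us-east-1': ['subnet-abc123', 'subnet-def456']}}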
6,055 | foremast/foremast | src/foremast/awslambda/awslambdaevent.py | LambdaEvent.create_lambda_events | def create_lambda_events(self):
"""Create all defined lambda events for an lambda application."""
# Clean up lambda permissions before creating triggers
remove_all_lambda_permissions(app_name=self.app_name, env=self.env, region=self.region)
triggers = self.properties['lambda_triggers']
for trigger in triggers:
if trigger['type'] == 'sns':
create_sns_event(app_name=self.app_name, env=self.env, region=self.region, rules=trigger)
if trigger['type'] == 'cloudwatch-event':
create_cloudwatch_event(app_name=self.app_name, env=self.env, region=self.region, rules=trigger)
if trigger['type'] == 'cloudwatch-logs':
create_cloudwatch_log_event(app_name=self.app_name, env=self.env, region=self.region, rules=trigger)
if trigger['type'] == 'api-gateway':
apigateway = APIGateway(
app=self.app_name, env=self.env, region=self.region, rules=trigger, prop_path=self.prop_path)
apigateway.setup_lambda_api()
# filter all triggers to isolate s3 triggers so we can operate on the entire group
s3_triggers = [x for x in triggers if x['type'] == 's3']
# group triggers by unique target bucket
bucket_triggers = dict()
for s3_trigger in s3_triggers:
bucket = s3_trigger.get('bucket')
if bucket in bucket_triggers:
bucket_triggers[bucket].append(s3_trigger)
else:
bucket_triggers[bucket] = [s3_trigger]
# apply relevant triggers to each respective bucket all at once.
for bucket, triggers in bucket_triggers.items():
create_s3_event(app_name=self.app_name, env=self.env, region=self.region, bucket=bucket, triggers=triggers) | python | def create_lambda_events(self):
"""Create all defined lambda events for an lambda application."""
# Clean up lambda permissions before creating triggers
remove_all_lambda_permissions(app_name=self.app_name, env=self.env, region=self.region)
triggers = self.properties['lambda_triggers']
for trigger in triggers:
if trigger['type'] == 'sns':
create_sns_event(app_name=self.app_name, env=self.env, region=self.region, rules=trigger)
if trigger['type'] == 'cloudwatch-event':
create_cloudwatch_event(app_name=self.app_name, env=self.env, region=self.region, rules=trigger)
if trigger['type'] == 'cloudwatch-logs':
create_cloudwatch_log_event(app_name=self.app_name, env=self.env, region=self.region, rules=trigger)
if trigger['type'] == 'api-gateway':
apigateway = APIGateway(
app=self.app_name, env=self.env, region=self.region, rules=trigger, prop_path=self.prop_path)
apigateway.setup_lambda_api()
# filter all triggers to isolate s3 triggers so we can operate on the entire group
s3_triggers = [x for x in triggers if x['type'] == 's3']
# group triggers by unique target bucket
bucket_triggers = dict()
for s3_trigger in s3_triggers:
bucket = s3_trigger.get('bucket')
if bucket in bucket_triggers:
bucket_triggers[bucket].append(s3_trigger)
else:
bucket_triggers[bucket] = [s3_trigger]
# apply relevant triggers to each respective bucket all at once.
for bucket, triggers in bucket_triggers.items():
create_s3_event(app_name=self.app_name, env=self.env, region=self.region, bucket=bucket, triggers=triggers) | [
"def",
"create_lambda_events",
"(",
"self",
")",
":",
"# Clean up lambda permissions before creating triggers",
"remove_all_lambda_permissions",
"(",
"app_name",
"=",
"self",
".",
"app_name",
",",
"env",
"=",
"self",
".",
"env",
",",
"region",
"=",
"self",
".",
"region",
")",
"triggers",
"=",
"self",
".",
"properties",
"[",
"'lambda_triggers'",
"]",
"for",
"trigger",
"in",
"triggers",
":",
"if",
"trigger",
"[",
"'type'",
"]",
"==",
"'sns'",
":",
"create_sns_event",
"(",
"app_name",
"=",
"self",
".",
"app_name",
",",
"env",
"=",
"self",
".",
"env",
",",
"region",
"=",
"self",
".",
"region",
",",
"rules",
"=",
"trigger",
")",
"if",
"trigger",
"[",
"'type'",
"]",
"==",
"'cloudwatch-event'",
":",
"create_cloudwatch_event",
"(",
"app_name",
"=",
"self",
".",
"app_name",
",",
"env",
"=",
"self",
".",
"env",
",",
"region",
"=",
"self",
".",
"region",
",",
"rules",
"=",
"trigger",
")",
"if",
"trigger",
"[",
"'type'",
"]",
"==",
"'cloudwatch-logs'",
":",
"create_cloudwatch_log_event",
"(",
"app_name",
"=",
"self",
".",
"app_name",
",",
"env",
"=",
"self",
".",
"env",
",",
"region",
"=",
"self",
".",
"region",
",",
"rules",
"=",
"trigger",
")",
"if",
"trigger",
"[",
"'type'",
"]",
"==",
"'api-gateway'",
":",
"apigateway",
"=",
"APIGateway",
"(",
"app",
"=",
"self",
".",
"app_name",
",",
"env",
"=",
"self",
".",
"env",
",",
"region",
"=",
"self",
".",
"region",
",",
"rules",
"=",
"trigger",
",",
"prop_path",
"=",
"self",
".",
"prop_path",
")",
"apigateway",
".",
"setup_lambda_api",
"(",
")",
"# filter all triggers to isolate s3 triggers so we can operate on the entire group",
"s3_triggers",
"=",
"[",
"x",
"for",
"x",
"in",
"triggers",
"if",
"x",
"[",
"'type'",
"]",
"==",
"'s3'",
"]",
"# group triggers by unique target bucket",
"bucket_triggers",
"=",
"dict",
"(",
")",
"for",
"s3_trigger",
"in",
"s3_triggers",
":",
"bucket",
"=",
"s3_trigger",
".",
"get",
"(",
"'bucket'",
")",
"if",
"bucket",
"in",
"bucket_triggers",
":",
"bucket_triggers",
"[",
"bucket",
"]",
".",
"append",
"(",
"s3_trigger",
")",
"else",
":",
"bucket_triggers",
"[",
"bucket",
"]",
"=",
"[",
"s3_trigger",
"]",
"# apply relevant triggers to each respective bucket all at once.",
"for",
"bucket",
",",
"triggers",
"in",
"bucket_triggers",
".",
"items",
"(",
")",
":",
"create_s3_event",
"(",
"app_name",
"=",
"self",
".",
"app_name",
",",
"env",
"=",
"self",
".",
"env",
",",
"region",
"=",
"self",
".",
"region",
",",
"bucket",
"=",
"bucket",
",",
"triggers",
"=",
"triggers",
")"
] | Create all defined lambda events for a lambda application. | [
"Create",
"all",
"defined",
"lambda",
"events",
"for",
"an",
"lambda",
"application",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/awslambdaevent.py#L45-L83 |
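A standalone sketch of the S3-trigger grouping step; batching per bucket matters because S3 bucket notification configuration is replaced wholesale, so writing triggers one at a time would clobber earlier ones. The trigger list is a hypothetical example:
from collections import defaultdict

triggers = [  # hypothetical pipeline.json data
    {'type': 's3', 'bucket': 'bucket-a', 'events': ['s3:ObjectCreated:*']},
    {'type': 's3', 'bucket': 'bucket-a', 'events': ['s3:ObjectRemoved:*']},
    {'type': 's3', 'bucket': 'bucket-b', 'events': ['s3:ObjectCreated:*']},
]
bucket_triggers = defaultdict(list)
for s3_trigger in (t for t in triggers if t['type'] == 's3'):
    bucket_triggers[s3_trigger['bucket']].append(s3_trigger)
# bucket-a -> two triggers applied in one call; bucket-b -> one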
6,056 | foremast/foremast | src/foremast/utils/pipelines.py | get_all_pipelines | def get_all_pipelines(app=''):
"""Get a list of all the Pipelines in _app_.
Args:
app (str): Name of Spinnaker Application.
Returns:
list: Pipeline configurations from Gate.
"""
url = '{host}/applications/{app}/pipelineConfigs'.format(host=API_URL, app=app)
response = requests.get(url, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT)
assert response.ok, 'Could not retrieve Pipelines for {0}.'.format(app)
pipelines = response.json()
LOG.debug('Pipelines:\n%s', pipelines)
return pipelines | python | def get_all_pipelines(app=''):
"""Get a list of all the Pipelines in _app_.
Args:
app (str): Name of Spinnaker Application.
Returns:
list: Pipeline configurations from Gate.
"""
url = '{host}/applications/{app}/pipelineConfigs'.format(host=API_URL, app=app)
response = requests.get(url, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT)
assert response.ok, 'Could not retrieve Pipelines for {0}.'.format(app)
pipelines = response.json()
LOG.debug('Pipelines:\n%s', pipelines)
return pipelines | [
"def",
"get_all_pipelines",
"(",
"app",
"=",
"''",
")",
":",
"url",
"=",
"'{host}/applications/{app}/pipelineConfigs'",
".",
"format",
"(",
"host",
"=",
"API_URL",
",",
"app",
"=",
"app",
")",
"response",
"=",
"requests",
".",
"get",
"(",
"url",
",",
"verify",
"=",
"GATE_CA_BUNDLE",
",",
"cert",
"=",
"GATE_CLIENT_CERT",
")",
"assert",
"response",
".",
"ok",
",",
"'Could not retrieve Pipelines for {0}.'",
".",
"format",
"(",
"app",
")",
"pipelines",
"=",
"response",
".",
"json",
"(",
")",
"LOG",
".",
"debug",
"(",
"'Pipelines:\\n%s'",
",",
"pipelines",
")",
"return",
"pipelines"
] | Get a list of all the Pipelines in _app_.
Args:
app (str): Name of Spinnaker Application.
Returns:
list: Pipeline configurations from Gate. | [
"Get",
"a",
"list",
"of",
"all",
"the",
"Pipelines",
"in",
"_app_",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/pipelines.py#L64-L82 |
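A usage sketch, assuming a reachable Gate API; the application name is illustrative:
from foremast.utils.pipelines import get_all_pipelines

for pipeline in get_all_pipelines(app='exampleapp'):
    print(pipeline['name'], pipeline['id'])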
6,057 | foremast/foremast | src/foremast/utils/pipelines.py | get_pipeline_id | def get_pipeline_id(app='', name=''):
"""Get the ID for Pipeline _name_.
Args:
app (str): Name of Spinnaker Application to search.
name (str): Name of Pipeline to get ID for.
Returns:
str: ID of specified Pipeline.
None: Pipeline or Spinnaker Application not found.
"""
return_id = None
pipelines = get_all_pipelines(app=app)
for pipeline in pipelines:
LOG.debug('ID of %(name)s: %(id)s', pipeline)
if pipeline['name'] == name:
return_id = pipeline['id']
LOG.info('Pipeline %s found, ID: %s', name, return_id)
break
return return_id | python | def get_pipeline_id(app='', name=''):
"""Get the ID for Pipeline _name_.
Args:
app (str): Name of Spinnaker Application to search.
name (str): Name of Pipeline to get ID for.
Returns:
str: ID of specified Pipeline.
None: Pipeline or Spinnaker Application not found.
"""
return_id = None
pipelines = get_all_pipelines(app=app)
for pipeline in pipelines:
LOG.debug('ID of %(name)s: %(id)s', pipeline)
if pipeline['name'] == name:
return_id = pipeline['id']
LOG.info('Pipeline %s found, ID: %s', name, return_id)
break
return return_id | [
"def",
"get_pipeline_id",
"(",
"app",
"=",
"''",
",",
"name",
"=",
"''",
")",
":",
"return_id",
"=",
"None",
"pipelines",
"=",
"get_all_pipelines",
"(",
"app",
"=",
"app",
")",
"for",
"pipeline",
"in",
"pipelines",
":",
"LOG",
".",
"debug",
"(",
"'ID of %(name)s: %(id)s'",
",",
"pipeline",
")",
"if",
"pipeline",
"[",
"'name'",
"]",
"==",
"name",
":",
"return_id",
"=",
"pipeline",
"[",
"'id'",
"]",
"LOG",
".",
"info",
"(",
"'Pipeline %s found, ID: %s'",
",",
"name",
",",
"return_id",
")",
"break",
"return",
"return_id"
] | Get the ID for Pipeline _name_.
Args:
app (str): Name of Spinnaker Application to search.
name (str): Name of Pipeline to get ID for.
Returns:
str: ID of specified Pipeline.
None: Pipeline or Spinnaker Application not found. | [
"Get",
"the",
"ID",
"for",
"Pipeline",
"_name_",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/pipelines.py#L85-L109 |
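A usage sketch; the app and pipeline names are illustrative:
from foremast.utils.pipelines import get_pipeline_id

pipeline_id = get_pipeline_id(app='exampleapp', name='exampleapp [us-east-1]')
if pipeline_id is None:
    print('pipeline not found')  # the function returns None rather than raising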
6,058 | foremast/foremast | src/foremast/utils/pipelines.py | normalize_pipeline_name | def normalize_pipeline_name(name=''):
"""Translate unsafe characters to underscores."""
normalized_name = name
for bad in '\\/?%#':
normalized_name = normalized_name.replace(bad, '_')
return normalized_name | python | def normalize_pipeline_name(name=''):
"""Translate unsafe characters to underscores."""
normalized_name = name
for bad in '\\/?%#':
normalized_name = normalized_name.replace(bad, '_')
return normalized_name | [
"def",
"normalize_pipeline_name",
"(",
"name",
"=",
"''",
")",
":",
"normalized_name",
"=",
"name",
"for",
"bad",
"in",
"'\\\\/?%#'",
":",
"normalized_name",
"=",
"normalized_name",
".",
"replace",
"(",
"bad",
",",
"'_'",
")",
"return",
"normalized_name"
] | Translate unsafe characters to underscores. | [
"Translate",
"unsafe",
"characters",
"to",
"underscores",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/pipelines.py#L112-L117 |
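Behavior sketch: each of the characters \ / ? % # is replaced with an underscore:
from foremast.utils.pipelines import normalize_pipeline_name

assert normalize_pipeline_name('app [us-east-1] 50%/canary') == 'app [us-east-1] 50__canary'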
6,059 | foremast/foremast | src/foremast/utils/apps.py | get_all_apps | def get_all_apps():
"""Get a list of all applications in Spinnaker.
Returns:
list: All applications from Gate.
"""
LOG.info('Retrieving list of all Spinnaker applications')
url = '{}/applications'.format(API_URL)
response = requests.get(url, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT)
assert response.ok, 'Could not retrieve application list'
pipelines = response.json()
LOG.debug('All Applications:\n%s', pipelines)
return pipelines | python | def get_all_apps():
"""Get a list of all applications in Spinnaker.
Returns:
list: All applications from Gate.
"""
LOG.info('Retrieving list of all Spinnaker applications')
url = '{}/applications'.format(API_URL)
response = requests.get(url, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT)
assert response.ok, 'Could not retrieve application list'
pipelines = response.json()
LOG.debug('All Applications:\n%s', pipelines)
return pipelines | [
"def",
"get_all_apps",
"(",
")",
":",
"LOG",
".",
"info",
"(",
"'Retreiving list of all Spinnaker applications'",
")",
"url",
"=",
"'{}/applications'",
".",
"format",
"(",
"API_URL",
")",
"response",
"=",
"requests",
".",
"get",
"(",
"url",
",",
"verify",
"=",
"GATE_CA_BUNDLE",
",",
"cert",
"=",
"GATE_CLIENT_CERT",
")",
"assert",
"response",
".",
"ok",
",",
"'Could not retrieve application list'",
"pipelines",
"=",
"response",
".",
"json",
"(",
")",
"LOG",
".",
"debug",
"(",
"'All Applications:\\n%s'",
",",
"pipelines",
")",
"return",
"pipelines"
] | Get a list of all applications in Spinnaker.
Returns:
list: All applications from Gate. | [
"Get",
"a",
"list",
"of",
"all",
"applications",
"in",
"Spinnaker",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/apps.py#L28-L44 |
6,060 | foremast/foremast | src/foremast/utils/apps.py | get_details | def get_details(app='groupproject', env='dev', region='us-east-1'):
"""Extract details for Application.
Args:
app (str): Application Name
env (str): Environment/account to get details from
region (str): AWS Region the details are generated for
Returns:
gogoutils.Generator: Generated details for the Application in the
requested environment and region.
"""
url = '{host}/applications/{app}'.format(host=API_URL, app=app)
request = requests.get(url, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT)
if not request.ok:
raise SpinnakerAppNotFound('"{0}" not found.'.format(app))
app_details = request.json()
LOG.debug('App details: %s', app_details)
group = app_details['attributes'].get('repoProjectKey')
project = app_details['attributes'].get('repoSlug')
generated = gogoutils.Generator(group, project, env=env, region=region, formats=APP_FORMATS)
LOG.debug('Application details: %s', generated)
return generated | python | def get_details(app='groupproject', env='dev', region='us-east-1'):
"""Extract details for Application.
Args:
app (str): Application Name
env (str): Environment/account to get details from
region (str): AWS Region the details are generated for
Returns:
gogoutils.Generator: Generated details for the Application in the
requested environment and region.
"""
url = '{host}/applications/{app}'.format(host=API_URL, app=app)
request = requests.get(url, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT)
if not request.ok:
raise SpinnakerAppNotFound('"{0}" not found.'.format(app))
app_details = request.json()
LOG.debug('App details: %s', app_details)
group = app_details['attributes'].get('repoProjectKey')
project = app_details['attributes'].get('repoSlug')
generated = gogoutils.Generator(group, project, env=env, region=region, formats=APP_FORMATS)
LOG.debug('Application details: %s', generated)
return generated | [
"def",
"get_details",
"(",
"app",
"=",
"'groupproject'",
",",
"env",
"=",
"'dev'",
",",
"region",
"=",
"'us-east-1'",
")",
":",
"url",
"=",
"'{host}/applications/{app}'",
".",
"format",
"(",
"host",
"=",
"API_URL",
",",
"app",
"=",
"app",
")",
"request",
"=",
"requests",
".",
"get",
"(",
"url",
",",
"verify",
"=",
"GATE_CA_BUNDLE",
",",
"cert",
"=",
"GATE_CLIENT_CERT",
")",
"if",
"not",
"request",
".",
"ok",
":",
"raise",
"SpinnakerAppNotFound",
"(",
"'\"{0}\" not found.'",
".",
"format",
"(",
"app",
")",
")",
"app_details",
"=",
"request",
".",
"json",
"(",
")",
"LOG",
".",
"debug",
"(",
"'App details: %s'",
",",
"app_details",
")",
"group",
"=",
"app_details",
"[",
"'attributes'",
"]",
".",
"get",
"(",
"'repoProjectKey'",
")",
"project",
"=",
"app_details",
"[",
"'attributes'",
"]",
".",
"get",
"(",
"'repoSlug'",
")",
"generated",
"=",
"gogoutils",
".",
"Generator",
"(",
"group",
",",
"project",
",",
"env",
"=",
"env",
",",
"region",
"=",
"region",
",",
"formats",
"=",
"APP_FORMATS",
")",
"LOG",
".",
"debug",
"(",
"'Application details: %s'",
",",
"generated",
")",
"return",
"generated"
] | Extract details for Application.
Args:
app (str): Application Name
env (str): Environment/account to get details from
region (str): AWS Region the details are generated for
Returns:
gogoutils.Generator: Generated details for the Application in the
requested environment and region. | [
"Extract",
"details",
"for",
"Application",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/apps.py#L47-L74 |
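A usage sketch, assuming a reachable Gate API and a configured APP_FORMATS; the application name is illustrative:
from foremast.utils.apps import get_details

generated = get_details(app='exampleapp', env='stage', region='us-west-2')
# `generated` is a gogoutils.Generator built from the app's repoProjectKey
# and repoSlug attributes, used elsewhere to derive per-environment names.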
6,061 | foremast/foremast | src/foremast/pipeline/create_pipeline_s3.py | SpinnakerPipelineS3.create_pipeline | def create_pipeline(self):
"""Main wrapper for pipeline creation.
1. Runs clean_pipelines to clean up existing ones
2. Determines which environments the pipeline needs
3. Renders all of the pipeline blocks as defined in configs
4. Runs post_pipeline to create pipeline
"""
clean_pipelines(app=self.app_name, settings=self.settings)
pipeline_envs = self.environments
self.log.debug('Envs from pipeline.json: %s', pipeline_envs)
regions_envs = collections.defaultdict(list)
for env in pipeline_envs:
for region in self.settings[env]['regions']:
regions_envs[region].append(env)
self.log.info('Environments and Regions for Pipelines:\n%s', json.dumps(regions_envs, indent=4))
pipelines = {}
for region, envs in regions_envs.items():
# TODO: Overrides for an environment no longer makes sense. Need to
# provide override for entire Region possibly.
pipelines[region] = self.render_wrapper(region=region)
previous_env = None
for env in envs:
block = construct_pipeline_block_s3(
env=env,
generated=self.generated,
previous_env=previous_env,
region=region,
settings=self.settings[env][region],
pipeline_data=self.settings['pipeline'])
pipelines[region]['stages'].extend(json.loads(block))
previous_env = env
self.log.debug('Assembled Pipelines:\n%s', pformat(pipelines))
for region, pipeline in pipelines.items():
renumerate_stages(pipeline)
self.post_pipeline(pipeline)
return True | python | def create_pipeline(self):
"""Main wrapper for pipeline creation.
1. Runs clean_pipelines to clean up existing ones
2. Determines which environments the pipeline needs
3. Renders all of the pipeline blocks as defined in configs
4. Runs post_pipeline to create pipeline
"""
clean_pipelines(app=self.app_name, settings=self.settings)
pipeline_envs = self.environments
self.log.debug('Envs from pipeline.json: %s', pipeline_envs)
regions_envs = collections.defaultdict(list)
for env in pipeline_envs:
for region in self.settings[env]['regions']:
regions_envs[region].append(env)
self.log.info('Environments and Regions for Pipelines:\n%s', json.dumps(regions_envs, indent=4))
pipelines = {}
for region, envs in regions_envs.items():
# TODO: Overrides for an environment no longer makes sense. Need to
# provide override for entire Region possibly.
pipelines[region] = self.render_wrapper(region=region)
previous_env = None
for env in envs:
block = construct_pipeline_block_s3(
env=env,
generated=self.generated,
previous_env=previous_env,
region=region,
settings=self.settings[env][region],
pipeline_data=self.settings['pipeline'])
pipelines[region]['stages'].extend(json.loads(block))
previous_env = env
self.log.debug('Assembled Pipelines:\n%s', pformat(pipelines))
for region, pipeline in pipelines.items():
renumerate_stages(pipeline)
self.post_pipeline(pipeline)
return True | [
"def",
"create_pipeline",
"(",
"self",
")",
":",
"clean_pipelines",
"(",
"app",
"=",
"self",
".",
"app_name",
",",
"settings",
"=",
"self",
".",
"settings",
")",
"pipeline_envs",
"=",
"self",
".",
"environments",
"self",
".",
"log",
".",
"debug",
"(",
"'Envs from pipeline.json: %s'",
",",
"pipeline_envs",
")",
"regions_envs",
"=",
"collections",
".",
"defaultdict",
"(",
"list",
")",
"for",
"env",
"in",
"pipeline_envs",
":",
"for",
"region",
"in",
"self",
".",
"settings",
"[",
"env",
"]",
"[",
"'regions'",
"]",
":",
"regions_envs",
"[",
"region",
"]",
".",
"append",
"(",
"env",
")",
"self",
".",
"log",
".",
"info",
"(",
"'Environments and Regions for Pipelines:\\n%s'",
",",
"json",
".",
"dumps",
"(",
"regions_envs",
",",
"indent",
"=",
"4",
")",
")",
"pipelines",
"=",
"{",
"}",
"for",
"region",
",",
"envs",
"in",
"regions_envs",
".",
"items",
"(",
")",
":",
"# TODO: Overrides for an environment no longer makes sense. Need to",
"# provide override for entire Region possibly.",
"pipelines",
"[",
"region",
"]",
"=",
"self",
".",
"render_wrapper",
"(",
"region",
"=",
"region",
")",
"previous_env",
"=",
"None",
"for",
"env",
"in",
"envs",
":",
"block",
"=",
"construct_pipeline_block_s3",
"(",
"env",
"=",
"env",
",",
"generated",
"=",
"self",
".",
"generated",
",",
"previous_env",
"=",
"previous_env",
",",
"region",
"=",
"region",
",",
"settings",
"=",
"self",
".",
"settings",
"[",
"env",
"]",
"[",
"region",
"]",
",",
"pipeline_data",
"=",
"self",
".",
"settings",
"[",
"'pipeline'",
"]",
")",
"pipelines",
"[",
"region",
"]",
"[",
"'stages'",
"]",
".",
"extend",
"(",
"json",
".",
"loads",
"(",
"block",
")",
")",
"previous_env",
"=",
"env",
"self",
".",
"log",
".",
"debug",
"(",
"'Assembled Pipelines:\\n%s'",
",",
"pformat",
"(",
"pipelines",
")",
")",
"for",
"region",
",",
"pipeline",
"in",
"pipelines",
".",
"items",
"(",
")",
":",
"renumerate_stages",
"(",
"pipeline",
")",
"self",
".",
"post_pipeline",
"(",
"pipeline",
")",
"return",
"True"
] | Main wrapper for pipeline creation.
1. Runs clean_pipelines to clean up existing ones
2. Determines which environments the pipeline needs
3. Renders all of the pipeline blocks as defined in configs
4. Runs post_pipeline to create pipeline | [
"Main",
"wrapper",
"for",
"pipeline",
"creation",
".",
"1",
".",
"Runs",
"clean_pipelines",
"to",
"clean",
"up",
"existing",
"ones",
"2",
".",
"determines",
"which",
"environments",
"the",
"pipeline",
"needs",
"3",
".",
"Renders",
"all",
"of",
"the",
"pipeline",
"blocks",
"as",
"defined",
"in",
"configs",
"4",
".",
"Runs",
"post_pipeline",
"to",
"create",
"pipeline"
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/pipeline/create_pipeline_s3.py#L84-L129 |
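A standalone sketch of the env-to-region inversion at the heart of create_pipeline; the settings dict is a hypothetical example of parsed pipeline.json data:
import collections

environments = ['dev', 'stage']
settings = {  # hypothetical
    'dev': {'regions': ['us-east-1', 'us-west-2']},
    'stage': {'regions': ['us-east-1']},
}
regions_envs = collections.defaultdict(list)
for env in environments:
    for region in settings[env]['regions']:
        regions_envs[region].append(env)
# {'us-east-1': ['dev', 'stage'], 'us-west-2': ['dev']}
# One pipeline is rendered per region, with one block per env in order.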
6,062 | foremast/foremast | src/foremast/awslambda/awslambda.py | LambdaFunction._check_lambda | def _check_lambda(self):
"""Check if lambda function exists.
Returns:
True if function does exist
False if function does not exist
"""
exists = False
try:
self.lambda_client.get_function(FunctionName=self.app_name)
exists = True
except boto3.exceptions.botocore.exceptions.ClientError:
pass
return exists | python | def _check_lambda(self):
"""Check if lambda function exists.
Returns:
True if function does exist
False if function does not exist
"""
exists = False
try:
self.lambda_client.get_function(FunctionName=self.app_name)
exists = True
except boto3.exceptions.botocore.exceptions.ClientError:
pass
return exists | [
"def",
"_check_lambda",
"(",
"self",
")",
":",
"exists",
"=",
"False",
"try",
":",
"self",
".",
"lambda_client",
".",
"get_function",
"(",
"FunctionName",
"=",
"self",
".",
"app_name",
")",
"exists",
"=",
"True",
"except",
"boto3",
".",
"exceptions",
".",
"botocore",
".",
"exceptions",
".",
"ClientError",
":",
"pass",
"return",
"exists"
] | Check if lambda function exists.
Returns:
True if function does exist
False if function does not exist | [
"Check",
"if",
"lambda",
"function",
"exists",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/awslambda.py#L72-L85 |
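The same existence check with plain boto3, as a standalone sketch; assumes AWS credentials are configured, and the function name is illustrative:
import boto3
from botocore.exceptions import ClientError

lambda_client = boto3.client('lambda', region_name='us-east-1')
try:
    lambda_client.get_function(FunctionName='exampleapp')
    exists = True
except ClientError:
    exists = False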
6,063 | foremast/foremast | src/foremast/awslambda/awslambda.py | LambdaFunction._check_lambda_alias | def _check_lambda_alias(self):
"""Check if lambda alias exists.
Returns:
True if alias exists
False if alias does not exist
"""
aliases = self.lambda_client.list_aliases(FunctionName=self.app_name)
matched_alias = False
for alias in aliases['Aliases']:
if alias['Name'] == self.env:
LOG.info('Found alias %s for function %s', self.env, self.app_name)
matched_alias = True
break
else:
LOG.info('No alias %s found for function %s', self.env, self.app_name)
return matched_alias | python | def _check_lambda_alias(self):
"""Check if lambda alias exists.
Returns:
True if alias exists
False if alias does not exist
"""
aliases = self.lambda_client.list_aliases(FunctionName=self.app_name)
matched_alias = False
for alias in aliases['Aliases']:
if alias['Name'] == self.env:
LOG.info('Found alias %s for function %s', self.env, self.app_name)
matched_alias = True
break
else:
LOG.info('No alias %s found for function %s', self.env, self.app_name)
return matched_alias | [
"def",
"_check_lambda_alias",
"(",
"self",
")",
":",
"aliases",
"=",
"self",
".",
"lambda_client",
".",
"list_aliases",
"(",
"FunctionName",
"=",
"self",
".",
"app_name",
")",
"matched_alias",
"=",
"False",
"for",
"alias",
"in",
"aliases",
"[",
"'Aliases'",
"]",
":",
"if",
"alias",
"[",
"'Name'",
"]",
"==",
"self",
".",
"env",
":",
"LOG",
".",
"info",
"(",
"'Found alias %s for function %s'",
",",
"self",
".",
"env",
",",
"self",
".",
"app_name",
")",
"matched_alias",
"=",
"True",
"break",
"else",
":",
"LOG",
".",
"info",
"(",
"'No alias %s found for function %s'",
",",
"self",
".",
"env",
",",
"self",
".",
"app_name",
")",
"return",
"matched_alias"
] | Check if lambda alias exists.
Returns:
True if alias exists
False if alias does not exist | [
"Check",
"if",
"lambda",
"alias",
"exists",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/awslambda.py#L87-L104 |
6,064 | foremast/foremast | src/foremast/awslambda/awslambda.py | LambdaFunction._vpc_config | def _vpc_config(self):
"""Get VPC config."""
if self.vpc_enabled:
subnets = get_subnets(env=self.env, region=self.region, purpose='internal')['subnet_ids'][self.region]
security_groups = self._get_sg_ids()
vpc_config = {'SubnetIds': subnets, 'SecurityGroupIds': security_groups}
else:
vpc_config = {'SubnetIds': [], 'SecurityGroupIds': []}
LOG.debug("Lambda VPC config setup: %s", vpc_config)
return vpc_config | python | def _vpc_config(self):
"""Get VPC config."""
if self.vpc_enabled:
subnets = get_subnets(env=self.env, region=self.region, purpose='internal')['subnet_ids'][self.region]
security_groups = self._get_sg_ids()
vpc_config = {'SubnetIds': subnets, 'SecurityGroupIds': security_groups}
else:
vpc_config = {'SubnetIds': [], 'SecurityGroupIds': []}
LOG.debug("Lambda VPC config setup: %s", vpc_config)
return vpc_config | [
"def",
"_vpc_config",
"(",
"self",
")",
":",
"if",
"self",
".",
"vpc_enabled",
":",
"subnets",
"=",
"get_subnets",
"(",
"env",
"=",
"self",
".",
"env",
",",
"region",
"=",
"self",
".",
"region",
",",
"purpose",
"=",
"'internal'",
")",
"[",
"'subnet_ids'",
"]",
"[",
"self",
".",
"region",
"]",
"security_groups",
"=",
"self",
".",
"_get_sg_ids",
"(",
")",
"vpc_config",
"=",
"{",
"'SubnetIds'",
":",
"subnets",
",",
"'SecurityGroupIds'",
":",
"security_groups",
"}",
"else",
":",
"vpc_config",
"=",
"{",
"'SubnetIds'",
":",
"[",
"]",
",",
"'SecurityGroupIds'",
":",
"[",
"]",
"}",
"LOG",
".",
"debug",
"(",
"\"Lambda VPC config setup: %s\"",
",",
"vpc_config",
")",
"return",
"vpc_config"
] | Get VPC config. | [
"Get",
"VPC",
"config",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/awslambda.py#L106-L116 |
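Shape sketch of the VpcConfig mapping this helper returns; the IDs are illustrative. Passing empty lists detaches the function from any VPC:
vpc_config = {
    'SubnetIds': ['subnet-abc123', 'subnet-def456'],  # illustrative
    'SecurityGroupIds': ['sg-0123456789abcdef0'],     # illustrative
}
no_vpc_config = {'SubnetIds': [], 'SecurityGroupIds': []}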
6,065 | foremast/foremast | src/foremast/awslambda/awslambda.py | LambdaFunction._get_sg_ids | def _get_sg_ids(self):
"""Get IDs for all defined security groups.
Returns:
list: security group IDs for the app and any lambda_extras
"""
try:
lambda_extras = self.settings['security_groups']['lambda_extras']
except KeyError:
lambda_extras = []
security_groups = [self.app_name] + lambda_extras
sg_ids = []
for security_group in security_groups:
sg_id = get_security_group_id(name=security_group, env=self.env, region=self.region)
sg_ids.append(sg_id)
return sg_ids | python | def _get_sg_ids(self):
"""Get IDs for all defined security groups.
Returns:
list: security group IDs for the app and any lambda_extras
"""
try:
lambda_extras = self.settings['security_groups']['lambda_extras']
except KeyError:
lambda_extras = []
security_groups = [self.app_name] + lambda_extras
sg_ids = []
for security_group in security_groups:
sg_id = get_security_group_id(name=security_group, env=self.env, region=self.region)
sg_ids.append(sg_id)
return sg_ids | [
"def",
"_get_sg_ids",
"(",
"self",
")",
":",
"try",
":",
"lambda_extras",
"=",
"self",
".",
"settings",
"[",
"'security_groups'",
"]",
"[",
"'lambda_extras'",
"]",
"except",
"KeyError",
":",
"lambda_extras",
"=",
"[",
"]",
"security_groups",
"=",
"[",
"self",
".",
"app_name",
"]",
"+",
"lambda_extras",
"sg_ids",
"=",
"[",
"]",
"for",
"security_group",
"in",
"security_groups",
":",
"sg_id",
"=",
"get_security_group_id",
"(",
"name",
"=",
"security_group",
",",
"env",
"=",
"self",
".",
"env",
",",
"region",
"=",
"self",
".",
"region",
")",
"sg_ids",
".",
"append",
"(",
"sg_id",
")",
"return",
"sg_ids"
] | Get IDs for all defined security groups.
Returns:
list: security group IDs for the app and any lambda_extras | [
"Get",
"IDs",
"for",
"all",
"defined",
"security",
"groups",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/awslambda.py#L118-L134 |
6,066 | foremast/foremast | src/foremast/awslambda/awslambda.py | LambdaFunction.update_function_configuration | def update_function_configuration(self, vpc_config):
"""Update existing Lambda function configuration.
Args:
vpc_config (dict): Dictionary of SubnetIds and SecurityGroupIds for using
a VPC in lambda
"""
LOG.info('Updating configuration for lambda function: %s', self.app_name)
try:
self.lambda_client.update_function_configuration(
Environment=self.lambda_environment,
FunctionName=self.app_name,
Runtime=self.runtime,
Role=self.role_arn,
Handler=self.handler,
Description=self.description,
Timeout=int(self.timeout),
MemorySize=int(self.memory),
VpcConfig=vpc_config)
if self.concurrency_limit:
self.lambda_client.put_function_concurrency(
FunctionName=self.app_name,
ReservedConcurrentExecutions=self.concurrency_limit
)
else:
self.lambda_client.delete_function_concurrency(FunctionName=self.app_name)
except boto3.exceptions.botocore.exceptions.ClientError as error:
if 'CreateNetworkInterface' in error.response['Error']['Message']:
message = '{0} is missing "ec2:CreateNetworkInterface"'.format(self.role_arn)
LOG.debug(message)
raise SystemExit(message)
raise
LOG.info('Updating Lambda function tags')
lambda_arn = get_lambda_arn(self.app_name, self.env, self.region)
self.lambda_client.tag_resource(Resource=lambda_arn, Tags={'app_group': self.group, 'app_name': self.app_name})
LOG.info("Successfully updated Lambda configuration.") | python | def update_function_configuration(self, vpc_config):
"""Update existing Lambda function configuration.
Args:
vpc_config (dict): Dictionary of SubnetIds and SecurityGroupIds for using
a VPC in lambda
"""
LOG.info('Updating configuration for lambda function: %s', self.app_name)
try:
self.lambda_client.update_function_configuration(
Environment=self.lambda_environment,
FunctionName=self.app_name,
Runtime=self.runtime,
Role=self.role_arn,
Handler=self.handler,
Description=self.description,
Timeout=int(self.timeout),
MemorySize=int(self.memory),
VpcConfig=vpc_config)
if self.concurrency_limit:
self.lambda_client.put_function_concurrency(
FunctionName=self.app_name,
ReservedConcurrentExecutions=self.concurrency_limit
)
else:
self.lambda_client.delete_function_concurrency(FunctionName=self.app_name)
except boto3.exceptions.botocore.exceptions.ClientError as error:
if 'CreateNetworkInterface' in error.response['Error']['Message']:
message = '{0} is missing "ec2:CreateNetworkInterface"'.format(self.role_arn)
LOG.debug(message)
raise SystemExit(message)
raise
LOG.info('Updating Lambda function tags')
lambda_arn = get_lambda_arn(self.app_name, self.env, self.region)
self.lambda_client.tag_resource(Resource=lambda_arn, Tags={'app_group': self.group, 'app_name': self.app_name})
LOG.info("Successfully updated Lambda configuration.") | [
"def",
"update_function_configuration",
"(",
"self",
",",
"vpc_config",
")",
":",
"LOG",
".",
"info",
"(",
"'Updating configuration for lambda function: %s'",
",",
"self",
".",
"app_name",
")",
"try",
":",
"self",
".",
"lambda_client",
".",
"update_function_configuration",
"(",
"Environment",
"=",
"self",
".",
"lambda_environment",
",",
"FunctionName",
"=",
"self",
".",
"app_name",
",",
"Runtime",
"=",
"self",
".",
"runtime",
",",
"Role",
"=",
"self",
".",
"role_arn",
",",
"Handler",
"=",
"self",
".",
"handler",
",",
"Description",
"=",
"self",
".",
"description",
",",
"Timeout",
"=",
"int",
"(",
"self",
".",
"timeout",
")",
",",
"MemorySize",
"=",
"int",
"(",
"self",
".",
"memory",
")",
",",
"VpcConfig",
"=",
"vpc_config",
")",
"if",
"self",
".",
"concurrency_limit",
":",
"self",
".",
"lambda_client",
".",
"put_function_concurrency",
"(",
"FunctionName",
"=",
"self",
".",
"app_name",
",",
"ReservedConcurrentExecutions",
"=",
"self",
".",
"concurrency_limit",
")",
"else",
":",
"self",
".",
"lambda_client",
".",
"delete_function_concurrency",
"(",
"FunctionName",
"=",
"self",
".",
"app_name",
")",
"except",
"boto3",
".",
"exceptions",
".",
"botocore",
".",
"exceptions",
".",
"ClientError",
"as",
"error",
":",
"if",
"'CreateNetworkInterface'",
"in",
"error",
".",
"response",
"[",
"'Error'",
"]",
"[",
"'Message'",
"]",
":",
"message",
"=",
"'{0} is missing \"ec2:CreateNetworkInterface\"'",
".",
"format",
"(",
"self",
".",
"role_arn",
")",
"LOG",
".",
"debug",
"(",
"message",
")",
"raise",
"SystemExit",
"(",
"message",
")",
"raise",
"LOG",
".",
"info",
"(",
"'Updating Lambda function tags'",
")",
"lambda_arn",
"=",
"get_lambda_arn",
"(",
"self",
".",
"app_name",
",",
"self",
".",
"env",
",",
"self",
".",
"region",
")",
"self",
".",
"lambda_client",
".",
"tag_resource",
"(",
"Resource",
"=",
"lambda_arn",
",",
"Tags",
"=",
"{",
"'app_group'",
":",
"self",
".",
"group",
",",
"'app_name'",
":",
"self",
".",
"app_name",
"}",
")",
"LOG",
".",
"info",
"(",
"\"Successfully updated Lambda configuration.\"",
")"
] | Update existing Lambda function configuration.
Args:
vpc_config (dict): Dictionary of SubnetIds and SecurityGroupsIds for using
a VPC in lambda | [
"Update",
"existing",
"Lambda",
"function",
"configuration",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/awslambda.py#L165-L206 |
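A minimal sketch of the boto3 calls this method wraps, assuming a 'dev' AWS profile, the 'us-east-1' region, and an existing function named 'myapp' (all hypothetical values):
import boto3
lambda_client = boto3.Session(profile_name='dev', region_name='us-east-1').client('lambda')
# Same update pattern as above: an empty VpcConfig keeps the function out of a VPC.
lambda_client.update_function_configuration(
    FunctionName='myapp',
    Timeout=60,
    MemorySize=256,
    VpcConfig={'SubnetIds': [], 'SecurityGroupIds': []})
# Reserved concurrency is set and cleared with the same paired calls used above.
lambda_client.put_function_concurrency(FunctionName='myapp', ReservedConcurrentExecutions=5)
lambda_client.delete_function_concurrency(FunctionName='myapp')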
6,067 | foremast/foremast | src/foremast/awslambda/awslambda.py | LambdaFunction.create_function | def create_function(self, vpc_config):
"""Create lambda function, configures lambda parameters.
We need to upload a non-zero ZIP when creating the function. We upload a
hello_world Python Lambda function since AWS doesn't care which
executable is in the ZIP.
Args:
vpc_config (dict): Dictionary of SubnetIds and SecurityGroupsIds for using
a VPC in lambda
"""
zip_file = 'lambda-holder.zip'
with zipfile.ZipFile(zip_file, mode='w') as zipped:
zipped.writestr('index.py', 'print "Hello world"')
contents = ''
with open('lambda-holder.zip', 'rb') as openfile:
contents = openfile.read()
LOG.info('Creating lambda function: %s', self.app_name)
try:
self.lambda_client.create_function(
Environment=self.lambda_environment,
FunctionName=self.app_name,
Runtime=self.runtime,
Role=self.role_arn,
Handler=self.handler,
Code={'ZipFile': contents},
Description=self.description,
Timeout=int(self.timeout),
MemorySize=int(self.memory),
Publish=False,
VpcConfig=vpc_config,
Tags={'app_group': self.group,
'app_name': self.app_name})
except boto3.exceptions.botocore.exceptions.ClientError as error:
if 'CreateNetworkInterface' in error.response['Error']['Message']:
message = '{0} is missing "ec2:CreateNetworkInterface"'.format(self.role_arn)
LOG.critical(message)
raise SystemExit(message)
raise
LOG.info("Successfully created Lambda function and alias") | python | def create_function(self, vpc_config):
"""Create lambda function, configures lambda parameters.
We need to upload a non-zero ZIP when creating the function. We upload a
hello_world Python Lambda function since AWS doesn't care which
executable is in the ZIP.
Args:
vpc_config (dict): Dictionary of SubnetIds and SecurityGroupsIds for using
a VPC in lambda
"""
zip_file = 'lambda-holder.zip'
with zipfile.ZipFile(zip_file, mode='w') as zipped:
zipped.writestr('index.py', 'print "Hello world"')
contents = ''
with open('lambda-holder.zip', 'rb') as openfile:
contents = openfile.read()
LOG.info('Creating lambda function: %s', self.app_name)
try:
self.lambda_client.create_function(
Environment=self.lambda_environment,
FunctionName=self.app_name,
Runtime=self.runtime,
Role=self.role_arn,
Handler=self.handler,
Code={'ZipFile': contents},
Description=self.description,
Timeout=int(self.timeout),
MemorySize=int(self.memory),
Publish=False,
VpcConfig=vpc_config,
Tags={'app_group': self.group,
'app_name': self.app_name})
except boto3.exceptions.botocore.exceptions.ClientError as error:
if 'CreateNetworkInterface' in error.response['Error']['Message']:
message = '{0} is missing "ec2:CreateNetworkInterface"'.format(self.role_arn)
LOG.critical(message)
raise SystemExit(message)
raise
LOG.info("Successfully created Lambda function and alias") | [
"def",
"create_function",
"(",
"self",
",",
"vpc_config",
")",
":",
"zip_file",
"=",
"'lambda-holder.zip'",
"with",
"zipfile",
".",
"ZipFile",
"(",
"zip_file",
",",
"mode",
"=",
"'w'",
")",
"as",
"zipped",
":",
"zipped",
".",
"writestr",
"(",
"'index.py'",
",",
"'print \"Hello world\"'",
")",
"contents",
"=",
"''",
"with",
"open",
"(",
"'lambda-holder.zip'",
",",
"'rb'",
")",
"as",
"openfile",
":",
"contents",
"=",
"openfile",
".",
"read",
"(",
")",
"LOG",
".",
"info",
"(",
"'Creating lambda function: %s'",
",",
"self",
".",
"app_name",
")",
"try",
":",
"self",
".",
"lambda_client",
".",
"create_function",
"(",
"Environment",
"=",
"self",
".",
"lambda_environment",
",",
"FunctionName",
"=",
"self",
".",
"app_name",
",",
"Runtime",
"=",
"self",
".",
"runtime",
",",
"Role",
"=",
"self",
".",
"role_arn",
",",
"Handler",
"=",
"self",
".",
"handler",
",",
"Code",
"=",
"{",
"'ZipFile'",
":",
"contents",
"}",
",",
"Description",
"=",
"self",
".",
"description",
",",
"Timeout",
"=",
"int",
"(",
"self",
".",
"timeout",
")",
",",
"MemorySize",
"=",
"int",
"(",
"self",
".",
"memory",
")",
",",
"Publish",
"=",
"False",
",",
"VpcConfig",
"=",
"vpc_config",
",",
"Tags",
"=",
"{",
"'app_group'",
":",
"self",
".",
"group",
",",
"'app_name'",
":",
"self",
".",
"app_name",
"}",
")",
"except",
"boto3",
".",
"exceptions",
".",
"botocore",
".",
"exceptions",
".",
"ClientError",
"as",
"error",
":",
"if",
"'CreateNetworkInterface'",
"in",
"error",
".",
"response",
"[",
"'Error'",
"]",
"[",
"'Message'",
"]",
":",
"message",
"=",
"'{0} is missing \"ec2:CreateNetworkInterface\"'",
".",
"format",
"(",
"self",
".",
"role_arn",
")",
"LOG",
".",
"critical",
"(",
"message",
")",
"raise",
"SystemExit",
"(",
"message",
")",
"raise",
"LOG",
".",
"info",
"(",
"\"Successfully created Lambda function and alias\"",
")"
] | Create lambda function, configures lambda parameters.
We need to upload a non-zero ZIP when creating the function. We upload a
hello_world Python Lambda function since AWS doesn't care which
executable is in the ZIP.
Args:
vpc_config (dict): Dictionary of SubnetIds and SecurityGroupsIds for using
a VPC in lambda | [
"Create",
"lambda",
"function",
"configures",
"lambda",
"parameters",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/awslambda.py#L209-L253 |
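The placeholder-ZIP trick above can also be done in memory; a small sketch (the 'index.py' name and payload mirror the code above, the in-memory buffer is an assumption of this sketch):
import io
import zipfile
buf = io.BytesIO()
with zipfile.ZipFile(buf, mode='w') as zipped:
    # Any non-empty entry works; AWS only requires a non-zero ZIP at create time.
    zipped.writestr('index.py', 'print("Hello world")')
contents = buf.getvalue()  # bytes suitable for Code={'ZipFile': contents}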
6,068 | foremast/foremast | src/foremast/awslambda/awslambda.py | LambdaFunction.create_lambda_function | def create_lambda_function(self):
"""Create or update Lambda function."""
vpc_config = self._vpc_config()
if self._check_lambda():
self.update_function_configuration(vpc_config)
else:
self.create_function(vpc_config)
if self._check_lambda_alias():
self.update_alias()
else:
self.create_alias() | python | def create_lambda_function(self):
"""Create or update Lambda function."""
vpc_config = self._vpc_config()
if self._check_lambda():
self.update_function_configuration(vpc_config)
else:
self.create_function(vpc_config)
if self._check_lambda_alias():
self.update_alias()
else:
self.create_alias() | [
"def",
"create_lambda_function",
"(",
"self",
")",
":",
"vpc_config",
"=",
"self",
".",
"_vpc_config",
"(",
")",
"if",
"self",
".",
"_check_lambda",
"(",
")",
":",
"self",
".",
"update_function_configuration",
"(",
"vpc_config",
")",
"else",
":",
"self",
".",
"create_function",
"(",
"vpc_config",
")",
"if",
"self",
".",
"_check_lambda_alias",
"(",
")",
":",
"self",
".",
"update_alias",
"(",
")",
"else",
":",
"self",
".",
"create_alias",
"(",
")"
] | Create or update Lambda function. | [
"Create",
"or",
"update",
"Lambda",
"function",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/awslambda.py#L255-L267 |
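A hypothetical driver for the create-or-update flow above; the constructor keyword names are assumptions inferred from the attributes used in this class, not a documented signature:
function = LambdaFunction(app='myapp', env='dev', region='us-east-1', prop_path='properties.json')
function.create_lambda_function()  # creates or updates the function, then its alias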
6,069 | foremast/foremast | src/foremast/securitygroup/destroy_sg/destroy_sg.py | destroy_sg | def destroy_sg(app='', env='', region='', **_):
"""Destroy Security Group.
Args:
app (str): Spinnaker Application name.
env (str): Deployment environment.
region (str): Region name, e.g. us-east-1.
Returns:
True upon successful completion.
"""
vpc = get_vpc_id(account=env, region=region)
url = '{api}/securityGroups/{env}/{region}/{app}'.format(api=API_URL, env=env, region=region, app=app)
payload = {'vpcId': vpc}
security_group = requests.get(url, params=payload, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT)
if not security_group:
LOG.info('Nothing to delete.')
else:
LOG.info('Found Security Group in %(region)s: %(name)s', security_group)
destroy_request = get_template('destroy/destroy_sg.json.j2', app=app, env=env, region=region, vpc=vpc)
wait_for_task(destroy_request)
return True | python | def destroy_sg(app='', env='', region='', **_):
"""Destroy Security Group.
Args:
app (str): Spinnaker Application name.
env (str): Deployment environment.
region (str): Region name, e.g. us-east-1.
Returns:
True upon successful completion.
"""
vpc = get_vpc_id(account=env, region=region)
url = '{api}/securityGroups/{env}/{region}/{app}'.format(api=API_URL, env=env, region=region, app=app)
payload = {'vpcId': vpc}
security_group = requests.get(url, params=payload, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT)
if not security_group:
LOG.info('Nothing to delete.')
else:
LOG.info('Found Security Group in %(region)s: %(name)s', security_group)
destroy_request = get_template('destroy/destroy_sg.json.j2', app=app, env=env, region=region, vpc=vpc)
wait_for_task(destroy_request)
return True | [
"def",
"destroy_sg",
"(",
"app",
"=",
"''",
",",
"env",
"=",
"''",
",",
"region",
"=",
"''",
",",
"*",
"*",
"_",
")",
":",
"vpc",
"=",
"get_vpc_id",
"(",
"account",
"=",
"env",
",",
"region",
"=",
"region",
")",
"url",
"=",
"'{api}/securityGroups/{env}/{region}/{app}'",
".",
"format",
"(",
"api",
"=",
"API_URL",
",",
"env",
"=",
"env",
",",
"region",
"=",
"region",
",",
"app",
"=",
"app",
")",
"payload",
"=",
"{",
"'vpcId'",
":",
"vpc",
"}",
"security_group",
"=",
"requests",
".",
"get",
"(",
"url",
",",
"params",
"=",
"payload",
",",
"verify",
"=",
"GATE_CA_BUNDLE",
",",
"cert",
"=",
"GATE_CLIENT_CERT",
")",
"if",
"not",
"security_group",
":",
"LOG",
".",
"info",
"(",
"'Nothing to delete.'",
")",
"else",
":",
"LOG",
".",
"info",
"(",
"'Found Security Group in %(region)s: %(name)s'",
",",
"security_group",
")",
"destroy_request",
"=",
"get_template",
"(",
"'destroy/destroy_sg.json.j2'",
",",
"app",
"=",
"app",
",",
"env",
"=",
"env",
",",
"region",
"=",
"region",
",",
"vpc",
"=",
"vpc",
")",
"wait_for_task",
"(",
"destroy_request",
")",
"return",
"True"
] | Destroy Security Group.
Args:
app (str): Spinnaker Application name.
env (str): Deployment environment.
region (str): Region name, e.g. us-east-1.
Returns:
True upon successful completion. | [
"Destroy",
"Security",
"Group",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/securitygroup/destroy_sg/destroy_sg.py#L27-L52 |
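A usage sketch, assuming a reachable Spinnaker Gate (API_URL) and credentials for the 'dev' account; the application name is hypothetical:
from foremast.securitygroup.destroy_sg.destroy_sg import destroy_sg  # import path inferred from the record's file path
destroy_sg(app='myapp', env='dev', region='us-east-1')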
6,070 | foremast/foremast | src/foremast/s3/destroy_s3/destroy_s3.py | destroy_s3 | def destroy_s3(app='', env='dev', **_):
"""Destroy S3 Resources for _app_ in _env_.
Args:
app (str): Application name
env (str): Deployment environment/account name
Returns:
boolean: True if destroyed successfully
"""
session = boto3.Session(profile_name=env)
client = session.resource('s3')
generated = get_details(app=app, env=env)
archaius = generated.archaius()
bucket = client.Bucket(archaius['bucket'])
for item in bucket.objects.filter(Prefix=archaius['path']):
item.Object().delete()
LOG.info('Deleted: %s/%s', item.bucket_name, item.key)
return True | python | def destroy_s3(app='', env='dev', **_):
"""Destroy S3 Resources for _app_ in _env_.
Args:
app (str): Application name
env (str): Deployment environment/account name
Returns:
boolean: True if destroyed successfully
"""
session = boto3.Session(profile_name=env)
client = session.resource('s3')
generated = get_details(app=app, env=env)
archaius = generated.archaius()
bucket = client.Bucket(archaius['bucket'])
for item in bucket.objects.filter(Prefix=archaius['path']):
item.Object().delete()
LOG.info('Deleted: %s/%s', item.bucket_name, item.key)
return True | [
"def",
"destroy_s3",
"(",
"app",
"=",
"''",
",",
"env",
"=",
"'dev'",
",",
"*",
"*",
"_",
")",
":",
"session",
"=",
"boto3",
".",
"Session",
"(",
"profile_name",
"=",
"env",
")",
"client",
"=",
"session",
".",
"resource",
"(",
"'s3'",
")",
"generated",
"=",
"get_details",
"(",
"app",
"=",
"app",
",",
"env",
"=",
"env",
")",
"archaius",
"=",
"generated",
".",
"archaius",
"(",
")",
"bucket",
"=",
"client",
".",
"Bucket",
"(",
"archaius",
"[",
"'bucket'",
"]",
")",
"for",
"item",
"in",
"bucket",
".",
"objects",
".",
"filter",
"(",
"Prefix",
"=",
"archaius",
"[",
"'path'",
"]",
")",
":",
"item",
".",
"Object",
"(",
")",
".",
"delete",
"(",
")",
"LOG",
".",
"info",
"(",
"'Deleted: %s/%s'",
",",
"item",
".",
"bucket_name",
",",
"item",
".",
"key",
")",
"return",
"True"
] | Destroy S3 Resources for _app_ in _env_.
Args:
app (str): Application name
env (str): Deployment environment/account name
Returns:
boolean: True if destroyed successfully | [
"Destroy",
"S3",
"Resources",
"for",
"_app_",
"in",
"_env_",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/s3/destroy_s3/destroy_s3.py#L26-L48 |
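The prefix-scoped delete at the heart of destroy_s3, as a standalone boto3 sketch; the bucket name and prefix are hypothetical stand-ins for the values Archaius generates:
import boto3
bucket = boto3.Session(profile_name='dev').resource('s3').Bucket('archaius-bucket')
for item in bucket.objects.filter(Prefix='myapp/'):
    item.Object().delete()  # deletes only keys under the application's prefix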
6,071 | foremast/foremast | src/foremast/app/__main__.py | main | def main():
"""Entry point for creating a Spinnaker application."""
# Setup parser
parser = argparse.ArgumentParser()
add_debug(parser)
add_app(parser)
parser.add_argument(
'--email', help='Email address to associate with application', default='[email protected]')
parser.add_argument('--project', help='Git project to associate with application', default='None')
parser.add_argument('--repo', help='Git repo to associate with application', default='None')
parser.add_argument('--git', help='Git URI', default=None)
args = parser.parse_args()
logging.basicConfig(format=LOGGING_FORMAT)
logging.getLogger(__package__.split('.')[0]).setLevel(args.debug)
if args.git and args.git != 'None':
parsed = gogoutils.Parser(args.git).parse_url()
generated = gogoutils.Generator(*parsed, formats=APP_FORMATS)
project = generated.project
repo = generated.repo
else:
project = args.project
repo = args.repo
spinnakerapps = SpinnakerApp(app=args.app, email=args.email, project=project, repo=repo)
spinnakerapps.create_app() | python | def main():
"""Entry point for creating a Spinnaker application."""
# Setup parser
parser = argparse.ArgumentParser()
add_debug(parser)
add_app(parser)
parser.add_argument(
'--email', help='Email address to associate with application', default='[email protected]')
parser.add_argument('--project', help='Git project to associate with application', default='None')
parser.add_argument('--repo', help='Git repo to associate with application', default='None')
parser.add_argument('--git', help='Git URI', default=None)
args = parser.parse_args()
logging.basicConfig(format=LOGGING_FORMAT)
logging.getLogger(__package__.split('.')[0]).setLevel(args.debug)
if args.git and args.git != 'None':
parsed = gogoutils.Parser(args.git).parse_url()
generated = gogoutils.Generator(*parsed, formats=APP_FORMATS)
project = generated.project
repo = generated.repo
else:
project = args.project
repo = args.repo
spinnakerapps = SpinnakerApp(app=args.app, email=args.email, project=project, repo=repo)
spinnakerapps.create_app() | [
"def",
"main",
"(",
")",
":",
"# Setup parser",
"parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
")",
"add_debug",
"(",
"parser",
")",
"add_app",
"(",
"parser",
")",
"parser",
".",
"add_argument",
"(",
"'--email'",
",",
"help",
"=",
"'Email address to associate with application'",
",",
"default",
"=",
"'[email protected]'",
")",
"parser",
".",
"add_argument",
"(",
"'--project'",
",",
"help",
"=",
"'Git project to associate with application'",
",",
"default",
"=",
"'None'",
")",
"parser",
".",
"add_argument",
"(",
"'--repo'",
",",
"help",
"=",
"'Git repo to associate with application'",
",",
"default",
"=",
"'None'",
")",
"parser",
".",
"add_argument",
"(",
"'--git'",
",",
"help",
"=",
"'Git URI'",
",",
"default",
"=",
"None",
")",
"args",
"=",
"parser",
".",
"parse_args",
"(",
")",
"logging",
".",
"basicConfig",
"(",
"format",
"=",
"LOGGING_FORMAT",
")",
"logging",
".",
"getLogger",
"(",
"__package__",
".",
"split",
"(",
"'.'",
")",
"[",
"0",
"]",
")",
".",
"setLevel",
"(",
"args",
".",
"debug",
")",
"if",
"args",
".",
"git",
"and",
"args",
".",
"git",
"!=",
"'None'",
":",
"parsed",
"=",
"gogoutils",
".",
"Parser",
"(",
"args",
".",
"git",
")",
".",
"parse_url",
"(",
")",
"generated",
"=",
"gogoutils",
".",
"Generator",
"(",
"*",
"parsed",
",",
"formats",
"=",
"APP_FORMATS",
")",
"project",
"=",
"generated",
".",
"project",
"repo",
"=",
"generated",
".",
"repo",
"else",
":",
"project",
"=",
"args",
".",
"project",
"repo",
"=",
"args",
".",
"repo",
"spinnakerapps",
"=",
"SpinnakerApp",
"(",
"app",
"=",
"args",
".",
"app",
",",
"email",
"=",
"args",
".",
"email",
",",
"project",
"=",
"project",
",",
"repo",
"=",
"repo",
")",
"spinnakerapps",
".",
"create_app",
"(",
")"
] | Entry point for creating a Spinnaker application. | [
"Entry",
"point",
"for",
"creating",
"a",
"Spinnaker",
"application",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/app/__main__.py#L30-L56 |
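A sketch of the Git-URI branch above, assuming gogoutils is installed and using a hypothetical repository URL:
import gogoutils
parsed = gogoutils.Parser('https://github.com/example/myapp.git').parse_url()
# main() then feeds the parsed pieces into gogoutils.Generator(*parsed, formats=APP_FORMATS)
# to derive the project ('example') and repo ('myapp') names.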
6,072 | foremast/foremast | src/foremast/awslambda/s3_event/destroy_s3_event/destroy_s3_event.py | destroy_s3_event | def destroy_s3_event(app, env, region):
"""Destroy S3 event.
Args:
app (str): Spinnaker Application name.
env (str): Deployment environment.
region (str): AWS region.
Returns:
bool: True upon successful completion.
"""
# TODO: how do we know which bucket to process if triggers dict is empty?
# Maybe list buckets and see which has notification to that lambda defined?
# TODO: buckets should be named the same as apps, what if one app has multiple buckets?
# bucket = rules.get('bucket')
generated = get_details(app=app, env=env)
bucket = generated.s3_app_bucket()
session = boto3.Session(profile_name=env, region_name=region)
s3_client = session.client('s3')
config = {}
s3_client.put_bucket_notification_configuration(Bucket=bucket, NotificationConfiguration=config)
LOG.debug("Deleted Lambda S3 notification")
return True | python | def destroy_s3_event(app, env, region):
"""Destroy S3 event.
Args:
app (str): Spinnaker Application name.
env (str): Deployment environment.
region (str): AWS region.
Returns:
bool: True upon successful completion.
"""
# TODO: how do we know which bucket to process if triggers dict is empty?
# Maybe list buckets and see which has notification to that lambda defined?
# TODO: buckets should be named the same as apps, what if one app has multiple buckets?
# bucket = rules.get('bucket')
generated = get_details(app=app, env=env)
bucket = generated.s3_app_bucket()
session = boto3.Session(profile_name=env, region_name=region)
s3_client = session.client('s3')
config = {}
s3_client.put_bucket_notification_configuration(Bucket=bucket, NotificationConfiguration=config)
LOG.debug("Deleted Lambda S3 notification")
return True | [
"def",
"destroy_s3_event",
"(",
"app",
",",
"env",
",",
"region",
")",
":",
"# TODO: how do we know which bucket to process if triggers dict is empty?",
"# Maybe list buckets and see which has notification to that lambda defined?",
"# TODO: buckets should be named the same as apps, what if one app has multiple buckets?",
"# bucket = rules.get('bucket')",
"generated",
"=",
"get_details",
"(",
"app",
"=",
"app",
",",
"env",
"=",
"env",
")",
"bucket",
"=",
"generated",
".",
"s3_app_bucket",
"(",
")",
"session",
"=",
"boto3",
".",
"Session",
"(",
"profile_name",
"=",
"env",
",",
"region_name",
"=",
"region",
")",
"s3_client",
"=",
"session",
".",
"client",
"(",
"'s3'",
")",
"config",
"=",
"{",
"}",
"s3_client",
".",
"put_bucket_notification_configuration",
"(",
"Bucket",
"=",
"bucket",
",",
"NotificationConfiguration",
"=",
"config",
")",
"LOG",
".",
"debug",
"(",
"\"Deleted Lambda S3 notification\"",
")",
"return",
"True"
] | Destroy S3 event.
Args:
app (str): Spinnaker Application name.
env (str): Deployment environment.
region (str): AWS region.
Returns:
bool: True upon successful completion. | [
"Destroy",
"S3",
"event",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/s3_event/destroy_s3_event/destroy_s3_event.py#L26-L53 |
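The key detail above is that an empty NotificationConfiguration clears every event on the bucket; a standalone sketch with hypothetical profile and bucket names:
import boto3
s3_client = boto3.Session(profile_name='dev', region_name='us-east-1').client('s3')
s3_client.put_bucket_notification_configuration(Bucket='myapp-bucket', NotificationConfiguration={})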
6,073 | foremast/foremast | src/foremast/iam/destroy_iam/destroy_iam.py | destroy_iam | def destroy_iam(app='', env='dev', **_):
"""Destroy IAM Resources.
Args:
app (str): Spinnaker Application name.
env (str): Deployment environment, e.g. dev, stage, prod.
Returns:
True upon successful completion.
"""
session = boto3.Session(profile_name=env)
client = session.client('iam')
generated = get_details(env=env, app=app)
generated_iam = generated.iam()
app_details = collections.namedtuple('AppDetails', generated_iam.keys())
details = app_details(**generated_iam)
LOG.debug('Application details: %s', details)
resource_action(
client,
action='remove_user_from_group',
log_format='Removed user from group: %(UserName)s ~> %(GroupName)s',
GroupName=details.group,
UserName=details.user)
resource_action(client, action='delete_user', log_format='Destroyed user: %(UserName)s', UserName=details.user)
resource_action(client, action='delete_group', log_format='Destroyed group: %(GroupName)s', GroupName=details.group)
resource_action(
client,
action='remove_role_from_instance_profile',
log_format='Destroyed Instance Profile from Role: '
'%(InstanceProfileName)s ~> %(RoleName)s',
InstanceProfileName=details.profile,
RoleName=details.role)
resource_action(
client,
action='delete_instance_profile',
log_format='Destroyed Instance Profile: %(InstanceProfileName)s',
InstanceProfileName=details.profile)
role_policies = []
try:
role_policies = resource_action(
client,
action='list_role_policies',
log_format='Found Role Policies for %(RoleName)s.',
RoleName=details.role)['PolicyNames']
except TypeError:
LOG.info('Role %s not found.', details.role)
for policy in role_policies:
resource_action(
client,
action='delete_role_policy',
log_format='Removed Inline Policy from Role: '
'%(PolicyName)s ~> %(RoleName)s',
RoleName=details.role,
PolicyName=policy)
attached_role_policies = []
try:
attached_role_policies = resource_action(
client,
action='list_attached_role_policies',
log_format='Found attached Role Policies for %(RoleName)s.',
RoleName=details.role)['AttachedPolicies']
except TypeError:
LOG.info('Role %s not found.', details.role)
for policy in attached_role_policies:
resource_action(
client,
action='detach_role_policy',
log_format='Detached Policy from Role: '
'%(PolicyArn)s ~> %(RoleName)s',
RoleName=details.role,
PolicyArn=policy['PolicyArn'])
resource_action(client, action='delete_role', log_format='Destroyed Role: %(RoleName)s', RoleName=details.role) | python | def destroy_iam(app='', env='dev', **_):
"""Destroy IAM Resources.
Args:
app (str): Spinnaker Application name.
env (str): Deployment environment, e.g. dev, stage, prod.
Returns:
True upon successful completion.
"""
session = boto3.Session(profile_name=env)
client = session.client('iam')
generated = get_details(env=env, app=app)
generated_iam = generated.iam()
app_details = collections.namedtuple('AppDetails', generated_iam.keys())
details = app_details(**generated_iam)
LOG.debug('Application details: %s', details)
resource_action(
client,
action='remove_user_from_group',
log_format='Removed user from group: %(UserName)s ~> %(GroupName)s',
GroupName=details.group,
UserName=details.user)
resource_action(client, action='delete_user', log_format='Destroyed user: %(UserName)s', UserName=details.user)
resource_action(client, action='delete_group', log_format='Destroyed group: %(GroupName)s', GroupName=details.group)
resource_action(
client,
action='remove_role_from_instance_profile',
log_format='Destroyed Instance Profile from Role: '
'%(InstanceProfileName)s ~> %(RoleName)s',
InstanceProfileName=details.profile,
RoleName=details.role)
resource_action(
client,
action='delete_instance_profile',
log_format='Destroyed Instance Profile: %(InstanceProfileName)s',
InstanceProfileName=details.profile)
role_policies = []
try:
role_policies = resource_action(
client,
action='list_role_policies',
log_format='Found Role Policies for %(RoleName)s.',
RoleName=details.role)['PolicyNames']
except TypeError:
LOG.info('Role %s not found.', details.role)
for policy in role_policies:
resource_action(
client,
action='delete_role_policy',
log_format='Removed Inline Policy from Role: '
'%(PolicyName)s ~> %(RoleName)s',
RoleName=details.role,
PolicyName=policy)
attached_role_policies = []
try:
attached_role_policies = resource_action(
client,
action='list_attached_role_policies',
log_format='Found attached Role Policies for %(RoleName)s.',
RoleName=details.role)['AttachedPolicies']
except TypeError:
LOG.info('Role %s not found.', details.role)
for policy in attached_role_policies:
resource_action(
client,
action='detach_role_policy',
log_format='Detached Policy from Role: '
'%(PolicyArn)s ~> %(RoleName)s',
RoleName=details.role,
PolicyArn=policy['PolicyArn'])
resource_action(client, action='delete_role', log_format='Destroyed Role: %(RoleName)s', RoleName=details.role) | [
"def",
"destroy_iam",
"(",
"app",
"=",
"''",
",",
"env",
"=",
"'dev'",
",",
"*",
"*",
"_",
")",
":",
"session",
"=",
"boto3",
".",
"Session",
"(",
"profile_name",
"=",
"env",
")",
"client",
"=",
"session",
".",
"client",
"(",
"'iam'",
")",
"generated",
"=",
"get_details",
"(",
"env",
"=",
"env",
",",
"app",
"=",
"app",
")",
"generated_iam",
"=",
"generated",
".",
"iam",
"(",
")",
"app_details",
"=",
"collections",
".",
"namedtuple",
"(",
"'AppDetails'",
",",
"generated_iam",
".",
"keys",
"(",
")",
")",
"details",
"=",
"app_details",
"(",
"*",
"*",
"generated_iam",
")",
"LOG",
".",
"debug",
"(",
"'Application details: %s'",
",",
"details",
")",
"resource_action",
"(",
"client",
",",
"action",
"=",
"'remove_user_from_group'",
",",
"log_format",
"=",
"'Removed user from group: %(UserName)s ~> %(GroupName)s'",
",",
"GroupName",
"=",
"details",
".",
"group",
",",
"UserName",
"=",
"details",
".",
"user",
")",
"resource_action",
"(",
"client",
",",
"action",
"=",
"'delete_user'",
",",
"log_format",
"=",
"'Destroyed user: %(UserName)s'",
",",
"UserName",
"=",
"details",
".",
"user",
")",
"resource_action",
"(",
"client",
",",
"action",
"=",
"'delete_group'",
",",
"log_format",
"=",
"'Destroyed group: %(GroupName)s'",
",",
"GroupName",
"=",
"details",
".",
"group",
")",
"resource_action",
"(",
"client",
",",
"action",
"=",
"'remove_role_from_instance_profile'",
",",
"log_format",
"=",
"'Destroyed Instance Profile from Role: '",
"'%(InstanceProfileName)s ~> %(RoleName)s'",
",",
"InstanceProfileName",
"=",
"details",
".",
"profile",
",",
"RoleName",
"=",
"details",
".",
"role",
")",
"resource_action",
"(",
"client",
",",
"action",
"=",
"'delete_instance_profile'",
",",
"log_format",
"=",
"'Destroyed Instance Profile: %(InstanceProfileName)s'",
",",
"InstanceProfileName",
"=",
"details",
".",
"profile",
")",
"role_policies",
"=",
"[",
"]",
"try",
":",
"role_policies",
"=",
"resource_action",
"(",
"client",
",",
"action",
"=",
"'list_role_policies'",
",",
"log_format",
"=",
"'Found Role Policies for %(RoleName)s.'",
",",
"RoleName",
"=",
"details",
".",
"role",
")",
"[",
"'PolicyNames'",
"]",
"except",
"TypeError",
":",
"LOG",
".",
"info",
"(",
"'Role %s not found.'",
",",
"details",
".",
"role",
")",
"for",
"policy",
"in",
"role_policies",
":",
"resource_action",
"(",
"client",
",",
"action",
"=",
"'delete_role_policy'",
",",
"log_format",
"=",
"'Removed Inline Policy from Role: '",
"'%(PolicyName)s ~> %(RoleName)s'",
",",
"RoleName",
"=",
"details",
".",
"role",
",",
"PolicyName",
"=",
"policy",
")",
"attached_role_policies",
"=",
"[",
"]",
"try",
":",
"attached_role_policies",
"=",
"resource_action",
"(",
"client",
",",
"action",
"=",
"'list_attached_role_policies'",
",",
"log_format",
"=",
"'Found attached Role Polices for %(RoleName)s.'",
",",
"RoleName",
"=",
"details",
".",
"role",
")",
"[",
"'AttachedPolicies'",
"]",
"except",
"TypeError",
":",
"LOG",
".",
"info",
"(",
"'Role %s not found.'",
",",
"details",
".",
"role",
")",
"for",
"policy",
"in",
"attached_role_policies",
":",
"resource_action",
"(",
"client",
",",
"action",
"=",
"'detach_role_policy'",
",",
"log_format",
"=",
"'Detached Policy from Role: '",
"'%(PolicyArn)s ~> %(RoleName)s'",
",",
"RoleName",
"=",
"details",
".",
"role",
",",
"PolicyArn",
"=",
"policy",
"[",
"'PolicyArn'",
"]",
")",
"resource_action",
"(",
"client",
",",
"action",
"=",
"'delete_role'",
",",
"log_format",
"=",
"'Destroyed Role: %(RoleName)s'",
",",
"RoleName",
"=",
"details",
".",
"role",
")"
] | Destroy IAM Resources.
Args:
app (str): Spinnaker Application name.
env (str): Deployment environment, i.e. dev, stage, prod.
Returns:
True upon successful completion. | [
"Destroy",
"IAM",
"Resources",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/iam/destroy_iam/destroy_iam.py#L28-L108 |
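The same detach-then-delete ordering, sketched with raw boto3 calls instead of the resource_action wrapper; the role name is hypothetical, and a role with policies or an instance profile still attached cannot be deleted:
import boto3
iam = boto3.Session(profile_name='dev').client('iam')
role_name = 'myapp_role'
for policy in iam.list_attached_role_policies(RoleName=role_name)['AttachedPolicies']:
    iam.detach_role_policy(RoleName=role_name, PolicyArn=policy['PolicyArn'])
for policy_name in iam.list_role_policies(RoleName=role_name)['PolicyNames']:
    iam.delete_role_policy(RoleName=role_name, PolicyName=policy_name)
iam.delete_role(RoleName=role_name)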
6,074 | foremast/foremast | src/foremast/utils/roles.py | get_role_arn | def get_role_arn(role_name, env, region):
"""Get role ARN given role name.
Args:
role_name (str): Role name to lookup
env (str): Environment in which to lookup
region (str): Region
Returns:
ARN if role found
"""
session = boto3.Session(profile_name=env, region_name=region)
iam_client = session.client('iam')
LOG.debug('Searching for %s.', role_name)
role = iam_client.get_role(RoleName=role_name)
role_arn = role['Role']['Arn']
LOG.debug("Found role's %s ARN %s", role_name, role_arn)
return role_arn | python | def get_role_arn(role_name, env, region):
"""Get role ARN given role name.
Args:
role_name (str): Role name to look up
env (str): Environment in which to look up
region (str): Region
Returns:
ARN if role found
"""
session = boto3.Session(profile_name=env, region_name=region)
iam_client = session.client('iam')
LOG.debug('Searching for %s.', role_name)
role = iam_client.get_role(RoleName=role_name)
role_arn = role['Role']['Arn']
LOG.debug("Found role's %s ARN %s", role_name, role_arn)
return role_arn | [
"def",
"get_role_arn",
"(",
"role_name",
",",
"env",
",",
"region",
")",
":",
"session",
"=",
"boto3",
".",
"Session",
"(",
"profile_name",
"=",
"env",
",",
"region_name",
"=",
"region",
")",
"iam_client",
"=",
"session",
".",
"client",
"(",
"'iam'",
")",
"LOG",
".",
"debug",
"(",
"'Searching for %s.'",
",",
"role_name",
")",
"role",
"=",
"iam_client",
".",
"get_role",
"(",
"RoleName",
"=",
"role_name",
")",
"role_arn",
"=",
"role",
"[",
"'Role'",
"]",
"[",
"'Arn'",
"]",
"LOG",
".",
"debug",
"(",
"\"Found role's %s ARN %s\"",
",",
"role_name",
",",
"role_arn",
")",
"return",
"role_arn"
] | Get role ARN given role name.
Args:
role_name (str): Role name to look up
env (str): Environment in which to look up
region (str): Region
Returns:
ARN if role found | [
"Get",
"role",
"ARN",
"given",
"role",
"name",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/roles.py#L9-L31 |
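Usage sketch with hypothetical names; requires AWS credentials for the given profile:
role_arn = get_role_arn('myapp_role', 'dev', 'us-east-1')
# e.g. 'arn:aws:iam::123456789012:role/myapp_role' (account number is a placeholder)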
6,075 | foremast/foremast | src/foremast/iam/construct_policy.py | render_policy_template | def render_policy_template( # pylint: disable=too-many-arguments
account_number='',
app='coreforrest',
env='dev',
group='forrest',
items=None,
pipeline_settings=None,
region='us-east-1',
service=''):
"""Render IAM Policy template.
To support multiple statement blocks, JSON objects can be separated by a
comma. This function attempts to turn any invalid JSON into a valid list
based on this comma-separated assumption.
Args:
account_number (str): AWS Account number.
app (str): Name of Spinnaker Application.
env (str): Environment/Account in AWS
group (str): Application group/namespace
items (list): Resource names used to create a Policy per Resource.
region (str): AWS region.
pipeline_settings (dict): Settings from *pipeline.json*.
service (str): Name of cloud service to find matching IAM Policy
template.
Returns:
list: IAM Policy :obj:`dict` statements for the given service.
"""
statements = []
rendered_service_policy = get_template(
'infrastructure/iam/{0}.json.j2'.format(service),
account_number=account_number,
app=app,
env=env,
group=group,
region=region,
items=items,
settings=pipeline_settings)
try:
statement_block = json.loads(rendered_service_policy)
statements.append(statement_block)
except ValueError:
LOG.debug('Need to make %s template into list.', service)
statements = json.loads('[{0}]'.format(rendered_service_policy))
LOG.debug('Rendered IAM Policy statements: %s', statements)
return statements | python | def render_policy_template( # pylint: disable=too-many-arguments
account_number='',
app='coreforrest',
env='dev',
group='forrest',
items=None,
pipeline_settings=None,
region='us-east-1',
service=''):
"""Render IAM Policy template.
To support multiple statement blocks, JSON objects can be separated by a
comma. This function attempts to turn any invalid JSON into a valid list
based on this comma-separated assumption.
Args:
account_number (str): AWS Account number.
app (str): Name of Spinnaker Application.
env (str): Environment/Account in AWS
group (str): Application group/namespace
items (list): Resource names used to create a Policy per Resource.
region (str): AWS region.
pipeline_settings (dict): Settings from *pipeline.json*.
service (str): Name of cloud service to find matching IAM Policy
template.
Returns:
list: IAM Policy :obj:`dict` statements for the given service.
"""
statements = []
rendered_service_policy = get_template(
'infrastructure/iam/{0}.json.j2'.format(service),
account_number=account_number,
app=app,
env=env,
group=group,
region=region,
items=items,
settings=pipeline_settings)
try:
statement_block = json.loads(rendered_service_policy)
statements.append(statement_block)
except ValueError:
LOG.debug('Need to make %s template into list.', service)
statements = json.loads('[{0}]'.format(rendered_service_policy))
LOG.debug('Rendered IAM Policy statements: %s', statements)
return statements | [
"def",
"render_policy_template",
"(",
"# pylint: disable=too-many-arguments",
"account_number",
"=",
"''",
",",
"app",
"=",
"'coreforrest'",
",",
"env",
"=",
"'dev'",
",",
"group",
"=",
"'forrest'",
",",
"items",
"=",
"None",
",",
"pipeline_settings",
"=",
"None",
",",
"region",
"=",
"'us-east-1'",
",",
"service",
"=",
"''",
")",
":",
"statements",
"=",
"[",
"]",
"rendered_service_policy",
"=",
"get_template",
"(",
"'infrastructure/iam/{0}.json.j2'",
".",
"format",
"(",
"service",
")",
",",
"account_number",
"=",
"account_number",
",",
"app",
"=",
"app",
",",
"env",
"=",
"env",
",",
"group",
"=",
"group",
",",
"region",
"=",
"region",
",",
"items",
"=",
"items",
",",
"settings",
"=",
"pipeline_settings",
")",
"try",
":",
"statement_block",
"=",
"json",
".",
"loads",
"(",
"rendered_service_policy",
")",
"statements",
".",
"append",
"(",
"statement_block",
")",
"except",
"ValueError",
":",
"LOG",
".",
"debug",
"(",
"'Need to make %s template into list.'",
",",
"service",
")",
"statements",
"=",
"json",
".",
"loads",
"(",
"'[{0}]'",
".",
"format",
"(",
"rendered_service_policy",
")",
")",
"LOG",
".",
"debug",
"(",
"'Rendered IAM Policy statements: %s'",
",",
"statements",
")",
"return",
"statements"
] | Render IAM Policy template.
To support multiple statement blocks, JSON objects can be separated by a
comma. This function attempts to turn any invalid JSON into a valid list
based on this comma-separated assumption.
Args:
account_number (str): AWS Account number.
app (str): Name of Spinnaker Application.
env (str): Environment/Account in AWS
group (str):A Application group/namespace
items (list): Resource names used to create a Policy per Resource.
region (str): AWS region.
pipeline_settings (dict): Settings from *pipeline.json*.
service (str): Name of cloud service to find matching IAM Policy
template.
Returns:
list: IAM Policy :obj:`dict` statements for the given service. | [
"Render",
"IAM",
"Policy",
"template",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/iam/construct_policy.py#L57-L108 |
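The comma-wrapping fallback above in isolation; the two statement objects are hypothetical:
import json
rendered = '{"Effect": "Allow"}, {"Effect": "Deny"}'  # two JSON objects, invalid as one document
try:
    statements = [json.loads(rendered)]
except ValueError:
    statements = json.loads('[{0}]'.format(rendered))  # wrap in brackets to form a valid list
print(len(statements))  # 2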
6,076 | foremast/foremast | src/foremast/iam/construct_policy.py | construct_policy | def construct_policy(app='coreforrest', env='dev', group='forrest', region='us-east-1', pipeline_settings=None):
"""Assemble IAM Policy for _app_.
Args:
app (str): Name of Spinnaker Application.
env (str): Environment/Account in AWS
group (str): Application group/namespace
region (str): AWS region
pipeline_settings (dict): Settings from *pipeline.json*.
Returns:
json: Custom IAM Policy for _app_.
None: When no *services* have been defined in *pipeline.json*.
"""
LOG.info('Create custom IAM Policy for %s.', app)
services = pipeline_settings.get('services', {})
LOG.debug('Found requested services: %s', services)
services = auto_service(pipeline_settings=pipeline_settings, services=services)
if services:
credential = get_env_credential(env=env)
account_number = credential['accountId']
statements = []
for service, value in services.items():
if value is True:
items = []
elif isinstance(value, str):
items = [value]
else:
items = value
rendered_statements = render_policy_template(
account_number=account_number,
app=app,
env=env,
group=group,
items=items,
pipeline_settings=pipeline_settings,
region=region,
service=service)
statements.extend(rendered_statements)
if statements:
policy_json = get_template('infrastructure/iam/wrapper.json.j2', statements=json.dumps(statements))
else:
LOG.info('No services defined for %s.', app)
policy_json = None
return policy_json | python | def construct_policy(app='coreforrest', env='dev', group='forrest', region='us-east-1', pipeline_settings=None):
"""Assemble IAM Policy for _app_.
Args:
app (str): Name of Spinnaker Application.
env (str): Environment/Account in AWS
group (str): Application group/namespace
region (str): AWS region
pipeline_settings (dict): Settings from *pipeline.json*.
Returns:
json: Custom IAM Policy for _app_.
None: When no *services* have been defined in *pipeline.json*.
"""
LOG.info('Create custom IAM Policy for %s.', app)
services = pipeline_settings.get('services', {})
LOG.debug('Found requested services: %s', services)
services = auto_service(pipeline_settings=pipeline_settings, services=services)
if services:
credential = get_env_credential(env=env)
account_number = credential['accountId']
statements = []
for service, value in services.items():
if value is True:
items = []
elif isinstance(value, str):
items = [value]
else:
items = value
rendered_statements = render_policy_template(
account_number=account_number,
app=app,
env=env,
group=group,
items=items,
pipeline_settings=pipeline_settings,
region=region,
service=service)
statements.extend(rendered_statements)
if statements:
policy_json = get_template('infrastructure/iam/wrapper.json.j2', statements=json.dumps(statements))
else:
LOG.info('No services defined for %s.', app)
policy_json = None
return policy_json | [
"def",
"construct_policy",
"(",
"app",
"=",
"'coreforrest'",
",",
"env",
"=",
"'dev'",
",",
"group",
"=",
"'forrest'",
",",
"region",
"=",
"'us-east-1'",
",",
"pipeline_settings",
"=",
"None",
")",
":",
"LOG",
".",
"info",
"(",
"'Create custom IAM Policy for %s.'",
",",
"app",
")",
"services",
"=",
"pipeline_settings",
".",
"get",
"(",
"'services'",
",",
"{",
"}",
")",
"LOG",
".",
"debug",
"(",
"'Found requested services: %s'",
",",
"services",
")",
"services",
"=",
"auto_service",
"(",
"pipeline_settings",
"=",
"pipeline_settings",
",",
"services",
"=",
"services",
")",
"if",
"services",
":",
"credential",
"=",
"get_env_credential",
"(",
"env",
"=",
"env",
")",
"account_number",
"=",
"credential",
"[",
"'accountId'",
"]",
"statements",
"=",
"[",
"]",
"for",
"service",
",",
"value",
"in",
"services",
".",
"items",
"(",
")",
":",
"if",
"value",
"is",
"True",
":",
"items",
"=",
"[",
"]",
"elif",
"isinstance",
"(",
"value",
",",
"str",
")",
":",
"items",
"=",
"[",
"value",
"]",
"else",
":",
"items",
"=",
"value",
"rendered_statements",
"=",
"render_policy_template",
"(",
"account_number",
"=",
"account_number",
",",
"app",
"=",
"app",
",",
"env",
"=",
"env",
",",
"group",
"=",
"group",
",",
"items",
"=",
"items",
",",
"pipeline_settings",
"=",
"pipeline_settings",
",",
"region",
"=",
"region",
",",
"service",
"=",
"service",
")",
"statements",
".",
"extend",
"(",
"rendered_statements",
")",
"if",
"statements",
":",
"policy_json",
"=",
"get_template",
"(",
"'infrastructure/iam/wrapper.json.j2'",
",",
"statements",
"=",
"json",
".",
"dumps",
"(",
"statements",
")",
")",
"else",
":",
"LOG",
".",
"info",
"(",
"'No services defined for %s.'",
",",
"app",
")",
"policy_json",
"=",
"None",
"return",
"policy_json"
] | Assemble IAM Policy for _app_.
Args:
app (str): Name of Spinnaker Application.
env (str): Environment/Account in AWS
group (str):A Application group/namespace
region (str): AWS region
pipeline_settings (dict): Settings from *pipeline.json*.
Returns:
json: Custom IAM Policy for _app_.
None: When no *services* have been defined in *pipeline.json*. | [
"Assemble",
"IAM",
"Policy",
"for",
"_app_",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/iam/construct_policy.py#L111-L163 |
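A usage sketch; the services block is hypothetical and the call needs a reachable Gate for the account-number lookup:
pipeline_settings = {'services': {'s3': True, 'sqs': 'myapp-queue', 'sns': ['alerts', 'audit']}}
policy_json = construct_policy(app='myapp', env='dev', group='forrest',
                               region='us-east-1', pipeline_settings=pipeline_settings)
# True renders a template with no items, a string becomes a one-item list,
# and a list is passed through as-is.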
6,077 | foremast/foremast | src/foremast/validate.py | validate_gate | def validate_gate():
"""Check Gate connection."""
try:
credentials = get_env_credential()
LOG.debug('Found credentials: %s', credentials)
LOG.info('Gate working.')
except TypeError:
LOG.fatal('Gate connection not valid: API_URL = %s', API_URL) | python | def validate_gate():
"""Check Gate connection."""
try:
credentials = get_env_credential()
LOG.debug('Found credentials: %s', credentials)
LOG.info('Gate working.')
except TypeError:
LOG.fatal('Gate connection not valid: API_URL = %s', API_URL) | [
"def",
"validate_gate",
"(",
")",
":",
"try",
":",
"credentials",
"=",
"get_env_credential",
"(",
")",
"LOG",
".",
"debug",
"(",
"'Found credentials: %s'",
",",
"credentials",
")",
"LOG",
".",
"info",
"(",
"'Gate working.'",
")",
"except",
"TypeError",
":",
"LOG",
".",
"fatal",
"(",
"'Gate connection not valid: API_URL = %s'",
",",
"API_URL",
")"
] | Check Gate connection. | [
"Check",
"Gate",
"connection",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/validate.py#L10-L17 |
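Usage is a bare call; with a misconfigured API_URL the TypeError path logs the fatal message instead of raising:
validate_gate()  # logs 'Gate working.' on success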
6,078 | foremast/foremast | src/foremast/awslambda/s3_event/s3_event.py | create_s3_event | def create_s3_event(app_name, env, region, bucket, triggers):
"""Create S3 lambda events from triggers
Args:
app_name (str): name of the lambda function
env (str): Environment/Account for lambda function
region (str): AWS region of the lambda function
bucket (str): Name of the S3 bucket receiving the events
triggers (list): List of triggers from the settings
"""
session = boto3.Session(profile_name=env, region_name=region)
s3_client = session.client('s3')
lambda_alias_arn = get_lambda_alias_arn(app_name, env, region)
LOG.debug("Lambda ARN for lambda function %s is %s.", app_name, lambda_alias_arn)
LOG.debug("Creating S3 events for bucket %s", bucket)
# allow lambda trigger permission from bucket
principal = 's3.amazonaws.com'
statement_id = "{}_s3_{}".format(app_name, bucket).replace('.', '')
source_arn = "arn:aws:s3:::{}".format(bucket)
add_lambda_permissions(
function=lambda_alias_arn,
env=env,
region=region,
principal=principal,
statement_id=statement_id,
source_arn=source_arn)
# configure events on s3 bucket to trigger lambda function
template_kwargs = {"lambda_arn": lambda_alias_arn, "triggers": triggers}
config = get_template(template_file='infrastructure/lambda/s3_event.json.j2', **template_kwargs)
s3_client.put_bucket_notification_configuration(Bucket=bucket, NotificationConfiguration=json.loads(config))
LOG.info("Created lambda %s S3 event on bucket %s", app_name, bucket) | python | def create_s3_event(app_name, env, region, bucket, triggers):
"""Create S3 lambda events from triggers
Args:
app_name (str): name of the lambda function
env (str): Environment/Account for lambda function
region (str): AWS region of the lambda function
bucket (str): Name of the S3 bucket receiving the events
triggers (list): List of triggers from the settings
"""
session = boto3.Session(profile_name=env, region_name=region)
s3_client = session.client('s3')
lambda_alias_arn = get_lambda_alias_arn(app_name, env, region)
LOG.debug("Lambda ARN for lambda function %s is %s.", app_name, lambda_alias_arn)
LOG.debug("Creating S3 events for bucket %s", bucket)
# allow lambda trigger permission from bucket
principal = 's3.amazonaws.com'
statement_id = "{}_s3_{}".format(app_name, bucket).replace('.', '')
source_arn = "arn:aws:s3:::{}".format(bucket)
add_lambda_permissions(
function=lambda_alias_arn,
env=env,
region=region,
principal=principal,
statement_id=statement_id,
source_arn=source_arn)
# configure events on s3 bucket to trigger lambda function
template_kwargs = {"lambda_arn": lambda_alias_arn, "triggers": triggers}
config = get_template(template_file='infrastructure/lambda/s3_event.json.j2', **template_kwargs)
s3_client.put_bucket_notification_configuration(Bucket=bucket, NotificationConfiguration=json.loads(config))
LOG.info("Created lambda %s S3 event on bucket %s", app_name, bucket) | [
"def",
"create_s3_event",
"(",
"app_name",
",",
"env",
",",
"region",
",",
"bucket",
",",
"triggers",
")",
":",
"session",
"=",
"boto3",
".",
"Session",
"(",
"profile_name",
"=",
"env",
",",
"region_name",
"=",
"region",
")",
"s3_client",
"=",
"session",
".",
"client",
"(",
"'s3'",
")",
"lambda_alias_arn",
"=",
"get_lambda_alias_arn",
"(",
"app_name",
",",
"env",
",",
"region",
")",
"LOG",
".",
"debug",
"(",
"\"Lambda ARN for lambda function %s is %s.\"",
",",
"app_name",
",",
"lambda_alias_arn",
")",
"LOG",
".",
"debug",
"(",
"\"Creating S3 events for bucket %s\"",
",",
"bucket",
")",
"# allow lambda trigger permission from bucket",
"principal",
"=",
"'s3.amazonaws.com'",
"statement_id",
"=",
"\"{}_s3_{}\"",
".",
"format",
"(",
"app_name",
",",
"bucket",
")",
".",
"replace",
"(",
"'.'",
",",
"''",
")",
"source_arn",
"=",
"\"arn:aws:s3:::{}\"",
".",
"format",
"(",
"bucket",
")",
"add_lambda_permissions",
"(",
"function",
"=",
"lambda_alias_arn",
",",
"env",
"=",
"env",
",",
"region",
"=",
"region",
",",
"principal",
"=",
"principal",
",",
"statement_id",
"=",
"statement_id",
",",
"source_arn",
"=",
"source_arn",
")",
"# configure events on s3 bucket to trigger lambda function",
"template_kwargs",
"=",
"{",
"\"lambda_arn\"",
":",
"lambda_alias_arn",
",",
"\"triggers\"",
":",
"triggers",
"}",
"config",
"=",
"get_template",
"(",
"template_file",
"=",
"'infrastructure/lambda/s3_event.json.j2'",
",",
"*",
"*",
"template_kwargs",
")",
"s3_client",
".",
"put_bucket_notification_configuration",
"(",
"Bucket",
"=",
"bucket",
",",
"NotificationConfiguration",
"=",
"json",
".",
"loads",
"(",
"config",
")",
")",
"LOG",
".",
"info",
"(",
"\"Created lambda %s S3 event on bucket %s\"",
",",
"app_name",
",",
"bucket",
")"
] | Create S3 lambda events from triggers
Args:
app_name (str): name of the lambda function
env (str): Environment/Account for lambda function
region (str): AWS region of the lambda function
bucket (str): Name of the S3 bucket receiving the events
triggers (list): List of triggers from the settings
"Create",
"S3",
"lambda",
"events",
"from",
"triggers"
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/s3_event/s3_event.py#L28-L62 |
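The statement-ID construction above strips dots because bucket names may contain them while Lambda statement IDs may not; a worked example with a hypothetical app and bucket:
app_name, bucket = 'myapp', 'my.data.bucket'
statement_id = '{}_s3_{}'.format(app_name, bucket).replace('.', '')  # 'myapp_s3_mydatabucket'
source_arn = 'arn:aws:s3:::{}'.format(bucket)  # 'arn:aws:s3:::my.data.bucket'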
6,079 | foremast/foremast | src/foremast/utils/generate_filename.py | generate_packer_filename | def generate_packer_filename(provider, region, builder):
"""Generate a filename to be used by packer.
Args:
provider (str): Name of Spinnaker provider.
region (str): Name of provider region to use.
builder (str): Name of builder process type.
Returns:
str: Generated filename based on parameters.
"""
filename = '{0}_{1}_{2}.json'.format(provider, region, builder)
return filename | python | def generate_packer_filename(provider, region, builder):
"""Generate a filename to be used by packer.
Args:
provider (str): Name of Spinnaker provider.
region (str): Name of provider region to use.
builder (str): Name of builder process type.
Returns:
str: Generated filename based on parameters.
"""
filename = '{0}_{1}_{2}.json'.format(provider, region, builder)
return filename | [
"def",
"generate_packer_filename",
"(",
"provider",
",",
"region",
",",
"builder",
")",
":",
"filename",
"=",
"'{0}_{1}_{2}.json'",
".",
"format",
"(",
"provider",
",",
"region",
",",
"builder",
")",
"return",
"filename"
] | Generate a filename to be used by packer.
Args:
provider (str): Name of Spinnaker provider.
region (str): Name of provider region to use.
builder (str): Name of builder process type.
Returns:
str: Generated filename based on parameters. | [
"Generate",
"a",
"filename",
"to",
"be",
"used",
"by",
"packer",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/generate_filename.py#L19-L32 |
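A quick worked example of the naming scheme:
generate_packer_filename('aws', 'us-east-1', 'ebs')  # -> 'aws_us-east-1_ebs.json'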
6,080 | foremast/foremast | src/foremast/utils/templates.py | get_template | def get_template(template_file='', **kwargs):
"""Get the Jinja2 template and renders with dict _kwargs_.
Args:
template_file (str): name of the template file
kwargs: Keywords to use for rendering the Jinja2 template.
Returns:
String of rendered JSON template.
"""
template = get_template_object(template_file)
LOG.info('Rendering template %s', template.filename)
for key, value in kwargs.items():
LOG.debug('%s => %s', key, value)
rendered_json = template.render(**kwargs)
LOG.debug('Rendered JSON:\n%s', rendered_json)
return rendered_json | python | def get_template(template_file='', **kwargs):
"""Get the Jinja2 template and renders with dict _kwargs_.
Args:
template_file (str): name of the template file
kwargs: Keywords to use for rendering the Jinja2 template.
Returns:
String of rendered JSON template.
"""
template = get_template_object(template_file)
LOG.info('Rendering template %s', template.filename)
for key, value in kwargs.items():
LOG.debug('%s => %s', key, value)
rendered_json = template.render(**kwargs)
LOG.debug('Rendered JSON:\n%s', rendered_json)
return rendered_json | [
"def",
"get_template",
"(",
"template_file",
"=",
"''",
",",
"*",
"*",
"kwargs",
")",
":",
"template",
"=",
"get_template_object",
"(",
"template_file",
")",
"LOG",
".",
"info",
"(",
"'Rendering template %s'",
",",
"template",
".",
"filename",
")",
"for",
"key",
",",
"value",
"in",
"kwargs",
".",
"items",
"(",
")",
":",
"LOG",
".",
"debug",
"(",
"'%s => %s'",
",",
"key",
",",
"value",
")",
"rendered_json",
"=",
"template",
".",
"render",
"(",
"*",
"*",
"kwargs",
")",
"LOG",
".",
"debug",
"(",
"'Rendered JSON:\\n%s'",
",",
"rendered_json",
")",
"return",
"rendered_json"
] | Get the Jinja2 template and render it with dict _kwargs_.
Args:
template_file (str): name of the template file
kwargs: Keywords to use for rendering the Jinja2 template.
Returns:
String of rendered JSON template. | [
"Get",
"the",
"Jinja2",
"template",
"and",
"renders",
"with",
"dict",
"_kwargs_",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/templates.py#L70-L90 |
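Usage sketch, reusing the wrapper template name seen in construct_policy above; the template must exist on the loader's search path:
policy_json = get_template('infrastructure/iam/wrapper.json.j2', statements='[]')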
6,081 | foremast/foremast | src/foremast/pipeline/renumerate_stages.py | renumerate_stages | def renumerate_stages(pipeline):
"""Renumber Pipeline Stage reference IDs to account for dependencies.
Stage order is defined in the templates. The ``refId`` field dictates
if a stage should be mainline or parallel to other stages.
* ``master`` - A mainline required stage. Other stages depend on it.
* ``branch`` - A stage that should be run in parallel to master stages.
* ``merge`` - A stage that is parallel but other stages still depend on it.
Args:
pipeline (dict): Completed Pipeline ready for renumbering.
Returns:
dict: Pipeline ready to be sent to Spinnaker.
"""
stages = pipeline['stages']
main_index = 0
branch_index = 0
previous_refid = ''
for stage in stages:
current_refid = stage['refId'].lower()
if current_refid == 'master':
if main_index == 0:
stage['requisiteStageRefIds'] = []
else:
stage['requisiteStageRefIds'] = [str(main_index)]
main_index += 1
stage['refId'] = str(main_index)
elif current_refid == 'branch':
# increments a branch_index to account for multiple parallel stages
if previous_refid == 'branch':
branch_index += 1
else:
branch_index = 0
stage['refId'] = str((main_index * 100) + branch_index)
stage['requisiteStageRefIds'] = [str(main_index)]
elif current_refid == 'merge':
# TODO: Add logic to handle merge stages.
pass
previous_refid = current_refid
LOG.debug('step=%(name)s\trefId=%(refId)s\t' 'requisiteStageRefIds=%(requisiteStageRefIds)s', stage)
return pipeline | python | def renumerate_stages(pipeline):
"""Renumber Pipeline Stage reference IDs to account for dependencies.
Stage order is defined in the templates. The ``refId`` field dictates
if a stage should be mainline or parallel to other stages.
* ``master`` - A mainline required stage. Other stages depend on it.
* ``branch`` - A stage that should be run in parallel to master stages.
* ``merge`` - A stage that is parallel but other stages still depend on it.
Args:
pipeline (dict): Completed Pipeline ready for renumbering.
Returns:
dict: Pipeline ready to be sent to Spinnaker.
"""
stages = pipeline['stages']
main_index = 0
branch_index = 0
previous_refid = ''
for stage in stages:
current_refid = stage['refId'].lower()
if current_refid == 'master':
if main_index == 0:
stage['requisiteStageRefIds'] = []
else:
stage['requisiteStageRefIds'] = [str(main_index)]
main_index += 1
stage['refId'] = str(main_index)
elif current_refid == 'branch':
# increments a branch_index to account for multiple parallel stages
if previous_refid == 'branch':
branch_index += 1
else:
branch_index = 0
stage['refId'] = str((main_index * 100) + branch_index)
stage['requisiteStageRefIds'] = [str(main_index)]
elif current_refid == 'merge':
# TODO: Add logic to handle merge stages.
pass
previous_refid = current_refid
LOG.debug('step=%(name)s\trefId=%(refId)s\t' 'requisiteStageRefIds=%(requisiteStageRefIds)s', stage)
return pipeline | [
"def",
"renumerate_stages",
"(",
"pipeline",
")",
":",
"stages",
"=",
"pipeline",
"[",
"'stages'",
"]",
"main_index",
"=",
"0",
"branch_index",
"=",
"0",
"previous_refid",
"=",
"''",
"for",
"stage",
"in",
"stages",
":",
"current_refid",
"=",
"stage",
"[",
"'refId'",
"]",
".",
"lower",
"(",
")",
"if",
"current_refid",
"==",
"'master'",
":",
"if",
"main_index",
"==",
"0",
":",
"stage",
"[",
"'requisiteStageRefIds'",
"]",
"=",
"[",
"]",
"else",
":",
"stage",
"[",
"'requisiteStageRefIds'",
"]",
"=",
"[",
"str",
"(",
"main_index",
")",
"]",
"main_index",
"+=",
"1",
"stage",
"[",
"'refId'",
"]",
"=",
"str",
"(",
"main_index",
")",
"elif",
"current_refid",
"==",
"'branch'",
":",
"# increments a branch_index to account for multiple parrallel stages",
"if",
"previous_refid",
"==",
"'branch'",
":",
"branch_index",
"+=",
"1",
"else",
":",
"branch_index",
"=",
"0",
"stage",
"[",
"'refId'",
"]",
"=",
"str",
"(",
"(",
"main_index",
"*",
"100",
")",
"+",
"branch_index",
")",
"stage",
"[",
"'requisiteStageRefIds'",
"]",
"=",
"[",
"str",
"(",
"main_index",
")",
"]",
"elif",
"current_refid",
"==",
"'merge'",
":",
"# TODO: Added logic to handle merge stages.",
"pass",
"previous_refid",
"=",
"current_refid",
"LOG",
".",
"debug",
"(",
"'step=%(name)s\\trefId=%(refId)s\\t'",
"'requisiteStageRefIds=%(requisiteStageRefIds)s'",
",",
"stage",
")",
"return",
"pipeline"
] | Renumber Pipeline Stage reference IDs to account for dependencies.
stage order is defined in the templates. The ``refId`` field dictates
if a stage should be mainline or parallel to other stages.
* ``master`` - A mainline required stage. Other stages depend on it
* ``branch`` - A stage that should be ran in parallel to master stages.
* ``merge`` - A stage thatis parallel but other stages still depend on it.
Args:
pipeline (dict): Completed Pipeline ready for renumeration.
Returns:
dict: Pipeline ready to be sent to Spinnaker. | [
"Renumber",
"Pipeline",
"Stage",
"reference",
"IDs",
"to",
"account",
"for",
"dependencies",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/pipeline/renumerate_stages.py#L22-L68 |
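A minimal usage sketch (hedged: the stage names below are hypothetical, and the import path is inferred from the path field above):

from foremast.pipeline.renumerate_stages import renumerate_stages

pipeline = {'stages': [
    {'name': 'bake', 'refId': 'master', 'requisiteStageRefIds': []},
    {'name': 'deploy', 'refId': 'master', 'requisiteStageRefIds': []},
    {'name': 'tag', 'refId': 'branch', 'requisiteStageRefIds': []},
    {'name': 'notify', 'refId': 'branch', 'requisiteStageRefIds': []},
]}

for stage in renumerate_stages(pipeline)['stages']:
    print(stage['name'], stage['refId'], stage['requisiteStageRefIds'])
# bake 1 []; deploy 2 ['1']; tag 200 ['2']; notify 201 ['2']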
6,082 | foremast/foremast | src/foremast/utils/tasks.py | post_task | def post_task(task_data, task_uri='/tasks'):
"""Create Spinnaker Task.
Args:
task_data (str or dict): Task JSON definition.
Returns:
str: Spinnaker Task ID.
Raises:
AssertionError: Error response from Spinnaker.
"""
url = '{}/{}'.format(API_URL, task_uri.lstrip('/'))
if isinstance(task_data, str):
task_json = task_data
else:
task_json = json.dumps(task_data)
resp = requests.post(url, data=task_json, headers=HEADERS, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT)
resp_json = resp.json()
LOG.debug(resp_json)
assert resp.ok, 'Spinnaker communication error: {0}'.format(resp.text)
return resp_json['ref'] | python | def post_task(task_data, task_uri='/tasks'):
"""Create Spinnaker Task.
Args:
task_data (str or dict): Task JSON definition.
Returns:
str: Spinnaker Task ID.
Raises:
AssertionError: Error response from Spinnaker.
"""
url = '{}/{}'.format(API_URL, task_uri.lstrip('/'))
if isinstance(task_data, str):
task_json = task_data
else:
task_json = json.dumps(task_data)
resp = requests.post(url, data=task_json, headers=HEADERS, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT)
resp_json = resp.json()
LOG.debug(resp_json)
assert resp.ok, 'Spinnaker communication error: {0}'.format(resp.text)
return resp_json['ref'] | [
"def",
"post_task",
"(",
"task_data",
",",
"task_uri",
"=",
"'/tasks'",
")",
":",
"url",
"=",
"'{}/{}'",
".",
"format",
"(",
"API_URL",
",",
"task_uri",
".",
"lstrip",
"(",
"'/'",
")",
")",
"if",
"isinstance",
"(",
"task_data",
",",
"str",
")",
":",
"task_json",
"=",
"task_data",
"else",
":",
"task_json",
"=",
"json",
".",
"dumps",
"(",
"task_data",
")",
"resp",
"=",
"requests",
".",
"post",
"(",
"url",
",",
"data",
"=",
"task_json",
",",
"headers",
"=",
"HEADERS",
",",
"verify",
"=",
"GATE_CA_BUNDLE",
",",
"cert",
"=",
"GATE_CLIENT_CERT",
")",
"resp_json",
"=",
"resp",
".",
"json",
"(",
")",
"LOG",
".",
"debug",
"(",
"resp_json",
")",
"assert",
"resp",
".",
"ok",
",",
"'Spinnaker communication error: {0}'",
".",
"format",
"(",
"resp",
".",
"text",
")",
"return",
"resp_json",
"[",
"'ref'",
"]"
] | Create Spinnaker Task.
Args:
task_data (str or dict): Task JSON definition.
Returns:
str: Spinnaker Task ID.
Raises:
AssertionError: Error response from Spinnaker. | [
"Create",
"Spinnaker",
"Task",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/tasks.py#L29-L56 |
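A hedged usage sketch for post_task, assuming a reachable Gate endpoint (API_URL, HEADERS, and the certificate settings come from Foremast's configuration); the job payload is hypothetical:

from foremast.utils.tasks import post_task

task = {
    'application': 'exampleapp',  # hypothetical application name
    'description': 'Upsert security group',
    'job': [{'type': 'upsertSecurityGroup', 'credentials': 'dev'}],
}
task_ref = post_task(task)  # dict payloads are serialized via json.dumps
print(task_ref)             # e.g. '/tasks/<task-id>'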
6,083 | foremast/foremast | src/foremast/utils/tasks.py | _check_task | def _check_task(taskid):
"""Check Spinnaker Task status.
Args:
taskid (str): Existing Spinnaker Task ID.
Returns:
str: Task status.
"""
try:
taskurl = taskid.get('ref', '0000')
except AttributeError:
taskurl = taskid
taskid = taskurl.split('/tasks/')[-1]
LOG.info('Checking taskid %s', taskid)
url = '{}/tasks/{}'.format(API_URL, taskid)
task_response = requests.get(url, headers=HEADERS, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT)
LOG.debug(task_response.json())
assert task_response.ok, 'Spinnaker communication error: {0}'.format(task_response.text)
task_state = task_response.json()
status = task_state['status']
LOG.info('Current task status: %s', status)
if status == 'SUCCEEDED': # pylint: disable=no-else-return
return status
elif status == 'TERMINAL':
raise SpinnakerTaskError(task_state)
else:
raise ValueError | python | def _check_task(taskid):
"""Check Spinnaker Task status.
Args:
taskid (str): Existing Spinnaker Task ID.
Returns:
str: Task status.
"""
try:
taskurl = taskid.get('ref', '0000')
except AttributeError:
taskurl = taskid
taskid = taskurl.split('/tasks/')[-1]
LOG.info('Checking taskid %s', taskid)
url = '{}/tasks/{}'.format(API_URL, taskid)
task_response = requests.get(url, headers=HEADERS, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT)
LOG.debug(task_response.json())
assert task_response.ok, 'Spinnaker communication error: {0}'.format(task_response.text)
task_state = task_response.json()
status = task_state['status']
LOG.info('Current task status: %s', status)
if status == 'SUCCEEDED': # pylint: disable=no-else-return
return status
elif status == 'TERMINAL':
raise SpinnakerTaskError(task_state)
else:
raise ValueError | [
"def",
"_check_task",
"(",
"taskid",
")",
":",
"try",
":",
"taskurl",
"=",
"taskid",
".",
"get",
"(",
"'ref'",
",",
"'0000'",
")",
"except",
"AttributeError",
":",
"taskurl",
"=",
"taskid",
"taskid",
"=",
"taskurl",
".",
"split",
"(",
"'/tasks/'",
")",
"[",
"-",
"1",
"]",
"LOG",
".",
"info",
"(",
"'Checking taskid %s'",
",",
"taskid",
")",
"url",
"=",
"'{}/tasks/{}'",
".",
"format",
"(",
"API_URL",
",",
"taskid",
")",
"task_response",
"=",
"requests",
".",
"get",
"(",
"url",
",",
"headers",
"=",
"HEADERS",
",",
"verify",
"=",
"GATE_CA_BUNDLE",
",",
"cert",
"=",
"GATE_CLIENT_CERT",
")",
"LOG",
".",
"debug",
"(",
"task_response",
".",
"json",
"(",
")",
")",
"assert",
"task_response",
".",
"ok",
",",
"'Spinnaker communication error: {0}'",
".",
"format",
"(",
"task_response",
".",
"text",
")",
"task_state",
"=",
"task_response",
".",
"json",
"(",
")",
"status",
"=",
"task_state",
"[",
"'status'",
"]",
"LOG",
".",
"info",
"(",
"'Current task status: %s'",
",",
"status",
")",
"if",
"status",
"==",
"'SUCCEEDED'",
":",
"# pylint: disable=no-else-return",
"return",
"status",
"elif",
"status",
"==",
"'TERMINAL'",
":",
"raise",
"SpinnakerTaskError",
"(",
"task_state",
")",
"else",
":",
"raise",
"ValueError"
] | Check Spinnaker Task status.
Args:
taskid (str): Existing Spinnaker Task ID.
Returns:
str: Task status. | [
"Check",
"Spinnaker",
"Task",
"status",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/tasks.py#L59-L94 |
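_check_task raises ValueError for any status other than SUCCEEDED or TERMINAL, which the caller treats as "poll again" (see check_task below). Its taskid normalization also accepts either the raw ref string or a dict containing one; a standalone sketch of that branch:

for taskid in ('/tasks/01ABC', {'ref': '/tasks/01ABC'}):
    try:
        taskurl = taskid.get('ref', '0000')  # dict input
    except AttributeError:
        taskurl = taskid                     # plain string input
    print(taskurl.split('/tasks/')[-1])      # -> 01ABC in both cases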
6,084 | foremast/foremast | src/foremast/utils/tasks.py | check_task | def check_task(taskid, timeout=DEFAULT_TASK_TIMEOUT, wait=2):
"""Wrap check_task.
Args:
taskid (str): Existing Spinnaker Task ID.
timeout (int, optional): Consider Task failed after given seconds.
wait (int, optional): Seconds to pause between polling attempts.
Returns:
str: Task status.
Raises:
AssertionError: API did not respond with a 200 status code.
:obj:`foremast.exceptions.SpinnakerTaskInconclusiveError`: Task did not
reach a terminal state before the given time out.
"""
max_attempts = int(timeout / wait)
try:
return retry_call(
partial(_check_task, taskid),
max_attempts=max_attempts,
wait=wait,
exceptions=(AssertionError, ValueError), )
except ValueError:
raise SpinnakerTaskInconclusiveError('Task failed to complete in {0} seconds: {1}'.format(timeout, taskid)) | python | def check_task(taskid, timeout=DEFAULT_TASK_TIMEOUT, wait=2):
"""Wrap check_task.
Args:
taskid (str): Existing Spinnaker Task ID.
timeout (int, optional): Consider Task failed after given seconds.
wait (int, optional): Seconds to pause between polling attempts.
Returns:
str: Task status.
Raises:
AssertionError: API did not respond with a 200 status code.
:obj:`foremast.exceptions.SpinnakerTaskInconclusiveError`: Task did not
reach a terminal state before the given time out.
"""
max_attempts = int(timeout / wait)
try:
return retry_call(
partial(_check_task, taskid),
max_attempts=max_attempts,
wait=wait,
exceptions=(AssertionError, ValueError), )
except ValueError:
raise SpinnakerTaskInconclusiveError('Task failed to complete in {0} seconds: {1}'.format(timeout, taskid)) | [
"def",
"check_task",
"(",
"taskid",
",",
"timeout",
"=",
"DEFAULT_TASK_TIMEOUT",
",",
"wait",
"=",
"2",
")",
":",
"max_attempts",
"=",
"int",
"(",
"timeout",
"/",
"wait",
")",
"try",
":",
"return",
"retry_call",
"(",
"partial",
"(",
"_check_task",
",",
"taskid",
")",
",",
"max_attempts",
"=",
"max_attempts",
",",
"wait",
"=",
"wait",
",",
"exceptions",
"=",
"(",
"AssertionError",
",",
"ValueError",
")",
",",
")",
"except",
"ValueError",
":",
"raise",
"SpinnakerTaskInconclusiveError",
"(",
"'Task failed to complete in {0} seconds: {1}'",
".",
"format",
"(",
"timeout",
",",
"taskid",
")",
")"
] | Wrap _check_task.
Args:
taskid (str): Existing Spinnaker Task ID.
timeout (int, optional): Consider Task failed after given seconds.
wait (int, optional): Seconds to pause between polling attempts.
Returns:
str: Task status.
Raises:
AssertionError: API did not respond with a 200 status code.
:obj:`foremast.exceptions.SpinnakerTaskInconclusiveError`: Task did not
reach a terminal state before the given time out. | [
"Wrap",
"check_task",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/tasks.py#L97-L122 |
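A hedged polling sketch for check_task; the task ref is hypothetical, and Gate must be reachable:

from foremast.utils.tasks import check_task

# Poll every 2 seconds for up to 120 seconds (60 attempts):
status = check_task('/tasks/01ABC', timeout=120, wait=2)
# Returns 'SUCCEEDED' on success, raises SpinnakerTaskError on TERMINAL,
# and raises SpinnakerTaskInconclusiveError if the timeout elapses.
print(status)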
6,085 | foremast/foremast | src/foremast/utils/tasks.py | wait_for_task | def wait_for_task(task_data, task_uri='/tasks'):
"""Run task and check the result.
Args:
task_data (str or dict): the task JSON to execute
Returns:
str: Task status.
"""
taskid = post_task(task_data, task_uri)
if isinstance(task_data, str):
json_data = json.loads(task_data)
else:
json_data = task_data
# inspect the task to see if a timeout is configured
job = json_data['job'][0]
env = job.get('credentials')
task_type = job.get('type')
timeout = TASK_TIMEOUTS.get(env, dict()).get(task_type, DEFAULT_TASK_TIMEOUT)
LOG.debug("Task %s will timeout after %s", task_type, timeout)
return check_task(taskid, timeout) | python | def wait_for_task(task_data, task_uri='/tasks'):
"""Run task and check the result.
Args:
task_data (str or dict): the task JSON to execute
Returns:
str: Task status.
"""
taskid = post_task(task_data, task_uri)
if isinstance(task_data, str):
json_data = json.loads(task_data)
else:
json_data = task_data
# inspect the task to see if a timeout is configured
job = json_data['job'][0]
env = job.get('credentials')
task_type = job.get('type')
timeout = TASK_TIMEOUTS.get(env, dict()).get(task_type, DEFAULT_TASK_TIMEOUT)
LOG.debug("Task %s will timeout after %s", task_type, timeout)
return check_task(taskid, timeout) | [
"def",
"wait_for_task",
"(",
"task_data",
",",
"task_uri",
"=",
"'/tasks'",
")",
":",
"taskid",
"=",
"post_task",
"(",
"task_data",
",",
"task_uri",
")",
"if",
"isinstance",
"(",
"task_data",
",",
"str",
")",
":",
"json_data",
"=",
"json",
".",
"loads",
"(",
"task_data",
")",
"else",
":",
"json_data",
"=",
"task_data",
"# inspect the task to see if a timeout is configured",
"job",
"=",
"json_data",
"[",
"'job'",
"]",
"[",
"0",
"]",
"env",
"=",
"job",
".",
"get",
"(",
"'credentials'",
")",
"task_type",
"=",
"job",
".",
"get",
"(",
"'type'",
")",
"timeout",
"=",
"TASK_TIMEOUTS",
".",
"get",
"(",
"env",
",",
"dict",
"(",
")",
")",
".",
"get",
"(",
"task_type",
",",
"DEFAULT_TASK_TIMEOUT",
")",
"LOG",
".",
"debug",
"(",
"\"Task %s will timeout after %s\"",
",",
"task_type",
",",
"timeout",
")",
"return",
"check_task",
"(",
"taskid",
",",
"timeout",
")"
] | Run task and check the result.
Args:
task_data (str or dict): the task JSON to execute
Returns:
str: Task status. | [
"Run",
"task",
"and",
"check",
"the",
"result",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/tasks.py#L125-L151 |
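An end-to-end sketch with wait_for_task (hedged: the job payload is hypothetical; TASK_TIMEOUTS falls back to DEFAULT_TASK_TIMEOUT when no per-environment entry matches the job's credentials/type pair):

import json

from foremast.utils.tasks import wait_for_task

task = {
    'application': 'exampleapp',
    'job': [{'type': 'upsertSecurityGroup', 'credentials': 'dev'}],
}
status = wait_for_task(json.dumps(task))  # a dict would work as well
print(status)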
6,086 | foremast/foremast | src/foremast/s3/__main__.py | main | def main():
"""Create application.properties for a given application."""
logging.basicConfig(format=LOGGING_FORMAT)
parser = argparse.ArgumentParser(description=main.__doc__)
add_debug(parser)
add_app(parser)
add_env(parser)
add_properties(parser)
add_region(parser)
add_artifact_path(parser)
add_artifact_version(parser)
args = parser.parse_args()
logging.getLogger(__package__.split('.')[0]).setLevel(args.debug)
LOG.debug('Args: %s', vars(args))
rendered_props = get_properties(args.properties)
if rendered_props['pipeline']['type'] == 's3':
s3app = S3Apps(app=args.app, env=args.env, region=args.region, prop_path=args.properties)
s3app.create_bucket()
s3deploy = S3Deployment(
app=args.app,
env=args.env,
region=args.region,
prop_path=args.properties,
artifact_path=args.artifact_path,
artifact_version=args.artifact_version)
s3deploy.upload_artifacts()
else:
init_properties(**vars(args)) | python | def main():
"""Create application.properties for a given application."""
logging.basicConfig(format=LOGGING_FORMAT)
parser = argparse.ArgumentParser(description=main.__doc__)
add_debug(parser)
add_app(parser)
add_env(parser)
add_properties(parser)
add_region(parser)
add_artifact_path(parser)
add_artifact_version(parser)
args = parser.parse_args()
logging.getLogger(__package__.split('.')[0]).setLevel(args.debug)
LOG.debug('Args: %s', vars(args))
rendered_props = get_properties(args.properties)
if rendered_props['pipeline']['type'] == 's3':
s3app = S3Apps(app=args.app, env=args.env, region=args.region, prop_path=args.properties)
s3app.create_bucket()
s3deploy = S3Deployment(
app=args.app,
env=args.env,
region=args.region,
prop_path=args.properties,
artifact_path=args.artifact_path,
artifact_version=args.artifact_version)
s3deploy.upload_artifacts()
else:
init_properties(**vars(args)) | [
"def",
"main",
"(",
")",
":",
"logging",
".",
"basicConfig",
"(",
"format",
"=",
"LOGGING_FORMAT",
")",
"parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
"description",
"=",
"main",
".",
"__doc__",
")",
"add_debug",
"(",
"parser",
")",
"add_app",
"(",
"parser",
")",
"add_env",
"(",
"parser",
")",
"add_properties",
"(",
"parser",
")",
"add_region",
"(",
"parser",
")",
"add_artifact_path",
"(",
"parser",
")",
"add_artifact_version",
"(",
"parser",
")",
"args",
"=",
"parser",
".",
"parse_args",
"(",
")",
"logging",
".",
"getLogger",
"(",
"__package__",
".",
"split",
"(",
"'.'",
")",
"[",
"0",
"]",
")",
".",
"setLevel",
"(",
"args",
".",
"debug",
")",
"LOG",
".",
"debug",
"(",
"'Args: %s'",
",",
"vars",
"(",
"args",
")",
")",
"rendered_props",
"=",
"get_properties",
"(",
"args",
".",
"properties",
")",
"if",
"rendered_props",
"[",
"'pipeline'",
"]",
"[",
"'type'",
"]",
"==",
"'s3'",
":",
"s3app",
"=",
"S3Apps",
"(",
"app",
"=",
"args",
".",
"app",
",",
"env",
"=",
"args",
".",
"env",
",",
"region",
"=",
"args",
".",
"region",
",",
"prop_path",
"=",
"args",
".",
"properties",
")",
"s3app",
".",
"create_bucket",
"(",
")",
"s3deploy",
"=",
"S3Deployment",
"(",
"app",
"=",
"args",
".",
"app",
",",
"env",
"=",
"args",
".",
"env",
",",
"region",
"=",
"args",
".",
"region",
",",
"prop_path",
"=",
"args",
".",
"properties",
",",
"artifact_path",
"=",
"args",
".",
"artifact_path",
",",
"artifact_version",
"=",
"args",
".",
"artifact_version",
")",
"s3deploy",
".",
"upload_artifacts",
"(",
")",
"else",
":",
"init_properties",
"(",
"*",
"*",
"vars",
"(",
"args",
")",
")"
] | Create application.properties for a given application. | [
"Create",
"application",
".",
"properties",
"for",
"a",
"given",
"application",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/s3/__main__.py#L34-L68 |
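The entry point is argparse-driven; a hedged invocation sketch (the flag spellings below are assumptions inferred from the add_* helper names, not verified against foremast's argument helpers):

import sys

from foremast.s3.__main__ import main

sys.argv = ['foremast-s3', '--app', 'exampleapp', '--env', 'dev',
            '--region', 'us-east-1', '--properties', 'raw.properties.json',
            '--artifact-path', 'dist/', '--artifact-version', '1.0.0']
main()  # creates the bucket and uploads artifacts when pipeline type is 's3'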
6,087 | foremast/foremast | src/foremast/s3/create_archaius.py | init_properties | def init_properties(env='dev', app='unnecessary', **_):
"""Make sure _application.properties_ file exists in S3.
For applications with Archaius support, there needs to be a file that the
cloud environment variable points to.
Args:
env (str): Deployment environment/account, e.g. dev, stage, prod.
app (str): GitLab Project name.
Returns:
True when application.properties was found.
False when application.properties needed to be created.
"""
aws_env = boto3.session.Session(profile_name=env)
s3client = aws_env.resource('s3')
generated = get_details(app=app, env=env)
archaius = generated.archaius()
archaius_file = ('{path}/application.properties').format(path=archaius['path'])
try:
s3client.Object(archaius['bucket'], archaius_file).get()
LOG.info('Found: %(bucket)s/%(file)s', {'bucket': archaius['bucket'], 'file': archaius_file})
return True
except boto3.exceptions.botocore.client.ClientError:
s3client.Object(archaius['bucket'], archaius_file).put()
LOG.info('Created: %(bucket)s/%(file)s', {'bucket': archaius['bucket'], 'file': archaius_file})
return False | python | def init_properties(env='dev', app='unnecessary', **_):
"""Make sure _application.properties_ file exists in S3.
For applications with Archaius support, there needs to be a file that the
cloud environment variable points to.
Args:
env (str): Deployment environment/account, e.g. dev, stage, prod.
app (str): GitLab Project name.
Returns:
True when application.properties was found.
False when application.properties needed to be created.
"""
aws_env = boto3.session.Session(profile_name=env)
s3client = aws_env.resource('s3')
generated = get_details(app=app, env=env)
archaius = generated.archaius()
archaius_file = ('{path}/application.properties').format(path=archaius['path'])
try:
s3client.Object(archaius['bucket'], archaius_file).get()
LOG.info('Found: %(bucket)s/%(file)s', {'bucket': archaius['bucket'], 'file': archaius_file})
return True
except boto3.exceptions.botocore.client.ClientError:
s3client.Object(archaius['bucket'], archaius_file).put()
LOG.info('Created: %(bucket)s/%(file)s', {'bucket': archaius['bucket'], 'file': archaius_file})
return False | [
"def",
"init_properties",
"(",
"env",
"=",
"'dev'",
",",
"app",
"=",
"'unnecessary'",
",",
"*",
"*",
"_",
")",
":",
"aws_env",
"=",
"boto3",
".",
"session",
".",
"Session",
"(",
"profile_name",
"=",
"env",
")",
"s3client",
"=",
"aws_env",
".",
"resource",
"(",
"'s3'",
")",
"generated",
"=",
"get_details",
"(",
"app",
"=",
"app",
",",
"env",
"=",
"env",
")",
"archaius",
"=",
"generated",
".",
"archaius",
"(",
")",
"archaius_file",
"=",
"(",
"'{path}/application.properties'",
")",
".",
"format",
"(",
"path",
"=",
"archaius",
"[",
"'path'",
"]",
")",
"try",
":",
"s3client",
".",
"Object",
"(",
"archaius",
"[",
"'bucket'",
"]",
",",
"archaius_file",
")",
".",
"get",
"(",
")",
"LOG",
".",
"info",
"(",
"'Found: %(bucket)s/%(file)s'",
",",
"{",
"'bucket'",
":",
"archaius",
"[",
"'bucket'",
"]",
",",
"'file'",
":",
"archaius_file",
"}",
")",
"return",
"True",
"except",
"boto3",
".",
"exceptions",
".",
"botocore",
".",
"client",
".",
"ClientError",
":",
"s3client",
".",
"Object",
"(",
"archaius",
"[",
"'bucket'",
"]",
",",
"archaius_file",
")",
".",
"put",
"(",
")",
"LOG",
".",
"info",
"(",
"'Created: %(bucket)s/%(file)s'",
",",
"{",
"'bucket'",
":",
"archaius",
"[",
"'bucket'",
"]",
",",
"'file'",
":",
"archaius_file",
"}",
")",
"return",
"False"
] | Make sure _application.properties_ file exists in S3.
For applications with Archaius support, there needs to be a file that the
cloud environment variable points to.
Args:
env (str): Deployment environment/account, e.g. dev, stage, prod.
app (str): GitLab Project name.
Returns:
True when application.properties was found.
False when application.properties needed to be created. | [
"Make",
"sure",
"_application",
".",
"properties_",
"file",
"exists",
"in",
"S3",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/s3/create_archaius.py#L26-L55 |
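A hedged usage sketch for init_properties; it requires a boto3 profile named after the environment and relies on get_details to resolve the Archaius bucket and path:

from foremast.s3.create_archaius import init_properties

# True if <archaius-path>/application.properties already existed,
# False if an empty object had to be created.
already_there = init_properties(env='dev', app='exampleapp')
print(already_there)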
6,088 | foremast/foremast | src/foremast/awslambda/cloudwatch_event/cloudwatch_event.py | create_cloudwatch_event | def create_cloudwatch_event(app_name, env, region, rules):
"""Create cloudwatch event for lambda from rules.
Args:
app_name (str): name of the lambda function
env (str): Environment/Account for lambda function
region (str): AWS region of the lambda function
rules (dict): Trigger rules from the settings
"""
session = boto3.Session(profile_name=env, region_name=region)
cloudwatch_client = session.client('events')
rule_name = rules.get('rule_name')
schedule = rules.get('schedule')
rule_description = rules.get('rule_description')
json_input = rules.get('json_input', {})
if schedule is None:
LOG.critical('Schedule is required and no schedule is defined!')
raise InvalidEventConfiguration('Schedule is required and no schedule is defined!')
if rule_name is None:
LOG.critical('Rule name is required and no rule_name is defined!')
raise InvalidEventConfiguration('Rule name is required and no rule_name is defined!')
else:
LOG.info('%s and %s', app_name, rule_name)
rule_name = "{}_{}".format(app_name, rule_name.replace(' ', '_'))
if rule_description is None:
rule_description = "{} - {}".format(app_name, rule_name)
lambda_arn = get_lambda_arn(app=app_name, account=env, region=region)
# Add lambda permissions
account_id = get_env_credential(env=env)['accountId']
principal = "events.amazonaws.com"
statement_id = '{}_cloudwatch_{}'.format(app_name, rule_name)
source_arn = 'arn:aws:events:{}:{}:rule/{}'.format(region, account_id, rule_name)
add_lambda_permissions(
function=lambda_arn,
statement_id=statement_id,
action='lambda:InvokeFunction',
principal=principal,
source_arn=source_arn,
env=env,
region=region, )
# Create Cloudwatch rule
cloudwatch_client.put_rule(
Name=rule_name,
ScheduleExpression=schedule,
State='ENABLED',
Description=rule_description, )
targets = []
# TODO: read this one from file event-config-*.json
json_payload = '{}'.format(json.dumps(json_input))
target = {
"Id": app_name,
"Arn": lambda_arn,
"Input": json_payload,
}
targets.append(target)
put_targets_response = cloudwatch_client.put_targets(Rule=rule_name, Targets=targets)
LOG.debug('Cloudwatch put targets response: %s', put_targets_response)
LOG.info('Created Cloudwatch event "%s" with schedule: %s', rule_name, schedule) | python | def create_cloudwatch_event(app_name, env, region, rules):
"""Create cloudwatch event for lambda from rules.
Args:
app_name (str): name of the lambda function
env (str): Environment/Account for lambda function
region (str): AWS region of the lambda function
rules (dict): Trigger rules from the settings
"""
session = boto3.Session(profile_name=env, region_name=region)
cloudwatch_client = session.client('events')
rule_name = rules.get('rule_name')
schedule = rules.get('schedule')
rule_description = rules.get('rule_description')
json_input = rules.get('json_input', {})
if schedule is None:
LOG.critical('Schedule is required and no schedule is defined!')
raise InvalidEventConfiguration('Schedule is required and no schedule is defined!')
if rule_name is None:
LOG.critical('Rule name is required and no rule_name is defined!')
raise InvalidEventConfiguration('Rule name is required and no rule_name is defined!')
else:
LOG.info('%s and %s', app_name, rule_name)
rule_name = "{}_{}".format(app_name, rule_name.replace(' ', '_'))
if rule_description is None:
rule_description = "{} - {}".format(app_name, rule_name)
lambda_arn = get_lambda_arn(app=app_name, account=env, region=region)
# Add lambda permissions
account_id = get_env_credential(env=env)['accountId']
principal = "events.amazonaws.com"
statement_id = '{}_cloudwatch_{}'.format(app_name, rule_name)
source_arn = 'arn:aws:events:{}:{}:rule/{}'.format(region, account_id, rule_name)
add_lambda_permissions(
function=lambda_arn,
statement_id=statement_id,
action='lambda:InvokeFunction',
principal=principal,
source_arn=source_arn,
env=env,
region=region, )
# Create Cloudwatch rule
cloudwatch_client.put_rule(
Name=rule_name,
ScheduleExpression=schedule,
State='ENABLED',
Description=rule_description, )
targets = []
# TODO: read this one from file event-config-*.json
json_payload = '{}'.format(json.dumps(json_input))
target = {
"Id": app_name,
"Arn": lambda_arn,
"Input": json_payload,
}
targets.append(target)
put_targets_response = cloudwatch_client.put_targets(Rule=rule_name, Targets=targets)
LOG.debug('Cloudwatch put targets response: %s', put_targets_response)
LOG.info('Created Cloudwatch event "%s" with schedule: %s', rule_name, schedule) | [
"def",
"create_cloudwatch_event",
"(",
"app_name",
",",
"env",
",",
"region",
",",
"rules",
")",
":",
"session",
"=",
"boto3",
".",
"Session",
"(",
"profile_name",
"=",
"env",
",",
"region_name",
"=",
"region",
")",
"cloudwatch_client",
"=",
"session",
".",
"client",
"(",
"'events'",
")",
"rule_name",
"=",
"rules",
".",
"get",
"(",
"'rule_name'",
")",
"schedule",
"=",
"rules",
".",
"get",
"(",
"'schedule'",
")",
"rule_description",
"=",
"rules",
".",
"get",
"(",
"'rule_description'",
")",
"json_input",
"=",
"rules",
".",
"get",
"(",
"'json_input'",
",",
"{",
"}",
")",
"if",
"schedule",
"is",
"None",
":",
"LOG",
".",
"critical",
"(",
"'Schedule is required and no schedule is defined!'",
")",
"raise",
"InvalidEventConfiguration",
"(",
"'Schedule is required and no schedule is defined!'",
")",
"if",
"rule_name",
"is",
"None",
":",
"LOG",
".",
"critical",
"(",
"'Rule name is required and no rule_name is defined!'",
")",
"raise",
"InvalidEventConfiguration",
"(",
"'Rule name is required and no rule_name is defined!'",
")",
"else",
":",
"LOG",
".",
"info",
"(",
"'%s and %s'",
",",
"app_name",
",",
"rule_name",
")",
"rule_name",
"=",
"\"{}_{}\"",
".",
"format",
"(",
"app_name",
",",
"rule_name",
".",
"replace",
"(",
"' '",
",",
"'_'",
")",
")",
"if",
"rule_description",
"is",
"None",
":",
"rule_description",
"=",
"\"{} - {}\"",
".",
"format",
"(",
"app_name",
",",
"rule_name",
")",
"lambda_arn",
"=",
"get_lambda_arn",
"(",
"app",
"=",
"app_name",
",",
"account",
"=",
"env",
",",
"region",
"=",
"region",
")",
"# Add lambda permissions",
"account_id",
"=",
"get_env_credential",
"(",
"env",
"=",
"env",
")",
"[",
"'accountId'",
"]",
"principal",
"=",
"\"events.amazonaws.com\"",
"statement_id",
"=",
"'{}_cloudwatch_{}'",
".",
"format",
"(",
"app_name",
",",
"rule_name",
")",
"source_arn",
"=",
"'arn:aws:events:{}:{}:rule/{}'",
".",
"format",
"(",
"region",
",",
"account_id",
",",
"rule_name",
")",
"add_lambda_permissions",
"(",
"function",
"=",
"lambda_arn",
",",
"statement_id",
"=",
"statement_id",
",",
"action",
"=",
"'lambda:InvokeFunction'",
",",
"principal",
"=",
"principal",
",",
"source_arn",
"=",
"source_arn",
",",
"env",
"=",
"env",
",",
"region",
"=",
"region",
",",
")",
"# Create Cloudwatch rule",
"cloudwatch_client",
".",
"put_rule",
"(",
"Name",
"=",
"rule_name",
",",
"ScheduleExpression",
"=",
"schedule",
",",
"State",
"=",
"'ENABLED'",
",",
"Description",
"=",
"rule_description",
",",
")",
"targets",
"=",
"[",
"]",
"# TODO: read this one from file event-config-*.json",
"json_payload",
"=",
"'{}'",
".",
"format",
"(",
"json",
".",
"dumps",
"(",
"json_input",
")",
")",
"target",
"=",
"{",
"\"Id\"",
":",
"app_name",
",",
"\"Arn\"",
":",
"lambda_arn",
",",
"\"Input\"",
":",
"json_payload",
",",
"}",
"targets",
".",
"append",
"(",
"target",
")",
"put_targets_response",
"=",
"cloudwatch_client",
".",
"put_targets",
"(",
"Rule",
"=",
"rule_name",
",",
"Targets",
"=",
"targets",
")",
"LOG",
".",
"debug",
"(",
"'Cloudwatch put targets response: %s'",
",",
"put_targets_response",
")",
"LOG",
".",
"info",
"(",
"'Created Cloudwatch event \"%s\" with schedule: %s'",
",",
"rule_name",
",",
"schedule",
")"
] | Create cloudwatch event for lambda from rules.
Args:
app_name (str): name of the lambda function
env (str): Environment/Account for lambda function
region (str): AWS region of the lambda function
rules (dict): Trigger rules from the settings | [
"Create",
"cloudwatch",
"event",
"for",
"lambda",
"from",
"rules",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/cloudwatch_event/cloudwatch_event.py#L29-L98 |
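A hedged rules sketch for create_cloudwatch_event; rule_name and schedule are required, and the json_input payload is hypothetical:

from foremast.awslambda.cloudwatch_event.cloudwatch_event import create_cloudwatch_event

rules = {
    'rule_name': 'nightly run',        # becomes 'exampleapp_nightly_run'
    'schedule': 'rate(1 day)',         # or a cron(...) expression
    'rule_description': 'Nightly trigger',
    'json_input': {'mode': 'full'},    # hypothetical event payload
}
create_cloudwatch_event('exampleapp', 'dev', 'us-east-1', rules)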
6,089 | foremast/foremast | src/foremast/awslambda/api_gateway_event/api_gateway_event.py | APIGateway.find_api_id | def find_api_id(self):
"""Given API name, find API ID."""
allapis = self.client.get_rest_apis()
api_name = self.trigger_settings['api_name']
api_id = None
for api in allapis['items']:
if api['name'] == api_name:
api_id = api['id']
self.log.info("Found API for: %s", api_name)
break
else:
api_id = self.create_api()
return api_id | python | def find_api_id(self):
"""Given API name, find API ID."""
allapis = self.client.get_rest_apis()
api_name = self.trigger_settings['api_name']
api_id = None
for api in allapis['items']:
if api['name'] == api_name:
api_id = api['id']
self.log.info("Found API for: %s", api_name)
break
else:
api_id = self.create_api()
return api_id | [
"def",
"find_api_id",
"(",
"self",
")",
":",
"allapis",
"=",
"self",
".",
"client",
".",
"get_rest_apis",
"(",
")",
"api_name",
"=",
"self",
".",
"trigger_settings",
"[",
"'api_name'",
"]",
"api_id",
"=",
"None",
"for",
"api",
"in",
"allapis",
"[",
"'items'",
"]",
":",
"if",
"api",
"[",
"'name'",
"]",
"==",
"api_name",
":",
"api_id",
"=",
"api",
"[",
"'id'",
"]",
"self",
".",
"log",
".",
"info",
"(",
"\"Found API for: %s\"",
",",
"api_name",
")",
"break",
"else",
":",
"api_id",
"=",
"self",
".",
"create_api",
"(",
")",
"return",
"api_id"
] | Given API name, find API ID. | [
"Given",
"API",
"name",
"find",
"API",
"ID",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/api_gateway_event/api_gateway_event.py#L59-L72 |
6,090 | foremast/foremast | src/foremast/awslambda/api_gateway_event/api_gateway_event.py | APIGateway.find_resource_ids | def find_resource_ids(self):
"""Given a resource path and API Id, find resource Id."""
all_resources = self.client.get_resources(restApiId=self.api_id)
parent_id = None
resource_id = None
for resource in all_resources['items']:
if resource['path'] == "/":
parent_id = resource['id']
if resource['path'] == self.trigger_settings['resource']:
resource_id = resource['id']
self.log.info("Found Resource ID for: %s", resource['path'])
return resource_id, parent_id | python | def find_resource_ids(self):
"""Given a resource path and API Id, find resource Id."""
all_resources = self.client.get_resources(restApiId=self.api_id)
parent_id = None
resource_id = None
for resource in all_resources['items']:
if resource['path'] == "/":
parent_id = resource['id']
if resource['path'] == self.trigger_settings['resource']:
resource_id = resource['id']
self.log.info("Found Resource ID for: %s", resource['path'])
return resource_id, parent_id | [
"def",
"find_resource_ids",
"(",
"self",
")",
":",
"all_resources",
"=",
"self",
".",
"client",
".",
"get_resources",
"(",
"restApiId",
"=",
"self",
".",
"api_id",
")",
"parent_id",
"=",
"None",
"resource_id",
"=",
"None",
"for",
"resource",
"in",
"all_resources",
"[",
"'items'",
"]",
":",
"if",
"resource",
"[",
"'path'",
"]",
"==",
"\"/\"",
":",
"parent_id",
"=",
"resource",
"[",
"'id'",
"]",
"if",
"resource",
"[",
"'path'",
"]",
"==",
"self",
".",
"trigger_settings",
"[",
"'resource'",
"]",
":",
"resource_id",
"=",
"resource",
"[",
"'id'",
"]",
"self",
".",
"log",
".",
"info",
"(",
"\"Found Resource ID for: %s\"",
",",
"resource",
"[",
"'path'",
"]",
")",
"return",
"resource_id",
",",
"parent_id"
] | Given a resource path and API ID, find resource ID. | [
"Given",
"a",
"resource",
"path",
"and",
"API",
"Id",
"find",
"resource",
"Id",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/api_gateway_event/api_gateway_event.py#L74-L85 |
6,091 | foremast/foremast | src/foremast/awslambda/api_gateway_event/api_gateway_event.py | APIGateway.add_lambda_integration | def add_lambda_integration(self):
"""Attach lambda found to API."""
lambda_uri = self.generate_uris()['lambda_uri']
self.client.put_integration(
restApiId=self.api_id,
resourceId=self.resource_id,
httpMethod=self.trigger_settings['method'],
integrationHttpMethod='POST',
uri=lambda_uri,
type='AWS')
self.add_integration_response()
self.log.info("Successfully added Lambda intergration to API") | python | def add_lambda_integration(self):
"""Attach lambda found to API."""
lambda_uri = self.generate_uris()['lambda_uri']
self.client.put_integration(
restApiId=self.api_id,
resourceId=self.resource_id,
httpMethod=self.trigger_settings['method'],
integrationHttpMethod='POST',
uri=lambda_uri,
type='AWS')
self.add_integration_response()
self.log.info("Successfully added Lambda intergration to API") | [
"def",
"add_lambda_integration",
"(",
"self",
")",
":",
"lambda_uri",
"=",
"self",
".",
"generate_uris",
"(",
")",
"[",
"'lambda_uri'",
"]",
"self",
".",
"client",
".",
"put_integration",
"(",
"restApiId",
"=",
"self",
".",
"api_id",
",",
"resourceId",
"=",
"self",
".",
"resource_id",
",",
"httpMethod",
"=",
"self",
".",
"trigger_settings",
"[",
"'method'",
"]",
",",
"integrationHttpMethod",
"=",
"'POST'",
",",
"uri",
"=",
"lambda_uri",
",",
"type",
"=",
"'AWS'",
")",
"self",
".",
"add_integration_response",
"(",
")",
"self",
".",
"log",
".",
"info",
"(",
"\"Successfully added Lambda intergration to API\"",
")"
] | Attach the Lambda function to the API. | [
"Attach",
"lambda",
"found",
"to",
"API",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/api_gateway_event/api_gateway_event.py#L87-L98 |
6,092 | foremast/foremast | src/foremast/awslambda/api_gateway_event/api_gateway_event.py | APIGateway.add_integration_response | def add_integration_response(self):
"""Add an intergation response to the API for the lambda integration."""
self.client.put_integration_response(
restApiId=self.api_id,
resourceId=self.resource_id,
httpMethod=self.trigger_settings['method'],
statusCode='200',
responseTemplates={'application/json': ''}) | python | def add_integration_response(self):
"""Add an intergation response to the API for the lambda integration."""
self.client.put_integration_response(
restApiId=self.api_id,
resourceId=self.resource_id,
httpMethod=self.trigger_settings['method'],
statusCode='200',
responseTemplates={'application/json': ''}) | [
"def",
"add_integration_response",
"(",
"self",
")",
":",
"self",
".",
"client",
".",
"put_integration_response",
"(",
"restApiId",
"=",
"self",
".",
"api_id",
",",
"resourceId",
"=",
"self",
".",
"resource_id",
",",
"httpMethod",
"=",
"self",
".",
"trigger_settings",
"[",
"'method'",
"]",
",",
"statusCode",
"=",
"'200'",
",",
"responseTemplates",
"=",
"{",
"'application/json'",
":",
"''",
"}",
")"
] | Add an integration response to the API for the lambda integration. | [
"Add",
"an",
"intergation",
"response",
"to",
"the",
"API",
"for",
"the",
"lambda",
"integration",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/api_gateway_event/api_gateway_event.py#L100-L107 |
6,093 | foremast/foremast | src/foremast/awslambda/api_gateway_event/api_gateway_event.py | APIGateway.add_permission | def add_permission(self):
"""Add permission to Lambda for the API Trigger."""
statement_id = '{}_api_{}'.format(self.app_name, self.trigger_settings['api_name'])
principal = 'apigateway.amazonaws.com'
lambda_alias_arn = get_lambda_alias_arn(self.app_name, self.env, self.region)
lambda_unqualified_arn = get_lambda_arn(self.app_name, self.env, self.region)
resource_name = self.trigger_settings.get('resource', '')
resource_name = resource_name.replace('/', '')
method_api_source_arn = 'arn:aws:execute-api:{}:{}:{}/{}/{}/{}'.format(
self.region, self.account_id, self.api_id, self.env, self.trigger_settings['method'], resource_name)
global_api_source_arn = 'arn:aws:execute-api:{}:{}:{}/*/*/{}'.format(self.region, self.account_id, self.api_id,
resource_name)
add_lambda_permissions(
function=lambda_alias_arn,
statement_id=statement_id + self.trigger_settings['method'],
action='lambda:InvokeFunction',
principal=principal,
env=self.env,
region=self.region,
source_arn=method_api_source_arn)
add_lambda_permissions(
function=lambda_alias_arn,
statement_id=statement_id,
action='lambda:InvokeFunction',
principal=principal,
env=self.env,
region=self.region,
source_arn=global_api_source_arn)
add_lambda_permissions(
function=lambda_unqualified_arn,
statement_id=statement_id + self.trigger_settings['method'],
action='lambda:InvokeFunction',
principal=principal,
env=self.env,
region=self.region,
source_arn=method_api_source_arn)
add_lambda_permissions(
function=lambda_unqualified_arn,
statement_id=statement_id,
action='lambda:InvokeFunction',
principal=principal,
env=self.env,
region=self.region,
source_arn=global_api_source_arn) | python | def add_permission(self):
"""Add permission to Lambda for the API Trigger."""
statement_id = '{}_api_{}'.format(self.app_name, self.trigger_settings['api_name'])
principal = 'apigateway.amazonaws.com'
lambda_alias_arn = get_lambda_alias_arn(self.app_name, self.env, self.region)
lambda_unqualified_arn = get_lambda_arn(self.app_name, self.env, self.region)
resource_name = self.trigger_settings.get('resource', '')
resource_name = resource_name.replace('/', '')
method_api_source_arn = 'arn:aws:execute-api:{}:{}:{}/{}/{}/{}'.format(
self.region, self.account_id, self.api_id, self.env, self.trigger_settings['method'], resource_name)
global_api_source_arn = 'arn:aws:execute-api:{}:{}:{}/*/*/{}'.format(self.region, self.account_id, self.api_id,
resource_name)
add_lambda_permissions(
function=lambda_alias_arn,
statement_id=statement_id + self.trigger_settings['method'],
action='lambda:InvokeFunction',
principal=principal,
env=self.env,
region=self.region,
source_arn=method_api_source_arn)
add_lambda_permissions(
function=lambda_alias_arn,
statement_id=statement_id,
action='lambda:InvokeFunction',
principal=principal,
env=self.env,
region=self.region,
source_arn=global_api_source_arn)
add_lambda_permissions(
function=lambda_unqualified_arn,
statement_id=statement_id + self.trigger_settings['method'],
action='lambda:InvokeFunction',
principal=principal,
env=self.env,
region=self.region,
source_arn=method_api_source_arn)
add_lambda_permissions(
function=lambda_unqualified_arn,
statement_id=statement_id,
action='lambda:InvokeFunction',
principal=principal,
env=self.env,
region=self.region,
source_arn=global_api_source_arn) | [
"def",
"add_permission",
"(",
"self",
")",
":",
"statement_id",
"=",
"'{}_api_{}'",
".",
"format",
"(",
"self",
".",
"app_name",
",",
"self",
".",
"trigger_settings",
"[",
"'api_name'",
"]",
")",
"principal",
"=",
"'apigateway.amazonaws.com'",
"lambda_alias_arn",
"=",
"get_lambda_alias_arn",
"(",
"self",
".",
"app_name",
",",
"self",
".",
"env",
",",
"self",
".",
"region",
")",
"lambda_unqualified_arn",
"=",
"get_lambda_arn",
"(",
"self",
".",
"app_name",
",",
"self",
".",
"env",
",",
"self",
".",
"region",
")",
"resource_name",
"=",
"self",
".",
"trigger_settings",
".",
"get",
"(",
"'resource'",
",",
"''",
")",
"resource_name",
"=",
"resource_name",
".",
"replace",
"(",
"'/'",
",",
"''",
")",
"method_api_source_arn",
"=",
"'arn:aws:execute-api:{}:{}:{}/{}/{}/{}'",
".",
"format",
"(",
"self",
".",
"region",
",",
"self",
".",
"account_id",
",",
"self",
".",
"api_id",
",",
"self",
".",
"env",
",",
"self",
".",
"trigger_settings",
"[",
"'method'",
"]",
",",
"resource_name",
")",
"global_api_source_arn",
"=",
"'arn:aws:execute-api:{}:{}:{}/*/*/{}'",
".",
"format",
"(",
"self",
".",
"region",
",",
"self",
".",
"account_id",
",",
"self",
".",
"api_id",
",",
"resource_name",
")",
"add_lambda_permissions",
"(",
"function",
"=",
"lambda_alias_arn",
",",
"statement_id",
"=",
"statement_id",
"+",
"self",
".",
"trigger_settings",
"[",
"'method'",
"]",
",",
"action",
"=",
"'lambda:InvokeFunction'",
",",
"principal",
"=",
"principal",
",",
"env",
"=",
"self",
".",
"env",
",",
"region",
"=",
"self",
".",
"region",
",",
"source_arn",
"=",
"method_api_source_arn",
")",
"add_lambda_permissions",
"(",
"function",
"=",
"lambda_alias_arn",
",",
"statement_id",
"=",
"statement_id",
",",
"action",
"=",
"'lambda:InvokeFunction'",
",",
"principal",
"=",
"principal",
",",
"env",
"=",
"self",
".",
"env",
",",
"region",
"=",
"self",
".",
"region",
",",
"source_arn",
"=",
"global_api_source_arn",
")",
"add_lambda_permissions",
"(",
"function",
"=",
"lambda_unqualified_arn",
",",
"statement_id",
"=",
"statement_id",
"+",
"self",
".",
"trigger_settings",
"[",
"'method'",
"]",
",",
"action",
"=",
"'lambda:InvokeFunction'",
",",
"principal",
"=",
"principal",
",",
"env",
"=",
"self",
".",
"env",
",",
"region",
"=",
"self",
".",
"region",
",",
"source_arn",
"=",
"method_api_source_arn",
")",
"add_lambda_permissions",
"(",
"function",
"=",
"lambda_unqualified_arn",
",",
"statement_id",
"=",
"statement_id",
",",
"action",
"=",
"'lambda:InvokeFunction'",
",",
"principal",
"=",
"principal",
",",
"env",
"=",
"self",
".",
"env",
",",
"region",
"=",
"self",
".",
"region",
",",
"source_arn",
"=",
"global_api_source_arn",
")"
] | Add permission to Lambda for the API Trigger. | [
"Add",
"permission",
"to",
"Lambda",
"for",
"the",
"API",
"Trigger",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/api_gateway_event/api_gateway_event.py#L109-L152 |
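The two source ARN shapes add_permission grants against can be previewed standalone (sample values only):

region, account_id, api_id, env = 'us-east-1', '123456789012', 'abc123', 'dev'
method, resource_name = 'GET', 'hello'  # resource '/hello' with '/' stripped

method_arn = 'arn:aws:execute-api:{}:{}:{}/{}/{}/{}'.format(
    region, account_id, api_id, env, method, resource_name)
global_arn = 'arn:aws:execute-api:{}:{}:{}/*/*/{}'.format(
    region, account_id, api_id, resource_name)
print(method_arn)  # arn:aws:execute-api:us-east-1:123456789012:abc123/dev/GET/hello
print(global_arn)  # arn:aws:execute-api:us-east-1:123456789012:abc123/*/*/hello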
6,094 | foremast/foremast | src/foremast/awslambda/api_gateway_event/api_gateway_event.py | APIGateway.create_api_deployment | def create_api_deployment(self):
"""Create API deployment of ENV name."""
try:
self.client.create_deployment(restApiId=self.api_id, stageName=self.env)
self.log.info('Created a deployment resource.')
except botocore.exceptions.ClientError as error:
error_code = error.response['Error']['Code']
if error_code == 'TooManyRequestsException':
self.log.debug('Retrying. We have hit api limit.')
else:
self.log.debug('Retrying. We received %s.', error_code) | python | def create_api_deployment(self):
"""Create API deployment of ENV name."""
try:
self.client.create_deployment(restApiId=self.api_id, stageName=self.env)
self.log.info('Created a deployment resource.')
except botocore.exceptions.ClientError as error:
error_code = error.response['Error']['Code']
if error_code == 'TooManyRequestsException':
self.log.debug('Retrying. We have hit api limit.')
else:
self.log.debug('Retrying. We received %s.', error_code) | [
"def",
"create_api_deployment",
"(",
"self",
")",
":",
"try",
":",
"self",
".",
"client",
".",
"create_deployment",
"(",
"restApiId",
"=",
"self",
".",
"api_id",
",",
"stageName",
"=",
"self",
".",
"env",
")",
"self",
".",
"log",
".",
"info",
"(",
"'Created a deployment resource.'",
")",
"except",
"botocore",
".",
"exceptions",
".",
"ClientError",
"as",
"error",
":",
"error_code",
"=",
"error",
".",
"response",
"[",
"'Error'",
"]",
"[",
"'Code'",
"]",
"if",
"error_code",
"==",
"'TooManyRequestsException'",
":",
"self",
".",
"log",
".",
"debug",
"(",
"'Retrying. We have hit api limit.'",
")",
"else",
":",
"self",
".",
"log",
".",
"debug",
"(",
"'Retrying. We received %s.'",
",",
"error_code",
")"
] | Create API deployment of ENV name. | [
"Create",
"API",
"deployment",
"of",
"ENV",
"name",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/api_gateway_event/api_gateway_event.py#L155-L165 |
6,095 | foremast/foremast | src/foremast/awslambda/api_gateway_event/api_gateway_event.py | APIGateway.create_api_key | def create_api_key(self):
"""Create API Key for API access."""
apikeys = self.client.get_api_keys()
for key in apikeys['items']:
if key['name'] == self.app_name:
self.log.info("Key %s already exists", self.app_name)
break
else:
self.client.create_api_key(
name=self.app_name, enabled=True, stageKeys=[{
'restApiId': self.api_id,
'stageName': self.env
}])
self.log.info("Successfully created API Key %s. Look in the AWS console for the key", self.app_name) | python | def create_api_key(self):
"""Create API Key for API access."""
apikeys = self.client.get_api_keys()
for key in apikeys['items']:
if key['name'] == self.app_name:
self.log.info("Key %s already exists", self.app_name)
break
else:
self.client.create_api_key(
name=self.app_name, enabled=True, stageKeys=[{
'restApiId': self.api_id,
'stageName': self.env
}])
self.log.info("Successfully created API Key %s. Look in the AWS console for the key", self.app_name) | [
"def",
"create_api_key",
"(",
"self",
")",
":",
"apikeys",
"=",
"self",
".",
"client",
".",
"get_api_keys",
"(",
")",
"for",
"key",
"in",
"apikeys",
"[",
"'items'",
"]",
":",
"if",
"key",
"[",
"'name'",
"]",
"==",
"self",
".",
"app_name",
":",
"self",
".",
"log",
".",
"info",
"(",
"\"Key %s already exists\"",
",",
"self",
".",
"app_name",
")",
"break",
"else",
":",
"self",
".",
"client",
".",
"create_api_key",
"(",
"name",
"=",
"self",
".",
"app_name",
",",
"enabled",
"=",
"True",
",",
"stageKeys",
"=",
"[",
"{",
"'restApiId'",
":",
"self",
".",
"api_id",
",",
"'stageName'",
":",
"self",
".",
"env",
"}",
"]",
")",
"self",
".",
"log",
".",
"info",
"(",
"\"Successfully created API Key %s. Look in the AWS console for the key\"",
",",
"self",
".",
"app_name",
")"
] | Create API Key for API access. | [
"Create",
"API",
"Key",
"for",
"API",
"access",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/api_gateway_event/api_gateway_event.py#L167-L180 |
6,096 | foremast/foremast | src/foremast/awslambda/api_gateway_event/api_gateway_event.py | APIGateway._format_base_path | def _format_base_path(self, api_name):
"""Format the base path name."""
name = self.app_name
if self.app_name != api_name:
name = '{0}-{1}'.format(self.app_name, api_name)
return name | python | def _format_base_path(self, api_name):
"""Format the base path name."""
name = self.app_name
if self.app_name != api_name:
name = '{0}-{1}'.format(self.app_name, api_name)
return name | [
"def",
"_format_base_path",
"(",
"self",
",",
"api_name",
")",
":",
"name",
"=",
"self",
".",
"app_name",
"if",
"self",
".",
"app_name",
"!=",
"api_name",
":",
"name",
"=",
"'{0}-{1}'",
".",
"format",
"(",
"self",
".",
"app_name",
",",
"api_name",
")",
"return",
"name"
] | Format the base path name. | [
"Format",
"the",
"base",
"path",
"name",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/api_gateway_event/api_gateway_event.py#L182-L187 |
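A standalone mirror of the naming rule in _format_base_path (illustrative assertions):

def format_base_path(app_name, api_name):
    # join the names only when they differ, mirroring the method above
    return app_name if app_name == api_name else '{0}-{1}'.format(app_name, api_name)

assert format_base_path('exampleapp', 'exampleapp') == 'exampleapp'
assert format_base_path('exampleapp', 'edge') == 'exampleapp-edge'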
6,097 | foremast/foremast | src/foremast/awslambda/api_gateway_event/api_gateway_event.py | APIGateway.update_api_mappings | def update_api_mappings(self):
"""Create a cname for the API deployment."""
response_provider = None
response_action = None
domain = self.generated.apigateway()['domain']
try:
response_provider = self.client.create_base_path_mapping(
domainName=domain,
basePath=self._format_base_path(self.trigger_settings['api_name']),
restApiId=self.api_id,
stage=self.env, )
response_action = 'API mapping added.'
except botocore.exceptions.ClientError as error:
error_code = error.response['Error']['Code']
if error_code == 'ConflictException':
response_action = 'API mapping already exists.'
else:
response_action = 'Unknown error: {0}'.format(error_code)
self.log.debug('Provider response: %s', response_provider)
self.log.info(response_action)
return response_provider | python | def update_api_mappings(self):
"""Create a cname for the API deployment."""
response_provider = None
response_action = None
domain = self.generated.apigateway()['domain']
try:
response_provider = self.client.create_base_path_mapping(
domainName=domain,
basePath=self._format_base_path(self.trigger_settings['api_name']),
restApiId=self.api_id,
stage=self.env, )
response_action = 'API mapping added.'
except botocore.exceptions.ClientError as error:
error_code = error.response['Error']['Code']
if error_code == 'ConflictException':
response_action = 'API mapping already exists.'
else:
response_action = 'Unknown error: {0}'.format(error_code)
self.log.debug('Provider response: %s', response_provider)
self.log.info(response_action)
return response_provider | [
"def",
"update_api_mappings",
"(",
"self",
")",
":",
"response_provider",
"=",
"None",
"response_action",
"=",
"None",
"domain",
"=",
"self",
".",
"generated",
".",
"apigateway",
"(",
")",
"[",
"'domain'",
"]",
"try",
":",
"response_provider",
"=",
"self",
".",
"client",
".",
"create_base_path_mapping",
"(",
"domainName",
"=",
"domain",
",",
"basePath",
"=",
"self",
".",
"_format_base_path",
"(",
"self",
".",
"trigger_settings",
"[",
"'api_name'",
"]",
")",
",",
"restApiId",
"=",
"self",
".",
"api_id",
",",
"stage",
"=",
"self",
".",
"env",
",",
")",
"response_action",
"=",
"'API mapping added.'",
"except",
"botocore",
".",
"exceptions",
".",
"ClientError",
"as",
"error",
":",
"error_code",
"=",
"error",
".",
"response",
"[",
"'Error'",
"]",
"[",
"'Code'",
"]",
"if",
"error_code",
"==",
"'ConflictException'",
":",
"response_action",
"=",
"'API mapping already exist.'",
"else",
":",
"response_action",
"=",
"'Unknown error: {0}'",
".",
"format",
"(",
"error_code",
")",
"self",
".",
"log",
".",
"debug",
"(",
"'Provider response: %s'",
",",
"response_provider",
")",
"self",
".",
"log",
".",
"info",
"(",
"response_action",
")",
"return",
"response_provider"
] | Create a CNAME for the API deployment. | [
"Create",
"a",
"cname",
"for",
"the",
"API",
"deployment",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/api_gateway_event/api_gateway_event.py#L189-L210 |
6,098 | foremast/foremast | src/foremast/awslambda/api_gateway_event/api_gateway_event.py | APIGateway.generate_uris | def generate_uris(self):
"""Generate several lambda uris."""
lambda_arn = "arn:aws:execute-api:{0}:{1}:{2}/*/{3}/{4}".format(self.region, self.account_id, self.api_id,
self.trigger_settings['method'],
self.trigger_settings['resource'])
lambda_uri = ("arn:aws:apigateway:{0}:lambda:path/{1}/functions/"
"arn:aws:lambda:{0}:{2}:function:{3}/invocations").format(self.region, self.api_version,
self.account_id, self.app_name)
api_dns = "https://{0}.execute-api.{1}.amazonaws.com/{2}".format(self.api_id, self.region, self.env)
uri_dict = {'lambda_arn': lambda_arn, 'lambda_uri': lambda_uri, 'api_dns': api_dns}
return uri_dict | python | def generate_uris(self):
"""Generate several lambda uris."""
lambda_arn = "arn:aws:execute-api:{0}:{1}:{2}/*/{3}/{4}".format(self.region, self.account_id, self.api_id,
self.trigger_settings['method'],
self.trigger_settings['resource'])
lambda_uri = ("arn:aws:apigateway:{0}:lambda:path/{1}/functions/"
"arn:aws:lambda:{0}:{2}:function:{3}/invocations").format(self.region, self.api_version,
self.account_id, self.app_name)
api_dns = "https://{0}.execute-api.{1}.amazonaws.com/{2}".format(self.api_id, self.region, self.env)
uri_dict = {'lambda_arn': lambda_arn, 'lambda_uri': lambda_uri, 'api_dns': api_dns}
return uri_dict | [
"def",
"generate_uris",
"(",
"self",
")",
":",
"lambda_arn",
"=",
"\"arn:aws:execute-api:{0}:{1}:{2}/*/{3}/{4}\"",
".",
"format",
"(",
"self",
".",
"region",
",",
"self",
".",
"account_id",
",",
"self",
".",
"api_id",
",",
"self",
".",
"trigger_settings",
"[",
"'method'",
"]",
",",
"self",
".",
"trigger_settings",
"[",
"'resource'",
"]",
")",
"lambda_uri",
"=",
"(",
"\"arn:aws:apigateway:{0}:lambda:path/{1}/functions/\"",
"\"arn:aws:lambda:{0}:{2}:function:{3}/invocations\"",
")",
".",
"format",
"(",
"self",
".",
"region",
",",
"self",
".",
"api_version",
",",
"self",
".",
"account_id",
",",
"self",
".",
"app_name",
")",
"api_dns",
"=",
"\"https://{0}.execute-api.{1}.amazonaws.com/{2}\"",
".",
"format",
"(",
"self",
".",
"api_id",
",",
"self",
".",
"region",
",",
"self",
".",
"env",
")",
"uri_dict",
"=",
"{",
"'lambda_arn'",
":",
"lambda_arn",
",",
"'lambda_uri'",
":",
"lambda_uri",
",",
"'api_dns'",
":",
"api_dns",
"}",
"return",
"uri_dict"
] | Generate several Lambda URIs. | [
"Generate",
"several",
"lambda",
"uris",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/api_gateway_event/api_gateway_event.py#L212-L225 |
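The three URI shapes generate_uris assembles, previewed standalone; all values are samples, and api_version's real value comes from the class (the version string used here is an assumption):

region, account_id, api_id = 'us-east-1', '123456789012', 'abc123'
api_version, app_name, env = '2015-03-31', 'exampleapp', 'dev'
method, resource = 'GET', '/hello'

lambda_arn = "arn:aws:execute-api:{0}:{1}:{2}/*/{3}/{4}".format(
    region, account_id, api_id, method, resource)
lambda_uri = ("arn:aws:apigateway:{0}:lambda:path/{1}/functions/"
              "arn:aws:lambda:{0}:{2}:function:{3}/invocations").format(
                  region, api_version, account_id, app_name)
api_dns = "https://{0}.execute-api.{1}.amazonaws.com/{2}".format(api_id, region, env)
print(lambda_arn, lambda_uri, api_dns, sep='\n')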
6,099 | foremast/foremast | src/foremast/awslambda/api_gateway_event/api_gateway_event.py | APIGateway.create_api | def create_api(self):
"""Create the REST API."""
created_api = self.client.create_rest_api(name=self.trigger_settings.get('api_name', self.app_name))
api_id = created_api['id']
self.log.info("Successfully created API")
return api_id | python | def create_api(self):
"""Create the REST API."""
created_api = self.client.create_rest_api(name=self.trigger_settings.get('api_name', self.app_name))
api_id = created_api['id']
self.log.info("Successfully created API")
return api_id | [
"def",
"create_api",
"(",
"self",
")",
":",
"created_api",
"=",
"self",
".",
"client",
".",
"create_rest_api",
"(",
"name",
"=",
"self",
".",
"trigger_settings",
".",
"get",
"(",
"'api_name'",
",",
"self",
".",
"app_name",
")",
")",
"api_id",
"=",
"created_api",
"[",
"'id'",
"]",
"self",
".",
"log",
".",
"info",
"(",
"\"Successfully created API\"",
")",
"return",
"api_id"
] | Create the REST API. | [
"Create",
"the",
"REST",
"API",
"."
] | fb70f29b8ce532f061685a17d120486e47b215ba | https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/api_gateway_event/api_gateway_event.py#L227-L232 |