text (stringlengths 89–104k) | code_tokens (list) | avg_line_len (float64 7.91–980) | score (float64 0–630)
---|---|---|---|
def mappings(self):
"""
Returns a sorted list of all the mappings for this memory.
:return: a list of mappings.
:rtype: list
"""
result = []
for m in self.maps:
if isinstance(m, AnonMap):
result.append((m.start, m.end, m.perms, 0, ''))
elif isinstance(m, FileMap):
result.append((m.start, m.end, m.perms, m._offset, m._filename))
else:
result.append((m.start, m.end, m.perms, 0, m.name))
return sorted(result)
|
[
"def",
"mappings",
"(",
"self",
")",
":",
"result",
"=",
"[",
"]",
"for",
"m",
"in",
"self",
".",
"maps",
":",
"if",
"isinstance",
"(",
"m",
",",
"AnonMap",
")",
":",
"result",
".",
"append",
"(",
"(",
"m",
".",
"start",
",",
"m",
".",
"end",
",",
"m",
".",
"perms",
",",
"0",
",",
"''",
")",
")",
"elif",
"isinstance",
"(",
"m",
",",
"FileMap",
")",
":",
"result",
".",
"append",
"(",
"(",
"m",
".",
"start",
",",
"m",
".",
"end",
",",
"m",
".",
"perms",
",",
"m",
".",
"_offset",
",",
"m",
".",
"_filename",
")",
")",
"else",
":",
"result",
".",
"append",
"(",
"(",
"m",
".",
"start",
",",
"m",
".",
"end",
",",
"m",
".",
"perms",
",",
"0",
",",
"m",
".",
"name",
")",
")",
"return",
"sorted",
"(",
"result",
")"
] | 31.941176 | 18.294118 |
def stages(self):
"""
Property for accessing :class:`StageManager` instance, which is used to manage stages.
:rtype: yagocd.resources.stage.StageManager
"""
if self._stage_manager is None:
self._stage_manager = StageManager(session=self._session)
return self._stage_manager
|
[
"def",
"stages",
"(",
"self",
")",
":",
"if",
"self",
".",
"_stage_manager",
"is",
"None",
":",
"self",
".",
"_stage_manager",
"=",
"StageManager",
"(",
"session",
"=",
"self",
".",
"_session",
")",
"return",
"self",
".",
"_stage_manager"
] | 36.222222 | 18.222222 |
def normalize_words(self, ord=2, inplace=False):
"""Normalize embeddings matrix row-wise.
Args:
ord: normalization order. Possible values {1, 2, 'inf', '-inf'}
"""
if ord == 2:
ord = None # numpy uses this flag to indicate l2.
vectors = self.vectors.T / np.linalg.norm(self.vectors, ord, axis=1)
if inplace:
self.vectors = vectors.T
return self
return Embedding(vectors=vectors.T, vocabulary=self.vocabulary)
|
[
"def",
"normalize_words",
"(",
"self",
",",
"ord",
"=",
"2",
",",
"inplace",
"=",
"False",
")",
":",
"if",
"ord",
"==",
"2",
":",
"ord",
"=",
"None",
"# numpy uses this flag to indicate l2.",
"vectors",
"=",
"self",
".",
"vectors",
".",
"T",
"/",
"np",
".",
"linalg",
".",
"norm",
"(",
"self",
".",
"vectors",
",",
"ord",
",",
"axis",
"=",
"1",
")",
"if",
"inplace",
":",
"self",
".",
"vectors",
"=",
"vectors",
".",
"T",
"return",
"self",
"return",
"Embedding",
"(",
"vectors",
"=",
"vectors",
".",
"T",
",",
"vocabulary",
"=",
"self",
".",
"vocabulary",
")"
] | 34.538462 | 20.307692 |
def fuzzy_int(str_):
"""
    Lets some special strings be interpreted as ints.
"""
try:
ret = int(str_)
return ret
except Exception:
# Parse comma separated values as ints
if re.match(r'\d*,\d*,?\d*', str_):
return tuple(map(int, str_.split(',')))
# Parse range values as ints
if re.match(r'\d*:\d*:?\d*', str_):
return tuple(range(*map(int, str_.split(':'))))
raise
|
[
"def",
"fuzzy_int",
"(",
"str_",
")",
":",
"try",
":",
"ret",
"=",
"int",
"(",
"str_",
")",
"return",
"ret",
"except",
"Exception",
":",
"# Parse comma separated values as ints",
"if",
"re",
".",
"match",
"(",
"r'\\d*,\\d*,?\\d*'",
",",
"str_",
")",
":",
"return",
"tuple",
"(",
"map",
"(",
"int",
",",
"str_",
".",
"split",
"(",
"','",
")",
")",
")",
"# Parse range values as ints",
"if",
"re",
".",
"match",
"(",
"r'\\d*:\\d*:?\\d*'",
",",
"str_",
")",
":",
"return",
"tuple",
"(",
"range",
"(",
"*",
"map",
"(",
"int",
",",
"str_",
".",
"split",
"(",
"':'",
")",
")",
")",
")",
"raise"
] | 29.8 | 13 |
def onDeleteRow(self, event, data_type):
"""
On button click, remove relevant object from both the data model and the grid.
"""
ancestry = self.er_magic_data.ancestry
child_type = ancestry[ancestry.index(data_type) - 1]
names = [self.grid.GetCellValue(row, 0) for row in self.selected_rows]
if data_type == 'site':
how_to_fix = 'Make sure to select a new site for each orphaned sample in the next step'
else:
how_to_fix = 'Go back a step and select a new {} for each orphaned {}'.format(data_type, child_type)
orphans = []
for name in names:
row = self.grid.row_labels.index(name)
orphan = self.er_magic_data.delete_methods[data_type](name)
if orphan:
orphans.extend(orphan)
self.grid.remove_row(row)
if orphans:
orphan_names = self.er_magic_data.make_name_list(orphans)
pw.simple_warning('You have deleted:\n\n {}\n\nthe parent(s) of {}(s):\n\n {}\n\n{}'.format(', '.join(names), child_type, ', '.join(orphan_names), how_to_fix))
self.selected_rows = set()
# update grid and data model
self.update_grid(self.grid)#, grids[grid_name])
self.grid.Refresh()
|
[
"def",
"onDeleteRow",
"(",
"self",
",",
"event",
",",
"data_type",
")",
":",
"ancestry",
"=",
"self",
".",
"er_magic_data",
".",
"ancestry",
"child_type",
"=",
"ancestry",
"[",
"ancestry",
".",
"index",
"(",
"data_type",
")",
"-",
"1",
"]",
"names",
"=",
"[",
"self",
".",
"grid",
".",
"GetCellValue",
"(",
"row",
",",
"0",
")",
"for",
"row",
"in",
"self",
".",
"selected_rows",
"]",
"if",
"data_type",
"==",
"'site'",
":",
"how_to_fix",
"=",
"'Make sure to select a new site for each orphaned sample in the next step'",
"else",
":",
"how_to_fix",
"=",
"'Go back a step and select a new {} for each orphaned {}'",
".",
"format",
"(",
"data_type",
",",
"child_type",
")",
"orphans",
"=",
"[",
"]",
"for",
"name",
"in",
"names",
":",
"row",
"=",
"self",
".",
"grid",
".",
"row_labels",
".",
"index",
"(",
"name",
")",
"orphan",
"=",
"self",
".",
"er_magic_data",
".",
"delete_methods",
"[",
"data_type",
"]",
"(",
"name",
")",
"if",
"orphan",
":",
"orphans",
".",
"extend",
"(",
"orphan",
")",
"self",
".",
"grid",
".",
"remove_row",
"(",
"row",
")",
"if",
"orphans",
":",
"orphan_names",
"=",
"self",
".",
"er_magic_data",
".",
"make_name_list",
"(",
"orphans",
")",
"pw",
".",
"simple_warning",
"(",
"'You have deleted:\\n\\n {}\\n\\nthe parent(s) of {}(s):\\n\\n {}\\n\\n{}'",
".",
"format",
"(",
"', '",
".",
"join",
"(",
"names",
")",
",",
"child_type",
",",
"', '",
".",
"join",
"(",
"orphan_names",
")",
",",
"how_to_fix",
")",
")",
"self",
".",
"selected_rows",
"=",
"set",
"(",
")",
"# update grid and data model",
"self",
".",
"update_grid",
"(",
"self",
".",
"grid",
")",
"#, grids[grid_name])",
"self",
".",
"grid",
".",
"Refresh",
"(",
")"
] | 43.586207 | 26.068966 |
def make_pipeline(context):
"""
Create our pipeline.
"""
# Filter for primary share equities. IsPrimaryShare is a built-in filter.
primary_share = IsPrimaryShare()
# Not when-issued equities.
not_wi = ~IEXCompany.symbol.latest.endswith('.WI')
# Equities without LP in their name, .matches does a match using a regular
# expression
not_lp_name = ~IEXCompany.companyName.latest.matches('.* L[. ]?P.?$')
# Equities whose most recent Morningstar market cap is not null have
# fundamental data and therefore are not ETFs.
have_market_cap = IEXKeyStats.marketcap.latest >= 1
# At least a certain price
price = USEquityPricing.close.latest
AtLeastPrice = (price >= context.MyLeastPrice)
AtMostPrice = (price <= context.MyMostPrice)
# Filter for stocks that pass all of our previous filters.
tradeable_stocks = (
primary_share
& not_wi
& not_lp_name
& have_market_cap
& AtLeastPrice
& AtMostPrice
)
LowVar = 6
HighVar = 40
log.info(
'''
Algorithm initialized variables:
context.MaxCandidates %s
LowVar %s
HighVar %s''' %
(context.MaxCandidates, LowVar, HighVar))
# High dollar volume filter.
base_universe = AverageDollarVolume(
window_length=20,
mask=tradeable_stocks
).percentile_between(LowVar, HighVar)
# Short close price average.
ShortAvg = SimpleMovingAverage(
inputs=[USEquityPricing.close],
window_length=3,
mask=base_universe
)
# Long close price average.
LongAvg = SimpleMovingAverage(
inputs=[USEquityPricing.close],
window_length=45,
mask=base_universe
)
percent_difference = (ShortAvg - LongAvg) / LongAvg
# Filter to select securities to long.
stocks_worst = percent_difference.bottom(context.MaxCandidates)
securities_to_trade = (stocks_worst)
return Pipeline(
columns={
'stocks_worst': stocks_worst
},
screen=(securities_to_trade),
)
|
[
"def",
"make_pipeline",
"(",
"context",
")",
":",
"# Filter for primary share equities. IsPrimaryShare is a built-in filter.",
"primary_share",
"=",
"IsPrimaryShare",
"(",
")",
"# Not when-issued equities.",
"not_wi",
"=",
"~",
"IEXCompany",
".",
"symbol",
".",
"latest",
".",
"endswith",
"(",
"'.WI'",
")",
"# Equities without LP in their name, .matches does a match using a regular",
"# expression",
"not_lp_name",
"=",
"~",
"IEXCompany",
".",
"companyName",
".",
"latest",
".",
"matches",
"(",
"'.* L[. ]?P.?$'",
")",
"# Equities whose most recent Morningstar market cap is not null have",
"# fundamental data and therefore are not ETFs.",
"have_market_cap",
"=",
"IEXKeyStats",
".",
"marketcap",
".",
"latest",
">=",
"1",
"# At least a certain price",
"price",
"=",
"USEquityPricing",
".",
"close",
".",
"latest",
"AtLeastPrice",
"=",
"(",
"price",
">=",
"context",
".",
"MyLeastPrice",
")",
"AtMostPrice",
"=",
"(",
"price",
"<=",
"context",
".",
"MyMostPrice",
")",
"# Filter for stocks that pass all of our previous filters.",
"tradeable_stocks",
"=",
"(",
"primary_share",
"&",
"not_wi",
"&",
"not_lp_name",
"&",
"have_market_cap",
"&",
"AtLeastPrice",
"&",
"AtMostPrice",
")",
"LowVar",
"=",
"6",
"HighVar",
"=",
"40",
"log",
".",
"info",
"(",
"'''\nAlgorithm initialized variables:\n context.MaxCandidates %s\n LowVar %s\n HighVar %s'''",
"%",
"(",
"context",
".",
"MaxCandidates",
",",
"LowVar",
",",
"HighVar",
")",
")",
"# High dollar volume filter.",
"base_universe",
"=",
"AverageDollarVolume",
"(",
"window_length",
"=",
"20",
",",
"mask",
"=",
"tradeable_stocks",
")",
".",
"percentile_between",
"(",
"LowVar",
",",
"HighVar",
")",
"# Short close price average.",
"ShortAvg",
"=",
"SimpleMovingAverage",
"(",
"inputs",
"=",
"[",
"USEquityPricing",
".",
"close",
"]",
",",
"window_length",
"=",
"3",
",",
"mask",
"=",
"base_universe",
")",
"# Long close price average.",
"LongAvg",
"=",
"SimpleMovingAverage",
"(",
"inputs",
"=",
"[",
"USEquityPricing",
".",
"close",
"]",
",",
"window_length",
"=",
"45",
",",
"mask",
"=",
"base_universe",
")",
"percent_difference",
"=",
"(",
"ShortAvg",
"-",
"LongAvg",
")",
"/",
"LongAvg",
"# Filter to select securities to long.",
"stocks_worst",
"=",
"percent_difference",
".",
"bottom",
"(",
"context",
".",
"MaxCandidates",
")",
"securities_to_trade",
"=",
"(",
"stocks_worst",
")",
"return",
"Pipeline",
"(",
"columns",
"=",
"{",
"'stocks_worst'",
":",
"stocks_worst",
"}",
",",
"screen",
"=",
"(",
"securities_to_trade",
")",
",",
")"
] | 25.961039 | 20.272727 |
def write_files(text, where='.'):
"""Write many files."""
for filename in text:
target = os.path.join(where, filename)
write_file(target, text[filename])
|
[
"def",
"write_files",
"(",
"text",
",",
"where",
"=",
"'.'",
")",
":",
"for",
"filename",
"in",
"text",
":",
"target",
"=",
"os",
".",
"path",
".",
"join",
"(",
"where",
",",
"filename",
")",
"write_file",
"(",
"target",
",",
"text",
"[",
"filename",
"]",
")"
] | 34.6 | 6 |
def _get_description(self, element):
"""
        Returns the description of the element.
:param element: The element.
:type element: hatemile.util.html.htmldomelement.HTMLDOMElement
        :return: The description of the element.
:rtype: str
"""
description = None
if element.has_attribute('title'):
description = element.get_attribute('title')
elif element.has_attribute('aria-label'):
description = element.get_attribute('aria-label')
elif element.has_attribute('alt'):
description = element.get_attribute('alt')
elif element.has_attribute('label'):
description = element.get_attribute('label')
elif (
(element.has_attribute('aria-labelledby'))
or (element.has_attribute('aria-describedby'))
):
if element.has_attribute('aria-labelledby'):
description_ids = re.split(
'[ \n\r\t]+',
element.get_attribute('aria-labelledby').strip()
)
else:
description_ids = re.split(
'[ \n\r\t]+',
element.get_attribute('aria-describedby').strip()
)
for description_id in description_ids:
element_description = self.parser.find(
'#' + description_id
).first_result()
if element_description is not None:
description = element_description.get_text_content()
break
elif (
(element.get_tag_name() == 'INPUT')
and (element.has_attribute('type'))
):
type_attribute = element.get_attribute('type').lower()
if (
(
(type_attribute == 'button')
or (type_attribute == 'submit')
or (type_attribute == 'reset')
)
and (element.has_attribute('value'))
):
description = element.get_attribute('value')
if not bool(description):
description = element.get_text_content()
return re.sub('[ \n\r\t]+', ' ', description.strip())
|
[
"def",
"_get_description",
"(",
"self",
",",
"element",
")",
":",
"description",
"=",
"None",
"if",
"element",
".",
"has_attribute",
"(",
"'title'",
")",
":",
"description",
"=",
"element",
".",
"get_attribute",
"(",
"'title'",
")",
"elif",
"element",
".",
"has_attribute",
"(",
"'aria-label'",
")",
":",
"description",
"=",
"element",
".",
"get_attribute",
"(",
"'aria-label'",
")",
"elif",
"element",
".",
"has_attribute",
"(",
"'alt'",
")",
":",
"description",
"=",
"element",
".",
"get_attribute",
"(",
"'alt'",
")",
"elif",
"element",
".",
"has_attribute",
"(",
"'label'",
")",
":",
"description",
"=",
"element",
".",
"get_attribute",
"(",
"'label'",
")",
"elif",
"(",
"(",
"element",
".",
"has_attribute",
"(",
"'aria-labelledby'",
")",
")",
"or",
"(",
"element",
".",
"has_attribute",
"(",
"'aria-describedby'",
")",
")",
")",
":",
"if",
"element",
".",
"has_attribute",
"(",
"'aria-labelledby'",
")",
":",
"description_ids",
"=",
"re",
".",
"split",
"(",
"'[ \\n\\r\\t]+'",
",",
"element",
".",
"get_attribute",
"(",
"'aria-labelledby'",
")",
".",
"strip",
"(",
")",
")",
"else",
":",
"description_ids",
"=",
"re",
".",
"split",
"(",
"'[ \\n\\r\\t]+'",
",",
"element",
".",
"get_attribute",
"(",
"'aria-describedby'",
")",
".",
"strip",
"(",
")",
")",
"for",
"description_id",
"in",
"description_ids",
":",
"element_description",
"=",
"self",
".",
"parser",
".",
"find",
"(",
"'#'",
"+",
"description_id",
")",
".",
"first_result",
"(",
")",
"if",
"element_description",
"is",
"not",
"None",
":",
"description",
"=",
"element_description",
".",
"get_text_content",
"(",
")",
"break",
"elif",
"(",
"(",
"element",
".",
"get_tag_name",
"(",
")",
"==",
"'INPUT'",
")",
"and",
"(",
"element",
".",
"has_attribute",
"(",
"'type'",
")",
")",
")",
":",
"type_attribute",
"=",
"element",
".",
"get_attribute",
"(",
"'type'",
")",
".",
"lower",
"(",
")",
"if",
"(",
"(",
"(",
"type_attribute",
"==",
"'button'",
")",
"or",
"(",
"type_attribute",
"==",
"'submit'",
")",
"or",
"(",
"type_attribute",
"==",
"'reset'",
")",
")",
"and",
"(",
"element",
".",
"has_attribute",
"(",
"'value'",
")",
")",
")",
":",
"description",
"=",
"element",
".",
"get_attribute",
"(",
"'value'",
")",
"if",
"not",
"bool",
"(",
"description",
")",
":",
"description",
"=",
"element",
".",
"get_text_content",
"(",
")",
"return",
"re",
".",
"sub",
"(",
"'[ \\n\\r\\t]+'",
",",
"' '",
",",
"description",
".",
"strip",
"(",
")",
")"
] | 38.824561 | 15.421053 |
def get_page(self, path, return_content=True, return_html=True):
""" Get a Telegraph page
:param path: Path to the Telegraph page (in the format Title-12-31,
i.e. everything that comes after https://telegra.ph/)
:param return_content: If true, content field will be returned
:param return_html: If true, returns HTML instead of Nodes list
"""
response = self._telegraph.method('getPage', path=path, values={
'return_content': return_content
})
if return_content and return_html:
response['content'] = nodes_to_html(response['content'])
return response
|
[
"def",
"get_page",
"(",
"self",
",",
"path",
",",
"return_content",
"=",
"True",
",",
"return_html",
"=",
"True",
")",
":",
"response",
"=",
"self",
".",
"_telegraph",
".",
"method",
"(",
"'getPage'",
",",
"path",
"=",
"path",
",",
"values",
"=",
"{",
"'return_content'",
":",
"return_content",
"}",
")",
"if",
"return_content",
"and",
"return_html",
":",
"response",
"[",
"'content'",
"]",
"=",
"nodes_to_html",
"(",
"response",
"[",
"'content'",
"]",
")",
"return",
"response"
] | 36.444444 | 25.944444 |
def stmt2enum(enum_type, declare=True, assign=True, wrap=True):
"""Returns a dzn enum declaration from an enum type.
Parameters
----------
enum_type : Enum
The enum to serialize.
declare : bool
        Whether to include the ``enum`` declaration keyword in the statement or
just the assignment.
assign : bool
        Whether to include the assignment of the enum in the statement or just
the declaration.
wrap : bool
Whether to wrap the serialized enum.
Returns
-------
str
The serialized dzn representation of the enum.
"""
if not (declare or assign):
raise ValueError(
'The statement must be a declaration or an assignment.'
)
stmt = []
if declare:
stmt.append('enum ')
stmt.append(enum_type.__name__)
if assign:
val_str = []
for v in list(enum_type):
val_str.append(v.name)
val_str = ''.join(['{', ','.join(val_str), '}'])
if wrap:
wrapper = _get_wrapper()
val_str = wrapper.fill(val_str)
stmt.append(' = {}'.format(val_str))
stmt.append(';')
return ''.join(stmt)
|
[
"def",
"stmt2enum",
"(",
"enum_type",
",",
"declare",
"=",
"True",
",",
"assign",
"=",
"True",
",",
"wrap",
"=",
"True",
")",
":",
"if",
"not",
"(",
"declare",
"or",
"assign",
")",
":",
"raise",
"ValueError",
"(",
"'The statement must be a declaration or an assignment.'",
")",
"stmt",
"=",
"[",
"]",
"if",
"declare",
":",
"stmt",
".",
"append",
"(",
"'enum '",
")",
"stmt",
".",
"append",
"(",
"enum_type",
".",
"__name__",
")",
"if",
"assign",
":",
"val_str",
"=",
"[",
"]",
"for",
"v",
"in",
"list",
"(",
"enum_type",
")",
":",
"val_str",
".",
"append",
"(",
"v",
".",
"name",
")",
"val_str",
"=",
"''",
".",
"join",
"(",
"[",
"'{'",
",",
"','",
".",
"join",
"(",
"val_str",
")",
",",
"'}'",
"]",
")",
"if",
"wrap",
":",
"wrapper",
"=",
"_get_wrapper",
"(",
")",
"val_str",
"=",
"wrapper",
".",
"fill",
"(",
"val_str",
")",
"stmt",
".",
"append",
"(",
"' = {}'",
".",
"format",
"(",
"val_str",
")",
")",
"stmt",
".",
"append",
"(",
"';'",
")",
"return",
"''",
".",
"join",
"(",
"stmt",
")"
] | 26.181818 | 21.022727 |
def grant_user_access(self, user, db_names, strict=True):
"""
Gives access to the databases listed in `db_names` to the user.
"""
return self._user_manager.grant_user_access(user, db_names,
strict=strict)
|
[
"def",
"grant_user_access",
"(",
"self",
",",
"user",
",",
"db_names",
",",
"strict",
"=",
"True",
")",
":",
"return",
"self",
".",
"_user_manager",
".",
"grant_user_access",
"(",
"user",
",",
"db_names",
",",
"strict",
"=",
"strict",
")"
] | 41.166667 | 14.166667 |
def register(linter):
"""Register all transforms with the linter."""
MANAGER.register_transform(astroid.Call, transform_declare)
MANAGER.register_transform(astroid.Module, transform_conf_module)
|
[
"def",
"register",
"(",
"linter",
")",
":",
"MANAGER",
".",
"register_transform",
"(",
"astroid",
".",
"Call",
",",
"transform_declare",
")",
"MANAGER",
".",
"register_transform",
"(",
"astroid",
".",
"Module",
",",
"transform_conf_module",
")"
] | 49.25 | 16.75 |
def global_iterator_to_indices(self, git=None):
"""
Return sympy expressions translating global_iterator to loop indices.
        If git is given, integers are returned instead.
"""
# unwind global iteration count into loop counters:
base_loop_counters = {}
global_iterator = symbol_pos_int('global_iterator')
idiv = implemented_function(sympy.Function(str('idiv')), lambda x, y: x//y)
total_length = 1
last_incr = 1
for var_name, start, end, incr in reversed(self._loop_stack):
loop_var = symbol_pos_int(var_name)
# This unspools the iterations:
length = end-start # FIXME is incr handled correct here?
counter = start+(idiv(global_iterator*last_incr, total_length)*incr) % length
total_length = total_length*length
last_incr = incr
base_loop_counters[loop_var] = sympy.lambdify(
global_iterator,
self.subs_consts(counter), modules=[numpy, {'Mod': numpy.mod}])
if git is not None:
try: # Try to resolve to integer if global_iterator was given
base_loop_counters[loop_var] = sympy.Integer(self.subs_consts(counter))
continue
except (ValueError, TypeError):
base_loop_counters[loop_var] = base_loop_counters[loop_var](git)
return base_loop_counters
|
[
"def",
"global_iterator_to_indices",
"(",
"self",
",",
"git",
"=",
"None",
")",
":",
"# unwind global iteration count into loop counters:",
"base_loop_counters",
"=",
"{",
"}",
"global_iterator",
"=",
"symbol_pos_int",
"(",
"'global_iterator'",
")",
"idiv",
"=",
"implemented_function",
"(",
"sympy",
".",
"Function",
"(",
"str",
"(",
"'idiv'",
")",
")",
",",
"lambda",
"x",
",",
"y",
":",
"x",
"//",
"y",
")",
"total_length",
"=",
"1",
"last_incr",
"=",
"1",
"for",
"var_name",
",",
"start",
",",
"end",
",",
"incr",
"in",
"reversed",
"(",
"self",
".",
"_loop_stack",
")",
":",
"loop_var",
"=",
"symbol_pos_int",
"(",
"var_name",
")",
"# This unspools the iterations:",
"length",
"=",
"end",
"-",
"start",
"# FIXME is incr handled correct here?",
"counter",
"=",
"start",
"+",
"(",
"idiv",
"(",
"global_iterator",
"*",
"last_incr",
",",
"total_length",
")",
"*",
"incr",
")",
"%",
"length",
"total_length",
"=",
"total_length",
"*",
"length",
"last_incr",
"=",
"incr",
"base_loop_counters",
"[",
"loop_var",
"]",
"=",
"sympy",
".",
"lambdify",
"(",
"global_iterator",
",",
"self",
".",
"subs_consts",
"(",
"counter",
")",
",",
"modules",
"=",
"[",
"numpy",
",",
"{",
"'Mod'",
":",
"numpy",
".",
"mod",
"}",
"]",
")",
"if",
"git",
"is",
"not",
"None",
":",
"try",
":",
"# Try to resolve to integer if global_iterator was given",
"base_loop_counters",
"[",
"loop_var",
"]",
"=",
"sympy",
".",
"Integer",
"(",
"self",
".",
"subs_consts",
"(",
"counter",
")",
")",
"continue",
"except",
"(",
"ValueError",
",",
"TypeError",
")",
":",
"base_loop_counters",
"[",
"loop_var",
"]",
"=",
"base_loop_counters",
"[",
"loop_var",
"]",
"(",
"git",
")",
"return",
"base_loop_counters"
] | 43.454545 | 22.909091 |
def _subscription_thread(self, endpoint):
"""
        Thread method running the connection for each endpoint.
:param endpoint:
:return:
"""
try:
conn = create_connection(self.addr + endpoint, timeout=5)
except WebSocketTimeoutException:
self.restart_q.put(endpoint)
return
while self.threads_running[endpoint]:
try:
msg = conn.recv()
except WebSocketTimeoutException:
self._controller_q.put(endpoint)
log.debug("%s, %s", endpoint, msg)
ep, pair = endpoint.split('/')
log.debug("_subscription_thread(): Putting data on q..")
try:
self.data_q.put((ep, pair, msg, time.time()), timeout=1)
except TimeoutError:
continue
finally:
log.debug("_subscription_thread(): Data Processed, looping back..")
conn.close()
log.debug("_subscription_thread(): Thread Loop Ended.")
|
[
"def",
"_subscription_thread",
"(",
"self",
",",
"endpoint",
")",
":",
"try",
":",
"conn",
"=",
"create_connection",
"(",
"self",
".",
"addr",
"+",
"endpoint",
",",
"timeout",
"=",
"5",
")",
"except",
"WebSocketTimeoutException",
":",
"self",
".",
"restart_q",
".",
"put",
"(",
"endpoint",
")",
"return",
"while",
"self",
".",
"threads_running",
"[",
"endpoint",
"]",
":",
"try",
":",
"msg",
"=",
"conn",
".",
"recv",
"(",
")",
"except",
"WebSocketTimeoutException",
":",
"self",
".",
"_controller_q",
".",
"put",
"(",
"endpoint",
")",
"log",
".",
"debug",
"(",
"\"%s, %s\"",
",",
"endpoint",
",",
"msg",
")",
"ep",
",",
"pair",
"=",
"endpoint",
".",
"split",
"(",
"'/'",
")",
"log",
".",
"debug",
"(",
"\"_subscription_thread(): Putting data on q..\"",
")",
"try",
":",
"self",
".",
"data_q",
".",
"put",
"(",
"(",
"ep",
",",
"pair",
",",
"msg",
",",
"time",
".",
"time",
"(",
")",
")",
",",
"timeout",
"=",
"1",
")",
"except",
"TimeoutError",
":",
"continue",
"finally",
":",
"log",
".",
"debug",
"(",
"\"_subscription_thread(): Data Processed, looping back..\"",
")",
"conn",
".",
"close",
"(",
")",
"log",
".",
"debug",
"(",
"\"_subscription_thread(): Thread Loop Ended.\"",
")"
] | 35.172414 | 17.103448 |
def setApparentDecel(self, typeID, decel):
"""setDecel(string, double) -> None
Sets the apparent deceleration in m/s^2 of vehicles of this type.
"""
self._connection._sendDoubleCmd(
tc.CMD_SET_VEHICLETYPE_VARIABLE, tc.VAR_APPARENT_DECEL, typeID, decel)
|
[
"def",
"setApparentDecel",
"(",
"self",
",",
"typeID",
",",
"decel",
")",
":",
"self",
".",
"_connection",
".",
"_sendDoubleCmd",
"(",
"tc",
".",
"CMD_SET_VEHICLETYPE_VARIABLE",
",",
"tc",
".",
"VAR_APPARENT_DECEL",
",",
"typeID",
",",
"decel",
")"
] | 41.571429 | 16.714286 |
def flatten(text):
"""
Flatten the text:
* make sure each record is on one line.
    * remove parentheses
"""
lines = text.split("\n")
# tokens: sequence of non-whitespace separated by '' where a newline was
tokens = []
for l in lines:
if len(l) == 0:
continue
l = l.replace("\t", " ")
tokens += filter(lambda x: len(x) > 0, l.split(" ")) + ['']
# find (...) and turn it into a single line ("capture" it)
capturing = False
captured = []
flattened = []
while len(tokens) > 0:
tok = tokens.pop(0)
if not capturing and len(tok) == 0:
# normal end-of-line
if len(captured) > 0:
flattened.append(" ".join(captured))
captured = []
continue
if tok.startswith("("):
# begin grouping
tok = tok.lstrip("(")
capturing = True
if capturing and tok.endswith(")"):
# end grouping. next end-of-line will turn this sequence into a flat line
tok = tok.rstrip(")")
capturing = False
captured.append(tok)
return "\n".join(flattened)
|
[
"def",
"flatten",
"(",
"text",
")",
":",
"lines",
"=",
"text",
".",
"split",
"(",
"\"\\n\"",
")",
"# tokens: sequence of non-whitespace separated by '' where a newline was",
"tokens",
"=",
"[",
"]",
"for",
"l",
"in",
"lines",
":",
"if",
"len",
"(",
"l",
")",
"==",
"0",
":",
"continue",
"l",
"=",
"l",
".",
"replace",
"(",
"\"\\t\"",
",",
"\" \"",
")",
"tokens",
"+=",
"filter",
"(",
"lambda",
"x",
":",
"len",
"(",
"x",
")",
">",
"0",
",",
"l",
".",
"split",
"(",
"\" \"",
")",
")",
"+",
"[",
"''",
"]",
"# find (...) and turn it into a single line (\"capture\" it)",
"capturing",
"=",
"False",
"captured",
"=",
"[",
"]",
"flattened",
"=",
"[",
"]",
"while",
"len",
"(",
"tokens",
")",
">",
"0",
":",
"tok",
"=",
"tokens",
".",
"pop",
"(",
"0",
")",
"if",
"not",
"capturing",
"and",
"len",
"(",
"tok",
")",
"==",
"0",
":",
"# normal end-of-line",
"if",
"len",
"(",
"captured",
")",
">",
"0",
":",
"flattened",
".",
"append",
"(",
"\" \"",
".",
"join",
"(",
"captured",
")",
")",
"captured",
"=",
"[",
"]",
"continue",
"if",
"tok",
".",
"startswith",
"(",
"\"(\"",
")",
":",
"# begin grouping",
"tok",
"=",
"tok",
".",
"lstrip",
"(",
"\"(\"",
")",
"capturing",
"=",
"True",
"if",
"capturing",
"and",
"tok",
".",
"endswith",
"(",
"\")\"",
")",
":",
"# end grouping. next end-of-line will turn this sequence into a flat line",
"tok",
"=",
"tok",
".",
"rstrip",
"(",
"\")\"",
")",
"capturing",
"=",
"False",
"captured",
".",
"append",
"(",
"tok",
")",
"return",
"\"\\n\"",
".",
"join",
"(",
"flattened",
")"
] | 26.227273 | 19.181818 |
def constraint_from_choices(cls, value_type: type, choices: collections.Sequence):
"""
Returns a constraint callable based on choices of a given type
"""
choices_str = ', '.join(map(str, choices))
def constraint(value):
value = value_type(value)
if value not in choices:
raise ParameterError('Argument must be one of %s' % choices_str)
return value
constraint.__name__ = 'choices_%s' % value_type.__name__
constraint.__doc__ = 'choice of %s' % choices_str
return constraint
|
[
"def",
"constraint_from_choices",
"(",
"cls",
",",
"value_type",
":",
"type",
",",
"choices",
":",
"collections",
".",
"Sequence",
")",
":",
"choices_str",
"=",
"', '",
".",
"join",
"(",
"map",
"(",
"str",
",",
"choices",
")",
")",
"def",
"constraint",
"(",
"value",
")",
":",
"value",
"=",
"value_type",
"(",
"value",
")",
"if",
"value",
"not",
"in",
"choices",
":",
"raise",
"ParameterError",
"(",
"'Argument must be one of %s'",
"%",
"choices_str",
")",
"return",
"value",
"constraint",
".",
"__name__",
"=",
"'choices_%s'",
"%",
"value_type",
".",
"__name__",
"constraint",
".",
"__doc__",
"=",
"'choice of %s'",
"%",
"choices_str",
"return",
"constraint"
] | 38.466667 | 19.4 |
def add_item(self, api_token, content, **kwargs):
"""Add a task to a project.
    :param api_token: The user's login token.
    :type api_token: str
:param content: The task description.
:type content: str
:param project_id: The project to add the task to. Default is ``Inbox``
:type project_id: str
:param date_string: The deadline date for the task.
:type date_string: str
:param priority: The task priority ``(1-4)``.
:type priority: int
:param indent: The task indentation ``(1-4)``.
:type indent: int
:param item_order: The task order.
:type item_order: int
:param children: A list of child tasks IDs.
:type children: str
:param labels: A list of label IDs.
:type labels: str
    :param assigned_by_uid: The ID of the user who assigns the current task.
        Accepts 0 or any user id from the list of project collaborators.
        If the value is unset or invalid it will automatically be set to
        your uid.
:type assigned_by_uid: str
    :param responsible_uid: The id of the user who is responsible for
accomplishing the current task. Accepts 0 or any user id from
the list of project collaborators. If the value is unset or
invalid it will automatically be set to null.
:type responsible_uid: str
:param note: Content of a note to add.
:type note: str
:return: The HTTP response to the request.
:rtype: :class:`requests.Response`
>>> from pytodoist.api import TodoistAPI
>>> api = TodoistAPI()
>>> response = api.login('[email protected]', 'password')
>>> user_info = response.json()
>>> user_api_token = user_info['token']
>>> response = api.add_item(user_api_token, 'Install PyTodoist')
>>> task = response.json()
>>> print(task['content'])
Install PyTodoist
"""
params = {
'token': api_token,
'content': content
}
return self._post('add_item', params, **kwargs)
|
[
"def",
"add_item",
"(",
"self",
",",
"api_token",
",",
"content",
",",
"*",
"*",
"kwargs",
")",
":",
"params",
"=",
"{",
"'token'",
":",
"api_token",
",",
"'content'",
":",
"content",
"}",
"return",
"self",
".",
"_post",
"(",
"'add_item'",
",",
"params",
",",
"*",
"*",
"kwargs",
")"
] | 41.039216 | 15.666667 |
def is_valid_endpoint(endpoint):
"""
Verify if endpoint is valid.
:type endpoint: string
    :param endpoint: An endpoint. Must be a hostname, optionally with a port, and must not include a scheme.
:return: True if the endpoint is valid. Raise :exc:`InvalidEndpointError`
otherwise.
"""
try:
if urlsplit(endpoint).scheme:
raise InvalidEndpointError('Hostname cannot have a scheme.')
hostname = endpoint.split(':')[0]
if hostname is None:
raise InvalidEndpointError('Hostname cannot be empty.')
if len(hostname) > 255:
raise InvalidEndpointError('Hostname cannot be greater than 255.')
if hostname[-1] == '.':
hostname = hostname[:-1]
if not _ALLOWED_HOSTNAME_REGEX.match(hostname):
raise InvalidEndpointError('Hostname does not meet URL standards.')
except AttributeError as error:
raise TypeError(error)
return True
|
[
"def",
"is_valid_endpoint",
"(",
"endpoint",
")",
":",
"try",
":",
"if",
"urlsplit",
"(",
"endpoint",
")",
".",
"scheme",
":",
"raise",
"InvalidEndpointError",
"(",
"'Hostname cannot have a scheme.'",
")",
"hostname",
"=",
"endpoint",
".",
"split",
"(",
"':'",
")",
"[",
"0",
"]",
"if",
"hostname",
"is",
"None",
":",
"raise",
"InvalidEndpointError",
"(",
"'Hostname cannot be empty.'",
")",
"if",
"len",
"(",
"hostname",
")",
">",
"255",
":",
"raise",
"InvalidEndpointError",
"(",
"'Hostname cannot be greater than 255.'",
")",
"if",
"hostname",
"[",
"-",
"1",
"]",
"==",
"'.'",
":",
"hostname",
"=",
"hostname",
"[",
":",
"-",
"1",
"]",
"if",
"not",
"_ALLOWED_HOSTNAME_REGEX",
".",
"match",
"(",
"hostname",
")",
":",
"raise",
"InvalidEndpointError",
"(",
"'Hostname does not meet URL standards.'",
")",
"except",
"AttributeError",
"as",
"error",
":",
"raise",
"TypeError",
"(",
"error",
")",
"return",
"True"
] | 30.6 | 22.266667 |
def _clear_audio_file(self, audio_file):
"""
Clear audio from memory.
:param audio_file: the object to clear
:type audio_file: :class:`~aeneas.audiofile.AudioFile`
"""
self._step_begin(u"clear audio file")
audio_file.clear_data()
audio_file = None
self._step_end()
|
[
"def",
"_clear_audio_file",
"(",
"self",
",",
"audio_file",
")",
":",
"self",
".",
"_step_begin",
"(",
"u\"clear audio file\"",
")",
"audio_file",
".",
"clear_data",
"(",
")",
"audio_file",
"=",
"None",
"self",
".",
"_step_end",
"(",
")"
] | 29.818182 | 11.090909 |
def prepare_cache_id(self, combined_args_kw):
"get the cacheid (conc. string of argument self.ids in order)"
cache_id = "".join(self.id(a) for a in combined_args_kw)
return cache_id
|
[
"def",
"prepare_cache_id",
"(",
"self",
",",
"combined_args_kw",
")",
":",
"cache_id",
"=",
"\"\"",
".",
"join",
"(",
"self",
".",
"id",
"(",
"a",
")",
"for",
"a",
"in",
"combined_args_kw",
")",
"return",
"cache_id"
] | 50.5 | 19 |
def clean(ctx, node=False, translations=False, all=False):
'''Cleanup all build artifacts'''
header('Clean all build artifacts')
patterns = [
'build', 'dist', 'cover', 'docs/_build',
'**/*.pyc', '*.egg-info', '.tox', 'udata/static/*'
]
if node or all:
patterns.append('node_modules')
if translations or all:
patterns.append('udata/translations/*/LC_MESSAGES/udata.mo')
for pattern in patterns:
info(pattern)
lrun('rm -rf {0}'.format(' '.join(patterns)))
|
[
"def",
"clean",
"(",
"ctx",
",",
"node",
"=",
"False",
",",
"translations",
"=",
"False",
",",
"all",
"=",
"False",
")",
":",
"header",
"(",
"'Clean all build artifacts'",
")",
"patterns",
"=",
"[",
"'build'",
",",
"'dist'",
",",
"'cover'",
",",
"'docs/_build'",
",",
"'**/*.pyc'",
",",
"'*.egg-info'",
",",
"'.tox'",
",",
"'udata/static/*'",
"]",
"if",
"node",
"or",
"all",
":",
"patterns",
".",
"append",
"(",
"'node_modules'",
")",
"if",
"translations",
"or",
"all",
":",
"patterns",
".",
"append",
"(",
"'udata/translations/*/LC_MESSAGES/udata.mo'",
")",
"for",
"pattern",
"in",
"patterns",
":",
"info",
"(",
"pattern",
")",
"lrun",
"(",
"'rm -rf {0}'",
".",
"format",
"(",
"' '",
".",
"join",
"(",
"patterns",
")",
")",
")"
] | 36.571429 | 15 |
def _set_redistribute(self, v, load=False):
"""
Setter method for redistribute, mapped from YANG variable /rbridge_id/router/ospf/permit/redistribute (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_redistribute is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_redistribute() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("redist_value route_option",redistribute.redistribute, yang_name="redistribute", rest_name="redistribute", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='redist-value route-option', extensions={u'tailf-common': {u'cli-compact-syntax': None, u'cli-suppress-mode': None, u'callpoint': u'OSPFPermitRedistributeCallPoint'}}), is_container='list', yang_name="redistribute", rest_name="redistribute", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-compact-syntax': None, u'cli-suppress-mode': None, u'callpoint': u'OSPFPermitRedistributeCallPoint'}}, namespace='urn:brocade.com:mgmt:brocade-ospf', defining_module='brocade-ospf', yang_type='list', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """redistribute must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("redist_value route_option",redistribute.redistribute, yang_name="redistribute", rest_name="redistribute", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='redist-value route-option', extensions={u'tailf-common': {u'cli-compact-syntax': None, u'cli-suppress-mode': None, u'callpoint': u'OSPFPermitRedistributeCallPoint'}}), is_container='list', yang_name="redistribute", rest_name="redistribute", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-compact-syntax': None, u'cli-suppress-mode': None, u'callpoint': u'OSPFPermitRedistributeCallPoint'}}, namespace='urn:brocade.com:mgmt:brocade-ospf', defining_module='brocade-ospf', yang_type='list', is_config=True)""",
})
self.__redistribute = t
if hasattr(self, '_set'):
self._set()
|
[
"def",
"_set_redistribute",
"(",
"self",
",",
"v",
",",
"load",
"=",
"False",
")",
":",
"if",
"hasattr",
"(",
"v",
",",
"\"_utype\"",
")",
":",
"v",
"=",
"v",
".",
"_utype",
"(",
"v",
")",
"try",
":",
"t",
"=",
"YANGDynClass",
"(",
"v",
",",
"base",
"=",
"YANGListType",
"(",
"\"redist_value route_option\"",
",",
"redistribute",
".",
"redistribute",
",",
"yang_name",
"=",
"\"redistribute\"",
",",
"rest_name",
"=",
"\"redistribute\"",
",",
"parent",
"=",
"self",
",",
"is_container",
"=",
"'list'",
",",
"user_ordered",
"=",
"False",
",",
"path_helper",
"=",
"self",
".",
"_path_helper",
",",
"yang_keys",
"=",
"'redist-value route-option'",
",",
"extensions",
"=",
"{",
"u'tailf-common'",
":",
"{",
"u'cli-compact-syntax'",
":",
"None",
",",
"u'cli-suppress-mode'",
":",
"None",
",",
"u'callpoint'",
":",
"u'OSPFPermitRedistributeCallPoint'",
"}",
"}",
")",
",",
"is_container",
"=",
"'list'",
",",
"yang_name",
"=",
"\"redistribute\"",
",",
"rest_name",
"=",
"\"redistribute\"",
",",
"parent",
"=",
"self",
",",
"path_helper",
"=",
"self",
".",
"_path_helper",
",",
"extmethods",
"=",
"self",
".",
"_extmethods",
",",
"register_paths",
"=",
"True",
",",
"extensions",
"=",
"{",
"u'tailf-common'",
":",
"{",
"u'cli-compact-syntax'",
":",
"None",
",",
"u'cli-suppress-mode'",
":",
"None",
",",
"u'callpoint'",
":",
"u'OSPFPermitRedistributeCallPoint'",
"}",
"}",
",",
"namespace",
"=",
"'urn:brocade.com:mgmt:brocade-ospf'",
",",
"defining_module",
"=",
"'brocade-ospf'",
",",
"yang_type",
"=",
"'list'",
",",
"is_config",
"=",
"True",
")",
"except",
"(",
"TypeError",
",",
"ValueError",
")",
":",
"raise",
"ValueError",
"(",
"{",
"'error-string'",
":",
"\"\"\"redistribute must be of a type compatible with list\"\"\"",
",",
"'defined-type'",
":",
"\"list\"",
",",
"'generated-type'",
":",
"\"\"\"YANGDynClass(base=YANGListType(\"redist_value route_option\",redistribute.redistribute, yang_name=\"redistribute\", rest_name=\"redistribute\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='redist-value route-option', extensions={u'tailf-common': {u'cli-compact-syntax': None, u'cli-suppress-mode': None, u'callpoint': u'OSPFPermitRedistributeCallPoint'}}), is_container='list', yang_name=\"redistribute\", rest_name=\"redistribute\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-compact-syntax': None, u'cli-suppress-mode': None, u'callpoint': u'OSPFPermitRedistributeCallPoint'}}, namespace='urn:brocade.com:mgmt:brocade-ospf', defining_module='brocade-ospf', yang_type='list', is_config=True)\"\"\"",
",",
"}",
")",
"self",
".",
"__redistribute",
"=",
"t",
"if",
"hasattr",
"(",
"self",
",",
"'_set'",
")",
":",
"self",
".",
"_set",
"(",
")"
] | 109.954545 | 52.818182 |
def rmpart(self, disk, number):
"""
        Remove partition from disk
:param disk: device path (/dev/sda, /dev/sdb, etc...)
:param number: Partition number (starting from 1)
"""
args = {
'disk': disk,
'number': number,
}
self._rmpart_chk.check(args)
response = self._client.raw('disk.rmpart', args)
result = response.get()
if result.state != 'SUCCESS':
raise RuntimeError('failed to remove partition: %s' % result.stderr)
|
[
"def",
"rmpart",
"(",
"self",
",",
"disk",
",",
"number",
")",
":",
"args",
"=",
"{",
"'disk'",
":",
"disk",
",",
"'number'",
":",
"number",
",",
"}",
"self",
".",
"_rmpart_chk",
".",
"check",
"(",
"args",
")",
"response",
"=",
"self",
".",
"_client",
".",
"raw",
"(",
"'disk.rmpart'",
",",
"args",
")",
"result",
"=",
"response",
".",
"get",
"(",
")",
"if",
"result",
".",
"state",
"!=",
"'SUCCESS'",
":",
"raise",
"RuntimeError",
"(",
"'failed to remove partition: %s'",
"%",
"result",
".",
"stderr",
")"
] | 27.473684 | 19.368421 |
def label_correcting_check_cycle(self, j, pred):
'''
API:
label_correcting_check_cycle(self, j, pred)
Description:
            Checks if the predecessor dictionary has a cycle; j represents the
            node whose predecessor was recently updated.
Pre:
(1) predecessor of source node should be None.
Input:
            j: node whose predecessor was recently updated.
pred: predecessor dictionary
Return:
If there exists a cycle, returns the list that represents the
            cycle, otherwise it returns None.
'''
labelled = {}
for n in self.neighbors:
labelled[n] = None
current = j
while current != None:
if labelled[current]==j:
cycle = self.label_correcting_get_cycle(j, pred)
return cycle
labelled[current] = j
current = pred[current]
return None
|
[
"def",
"label_correcting_check_cycle",
"(",
"self",
",",
"j",
",",
"pred",
")",
":",
"labelled",
"=",
"{",
"}",
"for",
"n",
"in",
"self",
".",
"neighbors",
":",
"labelled",
"[",
"n",
"]",
"=",
"None",
"current",
"=",
"j",
"while",
"current",
"!=",
"None",
":",
"if",
"labelled",
"[",
"current",
"]",
"==",
"j",
":",
"cycle",
"=",
"self",
".",
"label_correcting_get_cycle",
"(",
"j",
",",
"pred",
")",
"return",
"cycle",
"labelled",
"[",
"current",
"]",
"=",
"j",
"current",
"=",
"pred",
"[",
"current",
"]",
"return",
"None"
] | 35.148148 | 17.518519 |
def create_delete_model(record):
"""Create an S3 model from a record."""
arn = f"arn:aws:s3:::{cloudwatch.filter_request_parameters('bucketName', record)}"
LOG.debug(f'[-] Deleting Dynamodb Records. Hash Key: {arn}')
data = {
'arn': arn,
'principalId': cloudwatch.get_principal(record),
'userIdentity': cloudwatch.get_user_identity(record),
'accountId': record['account'],
'eventTime': record['detail']['eventTime'],
'BucketName': cloudwatch.filter_request_parameters('bucketName', record),
'Region': cloudwatch.get_region(record),
'Tags': {},
'configuration': {},
'eventSource': record['detail']['eventSource'],
'version': VERSION
}
return CurrentS3Model(**data)
|
[
"def",
"create_delete_model",
"(",
"record",
")",
":",
"arn",
"=",
"f\"arn:aws:s3:::{cloudwatch.filter_request_parameters('bucketName', record)}\"",
"LOG",
".",
"debug",
"(",
"f'[-] Deleting Dynamodb Records. Hash Key: {arn}'",
")",
"data",
"=",
"{",
"'arn'",
":",
"arn",
",",
"'principalId'",
":",
"cloudwatch",
".",
"get_principal",
"(",
"record",
")",
",",
"'userIdentity'",
":",
"cloudwatch",
".",
"get_user_identity",
"(",
"record",
")",
",",
"'accountId'",
":",
"record",
"[",
"'account'",
"]",
",",
"'eventTime'",
":",
"record",
"[",
"'detail'",
"]",
"[",
"'eventTime'",
"]",
",",
"'BucketName'",
":",
"cloudwatch",
".",
"filter_request_parameters",
"(",
"'bucketName'",
",",
"record",
")",
",",
"'Region'",
":",
"cloudwatch",
".",
"get_region",
"(",
"record",
")",
",",
"'Tags'",
":",
"{",
"}",
",",
"'configuration'",
":",
"{",
"}",
",",
"'eventSource'",
":",
"record",
"[",
"'detail'",
"]",
"[",
"'eventSource'",
"]",
",",
"'version'",
":",
"VERSION",
"}",
"return",
"CurrentS3Model",
"(",
"*",
"*",
"data",
")"
] | 37.9 | 20.45 |
def _rewrite_guides(self):
"""
Write ``<a:gd>`` elements to the XML, one for each adjustment value.
Any existing guide elements are overwritten.
"""
guides = [(adj.name, adj.val) for adj in self._adjustments_]
self._prstGeom.rewrite_guides(guides)
|
[
"def",
"_rewrite_guides",
"(",
"self",
")",
":",
"guides",
"=",
"[",
"(",
"adj",
".",
"name",
",",
"adj",
".",
"val",
")",
"for",
"adj",
"in",
"self",
".",
"_adjustments_",
"]",
"self",
".",
"_prstGeom",
".",
"rewrite_guides",
"(",
"guides",
")"
] | 41.285714 | 13.571429 |
def pdf_merge(inputs: [str], output: str, delete: bool = False):
"""
    Merge multiple PDF input files into one output file.
:param inputs: input files
:param output: output file
:param delete: delete input files after completion if true
"""
writer = PdfFileWriter()
if os.path.isfile(output):
ans = input(
"The file '%s' already exists. "
"Overwrite? Yes/Abort [Y/a]: " % output
).lower()
if ans == "a":
return
outputfile = open(output, "wb")
try:
infiles = []
for filename in inputs:
f = open(filename, "rb")
reader = PdfFileReader(f)
for page in reader.pages:
writer.addPage(page)
infiles.append(f)
writer.write(outputfile)
except FileNotFoundError as e:
print(e.strerror + ": " + e.filename)
finally:
outputfile.close()
for f in infiles:
f.close()
if delete:
for filename in inputs:
os.remove(filename)
|
[
"def",
"pdf_merge",
"(",
"inputs",
":",
"[",
"str",
"]",
",",
"output",
":",
"str",
",",
"delete",
":",
"bool",
"=",
"False",
")",
":",
"writer",
"=",
"PdfFileWriter",
"(",
")",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"output",
")",
":",
"ans",
"=",
"input",
"(",
"\"The file '%s' already exists. \"",
"\"Overwrite? Yes/Abort [Y/a]: \"",
"%",
"output",
")",
".",
"lower",
"(",
")",
"if",
"ans",
"==",
"\"a\"",
":",
"return",
"outputfile",
"=",
"open",
"(",
"output",
",",
"\"wb\"",
")",
"try",
":",
"infiles",
"=",
"[",
"]",
"for",
"filename",
"in",
"inputs",
":",
"f",
"=",
"open",
"(",
"filename",
",",
"\"rb\"",
")",
"reader",
"=",
"PdfFileReader",
"(",
"f",
")",
"for",
"page",
"in",
"reader",
".",
"pages",
":",
"writer",
".",
"addPage",
"(",
"page",
")",
"infiles",
".",
"append",
"(",
"f",
")",
"writer",
".",
"write",
"(",
"outputfile",
")",
"except",
"FileNotFoundError",
"as",
"e",
":",
"print",
"(",
"e",
".",
"strerror",
"+",
"\": \"",
"+",
"e",
".",
"filename",
")",
"finally",
":",
"outputfile",
".",
"close",
"(",
")",
"for",
"f",
"in",
"infiles",
":",
"f",
".",
"close",
"(",
")",
"if",
"delete",
":",
"for",
"filename",
"in",
"inputs",
":",
"os",
".",
"remove",
"(",
"filename",
")"
] | 28.444444 | 14.166667 |
def marginal_loglike(self, x):
"""Marginal log-likelihood.
Returns ``L_marg(x) = \int L(x,y|z') L(y) dy``
This will used the cached '~fermipy.castro.Interpolator'
object if possible, and construct it if needed.
"""
if self._marg_interp is None:
# This calculates values and caches the spline
return self._marginal_loglike(x)
x = np.array(x, ndmin=1)
return self._marg_interp(x)
|
[
"def",
"marginal_loglike",
"(",
"self",
",",
"x",
")",
":",
"if",
"self",
".",
"_marg_interp",
"is",
"None",
":",
"# This calculates values and caches the spline",
"return",
"self",
".",
"_marginal_loglike",
"(",
"x",
")",
"x",
"=",
"np",
".",
"array",
"(",
"x",
",",
"ndmin",
"=",
"1",
")",
"return",
"self",
".",
"_marg_interp",
"(",
"x",
")"
] | 32.571429 | 15.857143 |
def shutdown_server(self):
"""
Cleanly shutdown the server.
"""
if not self._closing:
self._closing = True
else:
log.warning("Close is already in progress")
return
if self._server:
self._server.close()
yield from self._server.wait_closed()
if self._app:
yield from self._app.shutdown()
if self._handler:
try:
# aiohttp < 2.3
yield from self._handler.finish_connections(2) # Parameter is timeout
except AttributeError:
# aiohttp >= 2.3
yield from self._handler.shutdown(2) # Parameter is timeout
if self._app:
yield from self._app.cleanup()
yield from Controller.instance().stop()
for module in MODULES:
log.debug("Unloading module {}".format(module.__name__))
m = module.instance()
yield from m.unload()
if PortManager.instance().tcp_ports:
log.warning("TCP ports are still used {}".format(PortManager.instance().tcp_ports))
if PortManager.instance().udp_ports:
log.warning("UDP ports are still used {}".format(PortManager.instance().udp_ports))
for task in asyncio.Task.all_tasks():
task.cancel()
try:
yield from asyncio.wait_for(task, 1)
except BaseException:
pass
self._loop.stop()
|
[
"def",
"shutdown_server",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"_closing",
":",
"self",
".",
"_closing",
"=",
"True",
"else",
":",
"log",
".",
"warning",
"(",
"\"Close is already in progress\"",
")",
"return",
"if",
"self",
".",
"_server",
":",
"self",
".",
"_server",
".",
"close",
"(",
")",
"yield",
"from",
"self",
".",
"_server",
".",
"wait_closed",
"(",
")",
"if",
"self",
".",
"_app",
":",
"yield",
"from",
"self",
".",
"_app",
".",
"shutdown",
"(",
")",
"if",
"self",
".",
"_handler",
":",
"try",
":",
"# aiohttp < 2.3",
"yield",
"from",
"self",
".",
"_handler",
".",
"finish_connections",
"(",
"2",
")",
"# Parameter is timeout",
"except",
"AttributeError",
":",
"# aiohttp >= 2.3",
"yield",
"from",
"self",
".",
"_handler",
".",
"shutdown",
"(",
"2",
")",
"# Parameter is timeout",
"if",
"self",
".",
"_app",
":",
"yield",
"from",
"self",
".",
"_app",
".",
"cleanup",
"(",
")",
"yield",
"from",
"Controller",
".",
"instance",
"(",
")",
".",
"stop",
"(",
")",
"for",
"module",
"in",
"MODULES",
":",
"log",
".",
"debug",
"(",
"\"Unloading module {}\"",
".",
"format",
"(",
"module",
".",
"__name__",
")",
")",
"m",
"=",
"module",
".",
"instance",
"(",
")",
"yield",
"from",
"m",
".",
"unload",
"(",
")",
"if",
"PortManager",
".",
"instance",
"(",
")",
".",
"tcp_ports",
":",
"log",
".",
"warning",
"(",
"\"TCP ports are still used {}\"",
".",
"format",
"(",
"PortManager",
".",
"instance",
"(",
")",
".",
"tcp_ports",
")",
")",
"if",
"PortManager",
".",
"instance",
"(",
")",
".",
"udp_ports",
":",
"log",
".",
"warning",
"(",
"\"UDP ports are still used {}\"",
".",
"format",
"(",
"PortManager",
".",
"instance",
"(",
")",
".",
"udp_ports",
")",
")",
"for",
"task",
"in",
"asyncio",
".",
"Task",
".",
"all_tasks",
"(",
")",
":",
"task",
".",
"cancel",
"(",
")",
"try",
":",
"yield",
"from",
"asyncio",
".",
"wait_for",
"(",
"task",
",",
"1",
")",
"except",
"BaseException",
":",
"pass",
"self",
".",
"_loop",
".",
"stop",
"(",
")"
] | 31.234043 | 19.489362 |
def ops(self, start=None, stop=None, **kwargs):
""" Yields all operations (excluding virtual operations) starting from
``start``.
:param int start: Starting block
:param int stop: Stop at this block
:param str mode: We here have the choice between
"head" (the last block) and "irreversible" (the block that is
confirmed by 2/3 of all block producers and is thus irreversible)
:param bool only_virtual_ops: Only yield virtual operations
This call returns a list that only carries one operation and
its type!
"""
for block in self.blocks(start=start, stop=stop, **kwargs):
for tx in block["transactions"]:
for op in tx["operations"]:
# Replace opid by op name
op[0] = self.operationids.getOperationName(op[0])
yield {
"block_num": block["block_num"],
"op": op,
"timestamp": block["timestamp"],
}
|
[
"def",
"ops",
"(",
"self",
",",
"start",
"=",
"None",
",",
"stop",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"for",
"block",
"in",
"self",
".",
"blocks",
"(",
"start",
"=",
"start",
",",
"stop",
"=",
"stop",
",",
"*",
"*",
"kwargs",
")",
":",
"for",
"tx",
"in",
"block",
"[",
"\"transactions\"",
"]",
":",
"for",
"op",
"in",
"tx",
"[",
"\"operations\"",
"]",
":",
"# Replace opid by op name",
"op",
"[",
"0",
"]",
"=",
"self",
".",
"operationids",
".",
"getOperationName",
"(",
"op",
"[",
"0",
"]",
")",
"yield",
"{",
"\"block_num\"",
":",
"block",
"[",
"\"block_num\"",
"]",
",",
"\"op\"",
":",
"op",
",",
"\"timestamp\"",
":",
"block",
"[",
"\"timestamp\"",
"]",
",",
"}"
] | 43.44 | 18.76 |
def get_rendered_fields(self, ctx=None):
'''
:param ctx: rendering context in which the method was called
:return: ordered list of the fields that will be rendered
'''
res = []
if ctx is None:
ctx = RenderContext()
if self._evaluate_condition(ctx):
ctx.push(self)
res = super(Conditional, self).get_rendered_fields(ctx)
ctx.pop()
return res
|
[
"def",
"get_rendered_fields",
"(",
"self",
",",
"ctx",
"=",
"None",
")",
":",
"res",
"=",
"[",
"]",
"if",
"ctx",
"is",
"None",
":",
"ctx",
"=",
"RenderContext",
"(",
")",
"if",
"self",
".",
"_evaluate_condition",
"(",
"ctx",
")",
":",
"ctx",
".",
"push",
"(",
"self",
")",
"res",
"=",
"super",
"(",
"Conditional",
",",
"self",
")",
".",
"get_rendered_fields",
"(",
"ctx",
")",
"ctx",
".",
"pop",
"(",
")",
"return",
"res"
] | 33.846154 | 18.615385 |
def insert(self, x1, x2, name = '', referedObject = []) :
"""Insert the segment in it's right place and returns it.
If there's already a segment S as S.x1 == x1 and S.x2 == x2. S.name will be changed to 'S.name U name' and the
referedObject will be appended to the already existing list"""
if x1 > x2 :
xx1, xx2 = x2, x1
else :
xx1, xx2 = x1, x2
rt = None
insertId = None
childrenToRemove = []
for i in range(len(self.children)) :
if self.children[i].x1 == xx1 and xx2 == self.children[i].x2 :
self.children[i].name = self.children[i].name + ' U ' + name
self.children[i].referedObject.append(referedObject)
return self.children[i]
if self.children[i].x1 <= xx1 and xx2 <= self.children[i].x2 :
return self.children[i].insert(x1, x2, name, referedObject)
elif xx1 <= self.children[i].x1 and self.children[i].x2 <= xx2 :
if rt == None :
if type(referedObject) is types.ListType :
rt = SegmentTree(xx1, xx2, name, referedObject, self, self.level+1)
else :
rt = SegmentTree(xx1, xx2, name, [referedObject], self, self.level+1)
insertId = i
rt.__addChild(self.children[i])
self.children[i].father = rt
childrenToRemove.append(self.children[i])
elif xx1 <= self.children[i].x1 and xx2 <= self.children[i].x2 :
insertId = i
break
if rt != None :
self.__addChild(rt, insertId)
for c in childrenToRemove :
self.children.remove(c)
else :
if type(referedObject) is types.ListType :
rt = SegmentTree(xx1, xx2, name, referedObject, self, self.level+1)
else :
rt = SegmentTree(xx1, xx2, name, [referedObject], self, self.level+1)
if insertId != None :
self.__addChild(rt, insertId)
else :
self.__addChild(rt)
return rt
|
[
"def",
"insert",
"(",
"self",
",",
"x1",
",",
"x2",
",",
"name",
"=",
"''",
",",
"referedObject",
"=",
"[",
"]",
")",
":",
"if",
"x1",
">",
"x2",
":",
"xx1",
",",
"xx2",
"=",
"x2",
",",
"x1",
"else",
":",
"xx1",
",",
"xx2",
"=",
"x1",
",",
"x2",
"rt",
"=",
"None",
"insertId",
"=",
"None",
"childrenToRemove",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"self",
".",
"children",
")",
")",
":",
"if",
"self",
".",
"children",
"[",
"i",
"]",
".",
"x1",
"==",
"xx1",
"and",
"xx2",
"==",
"self",
".",
"children",
"[",
"i",
"]",
".",
"x2",
":",
"self",
".",
"children",
"[",
"i",
"]",
".",
"name",
"=",
"self",
".",
"children",
"[",
"i",
"]",
".",
"name",
"+",
"' U '",
"+",
"name",
"self",
".",
"children",
"[",
"i",
"]",
".",
"referedObject",
".",
"append",
"(",
"referedObject",
")",
"return",
"self",
".",
"children",
"[",
"i",
"]",
"if",
"self",
".",
"children",
"[",
"i",
"]",
".",
"x1",
"<=",
"xx1",
"and",
"xx2",
"<=",
"self",
".",
"children",
"[",
"i",
"]",
".",
"x2",
":",
"return",
"self",
".",
"children",
"[",
"i",
"]",
".",
"insert",
"(",
"x1",
",",
"x2",
",",
"name",
",",
"referedObject",
")",
"elif",
"xx1",
"<=",
"self",
".",
"children",
"[",
"i",
"]",
".",
"x1",
"and",
"self",
".",
"children",
"[",
"i",
"]",
".",
"x2",
"<=",
"xx2",
":",
"if",
"rt",
"==",
"None",
":",
"if",
"type",
"(",
"referedObject",
")",
"is",
"types",
".",
"ListType",
":",
"rt",
"=",
"SegmentTree",
"(",
"xx1",
",",
"xx2",
",",
"name",
",",
"referedObject",
",",
"self",
",",
"self",
".",
"level",
"+",
"1",
")",
"else",
":",
"rt",
"=",
"SegmentTree",
"(",
"xx1",
",",
"xx2",
",",
"name",
",",
"[",
"referedObject",
"]",
",",
"self",
",",
"self",
".",
"level",
"+",
"1",
")",
"insertId",
"=",
"i",
"rt",
".",
"__addChild",
"(",
"self",
".",
"children",
"[",
"i",
"]",
")",
"self",
".",
"children",
"[",
"i",
"]",
".",
"father",
"=",
"rt",
"childrenToRemove",
".",
"append",
"(",
"self",
".",
"children",
"[",
"i",
"]",
")",
"elif",
"xx1",
"<=",
"self",
".",
"children",
"[",
"i",
"]",
".",
"x1",
"and",
"xx2",
"<=",
"self",
".",
"children",
"[",
"i",
"]",
".",
"x2",
":",
"insertId",
"=",
"i",
"break",
"if",
"rt",
"!=",
"None",
":",
"self",
".",
"__addChild",
"(",
"rt",
",",
"insertId",
")",
"for",
"c",
"in",
"childrenToRemove",
":",
"self",
".",
"children",
".",
"remove",
"(",
"c",
")",
"else",
":",
"if",
"type",
"(",
"referedObject",
")",
"is",
"types",
".",
"ListType",
":",
"rt",
"=",
"SegmentTree",
"(",
"xx1",
",",
"xx2",
",",
"name",
",",
"referedObject",
",",
"self",
",",
"self",
".",
"level",
"+",
"1",
")",
"else",
":",
"rt",
"=",
"SegmentTree",
"(",
"xx1",
",",
"xx2",
",",
"name",
",",
"[",
"referedObject",
"]",
",",
"self",
",",
"self",
".",
"level",
"+",
"1",
")",
"if",
"insertId",
"!=",
"None",
":",
"self",
".",
"__addChild",
"(",
"rt",
",",
"insertId",
")",
"else",
":",
"self",
".",
"__addChild",
"(",
"rt",
")",
"return",
"rt"
] | 31.6 | 23.927273 |
def find_by_campaign(campaign_id, _connection=None, page_size=100,
page_number=0, sort_by=enums.DEFAULT_SORT_BY,
sort_order=enums.DEFAULT_SORT_ORDER):
"""
List all videos for a given campaign.
"""
return connection.ItemResultSet(
'find_videos_by_campaign_id', Video, _connection, page_size,
page_number, sort_by, sort_order, campaign_id=campaign_id)
|
[
"def",
"find_by_campaign",
"(",
"campaign_id",
",",
"_connection",
"=",
"None",
",",
"page_size",
"=",
"100",
",",
"page_number",
"=",
"0",
",",
"sort_by",
"=",
"enums",
".",
"DEFAULT_SORT_BY",
",",
"sort_order",
"=",
"enums",
".",
"DEFAULT_SORT_ORDER",
")",
":",
"return",
"connection",
".",
"ItemResultSet",
"(",
"'find_videos_by_campaign_id'",
",",
"Video",
",",
"_connection",
",",
"page_size",
",",
"page_number",
",",
"sort_by",
",",
"sort_order",
",",
"campaign_id",
"=",
"campaign_id",
")"
] | 45.888889 | 12.333333 |
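A hypothetical call; the campaign id below is made up, and connection.ItemResultSet is assumed to be lazily iterable, fetching Video items page by page.

for video in find_by_campaign(campaign_id=12345, page_size=50):
    print(video.name)   # Video attribute name is an assumption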
def constant(self, name, value):
"""Declare and set a project global constant.
Project global constants are normal variables but should
not be changed. They are applied to every child Jamfile."""
assert is_iterable_typed(name, basestring)
assert is_iterable_typed(value, basestring)
self.registry.current().add_constant(name[0], value)
|
[
"def",
"constant",
"(",
"self",
",",
"name",
",",
"value",
")",
":",
"assert",
"is_iterable_typed",
"(",
"name",
",",
"basestring",
")",
"assert",
"is_iterable_typed",
"(",
"value",
",",
"basestring",
")",
"self",
".",
"registry",
".",
"current",
"(",
")",
".",
"add_constant",
"(",
"name",
"[",
"0",
"]",
",",
"value",
")"
] | 53.857143 | 10.428571 |
def subscribe_strategy(
self,
strategy_id: str,
last: int,
today=datetime.date.today(),
cost_coins=10
):
"""订阅一个策略
会扣减你的积分
Arguments:
strategy_id {str} -- [description]
last {int} -- [description]
Keyword Arguments:
today {[type]} -- [description] (default: {datetime.date.today()})
cost_coins {int} -- [description] (default: {10})
"""
if self.coins > cost_coins:
order_id = str(uuid.uuid1())
self._subscribed_strategy[strategy_id] = {
'lasttime':
last,
'start':
str(today),
'strategy_id':
strategy_id,
'end':
QA_util_get_next_day(
QA_util_get_real_date(str(today),
towards=1),
last
),
'status':
'running',
'uuid':
order_id
}
self.coins -= cost_coins
self.coins_history.append(
[
cost_coins,
strategy_id,
str(today),
last,
order_id,
'subscribe'
]
)
return True, order_id
else:
# return QAERROR.
return False, 'Not Enough Coins'
|
[
"def",
"subscribe_strategy",
"(",
"self",
",",
"strategy_id",
":",
"str",
",",
"last",
":",
"int",
",",
"today",
"=",
"datetime",
".",
"date",
".",
"today",
"(",
")",
",",
"cost_coins",
"=",
"10",
")",
":",
"if",
"self",
".",
"coins",
">",
"cost_coins",
":",
"order_id",
"=",
"str",
"(",
"uuid",
".",
"uuid1",
"(",
")",
")",
"self",
".",
"_subscribed_strategy",
"[",
"strategy_id",
"]",
"=",
"{",
"'lasttime'",
":",
"last",
",",
"'start'",
":",
"str",
"(",
"today",
")",
",",
"'strategy_id'",
":",
"strategy_id",
",",
"'end'",
":",
"QA_util_get_next_day",
"(",
"QA_util_get_real_date",
"(",
"str",
"(",
"today",
")",
",",
"towards",
"=",
"1",
")",
",",
"last",
")",
",",
"'status'",
":",
"'running'",
",",
"'uuid'",
":",
"order_id",
"}",
"self",
".",
"coins",
"-=",
"cost_coins",
"self",
".",
"coins_history",
".",
"append",
"(",
"[",
"cost_coins",
",",
"strategy_id",
",",
"str",
"(",
"today",
")",
",",
"last",
",",
"order_id",
",",
"'subscribe'",
"]",
")",
"return",
"True",
",",
"order_id",
"else",
":",
"# return QAERROR.",
"return",
"False",
",",
"'Not Enough Coins'"
] | 27.2 | 15.818182 |
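The method returns a (success, payload) tuple, so callers unpack both values; `user` here is a hypothetical instance of the surrounding account class.

ok, result = user.subscribe_strategy('strategy-001', last=30)
if ok:
    print('subscribed, order id:', result)   # result is a uuid1 string
else:
    print('failed:', result)                 # 'Not Enough Coins'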
def imagetransformer_base_10l_8h_big_uncond_dr03_dan_64():
"""big 1d model for unconditional generation on imagenet."""
hparams = imagetransformer_base_10l_8h_big_cond_dr03_dan()
hparams.unconditional = True
hparams.max_length = 14000
hparams.batch_size = 1
hparams.img_len = 64
hparams.layer_prepostprocess_dropout = 0.1
return hparams
|
[
"def",
"imagetransformer_base_10l_8h_big_uncond_dr03_dan_64",
"(",
")",
":",
"hparams",
"=",
"imagetransformer_base_10l_8h_big_cond_dr03_dan",
"(",
")",
"hparams",
".",
"unconditional",
"=",
"True",
"hparams",
".",
"max_length",
"=",
"14000",
"hparams",
".",
"batch_size",
"=",
"1",
"hparams",
".",
"img_len",
"=",
"64",
"hparams",
".",
"layer_prepostprocess_dropout",
"=",
"0.1",
"return",
"hparams"
] | 38.222222 | 13.555556 |
def check_sas_base_dir(root=None):
''' Check for the SAS_BASE_DIR environment variable
Will set the SAS_BASE_DIR in your local environment
    or prompt you to define one if it is undefined
Parameters:
root (str):
Optional override of the SAS_BASE_DIR envvar
'''
sasbasedir = root or os.getenv("SAS_BASE_DIR")
if not sasbasedir:
sasbasedir = input('Enter a path for SAS_BASE_DIR: ')
os.environ['SAS_BASE_DIR'] = sasbasedir
|
[
"def",
"check_sas_base_dir",
"(",
"root",
"=",
"None",
")",
":",
"sasbasedir",
"=",
"root",
"or",
"os",
".",
"getenv",
"(",
"\"SAS_BASE_DIR\"",
")",
"if",
"not",
"sasbasedir",
":",
"sasbasedir",
"=",
"input",
"(",
"'Enter a path for SAS_BASE_DIR: '",
")",
"os",
".",
"environ",
"[",
"'SAS_BASE_DIR'",
"]",
"=",
"sasbasedir"
] | 30.933333 | 20.666667 |
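A short usage sketch based on the body above: an explicit root is truthy, so the prompt is skipped, and the value is exported to the process environment either way.

import os

check_sas_base_dir('/data/sas')                   # no prompt: root is given
assert os.environ['SAS_BASE_DIR'] == '/data/sas'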
def GET_savedmodifiedconditionitemvalues(self) -> None:
"""ToDo: extend functionality and add tests"""
dict_ = state.modifiedconditionitemvalues.get(self._id)
if dict_ is None:
self.GET_conditionitemvalues()
else:
for name, value in dict_.items():
self._outputs[name] = value
|
[
"def",
"GET_savedmodifiedconditionitemvalues",
"(",
"self",
")",
"->",
"None",
":",
"dict_",
"=",
"state",
".",
"modifiedconditionitemvalues",
".",
"get",
"(",
"self",
".",
"_id",
")",
"if",
"dict_",
"is",
"None",
":",
"self",
".",
"GET_conditionitemvalues",
"(",
")",
"else",
":",
"for",
"name",
",",
"value",
"in",
"dict_",
".",
"items",
"(",
")",
":",
"self",
".",
"_outputs",
"[",
"name",
"]",
"=",
"value"
] | 42.5 | 11.25 |
def contact_page(view, **kwargs):
"""
:param view: The view to copy to
:param kwargs:
- fa_icon
- menu: The name of the menu
- show_menu: bool - show/hide menu
- menu_order: int - position of the menu
- return_to
- email_to
:return:
"""
endpoint_namespace = view.__name__ + ":%s"
endpoint = endpoint_namespace % "ContactPage"
template_dir = kwargs.pop("template_dir", "Juice/Plugin/ContactPage")
template_page = template_dir + "/%s.html"
return_to = kwargs.pop("return_to", endpoint)
_menu = kwargs.get("menu", {})
_menu.setdefault("name", "Contact")
_menu.setdefault("extends", view)
_menu.setdefault("visible", True)
_menu.setdefault("order", 100)
_menu.setdefault("")
class ContactPage(object):
@menu(endpoint=endpoint, **_menu)
@template(template_page % "contact_page",
endpoint_namespace=endpoint_namespace)
@route("contact", methods=["GET", "POST"], endpoint=endpoint)
def contact_page(self):
# Email to
email_to = kwargs.pop("email_to", self.get_config("APPLICATION_CONTACT_EMAIL", None))
if not mail.validated:
abort("MailmanConfigurationError")
elif not email_to:
abort("ContactPageMissingEmailToError")
if request.method == "POST":
email = request.form.get("email")
subject = request.form.get("subject")
message = request.form.get("message")
name = request.form.get("name")
flash_message = "Message sent. Thank you!"
flash_type = "success"
if recaptcha.verify():
if not email or not subject or not message:
flash_message = "All fields are required"
flash_type = "error"
elif not utils.is_valid_email(email):
flash_message = "Invalid email address"
flash_type = "error"
else:
try:
mail.send(to=email_to,
reply_to=email,
mail_from=email,
mail_subject=subject,
mail_message=message,
mail_name=name,
template="contact-us.txt")
except Exception as ex:
logging.exception(ex)
abort("MailmanConfigurationError")
else:
flash_message = "Security code is invalid"
flash_type = "error"
flash(flash_message, flash_type)
return redirect(url_for(return_to))
self.meta_tags(title="Contact Us")
return None
return ContactPage
|
[
"def",
"contact_page",
"(",
"view",
",",
"*",
"*",
"kwargs",
")",
":",
"endpoint_namespace",
"=",
"view",
".",
"__name__",
"+",
"\":%s\"",
"endpoint",
"=",
"endpoint_namespace",
"%",
"\"ContactPage\"",
"template_dir",
"=",
"kwargs",
".",
"pop",
"(",
"\"template_dir\"",
",",
"\"Juice/Plugin/ContactPage\"",
")",
"template_page",
"=",
"template_dir",
"+",
"\"/%s.html\"",
"return_to",
"=",
"kwargs",
".",
"pop",
"(",
"\"return_to\"",
",",
"endpoint",
")",
"_menu",
"=",
"kwargs",
".",
"get",
"(",
"\"menu\"",
",",
"{",
"}",
")",
"_menu",
".",
"setdefault",
"(",
"\"name\"",
",",
"\"Contact\"",
")",
"_menu",
".",
"setdefault",
"(",
"\"extends\"",
",",
"view",
")",
"_menu",
".",
"setdefault",
"(",
"\"visible\"",
",",
"True",
")",
"_menu",
".",
"setdefault",
"(",
"\"order\"",
",",
"100",
")",
"_menu",
".",
"setdefault",
"(",
"\"\"",
")",
"class",
"ContactPage",
"(",
"object",
")",
":",
"@",
"menu",
"(",
"endpoint",
"=",
"endpoint",
",",
"*",
"*",
"_menu",
")",
"@",
"template",
"(",
"template_page",
"%",
"\"contact_page\"",
",",
"endpoint_namespace",
"=",
"endpoint_namespace",
")",
"@",
"route",
"(",
"\"contact\"",
",",
"methods",
"=",
"[",
"\"GET\"",
",",
"\"POST\"",
"]",
",",
"endpoint",
"=",
"endpoint",
")",
"def",
"contact_page",
"(",
"self",
")",
":",
"# Email to",
"email_to",
"=",
"kwargs",
".",
"pop",
"(",
"\"email_to\"",
",",
"self",
".",
"get_config",
"(",
"\"APPLICATION_CONTACT_EMAIL\"",
",",
"None",
")",
")",
"if",
"not",
"mail",
".",
"validated",
":",
"abort",
"(",
"\"MailmanConfigurationError\"",
")",
"elif",
"not",
"email_to",
":",
"abort",
"(",
"\"ContactPageMissingEmailToError\"",
")",
"if",
"request",
".",
"method",
"==",
"\"POST\"",
":",
"email",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"email\"",
")",
"subject",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"subject\"",
")",
"message",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"message\"",
")",
"name",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"name\"",
")",
"flash_message",
"=",
"\"Message sent. Thank you!\"",
"flash_type",
"=",
"\"success\"",
"if",
"recaptcha",
".",
"verify",
"(",
")",
":",
"if",
"not",
"email",
"or",
"not",
"subject",
"or",
"not",
"message",
":",
"flash_message",
"=",
"\"All fields are required\"",
"flash_type",
"=",
"\"error\"",
"elif",
"not",
"utils",
".",
"is_valid_email",
"(",
"email",
")",
":",
"flash_message",
"=",
"\"Invalid email address\"",
"flash_type",
"=",
"\"error\"",
"else",
":",
"try",
":",
"mail",
".",
"send",
"(",
"to",
"=",
"email_to",
",",
"reply_to",
"=",
"email",
",",
"mail_from",
"=",
"email",
",",
"mail_subject",
"=",
"subject",
",",
"mail_message",
"=",
"message",
",",
"mail_name",
"=",
"name",
",",
"template",
"=",
"\"contact-us.txt\"",
")",
"except",
"Exception",
"as",
"ex",
":",
"logging",
".",
"exception",
"(",
"ex",
")",
"abort",
"(",
"\"MailmanConfigurationError\"",
")",
"else",
":",
"flash_message",
"=",
"\"Security code is invalid\"",
"flash_type",
"=",
"\"error\"",
"flash",
"(",
"flash_message",
",",
"flash_type",
")",
"return",
"redirect",
"(",
"url_for",
"(",
"return_to",
")",
")",
"self",
".",
"meta_tags",
"(",
"title",
"=",
"\"Contact Us\"",
")",
"return",
"None",
"return",
"ContactPage"
] | 34.941176 | 17.929412 |
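A hypothetical wiring sketch; the base `Index` view and any Juice registration step are assumptions, since only the factory itself is shown above. Note also that kwargs.pop("email_to", ...) runs inside the request handler, which appears to mean an explicit email_to only survives the first request before falling back to the config value.

class Index(object):
    pass

ContactPageView = contact_page(
    Index,
    email_to='hello@example.com',               # hypothetical address
    menu={'name': 'Contact us', 'order': 5},
)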
def connect(self):
"""Create connection to CasparCG Server"""
try:
self.connection = telnetlib.Telnet(self.host, self.port, timeout=self.timeout)
except Exception:
log_traceback()
return False
return True
|
[
"def",
"connect",
"(",
"self",
")",
":",
"try",
":",
"self",
".",
"connection",
"=",
"telnetlib",
".",
"Telnet",
"(",
"self",
".",
"host",
",",
"self",
".",
"port",
",",
"timeout",
"=",
"self",
".",
"timeout",
")",
"except",
"Exception",
":",
"log_traceback",
"(",
")",
"return",
"False",
"return",
"True"
] | 33.125 | 20.625 |
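A standalone sketch of the same pattern against the stdlib directly; the CasparCG AMCP default port 5250 is used for illustration, and telnetlib is deprecated since Python 3.11 (removed in 3.13).

import telnetlib

try:
    conn = telnetlib.Telnet('127.0.0.1', 5250, timeout=2)
except OSError:
    conn = None   # connection refused or timed out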
def import_variables(self, container, varnames=None):
"""Helper method to avoid call get_variable for every variable."""
if varnames is None:
for keyword in self.tkvariables:
setattr(container, keyword, self.tkvariables[keyword])
else:
for keyword in varnames:
if keyword in self.tkvariables:
setattr(container, keyword, self.tkvariables[keyword])
|
[
"def",
"import_variables",
"(",
"self",
",",
"container",
",",
"varnames",
"=",
"None",
")",
":",
"if",
"varnames",
"is",
"None",
":",
"for",
"keyword",
"in",
"self",
".",
"tkvariables",
":",
"setattr",
"(",
"container",
",",
"keyword",
",",
"self",
".",
"tkvariables",
"[",
"keyword",
"]",
")",
"else",
":",
"for",
"keyword",
"in",
"varnames",
":",
"if",
"keyword",
"in",
"self",
".",
"tkvariables",
":",
"setattr",
"(",
"container",
",",
"keyword",
",",
"self",
".",
"tkvariables",
"[",
"keyword",
"]",
")"
] | 48.777778 | 14.555556 |
def set_zone_order(self, zone_ids):
""" reorder zones per the passed in list
:param zone_ids:
:return:
"""
reordered_zones = []
current_zone_ids = [z['id'] for z in self.my_osid_object_form._my_map['zones']]
if set(zone_ids) != set(current_zone_ids):
raise IllegalState('zone_ids do not match existing zones')
for zone_id in zone_ids:
for current_zone in self.my_osid_object_form._my_map['zones']:
if zone_id == current_zone['id']:
reordered_zones.append(current_zone)
break
self.my_osid_object_form._my_map['zones'] = reordered_zones
|
[
"def",
"set_zone_order",
"(",
"self",
",",
"zone_ids",
")",
":",
"reordered_zones",
"=",
"[",
"]",
"current_zone_ids",
"=",
"[",
"z",
"[",
"'id'",
"]",
"for",
"z",
"in",
"self",
".",
"my_osid_object_form",
".",
"_my_map",
"[",
"'zones'",
"]",
"]",
"if",
"set",
"(",
"zone_ids",
")",
"!=",
"set",
"(",
"current_zone_ids",
")",
":",
"raise",
"IllegalState",
"(",
"'zone_ids do not match existing zones'",
")",
"for",
"zone_id",
"in",
"zone_ids",
":",
"for",
"current_zone",
"in",
"self",
".",
"my_osid_object_form",
".",
"_my_map",
"[",
"'zones'",
"]",
":",
"if",
"zone_id",
"==",
"current_zone",
"[",
"'id'",
"]",
":",
"reordered_zones",
".",
"append",
"(",
"current_zone",
")",
"break",
"self",
".",
"my_osid_object_form",
".",
"_my_map",
"[",
"'zones'",
"]",
"=",
"reordered_zones"
] | 39.529412 | 19.588235 |
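The reordering core, extracted as a pure function for clarity; a dict lookup replaces the quadratic nested loop above, assuming ids are unique.

def reorder(items, ordered_ids):
    by_id = {item['id']: item for item in items}
    if set(ordered_ids) != set(by_id):
        raise ValueError('ids do not match existing items')
    return [by_id[i] for i in ordered_ids]

zones = [{'id': 'a'}, {'id': 'b'}, {'id': 'c'}]
assert reorder(zones, ['c', 'a', 'b']) == [{'id': 'c'}, {'id': 'a'}, {'id': 'b'}]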
def scroll_deck(self, decknum, scroll_x, scroll_y):
"""Move a deck."""
self.scroll_deck_x(decknum, scroll_x)
self.scroll_deck_y(decknum, scroll_y)
|
[
"def",
"scroll_deck",
"(",
"self",
",",
"decknum",
",",
"scroll_x",
",",
"scroll_y",
")",
":",
"self",
".",
"scroll_deck_x",
"(",
"decknum",
",",
"scroll_x",
")",
"self",
".",
"scroll_deck_y",
"(",
"decknum",
",",
"scroll_y",
")"
] | 41.75 | 5.25 |
def _create_sagemaker_model(self, *args): # pylint: disable=unused-argument
"""Create a SageMaker Model Entity
Args:
*args: Arguments coming from the caller. This class
does not require any so they are ignored.
"""
if self.algorithm_arn:
# When ModelPackage is created using an algorithm_arn we need to first
# create a ModelPackage. If we had already created one then its fine to re-use it.
if self._created_model_package_name is None:
model_package_name = self._create_sagemaker_model_package()
self.sagemaker_session.wait_for_model_package(model_package_name)
self._created_model_package_name = model_package_name
model_package_name = self._created_model_package_name
else:
# When a ModelPackageArn is provided we just create the Model
model_package_name = self.model_package_arn
container_def = {
'ModelPackageName': model_package_name,
}
if self.env != {}:
container_def['Environment'] = self.env
model_package_short_name = model_package_name.split('/')[-1]
enable_network_isolation = self.enable_network_isolation()
self.name = self.name or utils.name_from_base(model_package_short_name)
self.sagemaker_session.create_model(self.name, self.role, container_def,
vpc_config=self.vpc_config,
enable_network_isolation=enable_network_isolation)
|
[
"def",
"_create_sagemaker_model",
"(",
"self",
",",
"*",
"args",
")",
":",
"# pylint: disable=unused-argument",
"if",
"self",
".",
"algorithm_arn",
":",
"# When ModelPackage is created using an algorithm_arn we need to first",
"# create a ModelPackage. If we had already created one then its fine to re-use it.",
"if",
"self",
".",
"_created_model_package_name",
"is",
"None",
":",
"model_package_name",
"=",
"self",
".",
"_create_sagemaker_model_package",
"(",
")",
"self",
".",
"sagemaker_session",
".",
"wait_for_model_package",
"(",
"model_package_name",
")",
"self",
".",
"_created_model_package_name",
"=",
"model_package_name",
"model_package_name",
"=",
"self",
".",
"_created_model_package_name",
"else",
":",
"# When a ModelPackageArn is provided we just create the Model",
"model_package_name",
"=",
"self",
".",
"model_package_arn",
"container_def",
"=",
"{",
"'ModelPackageName'",
":",
"model_package_name",
",",
"}",
"if",
"self",
".",
"env",
"!=",
"{",
"}",
":",
"container_def",
"[",
"'Environment'",
"]",
"=",
"self",
".",
"env",
"model_package_short_name",
"=",
"model_package_name",
".",
"split",
"(",
"'/'",
")",
"[",
"-",
"1",
"]",
"enable_network_isolation",
"=",
"self",
".",
"enable_network_isolation",
"(",
")",
"self",
".",
"name",
"=",
"self",
".",
"name",
"or",
"utils",
".",
"name_from_base",
"(",
"model_package_short_name",
")",
"self",
".",
"sagemaker_session",
".",
"create_model",
"(",
"self",
".",
"name",
",",
"self",
".",
"role",
",",
"container_def",
",",
"vpc_config",
"=",
"self",
".",
"vpc_config",
",",
"enable_network_isolation",
"=",
"enable_network_isolation",
")"
] | 49.21875 | 27.8125 |
def switch_to_aux_top_layer(self):
"""Context that construct cnn in the auxiliary arm."""
if self.aux_top_layer is None:
raise RuntimeError("Empty auxiliary top layer in the network.")
saved_top_layer = self.top_layer
saved_top_size = self.top_size
self.top_layer = self.aux_top_layer
self.top_size = self.aux_top_size
yield
self.aux_top_layer = self.top_layer
self.aux_top_size = self.top_size
self.top_layer = saved_top_layer
self.top_size = saved_top_size
|
[
"def",
"switch_to_aux_top_layer",
"(",
"self",
")",
":",
"if",
"self",
".",
"aux_top_layer",
"is",
"None",
":",
"raise",
"RuntimeError",
"(",
"\"Empty auxiliary top layer in the network.\"",
")",
"saved_top_layer",
"=",
"self",
".",
"top_layer",
"saved_top_size",
"=",
"self",
".",
"top_size",
"self",
".",
"top_layer",
"=",
"self",
".",
"aux_top_layer",
"self",
".",
"top_size",
"=",
"self",
".",
"aux_top_size",
"yield",
"self",
".",
"aux_top_layer",
"=",
"self",
".",
"top_layer",
"self",
".",
"aux_top_size",
"=",
"self",
".",
"top_size",
"self",
".",
"top_layer",
"=",
"saved_top_layer",
"self",
".",
"top_size",
"=",
"saved_top_size"
] | 42 | 6.307692 |
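The bare `yield` implies the method is wrapped with @contextlib.contextmanager in the full source (an assumption here). The same save/restore pattern in isolation, with a try/finally the original omits, so state is restored even if the body raises:

import contextlib

@contextlib.contextmanager
def swap_attr(obj, name, value):
    saved = getattr(obj, name)
    setattr(obj, name, value)
    try:
        yield
    finally:
        setattr(obj, name, saved)   # restored even on exception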
def _calculate_hour_and_minute(float_hour):
"""Calculate hour and minutes as integers from a float hour."""
hour, minute = int(float_hour), int(round((float_hour - int(float_hour)) * 60))
if minute == 60:
return hour + 1, 0
else:
return hour, minute
|
[
"def",
"_calculate_hour_and_minute",
"(",
"float_hour",
")",
":",
"hour",
",",
"minute",
"=",
"int",
"(",
"float_hour",
")",
",",
"int",
"(",
"round",
"(",
"(",
"float_hour",
"-",
"int",
"(",
"float_hour",
")",
")",
"*",
"60",
")",
")",
"if",
"minute",
"==",
"60",
":",
"return",
"hour",
"+",
"1",
",",
"0",
"else",
":",
"return",
"hour",
",",
"minute"
] | 42.714286 | 16 |
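The rounding and carry behaviour, made concrete:

assert _calculate_hour_and_minute(9.5) == (9, 30)
assert _calculate_hour_and_minute(10.9999) == (11, 0)   # round(59.994) == 60 carries
assert _calculate_hour_and_minute(7.258) == (7, 15)     # round(15.48) == 15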
def get_plugin_data(self, plugin):
"""Get the data object of the plugin's current tab manager."""
# The data object is named "data" in the editor plugin while it is
# named "clients" in the notebook plugin.
try:
data = plugin.get_current_tab_manager().data
except AttributeError:
data = plugin.get_current_tab_manager().clients
return data
|
[
"def",
"get_plugin_data",
"(",
"self",
",",
"plugin",
")",
":",
"# The data object is named \"data\" in the editor plugin while it is",
"# named \"clients\" in the notebook plugin.",
"try",
":",
"data",
"=",
"plugin",
".",
"get_current_tab_manager",
"(",
")",
".",
"data",
"except",
"AttributeError",
":",
"data",
"=",
"plugin",
".",
"get_current_tab_manager",
"(",
")",
".",
"clients",
"return",
"data"
] | 40.3 | 18.3 |
def load_sample(sample):
""" Load meter data, temperature data, and metadata for associated with a
particular sample identifier. Note: samples are simulated, not real, data.
Parameters
----------
sample : :any:`str`
Identifier of sample. Complete list can be obtained with
:any:`eemeter.samples`.
Returns
-------
meter_data, temperature_data, metadata : :any:`tuple` of :any:`pandas.DataFrame`, :any:`pandas.Series`, and :any:`dict`
Meter data, temperature data, and metadata for this sample identifier.
"""
sample_metadata = _load_sample_metadata()
metadata = sample_metadata.get(sample)
if metadata is None:
raise ValueError(
"Sample not found: {}. Try one of these?\n{}".format(
sample,
"\n".join(
[" - {}".format(key) for key in sorted(sample_metadata.keys())]
),
)
)
freq = metadata.get("freq")
if freq not in ("hourly", "daily"):
freq = None
meter_data_filename = metadata["meter_data_filename"]
with resource_stream("eemeter.samples", meter_data_filename) as f:
meter_data = meter_data_from_csv(f, gzipped=True, freq=freq)
temperature_filename = metadata["temperature_filename"]
with resource_stream("eemeter.samples", temperature_filename) as f:
temperature_data = temperature_data_from_csv(f, gzipped=True, freq="hourly")
metadata["blackout_start_date"] = pytz.UTC.localize(
parse_date(metadata["blackout_start_date"])
)
metadata["blackout_end_date"] = pytz.UTC.localize(
parse_date(metadata["blackout_end_date"])
)
return meter_data, temperature_data, metadata
|
[
"def",
"load_sample",
"(",
"sample",
")",
":",
"sample_metadata",
"=",
"_load_sample_metadata",
"(",
")",
"metadata",
"=",
"sample_metadata",
".",
"get",
"(",
"sample",
")",
"if",
"metadata",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"\"Sample not found: {}. Try one of these?\\n{}\"",
".",
"format",
"(",
"sample",
",",
"\"\\n\"",
".",
"join",
"(",
"[",
"\" - {}\"",
".",
"format",
"(",
"key",
")",
"for",
"key",
"in",
"sorted",
"(",
"sample_metadata",
".",
"keys",
"(",
")",
")",
"]",
")",
",",
")",
")",
"freq",
"=",
"metadata",
".",
"get",
"(",
"\"freq\"",
")",
"if",
"freq",
"not",
"in",
"(",
"\"hourly\"",
",",
"\"daily\"",
")",
":",
"freq",
"=",
"None",
"meter_data_filename",
"=",
"metadata",
"[",
"\"meter_data_filename\"",
"]",
"with",
"resource_stream",
"(",
"\"eemeter.samples\"",
",",
"meter_data_filename",
")",
"as",
"f",
":",
"meter_data",
"=",
"meter_data_from_csv",
"(",
"f",
",",
"gzipped",
"=",
"True",
",",
"freq",
"=",
"freq",
")",
"temperature_filename",
"=",
"metadata",
"[",
"\"temperature_filename\"",
"]",
"with",
"resource_stream",
"(",
"\"eemeter.samples\"",
",",
"temperature_filename",
")",
"as",
"f",
":",
"temperature_data",
"=",
"temperature_data_from_csv",
"(",
"f",
",",
"gzipped",
"=",
"True",
",",
"freq",
"=",
"\"hourly\"",
")",
"metadata",
"[",
"\"blackout_start_date\"",
"]",
"=",
"pytz",
".",
"UTC",
".",
"localize",
"(",
"parse_date",
"(",
"metadata",
"[",
"\"blackout_start_date\"",
"]",
")",
")",
"metadata",
"[",
"\"blackout_end_date\"",
"]",
"=",
"pytz",
".",
"UTC",
".",
"localize",
"(",
"parse_date",
"(",
"metadata",
"[",
"\"blackout_end_date\"",
"]",
")",
")",
"return",
"meter_data",
",",
"temperature_data",
",",
"metadata"
] | 36.06383 | 24.702128 |
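A hypothetical call; the sample identifier below is illustrative, and the real list comes from eemeter.samples() as the docstring says.

meter_data, temperature_data, metadata = load_sample('il-electricity-cdd-hdd-daily')
print(metadata['blackout_start_date'])   # tz-aware UTC datetime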
def postproc_mask(stats_result):
    """Simple helper to postprocess angular outputs from StatsCollectors in the
    way we want.
    """
    n, mean, scat = stats_result
    ok = np.isfinite(mean)
n = n[ok]
mean = mean[ok]
scat = scat[ok]
mean *= 180 / np.pi # rad => deg
scat /= n # variance-of-samples => variance-of-mean
scat **= 0.5 # variance => stddev
scat *= 180 / np.pi # rad => deg
return ok, mean, scat
|
[
"def",
"postproc_mask",
"(",
"stats_result",
")",
":",
"n",
",",
"mean",
",",
"scat",
"=",
"stats_result",
"ok",
"=",
"np",
".",
"isfinite",
"(",
"mean",
")",
"n",
"=",
"n",
"[",
"ok",
"]",
"mean",
"=",
"mean",
"[",
"ok",
"]",
"scat",
"=",
"scat",
"[",
"ok",
"]",
"mean",
"*=",
"180",
"/",
"np",
".",
"pi",
"# rad => deg",
"scat",
"/=",
"n",
"# variance-of-samples => variance-of-mean",
"scat",
"**=",
"0.5",
"# variance => stddev",
"scat",
"*=",
"180",
"/",
"np",
".",
"pi",
"# rad => deg",
"return",
"ok",
",",
"mean",
",",
"scat"
] | 25.529412 | 16.588235 |
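The variance bookkeeping in isolation: dividing the per-sample variance by n yields the variance of the mean, and the square root converts that variance to a standard deviation, here in degrees.

import numpy as np

n = np.array([100.0, 400.0])             # samples per bin
var = np.array([0.01, 0.01])             # variance of samples (rad^2)
stddev_of_mean_deg = np.sqrt(var / n) * 180 / np.pi
# four times the samples -> half the uncertainty on the mean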
def buscar(self):
"""Faz a busca das informações do objeto no Postmon.
Retorna um ``bool`` indicando se a busca foi bem sucedida.
"""
headers = {'User-Agent': self.user_agent}
try:
self._response = requests.get(self.url, headers=headers)
except requests.RequestException:
logger.exception("%s.buscar() falhou: GET %s" %
(self.__class__.__name__, self.url))
return False
if self._response.ok:
self.atualizar(**self._response.json())
return self._response.ok
|
[
"def",
"buscar",
"(",
"self",
")",
":",
"headers",
"=",
"{",
"'User-Agent'",
":",
"self",
".",
"user_agent",
"}",
"try",
":",
"self",
".",
"_response",
"=",
"requests",
".",
"get",
"(",
"self",
".",
"url",
",",
"headers",
"=",
"headers",
")",
"except",
"requests",
".",
"RequestException",
":",
"logger",
".",
"exception",
"(",
"\"%s.buscar() falhou: GET %s\"",
"%",
"(",
"self",
".",
"__class__",
".",
"__name__",
",",
"self",
".",
"url",
")",
")",
"return",
"False",
"if",
"self",
".",
"_response",
".",
"ok",
":",
"self",
".",
"atualizar",
"(",
"*",
"*",
"self",
".",
"_response",
".",
"json",
"(",
")",
")",
"return",
"self",
".",
"_response",
".",
"ok"
] | 36.5 | 17.8125 |
def remove_declaration(self, decl):
"""
Removes declaration from members list.
:param decl: declaration to be removed
:type decl: :class:`declaration_t`
"""
del self.declarations[self.declarations.index(decl)]
decl.cache.reset()
|
[
"def",
"remove_declaration",
"(",
"self",
",",
"decl",
")",
":",
"del",
"self",
".",
"declarations",
"[",
"self",
".",
"declarations",
".",
"index",
"(",
"decl",
")",
"]",
"decl",
".",
"cache",
".",
"reset",
"(",
")"
] | 25.181818 | 15.727273 |
def tag_image(self, repository=None, tag=None):
"""
Apply additional tags to the image or even add a new name
:param repository: str, see constructor
:param tag: str, see constructor
:return: instance of DockerImage
"""
if not (repository or tag):
raise ValueError("You need to specify either repository or tag.")
r = repository or self.name
t = "latest" if not tag else tag
self.d.tag(image=self.get_full_name(), repository=r, tag=t)
return DockerImage(r, tag=t)
|
[
"def",
"tag_image",
"(",
"self",
",",
"repository",
"=",
"None",
",",
"tag",
"=",
"None",
")",
":",
"if",
"not",
"(",
"repository",
"or",
"tag",
")",
":",
"raise",
"ValueError",
"(",
"\"You need to specify either repository or tag.\"",
")",
"r",
"=",
"repository",
"or",
"self",
".",
"name",
"t",
"=",
"\"latest\"",
"if",
"not",
"tag",
"else",
"tag",
"self",
".",
"d",
".",
"tag",
"(",
"image",
"=",
"self",
".",
"get_full_name",
"(",
")",
",",
"repository",
"=",
"r",
",",
"tag",
"=",
"t",
")",
"return",
"DockerImage",
"(",
"r",
",",
"tag",
"=",
"t",
")"
] | 39.357143 | 11.214286 |
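A hedged usage sketch; the one-argument DockerImage constructor with a tag keyword is inferred from the method's own return statement.

image = DockerImage('fedora', tag='29')                            # hypothetical image
latest = image.tag_image(repository='registry.example.com/fedora')
# roughly equivalent CLI: docker tag fedora:29 registry.example.com/fedora:latest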
def calculate_order_parameter(self, start_iteration = None, stop_iteration = None):
"""!
        @brief Calculates the level of global synchronization (order parameter).
        @details This parameter tends to 1.0 when the oscillatory network is close to global synchronization and tends to 0.0 when
                  desynchronization is observed in the network. The order parameter is calculated using the following equation:
\f[
r_{c}=\frac{1}{Ne^{i\varphi }}\sum_{j=0}^{N}e^{i\theta_{j}};
\f]
        where \f$\varphi\f$ is the average phase coordinate in the network and \f$N\f$ is the number of oscillators in the network.
@param[in] start_iteration (uint): The first iteration that is used for calculation, if 'None' then the last iteration is used.
@param[in] stop_iteration (uint): The last iteration that is used for calculation, if 'None' then 'start_iteration' + 1 is used.
Example:
@code
oscillatory_network = sync(16, type_conn = conn_type.ALL_TO_ALL);
output_dynamic = oscillatory_network.simulate_static(100, 10);
print("Order parameter at the last step: ", output_dynamic.calculate_order_parameter());
print("Order parameter at the first step:", output_dynamic.calculate_order_parameter(0));
print("Order parameter evolution between 40 and 50 steps:", output_dynamic.calculate_order_parameter(40, 50));
@endcode
@return (list) List of levels of global synchronization (order parameter evolution).
@see order_estimator
"""
        (start_iteration, stop_iteration) = self.__get_start_stop_iterations(start_iteration, stop_iteration)
        if (self._ccore_sync_dynamic_pointer is not None):
            return wrapper.sync_dynamic_calculate_order(self._ccore_sync_dynamic_pointer, start_iteration, stop_iteration)
        sequence_order = []
        for index in range(start_iteration, stop_iteration):
            sequence_order.append(order_estimator.calculate_sync_order(self.output[index]))
        return sequence_order
|
[
"def",
"calculate_order_parameter",
"(",
"self",
",",
"start_iteration",
"=",
"None",
",",
"stop_iteration",
"=",
"None",
")",
":",
"(",
"start_iteration",
",",
"stop_iteration",
")",
"=",
"self",
".",
"__get_start_stop_iterations",
"(",
"start_iteration",
",",
"stop_iteration",
")",
"if",
"(",
"self",
".",
"_ccore_sync_dynamic_pointer",
"is",
"not",
"None",
")",
":",
"return",
"wrapper",
".",
"sync_dynamic_calculate_order",
"(",
"self",
".",
"_ccore_sync_dynamic_pointer",
",",
"start_iteration",
",",
"stop_iteration",
")",
"sequence_order",
"=",
"[",
"]",
"for",
"index",
"in",
"range",
"(",
"start_iteration",
",",
"stop_iteration",
")",
":",
"sequence_order",
".",
"append",
"(",
"order_estimator",
".",
"calculate_sync_order",
"(",
"self",
".",
"output",
"[",
"index",
"]",
")",
")",
"return",
"sequence_order"
] | 55 | 40.146341 |
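The per-snapshot computation behind order_estimator.calculate_sync_order, sketched with numpy under the docstring's formula: r = |(1/N) * sum_j exp(i * theta_j)|.

import numpy as np

def sync_order(phases):
    return abs(np.exp(1j * np.asarray(phases)).mean())

assert sync_order([0.0, 0.0, 0.0]) == 1.0     # perfect synchrony
assert sync_order([0.0, np.pi]) < 1e-12       # two opposite phases cancel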
def get_override_votes(self, obj):
"""
Votes entered into backend.
Only used if ``override_ap_votes = True``.
"""
if hasattr(obj, "meta"): # TODO: REVISIT THIS
if obj.meta.override_ap_votes:
all_votes = None
for ce in obj.candidate_elections.all():
if all_votes:
all_votes = all_votes | ce.votes.all()
else:
all_votes = ce.votes.all()
return VotesSerializer(all_votes, many=True).data
return False
|
[
"def",
"get_override_votes",
"(",
"self",
",",
"obj",
")",
":",
"if",
"hasattr",
"(",
"obj",
",",
"\"meta\"",
")",
":",
"# TODO: REVISIT THIS",
"if",
"obj",
".",
"meta",
".",
"override_ap_votes",
":",
"all_votes",
"=",
"None",
"for",
"ce",
"in",
"obj",
".",
"candidate_elections",
".",
"all",
"(",
")",
":",
"if",
"all_votes",
":",
"all_votes",
"=",
"all_votes",
"|",
"ce",
".",
"votes",
".",
"all",
"(",
")",
"else",
":",
"all_votes",
"=",
"ce",
".",
"votes",
".",
"all",
"(",
")",
"return",
"VotesSerializer",
"(",
"all_votes",
",",
"many",
"=",
"True",
")",
".",
"data",
"return",
"False"
] | 38.666667 | 10.666667 |
def send_command(self, command, arg=None):
"""Sends a command to the device.
Args:
command: The command to send.
arg: Optional argument to the command.
"""
if arg is not None:
command = '%s:%s' % (command, arg)
self._write(six.StringIO(command), len(command))
|
[
"def",
"send_command",
"(",
"self",
",",
"command",
",",
"arg",
"=",
"None",
")",
":",
"if",
"arg",
"is",
"not",
"None",
":",
"command",
"=",
"'%s:%s'",
"%",
"(",
"command",
",",
"arg",
")",
"self",
".",
"_write",
"(",
"six",
".",
"StringIO",
"(",
"command",
")",
",",
"len",
"(",
"command",
")",
")"
] | 28.9 | 11.1 |
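The resulting wire format is simply 'command' or 'command:arg'; `device` is a hypothetical instance of the surrounding class.

device.send_command('shell')               # writes "shell"
device.send_command('host', 'transport')   # writes "host:transport"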
def get_attributes(self, request_envelope):
# type: (RequestEnvelope) -> Dict[str, object]
"""Get attributes from table in Dynamodb resource.
Retrieves the attributes from Dynamodb table. If the table
doesn't exist, returns an empty dict if the
``create_table`` variable is set as True, else it raises
PersistenceException. Raises PersistenceException if `get_item`
fails on the table.
:param request_envelope: Request Envelope passed during skill
invocation
:type request_envelope: ask_sdk_model.RequestEnvelope
:return: Attributes stored under the partition keygen mapping
in the table
:rtype: Dict[str, object]
:raises: :py:class:`ask_sdk_core.exceptions.PersistenceException`
"""
try:
table = self.dynamodb.Table(self.table_name)
partition_key_val = self.partition_keygen(request_envelope)
response = table.get_item(
Key={self.partition_key_name: partition_key_val},
ConsistentRead=True)
if "Item" in response:
return response["Item"][self.attribute_name]
else:
return {}
except ResourceNotExistsError:
raise PersistenceException(
"DynamoDb table {} doesn't exist or in the process of "
"being created. Failed to get attributes from "
"DynamoDb table.".format(self.table_name))
except Exception as e:
raise PersistenceException(
"Failed to retrieve attributes from DynamoDb table. "
"Exception of type {} occurred: {}".format(
type(e).__name__, str(e)))
|
[
"def",
"get_attributes",
"(",
"self",
",",
"request_envelope",
")",
":",
"# type: (RequestEnvelope) -> Dict[str, object]",
"try",
":",
"table",
"=",
"self",
".",
"dynamodb",
".",
"Table",
"(",
"self",
".",
"table_name",
")",
"partition_key_val",
"=",
"self",
".",
"partition_keygen",
"(",
"request_envelope",
")",
"response",
"=",
"table",
".",
"get_item",
"(",
"Key",
"=",
"{",
"self",
".",
"partition_key_name",
":",
"partition_key_val",
"}",
",",
"ConsistentRead",
"=",
"True",
")",
"if",
"\"Item\"",
"in",
"response",
":",
"return",
"response",
"[",
"\"Item\"",
"]",
"[",
"self",
".",
"attribute_name",
"]",
"else",
":",
"return",
"{",
"}",
"except",
"ResourceNotExistsError",
":",
"raise",
"PersistenceException",
"(",
"\"DynamoDb table {} doesn't exist or in the process of \"",
"\"being created. Failed to get attributes from \"",
"\"DynamoDb table.\"",
".",
"format",
"(",
"self",
".",
"table_name",
")",
")",
"except",
"Exception",
"as",
"e",
":",
"raise",
"PersistenceException",
"(",
"\"Failed to retrieve attributes from DynamoDb table. \"",
"\"Exception of type {} occurred: {}\"",
".",
"format",
"(",
"type",
"(",
"e",
")",
".",
"__name__",
",",
"str",
"(",
"e",
")",
")",
")"
] | 45.315789 | 17.5 |
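The same read path with boto3 directly, as a minimal sketch; the table name, partition key, and attribute name below are assumptions.

import boto3

table = boto3.resource('dynamodb').Table('skill-attributes')        # name assumed
response = table.get_item(Key={'id': 'user-123'}, ConsistentRead=True)
attributes = response.get('Item', {}).get('attributes', {})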
async def resolution_at_vote(self, root):
"""The proposal currently being voted on.
Returns
-------
        :class:`ApiQuery` of :class:`ResolutionAtVote`, or
        :class:`ApiQuery` of None
            if no resolution is currently at vote.
"""
elem = root.find('RESOLUTION')
if elem:
resolution = ResolutionAtVote(elem)
resolution._council_id = self._council_id
return resolution
|
[
"async",
"def",
"resolution_at_vote",
"(",
"self",
",",
"root",
")",
":",
"elem",
"=",
"root",
".",
"find",
"(",
"'RESOLUTION'",
")",
"if",
"elem",
":",
"resolution",
"=",
"ResolutionAtVote",
"(",
"elem",
")",
"resolution",
".",
"_council_id",
"=",
"self",
".",
"_council_id",
"return",
"resolution"
] | 32.214286 | 12.785714 |
def build_case(case_data, adapter):
"""Build a case object that is to be inserted to the database
Args:
case_data (dict): A dictionary with the relevant case information
adapter (scout.adapter.MongoAdapter)
Returns:
case_obj (dict): A case object
dict(
case_id = str, # required=True, unique
display_name = str, # If not display name use case_id
owner = str, # required
# These are the names of all the collaborators that are allowed to view the
# case, including the owner
collaborators = list, # List of institute_ids
assignee = str, # _id of a user
individuals = list, # list of dictionaries with individuals
created_at = datetime,
updated_at = datetime,
suspects = list, # List of variants referred by there _id
causatives = list, # List of variants referred by there _id
synopsis = str, # The synopsis is a text blob
status = str, # default='inactive', choices=STATUS
is_research = bool, # default=False
research_requested = bool, # default=False
rerun_requested = bool, # default=False
analysis_date = datetime,
analyses = list, # list of dict
# default_panels specifies which panels that should be shown when
# the case is opened
panels = list, # list of dictionaries with panel information
dynamic_gene_list = list, # List of genes
genome_build = str, # This should be 37 or 38
genome_version = float, # What version of the build
rank_model_version = str,
rank_score_threshold = int, # default=8
phenotype_terms = list, # List of dictionaries with phenotype information
phenotype_groups = list, # List of dictionaries with phenotype information
madeline_info = str, # madeline info is a full xml file
multiqc = str, # path to dir with multiqc information
vcf_files = dict, # A dictionary with vcf files
diagnosis_phenotypes = list, # List of references to diseases
diagnosis_genes = list, # List of references to genes
has_svvariants = bool, # default=False
is_migrated = bool # default=False
)
"""
log.info("build case with id: {0}".format(case_data['case_id']))
case_obj = {
'_id': case_data['case_id'],
'display_name': case_data.get('display_name', case_data['case_id']),
}
# Check if institute exists in database
try:
institute_id = case_data['owner']
except KeyError as err:
raise ConfigError("Case has to have a institute")
institute_obj = adapter.institute(institute_id)
if not institute_obj:
raise IntegrityError("Institute %s not found in database" % institute_id)
case_obj['owner'] = case_data['owner']
    # Owner always has to be part of collaborators
collaborators = set(case_data.get('collaborators', []))
collaborators.add(case_data['owner'])
case_obj['collaborators'] = list(collaborators)
if case_data.get('assignee'):
case_obj['assignees'] = [case_data['assignee']]
# Individuals
ind_objs = []
try:
for individual in case_data.get('individuals', []):
ind_objs.append(build_individual(individual))
except Exception as error:
## TODO add some action here
raise error
# sort the samples to put the affected individual first
sorted_inds = sorted(ind_objs, key=lambda ind: -ind['phenotype'])
case_obj['individuals'] = sorted_inds
now = datetime.now()
case_obj['created_at'] = now
case_obj['updated_at'] = now
if case_data.get('suspects'):
case_obj['suspects'] = case_data['suspects']
if case_data.get('causatives'):
case_obj['causatives'] = case_data['causatives']
case_obj['synopsis'] = case_data.get('synopsis', '')
case_obj['status'] = 'inactive'
case_obj['is_research'] = False
case_obj['research_requested'] = False
case_obj['rerun_requested'] = False
analysis_date = case_data.get('analysis_date')
if analysis_date:
case_obj['analysis_date'] = analysis_date
# We store some metadata and references about gene panels in 'panels'
case_panels = case_data.get('gene_panels', [])
default_panels = case_data.get('default_panels', [])
panels = []
for panel_name in case_panels:
panel_obj = adapter.gene_panel(panel_name)
if not panel_obj:
raise IntegrityError("Panel %s does not exist in database" % panel_name)
panel = {
'panel_id': panel_obj['_id'],
'panel_name': panel_obj['panel_name'],
'display_name': panel_obj['display_name'],
'version': panel_obj['version'],
'updated_at': panel_obj['date'],
'nr_genes': len(panel_obj['genes'])
}
if panel_name in default_panels:
panel['is_default'] = True
else:
panel['is_default'] = False
panels.append(panel)
case_obj['panels'] = panels
case_obj['dynamic_gene_list'] = {}
# Meta data
genome_build = case_data.get('genome_build', '37')
if not genome_build in ['37', '38']:
pass
##TODO raise exception if invalid genome build was used
case_obj['genome_build'] = genome_build
case_obj['genome_version'] = case_data.get('genome_version')
if case_data.get('rank_model_version'):
case_obj['rank_model_version'] = str(case_data['rank_model_version'])
if case_data.get('sv_rank_model_version'):
case_obj['sv_rank_model_version'] = str(case_data['sv_rank_model_version'])
if case_data.get('rank_score_threshold'):
case_obj['rank_score_threshold'] = float(case_data['rank_score_threshold'])
# phenotype information
phenotypes = []
for phenotype in case_data.get('phenotype_terms', []):
phenotype_obj = build_phenotype(phenotype, adapter)
if phenotype_obj:
phenotypes.append(phenotype_obj)
if phenotypes:
case_obj['phenotype_terms'] = phenotypes
# phenotype groups
phenotype_groups = []
for phenotype in case_data.get('phenotype_groups', []):
phenotype_obj = build_phenotype(phenotype, adapter)
if phenotype_obj:
phenotype_groups.append(phenotype_obj)
if phenotype_groups:
case_obj['phenotype_groups'] = phenotype_groups
# Files
case_obj['madeline_info'] = case_data.get('madeline_info')
if 'multiqc' in case_data:
case_obj['multiqc'] = case_data.get('multiqc')
case_obj['vcf_files'] = case_data.get('vcf_files', {})
case_obj['delivery_report'] = case_data.get('delivery_report')
case_obj['has_svvariants'] = False
if (case_obj['vcf_files'].get('vcf_sv') or case_obj['vcf_files'].get('vcf_sv_research')):
case_obj['has_svvariants'] = True
case_obj['has_strvariants'] = False
if (case_obj['vcf_files'].get('vcf_str')):
case_obj['has_strvariants'] = True
case_obj['is_migrated'] = False
case_obj['track'] = case_data.get('track', 'rare')
return case_obj
|
[
"def",
"build_case",
"(",
"case_data",
",",
"adapter",
")",
":",
"log",
".",
"info",
"(",
"\"build case with id: {0}\"",
".",
"format",
"(",
"case_data",
"[",
"'case_id'",
"]",
")",
")",
"case_obj",
"=",
"{",
"'_id'",
":",
"case_data",
"[",
"'case_id'",
"]",
",",
"'display_name'",
":",
"case_data",
".",
"get",
"(",
"'display_name'",
",",
"case_data",
"[",
"'case_id'",
"]",
")",
",",
"}",
"# Check if institute exists in database",
"try",
":",
"institute_id",
"=",
"case_data",
"[",
"'owner'",
"]",
"except",
"KeyError",
"as",
"err",
":",
"raise",
"ConfigError",
"(",
"\"Case has to have a institute\"",
")",
"institute_obj",
"=",
"adapter",
".",
"institute",
"(",
"institute_id",
")",
"if",
"not",
"institute_obj",
":",
"raise",
"IntegrityError",
"(",
"\"Institute %s not found in database\"",
"%",
"institute_id",
")",
"case_obj",
"[",
"'owner'",
"]",
"=",
"case_data",
"[",
"'owner'",
"]",
"# Owner allways has to be part of collaborators",
"collaborators",
"=",
"set",
"(",
"case_data",
".",
"get",
"(",
"'collaborators'",
",",
"[",
"]",
")",
")",
"collaborators",
".",
"add",
"(",
"case_data",
"[",
"'owner'",
"]",
")",
"case_obj",
"[",
"'collaborators'",
"]",
"=",
"list",
"(",
"collaborators",
")",
"if",
"case_data",
".",
"get",
"(",
"'assignee'",
")",
":",
"case_obj",
"[",
"'assignees'",
"]",
"=",
"[",
"case_data",
"[",
"'assignee'",
"]",
"]",
"# Individuals",
"ind_objs",
"=",
"[",
"]",
"try",
":",
"for",
"individual",
"in",
"case_data",
".",
"get",
"(",
"'individuals'",
",",
"[",
"]",
")",
":",
"ind_objs",
".",
"append",
"(",
"build_individual",
"(",
"individual",
")",
")",
"except",
"Exception",
"as",
"error",
":",
"## TODO add some action here",
"raise",
"error",
"# sort the samples to put the affected individual first",
"sorted_inds",
"=",
"sorted",
"(",
"ind_objs",
",",
"key",
"=",
"lambda",
"ind",
":",
"-",
"ind",
"[",
"'phenotype'",
"]",
")",
"case_obj",
"[",
"'individuals'",
"]",
"=",
"sorted_inds",
"now",
"=",
"datetime",
".",
"now",
"(",
")",
"case_obj",
"[",
"'created_at'",
"]",
"=",
"now",
"case_obj",
"[",
"'updated_at'",
"]",
"=",
"now",
"if",
"case_data",
".",
"get",
"(",
"'suspects'",
")",
":",
"case_obj",
"[",
"'suspects'",
"]",
"=",
"case_data",
"[",
"'suspects'",
"]",
"if",
"case_data",
".",
"get",
"(",
"'causatives'",
")",
":",
"case_obj",
"[",
"'causatives'",
"]",
"=",
"case_data",
"[",
"'causatives'",
"]",
"case_obj",
"[",
"'synopsis'",
"]",
"=",
"case_data",
".",
"get",
"(",
"'synopsis'",
",",
"''",
")",
"case_obj",
"[",
"'status'",
"]",
"=",
"'inactive'",
"case_obj",
"[",
"'is_research'",
"]",
"=",
"False",
"case_obj",
"[",
"'research_requested'",
"]",
"=",
"False",
"case_obj",
"[",
"'rerun_requested'",
"]",
"=",
"False",
"analysis_date",
"=",
"case_data",
".",
"get",
"(",
"'analysis_date'",
")",
"if",
"analysis_date",
":",
"case_obj",
"[",
"'analysis_date'",
"]",
"=",
"analysis_date",
"# We store some metadata and references about gene panels in 'panels'",
"case_panels",
"=",
"case_data",
".",
"get",
"(",
"'gene_panels'",
",",
"[",
"]",
")",
"default_panels",
"=",
"case_data",
".",
"get",
"(",
"'default_panels'",
",",
"[",
"]",
")",
"panels",
"=",
"[",
"]",
"for",
"panel_name",
"in",
"case_panels",
":",
"panel_obj",
"=",
"adapter",
".",
"gene_panel",
"(",
"panel_name",
")",
"if",
"not",
"panel_obj",
":",
"raise",
"IntegrityError",
"(",
"\"Panel %s does not exist in database\"",
"%",
"panel_name",
")",
"panel",
"=",
"{",
"'panel_id'",
":",
"panel_obj",
"[",
"'_id'",
"]",
",",
"'panel_name'",
":",
"panel_obj",
"[",
"'panel_name'",
"]",
",",
"'display_name'",
":",
"panel_obj",
"[",
"'display_name'",
"]",
",",
"'version'",
":",
"panel_obj",
"[",
"'version'",
"]",
",",
"'updated_at'",
":",
"panel_obj",
"[",
"'date'",
"]",
",",
"'nr_genes'",
":",
"len",
"(",
"panel_obj",
"[",
"'genes'",
"]",
")",
"}",
"if",
"panel_name",
"in",
"default_panels",
":",
"panel",
"[",
"'is_default'",
"]",
"=",
"True",
"else",
":",
"panel",
"[",
"'is_default'",
"]",
"=",
"False",
"panels",
".",
"append",
"(",
"panel",
")",
"case_obj",
"[",
"'panels'",
"]",
"=",
"panels",
"case_obj",
"[",
"'dynamic_gene_list'",
"]",
"=",
"{",
"}",
"# Meta data",
"genome_build",
"=",
"case_data",
".",
"get",
"(",
"'genome_build'",
",",
"'37'",
")",
"if",
"not",
"genome_build",
"in",
"[",
"'37'",
",",
"'38'",
"]",
":",
"pass",
"##TODO raise exception if invalid genome build was used",
"case_obj",
"[",
"'genome_build'",
"]",
"=",
"genome_build",
"case_obj",
"[",
"'genome_version'",
"]",
"=",
"case_data",
".",
"get",
"(",
"'genome_version'",
")",
"if",
"case_data",
".",
"get",
"(",
"'rank_model_version'",
")",
":",
"case_obj",
"[",
"'rank_model_version'",
"]",
"=",
"str",
"(",
"case_data",
"[",
"'rank_model_version'",
"]",
")",
"if",
"case_data",
".",
"get",
"(",
"'sv_rank_model_version'",
")",
":",
"case_obj",
"[",
"'sv_rank_model_version'",
"]",
"=",
"str",
"(",
"case_data",
"[",
"'sv_rank_model_version'",
"]",
")",
"if",
"case_data",
".",
"get",
"(",
"'rank_score_threshold'",
")",
":",
"case_obj",
"[",
"'rank_score_threshold'",
"]",
"=",
"float",
"(",
"case_data",
"[",
"'rank_score_threshold'",
"]",
")",
"# phenotype information",
"phenotypes",
"=",
"[",
"]",
"for",
"phenotype",
"in",
"case_data",
".",
"get",
"(",
"'phenotype_terms'",
",",
"[",
"]",
")",
":",
"phenotype_obj",
"=",
"build_phenotype",
"(",
"phenotype",
",",
"adapter",
")",
"if",
"phenotype_obj",
":",
"phenotypes",
".",
"append",
"(",
"phenotype_obj",
")",
"if",
"phenotypes",
":",
"case_obj",
"[",
"'phenotype_terms'",
"]",
"=",
"phenotypes",
"# phenotype groups",
"phenotype_groups",
"=",
"[",
"]",
"for",
"phenotype",
"in",
"case_data",
".",
"get",
"(",
"'phenotype_groups'",
",",
"[",
"]",
")",
":",
"phenotype_obj",
"=",
"build_phenotype",
"(",
"phenotype",
",",
"adapter",
")",
"if",
"phenotype_obj",
":",
"phenotype_groups",
".",
"append",
"(",
"phenotype_obj",
")",
"if",
"phenotype_groups",
":",
"case_obj",
"[",
"'phenotype_groups'",
"]",
"=",
"phenotype_groups",
"# Files",
"case_obj",
"[",
"'madeline_info'",
"]",
"=",
"case_data",
".",
"get",
"(",
"'madeline_info'",
")",
"if",
"'multiqc'",
"in",
"case_data",
":",
"case_obj",
"[",
"'multiqc'",
"]",
"=",
"case_data",
".",
"get",
"(",
"'multiqc'",
")",
"case_obj",
"[",
"'vcf_files'",
"]",
"=",
"case_data",
".",
"get",
"(",
"'vcf_files'",
",",
"{",
"}",
")",
"case_obj",
"[",
"'delivery_report'",
"]",
"=",
"case_data",
".",
"get",
"(",
"'delivery_report'",
")",
"case_obj",
"[",
"'has_svvariants'",
"]",
"=",
"False",
"if",
"(",
"case_obj",
"[",
"'vcf_files'",
"]",
".",
"get",
"(",
"'vcf_sv'",
")",
"or",
"case_obj",
"[",
"'vcf_files'",
"]",
".",
"get",
"(",
"'vcf_sv_research'",
")",
")",
":",
"case_obj",
"[",
"'has_svvariants'",
"]",
"=",
"True",
"case_obj",
"[",
"'has_strvariants'",
"]",
"=",
"False",
"if",
"(",
"case_obj",
"[",
"'vcf_files'",
"]",
".",
"get",
"(",
"'vcf_str'",
")",
")",
":",
"case_obj",
"[",
"'has_strvariants'",
"]",
"=",
"True",
"case_obj",
"[",
"'is_migrated'",
"]",
"=",
"False",
"case_obj",
"[",
"'track'",
"]",
"=",
"case_data",
".",
"get",
"(",
"'track'",
",",
"'rare'",
")",
"return",
"case_obj"
] | 33.946341 | 20.912195 |
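A heavily abbreviated call sketch; a real config dict carries many more keys, and `adapter` must be a connected scout MongoAdapter for which institute('cust000') exists.

case_obj = build_case(
    {'case_id': 'internal-1', 'owner': 'cust000',   # ids are hypothetical
     'individuals': [], 'gene_panels': []},
    adapter,
)
assert case_obj['collaborators'] == ['cust000']     # owner is always a collaborator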
def _entry_management(self): # pylint: disable=too-many-branches
"""
Avoid to have 1 millions line into self.__init__()
"""
if not self.modulo_test: # pylint: disable=no-member
# We are not in a module usage.
# We set the file_path as the file we have to test.
PyFunceble.INTERN[
"file_to_test"
] = self.file_path # pylint: disable=no-member
# We check if the given file_path is an url.
# If it is an URL we update the file to test and download
# the given URL.
self._entry_management_url()
            # We fix the environment permissions.
AutoSave().travis_permissions()
# We check if we need to bypass the execution of PyFunceble.
self.bypass()
# We set the start time.
ExecutionTime("start")
if PyFunceble.CONFIGURATION["syntax"]:
# We are checking for syntax.
# We deactivate the http status code.
PyFunceble.HTTP_CODE["active"] = False
if self.domain_or_ip_to_test: # pylint: disable=no-member
# The given domain is not empty or None.
# We initiate a variable which will tell the system the type
# of the tested element.
PyFunceble.INTERN["to_test_type"] = "domain"
# We set the start time.
ExecutionTime("start")
# We deactivate the showing of percentage as we are in a single
# test run.
PyFunceble.CONFIGURATION["show_percentage"] = False
# We deactivate the whois database as it is not needed.
PyFunceble.CONFIGURATION["whois_database"] = False
if PyFunceble.CONFIGURATION["idna_conversion"]:
domain_or_ip_to_test = domain2idna(
self.domain_or_ip_to_test.lower() # pylint: disable=no-member
)
else:
domain_or_ip_to_test = (
self.domain_or_ip_to_test.lower() # pylint: disable=no-member
) # pylint: disable=no-member
# We test the domain after converting it to lower case.
self.domain(domain_or_ip_to_test)
elif self.url_to_test and not self.file_path: # pylint: disable=no-member
# An url to test is given and the file path is empty.
# We initiate a variable which will tell the system the type
# of the tested element.
PyFunceble.INTERN["to_test_type"] = "url"
# We set the start time.
ExecutionTime("start")
# We deactivate the showing of percentage as we are in a single
# test run.
PyFunceble.CONFIGURATION["show_percentage"] = False
# We test the url to test after converting it if needed (IDNA).
self.url(
self.checker.is_url_valid(
self.url_to_test, # pylint: disable=no-member
return_formatted=True,
)
)
elif (
self._entry_management_url_download(
self.url_file # pylint: disable=no-member
)
or self.url_file # pylint: disable=no-member
):
# * A file full of URL is given.
# or
# * the given file full of URL is a URL.
# * We deactivate the whois subsystem as it is not needed for url testing.
# * We activate the generation of plain list element.
# * We activate the generation of splited data instead of unified data.
PyFunceble.CONFIGURATION["no_whois"] = PyFunceble.CONFIGURATION[
"plain_list_domain"
] = PyFunceble.CONFIGURATION["split"] = True
# We deactivate the generation of hosts file as it is not relevant for
# url testing.
PyFunceble.CONFIGURATION["generate_hosts"] = False
# We initiate a variable which will tell the system the type
# of the tested element.
PyFunceble.INTERN["to_test_type"] = "url"
# And we test the given or the downloaded file.
self.file_url()
elif (
self._entry_management_url_download(
self.link_to_test # pylint: disable=no-member
)
or self._entry_management_url_download(
self.file_path # pylint: disable=no-member
) # pylint: disable=no-member
or self.file_path # pylint: disable=no-member
):
# * A file path is given.
# or
# * The given file path is an URL.
# or
# * A link to test is given.
# We initiate a variable which will tell the system the type
# of the tested element.
PyFunceble.INTERN["to_test_type"] = "domain"
# We test the given or the downloaded file.
self.file()
else:
# No file, domain, single url or file or url is given.
# We print a message on screen.
print(
PyFunceble.Fore.CYAN + PyFunceble.Style.BRIGHT + "Nothing to test."
)
if (
self.domain_or_ip_to_test # pylint: disable=no-member
or self.url_to_test # pylint: disable=no-member
):
# We are testing a domain.
# We stop and log the execution time.
ExecutionTime("stop", last=True)
# We log the current percentage state.
self.percentage.log()
# We show the colored logo.
self.colorify_logo()
# We print our friendly message :)
PyFunceble.stay_safe()
else:
# We are used as an imported module.
# * We activate the simple mode as the table or any full
# details on screen are irrelevant.
# * We activate the quiet mode.
# And we deactivate the generation of files.
PyFunceble.CONFIGURATION["simple"] = PyFunceble.CONFIGURATION[
"quiet"
] = PyFunceble.CONFIGURATION["no_files"] = True
# * We deactivate the whois database as it is not needed.
# * We deactivate the database as it is not needed.
# * We deactivate the autocontinue subsystem as it is not needed.
# * We deactivate the execution time subsystem as it is not needed.
PyFunceble.CONFIGURATION["whois_database"] = PyFunceble.CONFIGURATION[
"inactive_database"
] = PyFunceble.CONFIGURATION["auto_continue"] = PyFunceble.CONFIGURATION[
"show_execution_time"
] = False
if self.domain_or_ip_to_test: # pylint: disable=no-member
# A domain is given.
# We initiate a variable which will tell the system the type
# of the tested element.
PyFunceble.INTERN["to_test_type"] = "domain"
# We set the domain to test.
PyFunceble.INTERN[
"to_test"
] = self.domain_or_ip_to_test.lower() # pylint: disable=no-member
elif self.url_to_test: # pylint: disable=no-member
# A url is given,
# We initiate a variable which will tell the system the type
# of the tested element.
PyFunceble.INTERN["to_test_type"] = "url"
# We set the url to test.
PyFunceble.INTERN[
"to_test"
] = self.url_to_test
|
[
"def",
"_entry_management",
"(",
"self",
")",
":",
"# pylint: disable=too-many-branches",
"if",
"not",
"self",
".",
"modulo_test",
":",
"# pylint: disable=no-member",
"# We are not in a module usage.",
"# We set the file_path as the file we have to test.",
"PyFunceble",
".",
"INTERN",
"[",
"\"file_to_test\"",
"]",
"=",
"self",
".",
"file_path",
"# pylint: disable=no-member",
"# We check if the given file_path is an url.",
"# If it is an URL we update the file to test and download",
"# the given URL.",
"self",
".",
"_entry_management_url",
"(",
")",
"# We fix the environnement permissions.",
"AutoSave",
"(",
")",
".",
"travis_permissions",
"(",
")",
"# We check if we need to bypass the execution of PyFunceble.",
"self",
".",
"bypass",
"(",
")",
"# We set the start time.",
"ExecutionTime",
"(",
"\"start\"",
")",
"if",
"PyFunceble",
".",
"CONFIGURATION",
"[",
"\"syntax\"",
"]",
":",
"# We are checking for syntax.",
"# We deactivate the http status code.",
"PyFunceble",
".",
"HTTP_CODE",
"[",
"\"active\"",
"]",
"=",
"False",
"if",
"self",
".",
"domain_or_ip_to_test",
":",
"# pylint: disable=no-member",
"# The given domain is not empty or None.",
"# We initiate a variable which will tell the system the type",
"# of the tested element.",
"PyFunceble",
".",
"INTERN",
"[",
"\"to_test_type\"",
"]",
"=",
"\"domain\"",
"# We set the start time.",
"ExecutionTime",
"(",
"\"start\"",
")",
"# We deactivate the showing of percentage as we are in a single",
"# test run.",
"PyFunceble",
".",
"CONFIGURATION",
"[",
"\"show_percentage\"",
"]",
"=",
"False",
"# We deactivate the whois database as it is not needed.",
"PyFunceble",
".",
"CONFIGURATION",
"[",
"\"whois_database\"",
"]",
"=",
"False",
"if",
"PyFunceble",
".",
"CONFIGURATION",
"[",
"\"idna_conversion\"",
"]",
":",
"domain_or_ip_to_test",
"=",
"domain2idna",
"(",
"self",
".",
"domain_or_ip_to_test",
".",
"lower",
"(",
")",
"# pylint: disable=no-member",
")",
"else",
":",
"domain_or_ip_to_test",
"=",
"(",
"self",
".",
"domain_or_ip_to_test",
".",
"lower",
"(",
")",
"# pylint: disable=no-member",
")",
"# pylint: disable=no-member",
"# We test the domain after converting it to lower case.",
"self",
".",
"domain",
"(",
"domain_or_ip_to_test",
")",
"elif",
"self",
".",
"url_to_test",
"and",
"not",
"self",
".",
"file_path",
":",
"# pylint: disable=no-member",
"# An url to test is given and the file path is empty.",
"# We initiate a variable which will tell the system the type",
"# of the tested element.",
"PyFunceble",
".",
"INTERN",
"[",
"\"to_test_type\"",
"]",
"=",
"\"url\"",
"# We set the start time.",
"ExecutionTime",
"(",
"\"start\"",
")",
"# We deactivate the showing of percentage as we are in a single",
"# test run.",
"PyFunceble",
".",
"CONFIGURATION",
"[",
"\"show_percentage\"",
"]",
"=",
"False",
"# We test the url to test after converting it if needed (IDNA).",
"self",
".",
"url",
"(",
"self",
".",
"checker",
".",
"is_url_valid",
"(",
"self",
".",
"url_to_test",
",",
"# pylint: disable=no-member",
"return_formatted",
"=",
"True",
",",
")",
")",
"elif",
"(",
"self",
".",
"_entry_management_url_download",
"(",
"self",
".",
"url_file",
"# pylint: disable=no-member",
")",
"or",
"self",
".",
"url_file",
"# pylint: disable=no-member",
")",
":",
"# * A file full of URL is given.",
"# or",
"# * the given file full of URL is a URL.",
"# * We deactivate the whois subsystem as it is not needed for url testing.",
"# * We activate the generation of plain list element.",
"# * We activate the generation of splited data instead of unified data.",
"PyFunceble",
".",
"CONFIGURATION",
"[",
"\"no_whois\"",
"]",
"=",
"PyFunceble",
".",
"CONFIGURATION",
"[",
"\"plain_list_domain\"",
"]",
"=",
"PyFunceble",
".",
"CONFIGURATION",
"[",
"\"split\"",
"]",
"=",
"True",
"# We deactivate the generation of hosts file as it is not relevant for",
"# url testing.",
"PyFunceble",
".",
"CONFIGURATION",
"[",
"\"generate_hosts\"",
"]",
"=",
"False",
"# We initiate a variable which will tell the system the type",
"# of the tested element.",
"PyFunceble",
".",
"INTERN",
"[",
"\"to_test_type\"",
"]",
"=",
"\"url\"",
"# And we test the given or the downloaded file.",
"self",
".",
"file_url",
"(",
")",
"elif",
"(",
"self",
".",
"_entry_management_url_download",
"(",
"self",
".",
"link_to_test",
"# pylint: disable=no-member",
")",
"or",
"self",
".",
"_entry_management_url_download",
"(",
"self",
".",
"file_path",
"# pylint: disable=no-member",
")",
"# pylint: disable=no-member",
"or",
"self",
".",
"file_path",
"# pylint: disable=no-member",
")",
":",
"# * A file path is given.",
"# or",
"# * The given file path is an URL.",
"# or",
"# * A link to test is given.",
"# We initiate a variable which will tell the system the type",
"# of the tested element.",
"PyFunceble",
".",
"INTERN",
"[",
"\"to_test_type\"",
"]",
"=",
"\"domain\"",
"# We test the given or the downloaded file.",
"self",
".",
"file",
"(",
")",
"else",
":",
"# No file, domain, single url or file or url is given.",
"# We print a message on screen.",
"print",
"(",
"PyFunceble",
".",
"Fore",
".",
"CYAN",
"+",
"PyFunceble",
".",
"Style",
".",
"BRIGHT",
"+",
"\"Nothing to test.\"",
")",
"if",
"(",
"self",
".",
"domain_or_ip_to_test",
"# pylint: disable=no-member",
"or",
"self",
".",
"url_to_test",
"# pylint: disable=no-member",
")",
":",
"# We are testing a domain.",
"# We stop and log the execution time.",
"ExecutionTime",
"(",
"\"stop\"",
",",
"last",
"=",
"True",
")",
"# We log the current percentage state.",
"self",
".",
"percentage",
".",
"log",
"(",
")",
"# We show the colored logo.",
"self",
".",
"colorify_logo",
"(",
")",
"# We print our friendly message :)",
"PyFunceble",
".",
"stay_safe",
"(",
")",
"else",
":",
"# We are used as an imported module.",
"# * We activate the simple mode as the table or any full",
"# details on screen are irrelevant.",
"# * We activate the quiet mode.",
"# And we deactivate the generation of files.",
"PyFunceble",
".",
"CONFIGURATION",
"[",
"\"simple\"",
"]",
"=",
"PyFunceble",
".",
"CONFIGURATION",
"[",
"\"quiet\"",
"]",
"=",
"PyFunceble",
".",
"CONFIGURATION",
"[",
"\"no_files\"",
"]",
"=",
"True",
"# * We deactivate the whois database as it is not needed.",
"# * We deactivate the database as it is not needed.",
"# * We deactivate the autocontinue subsystem as it is not needed.",
"# * We deactivate the execution time subsystem as it is not needed.",
"PyFunceble",
".",
"CONFIGURATION",
"[",
"\"whois_database\"",
"]",
"=",
"PyFunceble",
".",
"CONFIGURATION",
"[",
"\"inactive_database\"",
"]",
"=",
"PyFunceble",
".",
"CONFIGURATION",
"[",
"\"auto_continue\"",
"]",
"=",
"PyFunceble",
".",
"CONFIGURATION",
"[",
"\"show_execution_time\"",
"]",
"=",
"False",
"if",
"self",
".",
"domain_or_ip_to_test",
":",
"# pylint: disable=no-member",
"# A domain is given.",
"# We initiate a variable which will tell the system the type",
"# of the tested element.",
"PyFunceble",
".",
"INTERN",
"[",
"\"to_test_type\"",
"]",
"=",
"\"domain\"",
"# We set the domain to test.",
"PyFunceble",
".",
"INTERN",
"[",
"\"to_test\"",
"]",
"=",
"self",
".",
"domain_or_ip_to_test",
".",
"lower",
"(",
")",
"# pylint: disable=no-member",
"elif",
"self",
".",
"url_to_test",
":",
"# pylint: disable=no-member",
"# A url is given,",
"# We initiate a variable which will tell the system the type",
"# of the tested element.",
"PyFunceble",
".",
"INTERN",
"[",
"\"to_test_type\"",
"]",
"=",
"\"url\"",
"# We set the url to test.",
"PyFunceble",
".",
"INTERN",
"[",
"\"to_test\"",
"]",
"=",
"self",
".",
"url_to_test"
] | 40.070707 | 22.424242 |
def _parse_image_name(self, image, retry=True):
'''starting with an image string in either of the following formats:
job_id,collection
job_id,collection,job_name
Parse the job_name, job_id, and collection uri from it. If the user
provides the first option, we use the job_name set by the client
(default is build).
Parameters
==========
image: the string to parse, with values separated by commas
retry: the client can call itself recursively once, providing the
default job_name if the user doesn't.
'''
try:
job_id, collection, job_name = image.split(',')
except:
# Retry and add job_name
if retry:
return self._parse_image_name("%s,%s" %(image, self.job),
retry=False)
# Or fail
bot.exit('''Malformed image string! Please provide:
job_id,collection (or)
job_id,collection,job_name''')
return job_id, collection, job_name
|
[
"def",
"_parse_image_name",
"(",
"self",
",",
"image",
",",
"retry",
"=",
"True",
")",
":",
"try",
":",
"job_id",
",",
"collection",
",",
"job_name",
"=",
"image",
".",
"split",
"(",
"','",
")",
"except",
":",
"# Retry and add job_name",
"if",
"retry",
":",
"return",
"self",
".",
"_parse_image_name",
"(",
"\"%s,%s\"",
"%",
"(",
"image",
",",
"self",
".",
"job",
")",
",",
"retry",
"=",
"False",
")",
"# Or fail",
"bot",
".",
"exit",
"(",
"'''Malformed image string! Please provide:\n job_id,collection (or)\n job_id,collection,job_name'''",
")",
"return",
"job_id",
",",
"collection",
",",
"job_name"
] | 39.068966 | 22.862069 |
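The retry trick above is easiest to see in isolation. A minimal standalone sketch of the same parsing logic, with the default job name 'build' assumed for illustration:
def parse_image_name(image, default_job='build'):
    # Append the default job_name when only job_id,collection is given,
    # mirroring the recursive retry in _parse_image_name above.
    parts = image.split(',')
    if len(parts) == 2:
        parts.append(default_job)
    if len(parts) != 3:
        raise ValueError('expected job_id,collection[,job_name]: %s' % image)
    return tuple(parts)

print(parse_image_name('123,dinosaur'))          # ('123', 'dinosaur', 'build')
print(parse_image_name('123,dinosaur,avocado'))  # ('123', 'dinosaur', 'avocado')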
def _utf8_params(params):
"""encode a dictionary of URL parameters (including iterables) as utf-8"""
assert isinstance(params, dict)
encoded_params = []
for k, v in params.items():
if v is None:
continue
if isinstance(v, integer_types + (float,)):
v = str(v)
if isinstance(v, (list, tuple)):
v = [to_bytes(x) for x in v]
else:
v = to_bytes(v)
encoded_params.append((k, v))
return dict(encoded_params)
|
[
"def",
"_utf8_params",
"(",
"params",
")",
":",
"assert",
"isinstance",
"(",
"params",
",",
"dict",
")",
"encoded_params",
"=",
"[",
"]",
"for",
"k",
",",
"v",
"in",
"params",
".",
"items",
"(",
")",
":",
"if",
"v",
"is",
"None",
":",
"continue",
"if",
"isinstance",
"(",
"v",
",",
"integer_types",
"+",
"(",
"float",
",",
")",
")",
":",
"v",
"=",
"str",
"(",
"v",
")",
"if",
"isinstance",
"(",
"v",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"v",
"=",
"[",
"to_bytes",
"(",
"x",
")",
"for",
"x",
"in",
"v",
"]",
"else",
":",
"v",
"=",
"to_bytes",
"(",
"v",
")",
"encoded_params",
".",
"append",
"(",
"(",
"k",
",",
"v",
")",
")",
"return",
"dict",
"(",
"encoded_params",
")"
] | 32.933333 | 11.066667 |
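A self-contained sketch of the same encoding rules; to_bytes below is an assumption standing in for the module's compat helper:
def to_bytes(value):
    # assumed helper: encode text as UTF-8, pass bytes through
    return value.encode('utf-8') if isinstance(value, str) else value

def utf8_params(params):
    encoded = {}
    for k, v in params.items():
        if v is None:
            continue                      # unset parameters are dropped
        if isinstance(v, (int, float)):
            v = str(v)                    # numbers become strings first
        if isinstance(v, (list, tuple)):
            v = [to_bytes(x) for x in v]  # encode each member of iterables
        else:
            v = to_bytes(v)
        encoded[k] = v
    return encoded

print(utf8_params({'q': 'café', 'n': 3, 'tags': ['a', 'b'], 'skip': None}))
# {'q': b'caf\xc3\xa9', 'n': b'3', 'tags': [b'a', b'b']}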
def has_active_condition(self, condition, instances):
"""
Given a list of instances, and the condition active for
this switch, returns a boolean representing whether the
condition is met, including a non-instance default.
"""
return_value = None
for instance in instances + [None]:
if not self.can_execute(instance):
continue
result = self.is_active(instance, condition)
if result is False:
return False
elif result is True:
return_value = True
return return_value
|
[
"def",
"has_active_condition",
"(",
"self",
",",
"condition",
",",
"instances",
")",
":",
"return_value",
"=",
"None",
"for",
"instance",
"in",
"instances",
"+",
"[",
"None",
"]",
":",
"if",
"not",
"self",
".",
"can_execute",
"(",
"instance",
")",
":",
"continue",
"result",
"=",
"self",
".",
"is_active",
"(",
"instance",
",",
"condition",
")",
"if",
"result",
"is",
"False",
":",
"return",
"False",
"elif",
"result",
"is",
"True",
":",
"return_value",
"=",
"True",
"return",
"return_value"
] | 37.875 | 11 |
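The tri-state rule is worth spelling out: any False result vetoes the switch immediately, any True result enables it, and None falls through to the default. A minimal sketch of just that combination logic:
def combine(results):
    # same tri-state rule: False vetoes, True enables, None means "no opinion"
    combined = None
    for result in results:
        if result is False:
            return False
        elif result is True:
            combined = True
    return combined

print(combine([None, True]))         # True
print(combine([True, False, True]))  # False -- a single veto wins
print(combine([None, None]))         # None -- fall through to defaults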
def divide(a, b):
'''
divide(a, b) returns the quotient a / b as a numpy array object. Unlike numpy's divide function
or a/b syntax, divide will thread over the earliest dimension possible; thus if a.shape is
(4,2) and b.shape is 4, divide(a,b) is equivalent to [ai/bi for (ai,bi) in zip(a,b)].
Note that divide(a,b) supports sparse array arguments, but if b is a sparse matrix, then it will
be reified. Additionally, errors are raised by this function when divide-by-zero occurs, so it
is usually not useful to use divide() with sparse matrices--see zdivide instead.
'''
(a,b) = unbroadcast(a,b)
return cdivide(a,b)
|
[
"def",
"divide",
"(",
"a",
",",
"b",
")",
":",
"(",
"a",
",",
"b",
")",
"=",
"unbroadcast",
"(",
"a",
",",
"b",
")",
"return",
"cdivide",
"(",
"a",
",",
"b",
")"
] | 54.333333 | 40.666667 |
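A plausible numpy sketch of the threading behaviour the docstring describes; this unbroadcast is an illustrative stand-in, not the library's implementation:
import numpy as np

def unbroadcast(a, b):
    # append trailing singleton axes to the lower-rank operand so the
    # shared leading dimension lines up (threading over axis 0)
    a, b = np.asarray(a), np.asarray(b)
    while a.ndim < b.ndim:
        a = a[..., np.newaxis]
    while b.ndim < a.ndim:
        b = b[..., np.newaxis]
    return a, b

a = np.arange(8.0).reshape(4, 2)
b = np.array([1.0, 2.0, 4.0, 8.0])
x, y = unbroadcast(a, b)
print((x / y).shape)   # (4, 2): row i of a divided elementwise by b[i]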
def predict(self, choosers, alternatives, debug=False):
"""
Choose from among alternatives for a group of agents.
Parameters
----------
choosers : pandas.DataFrame
Table describing the agents making choices, e.g. households.
alternatives : pandas.DataFrame
Table describing the things from which agents are choosing.
debug : bool
If debug is set to true, will set the variable "sim_pdf" on
the object to store the probabilities for mapping of the
outcome.
Returns
-------
choices : pandas.Series
Mapping of chooser ID to alternative ID. Some choosers
will map to a nan value when there are not enough alternatives
for all the choosers.
"""
self.assert_fitted()
logger.debug('start: predict LCM model {}'.format(self.name))
choosers, alternatives = self.apply_predict_filters(
choosers, alternatives)
if len(choosers) == 0:
return pd.Series()
if len(alternatives) == 0:
return pd.Series(index=choosers.index)
probabilities = self.probabilities(
choosers, alternatives, filter_tables=False)
if debug:
self.sim_pdf = probabilities
if self.choice_mode == 'aggregate':
choices = unit_choice(
choosers.index.values,
probabilities.index.get_level_values('alternative_id').values,
probabilities.values)
elif self.choice_mode == 'individual':
def mkchoice(probs):
probs.reset_index(0, drop=True, inplace=True)
return np.random.choice(
probs.index.values, p=probs.values / probs.sum())
choices = probabilities.groupby(level='chooser_id', sort=False)\
.apply(mkchoice)
else:
raise ValueError(
'Unrecognized choice_mode option: {}'.format(self.choice_mode))
logger.debug('finish: predict LCM model {}'.format(self.name))
return choices
|
[
"def",
"predict",
"(",
"self",
",",
"choosers",
",",
"alternatives",
",",
"debug",
"=",
"False",
")",
":",
"self",
".",
"assert_fitted",
"(",
")",
"logger",
".",
"debug",
"(",
"'start: predict LCM model {}'",
".",
"format",
"(",
"self",
".",
"name",
")",
")",
"choosers",
",",
"alternatives",
"=",
"self",
".",
"apply_predict_filters",
"(",
"choosers",
",",
"alternatives",
")",
"if",
"len",
"(",
"choosers",
")",
"==",
"0",
":",
"return",
"pd",
".",
"Series",
"(",
")",
"if",
"len",
"(",
"alternatives",
")",
"==",
"0",
":",
"return",
"pd",
".",
"Series",
"(",
"index",
"=",
"choosers",
".",
"index",
")",
"probabilities",
"=",
"self",
".",
"probabilities",
"(",
"choosers",
",",
"alternatives",
",",
"filter_tables",
"=",
"False",
")",
"if",
"debug",
":",
"self",
".",
"sim_pdf",
"=",
"probabilities",
"if",
"self",
".",
"choice_mode",
"==",
"'aggregate'",
":",
"choices",
"=",
"unit_choice",
"(",
"choosers",
".",
"index",
".",
"values",
",",
"probabilities",
".",
"index",
".",
"get_level_values",
"(",
"'alternative_id'",
")",
".",
"values",
",",
"probabilities",
".",
"values",
")",
"elif",
"self",
".",
"choice_mode",
"==",
"'individual'",
":",
"def",
"mkchoice",
"(",
"probs",
")",
":",
"probs",
".",
"reset_index",
"(",
"0",
",",
"drop",
"=",
"True",
",",
"inplace",
"=",
"True",
")",
"return",
"np",
".",
"random",
".",
"choice",
"(",
"probs",
".",
"index",
".",
"values",
",",
"p",
"=",
"probs",
".",
"values",
"/",
"probs",
".",
"sum",
"(",
")",
")",
"choices",
"=",
"probabilities",
".",
"groupby",
"(",
"level",
"=",
"'chooser_id'",
",",
"sort",
"=",
"False",
")",
".",
"apply",
"(",
"mkchoice",
")",
"else",
":",
"raise",
"ValueError",
"(",
"'Unrecognized choice_mode option: {}'",
".",
"format",
"(",
"self",
".",
"choice_mode",
")",
")",
"logger",
".",
"debug",
"(",
"'finish: predict LCM model {}'",
".",
"format",
"(",
"self",
".",
"name",
")",
")",
"return",
"choices"
] | 35.508475 | 20.389831 |
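The 'individual' branch is the subtle one: each chooser samples its own alternative from its slice of the probability Series. A toy sketch with assumed values:
import numpy as np
import pandas as pd

# hypothetical probabilities for two choosers over two alternatives
idx = pd.MultiIndex.from_product([[1, 2], [10, 20]],
                                 names=['chooser_id', 'alternative_id'])
probs = pd.Series([0.25, 0.75, 0.6, 0.4], index=idx)

def mkchoice(p):
    p = p.reset_index(0, drop=True)   # keep only the alternative_id level
    return np.random.choice(p.index.values, p=p.values / p.sum())

choices = probs.groupby(level='chooser_id', sort=False).apply(mkchoice)
print(choices)   # Series mapping each chooser_id to a sampled alternative_id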
def list_networks(**kwargs):
'''
List all virtual networks.
:param connection: libvirt connection URI, overriding defaults
:param username: username to connect with, overriding defaults
:param password: password to connect with, overriding defaults
.. versionadded:: 2019.2.0
CLI Example:
.. code-block:: bash
salt '*' virt.list_networks
'''
conn = __get_conn(**kwargs)
try:
return [net.name() for net in conn.listAllNetworks()]
finally:
conn.close()
|
[
"def",
"list_networks",
"(",
"*",
"*",
"kwargs",
")",
":",
"conn",
"=",
"__get_conn",
"(",
"*",
"*",
"kwargs",
")",
"try",
":",
"return",
"[",
"net",
".",
"name",
"(",
")",
"for",
"net",
"in",
"conn",
".",
"listAllNetworks",
"(",
")",
"]",
"finally",
":",
"conn",
".",
"close",
"(",
")"
] | 24.095238 | 25.333333 |
def reassign_arguments(self):
"""Deal with optional stringlist before a required one."""
condition = (
"variable-list" in self.arguments and
"list-of-flags" not in self.arguments
)
if condition:
self.arguments["list-of-flags"] = (
self.arguments.pop("variable-list"))
self.rargs_cnt = 1
|
[
"def",
"reassign_arguments",
"(",
"self",
")",
":",
"condition",
"=",
"(",
"\"variable-list\"",
"in",
"self",
".",
"arguments",
"and",
"\"list-of-flags\"",
"not",
"in",
"self",
".",
"arguments",
")",
"if",
"condition",
":",
"self",
".",
"arguments",
"[",
"\"list-of-flags\"",
"]",
"=",
"(",
"self",
".",
"arguments",
".",
"pop",
"(",
"\"variable-list\"",
")",
")",
"self",
".",
"rargs_cnt",
"=",
"1"
] | 37.3 | 12.7 |
def conference_deaf(self, call_params):
"""REST Conference Deaf helper
"""
path = '/' + self.api_version + '/ConferenceDeaf/'
method = 'POST'
return self.request(path, method, call_params)
|
[
"def",
"conference_deaf",
"(",
"self",
",",
"call_params",
")",
":",
"path",
"=",
"'/'",
"+",
"self",
".",
"api_version",
"+",
"'/ConferenceDeaf/'",
"method",
"=",
"'POST'",
"return",
"self",
".",
"request",
"(",
"path",
",",
"method",
",",
"call_params",
")"
] | 37.166667 | 8.333333 |
def color_to_hex(color):
"""Convert matplotlib color code to hex color code"""
if color is None or colorConverter.to_rgba(color)[3] == 0:
return 'none'
else:
rgb = colorConverter.to_rgb(color)
return '#{0:02X}{1:02X}{2:02X}'.format(*(int(255 * c) for c in rgb))
|
[
"def",
"color_to_hex",
"(",
"color",
")",
":",
"if",
"color",
"is",
"None",
"or",
"colorConverter",
".",
"to_rgba",
"(",
"color",
")",
"[",
"3",
"]",
"==",
"0",
":",
"return",
"'none'",
"else",
":",
"rgb",
"=",
"colorConverter",
".",
"to_rgb",
"(",
"color",
")",
"return",
"'#{0:02X}{1:02X}{2:02X}'",
".",
"format",
"(",
"*",
"(",
"int",
"(",
"255",
"*",
"c",
")",
"for",
"c",
"in",
"rgb",
")",
")"
] | 41.571429 | 18 |
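Usage sketch, assuming matplotlib is available and color_to_hex is defined as above:
print(color_to_hex('red'))          # '#FF0000'
print(color_to_hex((0, 0.5, 1.0)))  # '#007FFF' -- int(255 * 0.5) truncates to 127
print(color_to_hex(None))           # 'none'
print(color_to_hex((1, 0, 0, 0)))   # 'none' -- fully transparent alpha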
def huffman_conv2bitstring(cls, s):
# type: (str) -> Tuple[int, int]
""" huffman_conv2bitstring converts a string into its bitstring
representation. It returns a tuple: the bitstring and its bitlength.
This function DOES NOT compress/decompress the string!
@param str s: the bytestring to convert.
@return (int, int): the bitstring of s, and its bitlength.
@raise AssertionError
"""
i = 0
ibl = len(s) * 8
for c in s:
i = (i << 8) + orb(c)
ret = i, ibl
assert(ret[0] >= 0)
assert(ret[1] >= 0)
return ret
|
[
"def",
"huffman_conv2bitstring",
"(",
"cls",
",",
"s",
")",
":",
"# type: (str) -> Tuple[int, int]",
"i",
"=",
"0",
"ibl",
"=",
"len",
"(",
"s",
")",
"*",
"8",
"for",
"c",
"in",
"s",
":",
"i",
"=",
"(",
"i",
"<<",
"8",
")",
"+",
"orb",
"(",
"c",
")",
"ret",
"=",
"i",
",",
"ibl",
"assert",
"(",
"ret",
"[",
"0",
"]",
">=",
"0",
")",
"assert",
"(",
"ret",
"[",
"1",
"]",
">=",
"0",
")",
"return",
"ret"
] | 32.578947 | 17.210526 |
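A minimal Python 3 sketch of the same conversion; iterating over bytes already yields ints, so Scapy's orb() helper is not needed here:
def conv2bitstring(s):
    # fold each byte into the integer, 8 bits at a time
    i = 0
    for c in s:
        i = (i << 8) + c
    return i, len(s) * 8

i, ibl = conv2bitstring(b'AB')
print(hex(i), ibl)   # 0x4142 16 -- each byte contributes 8 bits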
def pickFilepath( self ):
"""
Picks the image file to use for this icon path.
"""
filepath = QFileDialog.getOpenFileName( self,
'Select Image File',
QDir.currentPath(),
self.fileTypes())
if type(filepath) == tuple:
filepath = nativestring(filepath[0])
if ( filepath ):
self.setFilepath(filepath)
|
[
"def",
"pickFilepath",
"(",
"self",
")",
":",
"filepath",
"=",
"QFileDialog",
".",
"getOpenFileName",
"(",
"self",
",",
"'Select Image File'",
",",
"QDir",
".",
"currentPath",
"(",
")",
",",
"self",
".",
"fileTypes",
"(",
")",
")",
"if",
"type",
"(",
"filepath",
")",
"==",
"tuple",
":",
"filepath",
"=",
"nativestring",
"(",
"filepath",
"[",
"0",
"]",
")",
"if",
"(",
"filepath",
")",
":",
"self",
".",
"setFilepath",
"(",
"filepath",
")"
] | 37.071429 | 15.5 |
def accel_rename_current_tab(self, *args):
"""Callback to show the rename tab dialog. Called by the accel
key.
"""
page_num = self.get_notebook().get_current_page()
page = self.get_notebook().get_nth_page(page_num)
self.get_notebook().get_tab_label(page).on_rename(None)
return True
|
[
"def",
"accel_rename_current_tab",
"(",
"self",
",",
"*",
"args",
")",
":",
"page_num",
"=",
"self",
".",
"get_notebook",
"(",
")",
".",
"get_current_page",
"(",
")",
"page",
"=",
"self",
".",
"get_notebook",
"(",
")",
".",
"get_nth_page",
"(",
"page_num",
")",
"self",
".",
"get_notebook",
"(",
")",
".",
"get_tab_label",
"(",
"page",
")",
".",
"on_rename",
"(",
"None",
")",
"return",
"True"
] | 41.375 | 13.5 |
def k_bn(self, n, Re):
"""
Returns the normalisation of the Sersic profile such that Re is the half-light radius for a given Sersic index n_sersic.
"""
bn = self.b_n(n)
k = bn*Re**(-1./n)
return k, bn
|
[
"def",
"k_bn",
"(",
"self",
",",
"n",
",",
"Re",
")",
":",
"bn",
"=",
"self",
".",
"b_n",
"(",
"n",
")",
"k",
"=",
"bn",
"*",
"Re",
"**",
"(",
"-",
"1.",
"/",
"n",
")",
"return",
"k",
",",
"bn"
] | 32 | 19.714286 |
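A sketch with a worked value; the linear b_n approximation below (Capaccioli) is an assumption -- the class's own b_n may solve the exact incomplete-gamma condition instead:
def b_n(n):
    # Capaccioli linear approximation, assumed here for illustration
    return 1.9992 * n - 0.3271

def k_bn(n, Re):
    bn = b_n(n)
    return bn * Re ** (-1.0 / n), bn

k, bn = k_bn(4.0, 1.0)             # de Vaucouleurs profile, Re = 1
print(round(k, 3), round(bn, 3))   # 7.67 7.67 -- k equals bn when Re == 1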
def altimeter(alt: Number, unit: str = 'inHg') -> str:
"""
Format altimeter details into a spoken word string
"""
ret = 'Altimeter '
if not alt:
ret += 'unknown'
elif unit == 'inHg':
ret += core.spoken_number(alt.repr[:2]) + ' point ' + core.spoken_number(alt.repr[2:])
elif unit == 'hPa':
ret += core.spoken_number(alt.repr)
return ret
|
[
"def",
"altimeter",
"(",
"alt",
":",
"Number",
",",
"unit",
":",
"str",
"=",
"'inHg'",
")",
"->",
"str",
":",
"ret",
"=",
"'Altimeter '",
"if",
"not",
"alt",
":",
"ret",
"+=",
"'unknown'",
"elif",
"unit",
"==",
"'inHg'",
":",
"ret",
"+=",
"core",
".",
"spoken_number",
"(",
"alt",
".",
"repr",
"[",
":",
"2",
"]",
")",
"+",
"' point '",
"+",
"core",
".",
"spoken_number",
"(",
"alt",
".",
"repr",
"[",
"2",
":",
"]",
")",
"elif",
"unit",
"==",
"'hPa'",
":",
"ret",
"+=",
"core",
".",
"spoken_number",
"(",
"alt",
".",
"repr",
")",
"return",
"ret"
] | 31.75 | 16.916667 |
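To see how the pieces assemble, here is a toy stand-in for core.spoken_number (the real avwx helper differs) applied to an inHg reading whose repr is '2992':
def spoken_number(text):
    # illustrative digit-by-digit reading, not avwx's implementation
    words = {'0': 'zero', '1': 'one', '2': 'two', '3': 'three', '4': 'four',
             '5': 'five', '6': 'six', '7': 'seven', '8': 'eight', '9': 'nine'}
    return ' '.join(words.get(ch, ch) for ch in text)

print('Altimeter ' + spoken_number('29') + ' point ' + spoken_number('92'))
# Altimeter two nine point nine two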
def cmd_cm(self, nm=None, ch=None):
"""cm nm=color_map_name ch=chname
Set a color map (name `nm`) for the given channel.
If no value is given, reports the current color map.
"""
viewer = self.get_viewer(ch)
if viewer is None:
self.log("No current viewer/channel.")
return
if nm is None:
rgbmap = viewer.get_rgbmap()
cmap = rgbmap.get_cmap()
self.log(cmap.name)
else:
viewer.set_color_map(nm)
|
[
"def",
"cmd_cm",
"(",
"self",
",",
"nm",
"=",
"None",
",",
"ch",
"=",
"None",
")",
":",
"viewer",
"=",
"self",
".",
"get_viewer",
"(",
"ch",
")",
"if",
"viewer",
"is",
"None",
":",
"self",
".",
"log",
"(",
"\"No current viewer/channel.\"",
")",
"return",
"if",
"nm",
"is",
"None",
":",
"rgbmap",
"=",
"viewer",
".",
"get_rgbmap",
"(",
")",
"cmap",
"=",
"rgbmap",
".",
"get_cmap",
"(",
")",
"self",
".",
"log",
"(",
"cmap",
".",
"name",
")",
"else",
":",
"viewer",
".",
"set_color_map",
"(",
"nm",
")"
] | 28.5 | 15.277778 |
def deleteMetadata(self, remote, address, key):
"""Delete metadata of device"""
try:
return self.proxies["%s-%s" % (self._interface_id, remote)].deleteMetadata(address, key)
except Exception as err:
LOG.debug("ServerThread.deleteMetadata: Exception: %s" % str(err))
|
[
"def",
"deleteMetadata",
"(",
"self",
",",
"remote",
",",
"address",
",",
"key",
")",
":",
"try",
":",
"return",
"self",
".",
"proxies",
"[",
"\"%s-%s\"",
"%",
"(",
"self",
".",
"_interface_id",
",",
"remote",
")",
"]",
".",
"deleteMetadata",
"(",
"address",
",",
"key",
")",
"except",
"Exception",
"as",
"err",
":",
"LOG",
".",
"debug",
"(",
"\"ServerThread.deleteMetadata: Exception: %s\"",
"%",
"str",
"(",
"err",
")",
")"
] | 51.333333 | 23.5 |
def has_change_permission(self):
"""
Returns a boolean if the current user has permission to change the current object being
viewed/edited.
"""
has_permission = False
if self.user is not None:
# We check for the object level permission here, even though by default the Django
# admin doesn't. If the Django ModelAdmin is extended to allow object level
# permissions - then this will work as expected.
permission_name = '{}.change_{}'.format(self.opts.app_label, self.opts.model_name)
has_permission = (
self.user.has_perm(permission_name) or
self.user.has_perm(permission_name, obj=self.obj)
)
return has_permission
|
[
"def",
"has_change_permission",
"(",
"self",
")",
":",
"has_permission",
"=",
"False",
"if",
"self",
".",
"user",
"is",
"not",
"None",
":",
"# We check for the object level permission here, even though by default the Django",
"# admin doesn't. If the Django ModelAdmin is extended to allow object level",
"# permissions - then this will work as expected.",
"permission_name",
"=",
"'{}.change_{}'",
".",
"format",
"(",
"self",
".",
"opts",
".",
"app_label",
",",
"self",
".",
"opts",
".",
"model_name",
")",
"has_permission",
"=",
"(",
"self",
".",
"user",
".",
"has_perm",
"(",
"permission_name",
")",
"or",
"self",
".",
"user",
".",
"has_perm",
"(",
"permission_name",
",",
"obj",
"=",
"self",
".",
"obj",
")",
")",
"return",
"has_permission"
] | 42.222222 | 24.444444 |
def cross_entropy_loss(logits, one_hot_labels, label_smoothing=0,
weight=1.0, scope=None):
"""Define a Cross Entropy loss using softmax_cross_entropy_with_logits.
It can scale the loss by weight factor, and smooth the labels.
Args:
logits: [batch_size, num_classes] logits outputs of the network.
one_hot_labels: [batch_size, num_classes] target one_hot_encoded labels.
label_smoothing: if greater than 0 then smooth the labels.
weight: scale the loss by this factor.
scope: Optional scope for name_scope.
Returns:
A tensor with the softmax_cross_entropy loss.
"""
logits.get_shape().assert_is_compatible_with(one_hot_labels.get_shape())
with tf.name_scope(scope, 'CrossEntropyLoss', [logits, one_hot_labels]):
num_classes = one_hot_labels.get_shape()[-1].value
one_hot_labels = tf.cast(one_hot_labels, logits.dtype)
if label_smoothing > 0:
smooth_positives = 1.0 - label_smoothing
smooth_negatives = label_smoothing / num_classes
one_hot_labels = one_hot_labels * smooth_positives + smooth_negatives
cross_entropy = tf.contrib.nn.deprecated_flipped_softmax_cross_entropy_with_logits(
logits, one_hot_labels, name='xentropy')
weight = tf.convert_to_tensor(weight,
dtype=logits.dtype.base_dtype,
name='loss_weight')
loss = tf.multiply(weight, tf.reduce_mean(cross_entropy), name='value')
tf.add_to_collection(LOSSES_COLLECTION, loss)
return loss
|
[
"def",
"cross_entropy_loss",
"(",
"logits",
",",
"one_hot_labels",
",",
"label_smoothing",
"=",
"0",
",",
"weight",
"=",
"1.0",
",",
"scope",
"=",
"None",
")",
":",
"logits",
".",
"get_shape",
"(",
")",
".",
"assert_is_compatible_with",
"(",
"one_hot_labels",
".",
"get_shape",
"(",
")",
")",
"with",
"tf",
".",
"name_scope",
"(",
"scope",
",",
"'CrossEntropyLoss'",
",",
"[",
"logits",
",",
"one_hot_labels",
"]",
")",
":",
"num_classes",
"=",
"one_hot_labels",
".",
"get_shape",
"(",
")",
"[",
"-",
"1",
"]",
".",
"value",
"one_hot_labels",
"=",
"tf",
".",
"cast",
"(",
"one_hot_labels",
",",
"logits",
".",
"dtype",
")",
"if",
"label_smoothing",
">",
"0",
":",
"smooth_positives",
"=",
"1.0",
"-",
"label_smoothing",
"smooth_negatives",
"=",
"label_smoothing",
"/",
"num_classes",
"one_hot_labels",
"=",
"one_hot_labels",
"*",
"smooth_positives",
"+",
"smooth_negatives",
"cross_entropy",
"=",
"tf",
".",
"contrib",
".",
"nn",
".",
"deprecated_flipped_softmax_cross_entropy_with_logits",
"(",
"logits",
",",
"one_hot_labels",
",",
"name",
"=",
"'xentropy'",
")",
"weight",
"=",
"tf",
".",
"convert_to_tensor",
"(",
"weight",
",",
"dtype",
"=",
"logits",
".",
"dtype",
".",
"base_dtype",
",",
"name",
"=",
"'loss_weight'",
")",
"loss",
"=",
"tf",
".",
"multiply",
"(",
"weight",
",",
"tf",
".",
"reduce_mean",
"(",
"cross_entropy",
")",
",",
"name",
"=",
"'value'",
")",
"tf",
".",
"add_to_collection",
"(",
"LOSSES_COLLECTION",
",",
"loss",
")",
"return",
"loss"
] | 45.575758 | 21.454545 |
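The label-smoothing arithmetic is easy to check in numpy: with 10 classes and smoothing 0.1, the hot entry becomes 0.9 + 0.01 = 0.91 and every cold entry 0.01, still summing to 1:
import numpy as np

num_classes, label_smoothing = 10, 0.1
one_hot = np.eye(num_classes)[3]              # hard label for class 3
smooth_positives = 1.0 - label_smoothing
smooth_negatives = label_smoothing / num_classes
smoothed = one_hot * smooth_positives + smooth_negatives
print(smoothed[3], smoothed[0])               # ~0.91 and 0.01
assert np.isclose(smoothed.sum(), 1.0)        # still a valid distribution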
def get_dev_interface(devid, auth, url):
"""
Function takes devid as input to a RESTful call to the HP IMC platform and returns a list of device interfaces
:param devid: requires devid as the only input
:param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
:param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
:return: list object which contains a dictionary per interface
:rtype: list
>>> from pyhpeimc.auth import *
>>> from pyhpeimc.plat.device import *
>>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
>>> dev_interfaces = get_dev_interface('15', auth.creds, auth.url)
>>> assert type(dev_interfaces) is list
>>> assert 'ifAlias' in dev_interfaces[0]
"""
get_dev_interface_url = "/imcrs/plat/res/device/" + str(devid) + \
"/interface?start=0&size=1000&desc=false&total=false"
f_url = url + get_dev_interface_url
# creates the URL using the payload variable as the contents
r = requests.get(f_url, auth=auth, headers=HEADERS)
# r.status_code
try:
if r.status_code == 200:
int_list = (json.loads(r.text))['interface']
return int_list
except requests.exceptions.RequestException as e:
return "Error:\n" + str(e) + " get_dev_interface: An Error has occured"
|
[
"def",
"get_dev_interface",
"(",
"devid",
",",
"auth",
",",
"url",
")",
":",
"get_dev_interface_url",
"=",
"\"/imcrs/plat/res/device/\"",
"+",
"str",
"(",
"devid",
")",
"+",
"\"/interface?start=0&size=1000&desc=false&total=false\"",
"f_url",
"=",
"url",
"+",
"get_dev_interface_url",
"# creates the URL using the payload variable as the contents",
"r",
"=",
"requests",
".",
"get",
"(",
"f_url",
",",
"auth",
"=",
"auth",
",",
"headers",
"=",
"HEADERS",
")",
"# r.status_code",
"try",
":",
"if",
"r",
".",
"status_code",
"==",
"200",
":",
"int_list",
"=",
"(",
"json",
".",
"loads",
"(",
"r",
".",
"text",
")",
")",
"[",
"'interface'",
"]",
"return",
"int_list",
"except",
"requests",
".",
"exceptions",
".",
"RequestException",
"as",
"e",
":",
"return",
"\"Error:\\n\"",
"+",
"str",
"(",
"e",
")",
"+",
"\" get_dev_interface: An Error has occured\""
] | 33.317073 | 27.341463 |
def init_app(self, app):
"""
Initializes the extension for the provided Flask application.
Args:
app (flask.Flask): the Flask application for which to initialize the extension.
"""
self._key = app.config.get(CONF_KEY) or getenv(CONF_KEY)
if not self._key:
return
self._endpoint_uri = app.config.get(CONF_ENDPOINT_URI)
sender = AsynchronousSender(self._endpoint_uri)
queue = AsynchronousQueue(sender)
self._channel = TelemetryChannel(None, queue)
self._init_request_logging(app)
self._init_trace_logging(app)
self._init_exception_logging(app)
|
[
"def",
"init_app",
"(",
"self",
",",
"app",
")",
":",
"self",
".",
"_key",
"=",
"app",
".",
"config",
".",
"get",
"(",
"CONF_KEY",
")",
"or",
"getenv",
"(",
"CONF_KEY",
")",
"if",
"not",
"self",
".",
"_key",
":",
"return",
"self",
".",
"_endpoint_uri",
"=",
"app",
".",
"config",
".",
"get",
"(",
"CONF_ENDPOINT_URI",
")",
"sender",
"=",
"AsynchronousSender",
"(",
"self",
".",
"_endpoint_uri",
")",
"queue",
"=",
"AsynchronousQueue",
"(",
"sender",
")",
"self",
".",
"_channel",
"=",
"TelemetryChannel",
"(",
"None",
",",
"queue",
")",
"self",
".",
"_init_request_logging",
"(",
"app",
")",
"self",
".",
"_init_trace_logging",
"(",
"app",
")",
"self",
".",
"_init_exception_logging",
"(",
"app",
")"
] | 31.142857 | 20.952381 |
def stopPolitely(self, disconnect=False):
"""Delete all active ROSpecs. Return a Deferred that will be called
when the DELETE_ROSPEC_RESPONSE comes back."""
logger.info('stopping politely')
if disconnect:
logger.info('will disconnect when stopped')
self.disconnecting = True
self.sendMessage({
'DELETE_ACCESSSPEC': {
'Ver': 1,
'Type': 41,
'ID': 0,
'AccessSpecID': 0 # all AccessSpecs
}})
self.setState(LLRPClient.STATE_SENT_DELETE_ACCESSSPEC)
d = defer.Deferred()
d.addCallback(self.stopAllROSpecs)
d.addErrback(self.panic, 'DELETE_ACCESSSPEC failed')
self._deferreds['DELETE_ACCESSSPEC_RESPONSE'].append(d)
return d
|
[
"def",
"stopPolitely",
"(",
"self",
",",
"disconnect",
"=",
"False",
")",
":",
"logger",
".",
"info",
"(",
"'stopping politely'",
")",
"if",
"disconnect",
":",
"logger",
".",
"info",
"(",
"'will disconnect when stopped'",
")",
"self",
".",
"disconnecting",
"=",
"True",
"self",
".",
"sendMessage",
"(",
"{",
"'DELETE_ACCESSSPEC'",
":",
"{",
"'Ver'",
":",
"1",
",",
"'Type'",
":",
"41",
",",
"'ID'",
":",
"0",
",",
"'AccessSpecID'",
":",
"0",
"# all AccessSpecs",
"}",
"}",
")",
"self",
".",
"setState",
"(",
"LLRPClient",
".",
"STATE_SENT_DELETE_ACCESSSPEC",
")",
"d",
"=",
"defer",
".",
"Deferred",
"(",
")",
"d",
".",
"addCallback",
"(",
"self",
".",
"stopAllROSpecs",
")",
"d",
".",
"addErrback",
"(",
"self",
".",
"panic",
",",
"'DELETE_ACCESSSPEC failed'",
")",
"self",
".",
"_deferreds",
"[",
"'DELETE_ACCESSSPEC_RESPONSE'",
"]",
".",
"append",
"(",
"d",
")",
"return",
"d"
] | 36.454545 | 14.590909 |
def process_raw_data(cls, raw_data):
"""Create a new model using raw API response."""
properties = raw_data["properties"]
raw_content = properties.get("accessControlList", None)
if raw_content is not None:
resource = Resource.from_raw_data(raw_content)
properties["accessControlList"] = resource
# TODO(alexcoman): Add model for ServiceInsertion
raw_content = properties.get("serviceInsertion", None)
if raw_content is not None:
resource = Resource.from_raw_data(raw_content)
properties["serviceInsertion"] = resource
raw_content = properties.get("routeTable", None)
if raw_content is not None:
resource = Resource.from_raw_data(raw_content)
properties["routeTable"] = resource
ip_configurations = []
for raw_config in properties.get("ipConfigurations", []):
ip_configurations.append(Resource.from_raw_data(raw_config))
properties["ipConfigurations"] = ip_configurations
return super(SubNetworks, cls).process_raw_data(raw_data)
|
[
"def",
"process_raw_data",
"(",
"cls",
",",
"raw_data",
")",
":",
"properties",
"=",
"raw_data",
"[",
"\"properties\"",
"]",
"raw_content",
"=",
"properties",
".",
"get",
"(",
"\"accessControlList\"",
",",
"None",
")",
"if",
"raw_content",
"is",
"not",
"None",
":",
"resource",
"=",
"Resource",
".",
"from_raw_data",
"(",
"raw_content",
")",
"properties",
"[",
"\"accessControlList\"",
"]",
"=",
"resource",
"# TODO(alexcoman): Add model for ServiceInsertion",
"raw_content",
"=",
"properties",
".",
"get",
"(",
"\"serviceInsertion\"",
",",
"None",
")",
"if",
"raw_content",
"is",
"not",
"None",
":",
"resource",
"=",
"Resource",
".",
"from_raw_data",
"(",
"raw_content",
")",
"properties",
"[",
"\"serviceInsertion\"",
"]",
"=",
"resource",
"raw_content",
"=",
"properties",
".",
"get",
"(",
"\"routeTable\"",
",",
"None",
")",
"if",
"raw_content",
"is",
"not",
"None",
":",
"resource",
"=",
"Resource",
".",
"from_raw_data",
"(",
"raw_content",
")",
"properties",
"[",
"\"routeTable\"",
"]",
"=",
"resource",
"ip_configurations",
"=",
"[",
"]",
"for",
"raw_config",
"in",
"properties",
".",
"get",
"(",
"\"ipConfigurations\"",
",",
"[",
"]",
")",
":",
"ip_configurations",
".",
"append",
"(",
"Resource",
".",
"from_raw_data",
"(",
"raw_config",
")",
")",
"properties",
"[",
"\"ipConfigurations\"",
"]",
"=",
"ip_configurations",
"return",
"super",
"(",
"SubNetworks",
",",
"cls",
")",
".",
"process_raw_data",
"(",
"raw_data",
")"
] | 42.153846 | 19.153846 |
def from_configuration(cls, env_name, config_name='default'):
""" Environment factory from name and configuration strings.
:param str env_name: the name string of the environment
:param str config_name: the configuration string for env_name
Environment name strings are available using::
from explauto.environment import environments
print environments.keys()
This will return the available environment names, something like::
'['npendulum', 'pendulum', 'simple_arm']'
Once you have chosen an environment, e.g. 'simple_arm', the corresponding available configurations are available using::
env_cls, env_configs, _ = environments['simple_arm']
print env_configs.keys()
This will return the available configuration names for the 'simple_arm' environment, something like::
'['mid_dimensional', 'default', 'high_dim_high_s_range', 'low_dimensional', 'high_dimensional']'
Once you have chosen a configuration, for example the 'mid_dimensional' one, you can construct the environment using::
from explauto import Environment
my_environment = Environment.from_configuration('simple_arm', 'mid_dimensional')
Or, in an equivalent manner::
my_environment = env_cls(**env_configs['mid_dimensional'])
"""
env_cls, env_configs, _ = environments[env_name]
return env_cls(**env_configs[config_name])
|
[
"def",
"from_configuration",
"(",
"cls",
",",
"env_name",
",",
"config_name",
"=",
"'default'",
")",
":",
"env_cls",
",",
"env_configs",
",",
"_",
"=",
"environments",
"[",
"env_name",
"]",
"return",
"env_cls",
"(",
"*",
"*",
"env_configs",
"[",
"config_name",
"]",
")"
] | 40.611111 | 32.305556 |
def validate(
schema: GraphQLSchema,
document_ast: DocumentNode,
rules: Sequence[RuleType] = None,
type_info: TypeInfo = None,
) -> List[GraphQLError]:
"""Implements the "Validation" section of the spec.
Validation runs synchronously, returning a list of encountered errors, or an empty
list if no errors were encountered and the document is valid.
A list of specific validation rules may be provided. If not provided, the default
list of rules defined by the GraphQL specification will be used.
Each validation rule is a ValidationRule object which is a visitor object that holds
a ValidationContext (see the language/visitor API). Visitor methods are expected to
return GraphQLErrors, or lists of GraphQLErrors when invalid.
Optionally a custom TypeInfo instance may be provided. If not provided, one will be
created from the provided schema.
"""
if not document_ast or not isinstance(document_ast, DocumentNode):
raise TypeError("You must provide a document node.")
# If the schema used for validation is invalid, throw an error.
assert_valid_schema(schema)
if type_info is None:
type_info = TypeInfo(schema)
elif not isinstance(type_info, TypeInfo):
raise TypeError(f"Not a TypeInfo object: {inspect(type_info)}")
if rules is None:
rules = specified_rules
elif not isinstance(rules, (list, tuple)):
raise TypeError("Rules must be passed as a list/tuple.")
context = ValidationContext(schema, document_ast, type_info)
# This uses a specialized visitor which runs multiple visitors in parallel,
# while maintaining the visitor skip and break API.
visitors = [rule(context) for rule in rules]
# Visit the whole document with each instance of all provided rules.
visit(document_ast, TypeInfoVisitor(type_info, ParallelVisitor(visitors)))
return context.errors
|
[
"def",
"validate",
"(",
"schema",
":",
"GraphQLSchema",
",",
"document_ast",
":",
"DocumentNode",
",",
"rules",
":",
"Sequence",
"[",
"RuleType",
"]",
"=",
"None",
",",
"type_info",
":",
"TypeInfo",
"=",
"None",
",",
")",
"->",
"List",
"[",
"GraphQLError",
"]",
":",
"if",
"not",
"document_ast",
"or",
"not",
"isinstance",
"(",
"document_ast",
",",
"DocumentNode",
")",
":",
"raise",
"TypeError",
"(",
"\"You must provide a document node.\"",
")",
"# If the schema used for validation is invalid, throw an error.",
"assert_valid_schema",
"(",
"schema",
")",
"if",
"type_info",
"is",
"None",
":",
"type_info",
"=",
"TypeInfo",
"(",
"schema",
")",
"elif",
"not",
"isinstance",
"(",
"type_info",
",",
"TypeInfo",
")",
":",
"raise",
"TypeError",
"(",
"f\"Not a TypeInfo object: {inspect(type_info)}\"",
")",
"if",
"rules",
"is",
"None",
":",
"rules",
"=",
"specified_rules",
"elif",
"not",
"isinstance",
"(",
"rules",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"raise",
"TypeError",
"(",
"\"Rules must be passed as a list/tuple.\"",
")",
"context",
"=",
"ValidationContext",
"(",
"schema",
",",
"document_ast",
",",
"type_info",
")",
"# This uses a specialized visitor which runs multiple visitors in parallel,",
"# while maintaining the visitor skip and break API.",
"visitors",
"=",
"[",
"rule",
"(",
"context",
")",
"for",
"rule",
"in",
"rules",
"]",
"# Visit the whole document with each instance of all provided rules.",
"visit",
"(",
"document_ast",
",",
"TypeInfoVisitor",
"(",
"type_info",
",",
"ParallelVisitor",
"(",
"visitors",
")",
")",
")",
"return",
"context",
".",
"errors"
] | 47 | 23.05 |
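Typical usage, assuming graphql-core 3:
from graphql import build_schema, parse, validate

schema = build_schema("type Query { hello: String }")

assert validate(schema, parse("{ hello }")) == []   # valid query -> no errors
errors = validate(schema, parse("{ goodbye }"))
print(errors[0].message)   # reports the unknown field on type Query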
def init_multipart_upload(self, bucket, object_name, content_type=None,
amz_headers={}, metadata={}):
"""
Initiate a multipart upload to a bucket.
@param bucket: The name of the bucket
@param object_name: The object name
@param content_type: The Content-Type for the object
@param metadata: C{dict} containing additional metadata
@param amz_headers: A C{dict} used to build C{x-amz-*} headers.
@return: C{str} upload_id
"""
objectname_plus = '%s?uploads' % object_name
details = self._details(
method=b"POST",
url_context=self._url_context(bucket=bucket, object_name=objectname_plus),
headers=self._headers(content_type),
metadata=metadata,
amz_headers=amz_headers,
)
d = self._submit(self._query_factory(details))
d.addCallback(
lambda (response, body): MultipartInitiationResponse.from_xml(body)
)
return d
|
[
"def",
"init_multipart_upload",
"(",
"self",
",",
"bucket",
",",
"object_name",
",",
"content_type",
"=",
"None",
",",
"amz_headers",
"=",
"{",
"}",
",",
"metadata",
"=",
"{",
"}",
")",
":",
"objectname_plus",
"=",
"'%s?uploads'",
"%",
"object_name",
"details",
"=",
"self",
".",
"_details",
"(",
"method",
"=",
"b\"POST\"",
",",
"url_context",
"=",
"self",
".",
"_url_context",
"(",
"bucket",
"=",
"bucket",
",",
"object_name",
"=",
"objectname_plus",
")",
",",
"headers",
"=",
"self",
".",
"_headers",
"(",
"content_type",
")",
",",
"metadata",
"=",
"metadata",
",",
"amz_headers",
"=",
"amz_headers",
",",
")",
"d",
"=",
"self",
".",
"_submit",
"(",
"self",
".",
"_query_factory",
"(",
"details",
")",
")",
"d",
".",
"addCallback",
"(",
"lambda",
"(",
"response",
",",
"body",
")",
":",
"MultipartInitiationResponse",
".",
"from_xml",
"(",
"body",
")",
")",
"return",
"d"
] | 40.6 | 17.8 |
def kwargs(self):
"""combine GET and POST params to be passed to the controller"""
kwargs = dict(self.query_kwargs)
kwargs.update(self.body_kwargs)
return kwargs
|
[
"def",
"kwargs",
"(",
"self",
")",
":",
"kwargs",
"=",
"dict",
"(",
"self",
".",
"query_kwargs",
")",
"kwargs",
".",
"update",
"(",
"self",
".",
"body_kwargs",
")",
"return",
"kwargs"
] | 31.5 | 13.833333 |
def generic_meta_translator(self, meta_to_translate):
'''Translates the metadata contained in an object into a dictionary
suitable for export.
Parameters
----------
meta_to_translate : Meta
The metadata object to translate
Returns
-------
dict
A dictionary of the metadata for each variable of an output file
e.g. netcdf4'''
export_dict = {}
if self._meta_translation_table is not None:
# Create a translation table for the actual values of the meta labels.
# The instrument specific translation table only stores the names of the
# attributes that hold the various meta labels
translation_table = {}
for key in self._meta_translation_table:
translation_table[getattr(self, key)] = self._meta_translation_table[key]
else:
translation_table = None
#First Order Data
for key in meta_to_translate.data.index:
if translation_table is None:
export_dict[key] = meta_to_translate.data.loc[key].to_dict()
else:
# Translate each key if a translation is provided
export_dict[key] = {}
meta_dict = meta_to_translate.data.loc[key].to_dict()
for original_key in meta_dict:
if original_key in translation_table:
for translated_key in translation_table[original_key]:
export_dict[key][translated_key] = meta_dict[original_key]
else:
export_dict[key][original_key] = meta_dict[original_key]
#Higher Order Data
for key in meta_to_translate.ho_data:
if key not in export_dict:
export_dict[key] = {}
for ho_key in meta_to_translate.ho_data[key].data.index:
if translation_table is None:
export_dict[key+'_'+ho_key] = meta_to_translate.ho_data[key].data.loc[ho_key].to_dict()
else:
#Translate each key if a translation is provided
export_dict[key+'_'+ho_key] = {}
meta_dict = meta_to_translate.ho_data[key].data.loc[ho_key].to_dict()
for original_key in meta_dict:
if original_key in translation_table:
for translated_key in translation_table[original_key]:
export_dict[key+'_'+ho_key][translated_key] = meta_dict[original_key]
else:
export_dict[key+'_'+ho_key][original_key] = meta_dict[original_key]
return export_dict
|
[
"def",
"generic_meta_translator",
"(",
"self",
",",
"meta_to_translate",
")",
":",
"export_dict",
"=",
"{",
"}",
"if",
"self",
".",
"_meta_translation_table",
"is",
"not",
"None",
":",
"# Create a translation table for the actual values of the meta labels.",
"# The instrument specific translation table only stores the names of the",
"# attributes that hold the various meta labels",
"translation_table",
"=",
"{",
"}",
"for",
"key",
"in",
"self",
".",
"_meta_translation_table",
":",
"translation_table",
"[",
"getattr",
"(",
"self",
",",
"key",
")",
"]",
"=",
"self",
".",
"_meta_translation_table",
"[",
"key",
"]",
"else",
":",
"translation_table",
"=",
"None",
"#First Order Data",
"for",
"key",
"in",
"meta_to_translate",
".",
"data",
".",
"index",
":",
"if",
"translation_table",
"is",
"None",
":",
"export_dict",
"[",
"key",
"]",
"=",
"meta_to_translate",
".",
"data",
".",
"loc",
"[",
"key",
"]",
".",
"to_dict",
"(",
")",
"else",
":",
"# Translate each key if a translation is provided",
"export_dict",
"[",
"key",
"]",
"=",
"{",
"}",
"meta_dict",
"=",
"meta_to_translate",
".",
"data",
".",
"loc",
"[",
"key",
"]",
".",
"to_dict",
"(",
")",
"for",
"original_key",
"in",
"meta_dict",
":",
"if",
"original_key",
"in",
"translation_table",
":",
"for",
"translated_key",
"in",
"translation_table",
"[",
"original_key",
"]",
":",
"export_dict",
"[",
"key",
"]",
"[",
"translated_key",
"]",
"=",
"meta_dict",
"[",
"original_key",
"]",
"else",
":",
"export_dict",
"[",
"key",
"]",
"[",
"original_key",
"]",
"=",
"meta_dict",
"[",
"original_key",
"]",
"#Higher Order Data",
"for",
"key",
"in",
"meta_to_translate",
".",
"ho_data",
":",
"if",
"key",
"not",
"in",
"export_dict",
":",
"export_dict",
"[",
"key",
"]",
"=",
"{",
"}",
"for",
"ho_key",
"in",
"meta_to_translate",
".",
"ho_data",
"[",
"key",
"]",
".",
"data",
".",
"index",
":",
"if",
"translation_table",
"is",
"None",
":",
"export_dict",
"[",
"key",
"+",
"'_'",
"+",
"ho_key",
"]",
"=",
"meta_to_translate",
".",
"ho_data",
"[",
"key",
"]",
".",
"data",
".",
"loc",
"[",
"ho_key",
"]",
".",
"to_dict",
"(",
")",
"else",
":",
"#Translate each key if a translation is provided",
"export_dict",
"[",
"key",
"+",
"'_'",
"+",
"ho_key",
"]",
"=",
"{",
"}",
"meta_dict",
"=",
"meta_to_translate",
".",
"ho_data",
"[",
"key",
"]",
".",
"data",
".",
"loc",
"[",
"ho_key",
"]",
".",
"to_dict",
"(",
")",
"for",
"original_key",
"in",
"meta_dict",
":",
"if",
"original_key",
"in",
"translation_table",
":",
"for",
"translated_key",
"in",
"translation_table",
"[",
"original_key",
"]",
":",
"export_dict",
"[",
"key",
"+",
"'_'",
"+",
"ho_key",
"]",
"[",
"translated_key",
"]",
"=",
"meta_dict",
"[",
"original_key",
"]",
"else",
":",
"export_dict",
"[",
"key",
"+",
"'_'",
"+",
"ho_key",
"]",
"[",
"original_key",
"]",
"=",
"meta_dict",
"[",
"original_key",
"]",
"return",
"export_dict"
] | 46.220339 | 24.084746 |
def render_title(text, markup=True, no_smartquotes=False):
""" Convert a Markdown title to HTML """
# HACK: If the title starts with something that looks like a list, save it
# for later
pfx, text = re.match(r'([0-9. ]*)(.*)', text).group(1, 2)
text = pfx + misaka.Markdown(TitleRenderer(),
extensions=TITLE_EXTENSIONS)(text)
if not markup:
strip = HTMLStripper()
strip.feed(text)
text = strip.get_data()
if not no_smartquotes:
text = misaka.smartypants(text)
return flask.Markup(text)
|
[
"def",
"render_title",
"(",
"text",
",",
"markup",
"=",
"True",
",",
"no_smartquotes",
"=",
"False",
")",
":",
"# HACK: If the title starts with something that looks like a list, save it",
"# for later",
"pfx",
",",
"text",
"=",
"re",
".",
"match",
"(",
"r'([0-9. ]*)(.*)'",
",",
"text",
")",
".",
"group",
"(",
"1",
",",
"2",
")",
"text",
"=",
"pfx",
"+",
"misaka",
".",
"Markdown",
"(",
"TitleRenderer",
"(",
")",
",",
"extensions",
"=",
"TITLE_EXTENSIONS",
")",
"(",
"text",
")",
"if",
"not",
"markup",
":",
"strip",
"=",
"HTMLStripper",
"(",
")",
"strip",
".",
"feed",
"(",
"text",
")",
"text",
"=",
"strip",
".",
"get_data",
"(",
")",
"if",
"not",
"no_smartquotes",
":",
"text",
"=",
"misaka",
".",
"smartypants",
"(",
"text",
")",
"return",
"flask",
".",
"Markup",
"(",
"text",
")"
] | 31.611111 | 21.166667 |
def _get_min_mag_and_num_bins(self):
"""
Estimate the number of bins in the histogram and return it along with
the first bin center value.
Rounds ``min_mag`` and ``max_mag`` with respect to ``bin_width`` to
make the distance between them include integer number of bins.
:returns:
A tuple of 2 items: first bin center, and total number of bins.
"""
min_mag = round(self.min_mag / self.bin_width) * self.bin_width
max_mag = (round((self.char_mag + DELTA_CHAR / 2) /
self.bin_width) * self.bin_width)
min_mag += self.bin_width / 2.0
max_mag -= self.bin_width / 2.0
# here we use math round on the result of division and not just
# cast it to integer because for some magnitude values that can't
# be represented as an IEEE 754 double precisely the result can
# look like 7.999999999999 which would become 7 instead of 8
# being naively casted to int so we would lose the last bin.
num_bins = int(round((max_mag - min_mag) / self.bin_width)) + 1
return min_mag, num_bins
|
[
"def",
"_get_min_mag_and_num_bins",
"(",
"self",
")",
":",
"min_mag",
"=",
"round",
"(",
"self",
".",
"min_mag",
"/",
"self",
".",
"bin_width",
")",
"*",
"self",
".",
"bin_width",
"max_mag",
"=",
"(",
"round",
"(",
"(",
"self",
".",
"char_mag",
"+",
"DELTA_CHAR",
"/",
"2",
")",
"/",
"self",
".",
"bin_width",
")",
"*",
"self",
".",
"bin_width",
")",
"min_mag",
"+=",
"self",
".",
"bin_width",
"/",
"2.0",
"max_mag",
"-=",
"self",
".",
"bin_width",
"/",
"2.0",
"# here we use math round on the result of division and not just",
"# cast it to integer because for some magnitude values that can't",
"# be represented as an IEEE 754 double precisely the result can",
"# look like 7.999999999999 which would become 7 instead of 8",
"# being naively casted to int so we would lose the last bin.",
"num_bins",
"=",
"int",
"(",
"round",
"(",
"(",
"max_mag",
"-",
"min_mag",
")",
"/",
"self",
".",
"bin_width",
")",
")",
"+",
"1",
"return",
"min_mag",
",",
"num_bins"
] | 48.73913 | 21.869565 |
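A stand-alone version of the same arithmetic, with DELTA_CHAR assumed to be 0.5; for min_mag=5.0, char_mag=7.0 and bin_width=0.5 it yields four bins centred between the half-width-shifted ends:
def min_mag_and_num_bins(min_mag, char_mag, bin_width, delta_char=0.5):
    # shift both ends inward by half a bin so values are bin centers
    lo = round(min_mag / bin_width) * bin_width + bin_width / 2.0
    hi = round((char_mag + delta_char / 2) / bin_width) * bin_width \
        - bin_width / 2.0
    return lo, int(round((hi - lo) / bin_width)) + 1

print(min_mag_and_num_bins(5.0, 7.0, 0.5))
# (5.25, 4): bin centers at 5.25, 5.75, 6.25 and 6.75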
def instantiate_by_name(self, object_name):
""" Instantiate object from the environment, possibly giving some extra arguments """
if object_name not in self.instances:
instance = self.instantiate_from_data(self.environment[object_name])
self.instances[object_name] = instance
return instance
else:
return self.instances[object_name]
|
[
"def",
"instantiate_by_name",
"(",
"self",
",",
"object_name",
")",
":",
"if",
"object_name",
"not",
"in",
"self",
".",
"instances",
":",
"instance",
"=",
"self",
".",
"instantiate_from_data",
"(",
"self",
".",
"environment",
"[",
"object_name",
"]",
")",
"self",
".",
"instances",
"[",
"object_name",
"]",
"=",
"instance",
"return",
"instance",
"else",
":",
"return",
"self",
".",
"instances",
"[",
"object_name",
"]"
] | 44.111111 | 16 |
def remove_collection(self, first_arg, sec_arg, third_arg, fourth_arg=None, commit_msg=None):
"""Remove a collection
Given a collection_id, branch and optionally an
author, remove a collection on the given branch
and attribute the commit to author.
Returns the SHA of the commit on branch.
"""
if fourth_arg is None:
collection_id, branch_name, author = first_arg, sec_arg, third_arg
gh_user = branch_name.split('_collection_')[0]
parent_sha = self.get_master_sha()
else:
gh_user, collection_id, parent_sha, author = first_arg, sec_arg, third_arg, fourth_arg
if commit_msg is None:
commit_msg = "Delete Collection '%s' via OpenTree API" % collection_id
return self._remove_document(gh_user, collection_id, parent_sha, author, commit_msg)
|
[
"def",
"remove_collection",
"(",
"self",
",",
"first_arg",
",",
"sec_arg",
",",
"third_arg",
",",
"fourth_arg",
"=",
"None",
",",
"commit_msg",
"=",
"None",
")",
":",
"if",
"fourth_arg",
"is",
"None",
":",
"collection_id",
",",
"branch_name",
",",
"author",
"=",
"first_arg",
",",
"sec_arg",
",",
"third_arg",
"gh_user",
"=",
"branch_name",
".",
"split",
"(",
"'_collection_'",
")",
"[",
"0",
"]",
"parent_sha",
"=",
"self",
".",
"get_master_sha",
"(",
")",
"else",
":",
"gh_user",
",",
"collection_id",
",",
"parent_sha",
",",
"author",
"=",
"first_arg",
",",
"sec_arg",
",",
"third_arg",
",",
"fourth_arg",
"if",
"commit_msg",
"is",
"None",
":",
"commit_msg",
"=",
"\"Delete Collection '%s' via OpenTree API\"",
"%",
"collection_id",
"return",
"self",
".",
"_remove_document",
"(",
"gh_user",
",",
"collection_id",
",",
"parent_sha",
",",
"author",
",",
"commit_msg",
")"
] | 53.875 | 22.1875 |
def _get_reference_index(self, case):
""" Returns the index of the reference bus.
"""
refs = [bus._i for bus in case.connected_buses if bus.type == REFERENCE]
if len(refs) == 1:
return refs[0]
else:
logger.error("Single swing bus required for DCPF.")
return -1
|
[
"def",
"_get_reference_index",
"(",
"self",
",",
"case",
")",
":",
"refs",
"=",
"[",
"bus",
".",
"_i",
"for",
"bus",
"in",
"case",
".",
"connected_buses",
"if",
"bus",
".",
"type",
"==",
"REFERENCE",
"]",
"if",
"len",
"(",
"refs",
")",
"==",
"1",
":",
"return",
"refs",
"[",
"0",
"]",
"else",
":",
"logger",
".",
"error",
"(",
"\"Single swing bus required for DCPF.\"",
")",
"return",
"-",
"1"
] | 36.555556 | 15.444444 |
def validate_config(self, project, config, actor=None):
"""
```
if config['foo'] and not config['bar']:
raise PluginError('You cannot configure foo with bar')
return config
```
"""
client = JiraClient(config['instance_url'], config['username'], config['password'])
try:
client.get_projects_list()
except ApiError as e:
self.raise_error(e)
return config
|
[
"def",
"validate_config",
"(",
"self",
",",
"project",
",",
"config",
",",
"actor",
"=",
"None",
")",
":",
"client",
"=",
"JiraClient",
"(",
"config",
"[",
"'instance_url'",
"]",
",",
"config",
"[",
"'username'",
"]",
",",
"config",
"[",
"'password'",
"]",
")",
"try",
":",
"client",
".",
"get_projects_list",
"(",
")",
"except",
"ApiError",
"as",
"e",
":",
"self",
".",
"raise_error",
"(",
"e",
")",
"return",
"config"
] | 30.333333 | 19 |
def host_exists(host=None, hostid=None, name=None, node=None, nodeids=None, **kwargs):
'''
Checks if at least one host that matches the given filter criteria exists.
.. versionadded:: 2016.3.0
:param host: technical name of the host
:param hostid: ID of the host (hostid) to check for existence.
:param name: visible name of the host
:param node: name of the node the hosts must belong to (zabbix API < 2.4)
:param nodeids: IDs of the node the hosts must belong to (zabbix API < 2.4)
:param _connection_user: Optional - zabbix user (can also be set in opts or pillar, see module's docstring)
:param _connection_password: Optional - zabbix password (can also be set in opts or pillar, see module's docstring)
:param _connection_url: Optional - url of zabbix frontend (can also be set in opts, pillar, see module's docstring)
:return: True if at least one host that matches the given filter criteria exists, False otherwise.
CLI Example:
.. code-block:: bash
salt '*' zabbix.host_exists 'Zabbix server'
'''
conn_args = _login(**kwargs)
zabbix_version = apiinfo_version(**kwargs)
ret = {}
try:
if conn_args:
# host.exists deprecated
if _LooseVersion(zabbix_version) > _LooseVersion("2.5"):
if not host:
host = None
if not name:
name = None
if not hostid:
hostid = None
ret = host_get(host, name, hostid, **kwargs)
return bool(ret)
# zabbix 2.4 and earlier
else:
method = 'host.exists'
params = {}
if hostid:
params['hostid'] = hostid
if host:
params['host'] = host
if name:
params['name'] = name
# deprecated in 2.4
if _LooseVersion(zabbix_version) < _LooseVersion("2.4"):
if node:
params['node'] = node
if nodeids:
params['nodeids'] = nodeids
if not hostid and not host and not name and not node and not nodeids:
return {'result': False, 'comment': 'Please submit hostid, host, name, node or nodeids parameter to '
'check if at least one host that matches the given filter '
'criteria exists.'}
ret = _query(method, params, conn_args['url'], conn_args['auth'])
return ret['result']
else:
raise KeyError
except KeyError:
return ret
|
[
"def",
"host_exists",
"(",
"host",
"=",
"None",
",",
"hostid",
"=",
"None",
",",
"name",
"=",
"None",
",",
"node",
"=",
"None",
",",
"nodeids",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"conn_args",
"=",
"_login",
"(",
"*",
"*",
"kwargs",
")",
"zabbix_version",
"=",
"apiinfo_version",
"(",
"*",
"*",
"kwargs",
")",
"ret",
"=",
"{",
"}",
"try",
":",
"if",
"conn_args",
":",
"# hostgroup.exists deprecated",
"if",
"_LooseVersion",
"(",
"zabbix_version",
")",
">",
"_LooseVersion",
"(",
"\"2.5\"",
")",
":",
"if",
"not",
"host",
":",
"host",
"=",
"None",
"if",
"not",
"name",
":",
"name",
"=",
"None",
"if",
"not",
"hostid",
":",
"hostid",
"=",
"None",
"ret",
"=",
"host_get",
"(",
"host",
",",
"name",
",",
"hostid",
",",
"*",
"*",
"kwargs",
")",
"return",
"bool",
"(",
"ret",
")",
"# zabbix 2.4 nad earlier",
"else",
":",
"method",
"=",
"'host.exists'",
"params",
"=",
"{",
"}",
"if",
"hostid",
":",
"params",
"[",
"'hostid'",
"]",
"=",
"hostid",
"if",
"host",
":",
"params",
"[",
"'host'",
"]",
"=",
"host",
"if",
"name",
":",
"params",
"[",
"'name'",
"]",
"=",
"name",
"# deprecated in 2.4",
"if",
"_LooseVersion",
"(",
"zabbix_version",
")",
"<",
"_LooseVersion",
"(",
"\"2.4\"",
")",
":",
"if",
"node",
":",
"params",
"[",
"'node'",
"]",
"=",
"node",
"if",
"nodeids",
":",
"params",
"[",
"'nodeids'",
"]",
"=",
"nodeids",
"if",
"not",
"hostid",
"and",
"not",
"host",
"and",
"not",
"name",
"and",
"not",
"node",
"and",
"not",
"nodeids",
":",
"return",
"{",
"'result'",
":",
"False",
",",
"'comment'",
":",
"'Please submit hostid, host, name, node or nodeids parameter to'",
"'check if at least one host that matches the given filter '",
"'criteria exists.'",
"}",
"ret",
"=",
"_query",
"(",
"method",
",",
"params",
",",
"conn_args",
"[",
"'url'",
"]",
",",
"conn_args",
"[",
"'auth'",
"]",
")",
"return",
"ret",
"[",
"'result'",
"]",
"else",
":",
"raise",
"KeyError",
"except",
"KeyError",
":",
"return",
"ret"
] | 42.238095 | 23.539683 |
def connection(self):
"""return authenticated connection"""
c = pymongo.MongoClient(
self.hostname, fsync=True,
socketTimeoutMS=self.socket_timeout, **self.kwargs)
connected(c)
if not self.is_mongos and self.login and not self.restart_required:
db = c[self.auth_source]
if self.x509_extra_user:
auth_dict = {
'name': DEFAULT_SUBJECT, 'mechanism': 'MONGODB-X509'}
else:
auth_dict = {'name': self.login, 'password': self.password}
try:
db.authenticate(**auth_dict)
except:
logger.exception("Could not authenticate to %s with %r"
% (self.hostname, auth_dict))
raise
return c
|
[
"def",
"connection",
"(",
"self",
")",
":",
"c",
"=",
"pymongo",
".",
"MongoClient",
"(",
"self",
".",
"hostname",
",",
"fsync",
"=",
"True",
",",
"socketTimeoutMS",
"=",
"self",
".",
"socket_timeout",
",",
"*",
"*",
"self",
".",
"kwargs",
")",
"connected",
"(",
"c",
")",
"if",
"not",
"self",
".",
"is_mongos",
"and",
"self",
".",
"login",
"and",
"not",
"self",
".",
"restart_required",
":",
"db",
"=",
"c",
"[",
"self",
".",
"auth_source",
"]",
"if",
"self",
".",
"x509_extra_user",
":",
"auth_dict",
"=",
"{",
"'name'",
":",
"DEFAULT_SUBJECT",
",",
"'mechanism'",
":",
"'MONGODB-X509'",
"}",
"else",
":",
"auth_dict",
"=",
"{",
"'name'",
":",
"self",
".",
"login",
",",
"'password'",
":",
"self",
".",
"password",
"}",
"try",
":",
"db",
".",
"authenticate",
"(",
"*",
"*",
"auth_dict",
")",
"except",
":",
"logger",
".",
"exception",
"(",
"\"Could not authenticate to %s with %r\"",
"%",
"(",
"self",
".",
"hostname",
",",
"auth_dict",
")",
")",
"raise",
"return",
"c"
] | 40.45 | 18.1 |
def start_check (aggregate, out):
"""Start checking in background and write encoded output to out."""
# check in background
t = threading.Thread(target=director.check_urls, args=(aggregate,))
t.start()
# time to wait for new data
sleep_seconds = 2
# current running time
run_seconds = 0
while not aggregate.is_finished():
yield out.get_data()
time.sleep(sleep_seconds)
run_seconds += sleep_seconds
if run_seconds > MAX_REQUEST_SECONDS:
director.abort(aggregate)
break
yield out.get_data()
|
[
"def",
"start_check",
"(",
"aggregate",
",",
"out",
")",
":",
"# check in background",
"t",
"=",
"threading",
".",
"Thread",
"(",
"target",
"=",
"director",
".",
"check_urls",
",",
"args",
"=",
"(",
"aggregate",
",",
")",
")",
"t",
".",
"start",
"(",
")",
"# time to wait for new data",
"sleep_seconds",
"=",
"2",
"# current running time",
"run_seconds",
"=",
"0",
"while",
"not",
"aggregate",
".",
"is_finished",
"(",
")",
":",
"yield",
"out",
".",
"get_data",
"(",
")",
"time",
".",
"sleep",
"(",
"sleep_seconds",
")",
"run_seconds",
"+=",
"sleep_seconds",
"if",
"run_seconds",
">",
"MAX_REQUEST_SECONDS",
":",
"director",
".",
"abort",
"(",
"aggregate",
")",
"break",
"yield",
"out",
".",
"get_data",
"(",
")"
] | 33.411765 | 12.647059 |
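The same poll-and-yield pattern, generalised into a hedged sketch (names and timeouts are illustrative):
import threading
import time

def run_with_progress(work, get_data, max_seconds=10, poll_seconds=2):
    # run `work` in a background thread and yield whatever `get_data`
    # has ready every few seconds, giving up after max_seconds
    worker = threading.Thread(target=work)
    worker.start()
    waited = 0
    while worker.is_alive():
        yield get_data()
        time.sleep(poll_seconds)
        waited += poll_seconds
        if waited > max_seconds:
            break            # give up, mirroring the abort branch above
    yield get_data()         # final flush after the work finishes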
def parse_yaml(self, y):
'''Parse a YAML specification of a service port connector into this
object.
'''
self.connector_id = y['connectorId']
self.name = y['name']
if 'transMethod' in y:
self.trans_method = y['transMethod']
else:
self.trans_method = ''
if RTS_EXT_NS_YAML + 'comment' in y:
self.comment = y[RTS_EXT_NS_YAML + 'comment']
else:
self.comment = ''
if RTS_EXT_NS_YAML + 'visible' in y:
visible = y[RTS_EXT_NS_YAML + 'visible']
if visible == True or visible == 'true' or visible == '1':
self.visible = True
else:
self.visible = False
if 'sourceServicePort' not in y:
raise InvalidServicePortConnectorNodeError
self.source_service_port = \
TargetPort().parse_yaml(y['sourceServicePort'])
if 'targetServicePort' not in y:
raise InvalidServicePortConnectorNodeError
self.target_service_port = \
TargetPort().parse_yaml(y['targetServicePort'])
if RTS_EXT_NS_YAML + 'properties' in y:
for p in y[RTS_EXT_NS_YAML + 'properties']:
if 'value' in p:
value = p['value']
else:
value = None
self._properties[p['name']] = value
return self
|
[
"def",
"parse_yaml",
"(",
"self",
",",
"y",
")",
":",
"self",
".",
"connector_id",
"=",
"y",
"[",
"'connectorId'",
"]",
"self",
".",
"name",
"=",
"y",
"[",
"'name'",
"]",
"if",
"'transMethod'",
"in",
"y",
":",
"self",
".",
"trans_method",
"=",
"y",
"[",
"'transMethod'",
"]",
"else",
":",
"self",
".",
"trans_method",
"=",
"''",
"if",
"RTS_EXT_NS_YAML",
"+",
"'comment'",
"in",
"y",
":",
"self",
".",
"comment",
"=",
"y",
"[",
"RTS_EXT_NS_YAML",
"+",
"'comment'",
"]",
"else",
":",
"self",
".",
"comment",
"=",
"''",
"if",
"RTS_EXT_NS_YAML",
"+",
"'visible'",
"in",
"y",
":",
"visible",
"=",
"y",
"[",
"RTS_EXT_NS_YAML",
"+",
"'visible'",
"]",
"if",
"visible",
"==",
"True",
"or",
"visible",
"==",
"'true'",
"or",
"visible",
"==",
"'1'",
":",
"self",
".",
"visible",
"=",
"True",
"else",
":",
"self",
".",
"visible",
"=",
"False",
"if",
"'sourceServicePort'",
"not",
"in",
"y",
":",
"raise",
"InvalidServicePortConnectorNodeError",
"self",
".",
"source_service_port",
"=",
"TargetPort",
"(",
")",
".",
"parse_yaml",
"(",
"y",
"[",
"'sourceServicePort'",
"]",
")",
"if",
"'targetServicePort'",
"not",
"in",
"y",
":",
"raise",
"InvalidServicePortConnectorNodeError",
"self",
".",
"target_service_port",
"=",
"TargetPort",
"(",
")",
".",
"parse_yaml",
"(",
"y",
"[",
"'targetServicePort'",
"]",
")",
"if",
"RTS_EXT_NS_YAML",
"+",
"'properties'",
"in",
"y",
":",
"for",
"p",
"in",
"y",
"[",
"RTS_EXT_NS_YAML",
"+",
"'properties'",
"]",
":",
"if",
"'value'",
"in",
"p",
":",
"value",
"=",
"p",
"[",
"'value'",
"]",
"else",
":",
"value",
"=",
"None",
"self",
".",
"_properties",
"[",
"p",
"[",
"'name'",
"]",
"]",
"=",
"value",
"return",
"self"
] | 37.864865 | 14.081081 |
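
The parser above consumes a plain mapping produced by a YAML loader. Below is a sketch of the expected shape, with an assumed `'rtsExt:'` namespace prefix and a stub `TargetPort` standing in for the real RT-System profile classes:

RTS_EXT_NS_YAML = 'rtsExt:'  # assumed prefix; the real constant lives in the library

class TargetPort:
    def parse_yaml(self, y):
        self.port_name = y.get('portName')
        return self

connector_yaml = {
    'connectorId': 'svc_conn0',
    'name': 'logger',
    'transMethod': 'corba',
    RTS_EXT_NS_YAML + 'comment': 'wires client to provider',
    RTS_EXT_NS_YAML + 'visible': 'true',   # parsed leniently: True/'true'/'1'
    'sourceServicePort': {'portName': 'client0.svc'},
    'targetServicePort': {'portName': 'server0.svc'},
    RTS_EXT_NS_YAML + 'properties': [{'name': 'timeout', 'value': '5'}],
}
# parse_yaml(connector_yaml) would then fill connector_id, trans_method,
# visibility, both TargetPorts, and the _properties dict; omitting either
# service port raises InvalidServicePortConnectorNodeError.
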
def _all_keys(self):
"""Return a list of all encoded key names."""
file_keys = [self._filename_to_key(fn) for fn in self._all_filenames()]
if self._sync:
return set(file_keys)
else:
return set(file_keys + list(self._buffer))
|
[
"def",
"_all_keys",
"(",
"self",
")",
":",
"file_keys",
"=",
"[",
"self",
".",
"_filename_to_key",
"(",
"fn",
")",
"for",
"fn",
"in",
"self",
".",
"_all_filenames",
"(",
")",
"]",
"if",
"self",
".",
"_sync",
":",
"return",
"set",
"(",
"file_keys",
")",
"else",
":",
"return",
"set",
"(",
"file_keys",
"+",
"list",
"(",
"self",
".",
"_buffer",
")",
")"
] | 39.142857 | 17.857143 |
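
The key listing above unions on-disk filenames with an in-memory write buffer unless every write is flushed eagerly. A toy sketch of that split; `DiskDict` and its layout are invented for illustration:

import os

class DiskDict:
    def __init__(self, root, sync=False):
        self.root = root
        self._sync = sync
        self._buffer = {}                  # unflushed writes, keyed by encoded name
        os.makedirs(root, exist_ok=True)

    def _all_filenames(self):
        return os.listdir(self.root)

    def _filename_to_key(self, fn):
        return fn                          # a real store would unescape here

    def _all_keys(self):
        file_keys = [self._filename_to_key(fn) for fn in self._all_filenames()]
        if self._sync:
            # every write already reached disk, so the directory is authoritative
            return set(file_keys)
        # otherwise pending in-memory writes must be merged in as well
        return set(file_keys + list(self._buffer))
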
def setMinGap(self, typeID, minGap):
"""setMinGap(string, double) -> None
Sets the offset (gap to front vehicle if halting) of vehicles of this type.
"""
self._connection._sendDoubleCmd(
tc.CMD_SET_VEHICLETYPE_VARIABLE, tc.VAR_MINGAP, typeID, minGap)
|
[
"def",
"setMinGap",
"(",
"self",
",",
"typeID",
",",
"minGap",
")",
":",
"self",
".",
"_connection",
".",
"_sendDoubleCmd",
"(",
"tc",
".",
"CMD_SET_VEHICLETYPE_VARIABLE",
",",
"tc",
".",
"VAR_MINGAP",
",",
"typeID",
",",
"minGap",
")"
] | 41.285714 | 17.428571 |
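
From user code this setter is reached through SUMO's public `traci.vehicletype` module rather than called directly. A short sketch, assuming SUMO is installed and `demo.sumocfg` (a placeholder name) exists:

import traci

traci.start(["sumo", "-c", "demo.sumocfg"])
traci.vehicletype.setMinGap("car", 3.0)       # halting 'car's keep a 3.0 m gap
print(traci.vehicletype.getMinGap("car"))     # read back: 3.0
traci.close()
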
def addInput(self, key):
"""Add key to input : key, value or map
"""
if key not in self.inputs:
self.inputs.append(key)
root = self.etree
t_inputs = root.find('inputs')
            if t_inputs is None:
t_inputs = ctree.SubElement(root, 'inputs')
t_inputs.append(key.etree)
return True
|
[
"def",
"addInput",
"(",
"self",
",",
"key",
")",
":",
"if",
"key",
"not",
"in",
"self",
".",
"inputs",
":",
"self",
".",
"inputs",
".",
"append",
"(",
"key",
")",
"root",
"=",
"self",
".",
"etree",
"t_inputs",
"=",
"root",
".",
"find",
"(",
"'inputs'",
")",
"if",
"not",
"t_inputs",
":",
"t_inputs",
"=",
"ctree",
".",
"SubElement",
"(",
"root",
",",
"'inputs'",
")",
"t_inputs",
".",
"append",
"(",
"key",
".",
"etree",
")",
"return",
"True"
] | 23.133333 | 17.4 |
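
The method above keeps a Python list and an XML tree in step. A self-contained sketch of the pattern; `Key` and `Job` are invented here, and the `is None` test matters because an `Element` with no children is falsy:

import xml.etree.ElementTree as ctree

class Key:
    def __init__(self, name):
        self.name = name
        self.etree = ctree.Element('key', {'name': name})

class Job:
    def __init__(self):
        self.inputs = []
        self.etree = ctree.Element('job')

    def addInput(self, key):
        if key not in self.inputs:
            self.inputs.append(key)
            t_inputs = self.etree.find('inputs')
            if t_inputs is None:               # create <inputs> on first use
                t_inputs = ctree.SubElement(self.etree, 'inputs')
            t_inputs.append(key.etree)
        return True

job = Job()
job.addInput(Key('alpha'))
print(ctree.tostring(job.etree).decode())
# -> <job><inputs><key name="alpha" /></inputs></job>
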