nwo
stringlengths 5
86
| sha
stringlengths 40
40
| path
stringlengths 4
189
| language
stringclasses 1
value | identifier
stringlengths 1
94
| parameters
stringlengths 2
4.03k
| argument_list
stringclasses 1
value | return_statement
stringlengths 0
11.5k
| docstring
stringlengths 1
33.2k
| docstring_summary
stringlengths 0
5.15k
| docstring_tokens
list | function
stringlengths 34
151k
| function_tokens
list | url
stringlengths 90
278
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
chromiumembedded/cef
|
80caf947f3fe2210e5344713c5281d8af9bdc295
|
tools/yapf/yapf/yapflib/unwrapped_line.py
|
python
|
IsSurroundedByBrackets
|
(tok)
|
return None
|
Return True if the token is surrounded by brackets.
|
Return True if the token is surrounded by brackets.
|
[
"Return",
"True",
"if",
"the",
"token",
"is",
"surrounded",
"by",
"brackets",
"."
] |
def IsSurroundedByBrackets(tok):
    """Return the opening bracket token enclosing *tok*, or None.

    Walks backwards through the token chain, tracking the nesting depth of
    each bracket family independently. The first opening bracket seen at
    depth zero is the one that surrounds *tok*; if the walk reaches the
    start of the chain without finding one, *tok* is not bracketed.
    """
    # Per-family depth counters, keyed by the opening character.
    depth = {'(': 0, '{': 0, '[': 0}
    # Map each closing bracket to its opening counterpart.
    closer_to_opener = {')': '(', '}': '{', ']': '['}
    current = tok.previous_token
    while current:
        val = current.value
        if val in closer_to_opener:
            # A closer seen while scanning backwards means we are entering a
            # bracketed region that closed before *tok* — go one level down.
            depth[closer_to_opener[val]] -= 1
        elif val in depth:
            if depth[val] == 0:
                # Unmatched opener at depth zero: this bracket encloses *tok*.
                return current
            depth[val] += 1
        current = current.previous_token
    return None
|
[
"def",
"IsSurroundedByBrackets",
"(",
"tok",
")",
":",
"paren_count",
"=",
"0",
"brace_count",
"=",
"0",
"sq_bracket_count",
"=",
"0",
"previous_token",
"=",
"tok",
".",
"previous_token",
"while",
"previous_token",
":",
"if",
"previous_token",
".",
"value",
"==",
"')'",
":",
"paren_count",
"-=",
"1",
"elif",
"previous_token",
".",
"value",
"==",
"'}'",
":",
"brace_count",
"-=",
"1",
"elif",
"previous_token",
".",
"value",
"==",
"']'",
":",
"sq_bracket_count",
"-=",
"1",
"if",
"previous_token",
".",
"value",
"==",
"'('",
":",
"if",
"paren_count",
"==",
"0",
":",
"return",
"previous_token",
"paren_count",
"+=",
"1",
"elif",
"previous_token",
".",
"value",
"==",
"'{'",
":",
"if",
"brace_count",
"==",
"0",
":",
"return",
"previous_token",
"brace_count",
"+=",
"1",
"elif",
"previous_token",
".",
"value",
"==",
"'['",
":",
"if",
"sq_bracket_count",
"==",
"0",
":",
"return",
"previous_token",
"sq_bracket_count",
"+=",
"1",
"previous_token",
"=",
"previous_token",
".",
"previous_token",
"return",
"None"
] |
https://github.com/chromiumembedded/cef/blob/80caf947f3fe2210e5344713c5281d8af9bdc295/tools/yapf/yapf/yapflib/unwrapped_line.py#L387-L415
|
|
mantidproject/mantid
|
03deeb89254ec4289edb8771e0188c2090a02f32
|
qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/fitting_widgets/tf_asymmetry_fitting/tf_asymmetry_fitting_model.py
|
python
|
TFAsymmetryFittingModel._convert_global_ties_for_tf_asymmetry_mode
|
(self, global_ties: str)
|
return [self._convert_global_tie_for_tf_asymmetry_mode(global_tie) for global_tie in global_ties]
|
Converts the global ties to the equivalent global ties in the TF Asymmetry function.
|
Converts the global ties to the equivalent global ties in the TF Asymmetry function.
|
[
"Converts",
"the",
"global",
"ties",
"to",
"the",
"equivalent",
"global",
"ties",
"in",
"the",
"TF",
"Asymmetry",
"function",
"."
] |
def _convert_global_ties_for_tf_asymmetry_mode(self, global_ties: str) -> str:
"""Converts the global ties to the equivalent global ties in the TF Asymmetry function."""
return [self._convert_global_tie_for_tf_asymmetry_mode(global_tie) for global_tie in global_ties]
|
[
"def",
"_convert_global_ties_for_tf_asymmetry_mode",
"(",
"self",
",",
"global_ties",
":",
"str",
")",
"->",
"str",
":",
"return",
"[",
"self",
".",
"_convert_global_tie_for_tf_asymmetry_mode",
"(",
"global_tie",
")",
"for",
"global_tie",
"in",
"global_ties",
"]"
] |
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/fitting_widgets/tf_asymmetry_fitting/tf_asymmetry_fitting_model.py#L628-L630
|
|
lmb-freiburg/ogn
|
974f72ef4bf840d6f6693d22d1843a79223e77ce
|
tools/extra/parse_log.py
|
python
|
parse_log
|
(path_to_log)
|
return train_dict_list, test_dict_list
|
Parse log file
Returns (train_dict_list, test_dict_list)
train_dict_list and test_dict_list are lists of dicts that define the table
rows
|
Parse log file
Returns (train_dict_list, test_dict_list)
|
[
"Parse",
"log",
"file",
"Returns",
"(",
"train_dict_list",
"test_dict_list",
")"
] |
def parse_log(path_to_log):
    """Parse log file
    Returns (train_dict_list, test_dict_list)
    train_dict_list and test_dict_list are lists of dicts that define the table
    rows

    Args:
        path_to_log: path to the solver log file to parse.

    Returns:
        Tuple (train_dict_list, test_dict_list) of lists of row dicts.
    """
    # Fix: use raw strings for the regex patterns. Non-raw '\d', '\S', '\.'
    # are invalid escape sequences that raise warnings on modern Python 3
    # (and are slated to become errors); the compiled patterns are unchanged.
    regex_iteration = re.compile(r'Iteration (\d+)')
    regex_train_output = re.compile(r'Train net output #(\d+): (\S+) = ([\.\deE+-]+)')
    regex_test_output = re.compile(r'Test net output #(\d+): (\S+) = ([\.\deE+-]+)')
    regex_learning_rate = re.compile(r'lr = ([-+]?[0-9]*\.?[0-9]+([eE]?[-+]?[0-9]+)?)')
    # Pick out lines of interest
    iteration = -1
    learning_rate = float('NaN')
    train_dict_list = []
    test_dict_list = []
    train_row = None
    test_row = None
    # Log lines carry no year, so it is recovered from the file's metadata.
    logfile_year = extract_seconds.get_log_created_year(path_to_log)
    with open(path_to_log) as f:
        start_time = extract_seconds.get_start_time(f, logfile_year)
        last_time = start_time
        for line in f:
            iteration_match = regex_iteration.search(line)
            if iteration_match:
                iteration = float(iteration_match.group(1))
            if iteration == -1:
                # Only start parsing for other stuff if we've found the first
                # iteration
                continue
            try:
                time = extract_seconds.extract_datetime_from_line(line,
                                                                  logfile_year)
            except ValueError:
                # Skip lines with bad formatting, for example when resuming solver
                continue
            # A backwards month step means the log rolled over into a new year;
            # bump the year and re-parse the timestamp.
            if time.month < last_time.month:
                logfile_year += 1
                time = extract_seconds.extract_datetime_from_line(line, logfile_year)
            last_time = time
            seconds = (time - start_time).total_seconds()
            learning_rate_match = regex_learning_rate.search(line)
            if learning_rate_match:
                learning_rate = float(learning_rate_match.group(1))
            # Each helper call either accumulates into the current row or
            # flushes a completed row into its dict list.
            train_dict_list, train_row = parse_line_for_net_output(
                regex_train_output, train_row, train_dict_list,
                line, iteration, seconds, learning_rate
            )
            test_dict_list, test_row = parse_line_for_net_output(
                regex_test_output, test_row, test_dict_list,
                line, iteration, seconds, learning_rate
            )
    # Early rows may have been recorded before any 'lr = ...' line appeared;
    # backfill their NaN learning rates from the first real value.
    fix_initial_nan_learning_rate(train_dict_list)
    fix_initial_nan_learning_rate(test_dict_list)
    return train_dict_list, test_dict_list
|
[
"def",
"parse_log",
"(",
"path_to_log",
")",
":",
"regex_iteration",
"=",
"re",
".",
"compile",
"(",
"'Iteration (\\d+)'",
")",
"regex_train_output",
"=",
"re",
".",
"compile",
"(",
"'Train net output #(\\d+): (\\S+) = ([\\.\\deE+-]+)'",
")",
"regex_test_output",
"=",
"re",
".",
"compile",
"(",
"'Test net output #(\\d+): (\\S+) = ([\\.\\deE+-]+)'",
")",
"regex_learning_rate",
"=",
"re",
".",
"compile",
"(",
"'lr = ([-+]?[0-9]*\\.?[0-9]+([eE]?[-+]?[0-9]+)?)'",
")",
"# Pick out lines of interest",
"iteration",
"=",
"-",
"1",
"learning_rate",
"=",
"float",
"(",
"'NaN'",
")",
"train_dict_list",
"=",
"[",
"]",
"test_dict_list",
"=",
"[",
"]",
"train_row",
"=",
"None",
"test_row",
"=",
"None",
"logfile_year",
"=",
"extract_seconds",
".",
"get_log_created_year",
"(",
"path_to_log",
")",
"with",
"open",
"(",
"path_to_log",
")",
"as",
"f",
":",
"start_time",
"=",
"extract_seconds",
".",
"get_start_time",
"(",
"f",
",",
"logfile_year",
")",
"last_time",
"=",
"start_time",
"for",
"line",
"in",
"f",
":",
"iteration_match",
"=",
"regex_iteration",
".",
"search",
"(",
"line",
")",
"if",
"iteration_match",
":",
"iteration",
"=",
"float",
"(",
"iteration_match",
".",
"group",
"(",
"1",
")",
")",
"if",
"iteration",
"==",
"-",
"1",
":",
"# Only start parsing for other stuff if we've found the first",
"# iteration",
"continue",
"try",
":",
"time",
"=",
"extract_seconds",
".",
"extract_datetime_from_line",
"(",
"line",
",",
"logfile_year",
")",
"except",
"ValueError",
":",
"# Skip lines with bad formatting, for example when resuming solver",
"continue",
"# if it's another year",
"if",
"time",
".",
"month",
"<",
"last_time",
".",
"month",
":",
"logfile_year",
"+=",
"1",
"time",
"=",
"extract_seconds",
".",
"extract_datetime_from_line",
"(",
"line",
",",
"logfile_year",
")",
"last_time",
"=",
"time",
"seconds",
"=",
"(",
"time",
"-",
"start_time",
")",
".",
"total_seconds",
"(",
")",
"learning_rate_match",
"=",
"regex_learning_rate",
".",
"search",
"(",
"line",
")",
"if",
"learning_rate_match",
":",
"learning_rate",
"=",
"float",
"(",
"learning_rate_match",
".",
"group",
"(",
"1",
")",
")",
"train_dict_list",
",",
"train_row",
"=",
"parse_line_for_net_output",
"(",
"regex_train_output",
",",
"train_row",
",",
"train_dict_list",
",",
"line",
",",
"iteration",
",",
"seconds",
",",
"learning_rate",
")",
"test_dict_list",
",",
"test_row",
"=",
"parse_line_for_net_output",
"(",
"regex_test_output",
",",
"test_row",
",",
"test_dict_list",
",",
"line",
",",
"iteration",
",",
"seconds",
",",
"learning_rate",
")",
"fix_initial_nan_learning_rate",
"(",
"train_dict_list",
")",
"fix_initial_nan_learning_rate",
"(",
"test_dict_list",
")",
"return",
"train_dict_list",
",",
"test_dict_list"
] |
https://github.com/lmb-freiburg/ogn/blob/974f72ef4bf840d6f6693d22d1843a79223e77ce/tools/extra/parse_log.py#L17-L83
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/msw/_windows.py
|
python
|
VListBox._setCallbackInfo
|
(*args, **kwargs)
|
return _windows_.VListBox__setCallbackInfo(*args, **kwargs)
|
_setCallbackInfo(self, PyObject self, PyObject _class)
|
_setCallbackInfo(self, PyObject self, PyObject _class)
|
[
"_setCallbackInfo",
"(",
"self",
"PyObject",
"self",
"PyObject",
"_class",
")"
] |
def _setCallbackInfo(*args, **kwargs):
    """_setCallbackInfo(self, PyObject self, PyObject _class)"""
    # Thin wrapper: delegate everything to the SWIG-generated extension call.
    result = _windows_.VListBox__setCallbackInfo(*args, **kwargs)
    return result
|
[
"def",
"_setCallbackInfo",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_windows_",
".",
"VListBox__setCallbackInfo",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_windows.py#L2593-L2595
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/osx_carbon/xrc.py
|
python
|
XmlResource.GetDomain
|
(*args, **kwargs)
|
return _xrc.XmlResource_GetDomain(*args, **kwargs)
|
GetDomain(self) -> String
|
GetDomain(self) -> String
|
[
"GetDomain",
"(",
"self",
")",
"-",
">",
"String"
] |
def GetDomain(*args, **kwargs):
    """GetDomain(self) -> String"""
    # Thin wrapper: forward unchanged to the SWIG-generated extension call.
    domain = _xrc.XmlResource_GetDomain(*args, **kwargs)
    return domain
|
[
"def",
"GetDomain",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_xrc",
".",
"XmlResource_GetDomain",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/xrc.py#L218-L220
|
|
snap-stanford/snap-python
|
d53c51b0a26aa7e3e7400b014cdf728948fde80a
|
setup/snap.py
|
python
|
TStrV.Ins
|
(self, *args)
|
return _snap.TStrV_Ins(self, *args)
|
Ins(TStrV self, int const & ValN, TStr Val)
Parameters:
ValN: int const &
Val: TStr const &
|
Ins(TStrV self, int const & ValN, TStr Val)
|
[
"Ins",
"(",
"TStrV",
"self",
"int",
"const",
"&",
"ValN",
"TStr",
"Val",
")"
] |
def Ins(self, *args):
    """
    Ins(TStrV self, int const & ValN, TStr Val)

    Parameters:
        ValN: int const &
        Val: TStr const &
    """
    # Thin wrapper: delegate to the SWIG-generated flat function for TStrV.
    result = _snap.TStrV_Ins(self, *args)
    return result
|
[
"def",
"Ins",
"(",
"self",
",",
"*",
"args",
")",
":",
"return",
"_snap",
".",
"TStrV_Ins",
"(",
"self",
",",
"*",
"args",
")"
] |
https://github.com/snap-stanford/snap-python/blob/d53c51b0a26aa7e3e7400b014cdf728948fde80a/setup/snap.py#L19538-L19547
|
|
psi4/psi4
|
be533f7f426b6ccc263904e55122899b16663395
|
psi4/driver/qcdb/dbwrap.py
|
python
|
DB4.make_dhdft_Figure_1
|
(self)
|
Plot all the graphics needed for the grey bars plot
in Fig. 1 of DHDFT.
|
Plot all the graphics needed for the grey bars plot
in Fig. 1 of DHDFT.
|
[
"Plot",
"all",
"the",
"graphics",
"needed",
"for",
"the",
"grey",
"bars",
"plot",
"in",
"Fig",
".",
"1",
"of",
"DHDFT",
"."
] |
def make_dhdft_Figure_1(self):
    """Plot all the graphics needed for the grey bars plot
    in Fig. 1 of DHDFT.
    """
    # Five panels (a)-(e); None entries are gap placeholders between method
    # groups. Plotted in order so the output matches the figure layout.
    panels = [
        # Fig. bars (a)
        ['M052X-unCP-adz', 'M052X-CP-adz', 'M052X-unCP-atz', 'M052X-CP-atz', None,
         'M062X-unCP-adz', 'M062X-CP-adz', 'M062X-unCP-atz', 'M062X-CP-atz', None,
         'M08SO-unCP-adz', 'M08SO-CP-adz', 'M08SO-unCP-atz', 'M08SO-CP-atz', None,
         'M08HX-unCP-adz', 'M08HX-CP-adz', 'M08HX-unCP-atz', 'M08HX-CP-atz', None,
         'M11-unCP-adz', 'M11-CP-adz', 'M11-unCP-atz', 'M11-CP-atz', None,
         'M11L-unCP-adz', 'M11L-CP-adz', 'M11L-unCP-atz', 'M11L-CP-atz'],
        # Fig. bars (b)
        ['PBED3-unCP-adz', 'PBED3-CP-adz', 'PBED3-unCP-atz', 'PBED3-CP-atz', None,
         'B97D3-unCP-adz', 'B97D3-CP-adz', 'B97D3-unCP-atz', 'B97D3-CP-atz', None,
         'PBE0D3-unCP-adz', 'PBE0D3-CP-adz', 'PBE0D3-unCP-atz', 'PBE0D3-CP-atz', None,
         'B3LYPD3-unCP-adz', 'B3LYPD3-CP-adz', 'B3LYPD3-unCP-atz', 'B3LYPD3-CP-atz', None,
         'DLDFD-unCP-adz', 'DLDFD-CP-adz', 'DLDFD-unCP-atz', 'DLDFD-CP-atz', None,
         'WB97XD-unCP-adz', 'WB97XD-CP-adz', 'WB97XD-unCP-atz', 'WB97XD-CP-atz'],
        # Fig. bars (c)
        ['VV10-unCP-adz', 'VV10-CP-adz', 'VV10-unCP-atz', 'VV10-CP-atz', None, None,
         'LCVV10-unCP-adz', 'LCVV10-CP-adz', 'LCVV10-unCP-atz', 'LCVV10-CP-atz', None, None,
         'WB97XV-unCP-adz', 'WB97XV-CP-adz', 'WB97XV-unCP-atz', 'WB97XV-CP-atz'],
        # Fig. bars (d)
        ['PBE02-unCP-adz', 'PBE02-CP-adz', 'PBE02-unCP-atz', 'PBE02-CP-atz', None,
         'WB97X2-unCP-adz', 'WB97X2-CP-adz', 'WB97X2-unCP-atz', 'WB97X2-CP-atz', None,
         'B2PLYPD3-unCP-adz', 'B2PLYPD3-CP-adz', 'B2PLYPD3-unCP-atz', 'B2PLYPD3-CP-atz', None,
         'DSDPBEP86D2OPT-unCP-adz', 'DSDPBEP86D2OPT-CP-adz', 'DSDPBEP86D2OPT-unCP-atz', 'DSDPBEP86D2OPT-CP-atz'],
        # Fig. bars (e)
        ['MP2-unCP-adz', 'MP2-CP-adz', 'MP2-unCP-atz', 'MP2-CP-atz'],
    ]
    for methods in panels:
        # Fresh sset list per call, matching the original literal arguments.
        self.plot_bars(methods, sset=['tt-5min', 'hb-5min', 'mx-5min', 'dd-5min'])
|
[
"def",
"make_dhdft_Figure_1",
"(",
"self",
")",
":",
"# Fig. bars (a)",
"self",
".",
"plot_bars",
"(",
"[",
"'M052X-unCP-adz'",
",",
"'M052X-CP-adz'",
",",
"'M052X-unCP-atz'",
",",
"'M052X-CP-atz'",
",",
"None",
",",
"'M062X-unCP-adz'",
",",
"'M062X-CP-adz'",
",",
"'M062X-unCP-atz'",
",",
"'M062X-CP-atz'",
",",
"None",
",",
"'M08SO-unCP-adz'",
",",
"'M08SO-CP-adz'",
",",
"'M08SO-unCP-atz'",
",",
"'M08SO-CP-atz'",
",",
"None",
",",
"'M08HX-unCP-adz'",
",",
"'M08HX-CP-adz'",
",",
"'M08HX-unCP-atz'",
",",
"'M08HX-CP-atz'",
",",
"None",
",",
"'M11-unCP-adz'",
",",
"'M11-CP-adz'",
",",
"'M11-unCP-atz'",
",",
"'M11-CP-atz'",
",",
"None",
",",
"'M11L-unCP-adz'",
",",
"'M11L-CP-adz'",
",",
"'M11L-unCP-atz'",
",",
"'M11L-CP-atz'",
"]",
",",
"sset",
"=",
"[",
"'tt-5min'",
",",
"'hb-5min'",
",",
"'mx-5min'",
",",
"'dd-5min'",
"]",
")",
"# Fig. bars (b)",
"self",
".",
"plot_bars",
"(",
"[",
"'PBED3-unCP-adz'",
",",
"'PBED3-CP-adz'",
",",
"'PBED3-unCP-atz'",
",",
"'PBED3-CP-atz'",
",",
"None",
",",
"'B97D3-unCP-adz'",
",",
"'B97D3-CP-adz'",
",",
"'B97D3-unCP-atz'",
",",
"'B97D3-CP-atz'",
",",
"None",
",",
"'PBE0D3-unCP-adz'",
",",
"'PBE0D3-CP-adz'",
",",
"'PBE0D3-unCP-atz'",
",",
"'PBE0D3-CP-atz'",
",",
"None",
",",
"'B3LYPD3-unCP-adz'",
",",
"'B3LYPD3-CP-adz'",
",",
"'B3LYPD3-unCP-atz'",
",",
"'B3LYPD3-CP-atz'",
",",
"None",
",",
"'DLDFD-unCP-adz'",
",",
"'DLDFD-CP-adz'",
",",
"'DLDFD-unCP-atz'",
",",
"'DLDFD-CP-atz'",
",",
"None",
",",
"'WB97XD-unCP-adz'",
",",
"'WB97XD-CP-adz'",
",",
"'WB97XD-unCP-atz'",
",",
"'WB97XD-CP-atz'",
"]",
",",
"sset",
"=",
"[",
"'tt-5min'",
",",
"'hb-5min'",
",",
"'mx-5min'",
",",
"'dd-5min'",
"]",
")",
"# Fig. bars (c)",
"self",
".",
"plot_bars",
"(",
"[",
"'VV10-unCP-adz'",
",",
"'VV10-CP-adz'",
",",
"'VV10-unCP-atz'",
",",
"'VV10-CP-atz'",
",",
"None",
",",
"None",
",",
"'LCVV10-unCP-adz'",
",",
"'LCVV10-CP-adz'",
",",
"'LCVV10-unCP-atz'",
",",
"'LCVV10-CP-atz'",
",",
"None",
",",
"None",
",",
"'WB97XV-unCP-adz'",
",",
"'WB97XV-CP-adz'",
",",
"'WB97XV-unCP-atz'",
",",
"'WB97XV-CP-atz'",
"]",
",",
"sset",
"=",
"[",
"'tt-5min'",
",",
"'hb-5min'",
",",
"'mx-5min'",
",",
"'dd-5min'",
"]",
")",
"# Fig. bars (d)",
"self",
".",
"plot_bars",
"(",
"[",
"'PBE02-unCP-adz'",
",",
"'PBE02-CP-adz'",
",",
"'PBE02-unCP-atz'",
",",
"'PBE02-CP-atz'",
",",
"None",
",",
"'WB97X2-unCP-adz'",
",",
"'WB97X2-CP-adz'",
",",
"'WB97X2-unCP-atz'",
",",
"'WB97X2-CP-atz'",
",",
"None",
",",
"'B2PLYPD3-unCP-adz'",
",",
"'B2PLYPD3-CP-adz'",
",",
"'B2PLYPD3-unCP-atz'",
",",
"'B2PLYPD3-CP-atz'",
",",
"None",
",",
"'DSDPBEP86D2OPT-unCP-adz'",
",",
"'DSDPBEP86D2OPT-CP-adz'",
",",
"'DSDPBEP86D2OPT-unCP-atz'",
",",
"'DSDPBEP86D2OPT-CP-atz'",
"]",
",",
"sset",
"=",
"[",
"'tt-5min'",
",",
"'hb-5min'",
",",
"'mx-5min'",
",",
"'dd-5min'",
"]",
")",
"# Fig. bars (e)",
"self",
".",
"plot_bars",
"(",
"[",
"'MP2-unCP-adz'",
",",
"'MP2-CP-adz'",
",",
"'MP2-unCP-atz'",
",",
"'MP2-CP-atz'",
"]",
",",
"sset",
"=",
"[",
"'tt-5min'",
",",
"'hb-5min'",
",",
"'mx-5min'",
",",
"'dd-5min'",
"]",
")"
] |
https://github.com/psi4/psi4/blob/be533f7f426b6ccc263904e55122899b16663395/psi4/driver/qcdb/dbwrap.py#L3284-L3327
|
||
google/shaka-packager
|
e1b0c7c45431327fd3ce193514a5407d07b39b22
|
packager/third_party/protobuf/python/google/protobuf/internal/python_message.py
|
python
|
_ExtensionDict._FindExtensionByNumber
|
(self, number)
|
return self._extended_message._extensions_by_number.get(number, None)
|
Tries to find a known extension with the field number.
Args:
number: Extension field number.
Returns:
Extension field descriptor.
|
Tries to find a known extension with the field number.
|
[
"Tries",
"to",
"find",
"a",
"known",
"extension",
"with",
"the",
"field",
"number",
"."
] |
def _FindExtensionByNumber(self, number):
"""Tries to find a known extension with the field number.
Args:
number: Extension field number.
Returns:
Extension field descriptor.
"""
return self._extended_message._extensions_by_number.get(number, None)
|
[
"def",
"_FindExtensionByNumber",
"(",
"self",
",",
"number",
")",
":",
"return",
"self",
".",
"_extended_message",
".",
"_extensions_by_number",
".",
"get",
"(",
"number",
",",
"None",
")"
] |
https://github.com/google/shaka-packager/blob/e1b0c7c45431327fd3ce193514a5407d07b39b22/packager/third_party/protobuf/python/google/protobuf/internal/python_message.py#L1529-L1538
|
|
krishauser/Klampt
|
972cc83ea5befac3f653c1ba20f80155768ad519
|
Python/python2_version/klampt/vis/ipython/widgets.py
|
python
|
EditPoint
|
(value=None,min=None,max=None,labels=None,
klampt_widget=None,point_name='edited_point',point_radius=DEFAULT_POINT_RADIUS,
callback=None)
|
return widgets.VBox(elems)
|
Creates a Jupyter widget for interactive editing of an xyz point
Args:
value (list of 3 floats, optional): the initial value of the point. If given, this must
be a list and will hold the edited values.
min/max (list of 3 floats, optional): the minimum and maximum of the point
labels (list of strs, optional): if given, the labels of each channel
klampt_widget (KlamptWidget, optional): the KlamptWidget visualization to update,
or None if you don't want to visualize the point.
point_name (str, optional): the name of the point in the visualization world to edit.
point_radius (float, optional): the radius of the visualized point.
callback (function ,optional): a function callback(xyz) called when a DOF's value has changed.
Returns:
VBox: a widget that can be displayed as you like
|
Creates a Jupyter widget for interactive editing of an xyz point
|
[
"Creates",
"a",
"Jupyter",
"widget",
"for",
"interactive",
"editing",
"of",
"an",
"xyz",
"point"
] |
def EditPoint(value=None,min=None,max=None,labels=None,
    klampt_widget=None,point_name='edited_point',point_radius=DEFAULT_POINT_RADIUS,
    callback=None):
    """Creates a Jupyter widget for interactive editing of an xyz point

    Args:
        value (list of 3 floats, optional): the initial value of the point. If given, this must
            be a list and will hold the edited values (it is mutated in place).
        min/max (list of 3 floats, optional): the minimum and maximum of the point
        labels (list of strs, optional): if given, the labels of each channel
        klampt_widget (KlamptWidget, optional): the KlamptWidget visualization to update,
            or None if you don't want to visualize the point.
        point_name (str, optional): the name of the point in the visualization world to edit.
        point_radius (float, optional): the radius of the visualized point.
        callback (function, optional): a function callback(xyz) called when a DOF's value has changed.

    Returns:
        VBox: a widget that can be displayed as you like
    """
    # Normalize / validate the point value. NOTE: when the caller supplies a
    # list it is kept (not copied) so that edits are visible to the caller.
    if value is None:
        value = [0, 0, 0]
    elif not isinstance(value, list) or len(value) != 3:
        raise ValueError("value must be a 3-element list")
    if labels is None:
        labels = 'xyz'
    # Normalize bounds: scalars broadcast to all three channels.
    if min is None:
        min = [-5] * 3
    elif isinstance(min, (int, float)):
        min = [min] * 3
    if max is None:
        max = [5] * 3
    elif isinstance(max, (int, float)):
        max = [max] * 3
    if len(min) != 3:
        raise ValueError("min must be a 3-element list")
    if len(max) != 3:
        raise ValueError("max must be a 3-element list")

    def _refresh_sphere():
        # Re-adding a sphere with the same name moves the visualized point.
        klampt_widget.addSphere(name=point_name, x=value[0], y=value[1], z=value[2], r=point_radius)

    if klampt_widget:
        _refresh_sphere()

    def _on_slider_change(axis, newval):
        value[axis] = newval
        if klampt_widget:
            _refresh_sphere()
        if callback:
            callback(value)

    sliders = []
    for axis in range(3):
        slider = widgets.FloatSlider(description=labels[axis], value=value[axis],
                                     min=min[axis], max=max[axis], step=0.001)
        # Bind the loop variable as a default to avoid Python's late-binding
        # closure pitfall.
        slider.observe(lambda change, axis=axis: _on_slider_change(axis, change['new']), 'value')
        sliders.append(slider)
    return widgets.VBox(sliders)
|
[
"def",
"EditPoint",
"(",
"value",
"=",
"None",
",",
"min",
"=",
"None",
",",
"max",
"=",
"None",
",",
"labels",
"=",
"None",
",",
"klampt_widget",
"=",
"None",
",",
"point_name",
"=",
"'edited_point'",
",",
"point_radius",
"=",
"DEFAULT_POINT_RADIUS",
",",
"callback",
"=",
"None",
")",
":",
"if",
"value",
"is",
"None",
":",
"value",
"=",
"[",
"0",
",",
"0",
",",
"0",
"]",
"else",
":",
"if",
"not",
"isinstance",
"(",
"value",
",",
"list",
")",
":",
"raise",
"ValueError",
"(",
"\"value must be a 3-element list\"",
")",
"if",
"len",
"(",
"value",
")",
"!=",
"3",
":",
"raise",
"ValueError",
"(",
"\"value must be a 3-element list\"",
")",
"if",
"labels",
"is",
"None",
":",
"labels",
"=",
"'xyz'",
"if",
"min",
"is",
"None",
":",
"min",
"=",
"[",
"-",
"5",
",",
"-",
"5",
",",
"-",
"5",
"]",
"elif",
"isinstance",
"(",
"min",
",",
"(",
"int",
",",
"float",
")",
")",
":",
"min",
"=",
"[",
"min",
",",
"min",
",",
"min",
"]",
"if",
"max",
"is",
"None",
":",
"max",
"=",
"[",
"5",
",",
"5",
",",
"5",
"]",
"elif",
"isinstance",
"(",
"max",
",",
"(",
"int",
",",
"float",
")",
")",
":",
"max",
"=",
"[",
"max",
",",
"max",
",",
"max",
"]",
"if",
"len",
"(",
"min",
")",
"!=",
"3",
":",
"raise",
"ValueError",
"(",
"\"min must be a 3-element list\"",
")",
"if",
"len",
"(",
"max",
")",
"!=",
"3",
":",
"raise",
"ValueError",
"(",
"\"max must be a 3-element list\"",
")",
"if",
"klampt_widget",
":",
"klampt_widget",
".",
"addSphere",
"(",
"name",
"=",
"point_name",
",",
"x",
"=",
"value",
"[",
"0",
"]",
",",
"y",
"=",
"value",
"[",
"1",
"]",
",",
"z",
"=",
"value",
"[",
"2",
"]",
",",
"r",
"=",
"point_radius",
")",
"def",
"_dochange",
"(",
"index",
",",
"element",
")",
":",
"value",
"[",
"index",
"]",
"=",
"element",
"if",
"klampt_widget",
":",
"klampt_widget",
".",
"addSphere",
"(",
"name",
"=",
"point_name",
",",
"x",
"=",
"value",
"[",
"0",
"]",
",",
"y",
"=",
"value",
"[",
"1",
"]",
",",
"z",
"=",
"value",
"[",
"2",
"]",
",",
"r",
"=",
"point_radius",
")",
"if",
"callback",
":",
"callback",
"(",
"value",
")",
"elems",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"3",
")",
":",
"elems",
".",
"append",
"(",
"widgets",
".",
"FloatSlider",
"(",
"description",
"=",
"labels",
"[",
"i",
"]",
",",
"value",
"=",
"value",
"[",
"i",
"]",
",",
"min",
"=",
"min",
"[",
"i",
"]",
",",
"max",
"=",
"max",
"[",
"i",
"]",
",",
"step",
"=",
"0.001",
")",
")",
"elems",
"[",
"-",
"1",
"]",
".",
"observe",
"(",
"lambda",
"v",
",",
"i",
"=",
"i",
":",
"_dochange",
"(",
"i",
",",
"v",
"[",
"'new'",
"]",
")",
",",
"'value'",
")",
"return",
"widgets",
".",
"VBox",
"(",
"elems",
")"
] |
https://github.com/krishauser/Klampt/blob/972cc83ea5befac3f653c1ba20f80155768ad519/Python/python2_version/klampt/vis/ipython/widgets.py#L703-L755
|
|
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/ipython/py2/IPython/core/display.py
|
python
|
Image.__init__
|
(self, data=None, url=None, filename=None, format=None,
embed=None, width=None, height=None, retina=False,
unconfined=False, metadata=None)
|
Create a PNG/JPEG image object given raw data.
When this object is returned by an input cell or passed to the
display function, it will result in the image being displayed
in the frontend.
Parameters
----------
data : unicode, str or bytes
The raw image data or a URL or filename to load the data from.
This always results in embedded image data.
url : unicode
A URL to download the data from. If you specify `url=`,
the image data will not be embedded unless you also specify `embed=True`.
filename : unicode
Path to a local file to load the data from.
Images from a file are always embedded.
format : unicode
The format of the image data (png/jpeg/jpg). If a filename or URL is given
for format will be inferred from the filename extension.
embed : bool
Should the image data be embedded using a data URI (True) or be
loaded using an <img> tag. Set this to True if you want the image
to be viewable later with no internet connection in the notebook.
Default is `True`, unless the keyword argument `url` is set, then
default value is `False`.
Note that QtConsole is not able to display images if `embed` is set to `False`
width : int
Width in pixels to which to constrain the image in html
height : int
Height in pixels to which to constrain the image in html
retina : bool
Automatically set the width and height to half of the measured
width and height.
This only works for embedded images because it reads the width/height
from image data.
For non-embedded images, you can just set the desired display width
and height directly.
unconfined: bool
Set unconfined=True to disable max-width confinement of the image.
metadata: dict
Specify extra metadata to attach to the image.
Examples
--------
# embedded image data, works in qtconsole and notebook
# when passed positionally, the first arg can be any of raw image data,
# a URL, or a filename from which to load image data.
# The result is always embedding image data for inline images.
Image('http://www.google.fr/images/srpr/logo3w.png')
Image('/path/to/image.jpg')
Image(b'RAW_PNG_DATA...')
# Specifying Image(url=...) does not embed the image data,
# it only generates `<img>` tag with a link to the source.
# This will not work in the qtconsole or offline.
Image(url='http://www.google.fr/images/srpr/logo3w.png')
|
Create a PNG/JPEG image object given raw data.
|
[
"Create",
"a",
"PNG",
"/",
"JPEG",
"image",
"object",
"given",
"raw",
"data",
"."
] |
def __init__(self, data=None, url=None, filename=None, format=None,
embed=None, width=None, height=None, retina=False,
unconfined=False, metadata=None):
"""Create a PNG/JPEG image object given raw data.
When this object is returned by an input cell or passed to the
display function, it will result in the image being displayed
in the frontend.
Parameters
----------
data : unicode, str or bytes
The raw image data or a URL or filename to load the data from.
This always results in embedded image data.
url : unicode
A URL to download the data from. If you specify `url=`,
the image data will not be embedded unless you also specify `embed=True`.
filename : unicode
Path to a local file to load the data from.
Images from a file are always embedded.
format : unicode
The format of the image data (png/jpeg/jpg). If a filename or URL is given
for format will be inferred from the filename extension.
embed : bool
Should the image data be embedded using a data URI (True) or be
loaded using an <img> tag. Set this to True if you want the image
to be viewable later with no internet connection in the notebook.
Default is `True`, unless the keyword argument `url` is set, then
default value is `False`.
Note that QtConsole is not able to display images if `embed` is set to `False`
width : int
Width in pixels to which to constrain the image in html
height : int
Height in pixels to which to constrain the image in html
retina : bool
Automatically set the width and height to half of the measured
width and height.
This only works for embedded images because it reads the width/height
from image data.
For non-embedded images, you can just set the desired display width
and height directly.
unconfined: bool
Set unconfined=True to disable max-width confinement of the image.
metadata: dict
Specify extra metadata to attach to the image.
Examples
--------
# embedded image data, works in qtconsole and notebook
# when passed positionally, the first arg can be any of raw image data,
# a URL, or a filename from which to load image data.
# The result is always embedding image data for inline images.
Image('http://www.google.fr/images/srpr/logo3w.png')
Image('/path/to/image.jpg')
Image(b'RAW_PNG_DATA...')
# Specifying Image(url=...) does not embed the image data,
# it only generates `<img>` tag with a link to the source.
# This will not work in the qtconsole or offline.
Image(url='http://www.google.fr/images/srpr/logo3w.png')
"""
if filename is not None:
ext = self._find_ext(filename)
elif url is not None:
ext = self._find_ext(url)
elif data is None:
raise ValueError("No image data found. Expecting filename, url, or data.")
elif isinstance(data, string_types) and (
data.startswith('http') or _safe_exists(data)
):
ext = self._find_ext(data)
else:
ext = None
if format is None:
if ext is not None:
if ext == u'jpg' or ext == u'jpeg':
format = self._FMT_JPEG
elif ext == u'png':
format = self._FMT_PNG
else:
format = ext.lower()
elif isinstance(data, bytes):
# infer image type from image data header,
# only if format has not been specified.
if data[:2] == _JPEG:
format = self._FMT_JPEG
# failed to detect format, default png
if format is None:
format = 'png'
if format.lower() == 'jpg':
# jpg->jpeg
format = self._FMT_JPEG
self.format = unicode_type(format).lower()
self.embed = embed if embed is not None else (url is None)
if self.embed and self.format not in self._ACCEPTABLE_EMBEDDINGS:
raise ValueError("Cannot embed the '%s' image format" % (self.format))
self.width = width
self.height = height
self.retina = retina
self.unconfined = unconfined
self.metadata = metadata
super(Image, self).__init__(data=data, url=url, filename=filename)
if retina:
self._retina_shape()
|
[
"def",
"__init__",
"(",
"self",
",",
"data",
"=",
"None",
",",
"url",
"=",
"None",
",",
"filename",
"=",
"None",
",",
"format",
"=",
"None",
",",
"embed",
"=",
"None",
",",
"width",
"=",
"None",
",",
"height",
"=",
"None",
",",
"retina",
"=",
"False",
",",
"unconfined",
"=",
"False",
",",
"metadata",
"=",
"None",
")",
":",
"if",
"filename",
"is",
"not",
"None",
":",
"ext",
"=",
"self",
".",
"_find_ext",
"(",
"filename",
")",
"elif",
"url",
"is",
"not",
"None",
":",
"ext",
"=",
"self",
".",
"_find_ext",
"(",
"url",
")",
"elif",
"data",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"\"No image data found. Expecting filename, url, or data.\"",
")",
"elif",
"isinstance",
"(",
"data",
",",
"string_types",
")",
"and",
"(",
"data",
".",
"startswith",
"(",
"'http'",
")",
"or",
"_safe_exists",
"(",
"data",
")",
")",
":",
"ext",
"=",
"self",
".",
"_find_ext",
"(",
"data",
")",
"else",
":",
"ext",
"=",
"None",
"if",
"format",
"is",
"None",
":",
"if",
"ext",
"is",
"not",
"None",
":",
"if",
"ext",
"==",
"u'jpg'",
"or",
"ext",
"==",
"u'jpeg'",
":",
"format",
"=",
"self",
".",
"_FMT_JPEG",
"elif",
"ext",
"==",
"u'png'",
":",
"format",
"=",
"self",
".",
"_FMT_PNG",
"else",
":",
"format",
"=",
"ext",
".",
"lower",
"(",
")",
"elif",
"isinstance",
"(",
"data",
",",
"bytes",
")",
":",
"# infer image type from image data header,",
"# only if format has not been specified.",
"if",
"data",
"[",
":",
"2",
"]",
"==",
"_JPEG",
":",
"format",
"=",
"self",
".",
"_FMT_JPEG",
"# failed to detect format, default png",
"if",
"format",
"is",
"None",
":",
"format",
"=",
"'png'",
"if",
"format",
".",
"lower",
"(",
")",
"==",
"'jpg'",
":",
"# jpg->jpeg",
"format",
"=",
"self",
".",
"_FMT_JPEG",
"self",
".",
"format",
"=",
"unicode_type",
"(",
"format",
")",
".",
"lower",
"(",
")",
"self",
".",
"embed",
"=",
"embed",
"if",
"embed",
"is",
"not",
"None",
"else",
"(",
"url",
"is",
"None",
")",
"if",
"self",
".",
"embed",
"and",
"self",
".",
"format",
"not",
"in",
"self",
".",
"_ACCEPTABLE_EMBEDDINGS",
":",
"raise",
"ValueError",
"(",
"\"Cannot embed the '%s' image format\"",
"%",
"(",
"self",
".",
"format",
")",
")",
"self",
".",
"width",
"=",
"width",
"self",
".",
"height",
"=",
"height",
"self",
".",
"retina",
"=",
"retina",
"self",
".",
"unconfined",
"=",
"unconfined",
"self",
".",
"metadata",
"=",
"metadata",
"super",
"(",
"Image",
",",
"self",
")",
".",
"__init__",
"(",
"data",
"=",
"data",
",",
"url",
"=",
"url",
",",
"filename",
"=",
"filename",
")",
"if",
"retina",
":",
"self",
".",
"_retina_shape",
"(",
")"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/ipython/py2/IPython/core/display.py#L934-L1046
|
||
hanpfei/chromium-net
|
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
|
third_party/catapult/third_party/gsutil/third_party/rsa/rsa/_version200.py
|
python
|
gluechops
|
(string, key, n, funcref)
|
return message
|
Glues chops back together into a string. calls
funcref(integer, key, n) for each chop.
Used by 'decrypt' and 'verify'.
|
Glues chops back together into a string. calls
funcref(integer, key, n) for each chop.
|
[
"Glues",
"chops",
"back",
"together",
"into",
"a",
"string",
".",
"calls",
"funcref",
"(",
"integer",
"key",
"n",
")",
"for",
"each",
"chop",
"."
] |
def gluechops(string, key, n, funcref):
"""Glues chops back together into a string. calls
funcref(integer, key, n) for each chop.
Used by 'decrypt' and 'verify'.
"""
message = ""
chops = decode64chops(string) #Decode base64 strings into integer chops
for cpart in chops:
mpart = funcref(cpart, key, n) #Decrypt each chop
message += int2bytes(mpart) #Combine decrypted strings into a msg
return message
|
[
"def",
"gluechops",
"(",
"string",
",",
"key",
",",
"n",
",",
"funcref",
")",
":",
"message",
"=",
"\"\"",
"chops",
"=",
"decode64chops",
"(",
"string",
")",
"#Decode base64 strings into integer chops",
"for",
"cpart",
"in",
"chops",
":",
"mpart",
"=",
"funcref",
"(",
"cpart",
",",
"key",
",",
"n",
")",
"#Decrypt each chop",
"message",
"+=",
"int2bytes",
"(",
"mpart",
")",
"#Combine decrypted strings into a msg",
"return",
"message"
] |
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/gsutil/third_party/rsa/rsa/_version200.py#L479-L493
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/build/waf-1.7.13/lmbrwaflib/lumberyard.py
|
python
|
get_engine_node
|
(ctx)
|
return ctx.engine_node
|
Determine the engine root path from SetupAssistantUserPreferences. if it exists
|
Determine the engine root path from SetupAssistantUserPreferences. if it exists
|
[
"Determine",
"the",
"engine",
"root",
"path",
"from",
"SetupAssistantUserPreferences",
".",
"if",
"it",
"exists"
] |
def get_engine_node(ctx):
"""
Determine the engine root path from SetupAssistantUserPreferences. if it exists
"""
try:
return ctx.engine_node
except AttributeError:
pass
# Root context path must have an engine.json file, regardless if this is an internal or external project
engine_json_file_path = ctx.path.make_node('engine.json').abspath()
try:
if not os.path.exists(engine_json_file_path):
raise Errors.WafError("Invalid context path '{}'. The base project path must contain a valid engine.json file.".format(engine_json_file_path))
engine_json = parse_json_file(engine_json_file_path)
except ValueError as e:
raise Errors.WafError("Invalid context path '{}'. The base project path must contain a valid engine.json file. ({})".format(engine_json_file_path, e))
ctx.engine_root_version = engine_json.get('LumberyardVersion', '0.0.0.0')
if 'ExternalEnginePath' in engine_json:
# An external engine path was specified, get its path and set the engine_node appropriately
external_engine_path = engine_json['ExternalEnginePath']
if os.path.isabs(external_engine_path):
engine_root_abs = external_engine_path
else:
engine_root_abs = os.path.normpath(os.path.join(ctx.path.abspath(), external_engine_path))
if not os.path.exists(engine_root_abs):
ctx.fatal('[ERROR] Invalid external engine path in engine.json : {}'.format(engine_root_abs))
ctx.engine_node = ctx.root.make_node(engine_root_abs)
# Warn if the external engine version is different from the last time this project was built
last_build_engine_version_node = ctx.get_bintemp_folder_node().make_node(LAST_ENGINE_BUILD_VERSION_TAG_FILE)
if os.path.exists(last_build_engine_version_node.abspath()):
last_built_version = last_build_engine_version_node.read()
if last_built_version != ctx.engine_root_version:
Logs.warn('[WARN] The current engine version ({}) does not match the last version {} that this project was built against'.format(ctx.engine_root_version, last_built_version))
last_build_engine_version_node.write(ctx.engine_root_version)
else:
ctx.engine_node = ctx.path
ctx.engine_path = ctx.engine_node.abspath()
return ctx.engine_node
|
[
"def",
"get_engine_node",
"(",
"ctx",
")",
":",
"try",
":",
"return",
"ctx",
".",
"engine_node",
"except",
"AttributeError",
":",
"pass",
"# Root context path must have an engine.json file, regardless if this is an internal or external project",
"engine_json_file_path",
"=",
"ctx",
".",
"path",
".",
"make_node",
"(",
"'engine.json'",
")",
".",
"abspath",
"(",
")",
"try",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"engine_json_file_path",
")",
":",
"raise",
"Errors",
".",
"WafError",
"(",
"\"Invalid context path '{}'. The base project path must contain a valid engine.json file.\"",
".",
"format",
"(",
"engine_json_file_path",
")",
")",
"engine_json",
"=",
"parse_json_file",
"(",
"engine_json_file_path",
")",
"except",
"ValueError",
"as",
"e",
":",
"raise",
"Errors",
".",
"WafError",
"(",
"\"Invalid context path '{}'. The base project path must contain a valid engine.json file. ({})\"",
".",
"format",
"(",
"engine_json_file_path",
",",
"e",
")",
")",
"ctx",
".",
"engine_root_version",
"=",
"engine_json",
".",
"get",
"(",
"'LumberyardVersion'",
",",
"'0.0.0.0'",
")",
"if",
"'ExternalEnginePath'",
"in",
"engine_json",
":",
"# An external engine path was specified, get its path and set the engine_node appropriately",
"external_engine_path",
"=",
"engine_json",
"[",
"'ExternalEnginePath'",
"]",
"if",
"os",
".",
"path",
".",
"isabs",
"(",
"external_engine_path",
")",
":",
"engine_root_abs",
"=",
"external_engine_path",
"else",
":",
"engine_root_abs",
"=",
"os",
".",
"path",
".",
"normpath",
"(",
"os",
".",
"path",
".",
"join",
"(",
"ctx",
".",
"path",
".",
"abspath",
"(",
")",
",",
"external_engine_path",
")",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"engine_root_abs",
")",
":",
"ctx",
".",
"fatal",
"(",
"'[ERROR] Invalid external engine path in engine.json : {}'",
".",
"format",
"(",
"engine_root_abs",
")",
")",
"ctx",
".",
"engine_node",
"=",
"ctx",
".",
"root",
".",
"make_node",
"(",
"engine_root_abs",
")",
"# Warn if the external engine version is different from the last time this project was built",
"last_build_engine_version_node",
"=",
"ctx",
".",
"get_bintemp_folder_node",
"(",
")",
".",
"make_node",
"(",
"LAST_ENGINE_BUILD_VERSION_TAG_FILE",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"last_build_engine_version_node",
".",
"abspath",
"(",
")",
")",
":",
"last_built_version",
"=",
"last_build_engine_version_node",
".",
"read",
"(",
")",
"if",
"last_built_version",
"!=",
"ctx",
".",
"engine_root_version",
":",
"Logs",
".",
"warn",
"(",
"'[WARN] The current engine version ({}) does not match the last version {} that this project was built against'",
".",
"format",
"(",
"ctx",
".",
"engine_root_version",
",",
"last_built_version",
")",
")",
"last_build_engine_version_node",
".",
"write",
"(",
"ctx",
".",
"engine_root_version",
")",
"else",
":",
"ctx",
".",
"engine_node",
"=",
"ctx",
".",
"path",
"ctx",
".",
"engine_path",
"=",
"ctx",
".",
"engine_node",
".",
"abspath",
"(",
")",
"return",
"ctx",
".",
"engine_node"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/build/waf-1.7.13/lmbrwaflib/lumberyard.py#L1421-L1463
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/osx_cocoa/_core.py
|
python
|
AcceleratorEntry.ToString
|
(*args, **kwargs)
|
return _core_.AcceleratorEntry_ToString(*args, **kwargs)
|
ToString(self) -> String
Returns a string representation for the this accelerator. The string
is formatted using the <flags>-<keycode> format where <flags> maybe a
hyphen-separed list of "shift|alt|ctrl"
|
ToString(self) -> String
|
[
"ToString",
"(",
"self",
")",
"-",
">",
"String"
] |
def ToString(*args, **kwargs):
"""
ToString(self) -> String
Returns a string representation for the this accelerator. The string
is formatted using the <flags>-<keycode> format where <flags> maybe a
hyphen-separed list of "shift|alt|ctrl"
"""
return _core_.AcceleratorEntry_ToString(*args, **kwargs)
|
[
"def",
"ToString",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_core_",
".",
"AcceleratorEntry_ToString",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_core.py#L8960-L8969
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/osx_carbon/stc.py
|
python
|
StyledTextEvent.GetModificationType
|
(*args, **kwargs)
|
return _stc.StyledTextEvent_GetModificationType(*args, **kwargs)
|
GetModificationType(self) -> int
|
GetModificationType(self) -> int
|
[
"GetModificationType",
"(",
"self",
")",
"-",
">",
"int"
] |
def GetModificationType(*args, **kwargs):
"""GetModificationType(self) -> int"""
return _stc.StyledTextEvent_GetModificationType(*args, **kwargs)
|
[
"def",
"GetModificationType",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_stc",
".",
"StyledTextEvent_GetModificationType",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/stc.py#L7134-L7136
|
|
hfinkel/llvm-project-cxxjit
|
91084ef018240bbb8e24235ff5cd8c355a9c1a1e
|
lldb/examples/python/gdbremote.py
|
python
|
Packet.get_number
|
(self, fail_value=-1)
|
Get a number from the packet. The number must be in big endian format and should be parsed
according to its prefix (starts with "0x" means hex, starts with "0" means octal, starts with
[1-9] means decimal, etc)
|
Get a number from the packet. The number must be in big endian format and should be parsed
according to its prefix (starts with "0x" means hex, starts with "0" means octal, starts with
[1-9] means decimal, etc)
|
[
"Get",
"a",
"number",
"from",
"the",
"packet",
".",
"The",
"number",
"must",
"be",
"in",
"big",
"endian",
"format",
"and",
"should",
"be",
"parsed",
"according",
"to",
"its",
"prefix",
"(",
"starts",
"with",
"0x",
"means",
"hex",
"starts",
"with",
"0",
"means",
"octal",
"starts",
"with",
"[",
"1",
"-",
"9",
"]",
"means",
"decimal",
"etc",
")"
] |
def get_number(self, fail_value=-1):
'''Get a number from the packet. The number must be in big endian format and should be parsed
according to its prefix (starts with "0x" means hex, starts with "0" means octal, starts with
[1-9] means decimal, etc)'''
match = g_number_regex.match(self.str)
if match:
number_str = match.group(1)
self.str = self.str[len(number_str):]
return int(number_str, 0)
else:
return fail_value
|
[
"def",
"get_number",
"(",
"self",
",",
"fail_value",
"=",
"-",
"1",
")",
":",
"match",
"=",
"g_number_regex",
".",
"match",
"(",
"self",
".",
"str",
")",
"if",
"match",
":",
"number_str",
"=",
"match",
".",
"group",
"(",
"1",
")",
"self",
".",
"str",
"=",
"self",
".",
"str",
"[",
"len",
"(",
"number_str",
")",
":",
"]",
"return",
"int",
"(",
"number_str",
",",
"0",
")",
"else",
":",
"return",
"fail_value"
] |
https://github.com/hfinkel/llvm-project-cxxjit/blob/91084ef018240bbb8e24235ff5cd8c355a9c1a1e/lldb/examples/python/gdbremote.py#L491-L501
|
||
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numba/cgutils.py
|
python
|
gep
|
(builder, ptr, *inds, **kws)
|
return builder.gep(ptr, idx, name=name, inbounds=inbounds)
|
Emit a getelementptr instruction for the given pointer and indices.
The indices can be LLVM values or Python int constants.
|
Emit a getelementptr instruction for the given pointer and indices.
The indices can be LLVM values or Python int constants.
|
[
"Emit",
"a",
"getelementptr",
"instruction",
"for",
"the",
"given",
"pointer",
"and",
"indices",
".",
"The",
"indices",
"can",
"be",
"LLVM",
"values",
"or",
"Python",
"int",
"constants",
"."
] |
def gep(builder, ptr, *inds, **kws):
"""
Emit a getelementptr instruction for the given pointer and indices.
The indices can be LLVM values or Python int constants.
"""
name = kws.pop('name', '')
inbounds = kws.pop('inbounds', False)
assert not kws
idx = []
for i in inds:
if isinstance(i, utils.INT_TYPES):
# NOTE: llvm only accepts int32 inside structs, not int64
ind = int32_t(i)
else:
ind = i
idx.append(ind)
return builder.gep(ptr, idx, name=name, inbounds=inbounds)
|
[
"def",
"gep",
"(",
"builder",
",",
"ptr",
",",
"*",
"inds",
",",
"*",
"*",
"kws",
")",
":",
"name",
"=",
"kws",
".",
"pop",
"(",
"'name'",
",",
"''",
")",
"inbounds",
"=",
"kws",
".",
"pop",
"(",
"'inbounds'",
",",
"False",
")",
"assert",
"not",
"kws",
"idx",
"=",
"[",
"]",
"for",
"i",
"in",
"inds",
":",
"if",
"isinstance",
"(",
"i",
",",
"utils",
".",
"INT_TYPES",
")",
":",
"# NOTE: llvm only accepts int32 inside structs, not int64",
"ind",
"=",
"int32_t",
"(",
"i",
")",
"else",
":",
"ind",
"=",
"i",
"idx",
".",
"append",
"(",
"ind",
")",
"return",
"builder",
".",
"gep",
"(",
"ptr",
",",
"idx",
",",
"name",
"=",
"name",
",",
"inbounds",
"=",
"inbounds",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numba/cgutils.py#L866-L882
|
|
synfig/synfig
|
a5ec91db5b751dc12e4400ccfb5c063fd6d2d928
|
synfig-studio/plugins/lottie-exporter/layers/group.py
|
python
|
flip_time
|
(time)
|
return ret
|
Time will be in a set();
Example: input: ((2, 3), (4, 5))
output: ((0, 2), (3, 4), (5, frame_last_time))
Args:
time (set) : Range of time is stored in this
Returns:
(set) : Flipped/opposite of `time` is returned
|
Time will be in a set();
Example: input: ((2, 3), (4, 5))
output: ((0, 2), (3, 4), (5, frame_last_time))
|
[
"Time",
"will",
"be",
"in",
"a",
"set",
"()",
";",
"Example",
":",
"input",
":",
"((",
"2",
"3",
")",
"(",
"4",
"5",
"))",
"output",
":",
"((",
"0",
"2",
")",
"(",
"3",
"4",
")",
"(",
"5",
"frame_last_time",
"))"
] |
def flip_time(time):
"""
Time will be in a set();
Example: input: ((2, 3), (4, 5))
output: ((0, 2), (3, 4), (5, frame_last_time))
Args:
time (set) : Range of time is stored in this
Returns:
(set) : Flipped/opposite of `time` is returned
"""
ret = set()
last = settings.lottie_format["op"]/settings.lottie_format["fr"]
z = 0
for it in time:
if (not approximate_equal(z, it[0])) and (not approximate_equal(it[0], it[1])):
ret.add((z, it[0]))
z = it[1]
if not approximate_equal(z, last):
ret.add((z, last))
return ret
|
[
"def",
"flip_time",
"(",
"time",
")",
":",
"ret",
"=",
"set",
"(",
")",
"last",
"=",
"settings",
".",
"lottie_format",
"[",
"\"op\"",
"]",
"/",
"settings",
".",
"lottie_format",
"[",
"\"fr\"",
"]",
"z",
"=",
"0",
"for",
"it",
"in",
"time",
":",
"if",
"(",
"not",
"approximate_equal",
"(",
"z",
",",
"it",
"[",
"0",
"]",
")",
")",
"and",
"(",
"not",
"approximate_equal",
"(",
"it",
"[",
"0",
"]",
",",
"it",
"[",
"1",
"]",
")",
")",
":",
"ret",
".",
"add",
"(",
"(",
"z",
",",
"it",
"[",
"0",
"]",
")",
")",
"z",
"=",
"it",
"[",
"1",
"]",
"if",
"not",
"approximate_equal",
"(",
"z",
",",
"last",
")",
":",
"ret",
".",
"add",
"(",
"(",
"z",
",",
"last",
")",
")",
"return",
"ret"
] |
https://github.com/synfig/synfig/blob/a5ec91db5b751dc12e4400ccfb5c063fd6d2d928/synfig-studio/plugins/lottie-exporter/layers/group.py#L283-L304
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/gtk/_core.py
|
python
|
Window.GetScreenPositionTuple
|
(*args, **kwargs)
|
return _core_.Window_GetScreenPositionTuple(*args, **kwargs)
|
GetScreenPositionTuple() -> (x,y)
Get the position of the window in screen coordinantes.
|
GetScreenPositionTuple() -> (x,y)
|
[
"GetScreenPositionTuple",
"()",
"-",
">",
"(",
"x",
"y",
")"
] |
def GetScreenPositionTuple(*args, **kwargs):
"""
GetScreenPositionTuple() -> (x,y)
Get the position of the window in screen coordinantes.
"""
return _core_.Window_GetScreenPositionTuple(*args, **kwargs)
|
[
"def",
"GetScreenPositionTuple",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_core_",
".",
"Window_GetScreenPositionTuple",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_core.py#L9484-L9490
|
|
google/earthenterprise
|
0fe84e29be470cd857e3a0e52e5d0afd5bb8cee9
|
earth_enterprise/src/server/wsgi/serve/snippets/util/snippet_masker.py
|
python
|
_FlexibleBool
|
(bool_text)
|
Tolerant boolean text.
Args:
bool_text: the bool text.
Returns:
Boolean value of text.
Raises:
ValueError: if the text bool isn't recognized.
|
Tolerant boolean text.
|
[
"Tolerant",
"boolean",
"text",
"."
] |
def _FlexibleBool(bool_text):
"""Tolerant boolean text.
Args:
bool_text: the bool text.
Returns:
Boolean value of text.
Raises:
ValueError: if the text bool isn't recognized.
"""
as_lower = bool_text.lower()
if as_lower in _FlexibleBool.TEXT_TRUES:
return True
elif as_lower in _FlexibleBool.TEXT_FALSES:
return False
else:
raise ValueError("Can't convert '%s' to bool" % bool_text)
|
[
"def",
"_FlexibleBool",
"(",
"bool_text",
")",
":",
"as_lower",
"=",
"bool_text",
".",
"lower",
"(",
")",
"if",
"as_lower",
"in",
"_FlexibleBool",
".",
"TEXT_TRUES",
":",
"return",
"True",
"elif",
"as_lower",
"in",
"_FlexibleBool",
".",
"TEXT_FALSES",
":",
"return",
"False",
"else",
":",
"raise",
"ValueError",
"(",
"\"Can't convert '%s' to bool\"",
"%",
"bool_text",
")"
] |
https://github.com/google/earthenterprise/blob/0fe84e29be470cd857e3a0e52e5d0afd5bb8cee9/earth_enterprise/src/server/wsgi/serve/snippets/util/snippet_masker.py#L88-L106
|
||
mongodb/mongo
|
d8ff665343ad29cf286ee2cf4a1960d29371937b
|
buildscripts/resmokelib/hang_analyzer/dumper.py
|
python
|
get_dumpers
|
(root_logger: logging.Logger, dbg_output: str)
|
return Dumpers(dbg=dbg, jstack=jstack)
|
Return OS-appropriate dumpers.
:param root_logger: Top-level logger
:param dbg_output: 'stdout' or 'file'
|
Return OS-appropriate dumpers.
|
[
"Return",
"OS",
"-",
"appropriate",
"dumpers",
"."
] |
def get_dumpers(root_logger: logging.Logger, dbg_output: str):
"""
Return OS-appropriate dumpers.
:param root_logger: Top-level logger
:param dbg_output: 'stdout' or 'file'
"""
dbg = None
jstack = None
if sys.platform.startswith("linux"):
dbg = GDBDumper(root_logger, dbg_output)
jstack = JstackDumper()
elif sys.platform == "win32" or sys.platform == "cygwin":
dbg = WindowsDumper(root_logger, dbg_output)
jstack = JstackWindowsDumper()
elif sys.platform == "darwin":
dbg = LLDBDumper(root_logger, dbg_output)
jstack = JstackDumper()
return Dumpers(dbg=dbg, jstack=jstack)
|
[
"def",
"get_dumpers",
"(",
"root_logger",
":",
"logging",
".",
"Logger",
",",
"dbg_output",
":",
"str",
")",
":",
"dbg",
"=",
"None",
"jstack",
"=",
"None",
"if",
"sys",
".",
"platform",
".",
"startswith",
"(",
"\"linux\"",
")",
":",
"dbg",
"=",
"GDBDumper",
"(",
"root_logger",
",",
"dbg_output",
")",
"jstack",
"=",
"JstackDumper",
"(",
")",
"elif",
"sys",
".",
"platform",
"==",
"\"win32\"",
"or",
"sys",
".",
"platform",
"==",
"\"cygwin\"",
":",
"dbg",
"=",
"WindowsDumper",
"(",
"root_logger",
",",
"dbg_output",
")",
"jstack",
"=",
"JstackWindowsDumper",
"(",
")",
"elif",
"sys",
".",
"platform",
"==",
"\"darwin\"",
":",
"dbg",
"=",
"LLDBDumper",
"(",
"root_logger",
",",
"dbg_output",
")",
"jstack",
"=",
"JstackDumper",
"(",
")",
"return",
"Dumpers",
"(",
"dbg",
"=",
"dbg",
",",
"jstack",
"=",
"jstack",
")"
] |
https://github.com/mongodb/mongo/blob/d8ff665343ad29cf286ee2cf4a1960d29371937b/buildscripts/resmokelib/hang_analyzer/dumper.py#L18-L38
|
|
baidu-research/tensorflow-allreduce
|
66d5b855e90b0949e9fa5cca5599fd729a70e874
|
tensorflow/contrib/linalg/python/ops/linear_operator_util.py
|
python
|
assert_zero_imag_part
|
(x, message=None, name="assert_zero_imag_part")
|
Returns `Op` that asserts Tensor `x` has no non-zero imaginary parts.
Args:
x: Numeric `Tensor`, real, integer, or complex.
message: A string message to prepend to failure message.
name: A name to give this `Op`.
Returns:
An `Op` that asserts `x` has no entries with modulus zero.
|
Returns `Op` that asserts Tensor `x` has no non-zero imaginary parts.
|
[
"Returns",
"Op",
"that",
"asserts",
"Tensor",
"x",
"has",
"no",
"non",
"-",
"zero",
"imaginary",
"parts",
"."
] |
def assert_zero_imag_part(x, message=None, name="assert_zero_imag_part"):
"""Returns `Op` that asserts Tensor `x` has no non-zero imaginary parts.
Args:
x: Numeric `Tensor`, real, integer, or complex.
message: A string message to prepend to failure message.
name: A name to give this `Op`.
Returns:
An `Op` that asserts `x` has no entries with modulus zero.
"""
with ops.name_scope(name, values=[x]):
x = ops.convert_to_tensor(x, name="x")
dtype = x.dtype.base_dtype
if dtype.is_floating:
return control_flow_ops.no_op()
zero = ops.convert_to_tensor(0, dtype=dtype.real_dtype)
return check_ops.assert_equal(zero, math_ops.imag(x), message=message)
|
[
"def",
"assert_zero_imag_part",
"(",
"x",
",",
"message",
"=",
"None",
",",
"name",
"=",
"\"assert_zero_imag_part\"",
")",
":",
"with",
"ops",
".",
"name_scope",
"(",
"name",
",",
"values",
"=",
"[",
"x",
"]",
")",
":",
"x",
"=",
"ops",
".",
"convert_to_tensor",
"(",
"x",
",",
"name",
"=",
"\"x\"",
")",
"dtype",
"=",
"x",
".",
"dtype",
".",
"base_dtype",
"if",
"dtype",
".",
"is_floating",
":",
"return",
"control_flow_ops",
".",
"no_op",
"(",
")",
"zero",
"=",
"ops",
".",
"convert_to_tensor",
"(",
"0",
",",
"dtype",
"=",
"dtype",
".",
"real_dtype",
")",
"return",
"check_ops",
".",
"assert_equal",
"(",
"zero",
",",
"math_ops",
".",
"imag",
"(",
"x",
")",
",",
"message",
"=",
"message",
")"
] |
https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/contrib/linalg/python/ops/linear_operator_util.py#L49-L68
|
||
apple/turicreate
|
cce55aa5311300e3ce6af93cb45ba791fd1bdf49
|
deps/src/libxml2-2.9.1/python/libxml2class.py
|
python
|
xmlNode.setNsProp
|
(self, ns, name, value)
|
return __tmp
|
Set (or reset) an attribute carried by a node. The ns
structure must be in scope, this is not checked
|
Set (or reset) an attribute carried by a node. The ns
structure must be in scope, this is not checked
|
[
"Set",
"(",
"or",
"reset",
")",
"an",
"attribute",
"carried",
"by",
"a",
"node",
".",
"The",
"ns",
"structure",
"must",
"be",
"in",
"scope",
"this",
"is",
"not",
"checked"
] |
def setNsProp(self, ns, name, value):
"""Set (or reset) an attribute carried by a node. The ns
structure must be in scope, this is not checked """
if ns is None: ns__o = None
else: ns__o = ns._o
ret = libxml2mod.xmlSetNsProp(self._o, ns__o, name, value)
if ret is None:raise treeError('xmlSetNsProp() failed')
__tmp = xmlAttr(_obj=ret)
return __tmp
|
[
"def",
"setNsProp",
"(",
"self",
",",
"ns",
",",
"name",
",",
"value",
")",
":",
"if",
"ns",
"is",
"None",
":",
"ns__o",
"=",
"None",
"else",
":",
"ns__o",
"=",
"ns",
".",
"_o",
"ret",
"=",
"libxml2mod",
".",
"xmlSetNsProp",
"(",
"self",
".",
"_o",
",",
"ns__o",
",",
"name",
",",
"value",
")",
"if",
"ret",
"is",
"None",
":",
"raise",
"treeError",
"(",
"'xmlSetNsProp() failed'",
")",
"__tmp",
"=",
"xmlAttr",
"(",
"_obj",
"=",
"ret",
")",
"return",
"__tmp"
] |
https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/deps/src/libxml2-2.9.1/python/libxml2class.py#L2782-L2790
|
|
papyrussolution/OpenPapyrus
|
bbfb5ec2ea2109b8e2f125edd838e12eaf7b8b91
|
Src/OSF/abseil/absl/abseil.podspec.gen.py
|
python
|
read_build
|
(package)
|
return [
parse_rule(elem, package)
for elem in root
if elem.tag == "rule" and elem.attrib["class"].startswith("cc_")
]
|
Runs bazel query on given package file and returns all cc rules.
|
Runs bazel query on given package file and returns all cc rules.
|
[
"Runs",
"bazel",
"query",
"on",
"given",
"package",
"file",
"and",
"returns",
"all",
"cc",
"rules",
"."
] |
def read_build(package):
"""Runs bazel query on given package file and returns all cc rules."""
result = subprocess.check_output(
["bazel", "query", package + ":all", "--output", "xml"])
root = xml.etree.ElementTree.fromstring(result)
return [
parse_rule(elem, package)
for elem in root
if elem.tag == "rule" and elem.attrib["class"].startswith("cc_")
]
|
[
"def",
"read_build",
"(",
"package",
")",
":",
"result",
"=",
"subprocess",
".",
"check_output",
"(",
"[",
"\"bazel\"",
",",
"\"query\"",
",",
"package",
"+",
"\":all\"",
",",
"\"--output\"",
",",
"\"xml\"",
"]",
")",
"root",
"=",
"xml",
".",
"etree",
".",
"ElementTree",
".",
"fromstring",
"(",
"result",
")",
"return",
"[",
"parse_rule",
"(",
"elem",
",",
"package",
")",
"for",
"elem",
"in",
"root",
"if",
"elem",
".",
"tag",
"==",
"\"rule\"",
"and",
"elem",
".",
"attrib",
"[",
"\"class\"",
"]",
".",
"startswith",
"(",
"\"cc_\"",
")",
"]"
] |
https://github.com/papyrussolution/OpenPapyrus/blob/bbfb5ec2ea2109b8e2f125edd838e12eaf7b8b91/Src/OSF/abseil/absl/abseil.podspec.gen.py#L89-L98
|
|
panda3d/panda3d
|
833ad89ebad58395d0af0b7ec08538e5e4308265
|
direct/src/extensions_native/extension_native_helpers.py
|
python
|
Dtool_funcToMethod
|
(func, cls, method_name=None)
|
Adds func to class so it is an accessible method; use method_name to specify the name to be used for calling the method.
The new method is accessible to any instance immediately.
|
Adds func to class so it is an accessible method; use method_name to specify the name to be used for calling the method.
The new method is accessible to any instance immediately.
|
[
"Adds",
"func",
"to",
"class",
"so",
"it",
"is",
"an",
"accessible",
"method",
";",
"use",
"method_name",
"to",
"specify",
"the",
"name",
"to",
"be",
"used",
"for",
"calling",
"the",
"method",
".",
"The",
"new",
"method",
"is",
"accessible",
"to",
"any",
"instance",
"immediately",
"."
] |
def Dtool_funcToMethod(func, cls, method_name=None):
"""Adds func to class so it is an accessible method; use method_name to specify the name to be used for calling the method.
The new method is accessible to any instance immediately."""
func.__func__ = func
func.__self__ = None
if not method_name:
method_name = func.__name__
cls.DtoolClassDict[method_name] = func
|
[
"def",
"Dtool_funcToMethod",
"(",
"func",
",",
"cls",
",",
"method_name",
"=",
"None",
")",
":",
"func",
".",
"__func__",
"=",
"func",
"func",
".",
"__self__",
"=",
"None",
"if",
"not",
"method_name",
":",
"method_name",
"=",
"func",
".",
"__name__",
"cls",
".",
"DtoolClassDict",
"[",
"method_name",
"]",
"=",
"func"
] |
https://github.com/panda3d/panda3d/blob/833ad89ebad58395d0af0b7ec08538e5e4308265/direct/src/extensions_native/extension_native_helpers.py#L8-L15
|
||
microsoft/TSS.MSR
|
0f2516fca2cd9929c31d5450e39301c9bde43688
|
TSS.Py/src/TpmTypes.py
|
python
|
TPM_HANDLE.nv
|
(nvIndex)
|
return TPM_HANDLE((TPM_HT.NV_INDEX << 24) + nvIndex)
|
Creates a TPM_HANDLE for an NV slot
Args:
nvIndex (int): The NV index
Returns:
New TPM_HANDLE object
|
Creates a TPM_HANDLE for an NV slot
Args:
nvIndex (int): The NV index
Returns:
New TPM_HANDLE object
|
[
"Creates",
"a",
"TPM_HANDLE",
"for",
"an",
"NV",
"slot",
"Args",
":",
"nvIndex",
"(",
"int",
")",
":",
"The",
"NV",
"index",
"Returns",
":",
"New",
"TPM_HANDLE",
"object"
] |
def nv(nvIndex):
""" Creates a TPM_HANDLE for an NV slot
Args:
nvIndex (int): The NV index
Returns:
New TPM_HANDLE object
"""
return TPM_HANDLE((TPM_HT.NV_INDEX << 24) + nvIndex)
|
[
"def",
"nv",
"(",
"nvIndex",
")",
":",
"return",
"TPM_HANDLE",
"(",
"(",
"TPM_HT",
".",
"NV_INDEX",
"<<",
"24",
")",
"+",
"nvIndex",
")"
] |
https://github.com/microsoft/TSS.MSR/blob/0f2516fca2cd9929c31d5450e39301c9bde43688/TSS.Py/src/TpmTypes.py#L3488-L3495
|
|
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/tools/python3/src/Lib/numbers.py
|
python
|
Integral.__xor__
|
(self, other)
|
self ^ other
|
self ^ other
|
[
"self",
"^",
"other"
] |
def __xor__(self, other):
"""self ^ other"""
raise NotImplementedError
|
[
"def",
"__xor__",
"(",
"self",
",",
"other",
")",
":",
"raise",
"NotImplementedError"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/numbers.py#L354-L356
|
||
chromiumembedded/cef
|
80caf947f3fe2210e5344713c5281d8af9bdc295
|
tools/yapf/yapf/yapflib/pytree_utils.py
|
python
|
DumpNodeToString
|
(node)
|
Dump a string representation of the given node. For debugging.
Arguments:
node: the node.
Returns:
The string representation.
|
Dump a string representation of the given node. For debugging.
|
[
"Dump",
"a",
"string",
"representation",
"of",
"the",
"given",
"node",
".",
"For",
"debugging",
"."
] |
def DumpNodeToString(node):
"""Dump a string representation of the given node. For debugging.
Arguments:
node: the node.
Returns:
The string representation.
"""
if isinstance(node, pytree.Leaf):
fmt = '{name}({value}) [lineno={lineno}, column={column}, prefix={prefix}]'
return fmt.format(
name=NodeName(node),
value=_PytreeNodeRepr(node),
lineno=node.lineno,
column=node.column,
prefix=repr(node.prefix))
else:
fmt = '{node} [{len} children] [child_indent="{indent}"]'
return fmt.format(
node=NodeName(node),
len=len(node.children),
indent=GetNodeAnnotation(node, Annotation.CHILD_INDENT))
|
[
"def",
"DumpNodeToString",
"(",
"node",
")",
":",
"if",
"isinstance",
"(",
"node",
",",
"pytree",
".",
"Leaf",
")",
":",
"fmt",
"=",
"'{name}({value}) [lineno={lineno}, column={column}, prefix={prefix}]'",
"return",
"fmt",
".",
"format",
"(",
"name",
"=",
"NodeName",
"(",
"node",
")",
",",
"value",
"=",
"_PytreeNodeRepr",
"(",
"node",
")",
",",
"lineno",
"=",
"node",
".",
"lineno",
",",
"column",
"=",
"node",
".",
"column",
",",
"prefix",
"=",
"repr",
"(",
"node",
".",
"prefix",
")",
")",
"else",
":",
"fmt",
"=",
"'{node} [{len} children] [child_indent=\"{indent}\"]'",
"return",
"fmt",
".",
"format",
"(",
"node",
"=",
"NodeName",
"(",
"node",
")",
",",
"len",
"=",
"len",
"(",
"node",
".",
"children",
")",
",",
"indent",
"=",
"GetNodeAnnotation",
"(",
"node",
",",
"Annotation",
".",
"CHILD_INDENT",
")",
")"
] |
https://github.com/chromiumembedded/cef/blob/80caf947f3fe2210e5344713c5281d8af9bdc295/tools/yapf/yapf/yapflib/pytree_utils.py#L261-L283
|
||
rdkit/rdkit
|
ede860ae316d12d8568daf5ee800921c3389c84e
|
rdkit/ML/MLUtils/VoteImg.py
|
python
|
CollectVotes
|
(composite, data, badOnly)
|
return res, values, trueValues, misCount
|
collects the votes from _composite_ for the examples in _data_
**Arguments**
- composite: a composite model
- data: a list of examples to run through _composite_
- badOnly: if set only bad (misclassified) examples will be kept
**Returns**
a 4-tuple containing:
1) the expanded list of vote details (see below)
2) the list of predicted results
3) the list of true results
4) the number of miscounted examples
**Notes**
pp - the expanded list of vote details consists of:
'[ vote1, vote2, ... voteN, 0, res, trueRes]'
where _res_ is the predicted results and _trueRes_ is the actual result.
The extra zero is included to allow a line to be drawn between the votes
and the results.
|
collects the votes from _composite_ for the examples in _data_
|
[
"collects",
"the",
"votes",
"from",
"_composite_",
"for",
"the",
"examples",
"in",
"_data_"
] |
def CollectVotes(composite, data, badOnly):
""" collects the votes from _composite_ for the examples in _data_
**Arguments**
- composite: a composite model
- data: a list of examples to run through _composite_
- badOnly: if set only bad (misclassified) examples will be kept
**Returns**
a 4-tuple containing:
1) the expanded list of vote details (see below)
2) the list of predicted results
3) the list of true results
4) the number of miscounted examples
**Notes**
pp - the expanded list of vote details consists of:
'[ vote1, vote2, ... voteN, 0, res, trueRes]'
where _res_ is the predicted results and _trueRes_ is the actual result.
The extra zero is included to allow a line to be drawn between the votes
and the results.
"""
res = []
values = []
trueValues = []
misCount = 0
for pt in data:
val, _ = composite.ClassifyExample(pt)
predict = pt[-1]
if not badOnly or val != predict:
values.append(val)
trueValues.append(predict)
if val != predict:
misCount = misCount + 1
res.append(composite.GetVoteDetails() + [0, val, pt[-1]])
return res, values, trueValues, misCount
|
[
"def",
"CollectVotes",
"(",
"composite",
",",
"data",
",",
"badOnly",
")",
":",
"res",
"=",
"[",
"]",
"values",
"=",
"[",
"]",
"trueValues",
"=",
"[",
"]",
"misCount",
"=",
"0",
"for",
"pt",
"in",
"data",
":",
"val",
",",
"_",
"=",
"composite",
".",
"ClassifyExample",
"(",
"pt",
")",
"predict",
"=",
"pt",
"[",
"-",
"1",
"]",
"if",
"not",
"badOnly",
"or",
"val",
"!=",
"predict",
":",
"values",
".",
"append",
"(",
"val",
")",
"trueValues",
".",
"append",
"(",
"predict",
")",
"if",
"val",
"!=",
"predict",
":",
"misCount",
"=",
"misCount",
"+",
"1",
"res",
".",
"append",
"(",
"composite",
".",
"GetVoteDetails",
"(",
")",
"+",
"[",
"0",
",",
"val",
",",
"pt",
"[",
"-",
"1",
"]",
"]",
")",
"return",
"res",
",",
"values",
",",
"trueValues",
",",
"misCount"
] |
https://github.com/rdkit/rdkit/blob/ede860ae316d12d8568daf5ee800921c3389c84e/rdkit/ML/MLUtils/VoteImg.py#L15-L63
|
|
OpenLightingProject/ola
|
d1433a1bed73276fbe55ce18c03b1c208237decc
|
tools/rdm/ModelCollector.py
|
python
|
ModelCollector._HandleSoftwareVersionLabel
|
(self, data)
|
Called when we get a SOFTWARE_VERSION_LABEL response.
|
Called when we get a SOFTWARE_VERSION_LABEL response.
|
[
"Called",
"when",
"we",
"get",
"a",
"SOFTWARE_VERSION_LABEL",
"response",
"."
] |
def _HandleSoftwareVersionLabel(self, data):
"""Called when we get a SOFTWARE_VERSION_LABEL response."""
if data is not None:
this_version = self._GetVersion()
this_version['label'] = data['label']
self._NextState()
|
[
"def",
"_HandleSoftwareVersionLabel",
"(",
"self",
",",
"data",
")",
":",
"if",
"data",
"is",
"not",
"None",
":",
"this_version",
"=",
"self",
".",
"_GetVersion",
"(",
")",
"this_version",
"[",
"'label'",
"]",
"=",
"data",
"[",
"'label'",
"]",
"self",
".",
"_NextState",
"(",
")"
] |
https://github.com/OpenLightingProject/ola/blob/d1433a1bed73276fbe55ce18c03b1c208237decc/tools/rdm/ModelCollector.py#L271-L276
|
||
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/tools/python3/src/Lib/xml/sax/handler.py
|
python
|
DTDHandler.unparsedEntityDecl
|
(self, name, publicId, systemId, ndata)
|
Handle an unparsed entity declaration event.
|
Handle an unparsed entity declaration event.
|
[
"Handle",
"an",
"unparsed",
"entity",
"declaration",
"event",
"."
] |
def unparsedEntityDecl(self, name, publicId, systemId, ndata):
"Handle an unparsed entity declaration event."
|
[
"def",
"unparsedEntityDecl",
"(",
"self",
",",
"name",
",",
"publicId",
",",
"systemId",
",",
"ndata",
")",
":"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/xml/sax/handler.py#L217-L218
|
||
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/pandas/py3/pandas/core/indexes/base.py
|
python
|
Index.__getitem__
|
(self, key)
|
Override numpy.ndarray's __getitem__ method to work as desired.
This function adds lists and Series as valid boolean indexers
(ndarrays only supports ndarray with dtype=bool).
If resulting ndim != 1, plain ndarray is returned instead of
corresponding `Index` subclass.
|
Override numpy.ndarray's __getitem__ method to work as desired.
|
[
"Override",
"numpy",
".",
"ndarray",
"s",
"__getitem__",
"method",
"to",
"work",
"as",
"desired",
"."
] |
def __getitem__(self, key):
"""
Override numpy.ndarray's __getitem__ method to work as desired.
This function adds lists and Series as valid boolean indexers
(ndarrays only supports ndarray with dtype=bool).
If resulting ndim != 1, plain ndarray is returned instead of
corresponding `Index` subclass.
"""
# There's no custom logic to be implemented in __getslice__, so it's
# not overloaded intentionally.
getitem = self._data.__getitem__
if is_scalar(key):
key = com.cast_scalar_indexer(key, warn_float=True)
return getitem(key)
if isinstance(key, slice):
# This case is separated from the conditional above to avoid
# pessimization of basic indexing.
result = getitem(key)
# Going through simple_new for performance.
return type(self)._simple_new(result, name=self._name)
if com.is_bool_indexer(key):
key = np.asarray(key, dtype=bool)
result = getitem(key)
if not is_scalar(result):
# error: Argument 1 to "ndim" has incompatible type "Union[ExtensionArray,
# Any]"; expected "Union[Union[int, float, complex, str, bytes, generic],
# Sequence[Union[int, float, complex, str, bytes, generic]],
# Sequence[Sequence[Any]], _SupportsArray]"
if np.ndim(result) > 1: # type: ignore[arg-type]
deprecate_ndim_indexing(result)
return result
# NB: Using _constructor._simple_new would break if MultiIndex
# didn't override __getitem__
return self._constructor._simple_new(result, name=self._name)
else:
return result
|
[
"def",
"__getitem__",
"(",
"self",
",",
"key",
")",
":",
"# There's no custom logic to be implemented in __getslice__, so it's",
"# not overloaded intentionally.",
"getitem",
"=",
"self",
".",
"_data",
".",
"__getitem__",
"if",
"is_scalar",
"(",
"key",
")",
":",
"key",
"=",
"com",
".",
"cast_scalar_indexer",
"(",
"key",
",",
"warn_float",
"=",
"True",
")",
"return",
"getitem",
"(",
"key",
")",
"if",
"isinstance",
"(",
"key",
",",
"slice",
")",
":",
"# This case is separated from the conditional above to avoid",
"# pessimization of basic indexing.",
"result",
"=",
"getitem",
"(",
"key",
")",
"# Going through simple_new for performance.",
"return",
"type",
"(",
"self",
")",
".",
"_simple_new",
"(",
"result",
",",
"name",
"=",
"self",
".",
"_name",
")",
"if",
"com",
".",
"is_bool_indexer",
"(",
"key",
")",
":",
"key",
"=",
"np",
".",
"asarray",
"(",
"key",
",",
"dtype",
"=",
"bool",
")",
"result",
"=",
"getitem",
"(",
"key",
")",
"if",
"not",
"is_scalar",
"(",
"result",
")",
":",
"# error: Argument 1 to \"ndim\" has incompatible type \"Union[ExtensionArray,",
"# Any]\"; expected \"Union[Union[int, float, complex, str, bytes, generic],",
"# Sequence[Union[int, float, complex, str, bytes, generic]],",
"# Sequence[Sequence[Any]], _SupportsArray]\"",
"if",
"np",
".",
"ndim",
"(",
"result",
")",
">",
"1",
":",
"# type: ignore[arg-type]",
"deprecate_ndim_indexing",
"(",
"result",
")",
"return",
"result",
"# NB: Using _constructor._simple_new would break if MultiIndex",
"# didn't override __getitem__",
"return",
"self",
".",
"_constructor",
".",
"_simple_new",
"(",
"result",
",",
"name",
"=",
"self",
".",
"_name",
")",
"else",
":",
"return",
"result"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pandas/py3/pandas/core/indexes/base.py#L4587-L4629
|
||
alexgkendall/caffe-segnet
|
344c113bf1832886f1cbe9f33ffe28a3beeaf412
|
scripts/cpp_lint.py
|
python
|
CleansedLines.NumLines
|
(self)
|
return self.num_lines
|
Returns the number of lines represented.
|
Returns the number of lines represented.
|
[
"Returns",
"the",
"number",
"of",
"lines",
"represented",
"."
] |
def NumLines(self):
"""Returns the number of lines represented."""
return self.num_lines
|
[
"def",
"NumLines",
"(",
"self",
")",
":",
"return",
"self",
".",
"num_lines"
] |
https://github.com/alexgkendall/caffe-segnet/blob/344c113bf1832886f1cbe9f33ffe28a3beeaf412/scripts/cpp_lint.py#L1204-L1206
|
|
Tencent/Pebble
|
68315f176d9e328a233ace29b7579a829f89879f
|
tools/blade/src/blade/blade.py
|
python
|
Blade.get_scons_platform
|
(self)
|
return self.__scons_platform
|
Return handle of the platform class.
|
Return handle of the platform class.
|
[
"Return",
"handle",
"of",
"the",
"platform",
"class",
"."
] |
def get_scons_platform(self):
"""Return handle of the platform class. """
return self.__scons_platform
|
[
"def",
"get_scons_platform",
"(",
"self",
")",
":",
"return",
"self",
".",
"__scons_platform"
] |
https://github.com/Tencent/Pebble/blob/68315f176d9e328a233ace29b7579a829f89879f/tools/blade/src/blade/blade.py#L336-L338
|
|
BlzFans/wke
|
b0fa21158312e40c5fbd84682d643022b6c34a93
|
cygwin/lib/python2.6/cgi.py
|
python
|
FieldStorage.getvalue
|
(self, key, default=None)
|
Dictionary style get() method, including 'value' lookup.
|
Dictionary style get() method, including 'value' lookup.
|
[
"Dictionary",
"style",
"get",
"()",
"method",
"including",
"value",
"lookup",
"."
] |
def getvalue(self, key, default=None):
"""Dictionary style get() method, including 'value' lookup."""
if key in self:
value = self[key]
if type(value) is type([]):
return map(attrgetter('value'), value)
else:
return value.value
else:
return default
|
[
"def",
"getvalue",
"(",
"self",
",",
"key",
",",
"default",
"=",
"None",
")",
":",
"if",
"key",
"in",
"self",
":",
"value",
"=",
"self",
"[",
"key",
"]",
"if",
"type",
"(",
"value",
")",
"is",
"type",
"(",
"[",
"]",
")",
":",
"return",
"map",
"(",
"attrgetter",
"(",
"'value'",
")",
",",
"value",
")",
"else",
":",
"return",
"value",
".",
"value",
"else",
":",
"return",
"default"
] |
https://github.com/BlzFans/wke/blob/b0fa21158312e40c5fbd84682d643022b6c34a93/cygwin/lib/python2.6/cgi.py#L547-L556
|
||
Kitware/ParaView
|
f760af9124ff4634b23ebbeab95a4f56e0261955
|
Plugins/pvblot/blotish.py
|
python
|
nintv
|
(value=None)
|
Set nintv
|
Set nintv
|
[
"Set",
"nintv"
] |
def nintv(value=None):
"""Set nintv"""
state.time_selection.set_nintv(_maybe_convert(value, int))
state.time_selection.print_show()
|
[
"def",
"nintv",
"(",
"value",
"=",
"None",
")",
":",
"state",
".",
"time_selection",
".",
"set_nintv",
"(",
"_maybe_convert",
"(",
"value",
",",
"int",
")",
")",
"state",
".",
"time_selection",
".",
"print_show",
"(",
")"
] |
https://github.com/Kitware/ParaView/blob/f760af9124ff4634b23ebbeab95a4f56e0261955/Plugins/pvblot/blotish.py#L887-L890
|
||
miyosuda/TensorFlowAndroidMNIST
|
7b5a4603d2780a8a2834575706e9001977524007
|
jni-build/jni/include/tensorflow/python/ops/math_ops.py
|
python
|
to_double
|
(x, name="ToDouble")
|
return cast(x, dtypes.float64, name=name)
|
Casts a tensor to type `float64`.
Args:
x: A `Tensor` or `SparseTensor`.
name: A name for the operation (optional).
Returns:
A `Tensor` or `SparseTensor` with same shape as `x` with type `float64`.
Raises:
TypeError: If `x` cannot be cast to the `float64`.
|
Casts a tensor to type `float64`.
|
[
"Casts",
"a",
"tensor",
"to",
"type",
"float64",
"."
] |
def to_double(x, name="ToDouble"):
"""Casts a tensor to type `float64`.
Args:
x: A `Tensor` or `SparseTensor`.
name: A name for the operation (optional).
Returns:
A `Tensor` or `SparseTensor` with same shape as `x` with type `float64`.
Raises:
TypeError: If `x` cannot be cast to the `float64`.
"""
return cast(x, dtypes.float64, name=name)
|
[
"def",
"to_double",
"(",
"x",
",",
"name",
"=",
"\"ToDouble\"",
")",
":",
"return",
"cast",
"(",
"x",
",",
"dtypes",
".",
"float64",
",",
"name",
"=",
"name",
")"
] |
https://github.com/miyosuda/TensorFlowAndroidMNIST/blob/7b5a4603d2780a8a2834575706e9001977524007/jni-build/jni/include/tensorflow/python/ops/math_ops.py#L673-L686
|
|
citp/BlockSci
|
14ccc9358443b2eb5730bb2902c4b11ab7928abf
|
blockscipy/blocksci/__init__.py
|
python
|
filter_blocks_legacy
|
(
self, filter_func, start=None, end=None, cpu_count=psutil.cpu_count()
)
|
return mapreduce_block_ranges(
self, map_func, reduce_func, MISSING_PARAM, start, end, cpu_count=cpu_count
)
|
Return all blocks in range which match the given criteria
|
Return all blocks in range which match the given criteria
|
[
"Return",
"all",
"blocks",
"in",
"range",
"which",
"match",
"the",
"given",
"criteria"
] |
def filter_blocks_legacy(
self, filter_func, start=None, end=None, cpu_count=psutil.cpu_count()
):
"""Return all blocks in range which match the given criteria
"""
def map_func(blocks):
return [block for block in blocks if filter_func(block)]
def reduce_func(accum, new_val):
accum.extend(new_val)
return accum
return mapreduce_block_ranges(
self, map_func, reduce_func, MISSING_PARAM, start, end, cpu_count=cpu_count
)
|
[
"def",
"filter_blocks_legacy",
"(",
"self",
",",
"filter_func",
",",
"start",
"=",
"None",
",",
"end",
"=",
"None",
",",
"cpu_count",
"=",
"psutil",
".",
"cpu_count",
"(",
")",
")",
":",
"def",
"map_func",
"(",
"blocks",
")",
":",
"return",
"[",
"block",
"for",
"block",
"in",
"blocks",
"if",
"filter_func",
"(",
"block",
")",
"]",
"def",
"reduce_func",
"(",
"accum",
",",
"new_val",
")",
":",
"accum",
".",
"extend",
"(",
"new_val",
")",
"return",
"accum",
"return",
"mapreduce_block_ranges",
"(",
"self",
",",
"map_func",
",",
"reduce_func",
",",
"MISSING_PARAM",
",",
"start",
",",
"end",
",",
"cpu_count",
"=",
"cpu_count",
")"
] |
https://github.com/citp/BlockSci/blob/14ccc9358443b2eb5730bb2902c4b11ab7928abf/blockscipy/blocksci/__init__.py#L194-L209
|
|
wlanjie/AndroidFFmpeg
|
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
|
tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/mailbox.py
|
python
|
_ProxyFile.__init__
|
(self, f, pos=None)
|
Initialize a _ProxyFile.
|
Initialize a _ProxyFile.
|
[
"Initialize",
"a",
"_ProxyFile",
"."
] |
def __init__(self, f, pos=None):
"""Initialize a _ProxyFile."""
self._file = f
if pos is None:
self._pos = f.tell()
else:
self._pos = pos
|
[
"def",
"__init__",
"(",
"self",
",",
"f",
",",
"pos",
"=",
"None",
")",
":",
"self",
".",
"_file",
"=",
"f",
"if",
"pos",
"is",
"None",
":",
"self",
".",
"_pos",
"=",
"f",
".",
"tell",
"(",
")",
"else",
":",
"self",
".",
"_pos",
"=",
"pos"
] |
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/mailbox.py#L1863-L1869
|
||
pmq20/node-packer
|
12c46c6e44fbc14d9ee645ebd17d5296b324f7e0
|
lts/deps/v8/third_party/jinja2/debug.py
|
python
|
ProcessedTraceback.render_as_html
|
(self, full=False)
|
return u'%s\n\n<!--\n%s\n-->' % (
render_traceback(self, full=full),
self.render_as_text().decode('utf-8', 'replace')
)
|
Return a unicode string with the traceback as rendered HTML.
|
Return a unicode string with the traceback as rendered HTML.
|
[
"Return",
"a",
"unicode",
"string",
"with",
"the",
"traceback",
"as",
"rendered",
"HTML",
"."
] |
def render_as_html(self, full=False):
"""Return a unicode string with the traceback as rendered HTML."""
from jinja2.debugrenderer import render_traceback
return u'%s\n\n<!--\n%s\n-->' % (
render_traceback(self, full=full),
self.render_as_text().decode('utf-8', 'replace')
)
|
[
"def",
"render_as_html",
"(",
"self",
",",
"full",
"=",
"False",
")",
":",
"from",
"jinja2",
".",
"debugrenderer",
"import",
"render_traceback",
"return",
"u'%s\\n\\n<!--\\n%s\\n-->'",
"%",
"(",
"render_traceback",
"(",
"self",
",",
"full",
"=",
"full",
")",
",",
"self",
".",
"render_as_text",
"(",
")",
".",
"decode",
"(",
"'utf-8'",
",",
"'replace'",
")",
")"
] |
https://github.com/pmq20/node-packer/blob/12c46c6e44fbc14d9ee645ebd17d5296b324f7e0/lts/deps/v8/third_party/jinja2/debug.py#L103-L109
|
|
trilinos/Trilinos
|
6168be6dd51e35e1cd681e9c4b24433e709df140
|
packages/seacas/scripts/exomerge2.py
|
python
|
ExodusModel.delete_element_field
|
(self,
element_field_names,
element_block_ids='all')
|
Delete one or more element fields.
Examples:
>>> model.delete_element_field('eqps')
>>> model.delete_element_field('all')
|
Delete one or more element fields.
|
[
"Delete",
"one",
"or",
"more",
"element",
"fields",
"."
] |
def delete_element_field(self,
element_field_names,
element_block_ids='all'):
"""
Delete one or more element fields.
Examples:
>>> model.delete_element_field('eqps')
>>> model.delete_element_field('all')
"""
element_block_ids = self._format_element_block_id_list(
element_block_ids)
element_field_names = self._format_id_list(
element_field_names,
self.get_element_field_names(),
'element field')
# for each field
for element_field_name in element_field_names:
any_deleted = False
# for each element block
for element_block_id in element_block_ids:
fields = self._get_element_block_fields(element_block_id)
if element_field_name in fields:
any_deleted = True
del fields[element_field_name]
if not any_deleted:
self._warning('Element field not defined.',
'The element field "%s" was not defined on any '
'of the given element blocks. It cannot be '
'deleted.' % element_field_name)
|
[
"def",
"delete_element_field",
"(",
"self",
",",
"element_field_names",
",",
"element_block_ids",
"=",
"'all'",
")",
":",
"element_block_ids",
"=",
"self",
".",
"_format_element_block_id_list",
"(",
"element_block_ids",
")",
"element_field_names",
"=",
"self",
".",
"_format_id_list",
"(",
"element_field_names",
",",
"self",
".",
"get_element_field_names",
"(",
")",
",",
"'element field'",
")",
"# for each field",
"for",
"element_field_name",
"in",
"element_field_names",
":",
"any_deleted",
"=",
"False",
"# for each element block",
"for",
"element_block_id",
"in",
"element_block_ids",
":",
"fields",
"=",
"self",
".",
"_get_element_block_fields",
"(",
"element_block_id",
")",
"if",
"element_field_name",
"in",
"fields",
":",
"any_deleted",
"=",
"True",
"del",
"fields",
"[",
"element_field_name",
"]",
"if",
"not",
"any_deleted",
":",
"self",
".",
"_warning",
"(",
"'Element field not defined.'",
",",
"'The element field \"%s\" was not defined on any '",
"'of the given element blocks. It cannot be '",
"'deleted.'",
"%",
"element_field_name",
")"
] |
https://github.com/trilinos/Trilinos/blob/6168be6dd51e35e1cd681e9c4b24433e709df140/packages/seacas/scripts/exomerge2.py#L4762-L4792
|
||
mapnik/mapnik
|
f3da900c355e1d15059c4a91b00203dcc9d9f0ef
|
scons/scons-local-4.1.0/SCons/Taskmaster.py
|
python
|
Task._exception_raise
|
(self)
|
Raises a pending exception that was recorded while getting a
Task ready for execution.
|
Raises a pending exception that was recorded while getting a
Task ready for execution.
|
[
"Raises",
"a",
"pending",
"exception",
"that",
"was",
"recorded",
"while",
"getting",
"a",
"Task",
"ready",
"for",
"execution",
"."
] |
def _exception_raise(self):
"""
Raises a pending exception that was recorded while getting a
Task ready for execution.
"""
exc = self.exc_info()[:]
try:
exc_type, exc_value, exc_traceback = exc
except ValueError:
exc_type, exc_value = exc # pylint: disable=unbalanced-tuple-unpacking
exc_traceback = None
# raise exc_type(exc_value).with_traceback(exc_traceback)
if isinstance(exc_value, Exception): #hasattr(exc_value, 'with_traceback'):
# If exc_value is an exception, then just reraise
raise exc_value.with_traceback(exc_traceback)
else:
# else we'll create an exception using the value and raise that
raise exc_type(exc_value).with_traceback(exc_traceback)
|
[
"def",
"_exception_raise",
"(",
"self",
")",
":",
"exc",
"=",
"self",
".",
"exc_info",
"(",
")",
"[",
":",
"]",
"try",
":",
"exc_type",
",",
"exc_value",
",",
"exc_traceback",
"=",
"exc",
"except",
"ValueError",
":",
"exc_type",
",",
"exc_value",
"=",
"exc",
"# pylint: disable=unbalanced-tuple-unpacking",
"exc_traceback",
"=",
"None",
"# raise exc_type(exc_value).with_traceback(exc_traceback)",
"if",
"isinstance",
"(",
"exc_value",
",",
"Exception",
")",
":",
"#hasattr(exc_value, 'with_traceback'):",
"# If exc_value is an exception, then just reraise",
"raise",
"exc_value",
".",
"with_traceback",
"(",
"exc_traceback",
")",
"else",
":",
"# else we'll create an exception using the value and raise that",
"raise",
"exc_type",
"(",
"exc_value",
")",
".",
"with_traceback",
"(",
"exc_traceback",
")"
] |
https://github.com/mapnik/mapnik/blob/f3da900c355e1d15059c4a91b00203dcc9d9f0ef/scons/scons-local-4.1.0/SCons/Taskmaster.py#L525-L543
|
||
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/msw/_core.py
|
python
|
Window.CanBeOutsideClientArea
|
(*args, **kwargs)
|
return _core_.Window_CanBeOutsideClientArea(*args, **kwargs)
|
CanBeOutsideClientArea(self) -> bool
|
CanBeOutsideClientArea(self) -> bool
|
[
"CanBeOutsideClientArea",
"(",
"self",
")",
"-",
">",
"bool"
] |
def CanBeOutsideClientArea(*args, **kwargs):
"""CanBeOutsideClientArea(self) -> bool"""
return _core_.Window_CanBeOutsideClientArea(*args, **kwargs)
|
[
"def",
"CanBeOutsideClientArea",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_core_",
".",
"Window_CanBeOutsideClientArea",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_core.py#L11595-L11597
|
|
apple/turicreate
|
cce55aa5311300e3ce6af93cb45ba791fd1bdf49
|
src/external/boost/boost_1_68_0/libs/metaparse/tools/benchmark/char_stat.py
|
python
|
count_characters
|
(root, out)
|
Count the occurrances of the different characters in the files
|
Count the occurrances of the different characters in the files
|
[
"Count",
"the",
"occurrances",
"of",
"the",
"different",
"characters",
"in",
"the",
"files"
] |
def count_characters(root, out):
"""Count the occurrances of the different characters in the files"""
if os.path.isfile(root):
with open(root, 'rb') as in_f:
for line in in_f:
for char in line:
if char not in out:
out[char] = 0
out[char] = out[char] + 1
elif os.path.isdir(root):
for filename in os.listdir(root):
count_characters(os.path.join(root, filename), out)
|
[
"def",
"count_characters",
"(",
"root",
",",
"out",
")",
":",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"root",
")",
":",
"with",
"open",
"(",
"root",
",",
"'rb'",
")",
"as",
"in_f",
":",
"for",
"line",
"in",
"in_f",
":",
"for",
"char",
"in",
"line",
":",
"if",
"char",
"not",
"in",
"out",
":",
"out",
"[",
"char",
"]",
"=",
"0",
"out",
"[",
"char",
"]",
"=",
"out",
"[",
"char",
"]",
"+",
"1",
"elif",
"os",
".",
"path",
".",
"isdir",
"(",
"root",
")",
":",
"for",
"filename",
"in",
"os",
".",
"listdir",
"(",
"root",
")",
":",
"count_characters",
"(",
"os",
".",
"path",
".",
"join",
"(",
"root",
",",
"filename",
")",
",",
"out",
")"
] |
https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/src/external/boost/boost_1_68_0/libs/metaparse/tools/benchmark/char_stat.py#L13-L24
|
||
blackberry/Boost
|
fc90c3fde129c62565c023f091eddc4a7ed9902b
|
tools/build/v2/tools/builtin.py
|
python
|
variant
|
(name, parents_or_properties, explicit_properties = [])
|
Declares a new variant.
First determines explicit properties for this variant, by
refining parents' explicit properties with the passed explicit
properties. The result is remembered and will be used if
this variant is used as parent.
Second, determines the full property set for this variant by
adding to the explicit properties default values for all properties
which neither present nor are symmetric.
Lastly, makes appropriate value of 'variant' property expand
to the full property set.
name: Name of the variant
parents_or_properties: Specifies parent variants, if
'explicit_properties' are given,
and explicit_properties otherwise.
explicit_properties: Explicit properties.
|
Declares a new variant.
First determines explicit properties for this variant, by
refining parents' explicit properties with the passed explicit
properties. The result is remembered and will be used if
this variant is used as parent.
Second, determines the full property set for this variant by
adding to the explicit properties default values for all properties
which neither present nor are symmetric.
Lastly, makes appropriate value of 'variant' property expand
to the full property set.
name: Name of the variant
parents_or_properties: Specifies parent variants, if
'explicit_properties' are given,
and explicit_properties otherwise.
explicit_properties: Explicit properties.
|
[
"Declares",
"a",
"new",
"variant",
".",
"First",
"determines",
"explicit",
"properties",
"for",
"this",
"variant",
"by",
"refining",
"parents",
"explicit",
"properties",
"with",
"the",
"passed",
"explicit",
"properties",
".",
"The",
"result",
"is",
"remembered",
"and",
"will",
"be",
"used",
"if",
"this",
"variant",
"is",
"used",
"as",
"parent",
".",
"Second",
"determines",
"the",
"full",
"property",
"set",
"for",
"this",
"variant",
"by",
"adding",
"to",
"the",
"explicit",
"properties",
"default",
"values",
"for",
"all",
"properties",
"which",
"neither",
"present",
"nor",
"are",
"symmetric",
".",
"Lastly",
"makes",
"appropriate",
"value",
"of",
"variant",
"property",
"expand",
"to",
"the",
"full",
"property",
"set",
".",
"name",
":",
"Name",
"of",
"the",
"variant",
"parents_or_properties",
":",
"Specifies",
"parent",
"variants",
"if",
"explicit_properties",
"are",
"given",
"and",
"explicit_properties",
"otherwise",
".",
"explicit_properties",
":",
"Explicit",
"properties",
"."
] |
def variant (name, parents_or_properties, explicit_properties = []):
""" Declares a new variant.
First determines explicit properties for this variant, by
refining parents' explicit properties with the passed explicit
properties. The result is remembered and will be used if
this variant is used as parent.
Second, determines the full property set for this variant by
adding to the explicit properties default values for all properties
which neither present nor are symmetric.
Lastly, makes appropriate value of 'variant' property expand
to the full property set.
name: Name of the variant
parents_or_properties: Specifies parent variants, if
'explicit_properties' are given,
and explicit_properties otherwise.
explicit_properties: Explicit properties.
"""
parents = []
if not explicit_properties:
explicit_properties = parents_or_properties
else:
parents = parents_or_properties
inherited = property_set.empty()
if parents:
# If we allow multiple parents, we'd have to to check for conflicts
# between base variants, and there was no demand for so to bother.
if len (parents) > 1:
raise BaseException ("Multiple base variants are not yet supported")
p = parents[0]
# TODO: the check may be stricter
if not feature.is_implicit_value (p):
raise BaseException ("Invalid base varaint '%s'" % p)
inherited = __variant_explicit_properties[p]
explicit_properties = property_set.create_with_validation(explicit_properties)
explicit_properties = inherited.refine(explicit_properties)
# Record explicitly specified properties for this variant
# We do this after inheriting parents' properties, so that
# they affect other variants, derived from this one.
__variant_explicit_properties[name] = explicit_properties
feature.extend('variant', [name])
feature.compose ("<variant>" + name, explicit_properties.all())
|
[
"def",
"variant",
"(",
"name",
",",
"parents_or_properties",
",",
"explicit_properties",
"=",
"[",
"]",
")",
":",
"parents",
"=",
"[",
"]",
"if",
"not",
"explicit_properties",
":",
"explicit_properties",
"=",
"parents_or_properties",
"else",
":",
"parents",
"=",
"parents_or_properties",
"inherited",
"=",
"property_set",
".",
"empty",
"(",
")",
"if",
"parents",
":",
"# If we allow multiple parents, we'd have to to check for conflicts",
"# between base variants, and there was no demand for so to bother.",
"if",
"len",
"(",
"parents",
")",
">",
"1",
":",
"raise",
"BaseException",
"(",
"\"Multiple base variants are not yet supported\"",
")",
"p",
"=",
"parents",
"[",
"0",
"]",
"# TODO: the check may be stricter",
"if",
"not",
"feature",
".",
"is_implicit_value",
"(",
"p",
")",
":",
"raise",
"BaseException",
"(",
"\"Invalid base varaint '%s'\"",
"%",
"p",
")",
"inherited",
"=",
"__variant_explicit_properties",
"[",
"p",
"]",
"explicit_properties",
"=",
"property_set",
".",
"create_with_validation",
"(",
"explicit_properties",
")",
"explicit_properties",
"=",
"inherited",
".",
"refine",
"(",
"explicit_properties",
")",
"# Record explicitly specified properties for this variant",
"# We do this after inheriting parents' properties, so that",
"# they affect other variants, derived from this one.",
"__variant_explicit_properties",
"[",
"name",
"]",
"=",
"explicit_properties",
"feature",
".",
"extend",
"(",
"'variant'",
",",
"[",
"name",
"]",
")",
"feature",
".",
"compose",
"(",
"\"<variant>\"",
"+",
"name",
",",
"explicit_properties",
".",
"all",
"(",
")",
")"
] |
https://github.com/blackberry/Boost/blob/fc90c3fde129c62565c023f091eddc4a7ed9902b/tools/build/v2/tools/builtin.py#L33-L82
|
||
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/gtk/_core.py
|
python
|
Image.SetMaskFromImage
|
(*args, **kwargs)
|
return _core_.Image_SetMaskFromImage(*args, **kwargs)
|
SetMaskFromImage(self, Image mask, byte mr, byte mg, byte mb) -> bool
Sets the image's mask so that the pixels that have RGB value of
``(mr,mg,mb)`` in ``mask`` will be masked in this image. This is done
by first finding an unused colour in the image, setting this colour as
the mask colour and then using this colour to draw all pixels in the
image who corresponding pixel in mask has given RGB value.
Returns ``False`` if ``mask`` does not have same dimensions as the
image or if there is no unused colour left. Returns ``True`` if the
mask was successfully applied.
Note that this method involves computing the histogram, which is
computationally intensive operation.
|
SetMaskFromImage(self, Image mask, byte mr, byte mg, byte mb) -> bool
|
[
"SetMaskFromImage",
"(",
"self",
"Image",
"mask",
"byte",
"mr",
"byte",
"mg",
"byte",
"mb",
")",
"-",
">",
"bool"
] |
def SetMaskFromImage(*args, **kwargs):
"""
SetMaskFromImage(self, Image mask, byte mr, byte mg, byte mb) -> bool
Sets the image's mask so that the pixels that have RGB value of
``(mr,mg,mb)`` in ``mask`` will be masked in this image. This is done
by first finding an unused colour in the image, setting this colour as
the mask colour and then using this colour to draw all pixels in the
image who corresponding pixel in mask has given RGB value.
Returns ``False`` if ``mask`` does not have same dimensions as the
image or if there is no unused colour left. Returns ``True`` if the
mask was successfully applied.
Note that this method involves computing the histogram, which is
computationally intensive operation.
"""
return _core_.Image_SetMaskFromImage(*args, **kwargs)
|
[
"def",
"SetMaskFromImage",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_core_",
".",
"Image_SetMaskFromImage",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_core.py#L3138-L3155
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/gtk/_controls.py
|
python
|
ListCtrl.HasColumnOrderSupport
|
(*args, **kwargs)
|
return _controls_.ListCtrl_HasColumnOrderSupport(*args, **kwargs)
|
HasColumnOrderSupport() -> bool
|
HasColumnOrderSupport() -> bool
|
[
"HasColumnOrderSupport",
"()",
"-",
">",
"bool"
] |
def HasColumnOrderSupport(*args, **kwargs):
"""HasColumnOrderSupport() -> bool"""
return _controls_.ListCtrl_HasColumnOrderSupport(*args, **kwargs)
|
[
"def",
"HasColumnOrderSupport",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_controls_",
".",
"ListCtrl_HasColumnOrderSupport",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_controls.py#L4476-L4478
|
|
yyzybb537/libgo
|
4af17b7c67643c4d54aa354dcc77963ea07847d0
|
third_party/boost.context/tools/build/src/build/virtual_target.py
|
python
|
Action.actualize_sources
|
(self, sources, prop_set)
|
Creates actual jam targets for sources. Initializes two member
variables:
'self.actual_sources_' -- sources which are passed to updating action
'self.dependency_only_sources_' -- sources which are made dependencies, but
are not used otherwise.
New values will be *appended* to the variables. They may be non-empty,
if caller wants it.
|
Creates actual jam targets for sources. Initializes two member
variables:
'self.actual_sources_' -- sources which are passed to updating action
'self.dependency_only_sources_' -- sources which are made dependencies, but
are not used otherwise.
|
[
"Creates",
"actual",
"jam",
"targets",
"for",
"sources",
".",
"Initializes",
"two",
"member",
"variables",
":",
"self",
".",
"actual_sources_",
"--",
"sources",
"which",
"are",
"passed",
"to",
"updating",
"action",
"self",
".",
"dependency_only_sources_",
"--",
"sources",
"which",
"are",
"made",
"dependencies",
"but",
"are",
"not",
"used",
"otherwise",
"."
] |
def actualize_sources (self, sources, prop_set):
""" Creates actual jam targets for sources. Initializes two member
variables:
'self.actual_sources_' -- sources which are passed to updating action
'self.dependency_only_sources_' -- sources which are made dependencies, but
are not used otherwise.
New values will be *appended* to the variables. They may be non-empty,
if caller wants it.
"""
assert is_iterable_typed(sources, VirtualTarget)
assert isinstance(prop_set, property_set.PropertySet)
dependencies = self.properties_.get ('<dependency>')
self.dependency_only_sources_ += self.actualize_source_type (dependencies, prop_set)
self.actual_sources_ += self.actualize_source_type (sources, prop_set)
# This is used to help bjam find dependencies in generated headers
# in other main targets.
# Say:
#
# make a.h : ....... ;
# exe hello : hello.cpp : <implicit-dependency>a.h ;
#
# However, for bjam to find the dependency the generated target must
# be actualized (i.e. have the jam target). In the above case,
# if we're building just hello ("bjam hello"), 'a.h' won't be
# actualized unless we do it here.
implicit = self.properties_.get("<implicit-dependency>")
for i in implicit:
i.actualize()
|
[
"def",
"actualize_sources",
"(",
"self",
",",
"sources",
",",
"prop_set",
")",
":",
"assert",
"is_iterable_typed",
"(",
"sources",
",",
"VirtualTarget",
")",
"assert",
"isinstance",
"(",
"prop_set",
",",
"property_set",
".",
"PropertySet",
")",
"dependencies",
"=",
"self",
".",
"properties_",
".",
"get",
"(",
"'<dependency>'",
")",
"self",
".",
"dependency_only_sources_",
"+=",
"self",
".",
"actualize_source_type",
"(",
"dependencies",
",",
"prop_set",
")",
"self",
".",
"actual_sources_",
"+=",
"self",
".",
"actualize_source_type",
"(",
"sources",
",",
"prop_set",
")",
"# This is used to help bjam find dependencies in generated headers",
"# in other main targets.",
"# Say:",
"#",
"# make a.h : ....... ;",
"# exe hello : hello.cpp : <implicit-dependency>a.h ;",
"#",
"# However, for bjam to find the dependency the generated target must",
"# be actualized (i.e. have the jam target). In the above case,",
"# if we're building just hello (\"bjam hello\"), 'a.h' won't be",
"# actualized unless we do it here.",
"implicit",
"=",
"self",
".",
"properties_",
".",
"get",
"(",
"\"<implicit-dependency>\"",
")",
"for",
"i",
"in",
"implicit",
":",
"i",
".",
"actualize",
"(",
")"
] |
https://github.com/yyzybb537/libgo/blob/4af17b7c67643c4d54aa354dcc77963ea07847d0/third_party/boost.context/tools/build/src/build/virtual_target.py#L883-L914
|
||
NVIDIA-Merlin/HugeCTR
|
b596bcc44e14bb0c62c4f7e9c0b55301d94f2154
|
tutorial/dump_to_tf/dump.py
|
python
|
DumpToTF.parse_dense
|
(self, layer_bytes, layer_type, **kwargs)
|
get one layer weights at a time.
|
get one layer weights at a time.
|
[
"get",
"one",
"layer",
"weights",
"at",
"a",
"time",
"."
] |
def parse_dense(self, layer_bytes, layer_type, **kwargs):
"""
get one layer weights at a time.
"""
if self.model_content is None:
self.parse_json()
self.offset = 0
with open(self.dense_model_name, 'rb') as file:
print("[INFO] begin to parse dense weights: %s" %layer_type)
file.seek(self.offset, 0)
buffer = file.read(layer_bytes)
if layer_type == "BatchNorm":
# TODO
pass
elif layer_type == "InnerProduct":
in_feature = kwargs["in_feature"]
out_feature = kwargs["out_feature"]
weight = struct.unpack(str(in_feature * out_feature) + "f", buffer[ : in_feature * out_feature * 4])
bias = struct.unpack(str(out_feature) + "f", buffer[in_feature * out_feature * 4 : ])
weight = np.reshape(np.float32(weight), newshape=(in_feature, out_feature))
bias = np.reshape(np.float32(bias), newshape=(1, out_feature))
self.offset += layer_bytes
return weight, bias
elif layer_type == "MultiCross":
vec_length = kwargs["vec_length"]
num_layers = kwargs["num_layers"]
weights = []
biases = []
each_layer_bytes = layer_bytes // num_layers
for i in range(num_layers):
weight = struct.unpack(str(vec_length) + "f", buffer[i*each_layer_bytes : i*each_layer_bytes + vec_length * 4])
bias = struct.unpack(str(vec_length) + "f", buffer[i*each_layer_bytes + vec_length * 4 : (i+1)*each_layer_bytes])
weights.append(np.reshape(np.float32(weight), newshape=(1, len(weight))))
biases.append(np.reshape(np.float32(bias), newshape=(1, len(bias))))
self.offset += layer_bytes
return weights, biases
elif layer_type == "WeightMultiply":
# TODO
pass
|
[
"def",
"parse_dense",
"(",
"self",
",",
"layer_bytes",
",",
"layer_type",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"self",
".",
"model_content",
"is",
"None",
":",
"self",
".",
"parse_json",
"(",
")",
"self",
".",
"offset",
"=",
"0",
"with",
"open",
"(",
"self",
".",
"dense_model_name",
",",
"'rb'",
")",
"as",
"file",
":",
"print",
"(",
"\"[INFO] begin to parse dense weights: %s\"",
"%",
"layer_type",
")",
"file",
".",
"seek",
"(",
"self",
".",
"offset",
",",
"0",
")",
"buffer",
"=",
"file",
".",
"read",
"(",
"layer_bytes",
")",
"if",
"layer_type",
"==",
"\"BatchNorm\"",
":",
"# TODO",
"pass",
"elif",
"layer_type",
"==",
"\"InnerProduct\"",
":",
"in_feature",
"=",
"kwargs",
"[",
"\"in_feature\"",
"]",
"out_feature",
"=",
"kwargs",
"[",
"\"out_feature\"",
"]",
"weight",
"=",
"struct",
".",
"unpack",
"(",
"str",
"(",
"in_feature",
"*",
"out_feature",
")",
"+",
"\"f\"",
",",
"buffer",
"[",
":",
"in_feature",
"*",
"out_feature",
"*",
"4",
"]",
")",
"bias",
"=",
"struct",
".",
"unpack",
"(",
"str",
"(",
"out_feature",
")",
"+",
"\"f\"",
",",
"buffer",
"[",
"in_feature",
"*",
"out_feature",
"*",
"4",
":",
"]",
")",
"weight",
"=",
"np",
".",
"reshape",
"(",
"np",
".",
"float32",
"(",
"weight",
")",
",",
"newshape",
"=",
"(",
"in_feature",
",",
"out_feature",
")",
")",
"bias",
"=",
"np",
".",
"reshape",
"(",
"np",
".",
"float32",
"(",
"bias",
")",
",",
"newshape",
"=",
"(",
"1",
",",
"out_feature",
")",
")",
"self",
".",
"offset",
"+=",
"layer_bytes",
"return",
"weight",
",",
"bias",
"elif",
"layer_type",
"==",
"\"MultiCross\"",
":",
"vec_length",
"=",
"kwargs",
"[",
"\"vec_length\"",
"]",
"num_layers",
"=",
"kwargs",
"[",
"\"num_layers\"",
"]",
"weights",
"=",
"[",
"]",
"biases",
"=",
"[",
"]",
"each_layer_bytes",
"=",
"layer_bytes",
"//",
"num_layers",
"for",
"i",
"in",
"range",
"(",
"num_layers",
")",
":",
"weight",
"=",
"struct",
".",
"unpack",
"(",
"str",
"(",
"vec_length",
")",
"+",
"\"f\"",
",",
"buffer",
"[",
"i",
"*",
"each_layer_bytes",
":",
"i",
"*",
"each_layer_bytes",
"+",
"vec_length",
"*",
"4",
"]",
")",
"bias",
"=",
"struct",
".",
"unpack",
"(",
"str",
"(",
"vec_length",
")",
"+",
"\"f\"",
",",
"buffer",
"[",
"i",
"*",
"each_layer_bytes",
"+",
"vec_length",
"*",
"4",
":",
"(",
"i",
"+",
"1",
")",
"*",
"each_layer_bytes",
"]",
")",
"weights",
".",
"append",
"(",
"np",
".",
"reshape",
"(",
"np",
".",
"float32",
"(",
"weight",
")",
",",
"newshape",
"=",
"(",
"1",
",",
"len",
"(",
"weight",
")",
")",
")",
")",
"biases",
".",
"append",
"(",
"np",
".",
"reshape",
"(",
"np",
".",
"float32",
"(",
"bias",
")",
",",
"newshape",
"=",
"(",
"1",
",",
"len",
"(",
"bias",
")",
")",
")",
")",
"self",
".",
"offset",
"+=",
"layer_bytes",
"return",
"weights",
",",
"biases",
"elif",
"layer_type",
"==",
"\"WeightMultiply\"",
":",
"# TODO",
"pass"
] |
https://github.com/NVIDIA-Merlin/HugeCTR/blob/b596bcc44e14bb0c62c4f7e9c0b55301d94f2154/tutorial/dump_to_tf/dump.py#L123-L177
|
||
google/llvm-propeller
|
45c226984fe8377ebfb2ad7713c680d652ba678d
|
clang/utils/analyzer/CmpRuns.py
|
python
|
load_results
|
(results: ResultsDirectory, delete_empty: bool = True,
verbose_log: Optional[str] = None)
|
return load_results_from_single_run(SingleRunInfo(results,
verbose_log),
delete_empty)
|
Backwards compatibility API.
|
Backwards compatibility API.
|
[
"Backwards",
"compatibility",
"API",
"."
] |
def load_results(results: ResultsDirectory, delete_empty: bool = True,
verbose_log: Optional[str] = None) -> AnalysisRun:
"""
Backwards compatibility API.
"""
return load_results_from_single_run(SingleRunInfo(results,
verbose_log),
delete_empty)
|
[
"def",
"load_results",
"(",
"results",
":",
"ResultsDirectory",
",",
"delete_empty",
":",
"bool",
"=",
"True",
",",
"verbose_log",
":",
"Optional",
"[",
"str",
"]",
"=",
"None",
")",
"->",
"AnalysisRun",
":",
"return",
"load_results_from_single_run",
"(",
"SingleRunInfo",
"(",
"results",
",",
"verbose_log",
")",
",",
"delete_empty",
")"
] |
https://github.com/google/llvm-propeller/blob/45c226984fe8377ebfb2ad7713c680d652ba678d/clang/utils/analyzer/CmpRuns.py#L273-L280
|
|
GJDuck/LowFat
|
ecf6a0f0fa1b73a27a626cf493cc39e477b6faea
|
llvm-4.0.0.src/bindings/python/llvm/object.py
|
python
|
ObjectFile.get_symbols
|
(self, cache=False)
|
Obtain the symbols in this object file.
This is a generator for llvm.object.Symbol instances.
Each Symbol instance is a limited-use object. See this module's
documentation on iterators for more.
|
Obtain the symbols in this object file.
|
[
"Obtain",
"the",
"symbols",
"in",
"this",
"object",
"file",
"."
] |
def get_symbols(self, cache=False):
"""Obtain the symbols in this object file.
This is a generator for llvm.object.Symbol instances.
Each Symbol instance is a limited-use object. See this module's
documentation on iterators for more.
"""
symbols = lib.LLVMGetSymbols(self)
last = None
while True:
if lib.LLVMIsSymbolIteratorAtEnd(self, symbols):
break
last = Symbol(symbols, self)
if cache:
last.cache()
yield last
lib.LLVMMoveToNextSymbol(symbols)
last.expire()
if last is not None:
last.expire()
lib.LLVMDisposeSymbolIterator(symbols)
|
[
"def",
"get_symbols",
"(",
"self",
",",
"cache",
"=",
"False",
")",
":",
"symbols",
"=",
"lib",
".",
"LLVMGetSymbols",
"(",
"self",
")",
"last",
"=",
"None",
"while",
"True",
":",
"if",
"lib",
".",
"LLVMIsSymbolIteratorAtEnd",
"(",
"self",
",",
"symbols",
")",
":",
"break",
"last",
"=",
"Symbol",
"(",
"symbols",
",",
"self",
")",
"if",
"cache",
":",
"last",
".",
"cache",
"(",
")",
"yield",
"last",
"lib",
".",
"LLVMMoveToNextSymbol",
"(",
"symbols",
")",
"last",
".",
"expire",
"(",
")",
"if",
"last",
"is",
"not",
"None",
":",
"last",
".",
"expire",
"(",
")",
"lib",
".",
"LLVMDisposeSymbolIterator",
"(",
"symbols",
")"
] |
https://github.com/GJDuck/LowFat/blob/ecf6a0f0fa1b73a27a626cf493cc39e477b6faea/llvm-4.0.0.src/bindings/python/llvm/object.py#L151-L177
|
||
hakuna-m/wubiuefi
|
caec1af0a09c78fd5a345180ada1fe45e0c63493
|
src/openpgp/sap/crypto.py
|
python
|
encrypt_public_session
|
(keypkt, key, symalg)
|
return create_Packet(PKT_PUBKEYSESKEY, sesbody._d)
|
Create a public-key encrypted session key.
:Parameters:
- `keypkt`: either an `OpenPGP.packet.PublicKey.PublicKey` (or
subclass) instance or encryption passphrase string
- `key`: string encryptrion algorithm used for `symalg`
- `symalg`: integer symmetric encryption algorithm constant
:Returns: `OpenPGP.packet.PublicKeyEncryptedSessionKey.PublicKeyEncryptedSessionKey` instance
|
Create a public-key encrypted session key.
|
[
"Create",
"a",
"public",
"-",
"key",
"encrypted",
"session",
"key",
"."
] |
def encrypt_public_session(keypkt, key, symalg):
"""Create a public-key encrypted session key.
:Parameters:
- `keypkt`: either an `OpenPGP.packet.PublicKey.PublicKey` (or
subclass) instance or encryption passphrase string
- `key`: string encryptrion algorithm used for `symalg`
- `symalg`: integer symmetric encryption algorithm constant
:Returns: `OpenPGP.packet.PublicKeyEncryptedSessionKey.PublicKeyEncryptedSessionKey` instance
"""
from openpgp.sap.pkt.Packet import create_Packet
from openpgp.sap.pkt.PublicKeyEncryptedSessionKey import create_PublicKeyEncryptedSessionKeyBody
pubalg = keypkt.body.alg
rnd_prefix = []
i = 0 # fixing the "intended length" business to 127
while i <= 63 - len(key): # seems proper, but probably inefficient
rnd_byte = gen_random(1)
if '\x00' != rnd_byte:
rnd_prefix.append(rnd_byte)
i += 1
chksum = STN.int2str(STN.checksum(key))[:2]
if pubalg in [ASYM_RSA_EOS, ASYM_RSA_E]:
key_tup = (keypkt.body.RSA_n.value, keypkt.body.RSA_e.value)
elif pubalg in [ASYM_ELGAMAL_EOS, ASYM_ELGAMAL_E]:
key_tup = (keypkt.body.ELGAMAL_p.value, keypkt.body.ELGAMAL_g.value, keypkt.body.ELGAMAL_y.value)
else:
raise NotImplementedError("Unsupported public key algorithm->(%s)." % pubalg)
padded_key = ''.join(['\x02', ''.join(rnd_prefix), '\x00',
STN.int2str(symalg)[0], key, chksum])
cipher_tup = encrypt_public(pubalg, padded_key, key_tup)
sesbody = create_PublicKeyEncryptedSessionKeyBody(
keyid=keypkt.body.id, alg=pubalg, mpis=cipher_tup)
return create_Packet(PKT_PUBKEYSESKEY, sesbody._d)
|
[
"def",
"encrypt_public_session",
"(",
"keypkt",
",",
"key",
",",
"symalg",
")",
":",
"from",
"openpgp",
".",
"sap",
".",
"pkt",
".",
"Packet",
"import",
"create_Packet",
"from",
"openpgp",
".",
"sap",
".",
"pkt",
".",
"PublicKeyEncryptedSessionKey",
"import",
"create_PublicKeyEncryptedSessionKeyBody",
"pubalg",
"=",
"keypkt",
".",
"body",
".",
"alg",
"rnd_prefix",
"=",
"[",
"]",
"i",
"=",
"0",
"# fixing the \"intended length\" business to 127",
"while",
"i",
"<=",
"63",
"-",
"len",
"(",
"key",
")",
":",
"# seems proper, but probably inefficient",
"rnd_byte",
"=",
"gen_random",
"(",
"1",
")",
"if",
"'\\x00'",
"!=",
"rnd_byte",
":",
"rnd_prefix",
".",
"append",
"(",
"rnd_byte",
")",
"i",
"+=",
"1",
"chksum",
"=",
"STN",
".",
"int2str",
"(",
"STN",
".",
"checksum",
"(",
"key",
")",
")",
"[",
":",
"2",
"]",
"if",
"pubalg",
"in",
"[",
"ASYM_RSA_EOS",
",",
"ASYM_RSA_E",
"]",
":",
"key_tup",
"=",
"(",
"keypkt",
".",
"body",
".",
"RSA_n",
".",
"value",
",",
"keypkt",
".",
"body",
".",
"RSA_e",
".",
"value",
")",
"elif",
"pubalg",
"in",
"[",
"ASYM_ELGAMAL_EOS",
",",
"ASYM_ELGAMAL_E",
"]",
":",
"key_tup",
"=",
"(",
"keypkt",
".",
"body",
".",
"ELGAMAL_p",
".",
"value",
",",
"keypkt",
".",
"body",
".",
"ELGAMAL_g",
".",
"value",
",",
"keypkt",
".",
"body",
".",
"ELGAMAL_y",
".",
"value",
")",
"else",
":",
"raise",
"NotImplementedError",
"(",
"\"Unsupported public key algorithm->(%s).\"",
"%",
"pubalg",
")",
"padded_key",
"=",
"''",
".",
"join",
"(",
"[",
"'\\x02'",
",",
"''",
".",
"join",
"(",
"rnd_prefix",
")",
",",
"'\\x00'",
",",
"STN",
".",
"int2str",
"(",
"symalg",
")",
"[",
"0",
"]",
",",
"key",
",",
"chksum",
"]",
")",
"cipher_tup",
"=",
"encrypt_public",
"(",
"pubalg",
",",
"padded_key",
",",
"key_tup",
")",
"sesbody",
"=",
"create_PublicKeyEncryptedSessionKeyBody",
"(",
"keyid",
"=",
"keypkt",
".",
"body",
".",
"id",
",",
"alg",
"=",
"pubalg",
",",
"mpis",
"=",
"cipher_tup",
")",
"return",
"create_Packet",
"(",
"PKT_PUBKEYSESKEY",
",",
"sesbody",
".",
"_d",
")"
] |
https://github.com/hakuna-m/wubiuefi/blob/caec1af0a09c78fd5a345180ada1fe45e0c63493/src/openpgp/sap/crypto.py#L1218-L1262
|
|
feelpp/feelpp
|
2d547ed701cc5adb01639185b4a8eb47940367c7
|
toolboxes/pyfeelpp-toolboxes/feelpp/toolboxes/solid/__init__.py
|
python
|
solid
|
(dim=2, orderDisp=1, worldComm=None, keyword="solid", subprefix="", modelRep=None)
|
return _csms[key](prefix="solid", keyword=keyword, worldComm=worldComm, subprefix="", modelRep=modelRep)
|
create a solid toolbox solver
Keyword arguments:
dim -- the dimension (default: 2)
orderDisp -- the polynomial order for the displacement (default: 1)
worldComm -- the parallel communicator for the mesh (default: core.Environment::worldCommPtr())
|
create a solid toolbox solver
Keyword arguments:
dim -- the dimension (default: 2)
orderDisp -- the polynomial order for the displacement (default: 1)
worldComm -- the parallel communicator for the mesh (default: core.Environment::worldCommPtr())
|
[
"create",
"a",
"solid",
"toolbox",
"solver",
"Keyword",
"arguments",
":",
"dim",
"--",
"the",
"dimension",
"(",
"default",
":",
"2",
")",
"orderDisp",
"--",
"the",
"polynomial",
"order",
"for",
"the",
"displacement",
"(",
"default",
":",
"1",
")",
"worldComm",
"--",
"the",
"parallel",
"communicator",
"for",
"the",
"mesh",
"(",
"default",
":",
"core",
".",
"Environment",
"::",
"worldCommPtr",
"()",
")"
] |
def solid(dim=2, orderDisp=1, worldComm=None, keyword="solid", subprefix="", modelRep=None):
"""create a solid toolbox solver
Keyword arguments:
dim -- the dimension (default: 2)
orderDisp -- the polynomial order for the displacement (default: 1)
worldComm -- the parallel communicator for the mesh (default: core.Environment::worldCommPtr())
"""
if not has_csm:
raise Exception('Solid toolbox is not enabled in Feel++')
if worldComm is None:
worldComm=feelpp.Environment.worldCommPtr()
key='solid('+str(dim)+','+str(orderDisp)+')'
if worldComm.isMasterRank():
print(key)
if key not in _csms:
raise RuntimeError('Solid solver '+key+' not existing')
if modelRep is None:
modelRep = ModelBaseRepository()
return _csms[key](prefix="solid", keyword=keyword, worldComm=worldComm, subprefix="", modelRep=modelRep)
|
[
"def",
"solid",
"(",
"dim",
"=",
"2",
",",
"orderDisp",
"=",
"1",
",",
"worldComm",
"=",
"None",
",",
"keyword",
"=",
"\"solid\"",
",",
"subprefix",
"=",
"\"\"",
",",
"modelRep",
"=",
"None",
")",
":",
"if",
"not",
"has_csm",
":",
"raise",
"Exception",
"(",
"'Solid toolbox is not enabled in Feel++'",
")",
"if",
"worldComm",
"is",
"None",
":",
"worldComm",
"=",
"feelpp",
".",
"Environment",
".",
"worldCommPtr",
"(",
")",
"key",
"=",
"'solid('",
"+",
"str",
"(",
"dim",
")",
"+",
"','",
"+",
"str",
"(",
"orderDisp",
")",
"+",
"')'",
"if",
"worldComm",
".",
"isMasterRank",
"(",
")",
":",
"print",
"(",
"key",
")",
"if",
"key",
"not",
"in",
"_csms",
":",
"raise",
"RuntimeError",
"(",
"'Solid solver '",
"+",
"key",
"+",
"' not existing'",
")",
"if",
"modelRep",
"is",
"None",
":",
"modelRep",
"=",
"ModelBaseRepository",
"(",
")",
"return",
"_csms",
"[",
"key",
"]",
"(",
"prefix",
"=",
"\"solid\"",
",",
"keyword",
"=",
"keyword",
",",
"worldComm",
"=",
"worldComm",
",",
"subprefix",
"=",
"\"\"",
",",
"modelRep",
"=",
"modelRep",
")"
] |
https://github.com/feelpp/feelpp/blob/2d547ed701cc5adb01639185b4a8eb47940367c7/toolboxes/pyfeelpp-toolboxes/feelpp/toolboxes/solid/__init__.py#L21-L39
|
|
wlanjie/AndroidFFmpeg
|
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
|
tools/fdk-aac-build/x86/toolchain/lib/python2.7/inspect.py
|
python
|
isgenerator
|
(object)
|
return isinstance(object, types.GeneratorType)
|
Return true if the object is a generator.
Generator objects provide these attributes:
__iter__ defined to support interation over container
close raises a new GeneratorExit exception inside the
generator to terminate the iteration
gi_code code object
gi_frame frame object or possibly None once the generator has
been exhausted
gi_running set to 1 when generator is executing, 0 otherwise
next return the next item from the container
send resumes the generator and "sends" a value that becomes
the result of the current yield-expression
throw used to raise an exception inside the generator
|
Return true if the object is a generator.
|
[
"Return",
"true",
"if",
"the",
"object",
"is",
"a",
"generator",
"."
] |
def isgenerator(object):
"""Return true if the object is a generator.
Generator objects provide these attributes:
__iter__ defined to support interation over container
close raises a new GeneratorExit exception inside the
generator to terminate the iteration
gi_code code object
gi_frame frame object or possibly None once the generator has
been exhausted
gi_running set to 1 when generator is executing, 0 otherwise
next return the next item from the container
send resumes the generator and "sends" a value that becomes
the result of the current yield-expression
throw used to raise an exception inside the generator"""
return isinstance(object, types.GeneratorType)
|
[
"def",
"isgenerator",
"(",
"object",
")",
":",
"return",
"isinstance",
"(",
"object",
",",
"types",
".",
"GeneratorType",
")"
] |
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/lib/python2.7/inspect.py#L164-L179
|
|
mongodb/mongo
|
d8ff665343ad29cf286ee2cf4a1960d29371937b
|
buildscripts/resmokelib/setup_multiversion/setup_multiversion.py
|
python
|
SetupMultiversion.get_urls
|
(self, version: str, buildvariant_name: Optional[str] = None)
|
return EvgURLInfo(urls=urls, evg_version_id=evg_version.version_id)
|
Return multiversion urls for a given version (as binary version or commit hash or evergreen_version_id).
|
Return multiversion urls for a given version (as binary version or commit hash or evergreen_version_id).
|
[
"Return",
"multiversion",
"urls",
"for",
"a",
"given",
"version",
"(",
"as",
"binary",
"version",
"or",
"commit",
"hash",
"or",
"evergreen_version_id",
")",
"."
] |
def get_urls(self, version: str, buildvariant_name: Optional[str] = None) -> EvgURLInfo:
"""Return multiversion urls for a given version (as binary version or commit hash or evergreen_version_id)."""
evg_version = evergreen_conn.get_evergreen_version(self.evg_api, version)
if evg_version is None:
git_tag, commit_hash = github_conn.get_git_tag_and_commit(self.github_oauth_token,
version)
LOGGER.info("Found git attributes.", git_tag=git_tag, commit_hash=commit_hash)
evg_version = evergreen_conn.get_evergreen_version(self.evg_api, commit_hash)
if evg_version is None:
return EvgURLInfo()
if not buildvariant_name:
evg_project = evg_version.project_identifier
LOGGER.debug("Found evergreen project.", evergreen_project=evg_project)
try:
major_minor_version = re.findall(r"\d+\.\d+", evg_project)[-1]
except IndexError:
major_minor_version = "master"
buildvariant_name = self.get_buildvariant_name(major_minor_version)
LOGGER.debug("Found buildvariant.", buildvariant_name=buildvariant_name)
if buildvariant_name not in evg_version.build_variants_map:
raise ValueError(
f"Buildvariant {buildvariant_name} not found in evergreen. "
f"Available buildvariants can be found in {config.SETUP_MULTIVERSION_CONFIG}.")
urls = evergreen_conn.get_compile_artifact_urls(self.evg_api, evg_version,
buildvariant_name,
ignore_failed_push=self.ignore_failed_push)
return EvgURLInfo(urls=urls, evg_version_id=evg_version.version_id)
|
[
"def",
"get_urls",
"(",
"self",
",",
"version",
":",
"str",
",",
"buildvariant_name",
":",
"Optional",
"[",
"str",
"]",
"=",
"None",
")",
"->",
"EvgURLInfo",
":",
"evg_version",
"=",
"evergreen_conn",
".",
"get_evergreen_version",
"(",
"self",
".",
"evg_api",
",",
"version",
")",
"if",
"evg_version",
"is",
"None",
":",
"git_tag",
",",
"commit_hash",
"=",
"github_conn",
".",
"get_git_tag_and_commit",
"(",
"self",
".",
"github_oauth_token",
",",
"version",
")",
"LOGGER",
".",
"info",
"(",
"\"Found git attributes.\"",
",",
"git_tag",
"=",
"git_tag",
",",
"commit_hash",
"=",
"commit_hash",
")",
"evg_version",
"=",
"evergreen_conn",
".",
"get_evergreen_version",
"(",
"self",
".",
"evg_api",
",",
"commit_hash",
")",
"if",
"evg_version",
"is",
"None",
":",
"return",
"EvgURLInfo",
"(",
")",
"if",
"not",
"buildvariant_name",
":",
"evg_project",
"=",
"evg_version",
".",
"project_identifier",
"LOGGER",
".",
"debug",
"(",
"\"Found evergreen project.\"",
",",
"evergreen_project",
"=",
"evg_project",
")",
"try",
":",
"major_minor_version",
"=",
"re",
".",
"findall",
"(",
"r\"\\d+\\.\\d+\"",
",",
"evg_project",
")",
"[",
"-",
"1",
"]",
"except",
"IndexError",
":",
"major_minor_version",
"=",
"\"master\"",
"buildvariant_name",
"=",
"self",
".",
"get_buildvariant_name",
"(",
"major_minor_version",
")",
"LOGGER",
".",
"debug",
"(",
"\"Found buildvariant.\"",
",",
"buildvariant_name",
"=",
"buildvariant_name",
")",
"if",
"buildvariant_name",
"not",
"in",
"evg_version",
".",
"build_variants_map",
":",
"raise",
"ValueError",
"(",
"f\"Buildvariant {buildvariant_name} not found in evergreen. \"",
"f\"Available buildvariants can be found in {config.SETUP_MULTIVERSION_CONFIG}.\"",
")",
"urls",
"=",
"evergreen_conn",
".",
"get_compile_artifact_urls",
"(",
"self",
".",
"evg_api",
",",
"evg_version",
",",
"buildvariant_name",
",",
"ignore_failed_push",
"=",
"self",
".",
"ignore_failed_push",
")",
"return",
"EvgURLInfo",
"(",
"urls",
"=",
"urls",
",",
"evg_version_id",
"=",
"evg_version",
".",
"version_id",
")"
] |
https://github.com/mongodb/mongo/blob/d8ff665343ad29cf286ee2cf4a1960d29371937b/buildscripts/resmokelib/setup_multiversion/setup_multiversion.py#L338-L371
|
|
mantidproject/mantid
|
03deeb89254ec4289edb8771e0188c2090a02f32
|
qt/python/mantidqtinterfaces/mantidqtinterfaces/drill/view/DrillTableWidget.py
|
python
|
DrillTableWidget.setHorizontalHeaderLabels
|
(self, labels)
|
Overrides QTableWidget::setHorizontalHeaderLabels. This methods calls
the base class method and keeps a list of columns label for later use.
Args:
labels (list(str)): columns labels
|
Overrides QTableWidget::setHorizontalHeaderLabels. This methods calls
the base class method and keeps a list of columns label for later use.
|
[
"Overrides",
"QTableWidget",
"::",
"setHorizontalHeaderLabels",
".",
"This",
"methods",
"calls",
"the",
"base",
"class",
"method",
"and",
"keeps",
"a",
"list",
"of",
"columns",
"label",
"for",
"later",
"use",
"."
] |
def setHorizontalHeaderLabels(self, labels):
"""
Overrides QTableWidget::setHorizontalHeaderLabels. This methods calls
the base class method and keeps a list of columns label for later use.
Args:
labels (list(str)): columns labels
"""
super(DrillTableWidget, self).setHorizontalHeaderLabels(labels)
self._columns = list()
for label in labels:
self._columns.append(label)
|
[
"def",
"setHorizontalHeaderLabels",
"(",
"self",
",",
"labels",
")",
":",
"super",
"(",
"DrillTableWidget",
",",
"self",
")",
".",
"setHorizontalHeaderLabels",
"(",
"labels",
")",
"self",
".",
"_columns",
"=",
"list",
"(",
")",
"for",
"label",
"in",
"labels",
":",
"self",
".",
"_columns",
".",
"append",
"(",
"label",
")"
] |
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqtinterfaces/mantidqtinterfaces/drill/view/DrillTableWidget.py#L82-L93
|
||
rdkit/rdkit
|
ede860ae316d12d8568daf5ee800921c3389c84e
|
rdkit/ML/NaiveBayes/ClassificationModel.py
|
python
|
NaiveBayesClassifier.trainModel
|
(self)
|
We will assume at this point that the training examples have been set
We have to estmate the conditional probabilities for each of the (binned) descriptor
component give a outcome (or class). Also the probabilities for each class is estimated
|
We will assume at this point that the training examples have been set
|
[
"We",
"will",
"assume",
"at",
"this",
"point",
"that",
"the",
"training",
"examples",
"have",
"been",
"set"
] |
def trainModel(self):
  """ Estimate the model parameters from the stored training examples.

  Assumes the training examples have already been set on this model.
  Estimates the conditional probability of each (binned) descriptor
  component given an outcome (class), as well as the prior probability
  of each class.
  """
  # first estimate the class probabilities
  n = len(self._trainingExamples)
  for i in range(self._nClasses):
    self._classProbs[i] = 0.0
  # for i in range(self._nClasses):
  #   self._classProbs[i] = float(self._classProbs[i])/n
  # first find the bounds for each descriptor value if necessary
  if not self._useSigs and max(self._qBounds) > 0:
    self._computeQuantBounds()
  # now compute the probabilities
  # ncls counts, per class, how many training examples carry that class;
  # it is used below to normalize the raw per-bin counts.
  ncls = {}
  incr = 1.0 / n
  for eg in self._trainingExamples:
    cls = eg[-1]
    self._classProbs[cls] += incr
    ncls[cls] = ncls.get(cls, 0) + 1
    # NOTE: tmp aliases the per-class table, so the in-place updates
    # below accumulate directly into self._condProbs[cls].
    tmp = self._condProbs[cls]
    if not self._useSigs:
      for ai in self._attrs:
        bid = eg[ai]
        if self._qBounds[ai] > 0:
          bid = _getBinId(bid, self._QBoundVals[ai])
        tmp[ai][bid] += 1.0
    else:
      # signature (bit-vector) mode: each attribute is a bit, counted
      # into bin 1 when set and bin 0 when clear
      for ai in self._attrs:
        if eg[1].GetBit(ai):
          tmp[ai][1] += 1.0
        else:
          tmp[ai][0] += 1.0
  # for key in self._condProbs:
  # normalize the raw counts into conditional probabilities, class by class
  for cls in range(self._nClasses):
    if cls not in ncls:
      continue
    # cls = key[0]
    tmp = self._condProbs[cls]
    for ai in self._attrs:
      if not self._useSigs:
        nbnds = self._nPosVals[ai]
        if (self._qBounds[ai] > 0):
          nbnds = self._qBounds[ai]
      else:
        nbnds = 2
      for bid in range(nbnds):
        if self._mEstimateVal <= 0.0:
          # this is simply the fraction of the time this descriptor
          # component assumes this value for the examples that belong
          # to a specific class
          # self._condProbs[key] = (float(self._condProbs[key]))/ncls[cls]
          tmp[ai][bid] /= ncls[cls]
        else:
          # this is a slightly more complicated form - more appropriate for unbalanced data
          # see "Machine Learning" by Tom Mitchell section 6.9.1.1
          # this is the probability that this descriptor component can take this specific value
          # in the absence of any other information; it is simply the inverse of the number of
          # possible values 'npossible'
          # If we quantized this component then
          # npossible = 1 + len(self._QBoundVals[ai])
          # else if we did not quantize (the descriptor came quantized)
          # npossible = nPossibleVals[ai]
          # ai = key[1]
          pdesc = 0.0
          if self._qBounds[ai] > 0:
            pdesc = 1.0 / (1 + len(self._QBoundVals[ai]))
          elif (self._nPosVals[ai] > 0):
            pdesc = 1.0 / (self._nPosVals[ai])
          else:
            raise ValueError(
              'Neither Bounds set nor data pre-quantized for attribute ' + str(ai))
          tmp[ai][bid] += (self._mEstimateVal) * pdesc
          tmp[ai][bid] /= (ncls[cls] + self._mEstimateVal)
|
[
"def",
"trainModel",
"(",
"self",
")",
":",
"# first estimate the class probabilities",
"n",
"=",
"len",
"(",
"self",
".",
"_trainingExamples",
")",
"for",
"i",
"in",
"range",
"(",
"self",
".",
"_nClasses",
")",
":",
"self",
".",
"_classProbs",
"[",
"i",
"]",
"=",
"0.0",
"# for i in range(self._nClasses):",
"# self._classProbs[i] = float(self._classProbs[i])/n",
"# first find the bounds for each descriptor value if necessary",
"if",
"not",
"self",
".",
"_useSigs",
"and",
"max",
"(",
"self",
".",
"_qBounds",
")",
">",
"0",
":",
"self",
".",
"_computeQuantBounds",
"(",
")",
"# now compute the probabilities",
"ncls",
"=",
"{",
"}",
"incr",
"=",
"1.0",
"/",
"n",
"for",
"eg",
"in",
"self",
".",
"_trainingExamples",
":",
"cls",
"=",
"eg",
"[",
"-",
"1",
"]",
"self",
".",
"_classProbs",
"[",
"cls",
"]",
"+=",
"incr",
"ncls",
"[",
"cls",
"]",
"=",
"ncls",
".",
"get",
"(",
"cls",
",",
"0",
")",
"+",
"1",
"tmp",
"=",
"self",
".",
"_condProbs",
"[",
"cls",
"]",
"if",
"not",
"self",
".",
"_useSigs",
":",
"for",
"ai",
"in",
"self",
".",
"_attrs",
":",
"bid",
"=",
"eg",
"[",
"ai",
"]",
"if",
"self",
".",
"_qBounds",
"[",
"ai",
"]",
">",
"0",
":",
"bid",
"=",
"_getBinId",
"(",
"bid",
",",
"self",
".",
"_QBoundVals",
"[",
"ai",
"]",
")",
"tmp",
"[",
"ai",
"]",
"[",
"bid",
"]",
"+=",
"1.0",
"else",
":",
"for",
"ai",
"in",
"self",
".",
"_attrs",
":",
"if",
"eg",
"[",
"1",
"]",
".",
"GetBit",
"(",
"ai",
")",
":",
"tmp",
"[",
"ai",
"]",
"[",
"1",
"]",
"+=",
"1.0",
"else",
":",
"tmp",
"[",
"ai",
"]",
"[",
"0",
"]",
"+=",
"1.0",
"# for key in self._condProbs:",
"for",
"cls",
"in",
"range",
"(",
"self",
".",
"_nClasses",
")",
":",
"if",
"cls",
"not",
"in",
"ncls",
":",
"continue",
"# cls = key[0]",
"tmp",
"=",
"self",
".",
"_condProbs",
"[",
"cls",
"]",
"for",
"ai",
"in",
"self",
".",
"_attrs",
":",
"if",
"not",
"self",
".",
"_useSigs",
":",
"nbnds",
"=",
"self",
".",
"_nPosVals",
"[",
"ai",
"]",
"if",
"(",
"self",
".",
"_qBounds",
"[",
"ai",
"]",
">",
"0",
")",
":",
"nbnds",
"=",
"self",
".",
"_qBounds",
"[",
"ai",
"]",
"else",
":",
"nbnds",
"=",
"2",
"for",
"bid",
"in",
"range",
"(",
"nbnds",
")",
":",
"if",
"self",
".",
"_mEstimateVal",
"<=",
"0.0",
":",
"# this is simple the fraction of of time this descriptor component assume",
"# this value for the examples that belong a specific class",
"# self._condProbs[key] = (float(self._condProbs[key]))/ncls[cls]",
"tmp",
"[",
"ai",
"]",
"[",
"bid",
"]",
"/=",
"ncls",
"[",
"cls",
"]",
"else",
":",
"# this a bit more complicated form - more appropriate for unbalanced data",
"# see \"Machine Learning\" by Tom Mitchell section 6.9.1.1",
"# this is the probability that this descriptor component can take this specific value",
"# in the lack of any other information is is simply the inverse of the number of",
"# possible values 'npossible'",
"# If we quantized this component then",
"# npossible = 1 + len(self._QBoundVals[ai])",
"# else if we did no qunatize (the descriptor came quantized)",
"# npossible = nPossibleVals[ai]",
"# ai = key[1]",
"pdesc",
"=",
"0.0",
"if",
"self",
".",
"_qBounds",
"[",
"ai",
"]",
">",
"0",
":",
"pdesc",
"=",
"1.0",
"/",
"(",
"1",
"+",
"len",
"(",
"self",
".",
"_QBoundVals",
"[",
"ai",
"]",
")",
")",
"elif",
"(",
"self",
".",
"_nPosVals",
"[",
"ai",
"]",
">",
"0",
")",
":",
"pdesc",
"=",
"1.0",
"/",
"(",
"self",
".",
"_nPosVals",
"[",
"ai",
"]",
")",
"else",
":",
"raise",
"ValueError",
"(",
"'Neither Bounds set nor data pre-quantized for attribute '",
"+",
"str",
"(",
"ai",
")",
")",
"tmp",
"[",
"ai",
"]",
"[",
"bid",
"]",
"+=",
"(",
"self",
".",
"_mEstimateVal",
")",
"*",
"pdesc",
"tmp",
"[",
"ai",
"]",
"[",
"bid",
"]",
"/=",
"(",
"ncls",
"[",
"cls",
"]",
"+",
"self",
".",
"_mEstimateVal",
")"
] |
https://github.com/rdkit/rdkit/blob/ede860ae316d12d8568daf5ee800921c3389c84e/rdkit/ML/NaiveBayes/ClassificationModel.py#L158-L238
|
||
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
wx/lib/agw/aui/auibook.py
|
python
|
TabTextCtrl.AcceptChanges
|
(self)
|
return True
|
Accepts/refuses the changes made by the user.
|
Accepts/refuses the changes made by the user.
|
[
"Accepts",
"/",
"refuses",
"the",
"changes",
"made",
"by",
"the",
"user",
"."
] |
def AcceptChanges(self):
    """ Accepts/refuses the changes made by the user. """

    newLabel = self.GetValue()
    book = self._owner.GetParent()

    if newLabel == self._startValue:
        # The label was left untouched. The owner still needs to hear
        # that the user decided not to change it, so report the edit
        # as cancelled, then accept.
        book.OnRenameCancelled(self._pageIndex)
        return True

    if not book.OnRenameAccept(self._pageIndex, newLabel):
        # The owner vetoed the new label.
        return False

    # Change accepted: apply the new label to the page.
    book.SetPageText(self._pageIndex, newLabel)
    return True
|
[
"def",
"AcceptChanges",
"(",
"self",
")",
":",
"value",
"=",
"self",
".",
"GetValue",
"(",
")",
"notebook",
"=",
"self",
".",
"_owner",
".",
"GetParent",
"(",
")",
"if",
"value",
"==",
"self",
".",
"_startValue",
":",
"# nothing changed, always accept",
"# when an item remains unchanged, the owner",
"# needs to be notified that the user decided",
"# not to change the tree item label, and that",
"# the edit has been cancelled",
"notebook",
".",
"OnRenameCancelled",
"(",
"self",
".",
"_pageIndex",
")",
"return",
"True",
"if",
"not",
"notebook",
".",
"OnRenameAccept",
"(",
"self",
".",
"_pageIndex",
",",
"value",
")",
":",
"# vetoed by the user",
"return",
"False",
"# accepted, do rename the item",
"notebook",
".",
"SetPageText",
"(",
"self",
".",
"_pageIndex",
",",
"value",
")",
"return",
"True"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/agw/aui/auibook.py#L190-L212
|
|
tensorflow/tensorflow
|
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
|
tensorflow/python/distribute/cluster_resolver/tpu/tpu_cluster_resolver.py
|
python
|
TPUClusterResolver.connect
|
(tpu=None,
zone=None,
project=None)
|
return resolver
|
Initializes TPU and returns a TPUClusterResolver.
This API will connect to remote TPU cluster and initialize the TPU
hardwares. Example usage:
>>> resolver = tf.distribute.cluster_resolver.TPUClusterResolver.connect(
... tpu='')
It can be viewed as convenient wrapper of the following code:
>>> resolver = tf.distribute.cluster_resolver.TPUClusterResolver(tpu='')
>>> tf.config.experimental_connect_to_cluster(resolver)
>>> tf.tpu.experimental.initialize_tpu_system(resolver)
Args:
tpu: A string corresponding to the TPU to use. It can be the TPU name or
TPU worker gRPC address. If not set, it will try automatically resolve
the TPU address on Cloud TPUs.
zone: Zone where the TPUs are located. If omitted or empty, we will assume
that the zone of the TPU is the same as the zone of the GCE VM, which we
will try to discover from the GCE metadata service.
project: Name of the GCP project containing Cloud TPUs. If omitted or
empty, we will try to discover the project name of the GCE VM from the
GCE metadata service.
Returns:
An instance of TPUClusterResolver object.
Raises:
NotFoundError: If no TPU devices found in eager mode.
|
Initializes TPU and returns a TPUClusterResolver.
|
[
"Initializes",
"TPU",
"and",
"returns",
"a",
"TPUClusterResolver",
"."
] |
def connect(tpu=None, zone=None, project=None):
  """Initializes TPU and returns a TPUClusterResolver.
  This API will connect to remote TPU cluster and initialize the TPU
  hardwares. Example usage:
  >>> resolver = tf.distribute.cluster_resolver.TPUClusterResolver.connect(
  ...     tpu='')
  It can be viewed as convenient wrapper of the following code:
  >>> resolver = tf.distribute.cluster_resolver.TPUClusterResolver(tpu='')
  >>> tf.config.experimental_connect_to_cluster(resolver)
  >>> tf.tpu.experimental.initialize_tpu_system(resolver)
  Args:
    tpu: A string corresponding to the TPU to use. It can be the TPU name or
      TPU worker gRPC address. If not set, it will try automatically resolve
      the TPU address on Cloud TPUs.
    zone: Zone where the TPUs are located. If omitted or empty, we will assume
      that the zone of the TPU is the same as the zone of the GCE VM, which we
      will try to discover from the GCE metadata service.
    project: Name of the GCP project containing Cloud TPUs. If omitted or
      empty, we will try to discover the project name of the GCE VM from the
      GCE metadata service.
  Returns:
    An instance of TPUClusterResolver object.
  Raises:
    NotFoundError: If no TPU devices found in eager mode.
  """
  cluster_resolver = TPUClusterResolver(tpu, zone, project)
  # Imports are deferred to call time to avoid circular-import problems at
  # module load (hence the g-import-not-at-top suppressions).
  from tensorflow.python.eager import remote  # pylint: disable=g-import-not-at-top
  remote.connect_to_cluster(cluster_resolver)
  from tensorflow.python.tpu import tpu_strategy_util  # pylint: disable=g-import-not-at-top
  tpu_strategy_util.initialize_tpu_system(cluster_resolver)
  return cluster_resolver
|
[
"def",
"connect",
"(",
"tpu",
"=",
"None",
",",
"zone",
"=",
"None",
",",
"project",
"=",
"None",
")",
":",
"resolver",
"=",
"TPUClusterResolver",
"(",
"tpu",
",",
"zone",
",",
"project",
")",
"from",
"tensorflow",
".",
"python",
".",
"eager",
"import",
"remote",
"# pylint: disable=g-import-not-at-top",
"remote",
".",
"connect_to_cluster",
"(",
"resolver",
")",
"from",
"tensorflow",
".",
"python",
".",
"tpu",
"import",
"tpu_strategy_util",
"# pylint: disable=g-import-not-at-top",
"tpu_strategy_util",
".",
"initialize_tpu_system",
"(",
"resolver",
")",
"return",
"resolver"
] |
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/distribute/cluster_resolver/tpu/tpu_cluster_resolver.py#L71-L110
|
|
mindspore-ai/mindspore
|
fb8fd3338605bb34fa5cea054e535a8b1d753fab
|
mindspore/python/mindspore/ops/operations/_grad_ops.py
|
python
|
Conv3DBackpropFilter.__init__
|
(self,
out_channel,
kernel_size,
mode=1,
pad_mode="valid",
pad=0,
stride=(1, 1, 1, 1, 1),
dilation=(1, 1, 1, 1, 1),
group=1,
data_format="NCDHW")
|
Initialize Convolution
|
Initialize Convolution
|
[
"Initialize",
"Convolution"
] |
def __init__(self,
             out_channel,
             kernel_size,
             mode=1,
             pad_mode="valid",
             pad=0,
             stride=(1, 1, 1, 1, 1),
             dilation=(1, 1, 1, 1, 1),
             group=1,
             data_format="NCDHW"):
    """Initialize Convolution.

    Args:
        out_channel (int): number of output channels; must be positive.
        kernel_size (int or tuple): 3D convolution kernel size.
        mode (int): convolution mode; only 1 is supported.
        pad_mode (str): one of 'valid', 'same' or 'pad' (case-insensitive).
        pad (int or tuple): paddings; an int is broadcast to all 6 sides.
        stride (int or tuple): strides, normalized to a 5-tuple.
        dilation (int or tuple): dilations, normalized to a 5-tuple.
        group (int): number of groups; must be positive.
        data_format (str): only 'NCDHW' is supported.
    """
    self.init_prim_io_names(inputs=['x', 'out_backprop', 'filter_size'], outputs=['y'])
    self.out_channel = validator.check_positive_int(out_channel, 'out_channel', self.name)
    self.kernel_size = _check_3d_int_or_tuple('kernel_size', kernel_size, self.name)
    self.stride = _check_3d_int_or_tuple('stride', stride, self.name, allow_five=True, ret_five=True)
    self.add_prim_attr('strides', self.stride)
    self.dilation = _check_3d_int_or_tuple('dilation', dilation, self.name, allow_five=True, ret_five=True)
    self.add_prim_attr('dilations', self.dilation)
    validator.check_value_type('pad', pad, (int, tuple), self.name)
    if isinstance(pad, int):
        pad = (pad,) * 6
    validator.check_equal_int(len(pad), 6, 'pad size', self.name)
    # Bug fix: `self.pad` was read by add_prim_attr before ever being
    # assigned, which raised AttributeError at construction. Store the
    # normalized 6-tuple first, then register it as the attribute.
    self.pad = pad
    self.add_prim_attr('pad', self.pad)
    self.pad_list = pad
    self.add_prim_attr('pad_list', self.pad_list)
    self.pad_mode = validator.check_string(pad_mode.lower(), ['valid', 'same', 'pad'], 'pad_mode', self.name)
    # A non-zero explicit padding only makes sense with pad_mode='pad'.
    if self.pad_mode != 'pad' and self.pad_list != (0, 0, 0, 0, 0, 0):
        raise ValueError(f"For '{self.name}', when pad is not 0, pad_mode should be set as 'pad'.")
    if self.pad_mode == 'pad':
        for item in pad:
            validator.check_non_negative_int(item, 'pad item', self.name)
    self.add_prim_attr('pad_mode', self.pad_mode)
    self.mode = validator.check_equal_int(mode, 1, 'mode', self.name)
    self.add_prim_attr('mode', self.mode)
    self.group = validator.check_positive_int(group, 'group', self.name)
    self.add_prim_attr('groups', self.group)
    self.format = validator.check_string(data_format, ['NCDHW'], 'format', self.name)
    self.add_prim_attr('data_format', self.format)
|
[
"def",
"__init__",
"(",
"self",
",",
"out_channel",
",",
"kernel_size",
",",
"mode",
"=",
"1",
",",
"pad_mode",
"=",
"\"valid\"",
",",
"pad",
"=",
"0",
",",
"stride",
"=",
"(",
"1",
",",
"1",
",",
"1",
",",
"1",
",",
"1",
")",
",",
"dilation",
"=",
"(",
"1",
",",
"1",
",",
"1",
",",
"1",
",",
"1",
")",
",",
"group",
"=",
"1",
",",
"data_format",
"=",
"\"NCDHW\"",
")",
":",
"self",
".",
"init_prim_io_names",
"(",
"inputs",
"=",
"[",
"'x'",
",",
"'out_backprop'",
",",
"'filter_size'",
"]",
",",
"outputs",
"=",
"[",
"'y'",
"]",
")",
"self",
".",
"out_channel",
"=",
"validator",
".",
"check_positive_int",
"(",
"out_channel",
",",
"'out_channel'",
",",
"self",
".",
"name",
")",
"self",
".",
"kernel_size",
"=",
"_check_3d_int_or_tuple",
"(",
"'kernel_size'",
",",
"kernel_size",
",",
"self",
".",
"name",
")",
"self",
".",
"stride",
"=",
"_check_3d_int_or_tuple",
"(",
"'stride'",
",",
"stride",
",",
"self",
".",
"name",
",",
"allow_five",
"=",
"True",
",",
"ret_five",
"=",
"True",
")",
"self",
".",
"add_prim_attr",
"(",
"'strides'",
",",
"self",
".",
"stride",
")",
"self",
".",
"dilation",
"=",
"_check_3d_int_or_tuple",
"(",
"'dilation'",
",",
"dilation",
",",
"self",
".",
"name",
",",
"allow_five",
"=",
"True",
",",
"ret_five",
"=",
"True",
")",
"self",
".",
"add_prim_attr",
"(",
"'dilations'",
",",
"self",
".",
"dilation",
")",
"validator",
".",
"check_value_type",
"(",
"'pad'",
",",
"pad",
",",
"(",
"int",
",",
"tuple",
")",
",",
"self",
".",
"name",
")",
"if",
"isinstance",
"(",
"pad",
",",
"int",
")",
":",
"pad",
"=",
"(",
"pad",
",",
")",
"*",
"6",
"validator",
".",
"check_equal_int",
"(",
"len",
"(",
"pad",
")",
",",
"6",
",",
"'pad size'",
",",
"self",
".",
"name",
")",
"self",
".",
"add_prim_attr",
"(",
"'pad'",
",",
"self",
".",
"pad",
")",
"self",
".",
"pad_list",
"=",
"pad",
"self",
".",
"add_prim_attr",
"(",
"'pad_list'",
",",
"self",
".",
"pad_list",
")",
"self",
".",
"pad_mode",
"=",
"validator",
".",
"check_string",
"(",
"pad_mode",
".",
"lower",
"(",
")",
",",
"[",
"'valid'",
",",
"'same'",
",",
"'pad'",
"]",
",",
"'pad_mode'",
",",
"self",
".",
"name",
")",
"if",
"self",
".",
"pad_mode",
"!=",
"'pad'",
"and",
"self",
".",
"pad_list",
"!=",
"(",
"0",
",",
"0",
",",
"0",
",",
"0",
",",
"0",
",",
"0",
")",
":",
"raise",
"ValueError",
"(",
"f\"For '{self.name}', when pad is not 0, pad_mode should be set as 'pad'.\"",
")",
"if",
"self",
".",
"pad_mode",
"==",
"'pad'",
":",
"for",
"item",
"in",
"pad",
":",
"validator",
".",
"check_non_negative_int",
"(",
"item",
",",
"'pad item'",
",",
"self",
".",
"name",
")",
"self",
".",
"add_prim_attr",
"(",
"'pad_mode'",
",",
"self",
".",
"pad_mode",
")",
"self",
".",
"mode",
"=",
"validator",
".",
"check_equal_int",
"(",
"mode",
",",
"1",
",",
"'mode'",
",",
"self",
".",
"name",
")",
"self",
".",
"add_prim_attr",
"(",
"'mode'",
",",
"self",
".",
"mode",
")",
"self",
".",
"group",
"=",
"validator",
".",
"check_positive_int",
"(",
"group",
",",
"'group'",
",",
"self",
".",
"name",
")",
"self",
".",
"add_prim_attr",
"(",
"'groups'",
",",
"self",
".",
"group",
")",
"self",
".",
"format",
"=",
"validator",
".",
"check_string",
"(",
"data_format",
",",
"[",
"'NCDHW'",
"]",
",",
"'format'",
",",
"self",
".",
"name",
")",
"self",
".",
"add_prim_attr",
"(",
"'data_format'",
",",
"self",
".",
"format",
")"
] |
https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/ops/operations/_grad_ops.py#L293-L332
|
||
windystrife/UnrealEngine_NVIDIAGameWorks
|
b50e6338a7c5b26374d66306ebc7807541ff815e
|
Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/site-packages/setuptools/command/easy_install.py
|
python
|
PthDistributions.remove
|
(self, dist)
|
Remove `dist` from the distribution map
|
Remove `dist` from the distribution map
|
[
"Remove",
"dist",
"from",
"the",
"distribution",
"map"
] |
def remove(self, dist):
    """Remove `dist` from the distribution map"""
    location = dist.location
    if location in self.paths:
        # Filter out every occurrence in one pass. Slice-assignment keeps
        # the same list object alive in case other code holds a reference.
        self.paths[:] = [entry for entry in self.paths if entry != location]
        self.dirty = True
    Environment.remove(self, dist)
|
[
"def",
"remove",
"(",
"self",
",",
"dist",
")",
":",
"while",
"dist",
".",
"location",
"in",
"self",
".",
"paths",
":",
"self",
".",
"paths",
".",
"remove",
"(",
"dist",
".",
"location",
")",
"self",
".",
"dirty",
"=",
"True",
"Environment",
".",
"remove",
"(",
"self",
",",
"dist",
")"
] |
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/site-packages/setuptools/command/easy_install.py#L1574-L1579
|
||
SFTtech/openage
|
d6a08c53c48dc1e157807471df92197f6ca9e04d
|
openage/convert/value_object/read/media/datfile/tech.py
|
python
|
EffectBundle.get_data_format_members
|
(cls, game_version)
|
return data_format
|
Return the members in this struct.
|
Return the members in this struct.
|
[
"Return",
"the",
"members",
"in",
"this",
"struct",
"."
] |
def get_data_format_members(cls, game_version):
    """
    Return the members in this struct.
    """
    if game_version[0].game_id in ("AOE1DE", "AOE2DE"):
        # DE editions store the bundle name as a length-prefixed string.
        name_members = [
            (SKIP, "name_len_debug", StorageType.INT_MEMBER, "uint16_t"),
            (READ, "name_len", StorageType.INT_MEMBER, "uint16_t"),
            (READ_GEN, "name", StorageType.STRING_MEMBER, "char[name_len]"),
        ]
    else:
        # always CHUN4 (change unit 4-arg) in AoE1-AoC, later versions name them
        name_members = [
            (READ_GEN, "name", StorageType.STRING_MEMBER, "char[31]"),
        ]

    effect_members = [
        (READ, "effect_count", StorageType.INT_MEMBER, "uint16_t"),
        (READ_GEN, "effects", StorageType.ARRAY_CONTAINER, SubdataMember(
            ref_type=Effect,
            length="effect_count",
        )),
    ]

    return name_members + effect_members
|
[
"def",
"get_data_format_members",
"(",
"cls",
",",
"game_version",
")",
":",
"if",
"game_version",
"[",
"0",
"]",
".",
"game_id",
"in",
"(",
"\"AOE1DE\"",
",",
"\"AOE2DE\"",
")",
":",
"data_format",
"=",
"[",
"(",
"SKIP",
",",
"\"name_len_debug\"",
",",
"StorageType",
".",
"INT_MEMBER",
",",
"\"uint16_t\"",
")",
",",
"(",
"READ",
",",
"\"name_len\"",
",",
"StorageType",
".",
"INT_MEMBER",
",",
"\"uint16_t\"",
")",
",",
"(",
"READ_GEN",
",",
"\"name\"",
",",
"StorageType",
".",
"STRING_MEMBER",
",",
"\"char[name_len]\"",
")",
",",
"]",
"else",
":",
"data_format",
"=",
"[",
"# always CHUN4 (change unit 4-arg) in AoE1-AoC, later versions name them",
"(",
"READ_GEN",
",",
"\"name\"",
",",
"StorageType",
".",
"STRING_MEMBER",
",",
"\"char[31]\"",
")",
",",
"]",
"data_format",
".",
"extend",
"(",
"[",
"(",
"READ",
",",
"\"effect_count\"",
",",
"StorageType",
".",
"INT_MEMBER",
",",
"\"uint16_t\"",
")",
",",
"(",
"READ_GEN",
",",
"\"effects\"",
",",
"StorageType",
".",
"ARRAY_CONTAINER",
",",
"SubdataMember",
"(",
"ref_type",
"=",
"Effect",
",",
"length",
"=",
"\"effect_count\"",
",",
")",
")",
",",
"]",
")",
"return",
"data_format"
] |
https://github.com/SFTtech/openage/blob/d6a08c53c48dc1e157807471df92197f6ca9e04d/openage/convert/value_object/read/media/datfile/tech.py#L37-L61
|
|
KhronosGroup/SPIR
|
f33c27876d9f3d5810162b60fa89cc13d2b55725
|
bindings/python/clang/cindex.py
|
python
|
Token.cursor
|
(self)
|
return cursor
|
The Cursor this Token corresponds to.
|
The Cursor this Token corresponds to.
|
[
"The",
"Cursor",
"this",
"Token",
"corresponds",
"to",
"."
] |
def cursor(self):
    """The Cursor this Token corresponds to."""
    result = Cursor()
    # Annotate this single token so libclang fills in the owning cursor.
    conf.lib.clang_annotateTokens(self._tu, byref(self), 1, byref(result))
    return result
|
[
"def",
"cursor",
"(",
"self",
")",
":",
"cursor",
"=",
"Cursor",
"(",
")",
"conf",
".",
"lib",
".",
"clang_annotateTokens",
"(",
"self",
".",
"_tu",
",",
"byref",
"(",
"self",
")",
",",
"1",
",",
"byref",
"(",
"cursor",
")",
")",
"return",
"cursor"
] |
https://github.com/KhronosGroup/SPIR/blob/f33c27876d9f3d5810162b60fa89cc13d2b55725/bindings/python/clang/cindex.py#L2435-L2441
|
|
tensorflow/tensorflow
|
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
|
tensorflow/python/keras/utils/tf_inspect.py
|
python
|
isroutine
|
(object)
|
return _inspect.isroutine(tf_decorator.unwrap(object)[1])
|
TFDecorator-aware replacement for inspect.isroutine.
|
TFDecorator-aware replacement for inspect.isroutine.
|
[
"TFDecorator",
"-",
"aware",
"replacement",
"for",
"inspect",
".",
"isroutine",
"."
] |
def isroutine(object):  # pylint: disable=redefined-builtin
  """TFDecorator-aware replacement for inspect.isroutine."""
  # unwrap() returns (decorator_chain, innermost_target); only the
  # undecorated target is relevant for the routine check.
  _, target = tf_decorator.unwrap(object)
  return _inspect.isroutine(target)
|
[
"def",
"isroutine",
"(",
"object",
")",
":",
"# pylint: disable=redefined-builtin",
"return",
"_inspect",
".",
"isroutine",
"(",
"tf_decorator",
".",
"unwrap",
"(",
"object",
")",
"[",
"1",
"]",
")"
] |
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/keras/utils/tf_inspect.py#L395-L397
|
|
crosslife/OpenBird
|
9e0198a1a2295f03fa1e8676e216e22c9c7d380b
|
cocos2d/tools/bindings-generator/backup/clang-llvm-3.3-pybinding/cindex.py
|
python
|
TranslationUnit.__init__
|
(self, ptr, index)
|
Create a TranslationUnit instance.
TranslationUnits should be created using one of the from_* @classmethod
functions above. __init__ is only called internally.
|
Create a TranslationUnit instance.
|
[
"Create",
"a",
"TranslationUnit",
"instance",
"."
] |
def __init__(self, ptr, index):
    """Create a TranslationUnit instance.

    TranslationUnits should be created using one of the from_* @classmethod
    functions above. __init__ is only called internally.

    Args:
        ptr: the native handle wrapped by this object (presumably a
            CXTranslationUnit pointer — passed straight to ClangObject).
        index: the owning Index instance; validated but not stored here.
    """
    # NOTE: `assert` is stripped under `python -O`; the check only guards
    # internal callers, which is why it is not a hard raise.
    assert isinstance(index, Index)
    ClangObject.__init__(self, ptr)
|
[
"def",
"__init__",
"(",
"self",
",",
"ptr",
",",
"index",
")",
":",
"assert",
"isinstance",
"(",
"index",
",",
"Index",
")",
"ClangObject",
".",
"__init__",
"(",
"self",
",",
"ptr",
")"
] |
https://github.com/crosslife/OpenBird/blob/9e0198a1a2295f03fa1e8676e216e22c9c7d380b/cocos2d/tools/bindings-generator/backup/clang-llvm-3.3-pybinding/cindex.py#L2075-L2083
|
||
windystrife/UnrealEngine_NVIDIAGameWorks
|
b50e6338a7c5b26374d66306ebc7807541ff815e
|
Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/site-packages/win32com/client/__init__.py
|
python
|
DispatchWithEvents
|
(clsid, user_event_class)
|
return EventsProxy(instance)
|
Create a COM object that can fire events to a user defined class.
clsid -- The ProgID or CLSID of the object to create.
user_event_class -- A Python class object that responds to the events.
This requires makepy support for the COM object being created. If
this support does not exist it will be automatically generated by
this function. If the object does not support makepy, a TypeError
exception will be raised.
The result is a class instance that both represents the COM object
and handles events from the COM object.
It is important to note that the returned instance is not a direct
instance of the user_event_class, but an instance of a temporary
class object that derives from three classes:
* The makepy generated class for the COM object
* The makepy generated class for the COM events
* The user_event_class as passed to this function.
If this is not suitable, see the getevents function for an alternative
technique of handling events.
Object Lifetimes: Whenever the object returned from this function is
cleaned-up by Python, the events will be disconnected from
the COM object. This is almost always what should happen,
but see the documentation for getevents() for more details.
Example:
>>> class IEEvents:
... def OnVisible(self, visible):
... print "Visible changed:", visible
...
>>> ie = DispatchWithEvents("InternetExplorer.Application", IEEvents)
>>> ie.Visible = 1
Visible changed: 1
>>>
|
Create a COM object that can fire events to a user defined class.
clsid -- The ProgID or CLSID of the object to create.
user_event_class -- A Python class object that responds to the events.
|
[
"Create",
"a",
"COM",
"object",
"that",
"can",
"fire",
"events",
"to",
"a",
"user",
"defined",
"class",
".",
"clsid",
"--",
"The",
"ProgID",
"or",
"CLSID",
"of",
"the",
"object",
"to",
"create",
".",
"user_event_class",
"--",
"A",
"Python",
"class",
"object",
"that",
"responds",
"to",
"the",
"events",
"."
] |
def DispatchWithEvents(clsid, user_event_class):
    """Create a COM object that can fire events to a user defined class.
    clsid -- The ProgID or CLSID of the object to create.
    user_event_class -- A Python class object that responds to the events.
    This requires makepy support for the COM object being created. If
    this support does not exist it will be automatically generated by
    this function. If the object does not support makepy, a TypeError
    exception will be raised.
    The result is a class instance that both represents the COM object
    and handles events from the COM object.
    It is important to note that the returned instance is not a direct
    instance of the user_event_class, but an instance of a temporary
    class object that derives from three classes:
    * The makepy generated class for the COM object
    * The makepy generated class for the COM events
    * The user_event_class as passed to this function.
    If this is not suitable, see the getevents function for an alternative
    technique of handling events.
    Object Lifetimes: Whenever the object returned from this function is
    cleaned-up by Python, the events will be disconnected from
    the COM object. This is almost always what should happen,
    but see the documentation for getevents() for more details.
    Example:
    >>> class IEEvents:
    ...    def OnVisible(self, visible):
    ...       print "Visible changed:", visible
    ...
    >>> ie = DispatchWithEvents("InternetExplorer.Application", IEEvents)
    >>> ie.Visible = 1
    Visible changed: 1
    >>>
    """
    # Create/Get the object.
    disp = Dispatch(clsid)
    if not disp.__class__.__dict__.get("CLSID"): # Eeek - no makepy support - try and build it.
        try:
            # Interrogate the live object's type library and generate the
            # makepy wrapper module on the fly.
            ti = disp._oleobj_.GetTypeInfo()
            disp_clsid = ti.GetTypeAttr()[0]
            tlb, index = ti.GetContainingTypeLib()
            tla = tlb.GetLibAttr()
            gencache.EnsureModule(tla[0], tla[1], tla[3], tla[4], bValidateFile=0)
            # Get the class from the module.
            disp_class = gencache.GetClassForProgID(str(disp_clsid))
        except pythoncom.com_error:
            raise TypeError("This COM object can not automate the makepy process - please run makepy manually for this object")
    else:
        disp_class = disp.__class__
    # If the clsid was an object, get the clsid
    clsid = disp_class.CLSID
    # Create a new class that derives from 3 classes - the dispatch class, the event sink class and the user class.
    # XXX - we are still "classic style" classes in py2x, so we can't yet
    # use 'type()' everywhere - revisit soon, as py2x will move to new-style too...
    try:
        from types import ClassType as new_type
    except ImportError:
        new_type = type # py3k
    events_class = getevents(clsid)
    if events_class is None:
        raise ValueError("This COM object does not support events.")
    result_class = new_type("COMEventClass", (disp_class, events_class, user_event_class), {"__setattr__" : _event_setattr_})
    instance = result_class(disp._oleobj_) # This only calls the first base class __init__.
    # Hook the event sink up to the instance (advise the connection point),
    # then give the user class a chance to initialize itself.
    events_class.__init__(instance, instance)
    if hasattr(user_event_class, "__init__"):
        user_event_class.__init__(instance)
    return EventsProxy(instance)
|
[
"def",
"DispatchWithEvents",
"(",
"clsid",
",",
"user_event_class",
")",
":",
"# Create/Get the object.",
"disp",
"=",
"Dispatch",
"(",
"clsid",
")",
"if",
"not",
"disp",
".",
"__class__",
".",
"__dict__",
".",
"get",
"(",
"\"CLSID\"",
")",
":",
"# Eeek - no makepy support - try and build it.",
"try",
":",
"ti",
"=",
"disp",
".",
"_oleobj_",
".",
"GetTypeInfo",
"(",
")",
"disp_clsid",
"=",
"ti",
".",
"GetTypeAttr",
"(",
")",
"[",
"0",
"]",
"tlb",
",",
"index",
"=",
"ti",
".",
"GetContainingTypeLib",
"(",
")",
"tla",
"=",
"tlb",
".",
"GetLibAttr",
"(",
")",
"gencache",
".",
"EnsureModule",
"(",
"tla",
"[",
"0",
"]",
",",
"tla",
"[",
"1",
"]",
",",
"tla",
"[",
"3",
"]",
",",
"tla",
"[",
"4",
"]",
",",
"bValidateFile",
"=",
"0",
")",
"# Get the class from the module.",
"disp_class",
"=",
"gencache",
".",
"GetClassForProgID",
"(",
"str",
"(",
"disp_clsid",
")",
")",
"except",
"pythoncom",
".",
"com_error",
":",
"raise",
"TypeError",
"(",
"\"This COM object can not automate the makepy process - please run makepy manually for this object\"",
")",
"else",
":",
"disp_class",
"=",
"disp",
".",
"__class__",
"# If the clsid was an object, get the clsid",
"clsid",
"=",
"disp_class",
".",
"CLSID",
"# Create a new class that derives from 3 classes - the dispatch class, the event sink class and the user class.",
"# XXX - we are still \"classic style\" classes in py2x, so we need can't yet",
"# use 'type()' everywhere - revisit soon, as py2x will move to new-style too...",
"try",
":",
"from",
"types",
"import",
"ClassType",
"as",
"new_type",
"except",
"ImportError",
":",
"new_type",
"=",
"type",
"# py3k",
"events_class",
"=",
"getevents",
"(",
"clsid",
")",
"if",
"events_class",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"\"This COM object does not support events.\"",
")",
"result_class",
"=",
"new_type",
"(",
"\"COMEventClass\"",
",",
"(",
"disp_class",
",",
"events_class",
",",
"user_event_class",
")",
",",
"{",
"\"__setattr__\"",
":",
"_event_setattr_",
"}",
")",
"instance",
"=",
"result_class",
"(",
"disp",
".",
"_oleobj_",
")",
"# This only calls the first base class __init__.",
"events_class",
".",
"__init__",
"(",
"instance",
",",
"instance",
")",
"if",
"hasattr",
"(",
"user_event_class",
",",
"\"__init__\"",
")",
":",
"user_event_class",
".",
"__init__",
"(",
"instance",
")",
"return",
"EventsProxy",
"(",
"instance",
")"
] |
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/site-packages/win32com/client/__init__.py#L205-L276
|
|
mindspore-ai/mindspore
|
fb8fd3338605bb34fa5cea054e535a8b1d753fab
|
mindspore/python/mindspore/ops/_op_impl/aicpu/dropout_genmask.py
|
python
|
_dropout_genmask_aicpu
|
()
|
return
|
DropoutGenMask AiCPU register
|
DropoutGenMask AiCPU register
|
[
"DropoutGenMask",
"AiCPU",
"register"
] |
def _dropout_genmask_aicpu():
"""DropoutGenMask AiCPU register"""
return
|
[
"def",
"_dropout_genmask_aicpu",
"(",
")",
":",
"return"
] |
https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/ops/_op_impl/aicpu/dropout_genmask.py#L30-L32
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numpy/distutils/misc_util.py
|
python
|
allpath
|
(name)
|
return os.path.join(*splitted)
|
Convert a /-separated pathname to one using the OS's path separator.
|
Convert a /-separated pathname to one using the OS's path separator.
|
[
"Convert",
"a",
"/",
"-",
"separated",
"pathname",
"to",
"one",
"using",
"the",
"OS",
"s",
"path",
"separator",
"."
] |
def allpath(name):
"Convert a /-separated pathname to one using the OS's path separator."
splitted = name.split('/')
return os.path.join(*splitted)
|
[
"def",
"allpath",
"(",
"name",
")",
":",
"splitted",
"=",
"name",
".",
"split",
"(",
"'/'",
")",
"return",
"os",
".",
"path",
".",
"join",
"(",
"*",
"splitted",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numpy/distutils/misc_util.py#L128-L131
|
|
hanpfei/chromium-net
|
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
|
build/android/gyp/package_resources.py
|
python
|
CheckForMissedConfigs
|
(apk_path, check_density, languages)
|
Raises an exception if apk_path contains any unexpected configs.
|
Raises an exception if apk_path contains any unexpected configs.
|
[
"Raises",
"an",
"exception",
"if",
"apk_path",
"contains",
"any",
"unexpected",
"configs",
"."
] |
def CheckForMissedConfigs(apk_path, check_density, languages):
"""Raises an exception if apk_path contains any unexpected configs."""
triggers = []
if check_density:
triggers.extend(re.compile('-%s' % density) for density in DENSITY_SPLITS)
if languages:
triggers.extend(re.compile(r'-%s\b' % lang) for lang in languages)
with zipfile.ZipFile(apk_path) as main_apk_zip:
for name in main_apk_zip.namelist():
for trigger in triggers:
if trigger.search(name) and not 'mipmap-' in name:
raise Exception(('Found config in main apk that should have been ' +
'put into a split: %s\nYou need to update ' +
'package_resources.py to include this new ' +
'config (trigger=%s)') % (name, trigger.pattern))
|
[
"def",
"CheckForMissedConfigs",
"(",
"apk_path",
",",
"check_density",
",",
"languages",
")",
":",
"triggers",
"=",
"[",
"]",
"if",
"check_density",
":",
"triggers",
".",
"extend",
"(",
"re",
".",
"compile",
"(",
"'-%s'",
"%",
"density",
")",
"for",
"density",
"in",
"DENSITY_SPLITS",
")",
"if",
"languages",
":",
"triggers",
".",
"extend",
"(",
"re",
".",
"compile",
"(",
"r'-%s\\b'",
"%",
"lang",
")",
"for",
"lang",
"in",
"languages",
")",
"with",
"zipfile",
".",
"ZipFile",
"(",
"apk_path",
")",
"as",
"main_apk_zip",
":",
"for",
"name",
"in",
"main_apk_zip",
".",
"namelist",
"(",
")",
":",
"for",
"trigger",
"in",
"triggers",
":",
"if",
"trigger",
".",
"search",
"(",
"name",
")",
"and",
"not",
"'mipmap-'",
"in",
"name",
":",
"raise",
"Exception",
"(",
"(",
"'Found config in main apk that should have been '",
"+",
"'put into a split: %s\\nYou need to update '",
"+",
"'package_resources.py to include this new '",
"+",
"'config (trigger=%s)'",
")",
"%",
"(",
"name",
",",
"trigger",
".",
"pattern",
")",
")"
] |
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/build/android/gyp/package_resources.py#L203-L217
|
||
runtimejs/runtime
|
0a6e84c30823d35a4548d6634166784260ae7b74
|
deps/v8/gypfiles/vs_toolchain.py
|
python
|
_GetDesiredVsToolchainHashes
|
()
|
Load a list of SHA1s corresponding to the toolchains that we want installed
to build with.
|
Load a list of SHA1s corresponding to the toolchains that we want installed
to build with.
|
[
"Load",
"a",
"list",
"of",
"SHA1s",
"corresponding",
"to",
"the",
"toolchains",
"that",
"we",
"want",
"installed",
"to",
"build",
"with",
"."
] |
def _GetDesiredVsToolchainHashes():
"""Load a list of SHA1s corresponding to the toolchains that we want installed
to build with."""
if GetVisualStudioVersion() == '2015':
# Update 2.
return ['95ddda401ec5678f15eeed01d2bee08fcbc5ee97']
else:
return ['03a4e939cd325d6bc5216af41b92d02dda1366a6']
|
[
"def",
"_GetDesiredVsToolchainHashes",
"(",
")",
":",
"if",
"GetVisualStudioVersion",
"(",
")",
"==",
"'2015'",
":",
"# Update 2.",
"return",
"[",
"'95ddda401ec5678f15eeed01d2bee08fcbc5ee97'",
"]",
"else",
":",
"return",
"[",
"'03a4e939cd325d6bc5216af41b92d02dda1366a6'",
"]"
] |
https://github.com/runtimejs/runtime/blob/0a6e84c30823d35a4548d6634166784260ae7b74/deps/v8/gypfiles/vs_toolchain.py#L279-L286
|
||
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/traitlets/py3/traitlets/traitlets.py
|
python
|
Type.validate
|
(self, obj, value)
|
Validates that the value is a valid object instance.
|
Validates that the value is a valid object instance.
|
[
"Validates",
"that",
"the",
"value",
"is",
"a",
"valid",
"object",
"instance",
"."
] |
def validate(self, obj, value):
"""Validates that the value is a valid object instance."""
if isinstance(value, str):
try:
value = self._resolve_string(value)
except ImportError:
raise TraitError("The '%s' trait of %s instance must be a type, but "
"%r could not be imported" % (self.name, obj, value))
try:
if issubclass(value, self.klass):
return value
except Exception:
pass
self.error(obj, value)
|
[
"def",
"validate",
"(",
"self",
",",
"obj",
",",
"value",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"str",
")",
":",
"try",
":",
"value",
"=",
"self",
".",
"_resolve_string",
"(",
"value",
")",
"except",
"ImportError",
":",
"raise",
"TraitError",
"(",
"\"The '%s' trait of %s instance must be a type, but \"",
"\"%r could not be imported\"",
"%",
"(",
"self",
".",
"name",
",",
"obj",
",",
"value",
")",
")",
"try",
":",
"if",
"issubclass",
"(",
"value",
",",
"self",
".",
"klass",
")",
":",
"return",
"value",
"except",
"Exception",
":",
"pass",
"self",
".",
"error",
"(",
"obj",
",",
"value",
")"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/traitlets/py3/traitlets/traitlets.py#L1744-L1758
|
||
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Gems/CloudGemFramework/v1/AWS/resource-manager-code/lib/pkg_resources/__init__.py
|
python
|
Distribution.egg_name
|
(self)
|
return filename
|
Return what this distribution's standard .egg filename should be
|
Return what this distribution's standard .egg filename should be
|
[
"Return",
"what",
"this",
"distribution",
"s",
"standard",
".",
"egg",
"filename",
"should",
"be"
] |
def egg_name(self):
"""Return what this distribution's standard .egg filename should be"""
filename = "%s-%s-py%s" % (
to_filename(self.project_name), to_filename(self.version),
self.py_version or PY_MAJOR
)
if self.platform:
filename += '-' + self.platform
return filename
|
[
"def",
"egg_name",
"(",
"self",
")",
":",
"filename",
"=",
"\"%s-%s-py%s\"",
"%",
"(",
"to_filename",
"(",
"self",
".",
"project_name",
")",
",",
"to_filename",
"(",
"self",
".",
"version",
")",
",",
"self",
".",
"py_version",
"or",
"PY_MAJOR",
")",
"if",
"self",
".",
"platform",
":",
"filename",
"+=",
"'-'",
"+",
"self",
".",
"platform",
"return",
"filename"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemFramework/v1/AWS/resource-manager-code/lib/pkg_resources/__init__.py#L2794-L2803
|
|
Xilinx/Vitis-AI
|
fc74d404563d9951b57245443c73bef389f3657f
|
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/cudnn_rnn/python/ops/cudnn_rnn_ops.py
|
python
|
CudnnParamsFormatConverter._cu_canonical_to_tf_canonical_single_layer
|
(self,
cu_weights,
cu_biases,
tf_weights,
tf_biases,
tf_weigths_proj=None)
|
r"""Transform single layer Cudnn canonicals to tf canonicals.
The elements of cu_weights, cu_biases are laid out in the following format:
-------------------------------------------------------------------------
| gate0 param on inputs | gate0 param on hidden state | gate1 ..........|
-------------------------------------------------------------------------
Args:
cu_weights: a list of tensors, single layer weights.
cu_biases: a list of tensors, single layer biases.
tf_weights: a list where transformed weights are stored.
tf_biases: a list where transformed biases are stored.
|
r"""Transform single layer Cudnn canonicals to tf canonicals.
|
[
"r",
"Transform",
"single",
"layer",
"Cudnn",
"canonicals",
"to",
"tf",
"canonicals",
"."
] |
def _cu_canonical_to_tf_canonical_single_layer(self,
cu_weights,
cu_biases,
tf_weights,
tf_biases,
tf_weigths_proj=None):
r"""Transform single layer Cudnn canonicals to tf canonicals.
The elements of cu_weights, cu_biases are laid out in the following format:
-------------------------------------------------------------------------
| gate0 param on inputs | gate0 param on hidden state | gate1 ..........|
-------------------------------------------------------------------------
Args:
cu_weights: a list of tensors, single layer weights.
cu_biases: a list of tensors, single layer biases.
tf_weights: a list where transformed weights are stored.
tf_biases: a list where transformed biases are stored.
"""
raise NotImplementedError("Abstract method")
|
[
"def",
"_cu_canonical_to_tf_canonical_single_layer",
"(",
"self",
",",
"cu_weights",
",",
"cu_biases",
",",
"tf_weights",
",",
"tf_biases",
",",
"tf_weigths_proj",
"=",
"None",
")",
":",
"raise",
"NotImplementedError",
"(",
"\"Abstract method\"",
")"
] |
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/cudnn_rnn/python/ops/cudnn_rnn_ops.py#L378-L396
|
||
mindspore-ai/mindspore
|
fb8fd3338605bb34fa5cea054e535a8b1d753fab
|
mindspore/python/mindspore/dataset/core/config.py
|
python
|
load
|
(file)
|
Load the project configuration from the file format.
Args:
file (str): Path of the configuration file to be loaded.
Raises:
RuntimeError: If file is invalid and parsing fails.
Examples:
>>> # Set new default configuration according to values in the configuration file.
>>> # example config file:
>>> # {
>>> # "logFilePath": "/tmp",
>>> # "numParallelWorkers": 4,
>>> # "seed": 5489,
>>> # "monitorSamplingInterval": 30
>>> # }
>>> config_file = "/path/to/config/file"
>>> ds.config.load(config_file)
|
Load the project configuration from the file format.
|
[
"Load",
"the",
"project",
"configuration",
"from",
"the",
"file",
"format",
"."
] |
def load(file):
"""
Load the project configuration from the file format.
Args:
file (str): Path of the configuration file to be loaded.
Raises:
RuntimeError: If file is invalid and parsing fails.
Examples:
>>> # Set new default configuration according to values in the configuration file.
>>> # example config file:
>>> # {
>>> # "logFilePath": "/tmp",
>>> # "numParallelWorkers": 4,
>>> # "seed": 5489,
>>> # "monitorSamplingInterval": 30
>>> # }
>>> config_file = "/path/to/config/file"
>>> ds.config.load(config_file)
"""
_config.load(file)
|
[
"def",
"load",
"(",
"file",
")",
":",
"_config",
".",
"load",
"(",
"file",
")"
] |
https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/dataset/core/config.py#L399-L421
|
||
hanpfei/chromium-net
|
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
|
third_party/catapult/third_party/gsutil/third_party/socksipy-branch/socks.py
|
python
|
wrapmodule
|
(module)
|
wrapmodule(module)
Attempts to replace a module's socket library with a SOCKS socket. Must set
a default proxy using setdefaultproxy(...) first.
This will only work on modules that import socket directly into the namespace;
most of the Python Standard Library falls into this category.
|
wrapmodule(module)
Attempts to replace a module's socket library with a SOCKS socket. Must set
a default proxy using setdefaultproxy(...) first.
This will only work on modules that import socket directly into the namespace;
most of the Python Standard Library falls into this category.
|
[
"wrapmodule",
"(",
"module",
")",
"Attempts",
"to",
"replace",
"a",
"module",
"s",
"socket",
"library",
"with",
"a",
"SOCKS",
"socket",
".",
"Must",
"set",
"a",
"default",
"proxy",
"using",
"setdefaultproxy",
"(",
"...",
")",
"first",
".",
"This",
"will",
"only",
"work",
"on",
"modules",
"that",
"import",
"socket",
"directly",
"into",
"the",
"namespace",
";",
"most",
"of",
"the",
"Python",
"Standard",
"Library",
"falls",
"into",
"this",
"category",
"."
] |
def wrapmodule(module):
"""wrapmodule(module)
Attempts to replace a module's socket library with a SOCKS socket. Must set
a default proxy using setdefaultproxy(...) first.
This will only work on modules that import socket directly into the namespace;
most of the Python Standard Library falls into this category.
"""
if _defaultproxy != None:
module.socket.socket = socksocket
else:
raise GeneralProxyError((4, "no proxy specified"))
|
[
"def",
"wrapmodule",
"(",
"module",
")",
":",
"if",
"_defaultproxy",
"!=",
"None",
":",
"module",
".",
"socket",
".",
"socket",
"=",
"socksocket",
"else",
":",
"raise",
"GeneralProxyError",
"(",
"(",
"4",
",",
"\"no proxy specified\"",
")",
")"
] |
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/gsutil/third_party/socksipy-branch/socks.py#L101-L111
|
||
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/osx_carbon/_misc.py
|
python
|
SystemOptions.SetOption
|
(*args, **kwargs)
|
return _misc_.SystemOptions_SetOption(*args, **kwargs)
|
SetOption(String name, String value)
|
SetOption(String name, String value)
|
[
"SetOption",
"(",
"String",
"name",
"String",
"value",
")"
] |
def SetOption(*args, **kwargs):
"""SetOption(String name, String value)"""
return _misc_.SystemOptions_SetOption(*args, **kwargs)
|
[
"def",
"SetOption",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_misc_",
".",
"SystemOptions_SetOption",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_misc.py#L221-L223
|
|
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/tools/python3/src/Lib/distutils/cygwinccompiler.py
|
python
|
CygwinCCompiler.object_filenames
|
(self, source_filenames, strip_dir=0, output_dir='')
|
return obj_names
|
Adds supports for rc and res files.
|
Adds supports for rc and res files.
|
[
"Adds",
"supports",
"for",
"rc",
"and",
"res",
"files",
"."
] |
def object_filenames(self, source_filenames, strip_dir=0, output_dir=''):
"""Adds supports for rc and res files."""
if output_dir is None:
output_dir = ''
obj_names = []
for src_name in source_filenames:
# use normcase to make sure '.rc' is really '.rc' and not '.RC'
base, ext = os.path.splitext(os.path.normcase(src_name))
if ext not in (self.src_extensions + ['.rc','.res']):
raise UnknownFileError("unknown file type '%s' (from '%s')" % \
(ext, src_name))
if strip_dir:
base = os.path.basename (base)
if ext in ('.res', '.rc'):
# these need to be compiled to object files
obj_names.append (os.path.join(output_dir,
base + ext + self.obj_extension))
else:
obj_names.append (os.path.join(output_dir,
base + self.obj_extension))
return obj_names
|
[
"def",
"object_filenames",
"(",
"self",
",",
"source_filenames",
",",
"strip_dir",
"=",
"0",
",",
"output_dir",
"=",
"''",
")",
":",
"if",
"output_dir",
"is",
"None",
":",
"output_dir",
"=",
"''",
"obj_names",
"=",
"[",
"]",
"for",
"src_name",
"in",
"source_filenames",
":",
"# use normcase to make sure '.rc' is really '.rc' and not '.RC'",
"base",
",",
"ext",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"os",
".",
"path",
".",
"normcase",
"(",
"src_name",
")",
")",
"if",
"ext",
"not",
"in",
"(",
"self",
".",
"src_extensions",
"+",
"[",
"'.rc'",
",",
"'.res'",
"]",
")",
":",
"raise",
"UnknownFileError",
"(",
"\"unknown file type '%s' (from '%s')\"",
"%",
"(",
"ext",
",",
"src_name",
")",
")",
"if",
"strip_dir",
":",
"base",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"base",
")",
"if",
"ext",
"in",
"(",
"'.res'",
",",
"'.rc'",
")",
":",
"# these need to be compiled to object files",
"obj_names",
".",
"append",
"(",
"os",
".",
"path",
".",
"join",
"(",
"output_dir",
",",
"base",
"+",
"ext",
"+",
"self",
".",
"obj_extension",
")",
")",
"else",
":",
"obj_names",
".",
"append",
"(",
"os",
".",
"path",
".",
"join",
"(",
"output_dir",
",",
"base",
"+",
"self",
".",
"obj_extension",
")",
")",
"return",
"obj_names"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/distutils/cygwinccompiler.py#L250-L270
|
|
windystrife/UnrealEngine_NVIDIAGameWorks
|
b50e6338a7c5b26374d66306ebc7807541ff815e
|
Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/lib-tk/Tkinter.py
|
python
|
Menu.tk_popup
|
(self, x, y, entry="")
|
Post the menu at position X,Y with entry ENTRY.
|
Post the menu at position X,Y with entry ENTRY.
|
[
"Post",
"the",
"menu",
"at",
"position",
"X",
"Y",
"with",
"entry",
"ENTRY",
"."
] |
def tk_popup(self, x, y, entry=""):
"""Post the menu at position X,Y with entry ENTRY."""
self.tk.call('tk_popup', self._w, x, y, entry)
|
[
"def",
"tk_popup",
"(",
"self",
",",
"x",
",",
"y",
",",
"entry",
"=",
"\"\"",
")",
":",
"self",
".",
"tk",
".",
"call",
"(",
"'tk_popup'",
",",
"self",
".",
"_w",
",",
"x",
",",
"y",
",",
"entry",
")"
] |
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/lib-tk/Tkinter.py#L2665-L2667
|
||
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
wx/lib/agw/ultimatelistctrl.py
|
python
|
UltimateListMainWindow.GetItemSpacing
|
(self, isSmall=False)
|
return (isSmall and [self._small_spacing] or [self._normal_spacing])[0]
|
Returns the spacing between item texts and icons, in pixels.
:param `isSmall`: ``True`` if using a ``wx.IMAGE_LIST_SMALL`` image list,
``False`` if using a ``wx.IMAGE_LIST_NORMAL`` image list.
|
Returns the spacing between item texts and icons, in pixels.
|
[
"Returns",
"the",
"spacing",
"between",
"item",
"texts",
"and",
"icons",
"in",
"pixels",
"."
] |
def GetItemSpacing(self, isSmall=False):
"""
Returns the spacing between item texts and icons, in pixels.
:param `isSmall`: ``True`` if using a ``wx.IMAGE_LIST_SMALL`` image list,
``False`` if using a ``wx.IMAGE_LIST_NORMAL`` image list.
"""
return (isSmall and [self._small_spacing] or [self._normal_spacing])[0]
|
[
"def",
"GetItemSpacing",
"(",
"self",
",",
"isSmall",
"=",
"False",
")",
":",
"return",
"(",
"isSmall",
"and",
"[",
"self",
".",
"_small_spacing",
"]",
"or",
"[",
"self",
".",
"_normal_spacing",
"]",
")",
"[",
"0",
"]"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/agw/ultimatelistctrl.py#L8592-L8600
|
|
wlanjie/AndroidFFmpeg
|
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
|
tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/inspect.py
|
python
|
ismodule
|
(object)
|
return isinstance(object, types.ModuleType)
|
Return true if the object is a module.
Module objects provide these attributes:
__doc__ documentation string
__file__ filename (missing for built-in modules)
|
Return true if the object is a module.
|
[
"Return",
"true",
"if",
"the",
"object",
"is",
"a",
"module",
"."
] |
def ismodule(object):
"""Return true if the object is a module.
Module objects provide these attributes:
__doc__ documentation string
__file__ filename (missing for built-in modules)"""
return isinstance(object, types.ModuleType)
|
[
"def",
"ismodule",
"(",
"object",
")",
":",
"return",
"isinstance",
"(",
"object",
",",
"types",
".",
"ModuleType",
")"
] |
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/inspect.py#L51-L57
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
wx/lib/agw/customtreectrl.py
|
python
|
TreeItemAttr.HasFont
|
(self)
|
return self._font != wx.NullFont
|
Returns whether the attribute has font.
:return: ``True`` if the font attribute has been set, ``False`` otherwise.
|
Returns whether the attribute has font.
|
[
"Returns",
"whether",
"the",
"attribute",
"has",
"font",
"."
] |
def HasFont(self):
"""
Returns whether the attribute has font.
:return: ``True`` if the font attribute has been set, ``False`` otherwise.
"""
return self._font != wx.NullFont
|
[
"def",
"HasFont",
"(",
"self",
")",
":",
"return",
"self",
".",
"_font",
"!=",
"wx",
".",
"NullFont"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/agw/customtreectrl.py#L925-L932
|
|
yuxng/PoseCNN
|
9f3dd7b7bce21dcafc05e8f18ccc90da3caabd04
|
lib/gt_synthesize_layer/layer.py
|
python
|
GtSynthesizeLayer.forward
|
(self, iter)
|
return blobs
|
Get blobs and copy them into this layer's top blob vector.
|
Get blobs and copy them into this layer's top blob vector.
|
[
"Get",
"blobs",
"and",
"copy",
"them",
"into",
"this",
"layer",
"s",
"top",
"blob",
"vector",
"."
] |
def forward(self, iter):
"""Get blobs and copy them into this layer's top blob vector."""
blobs = self._get_next_minibatch(iter)
return blobs
|
[
"def",
"forward",
"(",
"self",
",",
"iter",
")",
":",
"blobs",
"=",
"self",
".",
"_get_next_minibatch",
"(",
"iter",
")",
"return",
"blobs"
] |
https://github.com/yuxng/PoseCNN/blob/9f3dd7b7bce21dcafc05e8f18ccc90da3caabd04/lib/gt_synthesize_layer/layer.py#L115-L119
|
|
adobe/chromium
|
cfe5bf0b51b1f6b9fe239c2a3c2f2364da9967d7
|
tools/symsrc/pefile.py
|
python
|
PE.retrieve_flags
|
(self, flag_dict, flag_filter)
|
return [(f[0], f[1]) for f in flag_dict.items() if
isinstance(f[0], str) and f[0].startswith(flag_filter)]
|
Read the flags from a dictionary and return them in a usable form.
Will return a list of (flag, value) for all flags in "flag_dict"
matching the filter "flag_filter".
|
Read the flags from a dictionary and return them in a usable form.
Will return a list of (flag, value) for all flags in "flag_dict"
matching the filter "flag_filter".
|
[
"Read",
"the",
"flags",
"from",
"a",
"dictionary",
"and",
"return",
"them",
"in",
"a",
"usable",
"form",
".",
"Will",
"return",
"a",
"list",
"of",
"(",
"flag",
"value",
")",
"for",
"all",
"flags",
"in",
"flag_dict",
"matching",
"the",
"filter",
"flag_filter",
"."
] |
def retrieve_flags(self, flag_dict, flag_filter):
"""Read the flags from a dictionary and return them in a usable form.
Will return a list of (flag, value) for all flags in "flag_dict"
matching the filter "flag_filter".
"""
return [(f[0], f[1]) for f in flag_dict.items() if
isinstance(f[0], str) and f[0].startswith(flag_filter)]
|
[
"def",
"retrieve_flags",
"(",
"self",
",",
"flag_dict",
",",
"flag_filter",
")",
":",
"return",
"[",
"(",
"f",
"[",
"0",
"]",
",",
"f",
"[",
"1",
"]",
")",
"for",
"f",
"in",
"flag_dict",
".",
"items",
"(",
")",
"if",
"isinstance",
"(",
"f",
"[",
"0",
"]",
",",
"str",
")",
"and",
"f",
"[",
"0",
"]",
".",
"startswith",
"(",
"flag_filter",
")",
"]"
] |
https://github.com/adobe/chromium/blob/cfe5bf0b51b1f6b9fe239c2a3c2f2364da9967d7/tools/symsrc/pefile.py#L1783-L1791
|
|
Xilinx/Vitis-AI
|
fc74d404563d9951b57245443c73bef389f3657f
|
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/ops/gradients_util.py
|
python
|
_MaybeCaptured
|
(t)
|
return t
|
If t is a captured value placeholder, returns the original captured value.
Args:
t: Tensor
Returns:
A tensor, potentially from a different Graph/FuncGraph.
|
If t is a captured value placeholder, returns the original captured value.
|
[
"If",
"t",
"is",
"a",
"captured",
"value",
"placeholder",
"returns",
"the",
"original",
"captured",
"value",
"."
] |
def _MaybeCaptured(t):
"""If t is a captured value placeholder, returns the original captured value.
Args:
t: Tensor
Returns:
A tensor, potentially from a different Graph/FuncGraph.
"""
# pylint: disable=protected-access
if (not isinstance(t, ops.EagerTensor) and
_IsFunction(t.op.graph) and t.op.type == "Placeholder"):
for input_t, placeholder_t in _Captures(t.op.graph):
if t is placeholder_t:
return _MaybeCaptured(input_t)
# pylint: enable=protected-access
return t
|
[
"def",
"_MaybeCaptured",
"(",
"t",
")",
":",
"# pylint: disable=protected-access",
"if",
"(",
"not",
"isinstance",
"(",
"t",
",",
"ops",
".",
"EagerTensor",
")",
"and",
"_IsFunction",
"(",
"t",
".",
"op",
".",
"graph",
")",
"and",
"t",
".",
"op",
".",
"type",
"==",
"\"Placeholder\"",
")",
":",
"for",
"input_t",
",",
"placeholder_t",
"in",
"_Captures",
"(",
"t",
".",
"op",
".",
"graph",
")",
":",
"if",
"t",
"is",
"placeholder_t",
":",
"return",
"_MaybeCaptured",
"(",
"input_t",
")",
"# pylint: enable=protected-access",
"return",
"t"
] |
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/ops/gradients_util.py#L409-L425
|
|
rapidsai/cudf
|
d5b2448fc69f17509304d594f029d0df56984962
|
python/cudf/cudf/core/multiindex.py
|
python
|
MultiIndex._level_index_from_level
|
(self, level)
|
Return level index from given level name or index
|
Return level index from given level name or index
|
[
"Return",
"level",
"index",
"from",
"given",
"level",
"name",
"or",
"index"
] |
def _level_index_from_level(self, level):
"""
Return level index from given level name or index
"""
try:
return self.names.index(level)
except ValueError:
if not is_integer(level):
raise KeyError(f"Level {level} not found")
if level < 0:
level += self.nlevels
if level >= self.nlevels:
raise IndexError(
f"Level {level} out of bounds. "
f"Index has {self.nlevels} levels."
) from None
return level
|
[
"def",
"_level_index_from_level",
"(",
"self",
",",
"level",
")",
":",
"try",
":",
"return",
"self",
".",
"names",
".",
"index",
"(",
"level",
")",
"except",
"ValueError",
":",
"if",
"not",
"is_integer",
"(",
"level",
")",
":",
"raise",
"KeyError",
"(",
"f\"Level {level} not found\"",
")",
"if",
"level",
"<",
"0",
":",
"level",
"+=",
"self",
".",
"nlevels",
"if",
"level",
">=",
"self",
".",
"nlevels",
":",
"raise",
"IndexError",
"(",
"f\"Level {level} out of bounds. \"",
"f\"Index has {self.nlevels} levels.\"",
")",
"from",
"None",
"return",
"level"
] |
https://github.com/rapidsai/cudf/blob/d5b2448fc69f17509304d594f029d0df56984962/python/cudf/cudf/core/multiindex.py#L1519-L1535
|
||
RLBot/RLBot
|
34332b12cf158b3ef8dbf174ae67c53683368a9d
|
src/main/python/rlbot/parsing/custom_config.py
|
python
|
ConfigObject.get_header
|
(self, header_name)
|
return self.add_header_name(header_name)
|
Returns a header with that name, creates it if it does not exist.
|
Returns a header with that name, creates it if it does not exist.
|
[
"Returns",
"a",
"header",
"with",
"that",
"name",
"creates",
"it",
"if",
"it",
"does",
"not",
"exist",
"."
] |
def get_header(self, header_name):
"""
Returns a header with that name, creates it if it does not exist.
"""
if header_name in self.headers:
return self.headers[header_name]
return self.add_header_name(header_name)
|
[
"def",
"get_header",
"(",
"self",
",",
"header_name",
")",
":",
"if",
"header_name",
"in",
"self",
".",
"headers",
":",
"return",
"self",
".",
"headers",
"[",
"header_name",
"]",
"return",
"self",
".",
"add_header_name",
"(",
"header_name",
")"
] |
https://github.com/RLBot/RLBot/blob/34332b12cf158b3ef8dbf174ae67c53683368a9d/src/main/python/rlbot/parsing/custom_config.py#L47-L53
|
|
hughperkins/tf-coriander
|
970d3df6c11400ad68405f22b0c42a52374e94ca
|
tensorflow/python/ops/rnn_cell.py
|
python
|
EmbeddingWrapper.__call__
|
(self, inputs, state, scope=None)
|
return self._cell(embedded, state)
|
Run the cell on embedded inputs.
|
Run the cell on embedded inputs.
|
[
"Run",
"the",
"cell",
"on",
"embedded",
"inputs",
"."
] |
def __call__(self, inputs, state, scope=None):
"""Run the cell on embedded inputs."""
with vs.variable_scope(scope or type(self).__name__): # "EmbeddingWrapper"
with ops.device("/cpu:0"):
if self._initializer:
initializer = self._initializer
elif vs.get_variable_scope().initializer:
initializer = vs.get_variable_scope().initializer
else:
# Default initializer for embeddings should have variance=1.
sqrt3 = math.sqrt(3) # Uniform(-sqrt(3), sqrt(3)) has variance=1.
initializer = init_ops.random_uniform_initializer(-sqrt3, sqrt3)
if type(state) is tuple:
data_type = state[0].dtype
else:
data_type = state.dtype
embedding = vs.get_variable(
"embedding", [self._embedding_classes, self._embedding_size],
initializer=initializer,
dtype=data_type)
embedded = embedding_ops.embedding_lookup(
embedding, array_ops.reshape(inputs, [-1]))
return self._cell(embedded, state)
|
[
"def",
"__call__",
"(",
"self",
",",
"inputs",
",",
"state",
",",
"scope",
"=",
"None",
")",
":",
"with",
"vs",
".",
"variable_scope",
"(",
"scope",
"or",
"type",
"(",
"self",
")",
".",
"__name__",
")",
":",
"# \"EmbeddingWrapper\"",
"with",
"ops",
".",
"device",
"(",
"\"/cpu:0\"",
")",
":",
"if",
"self",
".",
"_initializer",
":",
"initializer",
"=",
"self",
".",
"_initializer",
"elif",
"vs",
".",
"get_variable_scope",
"(",
")",
".",
"initializer",
":",
"initializer",
"=",
"vs",
".",
"get_variable_scope",
"(",
")",
".",
"initializer",
"else",
":",
"# Default initializer for embeddings should have variance=1.",
"sqrt3",
"=",
"math",
".",
"sqrt",
"(",
"3",
")",
"# Uniform(-sqrt(3), sqrt(3)) has variance=1.",
"initializer",
"=",
"init_ops",
".",
"random_uniform_initializer",
"(",
"-",
"sqrt3",
",",
"sqrt3",
")",
"if",
"type",
"(",
"state",
")",
"is",
"tuple",
":",
"data_type",
"=",
"state",
"[",
"0",
"]",
".",
"dtype",
"else",
":",
"data_type",
"=",
"state",
".",
"dtype",
"embedding",
"=",
"vs",
".",
"get_variable",
"(",
"\"embedding\"",
",",
"[",
"self",
".",
"_embedding_classes",
",",
"self",
".",
"_embedding_size",
"]",
",",
"initializer",
"=",
"initializer",
",",
"dtype",
"=",
"data_type",
")",
"embedded",
"=",
"embedding_ops",
".",
"embedding_lookup",
"(",
"embedding",
",",
"array_ops",
".",
"reshape",
"(",
"inputs",
",",
"[",
"-",
"1",
"]",
")",
")",
"return",
"self",
".",
"_cell",
"(",
"embedded",
",",
"state",
")"
] |
https://github.com/hughperkins/tf-coriander/blob/970d3df6c11400ad68405f22b0c42a52374e94ca/tensorflow/python/ops/rnn_cell.py#L732-L756
|
|
mantidproject/mantid
|
03deeb89254ec4289edb8771e0188c2090a02f32
|
qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/contexts/fitting_contexts/tf_asymmetry_fitting_context.py
|
python
|
TFAsymmetryFittingContext.tf_asymmetry_single_functions
|
(self)
|
return self._tf_asymmetry_single_functions
|
Returns the fit functions used for single TF Asymmetry fitting. Each function corresponds to a dataset.
|
Returns the fit functions used for single TF Asymmetry fitting. Each function corresponds to a dataset.
|
[
"Returns",
"the",
"fit",
"functions",
"used",
"for",
"single",
"TF",
"Asymmetry",
"fitting",
".",
"Each",
"function",
"corresponds",
"to",
"a",
"dataset",
"."
] |
def tf_asymmetry_single_functions(self) -> list:
"""Returns the fit functions used for single TF Asymmetry fitting. Each function corresponds to a dataset."""
return self._tf_asymmetry_single_functions
|
[
"def",
"tf_asymmetry_single_functions",
"(",
"self",
")",
"->",
"list",
":",
"return",
"self",
".",
"_tf_asymmetry_single_functions"
] |
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/contexts/fitting_contexts/tf_asymmetry_fitting_context.py#L87-L89
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/tkinter/__init__.py
|
python
|
Tk.__getattr__
|
(self, attr)
|
return getattr(self.tk, attr)
|
Delegate attribute access to the interpreter object
|
Delegate attribute access to the interpreter object
|
[
"Delegate",
"attribute",
"access",
"to",
"the",
"interpreter",
"object"
] |
def __getattr__(self, attr):
"Delegate attribute access to the interpreter object"
return getattr(self.tk, attr)
|
[
"def",
"__getattr__",
"(",
"self",
",",
"attr",
")",
":",
"return",
"getattr",
"(",
"self",
".",
"tk",
",",
"attr",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/tkinter/__init__.py#L2099-L2101
|
|
hanpfei/chromium-net
|
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
|
third_party/catapult/third_party/gsutil/third_party/boto/boto/dynamodb/layer1.py
|
python
|
Layer1.scan
|
(self, table_name, scan_filter=None,
attributes_to_get=None, limit=None,
exclusive_start_key=None, object_hook=None, count=False)
|
return self.make_request('Scan', json_input, object_hook=object_hook)
|
Perform a scan of DynamoDB. This version is currently punting
and expecting you to provide a full and correct JSON body
which is passed as is to DynamoDB.
:type table_name: str
:param table_name: The name of the table to scan.
:type scan_filter: dict
:param scan_filter: A Python version of the
ScanFilter data structure.
:type attributes_to_get: list
:param attributes_to_get: A list of attribute names.
If supplied, only the specified attribute names will
be returned. Otherwise, all attributes will be returned.
:type limit: int
:param limit: The maximum number of items to evaluate.
:type count: bool
:param count: If True, Amazon DynamoDB returns a total
number of items for the Scan operation, even if the
operation has no matching items for the assigned filter.
:type exclusive_start_key: list or tuple
:param exclusive_start_key: Primary key of the item from
which to continue an earlier query. This would be
provided as the LastEvaluatedKey in that query.
|
Perform a scan of DynamoDB. This version is currently punting
and expecting you to provide a full and correct JSON body
which is passed as is to DynamoDB.
|
[
"Perform",
"a",
"scan",
"of",
"DynamoDB",
".",
"This",
"version",
"is",
"currently",
"punting",
"and",
"expecting",
"you",
"to",
"provide",
"a",
"full",
"and",
"correct",
"JSON",
"body",
"which",
"is",
"passed",
"as",
"is",
"to",
"DynamoDB",
"."
] |
def scan(self, table_name, scan_filter=None,
attributes_to_get=None, limit=None,
exclusive_start_key=None, object_hook=None, count=False):
"""
Perform a scan of DynamoDB. This version is currently punting
and expecting you to provide a full and correct JSON body
which is passed as is to DynamoDB.
:type table_name: str
:param table_name: The name of the table to scan.
:type scan_filter: dict
:param scan_filter: A Python version of the
ScanFilter data structure.
:type attributes_to_get: list
:param attributes_to_get: A list of attribute names.
If supplied, only the specified attribute names will
be returned. Otherwise, all attributes will be returned.
:type limit: int
:param limit: The maximum number of items to evaluate.
:type count: bool
:param count: If True, Amazon DynamoDB returns a total
number of items for the Scan operation, even if the
operation has no matching items for the assigned filter.
:type exclusive_start_key: list or tuple
:param exclusive_start_key: Primary key of the item from
which to continue an earlier query. This would be
provided as the LastEvaluatedKey in that query.
"""
data = {'TableName': table_name}
if scan_filter:
data['ScanFilter'] = scan_filter
if attributes_to_get:
data['AttributesToGet'] = attributes_to_get
if limit:
data['Limit'] = limit
if count:
data['Count'] = True
if exclusive_start_key:
data['ExclusiveStartKey'] = exclusive_start_key
json_input = json.dumps(data)
return self.make_request('Scan', json_input, object_hook=object_hook)
|
[
"def",
"scan",
"(",
"self",
",",
"table_name",
",",
"scan_filter",
"=",
"None",
",",
"attributes_to_get",
"=",
"None",
",",
"limit",
"=",
"None",
",",
"exclusive_start_key",
"=",
"None",
",",
"object_hook",
"=",
"None",
",",
"count",
"=",
"False",
")",
":",
"data",
"=",
"{",
"'TableName'",
":",
"table_name",
"}",
"if",
"scan_filter",
":",
"data",
"[",
"'ScanFilter'",
"]",
"=",
"scan_filter",
"if",
"attributes_to_get",
":",
"data",
"[",
"'AttributesToGet'",
"]",
"=",
"attributes_to_get",
"if",
"limit",
":",
"data",
"[",
"'Limit'",
"]",
"=",
"limit",
"if",
"count",
":",
"data",
"[",
"'Count'",
"]",
"=",
"True",
"if",
"exclusive_start_key",
":",
"data",
"[",
"'ExclusiveStartKey'",
"]",
"=",
"exclusive_start_key",
"json_input",
"=",
"json",
".",
"dumps",
"(",
"data",
")",
"return",
"self",
".",
"make_request",
"(",
"'Scan'",
",",
"json_input",
",",
"object_hook",
"=",
"object_hook",
")"
] |
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/gsutil/third_party/boto/boto/dynamodb/layer1.py#L532-L577
|
|
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/scipy/scipy/interpolate/_cubic.py
|
python
|
pchip_interpolate
|
(xi, yi, x, der=0, axis=0)
|
Convenience function for pchip interpolation.
xi and yi are arrays of values used to approximate some function f,
with ``yi = f(xi)``. The interpolant uses monotonic cubic splines
to find the value of new points x and the derivatives there.
See `PchipInterpolator` for details.
Parameters
----------
xi : array_like
A sorted list of x-coordinates, of length N.
yi : array_like
A 1-D array of real values. `yi`'s length along the interpolation
axis must be equal to the length of `xi`. If N-D array, use axis
parameter to select correct axis.
x : scalar or array_like
Of length M.
der : int or list, optional
Derivatives to extract. The 0-th derivative can be included to
return the function value.
axis : int, optional
Axis in the yi array corresponding to the x-coordinate values.
See Also
--------
PchipInterpolator
Returns
-------
y : scalar or array_like
The result, of length R or length M or M by R,
|
Convenience function for pchip interpolation.
xi and yi are arrays of values used to approximate some function f,
with ``yi = f(xi)``. The interpolant uses monotonic cubic splines
to find the value of new points x and the derivatives there.
|
[
"Convenience",
"function",
"for",
"pchip",
"interpolation",
".",
"xi",
"and",
"yi",
"are",
"arrays",
"of",
"values",
"used",
"to",
"approximate",
"some",
"function",
"f",
"with",
"yi",
"=",
"f",
"(",
"xi",
")",
".",
"The",
"interpolant",
"uses",
"monotonic",
"cubic",
"splines",
"to",
"find",
"the",
"value",
"of",
"new",
"points",
"x",
"and",
"the",
"derivatives",
"there",
"."
] |
def pchip_interpolate(xi, yi, x, der=0, axis=0):
"""
Convenience function for pchip interpolation.
xi and yi are arrays of values used to approximate some function f,
with ``yi = f(xi)``. The interpolant uses monotonic cubic splines
to find the value of new points x and the derivatives there.
See `PchipInterpolator` for details.
Parameters
----------
xi : array_like
A sorted list of x-coordinates, of length N.
yi : array_like
A 1-D array of real values. `yi`'s length along the interpolation
axis must be equal to the length of `xi`. If N-D array, use axis
parameter to select correct axis.
x : scalar or array_like
Of length M.
der : int or list, optional
Derivatives to extract. The 0-th derivative can be included to
return the function value.
axis : int, optional
Axis in the yi array corresponding to the x-coordinate values.
See Also
--------
PchipInterpolator
Returns
-------
y : scalar or array_like
The result, of length R or length M or M by R,
"""
P = PchipInterpolator(xi, yi, axis=axis)
if der == 0:
return P(x)
elif _isscalar(der):
return P.derivative(der)(x)
else:
return [P.derivative(nu)(x) for nu in der]
|
[
"def",
"pchip_interpolate",
"(",
"xi",
",",
"yi",
",",
"x",
",",
"der",
"=",
"0",
",",
"axis",
"=",
"0",
")",
":",
"P",
"=",
"PchipInterpolator",
"(",
"xi",
",",
"yi",
",",
"axis",
"=",
"axis",
")",
"if",
"der",
"==",
"0",
":",
"return",
"P",
"(",
"x",
")",
"elif",
"_isscalar",
"(",
"der",
")",
":",
"return",
"P",
".",
"derivative",
"(",
"der",
")",
"(",
"x",
")",
"else",
":",
"return",
"[",
"P",
".",
"derivative",
"(",
"nu",
")",
"(",
"x",
")",
"for",
"nu",
"in",
"der",
"]"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/scipy/interpolate/_cubic.py#L179-L221
|
||
wlanjie/AndroidFFmpeg
|
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
|
tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/warnings.py
|
python
|
filterwarnings
|
(action, message="", category=Warning, module="", lineno=0,
append=0)
|
Insert an entry into the list of warnings filters (at the front).
'action' -- one of "error", "ignore", "always", "default", "module",
or "once"
'message' -- a regex that the warning message must match
'category' -- a class that the warning must be a subclass of
'module' -- a regex that the module name must match
'lineno' -- an integer line number, 0 matches all warnings
'append' -- if true, append to the list of filters
|
Insert an entry into the list of warnings filters (at the front).
|
[
"Insert",
"an",
"entry",
"into",
"the",
"list",
"of",
"warnings",
"filters",
"(",
"at",
"the",
"front",
")",
"."
] |
def filterwarnings(action, message="", category=Warning, module="", lineno=0,
append=0):
"""Insert an entry into the list of warnings filters (at the front).
'action' -- one of "error", "ignore", "always", "default", "module",
or "once"
'message' -- a regex that the warning message must match
'category' -- a class that the warning must be a subclass of
'module' -- a regex that the module name must match
'lineno' -- an integer line number, 0 matches all warnings
'append' -- if true, append to the list of filters
"""
import re
assert action in ("error", "ignore", "always", "default", "module",
"once"), "invalid action: %r" % (action,)
assert isinstance(message, basestring), "message must be a string"
assert isinstance(category, (type, types.ClassType)), \
"category must be a class"
assert issubclass(category, Warning), "category must be a Warning subclass"
assert isinstance(module, basestring), "module must be a string"
assert isinstance(lineno, int) and lineno >= 0, \
"lineno must be an int >= 0"
item = (action, re.compile(message, re.I), category,
re.compile(module), lineno)
if append:
filters.append(item)
else:
filters.insert(0, item)
|
[
"def",
"filterwarnings",
"(",
"action",
",",
"message",
"=",
"\"\"",
",",
"category",
"=",
"Warning",
",",
"module",
"=",
"\"\"",
",",
"lineno",
"=",
"0",
",",
"append",
"=",
"0",
")",
":",
"import",
"re",
"assert",
"action",
"in",
"(",
"\"error\"",
",",
"\"ignore\"",
",",
"\"always\"",
",",
"\"default\"",
",",
"\"module\"",
",",
"\"once\"",
")",
",",
"\"invalid action: %r\"",
"%",
"(",
"action",
",",
")",
"assert",
"isinstance",
"(",
"message",
",",
"basestring",
")",
",",
"\"message must be a string\"",
"assert",
"isinstance",
"(",
"category",
",",
"(",
"type",
",",
"types",
".",
"ClassType",
")",
")",
",",
"\"category must be a class\"",
"assert",
"issubclass",
"(",
"category",
",",
"Warning",
")",
",",
"\"category must be a Warning subclass\"",
"assert",
"isinstance",
"(",
"module",
",",
"basestring",
")",
",",
"\"module must be a string\"",
"assert",
"isinstance",
"(",
"lineno",
",",
"int",
")",
"and",
"lineno",
">=",
"0",
",",
"\"lineno must be an int >= 0\"",
"item",
"=",
"(",
"action",
",",
"re",
".",
"compile",
"(",
"message",
",",
"re",
".",
"I",
")",
",",
"category",
",",
"re",
".",
"compile",
"(",
"module",
")",
",",
"lineno",
")",
"if",
"append",
":",
"filters",
".",
"append",
"(",
"item",
")",
"else",
":",
"filters",
".",
"insert",
"(",
"0",
",",
"item",
")"
] |
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/warnings.py#L45-L72
|
||
yue/yue
|
619d62c191b13c51c01be451dc48917c34a5aefc
|
building/tools/cpplint.py
|
python
|
CheckIncludeLine
|
(filename, clean_lines, linenum, include_state, error)
|
Check rules that are applicable to #include lines.
Strings on #include lines are NOT removed from elided line, to make
certain tasks easier. However, to prevent false positives, checks
applicable to #include lines in CheckLanguage must be put here.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
include_state: An _IncludeState instance in which the headers are inserted.
error: The function to call with any errors found.
|
Check rules that are applicable to #include lines.
|
[
"Check",
"rules",
"that",
"are",
"applicable",
"to",
"#include",
"lines",
"."
] |
def CheckIncludeLine(filename, clean_lines, linenum, include_state, error):
"""Check rules that are applicable to #include lines.
Strings on #include lines are NOT removed from elided line, to make
certain tasks easier. However, to prevent false positives, checks
applicable to #include lines in CheckLanguage must be put here.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
include_state: An _IncludeState instance in which the headers are inserted.
error: The function to call with any errors found.
"""
fileinfo = FileInfo(filename)
line = clean_lines.lines[linenum]
# "include" should use the new style "foo/bar.h" instead of just "bar.h"
# Only do this check if the included header follows google naming
# conventions. If not, assume that it's a 3rd party API that
# requires special include conventions.
#
# We also make an exception for Lua headers, which follow google
# naming convention but not the include convention.
match = Match(r'#include\s*"([^/]+\.h)"', line)
if match and not _THIRD_PARTY_HEADERS_PATTERN.match(match.group(1)):
error(filename, linenum, 'build/include', 4,
'Include the directory when naming .h files')
# we shouldn't include a file more than once. actually, there are a
# handful of instances where doing so is okay, but in general it's
# not.
match = _RE_PATTERN_INCLUDE.search(line)
if match:
include = match.group(2)
is_system = (match.group(1) == '<')
duplicate_line = include_state.FindHeader(include)
if duplicate_line >= 0:
error(filename, linenum, 'build/include', 4,
'"%s" already included at %s:%s' %
(include, filename, duplicate_line))
elif (include.endswith('.cc') and
os.path.dirname(fileinfo.RepositoryName()) != os.path.dirname(include)):
error(filename, linenum, 'build/include', 4,
'Do not include .cc files from other packages')
elif not _THIRD_PARTY_HEADERS_PATTERN.match(include):
include_state.include_list[-1].append((include, linenum))
# We want to ensure that headers appear in the right order:
# 1) for foo.cc, foo.h (preferred location)
# 2) c system files
# 3) cpp system files
# 4) for foo.cc, foo.h (deprecated location)
# 5) other google headers
#
# We classify each include statement as one of those 5 types
# using a number of techniques. The include_state object keeps
# track of the highest type seen, and complains if we see a
# lower type after that.
error_message = include_state.CheckNextIncludeOrder(
_ClassifyInclude(fileinfo, include, is_system))
if error_message:
error(filename, linenum, 'build/include_order', 4,
'%s. Should be: %s.h, c system, c++ system, other.' %
(error_message, fileinfo.BaseName()))
canonical_include = include_state.CanonicalizeAlphabeticalOrder(include)
if not include_state.IsInAlphabeticalOrder(
clean_lines, linenum, canonical_include):
error(filename, linenum, 'build/include_alpha', 4,
'Include "%s" not in alphabetical order' % include)
include_state.SetLastHeader(canonical_include)
|
[
"def",
"CheckIncludeLine",
"(",
"filename",
",",
"clean_lines",
",",
"linenum",
",",
"include_state",
",",
"error",
")",
":",
"fileinfo",
"=",
"FileInfo",
"(",
"filename",
")",
"line",
"=",
"clean_lines",
".",
"lines",
"[",
"linenum",
"]",
"# \"include\" should use the new style \"foo/bar.h\" instead of just \"bar.h\"",
"# Only do this check if the included header follows google naming",
"# conventions. If not, assume that it's a 3rd party API that",
"# requires special include conventions.",
"#",
"# We also make an exception for Lua headers, which follow google",
"# naming convention but not the include convention.",
"match",
"=",
"Match",
"(",
"r'#include\\s*\"([^/]+\\.h)\"'",
",",
"line",
")",
"if",
"match",
"and",
"not",
"_THIRD_PARTY_HEADERS_PATTERN",
".",
"match",
"(",
"match",
".",
"group",
"(",
"1",
")",
")",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'build/include'",
",",
"4",
",",
"'Include the directory when naming .h files'",
")",
"# we shouldn't include a file more than once. actually, there are a",
"# handful of instances where doing so is okay, but in general it's",
"# not.",
"match",
"=",
"_RE_PATTERN_INCLUDE",
".",
"search",
"(",
"line",
")",
"if",
"match",
":",
"include",
"=",
"match",
".",
"group",
"(",
"2",
")",
"is_system",
"=",
"(",
"match",
".",
"group",
"(",
"1",
")",
"==",
"'<'",
")",
"duplicate_line",
"=",
"include_state",
".",
"FindHeader",
"(",
"include",
")",
"if",
"duplicate_line",
">=",
"0",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'build/include'",
",",
"4",
",",
"'\"%s\" already included at %s:%s'",
"%",
"(",
"include",
",",
"filename",
",",
"duplicate_line",
")",
")",
"elif",
"(",
"include",
".",
"endswith",
"(",
"'.cc'",
")",
"and",
"os",
".",
"path",
".",
"dirname",
"(",
"fileinfo",
".",
"RepositoryName",
"(",
")",
")",
"!=",
"os",
".",
"path",
".",
"dirname",
"(",
"include",
")",
")",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'build/include'",
",",
"4",
",",
"'Do not include .cc files from other packages'",
")",
"elif",
"not",
"_THIRD_PARTY_HEADERS_PATTERN",
".",
"match",
"(",
"include",
")",
":",
"include_state",
".",
"include_list",
"[",
"-",
"1",
"]",
".",
"append",
"(",
"(",
"include",
",",
"linenum",
")",
")",
"# We want to ensure that headers appear in the right order:",
"# 1) for foo.cc, foo.h (preferred location)",
"# 2) c system files",
"# 3) cpp system files",
"# 4) for foo.cc, foo.h (deprecated location)",
"# 5) other google headers",
"#",
"# We classify each include statement as one of those 5 types",
"# using a number of techniques. The include_state object keeps",
"# track of the highest type seen, and complains if we see a",
"# lower type after that.",
"error_message",
"=",
"include_state",
".",
"CheckNextIncludeOrder",
"(",
"_ClassifyInclude",
"(",
"fileinfo",
",",
"include",
",",
"is_system",
")",
")",
"if",
"error_message",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'build/include_order'",
",",
"4",
",",
"'%s. Should be: %s.h, c system, c++ system, other.'",
"%",
"(",
"error_message",
",",
"fileinfo",
".",
"BaseName",
"(",
")",
")",
")",
"canonical_include",
"=",
"include_state",
".",
"CanonicalizeAlphabeticalOrder",
"(",
"include",
")",
"if",
"not",
"include_state",
".",
"IsInAlphabeticalOrder",
"(",
"clean_lines",
",",
"linenum",
",",
"canonical_include",
")",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'build/include_alpha'",
",",
"4",
",",
"'Include \"%s\" not in alphabetical order'",
"%",
"include",
")",
"include_state",
".",
"SetLastHeader",
"(",
"canonical_include",
")"
] |
https://github.com/yue/yue/blob/619d62c191b13c51c01be451dc48917c34a5aefc/building/tools/cpplint.py#L4510-L4580
|
||
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/osx_cocoa/grid.py
|
python
|
GridCellAttr.Clone
|
(*args, **kwargs)
|
return _grid.GridCellAttr_Clone(*args, **kwargs)
|
Clone(self) -> GridCellAttr
|
Clone(self) -> GridCellAttr
|
[
"Clone",
"(",
"self",
")",
"-",
">",
"GridCellAttr"
] |
def Clone(*args, **kwargs):
"""Clone(self) -> GridCellAttr"""
return _grid.GridCellAttr_Clone(*args, **kwargs)
|
[
"def",
"Clone",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_grid",
".",
"GridCellAttr_Clone",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/grid.py#L535-L537
|
|
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/pandas/py3/pandas/plotting/_core.py
|
python
|
PlotAccessor.area
|
(self, x=None, y=None, **kwargs)
|
return self(kind="area", x=x, y=y, **kwargs)
|
Draw a stacked area plot.
An area plot displays quantitative data visually.
This function wraps the matplotlib area function.
Parameters
----------
x : label or position, optional
Coordinates for the X axis. By default uses the index.
y : label or position, optional
Column to plot. By default uses all columns.
stacked : bool, default True
Area plots are stacked by default. Set to False to create a
unstacked plot.
**kwargs
Additional keyword arguments are documented in
:meth:`DataFrame.plot`.
Returns
-------
matplotlib.axes.Axes or numpy.ndarray
Area plot, or array of area plots if subplots is True.
See Also
--------
DataFrame.plot : Make plots of DataFrame using matplotlib / pylab.
Examples
--------
Draw an area plot based on basic business metrics:
.. plot::
:context: close-figs
>>> df = pd.DataFrame({
... 'sales': [3, 2, 3, 9, 10, 6],
... 'signups': [5, 5, 6, 12, 14, 13],
... 'visits': [20, 42, 28, 62, 81, 50],
... }, index=pd.date_range(start='2018/01/01', end='2018/07/01',
... freq='M'))
>>> ax = df.plot.area()
Area plots are stacked by default. To produce an unstacked plot,
pass ``stacked=False``:
.. plot::
:context: close-figs
>>> ax = df.plot.area(stacked=False)
Draw an area plot for a single column:
.. plot::
:context: close-figs
>>> ax = df.plot.area(y='sales')
Draw with a different `x`:
.. plot::
:context: close-figs
>>> df = pd.DataFrame({
... 'sales': [3, 2, 3],
... 'visits': [20, 42, 28],
... 'day': [1, 2, 3],
... })
>>> ax = df.plot.area(x='day')
|
Draw a stacked area plot.
|
[
"Draw",
"a",
"stacked",
"area",
"plot",
"."
] |
def area(self, x=None, y=None, **kwargs):
"""
Draw a stacked area plot.
An area plot displays quantitative data visually.
This function wraps the matplotlib area function.
Parameters
----------
x : label or position, optional
Coordinates for the X axis. By default uses the index.
y : label or position, optional
Column to plot. By default uses all columns.
stacked : bool, default True
Area plots are stacked by default. Set to False to create a
unstacked plot.
**kwargs
Additional keyword arguments are documented in
:meth:`DataFrame.plot`.
Returns
-------
matplotlib.axes.Axes or numpy.ndarray
Area plot, or array of area plots if subplots is True.
See Also
--------
DataFrame.plot : Make plots of DataFrame using matplotlib / pylab.
Examples
--------
Draw an area plot based on basic business metrics:
.. plot::
:context: close-figs
>>> df = pd.DataFrame({
... 'sales': [3, 2, 3, 9, 10, 6],
... 'signups': [5, 5, 6, 12, 14, 13],
... 'visits': [20, 42, 28, 62, 81, 50],
... }, index=pd.date_range(start='2018/01/01', end='2018/07/01',
... freq='M'))
>>> ax = df.plot.area()
Area plots are stacked by default. To produce an unstacked plot,
pass ``stacked=False``:
.. plot::
:context: close-figs
>>> ax = df.plot.area(stacked=False)
Draw an area plot for a single column:
.. plot::
:context: close-figs
>>> ax = df.plot.area(y='sales')
Draw with a different `x`:
.. plot::
:context: close-figs
>>> df = pd.DataFrame({
... 'sales': [3, 2, 3],
... 'visits': [20, 42, 28],
... 'day': [1, 2, 3],
... })
>>> ax = df.plot.area(x='day')
"""
return self(kind="area", x=x, y=y, **kwargs)
|
[
"def",
"area",
"(",
"self",
",",
"x",
"=",
"None",
",",
"y",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"self",
"(",
"kind",
"=",
"\"area\"",
",",
"x",
"=",
"x",
",",
"y",
"=",
"y",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pandas/py3/pandas/plotting/_core.py#L1425-L1496
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/osx_carbon/dataview.py
|
python
|
DataViewItem.__hash__
|
(*args, **kwargs)
|
return _dataview.DataViewItem___hash__(*args, **kwargs)
|
__hash__(self) -> long
|
__hash__(self) -> long
|
[
"__hash__",
"(",
"self",
")",
"-",
">",
"long"
] |
def __hash__(*args, **kwargs):
"""__hash__(self) -> long"""
return _dataview.DataViewItem___hash__(*args, **kwargs)
|
[
"def",
"__hash__",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_dataview",
".",
"DataViewItem___hash__",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/dataview.py#L113-L115
|
|
SequoiaDB/SequoiaDB
|
2894ed7e5bd6fe57330afc900cf76d0ff0df9f64
|
driver/python/bson/objectid.py
|
python
|
ObjectId.from_datetime
|
(cls, generation_time)
|
return cls(oid)
|
Create a dummy ObjectId instance with a specific generation time.
This method is useful for doing range queries on a field
containing :class:`ObjectId` instances.
.. warning::
It is not safe to insert a document containing an ObjectId
generated using this method. This method deliberately
eliminates the uniqueness guarantee that ObjectIds
generally provide. ObjectIds generated with this method
should be used exclusively in queries.
`generation_time` will be converted to UTC. Naive datetime
instances will be treated as though they already contain UTC.
An example using this helper to get documents where ``"_id"``
was generated before January 1, 2010 would be:
>>> gen_time = datetime.datetime(2010, 1, 1)
>>> dummy_id = ObjectId.from_datetime(gen_time)
>>> result = collection.find({"_id": {"$lt": dummy_id}})
:Parameters:
- `generation_time`: :class:`~datetime.datetime` to be used
as the generation time for the resulting ObjectId.
.. versionchanged:: 1.8
Properly handle timezone aware values for
`generation_time`.
.. versionadded:: 1.6
|
Create a dummy ObjectId instance with a specific generation time.
|
[
"Create",
"a",
"dummy",
"ObjectId",
"instance",
"with",
"a",
"specific",
"generation",
"time",
"."
] |
def from_datetime(cls, generation_time):
"""Create a dummy ObjectId instance with a specific generation time.
This method is useful for doing range queries on a field
containing :class:`ObjectId` instances.
.. warning::
It is not safe to insert a document containing an ObjectId
generated using this method. This method deliberately
eliminates the uniqueness guarantee that ObjectIds
generally provide. ObjectIds generated with this method
should be used exclusively in queries.
`generation_time` will be converted to UTC. Naive datetime
instances will be treated as though they already contain UTC.
An example using this helper to get documents where ``"_id"``
was generated before January 1, 2010 would be:
>>> gen_time = datetime.datetime(2010, 1, 1)
>>> dummy_id = ObjectId.from_datetime(gen_time)
>>> result = collection.find({"_id": {"$lt": dummy_id}})
:Parameters:
- `generation_time`: :class:`~datetime.datetime` to be used
as the generation time for the resulting ObjectId.
.. versionchanged:: 1.8
Properly handle timezone aware values for
`generation_time`.
.. versionadded:: 1.6
"""
if generation_time.utcoffset() is not None:
generation_time = generation_time - generation_time.utcoffset()
ts = calendar.timegm(generation_time.timetuple())
oid = struct.pack(">i", int(ts)) + ZERO * 8
return cls(oid)
|
[
"def",
"from_datetime",
"(",
"cls",
",",
"generation_time",
")",
":",
"if",
"generation_time",
".",
"utcoffset",
"(",
")",
"is",
"not",
"None",
":",
"generation_time",
"=",
"generation_time",
"-",
"generation_time",
".",
"utcoffset",
"(",
")",
"ts",
"=",
"calendar",
".",
"timegm",
"(",
"generation_time",
".",
"timetuple",
"(",
")",
")",
"oid",
"=",
"struct",
".",
"pack",
"(",
"\">i\"",
",",
"int",
"(",
"ts",
")",
")",
"+",
"ZERO",
"*",
"8",
"return",
"cls",
"(",
"oid",
")"
] |
https://github.com/SequoiaDB/SequoiaDB/blob/2894ed7e5bd6fe57330afc900cf76d0ff0df9f64/driver/python/bson/objectid.py#L99-L136
|
|
SequoiaDB/SequoiaDB
|
2894ed7e5bd6fe57330afc900cf76d0ff0df9f64
|
tools/server/php_linux/libxml2/lib/python2.4/site-packages/libxml2.py
|
python
|
uCSIsMyanmar
|
(code)
|
return ret
|
Check whether the character is part of Myanmar UCS Block
|
Check whether the character is part of Myanmar UCS Block
|
[
"Check",
"whether",
"the",
"character",
"is",
"part",
"of",
"Myanmar",
"UCS",
"Block"
] |
def uCSIsMyanmar(code):
"""Check whether the character is part of Myanmar UCS Block """
ret = libxml2mod.xmlUCSIsMyanmar(code)
return ret
|
[
"def",
"uCSIsMyanmar",
"(",
"code",
")",
":",
"ret",
"=",
"libxml2mod",
".",
"xmlUCSIsMyanmar",
"(",
"code",
")",
"return",
"ret"
] |
https://github.com/SequoiaDB/SequoiaDB/blob/2894ed7e5bd6fe57330afc900cf76d0ff0df9f64/tools/server/php_linux/libxml2/lib/python2.4/site-packages/libxml2.py#L2731-L2734
|
|
TimoSaemann/caffe-segnet-cudnn5
|
abcf30dca449245e101bf4ced519f716177f0885
|
scripts/cpp_lint.py
|
python
|
_BlockInfo.CheckEnd
|
(self, filename, clean_lines, linenum, error)
|
Run checks that applies to text after the closing brace.
This is mostly used for checking end of namespace comments.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
error: The function to call with any errors found.
|
Run checks that applies to text after the closing brace.
|
[
"Run",
"checks",
"that",
"applies",
"to",
"text",
"after",
"the",
"closing",
"brace",
"."
] |
def CheckEnd(self, filename, clean_lines, linenum, error):
"""Run checks that applies to text after the closing brace.
This is mostly used for checking end of namespace comments.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
error: The function to call with any errors found.
"""
pass
|
[
"def",
"CheckEnd",
"(",
"self",
",",
"filename",
",",
"clean_lines",
",",
"linenum",
",",
"error",
")",
":",
"pass"
] |
https://github.com/TimoSaemann/caffe-segnet-cudnn5/blob/abcf30dca449245e101bf4ced519f716177f0885/scripts/cpp_lint.py#L1778-L1789
|
||
msftguy/ssh-rd
|
a5f3a79daeac5844edebf01916c9613563f1c390
|
_3rd/boost_1_48_0/tools/build/v2/build/targets.py
|
python
|
BasicTarget.create_subvariant
|
(self, root_targets, all_targets,
build_request, sources,
rproperties, usage_requirements)
|
return s
|
Creates a new subvariant-dg instances for 'targets'
- 'root-targets' the virtual targets will be returned to dependents
- 'all-targets' all virtual
targets created while building this main target
- 'build-request' is property-set instance with
requested build properties
|
Creates a new subvariant-dg instances for 'targets'
- 'root-targets' the virtual targets will be returned to dependents
- 'all-targets' all virtual
targets created while building this main target
- 'build-request' is property-set instance with
requested build properties
|
[
"Creates",
"a",
"new",
"subvariant",
"-",
"dg",
"instances",
"for",
"targets",
"-",
"root",
"-",
"targets",
"the",
"virtual",
"targets",
"will",
"be",
"returned",
"to",
"dependents",
"-",
"all",
"-",
"targets",
"all",
"virtual",
"targets",
"created",
"while",
"building",
"this",
"main",
"target",
"-",
"build",
"-",
"request",
"is",
"property",
"-",
"set",
"instance",
"with",
"requested",
"build",
"properties"
] |
def create_subvariant (self, root_targets, all_targets,
build_request, sources,
rproperties, usage_requirements):
"""Creates a new subvariant-dg instances for 'targets'
- 'root-targets' the virtual targets will be returned to dependents
- 'all-targets' all virtual
targets created while building this main target
- 'build-request' is property-set instance with
requested build properties"""
for e in root_targets:
e.root (True)
s = Subvariant (self, build_request, sources,
rproperties, usage_requirements, all_targets)
for v in all_targets:
if not v.creating_subvariant():
v.creating_subvariant(s)
return s
|
[
"def",
"create_subvariant",
"(",
"self",
",",
"root_targets",
",",
"all_targets",
",",
"build_request",
",",
"sources",
",",
"rproperties",
",",
"usage_requirements",
")",
":",
"for",
"e",
"in",
"root_targets",
":",
"e",
".",
"root",
"(",
"True",
")",
"s",
"=",
"Subvariant",
"(",
"self",
",",
"build_request",
",",
"sources",
",",
"rproperties",
",",
"usage_requirements",
",",
"all_targets",
")",
"for",
"v",
"in",
"all_targets",
":",
"if",
"not",
"v",
".",
"creating_subvariant",
"(",
")",
":",
"v",
".",
"creating_subvariant",
"(",
"s",
")",
"return",
"s"
] |
https://github.com/msftguy/ssh-rd/blob/a5f3a79daeac5844edebf01916c9613563f1c390/_3rd/boost_1_48_0/tools/build/v2/build/targets.py#L1248-L1268
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/msw/propgrid.py
|
python
|
PGProperty.IsEnabled
|
(*args, **kwargs)
|
return _propgrid.PGProperty_IsEnabled(*args, **kwargs)
|
IsEnabled(self) -> bool
|
IsEnabled(self) -> bool
|
[
"IsEnabled",
"(",
"self",
")",
"-",
">",
"bool"
] |
def IsEnabled(*args, **kwargs):
"""IsEnabled(self) -> bool"""
return _propgrid.PGProperty_IsEnabled(*args, **kwargs)
|
[
"def",
"IsEnabled",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_propgrid",
".",
"PGProperty_IsEnabled",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/propgrid.py#L662-L664
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.