nwo (string, 5-106 chars) | sha (string, 40 chars) | path (string, 4-174 chars) | language (string, 1 distinct value) | identifier (string, 1-140 chars) | parameters (string, 0-87.7k chars) | argument_list (string, 1 distinct value) | return_statement (string, 0-426k chars) | docstring (string, 0-64.3k chars) | docstring_summary (string, 0-26.3k chars) | docstring_tokens (list) | function (string, 18-4.83M chars) | function_tokens (list) | url (string, 83-304 chars)
---|---|---|---|---|---|---|---|---|---|---|---|---|---
LinOTP/LinOTP
|
bb3940bbaccea99550e6c063ff824f258dd6d6d7
|
linotp/lib/reply.py
|
python
|
sendResult
|
(response, obj, id=1, opt=None, status=True)
|
return Response(response=data, status=200, mimetype="application/json")
|
sendResult - return a json result document
:param response: the pylons response object
:type response: response object
:param obj: simple result object like dict, string or list
:type obj: dict or list or string/unicode
:param id: id value, for future versions
:type id: int
:param opt: optional parameter, which allows providing more detail
:type opt: None or simple type like dict, list or string/unicode
:return: json rendered string result
:rtype: string
|
sendResult - return a json result document
|
[
"sendResult",
"-",
"return",
"an",
"json",
"result",
"document"
] |
def sendResult(response, obj, id=1, opt=None, status=True):
"""
sendResult - return a json result document
:param response: the pylons response object
:type response: response object
:param obj: simple result object like dict, string or list
:type obj: dict or list or string/unicode
:param id: id value, for future versions
:type id: int
:param opt: optional parameter, which allows providing more detail
:type opt: None or simple type like dict, list or string/unicode
:return: json rendered string result
:rtype: string
"""
res = {
"jsonrpc": get_api_version(),
"result": {
"status": status,
"value": obj,
},
"version": get_version(),
"id": id,
}
if opt is not None and len(opt) > 0:
res["detail"] = opt
data = json.dumps(res, indent=3)
return Response(response=data, status=200, mimetype="application/json")
|
[
"def",
"sendResult",
"(",
"response",
",",
"obj",
",",
"id",
"=",
"1",
",",
"opt",
"=",
"None",
",",
"status",
"=",
"True",
")",
":",
"res",
"=",
"{",
"\"jsonrpc\"",
":",
"get_api_version",
"(",
")",
",",
"\"result\"",
":",
"{",
"\"status\"",
":",
"status",
",",
"\"value\"",
":",
"obj",
",",
"}",
",",
"\"version\"",
":",
"get_version",
"(",
")",
",",
"\"id\"",
":",
"id",
",",
"}",
"if",
"opt",
"is",
"not",
"None",
"and",
"len",
"(",
"opt",
")",
">",
"0",
":",
"res",
"[",
"\"detail\"",
"]",
"=",
"opt",
"data",
"=",
"json",
".",
"dumps",
"(",
"res",
",",
"indent",
"=",
"3",
")",
"return",
"Response",
"(",
"response",
"=",
"data",
",",
"status",
"=",
"200",
",",
"mimetype",
"=",
"\"application/json\"",
")"
] |
https://github.com/LinOTP/LinOTP/blob/bb3940bbaccea99550e6c063ff824f258dd6d6d7/linotp/lib/reply.py#L286-L319
|
|
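A minimal sketch of the JSON envelope sendResult assembles, for reference. The "2.0" and "LinOTP x.y" values are placeholders for what get_api_version() and get_version() return (their implementations are not shown in this row), and the Response wrapper is omitted so the sketch stays dependency-free:

```python
import json

def send_result_envelope(obj, id=1, opt=None, status=True):
    # Mirrors the dict sendResult builds; version strings are stand-ins.
    res = {
        "jsonrpc": "2.0",            # placeholder for get_api_version()
        "result": {"status": status, "value": obj},
        "version": "LinOTP x.y",     # placeholder for get_version()
        "id": id,
    }
    if opt is not None and len(opt) > 0:
        res["detail"] = opt
    return json.dumps(res, indent=3)

print(send_result_envelope({"token": "ok"}, opt={"reason": "demo"}))
```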
QCoDeS/Qcodes
|
3cda2cef44812e2aa4672781f2423bf5f816f9f9
|
qcodes/instrument_drivers/Keysight/keysightb1500/KeysightB1517A.py
|
python
|
B1517A.setup_staircase_sweep
|
(
self,
v_start: float,
v_end: float,
n_steps: int,
post_sweep_voltage_val: Union[constants.WMDCV.Post,
int] = constants.WMDCV.Post.STOP,
av_coef: int = -1,
enable_filter: bool = True,
v_src_range: constants.OutputRange = constants.VOutputRange.AUTO,
i_comp: float = 10e-6,
i_meas_range: Optional[
constants.MeasureRange] = constants.IMeasRange.FIX_10uA,
hold_time: float = 0,
delay: float = 0,
step_delay: float = 0,
measure_delay: float = 0,
abort_enabled: Union[constants.Abort,
int] = constants.Abort.ENABLED,
sweep_mode: Union[constants.SweepMode,
int] = constants.SweepMode.LINEAR
)
|
Setup the staircase sweep measurement using the same set of commands
(in the same order) as given in the programming manual - see pages
3-19 and 3-20.
Args:
v_start: starting voltage of staircase sweep
v_end: ending voltage of staircase sweep
n_steps: number of measurement points (uniformly distributed
between v_start and v_end)
post_sweep_voltage_val: voltage to hold at end of sweep (i.e.
start or end val). Sweep chan will also output this voltage
if an abort condition is encountered during the sweep
av_coef: coefficient to use for av command to set ADC
averaging. A negative value implies NPLC mode, with the absolute
value of av_coef as the NPLC setting to use. A positive value
implies auto mode and must be set to >= 4
enable_filter: turn SMU filter on or off
v_src_range: range setting to use for voltage source
i_comp: current compliance level
i_meas_range: current measurement range
hold_time: time (in s) to wait before starting very first
measurement in sweep
delay: time (in s) after starting to force a step output and
before starting a step measurement
step_delay: time (in s) after starting a step measurement before
next step in staircase. If step_delay is < measurement time,
B1500 waits until measurement complete and then forces the
next step value.
measure_delay: time (in s) after receiving a start step
measurement trigger and before starting a step measurement
abort_enabled: Enable abort
sweep_mode: Linear, log, linear-2-way or log-2-way
|
Setup the staircase sweep measurement using the same set of commands
(in the same order) as given in the programming manual - see pages
3-19 and 3-20.
|
[
"Setup",
"the",
"staircase",
"sweep",
"measurement",
"using",
"the",
"same",
"set",
"of",
"commands",
"(",
"in",
"the",
"same",
"order",
")",
"as",
"given",
"in",
"the",
"programming",
"manual",
"-",
"see",
"pages",
"3",
"-",
"19",
"and",
"3",
"-",
"20",
"."
] |
def setup_staircase_sweep(
self,
v_start: float,
v_end: float,
n_steps: int,
post_sweep_voltage_val: Union[constants.WMDCV.Post,
int] = constants.WMDCV.Post.STOP,
av_coef: int = -1,
enable_filter: bool = True,
v_src_range: constants.OutputRange = constants.VOutputRange.AUTO,
i_comp: float = 10e-6,
i_meas_range: Optional[
constants.MeasureRange] = constants.IMeasRange.FIX_10uA,
hold_time: float = 0,
delay: float = 0,
step_delay: float = 0,
measure_delay: float = 0,
abort_enabled: Union[constants.Abort,
int] = constants.Abort.ENABLED,
sweep_mode: Union[constants.SweepMode,
int] = constants.SweepMode.LINEAR
) -> None:
"""
Setup the staircase sweep measurement using the same set of commands
(in the same order) as given in the programming manual - see pages
3-19 and 3-20.
Args:
v_start: starting voltage of staircase sweep
v_end: ending voltage of staircase sweep
n_steps: number of measurement points (uniformly distributed
between v_start and v_end)
post_sweep_voltage_val: voltage to hold at end of sweep (i.e.
start or end val). Sweep chan will also output this voltage
if an abort condition is encountered during the sweep
av_coef: coefficient to use for av command to set ADC
averaging. A negative value implies NPLC mode, with the absolute
value of av_coef as the NPLC setting to use. A positive value
implies auto mode and must be set to >= 4
enable_filter: turn SMU filter on or off
v_src_range: range setting to use for voltage source
i_comp: current compliance level
i_meas_range: current measurement range
hold_time: time (in s) to wait before starting very first
measurement in sweep
delay: time (in s) after starting to force a step output and
before starting a step measurement
step_delay: time (in s) after starting a step measurement before
next step in staircase. If step_delay is < measurement time,
B1500 waits until measurement complete and then forces the
next step value.
measure_delay: time (in s) after receiving a start step
measurement trigger and before starting a step measurement
abort_enabled: Enable abort
sweep_mode: Linear, log, linear-2-way or log-2-way
"""
self.set_average_samples_for_high_speed_adc(av_coef)
self.enable_filter(enable_filter)
self.source_config(output_range=v_src_range,
compliance=i_comp,
min_compliance_range=i_meas_range)
self.voltage(v_start)
self.measurement_operation_mode(constants.CMM.Mode.COMPLIANCE_SIDE)
self.current_measurement_range(i_meas_range)
self.iv_sweep.hold_time(hold_time)
self.iv_sweep.delay(delay)
self.iv_sweep.step_delay(step_delay)
self.iv_sweep.measure_delay(measure_delay)
self.iv_sweep.sweep_auto_abort(abort_enabled)
self.iv_sweep.post_sweep_voltage_condition(post_sweep_voltage_val)
self.iv_sweep.sweep_mode(sweep_mode)
self.iv_sweep.sweep_range(v_src_range)
self.iv_sweep.sweep_start(v_start)
self.iv_sweep.sweep_end(v_end)
self.iv_sweep.sweep_steps(n_steps)
self.iv_sweep.current_compliance(i_comp)
self.root_instrument.clear_timer_count()
self.setup_fnc_already_run = True
|
[
"def",
"setup_staircase_sweep",
"(",
"self",
",",
"v_start",
":",
"float",
",",
"v_end",
":",
"float",
",",
"n_steps",
":",
"int",
",",
"post_sweep_voltage_val",
":",
"Union",
"[",
"constants",
".",
"WMDCV",
".",
"Post",
",",
"int",
"]",
"=",
"constants",
".",
"WMDCV",
".",
"Post",
".",
"STOP",
",",
"av_coef",
":",
"int",
"=",
"-",
"1",
",",
"enable_filter",
":",
"bool",
"=",
"True",
",",
"v_src_range",
":",
"constants",
".",
"OutputRange",
"=",
"constants",
".",
"VOutputRange",
".",
"AUTO",
",",
"i_comp",
":",
"float",
"=",
"10e-6",
",",
"i_meas_range",
":",
"Optional",
"[",
"constants",
".",
"MeasureRange",
"]",
"=",
"constants",
".",
"IMeasRange",
".",
"FIX_10uA",
",",
"hold_time",
":",
"float",
"=",
"0",
",",
"delay",
":",
"float",
"=",
"0",
",",
"step_delay",
":",
"float",
"=",
"0",
",",
"measure_delay",
":",
"float",
"=",
"0",
",",
"abort_enabled",
":",
"Union",
"[",
"constants",
".",
"Abort",
",",
"int",
"]",
"=",
"constants",
".",
"Abort",
".",
"ENABLED",
",",
"sweep_mode",
":",
"Union",
"[",
"constants",
".",
"SweepMode",
",",
"int",
"]",
"=",
"constants",
".",
"SweepMode",
".",
"LINEAR",
")",
"->",
"None",
":",
"self",
".",
"set_average_samples_for_high_speed_adc",
"(",
"av_coef",
")",
"self",
".",
"enable_filter",
"(",
"enable_filter",
")",
"self",
".",
"source_config",
"(",
"output_range",
"=",
"v_src_range",
",",
"compliance",
"=",
"i_comp",
",",
"min_compliance_range",
"=",
"i_meas_range",
")",
"self",
".",
"voltage",
"(",
"v_start",
")",
"self",
".",
"measurement_operation_mode",
"(",
"constants",
".",
"CMM",
".",
"Mode",
".",
"COMPLIANCE_SIDE",
")",
"self",
".",
"current_measurement_range",
"(",
"i_meas_range",
")",
"self",
".",
"iv_sweep",
".",
"hold_time",
"(",
"hold_time",
")",
"self",
".",
"iv_sweep",
".",
"delay",
"(",
"delay",
")",
"self",
".",
"iv_sweep",
".",
"step_delay",
"(",
"step_delay",
")",
"self",
".",
"iv_sweep",
".",
"measure_delay",
"(",
"measure_delay",
")",
"self",
".",
"iv_sweep",
".",
"sweep_auto_abort",
"(",
"abort_enabled",
")",
"self",
".",
"iv_sweep",
".",
"post_sweep_voltage_condition",
"(",
"post_sweep_voltage_val",
")",
"self",
".",
"iv_sweep",
".",
"sweep_mode",
"(",
"sweep_mode",
")",
"self",
".",
"iv_sweep",
".",
"sweep_range",
"(",
"v_src_range",
")",
"self",
".",
"iv_sweep",
".",
"sweep_start",
"(",
"v_start",
")",
"self",
".",
"iv_sweep",
".",
"sweep_end",
"(",
"v_end",
")",
"self",
".",
"iv_sweep",
".",
"sweep_steps",
"(",
"n_steps",
")",
"self",
".",
"iv_sweep",
".",
"current_compliance",
"(",
"i_comp",
")",
"self",
".",
"root_instrument",
".",
"clear_timer_count",
"(",
")",
"self",
".",
"setup_fnc_already_run",
"=",
"True"
] |
https://github.com/QCoDeS/Qcodes/blob/3cda2cef44812e2aa4672781f2423bf5f816f9f9/qcodes/instrument_drivers/Keysight/keysightb1500/KeysightB1517A.py#L1082-L1160
|
||
golismero/golismero
|
7d605b937e241f51c1ca4f47b20f755eeefb9d76
|
thirdparty_libs/nltk/metrics/confusionmatrix.py
|
python
|
ConfusionMatrix.__getitem__
|
(self, (li,lj))
|
return self._confusion[i][j]
|
:return: The number of times that value ``li`` was expected and
value ``lj`` was given.
:rtype: int
|
:return: The number of times that value ``li`` was expected and
value ``lj`` was given.
:rtype: int
|
[
":",
"return",
":",
"The",
"number",
"of",
"times",
"that",
"value",
"li",
"was",
"expected",
"and",
"value",
"lj",
"was",
"given",
".",
":",
"rtype",
":",
"int"
] |
def __getitem__(self, (li,lj)):
"""
:return: The number of times that value ``li`` was expected and
value ``lj`` was given.
:rtype: int
"""
i = self._indices[li]
j = self._indices[lj]
return self._confusion[i][j]
|
[
"def",
"__getitem__",
"(",
"self",
",",
"(",
"li",
",",
"lj",
")",
")",
":",
"i",
"=",
"self",
".",
"_indices",
"[",
"li",
"]",
"j",
"=",
"self",
".",
"_indices",
"[",
"lj",
"]",
"return",
"self",
".",
"_confusion",
"[",
"i",
"]",
"[",
"j",
"]"
] |
https://github.com/golismero/golismero/blob/7d605b937e241f51c1ca4f47b20f755eeefb9d76/thirdparty_libs/nltk/metrics/confusionmatrix.py#L77-L85
|
|
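Note that the `(self, (li,lj))` signature above uses Python 2 tuple parameter unpacking, which PEP 3113 removed in Python 3. A sketch of a Python 3 rendering of the same accessor; the two-argument constructor here is a hypothetical stand-in (the real NLTK class builds `_indices` and `_confusion` from reference/test sequences in `__init__`):

```python
class ConfusionMatrix:
    def __init__(self, indices, confusion):
        self._indices = indices      # label -> row/column index
        self._confusion = confusion  # 2D list of counts

    def __getitem__(self, key):
        li, lj = key  # cm['cat', 'dog'] passes the tuple ('cat', 'dog')
        i = self._indices[li]
        j = self._indices[lj]
        return self._confusion[i][j]

cm = ConfusionMatrix({"cat": 0, "dog": 1}, [[3, 1], [0, 2]])
print(cm["cat", "dog"])  # 1: 'cat' was expected, 'dog' was given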
ChineseGLUE/ChineseGLUE
|
1591b85cf5427c2ff60f718d359ecb71d2b44879
|
baselines/models/xlnet/xlnet.py
|
python
|
XLNetModel.get_initializer
|
(self)
|
return self.initializer
|
Returns:
A tf initializer. Used to initialize variables in layers on top of XLNet.
|
Returns:
A tf initializer. Used to initialize variables in layers on top of XLNet.
|
[
"Returns",
":",
"A",
"tf",
"initializer",
".",
"Used",
"to",
"initialize",
"variables",
"in",
"layers",
"on",
"top",
"of",
"XLNet",
"."
] |
def get_initializer(self):
"""
Returns:
A tf initializer. Used to initialize variables in layers on top of XLNet.
"""
return self.initializer
|
[
"def",
"get_initializer",
"(",
"self",
")",
":",
"return",
"self",
".",
"initializer"
] |
https://github.com/ChineseGLUE/ChineseGLUE/blob/1591b85cf5427c2ff60f718d359ecb71d2b44879/baselines/models/xlnet/xlnet.py#L286-L291
|
|
PixarAnimationStudios/OpenTimelineIO
|
990a54ccbe6488180a93753370fc87902b982962
|
contrib/opentimelineio_contrib/adapters/xges.py
|
python
|
XGESOtio._serialize_track_effect_to_effect_clip
|
(
self, otio_effect, layer, layer_priority, start, duration,
track_types, clip_id)
|
Convert the effect 'otio_effect' found on an otio.schema.Track
into a GESEffectClip xges <clip> under the xges 'layer' with the
given 'layer_priority'. 'start', 'duration', 'clip_id' and
'track-types' will be used for the corresponding attributes of the
<clip>.
|
Convert the effect 'otio_effect' found on an otio.schema.Track
into a GESEffectClip xges <clip> under the xges 'layer' with the
given 'layer_priority'. 'start', 'duration', 'clip_id' and
'track-types' will be used for the corresponding attributes of the
<clip>.
|
[
"Convert",
"the",
"effect",
"otio_effect",
"found",
"on",
"an",
"otio",
".",
"schema",
".",
"Track",
"into",
"a",
"GESEffectClip",
"xges",
"<clip",
">",
"under",
"the",
"xges",
"layer",
"with",
"the",
"given",
"layer_priority",
".",
"start",
"duration",
"clip_id",
"and",
"track",
"-",
"types",
"will",
"be",
"used",
"for",
"the",
"corresponding",
"attributes",
"of",
"the",
"<clip",
">",
"."
] |
def _serialize_track_effect_to_effect_clip(
self, otio_effect, layer, layer_priority, start, duration,
track_types, clip_id):
"""
Convert the effect 'otio_effect' found on an otio.schema.Track
into a GESEffectClip xges <clip> under the xges 'layer' with the
given 'layer_priority'. 'start', 'duration', 'clip_id' and
'track-types' will be used for the corresponding attributes of the
<clip>.
"""
if isinstance(otio_effect, otio.schema.TimeEffect):
_show_otio_not_supported(otio_effect, "Ignoring")
return
self._insert_new_sub_element(
layer, "clip", attrib={
"id": str(clip_id),
"asset-id": str(self._get_effect_bin_desc(otio_effect)),
"type-name": "GESEffectClip",
"track-types": str(track_types),
"layer-priority": str(layer_priority),
"start": str(start),
"rate": '0',
"inpoint": "0",
"duration": str(duration),
"properties": "properties;",
"metadatas": "metadatas;"
}
)
|
[
"def",
"_serialize_track_effect_to_effect_clip",
"(",
"self",
",",
"otio_effect",
",",
"layer",
",",
"layer_priority",
",",
"start",
",",
"duration",
",",
"track_types",
",",
"clip_id",
")",
":",
"if",
"isinstance",
"(",
"otio_effect",
",",
"otio",
".",
"schema",
".",
"TimeEffect",
")",
":",
"_show_otio_not_supported",
"(",
"otio_effect",
",",
"\"Ignoring\"",
")",
"return",
"self",
".",
"_insert_new_sub_element",
"(",
"layer",
",",
"\"clip\"",
",",
"attrib",
"=",
"{",
"\"id\"",
":",
"str",
"(",
"clip_id",
")",
",",
"\"asset-id\"",
":",
"str",
"(",
"self",
".",
"_get_effect_bin_desc",
"(",
"otio_effect",
")",
")",
",",
"\"type-name\"",
":",
"\"GESEffectClip\"",
",",
"\"track-types\"",
":",
"str",
"(",
"track_types",
")",
",",
"\"layer-priority\"",
":",
"str",
"(",
"layer_priority",
")",
",",
"\"start\"",
":",
"str",
"(",
"start",
")",
",",
"\"rate\"",
":",
"'0'",
",",
"\"inpoint\"",
":",
"\"0\"",
",",
"\"duration\"",
":",
"str",
"(",
"duration",
")",
",",
"\"properties\"",
":",
"\"properties;\"",
",",
"\"metadatas\"",
":",
"\"metadatas;\"",
"}",
")"
] |
https://github.com/PixarAnimationStudios/OpenTimelineIO/blob/990a54ccbe6488180a93753370fc87902b982962/contrib/opentimelineio_contrib/adapters/xges.py#L1432-L1459
|
||
scikit-image/scikit-image
|
ed642e2bc822f362504d24379dee94978d6fa9de
|
skimage/io/_plugins/matplotlib_plugin.py
|
python
|
_raise_warnings
|
(image_properties)
|
Raise the appropriate warning for each nonstandard image type.
Parameters
----------
image_properties : ImageProperties named tuple
The properties of the considered image.
|
Raise the appropriate warning for each nonstandard image type.
|
[
"Raise",
"the",
"appropriate",
"warning",
"for",
"each",
"nonstandard",
"image",
"type",
"."
] |
def _raise_warnings(image_properties):
"""Raise the appropriate warning for each nonstandard image type.
Parameters
----------
image_properties : ImageProperties named tuple
The properties of the considered image.
"""
ip = image_properties
if ip.unsupported_dtype:
warn("Non-standard image type; displaying image with "
"stretched contrast.", stacklevel=3)
if ip.low_data_range:
warn("Low image data range; displaying image with "
"stretched contrast.", stacklevel=3)
if ip.out_of_range_float:
warn("Float image out of standard range; displaying "
"image with stretched contrast.", stacklevel=3)
|
[
"def",
"_raise_warnings",
"(",
"image_properties",
")",
":",
"ip",
"=",
"image_properties",
"if",
"ip",
".",
"unsupported_dtype",
":",
"warn",
"(",
"\"Non-standard image type; displaying image with \"",
"\"stretched contrast.\"",
",",
"stacklevel",
"=",
"3",
")",
"if",
"ip",
".",
"low_data_range",
":",
"warn",
"(",
"\"Low image data range; displaying image with \"",
"\"stretched contrast.\"",
",",
"stacklevel",
"=",
"3",
")",
"if",
"ip",
".",
"out_of_range_float",
":",
"warn",
"(",
"\"Float image out of standard range; displaying \"",
"\"image with stretched contrast.\"",
",",
"stacklevel",
"=",
"3",
")"
] |
https://github.com/scikit-image/scikit-image/blob/ed642e2bc822f362504d24379dee94978d6fa9de/skimage/io/_plugins/matplotlib_plugin.py#L62-L79
|
||
AppScale/gts
|
46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9
|
AppServer/lib/django-1.2/django/utils/datastructures.py
|
python
|
MergeDict.copy
|
(self)
|
return self.__copy__()
|
Returns a copy of this object.
|
Returns a copy of this object.
|
[
"Returns",
"a",
"copy",
"of",
"this",
"object",
"."
] |
def copy(self):
"""Returns a copy of this object."""
return self.__copy__()
|
[
"def",
"copy",
"(",
"self",
")",
":",
"return",
"self",
".",
"__copy__",
"(",
")"
] |
https://github.com/AppScale/gts/blob/46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9/AppServer/lib/django-1.2/django/utils/datastructures.py#L76-L78
|
|
iiau-tracker/SPLT
|
a196e603798e9be969d9d985c087c11cad1cda43
|
lib/object_detection/utils/static_shape.py
|
python
|
get_width
|
(tensor_shape)
|
return tensor_shape[2].value
|
Returns width from the tensor shape.
Args:
tensor_shape: A rank 4 TensorShape.
Returns:
An integer representing the width of the tensor.
|
Returns width from the tensor shape.
|
[
"Returns",
"width",
"from",
"the",
"tensor",
"shape",
"."
] |
def get_width(tensor_shape):
"""Returns width from the tensor shape.
Args:
tensor_shape: A rank 4 TensorShape.
Returns:
An integer representing the width of the tensor.
"""
tensor_shape.assert_has_rank(rank=4)
return tensor_shape[2].value
|
[
"def",
"get_width",
"(",
"tensor_shape",
")",
":",
"tensor_shape",
".",
"assert_has_rank",
"(",
"rank",
"=",
"4",
")",
"return",
"tensor_shape",
"[",
"2",
"]",
".",
"value"
] |
https://github.com/iiau-tracker/SPLT/blob/a196e603798e9be969d9d985c087c11cad1cda43/lib/object_detection/utils/static_shape.py#L48-L58
|
|
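A usage sketch, assuming TensorFlow 1.x semantics, where indexing a `TensorShape` yields a `Dimension` with a `.value` attribute (in TF 2.x indexing returns a plain int and `.value` would fail):

```python
import tensorflow as tf  # assumes a TF 1.x environment

shape = tf.TensorShape([None, 480, 640, 3])  # rank 4: N, H, W, C
shape.assert_has_rank(rank=4)
print(shape[2].value)  # 640, the width dimension
```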
ines/wasabi
|
4cb261ce92c435f922dcc47e625d23cf038a699a
|
wasabi/util.py
|
python
|
get_raw_input
|
(description, default=False, indent=4)
|
return user_input
|
Get user input from the command line via raw_input / input.
description (unicode): Text to display before prompt.
default (unicode or False/None): Default value to display with prompt.
indent (int): Indentation in spaces.
RETURNS (unicode): User input.
|
Get user input from the command line via raw_input / input.
|
[
"Get",
"user",
"input",
"from",
"the",
"command",
"line",
"via",
"raw_input",
"/",
"input",
"."
] |
def get_raw_input(description, default=False, indent=4):
"""Get user input from the command line via raw_input / input.
description (unicode): Text to display before prompt.
default (unicode or False/None): Default value to display with prompt.
indent (int): Indentation in spaces.
RETURNS (unicode): User input.
"""
additional = " (default: {})".format(default) if default else ""
prompt = wrap("{}{}: ".format(description, additional), indent=indent)
user_input = input_(prompt)
return user_input
|
[
"def",
"get_raw_input",
"(",
"description",
",",
"default",
"=",
"False",
",",
"indent",
"=",
"4",
")",
":",
"additional",
"=",
"\" (default: {})\"",
".",
"format",
"(",
"default",
")",
"if",
"default",
"else",
"\"\"",
"prompt",
"=",
"wrap",
"(",
"\"{}{}: \"",
".",
"format",
"(",
"description",
",",
"additional",
")",
",",
"indent",
"=",
"indent",
")",
"user_input",
"=",
"input_",
"(",
"prompt",
")",
"return",
"user_input"
] |
https://github.com/ines/wasabi/blob/4cb261ce92c435f922dcc47e625d23cf038a699a/wasabi/util.py#L157-L168
|
|
phaethon/kamene
|
bf679a65d456411942ee4a907818ba3d6a183bfe
|
kamene/contrib/gsm_um.py
|
python
|
pagingRequestType1
|
(MobileId_presence=0)
|
return packet
|
PAGING REQUEST TYPE 1 Section 9.1.22
|
PAGING REQUEST TYPE 1 Section 9.1.22
|
[
"PAGING",
"REQUEST",
"TYPE",
"1",
"Section",
"9",
".",
"1",
".",
"22"
] |
def pagingRequestType1(MobileId_presence=0):
"""PAGING REQUEST TYPE 1 Section 9.1.22"""
#The L2 pseudo length of this message is the sum of lengths of all
#information elements present in the message except
#the P1 Rest Octets and L2 Pseudo Length information elements.
a = L2PseudoLength()
b = TpPd(pd=0x6)
c = MessageType(mesType=0x21) # 00100001
d = PageModeAndChannelNeeded()
f = MobileId()
packet = a / b / c / d / f
if MobileId_presence == 1:
g = MobileIdHdr(ieiMI=0x17, eightBitMI=0x0)
packet = packet / g
h = P1RestOctets()
packet = packet / h
return packet
|
[
"def",
"pagingRequestType1",
"(",
"MobileId_presence",
"=",
"0",
")",
":",
"#The L2 pseudo length of this message is the sum of lengths of all",
"#information elements present in the message except",
"#the P1 Rest Octets and L2 Pseudo Length information elements.",
"a",
"=",
"L2PseudoLength",
"(",
")",
"b",
"=",
"TpPd",
"(",
"pd",
"=",
"0x6",
")",
"c",
"=",
"MessageType",
"(",
"mesType",
"=",
"0x21",
")",
"# 00100001",
"d",
"=",
"PageModeAndChannelNeeded",
"(",
")",
"f",
"=",
"MobileId",
"(",
")",
"packet",
"=",
"a",
"/",
"b",
"/",
"c",
"/",
"d",
"/",
"f",
"if",
"MobileId_presence",
"is",
"1",
":",
"g",
"=",
"MobileIdHdr",
"(",
"ieiMI",
"=",
"0x17",
",",
"eightBitMI",
"=",
"0x0",
")",
"packet",
"=",
"packet",
"/",
"g",
"h",
"=",
"P1RestOctets",
"(",
")",
"packet",
"=",
"packet",
"/",
"h",
"return",
"packet"
] |
https://github.com/phaethon/kamene/blob/bf679a65d456411942ee4a907818ba3d6a183bfe/kamene/contrib/gsm_um.py#L846-L862
|
|
seanbell/opensurfaces
|
7f3e987560faa62cd37f821760683ccd1e053c7c
|
server/common/utils.py
|
python
|
dict_union
|
(a, b)
|
return ret
|
Return the union of two dictionaries without editing either.
If a key exists in both dictionaries, the second value is used.
|
Return the union of two dictionaries without editing either.
If a key exists in both dictionaries, the second value is used.
|
[
"Return",
"the",
"union",
"of",
"two",
"dictionaries",
"without",
"editing",
"either",
".",
"If",
"a",
"key",
"exists",
"in",
"both",
"dictionaries",
"the",
"second",
"value",
"is",
"used",
"."
] |
def dict_union(a, b):
"""
Return the union of two dictionaries without editing either.
If a key exists in both dictionaries, the second value is used.
"""
if not a:
return b if b else {}
if not b:
return a
ret = a.copy()
ret.update(b)
return ret
|
[
"def",
"dict_union",
"(",
"a",
",",
"b",
")",
":",
"if",
"not",
"a",
":",
"return",
"b",
"if",
"b",
"else",
"{",
"}",
"if",
"not",
"b",
":",
"return",
"a",
"ret",
"=",
"a",
".",
"copy",
"(",
")",
"ret",
".",
"update",
"(",
"b",
")",
"return",
"ret"
] |
https://github.com/seanbell/opensurfaces/blob/7f3e987560faa62cd37f821760683ccd1e053c7c/server/common/utils.py#L636-L647
|
|
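A quick behavior check for dict_union, with the function body copied from the row above: `b` wins on key collisions and neither input is modified (note the empty-input fast paths return one of the originals uncopied):

```python
def dict_union(a, b):
    if not a:
        return b if b else {}
    if not b:
        return a
    ret = a.copy()
    ret.update(b)
    return ret

a = {"x": 1, "y": 2}
b = {"y": 20, "z": 30}
print(dict_union(a, b))  # {'x': 1, 'y': 20, 'z': 30} -- b's 'y' wins
print(a, b)              # both inputs unchanged
```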
dropbox/PyHive
|
b21c507a24ed2f2b0cf15b0b6abb1c43f31d3ee0
|
TCLIService/ttypes.py
|
python
|
TStringColumn.read
|
(self, iprot)
|
[] |
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.LIST:
self.values = []
(_etype93, _size90) = iprot.readListBegin()
for _i94 in range(_size90):
_elem95 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
self.values.append(_elem95)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.nulls = iprot.readBinary()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
|
[
"def",
"read",
"(",
"self",
",",
"iprot",
")",
":",
"if",
"iprot",
".",
"_fast_decode",
"is",
"not",
"None",
"and",
"isinstance",
"(",
"iprot",
".",
"trans",
",",
"TTransport",
".",
"CReadableTransport",
")",
"and",
"self",
".",
"thrift_spec",
"is",
"not",
"None",
":",
"iprot",
".",
"_fast_decode",
"(",
"self",
",",
"iprot",
",",
"(",
"self",
".",
"__class__",
",",
"self",
".",
"thrift_spec",
")",
")",
"return",
"iprot",
".",
"readStructBegin",
"(",
")",
"while",
"True",
":",
"(",
"fname",
",",
"ftype",
",",
"fid",
")",
"=",
"iprot",
".",
"readFieldBegin",
"(",
")",
"if",
"ftype",
"==",
"TType",
".",
"STOP",
":",
"break",
"if",
"fid",
"==",
"1",
":",
"if",
"ftype",
"==",
"TType",
".",
"LIST",
":",
"self",
".",
"values",
"=",
"[",
"]",
"(",
"_etype93",
",",
"_size90",
")",
"=",
"iprot",
".",
"readListBegin",
"(",
")",
"for",
"_i94",
"in",
"range",
"(",
"_size90",
")",
":",
"_elem95",
"=",
"iprot",
".",
"readString",
"(",
")",
".",
"decode",
"(",
"'utf-8'",
")",
"if",
"sys",
".",
"version_info",
"[",
"0",
"]",
"==",
"2",
"else",
"iprot",
".",
"readString",
"(",
")",
"self",
".",
"values",
".",
"append",
"(",
"_elem95",
")",
"iprot",
".",
"readListEnd",
"(",
")",
"else",
":",
"iprot",
".",
"skip",
"(",
"ftype",
")",
"elif",
"fid",
"==",
"2",
":",
"if",
"ftype",
"==",
"TType",
".",
"STRING",
":",
"self",
".",
"nulls",
"=",
"iprot",
".",
"readBinary",
"(",
")",
"else",
":",
"iprot",
".",
"skip",
"(",
"ftype",
")",
"else",
":",
"iprot",
".",
"skip",
"(",
"ftype",
")",
"iprot",
".",
"readFieldEnd",
"(",
")",
"iprot",
".",
"readStructEnd",
"(",
")"
] |
https://github.com/dropbox/PyHive/blob/b21c507a24ed2f2b0cf15b0b6abb1c43f31d3ee0/TCLIService/ttypes.py#L2507-L2534
|
||||
gabrieleangeletti/Deep-Learning-TensorFlow
|
ddeb1f2848da7b7bee166ad2152b4afc46bb2086
|
yadlt/models/autoencoders/denoising_autoencoder.py
|
python
|
DenoisingAutoencoder.get_parameters
|
(self, graph=None)
|
Return the model parameters in the form of numpy arrays.
Parameters
----------
graph : tf.Graph, optional (default = None)
Tensorflow graph object.
Returns
-------
dict : model parameters dictionary.
|
Return the model parameters in the form of numpy arrays.
|
[
"Return",
"the",
"model",
"parameters",
"in",
"the",
"form",
"of",
"numpy",
"arrays",
"."
] |
def get_parameters(self, graph=None):
"""Return the model parameters in the form of numpy arrays.
Parameters
----------
graph : tf.Graph, optional (default = None)
Tensorflow graph object.
Returns
-------
dict : model parameters dictionary.
"""
g = graph if graph is not None else self.tf_graph
with g.as_default():
with tf.Session() as self.tf_session:
self.tf_saver.restore(self.tf_session, self.model_path)
return {
'enc_w': self.W_.eval(),
'enc_b': self.bh_.eval(),
'dec_b': self.bv_.eval()
}
|
[
"def",
"get_parameters",
"(",
"self",
",",
"graph",
"=",
"None",
")",
":",
"g",
"=",
"graph",
"if",
"graph",
"is",
"not",
"None",
"else",
"self",
".",
"tf_graph",
"with",
"g",
".",
"as_default",
"(",
")",
":",
"with",
"tf",
".",
"Session",
"(",
")",
"as",
"self",
".",
"tf_session",
":",
"self",
".",
"tf_saver",
".",
"restore",
"(",
"self",
".",
"tf_session",
",",
"self",
".",
"model_path",
")",
"return",
"{",
"'enc_w'",
":",
"self",
".",
"W_",
".",
"eval",
"(",
")",
",",
"'enc_b'",
":",
"self",
".",
"bh_",
".",
"eval",
"(",
")",
",",
"'dec_b'",
":",
"self",
".",
"bv_",
".",
"eval",
"(",
")",
"}"
] |
https://github.com/gabrieleangeletti/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/models/autoencoders/denoising_autoencoder.py#L291-L315
|
||
fail2ban/fail2ban
|
095aeda8407b433098df35424cde2764a09566a6
|
fail2ban/server/strptime.py
|
python
|
validateTimeZone
|
(tz)
|
return zone2offset(tz, 0)
|
Validate a timezone and convert it to offset if it can (offset-based TZ).
For now this accepts the UTC[+-]hhmm format (UTC has aliases GMT/Z and optional).
Additionally it accepts all zone abbreviations mentioned below in TZ_STR.
Note that currently these zone abbreviations are offset-based and use a fixed
offset without an automatic DST switch (if CET is used, there is no automatic switch to CEST).
In the future, it may be extended for named time zones (such as Europe/Paris)
present on the system, if a suitable tz library is present (pytz).
|
Validate a timezone and convert it to offset if it can (offset-based TZ).
|
[
"Validate",
"a",
"timezone",
"and",
"convert",
"it",
"to",
"offset",
"if",
"it",
"can",
"(",
"offset",
"-",
"based",
"TZ",
")",
"."
] |
def validateTimeZone(tz):
"""Validate a timezone and convert it to offset if it can (offset-based TZ).
For now this accepts the UTC[+-]hhmm format (UTC has aliases GMT/Z and optional).
Additionally it accepts all zone abbreviations mentioned below in TZ_STR.
Note that currently these zone abbreviations are offset-based and use a fixed
offset without an automatic DST switch (if CET is used, there is no automatic switch to CEST).
In the future, it may be extended for named time zones (such as Europe/Paris)
present on the system, if a suitable tz library is present (pytz).
"""
if tz is None:
return None
m = FIXED_OFFSET_TZ_RE.match(tz)
if m is None:
raise ValueError("Unknown or unsupported time zone: %r" % tz)
tz = m.groups()
return zone2offset(tz, 0)
|
[
"def",
"validateTimeZone",
"(",
"tz",
")",
":",
"if",
"tz",
"is",
"None",
":",
"return",
"None",
"m",
"=",
"FIXED_OFFSET_TZ_RE",
".",
"match",
"(",
"tz",
")",
"if",
"m",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"\"Unknown or unsupported time zone: %r\"",
"%",
"tz",
")",
"tz",
"=",
"m",
".",
"groups",
"(",
")",
"return",
"zone2offset",
"(",
"tz",
",",
"0",
")"
] |
https://github.com/fail2ban/fail2ban/blob/095aeda8407b433098df35424cde2764a09566a6/fail2ban/server/strptime.py#L141-L158
|
|
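A simplified sketch of the offset-based idea. This is not fail2ban's actual FIXED_OFFSET_TZ_RE or zone2offset (those also handle the zone-abbreviation table the docstring mentions, and treat the UTC/GMT/Z prefix as optional; here it is required to keep the regex simple). The sketch accepts UTC/GMT/Z with an optional +-hhmm suffix and returns the offset in minutes:

```python
import re

_TZ_RE = re.compile(r"^(?:UTC|GMT|Z)(?:([+-])(\d{2}):?(\d{2}))?$")

def validate_tz(tz):
    if tz is None:
        return None
    m = _TZ_RE.match(tz)
    if m is None:
        raise ValueError("Unknown or unsupported time zone: %r" % tz)
    sign, hh, mm = m.groups()
    if hh is None:
        return 0                      # bare UTC/GMT/Z
    offset = int(hh) * 60 + int(mm)
    return -offset if sign == "-" else offset

print(validate_tz("UTC+0200"))  # 120
print(validate_tz("Z"))         # 0
```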
home-assistant-libs/pychromecast
|
d7acb9f5ae2c0daa797d78da1a1e8090b4181d21
|
pychromecast/controllers/media.py
|
python
|
MediaController.queue_prev
|
(self)
|
Send the QUEUE_PREV command.
|
Send the QUEUE_PREV command.
|
[
"Send",
"the",
"QUEUE_PREV",
"command",
"."
] |
def queue_prev(self):
"""Send the QUEUE_PREV command."""
self._send_command({MESSAGE_TYPE: TYPE_QUEUE_UPDATE, "jump": -1})
|
[
"def",
"queue_prev",
"(",
"self",
")",
":",
"self",
".",
"_send_command",
"(",
"{",
"MESSAGE_TYPE",
":",
"TYPE_QUEUE_UPDATE",
",",
"\"jump\"",
":",
"-",
"1",
"}",
")"
] |
https://github.com/home-assistant-libs/pychromecast/blob/d7acb9f5ae2c0daa797d78da1a1e8090b4181d21/pychromecast/controllers/media.py#L444-L446
|
||
robotlearn/pyrobolearn
|
9cd7c060723fda7d2779fa255ac998c2c82b8436
|
pyrobolearn/priorities/models/model.py
|
python
|
ModelInterface.get_joint_limits
|
(self)
|
r"""
Return the joint limits.
Returns:
np.array[float[2, N]]: lower and upper joint position limits.
|
r"""
Return the joint limits.
|
[
"r",
"Return",
"the",
"joint",
"limits",
"."
] |
def get_joint_limits(self):
r"""
Return the joint limits.
Returns:
np.array[float[2, N]]: lower and upper joint position limits.
"""
pass
|
[
"def",
"get_joint_limits",
"(",
"self",
")",
":",
"pass"
] |
https://github.com/robotlearn/pyrobolearn/blob/9cd7c060723fda7d2779fa255ac998c2c82b8436/pyrobolearn/priorities/models/model.py#L123-L130
|
||
BigBrotherBot/big-brother-bot
|
848823c71413c86e7f1ff9584f43e08d40a7f2c0
|
b3/tools/debug/statlib/pstat.py
|
python
|
unique
|
(inlist)
|
return uniques
|
Returns all unique items in the passed list. If a list-of-lists
is passed, unique LISTS are found (i.e., items in the first dimension are
compared).
Usage: unique (inlist)
Returns: the unique elements (or rows) in inlist
|
Returns all unique items in the passed list. If a list-of-lists
is passed, unique LISTS are found (i.e., items in the first dimension are
compared).
|
[
"Returns",
"all",
"unique",
"items",
"in",
"the",
"passed",
"list",
".",
"If",
"the",
"a",
"list",
"-",
"of",
"-",
"lists",
"is",
"passed",
"unique",
"LISTS",
"are",
"found",
"(",
"i",
".",
"e",
".",
"items",
"in",
"the",
"first",
"dimension",
"are",
"compared",
")",
"."
] |
def unique (inlist):
"""
Returns all unique items in the passed list. If a list-of-lists
is passed, unique LISTS are found (i.e., items in the first dimension are
compared).
Usage: unique (inlist)
Returns: the unique elements (or rows) in inlist
"""
uniques = []
for item in inlist:
if item not in uniques:
uniques.append(item)
return uniques
|
[
"def",
"unique",
"(",
"inlist",
")",
":",
"uniques",
"=",
"[",
"]",
"for",
"item",
"in",
"inlist",
":",
"if",
"item",
"not",
"in",
"uniques",
":",
"uniques",
".",
"append",
"(",
"item",
")",
"return",
"uniques"
] |
https://github.com/BigBrotherBot/big-brother-bot/blob/848823c71413c86e7f1ff9584f43e08d40a7f2c0/b3/tools/debug/statlib/pstat.py#L660-L673
|
|
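Because the dedup test above is `item not in uniques` (a linear scan, O(n^2) overall) rather than a set lookup, it also works on unhashable items, which is what lets it deduplicate whole rows of a list-of-lists. A behavior check with the body copied from the row above:

```python
def unique(inlist):
    uniques = []
    for item in inlist:
        if item not in uniques:
            uniques.append(item)
    return uniques

print(unique([1, 2, 2, 3, 1]))        # [1, 2, 3] -- first-seen order kept
print(unique([[1, 2], [1, 2], [3]]))  # [[1, 2], [3]] -- whole rows compared
```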
oilshell/oil
|
94388e7d44a9ad879b12615f6203b38596b5a2d3
|
Python-2.7.13/Lib/pickle.py
|
python
|
Unpickler.load_float
|
(self)
|
[] |
def load_float(self):
self.append(float(self.readline()[:-1]))
|
[
"def",
"load_float",
"(",
"self",
")",
":",
"self",
".",
"append",
"(",
"float",
"(",
"self",
".",
"readline",
"(",
")",
"[",
":",
"-",
"1",
"]",
")",
")"
] |
https://github.com/oilshell/oil/blob/94388e7d44a9ad879b12615f6203b38596b5a2d3/Python-2.7.13/Lib/pickle.py#L960-L961
|
||||
osmr/imgclsmob
|
f2993d3ce73a2f7ddba05da3891defb08547d504
|
pytorch/pytorchcv/models/sharesnet.py
|
python
|
sharesnet50
|
(**kwargs)
|
return get_sharesnet(blocks=50, model_name="sharesnet50", **kwargs)
|
ShaResNet-50 model from 'ShaResNet: reducing residual network parameter number by sharing weights,'
https://arxiv.org/abs/1702.08782.
Parameters:
----------
pretrained : bool, default False
Whether to load the pretrained weights for model.
root : str, default '~/.torch/models'
Location for keeping the model parameters.
|
ShaResNet-50 model from 'ShaResNet: reducing residual network parameter number by sharing weights,'
https://arxiv.org/abs/1702.08782.
|
[
"ShaResNet",
"-",
"50",
"model",
"from",
"ShaResNet",
":",
"reducing",
"residual",
"network",
"parameter",
"number",
"by",
"sharing",
"weights",
"https",
":",
"//",
"arxiv",
".",
"org",
"/",
"abs",
"/",
"1702",
".",
"08782",
"."
] |
def sharesnet50(**kwargs):
"""
ShaResNet-50 model from 'ShaResNet: reducing residual network parameter number by sharing weights,'
https://arxiv.org/abs/1702.08782.
Parameters:
----------
pretrained : bool, default False
Whether to load the pretrained weights for model.
root : str, default '~/.torch/models'
Location for keeping the model parameters.
"""
return get_sharesnet(blocks=50, model_name="sharesnet50", **kwargs)
|
[
"def",
"sharesnet50",
"(",
"*",
"*",
"kwargs",
")",
":",
"return",
"get_sharesnet",
"(",
"blocks",
"=",
"50",
",",
"model_name",
"=",
"\"sharesnet50\"",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/osmr/imgclsmob/blob/f2993d3ce73a2f7ddba05da3891defb08547d504/pytorch/pytorchcv/models/sharesnet.py#L475-L487
|
|
faucetsdn/ryu
|
537f35f4b2bc634ef05e3f28373eb5e24609f989
|
ryu/services/protocols/bgp/core_managers/table_manager.py
|
python
|
TableCoreManager.learn_path
|
(self, path)
|
Inserts `path` into correct global table.
Since the known paths to the `Destination` have changed, we queue it for
further processing.
|
Inserts `path` into correct global table.
|
[
"Inserts",
"path",
"into",
"correct",
"global",
"table",
"."
] |
def learn_path(self, path):
"""Inserts `path` into correct global table.
Since the known paths to the `Destination` have changed, we queue it for
further processing.
"""
# Get VPN/Global table
table = self.get_global_table_by_route_family(path.route_family)
gpath_dest = table.insert(path)
# Since destination was updated, we enqueue it for processing.
self._signal_bus.dest_changed(gpath_dest)
|
[
"def",
"learn_path",
"(",
"self",
",",
"path",
")",
":",
"# Get VPN/Global table",
"table",
"=",
"self",
".",
"get_global_table_by_route_family",
"(",
"path",
".",
"route_family",
")",
"gpath_dest",
"=",
"table",
".",
"insert",
"(",
"path",
")",
"# Since destination was updated, we enqueue it for processing.",
"self",
".",
"_signal_bus",
".",
"dest_changed",
"(",
"gpath_dest",
")"
] |
https://github.com/faucetsdn/ryu/blob/537f35f4b2bc634ef05e3f28373eb5e24609f989/ryu/services/protocols/bgp/core_managers/table_manager.py#L162-L172
|
||
arnabgho/iSketchNFill
|
68060a34d9f78fcc5f3a143ffc733ea6ec979d3f
|
ui_shadow_draw/py-thin-plate-spline/thinplate/pytorch.py
|
python
|
tps
|
(theta, ctrl, grid)
|
return z
|
Evaluate the thin-plate-spline (TPS) surface at xy locations arranged in a grid.
The TPS surface is a minimum bend interpolation surface defined by a set of control points.
The function value for an x,y location is given by
TPS(x,y) := theta[-3] + theta[-2]*x + theta[-1]*y + \sum_t=0,T theta[t] U(x,y,ctrl[t])
This method computes the TPS value for multiple batches over multiple grid locations for 2
surfaces in one go.
Params
------
theta: Nx(T+3)x2 tensor, or Nx(T+2)x2 tensor
Batch size N, T+3 or T+2 (reduced form) model parameters for T control points in dx and dy.
ctrl: NxTx2 tensor or Tx2 tensor
T control points in normalized image coordinates [0..1]
grid: NxHxWx3 tensor
Grid locations to evaluate with homogeneous 1 in first coordinate.
Returns
-------
z: NxHxWx2 tensor
Function values at each grid location in dx and dy.
|
Evaluate the thin-plate-spline (TPS) surface at xy locations arranged in a grid.
The TPS surface is a minimum bend interpolation surface defined by a set of control points.
The function value for an x,y location is given by
TPS(x,y) := theta[-3] + theta[-2]*x + theta[-1]*y + \sum_t=0,T theta[t] U(x,y,ctrl[t])
This method computes the TPS value for multiple batches over multiple grid locations for 2
surfaces in one go.
Params
------
theta: Nx(T+3)x2 tensor, or Nx(T+2)x2 tensor
Batch size N, T+3 or T+2 (reduced form) model parameters for T control points in dx and dy.
ctrl: NxTx2 tensor or Tx2 tensor
T control points in normalized image coordinates [0..1]
grid: NxHxWx3 tensor
Grid locations to evaluate with homogeneous 1 in first coordinate.
Returns
-------
z: NxHxWx2 tensor
Function values at each grid location in dx and dy.
|
[
"Evaluate",
"the",
"thin",
"-",
"plate",
"-",
"spline",
"(",
"TPS",
")",
"surface",
"at",
"xy",
"locations",
"arranged",
"in",
"a",
"grid",
".",
"The",
"TPS",
"surface",
"is",
"a",
"minimum",
"bend",
"interpolation",
"surface",
"defined",
"by",
"a",
"set",
"of",
"control",
"points",
".",
"The",
"function",
"value",
"for",
"a",
"x",
"y",
"location",
"is",
"given",
"by",
"TPS",
"(",
"x",
"y",
")",
":",
"=",
"theta",
"[",
"-",
"3",
"]",
"+",
"theta",
"[",
"-",
"2",
"]",
"*",
"x",
"+",
"theta",
"[",
"-",
"1",
"]",
"*",
"y",
"+",
"\\",
"sum_t",
"=",
"0",
"T",
"theta",
"[",
"t",
"]",
"U",
"(",
"x",
"y",
"ctrl",
"[",
"t",
"]",
")",
"This",
"method",
"computes",
"the",
"TPS",
"value",
"for",
"multiple",
"batches",
"over",
"multiple",
"grid",
"locations",
"for",
"2",
"surfaces",
"in",
"one",
"go",
".",
"Params",
"------",
"theta",
":",
"Nx",
"(",
"T",
"+",
"3",
")",
"x2",
"tensor",
"or",
"Nx",
"(",
"T",
"+",
"2",
")",
"x2",
"tensor",
"Batch",
"size",
"N",
"T",
"+",
"3",
"or",
"T",
"+",
"2",
"(",
"reduced",
"form",
")",
"model",
"parameters",
"for",
"T",
"control",
"points",
"in",
"dx",
"and",
"dy",
".",
"ctrl",
":",
"NxTx2",
"tensor",
"or",
"Tx2",
"tensor",
"T",
"control",
"points",
"in",
"normalized",
"image",
"coordinates",
"[",
"0",
"..",
"1",
"]",
"grid",
":",
"NxHxWx3",
"tensor",
"Grid",
"locations",
"to",
"evaluate",
"with",
"homogeneous",
"1",
"in",
"first",
"coordinate",
".",
"Returns",
"-------",
"z",
":",
"NxHxWx2",
"tensor",
"Function",
"values",
"at",
"each",
"grid",
"location",
"in",
"dx",
"and",
"dy",
"."
] |
def tps(theta, ctrl, grid):
'''Evaluate the thin-plate-spline (TPS) surface at xy locations arranged in a grid.
The TPS surface is a minimum bend interpolation surface defined by a set of control points.
The function value for an x,y location is given by
TPS(x,y) := theta[-3] + theta[-2]*x + theta[-1]*y + \sum_t=0,T theta[t] U(x,y,ctrl[t])
This method computes the TPS value for multiple batches over multiple grid locations for 2
surfaces in one go.
Params
------
theta: Nx(T+3)x2 tensor, or Nx(T+2)x2 tensor
Batch size N, T+3 or T+2 (reduced form) model parameters for T control points in dx and dy.
ctrl: NxTx2 tensor or Tx2 tensor
T control points in normalized image coordinates [0..1]
grid: NxHxWx3 tensor
Grid locations to evaluate with homogeneous 1 in first coordinate.
Returns
-------
z: NxHxWx2 tensor
Function values at each grid location in dx and dy.
'''
N, H, W, _ = grid.size()
if ctrl.dim() == 2:
ctrl = ctrl.expand(N, *ctrl.size())
T = ctrl.shape[1]
diff = grid[...,1:].unsqueeze(-2) - ctrl.unsqueeze(1).unsqueeze(1)
D = torch.sqrt((diff**2).sum(-1))
U = (D**2) * torch.log(D + 1e-6)
w, a = theta[:, :-3, :], theta[:, -3:, :]
reduced = T + 2 == theta.shape[1]
if reduced:
w = torch.cat((-w.sum(dim=1, keepdim=True), w), dim=1)
# U is NxHxWxT
b = torch.bmm(U.view(N, -1, T), w).view(N,H,W,2)
# b is NxHxWx2
z = torch.bmm(grid.view(N,-1,3), a).view(N,H,W,2) + b
return z
|
[
"def",
"tps",
"(",
"theta",
",",
"ctrl",
",",
"grid",
")",
":",
"N",
",",
"H",
",",
"W",
",",
"_",
"=",
"grid",
".",
"size",
"(",
")",
"if",
"ctrl",
".",
"dim",
"(",
")",
"==",
"2",
":",
"ctrl",
"=",
"ctrl",
".",
"expand",
"(",
"N",
",",
"*",
"ctrl",
".",
"size",
"(",
")",
")",
"T",
"=",
"ctrl",
".",
"shape",
"[",
"1",
"]",
"diff",
"=",
"grid",
"[",
"...",
",",
"1",
":",
"]",
".",
"unsqueeze",
"(",
"-",
"2",
")",
"-",
"ctrl",
".",
"unsqueeze",
"(",
"1",
")",
".",
"unsqueeze",
"(",
"1",
")",
"D",
"=",
"torch",
".",
"sqrt",
"(",
"(",
"diff",
"**",
"2",
")",
".",
"sum",
"(",
"-",
"1",
")",
")",
"U",
"=",
"(",
"D",
"**",
"2",
")",
"*",
"torch",
".",
"log",
"(",
"D",
"+",
"1e-6",
")",
"w",
",",
"a",
"=",
"theta",
"[",
":",
",",
":",
"-",
"3",
",",
":",
"]",
",",
"theta",
"[",
":",
",",
"-",
"3",
":",
",",
":",
"]",
"reduced",
"=",
"T",
"+",
"2",
"==",
"theta",
".",
"shape",
"[",
"1",
"]",
"if",
"reduced",
":",
"w",
"=",
"torch",
".",
"cat",
"(",
"(",
"-",
"w",
".",
"sum",
"(",
"dim",
"=",
"1",
",",
"keepdim",
"=",
"True",
")",
",",
"w",
")",
",",
"dim",
"=",
"1",
")",
"# U is NxHxWxT",
"b",
"=",
"torch",
".",
"bmm",
"(",
"U",
".",
"view",
"(",
"N",
",",
"-",
"1",
",",
"T",
")",
",",
"w",
")",
".",
"view",
"(",
"N",
",",
"H",
",",
"W",
",",
"2",
")",
"# b is NxHxWx2",
"z",
"=",
"torch",
".",
"bmm",
"(",
"grid",
".",
"view",
"(",
"N",
",",
"-",
"1",
",",
"3",
")",
",",
"a",
")",
".",
"view",
"(",
"N",
",",
"H",
",",
"W",
",",
"2",
")",
"+",
"b",
"return",
"z"
] |
https://github.com/arnabgho/iSketchNFill/blob/68060a34d9f78fcc5f3a143ffc733ea6ec979d3f/ui_shadow_draw/py-thin-plate-spline/thinplate/pytorch.py#L8-L55
|
|
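A shape check for the inputs tps() expects. This assumes the tps function from this row is in scope and a recent PyTorch (the indexing= argument of torch.meshgrid needs 1.10+). The grid carries a homogeneous 1 in its first channel, and a zero theta yields a zero deformation field:

```python
import torch

N, H, W, T = 1, 8, 8, 4
ctrl = torch.rand(T, 2)             # Tx2; tps() broadcasts it to NxTx2
theta = torch.zeros(N, T + 3, 2)    # full (non-reduced) parameter form

ys, xs = torch.meshgrid(torch.linspace(0, 1, H),
                        torch.linspace(0, 1, W), indexing="ij")
grid = torch.stack([torch.ones(H, W), xs, ys], dim=-1).unsqueeze(0)  # NxHxWx3

z = tps(theta, ctrl, grid)
print(z.shape)  # torch.Size([1, 8, 8, 2]); all zeros for a zero theta
```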
etingof/pyasn1
|
db8f1a7930c6b5826357646746337dafc983f953
|
pyasn1/type/univ.py
|
python
|
Any.tagMap
|
(self)
|
Return a :class:`~pyasn1.type.tagmap.TagMap` object mapping
ASN.1 tags to ASN.1 objects contained within callee.
|
Return a :class:`~pyasn1.type.tagmap.TagMap` object mapping
ASN.1 tags to ASN.1 objects contained within callee.
|
[
"Return",
"a",
":",
"class",
":",
"~pyasn1",
".",
"type",
".",
"tagmap",
".",
"TagMap",
"object",
"mapping",
"ASN",
".",
"1",
"tags",
"to",
"ASN",
".",
"1",
"objects",
"contained",
"within",
"callee",
"."
] |
def tagMap(self):
""""Return a :class:`~pyasn1.type.tagmap.TagMap` object mapping
ASN.1 tags to ASN.1 objects contained within callee.
"""
try:
return self._tagMap
except AttributeError:
self._tagMap = tagmap.TagMap(
{self.tagSet: self},
{eoo.endOfOctets.tagSet: eoo.endOfOctets},
self
)
return self._tagMap
|
[
"def",
"tagMap",
"(",
"self",
")",
":",
"try",
":",
"return",
"self",
".",
"_tagMap",
"except",
"AttributeError",
":",
"self",
".",
"_tagMap",
"=",
"tagmap",
".",
"TagMap",
"(",
"{",
"self",
".",
"tagSet",
":",
"self",
"}",
",",
"{",
"eoo",
".",
"endOfOctets",
".",
"tagSet",
":",
"eoo",
".",
"endOfOctets",
"}",
",",
"self",
")",
"return",
"self",
".",
"_tagMap"
] |
https://github.com/etingof/pyasn1/blob/db8f1a7930c6b5826357646746337dafc983f953/pyasn1/type/univ.py#L3288-L3302
|
||
js3611/Deep-MRI-Reconstruction
|
d8a40efd892e57799c3413630e5cb92d5b035cf8
|
cascadenet/network/layers/fourier.py
|
python
|
FFTLayer.transform
|
(self, input)
|
return T.stack([out_r, out_c])
|
Perform fourier transform using Fourier matrix
Parameters
------------------------------
input must be a 4d tensor
with shape [n, 2, nx, ny], where [nx, ny] == self.data_shape. n is the
number of data samples. 2 is the number of channels for the real and complex parts of the input
(channel 1 == real, channel 2 == complex)
uses real values to simulate the complex operation
Returns
------------------------------
tensor of the shape [n, 2, nx, ny] which is equivalent to
fourier transform
|
Perform fourier transform using Fourier matrix
|
[
"Perform",
"fourier",
"transform",
"using",
"Fourier",
"matrix"
] |
def transform(self, input):
'''
Perform fourier transform using Fourier matrix
Parameters
------------------------------
input must be a 4d tensor
with shape [n, 2, nx, ny], where [nx, ny] == self.data_shape. n is the
number of data samples. 2 is the number of channels for the real and complex parts of the input
(channel 1 == real, channel 2 == complex)
uses real values to simulate the complex operation
Returns
------------------------------
tensor of the shape [n, 2, nx, ny] which is equivalent to
fourier transform
'''
in_r = input[0]
in_c = input[1]
real_fft = self.real_fft
complex_fft = self.complex_fft
out_r = T.dot(real_fft, in_r) - T.dot(complex_fft, in_c)
out_c = T.dot(complex_fft, in_r) + T.dot(real_fft, in_c)
return T.stack([out_r, out_c])
|
[
"def",
"transform",
"(",
"self",
",",
"input",
")",
":",
"in_r",
"=",
"input",
"[",
"0",
"]",
"in_c",
"=",
"input",
"[",
"1",
"]",
"real_fft",
"=",
"self",
".",
"real_fft",
"complex_fft",
"=",
"self",
".",
"complex_fft",
"out_r",
"=",
"T",
".",
"dot",
"(",
"real_fft",
",",
"in_r",
")",
"-",
"T",
".",
"dot",
"(",
"complex_fft",
",",
"in_c",
")",
"out_c",
"=",
"T",
".",
"dot",
"(",
"complex_fft",
",",
"in_r",
")",
"+",
"T",
".",
"dot",
"(",
"real_fft",
",",
"in_c",
")",
"return",
"T",
".",
"stack",
"(",
"[",
"out_r",
",",
"out_c",
"]",
")"
] |
https://github.com/js3611/Deep-MRI-Reconstruction/blob/d8a40efd892e57799c3413630e5cb92d5b035cf8/cascadenet/network/layers/fourier.py#L51-L75
|
|
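A NumPy check of the real-arithmetic trick this layer relies on, independent of Theano: with the DFT matrix F = R + iC and a signal x = x_r + i*x_c, the real and imaginary parts of F @ x are exactly R@x_r - C@x_c and C@x_r + R@x_c, matching the two T.dot expressions above:

```python
import numpy as np

nx = 8
F = np.fft.fft(np.eye(nx))           # complex DFT matrix
R, C = F.real, F.imag
x_r, x_c = np.random.rand(nx), np.random.rand(nx)

out_r = R @ x_r - C @ x_c            # real part, as in the layer
out_c = C @ x_r + R @ x_c            # imaginary part, as in the layer
ref = F @ (x_r + 1j * x_c)
print(np.allclose(out_r, ref.real), np.allclose(out_c, ref.imag))  # True True
```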
thatbrguy/Pedestrian-Detection
|
b11c7d6bed0ff320811726fe1c429be26a87da9e
|
object_detection/builders/model_builder.py
|
python
|
_build_faster_rcnn_feature_extractor
|
(
feature_extractor_config, is_training, reuse_weights=None)
|
return feature_extractor_class(
is_training, first_stage_features_stride,
batch_norm_trainable, reuse_weights)
|
Builds a faster_rcnn_meta_arch.FasterRCNNFeatureExtractor based on config.
Args:
feature_extractor_config: A FasterRcnnFeatureExtractor proto config from
faster_rcnn.proto.
is_training: True if this feature extractor is being built for training.
reuse_weights: whether the feature extractor should reuse weights.
Returns:
faster_rcnn_meta_arch.FasterRCNNFeatureExtractor based on config.
Raises:
ValueError: On invalid feature extractor type.
|
Builds a faster_rcnn_meta_arch.FasterRCNNFeatureExtractor based on config.
|
[
"Builds",
"a",
"faster_rcnn_meta_arch",
".",
"FasterRCNNFeatureExtractor",
"based",
"on",
"config",
"."
] |
def _build_faster_rcnn_feature_extractor(
feature_extractor_config, is_training, reuse_weights=None):
"""Builds a faster_rcnn_meta_arch.FasterRCNNFeatureExtractor based on config.
Args:
feature_extractor_config: A FasterRcnnFeatureExtractor proto config from
faster_rcnn.proto.
is_training: True if this feature extractor is being built for training.
reuse_weights: whether the feature extractor should reuse weights.
Returns:
faster_rcnn_meta_arch.FasterRCNNFeatureExtractor based on config.
Raises:
ValueError: On invalid feature extractor type.
"""
feature_type = feature_extractor_config.type
first_stage_features_stride = (
feature_extractor_config.first_stage_features_stride)
batch_norm_trainable = feature_extractor_config.batch_norm_trainable
if feature_type not in FASTER_RCNN_FEATURE_EXTRACTOR_CLASS_MAP:
raise ValueError('Unknown Faster R-CNN feature_extractor: {}'.format(
feature_type))
feature_extractor_class = FASTER_RCNN_FEATURE_EXTRACTOR_CLASS_MAP[
feature_type]
return feature_extractor_class(
is_training, first_stage_features_stride,
batch_norm_trainable, reuse_weights)
|
[
"def",
"_build_faster_rcnn_feature_extractor",
"(",
"feature_extractor_config",
",",
"is_training",
",",
"reuse_weights",
"=",
"None",
")",
":",
"feature_type",
"=",
"feature_extractor_config",
".",
"type",
"first_stage_features_stride",
"=",
"(",
"feature_extractor_config",
".",
"first_stage_features_stride",
")",
"batch_norm_trainable",
"=",
"feature_extractor_config",
".",
"batch_norm_trainable",
"if",
"feature_type",
"not",
"in",
"FASTER_RCNN_FEATURE_EXTRACTOR_CLASS_MAP",
":",
"raise",
"ValueError",
"(",
"'Unknown Faster R-CNN feature_extractor: {}'",
".",
"format",
"(",
"feature_type",
")",
")",
"feature_extractor_class",
"=",
"FASTER_RCNN_FEATURE_EXTRACTOR_CLASS_MAP",
"[",
"feature_type",
"]",
"return",
"feature_extractor_class",
"(",
"is_training",
",",
"first_stage_features_stride",
",",
"batch_norm_trainable",
",",
"reuse_weights",
")"
] |
https://github.com/thatbrguy/Pedestrian-Detection/blob/b11c7d6bed0ff320811726fe1c429be26a87da9e/object_detection/builders/model_builder.py#L177-L205
|
|
GoogleCloudPlatform/professional-services
|
0c707aa97437f3d154035ef8548109b7882f71da
|
tools/gmon/gmon/cli.py
|
python
|
parse_fields
|
(fields)
|
return fields
|
Parse `fields` CLI argument.
Args:
fields (list): List of fields to display.
Returns:
list: Parsed fields.
|
Parse `fields` CLI argument.
|
[
"Parse",
"fields",
"CLI",
"argument",
"."
] |
def parse_fields(fields):
"""Parse `fields` CLI argument.
Args:
fields (list): List of fields to display.
Returns:
list: Parsed fields.
"""
# display all fields
if fields == ['all']:
return None
# Remove unneeded fields
to_remove = []
if fields:
# Go through fields and check for comma-delimited fields (user mistakes)
for f in fields:
if ',' in f:
to_remove.append(f)
more = f.split(",")
fields.extend(more)
for f in to_remove:
fields.remove(f)
return fields
|
[
"def",
"parse_fields",
"(",
"fields",
")",
":",
"# display all fields",
"if",
"fields",
"==",
"[",
"'all'",
"]",
":",
"return",
"None",
"# Remove unneeded fields",
"to_remove",
"=",
"[",
"]",
"if",
"fields",
":",
"# Go through fields and check for comma-delimited fields (user mistakes)",
"for",
"f",
"in",
"fields",
":",
"if",
"','",
"in",
"f",
":",
"to_remove",
".",
"append",
"(",
"f",
")",
"more",
"=",
"f",
".",
"split",
"(",
"\",\"",
")",
"fields",
".",
"extend",
"(",
"more",
")",
"for",
"f",
"in",
"to_remove",
":",
"fields",
".",
"remove",
"(",
"f",
")",
"return",
"fields"
] |
https://github.com/GoogleCloudPlatform/professional-services/blob/0c707aa97437f3d154035ef8548109b7882f71da/tools/gmon/gmon/cli.py#L405-L429
|
|
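A behavior check for parse_fields, with the body copied from the row above (only `more = f.split(",")` is inlined). Note that it extends `fields` while iterating over it; that is safe here only because the split pieces contain no further commas:

```python
def parse_fields(fields):
    if fields == ['all']:
        return None       # None means "display all fields"
    to_remove = []
    if fields:
        for f in fields:
            if ',' in f:  # a user passed comma-joined fields
                to_remove.append(f)
                fields.extend(f.split(","))
        for f in to_remove:
            fields.remove(f)
    return fields

print(parse_fields(['all']))              # None
print(parse_fields(['cpu,mem', 'disk']))  # ['disk', 'cpu', 'mem']
```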
SHI-Labs/Decoupled-Classification-Refinement
|
16202b48eb9cbf79a9b130a98e8c209d4f24693e
|
faster_rcnn/core/DataParallelExecutorGroup.py
|
python
|
DataParallelExecutorGroup.get_outputs
|
(self, merge_multi_context=True)
|
return outputs
|
Get outputs of the previous forward computation.
Parameters
----------
merge_multi_context : bool
Default is `True`. In the case when data-parallelism is used, the outputs
will be collected from multiple devices. A `True` value indicates that we
should merge the collected results so that they look like from a single
executor.
Returns
-------
If `merge_multi_context` is `True`, it is like `[out1, out2]`. Otherwise, it
is like `[[out1_dev1, out1_dev2], [out2_dev1, out2_dev2]]`. All the output
elements are `NDArray`.
|
Get outputs of the previous forward computation.
|
[
"Get",
"outputs",
"of",
"the",
"previous",
"forward",
"computation",
"."
] |
def get_outputs(self, merge_multi_context=True):
"""Get outputs of the previous forward computation.
Parameters
----------
merge_multi_context : bool
Default is `True`. In the case when data-parallelism is used, the outputs
will be collected from multiple devices. A `True` value indicates that we
should merge the collected results so that they look like from a single
executor.
Returns
-------
If `merge_multi_context` is `True`, it is like `[out1, out2]`. Otherwise, it
is like `[[out1_dev1, out1_dev2], [out2_dev1, out2_dev2]]`. All the output
elements are `NDArray`.
"""
outputs = [[exec_.outputs[i] for exec_ in self.execs]
for i in range(len(self.execs[0].outputs))]
if merge_multi_context:
outputs = _merge_multi_context(outputs, self.output_layouts)
return outputs
|
[
"def",
"get_outputs",
"(",
"self",
",",
"merge_multi_context",
"=",
"True",
")",
":",
"outputs",
"=",
"[",
"[",
"exec_",
".",
"outputs",
"[",
"i",
"]",
"for",
"exec_",
"in",
"self",
".",
"execs",
"]",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"self",
".",
"execs",
"[",
"0",
"]",
".",
"outputs",
")",
")",
"]",
"if",
"merge_multi_context",
":",
"outputs",
"=",
"_merge_multi_context",
"(",
"outputs",
",",
"self",
".",
"output_layouts",
")",
"return",
"outputs"
] |
https://github.com/SHI-Labs/Decoupled-Classification-Refinement/blob/16202b48eb9cbf79a9b130a98e8c209d4f24693e/faster_rcnn/core/DataParallelExecutorGroup.py#L363-L384
|
|
danieljl/keras-image-captioning
|
cac7a99ed35ed787b473376ce5c5d189f191f578
|
pycocoevalcap/bleu/bleu_scorer.py
|
python
|
cook_refs
|
(refs, eff=None, n=4)
|
return (reflen, maxcounts)
|
Takes a list of reference sentences for a single segment
and returns an object that encapsulates everything that BLEU
needs to know about them.
|
Takes a list of reference sentences for a single segment
and returns an object that encapsulates everything that BLEU
needs to know about them.
|
[
"Takes",
"a",
"list",
"of",
"reference",
"sentences",
"for",
"a",
"single",
"segment",
"and",
"returns",
"an",
"object",
"that",
"encapsulates",
"everything",
"that",
"BLEU",
"needs",
"to",
"know",
"about",
"them",
"."
] |
def cook_refs(refs, eff=None, n=4): ## lhuang: oracle will call with "average"
'''Takes a list of reference sentences for a single segment
and returns an object that encapsulates everything that BLEU
needs to know about them.'''
reflen = []
maxcounts = {}
for ref in refs:
rl, counts = precook(ref, n)
reflen.append(rl)
for (ngram,count) in counts.iteritems():
maxcounts[ngram] = max(maxcounts.get(ngram,0), count)
# Calculate effective reference sentence length.
if eff == "shortest":
reflen = min(reflen)
elif eff == "average":
reflen = float(sum(reflen))/len(reflen)
## lhuang: N.B.: leave reflen computation to the very end!!
## lhuang: N.B.: in case of "closest", keep a list of reflens!! (bad design)
return (reflen, maxcounts)
|
[
"def",
"cook_refs",
"(",
"refs",
",",
"eff",
"=",
"None",
",",
"n",
"=",
"4",
")",
":",
"## lhuang: oracle will call with \"average\"",
"reflen",
"=",
"[",
"]",
"maxcounts",
"=",
"{",
"}",
"for",
"ref",
"in",
"refs",
":",
"rl",
",",
"counts",
"=",
"precook",
"(",
"ref",
",",
"n",
")",
"reflen",
".",
"append",
"(",
"rl",
")",
"for",
"(",
"ngram",
",",
"count",
")",
"in",
"counts",
".",
"iteritems",
"(",
")",
":",
"maxcounts",
"[",
"ngram",
"]",
"=",
"max",
"(",
"maxcounts",
".",
"get",
"(",
"ngram",
",",
"0",
")",
",",
"count",
")",
"# Calculate effective reference sentence length.",
"if",
"eff",
"==",
"\"shortest\"",
":",
"reflen",
"=",
"min",
"(",
"reflen",
")",
"elif",
"eff",
"==",
"\"average\"",
":",
"reflen",
"=",
"float",
"(",
"sum",
"(",
"reflen",
")",
")",
"/",
"len",
"(",
"reflen",
")",
"## lhuang: N.B.: leave reflen computaiton to the very end!!",
"## lhuang: N.B.: in case of \"closest\", keep a list of reflens!! (bad design)",
"return",
"(",
"reflen",
",",
"maxcounts",
")"
] |
https://github.com/danieljl/keras-image-captioning/blob/cac7a99ed35ed787b473376ce5c5d189f191f578/pycocoevalcap/bleu/bleu_scorer.py#L35-L58
|
|
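cook_refs above is Python 2 code (counts.iteritems()). A minimal Python 3 sketch of the same reference-cooking logic; the tiny precook() here is an assumed stand-in for the module's real n-gram counter:

from collections import Counter

def precook(s, n=4):
    # Count all 1..n-grams of a whitespace-tokenized sentence.
    words = s.split()
    counts = Counter()
    for k in range(1, n + 1):
        for i in range(len(words) - k + 1):
            counts[tuple(words[i:i + k])] += 1
    return len(words), counts

def cook_refs_sketch(refs, eff=None, n=4):
    reflen, maxcounts = [], {}
    for ref in refs:
        rl, counts = precook(ref, n)
        reflen.append(rl)
        for ngram, count in counts.items():  # .iteritems() in the Py2 original
            maxcounts[ngram] = max(maxcounts.get(ngram, 0), count)
    if eff == "shortest":
        reflen = min(reflen)
    elif eff == "average":
        reflen = float(sum(reflen)) / len(reflen)
    return reflen, maxcounts

print(cook_refs_sketch(["the cat sat", "a cat sat down"], eff="average")[0])  # 3.5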
pm4py/pm4py-core
|
7807b09a088b02199cd0149d724d0e28793971bf
|
pm4py/objects/stochastic_petri/ctmc.py
|
python
|
get_tangible_reachability_and_q_matrix_from_dfg_performance
|
(dfg_performance, invisible_firing_rate=1000.0, parameters=None)
|
return tang_reach_graph, tang_reach_graph, stochastic_map, q_matrix
|
Get the tangible reachability graph and the Q matrix from the performance DFG
Parameters
-------------
dfg_performance
Performance DFG
invisible_firing_rate
Firing rate for invisible transitions
parameters
Parameters
Returns
-------------
reachab_graph
Reachability graph
tangible_reach_graph
Tangible reachability graph
stochastic_info
Stochastic information
q_matrix
Q-matrix from the tangible reachability graph
|
Get the tangible reachability graph and the Q matrix from the performance DFG
|
[
"Get",
"the",
"tangible",
"reachability",
"graph",
"and",
"the",
"Q",
"matrix",
"from",
"the",
"performance",
"DFG"
] |
def get_tangible_reachability_and_q_matrix_from_dfg_performance(dfg_performance, invisible_firing_rate=1000.0, parameters=None):
"""
Get the tangible reachability graph and the Q matrix from the performance DFG
Parameters
-------------
dfg_performance
Performance DFG
invisible_firing_rate
Firing rate for invisible transitions
parameters
Parameters
Returns
-------------
reachab_graph
Reachability graph
tangible_reach_graph
Tangible reachability graph
stochastic_info
Stochastic information
q_matrix
Q-matrix from the tangible reachability graph
"""
if parameters is None:
parameters = {}
net, im, fm = dfg_converter.apply(dfg_performance, parameters=parameters)
stochastic_map = {}
for tr in net.transitions:
if tr.label is None:
rv = random_variable.RandomVariable()
exp = exponential.Exponential()
exp.scale = 1/invisible_firing_rate
rv.random_variable = exp
stochastic_map[tr] = rv
else:
input_arc = list(tr.in_arcs)[0]
output_arc = list(tr.out_arcs)[0]
rv = random_variable.RandomVariable()
el = (input_arc.source.name, output_arc.target.name)
scale = 0
if el in dfg_performance:
scale = dfg_performance[el]
if scale == 0:
scale = 1/invisible_firing_rate
exp = exponential.Exponential()
exp.scale = scale
rv.random_variable = exp
stochastic_map[tr] = rv
tang_reach_graph = construct_reachability_graph(net, im, use_trans_name=True)
q_matrix = get_q_matrix_from_tangible_exponential(tang_reach_graph, stochastic_map)
return tang_reach_graph, tang_reach_graph, stochastic_map, q_matrix
|
[
"def",
"get_tangible_reachability_and_q_matrix_from_dfg_performance",
"(",
"dfg_performance",
",",
"invisible_firing_rate",
"=",
"1000.0",
",",
"parameters",
"=",
"None",
")",
":",
"if",
"parameters",
"is",
"None",
":",
"parameters",
"=",
"{",
"}",
"net",
",",
"im",
",",
"fm",
"=",
"dfg_converter",
".",
"apply",
"(",
"dfg_performance",
",",
"parameters",
"=",
"parameters",
")",
"stochastic_map",
"=",
"{",
"}",
"for",
"tr",
"in",
"net",
".",
"transitions",
":",
"if",
"tr",
".",
"label",
"is",
"None",
":",
"rv",
"=",
"random_variable",
".",
"RandomVariable",
"(",
")",
"exp",
"=",
"exponential",
".",
"Exponential",
"(",
")",
"exp",
".",
"scale",
"=",
"1",
"/",
"invisible_firing_rate",
"rv",
".",
"random_variable",
"=",
"exp",
"stochastic_map",
"[",
"tr",
"]",
"=",
"rv",
"else",
":",
"input_arc",
"=",
"list",
"(",
"tr",
".",
"in_arcs",
")",
"[",
"0",
"]",
"output_arc",
"=",
"list",
"(",
"tr",
".",
"out_arcs",
")",
"[",
"0",
"]",
"rv",
"=",
"random_variable",
".",
"RandomVariable",
"(",
")",
"el",
"=",
"(",
"input_arc",
".",
"source",
".",
"name",
",",
"output_arc",
".",
"target",
".",
"name",
")",
"scale",
"=",
"0",
"if",
"el",
"in",
"dfg_performance",
":",
"scale",
"=",
"dfg_performance",
"[",
"el",
"]",
"if",
"scale",
"==",
"0",
":",
"scale",
"=",
"1",
"/",
"invisible_firing_rate",
"exp",
"=",
"exponential",
".",
"Exponential",
"(",
")",
"exp",
".",
"scale",
"=",
"scale",
"rv",
".",
"random_variable",
"=",
"exp",
"stochastic_map",
"[",
"tr",
"]",
"=",
"rv",
"tang_reach_graph",
"=",
"construct_reachability_graph",
"(",
"net",
",",
"im",
",",
"use_trans_name",
"=",
"True",
")",
"q_matrix",
"=",
"get_q_matrix_from_tangible_exponential",
"(",
"tang_reach_graph",
",",
"stochastic_map",
")",
"return",
"tang_reach_graph",
",",
"tang_reach_graph",
",",
"stochastic_map",
",",
"q_matrix"
] |
https://github.com/pm4py/pm4py-core/blob/7807b09a088b02199cd0149d724d0e28793971bf/pm4py/objects/stochastic_petri/ctmc.py#L86-L137
|
|
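A hedged usage sketch for the function above: the import path is inferred from the file location shown, and the toy DFG durations are invented. Note the source returns tang_reach_graph twice, so the first two returned values coincide.

# Assumed import path, taken from the file location in this row.
from pm4py.objects.stochastic_petri.ctmc import \
    get_tangible_reachability_and_q_matrix_from_dfg_performance

# Hypothetical performance DFG: mean seconds between directly-follows activities.
dfg_perf = {("register", "decide"): 3600.0,
            ("decide", "pay"): 1800.0}
tang1, tang2, smap, q = \
    get_tangible_reachability_and_q_matrix_from_dfg_performance(dfg_perf)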
apigee/henchman
|
13c53c66669800aaa89f1799ac974b45ec473c3d
|
modules/curl/curl/requests/requests/packages/urllib3/_collections.py
|
python
|
RecentlyUsedContainer.clear
|
(self)
|
[] |
def clear(self):
with self.lock:
# Copy pointers to all values, then wipe the mapping
values = list(itervalues(self._container))
self._container.clear()
if self.dispose_func:
for value in values:
self.dispose_func(value)
|
[
"def",
"clear",
"(",
"self",
")",
":",
"with",
"self",
".",
"lock",
":",
"# Copy pointers to all values, then wipe the mapping",
"values",
"=",
"list",
"(",
"itervalues",
"(",
"self",
".",
"_container",
")",
")",
"self",
".",
"_container",
".",
"clear",
"(",
")",
"if",
"self",
".",
"dispose_func",
":",
"for",
"value",
"in",
"values",
":",
"self",
".",
"dispose_func",
"(",
"value",
")"
] |
https://github.com/apigee/henchman/blob/13c53c66669800aaa89f1799ac974b45ec473c3d/modules/curl/curl/requests/requests/packages/urllib3/_collections.py#L85-L93
|
||||
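A generic sketch of the copy-then-dispose-under-the-lock pattern used by clear() above; DisposingCache is illustrative only, not urllib3's API:

import threading

class DisposingCache:
    def __init__(self, dispose_func=None):
        self.lock = threading.RLock()
        self._container = {}
        self.dispose_func = dispose_func

    def clear(self):
        with self.lock:
            # Copy pointers first so disposal runs on a stable snapshot.
            values = list(self._container.values())
            self._container.clear()
            if self.dispose_func:
                for value in values:
                    self.dispose_func(value)

cache = DisposingCache(dispose_func=lambda v: print("disposed", v))
cache._container.update({"a": 1, "b": 2})
cache.clear()  # prints: disposed 1, then disposed 2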
PowerScript/KatanaFramework
|
0f6ad90a88de865d58ec26941cb4460501e75496
|
lib/scapy/build/lib.linux-i686-2.7/scapy/contrib/bgp.py
|
python
|
BGPOptionalParameter.extract_padding
|
(self, p)
|
return "",p
|
any thing after this packet is extracted is padding
|
any thing after this packet is extracted is padding
|
[
"any",
"thing",
"after",
"this",
"packet",
"is",
"extracted",
"is",
"padding"
] |
def extract_padding(self, p):
"""any thing after this packet is extracted is padding"""
return "",p
|
[
"def",
"extract_padding",
"(",
"self",
",",
"p",
")",
":",
"return",
"\"\"",
",",
"p"
] |
https://github.com/PowerScript/KatanaFramework/blob/0f6ad90a88de865d58ec26941cb4460501e75496/lib/scapy/build/lib.linux-i686-2.7/scapy/contrib/bgp.py#L74-L76
|
|
ACCLAB/DABEST-python
|
3ac87685a6c0859f731e9c9107bef8f32e39a61d
|
dabest/_stats_tools/effsize.py
|
python
|
_compute_hedges_correction_factor
|
(n1, n2)
|
return out
|
Computes the bias correction factor for Hedges' g.
See https://en.wikipedia.org/wiki/Effect_size#Hedges'_g
Returns
-------
j: float
References
----------
Larry V. Hedges & Ingram Olkin (1985).
Statistical Methods for Meta-Analysis. Orlando: Academic Press.
ISBN 0-12-336380-2.
|
Computes the bias correction factor for Hedges' g.
|
[
"Computes",
"the",
"bias",
"correction",
"factor",
"for",
"Hedges",
"g",
"."
] |
def _compute_hedges_correction_factor(n1, n2):
"""
Computes the bias correction factor for Hedges' g.
See https://en.wikipedia.org/wiki/Effect_size#Hedges'_g
Returns
-------
j: float
References
----------
Larry V. Hedges & Ingram Olkin (1985).
Statistical Methods for Meta-Analysis. Orlando: Academic Press.
ISBN 0-12-336380-2.
"""
from scipy.special import gamma
from numpy import sqrt, isinf
import warnings
df = n1 + n2 - 2
numer = gamma(df / 2)
denom0 = gamma((df - 1) / 2)
denom = sqrt(df / 2) * denom0
if isinf(numer) or isinf(denom):
# occurs when df is too large.
# Apply Hedges and Olkin's approximation.
df_sum = n1 + n2
denom = (4 * df_sum) - 9
out = 1 - (3 / denom)
else:
out = numer / denom
return out
|
[
"def",
"_compute_hedges_correction_factor",
"(",
"n1",
",",
"n2",
")",
":",
"from",
"scipy",
".",
"special",
"import",
"gamma",
"from",
"numpy",
"import",
"sqrt",
",",
"isinf",
"import",
"warnings",
"df",
"=",
"n1",
"+",
"n2",
"-",
"2",
"numer",
"=",
"gamma",
"(",
"df",
"/",
"2",
")",
"denom0",
"=",
"gamma",
"(",
"(",
"df",
"-",
"1",
")",
"/",
"2",
")",
"denom",
"=",
"sqrt",
"(",
"df",
"/",
"2",
")",
"*",
"denom0",
"if",
"isinf",
"(",
"numer",
")",
"or",
"isinf",
"(",
"denom",
")",
":",
"# occurs when df is too large.",
"# Apply Hedges and Olkin's approximation.",
"df_sum",
"=",
"n1",
"+",
"n2",
"denom",
"=",
"(",
"4",
"*",
"df_sum",
")",
"-",
"9",
"out",
"=",
"1",
"-",
"(",
"3",
"/",
"denom",
")",
"else",
":",
"out",
"=",
"numer",
"/",
"denom",
"return",
"out"
] |
https://github.com/ACCLAB/DABEST-python/blob/3ac87685a6c0859f731e9c9107bef8f32e39a61d/dabest/_stats_tools/effsize.py#L352-L388
|
|
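A self-contained check of the formula above, using log-gamma to sidestep the overflow the source guards against with isinf; the resulting factor j multiplies Cohen's d to give Hedges' g:

from math import exp, lgamma, sqrt

def hedges_j(n1, n2):
    # Exact bias correction: gamma(df/2) / (sqrt(df/2) * gamma((df-1)/2)).
    df = n1 + n2 - 2
    return exp(lgamma(df / 2) - lgamma((df - 1) / 2)) / sqrt(df / 2)

def hedges_j_approx(n1, n2):
    # Hedges & Olkin's large-sample approximation from the fallback branch.
    return 1 - 3 / (4 * (n1 + n2) - 9)

print(hedges_j(10, 12))         # ~0.962 (exact)
print(hedges_j_approx(10, 12))  # ~0.962 (approximation agrees closely here)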
rembo10/headphones
|
b3199605be1ebc83a7a8feab6b1e99b64014187c
|
headphones/webserve.py
|
python
|
WebInterface.addArtist
|
(self, artistid)
|
[] |
def addArtist(self, artistid):
thread = threading.Thread(target=importer.addArtisttoDB, args=[artistid])
thread.start()
thread.join(1)
raise cherrypy.HTTPRedirect("artistPage?ArtistID=%s" % artistid)
|
[
"def",
"addArtist",
"(",
"self",
",",
"artistid",
")",
":",
"thread",
"=",
"threading",
".",
"Thread",
"(",
"target",
"=",
"importer",
".",
"addArtisttoDB",
",",
"args",
"=",
"[",
"artistid",
"]",
")",
"thread",
".",
"start",
"(",
")",
"thread",
".",
"join",
"(",
"1",
")",
"raise",
"cherrypy",
".",
"HTTPRedirect",
"(",
"\"artistPage?ArtistID=%s\"",
"%",
"artistid",
")"
] |
https://github.com/rembo10/headphones/blob/b3199605be1ebc83a7a8feab6b1e99b64014187c/headphones/webserve.py#L165-L169
|
||||
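A generic sketch of the start-then-join(1) pattern used by addArtist above: kick off background work, wait at most one second, then continue; background_job is a made-up stand-in for importer.addArtisttoDB:

import threading
import time

def background_job(item):
    time.sleep(2)              # simulate slow work
    print("finished", item)

thread = threading.Thread(target=background_job, args=["artist-123"])
thread.start()
thread.join(1)                 # returns after ~1s; the thread keeps running
print("alive after join(1)?", thread.is_alive())  # True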
napari/napari
|
dbf4158e801fa7a429de8ef1cdee73bf6d64c61e
|
napari/_qt/qt_viewer.py
|
python
|
QtViewer._map_canvas2world
|
(self, position)
|
return tuple(position_world)
|
Map position from canvas pixels into world coordinates.
Parameters
----------
position : 2-tuple
Position in canvas (x, y).
Returns
-------
coords : tuple
Position in world coordinates, matches the total dimensionality
of the viewer.
|
Map position from canvas pixels into world coordinates.
|
[
"Map",
"position",
"from",
"canvas",
"pixels",
"into",
"world",
"coordinates",
"."
] |
def _map_canvas2world(self, position):
"""Map position from canvas pixels into world coordinates.
Parameters
----------
position : 2-tuple
Position in canvas (x, y).
Returns
-------
coords : tuple
Position in world coordinates, matches the total dimensionality
of the viewer.
"""
nd = self.viewer.dims.ndisplay
transform = self.view.scene.transform
mapped_position = transform.imap(list(position))[:nd]
position_world_slice = mapped_position[::-1]
position_world = list(self.viewer.dims.point)
for i, d in enumerate(self.viewer.dims.displayed):
position_world[d] = position_world_slice[i]
return tuple(position_world)
|
[
"def",
"_map_canvas2world",
"(",
"self",
",",
"position",
")",
":",
"nd",
"=",
"self",
".",
"viewer",
".",
"dims",
".",
"ndisplay",
"transform",
"=",
"self",
".",
"view",
".",
"scene",
".",
"transform",
"mapped_position",
"=",
"transform",
".",
"imap",
"(",
"list",
"(",
"position",
")",
")",
"[",
":",
"nd",
"]",
"position_world_slice",
"=",
"mapped_position",
"[",
":",
":",
"-",
"1",
"]",
"position_world",
"=",
"list",
"(",
"self",
".",
"viewer",
".",
"dims",
".",
"point",
")",
"for",
"i",
",",
"d",
"in",
"enumerate",
"(",
"self",
".",
"viewer",
".",
"dims",
".",
"displayed",
")",
":",
"position_world",
"[",
"d",
"]",
"=",
"position_world_slice",
"[",
"i",
"]",
"return",
"tuple",
"(",
"position_world",
")"
] |
https://github.com/napari/napari/blob/dbf4158e801fa7a429de8ef1cdee73bf6d64c61e/napari/_qt/qt_viewer.py#L816-L839
|
|
privacyidea/privacyidea
|
9490c12ddbf77a34ac935b082d09eb583dfafa2c
|
privacyidea/lib/config.py
|
python
|
get_token_list
|
()
|
return module_list
|
get the list of the tokens
:return: list of token names from the config file
|
get the list of the tokens
:return: list of token names from the config file
|
[
"get",
"the",
"list",
"of",
"the",
"tokens",
":",
"return",
":",
"list",
"of",
"token",
"names",
"from",
"the",
"config",
"file"
] |
def get_token_list():
"""
get the list of the tokens
:return: list of token names from the config file
"""
module_list = set()
module_list.add("privacyidea.lib.tokens.daplugtoken")
module_list.add("privacyidea.lib.tokens.hotptoken")
module_list.add("privacyidea.lib.tokens.motptoken")
module_list.add("privacyidea.lib.tokens.passwordtoken")
module_list.add("privacyidea.lib.tokens.remotetoken")
module_list.add("privacyidea.lib.tokens.spasstoken")
module_list.add("privacyidea.lib.tokens.sshkeytoken")
module_list.add("privacyidea.lib.tokens.totptoken")
module_list.add("privacyidea.lib.tokens.yubicotoken")
module_list.add("privacyidea.lib.tokens.yubikeytoken")
module_list.add("privacyidea.lib.tokens.radiustoken")
module_list.add("privacyidea.lib.tokens.smstoken")
module_list.add("privacyidea.lib.tokens.emailtoken")
module_list.add("privacyidea.lib.tokens.registrationtoken")
module_list.add("privacyidea.lib.tokens.certificatetoken")
module_list.add("privacyidea.lib.tokens.foureyestoken")
module_list.add("privacyidea.lib.tokens.tiqrtoken")
module_list.add("privacyidea.lib.tokens.ocratoken")
module_list.add("privacyidea.lib.tokens.u2ftoken")
module_list.add("privacyidea.lib.tokens.papertoken")
module_list.add("privacyidea.lib.tokens.questionnairetoken")
module_list.add("privacyidea.lib.tokens.vascotoken")
module_list.add("privacyidea.lib.tokens.tantoken")
module_list.add("privacyidea.lib.tokens.pushtoken")
module_list.add("privacyidea.lib.tokens.indexedsecrettoken")
module_list.add("privacyidea.lib.tokens.webauthntoken")
# Dynamic token modules
dynamic_token_modules = get_app_config_value("PI_TOKEN_MODULES")
if dynamic_token_modules:
# In the pi.cfg you can specify a list or set of 3rd party token modules like
# PI_TOKEN_MODULES = [ "myproj.tokens.tok1", "myproj.tokens.tok2" ]
module_list.update(to_list(dynamic_token_modules))
return module_list
|
[
"def",
"get_token_list",
"(",
")",
":",
"module_list",
"=",
"set",
"(",
")",
"module_list",
".",
"add",
"(",
"\"privacyidea.lib.tokens.daplugtoken\"",
")",
"module_list",
".",
"add",
"(",
"\"privacyidea.lib.tokens.hotptoken\"",
")",
"module_list",
".",
"add",
"(",
"\"privacyidea.lib.tokens.motptoken\"",
")",
"module_list",
".",
"add",
"(",
"\"privacyidea.lib.tokens.passwordtoken\"",
")",
"module_list",
".",
"add",
"(",
"\"privacyidea.lib.tokens.remotetoken\"",
")",
"module_list",
".",
"add",
"(",
"\"privacyidea.lib.tokens.spasstoken\"",
")",
"module_list",
".",
"add",
"(",
"\"privacyidea.lib.tokens.sshkeytoken\"",
")",
"module_list",
".",
"add",
"(",
"\"privacyidea.lib.tokens.totptoken\"",
")",
"module_list",
".",
"add",
"(",
"\"privacyidea.lib.tokens.yubicotoken\"",
")",
"module_list",
".",
"add",
"(",
"\"privacyidea.lib.tokens.yubikeytoken\"",
")",
"module_list",
".",
"add",
"(",
"\"privacyidea.lib.tokens.radiustoken\"",
")",
"module_list",
".",
"add",
"(",
"\"privacyidea.lib.tokens.smstoken\"",
")",
"module_list",
".",
"add",
"(",
"\"privacyidea.lib.tokens.emailtoken\"",
")",
"module_list",
".",
"add",
"(",
"\"privacyidea.lib.tokens.registrationtoken\"",
")",
"module_list",
".",
"add",
"(",
"\"privacyidea.lib.tokens.certificatetoken\"",
")",
"module_list",
".",
"add",
"(",
"\"privacyidea.lib.tokens.foureyestoken\"",
")",
"module_list",
".",
"add",
"(",
"\"privacyidea.lib.tokens.tiqrtoken\"",
")",
"module_list",
".",
"add",
"(",
"\"privacyidea.lib.tokens.ocratoken\"",
")",
"module_list",
".",
"add",
"(",
"\"privacyidea.lib.tokens.u2ftoken\"",
")",
"module_list",
".",
"add",
"(",
"\"privacyidea.lib.tokens.papertoken\"",
")",
"module_list",
".",
"add",
"(",
"\"privacyidea.lib.tokens.questionnairetoken\"",
")",
"module_list",
".",
"add",
"(",
"\"privacyidea.lib.tokens.vascotoken\"",
")",
"module_list",
".",
"add",
"(",
"\"privacyidea.lib.tokens.tantoken\"",
")",
"module_list",
".",
"add",
"(",
"\"privacyidea.lib.tokens.pushtoken\"",
")",
"module_list",
".",
"add",
"(",
"\"privacyidea.lib.tokens.indexedsecrettoken\"",
")",
"module_list",
".",
"add",
"(",
"\"privacyidea.lib.tokens.webauthntoken\"",
")",
"# Dynamic token modules",
"dynamic_token_modules",
"=",
"get_app_config_value",
"(",
"\"PI_TOKEN_MODULES\"",
")",
"if",
"dynamic_token_modules",
":",
"# In the pi.cfg you can specify a list or set of 3rd party token modules like",
"# PI_TOKEN_MODULES = [ \"myproj.tokens.tok1\", \"myproj.tokens.tok2\" ]",
"module_list",
".",
"update",
"(",
"to_list",
"(",
"dynamic_token_modules",
")",
")",
"return",
"module_list"
] |
https://github.com/privacyidea/privacyidea/blob/9490c12ddbf77a34ac935b082d09eb583dfafa2c/privacyidea/lib/config.py#L690-L731
|
|
IronLanguages/ironpython3
|
7a7bb2a872eeab0d1009fc8a6e24dca43f65b693
|
Src/StdLib/Lib/email/message.py
|
python
|
_IsAttachment.__bool__
|
(self)
|
return self.value
|
[] |
def __bool__(self):
warnings.warn("is_attachment will be a method, not a property, in 3.5",
DeprecationWarning,
stacklevel=3)
return self.value
|
[
"def",
"__bool__",
"(",
"self",
")",
":",
"warnings",
".",
"warn",
"(",
"\"is_attachment will be a method, not a property, in 3.5\"",
",",
"DeprecationWarning",
",",
"stacklevel",
"=",
"3",
")",
"return",
"self",
".",
"value"
] |
https://github.com/IronLanguages/ironpython3/blob/7a7bb2a872eeab0d1009fc8a6e24dca43f65b693/Src/StdLib/Lib/email/message.py#L939-L943
|
|||
lebedov/scikit-cuda
|
5d3c74f926fe7ce67ecfc85e9623aab7bc0b344f
|
skcuda/magma.py
|
python
|
magma_ssyevdx_gpu
|
(jobz, rnge, uplo, n, A, lda, vl, vu, il, iu, m, w, wa, ldwa, work, lwork, iwork, liwork)
|
Compute eigenvalues of real symmetric matrix.
Single-GPU, data on device, expert mode
source: dsyedx_m.cpp
|
Compute eigenvalues of real symmetric matrix.
Single-GPU, data on device, expert mode
|
[
"Compute",
"eigenvalues",
"of",
"real",
"symmetric",
"matrix",
".",
"Single",
"-",
"GPU",
"data",
"on",
"device",
"expert",
"mode"
] |
def magma_ssyevdx_gpu(jobz, rnge, uplo, n, A, lda,
vl, vu, il, iu, m,
w, wa, ldwa,
work, lwork, iwork, liwork):
"""
Compute eigenvalues of real symmetric matrix.
Single-GPU, data on device, expert mode
source: dsyedx_m.cpp
"""
# _XXX_conversion[] returns integer according to magma_types.h
jobz = _vec_conversion[jobz]
rnge = _range_conversion[rnge]
uplo = _uplo_conversion[uplo]
info = c_int_type()
status = _libmagma.magma_ssyevdx_gpu(jobz, rnge, uplo, n, int(A), lda,
vl, vu, il, iu, int(m),
int(w), int(wa), ldwa,
int(work), lwork, int(iwork), liwork,
ctypes.byref(info))
magmaCheckStatus(status)
|
[
"def",
"magma_ssyevdx_gpu",
"(",
"jobz",
",",
"rnge",
",",
"uplo",
",",
"n",
",",
"A",
",",
"lda",
",",
"vl",
",",
"vu",
",",
"il",
",",
"iu",
",",
"m",
",",
"w",
",",
"wa",
",",
"ldwa",
",",
"work",
",",
"lwork",
",",
"iwork",
",",
"liwork",
")",
":",
"# _XXX_conversion[] returns integer according to magma_types.h",
"jobz",
"=",
"_vec_conversion",
"[",
"jobz",
"]",
"rnge",
"=",
"_range_conversion",
"[",
"rnge",
"]",
"uplo",
"=",
"_uplo_conversion",
"[",
"uplo",
"]",
"info",
"=",
"c_int_type",
"(",
")",
"status",
"=",
"_libmagma",
".",
"magma_ssyevdx_gpu",
"(",
"jobz",
",",
"rnge",
",",
"uplo",
",",
"n",
",",
"int",
"(",
"A",
")",
",",
"lda",
",",
"vl",
",",
"vu",
",",
"il",
",",
"iu",
",",
"int",
"(",
"m",
")",
",",
"int",
"(",
"w",
")",
",",
"int",
"(",
"wa",
")",
",",
"ldwa",
",",
"int",
"(",
"work",
")",
",",
"lwork",
",",
"int",
"(",
"iwork",
")",
",",
"liwork",
",",
"ctypes",
".",
"byref",
"(",
"info",
")",
")",
"magmaCheckStatus",
"(",
"status",
")"
] |
https://github.com/lebedov/scikit-cuda/blob/5d3c74f926fe7ce67ecfc85e9623aab7bc0b344f/skcuda/magma.py#L3887-L3907
|
||
secureworks/dalton
|
a514a7ed5dc376a6722260910078d841017b1f80
|
app/dalton.py
|
python
|
set_job_status
|
(jobid, status)
|
set's a job status code
|
set's a job status code
|
[
"set",
"s",
"a",
"job",
"status",
"code"
] |
def set_job_status(jobid, status):
"""set's a job status code"""
global r
r.set("%s-statcode" % jobid, status)
# statcode keys do not expire if/when they are queued
if status != STAT_CODE_QUEUED:
if r.get("%s-teapotjob" % jobid):
r.expire("%s-statcode" % jobid, TEAPOT_REDIS_EXPIRE)
else:
r.expire("%s-statcode" % jobid, REDIS_EXPIRE)
|
[
"def",
"set_job_status",
"(",
"jobid",
",",
"status",
")",
":",
"global",
"r",
"r",
".",
"set",
"(",
"\"%s-statcode\"",
"%",
"jobid",
",",
"status",
")",
"# statcode keys do not expire if/when they are queued",
"if",
"status",
"!=",
"STAT_CODE_QUEUED",
":",
"if",
"r",
".",
"get",
"(",
"\"%s-teapotjob\"",
"%",
"jobid",
")",
":",
"r",
".",
"expire",
"(",
"\"%s-statcode\"",
"%",
"jobid",
",",
"TEAPOT_REDIS_EXPIRE",
")",
"else",
":",
"r",
".",
"expire",
"(",
"\"%s-statcode\"",
"%",
"jobid",
",",
"REDIS_EXPIRE",
")"
] |
https://github.com/secureworks/dalton/blob/a514a7ed5dc376a6722260910078d841017b1f80/app/dalton.py#L273-L282
|
||
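A hedged sketch of the redis-py calls used above; the connection parameters, the expiry constant, and the queued status code are assumptions for illustration:

import redis

r = redis.Redis(host="localhost", port=6379)
REDIS_EXPIRE = 3600            # hypothetical stand-in for the module constant
STAT_CODE_QUEUED = 0           # hypothetical stand-in

def set_status(jobid, status):
    r.set("%s-statcode" % jobid, status)
    # Queued jobs keep their key alive; others expire, as in the source.
    if status != STAT_CODE_QUEUED:
        r.expire("%s-statcode" % jobid, REDIS_EXPIRE)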
holzschu/Carnets
|
44effb10ddfc6aa5c8b0687582a724ba82c6b547
|
Library/lib/python3.7/site-packages/traitlets/config/loader.py
|
python
|
Config._merge
|
(self, other)
|
deprecated alias, use Config.merge()
|
deprecated alias, use Config.merge()
|
[
"deprecated",
"alias",
"use",
"Config",
".",
"merge",
"()"
] |
def _merge(self, other):
"""deprecated alias, use Config.merge()"""
self.merge(other)
|
[
"def",
"_merge",
"(",
"self",
",",
"other",
")",
":",
"self",
".",
"merge",
"(",
"other",
")"
] |
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/traitlets/config/loader.py#L178-L180
|
||
OpenTSDB/tcollector
|
37ae920d83c1002da66b5201a5311b1714cb5c14
|
collectors/0/haproxy.py
|
python
|
find_sock_file
|
(conf_file)
|
Returns the unix socket file of haproxy.
|
Returns the unix socket file of haproxy.
|
[
"Returns",
"the",
"unix",
"socket",
"file",
"of",
"haproxy",
"."
] |
def find_sock_file(conf_file):
"""Returns the unix socket file of haproxy."""
try:
fd = open(conf_file)
except IOError as e:
utils.err("Error: %s. Config file path is relative: %s" % (e, conf_file))
return None
try:
for line in fd:
if line.lstrip(" \t").startswith("stats socket"):
sock_file = line.split()[2]
if utils.is_sockfile(sock_file):
return sock_file
finally:
fd.close()
|
[
"def",
"find_sock_file",
"(",
"conf_file",
")",
":",
"try",
":",
"fd",
"=",
"open",
"(",
"conf_file",
")",
"except",
"IOError",
"as",
"e",
":",
"utils",
".",
"err",
"(",
"\"Error: %s. Config file path is relative: %s\"",
"%",
"(",
"e",
",",
"conf_file",
")",
")",
"return",
"None",
"try",
":",
"for",
"line",
"in",
"fd",
":",
"if",
"line",
".",
"lstrip",
"(",
"\" \\t\"",
")",
".",
"startswith",
"(",
"\"stats socket\"",
")",
":",
"sock_file",
"=",
"line",
".",
"split",
"(",
")",
"[",
"2",
"]",
"if",
"utils",
".",
"is_sockfile",
"(",
"sock_file",
")",
":",
"return",
"sock_file",
"finally",
":",
"fd",
".",
"close",
"(",
")"
] |
https://github.com/OpenTSDB/tcollector/blob/37ae920d83c1002da66b5201a5311b1714cb5c14/collectors/0/haproxy.py#L110-L124
|
||
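A self-contained check of the parsing logic above against an in-memory config; the utils.is_sockfile existence test is omitted here:

import io

conf = io.StringIO(
    "global\n"
    "    stats socket /var/run/haproxy.sock mode 600\n"
)
for line in conf:
    if line.lstrip(" \t").startswith("stats socket"):
        print(line.split()[2])  # /var/run/haproxy.sock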
WerWolv/EdiZon_CheatsConfigsAndScripts
|
d16d36c7509c01dca770f402babd83ff2e9ae6e7
|
Scripts/lib/python3.5/code.py
|
python
|
interact
|
(banner=None, readfunc=None, local=None)
|
Closely emulate the interactive Python interpreter.
This is a backwards compatible interface to the InteractiveConsole
class. When readfunc is not specified, it attempts to import the
readline module to enable GNU readline if it is available.
Arguments (all optional, all default to None):
banner -- passed to InteractiveConsole.interact()
readfunc -- if not None, replaces InteractiveConsole.raw_input()
local -- passed to InteractiveInterpreter.__init__()
|
Closely emulate the interactive Python interpreter.
|
[
"Closely",
"emulate",
"the",
"interactive",
"Python",
"interpreter",
"."
] |
def interact(banner=None, readfunc=None, local=None):
"""Closely emulate the interactive Python interpreter.
This is a backwards compatible interface to the InteractiveConsole
class. When readfunc is not specified, it attempts to import the
readline module to enable GNU readline if it is available.
Arguments (all optional, all default to None):
banner -- passed to InteractiveConsole.interact()
readfunc -- if not None, replaces InteractiveConsole.raw_input()
local -- passed to InteractiveInterpreter.__init__()
"""
console = InteractiveConsole(local)
if readfunc is not None:
console.raw_input = readfunc
else:
try:
import readline
except ImportError:
pass
console.interact(banner)
|
[
"def",
"interact",
"(",
"banner",
"=",
"None",
",",
"readfunc",
"=",
"None",
",",
"local",
"=",
"None",
")",
":",
"console",
"=",
"InteractiveConsole",
"(",
"local",
")",
"if",
"readfunc",
"is",
"not",
"None",
":",
"console",
".",
"raw_input",
"=",
"readfunc",
"else",
":",
"try",
":",
"import",
"readline",
"except",
"ImportError",
":",
"pass",
"console",
".",
"interact",
"(",
"banner",
")"
] |
https://github.com/WerWolv/EdiZon_CheatsConfigsAndScripts/blob/d16d36c7509c01dca770f402babd83ff2e9ae6e7/Scripts/lib/python3.5/code.py#L270-L292
|
||
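A scripted, non-blocking use of interact() above: readfunc feeds two lines of input and then signals EOF, so the snippet runs without a terminal:

import code

lines = iter(["x = 21 * 2", "print(x)"])

def feed(prompt):
    try:
        return next(lines)
    except StopIteration:
        raise EOFError

code.interact(banner="scripted session", readfunc=feed, local={})  # prints 42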
CvvT/dumpDex
|
92ab3b7e996194a06bf1dd5538a4954e8a5ee9c1
|
python/idaapi.py
|
python
|
is_type_bool
|
(*args)
|
return _idaapi.is_type_bool(*args)
|
is_type_bool(t) -> bool
|
is_type_bool(t) -> bool
|
[
"is_type_bool",
"(",
"t",
")",
"-",
">",
"bool"
] |
def is_type_bool(*args):
"""
is_type_bool(t) -> bool
"""
return _idaapi.is_type_bool(*args)
|
[
"def",
"is_type_bool",
"(",
"*",
"args",
")",
":",
"return",
"_idaapi",
".",
"is_type_bool",
"(",
"*",
"args",
")"
] |
https://github.com/CvvT/dumpDex/blob/92ab3b7e996194a06bf1dd5538a4954e8a5ee9c1/python/idaapi.py#L28508-L28512
|
|
edwardlib/observations
|
2c8b1ac31025938cb17762e540f2f592e302d5de
|
observations/r/ant111b.py
|
python
|
ant111b
|
(path)
|
return x_train, metadata
|
Averages by block of corn yields, for treatment 111 only
These data frames have averages by blocks (parcels) for the treatment
`111`.
A data frame with 36 observations on 9 variables.
site
a factor with levels (`ant111b`:) `DBAN` `LFAN` `NSAN`
`ORAN` `OVAN` `TEAN` `WEAN` `WLAN`
parcel
a factor with levels `I` `II` `III` `IV`
code
a numeric vector
island
a numeric vector
id
a numeric vector
plot
a numeric vector
trt
a numeric vector
ears
a numeric vector
harvwt
a numeric vector
Andrews DF; Herzberg AM, 1985. Data. A Collection of Problems from Many
Fields for the Student and Research Worker. Springer-Verlag. (pp.
Args:
path: str.
Path to directory which either stores file or otherwise file will
be downloaded and extracted there.
Filename is `ant111b.csv`.
Returns:
Tuple of np.ndarray `x_train` with 32 rows and 9 columns and
dictionary `metadata` of column headers (feature names).
|
Averages by block of corn yields, for treatment 111 only
|
[
"Averages",
"by",
"block",
"of",
"corn",
"yields",
"for",
"treatment",
"111",
"only"
] |
def ant111b(path):
"""Averages by block of corn yields, for treatment 111 only
These data frames have averages by blocks (parcels) for the treatment
`111`.
A data frame with 36 observations on 9 variables.
site
a factor with levels (`ant111b`:) `DBAN` `LFAN` `NSAN`
`ORAN` `OVAN` `TEAN` `WEAN` `WLAN`
parcel
a factor with levels `I` `II` `III` `IV`
code
a numeric vector
island
a numeric vector
id
a numeric vector
plot
a numeric vector
trt
a numeric vector
ears
a numeric vector
harvwt
a numeric vector
Andrews DF; Herzberg AM, 1985. Data. A Collection of Problems from Many
Fields for the Student and Research Worker. Springer-Verlag. (pp.
Args:
path: str.
Path to directory which either stores file or otherwise file will
be downloaded and extracted there.
Filename is `ant111b.csv`.
Returns:
Tuple of np.ndarray `x_train` with 32 rows and 9 columns and
dictionary `metadata` of column headers (feature names).
"""
import pandas as pd
path = os.path.expanduser(path)
filename = 'ant111b.csv'
if not os.path.exists(os.path.join(path, filename)):
url = 'http://dustintran.com/data/r/DAAG/ant111b.csv'
maybe_download_and_extract(path, url,
save_file_name='ant111b.csv',
resume=False)
data = pd.read_csv(os.path.join(path, filename), index_col=0,
parse_dates=True)
x_train = data.values
metadata = {'columns': data.columns}
return x_train, metadata
|
[
"def",
"ant111b",
"(",
"path",
")",
":",
"import",
"pandas",
"as",
"pd",
"path",
"=",
"os",
".",
"path",
".",
"expanduser",
"(",
"path",
")",
"filename",
"=",
"'ant111b.csv'",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"filename",
")",
")",
":",
"url",
"=",
"'http://dustintran.com/data/r/DAAG/ant111b.csv'",
"maybe_download_and_extract",
"(",
"path",
",",
"url",
",",
"save_file_name",
"=",
"'ant111b.csv'",
",",
"resume",
"=",
"False",
")",
"data",
"=",
"pd",
".",
"read_csv",
"(",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"filename",
")",
",",
"index_col",
"=",
"0",
",",
"parse_dates",
"=",
"True",
")",
"x_train",
"=",
"data",
".",
"values",
"metadata",
"=",
"{",
"'columns'",
":",
"data",
".",
"columns",
"}",
"return",
"x_train",
",",
"metadata"
] |
https://github.com/edwardlib/observations/blob/2c8b1ac31025938cb17762e540f2f592e302d5de/observations/r/ant111b.py#L14-L78
|
|
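A hedged usage sketch for the loader above; the import path is assumed from the file location shown, the target directory is an example, and the CSV is downloaded on first call:

from observations.r.ant111b import ant111b

x_train, metadata = ant111b('~/data')
print(x_train.shape)           # rows x 9 columns, per the docstring above
print(metadata['columns'])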
selfteaching/selfteaching-python-camp
|
9982ee964b984595e7d664b07c389cddaf158f1e
|
19100104/zqiwj/d3_exercise_calculator .py
|
python
|
divide
|
(x, y)
|
return x / y
|
相除 (divide)
|
相除 (divide)
|
[
"相除"
] |
def divide(x, y):
"""相除"""
return x / y
|
[
"def",
"divide",
"(",
"x",
",",
"y",
")",
":",
"return",
"x",
"/",
"y"
] |
https://github.com/selfteaching/selfteaching-python-camp/blob/9982ee964b984595e7d664b07c389cddaf158f1e/19100104/zqiwj/d3_exercise_calculator .py#L17-L20
|
|
home-assistant/core
|
265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1
|
homeassistant/components/imap_email_content/sensor.py
|
python
|
EmailContentSensor.__init__
|
(self, hass, email_reader, name, allowed_senders, value_template)
|
Initialize the sensor.
|
Initialize the sensor.
|
[
"Initialize",
"the",
"sensor",
"."
] |
def __init__(self, hass, email_reader, name, allowed_senders, value_template):
"""Initialize the sensor."""
self.hass = hass
self._email_reader = email_reader
self._name = name
self._allowed_senders = [sender.upper() for sender in allowed_senders]
self._value_template = value_template
self._last_id = None
self._message = None
self._state_attributes = None
self.connected = self._email_reader.connect()
|
[
"def",
"__init__",
"(",
"self",
",",
"hass",
",",
"email_reader",
",",
"name",
",",
"allowed_senders",
",",
"value_template",
")",
":",
"self",
".",
"hass",
"=",
"hass",
"self",
".",
"_email_reader",
"=",
"email_reader",
"self",
".",
"_name",
"=",
"name",
"self",
".",
"_allowed_senders",
"=",
"[",
"sender",
".",
"upper",
"(",
")",
"for",
"sender",
"in",
"allowed_senders",
"]",
"self",
".",
"_value_template",
"=",
"value_template",
"self",
".",
"_last_id",
"=",
"None",
"self",
".",
"_message",
"=",
"None",
"self",
".",
"_state_attributes",
"=",
"None",
"self",
".",
"connected",
"=",
"self",
".",
"_email_reader",
".",
"connect",
"(",
")"
] |
https://github.com/home-assistant/core/blob/265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1/homeassistant/components/imap_email_content/sensor.py#L157-L167
|
||
sagemath/sage
|
f9b2db94f675ff16963ccdefba4f1a3393b3fe0d
|
src/sage/manifolds/differentiable/de_rham_cohomology.py
|
python
|
DeRhamCohomologyClass._latex_
|
(self)
|
return rf"\left[{latex_name}\right]"
|
r"""
Return a LaTeX representation of the object.
TESTS::
sage: M = Manifold(2, 'M', latex_name=r'\mathcal{M}')
sage: X.<x,y> = M.chart()
sage: C = M.de_rham_complex()
sage: H = C.cohomology()
sage: omega = M.diff_form(1, [1,1], name='omega', latex_name=r'\omega')
sage: u = H(omega)
sage: latex(u) # indirect doctest
\left[\omega\right]
sage: u._latex_()
'\\left[\\omega\\right]'
|
r"""
Return a LaTeX representation of the object.
|
[
"r",
"Return",
"a",
"LaTeX",
"representation",
"of",
"the",
"object",
"."
] |
def _latex_(self):
r"""
Return a LaTeX representation of the object.
TESTS::
sage: M = Manifold(2, 'M', latex_name=r'\mathcal{M}')
sage: X.<x,y> = M.chart()
sage: C = M.de_rham_complex()
sage: H = C.cohomology()
sage: omega = M.diff_form(1, [1,1], name='omega', latex_name=r'\omega')
sage: u = H(omega)
sage: latex(u) # indirect doctest
\left[\omega\right]
sage: u._latex_()
'\\left[\\omega\\right]'
"""
latex_name = self._representative._latex_name
if latex_name is None:
latex_name = r'\mathrm{unnamed form}'
return rf"\left[{latex_name}\right]"
|
[
"def",
"_latex_",
"(",
"self",
")",
":",
"latex_name",
"=",
"self",
".",
"_representative",
".",
"_latex_name",
"if",
"latex_name",
"is",
"None",
":",
"latex_name",
"=",
"r'\\mathrm{unnamed form}'",
"return",
"rf\"\\left[{latex_name}\\right]\""
] |
https://github.com/sagemath/sage/blob/f9b2db94f675ff16963ccdefba4f1a3393b3fe0d/src/sage/manifolds/differentiable/de_rham_cohomology.py#L139-L160
|
|
Komodo/KomodoEdit
|
61edab75dce2bdb03943b387b0608ea36f548e8e
|
src/codeintel/play/core.py
|
python
|
MouseEvent.GetX
|
(*args, **kwargs)
|
return _core.MouseEvent_GetX(*args, **kwargs)
|
GetX() -> int
|
GetX() -> int
|
[
"GetX",
"()",
"-",
">",
"int"
] |
def GetX(*args, **kwargs):
"""GetX() -> int"""
return _core.MouseEvent_GetX(*args, **kwargs)
|
[
"def",
"GetX",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_core",
".",
"MouseEvent_GetX",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/Komodo/KomodoEdit/blob/61edab75dce2bdb03943b387b0608ea36f548e8e/src/codeintel/play/core.py#L3417-L3419
|
|
NVlabs/neuralrgbd
|
c8071a0bcbd4c4e7ef95c44e7de9c51353ab9764
|
code/mutils/misc.py
|
python
|
indexMap2DMap
|
(d_range, indx_map)
|
return np.reshape(DMap, indx_map.shape)
|
[] |
def indexMap2DMap(d_range, indx_map):
indx_map_flat = indx_map.flatten()
DMap = [ d_range[indx_] for indx_ in indx_map_flat]
return np.reshape(DMap, indx_map.shape)
|
[
"def",
"indexMap2DMap",
"(",
"d_range",
",",
"indx_map",
")",
":",
"indx_map_flat",
"=",
"indx_map",
".",
"flatten",
"(",
")",
"DMap",
"=",
"[",
"d_range",
"[",
"indx_",
"]",
"for",
"indx_",
"in",
"indx_map_flat",
"]",
"return",
"np",
".",
"reshape",
"(",
"DMap",
",",
"indx_map",
".",
"shape",
")"
] |
https://github.com/NVlabs/neuralrgbd/blob/c8071a0bcbd4c4e7ef95c44e7de9c51353ab9764/code/mutils/misc.py#L231-L234
|
|||
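A self-contained example of the lookup-and-reshape above, mapping per-pixel integer indices to the depth values they select; the candidate depths are invented:

import numpy as np

def indexMap2DMap(d_range, indx_map):
    indx_map_flat = indx_map.flatten()
    DMap = [d_range[indx_] for indx_ in indx_map_flat]
    return np.reshape(DMap, indx_map.shape)

d_range = np.array([0.5, 1.0, 2.0, 4.0])     # candidate depth hypotheses
indx_map = np.array([[0, 3], [2, 1]])        # per-pixel argmax indices
print(indexMap2DMap(d_range, indx_map))      # [[0.5 4. ] [2.  1. ]]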
biolab/orange2
|
db40a9449cb45b507d63dcd5739b223f9cffb8e6
|
Orange/orng/orngCA.py
|
python
|
CA.getMatrix
|
(self)
|
return self.__dataMatrix
|
Returns array object that is representation of contingency table.
|
Returns array object that is representation of contingency table.
|
[
"Returns",
"array",
"object",
"that",
"is",
"representation",
"of",
"contingency",
"table",
"."
] |
def getMatrix(self):
"""
Returns array object that is representation of contingency table.
"""
return self.__dataMatrix
|
[
"def",
"getMatrix",
"(",
"self",
")",
":",
"return",
"self",
".",
"__dataMatrix"
] |
https://github.com/biolab/orange2/blob/db40a9449cb45b507d63dcd5739b223f9cffb8e6/Orange/orng/orngCA.py#L80-L84
|
|
ARMmbed/yotta
|
82d854b43d391abb5a006b05e7beffe7d0d6ffbf
|
yotta/lib/cmakegen.py
|
python
|
CMakeGen.generateRecursive
|
(self, component, all_components, builddir=None, modbuilddir=None, processed_components=None, application=None)
|
generate top-level CMakeLists for this component and its
dependencies: the CMakeLists are all generated in self.buildroot,
which MUST be out-of-source
!!! NOTE: experimenting with a slightly different way of doing
things here, this function is a generator that yields any errors
produced, so the correct use is:
for error in gen.generateRecursive(...):
print(error)
|
generate top-level CMakeLists for this component and its
dependencies: the CMakeLists are all generated in self.buildroot,
which MUST be out-of-source
|
[
"generate",
"top",
"-",
"level",
"CMakeLists",
"for",
"this",
"component",
"and",
"its",
"dependencies",
":",
"the",
"CMakeLists",
"are",
"all",
"generated",
"in",
"self",
".",
"buildroot",
"which",
"MUST",
"be",
"out",
"-",
"of",
"-",
"source"
] |
def generateRecursive(self, component, all_components, builddir=None, modbuilddir=None, processed_components=None, application=None):
''' generate top-level CMakeLists for this component and its
dependencies: the CMakeLists are all generated in self.buildroot,
which MUST be out-of-source
!!! NOTE: experimenting with a slightly different way of doing
things here, this function is a generator that yields any errors
produced, so the correct use is:
for error in gen.generateRecursive(...):
print(error)
'''
assert(self.configured)
if builddir is None:
builddir = self.buildroot
if modbuilddir is None:
modbuilddir = os.path.join(builddir, 'ym')
if processed_components is None:
processed_components = dict()
if not self.target:
yield 'Target "%s" is not a valid build target' % self.target
toplevel = not len(processed_components)
logger.debug('generate build files: %s (target=%s)' % (component, self.target))
# because of the way c-family language includes work we need to put the
# public header directories of all components that this component
# depends on (directly OR indirectly) into the search path, which means
# we need to first enumerate all the direct and indirect dependencies
recursive_deps = component.getDependenciesRecursive(
available_components = all_components,
target = self.target,
available_only = True,
test = True
)
dependencies = component.getDependencies(
all_components,
target = self.target,
available_only = True,
test = True
)
for name, dep in dependencies.items():
# if dep is a test dependency, then it might not be required (if
# we're not building tests). We don't actually know at this point
if not dep:
if dep.isTestDependency():
logger.debug('Test dependency "%s" of "%s" is not installed.' % (name, component))
else:
yield 'Required dependency "%s" of "%s" is not installed.' % (name, component)
# ensure this component is assumed to have been installed before we
# check for its dependencies, in case it has a circular dependency on
# itself
processed_components[component.getName()] = component
new_dependencies = OrderedDict([(name,c) for name,c in dependencies.items() if c and not name in processed_components])
self.generate(builddir, modbuilddir, component, new_dependencies, dependencies, recursive_deps, application, toplevel)
logger.debug('recursive deps of %s:' % component)
for d in recursive_deps.values():
logger.debug(' %s' % d)
processed_components.update(new_dependencies)
for name, c in new_dependencies.items():
for error in self.generateRecursive(
c, all_components, os.path.join(modbuilddir, name), modbuilddir, processed_components, application=application
):
yield error
|
[
"def",
"generateRecursive",
"(",
"self",
",",
"component",
",",
"all_components",
",",
"builddir",
"=",
"None",
",",
"modbuilddir",
"=",
"None",
",",
"processed_components",
"=",
"None",
",",
"application",
"=",
"None",
")",
":",
"assert",
"(",
"self",
".",
"configured",
")",
"if",
"builddir",
"is",
"None",
":",
"builddir",
"=",
"self",
".",
"buildroot",
"if",
"modbuilddir",
"is",
"None",
":",
"modbuilddir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"builddir",
",",
"'ym'",
")",
"if",
"processed_components",
"is",
"None",
":",
"processed_components",
"=",
"dict",
"(",
")",
"if",
"not",
"self",
".",
"target",
":",
"yield",
"'Target \"%s\" is not a valid build target'",
"%",
"self",
".",
"target",
"toplevel",
"=",
"not",
"len",
"(",
"processed_components",
")",
"logger",
".",
"debug",
"(",
"'generate build files: %s (target=%s)'",
"%",
"(",
"component",
",",
"self",
".",
"target",
")",
")",
"# because of the way c-family language includes work we need to put the",
"# public header directories of all components that this component",
"# depends on (directly OR indirectly) into the search path, which means",
"# we need to first enumerate all the direct and indirect dependencies",
"recursive_deps",
"=",
"component",
".",
"getDependenciesRecursive",
"(",
"available_components",
"=",
"all_components",
",",
"target",
"=",
"self",
".",
"target",
",",
"available_only",
"=",
"True",
",",
"test",
"=",
"True",
")",
"dependencies",
"=",
"component",
".",
"getDependencies",
"(",
"all_components",
",",
"target",
"=",
"self",
".",
"target",
",",
"available_only",
"=",
"True",
",",
"test",
"=",
"True",
")",
"for",
"name",
",",
"dep",
"in",
"dependencies",
".",
"items",
"(",
")",
":",
"# if dep is a test dependency, then it might not be required (if",
"# we're not building tests). We don't actually know at this point",
"if",
"not",
"dep",
":",
"if",
"dep",
".",
"isTestDependency",
"(",
")",
":",
"logger",
".",
"debug",
"(",
"'Test dependency \"%s\" of \"%s\" is not installed.'",
"%",
"(",
"name",
",",
"component",
")",
")",
"else",
":",
"yield",
"'Required dependency \"%s\" of \"%s\" is not installed.'",
"%",
"(",
"name",
",",
"component",
")",
"# ensure this component is assumed to have been installed before we",
"# check for its dependencies, in case it has a circular dependency on",
"# itself",
"processed_components",
"[",
"component",
".",
"getName",
"(",
")",
"]",
"=",
"component",
"new_dependencies",
"=",
"OrderedDict",
"(",
"[",
"(",
"name",
",",
"c",
")",
"for",
"name",
",",
"c",
"in",
"dependencies",
".",
"items",
"(",
")",
"if",
"c",
"and",
"not",
"name",
"in",
"processed_components",
"]",
")",
"self",
".",
"generate",
"(",
"builddir",
",",
"modbuilddir",
",",
"component",
",",
"new_dependencies",
",",
"dependencies",
",",
"recursive_deps",
",",
"application",
",",
"toplevel",
")",
"logger",
".",
"debug",
"(",
"'recursive deps of %s:'",
"%",
"component",
")",
"for",
"d",
"in",
"recursive_deps",
".",
"values",
"(",
")",
":",
"logger",
".",
"debug",
"(",
"' %s'",
"%",
"d",
")",
"processed_components",
".",
"update",
"(",
"new_dependencies",
")",
"for",
"name",
",",
"c",
"in",
"new_dependencies",
".",
"items",
"(",
")",
":",
"for",
"error",
"in",
"self",
".",
"generateRecursive",
"(",
"c",
",",
"all_components",
",",
"os",
".",
"path",
".",
"join",
"(",
"modbuilddir",
",",
"name",
")",
",",
"modbuilddir",
",",
"processed_components",
",",
"application",
"=",
"application",
")",
":",
"yield",
"error"
] |
https://github.com/ARMmbed/yotta/blob/82d854b43d391abb5a006b05e7beffe7d0d6ffbf/yotta/lib/cmakegen.py#L98-L166
|
||
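A generic sketch of the yield-errors pattern the docstring above describes: callers iterate the generator and decide how to report failures; the component names are invented:

def check_components(components):
    for name, installed in components.items():
        if not installed:
            yield 'Required dependency "%s" is not installed.' % name

for error in check_components({"mbed-hal": True, "ym-core": False}):
    print(error)   # Required dependency "ym-core" is not installed.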
sfepy/sfepy
|
02ec7bb2ab39ee1dfe1eb4cd509f0ffb7dcc8b25
|
sfepy/discrete/projections.py
|
python
|
create_mass_matrix
|
(field)
|
return mtx
|
Create scalar mass matrix corresponding to the given field.
Returns
-------
mtx : csr_matrix
The mass matrix in CSR format.
|
Create scalar mass matrix corresponding to the given field.
|
[
"Create",
"scalar",
"mass",
"matrix",
"corresponding",
"to",
"the",
"given",
"field",
"."
] |
def create_mass_matrix(field):
"""
Create scalar mass matrix corresponding to the given field.
Returns
-------
mtx : csr_matrix
The mass matrix in CSR format.
"""
u = FieldVariable('u', 'unknown', field)
v = FieldVariable('v', 'test', field, primary_var_name='u')
integral = Integral('i', order=field.approx_order * 2)
term = Term.new('dw_dot(v, u)', integral, field.region, v=v, u=u)
eq = Equation('aux', term)
eqs = Equations([eq])
eqs.time_update(None)
dummy = eqs.create_state_vector()
mtx = eqs.create_matrix_graph()
mtx = eqs.eval_tangent_matrices(dummy, mtx)
return mtx
|
[
"def",
"create_mass_matrix",
"(",
"field",
")",
":",
"u",
"=",
"FieldVariable",
"(",
"'u'",
",",
"'unknown'",
",",
"field",
")",
"v",
"=",
"FieldVariable",
"(",
"'v'",
",",
"'test'",
",",
"field",
",",
"primary_var_name",
"=",
"'u'",
")",
"integral",
"=",
"Integral",
"(",
"'i'",
",",
"order",
"=",
"field",
".",
"approx_order",
"*",
"2",
")",
"term",
"=",
"Term",
".",
"new",
"(",
"'dw_dot(v, u)'",
",",
"integral",
",",
"field",
".",
"region",
",",
"v",
"=",
"v",
",",
"u",
"=",
"u",
")",
"eq",
"=",
"Equation",
"(",
"'aux'",
",",
"term",
")",
"eqs",
"=",
"Equations",
"(",
"[",
"eq",
"]",
")",
"eqs",
".",
"time_update",
"(",
"None",
")",
"dummy",
"=",
"eqs",
".",
"create_state_vector",
"(",
")",
"mtx",
"=",
"eqs",
".",
"create_matrix_graph",
"(",
")",
"mtx",
"=",
"eqs",
".",
"eval_tangent_matrices",
"(",
"dummy",
",",
"mtx",
")",
"return",
"mtx"
] |
https://github.com/sfepy/sfepy/blob/02ec7bb2ab39ee1dfe1eb4cd509f0ffb7dcc8b25/sfepy/discrete/projections.py#L16-L39
|
|
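A generic, sfepy-free illustration of what the routine above produces, a scalar mass matrix in CSR format, here assembled for 1D linear elements (local block h/6 * [[2, 1], [1, 2]]); the mesh is invented:

import numpy as np
import scipy.sparse as sp

n_el, h = 4, 0.25                            # 4 equal elements on [0, 1]
local = (h / 6.0) * np.array([[2.0, 1.0], [1.0, 2.0]])
rows, cols, vals = [], [], []
for e in range(n_el):
    dofs = [e, e + 1]                        # the element's two nodes
    for i in range(2):
        for j in range(2):
            rows.append(dofs[i])
            cols.append(dofs[j])
            vals.append(local[i, j])
# Duplicate (row, col) entries are summed during assembly.
mtx = sp.csr_matrix((vals, (rows, cols)), shape=(n_el + 1, n_el + 1))
print(mtx.toarray().sum())                   # 1.0, the measure of the domain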
macanv/BERT-BiLSTM-CRF-NER
|
ccf3f093f0ac803e435cb8e8598fdddc2ba1105d
|
bert_base/bert/create_pretraining_data.py
|
python
|
create_instances_from_document
|
(all_documents, document_index, max_seq_length, short_seq_prob, masked_lm_prob, max_predictions_per_seq, vocab_words, rng)
|
return instances
|
Creates `TrainingInstance`s for a single document.
|
Creates `TrainingInstance`s for a single document.
|
[
"Creates",
"TrainingInstance",
"s",
"for",
"a",
"single",
"document",
"."
] |
def create_instances_from_document(
all_documents, document_index, max_seq_length, short_seq_prob,
masked_lm_prob, max_predictions_per_seq, vocab_words, rng):
"""Creates `TrainingInstance`s for a single document."""
document = all_documents[document_index]
# Account for [CLS], [SEP], [SEP]
max_num_tokens = max_seq_length - 3
# We *usually* want to fill up the entire sequence since we are padding
# to `max_seq_length` anyways, so short sequences are generally wasted
# computation. However, we *sometimes*
# (i.e., short_seq_prob == 0.1 == 10% of the time) want to use shorter
# sequences to minimize the mismatch between pre-training and fine-tuning.
# The `target_seq_length` is just a rough target however, whereas
# `max_seq_length` is a hard limit.
target_seq_length = max_num_tokens
if rng.random() < short_seq_prob:
target_seq_length = rng.randint(2, max_num_tokens)
# We DON'T just concatenate all of the tokens from a document into a long
# sequence and choose an arbitrary split point because this would make the
# next sentence prediction task too easy. Instead, we split the input into
# segments "A" and "B" based on the actual "sentences" provided by the user
# input.
instances = []
current_chunk = []
current_length = 0
i = 0
while i < len(document):
segment = document[i]
current_chunk.append(segment)
current_length += len(segment)
if i == len(document) - 1 or current_length >= target_seq_length:
if current_chunk:
# `a_end` is how many segments from `current_chunk` go into the `A`
# (first) sentence.
a_end = 1
if len(current_chunk) >= 2:
a_end = rng.randint(1, len(current_chunk) - 1)
tokens_a = []
for j in range(a_end):
tokens_a.extend(current_chunk[j])
tokens_b = []
# Random next
is_random_next = False
if len(current_chunk) == 1 or rng.random() < 0.5:
is_random_next = True
target_b_length = target_seq_length - len(tokens_a)
# This should rarely go for more than one iteration for large
# corpora. However, just to be careful, we try to make sure that
# the random document is not the same as the document
# we're processing.
for _ in range(10):
random_document_index = rng.randint(0, len(all_documents) - 1)
if random_document_index != document_index:
break
random_document = all_documents[random_document_index]
random_start = rng.randint(0, len(random_document) - 1)
for j in range(random_start, len(random_document)):
tokens_b.extend(random_document[j])
if len(tokens_b) >= target_b_length:
break
# We didn't actually use these segments so we "put them back" so
# they don't go to waste.
num_unused_segments = len(current_chunk) - a_end
i -= num_unused_segments
# Actual next
else:
is_random_next = False
for j in range(a_end, len(current_chunk)):
tokens_b.extend(current_chunk[j])
truncate_seq_pair(tokens_a, tokens_b, max_num_tokens, rng)
assert len(tokens_a) >= 1
assert len(tokens_b) >= 1
tokens = []
segment_ids = []
tokens.append("[CLS]")
segment_ids.append(0)
for token in tokens_a:
tokens.append(token)
segment_ids.append(0)
tokens.append("[SEP]")
segment_ids.append(0)
for token in tokens_b:
tokens.append(token)
segment_ids.append(1)
tokens.append("[SEP]")
segment_ids.append(1)
(tokens, masked_lm_positions,
masked_lm_labels) = create_masked_lm_predictions(
tokens, masked_lm_prob, max_predictions_per_seq, vocab_words, rng)
instance = TrainingInstance(
tokens=tokens,
segment_ids=segment_ids,
is_random_next=is_random_next,
masked_lm_positions=masked_lm_positions,
masked_lm_labels=masked_lm_labels)
instances.append(instance)
current_chunk = []
current_length = 0
i += 1
return instances
|
[
"def",
"create_instances_from_document",
"(",
"all_documents",
",",
"document_index",
",",
"max_seq_length",
",",
"short_seq_prob",
",",
"masked_lm_prob",
",",
"max_predictions_per_seq",
",",
"vocab_words",
",",
"rng",
")",
":",
"document",
"=",
"all_documents",
"[",
"document_index",
"]",
"# Account for [CLS], [SEP], [SEP]",
"max_num_tokens",
"=",
"max_seq_length",
"-",
"3",
"# We *usually* want to fill up the entire sequence since we are padding",
"# to `max_seq_length` anyways, so short sequences are generally wasted",
"# computation. However, we *sometimes*",
"# (i.e., short_seq_prob == 0.1 == 10% of the time) want to use shorter",
"# sequences to minimize the mismatch between pre-training and fine-tuning.",
"# The `target_seq_length` is just a rough target however, whereas",
"# `max_seq_length` is a hard limit.",
"target_seq_length",
"=",
"max_num_tokens",
"if",
"rng",
".",
"random",
"(",
")",
"<",
"short_seq_prob",
":",
"target_seq_length",
"=",
"rng",
".",
"randint",
"(",
"2",
",",
"max_num_tokens",
")",
"# We DON'T just concatenate all of the tokens from a document into a long",
"# sequence and choose an arbitrary split point because this would make the",
"# next sentence prediction task too easy. Instead, we split the input into",
"# segments \"A\" and \"B\" based on the actual \"sentences\" provided by the user",
"# input.",
"instances",
"=",
"[",
"]",
"current_chunk",
"=",
"[",
"]",
"current_length",
"=",
"0",
"i",
"=",
"0",
"while",
"i",
"<",
"len",
"(",
"document",
")",
":",
"segment",
"=",
"document",
"[",
"i",
"]",
"current_chunk",
".",
"append",
"(",
"segment",
")",
"current_length",
"+=",
"len",
"(",
"segment",
")",
"if",
"i",
"==",
"len",
"(",
"document",
")",
"-",
"1",
"or",
"current_length",
">=",
"target_seq_length",
":",
"if",
"current_chunk",
":",
"# `a_end` is how many segments from `current_chunk` go into the `A`",
"# (first) sentence.",
"a_end",
"=",
"1",
"if",
"len",
"(",
"current_chunk",
")",
">=",
"2",
":",
"a_end",
"=",
"rng",
".",
"randint",
"(",
"1",
",",
"len",
"(",
"current_chunk",
")",
"-",
"1",
")",
"tokens_a",
"=",
"[",
"]",
"for",
"j",
"in",
"range",
"(",
"a_end",
")",
":",
"tokens_a",
".",
"extend",
"(",
"current_chunk",
"[",
"j",
"]",
")",
"tokens_b",
"=",
"[",
"]",
"# Random next",
"is_random_next",
"=",
"False",
"if",
"len",
"(",
"current_chunk",
")",
"==",
"1",
"or",
"rng",
".",
"random",
"(",
")",
"<",
"0.5",
":",
"is_random_next",
"=",
"True",
"target_b_length",
"=",
"target_seq_length",
"-",
"len",
"(",
"tokens_a",
")",
"# This should rarely go for more than one iteration for large",
"# corpora. However, just to be careful, we try to make sure that",
"# the random document is not the same as the document",
"# we're processing.",
"for",
"_",
"in",
"range",
"(",
"10",
")",
":",
"random_document_index",
"=",
"rng",
".",
"randint",
"(",
"0",
",",
"len",
"(",
"all_documents",
")",
"-",
"1",
")",
"if",
"random_document_index",
"!=",
"document_index",
":",
"break",
"random_document",
"=",
"all_documents",
"[",
"random_document_index",
"]",
"random_start",
"=",
"rng",
".",
"randint",
"(",
"0",
",",
"len",
"(",
"random_document",
")",
"-",
"1",
")",
"for",
"j",
"in",
"range",
"(",
"random_start",
",",
"len",
"(",
"random_document",
")",
")",
":",
"tokens_b",
".",
"extend",
"(",
"random_document",
"[",
"j",
"]",
")",
"if",
"len",
"(",
"tokens_b",
")",
">=",
"target_b_length",
":",
"break",
"# We didn't actually use these segments so we \"put them back\" so",
"# they don't go to waste.",
"num_unused_segments",
"=",
"len",
"(",
"current_chunk",
")",
"-",
"a_end",
"i",
"-=",
"num_unused_segments",
"# Actual next",
"else",
":",
"is_random_next",
"=",
"False",
"for",
"j",
"in",
"range",
"(",
"a_end",
",",
"len",
"(",
"current_chunk",
")",
")",
":",
"tokens_b",
".",
"extend",
"(",
"current_chunk",
"[",
"j",
"]",
")",
"truncate_seq_pair",
"(",
"tokens_a",
",",
"tokens_b",
",",
"max_num_tokens",
",",
"rng",
")",
"assert",
"len",
"(",
"tokens_a",
")",
">=",
"1",
"assert",
"len",
"(",
"tokens_b",
")",
">=",
"1",
"tokens",
"=",
"[",
"]",
"segment_ids",
"=",
"[",
"]",
"tokens",
".",
"append",
"(",
"\"[CLS]\"",
")",
"segment_ids",
".",
"append",
"(",
"0",
")",
"for",
"token",
"in",
"tokens_a",
":",
"tokens",
".",
"append",
"(",
"token",
")",
"segment_ids",
".",
"append",
"(",
"0",
")",
"tokens",
".",
"append",
"(",
"\"[SEP]\"",
")",
"segment_ids",
".",
"append",
"(",
"0",
")",
"for",
"token",
"in",
"tokens_b",
":",
"tokens",
".",
"append",
"(",
"token",
")",
"segment_ids",
".",
"append",
"(",
"1",
")",
"tokens",
".",
"append",
"(",
"\"[SEP]\"",
")",
"segment_ids",
".",
"append",
"(",
"1",
")",
"(",
"tokens",
",",
"masked_lm_positions",
",",
"masked_lm_labels",
")",
"=",
"create_masked_lm_predictions",
"(",
"tokens",
",",
"masked_lm_prob",
",",
"max_predictions_per_seq",
",",
"vocab_words",
",",
"rng",
")",
"instance",
"=",
"TrainingInstance",
"(",
"tokens",
"=",
"tokens",
",",
"segment_ids",
"=",
"segment_ids",
",",
"is_random_next",
"=",
"is_random_next",
",",
"masked_lm_positions",
"=",
"masked_lm_positions",
",",
"masked_lm_labels",
"=",
"masked_lm_labels",
")",
"instances",
".",
"append",
"(",
"instance",
")",
"current_chunk",
"=",
"[",
"]",
"current_length",
"=",
"0",
"i",
"+=",
"1",
"return",
"instances"
] |
https://github.com/macanv/BERT-BiLSTM-CRF-NER/blob/ccf3f093f0ac803e435cb8e8598fdddc2ba1105d/bert_base/bert/create_pretraining_data.py#L220-L332
|
|
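A hedged call sketch for the function above; it assumes the module's helpers (truncate_seq_pair, create_masked_lm_predictions, TrainingInstance) are in scope, and the toy corpus and hyperparameters are invented:

import random

all_documents = [
    [["the", "cat", "sat"], ["on", "the", "mat"]],   # doc 0: two "sentences"
    [["dogs", "bark"], ["at", "night"]],             # doc 1
]
vocab_words = sorted({w for doc in all_documents for seg in doc for w in seg})
rng = random.Random(12345)

instances = create_instances_from_document(
    all_documents, document_index=0, max_seq_length=16, short_seq_prob=0.1,
    masked_lm_prob=0.15, max_predictions_per_seq=3,
    vocab_words=vocab_words, rng=rng)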
GalSim-developers/GalSim
|
a05d4ec3b8d8574f99d3b0606ad882cbba53f345
|
galsim/cdmodel.py
|
python
|
_modelShiftCoeffT
|
(x, y, r0, t0, rx, tx, r, t, alpha)
|
return cc * t * rr**(-alpha)
|
Calculate the model shift coeff of top pixel border as a function of int pixel
position (x, y).
|
Calculate the model shift coeff of top pixel border as a function of int pixel
position (x, y).
|
[
"Calculate",
"the",
"model",
"shift",
"coeff",
"of",
"top",
"pixel",
"border",
"as",
"a",
"function",
"of",
"int",
"pixel",
"position",
"(",
"x",
"y",
")",
"."
] |
def _modelShiftCoeffT(x, y, r0, t0, rx, tx, r, t, alpha):
"""Calculate the model shift coeff of top pixel border as a function of int pixel
position (x, y).
"""
# Invoke symmetry
if x < 0: return _modelShiftCoeffT(-x, y, r0, t0, rx, tx, r, t, alpha)
if y < 0: return -_modelShiftCoeffT(x, 1 - y, r0, t0, rx, tx, r, t, alpha)
# Invoke special immediate neighbour cases
if x == 0 and y == 0: return -t0
if x == 0 and y == 1: return +t0
if x == 1 and y == 0: return -tx
if x == 1 and y == 1: return +tx
# Then, for remainder, apply power law model
rr = np.sqrt((float(y) - .5)**2 + float(x)**2)
cc = (y - 0.5) / rr # projection onto relevant axis
return cc * t * rr**(-alpha)
|
[
"def",
"_modelShiftCoeffT",
"(",
"x",
",",
"y",
",",
"r0",
",",
"t0",
",",
"rx",
",",
"tx",
",",
"r",
",",
"t",
",",
"alpha",
")",
":",
"# Invoke symmetry",
"if",
"x",
"<",
"0",
":",
"return",
"_modelShiftCoeffT",
"(",
"-",
"x",
",",
"y",
",",
"r0",
",",
"t0",
",",
"rx",
",",
"tx",
",",
"r",
",",
"t",
",",
"alpha",
")",
"if",
"y",
"<",
"0",
":",
"return",
"-",
"_modelShiftCoeffT",
"(",
"x",
",",
"1",
"-",
"y",
",",
"r0",
",",
"t0",
",",
"rx",
",",
"tx",
",",
"r",
",",
"t",
",",
"alpha",
")",
"# Invoke special immediate neighbour cases",
"if",
"x",
"==",
"0",
"and",
"y",
"==",
"0",
":",
"return",
"-",
"t0",
"if",
"x",
"==",
"0",
"and",
"y",
"==",
"1",
":",
"return",
"+",
"t0",
"if",
"x",
"==",
"1",
"and",
"y",
"==",
"0",
":",
"return",
"-",
"tx",
"if",
"x",
"==",
"1",
"and",
"y",
"==",
"1",
":",
"return",
"+",
"tx",
"# Then, for remainder, apply power law model",
"rr",
"=",
"np",
".",
"sqrt",
"(",
"(",
"float",
"(",
"y",
")",
"-",
".5",
")",
"**",
"2",
"+",
"float",
"(",
"x",
")",
"**",
"2",
")",
"cc",
"=",
"(",
"y",
"-",
"0.5",
")",
"/",
"rr",
"# projection onto relevant axis",
"return",
"cc",
"*",
"t",
"*",
"rr",
"**",
"(",
"-",
"alpha",
")"
] |
https://github.com/GalSim-developers/GalSim/blob/a05d4ec3b8d8574f99d3b0606ad882cbba53f345/galsim/cdmodel.py#L147-L162
|
|
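A quick numeric check of the model above, assuming the function and numpy (as np) are in scope; the parameter values are illustrative only:

params = dict(r0=0.01, t0=0.02, rx=0.005, tx=0.01, r=0.003, t=0.004, alpha=1.3)

print(_modelShiftCoeffT(0, 0, **params))   # special case: -t0 = -0.02
print(_modelShiftCoeffT(0, 1, **params))   # special case: +t0 = +0.02
# Antisymmetry in y: coeff(x, y) == -coeff(x, 1 - y)
print(_modelShiftCoeffT(3, 2, **params), -_modelShiftCoeffT(3, -1, **params))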
numenta/nupic
|
b9ebedaf54f49a33de22d8d44dff7c765cdb5548
|
external/linux32/lib/python2.6/site-packages/matplotlib/contour.py
|
python
|
ContourLabeler.print_label
|
(self, linecontour,labelwidth)
|
if contours are too short, don't plot a label
|
if contours are too short, don't plot a label
|
[
"if",
"contours",
"are",
"too",
"short",
"don",
"t",
"plot",
"a",
"label"
] |
def print_label(self, linecontour,labelwidth):
"if contours are too short, don't plot a label"
lcsize = len(linecontour)
if lcsize > 10 * labelwidth:
return 1
xmax = np.amax(linecontour[:,0])
xmin = np.amin(linecontour[:,0])
ymax = np.amax(linecontour[:,1])
ymin = np.amin(linecontour[:,1])
lw = labelwidth
if (xmax - xmin) > 1.2* lw or (ymax - ymin) > 1.2 * lw:
return 1
else:
return 0
|
[
"def",
"print_label",
"(",
"self",
",",
"linecontour",
",",
"labelwidth",
")",
":",
"lcsize",
"=",
"len",
"(",
"linecontour",
")",
"if",
"lcsize",
">",
"10",
"*",
"labelwidth",
":",
"return",
"1",
"xmax",
"=",
"np",
".",
"amax",
"(",
"linecontour",
"[",
":",
",",
"0",
"]",
")",
"xmin",
"=",
"np",
".",
"amin",
"(",
"linecontour",
"[",
":",
",",
"0",
"]",
")",
"ymax",
"=",
"np",
".",
"amax",
"(",
"linecontour",
"[",
":",
",",
"1",
"]",
")",
"ymin",
"=",
"np",
".",
"amin",
"(",
"linecontour",
"[",
":",
",",
"1",
"]",
")",
"lw",
"=",
"labelwidth",
"if",
"(",
"xmax",
"-",
"xmin",
")",
">",
"1.2",
"*",
"lw",
"or",
"(",
"ymax",
"-",
"ymin",
")",
">",
"1.2",
"*",
"lw",
":",
"return",
"1",
"else",
":",
"return",
"0"
] |
https://github.com/numenta/nupic/blob/b9ebedaf54f49a33de22d8d44dff7c765cdb5548/external/linux32/lib/python2.6/site-packages/matplotlib/contour.py#L189-L204
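The sizing heuristic above is easy to exercise in isolation; a minimal sketch assuming the contour is an (N, 2) numpy array of (x, y) points.

import numpy as np

def label_fits(linecontour, labelwidth):
    # Same decision as print_label: long contours always get a label;
    # short ones only if their bounding box exceeds 1.2x the label width.
    if len(linecontour) > 10 * labelwidth:
        return True
    spans = linecontour.max(axis=0) - linecontour.min(axis=0)
    return bool((spans > 1.2 * labelwidth).any())

contour = np.array([[0.0, 0.0], [3.0, 0.5], [6.0, 1.0]])
print(label_fits(contour, labelwidth=4))  # True: x-span 6.0 > 4.8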
|
||
asyml/texar
|
a23f021dae289a3d768dc099b220952111da04fd
|
examples/gpt-2/prepare_data.py
|
python
|
prepare_data
|
()
|
Prepares the data and produces TFRecord files.
|
Prepares the data and produces TFRecord files.
|
[
"Builds",
"the",
"model",
"and",
"runs",
"."
] |
def prepare_data():
"""
Prepares the data and produces TFRecord files.
"""
data_dir = FLAGS.data_dir
if FLAGS.tfrecord_output_dir is None:
tfrecord_output_dir = data_dir
else:
tfrecord_output_dir = FLAGS.tfrecord_output_dir
tx.utils.maybe_create_dir(tfrecord_output_dir)
# Creates a data pre-processor for, e.g., BPE encoding
proc = processor.get_encoder(FLAGS.pretrain_model_dir)
# Produces TFRecord files
data_utils.prepare_TFRecord_data(
data_dir=data_dir,
max_seq_length=FLAGS.max_seq_length,
encoder=proc,
output_dir=tfrecord_output_dir)
|
[
"def",
"prepare_data",
"(",
")",
":",
"data_dir",
"=",
"FLAGS",
".",
"data_dir",
"if",
"FLAGS",
".",
"tfrecord_output_dir",
"is",
"None",
":",
"tfrecord_output_dir",
"=",
"data_dir",
"else",
":",
"tfrecord_output_dir",
"=",
"FLAGS",
".",
"tfrecord_output_dir",
"tx",
".",
"utils",
".",
"maybe_create_dir",
"(",
"tfrecord_output_dir",
")",
"# Creates a data pre-processor for, e.g., BPE encoding",
"proc",
"=",
"processor",
".",
"get_encoder",
"(",
"FLAGS",
".",
"pretrain_model_dir",
")",
"# Produces TFRecord files",
"data_utils",
".",
"prepare_TFRecord_data",
"(",
"data_dir",
"=",
"data_dir",
",",
"max_seq_length",
"=",
"FLAGS",
".",
"max_seq_length",
",",
"encoder",
"=",
"proc",
",",
"output_dir",
"=",
"tfrecord_output_dir",
")"
] |
https://github.com/asyml/texar/blob/a23f021dae289a3d768dc099b220952111da04fd/examples/gpt-2/prepare_data.py#L46-L65
|
||
WPO-Foundation/wptagent
|
94470f007294213f900dcd9a207678b5b9fce5d3
|
internal/traffic_shaping.py
|
python
|
NetEm.apply
|
(self, target_id)
|
return
|
Stub for applying Chrome traffic-shaping
|
Stub for applying Chrome traffic-shaping
|
[
"Stub",
"for",
"applying",
"Chrome",
"traffic",
"-",
"shaping"
] |
def apply(self, target_id):
"""Stub for applying Chrome traffic-shaping"""
return
|
[
"def",
"apply",
"(",
"self",
",",
"target_id",
")",
":",
"return"
] |
https://github.com/WPO-Foundation/wptagent/blob/94470f007294213f900dcd9a207678b5b9fce5d3/internal/traffic_shaping.py#L629-L631
|
|
twilio/twilio-python
|
6e1e811ea57a1edfadd5161ace87397c563f6915
|
twilio/rest/api/v2010/account/incoming_phone_number/assigned_add_on/__init__.py
|
python
|
AssignedAddOnInstance.unique_name
|
(self)
|
return self._properties['unique_name']
|
:returns: An application-defined string that uniquely identifies the resource
:rtype: unicode
|
:returns: An application-defined string that uniquely identifies the resource
:rtype: unicode
|
[
":",
"returns",
":",
"An",
"application",
"-",
"defined",
"string",
"that",
"uniquely",
"identifies",
"the",
"resource",
":",
"rtype",
":",
"unicode"
] |
def unique_name(self):
"""
:returns: An application-defined string that uniquely identifies the resource
:rtype: unicode
"""
return self._properties['unique_name']
|
[
"def",
"unique_name",
"(",
"self",
")",
":",
"return",
"self",
".",
"_properties",
"[",
"'unique_name'",
"]"
] |
https://github.com/twilio/twilio-python/blob/6e1e811ea57a1edfadd5161ace87397c563f6915/twilio/rest/api/v2010/account/incoming_phone_number/assigned_add_on/__init__.py#L407-L412
|
|
richq/folders2flickr
|
0b735057dbf3c0ea132668af36d30ded52e7b6d9
|
f2flickr/flickr.py
|
python
|
_get_api_sig
|
(params)
|
return api_signature
|
Generate API signature.
|
Generate API signature.
|
[
"Generate",
"API",
"signature",
"."
] |
def _get_api_sig(params):
"""Generate API signature."""
token = userToken()
parameters = ['api_key', 'auth_token']
for item in params.items():
parameters.append(item[0])
parameters.sort()
api_string = [API_SECRET]
for item in parameters:
for chocolate in params.items():
if item == chocolate[0]:
api_string.append(item)
api_string.append(str(chocolate[1]))
if item == 'api_key':
api_string.append('api_key')
api_string.append(API_KEY)
if item == 'auth_token':
api_string.append('auth_token')
api_string.append(token)
api_signature = hashlib.md5(''.join(api_string)).hexdigest()
return api_signature
|
[
"def",
"_get_api_sig",
"(",
"params",
")",
":",
"token",
"=",
"userToken",
"(",
")",
"parameters",
"=",
"[",
"'api_key'",
",",
"'auth_token'",
"]",
"for",
"item",
"in",
"params",
".",
"items",
"(",
")",
":",
"parameters",
".",
"append",
"(",
"item",
"[",
"0",
"]",
")",
"parameters",
".",
"sort",
"(",
")",
"api_string",
"=",
"[",
"API_SECRET",
"]",
"for",
"item",
"in",
"parameters",
":",
"for",
"chocolate",
"in",
"params",
".",
"items",
"(",
")",
":",
"if",
"item",
"==",
"chocolate",
"[",
"0",
"]",
":",
"api_string",
".",
"append",
"(",
"item",
")",
"api_string",
".",
"append",
"(",
"str",
"(",
"chocolate",
"[",
"1",
"]",
")",
")",
"if",
"item",
"==",
"'api_key'",
":",
"api_string",
".",
"append",
"(",
"'api_key'",
")",
"api_string",
".",
"append",
"(",
"API_KEY",
")",
"if",
"item",
"==",
"'auth_token'",
":",
"api_string",
".",
"append",
"(",
"'auth_token'",
")",
"api_string",
".",
"append",
"(",
"token",
")",
"api_signature",
"=",
"hashlib",
".",
"md5",
"(",
"''",
".",
"join",
"(",
"api_string",
")",
")",
".",
"hexdigest",
"(",
")",
"return",
"api_signature"
] |
https://github.com/richq/folders2flickr/blob/0b735057dbf3c0ea132668af36d30ded52e7b6d9/f2flickr/flickr.py#L1272-L1296
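The scheme reduces to: sort the parameter names, concatenate name/value pairs after the shared secret, and MD5 the result. A simplified Python 3 sketch (the code above is Python 2, where md5 accepts str directly); the key and secret are placeholders, and the api_key/auth_token injection is folded into the params dict.

import hashlib

def sign(params, api_secret):
    # Flickr-style signature: secret + sorted key/value concatenation, hex MD5.
    pieces = [api_secret]
    for key in sorted(params):
        pieces.append(key)
        pieces.append(str(params[key]))
    return hashlib.md5(''.join(pieces).encode('utf-8')).hexdigest()

print(sign({'api_key': 'k', 'method': 'flickr.test.echo'}, api_secret='s'))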
|
|
buke/GreenOdoo
|
3d8c55d426fb41fdb3f2f5a1533cfe05983ba1df
|
runtime/python/lib/python2.7/site-packages/docutils/utils/math/math2html.py
|
python
|
FormulaCommand.parsewithcommand
|
(self, command, pos)
|
return None
|
Parse the command type once we have the command.
|
Parse the command type once we have the command.
|
[
"Parse",
"the",
"command",
"type",
"once",
"we",
"have",
"the",
"command",
"."
] |
def parsewithcommand(self, command, pos):
"Parse the command type once we have the command."
for type in FormulaCommand.types:
if command in type.commandmap:
return self.parsecommandtype(command, type, pos)
return None
|
[
"def",
"parsewithcommand",
"(",
"self",
",",
"command",
",",
"pos",
")",
":",
"for",
"type",
"in",
"FormulaCommand",
".",
"types",
":",
"if",
"command",
"in",
"type",
".",
"commandmap",
":",
"return",
"self",
".",
"parsecommandtype",
"(",
"command",
",",
"type",
",",
"pos",
")",
"return",
"None"
] |
https://github.com/buke/GreenOdoo/blob/3d8c55d426fb41fdb3f2f5a1533cfe05983ba1df/runtime/python/lib/python2.7/site-packages/docutils/utils/math/math2html.py#L3967-L3972
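The dispatch idea — try each registered type and let the first one whose command map knows the command handle it — in a self-contained sketch with invented command maps:

class Greek:
    commandmap = {'\\alpha': 'α', '\\beta': 'β'}

class BigOperator:
    commandmap = {'\\sum': '∑', '\\int': '∫'}

def lookup(command, types=(Greek, BigOperator)):
    # First type that recognises the command wins; None otherwise.
    for type_ in types:
        if command in type_.commandmap:
            return type_.commandmap[command]
    return None

print(lookup('\\sum'))   # ∑
print(lookup('\\frac'))  # None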
|
|
fboender/ansible-cmdb
|
3f3e412d2a7be91c97c5a1842f4e57cc85b06961
|
lib/mako/template.py
|
python
|
Template.code
|
(self)
|
return _get_module_info_from_callable(self.callable_).code
|
Return the module source code for this :class:`.Template`.
|
Return the module source code for this :class:`.Template`.
|
[
"Return",
"the",
"module",
"source",
"code",
"for",
"this",
":",
"class",
":",
".",
"Template",
"."
] |
def code(self):
"""Return the module source code for this :class:`.Template`."""
return _get_module_info_from_callable(self.callable_).code
|
[
"def",
"code",
"(",
"self",
")",
":",
"return",
"_get_module_info_from_callable",
"(",
"self",
".",
"callable_",
")",
".",
"code"
] |
https://github.com/fboender/ansible-cmdb/blob/3f3e412d2a7be91c97c5a1842f4e57cc85b06961/lib/mako/template.py#L412-L415
|
|
tomplus/kubernetes_asyncio
|
f028cc793e3a2c519be6a52a49fb77ff0b014c9b
|
kubernetes_asyncio/client/models/v1beta1_event.py
|
python
|
V1beta1Event.regarding
|
(self, regarding)
|
Sets the regarding of this V1beta1Event.
:param regarding: The regarding of this V1beta1Event. # noqa: E501
:type: V1ObjectReference
|
Sets the regarding of this V1beta1Event.
|
[
"Sets",
"the",
"regarding",
"of",
"this",
"V1beta1Event",
"."
] |
def regarding(self, regarding):
"""Sets the regarding of this V1beta1Event.
:param regarding: The regarding of this V1beta1Event. # noqa: E501
:type: V1ObjectReference
"""
self._regarding = regarding
|
[
"def",
"regarding",
"(",
"self",
",",
"regarding",
")",
":",
"self",
".",
"_regarding",
"=",
"regarding"
] |
https://github.com/tomplus/kubernetes_asyncio/blob/f028cc793e3a2c519be6a52a49fb77ff0b014c9b/kubernetes_asyncio/client/models/v1beta1_event.py#L396-L404
|
||
dimagi/commcare-hq
|
d67ff1d3b4c51fa050c19e60c3253a79d3452a39
|
corehq/messaging/scheduling/forms.py
|
python
|
ConditionalAlertScheduleForm.update_start_date_type_choices
|
(self)
|
[] |
def update_start_date_type_choices(self):
if (
self.is_system_admin or
self.initial.get('start_date_type') == self.START_DATE_FROM_VISIT_SCHEDULER
):
self.fields['start_date_type'].choices += [
(self.START_DATE_FROM_VISIT_SCHEDULER, _("A date from a visit scheduler")),
]
|
[
"def",
"update_start_date_type_choices",
"(",
"self",
")",
":",
"if",
"(",
"self",
".",
"is_system_admin",
"or",
"self",
".",
"initial",
".",
"get",
"(",
"'start_date_type'",
")",
"==",
"self",
".",
"START_DATE_FROM_VISIT_SCHEDULER",
")",
":",
"self",
".",
"fields",
"[",
"'start_date_type'",
"]",
".",
"choices",
"+=",
"[",
"(",
"self",
".",
"START_DATE_FROM_VISIT_SCHEDULER",
",",
"_",
"(",
"\"A date from a visit scheduler\"",
")",
")",
",",
"]"
] |
https://github.com/dimagi/commcare-hq/blob/d67ff1d3b4c51fa050c19e60c3253a79d3452a39/corehq/messaging/scheduling/forms.py#L2951-L2958
|
||||
uqfoundation/multiprocess
|
028cc73f02655e6451d92e5147d19d8c10aebe50
|
pypy3.6/multiprocess/connection.py
|
python
|
_ConnectionBase.closed
|
(self)
|
return self._handle is None
|
True if the connection is closed
|
True if the connection is closed
|
[
"True",
"if",
"the",
"connection",
"is",
"closed"
] |
def closed(self):
"""True if the connection is closed"""
return self._handle is None
|
[
"def",
"closed",
"(",
"self",
")",
":",
"return",
"self",
".",
"_handle",
"is",
"None"
] |
https://github.com/uqfoundation/multiprocess/blob/028cc73f02655e6451d92e5147d19d8c10aebe50/pypy3.6/multiprocess/connection.py#L157-L159
|
|
golemhq/golem
|
84f51478b169cdeab73fc7e2a22a64d0a2a29263
|
golem/actions.py
|
python
|
verify_element_text_contains
|
(element, text)
|
Verify element contains text
Parameters:
element : element
text : value
|
Verify element contains text
|
[
"Verify",
"element",
"contains",
"text"
] |
def verify_element_text_contains(element, text):
"""Verify element contains text
Parameters:
element : element
text : value
"""
element = browser.get_browser().find(element, timeout=0)
with _verify_step(f"Verify element {element.name} contains text '{text}'") as s:
s.error = f"expected element {element.name} text '{element.text}' to contain '{text}'"
s.condition = text in element.text
|
[
"def",
"verify_element_text_contains",
"(",
"element",
",",
"text",
")",
":",
"element",
"=",
"browser",
".",
"get_browser",
"(",
")",
".",
"find",
"(",
"element",
",",
"timeout",
"=",
"0",
")",
"with",
"_verify_step",
"(",
"f\"Verify element {element.name} contains text '{text}'\"",
")",
"as",
"s",
":",
"s",
".",
"error",
"=",
"f\"expected element {element.name} text '{element.text}' to contain '{text}'\"",
"s",
".",
"condition",
"=",
"text",
"in",
"element",
".",
"text"
] |
https://github.com/golemhq/golem/blob/84f51478b169cdeab73fc7e2a22a64d0a2a29263/golem/actions.py#L2125-L2135
|
||
open-mmlab/mmdetection3d
|
c7272063e818bcf33aebc498a017a95c8d065143
|
tools/data_converter/waymo_converter.py
|
python
|
Waymo2KITTI.convert_range_image_to_point_cloud
|
(self,
frame,
range_images,
camera_projections,
range_image_top_pose,
ri_index=0)
|
return points, cp_points, intensity, elongation
|
Convert range images to point cloud.
Args:
frame (:obj:`Frame`): Open dataset frame.
range_images (dict): Mapping from laser_name to list of two
range images corresponding with two returns.
camera_projections (dict): Mapping from laser_name to list of two
camera projections corresponding with two returns.
range_image_top_pose (:obj:`Transform`): Range image pixel pose for
top lidar.
ri_index (int): 0 for the first return, 1 for the second return.
Default: 0.
Returns:
tuple[list[np.ndarray]]: (List of points with shape [N, 3],
camera projections of points with shape [N, 6], intensity
with shape [N, 1], elongation with shape [N, 1]). All the
lists have the length of lidar numbers (5).
|
Convert range images to point cloud.
|
[
"Convert",
"range",
"images",
"to",
"point",
"cloud",
"."
] |
def convert_range_image_to_point_cloud(self,
frame,
range_images,
camera_projections,
range_image_top_pose,
ri_index=0):
"""Convert range images to point cloud.
Args:
frame (:obj:`Frame`): Open dataset frame.
range_images (dict): Mapping from laser_name to list of two
range images corresponding with two returns.
camera_projections (dict): Mapping from laser_name to list of two
camera projections corresponding with two returns.
range_image_top_pose (:obj:`Transform`): Range image pixel pose for
top lidar.
ri_index (int): 0 for the first return, 1 for the second return.
Default: 0.
Returns:
tuple[list[np.ndarray]]: (List of points with shape [N, 3],
camera projections of points with shape [N, 6], intensity
with shape [N, 1], elongation with shape [N, 1]). All the
lists have the length of lidar numbers (5).
"""
calibrations = sorted(
frame.context.laser_calibrations, key=lambda c: c.name)
points = []
cp_points = []
intensity = []
elongation = []
frame_pose = tf.convert_to_tensor(
value=np.reshape(np.array(frame.pose.transform), [4, 4]))
# [H, W, 6]
range_image_top_pose_tensor = tf.reshape(
tf.convert_to_tensor(value=range_image_top_pose.data),
range_image_top_pose.shape.dims)
# [H, W, 3, 3]
range_image_top_pose_tensor_rotation = \
transform_utils.get_rotation_matrix(
range_image_top_pose_tensor[..., 0],
range_image_top_pose_tensor[..., 1],
range_image_top_pose_tensor[..., 2])
range_image_top_pose_tensor_translation = \
range_image_top_pose_tensor[..., 3:]
range_image_top_pose_tensor = transform_utils.get_transform(
range_image_top_pose_tensor_rotation,
range_image_top_pose_tensor_translation)
for c in calibrations:
range_image = range_images[c.name][ri_index]
if len(c.beam_inclinations) == 0:
beam_inclinations = range_image_utils.compute_inclination(
tf.constant(
[c.beam_inclination_min, c.beam_inclination_max]),
height=range_image.shape.dims[0])
else:
beam_inclinations = tf.constant(c.beam_inclinations)
beam_inclinations = tf.reverse(beam_inclinations, axis=[-1])
extrinsic = np.reshape(np.array(c.extrinsic.transform), [4, 4])
range_image_tensor = tf.reshape(
tf.convert_to_tensor(value=range_image.data),
range_image.shape.dims)
pixel_pose_local = None
frame_pose_local = None
if c.name == dataset_pb2.LaserName.TOP:
pixel_pose_local = range_image_top_pose_tensor
pixel_pose_local = tf.expand_dims(pixel_pose_local, axis=0)
frame_pose_local = tf.expand_dims(frame_pose, axis=0)
range_image_mask = range_image_tensor[..., 0] > 0
if self.filter_no_label_zone_points:
nlz_mask = range_image_tensor[..., 3] != 1.0 # 1.0: in NLZ
range_image_mask = range_image_mask & nlz_mask
range_image_cartesian = \
range_image_utils.extract_point_cloud_from_range_image(
tf.expand_dims(range_image_tensor[..., 0], axis=0),
tf.expand_dims(extrinsic, axis=0),
tf.expand_dims(tf.convert_to_tensor(
value=beam_inclinations), axis=0),
pixel_pose=pixel_pose_local,
frame_pose=frame_pose_local)
range_image_cartesian = tf.squeeze(range_image_cartesian, axis=0)
points_tensor = tf.gather_nd(range_image_cartesian,
tf.compat.v1.where(range_image_mask))
cp = camera_projections[c.name][ri_index]
cp_tensor = tf.reshape(
tf.convert_to_tensor(value=cp.data), cp.shape.dims)
cp_points_tensor = tf.gather_nd(
cp_tensor, tf.compat.v1.where(range_image_mask))
points.append(points_tensor.numpy())
cp_points.append(cp_points_tensor.numpy())
intensity_tensor = tf.gather_nd(range_image_tensor[..., 1],
tf.where(range_image_mask))
intensity.append(intensity_tensor.numpy())
elongation_tensor = tf.gather_nd(range_image_tensor[..., 2],
tf.where(range_image_mask))
elongation.append(elongation_tensor.numpy())
return points, cp_points, intensity, elongation
|
[
"def",
"convert_range_image_to_point_cloud",
"(",
"self",
",",
"frame",
",",
"range_images",
",",
"camera_projections",
",",
"range_image_top_pose",
",",
"ri_index",
"=",
"0",
")",
":",
"calibrations",
"=",
"sorted",
"(",
"frame",
".",
"context",
".",
"laser_calibrations",
",",
"key",
"=",
"lambda",
"c",
":",
"c",
".",
"name",
")",
"points",
"=",
"[",
"]",
"cp_points",
"=",
"[",
"]",
"intensity",
"=",
"[",
"]",
"elongation",
"=",
"[",
"]",
"frame_pose",
"=",
"tf",
".",
"convert_to_tensor",
"(",
"value",
"=",
"np",
".",
"reshape",
"(",
"np",
".",
"array",
"(",
"frame",
".",
"pose",
".",
"transform",
")",
",",
"[",
"4",
",",
"4",
"]",
")",
")",
"# [H, W, 6]",
"range_image_top_pose_tensor",
"=",
"tf",
".",
"reshape",
"(",
"tf",
".",
"convert_to_tensor",
"(",
"value",
"=",
"range_image_top_pose",
".",
"data",
")",
",",
"range_image_top_pose",
".",
"shape",
".",
"dims",
")",
"# [H, W, 3, 3]",
"range_image_top_pose_tensor_rotation",
"=",
"transform_utils",
".",
"get_rotation_matrix",
"(",
"range_image_top_pose_tensor",
"[",
"...",
",",
"0",
"]",
",",
"range_image_top_pose_tensor",
"[",
"...",
",",
"1",
"]",
",",
"range_image_top_pose_tensor",
"[",
"...",
",",
"2",
"]",
")",
"range_image_top_pose_tensor_translation",
"=",
"range_image_top_pose_tensor",
"[",
"...",
",",
"3",
":",
"]",
"range_image_top_pose_tensor",
"=",
"transform_utils",
".",
"get_transform",
"(",
"range_image_top_pose_tensor_rotation",
",",
"range_image_top_pose_tensor_translation",
")",
"for",
"c",
"in",
"calibrations",
":",
"range_image",
"=",
"range_images",
"[",
"c",
".",
"name",
"]",
"[",
"ri_index",
"]",
"if",
"len",
"(",
"c",
".",
"beam_inclinations",
")",
"==",
"0",
":",
"beam_inclinations",
"=",
"range_image_utils",
".",
"compute_inclination",
"(",
"tf",
".",
"constant",
"(",
"[",
"c",
".",
"beam_inclination_min",
",",
"c",
".",
"beam_inclination_max",
"]",
")",
",",
"height",
"=",
"range_image",
".",
"shape",
".",
"dims",
"[",
"0",
"]",
")",
"else",
":",
"beam_inclinations",
"=",
"tf",
".",
"constant",
"(",
"c",
".",
"beam_inclinations",
")",
"beam_inclinations",
"=",
"tf",
".",
"reverse",
"(",
"beam_inclinations",
",",
"axis",
"=",
"[",
"-",
"1",
"]",
")",
"extrinsic",
"=",
"np",
".",
"reshape",
"(",
"np",
".",
"array",
"(",
"c",
".",
"extrinsic",
".",
"transform",
")",
",",
"[",
"4",
",",
"4",
"]",
")",
"range_image_tensor",
"=",
"tf",
".",
"reshape",
"(",
"tf",
".",
"convert_to_tensor",
"(",
"value",
"=",
"range_image",
".",
"data",
")",
",",
"range_image",
".",
"shape",
".",
"dims",
")",
"pixel_pose_local",
"=",
"None",
"frame_pose_local",
"=",
"None",
"if",
"c",
".",
"name",
"==",
"dataset_pb2",
".",
"LaserName",
".",
"TOP",
":",
"pixel_pose_local",
"=",
"range_image_top_pose_tensor",
"pixel_pose_local",
"=",
"tf",
".",
"expand_dims",
"(",
"pixel_pose_local",
",",
"axis",
"=",
"0",
")",
"frame_pose_local",
"=",
"tf",
".",
"expand_dims",
"(",
"frame_pose",
",",
"axis",
"=",
"0",
")",
"range_image_mask",
"=",
"range_image_tensor",
"[",
"...",
",",
"0",
"]",
">",
"0",
"if",
"self",
".",
"filter_no_label_zone_points",
":",
"nlz_mask",
"=",
"range_image_tensor",
"[",
"...",
",",
"3",
"]",
"!=",
"1.0",
"# 1.0: in NLZ",
"range_image_mask",
"=",
"range_image_mask",
"&",
"nlz_mask",
"range_image_cartesian",
"=",
"range_image_utils",
".",
"extract_point_cloud_from_range_image",
"(",
"tf",
".",
"expand_dims",
"(",
"range_image_tensor",
"[",
"...",
",",
"0",
"]",
",",
"axis",
"=",
"0",
")",
",",
"tf",
".",
"expand_dims",
"(",
"extrinsic",
",",
"axis",
"=",
"0",
")",
",",
"tf",
".",
"expand_dims",
"(",
"tf",
".",
"convert_to_tensor",
"(",
"value",
"=",
"beam_inclinations",
")",
",",
"axis",
"=",
"0",
")",
",",
"pixel_pose",
"=",
"pixel_pose_local",
",",
"frame_pose",
"=",
"frame_pose_local",
")",
"range_image_cartesian",
"=",
"tf",
".",
"squeeze",
"(",
"range_image_cartesian",
",",
"axis",
"=",
"0",
")",
"points_tensor",
"=",
"tf",
".",
"gather_nd",
"(",
"range_image_cartesian",
",",
"tf",
".",
"compat",
".",
"v1",
".",
"where",
"(",
"range_image_mask",
")",
")",
"cp",
"=",
"camera_projections",
"[",
"c",
".",
"name",
"]",
"[",
"ri_index",
"]",
"cp_tensor",
"=",
"tf",
".",
"reshape",
"(",
"tf",
".",
"convert_to_tensor",
"(",
"value",
"=",
"cp",
".",
"data",
")",
",",
"cp",
".",
"shape",
".",
"dims",
")",
"cp_points_tensor",
"=",
"tf",
".",
"gather_nd",
"(",
"cp_tensor",
",",
"tf",
".",
"compat",
".",
"v1",
".",
"where",
"(",
"range_image_mask",
")",
")",
"points",
".",
"append",
"(",
"points_tensor",
".",
"numpy",
"(",
")",
")",
"cp_points",
".",
"append",
"(",
"cp_points_tensor",
".",
"numpy",
"(",
")",
")",
"intensity_tensor",
"=",
"tf",
".",
"gather_nd",
"(",
"range_image_tensor",
"[",
"...",
",",
"1",
"]",
",",
"tf",
".",
"where",
"(",
"range_image_mask",
")",
")",
"intensity",
".",
"append",
"(",
"intensity_tensor",
".",
"numpy",
"(",
")",
")",
"elongation_tensor",
"=",
"tf",
".",
"gather_nd",
"(",
"range_image_tensor",
"[",
"...",
",",
"2",
"]",
",",
"tf",
".",
"where",
"(",
"range_image_mask",
")",
")",
"elongation",
".",
"append",
"(",
"elongation_tensor",
".",
"numpy",
"(",
")",
")",
"return",
"points",
",",
"cp_points",
",",
"intensity",
",",
"elongation"
] |
https://github.com/open-mmlab/mmdetection3d/blob/c7272063e818bcf33aebc498a017a95c8d065143/tools/data_converter/waymo_converter.py#L389-L495
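The recurring pattern in the per-lidar loop above — build a boolean validity mask over the range image, then gather the surviving pixels from each channel — in a few lines; the shapes and values are invented and this assumes TensorFlow 2 eager mode.

import numpy as np
import tensorflow as tf

range_image = tf.constant(np.random.rand(4, 8, 4), dtype=tf.float32)  # [H, W, C]
mask = range_image[..., 0] > 0.5            # channel 0 is range; keep valid returns
indices = tf.where(mask)                    # [N, 2] surviving pixel coordinates
ranges = tf.gather_nd(range_image[..., 0], indices)
intensity = tf.gather_nd(range_image[..., 1], indices)
print(ranges.shape, intensity.shape)        # both (N,)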
|
|
pyglet/pyglet
|
2833c1df902ca81aeeffa786c12e7e87d402434b
|
pyglet/sprite.py
|
python
|
Sprite.rotation
|
(self)
|
return self._rotation
|
Clockwise rotation of the sprite, in degrees.
The sprite image will be rotated about its image's (anchor_x, anchor_y)
position.
:type: float
|
Clockwise rotation of the sprite, in degrees.
|
[
"Clockwise",
"rotation",
"of",
"the",
"sprite",
"in",
"degrees",
"."
] |
def rotation(self):
"""Clockwise rotation of the sprite, in degrees.
The sprite image will be rotated about its image's (anchor_x, anchor_y)
position.
:type: float
"""
return self._rotation
|
[
"def",
"rotation",
"(",
"self",
")",
":",
"return",
"self",
".",
"_rotation"
] |
https://github.com/pyglet/pyglet/blob/2833c1df902ca81aeeffa786c12e7e87d402434b/pyglet/sprite.py#L546-L554
|
|
jesse-ai/jesse
|
28759547138fbc76dff12741204833e39c93b083
|
jesse/indicators/ichimoku_cloud_seq.py
|
python
|
ichimoku_cloud_seq
|
(candles: np.ndarray, conversion_line_period: int = 9, base_line_period: int = 26,
lagging_line_period: int = 52, displacement: int = 26,
sequential: bool = False)
|
Ichimoku Cloud
:param candles: np.ndarray
:param conversion_line_period: int - default: 9
:param base_line_period: int - default: 26
:param lagging_line_period: int - default: 52
:param displacement: int - default: 26
:param sequential: bool - default: False
:return: IchimokuCloud
|
Ichimoku Cloud
|
[
"Ichimoku",
"Cloud"
] |
def ichimoku_cloud_seq(candles: np.ndarray, conversion_line_period: int = 9, base_line_period: int = 26,
lagging_line_period: int = 52, displacement: int = 26,
sequential: bool = False) -> IchimokuCloud:
"""
Ichimoku Cloud
:param candles: np.ndarray
:param conversion_line_period: int - default: 9
:param base_line_period: int - default: 26
:param lagging_line_period: int - default: 52
:param displacement: int - default: 26
:param sequential: bool - default: False
:return: IchimokuCloud
"""
if candles.shape[0] < lagging_line_period + displacement:
raise ValueError("Too few candles available for lagging_line_period + displacement.")
candles = slice_candles(candles, sequential)
conversion_line = _line_helper(candles, conversion_line_period)
base_line = _line_helper(candles, base_line_period)
span_b_pre = _line_helper(candles, lagging_line_period)
span_b = np_shift(span_b_pre, displacement, fill_value=np.nan)
span_a_pre = (conversion_line + base_line) / 2
span_a = np_shift(span_a_pre, displacement, fill_value=np.nan)
lagging_line = np_shift(candles[:, 2], displacement - 1, fill_value=np.nan)
if sequential:
return IchimokuCloud(conversion_line, base_line, span_a, span_b, lagging_line, span_a_pre, span_b_pre)
else:
return IchimokuCloud(conversion_line[-1], base_line[-1], span_a[-1], span_b[-1], lagging_line[-1],
span_a_pre[-1], span_b_pre[-1])
|
[
"def",
"ichimoku_cloud_seq",
"(",
"candles",
":",
"np",
".",
"ndarray",
",",
"conversion_line_period",
":",
"int",
"=",
"9",
",",
"base_line_period",
":",
"int",
"=",
"26",
",",
"lagging_line_period",
":",
"int",
"=",
"52",
",",
"displacement",
":",
"int",
"=",
"26",
",",
"sequential",
":",
"bool",
"=",
"False",
")",
"->",
"IchimokuCloud",
":",
"if",
"candles",
".",
"shape",
"[",
"0",
"]",
"<",
"lagging_line_period",
"+",
"displacement",
":",
"raise",
"ValueError",
"(",
"\"Too few candles available for lagging_line_period + displacement.\"",
")",
"candles",
"=",
"slice_candles",
"(",
"candles",
",",
"sequential",
")",
"conversion_line",
"=",
"_line_helper",
"(",
"candles",
",",
"conversion_line_period",
")",
"base_line",
"=",
"_line_helper",
"(",
"candles",
",",
"base_line_period",
")",
"span_b_pre",
"=",
"_line_helper",
"(",
"candles",
",",
"lagging_line_period",
")",
"span_b",
"=",
"np_shift",
"(",
"span_b_pre",
",",
"displacement",
",",
"fill_value",
"=",
"np",
".",
"nan",
")",
"span_a_pre",
"=",
"(",
"conversion_line",
"+",
"base_line",
")",
"/",
"2",
"span_a",
"=",
"np_shift",
"(",
"span_a_pre",
",",
"displacement",
",",
"fill_value",
"=",
"np",
".",
"nan",
")",
"lagging_line",
"=",
"np_shift",
"(",
"candles",
"[",
":",
",",
"2",
"]",
",",
"displacement",
"-",
"1",
",",
"fill_value",
"=",
"np",
".",
"nan",
")",
"if",
"sequential",
":",
"return",
"IchimokuCloud",
"(",
"conversion_line",
",",
"base_line",
",",
"span_a",
",",
"span_b",
",",
"lagging_line",
",",
"span_a_pre",
",",
"span_b_pre",
")",
"else",
":",
"return",
"IchimokuCloud",
"(",
"conversion_line",
"[",
"-",
"1",
"]",
",",
"base_line",
"[",
"-",
"1",
"]",
",",
"span_a",
"[",
"-",
"1",
"]",
",",
"span_b",
"[",
"-",
"1",
"]",
",",
"lagging_line",
"[",
"-",
"1",
"]",
",",
"span_a_pre",
"[",
"-",
"1",
"]",
",",
"span_b_pre",
"[",
"-",
"1",
"]",
")"
] |
https://github.com/jesse-ai/jesse/blob/28759547138fbc76dff12741204833e39c93b083/jesse/indicators/ichimoku_cloud_seq.py#L14-L47
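A minimal numpy sketch of the displacement logic, assuming an np_shift-like helper that shifts forward and pads the head with NaN (the jesse helper's exact semantics may differ):

import numpy as np

def shift(arr, n, fill_value=np.nan):
    # Shift forward by n samples, padding the head with fill_value.
    if n == 0:
        return arr.astype(float)
    out = np.full(arr.shape, fill_value, dtype=float)
    out[n:] = arr[:-n]
    return out

conversion = np.linspace(100, 109, 10)
base = np.linspace(101, 110, 10)
span_a = shift((conversion + base) / 2, 3)  # Senkou Span A, displaced forward
print(span_a)                               # first 3 entries are NaN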
|
||
FairwindsOps/reckoner
|
0acad4c3e02cf13c0983cc57d632400d7a826bd0
|
reckoner/course.py
|
python
|
Course._set_chart_repository
|
(self, chart: dict)
|
_set_chart_repository will convert the string reference of a
repository into the dictionary configuration of that repository
or, if None, or if the string isn't in the repositories section,
it will leave it alone.
|
_set_chart_repository will convert the string reference of a
repository into the dictionary configuration of that repository
or, if None, or if the string isn't in the repositories section,
it will leave it alone.
|
[
"_set_chart_repository",
"will",
"convert",
"the",
"string",
"reference",
"of",
"a",
"repository",
"into",
"the",
"dictionary",
"configuration",
"of",
"that",
"repository",
"or",
"if",
"None",
"or",
"if",
"the",
"string",
"isn",
"t",
"in",
"the",
"repositories",
"section",
"it",
"will",
"leave",
"it",
"alone",
"."
] |
def _set_chart_repository(self, chart: dict):
"""_set_chart_repository will convert the string reference of a
repository into the dictionary configuration of that repository
or, if None, or if the string isn't in the repositories section,
it will leave it alone."""
if isinstance(chart.get('repository', None), str) and chart['repository'] in [x.name for x in self.repositories]:
logging.debug('Found a reference to a repository installed via repositories section of course, replacing reference.')
chart['repository'] = self._dict['repositories'][chart['repository']]
|
[
"def",
"_set_chart_repository",
"(",
"self",
",",
"chart",
":",
"dict",
")",
":",
"if",
"isinstance",
"(",
"chart",
".",
"get",
"(",
"'repository'",
",",
"None",
")",
",",
"str",
")",
"and",
"chart",
"[",
"'repository'",
"]",
"in",
"[",
"x",
".",
"name",
"for",
"x",
"in",
"self",
".",
"repositories",
"]",
":",
"logging",
".",
"debug",
"(",
"'Found a reference to a repository installed via repositories section of course, replacing reference.'",
")",
"chart",
"[",
"'repository'",
"]",
"=",
"self",
".",
"_dict",
"[",
"'repositories'",
"]",
"[",
"chart",
"[",
"'repository'",
"]",
"]"
] |
https://github.com/FairwindsOps/reckoner/blob/0acad4c3e02cf13c0983cc57d632400d7a826bd0/reckoner/course.py#L143-L150
|
||
OpenKMIP/PyKMIP
|
c0c980395660ea1b1a8009e97f17ab32d1100233
|
kmip/pie/client.py
|
python
|
ProxyKmipClient.encrypt
|
(self, data, uid=None, cryptographic_parameters=None,
iv_counter_nonce=None)
|
Encrypt data using the specified encryption key and parameters.
Args:
data (bytes): The bytes to encrypt. Required.
uid (string): The unique ID of the encryption key to use.
Optional, defaults to None.
cryptographic_parameters (dict): A dictionary containing various
cryptographic settings to be used for the encryption.
Optional, defaults to None.
iv_counter_nonce (bytes): The bytes to use for the IV/counter/
nonce, if needed by the encryption algorithm and/or cipher
mode. Optional, defaults to None.
Returns:
bytes: The encrypted data.
bytes: The IV/counter/nonce used with the encryption algorithm,
only if it was autogenerated by the server.
Raises:
ClientConnectionNotOpen: if the client connection is unusable
KmipOperationFailure: if the operation result is a failure
TypeError: if the input arguments are invalid
Notes:
The cryptographic_parameters argument is a dictionary that can
contain the following key/value pairs:
Keys | Value
------------------------------|-----------------------------------
'block_cipher_mode' | A BlockCipherMode enumeration
| indicating the cipher mode to use
| with the encryption algorithm.
'padding_method' | A PaddingMethod enumeration
| indicating which padding method to
| use with the encryption algorithm.
'hashing_algorithm' | A HashingAlgorithm enumeration
| indicating which hashing algorithm
| to use.
'key_role_type' | A KeyRoleType enumeration
| indicating the intended use of the
| associated cryptographic key.
'digital_signature_algorithm' | A DigitalSignatureAlgorithm
| enumeration indicating which
| digital signature algorithm to
| use.
'cryptographic_algorithm' | A CryptographicAlgorithm
| enumeration indicating which
| encryption algorithm to use.
'random_iv' | A boolean indicating whether the
| server should autogenerate an IV.
'iv_length' | An integer representing the length
| of the initialization vector (IV)
| in bits.
'tag_length' | An integer representing the length
| of the authenticator tag in bytes.
'fixed_field_length' | An integer representing the length
| of the fixed field portion of the
| IV in bits.
'invocation_field_length' | An integer representing the length
| of the invocation field portion of
| the IV in bits.
'counter_length' | An integer representing the length
| of the counter portion of the IV
| in bits.
'initial_counter_value' | An integer representing the
| starting counter value for CTR
| mode (typically 1).
|
Encrypt data using the specified encryption key and parameters.
|
[
"Encrypt",
"data",
"using",
"the",
"specified",
"encryption",
"key",
"and",
"parameters",
"."
] |
def encrypt(self, data, uid=None, cryptographic_parameters=None,
iv_counter_nonce=None):
"""
Encrypt data using the specified encryption key and parameters.
Args:
data (bytes): The bytes to encrypt. Required.
uid (string): The unique ID of the encryption key to use.
Optional, defaults to None.
cryptographic_parameters (dict): A dictionary containing various
cryptographic settings to be used for the encryption.
Optional, defaults to None.
iv_counter_nonce (bytes): The bytes to use for the IV/counter/
nonce, if needed by the encryption algorithm and/or cipher
mode. Optional, defaults to None.
Returns:
bytes: The encrypted data.
bytes: The IV/counter/nonce used with the encryption algorithm,
only if it was autogenerated by the server.
Raises:
ClientConnectionNotOpen: if the client connection is unusable
KmipOperationFailure: if the operation result is a failure
TypeError: if the input arguments are invalid
Notes:
The cryptographic_parameters argument is a dictionary that can
contain the following key/value pairs:
Keys | Value
------------------------------|-----------------------------------
'block_cipher_mode' | A BlockCipherMode enumeration
| indicating the cipher mode to use
| with the encryption algorithm.
'padding_method' | A PaddingMethod enumeration
| indicating which padding method to
| use with the encryption algorithm.
'hashing_algorithm' | A HashingAlgorithm enumeration
| indicating which hashing algorithm
| to use.
'key_role_type' | A KeyRoleType enumeration
| indicating the intended use of the
| associated cryptographic key.
'digital_signature_algorithm' | A DigitalSignatureAlgorithm
| enumeration indicating which
| digital signature algorithm to
| use.
'cryptographic_algorithm' | A CryptographicAlgorithm
| enumeration indicating which
| encryption algorithm to use.
'random_iv' | A boolean indicating whether the
| server should autogenerate an IV.
'iv_length' | An integer representing the length
| of the initialization vector (IV)
| in bits.
'tag_length' | An integer representing the length
| of the authenticator tag in bytes.
'fixed_field_length' | An integer representing the length
| of the fixed field portion of the
| IV in bits.
'invocation_field_length' | An integer representing the length
| of the invocation field portion of
| the IV in bits.
'counter_length' | An integer representing the length
| of the counter portion of the IV
| in bits.
'initial_counter_value' | An integer representing the
| starting counter value for CTR
| mode (typically 1).
"""
# Check input
if not isinstance(data, six.binary_type):
raise TypeError("data must be bytes")
if uid is not None:
if not isinstance(uid, six.string_types):
raise TypeError("uid must be a string")
if cryptographic_parameters is not None:
if not isinstance(cryptographic_parameters, dict):
raise TypeError("cryptographic_parameters must be a dict")
if iv_counter_nonce is not None:
if not isinstance(iv_counter_nonce, six.binary_type):
raise TypeError("iv_counter_nonce must be bytes")
cryptographic_parameters = self._build_cryptographic_parameters(
cryptographic_parameters
)
# Encrypt the provided data and handle the results
result = self.proxy.encrypt(
data,
uid,
cryptographic_parameters,
iv_counter_nonce
)
status = result.get('result_status')
if status == enums.ResultStatus.SUCCESS:
return result.get('data'), result.get('iv_counter_nonce')
else:
raise exceptions.KmipOperationFailure(
status,
result.get('result_reason'),
result.get('result_message')
)
|
[
"def",
"encrypt",
"(",
"self",
",",
"data",
",",
"uid",
"=",
"None",
",",
"cryptographic_parameters",
"=",
"None",
",",
"iv_counter_nonce",
"=",
"None",
")",
":",
"# Check input",
"if",
"not",
"isinstance",
"(",
"data",
",",
"six",
".",
"binary_type",
")",
":",
"raise",
"TypeError",
"(",
"\"data must be bytes\"",
")",
"if",
"uid",
"is",
"not",
"None",
":",
"if",
"not",
"isinstance",
"(",
"uid",
",",
"six",
".",
"string_types",
")",
":",
"raise",
"TypeError",
"(",
"\"uid must be a string\"",
")",
"if",
"cryptographic_parameters",
"is",
"not",
"None",
":",
"if",
"not",
"isinstance",
"(",
"cryptographic_parameters",
",",
"dict",
")",
":",
"raise",
"TypeError",
"(",
"\"cryptographic_parameters must be a dict\"",
")",
"if",
"iv_counter_nonce",
"is",
"not",
"None",
":",
"if",
"not",
"isinstance",
"(",
"iv_counter_nonce",
",",
"six",
".",
"binary_type",
")",
":",
"raise",
"TypeError",
"(",
"\"iv_counter_nonce must be bytes\"",
")",
"cryptographic_parameters",
"=",
"self",
".",
"_build_cryptographic_parameters",
"(",
"cryptographic_parameters",
")",
"# Encrypt the provided data and handle the results",
"result",
"=",
"self",
".",
"proxy",
".",
"encrypt",
"(",
"data",
",",
"uid",
",",
"cryptographic_parameters",
",",
"iv_counter_nonce",
")",
"status",
"=",
"result",
".",
"get",
"(",
"'result_status'",
")",
"if",
"status",
"==",
"enums",
".",
"ResultStatus",
".",
"SUCCESS",
":",
"return",
"result",
".",
"get",
"(",
"'data'",
")",
",",
"result",
".",
"get",
"(",
"'iv_counter_nonce'",
")",
"else",
":",
"raise",
"exceptions",
".",
"KmipOperationFailure",
"(",
"status",
",",
"result",
".",
"get",
"(",
"'result_reason'",
")",
",",
"result",
".",
"get",
"(",
"'result_message'",
")",
")"
] |
https://github.com/OpenKMIP/PyKMIP/blob/c0c980395660ea1b1a8009e97f17ab32d1100233/kmip/pie/client.py#L1193-L1297
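Based only on the docstring above, a hedged usage sketch; the key UID is a placeholder, connection settings are assumed to come from the client configuration, and the client is opened via its context manager.

from kmip.pie import client
from kmip.core import enums

with client.ProxyKmipClient() as c:
    ciphertext, iv = c.encrypt(
        b'secret data',
        uid='example-key-uid',  # placeholder: UID of an existing symmetric key
        cryptographic_parameters={
            'cryptographic_algorithm': enums.CryptographicAlgorithm.AES,
            'block_cipher_mode': enums.BlockCipherMode.CBC,
            'padding_method': enums.PaddingMethod.PKCS5,
            'random_iv': True,  # let the server autogenerate the IV
        },
    )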
|
||
CedricGuillemet/Imogen
|
ee417b42747ed5b46cb11b02ef0c3630000085b3
|
bin/Lib/logging/__init__.py
|
python
|
Handler.setFormatter
|
(self, fmt)
|
Set the formatter for this handler.
|
Set the formatter for this handler.
|
[
"Set",
"the",
"formatter",
"for",
"this",
"handler",
"."
] |
def setFormatter(self, fmt):
"""
Set the formatter for this handler.
"""
self.formatter = fmt
|
[
"def",
"setFormatter",
"(",
"self",
",",
"fmt",
")",
":",
"self",
".",
"formatter",
"=",
"fmt"
] |
https://github.com/CedricGuillemet/Imogen/blob/ee417b42747ed5b46cb11b02ef0c3630000085b3/bin/Lib/logging/__init__.py#L910-L914
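Standard-library usage of this hook, for reference:

import logging

handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
logger = logging.getLogger('demo')
logger.addHandler(handler)
logger.warning('formatted output')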
|
||
lohriialo/photoshop-scripting-python
|
6b97da967a5d0a45e54f7c99631b29773b923f09
|
api_reference/photoshop_2020.py
|
python
|
_GalleryBannerOptions.__iter__
|
(self)
|
return win32com.client.util.Iterator(ob, None)
|
Return a Python iterator for this object
|
Return a Python iterator for this object
|
[
"Return",
"a",
"Python",
"iterator",
"for",
"this",
"object"
] |
def __iter__(self):
"Return a Python iterator for this object"
try:
ob = self._oleobj_.InvokeTypes(-4,LCID,3,(13, 10),())
except pythoncom.error:
raise TypeError("This object does not support enumeration")
return win32com.client.util.Iterator(ob, None)
|
[
"def",
"__iter__",
"(",
"self",
")",
":",
"try",
":",
"ob",
"=",
"self",
".",
"_oleobj_",
".",
"InvokeTypes",
"(",
"-",
"4",
",",
"LCID",
",",
"3",
",",
"(",
"13",
",",
"10",
")",
",",
"(",
")",
")",
"except",
"pythoncom",
".",
"error",
":",
"raise",
"TypeError",
"(",
"\"This object does not support enumeration\"",
")",
"return",
"win32com",
".",
"client",
".",
"util",
".",
"Iterator",
"(",
"ob",
",",
"None",
")"
] |
https://github.com/lohriialo/photoshop-scripting-python/blob/6b97da967a5d0a45e54f7c99631b29773b923f09/api_reference/photoshop_2020.py#L4899-L4905
|
|
devstructure/blueprint
|
574a9fc0dd3031c66970387f1105d8c89e61218f
|
blueprint/__init__.py
|
python
|
Blueprint.add_service_package
|
(self, manager, service, package_manager, *args)
|
Add package dependencies to a service resource.
|
Add package dependencies to a service resource.
|
[
"Add",
"package",
"dependencies",
"to",
"a",
"service",
"resource",
"."
] |
def add_service_package(self, manager, service, package_manager, *args):
"""
Add package dependencies to a service resource.
"""
if 0 == len(args):
return
d = self.services[manager][service].setdefault('packages',
defaultdict(set))
for package in args:
d[package_manager].add(package)
|
[
"def",
"add_service_package",
"(",
"self",
",",
"manager",
",",
"service",
",",
"package_manager",
",",
"*",
"args",
")",
":",
"if",
"0",
"==",
"len",
"(",
"args",
")",
":",
"return",
"d",
"=",
"self",
".",
"services",
"[",
"manager",
"]",
"[",
"service",
"]",
".",
"setdefault",
"(",
"'packages'",
",",
"defaultdict",
"(",
"set",
")",
")",
"for",
"package",
"in",
"args",
":",
"d",
"[",
"package_manager",
"]",
".",
"add",
"(",
"package",
")"
] |
https://github.com/devstructure/blueprint/blob/574a9fc0dd3031c66970387f1105d8c89e61218f/blueprint/__init__.py#L360-L369
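The nested structure this builds — service -> 'packages' -> package manager -> set of package names — shown in isolation; the manager and package names are invented:

from collections import defaultdict

services = {'sysvinit': {'httpd': {}}}
d = services['sysvinit']['httpd'].setdefault('packages', defaultdict(set))
for package in ('httpd', 'mod_ssl'):
    d['yum'].add(package)
print(services['sysvinit']['httpd']['packages']['yum'])  # {'httpd', 'mod_ssl'}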
|
||
exaile/exaile
|
a7b58996c5c15b3aa7b9975ac13ee8f784ef4689
|
xlgui/widgets/dialogs.py
|
python
|
URIOpenDialog.do_uri_selected
|
(self, uri)
|
Destroys the dialog
|
Destroys the dialog
|
[
"Destroys",
"the",
"dialog"
] |
def do_uri_selected(self, uri):
"""
Destroys the dialog
"""
self.destroy()
|
[
"def",
"do_uri_selected",
"(",
"self",
",",
"uri",
")",
":",
"self",
".",
"destroy",
"(",
")"
] |
https://github.com/exaile/exaile/blob/a7b58996c5c15b3aa7b9975ac13ee8f784ef4689/xlgui/widgets/dialogs.py#L390-L394
|
||
zhl2008/awd-platform
|
0416b31abea29743387b10b3914581fbe8e7da5e
|
web_flaskbb/lib/python2.7/site-packages/sqlalchemy/dialects/oracle/cx_oracle.py
|
python
|
OracleDialect_cx_oracle.__init__
|
(self,
auto_convert_lobs=True,
threaded=True,
coerce_to_unicode=False,
coerce_to_decimal=True,
arraysize=50,
**kwargs)
|
[] |
def __init__(self,
auto_convert_lobs=True,
threaded=True,
coerce_to_unicode=False,
coerce_to_decimal=True,
arraysize=50,
**kwargs):
self._pop_deprecated_kwargs(kwargs)
OracleDialect.__init__(self, **kwargs)
self.threaded = threaded
self.arraysize = arraysize
self.auto_convert_lobs = auto_convert_lobs
self.coerce_to_unicode = coerce_to_unicode
self.coerce_to_decimal = coerce_to_decimal
cx_Oracle = self.dbapi
if cx_Oracle is None:
self._include_setinputsizes = {}
self.cx_oracle_ver = (0, 0, 0)
else:
self.cx_oracle_ver = self._parse_cx_oracle_ver(cx_Oracle.version)
if self.cx_oracle_ver < (5, 2) and self.cx_oracle_ver > (0, 0, 0):
raise exc.InvalidRequestError(
"cx_Oracle version 5.2 and above are supported")
self._has_native_int = hasattr(cx_Oracle, "NATIVE_INT")
self._include_setinputsizes = {
cx_Oracle.NCLOB, cx_Oracle.CLOB, cx_Oracle.LOB,
cx_Oracle.NCHAR, cx_Oracle.FIXED_NCHAR,
cx_Oracle.BLOB, cx_Oracle.FIXED_CHAR, cx_Oracle.TIMESTAMP
}
self._is_cx_oracle_6 = self.cx_oracle_ver >= (6, )
|
[
"def",
"__init__",
"(",
"self",
",",
"auto_convert_lobs",
"=",
"True",
",",
"threaded",
"=",
"True",
",",
"coerce_to_unicode",
"=",
"False",
",",
"coerce_to_decimal",
"=",
"True",
",",
"arraysize",
"=",
"50",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"_pop_deprecated_kwargs",
"(",
"kwargs",
")",
"OracleDialect",
".",
"__init__",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
"self",
".",
"threaded",
"=",
"threaded",
"self",
".",
"arraysize",
"=",
"arraysize",
"self",
".",
"auto_convert_lobs",
"=",
"auto_convert_lobs",
"self",
".",
"coerce_to_unicode",
"=",
"coerce_to_unicode",
"self",
".",
"coerce_to_decimal",
"=",
"coerce_to_decimal",
"cx_Oracle",
"=",
"self",
".",
"dbapi",
"if",
"cx_Oracle",
"is",
"None",
":",
"self",
".",
"_include_setinputsizes",
"=",
"{",
"}",
"self",
".",
"cx_oracle_ver",
"=",
"(",
"0",
",",
"0",
",",
"0",
")",
"else",
":",
"self",
".",
"cx_oracle_ver",
"=",
"self",
".",
"_parse_cx_oracle_ver",
"(",
"cx_Oracle",
".",
"version",
")",
"if",
"self",
".",
"cx_oracle_ver",
"<",
"(",
"5",
",",
"2",
")",
"and",
"self",
".",
"cx_oracle_ver",
">",
"(",
"0",
",",
"0",
",",
"0",
")",
":",
"raise",
"exc",
".",
"InvalidRequestError",
"(",
"\"cx_Oracle version 5.2 and above are supported\"",
")",
"self",
".",
"_has_native_int",
"=",
"hasattr",
"(",
"cx_Oracle",
",",
"\"NATIVE_INT\"",
")",
"self",
".",
"_include_setinputsizes",
"=",
"{",
"cx_Oracle",
".",
"NCLOB",
",",
"cx_Oracle",
".",
"CLOB",
",",
"cx_Oracle",
".",
"LOB",
",",
"cx_Oracle",
".",
"NCHAR",
",",
"cx_Oracle",
".",
"FIXED_NCHAR",
",",
"cx_Oracle",
".",
"BLOB",
",",
"cx_Oracle",
".",
"FIXED_CHAR",
",",
"cx_Oracle",
".",
"TIMESTAMP",
"}",
"self",
".",
"_is_cx_oracle_6",
"=",
"self",
".",
"cx_oracle_ver",
">=",
"(",
"6",
",",
")"
] |
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_flaskbb/lib/python2.7/site-packages/sqlalchemy/dialects/oracle/cx_oracle.py#L624-L660
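The version gate above is plain tuple comparison; a sketch of the parse-and-compare step, with an invented helper name standing in for _parse_cx_oracle_ver:

def parse_ver(version):
    # '6.4.1' -> (6, 4, 1); tuples compare lexicographically.
    return tuple(int(p) for p in version.split('.')[:3])

print(parse_ver('6.4.1') >= (5, 2))  # True: new enough
print(parse_ver('5.1.2') < (5, 2))   # True: would be rejected
# (0, 0, 0) is the "no driver" sentinel, excluded by the `> (0, 0, 0)` check.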
|
||||
spack/spack
|
675210bd8bd1c5d32ad1cc83d898fb43b569ed74
|
var/spack/repos/builtin/packages/tk/package.py
|
python
|
Tk.setup_run_environment
|
(self, env)
|
Set TK_LIBRARY to the directory containing tk.tcl.
For further info, see:
* https://www.tcl-lang.org/man/tcl/TkCmd/tkvars.htm
|
Set TK_LIBRARY to the directory containing tk.tcl.
|
[
"Set",
"TK_LIBRARY",
"to",
"the",
"directory",
"containing",
"tk",
".",
"tcl",
"."
] |
def setup_run_environment(self, env):
"""Set TK_LIBRARY to the directory containing tk.tcl.
For further info, see:
* https://www.tcl-lang.org/man/tcl/TkCmd/tkvars.htm
"""
# When using tkinter from within spack provided python+tkinter,
# python will not be able to find Tk unless TK_LIBRARY is set.
env.set('TK_LIBRARY', os.path.dirname(sorted(find(self.prefix, 'tk.tcl'))[0]))
|
[
"def",
"setup_run_environment",
"(",
"self",
",",
"env",
")",
":",
"# When using tkinter from within spack provided python+tkinter,",
"# python will not be able to find Tk unless TK_LIBRARY is set.",
"env",
".",
"set",
"(",
"'TK_LIBRARY'",
",",
"os",
".",
"path",
".",
"dirname",
"(",
"sorted",
"(",
"find",
"(",
"self",
".",
"prefix",
",",
"'tk.tcl'",
")",
")",
"[",
"0",
"]",
")",
")"
] |
https://github.com/spack/spack/blob/675210bd8bd1c5d32ad1cc83d898fb43b569ed74/var/spack/repos/builtin/packages/tk/package.py#L112-L121
|
||
home-assistant/core
|
265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1
|
homeassistant/components/eliqonline/sensor.py
|
python
|
async_setup_platform
|
(
hass: HomeAssistant,
config: ConfigType,
async_add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
)
|
Set up the ELIQ Online sensor.
|
Set up the ELIQ Online sensor.
|
[
"Set",
"up",
"the",
"ELIQ",
"Online",
"sensor",
"."
] |
async def async_setup_platform(
hass: HomeAssistant,
config: ConfigType,
async_add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the ELIQ Online sensor."""
access_token = config.get(CONF_ACCESS_TOKEN)
name = config.get(CONF_NAME, DEFAULT_NAME)
channel_id = config.get(CONF_CHANNEL_ID)
session = async_get_clientsession(hass)
api = eliqonline.API(session=session, access_token=access_token)
try:
_LOGGER.debug("Probing for access to ELIQ Online API")
await api.get_data_now(channelid=channel_id)
except OSError as error:
_LOGGER.error("Could not access the ELIQ Online API: %s", error)
return
async_add_entities([EliqSensor(api, channel_id, name)], True)
|
[
"async",
"def",
"async_setup_platform",
"(",
"hass",
":",
"HomeAssistant",
",",
"config",
":",
"ConfigType",
",",
"async_add_entities",
":",
"AddEntitiesCallback",
",",
"discovery_info",
":",
"DiscoveryInfoType",
"|",
"None",
"=",
"None",
",",
")",
"->",
"None",
":",
"access_token",
"=",
"config",
".",
"get",
"(",
"CONF_ACCESS_TOKEN",
")",
"name",
"=",
"config",
".",
"get",
"(",
"CONF_NAME",
",",
"DEFAULT_NAME",
")",
"channel_id",
"=",
"config",
".",
"get",
"(",
"CONF_CHANNEL_ID",
")",
"session",
"=",
"async_get_clientsession",
"(",
"hass",
")",
"api",
"=",
"eliqonline",
".",
"API",
"(",
"session",
"=",
"session",
",",
"access_token",
"=",
"access_token",
")",
"try",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Probing for access to ELIQ Online API\"",
")",
"await",
"api",
".",
"get_data_now",
"(",
"channelid",
"=",
"channel_id",
")",
"except",
"OSError",
"as",
"error",
":",
"_LOGGER",
".",
"error",
"(",
"\"Could not access the ELIQ Online API: %s\"",
",",
"error",
")",
"return",
"async_add_entities",
"(",
"[",
"EliqSensor",
"(",
"api",
",",
"channel_id",
",",
"name",
")",
"]",
",",
"True",
")"
] |
https://github.com/home-assistant/core/blob/265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1/homeassistant/components/eliqonline/sensor.py#L44-L65
|
||
minio/minio-py
|
b3ba3bf99fe6b9ff2b28855550d6ab5345c134e3
|
minio/commonconfig.py
|
python
|
AndOperator.prefix
|
(self)
|
return self._prefix
|
Get prefix.
|
Get prefix.
|
[
"Get",
"prefix",
"."
] |
def prefix(self):
"""Get prefix."""
return self._prefix
|
[
"def",
"prefix",
"(",
"self",
")",
":",
"return",
"self",
".",
"_prefix"
] |
https://github.com/minio/minio-py/blob/b3ba3bf99fe6b9ff2b28855550d6ab5345c134e3/minio/commonconfig.py#L138-L140
|
|
Kyubyong/transformer
|
fb023bb097e08d53baf25b46a9da490beba51a21
|
tf1.2_legacy/modules.py
|
python
|
embedding
|
(inputs,
vocab_size,
num_units,
zero_pad=True,
scale=True,
scope="embedding",
reuse=None)
|
return outputs
|
Embeds a given tensor.
Args:
inputs: A `Tensor` with type `int32` or `int64` containing the ids
to be looked up in `lookup table`.
vocab_size: An int. Vocabulary size.
num_units: An int. Number of embedding hidden units.
zero_pad: A boolean. If True, all the values of the first row (id 0)
should be constant zeros.
scale: A boolean. If True, the outputs are multiplied by sqrt(num_units).
scope: Optional scope for `variable_scope`.
reuse: Boolean, whether to reuse the weights of a previous layer
by the same name.
Returns:
A `Tensor` with one more rank than inputs's. The last dimensionality
should be `num_units`.
For example,
```
import tensorflow as tf
inputs = tf.to_int32(tf.reshape(tf.range(2*3), (2, 3)))
outputs = embedding(inputs, 6, 2, zero_pad=True)
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
print sess.run(outputs)
>>
[[[ 0. 0. ]
[ 0.09754146 0.67385566]
[ 0.37864095 -0.35689294]]
[[-1.01329422 -1.09939694]
[ 0.7521342 0.38203377]
[-0.04973143 -0.06210355]]]
```
```
import tensorflow as tf
inputs = tf.to_int32(tf.reshape(tf.range(2*3), (2, 3)))
outputs = embedding(inputs, 6, 2, zero_pad=False)
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
print sess.run(outputs)
>>
[[[-0.19172323 -0.39159766]
[-0.43212751 -0.66207761]
[ 1.03452027 -0.26704335]]
[[-0.11634696 -0.35983452]
[ 0.50208133 0.53509563]
[ 1.22204471 -0.96587461]]]
```
|
Embeds a given tensor.
|
[
"Embeds",
"a",
"given",
"tensor",
"."
] |
def embedding(inputs,
vocab_size,
num_units,
zero_pad=True,
scale=True,
scope="embedding",
reuse=None):
'''Embeds a given tensor.
Args:
inputs: A `Tensor` with type `int32` or `int64` containing the ids
to be looked up in `lookup table`.
vocab_size: An int. Vocabulary size.
num_units: An int. Number of embedding hidden units.
zero_pad: A boolean. If True, all the values of the first row (id 0)
should be constant zeros.
scale: A boolean. If True, the outputs are multiplied by sqrt(num_units).
scope: Optional scope for `variable_scope`.
reuse: Boolean, whether to reuse the weights of a previous layer
by the same name.
Returns:
A `Tensor` with one more rank than inputs's. The last dimensionality
should be `num_units`.
For example,
```
import tensorflow as tf
inputs = tf.to_int32(tf.reshape(tf.range(2*3), (2, 3)))
outputs = embedding(inputs, 6, 2, zero_pad=True)
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
print sess.run(outputs)
>>
[[[ 0. 0. ]
[ 0.09754146 0.67385566]
[ 0.37864095 -0.35689294]]
[[-1.01329422 -1.09939694]
[ 0.7521342 0.38203377]
[-0.04973143 -0.06210355]]]
```
```
import tensorflow as tf
inputs = tf.to_int32(tf.reshape(tf.range(2*3), (2, 3)))
outputs = embedding(inputs, 6, 2, zero_pad=False)
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
print sess.run(outputs)
>>
[[[-0.19172323 -0.39159766]
[-0.43212751 -0.66207761]
[ 1.03452027 -0.26704335]]
[[-0.11634696 -0.35983452]
[ 0.50208133 0.53509563]
[ 1.22204471 -0.96587461]]]
```
'''
with tf.variable_scope(scope, reuse=reuse):
lookup_table = tf.get_variable('lookup_table',
dtype=tf.float32,
shape=[vocab_size, num_units],
initializer=tf.contrib.layers.xavier_initializer())
if zero_pad:
lookup_table = tf.concat((tf.zeros(shape=[1, num_units]),
lookup_table[1:, :]), 0)
outputs = tf.nn.embedding_lookup(lookup_table, inputs)
if scale:
outputs = outputs * (num_units ** 0.5)
return outputs
|
[
"def",
"embedding",
"(",
"inputs",
",",
"vocab_size",
",",
"num_units",
",",
"zero_pad",
"=",
"True",
",",
"scale",
"=",
"True",
",",
"scope",
"=",
"\"embedding\"",
",",
"reuse",
"=",
"None",
")",
":",
"with",
"tf",
".",
"variable_scope",
"(",
"scope",
",",
"reuse",
"=",
"reuse",
")",
":",
"lookup_table",
"=",
"tf",
".",
"get_variable",
"(",
"'lookup_table'",
",",
"dtype",
"=",
"tf",
".",
"float32",
",",
"shape",
"=",
"[",
"vocab_size",
",",
"num_units",
"]",
",",
"initializer",
"=",
"tf",
".",
"contrib",
".",
"layers",
".",
"xavier_initializer",
"(",
")",
")",
"if",
"zero_pad",
":",
"lookup_table",
"=",
"tf",
".",
"concat",
"(",
"(",
"tf",
".",
"zeros",
"(",
"shape",
"=",
"[",
"1",
",",
"num_units",
"]",
")",
",",
"lookup_table",
"[",
"1",
":",
",",
":",
"]",
")",
",",
"0",
")",
"outputs",
"=",
"tf",
".",
"nn",
".",
"embedding_lookup",
"(",
"lookup_table",
",",
"inputs",
")",
"if",
"scale",
":",
"outputs",
"=",
"outputs",
"*",
"(",
"num_units",
"**",
"0.5",
")",
"return",
"outputs"
] |
https://github.com/Kyubyong/transformer/blob/fb023bb097e08d53baf25b46a9da490beba51a21/tf1.2_legacy/modules.py#L41-L117
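The zero_pad and scale behaviour is easier to see in plain numpy; a sketch with a seeded random table standing in for the learned variable:

import numpy as np

vocab_size, num_units = 6, 2
lookup = np.random.default_rng(0).normal(size=(vocab_size, num_units))
lookup[0] = 0.0                          # zero_pad: row for id 0 is all zeros
ids = np.array([[0, 1, 2], [3, 4, 5]])
out = lookup[ids] * np.sqrt(num_units)   # scale by sqrt(num_units)
print(out.shape)                         # (2, 3, 2); out[0][0] is all zeros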
|
|
spectralpython/spectral
|
e1cd919f5f66abddc219b76926450240feaaed8f
|
spectral/database/usgs.py
|
python
|
USGSDatabase.get_spectrum
|
(self, sampleID)
|
return (list(x), list(y))
|
Returns a spectrum from the database.
Usage:
(x, y) = usgs.get_spectrum(sampleID)
Arguments:
`sampleID` (int):
The **SampleID** value for the desired spectrum from the
**Samples** table in the database.
Returns:
`x` (list):
Band centers for the spectrum.
This is extracted from the assumed spectrometer for the given sample.
`y` (list):
Spectrum data values for each band.
Returns a pair of vectors containing the wavelengths and measured
values of a measurement.
|
Returns a spectrum from the database.
|
[
"Returns",
"a",
"spectrum",
"from",
"the",
"database",
"."
] |
def get_spectrum(self, sampleID):
'''Returns a spectrum from the database.
Usage:
(x, y) = usgs.get_spectrum(sampleID)
Arguments:
`sampleID` (int):
The **SampleID** value for the desired spectrum from the
**Samples** table in the database.
Returns:
`x` (list):
Band centers for the spectrum.
This is extracted from the assumed spectrometer for the given sample.
`y` (list):
Spectrum data values for each band.
Returns a pair of vectors containing the wavelengths and measured
values of a measurement.
'''
import array
query = '''SELECT ValuesArray, AssumedWLSpmeterDataID FROM Samples WHERE SampleID = ?'''
result = self.cursor.execute(query, (sampleID,))
rows = result.fetchall()
if len(rows) < 1:
raise Exception('Measurement record not found.')
y = array_from_blob(rows[0][0])
assumedWLSpmeterDataID = rows[0][1]
query = '''SELECT ValuesArray FROM SpectrometerData WHERE SpectrometerDataID = ?'''
result = self.cursor.execute(
query, (assumedWLSpmeterDataID,))
rows = result.fetchall()
if len(rows) < 1:
raise Exception('Measurement (wavelengths) record not found.')
x = array_from_blob(rows[0][0])
return (list(x), list(y))
|
[
"def",
"get_spectrum",
"(",
"self",
",",
"sampleID",
")",
":",
"import",
"array",
"query",
"=",
"'''SELECT ValuesArray, AssumedWLSpmeterDataID FROM Samples WHERE SampleID = ?'''",
"result",
"=",
"self",
".",
"cursor",
".",
"execute",
"(",
"query",
",",
"(",
"sampleID",
",",
")",
")",
"rows",
"=",
"result",
".",
"fetchall",
"(",
")",
"if",
"len",
"(",
"rows",
")",
"<",
"1",
":",
"raise",
"Exception",
"(",
"'Measurement record not found.'",
")",
"y",
"=",
"array_from_blob",
"(",
"rows",
"[",
"0",
"]",
"[",
"0",
"]",
")",
"assumedWLSpmeterDataID",
"=",
"rows",
"[",
"0",
"]",
"[",
"1",
"]",
"query",
"=",
"'''SELECT ValuesArray FROM SpectrometerData WHERE SpectrometerDataID = ?'''",
"result",
"=",
"self",
".",
"cursor",
".",
"execute",
"(",
"query",
",",
"(",
"assumedWLSpmeterDataID",
",",
")",
")",
"rows",
"=",
"result",
".",
"fetchall",
"(",
")",
"if",
"len",
"(",
"rows",
")",
"<",
"1",
":",
"raise",
"Exception",
"(",
"'Measurement (wavelengths) record not found.'",
")",
"x",
"=",
"array_from_blob",
"(",
"rows",
"[",
"0",
"]",
"[",
"0",
"]",
")",
"return",
"(",
"list",
"(",
"x",
")",
",",
"list",
"(",
"y",
")",
")"
] |
https://github.com/spectralpython/spectral/blob/e1cd919f5f66abddc219b76926450240feaaed8f/spectral/database/usgs.py#L499-L544
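array_from_blob is defined elsewhere in the module; a hedged guess at its shape, assuming the blobs are packed numeric arrays readable with the stdlib array module (the real typecode may differ):

import array

def array_from_blob(blob, typecode='f'):
    a = array.array(typecode)
    a.frombytes(blob)
    return a

blob = array.array('f', [1.0, 2.5]).tobytes()
print(list(array_from_blob(blob)))  # [1.0, 2.5]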
|
|
holzschu/Carnets
|
44effb10ddfc6aa5c8b0687582a724ba82c6b547
|
Library/lib/python3.7/site-packages/sympy/stats/crv_types.py
|
python
|
Beta
|
(name, alpha, beta)
|
return rv(name, BetaDistribution, (alpha, beta))
|
r"""
Create a Continuous Random Variable with a Beta distribution.
The density of the Beta distribution is given by
.. math::
f(x) := \frac{x^{\alpha-1}(1-x)^{\beta-1}} {\mathrm{B}(\alpha,\beta)}
with :math:`x \in [0,1]`.
Parameters
==========
alpha : Real number, `\alpha > 0`, a shape
beta : Real number, `\beta > 0`, a shape
Returns
=======
A RandomSymbol.
Examples
========
>>> from sympy.stats import Beta, density, E, variance
>>> from sympy import Symbol, simplify, pprint, factor
>>> alpha = Symbol("alpha", positive=True)
>>> beta = Symbol("beta", positive=True)
>>> z = Symbol("z")
>>> X = Beta("x", alpha, beta)
>>> D = density(X)(z)
>>> pprint(D, use_unicode=False)
alpha - 1 beta - 1
z *(1 - z)
--------------------------
B(alpha, beta)
>>> simplify(E(X))
alpha/(alpha + beta)
>>> factor(simplify(variance(X)))
alpha*beta/((alpha + beta)**2*(alpha + beta + 1))
References
==========
.. [1] https://en.wikipedia.org/wiki/Beta_distribution
.. [2] http://mathworld.wolfram.com/BetaDistribution.html
|
r"""
Create a Continuous Random Variable with a Beta distribution.
|
[
"r",
"Create",
"a",
"Continuous",
"Random",
"Variable",
"with",
"a",
"Beta",
"distribution",
"."
] |
def Beta(name, alpha, beta):
r"""
Create a Continuous Random Variable with a Beta distribution.
The density of the Beta distribution is given by
.. math::
f(x) := \frac{x^{\alpha-1}(1-x)^{\beta-1}} {\mathrm{B}(\alpha,\beta)}
with :math:`x \in [0,1]`.
Parameters
==========
alpha : Real number, `\alpha > 0`, a shape
beta : Real number, `\beta > 0`, a shape
Returns
=======
A RandomSymbol.
Examples
========
>>> from sympy.stats import Beta, density, E, variance
>>> from sympy import Symbol, simplify, pprint, factor
>>> alpha = Symbol("alpha", positive=True)
>>> beta = Symbol("beta", positive=True)
>>> z = Symbol("z")
>>> X = Beta("x", alpha, beta)
>>> D = density(X)(z)
>>> pprint(D, use_unicode=False)
alpha - 1 beta - 1
z *(1 - z)
--------------------------
B(alpha, beta)
>>> simplify(E(X))
alpha/(alpha + beta)
>>> factor(simplify(variance(X)))
alpha*beta/((alpha + beta)**2*(alpha + beta + 1))
References
==========
.. [1] https://en.wikipedia.org/wiki/Beta_distribution
.. [2] http://mathworld.wolfram.com/BetaDistribution.html
"""
return rv(name, BetaDistribution, (alpha, beta))
|
[
"def",
"Beta",
"(",
"name",
",",
"alpha",
",",
"beta",
")",
":",
"return",
"rv",
"(",
"name",
",",
"BetaDistribution",
",",
"(",
"alpha",
",",
"beta",
")",
")"
] |
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/sympy/stats/crv_types.py#L355-L410
|
|
SeldonIO/alibi-detect
|
b5ec53cfadcd8e3463d400259f2ea1b752ed1812
|
alibi_detect/utils/saving.py
|
python
|
load_tf_hl
|
(filepath: Union[str, os.PathLike], model: tf.keras.Model, state_dict: dict)
|
return model_hl
|
Load hidden layer models for AdversarialAE.
Parameters
----------
filepath
Saved model directory.
model
tf.keras classification model.
state_dict
Dictionary containing the detector's parameters.
Returns
-------
List with loaded tf.keras models.
|
Load hidden layer models for AdversarialAE.
|
[
"Load",
"hidden",
"layer",
"models",
"for",
"AdversarialAE",
"."
] |
def load_tf_hl(filepath: Union[str, os.PathLike], model: tf.keras.Model, state_dict: dict) -> List[tf.keras.Model]:
"""
Load hidden layer models for AdversarialAE.
Parameters
----------
filepath
Saved model directory.
model
tf.keras classification model.
state_dict
Dictionary containing the detector's parameters.
Returns
-------
List with loaded tf.keras models.
"""
model_dir = Path(filepath).joinpath('model')
hidden_layer_kld = state_dict['hidden_layer_kld']
if not hidden_layer_kld:
return []
model_hl = []
for i, (hidden_layer, output_dim) in enumerate(hidden_layer_kld.items()):
m = DenseHidden(model, hidden_layer, output_dim)
m.load_weights(model_dir.joinpath('model_hl_' + str(i) + '.ckpt'))
model_hl.append(m)
return model_hl
|
[
"def",
"load_tf_hl",
"(",
"filepath",
":",
"Union",
"[",
"str",
",",
"os",
".",
"PathLike",
"]",
",",
"model",
":",
"tf",
".",
"keras",
".",
"Model",
",",
"state_dict",
":",
"dict",
")",
"->",
"List",
"[",
"tf",
".",
"keras",
".",
"Model",
"]",
":",
"model_dir",
"=",
"Path",
"(",
"filepath",
")",
".",
"joinpath",
"(",
"'model'",
")",
"hidden_layer_kld",
"=",
"state_dict",
"[",
"'hidden_layer_kld'",
"]",
"if",
"not",
"hidden_layer_kld",
":",
"return",
"[",
"]",
"model_hl",
"=",
"[",
"]",
"for",
"i",
",",
"(",
"hidden_layer",
",",
"output_dim",
")",
"in",
"enumerate",
"(",
"hidden_layer_kld",
".",
"items",
"(",
")",
")",
":",
"m",
"=",
"DenseHidden",
"(",
"model",
",",
"hidden_layer",
",",
"output_dim",
")",
"m",
".",
"load_weights",
"(",
"model_dir",
".",
"joinpath",
"(",
"'model_hl_'",
"+",
"str",
"(",
"i",
")",
"+",
"'.ckpt'",
")",
")",
"model_hl",
".",
"append",
"(",
"m",
")",
"return",
"model_hl"
] |
https://github.com/SeldonIO/alibi-detect/blob/b5ec53cfadcd8e3463d400259f2ea1b752ed1812/alibi_detect/utils/saving.py#L1089-L1115
|
|
deluge-torrent/deluge
|
2316088f5c0dd6cb044d9d4832fa7d56dcc79cdc
|
deluge/core/rpcserver.py
|
python
|
RPCServer.emit_event
|
(self, event)
|
Emits the event to interested clients.
:param event: the event to emit
:type event: :class:`deluge.event.DelugeEvent`
|
Emits the event to interested clients.
|
[
"Emits",
"the",
"event",
"to",
"interested",
"clients",
"."
] |
def emit_event(self, event):
"""
Emits the event to interested clients.
:param event: the event to emit
:type event: :class:`deluge.event.DelugeEvent`
"""
log.debug('intevents: %s', self.factory.interested_events)
# Find sessions interested in this event
for session_id, interest in self.factory.interested_events.items():
if event.name in interest:
log.debug('Emit Event: %s %s', event.name, event.args)
# This session is interested so send a RPC_EVENT
self.factory.session_protocols[session_id].sendData(
(RPC_EVENT, event.name, event.args)
)
|
[
"def",
"emit_event",
"(",
"self",
",",
"event",
")",
":",
"log",
".",
"debug",
"(",
"'intevents: %s'",
",",
"self",
".",
"factory",
".",
"interested_events",
")",
"# Find sessions interested in this event",
"for",
"session_id",
",",
"interest",
"in",
"self",
".",
"factory",
".",
"interested_events",
".",
"items",
"(",
")",
":",
"if",
"event",
".",
"name",
"in",
"interest",
":",
"log",
".",
"debug",
"(",
"'Emit Event: %s %s'",
",",
"event",
".",
"name",
",",
"event",
".",
"args",
")",
"# This session is interested so send a RPC_EVENT",
"self",
".",
"factory",
".",
"session_protocols",
"[",
"session_id",
"]",
".",
"sendData",
"(",
"(",
"RPC_EVENT",
",",
"event",
".",
"name",
",",
"event",
".",
"args",
")",
")"
] |
https://github.com/deluge-torrent/deluge/blob/2316088f5c0dd6cb044d9d4832fa7d56dcc79cdc/deluge/core/rpcserver.py#L527-L542
|
||
kexinyi/ns-vqa
|
df357618af224723acffb66a17ce3e94298642a7
|
scene_parse/mask_rcnn/lib/utils/net.py
|
python
|
load_ckpt
|
(model, ckpt)
|
Load checkpoint
|
Load checkpoint
|
[
"Load",
"checkpoint"
] |
def load_ckpt(model, ckpt):
"""Load checkpoint"""
mapping, _ = model.detectron_weight_mapping
state_dict = {}
for name in ckpt:
if mapping[name]:
state_dict[name] = ckpt[name]
model.load_state_dict(state_dict, strict=False)
|
[
"def",
"load_ckpt",
"(",
"model",
",",
"ckpt",
")",
":",
"mapping",
",",
"_",
"=",
"model",
".",
"detectron_weight_mapping",
"state_dict",
"=",
"{",
"}",
"for",
"name",
"in",
"ckpt",
":",
"if",
"mapping",
"[",
"name",
"]",
":",
"state_dict",
"[",
"name",
"]",
"=",
"ckpt",
"[",
"name",
"]",
"model",
".",
"load_state_dict",
"(",
"state_dict",
",",
"strict",
"=",
"False",
")"
] |
https://github.com/kexinyi/ns-vqa/blob/df357618af224723acffb66a17ce3e94298642a7/scene_parse/mask_rcnn/lib/utils/net.py#L156-L163
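Usage note: a hedged sketch of a typical call; the checkpoint filename and its 'model' key are assumptions about the training script's save format. Note that mapping[name] in the helper assumes every checkpoint key appears in the model's detectron weight mapping:
import torch
checkpoint = torch.load('mask_rcnn.pth', map_location='cpu')  # assumed file and layout
load_ckpt(model, checkpoint['model'])  # copies only keys whose mapping entry is truthy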
|
||
gabrielfalcao/lettuce
|
f79d8f1bdbb119c423753dd958134fcef3995a93
|
lettuce/fs.py
|
python
|
FileSystem.in_directory
|
(cls, *directories)
|
return decorator
|
Decorator to set the working directory around a function
|
Decorator to set the working directory around a function
|
[
"Decorator",
"to",
"set",
"the",
"working",
"directory",
"around",
"a",
"function"
] |
def in_directory(cls, *directories):
"""Decorator to set the working directory around a function"""
def decorator(func):
@wraps(func)
def inner(*args, **kwargs):
cls.pushd(*directories)
try:
return func(*args, **kwargs)
finally:
cls.popd()
return inner
return decorator
|
[
"def",
"in_directory",
"(",
"cls",
",",
"*",
"directories",
")",
":",
"def",
"decorator",
"(",
"func",
")",
":",
"@",
"wraps",
"(",
"func",
")",
"def",
"inner",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"cls",
".",
"pushd",
"(",
"*",
"directories",
")",
"try",
":",
"return",
"func",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"finally",
":",
"cls",
".",
"popd",
"(",
")",
"return",
"inner",
"return",
"decorator"
] |
https://github.com/gabrielfalcao/lettuce/blob/f79d8f1bdbb119c423753dd958134fcef3995a93/lettuce/fs.py#L253-L267
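Usage note: an illustrative application of the decorator above (the directory names and wrapped function are hypothetical, and it is assumed that pushd joins its arguments into one path); the try/finally guarantees the working directory is restored even if the wrapped function raises:
from lettuce.fs import FileSystem
@FileSystem.in_directory('/tmp', 'work')
def build_fixtures():
    # runs with the cwd pushed to /tmp/work and popped afterwards
    open('fixture.txt', 'w').close()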
|
|
alerta/alerta
|
eca06235a4402c39e3a446f0066557d3c8dc5afd
|
alerta/models/key.py
|
python
|
ApiKey.find_all
|
(query: Query = None, page: int = 1, page_size: int = 1000)
|
return [ApiKey.from_db(key) for key in db.get_keys(query, page, page_size)]
|
List all API keys.
|
List all API keys.
|
[
"List",
"all",
"API",
"keys",
"."
] |
def find_all(query: Query = None, page: int = 1, page_size: int = 1000) -> List['ApiKey']:
"""
List all API keys.
"""
return [ApiKey.from_db(key) for key in db.get_keys(query, page, page_size)]
|
[
"def",
"find_all",
"(",
"query",
":",
"Query",
"=",
"None",
",",
"page",
":",
"int",
"=",
"1",
",",
"page_size",
":",
"int",
"=",
"1000",
")",
"->",
"List",
"[",
"'ApiKey'",
"]",
":",
"return",
"[",
"ApiKey",
".",
"from_db",
"(",
"key",
")",
"for",
"key",
"in",
"db",
".",
"get_keys",
"(",
"query",
",",
"page",
",",
"page_size",
")",
"]"
] |
https://github.com/alerta/alerta/blob/eca06235a4402c39e3a446f0066557d3c8dc5afd/alerta/models/key.py#L133-L137
|
|
lxtGH/OctaveConv_pytorch
|
079f7da29d55c2eeed8985d33f0b2f765d7a469e
|
libs/nn/resnet_srm.py
|
python
|
srm_resnet34
|
(pretrained=False, **kwargs)
|
return model
|
Constructs a ResNet-34 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
|
Constructs a ResNet-34 model.
|
[
"Constructs",
"a",
"ResNet",
"-",
"34",
"model",
"."
] |
def srm_resnet34(pretrained=False, **kwargs):
"""Constructs a ResNet-34 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = ResNet(BasicBlock, [3, 4, 6, 3], **kwargs)
# if pretrained:
# model.load_state_dict(model_zoo.load_url(model_urls['resnet34']))
return model
|
[
"def",
"srm_resnet34",
"(",
"pretrained",
"=",
"False",
",",
"*",
"*",
"kwargs",
")",
":",
"model",
"=",
"ResNet",
"(",
"BasicBlock",
",",
"[",
"3",
",",
"4",
",",
"6",
",",
"3",
"]",
",",
"*",
"*",
"kwargs",
")",
"# if pretrained:",
"# model.load_state_dict(model_zoo.load_url(model_urls['resnet34']))",
"return",
"model"
] |
https://github.com/lxtGH/OctaveConv_pytorch/blob/079f7da29d55c2eeed8985d33f0b2f765d7a469e/libs/nn/resnet_srm.py#L238-L247
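Usage note: an illustrative forward pass through the constructed model; the 1x3x224x224 input shape follows the usual ImageNet convention and is an assumption about this module's ResNet:
import torch
net = srm_resnet34(pretrained=False)
logits = net(torch.randn(1, 3, 224, 224))  # one RGB image at 224x224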
|
|
convexengineering/gpkit
|
3d4dd34ba4e95f1fe58fe9ea45401a6ff2fde1fa
|
gpkit/small_classes.py
|
python
|
_append_dict
|
(d_in, d_out)
|
return d_out
|
Recursively traverses dict d_out and appends items found in d_in.
|
Recursively traverses dict d_out and appends items found in d_in.
|
[
"Recursively",
"travels",
"dict",
"d_out",
"and",
"appends",
"items",
"found",
"in",
"d_in",
"."
] |
def _append_dict(d_in, d_out):
"Recursively travels dict d_out and appends items found in d_in."
for k, v in d_in.items():
if isinstance(v, dict):
d_out[k] = _append_dict(v, d_out[k])
else:
try:
d_out[k].append(v)
except KeyError as e:
raise RuntimeWarning("Key `%s` was added after the first sweep."
% k) from e
return d_out
|
[
"def",
"_append_dict",
"(",
"d_in",
",",
"d_out",
")",
":",
"for",
"k",
",",
"v",
"in",
"d_in",
".",
"items",
"(",
")",
":",
"if",
"isinstance",
"(",
"v",
",",
"dict",
")",
":",
"d_out",
"[",
"k",
"]",
"=",
"_append_dict",
"(",
"v",
",",
"d_out",
"[",
"k",
"]",
")",
"else",
":",
"try",
":",
"d_out",
"[",
"k",
"]",
".",
"append",
"(",
"v",
")",
"except",
"KeyError",
"as",
"e",
":",
"raise",
"RuntimeWarning",
"(",
"\"Key `%s` was added after the first sweep.\"",
"%",
"k",
")",
"from",
"e",
"return",
"d_out"
] |
https://github.com/convexengineering/gpkit/blob/3d4dd34ba4e95f1fe58fe9ea45401a6ff2fde1fa/gpkit/small_classes.py#L122-L133
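Worked example: a small trace of the helper above, with the shapes inferred from the code (d_out mirrors d_in but accumulates swept values in lists; a key present in d_in but absent from d_out raises the RuntimeWarning):
d_out = {'cost': [1.0], 'sub': {'mass': [2.0]}}
d_in = {'cost': 1.5, 'sub': {'mass': 2.5}}
_append_dict(d_in, d_out)
# d_out == {'cost': [1.0, 1.5], 'sub': {'mass': [2.0, 2.5]}}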
|
|
sethmlarson/virtualbox-python
|
984a6e2cb0e8996f4df40f4444c1528849f1c70d
|
virtualbox/library.py
|
python
|
ISystemProperties.get_storage_controller_hotplug_capable
|
(self, controller_type)
|
return hotplug_capable
|
Returns whether the given storage controller supports
hot-plugging devices.
in controller_type of type :class:`StorageControllerType`
The storage controller to check the setting for.
return hotplug_capable of type bool
Returned flag indicating whether the controller is hotplug capable
|
Returns whether the given storage controller supports
hot-plugging devices.
|
[
"Returns",
"whether",
"the",
"given",
"storage",
"controller",
"supports",
"hot",
"-",
"plugging",
"devices",
"."
] |
def get_storage_controller_hotplug_capable(self, controller_type):
"""Returns whether the given storage controller supports
hot-plugging devices.
in controller_type of type :class:`StorageControllerType`
The storage controller to check the setting for.
return hotplug_capable of type bool
Returned flag indicating whether the controller is hotplug capable
"""
if not isinstance(controller_type, StorageControllerType):
raise TypeError(
"controller_type can only be an instance of type StorageControllerType"
)
hotplug_capable = self._call(
"getStorageControllerHotplugCapable", in_p=[controller_type]
)
return hotplug_capable
|
[
"def",
"get_storage_controller_hotplug_capable",
"(",
"self",
",",
"controller_type",
")",
":",
"if",
"not",
"isinstance",
"(",
"controller_type",
",",
"StorageControllerType",
")",
":",
"raise",
"TypeError",
"(",
"\"controller_type can only be an instance of type StorageControllerType\"",
")",
"hotplug_capable",
"=",
"self",
".",
"_call",
"(",
"\"getStorageControllerHotplugCapable\"",
",",
"in_p",
"=",
"[",
"controller_type",
"]",
")",
"return",
"hotplug_capable"
] |
https://github.com/sethmlarson/virtualbox-python/blob/984a6e2cb0e8996f4df40f4444c1528849f1c70d/virtualbox/library.py#L21150-L21168
|
|
meduza-corp/interstellar
|
40a801ccd7856491726f5a126621d9318cabe2e1
|
gsutil/gslib/commands/rsync.py
|
python
|
_FieldedListingIterator
|
(cls, gsutil_api, base_url_str, desc)
|
Iterator over base_url_str formatting output per _BuildTmpOutputLine.
Args:
cls: Command instance.
gsutil_api: gsutil Cloud API instance to use for bucket listing.
base_url_str: The top-level URL string over which to iterate.
desc: 'source' or 'destination'.
Yields:
Output line formatted per _BuildTmpOutputLine.
|
Iterator over base_url_str formatting output per _BuildTmpOutputLine.
|
[
"Iterator",
"over",
"base_url_str",
"formatting",
"output",
"per",
"_BuildTmpOutputLine",
"."
] |
def _FieldedListingIterator(cls, gsutil_api, base_url_str, desc):
"""Iterator over base_url_str formatting output per _BuildTmpOutputLine.
Args:
cls: Command instance.
gsutil_api: gsutil Cloud API instance to use for bucket listing.
base_url_str: The top-level URL string over which to iterate.
desc: 'source' or 'destination'.
Yields:
Output line formatted per _BuildTmpOutputLine.
"""
if cls.recursion_requested:
wildcard = '%s/**' % base_url_str.rstrip('/\\')
else:
wildcard = '%s/*' % base_url_str.rstrip('/\\')
i = 0
for blr in CreateWildcardIterator(
wildcard, gsutil_api, debug=cls.debug,
project_id=cls.project_id).IterObjects(
# Request just the needed fields, to reduce bandwidth usage.
bucket_listing_fields=['crc32c', 'md5Hash', 'name', 'size']):
# Various GUI tools (like the GCS web console) create placeholder objects
# ending with '/' when the user creates an empty directory. Normally these
# tools should delete those placeholders once objects have been written
# "under" the directory, but sometimes the placeholders are left around.
# We need to filter them out here, otherwise if the user tries to rsync
# from GCS to a local directory it will result in a directory/file
# conflict (e.g., trying to download an object called "mydata/" where the
# local directory "mydata" exists).
url = blr.storage_url
if IsCloudSubdirPlaceholder(url, blr=blr):
cls.logger.info('Skipping cloud sub-directory placeholder object (%s) '
'because such objects aren\'t needed in (and would '
'interfere with) directories in the local file system',
url)
continue
if (cls.exclude_symlinks and url.IsFileUrl()
and os.path.islink(url.object_name)):
continue
if cls.exclude_pattern:
str_to_check = url.url_string[len(base_url_str):]
if str_to_check.startswith(url.delim):
str_to_check = str_to_check[1:]
if cls.exclude_pattern.match(str_to_check):
continue
i += 1
if i % _PROGRESS_REPORT_LISTING_COUNT == 0:
cls.logger.info('At %s listing %d...', desc, i)
yield _BuildTmpOutputLine(blr)
|
[
"def",
"_FieldedListingIterator",
"(",
"cls",
",",
"gsutil_api",
",",
"base_url_str",
",",
"desc",
")",
":",
"if",
"cls",
".",
"recursion_requested",
":",
"wildcard",
"=",
"'%s/**'",
"%",
"base_url_str",
".",
"rstrip",
"(",
"'/\\\\'",
")",
"else",
":",
"wildcard",
"=",
"'%s/*'",
"%",
"base_url_str",
".",
"rstrip",
"(",
"'/\\\\'",
")",
"i",
"=",
"0",
"for",
"blr",
"in",
"CreateWildcardIterator",
"(",
"wildcard",
",",
"gsutil_api",
",",
"debug",
"=",
"cls",
".",
"debug",
",",
"project_id",
"=",
"cls",
".",
"project_id",
")",
".",
"IterObjects",
"(",
"# Request just the needed fields, to reduce bandwidth usage.",
"bucket_listing_fields",
"=",
"[",
"'crc32c'",
",",
"'md5Hash'",
",",
"'name'",
",",
"'size'",
"]",
")",
":",
"# Various GUI tools (like the GCS web console) create placeholder objects",
"# ending with '/' when the user creates an empty directory. Normally these",
"# tools should delete those placeholders once objects have been written",
"# \"under\" the directory, but sometimes the placeholders are left around.",
"# We need to filter them out here, otherwise if the user tries to rsync",
"# from GCS to a local directory it will result in a directory/file",
"# conflict (e.g., trying to download an object called \"mydata/\" where the",
"# local directory \"mydata\" exists).",
"url",
"=",
"blr",
".",
"storage_url",
"if",
"IsCloudSubdirPlaceholder",
"(",
"url",
",",
"blr",
"=",
"blr",
")",
":",
"cls",
".",
"logger",
".",
"info",
"(",
"'Skipping cloud sub-directory placeholder object (%s) '",
"'because such objects aren\\'t needed in (and would '",
"'interfere with) directories in the local file system'",
",",
"url",
")",
"continue",
"if",
"(",
"cls",
".",
"exclude_symlinks",
"and",
"url",
".",
"IsFileUrl",
"(",
")",
"and",
"os",
".",
"path",
".",
"islink",
"(",
"url",
".",
"object_name",
")",
")",
":",
"continue",
"if",
"cls",
".",
"exclude_pattern",
":",
"str_to_check",
"=",
"url",
".",
"url_string",
"[",
"len",
"(",
"base_url_str",
")",
":",
"]",
"if",
"str_to_check",
".",
"startswith",
"(",
"url",
".",
"delim",
")",
":",
"str_to_check",
"=",
"str_to_check",
"[",
"1",
":",
"]",
"if",
"cls",
".",
"exclude_pattern",
".",
"match",
"(",
"str_to_check",
")",
":",
"continue",
"i",
"+=",
"1",
"if",
"i",
"%",
"_PROGRESS_REPORT_LISTING_COUNT",
"==",
"0",
":",
"cls",
".",
"logger",
".",
"info",
"(",
"'At %s listing %d...'",
",",
"desc",
",",
"i",
")",
"yield",
"_BuildTmpOutputLine",
"(",
"blr",
")"
] |
https://github.com/meduza-corp/interstellar/blob/40a801ccd7856491726f5a126621d9318cabe2e1/gsutil/gslib/commands/rsync.py#L426-L475
|
||
spack/spack
|
675210bd8bd1c5d32ad1cc83d898fb43b569ed74
|
lib/spack/spack/package.py
|
python
|
PackageBase.apply_macos_rpath_fixups
|
(self)
|
On Darwin, make installed libraries more easily relocatable.
Some build systems (handrolled, autotools, makefiles) can set their own
rpaths that are duplicated by spack's compiler wrapper. This fixup
interrogates, and postprocesses if necessary, all libraries installed
by the code.
It should be added as a @run_after to packaging systems (or individual
packages) that do not install relocatable libraries by default.
|
On Darwin, make installed libraries more easily relocatable.
|
[
"On",
"Darwin",
"make",
"installed",
"libraries",
"more",
"easily",
"relocatable",
"."
] |
def apply_macos_rpath_fixups(self):
"""On Darwin, make installed libraries more easily relocatable.
Some build systems (handrolled, autotools, makefiles) can set their own
rpaths that are duplicated by spack's compiler wrapper. This fixup
interrogates, and postprocesses if necessary, all libraries installed
by the code.
It should be added as a @run_after to packaging systems (or individual
packages) that do not install relocatable libraries by default.
"""
if 'platform=darwin' not in self.spec:
return
from spack.relocate import fixup_macos_rpaths
fixup_macos_rpaths(self.spec)
|
[
"def",
"apply_macos_rpath_fixups",
"(",
"self",
")",
":",
"if",
"'platform=darwin'",
"not",
"in",
"self",
".",
"spec",
":",
"return",
"from",
"spack",
".",
"relocate",
"import",
"fixup_macos_rpaths",
"fixup_macos_rpaths",
"(",
"self",
".",
"spec",
")"
] |
https://github.com/spack/spack/blob/675210bd8bd1c5d32ad1cc83d898fb43b569ed74/lib/spack/spack/package.py#L1972-L1987
|
||
linhaow/TextClassify
|
aa479ae0941c008602631c50124d8c07d159bfb1
|
hubconfs/transformer_xl_hubconf.py
|
python
|
transformerXLTokenizer
|
(*args, **kwargs)
|
return tokenizer
|
Instantiate a Transformer-XL tokenizer adapted from Vocab class in https://github.com/kimiyoung/transformer-xl
Args:
pretrained_model_name_or_path: Path to pretrained model archive
or one of pre-trained vocab configs below.
* transfo-xl-wt103
Example:
import torch
tokenizer = torch.hub.load('huggingface/pytorch-transformers', 'transformerXLTokenizer', 'transfo-xl-wt103')
text = "Who was Jim Henson ?"
tokenized_text = tokenizer.tokenize(text)
indexed_tokens = tokenizer.convert_tokens_to_ids(tokenized_text)
|
Instantiate a Transformer-XL tokenizer adapted from Vocab class in https://github.com/kimiyoung/transformer-xl
|
[
"Instantiate",
"a",
"Transformer",
"-",
"XL",
"tokenizer",
"adapted",
"from",
"Vocab",
"class",
"in",
"https",
":",
"//",
"github",
".",
"com",
"/",
"kimiyoung",
"/",
"transformer",
"-",
"xl"
] |
def transformerXLTokenizer(*args, **kwargs):
"""
Instantiate a Transformer-XL tokenizer adapted from Vocab class in https://github.com/kimiyoung/transformer-xl
Args:
pretrained_model_name_or_path: Path to pretrained model archive
or one of pre-trained vocab configs below.
* transfo-xl-wt103
Example:
import torch
tokenizer = torch.hub.load('huggingface/pytorch-transformers', 'transformerXLTokenizer', 'transfo-xl-wt103')
text = "Who was Jim Henson ?"
tokenized_text = tokenizer.tokenize(text)
indexed_tokens = tokenizer.convert_tokens_to_ids(tokenized_text)
"""
tokenizer = TransfoXLTokenizer.from_pretrained(*args, **kwargs)
return tokenizer
|
[
"def",
"transformerXLTokenizer",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"tokenizer",
"=",
"TransfoXLTokenizer",
".",
"from_pretrained",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"tokenizer"
] |
https://github.com/linhaow/TextClassify/blob/aa479ae0941c008602631c50124d8c07d159bfb1/hubconfs/transformer_xl_hubconf.py#L38-L56
|
|
robotlearn/pyrobolearn
|
9cd7c060723fda7d2779fa255ac998c2c82b8436
|
pyrobolearn/tools/bridges/controllers/robots/bridge_controller_wheeled.py
|
python
|
BridgeControllerWheeledRobot.simulator
|
(self)
|
return self._robot.simulator
|
Return the simulator instance.
|
Return the simulator instance.
|
[
"Return",
"the",
"simulator",
"instance",
"."
] |
def simulator(self):
"""Return the simulator instance."""
return self._robot.simulator
|
[
"def",
"simulator",
"(",
"self",
")",
":",
"return",
"self",
".",
"_robot",
".",
"simulator"
] |
https://github.com/robotlearn/pyrobolearn/blob/9cd7c060723fda7d2779fa255ac998c2c82b8436/pyrobolearn/tools/bridges/controllers/robots/bridge_controller_wheeled.py#L96-L98
|
|
firedrakeproject/firedrake
|
06ab4975c14c0d4dcb79be55821f8b9e41554125
|
firedrake/utility_meshes.py
|
python
|
PeriodicUnitIntervalMesh
|
(ncells, distribution_parameters=None, comm=COMM_WORLD)
|
return PeriodicIntervalMesh(ncells, length=1.0, distribution_parameters=distribution_parameters, comm=comm)
|
Generate a periodic mesh of the unit interval
:arg ncells: The number of cells in the interval.
:kwarg comm: Optional communicator to build the mesh on (defaults to
COMM_WORLD).
|
Generate a periodic mesh of the unit interval
|
[
"Generate",
"a",
"periodic",
"mesh",
"of",
"the",
"unit",
"interval"
] |
def PeriodicUnitIntervalMesh(ncells, distribution_parameters=None, comm=COMM_WORLD):
"""Generate a periodic mesh of the unit interval
:arg ncells: The number of cells in the interval.
:kwarg comm: Optional communicator to build the mesh on (defaults to
COMM_WORLD).
"""
return PeriodicIntervalMesh(ncells, length=1.0, distribution_parameters=distribution_parameters, comm=comm)
|
[
"def",
"PeriodicUnitIntervalMesh",
"(",
"ncells",
",",
"distribution_parameters",
"=",
"None",
",",
"comm",
"=",
"COMM_WORLD",
")",
":",
"return",
"PeriodicIntervalMesh",
"(",
"ncells",
",",
"length",
"=",
"1.0",
",",
"distribution_parameters",
"=",
"distribution_parameters",
",",
"comm",
"=",
"comm",
")"
] |
https://github.com/firedrakeproject/firedrake/blob/06ab4975c14c0d4dcb79be55821f8b9e41554125/firedrake/utility_meshes.py#L152-L159
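Usage note: a minimal call, assuming a working Firedrake installation:
from firedrake import PeriodicUnitIntervalMesh
mesh = PeriodicUnitIntervalMesh(10)  # 10 cells on the periodic unit interval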
|
|
osmr/imgclsmob
|
f2993d3ce73a2f7ddba05da3891defb08547d504
|
pytorch/pytorchcv/models/hrnet.py
|
python
|
get_hrnet
|
(version,
model_name=None,
pretrained=False,
root=os.path.join("~", ".torch", "models"),
**kwargs)
|
return net
|
Create HRNet model with specific parameters.
Parameters:
----------
version : str
Version of HRNet ('w18s1', 'w18s2', 'w18', 'w30', 'w32', 'w40', 'w44', 'w48' or 'w64').
model_name : str or None, default None
Model name for loading pretrained model.
pretrained : bool, default False
Whether to load the pretrained weights for model.
root : str, default '~/.torch/models'
Location for keeping the model parameters.
|
Create HRNet model with specific parameters.
|
[
"Create",
"HRNet",
"model",
"with",
"specific",
"parameters",
"."
] |
def get_hrnet(version,
model_name=None,
pretrained=False,
root=os.path.join("~", ".torch", "models"),
**kwargs):
"""
Create HRNet model with specific parameters.
Parameters:
----------
version : str
Version of HRNet ('w18s1', 'w18s2', 'w18', 'w30', 'w32', 'w40', 'w44', 'w48' or 'w64').
model_name : str or None, default None
Model name for loading pretrained model.
pretrained : bool, default False
Whether to load the pretrained weights for model.
root : str, default '~/.torch/models'
Location for keeping the model parameters.
"""
if version == "w18s1":
init_block_channels = 128
init_num_subblocks = 1
channels = [[16, 32], [16, 32, 64], [16, 32, 64, 128]]
num_modules = [1, 1, 1]
elif version == "w18s2":
init_block_channels = 256
init_num_subblocks = 2
channels = [[18, 36], [18, 36, 72], [18, 36, 72, 144]]
num_modules = [1, 3, 2]
elif version == "w18":
init_block_channels = 256
init_num_subblocks = 4
channels = [[18, 36], [18, 36, 72], [18, 36, 72, 144]]
num_modules = [1, 4, 3]
elif version == "w30":
init_block_channels = 256
init_num_subblocks = 4
channels = [[30, 60], [30, 60, 120], [30, 60, 120, 240]]
num_modules = [1, 4, 3]
elif version == "w32":
init_block_channels = 256
init_num_subblocks = 4
channels = [[32, 64], [32, 64, 128], [32, 64, 128, 256]]
num_modules = [1, 4, 3]
elif version == "w40":
init_block_channels = 256
init_num_subblocks = 4
channels = [[40, 80], [40, 80, 160], [40, 80, 160, 320]]
num_modules = [1, 4, 3]
elif version == "w44":
init_block_channels = 256
init_num_subblocks = 4
channels = [[44, 88], [44, 88, 176], [44, 88, 176, 352]]
num_modules = [1, 4, 3]
elif version == "w48":
init_block_channels = 256
init_num_subblocks = 4
channels = [[48, 96], [48, 96, 192], [48, 96, 192, 384]]
num_modules = [1, 4, 3]
elif version == "w64":
init_block_channels = 256
init_num_subblocks = 4
channels = [[64, 128], [64, 128, 256], [64, 128, 256, 512]]
num_modules = [1, 4, 3]
else:
raise ValueError("Unsupported HRNet version {}".format(version))
num_subblocks = [[max(2, init_num_subblocks)] * len(ci) for ci in channels]
net = HRNet(
channels=channels,
init_block_channels=init_block_channels,
init_num_subblocks=init_num_subblocks,
num_modules=num_modules,
num_subblocks=num_subblocks,
**kwargs)
if pretrained:
if (model_name is None) or (not model_name):
raise ValueError("Parameter `model_name` should be properly initialized for loading pretrained model.")
from .model_store import download_model
download_model(
net=net,
model_name=model_name,
local_model_store_dir_path=root)
return net
|
[
"def",
"get_hrnet",
"(",
"version",
",",
"model_name",
"=",
"None",
",",
"pretrained",
"=",
"False",
",",
"root",
"=",
"os",
".",
"path",
".",
"join",
"(",
"\"~\"",
",",
"\".torch\"",
",",
"\"models\"",
")",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"version",
"==",
"\"w18s1\"",
":",
"init_block_channels",
"=",
"128",
"init_num_subblocks",
"=",
"1",
"channels",
"=",
"[",
"[",
"16",
",",
"32",
"]",
",",
"[",
"16",
",",
"32",
",",
"64",
"]",
",",
"[",
"16",
",",
"32",
",",
"64",
",",
"128",
"]",
"]",
"num_modules",
"=",
"[",
"1",
",",
"1",
",",
"1",
"]",
"elif",
"version",
"==",
"\"w18s2\"",
":",
"init_block_channels",
"=",
"256",
"init_num_subblocks",
"=",
"2",
"channels",
"=",
"[",
"[",
"18",
",",
"36",
"]",
",",
"[",
"18",
",",
"36",
",",
"72",
"]",
",",
"[",
"18",
",",
"36",
",",
"72",
",",
"144",
"]",
"]",
"num_modules",
"=",
"[",
"1",
",",
"3",
",",
"2",
"]",
"elif",
"version",
"==",
"\"w18\"",
":",
"init_block_channels",
"=",
"256",
"init_num_subblocks",
"=",
"4",
"channels",
"=",
"[",
"[",
"18",
",",
"36",
"]",
",",
"[",
"18",
",",
"36",
",",
"72",
"]",
",",
"[",
"18",
",",
"36",
",",
"72",
",",
"144",
"]",
"]",
"num_modules",
"=",
"[",
"1",
",",
"4",
",",
"3",
"]",
"elif",
"version",
"==",
"\"w30\"",
":",
"init_block_channels",
"=",
"256",
"init_num_subblocks",
"=",
"4",
"channels",
"=",
"[",
"[",
"30",
",",
"60",
"]",
",",
"[",
"30",
",",
"60",
",",
"120",
"]",
",",
"[",
"30",
",",
"60",
",",
"120",
",",
"240",
"]",
"]",
"num_modules",
"=",
"[",
"1",
",",
"4",
",",
"3",
"]",
"elif",
"version",
"==",
"\"w32\"",
":",
"init_block_channels",
"=",
"256",
"init_num_subblocks",
"=",
"4",
"channels",
"=",
"[",
"[",
"32",
",",
"64",
"]",
",",
"[",
"32",
",",
"64",
",",
"128",
"]",
",",
"[",
"32",
",",
"64",
",",
"128",
",",
"256",
"]",
"]",
"num_modules",
"=",
"[",
"1",
",",
"4",
",",
"3",
"]",
"elif",
"version",
"==",
"\"w40\"",
":",
"init_block_channels",
"=",
"256",
"init_num_subblocks",
"=",
"4",
"channels",
"=",
"[",
"[",
"40",
",",
"80",
"]",
",",
"[",
"40",
",",
"80",
",",
"160",
"]",
",",
"[",
"40",
",",
"80",
",",
"160",
",",
"320",
"]",
"]",
"num_modules",
"=",
"[",
"1",
",",
"4",
",",
"3",
"]",
"elif",
"version",
"==",
"\"w44\"",
":",
"init_block_channels",
"=",
"256",
"init_num_subblocks",
"=",
"4",
"channels",
"=",
"[",
"[",
"44",
",",
"88",
"]",
",",
"[",
"44",
",",
"88",
",",
"176",
"]",
",",
"[",
"44",
",",
"88",
",",
"176",
",",
"352",
"]",
"]",
"num_modules",
"=",
"[",
"1",
",",
"4",
",",
"3",
"]",
"elif",
"version",
"==",
"\"w48\"",
":",
"init_block_channels",
"=",
"256",
"init_num_subblocks",
"=",
"4",
"channels",
"=",
"[",
"[",
"48",
",",
"96",
"]",
",",
"[",
"48",
",",
"96",
",",
"192",
"]",
",",
"[",
"48",
",",
"96",
",",
"192",
",",
"384",
"]",
"]",
"num_modules",
"=",
"[",
"1",
",",
"4",
",",
"3",
"]",
"elif",
"version",
"==",
"\"w64\"",
":",
"init_block_channels",
"=",
"256",
"init_num_subblocks",
"=",
"4",
"channels",
"=",
"[",
"[",
"64",
",",
"128",
"]",
",",
"[",
"64",
",",
"128",
",",
"256",
"]",
",",
"[",
"64",
",",
"128",
",",
"256",
",",
"512",
"]",
"]",
"num_modules",
"=",
"[",
"1",
",",
"4",
",",
"3",
"]",
"else",
":",
"raise",
"ValueError",
"(",
"\"Unsupported HRNet version {}\"",
".",
"format",
"(",
"version",
")",
")",
"num_subblocks",
"=",
"[",
"[",
"max",
"(",
"2",
",",
"init_num_subblocks",
")",
"]",
"*",
"len",
"(",
"ci",
")",
"for",
"ci",
"in",
"channels",
"]",
"net",
"=",
"HRNet",
"(",
"channels",
"=",
"channels",
",",
"init_block_channels",
"=",
"init_block_channels",
",",
"init_num_subblocks",
"=",
"init_num_subblocks",
",",
"num_modules",
"=",
"num_modules",
",",
"num_subblocks",
"=",
"num_subblocks",
",",
"*",
"*",
"kwargs",
")",
"if",
"pretrained",
":",
"if",
"(",
"model_name",
"is",
"None",
")",
"or",
"(",
"not",
"model_name",
")",
":",
"raise",
"ValueError",
"(",
"\"Parameter `model_name` should be properly initialized for loading pretrained model.\"",
")",
"from",
".",
"model_store",
"import",
"download_model",
"download_model",
"(",
"net",
"=",
"net",
",",
"model_name",
"=",
"model_name",
",",
"local_model_store_dir_path",
"=",
"root",
")",
"return",
"net"
] |
https://github.com/osmr/imgclsmob/blob/f2993d3ce73a2f7ddba05da3891defb08547d504/pytorch/pytorchcv/models/hrnet.py#L381-L467
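Design note: the long elif ladder above could equally be written as a lookup table; a sketch under that assumption (the two entries shown are copied from the code, the remaining versions are elided):
_HRNET_CONFIGS = {
    'w18s1': (128, 1, [[16, 32], [16, 32, 64], [16, 32, 64, 128]], [1, 1, 1]),
    'w18s2': (256, 2, [[18, 36], [18, 36, 72], [18, 36, 72, 144]], [1, 3, 2]),
    # ... 'w18' through 'w64' follow the same pattern
}
def lookup_hrnet_config(version):
    # returns (init_block_channels, init_num_subblocks, channels, num_modules)
    try:
        return _HRNET_CONFIGS[version]
    except KeyError:
        raise ValueError('Unsupported HRNet version {}'.format(version))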
|
|
xmengli/H-DenseUNet
|
06cc436a43196310fe933d114a353839907cc176
|
Keras-2.0.8/keras/engine/topology.py
|
python
|
Layer._node_key
|
(layer, node_index)
|
return layer.name + '_ib-' + str(node_index)
|
Converts a layer and its index to a unique (immutable type) name.
This function is used internally with `self.container_nodes`.
# Arguments
layer: The layer.
node_index: The layer's position (e.g. via enumerate) in a list of
nodes.
# Returns
The unique name.
|
Converts a layer and its index to a unique (immutable type) name.
This function is used internally with `self.container_nodes`.
|
[
"Converts",
"a",
"layer",
"and",
"its",
"index",
"to",
"a",
"unique",
"(",
"immutable",
"type",
")",
"name",
".",
"This",
"function",
"is",
"used",
"internally",
"with",
"self",
".",
"container_nodes",
"."
] |
def _node_key(layer, node_index):
"""Converts a layer and its index to a unique (immutable type) name.
This function is used internally with `self.container_nodes`.
# Arguments
layer: The layer.
node_index: The layer's position (e.g. via enumerate) in a list of
nodes.
# Returns
The unique name.
"""
return layer.name + '_ib-' + str(node_index)
|
[
"def",
"_node_key",
"(",
"layer",
",",
"node_index",
")",
":",
"return",
"layer",
".",
"name",
"+",
"'_ib-'",
"+",
"str",
"(",
"node_index",
")"
] |
https://github.com/xmengli/H-DenseUNet/blob/06cc436a43196310fe933d114a353839907cc176/Keras-2.0.8/keras/engine/topology.py#L314-L326
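Worked example: a layer named 'dense_1' at node index 0 yields the key 'dense_1_ib-0'.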
|
|
pythonanywhere/dirigible-spreadsheet
|
c771e9a391708f3b219248bf9974e05b1582fdd0
|
dirigible/sheet/parser/grammar.py
|
python
|
p_if_function
|
(p)
|
if_function : IF LEFTPAREN argument COMMA argument COMMA argument RIGHTPAREN
| IF LEFTPAREN argument COMMA argument COMMA RIGHTPAREN
| IF LEFTPAREN argument COMMA argument RIGHTPAREN
|
if_function : IF LEFTPAREN argument COMMA argument COMMA argument RIGHTPAREN
| IF LEFTPAREN argument COMMA argument COMMA RIGHTPAREN
| IF LEFTPAREN argument COMMA argument RIGHTPAREN
|
[
"if_function",
":",
"IF",
"LEFTPAREN",
"argument",
"COMMA",
"argument",
"COMMA",
"argument",
"RIGHTPAREN",
"|",
"IF",
"LEFTPAREN",
"argument",
"COMMA",
"argument",
"COMMA",
"RIGHTPAREN",
"|",
"IF",
"LEFTPAREN",
"argument",
"COMMA",
"argument",
"RIGHTPAREN"
] |
def p_if_function(p):
"""if_function : IF LEFTPAREN argument COMMA argument COMMA argument RIGHTPAREN
| IF LEFTPAREN argument COMMA argument COMMA RIGHTPAREN
| IF LEFTPAREN argument COMMA argument RIGHTPAREN"""
FixFunction(p)
|
[
"def",
"p_if_function",
"(",
"p",
")",
":",
"FixFunction",
"(",
"p",
")"
] |
https://github.com/pythonanywhere/dirigible-spreadsheet/blob/c771e9a391708f3b219248bf9974e05b1582fdd0/dirigible/sheet/parser/grammar.py#L774-L778
|
||
paulwinex/pw_MultiScriptEditor
|
e447e99f87cb07e238baf693b7e124e50efdbc51
|
multi_script_editor/jedi/evaluate/representation.py
|
python
|
Function._decorated_func
|
(self)
|
return f
|
Returns the function that is to be executed in the end.
This is also the place where the decorators are processed.
|
Returns the function that is to be executed in the end.
This is also the place where the decorators are processed.
|
[
"Returns",
"the",
"function",
"that",
"is",
"to",
"be",
"executed",
"in",
"the",
"end",
".",
"This",
"is",
"also",
"the",
"places",
"where",
"the",
"decorators",
"are",
"processed",
"."
] |
def _decorated_func(self):
"""
Returns the function that is to be executed in the end.
This is also the place where the decorators are processed.
"""
f = self.base_func
# Only enter it if it has not already been processed.
if not self.is_decorated:
for dec in reversed(self.base_func.decorators):
debug.dbg('decorator: %s %s', dec, f)
dec_results = self._evaluator.eval_statement(dec)
if not len(dec_results):
debug.warning('decorator not found: %s on %s', dec, self.base_func)
return None
decorator = dec_results.pop()
if dec_results:
debug.warning('multiple decorators found %s %s',
self.base_func, dec_results)
# Create param array.
old_func = Function(self._evaluator, f, is_decorated=True)
wrappers = self._evaluator.execute(decorator, (old_func,))
if not len(wrappers):
debug.warning('no wrappers found %s', self.base_func)
return None
if len(wrappers) > 1:
# TODO resolve issue with multiple wrappers -> multiple types
debug.warning('multiple wrappers found %s %s',
self.base_func, wrappers)
f = wrappers[0]
debug.dbg('decorator end %s', f)
if isinstance(f, pr.Function):
f = Function(self._evaluator, f, True)
return f
|
[
"def",
"_decorated_func",
"(",
"self",
")",
":",
"f",
"=",
"self",
".",
"base_func",
"# Only enter it, if has not already been processed.",
"if",
"not",
"self",
".",
"is_decorated",
":",
"for",
"dec",
"in",
"reversed",
"(",
"self",
".",
"base_func",
".",
"decorators",
")",
":",
"debug",
".",
"dbg",
"(",
"'decorator: %s %s'",
",",
"dec",
",",
"f",
")",
"dec_results",
"=",
"self",
".",
"_evaluator",
".",
"eval_statement",
"(",
"dec",
")",
"if",
"not",
"len",
"(",
"dec_results",
")",
":",
"debug",
".",
"warning",
"(",
"'decorator not found: %s on %s'",
",",
"dec",
",",
"self",
".",
"base_func",
")",
"return",
"None",
"decorator",
"=",
"dec_results",
".",
"pop",
"(",
")",
"if",
"dec_results",
":",
"debug",
".",
"warning",
"(",
"'multiple decorators found %s %s'",
",",
"self",
".",
"base_func",
",",
"dec_results",
")",
"# Create param array.",
"old_func",
"=",
"Function",
"(",
"self",
".",
"_evaluator",
",",
"f",
",",
"is_decorated",
"=",
"True",
")",
"wrappers",
"=",
"self",
".",
"_evaluator",
".",
"execute",
"(",
"decorator",
",",
"(",
"old_func",
",",
")",
")",
"if",
"not",
"len",
"(",
"wrappers",
")",
":",
"debug",
".",
"warning",
"(",
"'no wrappers found %s'",
",",
"self",
".",
"base_func",
")",
"return",
"None",
"if",
"len",
"(",
"wrappers",
")",
">",
"1",
":",
"# TODO resolve issue with multiple wrappers -> multiple types",
"debug",
".",
"warning",
"(",
"'multiple wrappers found %s %s'",
",",
"self",
".",
"base_func",
",",
"wrappers",
")",
"f",
"=",
"wrappers",
"[",
"0",
"]",
"debug",
".",
"dbg",
"(",
"'decorator end %s'",
",",
"f",
")",
"if",
"isinstance",
"(",
"f",
",",
"pr",
".",
"Function",
")",
":",
"f",
"=",
"Function",
"(",
"self",
".",
"_evaluator",
",",
"f",
",",
"True",
")",
"return",
"f"
] |
https://github.com/paulwinex/pw_MultiScriptEditor/blob/e447e99f87cb07e238baf693b7e124e50efdbc51/multi_script_editor/jedi/evaluate/representation.py#L340-L376
|
|
pgq/skytools-legacy
|
8b7e6c118572a605d28b7a3403c96aeecfd0d272
|
python/londiste/playback.py
|
python
|
Replicator.sync_from_main_thread
|
(self, cnt, src_db, dst_db)
|
return ret
|
Main thread sync logic.
|
Main thread sync logic.
|
[
"Main",
"thread",
"sync",
"logic",
"."
] |
def sync_from_main_thread(self, cnt, src_db, dst_db):
"Main thread sync logic."
# This operates on all tables; any number can be in any state
ret = SYNC_OK
if cnt.do_sync:
# wait for copy thread to catch up
ret = SYNC_LOOP
# we need to do wanna-sync->do_sync with small batches
need_dsync = False
dsync_ok = True
if self.pgq_min_interval or self.pgq_min_count:
dsync_ok = False
elif self.dsync_backup and self.dsync_backup[0] >= self.cur_tick:
dsync_ok = False
# now check if do-sync is needed
for t in self.get_tables_in_state(TABLE_WANNA_SYNC):
# copy thread wants sync, if not behind, do it
if self.cur_tick >= t.sync_tick_id:
if dsync_ok:
self.change_table_state(dst_db, t, TABLE_DO_SYNC, self.cur_tick)
ret = SYNC_LOOP
else:
need_dsync = True
# tune batch size if needed
if need_dsync:
if self.pgq_min_count or self.pgq_min_interval:
bak = (self.cur_tick, self.pgq_min_count, self.pgq_min_interval)
self.dsync_backup = bak
self.pgq_min_count = None
self.pgq_min_interval = None
elif self.dsync_backup:
self.pgq_min_count = self.dsync_backup[1]
self.pgq_min_interval = self.dsync_backup[2]
self.dsync_backup = None
# now handle new copies
npossible = self.parallel_copies - cnt.get_copy_count()
if cnt.missing and npossible > 0:
pmap = self.get_state_map(src_db.cursor())
src_db.commit()
for t in self.get_tables_in_state(TABLE_MISSING):
if 'copy_node' in t.table_attrs:
# should we go and check this node?
pass
else:
# regular provider is used
if t.name not in pmap:
self.log.warning("Table %s not available on provider", t.name)
continue
pt = pmap[t.name]
if pt.state != TABLE_OK: # or pt.custom_snapshot: # FIXME: does snapshot matter?
self.log.info("Table %s not OK on provider, waiting", t.name)
continue
# don't allow more copies than configured
if npossible == 0:
break
npossible -= 1
# drop all foreign keys to and from this table
self.drop_fkeys(dst_db, t.dest_table)
# change state after fkeys are dropped thus allowing
# failure in between
self.change_table_state(dst_db, t, TABLE_IN_COPY)
# the copy _may_ happen immediately
self.launch_copy(t)
# there cannot be interesting events in current batch
# but maybe there are several tables, let's do them in one go
ret = SYNC_LOOP
return ret
|
[
"def",
"sync_from_main_thread",
"(",
"self",
",",
"cnt",
",",
"src_db",
",",
"dst_db",
")",
":",
"# This operates on all table, any amount can be in any state",
"ret",
"=",
"SYNC_OK",
"if",
"cnt",
".",
"do_sync",
":",
"# wait for copy thread to catch up",
"ret",
"=",
"SYNC_LOOP",
"# we need to do wanna-sync->do_sync with small batches",
"need_dsync",
"=",
"False",
"dsync_ok",
"=",
"True",
"if",
"self",
".",
"pgq_min_interval",
"or",
"self",
".",
"pgq_min_count",
":",
"dsync_ok",
"=",
"False",
"elif",
"self",
".",
"dsync_backup",
"and",
"self",
".",
"dsync_backup",
"[",
"0",
"]",
">=",
"self",
".",
"cur_tick",
":",
"dsync_ok",
"=",
"False",
"# now check if do-sync is needed",
"for",
"t",
"in",
"self",
".",
"get_tables_in_state",
"(",
"TABLE_WANNA_SYNC",
")",
":",
"# copy thread wants sync, if not behind, do it",
"if",
"self",
".",
"cur_tick",
">=",
"t",
".",
"sync_tick_id",
":",
"if",
"dsync_ok",
":",
"self",
".",
"change_table_state",
"(",
"dst_db",
",",
"t",
",",
"TABLE_DO_SYNC",
",",
"self",
".",
"cur_tick",
")",
"ret",
"=",
"SYNC_LOOP",
"else",
":",
"need_dsync",
"=",
"True",
"# tune batch size if needed",
"if",
"need_dsync",
":",
"if",
"self",
".",
"pgq_min_count",
"or",
"self",
".",
"pgq_min_interval",
":",
"bak",
"=",
"(",
"self",
".",
"cur_tick",
",",
"self",
".",
"pgq_min_count",
",",
"self",
".",
"pgq_min_interval",
")",
"self",
".",
"dsync_backup",
"=",
"bak",
"self",
".",
"pgq_min_count",
"=",
"None",
"self",
".",
"pgq_min_interval",
"=",
"None",
"elif",
"self",
".",
"dsync_backup",
":",
"self",
".",
"pgq_min_count",
"=",
"self",
".",
"dsync_backup",
"[",
"1",
"]",
"self",
".",
"pgq_min_interval",
"=",
"self",
".",
"dsync_backup",
"[",
"2",
"]",
"self",
".",
"dsync_backup",
"=",
"None",
"# now handle new copies",
"npossible",
"=",
"self",
".",
"parallel_copies",
"-",
"cnt",
".",
"get_copy_count",
"(",
")",
"if",
"cnt",
".",
"missing",
"and",
"npossible",
">",
"0",
":",
"pmap",
"=",
"self",
".",
"get_state_map",
"(",
"src_db",
".",
"cursor",
"(",
")",
")",
"src_db",
".",
"commit",
"(",
")",
"for",
"t",
"in",
"self",
".",
"get_tables_in_state",
"(",
"TABLE_MISSING",
")",
":",
"if",
"'copy_node'",
"in",
"t",
".",
"table_attrs",
":",
"# should we go and check this node?",
"pass",
"else",
":",
"# regular provider is used",
"if",
"t",
".",
"name",
"not",
"in",
"pmap",
":",
"self",
".",
"log",
".",
"warning",
"(",
"\"Table %s not available on provider\"",
",",
"t",
".",
"name",
")",
"continue",
"pt",
"=",
"pmap",
"[",
"t",
".",
"name",
"]",
"if",
"pt",
".",
"state",
"!=",
"TABLE_OK",
":",
"# or pt.custom_snapshot: # FIXME: does snapsnot matter?",
"self",
".",
"log",
".",
"info",
"(",
"\"Table %s not OK on provider, waiting\"",
",",
"t",
".",
"name",
")",
"continue",
"# don't allow more copies than configured",
"if",
"npossible",
"==",
"0",
":",
"break",
"npossible",
"-=",
"1",
"# drop all foreign keys to and from this table",
"self",
".",
"drop_fkeys",
"(",
"dst_db",
",",
"t",
".",
"dest_table",
")",
"# change state after fkeys are dropped thus allowing",
"# failure inbetween",
"self",
".",
"change_table_state",
"(",
"dst_db",
",",
"t",
",",
"TABLE_IN_COPY",
")",
"# the copy _may_ happen immediately",
"self",
".",
"launch_copy",
"(",
"t",
")",
"# there cannot be interesting events in current batch",
"# but maybe there's several tables, lets do them in one go",
"ret",
"=",
"SYNC_LOOP",
"return",
"ret"
] |
https://github.com/pgq/skytools-legacy/blob/8b7e6c118572a605d28b7a3403c96aeecfd0d272/python/londiste/playback.py#L455-L534
|
|
Gandi/gandi.cli
|
5de0605126247e986f8288b467a52710a78e1794
|
gandi/cli/modules/webacc.py
|
python
|
Webacc.list
|
(cls, options=None)
|
return cls.call('hosting.rproxy.list', options)
|
List all webaccelerator
|
List all webaccelerator
|
[
"List",
"all",
"webaccelerator"
] |
def list(cls, options=None):
""" List all webaccelerator """
if not options:
options = {}
return cls.call('hosting.rproxy.list', options)
|
[
"def",
"list",
"(",
"cls",
",",
"options",
"=",
"None",
")",
":",
"if",
"not",
"options",
":",
"options",
"=",
"{",
"}",
"return",
"cls",
".",
"call",
"(",
"'hosting.rproxy.list'",
",",
"options",
")"
] |
https://github.com/Gandi/gandi.cli/blob/5de0605126247e986f8288b467a52710a78e1794/gandi/cli/modules/webacc.py#L23-L27
|
|
KalleHallden/AutoTimer
|
2d954216700c4930baa154e28dbddc34609af7ce
|
env/lib/python2.7/site-packages/pip/_internal/download.py
|
python
|
_copy_file
|
(filename, location, link)
|
[] |
def _copy_file(filename, location, link):
copy = True
download_location = os.path.join(location, link.filename)
if os.path.exists(download_location):
response = ask_path_exists(
'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort' %
display_path(download_location), ('i', 'w', 'b', 'a'))
if response == 'i':
copy = False
elif response == 'w':
logger.warning('Deleting %s', display_path(download_location))
os.remove(download_location)
elif response == 'b':
dest_file = backup_dir(download_location)
logger.warning(
'Backing up %s to %s',
display_path(download_location),
display_path(dest_file),
)
shutil.move(download_location, dest_file)
elif response == 'a':
sys.exit(-1)
if copy:
shutil.copy(filename, download_location)
logger.info('Saved %s', display_path(download_location))
|
[
"def",
"_copy_file",
"(",
"filename",
",",
"location",
",",
"link",
")",
":",
"copy",
"=",
"True",
"download_location",
"=",
"os",
".",
"path",
".",
"join",
"(",
"location",
",",
"link",
".",
"filename",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"download_location",
")",
":",
"response",
"=",
"ask_path_exists",
"(",
"'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)abort'",
"%",
"display_path",
"(",
"download_location",
")",
",",
"(",
"'i'",
",",
"'w'",
",",
"'b'",
",",
"'a'",
")",
")",
"if",
"response",
"==",
"'i'",
":",
"copy",
"=",
"False",
"elif",
"response",
"==",
"'w'",
":",
"logger",
".",
"warning",
"(",
"'Deleting %s'",
",",
"display_path",
"(",
"download_location",
")",
")",
"os",
".",
"remove",
"(",
"download_location",
")",
"elif",
"response",
"==",
"'b'",
":",
"dest_file",
"=",
"backup_dir",
"(",
"download_location",
")",
"logger",
".",
"warning",
"(",
"'Backing up %s to %s'",
",",
"display_path",
"(",
"download_location",
")",
",",
"display_path",
"(",
"dest_file",
")",
",",
")",
"shutil",
".",
"move",
"(",
"download_location",
",",
"dest_file",
")",
"elif",
"response",
"==",
"'a'",
":",
"sys",
".",
"exit",
"(",
"-",
"1",
")",
"if",
"copy",
":",
"shutil",
".",
"copy",
"(",
"filename",
",",
"download_location",
")",
"logger",
".",
"info",
"(",
"'Saved %s'",
",",
"display_path",
"(",
"download_location",
")",
")"
] |
https://github.com/KalleHallden/AutoTimer/blob/2d954216700c4930baa154e28dbddc34609af7ce/env/lib/python2.7/site-packages/pip/_internal/download.py#L866-L890
|
||||
googleads/google-ads-python
|
2a1d6062221f6aad1992a6bcca0e7e4a93d2db86
|
google/ads/googleads/v7/services/services/recommendation_service/transports/grpc.py
|
python
|
RecommendationServiceGrpcTransport.__init__
|
(
self,
*,
host: str = "googleads.googleapis.com",
credentials: credentials.Credentials = None,
credentials_file: str = None,
scopes: Sequence[str] = None,
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
)
|
Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional[Sequence[str]]): A list of scopes. This argument is
ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
|
Instantiate the transport.
|
[
"Instantiate",
"the",
"transport",
"."
] |
def __init__(
self,
*,
host: str = "googleads.googleapis.com",
credentials: credentials.Credentials = None,
credentials_file: str = None,
scopes: Sequence[str] = None,
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional[Sequence[str]]): A list of scopes. This argument is
ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
self._ssl_channel_credentials = ssl_channel_credentials
if channel:
# Sanity check: Ensure that channel and credentials are not both
# provided.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
elif api_mtls_endpoint:
warnings.warn(
"api_mtls_endpoint and client_cert_source are deprecated",
DeprecationWarning,
)
host = (
api_mtls_endpoint
if ":" in api_mtls_endpoint
else api_mtls_endpoint + ":443"
)
if credentials is None:
credentials, _ = auth.default(
scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
)
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
ssl_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
ssl_credentials = SslCredentials().ssl_credentials
# create a new channel. The provided one is ignored.
self._grpc_channel = type(self).create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
ssl_credentials=ssl_credentials,
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
self._ssl_channel_credentials = ssl_credentials
else:
host = host if ":" in host else host + ":443"
if credentials is None:
credentials, _ = auth.default(scopes=self.AUTH_SCOPES)
# create a new channel. The provided one is ignored.
self._grpc_channel = type(self).create_channel(
host,
credentials=credentials,
ssl_credentials=ssl_channel_credentials,
scopes=self.AUTH_SCOPES,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
self._stubs = {} # type: Dict[str, Callable]
# Run the base constructor.
super().__init__(
host=host, credentials=credentials, client_info=client_info,
)
|
[
"def",
"__init__",
"(",
"self",
",",
"*",
",",
"host",
":",
"str",
"=",
"\"googleads.googleapis.com\"",
",",
"credentials",
":",
"credentials",
".",
"Credentials",
"=",
"None",
",",
"credentials_file",
":",
"str",
"=",
"None",
",",
"scopes",
":",
"Sequence",
"[",
"str",
"]",
"=",
"None",
",",
"channel",
":",
"grpc",
".",
"Channel",
"=",
"None",
",",
"api_mtls_endpoint",
":",
"str",
"=",
"None",
",",
"client_cert_source",
":",
"Callable",
"[",
"[",
"]",
",",
"Tuple",
"[",
"bytes",
",",
"bytes",
"]",
"]",
"=",
"None",
",",
"ssl_channel_credentials",
":",
"grpc",
".",
"ChannelCredentials",
"=",
"None",
",",
"quota_project_id",
":",
"Optional",
"[",
"str",
"]",
"=",
"None",
",",
"client_info",
":",
"gapic_v1",
".",
"client_info",
".",
"ClientInfo",
"=",
"DEFAULT_CLIENT_INFO",
",",
")",
"->",
"None",
":",
"self",
".",
"_ssl_channel_credentials",
"=",
"ssl_channel_credentials",
"if",
"channel",
":",
"# Sanity check: Ensure that channel and credentials are not both",
"# provided.",
"credentials",
"=",
"False",
"# If a channel was explicitly provided, set it.",
"self",
".",
"_grpc_channel",
"=",
"channel",
"self",
".",
"_ssl_channel_credentials",
"=",
"None",
"elif",
"api_mtls_endpoint",
":",
"warnings",
".",
"warn",
"(",
"\"api_mtls_endpoint and client_cert_source are deprecated\"",
",",
"DeprecationWarning",
",",
")",
"host",
"=",
"(",
"api_mtls_endpoint",
"if",
"\":\"",
"in",
"api_mtls_endpoint",
"else",
"api_mtls_endpoint",
"+",
"\":443\"",
")",
"if",
"credentials",
"is",
"None",
":",
"credentials",
",",
"_",
"=",
"auth",
".",
"default",
"(",
"scopes",
"=",
"self",
".",
"AUTH_SCOPES",
",",
"quota_project_id",
"=",
"quota_project_id",
")",
"# Create SSL credentials with client_cert_source or application",
"# default SSL credentials.",
"if",
"client_cert_source",
":",
"cert",
",",
"key",
"=",
"client_cert_source",
"(",
")",
"ssl_credentials",
"=",
"grpc",
".",
"ssl_channel_credentials",
"(",
"certificate_chain",
"=",
"cert",
",",
"private_key",
"=",
"key",
")",
"else",
":",
"ssl_credentials",
"=",
"SslCredentials",
"(",
")",
".",
"ssl_credentials",
"# create a new channel. The provided one is ignored.",
"self",
".",
"_grpc_channel",
"=",
"type",
"(",
"self",
")",
".",
"create_channel",
"(",
"host",
",",
"credentials",
"=",
"credentials",
",",
"credentials_file",
"=",
"credentials_file",
",",
"ssl_credentials",
"=",
"ssl_credentials",
",",
"scopes",
"=",
"scopes",
"or",
"self",
".",
"AUTH_SCOPES",
",",
"quota_project_id",
"=",
"quota_project_id",
",",
"options",
"=",
"[",
"(",
"\"grpc.max_send_message_length\"",
",",
"-",
"1",
")",
",",
"(",
"\"grpc.max_receive_message_length\"",
",",
"-",
"1",
")",
",",
"]",
",",
")",
"self",
".",
"_ssl_channel_credentials",
"=",
"ssl_credentials",
"else",
":",
"host",
"=",
"host",
"if",
"\":\"",
"in",
"host",
"else",
"host",
"+",
"\":443\"",
"if",
"credentials",
"is",
"None",
":",
"credentials",
",",
"_",
"=",
"auth",
".",
"default",
"(",
"scopes",
"=",
"self",
".",
"AUTH_SCOPES",
")",
"# create a new channel. The provided one is ignored.",
"self",
".",
"_grpc_channel",
"=",
"type",
"(",
"self",
")",
".",
"create_channel",
"(",
"host",
",",
"credentials",
"=",
"credentials",
",",
"ssl_credentials",
"=",
"ssl_channel_credentials",
",",
"scopes",
"=",
"self",
".",
"AUTH_SCOPES",
",",
"options",
"=",
"[",
"(",
"\"grpc.max_send_message_length\"",
",",
"-",
"1",
")",
",",
"(",
"\"grpc.max_receive_message_length\"",
",",
"-",
"1",
")",
",",
"]",
",",
")",
"self",
".",
"_stubs",
"=",
"{",
"}",
"# type: Dict[str, Callable]",
"# Run the base constructor.",
"super",
"(",
")",
".",
"__init__",
"(",
"host",
"=",
"host",
",",
"credentials",
"=",
"credentials",
",",
"client_info",
"=",
"client_info",
",",
")"
] |
https://github.com/googleads/google-ads-python/blob/2a1d6062221f6aad1992a6bcca0e7e4a93d2db86/google/ads/googleads/v7/services/services/recommendation_service/transports/grpc.py#L45-L173
|