nwo (stringlengths 5-86) | sha (stringlengths 40-40) | path (stringlengths 4-189) | language (stringclasses 1: value) | identifier (stringlengths 1-94) | parameters (stringlengths 2-4.03k) | argument_list (stringclasses 1: value) | return_statement (stringlengths 0-11.5k) | docstring (stringlengths 1-33.2k) | docstring_summary (stringlengths 0-5.15k) | docstring_tokens (sequence) | function (stringlengths 34-151k) | function_tokens (sequence) | url (stringlengths 90-278) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
qboticslabs/mastering_ros | d83e78f30acc45b0f18522c1d5fae3a7f52974b9 | chapter_4_codes/seven_dof_arm_gazebo/scripts/pick_and_place_working_1.py | python | CokeCanPickAndPlace._publish_places | (self, places) | Publish places as poses, using a PoseArray message | Publish places as poses, using a PoseArray message | [
"Publish",
"places",
"as",
"poses",
"using",
"a",
"PoseArray",
"message"
] | def _publish_places(self, places):
"""
Publish places as poses, using a PoseArray message
"""
if self._places_pub.get_num_connections() > 0:
msg = PoseArray()
msg.header.frame_id = self._robot.get_planning_frame()
msg.header.stamp = rospy.Time.now()
for place in places:
msg.poses.append(place.place_pose.pose)
self._places_pub.publish(msg) | [
"def",
"_publish_places",
"(",
"self",
",",
"places",
")",
":",
"if",
"self",
".",
"_places_pub",
".",
"get_num_connections",
"(",
")",
">",
"0",
":",
"msg",
"=",
"PoseArray",
"(",
")",
"msg",
".",
"header",
".",
"frame_id",
"=",
"self",
".",
"_robot",
".",
"get_planning_frame",
"(",
")",
"msg",
".",
"header",
".",
"stamp",
"=",
"rospy",
".",
"Time",
".",
"now",
"(",
")",
"for",
"place",
"in",
"places",
":",
"msg",
".",
"poses",
".",
"append",
"(",
"place",
".",
"place_pose",
".",
"pose",
")",
"self",
".",
"_places_pub",
".",
"publish",
"(",
"msg",
")"
] | https://github.com/qboticslabs/mastering_ros/blob/d83e78f30acc45b0f18522c1d5fae3a7f52974b9/chapter_4_codes/seven_dof_arm_gazebo/scripts/pick_and_place_working_1.py#L368-L381 |
||
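A minimal usage sketch for the `_publish_places` record above, assuming a running ROS master; the node, topic, and frame names are invented for illustration:

```python
# Sketch only: requires rospy and geometry_msgs; topic/frame names are made up.
import rospy
from geometry_msgs.msg import Pose, PoseArray

rospy.init_node("places_demo")
places_pub = rospy.Publisher("recognized_places", PoseArray, queue_size=1)
rospy.sleep(0.5)  # give subscribers a moment to connect

msg = PoseArray()
msg.header.frame_id = "base_link"   # stands in for the robot planning frame
msg.header.stamp = rospy.Time.now()
msg.poses.append(Pose())            # one identity pose as a placeholder

if places_pub.get_num_connections() > 0:  # same guard as in the record
    places_pub.publish(msg)
```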
deepmind/open_spiel | 4ca53bea32bb2875c7385d215424048ae92f78c8 | open_spiel/python/algorithms/value_iteration.py | python | _get_future_states | (possibilities, state, reach=1.0) | Does a lookahead over chance nodes to all next states after (s,a).
Also works if there are no chance nodes (i.e. base case).
Arguments:
possibilities: an empty list, that will be filled with (str(next_state),
transition probability) pairs for all possible next states
state: the state following some s.apply_action(a), can be a chance node
reach: chance reach probability of getting to this point from (s,a)
Returns: nothing. | Does a lookahead over chance nodes to all next states after (s,a). | [
"Does",
"a",
"lookahead",
"over",
"chance",
"nodes",
"to",
"all",
"next",
"states",
"after",
"(",
"s",
"a",
")",
"."
] | def _get_future_states(possibilities, state, reach=1.0):
"""Does a lookahead over chance nodes to all next states after (s,a).
Also works if there are no chance nodes (i.e. base case).
Arguments:
possibilities: an empty list, that will be filled with (str(next_state),
transition probability) pairs for all possible next states
state: the state following some s.apply_action(a), can be a chance node
reach: chance reach probability of getting to this point from (s,a)
Returns: nothing.
"""
if not state.is_chance_node() or state.is_terminal():
# Base case
possibilities.append((str(state), reach))
else:
assert state.is_chance_node()
for outcome, prob in state.chance_outcomes():
next_state = state.child(outcome)
_get_future_states(possibilities, next_state, reach * prob) | [
"def",
"_get_future_states",
"(",
"possibilities",
",",
"state",
",",
"reach",
"=",
"1.0",
")",
":",
"if",
"not",
"state",
".",
"is_chance_node",
"(",
")",
"or",
"state",
".",
"is_terminal",
"(",
")",
":",
"# Base case",
"possibilities",
".",
"append",
"(",
"(",
"str",
"(",
"state",
")",
",",
"reach",
")",
")",
"else",
":",
"assert",
"state",
".",
"is_chance_node",
"(",
")",
"for",
"outcome",
",",
"prob",
"in",
"state",
".",
"chance_outcomes",
"(",
")",
":",
"next_state",
"=",
"state",
".",
"child",
"(",
"outcome",
")",
"_get_future_states",
"(",
"possibilities",
",",
"next_state",
",",
"reach",
"*",
"prob",
")"
] | https://github.com/deepmind/open_spiel/blob/4ca53bea32bb2875c7385d215424048ae92f78c8/open_spiel/python/algorithms/value_iteration.py#L26-L45 |
||
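The recursion in `_get_future_states` is easiest to see on a toy stand-in for an OpenSpiel state; everything in the sketch below is invented for illustration and mirrors the record's logic:

```python
# Toy stand-in for an OpenSpiel state: a chance node with two outcomes,
# each leading to a terminal state. All names and probabilities are invented.
class FakeState:
    def __init__(self, name, outcomes=None):
        self.name, self.outcomes = name, outcomes or []
    def is_chance_node(self):
        return bool(self.outcomes)
    def is_terminal(self):
        return not self.outcomes
    def chance_outcomes(self):
        return self.outcomes           # list of (child, prob) pairs
    def child(self, outcome):
        return outcome
    def __str__(self):
        return self.name

def get_future_states(possibilities, state, reach=1.0):
    if not state.is_chance_node() or state.is_terminal():
        possibilities.append((str(state), reach))   # base case
    else:
        for outcome, prob in state.chance_outcomes():
            get_future_states(possibilities, state.child(outcome), reach * prob)

root = FakeState("chance", [(FakeState("s1"), 0.25), (FakeState("s2"), 0.75)])
out = []
get_future_states(out, root)
print(out)   # [('s1', 0.25), ('s2', 0.75)]
```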
Xilinx/Vitis-AI | fc74d404563d9951b57245443c73bef389f3657f | tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/ops/array_ops.py | python | _slice_helper | (tensor, slice_spec, var=None) | Overload for Tensor.__getitem__.
This operation extracts the specified region from the tensor.
The notation is similar to NumPy with the restriction that
it currently supports only basic indexing. That means that
using a non-scalar tensor as input is not currently allowed.
Some useful examples:
```python
# Strip leading and trailing 2 elements
foo = tf.constant([1,2,3,4,5,6])
print(foo[2:-2].eval()) # => [3,4]
# Skip every other row and reverse the order of the columns
foo = tf.constant([[1,2,3], [4,5,6], [7,8,9]])
print(foo[::2,::-1].eval()) # => [[3,2,1], [9,8,7]]
# Use scalar tensors as indices on both dimensions
print(foo[tf.constant(0), tf.constant(2)].eval()) # => 3
# Insert another dimension
foo = tf.constant([[1,2,3], [4,5,6], [7,8,9]])
print(foo[tf.newaxis, :, :].eval()) # => [[[1,2,3], [4,5,6], [7,8,9]]]
print(foo[:, tf.newaxis, :].eval()) # => [[[1,2,3]], [[4,5,6]], [[7,8,9]]]
print(foo[:, :, tf.newaxis].eval()) # => [[[1],[2],[3]], [[4],[5],[6]],
[[7],[8],[9]]]
# Ellipses (3 equivalent operations)
foo = tf.constant([[1,2,3], [4,5,6], [7,8,9]])
print(foo[tf.newaxis, :, :].eval()) # => [[[1,2,3], [4,5,6], [7,8,9]]]
print(foo[tf.newaxis, ...].eval()) # => [[[1,2,3], [4,5,6], [7,8,9]]]
print(foo[tf.newaxis].eval()) # => [[[1,2,3], [4,5,6], [7,8,9]]]
# Masks
foo = tf.constant([[1,2,3], [4,5,6], [7,8,9]])
print(foo[foo > 2].eval()) # => [3, 4, 5, 6, 7, 8, 9]
```
Notes:
- `tf.newaxis` is `None` as in NumPy.
- An implicit ellipsis is placed at the end of the `slice_spec`
- NumPy advanced indexing is currently not supported.
Args:
tensor: An ops.Tensor object.
slice_spec: The arguments to Tensor.__getitem__.
var: In the case of variable slice assignment, the Variable object to slice
(i.e. tensor is the read-only view of this variable).
Returns:
The appropriate slice of "tensor", based on "slice_spec".
Raises:
ValueError: If a slice range is negative size.
TypeError: If the slice indices aren't int, slice, ellipsis,
tf.newaxis or scalar int32/int64 tensors. | Overload for Tensor.__getitem__. | [
"Overload",
"for",
"Tensor",
".",
"__getitem__",
"."
] | def _slice_helper(tensor, slice_spec, var=None):
"""Overload for Tensor.__getitem__.
This operation extracts the specified region from the tensor.
The notation is similar to NumPy with the restriction that
it currently supports only basic indexing. That means that
using a non-scalar tensor as input is not currently allowed.
Some useful examples:
```python
# Strip leading and trailing 2 elements
foo = tf.constant([1,2,3,4,5,6])
print(foo[2:-2].eval()) # => [3,4]
# Skip every other row and reverse the order of the columns
foo = tf.constant([[1,2,3], [4,5,6], [7,8,9]])
print(foo[::2,::-1].eval()) # => [[3,2,1], [9,8,7]]
# Use scalar tensors as indices on both dimensions
print(foo[tf.constant(0), tf.constant(2)].eval()) # => 3
# Insert another dimension
foo = tf.constant([[1,2,3], [4,5,6], [7,8,9]])
print(foo[tf.newaxis, :, :].eval()) # => [[[1,2,3], [4,5,6], [7,8,9]]]
print(foo[:, tf.newaxis, :].eval()) # => [[[1,2,3]], [[4,5,6]], [[7,8,9]]]
print(foo[:, :, tf.newaxis].eval()) # => [[[1],[2],[3]], [[4],[5],[6]],
[[7],[8],[9]]]
# Ellipses (3 equivalent operations)
foo = tf.constant([[1,2,3], [4,5,6], [7,8,9]])
print(foo[tf.newaxis, :, :].eval()) # => [[[1,2,3], [4,5,6], [7,8,9]]]
print(foo[tf.newaxis, ...].eval()) # => [[[1,2,3], [4,5,6], [7,8,9]]]
print(foo[tf.newaxis].eval()) # => [[[1,2,3], [4,5,6], [7,8,9]]]
# Masks
foo = tf.constant([[1,2,3], [4,5,6], [7,8,9]])
print(foo[foo > 2].eval()) # => [3, 4, 5, 6, 7, 8, 9]
```
Notes:
- `tf.newaxis` is `None` as in NumPy.
- An implicit ellipsis is placed at the end of the `slice_spec`
- NumPy advanced indexing is currently not supported.
Args:
tensor: An ops.Tensor object.
slice_spec: The arguments to Tensor.__getitem__.
var: In the case of variable slice assignment, the Variable object to slice
(i.e. tensor is the read-only view of this variable).
Returns:
The appropriate slice of "tensor", based on "slice_spec".
Raises:
ValueError: If a slice range is negative size.
TypeError: If the slice indices aren't int, slice, ellipsis,
tf.newaxis or scalar int32/int64 tensors.
"""
if isinstance(slice_spec, bool) or \
(isinstance(slice_spec, ops.Tensor) and slice_spec.dtype == dtypes.bool) or \
(isinstance(slice_spec, np.ndarray) and slice_spec.dtype == bool):
return boolean_mask(tensor=tensor, mask=slice_spec)
if not isinstance(slice_spec, (list, tuple)):
slice_spec = [slice_spec]
begin, end, strides = [], [], []
index = 0
new_axis_mask, shrink_axis_mask = 0, 0
begin_mask, end_mask = 0, 0
ellipsis_mask = 0
for s in slice_spec:
if isinstance(s, _BaseSlice):
# python doesn't always use None when constructing ranges
# for example a[:] gives slice(None,sys.maxsize,None)
# whereas a[::1] gives slice(None,None,None)
if s.start is not None and (isinstance(s.start, ops.Tensor) or
s.start != sys.maxsize):
_check_index(s.start)
begin.append(s.start)
else:
begin.append(0)
begin_mask |= (1 << index)
if s.stop is not None and (isinstance(s.stop, ops.Tensor) or
s.stop != sys.maxsize):
_check_index(s.stop)
end.append(s.stop)
else:
end.append(0)
end_mask |= (1 << index)
if s.step is not None:
_check_index(s.step)
strides.append(s.step)
else:
strides.append(1)
elif s is Ellipsis:
begin.append(0)
end.append(0)
strides.append(1)
ellipsis_mask |= (1 << index)
elif s is newaxis:
begin.append(0)
end.append(0)
strides.append(1)
new_axis_mask |= (1 << index)
else:
_check_index(s)
begin.append(s)
end.append(s + 1)
strides.append(1)
shrink_axis_mask |= (1 << index)
index += 1
# stack possibly involves no tensors, so we must use op_scope correct graph.
with ops.name_scope(None, "strided_slice",
[tensor] + begin + end + strides) as name:
if begin:
packed_begin, packed_end, packed_strides = (stack(begin), stack(end),
stack(strides))
if (packed_begin.dtype == dtypes.int64 or
packed_end.dtype == dtypes.int64 or
packed_strides.dtype == dtypes.int64):
if packed_begin.dtype != dtypes.int64:
packed_begin = gen_math_ops.cast(packed_begin, dtypes.int64)
if packed_end.dtype != dtypes.int64:
packed_end = gen_math_ops.cast(packed_end, dtypes.int64)
if packed_strides.dtype != dtypes.int64:
packed_strides = gen_math_ops.cast(packed_strides, dtypes.int64)
else:
var_empty = constant([], dtype=dtypes.int32)
packed_begin = packed_end = packed_strides = var_empty
return strided_slice(
tensor,
packed_begin,
packed_end,
packed_strides,
begin_mask=begin_mask,
end_mask=end_mask,
shrink_axis_mask=shrink_axis_mask,
new_axis_mask=new_axis_mask,
ellipsis_mask=ellipsis_mask,
var=var,
name=name) | [
"def",
"_slice_helper",
"(",
"tensor",
",",
"slice_spec",
",",
"var",
"=",
"None",
")",
":",
"if",
"isinstance",
"(",
"slice_spec",
",",
"bool",
")",
"or",
"(",
"isinstance",
"(",
"slice_spec",
",",
"ops",
".",
"Tensor",
")",
"and",
"slice_spec",
".",
"dtype",
"==",
"dtypes",
".",
"bool",
")",
"or",
"(",
"isinstance",
"(",
"slice_spec",
",",
"np",
".",
"ndarray",
")",
"and",
"slice_spec",
".",
"dtype",
"==",
"bool",
")",
":",
"return",
"boolean_mask",
"(",
"tensor",
"=",
"tensor",
",",
"mask",
"=",
"slice_spec",
")",
"if",
"not",
"isinstance",
"(",
"slice_spec",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"slice_spec",
"=",
"[",
"slice_spec",
"]",
"begin",
",",
"end",
",",
"strides",
"=",
"[",
"]",
",",
"[",
"]",
",",
"[",
"]",
"index",
"=",
"0",
"new_axis_mask",
",",
"shrink_axis_mask",
"=",
"0",
",",
"0",
"begin_mask",
",",
"end_mask",
"=",
"0",
",",
"0",
"ellipsis_mask",
"=",
"0",
"for",
"s",
"in",
"slice_spec",
":",
"if",
"isinstance",
"(",
"s",
",",
"_BaseSlice",
")",
":",
"# python doesn't always use None when constructing ranges",
"# for example a[:] gives slice(None,sys.maxsize,None)",
"# whereas a[::1] gives slice(None,None,None)",
"if",
"s",
".",
"start",
"is",
"not",
"None",
"and",
"(",
"isinstance",
"(",
"s",
".",
"start",
",",
"ops",
".",
"Tensor",
")",
"or",
"s",
".",
"start",
"!=",
"sys",
".",
"maxsize",
")",
":",
"_check_index",
"(",
"s",
".",
"start",
")",
"begin",
".",
"append",
"(",
"s",
".",
"start",
")",
"else",
":",
"begin",
".",
"append",
"(",
"0",
")",
"begin_mask",
"|=",
"(",
"1",
"<<",
"index",
")",
"if",
"s",
".",
"stop",
"is",
"not",
"None",
"and",
"(",
"isinstance",
"(",
"s",
".",
"stop",
",",
"ops",
".",
"Tensor",
")",
"or",
"s",
".",
"stop",
"!=",
"sys",
".",
"maxsize",
")",
":",
"_check_index",
"(",
"s",
".",
"stop",
")",
"end",
".",
"append",
"(",
"s",
".",
"stop",
")",
"else",
":",
"end",
".",
"append",
"(",
"0",
")",
"end_mask",
"|=",
"(",
"1",
"<<",
"index",
")",
"if",
"s",
".",
"step",
"is",
"not",
"None",
":",
"_check_index",
"(",
"s",
".",
"step",
")",
"strides",
".",
"append",
"(",
"s",
".",
"step",
")",
"else",
":",
"strides",
".",
"append",
"(",
"1",
")",
"elif",
"s",
"is",
"Ellipsis",
":",
"begin",
".",
"append",
"(",
"0",
")",
"end",
".",
"append",
"(",
"0",
")",
"strides",
".",
"append",
"(",
"1",
")",
"ellipsis_mask",
"|=",
"(",
"1",
"<<",
"index",
")",
"elif",
"s",
"is",
"newaxis",
":",
"begin",
".",
"append",
"(",
"0",
")",
"end",
".",
"append",
"(",
"0",
")",
"strides",
".",
"append",
"(",
"1",
")",
"new_axis_mask",
"|=",
"(",
"1",
"<<",
"index",
")",
"else",
":",
"_check_index",
"(",
"s",
")",
"begin",
".",
"append",
"(",
"s",
")",
"end",
".",
"append",
"(",
"s",
"+",
"1",
")",
"strides",
".",
"append",
"(",
"1",
")",
"shrink_axis_mask",
"|=",
"(",
"1",
"<<",
"index",
")",
"index",
"+=",
"1",
"# stack possibly involves no tensors, so we must use op_scope correct graph.",
"with",
"ops",
".",
"name_scope",
"(",
"None",
",",
"\"strided_slice\"",
",",
"[",
"tensor",
"]",
"+",
"begin",
"+",
"end",
"+",
"strides",
")",
"as",
"name",
":",
"if",
"begin",
":",
"packed_begin",
",",
"packed_end",
",",
"packed_strides",
"=",
"(",
"stack",
"(",
"begin",
")",
",",
"stack",
"(",
"end",
")",
",",
"stack",
"(",
"strides",
")",
")",
"if",
"(",
"packed_begin",
".",
"dtype",
"==",
"dtypes",
".",
"int64",
"or",
"packed_end",
".",
"dtype",
"==",
"dtypes",
".",
"int64",
"or",
"packed_strides",
".",
"dtype",
"==",
"dtypes",
".",
"int64",
")",
":",
"if",
"packed_begin",
".",
"dtype",
"!=",
"dtypes",
".",
"int64",
":",
"packed_begin",
"=",
"gen_math_ops",
".",
"cast",
"(",
"packed_begin",
",",
"dtypes",
".",
"int64",
")",
"if",
"packed_end",
".",
"dtype",
"!=",
"dtypes",
".",
"int64",
":",
"packed_end",
"=",
"gen_math_ops",
".",
"cast",
"(",
"packed_end",
",",
"dtypes",
".",
"int64",
")",
"if",
"packed_strides",
".",
"dtype",
"!=",
"dtypes",
".",
"int64",
":",
"packed_strides",
"=",
"gen_math_ops",
".",
"cast",
"(",
"packed_strides",
",",
"dtypes",
".",
"int64",
")",
"else",
":",
"var_empty",
"=",
"constant",
"(",
"[",
"]",
",",
"dtype",
"=",
"dtypes",
".",
"int32",
")",
"packed_begin",
"=",
"packed_end",
"=",
"packed_strides",
"=",
"var_empty",
"return",
"strided_slice",
"(",
"tensor",
",",
"packed_begin",
",",
"packed_end",
",",
"packed_strides",
",",
"begin_mask",
"=",
"begin_mask",
",",
"end_mask",
"=",
"end_mask",
",",
"shrink_axis_mask",
"=",
"shrink_axis_mask",
",",
"new_axis_mask",
"=",
"new_axis_mask",
",",
"ellipsis_mask",
"=",
"ellipsis_mask",
",",
"var",
"=",
"var",
",",
"name",
"=",
"name",
")"
] | https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/ops/array_ops.py#L658-L802 |
||
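Every `__getitem__` handled by `_slice_helper` lowers to one `strided_slice` call whose bitmasks encode open ends, `newaxis`, and scalar indices. A rough equivalence sketch (a TF 1.x build matching this record is assumed):

```python
import tensorflow as tf  # assumes a TF 1.x build matching this record

foo = tf.constant([1, 2, 3, 4, 5, 6])

a = foo[1:3]                                    # goes through _slice_helper
b = tf.strided_slice(foo, [1], [3], [1])        # what it lowers to

c = foo[::2]                                    # open start/stop set mask bits
d = tf.strided_slice(foo, [0], [0], [2],
                     begin_mask=1, end_mask=1)  # bit 0 => "use full range"
```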
BlzFans/wke | b0fa21158312e40c5fbd84682d643022b6c34a93 | cygwin/lib/python2.6/decimal.py | python | Decimal.quantize | (self, exp, rounding=None, context=None, watchexp=True) | return ans | Quantize self so its exponent is the same as that of exp.
Similar to self._rescale(exp._exp) but with error checking. | Quantize self so its exponent is the same as that of exp. | [
"Quantize",
"self",
"so",
"its",
"exponent",
"is",
"the",
"same",
"as",
"that",
"of",
"exp",
"."
] | def quantize(self, exp, rounding=None, context=None, watchexp=True):
"""Quantize self so its exponent is the same as that of exp.
Similar to self._rescale(exp._exp) but with error checking.
"""
exp = _convert_other(exp, raiseit=True)
if context is None:
context = getcontext()
if rounding is None:
rounding = context.rounding
if self._is_special or exp._is_special:
ans = self._check_nans(exp, context)
if ans:
return ans
if exp._isinfinity() or self._isinfinity():
if exp._isinfinity() and self._isinfinity():
return Decimal(self) # if both are inf, it is OK
return context._raise_error(InvalidOperation,
'quantize with one INF')
# if we're not watching exponents, do a simple rescale
if not watchexp:
ans = self._rescale(exp._exp, rounding)
# raise Inexact and Rounded where appropriate
if ans._exp > self._exp:
context._raise_error(Rounded)
if ans != self:
context._raise_error(Inexact)
return ans
# exp._exp should be between Etiny and Emax
if not (context.Etiny() <= exp._exp <= context.Emax):
return context._raise_error(InvalidOperation,
'target exponent out of bounds in quantize')
if not self:
ans = _dec_from_triple(self._sign, '0', exp._exp)
return ans._fix(context)
self_adjusted = self.adjusted()
if self_adjusted > context.Emax:
return context._raise_error(InvalidOperation,
'exponent of quantize result too large for current context')
if self_adjusted - exp._exp + 1 > context.prec:
return context._raise_error(InvalidOperation,
'quantize result has too many digits for current context')
ans = self._rescale(exp._exp, rounding)
if ans.adjusted() > context.Emax:
return context._raise_error(InvalidOperation,
'exponent of quantize result too large for current context')
if len(ans._int) > context.prec:
return context._raise_error(InvalidOperation,
'quantize result has too many digits for current context')
# raise appropriate flags
if ans._exp > self._exp:
context._raise_error(Rounded)
if ans != self:
context._raise_error(Inexact)
if ans and ans.adjusted() < context.Emin:
context._raise_error(Subnormal)
# call to fix takes care of any necessary folddown
ans = ans._fix(context)
return ans | [
"def",
"quantize",
"(",
"self",
",",
"exp",
",",
"rounding",
"=",
"None",
",",
"context",
"=",
"None",
",",
"watchexp",
"=",
"True",
")",
":",
"exp",
"=",
"_convert_other",
"(",
"exp",
",",
"raiseit",
"=",
"True",
")",
"if",
"context",
"is",
"None",
":",
"context",
"=",
"getcontext",
"(",
")",
"if",
"rounding",
"is",
"None",
":",
"rounding",
"=",
"context",
".",
"rounding",
"if",
"self",
".",
"_is_special",
"or",
"exp",
".",
"_is_special",
":",
"ans",
"=",
"self",
".",
"_check_nans",
"(",
"exp",
",",
"context",
")",
"if",
"ans",
":",
"return",
"ans",
"if",
"exp",
".",
"_isinfinity",
"(",
")",
"or",
"self",
".",
"_isinfinity",
"(",
")",
":",
"if",
"exp",
".",
"_isinfinity",
"(",
")",
"and",
"self",
".",
"_isinfinity",
"(",
")",
":",
"return",
"Decimal",
"(",
"self",
")",
"# if both are inf, it is OK",
"return",
"context",
".",
"_raise_error",
"(",
"InvalidOperation",
",",
"'quantize with one INF'",
")",
"# if we're not watching exponents, do a simple rescale",
"if",
"not",
"watchexp",
":",
"ans",
"=",
"self",
".",
"_rescale",
"(",
"exp",
".",
"_exp",
",",
"rounding",
")",
"# raise Inexact and Rounded where appropriate",
"if",
"ans",
".",
"_exp",
">",
"self",
".",
"_exp",
":",
"context",
".",
"_raise_error",
"(",
"Rounded",
")",
"if",
"ans",
"!=",
"self",
":",
"context",
".",
"_raise_error",
"(",
"Inexact",
")",
"return",
"ans",
"# exp._exp should be between Etiny and Emax",
"if",
"not",
"(",
"context",
".",
"Etiny",
"(",
")",
"<=",
"exp",
".",
"_exp",
"<=",
"context",
".",
"Emax",
")",
":",
"return",
"context",
".",
"_raise_error",
"(",
"InvalidOperation",
",",
"'target exponent out of bounds in quantize'",
")",
"if",
"not",
"self",
":",
"ans",
"=",
"_dec_from_triple",
"(",
"self",
".",
"_sign",
",",
"'0'",
",",
"exp",
".",
"_exp",
")",
"return",
"ans",
".",
"_fix",
"(",
"context",
")",
"self_adjusted",
"=",
"self",
".",
"adjusted",
"(",
")",
"if",
"self_adjusted",
">",
"context",
".",
"Emax",
":",
"return",
"context",
".",
"_raise_error",
"(",
"InvalidOperation",
",",
"'exponent of quantize result too large for current context'",
")",
"if",
"self_adjusted",
"-",
"exp",
".",
"_exp",
"+",
"1",
">",
"context",
".",
"prec",
":",
"return",
"context",
".",
"_raise_error",
"(",
"InvalidOperation",
",",
"'quantize result has too many digits for current context'",
")",
"ans",
"=",
"self",
".",
"_rescale",
"(",
"exp",
".",
"_exp",
",",
"rounding",
")",
"if",
"ans",
".",
"adjusted",
"(",
")",
">",
"context",
".",
"Emax",
":",
"return",
"context",
".",
"_raise_error",
"(",
"InvalidOperation",
",",
"'exponent of quantize result too large for current context'",
")",
"if",
"len",
"(",
"ans",
".",
"_int",
")",
">",
"context",
".",
"prec",
":",
"return",
"context",
".",
"_raise_error",
"(",
"InvalidOperation",
",",
"'quantize result has too many digits for current context'",
")",
"# raise appropriate flags",
"if",
"ans",
".",
"_exp",
">",
"self",
".",
"_exp",
":",
"context",
".",
"_raise_error",
"(",
"Rounded",
")",
"if",
"ans",
"!=",
"self",
":",
"context",
".",
"_raise_error",
"(",
"Inexact",
")",
"if",
"ans",
"and",
"ans",
".",
"adjusted",
"(",
")",
"<",
"context",
".",
"Emin",
":",
"context",
".",
"_raise_error",
"(",
"Subnormal",
")",
"# call to fix takes care of any necessary folddown",
"ans",
"=",
"ans",
".",
"_fix",
"(",
"context",
")",
"return",
"ans"
] | https://github.com/BlzFans/wke/blob/b0fa21158312e40c5fbd84682d643022b6c34a93/cygwin/lib/python2.6/decimal.py#L2268-L2336 |
|
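The most common use of `quantize` is fixed-point rounding; this runs on the standard library as-is:

```python
from decimal import Decimal, ROUND_HALF_UP

price = Decimal("19.3456")
# The result's exponent is copied from Decimal("0.01"), i.e. two places.
cents = price.quantize(Decimal("0.01"), rounding=ROUND_HALF_UP)
print(cents)  # 19.35
```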
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/tornado/tornado-6/tornado/locale.py | python | load_gettext_translations | (directory: str, domain: str) | Loads translations from `gettext`'s locale tree
Locale tree is similar to system's ``/usr/share/locale``, like::
{directory}/{lang}/LC_MESSAGES/{domain}.mo
Three steps are required to have your app translated:
1. Generate POT translation file::
xgettext --language=Python --keyword=_:1,2 -d mydomain file1.py file2.html etc
2. Merge against existing POT file::
msgmerge old.po mydomain.po > new.po
3. Compile::
msgfmt mydomain.po -o {directory}/pt_BR/LC_MESSAGES/mydomain.mo | Loads translations from `gettext`'s locale tree | [
"Loads",
"translations",
"from",
"gettext",
"s",
"locale",
"tree"
] | def load_gettext_translations(directory: str, domain: str) -> None:
"""Loads translations from `gettext`'s locale tree
Locale tree is similar to system's ``/usr/share/locale``, like::
{directory}/{lang}/LC_MESSAGES/{domain}.mo
Three steps are required to have your app translated:
1. Generate POT translation file::
xgettext --language=Python --keyword=_:1,2 -d mydomain file1.py file2.html etc
2. Merge against existing POT file::
msgmerge old.po mydomain.po > new.po
3. Compile::
msgfmt mydomain.po -o {directory}/pt_BR/LC_MESSAGES/mydomain.mo
"""
global _translations
global _supported_locales
global _use_gettext
_translations = {}
for lang in os.listdir(directory):
if lang.startswith("."):
continue # skip .svn, etc
if os.path.isfile(os.path.join(directory, lang)):
continue
try:
os.stat(os.path.join(directory, lang, "LC_MESSAGES", domain + ".mo"))
_translations[lang] = gettext.translation(
domain, directory, languages=[lang]
)
except Exception as e:
gen_log.error("Cannot load translation for '%s': %s", lang, str(e))
continue
_supported_locales = frozenset(list(_translations.keys()) + [_default_locale])
_use_gettext = True
gen_log.debug("Supported locales: %s", sorted(_supported_locales)) | [
"def",
"load_gettext_translations",
"(",
"directory",
":",
"str",
",",
"domain",
":",
"str",
")",
"->",
"None",
":",
"global",
"_translations",
"global",
"_supported_locales",
"global",
"_use_gettext",
"_translations",
"=",
"{",
"}",
"for",
"lang",
"in",
"os",
".",
"listdir",
"(",
"directory",
")",
":",
"if",
"lang",
".",
"startswith",
"(",
"\".\"",
")",
":",
"continue",
"# skip .svn, etc",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"os",
".",
"path",
".",
"join",
"(",
"directory",
",",
"lang",
")",
")",
":",
"continue",
"try",
":",
"os",
".",
"stat",
"(",
"os",
".",
"path",
".",
"join",
"(",
"directory",
",",
"lang",
",",
"\"LC_MESSAGES\"",
",",
"domain",
"+",
"\".mo\"",
")",
")",
"_translations",
"[",
"lang",
"]",
"=",
"gettext",
".",
"translation",
"(",
"domain",
",",
"directory",
",",
"languages",
"=",
"[",
"lang",
"]",
")",
"except",
"Exception",
"as",
"e",
":",
"gen_log",
".",
"error",
"(",
"\"Cannot load translation for '%s': %s\"",
",",
"lang",
",",
"str",
"(",
"e",
")",
")",
"continue",
"_supported_locales",
"=",
"frozenset",
"(",
"list",
"(",
"_translations",
".",
"keys",
"(",
")",
")",
"+",
"[",
"_default_locale",
"]",
")",
"_use_gettext",
"=",
"True",
"gen_log",
".",
"debug",
"(",
"\"Supported locales: %s\"",
",",
"sorted",
"(",
"_supported_locales",
")",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/tornado/tornado-6/tornado/locale.py#L176-L216 |
||
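A hedged usage sketch for the loader above; the directory layout and domain name are assumptions matching the msgfmt step in the docstring:

```python
# Assumes ./locale/pt_BR/LC_MESSAGES/mydomain.mo was produced by the
# xgettext / msgmerge / msgfmt steps described in the record.
import tornado.locale

tornado.locale.load_gettext_translations("locale", "mydomain")
user_locale = tornado.locale.get("pt_BR")  # falls back to the default locale
print(user_locale.translate("Sign in"))
```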
Mahlet-Inc/hobbits | 071d7a542f1af0a7791bcaab17b08224df9ecd4e | src/hobbits-plugins/analyzers/KaitaiStruct/ksy_py/macos/mac_os_resource_snd.py | python | MacOsResourceSnd.midi_note_to_frequency | (self) | return self._m_midi_note_to_frequency if hasattr(self, '_m_midi_note_to_frequency') else None | Lookup table to convert a MIDI note into a frequency in Hz
The lookup table represents the formula (2 ** ((midi_note - 69) / 12)) * 440
.. seealso::
Source - https://en.wikipedia.org/wiki/MIDI_tuning_standard | Lookup table to convert a MIDI note into a frequency in Hz
The lookup table represents the formula (2 ** ((midi_note - 69) / 12)) * 440
.. seealso::
Source - https://en.wikipedia.org/wiki/MIDI_tuning_standard | [
"Lookup",
"table",
"to",
"convert",
"a",
"MIDI",
"note",
"into",
"a",
"frequency",
"in",
"Hz",
"The",
"lookup",
"table",
"represents",
"the",
"formula",
"(",
"2",
"**",
"((",
"midi_note",
"-",
"69",
")",
"/",
"12",
"))",
"*",
"440",
"..",
"seealso",
"::",
"Source",
"-",
"https",
":",
"//",
"en",
".",
"wikipedia",
".",
"org",
"/",
"wiki",
"/",
"MIDI_tuning_standard"
] | def midi_note_to_frequency(self):
"""Lookup table to convert a MIDI note into a frequency in Hz
The lookup table represents the formula (2 ** ((midi_note - 69) / 12)) * 440
.. seealso::
Source - https://en.wikipedia.org/wiki/MIDI_tuning_standard
"""
if hasattr(self, '_m_midi_note_to_frequency'):
return self._m_midi_note_to_frequency if hasattr(self, '_m_midi_note_to_frequency') else None
self._m_midi_note_to_frequency = [8.18, 8.66, 9.18, 9.72, 10.30, 10.91, 11.56, 12.25, 12.98, 13.75, 14.57, 15.43, 16.35, 17.32, 18.35, 19.45, 20.60, 21.83, 23.12, 24.50, 25.96, 27.50, 29.14, 30.87, 32.70, 34.65, 36.71, 38.89, 41.20, 43.65, 46.25, 49.00, 51.91, 55.00, 58.27, 61.74, 65.41, 69.30, 73.42, 77.78, 82.41, 87.31, 92.50, 98.00, 103.83, 110.00, 116.54, 123.47, 130.81, 138.59, 146.83, 155.56, 164.81, 174.61, 185.00, 196.00, 207.65, 220.00, 233.08, 246.94, 261.63, 277.18, 293.66, 311.13, 329.63, 349.23, 369.99, 392.00, 415.30, 440.00, 466.16, 493.88, 523.25, 554.37, 587.33, 622.25, 659.26, 698.46, 739.99, 783.99, 830.61, 880.00, 932.33, 987.77, 1046.50, 1108.73, 1174.66, 1244.51, 1318.51, 1396.91, 1479.98, 1567.98, 1661.22, 1760.00, 1864.66, 1975.53, 2093.00, 2217.46, 2349.32, 2489.02, 2637.02, 2793.83, 2959.96, 3135.96, 3322.44, 3520.00, 3729.31, 3951.07, 4186.01, 4434.92, 4698.64, 4978.03, 5274.04, 5587.65, 5919.91, 6271.93, 6644.88, 7040.00, 7458.62, 7902.13, 8372.02, 8869.84, 9397.27, 9956.06, 10548.08, 11175.30, 11839.82, 12543.85]
return self._m_midi_note_to_frequency if hasattr(self, '_m_midi_note_to_frequency') else None | [
"def",
"midi_note_to_frequency",
"(",
"self",
")",
":",
"if",
"hasattr",
"(",
"self",
",",
"'_m_midi_note_to_frequency'",
")",
":",
"return",
"self",
".",
"_m_midi_note_to_frequency",
"if",
"hasattr",
"(",
"self",
",",
"'_m_midi_note_to_frequency'",
")",
"else",
"None",
"self",
".",
"_m_midi_note_to_frequency",
"=",
"[",
"8.18",
",",
"8.66",
",",
"9.18",
",",
"9.72",
",",
"10.30",
",",
"10.91",
",",
"11.56",
",",
"12.25",
",",
"12.98",
",",
"13.75",
",",
"14.57",
",",
"15.43",
",",
"16.35",
",",
"17.32",
",",
"18.35",
",",
"19.45",
",",
"20.60",
",",
"21.83",
",",
"23.12",
",",
"24.50",
",",
"25.96",
",",
"27.50",
",",
"29.14",
",",
"30.87",
",",
"32.70",
",",
"34.65",
",",
"36.71",
",",
"38.89",
",",
"41.20",
",",
"43.65",
",",
"46.25",
",",
"49.00",
",",
"51.91",
",",
"55.00",
",",
"58.27",
",",
"61.74",
",",
"65.41",
",",
"69.30",
",",
"73.42",
",",
"77.78",
",",
"82.41",
",",
"87.31",
",",
"92.50",
",",
"98.00",
",",
"103.83",
",",
"110.00",
",",
"116.54",
",",
"123.47",
",",
"130.81",
",",
"138.59",
",",
"146.83",
",",
"155.56",
",",
"164.81",
",",
"174.61",
",",
"185.00",
",",
"196.00",
",",
"207.65",
",",
"220.00",
",",
"233.08",
",",
"246.94",
",",
"261.63",
",",
"277.18",
",",
"293.66",
",",
"311.13",
",",
"329.63",
",",
"349.23",
",",
"369.99",
",",
"392.00",
",",
"415.30",
",",
"440.00",
",",
"466.16",
",",
"493.88",
",",
"523.25",
",",
"554.37",
",",
"587.33",
",",
"622.25",
",",
"659.26",
",",
"698.46",
",",
"739.99",
",",
"783.99",
",",
"830.61",
",",
"880.00",
",",
"932.33",
",",
"987.77",
",",
"1046.50",
",",
"1108.73",
",",
"1174.66",
",",
"1244.51",
",",
"1318.51",
",",
"1396.91",
",",
"1479.98",
",",
"1567.98",
",",
"1661.22",
",",
"1760.00",
",",
"1864.66",
",",
"1975.53",
",",
"2093.00",
",",
"2217.46",
",",
"2349.32",
",",
"2489.02",
",",
"2637.02",
",",
"2793.83",
",",
"2959.96",
",",
"3135.96",
",",
"3322.44",
",",
"3520.00",
",",
"3729.31",
",",
"3951.07",
",",
"4186.01",
",",
"4434.92",
",",
"4698.64",
",",
"4978.03",
",",
"5274.04",
",",
"5587.65",
",",
"5919.91",
",",
"6271.93",
",",
"6644.88",
",",
"7040.00",
",",
"7458.62",
",",
"7902.13",
",",
"8372.02",
",",
"8869.84",
",",
"9397.27",
",",
"9956.06",
",",
"10548.08",
",",
"11175.30",
",",
"11839.82",
",",
"12543.85",
"]",
"return",
"self",
".",
"_m_midi_note_to_frequency",
"if",
"hasattr",
"(",
"self",
",",
"'_m_midi_note_to_frequency'",
")",
"else",
"None"
] | https://github.com/Mahlet-Inc/hobbits/blob/071d7a542f1af0a7791bcaab17b08224df9ecd4e/src/hobbits-plugins/analyzers/KaitaiStruct/ksy_py/macos/mac_os_resource_snd.py#L480-L491 |
|
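The 128-entry table above is just the MIDI tuning formula evaluated for notes 0-127; computing it directly reproduces the entries (standard library only):

```python
def midi_note_to_frequency(note: int) -> float:
    """Frequency in Hz for a MIDI note, with A4 (note 69) = 440 Hz."""
    return 440.0 * 2.0 ** ((note - 69) / 12.0)

print(round(midi_note_to_frequency(69), 2))  # 440.0
print(round(midi_note_to_frequency(60), 2))  # 261.63  (middle C)
print(round(midi_note_to_frequency(0), 2))   # 8.18    (first table entry)
```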
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/windows/Lib/tkinter/__init__.py | python | Scrollbar.__init__ | (self, master=None, cnf={}, **kw) | Construct a scrollbar widget with the parent MASTER.
Valid resource names: activebackground, activerelief,
background, bd, bg, borderwidth, command, cursor,
elementborderwidth, highlightbackground,
highlightcolor, highlightthickness, jump, orient,
relief, repeatdelay, repeatinterval, takefocus,
troughcolor, width. | Construct a scrollbar widget with the parent MASTER. | [
"Construct",
"a",
"scrollbar",
"widget",
"with",
"the",
"parent",
"MASTER",
"."
] | def __init__(self, master=None, cnf={}, **kw):
"""Construct a scrollbar widget with the parent MASTER.
Valid resource names: activebackground, activerelief,
background, bd, bg, borderwidth, command, cursor,
elementborderwidth, highlightbackground,
highlightcolor, highlightthickness, jump, orient,
relief, repeatdelay, repeatinterval, takefocus,
troughcolor, width."""
Widget.__init__(self, master, 'scrollbar', cnf, kw) | [
"def",
"__init__",
"(",
"self",
",",
"master",
"=",
"None",
",",
"cnf",
"=",
"{",
"}",
",",
"*",
"*",
"kw",
")",
":",
"Widget",
".",
"__init__",
"(",
"self",
",",
"master",
",",
"'scrollbar'",
",",
"cnf",
",",
"kw",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/tkinter/__init__.py#L3034-L3043 |
||
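Typical wiring for the widget above: a vertical scrollbar attached to a listbox (standard library, Python 3 names):

```python
import tkinter as tk

root = tk.Tk()
listbox = tk.Listbox(root)
scrollbar = tk.Scrollbar(root, orient="vertical", command=listbox.yview)
listbox.config(yscrollcommand=scrollbar.set)  # keep the slider in sync

listbox.pack(side="left", fill="both", expand=True)
scrollbar.pack(side="right", fill="y")
for i in range(100):
    listbox.insert("end", f"item {i}")
root.mainloop()
```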
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_carbon/xrc.py | python | XmlDocument.SetRoot | (*args, **kwargs) | return _xrc.XmlDocument_SetRoot(*args, **kwargs) | SetRoot(self, XmlNode node) | SetRoot(self, XmlNode node) | [
"SetRoot",
"(",
"self",
"XmlNode",
"node",
")"
] | def SetRoot(*args, **kwargs):
"""SetRoot(self, XmlNode node)"""
return _xrc.XmlDocument_SetRoot(*args, **kwargs) | [
"def",
"SetRoot",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_xrc",
".",
"XmlDocument_SetRoot",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/xrc.py#L551-L553 |
|
yrnkrn/zapcc | c6a8aa30006d997eff0d60fd37b0e62b8aa0ea50 | tools/clang/tools/scan-view/share/ScanView.py | python | ScanViewRequestHandler.do_POST | (self) | Serve a POST request. | Serve a POST request. | [
"Serve",
"a",
"POST",
"request",
"."
] | def do_POST(self):
"""Serve a POST request."""
try:
length = self.headers.getheader('content-length') or "0"
try:
length = int(length)
except:
length = 0
content = self.rfile.read(length)
fields = parse_query(content)
f = self.send_head(fields)
if f:
self.copyfile(f, self.wfile)
f.close()
except Exception,e:
self.handle_exception(e) | [
"def",
"do_POST",
"(",
"self",
")",
":",
"try",
":",
"length",
"=",
"self",
".",
"headers",
".",
"getheader",
"(",
"'content-length'",
")",
"or",
"\"0\"",
"try",
":",
"length",
"=",
"int",
"(",
"length",
")",
"except",
":",
"length",
"=",
"0",
"content",
"=",
"self",
".",
"rfile",
".",
"read",
"(",
"length",
")",
"fields",
"=",
"parse_query",
"(",
"content",
")",
"f",
"=",
"self",
".",
"send_head",
"(",
"fields",
")",
"if",
"f",
":",
"self",
".",
"copyfile",
"(",
"f",
",",
"self",
".",
"wfile",
")",
"f",
".",
"close",
"(",
")",
"except",
"Exception",
",",
"e",
":",
"self",
".",
"handle_exception",
"(",
"e",
")"
] | https://github.com/yrnkrn/zapcc/blob/c6a8aa30006d997eff0d60fd37b0e62b8aa0ea50/tools/clang/tools/scan-view/share/ScanView.py#L219-L234 |
||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/distutils/archive_util.py | python | _get_gid | (name) | return None | Returns a gid, given a group name. | Returns a gid, given a group name. | [
"Returns",
"a",
"gid",
"given",
"a",
"group",
"name",
"."
] | def _get_gid(name):
"""Returns a gid, given a group name."""
if getgrnam is None or name is None:
return None
try:
result = getgrnam(name)
except KeyError:
result = None
if result is not None:
return result[2]
return None | [
"def",
"_get_gid",
"(",
"name",
")",
":",
"if",
"getgrnam",
"is",
"None",
"or",
"name",
"is",
"None",
":",
"return",
"None",
"try",
":",
"result",
"=",
"getgrnam",
"(",
"name",
")",
"except",
"KeyError",
":",
"result",
"=",
"None",
"if",
"result",
"is",
"not",
"None",
":",
"return",
"result",
"[",
"2",
"]",
"return",
"None"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/distutils/archive_util.py#L31-L41 |
|
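The helper wraps `grp.getgrnam`; the `[2]` it returns is the `gr_gid` field of the group entry. A Unix-only illustration (the group name is assumed to exist on the system):

```python
import grp  # Unix-only standard library module

entry = grp.getgrnam("daemon")  # raises KeyError if the group is absent
print(entry.gr_gid)             # same value the helper returns via entry[2]
```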
LiquidPlayer/LiquidCore | 9405979363f2353ac9a71ad8ab59685dd7f919c9 | deps/boost_1_66_0/tools/build/src/util/regex.py | python | transform | (list, pattern, indices = [1]) | return result | Matches all elements of 'list' against the 'pattern'
and returns a list of the elements indicated by indices of
all successful matches. If 'indices' is omitted, returns
a list of first parenthesised groups of all successful
matches. | Matches all elements of 'list' against the 'pattern'
and returns a list of the elements indicated by indices of
all successful matches. If 'indices' is omitted, returns
a list of first parenthesised groups of all successful
matches. | [
"Matches",
"all",
"elements",
"of",
"list",
"agains",
"the",
"pattern",
"and",
"returns",
"a",
"list",
"of",
"the",
"elements",
"indicated",
"by",
"indices",
"of",
"all",
"successfull",
"matches",
".",
"If",
"indices",
"is",
"omitted",
"returns",
"a",
"list",
"of",
"first",
"paranthethised",
"groups",
"of",
"all",
"successfull",
"matches",
"."
] | def transform (list, pattern, indices = [1]):
""" Matches all elements of 'list' agains the 'pattern'
and returns a list of the elements indicated by indices of
all successfull matches. If 'indices' is omitted returns
a list of first paranthethised groups of all successfull
matches.
"""
result = []
for e in list:
m = re.match (pattern, e)
if m:
for i in indices:
result.append (m.group (i))
return result | [
"def",
"transform",
"(",
"list",
",",
"pattern",
",",
"indices",
"=",
"[",
"1",
"]",
")",
":",
"result",
"=",
"[",
"]",
"for",
"e",
"in",
"list",
":",
"m",
"=",
"re",
".",
"match",
"(",
"pattern",
",",
"e",
")",
"if",
"m",
":",
"for",
"i",
"in",
"indices",
":",
"result",
".",
"append",
"(",
"m",
".",
"group",
"(",
"i",
")",
")",
"return",
"result"
] | https://github.com/LiquidPlayer/LiquidCore/blob/9405979363f2353ac9a71ad8ab59685dd7f919c9/deps/boost_1_66_0/tools/build/src/util/regex.py#L11-L27 |
|
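A quick self-contained demonstration of `transform`'s behaviour (inputs invented; the body restates the record under clearer names):

```python
import re

def transform(items, pattern, indices=(1,)):
    # Same behaviour as the record above; a tuple default avoids the
    # mutable-default-argument pitfall of `indices = [1]`.
    result = []
    for e in items:
        m = re.match(pattern, e)
        if m:
            for i in indices:
                result.append(m.group(i))
    return result

print(transform(["gcc-9", "clang-12", "make"], r"([a-z]+)-(\d+)"))
# ['gcc', 'clang']
print(transform(["gcc-9", "clang-12"], r"([a-z]+)-(\d+)", indices=(1, 2)))
# ['gcc', '9', 'clang', '12']
```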
PaddlePaddle/Paddle | 1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c | python/paddle/fluid/incubate/fleet/base/role_maker.py | python | MPIRoleMaker.get_local_ip | (self) | return self._ip | Return get local ip. | Return get local ip. | [
"Return",
"get",
"local",
"ip",
"."
] | def get_local_ip(self):
"""Return get local ip."""
import socket
self._ip = socket.gethostbyname(socket.gethostname())
return self._ip | [
"def",
"get_local_ip",
"(",
"self",
")",
":",
"import",
"socket",
"self",
".",
"_ip",
"=",
"socket",
".",
"gethostbyname",
"(",
"socket",
".",
"gethostname",
"(",
")",
")",
"return",
"self",
".",
"_ip"
] | https://github.com/PaddlePaddle/Paddle/blob/1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c/python/paddle/fluid/incubate/fleet/base/role_maker.py#L252-L256 |
|
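Note that `gethostbyname(gethostname())` can return 127.0.0.1 when the hostname maps to loopback in /etc/hosts; a common alternative (a sketch; the UDP connect sends no packet) reads the source address the kernel would choose:

```python
import socket

def get_local_ip() -> str:
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        s.connect(("8.8.8.8", 80))   # no traffic; just selects a route
        return s.getsockname()[0]
    finally:
        s.close()
```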
takemaru/graphillion | 51879f92bb96b53ef8f914ef37a05252ce383617 | graphillion/graphset.py | python | GraphSet.show_messages | (flag=True) | return _graphillion._show_messages(flag) | Enables/disables status messages.
Args:
flag: Optional. True or False. If True, status messages are
enabled. If False, they are disabled (initial setting).
Returns:
The setting before the method call. True (enabled) or
False (disabled). | Enables/disables status messages. | [
"Enables",
"/",
"disables",
"status",
"messages",
"."
] | def show_messages(flag=True):
"""Enables/disables status messages.
Args:
flag: Optional. True or False. If True, status messages are
enabled. If False, they are disabled (initial setting).
Returns:
The setting before the method call. True (enabled) or
False (disabled).
"""
return _graphillion._show_messages(flag) | [
"def",
"show_messages",
"(",
"flag",
"=",
"True",
")",
":",
"return",
"_graphillion",
".",
"_show_messages",
"(",
"flag",
")"
] | https://github.com/takemaru/graphillion/blob/51879f92bb96b53ef8f914ef37a05252ce383617/graphillion/graphset.py#L1976-L1987 |
|
baidu-research/tensorflow-allreduce | 66d5b855e90b0949e9fa5cca5599fd729a70e874 | tensorflow/python/estimator/inputs/pandas_io.py | python | pandas_input_fn | (x,
y=None,
batch_size=128,
num_epochs=1,
shuffle=None,
queue_capacity=1000,
num_threads=1,
target_column='target') | return input_fn | Returns input function that would feed Pandas DataFrame into the model.
Note: `y`'s index must match `x`'s index.
Args:
x: pandas `DataFrame` object.
y: pandas `Series` object. `None` if absent.
batch_size: int, size of batches to return.
num_epochs: int, number of epochs to iterate over data. If not `None`,
read attempts that would exceed this value will raise `OutOfRangeError`.
shuffle: bool, whether to read the records in random order.
queue_capacity: int, size of the read queue. If `None`, it will be set
roughly to the size of `x`.
num_threads: Integer, number of threads used for reading and enqueueing. In
order to have predicted and repeatable order of reading and enqueueing,
such as in prediction and evaluation mode, `num_threads` should be 1.
target_column: str, name to give the target column `y`.
Returns:
Function, that has signature of ()->(dict of `features`, `target`)
Raises:
ValueError: if `x` already contains a column with the same name as `y`, or
if the indexes of `x` and `y` don't match.
TypeError: `shuffle` is not bool. | Returns input function that would feed Pandas DataFrame into the model. | [
"Returns",
"input",
"function",
"that",
"would",
"feed",
"Pandas",
"DataFrame",
"into",
"the",
"model",
"."
] | def pandas_input_fn(x,
y=None,
batch_size=128,
num_epochs=1,
shuffle=None,
queue_capacity=1000,
num_threads=1,
target_column='target'):
"""Returns input function that would feed Pandas DataFrame into the model.
Note: `y`'s index must match `x`'s index.
Args:
x: pandas `DataFrame` object.
y: pandas `Series` object. `None` if absent.
batch_size: int, size of batches to return.
num_epochs: int, number of epochs to iterate over data. If not `None`,
read attempts that would exceed this value will raise `OutOfRangeError`.
shuffle: bool, whether to read the records in random order.
queue_capacity: int, size of the read queue. If `None`, it will be set
roughly to the size of `x`.
num_threads: Integer, number of threads used for reading and enqueueing. In
order to have predicted and repeatable order of reading and enqueueing,
such as in prediction and evaluation mode, `num_threads` should be 1.
target_column: str, name to give the target column `y`.
Returns:
Function, that has signature of ()->(dict of `features`, `target`)
Raises:
ValueError: if `x` already contains a column with the same name as `y`, or
if the indexes of `x` and `y` don't match.
TypeError: `shuffle` is not bool.
"""
if not HAS_PANDAS:
raise TypeError(
'pandas_input_fn should not be called without pandas installed')
if not isinstance(shuffle, bool):
raise TypeError('shuffle must be explicitly set as boolean; '
'got {}'.format(shuffle))
x = x.copy()
if y is not None:
if target_column in x:
raise ValueError(
'Cannot use name %s for target column: DataFrame already has a '
'column with that name: %s' % (target_column, x.columns))
if not np.array_equal(x.index, y.index):
raise ValueError('Index for x and y are mismatched.\nIndex for x: %s\n'
'Index for y: %s\n' % (x.index, y.index))
x[target_column] = y
# TODO(mdan): These are memory copies. We probably don't need 4x slack space.
# The sizes below are consistent with what I've seen elsewhere.
if queue_capacity is None:
if shuffle:
queue_capacity = 4 * len(x)
else:
queue_capacity = len(x)
min_after_dequeue = max(queue_capacity / 4, 1)
def input_fn():
"""Pandas input function."""
queue = feeding_functions._enqueue_data( # pylint: disable=protected-access
x,
queue_capacity,
shuffle=shuffle,
min_after_dequeue=min_after_dequeue,
num_threads=num_threads,
enqueue_size=batch_size,
num_epochs=num_epochs)
if num_epochs is None:
features = queue.dequeue_many(batch_size)
else:
features = queue.dequeue_up_to(batch_size)
assert len(features) == len(x.columns) + 1, ('Features should have one '
'extra element for the index.')
features = features[1:]
features = dict(zip(list(x.columns), features))
if y is not None:
target = features.pop(target_column)
return features, target
return features
return input_fn | [
"def",
"pandas_input_fn",
"(",
"x",
",",
"y",
"=",
"None",
",",
"batch_size",
"=",
"128",
",",
"num_epochs",
"=",
"1",
",",
"shuffle",
"=",
"None",
",",
"queue_capacity",
"=",
"1000",
",",
"num_threads",
"=",
"1",
",",
"target_column",
"=",
"'target'",
")",
":",
"if",
"not",
"HAS_PANDAS",
":",
"raise",
"TypeError",
"(",
"'pandas_input_fn should not be called without pandas installed'",
")",
"if",
"not",
"isinstance",
"(",
"shuffle",
",",
"bool",
")",
":",
"raise",
"TypeError",
"(",
"'shuffle must be explicitly set as boolean; '",
"'got {}'",
".",
"format",
"(",
"shuffle",
")",
")",
"x",
"=",
"x",
".",
"copy",
"(",
")",
"if",
"y",
"is",
"not",
"None",
":",
"if",
"target_column",
"in",
"x",
":",
"raise",
"ValueError",
"(",
"'Cannot use name %s for target column: DataFrame already has a '",
"'column with that name: %s'",
"%",
"(",
"target_column",
",",
"x",
".",
"columns",
")",
")",
"if",
"not",
"np",
".",
"array_equal",
"(",
"x",
".",
"index",
",",
"y",
".",
"index",
")",
":",
"raise",
"ValueError",
"(",
"'Index for x and y are mismatched.\\nIndex for x: %s\\n'",
"'Index for y: %s\\n'",
"%",
"(",
"x",
".",
"index",
",",
"y",
".",
"index",
")",
")",
"x",
"[",
"target_column",
"]",
"=",
"y",
"# TODO(mdan): These are memory copies. We probably don't need 4x slack space.",
"# The sizes below are consistent with what I've seen elsewhere.",
"if",
"queue_capacity",
"is",
"None",
":",
"if",
"shuffle",
":",
"queue_capacity",
"=",
"4",
"*",
"len",
"(",
"x",
")",
"else",
":",
"queue_capacity",
"=",
"len",
"(",
"x",
")",
"min_after_dequeue",
"=",
"max",
"(",
"queue_capacity",
"/",
"4",
",",
"1",
")",
"def",
"input_fn",
"(",
")",
":",
"\"\"\"Pandas input function.\"\"\"",
"queue",
"=",
"feeding_functions",
".",
"_enqueue_data",
"(",
"# pylint: disable=protected-access",
"x",
",",
"queue_capacity",
",",
"shuffle",
"=",
"shuffle",
",",
"min_after_dequeue",
"=",
"min_after_dequeue",
",",
"num_threads",
"=",
"num_threads",
",",
"enqueue_size",
"=",
"batch_size",
",",
"num_epochs",
"=",
"num_epochs",
")",
"if",
"num_epochs",
"is",
"None",
":",
"features",
"=",
"queue",
".",
"dequeue_many",
"(",
"batch_size",
")",
"else",
":",
"features",
"=",
"queue",
".",
"dequeue_up_to",
"(",
"batch_size",
")",
"assert",
"len",
"(",
"features",
")",
"==",
"len",
"(",
"x",
".",
"columns",
")",
"+",
"1",
",",
"(",
"'Features should have one '",
"'extra element for the index.'",
")",
"features",
"=",
"features",
"[",
"1",
":",
"]",
"features",
"=",
"dict",
"(",
"zip",
"(",
"list",
"(",
"x",
".",
"columns",
")",
",",
"features",
")",
")",
"if",
"y",
"is",
"not",
"None",
":",
"target",
"=",
"features",
".",
"pop",
"(",
"target_column",
")",
"return",
"features",
",",
"target",
"return",
"features",
"return",
"input_fn"
] | https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/python/estimator/inputs/pandas_io.py#L37-L121 |
|
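A hedged usage sketch for the record above; the DataFrame contents and the commented estimator call are illustrative only, and a TF 1.x environment is assumed:

```python
# Illustrative only: assumes a TF 1.x environment where this module's
# pandas_input_fn is importable; the DataFrame contents are made up.
import numpy as np
import pandas as pd
from tensorflow.python.estimator.inputs.pandas_io import pandas_input_fn

x = pd.DataFrame({"sq_footage": np.random.rand(256) * 100.0})
y = pd.Series(np.random.randint(0, 2, size=256), index=x.index)

train_input_fn = pandas_input_fn(x=x, y=y, batch_size=32,
                                 num_epochs=None, shuffle=True)
eval_input_fn = pandas_input_fn(x=x, y=y, batch_size=32,
                                num_epochs=1, shuffle=False, num_threads=1)
# estimator.train(input_fn=train_input_fn, steps=100)  # with some Estimator
```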
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/tools/python/src/Lib/imputil.py | python | ImportManager.uninstall | (self) | Restore the previous import mechanism. | Restore the previous import mechanism. | [
"Restore",
"the",
"previous",
"import",
"mechanism",
"."
] | def uninstall(self):
"Restore the previous import mechanism."
self.namespace['__import__'] = self.previous_importer | [
"def",
"uninstall",
"(",
"self",
")",
":",
"self",
".",
"namespace",
"[",
"'__import__'",
"]",
"=",
"self",
".",
"previous_importer"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Lib/imputil.py#L49-L51 |
||
cms-sw/cmssw | fd9de012d503d3405420bcbeec0ec879baa57cf2 | FWCore/ParameterSet/python/SequenceTypes.py | python | _ModuleSequenceType.copyAndExclude | (self,listOfModulesToExclude) | return result | Returns a copy of the sequence which excludes those module in 'listOfModulesToExclude | Returns a copy of the sequence which excludes those module in 'listOfModulesToExclude | [
"Returns",
"a",
"copy",
"of",
"the",
"sequence",
"which",
"excludes",
"those",
"module",
"in",
"listOfModulesToExclude"
] | def copyAndExclude(self,listOfModulesToExclude):
"""Returns a copy of the sequence which excludes those module in 'listOfModulesToExclude'"""
# You can exclude instances of these types EDProducer, EDFilter, OutputModule,
# EDAnalyzer, ESSource, ESProducer, Service, Sequence, SequencePlaceholder, Task,
# _SequenceNegation, and _SequenceIgnore.
# Mostly this is very intuitive, but there are some complications in cases
# where objects that contain other objects are involved. See the comments
# for the _MutatingSequenceVisitor.
v = _CopyAndExcludeSequenceVisitor(listOfModulesToExclude)
self.visit(v)
result = self.__new__(type(self))
result.__init__(v.result(self)[0], *v.result(self)[1])
return result | [
"def",
"copyAndExclude",
"(",
"self",
",",
"listOfModulesToExclude",
")",
":",
"# You can exclude instances of these types EDProducer, EDFilter, OutputModule,",
"# EDAnalyzer, ESSource, ESProducer, Service, Sequence, SequencePlaceholder, Task,",
"# _SequenceNegation, and _SequenceIgnore.",
"# Mostly this is very intuitive, but there are some complications in cases",
"# where objects that contain other objects are involved. See the comments",
"# for the _MutatingSequenceVisitor.",
"v",
"=",
"_CopyAndExcludeSequenceVisitor",
"(",
"listOfModulesToExclude",
")",
"self",
".",
"visit",
"(",
"v",
")",
"result",
"=",
"self",
".",
"__new__",
"(",
"type",
"(",
"self",
")",
")",
"result",
".",
"__init__",
"(",
"v",
".",
"result",
"(",
"self",
")",
"[",
"0",
"]",
",",
"*",
"v",
".",
"result",
"(",
"self",
")",
"[",
"1",
"]",
")",
"return",
"result"
] | https://github.com/cms-sw/cmssw/blob/fd9de012d503d3405420bcbeec0ec879baa57cf2/FWCore/ParameterSet/python/SequenceTypes.py#L406-L418 |
|
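A hypothetical CMSSW configuration fragment showing `copyAndExclude` in use (module and process names invented):

```python
# Hypothetical CMSSW configuration fragment (names invented).
import FWCore.ParameterSet.Config as cms

process = cms.Process("DEMO")
process.a = cms.EDProducer("AProducer")
process.b = cms.EDProducer("BProducer")
process.seq = cms.Sequence(process.a + process.b)
process.seqNoB = process.seq.copyAndExclude([process.b])  # only 'a' remains
```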
grpc/grpc | 27bc6fe7797e43298dc931b96dc57322d0852a9f | src/python/grpcio/grpc/__init__.py | python | secure_channel | (target, credentials, options=None, compression=None) | return _channel.Channel(target, () if options is None else options,
credentials._credentials, compression) | Creates a secure Channel to a server.
The returned Channel is thread-safe.
Args:
target: The server address.
credentials: A ChannelCredentials instance.
options: An optional list of key-value pairs (:term:`channel_arguments`
in gRPC Core runtime) to configure the channel.
compression: An optional value indicating the compression method to be
used over the lifetime of the channel. This is an EXPERIMENTAL option.
Returns:
A Channel. | Creates a secure Channel to a server. | [
"Creates",
"a",
"secure",
"Channel",
"to",
"a",
"server",
"."
] | def secure_channel(target, credentials, options=None, compression=None):
"""Creates a secure Channel to a server.
The returned Channel is thread-safe.
Args:
target: The server address.
credentials: A ChannelCredentials instance.
options: An optional list of key-value pairs (:term:`channel_arguments`
in gRPC Core runtime) to configure the channel.
compression: An optional value indicating the compression method to be
used over the lifetime of the channel. This is an EXPERIMENTAL option.
Returns:
A Channel.
"""
from grpc import _channel # pylint: disable=cyclic-import
from grpc.experimental import _insecure_channel_credentials
if credentials._credentials is _insecure_channel_credentials:
raise ValueError(
"secure_channel cannot be called with insecure credentials." +
" Call insecure_channel instead.")
return _channel.Channel(target, () if options is None else options,
credentials._credentials, compression) | [
"def",
"secure_channel",
"(",
"target",
",",
"credentials",
",",
"options",
"=",
"None",
",",
"compression",
"=",
"None",
")",
":",
"from",
"grpc",
"import",
"_channel",
"# pylint: disable=cyclic-import",
"from",
"grpc",
".",
"experimental",
"import",
"_insecure_channel_credentials",
"if",
"credentials",
".",
"_credentials",
"is",
"_insecure_channel_credentials",
":",
"raise",
"ValueError",
"(",
"\"secure_channel cannot be called with insecure credentials.\"",
"+",
"\" Call insecure_channel instead.\"",
")",
"return",
"_channel",
".",
"Channel",
"(",
"target",
",",
"(",
")",
"if",
"options",
"is",
"None",
"else",
"options",
",",
"credentials",
".",
"_credentials",
",",
"compression",
")"
] | https://github.com/grpc/grpc/blob/27bc6fe7797e43298dc931b96dc57322d0852a9f/src/python/grpcio/grpc/__init__.py#L1982-L2005 |
|
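Standard client-side use of `secure_channel`; the host and generated stub module are placeholders:

```python
import grpc

creds = grpc.ssl_channel_credentials()  # system root certificates by default
channel = grpc.secure_channel("api.example.com:443", creds)
# stub = my_service_pb2_grpc.MyServiceStub(channel)  # hypothetical generated stub
# response = stub.SomeRpc(request)
channel.close()
```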
tensorflow/tensorflow | 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e | tensorflow/python/ops/nccl_ops.py | python | reduce_sum | (tensors) | return _apply_reduce('sum', tensors) | Returns a tensor with the reduce sum across `tensors`.
The computation is done with a reduce operation, so only one tensor is
returned.
Args:
tensors: The input tensors across which to sum; must be assigned
to GPU devices.
Returns:
A tensor containing the sum of the input tensors.
Raises:
LookupError: If context is not currently using a GPU device. | Returns a tensor with the reduce sum across `tensors`. | [
"Returns",
"a",
"tensor",
"with",
"the",
"reduce",
"sum",
"across",
"tensors",
"."
] | def reduce_sum(tensors):
"""Returns a tensor with the reduce sum across `tensors`.
The computation is done with a reduce operation, so only one tensor is
returned.
Args:
tensors: The input tensors across which to sum; must be assigned
to GPU devices.
Returns:
A tensor containing the sum of the input tensors.
Raises:
LookupError: If context is not currently using a GPU device.
"""
return _apply_reduce('sum', tensors) | [
"def",
"reduce_sum",
"(",
"tensors",
")",
":",
"return",
"_apply_reduce",
"(",
"'sum'",
",",
"tensors",
")"
] | https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/ops/nccl_ops.py#L127-L143 |
|
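A sketch of calling `reduce_sum` across two GPU-placed tensors (TF 1.x graph mode and at least two GPUs assumed; device names are illustrative):

```python
# Sketch: requires >= 2 GPUs; device names are illustrative (TF 1.x).
import tensorflow as tf
from tensorflow.python.ops import nccl_ops

tensors = []
for dev in ("/gpu:0", "/gpu:1"):
    with tf.device(dev):
        tensors.append(tf.ones([4]))
total = nccl_ops.reduce_sum(tensors)  # a single summed tensor is returned
```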
RoboJackets/robocup-software | bce13ce53ddb2ecb9696266d980722c34617dc15 | rj_gameplay/stp/rc.py | python | Field.center_radius_m | (self) | return self.__center_radius_m | :return: returns the radius of the center of the field | :return: returns the radius of the center of the field | [
":",
"return",
":",
"returns",
"the",
"radius",
"of",
"the",
"center",
"of",
"the",
"field"
] | def center_radius_m(self) -> float:
"""
:return: returns the radius of the center of the field
"""
return self.__center_radius_m | [
"def",
"center_radius_m",
"(",
"self",
")",
"->",
"float",
":",
"return",
"self",
".",
"__center_radius_m"
] | https://github.com/RoboJackets/robocup-software/blob/bce13ce53ddb2ecb9696266d980722c34617dc15/rj_gameplay/stp/rc.py#L406-L410 |
|
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | wx/tools/Editra/src/eclib/ctrlbox.py | python | SegmentBar.OnMouseMove | (self, evt) | Handle when the mouse moves over the bar | Handle when the mouse moves over the bar | [
"Handle",
"when",
"the",
"mouse",
"moves",
"over",
"the",
"bar"
] | def OnMouseMove(self, evt):
"""Handle when the mouse moves over the bar"""
epos = evt.GetPosition()
where, index = self.HitTest(epos)
if index == -1:
return
if not self.SegmentHasCloseButton(index):
self._RestartTimer()
return
# Update button state
button = self._buttons[index]
x_state = button.XState
button.XState = SEGMENT_STATE_NONE
if where != SEGMENT_HT_NOWHERE:
if where == SEGMENT_HT_X_BTN:
button.XState = SEGMENT_STATE_X
elif where == SEGMENT_HT_SEG:
# TODO: add highlight option for hover on segment
pass
else:
self._RestartTimer()
evt.Skip()
return
# If the hover state over a segments close button
# has changed redraw the close button to reflect the
# proper state.
if button.XState != x_state:
crect = self.GetClientRect()
if not self.IsVerticalMode():
brect = wx.Rect(button.BX1, 0,
button.BX2 - (button.BX1 - 2),
crect.Height)
else:
brect = wx.Rect(button.BX1, 0,
crect.Width,
button.BX2 - (button.BX1 - 2))
self.Refresh(False, brect)
self._RestartTimer()
evt.Skip() | [
"def",
"OnMouseMove",
"(",
"self",
",",
"evt",
")",
":",
"epos",
"=",
"evt",
".",
"GetPosition",
"(",
")",
"where",
",",
"index",
"=",
"self",
".",
"HitTest",
"(",
"epos",
")",
"if",
"index",
"==",
"-",
"1",
":",
"return",
"if",
"not",
"self",
".",
"SegmentHasCloseButton",
"(",
"index",
")",
":",
"self",
".",
"_RestartTimer",
"(",
")",
"return",
"# Update button state",
"button",
"=",
"self",
".",
"_buttons",
"[",
"index",
"]",
"x_state",
"=",
"button",
".",
"XState",
"button",
".",
"XState",
"=",
"SEGMENT_STATE_NONE",
"if",
"where",
"!=",
"SEGMENT_HT_NOWHERE",
":",
"if",
"where",
"==",
"SEGMENT_HT_X_BTN",
":",
"button",
".",
"XState",
"=",
"SEGMENT_STATE_X",
"elif",
"where",
"==",
"SEGMENT_HT_SEG",
":",
"# TODO: add highlight option for hover on segment",
"pass",
"else",
":",
"self",
".",
"_RestartTimer",
"(",
")",
"evt",
".",
"Skip",
"(",
")",
"return",
"# If the hover state over a segments close button",
"# has changed redraw the close button to reflect the",
"# proper state.",
"if",
"button",
".",
"XState",
"!=",
"x_state",
":",
"crect",
"=",
"self",
".",
"GetClientRect",
"(",
")",
"if",
"not",
"self",
".",
"IsVerticalMode",
"(",
")",
":",
"brect",
"=",
"wx",
".",
"Rect",
"(",
"button",
".",
"BX1",
",",
"0",
",",
"button",
".",
"BX2",
"-",
"(",
"button",
".",
"BX1",
"-",
"2",
")",
",",
"crect",
".",
"Height",
")",
"else",
":",
"brect",
"=",
"wx",
".",
"Rect",
"(",
"button",
".",
"BX1",
",",
"0",
",",
"crect",
".",
"Width",
",",
"button",
".",
"BX2",
"-",
"(",
"button",
".",
"BX1",
"-",
"2",
")",
")",
"self",
".",
"Refresh",
"(",
"False",
",",
"brect",
")",
"self",
".",
"_RestartTimer",
"(",
")",
"evt",
".",
"Skip",
"(",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/tools/Editra/src/eclib/ctrlbox.py#L1017-L1058 |
||
eventql/eventql | 7ca0dbb2e683b525620ea30dc40540a22d5eb227 | deps/3rdparty/spidermonkey/mozjs/python/pystache/pystache/renderer.py | python | Renderer._make_load_partial | (self) | return load_partial | Return a function that loads a partial by name. | Return a function that loads a partial by name. | [
"Return",
"a",
"function",
"that",
"loads",
"a",
"partial",
"by",
"name",
"."
] | def _make_load_partial(self):
"""
Return a function that loads a partial by name.
"""
if self.partials is None:
return self._make_load_template()
# Otherwise, create a function from the custom partial loader.
partials = self.partials
def load_partial(name):
# TODO: consider using EAFP here instead.
# http://docs.python.org/glossary.html#term-eafp
# This would mean requiring that the custom partial loader
# raise a KeyError on name not found.
template = partials.get(name)
if template is None:
raise TemplateNotFoundError("Name %s not found in partials: %s" %
(repr(name), type(partials)))
# RenderEngine requires that the return value be unicode.
return self._to_unicode_hard(template)
return load_partial | [
"def",
"_make_load_partial",
"(",
"self",
")",
":",
"if",
"self",
".",
"partials",
"is",
"None",
":",
"return",
"self",
".",
"_make_load_template",
"(",
")",
"# Otherwise, create a function from the custom partial loader.",
"partials",
"=",
"self",
".",
"partials",
"def",
"load_partial",
"(",
"name",
")",
":",
"# TODO: consider using EAFP here instead.",
"# http://docs.python.org/glossary.html#term-eafp",
"# This would mean requiring that the custom partial loader",
"# raise a KeyError on name not found.",
"template",
"=",
"partials",
".",
"get",
"(",
"name",
")",
"if",
"template",
"is",
"None",
":",
"raise",
"TemplateNotFoundError",
"(",
"\"Name %s not found in partials: %s\"",
"%",
"(",
"repr",
"(",
"name",
")",
",",
"type",
"(",
"partials",
")",
")",
")",
"# RenderEngine requires that the return value be unicode.",
"return",
"self",
".",
"_to_unicode_hard",
"(",
"template",
")",
"return",
"load_partial"
] | https://github.com/eventql/eventql/blob/7ca0dbb2e683b525620ea30dc40540a22d5eb227/deps/3rdparty/spidermonkey/mozjs/python/pystache/pystache/renderer.py#L247-L271 |
|
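Editor's note on the record above: the closure-over-a-dict loader is a reusable pattern. A self-contained sketch follows; `TemplateNotFound` and `make_loader` are illustrative names of my own, not pystache API.

```python
# Minimal sketch of the dict-backed loader pattern from the record above.
# TemplateNotFound and make_loader are hypothetical names, not pystache API.
class TemplateNotFound(Exception):
    pass

def make_loader(partials):
    def load_partial(name):
        # Mirrors the LBYL lookup used above (get + None check).
        template = partials.get(name)
        if template is None:
            raise TemplateNotFound("Name %r not found in partials: %s"
                                   % (name, type(partials)))
        return template
    return load_partial

load = make_loader({"header": "<h1>{{title}}</h1>"})
print(load("header"))  # <h1>{{title}}</h1>
```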
snap-stanford/snap-python | d53c51b0a26aa7e3e7400b014cdf728948fde80a | setup/snap.py | python | TMOut.GetSIn | (self, *args) | return _snap.TMOut_GetSIn(self, *args) | GetSIn(TMOut self, bool const & IsCut=True, int const & CutBfL=-1) -> PSIn
Parameters:
IsCut: bool const &
CutBfL: int const &
GetSIn(TMOut self, bool const & IsCut=True) -> PSIn
Parameters:
IsCut: bool const &
GetSIn(TMOut self) -> PSIn
Parameters:
self: TMOut * | GetSIn(TMOut self, bool const & IsCut=True, int const & CutBfL=-1) -> PSIn | [
"GetSIn",
"(",
"TMOut",
"self",
"bool",
"const",
"&",
"IsCut",
"=",
"True",
"int",
"const",
"&",
"CutBfL",
"=",
"-",
"1",
")",
"-",
">",
"PSIn"
] | def GetSIn(self, *args):
"""
GetSIn(TMOut self, bool const & IsCut=True, int const & CutBfL=-1) -> PSIn
Parameters:
IsCut: bool const &
CutBfL: int const &
GetSIn(TMOut self, bool const & IsCut=True) -> PSIn
Parameters:
IsCut: bool const &
GetSIn(TMOut self) -> PSIn
Parameters:
self: TMOut *
"""
return _snap.TMOut_GetSIn(self, *args) | [
"def",
"GetSIn",
"(",
"self",
",",
"*",
"args",
")",
":",
"return",
"_snap",
".",
"TMOut_GetSIn",
"(",
"self",
",",
"*",
"args",
")"
] | https://github.com/snap-stanford/snap-python/blob/d53c51b0a26aa7e3e7400b014cdf728948fde80a/setup/snap.py#L3068-L3087 |
|
facebook/ThreatExchange | 31914a51820c73c8a0daffe62ccca29a6e3d359e | python-threatexchange/threatexchange/hashing/pdq_hasher.py | python | pdq_from_bytes | (file_bytes: bytes) | return _pdq_from_numpy_array(np_array) | For the bytestream from an image file, compute PDQ Hash and quality. | For the bytestream from an image file, compute PDQ Hash and quality. | [
"For",
"the",
"bytestream",
"from",
"an",
"image",
"file",
"compute",
"PDQ",
"Hash",
"and",
"quality",
"."
] | def pdq_from_bytes(file_bytes: bytes) -> PDQOutput:
"""
For the bytestream from an image file, compute PDQ Hash and quality.
"""
np_array = _check_dimension_and_expand_if_needed(
np.asarray(Image.open(io.BytesIO(file_bytes)))
)
return _pdq_from_numpy_array(np_array) | [
"def",
"pdq_from_bytes",
"(",
"file_bytes",
":",
"bytes",
")",
"->",
"PDQOutput",
":",
"np_array",
"=",
"_check_dimension_and_expand_if_needed",
"(",
"np",
".",
"asarray",
"(",
"Image",
".",
"open",
"(",
"io",
".",
"BytesIO",
"(",
"file_bytes",
")",
")",
")",
")",
"return",
"_pdq_from_numpy_array",
"(",
"np_array",
")"
] | https://github.com/facebook/ThreatExchange/blob/31914a51820c73c8a0daffe62ccca29a6e3d359e/python-threatexchange/threatexchange/hashing/pdq_hasher.py#L27-L34 |
|
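A hedged usage sketch for the hasher above. It assumes the `threatexchange` package (with its native PDQ dependency) is installed and that `PDQOutput` unpacks to a (hash, quality) pair, which the docstring implies but this excerpt does not show; the file path is a placeholder.

```python
# Hypothetical usage; requires the threatexchange package and PDQ bindings.
from threatexchange.hashing.pdq_hasher import pdq_from_bytes

with open("photo.jpg", "rb") as f:       # placeholder image path
    pdq_hash, quality = pdq_from_bytes(f.read())
print(pdq_hash, quality)                 # assumed: hex hash string and a 0-100 quality
```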
ROCmSoftwarePlatform/hipCaffe | 4ec5d482515cce532348553b6db6d00d015675d5 | scripts/cpp_lint.py | python | _CppLintState.PrintErrorCounts | (self) | Print a summary of errors by category, and the total. | Print a summary of errors by category, and the total. | [
"Print",
"a",
"summary",
"of",
"errors",
"by",
"category",
"and",
"the",
"total",
"."
] | def PrintErrorCounts(self):
"""Print a summary of errors by category, and the total."""
for category, count in self.errors_by_category.iteritems():
sys.stderr.write('Category \'%s\' errors found: %d\n' %
(category, count))
sys.stderr.write('Total errors found: %d\n' % self.error_count) | [
"def",
"PrintErrorCounts",
"(",
"self",
")",
":",
"for",
"category",
",",
"count",
"in",
"self",
".",
"errors_by_category",
".",
"iteritems",
"(",
")",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"'Category \\'%s\\' errors found: %d\\n'",
"%",
"(",
"category",
",",
"count",
")",
")",
"sys",
".",
"stderr",
".",
"write",
"(",
"'Total errors found: %d\\n'",
"%",
"self",
".",
"error_count",
")"
] | https://github.com/ROCmSoftwarePlatform/hipCaffe/blob/4ec5d482515cce532348553b6db6d00d015675d5/scripts/cpp_lint.py#L757-L762 |
||
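One detail in the record above: `iteritems()` exists only on Python 2. A Python 3 sketch of the same per-category summary, using `collections.Counter` for the tallying (sample categories are made up):

```python
# Python 3 sketch of the error summary above (iteritems() -> items()).
import sys
from collections import Counter

errors_by_category = Counter({"whitespace/indent": 3, "build/include": 1})
for category, count in errors_by_category.items():
    sys.stderr.write("Category '%s' errors found: %d\n" % (category, count))
sys.stderr.write("Total errors found: %d\n" % sum(errors_by_category.values()))
```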
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/distutils/command/build_ext.py | python | build_ext.find_swig | (self) | Return the name of the SWIG executable. On Unix, this is
just "swig" -- it should be in the PATH. Tries a bit harder on
Windows. | Return the name of the SWIG executable. On Unix, this is
just "swig" -- it should be in the PATH. Tries a bit harder on
Windows. | [
"Return",
"the",
"name",
"of",
"the",
"SWIG",
"executable",
".",
"On",
"Unix",
"this",
"is",
"just",
"swig",
"--",
"it",
"should",
"be",
"in",
"the",
"PATH",
".",
"Tries",
"a",
"bit",
"harder",
"on",
"Windows",
"."
] | def find_swig (self):
"""Return the name of the SWIG executable. On Unix, this is
just "swig" -- it should be in the PATH. Tries a bit harder on
Windows.
"""
if os.name == "posix":
return "swig"
elif os.name == "nt":
# Look for SWIG in its standard installation directory on
# Windows (or so I presume!). If we find it there, great;
# if not, act like Unix and assume it's in the PATH.
for vers in ("1.3", "1.2", "1.1"):
fn = os.path.join("c:\\swig%s" % vers, "swig.exe")
if os.path.isfile(fn):
return fn
else:
return "swig.exe"
elif os.name == "os2":
# assume swig available in the PATH.
return "swig.exe"
else:
raise DistutilsPlatformError, \
("I don't know how to find (much less run) SWIG "
"on platform '%s'") % os.name | [
"def",
"find_swig",
"(",
"self",
")",
":",
"if",
"os",
".",
"name",
"==",
"\"posix\"",
":",
"return",
"\"swig\"",
"elif",
"os",
".",
"name",
"==",
"\"nt\"",
":",
"# Look for SWIG in its standard installation directory on",
"# Windows (or so I presume!). If we find it there, great;",
"# if not, act like Unix and assume it's in the PATH.",
"for",
"vers",
"in",
"(",
"\"1.3\"",
",",
"\"1.2\"",
",",
"\"1.1\"",
")",
":",
"fn",
"=",
"os",
".",
"path",
".",
"join",
"(",
"\"c:\\\\swig%s\"",
"%",
"vers",
",",
"\"swig.exe\"",
")",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"fn",
")",
":",
"return",
"fn",
"else",
":",
"return",
"\"swig.exe\"",
"elif",
"os",
".",
"name",
"==",
"\"os2\"",
":",
"# assume swig available in the PATH.",
"return",
"\"swig.exe\"",
"else",
":",
"raise",
"DistutilsPlatformError",
",",
"(",
"\"I don't know how to find (much less run) SWIG \"",
"\"on platform '%s'\"",
")",
"%",
"os",
".",
"name"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/distutils/command/build_ext.py#L594-L621 |
||
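On Python 3.3+ the PATH lookup that the record above assumes can be made explicit with the standard library; a minimal sketch:

```python
# shutil.which searches PATH (and honors PATHEXT on Windows).
import shutil

swig = shutil.which("swig") or shutil.which("swig.exe")
if swig is None:
    raise RuntimeError("SWIG not found on PATH")
print(swig)
```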
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/windows/Lib/site-packages/setuptools/command/sdist.py | python | sdist._remove_os_link | () | In a context, remove and restore os.link if it exists | In a context, remove and restore os.link if it exists | [
"In",
"a",
"context",
"remove",
"and",
"restore",
"os",
".",
"link",
"if",
"it",
"exists"
] | def _remove_os_link():
"""
In a context, remove and restore os.link if it exists
"""
class NoValue:
pass
orig_val = getattr(os, 'link', NoValue)
try:
del os.link
except Exception:
pass
try:
yield
finally:
if orig_val is not NoValue:
setattr(os, 'link', orig_val) | [
"def",
"_remove_os_link",
"(",
")",
":",
"class",
"NoValue",
":",
"pass",
"orig_val",
"=",
"getattr",
"(",
"os",
",",
"'link'",
",",
"NoValue",
")",
"try",
":",
"del",
"os",
".",
"link",
"except",
"Exception",
":",
"pass",
"try",
":",
"yield",
"finally",
":",
"if",
"orig_val",
"is",
"not",
"NoValue",
":",
"setattr",
"(",
"os",
",",
"'link'",
",",
"orig_val",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/site-packages/setuptools/command/sdist.py#L82-L99 |
||
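The record above is a generator-based context manager (the decorator is stripped in this extract, but the bare `yield` inside `try/finally` gives it away); hiding `os.link` forces callers onto their copy fallback so `sdist` copies files rather than hard-linking them. A self-contained sketch of the general pattern, with `hidden_attr` as an illustrative name:

```python
# Generic "temporarily hide an attribute" context manager, same shape as above.
import contextlib
import os

@contextlib.contextmanager
def hidden_attr(obj, name):
    sentinel = object()
    saved = getattr(obj, name, sentinel)   # NoValue-style sentinel
    if saved is not sentinel:
        delattr(obj, name)
    try:
        yield
    finally:
        if saved is not sentinel:
            setattr(obj, name, saved)

had_link = hasattr(os, "link")
with hidden_attr(os, "link"):
    assert not hasattr(os, "link")         # callers now avoid hard links
assert hasattr(os, "link") == had_link     # restored on exit
```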
baidu-research/tensorflow-allreduce | 66d5b855e90b0949e9fa5cca5599fd729a70e874 | tensorflow/python/training/saver.py | python | Saver.save | (self,
sess,
save_path,
global_step=None,
latest_filename=None,
meta_graph_suffix="meta",
write_meta_graph=True,
write_state=True) | Saves variables.
This method runs the ops added by the constructor for saving variables.
It requires a session in which the graph was launched. The variables to
save must also have been initialized.
The method returns the path of the newly created checkpoint file. This
path can be passed directly to a call to `restore()`.
Args:
sess: A Session to use to save the variables.
save_path: String. Path to the checkpoint filename. If the saver is
`sharded`, this is the prefix of the sharded checkpoint filename.
global_step: If provided, the global step number is appended to
`save_path` to create the checkpoint filename. The optional argument
can be a `Tensor`, a `Tensor` name or an integer.
latest_filename: Optional name for the protocol buffer file that will
contain the list of most recent checkpoint filenames. That file,
kept in the same directory as the checkpoint files, is automatically
managed by the saver to keep track of recent checkpoints. Defaults to
'checkpoint'.
meta_graph_suffix: Suffix for `MetaGraphDef` file. Defaults to 'meta'.
write_meta_graph: `Boolean` indicating whether or not to write the meta
graph file.
write_state: `Boolean` indicating whether or not to write the
`CheckpointStateProto`.
Returns:
A string: path at which the variables were saved. If the saver is
sharded, this string ends with: '-?????-of-nnnnn' where 'nnnnn'
is the number of shards created.
If the saver is empty, returns None.
Raises:
TypeError: If `sess` is not a `Session`.
ValueError: If `latest_filename` contains path components, or if it
collides with `save_path`.
RuntimeError: If save and restore ops weren't built. | Saves variables. | [
"Saves",
"variables",
"."
] | def save(self,
sess,
save_path,
global_step=None,
latest_filename=None,
meta_graph_suffix="meta",
write_meta_graph=True,
write_state=True):
"""Saves variables.
This method runs the ops added by the constructor for saving variables.
It requires a session in which the graph was launched. The variables to
save must also have been initialized.
The method returns the path of the newly created checkpoint file. This
path can be passed directly to a call to `restore()`.
Args:
sess: A Session to use to save the variables.
save_path: String. Path to the checkpoint filename. If the saver is
`sharded`, this is the prefix of the sharded checkpoint filename.
global_step: If provided, the global step number is appended to
`save_path` to create the checkpoint filename. The optional argument
can be a `Tensor`, a `Tensor` name or an integer.
latest_filename: Optional name for the protocol buffer file that will
contain the list of most recent checkpoint filenames. That file,
kept in the same directory as the checkpoint files, is automatically
managed by the saver to keep track of recent checkpoints. Defaults to
'checkpoint'.
meta_graph_suffix: Suffix for `MetaGraphDef` file. Defaults to 'meta'.
write_meta_graph: `Boolean` indicating whether or not to write the meta
graph file.
write_state: `Boolean` indicating whether or not to write the
`CheckpointStateProto`.
Returns:
A string: path at which the variables were saved. If the saver is
sharded, this string ends with: '-?????-of-nnnnn' where 'nnnnn'
is the number of shards created.
If the saver is empty, returns None.
Raises:
TypeError: If `sess` is not a `Session`.
ValueError: If `latest_filename` contains path components, or if it
collides with `save_path`.
RuntimeError: If save and restore ops weren't built.
"""
if not self._is_built:
raise RuntimeError(
"`build()` should be called before save if defer_build==True")
if latest_filename is None:
latest_filename = "checkpoint"
if self._write_version != saver_pb2.SaverDef.V2:
logging.warning("*******************************************************")
logging.warning("TensorFlow's V1 checkpoint format has been deprecated.")
logging.warning("Consider switching to the more efficient V2 format:")
logging.warning(" `tf.train.Saver(write_version=tf.train.SaverDef.V2)`")
logging.warning("now on by default.")
logging.warning("*******************************************************")
if os.path.split(latest_filename)[0]:
raise ValueError("'latest_filename' must not contain path components")
if global_step is not None:
if not isinstance(global_step, compat.integral_types):
global_step = training_util.global_step(sess, global_step)
checkpoint_file = "%s-%d" % (save_path, global_step)
if self._pad_step_number:
# Zero-pads the step numbers, so that they are sorted when listed.
checkpoint_file = "%s-%s" % (save_path, "{:08d}".format(global_step))
else:
checkpoint_file = save_path
if os.path.basename(
save_path) == latest_filename and not self.saver_def.sharded:
# Guard against collision between data file and checkpoint state file.
raise ValueError(
"'latest_filename' collides with 'save_path': '%s' and '%s'" %
(latest_filename, save_path))
if not isinstance(sess, session.SessionInterface):
raise TypeError("'sess' must be a Session; %s" % sess)
save_path_parent = os.path.dirname(save_path)
if not self._is_empty:
try:
model_checkpoint_path = sess.run(
self.saver_def.save_tensor_name,
{self.saver_def.filename_tensor_name: checkpoint_file})
model_checkpoint_path = compat.as_str(model_checkpoint_path)
if write_state:
self._MaybeDeleteOldCheckpoints(
model_checkpoint_path, meta_graph_suffix=meta_graph_suffix)
_update_checkpoint_state(
save_dir=save_path_parent,
model_checkpoint_path=model_checkpoint_path,
all_model_checkpoint_paths=self.last_checkpoints,
latest_filename=latest_filename,
save_relative_paths=self._save_relative_paths)
except (errors.FailedPreconditionError, errors.NotFoundError) as exc:
if not gfile.IsDirectory(save_path_parent):
exc = ValueError(
"Parent directory of {} doesn't exist, can't save.".format(
save_path))
raise exc
if write_meta_graph:
meta_graph_filename = self._MetaGraphFilename(
checkpoint_file, meta_graph_suffix=meta_graph_suffix)
with sess.graph.as_default():
self.export_meta_graph(meta_graph_filename)
if self._is_empty:
return None
else:
return model_checkpoint_path | [
"def",
"save",
"(",
"self",
",",
"sess",
",",
"save_path",
",",
"global_step",
"=",
"None",
",",
"latest_filename",
"=",
"None",
",",
"meta_graph_suffix",
"=",
"\"meta\"",
",",
"write_meta_graph",
"=",
"True",
",",
"write_state",
"=",
"True",
")",
":",
"if",
"not",
"self",
".",
"_is_built",
":",
"raise",
"RuntimeError",
"(",
"\"`build()` should be called before save if defer_build==True\"",
")",
"if",
"latest_filename",
"is",
"None",
":",
"latest_filename",
"=",
"\"checkpoint\"",
"if",
"self",
".",
"_write_version",
"!=",
"saver_pb2",
".",
"SaverDef",
".",
"V2",
":",
"logging",
".",
"warning",
"(",
"\"*******************************************************\"",
")",
"logging",
".",
"warning",
"(",
"\"TensorFlow's V1 checkpoint format has been deprecated.\"",
")",
"logging",
".",
"warning",
"(",
"\"Consider switching to the more efficient V2 format:\"",
")",
"logging",
".",
"warning",
"(",
"\" `tf.train.Saver(write_version=tf.train.SaverDef.V2)`\"",
")",
"logging",
".",
"warning",
"(",
"\"now on by default.\"",
")",
"logging",
".",
"warning",
"(",
"\"*******************************************************\"",
")",
"if",
"os",
".",
"path",
".",
"split",
"(",
"latest_filename",
")",
"[",
"0",
"]",
":",
"raise",
"ValueError",
"(",
"\"'latest_filename' must not contain path components\"",
")",
"if",
"global_step",
"is",
"not",
"None",
":",
"if",
"not",
"isinstance",
"(",
"global_step",
",",
"compat",
".",
"integral_types",
")",
":",
"global_step",
"=",
"training_util",
".",
"global_step",
"(",
"sess",
",",
"global_step",
")",
"checkpoint_file",
"=",
"\"%s-%d\"",
"%",
"(",
"save_path",
",",
"global_step",
")",
"if",
"self",
".",
"_pad_step_number",
":",
"# Zero-pads the step numbers, so that they are sorted when listed.",
"checkpoint_file",
"=",
"\"%s-%s\"",
"%",
"(",
"save_path",
",",
"\"{:08d}\"",
".",
"format",
"(",
"global_step",
")",
")",
"else",
":",
"checkpoint_file",
"=",
"save_path",
"if",
"os",
".",
"path",
".",
"basename",
"(",
"save_path",
")",
"==",
"latest_filename",
"and",
"not",
"self",
".",
"saver_def",
".",
"sharded",
":",
"# Guard against collision between data file and checkpoint state file.",
"raise",
"ValueError",
"(",
"\"'latest_filename' collides with 'save_path': '%s' and '%s'\"",
"%",
"(",
"latest_filename",
",",
"save_path",
")",
")",
"if",
"not",
"isinstance",
"(",
"sess",
",",
"session",
".",
"SessionInterface",
")",
":",
"raise",
"TypeError",
"(",
"\"'sess' must be a Session; %s\"",
"%",
"sess",
")",
"save_path_parent",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"save_path",
")",
"if",
"not",
"self",
".",
"_is_empty",
":",
"try",
":",
"model_checkpoint_path",
"=",
"sess",
".",
"run",
"(",
"self",
".",
"saver_def",
".",
"save_tensor_name",
",",
"{",
"self",
".",
"saver_def",
".",
"filename_tensor_name",
":",
"checkpoint_file",
"}",
")",
"model_checkpoint_path",
"=",
"compat",
".",
"as_str",
"(",
"model_checkpoint_path",
")",
"if",
"write_state",
":",
"self",
".",
"_MaybeDeleteOldCheckpoints",
"(",
"model_checkpoint_path",
",",
"meta_graph_suffix",
"=",
"meta_graph_suffix",
")",
"_update_checkpoint_state",
"(",
"save_dir",
"=",
"save_path_parent",
",",
"model_checkpoint_path",
"=",
"model_checkpoint_path",
",",
"all_model_checkpoint_paths",
"=",
"self",
".",
"last_checkpoints",
",",
"latest_filename",
"=",
"latest_filename",
",",
"save_relative_paths",
"=",
"self",
".",
"_save_relative_paths",
")",
"except",
"(",
"errors",
".",
"FailedPreconditionError",
",",
"errors",
".",
"NotFoundError",
")",
"as",
"exc",
":",
"if",
"not",
"gfile",
".",
"IsDirectory",
"(",
"save_path_parent",
")",
":",
"exc",
"=",
"ValueError",
"(",
"\"Parent directory of {} doesn't exist, can't save.\"",
".",
"format",
"(",
"save_path",
")",
")",
"raise",
"exc",
"if",
"write_meta_graph",
":",
"meta_graph_filename",
"=",
"self",
".",
"_MetaGraphFilename",
"(",
"checkpoint_file",
",",
"meta_graph_suffix",
"=",
"meta_graph_suffix",
")",
"with",
"sess",
".",
"graph",
".",
"as_default",
"(",
")",
":",
"self",
".",
"export_meta_graph",
"(",
"meta_graph_filename",
")",
"if",
"self",
".",
"_is_empty",
":",
"return",
"None",
"else",
":",
"return",
"model_checkpoint_path"
] | https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/python/training/saver.py#L1387-L1501 |
||
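A minimal graph-mode usage sketch for the method above. Paths and variable names are placeholders; on TF2 installs this API is reachable through `tf.compat.v1`.

```python
# Hypothetical TF1-style usage; assumes TensorFlow with the v1 compat API.
import tensorflow.compat.v1 as tf
tf.disable_eager_execution()

v = tf.get_variable("v", shape=[2], initializer=tf.zeros_initializer())
saver = tf.train.Saver()
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # Appends the step: writes /tmp/model.ckpt-100.* plus a 'checkpoint' file.
    path = saver.save(sess, "/tmp/model.ckpt", global_step=100)
    print(path)  # /tmp/model.ckpt-100
```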
ricardoquesada/Spidermonkey | 4a75ea2543408bd1b2c515aa95901523eeef7858 | dom/bindings/parser/WebIDL.py | python | Parser.p_ExtendedAttributeNoArgs | (self, p) | ExtendedAttributeNoArgs : IDENTIFIER | ExtendedAttributeNoArgs : IDENTIFIER | [
"ExtendedAttributeNoArgs",
":",
"IDENTIFIER"
] | def p_ExtendedAttributeNoArgs(self, p):
"""
ExtendedAttributeNoArgs : IDENTIFIER
"""
p[0] = (p[1],) | [
"def",
"p_ExtendedAttributeNoArgs",
"(",
"self",
",",
"p",
")",
":",
"p",
"[",
"0",
"]",
"=",
"(",
"p",
"[",
"1",
"]",
",",
")"
] | https://github.com/ricardoquesada/Spidermonkey/blob/4a75ea2543408bd1b2c515aa95901523eeef7858/dom/bindings/parser/WebIDL.py#L5511-L5515 |
||
synfig/synfig | a5ec91db5b751dc12e4400ccfb5c063fd6d2d928 | synfig-studio/plugins/lottie-exporter/canvas.py | python | calc_time | (root, lottie, which) | Converts the starting time and ending time to lottie format
Args:
root (lxml.etree._Element) : Synfig format animation file
lottie (dict) : Lottie format animation file
which (str) : Differentiates between in time and out time
Returns:
(None) | Converts the starting time and ending time to lottie format | [
"Converts",
"the",
"starting",
"time",
"and",
"ending",
"time",
"to",
"lottie",
"format"
] | def calc_time(root, lottie, which):
"""
Converts the starting time and ending time to lottie format
Args:
root (lxml.etree._Element) : Synfig format animation file
lottie (dict) : Lottie format animation file
which (str) : Differentiates between in time and out time
Returns:
(None)
"""
if which == "ip":
phase = "begin-time"
elif which == "op":
phase = "end-time"
time = root.attrib[phase]
lottie[which] = convert_time_to_frames(time)
# To support canvas with single frames
if which == "op":
lottie[which] += 1 | [
"def",
"calc_time",
"(",
"root",
",",
"lottie",
",",
"which",
")",
":",
"if",
"which",
"==",
"\"ip\"",
":",
"phase",
"=",
"\"begin-time\"",
"elif",
"which",
"==",
"\"op\"",
":",
"phase",
"=",
"\"end-time\"",
"time",
"=",
"root",
".",
"attrib",
"[",
"phase",
"]",
"lottie",
"[",
"which",
"]",
"=",
"convert_time_to_frames",
"(",
"time",
")",
"# To support canvas with single frames",
"if",
"which",
"==",
"\"op\"",
":",
"lottie",
"[",
"which",
"]",
"+=",
"1"
] | https://github.com/synfig/synfig/blob/a5ec91db5b751dc12e4400ccfb5c063fd6d2d928/synfig-studio/plugins/lottie-exporter/canvas.py#L38-L59 |
||
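`convert_time_to_frames` is internal to the exporter, but the conversion it names is just time multiplied by the canvas frame rate (Lottie's `ip`/`op` are frame indices). A generic sketch; the 24 fps default is an assumption for illustration:

```python
def time_to_frames(seconds, fps=24.0):
    """Map a time in seconds to a Lottie frame index (fps is assumed)."""
    return round(seconds * fps)

assert time_to_frames(2.0) == 48
assert time_to_frames(1.0, fps=30.0) == 30
```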
ros-planning/moveit | ee48dc5cedc981d0869352aa3db0b41469c2735c | moveit_commander/src/moveit_commander/planning_scene_interface.py | python | PlanningSceneInterface.remove_attached_object | (self, link=None, name=None) | Remove an attached object from the robot, or all objects attached to the link if no name is provided,
or all attached objects in the scene if neither link nor name are provided.
Removed attached objects remain in the scene as world objects.
Call remove_world_object afterwards to remove them from the scene. | Remove an attached object from the robot, or all objects attached to the link if no name is provided,
or all attached objects in the scene if neither link nor name are provided. | [
"Remove",
"an",
"attached",
"object",
"from",
"the",
"robot",
"or",
"all",
"objects",
"attached",
"to",
"the",
"link",
"if",
"no",
"name",
"is",
"provided",
"or",
"all",
"attached",
"objects",
"in",
"the",
"scene",
"if",
"neither",
"link",
"nor",
"name",
"are",
"provided",
"."
] | def remove_attached_object(self, link=None, name=None):
"""
Remove an attached object from the robot, or all objects attached to the link if no name is provided,
or all attached objects in the scene if neither link nor name are provided.
Removed attached objects remain in the scene as world objects.
Call remove_world_object afterwards to remove them from the scene.
"""
aco = AttachedCollisionObject()
aco.object.operation = CollisionObject.REMOVE
if link is not None:
aco.link_name = link
if name is not None:
aco.object.id = name
self.__submit(aco, attach=True) | [
"def",
"remove_attached_object",
"(",
"self",
",",
"link",
"=",
"None",
",",
"name",
"=",
"None",
")",
":",
"aco",
"=",
"AttachedCollisionObject",
"(",
")",
"aco",
".",
"object",
".",
"operation",
"=",
"CollisionObject",
".",
"REMOVE",
"if",
"link",
"is",
"not",
"None",
":",
"aco",
".",
"link_name",
"=",
"link",
"if",
"name",
"is",
"not",
"None",
":",
"aco",
".",
"object",
".",
"id",
"=",
"name",
"self",
".",
"__submit",
"(",
"aco",
",",
"attach",
"=",
"True",
")"
] | https://github.com/ros-planning/moveit/blob/ee48dc5cedc981d0869352aa3db0b41469c2735c/moveit_commander/src/moveit_commander/planning_scene_interface.py#L178-L192 |
||
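A hypothetical call sequence following the docstring's advice (detach first, then remove the leftover world object). It assumes a running ROS node with a MoveIt planning scene; the link and object names are placeholders.

```python
# Assumes rospy is initialized and a MoveIt planning scene is available.
from moveit_commander import PlanningSceneInterface

scene = PlanningSceneInterface()
scene.remove_attached_object(link="panda_hand", name="box")  # detach from robot
scene.remove_world_object("box")                             # then remove from scene
```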
baidu-research/tensorflow-allreduce | 66d5b855e90b0949e9fa5cca5599fd729a70e874 | tensorflow/contrib/keras/python/keras/utils/layer_utils.py | python | convert_all_kernels_in_model | (model) | Converts all convolution kernels in a model from Theano to TensorFlow.
Also works from TensorFlow to Theano.
Arguments:
model: target model for the conversion. | Converts all convolution kernels in a model from Theano to TensorFlow. | [
"Converts",
"all",
"convolution",
"kernels",
"in",
"a",
"model",
"from",
"Theano",
"to",
"TensorFlow",
"."
] | def convert_all_kernels_in_model(model):
"""Converts all convolution kernels in a model from Theano to TensorFlow.
Also works from TensorFlow to Theano.
Arguments:
model: target model for the conversion.
"""
# Note: SeparableConvolution not included
# since only supported by TF.
conv_classes = {
'Conv1D',
'Conv2D',
'Conv3D',
'Conv2DTranspose',
}
to_assign = []
for layer in model.layers:
if layer.__class__.__name__ in conv_classes:
original_kernel = K.get_value(layer.kernel)
converted_kernel = convert_kernel(original_kernel)
to_assign.append((layer.kernel, converted_kernel))
K.batch_set_value(to_assign) | [
"def",
"convert_all_kernels_in_model",
"(",
"model",
")",
":",
"# Note: SeparableConvolution not included",
"# since only supported by TF.",
"conv_classes",
"=",
"{",
"'Conv1D'",
",",
"'Conv2D'",
",",
"'Conv3D'",
",",
"'Conv2DTranspose'",
",",
"}",
"to_assign",
"=",
"[",
"]",
"for",
"layer",
"in",
"model",
".",
"layers",
":",
"if",
"layer",
".",
"__class__",
".",
"__name__",
"in",
"conv_classes",
":",
"original_kernel",
"=",
"K",
".",
"get_value",
"(",
"layer",
".",
"kernel",
")",
"converted_kernel",
"=",
"convert_kernel",
"(",
"original_kernel",
")",
"to_assign",
".",
"append",
"(",
"(",
"layer",
".",
"kernel",
",",
"converted_kernel",
")",
")",
"K",
".",
"batch_set_value",
"(",
"to_assign",
")"
] | https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/contrib/keras/python/keras/utils/layer_utils.py#L158-L180 |
||
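`convert_kernel` itself is not shown in this record; the Theano/TensorFlow difference it bridges is a flip of the kernel's spatial axes. A numpy sketch of that flip, under the assumption that the trailing two axes are the input/output channel axes (Keras' default kernel layout):

```python
import numpy as np

def convert_kernel_np(kernel):
    # Flip every spatial axis; leave the trailing (in, out) channel axes alone.
    spatial_axes = tuple(range(kernel.ndim - 2))
    return np.flip(kernel, axis=spatial_axes)

k = np.arange(3 * 3 * 4 * 8).reshape(3, 3, 4, 8)  # a 3x3 Conv2D kernel
assert np.array_equal(convert_kernel_np(convert_kernel_np(k)), k)  # involution
```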
cztomczak/cefpython | 5679f28cec18a57a56e298da2927aac8d8f83ad6 | tools/build.py | python | compile_cpp_projects_windows_DEPRECATED | () | DEPRECATED. Not used currently.
Build C++ projects using .vcproj files. | DEPRECATED. Not used currently.
Build C++ projects using .vcproj files. | [
"DEPRECATED",
".",
"Not",
"used",
"currently",
".",
"Build",
"C",
"++",
"projects",
"using",
".",
"vcproj",
"files",
"."
] | def compile_cpp_projects_windows_DEPRECATED():
"""DEPRECATED. Not used currently.
Build C++ projects using .vcproj files."""
# TODO: Remove code after setuptools compilation was tested for some time
print("[build.py] Compile C++ projects")
print("[build.py] ~~ Build CLIENT_HANDLER vcproj")
vcproj = ("client_handler_py{pyver}_{os}.vcproj"
.format(pyver=PYVERSION, os=OS_POSTFIX2))
vcproj = os.path.join(SRC_DIR, "client_handler", vcproj)
build_vcproj_DEPRECATED(vcproj)
print("[build.py] ~~ Build LIBCEFPYTHONAPP vcproj")
vcproj = ("libcefpythonapp_py{pyver}_{os}.vcproj"
.format(pyver=PYVERSION, os=OS_POSTFIX2))
vcproj = os.path.join(SRC_DIR, "subprocess", vcproj)
build_vcproj_DEPRECATED(vcproj)
print("[build.py] ~~ Build SUBPROCESS vcproj")
vcproj = ("subprocess_{os}.vcproj"
.format(os=OS_POSTFIX2))
vcproj = os.path.join(SRC_DIR, "subprocess", vcproj)
ret = build_vcproj_DEPRECATED(vcproj)
# Copy subprocess executable
subprocess_from = os.path.join(
SUBPROCESS_DIR,
"Release_{os}".format(os=OS_POSTFIX2),
"subprocess_{os}.exe".format(os=OS_POSTFIX2))
subprocess_to = os.path.join(CEFPYTHON_BINARY, "subprocess.exe")
if os.path.exists(subprocess_to):
os.remove(subprocess_to)
if ret == 0:
print("[build.py] Copy subprocess executable")
# shutil.copy() will also copy Permission bits
shutil.copy(subprocess_from, subprocess_to)
print("[build.py] ~~ Build CPP_UTILS vcproj")
vcproj = ("cpp_utils_{os}.vcproj"
.format(os=OS_POSTFIX2))
vcproj = os.path.join(SRC_DIR, "cpp_utils", vcproj)
build_vcproj_DEPRECATED(vcproj) | [
"def",
"compile_cpp_projects_windows_DEPRECATED",
"(",
")",
":",
"# TODO: Remove code after setuptools compilation was tested for some time",
"print",
"(",
"\"[build.py] Compile C++ projects\"",
")",
"print",
"(",
"\"[build.py] ~~ Build CLIENT_HANDLER vcproj\"",
")",
"vcproj",
"=",
"(",
"\"client_handler_py{pyver}_{os}.vcproj\"",
".",
"format",
"(",
"pyver",
"=",
"PYVERSION",
",",
"os",
"=",
"OS_POSTFIX2",
")",
")",
"vcproj",
"=",
"os",
".",
"path",
".",
"join",
"(",
"SRC_DIR",
",",
"\"client_handler\"",
",",
"vcproj",
")",
"build_vcproj_DEPRECATED",
"(",
"vcproj",
")",
"print",
"(",
"\"[build.py] ~~ Build LIBCEFPYTHONAPP vcproj\"",
")",
"vcproj",
"=",
"(",
"\"libcefpythonapp_py{pyver}_{os}.vcproj\"",
".",
"format",
"(",
"pyver",
"=",
"PYVERSION",
",",
"os",
"=",
"OS_POSTFIX2",
")",
")",
"vcproj",
"=",
"os",
".",
"path",
".",
"join",
"(",
"SRC_DIR",
",",
"\"subprocess\"",
",",
"vcproj",
")",
"build_vcproj_DEPRECATED",
"(",
"vcproj",
")",
"print",
"(",
"\"[build.py] ~~ Build SUBPROCESS vcproj\"",
")",
"vcproj",
"=",
"(",
"\"subprocess_{os}.vcproj\"",
".",
"format",
"(",
"os",
"=",
"OS_POSTFIX2",
")",
")",
"vcproj",
"=",
"os",
".",
"path",
".",
"join",
"(",
"SRC_DIR",
",",
"\"subprocess\"",
",",
"vcproj",
")",
"ret",
"=",
"build_vcproj_DEPRECATED",
"(",
"vcproj",
")",
"# Copy subprocess executable",
"subprocess_from",
"=",
"os",
".",
"path",
".",
"join",
"(",
"SUBPROCESS_DIR",
",",
"\"Release_{os}\"",
".",
"format",
"(",
"os",
"=",
"OS_POSTFIX2",
")",
",",
"\"subprocess_{os}.exe\"",
".",
"format",
"(",
"os",
"=",
"OS_POSTFIX2",
")",
")",
"subprocess_to",
"=",
"os",
".",
"path",
".",
"join",
"(",
"CEFPYTHON_BINARY",
",",
"\"subprocess.exe\"",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"subprocess_to",
")",
":",
"os",
".",
"remove",
"(",
"subprocess_to",
")",
"if",
"ret",
"==",
"0",
":",
"print",
"(",
"\"[build.py] Copy subprocess executable\"",
")",
"# shutil.copy() will also copy Permission bits",
"shutil",
".",
"copy",
"(",
"subprocess_from",
",",
"subprocess_to",
")",
"print",
"(",
"\"[build.py] ~~ Build CPP_UTILS vcproj\"",
")",
"vcproj",
"=",
"(",
"\"cpp_utils_{os}.vcproj\"",
".",
"format",
"(",
"os",
"=",
"OS_POSTFIX2",
")",
")",
"vcproj",
"=",
"os",
".",
"path",
".",
"join",
"(",
"SRC_DIR",
",",
"\"cpp_utils\"",
",",
"vcproj",
")",
"build_vcproj_DEPRECATED",
"(",
"vcproj",
")"
] | https://github.com/cztomczak/cefpython/blob/5679f28cec18a57a56e298da2927aac8d8f83ad6/tools/build.py#L415-L458 |
||
google/llvm-propeller | 45c226984fe8377ebfb2ad7713c680d652ba678d | openmp/runtime/tools/summarizeStats.py | python | drawChart | (data, kind, filebase) | Draw a summary bar chart for the requested data frame into the specified file | Draw a summary bar chart for the requested data frame into the specified file | [
"Draw",
"a",
"summary",
"bar",
"chart",
"for",
"the",
"requested",
"data",
"frame",
"into",
"the",
"specified",
"file"
] | def drawChart(data, kind, filebase):
"""Draw a summary bar chart for the requested data frame into the specified file"""
data["Mean"].plot(kind="bar", logy=True, grid=True, colormap="GnBu",
yerr=data["SD"], ecolor="black")
plt.xlabel("OMP Constructs")
plt.ylabel(statProperties[kind][0])
plt.title (statProperties[kind][1])
plt.tight_layout()
plt.savefig(filebase+"_"+kind) | [
"def",
"drawChart",
"(",
"data",
",",
"kind",
",",
"filebase",
")",
":",
"data",
"[",
"\"Mean\"",
"]",
".",
"plot",
"(",
"kind",
"=",
"\"bar\"",
",",
"logy",
"=",
"True",
",",
"grid",
"=",
"True",
",",
"colormap",
"=",
"\"GnBu\"",
",",
"yerr",
"=",
"data",
"[",
"\"SD\"",
"]",
",",
"ecolor",
"=",
"\"black\"",
")",
"plt",
".",
"xlabel",
"(",
"\"OMP Constructs\"",
")",
"plt",
".",
"ylabel",
"(",
"statProperties",
"[",
"kind",
"]",
"[",
"0",
"]",
")",
"plt",
".",
"title",
"(",
"statProperties",
"[",
"kind",
"]",
"[",
"1",
"]",
")",
"plt",
".",
"tight_layout",
"(",
")",
"plt",
".",
"savefig",
"(",
"filebase",
"+",
"\"_\"",
"+",
"kind",
")"
] | https://github.com/google/llvm-propeller/blob/45c226984fe8377ebfb2ad7713c680d652ba678d/openmp/runtime/tools/summarizeStats.py#L162-L170 |
||
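A self-contained sketch of the same pandas/matplotlib call pattern with made-up data; the `Agg` backend, numbers, and axis labels are placeholders:

```python
import matplotlib
matplotlib.use("Agg")   # headless rendering for the sketch
import matplotlib.pyplot as plt
import pandas as pd

data = pd.DataFrame({"Mean": [120.0, 8.5, 42.0], "SD": [11.0, 0.9, 5.0]},
                    index=["OMP_parallel", "OMP_barrier", "OMP_critical"])
data["Mean"].plot(kind="bar", logy=True, grid=True, colormap="GnBu",
                  yerr=data["SD"], ecolor="black")
plt.xlabel("OMP Constructs")
plt.ylabel("Time (ticks)")  # placeholder units
plt.title("Example summary")
plt.tight_layout()
plt.savefig("summary_example.png")
```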
windystrife/UnrealEngine_NVIDIAGameWorks | b50e6338a7c5b26374d66306ebc7807541ff815e | Engine/Source/ThirdParty/CEF3/cef_source/tools/cefbuilds/cef_html_builder.py | python | cef_html_builder.generate | (self, json_builder) | return root_str | Generate HTML output based on the contents of |json_builder|. | Generate HTML output based on the contents of |json_builder|. | [
"Generate",
"HTML",
"output",
"based",
"on",
"the",
"contents",
"of",
"|json_builder|",
"."
] | def generate(self, json_builder):
""" Generate HTML output based on the contents of |json_builder|. """
if not isinstance(json_builder, cef_json_builder):
raise Exception('Invalid argument')
# Substitution values are augmented at each nesting level.
subs = {
'year': '2016',
'branding': self._branding,
}
# Substitute variables.
root_str = self._replace_all(self._parts['root'], subs)
platform_link_strs = []
platform_strs = []
for platform in json_builder.get_platforms():
subs['platform'] = platform
subs['platform_name'] = self._get_platform_name(platform)
# Substitute variables.
platform_link_str = self._replace_all(self._parts['platform_link'], subs)
platform_str = self._replace_all(self._parts['platform'], subs)
version_strs = []
for version in json_builder.get_versions(platform):
subs['cef_version'] = version['cef_version']
subs['chromium_version'] = version['chromium_version']
subs['last_modified'] = self._get_date(version['files'][0]['last_modified'])
subs['cef_source_url'] = self._get_cef_source_url(version['cef_version'])
subs['chromium_source_url'] = self._get_chromium_source_url(version['chromium_version'])
# Substitute variables.
version_str = self._replace_all(self._parts['version'], subs)
file_strs = {}
for file in version['files']:
subs['last_modified'] = self._get_date(file['last_modified'])
subs['name'] = file['name']
subs['sha1'] = file['sha1']
subs['size'] = self._get_file_size(file['size'])
subs['type'] = file['type']
subs['type_name'] = self._get_type_name(file['type'])
subs['file_url'] = self._get_file_url(platform, version['cef_version'], file)
subs['sha1_url'] = self._get_sha1_url(platform, version['cef_version'], file)
subs['tooltip_text'] = self._get_tooltip_text(platform, version['cef_version'], file)
# Substitute variables.
file_str = self._replace_all(self._parts['file'], subs)
file_strs[file['type']] = file_str
if len(file_strs) > 0:
# Always output file types in the same order.
file_out = ''
type_order = ['standard', 'minimal', 'client', 'debug_symbols', 'release_symbols']
for type in type_order:
if type in file_strs:
file_out = file_out + file_strs[type]
# Insert files.
version_str = self._replace(version_str, self._section_key('file'), file_out)
version_strs.append(version_str)
if len(version_strs) > 0:
# Insert versions.
platform_str = self._replace(platform_str, self._section_key('version'), "".join(version_strs))
platform_strs.append(platform_str)
platform_link_strs.append(platform_link_str)
if len(platform_strs) > 0:
# Insert platforms.
root_str = self._replace(root_str, self._section_key('platform_link'), "".join(platform_link_strs))
root_str = self._replace(root_str, self._section_key('platform'), "".join(platform_strs))
return root_str | [
"def",
"generate",
"(",
"self",
",",
"json_builder",
")",
":",
"if",
"not",
"isinstance",
"(",
"json_builder",
",",
"cef_json_builder",
")",
":",
"raise",
"Exception",
"(",
"'Invalid argument'",
")",
"# Substitution values are augmented at each nesting level.",
"subs",
"=",
"{",
"'year'",
":",
"'2016'",
",",
"'branding'",
":",
"self",
".",
"_branding",
",",
"}",
"# Substitute variables.",
"root_str",
"=",
"self",
".",
"_replace_all",
"(",
"self",
".",
"_parts",
"[",
"'root'",
"]",
",",
"subs",
")",
"platform_link_strs",
"=",
"[",
"]",
"platform_strs",
"=",
"[",
"]",
"for",
"platform",
"in",
"json_builder",
".",
"get_platforms",
"(",
")",
":",
"subs",
"[",
"'platform'",
"]",
"=",
"platform",
"subs",
"[",
"'platform_name'",
"]",
"=",
"self",
".",
"_get_platform_name",
"(",
"platform",
")",
"# Substitute variables.",
"platform_link_str",
"=",
"self",
".",
"_replace_all",
"(",
"self",
".",
"_parts",
"[",
"'platform_link'",
"]",
",",
"subs",
")",
"platform_str",
"=",
"self",
".",
"_replace_all",
"(",
"self",
".",
"_parts",
"[",
"'platform'",
"]",
",",
"subs",
")",
"version_strs",
"=",
"[",
"]",
"for",
"version",
"in",
"json_builder",
".",
"get_versions",
"(",
"platform",
")",
":",
"subs",
"[",
"'cef_version'",
"]",
"=",
"version",
"[",
"'cef_version'",
"]",
"subs",
"[",
"'chromium_version'",
"]",
"=",
"version",
"[",
"'chromium_version'",
"]",
"subs",
"[",
"'last_modified'",
"]",
"=",
"self",
".",
"_get_date",
"(",
"version",
"[",
"'files'",
"]",
"[",
"0",
"]",
"[",
"'last_modified'",
"]",
")",
"subs",
"[",
"'cef_source_url'",
"]",
"=",
"self",
".",
"_get_cef_source_url",
"(",
"version",
"[",
"'cef_version'",
"]",
")",
"subs",
"[",
"'chromium_source_url'",
"]",
"=",
"self",
".",
"_get_chromium_source_url",
"(",
"version",
"[",
"'chromium_version'",
"]",
")",
"# Substitute variables.",
"version_str",
"=",
"self",
".",
"_replace_all",
"(",
"self",
".",
"_parts",
"[",
"'version'",
"]",
",",
"subs",
")",
"file_strs",
"=",
"{",
"}",
"for",
"file",
"in",
"version",
"[",
"'files'",
"]",
":",
"subs",
"[",
"'last_modified'",
"]",
"=",
"self",
".",
"_get_date",
"(",
"file",
"[",
"'last_modified'",
"]",
")",
"subs",
"[",
"'name'",
"]",
"=",
"file",
"[",
"'name'",
"]",
"subs",
"[",
"'sha1'",
"]",
"=",
"file",
"[",
"'sha1'",
"]",
"subs",
"[",
"'size'",
"]",
"=",
"self",
".",
"_get_file_size",
"(",
"file",
"[",
"'size'",
"]",
")",
"subs",
"[",
"'type'",
"]",
"=",
"file",
"[",
"'type'",
"]",
"subs",
"[",
"'type_name'",
"]",
"=",
"self",
".",
"_get_type_name",
"(",
"file",
"[",
"'type'",
"]",
")",
"subs",
"[",
"'file_url'",
"]",
"=",
"self",
".",
"_get_file_url",
"(",
"platform",
",",
"version",
"[",
"'cef_version'",
"]",
",",
"file",
")",
"subs",
"[",
"'sha1_url'",
"]",
"=",
"self",
".",
"_get_sha1_url",
"(",
"platform",
",",
"version",
"[",
"'cef_version'",
"]",
",",
"file",
")",
"subs",
"[",
"'tooltip_text'",
"]",
"=",
"self",
".",
"_get_tooltip_text",
"(",
"platform",
",",
"version",
"[",
"'cef_version'",
"]",
",",
"file",
")",
"# Substitute variables.",
"file_str",
"=",
"self",
".",
"_replace_all",
"(",
"self",
".",
"_parts",
"[",
"'file'",
"]",
",",
"subs",
")",
"file_strs",
"[",
"file",
"[",
"'type'",
"]",
"]",
"=",
"file_str",
"if",
"len",
"(",
"file_strs",
")",
">",
"0",
":",
"# Always output file types in the same order.",
"file_out",
"=",
"''",
"type_order",
"=",
"[",
"'standard'",
",",
"'minimal'",
",",
"'client'",
",",
"'debug_symbols'",
",",
"'release_symbols'",
"]",
"for",
"type",
"in",
"type_order",
":",
"if",
"type",
"in",
"file_strs",
":",
"file_out",
"=",
"file_out",
"+",
"file_strs",
"[",
"type",
"]",
"# Insert files.",
"version_str",
"=",
"self",
".",
"_replace",
"(",
"version_str",
",",
"self",
".",
"_section_key",
"(",
"'file'",
")",
",",
"file_out",
")",
"version_strs",
".",
"append",
"(",
"version_str",
")",
"if",
"len",
"(",
"version_strs",
")",
">",
"0",
":",
"# Insert versions.",
"platform_str",
"=",
"self",
".",
"_replace",
"(",
"platform_str",
",",
"self",
".",
"_section_key",
"(",
"'version'",
")",
",",
"\"\"",
".",
"join",
"(",
"version_strs",
")",
")",
"platform_strs",
".",
"append",
"(",
"platform_str",
")",
"platform_link_strs",
".",
"append",
"(",
"platform_link_str",
")",
"if",
"len",
"(",
"platform_strs",
")",
">",
"0",
":",
"# Insert platforms.",
"root_str",
"=",
"self",
".",
"_replace",
"(",
"root_str",
",",
"self",
".",
"_section_key",
"(",
"'platform_link'",
")",
",",
"\"\"",
".",
"join",
"(",
"platform_link_strs",
")",
")",
"root_str",
"=",
"self",
".",
"_replace",
"(",
"root_str",
",",
"self",
".",
"_section_key",
"(",
"'platform'",
")",
",",
"\"\"",
".",
"join",
"(",
"platform_strs",
")",
")",
"return",
"root_str"
] | https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Source/ThirdParty/CEF3/cef_source/tools/cefbuilds/cef_html_builder.py#L185-L259 |
|
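The generator above leans on two helpers not shown here: `_replace_all`, which substitutes scalar values, and `_section_key`, which names the splice point for a repeated child section. A minimal sketch of that two-step templating; the `{{name}}` / `{{#section}}` placeholder syntax is an assumption for illustration, not necessarily what cef_html_builder uses:

```python
# Hypothetical stand-ins for _replace_all / _section_key.
def replace_all(template, subs):
    for key, value in subs.items():
        template = template.replace("{{%s}}" % key, str(value))
    return template

def section_key(name):
    return "{{#%s}}" % name

row = replace_all("<td>{{name}}</td><td>{{size}}</td>",
                  {"name": "cef.tar.bz2", "size": "48 MB"})
table = "<table>{{#file}}</table>".replace(section_key("file"), row)
print(table)  # <table><td>cef.tar.bz2</td><td>48 MB</td></table>
```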
mantidproject/mantid | 03deeb89254ec4289edb8771e0188c2090a02f32 | Framework/PythonInterface/plugins/algorithms/LoadAndMerge.py | python | LoadAndMerge._load | (self, run, runnumber) | Loads the single run using the specific loader
@param run : the full file path
@param runnumber : the run number | Loads the single run using the specific loader | [
"Loads",
"the",
"single",
"run",
"using",
"the",
"specific",
"loader"
] | def _load(self, run, runnumber):
"""
Loads the single run using the specific loader
@param run : the full file path
@param runnumber : the run number
"""
self._progress.report('Loading '+runnumber)
alg = self._create_fresh_loader()
alg.setPropertyValue('Filename', run)
alg.setPropertyValue('OutputWorkspace', runnumber)
alg.execute() | [
"def",
"_load",
"(",
"self",
",",
"run",
",",
"runnumber",
")",
":",
"self",
".",
"_progress",
".",
"report",
"(",
"'Loading '",
"+",
"runnumber",
")",
"alg",
"=",
"self",
".",
"_create_fresh_loader",
"(",
")",
"alg",
".",
"setPropertyValue",
"(",
"'Filename'",
",",
"run",
")",
"alg",
".",
"setPropertyValue",
"(",
"'OutputWorkspace'",
",",
"runnumber",
")",
"alg",
".",
"execute",
"(",
")"
] | https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/Framework/PythonInterface/plugins/algorithms/LoadAndMerge.py#L60-L70 |
||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/gtk/aui.py | python | PyAuiTabArt.GetMeasuringFont | (*args, **kwargs) | return _aui.PyAuiTabArt_GetMeasuringFont(*args, **kwargs) | GetMeasuringFont(self) -> Font | GetMeasuringFont(self) -> Font | [
"GetMeasuringFont",
"(",
"self",
")",
"-",
">",
"Font"
] | def GetMeasuringFont(*args, **kwargs):
"""GetMeasuringFont(self) -> Font"""
return _aui.PyAuiTabArt_GetMeasuringFont(*args, **kwargs) | [
"def",
"GetMeasuringFont",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_aui",
".",
"PyAuiTabArt_GetMeasuringFont",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/aui.py#L2443-L2445 |
|
hpi-xnor/BMXNet-v2 | af2b1859eafc5c721b1397cef02f946aaf2ce20d | python/mxnet/contrib/onnx/onnx2mx/_op_translations.py | python | sigmoid | (attrs, inputs, proto_obj) | return 'sigmoid', attrs, inputs | Computes elementwise sigmoid of the input array | Computes elementwise sigmoid of the input array | [
"Computes",
"elementwise",
"sigmoid",
"of",
"the",
"input",
"array"
] | def sigmoid(attrs, inputs, proto_obj):
"""Computes elementwise sigmoid of the input array"""
return 'sigmoid', attrs, inputs | [
"def",
"sigmoid",
"(",
"attrs",
",",
"inputs",
",",
"proto_obj",
")",
":",
"return",
"'sigmoid'",
",",
"attrs",
",",
"inputs"
] | https://github.com/hpi-xnor/BMXNet-v2/blob/af2b1859eafc5c721b1397cef02f946aaf2ce20d/python/mxnet/contrib/onnx/onnx2mx/_op_translations.py#L229-L231 |
|
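The translation above only maps the ONNX op name onto MXNet's `sigmoid`; the function itself is sigma(x) = 1 / (1 + e^(-x)). A numerically stable numpy sketch:

```python
import numpy as np

def sigmoid(x):
    # Split by sign so exp() never sees a large positive argument.
    x = np.asarray(x, dtype=float)
    out = np.empty_like(x)
    pos = x >= 0
    out[pos] = 1.0 / (1.0 + np.exp(-x[pos]))
    ex = np.exp(x[~pos])
    out[~pos] = ex / (1.0 + ex)
    return out

assert np.allclose(sigmoid(np.array([0.0, 1000.0, -1000.0])), [0.5, 1.0, 0.0])
```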
kripken/BananaBread | 455191d2e289f6d67f22c9ec44477ff0814d9aa3 | tools/websockify/websockify/websocket.py | python | WebSocketServer.encode_hybi | (buf, opcode, base64=False) | return header + buf, len(header), 0 | Encode a HyBi style WebSocket frame.
Optional opcode:
0x0 - continuation
0x1 - text frame (base64 encode buf)
0x2 - binary frame (use raw buf)
0x8 - connection close
0x9 - ping
0xA - pong | Encode a HyBi style WebSocket frame.
Optional opcode:
0x0 - continuation
0x1 - text frame (base64 encode buf)
0x2 - binary frame (use raw buf)
0x8 - connection close
0x9 - ping
0xA - pong | [
"Encode",
"a",
"HyBi",
"style",
"WebSocket",
"frame",
".",
"Optional",
"opcode",
":",
"0x0",
"-",
"continuation",
"0x1",
"-",
"text",
"frame",
"(",
"base64",
"encode",
"buf",
")",
"0x2",
"-",
"binary",
"frame",
"(",
"use",
"raw",
"buf",
")",
"0x8",
"-",
"connection",
"close",
"0x9",
"-",
"ping",
"0xA",
"-",
"pong"
] | def encode_hybi(buf, opcode, base64=False):
""" Encode a HyBi style WebSocket frame.
Optional opcode:
0x0 - continuation
0x1 - text frame (base64 encode buf)
0x2 - binary frame (use raw buf)
0x8 - connection close
0x9 - ping
0xA - pong
"""
if base64:
buf = b64encode(buf)
b1 = 0x80 | (opcode & 0x0f) # FIN + opcode
payload_len = len(buf)
if payload_len <= 125:
header = pack('>BB', b1, payload_len)
elif payload_len > 125 and payload_len < 65536:
header = pack('>BBH', b1, 126, payload_len)
elif payload_len >= 65536:
header = pack('>BBQ', b1, 127, payload_len)
#print("Encoded: %s" % repr(header + buf))
return header + buf, len(header), 0 | [
"def",
"encode_hybi",
"(",
"buf",
",",
"opcode",
",",
"base64",
"=",
"False",
")",
":",
"if",
"base64",
":",
"buf",
"=",
"b64encode",
"(",
"buf",
")",
"b1",
"=",
"0x80",
"|",
"(",
"opcode",
"&",
"0x0f",
")",
"# FIN + opcode",
"payload_len",
"=",
"len",
"(",
"buf",
")",
"if",
"payload_len",
"<=",
"125",
":",
"header",
"=",
"pack",
"(",
"'>BB'",
",",
"b1",
",",
"payload_len",
")",
"elif",
"payload_len",
">",
"125",
"and",
"payload_len",
"<",
"65536",
":",
"header",
"=",
"pack",
"(",
"'>BBH'",
",",
"b1",
",",
"126",
",",
"payload_len",
")",
"elif",
"payload_len",
">=",
"65536",
":",
"header",
"=",
"pack",
"(",
"'>BBQ'",
",",
"b1",
",",
"127",
",",
"payload_len",
")",
"#print(\"Encoded: %s\" % repr(header + buf))",
"return",
"header",
"+",
"buf",
",",
"len",
"(",
"header",
")",
",",
"0"
] | https://github.com/kripken/BananaBread/blob/455191d2e289f6d67f22c9ec44477ff0814d9aa3/tools/websockify/websockify/websocket.py#L276-L300 |
|
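A quick sanity check for the encoder above, assuming Python 3 `bytes` and that `encode_hybi` is callable unbound (it takes no `self`, so it behaves as a static method). A 5-byte binary payload exercises the short-header case:

```python
# Assumes the WebSocketServer class above is in scope (Python 3).
frame, header_len, _ = WebSocketServer.encode_hybi(b"hello", opcode=0x2)
assert header_len == 2
assert frame[0] == 0x82                # FIN bit set | binary opcode
assert frame[1] == 5                   # payload length <= 125, mask bit clear
assert frame[header_len:] == b"hello"
```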
tensorflow/tensorflow | 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e | tensorflow/python/ops/distributions/util.py | python | gen_new_seed | (seed, salt) | return int(hashlib.md5(string).hexdigest()[:8], 16) & 0x7FFFFFFF | Generate a new seed, from the given seed and salt. | Generate a new seed, from the given seed and salt. | [
"Generate",
"a",
"new",
"seed",
"from",
"the",
"given",
"seed",
"and",
"salt",
"."
] | def gen_new_seed(seed, salt):
"""Generate a new seed, from the given seed and salt."""
if seed is None:
return None
string = (str(seed) + salt).encode("utf-8")
return int(hashlib.md5(string).hexdigest()[:8], 16) & 0x7FFFFFFF | [
"def",
"gen_new_seed",
"(",
"seed",
",",
"salt",
")",
":",
"if",
"seed",
"is",
"None",
":",
"return",
"None",
"string",
"=",
"(",
"str",
"(",
"seed",
")",
"+",
"salt",
")",
".",
"encode",
"(",
"\"utf-8\"",
")",
"return",
"int",
"(",
"hashlib",
".",
"md5",
"(",
"string",
")",
".",
"hexdigest",
"(",
")",
"[",
":",
"8",
"]",
",",
"16",
")",
"&",
"0x7FFFFFFF"
] | https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/ops/distributions/util.py#L787-L792 |
|
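Two properties of the derivation above are worth spelling out: it is deterministic in (seed, salt), and the final `& 0x7FFFFFFF` keeps the result a non-negative 31-bit integer. A quick check, assuming `gen_new_seed` from the record is in scope:

```python
assert gen_new_seed(42, "beta") == gen_new_seed(42, "beta")   # deterministic
assert gen_new_seed(42, "beta") != gen_new_seed(42, "gamma")  # salt changes the hash
assert 0 <= gen_new_seed(42, "beta") <= 0x7FFFFFFF            # 31-bit range
assert gen_new_seed(None, "beta") is None                     # None passes through
```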
hunterlew/mstar_deeplearning_project | 3761624dcbd7d44af257200542d13d1444dc634a | classification/caffe/python/caffe/io.py | python | Transformer.set_input_scale | (self, in_, scale) | Set the scale of preprocessed inputs s.t. the blob = blob * scale.
N.B. input_scale is done AFTER mean subtraction and other preprocessing
while raw_scale is done BEFORE.
Parameters
----------
in_ : which input to assign this scale factor
scale : scale coefficient | Set the scale of preprocessed inputs s.t. the blob = blob * scale.
N.B. input_scale is done AFTER mean subtraction and other preprocessing
while raw_scale is done BEFORE. | [
"Set",
"the",
"scale",
"of",
"preprocessed",
"inputs",
"s",
".",
"t",
".",
"the",
"blob",
"=",
"blob",
"*",
"scale",
".",
"N",
".",
"B",
".",
"input_scale",
"is",
"done",
"AFTER",
"mean",
"subtraction",
"and",
"other",
"preprocessing",
"while",
"raw_scale",
"is",
"done",
"BEFORE",
"."
] | def set_input_scale(self, in_, scale):
"""
Set the scale of preprocessed inputs s.t. the blob = blob * scale.
N.B. input_scale is done AFTER mean subtraction and other preprocessing
while raw_scale is done BEFORE.
Parameters
----------
in_ : which input to assign this scale factor
scale : scale coefficient
"""
self.__check_input(in_)
self.input_scale[in_] = scale | [
"def",
"set_input_scale",
"(",
"self",
",",
"in_",
",",
"scale",
")",
":",
"self",
".",
"__check_input",
"(",
"in_",
")",
"self",
".",
"input_scale",
"[",
"in_",
"]",
"=",
"scale"
] | https://github.com/hunterlew/mstar_deeplearning_project/blob/3761624dcbd7d44af257200542d13d1444dc634a/classification/caffe/python/caffe/io.py#L262-L274 |
||
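A hypothetical pycaffe preprocessing setup showing where `input_scale` sits relative to `raw_scale` and mean subtraction; the shapes and constants are placeholders:

```python
# Assumes pycaffe is installed; all numbers are illustrative.
import numpy as np
import caffe

transformer = caffe.io.Transformer({"data": (1, 3, 227, 227)})
transformer.set_transpose("data", (2, 0, 1))                   # HWC -> CHW
transformer.set_raw_scale("data", 255.0)                       # BEFORE mean subtraction
transformer.set_mean("data", np.array([104.0, 117.0, 123.0]))  # per-channel mean
transformer.set_input_scale("data", 0.017)                     # AFTER mean subtraction
```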
hanpfei/chromium-net | 392cc1fa3a8f92f42e4071ab6e674d8e0482f83f | third_party/catapult/third_party/gsutil/third_party/boto/boto/cloudformation/connection.py | python | CloudFormationConnection.set_stack_policy | (self, stack_name_or_id, stack_policy_body=None,
stack_policy_url=None) | return response['SetStackPolicyResponse'] | Sets a stack policy for a specified stack.
:type stack_name_or_id: string
:param stack_name_or_id: The name or stack ID that you want to
associate a policy with.
:type stack_policy_body: string
:param stack_policy_body: Structure containing the stack policy body.
(For more information, go to ` Prevent Updates to Stack Resources`_
in the AWS CloudFormation User Guide.)
You must pass `StackPolicyBody` or `StackPolicyURL`. If both are
passed, only `StackPolicyBody` is used.
:type stack_policy_url: string
:param stack_policy_url: Location of a file containing the stack
policy. The URL must point to a policy (max size: 16KB) located in
an S3 bucket in the same region as the stack. You must pass
`StackPolicyBody` or `StackPolicyURL`. If both are passed, only
`StackPolicyBody` is used. | Sets a stack policy for a specified stack. | [
"Sets",
"a",
"stack",
"policy",
"for",
"a",
"specified",
"stack",
"."
] | def set_stack_policy(self, stack_name_or_id, stack_policy_body=None,
stack_policy_url=None):
"""
Sets a stack policy for a specified stack.
:type stack_name_or_id: string
:param stack_name_or_id: The name or stack ID that you want to
associate a policy with.
:type stack_policy_body: string
:param stack_policy_body: Structure containing the stack policy body.
(For more information, go to ` Prevent Updates to Stack Resources`_
in the AWS CloudFormation User Guide.)
You must pass `StackPolicyBody` or `StackPolicyURL`. If both are
passed, only `StackPolicyBody` is used.
:type stack_policy_url: string
:param stack_policy_url: Location of a file containing the stack
policy. The URL must point to a policy (max size: 16KB) located in
an S3 bucket in the same region as the stack. You must pass
`StackPolicyBody` or `StackPolicyURL`. If both are passed, only
`StackPolicyBody` is used.
"""
params = {'ContentType': "JSON", 'StackName': stack_name_or_id, }
if stack_policy_body is not None:
params['StackPolicyBody'] = stack_policy_body
if stack_policy_url is not None:
params['StackPolicyURL'] = stack_policy_url
response = self._do_request('SetStackPolicy', params, '/', 'POST')
return response['SetStackPolicyResponse'] | [
"def",
"set_stack_policy",
"(",
"self",
",",
"stack_name_or_id",
",",
"stack_policy_body",
"=",
"None",
",",
"stack_policy_url",
"=",
"None",
")",
":",
"params",
"=",
"{",
"'ContentType'",
":",
"\"JSON\"",
",",
"'StackName'",
":",
"stack_name_or_id",
",",
"}",
"if",
"stack_policy_body",
"is",
"not",
"None",
":",
"params",
"[",
"'StackPolicyBody'",
"]",
"=",
"stack_policy_body",
"if",
"stack_policy_url",
"is",
"not",
"None",
":",
"params",
"[",
"'StackPolicyURL'",
"]",
"=",
"stack_policy_url",
"response",
"=",
"self",
".",
"_do_request",
"(",
"'SetStackPolicy'",
",",
"params",
",",
"'/'",
",",
"'POST'",
")",
"return",
"response",
"[",
"'SetStackPolicyResponse'",
"]"
] | https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/gsutil/third_party/boto/boto/cloudformation/connection.py#L891-L922 |
|
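A hypothetical boto2-style call with an inline policy that denies replacement updates; the region, stack name, and policy are placeholders:

```python
# Assumes the legacy boto (boto2) package and configured AWS credentials.
import json
import boto.cloudformation

conn = boto.cloudformation.connect_to_region("us-east-1")
policy = {"Statement": [{"Effect": "Deny", "Action": "Update:Replace",
                         "Principal": "*", "Resource": "*"}]}
conn.set_stack_policy("my-stack", stack_policy_body=json.dumps(policy))
```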
miyosuda/TensorFlowAndroidMNIST | 7b5a4603d2780a8a2834575706e9001977524007 | jni-build/jni/include/tensorflow/python/ops/variables.py | python | Variable.ref | (self) | return self._variable | Returns a reference to this variable.
You usually do not need to call this method as all ops that need a reference
to the variable call it automatically.
The returned value is a `Tensor` which holds a reference to the variable. You can
assign a new value to the variable by passing the tensor to an assign op.
See [`value()`](#Variable.value) if you want to get the value of the
variable.
Returns:
A `Tensor` that is a reference to the variable. | Returns a reference to this variable. | [
"Returns",
"a",
"reference",
"to",
"this",
"variable",
"."
] | def ref(self):
"""Returns a reference to this variable.
You usually do not need to call this method as all ops that need a reference
to the variable call it automatically.
The returned value is a `Tensor` which holds a reference to the variable. You can
assign a new value to the variable by passing the tensor to an assign op.
See [`value()`](#Variable.value) if you want to get the value of the
variable.
Returns:
A `Tensor` that is a reference to the variable.
"""
return self._variable | [
"def",
"ref",
"(",
"self",
")",
":",
"return",
"self",
".",
"_variable"
] | https://github.com/miyosuda/TensorFlowAndroidMNIST/blob/7b5a4603d2780a8a2834575706e9001977524007/jni-build/jni/include/tensorflow/python/ops/variables.py#L395-L409 |
|
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/pandas/py3/pandas/core/indexes/base.py | python | Index.join | (
self,
other,
how: str_t = "left",
level=None,
return_indexers: bool = False,
sort: bool = False,
) | return join_index, lindexer, rindexer | Compute join_index and indexers to conform data
structures to the new index.
Parameters
----------
other : Index
how : {'left', 'right', 'inner', 'outer'}
level : int or level name, default None
return_indexers : bool, default False
sort : bool, default False
Sort the join keys lexicographically in the result Index. If False,
the order of the join keys depends on the join type (how keyword).
Returns
-------
join_index, (left_indexer, right_indexer) | Compute join_index and indexers to conform data
structures to the new index. | [
"Compute",
"join_index",
"and",
"indexers",
"to",
"conform",
"data",
"structures",
"to",
"the",
"new",
"index",
"."
] | def join(
self,
other,
how: str_t = "left",
level=None,
return_indexers: bool = False,
sort: bool = False,
):
"""
Compute join_index and indexers to conform data
structures to the new index.
Parameters
----------
other : Index
how : {'left', 'right', 'inner', 'outer'}
level : int or level name, default None
return_indexers : bool, default False
sort : bool, default False
Sort the join keys lexicographically in the result Index. If False,
the order of the join keys depends on the join type (how keyword).
Returns
-------
join_index, (left_indexer, right_indexer)
"""
other = ensure_index(other)
self_is_mi = isinstance(self, ABCMultiIndex)
other_is_mi = isinstance(other, ABCMultiIndex)
lindexer: np.ndarray | None
rindexer: np.ndarray | None
# try to figure out the join level
# GH3662
if level is None and (self_is_mi or other_is_mi):
# have the same levels/names so a simple join
if self.names == other.names:
pass
else:
return self._join_multi(other, how=how)
# join on the level
if level is not None and (self_is_mi or other_is_mi):
return self._join_level(other, level, how=how)
if len(other) == 0 and how in ("left", "outer"):
join_index = self._view()
rindexer = np.repeat(np.intp(-1), len(join_index))
return join_index, None, rindexer
if len(self) == 0 and how in ("right", "outer"):
join_index = other._view()
lindexer = np.repeat(np.intp(-1), len(join_index))
return join_index, lindexer, None
if self._join_precedence < other._join_precedence:
how = {"right": "left", "left": "right"}.get(how, how)
join_index, lidx, ridx = other.join(
self, how=how, level=level, return_indexers=True
)
lidx, ridx = ridx, lidx
return join_index, lidx, ridx
if not is_dtype_equal(self.dtype, other.dtype):
this = self.astype("O")
other = other.astype("O")
return this.join(other, how=how, return_indexers=True)
_validate_join_method(how)
if not self.is_unique and not other.is_unique:
return self._join_non_unique(other, how=how)
elif not self.is_unique or not other.is_unique:
if self.is_monotonic and other.is_monotonic:
return self._join_monotonic(other, how=how)
else:
return self._join_non_unique(other, how=how)
elif (
self.is_monotonic
and other.is_monotonic
and (
not isinstance(self, ABCMultiIndex)
or not any(is_categorical_dtype(dtype) for dtype in self.dtypes)
)
):
# Categorical is monotonic if data are ordered as categories, but join can
# not handle this in case of not lexicographically monotonic GH#38502
try:
return self._join_monotonic(other, how=how)
except TypeError:
pass
if how == "left":
join_index = self
elif how == "right":
join_index = other
elif how == "inner":
# TODO: sort=False here for backwards compat. It may
# be better to use the sort parameter passed into join
join_index = self.intersection(other, sort=False)
elif how == "outer":
# TODO: sort=True here for backwards compat. It may
# be better to use the sort parameter passed into join
join_index = self.union(other)
if sort:
join_index = join_index.sort_values()
if join_index is self:
lindexer = None
else:
lindexer = self.get_indexer(join_index)
if join_index is other:
rindexer = None
else:
rindexer = other.get_indexer(join_index)
return join_index, lindexer, rindexer | [
"def",
"join",
"(",
"self",
",",
"other",
",",
"how",
":",
"str_t",
"=",
"\"left\"",
",",
"level",
"=",
"None",
",",
"return_indexers",
":",
"bool",
"=",
"False",
",",
"sort",
":",
"bool",
"=",
"False",
",",
")",
":",
"other",
"=",
"ensure_index",
"(",
"other",
")",
"self_is_mi",
"=",
"isinstance",
"(",
"self",
",",
"ABCMultiIndex",
")",
"other_is_mi",
"=",
"isinstance",
"(",
"other",
",",
"ABCMultiIndex",
")",
"lindexer",
":",
"np",
".",
"ndarray",
"|",
"None",
"rindexer",
":",
"np",
".",
"ndarray",
"|",
"None",
"# try to figure out the join level",
"# GH3662",
"if",
"level",
"is",
"None",
"and",
"(",
"self_is_mi",
"or",
"other_is_mi",
")",
":",
"# have the same levels/names so a simple join",
"if",
"self",
".",
"names",
"==",
"other",
".",
"names",
":",
"pass",
"else",
":",
"return",
"self",
".",
"_join_multi",
"(",
"other",
",",
"how",
"=",
"how",
")",
"# join on the level",
"if",
"level",
"is",
"not",
"None",
"and",
"(",
"self_is_mi",
"or",
"other_is_mi",
")",
":",
"return",
"self",
".",
"_join_level",
"(",
"other",
",",
"level",
",",
"how",
"=",
"how",
")",
"if",
"len",
"(",
"other",
")",
"==",
"0",
"and",
"how",
"in",
"(",
"\"left\"",
",",
"\"outer\"",
")",
":",
"join_index",
"=",
"self",
".",
"_view",
"(",
")",
"rindexer",
"=",
"np",
".",
"repeat",
"(",
"np",
".",
"intp",
"(",
"-",
"1",
")",
",",
"len",
"(",
"join_index",
")",
")",
"return",
"join_index",
",",
"None",
",",
"rindexer",
"if",
"len",
"(",
"self",
")",
"==",
"0",
"and",
"how",
"in",
"(",
"\"right\"",
",",
"\"outer\"",
")",
":",
"join_index",
"=",
"other",
".",
"_view",
"(",
")",
"lindexer",
"=",
"np",
".",
"repeat",
"(",
"np",
".",
"intp",
"(",
"-",
"1",
")",
",",
"len",
"(",
"join_index",
")",
")",
"return",
"join_index",
",",
"lindexer",
",",
"None",
"if",
"self",
".",
"_join_precedence",
"<",
"other",
".",
"_join_precedence",
":",
"how",
"=",
"{",
"\"right\"",
":",
"\"left\"",
",",
"\"left\"",
":",
"\"right\"",
"}",
".",
"get",
"(",
"how",
",",
"how",
")",
"join_index",
",",
"lidx",
",",
"ridx",
"=",
"other",
".",
"join",
"(",
"self",
",",
"how",
"=",
"how",
",",
"level",
"=",
"level",
",",
"return_indexers",
"=",
"True",
")",
"lidx",
",",
"ridx",
"=",
"ridx",
",",
"lidx",
"return",
"join_index",
",",
"lidx",
",",
"ridx",
"if",
"not",
"is_dtype_equal",
"(",
"self",
".",
"dtype",
",",
"other",
".",
"dtype",
")",
":",
"this",
"=",
"self",
".",
"astype",
"(",
"\"O\"",
")",
"other",
"=",
"other",
".",
"astype",
"(",
"\"O\"",
")",
"return",
"this",
".",
"join",
"(",
"other",
",",
"how",
"=",
"how",
",",
"return_indexers",
"=",
"True",
")",
"_validate_join_method",
"(",
"how",
")",
"if",
"not",
"self",
".",
"is_unique",
"and",
"not",
"other",
".",
"is_unique",
":",
"return",
"self",
".",
"_join_non_unique",
"(",
"other",
",",
"how",
"=",
"how",
")",
"elif",
"not",
"self",
".",
"is_unique",
"or",
"not",
"other",
".",
"is_unique",
":",
"if",
"self",
".",
"is_monotonic",
"and",
"other",
".",
"is_monotonic",
":",
"return",
"self",
".",
"_join_monotonic",
"(",
"other",
",",
"how",
"=",
"how",
")",
"else",
":",
"return",
"self",
".",
"_join_non_unique",
"(",
"other",
",",
"how",
"=",
"how",
")",
"elif",
"(",
"self",
".",
"is_monotonic",
"and",
"other",
".",
"is_monotonic",
"and",
"(",
"not",
"isinstance",
"(",
"self",
",",
"ABCMultiIndex",
")",
"or",
"not",
"any",
"(",
"is_categorical_dtype",
"(",
"dtype",
")",
"for",
"dtype",
"in",
"self",
".",
"dtypes",
")",
")",
")",
":",
"# Categorical is monotonic if data are ordered as categories, but join can",
"# not handle this in case of not lexicographically monotonic GH#38502",
"try",
":",
"return",
"self",
".",
"_join_monotonic",
"(",
"other",
",",
"how",
"=",
"how",
")",
"except",
"TypeError",
":",
"pass",
"if",
"how",
"==",
"\"left\"",
":",
"join_index",
"=",
"self",
"elif",
"how",
"==",
"\"right\"",
":",
"join_index",
"=",
"other",
"elif",
"how",
"==",
"\"inner\"",
":",
"# TODO: sort=False here for backwards compat. It may",
"# be better to use the sort parameter passed into join",
"join_index",
"=",
"self",
".",
"intersection",
"(",
"other",
",",
"sort",
"=",
"False",
")",
"elif",
"how",
"==",
"\"outer\"",
":",
"# TODO: sort=True here for backwards compat. It may",
"# be better to use the sort parameter passed into join",
"join_index",
"=",
"self",
".",
"union",
"(",
"other",
")",
"if",
"sort",
":",
"join_index",
"=",
"join_index",
".",
"sort_values",
"(",
")",
"if",
"join_index",
"is",
"self",
":",
"lindexer",
"=",
"None",
"else",
":",
"lindexer",
"=",
"self",
".",
"get_indexer",
"(",
"join_index",
")",
"if",
"join_index",
"is",
"other",
":",
"rindexer",
"=",
"None",
"else",
":",
"rindexer",
"=",
"other",
".",
"get_indexer",
"(",
"join_index",
")",
"return",
"join_index",
",",
"lindexer",
",",
"rindexer"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pandas/py3/pandas/core/indexes/base.py#L3919-L4037 |
|
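Usage sketch for the Index.join record above (my own example, not part of the dataset; assumes a pandas vintage where Index.join still takes return_indexers):

    import pandas as pd

    left = pd.Index([1, 2, 3, 4])
    right = pd.Index([3, 4, 5])

    # an inner join returns the shared labels plus integer indexers into each side
    joined, lidx, ridx = left.join(right, how="inner", return_indexers=True)
    print(joined)  # Int64Index([3, 4], dtype='int64') on this pandas vintage
    print(lidx)    # [2 3] -- positions of 3 and 4 in `left`
    print(ridx)    # [0 1] -- positions of 3 and 4 in `right`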
NASA-Tensegrity-Robotics-Toolkit/NTRTsim | 0443cbd542e12e23c04adf79ea0d8d003c428baa | scripts/learning/src/interfaces/ntrt_job.py | python | NTRTJob.cleanup | (self) | You can override this if you want and handle cleaning up any output files from this job. Not really necessary
though, I can take care of that myself later. | You can override this if you want and handle cleaning up any output files from this job. Not really necessary
though, I can take care of that myself later. | [
"You",
"can",
"override",
"this",
"if",
"you",
"want",
"and",
"handle",
"cleaning",
"up",
"any",
"output",
"files",
"from",
"this",
"job",
".",
"Not",
"really",
"necessary",
"though",
"I",
"can",
"take",
"care",
"of",
"that",
"myself",
"later",
"."
] | def cleanup(self):
"""
You can override this if you want and handle cleaning up any output files from this job. Not really necessary
though, I can take care of that myself later.
"""
pass | [
"def",
"cleanup",
"(",
"self",
")",
":",
"pass"
] | https://github.com/NASA-Tensegrity-Robotics-Toolkit/NTRTsim/blob/0443cbd542e12e23c04adf79ea0d8d003c428baa/scripts/learning/src/interfaces/ntrt_job.py#L35-L40 |
||
adobe/chromium | cfe5bf0b51b1f6b9fe239c2a3c2f2364da9967d7 | base/android/jni_generator/jni_generator.py | python | MangleCalledByNatives | (called_by_natives) | | return called_by_natives | Mangles all the overloads from the called_by_natives list. | Mangles all the overloads from the called_by_natives list. | [
"Mangles",
"all",
"the",
"overloads",
"from",
"the",
"call_by_natives",
"list",
"."
] | def MangleCalledByNatives(called_by_natives):
"""Mangles all the overloads from the call_by_natives list."""
method_counts = collections.defaultdict(
lambda: collections.defaultdict(lambda: 0))
for called_by_native in called_by_natives:
java_class_name = called_by_native.java_class_name
name = called_by_native.name
method_counts[java_class_name][name] += 1
for called_by_native in called_by_natives:
java_class_name = called_by_native.java_class_name
method_name = called_by_native.name
method_id_var_name = method_name
if method_counts[java_class_name][method_name] > 1:
jni_signature = JniSignature(called_by_native.params,
called_by_native.return_type,
False)
method_id_var_name = GetMangledMethodName(method_name, jni_signature)
called_by_native.method_id_var_name = method_id_var_name
return called_by_natives | [
"def",
"MangleCalledByNatives",
"(",
"called_by_natives",
")",
":",
"method_counts",
"=",
"collections",
".",
"defaultdict",
"(",
"lambda",
":",
"collections",
".",
"defaultdict",
"(",
"lambda",
":",
"0",
")",
")",
"for",
"called_by_native",
"in",
"called_by_natives",
":",
"java_class_name",
"=",
"called_by_native",
".",
"java_class_name",
"name",
"=",
"called_by_native",
".",
"name",
"method_counts",
"[",
"java_class_name",
"]",
"[",
"name",
"]",
"+=",
"1",
"for",
"called_by_native",
"in",
"called_by_natives",
":",
"java_class_name",
"=",
"called_by_native",
".",
"java_class_name",
"method_name",
"=",
"called_by_native",
".",
"name",
"method_id_var_name",
"=",
"method_name",
"if",
"method_counts",
"[",
"java_class_name",
"]",
"[",
"method_name",
"]",
">",
"1",
":",
"jni_signature",
"=",
"JniSignature",
"(",
"called_by_native",
".",
"params",
",",
"called_by_native",
".",
"return_type",
",",
"False",
")",
"method_id_var_name",
"=",
"GetMangledMethodName",
"(",
"method_name",
",",
"jni_signature",
")",
"called_by_native",
".",
"method_id_var_name",
"=",
"method_id_var_name",
"return",
"called_by_natives"
] | https://github.com/adobe/chromium/blob/cfe5bf0b51b1f6b9fe239c2a3c2f2364da9967d7/base/android/jni_generator/jni_generator.py#L314-L332 |
|
klzgrad/naiveproxy | ed2c513637c77b18721fe428d7ed395b4d284c83 | src/build/lacros/lacros_resource_sizes.py | python | _run_resource_sizes | (args) | Main flow to extract and output size data. | Main flow to extract and output size data. | [
"Main",
"flow",
"to",
"extract",
"and",
"output",
"size",
"data",
"."
] | def _run_resource_sizes(args):
"""Main flow to extract and output size data."""
chartjson = _BASE_CHART.copy()
report_func = perf_tests_results_helper.ReportPerfResult
total_sizes = collections.Counter()
def report_sizes(sizes, title, track_stripped, track_compressed):
report_func(chart_data=chartjson,
graph_title=title,
trace_title='size',
value=sizes[_KEY_RAW],
units='bytes')
if track_stripped:
report_func(chart_data=chartjson,
graph_title=title + ' (Stripped)',
trace_title='size',
value=sizes[_KEY_STRIPPED],
units='bytes')
if track_compressed:
report_func(chart_data=chartjson,
graph_title=title + ' (Gzipped)',
trace_title='size',
value=sizes[_KEY_GZIPPED],
units='bytes')
if track_stripped and track_compressed:
report_func(chart_data=chartjson,
graph_title=title + ' (Stripped, Gzipped)',
trace_title='size',
value=sizes[_KEY_STRIPPED_GZIPPED],
units='bytes')
for g in _TRACKED_GROUPS:
sizes = sum(
map(_get_catagorized_filesizes, _visit_paths(args.out_dir, g.paths)),
collections.Counter())
report_sizes(sizes, g.title, g.track_stripped, g.track_compressed)
# Total compressed size is summed over individual compressed sizes, instead
# of concatenating first, then compressing everything. This is done for
# simplicity. It also gives a conservative size estimate (assuming file
# metadata and overheads are negligible).
total_sizes += sizes
report_sizes(total_sizes, 'Total', True, True)
_dump_chart_json(args.output_dir, chartjson) | [
"def",
"_run_resource_sizes",
"(",
"args",
")",
":",
"chartjson",
"=",
"_BASE_CHART",
".",
"copy",
"(",
")",
"report_func",
"=",
"perf_tests_results_helper",
".",
"ReportPerfResult",
"total_sizes",
"=",
"collections",
".",
"Counter",
"(",
")",
"def",
"report_sizes",
"(",
"sizes",
",",
"title",
",",
"track_stripped",
",",
"track_compressed",
")",
":",
"report_func",
"(",
"chart_data",
"=",
"chartjson",
",",
"graph_title",
"=",
"title",
",",
"trace_title",
"=",
"'size'",
",",
"value",
"=",
"sizes",
"[",
"_KEY_RAW",
"]",
",",
"units",
"=",
"'bytes'",
")",
"if",
"track_stripped",
":",
"report_func",
"(",
"chart_data",
"=",
"chartjson",
",",
"graph_title",
"=",
"title",
"+",
"' (Stripped)'",
",",
"trace_title",
"=",
"'size'",
",",
"value",
"=",
"sizes",
"[",
"_KEY_STRIPPED",
"]",
",",
"units",
"=",
"'bytes'",
")",
"if",
"track_compressed",
":",
"report_func",
"(",
"chart_data",
"=",
"chartjson",
",",
"graph_title",
"=",
"title",
"+",
"' (Gzipped)'",
",",
"trace_title",
"=",
"'size'",
",",
"value",
"=",
"sizes",
"[",
"_KEY_GZIPPED",
"]",
",",
"units",
"=",
"'bytes'",
")",
"if",
"track_stripped",
"and",
"track_compressed",
":",
"report_func",
"(",
"chart_data",
"=",
"chartjson",
",",
"graph_title",
"=",
"title",
"+",
"' (Stripped, Gzipped)'",
",",
"trace_title",
"=",
"'size'",
",",
"value",
"=",
"sizes",
"[",
"_KEY_STRIPPED_GZIPPED",
"]",
",",
"units",
"=",
"'bytes'",
")",
"for",
"g",
"in",
"_TRACKED_GROUPS",
":",
"sizes",
"=",
"sum",
"(",
"map",
"(",
"_get_catagorized_filesizes",
",",
"_visit_paths",
"(",
"args",
".",
"out_dir",
",",
"g",
".",
"paths",
")",
")",
",",
"collections",
".",
"Counter",
"(",
")",
")",
"report_sizes",
"(",
"sizes",
",",
"g",
".",
"title",
",",
"g",
".",
"track_stripped",
",",
"g",
".",
"track_compressed",
")",
"# Total compressed size is summed over individual compressed sizes, instead",
"# of concatanating first, then compress everything. This is done for",
"# simplicity. It also gives a conservative size estimate (assuming file",
"# metadata and overheads are negligible).",
"total_sizes",
"+=",
"sizes",
"report_sizes",
"(",
"total_sizes",
",",
"'Total'",
",",
"True",
",",
"True",
")",
"_dump_chart_json",
"(",
"args",
".",
"output_dir",
",",
"chartjson",
")"
] | https://github.com/klzgrad/naiveproxy/blob/ed2c513637c77b18721fe428d7ed395b4d284c83/src/build/lacros/lacros_resource_sizes.py#L247-L295 |
||
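The per-group totals in _run_resource_sizes above lean on collections.Counter arithmetic; a minimal illustration of that accumulation pattern (numbers are made up):

    from collections import Counter

    chrome = Counter(raw=100, gzipped=40)
    resources = Counter(raw=50, gzipped=25)

    # adding Counters sums values key by key, which is why per-group sizes
    # (even the compressed ones) can simply be accumulated into a grand total
    total = chrome + resources
    print(total)  # Counter({'raw': 150, 'gzipped': 65})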
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/scipy/py3/scipy/optimize/_tstutils.py | python | f1 | (x) | return x * (x - 1.) | r"""f1 is a quadratic with roots at 0 and 1 | r"""f1 is a quadratic with roots at 0 and 1 | [
"r",
"f1",
"is",
"a",
"quadratic",
"with",
"roots",
"at",
"0",
"and",
"1"
] | def f1(x):
r"""f1 is a quadratic with roots at 0 and 1"""
return x * (x - 1.) | [
"def",
"f1",
"(",
"x",
")",
":",
"return",
"x",
"*",
"(",
"x",
"-",
"1.",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py3/scipy/optimize/_tstutils.py#L63-L65 |
|
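Because f1 above is a quadratic with known roots at 0 and 1, it pairs naturally with a bracketing root finder; a small sketch (brentq is scipy.optimize's standard Brent solver, the brackets are my own choice):

    from scipy.optimize import brentq

    def f1(x):
        # quadratic with roots at 0 and 1, as in the record above
        return x * (x - 1.)

    # f1 changes sign across each bracket, so Brent's method converges to a root
    print(brentq(f1, -0.5, 0.5))  # ~0.0
    print(brentq(f1, 0.5, 1.5))   # ~1.0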
krishauser/Klampt | 972cc83ea5befac3f653c1ba20f80155768ad519 | Python/klampt/robotsim.py | python | Appearance.setSilhouette | (self, radius: float, r: float=0, g: float=0, b: float=0, a: float=1) | return _robotsim.Appearance_setSilhouette(self, radius, r, g, b, a) | r"""
For meshes, sets a silhouette radius and color. Set the radius to 0 to disable
silhouette drawing.
Args:
radius (float)
r (float, optional): default value 0
g (float, optional): default value 0
b (float, optional): default value 0
a (float, optional): default value 1 | r"""
For meshes, sets a silhouette radius and color. Set the radius to 0 to disable
silhouette drawing. | [
"r",
"For",
"meshes",
"sets",
"a",
"silhouette",
"radius",
"and",
"color",
".",
"Set",
"the",
"radius",
"to",
"0",
"to",
"disable",
"silhouette",
"drawing",
"."
] | def setSilhouette(self, radius: float, r: float=0, g: float=0, b: float=0, a: float=1) ->None:
r"""
For meshes, sets a silhouette radius and color. Set the radius to 0 to disable
silhouette drawing.
Args:
radius (float)
r (float, optional): default value 0
g (float, optional): default value 0
b (float, optional): default value 0
a (float, optional): default value 1
"""
return _robotsim.Appearance_setSilhouette(self, radius, r, g, b, a) | [
"def",
"setSilhouette",
"(",
"self",
",",
"radius",
":",
"float",
",",
"r",
":",
"float",
"=",
"0",
",",
"g",
":",
"float",
"=",
"0",
",",
"b",
":",
"float",
"=",
"0",
",",
"a",
":",
"float",
"=",
"1",
")",
"->",
"None",
":",
"return",
"_robotsim",
".",
"Appearance_setSilhouette",
"(",
"self",
",",
"radius",
",",
"r",
",",
"g",
",",
"b",
",",
"a",
")"
] | https://github.com/krishauser/Klampt/blob/972cc83ea5befac3f653c1ba20f80155768ad519/Python/klampt/robotsim.py#L3087-L3099 |
|
facebook/mysql-5.6 | 65a650660ec7b4d627d1b738f397252ff4706207 | arcanist/lint/cpp_linter/cpplint.py | python | _SetFilters | (filters) | Sets the module's error-message filters.
These filters are applied when deciding whether to emit a given
error message.
Args:
filters: A string of comma-separated filters (eg "whitespace/indent").
Each filter should start with + or -; else we die. | Sets the module's error-message filters. | [
"Sets",
"the",
"module",
"s",
"error",
"-",
"message",
"filters",
"."
] | def _SetFilters(filters):
"""Sets the module's error-message filters.
These filters are applied when deciding whether to emit a given
error message.
Args:
filters: A string of comma-separated filters (eg "whitespace/indent").
Each filter should start with + or -; else we die.
"""
_cpplint_state.SetFilters(filters) | [
"def",
"_SetFilters",
"(",
"filters",
")",
":",
"_cpplint_state",
".",
"SetFilters",
"(",
"filters",
")"
] | https://github.com/facebook/mysql-5.6/blob/65a650660ec7b4d627d1b738f397252ff4706207/arcanist/lint/cpp_linter/cpplint.py#L791-L801 |
||
miyosuda/TensorFlowAndroidMNIST | 7b5a4603d2780a8a2834575706e9001977524007 | jni-build/jni/include/tensorflow/contrib/learn/python/learn/learn_io/dask_io.py | python | extract_dask_data | (data) | Extract data from dask.Series or dask.DataFrame for predictors. | Extract data from dask.Series or dask.DataFrame for predictors. | [
"Extract",
"data",
"from",
"dask",
".",
"Series",
"or",
"dask",
".",
"DataFrame",
"for",
"predictors",
"."
] | def extract_dask_data(data):
"""Extract data from dask.Series or dask.DataFrame for predictors."""
if isinstance(data, allowed_classes):
return _construct_dask_df_with_divisions(data)
else:
return data | [
"def",
"extract_dask_data",
"(",
"data",
")",
":",
"if",
"isinstance",
"(",
"data",
",",
"allowed_classes",
")",
":",
"return",
"_construct_dask_df_with_divisions",
"(",
"data",
")",
"else",
":",
"return",
"data"
] | https://github.com/miyosuda/TensorFlowAndroidMNIST/blob/7b5a4603d2780a8a2834575706e9001977524007/jni-build/jni/include/tensorflow/contrib/learn/python/learn/learn_io/dask_io.py#L63-L68 |
||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_cocoa/html2.py | python | WebView.CanUndo | (*args, **kwargs) | return _html2.WebView_CanUndo(*args, **kwargs) | CanUndo(self) -> bool | CanUndo(self) -> bool | [
"CanUndo",
"(",
"self",
")",
"-",
">",
"bool"
] | def CanUndo(*args, **kwargs):
"""CanUndo(self) -> bool"""
return _html2.WebView_CanUndo(*args, **kwargs) | [
"def",
"CanUndo",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_html2",
".",
"WebView_CanUndo",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/html2.py#L294-L296 |
|
benoitsteiner/tensorflow-opencl | cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5 | tensorflow/python/util/tf_decorator.py | python | make_decorator | (target,
decorator_func,
decorator_name=None,
decorator_doc='',
decorator_argspec=None) | return decorator_func | Make a decorator from a wrapper and a target.
Args:
target: The final callable to be wrapped.
decorator_func: The wrapper function.
decorator_name: The name of the decorator. If `None`, the name of the
function calling make_decorator.
decorator_doc: Documentation specific to this application of
`decorator_func` to `target`.
decorator_argspec: The new callable signature of this decorator.
Returns:
The `decorator_func` argument with new metadata attached. | Make a decorator from a wrapper and a target. | [
"Make",
"a",
"decorator",
"from",
"a",
"wrapper",
"and",
"a",
"target",
"."
] | def make_decorator(target,
decorator_func,
decorator_name=None,
decorator_doc='',
decorator_argspec=None):
"""Make a decorator from a wrapper and a target.
Args:
target: The final callable to be wrapped.
decorator_func: The wrapper function.
decorator_name: The name of the decorator. If `None`, the name of the
function calling make_decorator.
decorator_doc: Documentation specific to this application of
`decorator_func` to `target`.
decorator_argspec: The new callable signature of this decorator.
Returns:
The `decorator_func` argument with new metadata attached.
"""
if decorator_name is None:
frame = _traceback.extract_stack(limit=2)[0]
# frame name is tuple[2] in python2, and object.name in python3
decorator_name = getattr(frame, 'name', frame[2]) # Caller's name
decorator = TFDecorator(decorator_name, target, decorator_doc,
decorator_argspec)
setattr(decorator_func, '_tf_decorator', decorator)
decorator_func.__name__ = target.__name__
decorator_func.__module__ = target.__module__
decorator_func.__doc__ = decorator.__doc__
decorator_func.__wrapped__ = target
return decorator_func | [
"def",
"make_decorator",
"(",
"target",
",",
"decorator_func",
",",
"decorator_name",
"=",
"None",
",",
"decorator_doc",
"=",
"''",
",",
"decorator_argspec",
"=",
"None",
")",
":",
"if",
"decorator_name",
"is",
"None",
":",
"frame",
"=",
"_traceback",
".",
"extract_stack",
"(",
"limit",
"=",
"2",
")",
"[",
"0",
"]",
"# frame name is tuple[2] in python2, and object.name in python3",
"decorator_name",
"=",
"getattr",
"(",
"frame",
",",
"'name'",
",",
"frame",
"[",
"2",
"]",
")",
"# Caller's name",
"decorator",
"=",
"TFDecorator",
"(",
"decorator_name",
",",
"target",
",",
"decorator_doc",
",",
"decorator_argspec",
")",
"setattr",
"(",
"decorator_func",
",",
"'_tf_decorator'",
",",
"decorator",
")",
"decorator_func",
".",
"__name__",
"=",
"target",
".",
"__name__",
"decorator_func",
".",
"__module__",
"=",
"target",
".",
"__module__",
"decorator_func",
".",
"__doc__",
"=",
"decorator",
".",
"__doc__",
"decorator_func",
".",
"__wrapped__",
"=",
"target",
"return",
"decorator_func"
] | https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/python/util/tf_decorator.py#L66-L96 |
|
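A hedged usage sketch for make_decorator above; tf_decorator is a private TensorFlow utility, so the import path can shift between releases, and the wrapper here is my own example:

    from tensorflow.python.util import tf_decorator

    def target(a, b):
        # the callable being wrapped
        return a + b

    def wrapper(*args, **kwargs):
        print("calling target")
        return target(*args, **kwargs)

    # attaches a TFDecorator to `wrapper` and copies target's metadata onto it
    decorated = tf_decorator.make_decorator(target, wrapper)
    print(decorated.__name__)  # 'target'
    print(decorated(1, 2))     # prints 'calling target', returns 3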
giuspen/cherrytree | 84712f206478fcf9acf30174009ad28c648c6344 | pygtk2/modules/core.py | python | CherryTree.replace_in_all_nodes | (self, *args) | Replace the pattern in all the Tree Nodes | Replace the pattern in all the Tree Nodes | [
"Replace",
"the",
"pattern",
"in",
"all",
"the",
"Tree",
"Nodes"
] | def replace_in_all_nodes(self, *args):
"""Replace the pattern in all the Tree Nodes"""
if not self.is_tree_not_empty_or_error(): return
self.find_handler.replace_in_all_nodes(None) | [
"def",
"replace_in_all_nodes",
"(",
"self",
",",
"*",
"args",
")",
":",
"if",
"not",
"self",
".",
"is_tree_not_empty_or_error",
"(",
")",
":",
"return",
"self",
".",
"find_handler",
".",
"replace_in_all_nodes",
"(",
"None",
")"
] | https://github.com/giuspen/cherrytree/blob/84712f206478fcf9acf30174009ad28c648c6344/pygtk2/modules/core.py#L3362-L3365 |
||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/windows/Lib/site-packages/pip/_internal/utils/misc.py | python | ask_password | (message) | return getpass.getpass(message) | Ask for a password interactively. | Ask for a password interactively. | [
"Ask",
"for",
"a",
"password",
"interactively",
"."
] | def ask_password(message):
# type: (str) -> str
"""Ask for a password interactively."""
_check_no_input(message)
return getpass.getpass(message) | [
"def",
"ask_password",
"(",
"message",
")",
":",
"# type: (str) -> str",
"_check_no_input",
"(",
"message",
")",
"return",
"getpass",
".",
"getpass",
"(",
"message",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/site-packages/pip/_internal/utils/misc.py#L240-L244 |
|
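ask_password above is a thin guard around the standard library; outside pip the equivalent call is plain getpass (shown here because pip's helper is internal API):

    import getpass

    # prompts on the controlling terminal without echoing the typed characters
    password = getpass.getpass("Password: ")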
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/AWSPythonSDK/1.5.8/docutils/parsers/rst/states.py | python | Body.check_attribution | (self, indented, attribution_start) | return i, (indent or 0) | Check attribution shape.
Return the index past the end of the attribution, and the indent. | Check attribution shape.
Return the index past the end of the attribution, and the indent. | [
"Check",
"attribution",
"shape",
".",
"Return",
"the",
"index",
"past",
"the",
"end",
"of",
"the",
"attribution",
"and",
"the",
"indent",
"."
] | def check_attribution(self, indented, attribution_start):
"""
Check attribution shape.
Return the index past the end of the attribution, and the indent.
"""
indent = None
i = attribution_start + 1
for i in range(attribution_start + 1, len(indented)):
line = indented[i].rstrip()
if not line:
break
if indent is None:
indent = len(line) - len(line.lstrip())
elif len(line) - len(line.lstrip()) != indent:
return None, None # bad shape; not an attribution
else:
# return index of line after last attribution line:
i += 1
return i, (indent or 0) | [
"def",
"check_attribution",
"(",
"self",
",",
"indented",
",",
"attribution_start",
")",
":",
"indent",
"=",
"None",
"i",
"=",
"attribution_start",
"+",
"1",
"for",
"i",
"in",
"range",
"(",
"attribution_start",
"+",
"1",
",",
"len",
"(",
"indented",
")",
")",
":",
"line",
"=",
"indented",
"[",
"i",
"]",
".",
"rstrip",
"(",
")",
"if",
"not",
"line",
":",
"break",
"if",
"indent",
"is",
"None",
":",
"indent",
"=",
"len",
"(",
"line",
")",
"-",
"len",
"(",
"line",
".",
"lstrip",
"(",
")",
")",
"elif",
"len",
"(",
"line",
")",
"-",
"len",
"(",
"line",
".",
"lstrip",
"(",
")",
")",
"!=",
"indent",
":",
"return",
"None",
",",
"None",
"# bad shape; not an attribution",
"else",
":",
"# return index of line after last attribution line:",
"i",
"+=",
"1",
"return",
"i",
",",
"(",
"indent",
"or",
"0",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/AWSPythonSDK/1.5.8/docutils/parsers/rst/states.py#L1217-L1235 |
|
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/prompt-toolkit/py3/prompt_toolkit/key_binding/bindings/named_commands.py | python | downcase_word | (event: E) | Lowercase the current (or following) word. | Lowercase the current (or following) word. | [
"Lowercase",
"the",
"current",
"(",
"or",
"following",
")",
"word",
"."
] | def downcase_word(event: E) -> None:
"""
Lowercase the current (or following) word.
"""
buff = event.current_buffer
for i in range(event.arg): # XXX: not DRY: see meta_c and meta_u!!
pos = buff.document.find_next_word_ending()
words = buff.document.text_after_cursor[:pos]
buff.insert_text(words.lower(), overwrite=True) | [
"def",
"downcase_word",
"(",
"event",
":",
"E",
")",
"->",
"None",
":",
"buff",
"=",
"event",
".",
"current_buffer",
"for",
"i",
"in",
"range",
"(",
"event",
".",
"arg",
")",
":",
"# XXX: not DRY: see meta_c and meta_u!!",
"pos",
"=",
"buff",
".",
"document",
".",
"find_next_word_ending",
"(",
")",
"words",
"=",
"buff",
".",
"document",
".",
"text_after_cursor",
"[",
":",
"pos",
"]",
"buff",
".",
"insert_text",
"(",
"words",
".",
"lower",
"(",
")",
",",
"overwrite",
"=",
"True",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/prompt-toolkit/py3/prompt_toolkit/key_binding/bindings/named_commands.py#L306-L315 |
||
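In prompt_toolkit these handlers are normally looked up by their readline names rather than imported directly; a sketch, assuming downcase_word is registered as 'downcase-word' as in stock prompt_toolkit:

    from prompt_toolkit.key_binding.bindings.named_commands import get_by_name

    # fetch the handler registered under its readline command name
    handler = get_by_name("downcase-word")
    print(handler)  # the binding wrapping the downcase_word function above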
tensorflow/tensorflow | 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e | tensorflow/python/tpu/tpu_embedding.py | python | ProximalAdagradParameters.__init__ | (
self,
learning_rate: float,
initial_accumulator: float = 0.1,
l1_regularization_strength: float = 0.0,
l2_regularization_strength: float = 0.0,
use_gradient_accumulation: bool = True,
clip_weight_min: Optional[float] = None,
clip_weight_max: Optional[float] = None,
weight_decay_factor: Optional[float] = None,
multiply_weight_decay_factor_by_learning_rate: Optional[bool] = None,
clip_gradient_min: Optional[float] = None,
clip_gradient_max: Optional[float] = None,
) | Optimization parameters for Adagrad.
Args:
learning_rate: used for updating embedding table.
initial_accumulator: initial accumulator for Adagrad.
l1_regularization_strength: A float value, must be greater than or equal
to zero.
l2_regularization_strength: A float value, must be greater than or equal
to zero.
use_gradient_accumulation: setting this to `False` makes embedding
gradients calculation less accurate but faster. Please see
`optimization_parameters.proto` for details.
clip_weight_min: the minimum value to clip by; None means -infinity.
clip_weight_max: the maximum value to clip by; None means +infinity.
weight_decay_factor: amount of weight decay to apply; None means that the
weights are not decayed.
multiply_weight_decay_factor_by_learning_rate: if true,
`weight_decay_factor` is multiplied by the current learning rate.
clip_gradient_min: the minimum value to clip by; None means -infinity.
Gradient accumulation must be set to true if this is set.
clip_gradient_max: the maximum value to clip by; None means +infinity.
Gradient accumulation must be set to true if this is set. | Optimization parameters for Adagrad. | [
"Optimization",
"parameters",
"for",
"Adagrad",
"."
] | def __init__(
self,
learning_rate: float,
initial_accumulator: float = 0.1,
l1_regularization_strength: float = 0.0,
l2_regularization_strength: float = 0.0,
use_gradient_accumulation: bool = True,
clip_weight_min: Optional[float] = None,
clip_weight_max: Optional[float] = None,
weight_decay_factor: Optional[float] = None,
multiply_weight_decay_factor_by_learning_rate: Optional[bool] = None,
clip_gradient_min: Optional[float] = None,
clip_gradient_max: Optional[float] = None,
):
"""Optimization parameters for Adagrad.
Args:
learning_rate: used for updating embedding table.
initial_accumulator: initial accumulator for Adagrad.
l1_regularization_strength: A float value, must be greater than or equal
to zero.
l2_regularization_strength: A float value, must be greater than or equal
to zero.
use_gradient_accumulation: setting this to `False` makes embedding
gradients calculation less accurate but faster. Please see
`optimization_parameters.proto` for details.
clip_weight_min: the minimum value to clip by; None means -infinity.
clip_weight_max: the maximum value to clip by; None means +infinity.
weight_decay_factor: amount of weight decay to apply; None means that the
weights are not decayed.
multiply_weight_decay_factor_by_learning_rate: if true,
`weight_decay_factor` is multiplied by the current learning rate.
clip_gradient_min: the minimum value to clip by; None means -infinity.
Gradient accumulation must be set to true if this is set.
clip_gradient_max: the maximum value to clip by; None means +infinity.
Gradient accumulation must be set to true if this is set.
"""
super(ProximalAdagradParameters, self).__init__(
learning_rate=learning_rate,
use_gradient_accumulation=use_gradient_accumulation,
clip_weight_min=clip_weight_min,
clip_weight_max=clip_weight_max,
weight_decay_factor=weight_decay_factor,
multiply_weight_decay_factor_by_learning_rate=(
multiply_weight_decay_factor_by_learning_rate),
clip_gradient_min=clip_gradient_min,
clip_gradient_max=clip_gradient_max,
)
if initial_accumulator <= 0:
raise ValueError(f'Adagrad initial_accumulator must be positive. '
f'Received: {initial_accumulator}.')
if l1_regularization_strength < 0.:
raise ValueError('l1_regularization_strength must be greater than or '
'equal to 0. got {}.'.format(l1_regularization_strength))
if l2_regularization_strength < 0.:
raise ValueError('l2_regularization_strength must be greater than or '
'equal to 0. got {}.'.format(l2_regularization_strength))
self.initial_accumulator = initial_accumulator
self.l1_regularization_strength = l1_regularization_strength
self.l2_regularization_strength = l2_regularization_strength | [
"def",
"__init__",
"(",
"self",
",",
"learning_rate",
":",
"float",
",",
"initial_accumulator",
":",
"float",
"=",
"0.1",
",",
"l1_regularization_strength",
":",
"float",
"=",
"0.0",
",",
"l2_regularization_strength",
":",
"float",
"=",
"0.0",
",",
"use_gradient_accumulation",
":",
"bool",
"=",
"True",
",",
"clip_weight_min",
":",
"Optional",
"[",
"float",
"]",
"=",
"None",
",",
"clip_weight_max",
":",
"Optional",
"[",
"float",
"]",
"=",
"None",
",",
"weight_decay_factor",
":",
"Optional",
"[",
"float",
"]",
"=",
"None",
",",
"multiply_weight_decay_factor_by_learning_rate",
":",
"Optional",
"[",
"bool",
"]",
"=",
"None",
",",
"clip_gradient_min",
":",
"Optional",
"[",
"float",
"]",
"=",
"None",
",",
"clip_gradient_max",
":",
"Optional",
"[",
"float",
"]",
"=",
"None",
",",
")",
":",
"super",
"(",
"ProximalAdagradParameters",
",",
"self",
")",
".",
"__init__",
"(",
"learning_rate",
"=",
"learning_rate",
",",
"use_gradient_accumulation",
"=",
"use_gradient_accumulation",
",",
"clip_weight_min",
"=",
"clip_weight_min",
",",
"clip_weight_max",
"=",
"clip_weight_max",
",",
"weight_decay_factor",
"=",
"weight_decay_factor",
",",
"multiply_weight_decay_factor_by_learning_rate",
"=",
"(",
"multiply_weight_decay_factor_by_learning_rate",
")",
",",
"clip_gradient_min",
"=",
"clip_gradient_min",
",",
"clip_gradient_max",
"=",
"clip_gradient_max",
",",
")",
"if",
"initial_accumulator",
"<=",
"0",
":",
"raise",
"ValueError",
"(",
"f'Adagrad initial_accumulator must be positive. '",
"f'Received: {initial_accumulator}.'",
")",
"if",
"l1_regularization_strength",
"<",
"0.",
":",
"raise",
"ValueError",
"(",
"'l1_regularization_strength must be greater than or '",
"'equal to 0. got {}.'",
".",
"format",
"(",
"l1_regularization_strength",
")",
")",
"if",
"l2_regularization_strength",
"<",
"0.",
":",
"raise",
"ValueError",
"(",
"'l2_regularization_strength must be greater than or '",
"'equal to 0. got {}.'",
".",
"format",
"(",
"l2_regularization_strength",
")",
")",
"self",
".",
"initial_accumulator",
"=",
"initial_accumulator",
"self",
".",
"l1_regularization_strength",
"=",
"l1_regularization_strength",
"self",
".",
"l2_regularization_strength",
"=",
"l2_regularization_strength"
] | https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/tpu/tpu_embedding.py#L562-L623 |
||
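A construction sketch for the parameters class above; the import path mirrors the record's file, and the hyperparameter values are arbitrary (this class only configures TPU embedding optimizers):

    from tensorflow.python.tpu.tpu_embedding import ProximalAdagradParameters

    # validation follows the __init__ above: the accumulator must be positive
    # and both regularization strengths must be non-negative
    opt = ProximalAdagradParameters(
        learning_rate=0.05,
        initial_accumulator=0.1,
        l1_regularization_strength=0.001,
        l2_regularization_strength=0.0,
    )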
toggl-open-source/toggldesktop | 91865205885531cc8fd9e8d613dad49d625d56e7 | third_party/cpplint/cpplint.py | python | _BlockInfo.IsBlockInfo | (self) | return self.__class__ == _BlockInfo | Returns true if this block is a _BlockInfo.
This is convenient for verifying that an object is an instance of
a _BlockInfo, but not an instance of any of the derived classes.
Returns:
True for this class, False for derived classes. | Returns true if this block is a _BlockInfo. | [
"Returns",
"true",
"if",
"this",
"block",
"is",
"a",
"_BlockInfo",
"."
] | def IsBlockInfo(self):
"""Returns true if this block is a _BlockInfo.
This is convenient for verifying that an object is an instance of
a _BlockInfo, but not an instance of any of the derived classes.
Returns:
True for this class, False for derived classes.
"""
return self.__class__ == _BlockInfo | [
"def",
"IsBlockInfo",
"(",
"self",
")",
":",
"return",
"self",
".",
"__class__",
"==",
"_BlockInfo"
] | https://github.com/toggl-open-source/toggldesktop/blob/91865205885531cc8fd9e8d613dad49d625d56e7/third_party/cpplint/cpplint.py#L2034-L2043 |
|
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/pandas/core/dtypes/common.py | python | _get_dtype | (arr_or_dtype) | return pandas_dtype(arr_or_dtype) | Get the dtype instance associated with an array
or dtype object.
Parameters
----------
arr_or_dtype : array-like
The array-like or dtype object whose dtype we want to extract.
Returns
-------
obj_dtype : The dtype instance extracted from the
passed-in array or dtype object.
Raises
------
TypeError : The passed in object is None. | Get the dtype instance associated with an array
or dtype object. | [
"Get",
"the",
"dtype",
"instance",
"associated",
"with",
"an",
"array",
"or",
"dtype",
"object",
"."
] | def _get_dtype(arr_or_dtype):
"""
Get the dtype instance associated with an array
or dtype object.
Parameters
----------
arr_or_dtype : array-like
The array-like or dtype object whose dtype we want to extract.
Returns
-------
obj_dtype : The dtype instance extracted from the
passed-in array or dtype object.
Raises
------
TypeError : The passed in object is None.
"""
if arr_or_dtype is None:
raise TypeError("Cannot deduce dtype from null object")
# fastpath
elif isinstance(arr_or_dtype, np.dtype):
return arr_or_dtype
elif isinstance(arr_or_dtype, type):
return np.dtype(arr_or_dtype)
# if we have an array-like
elif hasattr(arr_or_dtype, "dtype"):
arr_or_dtype = arr_or_dtype.dtype
return pandas_dtype(arr_or_dtype) | [
"def",
"_get_dtype",
"(",
"arr_or_dtype",
")",
":",
"if",
"arr_or_dtype",
"is",
"None",
":",
"raise",
"TypeError",
"(",
"\"Cannot deduce dtype from null object\"",
")",
"# fastpath",
"elif",
"isinstance",
"(",
"arr_or_dtype",
",",
"np",
".",
"dtype",
")",
":",
"return",
"arr_or_dtype",
"elif",
"isinstance",
"(",
"arr_or_dtype",
",",
"type",
")",
":",
"return",
"np",
".",
"dtype",
"(",
"arr_or_dtype",
")",
"# if we have an array-like",
"elif",
"hasattr",
"(",
"arr_or_dtype",
",",
"\"dtype\"",
")",
":",
"arr_or_dtype",
"=",
"arr_or_dtype",
".",
"dtype",
"return",
"pandas_dtype",
"(",
"arr_or_dtype",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/pandas/core/dtypes/common.py#L1672-L1705 |
|
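_get_dtype above is private pandas; its documented behavior can be approximated with the public pandas_dtype helper plus the same array-like fallback (a sketch, not pandas' exact code path):

    import numpy as np
    from pandas.api.types import pandas_dtype

    def get_dtype(arr_or_dtype):
        if arr_or_dtype is None:
            raise TypeError("Cannot deduce dtype from null object")
        if isinstance(arr_or_dtype, np.dtype):  # fastpath, as in the original
            return arr_or_dtype
        if hasattr(arr_or_dtype, "dtype"):      # array-like: take its dtype first
            arr_or_dtype = arr_or_dtype.dtype
        return pandas_dtype(arr_or_dtype)

    print(get_dtype(np.array([1, 2, 3])))  # int64
    print(get_dtype("category"))           # category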
apple/swift-lldb | d74be846ef3e62de946df343e8c234bde93a8912 | scripts/Python/static-binding/lldb.py | python | SBBreakpointName.SetThreadIndex | (self, index) | return _lldb.SBBreakpointName_SetThreadIndex(self, index) | SetThreadIndex(SBBreakpointName self, uint32_t index) | SetThreadIndex(SBBreakpointName self, uint32_t index) | [
"SetThreadIndex",
"(",
"SBBreakpointName",
"self",
"uint32_t",
"index",
")"
] | def SetThreadIndex(self, index):
"""SetThreadIndex(SBBreakpointName self, uint32_t index)"""
return _lldb.SBBreakpointName_SetThreadIndex(self, index) | [
"def",
"SetThreadIndex",
"(",
"self",
",",
"index",
")",
":",
"return",
"_lldb",
".",
"SBBreakpointName_SetThreadIndex",
"(",
"self",
",",
"index",
")"
] | https://github.com/apple/swift-lldb/blob/d74be846ef3e62de946df343e8c234bde93a8912/scripts/Python/static-binding/lldb.py#L2269-L2271 |
|
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_cocoa/dataview.py | python | DataViewItemAttr.HasColour | (*args, **kwargs) | return _dataview.DataViewItemAttr_HasColour(*args, **kwargs) | HasColour(self) -> bool | HasColour(self) -> bool | [
"HasColour",
"(",
"self",
")",
"-",
">",
"bool"
] | def HasColour(*args, **kwargs):
"""HasColour(self) -> bool"""
return _dataview.DataViewItemAttr_HasColour(*args, **kwargs) | [
"def",
"HasColour",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_dataview",
".",
"DataViewItemAttr_HasColour",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/dataview.py#L349-L351 |
|
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/lib2to3/pygram.py | python | Symbols.__init__ | (self, grammar) | Initializer.
Creates an attribute for each grammar symbol (nonterminal),
whose value is the symbol's type (an int >= 256). | Initializer. | [
"Initializer",
"."
] | def __init__(self, grammar):
"""Initializer.
Creates an attribute for each grammar symbol (nonterminal),
whose value is the symbol's type (an int >= 256).
"""
for name, symbol in grammar.symbol2number.items():
setattr(self, name, symbol) | [
"def",
"__init__",
"(",
"self",
",",
"grammar",
")",
":",
"for",
"name",
",",
"symbol",
"in",
"grammar",
".",
"symbol2number",
".",
"items",
"(",
")",
":",
"setattr",
"(",
"self",
",",
"name",
",",
"symbol",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/lib2to3/pygram.py#L22-L29 |
||
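The Symbols class above backs lib2to3's module-level symbol tables; a short look at how those attributes are consumed (lib2to3 is deprecated in recent Pythons, so treat this as historical):

    from lib2to3 import pygram

    # python_symbols is a Symbols instance built from the Python grammar:
    # every nonterminal name becomes an int attribute >= 256
    print(pygram.python_symbols.funcdef)             # some int >= 256
    print(pygram.python_symbols.simple_stmt >= 256)  # True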
apple/turicreate | cce55aa5311300e3ce6af93cb45ba791fd1bdf49 | src/python/turicreate/data_structures/sframe.py | python | SFrame._save_reference | (self, filename) | Performs an incomplete save of an existing SFrame into a directory.
This saved SFrame may reference SFrames in other locations in the same
filesystem for certain resources.
Parameters
----------
filename : string
The location to save the SFrame. Either a local directory or a
remote URL.
See Also
--------
load_sframe, SFrame
Examples
--------
>>> # Save the sframe into binary format
>>> sf.save_reference('data/training_data_sframe') | Performs an incomplete save of an existing SFrame into a directory.
This saved SFrame may reference SFrames in other locations in the same
filesystem for certain resources. | [
"Performs",
"an",
"incomplete",
"save",
"of",
"an",
"existing",
"SFrame",
"into",
"a",
"directory",
".",
"This",
"saved",
"SFrame",
"may",
"reference",
"SFrames",
"in",
"other",
"locations",
"in",
"the",
"same",
"filesystem",
"for",
"certain",
"resources",
"."
] | def _save_reference(self, filename):
"""
Performs an incomplete save of an existing SFrame into a directory.
This saved SFrame may reference SFrames in other locations in the same
filesystem for certain resources.
Parameters
----------
filename : string
The location to save the SFrame. Either a local directory or a
remote URL.
See Also
--------
load_sframe, SFrame
Examples
--------
>>> # Save the sframe into binary format
>>> sf.save_reference('data/training_data_sframe')
"""
## Save the SFrame
url = _make_internal_url(filename)
with cython_context():
self.__proxy__.save_reference(url) | [
"def",
"_save_reference",
"(",
"self",
",",
"filename",
")",
":",
"## Save the SFrame",
"url",
"=",
"_make_internal_url",
"(",
"filename",
")",
"with",
"cython_context",
"(",
")",
":",
"self",
".",
"__proxy__",
".",
"save_reference",
"(",
"url",
")"
] | https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/src/python/turicreate/data_structures/sframe.py#L3255-L3280 |
||
apple/turicreate | cce55aa5311300e3ce6af93cb45ba791fd1bdf49 | deps/src/libxml2-2.9.1/python/libxml2class.py | python | xmlTextReader.NewWalker | (self, doc) | return ret | Setup an xmltextReader to parse a preparsed XML document.
This reuses the existing @reader xmlTextReader. | Setup an xmltextReader to parse a preparsed XML document.
This reuses the existing | [
"Setup",
"an",
"xmltextReader",
"to",
"parse",
"a",
"preparsed",
"XML",
"document",
".",
"This",
"reuses",
"the",
"existing"
] | def NewWalker(self, doc):
"""Setup an xmltextReader to parse a preparsed XML document.
This reuses the existing @reader xmlTextReader. """
if doc is None: doc__o = None
else: doc__o = doc._o
ret = libxml2mod.xmlReaderNewWalker(self._o, doc__o)
return ret | [
"def",
"NewWalker",
"(",
"self",
",",
"doc",
")",
":",
"if",
"doc",
"is",
"None",
":",
"doc__o",
"=",
"None",
"else",
":",
"doc__o",
"=",
"doc",
".",
"_o",
"ret",
"=",
"libxml2mod",
".",
"xmlReaderNewWalker",
"(",
"self",
".",
"_o",
",",
"doc__o",
")",
"return",
"ret"
] | https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/deps/src/libxml2-2.9.1/python/libxml2class.py#L5977-L5983 |
|
hpi-xnor/BMXNet | ed0b201da6667887222b8e4b5f997c4f6b61943d | python/mxnet/gluon/parameter.py | python | Parameter.zero_grad | (self) | Sets gradient buffer on all contexts to 0. No action is taken if
parameter is uninitialized or doesn't require gradient. | Sets gradient buffer on all contexts to 0. No action is taken if
parameter is uninitialized or doesn't require gradient. | [
"Sets",
"gradient",
"buffer",
"on",
"all",
"contexts",
"to",
"0",
".",
"No",
"action",
"is",
"taken",
"if",
"parameter",
"is",
"uninitialized",
"or",
"doesn",
"t",
"require",
"gradient",
"."
] | def zero_grad(self):
"""Sets gradient buffer on all contexts to 0. No action is taken if
parameter is uninitialized or doesn't require gradient."""
if self._grad is None:
return
for i in self._grad:
i[:] = 0 | [
"def",
"zero_grad",
"(",
"self",
")",
":",
"if",
"self",
".",
"_grad",
"is",
"None",
":",
"return",
"for",
"i",
"in",
"self",
".",
"_grad",
":",
"i",
"[",
":",
"]",
"=",
"0"
] | https://github.com/hpi-xnor/BMXNet/blob/ed0b201da6667887222b8e4b5f997c4f6b61943d/python/mxnet/gluon/parameter.py#L427-L433 |
||
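A usage sketch for Parameter.zero_grad above, clearing accumulated gradients between manual training steps in Gluon (old mxnet.nd-era API, matching the record's vintage):

    import mxnet as mx
    from mxnet.gluon import nn

    net = nn.Dense(2)
    net.initialize()

    with mx.autograd.record():
        loss = net(mx.nd.ones((1, 4))).sum()
    loss.backward()

    # reset every parameter's gradient buffer to zero on each context
    for param in net.collect_params().values():
        param.zero_grad()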
hughperkins/tf-coriander | 970d3df6c11400ad68405f22b0c42a52374e94ca | tensorflow/python/ops/array_grad.py | python | _StridedSliceGradGrad | (op, grad) | return None, None, None, None, array_ops.strided_slice(
grad,
begin,
end,
strides,
begin_mask=op.get_attr("begin_mask"),
end_mask=op.get_attr("end_mask"),
ellipsis_mask=op.get_attr("ellipsis_mask"),
new_axis_mask=op.get_attr("new_axis_mask"),
shrink_axis_mask=op.get_attr("shrink_axis_mask")) | Gradient for StridedSliceGrad op. | Gradient for StridedSliceGrad op. | [
"Gradient",
"for",
"StridedSliceGrad",
"op",
"."
] | def _StridedSliceGradGrad(op, grad):
"""Gradient for StridedSliceGrad op."""
begin = op.inputs[1]
end = op.inputs[2]
strides = op.inputs[3]
return None, None, None, None, array_ops.strided_slice(
grad,
begin,
end,
strides,
begin_mask=op.get_attr("begin_mask"),
end_mask=op.get_attr("end_mask"),
ellipsis_mask=op.get_attr("ellipsis_mask"),
new_axis_mask=op.get_attr("new_axis_mask"),
shrink_axis_mask=op.get_attr("shrink_axis_mask")) | [
"def",
"_StridedSliceGradGrad",
"(",
"op",
",",
"grad",
")",
":",
"begin",
"=",
"op",
".",
"inputs",
"[",
"1",
"]",
"end",
"=",
"op",
".",
"inputs",
"[",
"2",
"]",
"strides",
"=",
"op",
".",
"inputs",
"[",
"3",
"]",
"return",
"None",
",",
"None",
",",
"None",
",",
"None",
",",
"array_ops",
".",
"strided_slice",
"(",
"grad",
",",
"begin",
",",
"end",
",",
"strides",
",",
"begin_mask",
"=",
"op",
".",
"get_attr",
"(",
"\"begin_mask\"",
")",
",",
"end_mask",
"=",
"op",
".",
"get_attr",
"(",
"\"end_mask\"",
")",
",",
"ellipsis_mask",
"=",
"op",
".",
"get_attr",
"(",
"\"ellipsis_mask\"",
")",
",",
"new_axis_mask",
"=",
"op",
".",
"get_attr",
"(",
"\"new_axis_mask\"",
")",
",",
"shrink_axis_mask",
"=",
"op",
".",
"get_attr",
"(",
"\"shrink_axis_mask\"",
")",
")"
] | https://github.com/hughperkins/tf-coriander/blob/970d3df6c11400ad68405f22b0c42a52374e94ca/tensorflow/python/ops/array_grad.py#L195-L210 |
|
QMCPACK/qmcpack | d0948ab455e38364458740cc8e2239600a14c5cd | nexus/lib/structure.py | python | Structure.rotate | (self,r,rp=None,passive=False,units="radians",check=True) | Arbitrary rotation of the structure.
Parameters
----------
r : `array_like, float, shape (3,3)` or `array_like, float, shape (3,)` or `str`
If a 3x3 matrix, then code executes rotation consistent with this matrix --
it is assumed that the matrix acts on a column-major vector (eg, v'=Rv)
If a three-dimensional array, then the operation of the function depends
on the input type of rp in the following ways:
1. If rp is a scalar, then rp is assumed to be an angle and a rotation
of rp is made about the axis defined by r
2. If rp is a vector, then rp is assumed to be an axis and a rotation is made
such that r aligns with rp
3. If rp is a str, then the rotation is such that r aligns with the
axis given by the str ('x', 'y', 'z', 'a0', 'a1', or 'a2')
If a str then the axis, r, is defined by the input label (e.g. 'x', 'y', 'z', 'a0', 'a1', or 'a2')
and the operation of the function depends on the input type of rp in the following
ways (same as above):
1. If rp is a scalar, then rp is assumed to be an angle and a rotation
of rp is made about the axis defined by r
2. If rp is a vector, then rp is assumed to be an axis and a rotation is made
such that r aligns with rp
3. If rp is a str, then the rotation is such that r aligns with the
axis given by the str ('x', 'y', 'z', 'a0', 'a1', or 'a2')
rp : `array_like, float, shape (3), optional` or `str, optional`
If a 3-dimensional vector is given, then rp is assumed to be an axis and a rotation is made
such that the axis r is aligned with rp.
If a scalar, then rp is assumed to be an angle and a rotation about the axis defined by r
is made by an angle rp
If a str is given, then rp is assumed to be an axis defined by the given label
(e.g. 'x', 'y', 'z', 'a0', 'a1', or 'a2') and a rotation is made such that the axis r
is aligned with rp.
passive : `bool, optional, default False`
If `True`, perform a passive rotation
If `False`, perform an active rotation
units : `str, optional, default "radians"`
Units of rp, if rp is given as an angle (scalar)
check : `bool, optional, default True`
Perform a check to verify rotation matrix is orthogonal | Arbitrary rotation of the structure.
Parameters
----------
r : `array_like, float, shape (3,3)` or `array_like, float, shape (3,)` or `str`
If a 3x3 matrix, then code executes rotation consistent with this matrix --
it is assumed that the matrix acts on a column-major vector (eg, v'=Rv)
If a three-dimensional array, then the operation of the function depends
on the input type of rp in the following ways:
1. If rp is a scalar, then rp is assumed to be an angle and a rotation
of rp is made about the axis defined by r
2. If rp is a vector, then rp is assumed to be an axis and a rotation is made
such that r aligns with rp
3. If rp is a str, then the rotation is such that r aligns with the
axis given by the str ('x', 'y', 'z', 'a0', 'a1', or 'a2')
If a str then the axis, r, is defined by the input label (e.g. 'x', 'y', 'z', 'a0', 'a1', or 'a2')
and the operation of the function depends on the input type of rp in the following
ways (same as above):
1. If rp is a scalar, then rp is assumed to be an angle and a rotation
of rp is made about the axis defined by r
2. If rp is a vector, then rp is assumed to be an axis and a rotation is made
such that r aligns with rp
3. If rp is a str, then the rotation is such that r aligns with the
axis given by the str ('x', 'y', 'z', 'a0', 'a1', or 'a2')
rp : `array_like, float, shape (3), optional` or `str, optional`
If a 3-dimensional vector is given, then rp is assumed to be an axis and a rotation is made
such that the axis r is aligned with rp.
If a scalar, then rp is assumed to be an angle and a rotation about the axis defined by r
is made by an angle rp
If a str is given, then rp is assumed to be an axis defined by the given label
(e.g. 'x', 'y', 'z', 'a0', 'a1', or 'a2') and a rotation is made such that the axis r
is aligned with rp.
passive : `bool, optional, default False`
If `True`, perform a passive rotation
If `False`, perform an active rotation
units : `str, optional, default "radians"`
Units of rp, if rp is given as an angle (scalar)
check : `bool, optional, default True`
Perform a check to verify rotation matrix is orthogonal | [
"Arbitrary",
"rotation",
"of",
"the",
"structure",
".",
"Parameters",
"----------",
"r",
":",
"array_like",
"float",
"shape",
"(",
"3",
"3",
")",
"or",
"array_like",
"float",
"shape",
"(",
"3",
")",
"or",
"str",
"If",
"a",
"3x3",
"matrix",
"then",
"code",
"executes",
"rotation",
"consistent",
"with",
"this",
"matrix",
"--",
"it",
"is",
"assumed",
"that",
"the",
"matrix",
"acts",
"on",
"a",
"column",
"-",
"major",
"vector",
"(",
"eg",
"v",
"=",
"Rv",
")",
"If",
"a",
"three",
"-",
"dimensional",
"array",
"then",
"the",
"operation",
"of",
"the",
"function",
"depends",
"on",
"the",
"input",
"type",
"of",
"rp",
"in",
"the",
"following",
"ways",
":",
"1",
".",
"If",
"rp",
"is",
"a",
"scalar",
"then",
"rp",
"is",
"assumed",
"to",
"be",
"an",
"angle",
"and",
"a",
"rotation",
"of",
"rp",
"is",
"made",
"about",
"the",
"axis",
"defined",
"by",
"r",
"2",
".",
"If",
"rp",
"is",
"a",
"vector",
"then",
"rp",
"is",
"assumed",
"to",
"be",
"an",
"axis",
"and",
"a",
"rotation",
"is",
"made",
"such",
"that",
"r",
"aligns",
"with",
"rp",
"3",
".",
"If",
"rp",
"is",
"a",
"str",
"then",
"the",
"rotation",
"is",
"such",
"that",
"r",
"aligns",
"with",
"the",
"axis",
"given",
"by",
"the",
"str",
"(",
"x",
"y",
"z",
"a0",
"a1",
"or",
"a2",
")",
"If",
"a",
"str",
"then",
"the",
"axis",
"r",
"is",
"defined",
"by",
"the",
"input",
"label",
"(",
"e",
".",
"g",
".",
"x",
"y",
"z",
"a1",
"a2",
"or",
"a3",
")",
"and",
"the",
"operation",
"of",
"the",
"function",
"depends",
"on",
"the",
"input",
"type",
"of",
"rp",
"in",
"the",
"following",
"ways",
"(",
"same",
"as",
"above",
")",
":",
"1",
".",
"If",
"rp",
"is",
"a",
"scalar",
"then",
"rp",
"is",
"assumed",
"to",
"be",
"an",
"angle",
"and",
"a",
"rotation",
"of",
"rp",
"is",
"made",
"about",
"the",
"axis",
"defined",
"by",
"r",
"2",
".",
"If",
"rp",
"is",
"a",
"vector",
"then",
"rp",
"is",
"assumed",
"to",
"be",
"an",
"axis",
"and",
"a",
"rotation",
"is",
"made",
"such",
"that",
"r",
"aligns",
"with",
"rp",
"3",
".",
"If",
"rp",
"is",
"a",
"str",
"then",
"the",
"rotation",
"is",
"such",
"that",
"r",
"aligns",
"with",
"the",
"axis",
"given",
"by",
"the",
"str",
"(",
"x",
"y",
"z",
"a0",
"a1",
"or",
"a2",
")",
"rp",
":",
"array_like",
"float",
"shape",
"(",
"3",
")",
"optional",
"or",
"str",
"optional",
"If",
"a",
"3",
"-",
"dimensional",
"vector",
"is",
"given",
"then",
"rp",
"is",
"assumed",
"to",
"be",
"an",
"axis",
"and",
"a",
"rotation",
"is",
"made",
"such",
"that",
"the",
"axis",
"r",
"is",
"aligned",
"with",
"rp",
".",
"If",
"a",
"str",
"then",
"rp",
"is",
"assumed",
"to",
"be",
"an",
"angle",
"and",
"a",
"rotation",
"about",
"the",
"axis",
"defined",
"by",
"r",
"is",
"made",
"by",
"an",
"angle",
"rp",
"If",
"a",
"str",
"is",
"given",
"then",
"rp",
"is",
"assumed",
"to",
"be",
"an",
"axis",
"defined",
"by",
"the",
"given",
"label",
"(",
"e",
".",
"g",
".",
"x",
"y",
"z",
"a1",
"a2",
"or",
"a3",
")",
"and",
"a",
"rotation",
"is",
"made",
"such",
"that",
"the",
"axis",
"r",
"is",
"aligned",
"with",
"rp",
".",
"passive",
":",
"bool",
"optional",
"default",
"False",
"If",
"True",
"perform",
"a",
"passive",
"rotation",
"If",
"False",
"perform",
"an",
"active",
"rotation",
"units",
":",
"str",
"optional",
"default",
"radians",
"Units",
"of",
"rp",
"if",
"rp",
"is",
"given",
"as",
"an",
"angle",
"(",
"scalar",
")",
"check",
":",
"bool",
"optional",
"default",
"True",
"Perform",
"a",
"check",
"to",
"verify",
"rotation",
"matrix",
"is",
"orthogonal"
] | def rotate(self,r,rp=None,passive=False,units="radians",check=True):
"""
Arbitrary rotation of the structure.
Parameters
----------
r : `array_like, float, shape (3,3)` or `array_like, float, shape (3,)` or `str`
If a 3x3 matrix, then code executes rotation consistent with this matrix --
it is assumed that the matrix acts on a column-major vector (eg, v'=Rv)
If a three-dimensional array, then the operation of the function depends
on the input type of rp in the following ways:
1. If rp is a scalar, then rp is assumed to be an angle and a rotation
of rp is made about the axis defined by r
2. If rp is a vector, then rp is assumed to be an axis and a rotation is made
such that r aligns with rp
3. If rp is a str, then the rotation is such that r aligns with the
axis given by the str ('x', 'y', 'z', 'a0', 'a1', or 'a2')
If a str then the axis, r, is defined by the input label (e.g. 'x', 'y', 'z', 'a0', 'a1', or 'a2')
and the operation of the function depends on the input type of rp in the following
ways (same as above):
1. If rp is a scalar, then rp is assumed to be an angle and a rotation
of rp is made about the axis defined by r
2. If rp is a vector, then rp is assumed to be an axis and a rotation is made
such that r aligns with rp
3. If rp is a str, then the rotation is such that r aligns with the
axis given by the str ('x', 'y', 'z', 'a0', 'a1', or 'a2')
rp : `array_like, float, shape (3), optional` or `str, optional`
If a 3-dimensional vector is given, then rp is assumed to be an axis and a rotation is made
such that the axis r is aligned with rp.
If a scalar, then rp is assumed to be an angle and a rotation about the axis defined by r
is made by an angle rp
If a str is given, then rp is assumed to be an axis defined by the given label
(e.g. 'x', 'y', 'z', 'a0', 'a1', or 'a2') and a rotation is made such that the axis r
is aligned with rp.
passive : `bool, optional, default False`
If `True`, perform a passive rotation
If `False`, perform an active rotation
units : `str, optional, default "radians"`
Units of rp, if rp is given as an angle (scalar)
check : `bool, optional, default True`
Perform a check to verify rotation matrix is orthogonal
"""
if rp is not None:
dirmap = dict(x=[1,0,0],y=[0,1,0],z=[0,0,1])
if isinstance(r,str):
if r[0]=='a': # r= 'a0', 'a1', or 'a2'
r = self.axes[int(r[1])]
else: # r= 'x', 'y', or 'z'
r = dirmap[r]
#end if
else:
r = array(r,dtype=float)
if len(r.shape)>1:
self.error('r must be given as a 1-d vector or string, if rp is not None')
#end if
#end if
if isinstance(rp,(int,float)):
if units=="radians" or units=="rad":
theta = float(rp)
else:
theta = float(rp)*np.pi/180.0
#end if
c = np.cos(theta)
s = np.sin(theta)
else:
if isinstance(rp,str):
if rp[0]=='a': # rp= 'a0', 'a1', or 'a2'
rp = self.axes[int(rp[1])]
else: # rp= 'x', 'y', or 'z'
rp = dirmap[rp]
#end if
else:
rp = array(rp,dtype=float)
#end if
# go from r,rp to r,theta
c = np.dot(r,rp)/np.linalg.norm(r)/np.linalg.norm(rp)
if abs(c-1)<1e-6:
s = 0.0
r = np.array([1,0,0])
else:
s = np.dot(np.cross(r,rp),np.cross(r,rp))/np.linalg.norm(r)/np.linalg.norm(rp)/np.linalg.norm(np.cross(r,rp))
r = np.cross(r,rp)/np.linalg.norm(np.cross(r,rp))
#end if
#end if
# make R from r,theta
R = [[ c+r[0]**2.0*(1.0-c), r[0]*r[1]*(1.0-c)-r[2]*s, r[0]*r[2]*(1.0-c)+r[1]*s],
[r[1]*r[0]*(1.0-c)+r[2]*s, c+r[1]**2.0*(1.0-c), r[1]*r[2]*(1.0-c)-r[0]*s],
[r[2]*r[0]*(1.0-c)-r[1]*s, r[2]*r[1]*(1.0-c)+r[0]*s, c+r[2]**2.0*(1.0-c)]]
else:
R = r
#end if
R = array(R,dtype=float)
if passive:
R = R.T
#end if
if check:
if not np.allclose(dot(R,R.T),identity(len(R))):
self.error('the function, rotate, must be given an orthogonal matrix')
#end if
#end if
self.matrix_transform(R) | [
"def",
"rotate",
"(",
"self",
",",
"r",
",",
"rp",
"=",
"None",
",",
"passive",
"=",
"False",
",",
"units",
"=",
"\"radians\"",
",",
"check",
"=",
"True",
")",
":",
"if",
"rp",
"is",
"not",
"None",
":",
"dirmap",
"=",
"dict",
"(",
"x",
"=",
"[",
"1",
",",
"0",
",",
"0",
"]",
",",
"y",
"=",
"[",
"0",
",",
"1",
",",
"0",
"]",
",",
"z",
"=",
"[",
"0",
",",
"0",
",",
"1",
"]",
")",
"if",
"isinstance",
"(",
"r",
",",
"str",
")",
":",
"if",
"r",
"[",
"0",
"]",
"==",
"'a'",
":",
"# r= 'a0', 'a1', or 'a2'",
"r",
"=",
"self",
".",
"axes",
"[",
"int",
"(",
"r",
"[",
"1",
"]",
")",
"]",
"else",
":",
"# r= 'x', 'y', or 'z'",
"r",
"=",
"dirmap",
"[",
"r",
"]",
"#end if",
"else",
":",
"r",
"=",
"array",
"(",
"r",
",",
"dtype",
"=",
"float",
")",
"if",
"len",
"(",
"r",
".",
"shape",
")",
">",
"1",
":",
"self",
".",
"error",
"(",
"'r must be given as a 1-d vector or string, if rp is not None'",
")",
"#end if",
"#end if",
"if",
"isinstance",
"(",
"rp",
",",
"(",
"int",
",",
"float",
")",
")",
":",
"if",
"units",
"==",
"\"radians\"",
"or",
"units",
"==",
"\"rad\"",
":",
"theta",
"=",
"float",
"(",
"rp",
")",
"else",
":",
"theta",
"=",
"float",
"(",
"rp",
")",
"*",
"np",
".",
"pi",
"/",
"180.0",
"#end if",
"c",
"=",
"np",
".",
"cos",
"(",
"theta",
")",
"s",
"=",
"np",
".",
"sin",
"(",
"theta",
")",
"else",
":",
"if",
"isinstance",
"(",
"rp",
",",
"str",
")",
":",
"if",
"rp",
"[",
"0",
"]",
"==",
"'a'",
":",
"# rp= 'a0', 'a1', or 'a2'",
"rp",
"=",
"self",
".",
"axes",
"[",
"int",
"(",
"rp",
"[",
"1",
"]",
")",
"]",
"else",
":",
"# rp= 'x', 'y', or 'z'",
"rp",
"=",
"dirmap",
"[",
"rp",
"]",
"#end if",
"else",
":",
"rp",
"=",
"array",
"(",
"rp",
",",
"dtype",
"=",
"float",
")",
"#end if",
"# go from r,rp to r,theta",
"c",
"=",
"np",
".",
"dot",
"(",
"r",
",",
"rp",
")",
"/",
"np",
".",
"linalg",
".",
"norm",
"(",
"r",
")",
"/",
"np",
".",
"linalg",
".",
"norm",
"(",
"rp",
")",
"if",
"abs",
"(",
"c",
"-",
"1",
")",
"<",
"1e-6",
":",
"s",
"=",
"0.0",
"r",
"=",
"np",
".",
"array",
"(",
"[",
"1",
",",
"0",
",",
"0",
"]",
")",
"else",
":",
"s",
"=",
"np",
".",
"dot",
"(",
"np",
".",
"cross",
"(",
"r",
",",
"rp",
")",
",",
"np",
".",
"cross",
"(",
"r",
",",
"rp",
")",
")",
"/",
"np",
".",
"linalg",
".",
"norm",
"(",
"r",
")",
"/",
"np",
".",
"linalg",
".",
"norm",
"(",
"rp",
")",
"/",
"np",
".",
"linalg",
".",
"norm",
"(",
"np",
".",
"cross",
"(",
"r",
",",
"rp",
")",
")",
"r",
"=",
"np",
".",
"cross",
"(",
"r",
",",
"rp",
")",
"/",
"np",
".",
"linalg",
".",
"norm",
"(",
"np",
".",
"cross",
"(",
"r",
",",
"rp",
")",
")",
"#end if",
"#end if",
"# make R from r,theta",
"R",
"=",
"[",
"[",
"c",
"+",
"r",
"[",
"0",
"]",
"**",
"2.0",
"*",
"(",
"1.0",
"-",
"c",
")",
",",
"r",
"[",
"0",
"]",
"*",
"r",
"[",
"1",
"]",
"*",
"(",
"1.0",
"-",
"c",
")",
"-",
"r",
"[",
"2",
"]",
"*",
"s",
",",
"r",
"[",
"0",
"]",
"*",
"r",
"[",
"2",
"]",
"*",
"(",
"1.0",
"-",
"c",
")",
"+",
"r",
"[",
"1",
"]",
"*",
"s",
"]",
",",
"[",
"r",
"[",
"1",
"]",
"*",
"r",
"[",
"0",
"]",
"*",
"(",
"1.0",
"-",
"c",
")",
"+",
"r",
"[",
"2",
"]",
"*",
"s",
",",
"c",
"+",
"r",
"[",
"1",
"]",
"**",
"2.0",
"*",
"(",
"1.0",
"-",
"c",
")",
",",
"r",
"[",
"1",
"]",
"*",
"r",
"[",
"2",
"]",
"*",
"(",
"1.0",
"-",
"c",
")",
"-",
"r",
"[",
"0",
"]",
"*",
"s",
"]",
",",
"[",
"r",
"[",
"2",
"]",
"*",
"r",
"[",
"0",
"]",
"*",
"(",
"1.0",
"-",
"c",
")",
"-",
"r",
"[",
"1",
"]",
"*",
"s",
",",
"r",
"[",
"2",
"]",
"*",
"r",
"[",
"1",
"]",
"*",
"(",
"1.0",
"-",
"c",
")",
"+",
"r",
"[",
"0",
"]",
"*",
"s",
",",
"c",
"+",
"r",
"[",
"2",
"]",
"**",
"2.0",
"*",
"(",
"1.0",
"-",
"c",
")",
"]",
"]",
"else",
":",
"R",
"=",
"r",
"#end if",
"R",
"=",
"array",
"(",
"R",
",",
"dtype",
"=",
"float",
")",
"if",
"passive",
":",
"R",
"=",
"R",
".",
"T",
"#end if",
"if",
"check",
":",
"if",
"not",
"np",
".",
"allclose",
"(",
"dot",
"(",
"R",
",",
"R",
".",
"T",
")",
",",
"identity",
"(",
"len",
"(",
"R",
")",
")",
")",
":",
"self",
".",
"error",
"(",
"'the function, rotate, must be given an orthogonal matrix'",
")",
"#end if",
"#end if",
"self",
".",
"matrix_transform",
"(",
"R",
")"
] | https://github.com/QMCPACK/qmcpack/blob/d0948ab455e38364458740cc8e2239600a14c5cd/nexus/lib/structure.py#L1567-L1666 |
||
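A minimal usage sketch for the rotate entry above, assuming `s` is a Structure instance with its `axes` attribute populated; the object name and the rotation values are illustrative only.

import numpy as np

# Active rotation of 90 degrees about the z axis, angle given in radians.
s.rotate('z', np.pi / 2)

# The same rotation with the angle given in degrees.
s.rotate('z', 90, units='degrees')

# Rotate so that the first lattice vector a0 aligns with the x axis.
s.rotate('a0', 'x')

# Apply an explicit rotation matrix; it must be orthogonal when check=True.
R = np.array([[0., -1., 0.],
              [1.,  0., 0.],
              [0.,  0., 1.]])
s.rotate(R)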
baidu-research/tensorflow-allreduce | 66d5b855e90b0949e9fa5cca5599fd729a70e874 | tensorflow/python/ops/distributions/bijector_impl.py | python | _Mapping._merge | (self, old, new) | return old | Helper to merge which handles merging one value. | Helper to merge which handles merging one value. | [
"Helper",
"to",
"merge",
"which",
"handles",
"merging",
"one",
"value",
"."
] | def _merge(self, old, new):
"""Helper to merge which handles merging one value."""
if old is None:
return new
elif new is not None and old != new:
raise ValueError("Incompatible values: %s != %s" % (old, new))
return old | [
"def",
"_merge",
"(",
"self",
",",
"old",
",",
"new",
")",
":",
"if",
"old",
"is",
"None",
":",
"return",
"new",
"elif",
"new",
"is",
"not",
"None",
"and",
"old",
"!=",
"new",
":",
"raise",
"ValueError",
"(",
"\"Incompatible values: %s != %s\"",
"%",
"(",
"old",
",",
"new",
")",
")",
"return",
"old"
] | https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/python/ops/distributions/bijector_impl.py#L99-L105 |
|
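The merge semantics above are small enough to check standalone; this sketch copies the helper out of the class so it runs without a _Mapping instance.

def _merge(old, new):
    if old is None:
        return new
    elif new is not None and old != new:
        raise ValueError("Incompatible values: %s != %s" % (old, new))
    return old

assert _merge(None, 3) == 3   # a missing old value is filled in by the new one
assert _merge(3, None) == 3   # a missing new value keeps the old one
assert _merge(3, 3) == 3      # equal values are compatible
# _merge(3, 4) raises ValueError: Incompatible values: 3 != 4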
eventql/eventql | 7ca0dbb2e683b525620ea30dc40540a22d5eb227 | deps/3rdparty/spidermonkey/mozjs/python/mozbuild/mozpack/files.py | python | BaseFile.mode | (self) | return None | Return the file's unix mode, or None if it has no meaning. | Return the file's unix mode, or None if it has no meaning. | [
"Return",
"the",
"file",
"s",
"unix",
"mode",
"or",
"None",
"if",
"it",
"has",
"no",
"meaning",
"."
] | def mode(self):
'''
Return the file's unix mode, or None if it has no meaning.
'''
return None | [
"def",
"mode",
"(",
"self",
")",
":",
"return",
"None"
] | https://github.com/eventql/eventql/blob/7ca0dbb2e683b525620ea30dc40540a22d5eb227/deps/3rdparty/spidermonkey/mozjs/python/mozbuild/mozpack/files.py#L193-L197 |
|
openthread/openthread | 9fcdbed9c526c70f1556d1ed84099c1535c7cd32 | tools/harness-automation/autothreadharness/pdu_controller.py | python | ApcPduController.open | (self, **params) | Open telnet connection
Args:
params (dict), must contain two parameters "ip" - ip address or hostname and "port" - port number
Example:
params = {'port': 23, 'ip': 'localhost'} | Open telnet connection
Args:
params (dict), must contain two parameters "ip" - ip address or hostname and "port" - port number
Example:
params = {'port': 23, 'ip': 'localhost'} | [
"Open",
"telnet",
"connection",
"Args",
":",
"params",
"(",
"dict",
")",
"must",
"contain",
"two",
"parameters",
"ip",
"-",
"ip",
"address",
"or",
"hostname",
"and",
"port",
"-",
"port",
"number",
"Example",
":",
"params",
"=",
"{",
"port",
":",
"23",
"ip",
":",
"localhost",
"}"
] | def open(self, **params):
"""Open telnet connection
Args:
params (dict), must contain two parameters "ip" - ip address or hostname and "port" - port number
Example:
params = {'port': 23, 'ip': 'localhost'}
"""
logger.info('opening telnet')
self.port = params['port']
self.ip = params['ip']
self.tn = None
self._init() | [
"def",
"open",
"(",
"self",
",",
"*",
"*",
"params",
")",
":",
"logger",
".",
"info",
"(",
"'opening telnet'",
")",
"self",
".",
"port",
"=",
"params",
"[",
"'port'",
"]",
"self",
".",
"ip",
"=",
"params",
"[",
"'ip'",
"]",
"self",
".",
"tn",
"=",
"None",
"self",
".",
"_init",
"(",
")"
] | https://github.com/openthread/openthread/blob/9fcdbed9c526c70f1556d1ed84099c1535c7cd32/tools/harness-automation/autothreadharness/pdu_controller.py#L97-L110 |
||
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/setuptools/py2/setuptools/_vendor/packaging/version.py | python | parse | (version) | Parse the given version string and return either a :class:`Version` object
or a :class:`LegacyVersion` object depending on if the given version is
a valid PEP 440 version or a legacy version. | Parse the given version string and return either a :class:`Version` object
or a :class:`LegacyVersion` object depending on if the given version is
a valid PEP 440 version or a legacy version. | [
"Parse",
"the",
"given",
"version",
"string",
"and",
"return",
"either",
"a",
":",
"class",
":",
"Version",
"object",
"or",
"a",
":",
"class",
":",
"LegacyVersion",
"object",
"depending",
"on",
"if",
"the",
"given",
"version",
"is",
"a",
"valid",
"PEP",
"440",
"version",
"or",
"a",
"legacy",
"version",
"."
] | def parse(version):
"""
Parse the given version string and return either a :class:`Version` object
or a :class:`LegacyVersion` object depending on if the given version is
a valid PEP 440 version or a legacy version.
"""
try:
return Version(version)
except InvalidVersion:
return LegacyVersion(version) | [
"def",
"parse",
"(",
"version",
")",
":",
"try",
":",
"return",
"Version",
"(",
"version",
")",
"except",
"InvalidVersion",
":",
"return",
"LegacyVersion",
"(",
"version",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/setuptools/py2/setuptools/_vendor/packaging/version.py#L21-L30 |
||
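A short sketch of the dispatch behavior documented above, run inside the vendored module's namespace so `parse`, `Version`, and `LegacyVersion` are in scope; it mirrors how the upstream packaging module behaves for PEP 440 and non-PEP 440 strings.

v = parse("1.0.3")               # valid PEP 440 string -> Version
legacy = parse("french toast")   # anything else -> LegacyVersion fallback

assert isinstance(v, Version)
assert isinstance(legacy, LegacyVersion)
assert parse("1.10") > parse("1.9")   # numeric, not lexicographic, ordering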
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_cocoa/stc.py | python | StyledTextCtrl.CharLeftRectExtend | (*args, **kwargs) | return _stc.StyledTextCtrl_CharLeftRectExtend(*args, **kwargs) | CharLeftRectExtend(self)
Move caret left one character, extending rectangular selection to new caret position. | CharLeftRectExtend(self) | [
"CharLeftRectExtend",
"(",
"self",
")"
] | def CharLeftRectExtend(*args, **kwargs):
"""
CharLeftRectExtend(self)
Move caret left one character, extending rectangular selection to new caret position.
"""
return _stc.StyledTextCtrl_CharLeftRectExtend(*args, **kwargs) | [
"def",
"CharLeftRectExtend",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_stc",
".",
"StyledTextCtrl_CharLeftRectExtend",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/stc.py#L5383-L5389 |
|
xhzdeng/crpn | a5aef0f80dbe486103123f740c634fb01e6cc9a1 | caffe-fast-rcnn/python/caffe/pycaffe.py | python | _Net_layer_dict | (self) | return self._layer_dict | An OrderedDict (bottom to top, i.e., input to output) of network
layers indexed by name | An OrderedDict (bottom to top, i.e., input to output) of network
layers indexed by name | [
"An",
"OrderedDict",
"(",
"bottom",
"to",
"top",
"i",
".",
"e",
".",
"input",
"to",
"output",
")",
"of",
"network",
"layers",
"indexed",
"by",
"name"
] | def _Net_layer_dict(self):
"""
An OrderedDict (bottom to top, i.e., input to output) of network
layers indexed by name
"""
if not hasattr(self, '_layer_dict'):
self._layer_dict = OrderedDict(zip(self._layer_names, self.layers))
return self._layer_dict | [
"def",
"_Net_layer_dict",
"(",
"self",
")",
":",
"if",
"not",
"hasattr",
"(",
"self",
",",
"'_layer_dict'",
")",
":",
"self",
".",
"_layer_dict",
"=",
"OrderedDict",
"(",
"zip",
"(",
"self",
".",
"_layer_names",
",",
"self",
".",
"layers",
")",
")",
"return",
"self",
".",
"_layer_dict"
] | https://github.com/xhzdeng/crpn/blob/a5aef0f80dbe486103123f740c634fb01e6cc9a1/caffe-fast-rcnn/python/caffe/pycaffe.py#L47-L54 |
|
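A brief sketch of how the cached property above is typically consumed; pycaffe binds _Net_layer_dict as the `layer_dict` property of caffe.Net, `net` stands in for a loaded Net, and the layer name 'conv1' is hypothetical.

print(list(net.layer_dict.keys()))   # layer names, bottom to top
conv1 = net.layer_dict['conv1']      # look a layer up by name, not index
print(conv1.type)                    # e.g. 'Convolution'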
PaddlePaddle/Paddle | 1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c | python/paddle/fluid/contrib/slim/quantization/quantize_transpiler_v2.py | python | QuantizeTranspilerV2._transform_forward | (self, block, op, var_rename_map, is_test) | Insert fake quant op before the target ops. | Insert fake quant op before the target ops. | [
"Insert",
"fake",
"quant",
"op",
"before",
"the",
"target",
"ops",
"."
] | def _transform_forward(self, block, op, var_rename_map, is_test):
"""
Insert fake quant op before the target ops.
"""
op._set_attr("quantization_type", "qat_with_weight")
# insert fake quant op before the quantized op
for in_name in op.input_arg_names:
block_id = block.idx
idx = block.ops.index(op)
if in_name in var_rename_map[block_id]:
new_in_name = var_rename_map[block_id][in_name]
else:
in_var = block.var(in_name)
target_dtype = [
core.VarDesc.VarType.FP32, core.VarDesc.VarType.FP16
]
if in_var.dtype not in target_dtype:
continue
quant_bits = self._weight_bits if in_var.persistable \
else self._activation_bits
quant_type = self._weight_quantize_type if in_var.persistable \
else self._activation_quantize_type
if quant_type == "abs_max":
new_var = self._insert_abs_max_fq_op(block, idx, in_var,
quant_bits)
elif quant_type == "moving_average_abs_max":
new_var = self._insert_ma_abs_max_fq_op(block, idx, in_var,
quant_bits, is_test)
elif quant_type == "channel_wise_abs_max":
ch_axis = 1 if op.type in self._out_ch_axis1_ops else 0
new_var = self._insert_pc_abs_max_fq_op(block, idx, in_var,
quant_bits, ch_axis)
else:
_logger.error("Don't support the quant_type: %s" %
quant_type)
continue
new_in_name = new_var.name
var_rename_map[block_id][in_name] = new_in_name
op._rename_input(in_name, new_in_name)
# insert out scale op followed the quantized op
for out_name in op.output_arg_names:
next_ops = find_next_ops(block, out_name)
idx = block.ops.index(op)
out_var = block.var(out_name)
new_out_var = self._insert_ma_abs_max_scale_op(
block, idx + 1, out_var, is_test, True)
for next_op in next_ops:
if "_grad" not in next_op.type:
next_op._rename_input(out_name, new_out_var.name) | [
"def",
"_transform_forward",
"(",
"self",
",",
"block",
",",
"op",
",",
"var_rename_map",
",",
"is_test",
")",
":",
"op",
".",
"_set_attr",
"(",
"\"quantization_type\"",
",",
"\"qat_with_weight\"",
")",
"# insert fake quant op before the quantized op",
"for",
"in_name",
"in",
"op",
".",
"input_arg_names",
":",
"block_id",
"=",
"block",
".",
"idx",
"idx",
"=",
"block",
".",
"ops",
".",
"index",
"(",
"op",
")",
"if",
"in_name",
"in",
"var_rename_map",
"[",
"block_id",
"]",
":",
"new_in_name",
"=",
"var_rename_map",
"[",
"block_id",
"]",
"[",
"in_name",
"]",
"else",
":",
"in_var",
"=",
"block",
".",
"var",
"(",
"in_name",
")",
"target_dtype",
"=",
"[",
"core",
".",
"VarDesc",
".",
"VarType",
".",
"FP32",
",",
"core",
".",
"VarDesc",
".",
"VarType",
".",
"FP16",
"]",
"if",
"in_var",
".",
"dtype",
"not",
"in",
"target_dtype",
":",
"continue",
"quant_bits",
"=",
"self",
".",
"_weight_bits",
"if",
"in_var",
".",
"persistable",
"else",
"self",
".",
"_activation_bits",
"quant_type",
"=",
"self",
".",
"_weight_quantize_type",
"if",
"in_var",
".",
"persistable",
"else",
"self",
".",
"_activation_quantize_type",
"if",
"quant_type",
"==",
"\"abs_max\"",
":",
"new_var",
"=",
"self",
".",
"_insert_abs_max_fq_op",
"(",
"block",
",",
"idx",
",",
"in_var",
",",
"quant_bits",
")",
"elif",
"quant_type",
"==",
"\"moving_average_abs_max\"",
":",
"new_var",
"=",
"self",
".",
"_insert_ma_abs_max_fq_op",
"(",
"block",
",",
"idx",
",",
"in_var",
",",
"quant_bits",
",",
"is_test",
")",
"elif",
"quant_type",
"==",
"\"channel_wise_abs_max\"",
":",
"ch_axis",
"=",
"1",
"if",
"op",
".",
"type",
"in",
"self",
".",
"_out_ch_axis1_ops",
"else",
"0",
"new_var",
"=",
"self",
".",
"_insert_pc_abs_max_fq_op",
"(",
"block",
",",
"idx",
",",
"in_var",
",",
"quant_bits",
",",
"ch_axis",
")",
"else",
":",
"_logger",
".",
"error",
"(",
"\"Don't support the quant_type: %s\"",
"%",
"quant_type",
")",
"continue",
"new_in_name",
"=",
"new_var",
".",
"name",
"var_rename_map",
"[",
"block_id",
"]",
"[",
"in_name",
"]",
"=",
"new_in_name",
"op",
".",
"_rename_input",
"(",
"in_name",
",",
"new_in_name",
")",
"# insert out scale op followed the quantized op",
"for",
"out_name",
"in",
"op",
".",
"output_arg_names",
":",
"next_ops",
"=",
"find_next_ops",
"(",
"block",
",",
"out_name",
")",
"idx",
"=",
"block",
".",
"ops",
".",
"index",
"(",
"op",
")",
"out_var",
"=",
"block",
".",
"var",
"(",
"out_name",
")",
"new_out_var",
"=",
"self",
".",
"_insert_ma_abs_max_scale_op",
"(",
"block",
",",
"idx",
"+",
"1",
",",
"out_var",
",",
"is_test",
",",
"True",
")",
"for",
"next_op",
"in",
"next_ops",
":",
"if",
"\"_grad\"",
"not",
"in",
"next_op",
".",
"type",
":",
"next_op",
".",
"_rename_input",
"(",
"out_name",
",",
"new_out_var",
".",
"name",
")"
] | https://github.com/PaddlePaddle/Paddle/blob/1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c/python/paddle/fluid/contrib/slim/quantization/quantize_transpiler_v2.py#L173-L230 |
||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/msw/_windows.py | python | ColourData.GetCustomColour | (*args, **kwargs) | return _windows_.ColourData_GetCustomColour(*args, **kwargs) | GetCustomColour(self, int i) -> Colour
Gets the i'th custom colour associated with the colour dialog. i
should be an integer between 0 and 15. The default custom colours are
all invalid colours. | GetCustomColour(self, int i) -> Colour | [
"GetCustomColour",
"(",
"self",
"int",
"i",
")",
"-",
">",
"Colour"
] | def GetCustomColour(*args, **kwargs):
"""
GetCustomColour(self, int i) -> Colour
Gets the i'th custom colour associated with the colour dialog. i
should be an integer between 0 and 15. The default custom colours are
all invalid colours.
"""
return _windows_.ColourData_GetCustomColour(*args, **kwargs) | [
"def",
"GetCustomColour",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_windows_",
".",
"ColourData_GetCustomColour",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_windows.py#L2949-L2957 |
|
hzl123456/LibyuvDemo | d02b6500d0cf111bdd8778c56983154e6d14bdb4 | libyuv/src/main/cpp/libyuv/tools_libyuv/get_landmines.py | python | print_landmines | () | ALL LANDMINES ARE EMITTED FROM HERE. | ALL LANDMINES ARE EMITTED FROM HERE. | [
"ALL",
"LANDMINES",
"ARE",
"EMITTED",
"FROM",
"HERE",
"."
] | def print_landmines():
"""
ALL LANDMINES ARE EMITTED FROM HERE.
"""
# DO NOT add landmines as part of a regular CL. Landmines are a last-effort
# bandaid fix if a CL that got landed has a build dependency bug and all bots
# need to be cleaned up. If you're writing a new CL that causes build
# dependency problems, fix the dependency problems instead of adding a
# landmine.
# See the Chromium version in src/build/get_landmines.py for usage examples.
print 'Clobber to remove GYP artifacts after switching bots to GN.'
print 'Another try to remove GYP artifacts after switching bots to GN.' | [
"def",
"print_landmines",
"(",
")",
":",
"# DO NOT add landmines as part of a regular CL. Landmines are a last-effort",
"# bandaid fix if a CL that got landed has a build dependency bug and all bots",
"# need to be cleaned up. If you're writing a new CL that causes build",
"# dependency problems, fix the dependency problems instead of adding a",
"# landmine.",
"# See the Chromium version in src/build/get_landmines.py for usage examples.",
"print",
"'Clobber to remove GYP artifacts after switching bots to GN.'",
"print",
"'Another try to remove GYP artifacts after switching bots to GN.'"
] | https://github.com/hzl123456/LibyuvDemo/blob/d02b6500d0cf111bdd8778c56983154e6d14bdb4/libyuv/src/main/cpp/libyuv/tools_libyuv/get_landmines.py#L30-L41 |
||
Xilinx/Vitis-AI | fc74d404563d9951b57245443c73bef389f3657f | tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/cudnn_rnn/python/ops/cudnn_rnn_ops.py | python | CudnnParamsFormatConverterGRU._cudnn_to_tf_biases | (self, *biases) | return (
# Save only the sum instead of individual biases. When recovering,
# return two biases each with half the value. Since RNN does not
# regularize by weight decay, it has no side effect in training or
# inference.
array_ops.concat([b_wi, b_wr], axis=0) +
array_ops.concat([b_ri, b_rr], axis=0),
b_wh,
b_rh) | r"""Stitching cudnn canonical biases to generate tf canonical biases. | r"""Stitching cudnn canonical biases to generate tf canonical biases. | [
"r",
"Stitching",
"cudnn",
"canonical",
"biases",
"to",
"generate",
"tf",
"canonical",
"biases",
"."
] | def _cudnn_to_tf_biases(self, *biases):
r"""Stitching cudnn canonical biases to generate tf canonical biases."""
b_wi, b_wr, b_wh, b_ri, b_rr, b_rh = biases
return (
# Save only the sum instead of individual biases. When recovering,
# return two biases each with half the value. Since RNN does not
# regularize by weight decay, it has no side effect in training or
# inference.
array_ops.concat([b_wi, b_wr], axis=0) +
array_ops.concat([b_ri, b_rr], axis=0),
b_wh,
b_rh) | [
"def",
"_cudnn_to_tf_biases",
"(",
"self",
",",
"*",
"biases",
")",
":",
"b_wi",
",",
"b_wr",
",",
"b_wh",
",",
"b_ri",
",",
"b_rr",
",",
"b_rh",
"=",
"biases",
"return",
"(",
"# Save only the sum instead of individual biases. When recovering,",
"# return two biases each with half the value. Since RNN does not",
"# regularize by weight decay, it has no side effect in training or",
"# inference.",
"array_ops",
".",
"concat",
"(",
"[",
"b_wi",
",",
"b_wr",
"]",
",",
"axis",
"=",
"0",
")",
"+",
"array_ops",
".",
"concat",
"(",
"[",
"b_ri",
",",
"b_rr",
"]",
",",
"axis",
"=",
"0",
")",
",",
"b_wh",
",",
"b_rh",
")"
] | https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/cudnn_rnn/python/ops/cudnn_rnn_ops.py#L625-L636 |
|
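The bias-summing trick above is easy to verify in isolation with numpy; the length-4 arrays are illustrative stand-ins for the six cudnn canonical GRU biases.

import numpy as np

b_wi, b_wr = np.full(4, 1.0), np.full(4, 2.0)
b_ri, b_rr = np.full(4, 4.0), np.full(4, 5.0)

# TF stores one gate bias: the sum of input-side and recurrent-side biases.
b_gates = np.concatenate([b_wi, b_wr]) + np.concatenate([b_ri, b_rr])
assert (b_gates[:4] == 5.0).all() and (b_gates[4:] == 7.0).all()
# On the reverse conversion each side is reconstructed as half the stored sum,
# which the comment above notes is harmless for training and inference.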
benoitsteiner/tensorflow-opencl | cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5 | tensorflow/contrib/slim/python/slim/nets/inception_v2.py | python | inception_v2_arg_scope | (weight_decay=0.00004,
batch_norm_var_collection='moving_vars') | Defines the default InceptionV2 arg scope.
Args:
weight_decay: The weight decay to use for regularizing the model.
batch_norm_var_collection: The name of the collection for the batch norm
variables.
Returns:
An `arg_scope` to use for the inception v2 model. | Defines the default InceptionV2 arg scope. | [
"Defines",
"the",
"default",
"InceptionV2",
"arg",
"scope",
"."
] | def inception_v2_arg_scope(weight_decay=0.00004,
batch_norm_var_collection='moving_vars'):
"""Defines the default InceptionV2 arg scope.
Args:
weight_decay: The weight decay to use for regularizing the model.
batch_norm_var_collection: The name of the collection for the batch norm
variables.
Returns:
An `arg_scope` to use for the inception v2 model.
"""
batch_norm_params = {
# Decay for the moving averages.
'decay': 0.9997,
# epsilon to prevent 0s in variance.
'epsilon': 0.001,
# collection containing update_ops.
'updates_collections': ops.GraphKeys.UPDATE_OPS,
# collection containing the moving mean and moving variance.
'variables_collections': {
'beta': None,
'gamma': None,
'moving_mean': [batch_norm_var_collection],
'moving_variance': [batch_norm_var_collection],
}
}
# Set weight_decay for weights in Conv and FC layers.
with arg_scope(
[layers.conv2d, layers_lib.fully_connected],
weights_regularizer=regularizers.l2_regularizer(weight_decay)):
with arg_scope(
[layers.conv2d],
weights_initializer=initializers.variance_scaling_initializer(),
activation_fn=nn_ops.relu,
normalizer_fn=layers_lib.batch_norm,
normalizer_params=batch_norm_params) as sc:
return sc | [
"def",
"inception_v2_arg_scope",
"(",
"weight_decay",
"=",
"0.00004",
",",
"batch_norm_var_collection",
"=",
"'moving_vars'",
")",
":",
"batch_norm_params",
"=",
"{",
"# Decay for the moving averages.",
"'decay'",
":",
"0.9997",
",",
"# epsilon to prevent 0s in variance.",
"'epsilon'",
":",
"0.001",
",",
"# collection containing update_ops.",
"'updates_collections'",
":",
"ops",
".",
"GraphKeys",
".",
"UPDATE_OPS",
",",
"# collection containing the moving mean and moving variance.",
"'variables_collections'",
":",
"{",
"'beta'",
":",
"None",
",",
"'gamma'",
":",
"None",
",",
"'moving_mean'",
":",
"[",
"batch_norm_var_collection",
"]",
",",
"'moving_variance'",
":",
"[",
"batch_norm_var_collection",
"]",
",",
"}",
"}",
"# Set weight_decay for weights in Conv and FC layers.",
"with",
"arg_scope",
"(",
"[",
"layers",
".",
"conv2d",
",",
"layers_lib",
".",
"fully_connected",
"]",
",",
"weights_regularizer",
"=",
"regularizers",
".",
"l2_regularizer",
"(",
"weight_decay",
")",
")",
":",
"with",
"arg_scope",
"(",
"[",
"layers",
".",
"conv2d",
"]",
",",
"weights_initializer",
"=",
"initializers",
".",
"variance_scaling_initializer",
"(",
")",
",",
"activation_fn",
"=",
"nn_ops",
".",
"relu",
",",
"normalizer_fn",
"=",
"layers_lib",
".",
"batch_norm",
",",
"normalizer_params",
"=",
"batch_norm_params",
")",
"as",
"sc",
":",
"return",
"sc"
] | https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/contrib/slim/python/slim/nets/inception_v2.py#L605-L643 |
||
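A typical call pattern for the scope above, sketched against the contrib-slim API of the same era; `inception_v2` is the network builder defined alongside this function, and the input shape and class count are illustrative.

import tensorflow as tf
from tensorflow.contrib.framework import arg_scope

images = tf.placeholder(tf.float32, [None, 224, 224, 3])
with arg_scope(inception_v2_arg_scope(weight_decay=4e-5)):
    logits, end_points = inception_v2(images, num_classes=1001)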
LiquidPlayer/LiquidCore | 9405979363f2353ac9a71ad8ab59685dd7f919c9 | deps/boost_1_66_0/tools/build/src/build/virtual_target.py | python | Action.actualize_sources | (self, sources, prop_set) | Creates actual jam targets for sources. Initializes two member
variables:
'self.actual_sources_' -- sources which are passed to updating action
'self.dependency_only_sources_' -- sources which are made dependencies, but
are not used otherwise.
New values will be *appended* to the variables. They may be non-empty,
if caller wants it. | Creates actual jam targets for sources. Initializes two member
variables:
'self.actual_sources_' -- sources which are passed to updating action
'self.dependency_only_sources_' -- sources which are made dependencies, but
are not used otherwise. | [
"Creates",
"actual",
"jam",
"targets",
"for",
"sources",
".",
"Initializes",
"two",
"member",
"variables",
":",
"self",
".",
"actual_sources_",
"--",
"sources",
"which",
"are",
"passed",
"to",
"updating",
"action",
"self",
".",
"dependency_only_sources_",
"--",
"sources",
"which",
"are",
"made",
"dependencies",
"but",
"are",
"not",
"used",
"otherwise",
"."
] | def actualize_sources (self, sources, prop_set):
""" Creates actual jam targets for sources. Initializes two member
variables:
'self.actual_sources_' -- sources which are passed to updating action
'self.dependency_only_sources_' -- sources which are made dependencies, but
are not used otherwise.
New values will be *appended* to the variables. They may be non-empty,
if caller wants it.
"""
assert is_iterable_typed(sources, VirtualTarget)
assert isinstance(prop_set, property_set.PropertySet)
dependencies = self.properties_.get ('<dependency>')
self.dependency_only_sources_ += self.actualize_source_type (dependencies, prop_set)
self.actual_sources_ += self.actualize_source_type (sources, prop_set)
# This is used to help bjam find dependencies in generated headers
# in other main targets.
# Say:
#
# make a.h : ....... ;
# exe hello : hello.cpp : <implicit-dependency>a.h ;
#
# However, for bjam to find the dependency the generated target must
# be actualized (i.e. have the jam target). In the above case,
# if we're building just hello ("bjam hello"), 'a.h' won't be
# actualized unless we do it here.
implicit = self.properties_.get("<implicit-dependency>")
for i in implicit:
i.actualize() | [
"def",
"actualize_sources",
"(",
"self",
",",
"sources",
",",
"prop_set",
")",
":",
"assert",
"is_iterable_typed",
"(",
"sources",
",",
"VirtualTarget",
")",
"assert",
"isinstance",
"(",
"prop_set",
",",
"property_set",
".",
"PropertySet",
")",
"dependencies",
"=",
"self",
".",
"properties_",
".",
"get",
"(",
"'<dependency>'",
")",
"self",
".",
"dependency_only_sources_",
"+=",
"self",
".",
"actualize_source_type",
"(",
"dependencies",
",",
"prop_set",
")",
"self",
".",
"actual_sources_",
"+=",
"self",
".",
"actualize_source_type",
"(",
"sources",
",",
"prop_set",
")",
"# This is used to help bjam find dependencies in generated headers",
"# in other main targets.",
"# Say:",
"#",
"# make a.h : ....... ;",
"# exe hello : hello.cpp : <implicit-dependency>a.h ;",
"#",
"# However, for bjam to find the dependency the generated target must",
"# be actualized (i.e. have the jam target). In the above case,",
"# if we're building just hello (\"bjam hello\"), 'a.h' won't be",
"# actualized unless we do it here.",
"implicit",
"=",
"self",
".",
"properties_",
".",
"get",
"(",
"\"<implicit-dependency>\"",
")",
"for",
"i",
"in",
"implicit",
":",
"i",
".",
"actualize",
"(",
")"
] | https://github.com/LiquidPlayer/LiquidCore/blob/9405979363f2353ac9a71ad8ab59685dd7f919c9/deps/boost_1_66_0/tools/build/src/build/virtual_target.py#L886-L917 |
||
eclipse/upm | d6f76ff8c231417666594214679c49399513112e | src/doxy2swig.py | python | Doxy2SWIG.generic_parse | (self, node, pad=0) | A Generic parser for arbitrary tags in a node.
Parameters:
- node: A node in the DOM.
- pad: `int` (default: 0)
If 0 the node data is not padded with newlines. If 1 it
appends a newline after parsing the childNodes. If 2 it
pads before and after the nodes are processed. Defaults to
0. | A Generic parser for arbitrary tags in a node. | [
"A",
"Generic",
"parser",
"for",
"arbitrary",
"tags",
"in",
"a",
"node",
"."
] | def generic_parse(self, node, pad=0):
"""A Generic parser for arbitrary tags in a node.
Parameters:
- node: A node in the DOM.
- pad: `int` (default: 0)
If 0 the node data is not padded with newlines. If 1 it
appends a newline after parsing the childNodes. If 2 it
pads before and after the nodes are processed. Defaults to
0.
"""
npiece = 0
if pad:
npiece = len(self.pieces)
if pad == 2:
self.add_text('\n')
for n in node.childNodes:
self.parse(n)
if pad:
if len(self.pieces) > npiece:
self.add_text('\n') | [
"def",
"generic_parse",
"(",
"self",
",",
"node",
",",
"pad",
"=",
"0",
")",
":",
"npiece",
"=",
"0",
"if",
"pad",
":",
"npiece",
"=",
"len",
"(",
"self",
".",
"pieces",
")",
"if",
"pad",
"==",
"2",
":",
"self",
".",
"add_text",
"(",
"'\\n'",
")",
"for",
"n",
"in",
"node",
".",
"childNodes",
":",
"self",
".",
"parse",
"(",
"n",
")",
"if",
"pad",
":",
"if",
"len",
"(",
"self",
".",
"pieces",
")",
">",
"npiece",
":",
"self",
".",
"add_text",
"(",
"'\\n'",
")"
] | https://github.com/eclipse/upm/blob/d6f76ff8c231417666594214679c49399513112e/src/doxy2swig.py#L169-L192 |
||
gnuradio/gnuradio | 09c3c4fa4bfb1a02caac74cb5334dfe065391e3b | grc/gui/PropsDialog.py | python | PropsDialog.__init__ | (self, parent, block) | Properties dialog constructor.
Args:
block: a block instance | Properties dialog constructor. | [
"Properties",
"dialog",
"constructor",
"."
] | def __init__(self, parent, block):
"""
Properties dialog constructor.
Args:
block: a block instance
"""
Gtk.Dialog.__init__(
self,
title='Properties: ' + block.label,
transient_for=parent,
modal=True,
destroy_with_parent=True,
)
self.add_buttons(
Gtk.STOCK_OK, Gtk.ResponseType.ACCEPT,
Gtk.STOCK_CANCEL, Gtk.ResponseType.REJECT,
Gtk.STOCK_APPLY, Gtk.ResponseType.APPLY,
)
self.set_response_sensitive(Gtk.ResponseType.APPLY, False)
self.set_size_request(*Utils.scale(
(Constants.MIN_DIALOG_WIDTH, Constants.MIN_DIALOG_HEIGHT)
))
self._block = block
self._hash = 0
self._config = parent.config
vpaned = Gtk.VPaned()
self.vbox.pack_start(vpaned, True, True, 0)
# Notebook to hold param boxes
notebook = self.notebook = Gtk.Notebook()
notebook.set_show_border(False)
notebook.set_scrollable(True) # scroll arrows for page tabs
notebook.set_tab_pos(Gtk.PositionType.TOP)
vpaned.pack1(notebook, True)
# Params boxes for block parameters
self._params_boxes = []
self._build_param_tab_boxes()
# Docs for the block
self._docs_text_display = doc_view = SimpleTextDisplay()
doc_view.get_buffer().create_tag('b', weight=Pango.Weight.BOLD)
self._docs_box = Gtk.ScrolledWindow()
self._docs_box.set_policy(
Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
self._docs_vbox = Gtk.VBox(homogeneous=False, spacing=0)
self._docs_box.add(self._docs_vbox)
self._docs_link = Gtk.Label(use_markup=True)
self._docs_vbox.pack_start(self._docs_link, False, False, 0)
self._docs_vbox.pack_end(self._docs_text_display, True, True, 0)
notebook.append_page(self._docs_box, Gtk.Label(label="Documentation"))
# Generated code for the block
if Actions.TOGGLE_SHOW_CODE_PREVIEW_TAB.get_active():
self._code_text_display = code_view = SimpleTextDisplay()
code_view.set_wrap_mode(Gtk.WrapMode.NONE)
code_view.get_buffer().create_tag('b', weight=Pango.Weight.BOLD)
code_view.set_monospace(True)
# todo: set font size in non-deprecated way
# code_view.override_font(Pango.FontDescription('monospace %d' % Constants.FONT_SIZE))
code_box = Gtk.ScrolledWindow()
code_box.set_policy(Gtk.PolicyType.AUTOMATIC,
Gtk.PolicyType.AUTOMATIC)
code_box.add(self._code_text_display)
notebook.append_page(code_box, Gtk.Label(label="Generated Code"))
else:
self._code_text_display = None
# Error Messages for the block
self._error_messages_text_display = SimpleTextDisplay()
self._error_box = Gtk.ScrolledWindow()
self._error_box.set_policy(
Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
self._error_box.add(self._error_messages_text_display)
vpaned.pack2(self._error_box)
vpaned.set_position(int(0.65 * Constants.MIN_DIALOG_HEIGHT))
# Connect events
self.connect('key-press-event', self._handle_key_press)
self.connect('show', self.update_gui)
self.connect('response', self._handle_response)
self.show_all() | [
"def",
"__init__",
"(",
"self",
",",
"parent",
",",
"block",
")",
":",
"Gtk",
".",
"Dialog",
".",
"__init__",
"(",
"self",
",",
"title",
"=",
"'Properties: '",
"+",
"block",
".",
"label",
",",
"transient_for",
"=",
"parent",
",",
"modal",
"=",
"True",
",",
"destroy_with_parent",
"=",
"True",
",",
")",
"self",
".",
"add_buttons",
"(",
"Gtk",
".",
"STOCK_OK",
",",
"Gtk",
".",
"ResponseType",
".",
"ACCEPT",
",",
"Gtk",
".",
"STOCK_CANCEL",
",",
"Gtk",
".",
"ResponseType",
".",
"REJECT",
",",
"Gtk",
".",
"STOCK_APPLY",
",",
"Gtk",
".",
"ResponseType",
".",
"APPLY",
",",
")",
"self",
".",
"set_response_sensitive",
"(",
"Gtk",
".",
"ResponseType",
".",
"APPLY",
",",
"False",
")",
"self",
".",
"set_size_request",
"(",
"*",
"Utils",
".",
"scale",
"(",
"(",
"Constants",
".",
"MIN_DIALOG_WIDTH",
",",
"Constants",
".",
"MIN_DIALOG_HEIGHT",
")",
")",
")",
"self",
".",
"_block",
"=",
"block",
"self",
".",
"_hash",
"=",
"0",
"self",
".",
"_config",
"=",
"parent",
".",
"config",
"vpaned",
"=",
"Gtk",
".",
"VPaned",
"(",
")",
"self",
".",
"vbox",
".",
"pack_start",
"(",
"vpaned",
",",
"True",
",",
"True",
",",
"0",
")",
"# Notebook to hold param boxes",
"notebook",
"=",
"self",
".",
"notebook",
"=",
"Gtk",
".",
"Notebook",
"(",
")",
"notebook",
".",
"set_show_border",
"(",
"False",
")",
"notebook",
".",
"set_scrollable",
"(",
"True",
")",
"# scroll arrows for page tabs",
"notebook",
".",
"set_tab_pos",
"(",
"Gtk",
".",
"PositionType",
".",
"TOP",
")",
"vpaned",
".",
"pack1",
"(",
"notebook",
",",
"True",
")",
"# Params boxes for block parameters",
"self",
".",
"_params_boxes",
"=",
"[",
"]",
"self",
".",
"_build_param_tab_boxes",
"(",
")",
"# Docs for the block",
"self",
".",
"_docs_text_display",
"=",
"doc_view",
"=",
"SimpleTextDisplay",
"(",
")",
"doc_view",
".",
"get_buffer",
"(",
")",
".",
"create_tag",
"(",
"'b'",
",",
"weight",
"=",
"Pango",
".",
"Weight",
".",
"BOLD",
")",
"self",
".",
"_docs_box",
"=",
"Gtk",
".",
"ScrolledWindow",
"(",
")",
"self",
".",
"_docs_box",
".",
"set_policy",
"(",
"Gtk",
".",
"PolicyType",
".",
"AUTOMATIC",
",",
"Gtk",
".",
"PolicyType",
".",
"AUTOMATIC",
")",
"self",
".",
"_docs_vbox",
"=",
"Gtk",
".",
"VBox",
"(",
"homogeneous",
"=",
"False",
",",
"spacing",
"=",
"0",
")",
"self",
".",
"_docs_box",
".",
"add",
"(",
"self",
".",
"_docs_vbox",
")",
"self",
".",
"_docs_link",
"=",
"Gtk",
".",
"Label",
"(",
"use_markup",
"=",
"True",
")",
"self",
".",
"_docs_vbox",
".",
"pack_start",
"(",
"self",
".",
"_docs_link",
",",
"False",
",",
"False",
",",
"0",
")",
"self",
".",
"_docs_vbox",
".",
"pack_end",
"(",
"self",
".",
"_docs_text_display",
",",
"True",
",",
"True",
",",
"0",
")",
"notebook",
".",
"append_page",
"(",
"self",
".",
"_docs_box",
",",
"Gtk",
".",
"Label",
"(",
"label",
"=",
"\"Documentation\"",
")",
")",
"# Generated code for the block",
"if",
"Actions",
".",
"TOGGLE_SHOW_CODE_PREVIEW_TAB",
".",
"get_active",
"(",
")",
":",
"self",
".",
"_code_text_display",
"=",
"code_view",
"=",
"SimpleTextDisplay",
"(",
")",
"code_view",
".",
"set_wrap_mode",
"(",
"Gtk",
".",
"WrapMode",
".",
"NONE",
")",
"code_view",
".",
"get_buffer",
"(",
")",
".",
"create_tag",
"(",
"'b'",
",",
"weight",
"=",
"Pango",
".",
"Weight",
".",
"BOLD",
")",
"code_view",
".",
"set_monospace",
"(",
"True",
")",
"# todo: set font size in non-deprecated way",
"# code_view.override_font(Pango.FontDescription('monospace %d' % Constants.FONT_SIZE))",
"code_box",
"=",
"Gtk",
".",
"ScrolledWindow",
"(",
")",
"code_box",
".",
"set_policy",
"(",
"Gtk",
".",
"PolicyType",
".",
"AUTOMATIC",
",",
"Gtk",
".",
"PolicyType",
".",
"AUTOMATIC",
")",
"code_box",
".",
"add",
"(",
"self",
".",
"_code_text_display",
")",
"notebook",
".",
"append_page",
"(",
"code_box",
",",
"Gtk",
".",
"Label",
"(",
"label",
"=",
"\"Generated Code\"",
")",
")",
"else",
":",
"self",
".",
"_code_text_display",
"=",
"None",
"# Error Messages for the block",
"self",
".",
"_error_messages_text_display",
"=",
"SimpleTextDisplay",
"(",
")",
"self",
".",
"_error_box",
"=",
"Gtk",
".",
"ScrolledWindow",
"(",
")",
"self",
".",
"_error_box",
".",
"set_policy",
"(",
"Gtk",
".",
"PolicyType",
".",
"AUTOMATIC",
",",
"Gtk",
".",
"PolicyType",
".",
"AUTOMATIC",
")",
"self",
".",
"_error_box",
".",
"add",
"(",
"self",
".",
"_error_messages_text_display",
")",
"vpaned",
".",
"pack2",
"(",
"self",
".",
"_error_box",
")",
"vpaned",
".",
"set_position",
"(",
"int",
"(",
"0.65",
"*",
"Constants",
".",
"MIN_DIALOG_HEIGHT",
")",
")",
"# Connect events",
"self",
".",
"connect",
"(",
"'key-press-event'",
",",
"self",
".",
"_handle_key_press",
")",
"self",
".",
"connect",
"(",
"'show'",
",",
"self",
".",
"update_gui",
")",
"self",
".",
"connect",
"(",
"'response'",
",",
"self",
".",
"_handle_response",
")",
"self",
".",
"show_all",
"(",
")"
] | https://github.com/gnuradio/gnuradio/blob/09c3c4fa4bfb1a02caac74cb5334dfe065391e3b/grc/gui/PropsDialog.py#L20-L105 |
||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/gtk/html.py | python | HtmlWinParser.SetFontBold | (*args, **kwargs) | return _html.HtmlWinParser_SetFontBold(*args, **kwargs) | SetFontBold(self, int x) | SetFontBold(self, int x) | [
"SetFontBold",
"(",
"self",
"int",
"x",
")"
] | def SetFontBold(*args, **kwargs):
"""SetFontBold(self, int x)"""
return _html.HtmlWinParser_SetFontBold(*args, **kwargs) | [
"def",
"SetFontBold",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_html",
".",
"HtmlWinParser_SetFontBold",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/html.py#L304-L306 |
|
choasup/caffe-yolo9000 | e8a476c4c23d756632f7a26c681a96e3ab672544 | python/caffe/coord_map.py | python | inverse | (coord_map) | return ax, 1 / a, -b / a | Invert a coord map by de-scaling and un-shifting;
this gives the backward mapping for the gradient. | Invert a coord map by de-scaling and un-shifting;
this gives the backward mapping for the gradient. | [
"Invert",
"a",
"coord",
"map",
"by",
"de",
"-",
"scaling",
"and",
"un",
"-",
"shifting",
";",
"this",
"gives",
"the",
"backward",
"mapping",
"for",
"the",
"gradient",
"."
] | def inverse(coord_map):
"""
Invert a coord map by de-scaling and un-shifting;
this gives the backward mapping for the gradient.
"""
ax, a, b = coord_map
return ax, 1 / a, -b / a | [
"def",
"inverse",
"(",
"coord_map",
")",
":",
"ax",
",",
"a",
",",
"b",
"=",
"coord_map",
"return",
"ax",
",",
"1",
"/",
"a",
",",
"-",
"b",
"/",
"a"
] | https://github.com/choasup/caffe-yolo9000/blob/e8a476c4c23d756632f7a26c681a96e3ab672544/python/caffe/coord_map.py#L106-L112 |
|
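The algebra above can be sanity-checked directly: a coord map (ax, a, b) sends x to a*x + b, so composing a map with its inverse must return x. A standalone sketch:

def apply_map(m, x):
    # evaluate the affine coordinate map (axis, scale, shift) at x
    ax, a, b = m
    return a * x + b

fwd = ('ax', 2.0, 3.0)      # x -> 2x + 3
bwd = inverse(fwd)          # x -> x/2 - 3/2
assert bwd == ('ax', 0.5, -1.5)
assert apply_map(bwd, apply_map(fwd, 10.0)) == 10.0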
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | wx/lib/ogl/_basic.py | python | Shape.GetDisableLabel | (self) | return self._disableLabel | TRUE if the default region will not be shown, FALSE otherwise. | TRUE if the default region will not be shown, FALSE otherwise. | [
"TRUE",
"if",
"the",
"default",
"region",
"will",
"not",
"be",
"shown",
"FALSE",
"otherwise",
"."
] | def GetDisableLabel(self):
"""TRUE if the default region will not be shown, FALSE otherwise."""
return self._disableLabel | [
"def",
"GetDisableLabel",
"(",
"self",
")",
":",
"return",
"self",
".",
"_disableLabel"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/ogl/_basic.py#L2030-L2032 |
|
google/fhir | d77f57706c1a168529b0b87ca7ccb1c0113e83c2 | py/google/fhir/json_format/wrappers/_primitive_wrappers.py | python | PrimitiveWrapper.get_element | (self) | return element | Returns the raw Element underlying the wrapped primitive.
Note that conversion-only extensions are removed prior to returning. | Returns the raw Element underlying the wrapped primitive. | [
"Returns",
"the",
"raw",
"Element",
"underlying",
"the",
"wrapped",
"primitive",
"."
] | def get_element(self) -> Optional[message.Message]:
"""Returns the raw Element underlying the wrapped primitive.
Note that conversion-only extensions are removed prior to returning.
"""
if not (proto_utils.field_is_set(self.wrapped, 'id') or
proto_utils.field_is_set(self.wrapped, 'extension')):
return None # Early-exit if we can't populate an Element
element = proto_utils.create_message_from_descriptor(
self.wrapped.DESCRIPTOR)
if proto_utils.field_is_set(self.wrapped, 'id'):
proto_utils.copy_common_field(self.wrapped, element, 'id')
extensions_list = cast(List[Any],
extensions.get_fhir_extensions(self.wrapped))
for extension in extensions_list:
if extension.url.value not in extensions.CONVERSION_ONLY_EXTENSION_URLS:
proto_utils.append_value_at_field(element, 'extension', extension)
return element | [
"def",
"get_element",
"(",
"self",
")",
"->",
"Optional",
"[",
"message",
".",
"Message",
"]",
":",
"if",
"not",
"(",
"proto_utils",
".",
"field_is_set",
"(",
"self",
".",
"wrapped",
",",
"'id'",
")",
"or",
"proto_utils",
".",
"field_is_set",
"(",
"self",
".",
"wrapped",
",",
"'extension'",
")",
")",
":",
"return",
"None",
"# Early-exit if we can't populate an Element",
"element",
"=",
"proto_utils",
".",
"create_message_from_descriptor",
"(",
"self",
".",
"wrapped",
".",
"DESCRIPTOR",
")",
"if",
"proto_utils",
".",
"field_is_set",
"(",
"self",
".",
"wrapped",
",",
"'id'",
")",
":",
"proto_utils",
".",
"copy_common_field",
"(",
"self",
".",
"wrapped",
",",
"element",
",",
"'id'",
")",
"extensions_list",
"=",
"cast",
"(",
"List",
"[",
"Any",
"]",
",",
"extensions",
".",
"get_fhir_extensions",
"(",
"self",
".",
"wrapped",
")",
")",
"for",
"extension",
"in",
"extensions_list",
":",
"if",
"extension",
".",
"url",
".",
"value",
"not",
"in",
"extensions",
".",
"CONVERSION_ONLY_EXTENSION_URLS",
":",
"proto_utils",
".",
"append_value_at_field",
"(",
"element",
",",
"'extension'",
",",
"extension",
")",
"return",
"element"
] | https://github.com/google/fhir/blob/d77f57706c1a168529b0b87ca7ccb1c0113e83c2/py/google/fhir/json_format/wrappers/_primitive_wrappers.py#L214-L234 |
|
hanpfei/chromium-net | 392cc1fa3a8f92f42e4071ab6e674d8e0482f83f | third_party/catapult/third_party/gsutil/gslib/gcs_json_api.py | python | GcsJsonApi._TranslateApitoolsException | (self, e, bucket_name=None, object_name=None,
generation=None, not_found_exception=None) | Translates apitools exceptions into their gsutil Cloud Api equivalents.
Args:
e: Any exception in TRANSLATABLE_APITOOLS_EXCEPTIONS.
bucket_name: Optional bucket name in request that caused the exception.
object_name: Optional object name in request that caused the exception.
generation: Optional generation in request that caused the exception.
not_found_exception: Optional exception to raise in the not-found case.
Returns:
CloudStorageApiServiceException for translatable exceptions, None
otherwise. | Translates apitools exceptions into their gsutil Cloud Api equivalents. | [
"Translates",
"apitools",
"exceptions",
"into",
"their",
"gsutil",
"Cloud",
"Api",
"equivalents",
"."
] | def _TranslateApitoolsException(self, e, bucket_name=None, object_name=None,
generation=None, not_found_exception=None):
"""Translates apitools exceptions into their gsutil Cloud Api equivalents.
Args:
e: Any exception in TRANSLATABLE_APITOOLS_EXCEPTIONS.
bucket_name: Optional bucket name in request that caused the exception.
object_name: Optional object name in request that caused the exception.
generation: Optional generation in request that caused the exception.
not_found_exception: Optional exception to raise in the not-found case.
Returns:
CloudStorageApiServiceException for translatable exceptions, None
otherwise.
"""
if isinstance(e, apitools_exceptions.HttpError):
message = self._GetMessageFromHttpError(e)
if e.status_code == 400:
# It is possible that the Project ID is incorrect. Unfortunately the
# JSON API does not give us much information about what part of the
# request was bad.
return BadRequestException(message or 'Bad Request',
status=e.status_code)
elif e.status_code == 401:
if 'Login Required' in str(e):
return AccessDeniedException(
message or 'Access denied: login required.',
status=e.status_code)
elif e.status_code == 403:
if 'The account for the specified project has been disabled' in str(e):
return AccessDeniedException(message or 'Account disabled.',
status=e.status_code)
elif 'Daily Limit for Unauthenticated Use Exceeded' in str(e):
return AccessDeniedException(
message or 'Access denied: quota exceeded. '
'Is your project ID valid?',
status=e.status_code)
elif 'The bucket you tried to delete was not empty.' in str(e):
return NotEmptyException('BucketNotEmpty (%s)' % bucket_name,
status=e.status_code)
elif ('The bucket you tried to create requires domain ownership '
'verification.' in str(e)):
return AccessDeniedException(
'The bucket you tried to create requires domain ownership '
'verification. Please see '
'https://developers.google.com/storage/docs/bucketnaming'
'?hl=en#verification for more details.', status=e.status_code)
elif 'User Rate Limit Exceeded' in str(e):
return AccessDeniedException('Rate limit exceeded. Please retry this '
'request later.', status=e.status_code)
elif 'Access Not Configured' in str(e):
return AccessDeniedException(
'Access Not Configured. Please go to the Google Developers '
'Console (https://cloud.google.com/console#/project) for your '
'project, select APIs and Auth and enable the '
'Google Cloud Storage JSON API.',
status=e.status_code)
else:
return AccessDeniedException(message or e.message,
status=e.status_code)
elif e.status_code == 404:
if not_found_exception:
# The exception is pre-constructed prior to translation; the HTTP
# status code isn't available at that time.
setattr(not_found_exception, 'status', e.status_code)
return not_found_exception
elif bucket_name:
if object_name:
return CreateObjectNotFoundException(e.status_code, self.provider,
bucket_name, object_name,
generation=generation)
return CreateBucketNotFoundException(e.status_code, self.provider,
bucket_name)
return NotFoundException(e.message, status=e.status_code)
elif e.status_code == 409 and bucket_name:
if 'The bucket you tried to delete was not empty.' in str(e):
return NotEmptyException('BucketNotEmpty (%s)' % bucket_name,
status=e.status_code)
return ServiceException(
'Bucket %s already exists.' % bucket_name, status=e.status_code)
elif e.status_code == 412:
return PreconditionException(message, status=e.status_code)
elif (e.status_code == 503 and
not self.http.disable_ssl_certificate_validation):
return ServiceException(_VALIDATE_CERTIFICATES_503_MESSAGE,
status=e.status_code)
return ServiceException(message, status=e.status_code)
elif isinstance(e, apitools_exceptions.TransferInvalidError):
return ServiceException('Transfer invalid (possible encoding error: %s)'
% str(e)) | [
"def",
"_TranslateApitoolsException",
"(",
"self",
",",
"e",
",",
"bucket_name",
"=",
"None",
",",
"object_name",
"=",
"None",
",",
"generation",
"=",
"None",
",",
"not_found_exception",
"=",
"None",
")",
":",
"if",
"isinstance",
"(",
"e",
",",
"apitools_exceptions",
".",
"HttpError",
")",
":",
"message",
"=",
"self",
".",
"_GetMessageFromHttpError",
"(",
"e",
")",
"if",
"e",
".",
"status_code",
"==",
"400",
":",
"# It is possible that the Project ID is incorrect. Unfortunately the",
"# JSON API does not give us much information about what part of the",
"# request was bad.",
"return",
"BadRequestException",
"(",
"message",
"or",
"'Bad Request'",
",",
"status",
"=",
"e",
".",
"status_code",
")",
"elif",
"e",
".",
"status_code",
"==",
"401",
":",
"if",
"'Login Required'",
"in",
"str",
"(",
"e",
")",
":",
"return",
"AccessDeniedException",
"(",
"message",
"or",
"'Access denied: login required.'",
",",
"status",
"=",
"e",
".",
"status_code",
")",
"elif",
"e",
".",
"status_code",
"==",
"403",
":",
"if",
"'The account for the specified project has been disabled'",
"in",
"str",
"(",
"e",
")",
":",
"return",
"AccessDeniedException",
"(",
"message",
"or",
"'Account disabled.'",
",",
"status",
"=",
"e",
".",
"status_code",
")",
"elif",
"'Daily Limit for Unauthenticated Use Exceeded'",
"in",
"str",
"(",
"e",
")",
":",
"return",
"AccessDeniedException",
"(",
"message",
"or",
"'Access denied: quota exceeded. '",
"'Is your project ID valid?'",
",",
"status",
"=",
"e",
".",
"status_code",
")",
"elif",
"'The bucket you tried to delete was not empty.'",
"in",
"str",
"(",
"e",
")",
":",
"return",
"NotEmptyException",
"(",
"'BucketNotEmpty (%s)'",
"%",
"bucket_name",
",",
"status",
"=",
"e",
".",
"status_code",
")",
"elif",
"(",
"'The bucket you tried to create requires domain ownership '",
"'verification.'",
"in",
"str",
"(",
"e",
")",
")",
":",
"return",
"AccessDeniedException",
"(",
"'The bucket you tried to create requires domain ownership '",
"'verification. Please see '",
"'https://developers.google.com/storage/docs/bucketnaming'",
"'?hl=en#verification for more details.'",
",",
"status",
"=",
"e",
".",
"status_code",
")",
"elif",
"'User Rate Limit Exceeded'",
"in",
"str",
"(",
"e",
")",
":",
"return",
"AccessDeniedException",
"(",
"'Rate limit exceeded. Please retry this '",
"'request later.'",
",",
"status",
"=",
"e",
".",
"status_code",
")",
"elif",
"'Access Not Configured'",
"in",
"str",
"(",
"e",
")",
":",
"return",
"AccessDeniedException",
"(",
"'Access Not Configured. Please go to the Google Developers '",
"'Console (https://cloud.google.com/console#/project) for your '",
"'project, select APIs and Auth and enable the '",
"'Google Cloud Storage JSON API.'",
",",
"status",
"=",
"e",
".",
"status_code",
")",
"else",
":",
"return",
"AccessDeniedException",
"(",
"message",
"or",
"e",
".",
"message",
",",
"status",
"=",
"e",
".",
"status_code",
")",
"elif",
"e",
".",
"status_code",
"==",
"404",
":",
"if",
"not_found_exception",
":",
"# The exception is pre-constructed prior to translation; the HTTP",
"# status code isn't available at that time.",
"setattr",
"(",
"not_found_exception",
",",
"'status'",
",",
"e",
".",
"status_code",
")",
"return",
"not_found_exception",
"elif",
"bucket_name",
":",
"if",
"object_name",
":",
"return",
"CreateObjectNotFoundException",
"(",
"e",
".",
"status_code",
",",
"self",
".",
"provider",
",",
"bucket_name",
",",
"object_name",
",",
"generation",
"=",
"generation",
")",
"return",
"CreateBucketNotFoundException",
"(",
"e",
".",
"status_code",
",",
"self",
".",
"provider",
",",
"bucket_name",
")",
"return",
"NotFoundException",
"(",
"e",
".",
"message",
",",
"status",
"=",
"e",
".",
"status_code",
")",
"elif",
"e",
".",
"status_code",
"==",
"409",
"and",
"bucket_name",
":",
"if",
"'The bucket you tried to delete was not empty.'",
"in",
"str",
"(",
"e",
")",
":",
"return",
"NotEmptyException",
"(",
"'BucketNotEmpty (%s)'",
"%",
"bucket_name",
",",
"status",
"=",
"e",
".",
"status_code",
")",
"return",
"ServiceException",
"(",
"'Bucket %s already exists.'",
"%",
"bucket_name",
",",
"status",
"=",
"e",
".",
"status_code",
")",
"elif",
"e",
".",
"status_code",
"==",
"412",
":",
"return",
"PreconditionException",
"(",
"message",
",",
"status",
"=",
"e",
".",
"status_code",
")",
"elif",
"(",
"e",
".",
"status_code",
"==",
"503",
"and",
"not",
"self",
".",
"http",
".",
"disable_ssl_certificate_validation",
")",
":",
"return",
"ServiceException",
"(",
"_VALIDATE_CERTIFICATES_503_MESSAGE",
",",
"status",
"=",
"e",
".",
"status_code",
")",
"return",
"ServiceException",
"(",
"message",
",",
"status",
"=",
"e",
".",
"status_code",
")",
"elif",
"isinstance",
"(",
"e",
",",
"apitools_exceptions",
".",
"TransferInvalidError",
")",
":",
"return",
"ServiceException",
"(",
"'Transfer invalid (possible encoding error: %s)'",
"%",
"str",
"(",
"e",
")",
")"
] | https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/gsutil/gslib/gcs_json_api.py#L1353-L1443 |
||
Tom94/practical-path-guiding | fcf01afb436184e8a74bf300aa89f69b03ab25a2 | mitsuba/data/scons/icl12.py | python | merge_script_vars | (env,script,args=None,vars=None) | | | This merges the data retrieved from the script into the Environment
by prepending it.
script is the name of the script, args is optional arguments to pass
vars are var we want to retrieve, if None it will retrieve everything found | This merges the data retrieved from the script into the Environment
by prepending it.
script is the name of the script, args is optional arguments to pass
vars are var we want to retrieve, if None it will retrieve everything found | [
"This",
"merges",
"the",
"data",
"retieved",
"from",
"the",
"script",
"in",
"to",
"the",
"Enviroment",
"by",
"prepending",
"it",
".",
"script",
"is",
"the",
"name",
"of",
"the",
"script",
"args",
"is",
"optional",
"arguments",
"to",
"pass",
"vars",
"are",
"var",
"we",
"want",
"to",
"retrieve",
"if",
"None",
"it",
"will",
"retieve",
"everything",
"found"
] | def merge_script_vars(env,script,args=None,vars=None):
'''
This merges the data retieved from the script in to the Enviroment
by prepending it.
script is the name of the script, args is optional arguments to pass
vars are var we want to retrieve, if None it will retieve everything found
'''
shell_env=get_script_env(env,script,args,vars)
for k, v in shell_env.iteritems():
env.PrependENVPath(k, v, delete_existing=1) | [
"def",
"merge_script_vars",
"(",
"env",
",",
"script",
",",
"args",
"=",
"None",
",",
"vars",
"=",
"None",
")",
":",
"shell_env",
"=",
"get_script_env",
"(",
"env",
",",
"script",
",",
"args",
",",
"vars",
")",
"for",
"k",
",",
"v",
"in",
"shell_env",
".",
"iteritems",
"(",
")",
":",
"env",
".",
"PrependENVPath",
"(",
"k",
",",
"v",
",",
"delete_existing",
"=",
"1",
")"
] | https://github.com/Tom94/practical-path-guiding/blob/fcf01afb436184e8a74bf300aa89f69b03ab25a2/mitsuba/data/scons/icl12.py#L84-L93 |
||
eventql/eventql | 7ca0dbb2e683b525620ea30dc40540a22d5eb227 | deps/3rdparty/spidermonkey/mozjs/media/webrtc/trunk/tools/gyp/tools/pretty_vcproj.py | python | FlattenFilter | (node) | return node_list | Returns a list of all the node and sub nodes. | Returns a list of all the node and sub nodes. | [
"Returns",
"a",
"list",
"of",
"all",
"the",
"node",
"and",
"sub",
"nodes",
"."
] | def FlattenFilter(node):
"""Returns a list of all the node and sub nodes."""
node_list = []
if (node.attributes and
node.getAttribute('Name') == '_excluded_files'):
# We don't add the "_excluded_files" filter.
return []
for current in node.childNodes:
if current.nodeName == 'Filter':
node_list.extend(FlattenFilter(current))
else:
node_list.append(current)
return node_list | [
"def",
"FlattenFilter",
"(",
"node",
")",
":",
"node_list",
"=",
"[",
"]",
"if",
"(",
"node",
".",
"attributes",
"and",
"node",
".",
"getAttribute",
"(",
"'Name'",
")",
"==",
"'_excluded_files'",
")",
":",
"# We don't add the \"_excluded_files\" filter.",
"return",
"[",
"]",
"for",
"current",
"in",
"node",
".",
"childNodes",
":",
"if",
"current",
".",
"nodeName",
"==",
"'Filter'",
":",
"node_list",
".",
"extend",
"(",
"FlattenFilter",
"(",
"current",
")",
")",
"else",
":",
"node_list",
".",
"append",
"(",
"current",
")",
"return",
"node_list"
] | https://github.com/eventql/eventql/blob/7ca0dbb2e683b525620ea30dc40540a22d5eb227/deps/3rdparty/spidermonkey/mozjs/media/webrtc/trunk/tools/gyp/tools/pretty_vcproj.py#L95-L110 |
|
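A small sketch exercising the helper above on a hand-built DOM; the vcproj fragment is illustrative.

from xml.dom.minidom import parseString

doc = parseString(
    '<Filter Name="src">'
    '<File RelativePath="a.cc"/>'
    '<Filter Name="sub"><File RelativePath="b.cc"/></Filter>'
    '<Filter Name="_excluded_files"><File RelativePath="skip.cc"/></Filter>'
    '</Filter>')

# Nested filters are flattened; the "_excluded_files" filter is dropped.
files = FlattenFilter(doc.documentElement)
print([f.getAttribute('RelativePath') for f in files])  # ['a.cc', 'b.cc']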
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_carbon/html.py | python | HtmlRenderingState.SetFgColour | (*args, **kwargs) | return _html.HtmlRenderingState_SetFgColour(*args, **kwargs) | SetFgColour(self, Colour c) | SetFgColour(self, Colour c) | [
"SetFgColour",
"(",
"self",
"Colour",
"c",
")"
] | def SetFgColour(*args, **kwargs):
"""SetFgColour(self, Colour c)"""
return _html.HtmlRenderingState_SetFgColour(*args, **kwargs) | [
"def",
"SetFgColour",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_html",
".",
"HtmlRenderingState_SetFgColour",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/html.py#L517-L519 |
|
gwaldron/osgearth | 4c521857d59a69743e4a9cedba00afe570f984e8 | src/third_party/tinygltf/deps/cpplint.py | python | FileInfo.NoExtension | (self) | return '/'.join(self.Split()[0:2]) | File has no source file extension. | File has no source file extension. | [
"File",
"has",
"no",
"source",
"file",
"extension",
"."
] | def NoExtension(self):
"""File has no source file extension."""
return '/'.join(self.Split()[0:2]) | [
"def",
"NoExtension",
"(",
"self",
")",
":",
"return",
"'/'",
".",
"join",
"(",
"self",
".",
"Split",
"(",
")",
"[",
"0",
":",
"2",
"]",
")"
] | https://github.com/gwaldron/osgearth/blob/4c521857d59a69743e4a9cedba00afe570f984e8/src/third_party/tinygltf/deps/cpplint.py#L1055-L1057 |
|
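A standalone sketch of what the NoExtension entry above computes, assuming cpplint's Split() yields a (directory, basename, extension) triple: joining the first two components gives the path with its extension removed.

```python
import os

def no_extension(path):
    # Illustrative re-implementation without the cpplint FileInfo class.
    dirname, basename = os.path.split(path)
    root, _ext = os.path.splitext(basename)  # drop '.cc', '.h', ...
    return '/'.join([dirname, root])

print(no_extension('src/widgets/button.cc'))  # src/widgets/button
```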
yushroom/FishEngine | a4b9fb9b0a6dc202f7990e75f4b7d8d5163209d9 | Script/reflect/clang/cindex.py | python | SourceRange.__contains__ | (self, other) | return False | Useful to detect the Token/Lexer bug | Useful to detect the Token/Lexer bug | [
"Useful",
"to",
"detect",
"the",
"Token",
"/",
"Lexer",
"bug"
] | def __contains__(self, other):
"""Useful to detect the Token/Lexer bug"""
if not isinstance(other, SourceLocation):
return False
if other.file is None and self.start.file is None:
pass
elif ( self.start.file.name != other.file.name or
other.file.name != self.end.file.name):
# same file name
return False
# same file, in between lines
if self.start.line < other.line < self.end.line:
return True
elif self.start.line == other.line:
# same file first line
if self.start.column <= other.column:
return True
elif other.line == self.end.line:
# same file last line
if other.column <= self.end.column:
return True
return False | [
"def",
"__contains__",
"(",
"self",
",",
"other",
")",
":",
"if",
"not",
"isinstance",
"(",
"other",
",",
"SourceLocation",
")",
":",
"return",
"False",
"if",
"other",
".",
"file",
"is",
"None",
"and",
"self",
".",
"start",
".",
"file",
"is",
"None",
":",
"pass",
"elif",
"(",
"self",
".",
"start",
".",
"file",
".",
"name",
"!=",
"other",
".",
"file",
".",
"name",
"or",
"other",
".",
"file",
".",
"name",
"!=",
"self",
".",
"end",
".",
"file",
".",
"name",
")",
":",
"# same file name",
"return",
"False",
"# same file, in between lines",
"if",
"self",
".",
"start",
".",
"line",
"<",
"other",
".",
"line",
"<",
"self",
".",
"end",
".",
"line",
":",
"return",
"True",
"elif",
"self",
".",
"start",
".",
"line",
"==",
"other",
".",
"line",
":",
"# same file first line",
"if",
"self",
".",
"start",
".",
"column",
"<=",
"other",
".",
"column",
":",
"return",
"True",
"elif",
"other",
".",
"line",
"==",
"self",
".",
"end",
".",
"line",
":",
"# same file last line",
"if",
"other",
".",
"column",
"<=",
"self",
".",
"end",
".",
"column",
":",
"return",
"True",
"return",
"False"
] | https://github.com/yushroom/FishEngine/blob/a4b9fb9b0a6dc202f7990e75f4b7d8d5163209d9/Script/reflect/clang/cindex.py#L269-L290 |
|
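A hedged sketch of the containment test in the __contains__ entry above, exercised through the in operator on a cursor's extent. It assumes libclang is installed and discoverable by clang.cindex; the file name and source text are illustrative.

```python
import clang.cindex as ci

index = ci.Index.create()
# Parse a one-line file from memory rather than from disk.
tu = index.parse('example.c',
                 unsaved_files=[('example.c', 'int x = 1;\n')])
f = ci.File.from_name(tu, 'example.c')
loc = ci.SourceLocation.from_position(tu, f, 1, 5)  # the 'x'
for cursor in tu.cursor.get_children():
    if loc in cursor.extent:  # invokes SourceRange.__contains__
        print(cursor.kind, cursor.spelling)
```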
ChromiumWebApps/chromium | c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7 | tools/deep_memory_profiler/lib/bucket.py | python | BucketSet.load | (self, prefix) | Loads all related bucket files.
Args:
prefix: A prefix string for bucket file names. | Loads all related bucket files. | [
"Loads",
"all",
"related",
"bucket",
"files",
"."
] | def load(self, prefix):
"""Loads all related bucket files.
Args:
prefix: A prefix string for bucket file names.
"""
LOGGER.info('Loading bucket files.')
n = 0
skipped = 0
while True:
path = '%s.%04d.buckets' % (prefix, n)
if not os.path.exists(path) or not os.stat(path).st_size:
if skipped > 10:
break
n += 1
skipped += 1
continue
LOGGER.info(' %s' % path)
with open(path, 'r') as f:
self._load_file(f)
n += 1
skipped = 0 | [
"def",
"load",
"(",
"self",
",",
"prefix",
")",
":",
"LOGGER",
".",
"info",
"(",
"'Loading bucket files.'",
")",
"n",
"=",
"0",
"skipped",
"=",
"0",
"while",
"True",
":",
"path",
"=",
"'%s.%04d.buckets'",
"%",
"(",
"prefix",
",",
"n",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"path",
")",
"or",
"not",
"os",
".",
"stat",
"(",
"path",
")",
".",
"st_size",
":",
"if",
"skipped",
">",
"10",
":",
"break",
"n",
"+=",
"1",
"skipped",
"+=",
"1",
"continue",
"LOGGER",
".",
"info",
"(",
"' %s'",
"%",
"path",
")",
"with",
"open",
"(",
"path",
",",
"'r'",
")",
"as",
"f",
":",
"self",
".",
"_load_file",
"(",
"f",
")",
"n",
"+=",
"1",
"skipped",
"=",
"0"
] | https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/tools/deep_memory_profiler/lib/bucket.py#L120-L142 |
||
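The probing loop in the load entry above is self-contained enough to lift out: try "prefix.NNNN.buckets" names in order, tolerating a run of up to 10 consecutive missing or empty files before giving up. A minimal standalone sketch of the same pattern:

```python
import os

def scan_bucket_files(prefix):
    # Mirrors BucketSet.load's scan, but only collects the paths.
    paths, n, skipped = [], 0, 0
    while True:
        path = '%s.%04d.buckets' % (prefix, n)
        if not os.path.exists(path) or not os.stat(path).st_size:
            if skipped > 10:
                break  # a long run of gaps: assume no more files
            n += 1
            skipped += 1
            continue
        paths.append(path)
        n += 1
        skipped = 0
    return paths
```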
tensorflow/tensorflow | 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e | tensorflow/python/platform/self_check.py | python | preload_check | () | Raises an exception if the environment is not correctly configured.
Raises:
ImportError: If the check detects that the environment is not correctly
configured, and attempting to load the TensorFlow runtime will fail. | Raises an exception if the environment is not correctly configured. | [
"Raises",
"an",
"exception",
"if",
"the",
"environment",
"is",
"not",
"correctly",
"configured",
"."
] | def preload_check():
"""Raises an exception if the environment is not correctly configured.
Raises:
ImportError: If the check detects that the environment is not correctly
configured, and attempting to load the TensorFlow runtime will fail.
"""
if os.name == "nt":
# Attempt to load any DLLs that the Python extension depends on before
# we load the Python extension, so that we can raise an actionable error
# message if they are not found.
if MSVCP_DLL_NAMES in build_info.build_info:
missing = []
for dll_name in build_info.build_info[MSVCP_DLL_NAMES].split(","):
try:
ctypes.WinDLL(dll_name)
except OSError:
missing.append(dll_name)
if missing:
raise ImportError(
"Could not find the DLL(s) %r. TensorFlow requires that these DLLs "
"be installed in a directory that is named in your %%PATH%% "
"environment variable. You may install these DLLs by downloading "
'"Microsoft C++ Redistributable for Visual Studio 2015, 2017 and '
'2019" for your platform from this URL: '
"https://support.microsoft.com/help/2977003/the-latest-supported-visual-c-downloads"
% " or ".join(missing))
else:
# Load a library that performs CPU feature guard checking as a part of its
# static initialization. Doing this here as a preload check makes it more
# likely that we detect any CPU feature incompatibilities before we trigger
# them (which would typically result in SIGILL).
cpu_feature_guard_library = os.path.join(
os.path.dirname(__file__), "../../core/platform/_cpu_feature_guard.so")
ctypes.CDLL(cpu_feature_guard_library) | [
"def",
"preload_check",
"(",
")",
":",
"if",
"os",
".",
"name",
"==",
"\"nt\"",
":",
"# Attempt to load any DLLs that the Python extension depends on before",
"# we load the Python extension, so that we can raise an actionable error",
"# message if they are not found.",
"if",
"MSVCP_DLL_NAMES",
"in",
"build_info",
".",
"build_info",
":",
"missing",
"=",
"[",
"]",
"for",
"dll_name",
"in",
"build_info",
".",
"build_info",
"[",
"MSVCP_DLL_NAMES",
"]",
".",
"split",
"(",
"\",\"",
")",
":",
"try",
":",
"ctypes",
".",
"WinDLL",
"(",
"dll_name",
")",
"except",
"OSError",
":",
"missing",
".",
"append",
"(",
"dll_name",
")",
"if",
"missing",
":",
"raise",
"ImportError",
"(",
"\"Could not find the DLL(s) %r. TensorFlow requires that these DLLs \"",
"\"be installed in a directory that is named in your %%PATH%% \"",
"\"environment variable. You may install these DLLs by downloading \"",
"'\"Microsoft C++ Redistributable for Visual Studio 2015, 2017 and '",
"'2019\" for your platform from this URL: '",
"\"https://support.microsoft.com/help/2977003/the-latest-supported-visual-c-downloads\"",
"%",
"\" or \"",
".",
"join",
"(",
"missing",
")",
")",
"else",
":",
"# Load a library that performs CPU feature guard checking as a part of its",
"# static initialization. Doing this here as a preload check makes it more",
"# likely that we detect any CPU feature incompatibilities before we trigger",
"# them (which would typically result in SIGILL).",
"cpu_feature_guard_library",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"__file__",
")",
",",
"\"../../core/platform/_cpu_feature_guard.so\"",
")",
"ctypes",
".",
"CDLL",
"(",
"cpu_feature_guard_library",
")"
] | https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/platform/self_check.py#L31-L65 |
||
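The preload_check entry above is normally invoked by TensorFlow's own import machinery, but it can be called directly to surface missing MSVC runtime DLLs (on Windows) or CPU-feature problems before the heavy native import. A hedged sketch; the module path follows this entry, though it may differ across TF versions.

```python
from tensorflow.python.platform import self_check

try:
    self_check.preload_check()
except ImportError as err:
    # Missing MSVC runtime DLLs or unsupported CPU features land here.
    print('TensorFlow runtime prerequisites missing:', err)
```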
benoitsteiner/tensorflow-opencl | cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5 | tensorflow/python/feature_column/feature_column.py | python | indicator_column | (categorical_column) | return _IndicatorColumn(categorical_column) | Represents multi-hot representation of given categorical column.
Used to wrap any `categorical_column_*` (e.g., to feed to DNN). Use
`embedding_column` if the inputs are sparse.
```python
name = indicator_column(categorical_column_with_vocabulary_list(
'name', ['bob', 'george', 'wanda']))
columns = [name, ...]
features = tf.parse_example(..., features=make_parse_example_spec(columns))
dense_tensor = input_layer(features, columns)
dense_tensor == [[1, 0, 0]] # If "name" bytes_list is ["bob"]
dense_tensor == [[1, 0, 1]] # If "name" bytes_list is ["bob", "wanda"]
dense_tensor == [[2, 0, 0]] # If "name" bytes_list is ["bob", "bob"]
```
Args:
categorical_column: A `_CategoricalColumn` which is created by
`categorical_column_with_*` or `crossed_column` functions.
Returns:
An `_IndicatorColumn`. | Represents multi-hot representation of given categorical column. | [
"Represents",
"multi",
"-",
"hot",
"representation",
"of",
"given",
"categorical",
"column",
"."
] | def indicator_column(categorical_column):
"""Represents multi-hot representation of given categorical column.
Used to wrap any `categorical_column_*` (e.g., to feed to DNN). Use
`embedding_column` if the inputs are sparse.
```python
name = indicator_column(categorical_column_with_vocabulary_list(
'name', ['bob', 'george', 'wanda']))
columns = [name, ...]
features = tf.parse_example(..., features=make_parse_example_spec(columns))
dense_tensor = input_layer(features, columns)
dense_tensor == [[1, 0, 0]] # If "name" bytes_list is ["bob"]
dense_tensor == [[1, 0, 1]] # If "name" bytes_list is ["bob", "wanda"]
dense_tensor == [[2, 0, 0]] # If "name" bytes_list is ["bob", "bob"]
```
Args:
categorical_column: A `_CategoricalColumn` which is created by
`categorical_column_with_*` or `crossed_column` functions.
Returns:
An `_IndicatorColumn`.
"""
return _IndicatorColumn(categorical_column) | [
"def",
"indicator_column",
"(",
"categorical_column",
")",
":",
"return",
"_IndicatorColumn",
"(",
"categorical_column",
")"
] | https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/python/feature_column/feature_column.py#L1061-L1086 |
|
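A runnable TF 1.x expansion of the docstring example in the indicator_column entry above; input_layer and Session follow the graph-mode API of this snapshot (exact module paths may differ across versions), and the vocabulary lookup table must be initialized before evaluation.

```python
import tensorflow as tf  # TF 1.x assumed, matching this code snapshot

name = tf.feature_column.categorical_column_with_vocabulary_list(
    'name', ['bob', 'george', 'wanda'])
features = {'name': tf.constant([['bob'], ['wanda']])}
dense = tf.feature_column.input_layer(
    features, [tf.feature_column.indicator_column(name)])
with tf.Session() as sess:
    sess.run(tf.tables_initializer())  # needed for the vocabulary lookup
    print(sess.run(dense))             # [[1. 0. 0.] [0. 0. 1.]]
```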
tangzhenyu/Scene-Text-Understanding | 0f7ffc7aea5971a50cdc03d33d0a41075285948b | SynthText_Chinese/colorize3_poisson.py | python | FontColor.sample_normal | (self, col_mean, col_std) | return np.clip(col_sample, 0, 255).astype('uint8') | sample from a normal distribution centered around COL_MEAN
with standard deviation = COL_STD. | sample from a normal distribution centered around COL_MEAN
with standard deviation = COL_STD. | [
"sample",
"from",
"a",
"normal",
"distribution",
"centered",
"around",
"COL_MEAN",
"with",
"standard",
"deviation",
"=",
"COL_STD",
"."
] | def sample_normal(self, col_mean, col_std):
"""
sample from a normal distribution centered around COL_MEAN
with standard deviation = COL_STD.
"""
col_sample = col_mean + col_std * np.random.randn()
return np.clip(col_sample, 0, 255).astype('uint8') | [
"def",
"sample_normal",
"(",
"self",
",",
"col_mean",
",",
"col_std",
")",
":",
"col_sample",
"=",
"col_mean",
"+",
"col_std",
"*",
"np",
".",
"random",
".",
"randn",
"(",
")",
"return",
"np",
".",
"clip",
"(",
"col_sample",
",",
"0",
",",
"255",
")",
".",
"astype",
"(",
"'uint8'",
")"
] | https://github.com/tangzhenyu/Scene-Text-Understanding/blob/0f7ffc7aea5971a50cdc03d33d0a41075285948b/SynthText_Chinese/colorize3_poisson.py#L57-L63 |
|
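The sample_normal entry above is easy to exercise standalone; this sketch assumes the colour mean is an RGB triple, matching how SynthText draws font colours.

```python
import numpy as np

def sample_normal(col_mean, col_std):
    # Draw one jittered copy of the mean colour, clamped to 8-bit range.
    col_sample = col_mean + col_std * np.random.randn()
    return np.clip(col_sample, 0, 255).astype('uint8')

print(sample_normal(np.array([120, 30, 200]), 15.0))  # varies per draw
```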
thalium/icebox | 99d147d5b9269222225443ce171b4fd46d8985d4 | third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2.py | python | parserCtxt.parseEndTag | (self) | parse an end of tag [42] ETag ::= '</' Name S? '>' With
namespace [NS 9] ETag ::= '</' QName S? '>' | parse an end of tag [42] ETag ::= '</' Name S? '>' With
namespace [NS 9] ETag ::= '</' QName S? '>' | [
"parse",
"an",
"end",
"of",
"tag",
"[",
"42",
"]",
"ETag",
"::",
"=",
"<",
"/",
"Name",
"S?",
">",
"With",
"namespace",
"[",
"NS",
"9",
"]",
"ETag",
"::",
"=",
"<",
"/",
"QName",
"S?",
">"
] | def parseEndTag(self):
"""parse an end of tag [42] ETag ::= '</' Name S? '>' With
namespace [NS 9] ETag ::= '</' QName S? '>' """
libxml2mod.xmlParseEndTag(self._o) | [
"def",
"parseEndTag",
"(",
"self",
")",
":",
"libxml2mod",
".",
"xmlParseEndTag",
"(",
"self",
".",
"_o",
")"
] | https://github.com/thalium/icebox/blob/99d147d5b9269222225443ce171b4fd46d8985d4/third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2.py#L5277-L5280 |
||
dmlc/nnvm | dab5ce8ab6adbf4edd8bd2fa89f1a99f343b6e38 | python/nnvm/compiler/build_module.py | python | build | (graph, target=None, shape=None, dtype="float32",
params=None, target_host=None, layout=None) | return graph, libmod, params | Build graph into runtime library.
The build function will optimize the graph and do the compilation.
When params is provided, the compiler might split the graph to
pre-compute certain values, so the final execution graph can
be different from the original one.
Parameters
----------
graph : Graph
The graph to be used in lowering
target : str or :any:`tvm.target.Target`, optional
The build target
shape : dict of str to tuple, optional
The input shape to the graph
dtype : str or dict of str to str
The input types to the graph
params : dict of str to NDArray
Input parameters to the graph that do not change
during inference time. Used for pre-compute
folding optimization.
target_host : str or :any:`tvm.target.Target`, optional
Host compilation target, if target is device.
When TVM compiles device specific program such as CUDA,
we also need host(CPU) side code to interact with the driver
to set up the dimensions and parameters correctly.
target_host is used to specify the host side codegen target.
By default, llvm is used if it is enabled,
otherwise a stackvm interpreter is used.
layout : dict of str to str or str, optional
The input layout
Returns
-------
graph : Graph
The final execution graph.
libmod : tvm.Module
The module that comes with the execution graph
params : dict of str to NDArray
The updated parameters of graph if params is passed.
This can be different from the params passed in. | Build graph into runtime library. | [
"Build",
"graph",
"into",
"runtime",
"library",
"."
] | def build(graph, target=None, shape=None, dtype="float32",
params=None, target_host=None, layout=None):
"""Build graph into runtime library.
The build function will optimize the graph and do the compilation.
When params is provided, the compiler might split the graph to
pre-compute certain values, so the final execution graph can
be different from the original one.
Parameters
----------
graph : Graph
The graph to be used in lowering
target : str or :any:`tvm.target.Target`, optional
The build target
shape : dict of str to tuple, optional
The input shape to the graph
dtype : str or dict of str to str
The input types to the graph
params : dict of str to NDArray
Input parameters to the graph that do not change
during inference time. Used for pre-compute
folding optimization.
target_host : str or :any:`tvm.target.Target`, optional
Host compilation target, if target is device.
When TVM compiles device specific program such as CUDA,
we also need host(CPU) side code to interact with the driver
to set up the dimensions and parameters correctly.
target_host is used to specify the host side codegen target.
By default, llvm is used if it is enabled,
otherwise a stackvm interpreter is used.
layout : dict of str to str or str, optional
The input layout
Returns
-------
graph : Graph
The final execution graph.
libmod : tvm.Module
The module that comes with the execution graph
params : dict of str to NDArray
The updated parameters of graph if params is passed.
This can be different from the params passed in.
"""
target = target if target else tvm.target.current_target()
if target is None:
raise ValueError("Target is not set in env or passed as argument.")
target = tvm.target.create(target)
shape = shape if shape else {}
if not isinstance(shape, dict):
raise TypeError("require shape to be dict")
for value in shape.values():
if not all(isinstance(x, int) for x in value):
raise TypeError("shape value must be int iterator")
cfg = BuildConfig.current
graph = graph if isinstance(graph, _graph.Graph) else _graph.create(graph)
shape, dtype = _update_shape_dtype(shape, dtype, params)
# correct layout if necessary
layout = layout if layout else {}
graph = graph_attr.set_layout_inputs(graph, layout)
graph = graph.apply("CorrectLayout")
index = graph.index
layouts = graph.json_attr("layout")
layout = {x : layouts[index.entry_id(x)] for x in index.input_names}
# Initial pass do shape type inference
ishape, _ = graph_util.infer_shape(graph, **shape)
shape.update(zip(graph.index.input_names, ishape))
if not isinstance(dtype, str):
idtype, _ = graph_util.infer_dtype(graph, **dtype)
dtype.update(zip(graph.index.input_names, idtype))
# Initialize all variables specified in _all_var_init
init_var = {}
if _all_var_init:
init_var = initialize_variables(shape, dtype)
# Apply optimization
with target:
graph = optimize(graph, shape, dtype, layout)
# Precompute prune
if params and cfg.pass_enabled("PrecomputePrune"):
graph, params = precompute_prune(graph, params)
shape, dtype = _update_shape_dtype(shape, dtype, params)
# Operator Fusion and generation
graph = graph_attr.set_shape_inputs(graph, shape)
graph = graph.apply("InferShape")
graph = graph_attr.set_dtype_inputs(graph, dtype)
graph._set_json_attr("target", str(target), "str")
if target_host is not None:
graph._set_json_attr("target_host", str(target_host), "str")
if cfg.pass_enabled("OpFusion"):
graph._set_json_attr("opt_level", 1, "int")
else:
graph._set_json_attr("opt_level", 0, "int")
graph = graph.apply("InferShape").apply("InferType")
with target:
graph = graph.apply("GraphFusePartition").apply("GraphFuseCompile")
libmod = graph_attr._move_out_module(graph, "module")
# Write variable initial values into params
if init_var:
if params is None:
params = {}
params.update(init_var)
return graph, libmod, params | [
"def",
"build",
"(",
"graph",
",",
"target",
"=",
"None",
",",
"shape",
"=",
"None",
",",
"dtype",
"=",
"\"float32\"",
",",
"params",
"=",
"None",
",",
"target_host",
"=",
"None",
",",
"layout",
"=",
"None",
")",
":",
"target",
"=",
"target",
"if",
"target",
"else",
"tvm",
".",
"target",
".",
"current_target",
"(",
")",
"if",
"target",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"\"Target is not set in env or passed as argument.\"",
")",
"target",
"=",
"tvm",
".",
"target",
".",
"create",
"(",
"target",
")",
"shape",
"=",
"shape",
"if",
"shape",
"else",
"{",
"}",
"if",
"not",
"isinstance",
"(",
"shape",
",",
"dict",
")",
":",
"raise",
"TypeError",
"(",
"\"require shape to be dict\"",
")",
"for",
"value",
"in",
"shape",
".",
"values",
"(",
")",
":",
"if",
"not",
"all",
"(",
"isinstance",
"(",
"x",
",",
"int",
")",
"for",
"x",
"in",
"value",
")",
":",
"raise",
"TypeError",
"(",
"\"shape value must be int iterator\"",
")",
"cfg",
"=",
"BuildConfig",
".",
"current",
"graph",
"=",
"graph",
"if",
"isinstance",
"(",
"graph",
",",
"_graph",
".",
"Graph",
")",
"else",
"_graph",
".",
"create",
"(",
"graph",
")",
"shape",
",",
"dtype",
"=",
"_update_shape_dtype",
"(",
"shape",
",",
"dtype",
",",
"params",
")",
"# correct layout if necessary",
"layout",
"=",
"layout",
"if",
"layout",
"else",
"{",
"}",
"graph",
"=",
"graph_attr",
".",
"set_layout_inputs",
"(",
"graph",
",",
"layout",
")",
"graph",
"=",
"graph",
".",
"apply",
"(",
"\"CorrectLayout\"",
")",
"index",
"=",
"graph",
".",
"index",
"layouts",
"=",
"graph",
".",
"json_attr",
"(",
"\"layout\"",
")",
"layout",
"=",
"{",
"x",
":",
"layouts",
"[",
"index",
".",
"entry_id",
"(",
"x",
")",
"]",
"for",
"x",
"in",
"index",
".",
"input_names",
"}",
"# Initial pass do shape type inference",
"ishape",
",",
"_",
"=",
"graph_util",
".",
"infer_shape",
"(",
"graph",
",",
"*",
"*",
"shape",
")",
"shape",
".",
"update",
"(",
"zip",
"(",
"graph",
".",
"index",
".",
"input_names",
",",
"ishape",
")",
")",
"if",
"not",
"isinstance",
"(",
"dtype",
",",
"str",
")",
":",
"idtype",
",",
"_",
"=",
"graph_util",
".",
"infer_dtype",
"(",
"graph",
",",
"*",
"*",
"dtype",
")",
"dtype",
".",
"update",
"(",
"zip",
"(",
"graph",
".",
"index",
".",
"input_names",
",",
"idtype",
")",
")",
"# Initialize all variables specified in _all_var_init",
"init_var",
"=",
"{",
"}",
"if",
"_all_var_init",
":",
"init_var",
"=",
"initialize_variables",
"(",
"shape",
",",
"dtype",
")",
"# Apply optimization",
"with",
"target",
":",
"graph",
"=",
"optimize",
"(",
"graph",
",",
"shape",
",",
"dtype",
",",
"layout",
")",
"# Precompute prune",
"if",
"params",
"and",
"cfg",
".",
"pass_enabled",
"(",
"\"PrecomputePrune\"",
")",
":",
"graph",
",",
"params",
"=",
"precompute_prune",
"(",
"graph",
",",
"params",
")",
"shape",
",",
"dtype",
"=",
"_update_shape_dtype",
"(",
"shape",
",",
"dtype",
",",
"params",
")",
"# Operator Fusion and generation",
"graph",
"=",
"graph_attr",
".",
"set_shape_inputs",
"(",
"graph",
",",
"shape",
")",
"graph",
"=",
"graph",
".",
"apply",
"(",
"\"InferShape\"",
")",
"graph",
"=",
"graph_attr",
".",
"set_dtype_inputs",
"(",
"graph",
",",
"dtype",
")",
"graph",
".",
"_set_json_attr",
"(",
"\"target\"",
",",
"str",
"(",
"target",
")",
",",
"\"str\"",
")",
"if",
"target_host",
"is",
"not",
"None",
":",
"graph",
".",
"_set_json_attr",
"(",
"\"target_host\"",
",",
"str",
"(",
"target_host",
")",
",",
"\"str\"",
")",
"if",
"cfg",
".",
"pass_enabled",
"(",
"\"OpFusion\"",
")",
":",
"graph",
".",
"_set_json_attr",
"(",
"\"opt_level\"",
",",
"1",
",",
"\"int\"",
")",
"else",
":",
"graph",
".",
"_set_json_attr",
"(",
"\"opt_level\"",
",",
"0",
",",
"\"int\"",
")",
"graph",
"=",
"graph",
".",
"apply",
"(",
"\"InferShape\"",
")",
".",
"apply",
"(",
"\"InferType\"",
")",
"with",
"target",
":",
"graph",
"=",
"graph",
".",
"apply",
"(",
"\"GraphFusePartition\"",
")",
".",
"apply",
"(",
"\"GraphFuseCompile\"",
")",
"libmod",
"=",
"graph_attr",
".",
"_move_out_module",
"(",
"graph",
",",
"\"module\"",
")",
"# Write variable initial values into params",
"if",
"init_var",
":",
"if",
"params",
"is",
"None",
":",
"params",
"=",
"{",
"}",
"params",
".",
"update",
"(",
"init_var",
")",
"return",
"graph",
",",
"libmod",
",",
"params"
] | https://github.com/dmlc/nnvm/blob/dab5ce8ab6adbf4edd8bd2fa89f1a99f343b6e38/python/nnvm/compiler/build_module.py#L183-L297 |
|
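A hedged end-to-end sketch for the build entry above: compile a one-operator NNVM graph for LLVM and run it through the TVM graph runtime. Symbol and runtime names follow the NNVM tutorials of this era; exact APIs may differ across snapshots.

```python
import numpy as np
import nnvm.symbol as sym
import nnvm.compiler
import tvm
from tvm.contrib import graph_runtime

# One-op graph: y = relu(x) on a fixed input shape.
x = sym.Variable('x')
y = sym.relu(x)
graph, lib, params = nnvm.compiler.build(
    y, target='llvm', shape={'x': (1, 16)}, dtype='float32')

module = graph_runtime.create(graph, lib, tvm.cpu(0))
module.run(x=np.random.uniform(-1, 1, (1, 16)).astype('float32'))
out = module.get_output(0, tvm.nd.empty((1, 16), 'float32'))
print(out.asnumpy())
```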
dfm/celerite | 62c8ce6f5816c655ad2a2d1b3eaaaf9fc7ca7908 | celerite/celerite.py | python | GP.log_likelihood | (self, y, _const=math.log(2.0 * math.pi), quiet=False) | return loglike | Compute the marginalized likelihood of the GP model
The factorized matrix from the previous call to :func:`GP.compute` is
used so ``compute`` must be called first.
Args:
y (array[n]): The observations at coordinates ``x`` from
:func:`GP.compute`.
quiet (bool): If true, return ``-numpy.inf`` for non-positive
definite matrices instead of throwing an error.
Returns:
float: The marginalized likelihood of the GP model.
Raises:
ValueError: For mismatched dimensions.
solver.LinAlgError: For non-positive definite matrices. | Compute the marginalized likelihood of the GP model | [
"Compute",
"the",
"marginalized",
"likelihood",
"of",
"the",
"GP",
"model"
] | def log_likelihood(self, y, _const=math.log(2.0 * math.pi), quiet=False):
"""
Compute the marginalized likelihood of the GP model
The factorized matrix from the previous call to :func:`GP.compute` is
used so ``compute`` must be called first.
Args:
y (array[n]): The observations at coordinates ``x`` from
:func:`GP.compute`.
quiet (bool): If true, return ``-numpy.inf`` for non-positive
definite matrices instead of throwing an error.
Returns:
float: The marginalized likelihood of the GP model.
Raises:
ValueError: For mismatched dimensions.
solver.LinAlgError: For non-positive definite matrices.
"""
y = self._process_input(y)
resid = y - self.mean.get_value(self._t)
try:
self._recompute()
except solver.LinAlgError:
if quiet:
return -np.inf
raise
if len(y.shape) > 1:
raise ValueError("dimension mismatch")
logdet = self.solver.log_determinant()
if not np.isfinite(logdet):
return -np.inf
loglike = -0.5 * (
self.solver.dot_solve(resid) + logdet + len(y) * _const
)
if not np.isfinite(loglike):
return -np.inf
return loglike | [
"def",
"log_likelihood",
"(",
"self",
",",
"y",
",",
"_const",
"=",
"math",
".",
"log",
"(",
"2.0",
"*",
"math",
".",
"pi",
")",
",",
"quiet",
"=",
"False",
")",
":",
"y",
"=",
"self",
".",
"_process_input",
"(",
"y",
")",
"resid",
"=",
"y",
"-",
"self",
".",
"mean",
".",
"get_value",
"(",
"self",
".",
"_t",
")",
"try",
":",
"self",
".",
"_recompute",
"(",
")",
"except",
"solver",
".",
"LinAlgError",
":",
"if",
"quiet",
":",
"return",
"-",
"np",
".",
"inf",
"raise",
"if",
"len",
"(",
"y",
".",
"shape",
")",
">",
"1",
":",
"raise",
"ValueError",
"(",
"\"dimension mismatch\"",
")",
"logdet",
"=",
"self",
".",
"solver",
".",
"log_determinant",
"(",
")",
"if",
"not",
"np",
".",
"isfinite",
"(",
"logdet",
")",
":",
"return",
"-",
"np",
".",
"inf",
"loglike",
"=",
"-",
"0.5",
"*",
"(",
"self",
".",
"solver",
".",
"dot_solve",
"(",
"resid",
")",
"+",
"logdet",
"+",
"len",
"(",
"y",
")",
"*",
"_const",
")",
"if",
"not",
"np",
".",
"isfinite",
"(",
"loglike",
")",
":",
"return",
"-",
"np",
".",
"inf",
"return",
"loglike"
] | https://github.com/dfm/celerite/blob/62c8ce6f5816c655ad2a2d1b3eaaaf9fc7ca7908/celerite/celerite.py#L180-L219 |
|
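A short usage sketch for the log_likelihood entry above, following celerite's documented workflow: build a kernel, call compute() on the observation times (which factorizes the matrix), then evaluate. The kernel parameters and data are illustrative.

```python
import numpy as np
import celerite
from celerite import terms

t = np.sort(np.random.uniform(0, 10, 50))
y = np.sin(t) + 0.1 * np.random.randn(50)

gp = celerite.GP(terms.RealTerm(log_a=0.0, log_c=0.0))
gp.compute(t, yerr=0.1)      # factorize; must precede log_likelihood
print(gp.log_likelihood(y))  # marginalized log-likelihood of the data
```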
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/tools/python3/src/Lib/typing.py | python | _check_generic | (cls, parameters, elen) | Check correct count for parameters of a generic cls (internal helper).
This gives a nice error message in case of count mismatch. | Check correct count for parameters of a generic cls (internal helper).
This gives a nice error message in case of count mismatch. | [
"Check",
"correct",
"count",
"for",
"parameters",
"of",
"a",
"generic",
"cls",
"(",
"internal",
"helper",
")",
".",
"This",
"gives",
"a",
"nice",
"error",
"message",
"in",
"case",
"of",
"count",
"mismatch",
"."
] | def _check_generic(cls, parameters, elen):
"""Check correct count for parameters of a generic cls (internal helper).
This gives a nice error message in case of count mismatch.
"""
if not elen:
raise TypeError(f"{cls} is not a generic class")
alen = len(parameters)
if alen != elen:
raise TypeError(f"Too {'many' if alen > elen else 'few'} parameters for {cls};"
f" actual {alen}, expected {elen}") | [
"def",
"_check_generic",
"(",
"cls",
",",
"parameters",
",",
"elen",
")",
":",
"if",
"not",
"elen",
":",
"raise",
"TypeError",
"(",
"f\"{cls} is not a generic class\"",
")",
"alen",
"=",
"len",
"(",
"parameters",
")",
"if",
"alen",
"!=",
"elen",
":",
"raise",
"TypeError",
"(",
"f\"Too {'many' if alen > elen else 'few'} parameters for {cls};\"",
"f\" actual {alen}, expected {elen}\"",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/typing.py#L206-L215 |
||
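A tiny demonstration of the arity check in the _check_generic entry above: subscripting a generic class routes through this helper, so a parameter-count mismatch raises the formatted TypeError.

```python
from typing import Dict

try:
    Dict[int]  # Dict expects two parameters (key and value types)
except TypeError as err:
    print(err)  # Too few parameters for typing.Dict; actual 1, expected 2
```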
mantidproject/mantid | 03deeb89254ec4289edb8771e0188c2090a02f32 | Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/DirectILLDiagnostics.py | python | _reportBeamStopMask | (reportWS, maskWS) | return _reportMasking(reportWS, maskWS, 'BeamStopMask') | Return masked spectrum numbers and add default mask information to a report workspace. | Return masked spectrum numbers and add default mask information to a report workspace. | [
"Return",
"masked",
"spectrum",
"numbers",
"and",
"add",
"default",
"mask",
"information",
"to",
"a",
"report",
"workspace",
"."
] | def _reportBeamStopMask(reportWS, maskWS):
"""Return masked spectrum numbers and add default mask information to a report workspace."""
return _reportMasking(reportWS, maskWS, 'BeamStopMask') | [
"def",
"_reportBeamStopMask",
"(",
"reportWS",
",",
"maskWS",
")",
":",
"return",
"_reportMasking",
"(",
"reportWS",
",",
"maskWS",
",",
"'BeamStopMask'",
")"
] | https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/DirectILLDiagnostics.py#L240-L242 |