body_hash (stringlengths 64–64) | body (stringlengths 23–109k) | docstring (stringlengths 1–57k) | path (stringlengths 4–198) | name (stringlengths 1–115) | repository_name (stringlengths 7–111) | repository_stars (float64 0–191k) | lang (stringclasses, 1 value) | body_without_docstring (stringlengths 14–108k) | unified (stringlengths 45–133k)
---|---|---|---|---|---|---|---|---|---|
c0b6d91f34ed130c5f79f687a511faa66e2c2e40ee30ad52e7adcaf397c100e7 | @app.task(bind=True, name='celery_tasks.tasks.analyze_package')
def analyze_package(self, fuzzer_image: str, volume_path: str, package: str):
'\n\n :param self:\n :param package:\n :param fuzzer_image:\n :param volume_path:\n :param seeds_path:\n :param fuzz_duration:\n :param use_asan:\n :param exec_timeout:\n :param qemu:\n :param config_dict:\n :return:\n '
volumes_dict = {os.path.abspath(os.path.join(volume_path, 'fuzz_data')): {'bind': '/results', 'mode': 'rw'}, os.path.abspath(os.path.join(volume_path, 'build_data')): {'bind': '/build', 'mode': 'rw'}}
logging.info('Now analyzing crashes for {0}'.format(package))
analyze_command_params = ['/inputinferer/configfinder/analyze_wrapper.py', '-p', package, '-v', '/results/', 'package']
container = docker_client.containers.run(image=fuzzer_image, remove=False, privileged=True, entrypoint='python', volumes=volumes_dict, command=analyze_command_params, detach=True, stream=True, stdout=True, stderr=True, name=((package + '_analyze_') + str(uuid.uuid4())[:4]))
container_output = ''
for line in container.logs(stream=True):
logging.info(line.decode('utf-8').strip())
container_output += line.decode('utf-8')
status = container.wait()
if (status['StatusCode'] != 0):
logging.info('Error while running docker command. Docker Output:\n {0}. Return value {1}'.format(container_output, status['StatusCode']))
return False
return True | :param self:
:param package:
:param fuzzer_image:
:param volume_path:
:param seeds_path:
:param fuzz_duration:
:param use_asan:
:param exec_timeout:
:param qemu:
:param config_dict:
:return: | fexm/celery_tasks/tasks.py | analyze_package | fgsect/fexm | 105 | python | @app.task(bind=True, name='celery_tasks.tasks.analyze_package')
def analyze_package(self, fuzzer_image: str, volume_path: str, package: str):
'\n\n :param self:\n :param package:\n :param fuzzer_image:\n :param volume_path:\n :param seeds_path:\n :param fuzz_duration:\n :param use_asan:\n :param exec_timeout:\n :param qemu:\n :param config_dict:\n :return:\n '
volumes_dict = {os.path.abspath(os.path.join(volume_path, 'fuzz_data')): {'bind': '/results', 'mode': 'rw'}, os.path.abspath(os.path.join(volume_path, 'build_data')): {'bind': '/build', 'mode': 'rw'}}
logging.info('Now analyzing crashes for {0}'.format(package))
analyze_command_params = ['/inputinferer/configfinder/analyze_wrapper.py', '-p', package, '-v', '/results/', 'package']
container = docker_client.containers.run(image=fuzzer_image, remove=False, privileged=True, entrypoint='python', volumes=volumes_dict, command=analyze_command_params, detach=True, stream=True, stdout=True, stderr=True, name=((package + '_analyze_') + str(uuid.uuid4())[:4]))
    container_output = ''
for line in container.logs(stream=True):
logging.info(line.decode('utf-8').strip())
container_output += line.decode('utf-8')
status = container.wait()
if (status['StatusCode'] != 0):
logging.info('Error while running docker command. Docker Output:\n {0}. Return value {1}'.format(container_output, status['StatusCode']))
return False
return True | @app.task(bind=True, name='celery_tasks.tasks.analyze_package')
def analyze_package(self, fuzzer_image: str, volume_path: str, package: str):
'\n\n :param self:\n :param package:\n :param fuzzer_image:\n :param volume_path:\n :param seeds_path:\n :param fuzz_duration:\n :param use_asan:\n :param exec_timeout:\n :param qemu:\n :param config_dict:\n :return:\n '
volumes_dict = {os.path.abspath(os.path.join(volume_path, 'fuzz_data')): {'bind': '/results', 'mode': 'rw'}, os.path.abspath(os.path.join(volume_path, 'build_data')): {'bind': '/build', 'mode': 'rw'}}
logging.info('Now analyzing crashes for {0}'.format(package))
analyze_command_params = ['/inputinferer/configfinder/analyze_wrapper.py', '-p', package, '-v', '/results/', 'package']
container = docker_client.containers.run(image=fuzzer_image, remove=False, privileged=True, entrypoint='python', volumes=volumes_dict, command=analyze_command_params, detach=True, stream=True, stdout=True, stderr=True, name=((package + '_analyze_') + str(uuid.uuid4())[:4]))
    container_output = ''
for line in container.logs(stream=True):
logging.info(line.decode('utf-8').strip())
container_output += line.decode('utf-8')
status = container.wait()
if (status['StatusCode'] != 0):
logging.info('Error while running docker command. Docker Output:\n {0}. Return value {1}'.format(container_output, status['StatusCode']))
return False
return True<|docstring|>:param self:
:param package:
:param fuzzer_image:
:param volume_path:
:param seeds_path:
:param fuzz_duration:
:param use_asan:
:param exec_timeout:
:param qemu:
:param config_dict:
:return:<|endoftext|> |
684c813e51c70187ba0a93c91ac7e248ac44bbb594406ccb0dec7fe8fb8f14ef | @app.task(bind=True, name='celery_tasks.tasks.run_asan_eval')
def run_asan_eval(self, package: str, fuzzer_image: str, volume_path: str):
'\n :param self:\n :param docker_name:\n :param package:\n :param docker_args:\n :param fuzzer_image:\n :param build_file:\n :param inference_command_args:\n :param timeout_per_package:\n :type inference_command_args: List\n :return:\n '
print('Got eval task for package {0}'.format(package))
logger.info('Got eval task for package {0}'.format(package))
volumes_dict = {volume_path: {'bind': '/results', 'mode': 'rw'}}
additional_env_variables = {'AFL_USE_ASAN': '1'}
container = docker_client.containers.run(image=fuzzer_image, remove=True, privileged=True, entrypoint='python', volumes=volumes_dict, command=['/inputinferer/configfinder/asan_crash_analyzer.py', '-p', package, '-v', '/results'], detach=True, stream=True, stdout=True, stderr=True, name=((package + '_fuzz_') + str(uuid.uuid4())[:4]), environment=additional_env_variables)
container_output = ''
for line in container.logs(stream=True):
logger.info(line.decode('utf-8').strip())
container_output += line.decode('utf-8')
status = container.wait(timeout=600)
if (status['StatusCode'] != 0):
logger.error('Error while running docker command. Docker Output:\n {0}. Return value {1}'.format(container_output, status['StatusCode']))
return False
return True | :param self:
:param docker_name:
:param package:
:param docker_args:
:param fuzzer_image:
:param build_file:
:param inference_command_args:
:param timeout_per_package:
:type inference_command_args: List
:return: | fexm/celery_tasks/tasks.py | run_asan_eval | fgsect/fexm | 105 | python | @app.task(bind=True, name='celery_tasks.tasks.run_asan_eval')
def run_asan_eval(self, package: str, fuzzer_image: str, volume_path: str):
'\n :param self:\n :param docker_name:\n :param package:\n :param docker_args:\n :param fuzzer_image:\n :param build_file:\n :param inference_command_args:\n :param timeout_per_package:\n :type inference_command_args: List\n :return:\n '
print('Got eval task for package {0}'.format(package))
logger.info('Got eval task for package {0}'.format(package))
volumes_dict = {volume_path: {'bind': '/results', 'mode': 'rw'}}
additional_env_variables = {'AFL_USE_ASAN': '1'}
container = docker_client.containers.run(image=fuzzer_image, remove=True, privileged=True, entrypoint='python', volumes=volumes_dict, command=['/inputinferer/configfinder/asan_crash_analyzer.py', '-p', package, '-v', '/results'], detach=True, stream=True, stdout=True, stderr=True, name=((package + '_fuzz_') + str(uuid.uuid4())[:4]), environment=additional_env_variables)
    container_output = ''
for line in container.logs(stream=True):
logger.info(line.decode('utf-8').strip())
container_output += line.decode('utf-8')
status = container.wait(timeout=600)
if (status['StatusCode'] != 0):
logger.error('Error while running docker command. Docker Output:\n {0}. Return value {1}'.format(container_output, status['StatusCode']))
return False
return True | @app.task(bind=True, name='celery_tasks.tasks.run_asan_eval')
def run_asan_eval(self, package: str, fuzzer_image: str, volume_path: str):
'\n :param self:\n :param docker_name:\n :param package:\n :param docker_args:\n :param fuzzer_image:\n :param build_file:\n :param inference_command_args:\n :param timeout_per_package:\n :type inference_command_args: List\n :return:\n '
print('Got eval task for package {0}'.format(package))
logger.info('Got eval task for package {0}'.format(package))
volumes_dict = {volume_path: {'bind': '/results', 'mode': 'rw'}}
additional_env_variables = {'AFL_USE_ASAN': '1'}
container = docker_client.containers.run(image=fuzzer_image, remove=True, privileged=True, entrypoint='python', volumes=volumes_dict, command=['/inputinferer/configfinder/asan_crash_analyzer.py', '-p', package, '-v', '/results'], detach=True, stream=True, stdout=True, stderr=True, name=((package + '_fuzz_') + str(uuid.uuid4())[:4]), environment=additional_env_variables)
    container_output = ''
for line in container.logs(stream=True):
logger.info(line.decode('utf-8').strip())
container_output += line.decode('utf-8')
status = container.wait(timeout=600)
if (status['StatusCode'] != 0):
logger.error('Error while running docker command. Docker Output:\n {0}. Return value {1}'.format(container_output, status['StatusCode']))
return False
return True<|docstring|>:param self:
:param docker_name:
:param package:
:param docker_args:
:param fuzzer_image:
:param build_file:
:param inference_command_args:
:param timeout_per_package:
:type inference_command_args: List
:return:<|endoftext|> |
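Both Celery tasks in the rows above are declared with `bind=True` and explicit task names, so callers would normally enqueue them through Celery rather than invoke them directly. A minimal sketch, assuming a configured broker and result backend; the image tag, volume path, and package name are illustrative placeholders, not values from this dataset:

```python
from celery_tasks.tasks import analyze_package, run_asan_eval

# Hypothetical arguments; fuzz_data/ and build_data/ must exist under volume_path.
analyze_package.delay(
    fuzzer_image="fexm-fuzzer:latest",
    volume_path="/data/run1",
    package="binutils",
)

# Positional order follows the signature: package, fuzzer_image, volume_path.
result = run_asan_eval.delay("binutils", "fexm-fuzzer:latest", "/data/run1")
print(result.get(timeout=3600))  # True if the container exited with status 0
```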
a82b02b8eb7fb897069df5e692446d53250fe8638a4cbbad05430859a4521398 | def _eager_reshape(tensor, shape, ctx):
'Eager-only version of Reshape op; requires tensor is an eager Tensor.'
attr_t = tensor.dtype.as_datatype_enum
(attr_tshape, (shape,)) = execute.args_to_matching_eager([shape], ctx, dtypes.int32)
attr_tshape = attr_tshape.as_datatype_enum
inputs_flat = [tensor, shape]
attrs = ('T', attr_t, 'Tshape', attr_tshape)
(result,) = execute.execute(b'Reshape', 1, inputs=inputs_flat, attrs=attrs, ctx=ctx)
return result | Eager-only version of Reshape op; requires tensor is an eager Tensor. | tensorflow/python/framework/constant_op.py | _eager_reshape | somashekar10/tensorflow | 522 | python | def _eager_reshape(tensor, shape, ctx):
attr_t = tensor.dtype.as_datatype_enum
(attr_tshape, (shape,)) = execute.args_to_matching_eager([shape], ctx, dtypes.int32)
attr_tshape = attr_tshape.as_datatype_enum
inputs_flat = [tensor, shape]
attrs = ('T', attr_t, 'Tshape', attr_tshape)
(result,) = execute.execute(b'Reshape', 1, inputs=inputs_flat, attrs=attrs, ctx=ctx)
return result | def _eager_reshape(tensor, shape, ctx):
attr_t = tensor.dtype.as_datatype_enum
(attr_tshape, (shape,)) = execute.args_to_matching_eager([shape], ctx, dtypes.int32)
attr_tshape = attr_tshape.as_datatype_enum
inputs_flat = [tensor, shape]
attrs = ('T', attr_t, 'Tshape', attr_tshape)
(result,) = execute.execute(b'Reshape', 1, inputs=inputs_flat, attrs=attrs, ctx=ctx)
return result<|docstring|>Eager-only version of Reshape op; requires tensor is an eager Tensor.<|endoftext|> |
8bc1f8c3165c3a3d0aa1e5f18d9d1b1a10418c8ad3db1d9af1910fd01be3fa32 | def _eager_fill(dims, value, ctx):
'Eager-only version of Fill op; requires value is an eager Tensor.'
attr_t = value.dtype.as_datatype_enum
dims = convert_to_eager_tensor(dims, ctx, dtypes.int32)
inputs_flat = [dims, value]
attrs = ('T', attr_t)
(result,) = execute.execute(b'Fill', 1, inputs=inputs_flat, attrs=attrs, ctx=ctx)
return result | Eager-only version of Fill op; requires value is an eager Tensor. | tensorflow/python/framework/constant_op.py | _eager_fill | somashekar10/tensorflow | 522 | python | def _eager_fill(dims, value, ctx):
attr_t = value.dtype.as_datatype_enum
dims = convert_to_eager_tensor(dims, ctx, dtypes.int32)
inputs_flat = [dims, value]
attrs = ('T', attr_t)
(result,) = execute.execute(b'Fill', 1, inputs=inputs_flat, attrs=attrs, ctx=ctx)
return result | def _eager_fill(dims, value, ctx):
attr_t = value.dtype.as_datatype_enum
dims = convert_to_eager_tensor(dims, ctx, dtypes.int32)
inputs_flat = [dims, value]
attrs = ('T', attr_t)
(result,) = execute.execute(b'Fill', 1, inputs=inputs_flat, attrs=attrs, ctx=ctx)
return result<|docstring|>Eager-only version of Fill op; requires value is an eager Tensor.<|endoftext|> |
df080b8b75bde2411b82edb40de099ba5ec5665909b49f954af356e61e572994 | def _eager_identity(tensor, ctx):
'Eager-only version of Identity op; requires tensor is an eager Tensor.'
attrs = ('T', tensor.dtype.as_datatype_enum)
(result,) = execute.execute(b'Identity', 1, inputs=[tensor], attrs=attrs, ctx=ctx)
return result | Eager-only version of Identity op; requires tensor is an eager Tensor. | tensorflow/python/framework/constant_op.py | _eager_identity | somashekar10/tensorflow | 522 | python | def _eager_identity(tensor, ctx):
attrs = ('T', tensor.dtype.as_datatype_enum)
(result,) = execute.execute(b'Identity', 1, inputs=[tensor], attrs=attrs, ctx=ctx)
return result | def _eager_identity(tensor, ctx):
attrs = ('T', tensor.dtype.as_datatype_enum)
(result,) = execute.execute(b'Identity', 1, inputs=[tensor], attrs=attrs, ctx=ctx)
return result<|docstring|>Eager-only version of Identity op; requires tensor is an eager Tensor.<|endoftext|> |
7436719cdfcf84dcac6d12cc152b0438e7482b8d92027d91884655a1b435eefd | def convert_to_eager_tensor(value, ctx, dtype=None):
'Converts the given `value` to an `EagerTensor`.\n\n Note that this function could return cached copies of created constants for\n performance reasons.\n\n Args:\n value: value to convert to EagerTensor.\n ctx: value of context.context().\n dtype: optional desired dtype of the converted EagerTensor.\n\n Returns:\n EagerTensor created from value.\n\n Raises:\n TypeError: if `dtype` is not compatible with the type of t.\n '
if isinstance(value, ops.EagerTensor):
if ((dtype is not None) and (value.dtype != dtype)):
raise TypeError(('Expected tensor with type %r not %r' % (dtype, value.dtype)))
return value
if (dtype is not None):
try:
dtype = dtype.as_datatype_enum
except AttributeError:
dtype = dtypes.as_dtype(dtype).as_datatype_enum
device = ctx.device_name
handle = ctx._handle
if isinstance(value, ((float,) + six.integer_types)):
cache_key = (device, value, dtype, type(value))
scalar_cache = ctx.scalar_cache()
tensor = scalar_cache.get(cache_key, None)
if (tensor is not None):
return tensor
t = ops.EagerTensor(value, context=handle, device=device, dtype=dtype)
scalar_cache[cache_key] = t
return t
else:
return ops.EagerTensor(value, context=handle, device=device, dtype=dtype) | Converts the given `value` to an `EagerTensor`.
Note that this function could return cached copies of created constants for
performance reasons.
Args:
value: value to convert to EagerTensor.
ctx: value of context.context().
dtype: optional desired dtype of the converted EagerTensor.
Returns:
EagerTensor created from value.
Raises:
TypeError: if `dtype` is not compatible with the type of t. | tensorflow/python/framework/constant_op.py | convert_to_eager_tensor | somashekar10/tensorflow | 522 | python | def convert_to_eager_tensor(value, ctx, dtype=None):
'Converts the given `value` to an `EagerTensor`.\n\n Note that this function could return cached copies of created constants for\n performance reasons.\n\n Args:\n value: value to convert to EagerTensor.\n ctx: value of context.context().\n dtype: optional desired dtype of the converted EagerTensor.\n\n Returns:\n EagerTensor created from value.\n\n Raises:\n TypeError: if `dtype` is not compatible with the type of t.\n '
if isinstance(value, ops.EagerTensor):
if ((dtype is not None) and (value.dtype != dtype)):
raise TypeError(('Expected tensor with type %r not %r' % (dtype, value.dtype)))
return value
if (dtype is not None):
try:
dtype = dtype.as_datatype_enum
except AttributeError:
dtype = dtypes.as_dtype(dtype).as_datatype_enum
device = ctx.device_name
handle = ctx._handle
if isinstance(value, ((float,) + six.integer_types)):
cache_key = (device, value, dtype, type(value))
scalar_cache = ctx.scalar_cache()
tensor = scalar_cache.get(cache_key, None)
if (tensor is not None):
return tensor
t = ops.EagerTensor(value, context=handle, device=device, dtype=dtype)
scalar_cache[cache_key] = t
return t
else:
return ops.EagerTensor(value, context=handle, device=device, dtype=dtype) | def convert_to_eager_tensor(value, ctx, dtype=None):
'Converts the given `value` to an `EagerTensor`.\n\n Note that this function could return cached copies of created constants for\n performance reasons.\n\n Args:\n value: value to convert to EagerTensor.\n ctx: value of context.context().\n dtype: optional desired dtype of the converted EagerTensor.\n\n Returns:\n EagerTensor created from value.\n\n Raises:\n TypeError: if `dtype` is not compatible with the type of t.\n '
if isinstance(value, ops.EagerTensor):
if ((dtype is not None) and (value.dtype != dtype)):
raise TypeError(('Expected tensor with type %r not %r' % (dtype, value.dtype)))
return value
if (dtype is not None):
try:
dtype = dtype.as_datatype_enum
except AttributeError:
dtype = dtypes.as_dtype(dtype).as_datatype_enum
device = ctx.device_name
handle = ctx._handle
if isinstance(value, ((float,) + six.integer_types)):
cache_key = (device, value, dtype, type(value))
scalar_cache = ctx.scalar_cache()
tensor = scalar_cache.get(cache_key, None)
if (tensor is not None):
return tensor
t = ops.EagerTensor(value, context=handle, device=device, dtype=dtype)
scalar_cache[cache_key] = t
return t
else:
return ops.EagerTensor(value, context=handle, device=device, dtype=dtype)<|docstring|>Converts the given `value` to an `EagerTensor`.
Note that this function could return cached copies of created constants for
performance reasons.
Args:
value: value to convert to EagerTensor.
ctx: value of context.context().
dtype: optional desired dtype of the converted EagerTensor.
Returns:
EagerTensor created from value.
Raises:
TypeError: if `dtype` is not compatible with the type of t.<|endoftext|> |
382708e8670acb14343c938af555d9990c043b33a872e2fc51a2688ad68c376b | def constant(value, dtype=None, shape=None, name='Const', verify_shape=False):
'Creates a constant tensor.\n\n The resulting tensor is populated with values of type `dtype`, as\n specified by arguments `value` and (optionally) `shape` (see examples\n below).\n\n The argument `value` can be a constant value, or a list of values of type\n `dtype`. If `value` is a list, then the length of the list must be less\n than or equal to the number of elements implied by the `shape` argument (if\n specified). In the case where the list length is less than the number of\n elements specified by `shape`, the last element in the list will be used\n to fill the remaining entries.\n\n The argument `shape` is optional. If present, it specifies the dimensions of\n the resulting tensor. If not present, the shape of `value` is used.\n\n If the argument `dtype` is not specified, then the type is inferred from\n the type of `value`.\n\n For example:\n\n ```python\n # Constant 1-D Tensor populated with value list.\n tensor = tf.constant([1, 2, 3, 4, 5, 6, 7]) => [1 2 3 4 5 6 7]\n\n # Constant 2-D tensor populated with scalar value -1.\n tensor = tf.constant(-1.0, shape=[2, 3]) => [[-1. -1. -1.]\n [-1. -1. -1.]]\n ```\n\n Args:\n value: A constant value (or list) of output type `dtype`.\n\n dtype: The type of the elements of the resulting tensor.\n\n shape: Optional dimensions of resulting tensor.\n\n name: Optional name for the tensor.\n\n verify_shape: Boolean that enables verification of a shape of values.\n\n Returns:\n A Constant Tensor.\n\n Raises:\n TypeError: if shape is incorrectly specified or unsupported.\n '
ctx = context.context()
if (not ctx.in_graph_mode()):
t = convert_to_eager_tensor(value, ctx, dtype)
if (shape is None):
return t
shape = tensor_shape.as_shape(shape)
if (shape == t.shape):
return t
if verify_shape:
raise TypeError(("Expected Tensor's shape: %s, got %s." % (tuple(shape), tuple(t.shape))))
num_t = t.shape.num_elements()
if (num_t == shape.num_elements()):
return _eager_reshape(t, shape.as_list(), ctx)
if (num_t == 1):
if (t.dtype == dtypes.bool):
with ops.device('/device:CPU:0'):
x = _eager_fill(shape.as_list(), t.cpu(), ctx)
return _eager_identity(x, ctx)
else:
return _eager_fill(shape.as_list(), t, ctx)
raise TypeError(('Eager execution of tf.constant with unsupported shape (value has %d elements, shape is %s with %d elements).' % (num_t, shape, shape.num_elements())))
g = ops.get_default_graph()
tensor_value = attr_value_pb2.AttrValue()
tensor_value.tensor.CopyFrom(tensor_util.make_tensor_proto(value, dtype=dtype, shape=shape, verify_shape=verify_shape))
dtype_value = attr_value_pb2.AttrValue(type=tensor_value.tensor.dtype)
const_tensor = g.create_op('Const', [], [dtype_value.type], attrs={'value': tensor_value, 'dtype': dtype_value}, name=name).outputs[0]
return const_tensor | Creates a constant tensor.
The resulting tensor is populated with values of type `dtype`, as
specified by arguments `value` and (optionally) `shape` (see examples
below).
The argument `value` can be a constant value, or a list of values of type
`dtype`. If `value` is a list, then the length of the list must be less
than or equal to the number of elements implied by the `shape` argument (if
specified). In the case where the list length is less than the number of
elements specified by `shape`, the last element in the list will be used
to fill the remaining entries.
The argument `shape` is optional. If present, it specifies the dimensions of
the resulting tensor. If not present, the shape of `value` is used.
If the argument `dtype` is not specified, then the type is inferred from
the type of `value`.
For example:
```python
# Constant 1-D Tensor populated with value list.
tensor = tf.constant([1, 2, 3, 4, 5, 6, 7]) => [1 2 3 4 5 6 7]
# Constant 2-D tensor populated with scalar value -1.
tensor = tf.constant(-1.0, shape=[2, 3]) => [[-1. -1. -1.]
[-1. -1. -1.]]
```
Args:
value: A constant value (or list) of output type `dtype`.
dtype: The type of the elements of the resulting tensor.
shape: Optional dimensions of resulting tensor.
name: Optional name for the tensor.
verify_shape: Boolean that enables verification of a shape of values.
Returns:
A Constant Tensor.
Raises:
TypeError: if shape is incorrectly specified or unsupported. | tensorflow/python/framework/constant_op.py | constant | somashekar10/tensorflow | 522 | python | def constant(value, dtype=None, shape=None, name='Const', verify_shape=False):
'Creates a constant tensor.\n\n The resulting tensor is populated with values of type `dtype`, as\n specified by arguments `value` and (optionally) `shape` (see examples\n below).\n\n The argument `value` can be a constant value, or a list of values of type\n `dtype`. If `value` is a list, then the length of the list must be less\n than or equal to the number of elements implied by the `shape` argument (if\n specified). In the case where the list length is less than the number of\n elements specified by `shape`, the last element in the list will be used\n to fill the remaining entries.\n\n The argument `shape` is optional. If present, it specifies the dimensions of\n the resulting tensor. If not present, the shape of `value` is used.\n\n If the argument `dtype` is not specified, then the type is inferred from\n the type of `value`.\n\n For example:\n\n ```python\n # Constant 1-D Tensor populated with value list.\n tensor = tf.constant([1, 2, 3, 4, 5, 6, 7]) => [1 2 3 4 5 6 7]\n\n # Constant 2-D tensor populated with scalar value -1.\n tensor = tf.constant(-1.0, shape=[2, 3]) => [[-1. -1. -1.]\n [-1. -1. -1.]]\n ```\n\n Args:\n value: A constant value (or list) of output type `dtype`.\n\n dtype: The type of the elements of the resulting tensor.\n\n shape: Optional dimensions of resulting tensor.\n\n name: Optional name for the tensor.\n\n verify_shape: Boolean that enables verification of a shape of values.\n\n Returns:\n A Constant Tensor.\n\n Raises:\n TypeError: if shape is incorrectly specified or unsupported.\n '
ctx = context.context()
if (not ctx.in_graph_mode()):
t = convert_to_eager_tensor(value, ctx, dtype)
if (shape is None):
return t
shape = tensor_shape.as_shape(shape)
if (shape == t.shape):
return t
if verify_shape:
raise TypeError(("Expected Tensor's shape: %s, got %s." % (tuple(shape), tuple(t.shape))))
num_t = t.shape.num_elements()
if (num_t == shape.num_elements()):
return _eager_reshape(t, shape.as_list(), ctx)
if (num_t == 1):
if (t.dtype == dtypes.bool):
with ops.device('/device:CPU:0'):
x = _eager_fill(shape.as_list(), t.cpu(), ctx)
return _eager_identity(x, ctx)
else:
return _eager_fill(shape.as_list(), t, ctx)
raise TypeError(('Eager execution of tf.constant with unsupported shape (value has %d elements, shape is %s with %d elements).' % (num_t, shape, shape.num_elements())))
g = ops.get_default_graph()
tensor_value = attr_value_pb2.AttrValue()
tensor_value.tensor.CopyFrom(tensor_util.make_tensor_proto(value, dtype=dtype, shape=shape, verify_shape=verify_shape))
dtype_value = attr_value_pb2.AttrValue(type=tensor_value.tensor.dtype)
const_tensor = g.create_op('Const', [], [dtype_value.type], attrs={'value': tensor_value, 'dtype': dtype_value}, name=name).outputs[0]
return const_tensor | def constant(value, dtype=None, shape=None, name='Const', verify_shape=False):
'Creates a constant tensor.\n\n The resulting tensor is populated with values of type `dtype`, as\n specified by arguments `value` and (optionally) `shape` (see examples\n below).\n\n The argument `value` can be a constant value, or a list of values of type\n `dtype`. If `value` is a list, then the length of the list must be less\n than or equal to the number of elements implied by the `shape` argument (if\n specified). In the case where the list length is less than the number of\n elements specified by `shape`, the last element in the list will be used\n to fill the remaining entries.\n\n The argument `shape` is optional. If present, it specifies the dimensions of\n the resulting tensor. If not present, the shape of `value` is used.\n\n If the argument `dtype` is not specified, then the type is inferred from\n the type of `value`.\n\n For example:\n\n ```python\n # Constant 1-D Tensor populated with value list.\n tensor = tf.constant([1, 2, 3, 4, 5, 6, 7]) => [1 2 3 4 5 6 7]\n\n # Constant 2-D tensor populated with scalar value -1.\n tensor = tf.constant(-1.0, shape=[2, 3]) => [[-1. -1. -1.]\n [-1. -1. -1.]]\n ```\n\n Args:\n value: A constant value (or list) of output type `dtype`.\n\n dtype: The type of the elements of the resulting tensor.\n\n shape: Optional dimensions of resulting tensor.\n\n name: Optional name for the tensor.\n\n verify_shape: Boolean that enables verification of a shape of values.\n\n Returns:\n A Constant Tensor.\n\n Raises:\n TypeError: if shape is incorrectly specified or unsupported.\n '
ctx = context.context()
if (not ctx.in_graph_mode()):
t = convert_to_eager_tensor(value, ctx, dtype)
if (shape is None):
return t
shape = tensor_shape.as_shape(shape)
if (shape == t.shape):
return t
if verify_shape:
raise TypeError(("Expected Tensor's shape: %s, got %s." % (tuple(shape), tuple(t.shape))))
num_t = t.shape.num_elements()
if (num_t == shape.num_elements()):
return _eager_reshape(t, shape.as_list(), ctx)
if (num_t == 1):
if (t.dtype == dtypes.bool):
with ops.device('/device:CPU:0'):
x = _eager_fill(shape.as_list(), t.cpu(), ctx)
return _eager_identity(x, ctx)
else:
return _eager_fill(shape.as_list(), t, ctx)
raise TypeError(('Eager execution of tf.constant with unsupported shape (value has %d elements, shape is %s with %d elements).' % (num_t, shape, shape.num_elements())))
g = ops.get_default_graph()
tensor_value = attr_value_pb2.AttrValue()
tensor_value.tensor.CopyFrom(tensor_util.make_tensor_proto(value, dtype=dtype, shape=shape, verify_shape=verify_shape))
dtype_value = attr_value_pb2.AttrValue(type=tensor_value.tensor.dtype)
const_tensor = g.create_op('Const', [], [dtype_value.type], attrs={'value': tensor_value, 'dtype': dtype_value}, name=name).outputs[0]
return const_tensor<|docstring|>Creates a constant tensor.
The resulting tensor is populated with values of type `dtype`, as
specified by arguments `value` and (optionally) `shape` (see examples
below).
The argument `value` can be a constant value, or a list of values of type
`dtype`. If `value` is a list, then the length of the list must be less
than or equal to the number of elements implied by the `shape` argument (if
specified). In the case where the list length is less than the number of
elements specified by `shape`, the last element in the list will be used
to fill the remaining entries.
The argument `shape` is optional. If present, it specifies the dimensions of
the resulting tensor. If not present, the shape of `value` is used.
If the argument `dtype` is not specified, then the type is inferred from
the type of `value`.
For example:
```python
# Constant 1-D Tensor populated with value list.
tensor = tf.constant([1, 2, 3, 4, 5, 6, 7]) => [1 2 3 4 5 6 7]
# Constant 2-D tensor populated with scalar value -1.
tensor = tf.constant(-1.0, shape=[2, 3]) => [[-1. -1. -1.]
[-1. -1. -1.]]
```
Args:
value: A constant value (or list) of output type `dtype`.
dtype: The type of the elements of the resulting tensor.
shape: Optional dimensions of resulting tensor.
name: Optional name for the tensor.
verify_shape: Boolean that enables verification of a shape of values.
Returns:
A Constant Tensor.
Raises:
TypeError: if shape is incorrectly specified or unsupported.<|endoftext|> |
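The eager branch of `constant` shown above dispatches on element count: if the value's element count matches the requested shape it goes through `_eager_reshape`, a single element goes through `_eager_fill` (with a CPU round-trip for booleans), and any other mismatch raises `TypeError`. A short sketch of those paths, assuming a TensorFlow build with eager execution active:

```python
import tensorflow as tf

t1 = tf.constant([1, 2, 3, 4, 5, 6])                # shape taken from the value itself
t2 = tf.constant(-1.0, shape=[2, 3])                # one element: the _eager_fill path
t3 = tf.constant([1, 2, 3, 4, 5, 6], shape=[2, 3])  # 6 == 2*3: the _eager_reshape path
# tf.constant([1, 2], shape=[2, 3]) would raise TypeError in eager mode (2 != 6).
```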
380c9742c7b295488fb595d16adc2bf8c7ce03c11877551a256fbc608d74b66d | def _tensor_shape_tensor_conversion_function(s, dtype=None, name=None, as_ref=False):
'Function to convert TensorShape to Tensor.'
_ = as_ref
if (not s.is_fully_defined()):
raise ValueError(('Cannot convert a partially known TensorShape to a Tensor: %s' % s))
s_list = s.as_list()
int64_value = 0
for dim in s_list:
if (dim >= (2 ** 31)):
int64_value = dim
break
if (dtype is not None):
if (dtype not in (dtypes.int32, dtypes.int64)):
raise TypeError(('Cannot convert a TensorShape to dtype: %s' % dtype))
if ((dtype == dtypes.int32) and int64_value):
raise ValueError(('Cannot convert a TensorShape to dtype int32; a dimension is too large (%s)' % int64_value))
else:
dtype = (dtypes.int64 if int64_value else dtypes.int32)
if (name is None):
name = 'shape_as_tensor'
return constant(s_list, dtype=dtype, name=name) | Function to convert TensorShape to Tensor. | tensorflow/python/framework/constant_op.py | _tensor_shape_tensor_conversion_function | somashekar10/tensorflow | 522 | python | def _tensor_shape_tensor_conversion_function(s, dtype=None, name=None, as_ref=False):
_ = as_ref
if (not s.is_fully_defined()):
raise ValueError(('Cannot convert a partially known TensorShape to a Tensor: %s' % s))
s_list = s.as_list()
int64_value = 0
for dim in s_list:
if (dim >= (2 ** 31)):
int64_value = dim
break
if (dtype is not None):
if (dtype not in (dtypes.int32, dtypes.int64)):
raise TypeError(('Cannot convert a TensorShape to dtype: %s' % dtype))
if ((dtype == dtypes.int32) and int64_value):
raise ValueError(('Cannot convert a TensorShape to dtype int32; a dimension is too large (%s)' % int64_value))
else:
dtype = (dtypes.int64 if int64_value else dtypes.int32)
if (name is None):
name = 'shape_as_tensor'
return constant(s_list, dtype=dtype, name=name) | def _tensor_shape_tensor_conversion_function(s, dtype=None, name=None, as_ref=False):
_ = as_ref
if (not s.is_fully_defined()):
raise ValueError(('Cannot convert a partially known TensorShape to a Tensor: %s' % s))
s_list = s.as_list()
int64_value = 0
for dim in s_list:
if (dim >= (2 ** 31)):
int64_value = dim
break
if (dtype is not None):
if (dtype not in (dtypes.int32, dtypes.int64)):
raise TypeError(('Cannot convert a TensorShape to dtype: %s' % dtype))
if ((dtype == dtypes.int32) and int64_value):
raise ValueError(('Cannot convert a TensorShape to dtype int32; a dimension is too large (%s)' % int64_value))
else:
dtype = (dtypes.int64 if int64_value else dtypes.int32)
if (name is None):
name = 'shape_as_tensor'
return constant(s_list, dtype=dtype, name=name)<|docstring|>Function to convert TensorShape to Tensor.<|endoftext|> |
98f790c28ab003aa1bb836f97df67cb34713dd442351c4acde25bde38c9b84d2 | def _dimension_tensor_conversion_function(d, dtype=None, name=None, as_ref=False):
'Function to convert Dimension to Tensor.'
_ = as_ref
if (d.value is None):
raise ValueError(('Cannot convert an unknown Dimension to a Tensor: %s' % d))
if (dtype is not None):
if (dtype not in (dtypes.int32, dtypes.int64)):
raise TypeError(('Cannot convert a TensorShape to dtype: %s' % dtype))
else:
dtype = dtypes.int32
if (name is None):
name = 'shape_as_tensor'
return constant(d.value, dtype=dtype, name=name) | Function to convert Dimension to Tensor. | tensorflow/python/framework/constant_op.py | _dimension_tensor_conversion_function | somashekar10/tensorflow | 522 | python | def _dimension_tensor_conversion_function(d, dtype=None, name=None, as_ref=False):
_ = as_ref
if (d.value is None):
raise ValueError(('Cannot convert an unknown Dimension to a Tensor: %s' % d))
if (dtype is not None):
if (dtype not in (dtypes.int32, dtypes.int64)):
raise TypeError(('Cannot convert a TensorShape to dtype: %s' % dtype))
else:
dtype = dtypes.int32
if (name is None):
name = 'shape_as_tensor'
return constant(d.value, dtype=dtype, name=name) | def _dimension_tensor_conversion_function(d, dtype=None, name=None, as_ref=False):
_ = as_ref
if (d.value is None):
raise ValueError(('Cannot convert an unknown Dimension to a Tensor: %s' % d))
if (dtype is not None):
if (dtype not in (dtypes.int32, dtypes.int64)):
raise TypeError(('Cannot convert a TensorShape to dtype: %s' % dtype))
else:
dtype = dtypes.int32
if (name is None):
name = 'shape_as_tensor'
return constant(d.value, dtype=dtype, name=name)<|docstring|>Function to convert Dimension to Tensor.<|endoftext|> |
52eda10857908c93439e36323facdde9b8638c30117fd8403935135759a31595 | def __init__(self, queue_name: str, redis, **kwargs):
'\n :param queue_name: Name of the queue\n :param redis: Redis client\n :param **kwargs: [\n synced_slaves_enabled: bool Enables slave synchronous syncing\n synced_slaves_count: int Number of slaves that need to be synced in order to continue\n synced_slaves_timeout: int Timeout for syncing slaves. If reached, exception is raised\n ]\n :return:\n '
self.client_id = '{0}[{1}][{2}]'.format(socket.gethostname(), os.getpid(), int(time.time()))
self.redis = redis
self.queue_name = queue_name
self.options = kwargs
self._register_commands() | :param queue_name: Name of the queue
:param redis: Redis client
:param **kwargs: [
synced_slaves_enabled: bool Enables slave synchronous syncing
synced_slaves_count: int Number of slaves that need to be synced in order to continue
synced_slaves_timeout: int Timeout for syncing slaves. If reached, exception is raised
]
:return: | pyrq/unique_queues.py | __init__ | ondrejkajinek/py-rq | 0 | python | def __init__(self, queue_name: str, redis, **kwargs):
'\n :param queue_name: Name of the queue\n :param redis: Redis client\n :param **kwargs: [\n synced_slaves_enabled: bool Enables slave synchronous syncing\n synced_slaves_count: int Number of slaves that need to be synced in order to continue\n synced_slaves_timeout: int Timeout for syncing slaves. If reached, exception is raised\n ]\n :return:\n '
self.client_id = '{0}[{1}][{2}]'.format(socket.gethostname(), os.getpid(), int(time.time()))
self.redis = redis
self.queue_name = queue_name
self.options = kwargs
self._register_commands() | def __init__(self, queue_name: str, redis, **kwargs):
'\n :param queue_name: Name of the queue\n :param redis: Redis client\n :param **kwargs: [\n synced_slaves_enabled: bool Enables slave synchronous syncing\n synced_slaves_count: int Number of slaves that need to be synced in order to continue\n synced_slaves_timeout: int Timeout for syncing slaves. If reached, exception is raised\n ]\n :return:\n '
self.client_id = '{0}[{1}][{2}]'.format(socket.gethostname(), os.getpid(), int(time.time()))
self.redis = redis
self.queue_name = queue_name
self.options = kwargs
self._register_commands()<|docstring|>:param queue_name: Name of the queue
:param redis: Redis client
:param **kwargs: [
synced_slaves_enabled: bool Enables slave synchronous syncing
synced_slaves_count: int Number of slaves that need to be synced in order to continue
synced_slaves_timeout: int Timeout for syncing slaves. If reached, exception is raised
]
:return:<|endoftext|> |
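Construction needs only a queue name and a redis-py client; replica syncing is controlled entirely by the keyword options documented above. A minimal sketch (the class name `UniqueQueue` is an assumption, since this excerpt shows the methods of `pyrq/unique_queues.py` but not the class statement):

```python
import redis

from pyrq.unique_queues import UniqueQueue  # class name assumed, not shown above

redis_client = redis.Redis(host="localhost", port=6379, decode_responses=True)
queue = UniqueQueue(
    "example-queue",
    redis_client,
    synced_slaves_enabled=True,  # wait for replicas after each write
    synced_slaves_count=1,
    synced_slaves_timeout=100,
)
```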
bc4238a962d0804a5459672231dc6b8acdfb1eba51260c55546e24d3fca69dc1 | def get_count(self) -> int:
'\n :return: Number of items in the queue\n '
return self.redis.llen(self.queue_name) | :return: Number of items in the queue | pyrq/unique_queues.py | get_count | ondrejkajinek/py-rq | 0 | python | def get_count(self) -> int:
'\n \n '
return self.redis.llen(self.queue_name) | def get_count(self) -> int:
'\n \n '
return self.redis.llen(self.queue_name)<|docstring|>:return: Number of items in the queue<|endoftext|> |
49c8966d129a428666baf6cad0439b493e3842f51e8aeed086f10fa3df2b9563 | def add_item(self, item) -> bool:
'\n :param item: Anything that is convertible to str\n '
self.add_command(keys=[self.queue_name, self.set_name], args=[str(item)])
self._wait_for_synced_slaves() | :param item: Anything that is convertible to str | pyrq/unique_queues.py | add_item | ondrejkajinek/py-rq | 0 | python | def add_item(self, item) -> bool:
'\n \n '
self.add_command(keys=[self.queue_name, self.set_name], args=[str(item)])
self._wait_for_synced_slaves() | def add_item(self, item) -> bool:
'\n \n '
self.add_command(keys=[self.queue_name, self.set_name], args=[str(item)])
self._wait_for_synced_slaves()<|docstring|>:param item: Anything that is convertible to str<|endoftext|> |
ccf1ee86a41869a32b1c69a540ba72b586eb988e6169d3cfec92a98e18cc3eeb | def add_items(self, items: list):
'\n :param items: List of items to be added via pipeline\n '
for chunk in helpers.create_chunks(items, CHUNK_SIZE):
pipeline = self.redis.pipeline()
for item in chunk:
self.add_command(keys=[self.queue_name, self.set_name], args=[str(item)], client=pipeline)
pipeline.execute()
self._wait_for_synced_slaves() | :param items: List of items to be added via pipeline | pyrq/unique_queues.py | add_items | ondrejkajinek/py-rq | 0 | python | def add_items(self, items: list):
'\n \n '
for chunk in helpers.create_chunks(items, CHUNK_SIZE):
pipeline = self.redis.pipeline()
for item in chunk:
self.add_command(keys=[self.queue_name, self.set_name], args=[str(item)], client=pipeline)
pipeline.execute()
self._wait_for_synced_slaves() | def add_items(self, items: list):
'\n \n '
for chunk in helpers.create_chunks(items, CHUNK_SIZE):
pipeline = self.redis.pipeline()
for item in chunk:
self.add_command(keys=[self.queue_name, self.set_name], args=[str(item)], client=pipeline)
pipeline.execute()
self._wait_for_synced_slaves()<|docstring|>:param items: List of items to be added via pipeline<|endoftext|> |
0da11c3044fffd1d8c43701e5303980ac2874251ef068939ff21e57d55143278 | def get_items(self, count: int) -> list:
'\n :param count: Number of items to be returned\n :return: List of items\n '
return self.get_command(keys=[self.queue_name, self.set_name, self.processing_queue_name, self.timeouts_hash_name], args=[count, int(time.time())]) | :param count: Number of items to be returned
:return: List of items | pyrq/unique_queues.py | get_items | ondrejkajinek/py-rq | 0 | python | def get_items(self, count: int) -> list:
'\n :param count: Number of items to be returned\n :return: List of items\n '
return self.get_command(keys=[self.queue_name, self.set_name, self.processing_queue_name, self.timeouts_hash_name], args=[count, int(time.time())]) | def get_items(self, count: int) -> list:
'\n :param count: Number of items to be returned\n :return: List of items\n '
return self.get_command(keys=[self.queue_name, self.set_name, self.processing_queue_name, self.timeouts_hash_name], args=[count, int(time.time())])<|docstring|>:param count: Number of items to be returned
:return: List of items<|endoftext|> |
a821a26bb23e1a1478adce76782fca2e3daf29cad96c274886fe4650e6aa5a03 | def ack_item(self, item):
'\n :param item: Anything that is convertible to str\n :return: Success\n '
self.ack_command(keys=[self.processing_queue_name, self.timeouts_hash_name], args=[str(item)])
self._wait_for_synced_slaves() | :param item: Anything that is convertible to str
:return: Success | pyrq/unique_queues.py | ack_item | ondrejkajinek/py-rq | 0 | python | def ack_item(self, item):
'\n :param item: Anything that is convertible to str\n :return: Success\n '
self.ack_command(keys=[self.processing_queue_name, self.timeouts_hash_name], args=[str(item)])
self._wait_for_synced_slaves() | def ack_item(self, item):
'\n :param item: Anything that is convertible to str\n :return: Success\n '
self.ack_command(keys=[self.processing_queue_name, self.timeouts_hash_name], args=[str(item)])
self._wait_for_synced_slaves()<|docstring|>:param item: Anything that is convertible to str
:return: Success<|endoftext|> |
f7176c3d2e0774a6d4e85e1c115732b5b9a21a4a5c9cf7c9b096d65cba8e19b0 | def ack_items(self, items: list):
'\n :param items: List of items that are convertible to str\n '
pipeline = self.redis.pipeline()
for item in items:
self.ack_command(keys=[self.processing_queue_name, self.timeouts_hash_name], args=[str(item)], client=pipeline)
pipeline.execute()
self._wait_for_synced_slaves() | :param items: List of items that are convertible to str | pyrq/unique_queues.py | ack_items | ondrejkajinek/py-rq | 0 | python | def ack_items(self, items: list):
'\n \n '
pipeline = self.redis.pipeline()
for item in items:
self.ack_command(keys=[self.processing_queue_name, self.timeouts_hash_name], args=[str(item)], client=pipeline)
pipeline.execute()
self._wait_for_synced_slaves() | def ack_items(self, items: list):
'\n \n '
pipeline = self.redis.pipeline()
for item in items:
self.ack_command(keys=[self.processing_queue_name, self.timeouts_hash_name], args=[str(item)], client=pipeline)
pipeline.execute()
self._wait_for_synced_slaves()<|docstring|>:param items: List of items that are convertible to str<|endoftext|> |
d90563abc8ad8ea667f8203245edd2456a235e53b03c498d9f5fd93fb758986d | def reject_item(self, item):
'\n :param item: Anything that is convertible to str\n '
self.reject_command(keys=[self.queue_name, self.set_name, self.processing_queue_name, self.timeouts_hash_name], args=[str(item)])
self._wait_for_synced_slaves() | :param item: Anything that is convertible to str | pyrq/unique_queues.py | reject_item | ondrejkajinek/py-rq | 0 | python | def reject_item(self, item):
'\n \n '
self.reject_command(keys=[self.queue_name, self.set_name, self.processing_queue_name, self.timeouts_hash_name], args=[str(item)])
self._wait_for_synced_slaves() | def reject_item(self, item):
'\n \n '
self.reject_command(keys=[self.queue_name, self.set_name, self.processing_queue_name, self.timeouts_hash_name], args=[str(item)])
self._wait_for_synced_slaves()<|docstring|>:param item: Anything that is convertible to str<|endoftext|> |
b6133389e2b9dbb6777a80c468445f96902c17913c30e30f7b8a01dc686735f9 | def reject_items(self, items: list):
'\n :param items: List of items that are convertible to str\n '
pipeline = self.redis.pipeline()
for item in reversed(items):
self.reject_command(keys=[self.queue_name, self.set_name, self.processing_queue_name, self.timeouts_hash_name], args=[str(item)], client=pipeline)
pipeline.execute()
self._wait_for_synced_slaves() | :param items: List of items that are convertible to str | pyrq/unique_queues.py | reject_items | ondrejkajinek/py-rq | 0 | python | def reject_items(self, items: list):
'\n \n '
pipeline = self.redis.pipeline()
for item in reversed(items):
self.reject_command(keys=[self.queue_name, self.set_name, self.processing_queue_name, self.timeouts_hash_name], args=[str(item)], client=pipeline)
pipeline.execute()
self._wait_for_synced_slaves() | def reject_items(self, items: list):
'\n \n '
pipeline = self.redis.pipeline()
for item in reversed(items):
self.reject_command(keys=[self.queue_name, self.set_name, self.processing_queue_name, self.timeouts_hash_name], args=[str(item)], client=pipeline)
pipeline.execute()
self._wait_for_synced_slaves()<|docstring|>:param items: List of items that are convertible to str<|endoftext|> |
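Together, `add_item(s)`, `get_items`, `ack_item(s)` and `reject_item(s)` form the full produce/consume/acknowledge cycle: items enter both the list and the dedup set, move to the per-client processing queue on `get_items`, and are either acked away or pushed back on rejection. A hedged end-to-end sketch, continuing the hypothetical `queue` object from above:

```python
queue.add_items(["job-1", "job-2", "job-2"])  # the second "job-2" is deduplicated

for item in queue.get_items(10):  # items move to the per-client processing queue
    try:
        handle(item)              # hypothetical worker function
        queue.ack_item(item)      # removes it from processing; clears the timeout hash
    except Exception:
        queue.reject_item(item)   # pushes it back to the main queue and dedup set
```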
2e533ce77b7e4d8069bc043e3ee91d28783705c25f19e817c95f088cf885e154 | def re_enqueue_timeout_items(self, timeout: int=PROCESSING_TIMEOUT):
'\n :param timeout: int seconds\n '
for (queue, value_time) in self._get_sorted_processing_queues():
if ((int(float(value_time)) + timeout) < int(time.time())):
self.re_enqueue_command(keys=[self.queue_name, self.set_name, queue, self.timeouts_hash_name])
self._wait_for_synced_slaves() | :param timeout: int seconds | pyrq/unique_queues.py | re_enqueue_timeout_items | ondrejkajinek/py-rq | 0 | python | def re_enqueue_timeout_items(self, timeout: int=PROCESSING_TIMEOUT):
'\n \n '
for (queue, value_time) in self._get_sorted_processing_queues():
if ((int(float(value_time)) + timeout) < int(time.time())):
self.re_enqueue_command(keys=[self.queue_name, self.set_name, queue, self.timeouts_hash_name])
self._wait_for_synced_slaves() | def re_enqueue_timeout_items(self, timeout: int=PROCESSING_TIMEOUT):
'\n \n '
for (queue, value_time) in self._get_sorted_processing_queues():
if ((int(float(value_time)) + timeout) < int(time.time())):
self.re_enqueue_command(keys=[self.queue_name, self.set_name, queue, self.timeouts_hash_name])
self._wait_for_synced_slaves()<|docstring|>:param timeout: int seconds<|endoftext|> |
8b95cb98c66b113c0eb748a67d1937decbccb9ca91f6c176fef1c6240091aec4 | def drop_timeout_items(self, timeout: int=PROCESSING_TIMEOUT):
'\n :param timeout: int seconds\n '
for (queue, value_time) in self._get_sorted_processing_queues():
if ((int(float(value_time)) + timeout) < int(time.time())):
self.redis.delete(queue)
self.redis.hdel(self.timeouts_hash_name, queue)
self._wait_for_synced_slaves() | :param timeout: int seconds | pyrq/unique_queues.py | drop_timeout_items | ondrejkajinek/py-rq | 0 | python | def drop_timeout_items(self, timeout: int=PROCESSING_TIMEOUT):
'\n \n '
for (queue, value_time) in self._get_sorted_processing_queues():
if ((int(float(value_time)) + timeout) < int(time.time())):
self.redis.delete(queue)
self.redis.hdel(self.timeouts_hash_name, queue)
self._wait_for_synced_slaves() | def drop_timeout_items(self, timeout: int=PROCESSING_TIMEOUT):
'\n \n '
for (queue, value_time) in self._get_sorted_processing_queues():
if ((int(float(value_time)) + timeout) < int(time.time())):
self.redis.delete(queue)
self.redis.hdel(self.timeouts_hash_name, queue)
self._wait_for_synced_slaves()<|docstring|>:param timeout: int seconds<|endoftext|> |
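A periodic maintenance job would call one of the two timeout handlers above, depending on whether stalled work should be retried or discarded. A small sketch; 600 seconds is an illustrative threshold, not the module's actual `PROCESSING_TIMEOUT` value, which this excerpt does not show:

```python
# Retry items whose consumer stalled more than 10 minutes ago...
queue.re_enqueue_timeout_items(timeout=600)
# ...or, if stalled work should be discarded instead:
# queue.drop_timeout_items(timeout=600)
```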
178ed0a697ae207bb69eab6e5d76ce0acd5ffa673632f6542fe6cd0179d8ab1e | @property
def set_name(self):
'\n :return: Name of the set queue\n '
return (self.queue_name + SET_QUEUE_SUFFIX) | :return: Name of the set queue | pyrq/unique_queues.py | set_name | ondrejkajinek/py-rq | 0 | python | @property
def set_name(self):
'\n \n '
return (self.queue_name + SET_QUEUE_SUFFIX) | @property
def set_name(self):
'\n \n '
return (self.queue_name + SET_QUEUE_SUFFIX)<|docstring|>:return: Name of the set queue<|endoftext|> |
8a9dd7a38de6ce3eac3da35dfa9d59817856b86662698e249eecb66fe09b2a42 | @property
def processing_queue_name(self):
'\n :return: Name of the processing queue\n '
return (((self.queue_name + PROCESSING_SUFFIX) + '-') + self.client_id) | :return: Name of the processing queue | pyrq/unique_queues.py | processing_queue_name | ondrejkajinek/py-rq | 0 | python | @property
def processing_queue_name(self):
'\n \n '
return (((self.queue_name + PROCESSING_SUFFIX) + '-') + self.client_id) | @property
def processing_queue_name(self):
'\n \n '
return (((self.queue_name + PROCESSING_SUFFIX) + '-') + self.client_id)<|docstring|>:return: Name of the processing queue<|endoftext|> |
8fb33c83ef93ed1ecfe323114e6e7b82494e1fa78050e35f9c58892b568e9dd5 | @property
def timeouts_hash_name(self):
'\n :return: Name of the timeouts hash\n '
return (self.queue_name + PROCESSING_TIMEOUT_SUFFIX) | :return: Name of the timeouts hash | pyrq/unique_queues.py | timeouts_hash_name | ondrejkajinek/py-rq | 0 | python | @property
def timeouts_hash_name(self):
'\n \n '
return (self.queue_name + PROCESSING_TIMEOUT_SUFFIX) | @property
def timeouts_hash_name(self):
'\n \n '
return (self.queue_name + PROCESSING_TIMEOUT_SUFFIX)<|docstring|>:return: Name of the timeouts hash<|endoftext|> |
601a4850bfad7e5ddd4c7c2c5c7c8a617a53ca5a25b9101731eae635d2a81a13 | @staticmethod
def add():
'\n :return: LUA Script for ACK command\n '
return "\n local queue = KEYS[1]\n local set = KEYS[2]\n local item = ARGV[1]\n\n local inQueue = redis.call('sismember', set, item)\n if inQueue == 0 then\n redis.call('lpush', queue, item)\n redis.call('sadd', set, item)\n end\n " | :return: LUA Script for ACK command | pyrq/unique_queues.py | add | ondrejkajinek/py-rq | 0 | python | @staticmethod
def add():
'\n \n '
return "\n local queue = KEYS[1]\n local set = KEYS[2]\n local item = ARGV[1]\n\n local inQueue = redis.call('sismember', set, item)\n if inQueue == 0 then\n redis.call('lpush', queue, item)\n redis.call('sadd', set, item)\n end\n " | @staticmethod
def add():
'\n \n '
return "\n local queue = KEYS[1]\n local set = KEYS[2]\n local item = ARGV[1]\n\n local inQueue = redis.call('sismember', set, item)\n if inQueue == 0 then\n redis.call('lpush', queue, item)\n redis.call('sadd', set, item)\n end\n "<|docstring|>:return: LUA Script for ACK command<|endoftext|> |
f6ea6af72e62d2c90792cd36c40a840f3d5a5b260b1220ff23476a338d4ca0f6 | @staticmethod
def ack():
'\n :return: LUA Script for ACK command\n '
return "\n local processing = KEYS[1]\n local timeouts = KEYS[2]\n local item = ARGV[1]\n local result = redis.call('lrem', processing, -1, item)\n local count = redis.call('llen', processing)\n if count == 0 then\n redis.call('hdel', timeouts, processing)\n end\n " | :return: LUA Script for ACK command | pyrq/unique_queues.py | ack | ondrejkajinek/py-rq | 0 | python | @staticmethod
def ack():
'\n \n '
return "\n local processing = KEYS[1]\n local timeouts = KEYS[2]\n local item = ARGV[1]\n local result = redis.call('lrem', processing, -1, item)\n local count = redis.call('llen', processing)\n if count == 0 then\n redis.call('hdel', timeouts, processing)\n end\n " | @staticmethod
def ack():
'\n \n '
return "\n local processing = KEYS[1]\n local timeouts = KEYS[2]\n local item = ARGV[1]\n local result = redis.call('lrem', processing, -1, item)\n local count = redis.call('llen', processing)\n if count == 0 then\n redis.call('hdel', timeouts, processing)\n end\n "<|docstring|>:return: LUA Script for ACK command<|endoftext|> |
3ffe29267c098ddb8cf8ac7a2fcfd90a7746a07437db9cd444a1f1f981da274b | @staticmethod
def get():
'\n :return: LUA Script for GET command\n '
return "\n local queue = KEYS[1]\n local set = KEYS[2]\n local processing = KEYS[3]\n local timeouts = KEYS[4]\n local size = ARGV[1]\n local time = ARGV[2]\n redis.call('hset', timeouts, processing, time)\n local item\n local items = {}\n for i = 1, size, 1 do\n item = redis.call('rpoplpush', queue, processing)\n if not item then\n break\n end\n redis.call('srem', set, item)\n table.insert(items, item)\n end\n return items\n " | :return: LUA Script for GET command | pyrq/unique_queues.py | get | ondrejkajinek/py-rq | 0 | python | @staticmethod
def get():
'\n \n '
return "\n local queue = KEYS[1]\n local set = KEYS[2]\n local processing = KEYS[3]\n local timeouts = KEYS[4]\n local size = ARGV[1]\n local time = ARGV[2]\n redis.call('hset', timeouts, processing, time)\n local item\n local items = {}\n for i = 1, size, 1 do\n item = redis.call('rpoplpush', queue, processing)\n if not item then\n break\n end\n redis.call('srem', set, item)\n table.insert(items, item)\n end\n return items\n " | @staticmethod
def get():
'\n \n '
return "\n local queue = KEYS[1]\n local set = KEYS[2]\n local processing = KEYS[3]\n local timeouts = KEYS[4]\n local size = ARGV[1]\n local time = ARGV[2]\n redis.call('hset', timeouts, processing, time)\n local item\n local items = {}\n for i = 1, size, 1 do\n item = redis.call('rpoplpush', queue, processing)\n if not item then\n break\n end\n redis.call('srem', set, item)\n table.insert(items, item)\n end\n return items\n "<|docstring|>:return: LUA Script for GET command<|endoftext|> |
2801e426c3ff11fe0eb7e67028035f2ac794d776f7eb629910f99f7b18c672ef | @staticmethod
def reject():
'\n :return: LUA Script for REJECT command\n '
return "\n local queue = KEYS[1]\n local set = KEYS[2]\n local processing = KEYS[3]\n local timeouts = KEYS[4]\n local item = ARGV[1]\n local removed = redis.call('lrem', processing, -1, item)\n if removed == 1 then\n local inQueue = redis.call('sismember', set, item)\n if inQueue == 0 then\n redis.call('rpush', queue, item)\n redis.call('sadd', set, item)\n end\n end\n local count = redis.call('llen', processing)\n if count == 0 then\n redis.call('hdel', timeouts, processing)\n end\n " | :return: LUA Script for REJECT command | pyrq/unique_queues.py | reject | ondrejkajinek/py-rq | 0 | python | @staticmethod
def reject():
'\n \n '
return "\n local queue = KEYS[1]\n local set = KEYS[2]\n local processing = KEYS[3]\n local timeouts = KEYS[4]\n local item = ARGV[1]\n local removed = redis.call('lrem', processing, -1, item)\n if removed == 1 then\n local inQueue = redis.call('sismember', set, item)\n if inQueue == 0 then\n redis.call('rpush', queue, item)\n redis.call('sadd', set, item)\n end\n end\n local count = redis.call('llen', processing)\n if count == 0 then\n redis.call('hdel', timeouts, processing)\n end\n " | @staticmethod
def reject():
'\n \n '
return "\n local queue = KEYS[1]\n local set = KEYS[2]\n local processing = KEYS[3]\n local timeouts = KEYS[4]\n local item = ARGV[1]\n local removed = redis.call('lrem', processing, -1, item)\n if removed == 1 then\n local inQueue = redis.call('sismember', set, item)\n if inQueue == 0 then\n redis.call('rpush', queue, item)\n redis.call('sadd', set, item)\n end\n end\n local count = redis.call('llen', processing)\n if count == 0 then\n redis.call('hdel', timeouts, processing)\n end\n "<|docstring|>:return: LUA Script for REJECT command<|endoftext|> |
76c4e9293ac0d870f7463e6c0e408723c4cd0e3cd146177522cfe60987542930 | @staticmethod
def re_enqueue():
'\n :return: LUA Script for reject queue\n '
return "\n local queue = KEYS[1]\n local set = KEYS[2]\n local processing = KEYS[3]\n local timeouts = KEYS[4]\n local item\n local inQueue\n while true do\n item = redis.call('lpop', processing);\n if not item then\n break\n end\n inQueue = redis.call('sismember', set, item)\n if inQueue == 0 then\n redis.call('rpush', queue, item)\n redis.call('sadd', set, item)\n else\n redis.call('lrem', queue, -1, item)\n redis.call('rpush', queue, item)\n end\n end\n redis.call('hdel', timeouts, processing)\n " | :return: LUA Script for reject queue | pyrq/unique_queues.py | re_enqueue | ondrejkajinek/py-rq | 0 | python | @staticmethod
def re_enqueue():
'\n \n '
return "\n local queue = KEYS[1]\n local set = KEYS[2]\n local processing = KEYS[3]\n local timeouts = KEYS[4]\n local item\n local inQueue\n while true do\n item = redis.call('lpop', processing);\n if not item then\n break\n end\n inQueue = redis.call('sismember', set, item)\n if inQueue == 0 then\n redis.call('rpush', queue, item)\n redis.call('sadd', set, item)\n else\n redis.call('lrem', queue, -1, item)\n redis.call('rpush', queue, item)\n end\n end\n redis.call('hdel', timeouts, processing)\n " | @staticmethod
def re_enqueue():
'\n \n '
return "\n local queue = KEYS[1]\n local set = KEYS[2]\n local processing = KEYS[3]\n local timeouts = KEYS[4]\n local item\n local inQueue\n while true do\n item = redis.call('lpop', processing);\n if not item then\n break\n end\n inQueue = redis.call('sismember', set, item)\n if inQueue == 0 then\n redis.call('rpush', queue, item)\n redis.call('sadd', set, item)\n else\n redis.call('lrem', queue, -1, item)\n redis.call('rpush', queue, item)\n end\n end\n redis.call('hdel', timeouts, processing)\n "<|docstring|>:return: LUA Script for reject queue<|endoftext|> |
d46b7e6bbfab22e555312a7ff60627cce260e3f7c6a6ee3ebac2f8cc1a1735a9 | @staticmethod
def isAlwaysEnabled():
'Request to be always enabled.\n\n Notes:\n Setting this to true is only applicable to standard plugins. In\n this case, the plugin will be enabled upon Nuitka start-up. Any\n plugin detector class will then be ignored. Method isRelevant() may\n also be present and can be used to fine-control enabling the\n plugin: A to-be-enabled, but irrelevant plugin will still not be\n activated.\n Returns:\n True or False\n '
return False | Request to be always enabled.
Notes:
Setting this to true is only applicable to standard plugins. In
this case, the plugin will be enabled upon Nuitka start-up. Any
plugin detector class will then be ignored. Method isRelevant() may
also be present and can be used to fine-control enabling the
plugin: A to-be-enabled, but irrelevant plugin will still not be
activated.
Returns:
True or False | nuitka/plugins/PluginBase.py | isAlwaysEnabled | mikehaben69/Nuitka | 5,421 | python | @staticmethod
def isAlwaysEnabled():
'Request to be always enabled.\n\n Notes:\n Setting this to true is only applicable to standard plugins. In\n this case, the plugin will be enabled upon Nuitka start-up. Any\n plugin detector class will then be ignored. Method isRelevant() may\n also be present and can be used to fine-control enabling the\n plugin: A to-be-enabled, but irrelevant plugin will still not be\n activated.\n Returns:\n True or False\n '
return False | @staticmethod
def isAlwaysEnabled():
'Request to be always enabled.\n\n Notes:\n Setting this to true is only applicable to standard plugins. In\n this case, the plugin will be enabled upon Nuitka start-up. Any\n plugin detector class will then be ignored. Method isRelevant() may\n also be present and can be used to fine-control enabling the\n plugin: A to-be-enabled, but irrelevant plugin will still not be\n activated.\n Returns:\n True or False\n '
return False<|docstring|>Request to be always enabled.
Notes:
Setting this to true is only applicable to standard plugins. In
this case, the plugin will be enabled upon Nuitka start-up. Any
plugin detector class will then be ignored. Method isRelevant() may
also be present and can be used to fine-control enabling the
plugin: A to-be-enabled, but irrelevant plugin will still not be
activated.
Returns:
True or False<|endoftext|> |
ad5f744f0f93c6b08ffe72517edf70b762f7a330b0644f18824e08d5b4942ba5 | @classmethod
def isRelevant(cls):
'Consider if the plugin is relevant.\n\n Notes:\n A plugin may only be needed on a certain OS, or with some options,\n but this is only a class method, so you will not have much run time\n information.\n\n Returns:\n True or False\n\n '
return True | Consider if the plugin is relevant.
Notes:
A plugin may only be needed on a certain OS, or with some options,
but this is only a class method, so you will not have much run time
information.
Returns:
True or False | nuitka/plugins/PluginBase.py | isRelevant | mikehaben69/Nuitka | 5,421 | python | @classmethod
def isRelevant(cls):
'Consider if the plugin is relevant.\n\n Notes:\n A plugin may only be needed on a certain OS, or with some options,\n but this is only a class method, so you will not have much run time\n information.\n\n Returns:\n True or False\n\n '
return True | @classmethod
def isRelevant(cls):
'Consider if the plugin is relevant.\n\n Notes:\n A plugin may only be needed on a certain OS, or with some options,\n but this is only a class method, so you will not have much run time\n information.\n\n Returns:\n True or False\n\n '
return True<|docstring|>Consider if the plugin is relevant.
Notes:
A plugin may only be needed on a certain OS, or with some options,
but this is only a class method, so you will not have much run time
information.
Returns:
True or False<|endoftext|> |
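A hedged sketch of a plugin built on the two hooks above; the import path mirrors this file (nuitka/plugins/PluginBase.py), while the class name, plugin name, and platform check are invented for illustration.
import sys

from nuitka.plugins.PluginBase import NuitkaPluginBase

class NuitkaPluginWindowsFixups(NuitkaPluginBase):
    plugin_name = 'windows-fixups'  # illustrative; selected via --plugin-enable=windows-fixups

    @staticmethod
    def isAlwaysEnabled():
        # Not switched on at start-up; the user must enable it explicitly.
        return False

    @classmethod
    def isRelevant(cls):
        # Only offered at all when compiling on Windows.
        return (sys.platform == 'win32')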
c3125bdb402e681e033bb4df4985f5183a807d6729dae0b1379896b42ef45b54 | @classmethod
def getPluginDefaultOptionValues(cls):
'This method is used to get the values to use as defaults.\n\n Since the defaults are in the command line options, we call\n that and extract them.\n '
from optparse import OptionGroup, OptionParser
parser = OptionParser()
group = OptionGroup(parser, 'Pseudo Target')
cls.addPluginCommandLineOptions(group)
result = {}
for option in group.option_list:
result[option.dest] = option.default
return result | This method is used to get the values to use as defaults.
Since the defaults are in the command line options, we call
that and extract them. | nuitka/plugins/PluginBase.py | getPluginDefaultOptionValues | mikehaben69/Nuitka | 5,421 | python | @classmethod
def getPluginDefaultOptionValues(cls):
'This method is used to get the values to use as defaults.\n\n Since the defaults are in the command line options, we call\n that and extract them.\n '
from optparse import OptionGroup, OptionParser
parser = OptionParser()
group = OptionGroup(parser, 'Pseudo Target')
cls.addPluginCommandLineOptions(group)
result = {}
for option in group.option_list:
result[option.dest] = option.default
return result | @classmethod
def getPluginDefaultOptionValues(cls):
'This method is used to get the values to use as defaults.\n\n Since the defaults are in the command line options, we call\n that and extract them.\n '
from optparse import OptionGroup, OptionParser
parser = OptionParser()
group = OptionGroup(parser, 'Pseudo Target')
cls.addPluginCommandLineOptions(group)
result = {}
for option in group.option_list:
result[option.dest] = option.default
return result<|docstring|>This method is used to get the values to use as defaults.
Since the defaults are in the command line options, we call
that and extract them.<|endoftext|> |
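For context, a sketch of the addPluginCommandLineOptions() hook that the method above drives, placed inside a NuitkaPluginBase subclass; with this definition, getPluginDefaultOptionValues() would return {'report_path': 'report.txt'}. The option name, dest, and default are invented.
@classmethod
def addPluginCommandLineOptions(cls, group):
    # `group` is the optparse.OptionGroup built in getPluginDefaultOptionValues().
    group.add_option('--myplugin-report-path', action='store', dest='report_path', default='report.txt', help='Where to write the report (illustrative).')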
596d5127f5261b6cc8bf06cfc0830f537993fe30fbdf4dbef64642ff39fb325e | def isRequiredImplicitImport(self, module, full_name):
'Indicate whether an implicitly imported module should be accepted.\n\n Notes:\n You may negate importing a module specified as "implicit import",\n although this is an unexpected event.\n\n Args:\n module: the module object\n full_name: of the implicitly import module\n Returns:\n True or False\n '
return True | Indicate whether an implicitly imported module should be accepted.
Notes:
You may negate importing a module specified as "implicit import",
although this is an unexpected event.
Args:
module: the module object
full_name: of the implicitly import module
Returns:
True or False | nuitka/plugins/PluginBase.py | isRequiredImplicitImport | mikehaben69/Nuitka | 5,421 | python | def isRequiredImplicitImport(self, module, full_name):
'Indicate whether an implicitly imported module should be accepted.\n\n Notes:\n You may negate importing a module specified as "implicit import",\n although this is an unexpected event.\n\n Args:\n module: the module object\n full_name: of the implicitly import module\n Returns:\n True or False\n '
return True | def isRequiredImplicitImport(self, module, full_name):
'Indicate whether an implicitly imported module should be accepted.\n\n Notes:\n You may negate importing a module specified as "implicit import",\n although this is an unexpected event.\n\n Args:\n module: the module object\n full_name: of the implicitly import module\n Returns:\n True or False\n '
return True<|docstring|>Indicate whether an implicitly imported module should be accepted.
Notes:
You may negate importing a module specified as "implicit import",
although this is an unexpected event.
Args:
module: the module object
full_name: of the implicitly import module
Returns:
True or False<|endoftext|> |
1a57797d180bfb989d8e66b63d02b287202a2e1ffac5edecd87f688e6d39a4a7 | def getImplicitImports(self, module):
'Return the implicit imports for a given module (iterator).\n\n Args:\n module: the module object\n Yields:\n implicit imports for the module\n '
return () | Return the implicit imports for a given module (iterator).
Args:
module: the module object
Yields:
implicit imports for the module | nuitka/plugins/PluginBase.py | getImplicitImports | mikehaben69/Nuitka | 5,421 | python | def getImplicitImports(self, module):
'Return the implicit imports for a given module (iterator).\n\n Args:\n module: the module object\n Yields:\n implicit imports for the module\n '
return () | def getImplicitImports(self, module):
'Return the implicit imports for a given module (iterator).\n\n Args:\n module: the module object\n Yields:\n implicit imports for the module\n '
return ()<|docstring|>Return the implicit imports for a given module (iterator).
Args:
module: the module object
Yields:
implicit imports for the module<|endoftext|> |
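A sketch of overriding getImplicitImports() in such a subclass; the exact yield type has varied across Nuitka versions, so plain dotted names are an assumption here, as are the module names.
def getImplicitImports(self, module):
    # Declare a hidden extension module that static import analysis cannot see.
    if (module.getFullName() == 'somepkg'):
        (yield 'somepkg._native')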
b9240590c0fae74b6281237723d44eed0f120ebfa48ce73bd6c3eff029910db6 | def considerFailedImportReferrals(self, module_name):
'Provide a dictionary of fallback imports for modules that failed to import.\n\n Args:\n module_name: name of module\n Returns:\n dict\n '
return self.module_aliases.get(module_name) | Provide a dictionary of fallback imports for modules that failed to import.
Args:
module_name: name of module
Returns:
dict | nuitka/plugins/PluginBase.py | considerFailedImportReferrals | mikehaben69/Nuitka | 5,421 | python | def considerFailedImportReferrals(self, module_name):
'Provide a dictionary of fallback imports for modules that failed to import.\n\n Args:\n module_name: name of module\n Returns:\n dict\n '
return self.module_aliases.get(module_name) | def considerFailedImportReferrals(self, module_name):
'Provide a dictionary of fallback imports for modules that failed to import.\n\n Args:\n module_name: name of module\n Returns:\n dict\n '
return self.module_aliases.get(module_name)<|docstring|>Provide a dictionary of fallback imports for modules that failed to import.
Args:
module_name: name of module
Returns:
dict<|endoftext|> |
6521aa846d74bf398be29cd25330f74c0cf0dc0d47ee9dca46772cc23412955d | def onModuleSourceCode(self, module_name, source_code):
'Inspect or modify source code.\n\n Args:\n module_name: (str) name of module\n source_code: (str) its source code\n Returns:\n source_code (str)\n Notes:\n Default implementation forwards to `checkModuleSourceCode` which is\n going to allow simply checking the source code without the need to\n pass it back.\n '
self.checkModuleSourceCode(module_name, source_code)
return source_code | Inspect or modify source code.
Args:
module_name: (str) name of module
source_code: (str) its source code
Returns:
source_code (str)
Notes:
Default implementation forwards to `checkModuleSourceCode` which is
going to allow simply checking the source code without the need to
pass it back. | nuitka/plugins/PluginBase.py | onModuleSourceCode | mikehaben69/Nuitka | 5,421 | python | def onModuleSourceCode(self, module_name, source_code):
'Inspect or modify source code.\n\n Args:\n module_name: (str) name of module\n source_code: (str) its source code\n Returns:\n source_code (str)\n Notes:\n Default implementation forwards to `checkModuleSourceCode` which is\n going to allow simply checking the source code without the need to\n pass it back.\n '
self.checkModuleSourceCode(module_name, source_code)
return source_code | def onModuleSourceCode(self, module_name, source_code):
'Inspect or modify source code.\n\n Args:\n module_name: (str) name of module\n source_code: (str) its source code\n Returns:\n source_code (str)\n Notes:\n Default implementation forwards to `checkModuleSourceCode` which is\n going to allow simply checking the source code without the need to\n pass it back.\n '
self.checkModuleSourceCode(module_name, source_code)
return source_code<|docstring|>Inspect or modify source code.
Args:
module_name: (str) name of module
source_code: (str) its source code
Returns:
source_code (str)
Notes:
Default implementation forwards to `checkModuleSourceCode` which is
going to allow simply checking the source code without the need to
pass it back.<|endoftext|> |
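A sketch of onModuleSourceCode() patching one module before compilation; the target module and the replaced snippet are invented.
def onModuleSourceCode(self, module_name, source_code):
    if (module_name == 'noisy_pkg.telemetry'):
        # Flip a module-level switch at compile time instead of at run time.
        source_code = source_code.replace('TELEMETRY = True', 'TELEMETRY = False')
    return source_code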
c799e396e24c418b448970e109b7573a300c1be203e43db48e1d2a60562c186a | def checkModuleSourceCode(self, module_name, source_code):
'Inspect source code.\n\n Args:\n module_name: (str) name of module\n source_code: (str) its source code\n Returns:\n None\n ' | Inspect source code.
Args:
module_name: (str) name of module
source_code: (str) its source code
Returns:
None | nuitka/plugins/PluginBase.py | checkModuleSourceCode | mikehaben69/Nuitka | 5,421 | python | def checkModuleSourceCode(self, module_name, source_code):
'Inspect source code.\n\n Args:\n module_name: (str) name of module\n source_code: (str) its source code\n Returns:\n None\n ' | def checkModuleSourceCode(self, module_name, source_code):
'Inspect source code.\n\n Args:\n module_name: (str) name of module\n source_code: (str) its source code\n Returns:\n None\n '<|docstring|>Inspect source code.
Args:
module_name: (str) name of module
source_code: (str) its source code
Returns:
None<|endoftext|> |
0b2ea68e098e32a84900a01d8e2c020c17bdd7711e6e45b07be12aa080516170 | def onFrozenModuleSourceCode(self, module_name, is_package, source_code):
'Inspect or modify frozen module source code.\n\n Args:\n module_name: (str) full name of module\n is_package: (bool) True indicates a package\n source_code: (str) its source code\n Returns:\n source_code (str)\n '
return source_code | Inspect or modify frozen module source code.
Args:
module_name: (str) full name of module
is_package: (bool) True indicates a package
source_code: (str) its source code
Returns:
source_code (str) | nuitka/plugins/PluginBase.py | onFrozenModuleSourceCode | mikehaben69/Nuitka | 5,421 | python | def onFrozenModuleSourceCode(self, module_name, is_package, source_code):
'Inspect or modify frozen module source code.\n\n Args:\n module_name: (str) full name of module\n is_package: (bool) True indicates a package\n source_code: (str) its source code\n Returns:\n source_code (str)\n '
return source_code | def onFrozenModuleSourceCode(self, module_name, is_package, source_code):
'Inspect or modify frozen module source code.\n\n Args:\n module_name: (str) full name of module\n is_package: (bool) True indicates a package\n source_code: (str) its source code\n Returns:\n source_code (str)\n '
return source_code<|docstring|>Inspect or modify frozen module source code.
Args:
module_name: (str) full name of module
is_package: (bool) True indicates a package
source_code: (str) its source code
Returns:
source_code (str)<|endoftext|> |
9a1e7a2b0521e3f4081c4a7afd49ba9c277f3aa98773d01fee98509995e659d5 | def onFrozenModuleBytecode(self, module_name, is_package, bytecode):
'Inspect or modify frozen module byte code.\n\n Args:\n module_name: (str) name of module\n is_package: (bool) True indicates a package\n bytecode: (bytes) byte code\n Returns:\n bytecode (bytes)\n '
return bytecode | Inspect or modify frozen module byte code.
Args:
module_name: (str) name of module
is_package: (bool) True indicates a package
bytecode: (bytes) byte code
Returns:
bytecode (bytes) | nuitka/plugins/PluginBase.py | onFrozenModuleBytecode | mikehaben69/Nuitka | 5,421 | python | def onFrozenModuleBytecode(self, module_name, is_package, bytecode):
'Inspect or modify frozen module byte code.\n\n Args:\n module_name: (str) name of module\n is_package: (bool) True indicates a package\n bytecode: (bytes) byte code\n Returns:\n bytecode (bytes)\n '
return bytecode | def onFrozenModuleBytecode(self, module_name, is_package, bytecode):
'Inspect or modify frozen module byte code.\n\n Args:\n module_name: (str) name of module\n is_package: (bool) True indicates a package\n bytecode: (bytes) byte code\n Returns:\n bytecode (bytes)\n '
return bytecode<|docstring|>Inspect or modify frozen module byte code.
Args:
module_name: (str) name of module
is_package: (bool) True indicates a package
bytecode: (bytes) byte code
Returns:
bytecode (bytes)<|endoftext|> |
6e84c1357741f95f4dd2ccc67fcf2b454108ac97f4d963c72b51202a546ebe5e | @staticmethod
def createPreModuleLoadCode(module):
'Create code to execute before importing a module.\n\n Notes:\n Called by @onModuleDiscovered.\n\n Args:\n module: the module object\n Returns:\n None (does not apply, default)\n tuple (code, documentary string)\n tuple (code, documentary string, flags)\n '
return None | Create code to execute before importing a module.
Notes:
Called by @onModuleDiscovered.
Args:
module: the module object
Returns:
None (does not apply, default)
tuple (code, documentary string)
tuple (code, documentary string, flags) | nuitka/plugins/PluginBase.py | createPreModuleLoadCode | mikehaben69/Nuitka | 5,421 | python | @staticmethod
def createPreModuleLoadCode(module):
'Create code to execute before importing a module.\n\n Notes:\n Called by @onModuleDiscovered.\n\n Args:\n module: the module object\n Returns:\n None (does not apply, default)\n tuple (code, documentary string)\n tuple (code, documentary string, flags)\n '
return None | @staticmethod
def createPreModuleLoadCode(module):
'Create code to execute before importing a module.\n\n Notes:\n Called by @onModuleDiscovered.\n\n Args:\n module: the module object\n Returns:\n None (does not apply, default)\n tuple (code, documentary string)\n tuple (code, documentary string, flags)\n '
return None<|docstring|>Create code to execute before importing a module.
Notes:
Called by @onModuleDiscovered.
Args:
module: the module object
Returns:
None (does not apply, default)
tuple (code, documentary string)
tuple (code, documentary string, flags)<|endoftext|> |
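A sketch of createPreModuleLoadCode() following the tuple contract above; the module name, environment variable, and message are assumptions.
@staticmethod
def createPreModuleLoadCode(module):
    if (module.getFullName() == 'cv2'):
        code = "import os\nos.environ.setdefault('OPENCV_LOG_LEVEL', 'ERROR')"
        # Returning None otherwise means "does not apply", per the contract above.
        return (code, 'Quieting OpenCV logging before import (illustrative).')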
2aea9e75150065f52887e574fc2a5a026e6543d6bb25ef6f45adf1949d064308 | @staticmethod
def createPostModuleLoadCode(module):
'Create code to execute after loading a module.\n\n Notes:\n Called by @onModuleDiscovered.\n\n Args:\n module: the module object\n\n Returns:\n None (does not apply, default)\n tuple (code, documentary string)\n tuple (code, documentary string, flags)\n '
return None | Create code to execute after loading a module.
Notes:
Called by @onModuleDiscovered.
Args:
module: the module object
Returns:
None (does not apply, default)
tuple (code, documentary string)
tuple (code, documentary string, flags) | nuitka/plugins/PluginBase.py | createPostModuleLoadCode | mikehaben69/Nuitka | 5,421 | python | @staticmethod
def createPostModuleLoadCode(module):
'Create code to execute after loading a module.\n\n Notes:\n Called by @onModuleDiscovered.\n\n Args:\n module: the module object\n\n Returns:\n None (does not apply, default)\n tuple (code, documentary string)\n tuple (code, documentary string, flags)\n '
return None | @staticmethod
def createPostModuleLoadCode(module):
'Create code to execute after loading a module.\n\n Notes:\n Called by @onModuleDiscovered.\n\n Args:\n module: the module object\n\n Returns:\n None (does not apply, default)\n tuple (code, documentary string)\n tuple (code, documentary string, flags)\n '
return None<|docstring|>Create code to execute after loading a module.
Notes:
Called by @onModuleDiscovered.
Args:
module: the module object
Returns:
None (does not apply, default)
tuple (code, documentary string)
tuple (code, documentary string, flags)<|endoftext|> |
150e955f4b53454f60e882902e7b441b533c888c3dfc99910dc66b17b2470d27 | def onModuleDiscovered(self, module):
'Called with a module to be loaded.\n\n Notes:\n We may specify code to be prepended and/or appended to this module.\n This code is stored in the appropriate dict.\n For every imported module and each of these two options, only one plugin may do this.\n We check this condition here.\n\n Args:\n module: the module object\n Returns:\n None\n '
return None | Called with a module to be loaded.
Notes:
We may specify code to be prepended and/or appended to this module.
This code is stored in the appropriate dict.
For every imported module and each of these two options, only one plugin may do this.
We check this condition here.
Args:
module: the module object
Returns:
None | nuitka/plugins/PluginBase.py | onModuleDiscovered | mikehaben69/Nuitka | 5,421 | python | def onModuleDiscovered(self, module):
'Called with a module to be loaded.\n\n Notes:\n We may specify code to be prepended and/or appended to this module.\n This code is stored in the appropriate dict.\n For every imported module and each of these two options, only one plugin may do this.\n We check this condition here.\n\n Args:\n module: the module object\n Returns:\n None\n '
return None | def onModuleDiscovered(self, module):
'Called with a module to be loaded.\n\n Notes:\n We may specify code to be prepended and/or appended to this module.\n This code is stored in the appropriate dict.\n For every imported module and each of these two options, only one plugin may do this.\n We check this condition here.\n\n Args:\n module: the module object\n Returns:\n None\n '
return None<|docstring|>Called with a module to be loaded.
Notes:
We may specify code to be prepended and/or appended to this module.
This code is stored in the appropriate dict.
For every imported module and each of these two options, only one plugin may do this.
We check this condition here.
Args:
module: the module object
Returns:
None<|endoftext|> |
77b42c7afd8f99b8a72d2941aae87ae3250032ad6ab441575e9dddd8197d788a | def onModuleEncounter(self, module_filename, module_name, module_kind):
'Help decide whether to include a module.\n\n Args:\n module_filename: filename\n module_name: full module name\n module_kind: one of "py", "shlib" (shared library)\n Returns:\n True or False\n '
return None | Help decide whether to include a module.
Args:
module_filename: filename
module_name: full module name
module_kind: one of "py", "shlib" (shared library)
Returns:
True or False | nuitka/plugins/PluginBase.py | onModuleEncounter | mikehaben69/Nuitka | 5,421 | python | def onModuleEncounter(self, module_filename, module_name, module_kind):
'Help decide whether to include a module.\n\n Args:\n module_filename: filename\n module_name: full module name\n module_kind: one of "py", "shlib" (shared library)\n Returns:\n True or False\n '
return None | def onModuleEncounter(self, module_filename, module_name, module_kind):
'Help decide whether to include a module.\n\n Args:\n module_filename: filename\n module_name: full module name\n module_kind: one of "py", "shlib" (shared library)\n Returns:\n True or False\n '
return None<|docstring|>Help decide whether to include a module.
Args:
module_filename: filename
module_name: full module name
module_kind: one of "py", "shlib" (shared library)
Returns:
True or False<|endoftext|> |
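A sketch of onModuleEncounter() excluding a test subtree; module_name behaves like a dotted string here, and the package name is invented.
def onModuleEncounter(self, module_filename, module_name, module_kind):
    if str(module_name).startswith('bigdep.tests.'):
        return False  # leave these modules out of the compilation
    return None  # no opinion; defer to other plugins or the default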
6c2ca264875be6a8877e370a875ea9133d2e52b0c1dbf1e31979563345468b77 | def onModuleInitialSet(self):
'Provide extra modules to the initial root module set.\n\n Args:\n None\n Returns:\n Iterable of modules, may yield.\n '
return () | Provide extra modules to the initial root module set.
Args:
None
Returns:
Iterable of modules, may yield. | nuitka/plugins/PluginBase.py | onModuleInitialSet | mikehaben69/Nuitka | 5,421 | python | def onModuleInitialSet(self):
'Provide extra modules to the initial root module set.\n\n Args:\n None\n Returns:\n Iterable of modules, may yield.\n '
return () | def onModuleInitialSet(self):
'Provide extra modules to the initial root module set.\n\n Args:\n None\n Returns:\n Iterable of modules, may yield.\n '
return ()<|docstring|>Provide extra modules to the initial root module set.
Args:
None
Returns:
Iterable of modules, may yield.<|endoftext|> |
64f6d084ccf992e0401be906796df6bca4263ec5c966f3b370ab84e8db1084bf | def onModuleCompleteSet(self, module_set):
'Provide extra modules to the initial root module set.\n\n Args:\n module_set - tuple of module objects\n Returns:\n None\n Notes:\n You must not change anything, this is purely for warning\n and error checking, and potentially for later stages to\n prepare.\n ' | Provide extra modules to the initial root module set.
Args:
module_set - tuple of module objects
Returns:
None
Notes:
You must not change anything, this is purely for warning
and error checking, and potentially for later stages to
prepare. | nuitka/plugins/PluginBase.py | onModuleCompleteSet | mikehaben69/Nuitka | 5,421 | python | def onModuleCompleteSet(self, module_set):
'Provide extra modules to the initial root module set.\n\n Args:\n module_set - tuple of module objects\n Returns:\n None\n Notes:\n You must not change anything, this is purely for warning\n and error checking, and potentially for later stages to\n prepare.\n ' | def onModuleCompleteSet(self, module_set):
'Provide extra modules to the initial root module set.\n\n Args:\n module_set - tuple of module objects\n Returns:\n None\n Notes:\n You must not change anything, this is purely for warning\n and error checking, and potentially for later stages to\n prepare.\n '<|docstring|>Provide extra modules to the initial root module set.
Args:
module_set - tuple of module objects
Returns:
None
Notes:
You must not change anything, this is purely for warning
and error checking, and potentially for later stages to
prepare.<|endoftext|> |
7f8d1a0ab03a699570c4c210e51abe0ca1782d5b88d897e299e11911a989d1d8 | @staticmethod
def locateModule(importing, module_name):
'Provide a filename / -path for a to-be-imported module.\n\n Args:\n importing: module object that asked for it (tracing only)\n module_name: (str or ModuleName) full name of module\n Returns:\n filename for module\n '
from nuitka.importing import Importing
(_module_package, module_filename, _finding) = Importing.findModule(importing=importing, module_name=ModuleName(module_name), parent_package=None, level=(- 1), warn=False)
return module_filename | Provide a filename / -path for a to-be-imported module.
Args:
importing: module object that asked for it (tracing only)
module_name: (str or ModuleName) full name of module
Returns:
filename for module | nuitka/plugins/PluginBase.py | locateModule | mikehaben69/Nuitka | 5,421 | python | @staticmethod
def locateModule(importing, module_name):
'Provide a filename / -path for a to-be-imported module.\n\n Args:\n importing: module object that asked for it (tracing only)\n module_name: (str or ModuleName) full name of module\n Returns:\n filename for module\n '
from nuitka.importing import Importing
(_module_package, module_filename, _finding) = Importing.findModule(importing=importing, module_name=ModuleName(module_name), parent_package=None, level=(- 1), warn=False)
return module_filename | @staticmethod
def locateModule(importing, module_name):
'Provide a filename / -path for a to-be-imported module.\n\n Args:\n importing: module object that asked for it (tracing only)\n module_name: (str or ModuleName) full name of module\n Returns:\n filename for module\n '
from nuitka.importing import Importing
(_module_package, module_filename, _finding) = Importing.findModule(importing=importing, module_name=ModuleName(module_name), parent_package=None, level=(- 1), warn=False)
return module_filename<|docstring|>Provide a filename / -path for a to-be-imported module.
Args:
importing: module object that asked for it (tracing only)
module_name: (str or ModuleName) full name of module
Returns:
filename for module<|endoftext|> |
f7c2dcab2242c4fcebd1acf1880922481cf12858e1385849bcb8806fb3311eeb | def locateModules(self, importing, module_name):
'Provide a filename / -path for a to-be-imported module.\n\n Args:\n importing: module object that asked for it (tracing only)\n module_name: (str or ModuleName) full name of module\n Returns:\n list of ModuleName\n '
module_path = self.locateModule(importing, module_name)
result = []
def _scanModules(path, prefix):
for module_info in pkgutil.walk_packages((path,), prefix=(prefix + '.')):
result.append(ModuleName(module_info[1]))
if module_info[2]:
_scanModules(module_info[1], (module_name + module_info[1]))
_scanModules(module_path, module_name)
return result | Provide a filename / -path for a to-be-imported module.
Args:
importing: module object that asked for it (tracing only)
module_name: (str or ModuleName) full name of module
Returns:
list of ModuleName | nuitka/plugins/PluginBase.py | locateModules | mikehaben69/Nuitka | 5,421 | python | def locateModules(self, importing, module_name):
'Provide a filename / -path for a to-be-imported module.\n\n Args:\n importing: module object that asked for it (tracing only)\n module_name: (str or ModuleName) full name of module\n Returns:\n list of ModuleName\n '
module_path = self.locateModule(importing, module_name)
result = []
def _scanModules(path, prefix):
for module_info in pkgutil.walk_packages((path,), prefix=(prefix + '.')):
result.append(ModuleName(module_info[1]))
if module_info[2]:
_scanModules(module_info[1], (module_name + module_info[1]))
_scanModules(module_path, module_name)
return result | def locateModules(self, importing, module_name):
'Provide a filename / -path for a to-be-imported module.\n\n Args:\n importing: module object that asked for it (tracing only)\n module_name: (str or ModuleName) full name of module\n Returns:\n list of ModuleName\n '
module_path = self.locateModule(importing, module_name)
result = []
def _scanModules(path, prefix):
for module_info in pkgutil.walk_packages((path,), prefix=(prefix + '.')):
result.append(ModuleName(module_info[1]))
if module_info[2]:
_scanModules(module_info[1], (module_name + module_info[1]))
_scanModules(module_path, module_name)
return result<|docstring|>Provide a filename / -path for a to-be-imported module.
Args:
importing: module object that asked for it (tracing only)
module_name: (str or ModuleName) full name of module
Returns:
list of ModuleName<|endoftext|> |
2d91716c93442f8da5358e767a8ebb187220cf1cd2980e642063fdc16fbe5bc2 | def considerExtraDlls(self, dist_dir, module):
'Provide a tuple of names of binaries to be included.\n\n Args:\n dist_dir: the distribution folder\n module: the module object needing the binaries\n Returns:\n tuple\n '
for included_entry_point in self.getExtraDlls(module):
makePath(os.path.dirname(included_entry_point.dest_path))
shutil.copyfile(included_entry_point.source_path, included_entry_point.dest_path)
(yield included_entry_point) | Provide a tuple of names of binaries to be included.
Args:
dist_dir: the distribution folder
module: the module object needing the binaries
Returns:
tuple | nuitka/plugins/PluginBase.py | considerExtraDlls | mikehaben69/Nuitka | 5,421 | python | def considerExtraDlls(self, dist_dir, module):
'Provide a tuple of names of binaries to be included.\n\n Args:\n dist_dir: the distribution folder\n module: the module object needing the binaries\n Returns:\n tuple\n '
for included_entry_point in self.getExtraDlls(module):
makePath(os.path.dirname(included_entry_point.dest_path))
shutil.copyfile(included_entry_point.source_path, included_entry_point.dest_path)
(yield included_entry_point) | def considerExtraDlls(self, dist_dir, module):
'Provide a tuple of names of binaries to be included.\n\n Args:\n dist_dir: the distribution folder\n module: the module object needing the binaries\n Returns:\n tuple\n '
for included_entry_point in self.getExtraDlls(module):
makePath(os.path.dirname(included_entry_point.dest_path))
shutil.copyfile(included_entry_point.source_path, included_entry_point.dest_path)
(yield included_entry_point)<|docstring|>Provide a tuple of names of binaries to be included.
Args:
dist_dir: the distribution folder
module: the module object needing the binaries
Returns:
tuple<|endoftext|> |
d09c768c90a37d34c1a49f1f7878d2199774be06d81e719764ecfb9f277334c4 | def getExtraDlls(self, module):
'Provide IncludedEntryPoint named tuples describing extra needs of the module.\n\n Args:\n module: the module object needing the binaries\n Returns:\n yields IncludedEntryPoint objects\n\n '
return () | Provide IncludedEntryPoint named tuples describing extra needs of the module.
Args:
module: the module object needing the binaries
Returns:
yields IncludedEntryPoint objects | nuitka/plugins/PluginBase.py | getExtraDlls | mikehaben69/Nuitka | 5,421 | python | def getExtraDlls(self, module):
'Provide IncludedEntryPoint named tuples describing extra needs of the module.\n\n Args:\n module: the module object needing the binaries\n Returns:\n yields IncludedEntryPoint objects\n\n '
return () | def getExtraDlls(self, module):
'Provide IncludedEntryPoint named tuples describing extra needs of the module.\n\n Args:\n module: the module object needing the binaries\n Returns:\n yields IncludedEntryPoint objects\n\n '
return ()<|docstring|>Provide IncludedEntryPoint named tuples describing extra needs of the module.
Args:
module: the module object needing the binaries
Returns:
yields IncludedEntryPoint objects<|endoftext|> |
2bc995bf9363750d023d9fd13fc740a9f0afb1ee7f257c83142483ea62cb4e02 | def getModuleSpecificDllPaths(self, module_name):
'Provide a list of directories, where DLLs should be searched for this package (or module).\n\n Args:\n module_name: name of a package or module, for which the DLL path addition applies.\n Returns:\n iterable of paths\n '
return () | Provide a list of directories, where DLLs should be searched for this package (or module).
Args:
module_name: name of a package or module, for which the DLL path addition applies.
Returns:
iterable of paths | nuitka/plugins/PluginBase.py | getModuleSpecificDllPaths | mikehaben69/Nuitka | 5,421 | python | def getModuleSpecificDllPaths(self, module_name):
'Provide a list of directories, where DLLs should be searched for this package (or module).\n\n Args:\n module_name: name of a package or module, for which the DLL path addition applies.\n Returns:\n iterable of paths\n '
return () | def getModuleSpecificDllPaths(self, module_name):
'Provide a list of directories, where DLLs should be searched for this package (or module).\n\n Args:\n module_name: name of a package or module, for which the DLL path addition applies.\n Returns:\n iterable of paths\n '
return ()<|docstring|>Provide a list of directories, where DLLs should be searched for this package (or module).
Args:
module_name: name of a package or module, for which the DLL path addition applies.
Returns:
iterable of paths<|endoftext|> |
2b83cf0d1709e9373b3f9a4afad9bad82ea6a066a7d625dc2a4dea27041515cd | def removeDllDependencies(self, dll_filename, dll_filenames):
'Yield any DLLs / shared libraries not to be included in distribution.\n\n Args:\n dll_filename: DLL name\n dll_filenames: list of DLLs\n Yields:\n yielded filenames to exclude\n '
return () | Yield any DLLs / shared libraries not to be included in distribution.
Args:
dll_filename: DLL name
dll_filenames: list of DLLs
Yields:
yielded filenames to exclude | nuitka/plugins/PluginBase.py | removeDllDependencies | mikehaben69/Nuitka | 5,421 | python | def removeDllDependencies(self, dll_filename, dll_filenames):
'Yield any DLLs / shared libraries not to be included in distribution.\n\n Args:\n dll_filename: DLL name\n dll_filenames: list of DLLs\n Yields:\n yielded filenames to exclude\n '
return () | def removeDllDependencies(self, dll_filename, dll_filenames):
'Yield any DLLs / shared libraries not to be included in distribution.\n\n Args:\n dll_filename: DLL name\n dll_filenames: list of DLLs\n Yields:\n yielded filenames to exclude\n '
return ()<|docstring|>Yield any DLLs / shared libraries not to be included in distribution.
Args:
dll_filename: DLL name
dll_filenames: list of DLLs
Yields:
yielded filenames to exclude<|endoftext|> |
71a39543f3f334c5968ddb7b94c3f479d361041fcbfd2fbf7656f064e9e09096 | def considerDataFiles(self, module):
'Yield data file names (source|func, target) for inclusion (iterator).\n\n Args:\n module: module object that may need extra data files\n Yields:\n Data file description pairs, either (source, dest) or (func, dest)\n where the func will be called to create the content dynamically.\n\n '
return () | Yield data file names (source|func, target) for inclusion (iterator).
Args:
module: module object that may need extra data files
Yields:
Data file description pairs, either (source, dest) or (func, dest)
where the func will be called to create the content dynamically. | nuitka/plugins/PluginBase.py | considerDataFiles | mikehaben69/Nuitka | 5,421 | python | def considerDataFiles(self, module):
'Yield data file names (source|func, target) for inclusion (iterator).\n\n Args:\n module: module object that may need extra data files\n Yields:\n Data file description pairs, either (source, dest) or (func, dest)\n where the func will be called to create the content dynamically.\n\n '
return () | def considerDataFiles(self, module):
'Yield data file names (source|func, target) for inclusion (iterator).\n\n Args:\n module: module object that may need extra data files\n Yields:\n Data file description pairs, either (source, dest) or (func, dest)\n where the func will be called to create the content dynamically.\n\n '
return ()<|docstring|>Yield data file names (source|func, target) for inclusion (iterator).
Args:
module: module object that may need extra data files
Yields:
Data file description pairs, either (source, dest) or (func, dest)
where the func will be called to create the content dynamically.<|endoftext|> |
87ab3aa35f7fbdab8b10ce5cbc21e849084be869dfb9cc61af46572f7de23463 | def onStandaloneDistributionFinished(self, dist_dir):
'Called after successfully creating a standalone distribution.\n\n Note:\n It is up to the plugin to take subsequent action. Examples are:\n insert additional information (license, copyright, company or\n application description), create installation material, further\n folder clean-up, start downstream applications etc.\n\n Args:\n dist_dir: the created distribution folder\n\n Returns:\n None\n '
return None | Called after successfully creating a standalone distribution.
Note:
It is up to the plugin to take subsequent action. Examples are:
insert additional information (license, copyright, company or
application description), create installation material, further
folder clean-up, start downstream applications etc.
Args:
dist_dir: the created distribution folder
Returns:
None | nuitka/plugins/PluginBase.py | onStandaloneDistributionFinished | mikehaben69/Nuitka | 5,421 | python | def onStandaloneDistributionFinished(self, dist_dir):
'Called after successfully creating a standalone distribution.\n\n Note:\n It is up to the plugin to take subsequent action. Examples are:\n insert additional information (license, copyright, company or\n application description), create installation material, further\n folder clean-up, start downstream applications etc.\n\n Args:\n dist_dir: the created distribution folder\n\n Returns:\n None\n '
return None | def onStandaloneDistributionFinished(self, dist_dir):
'Called after successfully creating a standalone distribution.\n\n Note:\n It is up to the plugin to take subsequent action. Examples are:\n insert additional information (license, copyright, company or\n application description), create installation material, further\n folder clean-up, start downstream applications etc.\n\n Args:\n dist_dir: the created distribution folder\n\n Returns:\n None\n '
return None<|docstring|>Called after successfully creating a standalone distribution.
Note:
It is up to the plugin to take subsequent action. Examples are:
insert additional information (license, copyright, company or
application description), create installation material, further
folder clean-up, start downstream applications etc.
Args:
dist_dir: the created distribution folder
Returns:
None<|endoftext|> |
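A sketch of post-processing in onStandaloneDistributionFinished(); the copied file and its names are illustrative.
def onStandaloneDistributionFinished(self, dist_dir):
    import os
    import shutil
    # Ship the license next to the executable, as the notes above suggest.
    shutil.copyfile('LICENSE.txt', os.path.join(dist_dir, 'LICENSE.txt'))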
15f4494d3edd204efac428fef1ede37ba726288d72fb9905fc4733ce7623e94e | def onOnefileFinished(self, filename):
'Called after successfully creating a onefile executable.\n\n Note:\n It is up to the plugin to take subsequent action. Examples are:\n insert additional information (license, copyright, company or\n application description), create installation material, further\n folder clean-up, start downstream applications etc.\n\n Args:\n filename: the created onefile executable\n\n Returns:\n None\n '
return None | Called after successfully creating a onefile executable.
Note:
It is up to the plugin to take subsequent action. Examples are:
insert additional information (license, copyright, company or
application description), create installation material, further
folder clean-up, start downstream applications etc.
Args:
filename: the created onefile executable
Returns:
None | nuitka/plugins/PluginBase.py | onOnefileFinished | mikehaben69/Nuitka | 5,421 | python | def onOnefileFinished(self, filename):
'Called after successfully creating a onefile executable.\n\n Note:\n It is up to the plugin to take subsequent action. Examples are:\n insert additional information (license, copyright, company or\n application description), create installation material, further\n folder clean-up, start downstream applications etc.\n\n Args:\n filename: the created onefile executable\n\n Returns:\n None\n '
return None | def onOnefileFinished(self, filename):
'Called after successfully creating a onefile executable.\n\n Note:\n It is up to the plugin to take subsequent action. Examples are:\n insert additional information (license, copyright, company or\n application description), create installation material, further\n folder clean-up, start downstream applications etc.\n\n Args:\n filename: the created onefile executable\n\n Returns:\n None\n '
return None<|docstring|>Called after successfully creating a onefile executable.
Note:
It is up to the plugin to take subsequent action. Examples are:
insert additional information (license, copyright, company or
application description), create installation material, further
folder clean-up, start downstream applications etc.
Args:
filename: the created onefile executable
Returns:
None<|endoftext|> |
522081dea35d3451ce92871b58744bd2a6cf8cd5c224494920cfa1aabacd0de6 | def onFinalResult(self, filename):
"Called after successfully finishing a compilation.\n\n Note:\n Plugins normally don't need this, and what filename is will be\n heavily dependent on compilation modes. Actions can be take here,\n e.g. commercial plugins output generated keys near that executable\n path.\n Args:\n filename: the created binary (module, accelerated exe, dist exe, onefile exe)\n\n Returns:\n None\n "
return None | Called after successfully finishing a compilation.
Note:
Plugins normally don't need this, and what the filename is will be
heavily dependent on compilation modes. Actions can be taken here,
e.g. commercial plugins output generated keys near that executable
path.
Args:
filename: the created binary (module, accelerated exe, dist exe, onefile exe)
Returns:
None | nuitka/plugins/PluginBase.py | onFinalResult | mikehaben69/Nuitka | 5,421 | python | def onFinalResult(self, filename):
"Called after successfully finishing a compilation.\n\n Note:\n Plugins normally don't need this, and what filename is will be\n heavily dependent on compilation modes. Actions can be take here,\n e.g. commercial plugins output generated keys near that executable\n path.\n Args:\n filename: the created binary (module, accelerated exe, dist exe, onefile exe)\n\n Returns:\n None\n "
return None | def onFinalResult(self, filename):
"Called after successfully finishing a compilation.\n\n Note:\n Plugins normally don't need this, and what filename is will be\n heavily dependent on compilation modes. Actions can be take here,\n e.g. commercial plugins output generated keys near that executable\n path.\n Args:\n filename: the created binary (module, accelerated exe, dist exe, onefile exe)\n\n Returns:\n None\n "
return None<|docstring|>Called after successfully finishing a compilation.
Note:
Plugins normally don't need this, and what the filename is will be
heavily dependent on compilation modes. Actions can be taken here,
e.g. commercial plugins output generated keys near that executable
path.
Args:
filename: the created binary (module, accelerated exe, dist exe, onefile exe)
Returns:
None<|endoftext|> |
76b3c1b233030ad38fb13125a6e8a531bbcb6babe2e78b3ff069c8ac46ef0884 | def suppressUnknownImportWarning(self, importing, module_name, source_ref):
'Suppress import warnings for unknown modules.\n\n Args:\n importing: the module object\n module_name: name of module\n source_ref: ???\n Returns:\n True or False\n '
return False | Suppress import warnings for unknown modules.
Args:
importing: the module object
module_name: name of module
source_ref: ???
Returns:
True or False | nuitka/plugins/PluginBase.py | suppressUnknownImportWarning | mikehaben69/Nuitka | 5,421 | python | def suppressUnknownImportWarning(self, importing, module_name, source_ref):
'Suppress import warnings for unknown modules.\n\n Args:\n importing: the module object\n module_name: name of module\n source_ref: ???\n Returns:\n True or False\n '
return False | def suppressUnknownImportWarning(self, importing, module_name, source_ref):
'Suppress import warnings for unknown modules.\n\n Args:\n importing: the module object\n module_name: name of module\n source_ref: ???\n Returns:\n True or False\n '
return False<|docstring|>Suppress import warnings for unknown modules.
Args:
importing: the module object
module_name: name of module
source_ref: ???
Returns:
True or False<|endoftext|> |
2f29d2d45343b23eddff6b305a337a9c27690c30570aee59939a4c210ddac914 | def decideCompilation(self, module_name, source_ref):
'Decide whether to compile a module (or just use its bytecode).\n\n Notes:\n The first plugin not returning None makes the decision. Thereafter,\n no other plugins will be checked. If all plugins return None, the\n module will be compiled.\n\n Args:\n module_name: name of module\n source_ref: ???\n\n Returns:\n "compiled" or "bytecode" or None (default)\n '
return None | Decide whether to compile a module (or just use its bytecode).
Notes:
The first plugin not returning None makes the decision. Thereafter,
no other plugins will be checked. If all plugins return None, the
module will be compiled.
Args:
module_name: name of module
source_ref: ???
Returns:
"compiled" or "bytecode" or None (default) | nuitka/plugins/PluginBase.py | decideCompilation | mikehaben69/Nuitka | 5,421 | python | def decideCompilation(self, module_name, source_ref):
'Decide whether to compile a module (or just use its bytecode).\n\n Notes:\n The first plugin not returning None makes the decision. Thereafter,\n no other plugins will be checked. If all plugins return None, the\n module will be compiled.\n\n Args:\n module_name: name of module\n source_ref: ???\n\n Returns:\n "compiled" or "bytecode" or None (default)\n '
return None | def decideCompilation(self, module_name, source_ref):
'Decide whether to compile a module (or just use its bytecode).\n\n Notes:\n The first plugin not returning None makes the decision. Thereafter,\n no other plugins will be checked. If all plugins return None, the\n module will be compiled.\n\n Args:\n module_name: name of module\n source_ref: ???\n\n Returns:\n "compiled" or "bytecode" or None (default)\n '
return None<|docstring|>Decide whether to compile a module (or just use its bytecode).
Notes:
The first plugin not returning None makes the decision. Thereafter,
no other plugins will be checked. If all plugins return None, the
module will be compiled.
Args:
module_name: name of module
source_ref: ???
Returns:
"compiled" or "bytecode" or None (default)<|endoftext|> |
9af9d371019920c5a48b52e816eb445994d12ec5f5e0e5b2fcc5a8dfc9db63b9 | def getPreprocessorSymbols(self):
'Decide which C defines to be used in compilation.\n\n Notes:\n The plugins can each contribute, but are hopefully using\n a namespace for their defines.\n\n Returns:\n None for no defines, otherwise dictionary of key to be\n defined, and non-None values if any, i.e. no "-Dkey" only\n '
return None | Decide which C defines to be used in compilation.
Notes:
The plugins can each contribute, but are hopefully using
a namespace for their defines.
Returns:
None for no defines, otherwise dictionary of key to be
defined, and non-None values if any, i.e. no "-Dkey" only | nuitka/plugins/PluginBase.py | getPreprocessorSymbols | mikehaben69/Nuitka | 5,421 | python | def getPreprocessorSymbols(self):
'Decide which C defines to be used in compilation.\n\n Notes:\n The plugins can each contribute, but are hopefully using\n a namespace for their defines.\n\n Returns:\n None for no defines, otherwise dictionary of key to be\n defined, and non-None values if any, i.e. no "-Dkey" only\n '
return None | def getPreprocessorSymbols(self):
'Decide which C defines to be used in compilation.\n\n Notes:\n The plugins can each contribute, but are hopefully using\n a namespace for their defines.\n\n Returns:\n None for no defines, otherwise dictionary of key to be\n defined, and non-None values if any, i.e. no "-Dkey" only\n '
return None<|docstring|>Decide which C defines to be used in compilation.
Notes:
The plugins can each contribute, but are hopefully using
a namespace for their defines.
Returns:
None for no defines, otherwise dictionary of key to be
defined, and non-None values if any, i.e. no "-Dkey" only<|endoftext|> |
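A sketch of getPreprocessorSymbols() contributing one namespaced define, as the notes above recommend; the symbol is invented, and the exact -D rendering is an assumption.
def getPreprocessorSymbols(self):
    # Presumably surfaces as -DMYPLUGIN_TRACE=1 on the C compiler command line.
    return {'MYPLUGIN_TRACE': '1'}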
e8df65668101fec07048f75a7528412ae1666e04ba0699cd325d11c93859fd4f | def getExtraCodeFiles(self):
'Add extra code files to the compilation.\n\n Notes:\n This is generally a bad idea to use unless you absolutely\n know what you are doing.\n\n Returns:\n None for no extra codes, otherwise dictionary of key to be\n filename, and value to be source code.\n '
return None | Add extra code files to the compilation.
Notes:
This is generally a bad idea to use unless you absolutely
know what you are doing.
Returns:
None for no extra codes, otherwise dictionary of key to be
filename, and value to be source code. | nuitka/plugins/PluginBase.py | getExtraCodeFiles | mikehaben69/Nuitka | 5,421 | python | def getExtraCodeFiles(self):
'Add extra code files to the compilation.\n\n Notes:\n This is generally a bad idea to use unless you absolutely\n know what you are doing.\n\n Returns:\n None for no extra codes, otherwise dictionary of key to be\n filename, and value to be source code.\n '
return None | def getExtraCodeFiles(self):
'Add extra code files to the compilation.\n\n Notes:\n This is generally a bad idea to use unless you absolutely\n know what you are doing.\n\n Returns:\n None for no extra codes, otherwise dictionary of key to be\n filename, and value to be source code.\n '
return None<|docstring|>Add extra code files to the compilation.
Notes:
This is generally a bad idea to use unless you absolutely
know what you are doing.
Returns:
None for no extra codes, otherwise dictionary of key to be
filename, and value to be source code.<|endoftext|> |
ebd3644eece3a929b6be1f5c23ec8f9ca179712625a3d1a286c72472df0e76db | def getExtraLinkLibraries(self):
'Decide which link library should be added.\n\n Notes:\n Names provided multiple times, e.g. by multiple plugins are\n only added once.\n\n Returns:\n None for no extra link library, otherwise the name as a **str**\n or an iterable of names of link libraries.\n '
return None | Decide which link library should be added.
Notes:
Names provided multiple times, e.g. by multiple plugins are
only added once.
Returns:
None for no extra link library, otherwise the name as a **str**
or an iterable of names of link libraries. | nuitka/plugins/PluginBase.py | getExtraLinkLibraries | mikehaben69/Nuitka | 5,421 | python | def getExtraLinkLibraries(self):
'Decide which link library should be added.\n\n Notes:\n Names provided multiple times, e.g. by multiple plugins are\n only added once.\n\n Returns:\n None for no extra link library, otherwise the name as a **str**\n or an iterable of names of link libraries.\n '
return None | def getExtraLinkLibraries(self):
'Decide which link library should be added.\n\n Notes:\n Names provided multiple times, e.g. by multiple plugins are\n only added once.\n\n Returns:\n None for no extra link library, otherwise the name as a **str**\n or an iterable of names of link libraries.\n '
return None<|docstring|>Decide which link library should be added.
Notes:
Names provided multiple times, e.g. by multiple plugins are
only added once.
Returns:
None for no extra link library, otherwise the name as a **str**
or an iterable of names of link libraries.<|endoftext|> |
77c4fb9d53b96d90937c79ac0f33135d30bdc05890ff226a90cde0e1b1c88d2d | def warnUnusedPlugin(self, message):
'An inactive plugin may issue a warning if it believes this may be wrong.\n\n Returns:\n None\n '
if (self.plugin_name not in warned_unused_plugins):
warned_unused_plugins.add(self.plugin_name)
plugins_logger.warning(("Use '--plugin-enable=%s' for: %s" % (self.plugin_name, message))) | An inactive plugin may issue a warning if it believes this may be wrong.
Returns:
None | nuitka/plugins/PluginBase.py | warnUnusedPlugin | mikehaben69/Nuitka | 5,421 | python | def warnUnusedPlugin(self, message):
'An inactive plugin may issue a warning if it believes this may be wrong.\n\n Returns:\n None\n '
if (self.plugin_name not in warned_unused_plugins):
warned_unused_plugins.add(self.plugin_name)
plugins_logger.warning(("Use '--plugin-enable=%s' for: %s" % (self.plugin_name, message))) | def warnUnusedPlugin(self, message):
'An inactive plugin may issue a warning if it believes this may be wrong.\n\n Returns:\n None\n '
if (self.plugin_name not in warned_unused_plugins):
warned_unused_plugins.add(self.plugin_name)
plugins_logger.warning(("Use '--plugin-enable=%s' for: %s" % (self.plugin_name, message)))<|docstring|>An inactive plugin may issue a warning if it believes this may be wrong.
Returns:
None<|endoftext|> |
56ac80deb209cef9f6c1c28b7d387eebe15e2f9f3a85ff120203d393738018fe | def onDataComposerResult(self, blob_filename):
'Internal use only.\n\n Returns:\n None\n '
return None | Internal use only.
Returns:
None | nuitka/plugins/PluginBase.py | onDataComposerResult | mikehaben69/Nuitka | 5,421 | python | def onDataComposerResult(self, blob_filename):
'Internal use only.\n\n Returns:\n None\n '
return None | def onDataComposerResult(self, blob_filename):
'Internal use only.\n\n Returns:\n None\n '
return None<|docstring|>Internal use only.
Returns:
None<|endoftext|> |
734ce50709da52db549e8005096add54e6b9a9de0aa1fa49e54506a58217a57f | def encodeDataComposerName(self, data_name):
'Internal use only.\n\n Returns:\n None\n '
return None | Internal use only.
Returns:
None | nuitka/plugins/PluginBase.py | encodeDataComposerName | mikehaben69/Nuitka | 5,421 | python | def encodeDataComposerName(self, data_name):
'Internal use only.\n\n Returns:\n None\n '
return None | def encodeDataComposerName(self, data_name):
'Internal use only.\n\n Returns:\n None\n '
return None<|docstring|>Internal use only.
Returns:
None<|endoftext|> |
79b94b724a787087db95a017f63fcbba95bd638392d674b8c4eae299054edc3f | def _test_element(self, chunk_elements=None, compression=None):
' test basic functionality '
if (chunk_elements is not None):
nested_dict.LazyHDFValue.chunk_elements = chunk_elements
if (compression is not None):
nested_dict.LazyHDFValue.compression = compression
key = 'key'
data = MockValue([1, 2, 3])
value = nested_dict.LazyHDFValue(MockValue, key, self.hdf_file.name)
self.assertIsInstance(repr(value), six.string_types)
yaml_str = value.get_yaml_string()
self.assertIsInstance(yaml_str, six.string_types)
self.assertTrue(yaml_str.startswith('@'))
self.assertTrue(yaml_str.endswith((':' + key)))
with self.assertRaises(ValueError):
nested_dict.LazyHDFValue.create_from_yaml_string('', str, '')
value2 = nested_dict.LazyHDFValue.create_from_yaml_string(yaml_str, MockValue, self.hdf_folder)
self.assertEqual(value, value2)
value.set_hdf_folder(self.hdf_folder)
self.assertEqual(value, value2)
value3 = nested_dict.LazyHDFValue.create_from_data(key, data, self.hdf_file.name)
self.assertEqual(value, value3)
value4 = nested_dict.LazyHDFValue.create_from_data(key, data, self.hdf_file.name)
self.assertEqual(value3, value4)
data2 = value.load()
self.assertEqual(data, data2)
data3 = value2.load()
self.assertEqual(data, data3) | test basic functionality | utils/data_structures/tests/test_nested_dict.py | _test_element | david-zwicker/py-utils | 0 | python | def _test_element(self, chunk_elements=None, compression=None):
' '
if (chunk_elements is not None):
nested_dict.LazyHDFValue.chunk_elements = chunk_elements
if (compression is not None):
nested_dict.LazyHDFValue.compression = compression
key = 'key'
data = MockValue([1, 2, 3])
value = nested_dict.LazyHDFValue(MockValue, key, self.hdf_file.name)
self.assertIsInstance(repr(value), six.string_types)
yaml_str = value.get_yaml_string()
self.assertIsInstance(yaml_str, six.string_types)
self.assertTrue(yaml_str.startswith('@'))
self.assertTrue(yaml_str.endswith((':' + key)))
with self.assertRaises(ValueError):
nested_dict.LazyHDFValue.create_from_yaml_string('', str, '')
value2 = nested_dict.LazyHDFValue.create_from_yaml_string(yaml_str, MockValue, self.hdf_folder)
self.assertEqual(value, value2)
value.set_hdf_folder(self.hdf_folder)
self.assertEqual(value, value2)
value3 = nested_dict.LazyHDFValue.create_from_data(key, data, self.hdf_file.name)
self.assertEqual(value, value3)
value4 = nested_dict.LazyHDFValue.create_from_data(key, data, self.hdf_file.name)
self.assertEqual(value3, value4)
data2 = value.load()
self.assertEqual(data, data2)
data3 = value2.load()
self.assertEqual(data, data3) | def _test_element(self, chunk_elements=None, compression=None):
' '
if (chunk_elements is not None):
nested_dict.LazyHDFValue.chunk_elements = chunk_elements
if (compression is not None):
nested_dict.LazyHDFValue.compression = compression
key = 'key'
data = MockValue([1, 2, 3])
value = nested_dict.LazyHDFValue(MockValue, key, self.hdf_file.name)
self.assertIsInstance(repr(value), six.string_types)
yaml_str = value.get_yaml_string()
self.assertIsInstance(yaml_str, six.string_types)
self.assertTrue(yaml_str.startswith('@'))
self.assertTrue(yaml_str.endswith((':' + key)))
with self.assertRaises(ValueError):
nested_dict.LazyHDFValue.create_from_yaml_string('', str, '')
value2 = nested_dict.LazyHDFValue.create_from_yaml_string(yaml_str, MockValue, self.hdf_folder)
self.assertEqual(value, value2)
value.set_hdf_folder(self.hdf_folder)
self.assertEqual(value, value2)
value3 = nested_dict.LazyHDFValue.create_from_data(key, data, self.hdf_file.name)
self.assertEqual(value, value3)
value4 = nested_dict.LazyHDFValue.create_from_data(key, data, self.hdf_file.name)
self.assertEqual(value3, value4)
data2 = value.load()
self.assertEqual(data, data2)
data3 = value2.load()
self.assertEqual(data, data3)<|docstring|>test basic functionality<|endoftext|> |
a4cf7a2f08aac480c3120e022b800252a8e908f0f68c65236b5f353e427c6af3 | def save_to_hdf5(self, hdf_file, key):
' save the data of the current burrow to an HDF5 file '
if (key in hdf_file):
del hdf_file[key]
hdf_file.create_dataset(key, data=self.arr) | save the data of the current burrow to an HDF5 file | utils/data_structures/tests/test_nested_dict.py | save_to_hdf5 | david-zwicker/py-utils | 0 | python | def save_to_hdf5(self, hdf_file, key):
' '
if (key in hdf_file):
del hdf_file[key]
hdf_file.create_dataset(key, data=self.arr) | def save_to_hdf5(self, hdf_file, key):
' '
if (key in hdf_file):
del hdf_file[key]
hdf_file.create_dataset(key, data=self.arr)<|docstring|>save the data of the current burrow to an HDF5 file<|endoftext|> |
23df5f7f0b6a6428aed2cfb7362e0985b6e61b3328fb71fcb5a5f620a57a2506 | @classmethod
def create_from_hdf5(cls, hdf_file, key):
' creates a burrow track from data in a HDF5 file '
return cls.from_array(hdf_file[key]) | creates a burrow track from data in a HDF5 file | utils/data_structures/tests/test_nested_dict.py | create_from_hdf5 | david-zwicker/py-utils | 0 | python | @classmethod
def create_from_hdf5(cls, hdf_file, key):
' '
return cls.from_array(hdf_file[key]) | @classmethod
def create_from_hdf5(cls, hdf_file, key):
' '
return cls.from_array(hdf_file[key])<|docstring|>creates a burrow track from data in a HDF5 file<|endoftext|> |
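Note: the save_to_hdf5 / create_from_hdf5 pair above is the storage protocol that LazyHDFValue appears to rely on, judging from the tests in this file: an instance method that writes the object under a key, plus a classmethod that reconstructs it. A minimal sketch of a user class following the same protocol; the Trajectory name and positions attribute are illustrative, not taken from the repository:

import h5py
import numpy as np

class Trajectory:
    def __init__(self, positions):
        self.positions = np.asarray(positions)

    def save_to_hdf5(self, hdf_file, key):
        if key in hdf_file:
            del hdf_file[key]  # overwrite any stale dataset stored under this key
        hdf_file.create_dataset(key, data=self.positions)

    @classmethod
    def create_from_hdf5(cls, hdf_file, key):
        return cls(hdf_file[key][...])  # read the full dataset back into memory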
723d799bc2186d8025e745c1302b7cfc907b9e809174784141fcb8530513603c | def test_element(self, chunk_elements=None, compression=None):
' test basic functionality '
key = 'key'
item_cls = MockCollection.item_class
data_list = [item_cls([1, 2, 3]), item_cls([5, 6, 7])]
data = MockCollection(data_list)
cls = nested_dict.LazyHDFCollection
value = cls(MockCollection, key, self.hdf_file.name)
self.assertIsInstance(repr(value), six.string_types)
yaml_str = value.get_yaml_string()
self.assertIsInstance(yaml_str, six.string_types)
self.assertTrue(yaml_str.startswith('@'))
self.assertTrue(yaml_str.endswith((':' + key)))
with self.assertRaises(ValueError):
cls.create_from_yaml_string('', str, '')
value2 = cls.create_from_yaml_string(yaml_str, MockCollection, self.hdf_folder)
self.assertEqual(value, value2)
value.set_hdf_folder(self.hdf_folder)
self.assertEqual(value, value2)
value3 = cls.create_from_data(key, data, self.hdf_file.name)
self.assertEqual(value, value3)
value4 = cls.create_from_data(key, data, self.hdf_file.name)
self.assertEqual(value3, value4)
data2 = value.load()
self.assertEqual(data, data2)
data3 = value2.load()
self.assertEqual(data, data3) | test basic functionality | utils/data_structures/tests/test_nested_dict.py | test_element | david-zwicker/py-utils | 0 | python | def test_element(self, chunk_elements=None, compression=None):
' '
key = 'key'
item_cls = MockCollection.item_class
data_list = [item_cls([1, 2, 3]), item_cls([5, 6, 7])]
data = MockCollection(data_list)
cls = nested_dict.LazyHDFCollection
value = cls(MockCollection, key, self.hdf_file.name)
self.assertIsInstance(repr(value), six.string_types)
yaml_str = value.get_yaml_string()
self.assertIsInstance(yaml_str, six.string_types)
self.assertTrue(yaml_str.startswith('@'))
self.assertTrue(yaml_str.endswith((':' + key)))
with self.assertRaises(ValueError):
cls.create_from_yaml_string('', str, '')
value2 = cls.create_from_yaml_string(yaml_str, MockCollection, self.hdf_folder)
self.assertEqual(value, value2)
value.set_hdf_folder(self.hdf_folder)
self.assertEqual(value, value2)
value3 = cls.create_from_data(key, data, self.hdf_file.name)
self.assertEqual(value, value3)
value4 = cls.create_from_data(key, data, self.hdf_file.name)
self.assertEqual(value3, value4)
data2 = value.load()
self.assertEqual(data, data2)
data3 = value2.load()
self.assertEqual(data, data3) | def test_element(self, chunk_elements=None, compression=None):
' '
key = 'key'
item_cls = MockCollection.item_class
data_list = [item_cls([1, 2, 3]), item_cls([5, 6, 7])]
data = MockCollection(data_list)
cls = nested_dict.LazyHDFCollection
value = cls(MockCollection, key, self.hdf_file.name)
self.assertIsInstance(repr(value), six.string_types)
yaml_str = value.get_yaml_string()
self.assertIsInstance(yaml_str, six.string_types)
self.assertTrue(yaml_str.startswith('@'))
self.assertTrue(yaml_str.endswith((':' + key)))
with self.assertRaises(ValueError):
cls.create_from_yaml_string('', str, '')
value2 = cls.create_from_yaml_string(yaml_str, MockCollection, self.hdf_folder)
self.assertEqual(value, value2)
value.set_hdf_folder(self.hdf_folder)
self.assertEqual(value, value2)
value3 = cls.create_from_data(key, data, self.hdf_file.name)
self.assertEqual(value, value3)
value4 = cls.create_from_data(key, data, self.hdf_file.name)
self.assertEqual(value3, value4)
data2 = value.load()
self.assertEqual(data, data2)
data3 = value2.load()
self.assertEqual(data, data3)<|docstring|>test basic functionality<|endoftext|> |
008461050e8da14554e319da4f32a799e72bc3171cc1e77447e890067de558d9 | def test_basics(self):
' tests miscellaneous functions '
d = self.dict_cls({'a': {'b': {}}, 'c': 1})
self.assertIsInstance(repr(d), str)
stream = six.StringIO()
with misc.RedirectedStdout(stream):
d.pprint()
self.assertGreater(len(stream.getvalue()), 0) | tests miscellaneous functions | utils/data_structures/tests/test_nested_dict.py | test_basics | david-zwicker/py-utils | 0 | python | def test_basics(self):
' '
d = self.dict_cls({'a': {'b': {}}, 'c': 1})
self.assertIsInstance(repr(d), str)
stream = six.StringIO()
with misc.RedirectedStdout(stream):
d.pprint()
self.assertGreater(len(stream.getvalue()), 0) | def test_basics(self):
' '
d = self.dict_cls({'a': {'b': {}}, 'c': 1})
self.assertIsInstance(repr(d), str)
stream = six.StringIO()
with misc.RedirectedStdout(stream):
d.pprint()
self.assertGreater(len(stream.getvalue()), 0)<|docstring|>tests miscellaneous functions<|endoftext|> |
9b53ddd30dcce7fbd3946fdac3d560a350e8a840b2d97b4f4f7b404831e5b0a9 | def test_getting_data(self):
' tests that are about retrieving data '
d = self.dict_cls({'a': {'b': {}}, 'c': 1})
self.assertEqual(d['a'], self.dict_cls({'b': {}}))
self.assertEqual(d['a/b'], {})
self.assertEqual(d['c'], 1)
with self.assertRaises(KeyError):
d['z']
with self.assertRaises(KeyError):
d['a/z']
with self.assertRaises(KeyError):
d['c/z'] | tests that are about retrieving data | utils/data_structures/tests/test_nested_dict.py | test_getting_data | david-zwicker/py-utils | 0 | python | def test_getting_data(self):
' '
d = self.dict_cls({'a': {'b': {}}, 'c': 1})
self.assertEqual(d['a'], self.dict_cls({'b': {}}))
self.assertEqual(d['a/b'], {})
self.assertEqual(d['c'], 1)
with self.assertRaises(KeyError):
d['z']
with self.assertRaises(KeyError):
d['a/z']
with self.assertRaises(KeyError):
d['c/z'] | def test_getting_data(self):
' '
d = self.dict_cls({'a': {'b': {}}, 'c': 1})
self.assertEqual(d['a'], self.dict_cls({'b': {}}))
self.assertEqual(d['a/b'], {})
self.assertEqual(d['c'], 1)
with self.assertRaises(KeyError):
d['z']
with self.assertRaises(KeyError):
d['a/z']
with self.assertRaises(KeyError):
d['c/z']<|docstring|>tests that are about retrieving data<|endoftext|> |
7d6f666fc6e0e436d8c28d2323c7795b4e6c9cfc5c15bc14f0d27938158350b4 | def test_membership(self):
' tests that test membership '
d = self.dict_cls({'a': {'b': {}}, 'c': 1})
self.assertIn('a', d)
self.assertIn('a/b', d)
self.assertIn('c', d)
self.assertNotIn('z', d)
self.assertNotIn('a/z', d)
self.assertNotIn('c/z', d) | tests that test membership | utils/data_structures/tests/test_nested_dict.py | test_membership | david-zwicker/py-utils | 0 | python | def test_membership(self):
' '
d = self.dict_cls({'a': {'b': {}}, 'c': 1})
self.assertIn('a', d)
self.assertIn('a/b', d)
self.assertIn('c', d)
self.assertNotIn('z', d)
self.assertNotIn('a/z', d)
self.assertNotIn('c/z', d) | def test_membership(self):
' '
d = self.dict_cls({'a': {'b': {}}, 'c': 1})
self.assertIn('a', d)
self.assertIn('a/b', d)
self.assertIn('c', d)
self.assertNotIn('z', d)
self.assertNotIn('a/z', d)
self.assertNotIn('c/z', d)<|docstring|>tests that test membership<|endoftext|> |
8bce026636f2c3366cb54c9cb64cb9e9df7e5a362a204edbcf5d049bbff35ebe | def test_setting_data(self):
' tests that are about setting data '
d = self.dict_cls({'a': {'b': {}}})
d['a/c'] = 2
self.assertEqual(d['a/c'], 2)
d['e/f'] = 3
self.assertEqual(d['e/f'], 3)
self.assertEqual(d.to_dict(), {'a': {'b': {}, 'c': 2}, 'e': {'f': 3}})
self.assertEqual(d.to_dict(flatten=True), {'a/c': 2, 'e/f': 3})
d = self.dict_cls({'a': {'b': {}}})
d['a'] = 2
self.assertEqual(d, self.dict_cls({'a': 2}))
with self.assertRaises(TypeError):
d['a/b'] = 2
r = d.create_child('f', {'1': 2})
self.assertEqual(r, self.dict_cls({'1': 2}))
self.assertEqual(d, self.dict_cls({'a': 2, 'f': {'1': 2}}))
d = self.dict_cls({'a': {'b': 1}})
d.from_dict({'a/c': 2})
self.assertEqual(d, self.dict_cls({'a': {'b': 1, 'c': 2}}))
d.from_dict({'a': {'c': 3}})
self.assertEqual(d, self.dict_cls({'a': {'b': 1, 'c': 3}}))
d = self.dict_cls({'a': {'b': 1}})
d['a/b'] += 1
self.assertEqual(d, self.dict_cls({'a': {'b': 2}}))
d['a/b'] *= 2
self.assertEqual(d, self.dict_cls({'a': {'b': 4}})) | tests that are about setting data | utils/data_structures/tests/test_nested_dict.py | test_setting_data | david-zwicker/py-utils | 0 | python | def test_setting_data(self):
' '
d = self.dict_cls({'a': {'b': {}}})
d['a/c'] = 2
self.assertEqual(d['a/c'], 2)
d['e/f'] = 3
self.assertEqual(d['e/f'], 3)
self.assertEqual(d.to_dict(), {'a': {'b': {}, 'c': 2}, 'e': {'f': 3}})
self.assertEqual(d.to_dict(flatten=True), {'a/c': 2, 'e/f': 3})
d = self.dict_cls({'a': {'b': {}}})
d['a'] = 2
self.assertEqual(d, self.dict_cls({'a': 2}))
with self.assertRaises(TypeError):
d['a/b'] = 2
r = d.create_child('f', {'1': 2})
self.assertEqual(r, self.dict_cls({'1': 2}))
self.assertEqual(d, self.dict_cls({'a': 2, 'f': {'1': 2}}))
d = self.dict_cls({'a': {'b': 1}})
d.from_dict({'a/c': 2})
self.assertEqual(d, self.dict_cls({'a': {'b': 1, 'c': 2}}))
d.from_dict({'a': {'c': 3}})
self.assertEqual(d, self.dict_cls({'a': {'b': 1, 'c': 3}}))
d = self.dict_cls({'a': {'b': 1}})
d['a/b'] += 1
self.assertEqual(d, self.dict_cls({'a': {'b': 2}}))
d['a/b'] *= 2
self.assertEqual(d, self.dict_cls({'a': {'b': 4}})) | def test_setting_data(self):
' '
d = self.dict_cls({'a': {'b': {}}})
d['a/c'] = 2
self.assertEqual(d['a/c'], 2)
d['e/f'] = 3
self.assertEqual(d['e/f'], 3)
self.assertEqual(d.to_dict(), {'a': {'b': {}, 'c': 2}, 'e': {'f': 3}})
self.assertEqual(d.to_dict(flatten=True), {'a/c': 2, 'e/f': 3})
d = self.dict_cls({'a': {'b': {}}})
d['a'] = 2
self.assertEqual(d, self.dict_cls({'a': 2}))
with self.assertRaises(TypeError):
d['a/b'] = 2
r = d.create_child('f', {'1': 2})
self.assertEqual(r, self.dict_cls({'1': 2}))
self.assertEqual(d, self.dict_cls({'a': 2, 'f': {'1': 2}}))
d = self.dict_cls({'a': {'b': 1}})
d.from_dict({'a/c': 2})
self.assertEqual(d, self.dict_cls({'a': {'b': 1, 'c': 2}}))
d.from_dict({'a': {'c': 3}})
self.assertEqual(d, self.dict_cls({'a': {'b': 1, 'c': 3}}))
d = self.dict_cls({'a': {'b': 1}})
d['a/b'] += 1
self.assertEqual(d, self.dict_cls({'a': {'b': 2}}))
d['a/b'] *= 2
self.assertEqual(d, self.dict_cls({'a': {'b': 4}}))<|docstring|>tests that are about setting data<|endoftext|> |
5fd2d86d8a3b52e69a0e0269741984570e58e50fc434949203a311d73ba161d0 | def test_deleting_data(self):
' tests that are about deleting data '
d = self.dict_cls({'a': {'b': 1, 'c': 2}, 'd': 3})
with self.assertRaises(KeyError):
del d['g']
with self.assertRaises(KeyError):
del d['d/z']
del d['d']
self.assertEqual(d, self.dict_cls({'a': {'b': 1, 'c': 2}}))
del d['a/c']
self.assertEqual(d, self.dict_cls({'a': {'b': 1}}))
del d['a']
self.assertEqual(d, self.dict_cls()) | tests that are about deleting data | utils/data_structures/tests/test_nested_dict.py | test_deleting_data | david-zwicker/py-utils | 0 | python | def test_deleting_data(self):
' '
d = self.dict_cls({'a': {'b': 1, 'c': 2}, 'd': 3})
with self.assertRaises(KeyError):
del d['g']
with self.assertRaises(KeyError):
del d['d/z']
del d['d']
self.assertEqual(d, self.dict_cls({'a': {'b': 1, 'c': 2}}))
del d['a/c']
self.assertEqual(d, self.dict_cls({'a': {'b': 1}}))
del d['a']
self.assertEqual(d, self.dict_cls()) | def test_deleting_data(self):
' '
d = self.dict_cls({'a': {'b': 1, 'c': 2}, 'd': 3})
with self.assertRaises(KeyError):
del d['g']
with self.assertRaises(KeyError):
del d['d/z']
del d['d']
self.assertEqual(d, self.dict_cls({'a': {'b': 1, 'c': 2}}))
del d['a/c']
self.assertEqual(d, self.dict_cls({'a': {'b': 1}}))
del d['a']
self.assertEqual(d, self.dict_cls())<|docstring|>tests that are about deleting data<|endoftext|> |
85c6327e2ce77c30d794f0102ce483b4d880d2f5983859ce1cf183ceddc8a805 | def test_iterators(self):
' test iterating over the data '
d = self.dict_cls({'a': {'b': 1}, 'c': 2})
six.assertCountEqual(self, d.iterkeys(), ['a', 'c'])
six.assertCountEqual(self, d.iterkeys(flatten=True), ['a/b', 'c'])
six.assertCountEqual(self, d.itervalues(), [self.dict_cls({'b': 1}), 2])
six.assertCountEqual(self, d.itervalues(flatten=True), [1, 2])
six.assertCountEqual(self, d.iteritems(), [('a', self.dict_cls({'b': 1})), ('c', 2)])
six.assertCountEqual(self, d.iteritems(flatten=True), [('a/b', 1), ('c', 2)])
with self.assertRaises(TypeError):
list(self.dict_cls({1: {2: 3}}).iterkeys(flatten=True))
with self.assertRaises(TypeError):
list(self.dict_cls({1: {2: 3}}).iteritems(flatten=True)) | test iterating over the data | utils/data_structures/tests/test_nested_dict.py | test_iterators | david-zwicker/py-utils | 0 | python | def test_iterators(self):
' '
d = self.dict_cls({'a': {'b': 1}, 'c': 2})
six.assertCountEqual(self, d.iterkeys(), ['a', 'c'])
six.assertCountEqual(self, d.iterkeys(flatten=True), ['a/b', 'c'])
six.assertCountEqual(self, d.itervalues(), [self.dict_cls({'b': 1}), 2])
six.assertCountEqual(self, d.itervalues(flatten=True), [1, 2])
six.assertCountEqual(self, d.iteritems(), [('a', self.dict_cls({'b': 1})), ('c', 2)])
six.assertCountEqual(self, d.iteritems(flatten=True), [('a/b', 1), ('c', 2)])
with self.assertRaises(TypeError):
list(self.dict_cls({1: {2: 3}}).iterkeys(flatten=True))
with self.assertRaises(TypeError):
list(self.dict_cls({1: {2: 3}}).iteritems(flatten=True)) | def test_iterators(self):
' '
d = self.dict_cls({'a': {'b': 1}, 'c': 2})
six.assertCountEqual(self, d.iterkeys(), ['a', 'c'])
six.assertCountEqual(self, d.iterkeys(flatten=True), ['a/b', 'c'])
six.assertCountEqual(self, d.itervalues(), [self.dict_cls({'b': 1}), 2])
six.assertCountEqual(self, d.itervalues(flatten=True), [1, 2])
six.assertCountEqual(self, d.iteritems(), [('a', self.dict_cls({'b': 1})), ('c', 2)])
six.assertCountEqual(self, d.iteritems(flatten=True), [('a/b', 1), ('c', 2)])
with self.assertRaises(TypeError):
list(self.dict_cls({1: {2: 3}}).iterkeys(flatten=True))
with self.assertRaises(TypeError):
list(self.dict_cls({1: {2: 3}}).iteritems(flatten=True))<|docstring|>test iterating over the data<|endoftext|> |
44deeea6074ec551da367e6b7cecfcb7cb09bee953f9092643066a76f40365c7 | def test_conversion(self):
' test the conversion of dictionaries '
d = self.dict_cls({'a': {'b': 1}, 'c': 2})
d2 = d.copy()
self.assertEqual(d2, d)
d2['c'] = 3
self.assertNotEqual(d2, d)
d3 = self.dict_cls(d.to_dict())
self.assertEqual(d3, d)
d3 = self.dict_cls(d.to_dict(flatten=True))
self.assertEqual(d3, d)
d = self.dict_cls({1: {2: 3}})
with self.assertRaises(TypeError):
d.to_dict(flatten=True) | test the conversion of dictionaries | utils/data_structures/tests/test_nested_dict.py | test_conversion | david-zwicker/py-utils | 0 | python | def test_conversion(self):
' '
d = self.dict_cls({'a': {'b': 1}, 'c': 2})
d2 = d.copy()
self.assertEqual(d2, d)
d2['c'] = 3
self.assertNotEqual(d2, d)
d3 = self.dict_cls(d.to_dict())
self.assertEqual(d3, d)
d3 = self.dict_cls(d.to_dict(flatten=True))
self.assertEqual(d3, d)
d = self.dict_cls({1: {2: 3}})
with self.assertRaises(TypeError):
d.to_dict(flatten=True) | def test_conversion(self):
' '
d = self.dict_cls({'a': {'b': 1}, 'c': 2})
d2 = d.copy()
self.assertEqual(d2, d)
d2['c'] = 3
self.assertNotEqual(d2, d)
d3 = self.dict_cls(d.to_dict())
self.assertEqual(d3, d)
d3 = self.dict_cls(d.to_dict(flatten=True))
self.assertEqual(d3, d)
d = self.dict_cls({1: {2: 3}})
with self.assertRaises(TypeError):
d.to_dict(flatten=True)<|docstring|>test the conversion of dictionaries<|endoftext|> |
5c8b02506fee83c22cf8311d970cec03188be964722a977955231118a9856c82 | def test_copy(self):
' test copies (including copy.copy and copy.deepcopy) '
a = nested_dict.NestedDict({'a': {'b': MockValue(1)}})
b = a.copy()
self.assertEqual(a, b)
self.assertNotEqual(id(a), id(b))
self.assertNotEqual(id(a['a']), id(b['a']))
self.assertEqual(id(a['a']['b']), id(b['a']['b']))
b = copy.copy(a)
self.assertEqual(a, b)
self.assertNotEqual(id(a), id(b))
self.assertEqual(id(a['a']), id(b['a']))
self.assertEqual(id(a['a']['b']), id(b['a']['b']))
b = copy.deepcopy(a)
self.assertEqual(a, b)
self.assertNotEqual(id(a), id(b))
self.assertNotEqual(id(a['a']), id(b['a']))
self.assertNotEqual(id(a['a']['b']), id(b['a']['b'])) | test copies (including copy.copy and copy.deepcopy) | utils/data_structures/tests/test_nested_dict.py | test_copy | david-zwicker/py-utils | 0 | python | def test_copy(self):
' '
a = nested_dict.NestedDict({'a': {'b': MockValue(1)}})
b = a.copy()
self.assertEqual(a, b)
self.assertNotEqual(id(a), id(b))
self.assertNotEqual(id(a['a']), id(b['a']))
self.assertEqual(id(a['a']['b']), id(b['a']['b']))
b = copy.copy(a)
self.assertEqual(a, b)
self.assertNotEqual(id(a), id(b))
self.assertEqual(id(a['a']), id(b['a']))
self.assertEqual(id(a['a']['b']), id(b['a']['b']))
b = copy.deepcopy(a)
self.assertEqual(a, b)
self.assertNotEqual(id(a), id(b))
self.assertNotEqual(id(a['a']), id(b['a']))
self.assertNotEqual(id(a['a']['b']), id(b['a']['b'])) | def test_copy(self):
' '
a = nested_dict.NestedDict({'a': {'b': MockValue(1)}})
b = a.copy()
self.assertEqual(a, b)
self.assertNotEqual(id(a), id(b))
self.assertNotEqual(id(a['a']), id(b['a']))
self.assertEqual(id(a['a']['b']), id(b['a']['b']))
b = copy.copy(a)
self.assertEqual(a, b)
self.assertNotEqual(id(a), id(b))
self.assertEqual(id(a['a']), id(b['a']))
self.assertEqual(id(a['a']['b']), id(b['a']['b']))
b = copy.deepcopy(a)
self.assertEqual(a, b)
self.assertNotEqual(id(a), id(b))
self.assertNotEqual(id(a['a']), id(b['a']))
self.assertNotEqual(id(a['a']['b']), id(b['a']['b']))<|docstring|>test copies (including copy.copy and copy.deepcopy)<|endoftext|> |
c4896690e302066850d090229f7c9e6d519d5d0a083ef80c35334c2156572143 | def test_update(self):
' test the difference between update and update recursive '
d = nested_dict.NestedDict({'a': {'b': 1}})
d.update({'a': {'c': 2}})
self.assertEqual(d, {'a': {'c': 2}})
d = nested_dict.NestedDict({'a': {'b': 1}})
d.update_recursive({'a': {'c': 2}})
self.assertEqual(d, {'a': {'b': 1, 'c': 2}}) | test the difference between update and update recursive | utils/data_structures/tests/test_nested_dict.py | test_update | david-zwicker/py-utils | 0 | python | def test_update(self):
' '
d = nested_dict.NestedDict({'a': {'b': 1}})
d.update({'a': {'c': 2}})
self.assertEqual(d, {'a': {'c': 2}})
d = nested_dict.NestedDict({'a': {'b': 1}})
d.update_recursive({'a': {'c': 2}})
self.assertEqual(d, {'a': {'b': 1, 'c': 2}}) | def test_update(self):
' '
d = nested_dict.NestedDict({'a': {'b': 1}})
d.update({'a': {'c': 2}})
self.assertEqual(d, {'a': {'c': 2}})
d = nested_dict.NestedDict({'a': {'b': 1}})
d.update_recursive({'a': {'c': 2}})
self.assertEqual(d, {'a': {'b': 1, 'c': 2}})<|docstring|>test the difference between update and update recursive<|endoftext|> |
c2986d4ca741c853ba66c1dffbf96af9082f652f024e919b8eb96497ade28787 | def test_simple(self):
' test the functionality '
key = 'key'
data = MockValue([1, 2, 3])
value = nested_dict.LazyHDFValue.create_from_data(key, data, self.hdf_file.name)
data.arr = np.arange(5)
data2 = MockValue([1, 2, 3])
d = nested_dict.LazyNestedDict({'a': value})
self.assertEqual(d.get_item('a', load_data=False), value)
self.assertEqual(d.get_item('a', load_data=True), data2)
self.assertEqual(d.get_item('a', load_data=False), data2)
value = nested_dict.LazyHDFValue.create_from_data(key, data, self.hdf_file.name)
d = nested_dict.LazyNestedDict({'a': value})
self.assertEqual(d.get_item('a', load_data=False), value)
self.assertEqual(d.get_item('a', load_data=True), data)
self.assertEqual(d.get_item('a', load_data=False), data)
value = nested_dict.LazyHDFValue.create_from_data(key, data, self.hdf_file.name)
with h5py.File(self.hdf_file.name, 'r+') as hdf_db:
del hdf_db[key]
d = nested_dict.LazyNestedDict({'a': value})
self.assertEqual(d.get_item('a', load_data=False), value)
with self.assertRaises(nested_dict.LazyLoadError):
d.get_item('a', load_data=True) | test the functionality | utils/data_structures/tests/test_nested_dict.py | test_simple | david-zwicker/py-utils | 0 | python | def test_simple(self):
' '
key = 'key'
data = MockValue([1, 2, 3])
value = nested_dict.LazyHDFValue.create_from_data(key, data, self.hdf_file.name)
data.arr = np.arange(5)
data2 = MockValue([1, 2, 3])
d = nested_dict.LazyNestedDict({'a': value})
self.assertEqual(d.get_item('a', load_data=False), value)
self.assertEqual(d.get_item('a', load_data=True), data2)
self.assertEqual(d.get_item('a', load_data=False), data2)
value = nested_dict.LazyHDFValue.create_from_data(key, data, self.hdf_file.name)
d = nested_dict.LazyNestedDict({'a': value})
self.assertEqual(d.get_item('a', load_data=False), value)
self.assertEqual(d.get_item('a', load_data=True), data)
self.assertEqual(d.get_item('a', load_data=False), data)
value = nested_dict.LazyHDFValue.create_from_data(key, data, self.hdf_file.name)
with h5py.File(self.hdf_file.name, 'r+') as hdf_db:
del hdf_db[key]
d = nested_dict.LazyNestedDict({'a': value})
self.assertEqual(d.get_item('a', load_data=False), value)
with self.assertRaises(nested_dict.LazyLoadError):
d.get_item('a', load_data=True) | def test_simple(self):
' '
key = 'key'
data = MockValue([1, 2, 3])
value = nested_dict.LazyHDFValue.create_from_data(key, data, self.hdf_file.name)
data.arr = np.arange(5)
data2 = MockValue([1, 2, 3])
d = nested_dict.LazyNestedDict({'a': value})
self.assertEqual(d.get_item('a', load_data=False), value)
self.assertEqual(d.get_item('a', load_data=True), data2)
self.assertEqual(d.get_item('a', load_data=False), data2)
value = nested_dict.LazyHDFValue.create_from_data(key, data, self.hdf_file.name)
d = nested_dict.LazyNestedDict({'a': value})
self.assertEqual(d.get_item('a', load_data=False), value)
self.assertEqual(d.get_item('a', load_data=True), data)
self.assertEqual(d.get_item('a', load_data=False), data)
value = nested_dict.LazyHDFValue.create_from_data(key, data, self.hdf_file.name)
with h5py.File(self.hdf_file.name, 'r+') as hdf_db:
del hdf_db[key]
d = nested_dict.LazyNestedDict({'a': value})
self.assertEqual(d.get_item('a', load_data=False), value)
with self.assertRaises(nested_dict.LazyLoadError):
d.get_item('a', load_data=True)<|docstring|>test the functionality<|endoftext|> |
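Note: distilled from the assertions in test_simple above, the lazy-loading contract looks like this (the file name data.h5 and the payload object data are illustrative; data must implement the save_to_hdf5 protocol):

value = nested_dict.LazyHDFValue.create_from_data('key', data, 'data.h5')
d = nested_dict.LazyNestedDict({'a': value})
d.get_item('a')                   # returns the LazyHDFValue placeholder, no file I/O
d.get_item('a', load_data=True)   # reads the HDF5 file and caches the result
d.get_item('a')                   # now returns the cached data without touching the file
# if the dataset was deleted from the file, load_data=True raises LazyLoadError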
57191dfcc3dd677b92499bb8b4e3ed1637605adc2a4a2093c7169f0239055c24 | def test_get_chunk_size(self):
' test the get_chunk_size function '
for _ in range(10):
shape = np.random.randint(1, 10, size=10)
size = np.prod(shape)
for _ in range(10):
num_elements = np.random.randint(1, size)
chunks = nested_dict.get_chunk_size(shape, num_elements)
self.assertLessEqual(np.prod(chunks), num_elements)
for (c, s) in zip(chunks, shape):
self.assertLessEqual(c, s) | test the get_chunk_size function | utils/data_structures/tests/test_nested_dict.py | test_get_chunk_size | david-zwicker/py-utils | 0 | python | def test_get_chunk_size(self):
' '
for _ in range(10):
shape = np.random.randint(1, 10, size=10)
size = np.prod(shape)
for _ in range(10):
num_elements = np.random.randint(1, size)
chunks = nested_dict.get_chunk_size(shape, num_elements)
self.assertLessEqual(np.prod(chunks), num_elements)
for (c, s) in zip(chunks, shape):
self.assertLessEqual(c, s) | def test_get_chunk_size(self):
' '
for _ in range(10):
shape = np.random.randint(1, 10, size=10)
size = np.prod(shape)
for _ in range(10):
num_elements = np.random.randint(1, size)
chunks = nested_dict.get_chunk_size(shape, num_elements)
self.assertLessEqual(np.prod(chunks), num_elements)
for (c, s) in zip(chunks, shape):
self.assertLessEqual(c, s)<|docstring|>test the get_chunk_size function<|endoftext|> |
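Note: the test above pins down only the contract of get_chunk_size, not its algorithm: the returned chunk shape may not exceed the array shape along any axis, and its total size may not exceed the element budget. A sketch of that contract with arbitrary numbers:

chunks = nested_dict.get_chunk_size((100, 100), 1000)
assert np.prod(chunks) <= 1000                          # at most num_elements per chunk
assert all(c <= s for c, s in zip(chunks, (100, 100)))  # no axis exceeds the array shape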
d0adcdf104bc551aafc8d723cf3938e2344379d71e49a2bad6d76ffa1967e558 | def test_prepare_data_for_yaml(self):
' test several types '
valid_data = [np.arange(5), np.float64(1), np.float128(1), np.uint8(3), np.int64(3), {'1': 2}, [1, 2, 3], (1, 2, 3), {1, 2, 3}, None, 1, 'str', 1.2, True, False]
for data in valid_data:
nested_dict.prepare_data_for_yaml(data)
invalid_data = [MockValue([1, 2])]
for data in invalid_data:
with self.assertWarnings(['unknown instance']):
nested_dict.prepare_data_for_yaml(data) | test several types | utils/data_structures/tests/test_nested_dict.py | test_prepare_data_for_yaml | david-zwicker/py-utils | 0 | python | def test_prepare_data_for_yaml(self):
' '
valid_data = [np.arange(5), np.float64(1), np.float128(1), np.uint8(3), np.int64(3), {'1': 2}, [1, 2, 3], (1, 2, 3), {1, 2, 3}, None, 1, 'str', 1.2, True, False]
for data in valid_data:
nested_dict.prepare_data_for_yaml(data)
invalid_data = [MockValue([1, 2])]
for data in invalid_data:
with self.assertWarnings(['unknown instance']):
nested_dict.prepare_data_for_yaml(data) | def test_prepare_data_for_yaml(self):
' '
valid_data = [np.arange(5), np.float64(1), np.float128(1), np.uint8(3), np.int64(3), {'1': 2}, [1, 2, 3], (1, 2, 3), {1, 2, 3}, None, 1, 'str', 1.2, True, False]
for data in valid_data:
nested_dict.prepare_data_for_yaml(data)
invalid_data = [MockValue([1, 2])]
for data in invalid_data:
with self.assertWarnings(['unknown instance']):
nested_dict.prepare_data_for_yaml(data)<|docstring|>test several types<|endoftext|> |
33951a4cd4c34f2dfe56b80a7185de55e26376124878af11b0130984088dc84b | def __init__(self, robot_name, workspace_dim, config=None):
'\n :param robot_name: str, the name of the robot, 3link or franka\n :param workspace_dim: int, workspace dimension, either 2 or 3\n :param config: dict, for overwriting the default configs\n '
if (config is not None):
config = merge_dicts(DEFAULT_CONFIG, config)
else:
config = DEFAULT_CONFIG.copy()
self.robot_name = robot_name
self._time_step = config['time_step']
self._action_repeat = config['action_repeat']
self._horizon = config['horizon']
self._terminate_after_collision = config['terminate_after_collision']
self._env_step_counter = 0
self.terminated = False
self._render = config['render']
self._cam_dist = config['cam_dist']
self._cam_yaw = config['cam_yaw']
self._cam_pitch = config['cam_pitch']
self._cam_position = config['cam_position']
self._gravity = config['gravity']
self._p = p
if self._render:
cid = self._p.connect(p.SHARED_MEMORY)
if (cid < 0):
cid = self._p.connect(p.GUI)
self._p.resetDebugVisualizerCamera(cameraDistance=self._cam_dist, cameraYaw=self._cam_yaw, cameraPitch=self._cam_pitch, cameraTargetPosition=self._cam_position)
else:
self._p.connect(p.DIRECT)
self._robot = robot_sim.create_robot_sim(self.robot_name, self._p, self._time_step)
self.cspace_dim = self._robot.cspace_dim
self.workspace_dim = workspace_dim
self.workspace_radius = config['workspace_radius']
self.goal = config['goal']
self.current_goal = None
self.goal_uid = None
self.q_init = config['q_init']
self.obstacle_cofigs = config['obstacle_configs']
if (self.obstacle_cofigs is None):
self.max_obstacle_num = config['max_obstacle_num']
self.min_obstacle_num = config['min_obstacle_num']
self.max_obstacle_radius = config['max_obstacle_radius']
self.min_obstacle_radius = config['min_obstacle_radius']
else:
self.max_obstacle_num = max((len(c) for c in self.obstacle_cofigs))
self.current_obs = []
self.obs_uids = []
self._goal_reward_weight = config['goal_reward_weight']
self._obs_reward_weight = config['obs_reward_weight']
self._ctrl_reward_weight = config['ctrl_reward_weight']
self._goal_reward_model = config['goal_reward_model']
self._goal_reward_length_scale = config['goal_reward_length_scale']
self._obs_reward_model = config['obs_reward_model']
self._obs_reward_length_scale = config['obs_reward_length_scale']
self._max_reward = config['max_reward']
self._initial_collision_buffer = config['initial_collision_buffer']
self._initial_joint_limit_buffer = config['initial_joint_limit_buffer']
self._initial_goal_distance_min = config['initial_goal_distance_min']
self._acc_control_mode = config['acc_control_mode']
self.seed()
self.action_space = spaces.Box(low=(- config['actuation_limit']), high=config['actuation_limit'], shape=(self.cspace_dim,), dtype=np.float32)
self._action_space = spaces.Box(low=(- config['actuation_limit']), high=config['actuation_limit'], shape=(self.cspace_dim,), dtype=np.float32)
self._observation = self.reset()
self.observation_space = spaces.Box(low=(- np.inf), high=np.inf, shape=self._observation.shape, dtype=np.float32)
self.viewer = None | :param robot_name: str, the name of the robot, 3link or franka
:param workspace_dim: int, workspace dimension, either 2 or 3
:param config: dict, for overwriting the default configs | rmp2/envs/robot_env.py | __init__ | UWRobotLearning/rmp2 | 17 | python | def __init__(self, robot_name, workspace_dim, config=None):
'\n :param robot_name: str, the name of the robot, 3link or franka\n :param workspace_dim: int, workspace dimension, either 2 or 3\n :param config: dict, for overwriting the default configs\n '
if (config is not None):
config = merge_dicts(DEFAULT_CONFIG, config)
else:
config = DEFAULT_CONFIG.copy()
self.robot_name = robot_name
self._time_step = config['time_step']
self._action_repeat = config['action_repeat']
self._horizon = config['horizon']
self._terminate_after_collision = config['terminate_after_collision']
self._env_step_counter = 0
self.terminated = False
self._render = config['render']
self._cam_dist = config['cam_dist']
self._cam_yaw = config['cam_yaw']
self._cam_pitch = config['cam_pitch']
self._cam_position = config['cam_position']
self._gravity = config['gravity']
self._p = p
if self._render:
cid = self._p.connect(p.SHARED_MEMORY)
if (cid < 0):
cid = self._p.connect(p.GUI)
self._p.resetDebugVisualizerCamera(cameraDistance=self._cam_dist, cameraYaw=self._cam_yaw, cameraPitch=self._cam_pitch, cameraTargetPosition=self._cam_position)
else:
self._p.connect(p.DIRECT)
self._robot = robot_sim.create_robot_sim(self.robot_name, self._p, self._time_step)
self.cspace_dim = self._robot.cspace_dim
self.workspace_dim = workspace_dim
self.workspace_radius = config['workspace_radius']
self.goal = config['goal']
self.current_goal = None
self.goal_uid = None
self.q_init = config['q_init']
self.obstacle_cofigs = config['obstacle_configs']
if (self.obstacle_cofigs is None):
self.max_obstacle_num = config['max_obstacle_num']
self.min_obstacle_num = config['min_obstacle_num']
self.max_obstacle_radius = config['max_obstacle_radius']
self.min_obstacle_radius = config['min_obstacle_radius']
else:
self.max_obstacle_num = max((len(c) for c in self.obstacle_cofigs))
self.current_obs = []
self.obs_uids = []
self._goal_reward_weight = config['goal_reward_weight']
self._obs_reward_weight = config['obs_reward_weight']
self._ctrl_reward_weight = config['ctrl_reward_weight']
self._goal_reward_model = config['goal_reward_model']
self._goal_reward_length_scale = config['goal_reward_length_scale']
self._obs_reward_model = config['obs_reward_model']
self._obs_reward_length_scale = config['obs_reward_length_scale']
self._max_reward = config['max_reward']
self._initial_collision_buffer = config['initial_collision_buffer']
self._initial_joint_limit_buffer = config['initial_joint_limit_buffer']
self._initial_goal_distance_min = config['initial_goal_distance_min']
self._acc_control_mode = config['acc_control_mode']
self.seed()
self.action_space = spaces.Box(low=(- config['actuation_limit']), high=config['actuation_limit'], shape=(self.cspace_dim,), dtype=np.float32)
self._action_space = spaces.Box(low=(- config['actuation_limit']), high=config['actuation_limit'], shape=(self.cspace_dim,), dtype=np.float32)
self._observation = self.reset()
self.observation_space = spaces.Box(low=(- np.inf), high=np.inf, shape=self._observation.shape, dtype=np.float32)
self.viewer = None | def __init__(self, robot_name, workspace_dim, config=None):
'\n :param robot_name: str, the name of the robot, 3link or franka\n :param workspace_dim: int, workspace dimension, either 2 or 3\n :param config: dict, for overwriting the default configs\n '
if (config is not None):
config = merge_dicts(DEFAULT_CONFIG, config)
else:
config = DEFAULT_CONFIG.copy()
self.robot_name = robot_name
self._time_step = config['time_step']
self._action_repeat = config['action_repeat']
self._horizon = config['horizon']
self._terminate_after_collision = config['terminate_after_collision']
self._env_step_counter = 0
self.terminated = False
self._render = config['render']
self._cam_dist = config['cam_dist']
self._cam_yaw = config['cam_yaw']
self._cam_pitch = config['cam_pitch']
self._cam_position = config['cam_position']
self._gravity = config['gravity']
self._p = p
if self._render:
cid = self._p.connect(p.SHARED_MEMORY)
if (cid < 0):
cid = self._p.connect(p.GUI)
self._p.resetDebugVisualizerCamera(cameraDistance=self._cam_dist, cameraYaw=self._cam_yaw, cameraPitch=self._cam_pitch, cameraTargetPosition=self._cam_position)
else:
self._p.connect(p.DIRECT)
self._robot = robot_sim.create_robot_sim(self.robot_name, self._p, self._time_step)
self.cspace_dim = self._robot.cspace_dim
self.workspace_dim = workspace_dim
self.workspace_radius = config['workspace_radius']
self.goal = config['goal']
self.current_goal = None
self.goal_uid = None
self.q_init = config['q_init']
self.obstacle_cofigs = config['obstacle_configs']
if (self.obstacle_cofigs is None):
self.max_obstacle_num = config['max_obstacle_num']
self.min_obstacle_num = config['min_obstacle_num']
self.max_obstacle_radius = config['max_obstacle_radius']
self.min_obstacle_radius = config['min_obstacle_radius']
else:
self.max_obstacle_num = max((len(c) for c in self.obstacle_cofigs))
self.current_obs = []
self.obs_uids = []
self._goal_reward_weight = config['goal_reward_weight']
self._obs_reward_weight = config['obs_reward_weight']
self._ctrl_reward_weight = config['ctrl_reward_weight']
self._goal_reward_model = config['goal_reward_model']
self._goal_reward_length_scale = config['goal_reward_length_scale']
self._obs_reward_model = config['obs_reward_model']
self._obs_reward_length_scale = config['obs_reward_length_scale']
self._max_reward = config['max_reward']
self._initial_collision_buffer = config['initial_collision_buffer']
self._initial_joint_limit_buffer = config['initial_joint_limit_buffer']
self._initial_goal_distance_min = config['initial_goal_distance_min']
self._acc_control_mode = config['acc_control_mode']
self.seed()
self.action_space = spaces.Box(low=(- config['actuation_limit']), high=config['actuation_limit'], shape=(self.cspace_dim,), dtype=np.float32)
self._action_space = spaces.Box(low=(- config['actuation_limit']), high=config['actuation_limit'], shape=(self.cspace_dim,), dtype=np.float32)
self._observation = self.reset()
self.observation_space = spaces.Box(low=(- np.inf), high=np.inf, shape=self._observation.shape, dtype=np.float32)
self.viewer = None<|docstring|>:param robot_name: str, the name of the robot, 3link or franka
:param workspace_dim: int, workspace dimension, either 2 or 3
:param config: dict, for overwriting the default configs<|endoftext|> |
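Note: a construction sketch based on the signature above; the RobotEnv class name is assumed from the module path rmp2/envs/robot_env.py, and the config keys shown are ones the constructor reads (partial overrides are merged into DEFAULT_CONFIG via merge_dicts):

from rmp2.envs.robot_env import RobotEnv  # assumed class name

env = RobotEnv(
    robot_name='franka',  # or '3link'
    workspace_dim=3,
    config={'render': True, 'horizon': 1200},  # partial override of DEFAULT_CONFIG
)
obs = env.reset()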
c96f77fa5a471055194900c3889dba04e5829baf60cd7bc7288f476ed3233ac6 | def reset(self):
'\n reset time and simulator\n '
self.terminated = False
self._env_step_counter = 0
self._p.resetSimulation()
self._p.setPhysicsEngineParameter(numSolverIterations=150)
self._p.setTimeStep(self._time_step)
self._p.setGravity(0, 0, self._gravity)
self.goal_uid = None
self.obs_uids = []
self._robot = robot_sim.create_robot_sim(self.robot_name, self._p, self._time_step, mode=self._acc_control_mode)
while True:
self._clear_goal_and_obstacles()
self._generate_random_initial_config()
(self.current_goal, self.goal_uid) = self._generate_random_goal()
(self.current_obs, self.obs_uids) = self._generate_random_obstacles()
self._p.stepSimulation()
if (self.goal is None):
eef_position = np.array(self._p.getLinkState(self._robot.robot_uid, self._robot.eef_uid)[BULLET_LINK_POSE_INDEX])
distance_to_goal = np.linalg.norm((eef_position[:self.workspace_dim] - self.current_goal[:self.workspace_dim]))
else:
distance_to_goal = np.inf
if ((not self._collision(buffer=self._initial_collision_buffer)) and (not self._goal_obstacle_collision(buffer=self._initial_collision_buffer)) and (distance_to_goal >= self._initial_goal_distance_min)):
print('Successfully generated a valid initial configuration')
break
print('config in collision...regenerating...')
self._observation = self.get_extended_observation()
return np.array(self._observation) | reset time and simulator | rmp2/envs/robot_env.py | reset | UWRobotLearning/rmp2 | 17 | python | def reset(self):
'\n \n '
self.terminated = False
self._env_step_counter = 0
self._p.resetSimulation()
self._p.setPhysicsEngineParameter(numSolverIterations=150)
self._p.setTimeStep(self._time_step)
self._p.setGravity(0, 0, self._gravity)
self.goal_uid = None
self.obs_uids = []
self._robot = robot_sim.create_robot_sim(self.robot_name, self._p, self._time_step, mode=self._acc_control_mode)
while True:
self._clear_goal_and_obstacles()
self._generate_random_initial_config()
(self.current_goal, self.goal_uid) = self._generate_random_goal()
(self.current_obs, self.obs_uids) = self._generate_random_obstacles()
self._p.stepSimulation()
if (self.goal is None):
eef_position = np.array(self._p.getLinkState(self._robot.robot_uid, self._robot.eef_uid)[BULLET_LINK_POSE_INDEX])
distance_to_goal = np.linalg.norm((eef_position[:self.workspace_dim] - self.current_goal[:self.workspace_dim]))
else:
distance_to_goal = np.inf
if ((not self._collision(buffer=self._initial_collision_buffer)) and (not self._goal_obstacle_collision(buffer=self._initial_collision_buffer)) and (distance_to_goal >= self._initial_goal_distance_min)):
print('Successfully generated a valid initial configuration')
break
print('config in collision...regenerating...')
self._observation = self.get_extended_observation()
return np.array(self._observation) | def reset(self):
'\n \n '
self.terminated = False
self._env_step_counter = 0
self._p.resetSimulation()
self._p.setPhysicsEngineParameter(numSolverIterations=150)
self._p.setTimeStep(self._time_step)
self._p.setGravity(0, 0, self._gravity)
self.goal_uid = None
self.obs_uids = []
self._robot = robot_sim.create_robot_sim(self.robot_name, self._p, self._time_step, mode=self._acc_control_mode)
while True:
self._clear_goal_and_obstacles()
self._generate_random_initial_config()
(self.current_goal, self.goal_uid) = self._generate_random_goal()
(self.current_obs, self.obs_uids) = self._generate_random_obstacles()
self._p.stepSimulation()
if (self.goal is None):
eef_position = np.array(self._p.getLinkState(self._robot.robot_uid, self._robot.eef_uid)[BULLET_LINK_POSE_INDEX])
distance_to_goal = np.linalg.norm((eef_position[:self.workspace_dim] - self.current_goal[:self.workspace_dim]))
else:
distance_to_goal = np.inf
if ((not self._collision(buffer=self._initial_collision_buffer)) and (not self._goal_obstacle_collision(buffer=self._initial_collision_buffer)) and (distance_to_goal >= self._initial_goal_distance_min)):
print('Successfully generated a valid initial configuration')
break
print('config in collision...regenerating...')
self._observation = self.get_extended_observation()
return np.array(self._observation)<|docstring|>reset time and simulator<|endoftext|> |
91c0af06d69275e9ed74e6de16a91094711383f5a257a17c21de408716483743 | def get_extended_observation(self):
'\n get observation array\n :return obs: an nd array containing\n sin(joint angles), cos(joint angles),\n joint velocities, \n goal - end-effector position\n obstacle information\n '
(joint_poses, joint_vels, _) = self._robot.get_observation()
eef_position = np.array(self._p.getLinkState(self._robot.robot_uid, self._robot.eef_uid)[BULLET_LINK_POSE_INDEX])
delta_x = (self.current_goal[:self.workspace_dim] - eef_position[:self.workspace_dim])
vector_obstacles = []
for obs_uid in self.obs_uids:
closest_points = self._p.getClosestPoints(self._robot.robot_uid, obs_uid, 1000)
distance_to_obs = 1000.0
vector_obs = np.array([0.0, 0.0, 0.0])
for point in closest_points:
if (point[BULLET_CLOSEST_POINT_DISTANCE_INDEX] <= distance_to_obs):
distance_to_obs = point[BULLET_CLOSEST_POINT_DISTANCE_INDEX]
vector_obs = (distance_to_obs * np.array(point[BULLET_CLOSEST_POINT_CONTACT_NORMAL_INDEX]))
vector_obstacles.append(vector_obs[:self.workspace_dim])
for i in range(len(self.obs_uids), self.max_obstacle_num):
vector_obstacles.append(np.zeros((self.workspace_dim,)))
vector_obstacles = np.array(vector_obstacles).flatten()
self._observation = np.concatenate((np.sin(joint_poses), np.cos(joint_poses), joint_vels, delta_x, vector_obstacles, self.current_obs))
return self._observation | get observation array
:return obs: an nd array containing
sin(joint angles), cos(joint angles),
joint velocities,
goal - end-effector position
obstacle information | rmp2/envs/robot_env.py | get_extended_observation | UWRobotLearning/rmp2 | 17 | python | def get_extended_observation(self):
'\n get observation array\n :return obs: an nd array containing\n sin(joint angles), cos(joint angles),\n joint velocities, \n goal - end-effector position\n obstacle information\n '
(joint_poses, joint_vels, _) = self._robot.get_observation()
eef_position = np.array(self._p.getLinkState(self._robot.robot_uid, self._robot.eef_uid)[BULLET_LINK_POSE_INDEX])
delta_x = (self.current_goal[:self.workspace_dim] - eef_position[:self.workspace_dim])
vector_obstacles = []
for obs_uid in self.obs_uids:
closest_points = self._p.getClosestPoints(self._robot.robot_uid, obs_uid, 1000)
distance_to_obs = 1000.0
vector_obs = np.array([0.0, 0.0, 0.0])
for point in closest_points:
if (point[BULLET_CLOSEST_POINT_DISTANCE_INDEX] <= distance_to_obs):
distance_to_obs = point[BULLET_CLOSEST_POINT_DISTANCE_INDEX]
vector_obs = (distance_to_obs * np.array(point[BULLET_CLOSEST_POINT_CONTACT_NORMAL_INDEX]))
vector_obstacles.append(vector_obs[:self.workspace_dim])
for i in range(len(self.obs_uids), self.max_obstacle_num):
vector_obstacles.append(np.zeros((self.workspace_dim,)))
vector_obstacles = np.array(vector_obstacles).flatten()
self._observation = np.concatenate((np.sin(joint_poses), np.cos(joint_poses), joint_vels, delta_x, vector_obstacles, self.current_obs))
return self._observation | def get_extended_observation(self):
'\n get observation array\n :return obs: an nd array containing\n sin(joint angles), cos(joint angles),\n joint velocities, \n goal - end-effector position\n obstacle information\n '
(joint_poses, joint_vels, _) = self._robot.get_observation()
eef_position = np.array(self._p.getLinkState(self._robot.robot_uid, self._robot.eef_uid)[BULLET_LINK_POSE_INDEX])
delta_x = (self.current_goal[:self.workspace_dim] - eef_position[:self.workspace_dim])
vector_obstacles = []
for obs_uid in self.obs_uids:
closest_points = self._p.getClosestPoints(self._robot.robot_uid, obs_uid, 1000)
distance_to_obs = 1000.0
vector_obs = np.array([0.0, 0.0, 0.0])
for point in closest_points:
if (point[BULLET_CLOSEST_POINT_DISTANCE_INDEX] <= distance_to_obs):
distance_to_obs = point[BULLET_CLOSEST_POINT_DISTANCE_INDEX]
vector_obs = (distance_to_obs * np.array(point[BULLET_CLOSEST_POINT_CONTACT_NORMAL_INDEX]))
vector_obstacles.append(vector_obs[:self.workspace_dim])
for i in range(len(self.obs_uids), self.max_obstacle_num):
vector_obstacles.append(np.zeros((self.workspace_dim,)))
vector_obstacles = np.array(vector_obstacles).flatten()
self._observation = np.concatenate((np.sin(joint_poses), np.cos(joint_poses), joint_vels, delta_x, vector_obstacles, self.current_obs))
return self._observation<|docstring|>get observation array
:return obs: an nd array containing
sin(joint angles), cos(joint angles),
joint velocities,
goal - end-effector position
obstacle information<|endoftext|> |
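Note: from the concatenation above, the observation length works out to

len(obs) = 3 * cspace_dim + workspace_dim * (1 + max_obstacle_num) + len(current_obs)

i.e. sine, cosine, and velocity of each joint, the goal offset, one zero-padded closest-point vector per obstacle slot, and the raw obstacle configuration, whose length depends on how _generate_random_obstacles (not shown here) encodes the obstacles.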
e6bc6da3ccc39cc8365f5af6788164b5c523c5ec72e30802acb2fd32f7f9bc98 | def _termination(self):
'\n check whether the current episode is terminated\n due to either out of steps or collision\n '
if (self.terminated or (self._env_step_counter > self._horizon)):
self._observation = self.get_extended_observation()
self.terminated = True
return True
if (not self._terminate_after_collision):
return False
if self._collision():
self.terminated = True
return True
return False | check whether the current episode is terminated
due to either out of steps or collision | rmp2/envs/robot_env.py | _termination | UWRobotLearning/rmp2 | 17 | python | def _termination(self):
'\n check whether the current episode is terminated\n due to either out of steps or collision\n '
if (self.terminated or (self._env_step_counter > self._horizon)):
self._observation = self.get_extended_observation()
self.terminated = True
return True
if (not self._terminate_after_collision):
return False
if self._collision():
self.terminated = True
return True
return False | def _termination(self):
'\n check whether the current episode is terminated\n due to either out of steps or collision\n '
if (self.terminated or (self._env_step_counter > self._horizon)):
self._observation = self.get_extended_observation()
self.terminated = True
return True
if (not self._terminate_after_collision):
return False
if self._collision():
self.terminated = True
return True
return False<|docstring|>check whether the current episode is terminated
due to either out of steps or collision<|endoftext|> |
eeb414ad4598b79a4dd3265cc15eaa281687a3d3bb21b02b9d888e18bf09c50b | def _get_reward(self):
'\n the reward function\n :return reward: the current reward\n '
eef_position = np.array(self._p.getLinkState(self._robot.robot_uid, self._robot.eef_uid)[BULLET_LINK_POSE_INDEX])
distance_to_goal = np.linalg.norm((eef_position[:self.workspace_dim] - self.current_goal[:self.workspace_dim]))
reward_goal = self._get_goal_reward(distance_to_goal)
reward_obs = 0.0
for obs_uid in self.obs_uids:
closest_points = self._p.getClosestPoints(self._robot.robot_uid, obs_uid, 1000)
distance_to_obs = 1000.0
for point in closest_points:
distance_to_obs = min(distance_to_obs, point[BULLET_CLOSEST_POINT_DISTANCE_INDEX])
if (point[BULLET_CLOSEST_POINT_CONTACT_FLAG_INDEX] and self._terminate_after_collision):
self.terminated = True
reward_obs += self._get_obstacle_reward(distance_to_obs)
(_, _, joint_torques) = self._robot.get_observation()
reward_ctrl = (- np.square(joint_torques).sum())
reward = (((reward_goal * self._goal_reward_weight) + (reward_obs * self._obs_reward_weight)) + (reward_ctrl * self._ctrl_reward_weight))
reward = np.clip(reward, (- self._max_reward), self._max_reward)
return reward | the reward function
:return reward: the current reward | rmp2/envs/robot_env.py | _get_reward | UWRobotLearning/rmp2 | 17 | python | def _get_reward(self):
'\n the reward function\n :return reward: the current reward\n '
eef_position = np.array(self._p.getLinkState(self._robot.robot_uid, self._robot.eef_uid)[BULLET_LINK_POSE_INDEX])
distance_to_goal = np.linalg.norm((eef_position[:self.workspace_dim] - self.current_goal[:self.workspace_dim]))
reward_goal = self._get_goal_reward(distance_to_goal)
reward_obs = 0.0
for obs_uid in self.obs_uids:
closest_points = self._p.getClosestPoints(self._robot.robot_uid, obs_uid, 1000)
distance_to_obs = 1000.0
for point in closest_points:
distance_to_obs = min(distance_to_obs, point[BULLET_CLOSEST_POINT_DISTANCE_INDEX])
if (point[BULLET_CLOSEST_POINT_CONTACT_FLAG_INDEX] and self._terminate_after_collision):
self.terminated = True
reward_obs += self._get_obstacle_reward(distance_to_obs)
(_, _, joint_torques) = self._robot.get_observation()
reward_ctrl = (- np.square(joint_torques).sum())
reward = (((reward_goal * self._goal_reward_weight) + (reward_obs * self._obs_reward_weight)) + (reward_ctrl * self._ctrl_reward_weight))
reward = np.clip(reward, (- self._max_reward), self._max_reward)
return reward | def _get_reward(self):
'\n the reward function\n :return reward: the current reward\n '
eef_position = np.array(self._p.getLinkState(self._robot.robot_uid, self._robot.eef_uid)[BULLET_LINK_POSE_INDEX])
distance_to_goal = np.linalg.norm((eef_position[:self.workspace_dim] - self.current_goal[:self.workspace_dim]))
reward_goal = self._get_goal_reward(distance_to_goal)
reward_obs = 0.0
for obs_uid in self.obs_uids:
closest_points = self._p.getClosestPoints(self._robot.robot_uid, obs_uid, 1000)
distance_to_obs = 1000.0
for point in closest_points:
distance_to_obs = min(distance_to_obs, point[BULLET_CLOSEST_POINT_DISTANCE_INDEX])
if (point[BULLET_CLOSEST_POINT_CONTACT_FLAG_INDEX] and self._terminate_after_collision):
self.terminated = True
reward_obs += self._get_obstacle_reward(distance_to_obs)
(_, _, joint_torques) = self._robot.get_observation()
reward_ctrl = (- np.square(joint_torques).sum())
reward = (((reward_goal * self._goal_reward_weight) + (reward_obs * self._obs_reward_weight)) + (reward_ctrl * self._ctrl_reward_weight))
reward = np.clip(reward, (- self._max_reward), self._max_reward)
return reward<|docstring|>the reward function
:return reward: the current reward<|endoftext|> |
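Note: written out from the code above, the per-step reward is

r = \operatorname{clip}\big( w_{goal}\, r_{goal}(d_{goal}) + w_{obs} \sum_i r_{obs}(d_i) - w_{ctrl}\, \lVert \tau \rVert_2^2,\ -r_{max},\ r_{max} \big)

where d_{goal} is the end-effector-to-goal distance, d_i the closest distance to obstacle i, \tau the joint torques, and the weights and clip bound come from the goal_reward_weight, obs_reward_weight, ctrl_reward_weight, and max_reward config entries.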
15ca0d8cf903008cf3210a695011bb39718bddada3e17147b6675df2c692e978 | def _get_obstacle_reward(self, distance_to_obs):
'\n the part of the reward function for avoiding obstacles\n '
if (self._obs_reward_model == 'linear'):
reward_obs = (- max(0.0, (1.0 - ((1.0 * distance_to_obs) / self._obs_reward_length_scale))))
elif (self._obs_reward_model == 'gaussian'):
reward_obs = (- np.exp((((- 0.5) * (distance_to_obs ** 2)) / (self._obs_reward_length_scale ** 2))))
elif (self._obs_reward_model == 'laplace'):
reward_obs = (- np.exp(((- distance_to_obs) / self._obs_reward_length_scale)))
else:
Warning('warning: invalid reward model')
reward_obs = 0.0
return reward_obs | the part of the reward function for avoiding obstacles | rmp2/envs/robot_env.py | _get_obstacle_reward | UWRobotLearning/rmp2 | 17 | python | def _get_obstacle_reward(self, distance_to_obs):
'\n \n '
if (self._obs_reward_model == 'linear'):
reward_obs = (- max(0.0, (1.0 - ((1.0 * distance_to_obs) / self._obs_reward_length_scale))))
elif (self._obs_reward_model == 'gaussian'):
reward_obs = (- np.exp((((- 0.5) * (distance_to_obs ** 2)) / (self._obs_reward_length_scale ** 2))))
elif (self._obs_reward_model == 'laplace'):
reward_obs = (- np.exp(((- distance_to_obs) / self._obs_reward_length_scale)))
else:
Warning('warning: invalid reward model')
reward_obs = 0.0
return reward_obs | def _get_obstacle_reward(self, distance_to_obs):
'\n \n '
if (self._obs_reward_model == 'linear'):
reward_obs = (- max(0.0, (1.0 - ((1.0 * distance_to_obs) / self._obs_reward_length_scale))))
elif (self._obs_reward_model == 'gaussian'):
reward_obs = (- np.exp((((- 0.5) * (distance_to_obs ** 2)) / (self._obs_reward_length_scale ** 2))))
elif (self._obs_reward_model == 'laplace'):
reward_obs = (- np.exp(((- distance_to_obs) / self._obs_reward_length_scale)))
else:
Warning('warning: invalid reward model')
reward_obs = 0.0
return reward_obs<|docstring|>the part of the reward function for avoiding obstacles<|endoftext|>
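Note: in closed form, with d the closest distance to the obstacle and \ell the obs_reward_length_scale, the three penalty models above are

r_{obs}(d) = -\max(0,\ 1 - d/\ell)        (linear)
r_{obs}(d) = -\exp(-d^2 / (2\ell^2))      (gaussian)
r_{obs}(d) = -\exp(-d / \ell)             (laplace)

all of which decay to zero as the robot moves away from the obstacle (the linear model is exactly zero for d >= \ell).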
5451bc973f5f04efa883227f3475ee93adc0f8a7692d98e71956e89f0679e900 | def _get_goal_reward(self, distance_to_goal):
'\n the part of the reward function for going to the goal\n '
if (self._goal_reward_model == 'linear'):
reward_goal = ((((self.workspace_radius * 2.0) - distance_to_goal) / self.workspace_radius) / 2.0)
elif (self._goal_reward_model == 'gaussian'):
reward_goal = np.exp((((- 0.5) * (distance_to_goal ** 2)) / (self._goal_reward_length_scale ** 2)))
elif (self._goal_reward_model == 'laplace'):
reward_goal = np.exp(((- distance_to_goal) / self._goal_reward_length_scale))
else:
Warning('warning: invalid reward model')
reward_goal = 0.0
return reward_goal | the part of the reward function for going to the goal | rmp2/envs/robot_env.py | _get_goal_reward | UWRobotLearning/rmp2 | 17 | python | def _get_goal_reward(self, distance_to_goal):
'\n \n '
if (self._goal_reward_model == 'linear'):
reward_goal = ((((self.workspace_radius * 2.0) - distance_to_goal) / self.workspace_radius) / 2.0)
elif (self._goal_reward_model == 'gaussian'):
reward_goal = np.exp((((- 0.5) * (distance_to_goal ** 2)) / (self._goal_reward_length_scale ** 2)))
elif (self._goal_reward_model == 'laplace'):
reward_goal = np.exp(((- distance_to_goal) / self._goal_reward_length_scale))
else:
Warning('warning: invalid reward model')
reward_goal = 0.0
return reward_goal | def _get_goal_reward(self, distance_to_goal):
'\n \n '
if (self._goal_reward_model == 'linear'):
reward_goal = ((((self.workspace_radius * 2.0) - distance_to_goal) / self.workspace_radius) / 2.0)
elif (self._goal_reward_model == 'gaussian'):
reward_goal = np.exp((((- 0.5) * (distance_to_goal ** 2)) / (self._goal_reward_length_scale ** 2)))
elif (self._goal_reward_model == 'laplace'):
reward_goal = np.exp(((- distance_to_goal) / self._goal_reward_length_scale))
else:
Warning('warning: invalid reward model')
reward_goal = 0.0
return reward_goal<|docstring|>the part of the reward function for going to the goal<|endoftext|>
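Note: analogously, with R the workspace_radius and \ell the goal_reward_length_scale, the goal models above are

r_{goal}(d) = (2R - d) / (2R)             (linear)
r_{goal}(d) = \exp(-d^2 / (2\ell^2))      (gaussian)
r_{goal}(d) = \exp(-d / \ell)             (laplace)

each peaking at 1 when the end effector reaches the goal (d = 0).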
1c7dc2b4989b1c79f0c10b88a60e6aecd9bc85333898eeec9df8bf4414fabaa9 | def _collision(self, buffer=0.0):
'\n check whether the robot is in collision with obstacles\n :param buffer: buffer for collision checking\n '
for obs_uid in self.obs_uids:
closest_points = self._p.getClosestPoints(self._robot.robot_uid, obs_uid, buffer)
if (len(closest_points) > 0):
return True
if False:
closest_points = self._p.getClosestPoints(self._robot.robot_uid, self._robot.robot_uid, buffer)
if (len(closest_points) > 31):
return True
return False | check whether the robot is in collision with obstacles
:param buffer: buffer for collision checking | rmp2/envs/robot_env.py | _collision | UWRobotLearning/rmp2 | 17 | python | def _collision(self, buffer=0.0):
'\n check whether the robot is in collision with obstacles\n :param buffer: buffer for collision checking\n '
for obs_uid in self.obs_uids:
closest_points = self._p.getClosestPoints(self._robot.robot_uid, obs_uid, buffer)
if (len(closest_points) > 0):
return True
if False:
closest_points = self._p.getClosestPoints(self._robot.robot_uid, self._robot.robot_uid, buffer)
if (len(closest_points) > 31):
return True
return False | def _collision(self, buffer=0.0):
'\n check whether the robot is in collision with obstacles\n :param buffer: buffer for collision checking\n '
for obs_uid in self.obs_uids:
closest_points = self._p.getClosestPoints(self._robot.robot_uid, obs_uid, buffer)
if (len(closest_points) > 0):
return True
if False:
closest_points = self._p.getClosestPoints(self._robot.robot_uid, self._robot.robot_uid, buffer)
if (len(closest_points) > 31):
return True
return False<|docstring|>check whether the robot is in collision with obstacles
:param buffer: buffer for collision checking<|endoftext|> |
51c9cd27de616a347112313e645b2f2e1f07972954dbe571aac3b569a177cb32 | def _goal_obstacle_collision(self, buffer=0.0):
'\n check whether the (potentially randomly generated) goal\n and obstacles are in collision\n :param buffer: buffer for collision checking\n '
(goal_position, _) = self._p.getBasePositionAndOrientation(self.goal_uid)
collision_goal = add_collision_goal(self._p, goal_position)
for obs_uid in self.obs_uids:
closest_points = self._p.getClosestPoints(collision_goal, obs_uid, buffer)
if (len(closest_points) > 0):
self._p.removeBody(collision_goal)
return True
self._p.removeBody(collision_goal)
return False | check whether the (potentially randomly generated) goal
and obstacles are in collision
:param buffer: buffer for collision checking | rmp2/envs/robot_env.py | _goal_obstacle_collision | UWRobotLearning/rmp2 | 17 | python | def _goal_obstacle_collision(self, buffer=0.0):
'\n check whether the (potentially randomly generated) goal\n and obstacles are in collision\n :param buffer: buffer for collision checking\n '
(goal_position, _) = self._p.getBasePositionAndOrientation(self.goal_uid)
collision_goal = add_collision_goal(self._p, goal_position)
for obs_uid in self.obs_uids:
closest_points = self._p.getClosestPoints(collision_goal, obs_uid, buffer)
if (len(closest_points) > 0):
self._p.removeBody(collision_goal)
return True
self._p.removeBody(collision_goal)
return False | def _goal_obstacle_collision(self, buffer=0.0):
'\n check whether the (potentially randomly generated) goal\n and obstacles are in collision\n :param buffer: buffer for collision checking\n '
(goal_position, _) = self._p.getBasePositionAndOrientation(self.goal_uid)
collision_goal = add_collision_goal(self._p, goal_position)
for obs_uid in self.obs_uids:
closest_points = self._p.getClosestPoints(collision_goal, obs_uid, buffer)
if (len(closest_points) > 0):
self._p.removeBody(collision_goal)
return True
self._p.removeBody(collision_goal)
return False<|docstring|>check whether the (potentially randomly generated) goal
and obstacles are in collision
:param buffer: buffer for collision checking<|endoftext|> |
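add_collision_goal above is a repository helper that is not shown here; a plausible minimal equivalent spawns a static sphere-shaped proxy at the goal position so the standard collision query can run against it (the sphere radius is an assumption):

import pybullet as p

def add_collision_goal(client, position, radius=0.05):
    # massless body => static; it exists only for collision queries
    shape = client.createCollisionShape(p.GEOM_SPHERE, radius=radius)
    return client.createMultiBody(baseMass=0,
                                  baseCollisionShapeIndex=shape,
                                  basePosition=position)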
6f70cac0fac0cabf109fb9a59b8c13bb32d4fb61f7a1f08c99dd87d504077ed6 | def _clear_goal_and_obstacles(self):
'\n clear the goal and obstacle objects in pybullet\n '
self.current_goal = None
if (self.goal_uid is not None):
self._p.removeBody(self.goal_uid)
self.goal_uid = None
self.current_obs = []
for obs_uid in self.obs_uids:
self._p.removeBody(obs_uid)
self.obs_uids = [] | clear the goal and obstacle objects in pybullet | rmp2/envs/robot_env.py | _clear_goal_and_obstacles | UWRobotLearning/rmp2 | 17 | python | def _clear_goal_and_obstacles(self):
'\n \n '
self.current_goal = None
if (self.goal_uid is not None):
self._p.removeBody(self.goal_uid)
self.goal_uid = None
self.current_obs = []
for obs_uid in self.obs_uids:
self._p.removeBody(obs_uid)
self.obs_uids = [] | def _clear_goal_and_obstacles(self):
'\n \n '
self.current_goal = None
if (self.goal_uid is not None):
self._p.removeBody(self.goal_uid)
self.goal_uid = None
self.current_obs = []
for obs_uid in self.obs_uids:
self._p.removeBody(obs_uid)
self.obs_uids = []<|docstring|>clear the goal and obstacle objects in pybullet<|endoftext|> |
31ccaaa3bc187b510001dd959e62d39b6ad7c86e97968dcefa779511c0ff48a9 | def _generate_random_initial_config(self):
'\n generate a random initial configuration and \n joint velocities for the robot\n '
lower_limit = self._robot._joint_lower_limit
upper_limit = self._robot._joint_upper_limit
lower_limit = np.maximum((lower_limit + self._initial_joint_limit_buffer), (- np.pi))
upper_limit = np.minimum((upper_limit - self._initial_joint_limit_buffer), np.pi)
if (self.q_init is None):
initial_config = self.np_random.uniform(low=lower_limit, high=upper_limit)
else:
initial_config = (self.q_init + self.np_random.uniform(low=(- 0.1), high=0.1, size=self.cspace_dim))
initial_config = np.clip(initial_config, lower_limit, upper_limit)
initial_vel = self.np_random.uniform(low=(- 0.005), high=0.005, size=self.cspace_dim)
self._robot.reset(initial_config, initial_vel) | generate a random initial configuration and
joint velocities for the robot | rmp2/envs/robot_env.py | _generate_random_initial_config | UWRobotLearning/rmp2 | 17 | python | def _generate_random_initial_config(self):
'\n generate a random initial configuration and \n joint velocities for the robot\n '
lower_limit = self._robot._joint_lower_limit
upper_limit = self._robot._joint_upper_limit
lower_limit = np.maximum((lower_limit + self._initial_joint_limit_buffer), (- np.pi))
upper_limit = np.minimum((upper_limit - self._initial_joint_limit_buffer), np.pi)
if (self.q_init is None):
initial_config = self.np_random.uniform(low=lower_limit, high=upper_limit)
else:
initial_config = (self.q_init + self.np_random.uniform(low=(- 0.1), high=0.1, size=self.cspace_dim))
initial_config = np.clip(initial_config, lower_limit, upper_limit)
initial_vel = self.np_random.uniform(low=(- 0.005), high=0.005, size=self.cspace_dim)
self._robot.reset(initial_config, initial_vel) | def _generate_random_initial_config(self):
'\n generate a random initial configuration and \n joint velocities for the robot\n '
lower_limit = self._robot._joint_lower_limit
upper_limit = self._robot._joint_upper_limit
lower_limit = np.maximum((lower_limit + self._initial_joint_limit_buffer), (- np.pi))
upper_limit = np.minimum((upper_limit - self._initial_joint_limit_buffer), np.pi)
if (self.q_init is None):
initial_config = self.np_random.uniform(low=lower_limit, high=upper_limit)
else:
initial_config = (self.q_init + self.np_random.uniform(low=(- 0.1), high=0.1, size=self.cspace_dim))
initial_config = np.clip(initial_config, lower_limit, upper_limit)
initial_vel = self.np_random.uniform(low=(- 0.005), high=0.005, size=self.cspace_dim)
self._robot.reset(initial_config, initial_vel)<|docstring|>generate a random initial configuration and
joint velocities for the robot<|endoftext|> |
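The reset above shrinks the joint range by a safety buffer before sampling, so start poses stay clear of the hard limits, then draws small random joint velocities. The same idea in isolation (buffer and velocity range are illustrative):

import numpy as np

rng = np.random.default_rng(0)

def sample_initial_state(lower, upper, buffer=0.1):
    # pull the sampling range inside the hard limits and clamp to [-pi, pi]
    lo = np.maximum(np.asarray(lower) + buffer, -np.pi)
    hi = np.minimum(np.asarray(upper) - buffer, np.pi)
    q = rng.uniform(lo, hi)                        # joint positions
    dq = rng.uniform(-0.005, 0.005, size=q.shape)  # small joint velocities
    return q, dq

q, dq = sample_initial_state([-2.9, -1.8, -2.9], [2.9, 1.8, 2.9])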
f3d2c607598921520248c0a68fcd954363ac225356ec4d0324a84df492919259 | @abstractmethod
def _generate_random_goal(self):
'\n randomly generate a goal for the end effector\n '
pass | randomly generate a goal for the end effector | rmp2/envs/robot_env.py | _generate_random_goal | UWRobotLearning/rmp2 | 17 | python | @abstractmethod
def _generate_random_goal(self):
'\n \n '
pass | @abstractmethod
def _generate_random_goal(self):
'\n \n '
pass<|docstring|>randomly generate a goal for the end effector<|endoftext|> |
2a656222d97cfc213e9617978b38613900fb5c5d5194085a7cec3fbfe2f81530 | @abstractmethod
def _generate_random_obstacles(self):
'\n randomly generate obstacles in the environment\n '
pass | randomly generate obstacles in the environment | rmp2/envs/robot_env.py | _generate_random_obstacles | UWRobotLearning/rmp2 | 17 | python | @abstractmethod
def _generate_random_obstacles(self):
'\n \n '
pass | @abstractmethod
def _generate_random_obstacles(self):
'\n \n '
pass<|docstring|>randomly generate obstacles in the environment<|endoftext|> |
e36bd95fe3479624b50d29c421ad4255eb506b3e7af52ac86ce4221c6c5f6d96 | def __init__(self, player: str, region: str):
'\n calls the init of the superclass and sets the region that the player beat.\n :param player: the player that defeated an elite 4.\n :param region: the elite 4 of what region was beat.\n '
self.region = region
self.EVENTNAME = 'elite4'
super(Elite4, self).__init__(player) | calls the init of the superclass and sets the region that the player beat.
:param player: the player that defeated an elite 4.
:param region: the elite 4 of what region was beat. | ppobyter/events/elite4.py | __init__ | graatje/highscoresbot | 0 | python | def __init__(self, player: str, region: str):
'\n calls the init of the superclass and sets the region that the player beat.\n :param player: the player that defeated an elite 4.\n :param region: the elite 4 of what region was beat.\n '
self.region = region
self.EVENTNAME = 'elite4'
super(Elite4, self).__init__(player) | def __init__(self, player: str, region: str):
'\n calls the init of the superclass and sets the region that the player beat.\n :param player: the player that defeated an elite 4.\n :param region: the elite 4 of what region was beat.\n '
self.region = region
self.EVENTNAME = 'elite4'
super(Elite4, self).__init__(player)<|docstring|>calls the init of the superclass and sets the region that the player beat.
:param player: the player that defeated an elite 4.
:param region: the elite 4 of what region was beat.<|endoftext|> |
0a1d0cd10336fbb6a620f5a929b0cfcaabadc3f5a5a4a29a6daed8c1fe25198d | def determineRecipients(self):
'\n Here the channelrecipients are determined.\n '
self._determinechannelrecipients() | Here the channelrecipients are determined. | ppobyter/events/elite4.py | determineRecipients | graatje/highscoresbot | 0 | python | def determineRecipients(self):
'\n \n '
self._determinechannelrecipients() | def determineRecipients(self):
'\n \n '
self._determinechannelrecipients()<|docstring|>Here the channelrecipients are determined.<|endoftext|> |
b4115de485f29e746deff3f38aea6d9ba18b457e5d513ed884aa50355715a583 | def makeMessage(self) -> str:
'\n Make the message that gets sent to the recipients.\n :return: The message that will get sent.\n '
return f'{self.player} has beat the {self.region} Elite 4!' | Make the message that gets sent to the recipients.
:return: The message that will get sent. | ppobyter/events/elite4.py | makeMessage | graatje/highscoresbot | 0 | python | def makeMessage(self) -> str:
'\n Make the message that gets sent to the recipients.\n :return: The message that will get sent.\n '
return f'{self.player} has beat the {self.region} Elite 4!' | def makeMessage(self) -> str:
'\n Make the message that gets sent to the recipients.\n :return: The message that will get sent.\n '
return f'{self.player} has beat the {self.region} Elite 4!'<|docstring|>Make the message that gets sent to the recipients.
:return: The message that will get sent.<|endoftext|> |
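Tying the three Elite4 records together, a hypothetical usage, assuming the Event base constructor only stores the player and triggers recipient lookup:

event = Elite4('Ash', 'Kanto')
print(event.makeMessage())  # Ash has beat the Kanto Elite 4!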
9278ef17e7d5333baf4b6b1bf7b301bce56686c65d5c064127899ca3f40d6a3a | def area_triangle(base, height):
'\n calculate area of triangle\n parameters:\n base, int\n '
A = ((base * 0.5) * height)
return A | calculate area of triangle
parameters:
base, int | shapes/triangle/area.py | area_triangle | sgill2/tutorial | 0 | python | def area_triangle(base, height):
'\n calculate area of triangle\n parameters:\n base, int\n '
A = ((base * 0.5) * height)
return A | def area_triangle(base, height):
'\n calculate area of triangle\n parameters:\n base, int\n '
A = ((base * 0.5) * height)
return A<|docstring|>calculate area of triangle
parameters:
base, int<|endoftext|> |
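The docstring above documents only base; the function needs both base and height (any numbers). A quick check (the package-style import assumes the repo directories are importable):

from shapes.triangle.area import area_triangle

print(area_triangle(4, 3))     # 0.5 * 4 * 3 = 6.0
print(area_triangle(10, 2.5))  # 12.5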
463d2addee7db06a156719b099d27b3073abffeeddde36f0efb0e530b364d7e6 | @task
def process_file(doc):
'Transfer uploaded file to S3 and queue up message to process PDF.'
key = upload_file_to_s3(doc)
doc.remote_document = ('http://%s.s3.amazonaws.com/%s' % (key.bucket.name, key.name))
doc.date_stored = datetime.utcnow()
doc.status = 'S'
doc.save()
queue_json_message(doc, key)
doc.status = 'Q'
doc.date_queued = datetime.utcnow()
doc.save()
return True | Transfer uploaded file to S3 and queue up message to process PDF. | pdf/tasks.py | process_file | seekelvis/django-pdf | 22 | python | @task
def process_file(doc):
key = upload_file_to_s3(doc)
doc.remote_document = ('http://%s.s3.amazonaws.com/%s' % (key.bucket.name, key.name))
doc.date_stored = datetime.utcnow()
doc.status = 'S'
doc.save()
queue_json_message(doc, key)
doc.status = 'Q'
doc.date_queued = datetime.utcnow()
doc.save()
return True | @task
def process_file(doc):
key = upload_file_to_s3(doc)
doc.remote_document = ('http://%s.s3.amazonaws.com/%s' % (key.bucket.name, key.name))
doc.date_stored = datetime.utcnow()
doc.status = 'S'
doc.save()
queue_json_message(doc, key)
doc.status = 'Q'
doc.date_queued = datetime.utcnow()
doc.save()
return True<|docstring|>Transfer uploaded file to S3 and queue up message to process PDF.<|endoftext|> |
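The task above receives a model instance directly, which Celery must serialize into the broker message; a common variant passes only the primary key so the worker re-fetches fresh state. A sketch under that assumption (Document, upload_file_to_s3 and queue_json_message stand in for the project's own model and helpers):

from datetime import datetime
from celery import shared_task

@shared_task
def process_file_by_id(doc_id):
    doc = Document.objects.get(pk=doc_id)  # re-fetch instead of unpickling
    key = upload_file_to_s3(doc)
    doc.remote_document = f'http://{key.bucket.name}.s3.amazonaws.com/{key.name}'
    doc.status, doc.date_stored = 'S', datetime.utcnow()  # S: stored on S3
    doc.save()
    queue_json_message(doc, key)
    doc.status, doc.date_queued = 'Q', datetime.utcnow()  # Q: queued for processing
    doc.save()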
ab572824cbd3bf4e07e6c6847ef6aa37dde1c90225efb370396db20f139ed448 | def returnConnected(server, client):
'Take two Protocol instances and connect them.\n '
clientTransport = FakeTransport()
client.makeConnection(clientTransport)
serverTransport = FakeTransport()
server.makeConnection(serverTransport)
pump = IOPump(client, server, clientTransport, serverTransport)
pump.flush()
pump.flush()
return pump | Take two Protocol instances and connect them. | ldaptor/test/util.py | returnConnected | CratlePeter/ldaptor | 0 | python | def returnConnected(server, client):
'\n '
clientTransport = FakeTransport()
client.makeConnection(clientTransport)
serverTransport = FakeTransport()
server.makeConnection(serverTransport)
pump = IOPump(client, server, clientTransport, serverTransport)
pump.flush()
pump.flush()
return pump | def returnConnected(server, client):
'\n '
clientTransport = FakeTransport()
client.makeConnection(clientTransport)
serverTransport = FakeTransport()
server.makeConnection(serverTransport)
pump = IOPump(client, server, clientTransport, serverTransport)
pump.flush()
pump.flush()
return pump<|docstring|>Take two Protocol instances and connect them.<|endoftext|> |
ff115741ff73231fcad8d84a9961ce2dcb5fdc6eb054ca78fa1fb102412ab800 | def pump(self):
'Move data back and forth.\n\n Returns whether any data was moved.\n '
self.clientIO.seek(0)
self.serverIO.seek(0)
cData = self.clientIO.read()
sData = self.serverIO.read()
self.clientIO.seek(0)
self.serverIO.seek(0)
self.clientIO.truncate()
self.serverIO.truncate()
self.server.dataReceived(cData)
self.client.dataReceived(sData)
if (cData or sData):
return 1
else:
return 0 | Move data back and forth.
Returns whether any data was moved. | ldaptor/test/util.py | pump | CratlePeter/ldaptor | 0 | python | def pump(self):
'Move data back and forth.\n\n Returns whether any data was moved.\n '
self.clientIO.seek(0)
self.serverIO.seek(0)
cData = self.clientIO.read()
sData = self.serverIO.read()
self.clientIO.seek(0)
self.serverIO.seek(0)
self.clientIO.truncate()
self.serverIO.truncate()
self.server.dataReceived(cData)
self.client.dataReceived(sData)
if (cData or sData):
return 1
else:
return 0 | def pump(self):
'Move data back and forth.\n\n Returns whether any data was moved.\n '
self.clientIO.seek(0)
self.serverIO.seek(0)
cData = self.clientIO.read()
sData = self.serverIO.read()
self.clientIO.seek(0)
self.serverIO.seek(0)
self.clientIO.truncate()
self.serverIO.truncate()
self.server.dataReceived(cData)
self.client.dataReceived(sData)
if (cData or sData):
return 1
else:
return 0<|docstring|>Move data back and forth.
Returns whether any data was moved.<|endoftext|> |
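A sketch of how returnConnected and IOPump are typically used in a protocol test; Echo and Collector are placeholder Twisted protocols, not ldaptor classes, and the bytes handling assumes a modern Twisted:

from twisted.internet.protocol import Protocol
from ldaptor.test.util import returnConnected

class Echo(Protocol):
    def dataReceived(self, data):
        self.transport.write(data)   # server echoes everything back

class Collector(Protocol):
    received = b''
    def dataReceived(self, data):
        self.received += data        # client records what came back

server, client = Echo(), Collector()
pump = returnConnected(server, client)  # wires up the two FakeTransports
client.transport.write(b'ping')
pump.flush()                            # shuttle bytes until both sides go idle
assert client.received == b'ping'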
f545bad1d9e905af7f5958b96d3661129c2170a71711971e5e174b6792c0a9d9 | def get_arguments(self):
'\n Get arguments\n '
return self.arguments | Get arguments | lib/old/form.py | get_arguments | inab/vre_tool_generator | 0 | python | def get_arguments(self):
'\n \n '
return self.arguments | def get_arguments(self):
'\n \n '
return self.arguments<|docstring|>Get arguments<|endoftext|> |
0543447ace9a1474b65d5c158a2972440661b34cb23c831fbf1a22a92b655624 | def set_arguments(self, _id):
'\n Set arguments\n '
args = list()
cwl_wf_url = {'name': self.cwl_static_keys[0], 'description': self.cwl_static_keys[1], 'help': ((self.cwl_static_keys[1] + ' for ') + _id), 'type': self.cwl_static_keys[2], 'value': self.cwl_wf, 'required': True}
args.append(cwl_wf_url)
self.arguments = args
return self.arguments | Set arguments | lib/old/form.py | set_arguments | inab/vre_tool_generator | 0 | python | def set_arguments(self, _id):
'\n \n '
args = list()
cwl_wf_url = {'name': self.cwl_static_keys[0], 'description': self.cwl_static_keys[1], 'help': ((self.cwl_static_keys[1] + ' for ') + _id), 'type': self.cwl_static_keys[2], 'value': self.cwl_wf, 'required': True}
args.append(cwl_wf_url)
self.arguments = args
return self.arguments | def set_arguments(self, _id):
'\n \n '
args = list()
cwl_wf_url = {'name': self.cwl_static_keys[0], 'description': self.cwl_static_keys[1], 'help': ((self.cwl_static_keys[1] + ' for ') + _id), 'type': self.cwl_static_keys[2], 'value': self.cwl_wf, 'required': True}
args.append(cwl_wf_url)
self.arguments = args
return self.arguments<|docstring|>Set arguments<|endoftext|> |