Initializes a UUID (or GUID) data type map.
Args:
data_type_definition (DataTypeDefinition): data type definition.
def __init__(self, data_type_definition):
super(UUIDMap, self).__init__(data_type_definition)
self._byte_order = data_type_definition.byte_order
Folds the data type into a byte stream.
Args:
mapped_value (object): mapped value.
Returns:
bytes: byte stream.
Raises:
FoldingError: if the data type definition cannot be folded into
the byte stream.
def FoldByteStream(self, mapped_value, **unused_kwargs):
value = None
try:
if self._byte_order == definitions.BYTE_ORDER_BIG_ENDIAN:
value = mapped_value.bytes
elif self._byte_order == definitions.BYTE_ORDER_LITTLE_ENDIAN:
value = mapped_value.bytes_le
except Exception as exception:
error_string = (
'Unable to write: {0:s} to byte stream with error: {1!s}').format(
self._data_type_definition.name, exception)
raise errors.FoldingError(error_string)
return value
Maps the data type on a byte stream.
Args:
byte_stream (bytes): byte stream.
byte_offset (Optional[int]): offset into the byte stream where to start.
context (Optional[DataTypeMapContext]): data type map context.
Returns:
uuid.UUID: mapped value.
Raises:
MappingError: if the data type definition cannot be mapped on
the byte stream.
def MapByteStream(
self, byte_stream, byte_offset=0, context=None, **unused_kwargs):
data_type_size = self._data_type_definition.GetByteSize()
self._CheckByteStreamSize(byte_stream, byte_offset, data_type_size)
try:
if self._byte_order == definitions.BYTE_ORDER_BIG_ENDIAN:
mapped_value = uuid.UUID(
bytes=byte_stream[byte_offset:byte_offset + 16])
elif self._byte_order == definitions.BYTE_ORDER_LITTLE_ENDIAN:
mapped_value = uuid.UUID(
bytes_le=byte_stream[byte_offset:byte_offset + 16])
except Exception as exception:
error_string = (
'Unable to read: {0:s} from byte stream at offset: {1:d} '
'with error: {2!s}').format(
self._data_type_definition.name, byte_offset, exception)
raise errors.MappingError(error_string)
if context:
context.byte_size = data_type_size
return mapped_value
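As an illustration of the two byte orders handled above: uuid.UUID exposes both encodings directly, so mapping and folding are exact inverses. A minimal standalone sketch (the byte values are made up):
import uuid

byte_stream = b'\x01\x00\x00\x00\x02\x00\x03\x00\x04\x05\x06\x07\x08\x09\x0a\x0b'
# Little-endian (GUID) interpretation, as used for BYTE_ORDER_LITTLE_ENDIAN.
mapped_value = uuid.UUID(bytes_le=byte_stream[:16])
print(mapped_value)  # 00000001-0002-0003-0405-060708090a0b
# Folding is the inverse operation.
print(mapped_value.bytes_le == byte_stream)  # True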
Initializes a sequence data type map.
Args:
data_type_definition (DataTypeDefinition): data type definition.
def __init__(self, data_type_definition):
element_data_type_definition = self._GetElementDataTypeDefinition(
data_type_definition)
super(ElementSequenceDataTypeMap, self).__init__(data_type_definition)
self._element_data_type_map = DataTypeMapFactory.CreateDataTypeMapByType(
element_data_type_definition)
self._element_data_type_definition = element_data_type_definition
Calculates the elements data size.
Args:
context (Optional[DataTypeMapContext]): data type map context, used to
determine the size hint.
Returns:
int: the elements data size or None if not available.
def _CalculateElementsDataSize(self, context):
elements_data_size = None
if self._HasElementsDataSize():
elements_data_size = self._EvaluateElementsDataSize(context)
elif self._HasNumberOfElements():
element_byte_size = self._element_data_type_definition.GetByteSize()
if element_byte_size is not None:
number_of_elements = self._EvaluateNumberOfElements(context)
elements_data_size = number_of_elements * element_byte_size
return elements_data_size
Evaluates elements data size.
Args:
context (DataTypeMapContext): data type map context.
Returns:
int: elements data size.
Raises:
MappingError: if the elements data size cannot be determined.
def _EvaluateElementsDataSize(self, context):
elements_data_size = None
if self._data_type_definition.elements_data_size:
elements_data_size = self._data_type_definition.elements_data_size
elif self._data_type_definition.elements_data_size_expression:
expression = self._data_type_definition.elements_data_size_expression
namespace = {}
if context and context.values:
namespace.update(context.values)
# Make sure __builtins__ contains an empty dictionary.
namespace['__builtins__'] = {}
try:
elements_data_size = eval(expression, namespace) # pylint: disable=eval-used
except Exception as exception:
raise errors.MappingError(
'Unable to determine elements data size with error: {0!s}'.format(
exception))
if elements_data_size is None or elements_data_size < 0:
raise errors.MappingError(
'Invalid elements data size: {0!s}'.format(elements_data_size))
return elements_data_size
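As a standalone illustration of the expression evaluation above, with a hypothetical expression and context values:
expression = 'number_of_entries * entry_size'
namespace = {'number_of_entries': 8, 'entry_size': 4}
# An empty __builtins__ prevents the expression from reaching built-in functions.
namespace['__builtins__'] = {}
elements_data_size = eval(expression, namespace)  # pylint: disable=eval-used
print(elements_data_size)  # 32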
Evaluates number of elements.
Args:
context (DataTypeMapContext): data type map context.
Returns:
int: number of elements.
Raises:
MappingError: if the number of elements cannot be determined.
def _EvaluateNumberOfElements(self, context):
number_of_elements = None
if self._data_type_definition.number_of_elements:
number_of_elements = self._data_type_definition.number_of_elements
elif self._data_type_definition.number_of_elements_expression:
expression = self._data_type_definition.number_of_elements_expression
namespace = {}
if context and context.values:
namespace.update(context.values)
# Make sure __builtins__ contains an empty dictionary.
namespace['__builtins__'] = {}
try:
number_of_elements = eval(expression, namespace) # pylint: disable=eval-used
except Exception as exception:
raise errors.MappingError(
'Unable to determine number of elements with error: {0!s}'.format(
exception))
if number_of_elements is None or number_of_elements < 0:
raise errors.MappingError(
'Invalid number of elements: {0!s}'.format(number_of_elements))
return number_of_elements
Retrieves the element data type definition.
Args:
data_type_definition (DataTypeDefinition): data type definition.
Returns:
DataTypeDefinition: element data type definition.
Raises:
FormatError: if the element data type cannot be determined from the data
type definition.
def _GetElementDataTypeDefinition(self, data_type_definition):
if not data_type_definition:
raise errors.FormatError('Missing data type definition')
element_data_type_definition = getattr(
data_type_definition, 'element_data_type_definition', None)
if not element_data_type_definition:
raise errors.FormatError(
'Invalid data type definition missing element')
return element_data_type_definition
Retrieves a hint about the size.
Args:
context (Optional[DataTypeMapContext]): data type map context, used to
determine the size hint.
Returns:
int: hint of the number of bytes needed from the byte stream or None.
def GetSizeHint(self, context=None, **unused_kwargs):
context_state = getattr(context, 'state', {})
elements_data_size = self.GetByteSize()
if elements_data_size:
return elements_data_size
try:
elements_data_size = self._CalculateElementsDataSize(context)
except errors.MappingError:
pass
if elements_data_size is None and self._HasElementsTerminator():
size_hints = context_state.get('size_hints', {})
size_hint = size_hints.get(self._data_type_definition.name, None)
elements_data_size = 0
if size_hint:
elements_data_size = size_hint.byte_size
if not size_hint or not size_hint.is_complete:
elements_data_size += self._element_data_type_definition.GetByteSize()
return elements_data_size
Initializes a sequence data type map.
Args:
data_type_definition (DataTypeDefinition): data type definition.
def __init__(self, data_type_definition):
super(SequenceMap, self).__init__(data_type_definition)
self._fold_byte_stream = None
self._map_byte_stream = None
self._operation = None
if (self._element_data_type_definition.IsComposite() or
data_type_definition.elements_data_size_expression is not None or
data_type_definition.elements_terminator is not None or
data_type_definition.number_of_elements_expression is not None):
self._fold_byte_stream = self._CompositeFoldByteStream
self._map_byte_stream = self._CompositeMapByteStream
else:
self._fold_byte_stream = self._LinearFoldByteStream
self._map_byte_stream = self._LinearMapByteStream
self._operation = self._GetByteStreamOperation()
Maps a sequence of composite data types on a byte stream.
Args:
byte_stream (bytes): byte stream.
byte_offset (Optional[int]): offset into the byte stream where to start.
context (Optional[DataTypeMapContext]): data type map context.
Returns:
tuple[object, ...]: mapped values.
Raises:
ByteStreamTooSmallError: if the byte stream is too small.
MappingError: if the data type definition cannot be mapped on
the byte stream.
def _CompositeMapByteStream(
self, byte_stream, byte_offset=0, context=None, **unused_kwargs):
elements_data_size = None
elements_terminator = None
number_of_elements = None
if self._HasElementsDataSize():
elements_data_size = self._EvaluateElementsDataSize(context)
element_byte_size = self._element_data_type_definition.GetByteSize()
if element_byte_size is not None:
number_of_elements, _ = divmod(elements_data_size, element_byte_size)
else:
elements_terminator = (
self._element_data_type_definition.elements_terminator)
elif self._HasElementsTerminator():
elements_terminator = self._data_type_definition.elements_terminator
elif self._HasNumberOfElements():
number_of_elements = self._EvaluateNumberOfElements(context)
if elements_terminator is None and number_of_elements is None:
raise errors.MappingError(
'Unable to determine element terminator or number of elements')
context_state = getattr(context, 'state', {})
elements_data_offset = context_state.get('elements_data_offset', 0)
element_index = context_state.get('element_index', 0)
element_value = None
mapped_values = context_state.get('mapped_values', [])
size_hints = context_state.get('size_hints', {})
subcontext = context_state.get('context', None)
if not subcontext:
subcontext = DataTypeMapContext()
try:
while byte_stream[byte_offset:]:
if (number_of_elements is not None and
element_index == number_of_elements):
break
if (elements_data_size is not None and
elements_data_offset >= elements_data_size):
break
element_value = self._element_data_type_map.MapByteStream(
byte_stream, byte_offset=byte_offset, context=subcontext)
byte_offset += subcontext.byte_size
elements_data_offset += subcontext.byte_size
element_index += 1
mapped_values.append(element_value)
if (elements_terminator is not None and
element_value == elements_terminator):
break
except errors.ByteStreamTooSmallError as exception:
context_state['context'] = subcontext
context_state['elements_data_offset'] = elements_data_offset
context_state['element_index'] = element_index
context_state['mapped_values'] = mapped_values
raise errors.ByteStreamTooSmallError(exception)
except Exception as exception:
raise errors.MappingError(exception)
if number_of_elements is not None and element_index != number_of_elements:
context_state['context'] = subcontext
context_state['elements_data_offset'] = elements_data_offset
context_state['element_index'] = element_index
context_state['mapped_values'] = mapped_values
error_string = (
'Unable to read: {0:s} from byte stream at offset: {1:d} '
'with error: missing element: {2:d}').format(
self._data_type_definition.name, byte_offset, element_index - 1)
raise errors.ByteStreamTooSmallError(error_string)
if (elements_terminator is not None and
element_value != elements_terminator and (
elements_data_size is None or
elements_data_offset < elements_data_size)):
byte_stream_size = len(byte_stream)
size_hints[self._data_type_definition.name] = DataTypeMapSizeHint(
byte_stream_size - byte_offset)
context_state['context'] = subcontext
context_state['elements_data_offset'] = elements_data_offset
context_state['element_index'] = element_index
context_state['mapped_values'] = mapped_values
context_state['size_hints'] = size_hints
error_string = (
'Unable to read: {0:s} from byte stream at offset: {1:d} '
'with error: unable to find elements terminator').format(
self._data_type_definition.name, byte_offset)
raise errors.ByteStreamTooSmallError(error_string)
if context:
context.byte_size = elements_data_offset
context.state = {}
return tuple(mapped_values)
Folds the data type into a byte stream.
Args:
mapped_value (object): mapped value.
Returns:
bytes: byte stream.
Raises:
FoldingError: if the data type definition cannot be folded into
the byte stream.
def _LinearFoldByteStream(self, mapped_value, **unused_kwargs):
try:
return self._operation.WriteTo(mapped_value)
except Exception as exception:
error_string = (
'Unable to write: {0:s} to byte stream with error: {1!s}').format(
self._data_type_definition.name, exception)
raise errors.FoldingError(error_string)
Maps a data type sequence on a byte stream.
Args:
byte_stream (bytes): byte stream.
byte_offset (Optional[int]): offset into the byte stream where to start.
context (Optional[DataTypeMapContext]): data type map context.
Returns:
tuple[object, ...]: mapped values.
Raises:
MappingError: if the data type definition cannot be mapped on
the byte stream.
def _LinearMapByteStream(
self, byte_stream, byte_offset=0, context=None, **unused_kwargs):
elements_data_size = self._data_type_definition.GetByteSize()
self._CheckByteStreamSize(byte_stream, byte_offset, elements_data_size)
try:
struct_tuple = self._operation.ReadFrom(byte_stream[byte_offset:])
mapped_values = map(self._element_data_type_map.MapValue, struct_tuple)
except Exception as exception:
error_string = (
'Unable to read: {0:s} from byte stream at offset: {1:d} '
'with error: {2!s}').format(
self._data_type_definition.name, byte_offset, exception)
raise errors.MappingError(error_string)
if context:
context.byte_size = elements_data_size
return tuple(mapped_values)
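Conceptually the linear path is a single struct read; a standalone sketch of mapping a hypothetical sequence of three little-endian 32-bit values (the actual operation object is created by _GetByteStreamOperation):
import struct

byte_stream = b'\x01\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00'
struct_tuple = struct.unpack('<3I', byte_stream)
print(struct_tuple)  # (1, 2, 3)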
Initializes a stream data type map.
Args:
data_type_definition (DataTypeDefinition): data type definition.
Raises:
FormatError: if the data type map cannot be determined from the data
type definition.
def __init__(self, data_type_definition):
super(StreamMap, self).__init__(data_type_definition)
self._fold_byte_stream = None
self._map_byte_stream = None
if self._element_data_type_definition.IsComposite():
raise errors.FormatError('Unsupported composite element data type')
Folds the data type into a byte stream.
Args:
mapped_value (object): mapped value.
context (Optional[DataTypeMapContext]): data type map context.
Returns:
bytes: byte stream.
Raises:
FoldingError: if the data type definition cannot be folded into
the byte stream.
def FoldByteStream(self, mapped_value, context=None, **unused_kwargs):
elements_data_size = self._CalculateElementsDataSize(context)
if elements_data_size is not None:
if elements_data_size != len(mapped_value):
raise errors.FoldingError(
'Mismatch between elements data size and mapped value size')
elif not self._HasElementsTerminator():
raise errors.FoldingError('Unable to determine elements data size')
else:
elements_terminator = self._data_type_definition.elements_terminator
elements_terminator_size = len(elements_terminator)
if mapped_value[-elements_terminator_size:] != elements_terminator:
mapped_value = b''.join([mapped_value, elements_terminator])
return mapped_value
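A standalone sketch of the terminator handling above, with made-up values:
elements_terminator = b'\x00'
mapped_value = b'dtFabric'
if mapped_value[-len(elements_terminator):] != elements_terminator:
  mapped_value = b''.join([mapped_value, elements_terminator])
print(mapped_value)  # b'dtFabric\x00'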
Maps the data type on a byte stream.
Args:
byte_stream (bytes): byte stream.
byte_offset (Optional[int]): offset into the byte stream where to start.
context (Optional[DataTypeMapContext]): data type map context.
Returns:
bytes: mapped value.
Raises:
MappingError: if the data type definition cannot be mapped on
the byte stream.
def MapByteStream(
self, byte_stream, byte_offset=0, context=None, **unused_kwargs):
context_state = getattr(context, 'state', {})
size_hints = context_state.get('size_hints', {})
elements_data_size = self._CalculateElementsDataSize(context)
if elements_data_size is not None:
self._CheckByteStreamSize(byte_stream, byte_offset, elements_data_size)
elif not self._HasElementsTerminator():
raise errors.MappingError(
'Unable to determine elements data size and missing elements '
'terminator')
else:
byte_stream_size = len(byte_stream)
element_byte_size = self._element_data_type_definition.GetByteSize()
elements_data_offset = byte_offset
next_elements_data_offset = elements_data_offset + element_byte_size
elements_terminator = self._data_type_definition.elements_terminator
element_value = byte_stream[
elements_data_offset:next_elements_data_offset]
while byte_stream[elements_data_offset:]:
elements_data_offset = next_elements_data_offset
if element_value == elements_terminator:
elements_data_size = elements_data_offset - byte_offset
break
next_elements_data_offset += element_byte_size
element_value = byte_stream[
elements_data_offset:next_elements_data_offset]
if element_value != elements_terminator:
size_hints[self._data_type_definition.name] = DataTypeMapSizeHint(
byte_stream_size - byte_offset)
context_state['size_hints'] = size_hints
error_string = (
'Unable to read: {0:s} from byte stream at offset: {1:d} '
'with error: unable to find elements terminator').format(
self._data_type_definition.name, byte_offset)
raise errors.ByteStreamTooSmallError(error_string)
if context:
context.byte_size = elements_data_size
size_hints[self._data_type_definition.name] = DataTypeMapSizeHint(
elements_data_size, is_complete=True)
context_state['size_hints'] = size_hints
return byte_stream[byte_offset:byte_offset + elements_data_size]
Initializes a padding data type map.
Args:
data_type_definition (DataTypeDefinition): data type definition.
Raises:
FormatError: if the data type map cannot be determined from the data
type definition.
def __init__(self, data_type_definition):
super(PaddingMap, self).__init__(data_type_definition)
self.byte_size = None
Maps the data type on a byte stream.
Args:
byte_stream (bytes): byte stream.
byte_offset (Optional[int]): offset into the byte stream where to start.
Returns:
object: mapped value.
Raises:
MappingError: if the data type definition cannot be mapped on
the byte stream.
def MapByteStream(self, byte_stream, byte_offset=0, **unused_kwargs):
return byte_stream[byte_offset:byte_offset + self.byte_size]
Folds the data type into a byte stream.
Args:
mapped_value (object): mapped value.
Returns:
bytes: byte stream.
Raises:
FoldingError: if the data type definition cannot be folded into
the byte stream.
def FoldByteStream(self, mapped_value, **kwargs):
try:
byte_stream = mapped_value.encode(self._data_type_definition.encoding)
except Exception as exception:
error_string = (
'Unable to write: {0:s} to byte stream with error: {1!s}').format(
self._data_type_definition.name, exception)
raise errors.FoldingError(error_string)
return super(StringMap, self).FoldByteStream(byte_stream, **kwargs)
Maps the data type on a byte stream.
Args:
byte_stream (bytes): byte stream.
byte_offset (Optional[int]): offset into the byte stream where to start.
Returns:
str: mapped values.
Raises:
MappingError: if the data type definition cannot be mapped on
the byte stream.
def MapByteStream(self, byte_stream, byte_offset=0, **kwargs):
byte_stream = super(StringMap, self).MapByteStream(
byte_stream, byte_offset=byte_offset, **kwargs)
if self._HasElementsTerminator():
# Remove the elements terminator and any trailing data from
# the byte stream.
elements_terminator = self._data_type_definition.elements_terminator
elements_terminator_size = len(elements_terminator)
byte_offset = 0
byte_stream_size = len(byte_stream)
while byte_offset < byte_stream_size:
end_offset = byte_offset + elements_terminator_size
if byte_stream[byte_offset:end_offset] == elements_terminator:
break
byte_offset += elements_terminator_size
byte_stream = byte_stream[:byte_offset]
try:
return byte_stream.decode(self._data_type_definition.encoding)
except Exception as exception:
error_string = (
'Unable to read: {0:s} from byte stream at offset: {1:d} '
'with error: {2!s}').format(
self._data_type_definition.name, byte_offset, exception)
raise errors.MappingError(error_string)
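The terminator scan above walks the byte stream in terminator-sized steps, which keeps multi-byte terminators (for example UTF-16 b'\x00\x00') aligned. A standalone sketch with made-up data:
elements_terminator = b'\x00\x00'
byte_stream = 'dtFabric'.encode('utf-16-le') + elements_terminator + b'trailing'
byte_offset = 0
while byte_offset < len(byte_stream):
  end_offset = byte_offset + len(elements_terminator)
  if byte_stream[byte_offset:end_offset] == elements_terminator:
    break
  byte_offset += len(elements_terminator)
print(byte_stream[:byte_offset].decode('utf-16-le'))  # dtFabric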
Initializes a structure data type map.
Args:
data_type_definition (DataTypeDefinition): data type definition.
def __init__(self, data_type_definition):
super(StructureMap, self).__init__(data_type_definition)
self._attribute_names = self._GetAttributeNames(data_type_definition)
self._data_type_map_cache = {}
self._data_type_maps = self._GetMemberDataTypeMaps(
data_type_definition, self._data_type_map_cache)
self._fold_byte_stream = None
self._format_string = None
self._map_byte_stream = None
self._number_of_attributes = len(self._attribute_names)
self._operation = None
self._structure_values_class = (
runtime.StructureValuesClassFactory.CreateClass(
data_type_definition))
if self._CheckCompositeMap(data_type_definition):
self._fold_byte_stream = self._CompositeFoldByteStream
self._map_byte_stream = self._CompositeMapByteStream
else:
self._fold_byte_stream = self._LinearFoldByteStream
self._map_byte_stream = self._LinearMapByteStream
self._operation = self._GetByteStreamOperation()
Determines if the data type definition needs a composite map.
Args:
data_type_definition (DataTypeDefinition): structure data type definition.
Returns:
bool: True if a composite map is needed, False otherwise.
Raises:
FormatError: if whether a composite map is needed cannot be determined
from the data type definition.
def _CheckCompositeMap(self, data_type_definition):
if not data_type_definition:
raise errors.FormatError('Missing data type definition')
members = getattr(data_type_definition, 'members', None)
if not members:
raise errors.FormatError('Invalid data type definition missing members')
is_composite_map = False
last_member_byte_order = data_type_definition.byte_order
for member_definition in members:
if member_definition.IsComposite():
is_composite_map = True
break
# TODO: check for padding type
# TODO: determine if padding type can be defined as linear
if (last_member_byte_order != definitions.BYTE_ORDER_NATIVE and
member_definition.byte_order != definitions.BYTE_ORDER_NATIVE and
last_member_byte_order != member_definition.byte_order):
is_composite_map = True
break
last_member_byte_order = member_definition.byte_order
return is_composite_map
Folds the data type into a byte stream.
Args:
mapped_value (object): mapped value.
context (Optional[DataTypeMapContext]): data type map context.
Returns:
bytes: byte stream.
Raises:
FoldingError: if the data type definition cannot be folded into
the byte stream.
def _CompositeFoldByteStream(
self, mapped_value, context=None, **unused_kwargs):
context_state = getattr(context, 'state', {})
attribute_index = context_state.get('attribute_index', 0)
subcontext = context_state.get('context', None)
if not subcontext:
subcontext = DataTypeMapContext(values={
type(mapped_value).__name__: mapped_value})
data_attributes = []
for attribute_index in range(attribute_index, self._number_of_attributes):
attribute_name = self._attribute_names[attribute_index]
data_type_map = self._data_type_maps[attribute_index]
member_value = getattr(mapped_value, attribute_name, None)
if data_type_map is None or member_value is None:
continue
member_data = data_type_map.FoldByteStream(
member_value, context=subcontext)
if member_data is None:
return None
data_attributes.append(member_data)
if context:
context.state = {}
return b''.join(data_attributes)
Maps a structure of composite member data types on a byte stream.
Args:
byte_stream (bytes): byte stream.
byte_offset (Optional[int]): offset into the byte stream where to start.
context (Optional[DataTypeMapContext]): data type map context.
Returns:
object: mapped value.
Raises:
MappingError: if the data type definition cannot be mapped on
the byte stream.
def _CompositeMapByteStream(
self, byte_stream, byte_offset=0, context=None, **unused_kwargs):
context_state = getattr(context, 'state', {})
attribute_index = context_state.get('attribute_index', 0)
mapped_values = context_state.get('mapped_values', None)
subcontext = context_state.get('context', None)
if not mapped_values:
mapped_values = self._structure_values_class()
if not subcontext:
subcontext = DataTypeMapContext(values={
type(mapped_values).__name__: mapped_values})
members_data_size = 0
for attribute_index in range(attribute_index, self._number_of_attributes):
attribute_name = self._attribute_names[attribute_index]
data_type_map = self._data_type_maps[attribute_index]
member_definition = self._data_type_definition.members[attribute_index]
condition = getattr(member_definition, 'condition', None)
if condition:
namespace = dict(subcontext.values)
# Make sure __builtins__ contains an empty dictionary.
namespace['__builtins__'] = {}
try:
condition_result = eval(condition, namespace) # pylint: disable=eval-used
except Exception as exception:
raise errors.MappingError(
'Unable to evaluate condition with error: {0!s}'.format(
exception))
if not isinstance(condition_result, bool):
raise errors.MappingError(
'Condition does not result in a boolean value')
if not condition_result:
continue
if isinstance(member_definition, data_types.PaddingDefinition):
_, byte_size = divmod(
members_data_size, member_definition.alignment_size)
if byte_size > 0:
byte_size = member_definition.alignment_size - byte_size
data_type_map.byte_size = byte_size
try:
value = data_type_map.MapByteStream(
byte_stream, byte_offset=byte_offset, context=subcontext)
setattr(mapped_values, attribute_name, value)
except errors.ByteStreamTooSmallError as exception:
context_state['attribute_index'] = attribute_index
context_state['context'] = subcontext
context_state['mapped_values'] = mapped_values
raise errors.ByteStreamTooSmallError(exception)
except Exception as exception:
raise errors.MappingError(exception)
supported_values = getattr(member_definition, 'values', None)
if supported_values and value not in supported_values:
raise errors.MappingError(
'Value: {0!s} not in supported values: {1:s}'.format(
value, ', '.join([
'{0!s}'.format(value) for value in supported_values])))
byte_offset += subcontext.byte_size
members_data_size += subcontext.byte_size
if attribute_index != (self._number_of_attributes - 1):
context_state['attribute_index'] = attribute_index
context_state['context'] = subcontext
context_state['mapped_values'] = mapped_values
error_string = (
'Unable to read: {0:s} from byte stream at offset: {1:d} '
'with error: missing attribute: {2:d}').format(
self._data_type_definition.name, byte_offset, attribute_index)
raise errors.ByteStreamTooSmallError(error_string)
if context:
context.byte_size = members_data_size
context.state = {}
return mapped_values
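The padding computation above is plain modular arithmetic. With hypothetical values:
alignment_size = 4
members_data_size = 6  # number of bytes mapped so far
_, byte_size = divmod(members_data_size, alignment_size)
if byte_size > 0:
  byte_size = alignment_size - byte_size
print(byte_size)  # 2 padding bytes to reach the next 4-byte boundary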
Determines the attribute (or field) names of the members.
Args:
data_type_definition (DataTypeDefinition): data type definition.
Returns:
list[str]: attribute names.
Raises:
FormatError: if the attribute names cannot be determined from the data
type definition.
def _GetAttributeNames(self, data_type_definition):
if not data_type_definition:
raise errors.FormatError('Missing data type definition')
attribute_names = []
for member_definition in data_type_definition.members:
attribute_names.append(member_definition.name)
return attribute_names
Retrieves the member data type maps.
Args:
data_type_definition (DataTypeDefinition): data type definition.
data_type_map_cache (dict[str, DataTypeMap]): cached data type maps.
Returns:
list[DataTypeMap]: member data type maps.
Raises:
FormatError: if the data type maps cannot be determined from the data
type definition.
def _GetMemberDataTypeMaps(self, data_type_definition, data_type_map_cache):
if not data_type_definition:
raise errors.FormatError('Missing data type definition')
members = getattr(data_type_definition, 'members', None)
if not members:
raise errors.FormatError('Invalid data type definition missing members')
data_type_maps = []
members_data_size = 0
for member_definition in members:
if isinstance(member_definition, data_types.MemberDataTypeDefinition):
member_definition = member_definition.member_data_type_definition
if (data_type_definition.byte_order != definitions.BYTE_ORDER_NATIVE and
member_definition.byte_order == definitions.BYTE_ORDER_NATIVE):
# Make a copy of the data type definition where byte-order can be
# safely changed.
member_definition = copy.copy(member_definition)
member_definition.name = '_{0:s}_{1:s}'.format(
data_type_definition.name, member_definition.name)
member_definition.byte_order = data_type_definition.byte_order
if member_definition.name not in data_type_map_cache:
data_type_map = DataTypeMapFactory.CreateDataTypeMapByType(
member_definition)
data_type_map_cache[member_definition.name] = data_type_map
data_type_map = data_type_map_cache[member_definition.name]
if members_data_size is not None:
if not isinstance(member_definition, data_types.PaddingDefinition):
byte_size = member_definition.GetByteSize()
else:
_, byte_size = divmod(
members_data_size, member_definition.alignment_size)
if byte_size > 0:
byte_size = member_definition.alignment_size - byte_size
data_type_map.byte_size = byte_size
if byte_size is None:
members_data_size = None
else:
members_data_size += byte_size
data_type_maps.append(data_type_map)
return data_type_maps
Folds the data type into a byte stream.
Args:
mapped_value (object): mapped value.
Returns:
bytes: byte stream.
Raises:
FoldingError: if the data type definition cannot be folded into
the byte stream.
def _LinearFoldByteStream(self, mapped_value, **unused_kwargs):
try:
attribute_values = [
getattr(mapped_value, attribute_name, None)
for attribute_name in self._attribute_names]
attribute_values = [
value for value in attribute_values if value is not None]
return self._operation.WriteTo(tuple(attribute_values))
except Exception as exception:
error_string = (
'Unable to write: {0:s} to byte stream with error: {1!s}').format(
self._data_type_definition.name, exception)
raise errors.FoldingError(error_string)
Maps a structure on a byte stream.
Args:
byte_stream (bytes): byte stream.
byte_offset (Optional[int]): offset into the byte stream where to start.
context (Optional[DataTypeMapContext]): data type map context.
Returns:
object: mapped value.
Raises:
MappingError: if the data type definition cannot be mapped on
the byte stream.
def _LinearMapByteStream(
self, byte_stream, byte_offset=0, context=None, **unused_kwargs):
members_data_size = self._data_type_definition.GetByteSize()
self._CheckByteStreamSize(byte_stream, byte_offset, members_data_size)
try:
struct_tuple = self._operation.ReadFrom(byte_stream[byte_offset:])
struct_values = []
for attribute_index, value in enumerate(struct_tuple):
data_type_map = self._data_type_maps[attribute_index]
member_definition = self._data_type_definition.members[attribute_index]
value = data_type_map.MapValue(value)
supported_values = getattr(member_definition, 'values', None)
if supported_values and value not in supported_values:
raise errors.MappingError(
'Value: {0!s} not in supported values: {1:s}'.format(
value, ', '.join([
'{0!s}'.format(value) for value in supported_values])))
struct_values.append(value)
mapped_value = self._structure_values_class(*struct_values)
except Exception as exception:
error_string = (
'Unable to read: {0:s} from byte stream at offset: {1:d} '
'with error: {2!s}').format(
self._data_type_definition.name, byte_offset, exception)
raise errors.MappingError(error_string)
if context:
context.byte_size = members_data_size
return mapped_value
Retrieves a hint about the size.
Args:
context (Optional[DataTypeMapContext]): data type map context, used to
determine the size hint.
Returns:
int: hint of the number of bytes needed from the byte stream or None.
def GetSizeHint(self, context=None, **unused_kwargs):
context_state = getattr(context, 'state', {})
subcontext = context_state.get('context', None)
if not subcontext:
mapped_values = context_state.get('mapped_values', None)
subcontext = DataTypeMapContext(values={
type(mapped_values).__name__: mapped_values})
size_hint = 0
for data_type_map in self._data_type_maps:
data_type_size = data_type_map.GetSizeHint(context=subcontext)
if data_type_size is None:
break
size_hint += data_type_size
return size_hint
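GetSizeHint is what makes incremental mapping possible: a caller can grow its buffer until MapByteStream succeeds. A sketch of that pattern, assuming a data type map plus the DataTypeMapContext and errors names used throughout this code, and that the size hint grows after each partial map:
def ReadStructure(file_object, data_type_map):
  """Reads and maps a structure, growing the buffer per size hint."""
  context = DataTypeMapContext()
  data = b''
  while True:
    size_hint = data_type_map.GetSizeHint(context=context)
    read_size = size_hint - len(data)
    if read_size > 0:
      read_data = file_object.read(read_size)
      if len(read_data) != read_size:
        raise IOError('unexpected end of data')
      data = b''.join([data, read_data])
    try:
      return data_type_map.MapByteStream(data, context=context)
    except errors.ByteStreamTooSmallError:
      continue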
Folds the data type into a byte stream.
Args:
mapped_value (object): mapped value.
Returns:
bytes: byte stream.
Raises:
FoldingError: if the data type definition cannot be folded into
the byte stream.
def FoldByteStream(self, mapped_value, **unused_kwargs): # pylint: disable=redundant-returns-doc
raise errors.FoldingError(
'Unable to fold {0:s} data type into byte stream'.format(
self._data_type_definition.TYPE_INDICATOR))
Maps the data type on a byte stream.
Args:
byte_stream (bytes): byte stream.
Returns:
object: mapped value.
Raises:
MappingError: if the data type definition cannot be mapped on
the byte stream.
def MapByteStream(self, byte_stream, **unused_kwargs): # pylint: disable=redundant-returns-doc
raise errors.MappingError(
'Unable to map {0:s} data type to byte stream'.format(
self._data_type_definition.TYPE_INDICATOR))
Retrieves the name of an enumeration value by number.
Args:
number (int): number.
Returns:
str: name of the enumeration value or None if no corresponding
enumeration value was found.
def GetName(self, number):
value = self._data_type_definition.values_per_number.get(number, None)
if not value:
return None
return value.name
Initializes a data type maps factory.
Args:
definitions_registry (DataTypeDefinitionsRegistry): data type definitions
registry.
def __init__(self, definitions_registry):
super(DataTypeMapFactory, self).__init__()
self._definitions_registry = definitions_registry
Creates a specific data type map by name.
Args:
definition_name (str): name of the data type definition.
Returns:
DataTypeMap: data type map or None if the data type definition
is not available.
def CreateDataTypeMap(self, definition_name):
data_type_definition = self._definitions_registry.GetDefinitionByName(
definition_name)
if not data_type_definition:
return None
return DataTypeMapFactory.CreateDataTypeMapByType(data_type_definition)
Creates a specific data type map by type indicator.
Args:
data_type_definition (DataTypeDefinition): data type definition.
Returns:
DataTypeMap: data type map or None if the data type definition
is not available.
def CreateDataTypeMapByType(cls, data_type_definition):
data_type_map_class = cls._MAP_PER_DEFINITION.get(
data_type_definition.TYPE_INDICATOR, None)
if not data_type_map_class:
return None
return data_type_map_class(data_type_definition)
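End to end, the factory is typically fed from a definitions registry populated from YAML. A sketch assuming the dtfabric package layout (dtfabric.reader, dtfabric.registry, dtfabric.runtime.data_maps) and a minimal, hypothetical integer definition:
import io

from dtfabric import reader, registry
from dtfabric.runtime import data_maps

yaml_definition = b'''
name: uint32le
type: integer
attributes:
  byte_order: little-endian
  format: unsigned
  size: 4
  units: bytes
'''

definitions_registry = registry.DataTypeDefinitionsRegistry()
definitions_reader = reader.YAMLDataTypeDefinitionsFileReader()
definitions_reader.ReadFileObject(
    definitions_registry, io.BytesIO(yaml_definition))

factory = data_maps.DataTypeMapFactory(definitions_registry)
data_type_map = factory.CreateDataTypeMap('uint32le')
print(data_type_map.MapByteStream(b'\x01\x00\x00\x00'))  # 1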
Initializes a data type definition.
Args:
name (str): name.
aliases (Optional[list[str]]): aliases.
description (Optional[str]): description.
urls (Optional[list[str]]): URLs.
def __init__(self, name, aliases=None, description=None, urls=None):
super(DataTypeDefinition, self).__init__()
self.aliases = aliases or []
self.description = description
self.name = name
self.urls = urls
Initializes a storage data type definition.
Args:
name (str): name.
aliases (Optional[list[str]]): aliases.
description (Optional[str]): description.
urls (Optional[list[str]]): URLs.
def __init__(self, name, aliases=None, description=None, urls=None):
super(StorageDataTypeDefinition, self).__init__(
name, aliases=aliases, description=description, urls=urls)
self.byte_order = definitions.BYTE_ORDER_NATIVE
Initializes a fixed-size data type definition.
Args:
name (str): name.
aliases (Optional[list[str]]): aliases.
description (Optional[str]): description.
urls (Optional[list[str]]): URLs.
def __init__(self, name, aliases=None, description=None, urls=None):
super(FixedSizeDataTypeDefinition, self).__init__(
name, aliases=aliases, description=description, urls=urls)
self.size = definitions.SIZE_NATIVE
self.units = 'bytes'
Initializes a boolean data type definition.
Args:
name (str): name.
aliases (Optional[list[str]]): aliases.
description (Optional[str]): description.
false_value (Optional[int]): value that represents false.
urls (Optional[list[str]]): URLs.
def __init__(
self, name, aliases=None, description=None, false_value=0, urls=None):
super(BooleanDefinition, self).__init__(
name, aliases=aliases, description=description, urls=urls)
self.false_value = false_value
self.true_value = None
Initializes an integer data type definition.
Args:
name (str): name.
aliases (Optional[list[str]]): aliases.
description (Optional[str]): description.
maximum_value (Optional[int]): maximum allowed value of the integer
data type.
minimum_value (Optional[int]): minimum allowed value of the integer
data type.
urls (Optional[list[str]]): URLs.
def __init__(
self, name, aliases=None, description=None, maximum_value=None,
minimum_value=None, urls=None):
super(IntegerDefinition, self).__init__(
name, aliases=aliases, description=description, urls=urls)
self.format = definitions.FORMAT_SIGNED
self.maximum_value = maximum_value
self.minimum_value = minimum_value
Initializes a UUID data type definition.
Args:
name (str): name.
aliases (Optional[list[str]]): aliases.
description (Optional[str]): description.
urls (Optional[list[str]]): URLs.
def __init__(self, name, aliases=None, description=None, urls=None):
super(UUIDDefinition, self).__init__(
name, aliases=aliases, description=description, urls=urls)
self.size = 16
Initializes a padding data type definition.
Args:
name (str): name.
aliases (Optional[list[str]]): aliases.
alignment_size (Optional[int]): alignment size.
description (Optional[str]): description.
urls (Optional[list[str]]): URLs.
def __init__(
self, name, aliases=None, alignment_size=None, description=None,
urls=None):
super(PaddingDefinition, self).__init__(
name, aliases=aliases, description=description, urls=urls)
self.alignment_size = alignment_size
Initializes a sequence data type definition.
Args:
name (str): name.
data_type_definition (DataTypeDefinition): sequence element data type
definition.
aliases (Optional[list[str]]): aliases.
data_type (Optional[str]): name of the sequence element data type.
description (Optional[str]): description.
urls (Optional[list[str]]): URLs.
def __init__(
self, name, data_type_definition, aliases=None, data_type=None,
description=None, urls=None):
super(ElementSequenceDataTypeDefinition, self).__init__(
name, aliases=aliases, description=description, urls=urls)
self.byte_order = getattr(
data_type_definition, 'byte_order', definitions.BYTE_ORDER_NATIVE)
self.elements_data_size = None
self.elements_data_size_expression = None
self.element_data_type = data_type
self.element_data_type_definition = data_type_definition
self.elements_terminator = None
self.number_of_elements = None
self.number_of_elements_expression = None
Initializes a string data type definition.
Args:
name (str): name.
data_type_definition (DataTypeDefinition): string element data type
definition.
aliases (Optional[list[str]]): aliases.
data_type (Optional[str]): name of the string element data type.
description (Optional[str]): description.
urls (Optional[list[str]]): URLs.
def __init__(
self, name, data_type_definition, aliases=None, data_type=None,
description=None, urls=None):
super(StringDefinition, self).__init__(
name, data_type_definition, aliases=aliases, data_type=data_type,
description=description, urls=urls)
self.encoding = 'ascii'
Initializes a data type definition with members.
Args:
name (str): name.
aliases (Optional[list[str]]): aliases.
description (Optional[str]): description.
urls (Optional[list[str]]): URLs.
def __init__(self, name, aliases=None, description=None, urls=None):
super(DataTypeDefinitionWithMembers, self).__init__(
name, aliases=aliases, description=description, urls=urls)
self._byte_size = None
self.members = []
self.sections = []
Adds a member definition.
Args:
member_definition (DataTypeDefinition): member data type definition.
def AddMemberDefinition(self, member_definition):
self._byte_size = None
self.members.append(member_definition)
if self.sections:
section_definition = self.sections[-1]
section_definition.members.append(member_definition)
Initializes a member section definition.
Args:
name (str): name.
def __init__(self, name):
super(MemberSectionDefinition, self).__init__()
self.name = name
self.members = []
Initializes a structure data type definition.
Args:
name (str): name.
aliases (Optional[list[str]]): aliases.
description (Optional[str]): description.
urls (Optional[list[str]]): URLs.
def __init__(self, name, aliases=None, description=None, urls=None):
super(StructureDefinition, self).__init__(
name, aliases=aliases, description=description, urls=urls)
self.family_definition = None
Initializes a constant data type definition.
Args:
name (str): name.
aliases (Optional[list[str]]): aliases.
description (Optional[str]): description.
urls (Optional[list[str]]): URLs.
def __init__(self, name, aliases=None, description=None, urls=None):
super(ConstantDefinition, self).__init__(
name, aliases=aliases, description=description, urls=urls)
self.value = None
Initializes an enumeration value.
Args:
name (str): name.
number (int): number.
aliases (Optional[list[str]]): aliases.
description (Optional[str]): description.
def __init__(self, name, number, aliases=None, description=None):
super(EnumerationValue, self).__init__()
self.aliases = aliases or []
self.description = description
self.name = name
self.number = number
Initializes an enumeration data type definition.
Args:
name (str): name.
aliases (Optional[list[str]]): aliases.
description (Optional[str]): description.
urls (Optional[list[str]]): URLs.
def __init__(self, name, aliases=None, description=None, urls=None):
super(EnumerationDefinition, self).__init__(
name, aliases=aliases, description=description, urls=urls)
self.values = []
self.values_per_alias = {}
self.values_per_name = {}
self.values_per_number = {}
Adds an enumeration value.
Args:
name (str): name.
number (int): number.
aliases (Optional[list[str]]): aliases.
description (Optional[str]): description.
Raises:
KeyError: if the enumeration value already exists.
def AddValue(self, name, number, aliases=None, description=None):
if name in self.values_per_name:
raise KeyError('Value with name: {0:s} already exists.'.format(name))
if number in self.values_per_number:
raise KeyError('Value with number: {0!s} already exists.'.format(number))
for alias in aliases or []:
if alias in self.values_per_alias:
raise KeyError('Value with alias: {0:s} already exists.'.format(alias))
enumeration_value = EnumerationValue(
name, number, aliases=aliases, description=description)
self.values.append(enumeration_value)
self.values_per_name[name] = enumeration_value
self.values_per_number[number] = enumeration_value
for alias in aliases or []:
self.values_per_alias[alias] = enumeration_value
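Usage sketch for the enumeration definition above (the names and numbers are made up):
enumeration = EnumerationDefinition('file_attribute_flags')
enumeration.AddValue('READ_ONLY', 1, aliases=['FILE_ATTRIBUTE_READ_ONLY'])
enumeration.AddValue('HIDDEN', 2)
print(enumeration.values_per_number[2].name)  # HIDDEN
print(enumeration.values_per_alias['FILE_ATTRIBUTE_READ_ONLY'].number)  # 1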
Initializes a format data type definition.
Args:
name (str): name.
aliases (Optional[list[str]]): aliases.
description (Optional[str]): description.
urls (Optional[list[str]]): URLs.
def __init__(self, name, aliases=None, description=None, urls=None):
super(FormatDefinition, self).__init__(
name, aliases=aliases, description=description, urls=urls)
self.metadata = {}
Initializes a structure family data type definition.
Args:
name (str): name.
aliases (Optional[list[str]]): aliases.
description (Optional[str]): description.
urls (Optional[list[str]]): URLs.
def __init__(self, name, aliases=None, description=None, urls=None):
super(StructureFamilyDefinition, self).__init__(
name, aliases=aliases, description=description, urls=urls)
self.members = []
self.runtime = None
Adds a member definition.
Args:
member_definition (DataTypeDefinition): member data type definition.
def AddMemberDefinition(self, member_definition):
self.members.append(member_definition)
member_definition.family_definition = self
Reads a boolean data type definition.
Args:
definitions_registry (DataTypeDefinitionsRegistry): data type definitions
registry.
definition_values (dict[str, object]): definition values.
definition_name (str): name of the definition.
is_member (Optional[bool]): True if the data type definition is a member
data type definition.
Returns:
BooleanDataTypeDefinition: boolean data type definition.
def _ReadBooleanDataTypeDefinition(
self, definitions_registry, definition_values, definition_name,
is_member=False):
return self._ReadFixedSizeDataTypeDefinition(
definitions_registry, definition_values,
data_types.BooleanDefinition, definition_name,
self._SUPPORTED_ATTRIBUTES_BOOLEAN, is_member=is_member,
supported_size_values=(1, 2, 4))
Reads a character data type definition.
Args:
definitions_registry (DataTypeDefinitionsRegistry): data type definitions
registry.
definition_values (dict[str, object]): definition values.
definition_name (str): name of the definition.
is_member (Optional[bool]): True if the data type definition is a member
data type definition.
Returns:
CharacterDataTypeDefinition: character data type definition.
def _ReadCharacterDataTypeDefinition(
self, definitions_registry, definition_values, definition_name,
is_member=False):
return self._ReadFixedSizeDataTypeDefinition(
definitions_registry, definition_values,
data_types.CharacterDefinition, definition_name,
self._SUPPORTED_ATTRIBUTES_FIXED_SIZE_DATA_TYPE,
is_member=is_member, supported_size_values=(1, 2, 4))
Reads a floating-point data type definition.
Args:
definitions_registry (DataTypeDefinitionsRegistry): data type definitions
registry.
definition_values (dict[str, object]): definition values.
definition_name (str): name of the definition.
is_member (Optional[bool]): True if the data type definition is a member
data type definition.
Returns:
FloatingPointDefinition: floating-point data type definition.
def _ReadFloatingPointDataTypeDefinition(
self, definitions_registry, definition_values, definition_name,
is_member=False):
return self._ReadFixedSizeDataTypeDefinition(
definitions_registry, definition_values,
data_types.FloatingPointDefinition, definition_name,
self._SUPPORTED_ATTRIBUTES_FIXED_SIZE_DATA_TYPE,
is_member=is_member, supported_size_values=(4, 8))
Reads a data type definition.
Args:
definitions_registry (DataTypeDefinitionsRegistry): data type definitions
registry.
definition_values (dict[str, object]): definition values.
Returns:
DataTypeDefinition: data type definition or None.
Raises:
DefinitionReaderError: if the definitions values are missing or if
the format is incorrect.
def _ReadDefinition(self, definitions_registry, definition_values):
if not definition_values:
error_message = 'missing definition values'
raise errors.DefinitionReaderError(None, error_message)
name = definition_values.get('name', None)
if not name:
error_message = 'missing name'
raise errors.DefinitionReaderError(None, error_message)
type_indicator = definition_values.get('type', None)
if not type_indicator:
error_message = 'invalid definition missing type'
raise errors.DefinitionReaderError(name, error_message)
data_type_callback = self._DATA_TYPE_CALLBACKS.get(type_indicator, None)
if data_type_callback:
data_type_callback = getattr(self, data_type_callback, None)
if not data_type_callback:
error_message = 'unsupported data type definition: {0:s}.'.format(
type_indicator)
raise errors.DefinitionReaderError(name, error_message)
return data_type_callback(definitions_registry, definition_values, name)
Reads data type definitions from a file into the registry.
Args:
definitions_registry (DataTypeDefinitionsRegistry): data type definitions
registry.
path (str): path of the file to read from.
def ReadFile(self, definitions_registry, path):
with open(path, 'r') as file_object:
self.ReadFileObject(definitions_registry, file_object)
Retrieves a format error location.
Args:
yaml_definition (dict[str, object]): current YAML definition.
last_definition_object (DataTypeDefinition): previous data type
definition.
Returns:
str: format error location.
def _GetFormatErrorLocation(
self, yaml_definition, last_definition_object):
name = yaml_definition.get('name', None)
if name:
error_location = 'in: {0:s}'.format(name or '<NAMELESS>')
elif last_definition_object:
error_location = 'after: {0:s}'.format(last_definition_object.name)
else:
error_location = 'at start'
return error_location
Reads data type definitions from a file-like object into the registry.
Args:
definitions_registry (DataTypeDefinitionsRegistry): data type definitions
registry.
file_object (file): file-like object to read from.
Raises:
FormatError: if the definitions values are missing or if the format is
incorrect.
def ReadFileObject(self, definitions_registry, file_object):
last_definition_object = None
error_location = None
error_message = None
try:
yaml_generator = yaml.safe_load_all(file_object)
for yaml_definition in yaml_generator:
definition_object = self._ReadDefinition(
definitions_registry, yaml_definition)
if not definition_object:
error_location = self._GetFormatErrorLocation(
yaml_definition, last_definition_object)
error_message = '{0:s} Missing definition object.'.format(
error_location)
raise errors.FormatError(error_message)
definitions_registry.RegisterDefinition(definition_object)
last_definition_object = definition_object
except errors.DefinitionReaderError as exception:
error_message = 'in: {0:s} {1:s}'.format(
exception.name or '<NAMELESS>', exception.message)
raise errors.FormatError(error_message)
except (yaml.reader.ReaderError, yaml.scanner.ScannerError) as exception:
error_location = self._GetFormatErrorLocation({}, last_definition_object)
error_message = '{0:s} {1!s}'.format(error_location, exception)
raise errors.FormatError(error_message)
Reads the organization given by identifier from HDX and returns an Organization object
Args:
identifier (str): Identifier of organization
configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
Returns:
Optional[Organization]: Organization object if successful read, None if not
def read_from_hdx(identifier, configuration=None):
# type: (str, Optional[Configuration]) -> Optional['Organization']
organization = Organization(configuration=configuration)
result = organization._load_from_hdx('organization', identifier)
if result:
return organization
return None
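Typical read-only usage, assuming the hdx-python-api package layout this code belongs to (hdx.hdx_configuration.Configuration and hdx.data.organization.Organization); the organization identifier is made up:
from hdx.hdx_configuration import Configuration
from hdx.data.organization import Organization

Configuration.create(hdx_site='prod', user_agent='example', hdx_read_only=True)
organization = Organization.read_from_hdx('some-organization-id')
if organization is not None:
    print(organization['title'])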
Returns the organization's users.
Args:
capacity (Optional[str]): Filter by capacity eg. member, admin. Defaults to None.
Returns:
List[User]: Organization's users.
def get_users(self, capacity=None):
# type: (Optional[str]) -> List[User]
users = list()
usersdicts = self.data.get('users')
if usersdicts is not None:
for userdata in usersdicts:
if capacity is not None and userdata['capacity'] != capacity:
continue
id = userdata.get('id')
if id is None:
id = userdata['name']
user = hdx.data.user.User.read_from_hdx(id, configuration=self.configuration)
user['capacity'] = userdata['capacity']
users.append(user)
return users
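For example, to list only the administrators of the hypothetical organization above:
for user in organization.get_users(capacity='admin'):
    print(user['name'])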
Add new or update existing user in organization with new metadata. The capacity (eg. member,
admin) must be supplied either within the User object or dictionary or using the capacity
argument (which takes precedence).
Args:
user (Union[User,Dict,str]): Either a user id or user metadata either from a User object or a dictionary
capacity (Optional[str]): Capacity of user eg. member, admin. Defaults to None.
Returns:
None
def add_update_user(self, user, capacity=None):
# type: (Union[hdx.data.user.User,Dict,str],Optional[str]) -> None
if isinstance(user, str):
user = hdx.data.user.User.read_from_hdx(user, configuration=self.configuration)
elif isinstance(user, dict):
user = hdx.data.user.User(user, configuration=self.configuration)
if isinstance(user, hdx.data.user.User):
users = self.data.get('users')
if users is None:
users = list()
self.data['users'] = users
if capacity is not None:
user['capacity'] = capacity
self._addupdate_hdxobject(users, 'name', user)
return
raise HDXError('Type %s cannot be added as a user!' % type(user).__name__)
Add new or update existing users in organization with new metadata. The capacity (eg. member,
admin) must be supplied either within the User objects or dictionaries or using the capacity
argument (which takes precedence).
Args:
users (List[Union[User,Dict,str]]): A list of either user ids or users metadata from User objects or dictionaries
capacity (Optional[str]): Capacity of users eg. member, admin. Defaults to None.
Returns:
None
def add_update_users(self, users, capacity=None):
# type: (List[Union[hdx.data.user.User,Dict,str]],Optional[str]) -> None
if not isinstance(users, list):
raise HDXError('Users should be a list!')
for user in users:
self.add_update_user(user, capacity)
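Usage sketch (the user identifiers are made up); the changed user list is only persisted to HDX by a subsequent update call such as update_in_hdx():
organization.add_update_users(['user-id-1', 'user-id-2'], capacity='member')
organization.add_update_user({'name': 'user-id-3'}, capacity='admin')
organization.update_in_hdx()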
Makes a read call to HDX passing in given parameter.
Args:
object_type (str): Description of HDX object type (for messages)
value (str): Value of HDX field
fieldname (str): HDX field name. Defaults to id.
action (Optional[str]): Replacement CKAN action url to use. Defaults to None.
**kwargs: Other fields to pass to CKAN.
Returns:
Tuple[bool, Union[Dict, str]]: (True/False, HDX object metadata/Error)
def _read_from_hdx(self, object_type, value, fieldname='id',
action=None, **kwargs):
# type: (str, str, str, Optional[str], Any) -> Tuple[bool, Union[Dict, str]]
if not fieldname:
raise HDXError('Empty %s field name!' % object_type)
if action is None:
action = self.actions()['show']
data = {fieldname: value}
data.update(kwargs)
try:
result = self.configuration.call_remoteckan(action, data)
return True, result
except NotFound:
return False, '%s=%s: not found!' % (fieldname, value)
except Exception as e:
raisefrom(HDXError, 'Failed when trying to read: %s=%s! (POST)' % (fieldname, value), e)
Helper method to load the HDX object given by identifier from HDX
Args:
object_type (str): Description of HDX object type (for messages)
id_field (str): HDX object identifier
Returns:
bool: True if loaded, False if not
def _load_from_hdx(self, object_type, id_field):
# type: (str, str) -> bool
success, result = self._read_from_hdx(object_type, id_field)
if success:
self.old_data = self.data
self.data = result
return True
logger.debug(result)
return False
Check metadata exists and contains HDX object identifier, and if so load HDX object
Args:
object_type (str): Description of HDX object type (for messages)
id_field_name (str): Name of field containing HDX object identifier
operation (str): Operation to report if error. Defaults to update.
Returns:
None
def _check_load_existing_object(self, object_type, id_field_name, operation='update'):
# type: (str, str, str) -> None
self._check_existing_object(object_type, id_field_name)
if not self._load_from_hdx(object_type, self.data[id_field_name]):
raise HDXError('No existing %s to %s!' % (object_type, operation))
Helper method to check that metadata for HDX object is complete
Args:
object_type (str): Description of HDX object type (for messages)
ignore_fields (List[str]): Any fields to ignore in the check
Returns:
None
def _check_required_fields(self, object_type, ignore_fields):
# type: (str, List[str]) -> None
for field in self.configuration[object_type]['required_fields']:
if field not in self.data and field not in ignore_fields:
raise HDXError('Field %s is missing in %s!' % (field, object_type))
Helper method to merge new metadata into existing HDX object data and update it in HDX
Args:
object_type (str): Description of HDX object type (for messages)
id_field_name (str): Name of field containing HDX object identifier
file_to_upload (Optional[str]): File to upload to HDX
**kwargs: See below
operation (string): Operation to perform eg. patch. Defaults to update.
Returns:
None
def _merge_hdx_update(self, object_type, id_field_name, file_to_upload=None, **kwargs):
# type: (str, str, Optional[str], Any) -> None
merge_two_dictionaries(self.data, self.old_data)
if 'batch_mode' in kwargs: # Whether or not CKAN should change groupings of datasets on /datasets page
self.data['batch_mode'] = kwargs['batch_mode']
if 'skip_validation' in kwargs: # Whether or not CKAN should perform validation steps (checking fields present)
self.data['skip_validation'] = kwargs['skip_validation']
ignore_field = self.configuration['%s' % object_type].get('ignore_on_update')
self.check_required_fields(ignore_fields=[ignore_field])
operation = kwargs.get('operation', 'update')
self._save_to_hdx(operation, id_field_name, file_to_upload)
Helper method to check if HDX object exists in HDX and if so, update it
Args:
object_type (str): Description of HDX object type (for messages)
id_field_name (str): Name of field containing HDX object identifier
file_to_upload (Optional[str]): File to upload to HDX
**kwargs: See below
operation (string): Operation to perform eg. patch. Defaults to update.
Returns:
None
def _update_in_hdx(self, object_type, id_field_name, file_to_upload=None, **kwargs):
# type: (str, str, Optional[str], Any) -> None
self._check_load_existing_object(object_type, id_field_name)
    # We load an existing object even though it may well have been loaded already
# to prevent an admittedly unlikely race condition where someone has updated
# the object in the intervening time
self._merge_hdx_update(object_type, id_field_name, file_to_upload, **kwargs)
| 814,286 |
Creates or updates an HDX object in HDX and returns the HDX object metadata dict
Args:
action (str): Action to perform eg. 'create', 'update'
data (Dict): Data to write to HDX
id_field_name (str): Name of field containing HDX object identifier or None
file_to_upload (Optional[str]): File to upload to HDX
Returns:
Dict: HDX object metadata
|
def _write_to_hdx(self, action, data, id_field_name, file_to_upload=None):
# type: (str, Dict, str, Optional[str]) -> Dict
file = None
try:
if file_to_upload:
file = open(file_to_upload, 'rb')
files = [('upload', file)]
else:
files = None
return self.configuration.call_remoteckan(self.actions()[action], data, files=files)
except Exception as e:
raisefrom(HDXError, 'Failed when trying to %s %s! (POST)' % (action, data[id_field_name]), e)
finally:
if file_to_upload and file:
file.close()
| 814,287 |
Creates or updates an HDX object in HDX, saving the current data and replacing it with the HDX object data
returned from HDX
Args:
action (str): Action to perform: 'create' or 'update'
id_field_name (str): Name of field containing HDX object identifier
file_to_upload (Optional[str]): File to upload to HDX
Returns:
None
|
def _save_to_hdx(self, action, id_field_name, file_to_upload=None):
# type: (str, str, Optional[str]) -> None
result = self._write_to_hdx(action, self.data, id_field_name, file_to_upload)
self.old_data = self.data
self.data = result
| 814,288 |
Helper method to check if HDX object exists in HDX and if so, update it, otherwise create it
Args:
object_type (str): Description of HDX object type (for messages)
id_field_name (str): Name of field containing HDX object identifier
name_field_name (str): Name of field containing HDX object name
file_to_upload (Optional[str]): File to upload to HDX (if url not supplied)
Returns:
None
|
def _create_in_hdx(self, object_type, id_field_name, name_field_name,
file_to_upload=None):
# type: (str, str, str, Optional[str]) -> None
self.check_required_fields()
if id_field_name in self.data and self._load_from_hdx(object_type, self.data[id_field_name]):
logger.warning('%s exists. Updating %s' % (object_type, self.data[id_field_name]))
self._merge_hdx_update(object_type, id_field_name, file_to_upload)
else:
self._save_to_hdx('create', name_field_name, file_to_upload)
| 814,289 |
Helper method to delete an HDX object from HDX
Args:
object_type (str): Description of HDX object type (for messages)
id_field_name (str): Name of field containing HDX object identifier
Returns:
None
|
def _delete_from_hdx(self, object_type, id_field_name):
# type: (str, str) -> None
if id_field_name not in self.data:
raise HDXError('No %s field (mandatory) in %s!' % (id_field_name, object_type))
self._save_to_hdx('delete', id_field_name)
| 814,290 |
Remove an HDX object from a list within the parent HDX object
Args:
objlist (List[Union[T <= HDXObject,Dict]]): list of HDX objects
obj (Union[T <= HDXObject,Dict,str]): Either an id string or HDX object metadata from an HDX object or a dictionary
matchon (str): Field to match on. Defaults to id.
delete (bool): Whether to delete HDX object. Defaults to False.
Returns:
bool: True if object removed, False if not
|
def _remove_hdxobject(self, objlist, obj, matchon='id', delete=False):
# type: (List[Union[HDXObjectUpperBound,Dict]], Union[HDXObjectUpperBound,Dict,str], str, bool) -> bool
if objlist is None:
return False
if isinstance(obj, six.string_types):
obj_id = obj
elif isinstance(obj, dict) or isinstance(obj, HDXObject):
obj_id = obj.get(matchon)
else:
raise HDXError('Type of object not a string, dict or T<=HDXObject')
if not obj_id:
return False
for i, objdata in enumerate(objlist):
objid = objdata.get(matchon)
if objid and objid == obj_id:
if delete:
objlist[i].delete_from_hdx()
del objlist[i]
return True
return False
| 814,292 |
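A behavioral sketch of _remove_hdxobject; obj stands for any HDXObject subclass instance and the ids are hypothetical.

resources = [{'id': 'abc'}, {'id': 'def'}]
assert obj._remove_hdxobject(resources, 'abc') is True   # matched on id and removed
assert resources == [{'id': 'def'}]
assert obj._remove_hdxobject(resources, 'missing') is False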
Helper function to convert supplied list of HDX objects to a list of dicts
Args:
hdxobjects (List[T <= HDXObject]): List of HDX objects to convert
Returns:
List[Dict]: List of HDX objects converted to simple dictionaries
|
def _convert_hdxobjects(self, hdxobjects):
    # type: (List[HDXObjectUpperBound]) -> List[Dict]
    return [hdxobject.data for hdxobject in hdxobjects]
| 814,293 |
Helper function to make a deep copy of a supplied list of HDX objects
Args:
hdxobjects (List[T <= HDXObject]): list of HDX objects to copy
hdxobjectclass (type): Type of the HDX Objects to be copied
attribute_to_copy (Optional[str]): An attribute to copy over from the HDX object. Defaults to None.
Returns:
List[T <= HDXObject]: Deep copy of list of HDX objects
|
def _copy_hdxobjects(self, hdxobjects, hdxobjectclass, attribute_to_copy=None):
# type: (List[HDXObjectUpperBound], type, Optional[str]) -> List[HDXObjectUpperBound]
newhdxobjects = list()
for hdxobject in hdxobjects:
newhdxobjectdata = copy.deepcopy(hdxobject.data)
newhdxobject = hdxobjectclass(newhdxobjectdata, configuration=self.configuration)
if attribute_to_copy:
value = getattr(hdxobject, attribute_to_copy)
setattr(newhdxobject, attribute_to_copy, value)
newhdxobjects.append(newhdxobject)
return newhdxobjects
| 814,294 |
Add a tag
Args:
tag (str): Tag to add
Returns:
bool: True if tag added or False if tag already present
|
def _add_tag(self, tag):
# type: (str) -> bool
tags = self.data.get('tags', None)
if tags:
if tag in [x['name'] for x in tags]:
return False
else:
tags = list()
tags.append({'name': tag})
self.data['tags'] = tags
return True
| 814,297 |
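A sketch of the tag bookkeeping this performs; obj stands for any HDXObject subclass instance, whose data attribute is its backing dictionary.

obj.data = dict()
assert obj._add_tag('health') is True    # creates the tags list
assert obj._add_tag('health') is False   # duplicate rejected
assert obj.data['tags'] == [{'name': 'health'}]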
Add a list of tags
Args:
tags (List[str]): list of tags to add
Returns:
bool: True if all tags added or False if any already present.
|
def _add_tags(self, tags):
# type: (List[str]) -> bool
alltagsadded = True
for tag in tags:
if not self._add_tag(tag):
alltagsadded = False
return alltagsadded
| 814,298 |
Return list of strings from comma separated list
Args:
field (str): Field containing comma separated list
Returns:
List[str]: List of strings
|
def _get_stringlist_from_commastring(self, field):
# type: (str) -> List[str]
strings = self.data.get(field)
if strings:
return strings.split(',')
else:
return list()
| 814,299 |
Add a string to a comma separated list of strings
Args:
field (str): Field containing comma separated list
string (str): String to add
Returns:
bool: True if string added or False if string already present
|
def _add_string_to_commastring(self, field, string):
# type: (str, str) -> bool
if string in self._get_stringlist_from_commastring(field):
return False
strings = '%s,%s' % (self.data.get(field, ''), string)
if strings[0] == ',':
strings = strings[1:]
self.data[field] = strings
return True
| 814,300 |
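A sketch of the comma-string bookkeeping; obj stands for any HDXObject subclass instance and the field name is hypothetical.

obj.data = dict()
assert obj._add_string_to_commastring('methodology_other', 'survey') is True
assert obj._add_string_to_commastring('methodology_other', 'census') is True
assert obj.data['methodology_other'] == 'survey,census'
assert obj._add_string_to_commastring('methodology_other', 'survey') is False  # already present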
Add a list of strings to a comma separated list of strings
Args:
field (str): Field containing comma separated list
strings (List[str]): list of strings to add
Returns:
bool: True if all strings added or False if any already present.
|
def _add_strings_to_commastring(self, field, strings):
# type: (str, List[str]) -> bool
allstringsadded = True
for string in strings:
if not self._add_string_to_commastring(field, string):
allstringsadded = False
return allstringsadded
| 814,301 |
Remove a string from a comma separated list of strings
Args:
field (str): Field containing comma separated list
string (str): String to remove
Returns:
bool: True if string removed or False if not
|
def _remove_string_from_commastring(self, field, string):
    # type: (str, str) -> bool
    # Match whole entries rather than substrings so that removing e.g. 'he'
    # cannot corrupt an entry such as 'health'
    strings = self._get_stringlist_from_commastring(field)
    new_strings = [x for x in strings if x != string]
    if len(new_strings) != len(strings):
        self.data[field] = ','.join(new_strings)
        return True
    return False
| 814,302 |
Reads the resource given by identifier from HDX and returns Resource object
Args:
identifier (str): Identifier of resource
configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
Returns:
Optional[Resource]: Resource object if successful read, None if not
|
def read_from_hdx(identifier, configuration=None):
# type: (str, Optional[Configuration]) -> Optional['Resource']
    if not is_valid_uuid(identifier):
raise HDXError('%s is not a valid resource id!' % identifier)
resource = Resource(configuration=configuration)
result = resource._load_from_hdx('resource', identifier)
if result:
return resource
return None
| 814,304 |
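A minimal usage sketch, assuming Configuration.create() has already been called; the resource id is hypothetical.

from hdx.data.resource import Resource

resource = Resource.read_from_hdx('12345678-1234-5678-1234-567812345678')
if resource is not None:
    print(resource['name'])  # HDX objects support dict-style access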
Delete any existing url and set the file to be uploaded to the local path provided
Args:
file_to_upload (str): Local path to file to upload
Returns:
None
|
def set_file_to_upload(self, file_to_upload):
# type: (str) -> None
if 'url' in self.data:
del self.data['url']
self.file_to_upload = file_to_upload
| 814,305 |
Check if resource exists in HDX and if so, update it
Args:
**kwargs: See below
operation (string): Operation to perform eg. patch. Defaults to update.
Returns:
None
|
def update_in_hdx(self, **kwargs):
# type: (Any) -> None
self._check_load_existing_object('resource', 'id')
if self.file_to_upload and 'url' in self.data:
del self.data['url']
self._merge_hdx_update('resource', 'id', self.file_to_upload, **kwargs)
| 814,307 |
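A usage sketch for replacing the data behind an existing resource with a local file; the id and path are hypothetical and a configured HDX connection is assumed.

from hdx.data.resource import Resource

resource = Resource.read_from_hdx('12345678-1234-5678-1234-567812345678')
resource.set_file_to_upload('/tmp/data.csv')  # clears any existing url
resource.update_in_hdx()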
Download resource, storing it in the provided folder or a temporary folder if no folder is supplied
Args:
folder (Optional[str]): Folder to download resource to. Defaults to None.
Returns:
Tuple[str, str]: (URL downloaded, Path to downloaded file)
|
def download(self, folder=None):
# type: (Optional[str]) -> Tuple[str, str]
# Download the resource
url = self.data.get('url', None)
if not url:
raise HDXError('No URL to download!')
logger.debug('Downloading %s' % url)
    filename = self.data['name']
    file_extension = '.%s' % self.data['format']
    if file_extension not in filename:
        filename = '%s%s' % (filename, file_extension)
with Download(full_agent=self.configuration.get_user_agent()) as downloader:
path = downloader.download_file(url, folder, filename)
return url, path
| 814,311 |
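A usage sketch; the id and folder are hypothetical, and a temporary folder would be used if none were supplied.

from hdx.data.resource import Resource

resource = Resource.read_from_hdx('12345678-1234-5678-1234-567812345678')
url, path = resource.download(folder='/tmp')
print('Downloaded %s to %s' % (url, path))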
Get the ids of resources that have a datastore.
Args:
configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
Returns:
List[str]: List of resource ids that are in the datastore
|
def get_all_resource_ids_in_datastore(configuration=None):
# type: (Optional[Configuration]) -> List[str]
resource = Resource(configuration=configuration)
success, result = resource._read_from_hdx('datastore', '_table_metadata', 'resource_id',
Resource.actions()['datastore_search'], limit=10000)
resource_ids = list()
if not success:
logger.debug(result)
else:
for record in result['records']:
resource_ids.append(record['name'])
return resource_ids
| 814,312 |
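A usage sketch, assuming Configuration.create() has already been called.

from hdx.data.resource import Resource

datastore_ids = Resource.get_all_resource_ids_in_datastore()
print('%d resources have a datastore' % len(datastore_ids))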
For tabular data, create a resource in the HDX datastore which enables data preview in HDX using the built-in
YAML definition for a topline. If path is not supplied, the file is first downloaded from HDX.
Args:
delete_first (int): Delete datastore before creation. 0 = No, 1 = Yes, 2 = If no primary key. Defaults to 0.
path (Optional[str]): Local path to file that was uploaded. Defaults to None.
Returns:
None
|
def create_datastore_for_topline(self, delete_first=0, path=None):
# type: (int, Optional[str]) -> None
data = load_yaml(script_dir_plus_file(join('..', 'hdx_datasource_topline.yml'), Resource))
self.create_datastore_from_dict_schema(data, delete_first, path=path)
| 814,319 |
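A usage sketch; resource is an existing Resource object and the path is hypothetical.

# delete_first=2 recreates the datastore only when no primary key is defined
resource.create_datastore_for_topline(delete_first=2, path='/tmp/topline.csv')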
Get ResourceView object from resource view metadata
Args:
resource_view (Union[ResourceView,Dict]): ResourceView metadata from a ResourceView object or dictionary
Returns:
ResourceView: ResourceView object
|
def _get_resource_view(self, resource_view):
# type: (Union[ResourceView,Dict]) -> ResourceView
if isinstance(resource_view, dict):
resource_view = ResourceView(resource_view, configuration=self.configuration)
if isinstance(resource_view, ResourceView):
return resource_view
raise HDXError('Type %s is not a valid resource view!' % type(resource_view).__name__)
| 814,321 |
Add new or update existing resource views in resource with new metadata.
Args:
resource_views (List[Union[ResourceView,Dict]]): A list of resource views metadata from ResourceView objects or dictionaries
Returns:
None
|
def add_update_resource_views(self, resource_views):
# type: (List[Union[ResourceView,Dict]]) -> None
if not isinstance(resource_views, list):
raise HDXError('ResourceViews should be a list!')
for resource_view in resource_views:
self.add_update_resource_view(resource_view)
| 814,322 |
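A usage sketch; resource is an existing Resource object and the view definition below is illustrative rather than an authoritative HDX schema.

resource.add_update_resource_views([
    {'title': 'Quick Charts', 'view_type': 'hdx_hxl_preview'},
])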
Set the order of resource views in resource.
Args:
resource_views (List[Union[ResourceView,Dict,str]]): A list of either resource view ids or resource views metadata from ResourceView objects or dictionaries
Returns:
None
|
def reorder_resource_views(self, resource_views):
# type: (List[Union[ResourceView,Dict,str]]) -> None
if not isinstance(resource_views, list):
raise HDXError('ResourceViews should be a list!')
ids = list()
for resource_view in resource_views:
if isinstance(resource_view, str):
resource_view_id = resource_view
else:
resource_view_id = resource_view['id']
        if not is_valid_uuid(resource_view_id):
            raise HDXError('%s is not a valid resource view id!' % resource_view_id)
ids.append(resource_view_id)
_, result = self._read_from_hdx('resource view', self.data['id'], 'id',
ResourceView.actions()['reorder'], order=ids)
| 814,323 |
Delete a resource view from the resource and HDX
Args:
resource_view (Union[ResourceView,Dict,str]): Either a resource view id or resource view metadata either from a ResourceView object or a dictionary
Returns:
None
|
def delete_resource_view(self, resource_view):
# type: (Union[ResourceView,Dict,str]) -> None
if isinstance(resource_view, str):
        if not is_valid_uuid(resource_view):
raise HDXError('%s is not a valid resource view id!' % resource_view)
resource_view = ResourceView({'id': resource_view}, configuration=self.configuration)
else:
resource_view = self._get_resource_view(resource_view)
if 'id' not in resource_view:
found = False
title = resource_view.get('title')
for rv in self.get_resource_views():
            if rv['title'] == title:
resource_view = rv
found = True
break
if not found:
raise HDXError('No resource views have title %s in this resource!' % title)
resource_view.delete_from_hdx()
| 814,324 |
Creates the class template.
Args:
data_type_definition (DataTypeDefinition): data type definition.
Returns:
str: class template.
|
def _CreateClassTemplate(cls, data_type_definition):
type_name = data_type_definition.name
type_description = data_type_definition.description or type_name
while type_description.endswith('.'):
type_description = type_description[:-1]
class_attributes_description = []
init_arguments = []
instance_attributes = []
for member_definition in data_type_definition.members:
attribute_name = member_definition.name
description = member_definition.description or attribute_name
while description.endswith('.'):
description = description[:-1]
member_data_type = getattr(member_definition, 'member_data_type', '')
if isinstance(member_definition, data_types.MemberDataTypeDefinition):
member_definition = member_definition.member_data_type_definition
member_type_indicator = member_definition.TYPE_INDICATOR
if member_type_indicator == definitions.TYPE_INDICATOR_SEQUENCE:
element_type_indicator = member_definition.element_data_type
member_type_indicator = 'tuple[{0:s}]'.format(element_type_indicator)
else:
member_type_indicator = cls._PYTHON_NATIVE_TYPES.get(
member_type_indicator, member_data_type)
argument = '{0:s}=None'.format(attribute_name)
definition = ' self.{0:s} = {0:s}'.format(attribute_name)
description = ' {0:s} ({1:s}): {2:s}.'.format(
attribute_name, member_type_indicator, description)
class_attributes_description.append(description)
init_arguments.append(argument)
instance_attributes.append(definition)
class_attributes_description = '\n'.join(
sorted(class_attributes_description))
init_arguments = ', '.join(init_arguments)
instance_attributes = '\n'.join(sorted(instance_attributes))
template_values = {
'class_attributes_description': class_attributes_description,
'init_arguments': init_arguments,
'instance_attributes': instance_attributes,
'type_description': type_description,
'type_name': type_name}
return cls._CLASS_TEMPLATE.format(**template_values)
| 814,326 |
Checks if a string contains an identifier.
Args:
string (str): string to check.
Returns:
bool: True if the string contains an identifier, False otherwise.
|
def _IsIdentifier(cls, string):
return (
string and not string[0].isdigit() and
all(character.isalnum() or character == '_' for character in string))
| 814,327 |
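A behavioral sketch; Factory stands in for whichever class defines these classmethods.

assert Factory._IsIdentifier('point3d')
assert not Factory._IsIdentifier('3dpoint')   # starts with a digit
assert not Factory._IsIdentifier('point-3d')  # '-' is not alphanumeric or '_'
assert not Factory._IsIdentifier('')          # empty string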
Validates the data type definition.
Args:
data_type_definition (DataTypeDefinition): data type definition.
Raises:
ValueError: if the data type definition is not considered valid.
|
def _ValidateDataTypeDefinition(cls, data_type_definition):
if not cls._IsIdentifier(data_type_definition.name):
raise ValueError(
'Data type definition name: {0!s} not a valid identifier'.format(
data_type_definition.name))
if keyword.iskeyword(data_type_definition.name):
raise ValueError(
'Data type definition name: {0!s} matches keyword'.format(
data_type_definition.name))
members = getattr(data_type_definition, 'members', None)
if not members:
raise ValueError(
'Data type definition name: {0!s} missing members'.format(
data_type_definition.name))
defined_attribute_names = set()
for member_definition in members:
attribute_name = member_definition.name
if not cls._IsIdentifier(attribute_name):
raise ValueError('Attribute name: {0!s} not a valid identifier'.format(
attribute_name))
if attribute_name.startswith('_'):
raise ValueError('Attribute name: {0!s} starts with underscore'.format(
attribute_name))
if keyword.iskeyword(attribute_name):
raise ValueError('Attribute name: {0!s} matches keyword'.format(
attribute_name))
if attribute_name in defined_attribute_names:
raise ValueError('Attribute name: {0!s} already defined'.format(
attribute_name))
defined_attribute_names.add(attribute_name)
| 814,328 |
Creates a new structure values class.
Args:
data_type_definition (DataTypeDefinition): data type definition.
Returns:
class: structure values class.
|
def CreateClass(cls, data_type_definition):
cls._ValidateDataTypeDefinition(data_type_definition)
class_definition = cls._CreateClassTemplate(data_type_definition)
namespace = {
        '__builtins__': {
'object': builtins.object,
'super': builtins.super},
'__name__': '{0:s}'.format(data_type_definition.name)}
if sys.version_info[0] >= 3:
# pylint: disable=no-member
namespace['__builtins__']['__build_class__'] = builtins.__build_class__
exec(class_definition, namespace) # pylint: disable=exec-used
return namespace[data_type_definition.name]
| 814,329 |
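A usage sketch; point_definition stands for a hypothetical structure DataTypeDefinition with members x and y read elsewhere, and Factory for the class defining CreateClass.

point_class = Factory.CreateClass(point_definition)
point = point_class(x=1, y=2)
print(point.x, point.y)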
Deregisters a data type definition.
The data type definitions are identified based on their lower case name.
Args:
data_type_definition (DataTypeDefinition): data type definition.
Raises:
KeyError: if a data type definition is not set for the corresponding
name.
|
def DeregisterDefinition(self, data_type_definition):
name = data_type_definition.name.lower()
if name not in self._definitions:
raise KeyError('Definition not set for name: {0:s}.'.format(
data_type_definition.name))
del self._definitions[name]
| 814,331 |