Dataset columns (name: type, observed length range or distinct values in this split):
repository_name: string, lengths 5 to 67
func_path_in_repository: string, lengths 4 to 234
func_name: string, lengths 0 to 314
whole_func_string: string, lengths 52 to 3.87M
language: string, 6 distinct values
func_code_string: string, lengths 39 to 1.84M
func_code_tokens: list, lengths 15 to 672k
func_documentation_string: string, lengths 1 to 47.2k
func_documentation_tokens: list, lengths 1 to 3.92k
split_name: string, 1 distinct value
func_code_url: string, lengths 85 to 339
amzn/ion-python
amazon/ion/reader_binary.py
_parse_var_int_components
def _parse_var_int_components(buf, signed): """Parses a ``VarInt`` or ``VarUInt`` field from a file-like object.""" value = 0 sign = 1 while True: ch = buf.read(1) if ch == '': raise IonException('Variable integer under-run') octet = ord(ch) if signed: if octet & _VAR_INT_SIGN_MASK: sign = -1 value = octet & _VAR_INT_SIGN_VALUE_MASK signed = False else: value <<= _VAR_INT_VALUE_BITS value |= octet & _VAR_INT_VALUE_MASK if octet & _VAR_INT_SIGNAL_MASK: break return sign, value
python
def _parse_var_int_components(buf, signed): value = 0 sign = 1 while True: ch = buf.read(1) if ch == '': raise IonException('Variable integer under-run') octet = ord(ch) if signed: if octet & _VAR_INT_SIGN_MASK: sign = -1 value = octet & _VAR_INT_SIGN_VALUE_MASK signed = False else: value <<= _VAR_INT_VALUE_BITS value |= octet & _VAR_INT_VALUE_MASK if octet & _VAR_INT_SIGNAL_MASK: break return sign, value
[ "def", "_parse_var_int_components", "(", "buf", ",", "signed", ")", ":", "value", "=", "0", "sign", "=", "1", "while", "True", ":", "ch", "=", "buf", ".", "read", "(", "1", ")", "if", "ch", "==", "''", ":", "raise", "IonException", "(", "'Variable integer under-run'", ")", "octet", "=", "ord", "(", "ch", ")", "if", "signed", ":", "if", "octet", "&", "_VAR_INT_SIGN_MASK", ":", "sign", "=", "-", "1", "value", "=", "octet", "&", "_VAR_INT_SIGN_VALUE_MASK", "signed", "=", "False", "else", ":", "value", "<<=", "_VAR_INT_VALUE_BITS", "value", "|=", "octet", "&", "_VAR_INT_VALUE_MASK", "if", "octet", "&", "_VAR_INT_SIGNAL_MASK", ":", "break", "return", "sign", ",", "value" ]
Parses a ``VarInt`` or ``VarUInt`` field from a file-like object.
[ "Parses", "a", "VarInt", "or", "VarUInt", "field", "from", "a", "file", "-", "like", "object", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_binary.py#L132-L152
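The record above uses private mask constants without showing their values. The sketch below is a standalone decoder for the same layout, assuming the Ion binary spec's VarUInt/VarInt encoding (7 value bits per octet, high bit set on the final octet, and for VarInt a sign bit in bit 6 of the first octet); the constants are spelled out as assumptions and are not read from reader_binary.py.

# Illustrative VarUInt/VarInt decoding; constants are assumptions based on the Ion binary spec.
from io import BytesIO

VALUE_BITS = 7          # payload bits per octet
VALUE_MASK = 0x7F       # low seven bits carry the value
SIGNAL_MASK = 0x80      # high bit set means this is the last octet
SIGN_MASK = 0x40        # VarInt only: bit 6 of the first octet is the sign
SIGN_VALUE_MASK = 0x3F  # VarInt only: remaining payload bits of the first octet

def parse_var_int(buf, signed):
    value, sign, first = 0, 1, True
    while True:
        ch = buf.read(1)
        if ch == b'':
            raise ValueError('variable integer under-run')
        octet = ch[0]
        if signed and first:
            if octet & SIGN_MASK:
                sign = -1
            value = octet & SIGN_VALUE_MASK
            first = False
        else:
            value = (value << VALUE_BITS) | (octet & VALUE_MASK)
        if octet & SIGNAL_MASK:
            return sign * value

# 0x0E 0xEB encodes VarUInt 1899: (0x0E << 7) | 0x6B
assert parse_var_int(BytesIO(b'\x0e\xeb'), signed=False) == 1899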
amzn/ion-python
amazon/ion/reader_binary.py
_parse_signed_int_components
def _parse_signed_int_components(buf): """Parses the remainder of a file-like object as a signed magnitude value. Returns: Returns a pair of the sign bit and the unsigned magnitude. """ sign_bit = 0 value = 0 first = True while True: ch = buf.read(1) if ch == b'': break octet = ord(ch) if first: if octet & _SIGNED_INT_SIGN_MASK: sign_bit = 1 value = octet & _SIGNED_INT_SIGN_VALUE_MASK first = False else: value <<= 8 value |= octet return sign_bit, value
python
def _parse_signed_int_components(buf): sign_bit = 0 value = 0 first = True while True: ch = buf.read(1) if ch == b'': break octet = ord(ch) if first: if octet & _SIGNED_INT_SIGN_MASK: sign_bit = 1 value = octet & _SIGNED_INT_SIGN_VALUE_MASK first = False else: value <<= 8 value |= octet return sign_bit, value
[ "def", "_parse_signed_int_components", "(", "buf", ")", ":", "sign_bit", "=", "0", "value", "=", "0", "first", "=", "True", "while", "True", ":", "ch", "=", "buf", ".", "read", "(", "1", ")", "if", "ch", "==", "b''", ":", "break", "octet", "=", "ord", "(", "ch", ")", "if", "first", ":", "if", "octet", "&", "_SIGNED_INT_SIGN_MASK", ":", "sign_bit", "=", "1", "value", "=", "octet", "&", "_SIGNED_INT_SIGN_VALUE_MASK", "first", "=", "False", "else", ":", "value", "<<=", "8", "value", "|=", "octet", "return", "sign_bit", ",", "value" ]
Parses the remainder of a file-like object as a signed magnitude value. Returns: Returns a pair of the sign bit and the unsigned magnitude.
[ "Parses", "the", "remainder", "of", "a", "file", "-", "like", "object", "as", "a", "signed", "magnitude", "value", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_binary.py#L160-L184
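Unlike the VarInt field above, this Int field is sign-and-magnitude with full 8-bit continuation octets: the high bit of the first octet is the sign, the rest is a big-endian magnitude. A minimal stand-alone illustration (the 0x80/0x7F masks are assumptions consistent with the Ion binary spec, standing in for the private _SIGNED_INT_* constants):

from io import BytesIO

def parse_signed_int(buf):
    sign_bit, value, first = 0, 0, True
    while True:
        ch = buf.read(1)
        if ch == b'':
            break
        octet = ch[0]
        if first:
            sign_bit = 1 if octet & 0x80 else 0   # assumed sign mask
            value = octet & 0x7F                  # assumed sign-value mask
            first = False
        else:
            value = (value << 8) | octet
    return sign_bit, value

# 0x80 0x01: sign bit set, magnitude 1 (a leading 0x80 with zero magnitude would be negative zero).
assert parse_signed_int(BytesIO(b'\x80\x01')) == (1, 1)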
amzn/ion-python
amazon/ion/reader_binary.py
_parse_decimal
def _parse_decimal(buf): """Parses the remainder of a file-like object as a decimal.""" exponent = _parse_var_int(buf, signed=True) sign_bit, coefficient = _parse_signed_int_components(buf) if coefficient == 0: # Handle the zero cases--especially negative zero value = Decimal((sign_bit, (0,), exponent)) else: coefficient *= sign_bit and -1 or 1 value = Decimal(coefficient).scaleb(exponent) return value
python
def _parse_decimal(buf): exponent = _parse_var_int(buf, signed=True) sign_bit, coefficient = _parse_signed_int_components(buf) if coefficient == 0: value = Decimal((sign_bit, (0,), exponent)) else: coefficient *= sign_bit and -1 or 1 value = Decimal(coefficient).scaleb(exponent) return value
[ "def", "_parse_decimal", "(", "buf", ")", ":", "exponent", "=", "_parse_var_int", "(", "buf", ",", "signed", "=", "True", ")", "sign_bit", ",", "coefficient", "=", "_parse_signed_int_components", "(", "buf", ")", "if", "coefficient", "==", "0", ":", "# Handle the zero cases--especially negative zero", "value", "=", "Decimal", "(", "(", "sign_bit", ",", "(", "0", ",", ")", ",", "exponent", ")", ")", "else", ":", "coefficient", "*=", "sign_bit", "and", "-", "1", "or", "1", "value", "=", "Decimal", "(", "coefficient", ")", ".", "scaleb", "(", "exponent", ")", "return", "value" ]
Parses the remainder of a file-like object as a decimal.
[ "Parses", "the", "remainder", "of", "a", "file", "-", "like", "object", "as", "a", "decimal", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_binary.py#L187-L199
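The two branches above map onto the standard library's Decimal in different ways: the tuple constructor is needed to preserve a negative-zero coefficient, and scaleb applies the decimal exponent otherwise. A stdlib-only illustration of both:

from decimal import Decimal

# Negative zero must go through the (sign, digits, exponent) tuple form,
# because Decimal(0) would lose the sign.
assert str(Decimal((1, (0,), -2))) == '-0.00'

# Non-zero coefficients can simply be scaled by the exponent.
assert Decimal(-125).scaleb(-2) == Decimal('-1.25')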
amzn/ion-python
amazon/ion/reader_binary.py
_parse_sid_iter
def _parse_sid_iter(data): """Parses the given :class:`bytes` data as a list of :class:`SymbolToken`""" limit = len(data) buf = BytesIO(data) while buf.tell() < limit: sid = _parse_var_int(buf, signed=False) yield SymbolToken(None, sid)
python
def _parse_sid_iter(data): limit = len(data) buf = BytesIO(data) while buf.tell() < limit: sid = _parse_var_int(buf, signed=False) yield SymbolToken(None, sid)
[ "def", "_parse_sid_iter", "(", "data", ")", ":", "limit", "=", "len", "(", "data", ")", "buf", "=", "BytesIO", "(", "data", ")", "while", "buf", ".", "tell", "(", ")", "<", "limit", ":", "sid", "=", "_parse_var_int", "(", "buf", ",", "signed", "=", "False", ")", "yield", "SymbolToken", "(", "None", ",", "sid", ")" ]
Parses the given :class:`bytes` data as a list of :class:`SymbolToken`
[ "Parses", "the", "given", ":", "class", ":", "bytes", "data", "as", "a", "list", "of", ":", "class", ":", "SymbolToken" ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_binary.py#L202-L208
amzn/ion-python
amazon/ion/reader_binary.py
_create_delegate_handler
def _create_delegate_handler(delegate): """Creates a handler function that creates a co-routine that can yield once with the given positional arguments to the delegate as a transition. Args: delegate (Coroutine): The co-routine to delegate to. Returns: A :class:`callable` handler that returns a co-routine that ignores the data it receives and sends with the arguments given to the handler as a :class:`Transition`. """ @coroutine def handler(*args): yield yield delegate.send(Transition(args, delegate)) return handler
python
def _create_delegate_handler(delegate): @coroutine def handler(*args): yield yield delegate.send(Transition(args, delegate)) return handler
[ "def", "_create_delegate_handler", "(", "delegate", ")", ":", "@", "coroutine", "def", "handler", "(", "*", "args", ")", ":", "yield", "yield", "delegate", ".", "send", "(", "Transition", "(", "args", ",", "delegate", ")", ")", "return", "handler" ]
Creates a handler function that creates a co-routine that can yield once with the given positional arguments to the delegate as a transition. Args: delegate (Coroutine): The co-routine to delegate to. Returns: A :class:`callable` handler that returns a co-routine that ignores the data it receives and sends with the arguments given to the handler as a :class:`Transition`.
[ "Creates", "a", "handler", "function", "that", "creates", "a", "co", "-", "routine", "that", "can", "yield", "once", "with", "the", "given", "positional", "arguments", "to", "the", "delegate", "as", "a", "transition", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_binary.py#L314-L330
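The handler produced above relies on the two-yield shape used throughout this reader: the first bare yield absorbs the priming send, the second yields the delegate's transition. The sketch below reproduces that shape with stand-in definitions of the coroutine decorator and Transition (they are simplifications, not the module's real ones) and a toy delegate.

from collections import namedtuple
from functools import wraps

Transition = namedtuple('Transition', ['event', 'delegate'])  # stand-in

def coroutine(func):
    """Prime a generator-based co-routine so it is ready to receive send()."""
    @wraps(func)
    def wrapper(*args, **kwargs):
        gen = func(*args, **kwargs)
        next(gen)  # advance to the first yield
        return gen
    return wrapper

def _create_delegate_handler(delegate):
    @coroutine
    def handler(*args):
        yield                                           # absorb the priming value
        yield delegate.send(Transition(args, delegate)) # forward args as a transition
    return handler

@coroutine
def echo_delegate():
    result = None
    while True:
        trans = yield result
        result = 'saw %r' % (trans.event,)

handler = _create_delegate_handler(echo_delegate())
print(handler(1, 2).send(None))   # saw (1, 2)

The point of the pattern is that the returned handler can be dropped into the dispatch machinery like any other one-shot co-routine while really just forwarding its arguments to an existing delegate.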
amzn/ion-python
amazon/ion/reader_binary.py
_read_data_handler
def _read_data_handler(length, whence, ctx, skip=False, stream_event=ION_STREAM_INCOMPLETE_EVENT): """Creates a co-routine for retrieving data up to a requested size. Args: length (int): The minimum length requested. whence (Coroutine): The co-routine to return to after the data is satisfied. ctx (_HandlerContext): The context for the read. skip (Optional[bool]): Whether the requested number of bytes should be skipped. stream_event (Optional[IonEvent]): The stream event to return if no bytes are read or available. """ trans = None queue = ctx.queue if length > ctx.remaining: raise IonException('Length overrun: %d bytes, %d remaining' % (length, ctx.remaining)) # Make sure to check the queue first. queue_len = len(queue) if queue_len > 0: # Any data available means we can only be incomplete. stream_event = ION_STREAM_INCOMPLETE_EVENT length -= queue_len if skip: # For skipping we need to consume any remnant in the buffer queue. if length >= 0: queue.skip(queue_len) else: queue.skip(queue_len + length) while True: data_event, self = (yield trans) if data_event is not None and data_event.data is not None: data = data_event.data data_len = len(data) if data_len > 0: # We got something so we can only be incomplete. stream_event = ION_STREAM_INCOMPLETE_EVENT length -= data_len if not skip: queue.extend(data) else: pos_adjustment = data_len if length < 0: pos_adjustment += length # More data than we need to skip, so make sure to accumulate that remnant. queue.extend(data[length:]) queue.position += pos_adjustment if length <= 0: # We got all the data we need, go back immediately yield Transition(None, whence) trans = Transition(stream_event, self)
python
def _read_data_handler(length, whence, ctx, skip=False, stream_event=ION_STREAM_INCOMPLETE_EVENT): trans = None queue = ctx.queue if length > ctx.remaining: raise IonException('Length overrun: %d bytes, %d remaining' % (length, ctx.remaining)) queue_len = len(queue) if queue_len > 0: stream_event = ION_STREAM_INCOMPLETE_EVENT length -= queue_len if skip: if length >= 0: queue.skip(queue_len) else: queue.skip(queue_len + length) while True: data_event, self = (yield trans) if data_event is not None and data_event.data is not None: data = data_event.data data_len = len(data) if data_len > 0: stream_event = ION_STREAM_INCOMPLETE_EVENT length -= data_len if not skip: queue.extend(data) else: pos_adjustment = data_len if length < 0: pos_adjustment += length queue.extend(data[length:]) queue.position += pos_adjustment if length <= 0: yield Transition(None, whence) trans = Transition(stream_event, self)
[ "def", "_read_data_handler", "(", "length", ",", "whence", ",", "ctx", ",", "skip", "=", "False", ",", "stream_event", "=", "ION_STREAM_INCOMPLETE_EVENT", ")", ":", "trans", "=", "None", "queue", "=", "ctx", ".", "queue", "if", "length", ">", "ctx", ".", "remaining", ":", "raise", "IonException", "(", "'Length overrun: %d bytes, %d remaining'", "%", "(", "length", ",", "ctx", ".", "remaining", ")", ")", "# Make sure to check the queue first.", "queue_len", "=", "len", "(", "queue", ")", "if", "queue_len", ">", "0", ":", "# Any data available means we can only be incomplete.", "stream_event", "=", "ION_STREAM_INCOMPLETE_EVENT", "length", "-=", "queue_len", "if", "skip", ":", "# For skipping we need to consume any remnant in the buffer queue.", "if", "length", ">=", "0", ":", "queue", ".", "skip", "(", "queue_len", ")", "else", ":", "queue", ".", "skip", "(", "queue_len", "+", "length", ")", "while", "True", ":", "data_event", ",", "self", "=", "(", "yield", "trans", ")", "if", "data_event", "is", "not", "None", "and", "data_event", ".", "data", "is", "not", "None", ":", "data", "=", "data_event", ".", "data", "data_len", "=", "len", "(", "data", ")", "if", "data_len", ">", "0", ":", "# We got something so we can only be incomplete.", "stream_event", "=", "ION_STREAM_INCOMPLETE_EVENT", "length", "-=", "data_len", "if", "not", "skip", ":", "queue", ".", "extend", "(", "data", ")", "else", ":", "pos_adjustment", "=", "data_len", "if", "length", "<", "0", ":", "pos_adjustment", "+=", "length", "# More data than we need to skip, so make sure to accumulate that remnant.", "queue", ".", "extend", "(", "data", "[", "length", ":", "]", ")", "queue", ".", "position", "+=", "pos_adjustment", "if", "length", "<=", "0", ":", "# We got all the data we need, go back immediately", "yield", "Transition", "(", "None", ",", "whence", ")", "trans", "=", "Transition", "(", "stream_event", ",", "self", ")" ]
Creates a co-routine for retrieving data up to a requested size. Args: length (int): The minimum length requested. whence (Coroutine): The co-routine to return to after the data is satisfied. ctx (_HandlerContext): The context for the read. skip (Optional[bool]): Whether the requested number of bytes should be skipped. stream_event (Optional[IonEvent]): The stream event to return if no bytes are read or available.
[ "Creates", "a", "co", "-", "routine", "for", "retrieving", "data", "up", "to", "a", "requested", "size", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_binary.py#L334-L387
amzn/ion-python
amazon/ion/reader_binary.py
_var_uint_field_handler
def _var_uint_field_handler(handler, ctx): """Handler co-routine for variable unsigned integer fields that. Invokes the given ``handler`` function with the read field and context, then immediately yields to the resulting co-routine. """ _, self = yield queue = ctx.queue value = 0 while True: if len(queue) == 0: # We don't know when the field ends, so read at least one byte. yield ctx.read_data_transition(1, self) octet = queue.read_byte() value <<= _VAR_INT_VALUE_BITS value |= octet & _VAR_INT_VALUE_MASK if octet & _VAR_INT_SIGNAL_MASK: break yield ctx.immediate_transition(handler(value, ctx))
python
def _var_uint_field_handler(handler, ctx): _, self = yield queue = ctx.queue value = 0 while True: if len(queue) == 0: yield ctx.read_data_transition(1, self) octet = queue.read_byte() value <<= _VAR_INT_VALUE_BITS value |= octet & _VAR_INT_VALUE_MASK if octet & _VAR_INT_SIGNAL_MASK: break yield ctx.immediate_transition(handler(value, ctx))
[ "def", "_var_uint_field_handler", "(", "handler", ",", "ctx", ")", ":", "_", ",", "self", "=", "yield", "queue", "=", "ctx", ".", "queue", "value", "=", "0", "while", "True", ":", "if", "len", "(", "queue", ")", "==", "0", ":", "# We don't know when the field ends, so read at least one byte.", "yield", "ctx", ".", "read_data_transition", "(", "1", ",", "self", ")", "octet", "=", "queue", ".", "read_byte", "(", ")", "value", "<<=", "_VAR_INT_VALUE_BITS", "value", "|=", "octet", "&", "_VAR_INT_VALUE_MASK", "if", "octet", "&", "_VAR_INT_SIGNAL_MASK", ":", "break", "yield", "ctx", ".", "immediate_transition", "(", "handler", "(", "value", ",", "ctx", ")", ")" ]
Handler co-routine for variable unsigned integer fields that. Invokes the given ``handler`` function with the read field and context, then immediately yields to the resulting co-routine.
[ "Handler", "co", "-", "routine", "for", "variable", "unsigned", "integer", "fields", "that", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_binary.py#L398-L416
amzn/ion-python
amazon/ion/reader_binary.py
_length_scalar_handler
def _length_scalar_handler(scalar_factory, ion_type, length, ctx): """Handles scalars, ``scalar_factory`` is a function that returns a value or thunk.""" _, self = yield if length == 0: data = b'' else: yield ctx.read_data_transition(length, self) data = ctx.queue.read(length) scalar = scalar_factory(data) event_cls = IonEvent if callable(scalar): # TODO Wrap the exception to get context position. event_cls = IonThunkEvent yield ctx.event_transition(event_cls, IonEventType.SCALAR, ion_type, scalar)
python
def _length_scalar_handler(scalar_factory, ion_type, length, ctx): _, self = yield if length == 0: data = b'' else: yield ctx.read_data_transition(length, self) data = ctx.queue.read(length) scalar = scalar_factory(data) event_cls = IonEvent if callable(scalar): event_cls = IonThunkEvent yield ctx.event_transition(event_cls, IonEventType.SCALAR, ion_type, scalar)
[ "def", "_length_scalar_handler", "(", "scalar_factory", ",", "ion_type", ",", "length", ",", "ctx", ")", ":", "_", ",", "self", "=", "yield", "if", "length", "==", "0", ":", "data", "=", "b''", "else", ":", "yield", "ctx", ".", "read_data_transition", "(", "length", ",", "self", ")", "data", "=", "ctx", ".", "queue", ".", "read", "(", "length", ")", "scalar", "=", "scalar_factory", "(", "data", ")", "event_cls", "=", "IonEvent", "if", "callable", "(", "scalar", ")", ":", "# TODO Wrap the exception to get context position.", "event_cls", "=", "IonThunkEvent", "yield", "ctx", ".", "event_transition", "(", "event_cls", ",", "IonEventType", ".", "SCALAR", ",", "ion_type", ",", "scalar", ")" ]
Handles scalars, ``scalar_factory`` is a function that returns a value or thunk.
[ "Handles", "scalars", "scalar_factory", "is", "a", "function", "that", "returns", "a", "value", "or", "thunk", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_binary.py#L455-L469
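The callable(scalar) check above is what separates eagerly parsed values from thunks: a scalar_factory may return either the value itself or a zero-argument function that parses lazily, and only the latter becomes an IonThunkEvent. A framework-free illustration of that convention (both factories here are made up for the example, not taken from the module):

# Two hypothetical scalar factories: one returns the value, one returns a thunk.
def parse_text_eager(data):
    return data.decode('utf-8')

def parse_blob_lazy(data):
    return lambda: bytes(data)   # deferred: runs only when the event value is needed

for factory in (parse_text_eager, parse_blob_lazy):
    scalar = factory(b'hello')
    if callable(scalar):
        print('thunk ->', scalar())   # would be wrapped in an IonThunkEvent
    else:
        print('value ->', scalar)     # would become a plain IonEvent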
amzn/ion-python
amazon/ion/reader_binary.py
_annotation_handler
def _annotation_handler(ion_type, length, ctx): """Handles annotations. ``ion_type`` is ignored.""" _, self = yield self_handler = _create_delegate_handler(self) if ctx.annotations is not None: raise IonException('Annotation cannot be nested in annotations') # We have to replace our context for annotations specifically to encapsulate the limit ctx = ctx.derive_container_context(length, add_depth=0) # Immediately read the length field and the annotations (ann_length, _), _ = yield ctx.immediate_transition( _var_uint_field_handler(self_handler, ctx) ) if ann_length < 1: raise IonException('Invalid annotation length subfield; annotation wrapper must have at least one annotation.') # Read/parse the annotations. yield ctx.read_data_transition(ann_length, self) ann_data = ctx.queue.read(ann_length) annotations = tuple(_parse_sid_iter(ann_data)) if ctx.limit - ctx.queue.position < 1: # There is no space left for the 'value' subfield, which is required. raise IonException('Incorrect annotation wrapper length.') # Go parse the start of the value but go back to the real parent container. yield ctx.immediate_transition( _start_type_handler(ctx.field_name, ctx.whence, ctx, annotations=annotations) )
python
def _annotation_handler(ion_type, length, ctx): _, self = yield self_handler = _create_delegate_handler(self) if ctx.annotations is not None: raise IonException('Annotation cannot be nested in annotations') ctx = ctx.derive_container_context(length, add_depth=0) (ann_length, _), _ = yield ctx.immediate_transition( _var_uint_field_handler(self_handler, ctx) ) if ann_length < 1: raise IonException('Invalid annotation length subfield; annotation wrapper must have at least one annotation.') yield ctx.read_data_transition(ann_length, self) ann_data = ctx.queue.read(ann_length) annotations = tuple(_parse_sid_iter(ann_data)) if ctx.limit - ctx.queue.position < 1: raise IonException('Incorrect annotation wrapper length.') yield ctx.immediate_transition( _start_type_handler(ctx.field_name, ctx.whence, ctx, annotations=annotations) )
[ "def", "_annotation_handler", "(", "ion_type", ",", "length", ",", "ctx", ")", ":", "_", ",", "self", "=", "yield", "self_handler", "=", "_create_delegate_handler", "(", "self", ")", "if", "ctx", ".", "annotations", "is", "not", "None", ":", "raise", "IonException", "(", "'Annotation cannot be nested in annotations'", ")", "# We have to replace our context for annotations specifically to encapsulate the limit", "ctx", "=", "ctx", ".", "derive_container_context", "(", "length", ",", "add_depth", "=", "0", ")", "# Immediately read the length field and the annotations", "(", "ann_length", ",", "_", ")", ",", "_", "=", "yield", "ctx", ".", "immediate_transition", "(", "_var_uint_field_handler", "(", "self_handler", ",", "ctx", ")", ")", "if", "ann_length", "<", "1", ":", "raise", "IonException", "(", "'Invalid annotation length subfield; annotation wrapper must have at least one annotation.'", ")", "# Read/parse the annotations.", "yield", "ctx", ".", "read_data_transition", "(", "ann_length", ",", "self", ")", "ann_data", "=", "ctx", ".", "queue", ".", "read", "(", "ann_length", ")", "annotations", "=", "tuple", "(", "_parse_sid_iter", "(", "ann_data", ")", ")", "if", "ctx", ".", "limit", "-", "ctx", ".", "queue", ".", "position", "<", "1", ":", "# There is no space left for the 'value' subfield, which is required.", "raise", "IonException", "(", "'Incorrect annotation wrapper length.'", ")", "# Go parse the start of the value but go back to the real parent container.", "yield", "ctx", ".", "immediate_transition", "(", "_start_type_handler", "(", "ctx", ".", "field_name", ",", "ctx", ".", "whence", ",", "ctx", ",", "annotations", "=", "annotations", ")", ")" ]
Handles annotations. ``ion_type`` is ignored.
[ "Handles", "annotations", ".", "ion_type", "is", "ignored", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_binary.py#L496-L526
amzn/ion-python
amazon/ion/reader_binary.py
_ordered_struct_start_handler
def _ordered_struct_start_handler(handler, ctx): """Handles the special case of ordered structs, specified by the type ID 0xD1. This coroutine's only purpose is to ensure that the struct in question declares at least one field name/value pair, as required by the spec. """ _, self = yield self_handler = _create_delegate_handler(self) (length, _), _ = yield ctx.immediate_transition( _var_uint_field_handler(self_handler, ctx) ) if length < 2: # A valid field name/value pair is at least two octets: one for the field name SID and one for the value. raise IonException('Ordered structs (type ID 0xD1) must have at least one field name/value pair.') yield ctx.immediate_transition(handler(length, ctx))
python
def _ordered_struct_start_handler(handler, ctx): _, self = yield self_handler = _create_delegate_handler(self) (length, _), _ = yield ctx.immediate_transition( _var_uint_field_handler(self_handler, ctx) ) if length < 2: raise IonException('Ordered structs (type ID 0xD1) must have at least one field name/value pair.') yield ctx.immediate_transition(handler(length, ctx))
[ "def", "_ordered_struct_start_handler", "(", "handler", ",", "ctx", ")", ":", "_", ",", "self", "=", "yield", "self_handler", "=", "_create_delegate_handler", "(", "self", ")", "(", "length", ",", "_", ")", ",", "_", "=", "yield", "ctx", ".", "immediate_transition", "(", "_var_uint_field_handler", "(", "self_handler", ",", "ctx", ")", ")", "if", "length", "<", "2", ":", "# A valid field name/value pair is at least two octets: one for the field name SID and one for the value.", "raise", "IonException", "(", "'Ordered structs (type ID 0xD1) must have at least one field name/value pair.'", ")", "yield", "ctx", ".", "immediate_transition", "(", "handler", "(", "length", ",", "ctx", ")", ")" ]
Handles the special case of ordered structs, specified by the type ID 0xD1. This coroutine's only purpose is to ensure that the struct in question declares at least one field name/value pair, as required by the spec.
[ "Handles", "the", "special", "case", "of", "ordered", "structs", "specified", "by", "the", "type", "ID", "0xD1", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_binary.py#L530-L544
amzn/ion-python
amazon/ion/reader_binary.py
_container_start_handler
def _container_start_handler(ion_type, length, ctx): """Handles container delegation.""" _, self = yield container_ctx = ctx.derive_container_context(length) if ctx.annotations and ctx.limit != container_ctx.limit: # 'ctx' is the annotation wrapper context. `container_ctx` represents the wrapper's 'value' subfield. Their # limits must match. raise IonException('Incorrect annotation wrapper length.') delegate = _container_handler(ion_type, container_ctx) # We start the container, and transition to the new container processor. yield ctx.event_transition( IonEvent, IonEventType.CONTAINER_START, ion_type, value=None, whence=delegate )
python
def _container_start_handler(ion_type, length, ctx): _, self = yield container_ctx = ctx.derive_container_context(length) if ctx.annotations and ctx.limit != container_ctx.limit: raise IonException('Incorrect annotation wrapper length.') delegate = _container_handler(ion_type, container_ctx) yield ctx.event_transition( IonEvent, IonEventType.CONTAINER_START, ion_type, value=None, whence=delegate )
[ "def", "_container_start_handler", "(", "ion_type", ",", "length", ",", "ctx", ")", ":", "_", ",", "self", "=", "yield", "container_ctx", "=", "ctx", ".", "derive_container_context", "(", "length", ")", "if", "ctx", ".", "annotations", "and", "ctx", ".", "limit", "!=", "container_ctx", ".", "limit", ":", "# 'ctx' is the annotation wrapper context. `container_ctx` represents the wrapper's 'value' subfield. Their", "# limits must match.", "raise", "IonException", "(", "'Incorrect annotation wrapper length.'", ")", "delegate", "=", "_container_handler", "(", "ion_type", ",", "container_ctx", ")", "# We start the container, and transition to the new container processor.", "yield", "ctx", ".", "event_transition", "(", "IonEvent", ",", "IonEventType", ".", "CONTAINER_START", ",", "ion_type", ",", "value", "=", "None", ",", "whence", "=", "delegate", ")" ]
Handles container delegation.
[ "Handles", "container", "delegation", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_binary.py#L548-L562
amzn/ion-python
amazon/ion/reader_binary.py
_container_handler
def _container_handler(ion_type, ctx): """Handler for the body of a container (or the top-level stream). Args: ion_type (Optional[IonType]): The type of the container or ``None`` for the top-level. ctx (_HandlerContext): The context for the container. """ transition = None first = True at_top = ctx.depth == 0 while True: data_event, self = (yield transition) if data_event is not None and data_event.type is ReadEventType.SKIP: yield ctx.read_data_transition(ctx.remaining, self, skip=True) if ctx.queue.position == ctx.limit: # We are at the end of the container. # Yield the close event and go to enclosing container. yield Transition( IonEvent(IonEventType.CONTAINER_END, ion_type, depth=ctx.depth-1), ctx.whence ) if ion_type is IonType.STRUCT: # Read the field name. self_handler = _create_delegate_handler(self) (field_sid, _), _ = yield ctx.immediate_transition( _var_uint_field_handler(self_handler, ctx) ) field_name = SymbolToken(None, field_sid) else: field_name = None expects_ivm = first and at_top transition = ctx.immediate_transition( _start_type_handler(field_name, self, ctx, expects_ivm, at_top=at_top) ) first = False
python
def _container_handler(ion_type, ctx): transition = None first = True at_top = ctx.depth == 0 while True: data_event, self = (yield transition) if data_event is not None and data_event.type is ReadEventType.SKIP: yield ctx.read_data_transition(ctx.remaining, self, skip=True) if ctx.queue.position == ctx.limit: yield Transition( IonEvent(IonEventType.CONTAINER_END, ion_type, depth=ctx.depth-1), ctx.whence ) if ion_type is IonType.STRUCT: self_handler = _create_delegate_handler(self) (field_sid, _), _ = yield ctx.immediate_transition( _var_uint_field_handler(self_handler, ctx) ) field_name = SymbolToken(None, field_sid) else: field_name = None expects_ivm = first and at_top transition = ctx.immediate_transition( _start_type_handler(field_name, self, ctx, expects_ivm, at_top=at_top) ) first = False
[ "def", "_container_handler", "(", "ion_type", ",", "ctx", ")", ":", "transition", "=", "None", "first", "=", "True", "at_top", "=", "ctx", ".", "depth", "==", "0", "while", "True", ":", "data_event", ",", "self", "=", "(", "yield", "transition", ")", "if", "data_event", "is", "not", "None", "and", "data_event", ".", "type", "is", "ReadEventType", ".", "SKIP", ":", "yield", "ctx", ".", "read_data_transition", "(", "ctx", ".", "remaining", ",", "self", ",", "skip", "=", "True", ")", "if", "ctx", ".", "queue", ".", "position", "==", "ctx", ".", "limit", ":", "# We are at the end of the container.", "# Yield the close event and go to enclosing container.", "yield", "Transition", "(", "IonEvent", "(", "IonEventType", ".", "CONTAINER_END", ",", "ion_type", ",", "depth", "=", "ctx", ".", "depth", "-", "1", ")", ",", "ctx", ".", "whence", ")", "if", "ion_type", "is", "IonType", ".", "STRUCT", ":", "# Read the field name.", "self_handler", "=", "_create_delegate_handler", "(", "self", ")", "(", "field_sid", ",", "_", ")", ",", "_", "=", "yield", "ctx", ".", "immediate_transition", "(", "_var_uint_field_handler", "(", "self_handler", ",", "ctx", ")", ")", "field_name", "=", "SymbolToken", "(", "None", ",", "field_sid", ")", "else", ":", "field_name", "=", "None", "expects_ivm", "=", "first", "and", "at_top", "transition", "=", "ctx", ".", "immediate_transition", "(", "_start_type_handler", "(", "field_name", ",", "self", ",", "ctx", ",", "expects_ivm", ",", "at_top", "=", "at_top", ")", ")", "first", "=", "False" ]
Handler for the body of a container (or the top-level stream). Args: ion_type (Optional[IonType]): The type of the container or ``None`` for the top-level. ctx (_HandlerContext): The context for the container.
[ "Handler", "for", "the", "body", "of", "a", "container", "(", "or", "the", "top", "-", "level", "stream", ")", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_binary.py#L566-L603
amzn/ion-python
amazon/ion/reader_binary.py
_bind_length_handlers
def _bind_length_handlers(tids, user_handler, lns): """Binds a set of handlers with the given factory. Args: tids (Sequence[int]): The Type IDs to bind to. user_handler (Callable): A function that takes as its parameters :class:`IonType`, ``length``, and the ``ctx`` context returning a co-routine. lns (Sequence[int]): The low-nibble lengths to bind to. """ for tid in tids: for ln in lns: type_octet = _gen_type_octet(tid, ln) ion_type = _TID_VALUE_TYPE_TABLE[tid] if ln == 1 and ion_type is IonType.STRUCT: handler = partial(_ordered_struct_start_handler, partial(user_handler, ion_type)) elif ln < _LENGTH_FIELD_FOLLOWS: # Directly partially bind length. handler = partial(user_handler, ion_type, ln) else: # Delegate to length field parsing first. handler = partial(_var_uint_field_handler, partial(user_handler, ion_type)) _HANDLER_DISPATCH_TABLE[type_octet] = handler
python
def _bind_length_handlers(tids, user_handler, lns): for tid in tids: for ln in lns: type_octet = _gen_type_octet(tid, ln) ion_type = _TID_VALUE_TYPE_TABLE[tid] if ln == 1 and ion_type is IonType.STRUCT: handler = partial(_ordered_struct_start_handler, partial(user_handler, ion_type)) elif ln < _LENGTH_FIELD_FOLLOWS: handler = partial(user_handler, ion_type, ln) else: handler = partial(_var_uint_field_handler, partial(user_handler, ion_type)) _HANDLER_DISPATCH_TABLE[type_octet] = handler
[ "def", "_bind_length_handlers", "(", "tids", ",", "user_handler", ",", "lns", ")", ":", "for", "tid", "in", "tids", ":", "for", "ln", "in", "lns", ":", "type_octet", "=", "_gen_type_octet", "(", "tid", ",", "ln", ")", "ion_type", "=", "_TID_VALUE_TYPE_TABLE", "[", "tid", "]", "if", "ln", "==", "1", "and", "ion_type", "is", "IonType", ".", "STRUCT", ":", "handler", "=", "partial", "(", "_ordered_struct_start_handler", ",", "partial", "(", "user_handler", ",", "ion_type", ")", ")", "elif", "ln", "<", "_LENGTH_FIELD_FOLLOWS", ":", "# Directly partially bind length.", "handler", "=", "partial", "(", "user_handler", ",", "ion_type", ",", "ln", ")", "else", ":", "# Delegate to length field parsing first.", "handler", "=", "partial", "(", "_var_uint_field_handler", ",", "partial", "(", "user_handler", ",", "ion_type", ")", ")", "_HANDLER_DISPATCH_TABLE", "[", "type_octet", "]", "=", "handler" ]
Binds a set of handlers with the given factory. Args: tids (Sequence[int]): The Type IDs to bind to. user_handler (Callable): A function that takes as its parameters :class:`IonType`, ``length``, and the ``ctx`` context returning a co-routine. lns (Sequence[int]): The low-nibble lengths to bind to.
[ "Binds", "a", "set", "of", "handlers", "with", "the", "given", "factory", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_binary.py#L777-L799
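The table being filled above is keyed by the Ion type octet: high nibble is the type ID, low nibble the length code, with low nibble 14 (0xE) meaning a VarUInt length field follows. The sketch below shows the partial-binding pattern on its own; the constants, handlers and table are local stand-ins, not the module's _HANDLER_DISPATCH_TABLE or real handler signatures.

from functools import partial

LENGTH_FIELD_FOLLOWS = 0xE   # assumed value: low nibble 14 means "length is a VarUInt that follows"
DISPATCH = {}                # stand-in for _HANDLER_DISPATCH_TABLE

def gen_type_octet(tid, ln):
    return (tid << 4) | ln   # high nibble: type ID, low nibble: length code

def scalar_handler(ion_type, length, ctx):   # toy user handler
    return ('scalar', ion_type, length, ctx)

def var_uint_then(handler, ctx):             # toy "read the length field first" wrapper
    return ('read-length-then', handler, ctx)

def bind_length_handlers(tids, user_handler, lns):
    for tid in tids:
        for ln in lns:
            if ln < LENGTH_FIELD_FOLLOWS:
                handler = partial(user_handler, 'int', ln)   # length known from the nibble
            else:
                handler = partial(var_uint_then, partial(user_handler, 'int'))
            DISPATCH[gen_type_octet(tid, ln)] = handler

bind_length_handlers(tids=[0x2], user_handler=scalar_handler, lns=range(1, 15))
print(DISPATCH[0x21]('ctx'))   # ('scalar', 'int', 1, 'ctx')
print(DISPATCH[0x2E]('ctx'))   # length deferred: second element is the partially bound scalar_handler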
amzn/ion-python
amazon/ion/reader_binary.py
_bind_length_scalar_handlers
def _bind_length_scalar_handlers(tids, scalar_factory, lns=_NON_ZERO_LENGTH_LNS): """Binds a set of scalar handlers for an inclusive range of low-nibble values. Args: tids (Sequence[int]): The Type IDs to bind to. scalar_factory (Callable): The factory for the scalar parsing function. This function can itself return a function representing a thunk to defer the scalar parsing or a direct value. lns (Sequence[int]): The low-nibble lengths to bind to. """ handler = partial(_length_scalar_handler, scalar_factory) return _bind_length_handlers(tids, handler, lns)
python
def _bind_length_scalar_handlers(tids, scalar_factory, lns=_NON_ZERO_LENGTH_LNS): handler = partial(_length_scalar_handler, scalar_factory) return _bind_length_handlers(tids, handler, lns)
[ "def", "_bind_length_scalar_handlers", "(", "tids", ",", "scalar_factory", ",", "lns", "=", "_NON_ZERO_LENGTH_LNS", ")", ":", "handler", "=", "partial", "(", "_length_scalar_handler", ",", "scalar_factory", ")", "return", "_bind_length_handlers", "(", "tids", ",", "handler", ",", "lns", ")" ]
Binds a set of scalar handlers for an inclusive range of low-nibble values. Args: tids (Sequence[int]): The Type IDs to bind to. scalar_factory (Callable): The factory for the scalar parsing function. This function can itself return a function representing a thunk to defer the scalar parsing or a direct value. lns (Sequence[int]): The low-nibble lengths to bind to.
[ "Binds", "a", "set", "of", "scalar", "handlers", "for", "an", "inclusive", "range", "of", "low", "-", "nibble", "values", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_binary.py#L802-L813
amzn/ion-python
amazon/ion/reader_binary.py
raw_reader
def raw_reader(queue=None): """Returns a raw binary reader co-routine. Args: queue (Optional[BufferQueue]): The buffer read data for parsing, if ``None`` a new one will be created. Yields: IonEvent: parse events, will have an event type of ``INCOMPLETE`` if data is needed in the middle of a value or ``STREAM_END`` if there is no data **and** the parser is not in the middle of parsing a value. Receives :class:`DataEvent`, with :class:`ReadEventType` of ``NEXT`` or ``SKIP`` to iterate over values, or ``DATA`` if the last event was a ``INCOMPLETE`` or ``STREAM_END`` event type. ``SKIP`` is only allowed within a container. A reader is *in* a container when the ``CONTAINER_START`` event type is encountered and *not in* a container when the ``CONTAINER_END`` event type for that container is encountered. """ if queue is None: queue = BufferQueue() ctx = _HandlerContext( position=0, limit=None, queue=queue, field_name=None, annotations=None, depth=0, whence=None ) return reader_trampoline(_container_handler(None, ctx))
python
def raw_reader(queue=None): if queue is None: queue = BufferQueue() ctx = _HandlerContext( position=0, limit=None, queue=queue, field_name=None, annotations=None, depth=0, whence=None ) return reader_trampoline(_container_handler(None, ctx))
[ "def", "raw_reader", "(", "queue", "=", "None", ")", ":", "if", "queue", "is", "None", ":", "queue", "=", "BufferQueue", "(", ")", "ctx", "=", "_HandlerContext", "(", "position", "=", "0", ",", "limit", "=", "None", ",", "queue", "=", "queue", ",", "field_name", "=", "None", ",", "annotations", "=", "None", ",", "depth", "=", "0", ",", "whence", "=", "None", ")", "return", "reader_trampoline", "(", "_container_handler", "(", "None", ",", "ctx", ")", ")" ]
Returns a raw binary reader co-routine. Args: queue (Optional[BufferQueue]): The buffer read data for parsing, if ``None`` a new one will be created. Yields: IonEvent: parse events, will have an event type of ``INCOMPLETE`` if data is needed in the middle of a value or ``STREAM_END`` if there is no data **and** the parser is not in the middle of parsing a value. Receives :class:`DataEvent`, with :class:`ReadEventType` of ``NEXT`` or ``SKIP`` to iterate over values, or ``DATA`` if the last event was a ``INCOMPLETE`` or ``STREAM_END`` event type. ``SKIP`` is only allowed within a container. A reader is *in* a container when the ``CONTAINER_START`` event type is encountered and *not in* a container when the ``CONTAINER_END`` event type for that container is encountered.
[ "Returns", "a", "raw", "binary", "reader", "co", "-", "routine", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_binary.py#L838-L870
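The docstring above describes the push protocol but not what driving it looks like. Below is a rough drive loop written under the assumption that NEXT_EVENT and read_data_event are importable from amazon.ion.reader and IonEventType from amazon.ion.core, as the surrounding package suggests; treat the import paths and the exact event-handling details as unverified assumptions rather than documented API.

# Assumed imports; the exact module layout is an assumption, not taken from this record.
from amazon.ion.reader import NEXT_EVENT, read_data_event
from amazon.ion.core import IonEventType
from amazon.ion.reader_binary import raw_reader

def drive(chunks):
    """Feed byte chunks into the raw reader and collect the value events it produces."""
    reader = raw_reader()
    events = []
    chunks = iter(chunks)
    event = reader.send(NEXT_EVENT)
    while True:
        if event.event_type in (IonEventType.INCOMPLETE, IonEventType.STREAM_END):
            data = next(chunks, None)
            if data is None:
                break
            event = reader.send(read_data_event(data))  # DATA only after INCOMPLETE/STREAM_END
        else:
            events.append(event)
            event = reader.send(NEXT_EVENT)
    return events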
amzn/ion-python
amazon/ion/reader_binary.py
_HandlerContext.remaining
def remaining(self): """Determines how many bytes are remaining in the current context.""" if self.depth == 0: return _STREAM_REMAINING return self.limit - self.queue.position
python
def remaining(self): if self.depth == 0: return _STREAM_REMAINING return self.limit - self.queue.position
[ "def", "remaining", "(", "self", ")", ":", "if", "self", ".", "depth", "==", "0", ":", "return", "_STREAM_REMAINING", "return", "self", ".", "limit", "-", "self", ".", "queue", ".", "position" ]
Determines how many bytes are remaining in the current context.
[ "Determines", "how", "many", "bytes", "are", "remaining", "in", "the", "current", "context", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_binary.py#L229-L233
amzn/ion-python
amazon/ion/reader_binary.py
_HandlerContext.read_data_transition
def read_data_transition(self, length, whence=None, skip=False, stream_event=ION_STREAM_INCOMPLETE_EVENT): """Returns an immediate event_transition to read a specified number of bytes.""" if whence is None: whence = self.whence return Transition( None, _read_data_handler(length, whence, self, skip, stream_event) )
python
def read_data_transition(self, length, whence=None, skip=False, stream_event=ION_STREAM_INCOMPLETE_EVENT): if whence is None: whence = self.whence return Transition( None, _read_data_handler(length, whence, self, skip, stream_event) )
[ "def", "read_data_transition", "(", "self", ",", "length", ",", "whence", "=", "None", ",", "skip", "=", "False", ",", "stream_event", "=", "ION_STREAM_INCOMPLETE_EVENT", ")", ":", "if", "whence", "is", "None", ":", "whence", "=", "self", ".", "whence", "return", "Transition", "(", "None", ",", "_read_data_handler", "(", "length", ",", "whence", ",", "self", ",", "skip", ",", "stream_event", ")", ")" ]
Returns an immediate event_transition to read a specified number of bytes.
[ "Returns", "an", "immediate", "event_transition", "to", "read", "a", "specified", "number", "of", "bytes", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_binary.py#L235-L243
amzn/ion-python
amazon/ion/reader_binary.py
_HandlerContext.event_transition
def event_transition(self, event_cls, event_type, ion_type=None, value=None, annotations=None, depth=None, whence=None): """Returns an ion event event_transition that yields to another co-routine. If ``annotations`` is not specified, then the ``annotations`` are the annotations of this context. If ``depth`` is not specified, then the ``depth`` is depth of this context. If ``whence`` is not specified, then ``whence`` is the whence of this context. """ if annotations is None: annotations = self.annotations if annotations is None: annotations = () if not (event_type is IonEventType.CONTAINER_START) and \ annotations and (self.limit - self.queue.position) != 0: # This value is contained in an annotation wrapper, from which its limit was inherited. It must have # reached, but not surpassed, that limit. raise IonException('Incorrect annotation wrapper length.') if depth is None: depth = self.depth if whence is None: whence = self.whence return Transition( event_cls(event_type, ion_type, value, self.field_name, annotations, depth), whence )
python
def event_transition(self, event_cls, event_type, ion_type=None, value=None, annotations=None, depth=None, whence=None): if annotations is None: annotations = self.annotations if annotations is None: annotations = () if not (event_type is IonEventType.CONTAINER_START) and \ annotations and (self.limit - self.queue.position) != 0: raise IonException('Incorrect annotation wrapper length.') if depth is None: depth = self.depth if whence is None: whence = self.whence return Transition( event_cls(event_type, ion_type, value, self.field_name, annotations, depth), whence )
[ "def", "event_transition", "(", "self", ",", "event_cls", ",", "event_type", ",", "ion_type", "=", "None", ",", "value", "=", "None", ",", "annotations", "=", "None", ",", "depth", "=", "None", ",", "whence", "=", "None", ")", ":", "if", "annotations", "is", "None", ":", "annotations", "=", "self", ".", "annotations", "if", "annotations", "is", "None", ":", "annotations", "=", "(", ")", "if", "not", "(", "event_type", "is", "IonEventType", ".", "CONTAINER_START", ")", "and", "annotations", "and", "(", "self", ".", "limit", "-", "self", ".", "queue", ".", "position", ")", "!=", "0", ":", "# This value is contained in an annotation wrapper, from which its limit was inherited. It must have", "# reached, but not surpassed, that limit.", "raise", "IonException", "(", "'Incorrect annotation wrapper length.'", ")", "if", "depth", "is", "None", ":", "depth", "=", "self", ".", "depth", "if", "whence", "is", "None", ":", "whence", "=", "self", ".", "whence", "return", "Transition", "(", "event_cls", "(", "event_type", ",", "ion_type", ",", "value", ",", "self", ".", "field_name", ",", "annotations", ",", "depth", ")", ",", "whence", ")" ]
Returns an ion event event_transition that yields to another co-routine. If ``annotations`` is not specified, then the ``annotations`` are the annotations of this context. If ``depth`` is not specified, then the ``depth`` is depth of this context. If ``whence`` is not specified, then ``whence`` is the whence of this context.
[ "Returns", "an", "ion", "event", "event_transition", "that", "yields", "to", "another", "co", "-", "routine", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_binary.py#L245-L273
amzn/ion-python
amazon/ion/simpleion.py
dump
def dump(obj, fp, imports=None, binary=True, sequence_as_stream=False, skipkeys=False, ensure_ascii=True, check_circular=True, allow_nan=True, cls=None, indent=None, separators=None, encoding='utf-8', default=None, use_decimal=True, namedtuple_as_object=True, tuple_as_array=True, bigint_as_string=False, sort_keys=False, item_sort_key=None, for_json=None, ignore_nan=False, int_as_string_bitcount=None, iterable_as_array=False, **kw): """Serialize ``obj`` as an Ion-formatted stream to ``fp`` (a file-like object), using the following conversion table:: +-------------------+-------------------+ | Python | Ion | |-------------------+-------------------| | None | null.null | |-------------------+-------------------| | IonPyNull(<type>) | null.<type> | |-------------------+-------------------| | True, False, | | | IonPyInt(BOOL), | bool | | IonPyBool, | | |-------------------+-------------------| | int (Python 2, 3) | | | long (Python 2), | int | | IonPyInt(INT) | | |-------------------+-------------------| | float, IonPyFloat | float | |-------------------+-------------------| | Decimal, | | | IonPyDecimal | decimal | |-------------------+-------------------| | datetime, | | | Timestamp, | timestamp | | IonPyTimestamp | | |-------------------+-------------------| | SymbolToken, | | | IonPySymbol, | symbol | | IonPyText(SYMBOL) | | |-------------------+-------------------| | str (Python 3), | | | unicode (Python2),| string | | IonPyText(STRING) | | |-------------------+-------------------| | IonPyBytes(CLOB) | clob | |-------------------+-------------------| | str (Python 2), | | | bytes (Python 3) | blob | | IonPyBytes(BLOB) | | |-------------------+-------------------| | list, tuple, | | | IonPyList(LIST) | list | |-------------------+-------------------| | IonPyList(SEXP) | sexp | |-------------------+-------------------| | dict, namedtuple, | | | IonPyDict | struct | +-------------------+-------------------+ Args: obj (Any): A python object to serialize according to the above table. Any Python object which is neither an instance of nor inherits from one of the types in the above table will raise TypeError. fp (BaseIO): A file-like object. imports (Optional[Sequence[SymbolTable]]): A sequence of shared symbol tables to be used by by the writer. binary (Optional[True|False]): When True, outputs binary Ion. When false, outputs text Ion. sequence_as_stream (Optional[True|False]): When True, if ``obj`` is a sequence, it will be treated as a stream of top-level Ion values (i.e. the resulting Ion data will begin with ``obj``'s first element). Default: False. skipkeys: NOT IMPLEMENTED ensure_ascii: NOT IMPLEMENTED check_circular: NOT IMPLEMENTED allow_nan: NOT IMPLEMENTED cls: NOT IMPLEMENTED indent (Str): If binary is False and indent is a string, then members of containers will be pretty-printed with a newline followed by that string repeated for each level of nesting. None (the default) selects the most compact representation without any newlines. Example: to indent with four spaces per level of nesting, use ``' '``. separators: NOT IMPLEMENTED encoding: NOT IMPLEMENTED default: NOT IMPLEMENTED use_decimal: NOT IMPLEMENTED namedtuple_as_object: NOT IMPLEMENTED tuple_as_array: NOT IMPLEMENTED bigint_as_string: NOT IMPLEMENTED sort_keys: NOT IMPLEMENTED item_sort_key: NOT IMPLEMENTED for_json: NOT IMPLEMENTED ignore_nan: NOT IMPLEMENTED int_as_string_bitcount: NOT IMPLEMENTED iterable_as_array: NOT IMPLEMENTED **kw: NOT IMPLEMENTED """ raw_writer = binary_writer(imports) if binary else text_writer(indent=indent) writer = blocking_writer(raw_writer, fp) writer.send(ION_VERSION_MARKER_EVENT) # The IVM is emitted automatically in binary; it's optional in text. if sequence_as_stream and isinstance(obj, (list, tuple)): # Treat this top-level sequence as a stream; serialize its elements as top-level values, but don't serialize the # sequence itself. for top_level in obj: _dump(top_level, writer) else: _dump(obj, writer) writer.send(ION_STREAM_END_EVENT)
python
def dump(obj, fp, imports=None, binary=True, sequence_as_stream=False, skipkeys=False, ensure_ascii=True, check_circular=True, allow_nan=True, cls=None, indent=None, separators=None, encoding='utf-8', default=None, use_decimal=True, namedtuple_as_object=True, tuple_as_array=True, bigint_as_string=False, sort_keys=False, item_sort_key=None, for_json=None, ignore_nan=False, int_as_string_bitcount=None, iterable_as_array=False, **kw): raw_writer = binary_writer(imports) if binary else text_writer(indent=indent) writer = blocking_writer(raw_writer, fp) writer.send(ION_VERSION_MARKER_EVENT) if sequence_as_stream and isinstance(obj, (list, tuple)): for top_level in obj: _dump(top_level, writer) else: _dump(obj, writer) writer.send(ION_STREAM_END_EVENT)
[ "def", "dump", "(", "obj", ",", "fp", ",", "imports", "=", "None", ",", "binary", "=", "True", ",", "sequence_as_stream", "=", "False", ",", "skipkeys", "=", "False", ",", "ensure_ascii", "=", "True", ",", "check_circular", "=", "True", ",", "allow_nan", "=", "True", ",", "cls", "=", "None", ",", "indent", "=", "None", ",", "separators", "=", "None", ",", "encoding", "=", "'utf-8'", ",", "default", "=", "None", ",", "use_decimal", "=", "True", ",", "namedtuple_as_object", "=", "True", ",", "tuple_as_array", "=", "True", ",", "bigint_as_string", "=", "False", ",", "sort_keys", "=", "False", ",", "item_sort_key", "=", "None", ",", "for_json", "=", "None", ",", "ignore_nan", "=", "False", ",", "int_as_string_bitcount", "=", "None", ",", "iterable_as_array", "=", "False", ",", "*", "*", "kw", ")", ":", "raw_writer", "=", "binary_writer", "(", "imports", ")", "if", "binary", "else", "text_writer", "(", "indent", "=", "indent", ")", "writer", "=", "blocking_writer", "(", "raw_writer", ",", "fp", ")", "writer", ".", "send", "(", "ION_VERSION_MARKER_EVENT", ")", "# The IVM is emitted automatically in binary; it's optional in text.", "if", "sequence_as_stream", "and", "isinstance", "(", "obj", ",", "(", "list", ",", "tuple", ")", ")", ":", "# Treat this top-level sequence as a stream; serialize its elements as top-level values, but don't serialize the", "# sequence itself.", "for", "top_level", "in", "obj", ":", "_dump", "(", "top_level", ",", "writer", ")", "else", ":", "_dump", "(", "obj", ",", "writer", ")", "writer", ".", "send", "(", "ION_STREAM_END_EVENT", ")" ]
Serialize ``obj`` as an Ion-formatted stream to ``fp`` (a file-like object), using the following conversion table:: +-------------------+-------------------+ | Python | Ion | |-------------------+-------------------| | None | null.null | |-------------------+-------------------| | IonPyNull(<type>) | null.<type> | |-------------------+-------------------| | True, False, | | | IonPyInt(BOOL), | bool | | IonPyBool, | | |-------------------+-------------------| | int (Python 2, 3) | | | long (Python 2), | int | | IonPyInt(INT) | | |-------------------+-------------------| | float, IonPyFloat | float | |-------------------+-------------------| | Decimal, | | | IonPyDecimal | decimal | |-------------------+-------------------| | datetime, | | | Timestamp, | timestamp | | IonPyTimestamp | | |-------------------+-------------------| | SymbolToken, | | | IonPySymbol, | symbol | | IonPyText(SYMBOL) | | |-------------------+-------------------| | str (Python 3), | | | unicode (Python2),| string | | IonPyText(STRING) | | |-------------------+-------------------| | IonPyBytes(CLOB) | clob | |-------------------+-------------------| | str (Python 2), | | | bytes (Python 3) | blob | | IonPyBytes(BLOB) | | |-------------------+-------------------| | list, tuple, | | | IonPyList(LIST) | list | |-------------------+-------------------| | IonPyList(SEXP) | sexp | |-------------------+-------------------| | dict, namedtuple, | | | IonPyDict | struct | +-------------------+-------------------+ Args: obj (Any): A python object to serialize according to the above table. Any Python object which is neither an instance of nor inherits from one of the types in the above table will raise TypeError. fp (BaseIO): A file-like object. imports (Optional[Sequence[SymbolTable]]): A sequence of shared symbol tables to be used by by the writer. binary (Optional[True|False]): When True, outputs binary Ion. When false, outputs text Ion. sequence_as_stream (Optional[True|False]): When True, if ``obj`` is a sequence, it will be treated as a stream of top-level Ion values (i.e. the resulting Ion data will begin with ``obj``'s first element). Default: False. skipkeys: NOT IMPLEMENTED ensure_ascii: NOT IMPLEMENTED check_circular: NOT IMPLEMENTED allow_nan: NOT IMPLEMENTED cls: NOT IMPLEMENTED indent (Str): If binary is False and indent is a string, then members of containers will be pretty-printed with a newline followed by that string repeated for each level of nesting. None (the default) selects the most compact representation without any newlines. Example: to indent with four spaces per level of nesting, use ``' '``. separators: NOT IMPLEMENTED encoding: NOT IMPLEMENTED default: NOT IMPLEMENTED use_decimal: NOT IMPLEMENTED namedtuple_as_object: NOT IMPLEMENTED tuple_as_array: NOT IMPLEMENTED bigint_as_string: NOT IMPLEMENTED sort_keys: NOT IMPLEMENTED item_sort_key: NOT IMPLEMENTED for_json: NOT IMPLEMENTED ignore_nan: NOT IMPLEMENTED int_as_string_bitcount: NOT IMPLEMENTED iterable_as_array: NOT IMPLEMENTED **kw: NOT IMPLEMENTED
[ "Serialize", "obj", "as", "an", "Ion", "-", "formatted", "stream", "to", "fp", "(", "a", "file", "-", "like", "object", ")", "using", "the", "following", "conversion", "table", "::", "+", "-------------------", "+", "-------------------", "+", "|", "Python", "|", "Ion", "|", "|", "-------------------", "+", "-------------------", "|", "|", "None", "|", "null", ".", "null", "|", "|", "-------------------", "+", "-------------------", "|", "|", "IonPyNull", "(", "<type", ">", ")", "|", "null", ".", "<type", ">", "|", "|", "-------------------", "+", "-------------------", "|", "|", "True", "False", "|", "|", "|", "IonPyInt", "(", "BOOL", ")", "|", "bool", "|", "|", "IonPyBool", "|", "|", "|", "-------------------", "+", "-------------------", "|", "|", "int", "(", "Python", "2", "3", ")", "|", "|", "|", "long", "(", "Python", "2", ")", "|", "int", "|", "|", "IonPyInt", "(", "INT", ")", "|", "|", "|", "-------------------", "+", "-------------------", "|", "|", "float", "IonPyFloat", "|", "float", "|", "|", "-------------------", "+", "-------------------", "|", "|", "Decimal", "|", "|", "|", "IonPyDecimal", "|", "decimal", "|", "|", "-------------------", "+", "-------------------", "|", "|", "datetime", "|", "|", "|", "Timestamp", "|", "timestamp", "|", "|", "IonPyTimestamp", "|", "|", "|", "-------------------", "+", "-------------------", "|", "|", "SymbolToken", "|", "|", "|", "IonPySymbol", "|", "symbol", "|", "|", "IonPyText", "(", "SYMBOL", ")", "|", "|", "|", "-------------------", "+", "-------------------", "|", "|", "str", "(", "Python", "3", ")", "|", "|", "|", "unicode", "(", "Python2", ")", "|", "string", "|", "|", "IonPyText", "(", "STRING", ")", "|", "|", "|", "-------------------", "+", "-------------------", "|", "|", "IonPyBytes", "(", "CLOB", ")", "|", "clob", "|", "|", "-------------------", "+", "-------------------", "|", "|", "str", "(", "Python", "2", ")", "|", "|", "|", "bytes", "(", "Python", "3", ")", "|", "blob", "|", "|", "IonPyBytes", "(", "BLOB", ")", "|", "|", "|", "-------------------", "+", "-------------------", "|", "|", "list", "tuple", "|", "|", "|", "IonPyList", "(", "LIST", ")", "|", "list", "|", "|", "-------------------", "+", "-------------------", "|", "|", "IonPyList", "(", "SEXP", ")", "|", "sexp", "|", "|", "-------------------", "+", "-------------------", "|", "|", "dict", "namedtuple", "|", "|", "|", "IonPyDict", "|", "struct", "|", "+", "-------------------", "+", "-------------------", "+" ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/simpleion.py#L48-L147
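Since the record above is a public entry point, a short usage example may help. It sticks to arguments the docstring documents (fp, binary, indent) and assumes the function is importable as amazon.ion.simpleion.dump, as the file path suggests; note that dump writes bytes to fp even in text mode, which is why dumps (below) decodes its buffer.

import io
from amazon.ion import simpleion

# Text Ion, pretty-printed with two-space indentation.
buf = io.BytesIO()
simpleion.dump({'name': u'ion', 'version': 1}, buf, binary=False, indent='  ')
print(buf.getvalue().decode('utf-8'))

# Binary Ion for the same value starts with the Ion 1.0 version marker E0 01 00 EA.
buf = io.BytesIO()
simpleion.dump({'name': u'ion', 'version': 1}, buf, binary=True)
print(buf.getvalue()[:4].hex())  # e00100ea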
amzn/ion-python
amazon/ion/simpleion.py
dumps
def dumps(obj, imports=None, binary=True, sequence_as_stream=False, skipkeys=False, ensure_ascii=True, check_circular=True, allow_nan=True, cls=None, indent=None, separators=None, encoding='utf-8', default=None, use_decimal=True, namedtuple_as_object=True, tuple_as_array=True, bigint_as_string=False, sort_keys=False, item_sort_key=None, for_json=None, ignore_nan=False, int_as_string_bitcount=None, iterable_as_array=False, **kw): """Serialize ``obj`` as Python ``string`` or ``bytes`` object, using the conversion table used by ``dump`` (above). Args: obj (Any): A python object to serialize according to the above table. Any Python object which is neither an instance of nor inherits from one of the types in the above table will raise TypeError. imports (Optional[Sequence[SymbolTable]]): A sequence of shared symbol tables to be used by by the writer. binary (Optional[True|False]): When True, outputs binary Ion. When false, outputs text Ion. sequence_as_stream (Optional[True|False]): When True, if ``obj`` is a sequence, it will be treated as a stream of top-level Ion values (i.e. the resulting Ion data will begin with ``obj``'s first element). Default: False. skipkeys: NOT IMPLEMENTED ensure_ascii: NOT IMPLEMENTED check_circular: NOT IMPLEMENTED allow_nan: NOT IMPLEMENTED cls: NOT IMPLEMENTED indent (Str): If binary is False and indent is a string, then members of containers will be pretty-printed with a newline followed by that string repeated for each level of nesting. None (the default) selects the most compact representation without any newlines. Example: to indent with four spaces per level of nesting, use ``' '``. separators: NOT IMPLEMENTED encoding: NOT IMPLEMENTED default: NOT IMPLEMENTED use_decimal: NOT IMPLEMENTED namedtuple_as_object: NOT IMPLEMENTED tuple_as_array: NOT IMPLEMENTED bigint_as_string: NOT IMPLEMENTED sort_keys: NOT IMPLEMENTED item_sort_key: NOT IMPLEMENTED for_json: NOT IMPLEMENTED ignore_nan: NOT IMPLEMENTED int_as_string_bitcount: NOT IMPLEMENTED iterable_as_array: NOT IMPLEMENTED **kw: NOT IMPLEMENTED Returns: Union[str|bytes]: The string or binary representation of the data. if ``binary=True``, this will be a ``bytes`` object, otherwise this will be a ``str`` object (or ``unicode`` in the case of Python 2.x) """ ion_buffer = six.BytesIO() dump(obj, ion_buffer, sequence_as_stream=sequence_as_stream, binary=binary, skipkeys=skipkeys, ensure_ascii=ensure_ascii, check_circular=check_circular, allow_nan=allow_nan, cls=cls, indent=indent, separators=separators, encoding=encoding, default=default, use_decimal=use_decimal, namedtuple_as_object=namedtuple_as_object, tuple_as_array=tuple_as_array, bigint_as_string=bigint_as_string, sort_keys=sort_keys, item_sort_key=item_sort_key, for_json=for_json, ignore_nan=ignore_nan, int_as_string_bitcount=int_as_string_bitcount, iterable_as_array=iterable_as_array) ret_val = ion_buffer.getvalue() ion_buffer.close() if not binary: ret_val = ret_val.decode('utf-8') return ret_val
python
def dumps(obj, imports=None, binary=True, sequence_as_stream=False, skipkeys=False, ensure_ascii=True, check_circular=True, allow_nan=True, cls=None, indent=None, separators=None, encoding='utf-8', default=None, use_decimal=True, namedtuple_as_object=True, tuple_as_array=True, bigint_as_string=False, sort_keys=False, item_sort_key=None, for_json=None, ignore_nan=False, int_as_string_bitcount=None, iterable_as_array=False, **kw): ion_buffer = six.BytesIO() dump(obj, ion_buffer, sequence_as_stream=sequence_as_stream, binary=binary, skipkeys=skipkeys, ensure_ascii=ensure_ascii, check_circular=check_circular, allow_nan=allow_nan, cls=cls, indent=indent, separators=separators, encoding=encoding, default=default, use_decimal=use_decimal, namedtuple_as_object=namedtuple_as_object, tuple_as_array=tuple_as_array, bigint_as_string=bigint_as_string, sort_keys=sort_keys, item_sort_key=item_sort_key, for_json=for_json, ignore_nan=ignore_nan, int_as_string_bitcount=int_as_string_bitcount, iterable_as_array=iterable_as_array) ret_val = ion_buffer.getvalue() ion_buffer.close() if not binary: ret_val = ret_val.decode('utf-8') return ret_val
[ "def", "dumps", "(", "obj", ",", "imports", "=", "None", ",", "binary", "=", "True", ",", "sequence_as_stream", "=", "False", ",", "skipkeys", "=", "False", ",", "ensure_ascii", "=", "True", ",", "check_circular", "=", "True", ",", "allow_nan", "=", "True", ",", "cls", "=", "None", ",", "indent", "=", "None", ",", "separators", "=", "None", ",", "encoding", "=", "'utf-8'", ",", "default", "=", "None", ",", "use_decimal", "=", "True", ",", "namedtuple_as_object", "=", "True", ",", "tuple_as_array", "=", "True", ",", "bigint_as_string", "=", "False", ",", "sort_keys", "=", "False", ",", "item_sort_key", "=", "None", ",", "for_json", "=", "None", ",", "ignore_nan", "=", "False", ",", "int_as_string_bitcount", "=", "None", ",", "iterable_as_array", "=", "False", ",", "*", "*", "kw", ")", ":", "ion_buffer", "=", "six", ".", "BytesIO", "(", ")", "dump", "(", "obj", ",", "ion_buffer", ",", "sequence_as_stream", "=", "sequence_as_stream", ",", "binary", "=", "binary", ",", "skipkeys", "=", "skipkeys", ",", "ensure_ascii", "=", "ensure_ascii", ",", "check_circular", "=", "check_circular", ",", "allow_nan", "=", "allow_nan", ",", "cls", "=", "cls", ",", "indent", "=", "indent", ",", "separators", "=", "separators", ",", "encoding", "=", "encoding", ",", "default", "=", "default", ",", "use_decimal", "=", "use_decimal", ",", "namedtuple_as_object", "=", "namedtuple_as_object", ",", "tuple_as_array", "=", "tuple_as_array", ",", "bigint_as_string", "=", "bigint_as_string", ",", "sort_keys", "=", "sort_keys", ",", "item_sort_key", "=", "item_sort_key", ",", "for_json", "=", "for_json", ",", "ignore_nan", "=", "ignore_nan", ",", "int_as_string_bitcount", "=", "int_as_string_bitcount", ",", "iterable_as_array", "=", "iterable_as_array", ")", "ret_val", "=", "ion_buffer", ".", "getvalue", "(", ")", "ion_buffer", ".", "close", "(", ")", "if", "not", "binary", ":", "ret_val", "=", "ret_val", ".", "decode", "(", "'utf-8'", ")", "return", "ret_val" ]
Serialize ``obj`` as Python ``string`` or ``bytes`` object, using the conversion table used by ``dump`` (above). Args: obj (Any): A Python object to serialize according to the above table. Any Python object which is neither an instance of nor inherits from one of the types in the above table will raise TypeError. imports (Optional[Sequence[SymbolTable]]): A sequence of shared symbol tables to be used by the writer. binary (Optional[True|False]): When True, outputs binary Ion. When False, outputs text Ion. sequence_as_stream (Optional[True|False]): When True, if ``obj`` is a sequence, it will be treated as a stream of top-level Ion values (i.e. the resulting Ion data will begin with ``obj``'s first element). Default: False. skipkeys: NOT IMPLEMENTED ensure_ascii: NOT IMPLEMENTED check_circular: NOT IMPLEMENTED allow_nan: NOT IMPLEMENTED cls: NOT IMPLEMENTED indent (str): If binary is False and indent is a string, then members of containers will be pretty-printed with a newline followed by that string repeated for each level of nesting. None (the default) selects the most compact representation without any newlines. Example: to indent with four spaces per level of nesting, use ``'    '``. separators: NOT IMPLEMENTED encoding: NOT IMPLEMENTED default: NOT IMPLEMENTED use_decimal: NOT IMPLEMENTED namedtuple_as_object: NOT IMPLEMENTED tuple_as_array: NOT IMPLEMENTED bigint_as_string: NOT IMPLEMENTED sort_keys: NOT IMPLEMENTED item_sort_key: NOT IMPLEMENTED for_json: NOT IMPLEMENTED ignore_nan: NOT IMPLEMENTED int_as_string_bitcount: NOT IMPLEMENTED iterable_as_array: NOT IMPLEMENTED **kw: NOT IMPLEMENTED Returns: Union[str|bytes]: The string or binary representation of the data. If ``binary=True``, this will be a ``bytes`` object, otherwise this will be a ``str`` object (or ``unicode`` in the case of Python 2.x)
[ "Serialize", "obj", "as", "Python", "string", "or", "bytes", "object", "using", "the", "conversion", "table", "used", "by", "dump", "(", "above", ")", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/simpleion.py#L211-L265
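A short sketch of calling dumps itself; the import path comes from this record and the printed types follow the Returns section of the docstring. The exact serialized text is not asserted.

    from decimal import Decimal
    from amazon.ion.simpleion import dumps

    data = {'id': 7, 'price': Decimal('19.99')}

    binary_repr = dumps(data)                          # binary=True is the default
    print(type(binary_repr))                           # <class 'bytes'>

    text_repr = dumps(data, binary=False, indent='  ')
    print(type(text_repr))                             # str on Python 3, unicode on Python 2
    print(text_repr)                                   # pretty-printed text Ion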
amzn/ion-python
amazon/ion/simpleion.py
load
def load(fp, catalog=None, single_value=True, encoding='utf-8', cls=None, object_hook=None, parse_float=None, parse_int=None, parse_constant=None, object_pairs_hook=None, use_decimal=None, **kw): """Deserialize ``fp`` (a file-like object), which contains a text or binary Ion stream, to a Python object using the following conversion table:: +-------------------+-------------------+ | Ion | Python | |-------------------+-------------------| | null.<type> | IonPyNull(<type>) | |-------------------+-------------------| | bool | IonPyBool | |-------------------+-------------------| | int | IonPyInt | |-------------------+-------------------| | float | IonPyFloat | |-------------------+-------------------| | decimal | IonPyDecimal | |-------------------+-------------------| | timestamp | IonPyTimestamp | |-------------------+-------------------| | symbol | IonPySymbol | |-------------------+-------------------| | string | IonPyText(STRING) | |-------------------+-------------------| | clob | IonPyBytes(CLOB) | |-------------------+-------------------| | blob | IonPyBytes(BLOB) | |-------------------+-------------------| | list | IonPyList(LIST) | |-------------------+-------------------| | sexp | IonPyList(SEXP) | |-------------------+-------------------| | struct | IonPyDict | +-------------------+-------------------+ Args: fp (BaseIO): A file-like object containing Ion data. catalog (Optional[SymbolTableCatalog]): The catalog to use for resolving symbol table imports. single_value (Optional[True|False]): When True, the data in ``obj`` is interpreted as a single Ion value, and will be returned without an enclosing container. If True and there are multiple top-level values in the Ion stream, IonException will be raised. NOTE: this means that when data is dumped using ``sequence_as_stream=True``, it must be loaded using ``single_value=False``. Default: True. encoding: NOT IMPLEMENTED cls: NOT IMPLEMENTED object_hook: NOT IMPLEMENTED parse_float: NOT IMPLEMENTED parse_int: NOT IMPLEMENTED parse_constant: NOT IMPLEMENTED object_pairs_hook: NOT IMPLEMENTED use_decimal: NOT IMPLEMENTED **kw: NOT IMPLEMENTED Returns (Any): if single_value is True: A Python object representing a single Ion value. else: A sequence of Python objects representing a stream of Ion values. """ if isinstance(fp, _TEXT_TYPES): raw_reader = text_reader(is_unicode=True) else: maybe_ivm = fp.read(4) fp.seek(0) if maybe_ivm == _IVM: raw_reader = binary_reader() else: raw_reader = text_reader() reader = blocking_reader(managed_reader(raw_reader, catalog), fp) out = [] # top-level _load(out, reader) if single_value: if len(out) != 1: raise IonException('Stream contained %d values; expected a single value.' % (len(out),)) return out[0] return out
python
def load(fp, catalog=None, single_value=True, encoding='utf-8', cls=None, object_hook=None, parse_float=None, parse_int=None, parse_constant=None, object_pairs_hook=None, use_decimal=None, **kw): if isinstance(fp, _TEXT_TYPES): raw_reader = text_reader(is_unicode=True) else: maybe_ivm = fp.read(4) fp.seek(0) if maybe_ivm == _IVM: raw_reader = binary_reader() else: raw_reader = text_reader() reader = blocking_reader(managed_reader(raw_reader, catalog), fp) out = [] _load(out, reader) if single_value: if len(out) != 1: raise IonException('Stream contained %d values; expected a single value.' % (len(out),)) return out[0] return out
[ "def", "load", "(", "fp", ",", "catalog", "=", "None", ",", "single_value", "=", "True", ",", "encoding", "=", "'utf-8'", ",", "cls", "=", "None", ",", "object_hook", "=", "None", ",", "parse_float", "=", "None", ",", "parse_int", "=", "None", ",", "parse_constant", "=", "None", ",", "object_pairs_hook", "=", "None", ",", "use_decimal", "=", "None", ",", "*", "*", "kw", ")", ":", "if", "isinstance", "(", "fp", ",", "_TEXT_TYPES", ")", ":", "raw_reader", "=", "text_reader", "(", "is_unicode", "=", "True", ")", "else", ":", "maybe_ivm", "=", "fp", ".", "read", "(", "4", ")", "fp", ".", "seek", "(", "0", ")", "if", "maybe_ivm", "==", "_IVM", ":", "raw_reader", "=", "binary_reader", "(", ")", "else", ":", "raw_reader", "=", "text_reader", "(", ")", "reader", "=", "blocking_reader", "(", "managed_reader", "(", "raw_reader", ",", "catalog", ")", ",", "fp", ")", "out", "=", "[", "]", "# top-level", "_load", "(", "out", ",", "reader", ")", "if", "single_value", ":", "if", "len", "(", "out", ")", "!=", "1", ":", "raise", "IonException", "(", "'Stream contained %d values; expected a single value.'", "%", "(", "len", "(", "out", ")", ",", ")", ")", "return", "out", "[", "0", "]", "return", "out" ]
Deserialize ``fp`` (a file-like object), which contains a text or binary Ion stream, to a Python object using the following conversion table:: +-------------------+-------------------+ | Ion | Python | |-------------------+-------------------| | null.<type> | IonPyNull(<type>) | |-------------------+-------------------| | bool | IonPyBool | |-------------------+-------------------| | int | IonPyInt | |-------------------+-------------------| | float | IonPyFloat | |-------------------+-------------------| | decimal | IonPyDecimal | |-------------------+-------------------| | timestamp | IonPyTimestamp | |-------------------+-------------------| | symbol | IonPySymbol | |-------------------+-------------------| | string | IonPyText(STRING) | |-------------------+-------------------| | clob | IonPyBytes(CLOB) | |-------------------+-------------------| | blob | IonPyBytes(BLOB) | |-------------------+-------------------| | list | IonPyList(LIST) | |-------------------+-------------------| | sexp | IonPyList(SEXP) | |-------------------+-------------------| | struct | IonPyDict | +-------------------+-------------------+ Args: fp (BaseIO): A file-like object containing Ion data. catalog (Optional[SymbolTableCatalog]): The catalog to use for resolving symbol table imports. single_value (Optional[True|False]): When True, the data in ``obj`` is interpreted as a single Ion value, and will be returned without an enclosing container. If True and there are multiple top-level values in the Ion stream, IonException will be raised. NOTE: this means that when data is dumped using ``sequence_as_stream=True``, it must be loaded using ``single_value=False``. Default: True. encoding: NOT IMPLEMENTED cls: NOT IMPLEMENTED object_hook: NOT IMPLEMENTED parse_float: NOT IMPLEMENTED parse_int: NOT IMPLEMENTED parse_constant: NOT IMPLEMENTED object_pairs_hook: NOT IMPLEMENTED use_decimal: NOT IMPLEMENTED **kw: NOT IMPLEMENTED Returns (Any): if single_value is True: A Python object representing a single Ion value. else: A sequence of Python objects representing a stream of Ion values.
[ "Deserialize", "fp", "(", "a", "file", "-", "like", "object", ")", "which", "contains", "a", "text", "or", "binary", "Ion", "stream", "to", "a", "Python", "object", "using", "the", "following", "conversion", "table", "::", "+", "-------------------", "+", "-------------------", "+", "|", "Ion", "|", "Python", "|", "|", "-------------------", "+", "-------------------", "|", "|", "null", ".", "<type", ">", "|", "IonPyNull", "(", "<type", ">", ")", "|", "|", "-------------------", "+", "-------------------", "|", "|", "bool", "|", "IonPyBool", "|", "|", "-------------------", "+", "-------------------", "|", "|", "int", "|", "IonPyInt", "|", "|", "-------------------", "+", "-------------------", "|", "|", "float", "|", "IonPyFloat", "|", "|", "-------------------", "+", "-------------------", "|", "|", "decimal", "|", "IonPyDecimal", "|", "|", "-------------------", "+", "-------------------", "|", "|", "timestamp", "|", "IonPyTimestamp", "|", "|", "-------------------", "+", "-------------------", "|", "|", "symbol", "|", "IonPySymbol", "|", "|", "-------------------", "+", "-------------------", "|", "|", "string", "|", "IonPyText", "(", "STRING", ")", "|", "|", "-------------------", "+", "-------------------", "|", "|", "clob", "|", "IonPyBytes", "(", "CLOB", ")", "|", "|", "-------------------", "+", "-------------------", "|", "|", "blob", "|", "IonPyBytes", "(", "BLOB", ")", "|", "|", "-------------------", "+", "-------------------", "|", "|", "list", "|", "IonPyList", "(", "LIST", ")", "|", "|", "-------------------", "+", "-------------------", "|", "|", "sexp", "|", "IonPyList", "(", "SEXP", ")", "|", "|", "-------------------", "+", "-------------------", "|", "|", "struct", "|", "IonPyDict", "|", "+", "-------------------", "+", "-------------------", "+" ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/simpleion.py#L268-L341
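A sketch of load over in-memory streams. Handing it UTF-8 bytes exercises the text-reader branch shown in the source; the value types follow the Ion-to-Python table above. This is an illustration under those assumptions.

    import io
    from amazon.ion.simpleion import load

    # A single top-level value (the default): a struct with symbol field names.
    fp = io.BytesIO(b'{name: "ion", year: 2016}')
    value = load(fp)
    print(value)

    # Several top-level values require single_value=False; otherwise IonException is raised.
    stream = io.BytesIO(b'1 2 3')
    values = load(stream, single_value=False)
    print(len(values))                                 # 3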
amzn/ion-python
amazon/ion/simpleion.py
loads
def loads(ion_str, catalog=None, single_value=True, encoding='utf-8', cls=None, object_hook=None, parse_float=None, parse_int=None, parse_constant=None, object_pairs_hook=None, use_decimal=None, **kw): """Deserialize ``ion_str``, which is a string representation of an Ion object, to a Python object using the conversion table used by load (above). Args: fp (str): A string representation of Ion data. catalog (Optional[SymbolTableCatalog]): The catalog to use for resolving symbol table imports. single_value (Optional[True|False]): When True, the data in ``ion_str`` is interpreted as a single Ion value, and will be returned without an enclosing container. If True and there are multiple top-level values in the Ion stream, IonException will be raised. NOTE: this means that when data is dumped using ``sequence_as_stream=True``, it must be loaded using ``single_value=False``. Default: True. encoding: NOT IMPLEMENTED cls: NOT IMPLEMENTED object_hook: NOT IMPLEMENTED parse_float: NOT IMPLEMENTED parse_int: NOT IMPLEMENTED parse_constant: NOT IMPLEMENTED object_pairs_hook: NOT IMPLEMENTED use_decimal: NOT IMPLEMENTED **kw: NOT IMPLEMENTED Returns (Any): if single_value is True: A Python object representing a single Ion value. else: A sequence of Python objects representing a stream of Ion values. """ if isinstance(ion_str, six.binary_type): ion_buffer = BytesIO(ion_str) elif isinstance(ion_str, six.text_type): ion_buffer = six.StringIO(ion_str) else: raise TypeError('Unsupported text: %r' % ion_str) return load(ion_buffer, catalog=catalog, single_value=single_value, encoding=encoding, cls=cls, object_hook=object_hook, parse_float=parse_float, parse_int=parse_int, parse_constant=parse_constant, object_pairs_hook=object_pairs_hook, use_decimal=use_decimal)
python
def loads(ion_str, catalog=None, single_value=True, encoding='utf-8', cls=None, object_hook=None, parse_float=None, parse_int=None, parse_constant=None, object_pairs_hook=None, use_decimal=None, **kw): if isinstance(ion_str, six.binary_type): ion_buffer = BytesIO(ion_str) elif isinstance(ion_str, six.text_type): ion_buffer = six.StringIO(ion_str) else: raise TypeError('Unsupported text: %r' % ion_str) return load(ion_buffer, catalog=catalog, single_value=single_value, encoding=encoding, cls=cls, object_hook=object_hook, parse_float=parse_float, parse_int=parse_int, parse_constant=parse_constant, object_pairs_hook=object_pairs_hook, use_decimal=use_decimal)
[ "def", "loads", "(", "ion_str", ",", "catalog", "=", "None", ",", "single_value", "=", "True", ",", "encoding", "=", "'utf-8'", ",", "cls", "=", "None", ",", "object_hook", "=", "None", ",", "parse_float", "=", "None", ",", "parse_int", "=", "None", ",", "parse_constant", "=", "None", ",", "object_pairs_hook", "=", "None", ",", "use_decimal", "=", "None", ",", "*", "*", "kw", ")", ":", "if", "isinstance", "(", "ion_str", ",", "six", ".", "binary_type", ")", ":", "ion_buffer", "=", "BytesIO", "(", "ion_str", ")", "elif", "isinstance", "(", "ion_str", ",", "six", ".", "text_type", ")", ":", "ion_buffer", "=", "six", ".", "StringIO", "(", "ion_str", ")", "else", ":", "raise", "TypeError", "(", "'Unsupported text: %r'", "%", "ion_str", ")", "return", "load", "(", "ion_buffer", ",", "catalog", "=", "catalog", ",", "single_value", "=", "single_value", ",", "encoding", "=", "encoding", ",", "cls", "=", "cls", ",", "object_hook", "=", "object_hook", ",", "parse_float", "=", "parse_float", ",", "parse_int", "=", "parse_int", ",", "parse_constant", "=", "parse_constant", ",", "object_pairs_hook", "=", "object_pairs_hook", ",", "use_decimal", "=", "use_decimal", ")" ]
Deserialize ``ion_str``, which is a string representation of an Ion object, to a Python object using the conversion table used by load (above). Args: fp (str): A string representation of Ion data. catalog (Optional[SymbolTableCatalog]): The catalog to use for resolving symbol table imports. single_value (Optional[True|False]): When True, the data in ``ion_str`` is interpreted as a single Ion value, and will be returned without an enclosing container. If True and there are multiple top-level values in the Ion stream, IonException will be raised. NOTE: this means that when data is dumped using ``sequence_as_stream=True``, it must be loaded using ``single_value=False``. Default: True. encoding: NOT IMPLEMENTED cls: NOT IMPLEMENTED object_hook: NOT IMPLEMENTED parse_float: NOT IMPLEMENTED parse_int: NOT IMPLEMENTED parse_constant: NOT IMPLEMENTED object_pairs_hook: NOT IMPLEMENTED use_decimal: NOT IMPLEMENTED **kw: NOT IMPLEMENTED Returns (Any): if single_value is True: A Python object representing a single Ion value. else: A sequence of Python objects representing a stream of Ion values.
[ "Deserialize", "ion_str", "which", "is", "a", "string", "representation", "of", "an", "Ion", "object", "to", "a", "Python", "object", "using", "the", "conversion", "table", "used", "by", "load", "(", "above", ")", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/simpleion.py#L385-L423
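The same pattern with loads, which simply wraps the string and delegates to load as shown above; a small hedged sketch:

    from amazon.ion.simpleion import loads

    point = loads(u'{x: 1, y: 2}')                     # single_value=True is the default
    print(point)

    # Mirrors dump(..., sequence_as_stream=True): a stream of values needs single_value=False.
    words = loads(u'"a" "b" "c"', single_value=False)
    print(len(words))                                  # 3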
amzn/ion-python
amazon/ion/reader.py
_narrow_unichr
def _narrow_unichr(code_point): """Retrieves the unicode character representing any given code point, in a way that won't break on narrow builds. This is necessary because the built-in unichr function will fail for ordinals above 0xFFFF on narrow builds (UCS2); ordinals above 0xFFFF would require recalculating and combining surrogate pairs. This avoids that by retrieving the unicode character that was initially read. Args: code_point (int|CodePoint): An int or a subclass of int that contains the unicode character representing its code point in an attribute named 'char'. """ try: if len(code_point.char) > 1: return code_point.char except AttributeError: pass return six.unichr(code_point)
python
def _narrow_unichr(code_point): try: if len(code_point.char) > 1: return code_point.char except AttributeError: pass return six.unichr(code_point)
[ "def", "_narrow_unichr", "(", "code_point", ")", ":", "try", ":", "if", "len", "(", "code_point", ".", "char", ")", ">", "1", ":", "return", "code_point", ".", "char", "except", "AttributeError", ":", "pass", "return", "six", ".", "unichr", "(", "code_point", ")" ]
Retrieves the unicode character representing any given code point, in a way that won't break on narrow builds. This is necessary because the built-in unichr function will fail for ordinals above 0xFFFF on narrow builds (UCS2); ordinals above 0xFFFF would require recalculating and combining surrogate pairs. This avoids that by retrieving the unicode character that was initially read. Args: code_point (int|CodePoint): An int or a subclass of int that contains the unicode character representing its code point in an attribute named 'char'.
[ "Retrieves", "the", "unicode", "character", "representing", "any", "given", "code", "point", "in", "a", "way", "that", "won", "t", "break", "on", "narrow", "builds", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader.py#L43-L59
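_narrow_unichr is a private helper, but its two branches are easy to see in isolation. The import path is the module path recorded above; the stand-in class below is purely illustrative.

    from amazon.ion.reader import _narrow_unichr

    print(_narrow_unichr(0x41))          # 'A': plain ints fall through to unichr/chr

    class RecordedCodePoint(object):
        # Mimics an object that carries the originally-read character, e.g. a
        # surrogate pair as read on a narrow (UCS2) build.
        def __init__(self, char):
            self.char = char

    pair = RecordedCodePoint(u'\ud83d\ude00')
    print(len(_narrow_unichr(pair)))     # 2: the recorded pair is handed back untouched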
amzn/ion-python
amazon/ion/reader.py
reader_trampoline
def reader_trampoline(start, allow_flush=False): """Provides the co-routine trampoline for a reader state machine. The given co-routine is a state machine that yields :class:`Transition` and takes a Transition of :class:`amazon.ion.core.DataEvent` and the co-routine itself. A reader must start with a ``ReadEventType.NEXT`` event to prime the parser. In many cases this will lead to an ``IonEventType.INCOMPLETE`` being yielded, but not always (consider a reader over an in-memory data structure). Notes: A reader delimits its incomplete parse points with ``IonEventType.INCOMPLETE``. Readers also delimit complete parse points with ``IonEventType.STREAM_END``; this is similar to the ``INCOMPLETE`` case except that it denotes that a logical termination of data is *allowed*. When these event are received, the only valid input event type is a ``ReadEventType.DATA``. Generally, ``ReadEventType.NEXT`` is used to get the next parse event, but ``ReadEventType.SKIP`` can be used to skip over the current container. An internal state machine co-routine can delimit a state change without yielding to the caller by yielding ``None`` event, this will cause the trampoline to invoke the transition delegate, immediately. Args: start: The reader co-routine to initially delegate to. allow_flush(Optional[bool]): True if this reader supports receiving ``NEXT`` after yielding ``INCOMPLETE`` to trigger an attempt to flush pending parse events, otherwise False. Yields: amazon.ion.core.IonEvent: the result of parsing. Receives :class:`DataEvent` to parse into :class:`amazon.ion.core.IonEvent`. """ data_event = yield if data_event is None or data_event.type is not ReadEventType.NEXT: raise TypeError('Reader must be started with NEXT') trans = Transition(None, start) while True: trans = trans.delegate.send(Transition(data_event, trans.delegate)) data_event = None if trans.event is not None: # Only yield if there is an event. data_event = (yield trans.event) if trans.event.event_type.is_stream_signal: if data_event.type is not ReadEventType.DATA: if not allow_flush or not (trans.event.event_type is IonEventType.INCOMPLETE and data_event.type is ReadEventType.NEXT): raise TypeError('Reader expected data: %r' % (data_event,)) else: if data_event.type is ReadEventType.DATA: raise TypeError('Reader did not expect data') if data_event.type is ReadEventType.DATA and len(data_event.data) == 0: raise ValueError('Empty data not allowed') if trans.event.depth == 0 \ and trans.event.event_type is not IonEventType.CONTAINER_START \ and data_event.type is ReadEventType.SKIP: raise TypeError('Cannot skip at the top-level')
python
def reader_trampoline(start, allow_flush=False): data_event = yield if data_event is None or data_event.type is not ReadEventType.NEXT: raise TypeError('Reader must be started with NEXT') trans = Transition(None, start) while True: trans = trans.delegate.send(Transition(data_event, trans.delegate)) data_event = None if trans.event is not None: data_event = (yield trans.event) if trans.event.event_type.is_stream_signal: if data_event.type is not ReadEventType.DATA: if not allow_flush or not (trans.event.event_type is IonEventType.INCOMPLETE and data_event.type is ReadEventType.NEXT): raise TypeError('Reader expected data: %r' % (data_event,)) else: if data_event.type is ReadEventType.DATA: raise TypeError('Reader did not expect data') if data_event.type is ReadEventType.DATA and len(data_event.data) == 0: raise ValueError('Empty data not allowed') if trans.event.depth == 0 \ and trans.event.event_type is not IonEventType.CONTAINER_START \ and data_event.type is ReadEventType.SKIP: raise TypeError('Cannot skip at the top-level')
[ "def", "reader_trampoline", "(", "start", ",", "allow_flush", "=", "False", ")", ":", "data_event", "=", "yield", "if", "data_event", "is", "None", "or", "data_event", ".", "type", "is", "not", "ReadEventType", ".", "NEXT", ":", "raise", "TypeError", "(", "'Reader must be started with NEXT'", ")", "trans", "=", "Transition", "(", "None", ",", "start", ")", "while", "True", ":", "trans", "=", "trans", ".", "delegate", ".", "send", "(", "Transition", "(", "data_event", ",", "trans", ".", "delegate", ")", ")", "data_event", "=", "None", "if", "trans", ".", "event", "is", "not", "None", ":", "# Only yield if there is an event.", "data_event", "=", "(", "yield", "trans", ".", "event", ")", "if", "trans", ".", "event", ".", "event_type", ".", "is_stream_signal", ":", "if", "data_event", ".", "type", "is", "not", "ReadEventType", ".", "DATA", ":", "if", "not", "allow_flush", "or", "not", "(", "trans", ".", "event", ".", "event_type", "is", "IonEventType", ".", "INCOMPLETE", "and", "data_event", ".", "type", "is", "ReadEventType", ".", "NEXT", ")", ":", "raise", "TypeError", "(", "'Reader expected data: %r'", "%", "(", "data_event", ",", ")", ")", "else", ":", "if", "data_event", ".", "type", "is", "ReadEventType", ".", "DATA", ":", "raise", "TypeError", "(", "'Reader did not expect data'", ")", "if", "data_event", ".", "type", "is", "ReadEventType", ".", "DATA", "and", "len", "(", "data_event", ".", "data", ")", "==", "0", ":", "raise", "ValueError", "(", "'Empty data not allowed'", ")", "if", "trans", ".", "event", ".", "depth", "==", "0", "and", "trans", ".", "event", ".", "event_type", "is", "not", "IonEventType", ".", "CONTAINER_START", "and", "data_event", ".", "type", "is", "ReadEventType", ".", "SKIP", ":", "raise", "TypeError", "(", "'Cannot skip at the top-level'", ")" ]
Provides the co-routine trampoline for a reader state machine. The given co-routine is a state machine that yields :class:`Transition` and takes a Transition of :class:`amazon.ion.core.DataEvent` and the co-routine itself. A reader must start with a ``ReadEventType.NEXT`` event to prime the parser. In many cases this will lead to an ``IonEventType.INCOMPLETE`` being yielded, but not always (consider a reader over an in-memory data structure). Notes: A reader delimits its incomplete parse points with ``IonEventType.INCOMPLETE``. Readers also delimit complete parse points with ``IonEventType.STREAM_END``; this is similar to the ``INCOMPLETE`` case except that it denotes that a logical termination of data is *allowed*. When these events are received, the only valid input event type is a ``ReadEventType.DATA``. Generally, ``ReadEventType.NEXT`` is used to get the next parse event, but ``ReadEventType.SKIP`` can be used to skip over the current container. An internal state machine co-routine can delimit a state change without yielding to the caller by yielding a ``None`` event; this causes the trampoline to invoke the transition delegate immediately. Args: start: The reader co-routine to initially delegate to. allow_flush(Optional[bool]): True if this reader supports receiving ``NEXT`` after yielding ``INCOMPLETE`` to trigger an attempt to flush pending parse events, otherwise False. Yields: amazon.ion.core.IonEvent: the result of parsing. Receives :class:`DataEvent` to parse into :class:`amazon.ion.core.IonEvent`.
[ "Provides", "the", "co", "-", "routine", "trampoline", "for", "a", "reader", "state", "machine", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader.py#L312-L369
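The priming rule described above (the first event sent in must be NEXT) is the part easiest to show in isolation. The toy below mirrors only that discipline; it does not use the library's Transition or DataEvent types.

    def toy_trampoline():
        data_event = yield                      # primed with next() before any send()
        if data_event != 'NEXT':
            raise TypeError('Reader must be started with NEXT')
        while True:
            data_event = yield 'parsed:%s' % data_event

    t = toy_trampoline()
    next(t)                                     # prime the co-routine
    print(t.send('NEXT'))                       # parsed:NEXT
    print(t.send('DATA'))                       # parsed:DATA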
amzn/ion-python
amazon/ion/reader.py
blocking_reader
def blocking_reader(reader, input, buffer_size=_DEFAULT_BUFFER_SIZE): """Provides an implementation of using the reader co-routine with a file-like object. Args: reader(Coroutine): A reader co-routine. input(BaseIO): The file-like object to read from. buffer_size(Optional[int]): The optional buffer size to use. """ ion_event = None while True: read_event = (yield ion_event) ion_event = reader.send(read_event) while ion_event is not None and ion_event.event_type.is_stream_signal: data = input.read(buffer_size) if len(data) == 0: # End of file. if ion_event.event_type is IonEventType.INCOMPLETE: ion_event = reader.send(NEXT_EVENT) continue else: yield ION_STREAM_END_EVENT return ion_event = reader.send(read_data_event(data))
python
def blocking_reader(reader, input, buffer_size=_DEFAULT_BUFFER_SIZE): ion_event = None while True: read_event = (yield ion_event) ion_event = reader.send(read_event) while ion_event is not None and ion_event.event_type.is_stream_signal: data = input.read(buffer_size) if len(data) == 0: if ion_event.event_type is IonEventType.INCOMPLETE: ion_event = reader.send(NEXT_EVENT) continue else: yield ION_STREAM_END_EVENT return ion_event = reader.send(read_data_event(data))
[ "def", "blocking_reader", "(", "reader", ",", "input", ",", "buffer_size", "=", "_DEFAULT_BUFFER_SIZE", ")", ":", "ion_event", "=", "None", "while", "True", ":", "read_event", "=", "(", "yield", "ion_event", ")", "ion_event", "=", "reader", ".", "send", "(", "read_event", ")", "while", "ion_event", "is", "not", "None", "and", "ion_event", ".", "event_type", ".", "is_stream_signal", ":", "data", "=", "input", ".", "read", "(", "buffer_size", ")", "if", "len", "(", "data", ")", "==", "0", ":", "# End of file.", "if", "ion_event", ".", "event_type", "is", "IonEventType", ".", "INCOMPLETE", ":", "ion_event", "=", "reader", ".", "send", "(", "NEXT_EVENT", ")", "continue", "else", ":", "yield", "ION_STREAM_END_EVENT", "return", "ion_event", "=", "reader", ".", "send", "(", "read_data_event", "(", "data", ")", ")" ]
Provides an implementation of using the reader co-routine with a file-like object. Args: reader(Coroutine): A reader co-routine. input(BaseIO): The file-like object to read from. buffer_size(Optional[int]): The optional buffer size to use.
[ "Provides", "an", "implementation", "of", "using", "the", "reader", "co", "-", "routine", "with", "a", "file", "-", "like", "object", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader.py#L376-L398
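A sketch of driving blocking_reader by hand, using the same composition load() builds above. The import locations are inferred from names visible in these sources (simpleion resolves text_reader, managed_reader and blocking_reader; reader resolves NEXT_EVENT and IonEventType), so treat them as assumptions rather than the package's documented entry points.

    import io
    from amazon.ion.simpleion import text_reader, managed_reader, blocking_reader
    from amazon.ion.reader import NEXT_EVENT, IonEventType

    fp = io.BytesIO(b'{greeting: "hello"} 42')
    reader = blocking_reader(managed_reader(text_reader(), None), fp)
    # If the co-routine is not already primed (the library normally primes it via a
    # decorator), call next(reader) once before the first send().

    event = reader.send(NEXT_EVENT)
    while event.event_type is not IonEventType.STREAM_END:
        print(event.event_type, event.ion_type)
        event = reader.send(NEXT_EVENT)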
amzn/ion-python
amazon/ion/reader.py
BufferQueue.read
def read(self, length, skip=False): """Consumes the first ``length`` bytes from the accumulator.""" if length > self.__size: raise IndexError( 'Cannot pop %d bytes, %d bytes in buffer queue' % (length, self.__size)) self.position += length self.__size -= length segments = self.__segments offset = self.__offset data = self.__data_cls() while length > 0: segment = segments[0] segment_off = offset segment_len = len(segment) segment_rem = segment_len - segment_off segment_read_len = min(segment_rem, length) if segment_off == 0 and segment_read_len == segment_rem: # consume an entire segment if skip: segment_slice = self.__element_type() else: segment_slice = segment else: # Consume a part of the segment. if skip: segment_slice = self.__element_type() else: segment_slice = segment[segment_off:segment_off + segment_read_len] offset = 0 segment_off += segment_read_len if segment_off == segment_len: segments.popleft() self.__offset = 0 else: self.__offset = segment_off if length <= segment_rem and len(data) == 0: return segment_slice data.extend(segment_slice) length -= segment_read_len if self.is_unicode: return data.as_text() else: return data
python
def read(self, length, skip=False): if length > self.__size: raise IndexError( 'Cannot pop %d bytes, %d bytes in buffer queue' % (length, self.__size)) self.position += length self.__size -= length segments = self.__segments offset = self.__offset data = self.__data_cls() while length > 0: segment = segments[0] segment_off = offset segment_len = len(segment) segment_rem = segment_len - segment_off segment_read_len = min(segment_rem, length) if segment_off == 0 and segment_read_len == segment_rem: if skip: segment_slice = self.__element_type() else: segment_slice = segment else: if skip: segment_slice = self.__element_type() else: segment_slice = segment[segment_off:segment_off + segment_read_len] offset = 0 segment_off += segment_read_len if segment_off == segment_len: segments.popleft() self.__offset = 0 else: self.__offset = segment_off if length <= segment_rem and len(data) == 0: return segment_slice data.extend(segment_slice) length -= segment_read_len if self.is_unicode: return data.as_text() else: return data
[ "def", "read", "(", "self", ",", "length", ",", "skip", "=", "False", ")", ":", "if", "length", ">", "self", ".", "__size", ":", "raise", "IndexError", "(", "'Cannot pop %d bytes, %d bytes in buffer queue'", "%", "(", "length", ",", "self", ".", "__size", ")", ")", "self", ".", "position", "+=", "length", "self", ".", "__size", "-=", "length", "segments", "=", "self", ".", "__segments", "offset", "=", "self", ".", "__offset", "data", "=", "self", ".", "__data_cls", "(", ")", "while", "length", ">", "0", ":", "segment", "=", "segments", "[", "0", "]", "segment_off", "=", "offset", "segment_len", "=", "len", "(", "segment", ")", "segment_rem", "=", "segment_len", "-", "segment_off", "segment_read_len", "=", "min", "(", "segment_rem", ",", "length", ")", "if", "segment_off", "==", "0", "and", "segment_read_len", "==", "segment_rem", ":", "# consume an entire segment", "if", "skip", ":", "segment_slice", "=", "self", ".", "__element_type", "(", ")", "else", ":", "segment_slice", "=", "segment", "else", ":", "# Consume a part of the segment.", "if", "skip", ":", "segment_slice", "=", "self", ".", "__element_type", "(", ")", "else", ":", "segment_slice", "=", "segment", "[", "segment_off", ":", "segment_off", "+", "segment_read_len", "]", "offset", "=", "0", "segment_off", "+=", "segment_read_len", "if", "segment_off", "==", "segment_len", ":", "segments", ".", "popleft", "(", ")", "self", ".", "__offset", "=", "0", "else", ":", "self", ".", "__offset", "=", "segment_off", "if", "length", "<=", "segment_rem", "and", "len", "(", "data", ")", "==", "0", ":", "return", "segment_slice", "data", ".", "extend", "(", "segment_slice", ")", "length", "-=", "segment_read_len", "if", "self", ".", "is_unicode", ":", "return", "data", ".", "as_text", "(", ")", "else", ":", "return", "data" ]
Consumes the first ``length`` bytes from the accumulator.
[ "Consumes", "the", "first", "length", "bytes", "from", "the", "accumulator", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader.py#L154-L199
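The segment/offset bookkeeping in read() is easier to follow in a stripped-down form. The sketch below is a toy, not the library's BufferQueue; it only shows how one read can span several queued segments.

    from collections import deque

    segments = deque([b'abc', b'defg'])
    offset = 0

    def toy_read(length):
        global offset
        out = bytearray()
        while length > 0:
            segment = segments[0]
            take = min(len(segment) - offset, length)
            out.extend(segment[offset:offset + take])
            offset += take
            if offset == len(segment):       # the segment is exhausted, drop it
                segments.popleft()
                offset = 0
            length -= take
        return bytes(out)

    print(toy_read(2))    # b'ab'
    print(toy_read(3))    # b'cde' -- the read crosses the segment boundary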
amzn/ion-python
amazon/ion/reader.py
BufferQueue.unread
def unread(self, c): """Unread the given character, byte, or code point. If this is a unicode buffer and the input is an int or byte, it will be interpreted as an ordinal representing a unicode code point. If this is a binary buffer, the input must be a byte or int; a unicode character will raise an error. """ if self.position < 1: raise IndexError('Cannot unread an empty buffer queue.') if isinstance(c, six.text_type): if not self.is_unicode: BufferQueue._incompatible_types(self.is_unicode, c) else: c = self.__chr(c) num_code_units = self.is_unicode and len(c) or 1 if self.__offset == 0: if num_code_units == 1 and six.PY3: if self.is_unicode: segment = c else: segment = six.int2byte(c) else: segment = c self.__segments.appendleft(segment) else: self.__offset -= num_code_units def verify(ch, idx): existing = self.__segments[0][self.__offset + idx] if existing != ch: raise ValueError('Attempted to unread %s when %s was expected.' % (ch, existing)) if num_code_units == 1: verify(c, 0) else: for i in range(num_code_units): verify(c[i], i) self.__size += num_code_units self.position -= num_code_units
python
def unread(self, c): if self.position < 1: raise IndexError('Cannot unread an empty buffer queue.') if isinstance(c, six.text_type): if not self.is_unicode: BufferQueue._incompatible_types(self.is_unicode, c) else: c = self.__chr(c) num_code_units = self.is_unicode and len(c) or 1 if self.__offset == 0: if num_code_units == 1 and six.PY3: if self.is_unicode: segment = c else: segment = six.int2byte(c) else: segment = c self.__segments.appendleft(segment) else: self.__offset -= num_code_units def verify(ch, idx): existing = self.__segments[0][self.__offset + idx] if existing != ch: raise ValueError('Attempted to unread %s when %s was expected.' % (ch, existing)) if num_code_units == 1: verify(c, 0) else: for i in range(num_code_units): verify(c[i], i) self.__size += num_code_units self.position -= num_code_units
[ "def", "unread", "(", "self", ",", "c", ")", ":", "if", "self", ".", "position", "<", "1", ":", "raise", "IndexError", "(", "'Cannot unread an empty buffer queue.'", ")", "if", "isinstance", "(", "c", ",", "six", ".", "text_type", ")", ":", "if", "not", "self", ".", "is_unicode", ":", "BufferQueue", ".", "_incompatible_types", "(", "self", ".", "is_unicode", ",", "c", ")", "else", ":", "c", "=", "self", ".", "__chr", "(", "c", ")", "num_code_units", "=", "self", ".", "is_unicode", "and", "len", "(", "c", ")", "or", "1", "if", "self", ".", "__offset", "==", "0", ":", "if", "num_code_units", "==", "1", "and", "six", ".", "PY3", ":", "if", "self", ".", "is_unicode", ":", "segment", "=", "c", "else", ":", "segment", "=", "six", ".", "int2byte", "(", "c", ")", "else", ":", "segment", "=", "c", "self", ".", "__segments", ".", "appendleft", "(", "segment", ")", "else", ":", "self", ".", "__offset", "-=", "num_code_units", "def", "verify", "(", "ch", ",", "idx", ")", ":", "existing", "=", "self", ".", "__segments", "[", "0", "]", "[", "self", ".", "__offset", "+", "idx", "]", "if", "existing", "!=", "ch", ":", "raise", "ValueError", "(", "'Attempted to unread %s when %s was expected.'", "%", "(", "ch", ",", "existing", ")", ")", "if", "num_code_units", "==", "1", ":", "verify", "(", "c", ",", "0", ")", "else", ":", "for", "i", "in", "range", "(", "num_code_units", ")", ":", "verify", "(", "c", "[", "i", "]", ",", "i", ")", "self", ".", "__size", "+=", "num_code_units", "self", ".", "position", "-=", "num_code_units" ]
Unread the given character, byte, or code point. If this is a unicode buffer and the input is an int or byte, it will be interpreted as an ordinal representing a unicode code point. If this is a binary buffer, the input must be a byte or int; a unicode character will raise an error.
[ "Unread", "the", "given", "character", "byte", "or", "code", "point", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader.py#L221-L259
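The verify-on-rewind behaviour of unread() (you may only push back exactly what was just read) can be shown with a toy buffer; this is an illustration, not the library class.

    buffered = b'abc'
    offset = 2                          # two bytes have already been read

    def toy_unread(c):
        global offset
        offset -= 1
        existing = buffered[offset:offset + 1]
        if existing != c:
            raise ValueError('Attempted to unread %r when %r was expected.' % (c, existing))

    toy_unread(b'b')                    # fine: b'b' really was the last byte read
    try:
        toy_unread(b'x')                # wrong byte: mirrors the ValueError raised above
    except ValueError as e:
        print(e)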
amzn/ion-python
amazon/ion/reader.py
BufferQueue.skip
def skip(self, length): """Removes ``length`` bytes and returns the number length still required to skip""" if length >= self.__size: skip_amount = self.__size rem = length - skip_amount self.__segments.clear() self.__offset = 0 self.__size = 0 self.position += skip_amount else: rem = 0 self.read(length, skip=True) return rem
python
def skip(self, length): if length >= self.__size: skip_amount = self.__size rem = length - skip_amount self.__segments.clear() self.__offset = 0 self.__size = 0 self.position += skip_amount else: rem = 0 self.read(length, skip=True) return rem
[ "def", "skip", "(", "self", ",", "length", ")", ":", "if", "length", ">=", "self", ".", "__size", ":", "skip_amount", "=", "self", ".", "__size", "rem", "=", "length", "-", "skip_amount", "self", ".", "__segments", ".", "clear", "(", ")", "self", ".", "__offset", "=", "0", "self", ".", "__size", "=", "0", "self", ".", "position", "+=", "skip_amount", "else", ":", "rem", "=", "0", "self", ".", "read", "(", "length", ",", "skip", "=", "True", ")", "return", "rem" ]
Removes up to ``length`` bytes and returns the number of bytes still required to skip.
[ "Removes", "length", "bytes", "and", "returns", "the", "number", "length", "still", "required", "to", "skip" ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader.py#L261-L273
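The return value of skip() is just the shortfall; a tiny sketch of that arithmetic (not the library class):

    def toy_skip(buffered_size, length):
        # Mirrors skip(): whatever cannot be skipped now is returned to the caller.
        if length >= buffered_size:
            return length - buffered_size
        return 0

    print(toy_skip(10, 4))    # 0 -- the queue held enough data
    print(toy_skip(3, 10))    # 7 -- seven more bytes must be skipped after refilling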
amzn/ion-python
amazon/ion/reader_managed.py
managed_reader
def managed_reader(reader, catalog=None): """Managed reader wrapping another reader. Args: reader (Coroutine): The underlying non-blocking reader co-routine. catalog (Optional[SymbolTableCatalog]): The catalog to use for resolving imports. Yields: Events from the underlying reader delegating to symbol table processing as needed. The user will never see things like version markers or local symbol tables. """ if catalog is None: catalog = SymbolTableCatalog() ctx = _ManagedContext(catalog) symbol_trans = Transition(None, None) ion_event = None while True: if symbol_trans.delegate is not None \ and ion_event is not None \ and not ion_event.event_type.is_stream_signal: # We have a symbol processor active, do not yield to user. delegate = symbol_trans.delegate symbol_trans = delegate.send(Transition(ion_event, delegate)) if symbol_trans.delegate is None: # When the symbol processor terminates, the event is the context # and there is no delegate. ctx = symbol_trans.event data_event = NEXT_EVENT else: data_event = symbol_trans.event else: data_event = None if ion_event is not None: event_type = ion_event.event_type ion_type = ion_event.ion_type depth = ion_event.depth # System values only happen at the top-level if depth == 0: if event_type is IonEventType.VERSION_MARKER: if ion_event != ION_VERSION_MARKER_EVENT: raise IonException('Invalid IVM: %s' % (ion_event,)) # Reset and swallow IVM ctx = _ManagedContext(ctx.catalog) data_event = NEXT_EVENT elif ion_type is IonType.SYMBOL \ and len(ion_event.annotations) == 0 \ and ion_event.value is not None \ and ctx.resolve(ion_event.value).text == TEXT_ION_1_0: assert symbol_trans.delegate is None # A faux IVM is a NOP data_event = NEXT_EVENT elif event_type is IonEventType.CONTAINER_START \ and ion_type is IonType.STRUCT \ and ctx.has_symbol_table_annotation(ion_event.annotations): assert symbol_trans.delegate is None # Activate a new symbol processor. delegate = _local_symbol_table_handler(ctx) symbol_trans = Transition(None, delegate) data_event = NEXT_EVENT if data_event is None: # No system processing or we have to get data, yield control. if ion_event is not None: ion_event = _managed_thunk_event(ctx, ion_event) data_event = yield ion_event ion_event = reader.send(data_event)
python
def managed_reader(reader, catalog=None): if catalog is None: catalog = SymbolTableCatalog() ctx = _ManagedContext(catalog) symbol_trans = Transition(None, None) ion_event = None while True: if symbol_trans.delegate is not None \ and ion_event is not None \ and not ion_event.event_type.is_stream_signal: delegate = symbol_trans.delegate symbol_trans = delegate.send(Transition(ion_event, delegate)) if symbol_trans.delegate is None: ctx = symbol_trans.event data_event = NEXT_EVENT else: data_event = symbol_trans.event else: data_event = None if ion_event is not None: event_type = ion_event.event_type ion_type = ion_event.ion_type depth = ion_event.depth if depth == 0: if event_type is IonEventType.VERSION_MARKER: if ion_event != ION_VERSION_MARKER_EVENT: raise IonException('Invalid IVM: %s' % (ion_event,)) ctx = _ManagedContext(ctx.catalog) data_event = NEXT_EVENT elif ion_type is IonType.SYMBOL \ and len(ion_event.annotations) == 0 \ and ion_event.value is not None \ and ctx.resolve(ion_event.value).text == TEXT_ION_1_0: assert symbol_trans.delegate is None data_event = NEXT_EVENT elif event_type is IonEventType.CONTAINER_START \ and ion_type is IonType.STRUCT \ and ctx.has_symbol_table_annotation(ion_event.annotations): assert symbol_trans.delegate is None delegate = _local_symbol_table_handler(ctx) symbol_trans = Transition(None, delegate) data_event = NEXT_EVENT if data_event is None: if ion_event is not None: ion_event = _managed_thunk_event(ctx, ion_event) data_event = yield ion_event ion_event = reader.send(data_event)
[ "def", "managed_reader", "(", "reader", ",", "catalog", "=", "None", ")", ":", "if", "catalog", "is", "None", ":", "catalog", "=", "SymbolTableCatalog", "(", ")", "ctx", "=", "_ManagedContext", "(", "catalog", ")", "symbol_trans", "=", "Transition", "(", "None", ",", "None", ")", "ion_event", "=", "None", "while", "True", ":", "if", "symbol_trans", ".", "delegate", "is", "not", "None", "and", "ion_event", "is", "not", "None", "and", "not", "ion_event", ".", "event_type", ".", "is_stream_signal", ":", "# We have a symbol processor active, do not yield to user.", "delegate", "=", "symbol_trans", ".", "delegate", "symbol_trans", "=", "delegate", ".", "send", "(", "Transition", "(", "ion_event", ",", "delegate", ")", ")", "if", "symbol_trans", ".", "delegate", "is", "None", ":", "# When the symbol processor terminates, the event is the context", "# and there is no delegate.", "ctx", "=", "symbol_trans", ".", "event", "data_event", "=", "NEXT_EVENT", "else", ":", "data_event", "=", "symbol_trans", ".", "event", "else", ":", "data_event", "=", "None", "if", "ion_event", "is", "not", "None", ":", "event_type", "=", "ion_event", ".", "event_type", "ion_type", "=", "ion_event", ".", "ion_type", "depth", "=", "ion_event", ".", "depth", "# System values only happen at the top-level", "if", "depth", "==", "0", ":", "if", "event_type", "is", "IonEventType", ".", "VERSION_MARKER", ":", "if", "ion_event", "!=", "ION_VERSION_MARKER_EVENT", ":", "raise", "IonException", "(", "'Invalid IVM: %s'", "%", "(", "ion_event", ",", ")", ")", "# Reset and swallow IVM", "ctx", "=", "_ManagedContext", "(", "ctx", ".", "catalog", ")", "data_event", "=", "NEXT_EVENT", "elif", "ion_type", "is", "IonType", ".", "SYMBOL", "and", "len", "(", "ion_event", ".", "annotations", ")", "==", "0", "and", "ion_event", ".", "value", "is", "not", "None", "and", "ctx", ".", "resolve", "(", "ion_event", ".", "value", ")", ".", "text", "==", "TEXT_ION_1_0", ":", "assert", "symbol_trans", ".", "delegate", "is", "None", "# A faux IVM is a NOP", "data_event", "=", "NEXT_EVENT", "elif", "event_type", "is", "IonEventType", ".", "CONTAINER_START", "and", "ion_type", "is", "IonType", ".", "STRUCT", "and", "ctx", ".", "has_symbol_table_annotation", "(", "ion_event", ".", "annotations", ")", ":", "assert", "symbol_trans", ".", "delegate", "is", "None", "# Activate a new symbol processor.", "delegate", "=", "_local_symbol_table_handler", "(", "ctx", ")", "symbol_trans", "=", "Transition", "(", "None", ",", "delegate", ")", "data_event", "=", "NEXT_EVENT", "if", "data_event", "is", "None", ":", "# No system processing or we have to get data, yield control.", "if", "ion_event", "is", "not", "None", ":", "ion_event", "=", "_managed_thunk_event", "(", "ctx", ",", "ion_event", ")", "data_event", "=", "yield", "ion_event", "ion_event", "=", "reader", ".", "send", "(", "data_event", ")" ]
Managed reader wrapping another reader. Args: reader (Coroutine): The underlying non-blocking reader co-routine. catalog (Optional[SymbolTableCatalog]): The catalog to use for resolving imports. Yields: Events from the underlying reader delegating to symbol table processing as needed. The user will never see things like version markers or local symbol tables.
[ "Managed", "reader", "wrapping", "another", "reader", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_managed.py#L261-L335
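A sketch showing what "managed" means in practice: an explicit $ion_1_0 marker in the input never reaches the caller. Import locations are again inferred from names visible in these sources and may differ from the package's preferred public entry points.

    import io
    from amazon.ion.simpleion import text_reader, managed_reader, blocking_reader
    from amazon.ion.reader import NEXT_EVENT, IonEventType

    fp = io.BytesIO(b'$ion_1_0 123')
    reader = blocking_reader(managed_reader(text_reader(), None), fp)

    event = reader.send(NEXT_EVENT)
    print(event.event_type, event.ion_type)    # a SCALAR int event; no version marker is surfaced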
amzn/ion-python
amazon/ion/reader_managed.py
_ManagedContext.resolve
def resolve(self, token): """Attempts to resolve the :class:`SymbolToken` against the current table. If the ``text`` is not None, the token is returned, otherwise, a token in the table is attempted to be retrieved. If not token is found, then this method will raise. """ if token.text is not None: return token resolved_token = self.symbol_table.get(token.sid, None) if resolved_token is None: raise IonException('Out of range SID: %d' % token.sid) return resolved_token
python
def resolve(self, token): if token.text is not None: return token resolved_token = self.symbol_table.get(token.sid, None) if resolved_token is None: raise IonException('Out of range SID: %d' % token.sid) return resolved_token
[ "def", "resolve", "(", "self", ",", "token", ")", ":", "if", "token", ".", "text", "is", "not", "None", ":", "return", "token", "resolved_token", "=", "self", ".", "symbol_table", ".", "get", "(", "token", ".", "sid", ",", "None", ")", "if", "resolved_token", "is", "None", ":", "raise", "IonException", "(", "'Out of range SID: %d'", "%", "token", ".", "sid", ")", "return", "resolved_token" ]
Attempts to resolve the :class:`SymbolToken` against the current table. If the ``text`` is not None, the token is returned, otherwise, a token in the table is attempted to be retrieved. If not token is found, then this method will raise.
[ "Attempts", "to", "resolve", "the", ":", "class", ":", "SymbolToken", "against", "the", "current", "table", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_managed.py#L40-L52
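The resolution rule itself is small enough to show with plain namedtuples; this is a toy of the logic above, not the _ManagedContext class.

    from collections import namedtuple

    Token = namedtuple('Token', 'text sid')
    table = {10: Token(u'greeting', 10)}

    def toy_resolve(token):
        if token.text is not None:
            return token                          # already has text: pass through
        resolved = table.get(token.sid)
        if resolved is None:
            raise ValueError('Out of range SID: %d' % token.sid)
        return resolved

    print(toy_resolve(Token(u'name', None)).text)   # 'name'
    print(toy_resolve(Token(None, 10)).text)        # 'greeting', looked up by SID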
amzn/ion-python
amazon/ion/reader_text.py
_illegal_character
def _illegal_character(c, ctx, message=''): """Raises an IonException upon encountering the given illegal character in the given context. Args: c (int|None): Ordinal of the illegal character. ctx (_HandlerContext): Context in which the illegal character was encountered. message (Optional[str]): Additional information, as necessary. """ container_type = ctx.container.ion_type is None and 'top-level' or ctx.container.ion_type.name value_type = ctx.ion_type is None and 'unknown' or ctx.ion_type.name if c is None: header = 'Illegal token' else: c = 'EOF' if BufferQueue.is_eof(c) else _chr(c) header = 'Illegal character %s' % (c,) raise IonException('%s at position %d in %s value contained in %s. %s Pending value: %s' % (header, ctx.queue.position, value_type, container_type, message, ctx.value))
python
def _illegal_character(c, ctx, message=''): container_type = ctx.container.ion_type is None and 'top-level' or ctx.container.ion_type.name value_type = ctx.ion_type is None and 'unknown' or ctx.ion_type.name if c is None: header = 'Illegal token' else: c = 'EOF' if BufferQueue.is_eof(c) else _chr(c) header = 'Illegal character %s' % (c,) raise IonException('%s at position %d in %s value contained in %s. %s Pending value: %s' % (header, ctx.queue.position, value_type, container_type, message, ctx.value))
[ "def", "_illegal_character", "(", "c", ",", "ctx", ",", "message", "=", "''", ")", ":", "container_type", "=", "ctx", ".", "container", ".", "ion_type", "is", "None", "and", "'top-level'", "or", "ctx", ".", "container", ".", "ion_type", ".", "name", "value_type", "=", "ctx", ".", "ion_type", "is", "None", "and", "'unknown'", "or", "ctx", ".", "ion_type", ".", "name", "if", "c", "is", "None", ":", "header", "=", "'Illegal token'", "else", ":", "c", "=", "'EOF'", "if", "BufferQueue", ".", "is_eof", "(", "c", ")", "else", "_chr", "(", "c", ")", "header", "=", "'Illegal character %s'", "%", "(", "c", ",", ")", "raise", "IonException", "(", "'%s at position %d in %s value contained in %s. %s Pending value: %s'", "%", "(", "header", ",", "ctx", ".", "queue", ".", "position", ",", "value_type", ",", "container_type", ",", "message", ",", "ctx", ".", "value", ")", ")" ]
Raises an IonException upon encountering the given illegal character in the given context. Args: c (int|None): Ordinal of the illegal character. ctx (_HandlerContext): Context in which the illegal character was encountered. message (Optional[str]): Additional information, as necessary.
[ "Raises", "an", "IonException", "upon", "encountering", "the", "given", "illegal", "character", "in", "the", "given", "context", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L40-L57
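One detail worth calling out is the pre-ternary 'cond and a or b' idiom used above to build the container and value labels; a tiny illustration:

    def describe(ion_type):
        # Same shape as the expressions above: picks 'unknown' when ion_type is None.
        return ion_type is None and 'unknown' or ion_type

    print(describe(None))       # 'unknown'
    print(describe('INT'))      # 'INT'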
amzn/ion-python
amazon/ion/reader_text.py
_defaultdict
def _defaultdict(dct, fallback=_illegal_character): """Wraps the given dictionary such that the given fallback function will be called when a nonexistent key is accessed. """ out = defaultdict(lambda: fallback) for k, v in six.iteritems(dct): out[k] = v return out
python
def _defaultdict(dct, fallback=_illegal_character): out = defaultdict(lambda: fallback) for k, v in six.iteritems(dct): out[k] = v return out
[ "def", "_defaultdict", "(", "dct", ",", "fallback", "=", "_illegal_character", ")", ":", "out", "=", "defaultdict", "(", "lambda", ":", "fallback", ")", "for", "k", ",", "v", "in", "six", ".", "iteritems", "(", "dct", ")", ":", "out", "[", "k", "]", "=", "v", "return", "out" ]
Wraps the given dictionary such that the given fallback function will be called when a nonexistent key is accessed.
[ "Wraps", "the", "given", "dictionary", "such", "that", "the", "given", "fallback", "function", "will", "be", "called", "when", "a", "nonexistent", "key", "is", "accessed", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L60-L67
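_defaultdict is a module-level helper in amazon/ion/reader_text.py, so it can be exercised directly; the custom fallback below is hypothetical and only stands in for the character handlers the parser actually registers.

    from amazon.ion.reader_text import _defaultdict

    def fallback(c, ctx=None, message=''):
        return 'fallback for %r' % c

    handlers = _defaultdict({u'a': lambda c: 'handler for a'}, fallback=fallback)
    print(handlers[u'a'](u'a'))        # the registered handler
    print(handlers[u'z'](u'z'))        # the fallback; no KeyError is raised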
amzn/ion-python
amazon/ion/reader_text.py
_merge_mappings
def _merge_mappings(*args): """Merges a sequence of dictionaries and/or tuples into a single dictionary. If a given argument is a tuple, it must have two elements, the first of which is a sequence of keys and the second of which is a single value, which will be mapped to from each of the keys in the sequence. """ dct = {} for arg in args: if isinstance(arg, dict): merge = arg else: assert isinstance(arg, tuple) keys, value = arg merge = dict(zip(keys, [value]*len(keys))) dct.update(merge) return dct
python
def _merge_mappings(*args): dct = {} for arg in args: if isinstance(arg, dict): merge = arg else: assert isinstance(arg, tuple) keys, value = arg merge = dict(zip(keys, [value]*len(keys))) dct.update(merge) return dct
[ "def", "_merge_mappings", "(", "*", "args", ")", ":", "dct", "=", "{", "}", "for", "arg", "in", "args", ":", "if", "isinstance", "(", "arg", ",", "dict", ")", ":", "merge", "=", "arg", "else", ":", "assert", "isinstance", "(", "arg", ",", "tuple", ")", "keys", ",", "value", "=", "arg", "merge", "=", "dict", "(", "zip", "(", "keys", ",", "[", "value", "]", "*", "len", "(", "keys", ")", ")", ")", "dct", ".", "update", "(", "merge", ")", "return", "dct" ]
Merges a sequence of dictionaries and/or tuples into a single dictionary. If a given argument is a tuple, it must have two elements, the first of which is a sequence of keys and the second of which is a single value, which will be mapped to from each of the keys in the sequence.
[ "Merges", "a", "sequence", "of", "dictionaries", "and", "/", "or", "tuples", "into", "a", "single", "dictionary", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L70-L85
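_merge_mappings is likewise directly callable with the signature shown; the keys and values below are arbitrary placeholders.

    from amazon.ion.reader_text import _merge_mappings

    merged = _merge_mappings(
        {u'a': 1},                     # plain dicts are merged as-is
        ((u'x', u'y', u'z'), 0),       # a (keys, value) tuple fans one value out over every key
    )
    print(sorted(merged.items()))      # [('a', 1), ('x', 0), ('y', 0), ('z', 0)]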
amzn/ion-python
amazon/ion/reader_text.py
_as_symbol
def _as_symbol(value, is_symbol_value=True): """Converts the input to a :class:`SymbolToken` suitable for being emitted as part of a :class:`IonEvent`. If the input has an `as_symbol` method (e.g. :class:`CodePointArray`), it will be converted using that method. Otherwise, it must already be a `SymbolToken`. In this case, there is nothing to do unless the input token is not a symbol value and it is an :class:`_IVMToken`. This requires the `_IVMToken` to be converted to a regular `SymbolToken`. """ try: return value.as_symbol() except AttributeError: assert isinstance(value, SymbolToken) if not is_symbol_value: try: # This converts _IVMTokens to regular SymbolTokens when the _IVMToken cannot represent an IVM (i.e. # it is a field name or annotation). return value.regular_token() except AttributeError: pass return value
python
def _as_symbol(value, is_symbol_value=True): try: return value.as_symbol() except AttributeError: assert isinstance(value, SymbolToken) if not is_symbol_value: try: return value.regular_token() except AttributeError: pass return value
[ "def", "_as_symbol", "(", "value", ",", "is_symbol_value", "=", "True", ")", ":", "try", ":", "return", "value", ".", "as_symbol", "(", ")", "except", "AttributeError", ":", "assert", "isinstance", "(", "value", ",", "SymbolToken", ")", "if", "not", "is_symbol_value", ":", "try", ":", "# This converts _IVMTokens to regular SymbolTokens when the _IVMToken cannot represent an IVM (i.e.", "# it is a field name or annotation).", "return", "value", ".", "regular_token", "(", ")", "except", "AttributeError", ":", "pass", "return", "value" ]
Converts the input to a :class:`SymbolToken` suitable for being emitted as part of a :class:`IonEvent`. If the input has an `as_symbol` method (e.g. :class:`CodePointArray`), it will be converted using that method. Otherwise, it must already be a `SymbolToken`. In this case, there is nothing to do unless the input token is not a symbol value and it is an :class:`_IVMToken`. This requires the `_IVMToken` to be converted to a regular `SymbolToken`.
[ "Converts", "the", "input", "to", "a", ":", "class", ":", "SymbolToken", "suitable", "for", "being", "emitted", "as", "part", "of", "a", ":", "class", ":", "IonEvent", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L280-L299
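The conversion above relies on duck typing: anything with an as_symbol() method is converted through it, and everything else is assumed to already be a SymbolToken. A self-contained sketch of that pattern follows; the stub classes stand in for CodePointArray and SymbolToken and are not the library's real types.

from collections import namedtuple

SymbolTokenStub = namedtuple('SymbolTokenStub', 'text sid')  # stand-in, not the real SymbolToken

class CodePointArrayStub:
    def __init__(self, text):
        self.text = text
    def as_symbol(self):
        return SymbolTokenStub(self.text, None)

def as_symbol(value):
    # Prefer as_symbol() when available; otherwise assume a symbol token was passed in.
    try:
        return value.as_symbol()
    except AttributeError:
        return value

assert as_symbol(CodePointArrayStub('abc')).text == 'abc'
assert as_symbol(SymbolTokenStub('abc', 10)).sid == 10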
amzn/ion-python
amazon/ion/reader_text.py
_number_negative_start_handler
def _number_negative_start_handler(c, ctx): """Handles numeric values that start with a negative sign. Branches to delegate co-routines according to _NEGATIVE_TABLE. """ assert c == _MINUS assert len(ctx.value) == 0 ctx.set_ion_type(IonType.INT) ctx.value.append(c) c, _ = yield yield ctx.immediate_transition(_NEGATIVE_TABLE[c](c, ctx))
python
def _number_negative_start_handler(c, ctx): assert c == _MINUS assert len(ctx.value) == 0 ctx.set_ion_type(IonType.INT) ctx.value.append(c) c, _ = yield yield ctx.immediate_transition(_NEGATIVE_TABLE[c](c, ctx))
[ "def", "_number_negative_start_handler", "(", "c", ",", "ctx", ")", ":", "assert", "c", "==", "_MINUS", "assert", "len", "(", "ctx", ".", "value", ")", "==", "0", "ctx", ".", "set_ion_type", "(", "IonType", ".", "INT", ")", "ctx", ".", "value", ".", "append", "(", "c", ")", "c", ",", "_", "=", "yield", "yield", "ctx", ".", "immediate_transition", "(", "_NEGATIVE_TABLE", "[", "c", "]", "(", "c", ",", "ctx", ")", ")" ]
Handles numeric values that start with a negative sign. Branches to delegate co-routines according to _NEGATIVE_TABLE.
[ "Handles", "numeric", "values", "that", "start", "with", "a", "negative", "sign", ".", "Branches", "to", "delegate", "co", "-", "routines", "according", "to", "_NEGATIVE_TABLE", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L585-L594
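This handler, like the others in this file, is a primed coroutine: it is created with the first character and a context, then repeatedly sent (character, handler) pairs and yields transitions. The toy handler below sketches that protocol under the assumption that the @coroutine decorator simply advances a generator to its first yield; the ('delegate', ...) result shape is made up for illustration and is not the library's Transition type.

def coroutine(func):
    # Assumed behaviour of the priming decorator: advance the generator to its first yield.
    def wrapper(*args, **kwargs):
        gen = func(*args, **kwargs)
        next(gen)
        return gen
    return wrapper

@coroutine
def sign_handler(first_char, value):
    # Toy analogue of a start handler: record the '-' sign, then wait for more input.
    assert first_char == ord('-')
    value.append(first_char)
    char, _ = yield                # handlers receive (char, self) pairs
    yield ('delegate', char)       # a real handler would hand off via a lookup table here

buf = []
handler = sign_handler(ord('-'), buf)
assert handler.send((ord('1'), handler)) == ('delegate', ord('1'))
assert buf == [ord('-')]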
amzn/ion-python
amazon/ion/reader_text.py
_number_zero_start_handler
def _number_zero_start_handler(c, ctx): """Handles numeric values that start with zero or negative zero. Branches to delegate co-routines according to _ZERO_START_TABLE. """ assert c == _ZERO assert len(ctx.value) == 0 or (len(ctx.value) == 1 and ctx.value[0] == _MINUS) ctx.set_ion_type(IonType.INT) ctx.value.append(c) c, _ = yield if _ends_value(c): trans = ctx.event_transition(IonThunkEvent, IonEventType.SCALAR, ctx.ion_type, _parse_decimal_int(ctx.value)) if c == _SLASH: trans = ctx.immediate_transition(_number_slash_end_handler(c, ctx, trans)) yield trans yield ctx.immediate_transition(_ZERO_START_TABLE[c](c, ctx))
python
def _number_zero_start_handler(c, ctx): assert c == _ZERO assert len(ctx.value) == 0 or (len(ctx.value) == 1 and ctx.value[0] == _MINUS) ctx.set_ion_type(IonType.INT) ctx.value.append(c) c, _ = yield if _ends_value(c): trans = ctx.event_transition(IonThunkEvent, IonEventType.SCALAR, ctx.ion_type, _parse_decimal_int(ctx.value)) if c == _SLASH: trans = ctx.immediate_transition(_number_slash_end_handler(c, ctx, trans)) yield trans yield ctx.immediate_transition(_ZERO_START_TABLE[c](c, ctx))
[ "def", "_number_zero_start_handler", "(", "c", ",", "ctx", ")", ":", "assert", "c", "==", "_ZERO", "assert", "len", "(", "ctx", ".", "value", ")", "==", "0", "or", "(", "len", "(", "ctx", ".", "value", ")", "==", "1", "and", "ctx", ".", "value", "[", "0", "]", "==", "_MINUS", ")", "ctx", ".", "set_ion_type", "(", "IonType", ".", "INT", ")", "ctx", ".", "value", ".", "append", "(", "c", ")", "c", ",", "_", "=", "yield", "if", "_ends_value", "(", "c", ")", ":", "trans", "=", "ctx", ".", "event_transition", "(", "IonThunkEvent", ",", "IonEventType", ".", "SCALAR", ",", "ctx", ".", "ion_type", ",", "_parse_decimal_int", "(", "ctx", ".", "value", ")", ")", "if", "c", "==", "_SLASH", ":", "trans", "=", "ctx", ".", "immediate_transition", "(", "_number_slash_end_handler", "(", "c", ",", "ctx", ",", "trans", ")", ")", "yield", "trans", "yield", "ctx", ".", "immediate_transition", "(", "_ZERO_START_TABLE", "[", "c", "]", "(", "c", ",", "ctx", ")", ")" ]
Handles numeric values that start with zero or negative zero. Branches to delegate co-routines according to _ZERO_START_TABLE.
[ "Handles", "numeric", "values", "that", "start", "with", "zero", "or", "negative", "zero", ".", "Branches", "to", "delegate", "co", "-", "routines", "according", "to", "_ZERO_START_TABLE", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L598-L612
amzn/ion-python
amazon/ion/reader_text.py
_number_or_timestamp_handler
def _number_or_timestamp_handler(c, ctx): """Handles numeric values that start with digits 1-9. May terminate a value, in which case that value is an int. If it does not terminate a value, it branches to delegate co-routines according to _NUMBER_OR_TIMESTAMP_TABLE. """ assert c in _DIGITS ctx.set_ion_type(IonType.INT) # If this is the last digit read, this value is an Int. val = ctx.value val.append(c) c, self = yield trans = ctx.immediate_transition(self) while True: if _ends_value(c): trans = ctx.event_transition(IonThunkEvent, IonEventType.SCALAR, ctx.ion_type, _parse_decimal_int(ctx.value)) if c == _SLASH: trans = ctx.immediate_transition(_number_slash_end_handler(c, ctx, trans)) else: if c not in _DIGITS: trans = ctx.immediate_transition(_NUMBER_OR_TIMESTAMP_TABLE[c](c, ctx)) else: val.append(c) c, _ = yield trans
python
def _number_or_timestamp_handler(c, ctx): assert c in _DIGITS ctx.set_ion_type(IonType.INT) val = ctx.value val.append(c) c, self = yield trans = ctx.immediate_transition(self) while True: if _ends_value(c): trans = ctx.event_transition(IonThunkEvent, IonEventType.SCALAR, ctx.ion_type, _parse_decimal_int(ctx.value)) if c == _SLASH: trans = ctx.immediate_transition(_number_slash_end_handler(c, ctx, trans)) else: if c not in _DIGITS: trans = ctx.immediate_transition(_NUMBER_OR_TIMESTAMP_TABLE[c](c, ctx)) else: val.append(c) c, _ = yield trans
[ "def", "_number_or_timestamp_handler", "(", "c", ",", "ctx", ")", ":", "assert", "c", "in", "_DIGITS", "ctx", ".", "set_ion_type", "(", "IonType", ".", "INT", ")", "# If this is the last digit read, this value is an Int.", "val", "=", "ctx", ".", "value", "val", ".", "append", "(", "c", ")", "c", ",", "self", "=", "yield", "trans", "=", "ctx", ".", "immediate_transition", "(", "self", ")", "while", "True", ":", "if", "_ends_value", "(", "c", ")", ":", "trans", "=", "ctx", ".", "event_transition", "(", "IonThunkEvent", ",", "IonEventType", ".", "SCALAR", ",", "ctx", ".", "ion_type", ",", "_parse_decimal_int", "(", "ctx", ".", "value", ")", ")", "if", "c", "==", "_SLASH", ":", "trans", "=", "ctx", ".", "immediate_transition", "(", "_number_slash_end_handler", "(", "c", ",", "ctx", ",", "trans", ")", ")", "else", ":", "if", "c", "not", "in", "_DIGITS", ":", "trans", "=", "ctx", ".", "immediate_transition", "(", "_NUMBER_OR_TIMESTAMP_TABLE", "[", "c", "]", "(", "c", ",", "ctx", ")", ")", "else", ":", "val", ".", "append", "(", "c", ")", "c", ",", "_", "=", "yield", "trans" ]
Handles numeric values that start with digits 1-9. May terminate a value, in which case that value is an int. If it does not terminate a value, it branches to delegate co-routines according to _NUMBER_OR_TIMESTAMP_TABLE.
[ "Handles", "numeric", "values", "that", "start", "with", "digits", "1", "-", "9", ".", "May", "terminate", "a", "value", "in", "which", "case", "that", "value", "is", "an", "int", ".", "If", "it", "does", "not", "terminate", "a", "value", "it", "branches", "to", "delegate", "co", "-", "routines", "according", "to", "_NUMBER_OR_TIMESTAMP_TABLE", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L616-L637
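A usage-level illustration of the branching described above, going through the package's high-level simpleion module (assumes ion-python is installed; the returned wrapper types may differ slightly from plain int/datetime):

from amazon.ion import simpleion

assert simpleion.loads('1999') == 1999          # digits only: the value ends as an INT

ts = simpleion.loads('1999-12-31T')             # the '-' after a four-digit year re-routes
assert (ts.year, ts.month, ts.day) == (1999, 12, 31)   # ...to the timestamp handler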
amzn/ion-python
amazon/ion/reader_text.py
_number_slash_end_handler
def _number_slash_end_handler(c, ctx, event): """Handles numeric values that end in a forward slash. This is only legal if the slash begins a comment; thus, this co-routine either results in an error being raised or an event being yielded. """ assert c == _SLASH c, self = yield next_ctx = ctx.derive_child_context(ctx.whence) comment = _comment_handler(_SLASH, next_ctx, next_ctx.whence) comment.send((c, comment)) # If the previous line returns without error, it's a valid comment and the number may be emitted. yield _CompositeTransition(event, ctx, comment, next_ctx, initialize_handler=False)
python
def _number_slash_end_handler(c, ctx, event): assert c == _SLASH c, self = yield next_ctx = ctx.derive_child_context(ctx.whence) comment = _comment_handler(_SLASH, next_ctx, next_ctx.whence) comment.send((c, comment)) yield _CompositeTransition(event, ctx, comment, next_ctx, initialize_handler=False)
[ "def", "_number_slash_end_handler", "(", "c", ",", "ctx", ",", "event", ")", ":", "assert", "c", "==", "_SLASH", "c", ",", "self", "=", "yield", "next_ctx", "=", "ctx", ".", "derive_child_context", "(", "ctx", ".", "whence", ")", "comment", "=", "_comment_handler", "(", "_SLASH", ",", "next_ctx", ",", "next_ctx", ".", "whence", ")", "comment", ".", "send", "(", "(", "c", ",", "comment", ")", ")", "# If the previous line returns without error, it's a valid comment and the number may be emitted.", "yield", "_CompositeTransition", "(", "event", ",", "ctx", ",", "comment", ",", "next_ctx", ",", "initialize_handler", "=", "False", ")" ]
Handles numeric values that end in a forward slash. This is only legal if the slash begins a comment; thus, this co-routine either results in an error being raised or an event being yielded.
[ "Handles", "numeric", "values", "that", "end", "in", "a", "forward", "slash", ".", "This", "is", "only", "legal", "if", "the", "slash", "begins", "a", "comment", ";", "thus", "this", "co", "-", "routine", "either", "results", "in", "an", "error", "being", "raised", "or", "an", "event", "being", "yielded", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L641-L651
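In other words, a slash immediately after a number is accepted only when it opens a comment. A hedged check with simpleion (assumes ion-python is installed):

from amazon.ion import simpleion
from amazon.ion.exceptions import IonException

assert simpleion.loads('123/*trailing comment*/') == 123   # slash starts a comment: legal
try:
    simpleion.loads('123/oops')                             # slash starts neither // nor /*
except IonException:
    pass
else:
    raise AssertionError('expected a parse error')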
amzn/ion-python
amazon/ion/reader_text.py
_numeric_handler_factory
def _numeric_handler_factory(charset, transition, assertion, illegal_before_underscore, parse_func, illegal_at_end=(None,), ion_type=None, append_first_if_not=None, first_char=None): """Generates a handler co-routine which tokenizes a numeric component (a token or sub-token). Args: charset (sequence): Set of ordinals of legal characters for this numeric component. transition (callable): Called upon termination of this component (i.e. when a character not in ``charset`` is found). Accepts the previous character ordinal, the current character ordinal, the current context, and the previous transition. Returns a Transition if the component ends legally; otherwise, raises an error. assertion (callable): Accepts the first character's ordinal and the current context. Returns True if this is a legal start to the component. illegal_before_underscore (sequence): Set of ordinals of illegal characters to precede an underscore for this component. parse_func (callable): Called upon ending the numeric value. Accepts the current token value and returns a thunk that lazily parses the token. illegal_at_end (Optional[sequence]): Set of ordinals of characters that may not legally end the value. ion_type (Optional[IonType]): The type of the value if it were to end on this component. append_first_if_not (Optional[int]): The ordinal of a character that should not be appended to the token if it occurs first in this component (e.g. an underscore in many cases). first_char (Optional[int]): The ordinal of the character that should be appended instead of the character that occurs first in this component. This is useful for preparing the token for parsing in the case where a particular character is peculiar to the Ion format (e.g. 'd' to denote the exponent of a decimal value should be replaced with 'e' for compatibility with python's Decimal type). """ @coroutine def numeric_handler(c, ctx): assert assertion(c, ctx) if ion_type is not None: ctx.set_ion_type(ion_type) val = ctx.value if c != append_first_if_not: first = c if first_char is None else first_char val.append(first) prev = c c, self = yield trans = ctx.immediate_transition(self) while True: if _ends_value(c): if prev == _UNDERSCORE or prev in illegal_at_end: _illegal_character(c, ctx, '%s at end of number.' % (_chr(prev),)) trans = ctx.event_transition(IonThunkEvent, IonEventType.SCALAR, ctx.ion_type, parse_func(ctx.value)) if c == _SLASH: trans = ctx.immediate_transition(_number_slash_end_handler(c, ctx, trans)) else: if c == _UNDERSCORE: if prev == _UNDERSCORE or prev in illegal_before_underscore: _illegal_character(c, ctx, 'Underscore after %s.' % (_chr(prev),)) else: if c not in charset: trans = transition(prev, c, ctx, trans) else: val.append(c) prev = c c, _ = yield trans return numeric_handler
python
def _numeric_handler_factory(charset, transition, assertion, illegal_before_underscore, parse_func, illegal_at_end=(None,), ion_type=None, append_first_if_not=None, first_char=None): @coroutine def numeric_handler(c, ctx): assert assertion(c, ctx) if ion_type is not None: ctx.set_ion_type(ion_type) val = ctx.value if c != append_first_if_not: first = c if first_char is None else first_char val.append(first) prev = c c, self = yield trans = ctx.immediate_transition(self) while True: if _ends_value(c): if prev == _UNDERSCORE or prev in illegal_at_end: _illegal_character(c, ctx, '%s at end of number.' % (_chr(prev),)) trans = ctx.event_transition(IonThunkEvent, IonEventType.SCALAR, ctx.ion_type, parse_func(ctx.value)) if c == _SLASH: trans = ctx.immediate_transition(_number_slash_end_handler(c, ctx, trans)) else: if c == _UNDERSCORE: if prev == _UNDERSCORE or prev in illegal_before_underscore: _illegal_character(c, ctx, 'Underscore after %s.' % (_chr(prev),)) else: if c not in charset: trans = transition(prev, c, ctx, trans) else: val.append(c) prev = c c, _ = yield trans return numeric_handler
[ "def", "_numeric_handler_factory", "(", "charset", ",", "transition", ",", "assertion", ",", "illegal_before_underscore", ",", "parse_func", ",", "illegal_at_end", "=", "(", "None", ",", ")", ",", "ion_type", "=", "None", ",", "append_first_if_not", "=", "None", ",", "first_char", "=", "None", ")", ":", "@", "coroutine", "def", "numeric_handler", "(", "c", ",", "ctx", ")", ":", "assert", "assertion", "(", "c", ",", "ctx", ")", "if", "ion_type", "is", "not", "None", ":", "ctx", ".", "set_ion_type", "(", "ion_type", ")", "val", "=", "ctx", ".", "value", "if", "c", "!=", "append_first_if_not", ":", "first", "=", "c", "if", "first_char", "is", "None", "else", "first_char", "val", ".", "append", "(", "first", ")", "prev", "=", "c", "c", ",", "self", "=", "yield", "trans", "=", "ctx", ".", "immediate_transition", "(", "self", ")", "while", "True", ":", "if", "_ends_value", "(", "c", ")", ":", "if", "prev", "==", "_UNDERSCORE", "or", "prev", "in", "illegal_at_end", ":", "_illegal_character", "(", "c", ",", "ctx", ",", "'%s at end of number.'", "%", "(", "_chr", "(", "prev", ")", ",", ")", ")", "trans", "=", "ctx", ".", "event_transition", "(", "IonThunkEvent", ",", "IonEventType", ".", "SCALAR", ",", "ctx", ".", "ion_type", ",", "parse_func", "(", "ctx", ".", "value", ")", ")", "if", "c", "==", "_SLASH", ":", "trans", "=", "ctx", ".", "immediate_transition", "(", "_number_slash_end_handler", "(", "c", ",", "ctx", ",", "trans", ")", ")", "else", ":", "if", "c", "==", "_UNDERSCORE", ":", "if", "prev", "==", "_UNDERSCORE", "or", "prev", "in", "illegal_before_underscore", ":", "_illegal_character", "(", "c", ",", "ctx", ",", "'Underscore after %s.'", "%", "(", "_chr", "(", "prev", ")", ",", ")", ")", "else", ":", "if", "c", "not", "in", "charset", ":", "trans", "=", "transition", "(", "prev", ",", "c", ",", "ctx", ",", "trans", ")", "else", ":", "val", ".", "append", "(", "c", ")", "prev", "=", "c", "c", ",", "_", "=", "yield", "trans", "return", "numeric_handler" ]
Generates a handler co-routine which tokenizes a numeric component (a token or sub-token). Args: charset (sequence): Set of ordinals of legal characters for this numeric component. transition (callable): Called upon termination of this component (i.e. when a character not in ``charset`` is found). Accepts the previous character ordinal, the current character ordinal, the current context, and the previous transition. Returns a Transition if the component ends legally; otherwise, raises an error. assertion (callable): Accepts the first character's ordinal and the current context. Returns True if this is a legal start to the component. illegal_before_underscore (sequence): Set of ordinals of illegal characters to precede an underscore for this component. parse_func (callable): Called upon ending the numeric value. Accepts the current token value and returns a thunk that lazily parses the token. illegal_at_end (Optional[sequence]): Set of ordinals of characters that may not legally end the value. ion_type (Optional[IonType]): The type of the value if it were to end on this component. append_first_if_not (Optional[int]): The ordinal of a character that should not be appended to the token if it occurs first in this component (e.g. an underscore in many cases). first_char (Optional[int]): The ordinal of the character that should be appended instead of the character that occurs first in this component. This is useful for preparing the token for parsing in the case where a particular character is peculiar to the Ion format (e.g. 'd' to denote the exponent of a decimal value should be replaced with 'e' for compatibility with python's Decimal type).
[ "Generates", "a", "handler", "co", "-", "routine", "which", "tokenizes", "a", "numeric", "component", "(", "a", "token", "or", "sub", "-", "token", ")", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L654-L708
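A simplified, self-contained sketch of the factory pattern described above: it closes over a character set, a terminator set, and a parse function, and returns a generator-based handler that enforces the underscore rules and emits a parsed scalar when the value ends. The names and the ('more'/'scalar', ...) result shape are illustrative, not the library's actual Transition API.

def make_digit_run_handler(charset, terminators, parse_func):
    UNDERSCORE = ord('_')

    def handler(first, out):
        assert first in charset
        out.append(first)
        prev = first
        c = yield                      # the real handlers receive (char, self) pairs
        while True:
            if c in terminators:
                if prev == UNDERSCORE:
                    raise ValueError('underscore at end of number')
                c = yield ('scalar', parse_func(bytes(out)))
            elif c == UNDERSCORE:
                if prev == UNDERSCORE:
                    raise ValueError('underscore after underscore')
                prev = c
                c = yield ('more', None)
            elif c in charset:
                out.append(c)
                prev = c
                c = yield ('more', None)
            else:
                raise ValueError('unexpected character %r' % chr(c))

    return handler

digits = tuple(ord(d) for d in '0123456789')
factory = make_digit_run_handler(digits, (ord(' '),), int)
buf = []
h = factory(ord('1'), buf)
next(h)                                # prime to the first yield (the library uses @coroutine)
assert h.send(ord('_')) == ('more', None)
assert h.send(ord('2')) == ('more', None)
assert h.send(ord(' ')) == ('scalar', 12)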
amzn/ion-python
amazon/ion/reader_text.py
_exponent_handler_factory
def _exponent_handler_factory(ion_type, exp_chars, parse_func, first_char=None): """Generates a handler co-routine which tokenizes an numeric exponent. Args: ion_type (IonType): The type of the value with this exponent. exp_chars (sequence): The set of ordinals of the legal exponent characters for this component. parse_func (callable): Called upon ending the numeric value. Accepts the current token value and returns a thunk that lazily parses the token. first_char (Optional[int]): The ordinal of the character that should be appended instead of the character that occurs first in this component. This is useful for preparing the token for parsing in the case where a particular character is peculiar to the Ion format (e.g. 'd' to denote the exponent of a decimal value should be replaced with 'e' for compatibility with python's Decimal type). """ def transition(prev, c, ctx, trans): if c in _SIGN and prev in exp_chars: ctx.value.append(c) else: _illegal_character(c, ctx) return trans illegal = exp_chars + _SIGN return _numeric_handler_factory(_DIGITS, transition, lambda c, ctx: c in exp_chars, illegal, parse_func, illegal_at_end=illegal, ion_type=ion_type, first_char=first_char)
python
def _exponent_handler_factory(ion_type, exp_chars, parse_func, first_char=None): def transition(prev, c, ctx, trans): if c in _SIGN and prev in exp_chars: ctx.value.append(c) else: _illegal_character(c, ctx) return trans illegal = exp_chars + _SIGN return _numeric_handler_factory(_DIGITS, transition, lambda c, ctx: c in exp_chars, illegal, parse_func, illegal_at_end=illegal, ion_type=ion_type, first_char=first_char)
[ "def", "_exponent_handler_factory", "(", "ion_type", ",", "exp_chars", ",", "parse_func", ",", "first_char", "=", "None", ")", ":", "def", "transition", "(", "prev", ",", "c", ",", "ctx", ",", "trans", ")", ":", "if", "c", "in", "_SIGN", "and", "prev", "in", "exp_chars", ":", "ctx", ".", "value", ".", "append", "(", "c", ")", "else", ":", "_illegal_character", "(", "c", ",", "ctx", ")", "return", "trans", "illegal", "=", "exp_chars", "+", "_SIGN", "return", "_numeric_handler_factory", "(", "_DIGITS", ",", "transition", ",", "lambda", "c", ",", "ctx", ":", "c", "in", "exp_chars", ",", "illegal", ",", "parse_func", ",", "illegal_at_end", "=", "illegal", ",", "ion_type", "=", "ion_type", ",", "first_char", "=", "first_char", ")" ]
Generates a handler co-routine which tokenizes a numeric exponent. Args: ion_type (IonType): The type of the value with this exponent. exp_chars (sequence): The set of ordinals of the legal exponent characters for this component. parse_func (callable): Called upon ending the numeric value. Accepts the current token value and returns a thunk that lazily parses the token. first_char (Optional[int]): The ordinal of the character that should be appended instead of the character that occurs first in this component. This is useful for preparing the token for parsing in the case where a particular character is peculiar to the Ion format (e.g. 'd' to denote the exponent of a decimal value should be replaced with 'e' for compatibility with python's Decimal type).
[ "Generates", "a", "handler", "co", "-", "routine", "which", "tokenizes", "an", "numeric", "exponent", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L711-L732
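The first_char hook exists mainly for the case called out in the docstring: Ion decimals mark their exponent with 'd'/'D', which must be rewritten to 'e' before the text is handed to Python's Decimal. An equivalent stand-alone conversion (illustrative only; the library performs the swap while tokenizing):

from decimal import Decimal

def parse_ion_decimal(text):
    # Rewrite Ion's decimal exponent marker to the one Decimal understands.
    return Decimal(text.replace('d', 'e').replace('D', 'e'))

assert parse_ion_decimal('1.5d3') == Decimal('1500')
assert parse_ion_decimal('-2D-2') == Decimal('-0.02')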
amzn/ion-python
amazon/ion/reader_text.py
_coefficient_handler_factory
def _coefficient_handler_factory(trans_table, parse_func, assertion=lambda c, ctx: True, ion_type=None, append_first_if_not=None): """Generates a handler co-routine which tokenizes a numeric coefficient. Args: trans_table (dict): lookup table for the handler for the next component of this numeric token, given the ordinal of the first character in that component. parse_func (callable): Called upon ending the numeric value. Accepts the current token value and returns a thunk that lazily parses the token. assertion (callable): Accepts the first character's ordinal and the current context. Returns True if this is a legal start to the component. ion_type (Optional[IonType]): The type of the value if it were to end on this coefficient. append_first_if_not (Optional[int]): The ordinal of a character that should not be appended to the token if it occurs first in this component (e.g. an underscore in many cases). """ def transition(prev, c, ctx, trans): if prev == _UNDERSCORE: _illegal_character(c, ctx, 'Underscore before %s.' % (_chr(c),)) return ctx.immediate_transition(trans_table[c](c, ctx)) return _numeric_handler_factory(_DIGITS, transition, assertion, (_DOT,), parse_func, ion_type=ion_type, append_first_if_not=append_first_if_not)
python
def _coefficient_handler_factory(trans_table, parse_func, assertion=lambda c, ctx: True, ion_type=None, append_first_if_not=None): def transition(prev, c, ctx, trans): if prev == _UNDERSCORE: _illegal_character(c, ctx, 'Underscore before %s.' % (_chr(c),)) return ctx.immediate_transition(trans_table[c](c, ctx)) return _numeric_handler_factory(_DIGITS, transition, assertion, (_DOT,), parse_func, ion_type=ion_type, append_first_if_not=append_first_if_not)
[ "def", "_coefficient_handler_factory", "(", "trans_table", ",", "parse_func", ",", "assertion", "=", "lambda", "c", ",", "ctx", ":", "True", ",", "ion_type", "=", "None", ",", "append_first_if_not", "=", "None", ")", ":", "def", "transition", "(", "prev", ",", "c", ",", "ctx", ",", "trans", ")", ":", "if", "prev", "==", "_UNDERSCORE", ":", "_illegal_character", "(", "c", ",", "ctx", ",", "'Underscore before %s.'", "%", "(", "_chr", "(", "c", ")", ",", ")", ")", "return", "ctx", ".", "immediate_transition", "(", "trans_table", "[", "c", "]", "(", "c", ",", "ctx", ")", ")", "return", "_numeric_handler_factory", "(", "_DIGITS", ",", "transition", ",", "assertion", ",", "(", "_DOT", ",", ")", ",", "parse_func", ",", "ion_type", "=", "ion_type", ",", "append_first_if_not", "=", "append_first_if_not", ")" ]
Generates a handler co-routine which tokenizes a numeric coefficient. Args: trans_table (dict): lookup table for the handler for the next component of this numeric token, given the ordinal of the first character in that component. parse_func (callable): Called upon ending the numeric value. Accepts the current token value and returns a thunk that lazily parses the token. assertion (callable): Accepts the first character's ordinal and the current context. Returns True if this is a legal start to the component. ion_type (Optional[IonType]): The type of the value if it were to end on this coefficient. append_first_if_not (Optional[int]): The ordinal of a character that should not be appended to the token if it occurs first in this component (e.g. an underscore in many cases).
[ "Generates", "a", "handler", "co", "-", "routine", "which", "tokenizes", "a", "numeric", "coefficient", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L739-L759
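The underscore checks above amount to the rule that underscores may only sit between digits: not first, not last, not doubled, and not next to the decimal point. A stand-alone illustration of that rule for plain integers (not the library's code path):

import re

def parse_int_with_underscores(text):
    # Accept underscores only between digits, then strip them before parsing.
    if not re.fullmatch(r'-?\d+(_\d+)*', text):
        raise ValueError('malformed int: %r' % text)
    return int(text.replace('_', ''))

assert parse_int_with_underscores('1_000_000') == 1000000
for bad in ('1__0', '_10', '10_', '-_1'):
    try:
        parse_int_with_underscores(bad)
    except ValueError:
        pass
    else:
        raise AssertionError('expected %r to be rejected' % bad)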
amzn/ion-python
amazon/ion/reader_text.py
_radix_int_handler_factory
def _radix_int_handler_factory(radix_indicators, charset, parse_func): """Generates a handler co-routine which tokenizes a integer of a particular radix. Args: radix_indicators (sequence): The set of ordinals of characters that indicate the radix of this int. charset (sequence): Set of ordinals of legal characters for this radix. parse_func (callable): Called upon ending the numeric value. Accepts the current token value and returns a thunk that lazily parses the token. """ def assertion(c, ctx): return c in radix_indicators and \ ((len(ctx.value) == 1 and ctx.value[0] == _ZERO) or (len(ctx.value) == 2 and ctx.value[0] == _MINUS and ctx.value[1] == _ZERO)) and \ ctx.ion_type == IonType.INT return _numeric_handler_factory(charset, lambda prev, c, ctx, trans: _illegal_character(c, ctx), assertion, radix_indicators, parse_func, illegal_at_end=radix_indicators)
python
def _radix_int_handler_factory(radix_indicators, charset, parse_func): def assertion(c, ctx): return c in radix_indicators and \ ((len(ctx.value) == 1 and ctx.value[0] == _ZERO) or (len(ctx.value) == 2 and ctx.value[0] == _MINUS and ctx.value[1] == _ZERO)) and \ ctx.ion_type == IonType.INT return _numeric_handler_factory(charset, lambda prev, c, ctx, trans: _illegal_character(c, ctx), assertion, radix_indicators, parse_func, illegal_at_end=radix_indicators)
[ "def", "_radix_int_handler_factory", "(", "radix_indicators", ",", "charset", ",", "parse_func", ")", ":", "def", "assertion", "(", "c", ",", "ctx", ")", ":", "return", "c", "in", "radix_indicators", "and", "(", "(", "len", "(", "ctx", ".", "value", ")", "==", "1", "and", "ctx", ".", "value", "[", "0", "]", "==", "_ZERO", ")", "or", "(", "len", "(", "ctx", ".", "value", ")", "==", "2", "and", "ctx", ".", "value", "[", "0", "]", "==", "_MINUS", "and", "ctx", ".", "value", "[", "1", "]", "==", "_ZERO", ")", ")", "and", "ctx", ".", "ion_type", "==", "IonType", ".", "INT", "return", "_numeric_handler_factory", "(", "charset", ",", "lambda", "prev", ",", "c", ",", "ctx", ",", "trans", ":", "_illegal_character", "(", "c", ",", "ctx", ")", ",", "assertion", ",", "radix_indicators", ",", "parse_func", ",", "illegal_at_end", "=", "radix_indicators", ")" ]
Generates a handler co-routine which tokenizes an integer of a particular radix. Args: radix_indicators (sequence): The set of ordinals of characters that indicate the radix of this int. charset (sequence): Set of ordinals of legal characters for this radix. parse_func (callable): Called upon ending the numeric value. Accepts the current token value and returns a thunk that lazily parses the token.
[ "Generates", "a", "handler", "co", "-", "routine", "which", "tokenizes", "a", "integer", "of", "a", "particular", "radix", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L785-L800
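Per the assertion above, a radix indicator is only legal immediately after a leading 0 (or -0), and the remaining digits come from that radix's own character set. A hedged stand-alone parse of the two radix forms in Ion text, hexadecimal and binary (illustrative; the real parse_func is supplied by the caller):

def parse_radix_int(text):
    sign, body = (-1, text[1:]) if text.startswith('-') else (1, text)
    if body[:2].lower() == '0x':
        return sign * int(body[2:], 16)   # hexadecimal, e.g. 0x1f
    if body[:2].lower() == '0b':
        return sign * int(body[2:], 2)    # binary, e.g. 0b101
    raise ValueError('not a radix int: %r' % text)

assert parse_radix_int('0x1f') == 31
assert parse_radix_int('-0b101') == -5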
amzn/ion-python
amazon/ion/reader_text.py
_timestamp_zero_start_handler
def _timestamp_zero_start_handler(c, ctx): """Handles numeric values that start with a zero followed by another digit. This is either a timestamp or an error. """ val = ctx.value ctx.set_ion_type(IonType.TIMESTAMP) if val[0] == _MINUS: _illegal_character(c, ctx, 'Negative year not allowed.') val.append(c) c, self = yield trans = ctx.immediate_transition(self) while True: if c in _TIMESTAMP_YEAR_DELIMITERS: trans = ctx.immediate_transition(_timestamp_handler(c, ctx)) elif c in _DIGITS: val.append(c) else: _illegal_character(c, ctx) c, _ = yield trans
python
def _timestamp_zero_start_handler(c, ctx): val = ctx.value ctx.set_ion_type(IonType.TIMESTAMP) if val[0] == _MINUS: _illegal_character(c, ctx, 'Negative year not allowed.') val.append(c) c, self = yield trans = ctx.immediate_transition(self) while True: if c in _TIMESTAMP_YEAR_DELIMITERS: trans = ctx.immediate_transition(_timestamp_handler(c, ctx)) elif c in _DIGITS: val.append(c) else: _illegal_character(c, ctx) c, _ = yield trans
[ "def", "_timestamp_zero_start_handler", "(", "c", ",", "ctx", ")", ":", "val", "=", "ctx", ".", "value", "ctx", ".", "set_ion_type", "(", "IonType", ".", "TIMESTAMP", ")", "if", "val", "[", "0", "]", "==", "_MINUS", ":", "_illegal_character", "(", "c", ",", "ctx", ",", "'Negative year not allowed.'", ")", "val", ".", "append", "(", "c", ")", "c", ",", "self", "=", "yield", "trans", "=", "ctx", ".", "immediate_transition", "(", "self", ")", "while", "True", ":", "if", "c", "in", "_TIMESTAMP_YEAR_DELIMITERS", ":", "trans", "=", "ctx", ".", "immediate_transition", "(", "_timestamp_handler", "(", "c", ",", "ctx", ")", ")", "elif", "c", "in", "_DIGITS", ":", "val", ".", "append", "(", "c", ")", "else", ":", "_illegal_character", "(", "c", ",", "ctx", ")", "c", ",", "_", "=", "yield", "trans" ]
Handles numeric values that start with a zero followed by another digit. This is either a timestamp or an error.
[ "Handles", "numeric", "values", "that", "start", "with", "a", "zero", "followed", "by", "another", "digit", ".", "This", "is", "either", "a", "timestamp", "or", "an", "error", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L808-L826
amzn/ion-python
amazon/ion/reader_text.py
_parse_timestamp
def _parse_timestamp(tokens): """Parses each token in the given `_TimestampTokens` and marshals the numeric components into a `Timestamp`.""" def parse(): precision = TimestampPrecision.YEAR off_hour = tokens[_TimestampState.OFF_HOUR] off_minutes = tokens[_TimestampState.OFF_MINUTE] microsecond = None fraction_digits = None if off_hour is not None: assert off_minutes is not None off_sign = -1 if _MINUS in off_hour else 1 off_hour = int(off_hour) off_minutes = int(off_minutes) * off_sign if off_sign == -1 and off_hour == 0 and off_minutes == 0: # -00:00 (unknown UTC offset) is a naive datetime. off_hour = None off_minutes = None else: assert off_minutes is None year = tokens[_TimestampState.YEAR] assert year is not None year = int(year) month = tokens[_TimestampState.MONTH] if month is None: month = 1 else: month = int(month) precision = TimestampPrecision.MONTH day = tokens[_TimestampState.DAY] if day is None: day = 1 else: day = int(day) precision = TimestampPrecision.DAY hour = tokens[_TimestampState.HOUR] minute = tokens[_TimestampState.MINUTE] if hour is None: assert minute is None hour = 0 minute = 0 else: assert minute is not None hour = int(hour) minute = int(minute) precision = TimestampPrecision.MINUTE second = tokens[_TimestampState.SECOND] if second is None: second = 0 else: second = int(second) precision = TimestampPrecision.SECOND fraction = tokens[_TimestampState.FRACTIONAL] if fraction is not None: fraction_digits = len(fraction) if fraction_digits > MICROSECOND_PRECISION: for digit in fraction[MICROSECOND_PRECISION:]: if digit != _ZERO: raise ValueError('Only six significant digits supported in timestamp fractional. Found %s.' % (fraction,)) fraction_digits = MICROSECOND_PRECISION fraction = fraction[0:MICROSECOND_PRECISION] else: fraction.extend(_ZEROS[MICROSECOND_PRECISION - fraction_digits]) microsecond = int(fraction) return timestamp( year, month, day, hour, minute, second, microsecond, off_hour, off_minutes, precision=precision, fractional_precision=fraction_digits ) return parse
python
def _parse_timestamp(tokens): def parse(): precision = TimestampPrecision.YEAR off_hour = tokens[_TimestampState.OFF_HOUR] off_minutes = tokens[_TimestampState.OFF_MINUTE] microsecond = None fraction_digits = None if off_hour is not None: assert off_minutes is not None off_sign = -1 if _MINUS in off_hour else 1 off_hour = int(off_hour) off_minutes = int(off_minutes) * off_sign if off_sign == -1 and off_hour == 0 and off_minutes == 0: off_hour = None off_minutes = None else: assert off_minutes is None year = tokens[_TimestampState.YEAR] assert year is not None year = int(year) month = tokens[_TimestampState.MONTH] if month is None: month = 1 else: month = int(month) precision = TimestampPrecision.MONTH day = tokens[_TimestampState.DAY] if day is None: day = 1 else: day = int(day) precision = TimestampPrecision.DAY hour = tokens[_TimestampState.HOUR] minute = tokens[_TimestampState.MINUTE] if hour is None: assert minute is None hour = 0 minute = 0 else: assert minute is not None hour = int(hour) minute = int(minute) precision = TimestampPrecision.MINUTE second = tokens[_TimestampState.SECOND] if second is None: second = 0 else: second = int(second) precision = TimestampPrecision.SECOND fraction = tokens[_TimestampState.FRACTIONAL] if fraction is not None: fraction_digits = len(fraction) if fraction_digits > MICROSECOND_PRECISION: for digit in fraction[MICROSECOND_PRECISION:]: if digit != _ZERO: raise ValueError('Only six significant digits supported in timestamp fractional. Found %s.' % (fraction,)) fraction_digits = MICROSECOND_PRECISION fraction = fraction[0:MICROSECOND_PRECISION] else: fraction.extend(_ZEROS[MICROSECOND_PRECISION - fraction_digits]) microsecond = int(fraction) return timestamp( year, month, day, hour, minute, second, microsecond, off_hour, off_minutes, precision=precision, fractional_precision=fraction_digits ) return parse
[ "def", "_parse_timestamp", "(", "tokens", ")", ":", "def", "parse", "(", ")", ":", "precision", "=", "TimestampPrecision", ".", "YEAR", "off_hour", "=", "tokens", "[", "_TimestampState", ".", "OFF_HOUR", "]", "off_minutes", "=", "tokens", "[", "_TimestampState", ".", "OFF_MINUTE", "]", "microsecond", "=", "None", "fraction_digits", "=", "None", "if", "off_hour", "is", "not", "None", ":", "assert", "off_minutes", "is", "not", "None", "off_sign", "=", "-", "1", "if", "_MINUS", "in", "off_hour", "else", "1", "off_hour", "=", "int", "(", "off_hour", ")", "off_minutes", "=", "int", "(", "off_minutes", ")", "*", "off_sign", "if", "off_sign", "==", "-", "1", "and", "off_hour", "==", "0", "and", "off_minutes", "==", "0", ":", "# -00:00 (unknown UTC offset) is a naive datetime.", "off_hour", "=", "None", "off_minutes", "=", "None", "else", ":", "assert", "off_minutes", "is", "None", "year", "=", "tokens", "[", "_TimestampState", ".", "YEAR", "]", "assert", "year", "is", "not", "None", "year", "=", "int", "(", "year", ")", "month", "=", "tokens", "[", "_TimestampState", ".", "MONTH", "]", "if", "month", "is", "None", ":", "month", "=", "1", "else", ":", "month", "=", "int", "(", "month", ")", "precision", "=", "TimestampPrecision", ".", "MONTH", "day", "=", "tokens", "[", "_TimestampState", ".", "DAY", "]", "if", "day", "is", "None", ":", "day", "=", "1", "else", ":", "day", "=", "int", "(", "day", ")", "precision", "=", "TimestampPrecision", ".", "DAY", "hour", "=", "tokens", "[", "_TimestampState", ".", "HOUR", "]", "minute", "=", "tokens", "[", "_TimestampState", ".", "MINUTE", "]", "if", "hour", "is", "None", ":", "assert", "minute", "is", "None", "hour", "=", "0", "minute", "=", "0", "else", ":", "assert", "minute", "is", "not", "None", "hour", "=", "int", "(", "hour", ")", "minute", "=", "int", "(", "minute", ")", "precision", "=", "TimestampPrecision", ".", "MINUTE", "second", "=", "tokens", "[", "_TimestampState", ".", "SECOND", "]", "if", "second", "is", "None", ":", "second", "=", "0", "else", ":", "second", "=", "int", "(", "second", ")", "precision", "=", "TimestampPrecision", ".", "SECOND", "fraction", "=", "tokens", "[", "_TimestampState", ".", "FRACTIONAL", "]", "if", "fraction", "is", "not", "None", ":", "fraction_digits", "=", "len", "(", "fraction", ")", "if", "fraction_digits", ">", "MICROSECOND_PRECISION", ":", "for", "digit", "in", "fraction", "[", "MICROSECOND_PRECISION", ":", "]", ":", "if", "digit", "!=", "_ZERO", ":", "raise", "ValueError", "(", "'Only six significant digits supported in timestamp fractional. Found %s.'", "%", "(", "fraction", ",", ")", ")", "fraction_digits", "=", "MICROSECOND_PRECISION", "fraction", "=", "fraction", "[", "0", ":", "MICROSECOND_PRECISION", "]", "else", ":", "fraction", ".", "extend", "(", "_ZEROS", "[", "MICROSECOND_PRECISION", "-", "fraction_digits", "]", ")", "microsecond", "=", "int", "(", "fraction", ")", "return", "timestamp", "(", "year", ",", "month", ",", "day", ",", "hour", ",", "minute", ",", "second", ",", "microsecond", ",", "off_hour", ",", "off_minutes", ",", "precision", "=", "precision", ",", "fractional_precision", "=", "fraction_digits", ")", "return", "parse" ]
Parses each token in the given `_TimestampTokens` and marshals the numeric components into a `Timestamp`.
[ "Parses", "each", "token", "in", "the", "given", "_TimestampTokens", "and", "marshals", "the", "numeric", "components", "into", "a", "Timestamp", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L870-L946
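The fractional-seconds arithmetic above can be summarized as: pad the fractional digits out to six to obtain microseconds, and accept more than six digits only when everything past the sixth is zero. A small stand-alone version of that step (names are illustrative):

MICROSECOND_PRECISION = 6

def fractional_to_microsecond(fraction_text):
    digits = len(fraction_text)
    if digits > MICROSECOND_PRECISION:
        if any(d != '0' for d in fraction_text[MICROSECOND_PRECISION:]):
            raise ValueError('only six significant fractional digits are supported')
        fraction_text = fraction_text[:MICROSECOND_PRECISION]
    else:
        fraction_text += '0' * (MICROSECOND_PRECISION - digits)
    return int(fraction_text), min(digits, MICROSECOND_PRECISION)

assert fractional_to_microsecond('45') == (450000, 2)        # e.g. ...T12:00:00.45
assert fractional_to_microsecond('123456000') == (123456, 6)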
amzn/ion-python
amazon/ion/reader_text.py
_timestamp_handler
def _timestamp_handler(c, ctx): """Handles timestamp values. Entered after the year component has been completed; tokenizes the remaining components. """ assert c in _TIMESTAMP_YEAR_DELIMITERS ctx.set_ion_type(IonType.TIMESTAMP) if len(ctx.value) != 4: _illegal_character(c, ctx, 'Timestamp year is %d digits; expected 4.' % (len(ctx.value),)) prev = c c, self = yield trans = ctx.immediate_transition(self) state = _TimestampState.YEAR nxt = _DIGITS tokens = _TimestampTokens(ctx.value) val = None can_terminate = False if prev == _T: nxt += _VALUE_TERMINATORS can_terminate = True while True: is_eof = can_terminate and BufferQueue.is_eof(c) if c not in nxt and not is_eof: _illegal_character(c, ctx, 'Expected %r in state %r.' % ([_chr(x) for x in nxt], state)) if c in _VALUE_TERMINATORS or is_eof: if not can_terminate: _illegal_character(c, ctx, 'Unexpected termination of timestamp.') trans = ctx.event_transition(IonThunkEvent, IonEventType.SCALAR, ctx.ion_type, _parse_timestamp(tokens)) if c == _SLASH: trans = ctx.immediate_transition(_number_slash_end_handler(c, ctx, trans)) else: can_terminate = False if c == _Z: # Z implies UTC, i.e. +00:00 local offset. tokens.transition(_TimestampState.OFF_HOUR).append(_ZERO) tokens.transition(_TimestampState.OFF_MINUTE).append(_ZERO) nxt = _VALUE_TERMINATORS can_terminate = True elif c == _T: nxt = _VALUE_TERMINATORS + _DIGITS can_terminate = True elif c in _TIMESTAMP_DELIMITERS: nxt = _DIGITS elif c in _DIGITS: if prev == _PLUS or (state > _TimestampState.MONTH and prev == _HYPHEN): state = _TimestampState.OFF_HOUR val = tokens.transition(state) if prev == _HYPHEN: val.append(prev) elif prev in (_TIMESTAMP_DELIMITERS + (_T,)): state = _TimestampState[state + 1] val = tokens.transition(state) if state == _TimestampState.FRACTIONAL: nxt = _DIGITS + _TIMESTAMP_OFFSET_INDICATORS elif prev in _DIGITS: if state == _TimestampState.MONTH: nxt = _TIMESTAMP_YEAR_DELIMITERS elif state == _TimestampState.DAY: nxt = (_T,) + _VALUE_TERMINATORS can_terminate = True elif state == _TimestampState.HOUR: nxt = (_COLON,) elif state == _TimestampState.MINUTE: nxt = _TIMESTAMP_OFFSET_INDICATORS + (_COLON,) elif state == _TimestampState.SECOND: nxt = _TIMESTAMP_OFFSET_INDICATORS + (_DOT,) elif state == _TimestampState.FRACTIONAL: nxt = _DIGITS + _TIMESTAMP_OFFSET_INDICATORS elif state == _TimestampState.OFF_HOUR: nxt = (_COLON,) elif state == _TimestampState.OFF_MINUTE: nxt = _VALUE_TERMINATORS can_terminate = True else: raise ValueError('Unknown timestamp state %r.' % (state,)) else: # Reaching this branch would be indicative of a programming error within this state machine. raise ValueError('Digit following %s in timestamp state %r.' % (_chr(prev), state)) val.append(c) prev = c c, _ = yield trans
python
def _timestamp_handler(c, ctx): assert c in _TIMESTAMP_YEAR_DELIMITERS ctx.set_ion_type(IonType.TIMESTAMP) if len(ctx.value) != 4: _illegal_character(c, ctx, 'Timestamp year is %d digits; expected 4.' % (len(ctx.value),)) prev = c c, self = yield trans = ctx.immediate_transition(self) state = _TimestampState.YEAR nxt = _DIGITS tokens = _TimestampTokens(ctx.value) val = None can_terminate = False if prev == _T: nxt += _VALUE_TERMINATORS can_terminate = True while True: is_eof = can_terminate and BufferQueue.is_eof(c) if c not in nxt and not is_eof: _illegal_character(c, ctx, 'Expected %r in state %r.' % ([_chr(x) for x in nxt], state)) if c in _VALUE_TERMINATORS or is_eof: if not can_terminate: _illegal_character(c, ctx, 'Unexpected termination of timestamp.') trans = ctx.event_transition(IonThunkEvent, IonEventType.SCALAR, ctx.ion_type, _parse_timestamp(tokens)) if c == _SLASH: trans = ctx.immediate_transition(_number_slash_end_handler(c, ctx, trans)) else: can_terminate = False if c == _Z: tokens.transition(_TimestampState.OFF_HOUR).append(_ZERO) tokens.transition(_TimestampState.OFF_MINUTE).append(_ZERO) nxt = _VALUE_TERMINATORS can_terminate = True elif c == _T: nxt = _VALUE_TERMINATORS + _DIGITS can_terminate = True elif c in _TIMESTAMP_DELIMITERS: nxt = _DIGITS elif c in _DIGITS: if prev == _PLUS or (state > _TimestampState.MONTH and prev == _HYPHEN): state = _TimestampState.OFF_HOUR val = tokens.transition(state) if prev == _HYPHEN: val.append(prev) elif prev in (_TIMESTAMP_DELIMITERS + (_T,)): state = _TimestampState[state + 1] val = tokens.transition(state) if state == _TimestampState.FRACTIONAL: nxt = _DIGITS + _TIMESTAMP_OFFSET_INDICATORS elif prev in _DIGITS: if state == _TimestampState.MONTH: nxt = _TIMESTAMP_YEAR_DELIMITERS elif state == _TimestampState.DAY: nxt = (_T,) + _VALUE_TERMINATORS can_terminate = True elif state == _TimestampState.HOUR: nxt = (_COLON,) elif state == _TimestampState.MINUTE: nxt = _TIMESTAMP_OFFSET_INDICATORS + (_COLON,) elif state == _TimestampState.SECOND: nxt = _TIMESTAMP_OFFSET_INDICATORS + (_DOT,) elif state == _TimestampState.FRACTIONAL: nxt = _DIGITS + _TIMESTAMP_OFFSET_INDICATORS elif state == _TimestampState.OFF_HOUR: nxt = (_COLON,) elif state == _TimestampState.OFF_MINUTE: nxt = _VALUE_TERMINATORS can_terminate = True else: raise ValueError('Unknown timestamp state %r.' % (state,)) else: raise ValueError('Digit following %s in timestamp state %r.' % (_chr(prev), state)) val.append(c) prev = c c, _ = yield trans
[ "def", "_timestamp_handler", "(", "c", ",", "ctx", ")", ":", "assert", "c", "in", "_TIMESTAMP_YEAR_DELIMITERS", "ctx", ".", "set_ion_type", "(", "IonType", ".", "TIMESTAMP", ")", "if", "len", "(", "ctx", ".", "value", ")", "!=", "4", ":", "_illegal_character", "(", "c", ",", "ctx", ",", "'Timestamp year is %d digits; expected 4.'", "%", "(", "len", "(", "ctx", ".", "value", ")", ",", ")", ")", "prev", "=", "c", "c", ",", "self", "=", "yield", "trans", "=", "ctx", ".", "immediate_transition", "(", "self", ")", "state", "=", "_TimestampState", ".", "YEAR", "nxt", "=", "_DIGITS", "tokens", "=", "_TimestampTokens", "(", "ctx", ".", "value", ")", "val", "=", "None", "can_terminate", "=", "False", "if", "prev", "==", "_T", ":", "nxt", "+=", "_VALUE_TERMINATORS", "can_terminate", "=", "True", "while", "True", ":", "is_eof", "=", "can_terminate", "and", "BufferQueue", ".", "is_eof", "(", "c", ")", "if", "c", "not", "in", "nxt", "and", "not", "is_eof", ":", "_illegal_character", "(", "c", ",", "ctx", ",", "'Expected %r in state %r.'", "%", "(", "[", "_chr", "(", "x", ")", "for", "x", "in", "nxt", "]", ",", "state", ")", ")", "if", "c", "in", "_VALUE_TERMINATORS", "or", "is_eof", ":", "if", "not", "can_terminate", ":", "_illegal_character", "(", "c", ",", "ctx", ",", "'Unexpected termination of timestamp.'", ")", "trans", "=", "ctx", ".", "event_transition", "(", "IonThunkEvent", ",", "IonEventType", ".", "SCALAR", ",", "ctx", ".", "ion_type", ",", "_parse_timestamp", "(", "tokens", ")", ")", "if", "c", "==", "_SLASH", ":", "trans", "=", "ctx", ".", "immediate_transition", "(", "_number_slash_end_handler", "(", "c", ",", "ctx", ",", "trans", ")", ")", "else", ":", "can_terminate", "=", "False", "if", "c", "==", "_Z", ":", "# Z implies UTC, i.e. +00:00 local offset.", "tokens", ".", "transition", "(", "_TimestampState", ".", "OFF_HOUR", ")", ".", "append", "(", "_ZERO", ")", "tokens", ".", "transition", "(", "_TimestampState", ".", "OFF_MINUTE", ")", ".", "append", "(", "_ZERO", ")", "nxt", "=", "_VALUE_TERMINATORS", "can_terminate", "=", "True", "elif", "c", "==", "_T", ":", "nxt", "=", "_VALUE_TERMINATORS", "+", "_DIGITS", "can_terminate", "=", "True", "elif", "c", "in", "_TIMESTAMP_DELIMITERS", ":", "nxt", "=", "_DIGITS", "elif", "c", "in", "_DIGITS", ":", "if", "prev", "==", "_PLUS", "or", "(", "state", ">", "_TimestampState", ".", "MONTH", "and", "prev", "==", "_HYPHEN", ")", ":", "state", "=", "_TimestampState", ".", "OFF_HOUR", "val", "=", "tokens", ".", "transition", "(", "state", ")", "if", "prev", "==", "_HYPHEN", ":", "val", ".", "append", "(", "prev", ")", "elif", "prev", "in", "(", "_TIMESTAMP_DELIMITERS", "+", "(", "_T", ",", ")", ")", ":", "state", "=", "_TimestampState", "[", "state", "+", "1", "]", "val", "=", "tokens", ".", "transition", "(", "state", ")", "if", "state", "==", "_TimestampState", ".", "FRACTIONAL", ":", "nxt", "=", "_DIGITS", "+", "_TIMESTAMP_OFFSET_INDICATORS", "elif", "prev", "in", "_DIGITS", ":", "if", "state", "==", "_TimestampState", ".", "MONTH", ":", "nxt", "=", "_TIMESTAMP_YEAR_DELIMITERS", "elif", "state", "==", "_TimestampState", ".", "DAY", ":", "nxt", "=", "(", "_T", ",", ")", "+", "_VALUE_TERMINATORS", "can_terminate", "=", "True", "elif", "state", "==", "_TimestampState", ".", "HOUR", ":", "nxt", "=", "(", "_COLON", ",", ")", "elif", "state", "==", "_TimestampState", ".", "MINUTE", ":", "nxt", "=", "_TIMESTAMP_OFFSET_INDICATORS", "+", "(", "_COLON", ",", ")", "elif", "state", "==", "_TimestampState", ".", "SECOND", ":", "nxt", "=", 
"_TIMESTAMP_OFFSET_INDICATORS", "+", "(", "_DOT", ",", ")", "elif", "state", "==", "_TimestampState", ".", "FRACTIONAL", ":", "nxt", "=", "_DIGITS", "+", "_TIMESTAMP_OFFSET_INDICATORS", "elif", "state", "==", "_TimestampState", ".", "OFF_HOUR", ":", "nxt", "=", "(", "_COLON", ",", ")", "elif", "state", "==", "_TimestampState", ".", "OFF_MINUTE", ":", "nxt", "=", "_VALUE_TERMINATORS", "can_terminate", "=", "True", "else", ":", "raise", "ValueError", "(", "'Unknown timestamp state %r.'", "%", "(", "state", ",", ")", ")", "else", ":", "# Reaching this branch would be indicative of a programming error within this state machine.", "raise", "ValueError", "(", "'Digit following %s in timestamp state %r.'", "%", "(", "_chr", "(", "prev", ")", ",", "state", ")", ")", "val", ".", "append", "(", "c", ")", "prev", "=", "c", "c", ",", "_", "=", "yield", "trans" ]
Handles timestamp values. Entered after the year component has been completed; tokenizes the remaining components.
[ "Handles", "timestamp", "values", ".", "Entered", "after", "the", "year", "component", "has", "been", "completed", ";", "tokenizes", "the", "remaining", "components", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L950-L1029
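Two offset conventions are visible in the handler and parser above: 'Z' is recorded as a +00:00 offset, while an explicit '-00:00' marks the offset as unknown and produces a naive datetime. A hedged behaviour check through simpleion (assumes ion-python is installed):

from datetime import timedelta
from amazon.ion import simpleion

utc = simpleion.loads('2007-02-23T12:14Z')
unknown = simpleion.loads('2007-02-23T12:14-00:00')
assert utc.utcoffset() == timedelta(0)    # Z means UTC, i.e. +00:00
assert unknown.utcoffset() is None        # -00:00 means the local offset is unknown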
amzn/ion-python
amazon/ion/reader_text.py
_comment_handler
def _comment_handler(c, ctx, whence): """Handles comments. Upon completion of the comment, immediately transitions back to `whence`.""" assert c == _SLASH c, self = yield if c == _SLASH: ctx.set_line_comment() block_comment = False elif c == _ASTERISK: if ctx.line_comment: # This happens when a block comment immediately follows a line comment. ctx.set_line_comment(False) block_comment = True else: _illegal_character(c, ctx, 'Illegal character sequence "/%s".' % (_chr(c),)) done = False prev = None trans = ctx.immediate_transition(self) while not done: c, _ = yield trans if block_comment: if prev == _ASTERISK and c == _SLASH: done = True prev = c else: if c in _NEWLINES or BufferQueue.is_eof(c): done = True yield ctx.set_self_delimiting(True).immediate_transition(whence)
python
def _comment_handler(c, ctx, whence): assert c == _SLASH c, self = yield if c == _SLASH: ctx.set_line_comment() block_comment = False elif c == _ASTERISK: if ctx.line_comment: ctx.set_line_comment(False) block_comment = True else: _illegal_character(c, ctx, 'Illegal character sequence "/%s".' % (_chr(c),)) done = False prev = None trans = ctx.immediate_transition(self) while not done: c, _ = yield trans if block_comment: if prev == _ASTERISK and c == _SLASH: done = True prev = c else: if c in _NEWLINES or BufferQueue.is_eof(c): done = True yield ctx.set_self_delimiting(True).immediate_transition(whence)
[ "def", "_comment_handler", "(", "c", ",", "ctx", ",", "whence", ")", ":", "assert", "c", "==", "_SLASH", "c", ",", "self", "=", "yield", "if", "c", "==", "_SLASH", ":", "ctx", ".", "set_line_comment", "(", ")", "block_comment", "=", "False", "elif", "c", "==", "_ASTERISK", ":", "if", "ctx", ".", "line_comment", ":", "# This happens when a block comment immediately follows a line comment.", "ctx", ".", "set_line_comment", "(", "False", ")", "block_comment", "=", "True", "else", ":", "_illegal_character", "(", "c", ",", "ctx", ",", "'Illegal character sequence \"/%s\".'", "%", "(", "_chr", "(", "c", ")", ",", ")", ")", "done", "=", "False", "prev", "=", "None", "trans", "=", "ctx", ".", "immediate_transition", "(", "self", ")", "while", "not", "done", ":", "c", ",", "_", "=", "yield", "trans", "if", "block_comment", ":", "if", "prev", "==", "_ASTERISK", "and", "c", "==", "_SLASH", ":", "done", "=", "True", "prev", "=", "c", "else", ":", "if", "c", "in", "_NEWLINES", "or", "BufferQueue", ".", "is_eof", "(", "c", ")", ":", "done", "=", "True", "yield", "ctx", ".", "set_self_delimiting", "(", "True", ")", ".", "immediate_transition", "(", "whence", ")" ]
Handles comments. Upon completion of the comment, immediately transitions back to `whence`.
[ "Handles", "comments", ".", "Upon", "completion", "of", "the", "comment", "immediately", "transitions", "back", "to", "whence", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L1033-L1059
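The handler above recognizes two comment forms: '//' running to end of line and '/*' ... '*/'. A simplified stand-alone scanner for the same two forms (illustrative, not the library's implementation):

def strip_ion_comments(text):
    out, i, n = [], 0, len(text)
    while i < n:
        if text.startswith('//', i):
            nl = text.find('\n', i)
            i = n if nl == -1 else nl      # the newline itself is kept as whitespace
        elif text.startswith('/*', i):
            end = text.find('*/', i + 2)
            if end == -1:
                raise ValueError('unterminated block comment')
            i = end + 2
        else:
            out.append(text[i])
            i += 1
    return ''.join(out)

assert strip_ion_comments('1 // one\n2') == '1 \n2'
assert strip_ion_comments('[1, /* skip */ 2]') == '[1,  2]'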
amzn/ion-python
amazon/ion/reader_text.py
_sexp_slash_handler
def _sexp_slash_handler(c, ctx, whence=None, pending_event=None): """Handles the special case of a forward-slash within an s-expression. This is either an operator or a comment. """ assert c == _SLASH if whence is None: whence = ctx.whence c, self = yield ctx.queue.unread(c) if c == _ASTERISK or c == _SLASH: yield ctx.immediate_transition(_comment_handler(_SLASH, ctx, whence)) else: if pending_event is not None: # Since this is the start of a new value and not a comment, the pending event must be emitted. assert pending_event.event is not None yield _CompositeTransition(pending_event, ctx, partial(_operator_symbol_handler, _SLASH)) yield ctx.immediate_transition(_operator_symbol_handler(_SLASH, ctx))
python
def _sexp_slash_handler(c, ctx, whence=None, pending_event=None): assert c == _SLASH if whence is None: whence = ctx.whence c, self = yield ctx.queue.unread(c) if c == _ASTERISK or c == _SLASH: yield ctx.immediate_transition(_comment_handler(_SLASH, ctx, whence)) else: if pending_event is not None: assert pending_event.event is not None yield _CompositeTransition(pending_event, ctx, partial(_operator_symbol_handler, _SLASH)) yield ctx.immediate_transition(_operator_symbol_handler(_SLASH, ctx))
[ "def", "_sexp_slash_handler", "(", "c", ",", "ctx", ",", "whence", "=", "None", ",", "pending_event", "=", "None", ")", ":", "assert", "c", "==", "_SLASH", "if", "whence", "is", "None", ":", "whence", "=", "ctx", ".", "whence", "c", ",", "self", "=", "yield", "ctx", ".", "queue", ".", "unread", "(", "c", ")", "if", "c", "==", "_ASTERISK", "or", "c", "==", "_SLASH", ":", "yield", "ctx", ".", "immediate_transition", "(", "_comment_handler", "(", "_SLASH", ",", "ctx", ",", "whence", ")", ")", "else", ":", "if", "pending_event", "is", "not", "None", ":", "# Since this is the start of a new value and not a comment, the pending event must be emitted.", "assert", "pending_event", ".", "event", "is", "not", "None", "yield", "_CompositeTransition", "(", "pending_event", ",", "ctx", ",", "partial", "(", "_operator_symbol_handler", ",", "_SLASH", ")", ")", "yield", "ctx", ".", "immediate_transition", "(", "_operator_symbol_handler", "(", "_SLASH", ",", "ctx", ")", ")" ]
Handles the special case of a forward-slash within an s-expression. This is either an operator or a comment.
[ "Handles", "the", "special", "case", "of", "a", "forward", "-", "slash", "within", "an", "s", "-", "expression", ".", "This", "is", "either", "an", "operator", "or", "a", "comment", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L1063-L1079
amzn/ion-python
amazon/ion/reader_text.py
_long_string_handler
def _long_string_handler(c, ctx, is_field_name=False): """Handles triple-quoted strings. Remains active until a value other than a long string is encountered.""" assert c == _SINGLE_QUOTE is_clob = ctx.ion_type is IonType.CLOB max_char = _MAX_CLOB_CHAR if is_clob else _MAX_TEXT_CHAR assert not (is_clob and is_field_name) if not is_clob and not is_field_name: ctx.set_ion_type(IonType.STRING) assert not ctx.value ctx.set_unicode(quoted_text=True) val = ctx.value if is_field_name: assert not val ctx.set_pending_symbol() val = ctx.pending_symbol quotes = 0 in_data = True c, self = yield here = ctx.immediate_transition(self) trans = here while True: if c == _SINGLE_QUOTE and not _is_escaped(c): quotes += 1 if quotes == 3: in_data = not in_data ctx.set_quoted_text(in_data) quotes = 0 else: if in_data: _validate_long_string_text(c, ctx, max_char) # Any quotes found in the meantime are part of the data val.extend(_SINGLE_QUOTES[quotes]) if not _is_escaped_newline(c): val.append(c) quotes = 0 else: if quotes > 0: assert quotes < 3 if is_field_name or is_clob: # There are at least two values here, which is illegal for field names or within clobs. _illegal_character(c, ctx, 'Malformed triple-quoted text: %s' % (val,)) else: # This string value is followed by a quoted symbol. if ctx.container.is_delimited: _illegal_character(c, ctx, 'Delimiter %s not found after value.' % (_chr(ctx.container.delimiter[0]),)) trans = ctx.event_transition(IonEvent, IonEventType.SCALAR, ctx.ion_type, ctx.value.as_text()) if quotes == 1: if BufferQueue.is_eof(c): _illegal_character(c, ctx, "Unexpected EOF.") # c was read as a single byte. Re-read it as a code point. ctx.queue.unread(c) ctx.set_quoted_text(True) c, _ = yield ctx.immediate_transition(self) trans = _CompositeTransition( trans, ctx, partial(_quoted_symbol_handler, c, is_field_name=False), ) else: # quotes == 2 trans = _CompositeTransition(trans, ctx, None, ctx.set_empty_symbol()) elif c not in _WHITESPACE: if is_clob: trans = ctx.immediate_transition(_clob_end_handler(c, ctx)) elif c == _SLASH: if ctx.container.ion_type is IonType.SEXP: pending = ctx.event_transition(IonEvent, IonEventType.SCALAR, ctx.ion_type, ctx.value.as_text()) trans = ctx.immediate_transition(_sexp_slash_handler(c, ctx, self, pending)) else: trans = ctx.immediate_transition(_comment_handler(c, ctx, self)) elif is_field_name: if c != _COLON: _illegal_character(c, ctx, 'Illegal character after field name %s.' % (val,)) trans = ctx.immediate_transition(ctx.whence) else: trans = ctx.event_transition(IonEvent, IonEventType.SCALAR, ctx.ion_type, ctx.value.as_text()) c, _ = yield trans ctx.set_self_delimiting(False) # If comments separated long string components, this would have been set. trans = here
python
def _long_string_handler(c, ctx, is_field_name=False): assert c == _SINGLE_QUOTE is_clob = ctx.ion_type is IonType.CLOB max_char = _MAX_CLOB_CHAR if is_clob else _MAX_TEXT_CHAR assert not (is_clob and is_field_name) if not is_clob and not is_field_name: ctx.set_ion_type(IonType.STRING) assert not ctx.value ctx.set_unicode(quoted_text=True) val = ctx.value if is_field_name: assert not val ctx.set_pending_symbol() val = ctx.pending_symbol quotes = 0 in_data = True c, self = yield here = ctx.immediate_transition(self) trans = here while True: if c == _SINGLE_QUOTE and not _is_escaped(c): quotes += 1 if quotes == 3: in_data = not in_data ctx.set_quoted_text(in_data) quotes = 0 else: if in_data: _validate_long_string_text(c, ctx, max_char) val.extend(_SINGLE_QUOTES[quotes]) if not _is_escaped_newline(c): val.append(c) quotes = 0 else: if quotes > 0: assert quotes < 3 if is_field_name or is_clob: _illegal_character(c, ctx, 'Malformed triple-quoted text: %s' % (val,)) else: if ctx.container.is_delimited: _illegal_character(c, ctx, 'Delimiter %s not found after value.' % (_chr(ctx.container.delimiter[0]),)) trans = ctx.event_transition(IonEvent, IonEventType.SCALAR, ctx.ion_type, ctx.value.as_text()) if quotes == 1: if BufferQueue.is_eof(c): _illegal_character(c, ctx, "Unexpected EOF.") ctx.queue.unread(c) ctx.set_quoted_text(True) c, _ = yield ctx.immediate_transition(self) trans = _CompositeTransition( trans, ctx, partial(_quoted_symbol_handler, c, is_field_name=False), ) else: trans = _CompositeTransition(trans, ctx, None, ctx.set_empty_symbol()) elif c not in _WHITESPACE: if is_clob: trans = ctx.immediate_transition(_clob_end_handler(c, ctx)) elif c == _SLASH: if ctx.container.ion_type is IonType.SEXP: pending = ctx.event_transition(IonEvent, IonEventType.SCALAR, ctx.ion_type, ctx.value.as_text()) trans = ctx.immediate_transition(_sexp_slash_handler(c, ctx, self, pending)) else: trans = ctx.immediate_transition(_comment_handler(c, ctx, self)) elif is_field_name: if c != _COLON: _illegal_character(c, ctx, 'Illegal character after field name %s.' % (val,)) trans = ctx.immediate_transition(ctx.whence) else: trans = ctx.event_transition(IonEvent, IonEventType.SCALAR, ctx.ion_type, ctx.value.as_text()) c, _ = yield trans ctx.set_self_delimiting(False) trans = here
[ "def", "_long_string_handler", "(", "c", ",", "ctx", ",", "is_field_name", "=", "False", ")", ":", "assert", "c", "==", "_SINGLE_QUOTE", "is_clob", "=", "ctx", ".", "ion_type", "is", "IonType", ".", "CLOB", "max_char", "=", "_MAX_CLOB_CHAR", "if", "is_clob", "else", "_MAX_TEXT_CHAR", "assert", "not", "(", "is_clob", "and", "is_field_name", ")", "if", "not", "is_clob", "and", "not", "is_field_name", ":", "ctx", ".", "set_ion_type", "(", "IonType", ".", "STRING", ")", "assert", "not", "ctx", ".", "value", "ctx", ".", "set_unicode", "(", "quoted_text", "=", "True", ")", "val", "=", "ctx", ".", "value", "if", "is_field_name", ":", "assert", "not", "val", "ctx", ".", "set_pending_symbol", "(", ")", "val", "=", "ctx", ".", "pending_symbol", "quotes", "=", "0", "in_data", "=", "True", "c", ",", "self", "=", "yield", "here", "=", "ctx", ".", "immediate_transition", "(", "self", ")", "trans", "=", "here", "while", "True", ":", "if", "c", "==", "_SINGLE_QUOTE", "and", "not", "_is_escaped", "(", "c", ")", ":", "quotes", "+=", "1", "if", "quotes", "==", "3", ":", "in_data", "=", "not", "in_data", "ctx", ".", "set_quoted_text", "(", "in_data", ")", "quotes", "=", "0", "else", ":", "if", "in_data", ":", "_validate_long_string_text", "(", "c", ",", "ctx", ",", "max_char", ")", "# Any quotes found in the meantime are part of the data", "val", ".", "extend", "(", "_SINGLE_QUOTES", "[", "quotes", "]", ")", "if", "not", "_is_escaped_newline", "(", "c", ")", ":", "val", ".", "append", "(", "c", ")", "quotes", "=", "0", "else", ":", "if", "quotes", ">", "0", ":", "assert", "quotes", "<", "3", "if", "is_field_name", "or", "is_clob", ":", "# There are at least two values here, which is illegal for field names or within clobs.", "_illegal_character", "(", "c", ",", "ctx", ",", "'Malformed triple-quoted text: %s'", "%", "(", "val", ",", ")", ")", "else", ":", "# This string value is followed by a quoted symbol.", "if", "ctx", ".", "container", ".", "is_delimited", ":", "_illegal_character", "(", "c", ",", "ctx", ",", "'Delimiter %s not found after value.'", "%", "(", "_chr", "(", "ctx", ".", "container", ".", "delimiter", "[", "0", "]", ")", ",", ")", ")", "trans", "=", "ctx", ".", "event_transition", "(", "IonEvent", ",", "IonEventType", ".", "SCALAR", ",", "ctx", ".", "ion_type", ",", "ctx", ".", "value", ".", "as_text", "(", ")", ")", "if", "quotes", "==", "1", ":", "if", "BufferQueue", ".", "is_eof", "(", "c", ")", ":", "_illegal_character", "(", "c", ",", "ctx", ",", "\"Unexpected EOF.\"", ")", "# c was read as a single byte. 
Re-read it as a code point.", "ctx", ".", "queue", ".", "unread", "(", "c", ")", "ctx", ".", "set_quoted_text", "(", "True", ")", "c", ",", "_", "=", "yield", "ctx", ".", "immediate_transition", "(", "self", ")", "trans", "=", "_CompositeTransition", "(", "trans", ",", "ctx", ",", "partial", "(", "_quoted_symbol_handler", ",", "c", ",", "is_field_name", "=", "False", ")", ",", ")", "else", ":", "# quotes == 2", "trans", "=", "_CompositeTransition", "(", "trans", ",", "ctx", ",", "None", ",", "ctx", ".", "set_empty_symbol", "(", ")", ")", "elif", "c", "not", "in", "_WHITESPACE", ":", "if", "is_clob", ":", "trans", "=", "ctx", ".", "immediate_transition", "(", "_clob_end_handler", "(", "c", ",", "ctx", ")", ")", "elif", "c", "==", "_SLASH", ":", "if", "ctx", ".", "container", ".", "ion_type", "is", "IonType", ".", "SEXP", ":", "pending", "=", "ctx", ".", "event_transition", "(", "IonEvent", ",", "IonEventType", ".", "SCALAR", ",", "ctx", ".", "ion_type", ",", "ctx", ".", "value", ".", "as_text", "(", ")", ")", "trans", "=", "ctx", ".", "immediate_transition", "(", "_sexp_slash_handler", "(", "c", ",", "ctx", ",", "self", ",", "pending", ")", ")", "else", ":", "trans", "=", "ctx", ".", "immediate_transition", "(", "_comment_handler", "(", "c", ",", "ctx", ",", "self", ")", ")", "elif", "is_field_name", ":", "if", "c", "!=", "_COLON", ":", "_illegal_character", "(", "c", ",", "ctx", ",", "'Illegal character after field name %s.'", "%", "(", "val", ",", ")", ")", "trans", "=", "ctx", ".", "immediate_transition", "(", "ctx", ".", "whence", ")", "else", ":", "trans", "=", "ctx", ".", "event_transition", "(", "IonEvent", ",", "IonEventType", ".", "SCALAR", ",", "ctx", ".", "ion_type", ",", "ctx", ".", "value", ".", "as_text", "(", ")", ")", "c", ",", "_", "=", "yield", "trans", "ctx", ".", "set_self_delimiting", "(", "False", ")", "# If comments separated long string components, this would have been set.", "trans", "=", "here" ]
Handles triple-quoted strings. Remains active until a value other than a long string is encountered.
[ "Handles", "triple", "-", "quoted", "strings", ".", "Remains", "active", "until", "a", "value", "other", "than", "a", "long", "string", "is", "encountered", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L1109-L1188
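The `_long_string_handler` record above implements Ion's rule that adjacent triple-quoted segments, separated only by whitespace, fold into a single string value. The following is a minimal standalone sketch of that concatenation rule, not the library's code; it ignores escape sequences, comments, and error handling, and `concat_long_strings` is an invented name.

import re

# Toy model of the rule above: consume leading '''-delimited segments (plus any
# whitespace between them) and join their contents into a single string value.
_SEGMENT = re.compile(r"'''(.*?)'''\s*", re.DOTALL)

def concat_long_strings(text):
    """Concatenates leading triple-quoted segments of `text`; returns (value, rest)."""
    parts = []
    pos = 0
    while True:
        match = _SEGMENT.match(text, pos)
        if match is None:
            break
        parts.append(match.group(1))
        pos = match.end()
    return ''.join(parts), text[pos:]

value, rest = concat_long_strings("'''Hello, ''' '''World!''' 42")
assert value == 'Hello, World!'
assert rest == '42'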
amzn/ion-python
amazon/ion/reader_text.py
_typed_null_handler
def _typed_null_handler(c, ctx): """Handles typed null values. Entered once `null.` has been found.""" assert c == _DOT c, self = yield nxt = _NULL_STARTS i = 0 length = None done = False trans = ctx.immediate_transition(self) while True: if done: if _ends_value(c) or (ctx.container.ion_type is IonType.SEXP and c in _OPERATORS): trans = ctx.event_transition(IonEvent, IonEventType.SCALAR, nxt.ion_type, None) else: _illegal_character(c, ctx, 'Illegal null type.') elif length is None: if c not in nxt: _illegal_character(c, ctx, 'Illegal null type.') nxt = nxt[c] if isinstance(nxt, _NullSequence): length = len(nxt.sequence) else: if c != nxt[i]: _illegal_character(c, ctx, 'Illegal null type.') i += 1 done = i == length c, _ = yield trans
python
def _typed_null_handler(c, ctx): assert c == _DOT c, self = yield nxt = _NULL_STARTS i = 0 length = None done = False trans = ctx.immediate_transition(self) while True: if done: if _ends_value(c) or (ctx.container.ion_type is IonType.SEXP and c in _OPERATORS): trans = ctx.event_transition(IonEvent, IonEventType.SCALAR, nxt.ion_type, None) else: _illegal_character(c, ctx, 'Illegal null type.') elif length is None: if c not in nxt: _illegal_character(c, ctx, 'Illegal null type.') nxt = nxt[c] if isinstance(nxt, _NullSequence): length = len(nxt.sequence) else: if c != nxt[i]: _illegal_character(c, ctx, 'Illegal null type.') i += 1 done = i == length c, _ = yield trans
[ "def", "_typed_null_handler", "(", "c", ",", "ctx", ")", ":", "assert", "c", "==", "_DOT", "c", ",", "self", "=", "yield", "nxt", "=", "_NULL_STARTS", "i", "=", "0", "length", "=", "None", "done", "=", "False", "trans", "=", "ctx", ".", "immediate_transition", "(", "self", ")", "while", "True", ":", "if", "done", ":", "if", "_ends_value", "(", "c", ")", "or", "(", "ctx", ".", "container", ".", "ion_type", "is", "IonType", ".", "SEXP", "and", "c", "in", "_OPERATORS", ")", ":", "trans", "=", "ctx", ".", "event_transition", "(", "IonEvent", ",", "IonEventType", ".", "SCALAR", ",", "nxt", ".", "ion_type", ",", "None", ")", "else", ":", "_illegal_character", "(", "c", ",", "ctx", ",", "'Illegal null type.'", ")", "elif", "length", "is", "None", ":", "if", "c", "not", "in", "nxt", ":", "_illegal_character", "(", "c", ",", "ctx", ",", "'Illegal null type.'", ")", "nxt", "=", "nxt", "[", "c", "]", "if", "isinstance", "(", "nxt", ",", "_NullSequence", ")", ":", "length", "=", "len", "(", "nxt", ".", "sequence", ")", "else", ":", "if", "c", "!=", "nxt", "[", "i", "]", ":", "_illegal_character", "(", "c", ",", "ctx", ",", "'Illegal null type.'", ")", "i", "+=", "1", "done", "=", "i", "==", "length", "c", ",", "_", "=", "yield", "trans" ]
Handles typed null values. Entered once `null.` has been found.
[ "Handles", "typed", "null", "values", ".", "Entered", "once", "null", ".", "has", "been", "found", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L1192-L1218
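The typed-null matching that `_typed_null_handler` performs character by character can be pictured with a flat lookup instead of the reader's `_NULL_STARTS` trie. A rough sketch, assuming the valid suffixes are the thirteen Ion type names; `classify_typed_null` is an invented helper.

# Sketch of the decision above: after "null.", the remaining characters must
# spell one of a fixed set of type names, otherwise the value is illegal.
_TYPED_NULL_SUFFIXES = {
    'null', 'bool', 'int', 'float', 'decimal', 'timestamp',
    'string', 'symbol', 'blob', 'clob', 'list', 'sexp', 'struct',
}

def classify_typed_null(token):
    """Returns the type name for tokens such as 'null.int'; raises on anything else."""
    prefix, dot, suffix = token.partition('.')
    if prefix != 'null' or not dot or suffix not in _TYPED_NULL_SUFFIXES:
        raise ValueError('Illegal null type: %r' % (token,))
    return suffix

assert classify_typed_null('null.int') == 'int'
assert classify_typed_null('null.struct') == 'struct'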
amzn/ion-python
amazon/ion/reader_text.py
_symbol_or_keyword_handler
def _symbol_or_keyword_handler(c, ctx, is_field_name=False): """Handles the start of an unquoted text token. This may be an operator (if in an s-expression), an identifier symbol, or a keyword. """ in_sexp = ctx.container.ion_type is IonType.SEXP if c not in _IDENTIFIER_STARTS: if in_sexp and c in _OPERATORS: c_next, _ = yield ctx.queue.unread(c_next) yield ctx.immediate_transition(_operator_symbol_handler(c, ctx)) _illegal_character(c, ctx) assert not ctx.value ctx.set_unicode().set_ion_type(IonType.SYMBOL) val = ctx.value val.append(c) maybe_null = c == _N_LOWER maybe_nan = maybe_null maybe_true = c == _T_LOWER maybe_false = c == _F_LOWER c, self = yield trans = ctx.immediate_transition(self) keyword_trans = None match_index = 0 while True: def check_keyword(name, keyword_sequence, ion_type, value, match_transition=lambda: None): maybe_keyword = True transition = None if match_index < len(keyword_sequence): maybe_keyword = c == keyword_sequence[match_index] else: transition = match_transition() if transition is not None: pass elif _ends_value(c): if is_field_name: _illegal_character(c, ctx, '%s keyword as field name not allowed.' % (name,)) transition = ctx.event_transition(IonEvent, IonEventType.SCALAR, ion_type, value) elif c == _COLON: message = '' if is_field_name: message = '%s keyword as field name not allowed.' % (name,) _illegal_character(c, ctx, message) elif in_sexp and c in _OPERATORS: transition = ctx.event_transition(IonEvent, IonEventType.SCALAR, ion_type, value) else: maybe_keyword = False return maybe_keyword, transition if maybe_null: def check_null_dot(): transition = None found = c == _DOT if found: if is_field_name: _illegal_character(c, ctx, "Illegal character in field name.") transition = ctx.immediate_transition(_typed_null_handler(c, ctx)) return transition maybe_null, keyword_trans = check_keyword('null', _NULL_SUFFIX.sequence, IonType.NULL, None, check_null_dot) if maybe_nan: maybe_nan, keyword_trans = check_keyword('nan', _NAN_SUFFIX, IonType.FLOAT, _NAN) elif maybe_true: maybe_true, keyword_trans = check_keyword('true', _TRUE_SUFFIX, IonType.BOOL, True) elif maybe_false: maybe_false, keyword_trans = check_keyword('false', _FALSE_SUFFIX, IonType.BOOL, False) if maybe_null or maybe_nan or maybe_true or maybe_false: if keyword_trans is not None: trans = keyword_trans else: val.append(c) match_index += 1 else: if c in _SYMBOL_TOKEN_TERMINATORS: # This might be an annotation or a field name ctx.set_pending_symbol(val) trans = ctx.immediate_transition(ctx.whence) elif _ends_value(c) or (in_sexp and c in _OPERATORS): trans = ctx.event_transition(IonEvent, IonEventType.SCALAR, IonType.SYMBOL, val.as_symbol()) else: trans = ctx.immediate_transition(_unquoted_symbol_handler(c, ctx, is_field_name=is_field_name)) c, _ = yield trans
python
def _symbol_or_keyword_handler(c, ctx, is_field_name=False): in_sexp = ctx.container.ion_type is IonType.SEXP if c not in _IDENTIFIER_STARTS: if in_sexp and c in _OPERATORS: c_next, _ = yield ctx.queue.unread(c_next) yield ctx.immediate_transition(_operator_symbol_handler(c, ctx)) _illegal_character(c, ctx) assert not ctx.value ctx.set_unicode().set_ion_type(IonType.SYMBOL) val = ctx.value val.append(c) maybe_null = c == _N_LOWER maybe_nan = maybe_null maybe_true = c == _T_LOWER maybe_false = c == _F_LOWER c, self = yield trans = ctx.immediate_transition(self) keyword_trans = None match_index = 0 while True: def check_keyword(name, keyword_sequence, ion_type, value, match_transition=lambda: None): maybe_keyword = True transition = None if match_index < len(keyword_sequence): maybe_keyword = c == keyword_sequence[match_index] else: transition = match_transition() if transition is not None: pass elif _ends_value(c): if is_field_name: _illegal_character(c, ctx, '%s keyword as field name not allowed.' % (name,)) transition = ctx.event_transition(IonEvent, IonEventType.SCALAR, ion_type, value) elif c == _COLON: message = '' if is_field_name: message = '%s keyword as field name not allowed.' % (name,) _illegal_character(c, ctx, message) elif in_sexp and c in _OPERATORS: transition = ctx.event_transition(IonEvent, IonEventType.SCALAR, ion_type, value) else: maybe_keyword = False return maybe_keyword, transition if maybe_null: def check_null_dot(): transition = None found = c == _DOT if found: if is_field_name: _illegal_character(c, ctx, "Illegal character in field name.") transition = ctx.immediate_transition(_typed_null_handler(c, ctx)) return transition maybe_null, keyword_trans = check_keyword('null', _NULL_SUFFIX.sequence, IonType.NULL, None, check_null_dot) if maybe_nan: maybe_nan, keyword_trans = check_keyword('nan', _NAN_SUFFIX, IonType.FLOAT, _NAN) elif maybe_true: maybe_true, keyword_trans = check_keyword('true', _TRUE_SUFFIX, IonType.BOOL, True) elif maybe_false: maybe_false, keyword_trans = check_keyword('false', _FALSE_SUFFIX, IonType.BOOL, False) if maybe_null or maybe_nan or maybe_true or maybe_false: if keyword_trans is not None: trans = keyword_trans else: val.append(c) match_index += 1 else: if c in _SYMBOL_TOKEN_TERMINATORS: ctx.set_pending_symbol(val) trans = ctx.immediate_transition(ctx.whence) elif _ends_value(c) or (in_sexp and c in _OPERATORS): trans = ctx.event_transition(IonEvent, IonEventType.SCALAR, IonType.SYMBOL, val.as_symbol()) else: trans = ctx.immediate_transition(_unquoted_symbol_handler(c, ctx, is_field_name=is_field_name)) c, _ = yield trans
[ "def", "_symbol_or_keyword_handler", "(", "c", ",", "ctx", ",", "is_field_name", "=", "False", ")", ":", "in_sexp", "=", "ctx", ".", "container", ".", "ion_type", "is", "IonType", ".", "SEXP", "if", "c", "not", "in", "_IDENTIFIER_STARTS", ":", "if", "in_sexp", "and", "c", "in", "_OPERATORS", ":", "c_next", ",", "_", "=", "yield", "ctx", ".", "queue", ".", "unread", "(", "c_next", ")", "yield", "ctx", ".", "immediate_transition", "(", "_operator_symbol_handler", "(", "c", ",", "ctx", ")", ")", "_illegal_character", "(", "c", ",", "ctx", ")", "assert", "not", "ctx", ".", "value", "ctx", ".", "set_unicode", "(", ")", ".", "set_ion_type", "(", "IonType", ".", "SYMBOL", ")", "val", "=", "ctx", ".", "value", "val", ".", "append", "(", "c", ")", "maybe_null", "=", "c", "==", "_N_LOWER", "maybe_nan", "=", "maybe_null", "maybe_true", "=", "c", "==", "_T_LOWER", "maybe_false", "=", "c", "==", "_F_LOWER", "c", ",", "self", "=", "yield", "trans", "=", "ctx", ".", "immediate_transition", "(", "self", ")", "keyword_trans", "=", "None", "match_index", "=", "0", "while", "True", ":", "def", "check_keyword", "(", "name", ",", "keyword_sequence", ",", "ion_type", ",", "value", ",", "match_transition", "=", "lambda", ":", "None", ")", ":", "maybe_keyword", "=", "True", "transition", "=", "None", "if", "match_index", "<", "len", "(", "keyword_sequence", ")", ":", "maybe_keyword", "=", "c", "==", "keyword_sequence", "[", "match_index", "]", "else", ":", "transition", "=", "match_transition", "(", ")", "if", "transition", "is", "not", "None", ":", "pass", "elif", "_ends_value", "(", "c", ")", ":", "if", "is_field_name", ":", "_illegal_character", "(", "c", ",", "ctx", ",", "'%s keyword as field name not allowed.'", "%", "(", "name", ",", ")", ")", "transition", "=", "ctx", ".", "event_transition", "(", "IonEvent", ",", "IonEventType", ".", "SCALAR", ",", "ion_type", ",", "value", ")", "elif", "c", "==", "_COLON", ":", "message", "=", "''", "if", "is_field_name", ":", "message", "=", "'%s keyword as field name not allowed.'", "%", "(", "name", ",", ")", "_illegal_character", "(", "c", ",", "ctx", ",", "message", ")", "elif", "in_sexp", "and", "c", "in", "_OPERATORS", ":", "transition", "=", "ctx", ".", "event_transition", "(", "IonEvent", ",", "IonEventType", ".", "SCALAR", ",", "ion_type", ",", "value", ")", "else", ":", "maybe_keyword", "=", "False", "return", "maybe_keyword", ",", "transition", "if", "maybe_null", ":", "def", "check_null_dot", "(", ")", ":", "transition", "=", "None", "found", "=", "c", "==", "_DOT", "if", "found", ":", "if", "is_field_name", ":", "_illegal_character", "(", "c", ",", "ctx", ",", "\"Illegal character in field name.\"", ")", "transition", "=", "ctx", ".", "immediate_transition", "(", "_typed_null_handler", "(", "c", ",", "ctx", ")", ")", "return", "transition", "maybe_null", ",", "keyword_trans", "=", "check_keyword", "(", "'null'", ",", "_NULL_SUFFIX", ".", "sequence", ",", "IonType", ".", "NULL", ",", "None", ",", "check_null_dot", ")", "if", "maybe_nan", ":", "maybe_nan", ",", "keyword_trans", "=", "check_keyword", "(", "'nan'", ",", "_NAN_SUFFIX", ",", "IonType", ".", "FLOAT", ",", "_NAN", ")", "elif", "maybe_true", ":", "maybe_true", ",", "keyword_trans", "=", "check_keyword", "(", "'true'", ",", "_TRUE_SUFFIX", ",", "IonType", ".", "BOOL", ",", "True", ")", "elif", "maybe_false", ":", "maybe_false", ",", "keyword_trans", "=", "check_keyword", "(", "'false'", ",", "_FALSE_SUFFIX", ",", "IonType", ".", "BOOL", ",", "False", ")", "if", "maybe_null", "or", "maybe_nan", 
"or", "maybe_true", "or", "maybe_false", ":", "if", "keyword_trans", "is", "not", "None", ":", "trans", "=", "keyword_trans", "else", ":", "val", ".", "append", "(", "c", ")", "match_index", "+=", "1", "else", ":", "if", "c", "in", "_SYMBOL_TOKEN_TERMINATORS", ":", "# This might be an annotation or a field name", "ctx", ".", "set_pending_symbol", "(", "val", ")", "trans", "=", "ctx", ".", "immediate_transition", "(", "ctx", ".", "whence", ")", "elif", "_ends_value", "(", "c", ")", "or", "(", "in_sexp", "and", "c", "in", "_OPERATORS", ")", ":", "trans", "=", "ctx", ".", "event_transition", "(", "IonEvent", ",", "IonEventType", ".", "SCALAR", ",", "IonType", ".", "SYMBOL", ",", "val", ".", "as_symbol", "(", ")", ")", "else", ":", "trans", "=", "ctx", ".", "immediate_transition", "(", "_unquoted_symbol_handler", "(", "c", ",", "ctx", ",", "is_field_name", "=", "is_field_name", ")", ")", "c", ",", "_", "=", "yield", "trans" ]
Handles the start of an unquoted text token. This may be an operator (if in an s-expression), an identifier symbol, or a keyword.
[ "Handles", "the", "start", "of", "an", "unquoted", "text", "token", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L1222-L1302
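The classification that `_symbol_or_keyword_handler` performs incrementally, keyword (`null`, `nan`, `true`, `false`, or a typed null) versus identifier symbol, can be sketched over a complete token. Illustrative only; the regex and names are assumptions, and typed-null suffix validation is omitted.

import re

# Ion identifier symbols start with a letter, '_' or '$' and continue with
# letters, digits, '_' or '$'; everything else here is one of the keywords.
_IDENTIFIER = re.compile(r'[A-Za-z_$][A-Za-z0-9_$]*$')
_KEYWORDS = {'null': 'NULL', 'nan': 'FLOAT', 'true': 'BOOL', 'false': 'BOOL'}

def classify_unquoted(token):
    """Maps a complete unquoted token to the Ion type it would produce."""
    if token in _KEYWORDS:
        return _KEYWORDS[token]
    if token.startswith('null.'):
        return 'NULL'  # typed null; suffix validation omitted in this sketch
    if _IDENTIFIER.match(token):
        return 'SYMBOL'
    raise ValueError('Not an unquoted Ion token: %r' % (token,))

assert classify_unquoted('true') == 'BOOL'
assert classify_unquoted('nan') == 'FLOAT'
assert classify_unquoted('my_symbol') == 'SYMBOL'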
amzn/ion-python
amazon/ion/reader_text.py
_inf_or_operator_handler_factory
def _inf_or_operator_handler_factory(c_start, is_delegate=True): """Generates handler co-routines for values that may be `+inf` or `-inf`. Args: c_start (int): The ordinal of the character that starts this token (either `+` or `-`). is_delegate (bool): True if a different handler began processing this token; otherwise, False. This will only be true for `-inf`, because it is not the only value that can start with `-`; `+inf` is the only value (outside of a s-expression) that can start with `+`. """ @coroutine def inf_or_operator_handler(c, ctx): next_ctx = None if not is_delegate: ctx.value.append(c_start) c, self = yield else: assert ctx.value[0] == c_start assert c not in _DIGITS ctx.queue.unread(c) next_ctx = ctx _, self = yield assert c == _ maybe_inf = True ctx.set_ion_type(IonType.FLOAT) match_index = 0 trans = ctx.immediate_transition(self) while True: if maybe_inf: if match_index < len(_INF_SUFFIX): maybe_inf = c == _INF_SUFFIX[match_index] else: if _ends_value(c) or (ctx.container.ion_type is IonType.SEXP and c in _OPERATORS): yield ctx.event_transition( IonEvent, IonEventType.SCALAR, IonType.FLOAT, c_start == _MINUS and _NEG_INF or _POS_INF ) else: maybe_inf = False if maybe_inf: match_index += 1 else: ctx.set_unicode() if match_index > 0: next_ctx = ctx.derive_child_context(ctx.whence) for ch in _INF_SUFFIX[0:match_index]: next_ctx.value.append(ch) break c, self = yield trans if ctx.container is not _C_SEXP: _illegal_character(c, next_ctx is None and ctx or next_ctx, 'Illegal character following %s.' % (_chr(c_start),)) if match_index == 0: if c in _OPERATORS: yield ctx.immediate_transition(_operator_symbol_handler(c, ctx)) yield ctx.event_transition(IonEvent, IonEventType.SCALAR, IonType.SYMBOL, ctx.value.as_symbol()) yield _CompositeTransition( ctx.event_transition(IonEvent, IonEventType.SCALAR, IonType.SYMBOL, ctx.value.as_symbol()), ctx, partial(_unquoted_symbol_handler, c), next_ctx ) return inf_or_operator_handler
python
def _inf_or_operator_handler_factory(c_start, is_delegate=True): @coroutine def inf_or_operator_handler(c, ctx): next_ctx = None if not is_delegate: ctx.value.append(c_start) c, self = yield else: assert ctx.value[0] == c_start assert c not in _DIGITS ctx.queue.unread(c) next_ctx = ctx _, self = yield assert c == _ maybe_inf = True ctx.set_ion_type(IonType.FLOAT) match_index = 0 trans = ctx.immediate_transition(self) while True: if maybe_inf: if match_index < len(_INF_SUFFIX): maybe_inf = c == _INF_SUFFIX[match_index] else: if _ends_value(c) or (ctx.container.ion_type is IonType.SEXP and c in _OPERATORS): yield ctx.event_transition( IonEvent, IonEventType.SCALAR, IonType.FLOAT, c_start == _MINUS and _NEG_INF or _POS_INF ) else: maybe_inf = False if maybe_inf: match_index += 1 else: ctx.set_unicode() if match_index > 0: next_ctx = ctx.derive_child_context(ctx.whence) for ch in _INF_SUFFIX[0:match_index]: next_ctx.value.append(ch) break c, self = yield trans if ctx.container is not _C_SEXP: _illegal_character(c, next_ctx is None and ctx or next_ctx, 'Illegal character following %s.' % (_chr(c_start),)) if match_index == 0: if c in _OPERATORS: yield ctx.immediate_transition(_operator_symbol_handler(c, ctx)) yield ctx.event_transition(IonEvent, IonEventType.SCALAR, IonType.SYMBOL, ctx.value.as_symbol()) yield _CompositeTransition( ctx.event_transition(IonEvent, IonEventType.SCALAR, IonType.SYMBOL, ctx.value.as_symbol()), ctx, partial(_unquoted_symbol_handler, c), next_ctx ) return inf_or_operator_handler
[ "def", "_inf_or_operator_handler_factory", "(", "c_start", ",", "is_delegate", "=", "True", ")", ":", "@", "coroutine", "def", "inf_or_operator_handler", "(", "c", ",", "ctx", ")", ":", "next_ctx", "=", "None", "if", "not", "is_delegate", ":", "ctx", ".", "value", ".", "append", "(", "c_start", ")", "c", ",", "self", "=", "yield", "else", ":", "assert", "ctx", ".", "value", "[", "0", "]", "==", "c_start", "assert", "c", "not", "in", "_DIGITS", "ctx", ".", "queue", ".", "unread", "(", "c", ")", "next_ctx", "=", "ctx", "_", ",", "self", "=", "yield", "assert", "c", "==", "_", "maybe_inf", "=", "True", "ctx", ".", "set_ion_type", "(", "IonType", ".", "FLOAT", ")", "match_index", "=", "0", "trans", "=", "ctx", ".", "immediate_transition", "(", "self", ")", "while", "True", ":", "if", "maybe_inf", ":", "if", "match_index", "<", "len", "(", "_INF_SUFFIX", ")", ":", "maybe_inf", "=", "c", "==", "_INF_SUFFIX", "[", "match_index", "]", "else", ":", "if", "_ends_value", "(", "c", ")", "or", "(", "ctx", ".", "container", ".", "ion_type", "is", "IonType", ".", "SEXP", "and", "c", "in", "_OPERATORS", ")", ":", "yield", "ctx", ".", "event_transition", "(", "IonEvent", ",", "IonEventType", ".", "SCALAR", ",", "IonType", ".", "FLOAT", ",", "c_start", "==", "_MINUS", "and", "_NEG_INF", "or", "_POS_INF", ")", "else", ":", "maybe_inf", "=", "False", "if", "maybe_inf", ":", "match_index", "+=", "1", "else", ":", "ctx", ".", "set_unicode", "(", ")", "if", "match_index", ">", "0", ":", "next_ctx", "=", "ctx", ".", "derive_child_context", "(", "ctx", ".", "whence", ")", "for", "ch", "in", "_INF_SUFFIX", "[", "0", ":", "match_index", "]", ":", "next_ctx", ".", "value", ".", "append", "(", "ch", ")", "break", "c", ",", "self", "=", "yield", "trans", "if", "ctx", ".", "container", "is", "not", "_C_SEXP", ":", "_illegal_character", "(", "c", ",", "next_ctx", "is", "None", "and", "ctx", "or", "next_ctx", ",", "'Illegal character following %s.'", "%", "(", "_chr", "(", "c_start", ")", ",", ")", ")", "if", "match_index", "==", "0", ":", "if", "c", "in", "_OPERATORS", ":", "yield", "ctx", ".", "immediate_transition", "(", "_operator_symbol_handler", "(", "c", ",", "ctx", ")", ")", "yield", "ctx", ".", "event_transition", "(", "IonEvent", ",", "IonEventType", ".", "SCALAR", ",", "IonType", ".", "SYMBOL", ",", "ctx", ".", "value", ".", "as_symbol", "(", ")", ")", "yield", "_CompositeTransition", "(", "ctx", ".", "event_transition", "(", "IonEvent", ",", "IonEventType", ".", "SCALAR", ",", "IonType", ".", "SYMBOL", ",", "ctx", ".", "value", ".", "as_symbol", "(", ")", ")", ",", "ctx", ",", "partial", "(", "_unquoted_symbol_handler", ",", "c", ")", ",", "next_ctx", ")", "return", "inf_or_operator_handler" ]
Generates handler co-routines for values that may be `+inf` or `-inf`. Args: c_start (int): The ordinal of the character that starts this token (either `+` or `-`). is_delegate (bool): True if a different handler began processing this token; otherwise, False. This will only be true for `-inf`, because it is not the only value that can start with `-`; `+inf` is the only value (outside of a s-expression) that can start with `+`.
[ "Generates", "handler", "co", "-", "routines", "for", "values", "that", "may", "be", "+", "inf", "or", "-", "inf", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L1305-L1365
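Outside the coroutine machinery, the `+`/`-` disambiguation described for `_inf_or_operator_handler_factory` amounts to: `+inf` and `-inf` are floats, a leading `-` followed by a digit begins a number, and any other run of operator characters is legal only inside an s-expression. A hedged sketch over complete tokens; the operator character set is quoted from memory of the Ion text spec and `classify_signed_token` is an invented name.

# Rough, non-streaming version of the decision the factory's coroutines make.
_OPERATOR_CHARS = set('!#%&*+-./;<=>?@^`|~')

def classify_signed_token(token, in_sexp=False):
    """Classifies a token that starts with '+' or '-'."""
    if token in ('+inf', '-inf'):
        return 'FLOAT'
    if token.startswith('-') and len(token) > 1 and token[1].isdigit():
        return 'NUMBER'
    if in_sexp and all(ch in _OPERATOR_CHARS for ch in token):
        return 'OPERATOR'
    raise ValueError('Illegal token: %r' % (token,))

assert classify_signed_token('+inf', in_sexp=True) == 'FLOAT'
assert classify_signed_token('-3') == 'NUMBER'
assert classify_signed_token('+', in_sexp=True) == 'OPERATOR'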
amzn/ion-python
amazon/ion/reader_text.py
_operator_symbol_handler
def _operator_symbol_handler(c, ctx): """Handles operator symbol values within s-expressions.""" assert c in _OPERATORS ctx.set_unicode() val = ctx.value val.append(c) c, self = yield trans = ctx.immediate_transition(self) while c in _OPERATORS: val.append(c) c, _ = yield trans yield ctx.event_transition(IonEvent, IonEventType.SCALAR, IonType.SYMBOL, val.as_symbol())
python
def _operator_symbol_handler(c, ctx): assert c in _OPERATORS ctx.set_unicode() val = ctx.value val.append(c) c, self = yield trans = ctx.immediate_transition(self) while c in _OPERATORS: val.append(c) c, _ = yield trans yield ctx.event_transition(IonEvent, IonEventType.SCALAR, IonType.SYMBOL, val.as_symbol())
[ "def", "_operator_symbol_handler", "(", "c", ",", "ctx", ")", ":", "assert", "c", "in", "_OPERATORS", "ctx", ".", "set_unicode", "(", ")", "val", "=", "ctx", ".", "value", "val", ".", "append", "(", "c", ")", "c", ",", "self", "=", "yield", "trans", "=", "ctx", ".", "immediate_transition", "(", "self", ")", "while", "c", "in", "_OPERATORS", ":", "val", ".", "append", "(", "c", ")", "c", ",", "_", "=", "yield", "trans", "yield", "ctx", ".", "event_transition", "(", "IonEvent", ",", "IonEventType", ".", "SCALAR", ",", "IonType", ".", "SYMBOL", ",", "val", ".", "as_symbol", "(", ")", ")" ]
Handles operator symbol values within s-expressions.
[ "Handles", "operator", "symbol", "values", "within", "s", "-", "expressions", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L1373-L1384
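The `_operator_symbol_handler` record is a compact example of the coroutine style shared by all of these handlers: a generator is primed once, then fed one character at a time, and reports either "still accumulating" or a finished token. The toy below reproduces that shape without the reader's Transition types; every name in it is invented for illustration.

def coroutine(func):
    """Primes a generator so it is ready to receive values via send()."""
    def wrapper(*args, **kwargs):
        gen = func(*args, **kwargs)
        next(gen)
        return gen
    return wrapper

@coroutine
def operator_run():
    """Accumulates a run of operator characters; emits the symbol when the run ends."""
    chars = []
    result = None
    while True:
        c = yield result
        if c.isalnum() or c.isspace() or c == '':
            result = ('SYMBOL', ''.join(chars))   # run ended
        else:
            chars.append(c)
            result = None                         # keep accumulating

handler = operator_run()
emitted = None
for ch in '++= ':
    emitted = handler.send(ch)
    if emitted is not None:
        break
assert emitted == ('SYMBOL', '++=')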
amzn/ion-python
amazon/ion/reader_text.py
_symbol_token_end
def _symbol_token_end(c, ctx, is_field_name, value=None): """Returns a transition which ends the current symbol token.""" if value is None: value = ctx.value if is_field_name or c in _SYMBOL_TOKEN_TERMINATORS or ctx.quoted_text: # This might be an annotation or a field name. Mark it as self-delimiting because a symbol token termination # character has been found. ctx.set_self_delimiting(ctx.quoted_text).set_pending_symbol(value).set_quoted_text(False) trans = ctx.immediate_transition(ctx.whence) else: trans = ctx.event_transition(IonEvent, IonEventType.SCALAR, IonType.SYMBOL, _as_symbol(value)) return trans
python
def _symbol_token_end(c, ctx, is_field_name, value=None): if value is None: value = ctx.value if is_field_name or c in _SYMBOL_TOKEN_TERMINATORS or ctx.quoted_text: ctx.set_self_delimiting(ctx.quoted_text).set_pending_symbol(value).set_quoted_text(False) trans = ctx.immediate_transition(ctx.whence) else: trans = ctx.event_transition(IonEvent, IonEventType.SCALAR, IonType.SYMBOL, _as_symbol(value)) return trans
[ "def", "_symbol_token_end", "(", "c", ",", "ctx", ",", "is_field_name", ",", "value", "=", "None", ")", ":", "if", "value", "is", "None", ":", "value", "=", "ctx", ".", "value", "if", "is_field_name", "or", "c", "in", "_SYMBOL_TOKEN_TERMINATORS", "or", "ctx", ".", "quoted_text", ":", "# This might be an annotation or a field name. Mark it as self-delimiting because a symbol token termination", "# character has been found.", "ctx", ".", "set_self_delimiting", "(", "ctx", ".", "quoted_text", ")", ".", "set_pending_symbol", "(", "value", ")", ".", "set_quoted_text", "(", "False", ")", "trans", "=", "ctx", ".", "immediate_transition", "(", "ctx", ".", "whence", ")", "else", ":", "trans", "=", "ctx", ".", "event_transition", "(", "IonEvent", ",", "IonEventType", ".", "SCALAR", ",", "IonType", ".", "SYMBOL", ",", "_as_symbol", "(", "value", ")", ")", "return", "trans" ]
Returns a transition which ends the current symbol token.
[ "Returns", "a", "transition", "which", "ends", "the", "current", "symbol", "token", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L1387-L1398
amzn/ion-python
amazon/ion/reader_text.py
_unquoted_symbol_handler
def _unquoted_symbol_handler(c, ctx, is_field_name=False): """Handles identifier symbol tokens. If in an s-expression, these may be followed without whitespace by operators. """ in_sexp = ctx.container.ion_type is IonType.SEXP ctx.set_unicode() if c not in _IDENTIFIER_CHARACTERS: if in_sexp and c in _OPERATORS: c_next, _ = yield ctx.queue.unread(c_next) assert ctx.value yield _CompositeTransition( ctx.event_transition(IonEvent, IonEventType.SCALAR, IonType.SYMBOL, ctx.value.as_symbol()), ctx, partial(_operator_symbol_handler, c) ) _illegal_character(c, ctx.set_ion_type(IonType.SYMBOL)) val = ctx.value val.append(c) prev = c c, self = yield trans = ctx.immediate_transition(self) while True: if c not in _WHITESPACE: if prev in _WHITESPACE or _ends_value(c) or c == _COLON or (in_sexp and c in _OPERATORS): break if c not in _IDENTIFIER_CHARACTERS: _illegal_character(c, ctx.set_ion_type(IonType.SYMBOL)) val.append(c) prev = c c, _ = yield trans yield _symbol_token_end(c, ctx, is_field_name)
python
def _unquoted_symbol_handler(c, ctx, is_field_name=False): in_sexp = ctx.container.ion_type is IonType.SEXP ctx.set_unicode() if c not in _IDENTIFIER_CHARACTERS: if in_sexp and c in _OPERATORS: c_next, _ = yield ctx.queue.unread(c_next) assert ctx.value yield _CompositeTransition( ctx.event_transition(IonEvent, IonEventType.SCALAR, IonType.SYMBOL, ctx.value.as_symbol()), ctx, partial(_operator_symbol_handler, c) ) _illegal_character(c, ctx.set_ion_type(IonType.SYMBOL)) val = ctx.value val.append(c) prev = c c, self = yield trans = ctx.immediate_transition(self) while True: if c not in _WHITESPACE: if prev in _WHITESPACE or _ends_value(c) or c == _COLON or (in_sexp and c in _OPERATORS): break if c not in _IDENTIFIER_CHARACTERS: _illegal_character(c, ctx.set_ion_type(IonType.SYMBOL)) val.append(c) prev = c c, _ = yield trans yield _symbol_token_end(c, ctx, is_field_name)
[ "def", "_unquoted_symbol_handler", "(", "c", ",", "ctx", ",", "is_field_name", "=", "False", ")", ":", "in_sexp", "=", "ctx", ".", "container", ".", "ion_type", "is", "IonType", ".", "SEXP", "ctx", ".", "set_unicode", "(", ")", "if", "c", "not", "in", "_IDENTIFIER_CHARACTERS", ":", "if", "in_sexp", "and", "c", "in", "_OPERATORS", ":", "c_next", ",", "_", "=", "yield", "ctx", ".", "queue", ".", "unread", "(", "c_next", ")", "assert", "ctx", ".", "value", "yield", "_CompositeTransition", "(", "ctx", ".", "event_transition", "(", "IonEvent", ",", "IonEventType", ".", "SCALAR", ",", "IonType", ".", "SYMBOL", ",", "ctx", ".", "value", ".", "as_symbol", "(", ")", ")", ",", "ctx", ",", "partial", "(", "_operator_symbol_handler", ",", "c", ")", ")", "_illegal_character", "(", "c", ",", "ctx", ".", "set_ion_type", "(", "IonType", ".", "SYMBOL", ")", ")", "val", "=", "ctx", ".", "value", "val", ".", "append", "(", "c", ")", "prev", "=", "c", "c", ",", "self", "=", "yield", "trans", "=", "ctx", ".", "immediate_transition", "(", "self", ")", "while", "True", ":", "if", "c", "not", "in", "_WHITESPACE", ":", "if", "prev", "in", "_WHITESPACE", "or", "_ends_value", "(", "c", ")", "or", "c", "==", "_COLON", "or", "(", "in_sexp", "and", "c", "in", "_OPERATORS", ")", ":", "break", "if", "c", "not", "in", "_IDENTIFIER_CHARACTERS", ":", "_illegal_character", "(", "c", ",", "ctx", ".", "set_ion_type", "(", "IonType", ".", "SYMBOL", ")", ")", "val", ".", "append", "(", "c", ")", "prev", "=", "c", "c", ",", "_", "=", "yield", "trans", "yield", "_symbol_token_end", "(", "c", ",", "ctx", ",", "is_field_name", ")" ]
Handles identifier symbol tokens. If in an s-expression, these may be followed without whitespace by operators.
[ "Handles", "identifier", "symbol", "tokens", ".", "If", "in", "an", "s", "-", "expression", "these", "may", "be", "followed", "without", "whitespace", "by", "operators", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L1402-L1433
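The note in `_unquoted_symbol_handler` that identifier symbols in an s-expression may be followed by operators without intervening whitespace means `foo+bar` holds three tokens. A throwaway regex split shows the effect; this is an illustration only, not how the streaming reader tokenizes, and the operator character class is an assumption.

import re

_SEXP_TOKEN = re.compile(r'[A-Za-z0-9_$]+|[!#%&*+\-./;<=>?@^`|~]+')

def split_sexp_tokens(text):
    """Splits identifier runs from operator runs, as they would appear in a sexp."""
    return _SEXP_TOKEN.findall(text)

assert split_sexp_tokens('foo+bar') == ['foo', '+', 'bar']
assert split_sexp_tokens('a==b') == ['a', '==', 'b']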
amzn/ion-python
amazon/ion/reader_text.py
_symbol_identifier_or_unquoted_symbol_handler
def _symbol_identifier_or_unquoted_symbol_handler(c, ctx, is_field_name=False): """Handles symbol tokens that begin with a dollar sign. These may end up being system symbols ($ion_*), symbol identifiers ('$' DIGITS+), or regular unquoted symbols. """ assert c == _DOLLAR_SIGN in_sexp = ctx.container.ion_type is IonType.SEXP ctx.set_unicode().set_ion_type(IonType.SYMBOL) val = ctx.value val.append(c) prev = c c, self = yield trans = ctx.immediate_transition(self) maybe_ivm = ctx.depth == 0 and not is_field_name and not ctx.annotations complete_ivm = False maybe_symbol_identifier = True match_index = 1 ivm_post_underscore = False while True: if c not in _WHITESPACE: if prev in _WHITESPACE or _ends_value(c) or c == _COLON or (in_sexp and c in _OPERATORS): break maybe_symbol_identifier = maybe_symbol_identifier and c in _DIGITS if maybe_ivm: if match_index == len(_IVM_PREFIX): if c in _DIGITS: if ivm_post_underscore: complete_ivm = True elif c == _UNDERSCORE and not ivm_post_underscore: ivm_post_underscore = True else: maybe_ivm = False complete_ivm = False else: maybe_ivm = c == _IVM_PREFIX[match_index] if maybe_ivm: if match_index < len(_IVM_PREFIX): match_index += 1 elif not maybe_symbol_identifier: yield ctx.immediate_transition(_unquoted_symbol_handler(c, ctx, is_field_name)) val.append(c) elif match_index < len(_IVM_PREFIX): maybe_ivm = False prev = c c, _ = yield trans if len(val) == 1: assert val[0] == _chr(_DOLLAR_SIGN) elif maybe_symbol_identifier: assert not maybe_ivm sid = int(val[1:]) val = SymbolToken(None, sid) elif complete_ivm: val = _IVMToken(*val.as_symbol()) yield _symbol_token_end(c, ctx, is_field_name, value=val)
python
def _symbol_identifier_or_unquoted_symbol_handler(c, ctx, is_field_name=False): assert c == _DOLLAR_SIGN in_sexp = ctx.container.ion_type is IonType.SEXP ctx.set_unicode().set_ion_type(IonType.SYMBOL) val = ctx.value val.append(c) prev = c c, self = yield trans = ctx.immediate_transition(self) maybe_ivm = ctx.depth == 0 and not is_field_name and not ctx.annotations complete_ivm = False maybe_symbol_identifier = True match_index = 1 ivm_post_underscore = False while True: if c not in _WHITESPACE: if prev in _WHITESPACE or _ends_value(c) or c == _COLON or (in_sexp and c in _OPERATORS): break maybe_symbol_identifier = maybe_symbol_identifier and c in _DIGITS if maybe_ivm: if match_index == len(_IVM_PREFIX): if c in _DIGITS: if ivm_post_underscore: complete_ivm = True elif c == _UNDERSCORE and not ivm_post_underscore: ivm_post_underscore = True else: maybe_ivm = False complete_ivm = False else: maybe_ivm = c == _IVM_PREFIX[match_index] if maybe_ivm: if match_index < len(_IVM_PREFIX): match_index += 1 elif not maybe_symbol_identifier: yield ctx.immediate_transition(_unquoted_symbol_handler(c, ctx, is_field_name)) val.append(c) elif match_index < len(_IVM_PREFIX): maybe_ivm = False prev = c c, _ = yield trans if len(val) == 1: assert val[0] == _chr(_DOLLAR_SIGN) elif maybe_symbol_identifier: assert not maybe_ivm sid = int(val[1:]) val = SymbolToken(None, sid) elif complete_ivm: val = _IVMToken(*val.as_symbol()) yield _symbol_token_end(c, ctx, is_field_name, value=val)
[ "def", "_symbol_identifier_or_unquoted_symbol_handler", "(", "c", ",", "ctx", ",", "is_field_name", "=", "False", ")", ":", "assert", "c", "==", "_DOLLAR_SIGN", "in_sexp", "=", "ctx", ".", "container", ".", "ion_type", "is", "IonType", ".", "SEXP", "ctx", ".", "set_unicode", "(", ")", ".", "set_ion_type", "(", "IonType", ".", "SYMBOL", ")", "val", "=", "ctx", ".", "value", "val", ".", "append", "(", "c", ")", "prev", "=", "c", "c", ",", "self", "=", "yield", "trans", "=", "ctx", ".", "immediate_transition", "(", "self", ")", "maybe_ivm", "=", "ctx", ".", "depth", "==", "0", "and", "not", "is_field_name", "and", "not", "ctx", ".", "annotations", "complete_ivm", "=", "False", "maybe_symbol_identifier", "=", "True", "match_index", "=", "1", "ivm_post_underscore", "=", "False", "while", "True", ":", "if", "c", "not", "in", "_WHITESPACE", ":", "if", "prev", "in", "_WHITESPACE", "or", "_ends_value", "(", "c", ")", "or", "c", "==", "_COLON", "or", "(", "in_sexp", "and", "c", "in", "_OPERATORS", ")", ":", "break", "maybe_symbol_identifier", "=", "maybe_symbol_identifier", "and", "c", "in", "_DIGITS", "if", "maybe_ivm", ":", "if", "match_index", "==", "len", "(", "_IVM_PREFIX", ")", ":", "if", "c", "in", "_DIGITS", ":", "if", "ivm_post_underscore", ":", "complete_ivm", "=", "True", "elif", "c", "==", "_UNDERSCORE", "and", "not", "ivm_post_underscore", ":", "ivm_post_underscore", "=", "True", "else", ":", "maybe_ivm", "=", "False", "complete_ivm", "=", "False", "else", ":", "maybe_ivm", "=", "c", "==", "_IVM_PREFIX", "[", "match_index", "]", "if", "maybe_ivm", ":", "if", "match_index", "<", "len", "(", "_IVM_PREFIX", ")", ":", "match_index", "+=", "1", "elif", "not", "maybe_symbol_identifier", ":", "yield", "ctx", ".", "immediate_transition", "(", "_unquoted_symbol_handler", "(", "c", ",", "ctx", ",", "is_field_name", ")", ")", "val", ".", "append", "(", "c", ")", "elif", "match_index", "<", "len", "(", "_IVM_PREFIX", ")", ":", "maybe_ivm", "=", "False", "prev", "=", "c", "c", ",", "_", "=", "yield", "trans", "if", "len", "(", "val", ")", "==", "1", ":", "assert", "val", "[", "0", "]", "==", "_chr", "(", "_DOLLAR_SIGN", ")", "elif", "maybe_symbol_identifier", ":", "assert", "not", "maybe_ivm", "sid", "=", "int", "(", "val", "[", "1", ":", "]", ")", "val", "=", "SymbolToken", "(", "None", ",", "sid", ")", "elif", "complete_ivm", ":", "val", "=", "_IVMToken", "(", "*", "val", ".", "as_symbol", "(", ")", ")", "yield", "_symbol_token_end", "(", "c", ",", "ctx", ",", "is_field_name", ",", "value", "=", "val", ")" ]
Handles symbol tokens that begin with a dollar sign. These may end up being system symbols ($ion_*), symbol identifiers ('$' DIGITS+), or regular unquoted symbols.
[ "Handles", "symbol", "tokens", "that", "begin", "with", "a", "dollar", "sign", ".", "These", "may", "end", "up", "being", "system", "symbols", "(", "$ion_", "*", ")", "symbol", "identifiers", "(", "$", "DIGITS", "+", ")", "or", "regular", "unquoted", "symbols", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L1457-L1509
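The three-way split documented for `$`-prefixed tokens, `$` plus digits is a symbol identifier (SID), `$ion_<major>_<minor>` is an IVM at the top level, and anything else is an ordinary symbol, sketched over complete tokens; the helper name and regexes are assumptions.

import re

_SID = re.compile(r'\$(\d+)$')
_IVM = re.compile(r'\$ion_(\d+)_(\d+)$')

def classify_dollar_token(token, at_top_level=True):
    """Classifies a complete token that begins with '$'."""
    m = _SID.match(token)
    if m:
        return ('SID', int(m.group(1)))
    m = _IVM.match(token)
    if m and at_top_level:
        return ('IVM', (int(m.group(1)), int(m.group(2))))
    return ('SYMBOL', token)

assert classify_dollar_token('$10') == ('SID', 10)
assert classify_dollar_token('$ion_1_0') == ('IVM', (1, 0))
assert classify_dollar_token('$foo') == ('SYMBOL', '$foo')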
amzn/ion-python
amazon/ion/reader_text.py
_quoted_text_handler_factory
def _quoted_text_handler_factory(delimiter, assertion, before, after, append_first=True, on_close=lambda ctx: None): """Generates handlers for quoted text tokens (either short strings or quoted symbols). Args: delimiter (int): Ordinal of the quoted text's delimiter. assertion (callable): Accepts the first character's ordinal, returning True if that character is a legal beginning to the token. before (callable): Called upon initialization. Accepts the first character's ordinal, the current context, True if the token is a field name, and True if the token is a clob; returns the token's current value and True if ``on_close`` should be called upon termination of the token. after (callable): Called after termination of the token. Accepts the final character's ordinal, the current context, and True if the token is a field name; returns a Transition. append_first (Optional[bool]): True if the first character the coroutine receives is part of the text data, and should therefore be appended to the value; otherwise, False (in which case, the first character must be the delimiter). on_close (Optional[callable]): Called upon termination of the token (before ``after``), if ``before`` indicated that ``on_close`` should be called. Accepts the current context and returns a Transition. This is useful for yielding a different kind of Transition based on initialization parameters given to ``before`` (e.g. string vs. clob). """ @coroutine def quoted_text_handler(c, ctx, is_field_name=False): assert assertion(c) def append(): if not _is_escaped_newline(c): val.append(c) is_clob = ctx.ion_type is IonType.CLOB max_char = _MAX_CLOB_CHAR if is_clob else _MAX_TEXT_CHAR ctx.set_unicode(quoted_text=True) val, event_on_close = before(c, ctx, is_field_name, is_clob) if append_first: append() c, self = yield trans = ctx.immediate_transition(self) done = False while not done: if c == delimiter and not _is_escaped(c): done = True if event_on_close: trans = on_close(ctx) else: break else: _validate_short_quoted_text(c, ctx, max_char) append() c, _ = yield trans yield after(c, ctx, is_field_name) return quoted_text_handler
python
def _quoted_text_handler_factory(delimiter, assertion, before, after, append_first=True, on_close=lambda ctx: None): @coroutine def quoted_text_handler(c, ctx, is_field_name=False): assert assertion(c) def append(): if not _is_escaped_newline(c): val.append(c) is_clob = ctx.ion_type is IonType.CLOB max_char = _MAX_CLOB_CHAR if is_clob else _MAX_TEXT_CHAR ctx.set_unicode(quoted_text=True) val, event_on_close = before(c, ctx, is_field_name, is_clob) if append_first: append() c, self = yield trans = ctx.immediate_transition(self) done = False while not done: if c == delimiter and not _is_escaped(c): done = True if event_on_close: trans = on_close(ctx) else: break else: _validate_short_quoted_text(c, ctx, max_char) append() c, _ = yield trans yield after(c, ctx, is_field_name) return quoted_text_handler
[ "def", "_quoted_text_handler_factory", "(", "delimiter", ",", "assertion", ",", "before", ",", "after", ",", "append_first", "=", "True", ",", "on_close", "=", "lambda", "ctx", ":", "None", ")", ":", "@", "coroutine", "def", "quoted_text_handler", "(", "c", ",", "ctx", ",", "is_field_name", "=", "False", ")", ":", "assert", "assertion", "(", "c", ")", "def", "append", "(", ")", ":", "if", "not", "_is_escaped_newline", "(", "c", ")", ":", "val", ".", "append", "(", "c", ")", "is_clob", "=", "ctx", ".", "ion_type", "is", "IonType", ".", "CLOB", "max_char", "=", "_MAX_CLOB_CHAR", "if", "is_clob", "else", "_MAX_TEXT_CHAR", "ctx", ".", "set_unicode", "(", "quoted_text", "=", "True", ")", "val", ",", "event_on_close", "=", "before", "(", "c", ",", "ctx", ",", "is_field_name", ",", "is_clob", ")", "if", "append_first", ":", "append", "(", ")", "c", ",", "self", "=", "yield", "trans", "=", "ctx", ".", "immediate_transition", "(", "self", ")", "done", "=", "False", "while", "not", "done", ":", "if", "c", "==", "delimiter", "and", "not", "_is_escaped", "(", "c", ")", ":", "done", "=", "True", "if", "event_on_close", ":", "trans", "=", "on_close", "(", "ctx", ")", "else", ":", "break", "else", ":", "_validate_short_quoted_text", "(", "c", ",", "ctx", ",", "max_char", ")", "append", "(", ")", "c", ",", "_", "=", "yield", "trans", "yield", "after", "(", "c", ",", "ctx", ",", "is_field_name", ")", "return", "quoted_text_handler" ]
Generates handlers for quoted text tokens (either short strings or quoted symbols). Args: delimiter (int): Ordinal of the quoted text's delimiter. assertion (callable): Accepts the first character's ordinal, returning True if that character is a legal beginning to the token. before (callable): Called upon initialization. Accepts the first character's ordinal, the current context, True if the token is a field name, and True if the token is a clob; returns the token's current value and True if ``on_close`` should be called upon termination of the token. after (callable): Called after termination of the token. Accepts the final character's ordinal, the current context, and True if the token is a field name; returns a Transition. append_first (Optional[bool]): True if the first character the coroutine receives is part of the text data, and should therefore be appended to the value; otherwise, False (in which case, the first character must be the delimiter). on_close (Optional[callable]): Called upon termination of the token (before ``after``), if ``before`` indicated that ``on_close`` should be called. Accepts the current context and returns a Transition. This is useful for yielding a different kind of Transition based on initialization parameters given to ``before`` (e.g. string vs. clob).
[ "Generates", "handlers", "for", "quoted", "text", "tokens", "(", "either", "short", "strings", "or", "quoted", "symbols", ")", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L1515-L1564
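The factory shape `_quoted_text_handler_factory` documents, close over a delimiter plus `before`/`after` hooks and hand back a reusable handler, can be shown without the coroutine plumbing. A toy sketch; the real factory returns a primed coroutine and deals in Transition objects and escape sequences, none of which appear here.

def quoted_text_handler_factory(delimiter, before, after):
    """Returns a handler that reads characters until `delimiter`, then finalizes."""
    def handler(first, feed):
        value = before(first)
        for c in feed:
            if c == delimiter:
                return after(value)
            value.append(c)
        raise ValueError('Unterminated text; expected %r' % (delimiter,))
    return handler

# Example: build a short-string handler for double-quoted text.
short_string = quoted_text_handler_factory(
    '"',
    before=lambda first: [],            # the opening quote is not part of the value
    after=lambda value: ''.join(value),
)
assert short_string('"', iter('hello"')) == 'hello'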
amzn/ion-python
amazon/ion/reader_text.py
_short_string_handler_factory
def _short_string_handler_factory(): """Generates the short string (double quoted) handler.""" def before(c, ctx, is_field_name, is_clob): assert not (is_clob and is_field_name) is_string = not is_clob and not is_field_name if is_string: ctx.set_ion_type(IonType.STRING) val = ctx.value if is_field_name: assert not val ctx.set_pending_symbol() val = ctx.pending_symbol return val, is_string def on_close(ctx): ctx.set_self_delimiting(True) return ctx.event_transition(IonEvent, IonEventType.SCALAR, ctx.ion_type, ctx.value.as_text()) def after(c, ctx, is_field_name): ctx.set_quoted_text(False).set_self_delimiting(True) return ctx.immediate_transition( ctx.whence if is_field_name else _clob_end_handler(c, ctx), ) return _quoted_text_handler_factory(_DOUBLE_QUOTE, lambda c: c == _DOUBLE_QUOTE, before, after, append_first=False, on_close=on_close)
python
def _short_string_handler_factory(): def before(c, ctx, is_field_name, is_clob): assert not (is_clob and is_field_name) is_string = not is_clob and not is_field_name if is_string: ctx.set_ion_type(IonType.STRING) val = ctx.value if is_field_name: assert not val ctx.set_pending_symbol() val = ctx.pending_symbol return val, is_string def on_close(ctx): ctx.set_self_delimiting(True) return ctx.event_transition(IonEvent, IonEventType.SCALAR, ctx.ion_type, ctx.value.as_text()) def after(c, ctx, is_field_name): ctx.set_quoted_text(False).set_self_delimiting(True) return ctx.immediate_transition( ctx.whence if is_field_name else _clob_end_handler(c, ctx), ) return _quoted_text_handler_factory(_DOUBLE_QUOTE, lambda c: c == _DOUBLE_QUOTE, before, after, append_first=False, on_close=on_close)
[ "def", "_short_string_handler_factory", "(", ")", ":", "def", "before", "(", "c", ",", "ctx", ",", "is_field_name", ",", "is_clob", ")", ":", "assert", "not", "(", "is_clob", "and", "is_field_name", ")", "is_string", "=", "not", "is_clob", "and", "not", "is_field_name", "if", "is_string", ":", "ctx", ".", "set_ion_type", "(", "IonType", ".", "STRING", ")", "val", "=", "ctx", ".", "value", "if", "is_field_name", ":", "assert", "not", "val", "ctx", ".", "set_pending_symbol", "(", ")", "val", "=", "ctx", ".", "pending_symbol", "return", "val", ",", "is_string", "def", "on_close", "(", "ctx", ")", ":", "ctx", ".", "set_self_delimiting", "(", "True", ")", "return", "ctx", ".", "event_transition", "(", "IonEvent", ",", "IonEventType", ".", "SCALAR", ",", "ctx", ".", "ion_type", ",", "ctx", ".", "value", ".", "as_text", "(", ")", ")", "def", "after", "(", "c", ",", "ctx", ",", "is_field_name", ")", ":", "ctx", ".", "set_quoted_text", "(", "False", ")", ".", "set_self_delimiting", "(", "True", ")", "return", "ctx", ".", "immediate_transition", "(", "ctx", ".", "whence", "if", "is_field_name", "else", "_clob_end_handler", "(", "c", ",", "ctx", ")", ",", ")", "return", "_quoted_text_handler_factory", "(", "_DOUBLE_QUOTE", ",", "lambda", "c", ":", "c", "==", "_DOUBLE_QUOTE", ",", "before", ",", "after", ",", "append_first", "=", "False", ",", "on_close", "=", "on_close", ")" ]
Generates the short string (double quoted) handler.
[ "Generates", "the", "short", "string", "(", "double", "quoted", ")", "handler", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L1567-L1592
amzn/ion-python
amazon/ion/reader_text.py
_quoted_symbol_handler_factory
def _quoted_symbol_handler_factory(): """Generates the quoted symbol (single quoted) handler.""" def before(c, ctx, is_field_name, is_clob): assert not is_clob _validate_short_quoted_text(c, ctx, _MAX_TEXT_CHAR) return ctx.value, False return _quoted_text_handler_factory( _SINGLE_QUOTE, lambda c: (c != _SINGLE_QUOTE or _is_escaped(c)), before, _symbol_token_end, )
python
def _quoted_symbol_handler_factory(): def before(c, ctx, is_field_name, is_clob): assert not is_clob _validate_short_quoted_text(c, ctx, _MAX_TEXT_CHAR) return ctx.value, False return _quoted_text_handler_factory( _SINGLE_QUOTE, lambda c: (c != _SINGLE_QUOTE or _is_escaped(c)), before, _symbol_token_end, )
[ "def", "_quoted_symbol_handler_factory", "(", ")", ":", "def", "before", "(", "c", ",", "ctx", ",", "is_field_name", ",", "is_clob", ")", ":", "assert", "not", "is_clob", "_validate_short_quoted_text", "(", "c", ",", "ctx", ",", "_MAX_TEXT_CHAR", ")", "return", "ctx", ".", "value", ",", "False", "return", "_quoted_text_handler_factory", "(", "_SINGLE_QUOTE", ",", "lambda", "c", ":", "(", "c", "!=", "_SINGLE_QUOTE", "or", "_is_escaped", "(", "c", ")", ")", ",", "before", ",", "_symbol_token_end", ",", ")" ]
Generates the quoted symbol (single quoted) handler.
[ "Generates", "the", "quoted", "symbol", "(", "single", "quoted", ")", "handler", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L1598-L1610
amzn/ion-python
amazon/ion/reader_text.py
_single_quote_handler_factory
def _single_quote_handler_factory(on_single_quote, on_other): """Generates handlers used for classifying tokens that begin with one or more single quotes. Args: on_single_quote (callable): Called when another single quote is found. Accepts the current character's ordinal, the current context, and True if the token is a field name; returns a Transition. on_other (callable): Called when any character other than a single quote is found. Accepts the current character's ordinal, the current context, and True if the token is a field name; returns a Transition. """ @coroutine def single_quote_handler(c, ctx, is_field_name=False): assert c == _SINGLE_QUOTE c, self = yield if c == _SINGLE_QUOTE and not _is_escaped(c): yield on_single_quote(c, ctx, is_field_name) else: ctx.set_unicode(quoted_text=True) yield on_other(c, ctx, is_field_name) return single_quote_handler
python
def _single_quote_handler_factory(on_single_quote, on_other): @coroutine def single_quote_handler(c, ctx, is_field_name=False): assert c == _SINGLE_QUOTE c, self = yield if c == _SINGLE_QUOTE and not _is_escaped(c): yield on_single_quote(c, ctx, is_field_name) else: ctx.set_unicode(quoted_text=True) yield on_other(c, ctx, is_field_name) return single_quote_handler
[ "def", "_single_quote_handler_factory", "(", "on_single_quote", ",", "on_other", ")", ":", "@", "coroutine", "def", "single_quote_handler", "(", "c", ",", "ctx", ",", "is_field_name", "=", "False", ")", ":", "assert", "c", "==", "_SINGLE_QUOTE", "c", ",", "self", "=", "yield", "if", "c", "==", "_SINGLE_QUOTE", "and", "not", "_is_escaped", "(", "c", ")", ":", "yield", "on_single_quote", "(", "c", ",", "ctx", ",", "is_field_name", ")", "else", ":", "ctx", ".", "set_unicode", "(", "quoted_text", "=", "True", ")", "yield", "on_other", "(", "c", ",", "ctx", ",", "is_field_name", ")", "return", "single_quote_handler" ]
Generates handlers used for classifying tokens that begin with one or more single quotes. Args: on_single_quote (callable): Called when another single quote is found. Accepts the current character's ordinal, the current context, and True if the token is a field name; returns a Transition. on_other (callable): Called when any character other than a single quote is found. Accepts the current character's ordinal, the current context, and True if the token is a field name; returns a Transition.
[ "Generates", "handlers", "used", "for", "classifying", "tokens", "that", "begin", "with", "one", "or", "more", "single", "quotes", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L1615-L1633
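The branch that `_single_quote_handler_factory`'s handlers encode is small enough to state directly: after one opening quote, a second quote means either the empty symbol `''` or, with a third quote, the start of a long string; any other character begins a quoted symbol. A sketch over a two-character lookahead; the helper is invented and escapes are ignored.

def classify_after_single_quote(next_two):
    """`next_two` is the two characters that follow the first single quote."""
    if next_two[:1] == "'":
        return 'LONG_STRING' if next_two[1:2] == "'" else 'EMPTY_SYMBOL'
    return 'QUOTED_SYMBOL'

assert classify_after_single_quote("''") == 'LONG_STRING'    # three quotes seen in total
assert classify_after_single_quote("' ") == 'EMPTY_SYMBOL'   # two quotes, then whitespace
assert classify_after_single_quote("ab") == 'QUOTED_SYMBOL'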
amzn/ion-python
amazon/ion/reader_text.py
_struct_or_lob_handler
def _struct_or_lob_handler(c, ctx): """Handles tokens that begin with an open brace.""" assert c == _OPEN_BRACE c, self = yield yield ctx.immediate_transition(_STRUCT_OR_LOB_TABLE[c](c, ctx))
python
def _struct_or_lob_handler(c, ctx): assert c == _OPEN_BRACE c, self = yield yield ctx.immediate_transition(_STRUCT_OR_LOB_TABLE[c](c, ctx))
[ "def", "_struct_or_lob_handler", "(", "c", ",", "ctx", ")", ":", "assert", "c", "==", "_OPEN_BRACE", "c", ",", "self", "=", "yield", "yield", "ctx", ".", "immediate_transition", "(", "_STRUCT_OR_LOB_TABLE", "[", "c", "]", "(", "c", ",", "ctx", ")", ")" ]
Handles tokens that begin with an open brace.
[ "Handles", "tokens", "that", "begin", "with", "an", "open", "brace", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L1651-L1655
amzn/ion-python
amazon/ion/reader_text.py
_lob_start_handler
def _lob_start_handler(c, ctx): """Handles tokens that begin with two open braces.""" assert c == _OPEN_BRACE c, self = yield trans = ctx.immediate_transition(self) quotes = 0 while True: if c in _WHITESPACE: if quotes > 0: _illegal_character(c, ctx) elif c == _DOUBLE_QUOTE: if quotes > 0: _illegal_character(c, ctx) ctx.set_ion_type(IonType.CLOB).set_unicode(quoted_text=True) yield ctx.immediate_transition(_short_string_handler(c, ctx)) elif c == _SINGLE_QUOTE: if not quotes: ctx.set_ion_type(IonType.CLOB).set_unicode(quoted_text=True) quotes += 1 if quotes == 3: yield ctx.immediate_transition(_long_string_handler(c, ctx)) else: yield ctx.immediate_transition(_blob_end_handler(c, ctx)) c, _ = yield trans
python
def _lob_start_handler(c, ctx): assert c == _OPEN_BRACE c, self = yield trans = ctx.immediate_transition(self) quotes = 0 while True: if c in _WHITESPACE: if quotes > 0: _illegal_character(c, ctx) elif c == _DOUBLE_QUOTE: if quotes > 0: _illegal_character(c, ctx) ctx.set_ion_type(IonType.CLOB).set_unicode(quoted_text=True) yield ctx.immediate_transition(_short_string_handler(c, ctx)) elif c == _SINGLE_QUOTE: if not quotes: ctx.set_ion_type(IonType.CLOB).set_unicode(quoted_text=True) quotes += 1 if quotes == 3: yield ctx.immediate_transition(_long_string_handler(c, ctx)) else: yield ctx.immediate_transition(_blob_end_handler(c, ctx)) c, _ = yield trans
[ "def", "_lob_start_handler", "(", "c", ",", "ctx", ")", ":", "assert", "c", "==", "_OPEN_BRACE", "c", ",", "self", "=", "yield", "trans", "=", "ctx", ".", "immediate_transition", "(", "self", ")", "quotes", "=", "0", "while", "True", ":", "if", "c", "in", "_WHITESPACE", ":", "if", "quotes", ">", "0", ":", "_illegal_character", "(", "c", ",", "ctx", ")", "elif", "c", "==", "_DOUBLE_QUOTE", ":", "if", "quotes", ">", "0", ":", "_illegal_character", "(", "c", ",", "ctx", ")", "ctx", ".", "set_ion_type", "(", "IonType", ".", "CLOB", ")", ".", "set_unicode", "(", "quoted_text", "=", "True", ")", "yield", "ctx", ".", "immediate_transition", "(", "_short_string_handler", "(", "c", ",", "ctx", ")", ")", "elif", "c", "==", "_SINGLE_QUOTE", ":", "if", "not", "quotes", ":", "ctx", ".", "set_ion_type", "(", "IonType", ".", "CLOB", ")", ".", "set_unicode", "(", "quoted_text", "=", "True", ")", "quotes", "+=", "1", "if", "quotes", "==", "3", ":", "yield", "ctx", ".", "immediate_transition", "(", "_long_string_handler", "(", "c", ",", "ctx", ")", ")", "else", ":", "yield", "ctx", ".", "immediate_transition", "(", "_blob_end_handler", "(", "c", ",", "ctx", ")", ")", "c", ",", "_", "=", "yield", "trans" ]
Handles tokens that begin with two open braces.
[ "Handles", "tokens", "that", "begin", "with", "two", "open", "braces", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L1677-L1700
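After `{{`, the `_lob_start_handler` record distinguishes quoted content (a clob) from base64 text (a blob). A simplified, non-streaming sketch of that decision; the real handler validates characters and padding incrementally, and `classify_lob_body` is an invented name.

import base64
import re

def classify_lob_body(body):
    """`body` is the text between '{{' and '}}'."""
    stripped = body.strip()
    if stripped.startswith('"') or stripped.startswith("'''"):
        return ('CLOB', stripped)
    data = re.sub(r'\s+', '', body)          # base64 text may contain whitespace
    return ('BLOB', base64.b64decode(data, validate=True))

assert classify_lob_body(' "a clob" ')[0] == 'CLOB'
assert classify_lob_body(' aGVsbG8= ') == ('BLOB', b'hello')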
amzn/ion-python
amazon/ion/reader_text.py
_lob_end_handler_factory
def _lob_end_handler_factory(ion_type, action, validate=lambda c, ctx, action_res: None): """Generates handlers for the end of blob or clob values. Args: ion_type (IonType): The type of this lob (either blob or clob). action (callable): Called for each non-whitespace, non-closing brace character encountered before the end of the lob. Accepts the current character's ordinal, the current context, the previous character's ordinal, the result of the previous call to ``action`` (if any), and True if this is the first call to ``action``. Returns any state that will be needed by subsequent calls to ``action``. For blobs, this should validate the character is valid base64; for clobs, this should ensure there are no illegal characters (e.g. comments) between the end of the data and the end of the clob. validate (Optional[callable]): Called once the second closing brace has been found. Accepts the current character's ordinal, the current context, and the result of the last call to ``action``; raises an error if this is not a valid lob value. """ assert ion_type is IonType.BLOB or ion_type is IonType.CLOB @coroutine def lob_end_handler(c, ctx): val = ctx.value prev = c action_res = None if c != _CLOSE_BRACE and c not in _WHITESPACE: action_res = action(c, ctx, prev, action_res, True) c, self = yield trans = ctx.immediate_transition(self) while True: if c in _WHITESPACE: if prev == _CLOSE_BRACE: _illegal_character(c, ctx.set_ion_type(ion_type), 'Expected }.') elif c == _CLOSE_BRACE: if prev == _CLOSE_BRACE: validate(c, ctx, action_res) break else: action_res = action(c, ctx, prev, action_res, False) prev = c c, _ = yield trans ctx.set_self_delimiting(True) # Lob values are self-delimiting (they are terminated by '}}'). yield ctx.event_transition(IonThunkEvent, IonEventType.SCALAR, ion_type, _parse_lob(ion_type, val)) return lob_end_handler
python
def _lob_end_handler_factory(ion_type, action, validate=lambda c, ctx, action_res: None): assert ion_type is IonType.BLOB or ion_type is IonType.CLOB @coroutine def lob_end_handler(c, ctx): val = ctx.value prev = c action_res = None if c != _CLOSE_BRACE and c not in _WHITESPACE: action_res = action(c, ctx, prev, action_res, True) c, self = yield trans = ctx.immediate_transition(self) while True: if c in _WHITESPACE: if prev == _CLOSE_BRACE: _illegal_character(c, ctx.set_ion_type(ion_type), 'Expected }.') elif c == _CLOSE_BRACE: if prev == _CLOSE_BRACE: validate(c, ctx, action_res) break else: action_res = action(c, ctx, prev, action_res, False) prev = c c, _ = yield trans ctx.set_self_delimiting(True) yield ctx.event_transition(IonThunkEvent, IonEventType.SCALAR, ion_type, _parse_lob(ion_type, val)) return lob_end_handler
[ "def", "_lob_end_handler_factory", "(", "ion_type", ",", "action", ",", "validate", "=", "lambda", "c", ",", "ctx", ",", "action_res", ":", "None", ")", ":", "assert", "ion_type", "is", "IonType", ".", "BLOB", "or", "ion_type", "is", "IonType", ".", "CLOB", "@", "coroutine", "def", "lob_end_handler", "(", "c", ",", "ctx", ")", ":", "val", "=", "ctx", ".", "value", "prev", "=", "c", "action_res", "=", "None", "if", "c", "!=", "_CLOSE_BRACE", "and", "c", "not", "in", "_WHITESPACE", ":", "action_res", "=", "action", "(", "c", ",", "ctx", ",", "prev", ",", "action_res", ",", "True", ")", "c", ",", "self", "=", "yield", "trans", "=", "ctx", ".", "immediate_transition", "(", "self", ")", "while", "True", ":", "if", "c", "in", "_WHITESPACE", ":", "if", "prev", "==", "_CLOSE_BRACE", ":", "_illegal_character", "(", "c", ",", "ctx", ".", "set_ion_type", "(", "ion_type", ")", ",", "'Expected }.'", ")", "elif", "c", "==", "_CLOSE_BRACE", ":", "if", "prev", "==", "_CLOSE_BRACE", ":", "validate", "(", "c", ",", "ctx", ",", "action_res", ")", "break", "else", ":", "action_res", "=", "action", "(", "c", ",", "ctx", ",", "prev", ",", "action_res", ",", "False", ")", "prev", "=", "c", "c", ",", "_", "=", "yield", "trans", "ctx", ".", "set_self_delimiting", "(", "True", ")", "# Lob values are self-delimiting (they are terminated by '}}').", "yield", "ctx", ".", "event_transition", "(", "IonThunkEvent", ",", "IonEventType", ".", "SCALAR", ",", "ion_type", ",", "_parse_lob", "(", "ion_type", ",", "val", ")", ")", "return", "lob_end_handler" ]
Generates handlers for the end of blob or clob values. Args: ion_type (IonType): The type of this lob (either blob or clob). action (callable): Called for each non-whitespace, non-closing brace character encountered before the end of the lob. Accepts the current character's ordinal, the current context, the previous character's ordinal, the result of the previous call to ``action`` (if any), and True if this is the first call to ``action``. Returns any state that will be needed by subsequent calls to ``action``. For blobs, this should validate the character is valid base64; for clobs, this should ensure there are no illegal characters (e.g. comments) between the end of the data and the end of the clob. validate (Optional[callable]): Called once the second closing brace has been found. Accepts the current character's ordinal, the current context, and the result of the last call to ``action``; raises an error if this is not a valid lob value.
[ "Generates", "handlers", "for", "the", "end", "of", "blob", "or", "clob", "values", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L1703-L1743
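To make the factory's contract above concrete: it builds a scanner that applies an action to each payload character and stops at the closing '}}', running a final validation. Below is a much-simplified standalone version of that scanning pattern; the callback signatures are reduced, and the real coroutine also tracks the previous character and incremental reads.

def scan_lob_end(chars, action, validate=lambda state: None):
    # Apply `action` to each non-whitespace payload character until '}}' is found.
    state = None
    prev = None
    for c in chars:
        if c == '}':
            if prev == '}':
                validate(state)   # both closing braces seen; run the final check
                return state
        elif not c.isspace():
            state = action(c, state)
        prev = c
    raise ValueError('Unterminated lob: expected }}')

# Example: count payload characters of a blob body.
assert scan_lob_end('aGVsbG8= }}', lambda c, n: (n or 0) + 1) == 8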
amzn/ion-python
amazon/ion/reader_text.py
_blob_end_handler_factory
def _blob_end_handler_factory(): """Generates the handler for the end of a blob value. This includes the base-64 data and the two closing braces.""" def expand_res(res): if res is None: return 0, 0 return res def action(c, ctx, prev, res, is_first): num_digits, num_pads = expand_res(res) if c in _BASE64_DIGITS: if prev == _CLOSE_BRACE or prev == _BASE64_PAD: _illegal_character(c, ctx.set_ion_type(IonType.BLOB)) num_digits += 1 elif c == _BASE64_PAD: if prev == _CLOSE_BRACE: _illegal_character(c, ctx.set_ion_type(IonType.BLOB)) num_pads += 1 else: _illegal_character(c, ctx.set_ion_type(IonType.BLOB)) ctx.value.append(c) return num_digits, num_pads def validate(c, ctx, res): num_digits, num_pads = expand_res(res) if num_pads > 3 or (num_digits + num_pads) % 4 != 0: _illegal_character(c, ctx, 'Incorrect number of pad characters (%d) for a blob of %d base-64 digits.' % (num_pads, num_digits)) return _lob_end_handler_factory(IonType.BLOB, action, validate)
python
def _blob_end_handler_factory(): def expand_res(res): if res is None: return 0, 0 return res def action(c, ctx, prev, res, is_first): num_digits, num_pads = expand_res(res) if c in _BASE64_DIGITS: if prev == _CLOSE_BRACE or prev == _BASE64_PAD: _illegal_character(c, ctx.set_ion_type(IonType.BLOB)) num_digits += 1 elif c == _BASE64_PAD: if prev == _CLOSE_BRACE: _illegal_character(c, ctx.set_ion_type(IonType.BLOB)) num_pads += 1 else: _illegal_character(c, ctx.set_ion_type(IonType.BLOB)) ctx.value.append(c) return num_digits, num_pads def validate(c, ctx, res): num_digits, num_pads = expand_res(res) if num_pads > 3 or (num_digits + num_pads) % 4 != 0: _illegal_character(c, ctx, 'Incorrect number of pad characters (%d) for a blob of %d base-64 digits.' % (num_pads, num_digits)) return _lob_end_handler_factory(IonType.BLOB, action, validate)
[ "def", "_blob_end_handler_factory", "(", ")", ":", "def", "expand_res", "(", "res", ")", ":", "if", "res", "is", "None", ":", "return", "0", ",", "0", "return", "res", "def", "action", "(", "c", ",", "ctx", ",", "prev", ",", "res", ",", "is_first", ")", ":", "num_digits", ",", "num_pads", "=", "expand_res", "(", "res", ")", "if", "c", "in", "_BASE64_DIGITS", ":", "if", "prev", "==", "_CLOSE_BRACE", "or", "prev", "==", "_BASE64_PAD", ":", "_illegal_character", "(", "c", ",", "ctx", ".", "set_ion_type", "(", "IonType", ".", "BLOB", ")", ")", "num_digits", "+=", "1", "elif", "c", "==", "_BASE64_PAD", ":", "if", "prev", "==", "_CLOSE_BRACE", ":", "_illegal_character", "(", "c", ",", "ctx", ".", "set_ion_type", "(", "IonType", ".", "BLOB", ")", ")", "num_pads", "+=", "1", "else", ":", "_illegal_character", "(", "c", ",", "ctx", ".", "set_ion_type", "(", "IonType", ".", "BLOB", ")", ")", "ctx", ".", "value", ".", "append", "(", "c", ")", "return", "num_digits", ",", "num_pads", "def", "validate", "(", "c", ",", "ctx", ",", "res", ")", ":", "num_digits", ",", "num_pads", "=", "expand_res", "(", "res", ")", "if", "num_pads", ">", "3", "or", "(", "num_digits", "+", "num_pads", ")", "%", "4", "!=", "0", ":", "_illegal_character", "(", "c", ",", "ctx", ",", "'Incorrect number of pad characters (%d) for a blob of %d base-64 digits.'", "%", "(", "num_pads", ",", "num_digits", ")", ")", "return", "_lob_end_handler_factory", "(", "IonType", ".", "BLOB", ",", "action", ",", "validate", ")" ]
Generates the handler for the end of a blob value. This includes the base-64 data and the two closing braces.
[ "Generates", "the", "handler", "for", "the", "end", "of", "a", "blob", "value", ".", "This", "includes", "the", "base", "-", "64", "data", "and", "the", "two", "closing", "braces", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L1746-L1774
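The validation rule enforced above (at most three pad characters and a digit-plus-pad count divisible by four) can be checked in isolation. The sketch below restates that rule with the standard library; it is not the parser's own code path.

import base64

def check_blob_padding(body):
    # Enforce the same pad rule as the validator above, then decode (sketch).
    digits = sum(1 for ch in body if ch.isalnum() or ch in '+/')
    pads = body.count('=')
    if pads > 3 or (digits + pads) % 4 != 0:
        raise ValueError('Incorrect number of pad characters (%d) for a blob of %d base-64 digits.'
                         % (pads, digits))
    return base64.b64decode(body)

assert check_blob_padding('aGVsbG8=') == b'hello'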
amzn/ion-python
amazon/ion/reader_text.py
_clob_end_handler_factory
def _clob_end_handler_factory(): """Generates the handler for the end of a clob value. This includes anything from the data's closing quote through the second closing brace. """ def action(c, ctx, prev, res, is_first): if is_first and ctx.is_self_delimiting and c == _DOUBLE_QUOTE: assert c is prev return res _illegal_character(c, ctx) return _lob_end_handler_factory(IonType.CLOB, action)
python
def _clob_end_handler_factory(): def action(c, ctx, prev, res, is_first): if is_first and ctx.is_self_delimiting and c == _DOUBLE_QUOTE: assert c is prev return res _illegal_character(c, ctx) return _lob_end_handler_factory(IonType.CLOB, action)
[ "def", "_clob_end_handler_factory", "(", ")", ":", "def", "action", "(", "c", ",", "ctx", ",", "prev", ",", "res", ",", "is_first", ")", ":", "if", "is_first", "and", "ctx", ".", "is_self_delimiting", "and", "c", "==", "_DOUBLE_QUOTE", ":", "assert", "c", "is", "prev", "return", "res", "_illegal_character", "(", "c", ",", "ctx", ")", "return", "_lob_end_handler_factory", "(", "IonType", ".", "CLOB", ",", "action", ")" ]
Generates the handler for the end of a clob value. This includes anything from the data's closing quote through the second closing brace.
[ "Generates", "the", "handler", "for", "the", "end", "of", "a", "clob", "value", ".", "This", "includes", "anything", "from", "the", "data", "s", "closing", "quote", "through", "the", "second", "closing", "brace", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L1779-L1789
amzn/ion-python
amazon/ion/reader_text.py
_container_start_handler_factory
def _container_start_handler_factory(ion_type, before_yield=lambda c, ctx: None): """Generates handlers for tokens that begin with container start characters. Args: ion_type (IonType): The type of this container. before_yield (Optional[callable]): Called at initialization. Accepts the first character's ordinal and the current context; performs any necessary initialization actions. """ assert ion_type.is_container @coroutine def container_start_handler(c, ctx): before_yield(c, ctx) yield yield ctx.event_transition(IonEvent, IonEventType.CONTAINER_START, ion_type, value=None) return container_start_handler
python
def _container_start_handler_factory(ion_type, before_yield=lambda c, ctx: None): assert ion_type.is_container @coroutine def container_start_handler(c, ctx): before_yield(c, ctx) yield yield ctx.event_transition(IonEvent, IonEventType.CONTAINER_START, ion_type, value=None) return container_start_handler
[ "def", "_container_start_handler_factory", "(", "ion_type", ",", "before_yield", "=", "lambda", "c", ",", "ctx", ":", "None", ")", ":", "assert", "ion_type", ".", "is_container", "@", "coroutine", "def", "container_start_handler", "(", "c", ",", "ctx", ")", ":", "before_yield", "(", "c", ",", "ctx", ")", "yield", "yield", "ctx", ".", "event_transition", "(", "IonEvent", ",", "IonEventType", ".", "CONTAINER_START", ",", "ion_type", ",", "value", "=", "None", ")", "return", "container_start_handler" ]
Generates handlers for tokens that begin with container start characters. Args: ion_type (IonType): The type of this container. before_yield (Optional[callable]): Called at initialization. Accepts the first character's ordinal and the current context; performs any necessary initialization actions.
[ "Generates", "handlers", "for", "tokens", "that", "begin", "with", "container", "start", "characters", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L1801-L1816
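The factory above depends on the primed-coroutine pattern: the generator is advanced to its first yield at construction so it can immediately accept .send(). A generic sketch of that pattern follows; this is a stand-in decorator, not necessarily identical to the library's own.

import functools

def coroutine(func):
    # Prime the generator so callers can .send() to it right away.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        gen = func(*args, **kwargs)
        next(gen)
        return gen
    return wrapper

@coroutine
def container_start(ion_type):
    yield                                # wait for the first character to be sent in
    yield ('CONTAINER_START', ion_type)  # then emit the container-start event

handler = container_start('list')
assert handler.send(None) == ('CONTAINER_START', 'list')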
amzn/ion-python
amazon/ion/reader_text.py
_read_data_handler
def _read_data_handler(whence, ctx, complete, can_flush): """Creates a co-routine for retrieving data up to a requested size. Args: whence (Coroutine): The co-routine to return to after the data is satisfied. ctx (_HandlerContext): The context for the read. complete (True|False): True if STREAM_END should be emitted if no bytes are read or available; False if INCOMPLETE should be emitted in that case. can_flush (True|False): True if NEXT may be requested after INCOMPLETE is emitted as a result of this data request. """ trans = None queue = ctx.queue while True: data_event, self = (yield trans) if data_event is not None: if data_event.data is not None: data = data_event.data data_len = len(data) if data_len > 0: queue.extend(data) yield Transition(None, whence) elif data_event.type is ReadEventType.NEXT: queue.mark_eof() if not can_flush: _illegal_character(queue.read_byte(), ctx, "Unexpected EOF.") yield Transition(None, whence) trans = Transition(complete and ION_STREAM_END_EVENT or ION_STREAM_INCOMPLETE_EVENT, self)
python
def _read_data_handler(whence, ctx, complete, can_flush): trans = None queue = ctx.queue while True: data_event, self = (yield trans) if data_event is not None: if data_event.data is not None: data = data_event.data data_len = len(data) if data_len > 0: queue.extend(data) yield Transition(None, whence) elif data_event.type is ReadEventType.NEXT: queue.mark_eof() if not can_flush: _illegal_character(queue.read_byte(), ctx, "Unexpected EOF.") yield Transition(None, whence) trans = Transition(complete and ION_STREAM_END_EVENT or ION_STREAM_INCOMPLETE_EVENT, self)
[ "def", "_read_data_handler", "(", "whence", ",", "ctx", ",", "complete", ",", "can_flush", ")", ":", "trans", "=", "None", "queue", "=", "ctx", ".", "queue", "while", "True", ":", "data_event", ",", "self", "=", "(", "yield", "trans", ")", "if", "data_event", "is", "not", "None", ":", "if", "data_event", ".", "data", "is", "not", "None", ":", "data", "=", "data_event", ".", "data", "data_len", "=", "len", "(", "data", ")", "if", "data_len", ">", "0", ":", "queue", ".", "extend", "(", "data", ")", "yield", "Transition", "(", "None", ",", "whence", ")", "elif", "data_event", ".", "type", "is", "ReadEventType", ".", "NEXT", ":", "queue", ".", "mark_eof", "(", ")", "if", "not", "can_flush", ":", "_illegal_character", "(", "queue", ".", "read_byte", "(", ")", ",", "ctx", ",", "\"Unexpected EOF.\"", ")", "yield", "Transition", "(", "None", ",", "whence", ")", "trans", "=", "Transition", "(", "complete", "and", "ION_STREAM_END_EVENT", "or", "ION_STREAM_INCOMPLETE_EVENT", ",", "self", ")" ]
Creates a co-routine for retrieving data up to a requested size. Args: whence (Coroutine): The co-routine to return to after the data is satisfied. ctx (_HandlerContext): The context for the read. complete (True|False): True if STREAM_END should be emitted if no bytes are read or available; False if INCOMPLETE should be emitted in that case. can_flush (True|False): True if NEXT may be requested after INCOMPLETE is emitted as a result of this data request.
[ "Creates", "a", "co", "-", "routine", "for", "retrieving", "data", "up", "to", "a", "requested", "size", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L1826-L1854
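The branch structure above — extend the queue on a DATA event, flush or fail on NEXT, otherwise report STREAM_END or INCOMPLETE depending on whether a value is in progress — can be summarized with plain values. The event names below are strings standing in for the library's enums, and the flow is condensed into a single function for illustration.

def answer_read_request(queue, event_type, data, complete, can_flush):
    # Plain-value sketch of the decision made by the data-request coroutine above.
    if event_type == 'DATA':
        if data:
            queue.extend(data)
        return 'RESUME'       # hand control back to the coroutine that wanted data
    if event_type == 'NEXT':
        if not can_flush:
            raise ValueError('Unexpected EOF.')
        return 'RESUME'
    return 'STREAM_END' if complete else 'INCOMPLETE'

buf = bytearray()
assert answer_read_request(buf, 'DATA', b'{a: 1}', complete=True, can_flush=False) == 'RESUME'
assert bytes(buf) == b'{a: 1}'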
amzn/ion-python
amazon/ion/reader_text.py
_container_handler
def _container_handler(c, ctx): """Coroutine for container values. Delegates to other coroutines to tokenize all child values.""" _, self = (yield None) queue = ctx.queue child_context = None is_field_name = ctx.ion_type is IonType.STRUCT delimiter_required = False complete = ctx.depth == 0 can_flush = False def has_pending_symbol(): return child_context and child_context.pending_symbol is not None def symbol_value_event(): return child_context.event_transition( IonEvent, IonEventType.SCALAR, IonType.SYMBOL, _as_symbol(child_context.pending_symbol)) def pending_symbol_value(): if has_pending_symbol(): assert not child_context.value if ctx.ion_type is IonType.STRUCT and child_context.field_name is None: _illegal_character(c, ctx, 'Encountered STRUCT value %s without field name.' % (child_context.pending_symbol,)) return symbol_value_event() return None def is_value_decorated(): return child_context is not None and (child_context.annotations or child_context.field_name is not None) def _can_flush(): return child_context is not None and \ child_context.depth == 0 and \ ( ( child_context.ion_type is not None and ( child_context.ion_type.is_numeric or (child_context.ion_type.is_text and not ctx.quoted_text and not is_field_name) ) ) or ( child_context.line_comment and not is_value_decorated() ) ) while True: # Loop over all values in this container. if c in ctx.container.end or c in ctx.container.delimiter or BufferQueue.is_eof(c): symbol_event = pending_symbol_value() if symbol_event is not None: yield symbol_event child_context = None delimiter_required = ctx.container.is_delimited if c in ctx.container.end: if not delimiter_required and is_value_decorated(): _illegal_character(c, child_context, 'Dangling field name (%s) and/or annotation(s) (%r) at end of container.' % (child_context.field_name, child_context.annotations)) # Yield the close event and go to enclosing container. This coroutine instance will never resume. yield Transition( IonEvent(IonEventType.CONTAINER_END, ctx.ion_type, depth=ctx.depth-1), ctx.whence ) raise ValueError('Resumed a finished container handler.') elif c in ctx.container.delimiter: if not delimiter_required: _illegal_character(c, ctx.derive_child_context(None), 'Encountered delimiter %s without preceding value.' % (_chr(ctx.container.delimiter[0]),)) is_field_name = ctx.ion_type is IonType.STRUCT delimiter_required = False c = None else: assert BufferQueue.is_eof(c) assert len(queue) == 0 yield ctx.read_data_event(self, complete=True) c = None if c is not None and c not in _WHITESPACE: can_flush = False if c == _SLASH: if child_context is None: # This is the start of a new child value (or, if this is a comment, a new value will start after the # comment ends). child_context = ctx.derive_child_context(self) if ctx.ion_type is IonType.SEXP: handler = _sexp_slash_handler(c, child_context, pending_event=pending_symbol_value()) else: handler = _comment_handler(c, child_context, self) elif delimiter_required: # This is not the delimiter, or whitespace, or the start of a comment. Throw. _illegal_character(c, ctx.derive_child_context(None), 'Delimiter %s not found after value.' % (_chr(ctx.container.delimiter[0]),)) elif has_pending_symbol(): # A character besides whitespace, comments, and delimiters has been found, and there is a pending # symbol. That pending symbol is either an annotation, a field name, or a symbol value. if c == _COLON: if is_field_name: is_field_name = False child_context.set_field_name() c = None else: assert not ctx.quoted_text if len(queue) == 0: yield ctx.read_data_event(self) c = queue.read_byte() if c == _COLON: child_context.set_annotation() c = None # forces another character to be read safely else: # Colon that doesn't indicate a field name or annotation. _illegal_character(c, child_context) else: if is_field_name: _illegal_character(c, child_context, 'Illegal character after field name %s.' % child_context.pending_symbol) # It's a symbol value delimited by something other than a comma (i.e. whitespace or comment) yield symbol_value_event() child_context = None delimiter_required = ctx.container.is_delimited continue else: if not is_value_decorated(): # This is the start of a new child value. child_context = ctx.derive_child_context(self) if is_field_name: handler = _FIELD_NAME_START_TABLE[c](c, child_context) else: handler = _VALUE_START_TABLE[c](c, child_context) # Initialize the new handler can_flush = _IMMEDIATE_FLUSH_TABLE[c] container_start = c == _OPEN_BRACKET or \ c == _OPEN_PAREN # _OPEN_BRACE might start a lob; that is handled elsewhere. quoted_start = c == _DOUBLE_QUOTE or c == _SINGLE_QUOTE while True: # Loop over all characters in the current token. A token is either a non-symbol value or a pending # symbol, which may end up being a field name, annotation, or symbol value. if container_start: c = None container_start = False else: if child_context.quoted_text or quoted_start: quoted_start = False yield child_context.next_code_point(self) c = child_context.code_point else: if len(queue) == 0: yield ctx.read_data_event(self, can_flush=can_flush) c = queue.read_byte() trans = handler.send((c, handler)) if trans.event is not None: is_self_delimiting = False if child_context.is_composite: # This is a composite transition, i.e. it is an event transition followed by an immediate # transition to the handler coroutine for the next token. next_transition = trans.next_transition child_context = trans.next_context assert next_transition is None or next_transition.event is None else: next_transition = None is_self_delimiting = child_context.is_self_delimiting child_context = None # This child value is finished. c is now the first character in the next value or sequence. # Hence, a new character should not be read; it should be provided to the handler for the next # child context. yield trans event_ion_type = trans.event.ion_type # None in the case of IVM event. is_container = event_ion_type is not None and event_ion_type.is_container and \ trans.event.event_type is not IonEventType.SCALAR if is_container: assert next_transition is None yield Transition( None, _container_handler(c, ctx.derive_container_context(trans.event.ion_type, self)) ) complete = ctx.depth == 0 can_flush = False if is_container or is_self_delimiting: # The end of the value has been reached, and c needs to be updated assert not ctx.quoted_text if len(queue) == 0: yield ctx.read_data_event(self, complete, can_flush) c = queue.read_byte() delimiter_required = ctx.container.is_delimited if next_transition is None: break else: trans = next_transition elif self is trans.delegate: child_context.set_ion_type(None) # The next token will determine the type. complete = False can_flush = _can_flush() if is_field_name: assert not can_flush if c == _COLON or not child_context.is_self_delimiting: break elif has_pending_symbol(): can_flush = ctx.depth == 0 if not child_context.is_self_delimiting or child_context.line_comment: break elif child_context.is_self_delimiting: # This is the end of a comment. If this is at the top level and is un-annotated, # it may end the stream. complete = ctx.depth == 0 and not is_value_decorated() # This happens at the end of a comment within this container, or when a symbol token has been # found. In both cases, an event should not be emitted. Read the next character and continue. if len(queue) == 0: yield ctx.read_data_event(self, complete, can_flush) c = queue.read_byte() break # This is an immediate transition to a handler (may be the same one) for the current token. can_flush = _can_flush() handler = trans.delegate else: assert not ctx.quoted_text if len(queue) == 0: yield ctx.read_data_event(self, complete, can_flush) c = queue.read_byte()
python
def _container_handler(c, ctx): _, self = (yield None) queue = ctx.queue child_context = None is_field_name = ctx.ion_type is IonType.STRUCT delimiter_required = False complete = ctx.depth == 0 can_flush = False def has_pending_symbol(): return child_context and child_context.pending_symbol is not None def symbol_value_event(): return child_context.event_transition( IonEvent, IonEventType.SCALAR, IonType.SYMBOL, _as_symbol(child_context.pending_symbol)) def pending_symbol_value(): if has_pending_symbol(): assert not child_context.value if ctx.ion_type is IonType.STRUCT and child_context.field_name is None: _illegal_character(c, ctx, 'Encountered STRUCT value %s without field name.' % (child_context.pending_symbol,)) return symbol_value_event() return None def is_value_decorated(): return child_context is not None and (child_context.annotations or child_context.field_name is not None) def _can_flush(): return child_context is not None and \ child_context.depth == 0 and \ ( ( child_context.ion_type is not None and ( child_context.ion_type.is_numeric or (child_context.ion_type.is_text and not ctx.quoted_text and not is_field_name) ) ) or ( child_context.line_comment and not is_value_decorated() ) ) while True: if c in ctx.container.end or c in ctx.container.delimiter or BufferQueue.is_eof(c): symbol_event = pending_symbol_value() if symbol_event is not None: yield symbol_event child_context = None delimiter_required = ctx.container.is_delimited if c in ctx.container.end: if not delimiter_required and is_value_decorated(): _illegal_character(c, child_context, 'Dangling field name (%s) and/or annotation(s) (%r) at end of container.' % (child_context.field_name, child_context.annotations)) yield Transition( IonEvent(IonEventType.CONTAINER_END, ctx.ion_type, depth=ctx.depth-1), ctx.whence ) raise ValueError('Resumed a finished container handler.') elif c in ctx.container.delimiter: if not delimiter_required: _illegal_character(c, ctx.derive_child_context(None), 'Encountered delimiter %s without preceding value.' % (_chr(ctx.container.delimiter[0]),)) is_field_name = ctx.ion_type is IonType.STRUCT delimiter_required = False c = None else: assert BufferQueue.is_eof(c) assert len(queue) == 0 yield ctx.read_data_event(self, complete=True) c = None if c is not None and c not in _WHITESPACE: can_flush = False if c == _SLASH: if child_context is None: child_context = ctx.derive_child_context(self) if ctx.ion_type is IonType.SEXP: handler = _sexp_slash_handler(c, child_context, pending_event=pending_symbol_value()) else: handler = _comment_handler(c, child_context, self) elif delimiter_required: _illegal_character(c, ctx.derive_child_context(None), 'Delimiter %s not found after value.' % (_chr(ctx.container.delimiter[0]),)) elif has_pending_symbol(): if c == _COLON: if is_field_name: is_field_name = False child_context.set_field_name() c = None else: assert not ctx.quoted_text if len(queue) == 0: yield ctx.read_data_event(self) c = queue.read_byte() if c == _COLON: child_context.set_annotation() c = None else: _illegal_character(c, child_context) else: if is_field_name: _illegal_character(c, child_context, 'Illegal character after field name %s.' % child_context.pending_symbol) yield symbol_value_event() child_context = None delimiter_required = ctx.container.is_delimited continue else: if not is_value_decorated(): child_context = ctx.derive_child_context(self) if is_field_name: handler = _FIELD_NAME_START_TABLE[c](c, child_context) else: handler = _VALUE_START_TABLE[c](c, child_context) can_flush = _IMMEDIATE_FLUSH_TABLE[c] container_start = c == _OPEN_BRACKET or \ c == _OPEN_PAREN quoted_start = c == _DOUBLE_QUOTE or c == _SINGLE_QUOTE while True: if container_start: c = None container_start = False else: if child_context.quoted_text or quoted_start: quoted_start = False yield child_context.next_code_point(self) c = child_context.code_point else: if len(queue) == 0: yield ctx.read_data_event(self, can_flush=can_flush) c = queue.read_byte() trans = handler.send((c, handler)) if trans.event is not None: is_self_delimiting = False if child_context.is_composite: next_transition = trans.next_transition child_context = trans.next_context assert next_transition is None or next_transition.event is None else: next_transition = None is_self_delimiting = child_context.is_self_delimiting child_context = None yield trans event_ion_type = trans.event.ion_type is_container = event_ion_type is not None and event_ion_type.is_container and \ trans.event.event_type is not IonEventType.SCALAR if is_container: assert next_transition is None yield Transition( None, _container_handler(c, ctx.derive_container_context(trans.event.ion_type, self)) ) complete = ctx.depth == 0 can_flush = False if is_container or is_self_delimiting: assert not ctx.quoted_text if len(queue) == 0: yield ctx.read_data_event(self, complete, can_flush) c = queue.read_byte() delimiter_required = ctx.container.is_delimited if next_transition is None: break else: trans = next_transition elif self is trans.delegate: child_context.set_ion_type(None) complete = False can_flush = _can_flush() if is_field_name: assert not can_flush if c == _COLON or not child_context.is_self_delimiting: break elif has_pending_symbol(): can_flush = ctx.depth == 0 if not child_context.is_self_delimiting or child_context.line_comment: break elif child_context.is_self_delimiting: complete = ctx.depth == 0 and not is_value_decorated() if len(queue) == 0: yield ctx.read_data_event(self, complete, can_flush) c = queue.read_byte() break can_flush = _can_flush() handler = trans.delegate else: assert not ctx.quoted_text if len(queue) == 0: yield ctx.read_data_event(self, complete, can_flush) c = queue.read_byte()
[ "def", "_container_handler", "(", "c", ",", "ctx", ")", ":", "_", ",", "self", "=", "(", "yield", "None", ")", "queue", "=", "ctx", ".", "queue", "child_context", "=", "None", "is_field_name", "=", "ctx", ".", "ion_type", "is", "IonType", ".", "STRUCT", "delimiter_required", "=", "False", "complete", "=", "ctx", ".", "depth", "==", "0", "can_flush", "=", "False", "def", "has_pending_symbol", "(", ")", ":", "return", "child_context", "and", "child_context", ".", "pending_symbol", "is", "not", "None", "def", "symbol_value_event", "(", ")", ":", "return", "child_context", ".", "event_transition", "(", "IonEvent", ",", "IonEventType", ".", "SCALAR", ",", "IonType", ".", "SYMBOL", ",", "_as_symbol", "(", "child_context", ".", "pending_symbol", ")", ")", "def", "pending_symbol_value", "(", ")", ":", "if", "has_pending_symbol", "(", ")", ":", "assert", "not", "child_context", ".", "value", "if", "ctx", ".", "ion_type", "is", "IonType", ".", "STRUCT", "and", "child_context", ".", "field_name", "is", "None", ":", "_illegal_character", "(", "c", ",", "ctx", ",", "'Encountered STRUCT value %s without field name.'", "%", "(", "child_context", ".", "pending_symbol", ",", ")", ")", "return", "symbol_value_event", "(", ")", "return", "None", "def", "is_value_decorated", "(", ")", ":", "return", "child_context", "is", "not", "None", "and", "(", "child_context", ".", "annotations", "or", "child_context", ".", "field_name", "is", "not", "None", ")", "def", "_can_flush", "(", ")", ":", "return", "child_context", "is", "not", "None", "and", "child_context", ".", "depth", "==", "0", "and", "(", "(", "child_context", ".", "ion_type", "is", "not", "None", "and", "(", "child_context", ".", "ion_type", ".", "is_numeric", "or", "(", "child_context", ".", "ion_type", ".", "is_text", "and", "not", "ctx", ".", "quoted_text", "and", "not", "is_field_name", ")", ")", ")", "or", "(", "child_context", ".", "line_comment", "and", "not", "is_value_decorated", "(", ")", ")", ")", "while", "True", ":", "# Loop over all values in this container.", "if", "c", "in", "ctx", ".", "container", ".", "end", "or", "c", "in", "ctx", ".", "container", ".", "delimiter", "or", "BufferQueue", ".", "is_eof", "(", "c", ")", ":", "symbol_event", "=", "pending_symbol_value", "(", ")", "if", "symbol_event", "is", "not", "None", ":", "yield", "symbol_event", "child_context", "=", "None", "delimiter_required", "=", "ctx", ".", "container", ".", "is_delimited", "if", "c", "in", "ctx", ".", "container", ".", "end", ":", "if", "not", "delimiter_required", "and", "is_value_decorated", "(", ")", ":", "_illegal_character", "(", "c", ",", "child_context", ",", "'Dangling field name (%s) and/or annotation(s) (%r) at end of container.'", "%", "(", "child_context", ".", "field_name", ",", "child_context", ".", "annotations", ")", ")", "# Yield the close event and go to enclosing container. This coroutine instance will never resume.", "yield", "Transition", "(", "IonEvent", "(", "IonEventType", ".", "CONTAINER_END", ",", "ctx", ".", "ion_type", ",", "depth", "=", "ctx", ".", "depth", "-", "1", ")", ",", "ctx", ".", "whence", ")", "raise", "ValueError", "(", "'Resumed a finished container handler.'", ")", "elif", "c", "in", "ctx", ".", "container", ".", "delimiter", ":", "if", "not", "delimiter_required", ":", "_illegal_character", "(", "c", ",", "ctx", ".", "derive_child_context", "(", "None", ")", ",", "'Encountered delimiter %s without preceding value.'", "%", "(", "_chr", "(", "ctx", ".", "container", ".", "delimiter", "[", "0", "]", ")", ",", ")", ")", "is_field_name", "=", "ctx", ".", "ion_type", "is", "IonType", ".", "STRUCT", "delimiter_required", "=", "False", "c", "=", "None", "else", ":", "assert", "BufferQueue", ".", "is_eof", "(", "c", ")", "assert", "len", "(", "queue", ")", "==", "0", "yield", "ctx", ".", "read_data_event", "(", "self", ",", "complete", "=", "True", ")", "c", "=", "None", "if", "c", "is", "not", "None", "and", "c", "not", "in", "_WHITESPACE", ":", "can_flush", "=", "False", "if", "c", "==", "_SLASH", ":", "if", "child_context", "is", "None", ":", "# This is the start of a new child value (or, if this is a comment, a new value will start after the", "# comment ends).", "child_context", "=", "ctx", ".", "derive_child_context", "(", "self", ")", "if", "ctx", ".", "ion_type", "is", "IonType", ".", "SEXP", ":", "handler", "=", "_sexp_slash_handler", "(", "c", ",", "child_context", ",", "pending_event", "=", "pending_symbol_value", "(", ")", ")", "else", ":", "handler", "=", "_comment_handler", "(", "c", ",", "child_context", ",", "self", ")", "elif", "delimiter_required", ":", "# This is not the delimiter, or whitespace, or the start of a comment. Throw.", "_illegal_character", "(", "c", ",", "ctx", ".", "derive_child_context", "(", "None", ")", ",", "'Delimiter %s not found after value.'", "%", "(", "_chr", "(", "ctx", ".", "container", ".", "delimiter", "[", "0", "]", ")", ",", ")", ")", "elif", "has_pending_symbol", "(", ")", ":", "# A character besides whitespace, comments, and delimiters has been found, and there is a pending", "# symbol. That pending symbol is either an annotation, a field name, or a symbol value.", "if", "c", "==", "_COLON", ":", "if", "is_field_name", ":", "is_field_name", "=", "False", "child_context", ".", "set_field_name", "(", ")", "c", "=", "None", "else", ":", "assert", "not", "ctx", ".", "quoted_text", "if", "len", "(", "queue", ")", "==", "0", ":", "yield", "ctx", ".", "read_data_event", "(", "self", ")", "c", "=", "queue", ".", "read_byte", "(", ")", "if", "c", "==", "_COLON", ":", "child_context", ".", "set_annotation", "(", ")", "c", "=", "None", "# forces another character to be read safely", "else", ":", "# Colon that doesn't indicate a field name or annotation.", "_illegal_character", "(", "c", ",", "child_context", ")", "else", ":", "if", "is_field_name", ":", "_illegal_character", "(", "c", ",", "child_context", ",", "'Illegal character after field name %s.'", "%", "child_context", ".", "pending_symbol", ")", "# It's a symbol value delimited by something other than a comma (i.e. whitespace or comment)", "yield", "symbol_value_event", "(", ")", "child_context", "=", "None", "delimiter_required", "=", "ctx", ".", "container", ".", "is_delimited", "continue", "else", ":", "if", "not", "is_value_decorated", "(", ")", ":", "# This is the start of a new child value.", "child_context", "=", "ctx", ".", "derive_child_context", "(", "self", ")", "if", "is_field_name", ":", "handler", "=", "_FIELD_NAME_START_TABLE", "[", "c", "]", "(", "c", ",", "child_context", ")", "else", ":", "handler", "=", "_VALUE_START_TABLE", "[", "c", "]", "(", "c", ",", "child_context", ")", "# Initialize the new handler", "can_flush", "=", "_IMMEDIATE_FLUSH_TABLE", "[", "c", "]", "container_start", "=", "c", "==", "_OPEN_BRACKET", "or", "c", "==", "_OPEN_PAREN", "# _OPEN_BRACE might start a lob; that is handled elsewhere.", "quoted_start", "=", "c", "==", "_DOUBLE_QUOTE", "or", "c", "==", "_SINGLE_QUOTE", "while", "True", ":", "# Loop over all characters in the current token. A token is either a non-symbol value or a pending", "# symbol, which may end up being a field name, annotation, or symbol value.", "if", "container_start", ":", "c", "=", "None", "container_start", "=", "False", "else", ":", "if", "child_context", ".", "quoted_text", "or", "quoted_start", ":", "quoted_start", "=", "False", "yield", "child_context", ".", "next_code_point", "(", "self", ")", "c", "=", "child_context", ".", "code_point", "else", ":", "if", "len", "(", "queue", ")", "==", "0", ":", "yield", "ctx", ".", "read_data_event", "(", "self", ",", "can_flush", "=", "can_flush", ")", "c", "=", "queue", ".", "read_byte", "(", ")", "trans", "=", "handler", ".", "send", "(", "(", "c", ",", "handler", ")", ")", "if", "trans", ".", "event", "is", "not", "None", ":", "is_self_delimiting", "=", "False", "if", "child_context", ".", "is_composite", ":", "# This is a composite transition, i.e. it is an event transition followed by an immediate", "# transition to the handler coroutine for the next token.", "next_transition", "=", "trans", ".", "next_transition", "child_context", "=", "trans", ".", "next_context", "assert", "next_transition", "is", "None", "or", "next_transition", ".", "event", "is", "None", "else", ":", "next_transition", "=", "None", "is_self_delimiting", "=", "child_context", ".", "is_self_delimiting", "child_context", "=", "None", "# This child value is finished. c is now the first character in the next value or sequence.", "# Hence, a new character should not be read; it should be provided to the handler for the next", "# child context.", "yield", "trans", "event_ion_type", "=", "trans", ".", "event", ".", "ion_type", "# None in the case of IVM event.", "is_container", "=", "event_ion_type", "is", "not", "None", "and", "event_ion_type", ".", "is_container", "and", "trans", ".", "event", ".", "event_type", "is", "not", "IonEventType", ".", "SCALAR", "if", "is_container", ":", "assert", "next_transition", "is", "None", "yield", "Transition", "(", "None", ",", "_container_handler", "(", "c", ",", "ctx", ".", "derive_container_context", "(", "trans", ".", "event", ".", "ion_type", ",", "self", ")", ")", ")", "complete", "=", "ctx", ".", "depth", "==", "0", "can_flush", "=", "False", "if", "is_container", "or", "is_self_delimiting", ":", "# The end of the value has been reached, and c needs to be updated", "assert", "not", "ctx", ".", "quoted_text", "if", "len", "(", "queue", ")", "==", "0", ":", "yield", "ctx", ".", "read_data_event", "(", "self", ",", "complete", ",", "can_flush", ")", "c", "=", "queue", ".", "read_byte", "(", ")", "delimiter_required", "=", "ctx", ".", "container", ".", "is_delimited", "if", "next_transition", "is", "None", ":", "break", "else", ":", "trans", "=", "next_transition", "elif", "self", "is", "trans", ".", "delegate", ":", "child_context", ".", "set_ion_type", "(", "None", ")", "# The next token will determine the type.", "complete", "=", "False", "can_flush", "=", "_can_flush", "(", ")", "if", "is_field_name", ":", "assert", "not", "can_flush", "if", "c", "==", "_COLON", "or", "not", "child_context", ".", "is_self_delimiting", ":", "break", "elif", "has_pending_symbol", "(", ")", ":", "can_flush", "=", "ctx", ".", "depth", "==", "0", "if", "not", "child_context", ".", "is_self_delimiting", "or", "child_context", ".", "line_comment", ":", "break", "elif", "child_context", ".", "is_self_delimiting", ":", "# This is the end of a comment. If this is at the top level and is un-annotated,", "# it may end the stream.", "complete", "=", "ctx", ".", "depth", "==", "0", "and", "not", "is_value_decorated", "(", ")", "# This happens at the end of a comment within this container, or when a symbol token has been", "# found. In both cases, an event should not be emitted. Read the next character and continue.", "if", "len", "(", "queue", ")", "==", "0", ":", "yield", "ctx", ".", "read_data_event", "(", "self", ",", "complete", ",", "can_flush", ")", "c", "=", "queue", ".", "read_byte", "(", ")", "break", "# This is an immediate transition to a handler (may be the same one) for the current token.", "can_flush", "=", "_can_flush", "(", ")", "handler", "=", "trans", ".", "delegate", "else", ":", "assert", "not", "ctx", ".", "quoted_text", "if", "len", "(", "queue", ")", "==", "0", ":", "yield", "ctx", ".", "read_data_event", "(", "self", ",", "complete", ",", "can_flush", ")", "c", "=", "queue", ".", "read_byte", "(", ")" ]
Coroutine for container values. Delegates to other coroutines to tokenize all child values.
[ "Coroutine", "for", "container", "values", ".", "Delegates", "to", "other", "coroutines", "to", "tokenize", "all", "child", "values", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L1932-L2148
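One piece of the large coroutine above that is easy to isolate is the delimiter_required flag: in a delimited container, each value must be followed by the delimiter or the container end, and a delimiter with no preceding value is an error. A stripped-down checker over pre-split tokens (a standalone sketch, not the parser itself):

def check_delimiters(tokens, delimiter=',', end=']'):
    # Enforce the delimiter_required bookkeeping on a flat token list (sketch).
    delimiter_required = False
    for tok in tokens:
        if tok == end:
            return
        if tok == delimiter:
            if not delimiter_required:
                raise ValueError('Encountered delimiter %s without preceding value.' % delimiter)
            delimiter_required = False
        elif delimiter_required:
            raise ValueError('Delimiter %s not found after value.' % delimiter)
        else:
            delimiter_required = True
    raise ValueError('Container was not closed.')

check_delimiters(['1', ',', '2', ']'])  # ok: values separated by the delimiter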
amzn/ion-python
amazon/ion/reader_text.py
_skip_trampoline
def _skip_trampoline(handler): """Intercepts events from container handlers, emitting them only if they should not be skipped.""" data_event, self = (yield None) delegate = handler event = None depth = 0 while True: def pass_through(): _trans = delegate.send(Transition(data_event, delegate)) return _trans, _trans.delegate, _trans.event if data_event is not None and data_event.type is ReadEventType.SKIP: while True: trans, delegate, event = pass_through() if event is not None: if event.event_type is IonEventType.CONTAINER_END and event.depth <= depth: break if event is None or event.event_type is IonEventType.INCOMPLETE: data_event, _ = yield Transition(event, self) else: trans, delegate, event = pass_through() if event is not None and (event.event_type is IonEventType.CONTAINER_START or event.event_type is IonEventType.CONTAINER_END): depth = event.depth data_event, _ = yield Transition(event, self)
python
def _skip_trampoline(handler): data_event, self = (yield None) delegate = handler event = None depth = 0 while True: def pass_through(): _trans = delegate.send(Transition(data_event, delegate)) return _trans, _trans.delegate, _trans.event if data_event is not None and data_event.type is ReadEventType.SKIP: while True: trans, delegate, event = pass_through() if event is not None: if event.event_type is IonEventType.CONTAINER_END and event.depth <= depth: break if event is None or event.event_type is IonEventType.INCOMPLETE: data_event, _ = yield Transition(event, self) else: trans, delegate, event = pass_through() if event is not None and (event.event_type is IonEventType.CONTAINER_START or event.event_type is IonEventType.CONTAINER_END): depth = event.depth data_event, _ = yield Transition(event, self)
[ "def", "_skip_trampoline", "(", "handler", ")", ":", "data_event", ",", "self", "=", "(", "yield", "None", ")", "delegate", "=", "handler", "event", "=", "None", "depth", "=", "0", "while", "True", ":", "def", "pass_through", "(", ")", ":", "_trans", "=", "delegate", ".", "send", "(", "Transition", "(", "data_event", ",", "delegate", ")", ")", "return", "_trans", ",", "_trans", ".", "delegate", ",", "_trans", ".", "event", "if", "data_event", "is", "not", "None", "and", "data_event", ".", "type", "is", "ReadEventType", ".", "SKIP", ":", "while", "True", ":", "trans", ",", "delegate", ",", "event", "=", "pass_through", "(", ")", "if", "event", "is", "not", "None", ":", "if", "event", ".", "event_type", "is", "IonEventType", ".", "CONTAINER_END", "and", "event", ".", "depth", "<=", "depth", ":", "break", "if", "event", "is", "None", "or", "event", ".", "event_type", "is", "IonEventType", ".", "INCOMPLETE", ":", "data_event", ",", "_", "=", "yield", "Transition", "(", "event", ",", "self", ")", "else", ":", "trans", ",", "delegate", ",", "event", "=", "pass_through", "(", ")", "if", "event", "is", "not", "None", "and", "(", "event", ".", "event_type", "is", "IonEventType", ".", "CONTAINER_START", "or", "event", ".", "event_type", "is", "IonEventType", ".", "CONTAINER_END", ")", ":", "depth", "=", "event", ".", "depth", "data_event", ",", "_", "=", "yield", "Transition", "(", "event", ",", "self", ")" ]
Intercepts events from container handlers, emitting them only if they should not be skipped.
[ "Intercepts", "events", "from", "container", "handlers", "emitting", "them", "only", "if", "they", "should", "not", "be", "skipped", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L2152-L2176
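The SKIP branch above drains delegate events until the enclosing container closes at or above the remembered depth. The same idea over a plain event stream, with tuples standing in for IonEvent objects:

def skip_to_container_end(events, depth):
    # Consume events until a CONTAINER_END at or above `depth` is seen (sketch).
    for event_type, event_depth in events:
        if event_type == 'CONTAINER_END' and event_depth <= depth:
            return (event_type, event_depth)
    return None

stream = iter([('SCALAR', 1), ('CONTAINER_START', 1), ('SCALAR', 2),
               ('CONTAINER_END', 1), ('CONTAINER_END', 0)])
assert skip_to_container_end(stream, depth=0) == ('CONTAINER_END', 0)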
amzn/ion-python
amazon/ion/reader_text.py
_next_code_point_handler
def _next_code_point_handler(whence, ctx): """Retrieves the next code point from within a quoted string or symbol.""" data_event, self = yield queue = ctx.queue unicode_escapes_allowed = ctx.ion_type is not IonType.CLOB escaped_newline = False escape_sequence = b'' low_surrogate_required = False while True: if len(queue) == 0: yield ctx.read_data_event(self) queue_iter = iter(queue) code_point_generator = _next_code_point_iter(queue, queue_iter) code_point = next(code_point_generator) if code_point == _BACKSLASH: escape_sequence += six.int2byte(_BACKSLASH) num_digits = None while True: if len(queue) == 0: yield ctx.read_data_event(self) code_point = next(queue_iter) if six.indexbytes(escape_sequence, -1) == _BACKSLASH: if code_point == _ord(b'u') and unicode_escapes_allowed: # 4-digit unicode escapes, plus '\u' for each surrogate num_digits = 12 if low_surrogate_required else 6 low_surrogate_required = False elif low_surrogate_required: _illegal_character(code_point, ctx, 'Unpaired high surrogate escape sequence %s.' % (escape_sequence,)) elif code_point == _ord(b'x'): num_digits = 4 # 2-digit hex escapes elif code_point == _ord(b'U') and unicode_escapes_allowed: num_digits = 10 # 8-digit unicode escapes elif code_point in _COMMON_ESCAPES: if code_point == _SLASH or code_point == _QUESTION_MARK: escape_sequence = b'' # Drop the \. Python does not recognize these as escapes. escape_sequence += six.int2byte(code_point) break elif code_point in _NEWLINES: escaped_newline = True break else: # This is a backslash followed by an invalid escape character. This is illegal. _illegal_character(code_point, ctx, 'Invalid escape sequence \\%s.' % (_chr(code_point),)) escape_sequence += six.int2byte(code_point) else: if code_point not in _HEX_DIGITS: _illegal_character(code_point, ctx, 'Non-hex character %s found in unicode escape.' % (_chr(code_point),)) escape_sequence += six.int2byte(code_point) if len(escape_sequence) == num_digits: break if not escaped_newline: decoded_escape_sequence = escape_sequence.decode('unicode-escape') cp_iter = _next_code_point_iter(decoded_escape_sequence, iter(decoded_escape_sequence), to_int=ord) code_point = next(cp_iter) if code_point is None: # This is a high surrogate. Restart the loop to gather the low surrogate. low_surrogate_required = True continue code_point = CodePoint(code_point) code_point.char = decoded_escape_sequence code_point.is_escaped = True ctx.set_code_point(code_point) yield Transition(None, whence) elif low_surrogate_required: _illegal_character(code_point, ctx, 'Unpaired high surrogate escape sequence %s.' % (escape_sequence,)) if code_point == _CARRIAGE_RETURN: # Normalize all newlines (\r, \n, and \r\n) to \n . if len(queue) == 0: yield ctx.read_data_event(self) code_point = next(queue_iter) if code_point != _NEWLINE: queue.unread(code_point) code_point = _NEWLINE while code_point is None: yield ctx.read_data_event(self) code_point = next(code_point_generator) if escaped_newline: code_point = CodePoint(code_point) code_point.char = _ESCAPED_NEWLINE code_point.is_escaped = True ctx.set_code_point(code_point) yield Transition(None, whence)
python
def _next_code_point_handler(whence, ctx): data_event, self = yield queue = ctx.queue unicode_escapes_allowed = ctx.ion_type is not IonType.CLOB escaped_newline = False escape_sequence = b'' low_surrogate_required = False while True: if len(queue) == 0: yield ctx.read_data_event(self) queue_iter = iter(queue) code_point_generator = _next_code_point_iter(queue, queue_iter) code_point = next(code_point_generator) if code_point == _BACKSLASH: escape_sequence += six.int2byte(_BACKSLASH) num_digits = None while True: if len(queue) == 0: yield ctx.read_data_event(self) code_point = next(queue_iter) if six.indexbytes(escape_sequence, -1) == _BACKSLASH: if code_point == _ord(b'u') and unicode_escapes_allowed: num_digits = 12 if low_surrogate_required else 6 low_surrogate_required = False elif low_surrogate_required: _illegal_character(code_point, ctx, 'Unpaired high surrogate escape sequence %s.' % (escape_sequence,)) elif code_point == _ord(b'x'): num_digits = 4 elif code_point == _ord(b'U') and unicode_escapes_allowed: num_digits = 10 elif code_point in _COMMON_ESCAPES: if code_point == _SLASH or code_point == _QUESTION_MARK: escape_sequence = b'' escape_sequence += six.int2byte(code_point) break elif code_point in _NEWLINES: escaped_newline = True break else: _illegal_character(code_point, ctx, 'Invalid escape sequence \\%s.' % (_chr(code_point),)) escape_sequence += six.int2byte(code_point) else: if code_point not in _HEX_DIGITS: _illegal_character(code_point, ctx, 'Non-hex character %s found in unicode escape.' % (_chr(code_point),)) escape_sequence += six.int2byte(code_point) if len(escape_sequence) == num_digits: break if not escaped_newline: decoded_escape_sequence = escape_sequence.decode('unicode-escape') cp_iter = _next_code_point_iter(decoded_escape_sequence, iter(decoded_escape_sequence), to_int=ord) code_point = next(cp_iter) if code_point is None: low_surrogate_required = True continue code_point = CodePoint(code_point) code_point.char = decoded_escape_sequence code_point.is_escaped = True ctx.set_code_point(code_point) yield Transition(None, whence) elif low_surrogate_required: _illegal_character(code_point, ctx, 'Unpaired high surrogate escape sequence %s.' % (escape_sequence,)) if code_point == _CARRIAGE_RETURN: if len(queue) == 0: yield ctx.read_data_event(self) code_point = next(queue_iter) if code_point != _NEWLINE: queue.unread(code_point) code_point = _NEWLINE while code_point is None: yield ctx.read_data_event(self) code_point = next(code_point_generator) if escaped_newline: code_point = CodePoint(code_point) code_point.char = _ESCAPED_NEWLINE code_point.is_escaped = True ctx.set_code_point(code_point) yield Transition(None, whence)
[ "def", "_next_code_point_handler", "(", "whence", ",", "ctx", ")", ":", "data_event", ",", "self", "=", "yield", "queue", "=", "ctx", ".", "queue", "unicode_escapes_allowed", "=", "ctx", ".", "ion_type", "is", "not", "IonType", ".", "CLOB", "escaped_newline", "=", "False", "escape_sequence", "=", "b''", "low_surrogate_required", "=", "False", "while", "True", ":", "if", "len", "(", "queue", ")", "==", "0", ":", "yield", "ctx", ".", "read_data_event", "(", "self", ")", "queue_iter", "=", "iter", "(", "queue", ")", "code_point_generator", "=", "_next_code_point_iter", "(", "queue", ",", "queue_iter", ")", "code_point", "=", "next", "(", "code_point_generator", ")", "if", "code_point", "==", "_BACKSLASH", ":", "escape_sequence", "+=", "six", ".", "int2byte", "(", "_BACKSLASH", ")", "num_digits", "=", "None", "while", "True", ":", "if", "len", "(", "queue", ")", "==", "0", ":", "yield", "ctx", ".", "read_data_event", "(", "self", ")", "code_point", "=", "next", "(", "queue_iter", ")", "if", "six", ".", "indexbytes", "(", "escape_sequence", ",", "-", "1", ")", "==", "_BACKSLASH", ":", "if", "code_point", "==", "_ord", "(", "b'u'", ")", "and", "unicode_escapes_allowed", ":", "# 4-digit unicode escapes, plus '\\u' for each surrogate", "num_digits", "=", "12", "if", "low_surrogate_required", "else", "6", "low_surrogate_required", "=", "False", "elif", "low_surrogate_required", ":", "_illegal_character", "(", "code_point", ",", "ctx", ",", "'Unpaired high surrogate escape sequence %s.'", "%", "(", "escape_sequence", ",", ")", ")", "elif", "code_point", "==", "_ord", "(", "b'x'", ")", ":", "num_digits", "=", "4", "# 2-digit hex escapes", "elif", "code_point", "==", "_ord", "(", "b'U'", ")", "and", "unicode_escapes_allowed", ":", "num_digits", "=", "10", "# 8-digit unicode escapes", "elif", "code_point", "in", "_COMMON_ESCAPES", ":", "if", "code_point", "==", "_SLASH", "or", "code_point", "==", "_QUESTION_MARK", ":", "escape_sequence", "=", "b''", "# Drop the \\. Python does not recognize these as escapes.", "escape_sequence", "+=", "six", ".", "int2byte", "(", "code_point", ")", "break", "elif", "code_point", "in", "_NEWLINES", ":", "escaped_newline", "=", "True", "break", "else", ":", "# This is a backslash followed by an invalid escape character. This is illegal.", "_illegal_character", "(", "code_point", ",", "ctx", ",", "'Invalid escape sequence \\\\%s.'", "%", "(", "_chr", "(", "code_point", ")", ",", ")", ")", "escape_sequence", "+=", "six", ".", "int2byte", "(", "code_point", ")", "else", ":", "if", "code_point", "not", "in", "_HEX_DIGITS", ":", "_illegal_character", "(", "code_point", ",", "ctx", ",", "'Non-hex character %s found in unicode escape.'", "%", "(", "_chr", "(", "code_point", ")", ",", ")", ")", "escape_sequence", "+=", "six", ".", "int2byte", "(", "code_point", ")", "if", "len", "(", "escape_sequence", ")", "==", "num_digits", ":", "break", "if", "not", "escaped_newline", ":", "decoded_escape_sequence", "=", "escape_sequence", ".", "decode", "(", "'unicode-escape'", ")", "cp_iter", "=", "_next_code_point_iter", "(", "decoded_escape_sequence", ",", "iter", "(", "decoded_escape_sequence", ")", ",", "to_int", "=", "ord", ")", "code_point", "=", "next", "(", "cp_iter", ")", "if", "code_point", "is", "None", ":", "# This is a high surrogate. Restart the loop to gather the low surrogate.", "low_surrogate_required", "=", "True", "continue", "code_point", "=", "CodePoint", "(", "code_point", ")", "code_point", ".", "char", "=", "decoded_escape_sequence", "code_point", ".", "is_escaped", "=", "True", "ctx", ".", "set_code_point", "(", "code_point", ")", "yield", "Transition", "(", "None", ",", "whence", ")", "elif", "low_surrogate_required", ":", "_illegal_character", "(", "code_point", ",", "ctx", ",", "'Unpaired high surrogate escape sequence %s.'", "%", "(", "escape_sequence", ",", ")", ")", "if", "code_point", "==", "_CARRIAGE_RETURN", ":", "# Normalize all newlines (\\r, \\n, and \\r\\n) to \\n .", "if", "len", "(", "queue", ")", "==", "0", ":", "yield", "ctx", ".", "read_data_event", "(", "self", ")", "code_point", "=", "next", "(", "queue_iter", ")", "if", "code_point", "!=", "_NEWLINE", ":", "queue", ".", "unread", "(", "code_point", ")", "code_point", "=", "_NEWLINE", "while", "code_point", "is", "None", ":", "yield", "ctx", ".", "read_data_event", "(", "self", ")", "code_point", "=", "next", "(", "code_point_generator", ")", "if", "escaped_newline", ":", "code_point", "=", "CodePoint", "(", "code_point", ")", "code_point", ".", "char", "=", "_ESCAPED_NEWLINE", "code_point", ".", "is_escaped", "=", "True", "ctx", ".", "set_code_point", "(", "code_point", ")", "yield", "Transition", "(", "None", ",", "whence", ")" ]
Retrieves the next code point from within a quoted string or symbol.
[ "Retrieves", "the", "next", "code", "point", "from", "within", "a", "quoted", "string", "or", "symbol", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L2183-L2266
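Among the details handled above is newline normalization: a carriage return, alone or followed by a line feed, is reported as a single '\n'. A tiny standalone restatement of that rule (illustrative only):

def normalize_newline(first, peek):
    # Collapse \r and \r\n to \n; report how many input characters were consumed.
    if first == ord('\r'):
        consumed = 2 if peek == ord('\n') else 1
        return ord('\n'), consumed
    return first, 1

assert normalize_newline(ord('\r'), ord('\n')) == (ord('\n'), 2)
assert normalize_newline(ord('\r'), ord('x')) == (ord('\n'), 1)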
amzn/ion-python
amazon/ion/reader_text.py
reader
def reader(queue=None, is_unicode=False): """Returns a raw binary reader co-routine. Args: queue (Optional[BufferQueue]): The buffer read data for parsing, if ``None`` a new one will be created. is_unicode (Optional[bool]): True if all input data to this reader will be of unicode text type; False if all input data to this reader will be of binary type. Yields: IonEvent: parse events, will have an event type of ``INCOMPLETE`` if data is needed in the middle of a value or ``STREAM_END`` if there is no data **and** the parser is not in the middle of parsing a value. Receives :class:`DataEvent`, with :class:`ReadEventType` of ``NEXT`` or ``SKIP`` to iterate over values; ``DATA`` or ``NEXT`` if the last event type was ``INCOMPLETE``; or ``DATA`` if the last event type was ``STREAM_END``. When the reader receives ``NEXT`` after yielding ``INCOMPLETE``, this signals to the reader that no further data is coming, and that any pending data should be flushed as either parse events or errors. This is **only** valid at the top-level, and will **only** result in a parse event if the last character encountered... * was a digit or a decimal point in a non-timestamp, non-keyword numeric value; OR * ended a valid partial timestamp; OR * ended a keyword value (special floats, booleans, ``null``, and typed nulls); OR * was part of an unquoted symbol token, or whitespace or the end of a comment following an unquoted symbol token (as long as no colons were encountered after the token); OR * was the closing quote of a quoted symbol token, or whitespace or the end of a comment following a quoted symbol token (as long as no colons were encountered after the token); OR * was the final closing quote of a long string, or whitespace or the end of a comment following a long string. If the reader successfully yields a parse event as a result of this, ``NEXT`` is the only input that may immediately follow. At that point, there are only two possible responses from the reader: * If the last character read was the closing quote of an empty symbol following a long string, the reader will emit a parse event representing a symbol value with empty text. The next reader input/output event pair must be (``NEXT``, ``STREAM_END``). * Otherwise, the reader will emit ``STREAM_END``. After that ``STREAM_END``, the user may later provide ``DATA`` to resume reading. If this occurs, the new data will be interpreted as if it were at the start of the stream (i.e. it can never continue the previous value), except that it occurs within the same symbol table context. This has the following implications (where ``<FLUSH>`` stands for the (``INCOMPLETE``, ``NEXT``) transaction): * If the previously-emitted value was a numeric value (``int``, ``float``, ``decimal``, ``timestamp``), the new data will never extend that value, even if it would be a valid continuation. For example, ``123<FLUSH>456`` will always be emitted as two parse events (ints ``123`` and ``456``), even though it would have been interpreted as ``123456`` without the ``<FLUSH>``. * If the previously-emitted value was a symbol value or long string, the new data will be interpreted as the start of a new value. For example, ``abc<FLUSH>::123`` will be emitted as the symbol value ``'abc'``, followed by an error upon encountering ':' at the start of a value, even though it would have been interpreted as the ``int`` ``123`` annotated with ``'abc'`` without the ``<FLUSH>``. 
The input ``abc<FLUSH>abc`` will be emitted as the symbol value ``'abc'`` (represented by a :class:`SymbolToken`), followed by another symbol value ``'abc'`` (represented by a ``SymbolToken`` with the same symbol ID), even though it would have been interpreted as ``'abcabc'`` without the ``<FLUSH>``. Similarly, ``'''abc'''<FLUSH>'''def'''`` will be interpreted as two strings (``'abc'`` and ``'def'``), even though it would have been interpreted as ``'abcdef'`` without the ``<FLUSH>``. ``SKIP`` is only allowed within a container. A reader is *in* a container when the ``CONTAINER_START`` event type is encountered and *not in* a container when the ``CONTAINER_END`` event type for that container is encountered. """ if queue is None: queue = BufferQueue(is_unicode) ctx = _HandlerContext( container=_C_TOP_LEVEL, queue=queue, field_name=None, annotations=None, depth=0, whence=None, value=None, ion_type=None, # Top level pending_symbol=None ) return reader_trampoline(_skip_trampoline(_container_handler(None, ctx)), allow_flush=True)
python
def reader(queue=None, is_unicode=False): if queue is None: queue = BufferQueue(is_unicode) ctx = _HandlerContext( container=_C_TOP_LEVEL, queue=queue, field_name=None, annotations=None, depth=0, whence=None, value=None, ion_type=None, pending_symbol=None ) return reader_trampoline(_skip_trampoline(_container_handler(None, ctx)), allow_flush=True)
[ "def", "reader", "(", "queue", "=", "None", ",", "is_unicode", "=", "False", ")", ":", "if", "queue", "is", "None", ":", "queue", "=", "BufferQueue", "(", "is_unicode", ")", "ctx", "=", "_HandlerContext", "(", "container", "=", "_C_TOP_LEVEL", ",", "queue", "=", "queue", ",", "field_name", "=", "None", ",", "annotations", "=", "None", ",", "depth", "=", "0", ",", "whence", "=", "None", ",", "value", "=", "None", ",", "ion_type", "=", "None", ",", "# Top level", "pending_symbol", "=", "None", ")", "return", "reader_trampoline", "(", "_skip_trampoline", "(", "_container_handler", "(", "None", ",", "ctx", ")", ")", ",", "allow_flush", "=", "True", ")" ]
Returns a raw binary reader co-routine. Args: queue (Optional[BufferQueue]): The buffer read data for parsing, if ``None`` a new one will be created. is_unicode (Optional[bool]): True if all input data to this reader will be of unicode text type; False if all input data to this reader will be of binary type. Yields: IonEvent: parse events, will have an event type of ``INCOMPLETE`` if data is needed in the middle of a value or ``STREAM_END`` if there is no data **and** the parser is not in the middle of parsing a value. Receives :class:`DataEvent`, with :class:`ReadEventType` of ``NEXT`` or ``SKIP`` to iterate over values; ``DATA`` or ``NEXT`` if the last event type was ``INCOMPLETE``; or ``DATA`` if the last event type was ``STREAM_END``. When the reader receives ``NEXT`` after yielding ``INCOMPLETE``, this signals to the reader that no further data is coming, and that any pending data should be flushed as either parse events or errors. This is **only** valid at the top-level, and will **only** result in a parse event if the last character encountered... * was a digit or a decimal point in a non-timestamp, non-keyword numeric value; OR * ended a valid partial timestamp; OR * ended a keyword value (special floats, booleans, ``null``, and typed nulls); OR * was part of an unquoted symbol token, or whitespace or the end of a comment following an unquoted symbol token (as long as no colons were encountered after the token); OR * was the closing quote of a quoted symbol token, or whitespace or the end of a comment following a quoted symbol token (as long as no colons were encountered after the token); OR * was the final closing quote of a long string, or whitespace or the end of a comment following a long string. If the reader successfully yields a parse event as a result of this, ``NEXT`` is the only input that may immediately follow. At that point, there are only two possible responses from the reader: * If the last character read was the closing quote of an empty symbol following a long string, the reader will emit a parse event representing a symbol value with empty text. The next reader input/output event pair must be (``NEXT``, ``STREAM_END``). * Otherwise, the reader will emit ``STREAM_END``. After that ``STREAM_END``, the user may later provide ``DATA`` to resume reading. If this occurs, the new data will be interpreted as if it were at the start of the stream (i.e. it can never continue the previous value), except that it occurs within the same symbol table context. This has the following implications (where ``<FLUSH>`` stands for the (``INCOMPLETE``, ``NEXT``) transaction): * If the previously-emitted value was a numeric value (``int``, ``float``, ``decimal``, ``timestamp``), the new data will never extend that value, even if it would be a valid continuation. For example, ``123<FLUSH>456`` will always be emitted as two parse events (ints ``123`` and ``456``), even though it would have been interpreted as ``123456`` without the ``<FLUSH>``. * If the previously-emitted value was a symbol value or long string, the new data will be interpreted as the start of a new value. For example, ``abc<FLUSH>::123`` will be emitted as the symbol value ``'abc'``, followed by an error upon encountering ':' at the start of a value, even though it would have been interpreted as the ``int`` ``123`` annotated with ``'abc'`` without the ``<FLUSH>``. 
The input ``abc<FLUSH>abc`` will be emitted as the symbol value ``'abc'`` (represented by a :class:`SymbolToken`), followed by another symbol value ``'abc'`` (represented by a ``SymbolToken`` with the same symbol ID), even though it would have been interpreted as ``'abcabc'`` without the ``<FLUSH>``. Similarly, ``'''abc'''<FLUSH>'''def'''`` will be interpreted as two strings (``'abc'`` and ``'def'``), even though it would have been interpreted as ``'abcdef'`` without the ``<FLUSH>``. ``SKIP`` is only allowed within a container. A reader is *in* a container when the ``CONTAINER_START`` event type is encountered and *not in* a container when the ``CONTAINER_END`` event type for that container is encountered.
[ "Returns", "a", "raw", "binary", "reader", "co", "-", "routine", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L2269-L2348
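A minimal sketch of driving the ``reader`` co-routine described above, assuming the ``NEXT_EVENT`` constant and ``read_data_event`` helper exposed by ``amazon.ion.reader`` (neither is shown in this record, so treat those names as assumptions)::

    from amazon.ion.reader import NEXT_EVENT, read_data_event
    from amazon.ion.reader_text import reader

    r = reader()                               # raw text reader over a new binary BufferQueue
    event = r.send(NEXT_EVENT)                 # no data yet, so this should be STREAM_END
    event = r.send(read_data_event(b'123 '))   # the trailing space delimits the value,
                                               # so this should be a scalar event for int 123
    event = r.send(NEXT_EVENT)                 # buffer exhausted at the top level: STREAM_END again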
amzn/ion-python
amazon/ion/reader_text.py
_HandlerContext.event_transition
def event_transition(self, event_cls, event_type, ion_type, value): """Returns an ion event event_transition that yields to another co-routine.""" annotations = self.annotations or () depth = self.depth whence = self.whence if ion_type is IonType.SYMBOL: if not annotations and depth == 0 and isinstance(value, _IVMToken): event = value.ivm_event() if event is None: _illegal_character(None, self, 'Illegal IVM: %s.' % (value.text,)) return Transition(event, whence) assert not isinstance(value, _IVMToken) return Transition( event_cls(event_type, ion_type, value, self.field_name, annotations, depth), whence )
python
def event_transition(self, event_cls, event_type, ion_type, value): annotations = self.annotations or () depth = self.depth whence = self.whence if ion_type is IonType.SYMBOL: if not annotations and depth == 0 and isinstance(value, _IVMToken): event = value.ivm_event() if event is None: _illegal_character(None, self, 'Illegal IVM: %s.' % (value.text,)) return Transition(event, whence) assert not isinstance(value, _IVMToken) return Transition( event_cls(event_type, ion_type, value, self.field_name, annotations, depth), whence )
[ "def", "event_transition", "(", "self", ",", "event_cls", ",", "event_type", ",", "ion_type", ",", "value", ")", ":", "annotations", "=", "self", ".", "annotations", "or", "(", ")", "depth", "=", "self", ".", "depth", "whence", "=", "self", ".", "whence", "if", "ion_type", "is", "IonType", ".", "SYMBOL", ":", "if", "not", "annotations", "and", "depth", "==", "0", "and", "isinstance", "(", "value", ",", "_IVMToken", ")", ":", "event", "=", "value", ".", "ivm_event", "(", ")", "if", "event", "is", "None", ":", "_illegal_character", "(", "None", ",", "self", ",", "'Illegal IVM: %s.'", "%", "(", "value", ".", "text", ",", ")", ")", "return", "Transition", "(", "event", ",", "whence", ")", "assert", "not", "isinstance", "(", "value", ",", "_IVMToken", ")", "return", "Transition", "(", "event_cls", "(", "event_type", ",", "ion_type", ",", "value", ",", "self", ".", "field_name", ",", "annotations", ",", "depth", ")", ",", "whence", ")" ]
Returns an ion event event_transition that yields to another co-routine.
[ "Returns", "an", "ion", "event", "event_transition", "that", "yields", "to", "another", "co", "-", "routine", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L346-L363
amzn/ion-python
amazon/ion/reader_text.py
_HandlerContext.read_data_event
def read_data_event(self, whence, complete=False, can_flush=False): """Creates a transition to a co-routine for retrieving data as bytes. Args: whence (Coroutine): The co-routine to return to after the data is satisfied. complete (Optional[bool]): True if STREAM_END should be emitted if no bytes are read or available; False if INCOMPLETE should be emitted in that case. can_flush (Optional[bool]): True if NEXT may be requested after INCOMPLETE is emitted as a result of this data request. """ return Transition(None, _read_data_handler(whence, self, complete, can_flush))
python
def read_data_event(self, whence, complete=False, can_flush=False): return Transition(None, _read_data_handler(whence, self, complete, can_flush))
[ "def", "read_data_event", "(", "self", ",", "whence", ",", "complete", "=", "False", ",", "can_flush", "=", "False", ")", ":", "return", "Transition", "(", "None", ",", "_read_data_handler", "(", "whence", ",", "self", ",", "complete", ",", "can_flush", ")", ")" ]
Creates a transition to a co-routine for retrieving data as bytes. Args: whence (Coroutine): The co-routine to return to after the data is satisfied. complete (Optional[bool]): True if STREAM_END should be emitted if no bytes are read or available; False if INCOMPLETE should be emitted in that case. can_flush (Optional[bool]): True if NEXT may be requested after INCOMPLETE is emitted as a result of this data request.
[ "Creates", "a", "transition", "to", "a", "co", "-", "routine", "for", "retrieving", "data", "as", "bytes", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L369-L379
amzn/ion-python
amazon/ion/reader_text.py
_HandlerContext.set_unicode
def set_unicode(self, quoted_text=False): """Converts the context's ``value`` to a sequence of unicode code points for holding text tokens, indicating whether the text is quoted. """ if isinstance(self.value, CodePointArray): assert self.quoted_text == quoted_text return self self.value = CodePointArray(self.value) self.quoted_text = quoted_text self.line_comment = False return self
python
def set_unicode(self, quoted_text=False): if isinstance(self.value, CodePointArray): assert self.quoted_text == quoted_text return self self.value = CodePointArray(self.value) self.quoted_text = quoted_text self.line_comment = False return self
[ "def", "set_unicode", "(", "self", ",", "quoted_text", "=", "False", ")", ":", "if", "isinstance", "(", "self", ".", "value", ",", "CodePointArray", ")", ":", "assert", "self", ".", "quoted_text", "==", "quoted_text", "return", "self", "self", ".", "value", "=", "CodePointArray", "(", "self", ".", "value", ")", "self", ".", "quoted_text", "=", "quoted_text", "self", ".", "line_comment", "=", "False", "return", "self" ]
Converts the context's ``value`` to a sequence of unicode code points for holding text tokens, indicating whether the text is quoted.
[ "Converts", "the", "context", "s", "value", "to", "a", "sequence", "of", "unicode", "code", "points", "for", "holding", "text", "tokens", "indicating", "whether", "the", "text", "is", "quoted", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L388-L398
amzn/ion-python
amazon/ion/reader_text.py
_HandlerContext.set_quoted_text
def set_quoted_text(self, quoted_text): """Sets the context's ``quoted_text`` flag. Useful when entering and exiting quoted text tokens.""" self.quoted_text = quoted_text self.line_comment = False return self
python
def set_quoted_text(self, quoted_text): self.quoted_text = quoted_text self.line_comment = False return self
[ "def", "set_quoted_text", "(", "self", ",", "quoted_text", ")", ":", "self", ".", "quoted_text", "=", "quoted_text", "self", ".", "line_comment", "=", "False", "return", "self" ]
Sets the context's ``quoted_text`` flag. Useful when entering and exiting quoted text tokens.
[ "Sets", "the", "context", "s", "quoted_text", "flag", ".", "Useful", "when", "entering", "and", "exiting", "quoted", "text", "tokens", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L400-L404
amzn/ion-python
amazon/ion/reader_text.py
_HandlerContext.derive_container_context
def derive_container_context(self, ion_type, whence): """Derives a container context as a child of the current context.""" if ion_type is IonType.STRUCT: container = _C_STRUCT elif ion_type is IonType.LIST: container = _C_LIST elif ion_type is IonType.SEXP: container = _C_SEXP else: raise TypeError('Cannot derive container context for non-container type %s.' % (ion_type.name,)) return _HandlerContext( container=container, queue=self.queue, field_name=self.field_name, annotations=self.annotations, depth=self.depth + 1, whence=whence, value=None, # containers don't have a value ion_type=ion_type, pending_symbol=None )
python
def derive_container_context(self, ion_type, whence): if ion_type is IonType.STRUCT: container = _C_STRUCT elif ion_type is IonType.LIST: container = _C_LIST elif ion_type is IonType.SEXP: container = _C_SEXP else: raise TypeError('Cannot derive container context for non-container type %s.' % (ion_type.name,)) return _HandlerContext( container=container, queue=self.queue, field_name=self.field_name, annotations=self.annotations, depth=self.depth + 1, whence=whence, value=None, ion_type=ion_type, pending_symbol=None )
[ "def", "derive_container_context", "(", "self", ",", "ion_type", ",", "whence", ")", ":", "if", "ion_type", "is", "IonType", ".", "STRUCT", ":", "container", "=", "_C_STRUCT", "elif", "ion_type", "is", "IonType", ".", "LIST", ":", "container", "=", "_C_LIST", "elif", "ion_type", "is", "IonType", ".", "SEXP", ":", "container", "=", "_C_SEXP", "else", ":", "raise", "TypeError", "(", "'Cannot derive container context for non-container type %s.'", "%", "(", "ion_type", ".", "name", ",", ")", ")", "return", "_HandlerContext", "(", "container", "=", "container", ",", "queue", "=", "self", ".", "queue", ",", "field_name", "=", "self", ".", "field_name", ",", "annotations", "=", "self", ".", "annotations", ",", "depth", "=", "self", ".", "depth", "+", "1", ",", "whence", "=", "whence", ",", "value", "=", "None", ",", "# containers don't have a value", "ion_type", "=", "ion_type", ",", "pending_symbol", "=", "None", ")" ]
Derives a container context as a child of the current context.
[ "Derives", "a", "container", "context", "as", "a", "child", "of", "the", "current", "context", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L421-L441
amzn/ion-python
amazon/ion/reader_text.py
_HandlerContext.set_empty_symbol
def set_empty_symbol(self): """Resets the context, retaining the fields that make it a child of its container (``container``, ``queue``, ``depth``, ``whence``), and sets an empty ``pending_symbol``. This is useful when an empty quoted symbol immediately follows a long string. """ self.field_name = None self.annotations = None self.ion_type = None self.set_pending_symbol(CodePointArray()) return self
python
def set_empty_symbol(self): self.field_name = None self.annotations = None self.ion_type = None self.set_pending_symbol(CodePointArray()) return self
[ "def", "set_empty_symbol", "(", "self", ")", ":", "self", ".", "field_name", "=", "None", "self", ".", "annotations", "=", "None", "self", ".", "ion_type", "=", "None", "self", ".", "set_pending_symbol", "(", "CodePointArray", "(", ")", ")", "return", "self" ]
Resets the context, retaining the fields that make it a child of its container (``container``, ``queue``, ``depth``, ``whence``), and sets an empty ``pending_symbol``. This is useful when an empty quoted symbol immediately follows a long string.
[ "Resets", "the", "context", "retaining", "the", "fields", "that", "make", "it", "a", "child", "of", "its", "container", "(", "container", "queue", "depth", "whence", ")", "and", "sets", "an", "empty", "pending_symbol", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L443-L453
amzn/ion-python
amazon/ion/reader_text.py
_HandlerContext.derive_child_context
def derive_child_context(self, whence): """Derives a scalar context as a child of the current context.""" return _HandlerContext( container=self.container, queue=self.queue, field_name=None, annotations=None, depth=self.depth, whence=whence, value=bytearray(), # children start without a value ion_type=None, pending_symbol=None )
python
def derive_child_context(self, whence): return _HandlerContext( container=self.container, queue=self.queue, field_name=None, annotations=None, depth=self.depth, whence=whence, value=bytearray(), ion_type=None, pending_symbol=None )
[ "def", "derive_child_context", "(", "self", ",", "whence", ")", ":", "return", "_HandlerContext", "(", "container", "=", "self", ".", "container", ",", "queue", "=", "self", ".", "queue", ",", "field_name", "=", "None", ",", "annotations", "=", "None", ",", "depth", "=", "self", ".", "depth", ",", "whence", "=", "whence", ",", "value", "=", "bytearray", "(", ")", ",", "# children start without a value", "ion_type", "=", "None", ",", "pending_symbol", "=", "None", ")" ]
Derives a scalar context as a child of the current context.
[ "Derives", "a", "scalar", "context", "as", "a", "child", "of", "the", "current", "context", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L455-L467
amzn/ion-python
amazon/ion/reader_text.py
_HandlerContext.set_ion_type
def set_ion_type(self, ion_type): """Sets context to the given IonType.""" if ion_type is self.ion_type: return self self.ion_type = ion_type self.line_comment = False return self
python
def set_ion_type(self, ion_type): if ion_type is self.ion_type: return self self.ion_type = ion_type self.line_comment = False return self
[ "def", "set_ion_type", "(", "self", ",", "ion_type", ")", ":", "if", "ion_type", "is", "self", ".", "ion_type", ":", "return", "self", "self", ".", "ion_type", "=", "ion_type", "self", ".", "line_comment", "=", "False", "return", "self" ]
Sets context to the given IonType.
[ "Sets", "context", "to", "the", "given", "IonType", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L474-L480
amzn/ion-python
amazon/ion/reader_text.py
_HandlerContext.set_annotation
def set_annotation(self): """Appends the context's ``pending_symbol`` to its ``annotations`` sequence.""" assert self.pending_symbol is not None assert not self.value annotations = (_as_symbol(self.pending_symbol, is_symbol_value=False),) # pending_symbol becomes an annotation self.annotations = annotations if not self.annotations else self.annotations + annotations self.ion_type = None self.pending_symbol = None # reset pending symbol self.quoted_text = False self.line_comment = False self.is_self_delimiting = False return self
python
def set_annotation(self): assert self.pending_symbol is not None assert not self.value annotations = (_as_symbol(self.pending_symbol, is_symbol_value=False),) self.annotations = annotations if not self.annotations else self.annotations + annotations self.ion_type = None self.pending_symbol = None self.quoted_text = False self.line_comment = False self.is_self_delimiting = False return self
[ "def", "set_annotation", "(", "self", ")", ":", "assert", "self", ".", "pending_symbol", "is", "not", "None", "assert", "not", "self", ".", "value", "annotations", "=", "(", "_as_symbol", "(", "self", ".", "pending_symbol", ",", "is_symbol_value", "=", "False", ")", ",", ")", "# pending_symbol becomes an annotation", "self", ".", "annotations", "=", "annotations", "if", "not", "self", ".", "annotations", "else", "self", ".", "annotations", "+", "annotations", "self", ".", "ion_type", "=", "None", "self", ".", "pending_symbol", "=", "None", "# reset pending symbol", "self", ".", "quoted_text", "=", "False", "self", ".", "line_comment", "=", "False", "self", ".", "is_self_delimiting", "=", "False", "return", "self" ]
Appends the context's ``pending_symbol`` to its ``annotations`` sequence.
[ "Appends", "the", "context", "s", "pending_symbol", "to", "its", "annotations", "sequence", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L482-L493
amzn/ion-python
amazon/ion/reader_text.py
_HandlerContext.set_field_name
def set_field_name(self): """Sets the context's ``pending_symbol`` as its ``field_name``.""" assert self.pending_symbol is not None assert not self.value self.field_name = _as_symbol(self.pending_symbol, is_symbol_value=False) # pending_symbol becomes field name self.pending_symbol = None # reset pending symbol self.quoted_text = False self.line_comment = False self.is_self_delimiting = False return self
python
def set_field_name(self): assert self.pending_symbol is not None assert not self.value self.field_name = _as_symbol(self.pending_symbol, is_symbol_value=False) self.pending_symbol = None self.quoted_text = False self.line_comment = False self.is_self_delimiting = False return self
[ "def", "set_field_name", "(", "self", ")", ":", "assert", "self", ".", "pending_symbol", "is", "not", "None", "assert", "not", "self", ".", "value", "self", ".", "field_name", "=", "_as_symbol", "(", "self", ".", "pending_symbol", ",", "is_symbol_value", "=", "False", ")", "# pending_symbol becomes field name", "self", ".", "pending_symbol", "=", "None", "# reset pending symbol", "self", ".", "quoted_text", "=", "False", "self", ".", "line_comment", "=", "False", "self", ".", "is_self_delimiting", "=", "False", "return", "self" ]
Sets the context's ``pending_symbol`` as its ``field_name``.
[ "Sets", "the", "context", "s", "pending_symbol", "as", "its", "field_name", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L495-L504
amzn/ion-python
amazon/ion/reader_text.py
_HandlerContext.set_pending_symbol
def set_pending_symbol(self, pending_symbol=None): """Sets the context's ``pending_symbol`` with the given unicode sequence and resets the context's ``value``. If the input is None, an empty :class:`CodePointArray` is used. """ if pending_symbol is None: pending_symbol = CodePointArray() self.value = bytearray() # reset value self.pending_symbol = pending_symbol self.line_comment = False return self
python
def set_pending_symbol(self, pending_symbol=None): if pending_symbol is None: pending_symbol = CodePointArray() self.value = bytearray() self.pending_symbol = pending_symbol self.line_comment = False return self
[ "def", "set_pending_symbol", "(", "self", ",", "pending_symbol", "=", "None", ")", ":", "if", "pending_symbol", "is", "None", ":", "pending_symbol", "=", "CodePointArray", "(", ")", "self", ".", "value", "=", "bytearray", "(", ")", "# reset value", "self", ".", "pending_symbol", "=", "pending_symbol", "self", ".", "line_comment", "=", "False", "return", "self" ]
Sets the context's ``pending_symbol`` with the given unicode sequence and resets the context's ``value``. If the input is None, an empty :class:`CodePointArray` is used.
[ "Sets", "the", "context", "s", "pending_symbol", "with", "the", "given", "unicode", "sequence", "and", "resets", "the", "context", "s", "value", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L506-L516
amzn/ion-python
amazon/ion/writer_binary_raw_fields.py
_write_base
def _write_base(buf, value, bits_per_octet, end_bit=0, sign_bit=0, is_signed=False): """Write a field to the provided buffer. Args: buf (Sequence): The buffer into which the UInt will be written in the form of integer octets. value (int): The value to write as a UInt. bits_per_octet (int): The number of value bits (i.e. exclusive of the end bit, but inclusive of the sign bit, if applicable) per octet. end_bit (Optional[int]): The end bit mask. sign_bit (Optional[int]): The sign bit mask. Returns: int: The number of octets written. """ if value == 0: buf.append(sign_bit | end_bit) return 1 num_bits = bit_length(value) num_octets = num_bits // bits_per_octet # 'remainder' is the number of value bits in the first octet. remainder = num_bits % bits_per_octet if remainder != 0 or is_signed: # If signed, the first octet has one fewer bit available, requiring another octet. num_octets += 1 else: # This ensures that unsigned values that fit exactly are not shifted too far. remainder = bits_per_octet for i in range(num_octets): octet = 0 if i == 0: octet |= sign_bit if i == num_octets - 1: octet |= end_bit # 'remainder' is used for alignment such that only the first octet # may contain insignificant zeros. octet |= ((value >> (num_bits - (remainder + bits_per_octet * i))) & _OCTET_MASKS[bits_per_octet]) buf.append(octet) return num_octets
python
def _write_base(buf, value, bits_per_octet, end_bit=0, sign_bit=0, is_signed=False): if value == 0: buf.append(sign_bit | end_bit) return 1 num_bits = bit_length(value) num_octets = num_bits // bits_per_octet remainder = num_bits % bits_per_octet if remainder != 0 or is_signed: num_octets += 1 else: remainder = bits_per_octet for i in range(num_octets): octet = 0 if i == 0: octet |= sign_bit if i == num_octets - 1: octet |= end_bit octet |= ((value >> (num_bits - (remainder + bits_per_octet * i))) & _OCTET_MASKS[bits_per_octet]) buf.append(octet) return num_octets
[ "def", "_write_base", "(", "buf", ",", "value", ",", "bits_per_octet", ",", "end_bit", "=", "0", ",", "sign_bit", "=", "0", ",", "is_signed", "=", "False", ")", ":", "if", "value", "==", "0", ":", "buf", ".", "append", "(", "sign_bit", "|", "end_bit", ")", "return", "1", "num_bits", "=", "bit_length", "(", "value", ")", "num_octets", "=", "num_bits", "//", "bits_per_octet", "# 'remainder' is the number of value bits in the first octet.", "remainder", "=", "num_bits", "%", "bits_per_octet", "if", "remainder", "!=", "0", "or", "is_signed", ":", "# If signed, the first octet has one fewer bit available, requiring another octet.", "num_octets", "+=", "1", "else", ":", "# This ensures that unsigned values that fit exactly are not shifted too far.", "remainder", "=", "bits_per_octet", "for", "i", "in", "range", "(", "num_octets", ")", ":", "octet", "=", "0", "if", "i", "==", "0", ":", "octet", "|=", "sign_bit", "if", "i", "==", "num_octets", "-", "1", ":", "octet", "|=", "end_bit", "# 'remainder' is used for alignment such that only the first octet", "# may contain insignificant zeros.", "octet", "|=", "(", "(", "value", ">>", "(", "num_bits", "-", "(", "remainder", "+", "bits_per_octet", "*", "i", ")", ")", ")", "&", "_OCTET_MASKS", "[", "bits_per_octet", "]", ")", "buf", ".", "append", "(", "octet", ")", "return", "num_octets" ]
Write a field to the provided buffer. Args: buf (Sequence): The buffer into which the UInt will be written in the form of integer octets. value (int): The value to write as a UInt. bits_per_octet (int): The number of value bits (i.e. exclusive of the end bit, but inclusive of the sign bit, if applicable) per octet. end_bit (Optional[int]): The end bit mask. sign_bit (Optional[int]): The sign bit mask. Returns: int: The number of octets written.
[ "Write", "a", "field", "to", "the", "provided", "buffer", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/writer_binary_raw_fields.py#L147-L185
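As a worked example of the octet arithmetic above, the private helper can be driven directly to produce a ``VarUInt`` field (7 value bits per octet with a terminating end bit); the ``0x80`` end-bit constant is an assumption based on the Ion binary layout rather than a value shown in this record::

    from amazon.ion.writer_binary_raw_fields import _write_base

    buf = bytearray()
    # 300 needs 9 bits (0b100101100), so it spans two 7-bit octets.
    count = _write_base(buf, 300, 7, end_bit=0x80)
    assert count == 2
    assert buf == bytearray([0x02, 0xAC])   # 2 * 128 + 44 == 300; 0xAC carries the end bit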
amzn/ion-python
amazon/ion/util.py
record
def record(*fields): """Constructs a type that can be extended to create immutable, value types. Examples: A typical declaration looks like:: class MyRecord(record('a', ('b', 1))): pass The above would make a sub-class of ``collections.namedtuple`` that was named ``MyRecord`` with a constructor that had the ``b`` field set to 1 by default. Note: This uses meta-class machinery to rewrite the inheritance hierarchy. This is done in order to make sure that the underlying ``namedtuple`` instance is bound to the right type name and to make sure that the synthetic class that is generated to enable this machinery is not enabled for sub-classes of a user's record class. Args: fields (list[str | (str, any)]): A sequence of str or pairs that """ @six.add_metaclass(_RecordMetaClass) class RecordType(object): _record_sentinel = True _record_fields = fields return RecordType
python
def record(*fields): @six.add_metaclass(_RecordMetaClass) class RecordType(object): _record_sentinel = True _record_fields = fields return RecordType
[ "def", "record", "(", "*", "fields", ")", ":", "@", "six", ".", "add_metaclass", "(", "_RecordMetaClass", ")", "class", "RecordType", "(", "object", ")", ":", "_record_sentinel", "=", "True", "_record_fields", "=", "fields", "return", "RecordType" ]
Constructs a type that can be extended to create immutable, value types. Examples: A typical declaration looks like:: class MyRecord(record('a', ('b', 1))): pass The above would make a sub-class of ``collections.namedtuple`` that was named ``MyRecord`` with a constructor that had the ``b`` field set to 1 by default. Note: This uses meta-class machinery to rewrite the inheritance hierarchy. This is done in order to make sure that the underlying ``namedtuple`` instance is bound to the right type name and to make sure that the synthetic class that is generated to enable this machinery is not enabled for sub-classes of a user's record class. Args: fields (list[str | (str, any)]): A sequence of str or pairs that
[ "Constructs", "a", "type", "that", "can", "be", "extended", "to", "create", "immutable", "value", "types", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/util.py#L137-L163
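A small usage sketch of the ``record`` declaration pattern from the docstring (the field values are arbitrary)::

    from amazon.ion.util import record

    class MyRecord(record('a', ('b', 1))):
        pass

    x = MyRecord(42)                 # 'b' falls back to its declared default of 1
    assert x.a == 42 and x.b == 1    # behaves like an immutable namedtuple value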
amzn/ion-python
amazon/ion/util.py
coroutine
def coroutine(func): """Wraps a PEP-342 enhanced generator in a way that avoids boilerplate of the "priming" call to ``next``. Args: func (Callable): The function constructing a generator to decorate. Returns: Callable: The decorated generator. """ def wrapper(*args, **kwargs): gen = func(*args, **kwargs) val = next(gen) if val != None: raise TypeError('Unexpected value from start of coroutine') return gen wrapper.__name__ = func.__name__ wrapper.__doc__ = func.__doc__ return wrapper
python
def coroutine(func): def wrapper(*args, **kwargs): gen = func(*args, **kwargs) val = next(gen) if val != None: raise TypeError('Unexpected value from start of coroutine') return gen wrapper.__name__ = func.__name__ wrapper.__doc__ = func.__doc__ return wrapper
[ "def", "coroutine", "(", "func", ")", ":", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "gen", "=", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "val", "=", "next", "(", "gen", ")", "if", "val", "!=", "None", ":", "raise", "TypeError", "(", "'Unexpected value from start of coroutine'", ")", "return", "gen", "wrapper", ".", "__name__", "=", "func", ".", "__name__", "wrapper", ".", "__doc__", "=", "func", ".", "__doc__", "return", "wrapper" ]
Wraps a PEP-342 enhanced generator in a way that avoids boilerplate of the "priming" call to ``next``. Args: func (Callable): The function constructing a generator to decorate. Returns: Callable: The decorated generator.
[ "Wraps", "a", "PEP", "-", "342", "enhanced", "generator", "in", "a", "way", "that", "avoids", "boilerplate", "of", "the", "priming", "call", "to", "next", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/util.py#L166-L183
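A brief sketch of what the ``coroutine`` decorator saves the caller from doing; the generator below is illustrative and not part of the library::

    from amazon.ion.util import coroutine

    @coroutine
    def echo():
        while True:
            received = yield         # the decorator has already primed the generator
            print('got', received)

    gen = echo()                     # no explicit next(gen) is needed before the first send
    gen.send('hello')                # prints: got hello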
amzn/ion-python
amazon/ion/util.py
unicode_iter
def unicode_iter(val): """Provides an iterator over the *code points* of the given Unicode sequence. Notes: Before PEP-393, Python has the potential to support Unicode as UTF-16 or UTF-32. This is reified in the property as ``sys.maxunicode``. As a result, naive iteration of Unicode sequences will render non-character code points such as UTF-16 surrogates. Args: val (unicode): The unicode sequence to iterate over as integer code points in the range ``0x0`` to ``0x10FFFF``. """ val_iter = iter(val) while True: try: code_point = next(_next_code_point(val, val_iter, to_int=ord)) except StopIteration: return if code_point is None: raise ValueError('Unpaired high surrogate at end of Unicode sequence: %r' % val) yield code_point
python
def unicode_iter(val): val_iter = iter(val) while True: try: code_point = next(_next_code_point(val, val_iter, to_int=ord)) except StopIteration: return if code_point is None: raise ValueError('Unpaired high surrogate at end of Unicode sequence: %r' % val) yield code_point
[ "def", "unicode_iter", "(", "val", ")", ":", "val_iter", "=", "iter", "(", "val", ")", "while", "True", ":", "try", ":", "code_point", "=", "next", "(", "_next_code_point", "(", "val", ",", "val_iter", ",", "to_int", "=", "ord", ")", ")", "except", "StopIteration", ":", "return", "if", "code_point", "is", "None", ":", "raise", "ValueError", "(", "'Unpaired high surrogate at end of Unicode sequence: %r'", "%", "val", ")", "yield", "code_point" ]
Provides an iterator over the *code points* of the given Unicode sequence. Notes: Before PEP-393, Python has the potential to support Unicode as UTF-16 or UTF-32. This is reified in the property as ``sys.maxunicode``. As a result, naive iteration of Unicode sequences will render non-character code points such as UTF-16 surrogates. Args: val (unicode): The unicode sequence to iterate over as integer code points in the range ``0x0`` to ``0x10FFFF``.
[ "Provides", "an", "iterator", "over", "the", "*", "code", "points", "*", "of", "the", "given", "Unicode", "sequence", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/util.py#L196-L216
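A short sketch of the surrogate-aware iteration ``unicode_iter`` provides; the non-BMP code point used here is arbitrary::

    from amazon.ion.util import unicode_iter

    # On narrow (UCS-2) builds u'\U0001F600' is stored as a surrogate pair, but the
    # iterator still yields the single combined code point 0x1F600.
    assert list(unicode_iter(u'a\U0001F600')) == [0x61, 0x1F600]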
amzn/ion-python
amazon/ion/util.py
_next_code_point
def _next_code_point(val, val_iter, yield_char=False, to_int=lambda x: x): """Provides the next *code point* in the given Unicode sequence. This generator function yields complete character code points, never incomplete surrogates. When a low surrogate is found without following a high surrogate, this function raises ``ValueError`` for having encountered an unpaired low surrogate. When the provided iterator ends on a high surrogate, this function yields ``None``. This is the **only** case in which this function yields ``None``. When this occurs, the user may append additional data to the input unicode sequence and resume iterating through another ``next`` on this generator. When this function receives ``next`` after yielding ``None``, it *reinitializes the unicode iterator*. This means that this feature can only be used for values that contain an ``__iter__`` implementation that remains at the current position in the data when called (e.g. :class:`BufferQueue`). At this point, there are only two possible outcomes: * If next code point is a valid low surrogate, this function yields the combined code point represented by the surrogate pair. * Otherwise, this function raises ``ValueError`` for having encountered an unpaired high surrogate. Args: val (unicode|BufferQueue): A unicode sequence or unicode BufferQueue over which to iterate. val_iter (Iterator[unicode|BufferQueue]): The unicode sequence iterator over ``val`` from which to generate the next integer code point in the range ``0x0`` to ``0x10FFFF``. yield_char (Optional[bool]): If True **and** the character code point resulted from a surrogate pair, this function will yield a :class:`CodePoint` representing the character code point and containing the original unicode character. This is useful when the original unicode character will be needed again because UCS2 Python builds will error when trying to convert code points greater than 0xFFFF back into their unicode character representations. This avoids requiring the user to mathematically re-derive the surrogate pair in order to successfully convert the code point back to a unicode character. to_int (Optional[callable]): A function to call on each element of val_iter to convert that element to an int. """ try: high = next(val_iter) except StopIteration: return low = None code_point = to_int(high) if _LOW_SURROGATE_START <= code_point <= _LOW_SURROGATE_END: raise ValueError('Unpaired low surrogate in Unicode sequence: %d' % code_point) elif _HIGH_SURROGATE_START <= code_point <= _HIGH_SURROGATE_END: def combine_surrogates(): low_surrogate = next(val_iter) low_code_point = to_int(low_surrogate) if low_code_point < _LOW_SURROGATE_START or low_code_point > _LOW_SURROGATE_END: raise ValueError('Unpaired high surrogate: %d' % code_point) # Decode the surrogates real_code_point = _NON_BMP_OFFSET real_code_point += (code_point - _HIGH_SURROGATE_START) << 10 real_code_point += (low_code_point - _LOW_SURROGATE_START) return real_code_point, low_surrogate try: code_point, low = combine_surrogates() except StopIteration: yield None val_iter = iter(val) # More data has appeared in val. code_point, low = combine_surrogates() if yield_char and low is not None: out = CodePoint(code_point) if isinstance(val, six.text_type): # Iterating over a text type returns text types. out.char = high + low else: out.char = six.unichr(high) + six.unichr(low) else: out = code_point yield out
python
def _next_code_point(val, val_iter, yield_char=False, to_int=lambda x: x): try: high = next(val_iter) except StopIteration: return low = None code_point = to_int(high) if _LOW_SURROGATE_START <= code_point <= _LOW_SURROGATE_END: raise ValueError('Unpaired low surrogate in Unicode sequence: %d' % code_point) elif _HIGH_SURROGATE_START <= code_point <= _HIGH_SURROGATE_END: def combine_surrogates(): low_surrogate = next(val_iter) low_code_point = to_int(low_surrogate) if low_code_point < _LOW_SURROGATE_START or low_code_point > _LOW_SURROGATE_END: raise ValueError('Unpaired high surrogate: %d' % code_point) real_code_point = _NON_BMP_OFFSET real_code_point += (code_point - _HIGH_SURROGATE_START) << 10 real_code_point += (low_code_point - _LOW_SURROGATE_START) return real_code_point, low_surrogate try: code_point, low = combine_surrogates() except StopIteration: yield None val_iter = iter(val) code_point, low = combine_surrogates() if yield_char and low is not None: out = CodePoint(code_point) if isinstance(val, six.text_type): out.char = high + low else: out.char = six.unichr(high) + six.unichr(low) else: out = code_point yield out
[ "def", "_next_code_point", "(", "val", ",", "val_iter", ",", "yield_char", "=", "False", ",", "to_int", "=", "lambda", "x", ":", "x", ")", ":", "try", ":", "high", "=", "next", "(", "val_iter", ")", "except", "StopIteration", ":", "return", "low", "=", "None", "code_point", "=", "to_int", "(", "high", ")", "if", "_LOW_SURROGATE_START", "<=", "code_point", "<=", "_LOW_SURROGATE_END", ":", "raise", "ValueError", "(", "'Unpaired low surrogate in Unicode sequence: %d'", "%", "code_point", ")", "elif", "_HIGH_SURROGATE_START", "<=", "code_point", "<=", "_HIGH_SURROGATE_END", ":", "def", "combine_surrogates", "(", ")", ":", "low_surrogate", "=", "next", "(", "val_iter", ")", "low_code_point", "=", "to_int", "(", "low_surrogate", ")", "if", "low_code_point", "<", "_LOW_SURROGATE_START", "or", "low_code_point", ">", "_LOW_SURROGATE_END", ":", "raise", "ValueError", "(", "'Unpaired high surrogate: %d'", "%", "code_point", ")", "# Decode the surrogates", "real_code_point", "=", "_NON_BMP_OFFSET", "real_code_point", "+=", "(", "code_point", "-", "_HIGH_SURROGATE_START", ")", "<<", "10", "real_code_point", "+=", "(", "low_code_point", "-", "_LOW_SURROGATE_START", ")", "return", "real_code_point", ",", "low_surrogate", "try", ":", "code_point", ",", "low", "=", "combine_surrogates", "(", ")", "except", "StopIteration", ":", "yield", "None", "val_iter", "=", "iter", "(", "val", ")", "# More data has appeared in val.", "code_point", ",", "low", "=", "combine_surrogates", "(", ")", "if", "yield_char", "and", "low", "is", "not", "None", ":", "out", "=", "CodePoint", "(", "code_point", ")", "if", "isinstance", "(", "val", ",", "six", ".", "text_type", ")", ":", "# Iterating over a text type returns text types.", "out", ".", "char", "=", "high", "+", "low", "else", ":", "out", ".", "char", "=", "six", ".", "unichr", "(", "high", ")", "+", "six", ".", "unichr", "(", "low", ")", "else", ":", "out", "=", "code_point", "yield", "out" ]
Provides the next *code point* in the given Unicode sequence. This generator function yields complete character code points, never incomplete surrogates. When a low surrogate is found without following a high surrogate, this function raises ``ValueError`` for having encountered an unpaired low surrogate. When the provided iterator ends on a high surrogate, this function yields ``None``. This is the **only** case in which this function yields ``None``. When this occurs, the user may append additional data to the input unicode sequence and resume iterating through another ``next`` on this generator. When this function receives ``next`` after yielding ``None``, it *reinitializes the unicode iterator*. This means that this feature can only be used for values that contain an ``__iter__`` implementation that remains at the current position in the data when called (e.g. :class:`BufferQueue`). At this point, there are only two possible outcomes: * If next code point is a valid low surrogate, this function yields the combined code point represented by the surrogate pair. * Otherwise, this function raises ``ValueError`` for having encountered an unpaired high surrogate. Args: val (unicode|BufferQueue): A unicode sequence or unicode BufferQueue over which to iterate. val_iter (Iterator[unicode|BufferQueue]): The unicode sequence iterator over ``val`` from which to generate the next integer code point in the range ``0x0`` to ``0x10FFFF``. yield_char (Optional[bool]): If True **and** the character code point resulted from a surrogate pair, this function will yield a :class:`CodePoint` representing the character code point and containing the original unicode character. This is useful when the original unicode character will be needed again because UCS2 Python builds will error when trying to convert code points greater than 0xFFFF back into their unicode character representations. This avoids requiring the user to mathematically re-derive the surrogate pair in order to successfully convert the code point back to a unicode character. to_int (Optional[callable]): A function to call on each element of val_iter to convert that element to an int.
[ "Provides", "the", "next", "*", "code", "point", "*", "in", "the", "given", "Unicode", "sequence", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/util.py#L228-L289
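The surrogate arithmetic inside ``combine_surrogates`` can be checked by hand; this sketch restates the formula with the standard UTF-16 constants, which are assumed to match ``_NON_BMP_OFFSET`` and the surrogate range bounds used above::

    HIGH_START, LOW_START, NON_BMP_OFFSET = 0xD800, 0xDC00, 0x10000

    high, low = 0xD83D, 0xDE00       # UTF-16 surrogate pair for U+1F600
    code_point = NON_BMP_OFFSET + ((high - HIGH_START) << 10) + (low - LOW_START)
    assert code_point == 0x1F600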
amzn/ion-python
amazon/ion/core.py
timestamp
def timestamp(year, month=1, day=1, hour=0, minute=0, second=0, microsecond=None, off_hours=None, off_minutes=None, precision=None, fractional_precision=None): """Shorthand for the :class:`Timestamp` constructor. Specifically, converts ``off_hours`` and ``off_minutes`` parameters to a suitable :class:`OffsetTZInfo` instance. """ delta = None if off_hours is not None: if off_hours < -23 or off_hours > 23: raise ValueError('Hour offset %d is out of required range -23..23.' % (off_hours,)) delta = timedelta(hours=off_hours) if off_minutes is not None: if off_minutes < -59 or off_minutes > 59: raise ValueError('Minute offset %d is out of required range -59..59.' % (off_minutes,)) minutes_delta = timedelta(minutes=off_minutes) if delta is None: delta = minutes_delta else: delta += minutes_delta tz = None if delta is not None: tz = OffsetTZInfo(delta) if microsecond is not None: if fractional_precision is None: fractional_precision = MICROSECOND_PRECISION else: microsecond = 0 if fractional_precision is not None: raise ValueError('Cannot have fractional precision without a fractional component.') return Timestamp( year, month, day, hour, minute, second, microsecond, tz, precision=precision, fractional_precision=fractional_precision )
python
def timestamp(year, month=1, day=1, hour=0, minute=0, second=0, microsecond=None, off_hours=None, off_minutes=None, precision=None, fractional_precision=None): delta = None if off_hours is not None: if off_hours < -23 or off_hours > 23: raise ValueError('Hour offset %d is out of required range -23..23.' % (off_hours,)) delta = timedelta(hours=off_hours) if off_minutes is not None: if off_minutes < -59 or off_minutes > 59: raise ValueError('Minute offset %d is out of required range -59..59.' % (off_minutes,)) minutes_delta = timedelta(minutes=off_minutes) if delta is None: delta = minutes_delta else: delta += minutes_delta tz = None if delta is not None: tz = OffsetTZInfo(delta) if microsecond is not None: if fractional_precision is None: fractional_precision = MICROSECOND_PRECISION else: microsecond = 0 if fractional_precision is not None: raise ValueError('Cannot have fractional precision without a fractional component.') return Timestamp( year, month, day, hour, minute, second, microsecond, tz, precision=precision, fractional_precision=fractional_precision )
[ "def", "timestamp", "(", "year", ",", "month", "=", "1", ",", "day", "=", "1", ",", "hour", "=", "0", ",", "minute", "=", "0", ",", "second", "=", "0", ",", "microsecond", "=", "None", ",", "off_hours", "=", "None", ",", "off_minutes", "=", "None", ",", "precision", "=", "None", ",", "fractional_precision", "=", "None", ")", ":", "delta", "=", "None", "if", "off_hours", "is", "not", "None", ":", "if", "off_hours", "<", "-", "23", "or", "off_hours", ">", "23", ":", "raise", "ValueError", "(", "'Hour offset %d is out of required range -23..23.'", "%", "(", "off_hours", ",", ")", ")", "delta", "=", "timedelta", "(", "hours", "=", "off_hours", ")", "if", "off_minutes", "is", "not", "None", ":", "if", "off_minutes", "<", "-", "59", "or", "off_minutes", ">", "59", ":", "raise", "ValueError", "(", "'Minute offset %d is out of required range -59..59.'", "%", "(", "off_minutes", ",", ")", ")", "minutes_delta", "=", "timedelta", "(", "minutes", "=", "off_minutes", ")", "if", "delta", "is", "None", ":", "delta", "=", "minutes_delta", "else", ":", "delta", "+=", "minutes_delta", "tz", "=", "None", "if", "delta", "is", "not", "None", ":", "tz", "=", "OffsetTZInfo", "(", "delta", ")", "if", "microsecond", "is", "not", "None", ":", "if", "fractional_precision", "is", "None", ":", "fractional_precision", "=", "MICROSECOND_PRECISION", "else", ":", "microsecond", "=", "0", "if", "fractional_precision", "is", "not", "None", ":", "raise", "ValueError", "(", "'Cannot have fractional precision without a fractional component.'", ")", "return", "Timestamp", "(", "year", ",", "month", ",", "day", ",", "hour", ",", "minute", ",", "second", ",", "microsecond", ",", "tz", ",", "precision", "=", "precision", ",", "fractional_precision", "=", "fractional_precision", ")" ]
Shorthand for the :class:`Timestamp` constructor. Specifically, converts ``off_hours`` and ``off_minutes`` parameters to a suitable :class:`OffsetTZInfo` instance.
[ "Shorthand", "for", "the", ":", "class", ":", "Timestamp", "constructor", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/core.py#L437-L476
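A usage sketch of the ``timestamp`` shorthand; ``TimestampPrecision`` is assumed to be the precision enumeration exported by ``amazon.ion.core``::

    from datetime import timedelta
    from amazon.ion.core import timestamp, TimestampPrecision

    # 2001-02-03T04:05:06.500000-07:30, stated to second precision.
    ts = timestamp(2001, 2, 3, 4, 5, 6, microsecond=500000,
                   off_hours=-7, off_minutes=-30,
                   precision=TimestampPrecision.SECOND)
    assert ts.utcoffset() == timedelta(hours=-7, minutes=-30)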
amzn/ion-python
amazon/ion/core.py
IonEvent.derive_field_name
def derive_field_name(self, field_name): """Derives a new event from this one setting the ``field_name`` attribute. Args: field_name (Union[amazon.ion.symbols.SymbolToken, unicode]): The field name to set. Returns: IonEvent: The newly generated event. """ cls = type(self) # We use ordinals to avoid thunk materialization. return cls( self[0], self[1], self[2], field_name, self[4], self[5] )
python
def derive_field_name(self, field_name): cls = type(self) return cls( self[0], self[1], self[2], field_name, self[4], self[5] )
[ "def", "derive_field_name", "(", "self", ",", "field_name", ")", ":", "cls", "=", "type", "(", "self", ")", "# We use ordinals to avoid thunk materialization.", "return", "cls", "(", "self", "[", "0", "]", ",", "self", "[", "1", "]", ",", "self", "[", "2", "]", ",", "field_name", ",", "self", "[", "4", "]", ",", "self", "[", "5", "]", ")" ]
Derives a new event from this one setting the ``field_name`` attribute. Args: field_name (Union[amazon.ion.symbols.SymbolToken, unicode]): The field name to set. Returns: IonEvent: The newly generated event.
[ "Derives", "a", "new", "event", "from", "this", "one", "setting", "the", "field_name", "attribute", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/core.py#L163-L180
amzn/ion-python
amazon/ion/core.py
IonEvent.derive_annotations
def derive_annotations(self, annotations): """Derives a new event from this one setting the ``annotations`` attribute. Args: annotations: (Sequence[Union[amazon.ion.symbols.SymbolToken, unicode]]): The annotations associated with the derived event. Returns: IonEvent: The newly generated event. """ cls = type(self) # We use ordinals to avoid thunk materialization. return cls( self[0], self[1], self[2], self[3], annotations, self[5] )
python
def derive_annotations(self, annotations): cls = type(self) return cls( self[0], self[1], self[2], self[3], annotations, self[5] )
[ "def", "derive_annotations", "(", "self", ",", "annotations", ")", ":", "cls", "=", "type", "(", "self", ")", "# We use ordinals to avoid thunk materialization.", "return", "cls", "(", "self", "[", "0", "]", ",", "self", "[", "1", "]", ",", "self", "[", "2", "]", ",", "self", "[", "3", "]", ",", "annotations", ",", "self", "[", "5", "]", ")" ]
Derives a new event from this one setting the ``annotations`` attribute. Args: annotations: (Sequence[Union[amazon.ion.symbols.SymbolToken, unicode]]): The annotations associated with the derived event. Returns: IonEvent: The newly generated event.
[ "Derives", "a", "new", "event", "from", "this", "one", "setting", "the", "annotations", "attribute", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/core.py#L182-L201
amzn/ion-python
amazon/ion/core.py
IonEvent.derive_value
def derive_value(self, value): """Derives a new event from this one setting the ``value`` attribute. Args: value: (any): The value associated with the derived event. Returns: IonEvent: The newly generated non-thunk event. """ return IonEvent( self.event_type, self.ion_type, value, self.field_name, self.annotations, self.depth )
python
def derive_value(self, value): return IonEvent( self.event_type, self.ion_type, value, self.field_name, self.annotations, self.depth )
[ "def", "derive_value", "(", "self", ",", "value", ")", ":", "return", "IonEvent", "(", "self", ".", "event_type", ",", "self", ".", "ion_type", ",", "value", ",", "self", ".", "field_name", ",", "self", ".", "annotations", ",", "self", ".", "depth", ")" ]
Derives a new event from this one setting the ``value`` attribute. Args: value: (any): The value associated with the derived event. Returns: IonEvent: The newly generated non-thunk event.
[ "Derives", "a", "new", "event", "from", "this", "one", "setting", "the", "value", "attribute", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/core.py#L203-L220
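A sketch of how the ``derive_*`` helpers (``derive_field_name``, ``derive_annotations``, ``derive_value``) produce modified copies of an otherwise immutable event; the positional defaults used when constructing ``IonEvent`` are an assumption about ``amazon.ion.core``::

    from amazon.ion.core import IonEvent, IonEventType, IonType

    original = IonEvent(IonEventType.SCALAR, IonType.INT, 5)
    renamed = original.derive_field_name(u'count')
    tagged = renamed.derive_annotations((u'metric',))
    changed = tagged.derive_value(6)
    assert original.value == 5 and changed.value == 6          # the source event is untouched
    assert changed.field_name == u'count' and changed.annotations == (u'metric',)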
amzn/ion-python
amazon/ion/core.py
IonEvent.derive_depth
def derive_depth(self, depth): """Derives a new event from this one setting the ``depth`` attribute. Args: depth: (int): The annotations associated with the derived event. Returns: IonEvent: The newly generated event. """ cls = type(self) # We use ordinals to avoid thunk materialization. return cls( self[0], self[1], self[2], self[3], self[4], depth )
python
def derive_depth(self, depth): cls = type(self) return cls( self[0], self[1], self[2], self[3], self[4], depth )
[ "def", "derive_depth", "(", "self", ",", "depth", ")", ":", "cls", "=", "type", "(", "self", ")", "# We use ordinals to avoid thunk materialization.", "return", "cls", "(", "self", "[", "0", "]", ",", "self", "[", "1", "]", ",", "self", "[", "2", "]", ",", "self", "[", "3", "]", ",", "self", "[", "4", "]", ",", "depth", ")" ]
Derives a new event from this one setting the ``depth`` attribute. Args: depth: (int): The annotations associated with the derived event. Returns: IonEvent: The newly generated event.
[ "Derives", "a", "new", "event", "from", "this", "one", "setting", "the", "depth", "attribute", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/core.py#L222-L241
amzn/ion-python
amazon/ion/core.py
Timestamp.adjust_from_utc_fields
def adjust_from_utc_fields(*args, **kwargs): """Constructs a timestamp from UTC fields adjusted to the local offset if given.""" raw_ts = Timestamp(*args, **kwargs) offset = raw_ts.utcoffset() if offset is None or offset == timedelta(): return raw_ts # XXX This returns a datetime, not a Timestamp (which has our precision if defined) adjusted = raw_ts + offset if raw_ts.precision is None: # No precision means we can just return a regular datetime return adjusted return Timestamp( adjusted.year, adjusted.month, adjusted.day, adjusted.hour, adjusted.minute, adjusted.second, adjusted.microsecond, raw_ts.tzinfo, precision=raw_ts.precision, fractional_precision=raw_ts.fractional_precision )
python
def adjust_from_utc_fields(*args, **kwargs): raw_ts = Timestamp(*args, **kwargs) offset = raw_ts.utcoffset() if offset is None or offset == timedelta(): return raw_ts adjusted = raw_ts + offset if raw_ts.precision is None: return adjusted return Timestamp( adjusted.year, adjusted.month, adjusted.day, adjusted.hour, adjusted.minute, adjusted.second, adjusted.microsecond, raw_ts.tzinfo, precision=raw_ts.precision, fractional_precision=raw_ts.fractional_precision )
[ "def", "adjust_from_utc_fields", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "raw_ts", "=", "Timestamp", "(", "*", "args", ",", "*", "*", "kwargs", ")", "offset", "=", "raw_ts", ".", "utcoffset", "(", ")", "if", "offset", "is", "None", "or", "offset", "==", "timedelta", "(", ")", ":", "return", "raw_ts", "# XXX This returns a datetime, not a Timestamp (which has our precision if defined)", "adjusted", "=", "raw_ts", "+", "offset", "if", "raw_ts", ".", "precision", "is", "None", ":", "# No precision means we can just return a regular datetime", "return", "adjusted", "return", "Timestamp", "(", "adjusted", ".", "year", ",", "adjusted", ".", "month", ",", "adjusted", ".", "day", ",", "adjusted", ".", "hour", ",", "adjusted", ".", "minute", ",", "adjusted", ".", "second", ",", "adjusted", ".", "microsecond", ",", "raw_ts", ".", "tzinfo", ",", "precision", "=", "raw_ts", ".", "precision", ",", "fractional_precision", "=", "raw_ts", ".", "fractional_precision", ")" ]
Constructs a timestamp from UTC fields adjusted to the local offset if given.
[ "Constructs", "a", "timestamp", "from", "UTC", "fields", "adjusted", "to", "the", "local", "offset", "if", "given", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/core.py#L410-L434
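A sketch of the adjustment performed above, assuming ``adjust_from_utc_fields`` is reachable as a static method on ``Timestamp`` and that ``OffsetTZInfo`` is importable from ``amazon.ion.core``::

    from datetime import timedelta
    from amazon.ion.core import Timestamp, OffsetTZInfo

    # Fields given in UTC with a local offset of -08:00; since no precision is set,
    # the result is simply the shifted datetime 2000-01-01T04:00:00-08:00.
    local = Timestamp.adjust_from_utc_fields(
        2000, 1, 1, 12, 0, 0, 0, OffsetTZInfo(timedelta(hours=-8)))
    assert local.hour == 4 and local.utcoffset() == timedelta(hours=-8)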
amzn/ion-python
amazon/ion/writer_text.py
_serialize_scalar_from_string_representation_factory
def _serialize_scalar_from_string_representation_factory(type_name, types, str_func=str): """Builds functions that leverage Python ``str()`` or similar functionality. Args: type_name (str): The name of the Ion type. types (Union[Sequence[type],type]): The Python types to validate for. str_func (Optional[Callable]): The function to convert the value with, defaults to ``str``. Returns: function: The function for serializing scalars of a given type to Ion text bytes. """ def serialize(ion_event): value = ion_event.value validate_scalar_value(value, types) return six.b(str_func(value)) serialize.__name__ = '_serialize_' + type_name return serialize
python
def _serialize_scalar_from_string_representation_factory(type_name, types, str_func=str): def serialize(ion_event): value = ion_event.value validate_scalar_value(value, types) return six.b(str_func(value)) serialize.__name__ = '_serialize_' + type_name return serialize
[ "def", "_serialize_scalar_from_string_representation_factory", "(", "type_name", ",", "types", ",", "str_func", "=", "str", ")", ":", "def", "serialize", "(", "ion_event", ")", ":", "value", "=", "ion_event", ".", "value", "validate_scalar_value", "(", "value", ",", "types", ")", "return", "six", ".", "b", "(", "str_func", "(", "value", ")", ")", "serialize", ".", "__name__", "=", "'_serialize_'", "+", "type_name", "return", "serialize" ]
Builds functions that leverage Python ``str()`` or similar functionality.

    Args:
        type_name (str): The name of the Ion type.
        types (Union[Sequence[type],type]): The Python types to validate for.
        str_func (Optional[Callable]): The function to convert the value with, defaults to ``str``.

    Returns:
        function: The function for serializing scalars of a given type to Ion text bytes.
[ "Builds", "functions", "that", "leverage", "Python", "str", "()", "or", "similar", "functionality", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/writer_text.py#L69-L85
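A standalone sketch of the same factory pattern follows; FakeEvent and serializer_factory are stand-ins invented for illustration, not ion-python names.

from collections import namedtuple

FakeEvent = namedtuple('FakeEvent', 'value')  # stand-in for an IonEvent

def serializer_factory(type_name, types, str_func=str):
    # Close over the expected Python types and the string conversion to use.
    def serialize(event):
        if not isinstance(event.value, types):
            raise TypeError('%s expected %r, got %r' % (type_name, types, type(event.value)))
        return str_func(event.value).encode('utf-8')
    serialize.__name__ = '_serialize_' + type_name
    return serialize

serialize_int = serializer_factory('int', int)
print(serialize_int(FakeEvent(42)))   # b'42'
print(serialize_int.__name__)         # _serialize_int
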
amzn/ion-python
amazon/ion/writer_text.py
_serialize_container_factory
def _serialize_container_factory(suffix, container_map):
    """Returns a function that serializes container start/end.

    Args:
        suffix (str): The suffix to name the function with.
        container_map (Dictionary[core.IonType, bytes]): The

    Returns:
        function: The closure for serialization.
    """
    def serialize(ion_event):
        if not ion_event.ion_type.is_container:
            raise TypeError('Expected container type')
        return container_map[ion_event.ion_type]
    serialize.__name__ = '_serialize_container_' + suffix
    return serialize
python
def _serialize_container_factory(suffix, container_map):
    def serialize(ion_event):
        if not ion_event.ion_type.is_container:
            raise TypeError('Expected container type')
        return container_map[ion_event.ion_type]
    serialize.__name__ = '_serialize_container_' + suffix
    return serialize
[ "def", "_serialize_container_factory", "(", "suffix", ",", "container_map", ")", ":", "def", "serialize", "(", "ion_event", ")", ":", "if", "not", "ion_event", ".", "ion_type", ".", "is_container", ":", "raise", "TypeError", "(", "'Expected container type'", ")", "return", "container_map", "[", "ion_event", ".", "ion_type", "]", "serialize", ".", "__name__", "=", "'_serialize_container_'", "+", "suffix", "return", "serialize" ]
Returns a function that serializes container start/end.

    Args:
        suffix (str): The suffix to name the function with.
        container_map (Dictionary[core.IonType, bytes]): The

    Returns:
        function: The closure for serialization.
[ "Returns", "a", "function", "that", "serializes", "container", "start", "/", "end", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/writer_text.py#L318-L333
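The container variant follows the same closure shape. The sketch below uses plain strings in place of core.IonType members and a hypothetical delimiter mapping, purely to show the pattern.

# Hypothetical mapping from container kind to its opening delimiter in Ion text.
START_DELIMITERS = {'list': b'[', 'sexp': b'(', 'struct': b'{'}

def container_serializer_factory(suffix, container_map):
    def serialize(ion_type):
        if ion_type not in container_map:
            raise TypeError('Expected container type')
        return container_map[ion_type]
    serialize.__name__ = '_serialize_container_' + suffix
    return serialize

serialize_start = container_serializer_factory('start', START_DELIMITERS)
print(serialize_start('struct'))  # b'{'
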
amzn/ion-python
amazon/ion/writer_text.py
raw_writer
def raw_writer(indent=None):
    """Returns a raw text writer co-routine.

    Yields:
        DataEvent: serialization events to write out

        Receives :class:`amazon.ion.core.IonEvent` or ``None`` when the co-routine yields
        ``HAS_PENDING`` :class:`WriteEventType` events.
    """
    is_whitespace_str = isinstance(indent, str) and re.search(r'\A\s*\Z', indent, re.M) is not None
    if not (indent is None or is_whitespace_str):
        raise ValueError('The indent parameter must either be None or a string containing only whitespace')

    indent_bytes = six.b(indent) if isinstance(indent, str) else indent

    return writer_trampoline(_raw_writer_coroutine(indent=indent_bytes))
python
def raw_writer(indent=None):
    is_whitespace_str = isinstance(indent, str) and re.search(r'\A\s*\Z', indent, re.M) is not None
    if not (indent is None or is_whitespace_str):
        raise ValueError('The indent parameter must either be None or a string containing only whitespace')

    indent_bytes = six.b(indent) if isinstance(indent, str) else indent

    return writer_trampoline(_raw_writer_coroutine(indent=indent_bytes))
[ "def", "raw_writer", "(", "indent", "=", "None", ")", ":", "is_whitespace_str", "=", "isinstance", "(", "indent", ",", "str", ")", "and", "re", ".", "search", "(", "r'\\A\\s*\\Z'", ",", "indent", ",", "re", ".", "M", ")", "is", "not", "None", "if", "not", "(", "indent", "is", "None", "or", "is_whitespace_str", ")", ":", "raise", "ValueError", "(", "'The indent parameter must either be None or a string containing only whitespace'", ")", "indent_bytes", "=", "six", ".", "b", "(", "indent", ")", "if", "isinstance", "(", "indent", ",", "str", ")", "else", "indent", "return", "writer_trampoline", "(", "_raw_writer_coroutine", "(", "indent", "=", "indent_bytes", ")", ")" ]
Returns a raw text writer co-routine.

    Yields:
        DataEvent: serialization events to write out

        Receives :class:`amazon.ion.core.IonEvent` or ``None`` when the co-routine yields
        ``HAS_PENDING`` :class:`WriteEventType` events.
[ "Returns", "a", "raw", "text", "writer", "co", "-", "routine", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/writer_text.py#L433-L449
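The only validation raw_writer performs up front is the whitespace check on indent; that check can be exercised on its own with the standard library (the helper name below is made up for the sketch).

import re

def indent_is_acceptable(indent):
    # Mirrors raw_writer's guard: None, or a string containing only whitespace.
    is_whitespace_str = isinstance(indent, str) and re.search(r'\A\s*\Z', indent, re.M) is not None
    return indent is None or is_whitespace_str

print(indent_is_acceptable(None))    # True  -> compact, single-line output
print(indent_is_acceptable('  '))    # True  -> pretty-printed with two spaces
print(indent_is_acceptable('->'))    # False -> raw_writer raises ValueError
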
amzn/ion-python
amazon/ion/writer.py
writer_trampoline
def writer_trampoline(start):
    """Provides the co-routine trampoline for a writer state machine.

    The given co-routine is a state machine that yields :class:`Transition` and takes
    a :class:`Transition` with a :class:`amazon.ion.core.IonEvent` and the co-routine itself.

    Notes:
        A writer delimits its logical flush points with ``WriteEventType.COMPLETE``, depending
        on the configuration, a user may need to send an ``IonEventType.STREAM_END`` to force
        this to occur.

    Args:
        start: The writer co-routine to initially delegate to.

    Yields:
        DataEvent: the result of serialization.

        Receives :class:`amazon.ion.core.IonEvent` to serialize into :class:`DataEvent`.
    """
    trans = Transition(None, start)
    while True:
        ion_event = (yield trans.event)
        if trans.event is None:
            if ion_event is None:
                raise TypeError('Cannot start Writer with no event')
        else:
            if trans.event.type is WriteEventType.HAS_PENDING and ion_event is not None:
                raise TypeError('Writer expected to receive no event: %r' % (ion_event,))
            if trans.event.type is not WriteEventType.HAS_PENDING and ion_event is None:
                raise TypeError('Writer expected to receive event')
            if ion_event is not None and ion_event.event_type is IonEventType.INCOMPLETE:
                raise TypeError('Writer cannot receive INCOMPLETE event')
        trans = trans.delegate.send(Transition(ion_event, trans.delegate))
python
def writer_trampoline(start):
    trans = Transition(None, start)
    while True:
        ion_event = (yield trans.event)
        if trans.event is None:
            if ion_event is None:
                raise TypeError('Cannot start Writer with no event')
        else:
            if trans.event.type is WriteEventType.HAS_PENDING and ion_event is not None:
                raise TypeError('Writer expected to receive no event: %r' % (ion_event,))
            if trans.event.type is not WriteEventType.HAS_PENDING and ion_event is None:
                raise TypeError('Writer expected to receive event')
            if ion_event is not None and ion_event.event_type is IonEventType.INCOMPLETE:
                raise TypeError('Writer cannot receive INCOMPLETE event')
        trans = trans.delegate.send(Transition(ion_event, trans.delegate))
[ "def", "writer_trampoline", "(", "start", ")", ":", "trans", "=", "Transition", "(", "None", ",", "start", ")", "while", "True", ":", "ion_event", "=", "(", "yield", "trans", ".", "event", ")", "if", "trans", ".", "event", "is", "None", ":", "if", "ion_event", "is", "None", ":", "raise", "TypeError", "(", "'Cannot start Writer with no event'", ")", "else", ":", "if", "trans", ".", "event", ".", "type", "is", "WriteEventType", ".", "HAS_PENDING", "and", "ion_event", "is", "not", "None", ":", "raise", "TypeError", "(", "'Writer expected to receive no event: %r'", "%", "(", "ion_event", ",", ")", ")", "if", "trans", ".", "event", ".", "type", "is", "not", "WriteEventType", ".", "HAS_PENDING", "and", "ion_event", "is", "None", ":", "raise", "TypeError", "(", "'Writer expected to receive event'", ")", "if", "ion_event", "is", "not", "None", "and", "ion_event", ".", "event_type", "is", "IonEventType", ".", "INCOMPLETE", ":", "raise", "TypeError", "(", "'Writer cannot receive INCOMPLETE event'", ")", "trans", "=", "trans", ".", "delegate", ".", "send", "(", "Transition", "(", "ion_event", ",", "trans", ".", "delegate", ")", ")" ]
Provides the co-routine trampoline for a writer state machine.

    The given co-routine is a state machine that yields :class:`Transition` and takes
    a :class:`Transition` with a :class:`amazon.ion.core.IonEvent` and the co-routine itself.

    Notes:
        A writer delimits its logical flush points with ``WriteEventType.COMPLETE``, depending
        on the configuration, a user may need to send an ``IonEventType.STREAM_END`` to force
        this to occur.

    Args:
        start: The writer co-routine to initially delegate to.

    Yields:
        DataEvent: the result of serialization.

        Receives :class:`amazon.ion.core.IonEvent` to serialize into :class:`DataEvent`.
[ "Provides", "the", "co", "-", "routine", "trampoline", "for", "a", "writer", "state", "machine", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/writer.py#L79-L111
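The trampoline is easier to see with toy types. The sketch below is not the library's implementation: Transition here is a bare namedtuple and the delegate is a trivial state machine, but the driving loop has the same shape.

from collections import namedtuple

Transition = namedtuple('Transition', 'event delegate')  # toy stand-in

def toy_state_machine():
    # Receives Transition(ion_event, delegate) and answers with a Transition
    # whose event is the "serialized" form of the input.
    trans = None
    while True:
        incoming = yield trans
        trans = Transition('wrote(%r)' % (incoming.event,), incoming.delegate)

def trampoline(start):
    trans = Transition(None, start)
    while True:
        ion_event = yield trans.event
        trans = trans.delegate.send(Transition(ion_event, trans.delegate))

machine = toy_state_machine()
next(machine)                 # prime the delegate
pump = trampoline(machine)
print(next(pump))             # None -- nothing written yet
print(pump.send('hello'))     # wrote('hello')
print(pump.send('world'))     # wrote('world')
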
amzn/ion-python
amazon/ion/writer.py
_drain
def _drain(writer, ion_event):
    """Drain the writer of its pending write events.

    Args:
        writer (Coroutine): A writer co-routine.
        ion_event (amazon.ion.core.IonEvent): The first event to apply to the writer.

    Yields:
        DataEvent: Yields each pending data event.
    """
    result_event = _WRITE_EVENT_HAS_PENDING_EMPTY
    while result_event.type is WriteEventType.HAS_PENDING:
        result_event = writer.send(ion_event)
        ion_event = None
        yield result_event
python
def _drain(writer, ion_event):
    result_event = _WRITE_EVENT_HAS_PENDING_EMPTY
    while result_event.type is WriteEventType.HAS_PENDING:
        result_event = writer.send(ion_event)
        ion_event = None
        yield result_event
[ "def", "_drain", "(", "writer", ",", "ion_event", ")", ":", "result_event", "=", "_WRITE_EVENT_HAS_PENDING_EMPTY", "while", "result_event", ".", "type", "is", "WriteEventType", ".", "HAS_PENDING", ":", "result_event", "=", "writer", ".", "send", "(", "ion_event", ")", "ion_event", "=", "None", "yield", "result_event" ]
Drain the writer of its pending write events.

    Args:
        writer (Coroutine): A writer co-routine.
        ion_event (amazon.ion.core.IonEvent): The first event to apply to the writer.

    Yields:
        DataEvent: Yields each pending data event.
[ "Drain", "the", "writer", "of", "its", "pending", "write", "events", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/writer.py#L117-L131
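A self-contained illustration of the drain pattern follows, using toy status strings instead of WriteEventType and a fake writer that emits two chunks per event; only the control flow mirrors _drain.

def toy_writer():
    # Fake writer coroutine: for each event it reports one pending chunk,
    # then a completing chunk, mimicking HAS_PENDING / COMPLETE.
    event = yield
    while True:
        event = yield ('HAS_PENDING', ('<%s' % event).encode('utf-8'))
        event = yield ('COMPLETE', b'>')

def drain(writer, event):
    status = 'HAS_PENDING'                 # sentinel, like _WRITE_EVENT_HAS_PENDING_EMPTY
    while status == 'HAS_PENDING':
        status, data = writer.send(event)  # only the first send carries the event
        event = None
        yield data

writer = toy_writer()
next(writer)                               # prime the fake writer
print(list(drain(writer, 'abc')))          # [b'<abc', b'>']
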
amzn/ion-python
amazon/ion/writer.py
blocking_writer
def blocking_writer(writer, output):
    """Provides an implementation of using the writer co-routine with a file-like object.

    Args:
        writer (Coroutine): A writer co-routine.
        output (BaseIO): The file-like object to pipe events to.

    Yields:
        WriteEventType: Yields when no events are pending.

        Receives :class:`amazon.ion.core.IonEvent` to write to the ``output``.
    """
    result_type = None
    while True:
        ion_event = (yield result_type)
        for result_event in _drain(writer, ion_event):
            output.write(result_event.data)
            result_type = result_event.type
python
def blocking_writer(writer, output):
    result_type = None
    while True:
        ion_event = (yield result_type)
        for result_event in _drain(writer, ion_event):
            output.write(result_event.data)
            result_type = result_event.type
[ "def", "blocking_writer", "(", "writer", ",", "output", ")", ":", "result_type", "=", "None", "while", "True", ":", "ion_event", "=", "(", "yield", "result_type", ")", "for", "result_event", "in", "_drain", "(", "writer", ",", "ion_event", ")", ":", "output", ".", "write", "(", "result_event", ".", "data", ")", "result_type", "=", "result_event", ".", "type" ]
Provides an implementation of using the writer co-routine with a file-like object.

    Args:
        writer (Coroutine): A writer co-routine.
        output (BaseIO): The file-like object to pipe events to.

    Yields:
        WriteEventType: Yields when no events are pending.

        Receives :class:`amazon.ion.core.IonEvent` to write to the ``output``.
[ "Provides", "an", "implementation", "of", "using", "the", "writer", "co", "-", "routine", "with", "a", "file", "-", "like", "object", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/writer.py#L135-L152
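In practice most users never drive blocking_writer directly; the high-level simpleion module wires a writer coroutine to an output for you. A minimal example (the exact text layout and field order in the output may differ):

from amazon.ion import simpleion

data = {'name': u'ion', 'version': 1}
text = simpleion.dumps(data, binary=False)   # text Ion instead of the default binary
print(text)                                  # e.g. $ion_1_0 {name:"ion",version:1}
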
amzn/ion-python
amazon/ion/simple_types.py
_IonNature._copy
def _copy(self):
    """Copies this instance. Its IonEvent (if any) is not preserved.

    Keeping this protected until/unless we decide there's use for it publicly.
    """
    args, kwargs = self._to_constructor_args(self)
    value = self.__class__(*args, **kwargs)
    value.ion_event = None
    value.ion_type = self.ion_type
    value.ion_annotations = self.ion_annotations
    return value
python
def _copy(self):
    args, kwargs = self._to_constructor_args(self)
    value = self.__class__(*args, **kwargs)
    value.ion_event = None
    value.ion_type = self.ion_type
    value.ion_annotations = self.ion_annotations
    return value
[ "def", "_copy", "(", "self", ")", ":", "args", ",", "kwargs", "=", "self", ".", "_to_constructor_args", "(", "self", ")", "value", "=", "self", ".", "__class__", "(", "*", "args", ",", "*", "*", "kwargs", ")", "value", ".", "ion_event", "=", "None", "value", ".", "ion_type", "=", "self", ".", "ion_type", "value", ".", "ion_annotations", "=", "self", ".", "ion_annotations", "return", "value" ]
Copies this instance. Its IonEvent (if any) is not preserved.

    Keeping this protected until/unless we decide there's use for it publicly.
[ "Copies", "this", "instance", ".", "Its", "IonEvent", "(", "if", "any", ")", "is", "not", "preserved", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/simple_types.py#L57-L67
amzn/ion-python
amazon/ion/simple_types.py
_IonNature.from_event
def from_event(cls, ion_event):
    """Constructs the given native extension from the properties of an event.

    Args:
        ion_event (IonEvent): The event to construct the native value from.
    """
    if ion_event.value is not None:
        args, kwargs = cls._to_constructor_args(ion_event.value)
    else:
        # if value is None (i.e. this is a container event), args must be empty or initialization of the
        # underlying container will fail.
        args, kwargs = (), {}
    value = cls(*args, **kwargs)
    value.ion_event = ion_event
    value.ion_type = ion_event.ion_type
    value.ion_annotations = ion_event.annotations
    return value
python
def from_event(cls, ion_event):
    if ion_event.value is not None:
        args, kwargs = cls._to_constructor_args(ion_event.value)
    else:
        args, kwargs = (), {}
    value = cls(*args, **kwargs)
    value.ion_event = ion_event
    value.ion_type = ion_event.ion_type
    value.ion_annotations = ion_event.annotations
    return value
[ "def", "from_event", "(", "cls", ",", "ion_event", ")", ":", "if", "ion_event", ".", "value", "is", "not", "None", ":", "args", ",", "kwargs", "=", "cls", ".", "_to_constructor_args", "(", "ion_event", ".", "value", ")", "else", ":", "# if value is None (i.e. this is a container event), args must be empty or initialization of the", "# underlying container will fail.", "args", ",", "kwargs", "=", "(", ")", ",", "{", "}", "value", "=", "cls", "(", "*", "args", ",", "*", "*", "kwargs", ")", "value", ".", "ion_event", "=", "ion_event", "value", ".", "ion_type", "=", "ion_event", ".", "ion_type", "value", ".", "ion_annotations", "=", "ion_event", ".", "annotations", "return", "value" ]
Constructs the given native extension from the properties of an event.

    Args:
        ion_event (IonEvent): The event to construct the native value from.
[ "Constructs", "the", "given", "native", "extension", "from", "the", "properties", "of", "an", "event", "." ]
train
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/simple_types.py#L74-L90
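A hedged usage sketch for from_event follows; it assumes the event and enum constructors in amazon.ion.core and the IonPyInt extension type keep the shapes shown in this repository, and the printed reprs are approximate.

from amazon.ion.core import IonEvent, IonEventType, IonType
from amazon.ion.simple_types import IonPyInt

# A scalar event carrying an Ion int, as a reader coroutine might produce it.
event = IonEvent(IonEventType.SCALAR, IonType.INT, 42)
value = IonPyInt.from_event(event)

print(value + 1)              # 43 -- behaves like a plain int
print(value.ion_type)         # IonType.INT (approximate repr)
print(value.ion_annotations)  # () -- the event carried no annotations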