Column schema (ranges give the min and max value for id, and the min and max string length for string columns):

    id                  int32              0 to 252k
    repo                string (length)    7 to 55
    path                string (length)    4 to 127
    func_name           string (length)    1 to 88
    original_string     string (length)    75 to 19.8k
    language            string             1 class
    code                string (length)    75 to 19.8k
    code_tokens         sequence
    docstring           string (length)    3 to 17.3k
    docstring_tokens    sequence
    sha                 string (length)    40 to 40
    url                 string (length)    87 to 242
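Rows with this schema can be scanned with the datasets library, as in the sketch below. The dataset path is a placeholder assumption, since this dump does not name its source.

from datasets import load_dataset

# "user/code-search-net-python" is a hypothetical path; substitute the actual dataset name.
ds = load_dataset("user/code-search-net-python", split="train")
for row in ds.select(range(3)):
    print(row["id"], row["func_name"], row["url"])
    print(row["docstring"])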
900
amzn/ion-python
amazon/ion/symbols.py
SymbolTable.__add
python
def __add(self, token):
    """Unconditionally adds a token to the table."""
    self.__symbols.append(token)
    text = token.text
    if text is not None and text not in self.__mapping:
        self.__mapping[text] = token
[ "def", "__add", "(", "self", ",", "token", ")", ":", "self", ".", "__symbols", ".", "append", "(", "token", ")", "text", "=", "token", ".", "text", "if", "text", "is", "not", "None", "and", "text", "not", "in", "self", ".", "__mapping", ":", "self", ".", "__mapping", "[", "text", "]", "=", "token" ]
Unconditionally adds a token to the table.
[ "Unconditionally", "adds", "a", "token", "to", "the", "table", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/symbols.py#L213-L218
901
amzn/ion-python
amazon/ion/symbols.py
SymbolTable.__add_shared
python
def __add_shared(self, original_token):
    """Adds a token, normalizing the SID and import reference to this table."""
    sid = self.__new_sid()
    token = SymbolToken(original_token.text, sid, self.__import_location(sid))
    self.__add(token)
    return token
[ "def", "__add_shared", "(", "self", ",", "original_token", ")", ":", "sid", "=", "self", ".", "__new_sid", "(", ")", "token", "=", "SymbolToken", "(", "original_token", ".", "text", ",", "sid", ",", "self", ".", "__import_location", "(", "sid", ")", ")", "self", ".", "__add", "(", "token", ")", "return", "token" ]
Adds a token, normalizing the SID and import reference to this table.
[ "Adds", "a", "token", "normalizing", "the", "SID", "and", "import", "reference", "to", "this", "table", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/symbols.py#L220-L225
902
amzn/ion-python
amazon/ion/symbols.py
SymbolTable.__add_import
python
def __add_import(self, original_token):
    """Adds a token, normalizing only the SID"""
    sid = self.__new_sid()
    token = SymbolToken(original_token.text, sid, original_token.location)
    self.__add(token)
    return token
[ "def", "__add_import", "(", "self", ",", "original_token", ")", ":", "sid", "=", "self", ".", "__new_sid", "(", ")", "token", "=", "SymbolToken", "(", "original_token", ".", "text", ",", "sid", ",", "original_token", ".", "location", ")", "self", ".", "__add", "(", "token", ")", "return", "token" ]
Adds a token, normalizing only the SID
[ "Adds", "a", "token", "normalizing", "only", "the", "SID" ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/symbols.py#L227-L232
903
amzn/ion-python
amazon/ion/symbols.py
SymbolTable.__add_text
python
def __add_text(self, text):
    """Adds the given Unicode text as a locally defined symbol."""
    if text is not None and not isinstance(text, six.text_type):
        raise TypeError('Local symbol definition must be a Unicode sequence or None: %r' % text)
    sid = self.__new_sid()
    location = None
    if self.table_type.is_shared:
        location = self.__import_location(sid)
    token = SymbolToken(text, sid, location)
    self.__add(token)
    return token
[ "def", "__add_text", "(", "self", ",", "text", ")", ":", "if", "text", "is", "not", "None", "and", "not", "isinstance", "(", "text", ",", "six", ".", "text_type", ")", ":", "raise", "TypeError", "(", "'Local symbol definition must be a Unicode sequence or None: %r'", "%", "text", ")", "sid", "=", "self", ".", "__new_sid", "(", ")", "location", "=", "None", "if", "self", ".", "table_type", ".", "is_shared", ":", "location", "=", "self", ".", "__import_location", "(", "sid", ")", "token", "=", "SymbolToken", "(", "text", ",", "sid", ",", "location", ")", "self", ".", "__add", "(", "token", ")", "return", "token" ]
Adds the given Unicode text as a locally defined symbol.
[ "Adds", "the", "given", "Unicode", "text", "as", "a", "locally", "defined", "symbol", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/symbols.py#L234-L244
904
amzn/ion-python
amazon/ion/symbols.py
SymbolTable.intern
python
def intern(self, text):
    """Interns the given Unicode sequence into the symbol table.

    Note:
        This operation is only valid on local symbol tables.

    Args:
        text (unicode): The target to intern.

    Returns:
        SymbolToken: The mapped symbol token which may already exist in the table.
    """
    if self.table_type.is_shared:
        raise TypeError('Cannot intern on shared symbol table')
    if not isinstance(text, six.text_type):
        raise TypeError('Cannot intern non-Unicode sequence into symbol table: %r' % text)

    token = self.get(text)
    if token is None:
        token = self.__add_text(text)
    return token
[ "def", "intern", "(", "self", ",", "text", ")", ":", "if", "self", ".", "table_type", ".", "is_shared", ":", "raise", "TypeError", "(", "'Cannot intern on shared symbol table'", ")", "if", "not", "isinstance", "(", "text", ",", "six", ".", "text_type", ")", ":", "raise", "TypeError", "(", "'Cannot intern non-Unicode sequence into symbol table: %r'", "%", "text", ")", "token", "=", "self", ".", "get", "(", "text", ")", "if", "token", "is", "None", ":", "token", "=", "self", ".", "__add_text", "(", "text", ")", "return", "token" ]
Interns the given Unicode sequence into the symbol table. Note: This operation is only valid on local symbol tables. Args: text (unicode): The target to intern. Returns: SymbolToken: The mapped symbol token which may already exist in the table.
[ "Interns", "the", "given", "Unicode", "sequence", "into", "the", "symbol", "table", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/symbols.py#L246-L266
905
amzn/ion-python
amazon/ion/symbols.py
SymbolTable.get
python
def get(self, key, default=None):
    """Returns a token by text or local ID, with a default.

    A given text image may be associated with more than one symbol ID.
    This will return the first definition.

    Note:
        User defined symbol IDs are always one-based.  Symbol zero is a special symbol
        that always has no text.

    Args:
        key (unicode | int): The key to lookup.
        default(Optional[SymbolToken]): The default to return if the key is not found

    Returns:
        SymbolToken: The token associated with the key or the default if it doesn't exist.
    """
    if isinstance(key, six.text_type):
        return self.__mapping.get(key, None)
    if not isinstance(key, int):
        raise TypeError('Key must be int or Unicode sequence.')

    # TODO determine if $0 should be returned for all symbol tables.
    if key == 0:
        return SYMBOL_ZERO_TOKEN

    # Translate one-based SID to zero-based intern table
    index = key - 1
    if index < 0 or key > len(self):
        return default
    return self.__symbols[index]
[ "def", "get", "(", "self", ",", "key", ",", "default", "=", "None", ")", ":", "if", "isinstance", "(", "key", ",", "six", ".", "text_type", ")", ":", "return", "self", ".", "__mapping", ".", "get", "(", "key", ",", "None", ")", "if", "not", "isinstance", "(", "key", ",", "int", ")", ":", "raise", "TypeError", "(", "'Key must be int or Unicode sequence.'", ")", "# TODO determine if $0 should be returned for all symbol tables.", "if", "key", "==", "0", ":", "return", "SYMBOL_ZERO_TOKEN", "# Translate one-based SID to zero-based intern table", "index", "=", "key", "-", "1", "if", "index", "<", "0", "or", "key", ">", "len", "(", "self", ")", ":", "return", "default", "return", "self", ".", "__symbols", "[", "index", "]" ]
Returns a token by text or local ID, with a default. A given text image may be associated with more than one symbol ID. This will return the first definition. Note: User defined symbol IDs are always one-based. Symbol zero is a special symbol that always has no text. Args: key (unicode | int): The key to lookup. default(Optional[SymbolToken]): The default to return if the key is not found Returns: SymbolToken: The token associated with the key or the default if it doesn't exist.
[ "Returns", "a", "token", "by", "text", "or", "local", "ID", "with", "a", "default", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/symbols.py#L268-L297
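The two records above document the lookup contract of SymbolTable: intern is idempotent for a given text, get accepts either text or a one-based symbol ID, and SID 0 is the special no-text symbol. A minimal toy table (a stand-in for illustration, not the library's SymbolTable class) that mirrors that contract:

from collections import namedtuple

SymbolToken = namedtuple('SymbolToken', 'text sid location')
SYMBOL_ZERO = SymbolToken(None, 0, None)

class ToySymbolTable:
    def __init__(self):
        self._symbols = []    # index i holds the token with SID i + 1
        self._mapping = {}    # first token seen for a given text

    def intern(self, text):
        token = self.get(text)
        if token is None:
            token = SymbolToken(text, len(self._symbols) + 1, None)
            self._symbols.append(token)
            self._mapping[text] = token
        return token

    def get(self, key, default=None):
        if isinstance(key, str):
            return self._mapping.get(key, default)
        if key == 0:
            return SYMBOL_ZERO
        index = key - 1              # one-based SID to zero-based index
        if index < 0 or key > len(self._symbols):
            return default
        return self._symbols[index]

table = ToySymbolTable()
assert table.intern(u'name').sid == 1
assert table.intern(u'name') is table.get(1)   # idempotent; SIDs are one-based
assert table.get(0).text is None               # symbol zero has no text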
906
amzn/ion-python
amazon/ion/symbols.py
SymbolTableCatalog.register
python
def register(self, table):
    """Adds a shared table to the catalog.

    Args:
        table (SymbolTable): A non-system, shared symbol table.
    """
    if table.table_type.is_system:
        raise ValueError('Cannot add system table to catalog')
    if not table.table_type.is_shared:
        raise ValueError('Cannot add local table to catalog')
    if table.is_substitute:
        raise ValueError('Cannot add substitute table to catalog')

    versions = self.__tables.get(table.name)
    if versions is None:
        versions = {}
        self.__tables[table.name] = versions
    versions[table.version] = table
[ "def", "register", "(", "self", ",", "table", ")", ":", "if", "table", ".", "table_type", ".", "is_system", ":", "raise", "ValueError", "(", "'Cannot add system table to catalog'", ")", "if", "not", "table", ".", "table_type", ".", "is_shared", ":", "raise", "ValueError", "(", "'Cannot add local table to catalog'", ")", "if", "table", ".", "is_substitute", ":", "raise", "ValueError", "(", "'Cannot add substitute table to catalog'", ")", "versions", "=", "self", ".", "__tables", ".", "get", "(", "table", ".", "name", ")", "if", "versions", "is", "None", ":", "versions", "=", "{", "}", "self", ".", "__tables", "[", "table", ".", "name", "]", "=", "versions", "versions", "[", "table", ".", "version", "]", "=", "table" ]
Adds a shared table to the catalog. Args: table (SymbolTable): A non-system, shared symbol table.
[ "Adds", "a", "shared", "table", "to", "the", "catalog", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/symbols.py#L499-L516
907
amzn/ion-python
amazon/ion/symbols.py
SymbolTableCatalog.resolve
python
def resolve(self, name, version, max_id):
    """Resolves the table for a given name and version.

    Args:
        name (unicode): The name of the table to resolve.
        version (int): The version of the table to resolve.
        max_id (Optional[int]): The maximum ID of the table requested.
            May be ``None`` in which case an exact match on ``name`` and
            ``version`` is required.

    Returns:
        SymbolTable: The *closest* matching symbol table.  This is either an
            exact match, a placeholder, or a derived substitute depending
            on what tables are registered.
    """
    if not isinstance(name, six.text_type):
        raise TypeError('Name must be a Unicode sequence: %r' % name)
    if not isinstance(version, int):
        raise TypeError('Version must be an int: %r' % version)
    if version <= 0:
        raise ValueError('Version must be positive: %s' % version)
    if max_id is not None and max_id < 0:
        raise ValueError('Max ID must be zero or positive: %s' % max_id)

    versions = self.__tables.get(name)
    if versions is None:
        if max_id is None:
            raise CannotSubstituteTable(
                'Found no table for %s, but no max_id' % name
            )
        return placeholder_symbol_table(name, version, max_id)

    table = versions.get(version)
    if table is None:
        # TODO Replace the keys map with a search tree based dictionary.
        keys = list(versions)
        keys.sort()
        table = versions[keys[-1]]

    if table.version == version and (max_id is None or table.max_id == max_id):
        return table

    if max_id is None:
        raise CannotSubstituteTable(
            'Found match for %s, but not version %d, and no max_id' % (name, version)
        )

    return substitute_symbol_table(table, version, max_id)
[ "def", "resolve", "(", "self", ",", "name", ",", "version", ",", "max_id", ")", ":", "if", "not", "isinstance", "(", "name", ",", "six", ".", "text_type", ")", ":", "raise", "TypeError", "(", "'Name must be a Unicode sequence: %r'", "%", "name", ")", "if", "not", "isinstance", "(", "version", ",", "int", ")", ":", "raise", "TypeError", "(", "'Version must be an int: %r'", "%", "version", ")", "if", "version", "<=", "0", ":", "raise", "ValueError", "(", "'Version must be positive: %s'", "%", "version", ")", "if", "max_id", "is", "not", "None", "and", "max_id", "<", "0", ":", "raise", "ValueError", "(", "'Max ID must be zero or positive: %s'", "%", "max_id", ")", "versions", "=", "self", ".", "__tables", ".", "get", "(", "name", ")", "if", "versions", "is", "None", ":", "if", "max_id", "is", "None", ":", "raise", "CannotSubstituteTable", "(", "'Found no table for %s, but no max_id'", "%", "name", ")", "return", "placeholder_symbol_table", "(", "name", ",", "version", ",", "max_id", ")", "table", "=", "versions", ".", "get", "(", "version", ")", "if", "table", "is", "None", ":", "# TODO Replace the keys map with a search tree based dictionary.", "keys", "=", "list", "(", "versions", ")", "keys", ".", "sort", "(", ")", "table", "=", "versions", "[", "keys", "[", "-", "1", "]", "]", "if", "table", ".", "version", "==", "version", "and", "(", "max_id", "is", "None", "or", "table", ".", "max_id", "==", "max_id", ")", ":", "return", "table", "if", "max_id", "is", "None", ":", "raise", "CannotSubstituteTable", "(", "'Found match for %s, but not version %d, and no max_id'", "%", "(", "name", ",", "version", ")", ")", "return", "substitute_symbol_table", "(", "table", ",", "version", ",", "max_id", ")" ]
Resolves the table for a given name and version. Args: name (unicode): The name of the table to resolve. version (int): The version of the table to resolve. max_id (Optional[int]): The maximum ID of the table requested. May be ``None`` in which case an exact match on ``name`` and ``version`` is required. Returns: SymbolTable: The *closest* matching symbol table. This is either an exact match, a placeholder, or a derived substitute depending on what tables are registered.
[ "Resolves", "the", "table", "for", "a", "given", "name", "and", "version", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/symbols.py#L518-L564
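The resolve record above encodes a small decision tree: an exact name/version/max_id hit is returned directly, a missing name yields a placeholder (max_id required), and an inexact version hit falls back to the highest registered version and yields a substitute. A simplified sketch of that decision tree, with plain dicts standing in for SymbolTable objects and tuples standing in for the placeholder/substitute helpers (all names here are illustrative, not the library's API):

def resolve(catalog, name, version, max_id):
    versions = catalog.get(name)
    if versions is None:
        if max_id is None:
            raise ValueError('no table for %s and no max_id' % name)
        return ('placeholder', name, version, max_id)
    table = versions.get(version)
    if table is None:
        table = versions[max(versions)]     # closest: the highest registered version
    if table['version'] == version and (max_id is None or table['max_id'] == max_id):
        return table                        # exact match
    if max_id is None:
        raise ValueError('inexact match for %s and no max_id' % name)
    return ('substitute', table['name'], version, max_id)

catalog = {u'com.example.shared': {1: {'name': u'com.example.shared', 'version': 1, 'max_id': 10}}}
print(resolve(catalog, u'com.example.shared', 1, 10))   # exact match
print(resolve(catalog, u'com.example.shared', 2, 12))   # substitute derived from v1
print(resolve(catalog, u'other', 1, 5))                 # placeholder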
908
amzn/ion-python
amazon/ion/writer_buffer.py
BufferTree.start_container
python
def start_container(self):
    """Add a node to the tree that represents the start of a container.

    Until end_container is called, any nodes added through add_scalar_value
    or start_container will be children of this new node.
    """
    self.__container_lengths.append(self.current_container_length)
    self.current_container_length = 0
    new_container_node = _Node()
    self.__container_node.add_child(new_container_node)
    self.__container_nodes.append(self.__container_node)
    self.__container_node = new_container_node
[ "def", "start_container", "(", "self", ")", ":", "self", ".", "__container_lengths", ".", "append", "(", "self", ".", "current_container_length", ")", "self", ".", "current_container_length", "=", "0", "new_container_node", "=", "_Node", "(", ")", "self", ".", "__container_node", ".", "add_child", "(", "new_container_node", ")", "self", ".", "__container_nodes", ".", "append", "(", "self", ".", "__container_node", ")", "self", ".", "__container_node", "=", "new_container_node" ]
Add a node to the tree that represents the start of a container. Until end_container is called, any nodes added through add_scalar_value or start_container will be children of this new node.
[ "Add", "a", "node", "to", "the", "tree", "that", "represents", "the", "start", "of", "a", "container", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/writer_buffer.py#L91-L102
909
amzn/ion-python
amazon/ion/writer_buffer.py
BufferTree.end_container
python
def end_container(self, header_buf):
    """Add a node containing the container's header to the current subtree.

    This node will be added as the leftmost leaf of the subtree that was
    started by the matching call to start_container.

    Args:
        header_buf (bytearray): bytearray containing the container header.
    """
    if not self.__container_nodes:
        raise ValueError("Attempted to end container with none active.")
    # Header needs to be the first node visited on this subtree.
    self.__container_node.add_leaf(_Node(header_buf))
    self.__container_node = self.__container_nodes.pop()
    parent_container_length = self.__container_lengths.pop()
    self.current_container_length = \
        parent_container_length + self.current_container_length + len(header_buf)
[ "def", "end_container", "(", "self", ",", "header_buf", ")", ":", "if", "not", "self", ".", "__container_nodes", ":", "raise", "ValueError", "(", "\"Attempted to end container with none active.\"", ")", "# Header needs to be the first node visited on this subtree.", "self", ".", "__container_node", ".", "add_leaf", "(", "_Node", "(", "header_buf", ")", ")", "self", ".", "__container_node", "=", "self", ".", "__container_nodes", ".", "pop", "(", ")", "parent_container_length", "=", "self", ".", "__container_lengths", ".", "pop", "(", ")", "self", ".", "current_container_length", "=", "parent_container_length", "+", "self", ".", "current_container_length", "+", "len", "(", "header_buf", ")" ]
Add a node containing the container's header to the current subtree. This node will be added as the leftmost leaf of the subtree that was started by the matching call to start_container. Args: header_buf (bytearray): bytearray containing the container header.
[ "Add", "a", "node", "containing", "the", "container", "s", "header", "to", "the", "current", "subtree", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/writer_buffer.py#L104-L120
910
amzn/ion-python
amazon/ion/writer_buffer.py
BufferTree.add_scalar_value
python
def add_scalar_value(self, value_buf):
    """Add a node to the tree containing a scalar value.

    Args:
        value_buf (bytearray): bytearray containing the scalar value.
    """
    self.__container_node.add_child(_Node(value_buf))
    self.current_container_length += len(value_buf)
[ "def", "add_scalar_value", "(", "self", ",", "value_buf", ")", ":", "self", ".", "__container_node", ".", "add_child", "(", "_Node", "(", "value_buf", ")", ")", "self", ".", "current_container_length", "+=", "len", "(", "value_buf", ")" ]
Add a node to the tree containing a scalar value. Args: value_buf (bytearray): bytearray containing the scalar value.
[ "Add", "a", "node", "to", "the", "tree", "containing", "a", "scalar", "value", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/writer_buffer.py#L122-L129
911
amzn/ion-python
amazon/ion/writer_buffer.py
BufferTree.drain
python
def drain(self):
    """Walk the BufferTree and reset it when finished.

    Yields:
        any: The current node's value.
    """
    if self.__container_nodes:
        raise ValueError("Attempted to drain without ending all containers.")
    for buf in self.__depth_traverse(self.__root):
        if buf is not None:
            yield buf
    self.__reset()
[ "def", "drain", "(", "self", ")", ":", "if", "self", ".", "__container_nodes", ":", "raise", "ValueError", "(", "\"Attempted to drain without ending all containers.\"", ")", "for", "buf", "in", "self", ".", "__depth_traverse", "(", "self", ".", "__root", ")", ":", "if", "buf", "is", "not", "None", ":", "yield", "buf", "self", ".", "__reset", "(", ")" ]
Walk the BufferTree and reset it when finished. Yields: any: The current node's value.
[ "Walk", "the", "BufferTree", "and", "reset", "it", "when", "finished", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/writer_buffer.py#L131-L142
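Taken together, the four BufferTree records above describe a write buffer for length-prefixed encodings: child values are buffered first, the container header is spliced in as the leftmost leaf once the container's length is known, and drain then walks the tree depth-first. A hedged usage sketch follows; it assumes BufferTree() takes no constructor arguments, and the byte values are arbitrary placeholders rather than real Ion encodings.

from amazon.ion.writer_buffer import BufferTree

tree = BufferTree()
tree.start_container()
tree.add_scalar_value(bytearray(b'\x21\x01'))   # children are buffered first...
tree.add_scalar_value(bytearray(b'\x21\x02'))
tree.end_container(bytearray(b'\xb4'))          # ...then the header, once the length is known
print(b''.join(bytes(buf) for buf in tree.drain()))   # header precedes its children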
912
amzn/ion-python
amazon/ion/equivalence.py
ion_equals
python
def ion_equals(a, b, timestamps_instants_only=False):
    """Tests two objects for equivalence under the Ion data model.

    There are three important cases:
        * When neither operand specifies its `ion_type` or `annotations`, this method will only return True when
          the values of both operands are equivalent under the Ion data model.
        * When only one of the operands specifies its `ion_type` and `annotations`, this method will only return
          True when that operand has no annotations and has a value equivalent to the other operand under the Ion
          data model.
        * When both operands specify `ion_type` and `annotations`, this method will only return True when the
          ion_type and annotations of both are the same and their values are equivalent under the Ion data model.

    Note that the order of the operands does not matter.

    Args:
        a (object): The first operand.
        b (object): The second operand.
        timestamps_instants_only (Optional[bool]): False if timestamp objects (datetime and its subclasses) should
            be compared according to the Ion data model (where the instant, precision, and offset must be equal);
            True if these objects should be considered equivalent if they simply represent the same instant.
    """
    if timestamps_instants_only:
        return _ion_equals_timestamps_instants(a, b)
    return _ion_equals_timestamps_data_model(a, b)
[ "def", "ion_equals", "(", "a", ",", "b", ",", "timestamps_instants_only", "=", "False", ")", ":", "if", "timestamps_instants_only", ":", "return", "_ion_equals_timestamps_instants", "(", "a", ",", "b", ")", "return", "_ion_equals_timestamps_data_model", "(", "a", ",", "b", ")" ]
Tests two objects for equivalence under the Ion data model. There are three important cases: * When neither operand specifies its `ion_type` or `annotations`, this method will only return True when the values of both operands are equivalent under the Ion data model. * When only one of the operands specifies its `ion_type` and `annotations`, this method will only return True when that operand has no annotations and has a value equivalent to the other operand under the Ion data model. * When both operands specify `ion_type` and `annotations`, this method will only return True when the ion_type and annotations of both are the same and their values are equivalent under the Ion data model. Note that the order of the operands does not matter. Args: a (object): The first operand. b (object): The second operand. timestamps_instants_only (Optional[bool]): False if timestamp objects (datetime and its subclasses) should be compared according to the Ion data model (where the instant, precision, and offset must be equal); True if these objects should be considered equivalent if they simply represent the same instant.
[ "Tests", "two", "objects", "for", "equivalence", "under", "the", "Ion", "data", "model", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/equivalence.py#L35-L57
913
amzn/ion-python
amazon/ion/equivalence.py
_ion_equals
python
def _ion_equals(a, b, timestamp_comparison_func, recursive_comparison_func):
    """Compares a and b according to the description of the ion_equals method."""
    for a, b in ((a, b), (b, a)):  # Ensures that operand order does not matter.
        if isinstance(a, _IonNature):
            if isinstance(b, _IonNature):
                # Both operands have _IonNature. Their IonTypes and annotations must be equivalent.
                eq = a.ion_type is b.ion_type and _annotations_eq(a, b)
            else:
                # Only one operand has _IonNature. It cannot be equivalent to the other operand if it has annotations.
                eq = not a.ion_annotations
            if eq:
                if isinstance(a, IonPyList):
                    return _sequences_eq(a, b, recursive_comparison_func)
                elif isinstance(a, IonPyDict):
                    return _structs_eq(a, b, recursive_comparison_func)
                elif isinstance(a, IonPyTimestamp):
                    return timestamp_comparison_func(a, b)
                elif isinstance(a, IonPyNull):
                    return isinstance(b, IonPyNull) or (b is None and a.ion_type is IonType.NULL)
                elif isinstance(a, IonPySymbol) or (isinstance(a, IonPyText) and a.ion_type is IonType.SYMBOL):
                    return _symbols_eq(a, b)
                elif isinstance(a, IonPyDecimal):
                    return _decimals_eq(a, b)
                elif isinstance(a, IonPyFloat):
                    return _floats_eq(a, b)
                else:
                    return a == b
            return False
    # Reaching this point means that neither operand has _IonNature.
    for a, b in ((a, b), (b, a)):  # Ensures that operand order does not matter.
        if isinstance(a, list):
            return _sequences_eq(a, b, recursive_comparison_func)
        elif isinstance(a, dict):
            return _structs_eq(a, b, recursive_comparison_func)
        elif isinstance(a, datetime):
            return timestamp_comparison_func(a, b)
        elif isinstance(a, SymbolToken):
            return _symbols_eq(a, b)
        elif isinstance(a, Decimal):
            return _decimals_eq(a, b)
        elif isinstance(a, float):
            return _floats_eq(a, b)
    return a == b
[ "def", "_ion_equals", "(", "a", ",", "b", ",", "timestamp_comparison_func", ",", "recursive_comparison_func", ")", ":", "for", "a", ",", "b", "in", "(", "(", "a", ",", "b", ")", ",", "(", "b", ",", "a", ")", ")", ":", "# Ensures that operand order does not matter.", "if", "isinstance", "(", "a", ",", "_IonNature", ")", ":", "if", "isinstance", "(", "b", ",", "_IonNature", ")", ":", "# Both operands have _IonNature. Their IonTypes and annotations must be equivalent.", "eq", "=", "a", ".", "ion_type", "is", "b", ".", "ion_type", "and", "_annotations_eq", "(", "a", ",", "b", ")", "else", ":", "# Only one operand has _IonNature. It cannot be equivalent to the other operand if it has annotations.", "eq", "=", "not", "a", ".", "ion_annotations", "if", "eq", ":", "if", "isinstance", "(", "a", ",", "IonPyList", ")", ":", "return", "_sequences_eq", "(", "a", ",", "b", ",", "recursive_comparison_func", ")", "elif", "isinstance", "(", "a", ",", "IonPyDict", ")", ":", "return", "_structs_eq", "(", "a", ",", "b", ",", "recursive_comparison_func", ")", "elif", "isinstance", "(", "a", ",", "IonPyTimestamp", ")", ":", "return", "timestamp_comparison_func", "(", "a", ",", "b", ")", "elif", "isinstance", "(", "a", ",", "IonPyNull", ")", ":", "return", "isinstance", "(", "b", ",", "IonPyNull", ")", "or", "(", "b", "is", "None", "and", "a", ".", "ion_type", "is", "IonType", ".", "NULL", ")", "elif", "isinstance", "(", "a", ",", "IonPySymbol", ")", "or", "(", "isinstance", "(", "a", ",", "IonPyText", ")", "and", "a", ".", "ion_type", "is", "IonType", ".", "SYMBOL", ")", ":", "return", "_symbols_eq", "(", "a", ",", "b", ")", "elif", "isinstance", "(", "a", ",", "IonPyDecimal", ")", ":", "return", "_decimals_eq", "(", "a", ",", "b", ")", "elif", "isinstance", "(", "a", ",", "IonPyFloat", ")", ":", "return", "_floats_eq", "(", "a", ",", "b", ")", "else", ":", "return", "a", "==", "b", "return", "False", "# Reaching this point means that neither operand has _IonNature.", "for", "a", ",", "b", "in", "(", "(", "a", ",", "b", ")", ",", "(", "b", ",", "a", ")", ")", ":", "# Ensures that operand order does not matter.", "if", "isinstance", "(", "a", ",", "list", ")", ":", "return", "_sequences_eq", "(", "a", ",", "b", ",", "recursive_comparison_func", ")", "elif", "isinstance", "(", "a", ",", "dict", ")", ":", "return", "_structs_eq", "(", "a", ",", "b", ",", "recursive_comparison_func", ")", "elif", "isinstance", "(", "a", ",", "datetime", ")", ":", "return", "timestamp_comparison_func", "(", "a", ",", "b", ")", "elif", "isinstance", "(", "a", ",", "SymbolToken", ")", ":", "return", "_symbols_eq", "(", "a", ",", "b", ")", "elif", "isinstance", "(", "a", ",", "Decimal", ")", ":", "return", "_decimals_eq", "(", "a", ",", "b", ")", "elif", "isinstance", "(", "a", ",", "float", ")", ":", "return", "_floats_eq", "(", "a", ",", "b", ")", "return", "a", "==", "b" ]
Compares a and b according to the description of the ion_equals method.
[ "Compares", "a", "and", "b", "according", "to", "the", "description", "of", "the", "ion_equals", "method", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/equivalence.py#L68-L110
914
amzn/ion-python
amazon/ion/equivalence.py
_timestamps_eq
python
def _timestamps_eq(a, b):
    """Compares two timestamp operands for equivalence under the Ion data model."""
    assert isinstance(a, datetime)
    if not isinstance(b, datetime):
        return False
    # Local offsets must be equivalent.
    if (a.tzinfo is None) ^ (b.tzinfo is None):
        return False
    if a.utcoffset() != b.utcoffset():
        return False
    for a, b in ((a, b), (b, a)):
        if isinstance(a, Timestamp):
            if isinstance(b, Timestamp):
                # Both operands declare their precisions. They are only equivalent if their precisions are the same.
                if a.precision is b.precision and a.fractional_precision is b.fractional_precision:
                    break
                return False
            elif a.precision is not TimestampPrecision.SECOND or a.fractional_precision != MICROSECOND_PRECISION:
                # Only one of the operands declares its precision. It is only equivalent to the other (a naive
                # datetime) if it has full microseconds precision.
                return False
    return a == b
[ "def", "_timestamps_eq", "(", "a", ",", "b", ")", ":", "assert", "isinstance", "(", "a", ",", "datetime", ")", "if", "not", "isinstance", "(", "b", ",", "datetime", ")", ":", "return", "False", "# Local offsets must be equivalent.", "if", "(", "a", ".", "tzinfo", "is", "None", ")", "^", "(", "b", ".", "tzinfo", "is", "None", ")", ":", "return", "False", "if", "a", ".", "utcoffset", "(", ")", "!=", "b", ".", "utcoffset", "(", ")", ":", "return", "False", "for", "a", ",", "b", "in", "(", "(", "a", ",", "b", ")", ",", "(", "b", ",", "a", ")", ")", ":", "if", "isinstance", "(", "a", ",", "Timestamp", ")", ":", "if", "isinstance", "(", "b", ",", "Timestamp", ")", ":", "# Both operands declare their precisions. They are only equivalent if their precisions are the same.", "if", "a", ".", "precision", "is", "b", ".", "precision", "and", "a", ".", "fractional_precision", "is", "b", ".", "fractional_precision", ":", "break", "return", "False", "elif", "a", ".", "precision", "is", "not", "TimestampPrecision", ".", "SECOND", "or", "a", ".", "fractional_precision", "!=", "MICROSECOND_PRECISION", ":", "# Only one of the operands declares its precision. It is only equivalent to the other (a naive datetime)", "# if it has full microseconds precision.", "return", "False", "return", "a", "==", "b" ]
Compares two timestamp operands for equivalence under the Ion data model.
[ "Compares", "two", "timestamp", "operands", "for", "equivalence", "under", "the", "Ion", "data", "model", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/equivalence.py#L161-L182
915
amzn/ion-python
amazon/ion/equivalence.py
_timestamp_instants_eq
python
def _timestamp_instants_eq(a, b):
    """Compares two timestamp operands for point-in-time equivalence only."""
    assert isinstance(a, datetime)
    if not isinstance(b, datetime):
        return False
    # datetime's __eq__ can't compare a None offset and a non-None offset. For these equivalence semantics, a None
    # offset (unknown local offset) is treated equivalently to a +00:00.
    if a.tzinfo is None:
        a = a.replace(tzinfo=OffsetTZInfo())
    if b.tzinfo is None:
        b = b.replace(tzinfo=OffsetTZInfo())
    # datetime's __eq__ implementation compares instants; offsets and precision need not be equal.
    return a == b
[ "def", "_timestamp_instants_eq", "(", "a", ",", "b", ")", ":", "assert", "isinstance", "(", "a", ",", "datetime", ")", "if", "not", "isinstance", "(", "b", ",", "datetime", ")", ":", "return", "False", "# datetime's __eq__ can't compare a None offset and a non-None offset. For these equivalence semantics, a None", "# offset (unknown local offset) is treated equivalently to a +00:00.", "if", "a", ".", "tzinfo", "is", "None", ":", "a", "=", "a", ".", "replace", "(", "tzinfo", "=", "OffsetTZInfo", "(", ")", ")", "if", "b", ".", "tzinfo", "is", "None", ":", "b", "=", "b", ".", "replace", "(", "tzinfo", "=", "OffsetTZInfo", "(", ")", ")", "# datetime's __eq__ implementation compares instants; offsets and precision need not be equal.", "return", "a", "==", "b" ]
Compares two timestamp operands for point-in-time equivalence only.
[ "Compares", "two", "timestamp", "operands", "for", "point", "-", "in", "-", "time", "equivalence", "only", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/equivalence.py#L185-L197
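The equivalence records above distinguish data-model equivalence (instant, precision, and offset must all match) from instant-only equivalence (a missing offset is treated as +00:00). A short demonstration of ion_equals, as documented in the record for it above, using only stdlib datetimes; the expected results in the comments follow directly from the comparison rules shown in these records.

from datetime import datetime, timedelta, timezone

from amazon.ion.equivalence import ion_equals

utc = datetime(2024, 1, 1, 12, 0, tzinfo=timezone.utc)
naive = datetime(2024, 1, 1, 12, 0)                                  # unknown local offset
plus_one = datetime(2024, 1, 1, 13, 0, tzinfo=timezone(timedelta(hours=1)))

# Same instant, different offsets: equivalent only under instant-only semantics.
print(ion_equals(utc, plus_one))                                     # False
print(ion_equals(utc, plus_one, timestamps_instants_only=True))      # True

# A naive datetime never matches an offset-aware one under the data model,
# but is treated as +00:00 when only instants are compared.
print(ion_equals(utc, naive))                                        # False
print(ion_equals(utc, naive, timestamps_instants_only=True))         # True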
916
amzn/ion-python
amazon/ion/reader_binary.py
_parse_var_int_components
python
def _parse_var_int_components(buf, signed):
    """Parses a ``VarInt`` or ``VarUInt`` field from a file-like object."""
    value = 0
    sign = 1

    while True:
        ch = buf.read(1)
        if ch == '':
            raise IonException('Variable integer under-run')
        octet = ord(ch)
        if signed:
            if octet & _VAR_INT_SIGN_MASK:
                sign = -1
            value = octet & _VAR_INT_SIGN_VALUE_MASK
            signed = False
        else:
            value <<= _VAR_INT_VALUE_BITS
            value |= octet & _VAR_INT_VALUE_MASK

        if octet & _VAR_INT_SIGNAL_MASK:
            break

    return sign, value
[ "def", "_parse_var_int_components", "(", "buf", ",", "signed", ")", ":", "value", "=", "0", "sign", "=", "1", "while", "True", ":", "ch", "=", "buf", ".", "read", "(", "1", ")", "if", "ch", "==", "''", ":", "raise", "IonException", "(", "'Variable integer under-run'", ")", "octet", "=", "ord", "(", "ch", ")", "if", "signed", ":", "if", "octet", "&", "_VAR_INT_SIGN_MASK", ":", "sign", "=", "-", "1", "value", "=", "octet", "&", "_VAR_INT_SIGN_VALUE_MASK", "signed", "=", "False", "else", ":", "value", "<<=", "_VAR_INT_VALUE_BITS", "value", "|=", "octet", "&", "_VAR_INT_VALUE_MASK", "if", "octet", "&", "_VAR_INT_SIGNAL_MASK", ":", "break", "return", "sign", ",", "value" ]
Parses a ``VarInt`` or ``VarUInt`` field from a file-like object.
[ "Parses", "a", "VarInt", "or", "VarUInt", "field", "from", "a", "file", "-", "like", "object", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_binary.py#L132-L152
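The record above reads Ion's variable-length integer encoding: seven value bits per octet, a set high bit marking the final octet, and (for VarInt) a sign bit in the first octet. Below is a self-contained sketch of the same layout; the mask values follow the Ion binary format spec, but the constant names are local stand-ins, not the module's _VAR_INT_* constants.

import io

VALUE_BITS = 7
VALUE_MASK = 0x7F
SIGNAL_MASK = 0x80          # set on the last octet
SIGN_MASK = 0x40            # first octet only, VarInt
SIGN_VALUE_MASK = 0x3F

def parse_var_int(buf, signed=False):
    value, sign, first = 0, 1, True
    while True:
        ch = buf.read(1)
        if ch == b'':
            raise ValueError('Variable integer under-run')
        octet = ch[0]
        if signed and first:
            if octet & SIGN_MASK:
                sign = -1
            value = octet & SIGN_VALUE_MASK
            first = False
        else:
            value = (value << VALUE_BITS) | (octet & VALUE_MASK)
        if octet & SIGNAL_MASK:
            return sign * value

# 0xE5 = 0b11100101: end bit and sign bit set, magnitude 0x25 -> -37 as a VarInt.
print(parse_var_int(io.BytesIO(b'\xe5'), signed=True))    # -37
# Two-octet VarUInt: 0x0F 0xFF -> (0x0F << 7) | 0x7F = 2047.
print(parse_var_int(io.BytesIO(b'\x0f\xff')))             # 2047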
917
amzn/ion-python
amazon/ion/reader_binary.py
_parse_signed_int_components
python
def _parse_signed_int_components(buf):
    """Parses the remainder of a file-like object as a signed magnitude value.

    Returns:
        Returns a pair of the sign bit and the unsigned magnitude.
    """
    sign_bit = 0
    value = 0

    first = True
    while True:
        ch = buf.read(1)
        if ch == b'':
            break
        octet = ord(ch)
        if first:
            if octet & _SIGNED_INT_SIGN_MASK:
                sign_bit = 1
            value = octet & _SIGNED_INT_SIGN_VALUE_MASK
            first = False
        else:
            value <<= 8
            value |= octet

    return sign_bit, value
[ "def", "_parse_signed_int_components", "(", "buf", ")", ":", "sign_bit", "=", "0", "value", "=", "0", "first", "=", "True", "while", "True", ":", "ch", "=", "buf", ".", "read", "(", "1", ")", "if", "ch", "==", "b''", ":", "break", "octet", "=", "ord", "(", "ch", ")", "if", "first", ":", "if", "octet", "&", "_SIGNED_INT_SIGN_MASK", ":", "sign_bit", "=", "1", "value", "=", "octet", "&", "_SIGNED_INT_SIGN_VALUE_MASK", "first", "=", "False", "else", ":", "value", "<<=", "8", "value", "|=", "octet", "return", "sign_bit", ",", "value" ]
Parses the remainder of a file-like object as a signed magnitude value. Returns: Returns a pair of the sign bit and the unsigned magnitude.
[ "Parses", "the", "remainder", "of", "a", "file", "-", "like", "object", "as", "a", "signed", "magnitude", "value", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_binary.py#L160-L184
918
amzn/ion-python
amazon/ion/reader_binary.py
_parse_decimal
python
def _parse_decimal(buf):
    """Parses the remainder of a file-like object as a decimal."""
    exponent = _parse_var_int(buf, signed=True)
    sign_bit, coefficient = _parse_signed_int_components(buf)
    if coefficient == 0:
        # Handle the zero cases--especially negative zero
        value = Decimal((sign_bit, (0,), exponent))
    else:
        coefficient *= sign_bit and -1 or 1
        value = Decimal(coefficient).scaleb(exponent)
    return value
[ "def", "_parse_decimal", "(", "buf", ")", ":", "exponent", "=", "_parse_var_int", "(", "buf", ",", "signed", "=", "True", ")", "sign_bit", ",", "coefficient", "=", "_parse_signed_int_components", "(", "buf", ")", "if", "coefficient", "==", "0", ":", "# Handle the zero cases--especially negative zero", "value", "=", "Decimal", "(", "(", "sign_bit", ",", "(", "0", ",", ")", ",", "exponent", ")", ")", "else", ":", "coefficient", "*=", "sign_bit", "and", "-", "1", "or", "1", "value", "=", "Decimal", "(", "coefficient", ")", ".", "scaleb", "(", "exponent", ")", "return", "value" ]
Parses the remainder of a file-like object as a decimal.
[ "Parses", "the", "remainder", "of", "a", "file", "-", "like", "object", "as", "a", "decimal", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_binary.py#L187-L199
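The two records above combine into Ion's decimal decoding: a VarInt exponent followed by a signed-magnitude coefficient, with the tuple form of Decimal reserved for zero so that negative zero is preserved. A short worked example of that mapping; build_decimal is an illustrative helper for this sketch, not part of the library.

from decimal import Decimal

def build_decimal(sign_bit, coefficient, exponent):
    if coefficient == 0:
        return Decimal((sign_bit, (0,), exponent))   # preserves negative zero
    if sign_bit:
        coefficient = -coefficient
    return Decimal(coefficient).scaleb(exponent)     # coefficient * 10**exponent

print(build_decimal(0, 123, -2))   # 1.23
print(build_decimal(1, 5, 3))      # -5E+3
print(build_decimal(1, 0, -1))     # -0.0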
919
amzn/ion-python
amazon/ion/reader_binary.py
_create_delegate_handler
python
def _create_delegate_handler(delegate):
    """Creates a handler function that creates a co-routine that can yield once with the given
    positional arguments to the delegate as a transition.

    Args:
        delegate (Coroutine): The co-routine to delegate to.

    Returns:
        A :class:`callable` handler that returns a co-routine that ignores the data it receives
        and sends with the arguments given to the handler as a :class:`Transition`.
    """
    @coroutine
    def handler(*args):
        yield
        yield delegate.send(Transition(args, delegate))

    return handler
[ "def", "_create_delegate_handler", "(", "delegate", ")", ":", "@", "coroutine", "def", "handler", "(", "*", "args", ")", ":", "yield", "yield", "delegate", ".", "send", "(", "Transition", "(", "args", ",", "delegate", ")", ")", "return", "handler" ]
Creates a handler function that creates a co-routine that can yield once with the given positional arguments to the delegate as a transition. Args: delegate (Coroutine): The co-routine to delegate to. Returns: A :class:`callable` handler that returns a co-routine that ignores the data it receives and sends with the arguments given to the handler as a :class:`Transition`.
[ "Creates", "a", "handler", "function", "that", "creates", "a", "co", "-", "routine", "that", "can", "yield", "once", "with", "the", "given", "positional", "arguments", "to", "the", "delegate", "as", "a", "transition", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_binary.py#L314-L330
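The record above builds a handler that yields once, then forwards its construction arguments to a delegate co-routine as a Transition. A minimal self-contained sketch of that pattern using plain generators; the coroutine decorator and Transition tuple below are local stand-ins (assumptions), not the library's own definitions.

from collections import namedtuple

Transition = namedtuple('Transition', 'args delegate')

def coroutine(func):
    """Prime a generator so it is ready to receive .send()."""
    def wrapper(*args, **kwargs):
        gen = func(*args, **kwargs)
        next(gen)
        return gen
    return wrapper

@coroutine
def delegate():
    result = None
    while True:
        transition = yield result            # receives a Transition from a handler
        result = 'delegate saw %r' % (transition.args,)

def create_delegate_handler(target):
    @coroutine
    def handler(*args):
        yield                                # ignore whatever is sent in first
        yield target.send(Transition(args, target))
    return handler

d = delegate()
h = create_delegate_handler(d)('field_name', 42)
print(h.send(None))                          # delegate saw ('field_name', 42)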
920
amzn/ion-python
amazon/ion/reader_binary.py
_var_uint_field_handler
python
def _var_uint_field_handler(handler, ctx):
    """Handler co-routine for variable unsigned integer fields that.

    Invokes the given ``handler`` function with the read field and context,
    then immediately yields to the resulting co-routine.
    """
    _, self = yield
    queue = ctx.queue
    value = 0
    while True:
        if len(queue) == 0:
            # We don't know when the field ends, so read at least one byte.
            yield ctx.read_data_transition(1, self)
        octet = queue.read_byte()
        value <<= _VAR_INT_VALUE_BITS
        value |= octet & _VAR_INT_VALUE_MASK
        if octet & _VAR_INT_SIGNAL_MASK:
            break
    yield ctx.immediate_transition(handler(value, ctx))
[ "def", "_var_uint_field_handler", "(", "handler", ",", "ctx", ")", ":", "_", ",", "self", "=", "yield", "queue", "=", "ctx", ".", "queue", "value", "=", "0", "while", "True", ":", "if", "len", "(", "queue", ")", "==", "0", ":", "# We don't know when the field ends, so read at least one byte.", "yield", "ctx", ".", "read_data_transition", "(", "1", ",", "self", ")", "octet", "=", "queue", ".", "read_byte", "(", ")", "value", "<<=", "_VAR_INT_VALUE_BITS", "value", "|=", "octet", "&", "_VAR_INT_VALUE_MASK", "if", "octet", "&", "_VAR_INT_SIGNAL_MASK", ":", "break", "yield", "ctx", ".", "immediate_transition", "(", "handler", "(", "value", ",", "ctx", ")", ")" ]
Handler co-routine for variable unsigned integer fields that. Invokes the given ``handler`` function with the read field and context, then immediately yields to the resulting co-routine.
[ "Handler", "co", "-", "routine", "for", "variable", "unsigned", "integer", "fields", "that", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_binary.py#L398-L416
921
amzn/ion-python
amazon/ion/reader_binary.py
_length_scalar_handler
python
def _length_scalar_handler(scalar_factory, ion_type, length, ctx):
    """Handles scalars, ``scalar_factory`` is a function that returns a value or thunk."""
    _, self = yield
    if length == 0:
        data = b''
    else:
        yield ctx.read_data_transition(length, self)
        data = ctx.queue.read(length)

    scalar = scalar_factory(data)
    event_cls = IonEvent
    if callable(scalar):
        # TODO Wrap the exception to get context position.
        event_cls = IonThunkEvent
    yield ctx.event_transition(event_cls, IonEventType.SCALAR, ion_type, scalar)
[ "def", "_length_scalar_handler", "(", "scalar_factory", ",", "ion_type", ",", "length", ",", "ctx", ")", ":", "_", ",", "self", "=", "yield", "if", "length", "==", "0", ":", "data", "=", "b''", "else", ":", "yield", "ctx", ".", "read_data_transition", "(", "length", ",", "self", ")", "data", "=", "ctx", ".", "queue", ".", "read", "(", "length", ")", "scalar", "=", "scalar_factory", "(", "data", ")", "event_cls", "=", "IonEvent", "if", "callable", "(", "scalar", ")", ":", "# TODO Wrap the exception to get context position.", "event_cls", "=", "IonThunkEvent", "yield", "ctx", ".", "event_transition", "(", "event_cls", ",", "IonEventType", ".", "SCALAR", ",", "ion_type", ",", "scalar", ")" ]
Handles scalars, ``scalar_factory`` is a function that returns a value or thunk.
[ "Handles", "scalars", "scalar_factory", "is", "a", "function", "that", "returns", "a", "value", "or", "thunk", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_binary.py#L455-L469
922
amzn/ion-python
amazon/ion/reader_binary.py
_annotation_handler
python
def _annotation_handler(ion_type, length, ctx): """Handles annotations. ``ion_type`` is ignored.""" _, self = yield self_handler = _create_delegate_handler(self) if ctx.annotations is not None: raise IonException('Annotation cannot be nested in annotations') # We have to replace our context for annotations specifically to encapsulate the limit ctx = ctx.derive_container_context(length, add_depth=0) # Immediately read the length field and the annotations (ann_length, _), _ = yield ctx.immediate_transition( _var_uint_field_handler(self_handler, ctx) ) if ann_length < 1: raise IonException('Invalid annotation length subfield; annotation wrapper must have at least one annotation.') # Read/parse the annotations. yield ctx.read_data_transition(ann_length, self) ann_data = ctx.queue.read(ann_length) annotations = tuple(_parse_sid_iter(ann_data)) if ctx.limit - ctx.queue.position < 1: # There is no space left for the 'value' subfield, which is required. raise IonException('Incorrect annotation wrapper length.') # Go parse the start of the value but go back to the real parent container. yield ctx.immediate_transition( _start_type_handler(ctx.field_name, ctx.whence, ctx, annotations=annotations) )
[ "def", "_annotation_handler", "(", "ion_type", ",", "length", ",", "ctx", ")", ":", "_", ",", "self", "=", "yield", "self_handler", "=", "_create_delegate_handler", "(", "self", ")", "if", "ctx", ".", "annotations", "is", "not", "None", ":", "raise", "IonException", "(", "'Annotation cannot be nested in annotations'", ")", "# We have to replace our context for annotations specifically to encapsulate the limit", "ctx", "=", "ctx", ".", "derive_container_context", "(", "length", ",", "add_depth", "=", "0", ")", "# Immediately read the length field and the annotations", "(", "ann_length", ",", "_", ")", ",", "_", "=", "yield", "ctx", ".", "immediate_transition", "(", "_var_uint_field_handler", "(", "self_handler", ",", "ctx", ")", ")", "if", "ann_length", "<", "1", ":", "raise", "IonException", "(", "'Invalid annotation length subfield; annotation wrapper must have at least one annotation.'", ")", "# Read/parse the annotations.", "yield", "ctx", ".", "read_data_transition", "(", "ann_length", ",", "self", ")", "ann_data", "=", "ctx", ".", "queue", ".", "read", "(", "ann_length", ")", "annotations", "=", "tuple", "(", "_parse_sid_iter", "(", "ann_data", ")", ")", "if", "ctx", ".", "limit", "-", "ctx", ".", "queue", ".", "position", "<", "1", ":", "# There is no space left for the 'value' subfield, which is required.", "raise", "IonException", "(", "'Incorrect annotation wrapper length.'", ")", "# Go parse the start of the value but go back to the real parent container.", "yield", "ctx", ".", "immediate_transition", "(", "_start_type_handler", "(", "ctx", ".", "field_name", ",", "ctx", ".", "whence", ",", "ctx", ",", "annotations", "=", "annotations", ")", ")" ]
Handles annotations. ``ion_type`` is ignored.
[ "Handles", "annotations", ".", "ion_type", "is", "ignored", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_binary.py#L496-L526
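The length subfield this handler reads is an Ion binary VarUInt: seven data bits per octet, big-endian, with the high bit set on the terminating octet. A hedged, self-contained decoder sketch (a hypothetical helper, not the module's own _var_uint_field_handler):

def decode_varuint(data):
    # Returns (decoded value, octets consumed).
    value = 0
    for i, octet in enumerate(bytearray(data)):
        value = (value << 7) | (octet & 0x7F)
        if octet & 0x80:
            return value, i + 1
    raise ValueError('Unterminated VarUInt')

assert decode_varuint(b'\x81') == (1, 1)        # 1 fits in a single terminating octet
assert decode_varuint(b'\x0E\xEB') == (1899, 2) # 14 * 128 + 107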
923
amzn/ion-python
amazon/ion/reader_binary.py
_ordered_struct_start_handler
def _ordered_struct_start_handler(handler, ctx): """Handles the special case of ordered structs, specified by the type ID 0xD1. This coroutine's only purpose is to ensure that the struct in question declares at least one field name/value pair, as required by the spec. """ _, self = yield self_handler = _create_delegate_handler(self) (length, _), _ = yield ctx.immediate_transition( _var_uint_field_handler(self_handler, ctx) ) if length < 2: # A valid field name/value pair is at least two octets: one for the field name SID and one for the value. raise IonException('Ordered structs (type ID 0xD1) must have at least one field name/value pair.') yield ctx.immediate_transition(handler(length, ctx))
python
def _ordered_struct_start_handler(handler, ctx): """Handles the special case of ordered structs, specified by the type ID 0xD1. This coroutine's only purpose is to ensure that the struct in question declares at least one field name/value pair, as required by the spec. """ _, self = yield self_handler = _create_delegate_handler(self) (length, _), _ = yield ctx.immediate_transition( _var_uint_field_handler(self_handler, ctx) ) if length < 2: # A valid field name/value pair is at least two octets: one for the field name SID and one for the value. raise IonException('Ordered structs (type ID 0xD1) must have at least one field name/value pair.') yield ctx.immediate_transition(handler(length, ctx))
[ "def", "_ordered_struct_start_handler", "(", "handler", ",", "ctx", ")", ":", "_", ",", "self", "=", "yield", "self_handler", "=", "_create_delegate_handler", "(", "self", ")", "(", "length", ",", "_", ")", ",", "_", "=", "yield", "ctx", ".", "immediate_transition", "(", "_var_uint_field_handler", "(", "self_handler", ",", "ctx", ")", ")", "if", "length", "<", "2", ":", "# A valid field name/value pair is at least two octets: one for the field name SID and one for the value.", "raise", "IonException", "(", "'Ordered structs (type ID 0xD1) must have at least one field name/value pair.'", ")", "yield", "ctx", ".", "immediate_transition", "(", "handler", "(", "length", ",", "ctx", ")", ")" ]
Handles the special case of ordered structs, specified by the type ID 0xD1. This coroutine's only purpose is to ensure that the struct in question declares at least one field name/value pair, as required by the spec.
[ "Handles", "the", "special", "case", "of", "ordered", "structs", "specified", "by", "the", "type", "ID", "0xD1", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_binary.py#L530-L544
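For reference, the 0xD1 type octet mentioned above is just the struct type ID in the high nibble with a low nibble of 1; a minimal sketch of that composition (mirroring what _gen_type_octet presumably does):

def type_octet(tid, ln):
    # High nibble: type ID; low nibble: length field.
    return (tid << 4) | ln

assert type_octet(0xD, 0x1) == 0xD1   # the ordered-struct marker checked by this handler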
924
amzn/ion-python
amazon/ion/reader_binary.py
_container_start_handler
def _container_start_handler(ion_type, length, ctx): """Handles container delegation.""" _, self = yield container_ctx = ctx.derive_container_context(length) if ctx.annotations and ctx.limit != container_ctx.limit: # 'ctx' is the annotation wrapper context. `container_ctx` represents the wrapper's 'value' subfield. Their # limits must match. raise IonException('Incorrect annotation wrapper length.') delegate = _container_handler(ion_type, container_ctx) # We start the container, and transition to the new container processor. yield ctx.event_transition( IonEvent, IonEventType.CONTAINER_START, ion_type, value=None, whence=delegate )
python
def _container_start_handler(ion_type, length, ctx): """Handles container delegation.""" _, self = yield container_ctx = ctx.derive_container_context(length) if ctx.annotations and ctx.limit != container_ctx.limit: # 'ctx' is the annotation wrapper context. `container_ctx` represents the wrapper's 'value' subfield. Their # limits must match. raise IonException('Incorrect annotation wrapper length.') delegate = _container_handler(ion_type, container_ctx) # We start the container, and transition to the new container processor. yield ctx.event_transition( IonEvent, IonEventType.CONTAINER_START, ion_type, value=None, whence=delegate )
[ "def", "_container_start_handler", "(", "ion_type", ",", "length", ",", "ctx", ")", ":", "_", ",", "self", "=", "yield", "container_ctx", "=", "ctx", ".", "derive_container_context", "(", "length", ")", "if", "ctx", ".", "annotations", "and", "ctx", ".", "limit", "!=", "container_ctx", ".", "limit", ":", "# 'ctx' is the annotation wrapper context. `container_ctx` represents the wrapper's 'value' subfield. Their", "# limits must match.", "raise", "IonException", "(", "'Incorrect annotation wrapper length.'", ")", "delegate", "=", "_container_handler", "(", "ion_type", ",", "container_ctx", ")", "# We start the container, and transition to the new container processor.", "yield", "ctx", ".", "event_transition", "(", "IonEvent", ",", "IonEventType", ".", "CONTAINER_START", ",", "ion_type", ",", "value", "=", "None", ",", "whence", "=", "delegate", ")" ]
Handles container delegation.
[ "Handles", "container", "delegation", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_binary.py#L548-L562
925
amzn/ion-python
amazon/ion/reader_binary.py
_bind_length_handlers
def _bind_length_handlers(tids, user_handler, lns): """Binds a set of handlers with the given factory. Args: tids (Sequence[int]): The Type IDs to bind to. user_handler (Callable): A function that takes as its parameters :class:`IonType`, ``length``, and the ``ctx`` context returning a co-routine. lns (Sequence[int]): The low-nibble lengths to bind to. """ for tid in tids: for ln in lns: type_octet = _gen_type_octet(tid, ln) ion_type = _TID_VALUE_TYPE_TABLE[tid] if ln == 1 and ion_type is IonType.STRUCT: handler = partial(_ordered_struct_start_handler, partial(user_handler, ion_type)) elif ln < _LENGTH_FIELD_FOLLOWS: # Directly partially bind length. handler = partial(user_handler, ion_type, ln) else: # Delegate to length field parsing first. handler = partial(_var_uint_field_handler, partial(user_handler, ion_type)) _HANDLER_DISPATCH_TABLE[type_octet] = handler
python
def _bind_length_handlers(tids, user_handler, lns): """Binds a set of handlers with the given factory. Args: tids (Sequence[int]): The Type IDs to bind to. user_handler (Callable): A function that takes as its parameters :class:`IonType`, ``length``, and the ``ctx`` context returning a co-routine. lns (Sequence[int]): The low-nibble lengths to bind to. """ for tid in tids: for ln in lns: type_octet = _gen_type_octet(tid, ln) ion_type = _TID_VALUE_TYPE_TABLE[tid] if ln == 1 and ion_type is IonType.STRUCT: handler = partial(_ordered_struct_start_handler, partial(user_handler, ion_type)) elif ln < _LENGTH_FIELD_FOLLOWS: # Directly partially bind length. handler = partial(user_handler, ion_type, ln) else: # Delegate to length field parsing first. handler = partial(_var_uint_field_handler, partial(user_handler, ion_type)) _HANDLER_DISPATCH_TABLE[type_octet] = handler
[ "def", "_bind_length_handlers", "(", "tids", ",", "user_handler", ",", "lns", ")", ":", "for", "tid", "in", "tids", ":", "for", "ln", "in", "lns", ":", "type_octet", "=", "_gen_type_octet", "(", "tid", ",", "ln", ")", "ion_type", "=", "_TID_VALUE_TYPE_TABLE", "[", "tid", "]", "if", "ln", "==", "1", "and", "ion_type", "is", "IonType", ".", "STRUCT", ":", "handler", "=", "partial", "(", "_ordered_struct_start_handler", ",", "partial", "(", "user_handler", ",", "ion_type", ")", ")", "elif", "ln", "<", "_LENGTH_FIELD_FOLLOWS", ":", "# Directly partially bind length.", "handler", "=", "partial", "(", "user_handler", ",", "ion_type", ",", "ln", ")", "else", ":", "# Delegate to length field parsing first.", "handler", "=", "partial", "(", "_var_uint_field_handler", ",", "partial", "(", "user_handler", ",", "ion_type", ")", ")", "_HANDLER_DISPATCH_TABLE", "[", "type_octet", "]", "=", "handler" ]
Binds a set of handlers with the given factory. Args: tids (Sequence[int]): The Type IDs to bind to. user_handler (Callable): A function that takes :class:`IonType`, ``length``, and the ``ctx`` context as its parameters and returns a co-routine. lns (Sequence[int]): The low-nibble lengths to bind to.
[ "Binds", "a", "set", "of", "handlers", "with", "the", "given", "factory", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_binary.py#L777-L799
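The table population relies on functools.partial to pre-bind the Ion type (and sometimes the length) into each handler. A self-contained illustration of the pattern with hypothetical names, unrelated to the real dispatch table:

from functools import partial

DISPATCH = {}

def handle(ion_type, length, data):
    # Stand-in for a bound handler co-routine factory.
    return (ion_type, length, data)

for ln in range(3):
    type_octet = (0x2 << 4) | ln          # 0x2 is the positive-int type ID
    DISPATCH[type_octet] = partial(handle, 'int', ln)

assert DISPATCH[0x21]('payload') == ('int', 1, 'payload')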
926
amzn/ion-python
amazon/ion/reader_binary.py
_bind_length_scalar_handlers
def _bind_length_scalar_handlers(tids, scalar_factory, lns=_NON_ZERO_LENGTH_LNS): """Binds a set of scalar handlers for an inclusive range of low-nibble values. Args: tids (Sequence[int]): The Type IDs to bind to. scalar_factory (Callable): The factory for the scalar parsing function. This function can itself return a function representing a thunk to defer the scalar parsing or a direct value. lns (Sequence[int]): The low-nibble lengths to bind to. """ handler = partial(_length_scalar_handler, scalar_factory) return _bind_length_handlers(tids, handler, lns)
python
def _bind_length_scalar_handlers(tids, scalar_factory, lns=_NON_ZERO_LENGTH_LNS): """Binds a set of scalar handlers for an inclusive range of low-nibble values. Args: tids (Sequence[int]): The Type IDs to bind to. scalar_factory (Callable): The factory for the scalar parsing function. This function can itself return a function representing a thunk to defer the scalar parsing or a direct value. lns (Sequence[int]): The low-nibble lengths to bind to. """ handler = partial(_length_scalar_handler, scalar_factory) return _bind_length_handlers(tids, handler, lns)
[ "def", "_bind_length_scalar_handlers", "(", "tids", ",", "scalar_factory", ",", "lns", "=", "_NON_ZERO_LENGTH_LNS", ")", ":", "handler", "=", "partial", "(", "_length_scalar_handler", ",", "scalar_factory", ")", "return", "_bind_length_handlers", "(", "tids", ",", "handler", ",", "lns", ")" ]
Binds a set of scalar handlers for an inclusive range of low-nibble values. Args: tids (Sequence[int]): The Type IDs to bind to. scalar_factory (Callable): The factory for the scalar parsing function. This function may return either a direct value or a thunk (a function) that defers the scalar parsing. lns (Sequence[int]): The low-nibble lengths to bind to.
[ "Binds", "a", "set", "of", "scalar", "handlers", "for", "an", "inclusive", "range", "of", "low", "-", "nibble", "values", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_binary.py#L802-L813
927
amzn/ion-python
amazon/ion/reader_binary.py
_HandlerContext.remaining
def remaining(self): """Determines how many bytes are remaining in the current context.""" if self.depth == 0: return _STREAM_REMAINING return self.limit - self.queue.position
python
def remaining(self): """Determines how many bytes are remaining in the current context.""" if self.depth == 0: return _STREAM_REMAINING return self.limit - self.queue.position
[ "def", "remaining", "(", "self", ")", ":", "if", "self", ".", "depth", "==", "0", ":", "return", "_STREAM_REMAINING", "return", "self", ".", "limit", "-", "self", ".", "queue", ".", "position" ]
Determines how many bytes are remaining in the current context.
[ "Determines", "how", "many", "bytes", "are", "remaining", "in", "the", "current", "context", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_binary.py#L229-L233
928
amzn/ion-python
amazon/ion/reader_binary.py
_HandlerContext.read_data_transition
def read_data_transition(self, length, whence=None, skip=False, stream_event=ION_STREAM_INCOMPLETE_EVENT): """Returns an immediate event_transition to read a specified number of bytes.""" if whence is None: whence = self.whence return Transition( None, _read_data_handler(length, whence, self, skip, stream_event) )
python
def read_data_transition(self, length, whence=None, skip=False, stream_event=ION_STREAM_INCOMPLETE_EVENT): """Returns an immediate event_transition to read a specified number of bytes.""" if whence is None: whence = self.whence return Transition( None, _read_data_handler(length, whence, self, skip, stream_event) )
[ "def", "read_data_transition", "(", "self", ",", "length", ",", "whence", "=", "None", ",", "skip", "=", "False", ",", "stream_event", "=", "ION_STREAM_INCOMPLETE_EVENT", ")", ":", "if", "whence", "is", "None", ":", "whence", "=", "self", ".", "whence", "return", "Transition", "(", "None", ",", "_read_data_handler", "(", "length", ",", "whence", ",", "self", ",", "skip", ",", "stream_event", ")", ")" ]
Returns an immediate event_transition to read a specified number of bytes.
[ "Returns", "an", "immediate", "event_transition", "to", "read", "a", "specified", "number", "of", "bytes", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_binary.py#L235-L243
929
amzn/ion-python
amazon/ion/reader.py
_narrow_unichr
def _narrow_unichr(code_point): """Retrieves the unicode character representing any given code point, in a way that won't break on narrow builds. This is necessary because the built-in unichr function will fail for ordinals above 0xFFFF on narrow builds (UCS2); ordinals above 0xFFFF would require recalculating and combining surrogate pairs. This avoids that by retrieving the unicode character that was initially read. Args: code_point (int|CodePoint): An int or a subclass of int that contains the unicode character representing its code point in an attribute named 'char'. """ try: if len(code_point.char) > 1: return code_point.char except AttributeError: pass return six.unichr(code_point)
python
def _narrow_unichr(code_point): """Retrieves the unicode character representing any given code point, in a way that won't break on narrow builds. This is necessary because the built-in unichr function will fail for ordinals above 0xFFFF on narrow builds (UCS2); ordinals above 0xFFFF would require recalculating and combining surrogate pairs. This avoids that by retrieving the unicode character that was initially read. Args: code_point (int|CodePoint): An int or a subclass of int that contains the unicode character representing its code point in an attribute named 'char'. """ try: if len(code_point.char) > 1: return code_point.char except AttributeError: pass return six.unichr(code_point)
[ "def", "_narrow_unichr", "(", "code_point", ")", ":", "try", ":", "if", "len", "(", "code_point", ".", "char", ")", ">", "1", ":", "return", "code_point", ".", "char", "except", "AttributeError", ":", "pass", "return", "six", ".", "unichr", "(", "code_point", ")" ]
Retrieves the unicode character representing any given code point, in a way that won't break on narrow builds. This is necessary because the built-in unichr function will fail for ordinals above 0xFFFF on narrow builds (UCS2); ordinals above 0xFFFF would require recalculating and combining surrogate pairs. This avoids that by retrieving the unicode character that was initially read. Args: code_point (int|CodePoint): An int or a subclass of int that contains the unicode character representing its code point in an attribute named 'char'.
[ "Retrieves", "the", "unicode", "character", "representing", "any", "given", "code", "point", "in", "a", "way", "that", "won", "t", "break", "on", "narrow", "builds", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader.py#L43-L59
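The duck typing this helper relies on can be sketched with a hypothetical int subclass carrying the originally-read text in a ``char`` attribute (FakeCodePoint is illustrative only; the package's real CodePoint type lives elsewhere):

from amazon.ion.reader import _narrow_unichr   # private; imported here only to illustrate

class FakeCodePoint(int):
    # An int ordinal that also remembers the text it was decoded from.
    def __new__(cls, ordinal, char):
        self = super(FakeCodePoint, cls).__new__(cls, ordinal)
        self.char = char
        return self

assert _narrow_unichr(0x41) == u'A'
# On narrow builds the stored surrogate pair is returned as-is; on wide builds
# the fallback to unichr/chr yields the same single character.
assert _narrow_unichr(FakeCodePoint(0x1F600, u'\U0001F600')) == u'\U0001F600'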
930
amzn/ion-python
amazon/ion/reader.py
reader_trampoline
def reader_trampoline(start, allow_flush=False): """Provides the co-routine trampoline for a reader state machine. The given co-routine is a state machine that yields :class:`Transition` and takes a Transition of :class:`amazon.ion.core.DataEvent` and the co-routine itself. A reader must start with a ``ReadEventType.NEXT`` event to prime the parser. In many cases this will lead to an ``IonEventType.INCOMPLETE`` being yielded, but not always (consider a reader over an in-memory data structure). Notes: A reader delimits its incomplete parse points with ``IonEventType.INCOMPLETE``. Readers also delimit complete parse points with ``IonEventType.STREAM_END``; this is similar to the ``INCOMPLETE`` case except that it denotes that a logical termination of data is *allowed*. When these event are received, the only valid input event type is a ``ReadEventType.DATA``. Generally, ``ReadEventType.NEXT`` is used to get the next parse event, but ``ReadEventType.SKIP`` can be used to skip over the current container. An internal state machine co-routine can delimit a state change without yielding to the caller by yielding ``None`` event, this will cause the trampoline to invoke the transition delegate, immediately. Args: start: The reader co-routine to initially delegate to. allow_flush(Optional[bool]): True if this reader supports receiving ``NEXT`` after yielding ``INCOMPLETE`` to trigger an attempt to flush pending parse events, otherwise False. Yields: amazon.ion.core.IonEvent: the result of parsing. Receives :class:`DataEvent` to parse into :class:`amazon.ion.core.IonEvent`. """ data_event = yield if data_event is None or data_event.type is not ReadEventType.NEXT: raise TypeError('Reader must be started with NEXT') trans = Transition(None, start) while True: trans = trans.delegate.send(Transition(data_event, trans.delegate)) data_event = None if trans.event is not None: # Only yield if there is an event. data_event = (yield trans.event) if trans.event.event_type.is_stream_signal: if data_event.type is not ReadEventType.DATA: if not allow_flush or not (trans.event.event_type is IonEventType.INCOMPLETE and data_event.type is ReadEventType.NEXT): raise TypeError('Reader expected data: %r' % (data_event,)) else: if data_event.type is ReadEventType.DATA: raise TypeError('Reader did not expect data') if data_event.type is ReadEventType.DATA and len(data_event.data) == 0: raise ValueError('Empty data not allowed') if trans.event.depth == 0 \ and trans.event.event_type is not IonEventType.CONTAINER_START \ and data_event.type is ReadEventType.SKIP: raise TypeError('Cannot skip at the top-level')
python
def reader_trampoline(start, allow_flush=False): """Provides the co-routine trampoline for a reader state machine. The given co-routine is a state machine that yields :class:`Transition` and takes a Transition of :class:`amazon.ion.core.DataEvent` and the co-routine itself. A reader must start with a ``ReadEventType.NEXT`` event to prime the parser. In many cases this will lead to an ``IonEventType.INCOMPLETE`` being yielded, but not always (consider a reader over an in-memory data structure). Notes: A reader delimits its incomplete parse points with ``IonEventType.INCOMPLETE``. Readers also delimit complete parse points with ``IonEventType.STREAM_END``; this is similar to the ``INCOMPLETE`` case except that it denotes that a logical termination of data is *allowed*. When these event are received, the only valid input event type is a ``ReadEventType.DATA``. Generally, ``ReadEventType.NEXT`` is used to get the next parse event, but ``ReadEventType.SKIP`` can be used to skip over the current container. An internal state machine co-routine can delimit a state change without yielding to the caller by yielding ``None`` event, this will cause the trampoline to invoke the transition delegate, immediately. Args: start: The reader co-routine to initially delegate to. allow_flush(Optional[bool]): True if this reader supports receiving ``NEXT`` after yielding ``INCOMPLETE`` to trigger an attempt to flush pending parse events, otherwise False. Yields: amazon.ion.core.IonEvent: the result of parsing. Receives :class:`DataEvent` to parse into :class:`amazon.ion.core.IonEvent`. """ data_event = yield if data_event is None or data_event.type is not ReadEventType.NEXT: raise TypeError('Reader must be started with NEXT') trans = Transition(None, start) while True: trans = trans.delegate.send(Transition(data_event, trans.delegate)) data_event = None if trans.event is not None: # Only yield if there is an event. data_event = (yield trans.event) if trans.event.event_type.is_stream_signal: if data_event.type is not ReadEventType.DATA: if not allow_flush or not (trans.event.event_type is IonEventType.INCOMPLETE and data_event.type is ReadEventType.NEXT): raise TypeError('Reader expected data: %r' % (data_event,)) else: if data_event.type is ReadEventType.DATA: raise TypeError('Reader did not expect data') if data_event.type is ReadEventType.DATA and len(data_event.data) == 0: raise ValueError('Empty data not allowed') if trans.event.depth == 0 \ and trans.event.event_type is not IonEventType.CONTAINER_START \ and data_event.type is ReadEventType.SKIP: raise TypeError('Cannot skip at the top-level')
[ "def", "reader_trampoline", "(", "start", ",", "allow_flush", "=", "False", ")", ":", "data_event", "=", "yield", "if", "data_event", "is", "None", "or", "data_event", ".", "type", "is", "not", "ReadEventType", ".", "NEXT", ":", "raise", "TypeError", "(", "'Reader must be started with NEXT'", ")", "trans", "=", "Transition", "(", "None", ",", "start", ")", "while", "True", ":", "trans", "=", "trans", ".", "delegate", ".", "send", "(", "Transition", "(", "data_event", ",", "trans", ".", "delegate", ")", ")", "data_event", "=", "None", "if", "trans", ".", "event", "is", "not", "None", ":", "# Only yield if there is an event.", "data_event", "=", "(", "yield", "trans", ".", "event", ")", "if", "trans", ".", "event", ".", "event_type", ".", "is_stream_signal", ":", "if", "data_event", ".", "type", "is", "not", "ReadEventType", ".", "DATA", ":", "if", "not", "allow_flush", "or", "not", "(", "trans", ".", "event", ".", "event_type", "is", "IonEventType", ".", "INCOMPLETE", "and", "data_event", ".", "type", "is", "ReadEventType", ".", "NEXT", ")", ":", "raise", "TypeError", "(", "'Reader expected data: %r'", "%", "(", "data_event", ",", ")", ")", "else", ":", "if", "data_event", ".", "type", "is", "ReadEventType", ".", "DATA", ":", "raise", "TypeError", "(", "'Reader did not expect data'", ")", "if", "data_event", ".", "type", "is", "ReadEventType", ".", "DATA", "and", "len", "(", "data_event", ".", "data", ")", "==", "0", ":", "raise", "ValueError", "(", "'Empty data not allowed'", ")", "if", "trans", ".", "event", ".", "depth", "==", "0", "and", "trans", ".", "event", ".", "event_type", "is", "not", "IonEventType", ".", "CONTAINER_START", "and", "data_event", ".", "type", "is", "ReadEventType", ".", "SKIP", ":", "raise", "TypeError", "(", "'Cannot skip at the top-level'", ")" ]
Provides the co-routine trampoline for a reader state machine. The given co-routine is a state machine that yields :class:`Transition` and takes a Transition of :class:`amazon.ion.core.DataEvent` and the co-routine itself. A reader must start with a ``ReadEventType.NEXT`` event to prime the parser. In many cases this will lead to an ``IonEventType.INCOMPLETE`` being yielded, but not always (consider a reader over an in-memory data structure). Notes: A reader delimits its incomplete parse points with ``IonEventType.INCOMPLETE``. Readers also delimit complete parse points with ``IonEventType.STREAM_END``; this is similar to the ``INCOMPLETE`` case except that it denotes that a logical termination of data is *allowed*. When these events are received, the only valid input event type is a ``ReadEventType.DATA``. Generally, ``ReadEventType.NEXT`` is used to get the next parse event, but ``ReadEventType.SKIP`` can be used to skip over the current container. An internal state machine co-routine can delimit a state change without yielding to the caller by yielding a ``None`` event; this causes the trampoline to invoke the transition delegate immediately. Args: start: The reader co-routine to initially delegate to. allow_flush(Optional[bool]): True if this reader supports receiving ``NEXT`` after yielding ``INCOMPLETE`` to trigger an attempt to flush pending parse events, otherwise False. Yields: amazon.ion.core.IonEvent: the result of parsing. Receives :class:`DataEvent` to parse into :class:`amazon.ion.core.IonEvent`.
[ "Provides", "the", "co", "-", "routine", "trampoline", "for", "a", "reader", "state", "machine", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader.py#L312-L369
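A toy, self-contained illustration of the trampoline contract (not ion-python API): the driver is primed by its first send, and the wrapped state machine receives (input, itself) so it can name itself as the continuation of the transitions it yields.

def toy_state_machine():
    received, self_ref = yield             # primed by the trampoline below
    while True:
        received, self_ref = yield ('echo', received)

def toy_trampoline(start):
    data = yield                            # the caller's first send() primes this
    machine = start
    next(machine)                           # advance the machine to its first yield
    while True:
        event = machine.send((data, machine))
        data = yield event

t = toy_trampoline(toy_state_machine())
next(t)                                     # prime the trampoline
assert t.send('NEXT') == ('echo', 'NEXT')
assert t.send('NEXT') == ('echo', 'NEXT')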
931
amzn/ion-python
amazon/ion/reader.py
blocking_reader
def blocking_reader(reader, input, buffer_size=_DEFAULT_BUFFER_SIZE): """Provides an implementation of using the reader co-routine with a file-like object. Args: reader(Coroutine): A reader co-routine. input(BaseIO): The file-like object to read from. buffer_size(Optional[int]): The optional buffer size to use. """ ion_event = None while True: read_event = (yield ion_event) ion_event = reader.send(read_event) while ion_event is not None and ion_event.event_type.is_stream_signal: data = input.read(buffer_size) if len(data) == 0: # End of file. if ion_event.event_type is IonEventType.INCOMPLETE: ion_event = reader.send(NEXT_EVENT) continue else: yield ION_STREAM_END_EVENT return ion_event = reader.send(read_data_event(data))
python
def blocking_reader(reader, input, buffer_size=_DEFAULT_BUFFER_SIZE): """Provides an implementation of using the reader co-routine with a file-like object. Args: reader(Coroutine): A reader co-routine. input(BaseIO): The file-like object to read from. buffer_size(Optional[int]): The optional buffer size to use. """ ion_event = None while True: read_event = (yield ion_event) ion_event = reader.send(read_event) while ion_event is not None and ion_event.event_type.is_stream_signal: data = input.read(buffer_size) if len(data) == 0: # End of file. if ion_event.event_type is IonEventType.INCOMPLETE: ion_event = reader.send(NEXT_EVENT) continue else: yield ION_STREAM_END_EVENT return ion_event = reader.send(read_data_event(data))
[ "def", "blocking_reader", "(", "reader", ",", "input", ",", "buffer_size", "=", "_DEFAULT_BUFFER_SIZE", ")", ":", "ion_event", "=", "None", "while", "True", ":", "read_event", "=", "(", "yield", "ion_event", ")", "ion_event", "=", "reader", ".", "send", "(", "read_event", ")", "while", "ion_event", "is", "not", "None", "and", "ion_event", ".", "event_type", ".", "is_stream_signal", ":", "data", "=", "input", ".", "read", "(", "buffer_size", ")", "if", "len", "(", "data", ")", "==", "0", ":", "# End of file.", "if", "ion_event", ".", "event_type", "is", "IonEventType", ".", "INCOMPLETE", ":", "ion_event", "=", "reader", ".", "send", "(", "NEXT_EVENT", ")", "continue", "else", ":", "yield", "ION_STREAM_END_EVENT", "return", "ion_event", "=", "reader", ".", "send", "(", "read_data_event", "(", "data", ")", ")" ]
Provides an implementation of using the reader co-routine with a file-like object. Args: reader(Coroutine): A reader co-routine. input(BaseIO): The file-like object to read from. buffer_size(Optional[int]): The optional buffer size to use.
[ "Provides", "an", "implementation", "of", "using", "the", "reader", "co", "-", "routine", "with", "a", "file", "-", "like", "object", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader.py#L376-L398
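A hedged end-to-end sketch of driving this over an in-memory binary stream. Assumptions are flagged inline: the raw binary reader factory is taken to be reader_binary.raw_reader, NEXT_EVENT is assumed to live in amazon.ion.reader, and blocking_reader is assumed to be wrapped by the package's coroutine decorator (so it is already primed); verify all three against your version.

import io

from amazon.ion.core import IonEventType
from amazon.ion.reader import NEXT_EVENT, blocking_reader   # NEXT_EVENT location assumed
from amazon.ion.reader_binary import raw_reader             # assumed factory name
from amazon.ion.reader_managed import managed_reader

# IVM, then the int 42 (0x21 0x2A), then boolean true (0x11).
data = io.BytesIO(b'\xE0\x01\x00\xEA\x21\x2A\x11')
events = blocking_reader(managed_reader(raw_reader()), data)
event = events.send(NEXT_EVENT)
while event.event_type is not IonEventType.STREAM_END:
    print(event.event_type, event.ion_type, event.value)
    event = events.send(NEXT_EVENT)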
932
amzn/ion-python
amazon/ion/reader.py
BufferQueue.read
def read(self, length, skip=False): """Consumes the first ``length`` bytes from the accumulator.""" if length > self.__size: raise IndexError( 'Cannot pop %d bytes, %d bytes in buffer queue' % (length, self.__size)) self.position += length self.__size -= length segments = self.__segments offset = self.__offset data = self.__data_cls() while length > 0: segment = segments[0] segment_off = offset segment_len = len(segment) segment_rem = segment_len - segment_off segment_read_len = min(segment_rem, length) if segment_off == 0 and segment_read_len == segment_rem: # consume an entire segment if skip: segment_slice = self.__element_type() else: segment_slice = segment else: # Consume a part of the segment. if skip: segment_slice = self.__element_type() else: segment_slice = segment[segment_off:segment_off + segment_read_len] offset = 0 segment_off += segment_read_len if segment_off == segment_len: segments.popleft() self.__offset = 0 else: self.__offset = segment_off if length <= segment_rem and len(data) == 0: return segment_slice data.extend(segment_slice) length -= segment_read_len if self.is_unicode: return data.as_text() else: return data
python
def read(self, length, skip=False): """Consumes the first ``length`` bytes from the accumulator.""" if length > self.__size: raise IndexError( 'Cannot pop %d bytes, %d bytes in buffer queue' % (length, self.__size)) self.position += length self.__size -= length segments = self.__segments offset = self.__offset data = self.__data_cls() while length > 0: segment = segments[0] segment_off = offset segment_len = len(segment) segment_rem = segment_len - segment_off segment_read_len = min(segment_rem, length) if segment_off == 0 and segment_read_len == segment_rem: # consume an entire segment if skip: segment_slice = self.__element_type() else: segment_slice = segment else: # Consume a part of the segment. if skip: segment_slice = self.__element_type() else: segment_slice = segment[segment_off:segment_off + segment_read_len] offset = 0 segment_off += segment_read_len if segment_off == segment_len: segments.popleft() self.__offset = 0 else: self.__offset = segment_off if length <= segment_rem and len(data) == 0: return segment_slice data.extend(segment_slice) length -= segment_read_len if self.is_unicode: return data.as_text() else: return data
[ "def", "read", "(", "self", ",", "length", ",", "skip", "=", "False", ")", ":", "if", "length", ">", "self", ".", "__size", ":", "raise", "IndexError", "(", "'Cannot pop %d bytes, %d bytes in buffer queue'", "%", "(", "length", ",", "self", ".", "__size", ")", ")", "self", ".", "position", "+=", "length", "self", ".", "__size", "-=", "length", "segments", "=", "self", ".", "__segments", "offset", "=", "self", ".", "__offset", "data", "=", "self", ".", "__data_cls", "(", ")", "while", "length", ">", "0", ":", "segment", "=", "segments", "[", "0", "]", "segment_off", "=", "offset", "segment_len", "=", "len", "(", "segment", ")", "segment_rem", "=", "segment_len", "-", "segment_off", "segment_read_len", "=", "min", "(", "segment_rem", ",", "length", ")", "if", "segment_off", "==", "0", "and", "segment_read_len", "==", "segment_rem", ":", "# consume an entire segment", "if", "skip", ":", "segment_slice", "=", "self", ".", "__element_type", "(", ")", "else", ":", "segment_slice", "=", "segment", "else", ":", "# Consume a part of the segment.", "if", "skip", ":", "segment_slice", "=", "self", ".", "__element_type", "(", ")", "else", ":", "segment_slice", "=", "segment", "[", "segment_off", ":", "segment_off", "+", "segment_read_len", "]", "offset", "=", "0", "segment_off", "+=", "segment_read_len", "if", "segment_off", "==", "segment_len", ":", "segments", ".", "popleft", "(", ")", "self", ".", "__offset", "=", "0", "else", ":", "self", ".", "__offset", "=", "segment_off", "if", "length", "<=", "segment_rem", "and", "len", "(", "data", ")", "==", "0", ":", "return", "segment_slice", "data", ".", "extend", "(", "segment_slice", ")", "length", "-=", "segment_read_len", "if", "self", ".", "is_unicode", ":", "return", "data", ".", "as_text", "(", ")", "else", ":", "return", "data" ]
Consumes the first ``length`` bytes from the accumulator.
[ "Consumes", "the", "first", "length", "bytes", "from", "the", "accumulator", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader.py#L154-L199
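A small usage sketch of the queue itself (assuming extend() is the method that appends segments, as the readers use it):

from amazon.ion.reader import BufferQueue

queue = BufferQueue()
queue.extend(b'abc')
queue.extend(b'def')
assert queue.read(2) == b'ab'        # served from the first segment only
assert queue.read(3) == b'cde'       # crosses the segment boundary
assert queue.position == 5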
933
amzn/ion-python
amazon/ion/reader.py
BufferQueue.unread
def unread(self, c): """Unread the given character, byte, or code point. If this is a unicode buffer and the input is an int or byte, it will be interpreted as an ordinal representing a unicode code point. If this is a binary buffer, the input must be a byte or int; a unicode character will raise an error. """ if self.position < 1: raise IndexError('Cannot unread an empty buffer queue.') if isinstance(c, six.text_type): if not self.is_unicode: BufferQueue._incompatible_types(self.is_unicode, c) else: c = self.__chr(c) num_code_units = self.is_unicode and len(c) or 1 if self.__offset == 0: if num_code_units == 1 and six.PY3: if self.is_unicode: segment = c else: segment = six.int2byte(c) else: segment = c self.__segments.appendleft(segment) else: self.__offset -= num_code_units def verify(ch, idx): existing = self.__segments[0][self.__offset + idx] if existing != ch: raise ValueError('Attempted to unread %s when %s was expected.' % (ch, existing)) if num_code_units == 1: verify(c, 0) else: for i in range(num_code_units): verify(c[i], i) self.__size += num_code_units self.position -= num_code_units
python
def unread(self, c): """Unread the given character, byte, or code point. If this is a unicode buffer and the input is an int or byte, it will be interpreted as an ordinal representing a unicode code point. If this is a binary buffer, the input must be a byte or int; a unicode character will raise an error. """ if self.position < 1: raise IndexError('Cannot unread an empty buffer queue.') if isinstance(c, six.text_type): if not self.is_unicode: BufferQueue._incompatible_types(self.is_unicode, c) else: c = self.__chr(c) num_code_units = self.is_unicode and len(c) or 1 if self.__offset == 0: if num_code_units == 1 and six.PY3: if self.is_unicode: segment = c else: segment = six.int2byte(c) else: segment = c self.__segments.appendleft(segment) else: self.__offset -= num_code_units def verify(ch, idx): existing = self.__segments[0][self.__offset + idx] if existing != ch: raise ValueError('Attempted to unread %s when %s was expected.' % (ch, existing)) if num_code_units == 1: verify(c, 0) else: for i in range(num_code_units): verify(c[i], i) self.__size += num_code_units self.position -= num_code_units
[ "def", "unread", "(", "self", ",", "c", ")", ":", "if", "self", ".", "position", "<", "1", ":", "raise", "IndexError", "(", "'Cannot unread an empty buffer queue.'", ")", "if", "isinstance", "(", "c", ",", "six", ".", "text_type", ")", ":", "if", "not", "self", ".", "is_unicode", ":", "BufferQueue", ".", "_incompatible_types", "(", "self", ".", "is_unicode", ",", "c", ")", "else", ":", "c", "=", "self", ".", "__chr", "(", "c", ")", "num_code_units", "=", "self", ".", "is_unicode", "and", "len", "(", "c", ")", "or", "1", "if", "self", ".", "__offset", "==", "0", ":", "if", "num_code_units", "==", "1", "and", "six", ".", "PY3", ":", "if", "self", ".", "is_unicode", ":", "segment", "=", "c", "else", ":", "segment", "=", "six", ".", "int2byte", "(", "c", ")", "else", ":", "segment", "=", "c", "self", ".", "__segments", ".", "appendleft", "(", "segment", ")", "else", ":", "self", ".", "__offset", "-=", "num_code_units", "def", "verify", "(", "ch", ",", "idx", ")", ":", "existing", "=", "self", ".", "__segments", "[", "0", "]", "[", "self", ".", "__offset", "+", "idx", "]", "if", "existing", "!=", "ch", ":", "raise", "ValueError", "(", "'Attempted to unread %s when %s was expected.'", "%", "(", "ch", ",", "existing", ")", ")", "if", "num_code_units", "==", "1", ":", "verify", "(", "c", ",", "0", ")", "else", ":", "for", "i", "in", "range", "(", "num_code_units", ")", ":", "verify", "(", "c", "[", "i", "]", ",", "i", ")", "self", ".", "__size", "+=", "num_code_units", "self", ".", "position", "-=", "num_code_units" ]
Unread the given character, byte, or code point. If this is a unicode buffer and the input is an int or byte, it will be interpreted as an ordinal representing a unicode code point. If this is a binary buffer, the input must be a byte or int; a unicode character will raise an error.
[ "Unread", "the", "given", "character", "byte", "or", "code", "point", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader.py#L221-L259
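Continuing the sketch above: for a binary queue, unread() takes the byte's ordinal, and it must match what was actually read (otherwise a ValueError is raised).

from amazon.ion.reader import BufferQueue

queue = BufferQueue()
queue.extend(b'abc')
first = queue.read(1)        # b'a'
queue.unread(0x61)           # put the same byte back, by ordinal
assert queue.read(3) == b'abc'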
934
amzn/ion-python
amazon/ion/reader.py
BufferQueue.skip
def skip(self, length): """Removes ``length`` bytes and returns the number length still required to skip""" if length >= self.__size: skip_amount = self.__size rem = length - skip_amount self.__segments.clear() self.__offset = 0 self.__size = 0 self.position += skip_amount else: rem = 0 self.read(length, skip=True) return rem
python
def skip(self, length): """Removes ``length`` bytes and returns the number length still required to skip""" if length >= self.__size: skip_amount = self.__size rem = length - skip_amount self.__segments.clear() self.__offset = 0 self.__size = 0 self.position += skip_amount else: rem = 0 self.read(length, skip=True) return rem
[ "def", "skip", "(", "self", ",", "length", ")", ":", "if", "length", ">=", "self", ".", "__size", ":", "skip_amount", "=", "self", ".", "__size", "rem", "=", "length", "-", "skip_amount", "self", ".", "__segments", ".", "clear", "(", ")", "self", ".", "__offset", "=", "0", "self", ".", "__size", "=", "0", "self", ".", "position", "+=", "skip_amount", "else", ":", "rem", "=", "0", "self", ".", "read", "(", "length", ",", "skip", "=", "True", ")", "return", "rem" ]
Removes ``length`` bytes and returns the number of bytes still required to skip.
[ "Removes", "length", "bytes", "and", "returns", "the", "number", "length", "still", "required", "to", "skip" ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader.py#L261-L273
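And the skip contract, continuing the same sketch: the return value is how much of the requested skip could not be satisfied from the buffer.

from amazon.ion.reader import BufferQueue

queue = BufferQueue()
queue.extend(b'abcdef')
assert queue.skip(4) == 0      # fully satisfied from the buffer
assert queue.skip(10) == 8     # only 2 bytes remained, so 8 are still owed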
935
amzn/ion-python
amazon/ion/reader_managed.py
managed_reader
def managed_reader(reader, catalog=None): """Managed reader wrapping another reader. Args: reader (Coroutine): The underlying non-blocking reader co-routine. catalog (Optional[SymbolTableCatalog]): The catalog to use for resolving imports. Yields: Events from the underlying reader delegating to symbol table processing as needed. The user will never see things like version markers or local symbol tables. """ if catalog is None: catalog = SymbolTableCatalog() ctx = _ManagedContext(catalog) symbol_trans = Transition(None, None) ion_event = None while True: if symbol_trans.delegate is not None \ and ion_event is not None \ and not ion_event.event_type.is_stream_signal: # We have a symbol processor active, do not yield to user. delegate = symbol_trans.delegate symbol_trans = delegate.send(Transition(ion_event, delegate)) if symbol_trans.delegate is None: # When the symbol processor terminates, the event is the context # and there is no delegate. ctx = symbol_trans.event data_event = NEXT_EVENT else: data_event = symbol_trans.event else: data_event = None if ion_event is not None: event_type = ion_event.event_type ion_type = ion_event.ion_type depth = ion_event.depth # System values only happen at the top-level if depth == 0: if event_type is IonEventType.VERSION_MARKER: if ion_event != ION_VERSION_MARKER_EVENT: raise IonException('Invalid IVM: %s' % (ion_event,)) # Reset and swallow IVM ctx = _ManagedContext(ctx.catalog) data_event = NEXT_EVENT elif ion_type is IonType.SYMBOL \ and len(ion_event.annotations) == 0 \ and ion_event.value is not None \ and ctx.resolve(ion_event.value).text == TEXT_ION_1_0: assert symbol_trans.delegate is None # A faux IVM is a NOP data_event = NEXT_EVENT elif event_type is IonEventType.CONTAINER_START \ and ion_type is IonType.STRUCT \ and ctx.has_symbol_table_annotation(ion_event.annotations): assert symbol_trans.delegate is None # Activate a new symbol processor. delegate = _local_symbol_table_handler(ctx) symbol_trans = Transition(None, delegate) data_event = NEXT_EVENT if data_event is None: # No system processing or we have to get data, yield control. if ion_event is not None: ion_event = _managed_thunk_event(ctx, ion_event) data_event = yield ion_event ion_event = reader.send(data_event)
python
def managed_reader(reader, catalog=None): """Managed reader wrapping another reader. Args: reader (Coroutine): The underlying non-blocking reader co-routine. catalog (Optional[SymbolTableCatalog]): The catalog to use for resolving imports. Yields: Events from the underlying reader delegating to symbol table processing as needed. The user will never see things like version markers or local symbol tables. """ if catalog is None: catalog = SymbolTableCatalog() ctx = _ManagedContext(catalog) symbol_trans = Transition(None, None) ion_event = None while True: if symbol_trans.delegate is not None \ and ion_event is not None \ and not ion_event.event_type.is_stream_signal: # We have a symbol processor active, do not yield to user. delegate = symbol_trans.delegate symbol_trans = delegate.send(Transition(ion_event, delegate)) if symbol_trans.delegate is None: # When the symbol processor terminates, the event is the context # and there is no delegate. ctx = symbol_trans.event data_event = NEXT_EVENT else: data_event = symbol_trans.event else: data_event = None if ion_event is not None: event_type = ion_event.event_type ion_type = ion_event.ion_type depth = ion_event.depth # System values only happen at the top-level if depth == 0: if event_type is IonEventType.VERSION_MARKER: if ion_event != ION_VERSION_MARKER_EVENT: raise IonException('Invalid IVM: %s' % (ion_event,)) # Reset and swallow IVM ctx = _ManagedContext(ctx.catalog) data_event = NEXT_EVENT elif ion_type is IonType.SYMBOL \ and len(ion_event.annotations) == 0 \ and ion_event.value is not None \ and ctx.resolve(ion_event.value).text == TEXT_ION_1_0: assert symbol_trans.delegate is None # A faux IVM is a NOP data_event = NEXT_EVENT elif event_type is IonEventType.CONTAINER_START \ and ion_type is IonType.STRUCT \ and ctx.has_symbol_table_annotation(ion_event.annotations): assert symbol_trans.delegate is None # Activate a new symbol processor. delegate = _local_symbol_table_handler(ctx) symbol_trans = Transition(None, delegate) data_event = NEXT_EVENT if data_event is None: # No system processing or we have to get data, yield control. if ion_event is not None: ion_event = _managed_thunk_event(ctx, ion_event) data_event = yield ion_event ion_event = reader.send(data_event)
[ "def", "managed_reader", "(", "reader", ",", "catalog", "=", "None", ")", ":", "if", "catalog", "is", "None", ":", "catalog", "=", "SymbolTableCatalog", "(", ")", "ctx", "=", "_ManagedContext", "(", "catalog", ")", "symbol_trans", "=", "Transition", "(", "None", ",", "None", ")", "ion_event", "=", "None", "while", "True", ":", "if", "symbol_trans", ".", "delegate", "is", "not", "None", "and", "ion_event", "is", "not", "None", "and", "not", "ion_event", ".", "event_type", ".", "is_stream_signal", ":", "# We have a symbol processor active, do not yield to user.", "delegate", "=", "symbol_trans", ".", "delegate", "symbol_trans", "=", "delegate", ".", "send", "(", "Transition", "(", "ion_event", ",", "delegate", ")", ")", "if", "symbol_trans", ".", "delegate", "is", "None", ":", "# When the symbol processor terminates, the event is the context", "# and there is no delegate.", "ctx", "=", "symbol_trans", ".", "event", "data_event", "=", "NEXT_EVENT", "else", ":", "data_event", "=", "symbol_trans", ".", "event", "else", ":", "data_event", "=", "None", "if", "ion_event", "is", "not", "None", ":", "event_type", "=", "ion_event", ".", "event_type", "ion_type", "=", "ion_event", ".", "ion_type", "depth", "=", "ion_event", ".", "depth", "# System values only happen at the top-level", "if", "depth", "==", "0", ":", "if", "event_type", "is", "IonEventType", ".", "VERSION_MARKER", ":", "if", "ion_event", "!=", "ION_VERSION_MARKER_EVENT", ":", "raise", "IonException", "(", "'Invalid IVM: %s'", "%", "(", "ion_event", ",", ")", ")", "# Reset and swallow IVM", "ctx", "=", "_ManagedContext", "(", "ctx", ".", "catalog", ")", "data_event", "=", "NEXT_EVENT", "elif", "ion_type", "is", "IonType", ".", "SYMBOL", "and", "len", "(", "ion_event", ".", "annotations", ")", "==", "0", "and", "ion_event", ".", "value", "is", "not", "None", "and", "ctx", ".", "resolve", "(", "ion_event", ".", "value", ")", ".", "text", "==", "TEXT_ION_1_0", ":", "assert", "symbol_trans", ".", "delegate", "is", "None", "# A faux IVM is a NOP", "data_event", "=", "NEXT_EVENT", "elif", "event_type", "is", "IonEventType", ".", "CONTAINER_START", "and", "ion_type", "is", "IonType", ".", "STRUCT", "and", "ctx", ".", "has_symbol_table_annotation", "(", "ion_event", ".", "annotations", ")", ":", "assert", "symbol_trans", ".", "delegate", "is", "None", "# Activate a new symbol processor.", "delegate", "=", "_local_symbol_table_handler", "(", "ctx", ")", "symbol_trans", "=", "Transition", "(", "None", ",", "delegate", ")", "data_event", "=", "NEXT_EVENT", "if", "data_event", "is", "None", ":", "# No system processing or we have to get data, yield control.", "if", "ion_event", "is", "not", "None", ":", "ion_event", "=", "_managed_thunk_event", "(", "ctx", ",", "ion_event", ")", "data_event", "=", "yield", "ion_event", "ion_event", "=", "reader", ".", "send", "(", "data_event", ")" ]
Managed reader wrapping another reader. Args: reader (Coroutine): The underlying non-blocking reader co-routine. catalog (Optional[SymbolTableCatalog]): The catalog to use for resolving imports. Yields: Events from the underlying reader delegating to symbol table processing as needed. The user will never see things like version markers or local symbol tables.
[ "Managed", "reader", "wrapping", "another", "reader", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_managed.py#L261-L335
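In practice most applications drive this machinery indirectly through the high-level simpleion module rather than wiring readers together by hand; a brief sketch:

from amazon.ion import simpleion

value = simpleion.loads(u'greeting::"hello"')
# Symbol-table and annotation handling happened behind the scenes.
print(value, value.ion_annotations)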
936
amzn/ion-python
amazon/ion/reader_text.py
_illegal_character
def _illegal_character(c, ctx, message=''): """Raises an IonException upon encountering the given illegal character in the given context. Args: c (int|None): Ordinal of the illegal character. ctx (_HandlerContext): Context in which the illegal character was encountered. message (Optional[str]): Additional information, as necessary. """ container_type = ctx.container.ion_type is None and 'top-level' or ctx.container.ion_type.name value_type = ctx.ion_type is None and 'unknown' or ctx.ion_type.name if c is None: header = 'Illegal token' else: c = 'EOF' if BufferQueue.is_eof(c) else _chr(c) header = 'Illegal character %s' % (c,) raise IonException('%s at position %d in %s value contained in %s. %s Pending value: %s' % (header, ctx.queue.position, value_type, container_type, message, ctx.value))
python
def _illegal_character(c, ctx, message=''): """Raises an IonException upon encountering the given illegal character in the given context. Args: c (int|None): Ordinal of the illegal character. ctx (_HandlerContext): Context in which the illegal character was encountered. message (Optional[str]): Additional information, as necessary. """ container_type = ctx.container.ion_type is None and 'top-level' or ctx.container.ion_type.name value_type = ctx.ion_type is None and 'unknown' or ctx.ion_type.name if c is None: header = 'Illegal token' else: c = 'EOF' if BufferQueue.is_eof(c) else _chr(c) header = 'Illegal character %s' % (c,) raise IonException('%s at position %d in %s value contained in %s. %s Pending value: %s' % (header, ctx.queue.position, value_type, container_type, message, ctx.value))
[ "def", "_illegal_character", "(", "c", ",", "ctx", ",", "message", "=", "''", ")", ":", "container_type", "=", "ctx", ".", "container", ".", "ion_type", "is", "None", "and", "'top-level'", "or", "ctx", ".", "container", ".", "ion_type", ".", "name", "value_type", "=", "ctx", ".", "ion_type", "is", "None", "and", "'unknown'", "or", "ctx", ".", "ion_type", ".", "name", "if", "c", "is", "None", ":", "header", "=", "'Illegal token'", "else", ":", "c", "=", "'EOF'", "if", "BufferQueue", ".", "is_eof", "(", "c", ")", "else", "_chr", "(", "c", ")", "header", "=", "'Illegal character %s'", "%", "(", "c", ",", ")", "raise", "IonException", "(", "'%s at position %d in %s value contained in %s. %s Pending value: %s'", "%", "(", "header", ",", "ctx", ".", "queue", ".", "position", ",", "value_type", ",", "container_type", ",", "message", ",", "ctx", ".", "value", ")", ")" ]
Raises an IonException upon encountering the given illegal character in the given context. Args: c (int|None): Ordinal of the illegal character. ctx (_HandlerContext): Context in which the illegal character was encountered. message (Optional[str]): Additional information, as necessary.
[ "Raises", "an", "IonException", "upon", "encountering", "the", "given", "illegal", "character", "in", "the", "given", "context", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L40-L57
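The kind of message this produces can be seen by feeding malformed text through the high-level API (a hedged sketch; the exact wording may differ between versions):

from amazon.ion import simpleion
from amazon.ion.exceptions import IonException

try:
    simpleion.loads(u'{a:}')             # a struct field with no value
except IonException as e:
    print(e)                             # reports position, value type, and container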
937
amzn/ion-python
amazon/ion/reader_text.py
_defaultdict
def _defaultdict(dct, fallback=_illegal_character): """Wraps the given dictionary such that the given fallback function will be called when a nonexistent key is accessed. """ out = defaultdict(lambda: fallback) for k, v in six.iteritems(dct): out[k] = v return out
python
def _defaultdict(dct, fallback=_illegal_character): """Wraps the given dictionary such that the given fallback function will be called when a nonexistent key is accessed. """ out = defaultdict(lambda: fallback) for k, v in six.iteritems(dct): out[k] = v return out
[ "def", "_defaultdict", "(", "dct", ",", "fallback", "=", "_illegal_character", ")", ":", "out", "=", "defaultdict", "(", "lambda", ":", "fallback", ")", "for", "k", ",", "v", "in", "six", ".", "iteritems", "(", "dct", ")", ":", "out", "[", "k", "]", "=", "v", "return", "out" ]
Wraps the given dictionary such that the given fallback function will be called when a nonexistent key is accessed.
[ "Wraps", "the", "given", "dictionary", "such", "that", "the", "given", "fallback", "function", "will", "be", "called", "when", "a", "nonexistent", "key", "is", "accessed", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L60-L67
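A generic illustration of the fallback-dispatch pattern (hypothetical handlers, unrelated to the real jump tables):

from collections import defaultdict

def reject(key):
    raise ValueError('no handler bound for %r' % (key,))

table = defaultdict(lambda: reject,
                    {0x20: lambda k: 'space', 0x0A: lambda k: 'newline'})

assert table[0x20](0x20) == 'space'
try:
    table[0x7B](0x7B)                    # unbound key falls back to reject()
except ValueError as err:
    print(err)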
938
amzn/ion-python
amazon/ion/reader_text.py
_number_negative_start_handler
def _number_negative_start_handler(c, ctx): """Handles numeric values that start with a negative sign. Branches to delegate co-routines according to _NEGATIVE_TABLE. """ assert c == _MINUS assert len(ctx.value) == 0 ctx.set_ion_type(IonType.INT) ctx.value.append(c) c, _ = yield yield ctx.immediate_transition(_NEGATIVE_TABLE[c](c, ctx))
python
def _number_negative_start_handler(c, ctx): """Handles numeric values that start with a negative sign. Branches to delegate co-routines according to _NEGATIVE_TABLE. """ assert c == _MINUS assert len(ctx.value) == 0 ctx.set_ion_type(IonType.INT) ctx.value.append(c) c, _ = yield yield ctx.immediate_transition(_NEGATIVE_TABLE[c](c, ctx))
[ "def", "_number_negative_start_handler", "(", "c", ",", "ctx", ")", ":", "assert", "c", "==", "_MINUS", "assert", "len", "(", "ctx", ".", "value", ")", "==", "0", "ctx", ".", "set_ion_type", "(", "IonType", ".", "INT", ")", "ctx", ".", "value", ".", "append", "(", "c", ")", "c", ",", "_", "=", "yield", "yield", "ctx", ".", "immediate_transition", "(", "_NEGATIVE_TABLE", "[", "c", "]", "(", "c", ",", "ctx", ")", ")" ]
Handles numeric values that start with a negative sign. Branches to delegate co-routines according to _NEGATIVE_TABLE.
[ "Handles", "numeric", "values", "that", "start", "with", "a", "negative", "sign", ".", "Branches", "to", "delegate", "co", "-", "routines", "according", "to", "_NEGATIVE_TABLE", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L585-L594
939
amzn/ion-python
amazon/ion/reader_text.py
_number_zero_start_handler
def _number_zero_start_handler(c, ctx): """Handles numeric values that start with zero or negative zero. Branches to delegate co-routines according to _ZERO_START_TABLE. """ assert c == _ZERO assert len(ctx.value) == 0 or (len(ctx.value) == 1 and ctx.value[0] == _MINUS) ctx.set_ion_type(IonType.INT) ctx.value.append(c) c, _ = yield if _ends_value(c): trans = ctx.event_transition(IonThunkEvent, IonEventType.SCALAR, ctx.ion_type, _parse_decimal_int(ctx.value)) if c == _SLASH: trans = ctx.immediate_transition(_number_slash_end_handler(c, ctx, trans)) yield trans yield ctx.immediate_transition(_ZERO_START_TABLE[c](c, ctx))
python
def _number_zero_start_handler(c, ctx): """Handles numeric values that start with zero or negative zero. Branches to delegate co-routines according to _ZERO_START_TABLE. """ assert c == _ZERO assert len(ctx.value) == 0 or (len(ctx.value) == 1 and ctx.value[0] == _MINUS) ctx.set_ion_type(IonType.INT) ctx.value.append(c) c, _ = yield if _ends_value(c): trans = ctx.event_transition(IonThunkEvent, IonEventType.SCALAR, ctx.ion_type, _parse_decimal_int(ctx.value)) if c == _SLASH: trans = ctx.immediate_transition(_number_slash_end_handler(c, ctx, trans)) yield trans yield ctx.immediate_transition(_ZERO_START_TABLE[c](c, ctx))
[ "def", "_number_zero_start_handler", "(", "c", ",", "ctx", ")", ":", "assert", "c", "==", "_ZERO", "assert", "len", "(", "ctx", ".", "value", ")", "==", "0", "or", "(", "len", "(", "ctx", ".", "value", ")", "==", "1", "and", "ctx", ".", "value", "[", "0", "]", "==", "_MINUS", ")", "ctx", ".", "set_ion_type", "(", "IonType", ".", "INT", ")", "ctx", ".", "value", ".", "append", "(", "c", ")", "c", ",", "_", "=", "yield", "if", "_ends_value", "(", "c", ")", ":", "trans", "=", "ctx", ".", "event_transition", "(", "IonThunkEvent", ",", "IonEventType", ".", "SCALAR", ",", "ctx", ".", "ion_type", ",", "_parse_decimal_int", "(", "ctx", ".", "value", ")", ")", "if", "c", "==", "_SLASH", ":", "trans", "=", "ctx", ".", "immediate_transition", "(", "_number_slash_end_handler", "(", "c", ",", "ctx", ",", "trans", ")", ")", "yield", "trans", "yield", "ctx", ".", "immediate_transition", "(", "_ZERO_START_TABLE", "[", "c", "]", "(", "c", ",", "ctx", ")", ")" ]
Handles numeric values that start with zero or negative zero. Branches to delegate co-routines according to _ZERO_START_TABLE.
[ "Handles", "numeric", "values", "that", "start", "with", "zero", "or", "negative", "zero", ".", "Branches", "to", "delegate", "co", "-", "routines", "according", "to", "_ZERO_START_TABLE", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L598-L612
940
amzn/ion-python
amazon/ion/reader_text.py
_number_or_timestamp_handler
def _number_or_timestamp_handler(c, ctx):
    """Handles numeric values that start with digits 1-9. May terminate a value, in which case that value is an
    int. If it does not terminate a value, it branches to delegate co-routines according to
    _NUMBER_OR_TIMESTAMP_TABLE.
    """
    assert c in _DIGITS
    ctx.set_ion_type(IonType.INT)  # If this is the last digit read, this value is an Int.
    val = ctx.value
    val.append(c)
    c, self = yield
    trans = ctx.immediate_transition(self)
    while True:
        if _ends_value(c):
            trans = ctx.event_transition(IonThunkEvent, IonEventType.SCALAR, ctx.ion_type, _parse_decimal_int(ctx.value))
            if c == _SLASH:
                trans = ctx.immediate_transition(_number_slash_end_handler(c, ctx, trans))
        else:
            if c not in _DIGITS:
                trans = ctx.immediate_transition(_NUMBER_OR_TIMESTAMP_TABLE[c](c, ctx))
            else:
                val.append(c)
        c, _ = yield trans
python
def _number_or_timestamp_handler(c, ctx):
    """Handles numeric values that start with digits 1-9. May terminate a value, in which case that value is an
    int. If it does not terminate a value, it branches to delegate co-routines according to
    _NUMBER_OR_TIMESTAMP_TABLE.
    """
    assert c in _DIGITS
    ctx.set_ion_type(IonType.INT)  # If this is the last digit read, this value is an Int.
    val = ctx.value
    val.append(c)
    c, self = yield
    trans = ctx.immediate_transition(self)
    while True:
        if _ends_value(c):
            trans = ctx.event_transition(IonThunkEvent, IonEventType.SCALAR, ctx.ion_type, _parse_decimal_int(ctx.value))
            if c == _SLASH:
                trans = ctx.immediate_transition(_number_slash_end_handler(c, ctx, trans))
        else:
            if c not in _DIGITS:
                trans = ctx.immediate_transition(_NUMBER_OR_TIMESTAMP_TABLE[c](c, ctx))
            else:
                val.append(c)
        c, _ = yield trans
[ "def", "_number_or_timestamp_handler", "(", "c", ",", "ctx", ")", ":", "assert", "c", "in", "_DIGITS", "ctx", ".", "set_ion_type", "(", "IonType", ".", "INT", ")", "# If this is the last digit read, this value is an Int.", "val", "=", "ctx", ".", "value", "val", ".", "append", "(", "c", ")", "c", ",", "self", "=", "yield", "trans", "=", "ctx", ".", "immediate_transition", "(", "self", ")", "while", "True", ":", "if", "_ends_value", "(", "c", ")", ":", "trans", "=", "ctx", ".", "event_transition", "(", "IonThunkEvent", ",", "IonEventType", ".", "SCALAR", ",", "ctx", ".", "ion_type", ",", "_parse_decimal_int", "(", "ctx", ".", "value", ")", ")", "if", "c", "==", "_SLASH", ":", "trans", "=", "ctx", ".", "immediate_transition", "(", "_number_slash_end_handler", "(", "c", ",", "ctx", ",", "trans", ")", ")", "else", ":", "if", "c", "not", "in", "_DIGITS", ":", "trans", "=", "ctx", ".", "immediate_transition", "(", "_NUMBER_OR_TIMESTAMP_TABLE", "[", "c", "]", "(", "c", ",", "ctx", ")", ")", "else", ":", "val", ".", "append", "(", "c", ")", "c", ",", "_", "=", "yield", "trans" ]
Handles numeric values that start with digits 1-9. May terminate a value, in which case that value is an int. If it does not terminate a value, it branches to delegate co-routines according to _NUMBER_OR_TIMESTAMP_TABLE.
[ "Handles", "numeric", "values", "that", "start", "with", "digits", "1", "-", "9", ".", "May", "terminate", "a", "value", "in", "which", "case", "that", "value", "is", "an", "int", ".", "If", "it", "does", "not", "terminate", "a", "value", "it", "branches", "to", "delegate", "co", "-", "routines", "according", "to", "_NUMBER_OR_TIMESTAMP_TABLE", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L616-L637
941
amzn/ion-python
amazon/ion/reader_text.py
_number_slash_end_handler
def _number_slash_end_handler(c, ctx, event):
    """Handles numeric values that end in a forward slash. This is only legal if the slash begins a comment; thus,
    this co-routine either results in an error being raised or an event being yielded.
    """
    assert c == _SLASH
    c, self = yield
    next_ctx = ctx.derive_child_context(ctx.whence)
    comment = _comment_handler(_SLASH, next_ctx, next_ctx.whence)
    comment.send((c, comment))
    # If the previous line returns without error, it's a valid comment and the number may be emitted.
    yield _CompositeTransition(event, ctx, comment, next_ctx, initialize_handler=False)
python
def _number_slash_end_handler(c, ctx, event):
    """Handles numeric values that end in a forward slash. This is only legal if the slash begins a comment; thus,
    this co-routine either results in an error being raised or an event being yielded.
    """
    assert c == _SLASH
    c, self = yield
    next_ctx = ctx.derive_child_context(ctx.whence)
    comment = _comment_handler(_SLASH, next_ctx, next_ctx.whence)
    comment.send((c, comment))
    # If the previous line returns without error, it's a valid comment and the number may be emitted.
    yield _CompositeTransition(event, ctx, comment, next_ctx, initialize_handler=False)
[ "def", "_number_slash_end_handler", "(", "c", ",", "ctx", ",", "event", ")", ":", "assert", "c", "==", "_SLASH", "c", ",", "self", "=", "yield", "next_ctx", "=", "ctx", ".", "derive_child_context", "(", "ctx", ".", "whence", ")", "comment", "=", "_comment_handler", "(", "_SLASH", ",", "next_ctx", ",", "next_ctx", ".", "whence", ")", "comment", ".", "send", "(", "(", "c", ",", "comment", ")", ")", "# If the previous line returns without error, it's a valid comment and the number may be emitted.", "yield", "_CompositeTransition", "(", "event", ",", "ctx", ",", "comment", ",", "next_ctx", ",", "initialize_handler", "=", "False", ")" ]
Handles numeric values that end in a forward slash. This is only legal if the slash begins a comment; thus, this co-routine either results in an error being raised or an event being yielded.
[ "Handles", "numeric", "values", "that", "end", "in", "a", "forward", "slash", ".", "This", "is", "only", "legal", "if", "the", "slash", "begins", "a", "comment", ";", "thus", "this", "co", "-", "routine", "either", "results", "in", "an", "error", "being", "raised", "or", "an", "event", "being", "yielded", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L641-L651
942
amzn/ion-python
amazon/ion/reader_text.py
_exponent_handler_factory
def _exponent_handler_factory(ion_type, exp_chars, parse_func, first_char=None):
    """Generates a handler co-routine which tokenizes an numeric exponent.

    Args:
        ion_type (IonType): The type of the value with this exponent.
        exp_chars (sequence): The set of ordinals of the legal exponent characters for this component.
        parse_func (callable): Called upon ending the numeric value. Accepts the current token value and returns a
            thunk that lazily parses the token.
        first_char (Optional[int]): The ordinal of the character that should be appended instead of the character
            that occurs first in this component. This is useful for preparing the token for parsing in the case where
            a particular character is peculiar to the Ion format (e.g. 'd' to denote the exponent of a decimal value
            should be replaced with 'e' for compatibility with python's Decimal type).
    """
    def transition(prev, c, ctx, trans):
        if c in _SIGN and prev in exp_chars:
            ctx.value.append(c)
        else:
            _illegal_character(c, ctx)
        return trans
    illegal = exp_chars + _SIGN
    return _numeric_handler_factory(_DIGITS, transition, lambda c, ctx: c in exp_chars, illegal, parse_func,
                                    illegal_at_end=illegal, ion_type=ion_type, first_char=first_char)
python
def _exponent_handler_factory(ion_type, exp_chars, parse_func, first_char=None):
    """Generates a handler co-routine which tokenizes an numeric exponent.

    Args:
        ion_type (IonType): The type of the value with this exponent.
        exp_chars (sequence): The set of ordinals of the legal exponent characters for this component.
        parse_func (callable): Called upon ending the numeric value. Accepts the current token value and returns a
            thunk that lazily parses the token.
        first_char (Optional[int]): The ordinal of the character that should be appended instead of the character
            that occurs first in this component. This is useful for preparing the token for parsing in the case where
            a particular character is peculiar to the Ion format (e.g. 'd' to denote the exponent of a decimal value
            should be replaced with 'e' for compatibility with python's Decimal type).
    """
    def transition(prev, c, ctx, trans):
        if c in _SIGN and prev in exp_chars:
            ctx.value.append(c)
        else:
            _illegal_character(c, ctx)
        return trans
    illegal = exp_chars + _SIGN
    return _numeric_handler_factory(_DIGITS, transition, lambda c, ctx: c in exp_chars, illegal, parse_func,
                                    illegal_at_end=illegal, ion_type=ion_type, first_char=first_char)
[ "def", "_exponent_handler_factory", "(", "ion_type", ",", "exp_chars", ",", "parse_func", ",", "first_char", "=", "None", ")", ":", "def", "transition", "(", "prev", ",", "c", ",", "ctx", ",", "trans", ")", ":", "if", "c", "in", "_SIGN", "and", "prev", "in", "exp_chars", ":", "ctx", ".", "value", ".", "append", "(", "c", ")", "else", ":", "_illegal_character", "(", "c", ",", "ctx", ")", "return", "trans", "illegal", "=", "exp_chars", "+", "_SIGN", "return", "_numeric_handler_factory", "(", "_DIGITS", ",", "transition", ",", "lambda", "c", ",", "ctx", ":", "c", "in", "exp_chars", ",", "illegal", ",", "parse_func", ",", "illegal_at_end", "=", "illegal", ",", "ion_type", "=", "ion_type", ",", "first_char", "=", "first_char", ")" ]
Generates a handler co-routine which tokenizes an numeric exponent. Args: ion_type (IonType): The type of the value with this exponent. exp_chars (sequence): The set of ordinals of the legal exponent characters for this component. parse_func (callable): Called upon ending the numeric value. Accepts the current token value and returns a thunk that lazily parses the token. first_char (Optional[int]): The ordinal of the character that should be appended instead of the character that occurs first in this component. This is useful for preparing the token for parsing in the case where a particular character is peculiar to the Ion format (e.g. 'd' to denote the exponent of a decimal value should be replaced with 'e' for compatibility with python's Decimal type).
[ "Generates", "a", "handler", "co", "-", "routine", "which", "tokenizes", "an", "numeric", "exponent", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L711-L732
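Note on the first_char remark above: Ion decimals write their exponent with 'd'/'D', which Python's Decimal constructor does not accept, so the text is normalized to 'e' before parsing. A minimal standalone illustration of that substitution; parse_ion_decimal_text is a hypothetical helper, not a library function.

from decimal import Decimal

def parse_ion_decimal_text(text):
    # Swap Ion's decimal exponent marker for one Decimal understands.
    return Decimal(text.replace('d', 'e').replace('D', 'e'))

print(parse_ion_decimal_text('1.23d4'))   # 1.23E+4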
943
amzn/ion-python
amazon/ion/reader_text.py
_coefficient_handler_factory
def _coefficient_handler_factory(trans_table, parse_func, assertion=lambda c, ctx: True,
                                 ion_type=None, append_first_if_not=None):
    """Generates a handler co-routine which tokenizes a numeric coefficient.

    Args:
        trans_table (dict): lookup table for the handler for the next component of this numeric token, given the
            ordinal of the first character in that component.
        parse_func (callable): Called upon ending the numeric value. Accepts the current token value and returns a
            thunk that lazily parses the token.
        assertion (callable): Accepts the first character's ordinal and the current context. Returns True if this
            is a legal start to the component.
        ion_type (Optional[IonType]): The type of the value if it were to end on this coefficient.
        append_first_if_not (Optional[int]): The ordinal of a character that should not be appended to the token if
            it occurs first in this component (e.g. an underscore in many cases).
    """
    def transition(prev, c, ctx, trans):
        if prev == _UNDERSCORE:
            _illegal_character(c, ctx, 'Underscore before %s.' % (_chr(c),))
        return ctx.immediate_transition(trans_table[c](c, ctx))
    return _numeric_handler_factory(_DIGITS, transition, assertion, (_DOT,), parse_func,
                                    ion_type=ion_type, append_first_if_not=append_first_if_not)
python
def _coefficient_handler_factory(trans_table, parse_func, assertion=lambda c, ctx: True,
                                 ion_type=None, append_first_if_not=None):
    """Generates a handler co-routine which tokenizes a numeric coefficient.

    Args:
        trans_table (dict): lookup table for the handler for the next component of this numeric token, given the
            ordinal of the first character in that component.
        parse_func (callable): Called upon ending the numeric value. Accepts the current token value and returns a
            thunk that lazily parses the token.
        assertion (callable): Accepts the first character's ordinal and the current context. Returns True if this
            is a legal start to the component.
        ion_type (Optional[IonType]): The type of the value if it were to end on this coefficient.
        append_first_if_not (Optional[int]): The ordinal of a character that should not be appended to the token if
            it occurs first in this component (e.g. an underscore in many cases).
    """
    def transition(prev, c, ctx, trans):
        if prev == _UNDERSCORE:
            _illegal_character(c, ctx, 'Underscore before %s.' % (_chr(c),))
        return ctx.immediate_transition(trans_table[c](c, ctx))
    return _numeric_handler_factory(_DIGITS, transition, assertion, (_DOT,), parse_func,
                                    ion_type=ion_type, append_first_if_not=append_first_if_not)
[ "def", "_coefficient_handler_factory", "(", "trans_table", ",", "parse_func", ",", "assertion", "=", "lambda", "c", ",", "ctx", ":", "True", ",", "ion_type", "=", "None", ",", "append_first_if_not", "=", "None", ")", ":", "def", "transition", "(", "prev", ",", "c", ",", "ctx", ",", "trans", ")", ":", "if", "prev", "==", "_UNDERSCORE", ":", "_illegal_character", "(", "c", ",", "ctx", ",", "'Underscore before %s.'", "%", "(", "_chr", "(", "c", ")", ",", ")", ")", "return", "ctx", ".", "immediate_transition", "(", "trans_table", "[", "c", "]", "(", "c", ",", "ctx", ")", ")", "return", "_numeric_handler_factory", "(", "_DIGITS", ",", "transition", ",", "assertion", ",", "(", "_DOT", ",", ")", ",", "parse_func", ",", "ion_type", "=", "ion_type", ",", "append_first_if_not", "=", "append_first_if_not", ")" ]
Generates a handler co-routine which tokenizes a numeric coefficient. Args: trans_table (dict): lookup table for the handler for the next component of this numeric token, given the ordinal of the first character in that component. parse_func (callable): Called upon ending the numeric value. Accepts the current token value and returns a thunk that lazily parses the token. assertion (callable): Accepts the first character's ordinal and the current context. Returns True if this is a legal start to the component. ion_type (Optional[IonType]): The type of the value if it were to end on this coefficient. append_first_if_not (Optional[int]): The ordinal of a character that should not be appended to the token if it occurs first in this component (e.g. an underscore in many cases).
[ "Generates", "a", "handler", "co", "-", "routine", "which", "tokenizes", "a", "numeric", "coefficient", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L739-L759
944
amzn/ion-python
amazon/ion/reader_text.py
_radix_int_handler_factory
def _radix_int_handler_factory(radix_indicators, charset, parse_func):
    """Generates a handler co-routine which tokenizes a integer of a particular radix.

    Args:
        radix_indicators (sequence): The set of ordinals of characters that indicate the radix of this int.
        charset (sequence): Set of ordinals of legal characters for this radix.
        parse_func (callable): Called upon ending the numeric value. Accepts the current token value and returns a
            thunk that lazily parses the token.
    """
    def assertion(c, ctx):
        return c in radix_indicators and \
               ((len(ctx.value) == 1 and ctx.value[0] == _ZERO) or
                (len(ctx.value) == 2 and ctx.value[0] == _MINUS and ctx.value[1] == _ZERO)) and \
               ctx.ion_type == IonType.INT
    return _numeric_handler_factory(charset, lambda prev, c, ctx, trans: _illegal_character(c, ctx),
                                    assertion, radix_indicators, parse_func, illegal_at_end=radix_indicators)
python
def _radix_int_handler_factory(radix_indicators, charset, parse_func):
    """Generates a handler co-routine which tokenizes a integer of a particular radix.

    Args:
        radix_indicators (sequence): The set of ordinals of characters that indicate the radix of this int.
        charset (sequence): Set of ordinals of legal characters for this radix.
        parse_func (callable): Called upon ending the numeric value. Accepts the current token value and returns a
            thunk that lazily parses the token.
    """
    def assertion(c, ctx):
        return c in radix_indicators and \
               ((len(ctx.value) == 1 and ctx.value[0] == _ZERO) or
                (len(ctx.value) == 2 and ctx.value[0] == _MINUS and ctx.value[1] == _ZERO)) and \
               ctx.ion_type == IonType.INT
    return _numeric_handler_factory(charset, lambda prev, c, ctx, trans: _illegal_character(c, ctx),
                                    assertion, radix_indicators, parse_func, illegal_at_end=radix_indicators)
[ "def", "_radix_int_handler_factory", "(", "radix_indicators", ",", "charset", ",", "parse_func", ")", ":", "def", "assertion", "(", "c", ",", "ctx", ")", ":", "return", "c", "in", "radix_indicators", "and", "(", "(", "len", "(", "ctx", ".", "value", ")", "==", "1", "and", "ctx", ".", "value", "[", "0", "]", "==", "_ZERO", ")", "or", "(", "len", "(", "ctx", ".", "value", ")", "==", "2", "and", "ctx", ".", "value", "[", "0", "]", "==", "_MINUS", "and", "ctx", ".", "value", "[", "1", "]", "==", "_ZERO", ")", ")", "and", "ctx", ".", "ion_type", "==", "IonType", ".", "INT", "return", "_numeric_handler_factory", "(", "charset", ",", "lambda", "prev", ",", "c", ",", "ctx", ",", "trans", ":", "_illegal_character", "(", "c", ",", "ctx", ")", ",", "assertion", ",", "radix_indicators", ",", "parse_func", ",", "illegal_at_end", "=", "radix_indicators", ")" ]
Generates a handler co-routine which tokenizes a integer of a particular radix. Args: radix_indicators (sequence): The set of ordinals of characters that indicate the radix of this int. charset (sequence): Set of ordinals of legal characters for this radix. parse_func (callable): Called upon ending the numeric value. Accepts the current token value and returns a thunk that lazily parses the token.
[ "Generates", "a", "handler", "co", "-", "routine", "which", "tokenizes", "a", "integer", "of", "a", "particular", "radix", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L785-L800
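Note: the assertion above only admits a radix indicator when the token accumulated so far is exactly 0 or -0. A self-contained sketch of the same rule using regular expressions; parse_radix_int is a hypothetical helper and, unlike the real tokenizer, it ignores Ion's digit-separating underscores.

import re

_RADIX_PATTERNS = (
    (re.compile(r'^-?0[xX][0-9a-fA-F]+$'), 16),   # hexadecimal, e.g. 0x1f
    (re.compile(r'^-?0[bB][01]+$'), 2),           # binary, e.g. -0b101
)

def parse_radix_int(text):
    for pattern, base in _RADIX_PATTERNS:
        if pattern.match(text):
            sign = -1 if text.startswith('-') else 1
            return sign * int(text.lstrip('-')[2:], base)
    raise ValueError('not a radix-prefixed int: %r' % text)

print(parse_radix_int('0x1f'))    # 31
print(parse_radix_int('-0b101'))  # -5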
945
amzn/ion-python
amazon/ion/reader_text.py
_timestamp_zero_start_handler
def _timestamp_zero_start_handler(c, ctx):
    """Handles numeric values that start with a zero followed by another digit. This is either a timestamp or an
    error.
    """
    val = ctx.value
    ctx.set_ion_type(IonType.TIMESTAMP)
    if val[0] == _MINUS:
        _illegal_character(c, ctx, 'Negative year not allowed.')
    val.append(c)
    c, self = yield
    trans = ctx.immediate_transition(self)
    while True:
        if c in _TIMESTAMP_YEAR_DELIMITERS:
            trans = ctx.immediate_transition(_timestamp_handler(c, ctx))
        elif c in _DIGITS:
            val.append(c)
        else:
            _illegal_character(c, ctx)
        c, _ = yield trans
python
def _timestamp_zero_start_handler(c, ctx):
    """Handles numeric values that start with a zero followed by another digit. This is either a timestamp or an
    error.
    """
    val = ctx.value
    ctx.set_ion_type(IonType.TIMESTAMP)
    if val[0] == _MINUS:
        _illegal_character(c, ctx, 'Negative year not allowed.')
    val.append(c)
    c, self = yield
    trans = ctx.immediate_transition(self)
    while True:
        if c in _TIMESTAMP_YEAR_DELIMITERS:
            trans = ctx.immediate_transition(_timestamp_handler(c, ctx))
        elif c in _DIGITS:
            val.append(c)
        else:
            _illegal_character(c, ctx)
        c, _ = yield trans
[ "def", "_timestamp_zero_start_handler", "(", "c", ",", "ctx", ")", ":", "val", "=", "ctx", ".", "value", "ctx", ".", "set_ion_type", "(", "IonType", ".", "TIMESTAMP", ")", "if", "val", "[", "0", "]", "==", "_MINUS", ":", "_illegal_character", "(", "c", ",", "ctx", ",", "'Negative year not allowed.'", ")", "val", ".", "append", "(", "c", ")", "c", ",", "self", "=", "yield", "trans", "=", "ctx", ".", "immediate_transition", "(", "self", ")", "while", "True", ":", "if", "c", "in", "_TIMESTAMP_YEAR_DELIMITERS", ":", "trans", "=", "ctx", ".", "immediate_transition", "(", "_timestamp_handler", "(", "c", ",", "ctx", ")", ")", "elif", "c", "in", "_DIGITS", ":", "val", ".", "append", "(", "c", ")", "else", ":", "_illegal_character", "(", "c", ",", "ctx", ")", "c", ",", "_", "=", "yield", "trans" ]
Handles numeric values that start with a zero followed by another digit. This is either a timestamp or an error.
[ "Handles", "numeric", "values", "that", "start", "with", "a", "zero", "followed", "by", "another", "digit", ".", "This", "is", "either", "a", "timestamp", "or", "an", "error", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L808-L826
946
amzn/ion-python
amazon/ion/reader_text.py
_parse_timestamp
def _parse_timestamp(tokens):
    """Parses each token in the given `_TimestampTokens` and marshals the numeric components into a `Timestamp`."""
    def parse():
        precision = TimestampPrecision.YEAR
        off_hour = tokens[_TimestampState.OFF_HOUR]
        off_minutes = tokens[_TimestampState.OFF_MINUTE]
        microsecond = None
        fraction_digits = None
        if off_hour is not None:
            assert off_minutes is not None
            off_sign = -1 if _MINUS in off_hour else 1
            off_hour = int(off_hour)
            off_minutes = int(off_minutes) * off_sign
            if off_sign == -1 and off_hour == 0 and off_minutes == 0:
                # -00:00 (unknown UTC offset) is a naive datetime.
                off_hour = None
                off_minutes = None
        else:
            assert off_minutes is None

        year = tokens[_TimestampState.YEAR]
        assert year is not None
        year = int(year)

        month = tokens[_TimestampState.MONTH]
        if month is None:
            month = 1
        else:
            month = int(month)
            precision = TimestampPrecision.MONTH

        day = tokens[_TimestampState.DAY]
        if day is None:
            day = 1
        else:
            day = int(day)
            precision = TimestampPrecision.DAY

        hour = tokens[_TimestampState.HOUR]
        minute = tokens[_TimestampState.MINUTE]
        if hour is None:
            assert minute is None
            hour = 0
            minute = 0
        else:
            assert minute is not None
            hour = int(hour)
            minute = int(minute)
            precision = TimestampPrecision.MINUTE

        second = tokens[_TimestampState.SECOND]
        if second is None:
            second = 0
        else:
            second = int(second)
            precision = TimestampPrecision.SECOND

        fraction = tokens[_TimestampState.FRACTIONAL]
        if fraction is not None:
            fraction_digits = len(fraction)
            if fraction_digits > MICROSECOND_PRECISION:
                for digit in fraction[MICROSECOND_PRECISION:]:
                    if digit != _ZERO:
                        raise ValueError('Only six significant digits supported in timestamp fractional. Found %s.'
                                         % (fraction,))
                fraction_digits = MICROSECOND_PRECISION
                fraction = fraction[0:MICROSECOND_PRECISION]
            else:
                fraction.extend(_ZEROS[MICROSECOND_PRECISION - fraction_digits])
            microsecond = int(fraction)
        return timestamp(
            year, month, day,
            hour, minute, second, microsecond,
            off_hour, off_minutes,
            precision=precision, fractional_precision=fraction_digits
        )
    return parse
python
def _parse_timestamp(tokens):
    """Parses each token in the given `_TimestampTokens` and marshals the numeric components into a `Timestamp`."""
    def parse():
        precision = TimestampPrecision.YEAR
        off_hour = tokens[_TimestampState.OFF_HOUR]
        off_minutes = tokens[_TimestampState.OFF_MINUTE]
        microsecond = None
        fraction_digits = None
        if off_hour is not None:
            assert off_minutes is not None
            off_sign = -1 if _MINUS in off_hour else 1
            off_hour = int(off_hour)
            off_minutes = int(off_minutes) * off_sign
            if off_sign == -1 and off_hour == 0 and off_minutes == 0:
                # -00:00 (unknown UTC offset) is a naive datetime.
                off_hour = None
                off_minutes = None
        else:
            assert off_minutes is None

        year = tokens[_TimestampState.YEAR]
        assert year is not None
        year = int(year)

        month = tokens[_TimestampState.MONTH]
        if month is None:
            month = 1
        else:
            month = int(month)
            precision = TimestampPrecision.MONTH

        day = tokens[_TimestampState.DAY]
        if day is None:
            day = 1
        else:
            day = int(day)
            precision = TimestampPrecision.DAY

        hour = tokens[_TimestampState.HOUR]
        minute = tokens[_TimestampState.MINUTE]
        if hour is None:
            assert minute is None
            hour = 0
            minute = 0
        else:
            assert minute is not None
            hour = int(hour)
            minute = int(minute)
            precision = TimestampPrecision.MINUTE

        second = tokens[_TimestampState.SECOND]
        if second is None:
            second = 0
        else:
            second = int(second)
            precision = TimestampPrecision.SECOND

        fraction = tokens[_TimestampState.FRACTIONAL]
        if fraction is not None:
            fraction_digits = len(fraction)
            if fraction_digits > MICROSECOND_PRECISION:
                for digit in fraction[MICROSECOND_PRECISION:]:
                    if digit != _ZERO:
                        raise ValueError('Only six significant digits supported in timestamp fractional. Found %s.'
                                         % (fraction,))
                fraction_digits = MICROSECOND_PRECISION
                fraction = fraction[0:MICROSECOND_PRECISION]
            else:
                fraction.extend(_ZEROS[MICROSECOND_PRECISION - fraction_digits])
            microsecond = int(fraction)
        return timestamp(
            year, month, day,
            hour, minute, second, microsecond,
            off_hour, off_minutes,
            precision=precision, fractional_precision=fraction_digits
        )
    return parse
[ "def", "_parse_timestamp", "(", "tokens", ")", ":", "def", "parse", "(", ")", ":", "precision", "=", "TimestampPrecision", ".", "YEAR", "off_hour", "=", "tokens", "[", "_TimestampState", ".", "OFF_HOUR", "]", "off_minutes", "=", "tokens", "[", "_TimestampState", ".", "OFF_MINUTE", "]", "microsecond", "=", "None", "fraction_digits", "=", "None", "if", "off_hour", "is", "not", "None", ":", "assert", "off_minutes", "is", "not", "None", "off_sign", "=", "-", "1", "if", "_MINUS", "in", "off_hour", "else", "1", "off_hour", "=", "int", "(", "off_hour", ")", "off_minutes", "=", "int", "(", "off_minutes", ")", "*", "off_sign", "if", "off_sign", "==", "-", "1", "and", "off_hour", "==", "0", "and", "off_minutes", "==", "0", ":", "# -00:00 (unknown UTC offset) is a naive datetime.", "off_hour", "=", "None", "off_minutes", "=", "None", "else", ":", "assert", "off_minutes", "is", "None", "year", "=", "tokens", "[", "_TimestampState", ".", "YEAR", "]", "assert", "year", "is", "not", "None", "year", "=", "int", "(", "year", ")", "month", "=", "tokens", "[", "_TimestampState", ".", "MONTH", "]", "if", "month", "is", "None", ":", "month", "=", "1", "else", ":", "month", "=", "int", "(", "month", ")", "precision", "=", "TimestampPrecision", ".", "MONTH", "day", "=", "tokens", "[", "_TimestampState", ".", "DAY", "]", "if", "day", "is", "None", ":", "day", "=", "1", "else", ":", "day", "=", "int", "(", "day", ")", "precision", "=", "TimestampPrecision", ".", "DAY", "hour", "=", "tokens", "[", "_TimestampState", ".", "HOUR", "]", "minute", "=", "tokens", "[", "_TimestampState", ".", "MINUTE", "]", "if", "hour", "is", "None", ":", "assert", "minute", "is", "None", "hour", "=", "0", "minute", "=", "0", "else", ":", "assert", "minute", "is", "not", "None", "hour", "=", "int", "(", "hour", ")", "minute", "=", "int", "(", "minute", ")", "precision", "=", "TimestampPrecision", ".", "MINUTE", "second", "=", "tokens", "[", "_TimestampState", ".", "SECOND", "]", "if", "second", "is", "None", ":", "second", "=", "0", "else", ":", "second", "=", "int", "(", "second", ")", "precision", "=", "TimestampPrecision", ".", "SECOND", "fraction", "=", "tokens", "[", "_TimestampState", ".", "FRACTIONAL", "]", "if", "fraction", "is", "not", "None", ":", "fraction_digits", "=", "len", "(", "fraction", ")", "if", "fraction_digits", ">", "MICROSECOND_PRECISION", ":", "for", "digit", "in", "fraction", "[", "MICROSECOND_PRECISION", ":", "]", ":", "if", "digit", "!=", "_ZERO", ":", "raise", "ValueError", "(", "'Only six significant digits supported in timestamp fractional. Found %s.'", "%", "(", "fraction", ",", ")", ")", "fraction_digits", "=", "MICROSECOND_PRECISION", "fraction", "=", "fraction", "[", "0", ":", "MICROSECOND_PRECISION", "]", "else", ":", "fraction", ".", "extend", "(", "_ZEROS", "[", "MICROSECOND_PRECISION", "-", "fraction_digits", "]", ")", "microsecond", "=", "int", "(", "fraction", ")", "return", "timestamp", "(", "year", ",", "month", ",", "day", ",", "hour", ",", "minute", ",", "second", ",", "microsecond", ",", "off_hour", ",", "off_minutes", ",", "precision", "=", "precision", ",", "fractional_precision", "=", "fraction_digits", ")", "return", "parse" ]
Parses each token in the given `_TimestampTokens` and marshals the numeric components into a `Timestamp`.
[ "Parses", "each", "token", "in", "the", "given", "_TimestampTokens", "and", "marshals", "the", "numeric", "components", "into", "a", "Timestamp", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L870-L946
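Note: two details of the parser above are easy to miss: fractional seconds may exceed six digits only when the extra digits are zero, and an offset written as -00:00 denotes an unknown local offset. The sketch below restates both rules over plain strings; fractional_to_microseconds and offset_minutes are hypothetical names, not the library's API.

MICROSECOND_DIGITS = 6

def fractional_to_microseconds(digits):
    # `digits` holds the characters after the decimal point of the seconds field.
    if len(digits) > MICROSECOND_DIGITS:
        if any(d != '0' for d in digits[MICROSECOND_DIGITS:]):
            raise ValueError('more than six significant fractional digits: %s' % digits)
        digits = digits[:MICROSECOND_DIGITS]
    return int(digits.ljust(MICROSECOND_DIGITS, '0'))

def offset_minutes(off_text):
    # Returns the total offset in minutes, or None for the unknown offset -00:00.
    sign = -1 if off_text.startswith('-') else 1
    hours, minutes = (int(part) for part in off_text.lstrip('+-').split(':'))
    if sign == -1 and hours == 0 and minutes == 0:
        return None
    return sign * (hours * 60 + minutes)

print(fractional_to_microseconds('12345600'))  # 123456
print(offset_minutes('-00:00'))                # None
print(offset_minutes('-08:00'))                # -480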
947
amzn/ion-python
amazon/ion/reader_text.py
_comment_handler
def _comment_handler(c, ctx, whence):
    """Handles comments. Upon completion of the comment, immediately transitions back to `whence`."""
    assert c == _SLASH
    c, self = yield
    if c == _SLASH:
        ctx.set_line_comment()
        block_comment = False
    elif c == _ASTERISK:
        if ctx.line_comment:
            # This happens when a block comment immediately follows a line comment.
            ctx.set_line_comment(False)
        block_comment = True
    else:
        _illegal_character(c, ctx, 'Illegal character sequence "/%s".' % (_chr(c),))
    done = False
    prev = None
    trans = ctx.immediate_transition(self)
    while not done:
        c, _ = yield trans
        if block_comment:
            if prev == _ASTERISK and c == _SLASH:
                done = True
            prev = c
        else:
            if c in _NEWLINES or BufferQueue.is_eof(c):
                done = True
    yield ctx.set_self_delimiting(True).immediate_transition(whence)
python
def _comment_handler(c, ctx, whence):
    """Handles comments. Upon completion of the comment, immediately transitions back to `whence`."""
    assert c == _SLASH
    c, self = yield
    if c == _SLASH:
        ctx.set_line_comment()
        block_comment = False
    elif c == _ASTERISK:
        if ctx.line_comment:
            # This happens when a block comment immediately follows a line comment.
            ctx.set_line_comment(False)
        block_comment = True
    else:
        _illegal_character(c, ctx, 'Illegal character sequence "/%s".' % (_chr(c),))
    done = False
    prev = None
    trans = ctx.immediate_transition(self)
    while not done:
        c, _ = yield trans
        if block_comment:
            if prev == _ASTERISK and c == _SLASH:
                done = True
            prev = c
        else:
            if c in _NEWLINES or BufferQueue.is_eof(c):
                done = True
    yield ctx.set_self_delimiting(True).immediate_transition(whence)
[ "def", "_comment_handler", "(", "c", ",", "ctx", ",", "whence", ")", ":", "assert", "c", "==", "_SLASH", "c", ",", "self", "=", "yield", "if", "c", "==", "_SLASH", ":", "ctx", ".", "set_line_comment", "(", ")", "block_comment", "=", "False", "elif", "c", "==", "_ASTERISK", ":", "if", "ctx", ".", "line_comment", ":", "# This happens when a block comment immediately follows a line comment.", "ctx", ".", "set_line_comment", "(", "False", ")", "block_comment", "=", "True", "else", ":", "_illegal_character", "(", "c", ",", "ctx", ",", "'Illegal character sequence \"/%s\".'", "%", "(", "_chr", "(", "c", ")", ",", ")", ")", "done", "=", "False", "prev", "=", "None", "trans", "=", "ctx", ".", "immediate_transition", "(", "self", ")", "while", "not", "done", ":", "c", ",", "_", "=", "yield", "trans", "if", "block_comment", ":", "if", "prev", "==", "_ASTERISK", "and", "c", "==", "_SLASH", ":", "done", "=", "True", "prev", "=", "c", "else", ":", "if", "c", "in", "_NEWLINES", "or", "BufferQueue", ".", "is_eof", "(", "c", ")", ":", "done", "=", "True", "yield", "ctx", ".", "set_self_delimiting", "(", "True", ")", ".", "immediate_transition", "(", "whence", ")" ]
Handles comments. Upon completion of the comment, immediately transitions back to `whence`.
[ "Handles", "comments", ".", "Upon", "completion", "of", "the", "comment", "immediately", "transitions", "back", "to", "whence", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L1033-L1059
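Note: outside the coroutine machinery, the two termination rules above amount to: a // comment runs to the next newline (or end of input), and a /* comment runs to the closing */. A minimal non-streaming sketch; skip_comment is a hypothetical helper, not part of the library.

def skip_comment(text, start):
    # `text[start]` is the '/' that opened the comment; returns the index just past it.
    assert text[start] == '/'
    kind = text[start + 1]
    if kind == '/':
        end = text.find('\n', start + 2)
        return len(text) if end == -1 else end + 1
    if kind == '*':
        end = text.find('*/', start + 2)
        if end == -1:
            raise ValueError('unterminated block comment')
        return end + 2
    raise ValueError('illegal character sequence "/%s"' % kind)

source = '1 // line comment\n/* block */ 2'
print(skip_comment(source, 2))    # 18, the index right after the line comment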
948
amzn/ion-python
amazon/ion/reader_text.py
_sexp_slash_handler
def _sexp_slash_handler(c, ctx, whence=None, pending_event=None):
    """Handles the special case of a forward-slash within an s-expression. This is either an operator or a
    comment.
    """
    assert c == _SLASH
    if whence is None:
        whence = ctx.whence
    c, self = yield
    ctx.queue.unread(c)
    if c == _ASTERISK or c == _SLASH:
        yield ctx.immediate_transition(_comment_handler(_SLASH, ctx, whence))
    else:
        if pending_event is not None:
            # Since this is the start of a new value and not a comment, the pending event must be emitted.
            assert pending_event.event is not None
            yield _CompositeTransition(pending_event, ctx, partial(_operator_symbol_handler, _SLASH))
        yield ctx.immediate_transition(_operator_symbol_handler(_SLASH, ctx))
python
def _sexp_slash_handler(c, ctx, whence=None, pending_event=None):
    """Handles the special case of a forward-slash within an s-expression. This is either an operator or a
    comment.
    """
    assert c == _SLASH
    if whence is None:
        whence = ctx.whence
    c, self = yield
    ctx.queue.unread(c)
    if c == _ASTERISK or c == _SLASH:
        yield ctx.immediate_transition(_comment_handler(_SLASH, ctx, whence))
    else:
        if pending_event is not None:
            # Since this is the start of a new value and not a comment, the pending event must be emitted.
            assert pending_event.event is not None
            yield _CompositeTransition(pending_event, ctx, partial(_operator_symbol_handler, _SLASH))
        yield ctx.immediate_transition(_operator_symbol_handler(_SLASH, ctx))
[ "def", "_sexp_slash_handler", "(", "c", ",", "ctx", ",", "whence", "=", "None", ",", "pending_event", "=", "None", ")", ":", "assert", "c", "==", "_SLASH", "if", "whence", "is", "None", ":", "whence", "=", "ctx", ".", "whence", "c", ",", "self", "=", "yield", "ctx", ".", "queue", ".", "unread", "(", "c", ")", "if", "c", "==", "_ASTERISK", "or", "c", "==", "_SLASH", ":", "yield", "ctx", ".", "immediate_transition", "(", "_comment_handler", "(", "_SLASH", ",", "ctx", ",", "whence", ")", ")", "else", ":", "if", "pending_event", "is", "not", "None", ":", "# Since this is the start of a new value and not a comment, the pending event must be emitted.", "assert", "pending_event", ".", "event", "is", "not", "None", "yield", "_CompositeTransition", "(", "pending_event", ",", "ctx", ",", "partial", "(", "_operator_symbol_handler", ",", "_SLASH", ")", ")", "yield", "ctx", ".", "immediate_transition", "(", "_operator_symbol_handler", "(", "_SLASH", ",", "ctx", ")", ")" ]
Handles the special case of a forward-slash within an s-expression. This is either an operator or a comment.
[ "Handles", "the", "special", "case", "of", "a", "forward", "-", "slash", "within", "an", "s", "-", "expression", ".", "This", "is", "either", "an", "operator", "or", "a", "comment", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L1063-L1079
949
amzn/ion-python
amazon/ion/reader_text.py
_typed_null_handler
def _typed_null_handler(c, ctx):
    """Handles typed null values. Entered once `null.` has been found."""
    assert c == _DOT
    c, self = yield
    nxt = _NULL_STARTS
    i = 0
    length = None
    done = False
    trans = ctx.immediate_transition(self)
    while True:
        if done:
            if _ends_value(c) or (ctx.container.ion_type is IonType.SEXP and c in _OPERATORS):
                trans = ctx.event_transition(IonEvent, IonEventType.SCALAR, nxt.ion_type, None)
            else:
                _illegal_character(c, ctx, 'Illegal null type.')
        elif length is None:
            if c not in nxt:
                _illegal_character(c, ctx, 'Illegal null type.')
            nxt = nxt[c]
            if isinstance(nxt, _NullSequence):
                length = len(nxt.sequence)
        else:
            if c != nxt[i]:
                _illegal_character(c, ctx, 'Illegal null type.')
            i += 1
            done = i == length
        c, _ = yield trans
python
def _typed_null_handler(c, ctx):
    """Handles typed null values. Entered once `null.` has been found."""
    assert c == _DOT
    c, self = yield
    nxt = _NULL_STARTS
    i = 0
    length = None
    done = False
    trans = ctx.immediate_transition(self)
    while True:
        if done:
            if _ends_value(c) or (ctx.container.ion_type is IonType.SEXP and c in _OPERATORS):
                trans = ctx.event_transition(IonEvent, IonEventType.SCALAR, nxt.ion_type, None)
            else:
                _illegal_character(c, ctx, 'Illegal null type.')
        elif length is None:
            if c not in nxt:
                _illegal_character(c, ctx, 'Illegal null type.')
            nxt = nxt[c]
            if isinstance(nxt, _NullSequence):
                length = len(nxt.sequence)
        else:
            if c != nxt[i]:
                _illegal_character(c, ctx, 'Illegal null type.')
            i += 1
            done = i == length
        c, _ = yield trans
[ "def", "_typed_null_handler", "(", "c", ",", "ctx", ")", ":", "assert", "c", "==", "_DOT", "c", ",", "self", "=", "yield", "nxt", "=", "_NULL_STARTS", "i", "=", "0", "length", "=", "None", "done", "=", "False", "trans", "=", "ctx", ".", "immediate_transition", "(", "self", ")", "while", "True", ":", "if", "done", ":", "if", "_ends_value", "(", "c", ")", "or", "(", "ctx", ".", "container", ".", "ion_type", "is", "IonType", ".", "SEXP", "and", "c", "in", "_OPERATORS", ")", ":", "trans", "=", "ctx", ".", "event_transition", "(", "IonEvent", ",", "IonEventType", ".", "SCALAR", ",", "nxt", ".", "ion_type", ",", "None", ")", "else", ":", "_illegal_character", "(", "c", ",", "ctx", ",", "'Illegal null type.'", ")", "elif", "length", "is", "None", ":", "if", "c", "not", "in", "nxt", ":", "_illegal_character", "(", "c", ",", "ctx", ",", "'Illegal null type.'", ")", "nxt", "=", "nxt", "[", "c", "]", "if", "isinstance", "(", "nxt", ",", "_NullSequence", ")", ":", "length", "=", "len", "(", "nxt", ".", "sequence", ")", "else", ":", "if", "c", "!=", "nxt", "[", "i", "]", ":", "_illegal_character", "(", "c", ",", "ctx", ",", "'Illegal null type.'", ")", "i", "+=", "1", "done", "=", "i", "==", "length", "c", ",", "_", "=", "yield", "trans" ]
Handles typed null values. Entered once `null.` has been found.
[ "Handles", "typed", "null", "values", ".", "Entered", "once", "null", ".", "has", "been", "found", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L1192-L1218
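Note: the handler above walks a character-by-character lookup structure, but the end result is equivalent to checking the suffix after null. against the fixed set of Ion's thirteen typed nulls. A simplified, non-streaming restatement; typed_null_type and _NULL_SUFFIXES are illustrative names only.

_NULL_SUFFIXES = frozenset((
    'null', 'bool', 'int', 'float', 'decimal', 'timestamp',
    'symbol', 'string', 'clob', 'blob', 'list', 'sexp', 'struct',
))

def typed_null_type(suffix):
    # `suffix` is the text following 'null.'; returns it if legal, else raises.
    if suffix not in _NULL_SUFFIXES:
        raise ValueError('Illegal null type: null.%s' % suffix)
    return suffix

print(typed_null_type('int'))     # int
print(typed_null_type('struct'))  # struct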
950
amzn/ion-python
amazon/ion/reader_text.py
_inf_or_operator_handler_factory
def _inf_or_operator_handler_factory(c_start, is_delegate=True):
    """Generates handler co-routines for values that may be `+inf` or `-inf`.

    Args:
        c_start (int): The ordinal of the character that starts this token (either `+` or `-`).
        is_delegate (bool): True if a different handler began processing this token; otherwise, False. This will
            only be true for `-inf`, because it is not the only value that can start with `-`; `+inf` is the only
            value (outside of a s-expression) that can start with `+`.
    """
    @coroutine
    def inf_or_operator_handler(c, ctx):
        next_ctx = None
        if not is_delegate:
            ctx.value.append(c_start)
            c, self = yield
        else:
            assert ctx.value[0] == c_start
            assert c not in _DIGITS
            ctx.queue.unread(c)
            next_ctx = ctx
            _, self = yield
            assert c == _
        maybe_inf = True
        ctx.set_ion_type(IonType.FLOAT)
        match_index = 0
        trans = ctx.immediate_transition(self)
        while True:
            if maybe_inf:
                if match_index < len(_INF_SUFFIX):
                    maybe_inf = c == _INF_SUFFIX[match_index]
                else:
                    if _ends_value(c) or (ctx.container.ion_type is IonType.SEXP and c in _OPERATORS):
                        yield ctx.event_transition(IonEvent, IonEventType.SCALAR, IonType.FLOAT,
                                                   c_start == _MINUS and _NEG_INF or _POS_INF)
                    else:
                        maybe_inf = False
            if maybe_inf:
                match_index += 1
            else:
                ctx.set_unicode()
                if match_index > 0:
                    next_ctx = ctx.derive_child_context(ctx.whence)
                    for ch in _INF_SUFFIX[0:match_index]:
                        next_ctx.value.append(ch)
                break
            c, self = yield trans
        if ctx.container is not _C_SEXP:
            _illegal_character(c, next_ctx is None and ctx or next_ctx,
                               'Illegal character following %s.' % (_chr(c_start),))
        if match_index == 0:
            if c in _OPERATORS:
                yield ctx.immediate_transition(_operator_symbol_handler(c, ctx))
            yield ctx.event_transition(IonEvent, IonEventType.SCALAR, IonType.SYMBOL, ctx.value.as_symbol())
        yield _CompositeTransition(
            ctx.event_transition(IonEvent, IonEventType.SCALAR, IonType.SYMBOL, ctx.value.as_symbol()),
            ctx, partial(_unquoted_symbol_handler, c), next_ctx
        )
    return inf_or_operator_handler
python
def _inf_or_operator_handler_factory(c_start, is_delegate=True):
    """Generates handler co-routines for values that may be `+inf` or `-inf`.

    Args:
        c_start (int): The ordinal of the character that starts this token (either `+` or `-`).
        is_delegate (bool): True if a different handler began processing this token; otherwise, False. This will
            only be true for `-inf`, because it is not the only value that can start with `-`; `+inf` is the only
            value (outside of a s-expression) that can start with `+`.
    """
    @coroutine
    def inf_or_operator_handler(c, ctx):
        next_ctx = None
        if not is_delegate:
            ctx.value.append(c_start)
            c, self = yield
        else:
            assert ctx.value[0] == c_start
            assert c not in _DIGITS
            ctx.queue.unread(c)
            next_ctx = ctx
            _, self = yield
            assert c == _
        maybe_inf = True
        ctx.set_ion_type(IonType.FLOAT)
        match_index = 0
        trans = ctx.immediate_transition(self)
        while True:
            if maybe_inf:
                if match_index < len(_INF_SUFFIX):
                    maybe_inf = c == _INF_SUFFIX[match_index]
                else:
                    if _ends_value(c) or (ctx.container.ion_type is IonType.SEXP and c in _OPERATORS):
                        yield ctx.event_transition(IonEvent, IonEventType.SCALAR, IonType.FLOAT,
                                                   c_start == _MINUS and _NEG_INF or _POS_INF)
                    else:
                        maybe_inf = False
            if maybe_inf:
                match_index += 1
            else:
                ctx.set_unicode()
                if match_index > 0:
                    next_ctx = ctx.derive_child_context(ctx.whence)
                    for ch in _INF_SUFFIX[0:match_index]:
                        next_ctx.value.append(ch)
                break
            c, self = yield trans
        if ctx.container is not _C_SEXP:
            _illegal_character(c, next_ctx is None and ctx or next_ctx,
                               'Illegal character following %s.' % (_chr(c_start),))
        if match_index == 0:
            if c in _OPERATORS:
                yield ctx.immediate_transition(_operator_symbol_handler(c, ctx))
            yield ctx.event_transition(IonEvent, IonEventType.SCALAR, IonType.SYMBOL, ctx.value.as_symbol())
        yield _CompositeTransition(
            ctx.event_transition(IonEvent, IonEventType.SCALAR, IonType.SYMBOL, ctx.value.as_symbol()),
            ctx, partial(_unquoted_symbol_handler, c), next_ctx
        )
    return inf_or_operator_handler
[ "def", "_inf_or_operator_handler_factory", "(", "c_start", ",", "is_delegate", "=", "True", ")", ":", "@", "coroutine", "def", "inf_or_operator_handler", "(", "c", ",", "ctx", ")", ":", "next_ctx", "=", "None", "if", "not", "is_delegate", ":", "ctx", ".", "value", ".", "append", "(", "c_start", ")", "c", ",", "self", "=", "yield", "else", ":", "assert", "ctx", ".", "value", "[", "0", "]", "==", "c_start", "assert", "c", "not", "in", "_DIGITS", "ctx", ".", "queue", ".", "unread", "(", "c", ")", "next_ctx", "=", "ctx", "_", ",", "self", "=", "yield", "assert", "c", "==", "_", "maybe_inf", "=", "True", "ctx", ".", "set_ion_type", "(", "IonType", ".", "FLOAT", ")", "match_index", "=", "0", "trans", "=", "ctx", ".", "immediate_transition", "(", "self", ")", "while", "True", ":", "if", "maybe_inf", ":", "if", "match_index", "<", "len", "(", "_INF_SUFFIX", ")", ":", "maybe_inf", "=", "c", "==", "_INF_SUFFIX", "[", "match_index", "]", "else", ":", "if", "_ends_value", "(", "c", ")", "or", "(", "ctx", ".", "container", ".", "ion_type", "is", "IonType", ".", "SEXP", "and", "c", "in", "_OPERATORS", ")", ":", "yield", "ctx", ".", "event_transition", "(", "IonEvent", ",", "IonEventType", ".", "SCALAR", ",", "IonType", ".", "FLOAT", ",", "c_start", "==", "_MINUS", "and", "_NEG_INF", "or", "_POS_INF", ")", "else", ":", "maybe_inf", "=", "False", "if", "maybe_inf", ":", "match_index", "+=", "1", "else", ":", "ctx", ".", "set_unicode", "(", ")", "if", "match_index", ">", "0", ":", "next_ctx", "=", "ctx", ".", "derive_child_context", "(", "ctx", ".", "whence", ")", "for", "ch", "in", "_INF_SUFFIX", "[", "0", ":", "match_index", "]", ":", "next_ctx", ".", "value", ".", "append", "(", "ch", ")", "break", "c", ",", "self", "=", "yield", "trans", "if", "ctx", ".", "container", "is", "not", "_C_SEXP", ":", "_illegal_character", "(", "c", ",", "next_ctx", "is", "None", "and", "ctx", "or", "next_ctx", ",", "'Illegal character following %s.'", "%", "(", "_chr", "(", "c_start", ")", ",", ")", ")", "if", "match_index", "==", "0", ":", "if", "c", "in", "_OPERATORS", ":", "yield", "ctx", ".", "immediate_transition", "(", "_operator_symbol_handler", "(", "c", ",", "ctx", ")", ")", "yield", "ctx", ".", "event_transition", "(", "IonEvent", ",", "IonEventType", ".", "SCALAR", ",", "IonType", ".", "SYMBOL", ",", "ctx", ".", "value", ".", "as_symbol", "(", ")", ")", "yield", "_CompositeTransition", "(", "ctx", ".", "event_transition", "(", "IonEvent", ",", "IonEventType", ".", "SCALAR", ",", "IonType", ".", "SYMBOL", ",", "ctx", ".", "value", ".", "as_symbol", "(", ")", ")", ",", "ctx", ",", "partial", "(", "_unquoted_symbol_handler", ",", "c", ")", ",", "next_ctx", ")", "return", "inf_or_operator_handler" ]
Generates handler co-routines for values that may be `+inf` or `-inf`. Args: c_start (int): The ordinal of the character that starts this token (either `+` or `-`). is_delegate (bool): True if a different handler began processing this token; otherwise, False. This will only be true for `-inf`, because it is not the only value that can start with `-`; `+inf` is the only value (outside of a s-expression) that can start with `+`.
[ "Generates", "handler", "co", "-", "routines", "for", "values", "that", "may", "be", "+", "inf", "or", "-", "inf", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L1305-L1365
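Note: in other words, +inf and -inf are the only sign-prefixed spellings that denote floats; any other run beginning with + or - inside an s-expression falls back to operator/symbol handling. A trivial, non-streaming illustration; classify_signed_token is hypothetical.

def classify_signed_token(token):
    if token == '+inf':
        return float('inf')
    if token == '-inf':
        return float('-inf')
    return ('operator-or-symbol', token)

print(classify_signed_token('-inf'))   # -inf
print(classify_signed_token('+-'))     # ('operator-or-symbol', '+-')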
951
amzn/ion-python
amazon/ion/reader_text.py
_operator_symbol_handler
def _operator_symbol_handler(c, ctx):
    """Handles operator symbol values within s-expressions."""
    assert c in _OPERATORS
    ctx.set_unicode()
    val = ctx.value
    val.append(c)
    c, self = yield
    trans = ctx.immediate_transition(self)
    while c in _OPERATORS:
        val.append(c)
        c, _ = yield trans
    yield ctx.event_transition(IonEvent, IonEventType.SCALAR, IonType.SYMBOL, val.as_symbol())
python
def _operator_symbol_handler(c, ctx):
    """Handles operator symbol values within s-expressions."""
    assert c in _OPERATORS
    ctx.set_unicode()
    val = ctx.value
    val.append(c)
    c, self = yield
    trans = ctx.immediate_transition(self)
    while c in _OPERATORS:
        val.append(c)
        c, _ = yield trans
    yield ctx.event_transition(IonEvent, IonEventType.SCALAR, IonType.SYMBOL, val.as_symbol())
[ "def", "_operator_symbol_handler", "(", "c", ",", "ctx", ")", ":", "assert", "c", "in", "_OPERATORS", "ctx", ".", "set_unicode", "(", ")", "val", "=", "ctx", ".", "value", "val", ".", "append", "(", "c", ")", "c", ",", "self", "=", "yield", "trans", "=", "ctx", ".", "immediate_transition", "(", "self", ")", "while", "c", "in", "_OPERATORS", ":", "val", ".", "append", "(", "c", ")", "c", ",", "_", "=", "yield", "trans", "yield", "ctx", ".", "event_transition", "(", "IonEvent", ",", "IonEventType", ".", "SCALAR", ",", "IonType", ".", "SYMBOL", ",", "val", ".", "as_symbol", "(", ")", ")" ]
Handles operator symbol values within s-expressions.
[ "Handles", "operator", "symbol", "values", "within", "s", "-", "expressions", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L1373-L1384
952
amzn/ion-python
amazon/ion/reader_text.py
_symbol_token_end
def _symbol_token_end(c, ctx, is_field_name, value=None):
    """Returns a transition which ends the current symbol token."""
    if value is None:
        value = ctx.value
    if is_field_name or c in _SYMBOL_TOKEN_TERMINATORS or ctx.quoted_text:
        # This might be an annotation or a field name. Mark it as self-delimiting because a symbol token termination
        # character has been found.
        ctx.set_self_delimiting(ctx.quoted_text).set_pending_symbol(value).set_quoted_text(False)
        trans = ctx.immediate_transition(ctx.whence)
    else:
        trans = ctx.event_transition(IonEvent, IonEventType.SCALAR, IonType.SYMBOL, _as_symbol(value))
    return trans
python
def _symbol_token_end(c, ctx, is_field_name, value=None):
    """Returns a transition which ends the current symbol token."""
    if value is None:
        value = ctx.value
    if is_field_name or c in _SYMBOL_TOKEN_TERMINATORS or ctx.quoted_text:
        # This might be an annotation or a field name. Mark it as self-delimiting because a symbol token termination
        # character has been found.
        ctx.set_self_delimiting(ctx.quoted_text).set_pending_symbol(value).set_quoted_text(False)
        trans = ctx.immediate_transition(ctx.whence)
    else:
        trans = ctx.event_transition(IonEvent, IonEventType.SCALAR, IonType.SYMBOL, _as_symbol(value))
    return trans
[ "def", "_symbol_token_end", "(", "c", ",", "ctx", ",", "is_field_name", ",", "value", "=", "None", ")", ":", "if", "value", "is", "None", ":", "value", "=", "ctx", ".", "value", "if", "is_field_name", "or", "c", "in", "_SYMBOL_TOKEN_TERMINATORS", "or", "ctx", ".", "quoted_text", ":", "# This might be an annotation or a field name. Mark it as self-delimiting because a symbol token termination", "# character has been found.", "ctx", ".", "set_self_delimiting", "(", "ctx", ".", "quoted_text", ")", ".", "set_pending_symbol", "(", "value", ")", ".", "set_quoted_text", "(", "False", ")", "trans", "=", "ctx", ".", "immediate_transition", "(", "ctx", ".", "whence", ")", "else", ":", "trans", "=", "ctx", ".", "event_transition", "(", "IonEvent", ",", "IonEventType", ".", "SCALAR", ",", "IonType", ".", "SYMBOL", ",", "_as_symbol", "(", "value", ")", ")", "return", "trans" ]
Returns a transition which ends the current symbol token.
[ "Returns", "a", "transition", "which", "ends", "the", "current", "symbol", "token", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L1387-L1398
953
amzn/ion-python
amazon/ion/reader_text.py
_unquoted_symbol_handler
def _unquoted_symbol_handler(c, ctx, is_field_name=False):
    """Handles identifier symbol tokens. If in an s-expression, these may be followed without whitespace by
    operators.
    """
    in_sexp = ctx.container.ion_type is IonType.SEXP
    ctx.set_unicode()
    if c not in _IDENTIFIER_CHARACTERS:
        if in_sexp and c in _OPERATORS:
            c_next, _ = yield
            ctx.queue.unread(c_next)
            assert ctx.value
            yield _CompositeTransition(
                ctx.event_transition(IonEvent, IonEventType.SCALAR, IonType.SYMBOL, ctx.value.as_symbol()),
                ctx,
                partial(_operator_symbol_handler, c)
            )
        _illegal_character(c, ctx.set_ion_type(IonType.SYMBOL))
    val = ctx.value
    val.append(c)
    prev = c
    c, self = yield
    trans = ctx.immediate_transition(self)
    while True:
        if c not in _WHITESPACE:
            if prev in _WHITESPACE or _ends_value(c) or c == _COLON or (in_sexp and c in _OPERATORS):
                break
            if c not in _IDENTIFIER_CHARACTERS:
                _illegal_character(c, ctx.set_ion_type(IonType.SYMBOL))
            val.append(c)
        prev = c
        c, _ = yield trans
    yield _symbol_token_end(c, ctx, is_field_name)
python
def _unquoted_symbol_handler(c, ctx, is_field_name=False):
    """Handles identifier symbol tokens. If in an s-expression, these may be followed without whitespace by
    operators.
    """
    in_sexp = ctx.container.ion_type is IonType.SEXP
    ctx.set_unicode()
    if c not in _IDENTIFIER_CHARACTERS:
        if in_sexp and c in _OPERATORS:
            c_next, _ = yield
            ctx.queue.unread(c_next)
            assert ctx.value
            yield _CompositeTransition(
                ctx.event_transition(IonEvent, IonEventType.SCALAR, IonType.SYMBOL, ctx.value.as_symbol()),
                ctx,
                partial(_operator_symbol_handler, c)
            )
        _illegal_character(c, ctx.set_ion_type(IonType.SYMBOL))
    val = ctx.value
    val.append(c)
    prev = c
    c, self = yield
    trans = ctx.immediate_transition(self)
    while True:
        if c not in _WHITESPACE:
            if prev in _WHITESPACE or _ends_value(c) or c == _COLON or (in_sexp and c in _OPERATORS):
                break
            if c not in _IDENTIFIER_CHARACTERS:
                _illegal_character(c, ctx.set_ion_type(IonType.SYMBOL))
            val.append(c)
        prev = c
        c, _ = yield trans
    yield _symbol_token_end(c, ctx, is_field_name)
[ "def", "_unquoted_symbol_handler", "(", "c", ",", "ctx", ",", "is_field_name", "=", "False", ")", ":", "in_sexp", "=", "ctx", ".", "container", ".", "ion_type", "is", "IonType", ".", "SEXP", "ctx", ".", "set_unicode", "(", ")", "if", "c", "not", "in", "_IDENTIFIER_CHARACTERS", ":", "if", "in_sexp", "and", "c", "in", "_OPERATORS", ":", "c_next", ",", "_", "=", "yield", "ctx", ".", "queue", ".", "unread", "(", "c_next", ")", "assert", "ctx", ".", "value", "yield", "_CompositeTransition", "(", "ctx", ".", "event_transition", "(", "IonEvent", ",", "IonEventType", ".", "SCALAR", ",", "IonType", ".", "SYMBOL", ",", "ctx", ".", "value", ".", "as_symbol", "(", ")", ")", ",", "ctx", ",", "partial", "(", "_operator_symbol_handler", ",", "c", ")", ")", "_illegal_character", "(", "c", ",", "ctx", ".", "set_ion_type", "(", "IonType", ".", "SYMBOL", ")", ")", "val", "=", "ctx", ".", "value", "val", ".", "append", "(", "c", ")", "prev", "=", "c", "c", ",", "self", "=", "yield", "trans", "=", "ctx", ".", "immediate_transition", "(", "self", ")", "while", "True", ":", "if", "c", "not", "in", "_WHITESPACE", ":", "if", "prev", "in", "_WHITESPACE", "or", "_ends_value", "(", "c", ")", "or", "c", "==", "_COLON", "or", "(", "in_sexp", "and", "c", "in", "_OPERATORS", ")", ":", "break", "if", "c", "not", "in", "_IDENTIFIER_CHARACTERS", ":", "_illegal_character", "(", "c", ",", "ctx", ".", "set_ion_type", "(", "IonType", ".", "SYMBOL", ")", ")", "val", ".", "append", "(", "c", ")", "prev", "=", "c", "c", ",", "_", "=", "yield", "trans", "yield", "_symbol_token_end", "(", "c", ",", "ctx", ",", "is_field_name", ")" ]
Handles identifier symbol tokens. If in an s-expression, these may be followed without whitespace by operators.
[ "Handles", "identifier", "symbol", "tokens", ".", "If", "in", "an", "s", "-", "expression", "these", "may", "be", "followed", "without", "whitespace", "by", "operators", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L1402-L1433
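Note: inside an s-expression the rule above means identifier characters and operator characters form separate, adjacent tokens, so a+-b reads as the three symbols a, +-, and b. A simplified splitter over those two character classes; the names and character sets are illustrative, and quoting and whitespace are ignored.

import string

_IDENTIFIER_CHARS = set(string.ascii_letters + string.digits + '_$')
_OPERATOR_CHARS = set('!#%&*+-./;<=>?@^`|~')

def split_sexp_symbols(text):
    tokens, current, current_set = [], '', None
    for c in text:
        char_set = _IDENTIFIER_CHARS if c in _IDENTIFIER_CHARS else _OPERATOR_CHARS
        if current and char_set is not current_set:
            tokens.append(current)
            current = ''
        current += c
        current_set = char_set
    if current:
        tokens.append(current)
    return tokens

print(split_sexp_symbols('a+-b'))   # ['a', '+-', 'b']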
954
amzn/ion-python
amazon/ion/reader_text.py
_single_quote_handler_factory
def _single_quote_handler_factory(on_single_quote, on_other):
    """Generates handlers used for classifying tokens that begin with one or more single quotes.

    Args:
        on_single_quote (callable): Called when another single quote is found. Accepts the current character's
            ordinal, the current context, and True if the token is a field name; returns a Transition.
        on_other (callable): Called when any character other than a single quote is found. Accepts the current
            character's ordinal, the current context, and True if the token is a field name; returns a Transition.
    """
    @coroutine
    def single_quote_handler(c, ctx, is_field_name=False):
        assert c == _SINGLE_QUOTE
        c, self = yield
        if c == _SINGLE_QUOTE and not _is_escaped(c):
            yield on_single_quote(c, ctx, is_field_name)
        else:
            ctx.set_unicode(quoted_text=True)
            yield on_other(c, ctx, is_field_name)
    return single_quote_handler
python
def _single_quote_handler_factory(on_single_quote, on_other):
    """Generates handlers used for classifying tokens that begin with one or more single quotes.

    Args:
        on_single_quote (callable): Called when another single quote is found. Accepts the current character's
            ordinal, the current context, and True if the token is a field name; returns a Transition.
        on_other (callable): Called when any character other than a single quote is found. Accepts the current
            character's ordinal, the current context, and True if the token is a field name; returns a Transition.
    """
    @coroutine
    def single_quote_handler(c, ctx, is_field_name=False):
        assert c == _SINGLE_QUOTE
        c, self = yield
        if c == _SINGLE_QUOTE and not _is_escaped(c):
            yield on_single_quote(c, ctx, is_field_name)
        else:
            ctx.set_unicode(quoted_text=True)
            yield on_other(c, ctx, is_field_name)
    return single_quote_handler
[ "def", "_single_quote_handler_factory", "(", "on_single_quote", ",", "on_other", ")", ":", "@", "coroutine", "def", "single_quote_handler", "(", "c", ",", "ctx", ",", "is_field_name", "=", "False", ")", ":", "assert", "c", "==", "_SINGLE_QUOTE", "c", ",", "self", "=", "yield", "if", "c", "==", "_SINGLE_QUOTE", "and", "not", "_is_escaped", "(", "c", ")", ":", "yield", "on_single_quote", "(", "c", ",", "ctx", ",", "is_field_name", ")", "else", ":", "ctx", ".", "set_unicode", "(", "quoted_text", "=", "True", ")", "yield", "on_other", "(", "c", ",", "ctx", ",", "is_field_name", ")", "return", "single_quote_handler" ]
Generates handlers used for classifying tokens that begin with one or more single quotes. Args: on_single_quote (callable): Called when another single quote is found. Accepts the current character's ordinal, the current context, and True if the token is a field name; returns a Transition. on_other (callable): Called when any character other than a single quote is found. Accepts the current character's ordinal, the current context, and True if the token is a field name; returns a Transition.
[ "Generates", "handlers", "used", "for", "classifying", "tokens", "that", "begin", "with", "one", "or", "more", "single", "quotes", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L1615-L1633
955
amzn/ion-python
amazon/ion/reader_text.py
_struct_or_lob_handler
def _struct_or_lob_handler(c, ctx): """Handles tokens that begin with an open brace.""" assert c == _OPEN_BRACE c, self = yield yield ctx.immediate_transition(_STRUCT_OR_LOB_TABLE[c](c, ctx))
python
def _struct_or_lob_handler(c, ctx): """Handles tokens that begin with an open brace.""" assert c == _OPEN_BRACE c, self = yield yield ctx.immediate_transition(_STRUCT_OR_LOB_TABLE[c](c, ctx))
[ "def", "_struct_or_lob_handler", "(", "c", ",", "ctx", ")", ":", "assert", "c", "==", "_OPEN_BRACE", "c", ",", "self", "=", "yield", "yield", "ctx", ".", "immediate_transition", "(", "_STRUCT_OR_LOB_TABLE", "[", "c", "]", "(", "c", ",", "ctx", ")", ")" ]
Handles tokens that begin with an open brace.
[ "Handles", "tokens", "that", "begin", "with", "an", "open", "brace", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L1651-L1655
956
amzn/ion-python
amazon/ion/reader_text.py
_lob_start_handler
def _lob_start_handler(c, ctx): """Handles tokens that begin with two open braces.""" assert c == _OPEN_BRACE c, self = yield trans = ctx.immediate_transition(self) quotes = 0 while True: if c in _WHITESPACE: if quotes > 0: _illegal_character(c, ctx) elif c == _DOUBLE_QUOTE: if quotes > 0: _illegal_character(c, ctx) ctx.set_ion_type(IonType.CLOB).set_unicode(quoted_text=True) yield ctx.immediate_transition(_short_string_handler(c, ctx)) elif c == _SINGLE_QUOTE: if not quotes: ctx.set_ion_type(IonType.CLOB).set_unicode(quoted_text=True) quotes += 1 if quotes == 3: yield ctx.immediate_transition(_long_string_handler(c, ctx)) else: yield ctx.immediate_transition(_blob_end_handler(c, ctx)) c, _ = yield trans
python
def _lob_start_handler(c, ctx): """Handles tokens that begin with two open braces.""" assert c == _OPEN_BRACE c, self = yield trans = ctx.immediate_transition(self) quotes = 0 while True: if c in _WHITESPACE: if quotes > 0: _illegal_character(c, ctx) elif c == _DOUBLE_QUOTE: if quotes > 0: _illegal_character(c, ctx) ctx.set_ion_type(IonType.CLOB).set_unicode(quoted_text=True) yield ctx.immediate_transition(_short_string_handler(c, ctx)) elif c == _SINGLE_QUOTE: if not quotes: ctx.set_ion_type(IonType.CLOB).set_unicode(quoted_text=True) quotes += 1 if quotes == 3: yield ctx.immediate_transition(_long_string_handler(c, ctx)) else: yield ctx.immediate_transition(_blob_end_handler(c, ctx)) c, _ = yield trans
[ "def", "_lob_start_handler", "(", "c", ",", "ctx", ")", ":", "assert", "c", "==", "_OPEN_BRACE", "c", ",", "self", "=", "yield", "trans", "=", "ctx", ".", "immediate_transition", "(", "self", ")", "quotes", "=", "0", "while", "True", ":", "if", "c", "in", "_WHITESPACE", ":", "if", "quotes", ">", "0", ":", "_illegal_character", "(", "c", ",", "ctx", ")", "elif", "c", "==", "_DOUBLE_QUOTE", ":", "if", "quotes", ">", "0", ":", "_illegal_character", "(", "c", ",", "ctx", ")", "ctx", ".", "set_ion_type", "(", "IonType", ".", "CLOB", ")", ".", "set_unicode", "(", "quoted_text", "=", "True", ")", "yield", "ctx", ".", "immediate_transition", "(", "_short_string_handler", "(", "c", ",", "ctx", ")", ")", "elif", "c", "==", "_SINGLE_QUOTE", ":", "if", "not", "quotes", ":", "ctx", ".", "set_ion_type", "(", "IonType", ".", "CLOB", ")", ".", "set_unicode", "(", "quoted_text", "=", "True", ")", "quotes", "+=", "1", "if", "quotes", "==", "3", ":", "yield", "ctx", ".", "immediate_transition", "(", "_long_string_handler", "(", "c", ",", "ctx", ")", ")", "else", ":", "yield", "ctx", ".", "immediate_transition", "(", "_blob_end_handler", "(", "c", ",", "ctx", ")", ")", "c", ",", "_", "=", "yield", "trans" ]
Handles tokens that begin with two open braces.
[ "Handles", "tokens", "that", "begin", "with", "two", "open", "braces", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L1677-L1700
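From a user's perspective, the {{ ... }} classification performed above is what makes blob and clob literals work in Ion text. A brief usage sketch, assuming the package's documented simpleion convenience module (loads); the exact wrapper types returned are indicative only.

    from amazon.ion import simpleion

    blob_value = simpleion.loads('{{aGVsbG8=}}')   # base64 payload -> BLOB
    clob_value = simpleion.loads('{{"hello"}}')    # quoted payload -> CLOB
    print(bytes(blob_value))                       # b'hello'
    print(bytes(clob_value))                       # b'hello'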
957
amzn/ion-python
amazon/ion/reader_text.py
_lob_end_handler_factory
def _lob_end_handler_factory(ion_type, action, validate=lambda c, ctx, action_res: None): """Generates handlers for the end of blob or clob values. Args: ion_type (IonType): The type of this lob (either blob or clob). action (callable): Called for each non-whitespace, non-closing brace character encountered before the end of the lob. Accepts the current character's ordinal, the current context, the previous character's ordinal, the result of the previous call to ``action`` (if any), and True if this is the first call to ``action``. Returns any state that will be needed by subsequent calls to ``action``. For blobs, this should validate the character is valid base64; for clobs, this should ensure there are no illegal characters (e.g. comments) between the end of the data and the end of the clob. validate (Optional[callable]): Called once the second closing brace has been found. Accepts the current character's ordinal, the current context, and the result of the last call to ``action``; raises an error if this is not a valid lob value. """ assert ion_type is IonType.BLOB or ion_type is IonType.CLOB @coroutine def lob_end_handler(c, ctx): val = ctx.value prev = c action_res = None if c != _CLOSE_BRACE and c not in _WHITESPACE: action_res = action(c, ctx, prev, action_res, True) c, self = yield trans = ctx.immediate_transition(self) while True: if c in _WHITESPACE: if prev == _CLOSE_BRACE: _illegal_character(c, ctx.set_ion_type(ion_type), 'Expected }.') elif c == _CLOSE_BRACE: if prev == _CLOSE_BRACE: validate(c, ctx, action_res) break else: action_res = action(c, ctx, prev, action_res, False) prev = c c, _ = yield trans ctx.set_self_delimiting(True) # Lob values are self-delimiting (they are terminated by '}}'). yield ctx.event_transition(IonThunkEvent, IonEventType.SCALAR, ion_type, _parse_lob(ion_type, val)) return lob_end_handler
python
def _lob_end_handler_factory(ion_type, action, validate=lambda c, ctx, action_res: None): """Generates handlers for the end of blob or clob values. Args: ion_type (IonType): The type of this lob (either blob or clob). action (callable): Called for each non-whitespace, non-closing brace character encountered before the end of the lob. Accepts the current character's ordinal, the current context, the previous character's ordinal, the result of the previous call to ``action`` (if any), and True if this is the first call to ``action``. Returns any state that will be needed by subsequent calls to ``action``. For blobs, this should validate the character is valid base64; for clobs, this should ensure there are no illegal characters (e.g. comments) between the end of the data and the end of the clob. validate (Optional[callable]): Called once the second closing brace has been found. Accepts the current character's ordinal, the current context, and the result of the last call to ``action``; raises an error if this is not a valid lob value. """ assert ion_type is IonType.BLOB or ion_type is IonType.CLOB @coroutine def lob_end_handler(c, ctx): val = ctx.value prev = c action_res = None if c != _CLOSE_BRACE and c not in _WHITESPACE: action_res = action(c, ctx, prev, action_res, True) c, self = yield trans = ctx.immediate_transition(self) while True: if c in _WHITESPACE: if prev == _CLOSE_BRACE: _illegal_character(c, ctx.set_ion_type(ion_type), 'Expected }.') elif c == _CLOSE_BRACE: if prev == _CLOSE_BRACE: validate(c, ctx, action_res) break else: action_res = action(c, ctx, prev, action_res, False) prev = c c, _ = yield trans ctx.set_self_delimiting(True) # Lob values are self-delimiting (they are terminated by '}}'). yield ctx.event_transition(IonThunkEvent, IonEventType.SCALAR, ion_type, _parse_lob(ion_type, val)) return lob_end_handler
[ "def", "_lob_end_handler_factory", "(", "ion_type", ",", "action", ",", "validate", "=", "lambda", "c", ",", "ctx", ",", "action_res", ":", "None", ")", ":", "assert", "ion_type", "is", "IonType", ".", "BLOB", "or", "ion_type", "is", "IonType", ".", "CLOB", "@", "coroutine", "def", "lob_end_handler", "(", "c", ",", "ctx", ")", ":", "val", "=", "ctx", ".", "value", "prev", "=", "c", "action_res", "=", "None", "if", "c", "!=", "_CLOSE_BRACE", "and", "c", "not", "in", "_WHITESPACE", ":", "action_res", "=", "action", "(", "c", ",", "ctx", ",", "prev", ",", "action_res", ",", "True", ")", "c", ",", "self", "=", "yield", "trans", "=", "ctx", ".", "immediate_transition", "(", "self", ")", "while", "True", ":", "if", "c", "in", "_WHITESPACE", ":", "if", "prev", "==", "_CLOSE_BRACE", ":", "_illegal_character", "(", "c", ",", "ctx", ".", "set_ion_type", "(", "ion_type", ")", ",", "'Expected }.'", ")", "elif", "c", "==", "_CLOSE_BRACE", ":", "if", "prev", "==", "_CLOSE_BRACE", ":", "validate", "(", "c", ",", "ctx", ",", "action_res", ")", "break", "else", ":", "action_res", "=", "action", "(", "c", ",", "ctx", ",", "prev", ",", "action_res", ",", "False", ")", "prev", "=", "c", "c", ",", "_", "=", "yield", "trans", "ctx", ".", "set_self_delimiting", "(", "True", ")", "# Lob values are self-delimiting (they are terminated by '}}').", "yield", "ctx", ".", "event_transition", "(", "IonThunkEvent", ",", "IonEventType", ".", "SCALAR", ",", "ion_type", ",", "_parse_lob", "(", "ion_type", ",", "val", ")", ")", "return", "lob_end_handler" ]
Generates handlers for the end of blob or clob values. Args: ion_type (IonType): The type of this lob (either blob or clob). action (callable): Called for each non-whitespace, non-closing brace character encountered before the end of the lob. Accepts the current character's ordinal, the current context, the previous character's ordinal, the result of the previous call to ``action`` (if any), and True if this is the first call to ``action``. Returns any state that will be needed by subsequent calls to ``action``. For blobs, this should validate the character is valid base64; for clobs, this should ensure there are no illegal characters (e.g. comments) between the end of the data and the end of the clob. validate (Optional[callable]): Called once the second closing brace has been found. Accepts the current character's ordinal, the current context, and the result of the last call to ``action``; raises an error if this is not a valid lob value.
[ "Generates", "handlers", "for", "the", "end", "of", "blob", "or", "clob", "values", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L1703-L1743
958
amzn/ion-python
amazon/ion/reader_text.py
_blob_end_handler_factory
def _blob_end_handler_factory(): """Generates the handler for the end of a blob value. This includes the base-64 data and the two closing braces.""" def expand_res(res): if res is None: return 0, 0 return res def action(c, ctx, prev, res, is_first): num_digits, num_pads = expand_res(res) if c in _BASE64_DIGITS: if prev == _CLOSE_BRACE or prev == _BASE64_PAD: _illegal_character(c, ctx.set_ion_type(IonType.BLOB)) num_digits += 1 elif c == _BASE64_PAD: if prev == _CLOSE_BRACE: _illegal_character(c, ctx.set_ion_type(IonType.BLOB)) num_pads += 1 else: _illegal_character(c, ctx.set_ion_type(IonType.BLOB)) ctx.value.append(c) return num_digits, num_pads def validate(c, ctx, res): num_digits, num_pads = expand_res(res) if num_pads > 3 or (num_digits + num_pads) % 4 != 0: _illegal_character(c, ctx, 'Incorrect number of pad characters (%d) for a blob of %d base-64 digits.' % (num_pads, num_digits)) return _lob_end_handler_factory(IonType.BLOB, action, validate)
python
def _blob_end_handler_factory(): """Generates the handler for the end of a blob value. This includes the base-64 data and the two closing braces.""" def expand_res(res): if res is None: return 0, 0 return res def action(c, ctx, prev, res, is_first): num_digits, num_pads = expand_res(res) if c in _BASE64_DIGITS: if prev == _CLOSE_BRACE or prev == _BASE64_PAD: _illegal_character(c, ctx.set_ion_type(IonType.BLOB)) num_digits += 1 elif c == _BASE64_PAD: if prev == _CLOSE_BRACE: _illegal_character(c, ctx.set_ion_type(IonType.BLOB)) num_pads += 1 else: _illegal_character(c, ctx.set_ion_type(IonType.BLOB)) ctx.value.append(c) return num_digits, num_pads def validate(c, ctx, res): num_digits, num_pads = expand_res(res) if num_pads > 3 or (num_digits + num_pads) % 4 != 0: _illegal_character(c, ctx, 'Incorrect number of pad characters (%d) for a blob of %d base-64 digits.' % (num_pads, num_digits)) return _lob_end_handler_factory(IonType.BLOB, action, validate)
[ "def", "_blob_end_handler_factory", "(", ")", ":", "def", "expand_res", "(", "res", ")", ":", "if", "res", "is", "None", ":", "return", "0", ",", "0", "return", "res", "def", "action", "(", "c", ",", "ctx", ",", "prev", ",", "res", ",", "is_first", ")", ":", "num_digits", ",", "num_pads", "=", "expand_res", "(", "res", ")", "if", "c", "in", "_BASE64_DIGITS", ":", "if", "prev", "==", "_CLOSE_BRACE", "or", "prev", "==", "_BASE64_PAD", ":", "_illegal_character", "(", "c", ",", "ctx", ".", "set_ion_type", "(", "IonType", ".", "BLOB", ")", ")", "num_digits", "+=", "1", "elif", "c", "==", "_BASE64_PAD", ":", "if", "prev", "==", "_CLOSE_BRACE", ":", "_illegal_character", "(", "c", ",", "ctx", ".", "set_ion_type", "(", "IonType", ".", "BLOB", ")", ")", "num_pads", "+=", "1", "else", ":", "_illegal_character", "(", "c", ",", "ctx", ".", "set_ion_type", "(", "IonType", ".", "BLOB", ")", ")", "ctx", ".", "value", ".", "append", "(", "c", ")", "return", "num_digits", ",", "num_pads", "def", "validate", "(", "c", ",", "ctx", ",", "res", ")", ":", "num_digits", ",", "num_pads", "=", "expand_res", "(", "res", ")", "if", "num_pads", ">", "3", "or", "(", "num_digits", "+", "num_pads", ")", "%", "4", "!=", "0", ":", "_illegal_character", "(", "c", ",", "ctx", ",", "'Incorrect number of pad characters (%d) for a blob of %d base-64 digits.'", "%", "(", "num_pads", ",", "num_digits", ")", ")", "return", "_lob_end_handler_factory", "(", "IonType", ".", "BLOB", ",", "action", ",", "validate", ")" ]
Generates the handler for the end of a blob value. This includes the base-64 data and the two closing braces.
[ "Generates", "the", "handler", "for", "the", "end", "of", "a", "blob", "value", ".", "This", "includes", "the", "base", "-", "64", "data", "and", "the", "two", "closing", "braces", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L1746-L1774
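The validate() closure above enforces base-64 shape rather than decoding the data itself. A standalone restatement of that rule, illustrative only and not used by the reader:

    def looks_like_valid_blob_body(text):
        # Total base-64 characters (digits plus '=' pads) must be a multiple of four.
        digits = sum(c.isalnum() or c in '+/' for c in text)
        pads = text.count('=')
        return pads <= 3 and (digits + pads) % 4 == 0

    print(looks_like_valid_blob_body('aGVsbG8='))  # True  (7 digits + 1 pad)
    print(looks_like_valid_blob_body('aGVsbG8'))   # False (7 is not a multiple of 4)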
959
amzn/ion-python
amazon/ion/reader_text.py
_clob_end_handler_factory
def _clob_end_handler_factory(): """Generates the handler for the end of a clob value. This includes anything from the data's closing quote through the second closing brace. """ def action(c, ctx, prev, res, is_first): if is_first and ctx.is_self_delimiting and c == _DOUBLE_QUOTE: assert c is prev return res _illegal_character(c, ctx) return _lob_end_handler_factory(IonType.CLOB, action)
python
def _clob_end_handler_factory(): """Generates the handler for the end of a clob value. This includes anything from the data's closing quote through the second closing brace. """ def action(c, ctx, prev, res, is_first): if is_first and ctx.is_self_delimiting and c == _DOUBLE_QUOTE: assert c is prev return res _illegal_character(c, ctx) return _lob_end_handler_factory(IonType.CLOB, action)
[ "def", "_clob_end_handler_factory", "(", ")", ":", "def", "action", "(", "c", ",", "ctx", ",", "prev", ",", "res", ",", "is_first", ")", ":", "if", "is_first", "and", "ctx", ".", "is_self_delimiting", "and", "c", "==", "_DOUBLE_QUOTE", ":", "assert", "c", "is", "prev", "return", "res", "_illegal_character", "(", "c", ",", "ctx", ")", "return", "_lob_end_handler_factory", "(", "IonType", ".", "CLOB", ",", "action", ")" ]
Generates the handler for the end of a clob value. This includes anything from the data's closing quote through the second closing brace.
[ "Generates", "the", "handler", "for", "the", "end", "of", "a", "clob", "value", ".", "This", "includes", "anything", "from", "the", "data", "s", "closing", "quote", "through", "the", "second", "closing", "brace", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L1779-L1789
960
amzn/ion-python
amazon/ion/reader_text.py
_container_start_handler_factory
def _container_start_handler_factory(ion_type, before_yield=lambda c, ctx: None): """Generates handlers for tokens that begin with container start characters. Args: ion_type (IonType): The type of this container. before_yield (Optional[callable]): Called at initialization. Accepts the first character's ordinal and the current context; performs any necessary initialization actions. """ assert ion_type.is_container @coroutine def container_start_handler(c, ctx): before_yield(c, ctx) yield yield ctx.event_transition(IonEvent, IonEventType.CONTAINER_START, ion_type, value=None) return container_start_handler
python
def _container_start_handler_factory(ion_type, before_yield=lambda c, ctx: None): """Generates handlers for tokens that begin with container start characters. Args: ion_type (IonType): The type of this container. before_yield (Optional[callable]): Called at initialization. Accepts the first character's ordinal and the current context; performs any necessary initialization actions. """ assert ion_type.is_container @coroutine def container_start_handler(c, ctx): before_yield(c, ctx) yield yield ctx.event_transition(IonEvent, IonEventType.CONTAINER_START, ion_type, value=None) return container_start_handler
[ "def", "_container_start_handler_factory", "(", "ion_type", ",", "before_yield", "=", "lambda", "c", ",", "ctx", ":", "None", ")", ":", "assert", "ion_type", ".", "is_container", "@", "coroutine", "def", "container_start_handler", "(", "c", ",", "ctx", ")", ":", "before_yield", "(", "c", ",", "ctx", ")", "yield", "yield", "ctx", ".", "event_transition", "(", "IonEvent", ",", "IonEventType", ".", "CONTAINER_START", ",", "ion_type", ",", "value", "=", "None", ")", "return", "container_start_handler" ]
Generates handlers for tokens that begin with container start characters. Args: ion_type (IonType): The type of this container. before_yield (Optional[callable]): Called at initialization. Accepts the first character's ordinal and the current context; performs any necessary initialization actions.
[ "Generates", "handlers", "for", "tokens", "that", "begin", "with", "container", "start", "characters", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L1801-L1816
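At the user level, the CONTAINER_START events produced by these handlers surface as ordinary Python containers. A short usage sketch, assuming the documented simpleion convenience API; the concrete Ion wrapper types are indicative only.

    from amazon.ion import simpleion

    value = simpleion.loads('[1, 2, 3]')   # list container
    print(list(value))                     # [1, 2, 3]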
961
amzn/ion-python
amazon/ion/reader_text.py
_skip_trampoline
def _skip_trampoline(handler): """Intercepts events from container handlers, emitting them only if they should not be skipped.""" data_event, self = (yield None) delegate = handler event = None depth = 0 while True: def pass_through(): _trans = delegate.send(Transition(data_event, delegate)) return _trans, _trans.delegate, _trans.event if data_event is not None and data_event.type is ReadEventType.SKIP: while True: trans, delegate, event = pass_through() if event is not None: if event.event_type is IonEventType.CONTAINER_END and event.depth <= depth: break if event is None or event.event_type is IonEventType.INCOMPLETE: data_event, _ = yield Transition(event, self) else: trans, delegate, event = pass_through() if event is not None and (event.event_type is IonEventType.CONTAINER_START or event.event_type is IonEventType.CONTAINER_END): depth = event.depth data_event, _ = yield Transition(event, self)
python
def _skip_trampoline(handler): """Intercepts events from container handlers, emitting them only if they should not be skipped.""" data_event, self = (yield None) delegate = handler event = None depth = 0 while True: def pass_through(): _trans = delegate.send(Transition(data_event, delegate)) return _trans, _trans.delegate, _trans.event if data_event is not None and data_event.type is ReadEventType.SKIP: while True: trans, delegate, event = pass_through() if event is not None: if event.event_type is IonEventType.CONTAINER_END and event.depth <= depth: break if event is None or event.event_type is IonEventType.INCOMPLETE: data_event, _ = yield Transition(event, self) else: trans, delegate, event = pass_through() if event is not None and (event.event_type is IonEventType.CONTAINER_START or event.event_type is IonEventType.CONTAINER_END): depth = event.depth data_event, _ = yield Transition(event, self)
[ "def", "_skip_trampoline", "(", "handler", ")", ":", "data_event", ",", "self", "=", "(", "yield", "None", ")", "delegate", "=", "handler", "event", "=", "None", "depth", "=", "0", "while", "True", ":", "def", "pass_through", "(", ")", ":", "_trans", "=", "delegate", ".", "send", "(", "Transition", "(", "data_event", ",", "delegate", ")", ")", "return", "_trans", ",", "_trans", ".", "delegate", ",", "_trans", ".", "event", "if", "data_event", "is", "not", "None", "and", "data_event", ".", "type", "is", "ReadEventType", ".", "SKIP", ":", "while", "True", ":", "trans", ",", "delegate", ",", "event", "=", "pass_through", "(", ")", "if", "event", "is", "not", "None", ":", "if", "event", ".", "event_type", "is", "IonEventType", ".", "CONTAINER_END", "and", "event", ".", "depth", "<=", "depth", ":", "break", "if", "event", "is", "None", "or", "event", ".", "event_type", "is", "IonEventType", ".", "INCOMPLETE", ":", "data_event", ",", "_", "=", "yield", "Transition", "(", "event", ",", "self", ")", "else", ":", "trans", ",", "delegate", ",", "event", "=", "pass_through", "(", ")", "if", "event", "is", "not", "None", "and", "(", "event", ".", "event_type", "is", "IonEventType", ".", "CONTAINER_START", "or", "event", ".", "event_type", "is", "IonEventType", ".", "CONTAINER_END", ")", ":", "depth", "=", "event", ".", "depth", "data_event", ",", "_", "=", "yield", "Transition", "(", "event", ",", "self", ")" ]
Intercepts events from container handlers, emitting them only if they should not be skipped.
[ "Intercepts", "events", "from", "container", "handlers", "emitting", "them", "only", "if", "they", "should", "not", "be", "skipped", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L2152-L2176
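The trampoline above reacts to a SKIP read event by fast-forwarding to the end of the current container. A simplified, standalone illustration of that rule over a flat event list; the tuples and names here are invented for the example and do not use the reader's types.

    def skip_to_container_end(events, start_index, depth):
        # Return the index of the CONTAINER_END at or above the remembered depth.
        i = start_index
        while i < len(events):
            event_type, event_depth = events[i]
            if event_type == 'CONTAINER_END' and event_depth <= depth:
                return i
            i += 1
        return i

    events = [('CONTAINER_START', 0), ('SCALAR', 1), ('CONTAINER_START', 1),
              ('SCALAR', 2), ('CONTAINER_END', 1), ('CONTAINER_END', 0)]
    print(skip_to_container_end(events, 1, 0))     # 5 -> the outer CONTAINER_END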
962
amzn/ion-python
amazon/ion/reader_text.py
_next_code_point_handler
def _next_code_point_handler(whence, ctx): """Retrieves the next code point from within a quoted string or symbol.""" data_event, self = yield queue = ctx.queue unicode_escapes_allowed = ctx.ion_type is not IonType.CLOB escaped_newline = False escape_sequence = b'' low_surrogate_required = False while True: if len(queue) == 0: yield ctx.read_data_event(self) queue_iter = iter(queue) code_point_generator = _next_code_point_iter(queue, queue_iter) code_point = next(code_point_generator) if code_point == _BACKSLASH: escape_sequence += six.int2byte(_BACKSLASH) num_digits = None while True: if len(queue) == 0: yield ctx.read_data_event(self) code_point = next(queue_iter) if six.indexbytes(escape_sequence, -1) == _BACKSLASH: if code_point == _ord(b'u') and unicode_escapes_allowed: # 4-digit unicode escapes, plus '\u' for each surrogate num_digits = 12 if low_surrogate_required else 6 low_surrogate_required = False elif low_surrogate_required: _illegal_character(code_point, ctx, 'Unpaired high surrogate escape sequence %s.' % (escape_sequence,)) elif code_point == _ord(b'x'): num_digits = 4 # 2-digit hex escapes elif code_point == _ord(b'U') and unicode_escapes_allowed: num_digits = 10 # 8-digit unicode escapes elif code_point in _COMMON_ESCAPES: if code_point == _SLASH or code_point == _QUESTION_MARK: escape_sequence = b'' # Drop the \. Python does not recognize these as escapes. escape_sequence += six.int2byte(code_point) break elif code_point in _NEWLINES: escaped_newline = True break else: # This is a backslash followed by an invalid escape character. This is illegal. _illegal_character(code_point, ctx, 'Invalid escape sequence \\%s.' % (_chr(code_point),)) escape_sequence += six.int2byte(code_point) else: if code_point not in _HEX_DIGITS: _illegal_character(code_point, ctx, 'Non-hex character %s found in unicode escape.' % (_chr(code_point),)) escape_sequence += six.int2byte(code_point) if len(escape_sequence) == num_digits: break if not escaped_newline: decoded_escape_sequence = escape_sequence.decode('unicode-escape') cp_iter = _next_code_point_iter(decoded_escape_sequence, iter(decoded_escape_sequence), to_int=ord) code_point = next(cp_iter) if code_point is None: # This is a high surrogate. Restart the loop to gather the low surrogate. low_surrogate_required = True continue code_point = CodePoint(code_point) code_point.char = decoded_escape_sequence code_point.is_escaped = True ctx.set_code_point(code_point) yield Transition(None, whence) elif low_surrogate_required: _illegal_character(code_point, ctx, 'Unpaired high surrogate escape sequence %s.' % (escape_sequence,)) if code_point == _CARRIAGE_RETURN: # Normalize all newlines (\r, \n, and \r\n) to \n . if len(queue) == 0: yield ctx.read_data_event(self) code_point = next(queue_iter) if code_point != _NEWLINE: queue.unread(code_point) code_point = _NEWLINE while code_point is None: yield ctx.read_data_event(self) code_point = next(code_point_generator) if escaped_newline: code_point = CodePoint(code_point) code_point.char = _ESCAPED_NEWLINE code_point.is_escaped = True ctx.set_code_point(code_point) yield Transition(None, whence)
python
def _next_code_point_handler(whence, ctx): """Retrieves the next code point from within a quoted string or symbol.""" data_event, self = yield queue = ctx.queue unicode_escapes_allowed = ctx.ion_type is not IonType.CLOB escaped_newline = False escape_sequence = b'' low_surrogate_required = False while True: if len(queue) == 0: yield ctx.read_data_event(self) queue_iter = iter(queue) code_point_generator = _next_code_point_iter(queue, queue_iter) code_point = next(code_point_generator) if code_point == _BACKSLASH: escape_sequence += six.int2byte(_BACKSLASH) num_digits = None while True: if len(queue) == 0: yield ctx.read_data_event(self) code_point = next(queue_iter) if six.indexbytes(escape_sequence, -1) == _BACKSLASH: if code_point == _ord(b'u') and unicode_escapes_allowed: # 4-digit unicode escapes, plus '\u' for each surrogate num_digits = 12 if low_surrogate_required else 6 low_surrogate_required = False elif low_surrogate_required: _illegal_character(code_point, ctx, 'Unpaired high surrogate escape sequence %s.' % (escape_sequence,)) elif code_point == _ord(b'x'): num_digits = 4 # 2-digit hex escapes elif code_point == _ord(b'U') and unicode_escapes_allowed: num_digits = 10 # 8-digit unicode escapes elif code_point in _COMMON_ESCAPES: if code_point == _SLASH or code_point == _QUESTION_MARK: escape_sequence = b'' # Drop the \. Python does not recognize these as escapes. escape_sequence += six.int2byte(code_point) break elif code_point in _NEWLINES: escaped_newline = True break else: # This is a backslash followed by an invalid escape character. This is illegal. _illegal_character(code_point, ctx, 'Invalid escape sequence \\%s.' % (_chr(code_point),)) escape_sequence += six.int2byte(code_point) else: if code_point not in _HEX_DIGITS: _illegal_character(code_point, ctx, 'Non-hex character %s found in unicode escape.' % (_chr(code_point),)) escape_sequence += six.int2byte(code_point) if len(escape_sequence) == num_digits: break if not escaped_newline: decoded_escape_sequence = escape_sequence.decode('unicode-escape') cp_iter = _next_code_point_iter(decoded_escape_sequence, iter(decoded_escape_sequence), to_int=ord) code_point = next(cp_iter) if code_point is None: # This is a high surrogate. Restart the loop to gather the low surrogate. low_surrogate_required = True continue code_point = CodePoint(code_point) code_point.char = decoded_escape_sequence code_point.is_escaped = True ctx.set_code_point(code_point) yield Transition(None, whence) elif low_surrogate_required: _illegal_character(code_point, ctx, 'Unpaired high surrogate escape sequence %s.' % (escape_sequence,)) if code_point == _CARRIAGE_RETURN: # Normalize all newlines (\r, \n, and \r\n) to \n . if len(queue) == 0: yield ctx.read_data_event(self) code_point = next(queue_iter) if code_point != _NEWLINE: queue.unread(code_point) code_point = _NEWLINE while code_point is None: yield ctx.read_data_event(self) code_point = next(code_point_generator) if escaped_newline: code_point = CodePoint(code_point) code_point.char = _ESCAPED_NEWLINE code_point.is_escaped = True ctx.set_code_point(code_point) yield Transition(None, whence)
[ "def", "_next_code_point_handler", "(", "whence", ",", "ctx", ")", ":", "data_event", ",", "self", "=", "yield", "queue", "=", "ctx", ".", "queue", "unicode_escapes_allowed", "=", "ctx", ".", "ion_type", "is", "not", "IonType", ".", "CLOB", "escaped_newline", "=", "False", "escape_sequence", "=", "b''", "low_surrogate_required", "=", "False", "while", "True", ":", "if", "len", "(", "queue", ")", "==", "0", ":", "yield", "ctx", ".", "read_data_event", "(", "self", ")", "queue_iter", "=", "iter", "(", "queue", ")", "code_point_generator", "=", "_next_code_point_iter", "(", "queue", ",", "queue_iter", ")", "code_point", "=", "next", "(", "code_point_generator", ")", "if", "code_point", "==", "_BACKSLASH", ":", "escape_sequence", "+=", "six", ".", "int2byte", "(", "_BACKSLASH", ")", "num_digits", "=", "None", "while", "True", ":", "if", "len", "(", "queue", ")", "==", "0", ":", "yield", "ctx", ".", "read_data_event", "(", "self", ")", "code_point", "=", "next", "(", "queue_iter", ")", "if", "six", ".", "indexbytes", "(", "escape_sequence", ",", "-", "1", ")", "==", "_BACKSLASH", ":", "if", "code_point", "==", "_ord", "(", "b'u'", ")", "and", "unicode_escapes_allowed", ":", "# 4-digit unicode escapes, plus '\\u' for each surrogate", "num_digits", "=", "12", "if", "low_surrogate_required", "else", "6", "low_surrogate_required", "=", "False", "elif", "low_surrogate_required", ":", "_illegal_character", "(", "code_point", ",", "ctx", ",", "'Unpaired high surrogate escape sequence %s.'", "%", "(", "escape_sequence", ",", ")", ")", "elif", "code_point", "==", "_ord", "(", "b'x'", ")", ":", "num_digits", "=", "4", "# 2-digit hex escapes", "elif", "code_point", "==", "_ord", "(", "b'U'", ")", "and", "unicode_escapes_allowed", ":", "num_digits", "=", "10", "# 8-digit unicode escapes", "elif", "code_point", "in", "_COMMON_ESCAPES", ":", "if", "code_point", "==", "_SLASH", "or", "code_point", "==", "_QUESTION_MARK", ":", "escape_sequence", "=", "b''", "# Drop the \\. Python does not recognize these as escapes.", "escape_sequence", "+=", "six", ".", "int2byte", "(", "code_point", ")", "break", "elif", "code_point", "in", "_NEWLINES", ":", "escaped_newline", "=", "True", "break", "else", ":", "# This is a backslash followed by an invalid escape character. This is illegal.", "_illegal_character", "(", "code_point", ",", "ctx", ",", "'Invalid escape sequence \\\\%s.'", "%", "(", "_chr", "(", "code_point", ")", ",", ")", ")", "escape_sequence", "+=", "six", ".", "int2byte", "(", "code_point", ")", "else", ":", "if", "code_point", "not", "in", "_HEX_DIGITS", ":", "_illegal_character", "(", "code_point", ",", "ctx", ",", "'Non-hex character %s found in unicode escape.'", "%", "(", "_chr", "(", "code_point", ")", ",", ")", ")", "escape_sequence", "+=", "six", ".", "int2byte", "(", "code_point", ")", "if", "len", "(", "escape_sequence", ")", "==", "num_digits", ":", "break", "if", "not", "escaped_newline", ":", "decoded_escape_sequence", "=", "escape_sequence", ".", "decode", "(", "'unicode-escape'", ")", "cp_iter", "=", "_next_code_point_iter", "(", "decoded_escape_sequence", ",", "iter", "(", "decoded_escape_sequence", ")", ",", "to_int", "=", "ord", ")", "code_point", "=", "next", "(", "cp_iter", ")", "if", "code_point", "is", "None", ":", "# This is a high surrogate. 
Restart the loop to gather the low surrogate.", "low_surrogate_required", "=", "True", "continue", "code_point", "=", "CodePoint", "(", "code_point", ")", "code_point", ".", "char", "=", "decoded_escape_sequence", "code_point", ".", "is_escaped", "=", "True", "ctx", ".", "set_code_point", "(", "code_point", ")", "yield", "Transition", "(", "None", ",", "whence", ")", "elif", "low_surrogate_required", ":", "_illegal_character", "(", "code_point", ",", "ctx", ",", "'Unpaired high surrogate escape sequence %s.'", "%", "(", "escape_sequence", ",", ")", ")", "if", "code_point", "==", "_CARRIAGE_RETURN", ":", "# Normalize all newlines (\\r, \\n, and \\r\\n) to \\n .", "if", "len", "(", "queue", ")", "==", "0", ":", "yield", "ctx", ".", "read_data_event", "(", "self", ")", "code_point", "=", "next", "(", "queue_iter", ")", "if", "code_point", "!=", "_NEWLINE", ":", "queue", ".", "unread", "(", "code_point", ")", "code_point", "=", "_NEWLINE", "while", "code_point", "is", "None", ":", "yield", "ctx", ".", "read_data_event", "(", "self", ")", "code_point", "=", "next", "(", "code_point_generator", ")", "if", "escaped_newline", ":", "code_point", "=", "CodePoint", "(", "code_point", ")", "code_point", ".", "char", "=", "_ESCAPED_NEWLINE", "code_point", ".", "is_escaped", "=", "True", "ctx", ".", "set_code_point", "(", "code_point", ")", "yield", "Transition", "(", "None", ",", "whence", ")" ]
Retrieves the next code point from within a quoted string or symbol.
[ "Retrieves", "the", "next", "code", "point", "from", "within", "a", "quoted", "string", "or", "symbol", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L2183-L2266
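Once a complete escape sequence has been buffered, the handler above delegates to Python's built-in 'unicode-escape' codec. A standalone illustration of that step; printing the non-ASCII results assumes a UTF-8-capable console.

    print(b'\\x41'.decode('unicode-escape'))        # 'A'   (2-digit hex escape)
    print(b'\\u00e9'.decode('unicode-escape'))      # 'é'   (4-digit unicode escape)
    print(b'\\U0001f600'.decode('unicode-escape'))  # '😀'  (8-digit unicode escape)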
963
amzn/ion-python
amazon/ion/reader_text.py
_HandlerContext.read_data_event
def read_data_event(self, whence, complete=False, can_flush=False): """Creates a transition to a co-routine for retrieving data as bytes. Args: whence (Coroutine): The co-routine to return to after the data is satisfied. complete (Optional[bool]): True if STREAM_END should be emitted if no bytes are read or available; False if INCOMPLETE should be emitted in that case. can_flush (Optional[bool]): True if NEXT may be requested after INCOMPLETE is emitted as a result of this data request. """ return Transition(None, _read_data_handler(whence, self, complete, can_flush))
python
def read_data_event(self, whence, complete=False, can_flush=False): """Creates a transition to a co-routine for retrieving data as bytes. Args: whence (Coroutine): The co-routine to return to after the data is satisfied. complete (Optional[bool]): True if STREAM_END should be emitted if no bytes are read or available; False if INCOMPLETE should be emitted in that case. can_flush (Optional[bool]): True if NEXT may be requested after INCOMPLETE is emitted as a result of this data request. """ return Transition(None, _read_data_handler(whence, self, complete, can_flush))
[ "def", "read_data_event", "(", "self", ",", "whence", ",", "complete", "=", "False", ",", "can_flush", "=", "False", ")", ":", "return", "Transition", "(", "None", ",", "_read_data_handler", "(", "whence", ",", "self", ",", "complete", ",", "can_flush", ")", ")" ]
Creates a transition to a co-routine for retrieving data as bytes. Args: whence (Coroutine): The co-routine to return to after the data is satisfied. complete (Optional[bool]): True if STREAM_END should be emitted if no bytes are read or available; False if INCOMPLETE should be emitted in that case. can_flush (Optional[bool]): True if NEXT may be requested after INCOMPLETE is emitted as a result of this data request.
[ "Creates", "a", "transition", "to", "a", "co", "-", "routine", "for", "retrieving", "data", "as", "bytes", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L369-L379
964
amzn/ion-python
amazon/ion/reader_text.py
_HandlerContext.set_unicode
def set_unicode(self, quoted_text=False): """Converts the context's ``value`` to a sequence of unicode code points for holding text tokens, indicating whether the text is quoted. """ if isinstance(self.value, CodePointArray): assert self.quoted_text == quoted_text return self self.value = CodePointArray(self.value) self.quoted_text = quoted_text self.line_comment = False return self
python
def set_unicode(self, quoted_text=False): """Converts the context's ``value`` to a sequence of unicode code points for holding text tokens, indicating whether the text is quoted. """ if isinstance(self.value, CodePointArray): assert self.quoted_text == quoted_text return self self.value = CodePointArray(self.value) self.quoted_text = quoted_text self.line_comment = False return self
[ "def", "set_unicode", "(", "self", ",", "quoted_text", "=", "False", ")", ":", "if", "isinstance", "(", "self", ".", "value", ",", "CodePointArray", ")", ":", "assert", "self", ".", "quoted_text", "==", "quoted_text", "return", "self", "self", ".", "value", "=", "CodePointArray", "(", "self", ".", "value", ")", "self", ".", "quoted_text", "=", "quoted_text", "self", ".", "line_comment", "=", "False", "return", "self" ]
Converts the context's ``value`` to a sequence of unicode code points for holding text tokens, indicating whether the text is quoted.
[ "Converts", "the", "context", "s", "value", "to", "a", "sequence", "of", "unicode", "code", "points", "for", "holding", "text", "tokens", "indicating", "whether", "the", "text", "is", "quoted", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L388-L398
965
amzn/ion-python
amazon/ion/reader_text.py
_HandlerContext.set_quoted_text
def set_quoted_text(self, quoted_text): """Sets the context's ``quoted_text`` flag. Useful when entering and exiting quoted text tokens.""" self.quoted_text = quoted_text self.line_comment = False return self
python
def set_quoted_text(self, quoted_text): """Sets the context's ``quoted_text`` flag. Useful when entering and exiting quoted text tokens.""" self.quoted_text = quoted_text self.line_comment = False return self
[ "def", "set_quoted_text", "(", "self", ",", "quoted_text", ")", ":", "self", ".", "quoted_text", "=", "quoted_text", "self", ".", "line_comment", "=", "False", "return", "self" ]
Sets the context's ``quoted_text`` flag. Useful when entering and exiting quoted text tokens.
[ "Sets", "the", "context", "s", "quoted_text", "flag", ".", "Useful", "when", "entering", "and", "exiting", "quoted", "text", "tokens", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L400-L404
966
amzn/ion-python
amazon/ion/reader_text.py
_HandlerContext.derive_container_context
def derive_container_context(self, ion_type, whence): """Derives a container context as a child of the current context.""" if ion_type is IonType.STRUCT: container = _C_STRUCT elif ion_type is IonType.LIST: container = _C_LIST elif ion_type is IonType.SEXP: container = _C_SEXP else: raise TypeError('Cannot derive container context for non-container type %s.' % (ion_type.name,)) return _HandlerContext( container=container, queue=self.queue, field_name=self.field_name, annotations=self.annotations, depth=self.depth + 1, whence=whence, value=None, # containers don't have a value ion_type=ion_type, pending_symbol=None )
python
def derive_container_context(self, ion_type, whence): """Derives a container context as a child of the current context.""" if ion_type is IonType.STRUCT: container = _C_STRUCT elif ion_type is IonType.LIST: container = _C_LIST elif ion_type is IonType.SEXP: container = _C_SEXP else: raise TypeError('Cannot derive container context for non-container type %s.' % (ion_type.name,)) return _HandlerContext( container=container, queue=self.queue, field_name=self.field_name, annotations=self.annotations, depth=self.depth + 1, whence=whence, value=None, # containers don't have a value ion_type=ion_type, pending_symbol=None )
[ "def", "derive_container_context", "(", "self", ",", "ion_type", ",", "whence", ")", ":", "if", "ion_type", "is", "IonType", ".", "STRUCT", ":", "container", "=", "_C_STRUCT", "elif", "ion_type", "is", "IonType", ".", "LIST", ":", "container", "=", "_C_LIST", "elif", "ion_type", "is", "IonType", ".", "SEXP", ":", "container", "=", "_C_SEXP", "else", ":", "raise", "TypeError", "(", "'Cannot derive container context for non-container type %s.'", "%", "(", "ion_type", ".", "name", ",", ")", ")", "return", "_HandlerContext", "(", "container", "=", "container", ",", "queue", "=", "self", ".", "queue", ",", "field_name", "=", "self", ".", "field_name", ",", "annotations", "=", "self", ".", "annotations", ",", "depth", "=", "self", ".", "depth", "+", "1", ",", "whence", "=", "whence", ",", "value", "=", "None", ",", "# containers don't have a value", "ion_type", "=", "ion_type", ",", "pending_symbol", "=", "None", ")" ]
Derives a container context as a child of the current context.
[ "Derives", "a", "container", "context", "as", "a", "child", "of", "the", "current", "context", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L421-L441
967
amzn/ion-python
amazon/ion/reader_text.py
_HandlerContext.derive_child_context
def derive_child_context(self, whence): """Derives a scalar context as a child of the current context.""" return _HandlerContext( container=self.container, queue=self.queue, field_name=None, annotations=None, depth=self.depth, whence=whence, value=bytearray(), # children start without a value ion_type=None, pending_symbol=None )
python
def derive_child_context(self, whence): """Derives a scalar context as a child of the current context.""" return _HandlerContext( container=self.container, queue=self.queue, field_name=None, annotations=None, depth=self.depth, whence=whence, value=bytearray(), # children start without a value ion_type=None, pending_symbol=None )
[ "def", "derive_child_context", "(", "self", ",", "whence", ")", ":", "return", "_HandlerContext", "(", "container", "=", "self", ".", "container", ",", "queue", "=", "self", ".", "queue", ",", "field_name", "=", "None", ",", "annotations", "=", "None", ",", "depth", "=", "self", ".", "depth", ",", "whence", "=", "whence", ",", "value", "=", "bytearray", "(", ")", ",", "# children start without a value", "ion_type", "=", "None", ",", "pending_symbol", "=", "None", ")" ]
Derives a scalar context as a child of the current context.
[ "Derives", "a", "scalar", "context", "as", "a", "child", "of", "the", "current", "context", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L455-L467
968
amzn/ion-python
amazon/ion/reader_text.py
_HandlerContext.set_ion_type
def set_ion_type(self, ion_type): """Sets context to the given IonType.""" if ion_type is self.ion_type: return self self.ion_type = ion_type self.line_comment = False return self
python
def set_ion_type(self, ion_type): """Sets context to the given IonType.""" if ion_type is self.ion_type: return self self.ion_type = ion_type self.line_comment = False return self
[ "def", "set_ion_type", "(", "self", ",", "ion_type", ")", ":", "if", "ion_type", "is", "self", ".", "ion_type", ":", "return", "self", "self", ".", "ion_type", "=", "ion_type", "self", ".", "line_comment", "=", "False", "return", "self" ]
Sets context to the given IonType.
[ "Sets", "context", "to", "the", "given", "IonType", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L474-L480
969
amzn/ion-python
amazon/ion/reader_text.py
_HandlerContext.set_annotation
def set_annotation(self): """Appends the context's ``pending_symbol`` to its ``annotations`` sequence.""" assert self.pending_symbol is not None assert not self.value annotations = (_as_symbol(self.pending_symbol, is_symbol_value=False),) # pending_symbol becomes an annotation self.annotations = annotations if not self.annotations else self.annotations + annotations self.ion_type = None self.pending_symbol = None # reset pending symbol self.quoted_text = False self.line_comment = False self.is_self_delimiting = False return self
python
def set_annotation(self): """Appends the context's ``pending_symbol`` to its ``annotations`` sequence.""" assert self.pending_symbol is not None assert not self.value annotations = (_as_symbol(self.pending_symbol, is_symbol_value=False),) # pending_symbol becomes an annotation self.annotations = annotations if not self.annotations else self.annotations + annotations self.ion_type = None self.pending_symbol = None # reset pending symbol self.quoted_text = False self.line_comment = False self.is_self_delimiting = False return self
[ "def", "set_annotation", "(", "self", ")", ":", "assert", "self", ".", "pending_symbol", "is", "not", "None", "assert", "not", "self", ".", "value", "annotations", "=", "(", "_as_symbol", "(", "self", ".", "pending_symbol", ",", "is_symbol_value", "=", "False", ")", ",", ")", "# pending_symbol becomes an annotation", "self", ".", "annotations", "=", "annotations", "if", "not", "self", ".", "annotations", "else", "self", ".", "annotations", "+", "annotations", "self", ".", "ion_type", "=", "None", "self", ".", "pending_symbol", "=", "None", "# reset pending symbol", "self", ".", "quoted_text", "=", "False", "self", ".", "line_comment", "=", "False", "self", ".", "is_self_delimiting", "=", "False", "return", "self" ]
Appends the context's ``pending_symbol`` to its ``annotations`` sequence.
[ "Appends", "the", "context", "s", "pending_symbol", "to", "its", "annotations", "sequence", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L482-L493
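Annotations collected through set_annotation() end up attached to the materialized value. A hedged usage sketch, assuming the documented simpleion API and that loaded values expose an ion_annotations attribute; both are indicative of the public surface rather than guaranteed by this record.

    from amazon.ion import simpleion

    value = simpleion.loads('degrees::37')
    print(int(value))              # 37
    print(value.ion_annotations)   # the 'degrees' annotation (exact repr may vary)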
970
amzn/ion-python
amazon/ion/reader_text.py
_HandlerContext.set_field_name
def set_field_name(self): """Sets the context's ``pending_symbol`` as its ``field_name``.""" assert self.pending_symbol is not None assert not self.value self.field_name = _as_symbol(self.pending_symbol, is_symbol_value=False) # pending_symbol becomes field name self.pending_symbol = None # reset pending symbol self.quoted_text = False self.line_comment = False self.is_self_delimiting = False return self
python
def set_field_name(self): """Sets the context's ``pending_symbol`` as its ``field_name``.""" assert self.pending_symbol is not None assert not self.value self.field_name = _as_symbol(self.pending_symbol, is_symbol_value=False) # pending_symbol becomes field name self.pending_symbol = None # reset pending symbol self.quoted_text = False self.line_comment = False self.is_self_delimiting = False return self
[ "def", "set_field_name", "(", "self", ")", ":", "assert", "self", ".", "pending_symbol", "is", "not", "None", "assert", "not", "self", ".", "value", "self", ".", "field_name", "=", "_as_symbol", "(", "self", ".", "pending_symbol", ",", "is_symbol_value", "=", "False", ")", "# pending_symbol becomes field name", "self", ".", "pending_symbol", "=", "None", "# reset pending symbol", "self", ".", "quoted_text", "=", "False", "self", ".", "line_comment", "=", "False", "self", ".", "is_self_delimiting", "=", "False", "return", "self" ]
Sets the context's ``pending_symbol`` as its ``field_name``.
[ "Sets", "the", "context", "s", "pending_symbol", "as", "its", "field_name", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L495-L504
971
amzn/ion-python
amazon/ion/reader_text.py
_HandlerContext.set_pending_symbol
def set_pending_symbol(self, pending_symbol=None): """Sets the context's ``pending_symbol`` with the given unicode sequence and resets the context's ``value``. If the input is None, an empty :class:`CodePointArray` is used. """ if pending_symbol is None: pending_symbol = CodePointArray() self.value = bytearray() # reset value self.pending_symbol = pending_symbol self.line_comment = False return self
python
def set_pending_symbol(self, pending_symbol=None): """Sets the context's ``pending_symbol`` with the given unicode sequence and resets the context's ``value``. If the input is None, an empty :class:`CodePointArray` is used. """ if pending_symbol is None: pending_symbol = CodePointArray() self.value = bytearray() # reset value self.pending_symbol = pending_symbol self.line_comment = False return self
[ "def", "set_pending_symbol", "(", "self", ",", "pending_symbol", "=", "None", ")", ":", "if", "pending_symbol", "is", "None", ":", "pending_symbol", "=", "CodePointArray", "(", ")", "self", ".", "value", "=", "bytearray", "(", ")", "# reset value", "self", ".", "pending_symbol", "=", "pending_symbol", "self", ".", "line_comment", "=", "False", "return", "self" ]
Sets the context's ``pending_symbol`` with the given unicode sequence and resets the context's ``value``. If the input is None, an empty :class:`CodePointArray` is used.
[ "Sets", "the", "context", "s", "pending_symbol", "with", "the", "given", "unicode", "sequence", "and", "resets", "the", "context", "s", "value", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L506-L516
972
amzn/ion-python
amazon/ion/writer_binary_raw_fields.py
_write_base
def _write_base(buf, value, bits_per_octet, end_bit=0, sign_bit=0, is_signed=False): """Write a field to the provided buffer. Args: buf (Sequence): The buffer into which the UInt will be written in the form of integer octets. value (int): The value to write as a UInt. bits_per_octet (int): The number of value bits (i.e. exclusive of the end bit, but inclusive of the sign bit, if applicable) per octet. end_bit (Optional[int]): The end bit mask. sign_bit (Optional[int]): The sign bit mask. Returns: int: The number of octets written. """ if value == 0: buf.append(sign_bit | end_bit) return 1 num_bits = bit_length(value) num_octets = num_bits // bits_per_octet # 'remainder' is the number of value bits in the first octet. remainder = num_bits % bits_per_octet if remainder != 0 or is_signed: # If signed, the first octet has one fewer bit available, requiring another octet. num_octets += 1 else: # This ensures that unsigned values that fit exactly are not shifted too far. remainder = bits_per_octet for i in range(num_octets): octet = 0 if i == 0: octet |= sign_bit if i == num_octets - 1: octet |= end_bit # 'remainder' is used for alignment such that only the first octet # may contain insignificant zeros. octet |= ((value >> (num_bits - (remainder + bits_per_octet * i))) & _OCTET_MASKS[bits_per_octet]) buf.append(octet) return num_octets
python
def _write_base(buf, value, bits_per_octet, end_bit=0, sign_bit=0, is_signed=False): """Write a field to the provided buffer. Args: buf (Sequence): The buffer into which the UInt will be written in the form of integer octets. value (int): The value to write as a UInt. bits_per_octet (int): The number of value bits (i.e. exclusive of the end bit, but inclusive of the sign bit, if applicable) per octet. end_bit (Optional[int]): The end bit mask. sign_bit (Optional[int]): The sign bit mask. Returns: int: The number of octets written. """ if value == 0: buf.append(sign_bit | end_bit) return 1 num_bits = bit_length(value) num_octets = num_bits // bits_per_octet # 'remainder' is the number of value bits in the first octet. remainder = num_bits % bits_per_octet if remainder != 0 or is_signed: # If signed, the first octet has one fewer bit available, requiring another octet. num_octets += 1 else: # This ensures that unsigned values that fit exactly are not shifted too far. remainder = bits_per_octet for i in range(num_octets): octet = 0 if i == 0: octet |= sign_bit if i == num_octets - 1: octet |= end_bit # 'remainder' is used for alignment such that only the first octet # may contain insignificant zeros. octet |= ((value >> (num_bits - (remainder + bits_per_octet * i))) & _OCTET_MASKS[bits_per_octet]) buf.append(octet) return num_octets
[ "def", "_write_base", "(", "buf", ",", "value", ",", "bits_per_octet", ",", "end_bit", "=", "0", ",", "sign_bit", "=", "0", ",", "is_signed", "=", "False", ")", ":", "if", "value", "==", "0", ":", "buf", ".", "append", "(", "sign_bit", "|", "end_bit", ")", "return", "1", "num_bits", "=", "bit_length", "(", "value", ")", "num_octets", "=", "num_bits", "//", "bits_per_octet", "# 'remainder' is the number of value bits in the first octet.", "remainder", "=", "num_bits", "%", "bits_per_octet", "if", "remainder", "!=", "0", "or", "is_signed", ":", "# If signed, the first octet has one fewer bit available, requiring another octet.", "num_octets", "+=", "1", "else", ":", "# This ensures that unsigned values that fit exactly are not shifted too far.", "remainder", "=", "bits_per_octet", "for", "i", "in", "range", "(", "num_octets", ")", ":", "octet", "=", "0", "if", "i", "==", "0", ":", "octet", "|=", "sign_bit", "if", "i", "==", "num_octets", "-", "1", ":", "octet", "|=", "end_bit", "# 'remainder' is used for alignment such that only the first octet", "# may contain insignificant zeros.", "octet", "|=", "(", "(", "value", ">>", "(", "num_bits", "-", "(", "remainder", "+", "bits_per_octet", "*", "i", ")", ")", ")", "&", "_OCTET_MASKS", "[", "bits_per_octet", "]", ")", "buf", ".", "append", "(", "octet", ")", "return", "num_octets" ]
Write a field to the provided buffer. Args: buf (Sequence): The buffer into which the UInt will be written in the form of integer octets. value (int): The value to write as a UInt. bits_per_octet (int): The number of value bits (i.e. exclusive of the end bit, but inclusive of the sign bit, if applicable) per octet. end_bit (Optional[int]): The end bit mask. sign_bit (Optional[int]): The sign bit mask. Returns: int: The number of octets written.
[ "Write", "a", "field", "to", "the", "provided", "buffer", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/writer_binary_raw_fields.py#L147-L185
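For intuition, the most common specialization of _write_base is Ion's VarUInt field: 7 value bits per octet, with 0x80 marking the final octet. The sketch below re-implements just that special case on its own; it is illustrative and not the code path the writer actually uses (the writer goes through _write_base).

    def write_varuint(buf, value):
        # Split the value into 7-bit groups, most significant first, and set the
        # end bit (0x80) on the final octet. value == 0 still emits one octet.
        octets = []
        while True:
            octets.append(value & 0x7F)
            value >>= 7
            if value == 0:
                break
        octets.reverse()
        octets[-1] |= 0x80
        buf.extend(octets)
        return len(octets)

    buf = bytearray()
    write_varuint(buf, 300)
    print([hex(b) for b in buf])   # ['0x2', '0xac']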
973
amzn/ion-python
amazon/ion/util.py
record
def record(*fields): """Constructs a type that can be extended to create immutable, value types. Examples: A typical declaration looks like:: class MyRecord(record('a', ('b', 1))): pass The above would make a sub-class of ``collections.namedtuple`` that was named ``MyRecord`` with a constructor that had the ``b`` field set to 1 by default. Note: This uses meta-class machinery to rewrite the inheritance hierarchy. This is done in order to make sure that the underlying ``namedtuple`` instance is bound to the right type name and to make sure that the synthetic class that is generated to enable this machinery is not enabled for sub-classes of a user's record class. Args: fields (list[str | (str, any)]): A sequence of str or pairs that """ @six.add_metaclass(_RecordMetaClass) class RecordType(object): _record_sentinel = True _record_fields = fields return RecordType
python
def record(*fields): """Constructs a type that can be extended to create immutable, value types. Examples: A typical declaration looks like:: class MyRecord(record('a', ('b', 1))): pass The above would make a sub-class of ``collections.namedtuple`` that was named ``MyRecord`` with a constructor that had the ``b`` field set to 1 by default. Note: This uses meta-class machinery to rewrite the inheritance hierarchy. This is done in order to make sure that the underlying ``namedtuple`` instance is bound to the right type name and to make sure that the synthetic class that is generated to enable this machinery is not enabled for sub-classes of a user's record class. Args: fields (list[str | (str, any)]): A sequence of str or pairs that """ @six.add_metaclass(_RecordMetaClass) class RecordType(object): _record_sentinel = True _record_fields = fields return RecordType
[ "def", "record", "(", "*", "fields", ")", ":", "@", "six", ".", "add_metaclass", "(", "_RecordMetaClass", ")", "class", "RecordType", "(", "object", ")", ":", "_record_sentinel", "=", "True", "_record_fields", "=", "fields", "return", "RecordType" ]
Constructs a type that can be extended to create immutable, value types. Examples: A typical declaration looks like:: class MyRecord(record('a', ('b', 1))): pass The above would make a sub-class of ``collections.namedtuple`` that was named ``MyRecord`` with a constructor that had the ``b`` field set to 1 by default. Note: This uses meta-class machinery to rewrite the inheritance hierarchy. This is done in order to make sure that the underlying ``namedtuple`` instance is bound to the right type name and to make sure that the synthetic class that is generated to enable this machinery is not enabled for sub-classes of a user's record class. Args: fields (list[str | (str, any)]): A sequence of str or pairs that
[ "Constructs", "a", "type", "that", "can", "be", "extended", "to", "create", "immutable", "value", "types", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/util.py#L137-L163
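A minimal usage sketch for the record() helper documented above; the class name and fields here are hypothetical and assume only the behaviour described in the docstring:

from amazon.ion.util import record

class MyRecord(record('a', ('b', 1))):
    pass

r = MyRecord(a=5)                 # 'b' falls back to its default of 1
assert (r.a, r.b) == (5, 1)
assert MyRecord(a=5, b=2).b == 2  # defaults can be overridden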
974
amzn/ion-python
amazon/ion/util.py
coroutine
def coroutine(func): """Wraps a PEP-342 enhanced generator in a way that avoids boilerplate of the "priming" call to ``next``. Args: func (Callable): The function constructing a generator to decorate. Returns: Callable: The decorated generator. """ def wrapper(*args, **kwargs): gen = func(*args, **kwargs) val = next(gen) if val != None: raise TypeError('Unexpected value from start of coroutine') return gen wrapper.__name__ = func.__name__ wrapper.__doc__ = func.__doc__ return wrapper
python
def coroutine(func): """Wraps a PEP-342 enhanced generator in a way that avoids boilerplate of the "priming" call to ``next``. Args: func (Callable): The function constructing a generator to decorate. Returns: Callable: The decorated generator. """ def wrapper(*args, **kwargs): gen = func(*args, **kwargs) val = next(gen) if val != None: raise TypeError('Unexpected value from start of coroutine') return gen wrapper.__name__ = func.__name__ wrapper.__doc__ = func.__doc__ return wrapper
[ "def", "coroutine", "(", "func", ")", ":", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "gen", "=", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "val", "=", "next", "(", "gen", ")", "if", "val", "!=", "None", ":", "raise", "TypeError", "(", "'Unexpected value from start of coroutine'", ")", "return", "gen", "wrapper", ".", "__name__", "=", "func", ".", "__name__", "wrapper", ".", "__doc__", "=", "func", ".", "__doc__", "return", "wrapper" ]
Wraps a PEP-342 enhanced generator in a way that avoids boilerplate of the "priming" call to ``next``. Args: func (Callable): The function constructing a generator to decorate. Returns: Callable: The decorated generator.
[ "Wraps", "a", "PEP", "-", "342", "enhanced", "generator", "in", "a", "way", "that", "avoids", "boilerplate", "of", "the", "priming", "call", "to", "next", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/util.py#L166-L183
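A small sketch of the coroutine decorator in use; the generator below is made up and only illustrates that the priming next() call is handled by the wrapper:

from amazon.ion.util import coroutine

@coroutine
def echo():
    while True:
        received = (yield)            # the first yield is consumed by the priming next()
        print('got %r' % (received,))

gen = echo()         # already primed, so no explicit next(gen) is needed
gen.send('hello')    # prints: got 'hello'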
975
amzn/ion-python
amazon/ion/core.py
IonEvent.derive_field_name
def derive_field_name(self, field_name): """Derives a new event from this one setting the ``field_name`` attribute. Args: field_name (Union[amazon.ion.symbols.SymbolToken, unicode]): The field name to set. Returns: IonEvent: The newly generated event. """ cls = type(self) # We use ordinals to avoid thunk materialization. return cls( self[0], self[1], self[2], field_name, self[4], self[5] )
python
def derive_field_name(self, field_name): """Derives a new event from this one setting the ``field_name`` attribute. Args: field_name (Union[amazon.ion.symbols.SymbolToken, unicode]): The field name to set. Returns: IonEvent: The newly generated event. """ cls = type(self) # We use ordinals to avoid thunk materialization. return cls( self[0], self[1], self[2], field_name, self[4], self[5] )
[ "def", "derive_field_name", "(", "self", ",", "field_name", ")", ":", "cls", "=", "type", "(", "self", ")", "# We use ordinals to avoid thunk materialization.", "return", "cls", "(", "self", "[", "0", "]", ",", "self", "[", "1", "]", ",", "self", "[", "2", "]", ",", "field_name", ",", "self", "[", "4", "]", ",", "self", "[", "5", "]", ")" ]
Derives a new event from this one setting the ``field_name`` attribute. Args: field_name (Union[amazon.ion.symbols.SymbolToken, unicode]): The field name to set. Returns: IonEvent: The newly generated event.
[ "Derives", "a", "new", "event", "from", "this", "one", "setting", "the", "field_name", "attribute", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/core.py#L163-L180
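A hedged sketch of event derivation, assuming only the IonEvent constructor and derive_* methods shown in these records (the other derive_* variants behave analogously):

from amazon.ion.core import IonEvent, IonEventType, IonType

event = IonEvent(IonEventType.SCALAR, IonType.INT, 42)
named = event.derive_field_name(u'answer')   # same value, new field_name
nested = named.derive_depth(1)
assert nested.value == 42 and nested.field_name == u'answer' and nested.depth == 1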
976
amzn/ion-python
amazon/ion/core.py
IonEvent.derive_annotations
def derive_annotations(self, annotations): """Derives a new event from this one setting the ``annotations`` attribute. Args: annotations: (Sequence[Union[amazon.ion.symbols.SymbolToken, unicode]]): The annotations associated with the derived event. Returns: IonEvent: The newly generated event. """ cls = type(self) # We use ordinals to avoid thunk materialization. return cls( self[0], self[1], self[2], self[3], annotations, self[5] )
python
def derive_annotations(self, annotations): """Derives a new event from this one setting the ``annotations`` attribute. Args: annotations: (Sequence[Union[amazon.ion.symbols.SymbolToken, unicode]]): The annotations associated with the derived event. Returns: IonEvent: The newly generated event. """ cls = type(self) # We use ordinals to avoid thunk materialization. return cls( self[0], self[1], self[2], self[3], annotations, self[5] )
[ "def", "derive_annotations", "(", "self", ",", "annotations", ")", ":", "cls", "=", "type", "(", "self", ")", "# We use ordinals to avoid thunk materialization.", "return", "cls", "(", "self", "[", "0", "]", ",", "self", "[", "1", "]", ",", "self", "[", "2", "]", ",", "self", "[", "3", "]", ",", "annotations", ",", "self", "[", "5", "]", ")" ]
Derives a new event from this one setting the ``annotations`` attribute. Args: annotations: (Sequence[Union[amazon.ion.symbols.SymbolToken, unicode]]): The annotations associated with the derived event. Returns: IonEvent: The newly generated event.
[ "Derives", "a", "new", "event", "from", "this", "one", "setting", "the", "annotations", "attribute", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/core.py#L182-L201
977
amzn/ion-python
amazon/ion/core.py
IonEvent.derive_value
def derive_value(self, value): """Derives a new event from this one setting the ``value`` attribute. Args: value: (any): The value associated with the derived event. Returns: IonEvent: The newly generated non-thunk event. """ return IonEvent( self.event_type, self.ion_type, value, self.field_name, self.annotations, self.depth )
python
def derive_value(self, value): """Derives a new event from this one setting the ``value`` attribute. Args: value: (any): The value associated with the derived event. Returns: IonEvent: The newly generated non-thunk event. """ return IonEvent( self.event_type, self.ion_type, value, self.field_name, self.annotations, self.depth )
[ "def", "derive_value", "(", "self", ",", "value", ")", ":", "return", "IonEvent", "(", "self", ".", "event_type", ",", "self", ".", "ion_type", ",", "value", ",", "self", ".", "field_name", ",", "self", ".", "annotations", ",", "self", ".", "depth", ")" ]
Derives a new event from this one setting the ``value`` attribute. Args: value: (any): The value associated with the derived event. Returns: IonEvent: The newly generated non-thunk event.
[ "Derives", "a", "new", "event", "from", "this", "one", "setting", "the", "value", "attribute", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/core.py#L203-L220
978
amzn/ion-python
amazon/ion/core.py
IonEvent.derive_depth
def derive_depth(self, depth): """Derives a new event from this one setting the ``depth`` attribute. Args: depth: (int): The annotations associated with the derived event. Returns: IonEvent: The newly generated event. """ cls = type(self) # We use ordinals to avoid thunk materialization. return cls( self[0], self[1], self[2], self[3], self[4], depth )
python
def derive_depth(self, depth): """Derives a new event from this one setting the ``depth`` attribute. Args: depth: (int): The annotations associated with the derived event. Returns: IonEvent: The newly generated event. """ cls = type(self) # We use ordinals to avoid thunk materialization. return cls( self[0], self[1], self[2], self[3], self[4], depth )
[ "def", "derive_depth", "(", "self", ",", "depth", ")", ":", "cls", "=", "type", "(", "self", ")", "# We use ordinals to avoid thunk materialization.", "return", "cls", "(", "self", "[", "0", "]", ",", "self", "[", "1", "]", ",", "self", "[", "2", "]", ",", "self", "[", "3", "]", ",", "self", "[", "4", "]", ",", "depth", ")" ]
Derives a new event from this one setting the ``depth`` attribute. Args: depth (int): The depth associated with the derived event. Returns: IonEvent: The newly generated event.
[ "Derives", "a", "new", "event", "from", "this", "one", "setting", "the", "depth", "attribute", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/core.py#L222-L241
979
amzn/ion-python
amazon/ion/core.py
Timestamp.adjust_from_utc_fields
def adjust_from_utc_fields(*args, **kwargs): """Constructs a timestamp from UTC fields adjusted to the local offset if given.""" raw_ts = Timestamp(*args, **kwargs) offset = raw_ts.utcoffset() if offset is None or offset == timedelta(): return raw_ts # XXX This returns a datetime, not a Timestamp (which has our precision if defined) adjusted = raw_ts + offset if raw_ts.precision is None: # No precision means we can just return a regular datetime return adjusted return Timestamp( adjusted.year, adjusted.month, adjusted.day, adjusted.hour, adjusted.minute, adjusted.second, adjusted.microsecond, raw_ts.tzinfo, precision=raw_ts.precision, fractional_precision=raw_ts.fractional_precision )
python
def adjust_from_utc_fields(*args, **kwargs): """Constructs a timestamp from UTC fields adjusted to the local offset if given.""" raw_ts = Timestamp(*args, **kwargs) offset = raw_ts.utcoffset() if offset is None or offset == timedelta(): return raw_ts # XXX This returns a datetime, not a Timestamp (which has our precision if defined) adjusted = raw_ts + offset if raw_ts.precision is None: # No precision means we can just return a regular datetime return adjusted return Timestamp( adjusted.year, adjusted.month, adjusted.day, adjusted.hour, adjusted.minute, adjusted.second, adjusted.microsecond, raw_ts.tzinfo, precision=raw_ts.precision, fractional_precision=raw_ts.fractional_precision )
[ "def", "adjust_from_utc_fields", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "raw_ts", "=", "Timestamp", "(", "*", "args", ",", "*", "*", "kwargs", ")", "offset", "=", "raw_ts", ".", "utcoffset", "(", ")", "if", "offset", "is", "None", "or", "offset", "==", "timedelta", "(", ")", ":", "return", "raw_ts", "# XXX This returns a datetime, not a Timestamp (which has our precision if defined)", "adjusted", "=", "raw_ts", "+", "offset", "if", "raw_ts", ".", "precision", "is", "None", ":", "# No precision means we can just return a regular datetime", "return", "adjusted", "return", "Timestamp", "(", "adjusted", ".", "year", ",", "adjusted", ".", "month", ",", "adjusted", ".", "day", ",", "adjusted", ".", "hour", ",", "adjusted", ".", "minute", ",", "adjusted", ".", "second", ",", "adjusted", ".", "microsecond", ",", "raw_ts", ".", "tzinfo", ",", "precision", "=", "raw_ts", ".", "precision", ",", "fractional_precision", "=", "raw_ts", ".", "fractional_precision", ")" ]
Constructs a timestamp from UTC fields adjusted to the local offset if given.
[ "Constructs", "a", "timestamp", "from", "UTC", "fields", "adjusted", "to", "the", "local", "offset", "if", "given", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/core.py#L410-L434
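A hypothetical sketch of adjust_from_utc_fields; it assumes Timestamp accepts the usual datetime fields plus a tzinfo, as the code above suggests:

from datetime import timedelta, timezone
from amazon.ion.core import Timestamp

# The fields below are UTC; adding the -08:00 offset yields a local
# wall-clock time of 04:00 with the same offset attached.
local = Timestamp.adjust_from_utc_fields(
    2019, 6, 1, 12, 0, 0, tzinfo=timezone(timedelta(hours=-8)))
assert local.hour == 4 and local.utcoffset() == timedelta(hours=-8)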
980
amzn/ion-python
amazon/ion/writer_text.py
raw_writer
def raw_writer(indent=None): """Returns a raw text writer co-routine. Yields: DataEvent: serialization events to write out Receives :class:`amazon.ion.core.IonEvent` or ``None`` when the co-routine yields ``HAS_PENDING`` :class:`WriteEventType` events. """ is_whitespace_str = isinstance(indent, str) and re.search(r'\A\s*\Z', indent, re.M) is not None if not (indent is None or is_whitespace_str): raise ValueError('The indent parameter must either be None or a string containing only whitespace') indent_bytes = six.b(indent) if isinstance(indent, str) else indent return writer_trampoline(_raw_writer_coroutine(indent=indent_bytes))
python
def raw_writer(indent=None): """Returns a raw text writer co-routine. Yields: DataEvent: serialization events to write out Receives :class:`amazon.ion.core.IonEvent` or ``None`` when the co-routine yields ``HAS_PENDING`` :class:`WriteEventType` events. """ is_whitespace_str = isinstance(indent, str) and re.search(r'\A\s*\Z', indent, re.M) is not None if not (indent is None or is_whitespace_str): raise ValueError('The indent parameter must either be None or a string containing only whitespace') indent_bytes = six.b(indent) if isinstance(indent, str) else indent return writer_trampoline(_raw_writer_coroutine(indent=indent_bytes))
[ "def", "raw_writer", "(", "indent", "=", "None", ")", ":", "is_whitespace_str", "=", "isinstance", "(", "indent", ",", "str", ")", "and", "re", ".", "search", "(", "r'\\A\\s*\\Z'", ",", "indent", ",", "re", ".", "M", ")", "is", "not", "None", "if", "not", "(", "indent", "is", "None", "or", "is_whitespace_str", ")", ":", "raise", "ValueError", "(", "'The indent parameter must either be None or a string containing only whitespace'", ")", "indent_bytes", "=", "six", ".", "b", "(", "indent", ")", "if", "isinstance", "(", "indent", ",", "str", ")", "else", "indent", "return", "writer_trampoline", "(", "_raw_writer_coroutine", "(", "indent", "=", "indent_bytes", ")", ")" ]
Returns a raw text writer co-routine. Yields: DataEvent: serialization events to write out Receives :class:`amazon.ion.core.IonEvent` or ``None`` when the co-routine yields ``HAS_PENDING`` :class:`WriteEventType` events.
[ "Returns", "a", "raw", "text", "writer", "co", "-", "routine", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/writer_text.py#L433-L449
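A brief sketch of the indent validation described above; the indent strings are arbitrary examples:

from amazon.ion.writer_text import raw_writer

pretty = raw_writer(indent='  ')   # accepted: whitespace-only indent
try:
    raw_writer(indent='xx')
except ValueError:
    pass                           # rejected: indent must be whitespace or None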
981
amzn/ion-python
amazon/ion/writer.py
writer_trampoline
def writer_trampoline(start): """Provides the co-routine trampoline for a writer state machine. The given co-routine is a state machine that yields :class:`Transition` and takes a :class:`Transition` with a :class:`amazon.ion.core.IonEvent` and the co-routine itself. Notes: A writer delimits its logical flush points with ``WriteEventType.COMPLETE``, depending on the configuration, a user may need to send an ``IonEventType.STREAM_END`` to force this to occur. Args: start: The writer co-routine to initially delegate to. Yields: DataEvent: the result of serialization. Receives :class:`amazon.ion.core.IonEvent` to serialize into :class:`DataEvent`. """ trans = Transition(None, start) while True: ion_event = (yield trans.event) if trans.event is None: if ion_event is None: raise TypeError('Cannot start Writer with no event') else: if trans.event.type is WriteEventType.HAS_PENDING and ion_event is not None: raise TypeError('Writer expected to receive no event: %r' % (ion_event,)) if trans.event.type is not WriteEventType.HAS_PENDING and ion_event is None: raise TypeError('Writer expected to receive event') if ion_event is not None and ion_event.event_type is IonEventType.INCOMPLETE: raise TypeError('Writer cannot receive INCOMPLETE event') trans = trans.delegate.send(Transition(ion_event, trans.delegate))
python
def writer_trampoline(start): """Provides the co-routine trampoline for a writer state machine. The given co-routine is a state machine that yields :class:`Transition` and takes a :class:`Transition` with a :class:`amazon.ion.core.IonEvent` and the co-routine itself. Notes: A writer delimits its logical flush points with ``WriteEventType.COMPLETE``, depending on the configuration, a user may need to send an ``IonEventType.STREAM_END`` to force this to occur. Args: start: The writer co-routine to initially delegate to. Yields: DataEvent: the result of serialization. Receives :class:`amazon.ion.core.IonEvent` to serialize into :class:`DataEvent`. """ trans = Transition(None, start) while True: ion_event = (yield trans.event) if trans.event is None: if ion_event is None: raise TypeError('Cannot start Writer with no event') else: if trans.event.type is WriteEventType.HAS_PENDING and ion_event is not None: raise TypeError('Writer expected to receive no event: %r' % (ion_event,)) if trans.event.type is not WriteEventType.HAS_PENDING and ion_event is None: raise TypeError('Writer expected to receive event') if ion_event is not None and ion_event.event_type is IonEventType.INCOMPLETE: raise TypeError('Writer cannot receive INCOMPLETE event') trans = trans.delegate.send(Transition(ion_event, trans.delegate))
[ "def", "writer_trampoline", "(", "start", ")", ":", "trans", "=", "Transition", "(", "None", ",", "start", ")", "while", "True", ":", "ion_event", "=", "(", "yield", "trans", ".", "event", ")", "if", "trans", ".", "event", "is", "None", ":", "if", "ion_event", "is", "None", ":", "raise", "TypeError", "(", "'Cannot start Writer with no event'", ")", "else", ":", "if", "trans", ".", "event", ".", "type", "is", "WriteEventType", ".", "HAS_PENDING", "and", "ion_event", "is", "not", "None", ":", "raise", "TypeError", "(", "'Writer expected to receive no event: %r'", "%", "(", "ion_event", ",", ")", ")", "if", "trans", ".", "event", ".", "type", "is", "not", "WriteEventType", ".", "HAS_PENDING", "and", "ion_event", "is", "None", ":", "raise", "TypeError", "(", "'Writer expected to receive event'", ")", "if", "ion_event", "is", "not", "None", "and", "ion_event", ".", "event_type", "is", "IonEventType", ".", "INCOMPLETE", ":", "raise", "TypeError", "(", "'Writer cannot receive INCOMPLETE event'", ")", "trans", "=", "trans", ".", "delegate", ".", "send", "(", "Transition", "(", "ion_event", ",", "trans", ".", "delegate", ")", ")" ]
Provides the co-routine trampoline for a writer state machine. The given co-routine is a state machine that yields :class:`Transition` and takes a :class:`Transition` with a :class:`amazon.ion.core.IonEvent` and the co-routine itself. Notes: A writer delimits its logical flush points with ``WriteEventType.COMPLETE``; depending on the configuration, a user may need to send an ``IonEventType.STREAM_END`` to force this to occur. Args: start: The writer co-routine to initially delegate to. Yields: DataEvent: the result of serialization. Receives :class:`amazon.ion.core.IonEvent` to serialize into :class:`DataEvent`.
[ "Provides", "the", "co", "-", "routine", "trampoline", "for", "a", "writer", "state", "machine", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/writer.py#L79-L111
982
amzn/ion-python
amazon/ion/writer.py
_drain
def _drain(writer, ion_event): """Drain the writer of its pending write events. Args: writer (Coroutine): A writer co-routine. ion_event (amazon.ion.core.IonEvent): The first event to apply to the writer. Yields: DataEvent: Yields each pending data event. """ result_event = _WRITE_EVENT_HAS_PENDING_EMPTY while result_event.type is WriteEventType.HAS_PENDING: result_event = writer.send(ion_event) ion_event = None yield result_event
python
def _drain(writer, ion_event): """Drain the writer of its pending write events. Args: writer (Coroutine): A writer co-routine. ion_event (amazon.ion.core.IonEvent): The first event to apply to the writer. Yields: DataEvent: Yields each pending data event. """ result_event = _WRITE_EVENT_HAS_PENDING_EMPTY while result_event.type is WriteEventType.HAS_PENDING: result_event = writer.send(ion_event) ion_event = None yield result_event
[ "def", "_drain", "(", "writer", ",", "ion_event", ")", ":", "result_event", "=", "_WRITE_EVENT_HAS_PENDING_EMPTY", "while", "result_event", ".", "type", "is", "WriteEventType", ".", "HAS_PENDING", ":", "result_event", "=", "writer", ".", "send", "(", "ion_event", ")", "ion_event", "=", "None", "yield", "result_event" ]
Drain the writer of its pending write events. Args: writer (Coroutine): A writer co-routine. ion_event (amazon.ion.core.IonEvent): The first event to apply to the writer. Yields: DataEvent: Yields each pending data event.
[ "Drain", "the", "writer", "of", "its", "pending", "write", "events", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/writer.py#L117-L131
983
amzn/ion-python
amazon/ion/writer.py
blocking_writer
def blocking_writer(writer, output): """Provides an implementation of using the writer co-routine with a file-like object. Args: writer (Coroutine): A writer co-routine. output (BaseIO): The file-like object to pipe events to. Yields: WriteEventType: Yields when no events are pending. Receives :class:`amazon.ion.core.IonEvent` to write to the ``output``. """ result_type = None while True: ion_event = (yield result_type) for result_event in _drain(writer, ion_event): output.write(result_event.data) result_type = result_event.type
python
def blocking_writer(writer, output): """Provides an implementation of using the writer co-routine with a file-like object. Args: writer (Coroutine): A writer co-routine. output (BaseIO): The file-like object to pipe events to. Yields: WriteEventType: Yields when no events are pending. Receives :class:`amazon.ion.core.IonEvent` to write to the ``output``. """ result_type = None while True: ion_event = (yield result_type) for result_event in _drain(writer, ion_event): output.write(result_event.data) result_type = result_event.type
[ "def", "blocking_writer", "(", "writer", ",", "output", ")", ":", "result_type", "=", "None", "while", "True", ":", "ion_event", "=", "(", "yield", "result_type", ")", "for", "result_event", "in", "_drain", "(", "writer", ",", "ion_event", ")", ":", "output", ".", "write", "(", "result_event", ".", "data", ")", "result_type", "=", "result_event", ".", "type" ]
Provides an implementation of using the writer co-routine with a file-like object. Args: writer (Coroutine): A writer co-routine. output (BaseIO): The file-like object to pipe events to. Yields: WriteEventType: Yields when no events are pending. Receives :class:`amazon.ion.core.IonEvent` to write to the ``output``.
[ "Provides", "an", "implementation", "of", "using", "the", "writer", "co", "-", "routine", "with", "a", "file", "-", "like", "object", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/writer.py#L135-L152
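A rough end-to-end sketch combining these records: raw_writer() builds the trampolined co-routine and blocking_writer() drives it against a file-like object. The event values are made up:

import io
from amazon.ion.core import IonEvent, IonEventType, IonType
from amazon.ion.writer import blocking_writer
from amazon.ion.writer_text import raw_writer

out = io.BytesIO()
writer = blocking_writer(raw_writer(), out)
writer.send(IonEvent(IonEventType.SCALAR, IonType.INT, 42))
writer.send(IonEvent(IonEventType.STREAM_END))   # forces the flush point
print(out.getvalue())                            # roughly b'42'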
984
amzn/ion-python
amazon/ion/simple_types.py
_IonNature.from_event
def from_event(cls, ion_event): """Constructs the given native extension from the properties of an event. Args: ion_event (IonEvent): The event to construct the native value from. """ if ion_event.value is not None: args, kwargs = cls._to_constructor_args(ion_event.value) else: # if value is None (i.e. this is a container event), args must be empty or initialization of the # underlying container will fail. args, kwargs = (), {} value = cls(*args, **kwargs) value.ion_event = ion_event value.ion_type = ion_event.ion_type value.ion_annotations = ion_event.annotations return value
python
def from_event(cls, ion_event): """Constructs the given native extension from the properties of an event. Args: ion_event (IonEvent): The event to construct the native value from. """ if ion_event.value is not None: args, kwargs = cls._to_constructor_args(ion_event.value) else: # if value is None (i.e. this is a container event), args must be empty or initialization of the # underlying container will fail. args, kwargs = (), {} value = cls(*args, **kwargs) value.ion_event = ion_event value.ion_type = ion_event.ion_type value.ion_annotations = ion_event.annotations return value
[ "def", "from_event", "(", "cls", ",", "ion_event", ")", ":", "if", "ion_event", ".", "value", "is", "not", "None", ":", "args", ",", "kwargs", "=", "cls", ".", "_to_constructor_args", "(", "ion_event", ".", "value", ")", "else", ":", "# if value is None (i.e. this is a container event), args must be empty or initialization of the", "# underlying container will fail.", "args", ",", "kwargs", "=", "(", ")", ",", "{", "}", "value", "=", "cls", "(", "*", "args", ",", "*", "*", "kwargs", ")", "value", ".", "ion_event", "=", "ion_event", "value", ".", "ion_type", "=", "ion_event", ".", "ion_type", "value", ".", "ion_annotations", "=", "ion_event", ".", "annotations", "return", "value" ]
Constructs the given native extension from the properties of an event. Args: ion_event (IonEvent): The event to construct the native value from.
[ "Constructs", "the", "given", "native", "extension", "from", "the", "properties", "of", "an", "event", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/simple_types.py#L74-L90
985
amzn/ion-python
amazon/ion/simple_types.py
_IonNature.from_value
def from_value(cls, ion_type, value, annotations=()): """Constructs a value as a copy with an associated Ion type and annotations. Args: ion_type (IonType): The associated Ion type. value (Any): The value to construct from, generally of type ``cls``. annotations (Sequence[unicode]): The sequence Unicode strings decorating this value. """ if value is None: value = IonPyNull() else: args, kwargs = cls._to_constructor_args(value) value = cls(*args, **kwargs) value.ion_event = None value.ion_type = ion_type value.ion_annotations = annotations return value
python
def from_value(cls, ion_type, value, annotations=()): """Constructs a value as a copy with an associated Ion type and annotations. Args: ion_type (IonType): The associated Ion type. value (Any): The value to construct from, generally of type ``cls``. annotations (Sequence[unicode]): The sequence Unicode strings decorating this value. """ if value is None: value = IonPyNull() else: args, kwargs = cls._to_constructor_args(value) value = cls(*args, **kwargs) value.ion_event = None value.ion_type = ion_type value.ion_annotations = annotations return value
[ "def", "from_value", "(", "cls", ",", "ion_type", ",", "value", ",", "annotations", "=", "(", ")", ")", ":", "if", "value", "is", "None", ":", "value", "=", "IonPyNull", "(", ")", "else", ":", "args", ",", "kwargs", "=", "cls", ".", "_to_constructor_args", "(", "value", ")", "value", "=", "cls", "(", "*", "args", ",", "*", "*", "kwargs", ")", "value", ".", "ion_event", "=", "None", "value", ".", "ion_type", "=", "ion_type", "value", ".", "ion_annotations", "=", "annotations", "return", "value" ]
Constructs a value as a copy with an associated Ion type and annotations. Args: ion_type (IonType): The associated Ion type. value (Any): The value to construct from, generally of type ``cls``. annotations (Sequence[unicode]): The sequence of Unicode strings decorating this value.
[ "Constructs", "a", "value", "as", "a", "copy", "with", "an", "associated", "Ion", "type", "and", "annotations", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/simple_types.py#L93-L109
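A hedged sketch of the _IonNature helpers using one of the concrete wrappers from simple_types (IonPyInt here); the annotation text is made up:

from amazon.ion.core import IonEventType, IonType
from amazon.ion.simple_types import IonPyInt

value = IonPyInt.from_value(IonType.INT, 7, annotations=(u'note',))
assert value == 7 and value.ion_type is IonType.INT

event = value.to_event(IonEventType.SCALAR, depth=0)   # see to_event below
assert event.value == 7 and event.annotations == (u'note',)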
986
amzn/ion-python
amazon/ion/simple_types.py
_IonNature.to_event
def to_event(self, event_type, field_name=None, depth=None): """Constructs an IonEvent from this _IonNature value. Args: event_type (IonEventType): The type of the resulting event. field_name (Optional[text]): The field name associated with this value, if any. depth (Optional[int]): The depth of this value. Returns: An IonEvent with the properties from this value. """ if self.ion_event is None: value = self if isinstance(self, IonPyNull): value = None self.ion_event = IonEvent(event_type, ion_type=self.ion_type, value=value, field_name=field_name, annotations=self.ion_annotations, depth=depth) return self.ion_event
python
def to_event(self, event_type, field_name=None, depth=None): """Constructs an IonEvent from this _IonNature value. Args: event_type (IonEventType): The type of the resulting event. field_name (Optional[text]): The field name associated with this value, if any. depth (Optional[int]): The depth of this value. Returns: An IonEvent with the properties from this value. """ if self.ion_event is None: value = self if isinstance(self, IonPyNull): value = None self.ion_event = IonEvent(event_type, ion_type=self.ion_type, value=value, field_name=field_name, annotations=self.ion_annotations, depth=depth) return self.ion_event
[ "def", "to_event", "(", "self", ",", "event_type", ",", "field_name", "=", "None", ",", "depth", "=", "None", ")", ":", "if", "self", ".", "ion_event", "is", "None", ":", "value", "=", "self", "if", "isinstance", "(", "self", ",", "IonPyNull", ")", ":", "value", "=", "None", "self", ".", "ion_event", "=", "IonEvent", "(", "event_type", ",", "ion_type", "=", "self", ".", "ion_type", ",", "value", "=", "value", ",", "field_name", "=", "field_name", ",", "annotations", "=", "self", ".", "ion_annotations", ",", "depth", "=", "depth", ")", "return", "self", ".", "ion_event" ]
Constructs an IonEvent from this _IonNature value. Args: event_type (IonEventType): The type of the resulting event. field_name (Optional[text]): The field name associated with this value, if any. depth (Optional[int]): The depth of this value. Returns: An IonEvent with the properties from this value.
[ "Constructs", "an", "IonEvent", "from", "this", "_IonNature", "value", "." ]
0b21fa3ba7755f55f745e4aa970d86343b82449d
https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/simple_types.py#L111-L128
987
XML-Security/signxml
signxml/__init__.py
_remove_sig
def _remove_sig(signature, idempotent=False): """ Remove the signature node from its parent, keeping any tail element. This is needed for eneveloped signatures. :param signature: Signature to remove from payload :type signature: XML ElementTree Element :param idempotent: If True, don't raise an error if signature is already detached from parent. :type idempotent: boolean """ try: signaturep = next(signature.iterancestors()) except StopIteration: if idempotent: return raise ValueError("Can't remove the root signature node") if signature.tail is not None: try: signatures = next(signature.itersiblings(preceding=True)) except StopIteration: if signaturep.text is not None: signaturep.text = signaturep.text + signature.tail else: signaturep.text = signature.tail else: if signatures.tail is not None: signatures.tail = signatures.tail + signature.tail else: signatures.tail = signature.tail signaturep.remove(signature)
python
def _remove_sig(signature, idempotent=False): """ Remove the signature node from its parent, keeping any tail element. This is needed for eneveloped signatures. :param signature: Signature to remove from payload :type signature: XML ElementTree Element :param idempotent: If True, don't raise an error if signature is already detached from parent. :type idempotent: boolean """ try: signaturep = next(signature.iterancestors()) except StopIteration: if idempotent: return raise ValueError("Can't remove the root signature node") if signature.tail is not None: try: signatures = next(signature.itersiblings(preceding=True)) except StopIteration: if signaturep.text is not None: signaturep.text = signaturep.text + signature.tail else: signaturep.text = signature.tail else: if signatures.tail is not None: signatures.tail = signatures.tail + signature.tail else: signatures.tail = signature.tail signaturep.remove(signature)
[ "def", "_remove_sig", "(", "signature", ",", "idempotent", "=", "False", ")", ":", "try", ":", "signaturep", "=", "next", "(", "signature", ".", "iterancestors", "(", ")", ")", "except", "StopIteration", ":", "if", "idempotent", ":", "return", "raise", "ValueError", "(", "\"Can't remove the root signature node\"", ")", "if", "signature", ".", "tail", "is", "not", "None", ":", "try", ":", "signatures", "=", "next", "(", "signature", ".", "itersiblings", "(", "preceding", "=", "True", ")", ")", "except", "StopIteration", ":", "if", "signaturep", ".", "text", "is", "not", "None", ":", "signaturep", ".", "text", "=", "signaturep", ".", "text", "+", "signature", ".", "tail", "else", ":", "signaturep", ".", "text", "=", "signature", ".", "tail", "else", ":", "if", "signatures", ".", "tail", "is", "not", "None", ":", "signatures", ".", "tail", "=", "signatures", ".", "tail", "+", "signature", ".", "tail", "else", ":", "signatures", ".", "tail", "=", "signature", ".", "tail", "signaturep", ".", "remove", "(", "signature", ")" ]
Remove the signature node from its parent, keeping any tail element. This is needed for enveloped signatures. :param signature: Signature to remove from payload :type signature: XML ElementTree Element :param idempotent: If True, don't raise an error if signature is already detached from parent. :type idempotent: boolean
[ "Remove", "the", "signature", "node", "from", "its", "parent", "keeping", "any", "tail", "element", ".", "This", "is", "needed", "for", "enveloped", "signatures", "." ]
16503242617e9b25e5c2c9ced5ef18a06ffde146
https://github.com/XML-Security/signxml/blob/16503242617e9b25e5c2c9ced5ef18a06ffde146/signxml/__init__.py#L39-L69
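An illustrative lxml sketch of the tail-preserving behaviour described above; the element names are made up and _remove_sig is the private helper from this record:

from lxml import etree
from signxml import _remove_sig

doc = etree.fromstring(
    '<Envelope><Data>hi</Data><Signature>sig</Signature>tail text</Envelope>')
_remove_sig(doc.find('Signature'))
# The removed node's tail is folded onto the preceding sibling:
assert etree.tostring(doc) == b'<Envelope><Data>hi</Data>tail text</Envelope>'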
988
cenkalti/github-flask
flask_github.py
GitHub.authorize
def authorize(self, scope=None, redirect_uri=None, state=None): """ Redirect to GitHub and request access to a user's data. :param scope: List of `Scopes`_ for which to request access, formatted as a string or comma delimited list of scopes as a string. Defaults to ``None``, resulting in granting read-only access to public information (includes public user profile info, public repository info, and gists). For more information on this, see the examples in presented in the GitHub API `Scopes`_ documentation, or see the examples provided below. :type scope: str :param redirect_uri: `Redirect URL`_ to which to redirect the user after authentication. Defaults to ``None``, resulting in using the default redirect URL for the OAuth application as defined in GitHub. This URL can differ from the callback URL defined in your GitHub application, however it must be a subdirectory of the specified callback URL, otherwise raises a :class:`GitHubError`. For more information on this, see the examples in presented in the GitHub API `Redirect URL`_ documentation, or see the example provided below. :type redirect_uri: str :param state: An unguessable random string. It is used to protect against cross-site request forgery attacks. :type state: str For example, if we wanted to use this method to get read/write access to user profile information, in addition to read-write access to code, commit status, etc., we would need to use the `Scopes`_ ``user`` and ``repo`` when calling this method. .. code-block:: python github.authorize(scope="user,repo") Additionally, if we wanted to specify a different redirect URL following authorization. .. code-block:: python # Our application's callback URL is "http://example.com/callback" redirect_uri="http://example.com/callback/my/path" github.authorize(scope="user,repo", redirect_uri=redirect_uri) .. _Scopes: https://developer.github.com/v3/oauth/#scopes .. _Redirect URL: https://developer.github.com/v3/oauth/#redirect-urls """ _logger.debug("Called authorize()") params = {'client_id': self.client_id} if scope: params['scope'] = scope if redirect_uri: params['redirect_uri'] = redirect_uri if state: params['state'] = state url = self.auth_url + 'authorize?' + urlencode(params) _logger.debug("Redirecting to %s", url) return redirect(url)
python
def authorize(self, scope=None, redirect_uri=None, state=None): """ Redirect to GitHub and request access to a user's data. :param scope: List of `Scopes`_ for which to request access, formatted as a string or comma delimited list of scopes as a string. Defaults to ``None``, resulting in granting read-only access to public information (includes public user profile info, public repository info, and gists). For more information on this, see the examples in presented in the GitHub API `Scopes`_ documentation, or see the examples provided below. :type scope: str :param redirect_uri: `Redirect URL`_ to which to redirect the user after authentication. Defaults to ``None``, resulting in using the default redirect URL for the OAuth application as defined in GitHub. This URL can differ from the callback URL defined in your GitHub application, however it must be a subdirectory of the specified callback URL, otherwise raises a :class:`GitHubError`. For more information on this, see the examples in presented in the GitHub API `Redirect URL`_ documentation, or see the example provided below. :type redirect_uri: str :param state: An unguessable random string. It is used to protect against cross-site request forgery attacks. :type state: str For example, if we wanted to use this method to get read/write access to user profile information, in addition to read-write access to code, commit status, etc., we would need to use the `Scopes`_ ``user`` and ``repo`` when calling this method. .. code-block:: python github.authorize(scope="user,repo") Additionally, if we wanted to specify a different redirect URL following authorization. .. code-block:: python # Our application's callback URL is "http://example.com/callback" redirect_uri="http://example.com/callback/my/path" github.authorize(scope="user,repo", redirect_uri=redirect_uri) .. _Scopes: https://developer.github.com/v3/oauth/#scopes .. _Redirect URL: https://developer.github.com/v3/oauth/#redirect-urls """ _logger.debug("Called authorize()") params = {'client_id': self.client_id} if scope: params['scope'] = scope if redirect_uri: params['redirect_uri'] = redirect_uri if state: params['state'] = state url = self.auth_url + 'authorize?' + urlencode(params) _logger.debug("Redirecting to %s", url) return redirect(url)
[ "def", "authorize", "(", "self", ",", "scope", "=", "None", ",", "redirect_uri", "=", "None", ",", "state", "=", "None", ")", ":", "_logger", ".", "debug", "(", "\"Called authorize()\"", ")", "params", "=", "{", "'client_id'", ":", "self", ".", "client_id", "}", "if", "scope", ":", "params", "[", "'scope'", "]", "=", "scope", "if", "redirect_uri", ":", "params", "[", "'redirect_uri'", "]", "=", "redirect_uri", "if", "state", ":", "params", "[", "'state'", "]", "=", "state", "url", "=", "self", ".", "auth_url", "+", "'authorize?'", "+", "urlencode", "(", "params", ")", "_logger", ".", "debug", "(", "\"Redirecting to %s\"", ",", "url", ")", "return", "redirect", "(", "url", ")" ]
Redirect to GitHub and request access to a user's data. :param scope: List of `Scopes`_ for which to request access, formatted as a string or comma delimited list of scopes as a string. Defaults to ``None``, resulting in granting read-only access to public information (includes public user profile info, public repository info, and gists). For more information on this, see the examples presented in the GitHub API `Scopes`_ documentation, or see the examples provided below. :type scope: str :param redirect_uri: `Redirect URL`_ to which to redirect the user after authentication. Defaults to ``None``, resulting in using the default redirect URL for the OAuth application as defined in GitHub. This URL can differ from the callback URL defined in your GitHub application, however it must be a subdirectory of the specified callback URL; otherwise a :class:`GitHubError` is raised. For more information on this, see the examples presented in the GitHub API `Redirect URL`_ documentation, or see the example provided below. :type redirect_uri: str :param state: An unguessable random string. It is used to protect against cross-site request forgery attacks. :type state: str For example, if we wanted to use this method to get read/write access to user profile information, in addition to read-write access to code, commit status, etc., we would need to use the `Scopes`_ ``user`` and ``repo`` when calling this method. .. code-block:: python github.authorize(scope="user,repo") Additionally, we could specify a different redirect URL to use following authorization. .. code-block:: python # Our application's callback URL is "http://example.com/callback" redirect_uri="http://example.com/callback/my/path" github.authorize(scope="user,repo", redirect_uri=redirect_uri) .. _Scopes: https://developer.github.com/v3/oauth/#scopes .. _Redirect URL: https://developer.github.com/v3/oauth/#redirect-urls
[ "Redirect", "to", "GitHub", "and", "request", "access", "to", "a", "user", "s", "data", "." ]
9f58d61b7d328cef857edbb5c64a5d3f716367cb
https://github.com/cenkalti/github-flask/blob/9f58d61b7d328cef857edbb5c64a5d3f716367cb/flask_github.py#L104-L168
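A minimal Flask sketch built around authorize(); the config values and route are placeholders, and the GITHUB_CLIENT_ID / GITHUB_CLIENT_SECRET keys are assumed to be the extension's usual settings:

from flask import Flask
from flask_github import GitHub

app = Flask(__name__)
app.config['GITHUB_CLIENT_ID'] = 'xxxxxxxx'        # placeholder
app.config['GITHUB_CLIENT_SECRET'] = 'yyyyyyyy'    # placeholder
github = GitHub(app)

@app.route('/login')
def login():
    # Redirects the browser to GitHub's OAuth page with the requested scopes.
    return github.authorize(scope='user,repo')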
989
cenkalti/github-flask
flask_github.py
GitHub.authorized_handler
def authorized_handler(self, f): """ Decorator for the route that is used as the callback for authorizing with GitHub. This callback URL can be set in the settings for the app or passed in during authorization. """ @wraps(f) def decorated(*args, **kwargs): if 'code' in request.args: data = self._handle_response() else: data = self._handle_invalid_response() return f(*((data,) + args), **kwargs) return decorated
python
def authorized_handler(self, f): """ Decorator for the route that is used as the callback for authorizing with GitHub. This callback URL can be set in the settings for the app or passed in during authorization. """ @wraps(f) def decorated(*args, **kwargs): if 'code' in request.args: data = self._handle_response() else: data = self._handle_invalid_response() return f(*((data,) + args), **kwargs) return decorated
[ "def", "authorized_handler", "(", "self", ",", "f", ")", ":", "@", "wraps", "(", "f", ")", "def", "decorated", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "'code'", "in", "request", ".", "args", ":", "data", "=", "self", ".", "_handle_response", "(", ")", "else", ":", "data", "=", "self", ".", "_handle_invalid_response", "(", ")", "return", "f", "(", "*", "(", "(", "data", ",", ")", "+", "args", ")", ",", "*", "*", "kwargs", ")", "return", "decorated" ]
Decorator for the route that is used as the callback for authorizing with GitHub. This callback URL can be set in the settings for the app or passed in during authorization.
[ "Decorator", "for", "the", "route", "that", "is", "used", "as", "the", "callback", "for", "authorizing", "with", "GitHub", ".", "This", "callback", "URL", "can", "be", "set", "in", "the", "settings", "for", "the", "app", "or", "passed", "in", "during", "authorization", "." ]
9f58d61b7d328cef857edbb5c64a5d3f716367cb
https://github.com/cenkalti/github-flask/blob/9f58d61b7d328cef857edbb5c64a5d3f716367cb/flask_github.py#L170-L184
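Continuing the hypothetical app sketched above, the callback route decorated with authorized_handler receives the token (or None on failure) as its first argument:

from flask import redirect, session

@app.route('/github-callback')
@github.authorized_handler
def authorized(access_token):
    if access_token is None:
        return 'Authorization failed.'
    session['github_token'] = access_token   # persist however the app prefers
    return redirect('/')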
990
cenkalti/github-flask
flask_github.py
GitHub._handle_response
def _handle_response(self): """ Handles response after the redirect to GitHub. This response determines if the user has allowed the this application access. If we were then we send a POST request for the access_key used to authenticate requests to GitHub. """ _logger.debug("Handling response from GitHub") params = { 'code': request.args.get('code'), 'client_id': self.client_id, 'client_secret': self.client_secret } url = self.auth_url + 'access_token' _logger.debug("POSTing to %s", url) _logger.debug(params) response = self.session.post(url, data=params) data = parse_qs(response.content) _logger.debug("response.content = %s", data) for k, v in data.items(): if len(v) == 1: data[k] = v[0] token = data.get(b'access_token', None) if token is not None: token = token.decode('ascii') return token
python
def _handle_response(self): """ Handles response after the redirect to GitHub. This response determines if the user has allowed the this application access. If we were then we send a POST request for the access_key used to authenticate requests to GitHub. """ _logger.debug("Handling response from GitHub") params = { 'code': request.args.get('code'), 'client_id': self.client_id, 'client_secret': self.client_secret } url = self.auth_url + 'access_token' _logger.debug("POSTing to %s", url) _logger.debug(params) response = self.session.post(url, data=params) data = parse_qs(response.content) _logger.debug("response.content = %s", data) for k, v in data.items(): if len(v) == 1: data[k] = v[0] token = data.get(b'access_token', None) if token is not None: token = token.decode('ascii') return token
[ "def", "_handle_response", "(", "self", ")", ":", "_logger", ".", "debug", "(", "\"Handling response from GitHub\"", ")", "params", "=", "{", "'code'", ":", "request", ".", "args", ".", "get", "(", "'code'", ")", ",", "'client_id'", ":", "self", ".", "client_id", ",", "'client_secret'", ":", "self", ".", "client_secret", "}", "url", "=", "self", ".", "auth_url", "+", "'access_token'", "_logger", ".", "debug", "(", "\"POSTing to %s\"", ",", "url", ")", "_logger", ".", "debug", "(", "params", ")", "response", "=", "self", ".", "session", ".", "post", "(", "url", ",", "data", "=", "params", ")", "data", "=", "parse_qs", "(", "response", ".", "content", ")", "_logger", ".", "debug", "(", "\"response.content = %s\"", ",", "data", ")", "for", "k", ",", "v", "in", "data", ".", "items", "(", ")", ":", "if", "len", "(", "v", ")", "==", "1", ":", "data", "[", "k", "]", "=", "v", "[", "0", "]", "token", "=", "data", ".", "get", "(", "b'access_token'", ",", "None", ")", "if", "token", "is", "not", "None", ":", "token", "=", "token", ".", "decode", "(", "'ascii'", ")", "return", "token" ]
Handles the response after the redirect to GitHub. This response determines whether the user has allowed this application access. If so, we send a POST request for the access token used to authenticate requests to GitHub.
[ "Handles", "the", "response", "after", "the", "redirect", "to", "GitHub", ".", "This", "response", "determines", "whether", "the", "user", "has", "allowed", "this", "application", "access", ".", "If", "so", "we", "send", "a", "POST", "request", "for", "the", "access", "token", "used", "to", "authenticate", "requests", "to", "GitHub", "." ]
9f58d61b7d328cef857edbb5c64a5d3f716367cb
https://github.com/cenkalti/github-flask/blob/9f58d61b7d328cef857edbb5c64a5d3f716367cb/flask_github.py#L186-L212
991
ethereum/pyrlp
rlp/lazy.py
decode_lazy
def decode_lazy(rlp, sedes=None, **sedes_kwargs): """Decode an RLP encoded object in a lazy fashion. If the encoded object is a bytestring, this function acts similar to :func:`rlp.decode`. If it is a list however, a :class:`LazyList` is returned instead. This object will decode the string lazily, avoiding both horizontal and vertical traversing as much as possible. The way `sedes` is applied depends on the decoded object: If it is a string `sedes` deserializes it as a whole; if it is a list, each element is deserialized individually. In both cases, `sedes_kwargs` are passed on. Note that, if a deserializer is used, only "horizontal" but not "vertical lazyness" can be preserved. :param rlp: the RLP string to decode :param sedes: an object implementing a method ``deserialize(code)`` which is used as described above, or ``None`` if no deserialization should be performed :param \*\*sedes_kwargs: additional keyword arguments that will be passed to the deserializers :returns: either the already decoded and deserialized object (if encoded as a string) or an instance of :class:`rlp.LazyList` """ item, end = consume_item_lazy(rlp, 0) if end != len(rlp): raise DecodingError('RLP length prefix announced wrong length', rlp) if isinstance(item, LazyList): item.sedes = sedes item.sedes_kwargs = sedes_kwargs return item elif sedes: return sedes.deserialize(item, **sedes_kwargs) else: return item
python
def decode_lazy(rlp, sedes=None, **sedes_kwargs): """Decode an RLP encoded object in a lazy fashion. If the encoded object is a bytestring, this function acts similar to :func:`rlp.decode`. If it is a list however, a :class:`LazyList` is returned instead. This object will decode the string lazily, avoiding both horizontal and vertical traversing as much as possible. The way `sedes` is applied depends on the decoded object: If it is a string `sedes` deserializes it as a whole; if it is a list, each element is deserialized individually. In both cases, `sedes_kwargs` are passed on. Note that, if a deserializer is used, only "horizontal" but not "vertical lazyness" can be preserved. :param rlp: the RLP string to decode :param sedes: an object implementing a method ``deserialize(code)`` which is used as described above, or ``None`` if no deserialization should be performed :param \*\*sedes_kwargs: additional keyword arguments that will be passed to the deserializers :returns: either the already decoded and deserialized object (if encoded as a string) or an instance of :class:`rlp.LazyList` """ item, end = consume_item_lazy(rlp, 0) if end != len(rlp): raise DecodingError('RLP length prefix announced wrong length', rlp) if isinstance(item, LazyList): item.sedes = sedes item.sedes_kwargs = sedes_kwargs return item elif sedes: return sedes.deserialize(item, **sedes_kwargs) else: return item
[ "def", "decode_lazy", "(", "rlp", ",", "sedes", "=", "None", ",", "*", "*", "sedes_kwargs", ")", ":", "item", ",", "end", "=", "consume_item_lazy", "(", "rlp", ",", "0", ")", "if", "end", "!=", "len", "(", "rlp", ")", ":", "raise", "DecodingError", "(", "'RLP length prefix announced wrong length'", ",", "rlp", ")", "if", "isinstance", "(", "item", ",", "LazyList", ")", ":", "item", ".", "sedes", "=", "sedes", "item", ".", "sedes_kwargs", "=", "sedes_kwargs", "return", "item", "elif", "sedes", ":", "return", "sedes", ".", "deserialize", "(", "item", ",", "*", "*", "sedes_kwargs", ")", "else", ":", "return", "item" ]
Decode an RLP encoded object in a lazy fashion. If the encoded object is a bytestring, this function acts similarly to :func:`rlp.decode`. If it is a list, however, a :class:`LazyList` is returned instead. This object will decode the string lazily, avoiding both horizontal and vertical traversing as much as possible. The way `sedes` is applied depends on the decoded object: If it is a string `sedes` deserializes it as a whole; if it is a list, each element is deserialized individually. In both cases, `sedes_kwargs` are passed on. Note that, if a deserializer is used, only "horizontal" but not "vertical laziness" can be preserved. :param rlp: the RLP string to decode :param sedes: an object implementing a method ``deserialize(code)`` which is used as described above, or ``None`` if no deserialization should be performed :param \*\*sedes_kwargs: additional keyword arguments that will be passed to the deserializers :returns: either the already decoded and deserialized object (if encoded as a string) or an instance of :class:`rlp.LazyList`
[ "Decode", "an", "RLP", "encoded", "object", "in", "a", "lazy", "fashion", "." ]
bb898f8056da3973204c699621350bf9565e43df
https://github.com/ethereum/pyrlp/blob/bb898f8056da3973204c699621350bf9565e43df/rlp/lazy.py#L8-L41
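A short sketch of lazy decoding; only the public rlp.encode / rlp.decode_lazy API is assumed:

import rlp

lazy = rlp.decode_lazy(rlp.encode([b'a', [b'b', b'c']]))
assert len(lazy) == 2
assert lazy[0] == b'a'       # decoded on first access
assert lazy[1][1] == b'c'    # nested items stay lazy until touched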
992
ethereum/pyrlp
rlp/lazy.py
consume_item_lazy
def consume_item_lazy(rlp, start): """Read an item from an RLP string lazily. If the length prefix announces a string, the string is read; if it announces a list, a :class:`LazyList` is created. :param rlp: the rlp string to read from :param start: the position at which to start reading :returns: a tuple ``(item, end)`` where ``item`` is the read string or a :class:`LazyList` and ``end`` is the position of the first unprocessed byte. """ p, t, l, s = consume_length_prefix(rlp, start) if t is bytes: item, _, end = consume_payload(rlp, p, s, bytes, l) return item, end else: assert t is list return LazyList(rlp, s, s + l), s + l
python
def consume_item_lazy(rlp, start): """Read an item from an RLP string lazily. If the length prefix announces a string, the string is read; if it announces a list, a :class:`LazyList` is created. :param rlp: the rlp string to read from :param start: the position at which to start reading :returns: a tuple ``(item, end)`` where ``item`` is the read string or a :class:`LazyList` and ``end`` is the position of the first unprocessed byte. """ p, t, l, s = consume_length_prefix(rlp, start) if t is bytes: item, _, end = consume_payload(rlp, p, s, bytes, l) return item, end else: assert t is list return LazyList(rlp, s, s + l), s + l
[ "def", "consume_item_lazy", "(", "rlp", ",", "start", ")", ":", "p", ",", "t", ",", "l", ",", "s", "=", "consume_length_prefix", "(", "rlp", ",", "start", ")", "if", "t", "is", "bytes", ":", "item", ",", "_", ",", "end", "=", "consume_payload", "(", "rlp", ",", "p", ",", "s", ",", "bytes", ",", "l", ")", "return", "item", ",", "end", "else", ":", "assert", "t", "is", "list", "return", "LazyList", "(", "rlp", ",", "s", ",", "s", "+", "l", ")", ",", "s", "+", "l" ]
Read an item from an RLP string lazily. If the length prefix announces a string, the string is read; if it announces a list, a :class:`LazyList` is created. :param rlp: the rlp string to read from :param start: the position at which to start reading :returns: a tuple ``(item, end)`` where ``item`` is the read string or a :class:`LazyList` and ``end`` is the position of the first unprocessed byte.
[ "Read", "an", "item", "from", "an", "RLP", "string", "lazily", "." ]
bb898f8056da3973204c699621350bf9565e43df
https://github.com/ethereum/pyrlp/blob/bb898f8056da3973204c699621350bf9565e43df/rlp/lazy.py#L44-L62
993
ethereum/pyrlp
rlp/lazy.py
peek
def peek(rlp, index, sedes=None): """Get a specific element from an rlp encoded nested list. This function uses :func:`rlp.decode_lazy` and, thus, decodes only the necessary parts of the string. Usage example:: >>> import rlp >>> rlpdata = rlp.encode([1, 2, [3, [4, 5]]]) >>> rlp.peek(rlpdata, 0, rlp.sedes.big_endian_int) 1 >>> rlp.peek(rlpdata, [2, 0], rlp.sedes.big_endian_int) 3 :param rlp: the rlp string :param index: the index of the element to peek at (can be a list for nested data) :param sedes: a sedes used to deserialize the peeked at object, or `None` if no deserialization should be performed :raises: :exc:`IndexError` if `index` is invalid (out of range or too many levels) """ ll = decode_lazy(rlp) if not isinstance(index, Iterable): index = [index] for i in index: if isinstance(ll, Atomic): raise IndexError('Too many indices given') ll = ll[i] if sedes: return sedes.deserialize(ll) else: return ll
python
def peek(rlp, index, sedes=None): """Get a specific element from an rlp encoded nested list. This function uses :func:`rlp.decode_lazy` and, thus, decodes only the necessary parts of the string. Usage example:: >>> import rlp >>> rlpdata = rlp.encode([1, 2, [3, [4, 5]]]) >>> rlp.peek(rlpdata, 0, rlp.sedes.big_endian_int) 1 >>> rlp.peek(rlpdata, [2, 0], rlp.sedes.big_endian_int) 3 :param rlp: the rlp string :param index: the index of the element to peek at (can be a list for nested data) :param sedes: a sedes used to deserialize the peeked at object, or `None` if no deserialization should be performed :raises: :exc:`IndexError` if `index` is invalid (out of range or too many levels) """ ll = decode_lazy(rlp) if not isinstance(index, Iterable): index = [index] for i in index: if isinstance(ll, Atomic): raise IndexError('Too many indices given') ll = ll[i] if sedes: return sedes.deserialize(ll) else: return ll
[ "def", "peek", "(", "rlp", ",", "index", ",", "sedes", "=", "None", ")", ":", "ll", "=", "decode_lazy", "(", "rlp", ")", "if", "not", "isinstance", "(", "index", ",", "Iterable", ")", ":", "index", "=", "[", "index", "]", "for", "i", "in", "index", ":", "if", "isinstance", "(", "ll", ",", "Atomic", ")", ":", "raise", "IndexError", "(", "'Too many indices given'", ")", "ll", "=", "ll", "[", "i", "]", "if", "sedes", ":", "return", "sedes", ".", "deserialize", "(", "ll", ")", "else", ":", "return", "ll" ]
Get a specific element from an rlp encoded nested list. This function uses :func:`rlp.decode_lazy` and, thus, decodes only the necessary parts of the string. Usage example:: >>> import rlp >>> rlpdata = rlp.encode([1, 2, [3, [4, 5]]]) >>> rlp.peek(rlpdata, 0, rlp.sedes.big_endian_int) 1 >>> rlp.peek(rlpdata, [2, 0], rlp.sedes.big_endian_int) 3 :param rlp: the rlp string :param index: the index of the element to peek at (can be a list for nested data) :param sedes: a sedes used to deserialize the peeked at object, or `None` if no deserialization should be performed :raises: :exc:`IndexError` if `index` is invalid (out of range or too many levels)
[ "Get", "a", "specific", "element", "from", "an", "rlp", "encoded", "nested", "list", "." ]
bb898f8056da3973204c699621350bf9565e43df
https://github.com/ethereum/pyrlp/blob/bb898f8056da3973204c699621350bf9565e43df/rlp/lazy.py#L138-L171
994
ethereum/pyrlp
rlp/sedes/text.py
Text.fixed_length
def fixed_length(cls, l, allow_empty=False): """Create a sedes for text data with exactly `l` encoded characters.""" return cls(l, l, allow_empty=allow_empty)
python
def fixed_length(cls, l, allow_empty=False): """Create a sedes for text data with exactly `l` encoded characters.""" return cls(l, l, allow_empty=allow_empty)
[ "def", "fixed_length", "(", "cls", ",", "l", ",", "allow_empty", "=", "False", ")", ":", "return", "cls", "(", "l", ",", "l", ",", "allow_empty", "=", "allow_empty", ")" ]
Create a sedes for text data with exactly `l` encoded characters.
[ "Create", "a", "sedes", "for", "text", "data", "with", "exactly", "l", "encoded", "characters", "." ]
bb898f8056da3973204c699621350bf9565e43df
https://github.com/ethereum/pyrlp/blob/bb898f8056da3973204c699621350bf9565e43df/rlp/sedes/text.py#L24-L26
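A hedged usage sketch for the classmethod above. The import path follows the record's own module path (rlp/sedes/text.py); that the length check applies to the UTF-8 encoding and that violations raise SerializationError are assumptions based on the "encoded characters" wording, not statements about this exact revision.

from rlp.sedes.text import Text
from rlp.exceptions import SerializationError

three_chars = Text.fixed_length(3)           # min and max encoded length are both 3
encoded = three_chars.serialize("abc")       # expected to be b'abc'
assert three_chars.deserialize(encoded) == "abc"

try:
    three_chars.serialize("toolong")         # wrong encoded length should be rejected
except SerializationError:
    pass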
995
ethereum/pyrlp
rlp/sedes/serializable.py
_eq
def _eq(left, right): """ Equality comparison that allows for equality between tuple and list types with equivalent elements. """ if isinstance(left, (tuple, list)) and isinstance(right, (tuple, list)): return len(left) == len(right) and all(_eq(*pair) for pair in zip(left, right)) else: return left == right
python
def _eq(left, right): """ Equality comparison that allows for equality between tuple and list types with equivalent elements. """ if isinstance(left, (tuple, list)) and isinstance(right, (tuple, list)): return len(left) == len(right) and all(_eq(*pair) for pair in zip(left, right)) else: return left == right
[ "def", "_eq", "(", "left", ",", "right", ")", ":", "if", "isinstance", "(", "left", ",", "(", "tuple", ",", "list", ")", ")", "and", "isinstance", "(", "right", ",", "(", "tuple", ",", "list", ")", ")", ":", "return", "len", "(", "left", ")", "==", "len", "(", "right", ")", "and", "all", "(", "_eq", "(", "*", "pair", ")", "for", "pair", "in", "zip", "(", "left", ",", "right", ")", ")", "else", ":", "return", "left", "==", "right" ]
Equality comparison that allows for equality between tuple and list types with equivalent elements.
[ "Equality", "comparison", "that", "allows", "for", "equality", "between", "tuple", "and", "list", "types", "with", "equivalent", "elements", "." ]
bb898f8056da3973204c699621350bf9565e43df
https://github.com/ethereum/pyrlp/blob/bb898f8056da3973204c699621350bf9565e43df/rlp/sedes/serializable.py#L82-L90
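Because the helper is module-private, its behaviour is easiest to illustrate with a self-contained re-statement rather than an import; the sketch below simply restates the function shown above and demonstrates it.

def _eq(left, right):
    # Tuples and lists compare element-wise and recursively; everything else uses ==.
    if isinstance(left, (tuple, list)) and isinstance(right, (tuple, list)):
        return len(left) == len(right) and all(_eq(*pair) for pair in zip(left, right))
    return left == right

assert _eq((1, 2, (3,)), [1, 2, [3]])   # mixed tuple/list nesting is considered equal
assert not _eq((1, 2), [1, 2, 3])       # length mismatch
assert not _eq("ab", ["a", "b"])        # strings are not unpacked into sequences here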
996
ethereum/pyrlp
rlp/sedes/lists.py
is_sequence
def is_sequence(obj): """Check if `obj` is a sequence, but not a string or bytes.""" return isinstance(obj, Sequence) and not ( isinstance(obj, str) or BinaryClass.is_valid_type(obj))
python
def is_sequence(obj): """Check if `obj` is a sequence, but not a string or bytes.""" return isinstance(obj, Sequence) and not ( isinstance(obj, str) or BinaryClass.is_valid_type(obj))
[ "def", "is_sequence", "(", "obj", ")", ":", "return", "isinstance", "(", "obj", ",", "Sequence", ")", "and", "not", "(", "isinstance", "(", "obj", ",", "str", ")", "or", "BinaryClass", ".", "is_valid_type", "(", "obj", ")", ")" ]
Check if `obj` is a sequence, but not a string or bytes.
[ "Check", "if", "obj", "is", "a", "sequence", "but", "not", "a", "string", "or", "bytes", "." ]
bb898f8056da3973204c699621350bf9565e43df
https://github.com/ethereum/pyrlp/blob/bb898f8056da3973204c699621350bf9565e43df/rlp/sedes/lists.py#L32-L35
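A hedged usage sketch; the import path follows the record's module path (rlp/sedes/lists.py).

from rlp.sedes.lists import is_sequence

assert is_sequence([1, 2, 3])
assert is_sequence((b'\x01', b'\x02'))
assert not is_sequence(b'\x01\x02')   # bytes count as atomic payloads, not sequences
assert not is_sequence('abc')         # the same goes for str
assert not is_sequence(5)             # ints are not sequences at all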
997
ethereum/pyrlp
rlp/codec.py
encode
def encode(obj, sedes=None, infer_serializer=True, cache=True): """Encode a Python object in RLP format. By default, the object is serialized in a suitable way first (using :func:`rlp.infer_sedes`) and then encoded. Serialization can be explicitly suppressed by setting `infer_serializer` to ``False`` and not passing an alternative as `sedes`. If `obj` has an attribute :attr:`_cached_rlp` (as, notably, :class:`rlp.Serializable`) and its value is not `None`, this value is returned bypassing serialization and encoding, unless `sedes` is given (as the cache is assumed to refer to the standard serialization which can be replaced by specifying `sedes`). If `obj` is a :class:`rlp.Serializable` and `cache` is true, the result of the encoding will be stored in :attr:`_cached_rlp` if it is empty. :param sedes: an object implementing a function ``serialize(obj)`` which will be used to serialize ``obj`` before encoding, or ``None`` to use the inferred one (if any) :param infer_serializer: if ``True`` an appropriate serializer will be selected using :func:`rlp.infer_sedes` to serialize `obj` before encoding :param cache: cache the return value in `obj._cached_rlp` if possible (default `True`) :returns: the RLP encoded item :raises: :exc:`rlp.EncodingError` in the rather unlikely case that the item is too big to encode (will not happen) :raises: :exc:`rlp.SerializationError` if the serialization fails """ if isinstance(obj, Serializable): cached_rlp = obj._cached_rlp if sedes is None and cached_rlp: return cached_rlp else: really_cache = ( cache and sedes is None ) else: really_cache = False if sedes: item = sedes.serialize(obj) elif infer_serializer: item = infer_sedes(obj).serialize(obj) else: item = obj result = encode_raw(item) if really_cache: obj._cached_rlp = result return result
python
def encode(obj, sedes=None, infer_serializer=True, cache=True): """Encode a Python object in RLP format. By default, the object is serialized in a suitable way first (using :func:`rlp.infer_sedes`) and then encoded. Serialization can be explicitly suppressed by setting `infer_serializer` to ``False`` and not passing an alternative as `sedes`. If `obj` has an attribute :attr:`_cached_rlp` (as, notably, :class:`rlp.Serializable`) and its value is not `None`, this value is returned bypassing serialization and encoding, unless `sedes` is given (as the cache is assumed to refer to the standard serialization which can be replaced by specifying `sedes`). If `obj` is a :class:`rlp.Serializable` and `cache` is true, the result of the encoding will be stored in :attr:`_cached_rlp` if it is empty. :param sedes: an object implementing a function ``serialize(obj)`` which will be used to serialize ``obj`` before encoding, or ``None`` to use the inferred one (if any) :param infer_serializer: if ``True`` an appropriate serializer will be selected using :func:`rlp.infer_sedes` to serialize `obj` before encoding :param cache: cache the return value in `obj._cached_rlp` if possible (default `True`) :returns: the RLP encoded item :raises: :exc:`rlp.EncodingError` in the rather unlikely case that the item is too big to encode (will not happen) :raises: :exc:`rlp.SerializationError` if the serialization fails """ if isinstance(obj, Serializable): cached_rlp = obj._cached_rlp if sedes is None and cached_rlp: return cached_rlp else: really_cache = ( cache and sedes is None ) else: really_cache = False if sedes: item = sedes.serialize(obj) elif infer_serializer: item = infer_sedes(obj).serialize(obj) else: item = obj result = encode_raw(item) if really_cache: obj._cached_rlp = result return result
[ "def", "encode", "(", "obj", ",", "sedes", "=", "None", ",", "infer_serializer", "=", "True", ",", "cache", "=", "True", ")", ":", "if", "isinstance", "(", "obj", ",", "Serializable", ")", ":", "cached_rlp", "=", "obj", ".", "_cached_rlp", "if", "sedes", "is", "None", "and", "cached_rlp", ":", "return", "cached_rlp", "else", ":", "really_cache", "=", "(", "cache", "and", "sedes", "is", "None", ")", "else", ":", "really_cache", "=", "False", "if", "sedes", ":", "item", "=", "sedes", ".", "serialize", "(", "obj", ")", "elif", "infer_serializer", ":", "item", "=", "infer_sedes", "(", "obj", ")", ".", "serialize", "(", "obj", ")", "else", ":", "item", "=", "obj", "result", "=", "encode_raw", "(", "item", ")", "if", "really_cache", ":", "obj", ".", "_cached_rlp", "=", "result", "return", "result" ]
Encode a Python object in RLP format. By default, the object is serialized in a suitable way first (using :func:`rlp.infer_sedes`) and then encoded. Serialization can be explicitly suppressed by setting `infer_serializer` to ``False`` and not passing an alternative as `sedes`. If `obj` has an attribute :attr:`_cached_rlp` (as, notably, :class:`rlp.Serializable`) and its value is not `None`, this value is returned bypassing serialization and encoding, unless `sedes` is given (as the cache is assumed to refer to the standard serialization which can be replaced by specifying `sedes`). If `obj` is a :class:`rlp.Serializable` and `cache` is true, the result of the encoding will be stored in :attr:`_cached_rlp` if it is empty. :param sedes: an object implementing a function ``serialize(obj)`` which will be used to serialize ``obj`` before encoding, or ``None`` to use the inferred one (if any) :param infer_serializer: if ``True`` an appropriate serializer will be selected using :func:`rlp.infer_sedes` to serialize `obj` before encoding :param cache: cache the return value in `obj._cached_rlp` if possible (default `True`) :returns: the RLP encoded item :raises: :exc:`rlp.EncodingError` in the rather unlikely case that the item is too big to encode (will not happen) :raises: :exc:`rlp.SerializationError` if the serialization fails
[ "Encode", "a", "Python", "object", "in", "RLP", "format", "." ]
bb898f8056da3973204c699621350bf9565e43df
https://github.com/ethereum/pyrlp/blob/bb898f8056da3973204c699621350bf9565e43df/rlp/codec.py#L20-L70
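A hedged sketch of the caching behaviour described in the docstring, using a minimal Serializable subclass. The Transfer class and its field names are purely illustrative, and the exact caching and constructor semantics are assumed from the docstring above rather than verified against this revision.

import rlp
from rlp.sedes import big_endian_int, binary

class Transfer(rlp.Serializable):        # hypothetical example class
    fields = (
        ('amount', big_endian_int),
        ('memo', binary),
    )

t = Transfer(7, b'hi')
first = rlp.encode(t)                    # serialized via the class's own sedes, then cached
assert t._cached_rlp == first            # cache populated because no explicit sedes was given
assert rlp.encode(t) == first            # second call is served from the cache

assert rlp.encode(5) == b'\x05'          # plain objects go through infer_sedes instead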
998
ethereum/pyrlp
rlp/codec.py
consume_payload
def consume_payload(rlp, prefix, start, type_, length): """Read the payload of an item from an RLP string. :param rlp: the rlp string to read from :param type_: the type of the payload (``bytes`` or ``list``) :param start: the position at which to start reading :param length: the length of the payload in bytes :returns: a tuple ``(item, per_item_rlp, end)``, where ``item`` is the read item, per_item_rlp is a list containing the RLP encoding of each item and ``end`` is the position of the first unprocessed byte """ if type_ is bytes: item = rlp[start: start + length] return (item, [prefix + item], start + length) elif type_ is list: items = [] per_item_rlp = [] list_rlp = prefix next_item_start = start end = next_item_start + length while next_item_start < end: p, t, l, s = consume_length_prefix(rlp, next_item_start) item, item_rlp, next_item_start = consume_payload(rlp, p, s, t, l) per_item_rlp.append(item_rlp) # When the item returned above is a single element, item_rlp will also contain a # single element, but when it's a list, the first element will be the RLP of the # whole List, which is what we want here. list_rlp += item_rlp[0] items.append(item) per_item_rlp.insert(0, list_rlp) if next_item_start > end: raise DecodingError('List length prefix announced a too small ' 'length', rlp) return (items, per_item_rlp, next_item_start) else: raise TypeError('Type must be either list or bytes')
python
def consume_payload(rlp, prefix, start, type_, length): """Read the payload of an item from an RLP string. :param rlp: the rlp string to read from :param type_: the type of the payload (``bytes`` or ``list``) :param start: the position at which to start reading :param length: the length of the payload in bytes :returns: a tuple ``(item, per_item_rlp, end)``, where ``item`` is the read item, per_item_rlp is a list containing the RLP encoding of each item and ``end`` is the position of the first unprocessed byte """ if type_ is bytes: item = rlp[start: start + length] return (item, [prefix + item], start + length) elif type_ is list: items = [] per_item_rlp = [] list_rlp = prefix next_item_start = start end = next_item_start + length while next_item_start < end: p, t, l, s = consume_length_prefix(rlp, next_item_start) item, item_rlp, next_item_start = consume_payload(rlp, p, s, t, l) per_item_rlp.append(item_rlp) # When the item returned above is a single element, item_rlp will also contain a # single element, but when it's a list, the first element will be the RLP of the # whole List, which is what we want here. list_rlp += item_rlp[0] items.append(item) per_item_rlp.insert(0, list_rlp) if next_item_start > end: raise DecodingError('List length prefix announced a too small ' 'length', rlp) return (items, per_item_rlp, next_item_start) else: raise TypeError('Type must be either list or bytes')
[ "def", "consume_payload", "(", "rlp", ",", "prefix", ",", "start", ",", "type_", ",", "length", ")", ":", "if", "type_", "is", "bytes", ":", "item", "=", "rlp", "[", "start", ":", "start", "+", "length", "]", "return", "(", "item", ",", "[", "prefix", "+", "item", "]", ",", "start", "+", "length", ")", "elif", "type_", "is", "list", ":", "items", "=", "[", "]", "per_item_rlp", "=", "[", "]", "list_rlp", "=", "prefix", "next_item_start", "=", "start", "end", "=", "next_item_start", "+", "length", "while", "next_item_start", "<", "end", ":", "p", ",", "t", ",", "l", ",", "s", "=", "consume_length_prefix", "(", "rlp", ",", "next_item_start", ")", "item", ",", "item_rlp", ",", "next_item_start", "=", "consume_payload", "(", "rlp", ",", "p", ",", "s", ",", "t", ",", "l", ")", "per_item_rlp", ".", "append", "(", "item_rlp", ")", "# When the item returned above is a single element, item_rlp will also contain a", "# single element, but when it's a list, the first element will be the RLP of the", "# whole List, which is what we want here.", "list_rlp", "+=", "item_rlp", "[", "0", "]", "items", ".", "append", "(", "item", ")", "per_item_rlp", ".", "insert", "(", "0", ",", "list_rlp", ")", "if", "next_item_start", ">", "end", ":", "raise", "DecodingError", "(", "'List length prefix announced a too small '", "'length'", ",", "rlp", ")", "return", "(", "items", ",", "per_item_rlp", ",", "next_item_start", ")", "else", ":", "raise", "TypeError", "(", "'Type must be either list or bytes'", ")" ]
Read the payload of an item from an RLP string. :param rlp: the rlp string to read from :param type_: the type of the payload (``bytes`` or ``list``) :param start: the position at which to start reading :param length: the length of the payload in bytes :returns: a tuple ``(item, per_item_rlp, end)``, where ``item`` is the read item, per_item_rlp is a list containing the RLP encoding of each item and ``end`` is the position of the first unprocessed byte
[ "Read", "the", "payload", "of", "an", "item", "from", "an", "RLP", "string", "." ]
bb898f8056da3973204c699621350bf9565e43df
https://github.com/ethereum/pyrlp/blob/bb898f8056da3973204c699621350bf9565e43df/rlp/codec.py#L156-L192
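A hedged sketch of the (item, per_item_rlp, end) return shape, driving the internal helpers directly. The import path follows the record's module path (rlp/codec.py); these are internal functions, so their interface here is assumed from the code above rather than from any public API guarantee.

import rlp
from rlp.codec import consume_length_prefix, consume_payload

rlpdata = rlp.encode([b'a', [b'bb', b'ccc']])

# consume_length_prefix returns (prefix, type, length, payload_start), matching the
# (p, t, l, s) unpacking used by consume_item below.
prefix, type_, length, start = consume_length_prefix(rlpdata, 0)
items, per_item_rlp, end = consume_payload(rlpdata, prefix, start, type_, length)

assert items == [b'a', [b'bb', b'ccc']]
assert per_item_rlp[0] == rlpdata      # first entry is the RLP of the whole list
assert end == len(rlpdata)             # every byte was consumed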
999
ethereum/pyrlp
rlp/codec.py
consume_item
def consume_item(rlp, start): """Read an item from an RLP string. :param rlp: the rlp string to read from :param start: the position at which to start reading :returns: a tuple ``(item, per_item_rlp, end)``, where ``item`` is the read item, per_item_rlp is a list containing the RLP encoding of each item and ``end`` is the position of the first unprocessed byte """ p, t, l, s = consume_length_prefix(rlp, start) return consume_payload(rlp, p, s, t, l)
python
def consume_item(rlp, start): """Read an item from an RLP string. :param rlp: the rlp string to read from :param start: the position at which to start reading :returns: a tuple ``(item, per_item_rlp, end)``, where ``item`` is the read item, per_item_rlp is a list containing the RLP encoding of each item and ``end`` is the position of the first unprocessed byte """ p, t, l, s = consume_length_prefix(rlp, start) return consume_payload(rlp, p, s, t, l)
[ "def", "consume_item", "(", "rlp", ",", "start", ")", ":", "p", ",", "t", ",", "l", ",", "s", "=", "consume_length_prefix", "(", "rlp", ",", "start", ")", "return", "consume_payload", "(", "rlp", ",", "p", ",", "s", ",", "t", ",", "l", ")" ]
Read an item from an RLP string. :param rlp: the rlp string to read from :param start: the position at which to start reading :returns: a tuple ``(item, per_item_rlp, end)``, where ``item`` is the read item, per_item_rlp is a list containing the RLP encoding of each item and ``end`` is the position of the first unprocessed byte
[ "Read", "an", "item", "from", "an", "RLP", "string", "." ]
bb898f8056da3973204c699621350bf9565e43df
https://github.com/ethereum/pyrlp/blob/bb898f8056da3973204c699621350bf9565e43df/rlp/codec.py#L195-L206
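A hedged sketch showing how consume_item glues the two helpers together, and how the returned end offset lets a caller walk a string of several concatenated items (again an internal API, with the interface assumed from the code above).

import rlp
from rlp.codec import consume_item

blob = rlp.encode(b'first') + rlp.encode([b'second', b'third'])

item1, _, end1 = consume_item(blob, 0)
item2, _, end2 = consume_item(blob, end1)

assert item1 == b'first'
assert item2 == [b'second', b'third']
assert end2 == len(blob)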