file_path
stringlengths 21
207
| content
stringlengths 5
1.02M
| size
int64 5
1.02M
| lang
stringclasses 9
values | avg_line_length
float64 1.33
100
| max_line_length
int64 4
993
| alphanum_fraction
float64 0.27
0.93
|
---|---|---|---|---|---|---|
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/httptools/parser/errors.py | __all__ = ('HttpParserError',
'HttpParserCallbackError',
'HttpParserInvalidStatusError',
'HttpParserInvalidMethodError',
'HttpParserInvalidURLError',
'HttpParserUpgrade')
class HttpParserError(Exception):
pass
class HttpParserCallbackError(HttpParserError):
pass
class HttpParserInvalidStatusError(HttpParserError):
pass
class HttpParserInvalidMethodError(HttpParserError):
pass
class HttpParserInvalidURLError(HttpParserError):
pass
class HttpParserUpgrade(Exception):
pass
| 566 | Python | 17.290322 | 52 | 0.719081 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/httptools/parser/parser.c | /* Generated by Cython 0.29.28 */
/* BEGIN: Cython Metadata
{
"distutils": {
"depends": [],
"extra_compile_args": [
"-O2"
],
"name": "httptools.parser.parser",
"sources": [
"httptools/parser/parser.pyx"
]
},
"module_name": "httptools.parser.parser"
}
END: Cython Metadata */
#ifndef PY_SSIZE_T_CLEAN
#define PY_SSIZE_T_CLEAN
#endif /* PY_SSIZE_T_CLEAN */
#include "Python.h"
#ifndef Py_PYTHON_H
#error Python headers needed to compile C extensions, please install development version of Python.
#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000)
#error Cython requires Python 2.6+ or Python 3.3+.
#else
#define CYTHON_ABI "0_29_28"
#define CYTHON_HEX_VERSION 0x001D1CF0
#define CYTHON_FUTURE_DIVISION 1
#include <stddef.h>
#ifndef offsetof
#define offsetof(type, member) ( (size_t) & ((type*)0) -> member )
#endif
#if !defined(WIN32) && !defined(MS_WINDOWS)
#ifndef __stdcall
#define __stdcall
#endif
#ifndef __cdecl
#define __cdecl
#endif
#ifndef __fastcall
#define __fastcall
#endif
#endif
#ifndef DL_IMPORT
#define DL_IMPORT(t) t
#endif
#ifndef DL_EXPORT
#define DL_EXPORT(t) t
#endif
#define __PYX_COMMA ,
#ifndef HAVE_LONG_LONG
#if PY_VERSION_HEX >= 0x02070000
#define HAVE_LONG_LONG
#endif
#endif
#ifndef PY_LONG_LONG
#define PY_LONG_LONG LONG_LONG
#endif
#ifndef Py_HUGE_VAL
#define Py_HUGE_VAL HUGE_VAL
#endif
#ifdef PYPY_VERSION
#define CYTHON_COMPILING_IN_PYPY 1
#define CYTHON_COMPILING_IN_PYSTON 0
#define CYTHON_COMPILING_IN_CPYTHON 0
#undef CYTHON_USE_TYPE_SLOTS
#define CYTHON_USE_TYPE_SLOTS 0
#undef CYTHON_USE_PYTYPE_LOOKUP
#define CYTHON_USE_PYTYPE_LOOKUP 0
#if PY_VERSION_HEX < 0x03050000
#undef CYTHON_USE_ASYNC_SLOTS
#define CYTHON_USE_ASYNC_SLOTS 0
#elif !defined(CYTHON_USE_ASYNC_SLOTS)
#define CYTHON_USE_ASYNC_SLOTS 1
#endif
#undef CYTHON_USE_PYLIST_INTERNALS
#define CYTHON_USE_PYLIST_INTERNALS 0
#undef CYTHON_USE_UNICODE_INTERNALS
#define CYTHON_USE_UNICODE_INTERNALS 0
#undef CYTHON_USE_UNICODE_WRITER
#define CYTHON_USE_UNICODE_WRITER 0
#undef CYTHON_USE_PYLONG_INTERNALS
#define CYTHON_USE_PYLONG_INTERNALS 0
#undef CYTHON_AVOID_BORROWED_REFS
#define CYTHON_AVOID_BORROWED_REFS 1
#undef CYTHON_ASSUME_SAFE_MACROS
#define CYTHON_ASSUME_SAFE_MACROS 0
#undef CYTHON_UNPACK_METHODS
#define CYTHON_UNPACK_METHODS 0
#undef CYTHON_FAST_THREAD_STATE
#define CYTHON_FAST_THREAD_STATE 0
#undef CYTHON_FAST_PYCALL
#define CYTHON_FAST_PYCALL 0
#undef CYTHON_PEP489_MULTI_PHASE_INIT
#define CYTHON_PEP489_MULTI_PHASE_INIT 0
#undef CYTHON_USE_TP_FINALIZE
#define CYTHON_USE_TP_FINALIZE 0
#undef CYTHON_USE_DICT_VERSIONS
#define CYTHON_USE_DICT_VERSIONS 0
#undef CYTHON_USE_EXC_INFO_STACK
#define CYTHON_USE_EXC_INFO_STACK 0
#elif defined(PYSTON_VERSION)
#define CYTHON_COMPILING_IN_PYPY 0
#define CYTHON_COMPILING_IN_PYSTON 1
#define CYTHON_COMPILING_IN_CPYTHON 0
#ifndef CYTHON_USE_TYPE_SLOTS
#define CYTHON_USE_TYPE_SLOTS 1
#endif
#undef CYTHON_USE_PYTYPE_LOOKUP
#define CYTHON_USE_PYTYPE_LOOKUP 0
#undef CYTHON_USE_ASYNC_SLOTS
#define CYTHON_USE_ASYNC_SLOTS 0
#undef CYTHON_USE_PYLIST_INTERNALS
#define CYTHON_USE_PYLIST_INTERNALS 0
#ifndef CYTHON_USE_UNICODE_INTERNALS
#define CYTHON_USE_UNICODE_INTERNALS 1
#endif
#undef CYTHON_USE_UNICODE_WRITER
#define CYTHON_USE_UNICODE_WRITER 0
#undef CYTHON_USE_PYLONG_INTERNALS
#define CYTHON_USE_PYLONG_INTERNALS 0
#ifndef CYTHON_AVOID_BORROWED_REFS
#define CYTHON_AVOID_BORROWED_REFS 0
#endif
#ifndef CYTHON_ASSUME_SAFE_MACROS
#define CYTHON_ASSUME_SAFE_MACROS 1
#endif
#ifndef CYTHON_UNPACK_METHODS
#define CYTHON_UNPACK_METHODS 1
#endif
#undef CYTHON_FAST_THREAD_STATE
#define CYTHON_FAST_THREAD_STATE 0
#undef CYTHON_FAST_PYCALL
#define CYTHON_FAST_PYCALL 0
#undef CYTHON_PEP489_MULTI_PHASE_INIT
#define CYTHON_PEP489_MULTI_PHASE_INIT 0
#undef CYTHON_USE_TP_FINALIZE
#define CYTHON_USE_TP_FINALIZE 0
#undef CYTHON_USE_DICT_VERSIONS
#define CYTHON_USE_DICT_VERSIONS 0
#undef CYTHON_USE_EXC_INFO_STACK
#define CYTHON_USE_EXC_INFO_STACK 0
#else
#define CYTHON_COMPILING_IN_PYPY 0
#define CYTHON_COMPILING_IN_PYSTON 0
#define CYTHON_COMPILING_IN_CPYTHON 1
#ifndef CYTHON_USE_TYPE_SLOTS
#define CYTHON_USE_TYPE_SLOTS 1
#endif
#if PY_VERSION_HEX < 0x02070000
#undef CYTHON_USE_PYTYPE_LOOKUP
#define CYTHON_USE_PYTYPE_LOOKUP 0
#elif !defined(CYTHON_USE_PYTYPE_LOOKUP)
#define CYTHON_USE_PYTYPE_LOOKUP 1
#endif
#if PY_MAJOR_VERSION < 3
#undef CYTHON_USE_ASYNC_SLOTS
#define CYTHON_USE_ASYNC_SLOTS 0
#elif !defined(CYTHON_USE_ASYNC_SLOTS)
#define CYTHON_USE_ASYNC_SLOTS 1
#endif
#if PY_VERSION_HEX < 0x02070000
#undef CYTHON_USE_PYLONG_INTERNALS
#define CYTHON_USE_PYLONG_INTERNALS 0
#elif !defined(CYTHON_USE_PYLONG_INTERNALS)
#define CYTHON_USE_PYLONG_INTERNALS 1
#endif
#ifndef CYTHON_USE_PYLIST_INTERNALS
#define CYTHON_USE_PYLIST_INTERNALS 1
#endif
#ifndef CYTHON_USE_UNICODE_INTERNALS
#define CYTHON_USE_UNICODE_INTERNALS 1
#endif
#if PY_VERSION_HEX < 0x030300F0 || PY_VERSION_HEX >= 0x030B00A2
#undef CYTHON_USE_UNICODE_WRITER
#define CYTHON_USE_UNICODE_WRITER 0
#elif !defined(CYTHON_USE_UNICODE_WRITER)
#define CYTHON_USE_UNICODE_WRITER 1
#endif
#ifndef CYTHON_AVOID_BORROWED_REFS
#define CYTHON_AVOID_BORROWED_REFS 0
#endif
#ifndef CYTHON_ASSUME_SAFE_MACROS
#define CYTHON_ASSUME_SAFE_MACROS 1
#endif
#ifndef CYTHON_UNPACK_METHODS
#define CYTHON_UNPACK_METHODS 1
#endif
#if PY_VERSION_HEX >= 0x030B00A4
#undef CYTHON_FAST_THREAD_STATE
#define CYTHON_FAST_THREAD_STATE 0
#elif !defined(CYTHON_FAST_THREAD_STATE)
#define CYTHON_FAST_THREAD_STATE 1
#endif
#ifndef CYTHON_FAST_PYCALL
#define CYTHON_FAST_PYCALL (PY_VERSION_HEX < 0x030B00A1)
#endif
#ifndef CYTHON_PEP489_MULTI_PHASE_INIT
#define CYTHON_PEP489_MULTI_PHASE_INIT (PY_VERSION_HEX >= 0x03050000)
#endif
#ifndef CYTHON_USE_TP_FINALIZE
#define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1)
#endif
#ifndef CYTHON_USE_DICT_VERSIONS
#define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX >= 0x030600B1)
#endif
#if PY_VERSION_HEX >= 0x030B00A4
#undef CYTHON_USE_EXC_INFO_STACK
#define CYTHON_USE_EXC_INFO_STACK 0
#elif !defined(CYTHON_USE_EXC_INFO_STACK)
#define CYTHON_USE_EXC_INFO_STACK (PY_VERSION_HEX >= 0x030700A3)
#endif
#endif
#if !defined(CYTHON_FAST_PYCCALL)
#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1)
#endif
#if CYTHON_USE_PYLONG_INTERNALS
#if PY_MAJOR_VERSION < 3
#include "longintrepr.h"
#endif
#undef SHIFT
#undef BASE
#undef MASK
#ifdef SIZEOF_VOID_P
enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) };
#endif
#endif
#ifndef __has_attribute
#define __has_attribute(x) 0
#endif
#ifndef __has_cpp_attribute
#define __has_cpp_attribute(x) 0
#endif
#ifndef CYTHON_RESTRICT
#if defined(__GNUC__)
#define CYTHON_RESTRICT __restrict__
#elif defined(_MSC_VER) && _MSC_VER >= 1400
#define CYTHON_RESTRICT __restrict
#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
#define CYTHON_RESTRICT restrict
#else
#define CYTHON_RESTRICT
#endif
#endif
#ifndef CYTHON_UNUSED
# if defined(__GNUC__)
# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4))
# define CYTHON_UNUSED __attribute__ ((__unused__))
# else
# define CYTHON_UNUSED
# endif
# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER))
# define CYTHON_UNUSED __attribute__ ((__unused__))
# else
# define CYTHON_UNUSED
# endif
#endif
#ifndef CYTHON_MAYBE_UNUSED_VAR
# if defined(__cplusplus)
template<class T> void CYTHON_MAYBE_UNUSED_VAR( const T& ) { }
# else
# define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x)
# endif
#endif
#ifndef CYTHON_NCP_UNUSED
# if CYTHON_COMPILING_IN_CPYTHON
# define CYTHON_NCP_UNUSED
# else
# define CYTHON_NCP_UNUSED CYTHON_UNUSED
# endif
#endif
#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None)
#ifdef _MSC_VER
#ifndef _MSC_STDINT_H_
#if _MSC_VER < 1300
typedef unsigned char uint8_t;
typedef unsigned int uint32_t;
#else
typedef unsigned __int8 uint8_t;
typedef unsigned __int32 uint32_t;
#endif
#endif
#else
#include <stdint.h>
#endif
#ifndef CYTHON_FALLTHROUGH
#if defined(__cplusplus) && __cplusplus >= 201103L
#if __has_cpp_attribute(fallthrough)
#define CYTHON_FALLTHROUGH [[fallthrough]]
#elif __has_cpp_attribute(clang::fallthrough)
#define CYTHON_FALLTHROUGH [[clang::fallthrough]]
#elif __has_cpp_attribute(gnu::fallthrough)
#define CYTHON_FALLTHROUGH [[gnu::fallthrough]]
#endif
#endif
#ifndef CYTHON_FALLTHROUGH
#if __has_attribute(fallthrough)
#define CYTHON_FALLTHROUGH __attribute__((fallthrough))
#else
#define CYTHON_FALLTHROUGH
#endif
#endif
#if defined(__clang__ ) && defined(__apple_build_version__)
#if __apple_build_version__ < 7000000
#undef CYTHON_FALLTHROUGH
#define CYTHON_FALLTHROUGH
#endif
#endif
#endif
#ifndef CYTHON_INLINE
#if defined(__clang__)
#define CYTHON_INLINE __inline__ __attribute__ ((__unused__))
#elif defined(__GNUC__)
#define CYTHON_INLINE __inline__
#elif defined(_MSC_VER)
#define CYTHON_INLINE __inline
#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
#define CYTHON_INLINE inline
#else
#define CYTHON_INLINE
#endif
#endif
#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag)
#define Py_OptimizeFlag 0
#endif
#define __PYX_BUILD_PY_SSIZE_T "n"
#define CYTHON_FORMAT_SSIZE_T "z"
#if PY_MAJOR_VERSION < 3
#define __Pyx_BUILTIN_MODULE_NAME "__builtin__"
#define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
#define __Pyx_DefaultClassType PyClass_Type
#else
#define __Pyx_BUILTIN_MODULE_NAME "builtins"
#define __Pyx_DefaultClassType PyType_Type
#if PY_VERSION_HEX >= 0x030B00A1
static CYTHON_INLINE PyCodeObject* __Pyx_PyCode_New(int a, int k, int l, int s, int f,
PyObject *code, PyObject *c, PyObject* n, PyObject *v,
PyObject *fv, PyObject *cell, PyObject* fn,
PyObject *name, int fline, PyObject *lnos) {
PyObject *kwds=NULL, *argcount=NULL, *posonlyargcount=NULL, *kwonlyargcount=NULL;
PyObject *nlocals=NULL, *stacksize=NULL, *flags=NULL, *replace=NULL, *call_result=NULL, *empty=NULL;
const char *fn_cstr=NULL;
const char *name_cstr=NULL;
PyCodeObject* co=NULL;
PyObject *type, *value, *traceback;
PyErr_Fetch(&type, &value, &traceback);
if (!(kwds=PyDict_New())) goto end;
if (!(argcount=PyLong_FromLong(a))) goto end;
if (PyDict_SetItemString(kwds, "co_argcount", argcount) != 0) goto end;
if (!(posonlyargcount=PyLong_FromLong(0))) goto end;
if (PyDict_SetItemString(kwds, "co_posonlyargcount", posonlyargcount) != 0) goto end;
if (!(kwonlyargcount=PyLong_FromLong(k))) goto end;
if (PyDict_SetItemString(kwds, "co_kwonlyargcount", kwonlyargcount) != 0) goto end;
if (!(nlocals=PyLong_FromLong(l))) goto end;
if (PyDict_SetItemString(kwds, "co_nlocals", nlocals) != 0) goto end;
if (!(stacksize=PyLong_FromLong(s))) goto end;
if (PyDict_SetItemString(kwds, "co_stacksize", stacksize) != 0) goto end;
if (!(flags=PyLong_FromLong(f))) goto end;
if (PyDict_SetItemString(kwds, "co_flags", flags) != 0) goto end;
if (PyDict_SetItemString(kwds, "co_code", code) != 0) goto end;
if (PyDict_SetItemString(kwds, "co_consts", c) != 0) goto end;
if (PyDict_SetItemString(kwds, "co_names", n) != 0) goto end;
if (PyDict_SetItemString(kwds, "co_varnames", v) != 0) goto end;
if (PyDict_SetItemString(kwds, "co_freevars", fv) != 0) goto end;
if (PyDict_SetItemString(kwds, "co_cellvars", cell) != 0) goto end;
if (PyDict_SetItemString(kwds, "co_linetable", lnos) != 0) goto end;
if (!(fn_cstr=PyUnicode_AsUTF8AndSize(fn, NULL))) goto end;
if (!(name_cstr=PyUnicode_AsUTF8AndSize(name, NULL))) goto end;
if (!(co = PyCode_NewEmpty(fn_cstr, name_cstr, fline))) goto end;
if (!(replace = PyObject_GetAttrString((PyObject*)co, "replace"))) goto cleanup_code_too;
if (!(empty = PyTuple_New(0))) goto cleanup_code_too; // unfortunately __pyx_empty_tuple isn't available here
if (!(call_result = PyObject_Call(replace, empty, kwds))) goto cleanup_code_too;
Py_XDECREF((PyObject*)co);
co = (PyCodeObject*)call_result;
call_result = NULL;
if (0) {
cleanup_code_too:
Py_XDECREF((PyObject*)co);
co = NULL;
}
end:
Py_XDECREF(kwds);
Py_XDECREF(argcount);
Py_XDECREF(posonlyargcount);
Py_XDECREF(kwonlyargcount);
Py_XDECREF(nlocals);
Py_XDECREF(stacksize);
Py_XDECREF(replace);
Py_XDECREF(call_result);
Py_XDECREF(empty);
if (type) {
PyErr_Restore(type, value, traceback);
}
return co;
}
#else
#define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
#endif
#define __Pyx_DefaultClassType PyType_Type
#endif
#ifndef Py_TPFLAGS_CHECKTYPES
#define Py_TPFLAGS_CHECKTYPES 0
#endif
#ifndef Py_TPFLAGS_HAVE_INDEX
#define Py_TPFLAGS_HAVE_INDEX 0
#endif
#ifndef Py_TPFLAGS_HAVE_NEWBUFFER
#define Py_TPFLAGS_HAVE_NEWBUFFER 0
#endif
#ifndef Py_TPFLAGS_HAVE_FINALIZE
#define Py_TPFLAGS_HAVE_FINALIZE 0
#endif
#ifndef METH_STACKLESS
#define METH_STACKLESS 0
#endif
#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL)
#ifndef METH_FASTCALL
#define METH_FASTCALL 0x80
#endif
typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs);
typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args,
Py_ssize_t nargs, PyObject *kwnames);
#else
#define __Pyx_PyCFunctionFast _PyCFunctionFast
#define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords
#endif
#if CYTHON_FAST_PYCCALL
#define __Pyx_PyFastCFunction_Check(func)\
((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS)))))
#else
#define __Pyx_PyFastCFunction_Check(func) 0
#endif
#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc)
#define PyObject_Malloc(s) PyMem_Malloc(s)
#define PyObject_Free(p) PyMem_Free(p)
#define PyObject_Realloc(p) PyMem_Realloc(p)
#endif
#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030400A1
#define PyMem_RawMalloc(n) PyMem_Malloc(n)
#define PyMem_RawRealloc(p, n) PyMem_Realloc(p, n)
#define PyMem_RawFree(p) PyMem_Free(p)
#endif
#if CYTHON_COMPILING_IN_PYSTON
#define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co)
#define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno)
#else
#define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0)
#define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno)
#endif
#if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000
#define __Pyx_PyThreadState_Current PyThreadState_GET()
#elif PY_VERSION_HEX >= 0x03060000
#define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet()
#elif PY_VERSION_HEX >= 0x03000000
#define __Pyx_PyThreadState_Current PyThreadState_GET()
#else
#define __Pyx_PyThreadState_Current _PyThreadState_Current
#endif
#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT)
#include "pythread.h"
#define Py_tss_NEEDS_INIT 0
typedef int Py_tss_t;
static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) {
*key = PyThread_create_key();
return 0;
}
static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) {
Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t));
*key = Py_tss_NEEDS_INIT;
return key;
}
static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) {
PyObject_Free(key);
}
static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) {
return *key != Py_tss_NEEDS_INIT;
}
static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) {
PyThread_delete_key(*key);
*key = Py_tss_NEEDS_INIT;
}
static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) {
return PyThread_set_key_value(*key, value);
}
static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
return PyThread_get_key_value(*key);
}
#endif
#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized)
#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n))
#else
#define __Pyx_PyDict_NewPresized(n) PyDict_New()
#endif
#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION
#define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y)
#define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y)
#else
#define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y)
#define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y)
#endif
#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && CYTHON_USE_UNICODE_INTERNALS
#define __Pyx_PyDict_GetItemStr(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash)
#else
#define __Pyx_PyDict_GetItemStr(dict, name) PyDict_GetItem(dict, name)
#endif
#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND)
#define CYTHON_PEP393_ENABLED 1
#if defined(PyUnicode_IS_READY)
#define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\
0 : _PyUnicode_Ready((PyObject *)(op)))
#else
#define __Pyx_PyUnicode_READY(op) (0)
#endif
#define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u)
#define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i)
#define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u)
#define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u)
#define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u)
#define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i)
#define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch)
#if defined(PyUnicode_IS_READY) && defined(PyUnicode_GET_SIZE)
#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03090000
#define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : ((PyCompactUnicodeObject *)(u))->wstr_length))
#else
#define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u)))
#endif
#else
#define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u))
#endif
#else
#define CYTHON_PEP393_ENABLED 0
#define PyUnicode_1BYTE_KIND 1
#define PyUnicode_2BYTE_KIND 2
#define PyUnicode_4BYTE_KIND 4
#define __Pyx_PyUnicode_READY(op) (0)
#define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u)
#define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i]))
#define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 65535 : 1114111)
#define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE))
#define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u))
#define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i]))
#define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch)
#define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u))
#endif
#if CYTHON_COMPILING_IN_PYPY
#define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b)
#define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b)
#else
#define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b)
#define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\
PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b))
#endif
#if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains)
#define PyUnicode_Contains(u, s) PySequence_Contains(u, s)
#endif
#if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check)
#define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type)
#endif
#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format)
#define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt)
#endif
#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b))
#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b))
#if PY_MAJOR_VERSION >= 3
#define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b)
#else
#define __Pyx_PyString_Format(a, b) PyString_Format(a, b)
#endif
#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII)
#define PyObject_ASCII(o) PyObject_Repr(o)
#endif
#if PY_MAJOR_VERSION >= 3
#define PyBaseString_Type PyUnicode_Type
#define PyStringObject PyUnicodeObject
#define PyString_Type PyUnicode_Type
#define PyString_Check PyUnicode_Check
#define PyString_CheckExact PyUnicode_CheckExact
#ifndef PyObject_Unicode
#define PyObject_Unicode PyObject_Str
#endif
#endif
#if PY_MAJOR_VERSION >= 3
#define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj)
#define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj)
#else
#define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj))
#define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj))
#endif
#ifndef PySet_CheckExact
#define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type)
#endif
#if PY_VERSION_HEX >= 0x030900A4
#define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt)
#define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size)
#else
#define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt)
#define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size)
#endif
#if CYTHON_ASSUME_SAFE_MACROS
#define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq)
#else
#define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq)
#endif
#if PY_MAJOR_VERSION >= 3
#define PyIntObject PyLongObject
#define PyInt_Type PyLong_Type
#define PyInt_Check(op) PyLong_Check(op)
#define PyInt_CheckExact(op) PyLong_CheckExact(op)
#define PyInt_FromString PyLong_FromString
#define PyInt_FromUnicode PyLong_FromUnicode
#define PyInt_FromLong PyLong_FromLong
#define PyInt_FromSize_t PyLong_FromSize_t
#define PyInt_FromSsize_t PyLong_FromSsize_t
#define PyInt_AsLong PyLong_AsLong
#define PyInt_AS_LONG PyLong_AS_LONG
#define PyInt_AsSsize_t PyLong_AsSsize_t
#define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask
#define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask
#define PyNumber_Int PyNumber_Long
#endif
#if PY_MAJOR_VERSION >= 3
#define PyBoolObject PyLongObject
#endif
#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY
#ifndef PyUnicode_InternFromString
#define PyUnicode_InternFromString(s) PyUnicode_FromString(s)
#endif
#endif
#if PY_VERSION_HEX < 0x030200A4
typedef long Py_hash_t;
#define __Pyx_PyInt_FromHash_t PyInt_FromLong
#define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsHash_t
#else
#define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t
#define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsSsize_t
#endif
#if PY_MAJOR_VERSION >= 3
#define __Pyx_PyMethod_New(func, self, klass) ((self) ? ((void)(klass), PyMethod_New(func, self)) : __Pyx_NewRef(func))
#else
#define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass)
#endif
#if CYTHON_USE_ASYNC_SLOTS
#if PY_VERSION_HEX >= 0x030500B1
#define __Pyx_PyAsyncMethodsStruct PyAsyncMethods
#define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async)
#else
#define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved))
#endif
#else
#define __Pyx_PyType_AsAsync(obj) NULL
#endif
#ifndef __Pyx_PyAsyncMethodsStruct
typedef struct {
unaryfunc am_await;
unaryfunc am_aiter;
unaryfunc am_anext;
} __Pyx_PyAsyncMethodsStruct;
#endif
#if defined(WIN32) || defined(MS_WINDOWS)
#define _USE_MATH_DEFINES
#endif
#include <math.h>
#ifdef NAN
#define __PYX_NAN() ((float) NAN)
#else
static CYTHON_INLINE float __PYX_NAN() {
float value;
memset(&value, 0xFF, sizeof(value));
return value;
}
#endif
#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL)
#define __Pyx_truncl trunc
#else
#define __Pyx_truncl truncl
#endif
#define __PYX_MARK_ERR_POS(f_index, lineno) \
{ __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; }
#define __PYX_ERR(f_index, lineno, Ln_error) \
{ __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; }
#ifndef __PYX_EXTERN_C
#ifdef __cplusplus
#define __PYX_EXTERN_C extern "C"
#else
#define __PYX_EXTERN_C extern
#endif
#endif
#define __PYX_HAVE__httptools__parser__parser
#define __PYX_HAVE_API__httptools__parser__parser
/* Early includes */
#include <string.h>
#include <stdio.h>
#include "pythread.h"
#include <stdint.h>
#include "llhttp.h"
#ifdef _OPENMP
#include <omp.h>
#endif /* _OPENMP */
#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS)
#define CYTHON_WITHOUT_ASSERTIONS
#endif
typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding;
const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry;
#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0
#define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0
#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT (PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8)
#define __PYX_DEFAULT_STRING_ENCODING ""
#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString
#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize
#define __Pyx_uchar_cast(c) ((unsigned char)c)
#define __Pyx_long_cast(x) ((long)x)
#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\
(sizeof(type) < sizeof(Py_ssize_t)) ||\
(sizeof(type) > sizeof(Py_ssize_t) &&\
likely(v < (type)PY_SSIZE_T_MAX ||\
v == (type)PY_SSIZE_T_MAX) &&\
(!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\
v == (type)PY_SSIZE_T_MIN))) ||\
(sizeof(type) == sizeof(Py_ssize_t) &&\
(is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\
v == (type)PY_SSIZE_T_MAX))) )
static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) {
return (size_t) i < (size_t) limit;
}
#if defined (__cplusplus) && __cplusplus >= 201103L
#include <cstdlib>
#define __Pyx_sst_abs(value) std::abs(value)
#elif SIZEOF_INT >= SIZEOF_SIZE_T
#define __Pyx_sst_abs(value) abs(value)
#elif SIZEOF_LONG >= SIZEOF_SIZE_T
#define __Pyx_sst_abs(value) labs(value)
#elif defined (_MSC_VER)
#define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value))
#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
#define __Pyx_sst_abs(value) llabs(value)
#elif defined (__GNUC__)
#define __Pyx_sst_abs(value) __builtin_llabs(value)
#else
#define __Pyx_sst_abs(value) ((value<0) ? -value : value)
#endif
static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*);
static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length);
#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s))
#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l)
#define __Pyx_PyBytes_FromString PyBytes_FromString
#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize
static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*);
#if PY_MAJOR_VERSION < 3
#define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString
#define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize
#else
#define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString
#define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize
#endif
#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s))
#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s))
#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s))
#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s))
#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s))
#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s))
#define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s))
#define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s))
#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s))
#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s))
#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s))
#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s)
#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s)
#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s)
#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s)
#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s)
static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) {
const Py_UNICODE *u_end = u;
while (*u_end++) ;
return (size_t)(u_end - u - 1);
}
#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u))
#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode
#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode
#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj)
#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None)
static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b);
static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*);
static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*);
static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x);
#define __Pyx_PySequence_Tuple(obj)\
(likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj))
static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*);
static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t);
static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject*);
#if CYTHON_ASSUME_SAFE_MACROS
#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x))
#else
#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x)
#endif
#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x))
#if PY_MAJOR_VERSION >= 3
#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x))
#else
#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x))
#endif
#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Float(x))
#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
static int __Pyx_sys_getdefaultencoding_not_ascii;
static int __Pyx_init_sys_getdefaultencoding_params(void) {
PyObject* sys;
PyObject* default_encoding = NULL;
PyObject* ascii_chars_u = NULL;
PyObject* ascii_chars_b = NULL;
const char* default_encoding_c;
sys = PyImport_ImportModule("sys");
if (!sys) goto bad;
default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL);
Py_DECREF(sys);
if (!default_encoding) goto bad;
default_encoding_c = PyBytes_AsString(default_encoding);
if (!default_encoding_c) goto bad;
if (strcmp(default_encoding_c, "ascii") == 0) {
__Pyx_sys_getdefaultencoding_not_ascii = 0;
} else {
char ascii_chars[128];
int c;
for (c = 0; c < 128; c++) {
ascii_chars[c] = c;
}
__Pyx_sys_getdefaultencoding_not_ascii = 1;
ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL);
if (!ascii_chars_u) goto bad;
ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL);
if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) {
PyErr_Format(
PyExc_ValueError,
"This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.",
default_encoding_c);
goto bad;
}
Py_DECREF(ascii_chars_u);
Py_DECREF(ascii_chars_b);
}
Py_DECREF(default_encoding);
return 0;
bad:
Py_XDECREF(default_encoding);
Py_XDECREF(ascii_chars_u);
Py_XDECREF(ascii_chars_b);
return -1;
}
#endif
#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3
#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL)
#else
#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL)
#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT
static char* __PYX_DEFAULT_STRING_ENCODING;
/* One-time init helper (builds with c_string_encoding=default): caches the
 * name returned by sys.getdefaultencoding() in the malloc'ed global
 * __PYX_DEFAULT_STRING_ENCODING (deliberately never freed - it lives for the
 * whole process).  Returns 0 on success, -1 with a Python exception set.
 *
 * Fix: the original returned -1 on malloc() failure WITHOUT setting a Python
 * exception, breaking the C-API error contract (CPython later raises
 * "SystemError: ... returned -1 without setting an error").  Report the
 * failure via PyErr_NoMemory() before taking the error exit. */
static int __Pyx_init_sys_getdefaultencoding_params(void) {
    PyObject* sys;
    PyObject* default_encoding = NULL;
    char* default_encoding_c;
    sys = PyImport_ImportModule("sys");
    if (!sys) goto bad;
    default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL);
    Py_DECREF(sys);
    if (!default_encoding) goto bad;
    default_encoding_c = PyBytes_AsString(default_encoding);
    if (!default_encoding_c) goto bad;
    /* Copy the name out before releasing the bytes object:
     * default_encoding_c points into its internal buffer. */
    __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1);
    if (!__PYX_DEFAULT_STRING_ENCODING) {
        PyErr_NoMemory();
        goto bad;
    }
    strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c);
    Py_DECREF(default_encoding);
    return 0;
bad:
    Py_XDECREF(default_encoding);
    return -1;
}
#endif
#endif
/* Test for GCC > 2.95 */
#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95)))
#define likely(x) __builtin_expect(!!(x), 1)
#define unlikely(x) __builtin_expect(!!(x), 0)
#else /* !__GNUC__ or GCC < 2.95 */
#define likely(x) (x)
#define unlikely(x) (x)
#endif /* __GNUC__ */
/* No-op that consumes a pointer; evidently used to make the compiler treat a
 * variable as used/initialized without generating code. */
static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; }
static PyObject *__pyx_m = NULL;
static PyObject *__pyx_d;
static PyObject *__pyx_b;
static PyObject *__pyx_cython_runtime = NULL;
static PyObject *__pyx_empty_tuple;
static PyObject *__pyx_empty_bytes;
static PyObject *__pyx_empty_unicode;
static int __pyx_lineno;
static int __pyx_clineno = 0;
static const char * __pyx_cfilenm= __FILE__;
static const char *__pyx_filename;
static const char *__pyx_f[] = {
"httptools\\parser\\parser.pyx",
"stringsource",
"type.pxd",
"bool.pxd",
"complex.pxd",
};
/*--- Type declarations ---*/
struct __pyx_obj_9httptools_6parser_6parser_HttpParser;
struct __pyx_obj_9httptools_6parser_6parser_HttpRequestParser;
struct __pyx_obj_9httptools_6parser_6parser_HttpResponseParser;
/* "httptools/parser/parser.pyx":26
*
* @cython.internal
* cdef class HttpParser: # <<<<<<<<<<<<<<
*
* cdef:
*/
/* Instance layout of the internal cdef base class HttpParser
 * (httptools/parser/parser.pyx:26).  Field order is ABI - do not reorder. */
struct __pyx_obj_9httptools_6parser_6parser_HttpParser {
  PyObject_HEAD
  struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpParser *__pyx_vtab; /* cdef-method vtable */
  llhttp_t *_cparser;            /* llhttp parser state */
  llhttp_settings_t *_csettings; /* llhttp callback configuration */
  PyObject *_current_header_name;  /* header field accumulated across callbacks (see _on_header_field) */
  PyObject *_current_header_value; /* header value accumulated across callbacks (see _on_header_value) */
  /* _proto_on_*: presumably callbacks fetched from the wrapped protocol
   * object (names match the interned strings on_url, on_status, ...) -
   * confirm against HttpParser._init, which is outside this chunk. */
  PyObject *_proto_on_url;
  PyObject *_proto_on_status;
  PyObject *_proto_on_body;
  PyObject *_proto_on_header;
  PyObject *_proto_on_headers_complete;
  PyObject *_proto_on_message_complete;
  PyObject *_proto_on_chunk_header;
  PyObject *_proto_on_chunk_complete;
  PyObject *_proto_on_message_begin;
  PyObject *_last_error; /* NOTE(review): presumably an exception raised inside a callback, kept to re-raise later - confirm in feed_data */
  Py_buffer py_buf; /* buffer view over the bytes-like object given to feed_data */
};
/* "httptools/parser/parser.pyx":215
*
*
* cdef class HttpRequestParser(HttpParser): # <<<<<<<<<<<<<<
*
* def __init__(self, protocol):
*/
/* Instance layout of HttpRequestParser (parser.pyx:215): adds no fields,
 * it just embeds the HttpParser base layout. */
struct __pyx_obj_9httptools_6parser_6parser_HttpRequestParser {
  struct __pyx_obj_9httptools_6parser_6parser_HttpParser __pyx_base;
};
/* "httptools/parser/parser.pyx":229
*
*
* cdef class HttpResponseParser(HttpParser): # <<<<<<<<<<<<<<
*
* def __init__(self, protocol):
*/
/* Instance layout of HttpResponseParser (parser.pyx:229): adds no fields,
 * it just embeds the HttpParser base layout. */
struct __pyx_obj_9httptools_6parser_6parser_HttpResponseParser {
  struct __pyx_obj_9httptools_6parser_6parser_HttpParser __pyx_base;
};
/* "httptools/parser/parser.pyx":26
*
* @cython.internal
* cdef class HttpParser: # <<<<<<<<<<<<<<
*
* cdef:
*/
/* vtable of cdef methods for HttpParser; slot order is ABI - do not reorder. */
struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpParser {
  PyObject *(*_init)(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *, PyObject *, llhttp_type_t); /* set up parser for request or response mode */
  PyObject *(*_maybe_call_on_header)(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *);
  PyObject *(*_on_header_field)(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *, PyObject *);
  PyObject *(*_on_header_value)(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *, PyObject *);
  PyObject *(*_on_headers_complete)(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *);
  PyObject *(*_on_chunk_header)(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *);
  PyObject *(*_on_chunk_complete)(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *);
};
static struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpParser *__pyx_vtabptr_9httptools_6parser_6parser_HttpParser;
/* "httptools/parser/parser.pyx":215
*
*
* cdef class HttpRequestParser(HttpParser): # <<<<<<<<<<<<<<
*
* def __init__(self, protocol):
*/
/* vtable for HttpRequestParser: inherits all slots from the HttpParser base. */
struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpRequestParser {
  struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpParser __pyx_base;
};
static struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpRequestParser *__pyx_vtabptr_9httptools_6parser_6parser_HttpRequestParser;
/* "httptools/parser/parser.pyx":229
*
*
* cdef class HttpResponseParser(HttpParser): # <<<<<<<<<<<<<<
*
* def __init__(self, protocol):
*/
/* vtable for HttpResponseParser: inherits all slots from the HttpParser base. */
struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpResponseParser {
  struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpParser __pyx_base;
};
static struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpResponseParser *__pyx_vtabptr_9httptools_6parser_6parser_HttpResponseParser;
/* --- Runtime support code (head) --- */
/* Refnanny.proto */
#ifndef CYTHON_REFNANNY
#define CYTHON_REFNANNY 0
#endif
#if CYTHON_REFNANNY
/* Function table of the optional RefNanny refcount-debugging API
 * (imported at runtime by __Pyx_RefNannyImportAPI when CYTHON_REFNANNY). */
typedef struct {
    void (*INCREF)(void*, PyObject*, int);
    void (*DECREF)(void*, PyObject*, int);
    void (*GOTREF)(void*, PyObject*, int);
    void (*GIVEREF)(void*, PyObject*, int);
    void* (*SetupContext)(const char*, int, const char*);
    void (*FinishContext)(void**);
  } __Pyx_RefNannyAPIStruct;
static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL;
static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname);
#define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL;
#ifdef WITH_THREAD
#define __Pyx_RefNannySetupContext(name, acquire_gil)\
if (acquire_gil) {\
PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\
__pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\
PyGILState_Release(__pyx_gilstate_save);\
} else {\
__pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\
}
#else
#define __Pyx_RefNannySetupContext(name, acquire_gil)\
__pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__)
#endif
#define __Pyx_RefNannyFinishContext()\
__Pyx_RefNanny->FinishContext(&__pyx_refnanny)
#define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
#define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
#define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
#define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
#define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0)
#define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0)
#define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0)
#define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0)
#else
#define __Pyx_RefNannyDeclarations
#define __Pyx_RefNannySetupContext(name, acquire_gil)
#define __Pyx_RefNannyFinishContext()
#define __Pyx_INCREF(r) Py_INCREF(r)
#define __Pyx_DECREF(r) Py_DECREF(r)
#define __Pyx_GOTREF(r)
#define __Pyx_GIVEREF(r)
#define __Pyx_XINCREF(r) Py_XINCREF(r)
#define __Pyx_XDECREF(r) Py_XDECREF(r)
#define __Pyx_XGOTREF(r)
#define __Pyx_XGIVEREF(r)
#endif
#define __Pyx_XDECREF_SET(r, v) do {\
PyObject *tmp = (PyObject *) r;\
r = v; __Pyx_XDECREF(tmp);\
} while (0)
#define __Pyx_DECREF_SET(r, v) do {\
PyObject *tmp = (PyObject *) r;\
r = v; __Pyx_DECREF(tmp);\
} while (0)
#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0)
#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0)
/* PyObjectGetAttrStr.proto */
#if CYTHON_USE_TYPE_SLOTS
static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name);
#else
#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n)
#endif
/* GetBuiltinName.proto */
static PyObject *__Pyx_GetBuiltinName(PyObject *name);
/* RaiseArgTupleInvalid.proto */
static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact,
Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found);
/* KeywordStringCheck.proto */
static int __Pyx_CheckKeywordStrings(PyObject *kwdict, const char* function_name, int kw_allowed);
/* PyErrExceptionMatches.proto */
#if CYTHON_FAST_THREAD_STATE
#define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState(__pyx_tstate, err)
static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err);
#else
#define __Pyx_PyErr_ExceptionMatches(err) PyErr_ExceptionMatches(err)
#endif
/* PyThreadStateGet.proto */
#if CYTHON_FAST_THREAD_STATE
#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate;
#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current;
#define __Pyx_PyErr_Occurred() __pyx_tstate->curexc_type
#else
#define __Pyx_PyThreadState_declare
#define __Pyx_PyThreadState_assign
#define __Pyx_PyErr_Occurred() PyErr_Occurred()
#endif
/* PyErrFetchRestore.proto */
#if CYTHON_FAST_THREAD_STATE
#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL)
#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb)
#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb)
#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb)
#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb)
static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb);
static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb);
#if CYTHON_COMPILING_IN_CPYTHON
#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL))
#else
#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc)
#endif
#else
#define __Pyx_PyErr_Clear() PyErr_Clear()
#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc)
#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb)
#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb)
#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb)
#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb)
#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb)
#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb)
#endif
/* GetAttr.proto */
static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *, PyObject *);
/* GetAttr3.proto */
static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *, PyObject *, PyObject *);
/* PyFunctionFastCall.proto */
#if CYTHON_FAST_PYCALL
#define __Pyx_PyFunction_FastCall(func, args, nargs)\
__Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL)
#if 1 || PY_VERSION_HEX < 0x030600B1
static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs);
#else
#define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs)
#endif
#define __Pyx_BUILD_ASSERT_EXPR(cond)\
(sizeof(char [1 - 2*!(cond)]) - 1)
#ifndef Py_MEMBER_SIZE
#define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member)
#endif
#if CYTHON_FAST_PYCALL
static size_t __pyx_pyframe_localsplus_offset = 0;
#include "frameobject.h"
#define __Pxy_PyFrame_Initialize_Offsets()\
((void)__Pyx_BUILD_ASSERT_EXPR(sizeof(PyFrameObject) == offsetof(PyFrameObject, f_localsplus) + Py_MEMBER_SIZE(PyFrameObject, f_localsplus)),\
(void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus)))
#define __Pyx_PyFrame_GetLocalsplus(frame)\
(assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset))
#endif // CYTHON_FAST_PYCALL
#endif
/* PyCFunctionFastCall.proto */
#if CYTHON_FAST_PYCCALL
static CYTHON_INLINE PyObject *__Pyx_PyCFunction_FastCall(PyObject *func, PyObject **args, Py_ssize_t nargs);
#else
#define __Pyx_PyCFunction_FastCall(func, args, nargs) (assert(0), NULL)
#endif
/* PyObjectCall.proto */
#if CYTHON_COMPILING_IN_CPYTHON
static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw);
#else
#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw)
#endif
/* PyObjectCallMethO.proto */
#if CYTHON_COMPILING_IN_CPYTHON
static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg);
#endif
/* PyObjectCallNoArg.proto */
#if CYTHON_COMPILING_IN_CPYTHON
static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func);
#else
#define __Pyx_PyObject_CallNoArg(func) __Pyx_PyObject_Call(func, __pyx_empty_tuple, NULL)
#endif
/* PyObjectCallOneArg.proto */
static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg);
/* PyDictVersioning.proto */
#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS
#define __PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1)
#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag)
#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\
(version_var) = __PYX_GET_DICT_VERSION(dict);\
(cache_var) = (value);
#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\
static PY_UINT64_T __pyx_dict_version = 0;\
static PyObject *__pyx_dict_cached_value = NULL;\
if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\
(VAR) = __pyx_dict_cached_value;\
} else {\
(VAR) = __pyx_dict_cached_value = (LOOKUP);\
__pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\
}\
}
static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj);
static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj);
static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version);
#else
#define __PYX_GET_DICT_VERSION(dict) (0)
#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)
#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP);
#endif
/* GetModuleGlobalName.proto */
#if CYTHON_USE_DICT_VERSIONS
#define __Pyx_GetModuleGlobalName(var, name) {\
static PY_UINT64_T __pyx_dict_version = 0;\
static PyObject *__pyx_dict_cached_value = NULL;\
(var) = (likely(__pyx_dict_version == __PYX_GET_DICT_VERSION(__pyx_d))) ?\
(likely(__pyx_dict_cached_value) ? __Pyx_NewRef(__pyx_dict_cached_value) : __Pyx_GetBuiltinName(name)) :\
__Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\
}
#define __Pyx_GetModuleGlobalNameUncached(var, name) {\
PY_UINT64_T __pyx_dict_version;\
PyObject *__pyx_dict_cached_value;\
(var) = __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\
}
static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value);
#else
#define __Pyx_GetModuleGlobalName(var, name) (var) = __Pyx__GetModuleGlobalName(name)
#define __Pyx_GetModuleGlobalNameUncached(var, name) (var) = __Pyx__GetModuleGlobalName(name)
static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name);
#endif
/* PyObjectCall2Args.proto */
static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2);
/* RaiseException.proto */
static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause);
/* GetException.proto */
#if CYTHON_FAST_THREAD_STATE
#define __Pyx_GetException(type, value, tb) __Pyx__GetException(__pyx_tstate, type, value, tb)
static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb);
#else
static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb);
#endif
/* SwapException.proto */
#if CYTHON_FAST_THREAD_STATE
#define __Pyx_ExceptionSwap(type, value, tb) __Pyx__ExceptionSwap(__pyx_tstate, type, value, tb)
static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb);
#else
static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb);
#endif
/* GetTopmostException.proto */
#if CYTHON_USE_EXC_INFO_STACK
static _PyErr_StackItem * __Pyx_PyErr_GetTopmostException(PyThreadState *tstate);
#endif
/* SaveResetException.proto */
#if CYTHON_FAST_THREAD_STATE
#define __Pyx_ExceptionSave(type, value, tb) __Pyx__ExceptionSave(__pyx_tstate, type, value, tb)
static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb);
#define __Pyx_ExceptionReset(type, value, tb) __Pyx__ExceptionReset(__pyx_tstate, type, value, tb)
static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb);
#else
#define __Pyx_ExceptionSave(type, value, tb) PyErr_GetExcInfo(type, value, tb)
#define __Pyx_ExceptionReset(type, value, tb) PyErr_SetExcInfo(type, value, tb)
#endif
/* PyObjectSetAttrStr.proto */
#if CYTHON_USE_TYPE_SLOTS
#define __Pyx_PyObject_DelAttrStr(o,n) __Pyx_PyObject_SetAttrStr(o, n, NULL)
static CYTHON_INLINE int __Pyx_PyObject_SetAttrStr(PyObject* obj, PyObject* attr_name, PyObject* value);
#else
#define __Pyx_PyObject_DelAttrStr(o,n) PyObject_DelAttr(o,n)
#define __Pyx_PyObject_SetAttrStr(o,n,v) PyObject_SetAttr(o,n,v)
#endif
/* RaiseDoubleKeywords.proto */
static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name);
/* ParseKeywords.proto */
static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\
PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\
const char* function_name);
/* decode_c_string_utf16.proto */
/* Decode UTF-16 data, letting a leading BOM (if present) pick the byte order;
 * byteorder == 0 otherwise means the platform's native order. */
static CYTHON_INLINE PyObject *__Pyx_PyUnicode_DecodeUTF16(const char *s, Py_ssize_t size, const char *errors) {
    int bo = 0;  /* 0: native order unless the data starts with a BOM */
    PyObject *result = PyUnicode_DecodeUTF16(s, size, errors, &bo);
    return result;
}
/* Decode UTF-16 data that is known to be little-endian (byteorder == -1). */
static CYTHON_INLINE PyObject *__Pyx_PyUnicode_DecodeUTF16LE(const char *s, Py_ssize_t size, const char *errors) {
    int bo = -1;  /* -1: force little-endian interpretation */
    PyObject *result = PyUnicode_DecodeUTF16(s, size, errors, &bo);
    return result;
}
/* Decode UTF-16 data that is known to be big-endian (byteorder == 1). */
static CYTHON_INLINE PyObject *__Pyx_PyUnicode_DecodeUTF16BE(const char *s, Py_ssize_t size, const char *errors) {
    int bo = 1;  /* 1: force big-endian interpretation */
    PyObject *result = PyUnicode_DecodeUTF16(s, size, errors, &bo);
    return result;
}
/* decode_c_bytes.proto */
static CYTHON_INLINE PyObject* __Pyx_decode_c_bytes(
const char* cstring, Py_ssize_t length, Py_ssize_t start, Py_ssize_t stop,
const char* encoding, const char* errors,
PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors));
/* decode_bytes.proto */
/* Decode the slice [start, stop) of a bytes object by delegating to
 * __Pyx_decode_c_bytes with the object's raw buffer and total length. */
static CYTHON_INLINE PyObject* __Pyx_decode_bytes(
    PyObject* string, Py_ssize_t start, Py_ssize_t stop,
    const char* encoding, const char* errors,
    PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)) {
    const char* data = PyBytes_AS_STRING(string);
    Py_ssize_t length = PyBytes_GET_SIZE(string);
    return __Pyx_decode_c_bytes(data, length, start, stop, encoding, errors, decode_func);
}
/* PyObject_GenericGetAttrNoDict.proto */
#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000
static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name);
#else
#define __Pyx_PyObject_GenericGetAttrNoDict PyObject_GenericGetAttr
#endif
/* PyObject_GenericGetAttr.proto */
#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000
static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name);
#else
#define __Pyx_PyObject_GenericGetAttr PyObject_GenericGetAttr
#endif
/* SetVTable.proto */
static int __Pyx_SetVtable(PyObject *dict, void *vtable);
/* PyObjectGetAttrStrNoError.proto */
static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name);
/* SetupReduce.proto */
static int __Pyx_setup_reduce(PyObject* type_obj);
/* TypeImport.proto */
#ifndef __PYX_HAVE_RT_ImportType_proto
#define __PYX_HAVE_RT_ImportType_proto
enum __Pyx_ImportType_CheckSize {
__Pyx_ImportType_CheckSize_Error = 0,
__Pyx_ImportType_CheckSize_Warn = 1,
__Pyx_ImportType_CheckSize_Ignore = 2
};
static PyTypeObject *__Pyx_ImportType(PyObject* module, const char *module_name, const char *class_name, size_t size, enum __Pyx_ImportType_CheckSize check_size);
#endif
/* Import.proto */
static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level);
/* ImportFrom.proto */
static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name);
/* CLineInTraceback.proto */
#ifdef CYTHON_CLINE_IN_TRACEBACK
#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? c_line : 0)
#else
static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line);
#endif
/* CodeObjectCache.proto */
/* Cache mapping a C source line number to a PyCodeObject, used by the
 * traceback machinery (__pyx_find_code_object / __pyx_insert_code_object). */
typedef struct {
    PyCodeObject* code_object;
    int code_line;
} __Pyx_CodeObjectCacheEntry;
struct __Pyx_CodeObjectCache {
    int count;     /* entries currently stored */
    int max_count; /* allocated capacity of the entries array */
    __Pyx_CodeObjectCacheEntry* entries;
};
static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL};
static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line);
static PyCodeObject *__pyx_find_code_object(int code_line);
static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object);
/* AddTraceback.proto */
static void __Pyx_AddTraceback(const char *funcname, int c_line,
int py_line, const char *filename);
/* GCCDiagnostics.proto */
#if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6))
#define __Pyx_HAS_GCC_DIAGNOSTIC
#endif
/* CIntToPy.proto */
static CYTHON_INLINE PyObject* __Pyx_PyInt_From_uint8_t(uint8_t value);
/* CIntToPy.proto */
static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value);
/* CIntToPy.proto */
static CYTHON_INLINE PyObject* __Pyx_PyInt_From_ptrdiff_t(ptrdiff_t value);
/* CIntToPy.proto */
static CYTHON_INLINE PyObject* __Pyx_PyInt_From_uint16_t(uint16_t value);
/* CIntToPy.proto */
static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value);
/* CIntFromPy.proto */
static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *);
/* CIntFromPy.proto */
static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *);
/* FastTypeChecks.proto */
#if CYTHON_COMPILING_IN_CPYTHON
#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type)
static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b);
static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type);
static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2);
#else
#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type)
#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type)
#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2))
#endif
#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception)
/* CheckBinaryVersion.proto */
static int __Pyx_check_binary_version(void);
/* InitStrings.proto */
static int __Pyx_InitStrings(__Pyx_StringTabEntry *t);
static PyObject *__pyx_f_9httptools_6parser_6parser_10HttpParser__init(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self, PyObject *__pyx_v_protocol, llhttp_type_t __pyx_v_mode); /* proto*/
static PyObject *__pyx_f_9httptools_6parser_6parser_10HttpParser__maybe_call_on_header(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self); /* proto*/
static PyObject *__pyx_f_9httptools_6parser_6parser_10HttpParser__on_header_field(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self, PyObject *__pyx_v_field); /* proto*/
static PyObject *__pyx_f_9httptools_6parser_6parser_10HttpParser__on_header_value(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self, PyObject *__pyx_v_val); /* proto*/
static PyObject *__pyx_f_9httptools_6parser_6parser_10HttpParser__on_headers_complete(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self); /* proto*/
static PyObject *__pyx_f_9httptools_6parser_6parser_10HttpParser__on_chunk_header(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self); /* proto*/
static PyObject *__pyx_f_9httptools_6parser_6parser_10HttpParser__on_chunk_complete(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self); /* proto*/
/* Module declarations from 'cpython.mem' */
/* Module declarations from 'cpython.version' */
/* Module declarations from '__builtin__' */
/* Module declarations from 'cpython.type' */
static PyTypeObject *__pyx_ptype_7cpython_4type_type = 0;
/* Module declarations from 'libc.string' */
/* Module declarations from 'libc.stdio' */
/* Module declarations from 'cpython.object' */
/* Module declarations from 'cpython.ref' */
/* Module declarations from 'cpython.exc' */
/* Module declarations from 'cpython.module' */
/* Module declarations from 'cpython.tuple' */
/* Module declarations from 'cpython.list' */
/* Module declarations from 'cpython.sequence' */
/* Module declarations from 'cpython.mapping' */
/* Module declarations from 'cpython.iterator' */
/* Module declarations from 'cpython.number' */
/* Module declarations from 'cpython.int' */
/* Module declarations from '__builtin__' */
/* Module declarations from 'cpython.bool' */
static PyTypeObject *__pyx_ptype_7cpython_4bool_bool = 0;
/* Module declarations from 'cpython.long' */
/* Module declarations from 'cpython.float' */
/* Module declarations from '__builtin__' */
/* Module declarations from 'cpython.complex' */
static PyTypeObject *__pyx_ptype_7cpython_7complex_complex = 0;
/* Module declarations from 'cpython.string' */
/* Module declarations from 'cpython.unicode' */
/* Module declarations from 'cpython.dict' */
/* Module declarations from 'cpython.instance' */
/* Module declarations from 'cpython.function' */
/* Module declarations from 'cpython.method' */
/* Module declarations from 'cpython.weakref' */
/* Module declarations from 'cpython.getargs' */
/* Module declarations from 'cpython.pythread' */
/* Module declarations from 'cpython.pystate' */
/* Module declarations from 'cpython.cobject' */
/* Module declarations from 'cpython.oldbuffer' */
/* Module declarations from 'cpython.set' */
/* Module declarations from 'cpython.buffer' */
/* Module declarations from 'cpython.bytes' */
/* Module declarations from 'cpython.pycapsule' */
/* Module declarations from 'cpython' */
/* Module declarations from 'httptools.parser.python' */
/* Module declarations from 'cython' */
/* Module declarations from 'httptools.parser' */
/* Module declarations from 'libc.stdint' */
/* Module declarations from 'httptools.parser.cparser' */
/* Module declarations from 'httptools.parser.parser' */
static PyTypeObject *__pyx_ptype_9httptools_6parser_6parser_HttpParser = 0;
static PyTypeObject *__pyx_ptype_9httptools_6parser_6parser_HttpRequestParser = 0;
static PyTypeObject *__pyx_ptype_9httptools_6parser_6parser_HttpResponseParser = 0;
static int __pyx_f_9httptools_6parser_6parser_cb_on_message_begin(llhttp_t *); /*proto*/
static int __pyx_f_9httptools_6parser_6parser_cb_on_url(llhttp_t *, char const *, size_t); /*proto*/
static int __pyx_f_9httptools_6parser_6parser_cb_on_status(llhttp_t *, char const *, size_t); /*proto*/
static int __pyx_f_9httptools_6parser_6parser_cb_on_header_field(llhttp_t *, char const *, size_t); /*proto*/
static int __pyx_f_9httptools_6parser_6parser_cb_on_header_value(llhttp_t *, char const *, size_t); /*proto*/
static int __pyx_f_9httptools_6parser_6parser_cb_on_headers_complete(llhttp_t *); /*proto*/
static int __pyx_f_9httptools_6parser_6parser_cb_on_body(llhttp_t *, char const *, size_t); /*proto*/
static int __pyx_f_9httptools_6parser_6parser_cb_on_message_complete(llhttp_t *); /*proto*/
static int __pyx_f_9httptools_6parser_6parser_cb_on_chunk_header(llhttp_t *); /*proto*/
static int __pyx_f_9httptools_6parser_6parser_cb_on_chunk_complete(llhttp_t *); /*proto*/
static PyObject *__pyx_f_9httptools_6parser_6parser_parser_error_from_errno(llhttp_t *, llhttp_errno_t); /*proto*/
#define __Pyx_MODULE_NAME "httptools.parser.parser"
extern int __pyx_module_is_main_httptools__parser__parser;
int __pyx_module_is_main_httptools__parser__parser = 0;
/* Implementation of 'httptools.parser.parser' */
static PyObject *__pyx_builtin_MemoryError;
static PyObject *__pyx_builtin_TypeError;
static PyObject *__pyx_builtin_BaseException;
static const char __pyx_k_[] = "{}.{}";
static const char __pyx_k_all[] = "__all__";
static const char __pyx_k_main[] = "__main__";
static const char __pyx_k_name[] = "__name__";
static const char __pyx_k_test[] = "__test__";
static const char __pyx_k_errors[] = "errors";
static const char __pyx_k_format[] = "format";
static const char __pyx_k_import[] = "__import__";
static const char __pyx_k_on_url[] = "on_url";
static const char __pyx_k_reduce[] = "__reduce__";
static const char __pyx_k_context[] = "__context__";
static const char __pyx_k_on_body[] = "on_body";
static const char __pyx_k_getstate[] = "__getstate__";
static const char __pyx_k_protocol[] = "protocol";
static const char __pyx_k_setstate[] = "__setstate__";
static const char __pyx_k_TypeError[] = "TypeError";
static const char __pyx_k_on_header[] = "on_header";
static const char __pyx_k_on_status[] = "on_status";
static const char __pyx_k_reduce_ex[] = "__reduce_ex__";
static const char __pyx_k_pyx_vtable[] = "__pyx_vtable__";
static const char __pyx_k_MemoryError[] = "MemoryError";
static const char __pyx_k_BaseException[] = "BaseException";
static const char __pyx_k_reduce_cython[] = "__reduce_cython__";
/* Raw C string constants from which the module's interned Python strings
   are built at import time: exception class names, protocol callback
   attribute names, dunder names and error messages. */
static const char __pyx_k_HttpParserError[] = "HttpParserError";
static const char __pyx_k_on_chunk_header[] = "on_chunk_header";
static const char __pyx_k_setstate_cython[] = "__setstate_cython__";
static const char __pyx_k_on_message_begin[] = "on_message_begin";
static const char __pyx_k_HttpParserUpgrade[] = "HttpParserUpgrade";
static const char __pyx_k_HttpRequestParser[] = "HttpRequestParser";
static const char __pyx_k_on_chunk_complete[] = "on_chunk_complete";
static const char __pyx_k_HttpResponseParser[] = "HttpResponseParser";
static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback";
static const char __pyx_k_on_headers_complete[] = "on_headers_complete";
static const char __pyx_k_on_message_complete[] = "on_message_complete";
static const char __pyx_k_invalid_headers_state[] = "invalid headers state";
static const char __pyx_k_HttpParserCallbackError[] = "HttpParserCallbackError";
static const char __pyx_k_HttpParserInvalidURLError[] = "HttpParserInvalidURLError";
static const char __pyx_k_HttpParserInvalidMethodError[] = "HttpParserInvalidMethodError";
static const char __pyx_k_HttpParserInvalidStatusError[] = "HttpParserInvalidStatusError";
static const char __pyx_k_no_default___reduce___due_to_non[] = "no default __reduce__ due to non-trivial __cinit__";
/* Cached Python string objects created once from the constants above
   (naming: _n_s = str identifier, _n_u = unicode identifier,
   _kp_u/_kp_s = non-identifier "keyword" strings). */
static PyObject *__pyx_kp_u_;
static PyObject *__pyx_n_s_BaseException;
static PyObject *__pyx_n_s_HttpParserCallbackError;
static PyObject *__pyx_n_s_HttpParserError;
static PyObject *__pyx_n_s_HttpParserInvalidMethodError;
static PyObject *__pyx_n_s_HttpParserInvalidStatusError;
static PyObject *__pyx_n_s_HttpParserInvalidURLError;
static PyObject *__pyx_n_s_HttpParserUpgrade;
static PyObject *__pyx_n_s_HttpRequestParser;
static PyObject *__pyx_n_u_HttpRequestParser;
static PyObject *__pyx_n_s_HttpResponseParser;
static PyObject *__pyx_n_u_HttpResponseParser;
static PyObject *__pyx_n_s_MemoryError;
static PyObject *__pyx_n_s_TypeError;
static PyObject *__pyx_n_s_all;
static PyObject *__pyx_n_s_cline_in_traceback;
static PyObject *__pyx_n_s_context;
static PyObject *__pyx_n_s_errors;
static PyObject *__pyx_n_s_format;
static PyObject *__pyx_n_s_getstate;
static PyObject *__pyx_n_s_import;
static PyObject *__pyx_kp_u_invalid_headers_state;
static PyObject *__pyx_n_s_main;
static PyObject *__pyx_n_s_name;
static PyObject *__pyx_kp_s_no_default___reduce___due_to_non;
static PyObject *__pyx_n_u_on_body;
static PyObject *__pyx_n_u_on_chunk_complete;
static PyObject *__pyx_n_u_on_chunk_header;
static PyObject *__pyx_n_u_on_header;
static PyObject *__pyx_n_u_on_headers_complete;
static PyObject *__pyx_n_u_on_message_begin;
static PyObject *__pyx_n_u_on_message_complete;
static PyObject *__pyx_n_u_on_status;
static PyObject *__pyx_n_u_on_url;
static PyObject *__pyx_n_s_protocol;
static PyObject *__pyx_n_s_pyx_vtable;
static PyObject *__pyx_n_s_reduce;
static PyObject *__pyx_n_s_reduce_cython;
static PyObject *__pyx_n_s_reduce_ex;
static PyObject *__pyx_n_s_setstate;
static PyObject *__pyx_n_s_setstate_cython;
static PyObject *__pyx_n_s_test;
/* Forward declarations of the Python method implementations (__pyx_pf_*)
   for the HttpParser base class and the HttpRequestParser /
   HttpResponseParser subclasses. */
static int __pyx_pf_9httptools_6parser_6parser_10HttpParser___cinit__(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self); /* proto */
static void __pyx_pf_9httptools_6parser_6parser_10HttpParser_2__dealloc__(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self); /* proto */
static PyObject *__pyx_pf_9httptools_6parser_6parser_10HttpParser_4get_http_version(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self); /* proto */
static PyObject *__pyx_pf_9httptools_6parser_6parser_10HttpParser_6should_keep_alive(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self); /* proto */
static PyObject *__pyx_pf_9httptools_6parser_6parser_10HttpParser_8should_upgrade(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self); /* proto */
static PyObject *__pyx_pf_9httptools_6parser_6parser_10HttpParser_10feed_data(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self, PyObject *__pyx_v_data); /* proto */
static PyObject *__pyx_pf_9httptools_6parser_6parser_10HttpParser_12__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self); /* proto */
static PyObject *__pyx_pf_9httptools_6parser_6parser_10HttpParser_14__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state); /* proto */
static int __pyx_pf_9httptools_6parser_6parser_17HttpRequestParser___init__(struct __pyx_obj_9httptools_6parser_6parser_HttpRequestParser *__pyx_v_self, PyObject *__pyx_v_protocol); /* proto */
static PyObject *__pyx_pf_9httptools_6parser_6parser_17HttpRequestParser_2get_method(struct __pyx_obj_9httptools_6parser_6parser_HttpRequestParser *__pyx_v_self); /* proto */
static PyObject *__pyx_pf_9httptools_6parser_6parser_17HttpRequestParser_4__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_9httptools_6parser_6parser_HttpRequestParser *__pyx_v_self); /* proto */
static PyObject *__pyx_pf_9httptools_6parser_6parser_17HttpRequestParser_6__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_9httptools_6parser_6parser_HttpRequestParser *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state); /* proto */
static int __pyx_pf_9httptools_6parser_6parser_18HttpResponseParser___init__(struct __pyx_obj_9httptools_6parser_6parser_HttpResponseParser *__pyx_v_self, PyObject *__pyx_v_protocol); /* proto */
static PyObject *__pyx_pf_9httptools_6parser_6parser_18HttpResponseParser_2get_status_code(struct __pyx_obj_9httptools_6parser_6parser_HttpResponseParser *__pyx_v_self); /* proto */
static PyObject *__pyx_pf_9httptools_6parser_6parser_18HttpResponseParser_4__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_9httptools_6parser_6parser_HttpResponseParser *__pyx_v_self); /* proto */
static PyObject *__pyx_pf_9httptools_6parser_6parser_18HttpResponseParser_6__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_9httptools_6parser_6parser_HttpResponseParser *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state); /* proto */
/* tp_new slot implementations for the three extension types, plus the
   cached constant tuples built at module init. */
static PyObject *__pyx_tp_new_9httptools_6parser_6parser_HttpParser(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/
static PyObject *__pyx_tp_new_9httptools_6parser_6parser_HttpRequestParser(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/
static PyObject *__pyx_tp_new_9httptools_6parser_6parser_HttpResponseParser(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/
static PyObject *__pyx_tuple__2;
static PyObject *__pyx_tuple__3;
static PyObject *__pyx_tuple__4;
static PyObject *__pyx_tuple__5;
static PyObject *__pyx_tuple__6;
static PyObject *__pyx_tuple__7;
static PyObject *__pyx_tuple__8;
/* Late includes */
/* "httptools/parser/parser.pyx":44
* Py_buffer py_buf
*
* def __cinit__(self): # <<<<<<<<<<<<<<
* self._cparser = <cparser.llhttp_t*> \
* PyMem_Malloc(sizeof(cparser.llhttp_t))
*/
/* Python wrapper */
/* Python-level wrapper for HttpParser.__cinit__: rejects any positional
   or keyword arguments, then delegates to the C implementation
   (__pyx_pf_..._10HttpParser___cinit__). Returns 0 on success, -1 on
   error (Cython tp_init/__cinit__ convention). */
static int __pyx_pw_9httptools_6parser_6parser_10HttpParser_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
static int __pyx_pw_9httptools_6parser_6parser_10HttpParser_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
  int __pyx_r;
  __Pyx_RefNannyDeclarations
  __Pyx_RefNannySetupContext("__cinit__ (wrapper)", 0);
  /* __cinit__ takes no arguments: any positional or keyword argument is a
     TypeError raised by the helpers below. */
  if (unlikely(PyTuple_GET_SIZE(__pyx_args) > 0)) {
    __Pyx_RaiseArgtupleInvalid("__cinit__", 1, 0, 0, PyTuple_GET_SIZE(__pyx_args)); return -1;}
  if (unlikely(__pyx_kwds) && unlikely(PyDict_Size(__pyx_kwds) > 0) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__cinit__", 0))) return -1;
  __pyx_r = __pyx_pf_9httptools_6parser_6parser_10HttpParser___cinit__(((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_v_self));
  /* function exit code */
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* HttpParser.__cinit__ implementation: allocates the llhttp parser
   (_cparser) and its settings struct (_csettings) with PyMem_Malloc and
   raises MemoryError if either allocation fails. Note: if the second
   allocation fails, _cparser is freed later by __dealloc__ (PyMem_Free
   on both pointers). Returns 0 on success, -1 on error. */
static int __pyx_pf_9httptools_6parser_6parser_10HttpParser___cinit__(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self) {
  int __pyx_r;
  __Pyx_RefNannyDeclarations
  int __pyx_t_1;
  int __pyx_lineno = 0;
  const char *__pyx_filename = NULL;
  int __pyx_clineno = 0;
  __Pyx_RefNannySetupContext("__cinit__", 0);
  /* "httptools/parser/parser.pyx":45
 * 
 *     def __cinit__(self):
 *         self._cparser = <cparser.llhttp_t*> \             # <<<<<<<<<<<<<<
 *             PyMem_Malloc(sizeof(cparser.llhttp_t))
 *         if self._cparser is NULL:
 */
  __pyx_v_self->_cparser = ((llhttp_t *)PyMem_Malloc((sizeof(llhttp_t))));
  /* "httptools/parser/parser.pyx":47
 *         self._cparser = <cparser.llhttp_t*> \
 *             PyMem_Malloc(sizeof(cparser.llhttp_t))
 *         if self._cparser is NULL:             # <<<<<<<<<<<<<<
 *             raise MemoryError()
 * 
 */
  __pyx_t_1 = ((__pyx_v_self->_cparser == NULL) != 0);
  if (unlikely(__pyx_t_1)) {
    /* "httptools/parser/parser.pyx":48
 *             PyMem_Malloc(sizeof(cparser.llhttp_t))
 *         if self._cparser is NULL:
 *             raise MemoryError()             # <<<<<<<<<<<<<<
 * 
 *         self._csettings = <cparser.llhttp_settings_t*> \
 */
    PyErr_NoMemory(); __PYX_ERR(0, 48, __pyx_L1_error)
    /* "httptools/parser/parser.pyx":47
 *         self._cparser = <cparser.llhttp_t*> \
 *             PyMem_Malloc(sizeof(cparser.llhttp_t))
 *         if self._cparser is NULL:             # <<<<<<<<<<<<<<
 *             raise MemoryError()
 * 
 */
  }
  /* "httptools/parser/parser.pyx":50
 *             raise MemoryError()
 * 
 *         self._csettings = <cparser.llhttp_settings_t*> \             # <<<<<<<<<<<<<<
 *             PyMem_Malloc(sizeof(cparser.llhttp_settings_t))
 *         if self._csettings is NULL:
 */
  __pyx_v_self->_csettings = ((llhttp_settings_t *)PyMem_Malloc((sizeof(llhttp_settings_t))));
  /* "httptools/parser/parser.pyx":52
 *         self._csettings = <cparser.llhttp_settings_t*> \
 *             PyMem_Malloc(sizeof(cparser.llhttp_settings_t))
 *         if self._csettings is NULL:             # <<<<<<<<<<<<<<
 *             raise MemoryError()
 * 
 */
  __pyx_t_1 = ((__pyx_v_self->_csettings == NULL) != 0);
  if (unlikely(__pyx_t_1)) {
    /* "httptools/parser/parser.pyx":53
 *             PyMem_Malloc(sizeof(cparser.llhttp_settings_t))
 *         if self._csettings is NULL:
 *             raise MemoryError()             # <<<<<<<<<<<<<<
 * 
 *     def __dealloc__(self):
 */
    PyErr_NoMemory(); __PYX_ERR(0, 53, __pyx_L1_error)
    /* "httptools/parser/parser.pyx":52
 *         self._csettings = <cparser.llhttp_settings_t*> \
 *             PyMem_Malloc(sizeof(cparser.llhttp_settings_t))
 *         if self._csettings is NULL:             # <<<<<<<<<<<<<<
 *             raise MemoryError()
 * 
 */
  }
  /* "httptools/parser/parser.pyx":44
 *         Py_buffer py_buf
 * 
 *     def __cinit__(self):             # <<<<<<<<<<<<<<
 *         self._cparser = <cparser.llhttp_t*> \
 *             PyMem_Malloc(sizeof(cparser.llhttp_t))
 */
  /* function exit code */
  __pyx_r = 0;
  goto __pyx_L0;
  __pyx_L1_error:;
  __Pyx_AddTraceback("httptools.parser.parser.HttpParser.__cinit__", __pyx_clineno, __pyx_lineno, __pyx_filename);
  __pyx_r = -1;
  __pyx_L0:;
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* "httptools/parser/parser.pyx":55
* raise MemoryError()
*
* def __dealloc__(self): # <<<<<<<<<<<<<<
* PyMem_Free(self._cparser)
* PyMem_Free(self._csettings)
*/
/* Python wrapper */
/* Python-level wrapper for HttpParser.__dealloc__: casts the generic
   PyObject* to the concrete struct type and delegates to the C
   implementation. Called from the type's tp_dealloc. */
static void __pyx_pw_9httptools_6parser_6parser_10HttpParser_3__dealloc__(PyObject *__pyx_v_self); /*proto*/
static void __pyx_pw_9httptools_6parser_6parser_10HttpParser_3__dealloc__(PyObject *__pyx_v_self) {
  __Pyx_RefNannyDeclarations
  __Pyx_RefNannySetupContext("__dealloc__ (wrapper)", 0);
  __pyx_pf_9httptools_6parser_6parser_10HttpParser_2__dealloc__(((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_v_self));
  /* function exit code */
  __Pyx_RefNannyFinishContext();
}
/* HttpParser.__dealloc__ implementation: releases the PyMem_Malloc'ed
   llhttp parser and settings structs allocated in __cinit__.
   PyMem_Free(NULL) is a no-op, so a partially-constructed instance is
   safe to tear down. */
static void __pyx_pf_9httptools_6parser_6parser_10HttpParser_2__dealloc__(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self) {
  __Pyx_RefNannyDeclarations
  __Pyx_RefNannySetupContext("__dealloc__", 0);
  /* "httptools/parser/parser.pyx":56
 * 
 *     def __dealloc__(self):
 *         PyMem_Free(self._cparser)             # <<<<<<<<<<<<<<
 *         PyMem_Free(self._csettings)
 * 
 */
  PyMem_Free(__pyx_v_self->_cparser);
  /* "httptools/parser/parser.pyx":57
 *     def __dealloc__(self):
 *         PyMem_Free(self._cparser)
 *         PyMem_Free(self._csettings)             # <<<<<<<<<<<<<<
 * 
 *     cdef _init(self, protocol, cparser.llhttp_type_t mode):
 */
  PyMem_Free(__pyx_v_self->_csettings);
  /* "httptools/parser/parser.pyx":55
 *             raise MemoryError()
 * 
 *     def __dealloc__(self):             # <<<<<<<<<<<<<<
 *         PyMem_Free(self._cparser)
 *         PyMem_Free(self._csettings)
 */
  /* function exit code */
  __Pyx_RefNannyFinishContext();
}
/* "httptools/parser/parser.pyx":59
* PyMem_Free(self._csettings)
*
* cdef _init(self, protocol, cparser.llhttp_type_t mode): # <<<<<<<<<<<<<<
* cparser.llhttp_settings_init(self._csettings)
*
*/
/* HttpParser._init (cdef, C-only entry point): initializes the llhttp
   settings and parser for the given mode (request/response), stores
   `self` as the parser's user-data pointer, and looks up the protocol
   object's optional callback attributes via getattr(protocol, name, None):
   on_header, on_headers_complete, on_body, on_message_begin,
   on_message_complete, on_chunk_header, on_chunk_complete.
   The header/body/message C callbacks are wired into _csettings only when
   the corresponding Python hook exists; on_headers_complete,
   on_chunk_header and on_chunk_complete C callbacks are installed
   unconditionally. Also resets the current header name/value accumulators
   and _last_error to None. Returns None, or NULL on error. */
static PyObject *__pyx_f_9httptools_6parser_6parser_10HttpParser__init(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self, PyObject *__pyx_v_protocol, llhttp_type_t __pyx_v_mode) {
  PyObject *__pyx_r = NULL;
  __Pyx_RefNannyDeclarations
  PyObject *__pyx_t_1 = NULL;
  int __pyx_t_2;
  int __pyx_t_3;
  int __pyx_lineno = 0;
  const char *__pyx_filename = NULL;
  int __pyx_clineno = 0;
  __Pyx_RefNannySetupContext("_init", 0);
  /* "httptools/parser/parser.pyx":60
 * 
 *     cdef _init(self, protocol, cparser.llhttp_type_t mode):
 *         cparser.llhttp_settings_init(self._csettings)             # <<<<<<<<<<<<<<
 * 
 *         cparser.llhttp_init(self._cparser, mode, self._csettings)
 */
  llhttp_settings_init(__pyx_v_self->_csettings);
  /* "httptools/parser/parser.pyx":62
 *         cparser.llhttp_settings_init(self._csettings)
 * 
 *         cparser.llhttp_init(self._cparser, mode, self._csettings)             # <<<<<<<<<<<<<<
 *         self._cparser.data = <void*>self
 * 
 */
  llhttp_init(__pyx_v_self->_cparser, __pyx_v_mode, __pyx_v_self->_csettings);
  /* "httptools/parser/parser.pyx":63
 * 
 *         cparser.llhttp_init(self._cparser, mode, self._csettings)
 *         self._cparser.data = <void*>self             # <<<<<<<<<<<<<<
 * 
 *         self._current_header_name = None
 */
  __pyx_v_self->_cparser->data = ((void *)__pyx_v_self);
  /* "httptools/parser/parser.pyx":65
 *         self._cparser.data = <void*>self
 * 
 *         self._current_header_name = None             # <<<<<<<<<<<<<<
 *         self._current_header_value = None
 * 
 */
  __Pyx_INCREF(Py_None);
  __Pyx_GIVEREF(Py_None);
  __Pyx_GOTREF(__pyx_v_self->_current_header_name);
  __Pyx_DECREF(__pyx_v_self->_current_header_name);
  __pyx_v_self->_current_header_name = ((PyObject*)Py_None);
  /* "httptools/parser/parser.pyx":66
 * 
 *         self._current_header_name = None
 *         self._current_header_value = None             # <<<<<<<<<<<<<<
 * 
 *         self._proto_on_header = getattr(protocol, 'on_header', None)
 */
  __Pyx_INCREF(Py_None);
  __Pyx_GIVEREF(Py_None);
  __Pyx_GOTREF(__pyx_v_self->_current_header_value);
  __Pyx_DECREF(__pyx_v_self->_current_header_value);
  __pyx_v_self->_current_header_value = ((PyObject*)Py_None);
  /* "httptools/parser/parser.pyx":68
 *         self._current_header_value = None
 * 
 *         self._proto_on_header = getattr(protocol, 'on_header', None)             # <<<<<<<<<<<<<<
 *         if self._proto_on_header is not None:
 *             self._csettings.on_header_field = cb_on_header_field
 */
  __pyx_t_1 = __Pyx_GetAttr3(__pyx_v_protocol, __pyx_n_u_on_header, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 68, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  __Pyx_GIVEREF(__pyx_t_1);
  __Pyx_GOTREF(__pyx_v_self->_proto_on_header);
  __Pyx_DECREF(__pyx_v_self->_proto_on_header);
  __pyx_v_self->_proto_on_header = __pyx_t_1;
  __pyx_t_1 = 0;
  /* "httptools/parser/parser.pyx":69
 * 
 *         self._proto_on_header = getattr(protocol, 'on_header', None)
 *         if self._proto_on_header is not None:             # <<<<<<<<<<<<<<
 *             self._csettings.on_header_field = cb_on_header_field
 *             self._csettings.on_header_value = cb_on_header_value
 */
  __pyx_t_2 = (__pyx_v_self->_proto_on_header != Py_None);
  __pyx_t_3 = (__pyx_t_2 != 0);
  if (__pyx_t_3) {
    /* "httptools/parser/parser.pyx":70
 *         self._proto_on_header = getattr(protocol, 'on_header', None)
 *         if self._proto_on_header is not None:
 *             self._csettings.on_header_field = cb_on_header_field             # <<<<<<<<<<<<<<
 *             self._csettings.on_header_value = cb_on_header_value
 *         self._proto_on_headers_complete = getattr(
 */
    __pyx_v_self->_csettings->on_header_field = __pyx_f_9httptools_6parser_6parser_cb_on_header_field;
    /* "httptools/parser/parser.pyx":71
 *         if self._proto_on_header is not None:
 *             self._csettings.on_header_field = cb_on_header_field
 *             self._csettings.on_header_value = cb_on_header_value             # <<<<<<<<<<<<<<
 *         self._proto_on_headers_complete = getattr(
 *             protocol, 'on_headers_complete', None)
 */
    __pyx_v_self->_csettings->on_header_value = __pyx_f_9httptools_6parser_6parser_cb_on_header_value;
    /* "httptools/parser/parser.pyx":69
 * 
 *         self._proto_on_header = getattr(protocol, 'on_header', None)
 *         if self._proto_on_header is not None:             # <<<<<<<<<<<<<<
 *             self._csettings.on_header_field = cb_on_header_field
 *             self._csettings.on_header_value = cb_on_header_value
 */
  }
  /* "httptools/parser/parser.pyx":72
 *             self._csettings.on_header_field = cb_on_header_field
 *             self._csettings.on_header_value = cb_on_header_value
 *         self._proto_on_headers_complete = getattr(             # <<<<<<<<<<<<<<
 *             protocol, 'on_headers_complete', None)
 *         self._csettings.on_headers_complete = cb_on_headers_complete
 */
  __pyx_t_1 = __Pyx_GetAttr3(__pyx_v_protocol, __pyx_n_u_on_headers_complete, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 72, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  __Pyx_GIVEREF(__pyx_t_1);
  __Pyx_GOTREF(__pyx_v_self->_proto_on_headers_complete);
  __Pyx_DECREF(__pyx_v_self->_proto_on_headers_complete);
  __pyx_v_self->_proto_on_headers_complete = __pyx_t_1;
  __pyx_t_1 = 0;
  /* "httptools/parser/parser.pyx":74
 *         self._proto_on_headers_complete = getattr(
 *             protocol, 'on_headers_complete', None)
 *         self._csettings.on_headers_complete = cb_on_headers_complete             # <<<<<<<<<<<<<<
 * 
 *         self._proto_on_body = getattr(protocol, 'on_body', None)
 */
  __pyx_v_self->_csettings->on_headers_complete = __pyx_f_9httptools_6parser_6parser_cb_on_headers_complete;
  /* "httptools/parser/parser.pyx":76
 *         self._csettings.on_headers_complete = cb_on_headers_complete
 * 
 *         self._proto_on_body = getattr(protocol, 'on_body', None)             # <<<<<<<<<<<<<<
 *         if self._proto_on_body is not None:
 *             self._csettings.on_body = cb_on_body
 */
  __pyx_t_1 = __Pyx_GetAttr3(__pyx_v_protocol, __pyx_n_u_on_body, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 76, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  __Pyx_GIVEREF(__pyx_t_1);
  __Pyx_GOTREF(__pyx_v_self->_proto_on_body);
  __Pyx_DECREF(__pyx_v_self->_proto_on_body);
  __pyx_v_self->_proto_on_body = __pyx_t_1;
  __pyx_t_1 = 0;
  /* "httptools/parser/parser.pyx":77
 * 
 *         self._proto_on_body = getattr(protocol, 'on_body', None)
 *         if self._proto_on_body is not None:             # <<<<<<<<<<<<<<
 *             self._csettings.on_body = cb_on_body
 * 
 */
  __pyx_t_3 = (__pyx_v_self->_proto_on_body != Py_None);
  __pyx_t_2 = (__pyx_t_3 != 0);
  if (__pyx_t_2) {
    /* "httptools/parser/parser.pyx":78
 *         self._proto_on_body = getattr(protocol, 'on_body', None)
 *         if self._proto_on_body is not None:
 *             self._csettings.on_body = cb_on_body             # <<<<<<<<<<<<<<
 * 
 *         self._proto_on_message_begin = getattr(
 */
    __pyx_v_self->_csettings->on_body = __pyx_f_9httptools_6parser_6parser_cb_on_body;
    /* "httptools/parser/parser.pyx":77
 * 
 *         self._proto_on_body = getattr(protocol, 'on_body', None)
 *         if self._proto_on_body is not None:             # <<<<<<<<<<<<<<
 *             self._csettings.on_body = cb_on_body
 * 
 */
  }
  /* "httptools/parser/parser.pyx":80
 *             self._csettings.on_body = cb_on_body
 * 
 *         self._proto_on_message_begin = getattr(             # <<<<<<<<<<<<<<
 *             protocol, 'on_message_begin', None)
 *         if self._proto_on_message_begin is not None:
 */
  __pyx_t_1 = __Pyx_GetAttr3(__pyx_v_protocol, __pyx_n_u_on_message_begin, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 80, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  __Pyx_GIVEREF(__pyx_t_1);
  __Pyx_GOTREF(__pyx_v_self->_proto_on_message_begin);
  __Pyx_DECREF(__pyx_v_self->_proto_on_message_begin);
  __pyx_v_self->_proto_on_message_begin = __pyx_t_1;
  __pyx_t_1 = 0;
  /* "httptools/parser/parser.pyx":82
 *         self._proto_on_message_begin = getattr(
 *             protocol, 'on_message_begin', None)
 *         if self._proto_on_message_begin is not None:             # <<<<<<<<<<<<<<
 *             self._csettings.on_message_begin = cb_on_message_begin
 * 
 */
  __pyx_t_2 = (__pyx_v_self->_proto_on_message_begin != Py_None);
  __pyx_t_3 = (__pyx_t_2 != 0);
  if (__pyx_t_3) {
    /* "httptools/parser/parser.pyx":83
 *             protocol, 'on_message_begin', None)
 *         if self._proto_on_message_begin is not None:
 *             self._csettings.on_message_begin = cb_on_message_begin             # <<<<<<<<<<<<<<
 * 
 *         self._proto_on_message_complete = getattr(
 */
    __pyx_v_self->_csettings->on_message_begin = __pyx_f_9httptools_6parser_6parser_cb_on_message_begin;
    /* "httptools/parser/parser.pyx":82
 *         self._proto_on_message_begin = getattr(
 *             protocol, 'on_message_begin', None)
 *         if self._proto_on_message_begin is not None:             # <<<<<<<<<<<<<<
 *             self._csettings.on_message_begin = cb_on_message_begin
 * 
 */
  }
  /* "httptools/parser/parser.pyx":85
 *             self._csettings.on_message_begin = cb_on_message_begin
 * 
 *         self._proto_on_message_complete = getattr(             # <<<<<<<<<<<<<<
 *             protocol, 'on_message_complete', None)
 *         if self._proto_on_message_complete is not None:
 */
  __pyx_t_1 = __Pyx_GetAttr3(__pyx_v_protocol, __pyx_n_u_on_message_complete, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 85, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  __Pyx_GIVEREF(__pyx_t_1);
  __Pyx_GOTREF(__pyx_v_self->_proto_on_message_complete);
  __Pyx_DECREF(__pyx_v_self->_proto_on_message_complete);
  __pyx_v_self->_proto_on_message_complete = __pyx_t_1;
  __pyx_t_1 = 0;
  /* "httptools/parser/parser.pyx":87
 *         self._proto_on_message_complete = getattr(
 *             protocol, 'on_message_complete', None)
 *         if self._proto_on_message_complete is not None:             # <<<<<<<<<<<<<<
 *             self._csettings.on_message_complete = cb_on_message_complete
 * 
 */
  __pyx_t_3 = (__pyx_v_self->_proto_on_message_complete != Py_None);
  __pyx_t_2 = (__pyx_t_3 != 0);
  if (__pyx_t_2) {
    /* "httptools/parser/parser.pyx":88
 *             protocol, 'on_message_complete', None)
 *         if self._proto_on_message_complete is not None:
 *             self._csettings.on_message_complete = cb_on_message_complete             # <<<<<<<<<<<<<<
 * 
 *         self._proto_on_chunk_header = getattr(
 */
    __pyx_v_self->_csettings->on_message_complete = __pyx_f_9httptools_6parser_6parser_cb_on_message_complete;
    /* "httptools/parser/parser.pyx":87
 *         self._proto_on_message_complete = getattr(
 *             protocol, 'on_message_complete', None)
 *         if self._proto_on_message_complete is not None:             # <<<<<<<<<<<<<<
 *             self._csettings.on_message_complete = cb_on_message_complete
 * 
 */
  }
  /* "httptools/parser/parser.pyx":90
 *             self._csettings.on_message_complete = cb_on_message_complete
 * 
 *         self._proto_on_chunk_header = getattr(             # <<<<<<<<<<<<<<
 *             protocol, 'on_chunk_header', None)
 *         self._csettings.on_chunk_header = cb_on_chunk_header
 */
  __pyx_t_1 = __Pyx_GetAttr3(__pyx_v_protocol, __pyx_n_u_on_chunk_header, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 90, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  __Pyx_GIVEREF(__pyx_t_1);
  __Pyx_GOTREF(__pyx_v_self->_proto_on_chunk_header);
  __Pyx_DECREF(__pyx_v_self->_proto_on_chunk_header);
  __pyx_v_self->_proto_on_chunk_header = __pyx_t_1;
  __pyx_t_1 = 0;
  /* "httptools/parser/parser.pyx":92
 *         self._proto_on_chunk_header = getattr(
 *             protocol, 'on_chunk_header', None)
 *         self._csettings.on_chunk_header = cb_on_chunk_header             # <<<<<<<<<<<<<<
 * 
 *         self._proto_on_chunk_complete = getattr(
 */
  __pyx_v_self->_csettings->on_chunk_header = __pyx_f_9httptools_6parser_6parser_cb_on_chunk_header;
  /* "httptools/parser/parser.pyx":94
 *             protocol, 'on_chunk_header', None)
 *         self._csettings.on_chunk_header = cb_on_chunk_header
 * 
 *         self._proto_on_chunk_complete = getattr(             # <<<<<<<<<<<<<<
 *             protocol, 'on_chunk_complete', None)
 *         self._csettings.on_chunk_complete = cb_on_chunk_complete
 */
  __pyx_t_1 = __Pyx_GetAttr3(__pyx_v_protocol, __pyx_n_u_on_chunk_complete, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 94, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  __Pyx_GIVEREF(__pyx_t_1);
  __Pyx_GOTREF(__pyx_v_self->_proto_on_chunk_complete);
  __Pyx_DECREF(__pyx_v_self->_proto_on_chunk_complete);
  __pyx_v_self->_proto_on_chunk_complete = __pyx_t_1;
  __pyx_t_1 = 0;
  /* "httptools/parser/parser.pyx":96
 *         self._proto_on_chunk_complete = getattr(
 *             protocol, 'on_chunk_complete', None)
 *         self._csettings.on_chunk_complete = cb_on_chunk_complete             # <<<<<<<<<<<<<<
 * 
 *         self._last_error = None
 */
  __pyx_v_self->_csettings->on_chunk_complete = __pyx_f_9httptools_6parser_6parser_cb_on_chunk_complete;
  /* "httptools/parser/parser.pyx":98
 *         self._csettings.on_chunk_complete = cb_on_chunk_complete
 * 
 *         self._last_error = None             # <<<<<<<<<<<<<<
 * 
 *     cdef _maybe_call_on_header(self):
 */
  __Pyx_INCREF(Py_None);
  __Pyx_GIVEREF(Py_None);
  __Pyx_GOTREF(__pyx_v_self->_last_error);
  __Pyx_DECREF(__pyx_v_self->_last_error);
  __pyx_v_self->_last_error = Py_None;
  /* "httptools/parser/parser.pyx":59
 *         PyMem_Free(self._csettings)
 * 
 *     cdef _init(self, protocol, cparser.llhttp_type_t mode):             # <<<<<<<<<<<<<<
 *         cparser.llhttp_settings_init(self._csettings)
 * 
 */
  /* function exit code */
  __pyx_r = Py_None; __Pyx_INCREF(Py_None);
  goto __pyx_L0;
  __pyx_L1_error:;
  __Pyx_XDECREF(__pyx_t_1);
  __Pyx_AddTraceback("httptools.parser.parser.HttpParser._init", __pyx_clineno, __pyx_lineno, __pyx_filename);
  __pyx_r = 0;
  __pyx_L0:;
  __Pyx_XGIVEREF(__pyx_r);
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* "httptools/parser/parser.pyx":100
* self._last_error = None
*
* cdef _maybe_call_on_header(self): # <<<<<<<<<<<<<<
* if self._current_header_value is not None:
* current_header_name = self._current_header_name
*/
/* HttpParser._maybe_call_on_header (cdef): if a header value has been
   accumulated (_current_header_value is not None), snapshots the current
   name/value pair, resets both accumulators to None, and — when the
   protocol supplied an on_header hook — calls
   _proto_on_header(name, value). The three call paths below
   (PyFunction fast call / PyCFunction fast call / generic tuple call)
   are Cython's standard call-optimization fan-out and are semantically
   identical. Returns None, or NULL on error. */
static PyObject *__pyx_f_9httptools_6parser_6parser_10HttpParser__maybe_call_on_header(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self) {
  PyObject *__pyx_v_current_header_name = NULL;
  PyObject *__pyx_v_current_header_value = NULL;
  PyObject *__pyx_r = NULL;
  __Pyx_RefNannyDeclarations
  int __pyx_t_1;
  int __pyx_t_2;
  PyObject *__pyx_t_3 = NULL;
  PyObject *__pyx_t_4 = NULL;
  PyObject *__pyx_t_5 = NULL;
  int __pyx_t_6;
  PyObject *__pyx_t_7 = NULL;
  int __pyx_lineno = 0;
  const char *__pyx_filename = NULL;
  int __pyx_clineno = 0;
  __Pyx_RefNannySetupContext("_maybe_call_on_header", 0);
  /* "httptools/parser/parser.pyx":101
 * 
 *     cdef _maybe_call_on_header(self):
 *         if self._current_header_value is not None:             # <<<<<<<<<<<<<<
 *             current_header_name = self._current_header_name
 *             current_header_value = self._current_header_value
 */
  __pyx_t_1 = (__pyx_v_self->_current_header_value != ((PyObject*)Py_None));
  __pyx_t_2 = (__pyx_t_1 != 0);
  if (__pyx_t_2) {
    /* "httptools/parser/parser.pyx":102
 *     cdef _maybe_call_on_header(self):
 *         if self._current_header_value is not None:
 *             current_header_name = self._current_header_name             # <<<<<<<<<<<<<<
 *             current_header_value = self._current_header_value
 * 
 */
    __pyx_t_3 = __pyx_v_self->_current_header_name;
    __Pyx_INCREF(__pyx_t_3);
    __pyx_v_current_header_name = ((PyObject*)__pyx_t_3);
    __pyx_t_3 = 0;
    /* "httptools/parser/parser.pyx":103
 *         if self._current_header_value is not None:
 *             current_header_name = self._current_header_name
 *             current_header_value = self._current_header_value             # <<<<<<<<<<<<<<
 * 
 *             self._current_header_name = self._current_header_value = None
 */
    __pyx_t_3 = __pyx_v_self->_current_header_value;
    __Pyx_INCREF(__pyx_t_3);
    __pyx_v_current_header_value = ((PyObject*)__pyx_t_3);
    __pyx_t_3 = 0;
    /* "httptools/parser/parser.pyx":105
 *             current_header_value = self._current_header_value
 * 
 *             self._current_header_name = self._current_header_value = None             # <<<<<<<<<<<<<<
 * 
 *             if self._proto_on_header is not None:
 */
    __Pyx_INCREF(Py_None);
    __Pyx_GIVEREF(Py_None);
    __Pyx_GOTREF(__pyx_v_self->_current_header_name);
    __Pyx_DECREF(__pyx_v_self->_current_header_name);
    __pyx_v_self->_current_header_name = ((PyObject*)Py_None);
    __Pyx_INCREF(Py_None);
    __Pyx_GIVEREF(Py_None);
    __Pyx_GOTREF(__pyx_v_self->_current_header_value);
    __Pyx_DECREF(__pyx_v_self->_current_header_value);
    __pyx_v_self->_current_header_value = ((PyObject*)Py_None);
    /* "httptools/parser/parser.pyx":107
 *             self._current_header_name = self._current_header_value = None
 * 
 *             if self._proto_on_header is not None:             # <<<<<<<<<<<<<<
 *                 self._proto_on_header(current_header_name,
 *                                       current_header_value)
 */
    __pyx_t_2 = (__pyx_v_self->_proto_on_header != Py_None);
    __pyx_t_1 = (__pyx_t_2 != 0);
    if (__pyx_t_1) {
      /* "httptools/parser/parser.pyx":109
 *             if self._proto_on_header is not None:
 *                 self._proto_on_header(current_header_name,
 *                                       current_header_value)             # <<<<<<<<<<<<<<
 * 
 *     cdef _on_header_field(self, bytes field):
 */
      __Pyx_INCREF(__pyx_v_self->_proto_on_header);
      __pyx_t_4 = __pyx_v_self->_proto_on_header; __pyx_t_5 = NULL;
      __pyx_t_6 = 0;
      if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) {
        __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4);
        if (likely(__pyx_t_5)) {
          PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4);
          __Pyx_INCREF(__pyx_t_5);
          __Pyx_INCREF(function);
          __Pyx_DECREF_SET(__pyx_t_4, function);
          __pyx_t_6 = 1;
        }
      }
      #if CYTHON_FAST_PYCALL
      if (PyFunction_Check(__pyx_t_4)) {
        PyObject *__pyx_temp[3] = {__pyx_t_5, __pyx_v_current_header_name, __pyx_v_current_header_value};
        __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 108, __pyx_L1_error)
        __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
        __Pyx_GOTREF(__pyx_t_3);
      } else
      #endif
      #if CYTHON_FAST_PYCCALL
      if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) {
        PyObject *__pyx_temp[3] = {__pyx_t_5, __pyx_v_current_header_name, __pyx_v_current_header_value};
        __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 108, __pyx_L1_error)
        __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
        __Pyx_GOTREF(__pyx_t_3);
      } else
      #endif
      {
        __pyx_t_7 = PyTuple_New(2+__pyx_t_6); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 108, __pyx_L1_error)
        __Pyx_GOTREF(__pyx_t_7);
        if (__pyx_t_5) {
          __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_5); __pyx_t_5 = NULL;
        }
        __Pyx_INCREF(__pyx_v_current_header_name);
        __Pyx_GIVEREF(__pyx_v_current_header_name);
        PyTuple_SET_ITEM(__pyx_t_7, 0+__pyx_t_6, __pyx_v_current_header_name);
        __Pyx_INCREF(__pyx_v_current_header_value);
        __Pyx_GIVEREF(__pyx_v_current_header_value);
        PyTuple_SET_ITEM(__pyx_t_7, 1+__pyx_t_6, __pyx_v_current_header_value);
        __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_7, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 108, __pyx_L1_error)
        __Pyx_GOTREF(__pyx_t_3);
        __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
      }
      __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
      __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
      /* "httptools/parser/parser.pyx":107
 *             self._current_header_name = self._current_header_value = None
 * 
 *             if self._proto_on_header is not None:             # <<<<<<<<<<<<<<
 *                 self._proto_on_header(current_header_name,
 *                                       current_header_value)
 */
    }
    /* "httptools/parser/parser.pyx":101
 * 
 *     cdef _maybe_call_on_header(self):
 *         if self._current_header_value is not None:             # <<<<<<<<<<<<<<
 *             current_header_name = self._current_header_name
 *             current_header_value = self._current_header_value
 */
  }
  /* "httptools/parser/parser.pyx":100
 *         self._last_error = None
 * 
 *     cdef _maybe_call_on_header(self):             # <<<<<<<<<<<<<<
 *         if self._current_header_value is not None:
 *             current_header_name = self._current_header_name
 */
  /* function exit code */
  __pyx_r = Py_None; __Pyx_INCREF(Py_None);
  goto __pyx_L0;
  __pyx_L1_error:;
  __Pyx_XDECREF(__pyx_t_3);
  __Pyx_XDECREF(__pyx_t_4);
  __Pyx_XDECREF(__pyx_t_5);
  __Pyx_XDECREF(__pyx_t_7);
  __Pyx_AddTraceback("httptools.parser.parser.HttpParser._maybe_call_on_header", __pyx_clineno, __pyx_lineno, __pyx_filename);
  __pyx_r = 0;
  __pyx_L0:;
  __Pyx_XDECREF(__pyx_v_current_header_name);
  __Pyx_XDECREF(__pyx_v_current_header_value);
  __Pyx_XGIVEREF(__pyx_r);
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* "httptools/parser/parser.pyx":111
* current_header_value)
*
* cdef _on_header_field(self, bytes field): # <<<<<<<<<<<<<<
* self._maybe_call_on_header()
* if self._current_header_name is None:
*/
/* HttpParser._on_header_field (cdef): first flushes any pending header
   pair via _maybe_call_on_header, then starts a new header-name
   accumulator or appends `field` to it (llhttp may deliver a header
   field across multiple callback invocations, hence the += branch).
   Returns None, or NULL on error. */
static PyObject *__pyx_f_9httptools_6parser_6parser_10HttpParser__on_header_field(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self, PyObject *__pyx_v_field) {
  PyObject *__pyx_r = NULL;
  __Pyx_RefNannyDeclarations
  PyObject *__pyx_t_1 = NULL;
  int __pyx_t_2;
  int __pyx_t_3;
  int __pyx_lineno = 0;
  const char *__pyx_filename = NULL;
  int __pyx_clineno = 0;
  __Pyx_RefNannySetupContext("_on_header_field", 0);
  /* "httptools/parser/parser.pyx":112
 * 
 *     cdef _on_header_field(self, bytes field):
 *         self._maybe_call_on_header()             # <<<<<<<<<<<<<<
 *         if self._current_header_name is None:
 *             self._current_header_name = field
 */
  __pyx_t_1 = ((struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpParser *)__pyx_v_self->__pyx_vtab)->_maybe_call_on_header(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 112, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
  /* "httptools/parser/parser.pyx":113
 *     cdef _on_header_field(self, bytes field):
 *         self._maybe_call_on_header()
 *         if self._current_header_name is None:             # <<<<<<<<<<<<<<
 *             self._current_header_name = field
 *         else:
 */
  __pyx_t_2 = (__pyx_v_self->_current_header_name == ((PyObject*)Py_None));
  __pyx_t_3 = (__pyx_t_2 != 0);
  if (__pyx_t_3) {
    /* "httptools/parser/parser.pyx":114
 *         self._maybe_call_on_header()
 *         if self._current_header_name is None:
 *             self._current_header_name = field             # <<<<<<<<<<<<<<
 *         else:
 *             self._current_header_name += field
 */
    __Pyx_INCREF(__pyx_v_field);
    __Pyx_GIVEREF(__pyx_v_field);
    __Pyx_GOTREF(__pyx_v_self->_current_header_name);
    __Pyx_DECREF(__pyx_v_self->_current_header_name);
    __pyx_v_self->_current_header_name = __pyx_v_field;
    /* "httptools/parser/parser.pyx":113
 *     cdef _on_header_field(self, bytes field):
 *         self._maybe_call_on_header()
 *         if self._current_header_name is None:             # <<<<<<<<<<<<<<
 *             self._current_header_name = field
 *         else:
 */
    goto __pyx_L3;
  }
  /* "httptools/parser/parser.pyx":116
 *             self._current_header_name = field
 *         else:
 *             self._current_header_name += field             # <<<<<<<<<<<<<<
 * 
 *     cdef _on_header_value(self, bytes val):
 */
  /*else*/ {
    __pyx_t_1 = PyNumber_InPlaceAdd(__pyx_v_self->_current_header_name, __pyx_v_field); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 116, __pyx_L1_error)
    __Pyx_GOTREF(__pyx_t_1);
    __Pyx_GIVEREF(__pyx_t_1);
    __Pyx_GOTREF(__pyx_v_self->_current_header_name);
    __Pyx_DECREF(__pyx_v_self->_current_header_name);
    __pyx_v_self->_current_header_name = ((PyObject*)__pyx_t_1);
    __pyx_t_1 = 0;
  }
  __pyx_L3:;
  /* "httptools/parser/parser.pyx":111
 *                                   current_header_value)
 * 
 *     cdef _on_header_field(self, bytes field):             # <<<<<<<<<<<<<<
 *         self._maybe_call_on_header()
 *         if self._current_header_name is None:
 */
  /* function exit code */
  __pyx_r = Py_None; __Pyx_INCREF(Py_None);
  goto __pyx_L0;
  __pyx_L1_error:;
  __Pyx_XDECREF(__pyx_t_1);
  __Pyx_AddTraceback("httptools.parser.parser.HttpParser._on_header_field", __pyx_clineno, __pyx_lineno, __pyx_filename);
  __pyx_r = 0;
  __pyx_L0:;
  __Pyx_XGIVEREF(__pyx_r);
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* "httptools/parser/parser.pyx":118
* self._current_header_name += field
*
* cdef _on_header_value(self, bytes val): # <<<<<<<<<<<<<<
* if self._current_header_value is None:
* self._current_header_value = val
*/
static PyObject *__pyx_f_9httptools_6parser_6parser_10HttpParser__on_header_value(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self, PyObject *__pyx_v_val) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
int __pyx_t_1;
int __pyx_t_2;
PyObject *__pyx_t_3 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("_on_header_value", 0);
/* "httptools/parser/parser.pyx":119
*
* cdef _on_header_value(self, bytes val):
* if self._current_header_value is None: # <<<<<<<<<<<<<<
* self._current_header_value = val
* else:
*/
__pyx_t_1 = (__pyx_v_self->_current_header_value == ((PyObject*)Py_None));
__pyx_t_2 = (__pyx_t_1 != 0);
if (__pyx_t_2) {
/* "httptools/parser/parser.pyx":120
* cdef _on_header_value(self, bytes val):
* if self._current_header_value is None:
* self._current_header_value = val # <<<<<<<<<<<<<<
* else:
* # This is unlikely, as mostly HTTP headers are one-line
*/
__Pyx_INCREF(__pyx_v_val);
__Pyx_GIVEREF(__pyx_v_val);
__Pyx_GOTREF(__pyx_v_self->_current_header_value);
__Pyx_DECREF(__pyx_v_self->_current_header_value);
__pyx_v_self->_current_header_value = __pyx_v_val;
/* "httptools/parser/parser.pyx":119
*
* cdef _on_header_value(self, bytes val):
* if self._current_header_value is None: # <<<<<<<<<<<<<<
* self._current_header_value = val
* else:
*/
goto __pyx_L3;
}
/* "httptools/parser/parser.pyx":123
* else:
* # This is unlikely, as mostly HTTP headers are one-line
* self._current_header_value += val # <<<<<<<<<<<<<<
*
* cdef _on_headers_complete(self):
*/
/*else*/ {
__pyx_t_3 = PyNumber_InPlaceAdd(__pyx_v_self->_current_header_value, __pyx_v_val); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 123, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__Pyx_GIVEREF(__pyx_t_3);
__Pyx_GOTREF(__pyx_v_self->_current_header_value);
__Pyx_DECREF(__pyx_v_self->_current_header_value);
__pyx_v_self->_current_header_value = ((PyObject*)__pyx_t_3);
__pyx_t_3 = 0;
}
__pyx_L3:;
/* "httptools/parser/parser.pyx":118
* self._current_header_name += field
*
* cdef _on_header_value(self, bytes val): # <<<<<<<<<<<<<<
* if self._current_header_value is None:
* self._current_header_value = val
*/
/* function exit code */
__pyx_r = Py_None; __Pyx_INCREF(Py_None);
goto __pyx_L0;
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_3);
__Pyx_AddTraceback("httptools.parser.parser.HttpParser._on_header_value", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = 0;
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "httptools/parser/parser.pyx":125
* self._current_header_value += val
*
* cdef _on_headers_complete(self): # <<<<<<<<<<<<<<
* self._maybe_call_on_header()
*
*/
/* NOTE(review): Cython-generated C — regenerate from parser.pyx rather than
 * hand-editing.
 *
 * HttpParser._on_headers_complete (parser.pyx:125): flushes any pending
 * header name/value pair via _maybe_call_on_header(), then invokes the
 * protocol's on_headers_complete() callback if the protocol defines one.
 * Returns None on success, NULL (with exception set) on failure. */
static PyObject *__pyx_f_9httptools_6parser_6parser_10HttpParser__on_headers_complete(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self) {
  PyObject *__pyx_r = NULL;
  __Pyx_RefNannyDeclarations
  PyObject *__pyx_t_1 = NULL;
  int __pyx_t_2;
  int __pyx_t_3;
  PyObject *__pyx_t_4 = NULL;
  PyObject *__pyx_t_5 = NULL;
  int __pyx_lineno = 0;
  const char *__pyx_filename = NULL;
  int __pyx_clineno = 0;
  __Pyx_RefNannySetupContext("_on_headers_complete", 0);
  /* "httptools/parser/parser.pyx":126
 *
 * cdef _on_headers_complete(self):
 * self._maybe_call_on_header()             # <<<<<<<<<<<<<<
 *
 * if self._proto_on_headers_complete is not None:
 */
  /* Flush the last buffered header before signalling completion. */
  __pyx_t_1 = ((struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpParser *)__pyx_v_self->__pyx_vtab)->_maybe_call_on_header(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 126, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
  /* "httptools/parser/parser.pyx":128
 * self._maybe_call_on_header()
 *
 * if self._proto_on_headers_complete is not None:             # <<<<<<<<<<<<<<
 * self._proto_on_headers_complete()
 *
 */
  __pyx_t_2 = (__pyx_v_self->_proto_on_headers_complete != Py_None);
  __pyx_t_3 = (__pyx_t_2 != 0);
  if (__pyx_t_3) {
    /* "httptools/parser/parser.pyx":129
 *
 * if self._proto_on_headers_complete is not None:
 * self._proto_on_headers_complete()             # <<<<<<<<<<<<<<
 *
 * cdef _on_chunk_header(self):
 */
    /* Zero-argument call with bound-method fast path (unpacks self). */
    __Pyx_INCREF(__pyx_v_self->_proto_on_headers_complete);
    __pyx_t_4 = __pyx_v_self->_proto_on_headers_complete; __pyx_t_5 = NULL;
    if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) {
      __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4);
      if (likely(__pyx_t_5)) {
        PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4);
        __Pyx_INCREF(__pyx_t_5);
        __Pyx_INCREF(function);
        __Pyx_DECREF_SET(__pyx_t_4, function);
      }
    }
    __pyx_t_1 = (__pyx_t_5) ? __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_5) : __Pyx_PyObject_CallNoArg(__pyx_t_4);
    __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
    if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 129, __pyx_L1_error)
    __Pyx_GOTREF(__pyx_t_1);
    __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
    __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
    /* "httptools/parser/parser.pyx":128
 * self._maybe_call_on_header()
 *
 * if self._proto_on_headers_complete is not None:             # <<<<<<<<<<<<<<
 * self._proto_on_headers_complete()
 *
 */
  }
  /* "httptools/parser/parser.pyx":125
 * self._current_header_value += val
 *
 * cdef _on_headers_complete(self):             # <<<<<<<<<<<<<<
 * self._maybe_call_on_header()
 *
 */
  /* function exit code */
  __pyx_r = Py_None; __Pyx_INCREF(Py_None);
  goto __pyx_L0;
  __pyx_L1_error:;
  __Pyx_XDECREF(__pyx_t_1);
  __Pyx_XDECREF(__pyx_t_4);
  __Pyx_XDECREF(__pyx_t_5);
  __Pyx_AddTraceback("httptools.parser.parser.HttpParser._on_headers_complete", __pyx_clineno, __pyx_lineno, __pyx_filename);
  __pyx_r = 0;
  __pyx_L0:;
  __Pyx_XGIVEREF(__pyx_r);
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* "httptools/parser/parser.pyx":131
* self._proto_on_headers_complete()
*
* cdef _on_chunk_header(self): # <<<<<<<<<<<<<<
* if (self._current_header_value is not None or
* self._current_header_name is not None):
*/
/* NOTE(review): Cython-generated C — regenerate from parser.pyx rather than
 * hand-editing.
 *
 * HttpParser._on_chunk_header (parser.pyx:131): a chunk header must not
 * arrive while a header name/value is still buffered; if it does, raises
 * HttpParserError('invalid headers state'). Otherwise invokes the protocol's
 * on_chunk_header() callback if defined.
 * Returns None on success, NULL (with exception set) on failure. */
static PyObject *__pyx_f_9httptools_6parser_6parser_10HttpParser__on_chunk_header(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self) {
  PyObject *__pyx_r = NULL;
  __Pyx_RefNannyDeclarations
  int __pyx_t_1;
  int __pyx_t_2;
  int __pyx_t_3;
  PyObject *__pyx_t_4 = NULL;
  PyObject *__pyx_t_5 = NULL;
  PyObject *__pyx_t_6 = NULL;
  int __pyx_lineno = 0;
  const char *__pyx_filename = NULL;
  int __pyx_clineno = 0;
  __Pyx_RefNannySetupContext("_on_chunk_header", 0);
  /* "httptools/parser/parser.pyx":132
 *
 * cdef _on_chunk_header(self):
 * if (self._current_header_value is not None or             # <<<<<<<<<<<<<<
 * self._current_header_name is not None):
 * raise HttpParserError('invalid headers state')
 */
  /* Short-circuit `or`: check the buffered value first, then the name. */
  __pyx_t_2 = (__pyx_v_self->_current_header_value != ((PyObject*)Py_None));
  __pyx_t_3 = (__pyx_t_2 != 0);
  if (!__pyx_t_3) {
  } else {
    __pyx_t_1 = __pyx_t_3;
    goto __pyx_L4_bool_binop_done;
  }
  /* "httptools/parser/parser.pyx":133
 * cdef _on_chunk_header(self):
 * if (self._current_header_value is not None or
 * self._current_header_name is not None):             # <<<<<<<<<<<<<<
 * raise HttpParserError('invalid headers state')
 *
 */
  __pyx_t_3 = (__pyx_v_self->_current_header_name != ((PyObject*)Py_None));
  __pyx_t_2 = (__pyx_t_3 != 0);
  __pyx_t_1 = __pyx_t_2;
  __pyx_L4_bool_binop_done:;
  /* "httptools/parser/parser.pyx":132
 *
 * cdef _on_chunk_header(self):
 * if (self._current_header_value is not None or             # <<<<<<<<<<<<<<
 * self._current_header_name is not None):
 * raise HttpParserError('invalid headers state')
 */
  if (unlikely(__pyx_t_1)) {
    /* "httptools/parser/parser.pyx":134
 * if (self._current_header_value is not None or
 * self._current_header_name is not None):
 * raise HttpParserError('invalid headers state')             # <<<<<<<<<<<<<<
 *
 * if self._proto_on_chunk_header is not None:
 */
    /* HttpParserError is looked up in module globals (imported from .errors). */
    __Pyx_GetModuleGlobalName(__pyx_t_5, __pyx_n_s_HttpParserError); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 134, __pyx_L1_error)
    __Pyx_GOTREF(__pyx_t_5);
    __pyx_t_6 = NULL;
    if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_5))) {
      __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_5);
      if (likely(__pyx_t_6)) {
        PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5);
        __Pyx_INCREF(__pyx_t_6);
        __Pyx_INCREF(function);
        __Pyx_DECREF_SET(__pyx_t_5, function);
      }
    }
    __pyx_t_4 = (__pyx_t_6) ? __Pyx_PyObject_Call2Args(__pyx_t_5, __pyx_t_6, __pyx_kp_u_invalid_headers_state) : __Pyx_PyObject_CallOneArg(__pyx_t_5, __pyx_kp_u_invalid_headers_state);
    __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0;
    if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 134, __pyx_L1_error)
    __Pyx_GOTREF(__pyx_t_4);
    __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
    __Pyx_Raise(__pyx_t_4, 0, 0, 0);
    __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
    __PYX_ERR(0, 134, __pyx_L1_error)
    /* "httptools/parser/parser.pyx":132
 *
 * cdef _on_chunk_header(self):
 * if (self._current_header_value is not None or             # <<<<<<<<<<<<<<
 * self._current_header_name is not None):
 * raise HttpParserError('invalid headers state')
 */
  }
  /* "httptools/parser/parser.pyx":136
 * raise HttpParserError('invalid headers state')
 *
 * if self._proto_on_chunk_header is not None:             # <<<<<<<<<<<<<<
 * self._proto_on_chunk_header()
 *
 */
  __pyx_t_1 = (__pyx_v_self->_proto_on_chunk_header != Py_None);
  __pyx_t_2 = (__pyx_t_1 != 0);
  if (__pyx_t_2) {
    /* "httptools/parser/parser.pyx":137
 *
 * if self._proto_on_chunk_header is not None:
 * self._proto_on_chunk_header()             # <<<<<<<<<<<<<<
 *
 * cdef _on_chunk_complete(self):
 */
    /* Zero-argument call with bound-method fast path (unpacks self). */
    __Pyx_INCREF(__pyx_v_self->_proto_on_chunk_header);
    __pyx_t_5 = __pyx_v_self->_proto_on_chunk_header; __pyx_t_6 = NULL;
    if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_5))) {
      __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_5);
      if (likely(__pyx_t_6)) {
        PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5);
        __Pyx_INCREF(__pyx_t_6);
        __Pyx_INCREF(function);
        __Pyx_DECREF_SET(__pyx_t_5, function);
      }
    }
    __pyx_t_4 = (__pyx_t_6) ? __Pyx_PyObject_CallOneArg(__pyx_t_5, __pyx_t_6) : __Pyx_PyObject_CallNoArg(__pyx_t_5);
    __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0;
    if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 137, __pyx_L1_error)
    __Pyx_GOTREF(__pyx_t_4);
    __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
    __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
    /* "httptools/parser/parser.pyx":136
 * raise HttpParserError('invalid headers state')
 *
 * if self._proto_on_chunk_header is not None:             # <<<<<<<<<<<<<<
 * self._proto_on_chunk_header()
 *
 */
  }
  /* "httptools/parser/parser.pyx":131
 * self._proto_on_headers_complete()
 *
 * cdef _on_chunk_header(self):             # <<<<<<<<<<<<<<
 * if (self._current_header_value is not None or
 * self._current_header_name is not None):
 */
  /* function exit code */
  __pyx_r = Py_None; __Pyx_INCREF(Py_None);
  goto __pyx_L0;
  __pyx_L1_error:;
  __Pyx_XDECREF(__pyx_t_4);
  __Pyx_XDECREF(__pyx_t_5);
  __Pyx_XDECREF(__pyx_t_6);
  __Pyx_AddTraceback("httptools.parser.parser.HttpParser._on_chunk_header", __pyx_clineno, __pyx_lineno, __pyx_filename);
  __pyx_r = 0;
  __pyx_L0:;
  __Pyx_XGIVEREF(__pyx_r);
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* "httptools/parser/parser.pyx":139
* self._proto_on_chunk_header()
*
* cdef _on_chunk_complete(self): # <<<<<<<<<<<<<<
* self._maybe_call_on_header()
*
*/
/* NOTE(review): Cython-generated C — regenerate from parser.pyx rather than
 * hand-editing.
 *
 * HttpParser._on_chunk_complete (parser.pyx:139): flushes any pending
 * header (chunked trailers arrive through the header callbacks), then
 * invokes the protocol's on_chunk_complete() callback if defined.
 * Returns None on success, NULL (with exception set) on failure. */
static PyObject *__pyx_f_9httptools_6parser_6parser_10HttpParser__on_chunk_complete(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self) {
  PyObject *__pyx_r = NULL;
  __Pyx_RefNannyDeclarations
  PyObject *__pyx_t_1 = NULL;
  int __pyx_t_2;
  int __pyx_t_3;
  PyObject *__pyx_t_4 = NULL;
  PyObject *__pyx_t_5 = NULL;
  int __pyx_lineno = 0;
  const char *__pyx_filename = NULL;
  int __pyx_clineno = 0;
  __Pyx_RefNannySetupContext("_on_chunk_complete", 0);
  /* "httptools/parser/parser.pyx":140
 *
 * cdef _on_chunk_complete(self):
 * self._maybe_call_on_header()             # <<<<<<<<<<<<<<
 *
 * if self._proto_on_chunk_complete is not None:
 */
  __pyx_t_1 = ((struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpParser *)__pyx_v_self->__pyx_vtab)->_maybe_call_on_header(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 140, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
  /* "httptools/parser/parser.pyx":142
 * self._maybe_call_on_header()
 *
 * if self._proto_on_chunk_complete is not None:             # <<<<<<<<<<<<<<
 * self._proto_on_chunk_complete()
 *
 */
  __pyx_t_2 = (__pyx_v_self->_proto_on_chunk_complete != Py_None);
  __pyx_t_3 = (__pyx_t_2 != 0);
  if (__pyx_t_3) {
    /* "httptools/parser/parser.pyx":143
 *
 * if self._proto_on_chunk_complete is not None:
 * self._proto_on_chunk_complete()             # <<<<<<<<<<<<<<
 *
 * ### Public API ###
 */
    /* Zero-argument call with bound-method fast path (unpacks self). */
    __Pyx_INCREF(__pyx_v_self->_proto_on_chunk_complete);
    __pyx_t_4 = __pyx_v_self->_proto_on_chunk_complete; __pyx_t_5 = NULL;
    if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) {
      __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4);
      if (likely(__pyx_t_5)) {
        PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4);
        __Pyx_INCREF(__pyx_t_5);
        __Pyx_INCREF(function);
        __Pyx_DECREF_SET(__pyx_t_4, function);
      }
    }
    __pyx_t_1 = (__pyx_t_5) ? __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_5) : __Pyx_PyObject_CallNoArg(__pyx_t_4);
    __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
    if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 143, __pyx_L1_error)
    __Pyx_GOTREF(__pyx_t_1);
    __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
    __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
    /* "httptools/parser/parser.pyx":142
 * self._maybe_call_on_header()
 *
 * if self._proto_on_chunk_complete is not None:             # <<<<<<<<<<<<<<
 * self._proto_on_chunk_complete()
 *
 */
  }
  /* "httptools/parser/parser.pyx":139
 * self._proto_on_chunk_header()
 *
 * cdef _on_chunk_complete(self):             # <<<<<<<<<<<<<<
 * self._maybe_call_on_header()
 *
 */
  /* function exit code */
  __pyx_r = Py_None; __Pyx_INCREF(Py_None);
  goto __pyx_L0;
  __pyx_L1_error:;
  __Pyx_XDECREF(__pyx_t_1);
  __Pyx_XDECREF(__pyx_t_4);
  __Pyx_XDECREF(__pyx_t_5);
  __Pyx_AddTraceback("httptools.parser.parser.HttpParser._on_chunk_complete", __pyx_clineno, __pyx_lineno, __pyx_filename);
  __pyx_r = 0;
  __pyx_L0:;
  __Pyx_XGIVEREF(__pyx_r);
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* "httptools/parser/parser.pyx":147
* ### Public API ###
*
* def get_http_version(self): # <<<<<<<<<<<<<<
* cdef cparser.llhttp_t* parser = self._cparser
* return '{}.{}'.format(parser.http_major, parser.http_minor)
*/
/* Python wrapper */
/* Python-visible wrapper for HttpParser.get_http_version(): casts self to the
 * extension-type struct and forwards to the implementation function. */
static PyObject *__pyx_pw_9httptools_6parser_6parser_10HttpParser_5get_http_version(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
static PyObject *__pyx_pw_9httptools_6parser_6parser_10HttpParser_5get_http_version(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
  PyObject *__pyx_r = 0;
  __Pyx_RefNannyDeclarations
  __Pyx_RefNannySetupContext("get_http_version (wrapper)", 0);
  __pyx_r = __pyx_pf_9httptools_6parser_6parser_10HttpParser_4get_http_version(((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_v_self));
  /* function exit code */
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* NOTE(review): Cython-generated C — regenerate from parser.pyx rather than
 * hand-editing.
 *
 * HttpParser.get_http_version (parser.pyx:147): reads http_major/http_minor
 * (uint8_t) from the underlying llhttp parser state and returns the version
 * as a str via '{}.{}'.format(...). Returns NULL (with exception set) on
 * failure. */
static PyObject *__pyx_pf_9httptools_6parser_6parser_10HttpParser_4get_http_version(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self) {
  llhttp_t *__pyx_v_parser;
  PyObject *__pyx_r = NULL;
  __Pyx_RefNannyDeclarations
  llhttp_t *__pyx_t_1;
  PyObject *__pyx_t_2 = NULL;
  PyObject *__pyx_t_3 = NULL;
  PyObject *__pyx_t_4 = NULL;
  PyObject *__pyx_t_5 = NULL;
  PyObject *__pyx_t_6 = NULL;
  int __pyx_t_7;
  PyObject *__pyx_t_8 = NULL;
  int __pyx_lineno = 0;
  const char *__pyx_filename = NULL;
  int __pyx_clineno = 0;
  __Pyx_RefNannySetupContext("get_http_version", 0);
  /* "httptools/parser/parser.pyx":148
 *
 * def get_http_version(self):
 * cdef cparser.llhttp_t* parser = self._cparser             # <<<<<<<<<<<<<<
 * return '{}.{}'.format(parser.http_major, parser.http_minor)
 *
 */
  __pyx_t_1 = __pyx_v_self->_cparser;
  __pyx_v_parser = __pyx_t_1;
  /* "httptools/parser/parser.pyx":149
 * def get_http_version(self):
 * cdef cparser.llhttp_t* parser = self._cparser
 * return '{}.{}'.format(parser.http_major, parser.http_minor)             # <<<<<<<<<<<<<<
 *
 * def should_keep_alive(self):
 */
  __Pyx_XDECREF(__pyx_r);
  /* __pyx_kp_u_ is the interned '{}.{}' template; look up its .format method. */
  __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_kp_u_, __pyx_n_s_format); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 149, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_3);
  __pyx_t_4 = __Pyx_PyInt_From_uint8_t(__pyx_v_parser->http_major); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 149, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_4);
  __pyx_t_5 = __Pyx_PyInt_From_uint8_t(__pyx_v_parser->http_minor); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 149, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_5);
  __pyx_t_6 = NULL;
  __pyx_t_7 = 0;
  if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) {
    __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_3);
    if (likely(__pyx_t_6)) {
      PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3);
      __Pyx_INCREF(__pyx_t_6);
      __Pyx_INCREF(function);
      __Pyx_DECREF_SET(__pyx_t_3, function);
      __pyx_t_7 = 1;
    }
  }
  /* Three call strategies: vectorcall for Python functions, fastcall for
   * C functions, and a generic tuple-based PyObject_Call fallback. */
  #if CYTHON_FAST_PYCALL
  if (PyFunction_Check(__pyx_t_3)) {
    PyObject *__pyx_temp[3] = {__pyx_t_6, __pyx_t_4, __pyx_t_5};
    __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_7, 2+__pyx_t_7); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 149, __pyx_L1_error)
    __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0;
    __Pyx_GOTREF(__pyx_t_2);
    __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
    __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
  } else
  #endif
  #if CYTHON_FAST_PYCCALL
  if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) {
    PyObject *__pyx_temp[3] = {__pyx_t_6, __pyx_t_4, __pyx_t_5};
    __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_7, 2+__pyx_t_7); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 149, __pyx_L1_error)
    __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0;
    __Pyx_GOTREF(__pyx_t_2);
    __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
    __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
  } else
  #endif
  {
    __pyx_t_8 = PyTuple_New(2+__pyx_t_7); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 149, __pyx_L1_error)
    __Pyx_GOTREF(__pyx_t_8);
    if (__pyx_t_6) {
      __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_6); __pyx_t_6 = NULL;
    }
    __Pyx_GIVEREF(__pyx_t_4);
    PyTuple_SET_ITEM(__pyx_t_8, 0+__pyx_t_7, __pyx_t_4);
    __Pyx_GIVEREF(__pyx_t_5);
    PyTuple_SET_ITEM(__pyx_t_8, 1+__pyx_t_7, __pyx_t_5);
    __pyx_t_4 = 0;
    __pyx_t_5 = 0;
    __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_8, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 149, __pyx_L1_error)
    __Pyx_GOTREF(__pyx_t_2);
    __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
  }
  __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
  __pyx_r = __pyx_t_2;
  __pyx_t_2 = 0;
  goto __pyx_L0;
  /* "httptools/parser/parser.pyx":147
 * ### Public API ###
 *
 * def get_http_version(self):             # <<<<<<<<<<<<<<
 * cdef cparser.llhttp_t* parser = self._cparser
 * return '{}.{}'.format(parser.http_major, parser.http_minor)
 */
  /* function exit code */
  __pyx_L1_error:;
  __Pyx_XDECREF(__pyx_t_2);
  __Pyx_XDECREF(__pyx_t_3);
  __Pyx_XDECREF(__pyx_t_4);
  __Pyx_XDECREF(__pyx_t_5);
  __Pyx_XDECREF(__pyx_t_6);
  __Pyx_XDECREF(__pyx_t_8);
  __Pyx_AddTraceback("httptools.parser.parser.HttpParser.get_http_version", __pyx_clineno, __pyx_lineno, __pyx_filename);
  __pyx_r = NULL;
  __pyx_L0:;
  __Pyx_XGIVEREF(__pyx_r);
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* "httptools/parser/parser.pyx":151
* return '{}.{}'.format(parser.http_major, parser.http_minor)
*
* def should_keep_alive(self): # <<<<<<<<<<<<<<
* return bool(cparser.llhttp_should_keep_alive(self._cparser))
*
*/
/* Python wrapper */
/* Python-visible wrapper for HttpParser.should_keep_alive(): casts self to
 * the extension-type struct and forwards to the implementation function. */
static PyObject *__pyx_pw_9httptools_6parser_6parser_10HttpParser_7should_keep_alive(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
static PyObject *__pyx_pw_9httptools_6parser_6parser_10HttpParser_7should_keep_alive(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
  PyObject *__pyx_r = 0;
  __Pyx_RefNannyDeclarations
  __Pyx_RefNannySetupContext("should_keep_alive (wrapper)", 0);
  __pyx_r = __pyx_pf_9httptools_6parser_6parser_10HttpParser_6should_keep_alive(((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_v_self));
  /* function exit code */
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* NOTE(review): Cython-generated C — regenerate from parser.pyx rather than
 * hand-editing.
 *
 * HttpParser.should_keep_alive (parser.pyx:151): returns
 * bool(llhttp_should_keep_alive(self._cparser)) — i.e. whether llhttp says
 * the connection may be reused. Returns NULL (with exception set) on
 * failure. */
static PyObject *__pyx_pf_9httptools_6parser_6parser_10HttpParser_6should_keep_alive(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self) {
  PyObject *__pyx_r = NULL;
  __Pyx_RefNannyDeclarations
  PyObject *__pyx_t_1 = NULL;
  int __pyx_t_2;
  int __pyx_lineno = 0;
  const char *__pyx_filename = NULL;
  int __pyx_clineno = 0;
  __Pyx_RefNannySetupContext("should_keep_alive", 0);
  /* "httptools/parser/parser.pyx":152
 *
 * def should_keep_alive(self):
 * return bool(cparser.llhttp_should_keep_alive(self._cparser))             # <<<<<<<<<<<<<<
 *
 * def should_upgrade(self):
 */
  __Pyx_XDECREF(__pyx_r);
  /* int result -> Python int -> truthiness -> canonical True/False object. */
  __pyx_t_1 = __Pyx_PyInt_From_int(llhttp_should_keep_alive(__pyx_v_self->_cparser)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 152, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 152, __pyx_L1_error)
  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
  __pyx_t_1 = __Pyx_PyBool_FromLong((!(!__pyx_t_2))); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 152, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  __pyx_r = __pyx_t_1;
  __pyx_t_1 = 0;
  goto __pyx_L0;
  /* "httptools/parser/parser.pyx":151
 * return '{}.{}'.format(parser.http_major, parser.http_minor)
 *
 * def should_keep_alive(self):             # <<<<<<<<<<<<<<
 * return bool(cparser.llhttp_should_keep_alive(self._cparser))
 *
 */
  /* function exit code */
  __pyx_L1_error:;
  __Pyx_XDECREF(__pyx_t_1);
  __Pyx_AddTraceback("httptools.parser.parser.HttpParser.should_keep_alive", __pyx_clineno, __pyx_lineno, __pyx_filename);
  __pyx_r = NULL;
  __pyx_L0:;
  __Pyx_XGIVEREF(__pyx_r);
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* "httptools/parser/parser.pyx":154
* return bool(cparser.llhttp_should_keep_alive(self._cparser))
*
* def should_upgrade(self): # <<<<<<<<<<<<<<
* cdef cparser.llhttp_t* parser = self._cparser
* return bool(parser.upgrade)
*/
/* Python wrapper */
/* Python-visible wrapper for HttpParser.should_upgrade(): casts self to the
 * extension-type struct and forwards to the implementation function. */
static PyObject *__pyx_pw_9httptools_6parser_6parser_10HttpParser_9should_upgrade(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
static PyObject *__pyx_pw_9httptools_6parser_6parser_10HttpParser_9should_upgrade(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
  PyObject *__pyx_r = 0;
  __Pyx_RefNannyDeclarations
  __Pyx_RefNannySetupContext("should_upgrade (wrapper)", 0);
  __pyx_r = __pyx_pf_9httptools_6parser_6parser_10HttpParser_8should_upgrade(((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_v_self));
  /* function exit code */
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* NOTE(review): Cython-generated C — regenerate from parser.pyx rather than
 * hand-editing.
 *
 * HttpParser.should_upgrade (parser.pyx:154): returns bool(parser.upgrade),
 * i.e. whether llhttp flagged this message as a connection upgrade (the
 * `upgrade` field is a uint8_t on the llhttp_t state). Returns NULL (with
 * exception set) on failure. */
static PyObject *__pyx_pf_9httptools_6parser_6parser_10HttpParser_8should_upgrade(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self) {
  llhttp_t *__pyx_v_parser;
  PyObject *__pyx_r = NULL;
  __Pyx_RefNannyDeclarations
  llhttp_t *__pyx_t_1;
  PyObject *__pyx_t_2 = NULL;
  int __pyx_t_3;
  int __pyx_lineno = 0;
  const char *__pyx_filename = NULL;
  int __pyx_clineno = 0;
  __Pyx_RefNannySetupContext("should_upgrade", 0);
  /* "httptools/parser/parser.pyx":155
 *
 * def should_upgrade(self):
 * cdef cparser.llhttp_t* parser = self._cparser             # <<<<<<<<<<<<<<
 * return bool(parser.upgrade)
 *
 */
  __pyx_t_1 = __pyx_v_self->_cparser;
  __pyx_v_parser = __pyx_t_1;
  /* "httptools/parser/parser.pyx":156
 * def should_upgrade(self):
 * cdef cparser.llhttp_t* parser = self._cparser
 * return bool(parser.upgrade)             # <<<<<<<<<<<<<<
 *
 * def feed_data(self, data):
 */
  __Pyx_XDECREF(__pyx_r);
  /* uint8_t -> Python int -> truthiness -> canonical True/False object. */
  __pyx_t_2 = __Pyx_PyInt_From_uint8_t(__pyx_v_parser->upgrade); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 156, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_2);
  __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 156, __pyx_L1_error)
  __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
  __pyx_t_2 = __Pyx_PyBool_FromLong((!(!__pyx_t_3))); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 156, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_2);
  __pyx_r = __pyx_t_2;
  __pyx_t_2 = 0;
  goto __pyx_L0;
  /* "httptools/parser/parser.pyx":154
 * return bool(cparser.llhttp_should_keep_alive(self._cparser))
 *
 * def should_upgrade(self):             # <<<<<<<<<<<<<<
 * cdef cparser.llhttp_t* parser = self._cparser
 * return bool(parser.upgrade)
 */
  /* function exit code */
  __pyx_L1_error:;
  __Pyx_XDECREF(__pyx_t_2);
  __Pyx_AddTraceback("httptools.parser.parser.HttpParser.should_upgrade", __pyx_clineno, __pyx_lineno, __pyx_filename);
  __pyx_r = NULL;
  __pyx_L0:;
  __Pyx_XGIVEREF(__pyx_r);
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* "httptools/parser/parser.pyx":158
* return bool(parser.upgrade)
*
* def feed_data(self, data): # <<<<<<<<<<<<<<
* cdef:
* size_t data_len
*/
/* Python wrapper */
/* Python-visible wrapper for HttpParser.feed_data(data): casts self to the
 * extension-type struct and forwards the single positional argument to the
 * implementation function. */
static PyObject *__pyx_pw_9httptools_6parser_6parser_10HttpParser_11feed_data(PyObject *__pyx_v_self, PyObject *__pyx_v_data); /*proto*/
static PyObject *__pyx_pw_9httptools_6parser_6parser_10HttpParser_11feed_data(PyObject *__pyx_v_self, PyObject *__pyx_v_data) {
  PyObject *__pyx_r = 0;
  __Pyx_RefNannyDeclarations
  __Pyx_RefNannySetupContext("feed_data (wrapper)", 0);
  __pyx_r = __pyx_pf_9httptools_6parser_6parser_10HttpParser_10feed_data(((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_v_self), ((PyObject *)__pyx_v_data));
  /* function exit code */
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
static PyObject *__pyx_pf_9httptools_6parser_6parser_10HttpParser_10feed_data(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self, PyObject *__pyx_v_data) {
size_t __pyx_v_data_len;
llhttp_errno_t __pyx_v_err;
Py_buffer *__pyx_v_buf;
int __pyx_v_owning_buf;
char *__pyx_v_err_pos;
PyObject *__pyx_v_ex = NULL;
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
int __pyx_t_1;
int __pyx_t_2;
int __pyx_t_3;
PyObject *__pyx_t_4 = NULL;
PyObject *__pyx_t_5 = NULL;
PyObject *__pyx_t_6 = NULL;
PyObject *__pyx_t_7 = NULL;
int __pyx_t_8;
char const *__pyx_t_9;
PyObject *__pyx_t_10 = NULL;
PyObject *__pyx_t_11 = NULL;
PyObject *__pyx_t_12 = NULL;
PyObject *__pyx_t_13 = NULL;
PyObject *__pyx_t_14 = NULL;
PyObject *__pyx_t_15 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("feed_data", 0);
/* "httptools/parser/parser.pyx":163
* cparser.llhttp_errno_t err
* Py_buffer *buf
* bint owning_buf = False # <<<<<<<<<<<<<<
* char* err_pos
*
*/
__pyx_v_owning_buf = 0;
/* "httptools/parser/parser.pyx":166
* char* err_pos
*
* if PyMemoryView_Check(data): # <<<<<<<<<<<<<<
* buf = PyMemoryView_GET_BUFFER(data)
* data_len = <size_t>buf.len
*/
__pyx_t_1 = (PyMemoryView_Check(__pyx_v_data) != 0);
if (__pyx_t_1) {
/* "httptools/parser/parser.pyx":167
*
* if PyMemoryView_Check(data):
* buf = PyMemoryView_GET_BUFFER(data) # <<<<<<<<<<<<<<
* data_len = <size_t>buf.len
* err = cparser.llhttp_execute(
*/
__pyx_v_buf = PyMemoryView_GET_BUFFER(__pyx_v_data);
/* "httptools/parser/parser.pyx":168
* if PyMemoryView_Check(data):
* buf = PyMemoryView_GET_BUFFER(data)
* data_len = <size_t>buf.len # <<<<<<<<<<<<<<
* err = cparser.llhttp_execute(
* self._cparser,
*/
__pyx_v_data_len = ((size_t)__pyx_v_buf->len);
/* "httptools/parser/parser.pyx":169
* buf = PyMemoryView_GET_BUFFER(data)
* data_len = <size_t>buf.len
* err = cparser.llhttp_execute( # <<<<<<<<<<<<<<
* self._cparser,
* <char*>buf.buf,
*/
__pyx_v_err = llhttp_execute(__pyx_v_self->_cparser, ((char *)__pyx_v_buf->buf), __pyx_v_data_len);
/* "httptools/parser/parser.pyx":166
* char* err_pos
*
* if PyMemoryView_Check(data): # <<<<<<<<<<<<<<
* buf = PyMemoryView_GET_BUFFER(data)
* data_len = <size_t>buf.len
*/
goto __pyx_L3;
}
/* "httptools/parser/parser.pyx":175
*
* else:
* buf = &self.py_buf # <<<<<<<<<<<<<<
* PyObject_GetBuffer(data, buf, PyBUF_SIMPLE)
* owning_buf = True
*/
/*else*/ {
__pyx_v_buf = (&__pyx_v_self->py_buf);
/* "httptools/parser/parser.pyx":176
* else:
* buf = &self.py_buf
* PyObject_GetBuffer(data, buf, PyBUF_SIMPLE) # <<<<<<<<<<<<<<
* owning_buf = True
* data_len = <size_t>buf.len
*/
__pyx_t_2 = PyObject_GetBuffer(__pyx_v_data, __pyx_v_buf, PyBUF_SIMPLE); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 176, __pyx_L1_error)
/* "httptools/parser/parser.pyx":177
* buf = &self.py_buf
* PyObject_GetBuffer(data, buf, PyBUF_SIMPLE)
* owning_buf = True # <<<<<<<<<<<<<<
* data_len = <size_t>buf.len
*
*/
__pyx_v_owning_buf = 1;
/* "httptools/parser/parser.pyx":178
* PyObject_GetBuffer(data, buf, PyBUF_SIMPLE)
* owning_buf = True
* data_len = <size_t>buf.len # <<<<<<<<<<<<<<
*
* err = cparser.llhttp_execute(
*/
__pyx_v_data_len = ((size_t)__pyx_v_buf->len);
/* "httptools/parser/parser.pyx":180
* data_len = <size_t>buf.len
*
* err = cparser.llhttp_execute( # <<<<<<<<<<<<<<
* self._cparser,
* <char*>buf.buf,
*/
__pyx_v_err = llhttp_execute(__pyx_v_self->_cparser, ((char *)__pyx_v_buf->buf), __pyx_v_data_len);
}
__pyx_L3:;
/* "httptools/parser/parser.pyx":185
* data_len)
*
* try: # <<<<<<<<<<<<<<
* if self._cparser.upgrade == 1 and err == cparser.HPE_PAUSED_UPGRADE:
* err_pos = cparser.llhttp_get_error_pos(self._cparser)
*/
/*try:*/ {
/* "httptools/parser/parser.pyx":186
*
* try:
* if self._cparser.upgrade == 1 and err == cparser.HPE_PAUSED_UPGRADE: # <<<<<<<<<<<<<<
* err_pos = cparser.llhttp_get_error_pos(self._cparser)
*
*/
__pyx_t_3 = ((__pyx_v_self->_cparser->upgrade == 1) != 0);
if (__pyx_t_3) {
} else {
__pyx_t_1 = __pyx_t_3;
goto __pyx_L8_bool_binop_done;
}
__pyx_t_3 = ((__pyx_v_err == HPE_PAUSED_UPGRADE) != 0);
__pyx_t_1 = __pyx_t_3;
__pyx_L8_bool_binop_done:;
if (unlikely(__pyx_t_1)) {
/* "httptools/parser/parser.pyx":187
* try:
* if self._cparser.upgrade == 1 and err == cparser.HPE_PAUSED_UPGRADE:
* err_pos = cparser.llhttp_get_error_pos(self._cparser) # <<<<<<<<<<<<<<
*
* # Immediately free the parser from "error" state, simulating
*/
__pyx_v_err_pos = llhttp_get_error_pos(__pyx_v_self->_cparser);
/* "httptools/parser/parser.pyx":193
* # allow users manually "resume after upgrade", and 2) the use
* # case for resuming parsing is very rare.
* cparser.llhttp_resume_after_upgrade(self._cparser) # <<<<<<<<<<<<<<
*
* # The err_pos here is specific for the input buf. So if we ever
*/
llhttp_resume_after_upgrade(__pyx_v_self->_cparser);
/* "httptools/parser/parser.pyx":199
* # successive calls to feed_data() until resume_after_upgrade is
* # called), we have to store the result and keep our own state.
* raise HttpParserUpgrade(err_pos - <char*>buf.buf) # <<<<<<<<<<<<<<
* finally:
* if owning_buf:
*/
__Pyx_GetModuleGlobalName(__pyx_t_5, __pyx_n_s_HttpParserUpgrade); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 199, __pyx_L5_error)
__Pyx_GOTREF(__pyx_t_5);
__pyx_t_6 = __Pyx_PyInt_From_ptrdiff_t((__pyx_v_err_pos - ((char *)__pyx_v_buf->buf))); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 199, __pyx_L5_error)
__Pyx_GOTREF(__pyx_t_6);
__pyx_t_7 = NULL;
if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_5))) {
__pyx_t_7 = PyMethod_GET_SELF(__pyx_t_5);
if (likely(__pyx_t_7)) {
PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5);
__Pyx_INCREF(__pyx_t_7);
__Pyx_INCREF(function);
__Pyx_DECREF_SET(__pyx_t_5, function);
}
}
__pyx_t_4 = (__pyx_t_7) ? __Pyx_PyObject_Call2Args(__pyx_t_5, __pyx_t_7, __pyx_t_6) : __Pyx_PyObject_CallOneArg(__pyx_t_5, __pyx_t_6);
__Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
__Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 199, __pyx_L5_error)
__Pyx_GOTREF(__pyx_t_4);
__Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
__Pyx_Raise(__pyx_t_4, 0, 0, 0);
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
__PYX_ERR(0, 199, __pyx_L5_error)
/* "httptools/parser/parser.pyx":186
*
* try:
* if self._cparser.upgrade == 1 and err == cparser.HPE_PAUSED_UPGRADE: # <<<<<<<<<<<<<<
* err_pos = cparser.llhttp_get_error_pos(self._cparser)
*
*/
}
}
/* "httptools/parser/parser.pyx":201
* raise HttpParserUpgrade(err_pos - <char*>buf.buf)
* finally:
* if owning_buf: # <<<<<<<<<<<<<<
* PyBuffer_Release(buf)
*
*/
/*finally:*/ {
/*normal exit:*/{
__pyx_t_1 = (__pyx_v_owning_buf != 0);
if (__pyx_t_1) {
/* "httptools/parser/parser.pyx":202
* finally:
* if owning_buf:
* PyBuffer_Release(buf) # <<<<<<<<<<<<<<
*
* if err != cparser.HPE_OK:
*/
PyBuffer_Release(__pyx_v_buf);
/* "httptools/parser/parser.pyx":201
* raise HttpParserUpgrade(err_pos - <char*>buf.buf)
* finally:
* if owning_buf: # <<<<<<<<<<<<<<
* PyBuffer_Release(buf)
*
*/
}
goto __pyx_L6;
}
__pyx_L5_error:;
/*exception exit:*/{
__Pyx_PyThreadState_declare
__Pyx_PyThreadState_assign
__pyx_t_10 = 0; __pyx_t_11 = 0; __pyx_t_12 = 0; __pyx_t_13 = 0; __pyx_t_14 = 0; __pyx_t_15 = 0;
__Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
__Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
__Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0;
__Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_13, &__pyx_t_14, &__pyx_t_15);
if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_10, &__pyx_t_11, &__pyx_t_12) < 0)) __Pyx_ErrFetch(&__pyx_t_10, &__pyx_t_11, &__pyx_t_12);
__Pyx_XGOTREF(__pyx_t_10);
__Pyx_XGOTREF(__pyx_t_11);
__Pyx_XGOTREF(__pyx_t_12);
__Pyx_XGOTREF(__pyx_t_13);
__Pyx_XGOTREF(__pyx_t_14);
__Pyx_XGOTREF(__pyx_t_15);
__pyx_t_2 = __pyx_lineno; __pyx_t_8 = __pyx_clineno; __pyx_t_9 = __pyx_filename;
{
__pyx_t_1 = (__pyx_v_owning_buf != 0);
if (__pyx_t_1) {
/* "httptools/parser/parser.pyx":202
* finally:
* if owning_buf:
* PyBuffer_Release(buf) # <<<<<<<<<<<<<<
*
* if err != cparser.HPE_OK:
*/
PyBuffer_Release(__pyx_v_buf);
/* "httptools/parser/parser.pyx":201
* raise HttpParserUpgrade(err_pos - <char*>buf.buf)
* finally:
* if owning_buf: # <<<<<<<<<<<<<<
* PyBuffer_Release(buf)
*
*/
}
}
if (PY_MAJOR_VERSION >= 3) {
__Pyx_XGIVEREF(__pyx_t_13);
__Pyx_XGIVEREF(__pyx_t_14);
__Pyx_XGIVEREF(__pyx_t_15);
__Pyx_ExceptionReset(__pyx_t_13, __pyx_t_14, __pyx_t_15);
}
__Pyx_XGIVEREF(__pyx_t_10);
__Pyx_XGIVEREF(__pyx_t_11);
__Pyx_XGIVEREF(__pyx_t_12);
__Pyx_ErrRestore(__pyx_t_10, __pyx_t_11, __pyx_t_12);
__pyx_t_10 = 0; __pyx_t_11 = 0; __pyx_t_12 = 0; __pyx_t_13 = 0; __pyx_t_14 = 0; __pyx_t_15 = 0;
__pyx_lineno = __pyx_t_2; __pyx_clineno = __pyx_t_8; __pyx_filename = __pyx_t_9;
goto __pyx_L1_error;
}
__pyx_L6:;
}
/* "httptools/parser/parser.pyx":204
* PyBuffer_Release(buf)
*
* if err != cparser.HPE_OK: # <<<<<<<<<<<<<<
* ex = parser_error_from_errno(
* self._cparser,
*/
__pyx_t_1 = ((__pyx_v_err != HPE_OK) != 0);
if (__pyx_t_1) {
/* "httptools/parser/parser.pyx":205
*
* if err != cparser.HPE_OK:
* ex = parser_error_from_errno( # <<<<<<<<<<<<<<
* self._cparser,
* <cparser.llhttp_errno_t> self._cparser.error)
*/
__pyx_t_4 = __pyx_f_9httptools_6parser_6parser_parser_error_from_errno(__pyx_v_self->_cparser, ((llhttp_errno_t)__pyx_v_self->_cparser->error)); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 205, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__pyx_v_ex = __pyx_t_4;
__pyx_t_4 = 0;
/* "httptools/parser/parser.pyx":208
* self._cparser,
* <cparser.llhttp_errno_t> self._cparser.error)
* if isinstance(ex, HttpParserCallbackError): # <<<<<<<<<<<<<<
* if self._last_error is not None:
* ex.__context__ = self._last_error
*/
__Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_HttpParserCallbackError); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 208, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__pyx_t_1 = PyObject_IsInstance(__pyx_v_ex, __pyx_t_4); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(0, 208, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
__pyx_t_3 = (__pyx_t_1 != 0);
if (__pyx_t_3) {
/* "httptools/parser/parser.pyx":209
* <cparser.llhttp_errno_t> self._cparser.error)
* if isinstance(ex, HttpParserCallbackError):
* if self._last_error is not None: # <<<<<<<<<<<<<<
* ex.__context__ = self._last_error
* self._last_error = None
*/
__pyx_t_3 = (__pyx_v_self->_last_error != Py_None);
__pyx_t_1 = (__pyx_t_3 != 0);
if (__pyx_t_1) {
/* "httptools/parser/parser.pyx":210
* if isinstance(ex, HttpParserCallbackError):
* if self._last_error is not None:
* ex.__context__ = self._last_error # <<<<<<<<<<<<<<
* self._last_error = None
* raise ex
*/
__pyx_t_4 = __pyx_v_self->_last_error;
__Pyx_INCREF(__pyx_t_4);
if (__Pyx_PyObject_SetAttrStr(__pyx_v_ex, __pyx_n_s_context, __pyx_t_4) < 0) __PYX_ERR(0, 210, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
/* "httptools/parser/parser.pyx":211
* if self._last_error is not None:
* ex.__context__ = self._last_error
* self._last_error = None # <<<<<<<<<<<<<<
* raise ex
*
*/
__Pyx_INCREF(Py_None);
__Pyx_GIVEREF(Py_None);
__Pyx_GOTREF(__pyx_v_self->_last_error);
__Pyx_DECREF(__pyx_v_self->_last_error);
__pyx_v_self->_last_error = Py_None;
/* "httptools/parser/parser.pyx":209
* <cparser.llhttp_errno_t> self._cparser.error)
* if isinstance(ex, HttpParserCallbackError):
* if self._last_error is not None: # <<<<<<<<<<<<<<
* ex.__context__ = self._last_error
* self._last_error = None
*/
}
/* "httptools/parser/parser.pyx":208
* self._cparser,
* <cparser.llhttp_errno_t> self._cparser.error)
* if isinstance(ex, HttpParserCallbackError): # <<<<<<<<<<<<<<
* if self._last_error is not None:
* ex.__context__ = self._last_error
*/
}
/* "httptools/parser/parser.pyx":212
* ex.__context__ = self._last_error
* self._last_error = None
* raise ex # <<<<<<<<<<<<<<
*
*
*/
__Pyx_Raise(__pyx_v_ex, 0, 0, 0);
__PYX_ERR(0, 212, __pyx_L1_error)
/* "httptools/parser/parser.pyx":204
* PyBuffer_Release(buf)
*
* if err != cparser.HPE_OK: # <<<<<<<<<<<<<<
* ex = parser_error_from_errno(
* self._cparser,
*/
}
/* "httptools/parser/parser.pyx":158
* return bool(parser.upgrade)
*
* def feed_data(self, data): # <<<<<<<<<<<<<<
* cdef:
* size_t data_len
*/
/* function exit code */
__pyx_r = Py_None; __Pyx_INCREF(Py_None);
goto __pyx_L0;
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_4);
__Pyx_XDECREF(__pyx_t_5);
__Pyx_XDECREF(__pyx_t_6);
__Pyx_XDECREF(__pyx_t_7);
__Pyx_AddTraceback("httptools.parser.parser.HttpParser.feed_data", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__pyx_L0:;
__Pyx_XDECREF(__pyx_v_ex);
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "(tree fragment)":1
* def __reduce_cython__(self): # <<<<<<<<<<<<<<
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
* def __setstate_cython__(self, __pyx_state):
*/
/* Python wrapper */
static PyObject *__pyx_pw_9httptools_6parser_6parser_10HttpParser_13__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
static PyObject *__pyx_pw_9httptools_6parser_6parser_10HttpParser_13__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0);
__pyx_r = __pyx_pf_9httptools_6parser_6parser_10HttpParser_12__reduce_cython__(((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static PyObject *__pyx_pf_9httptools_6parser_6parser_10HttpParser_12__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__reduce_cython__", 0);
/* "(tree fragment)":2
* def __reduce_cython__(self):
* raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<<
* def __setstate_cython__(self, __pyx_state):
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
*/
__pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 2, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_Raise(__pyx_t_1, 0, 0, 0);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__PYX_ERR(1, 2, __pyx_L1_error)
/* "(tree fragment)":1
* def __reduce_cython__(self): # <<<<<<<<<<<<<<
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
* def __setstate_cython__(self, __pyx_state):
*/
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_AddTraceback("httptools.parser.parser.HttpParser.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "(tree fragment)":3
* def __reduce_cython__(self):
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
* def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<<
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
*/
/* Python wrapper */
static PyObject *__pyx_pw_9httptools_6parser_6parser_10HttpParser_15__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/
static PyObject *__pyx_pw_9httptools_6parser_6parser_10HttpParser_15__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0);
__pyx_r = __pyx_pf_9httptools_6parser_6parser_10HttpParser_14__setstate_cython__(((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static PyObject *__pyx_pf_9httptools_6parser_6parser_10HttpParser_14__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__setstate_cython__", 0);
/* "(tree fragment)":4
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
* def __setstate_cython__(self, __pyx_state):
* raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<<
*/
__pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__3, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_Raise(__pyx_t_1, 0, 0, 0);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__PYX_ERR(1, 4, __pyx_L1_error)
/* "(tree fragment)":3
* def __reduce_cython__(self):
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
* def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<<
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
*/
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_AddTraceback("httptools.parser.parser.HttpParser.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "httptools/parser/parser.pyx":217
* cdef class HttpRequestParser(HttpParser):
*
* def __init__(self, protocol): # <<<<<<<<<<<<<<
* self._init(protocol, cparser.HTTP_REQUEST)
*
*/
/* Python wrapper */
static int __pyx_pw_9httptools_6parser_6parser_17HttpRequestParser_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
static int __pyx_pw_9httptools_6parser_6parser_17HttpRequestParser_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
PyObject *__pyx_v_protocol = 0;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
int __pyx_r;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__init__ (wrapper)", 0);
{
static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_protocol,0};
PyObject* values[1] = {0};
if (unlikely(__pyx_kwds)) {
Py_ssize_t kw_args;
const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args);
switch (pos_args) {
case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
CYTHON_FALLTHROUGH;
case 0: break;
default: goto __pyx_L5_argtuple_error;
}
kw_args = PyDict_Size(__pyx_kwds);
switch (pos_args) {
case 0:
if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_protocol)) != 0)) kw_args--;
else goto __pyx_L5_argtuple_error;
}
if (unlikely(kw_args > 0)) {
if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(0, 217, __pyx_L3_error)
}
} else if (PyTuple_GET_SIZE(__pyx_args) != 1) {
goto __pyx_L5_argtuple_error;
} else {
values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
}
__pyx_v_protocol = values[0];
}
goto __pyx_L4_argument_unpacking_done;
__pyx_L5_argtuple_error:;
__Pyx_RaiseArgtupleInvalid("__init__", 1, 1, 1, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 217, __pyx_L3_error)
__pyx_L3_error:;
__Pyx_AddTraceback("httptools.parser.parser.HttpRequestParser.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__Pyx_RefNannyFinishContext();
return -1;
__pyx_L4_argument_unpacking_done:;
__pyx_r = __pyx_pf_9httptools_6parser_6parser_17HttpRequestParser___init__(((struct __pyx_obj_9httptools_6parser_6parser_HttpRequestParser *)__pyx_v_self), __pyx_v_protocol);
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static int __pyx_pf_9httptools_6parser_6parser_17HttpRequestParser___init__(struct __pyx_obj_9httptools_6parser_6parser_HttpRequestParser *__pyx_v_self, PyObject *__pyx_v_protocol) {
int __pyx_r;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
int __pyx_t_2;
int __pyx_t_3;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__init__", 0);
/* "httptools/parser/parser.pyx":218
*
* def __init__(self, protocol):
* self._init(protocol, cparser.HTTP_REQUEST) # <<<<<<<<<<<<<<
*
* self._proto_on_url = getattr(protocol, 'on_url', None)
*/
__pyx_t_1 = ((struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpRequestParser *)__pyx_v_self->__pyx_base.__pyx_vtab)->__pyx_base._init(((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_v_self), __pyx_v_protocol, HTTP_REQUEST); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 218, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
/* "httptools/parser/parser.pyx":220
* self._init(protocol, cparser.HTTP_REQUEST)
*
* self._proto_on_url = getattr(protocol, 'on_url', None) # <<<<<<<<<<<<<<
* if self._proto_on_url is not None:
* self._csettings.on_url = cb_on_url
*/
__pyx_t_1 = __Pyx_GetAttr3(__pyx_v_protocol, __pyx_n_u_on_url, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 220, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_GIVEREF(__pyx_t_1);
__Pyx_GOTREF(__pyx_v_self->__pyx_base._proto_on_url);
__Pyx_DECREF(__pyx_v_self->__pyx_base._proto_on_url);
__pyx_v_self->__pyx_base._proto_on_url = __pyx_t_1;
__pyx_t_1 = 0;
/* "httptools/parser/parser.pyx":221
*
* self._proto_on_url = getattr(protocol, 'on_url', None)
* if self._proto_on_url is not None: # <<<<<<<<<<<<<<
* self._csettings.on_url = cb_on_url
*
*/
__pyx_t_2 = (__pyx_v_self->__pyx_base._proto_on_url != Py_None);
__pyx_t_3 = (__pyx_t_2 != 0);
if (__pyx_t_3) {
/* "httptools/parser/parser.pyx":222
* self._proto_on_url = getattr(protocol, 'on_url', None)
* if self._proto_on_url is not None:
* self._csettings.on_url = cb_on_url # <<<<<<<<<<<<<<
*
* def get_method(self):
*/
__pyx_v_self->__pyx_base._csettings->on_url = __pyx_f_9httptools_6parser_6parser_cb_on_url;
/* "httptools/parser/parser.pyx":221
*
* self._proto_on_url = getattr(protocol, 'on_url', None)
* if self._proto_on_url is not None: # <<<<<<<<<<<<<<
* self._csettings.on_url = cb_on_url
*
*/
}
/* "httptools/parser/parser.pyx":217
* cdef class HttpRequestParser(HttpParser):
*
* def __init__(self, protocol): # <<<<<<<<<<<<<<
* self._init(protocol, cparser.HTTP_REQUEST)
*
*/
/* function exit code */
__pyx_r = 0;
goto __pyx_L0;
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_AddTraceback("httptools.parser.parser.HttpRequestParser.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = -1;
__pyx_L0:;
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "httptools/parser/parser.pyx":224
* self._csettings.on_url = cb_on_url
*
* def get_method(self): # <<<<<<<<<<<<<<
* cdef cparser.llhttp_t* parser = self._cparser
* return cparser.llhttp_method_name(<cparser.llhttp_method_t> parser.method)
*/
/* Python wrapper */
static PyObject *__pyx_pw_9httptools_6parser_6parser_17HttpRequestParser_3get_method(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
static PyObject *__pyx_pw_9httptools_6parser_6parser_17HttpRequestParser_3get_method(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("get_method (wrapper)", 0);
__pyx_r = __pyx_pf_9httptools_6parser_6parser_17HttpRequestParser_2get_method(((struct __pyx_obj_9httptools_6parser_6parser_HttpRequestParser *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static PyObject *__pyx_pf_9httptools_6parser_6parser_17HttpRequestParser_2get_method(struct __pyx_obj_9httptools_6parser_6parser_HttpRequestParser *__pyx_v_self) {
llhttp_t *__pyx_v_parser;
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
llhttp_t *__pyx_t_1;
PyObject *__pyx_t_2 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("get_method", 0);
/* "httptools/parser/parser.pyx":225
*
* def get_method(self):
* cdef cparser.llhttp_t* parser = self._cparser # <<<<<<<<<<<<<<
* return cparser.llhttp_method_name(<cparser.llhttp_method_t> parser.method)
*
*/
__pyx_t_1 = __pyx_v_self->__pyx_base._cparser;
__pyx_v_parser = __pyx_t_1;
/* "httptools/parser/parser.pyx":226
* def get_method(self):
* cdef cparser.llhttp_t* parser = self._cparser
* return cparser.llhttp_method_name(<cparser.llhttp_method_t> parser.method) # <<<<<<<<<<<<<<
*
*
*/
__Pyx_XDECREF(__pyx_r);
__pyx_t_2 = __Pyx_PyBytes_FromString(llhttp_method_name(((llhttp_method_t)__pyx_v_parser->method))); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 226, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__pyx_r = __pyx_t_2;
__pyx_t_2 = 0;
goto __pyx_L0;
/* "httptools/parser/parser.pyx":224
* self._csettings.on_url = cb_on_url
*
* def get_method(self): # <<<<<<<<<<<<<<
* cdef cparser.llhttp_t* parser = self._cparser
* return cparser.llhttp_method_name(<cparser.llhttp_method_t> parser.method)
*/
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_2);
__Pyx_AddTraceback("httptools.parser.parser.HttpRequestParser.get_method", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "(tree fragment)":1
* def __reduce_cython__(self): # <<<<<<<<<<<<<<
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
* def __setstate_cython__(self, __pyx_state):
*/
/* Python wrapper */
static PyObject *__pyx_pw_9httptools_6parser_6parser_17HttpRequestParser_5__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
static PyObject *__pyx_pw_9httptools_6parser_6parser_17HttpRequestParser_5__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0);
__pyx_r = __pyx_pf_9httptools_6parser_6parser_17HttpRequestParser_4__reduce_cython__(((struct __pyx_obj_9httptools_6parser_6parser_HttpRequestParser *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static PyObject *__pyx_pf_9httptools_6parser_6parser_17HttpRequestParser_4__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_9httptools_6parser_6parser_HttpRequestParser *__pyx_v_self) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__reduce_cython__", 0);
/* "(tree fragment)":2
* def __reduce_cython__(self):
* raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<<
* def __setstate_cython__(self, __pyx_state):
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
*/
__pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__4, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 2, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_Raise(__pyx_t_1, 0, 0, 0);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__PYX_ERR(1, 2, __pyx_L1_error)
/* "(tree fragment)":1
* def __reduce_cython__(self): # <<<<<<<<<<<<<<
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
* def __setstate_cython__(self, __pyx_state):
*/
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_AddTraceback("httptools.parser.parser.HttpRequestParser.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "(tree fragment)":3
* def __reduce_cython__(self):
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
* def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<<
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
*/
/* Python wrapper */
static PyObject *__pyx_pw_9httptools_6parser_6parser_17HttpRequestParser_7__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/
static PyObject *__pyx_pw_9httptools_6parser_6parser_17HttpRequestParser_7__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0);
__pyx_r = __pyx_pf_9httptools_6parser_6parser_17HttpRequestParser_6__setstate_cython__(((struct __pyx_obj_9httptools_6parser_6parser_HttpRequestParser *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static PyObject *__pyx_pf_9httptools_6parser_6parser_17HttpRequestParser_6__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_9httptools_6parser_6parser_HttpRequestParser *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__setstate_cython__", 0);
/* "(tree fragment)":4
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
* def __setstate_cython__(self, __pyx_state):
* raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<<
*/
__pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__5, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_Raise(__pyx_t_1, 0, 0, 0);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__PYX_ERR(1, 4, __pyx_L1_error)
/* "(tree fragment)":3
* def __reduce_cython__(self):
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
* def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<<
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
*/
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_AddTraceback("httptools.parser.parser.HttpRequestParser.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "httptools/parser/parser.pyx":231
* cdef class HttpResponseParser(HttpParser):
*
* def __init__(self, protocol): # <<<<<<<<<<<<<<
* self._init(protocol, cparser.HTTP_RESPONSE)
*
*/
/* Python wrapper */
static int __pyx_pw_9httptools_6parser_6parser_18HttpResponseParser_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
static int __pyx_pw_9httptools_6parser_6parser_18HttpResponseParser_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
PyObject *__pyx_v_protocol = 0;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
int __pyx_r;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__init__ (wrapper)", 0);
{
static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_protocol,0};
PyObject* values[1] = {0};
if (unlikely(__pyx_kwds)) {
Py_ssize_t kw_args;
const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args);
switch (pos_args) {
case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
CYTHON_FALLTHROUGH;
case 0: break;
default: goto __pyx_L5_argtuple_error;
}
kw_args = PyDict_Size(__pyx_kwds);
switch (pos_args) {
case 0:
if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_protocol)) != 0)) kw_args--;
else goto __pyx_L5_argtuple_error;
}
if (unlikely(kw_args > 0)) {
if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(0, 231, __pyx_L3_error)
}
} else if (PyTuple_GET_SIZE(__pyx_args) != 1) {
goto __pyx_L5_argtuple_error;
} else {
values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
}
__pyx_v_protocol = values[0];
}
goto __pyx_L4_argument_unpacking_done;
__pyx_L5_argtuple_error:;
__Pyx_RaiseArgtupleInvalid("__init__", 1, 1, 1, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 231, __pyx_L3_error)
__pyx_L3_error:;
__Pyx_AddTraceback("httptools.parser.parser.HttpResponseParser.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__Pyx_RefNannyFinishContext();
return -1;
__pyx_L4_argument_unpacking_done:;
__pyx_r = __pyx_pf_9httptools_6parser_6parser_18HttpResponseParser___init__(((struct __pyx_obj_9httptools_6parser_6parser_HttpResponseParser *)__pyx_v_self), __pyx_v_protocol);
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static int __pyx_pf_9httptools_6parser_6parser_18HttpResponseParser___init__(struct __pyx_obj_9httptools_6parser_6parser_HttpResponseParser *__pyx_v_self, PyObject *__pyx_v_protocol) {
int __pyx_r;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
int __pyx_t_2;
int __pyx_t_3;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__init__", 0);
/* "httptools/parser/parser.pyx":232
*
* def __init__(self, protocol):
* self._init(protocol, cparser.HTTP_RESPONSE) # <<<<<<<<<<<<<<
*
* self._proto_on_status = getattr(protocol, 'on_status', None)
*/
__pyx_t_1 = ((struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpResponseParser *)__pyx_v_self->__pyx_base.__pyx_vtab)->__pyx_base._init(((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_v_self), __pyx_v_protocol, HTTP_RESPONSE); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 232, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
/* "httptools/parser/parser.pyx":234
* self._init(protocol, cparser.HTTP_RESPONSE)
*
* self._proto_on_status = getattr(protocol, 'on_status', None) # <<<<<<<<<<<<<<
* if self._proto_on_status is not None:
* self._csettings.on_status = cb_on_status
*/
__pyx_t_1 = __Pyx_GetAttr3(__pyx_v_protocol, __pyx_n_u_on_status, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 234, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_GIVEREF(__pyx_t_1);
__Pyx_GOTREF(__pyx_v_self->__pyx_base._proto_on_status);
__Pyx_DECREF(__pyx_v_self->__pyx_base._proto_on_status);
__pyx_v_self->__pyx_base._proto_on_status = __pyx_t_1;
__pyx_t_1 = 0;
/* "httptools/parser/parser.pyx":235
*
* self._proto_on_status = getattr(protocol, 'on_status', None)
* if self._proto_on_status is not None: # <<<<<<<<<<<<<<
* self._csettings.on_status = cb_on_status
*
*/
__pyx_t_2 = (__pyx_v_self->__pyx_base._proto_on_status != Py_None);
__pyx_t_3 = (__pyx_t_2 != 0);
if (__pyx_t_3) {
/* "httptools/parser/parser.pyx":236
* self._proto_on_status = getattr(protocol, 'on_status', None)
* if self._proto_on_status is not None:
* self._csettings.on_status = cb_on_status # <<<<<<<<<<<<<<
*
* def get_status_code(self):
*/
__pyx_v_self->__pyx_base._csettings->on_status = __pyx_f_9httptools_6parser_6parser_cb_on_status;
/* "httptools/parser/parser.pyx":235
*
* self._proto_on_status = getattr(protocol, 'on_status', None)
* if self._proto_on_status is not None: # <<<<<<<<<<<<<<
* self._csettings.on_status = cb_on_status
*
*/
}
/* "httptools/parser/parser.pyx":231
* cdef class HttpResponseParser(HttpParser):
*
* def __init__(self, protocol): # <<<<<<<<<<<<<<
* self._init(protocol, cparser.HTTP_RESPONSE)
*
*/
/* function exit code */
__pyx_r = 0;
goto __pyx_L0;
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_AddTraceback("httptools.parser.parser.HttpResponseParser.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = -1;
__pyx_L0:;
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "httptools/parser/parser.pyx":238
* self._csettings.on_status = cb_on_status
*
* def get_status_code(self): # <<<<<<<<<<<<<<
* cdef cparser.llhttp_t* parser = self._cparser
* return parser.status_code
*/
/* Python wrapper */
static PyObject *__pyx_pw_9httptools_6parser_6parser_18HttpResponseParser_3get_status_code(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
static PyObject *__pyx_pw_9httptools_6parser_6parser_18HttpResponseParser_3get_status_code(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("get_status_code (wrapper)", 0);
__pyx_r = __pyx_pf_9httptools_6parser_6parser_18HttpResponseParser_2get_status_code(((struct __pyx_obj_9httptools_6parser_6parser_HttpResponseParser *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static PyObject *__pyx_pf_9httptools_6parser_6parser_18HttpResponseParser_2get_status_code(struct __pyx_obj_9httptools_6parser_6parser_HttpResponseParser *__pyx_v_self) {
llhttp_t *__pyx_v_parser;
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
llhttp_t *__pyx_t_1;
PyObject *__pyx_t_2 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("get_status_code", 0);
/* "httptools/parser/parser.pyx":239
*
* def get_status_code(self):
* cdef cparser.llhttp_t* parser = self._cparser # <<<<<<<<<<<<<<
* return parser.status_code
*
*/
__pyx_t_1 = __pyx_v_self->__pyx_base._cparser;
__pyx_v_parser = __pyx_t_1;
/* "httptools/parser/parser.pyx":240
* def get_status_code(self):
* cdef cparser.llhttp_t* parser = self._cparser
* return parser.status_code # <<<<<<<<<<<<<<
*
*
*/
__Pyx_XDECREF(__pyx_r);
__pyx_t_2 = __Pyx_PyInt_From_uint16_t(__pyx_v_parser->status_code); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 240, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__pyx_r = __pyx_t_2;
__pyx_t_2 = 0;
goto __pyx_L0;
/* "httptools/parser/parser.pyx":238
* self._csettings.on_status = cb_on_status
*
* def get_status_code(self): # <<<<<<<<<<<<<<
* cdef cparser.llhttp_t* parser = self._cparser
* return parser.status_code
*/
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_2);
__Pyx_AddTraceback("httptools.parser.parser.HttpResponseParser.get_status_code", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "(tree fragment)":1
* def __reduce_cython__(self): # <<<<<<<<<<<<<<
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
* def __setstate_cython__(self, __pyx_state):
*/
/* Python wrapper */
static PyObject *__pyx_pw_9httptools_6parser_6parser_18HttpResponseParser_5__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
static PyObject *__pyx_pw_9httptools_6parser_6parser_18HttpResponseParser_5__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0);
__pyx_r = __pyx_pf_9httptools_6parser_6parser_18HttpResponseParser_4__reduce_cython__(((struct __pyx_obj_9httptools_6parser_6parser_HttpResponseParser *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static PyObject *__pyx_pf_9httptools_6parser_6parser_18HttpResponseParser_4__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_9httptools_6parser_6parser_HttpResponseParser *__pyx_v_self) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__reduce_cython__", 0);
/* "(tree fragment)":2
* def __reduce_cython__(self):
* raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<<
* def __setstate_cython__(self, __pyx_state):
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
*/
__pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__6, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 2, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_Raise(__pyx_t_1, 0, 0, 0);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__PYX_ERR(1, 2, __pyx_L1_error)
/* "(tree fragment)":1
* def __reduce_cython__(self): # <<<<<<<<<<<<<<
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
* def __setstate_cython__(self, __pyx_state):
*/
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_AddTraceback("httptools.parser.parser.HttpResponseParser.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "(tree fragment)":3
* def __reduce_cython__(self):
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
* def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<<
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
*/
/* Python wrapper */
/* CPython-facing wrapper for HttpResponseParser.__setstate_cython__.
 * Casts the generic PyObject* self/state arguments to the concrete
 * extension-type pointers and delegates to the implementation below. */
static PyObject *__pyx_pw_9httptools_6parser_6parser_18HttpResponseParser_7__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/
static PyObject *__pyx_pw_9httptools_6parser_6parser_18HttpResponseParser_7__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) {
  PyObject *__pyx_r = 0;
  __Pyx_RefNannyDeclarations
  __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0);
  __pyx_r = __pyx_pf_9httptools_6parser_6parser_18HttpResponseParser_6__setstate_cython__(((struct __pyx_obj_9httptools_6parser_6parser_HttpResponseParser *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state));
  /* function exit code */
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* Implementation of HttpResponseParser.__setstate_cython__.
 * Like __reduce_cython__ above, unconditionally raises TypeError because
 * the type cannot be unpickled (non-trivial __cinit__).  The state
 * argument is ignored; always returns NULL. */
static PyObject *__pyx_pf_9httptools_6parser_6parser_18HttpResponseParser_6__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_9httptools_6parser_6parser_HttpResponseParser *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state) {
  PyObject *__pyx_r = NULL;
  __Pyx_RefNannyDeclarations
  PyObject *__pyx_t_1 = NULL;
  int __pyx_lineno = 0;
  const char *__pyx_filename = NULL;
  int __pyx_clineno = 0;
  __Pyx_RefNannySetupContext("__setstate_cython__", 0);
  /* "(tree fragment)":4
 *     raise TypeError("no default __reduce__ due to non-trivial __cinit__")
 * def __setstate_cython__(self, __pyx_state):
 *     raise TypeError("no default __reduce__ due to non-trivial __cinit__")             # <<<<<<<<<<<<<<
 */
  /* Raise TypeError using the interned constant-args tuple __pyx_tuple__7. */
  __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__7, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  __Pyx_Raise(__pyx_t_1, 0, 0, 0);
  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
  __PYX_ERR(1, 4, __pyx_L1_error)
  /* "(tree fragment)":3
 * def __reduce_cython__(self):
 *     raise TypeError("no default __reduce__ due to non-trivial __cinit__")
 * def __setstate_cython__(self, __pyx_state):             # <<<<<<<<<<<<<<
 *     raise TypeError("no default __reduce__ due to non-trivial __cinit__")
 */
  /* function exit code */
  __pyx_L1_error:;
  __Pyx_XDECREF(__pyx_t_1);
  __Pyx_AddTraceback("httptools.parser.parser.HttpResponseParser.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename);
  __pyx_r = NULL;
  __Pyx_XGIVEREF(__pyx_r);
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* "httptools/parser/parser.pyx":243
*
*
* cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1: # <<<<<<<<<<<<<<
* cdef HttpParser pyparser = <HttpParser>parser.data
* try:
*/
/* llhttp `on_message_begin` callback: bridges the C parser event into the
 * Python protocol object's on_message_begin() hook.
 *
 * parser->data holds the owning HttpParser instance (set elsewhere in this
 * module).  Returns 0 on success.  If the Python callback raises, the
 * exception object is stored in pyparser->_last_error and -1 is returned
 * so llhttp aborts; the stored exception is presumably re-raised by the
 * feed_data caller — confirm against the .pyx source. */
static int __pyx_f_9httptools_6parser_6parser_cb_on_message_begin(llhttp_t *__pyx_v_parser) {
  struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_pyparser = 0;
  PyObject *__pyx_v_ex = NULL;
  int __pyx_r;
  __Pyx_RefNannyDeclarations
  PyObject *__pyx_t_1 = NULL;
  PyObject *__pyx_t_2 = NULL;
  PyObject *__pyx_t_3 = NULL;
  PyObject *__pyx_t_4 = NULL;
  PyObject *__pyx_t_5 = NULL;
  PyObject *__pyx_t_6 = NULL;
  int __pyx_t_7;
  int __pyx_lineno = 0;
  const char *__pyx_filename = NULL;
  int __pyx_clineno = 0;
  __Pyx_RefNannySetupContext("cb_on_message_begin", 0);
  /* "httptools/parser/parser.pyx":244
 * 
 * cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1:
 *     cdef HttpParser pyparser = <HttpParser>parser.data             # <<<<<<<<<<<<<<
 *     try:
 *         pyparser._proto_on_message_begin()
 */
  /* Recover the Python-side parser object stashed in parser->data. */
  __pyx_t_1 = ((PyObject *)__pyx_v_parser->data);
  __Pyx_INCREF(__pyx_t_1);
  __pyx_v_pyparser = ((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_t_1);
  __pyx_t_1 = 0;
  /* "httptools/parser/parser.pyx":245
 * cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1:
 *     cdef HttpParser pyparser = <HttpParser>parser.data
 *     try:             # <<<<<<<<<<<<<<
 *         pyparser._proto_on_message_begin()
 *     except BaseException as ex:
 */
  {
    /* Save the ambient exception state so the try/except below can be
     * unwound without clobbering an exception already in flight. */
    __Pyx_PyThreadState_declare
    __Pyx_PyThreadState_assign
    __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4);
    __Pyx_XGOTREF(__pyx_t_2);
    __Pyx_XGOTREF(__pyx_t_3);
    __Pyx_XGOTREF(__pyx_t_4);
    /*try:*/ {
      /* "httptools/parser/parser.pyx":246
 *     cdef HttpParser pyparser = <HttpParser>parser.data
 *     try:
 *         pyparser._proto_on_message_begin()             # <<<<<<<<<<<<<<
 *     except BaseException as ex:
 *         pyparser._last_error = ex
 */
      /* Invoke the protocol callback (bound-method fast path when
       * CYTHON_UNPACK_METHODS is enabled). */
      __Pyx_INCREF(__pyx_v_pyparser->_proto_on_message_begin);
      __pyx_t_5 = __pyx_v_pyparser->_proto_on_message_begin; __pyx_t_6 = NULL;
      if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_5))) {
        __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_5);
        if (likely(__pyx_t_6)) {
          PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5);
          __Pyx_INCREF(__pyx_t_6);
          __Pyx_INCREF(function);
          __Pyx_DECREF_SET(__pyx_t_5, function);
        }
      }
      __pyx_t_1 = (__pyx_t_6) ? __Pyx_PyObject_CallOneArg(__pyx_t_5, __pyx_t_6) : __Pyx_PyObject_CallNoArg(__pyx_t_5);
      __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0;
      if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 246, __pyx_L3_error)
      __Pyx_GOTREF(__pyx_t_1);
      __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
      __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
      /* "httptools/parser/parser.pyx":245
 * cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1:
 *     cdef HttpParser pyparser = <HttpParser>parser.data
 *     try:             # <<<<<<<<<<<<<<
 *         pyparser._proto_on_message_begin()
 *     except BaseException as ex:
 */
    }
    /* "httptools/parser/parser.pyx":251
 *         return -1
 *     else:
 *         return 0             # <<<<<<<<<<<<<<
 * 
 * 
 */
    /*else:*/ {
      /* Success path: callback completed without raising. */
      __pyx_r = 0;
      goto __pyx_L6_except_return;
    }
    __pyx_L3_error:;
    __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0;
    __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
    __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0;
    /* "httptools/parser/parser.pyx":247
 *     try:
 *         pyparser._proto_on_message_begin()
 *     except BaseException as ex:             # <<<<<<<<<<<<<<
 *         pyparser._last_error = ex
 *         return -1
 */
    __pyx_t_7 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException);
    if (__pyx_t_7) {
      __Pyx_AddTraceback("httptools.parser.parser.cb_on_message_begin", __pyx_clineno, __pyx_lineno, __pyx_filename);
      if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_5, &__pyx_t_6) < 0) __PYX_ERR(0, 247, __pyx_L5_except_error)
      __Pyx_GOTREF(__pyx_t_1);
      __Pyx_GOTREF(__pyx_t_5);
      __Pyx_GOTREF(__pyx_t_6);
      __Pyx_INCREF(__pyx_t_5);
      __pyx_v_ex = __pyx_t_5;
      /*try:*/ {
        /* "httptools/parser/parser.pyx":248
 *         pyparser._proto_on_message_begin()
 *     except BaseException as ex:
 *         pyparser._last_error = ex             # <<<<<<<<<<<<<<
 *         return -1
 *     else:
 */
        /* Stash the exception on the parser object so Python-level code
         * can re-raise it after llhttp returns. */
        __Pyx_INCREF(__pyx_v_ex);
        __Pyx_GIVEREF(__pyx_v_ex);
        __Pyx_GOTREF(__pyx_v_pyparser->_last_error);
        __Pyx_DECREF(__pyx_v_pyparser->_last_error);
        __pyx_v_pyparser->_last_error = __pyx_v_ex;
        /* "httptools/parser/parser.pyx":249
 *     except BaseException as ex:
 *         pyparser._last_error = ex
 *         return -1             # <<<<<<<<<<<<<<
 *     else:
 *         return 0
 */
        /* -1 tells llhttp (and Cython's except -1 contract) to abort. */
        __pyx_r = -1;
        __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
        __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
        __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
        goto __pyx_L13_return;
      }
      /* "httptools/parser/parser.pyx":247
 *     try:
 *         pyparser._proto_on_message_begin()
 *     except BaseException as ex:             # <<<<<<<<<<<<<<
 *         pyparser._last_error = ex
 *         return -1
 */
      /*finally:*/ {
        __pyx_L13_return: {
          /* Implicit finally of `except ... as ex`: drop the local `ex`
           * binding (PEP 3110 semantics) while preserving the return code. */
          __pyx_t_7 = __pyx_r;
          __Pyx_DECREF(__pyx_v_ex);
          __pyx_v_ex = NULL;
          __pyx_r = __pyx_t_7;
          goto __pyx_L6_except_return;
        }
      }
    }
    goto __pyx_L5_except_error;
    __pyx_L5_except_error:;
    /* "httptools/parser/parser.pyx":245
 * cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1:
 *     cdef HttpParser pyparser = <HttpParser>parser.data
 *     try:             # <<<<<<<<<<<<<<
 *         pyparser._proto_on_message_begin()
 *     except BaseException as ex:
 */
    /* Restore the exception state saved on entry before propagating. */
    __Pyx_XGIVEREF(__pyx_t_2);
    __Pyx_XGIVEREF(__pyx_t_3);
    __Pyx_XGIVEREF(__pyx_t_4);
    __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4);
    goto __pyx_L1_error;
    __pyx_L6_except_return:;
    __Pyx_XGIVEREF(__pyx_t_2);
    __Pyx_XGIVEREF(__pyx_t_3);
    __Pyx_XGIVEREF(__pyx_t_4);
    __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4);
    goto __pyx_L0;
  }
  /* "httptools/parser/parser.pyx":243
 * 
 * 
 * cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1:             # <<<<<<<<<<<<<<
 *     cdef HttpParser pyparser = <HttpParser>parser.data
 *     try:
 */
  /* function exit code */
  __pyx_L1_error:;
  __Pyx_XDECREF(__pyx_t_1);
  __Pyx_XDECREF(__pyx_t_5);
  __Pyx_XDECREF(__pyx_t_6);
  __Pyx_AddTraceback("httptools.parser.parser.cb_on_message_begin", __pyx_clineno, __pyx_lineno, __pyx_filename);
  __pyx_r = -1;
  __pyx_L0:;
  __Pyx_XDECREF((PyObject *)__pyx_v_pyparser);
  __Pyx_XDECREF(__pyx_v_ex);
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* "httptools/parser/parser.pyx":254
*
*
* cdef int cb_on_url(cparser.llhttp_t* parser, # <<<<<<<<<<<<<<
* const char *at, size_t length) except -1:
* cdef HttpParser pyparser = <HttpParser>parser.data
*/
/* llhttp `on_url` data callback: copies the URL fragment [at, at+length)
 * into a Python bytes object and forwards it to the protocol's on_url()
 * hook.  Returns 0 on success; on a Python exception it records the error
 * reason on the llhttp state, stashes the exception in
 * pyparser->_last_error, and returns HPE_USER so llhttp aborts parsing.
 * NOTE: llhttp may deliver the URL in multiple chunks, so this callback
 * can fire more than once per request — presumably accumulated on the
 * Python side; confirm against the .pyx source. */
static int __pyx_f_9httptools_6parser_6parser_cb_on_url(llhttp_t *__pyx_v_parser, char const *__pyx_v_at, size_t __pyx_v_length) {
  struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_pyparser = 0;
  PyObject *__pyx_v_ex = NULL;
  int __pyx_r;
  __Pyx_RefNannyDeclarations
  PyObject *__pyx_t_1 = NULL;
  PyObject *__pyx_t_2 = NULL;
  PyObject *__pyx_t_3 = NULL;
  PyObject *__pyx_t_4 = NULL;
  PyObject *__pyx_t_5 = NULL;
  PyObject *__pyx_t_6 = NULL;
  PyObject *__pyx_t_7 = NULL;
  int __pyx_t_8;
  int __pyx_lineno = 0;
  const char *__pyx_filename = NULL;
  int __pyx_clineno = 0;
  __Pyx_RefNannySetupContext("cb_on_url", 0);
  /* "httptools/parser/parser.pyx":256
 * cdef int cb_on_url(cparser.llhttp_t* parser,
 *                    const char *at, size_t length) except -1:
 *     cdef HttpParser pyparser = <HttpParser>parser.data             # <<<<<<<<<<<<<<
 *     try:
 *         pyparser._proto_on_url(at[:length])
 */
  /* Recover the Python-side parser object stashed in parser->data. */
  __pyx_t_1 = ((PyObject *)__pyx_v_parser->data);
  __Pyx_INCREF(__pyx_t_1);
  __pyx_v_pyparser = ((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_t_1);
  __pyx_t_1 = 0;
  /* "httptools/parser/parser.pyx":257
 *                    const char *at, size_t length) except -1:
 *     cdef HttpParser pyparser = <HttpParser>parser.data
 *     try:             # <<<<<<<<<<<<<<
 *         pyparser._proto_on_url(at[:length])
 *     except BaseException as ex:
 */
  {
    /* Save ambient exception state for the try/except that follows. */
    __Pyx_PyThreadState_declare
    __Pyx_PyThreadState_assign
    __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4);
    __Pyx_XGOTREF(__pyx_t_2);
    __Pyx_XGOTREF(__pyx_t_3);
    __Pyx_XGOTREF(__pyx_t_4);
    /*try:*/ {
      /* "httptools/parser/parser.pyx":258
 *     cdef HttpParser pyparser = <HttpParser>parser.data
 *     try:
 *         pyparser._proto_on_url(at[:length])             # <<<<<<<<<<<<<<
 *     except BaseException as ex:
 *         cparser.llhttp_set_error_reason(parser, "`on_url` callback error")
 */
      /* at[:length] — copy the C buffer slice into a new bytes object. */
      __pyx_t_5 = __Pyx_PyBytes_FromStringAndSize(__pyx_v_at + 0, __pyx_v_length - 0); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 258, __pyx_L3_error)
      __Pyx_GOTREF(__pyx_t_5);
      __Pyx_INCREF(__pyx_v_pyparser->_proto_on_url);
      __pyx_t_6 = __pyx_v_pyparser->_proto_on_url; __pyx_t_7 = NULL;
      if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_6))) {
        __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_6);
        if (likely(__pyx_t_7)) {
          PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6);
          __Pyx_INCREF(__pyx_t_7);
          __Pyx_INCREF(function);
          __Pyx_DECREF_SET(__pyx_t_6, function);
        }
      }
      __pyx_t_1 = (__pyx_t_7) ? __Pyx_PyObject_Call2Args(__pyx_t_6, __pyx_t_7, __pyx_t_5) : __Pyx_PyObject_CallOneArg(__pyx_t_6, __pyx_t_5);
      __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
      __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
      if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 258, __pyx_L3_error)
      __Pyx_GOTREF(__pyx_t_1);
      __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
      __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
      /* "httptools/parser/parser.pyx":257
 *                    const char *at, size_t length) except -1:
 *     cdef HttpParser pyparser = <HttpParser>parser.data
 *     try:             # <<<<<<<<<<<<<<
 *         pyparser._proto_on_url(at[:length])
 *     except BaseException as ex:
 */
    }
    /* "httptools/parser/parser.pyx":264
 *         return cparser.HPE_USER
 *     else:
 *         return 0             # <<<<<<<<<<<<<<
 * 
 * 
 */
    /*else:*/ {
      /* Success path: callback completed without raising. */
      __pyx_r = 0;
      goto __pyx_L6_except_return;
    }
    __pyx_L3_error:;
    __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0;
    __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
    __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0;
    __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
    /* "httptools/parser/parser.pyx":259
 *     try:
 *         pyparser._proto_on_url(at[:length])
 *     except BaseException as ex:             # <<<<<<<<<<<<<<
 *         cparser.llhttp_set_error_reason(parser, "`on_url` callback error")
 *         pyparser._last_error = ex
 */
    __pyx_t_8 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException);
    if (__pyx_t_8) {
      __Pyx_AddTraceback("httptools.parser.parser.cb_on_url", __pyx_clineno, __pyx_lineno, __pyx_filename);
      if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_6, &__pyx_t_5) < 0) __PYX_ERR(0, 259, __pyx_L5_except_error)
      __Pyx_GOTREF(__pyx_t_1);
      __Pyx_GOTREF(__pyx_t_6);
      __Pyx_GOTREF(__pyx_t_5);
      __Pyx_INCREF(__pyx_t_6);
      __pyx_v_ex = __pyx_t_6;
      /*try:*/ {
        /* "httptools/parser/parser.pyx":260
 *         pyparser._proto_on_url(at[:length])
 *     except BaseException as ex:
 *         cparser.llhttp_set_error_reason(parser, "`on_url` callback error")             # <<<<<<<<<<<<<<
 *         pyparser._last_error = ex
 *         return cparser.HPE_USER
 */
        /* Record a human-readable reason on the llhttp state. */
        llhttp_set_error_reason(__pyx_v_parser, ((char const *)"`on_url` callback error"));
        /* "httptools/parser/parser.pyx":261
 *     except BaseException as ex:
 *         cparser.llhttp_set_error_reason(parser, "`on_url` callback error")
 *         pyparser._last_error = ex             # <<<<<<<<<<<<<<
 *         return cparser.HPE_USER
 *     else:
 */
        /* Stash the exception so Python-level code can re-raise it. */
        __Pyx_INCREF(__pyx_v_ex);
        __Pyx_GIVEREF(__pyx_v_ex);
        __Pyx_GOTREF(__pyx_v_pyparser->_last_error);
        __Pyx_DECREF(__pyx_v_pyparser->_last_error);
        __pyx_v_pyparser->_last_error = __pyx_v_ex;
        /* "httptools/parser/parser.pyx":262
 *         cparser.llhttp_set_error_reason(parser, "`on_url` callback error")
 *         pyparser._last_error = ex
 *         return cparser.HPE_USER             # <<<<<<<<<<<<<<
 *     else:
 *         return 0
 */
        /* HPE_USER: llhttp's user-defined-error code; aborts parsing. */
        __pyx_r = HPE_USER;
        __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
        __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
        __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
        goto __pyx_L13_return;
      }
      /* "httptools/parser/parser.pyx":259
 *     try:
 *         pyparser._proto_on_url(at[:length])
 *     except BaseException as ex:             # <<<<<<<<<<<<<<
 *         cparser.llhttp_set_error_reason(parser, "`on_url` callback error")
 *         pyparser._last_error = ex
 */
      /*finally:*/ {
        __pyx_L13_return: {
          /* Drop the `ex` binding (PEP 3110) while preserving __pyx_r. */
          __pyx_t_8 = __pyx_r;
          __Pyx_DECREF(__pyx_v_ex);
          __pyx_v_ex = NULL;
          __pyx_r = __pyx_t_8;
          goto __pyx_L6_except_return;
        }
      }
    }
    goto __pyx_L5_except_error;
    __pyx_L5_except_error:;
    /* "httptools/parser/parser.pyx":257
 *                    const char *at, size_t length) except -1:
 *     cdef HttpParser pyparser = <HttpParser>parser.data
 *     try:             # <<<<<<<<<<<<<<
 *         pyparser._proto_on_url(at[:length])
 *     except BaseException as ex:
 */
    __Pyx_XGIVEREF(__pyx_t_2);
    __Pyx_XGIVEREF(__pyx_t_3);
    __Pyx_XGIVEREF(__pyx_t_4);
    __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4);
    goto __pyx_L1_error;
    __pyx_L6_except_return:;
    __Pyx_XGIVEREF(__pyx_t_2);
    __Pyx_XGIVEREF(__pyx_t_3);
    __Pyx_XGIVEREF(__pyx_t_4);
    __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4);
    goto __pyx_L0;
  }
  /* "httptools/parser/parser.pyx":254
 * 
 * 
 * cdef int cb_on_url(cparser.llhttp_t* parser,             # <<<<<<<<<<<<<<
 *                    const char *at, size_t length) except -1:
 *     cdef HttpParser pyparser = <HttpParser>parser.data
 */
  /* function exit code */
  __pyx_L1_error:;
  __Pyx_XDECREF(__pyx_t_1);
  __Pyx_XDECREF(__pyx_t_5);
  __Pyx_XDECREF(__pyx_t_6);
  __Pyx_XDECREF(__pyx_t_7);
  __Pyx_AddTraceback("httptools.parser.parser.cb_on_url", __pyx_clineno, __pyx_lineno, __pyx_filename);
  __pyx_r = -1;
  __pyx_L0:;
  __Pyx_XDECREF((PyObject *)__pyx_v_pyparser);
  __Pyx_XDECREF(__pyx_v_ex);
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* "httptools/parser/parser.pyx":267
*
*
* cdef int cb_on_status(cparser.llhttp_t* parser, # <<<<<<<<<<<<<<
* const char *at, size_t length) except -1:
* cdef HttpParser pyparser = <HttpParser>parser.data
*/
/* llhttp `on_status` data callback: copies the status-text fragment
 * [at, at+length) into a Python bytes object and forwards it to the
 * protocol's on_status() hook.  Mirrors cb_on_url: returns 0 on success;
 * on a Python exception it sets an llhttp error reason, stores the
 * exception in pyparser->_last_error, and returns HPE_USER to abort. */
static int __pyx_f_9httptools_6parser_6parser_cb_on_status(llhttp_t *__pyx_v_parser, char const *__pyx_v_at, size_t __pyx_v_length) {
  struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_pyparser = 0;
  PyObject *__pyx_v_ex = NULL;
  int __pyx_r;
  __Pyx_RefNannyDeclarations
  PyObject *__pyx_t_1 = NULL;
  PyObject *__pyx_t_2 = NULL;
  PyObject *__pyx_t_3 = NULL;
  PyObject *__pyx_t_4 = NULL;
  PyObject *__pyx_t_5 = NULL;
  PyObject *__pyx_t_6 = NULL;
  PyObject *__pyx_t_7 = NULL;
  int __pyx_t_8;
  int __pyx_lineno = 0;
  const char *__pyx_filename = NULL;
  int __pyx_clineno = 0;
  __Pyx_RefNannySetupContext("cb_on_status", 0);
  /* "httptools/parser/parser.pyx":269
 * cdef int cb_on_status(cparser.llhttp_t* parser,
 *                       const char *at, size_t length) except -1:
 *     cdef HttpParser pyparser = <HttpParser>parser.data             # <<<<<<<<<<<<<<
 *     try:
 *         pyparser._proto_on_status(at[:length])
 */
  /* Recover the Python-side parser object stashed in parser->data. */
  __pyx_t_1 = ((PyObject *)__pyx_v_parser->data);
  __Pyx_INCREF(__pyx_t_1);
  __pyx_v_pyparser = ((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_t_1);
  __pyx_t_1 = 0;
  /* "httptools/parser/parser.pyx":270
 *                       const char *at, size_t length) except -1:
 *     cdef HttpParser pyparser = <HttpParser>parser.data
 *     try:             # <<<<<<<<<<<<<<
 *         pyparser._proto_on_status(at[:length])
 *     except BaseException as ex:
 */
  {
    /* Save ambient exception state for the try/except that follows. */
    __Pyx_PyThreadState_declare
    __Pyx_PyThreadState_assign
    __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4);
    __Pyx_XGOTREF(__pyx_t_2);
    __Pyx_XGOTREF(__pyx_t_3);
    __Pyx_XGOTREF(__pyx_t_4);
    /*try:*/ {
      /* "httptools/parser/parser.pyx":271
 *     cdef HttpParser pyparser = <HttpParser>parser.data
 *     try:
 *         pyparser._proto_on_status(at[:length])             # <<<<<<<<<<<<<<
 *     except BaseException as ex:
 *         cparser.llhttp_set_error_reason(parser, "`on_status` callback error")
 */
      /* at[:length] — copy the C buffer slice into a new bytes object. */
      __pyx_t_5 = __Pyx_PyBytes_FromStringAndSize(__pyx_v_at + 0, __pyx_v_length - 0); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 271, __pyx_L3_error)
      __Pyx_GOTREF(__pyx_t_5);
      __Pyx_INCREF(__pyx_v_pyparser->_proto_on_status);
      __pyx_t_6 = __pyx_v_pyparser->_proto_on_status; __pyx_t_7 = NULL;
      if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_6))) {
        __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_6);
        if (likely(__pyx_t_7)) {
          PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6);
          __Pyx_INCREF(__pyx_t_7);
          __Pyx_INCREF(function);
          __Pyx_DECREF_SET(__pyx_t_6, function);
        }
      }
      __pyx_t_1 = (__pyx_t_7) ? __Pyx_PyObject_Call2Args(__pyx_t_6, __pyx_t_7, __pyx_t_5) : __Pyx_PyObject_CallOneArg(__pyx_t_6, __pyx_t_5);
      __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
      __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
      if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 271, __pyx_L3_error)
      __Pyx_GOTREF(__pyx_t_1);
      __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
      __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
      /* "httptools/parser/parser.pyx":270
 *                       const char *at, size_t length) except -1:
 *     cdef HttpParser pyparser = <HttpParser>parser.data
 *     try:             # <<<<<<<<<<<<<<
 *         pyparser._proto_on_status(at[:length])
 *     except BaseException as ex:
 */
    }
    /* "httptools/parser/parser.pyx":277
 *         return cparser.HPE_USER
 *     else:
 *         return 0             # <<<<<<<<<<<<<<
 * 
 * 
 */
    /*else:*/ {
      /* Success path: callback completed without raising. */
      __pyx_r = 0;
      goto __pyx_L6_except_return;
    }
    __pyx_L3_error:;
    __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0;
    __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
    __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0;
    __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
    /* "httptools/parser/parser.pyx":272
 *     try:
 *         pyparser._proto_on_status(at[:length])
 *     except BaseException as ex:             # <<<<<<<<<<<<<<
 *         cparser.llhttp_set_error_reason(parser, "`on_status` callback error")
 *         pyparser._last_error = ex
 */
    __pyx_t_8 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException);
    if (__pyx_t_8) {
      __Pyx_AddTraceback("httptools.parser.parser.cb_on_status", __pyx_clineno, __pyx_lineno, __pyx_filename);
      if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_6, &__pyx_t_5) < 0) __PYX_ERR(0, 272, __pyx_L5_except_error)
      __Pyx_GOTREF(__pyx_t_1);
      __Pyx_GOTREF(__pyx_t_6);
      __Pyx_GOTREF(__pyx_t_5);
      __Pyx_INCREF(__pyx_t_6);
      __pyx_v_ex = __pyx_t_6;
      /*try:*/ {
        /* "httptools/parser/parser.pyx":273
 *         pyparser._proto_on_status(at[:length])
 *     except BaseException as ex:
 *         cparser.llhttp_set_error_reason(parser, "`on_status` callback error")             # <<<<<<<<<<<<<<
 *         pyparser._last_error = ex
 *         return cparser.HPE_USER
 */
        /* Record a human-readable reason on the llhttp state. */
        llhttp_set_error_reason(__pyx_v_parser, ((char const *)"`on_status` callback error"));
        /* "httptools/parser/parser.pyx":274
 *     except BaseException as ex:
 *         cparser.llhttp_set_error_reason(parser, "`on_status` callback error")
 *         pyparser._last_error = ex             # <<<<<<<<<<<<<<
 *         return cparser.HPE_USER
 *     else:
 */
        /* Stash the exception so Python-level code can re-raise it. */
        __Pyx_INCREF(__pyx_v_ex);
        __Pyx_GIVEREF(__pyx_v_ex);
        __Pyx_GOTREF(__pyx_v_pyparser->_last_error);
        __Pyx_DECREF(__pyx_v_pyparser->_last_error);
        __pyx_v_pyparser->_last_error = __pyx_v_ex;
        /* "httptools/parser/parser.pyx":275
 *         cparser.llhttp_set_error_reason(parser, "`on_status` callback error")
 *         pyparser._last_error = ex
 *         return cparser.HPE_USER             # <<<<<<<<<<<<<<
 *     else:
 *         return 0
 */
        /* HPE_USER: llhttp's user-defined-error code; aborts parsing. */
        __pyx_r = HPE_USER;
        __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
        __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
        __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
        goto __pyx_L13_return;
      }
      /* "httptools/parser/parser.pyx":272
 *     try:
 *         pyparser._proto_on_status(at[:length])
 *     except BaseException as ex:             # <<<<<<<<<<<<<<
 *         cparser.llhttp_set_error_reason(parser, "`on_status` callback error")
 *         pyparser._last_error = ex
 */
      /*finally:*/ {
        __pyx_L13_return: {
          /* Drop the `ex` binding (PEP 3110) while preserving __pyx_r. */
          __pyx_t_8 = __pyx_r;
          __Pyx_DECREF(__pyx_v_ex);
          __pyx_v_ex = NULL;
          __pyx_r = __pyx_t_8;
          goto __pyx_L6_except_return;
        }
      }
    }
    goto __pyx_L5_except_error;
    __pyx_L5_except_error:;
    /* "httptools/parser/parser.pyx":270
 *                       const char *at, size_t length) except -1:
 *     cdef HttpParser pyparser = <HttpParser>parser.data
 *     try:             # <<<<<<<<<<<<<<
 *         pyparser._proto_on_status(at[:length])
 *     except BaseException as ex:
 */
    __Pyx_XGIVEREF(__pyx_t_2);
    __Pyx_XGIVEREF(__pyx_t_3);
    __Pyx_XGIVEREF(__pyx_t_4);
    __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4);
    goto __pyx_L1_error;
    __pyx_L6_except_return:;
    __Pyx_XGIVEREF(__pyx_t_2);
    __Pyx_XGIVEREF(__pyx_t_3);
    __Pyx_XGIVEREF(__pyx_t_4);
    __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4);
    goto __pyx_L0;
  }
  /* "httptools/parser/parser.pyx":267
 * 
 * 
 * cdef int cb_on_status(cparser.llhttp_t* parser,             # <<<<<<<<<<<<<<
 *                       const char *at, size_t length) except -1:
 *     cdef HttpParser pyparser = <HttpParser>parser.data
 */
  /* function exit code */
  __pyx_L1_error:;
  __Pyx_XDECREF(__pyx_t_1);
  __Pyx_XDECREF(__pyx_t_5);
  __Pyx_XDECREF(__pyx_t_6);
  __Pyx_XDECREF(__pyx_t_7);
  __Pyx_AddTraceback("httptools.parser.parser.cb_on_status", __pyx_clineno, __pyx_lineno, __pyx_filename);
  __pyx_r = -1;
  __pyx_L0:;
  __Pyx_XDECREF((PyObject *)__pyx_v_pyparser);
  __Pyx_XDECREF(__pyx_v_ex);
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* "httptools/parser/parser.pyx":280
*
*
* cdef int cb_on_header_field(cparser.llhttp_t* parser, # <<<<<<<<<<<<<<
* const char *at, size_t length) except -1:
* cdef HttpParser pyparser = <HttpParser>parser.data
*/
/* llhttp `on_header_field` data callback: copies the header-name fragment
 * [at, at+length) into a bytes object and dispatches it through the
 * HttpParser vtable method _on_header_field (a cdef method, unlike the
 * _proto_on_* attributes used by cb_on_url/cb_on_status).  Returns 0 on
 * success; on a Python exception it sets an llhttp error reason, stores
 * the exception in pyparser->_last_error, and returns HPE_USER. */
static int __pyx_f_9httptools_6parser_6parser_cb_on_header_field(llhttp_t *__pyx_v_parser, char const *__pyx_v_at, size_t __pyx_v_length) {
  struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_pyparser = 0;
  PyObject *__pyx_v_ex = NULL;
  int __pyx_r;
  __Pyx_RefNannyDeclarations
  PyObject *__pyx_t_1 = NULL;
  PyObject *__pyx_t_2 = NULL;
  PyObject *__pyx_t_3 = NULL;
  PyObject *__pyx_t_4 = NULL;
  PyObject *__pyx_t_5 = NULL;
  int __pyx_t_6;
  PyObject *__pyx_t_7 = NULL;
  int __pyx_lineno = 0;
  const char *__pyx_filename = NULL;
  int __pyx_clineno = 0;
  __Pyx_RefNannySetupContext("cb_on_header_field", 0);
  /* "httptools/parser/parser.pyx":282
 * cdef int cb_on_header_field(cparser.llhttp_t* parser,
 *                             const char *at, size_t length) except -1:
 *     cdef HttpParser pyparser = <HttpParser>parser.data             # <<<<<<<<<<<<<<
 *     try:
 *         pyparser._on_header_field(at[:length])
 */
  /* Recover the Python-side parser object stashed in parser->data. */
  __pyx_t_1 = ((PyObject *)__pyx_v_parser->data);
  __Pyx_INCREF(__pyx_t_1);
  __pyx_v_pyparser = ((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_t_1);
  __pyx_t_1 = 0;
  /* "httptools/parser/parser.pyx":283
 *                             const char *at, size_t length) except -1:
 *     cdef HttpParser pyparser = <HttpParser>parser.data
 *     try:             # <<<<<<<<<<<<<<
 *         pyparser._on_header_field(at[:length])
 *     except BaseException as ex:
 */
  {
    /* Save ambient exception state for the try/except that follows. */
    __Pyx_PyThreadState_declare
    __Pyx_PyThreadState_assign
    __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4);
    __Pyx_XGOTREF(__pyx_t_2);
    __Pyx_XGOTREF(__pyx_t_3);
    __Pyx_XGOTREF(__pyx_t_4);
    /*try:*/ {
      /* "httptools/parser/parser.pyx":284
 *     cdef HttpParser pyparser = <HttpParser>parser.data
 *     try:
 *         pyparser._on_header_field(at[:length])             # <<<<<<<<<<<<<<
 *     except BaseException as ex:
 *         cparser.llhttp_set_error_reason(parser, "`on_header_field` callback error")
 */
      /* at[:length] — copy the C buffer slice into a new bytes object,
       * then call the cdef method via the extension-type vtable. */
      __pyx_t_1 = __Pyx_PyBytes_FromStringAndSize(__pyx_v_at + 0, __pyx_v_length - 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 284, __pyx_L3_error)
      __Pyx_GOTREF(__pyx_t_1);
      __pyx_t_5 = ((struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpParser *)__pyx_v_pyparser->__pyx_vtab)->_on_header_field(__pyx_v_pyparser, ((PyObject*)__pyx_t_1)); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 284, __pyx_L3_error)
      __Pyx_GOTREF(__pyx_t_5);
      __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
      __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
      /* "httptools/parser/parser.pyx":283
 *                             const char *at, size_t length) except -1:
 *     cdef HttpParser pyparser = <HttpParser>parser.data
 *     try:             # <<<<<<<<<<<<<<
 *         pyparser._on_header_field(at[:length])
 *     except BaseException as ex:
 */
    }
    /* "httptools/parser/parser.pyx":290
 *         return cparser.HPE_USER
 *     else:
 *         return 0             # <<<<<<<<<<<<<<
 * 
 * 
 */
    /*else:*/ {
      /* Success path: callback completed without raising. */
      __pyx_r = 0;
      goto __pyx_L6_except_return;
    }
    __pyx_L3_error:;
    __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0;
    __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
    /* "httptools/parser/parser.pyx":285
 *     try:
 *         pyparser._on_header_field(at[:length])
 *     except BaseException as ex:             # <<<<<<<<<<<<<<
 *         cparser.llhttp_set_error_reason(parser, "`on_header_field` callback error")
 *         pyparser._last_error = ex
 */
    __pyx_t_6 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException);
    if (__pyx_t_6) {
      __Pyx_AddTraceback("httptools.parser.parser.cb_on_header_field", __pyx_clineno, __pyx_lineno, __pyx_filename);
      if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_1, &__pyx_t_7) < 0) __PYX_ERR(0, 285, __pyx_L5_except_error)
      __Pyx_GOTREF(__pyx_t_5);
      __Pyx_GOTREF(__pyx_t_1);
      __Pyx_GOTREF(__pyx_t_7);
      __Pyx_INCREF(__pyx_t_1);
      __pyx_v_ex = __pyx_t_1;
      /*try:*/ {
        /* "httptools/parser/parser.pyx":286
 *         pyparser._on_header_field(at[:length])
 *     except BaseException as ex:
 *         cparser.llhttp_set_error_reason(parser, "`on_header_field` callback error")             # <<<<<<<<<<<<<<
 *         pyparser._last_error = ex
 *         return cparser.HPE_USER
 */
        /* Record a human-readable reason on the llhttp state. */
        llhttp_set_error_reason(__pyx_v_parser, ((char const *)"`on_header_field` callback error"));
        /* "httptools/parser/parser.pyx":287
 *     except BaseException as ex:
 *         cparser.llhttp_set_error_reason(parser, "`on_header_field` callback error")
 *         pyparser._last_error = ex             # <<<<<<<<<<<<<<
 *         return cparser.HPE_USER
 *     else:
 */
        /* Stash the exception so Python-level code can re-raise it. */
        __Pyx_INCREF(__pyx_v_ex);
        __Pyx_GIVEREF(__pyx_v_ex);
        __Pyx_GOTREF(__pyx_v_pyparser->_last_error);
        __Pyx_DECREF(__pyx_v_pyparser->_last_error);
        __pyx_v_pyparser->_last_error = __pyx_v_ex;
        /* "httptools/parser/parser.pyx":288
 *         cparser.llhttp_set_error_reason(parser, "`on_header_field` callback error")
 *         pyparser._last_error = ex
 *         return cparser.HPE_USER             # <<<<<<<<<<<<<<
 *     else:
 *         return 0
 */
        /* HPE_USER: llhttp's user-defined-error code; aborts parsing. */
        __pyx_r = HPE_USER;
        __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
        __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
        __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
        goto __pyx_L13_return;
      }
      /* "httptools/parser/parser.pyx":285
 *     try:
 *         pyparser._on_header_field(at[:length])
 *     except BaseException as ex:             # <<<<<<<<<<<<<<
 *         cparser.llhttp_set_error_reason(parser, "`on_header_field` callback error")
 *         pyparser._last_error = ex
 */
      /*finally:*/ {
        __pyx_L13_return: {
          /* Drop the `ex` binding (PEP 3110) while preserving __pyx_r. */
          __pyx_t_6 = __pyx_r;
          __Pyx_DECREF(__pyx_v_ex);
          __pyx_v_ex = NULL;
          __pyx_r = __pyx_t_6;
          goto __pyx_L6_except_return;
        }
      }
    }
    goto __pyx_L5_except_error;
    __pyx_L5_except_error:;
    /* "httptools/parser/parser.pyx":283
 *                             const char *at, size_t length) except -1:
 *     cdef HttpParser pyparser = <HttpParser>parser.data
 *     try:             # <<<<<<<<<<<<<<
 *         pyparser._on_header_field(at[:length])
 *     except BaseException as ex:
 */
    __Pyx_XGIVEREF(__pyx_t_2);
    __Pyx_XGIVEREF(__pyx_t_3);
    __Pyx_XGIVEREF(__pyx_t_4);
    __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4);
    goto __pyx_L1_error;
    __pyx_L6_except_return:;
    __Pyx_XGIVEREF(__pyx_t_2);
    __Pyx_XGIVEREF(__pyx_t_3);
    __Pyx_XGIVEREF(__pyx_t_4);
    __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4);
    goto __pyx_L0;
  }
  /* "httptools/parser/parser.pyx":280
 * 
 * 
 * cdef int cb_on_header_field(cparser.llhttp_t* parser,             # <<<<<<<<<<<<<<
 *                             const char *at, size_t length) except -1:
 *     cdef HttpParser pyparser = <HttpParser>parser.data
 */
  /* function exit code */
  __pyx_L1_error:;
  __Pyx_XDECREF(__pyx_t_1);
  __Pyx_XDECREF(__pyx_t_5);
  __Pyx_XDECREF(__pyx_t_7);
  __Pyx_AddTraceback("httptools.parser.parser.cb_on_header_field", __pyx_clineno, __pyx_lineno, __pyx_filename);
  __pyx_r = -1;
  __pyx_L0:;
  __Pyx_XDECREF((PyObject *)__pyx_v_pyparser);
  __Pyx_XDECREF(__pyx_v_ex);
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* "httptools/parser/parser.pyx":293
*
*
* cdef int cb_on_header_value(cparser.llhttp_t* parser, # <<<<<<<<<<<<<<
* const char *at, size_t length) except -1:
* cdef HttpParser pyparser = <HttpParser>parser.data
*/
/* NOTE(review): machine-generated by Cython 0.29.28 from
 * httptools/parser/parser.pyx -- do not hand-edit; regenerate from the
 * .pyx source instead.
 *
 * llhttp `on_header_value` C callback. Recovers the owning Python
 * HttpParser object from parser->data, copies the (at, length) byte span
 * into a Python bytes object, and forwards it to
 * HttpParser._on_header_value(). Returns 0 on success; if the Python
 * callback raises, the exception is stored in pyparser->_last_error, an
 * llhttp error reason is set, and HPE_USER is returned so llhttp stops
 * parsing. Returns -1 only on an internal (non-callback) error path. */
static int __pyx_f_9httptools_6parser_6parser_cb_on_header_value(llhttp_t *__pyx_v_parser, char const *__pyx_v_at, size_t __pyx_v_length) {
struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_pyparser = 0;
PyObject *__pyx_v_ex = NULL;
int __pyx_r;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
PyObject *__pyx_t_2 = NULL;
PyObject *__pyx_t_3 = NULL;
PyObject *__pyx_t_4 = NULL;
PyObject *__pyx_t_5 = NULL;
int __pyx_t_6;
PyObject *__pyx_t_7 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("cb_on_header_value", 0);
/* "httptools/parser/parser.pyx":295
 * cdef int cb_on_header_value(cparser.llhttp_t* parser,
 * const char *at, size_t length) except -1:
 * cdef HttpParser pyparser = <HttpParser>parser.data # <<<<<<<<<<<<<<
 * try:
 * pyparser._on_header_value(at[:length])
 */
__pyx_t_1 = ((PyObject *)__pyx_v_parser->data);
__Pyx_INCREF(__pyx_t_1);
__pyx_v_pyparser = ((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_t_1);
__pyx_t_1 = 0;
/* "httptools/parser/parser.pyx":296
 * const char *at, size_t length) except -1:
 * cdef HttpParser pyparser = <HttpParser>parser.data
 * try: # <<<<<<<<<<<<<<
 * pyparser._on_header_value(at[:length])
 * except BaseException as ex:
 */
{
__Pyx_PyThreadState_declare
__Pyx_PyThreadState_assign
__Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4);
__Pyx_XGOTREF(__pyx_t_2);
__Pyx_XGOTREF(__pyx_t_3);
__Pyx_XGOTREF(__pyx_t_4);
/*try:*/ {
/* "httptools/parser/parser.pyx":297
 * cdef HttpParser pyparser = <HttpParser>parser.data
 * try:
 * pyparser._on_header_value(at[:length]) # <<<<<<<<<<<<<<
 * except BaseException as ex:
 * cparser.llhttp_set_error_reason(parser, "`on_header_value` callback error")
 */
__pyx_t_1 = __Pyx_PyBytes_FromStringAndSize(__pyx_v_at + 0, __pyx_v_length - 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 297, __pyx_L3_error)
__Pyx_GOTREF(__pyx_t_1);
__pyx_t_5 = ((struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpParser *)__pyx_v_pyparser->__pyx_vtab)->_on_header_value(__pyx_v_pyparser, ((PyObject*)__pyx_t_1)); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 297, __pyx_L3_error)
__Pyx_GOTREF(__pyx_t_5);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
/* "httptools/parser/parser.pyx":296
 * const char *at, size_t length) except -1:
 * cdef HttpParser pyparser = <HttpParser>parser.data
 * try: # <<<<<<<<<<<<<<
 * pyparser._on_header_value(at[:length])
 * except BaseException as ex:
 */
}
/* "httptools/parser/parser.pyx":303
 * return cparser.HPE_USER
 * else:
 * return 0 # <<<<<<<<<<<<<<
 *
 *
 */
/*else:*/ {
__pyx_r = 0;
goto __pyx_L6_except_return;
}
/* exception raised by the Python-level callback lands here */
__pyx_L3_error:;
__Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0;
__Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
/* "httptools/parser/parser.pyx":298
 * try:
 * pyparser._on_header_value(at[:length])
 * except BaseException as ex: # <<<<<<<<<<<<<<
 * cparser.llhttp_set_error_reason(parser, "`on_header_value` callback error")
 * pyparser._last_error = ex
 */
__pyx_t_6 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException);
if (__pyx_t_6) {
__Pyx_AddTraceback("httptools.parser.parser.cb_on_header_value", __pyx_clineno, __pyx_lineno, __pyx_filename);
if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_1, &__pyx_t_7) < 0) __PYX_ERR(0, 298, __pyx_L5_except_error)
__Pyx_GOTREF(__pyx_t_5);
__Pyx_GOTREF(__pyx_t_1);
__Pyx_GOTREF(__pyx_t_7);
__Pyx_INCREF(__pyx_t_1);
__pyx_v_ex = __pyx_t_1;
/*try:*/ {
/* "httptools/parser/parser.pyx":299
 * pyparser._on_header_value(at[:length])
 * except BaseException as ex:
 * cparser.llhttp_set_error_reason(parser, "`on_header_value` callback error") # <<<<<<<<<<<<<<
 * pyparser._last_error = ex
 * return cparser.HPE_USER
 */
llhttp_set_error_reason(__pyx_v_parser, ((char const *)"`on_header_value` callback error"));
/* "httptools/parser/parser.pyx":300
 * except BaseException as ex:
 * cparser.llhttp_set_error_reason(parser, "`on_header_value` callback error")
 * pyparser._last_error = ex # <<<<<<<<<<<<<<
 * return cparser.HPE_USER
 * else:
 */
__Pyx_INCREF(__pyx_v_ex);
__Pyx_GIVEREF(__pyx_v_ex);
__Pyx_GOTREF(__pyx_v_pyparser->_last_error);
__Pyx_DECREF(__pyx_v_pyparser->_last_error);
__pyx_v_pyparser->_last_error = __pyx_v_ex;
/* "httptools/parser/parser.pyx":301
 * cparser.llhttp_set_error_reason(parser, "`on_header_value` callback error")
 * pyparser._last_error = ex
 * return cparser.HPE_USER # <<<<<<<<<<<<<<
 * else:
 * return 0
 */
__pyx_r = HPE_USER;
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
__Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
goto __pyx_L13_return;
}
/* "httptools/parser/parser.pyx":298
 * try:
 * pyparser._on_header_value(at[:length])
 * except BaseException as ex: # <<<<<<<<<<<<<<
 * cparser.llhttp_set_error_reason(parser, "`on_header_value` callback error")
 * pyparser._last_error = ex
 */
/*finally:*/ {
__pyx_L13_return: {
__pyx_t_6 = __pyx_r;
__Pyx_DECREF(__pyx_v_ex);
__pyx_v_ex = NULL;
__pyx_r = __pyx_t_6;
goto __pyx_L6_except_return;
}
}
}
goto __pyx_L5_except_error;
__pyx_L5_except_error:;
/* "httptools/parser/parser.pyx":296
 * const char *at, size_t length) except -1:
 * cdef HttpParser pyparser = <HttpParser>parser.data
 * try: # <<<<<<<<<<<<<<
 * pyparser._on_header_value(at[:length])
 * except BaseException as ex:
 */
__Pyx_XGIVEREF(__pyx_t_2);
__Pyx_XGIVEREF(__pyx_t_3);
__Pyx_XGIVEREF(__pyx_t_4);
__Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4);
goto __pyx_L1_error;
__pyx_L6_except_return:;
__Pyx_XGIVEREF(__pyx_t_2);
__Pyx_XGIVEREF(__pyx_t_3);
__Pyx_XGIVEREF(__pyx_t_4);
__Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4);
goto __pyx_L0;
}
/* "httptools/parser/parser.pyx":293
 *
 *
 * cdef int cb_on_header_value(cparser.llhttp_t* parser, # <<<<<<<<<<<<<<
 * const char *at, size_t length) except -1:
 * cdef HttpParser pyparser = <HttpParser>parser.data
 */
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_XDECREF(__pyx_t_5);
__Pyx_XDECREF(__pyx_t_7);
__Pyx_AddTraceback("httptools.parser.parser.cb_on_header_value", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = -1;
__pyx_L0:;
__Pyx_XDECREF((PyObject *)__pyx_v_pyparser);
__Pyx_XDECREF(__pyx_v_ex);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "httptools/parser/parser.pyx":306
*
*
* cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1: # <<<<<<<<<<<<<<
* cdef HttpParser pyparser = <HttpParser>parser.data
* try:
*/
/* NOTE(review): machine-generated by Cython 0.29.28 from
 * httptools/parser/parser.pyx -- do not hand-edit; regenerate from the
 * .pyx source instead.
 *
 * llhttp `on_headers_complete` C callback. Recovers the owning Python
 * HttpParser from parser->data and invokes
 * HttpParser._on_headers_complete(). Return value to llhttp:
 *   1  -- the parser's `upgrade` flag is set (protocol upgrade),
 *   0  -- normal success,
 *  -1  -- the Python callback raised; the exception is stored in
 *         pyparser->_last_error for later re-raising by the caller. */
static int __pyx_f_9httptools_6parser_6parser_cb_on_headers_complete(llhttp_t *__pyx_v_parser) {
struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_pyparser = 0;
PyObject *__pyx_v_ex = NULL;
int __pyx_r;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
PyObject *__pyx_t_2 = NULL;
PyObject *__pyx_t_3 = NULL;
PyObject *__pyx_t_4 = NULL;
int __pyx_t_5;
int __pyx_t_6;
PyObject *__pyx_t_7 = NULL;
PyObject *__pyx_t_8 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("cb_on_headers_complete", 0);
/* "httptools/parser/parser.pyx":307
 *
 * cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1:
 * cdef HttpParser pyparser = <HttpParser>parser.data # <<<<<<<<<<<<<<
 * try:
 * pyparser._on_headers_complete()
 */
__pyx_t_1 = ((PyObject *)__pyx_v_parser->data);
__Pyx_INCREF(__pyx_t_1);
__pyx_v_pyparser = ((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_t_1);
__pyx_t_1 = 0;
/* "httptools/parser/parser.pyx":308
 * cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1:
 * cdef HttpParser pyparser = <HttpParser>parser.data
 * try: # <<<<<<<<<<<<<<
 * pyparser._on_headers_complete()
 * except BaseException as ex:
 */
{
__Pyx_PyThreadState_declare
__Pyx_PyThreadState_assign
__Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4);
__Pyx_XGOTREF(__pyx_t_2);
__Pyx_XGOTREF(__pyx_t_3);
__Pyx_XGOTREF(__pyx_t_4);
/*try:*/ {
/* "httptools/parser/parser.pyx":309
 * cdef HttpParser pyparser = <HttpParser>parser.data
 * try:
 * pyparser._on_headers_complete() # <<<<<<<<<<<<<<
 * except BaseException as ex:
 * pyparser._last_error = ex
 */
__pyx_t_1 = ((struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpParser *)__pyx_v_pyparser->__pyx_vtab)->_on_headers_complete(__pyx_v_pyparser); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 309, __pyx_L3_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
/* "httptools/parser/parser.pyx":308
 * cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1:
 * cdef HttpParser pyparser = <HttpParser>parser.data
 * try: # <<<<<<<<<<<<<<
 * pyparser._on_headers_complete()
 * except BaseException as ex:
 */
}
/* "httptools/parser/parser.pyx":314
 * return -1
 * else:
 * if pyparser._cparser.upgrade: # <<<<<<<<<<<<<<
 * return 1
 * else:
 */
/*else:*/ {
__pyx_t_5 = (__pyx_v_pyparser->_cparser->upgrade != 0);
if (__pyx_t_5) {
/* "httptools/parser/parser.pyx":315
 * else:
 * if pyparser._cparser.upgrade:
 * return 1 # <<<<<<<<<<<<<<
 * else:
 * return 0
 */
__pyx_r = 1;
goto __pyx_L6_except_return;
/* "httptools/parser/parser.pyx":314
 * return -1
 * else:
 * if pyparser._cparser.upgrade: # <<<<<<<<<<<<<<
 * return 1
 * else:
 */
}
/* "httptools/parser/parser.pyx":317
 * return 1
 * else:
 * return 0 # <<<<<<<<<<<<<<
 *
 *
 */
/*else*/ {
__pyx_r = 0;
goto __pyx_L6_except_return;
}
}
/* exception raised by the Python-level callback lands here */
__pyx_L3_error:;
__Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0;
/* "httptools/parser/parser.pyx":310
 * try:
 * pyparser._on_headers_complete()
 * except BaseException as ex: # <<<<<<<<<<<<<<
 * pyparser._last_error = ex
 * return -1
 */
__pyx_t_6 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException);
if (__pyx_t_6) {
__Pyx_AddTraceback("httptools.parser.parser.cb_on_headers_complete", __pyx_clineno, __pyx_lineno, __pyx_filename);
if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_7, &__pyx_t_8) < 0) __PYX_ERR(0, 310, __pyx_L5_except_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_GOTREF(__pyx_t_7);
__Pyx_GOTREF(__pyx_t_8);
__Pyx_INCREF(__pyx_t_7);
__pyx_v_ex = __pyx_t_7;
/*try:*/ {
/* "httptools/parser/parser.pyx":311
 * pyparser._on_headers_complete()
 * except BaseException as ex:
 * pyparser._last_error = ex # <<<<<<<<<<<<<<
 * return -1
 * else:
 */
__Pyx_INCREF(__pyx_v_ex);
__Pyx_GIVEREF(__pyx_v_ex);
__Pyx_GOTREF(__pyx_v_pyparser->_last_error);
__Pyx_DECREF(__pyx_v_pyparser->_last_error);
__pyx_v_pyparser->_last_error = __pyx_v_ex;
/* "httptools/parser/parser.pyx":312
 * except BaseException as ex:
 * pyparser._last_error = ex
 * return -1 # <<<<<<<<<<<<<<
 * else:
 * if pyparser._cparser.upgrade:
 */
__pyx_r = -1;
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
__Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
goto __pyx_L14_return;
}
/* "httptools/parser/parser.pyx":310
 * try:
 * pyparser._on_headers_complete()
 * except BaseException as ex: # <<<<<<<<<<<<<<
 * pyparser._last_error = ex
 * return -1
 */
/*finally:*/ {
__pyx_L14_return: {
__pyx_t_6 = __pyx_r;
__Pyx_DECREF(__pyx_v_ex);
__pyx_v_ex = NULL;
__pyx_r = __pyx_t_6;
goto __pyx_L6_except_return;
}
}
}
goto __pyx_L5_except_error;
__pyx_L5_except_error:;
/* "httptools/parser/parser.pyx":308
 * cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1:
 * cdef HttpParser pyparser = <HttpParser>parser.data
 * try: # <<<<<<<<<<<<<<
 * pyparser._on_headers_complete()
 * except BaseException as ex:
 */
__Pyx_XGIVEREF(__pyx_t_2);
__Pyx_XGIVEREF(__pyx_t_3);
__Pyx_XGIVEREF(__pyx_t_4);
__Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4);
goto __pyx_L1_error;
__pyx_L6_except_return:;
__Pyx_XGIVEREF(__pyx_t_2);
__Pyx_XGIVEREF(__pyx_t_3);
__Pyx_XGIVEREF(__pyx_t_4);
__Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4);
goto __pyx_L0;
}
/* "httptools/parser/parser.pyx":306
 *
 *
 * cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1: # <<<<<<<<<<<<<<
 * cdef HttpParser pyparser = <HttpParser>parser.data
 * try:
 */
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_XDECREF(__pyx_t_7);
__Pyx_XDECREF(__pyx_t_8);
__Pyx_AddTraceback("httptools.parser.parser.cb_on_headers_complete", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = -1;
__pyx_L0:;
__Pyx_XDECREF((PyObject *)__pyx_v_pyparser);
__Pyx_XDECREF(__pyx_v_ex);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "httptools/parser/parser.pyx":320
*
*
* cdef int cb_on_body(cparser.llhttp_t* parser, # <<<<<<<<<<<<<<
* const char *at, size_t length) except -1:
* cdef HttpParser pyparser = <HttpParser>parser.data
*/
/* NOTE(review): machine-generated by Cython 0.29.28 from
 * httptools/parser/parser.pyx -- do not hand-edit; regenerate from the
 * .pyx source instead.
 *
 * llhttp `on_body` C callback. Recovers the owning Python HttpParser
 * from parser->data, copies the (at, length) body byte span into a
 * Python bytes object, and calls the user-supplied protocol callback
 * stored in pyparser->_proto_on_body. Returns 0 on success; if the
 * Python callback raises, the exception is stored in
 * pyparser->_last_error, an llhttp error reason is set, and HPE_USER is
 * returned so llhttp stops parsing. */
static int __pyx_f_9httptools_6parser_6parser_cb_on_body(llhttp_t *__pyx_v_parser, char const *__pyx_v_at, size_t __pyx_v_length) {
struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_pyparser = 0;
PyObject *__pyx_v_ex = NULL;
int __pyx_r;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
PyObject *__pyx_t_2 = NULL;
PyObject *__pyx_t_3 = NULL;
PyObject *__pyx_t_4 = NULL;
PyObject *__pyx_t_5 = NULL;
PyObject *__pyx_t_6 = NULL;
PyObject *__pyx_t_7 = NULL;
int __pyx_t_8;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("cb_on_body", 0);
/* "httptools/parser/parser.pyx":322
 * cdef int cb_on_body(cparser.llhttp_t* parser,
 * const char *at, size_t length) except -1:
 * cdef HttpParser pyparser = <HttpParser>parser.data # <<<<<<<<<<<<<<
 * try:
 * pyparser._proto_on_body(at[:length])
 */
__pyx_t_1 = ((PyObject *)__pyx_v_parser->data);
__Pyx_INCREF(__pyx_t_1);
__pyx_v_pyparser = ((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_t_1);
__pyx_t_1 = 0;
/* "httptools/parser/parser.pyx":323
 * const char *at, size_t length) except -1:
 * cdef HttpParser pyparser = <HttpParser>parser.data
 * try: # <<<<<<<<<<<<<<
 * pyparser._proto_on_body(at[:length])
 * except BaseException as ex:
 */
{
__Pyx_PyThreadState_declare
__Pyx_PyThreadState_assign
__Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4);
__Pyx_XGOTREF(__pyx_t_2);
__Pyx_XGOTREF(__pyx_t_3);
__Pyx_XGOTREF(__pyx_t_4);
/*try:*/ {
/* "httptools/parser/parser.pyx":324
 * cdef HttpParser pyparser = <HttpParser>parser.data
 * try:
 * pyparser._proto_on_body(at[:length]) # <<<<<<<<<<<<<<
 * except BaseException as ex:
 * cparser.llhttp_set_error_reason(parser, "`on_body` callback error")
 */
__pyx_t_5 = __Pyx_PyBytes_FromStringAndSize(__pyx_v_at + 0, __pyx_v_length - 0); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 324, __pyx_L3_error)
__Pyx_GOTREF(__pyx_t_5);
/* unpack bound method once so the call below hits the fast path */
__Pyx_INCREF(__pyx_v_pyparser->_proto_on_body);
__pyx_t_6 = __pyx_v_pyparser->_proto_on_body; __pyx_t_7 = NULL;
if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_6))) {
__pyx_t_7 = PyMethod_GET_SELF(__pyx_t_6);
if (likely(__pyx_t_7)) {
PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6);
__Pyx_INCREF(__pyx_t_7);
__Pyx_INCREF(function);
__Pyx_DECREF_SET(__pyx_t_6, function);
}
}
__pyx_t_1 = (__pyx_t_7) ? __Pyx_PyObject_Call2Args(__pyx_t_6, __pyx_t_7, __pyx_t_5) : __Pyx_PyObject_CallOneArg(__pyx_t_6, __pyx_t_5);
__Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
__Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 324, __pyx_L3_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
/* "httptools/parser/parser.pyx":323
 * const char *at, size_t length) except -1:
 * cdef HttpParser pyparser = <HttpParser>parser.data
 * try: # <<<<<<<<<<<<<<
 * pyparser._proto_on_body(at[:length])
 * except BaseException as ex:
 */
}
/* "httptools/parser/parser.pyx":330
 * return cparser.HPE_USER
 * else:
 * return 0 # <<<<<<<<<<<<<<
 *
 *
 */
/*else:*/ {
__pyx_r = 0;
goto __pyx_L6_except_return;
}
/* exception raised by the Python-level callback lands here */
__pyx_L3_error:;
__Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0;
__Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
__Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0;
__Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
/* "httptools/parser/parser.pyx":325
 * try:
 * pyparser._proto_on_body(at[:length])
 * except BaseException as ex: # <<<<<<<<<<<<<<
 * cparser.llhttp_set_error_reason(parser, "`on_body` callback error")
 * pyparser._last_error = ex
 */
__pyx_t_8 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException);
if (__pyx_t_8) {
__Pyx_AddTraceback("httptools.parser.parser.cb_on_body", __pyx_clineno, __pyx_lineno, __pyx_filename);
if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_6, &__pyx_t_5) < 0) __PYX_ERR(0, 325, __pyx_L5_except_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_GOTREF(__pyx_t_6);
__Pyx_GOTREF(__pyx_t_5);
__Pyx_INCREF(__pyx_t_6);
__pyx_v_ex = __pyx_t_6;
/*try:*/ {
/* "httptools/parser/parser.pyx":326
 * pyparser._proto_on_body(at[:length])
 * except BaseException as ex:
 * cparser.llhttp_set_error_reason(parser, "`on_body` callback error") # <<<<<<<<<<<<<<
 * pyparser._last_error = ex
 * return cparser.HPE_USER
 */
llhttp_set_error_reason(__pyx_v_parser, ((char const *)"`on_body` callback error"));
/* "httptools/parser/parser.pyx":327
 * except BaseException as ex:
 * cparser.llhttp_set_error_reason(parser, "`on_body` callback error")
 * pyparser._last_error = ex # <<<<<<<<<<<<<<
 * return cparser.HPE_USER
 * else:
 */
__Pyx_INCREF(__pyx_v_ex);
__Pyx_GIVEREF(__pyx_v_ex);
__Pyx_GOTREF(__pyx_v_pyparser->_last_error);
__Pyx_DECREF(__pyx_v_pyparser->_last_error);
__pyx_v_pyparser->_last_error = __pyx_v_ex;
/* "httptools/parser/parser.pyx":328
 * cparser.llhttp_set_error_reason(parser, "`on_body` callback error")
 * pyparser._last_error = ex
 * return cparser.HPE_USER # <<<<<<<<<<<<<<
 * else:
 * return 0
 */
__pyx_r = HPE_USER;
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
__Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
goto __pyx_L13_return;
}
/* "httptools/parser/parser.pyx":325
 * try:
 * pyparser._proto_on_body(at[:length])
 * except BaseException as ex: # <<<<<<<<<<<<<<
 * cparser.llhttp_set_error_reason(parser, "`on_body` callback error")
 * pyparser._last_error = ex
 */
/*finally:*/ {
__pyx_L13_return: {
__pyx_t_8 = __pyx_r;
__Pyx_DECREF(__pyx_v_ex);
__pyx_v_ex = NULL;
__pyx_r = __pyx_t_8;
goto __pyx_L6_except_return;
}
}
}
goto __pyx_L5_except_error;
__pyx_L5_except_error:;
/* "httptools/parser/parser.pyx":323
 * const char *at, size_t length) except -1:
 * cdef HttpParser pyparser = <HttpParser>parser.data
 * try: # <<<<<<<<<<<<<<
 * pyparser._proto_on_body(at[:length])
 * except BaseException as ex:
 */
__Pyx_XGIVEREF(__pyx_t_2);
__Pyx_XGIVEREF(__pyx_t_3);
__Pyx_XGIVEREF(__pyx_t_4);
__Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4);
goto __pyx_L1_error;
__pyx_L6_except_return:;
__Pyx_XGIVEREF(__pyx_t_2);
__Pyx_XGIVEREF(__pyx_t_3);
__Pyx_XGIVEREF(__pyx_t_4);
__Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4);
goto __pyx_L0;
}
/* "httptools/parser/parser.pyx":320
 *
 *
 * cdef int cb_on_body(cparser.llhttp_t* parser, # <<<<<<<<<<<<<<
 * const char *at, size_t length) except -1:
 * cdef HttpParser pyparser = <HttpParser>parser.data
 */
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_XDECREF(__pyx_t_5);
__Pyx_XDECREF(__pyx_t_6);
__Pyx_XDECREF(__pyx_t_7);
__Pyx_AddTraceback("httptools.parser.parser.cb_on_body", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = -1;
__pyx_L0:;
__Pyx_XDECREF((PyObject *)__pyx_v_pyparser);
__Pyx_XDECREF(__pyx_v_ex);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "httptools/parser/parser.pyx":333
*
*
* cdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1: # <<<<<<<<<<<<<<
* cdef HttpParser pyparser = <HttpParser>parser.data
* try:
*/
/* NOTE(review): machine-generated by Cython 0.29.28 from
 * httptools/parser/parser.pyx -- do not hand-edit; regenerate from the
 * .pyx source instead.
 *
 * llhttp `on_message_complete` C callback. Recovers the owning Python
 * HttpParser from parser->data and calls the user-supplied protocol
 * callback stored in pyparser->_proto_on_message_complete (no
 * arguments). Returns 0 on success; if the Python callback raises, the
 * exception is stored in pyparser->_last_error and -1 is returned. */
static int __pyx_f_9httptools_6parser_6parser_cb_on_message_complete(llhttp_t *__pyx_v_parser) {
struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_pyparser = 0;
PyObject *__pyx_v_ex = NULL;
int __pyx_r;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
PyObject *__pyx_t_2 = NULL;
PyObject *__pyx_t_3 = NULL;
PyObject *__pyx_t_4 = NULL;
PyObject *__pyx_t_5 = NULL;
PyObject *__pyx_t_6 = NULL;
int __pyx_t_7;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("cb_on_message_complete", 0);
/* "httptools/parser/parser.pyx":334
 *
 * cdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1:
 * cdef HttpParser pyparser = <HttpParser>parser.data # <<<<<<<<<<<<<<
 * try:
 * pyparser._proto_on_message_complete()
 */
__pyx_t_1 = ((PyObject *)__pyx_v_parser->data);
__Pyx_INCREF(__pyx_t_1);
__pyx_v_pyparser = ((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_t_1);
__pyx_t_1 = 0;
/* "httptools/parser/parser.pyx":335
 * cdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1:
 * cdef HttpParser pyparser = <HttpParser>parser.data
 * try: # <<<<<<<<<<<<<<
 * pyparser._proto_on_message_complete()
 * except BaseException as ex:
 */
{
__Pyx_PyThreadState_declare
__Pyx_PyThreadState_assign
__Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4);
__Pyx_XGOTREF(__pyx_t_2);
__Pyx_XGOTREF(__pyx_t_3);
__Pyx_XGOTREF(__pyx_t_4);
/*try:*/ {
/* "httptools/parser/parser.pyx":336
 * cdef HttpParser pyparser = <HttpParser>parser.data
 * try:
 * pyparser._proto_on_message_complete() # <<<<<<<<<<<<<<
 * except BaseException as ex:
 * pyparser._last_error = ex
 */
/* unpack bound method once so the call below hits the fast path */
__Pyx_INCREF(__pyx_v_pyparser->_proto_on_message_complete);
__pyx_t_5 = __pyx_v_pyparser->_proto_on_message_complete; __pyx_t_6 = NULL;
if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_5))) {
__pyx_t_6 = PyMethod_GET_SELF(__pyx_t_5);
if (likely(__pyx_t_6)) {
PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5);
__Pyx_INCREF(__pyx_t_6);
__Pyx_INCREF(function);
__Pyx_DECREF_SET(__pyx_t_5, function);
}
}
__pyx_t_1 = (__pyx_t_6) ? __Pyx_PyObject_CallOneArg(__pyx_t_5, __pyx_t_6) : __Pyx_PyObject_CallNoArg(__pyx_t_5);
__Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0;
if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 336, __pyx_L3_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
/* "httptools/parser/parser.pyx":335
 * cdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1:
 * cdef HttpParser pyparser = <HttpParser>parser.data
 * try: # <<<<<<<<<<<<<<
 * pyparser._proto_on_message_complete()
 * except BaseException as ex:
 */
}
/* "httptools/parser/parser.pyx":341
 * return -1
 * else:
 * return 0 # <<<<<<<<<<<<<<
 *
 *
 */
/*else:*/ {
__pyx_r = 0;
goto __pyx_L6_except_return;
}
/* exception raised by the Python-level callback lands here */
__pyx_L3_error:;
__Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0;
__Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
__Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0;
/* "httptools/parser/parser.pyx":337
 * try:
 * pyparser._proto_on_message_complete()
 * except BaseException as ex: # <<<<<<<<<<<<<<
 * pyparser._last_error = ex
 * return -1
 */
__pyx_t_7 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException);
if (__pyx_t_7) {
__Pyx_AddTraceback("httptools.parser.parser.cb_on_message_complete", __pyx_clineno, __pyx_lineno, __pyx_filename);
if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_5, &__pyx_t_6) < 0) __PYX_ERR(0, 337, __pyx_L5_except_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_GOTREF(__pyx_t_5);
__Pyx_GOTREF(__pyx_t_6);
__Pyx_INCREF(__pyx_t_5);
__pyx_v_ex = __pyx_t_5;
/*try:*/ {
/* "httptools/parser/parser.pyx":338
 * pyparser._proto_on_message_complete()
 * except BaseException as ex:
 * pyparser._last_error = ex # <<<<<<<<<<<<<<
 * return -1
 * else:
 */
__Pyx_INCREF(__pyx_v_ex);
__Pyx_GIVEREF(__pyx_v_ex);
__Pyx_GOTREF(__pyx_v_pyparser->_last_error);
__Pyx_DECREF(__pyx_v_pyparser->_last_error);
__pyx_v_pyparser->_last_error = __pyx_v_ex;
/* "httptools/parser/parser.pyx":339
 * except BaseException as ex:
 * pyparser._last_error = ex
 * return -1 # <<<<<<<<<<<<<<
 * else:
 * return 0
 */
__pyx_r = -1;
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
__Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
goto __pyx_L13_return;
}
/* "httptools/parser/parser.pyx":337
 * try:
 * pyparser._proto_on_message_complete()
 * except BaseException as ex: # <<<<<<<<<<<<<<
 * pyparser._last_error = ex
 * return -1
 */
/*finally:*/ {
__pyx_L13_return: {
__pyx_t_7 = __pyx_r;
__Pyx_DECREF(__pyx_v_ex);
__pyx_v_ex = NULL;
__pyx_r = __pyx_t_7;
goto __pyx_L6_except_return;
}
}
}
goto __pyx_L5_except_error;
__pyx_L5_except_error:;
/* "httptools/parser/parser.pyx":335
 * cdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1:
 * cdef HttpParser pyparser = <HttpParser>parser.data
 * try: # <<<<<<<<<<<<<<
 * pyparser._proto_on_message_complete()
 * except BaseException as ex:
 */
__Pyx_XGIVEREF(__pyx_t_2);
__Pyx_XGIVEREF(__pyx_t_3);
__Pyx_XGIVEREF(__pyx_t_4);
__Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4);
goto __pyx_L1_error;
__pyx_L6_except_return:;
__Pyx_XGIVEREF(__pyx_t_2);
__Pyx_XGIVEREF(__pyx_t_3);
__Pyx_XGIVEREF(__pyx_t_4);
__Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4);
goto __pyx_L0;
}
/* "httptools/parser/parser.pyx":333
 *
 *
 * cdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1: # <<<<<<<<<<<<<<
 * cdef HttpParser pyparser = <HttpParser>parser.data
 * try:
 */
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_XDECREF(__pyx_t_5);
__Pyx_XDECREF(__pyx_t_6);
__Pyx_AddTraceback("httptools.parser.parser.cb_on_message_complete", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = -1;
__pyx_L0:;
__Pyx_XDECREF((PyObject *)__pyx_v_pyparser);
__Pyx_XDECREF(__pyx_v_ex);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "httptools/parser/parser.pyx":344
*
*
* cdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1: # <<<<<<<<<<<<<<
* cdef HttpParser pyparser = <HttpParser>parser.data
* try:
*/
/* NOTE(review): machine-generated by Cython 0.29.28 from
 * httptools/parser/parser.pyx -- do not hand-edit; regenerate from the
 * .pyx source instead.
 *
 * llhttp `on_chunk_header` C callback. Recovers the owning Python
 * HttpParser from parser->data and invokes
 * HttpParser._on_chunk_header(). Returns 0 on success; if the Python
 * callback raises, the exception is stored in pyparser->_last_error and
 * -1 is returned. */
static int __pyx_f_9httptools_6parser_6parser_cb_on_chunk_header(llhttp_t *__pyx_v_parser) {
struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_pyparser = 0;
PyObject *__pyx_v_ex = NULL;
int __pyx_r;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
PyObject *__pyx_t_2 = NULL;
PyObject *__pyx_t_3 = NULL;
PyObject *__pyx_t_4 = NULL;
int __pyx_t_5;
PyObject *__pyx_t_6 = NULL;
PyObject *__pyx_t_7 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("cb_on_chunk_header", 0);
/* "httptools/parser/parser.pyx":345
 *
 * cdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1:
 * cdef HttpParser pyparser = <HttpParser>parser.data # <<<<<<<<<<<<<<
 * try:
 * pyparser._on_chunk_header()
 */
__pyx_t_1 = ((PyObject *)__pyx_v_parser->data);
__Pyx_INCREF(__pyx_t_1);
__pyx_v_pyparser = ((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_t_1);
__pyx_t_1 = 0;
/* "httptools/parser/parser.pyx":346
 * cdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1:
 * cdef HttpParser pyparser = <HttpParser>parser.data
 * try: # <<<<<<<<<<<<<<
 * pyparser._on_chunk_header()
 * except BaseException as ex:
 */
{
__Pyx_PyThreadState_declare
__Pyx_PyThreadState_assign
__Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4);
__Pyx_XGOTREF(__pyx_t_2);
__Pyx_XGOTREF(__pyx_t_3);
__Pyx_XGOTREF(__pyx_t_4);
/*try:*/ {
/* "httptools/parser/parser.pyx":347
 * cdef HttpParser pyparser = <HttpParser>parser.data
 * try:
 * pyparser._on_chunk_header() # <<<<<<<<<<<<<<
 * except BaseException as ex:
 * pyparser._last_error = ex
 */
__pyx_t_1 = ((struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpParser *)__pyx_v_pyparser->__pyx_vtab)->_on_chunk_header(__pyx_v_pyparser); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 347, __pyx_L3_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
/* "httptools/parser/parser.pyx":346
 * cdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1:
 * cdef HttpParser pyparser = <HttpParser>parser.data
 * try: # <<<<<<<<<<<<<<
 * pyparser._on_chunk_header()
 * except BaseException as ex:
 */
}
/* "httptools/parser/parser.pyx":352
 * return -1
 * else:
 * return 0 # <<<<<<<<<<<<<<
 *
 *
 */
/*else:*/ {
__pyx_r = 0;
goto __pyx_L6_except_return;
}
/* exception raised by the Python-level callback lands here */
__pyx_L3_error:;
__Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0;
/* "httptools/parser/parser.pyx":348
 * try:
 * pyparser._on_chunk_header()
 * except BaseException as ex: # <<<<<<<<<<<<<<
 * pyparser._last_error = ex
 * return -1
 */
__pyx_t_5 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException);
if (__pyx_t_5) {
__Pyx_AddTraceback("httptools.parser.parser.cb_on_chunk_header", __pyx_clineno, __pyx_lineno, __pyx_filename);
if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_6, &__pyx_t_7) < 0) __PYX_ERR(0, 348, __pyx_L5_except_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_GOTREF(__pyx_t_6);
__Pyx_GOTREF(__pyx_t_7);
__Pyx_INCREF(__pyx_t_6);
__pyx_v_ex = __pyx_t_6;
/*try:*/ {
/* "httptools/parser/parser.pyx":349
 * pyparser._on_chunk_header()
 * except BaseException as ex:
 * pyparser._last_error = ex # <<<<<<<<<<<<<<
 * return -1
 * else:
 */
__Pyx_INCREF(__pyx_v_ex);
__Pyx_GIVEREF(__pyx_v_ex);
__Pyx_GOTREF(__pyx_v_pyparser->_last_error);
__Pyx_DECREF(__pyx_v_pyparser->_last_error);
__pyx_v_pyparser->_last_error = __pyx_v_ex;
/* "httptools/parser/parser.pyx":350
 * except BaseException as ex:
 * pyparser._last_error = ex
 * return -1 # <<<<<<<<<<<<<<
 * else:
 * return 0
 */
__pyx_r = -1;
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
__Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
goto __pyx_L13_return;
}
/* "httptools/parser/parser.pyx":348
 * try:
 * pyparser._on_chunk_header()
 * except BaseException as ex: # <<<<<<<<<<<<<<
 * pyparser._last_error = ex
 * return -1
 */
/*finally:*/ {
__pyx_L13_return: {
__pyx_t_5 = __pyx_r;
__Pyx_DECREF(__pyx_v_ex);
__pyx_v_ex = NULL;
__pyx_r = __pyx_t_5;
goto __pyx_L6_except_return;
}
}
}
goto __pyx_L5_except_error;
__pyx_L5_except_error:;
/* "httptools/parser/parser.pyx":346
 * cdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1:
 * cdef HttpParser pyparser = <HttpParser>parser.data
 * try: # <<<<<<<<<<<<<<
 * pyparser._on_chunk_header()
 * except BaseException as ex:
 */
__Pyx_XGIVEREF(__pyx_t_2);
__Pyx_XGIVEREF(__pyx_t_3);
__Pyx_XGIVEREF(__pyx_t_4);
__Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4);
goto __pyx_L1_error;
__pyx_L6_except_return:;
__Pyx_XGIVEREF(__pyx_t_2);
__Pyx_XGIVEREF(__pyx_t_3);
__Pyx_XGIVEREF(__pyx_t_4);
__Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4);
goto __pyx_L0;
}
/* "httptools/parser/parser.pyx":344
 *
 *
 * cdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1: # <<<<<<<<<<<<<<
 * cdef HttpParser pyparser = <HttpParser>parser.data
 * try:
 */
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_XDECREF(__pyx_t_6);
__Pyx_XDECREF(__pyx_t_7);
__Pyx_AddTraceback("httptools.parser.parser.cb_on_chunk_header", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = -1;
__pyx_L0:;
__Pyx_XDECREF((PyObject *)__pyx_v_pyparser);
__Pyx_XDECREF(__pyx_v_ex);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "httptools/parser/parser.pyx":355
*
*
* cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1: # <<<<<<<<<<<<<<
* cdef HttpParser pyparser = <HttpParser>parser.data
* try:
*/
/*
 * C-level llhttp callback fired when a chunked-transfer chunk finishes.
 *
 * Generated by Cython from httptools/parser/parser.pyx:355.  Recovers the
 * owning HttpParser instance from parser->data, calls its _on_chunk_complete()
 * method, and maps the outcome onto the llhttp callback convention:
 * returns 0 on success, -1 on failure.  On failure the raised Python
 * exception is stored in pyparser->_last_error so the Python-level
 * feed_data() can re-raise it after llhttp reports the error.
 *
 * NOTE(review): machine-generated code -- the exact INCREF/DECREF ordering
 * below is load-bearing; regenerate with Cython rather than hand-editing.
 */
static int __pyx_f_9httptools_6parser_6parser_cb_on_chunk_complete(llhttp_t *__pyx_v_parser) {
struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_pyparser = 0;
PyObject *__pyx_v_ex = NULL;
int __pyx_r;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
PyObject *__pyx_t_2 = NULL;
PyObject *__pyx_t_3 = NULL;
PyObject *__pyx_t_4 = NULL;
int __pyx_t_5;
PyObject *__pyx_t_6 = NULL;
PyObject *__pyx_t_7 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("cb_on_chunk_complete", 0);
/* "httptools/parser/parser.pyx":356
 *
 * cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1:
 * cdef HttpParser pyparser = <HttpParser>parser.data # <<<<<<<<<<<<<<
 * try:
 * pyparser._on_chunk_complete()
 */
/* parser->data was set to the HttpParser instance at construction time;
 * the cast assumes that invariant holds (it is not re-checked here). */
__pyx_t_1 = ((PyObject *)__pyx_v_parser->data);
__Pyx_INCREF(__pyx_t_1);
__pyx_v_pyparser = ((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_t_1);
__pyx_t_1 = 0;
/* "httptools/parser/parser.pyx":357
 * cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1:
 * cdef HttpParser pyparser = <HttpParser>parser.data
 * try: # <<<<<<<<<<<<<<
 * pyparser._on_chunk_complete()
 * except BaseException as ex:
 */
{
__Pyx_PyThreadState_declare
__Pyx_PyThreadState_assign
/* Save any in-flight exception state so the try/except below can be
 * unwound without clobbering it. */
__Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4);
__Pyx_XGOTREF(__pyx_t_2);
__Pyx_XGOTREF(__pyx_t_3);
__Pyx_XGOTREF(__pyx_t_4);
/*try:*/ {
/* "httptools/parser/parser.pyx":358
 * cdef HttpParser pyparser = <HttpParser>parser.data
 * try:
 * pyparser._on_chunk_complete() # <<<<<<<<<<<<<<
 * except BaseException as ex:
 * pyparser._last_error = ex
 */
__pyx_t_1 = ((struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpParser *)__pyx_v_pyparser->__pyx_vtab)->_on_chunk_complete(__pyx_v_pyparser); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 358, __pyx_L3_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
/* "httptools/parser/parser.pyx":357
 * cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1:
 * cdef HttpParser pyparser = <HttpParser>parser.data
 * try: # <<<<<<<<<<<<<<
 * pyparser._on_chunk_complete()
 * except BaseException as ex:
 */
}
/* "httptools/parser/parser.pyx":363
 * return -1
 * else:
 * return 0 # <<<<<<<<<<<<<<
 *
 *
 */
/*else:*/ {
__pyx_r = 0;
goto __pyx_L6_except_return;
}
__pyx_L3_error:;
__Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0;
/* "httptools/parser/parser.pyx":359
 * try:
 * pyparser._on_chunk_complete()
 * except BaseException as ex: # <<<<<<<<<<<<<<
 * pyparser._last_error = ex
 * return -1
 */
__pyx_t_5 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException);
if (__pyx_t_5) {
__Pyx_AddTraceback("httptools.parser.parser.cb_on_chunk_complete", __pyx_clineno, __pyx_lineno, __pyx_filename);
if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_6, &__pyx_t_7) < 0) __PYX_ERR(0, 359, __pyx_L5_except_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_GOTREF(__pyx_t_6);
__Pyx_GOTREF(__pyx_t_7);
__Pyx_INCREF(__pyx_t_6);
__pyx_v_ex = __pyx_t_6;
/*try:*/ {
/* "httptools/parser/parser.pyx":360
 * pyparser._on_chunk_complete()
 * except BaseException as ex:
 * pyparser._last_error = ex # <<<<<<<<<<<<<<
 * return -1
 * else:
 */
/* Stash the exception on the parser instance; -1 tells llhttp to abort. */
__Pyx_INCREF(__pyx_v_ex);
__Pyx_GIVEREF(__pyx_v_ex);
__Pyx_GOTREF(__pyx_v_pyparser->_last_error);
__Pyx_DECREF(__pyx_v_pyparser->_last_error);
__pyx_v_pyparser->_last_error = __pyx_v_ex;
/* "httptools/parser/parser.pyx":361
 * except BaseException as ex:
 * pyparser._last_error = ex
 * return -1 # <<<<<<<<<<<<<<
 * else:
 * return 0
 */
__pyx_r = -1;
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
__Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
goto __pyx_L13_return;
}
/* "httptools/parser/parser.pyx":359
 * try:
 * pyparser._on_chunk_complete()
 * except BaseException as ex: # <<<<<<<<<<<<<<
 * pyparser._last_error = ex
 * return -1
 */
/*finally:*/ {
__pyx_L13_return: {
__pyx_t_5 = __pyx_r;
__Pyx_DECREF(__pyx_v_ex);
__pyx_v_ex = NULL;
__pyx_r = __pyx_t_5;
goto __pyx_L6_except_return;
}
}
}
goto __pyx_L5_except_error;
__pyx_L5_except_error:;
/* "httptools/parser/parser.pyx":357
 * cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1:
 * cdef HttpParser pyparser = <HttpParser>parser.data
 * try: # <<<<<<<<<<<<<<
 * pyparser._on_chunk_complete()
 * except BaseException as ex:
 */
__Pyx_XGIVEREF(__pyx_t_2);
__Pyx_XGIVEREF(__pyx_t_3);
__Pyx_XGIVEREF(__pyx_t_4);
__Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4);
goto __pyx_L1_error;
__pyx_L6_except_return:;
__Pyx_XGIVEREF(__pyx_t_2);
__Pyx_XGIVEREF(__pyx_t_3);
__Pyx_XGIVEREF(__pyx_t_4);
__Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4);
goto __pyx_L0;
}
/* "httptools/parser/parser.pyx":355
 *
 *
 * cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1: # <<<<<<<<<<<<<<
 * cdef HttpParser pyparser = <HttpParser>parser.data
 * try:
 */
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_XDECREF(__pyx_t_6);
__Pyx_XDECREF(__pyx_t_7);
__Pyx_AddTraceback("httptools.parser.parser.cb_on_chunk_complete", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = -1;
__pyx_L0:;
__Pyx_XDECREF((PyObject *)__pyx_v_pyparser);
__Pyx_XDECREF(__pyx_v_ex);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "httptools/parser/parser.pyx":366
*
*
* cdef parser_error_from_errno(cparser.llhttp_t* parser, cparser.llhttp_errno_t errno): # <<<<<<<<<<<<<<
* cdef bytes reason = cparser.llhttp_get_error_reason(parser)
*
*/
/*
 * Map an llhttp error code to a Python exception instance.
 *
 * Generated by Cython from httptools/parser/parser.pyx:366.  Fetches the
 * human-readable reason string via llhttp_get_error_reason(), selects an
 * exception class based on the errno:
 *   - callback failures (HPE_CB_* and HPE_USER) -> HttpParserCallbackError
 *   - HPE_INVALID_STATUS                        -> HttpParserInvalidStatusError
 *   - HPE_INVALID_METHOD                        -> HttpParserInvalidMethodError
 *   - HPE_INVALID_URL                           -> HttpParserInvalidURLError
 *   - everything else                           -> HttpParserError
 * and returns cls(reason.decode('latin-1')), or NULL (with an exception
 * set) on failure.
 */
static PyObject *__pyx_f_9httptools_6parser_6parser_parser_error_from_errno(llhttp_t *__pyx_v_parser, llhttp_errno_t __pyx_v_errno) {
PyObject *__pyx_v_reason = 0;
PyObject *__pyx_v_cls = NULL;
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
PyObject *__pyx_t_2 = NULL;
PyObject *__pyx_t_3 = NULL;
PyObject *__pyx_t_4 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("parser_error_from_errno", 0);
/* "httptools/parser/parser.pyx":367
 *
 * cdef parser_error_from_errno(cparser.llhttp_t* parser, cparser.llhttp_errno_t errno):
 * cdef bytes reason = cparser.llhttp_get_error_reason(parser) # <<<<<<<<<<<<<<
 *
 * if errno in (cparser.HPE_CB_MESSAGE_BEGIN,
 */
__pyx_t_1 = __Pyx_PyBytes_FromString(llhttp_get_error_reason(__pyx_v_parser)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 367, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__pyx_v_reason = ((PyObject*)__pyx_t_1);
__pyx_t_1 = 0;
/* "httptools/parser/parser.pyx":369
 * cdef bytes reason = cparser.llhttp_get_error_reason(parser)
 *
 * if errno in (cparser.HPE_CB_MESSAGE_BEGIN, # <<<<<<<<<<<<<<
 * cparser.HPE_CB_HEADERS_COMPLETE,
 * cparser.HPE_CB_MESSAGE_COMPLETE,
 */
/* The Cython "in (...)" membership test over enum constants compiles to
 * a C switch with fall-through cases. */
switch (__pyx_v_errno) {
case HPE_CB_MESSAGE_BEGIN:
case HPE_CB_HEADERS_COMPLETE:
/* "httptools/parser/parser.pyx":370
 *
 * if errno in (cparser.HPE_CB_MESSAGE_BEGIN,
 * cparser.HPE_CB_HEADERS_COMPLETE, # <<<<<<<<<<<<<<
 * cparser.HPE_CB_MESSAGE_COMPLETE,
 * cparser.HPE_CB_CHUNK_HEADER,
 */
case HPE_CB_MESSAGE_COMPLETE:
/* "httptools/parser/parser.pyx":371
 * if errno in (cparser.HPE_CB_MESSAGE_BEGIN,
 * cparser.HPE_CB_HEADERS_COMPLETE,
 * cparser.HPE_CB_MESSAGE_COMPLETE, # <<<<<<<<<<<<<<
 * cparser.HPE_CB_CHUNK_HEADER,
 * cparser.HPE_CB_CHUNK_COMPLETE,
 */
case HPE_CB_CHUNK_HEADER:
/* "httptools/parser/parser.pyx":372
 * cparser.HPE_CB_HEADERS_COMPLETE,
 * cparser.HPE_CB_MESSAGE_COMPLETE,
 * cparser.HPE_CB_CHUNK_HEADER, # <<<<<<<<<<<<<<
 * cparser.HPE_CB_CHUNK_COMPLETE,
 * cparser.HPE_USER):
 */
case HPE_CB_CHUNK_COMPLETE:
/* "httptools/parser/parser.pyx":373
 * cparser.HPE_CB_MESSAGE_COMPLETE,
 * cparser.HPE_CB_CHUNK_HEADER,
 * cparser.HPE_CB_CHUNK_COMPLETE, # <<<<<<<<<<<<<<
 * cparser.HPE_USER):
 * cls = HttpParserCallbackError
 */
case HPE_USER:
/* "httptools/parser/parser.pyx":375
 * cparser.HPE_CB_CHUNK_COMPLETE,
 * cparser.HPE_USER):
 * cls = HttpParserCallbackError # <<<<<<<<<<<<<<
 *
 * elif errno == cparser.HPE_INVALID_STATUS:
 */
__Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_HttpParserCallbackError); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 375, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__pyx_v_cls = __pyx_t_1;
__pyx_t_1 = 0;
/* "httptools/parser/parser.pyx":369
 * cdef bytes reason = cparser.llhttp_get_error_reason(parser)
 *
 * if errno in (cparser.HPE_CB_MESSAGE_BEGIN, # <<<<<<<<<<<<<<
 * cparser.HPE_CB_HEADERS_COMPLETE,
 * cparser.HPE_CB_MESSAGE_COMPLETE,
 */
break;
case HPE_INVALID_STATUS:
/* "httptools/parser/parser.pyx":378
 *
 * elif errno == cparser.HPE_INVALID_STATUS:
 * cls = HttpParserInvalidStatusError # <<<<<<<<<<<<<<
 *
 * elif errno == cparser.HPE_INVALID_METHOD:
 */
__Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_HttpParserInvalidStatusError); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 378, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__pyx_v_cls = __pyx_t_1;
__pyx_t_1 = 0;
/* "httptools/parser/parser.pyx":377
 * cls = HttpParserCallbackError
 *
 * elif errno == cparser.HPE_INVALID_STATUS: # <<<<<<<<<<<<<<
 * cls = HttpParserInvalidStatusError
 *
 */
break;
case HPE_INVALID_METHOD:
/* "httptools/parser/parser.pyx":381
 *
 * elif errno == cparser.HPE_INVALID_METHOD:
 * cls = HttpParserInvalidMethodError # <<<<<<<<<<<<<<
 *
 * elif errno == cparser.HPE_INVALID_URL:
 */
__Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_HttpParserInvalidMethodError); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 381, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__pyx_v_cls = __pyx_t_1;
__pyx_t_1 = 0;
/* "httptools/parser/parser.pyx":380
 * cls = HttpParserInvalidStatusError
 *
 * elif errno == cparser.HPE_INVALID_METHOD: # <<<<<<<<<<<<<<
 * cls = HttpParserInvalidMethodError
 *
 */
break;
case HPE_INVALID_URL:
/* "httptools/parser/parser.pyx":384
 *
 * elif errno == cparser.HPE_INVALID_URL:
 * cls = HttpParserInvalidURLError # <<<<<<<<<<<<<<
 *
 * else:
 */
__Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_HttpParserInvalidURLError); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 384, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__pyx_v_cls = __pyx_t_1;
__pyx_t_1 = 0;
/* "httptools/parser/parser.pyx":383
 * cls = HttpParserInvalidMethodError
 *
 * elif errno == cparser.HPE_INVALID_URL: # <<<<<<<<<<<<<<
 * cls = HttpParserInvalidURLError
 *
 */
break;
default:
/* "httptools/parser/parser.pyx":387
 *
 * else:
 * cls = HttpParserError # <<<<<<<<<<<<<<
 *
 * return cls(reason.decode('latin-1'))
 */
__Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_HttpParserError); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 387, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__pyx_v_cls = __pyx_t_1;
__pyx_t_1 = 0;
break;
}
/* "httptools/parser/parser.pyx":389
 * cls = HttpParserError
 *
 * return cls(reason.decode('latin-1')) # <<<<<<<<<<<<<<
 */
/* Decode the reason bytes as latin-1 and call the selected exception
 * class with it (with the usual bound-method unpacking fast path). */
__Pyx_XDECREF(__pyx_r);
__pyx_t_2 = __Pyx_decode_bytes(__pyx_v_reason, 0, PY_SSIZE_T_MAX, NULL, NULL, PyUnicode_DecodeLatin1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 389, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__Pyx_INCREF(__pyx_v_cls);
__pyx_t_3 = __pyx_v_cls; __pyx_t_4 = NULL;
if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) {
__pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3);
if (likely(__pyx_t_4)) {
PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3);
__Pyx_INCREF(__pyx_t_4);
__Pyx_INCREF(function);
__Pyx_DECREF_SET(__pyx_t_3, function);
}
}
__pyx_t_1 = (__pyx_t_4) ? __Pyx_PyObject_Call2Args(__pyx_t_3, __pyx_t_4, __pyx_t_2) : __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_t_2);
__Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 389, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__pyx_r = __pyx_t_1;
__pyx_t_1 = 0;
goto __pyx_L0;
/* "httptools/parser/parser.pyx":366
 *
 *
 * cdef parser_error_from_errno(cparser.llhttp_t* parser, cparser.llhttp_errno_t errno): # <<<<<<<<<<<<<<
 * cdef bytes reason = cparser.llhttp_get_error_reason(parser)
 *
 */
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_XDECREF(__pyx_t_2);
__Pyx_XDECREF(__pyx_t_3);
__Pyx_XDECREF(__pyx_t_4);
__Pyx_AddTraceback("httptools.parser.parser.parser_error_from_errno", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = 0;
__pyx_L0:;
__Pyx_XDECREF(__pyx_v_reason);
__Pyx_XDECREF(__pyx_v_cls);
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpParser __pyx_vtable_9httptools_6parser_6parser_HttpParser;
static PyObject *__pyx_tp_new_9httptools_6parser_6parser_HttpParser(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) {
struct __pyx_obj_9httptools_6parser_6parser_HttpParser *p;
PyObject *o;
if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) {
o = (*t->tp_alloc)(t, 0);
} else {
o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0);
}
if (unlikely(!o)) return 0;
p = ((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)o);
p->__pyx_vtab = __pyx_vtabptr_9httptools_6parser_6parser_HttpParser;
p->_current_header_name = ((PyObject*)Py_None); Py_INCREF(Py_None);
p->_current_header_value = ((PyObject*)Py_None); Py_INCREF(Py_None);
p->_proto_on_url = Py_None; Py_INCREF(Py_None);
p->_proto_on_status = Py_None; Py_INCREF(Py_None);
p->_proto_on_body = Py_None; Py_INCREF(Py_None);
p->_proto_on_header = Py_None; Py_INCREF(Py_None);
p->_proto_on_headers_complete = Py_None; Py_INCREF(Py_None);
p->_proto_on_message_complete = Py_None; Py_INCREF(Py_None);
p->_proto_on_chunk_header = Py_None; Py_INCREF(Py_None);
p->_proto_on_chunk_complete = Py_None; Py_INCREF(Py_None);
p->_proto_on_message_begin = Py_None; Py_INCREF(Py_None);
p->_last_error = Py_None; Py_INCREF(Py_None);
p->py_buf.obj = NULL;
if (unlikely(__pyx_pw_9httptools_6parser_6parser_10HttpParser_1__cinit__(o, __pyx_empty_tuple, NULL) < 0)) goto bad;
return o;
bad:
Py_DECREF(o); o = 0;
return NULL;
}
/*
 * tp_dealloc slot for HttpParser (shared by its subclasses).  Runs any
 * tp_finalize, untracks from GC, invokes the user __dealloc__ with the
 * refcount temporarily bumped (and any pending exception saved/restored
 * around it), then clears every owned attribute and frees the object.
 */
static void __pyx_tp_dealloc_9httptools_6parser_6parser_HttpParser(PyObject *o) {
  struct __pyx_obj_9httptools_6parser_6parser_HttpParser *p = (struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)o;
  #if CYTHON_USE_TP_FINALIZE
  if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) {
    if (PyObject_CallFinalizerFromDealloc(o)) return;
  }
  #endif
  PyObject_GC_UnTrack(o);
  {
    PyObject *etype, *eval, *etb;
    PyErr_Fetch(&etype, &eval, &etb);
    /* Bump the refcount so __dealloc__ cannot trigger re-entrant
     * deallocation while the object is being torn down. */
    __Pyx_SET_REFCNT(o, Py_REFCNT(o) + 1);
    __pyx_pw_9httptools_6parser_6parser_10HttpParser_3__dealloc__(o);
    __Pyx_SET_REFCNT(o, Py_REFCNT(o) - 1);
    PyErr_Restore(etype, eval, etb);
  }
  Py_CLEAR(p->_current_header_name);
  Py_CLEAR(p->_current_header_value);
  Py_CLEAR(p->_proto_on_url);
  Py_CLEAR(p->_proto_on_status);
  Py_CLEAR(p->_proto_on_body);
  Py_CLEAR(p->_proto_on_header);
  Py_CLEAR(p->_proto_on_headers_complete);
  Py_CLEAR(p->_proto_on_message_complete);
  Py_CLEAR(p->_proto_on_chunk_header);
  Py_CLEAR(p->_proto_on_chunk_complete);
  Py_CLEAR(p->_proto_on_message_begin);
  Py_CLEAR(p->_last_error);
  (*Py_TYPE(o)->tp_free)(o);
}
/*
 * tp_traverse slot for HttpParser: visit every owned PyObject* attribute
 * (including the Py_buffer's backing object) so the cyclic GC can find
 * reference cycles through this instance.  Note _current_header_name and
 * _current_header_value are bytes-typed and are not traversed.
 */
static int __pyx_tp_traverse_9httptools_6parser_6parser_HttpParser(PyObject *o, visitproc v, void *a) {
  int e;
  struct __pyx_obj_9httptools_6parser_6parser_HttpParser *p = (struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)o;
  if (p->_proto_on_url) {
    e = (*v)(p->_proto_on_url, a); if (e) return e;
  }
  if (p->_proto_on_status) {
    e = (*v)(p->_proto_on_status, a); if (e) return e;
  }
  if (p->_proto_on_body) {
    e = (*v)(p->_proto_on_body, a); if (e) return e;
  }
  if (p->_proto_on_header) {
    e = (*v)(p->_proto_on_header, a); if (e) return e;
  }
  if (p->_proto_on_headers_complete) {
    e = (*v)(p->_proto_on_headers_complete, a); if (e) return e;
  }
  if (p->_proto_on_message_complete) {
    e = (*v)(p->_proto_on_message_complete, a); if (e) return e;
  }
  if (p->_proto_on_chunk_header) {
    e = (*v)(p->_proto_on_chunk_header, a); if (e) return e;
  }
  if (p->_proto_on_chunk_complete) {
    e = (*v)(p->_proto_on_chunk_complete, a); if (e) return e;
  }
  if (p->_proto_on_message_begin) {
    e = (*v)(p->_proto_on_message_begin, a); if (e) return e;
  }
  if (p->_last_error) {
    e = (*v)(p->_last_error, a); if (e) return e;
  }
  if (p->py_buf.obj) {
    e = (*v)(p->py_buf.obj, a); if (e) return e;
  }
  return 0;
}
/*
 * tp_clear slot for HttpParser: break reference cycles for the GC by
 * resetting each traversed attribute to None (swap-then-XDECREF so the
 * field never dangles) and releasing the Py_buffer's backing object.
 */
static int __pyx_tp_clear_9httptools_6parser_6parser_HttpParser(PyObject *o) {
  PyObject* tmp;
  struct __pyx_obj_9httptools_6parser_6parser_HttpParser *p = (struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)o;
  tmp = ((PyObject*)p->_proto_on_url);
  p->_proto_on_url = Py_None; Py_INCREF(Py_None);
  Py_XDECREF(tmp);
  tmp = ((PyObject*)p->_proto_on_status);
  p->_proto_on_status = Py_None; Py_INCREF(Py_None);
  Py_XDECREF(tmp);
  tmp = ((PyObject*)p->_proto_on_body);
  p->_proto_on_body = Py_None; Py_INCREF(Py_None);
  Py_XDECREF(tmp);
  tmp = ((PyObject*)p->_proto_on_header);
  p->_proto_on_header = Py_None; Py_INCREF(Py_None);
  Py_XDECREF(tmp);
  tmp = ((PyObject*)p->_proto_on_headers_complete);
  p->_proto_on_headers_complete = Py_None; Py_INCREF(Py_None);
  Py_XDECREF(tmp);
  tmp = ((PyObject*)p->_proto_on_message_complete);
  p->_proto_on_message_complete = Py_None; Py_INCREF(Py_None);
  Py_XDECREF(tmp);
  tmp = ((PyObject*)p->_proto_on_chunk_header);
  p->_proto_on_chunk_header = Py_None; Py_INCREF(Py_None);
  Py_XDECREF(tmp);
  tmp = ((PyObject*)p->_proto_on_chunk_complete);
  p->_proto_on_chunk_complete = Py_None; Py_INCREF(Py_None);
  Py_XDECREF(tmp);
  tmp = ((PyObject*)p->_proto_on_message_begin);
  p->_proto_on_message_begin = Py_None; Py_INCREF(Py_None);
  Py_XDECREF(tmp);
  tmp = ((PyObject*)p->_last_error);
  p->_last_error = Py_None; Py_INCREF(Py_None);
  Py_XDECREF(tmp);
  Py_CLEAR(p->py_buf.obj);
  return 0;
}
/* Method table for the HttpParser base type: the public API
 * (get_http_version/should_keep_alive/should_upgrade/feed_data) plus the
 * pickling stubs Cython generates for extension types. */
static PyMethodDef __pyx_methods_9httptools_6parser_6parser_HttpParser[] = {
  {"get_http_version", (PyCFunction)__pyx_pw_9httptools_6parser_6parser_10HttpParser_5get_http_version, METH_NOARGS, 0},
  {"should_keep_alive", (PyCFunction)__pyx_pw_9httptools_6parser_6parser_10HttpParser_7should_keep_alive, METH_NOARGS, 0},
  {"should_upgrade", (PyCFunction)__pyx_pw_9httptools_6parser_6parser_10HttpParser_9should_upgrade, METH_NOARGS, 0},
  {"feed_data", (PyCFunction)__pyx_pw_9httptools_6parser_6parser_10HttpParser_11feed_data, METH_O, 0},
  {"__reduce_cython__", (PyCFunction)__pyx_pw_9httptools_6parser_6parser_10HttpParser_13__reduce_cython__, METH_NOARGS, 0},
  {"__setstate_cython__", (PyCFunction)__pyx_pw_9httptools_6parser_6parser_10HttpParser_15__setstate_cython__, METH_O, 0},
  {0, 0, 0, 0}
};
/* Static PyTypeObject for httptools.parser.parser.HttpParser.  GC-enabled
 * (HAVE_GC with the traverse/clear slots above), subclassable (BASETYPE),
 * no tp_init -- construction goes through tp_new/__cinit__. */
static PyTypeObject __pyx_type_9httptools_6parser_6parser_HttpParser = {
  PyVarObject_HEAD_INIT(0, 0)
  "httptools.parser.parser.HttpParser", /*tp_name*/
  sizeof(struct __pyx_obj_9httptools_6parser_6parser_HttpParser), /*tp_basicsize*/
  0, /*tp_itemsize*/
  __pyx_tp_dealloc_9httptools_6parser_6parser_HttpParser, /*tp_dealloc*/
  #if PY_VERSION_HEX < 0x030800b4
  0, /*tp_print*/
  #endif
  #if PY_VERSION_HEX >= 0x030800b4
  0, /*tp_vectorcall_offset*/
  #endif
  0, /*tp_getattr*/
  0, /*tp_setattr*/
  #if PY_MAJOR_VERSION < 3
  0, /*tp_compare*/
  #endif
  #if PY_MAJOR_VERSION >= 3
  0, /*tp_as_async*/
  #endif
  0, /*tp_repr*/
  0, /*tp_as_number*/
  0, /*tp_as_sequence*/
  0, /*tp_as_mapping*/
  0, /*tp_hash*/
  0, /*tp_call*/
  0, /*tp_str*/
  0, /*tp_getattro*/
  0, /*tp_setattro*/
  0, /*tp_as_buffer*/
  Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/
  0, /*tp_doc*/
  __pyx_tp_traverse_9httptools_6parser_6parser_HttpParser, /*tp_traverse*/
  __pyx_tp_clear_9httptools_6parser_6parser_HttpParser, /*tp_clear*/
  0, /*tp_richcompare*/
  0, /*tp_weaklistoffset*/
  0, /*tp_iter*/
  0, /*tp_iternext*/
  __pyx_methods_9httptools_6parser_6parser_HttpParser, /*tp_methods*/
  0, /*tp_members*/
  0, /*tp_getset*/
  0, /*tp_base*/
  0, /*tp_dict*/
  0, /*tp_descr_get*/
  0, /*tp_descr_set*/
  0, /*tp_dictoffset*/
  0, /*tp_init*/
  0, /*tp_alloc*/
  __pyx_tp_new_9httptools_6parser_6parser_HttpParser, /*tp_new*/
  0, /*tp_free*/
  0, /*tp_is_gc*/
  0, /*tp_bases*/
  0, /*tp_mro*/
  0, /*tp_cache*/
  0, /*tp_subclasses*/
  0, /*tp_weaklist*/
  0, /*tp_del*/
  0, /*tp_version_tag*/
  #if PY_VERSION_HEX >= 0x030400a1
  0, /*tp_finalize*/
  #endif
  #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800)
  0, /*tp_vectorcall*/
  #endif
  #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000
  0, /*tp_print*/
  #endif
  #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000
  0, /*tp_pypy_flags*/
  #endif
};
static struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpRequestParser __pyx_vtable_9httptools_6parser_6parser_HttpRequestParser;
/*
 * tp_new slot for HttpRequestParser: delegate allocation/field setup to the
 * HttpParser base tp_new, then install the subclass vtable.
 */
static PyObject *__pyx_tp_new_9httptools_6parser_6parser_HttpRequestParser(PyTypeObject *t, PyObject *a, PyObject *k) {
  struct __pyx_obj_9httptools_6parser_6parser_HttpRequestParser *p;
  PyObject *o = __pyx_tp_new_9httptools_6parser_6parser_HttpParser(t, a, k);
  if (unlikely(!o)) return 0;
  p = ((struct __pyx_obj_9httptools_6parser_6parser_HttpRequestParser *)o);
  p->__pyx_base.__pyx_vtab = (struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpParser*)__pyx_vtabptr_9httptools_6parser_6parser_HttpRequestParser;
  return o;
}
/* Method table for HttpRequestParser: adds get_method() on top of the
 * inherited HttpParser methods, plus the generated pickling stubs. */
static PyMethodDef __pyx_methods_9httptools_6parser_6parser_HttpRequestParser[] = {
  {"get_method", (PyCFunction)__pyx_pw_9httptools_6parser_6parser_17HttpRequestParser_3get_method, METH_NOARGS, 0},
  {"__reduce_cython__", (PyCFunction)__pyx_pw_9httptools_6parser_6parser_17HttpRequestParser_5__reduce_cython__, METH_NOARGS, 0},
  {"__setstate_cython__", (PyCFunction)__pyx_pw_9httptools_6parser_6parser_17HttpRequestParser_7__setstate_cython__, METH_O, 0},
  {0, 0, 0, 0}
};
/* Static PyTypeObject for HttpRequestParser.  Reuses the HttpParser
 * dealloc/traverse/clear slots (the subclass adds no new object fields)
 * and wires its own __init__ and tp_new.  tp_base is filled in at module
 * init time. */
static PyTypeObject __pyx_type_9httptools_6parser_6parser_HttpRequestParser = {
  PyVarObject_HEAD_INIT(0, 0)
  "httptools.parser.parser.HttpRequestParser", /*tp_name*/
  sizeof(struct __pyx_obj_9httptools_6parser_6parser_HttpRequestParser), /*tp_basicsize*/
  0, /*tp_itemsize*/
  __pyx_tp_dealloc_9httptools_6parser_6parser_HttpParser, /*tp_dealloc*/
  #if PY_VERSION_HEX < 0x030800b4
  0, /*tp_print*/
  #endif
  #if PY_VERSION_HEX >= 0x030800b4
  0, /*tp_vectorcall_offset*/
  #endif
  0, /*tp_getattr*/
  0, /*tp_setattr*/
  #if PY_MAJOR_VERSION < 3
  0, /*tp_compare*/
  #endif
  #if PY_MAJOR_VERSION >= 3
  0, /*tp_as_async*/
  #endif
  0, /*tp_repr*/
  0, /*tp_as_number*/
  0, /*tp_as_sequence*/
  0, /*tp_as_mapping*/
  0, /*tp_hash*/
  0, /*tp_call*/
  0, /*tp_str*/
  0, /*tp_getattro*/
  0, /*tp_setattro*/
  0, /*tp_as_buffer*/
  Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/
  0, /*tp_doc*/
  __pyx_tp_traverse_9httptools_6parser_6parser_HttpParser, /*tp_traverse*/
  __pyx_tp_clear_9httptools_6parser_6parser_HttpParser, /*tp_clear*/
  0, /*tp_richcompare*/
  0, /*tp_weaklistoffset*/
  0, /*tp_iter*/
  0, /*tp_iternext*/
  __pyx_methods_9httptools_6parser_6parser_HttpRequestParser, /*tp_methods*/
  0, /*tp_members*/
  0, /*tp_getset*/
  0, /*tp_base*/
  0, /*tp_dict*/
  0, /*tp_descr_get*/
  0, /*tp_descr_set*/
  0, /*tp_dictoffset*/
  __pyx_pw_9httptools_6parser_6parser_17HttpRequestParser_1__init__, /*tp_init*/
  0, /*tp_alloc*/
  __pyx_tp_new_9httptools_6parser_6parser_HttpRequestParser, /*tp_new*/
  0, /*tp_free*/
  0, /*tp_is_gc*/
  0, /*tp_bases*/
  0, /*tp_mro*/
  0, /*tp_cache*/
  0, /*tp_subclasses*/
  0, /*tp_weaklist*/
  0, /*tp_del*/
  0, /*tp_version_tag*/
  #if PY_VERSION_HEX >= 0x030400a1
  0, /*tp_finalize*/
  #endif
  #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800)
  0, /*tp_vectorcall*/
  #endif
  #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000
  0, /*tp_print*/
  #endif
  #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000
  0, /*tp_pypy_flags*/
  #endif
};
static struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpResponseParser __pyx_vtable_9httptools_6parser_6parser_HttpResponseParser;
/*
 * tp_new slot for HttpResponseParser: delegate allocation/field setup to
 * the HttpParser base tp_new, then install the subclass vtable.
 */
static PyObject *__pyx_tp_new_9httptools_6parser_6parser_HttpResponseParser(PyTypeObject *t, PyObject *a, PyObject *k) {
  struct __pyx_obj_9httptools_6parser_6parser_HttpResponseParser *p;
  PyObject *o = __pyx_tp_new_9httptools_6parser_6parser_HttpParser(t, a, k);
  if (unlikely(!o)) return 0;
  p = ((struct __pyx_obj_9httptools_6parser_6parser_HttpResponseParser *)o);
  p->__pyx_base.__pyx_vtab = (struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpParser*)__pyx_vtabptr_9httptools_6parser_6parser_HttpResponseParser;
  return o;
}
/* Method table for HttpResponseParser: adds get_status_code() on top of the
 * inherited HttpParser methods, plus the generated pickling stubs. */
static PyMethodDef __pyx_methods_9httptools_6parser_6parser_HttpResponseParser[] = {
  {"get_status_code", (PyCFunction)__pyx_pw_9httptools_6parser_6parser_18HttpResponseParser_3get_status_code, METH_NOARGS, 0},
  {"__reduce_cython__", (PyCFunction)__pyx_pw_9httptools_6parser_6parser_18HttpResponseParser_5__reduce_cython__, METH_NOARGS, 0},
  {"__setstate_cython__", (PyCFunction)__pyx_pw_9httptools_6parser_6parser_18HttpResponseParser_7__setstate_cython__, METH_O, 0},
  {0, 0, 0, 0}
};
/* Static PyTypeObject for HttpResponseParser.  Mirrors HttpRequestParser:
 * base-class dealloc/traverse/clear slots, its own __init__ and tp_new;
 * tp_base is filled in at module init time. */
static PyTypeObject __pyx_type_9httptools_6parser_6parser_HttpResponseParser = {
  PyVarObject_HEAD_INIT(0, 0)
  "httptools.parser.parser.HttpResponseParser", /*tp_name*/
  sizeof(struct __pyx_obj_9httptools_6parser_6parser_HttpResponseParser), /*tp_basicsize*/
  0, /*tp_itemsize*/
  __pyx_tp_dealloc_9httptools_6parser_6parser_HttpParser, /*tp_dealloc*/
  #if PY_VERSION_HEX < 0x030800b4
  0, /*tp_print*/
  #endif
  #if PY_VERSION_HEX >= 0x030800b4
  0, /*tp_vectorcall_offset*/
  #endif
  0, /*tp_getattr*/
  0, /*tp_setattr*/
  #if PY_MAJOR_VERSION < 3
  0, /*tp_compare*/
  #endif
  #if PY_MAJOR_VERSION >= 3
  0, /*tp_as_async*/
  #endif
  0, /*tp_repr*/
  0, /*tp_as_number*/
  0, /*tp_as_sequence*/
  0, /*tp_as_mapping*/
  0, /*tp_hash*/
  0, /*tp_call*/
  0, /*tp_str*/
  0, /*tp_getattro*/
  0, /*tp_setattro*/
  0, /*tp_as_buffer*/
  Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/
  0, /*tp_doc*/
  __pyx_tp_traverse_9httptools_6parser_6parser_HttpParser, /*tp_traverse*/
  __pyx_tp_clear_9httptools_6parser_6parser_HttpParser, /*tp_clear*/
  0, /*tp_richcompare*/
  0, /*tp_weaklistoffset*/
  0, /*tp_iter*/
  0, /*tp_iternext*/
  __pyx_methods_9httptools_6parser_6parser_HttpResponseParser, /*tp_methods*/
  0, /*tp_members*/
  0, /*tp_getset*/
  0, /*tp_base*/
  0, /*tp_dict*/
  0, /*tp_descr_get*/
  0, /*tp_descr_set*/
  0, /*tp_dictoffset*/
  __pyx_pw_9httptools_6parser_6parser_18HttpResponseParser_1__init__, /*tp_init*/
  0, /*tp_alloc*/
  __pyx_tp_new_9httptools_6parser_6parser_HttpResponseParser, /*tp_new*/
  0, /*tp_free*/
  0, /*tp_is_gc*/
  0, /*tp_bases*/
  0, /*tp_mro*/
  0, /*tp_cache*/
  0, /*tp_subclasses*/
  0, /*tp_weaklist*/
  0, /*tp_del*/
  0, /*tp_version_tag*/
  #if PY_VERSION_HEX >= 0x030400a1
  0, /*tp_finalize*/
  #endif
  #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800)
  0, /*tp_vectorcall*/
  #endif
  #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000
  0, /*tp_print*/
  #endif
  #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000
  0, /*tp_pypy_flags*/
  #endif
};
/* Module-level method table: this module exposes no module functions,
 * only the extension types above. */
static PyMethodDef __pyx_methods[] = {
  {0, 0, 0, 0}
};
/* Python 3 module definition.  Under PEP 489 multi-phase init the slots
 * supply the create/exec hooks and m_size is 0; otherwise m_size is -1
 * (single-phase, module state kept in globals). */
#if PY_MAJOR_VERSION >= 3
#if CYTHON_PEP489_MULTI_PHASE_INIT
static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/
static int __pyx_pymod_exec_parser(PyObject* module); /*proto*/
static PyModuleDef_Slot __pyx_moduledef_slots[] = {
  {Py_mod_create, (void*)__pyx_pymod_create},
  {Py_mod_exec, (void*)__pyx_pymod_exec_parser},
  {0, NULL}
};
#endif
static struct PyModuleDef __pyx_moduledef = {
    PyModuleDef_HEAD_INIT,
    "parser",
    0, /* m_doc */
  #if CYTHON_PEP489_MULTI_PHASE_INIT
    0, /* m_size */
  #else
    -1, /* m_size */
  #endif
    __pyx_methods /* m_methods */,
  #if CYTHON_PEP489_MULTI_PHASE_INIT
    __pyx_moduledef_slots, /* m_slots */
  #else
    NULL, /* m_reload */
  #endif
    NULL, /* m_traverse */
    NULL, /* m_clear */
    NULL /* m_free */
};
#endif
/* CYTHON_SMALL_CODE marks rarely-executed init/teardown helpers: on GCC >= 4.3
 * it expands to __attribute__((cold)) so the compiler optimizes them for
 * size and moves them out of the hot path; elsewhere it is a no-op. */
#ifndef CYTHON_SMALL_CODE
#if defined(__clang__)
    #define CYTHON_SMALL_CODE
#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3))
    #define CYTHON_SMALL_CODE __attribute__((cold))
#else
    #define CYTHON_SMALL_CODE
#endif
#endif
/* Interned-string table: every identifier and literal the module uses at
 * runtime.  Entries are (slot, C string, length, encoding flags, is_unicode,
 * is_str, intern) and are materialized once at module init. */
static __Pyx_StringTabEntry __pyx_string_tab[] = {
  {&__pyx_kp_u_, __pyx_k_, sizeof(__pyx_k_), 0, 1, 0, 0},
  {&__pyx_n_s_BaseException, __pyx_k_BaseException, sizeof(__pyx_k_BaseException), 0, 0, 1, 1},
  {&__pyx_n_s_HttpParserCallbackError, __pyx_k_HttpParserCallbackError, sizeof(__pyx_k_HttpParserCallbackError), 0, 0, 1, 1},
  {&__pyx_n_s_HttpParserError, __pyx_k_HttpParserError, sizeof(__pyx_k_HttpParserError), 0, 0, 1, 1},
  {&__pyx_n_s_HttpParserInvalidMethodError, __pyx_k_HttpParserInvalidMethodError, sizeof(__pyx_k_HttpParserInvalidMethodError), 0, 0, 1, 1},
  {&__pyx_n_s_HttpParserInvalidStatusError, __pyx_k_HttpParserInvalidStatusError, sizeof(__pyx_k_HttpParserInvalidStatusError), 0, 0, 1, 1},
  {&__pyx_n_s_HttpParserInvalidURLError, __pyx_k_HttpParserInvalidURLError, sizeof(__pyx_k_HttpParserInvalidURLError), 0, 0, 1, 1},
  {&__pyx_n_s_HttpParserUpgrade, __pyx_k_HttpParserUpgrade, sizeof(__pyx_k_HttpParserUpgrade), 0, 0, 1, 1},
  {&__pyx_n_s_HttpRequestParser, __pyx_k_HttpRequestParser, sizeof(__pyx_k_HttpRequestParser), 0, 0, 1, 1},
  {&__pyx_n_u_HttpRequestParser, __pyx_k_HttpRequestParser, sizeof(__pyx_k_HttpRequestParser), 0, 1, 0, 1},
  {&__pyx_n_s_HttpResponseParser, __pyx_k_HttpResponseParser, sizeof(__pyx_k_HttpResponseParser), 0, 0, 1, 1},
  {&__pyx_n_u_HttpResponseParser, __pyx_k_HttpResponseParser, sizeof(__pyx_k_HttpResponseParser), 0, 1, 0, 1},
  {&__pyx_n_s_MemoryError, __pyx_k_MemoryError, sizeof(__pyx_k_MemoryError), 0, 0, 1, 1},
  {&__pyx_n_s_TypeError, __pyx_k_TypeError, sizeof(__pyx_k_TypeError), 0, 0, 1, 1},
  {&__pyx_n_s_all, __pyx_k_all, sizeof(__pyx_k_all), 0, 0, 1, 1},
  {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1},
  {&__pyx_n_s_context, __pyx_k_context, sizeof(__pyx_k_context), 0, 0, 1, 1},
  {&__pyx_n_s_errors, __pyx_k_errors, sizeof(__pyx_k_errors), 0, 0, 1, 1},
  {&__pyx_n_s_format, __pyx_k_format, sizeof(__pyx_k_format), 0, 0, 1, 1},
  {&__pyx_n_s_getstate, __pyx_k_getstate, sizeof(__pyx_k_getstate), 0, 0, 1, 1},
  {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1},
  {&__pyx_kp_u_invalid_headers_state, __pyx_k_invalid_headers_state, sizeof(__pyx_k_invalid_headers_state), 0, 1, 0, 0},
  {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1},
  {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1},
  {&__pyx_kp_s_no_default___reduce___due_to_non, __pyx_k_no_default___reduce___due_to_non, sizeof(__pyx_k_no_default___reduce___due_to_non), 0, 0, 1, 0},
  {&__pyx_n_u_on_body, __pyx_k_on_body, sizeof(__pyx_k_on_body), 0, 1, 0, 1},
  {&__pyx_n_u_on_chunk_complete, __pyx_k_on_chunk_complete, sizeof(__pyx_k_on_chunk_complete), 0, 1, 0, 1},
  {&__pyx_n_u_on_chunk_header, __pyx_k_on_chunk_header, sizeof(__pyx_k_on_chunk_header), 0, 1, 0, 1},
  {&__pyx_n_u_on_header, __pyx_k_on_header, sizeof(__pyx_k_on_header), 0, 1, 0, 1},
  {&__pyx_n_u_on_headers_complete, __pyx_k_on_headers_complete, sizeof(__pyx_k_on_headers_complete), 0, 1, 0, 1},
  {&__pyx_n_u_on_message_begin, __pyx_k_on_message_begin, sizeof(__pyx_k_on_message_begin), 0, 1, 0, 1},
  {&__pyx_n_u_on_message_complete, __pyx_k_on_message_complete, sizeof(__pyx_k_on_message_complete), 0, 1, 0, 1},
  {&__pyx_n_u_on_status, __pyx_k_on_status, sizeof(__pyx_k_on_status), 0, 1, 0, 1},
  {&__pyx_n_u_on_url, __pyx_k_on_url, sizeof(__pyx_k_on_url), 0, 1, 0, 1},
  {&__pyx_n_s_protocol, __pyx_k_protocol, sizeof(__pyx_k_protocol), 0, 0, 1, 1},
  {&__pyx_n_s_pyx_vtable, __pyx_k_pyx_vtable, sizeof(__pyx_k_pyx_vtable), 0, 0, 1, 1},
  {&__pyx_n_s_reduce, __pyx_k_reduce, sizeof(__pyx_k_reduce), 0, 0, 1, 1},
  {&__pyx_n_s_reduce_cython, __pyx_k_reduce_cython, sizeof(__pyx_k_reduce_cython), 0, 0, 1, 1},
  {&__pyx_n_s_reduce_ex, __pyx_k_reduce_ex, sizeof(__pyx_k_reduce_ex), 0, 0, 1, 1},
  {&__pyx_n_s_setstate, __pyx_k_setstate, sizeof(__pyx_k_setstate), 0, 0, 1, 1},
  {&__pyx_n_s_setstate_cython, __pyx_k_setstate_cython, sizeof(__pyx_k_setstate_cython), 0, 0, 1, 1},
  {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1},
  {0, 0, 0, 0, 0, 0, 0}
};
/*
 * Cache the builtin objects (MemoryError, TypeError, BaseException) the
 * module references at runtime.  Called once during module init; returns
 * 0 on success, -1 with an exception set on failure.
 */
static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) {
  __pyx_builtin_MemoryError = __Pyx_GetBuiltinName(__pyx_n_s_MemoryError); if (!__pyx_builtin_MemoryError) __PYX_ERR(0, 48, __pyx_L1_error)
  __pyx_builtin_TypeError = __Pyx_GetBuiltinName(__pyx_n_s_TypeError); if (!__pyx_builtin_TypeError) __PYX_ERR(1, 2, __pyx_L1_error)
  __pyx_builtin_BaseException = __Pyx_GetBuiltinName(__pyx_n_s_BaseException); if (!__pyx_builtin_BaseException) __PYX_ERR(0, 247, __pyx_L1_error)
  return 0;
  __pyx_L1_error:;
  return -1;
}
/* Build the constant tuples used repeatedly at runtime and keep them alive
   for the lifetime of the module.  Tuples __2..__7 are the single-string
   argument tuple for the TypeError raised by the generated
   __reduce_cython__/__setstate_cython__ stubs; tuple __8 is the module's
   __all__ value.  Returns 0 on success, -1 with a Python error set. */
static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) {
  __Pyx_RefNannyDeclarations
  __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0);
  /* "(tree fragment)":2
 * def __reduce_cython__(self):
 *     raise TypeError("no default __reduce__ due to non-trivial __cinit__")             # <<<<<<<<<<<<<<
 * def __setstate_cython__(self, __pyx_state):
 *     raise TypeError("no default __reduce__ due to non-trivial __cinit__")
 */
  __pyx_tuple__2 = PyTuple_Pack(1, __pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple__2)) __PYX_ERR(1, 2, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_tuple__2);
  __Pyx_GIVEREF(__pyx_tuple__2);
  /* "(tree fragment)":4
 *     raise TypeError("no default __reduce__ due to non-trivial __cinit__")
 * def __setstate_cython__(self, __pyx_state):
 *     raise TypeError("no default __reduce__ due to non-trivial __cinit__")             # <<<<<<<<<<<<<<
 */
  __pyx_tuple__3 = PyTuple_Pack(1, __pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple__3)) __PYX_ERR(1, 4, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_tuple__3);
  __Pyx_GIVEREF(__pyx_tuple__3);
  /* "(tree fragment)":2
 * def __reduce_cython__(self):
 *     raise TypeError("no default __reduce__ due to non-trivial __cinit__")             # <<<<<<<<<<<<<<
 * def __setstate_cython__(self, __pyx_state):
 *     raise TypeError("no default __reduce__ due to non-trivial __cinit__")
 */
  __pyx_tuple__4 = PyTuple_Pack(1, __pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple__4)) __PYX_ERR(1, 2, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_tuple__4);
  __Pyx_GIVEREF(__pyx_tuple__4);
  /* "(tree fragment)":4
 *     raise TypeError("no default __reduce__ due to non-trivial __cinit__")
 * def __setstate_cython__(self, __pyx_state):
 *     raise TypeError("no default __reduce__ due to non-trivial __cinit__")             # <<<<<<<<<<<<<<
 */
  __pyx_tuple__5 = PyTuple_Pack(1, __pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple__5)) __PYX_ERR(1, 4, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_tuple__5);
  __Pyx_GIVEREF(__pyx_tuple__5);
  /* "(tree fragment)":2
 * def __reduce_cython__(self):
 *     raise TypeError("no default __reduce__ due to non-trivial __cinit__")             # <<<<<<<<<<<<<<
 * def __setstate_cython__(self, __pyx_state):
 *     raise TypeError("no default __reduce__ due to non-trivial __cinit__")
 */
  __pyx_tuple__6 = PyTuple_Pack(1, __pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple__6)) __PYX_ERR(1, 2, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_tuple__6);
  __Pyx_GIVEREF(__pyx_tuple__6);
  /* "(tree fragment)":4
 *     raise TypeError("no default __reduce__ due to non-trivial __cinit__")
 * def __setstate_cython__(self, __pyx_state):
 *     raise TypeError("no default __reduce__ due to non-trivial __cinit__")             # <<<<<<<<<<<<<<
 */
  __pyx_tuple__7 = PyTuple_Pack(1, __pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple__7)) __PYX_ERR(1, 4, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_tuple__7);
  __Pyx_GIVEREF(__pyx_tuple__7);
  /* "httptools/parser/parser.pyx":22
 * 
 * 
 * __all__ = ('HttpRequestParser', 'HttpResponseParser')             # <<<<<<<<<<<<<<
 * 
 * 
 */
  __pyx_tuple__8 = PyTuple_Pack(2, __pyx_n_u_HttpRequestParser, __pyx_n_u_HttpResponseParser); if (unlikely(!__pyx_tuple__8)) __PYX_ERR(0, 22, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_tuple__8);
  __Pyx_GIVEREF(__pyx_tuple__8);
  __Pyx_RefNannyFinishContext();
  return 0;
  __pyx_L1_error:;
  __Pyx_RefNannyFinishContext();
  return -1;
}
/* Intern/create every string constant listed in __pyx_string_tab.
   Returns 0 on success, -1 with a Python error set on failure. */
static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) {
  if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(0, 1, __pyx_L1_error);
  return 0;
  __pyx_L1_error:;
  return -1;
}
/* Forward declarations for the staged module-initialisation helpers below.
   Each returns 0 on success and -1 with a Python error set on failure. */
static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/
static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/
static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/
static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/
static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/
static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/
static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/
/* Global-variable init phase.  This module declares no C globals that need
   Python-level initialisation, so the generated body is empty apart from
   refnanny bookkeeping. */
static int __Pyx_modinit_global_init_code(void) {
  __Pyx_RefNannyDeclarations
  __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0);
  /*--- Global init code ---*/
  __Pyx_RefNannyFinishContext();
  return 0;
}
/* Variable-export phase.  This module exports no C variables to other
   Cython modules; the generated body is empty. */
static int __Pyx_modinit_variable_export_code(void) {
  __Pyx_RefNannyDeclarations
  __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0);
  /*--- Variable export code ---*/
  __Pyx_RefNannyFinishContext();
  return 0;
}
/* Function-export phase.  This module exports no C functions to other
   Cython modules; the generated body is empty. */
static int __Pyx_modinit_function_export_code(void) {
  __Pyx_RefNannyDeclarations
  __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0);
  /*--- Function export code ---*/
  __Pyx_RefNannyFinishContext();
  return 0;
}
/* Create the extension types defined by this module.  For each of
   HttpParser, HttpRequestParser and HttpResponseParser this fills in the
   C-method vtable, readies the type, installs the vtable capsule in the
   type dict, and hooks up __reduce__/__setstate__ handling.  The two
   subclasses copy the base vtable and set tp_base to HttpParser before
   PyType_Ready; only the subclasses are published as module attributes
   (HttpParser itself is not exported).  Returns 0 on success, -1 with a
   Python error set. */
static int __Pyx_modinit_type_init_code(void) {
  __Pyx_RefNannyDeclarations
  int __pyx_lineno = 0;
  const char *__pyx_filename = NULL;
  int __pyx_clineno = 0;
  __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0);
  /*--- Type init code ---*/
  /* Wire the C-level method slots of the HttpParser vtable. */
  __pyx_vtabptr_9httptools_6parser_6parser_HttpParser = &__pyx_vtable_9httptools_6parser_6parser_HttpParser;
  __pyx_vtable_9httptools_6parser_6parser_HttpParser._init = (PyObject *(*)(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *, PyObject *, llhttp_type_t))__pyx_f_9httptools_6parser_6parser_10HttpParser__init;
  __pyx_vtable_9httptools_6parser_6parser_HttpParser._maybe_call_on_header = (PyObject *(*)(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *))__pyx_f_9httptools_6parser_6parser_10HttpParser__maybe_call_on_header;
  __pyx_vtable_9httptools_6parser_6parser_HttpParser._on_header_field = (PyObject *(*)(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *, PyObject *))__pyx_f_9httptools_6parser_6parser_10HttpParser__on_header_field;
  __pyx_vtable_9httptools_6parser_6parser_HttpParser._on_header_value = (PyObject *(*)(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *, PyObject *))__pyx_f_9httptools_6parser_6parser_10HttpParser__on_header_value;
  __pyx_vtable_9httptools_6parser_6parser_HttpParser._on_headers_complete = (PyObject *(*)(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *))__pyx_f_9httptools_6parser_6parser_10HttpParser__on_headers_complete;
  __pyx_vtable_9httptools_6parser_6parser_HttpParser._on_chunk_header = (PyObject *(*)(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *))__pyx_f_9httptools_6parser_6parser_10HttpParser__on_chunk_header;
  __pyx_vtable_9httptools_6parser_6parser_HttpParser._on_chunk_complete = (PyObject *(*)(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *))__pyx_f_9httptools_6parser_6parser_10HttpParser__on_chunk_complete;
  if (PyType_Ready(&__pyx_type_9httptools_6parser_6parser_HttpParser) < 0) __PYX_ERR(0, 26, __pyx_L1_error)
  #if PY_VERSION_HEX < 0x030800B1
  __pyx_type_9httptools_6parser_6parser_HttpParser.tp_print = 0;
  #endif
  if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_9httptools_6parser_6parser_HttpParser.tp_dictoffset && __pyx_type_9httptools_6parser_6parser_HttpParser.tp_getattro == PyObject_GenericGetAttr)) {
    __pyx_type_9httptools_6parser_6parser_HttpParser.tp_getattro = __Pyx_PyObject_GenericGetAttr;
  }
  if (__Pyx_SetVtable(__pyx_type_9httptools_6parser_6parser_HttpParser.tp_dict, __pyx_vtabptr_9httptools_6parser_6parser_HttpParser) < 0) __PYX_ERR(0, 26, __pyx_L1_error)
  if (__Pyx_setup_reduce((PyObject*)&__pyx_type_9httptools_6parser_6parser_HttpParser) < 0) __PYX_ERR(0, 26, __pyx_L1_error)
  __pyx_ptype_9httptools_6parser_6parser_HttpParser = &__pyx_type_9httptools_6parser_6parser_HttpParser;
  /* HttpRequestParser: inherit the base vtable, then publish the type. */
  __pyx_vtabptr_9httptools_6parser_6parser_HttpRequestParser = &__pyx_vtable_9httptools_6parser_6parser_HttpRequestParser;
  __pyx_vtable_9httptools_6parser_6parser_HttpRequestParser.__pyx_base = *__pyx_vtabptr_9httptools_6parser_6parser_HttpParser;
  __pyx_type_9httptools_6parser_6parser_HttpRequestParser.tp_base = __pyx_ptype_9httptools_6parser_6parser_HttpParser;
  if (PyType_Ready(&__pyx_type_9httptools_6parser_6parser_HttpRequestParser) < 0) __PYX_ERR(0, 215, __pyx_L1_error)
  #if PY_VERSION_HEX < 0x030800B1
  __pyx_type_9httptools_6parser_6parser_HttpRequestParser.tp_print = 0;
  #endif
  if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_9httptools_6parser_6parser_HttpRequestParser.tp_dictoffset && __pyx_type_9httptools_6parser_6parser_HttpRequestParser.tp_getattro == PyObject_GenericGetAttr)) {
    __pyx_type_9httptools_6parser_6parser_HttpRequestParser.tp_getattro = __Pyx_PyObject_GenericGetAttr;
  }
  if (__Pyx_SetVtable(__pyx_type_9httptools_6parser_6parser_HttpRequestParser.tp_dict, __pyx_vtabptr_9httptools_6parser_6parser_HttpRequestParser) < 0) __PYX_ERR(0, 215, __pyx_L1_error)
  if (PyObject_SetAttr(__pyx_m, __pyx_n_s_HttpRequestParser, (PyObject *)&__pyx_type_9httptools_6parser_6parser_HttpRequestParser) < 0) __PYX_ERR(0, 215, __pyx_L1_error)
  if (__Pyx_setup_reduce((PyObject*)&__pyx_type_9httptools_6parser_6parser_HttpRequestParser) < 0) __PYX_ERR(0, 215, __pyx_L1_error)
  __pyx_ptype_9httptools_6parser_6parser_HttpRequestParser = &__pyx_type_9httptools_6parser_6parser_HttpRequestParser;
  /* HttpResponseParser: same pattern as HttpRequestParser. */
  __pyx_vtabptr_9httptools_6parser_6parser_HttpResponseParser = &__pyx_vtable_9httptools_6parser_6parser_HttpResponseParser;
  __pyx_vtable_9httptools_6parser_6parser_HttpResponseParser.__pyx_base = *__pyx_vtabptr_9httptools_6parser_6parser_HttpParser;
  __pyx_type_9httptools_6parser_6parser_HttpResponseParser.tp_base = __pyx_ptype_9httptools_6parser_6parser_HttpParser;
  if (PyType_Ready(&__pyx_type_9httptools_6parser_6parser_HttpResponseParser) < 0) __PYX_ERR(0, 229, __pyx_L1_error)
  #if PY_VERSION_HEX < 0x030800B1
  __pyx_type_9httptools_6parser_6parser_HttpResponseParser.tp_print = 0;
  #endif
  if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_9httptools_6parser_6parser_HttpResponseParser.tp_dictoffset && __pyx_type_9httptools_6parser_6parser_HttpResponseParser.tp_getattro == PyObject_GenericGetAttr)) {
    __pyx_type_9httptools_6parser_6parser_HttpResponseParser.tp_getattro = __Pyx_PyObject_GenericGetAttr;
  }
  if (__Pyx_SetVtable(__pyx_type_9httptools_6parser_6parser_HttpResponseParser.tp_dict, __pyx_vtabptr_9httptools_6parser_6parser_HttpResponseParser) < 0) __PYX_ERR(0, 229, __pyx_L1_error)
  if (PyObject_SetAttr(__pyx_m, __pyx_n_s_HttpResponseParser, (PyObject *)&__pyx_type_9httptools_6parser_6parser_HttpResponseParser) < 0) __PYX_ERR(0, 229, __pyx_L1_error)
  if (__Pyx_setup_reduce((PyObject*)&__pyx_type_9httptools_6parser_6parser_HttpResponseParser) < 0) __PYX_ERR(0, 229, __pyx_L1_error)
  __pyx_ptype_9httptools_6parser_6parser_HttpResponseParser = &__pyx_type_9httptools_6parser_6parser_HttpResponseParser;
  __Pyx_RefNannyFinishContext();
  return 0;
  __pyx_L1_error:;
  __Pyx_RefNannyFinishContext();
  return -1;
}
/* Import the builtin types (type, bool, complex) whose C layouts the
   generated code relies on, verifying their struct sizes against this
   build's headers (warn-only check).  Returns 0 on success, -1 with a
   Python error set. */
static int __Pyx_modinit_type_import_code(void) {
  __Pyx_RefNannyDeclarations
  PyObject *__pyx_t_1 = NULL;
  int __pyx_lineno = 0;
  const char *__pyx_filename = NULL;
  int __pyx_clineno = 0;
  __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0);
  /*--- Type import code ---*/
  __pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 9, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  __pyx_ptype_7cpython_4type_type = __Pyx_ImportType(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "type", 
  #if defined(PYPY_VERSION_NUM) && PYPY_VERSION_NUM < 0x050B0000
  sizeof(PyTypeObject),
  #else
  sizeof(PyHeapTypeObject),
  #endif
  __Pyx_ImportType_CheckSize_Warn);
   if (!__pyx_ptype_7cpython_4type_type) __PYX_ERR(2, 9, __pyx_L1_error)
  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
  __pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(3, 8, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  __pyx_ptype_7cpython_4bool_bool = __Pyx_ImportType(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "bool", sizeof(PyBoolObject), __Pyx_ImportType_CheckSize_Warn);
   if (!__pyx_ptype_7cpython_4bool_bool) __PYX_ERR(3, 8, __pyx_L1_error)
  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
  __pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(4, 15, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  __pyx_ptype_7cpython_7complex_complex = __Pyx_ImportType(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "complex", sizeof(PyComplexObject), __Pyx_ImportType_CheckSize_Warn);
   if (!__pyx_ptype_7cpython_7complex_complex) __PYX_ERR(4, 15, __pyx_L1_error)
  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
  __Pyx_RefNannyFinishContext();
  return 0;
  __pyx_L1_error:;
  __Pyx_XDECREF(__pyx_t_1);
  __Pyx_RefNannyFinishContext();
  return -1;
}
/* Variable-import phase.  This module imports no C variables from other
   Cython modules; the generated body is empty. */
static int __Pyx_modinit_variable_import_code(void) {
  __Pyx_RefNannyDeclarations
  __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0);
  /*--- Variable import code ---*/
  __Pyx_RefNannyFinishContext();
  return 0;
}
/* Function-import phase.  This module imports no C functions from other
   Cython modules; the generated body is empty. */
static int __Pyx_modinit_function_import_code(void) {
  __Pyx_RefNannyDeclarations
  __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0);
  /*--- Function import code ---*/
  __Pyx_RefNannyFinishContext();
  return 0;
}
/* Pick the correct linkage and return type for the module init entry point:
   'void' on Python 2, 'PyObject *' on Python 3, with extern "C" under C++,
   or the standard PyMODINIT_FUNC when symbol export is enabled. */
#ifndef CYTHON_NO_PYINIT_EXPORT
#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC
#elif PY_MAJOR_VERSION < 3
#ifdef __cplusplus
#define __Pyx_PyMODINIT_FUNC extern "C" void
#else
#define __Pyx_PyMODINIT_FUNC void
#endif
#else
#ifdef __cplusplus
#define __Pyx_PyMODINIT_FUNC extern "C" PyObject *
#else
#define __Pyx_PyMODINIT_FUNC PyObject *
#endif
#endif
/* Module entry point: initparser() on Python 2, PyInit_parser() on Python 3.
   Under PEP 489 multi-phase init the entry point only hands back the module
   definition; the real initialisation runs later in
   __pyx_pymod_exec_parser().  NOTE: the brace opened below is closed (for
   the single-phase builds) only after the trailing #endif pair further
   down — do not rebalance braces here. */
#if PY_MAJOR_VERSION < 3
__Pyx_PyMODINIT_FUNC initparser(void) CYTHON_SMALL_CODE; /*proto*/
__Pyx_PyMODINIT_FUNC initparser(void)
#else
__Pyx_PyMODINIT_FUNC PyInit_parser(void) CYTHON_SMALL_CODE; /*proto*/
__Pyx_PyMODINIT_FUNC PyInit_parser(void)
#if CYTHON_PEP489_MULTI_PHASE_INIT
{
  return PyModuleDef_Init(&__pyx_moduledef);
}
/* Refuse to load this module into more than one interpreter in the same
   process: remember the first interpreter (by ID on 3.7+, by pointer
   before that) and raise ImportError on any later, different one.
   Returns 0 if loading may proceed, -1 otherwise. */
static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) {
    #if PY_VERSION_HEX >= 0x030700A1
    static PY_INT64_T main_interpreter_id = -1;
    PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp);
    if (main_interpreter_id == -1) {
        main_interpreter_id = current_id;
        return (unlikely(current_id == -1)) ? -1 : 0;
    } else if (unlikely(main_interpreter_id != current_id))
    #else
    static PyInterpreterState *main_interpreter = NULL;
    PyInterpreterState *current_interpreter = PyThreadState_Get()->interp;
    if (!main_interpreter) {
        main_interpreter = current_interpreter;
    } else if (unlikely(main_interpreter != current_interpreter))
    #endif
    {
        PyErr_SetString(
            PyExc_ImportError,
            "Interpreter change detected - this module can only be loaded into one interpreter per process.");
        return -1;
    }
    return 0;
}
/* Copy one attribute of a module spec into the module dict under a new
   name.  A missing attribute is not an error: the AttributeError is
   cleared and 0 is returned.  Py_None values are skipped unless
   allow_none is set.  Returns 0 on success, -1 with a Python error set
   otherwise. */
static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) {
    int rc;
    PyObject *attr = PyObject_GetAttrString(spec, from_name);
    if (unlikely(!attr)) {
        /* Only AttributeError is benign; anything else propagates. */
        if (!PyErr_ExceptionMatches(PyExc_AttributeError))
            return -1;
        PyErr_Clear();
        return 0;
    }
    rc = (allow_none || attr != Py_None)
        ? PyDict_SetItemString(moddict, to_name, attr)
        : 0;
    Py_DECREF(attr);
    return rc;
}
/* PEP 489 Py_mod_create slot: build the module object from its spec,
   copying __loader__, __file__, __package__ and __path__ out of the spec
   into the module dict.  Enforces the single-interpreter restriction and
   returns the already-created module if init ran before.  Returns NULL
   with a Python error set on failure. */
static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, CYTHON_UNUSED PyModuleDef *def) {
    PyObject *module = NULL, *moddict, *modname;
    if (__Pyx_check_single_interpreter())
        return NULL;
    if (__pyx_m)
        return __Pyx_NewRef(__pyx_m);
    modname = PyObject_GetAttrString(spec, "name");
    if (unlikely(!modname)) goto bad;
    module = PyModule_NewObject(modname);
    Py_DECREF(modname);
    if (unlikely(!module)) goto bad;
    moddict = PyModule_GetDict(module);
    if (unlikely(!moddict)) goto bad;
    /* moddict is a borrowed reference; no DECREF needed. */
    if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad;
    if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad;
    if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad;
    if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad;
    return module;
bad:
    Py_XDECREF(module);
    return NULL;
}
/* Module execution body (PEP 489 Py_mod_exec slot; also the body of the
   single-phase init entry point via the #endif pair below).  Sets up the
   interned constants, builtin caches and extension types, then runs the
   module-level statements of parser.pyx: importing the exception types
   from httptools.parser.errors and publishing __all__.  Returns 0/module
   on success depending on init protocol; cleans up and reports
   "init httptools.parser.parser" on failure. */
static CYTHON_SMALL_CODE int __pyx_pymod_exec_parser(PyObject *__pyx_pyinit_module)
#endif
#endif
{
  PyObject *__pyx_t_1 = NULL;
  PyObject *__pyx_t_2 = NULL;
  int __pyx_lineno = 0;
  const char *__pyx_filename = NULL;
  int __pyx_clineno = 0;
  __Pyx_RefNannyDeclarations
  #if CYTHON_PEP489_MULTI_PHASE_INIT
  if (__pyx_m) {
    if (__pyx_m == __pyx_pyinit_module) return 0;
    PyErr_SetString(PyExc_RuntimeError, "Module 'parser' has already been imported. Re-initialisation is not supported.");
    return -1;
  }
  #elif PY_MAJOR_VERSION >= 3
  if (__pyx_m) return __Pyx_NewRef(__pyx_m);
  #endif
  #if CYTHON_REFNANNY
__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny");
if (!__Pyx_RefNanny) {
  PyErr_Clear();
  __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny");
  if (!__Pyx_RefNanny)
      Py_FatalError("failed to import 'refnanny' module");
}
#endif
  __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit_parser(void)", 0);
  if (__Pyx_check_binary_version() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
  #ifdef __Pxy_PyFrame_Initialize_Offsets
  __Pxy_PyFrame_Initialize_Offsets();
  #endif
  __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error)
  __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error)
  __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error)
  #ifdef __Pyx_CyFunction_USED
  if (__pyx_CyFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
  #endif
  #ifdef __Pyx_FusedFunction_USED
  if (__pyx_FusedFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
  #endif
  #ifdef __Pyx_Coroutine_USED
  if (__pyx_Coroutine_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
  #endif
  #ifdef __Pyx_Generator_USED
  if (__pyx_Generator_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
  #endif
  #ifdef __Pyx_AsyncGen_USED
  if (__pyx_AsyncGen_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
  #endif
  #ifdef __Pyx_StopAsyncIteration_USED
  if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
  #endif
  /*--- Library function declarations ---*/
  /*--- Threads initialization code ---*/
  #if defined(WITH_THREAD) && PY_VERSION_HEX < 0x030700F0 && defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS
  PyEval_InitThreads();
  #endif
  /*--- Module creation code ---*/
  #if CYTHON_PEP489_MULTI_PHASE_INIT
  __pyx_m = __pyx_pyinit_module;
  Py_INCREF(__pyx_m);
  #else
  #if PY_MAJOR_VERSION < 3
  __pyx_m = Py_InitModule4("parser", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m);
  #else
  __pyx_m = PyModule_Create(&__pyx_moduledef);
  #endif
  if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error)
  #endif
  __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error)
  Py_INCREF(__pyx_d);
  __pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error)
  Py_INCREF(__pyx_b);
  __pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error)
  Py_INCREF(__pyx_cython_runtime);
  if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error);
  /*--- Initialize various global constants etc. ---*/
  if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
  #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT)
  if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
  #endif
  if (__pyx_module_is_main_httptools__parser__parser) {
    if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name, __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error)
  }
  #if PY_MAJOR_VERSION >= 3
  {
    PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error)
    if (!PyDict_GetItemString(modules, "httptools.parser.parser")) {
      if (unlikely(PyDict_SetItemString(modules, "httptools.parser.parser", __pyx_m) < 0)) __PYX_ERR(0, 1, __pyx_L1_error)
    }
  }
  #endif
  /*--- Builtin init code ---*/
  if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
  /*--- Constants init code ---*/
  if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
  /*--- Global type/function init code ---*/
  (void)__Pyx_modinit_global_init_code();
  (void)__Pyx_modinit_variable_export_code();
  (void)__Pyx_modinit_function_export_code();
  if (unlikely(__Pyx_modinit_type_init_code() < 0)) __PYX_ERR(0, 1, __pyx_L1_error)
  if (unlikely(__Pyx_modinit_type_import_code() < 0)) __PYX_ERR(0, 1, __pyx_L1_error)
  (void)__Pyx_modinit_variable_import_code();
  (void)__Pyx_modinit_function_import_code();
  /*--- Execution code ---*/
  #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED)
  if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
  #endif
  /* "httptools/parser/parser.pyx":11
 * 
 * 
 * from .errors import (HttpParserError,             # <<<<<<<<<<<<<<
 *                      HttpParserCallbackError,
 *                      HttpParserInvalidStatusError,
 */
  __pyx_t_1 = PyList_New(6); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 11, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  __Pyx_INCREF(__pyx_n_s_HttpParserError);
  __Pyx_GIVEREF(__pyx_n_s_HttpParserError);
  PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_HttpParserError);
  __Pyx_INCREF(__pyx_n_s_HttpParserCallbackError);
  __Pyx_GIVEREF(__pyx_n_s_HttpParserCallbackError);
  PyList_SET_ITEM(__pyx_t_1, 1, __pyx_n_s_HttpParserCallbackError);
  __Pyx_INCREF(__pyx_n_s_HttpParserInvalidStatusError);
  __Pyx_GIVEREF(__pyx_n_s_HttpParserInvalidStatusError);
  PyList_SET_ITEM(__pyx_t_1, 2, __pyx_n_s_HttpParserInvalidStatusError);
  __Pyx_INCREF(__pyx_n_s_HttpParserInvalidMethodError);
  __Pyx_GIVEREF(__pyx_n_s_HttpParserInvalidMethodError);
  PyList_SET_ITEM(__pyx_t_1, 3, __pyx_n_s_HttpParserInvalidMethodError);
  __Pyx_INCREF(__pyx_n_s_HttpParserInvalidURLError);
  __Pyx_GIVEREF(__pyx_n_s_HttpParserInvalidURLError);
  PyList_SET_ITEM(__pyx_t_1, 4, __pyx_n_s_HttpParserInvalidURLError);
  __Pyx_INCREF(__pyx_n_s_HttpParserUpgrade);
  __Pyx_GIVEREF(__pyx_n_s_HttpParserUpgrade);
  PyList_SET_ITEM(__pyx_t_1, 5, __pyx_n_s_HttpParserUpgrade);
  __pyx_t_2 = __Pyx_Import(__pyx_n_s_errors, __pyx_t_1, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 11, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_2);
  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
  __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_HttpParserError); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 11, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  if (PyDict_SetItem(__pyx_d, __pyx_n_s_HttpParserError, __pyx_t_1) < 0) __PYX_ERR(0, 11, __pyx_L1_error)
  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
  __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_HttpParserCallbackError); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 11, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  if (PyDict_SetItem(__pyx_d, __pyx_n_s_HttpParserCallbackError, __pyx_t_1) < 0) __PYX_ERR(0, 12, __pyx_L1_error)
  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
  __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_HttpParserInvalidStatusError); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 11, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  if (PyDict_SetItem(__pyx_d, __pyx_n_s_HttpParserInvalidStatusError, __pyx_t_1) < 0) __PYX_ERR(0, 13, __pyx_L1_error)
  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
  __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_HttpParserInvalidMethodError); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 11, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  if (PyDict_SetItem(__pyx_d, __pyx_n_s_HttpParserInvalidMethodError, __pyx_t_1) < 0) __PYX_ERR(0, 14, __pyx_L1_error)
  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
  __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_HttpParserInvalidURLError); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 11, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  if (PyDict_SetItem(__pyx_d, __pyx_n_s_HttpParserInvalidURLError, __pyx_t_1) < 0) __PYX_ERR(0, 15, __pyx_L1_error)
  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
  __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_HttpParserUpgrade); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 11, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  if (PyDict_SetItem(__pyx_d, __pyx_n_s_HttpParserUpgrade, __pyx_t_1) < 0) __PYX_ERR(0, 16, __pyx_L1_error)
  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
  __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
  /* "httptools/parser/parser.pyx":22
 * 
 * 
 * __all__ = ('HttpRequestParser', 'HttpResponseParser')             # <<<<<<<<<<<<<<
 * 
 * 
 */
  if (PyDict_SetItem(__pyx_d, __pyx_n_s_all, __pyx_tuple__8) < 0) __PYX_ERR(0, 22, __pyx_L1_error)
  /* "httptools/parser/parser.pyx":1
 * #cython: language_level=3             # <<<<<<<<<<<<<<
 * 
 * from __future__ import print_function
 */
  __pyx_t_2 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_2);
  if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_2) < 0) __PYX_ERR(0, 1, __pyx_L1_error)
  __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
  /*--- Wrapped vars code ---*/
  goto __pyx_L0;
  __pyx_L1_error:;
  __Pyx_XDECREF(__pyx_t_1);
  __Pyx_XDECREF(__pyx_t_2);
  if (__pyx_m) {
    if (__pyx_d) {
      __Pyx_AddTraceback("init httptools.parser.parser", __pyx_clineno, __pyx_lineno, __pyx_filename);
    }
    Py_CLEAR(__pyx_m);
  } else if (!PyErr_Occurred()) {
    PyErr_SetString(PyExc_ImportError, "init httptools.parser.parser");
  }
  __pyx_L0:;
  __Pyx_RefNannyFinishContext();
  #if CYTHON_PEP489_MULTI_PHASE_INIT
  return (__pyx_m != NULL) ? 0 : -1;
  #elif PY_MAJOR_VERSION >= 3
  return __pyx_m;
  #else
  return;
  #endif
}
/* --- Runtime support code --- */
/* Refnanny */
/* Fetch the RefNannyAPI function table (a void pointer stored as a Python
   int attribute) from the named refnanny module.  Debug-build only;
   returns NULL on any failure. */
#if CYTHON_REFNANNY
static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) {
    PyObject *m = NULL, *p = NULL;
    void *r = NULL;
    m = PyImport_ImportModule(modname);
    if (!m) goto end;
    p = PyObject_GetAttrString(m, "RefNannyAPI");
    if (!p) goto end;
    r = PyLong_AsVoidPtr(p);
end:
    Py_XDECREF(p);
    Py_XDECREF(m);
    return (__Pyx_RefNannyAPIStruct *)r;
}
#endif
/* PyObjectGetAttrStr */
#if CYTHON_USE_TYPE_SLOTS
static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) {
PyTypeObject* tp = Py_TYPE(obj);
if (likely(tp->tp_getattro))
return tp->tp_getattro(obj, attr_name);
#if PY_MAJOR_VERSION < 3
if (likely(tp->tp_getattr))
return tp->tp_getattr(obj, PyString_AS_STRING(attr_name));
#endif
return PyObject_GetAttr(obj, attr_name);
}
#endif
/* GetBuiltinName */
static PyObject *__Pyx_GetBuiltinName(PyObject *name) {
PyObject* result = __Pyx_PyObject_GetAttrStr(__pyx_b, name);
if (unlikely(!result)) {
PyErr_Format(PyExc_NameError,
#if PY_MAJOR_VERSION >= 3
"name '%U' is not defined", name);
#else
"name '%.200s' is not defined", PyString_AS_STRING(name));
#endif
}
return result;
}
/* RaiseArgTupleInvalid */
static void __Pyx_RaiseArgtupleInvalid(
const char* func_name,
int exact,
Py_ssize_t num_min,
Py_ssize_t num_max,
Py_ssize_t num_found)
{
Py_ssize_t num_expected;
const char *more_or_less;
if (num_found < num_min) {
num_expected = num_min;
more_or_less = "at least";
} else {
num_expected = num_max;
more_or_less = "at most";
}
if (exact) {
more_or_less = "exactly";
}
PyErr_Format(PyExc_TypeError,
"%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)",
func_name, more_or_less, num_expected,
(num_expected == 1) ? "" : "s", num_found);
}
/* KeywordStringCheck */
static int __Pyx_CheckKeywordStrings(
PyObject *kwdict,
const char* function_name,
int kw_allowed)
{
PyObject* key = 0;
Py_ssize_t pos = 0;
#if CYTHON_COMPILING_IN_PYPY
if (!kw_allowed && PyDict_Next(kwdict, &pos, &key, 0))
goto invalid_keyword;
return 1;
#else
while (PyDict_Next(kwdict, &pos, &key, 0)) {
#if PY_MAJOR_VERSION < 3
if (unlikely(!PyString_Check(key)))
#endif
if (unlikely(!PyUnicode_Check(key)))
goto invalid_keyword_type;
}
if ((!kw_allowed) && unlikely(key))
goto invalid_keyword;
return 1;
invalid_keyword_type:
PyErr_Format(PyExc_TypeError,
"%.200s() keywords must be strings", function_name);
return 0;
#endif
invalid_keyword:
PyErr_Format(PyExc_TypeError,
#if PY_MAJOR_VERSION < 3
"%.200s() got an unexpected keyword argument '%.200s'",
function_name, PyString_AsString(key));
#else
"%s() got an unexpected keyword argument '%U'",
function_name, key);
#endif
return 0;
}
/* PyErrExceptionMatches */
#if CYTHON_FAST_THREAD_STATE
static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) {
Py_ssize_t i, n;
n = PyTuple_GET_SIZE(tuple);
#if PY_MAJOR_VERSION >= 3
for (i=0; i<n; i++) {
if (exc_type == PyTuple_GET_ITEM(tuple, i)) return 1;
}
#endif
for (i=0; i<n; i++) {
if (__Pyx_PyErr_GivenExceptionMatches(exc_type, PyTuple_GET_ITEM(tuple, i))) return 1;
}
return 0;
}
static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err) {
PyObject *exc_type = tstate->curexc_type;
if (exc_type == err) return 1;
if (unlikely(!exc_type)) return 0;
if (unlikely(PyTuple_Check(err)))
return __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err);
return __Pyx_PyErr_GivenExceptionMatches(exc_type, err);
}
#endif
/* PyErrFetchRestore */
#if CYTHON_FAST_THREAD_STATE
static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) {
PyObject *tmp_type, *tmp_value, *tmp_tb;
tmp_type = tstate->curexc_type;
tmp_value = tstate->curexc_value;
tmp_tb = tstate->curexc_traceback;
tstate->curexc_type = type;
tstate->curexc_value = value;
tstate->curexc_traceback = tb;
Py_XDECREF(tmp_type);
Py_XDECREF(tmp_value);
Py_XDECREF(tmp_tb);
}
static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) {
*type = tstate->curexc_type;
*value = tstate->curexc_value;
*tb = tstate->curexc_traceback;
tstate->curexc_type = 0;
tstate->curexc_value = 0;
tstate->curexc_traceback = 0;
}
#endif
/* GetAttr */
static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *o, PyObject *n) {
#if CYTHON_USE_TYPE_SLOTS
#if PY_MAJOR_VERSION >= 3
if (likely(PyUnicode_Check(n)))
#else
if (likely(PyString_Check(n)))
#endif
return __Pyx_PyObject_GetAttrStr(o, n);
#endif
return PyObject_GetAttr(o, n);
}
/* GetAttr3 */
static PyObject *__Pyx_GetAttr3Default(PyObject *d) {
__Pyx_PyThreadState_declare
__Pyx_PyThreadState_assign
if (unlikely(!__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError)))
return NULL;
__Pyx_PyErr_Clear();
Py_INCREF(d);
return d;
}
static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *o, PyObject *n, PyObject *d) {
PyObject *r = __Pyx_GetAttr(o, n);
return (likely(r)) ? r : __Pyx_GetAttr3Default(d);
}
/* PyFunctionFastCall */
#if CYTHON_FAST_PYCALL
/* Evaluate a simple Python function (no kwargs, no defaults in play) by
   building a frame directly and copying the positional args into its
   fastlocals.  The recursion-depth adjustment around Py_DECREF(f)
   compensates for the frame-dealloc bookkeeping.  Returns a new
   reference or NULL with an error set. */
static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na,
                                               PyObject *globals) {
    PyFrameObject *f;
    PyThreadState *tstate = __Pyx_PyThreadState_Current;
    PyObject **fastlocals;
    Py_ssize_t i;
    PyObject *result;
    assert(globals != NULL);
    /* XXX Perhaps we should create a specialized
       PyFrame_New() that doesn't take locals, but does
       take builtins without sanity checking them.
       */
    assert(tstate != NULL);
    f = PyFrame_New(tstate, co, globals, NULL);
    if (f == NULL) {
        return NULL;
    }
    fastlocals = __Pyx_PyFrame_GetLocalsplus(f);
    for (i = 0; i < na; i++) {
        Py_INCREF(*args);
        fastlocals[i] = *args++;
    }
    result = PyEval_EvalFrameEx(f,0);
    ++tstate->recursion_depth;
    Py_DECREF(f);
    --tstate->recursion_depth;
    return result;
}
#if 1 || PY_VERSION_HEX < 0x030600B1
/* General fast call for a Python function with positional args and an
   optional kwargs dict.  Takes the frame-based no-kwargs fast path when
   the code object is a plain optimized function with no defaults (or all
   defaults), otherwise flattens kwargs into a key/value tuple and goes
   through PyEval_EvalCodeEx.  Returns a new reference or NULL with an
   error set. */
static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs) {
    PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func);
    PyObject *globals = PyFunction_GET_GLOBALS(func);
    PyObject *argdefs = PyFunction_GET_DEFAULTS(func);
    PyObject *closure;
#if PY_MAJOR_VERSION >= 3
    PyObject *kwdefs;
#endif
    PyObject *kwtuple, **k;
    PyObject **d;
    Py_ssize_t nd;
    Py_ssize_t nk;
    PyObject *result;
    assert(kwargs == NULL || PyDict_Check(kwargs));
    nk = kwargs ? PyDict_Size(kwargs) : 0;
    if (Py_EnterRecursiveCall((char*)" while calling a Python object")) {
        return NULL;
    }
    if (
#if PY_MAJOR_VERSION >= 3
            co->co_kwonlyargcount == 0 &&
#endif
            likely(kwargs == NULL || nk == 0) &&
            co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) {
        if (argdefs == NULL && co->co_argcount == nargs) {
            result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals);
            goto done;
        }
        else if (nargs == 0 && argdefs != NULL
                 && co->co_argcount == Py_SIZE(argdefs)) {
            /* function called with no arguments, but all parameters have
               a default value: use default values as arguments .*/
            args = &PyTuple_GET_ITEM(argdefs, 0);
            result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals);
            goto done;
        }
    }
    if (kwargs != NULL) {
        Py_ssize_t pos, i;
        /* Flatten the kwargs dict into a [key0, val0, key1, val1, ...] tuple. */
        kwtuple = PyTuple_New(2 * nk);
        if (kwtuple == NULL) {
            result = NULL;
            goto done;
        }
        k = &PyTuple_GET_ITEM(kwtuple, 0);
        pos = i = 0;
        while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) {
            Py_INCREF(k[i]);
            Py_INCREF(k[i+1]);
            i += 2;
        }
        nk = i / 2;
    }
    else {
        kwtuple = NULL;
        k = NULL;
    }
    closure = PyFunction_GET_CLOSURE(func);
#if PY_MAJOR_VERSION >= 3
    kwdefs = PyFunction_GET_KW_DEFAULTS(func);
#endif
    if (argdefs != NULL) {
        d = &PyTuple_GET_ITEM(argdefs, 0);
        nd = Py_SIZE(argdefs);
    }
    else {
        d = NULL;
        nd = 0;
    }
#if PY_MAJOR_VERSION >= 3
    result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL,
                               args, (int)nargs,
                               k, (int)nk,
                               d, (int)nd, kwdefs, closure);
#else
    result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL,
                               args, (int)nargs,
                               k, (int)nk,
                               d, (int)nd, closure);
#endif
    Py_XDECREF(kwtuple);
done:
    Py_LeaveRecursiveCall();
    return result;
}
#endif
#endif
/* PyCFunctionFastCall */
#if CYTHON_FAST_PYCCALL
/* Fast call of a built-in (C) function that uses the METH_FASTCALL
   calling convention: invokes the underlying C function pointer
   directly with the argument vector, bypassing tuple packing.
   No keyword arguments are supported here (NULL is passed for them).
   Returns a new reference, or NULL with an exception set. */
static CYTHON_INLINE PyObject * __Pyx_PyCFunction_FastCall(PyObject *func_obj, PyObject **args, Py_ssize_t nargs) {
    PyCFunctionObject *func = (PyCFunctionObject*)func_obj;
    PyCFunction meth = PyCFunction_GET_FUNCTION(func);
    PyObject *self = PyCFunction_GET_SELF(func);
    int flags = PyCFunction_GET_FLAGS(func);
    assert(PyCFunction_Check(func));
    assert(METH_FASTCALL == (flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS)));
    assert(nargs >= 0);
    assert(nargs == 0 || args != NULL);
    /* _PyCFunction_FastCallDict() must not be called with an exception set,
       because it may clear it (directly or indirectly) and so the
       caller loses its exception */
    assert(!PyErr_Occurred());
    /* The two casts select the matching C signature: with-keywords
       (extra kwnames parameter) vs. positional-only. */
    if ((PY_VERSION_HEX < 0x030700A0) || unlikely(flags & METH_KEYWORDS)) {
        return (*((__Pyx_PyCFunctionFastWithKeywords)(void*)meth)) (self, args, nargs, NULL);
    } else {
        return (*((__Pyx_PyCFunctionFast)(void*)meth)) (self, args, nargs);
    }
}
#endif
/* PyObjectCall */
#if CYTHON_COMPILING_IN_CPYTHON
/* Inlined equivalent of PyObject_Call: looks up tp_call directly and
   wraps the call in the recursion guard. Also normalizes the broken
   case where a callable returns NULL without setting an exception,
   turning it into a SystemError. Returns a new reference or NULL. */
static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) {
    PyObject *result;
    ternaryfunc call = Py_TYPE(func)->tp_call;
    if (unlikely(!call))
        /* Not callable: defer to PyObject_Call for the proper TypeError. */
        return PyObject_Call(func, arg, kw);
    if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object")))
        return NULL;
    result = (*call)(func, arg, kw);
    Py_LeaveRecursiveCall();
    if (unlikely(!result) && unlikely(!PyErr_Occurred())) {
        PyErr_SetString(
            PyExc_SystemError,
            "NULL result without error in PyObject_Call");
    }
    return result;
}
#endif
/* PyObjectCallMethO */
#if CYTHON_COMPILING_IN_CPYTHON
/* Call a built-in function/method object directly through its C
   function pointer with a single argument object (METH_O style; the
   caller is responsible for the calling convention matching). As in
   __Pyx_PyObject_Call, a NULL result with no exception set is turned
   into a SystemError. Returns a new reference or NULL. */
static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) {
    PyObject *self, *result;
    PyCFunction cfunc;
    cfunc = PyCFunction_GET_FUNCTION(func);
    self = PyCFunction_GET_SELF(func);
    if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object")))
        return NULL;
    result = cfunc(self, arg);
    Py_LeaveRecursiveCall();
    if (unlikely(!result) && unlikely(!PyErr_Occurred())) {
        PyErr_SetString(
            PyExc_SystemError,
            "NULL result without error in PyObject_Call");
    }
    return result;
}
#endif
/* PyObjectCallNoArg */
#if CYTHON_COMPILING_IN_CPYTHON
/* Call func with zero arguments, dispatching to the fastest available
   path: frame fast-call for Python functions, direct C call for
   METH_NOARGS built-ins, and a generic call with an empty tuple as the
   fallback. Returns a new reference, or NULL with an exception set. */
static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func) {
#if CYTHON_FAST_PYCALL
    if (PyFunction_Check(func)) {
        return __Pyx_PyFunction_FastCall(func, NULL, 0);
    }
#endif
#ifdef __Pyx_CyFunction_USED
    if (likely(PyCFunction_Check(func) || __Pyx_CyFunction_Check(func)))
#else
    if (likely(PyCFunction_Check(func)))
#endif
    {
        if (likely(PyCFunction_GET_FLAGS(func) & METH_NOARGS)) {
            return __Pyx_PyObject_CallMethO(func, NULL);
        }
    }
    return __Pyx_PyObject_Call(func, __pyx_empty_tuple, NULL);
}
#endif
/* PyObjectCallOneArg */
#if CYTHON_COMPILING_IN_CPYTHON
/* Generic fallback: pack the single argument into a 1-tuple and make a
   normal call. Returns a new reference or NULL. */
static PyObject* __Pyx__PyObject_CallOneArg(PyObject *func, PyObject *arg) {
    PyObject *result;
    PyObject *args = PyTuple_New(1);
    if (unlikely(!args)) return NULL;
    Py_INCREF(arg);
    PyTuple_SET_ITEM(args, 0, arg);
    result = __Pyx_PyObject_Call(func, args, NULL);
    Py_DECREF(args);
    return result;
}
/* Call func with exactly one positional argument, using the fastest
   applicable path (Python fast-call, METH_O direct call, METH_FASTCALL,
   then the tuple-packing fallback above). */
static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) {
#if CYTHON_FAST_PYCALL
    if (PyFunction_Check(func)) {
        return __Pyx_PyFunction_FastCall(func, &arg, 1);
    }
#endif
    if (likely(PyCFunction_Check(func))) {
        if (likely(PyCFunction_GET_FLAGS(func) & METH_O)) {
            return __Pyx_PyObject_CallMethO(func, arg);
#if CYTHON_FAST_PYCCALL
        } else if (__Pyx_PyFastCFunction_Check(func)) {
            return __Pyx_PyCFunction_FastCall(func, &arg, 1);
#endif
        }
    }
    return __Pyx__PyObject_CallOneArg(func, arg);
}
#else
/* Non-CPython build: no fast paths available, always pack a tuple. */
static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) {
    PyObject *result;
    PyObject *args = PyTuple_Pack(1, arg);
    if (unlikely(!args)) return NULL;
    result = __Pyx_PyObject_Call(func, args, NULL);
    Py_DECREF(args);
    return result;
}
#endif
/* PyDictVersioning */
#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS
/* Version tag of the type's tp_dict (0 if the type has no dict).
   Used to cache attribute/global lookups: an unchanged version means
   the dict contents are unchanged. */
static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj) {
    PyObject *dict = Py_TYPE(obj)->tp_dict;
    return likely(dict) ? __PYX_GET_DICT_VERSION(dict) : 0;
}
/* Version tag of the instance __dict__ (0 if absent). Resolves the
   dict pointer via tp_dictoffset, handling negative offsets through
   _PyObject_GetDictPtr. */
static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj) {
    PyObject **dictptr = NULL;
    Py_ssize_t offset = Py_TYPE(obj)->tp_dictoffset;
    if (offset) {
#if CYTHON_COMPILING_IN_CPYTHON
        dictptr = (likely(offset > 0)) ? (PyObject **) ((char *)obj + offset) : _PyObject_GetDictPtr(obj);
#else
        dictptr = _PyObject_GetDictPtr(obj);
#endif
    }
    return (dictptr && *dictptr) ? __PYX_GET_DICT_VERSION(*dictptr) : 0;
}
/* True iff both the type dict and the instance dict still carry the
   recorded version tags, i.e. a cached lookup result is still valid. */
static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version) {
    PyObject *dict = Py_TYPE(obj)->tp_dict;
    if (unlikely(!dict) || unlikely(tp_dict_version != __PYX_GET_DICT_VERSION(dict)))
        return 0;
    return obj_dict_version == __Pyx_get_object_dict_version(obj);
}
#endif
/* GetModuleGlobalName */
/* Look up a global name: first in the module dict (__pyx_d), then in
   the builtins via __Pyx_GetBuiltinName. With dict versioning enabled,
   the result and dict version are written back through the out
   parameters so callers can cache the lookup. Returns a new reference,
   or NULL with an exception set. */
#if CYTHON_USE_DICT_VERSIONS
static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value)
#else
static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name)
#endif
{
    PyObject *result;
#if !CYTHON_AVOID_BORROWED_REFS
#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1
    /* Interned name: its hash is already computed, so use the
       known-hash lookup to skip rehashing. */
    result = _PyDict_GetItem_KnownHash(__pyx_d, name, ((PyASCIIObject *) name)->hash);
    __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version)
    if (likely(result)) {
        return __Pyx_NewRef(result);
    } else if (unlikely(PyErr_Occurred())) {
        return NULL;
    }
#else
    result = PyDict_GetItem(__pyx_d, name);
    __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version)
    if (likely(result)) {
        return __Pyx_NewRef(result);
    }
#endif
#else
    result = PyObject_GetItem(__pyx_d, name);
    __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version)
    if (likely(result)) {
        return __Pyx_NewRef(result);
    }
    PyErr_Clear();
#endif
    /* Not a module global: fall back to the builtins. */
    return __Pyx_GetBuiltinName(name);
}
/* PyObjectCall2Args */
/* Call function with exactly two positional arguments, preferring the
   vectorized fast-call paths before falling back to building a 2-tuple.
   Returns a new reference, or NULL with an exception set. */
static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2) {
    PyObject *args, *result = NULL;
#if CYTHON_FAST_PYCALL
    if (PyFunction_Check(function)) {
        PyObject *args[2] = {arg1, arg2};
        return __Pyx_PyFunction_FastCall(function, args, 2);
    }
#endif
#if CYTHON_FAST_PYCCALL
    if (__Pyx_PyFastCFunction_Check(function)) {
        PyObject *args[2] = {arg1, arg2};
        return __Pyx_PyCFunction_FastCall(function, args, 2);
    }
#endif
    args = PyTuple_New(2);
    if (unlikely(!args)) goto done;
    Py_INCREF(arg1);
    PyTuple_SET_ITEM(args, 0, arg1);
    Py_INCREF(arg2);
    PyTuple_SET_ITEM(args, 1, arg2);
    Py_INCREF(function);
    result = __Pyx_PyObject_Call(function, args, NULL);
    Py_DECREF(args);
    Py_DECREF(function);
done:
    return result;
}
/* RaiseException */
/* Implements the `raise` statement: normalizes (type, value, traceback)
   and sets the resulting exception as pending. Two variants follow,
   selected by the Python major version. */
#if PY_MAJOR_VERSION < 3
/* Python 2 variant. `cause` (from `raise ... from ...`) is accepted but
   unused, matching Py2 semantics. On any argument error a TypeError is
   set instead. */
static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb,
                        CYTHON_UNUSED PyObject *cause) {
    __Pyx_PyThreadState_declare
    /* Take owned references to the pieces we keep; normalize None
       placeholders to NULL. */
    Py_XINCREF(type);
    if (!value || value == Py_None)
        value = NULL;
    else
        Py_INCREF(value);
    if (!tb || tb == Py_None)
        tb = NULL;
    else {
        Py_INCREF(tb);
        if (!PyTraceBack_Check(tb)) {
            PyErr_SetString(PyExc_TypeError,
                "raise: arg 3 must be a traceback or None");
            goto raise_error;
        }
    }
    if (PyType_Check(type)) {
#if CYTHON_COMPILING_IN_PYPY
        if (!value) {
            Py_INCREF(Py_None);
            value = Py_None;
        }
#endif
        PyErr_NormalizeException(&type, &value, &tb);
    } else {
        /* `raise instance` form: the instance carries its own type. */
        if (value) {
            PyErr_SetString(PyExc_TypeError,
                "instance exception may not have a separate value");
            goto raise_error;
        }
        value = type;
        type = (PyObject*) Py_TYPE(type);
        Py_INCREF(type);
        if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) {
            PyErr_SetString(PyExc_TypeError,
                "raise: exception class must be a subclass of BaseException");
            goto raise_error;
        }
    }
    __Pyx_PyThreadState_assign
    /* __Pyx_ErrRestore steals our references to type/value/tb. */
    __Pyx_ErrRestore(type, value, tb);
    return;
raise_error:
    Py_XDECREF(value);
    Py_XDECREF(type);
    Py_XDECREF(tb);
    return;
}
#else
/* Python 3 variant. Handles `raise Class`, `raise Class(args)`,
   `raise instance`, explicit tracebacks, and `raise ... from cause`
   (including instantiating a cause class). */
static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) {
    PyObject* owned_instance = NULL;
    if (tb == Py_None) {
        tb = 0;
    } else if (tb && !PyTraceBack_Check(tb)) {
        PyErr_SetString(PyExc_TypeError,
            "raise: arg 3 must be a traceback or None");
        goto bad;
    }
    if (value == Py_None)
        value = 0;
    if (PyExceptionInstance_Check(type)) {
        /* `raise instance`: the instance supplies its own class. */
        if (value) {
            PyErr_SetString(PyExc_TypeError,
                "instance exception may not have a separate value");
            goto bad;
        }
        value = type;
        type = (PyObject*) Py_TYPE(value);
    } else if (PyExceptionClass_Check(type)) {
        PyObject *instance_class = NULL;
        if (value && PyExceptionInstance_Check(value)) {
            instance_class = (PyObject*) Py_TYPE(value);
            if (instance_class != type) {
                int is_subclass = PyObject_IsSubclass(instance_class, type);
                if (!is_subclass) {
                    instance_class = NULL;
                } else if (unlikely(is_subclass == -1)) {
                    goto bad;
                } else {
                    /* value is an instance of a subclass: keep value's
                       actual class as the exception type. */
                    type = instance_class;
                }
            }
        }
        if (!instance_class) {
            /* Instantiate the exception class with the given value as
               its argument(s). */
            PyObject *args;
            if (!value)
                args = PyTuple_New(0);
            else if (PyTuple_Check(value)) {
                Py_INCREF(value);
                args = value;
            } else
                args = PyTuple_Pack(1, value);
            if (!args)
                goto bad;
            owned_instance = PyObject_Call(type, args, NULL);
            Py_DECREF(args);
            if (!owned_instance)
                goto bad;
            value = owned_instance;
            if (!PyExceptionInstance_Check(value)) {
                PyErr_Format(PyExc_TypeError,
                             "calling %R should have returned an instance of "
                             "BaseException, not %R",
                             type, Py_TYPE(value));
                goto bad;
            }
        }
    } else {
        PyErr_SetString(PyExc_TypeError,
            "raise: exception class must be a subclass of BaseException");
        goto bad;
    }
    if (cause) {
        /* `raise ... from cause`: attach __cause__ (instantiating the
           cause if a class was given; None clears it). */
        PyObject *fixed_cause;
        if (cause == Py_None) {
            fixed_cause = NULL;
        } else if (PyExceptionClass_Check(cause)) {
            fixed_cause = PyObject_CallObject(cause, NULL);
            if (fixed_cause == NULL)
                goto bad;
        } else if (PyExceptionInstance_Check(cause)) {
            fixed_cause = cause;
            Py_INCREF(fixed_cause);
        } else {
            PyErr_SetString(PyExc_TypeError,
                            "exception causes must derive from "
                            "BaseException");
            goto bad;
        }
        PyException_SetCause(value, fixed_cause);
    }
    PyErr_SetObject(type, value);
    if (tb) {
        /* Install the explicit traceback on the now-pending exception. */
#if CYTHON_COMPILING_IN_PYPY
        PyObject *tmp_type, *tmp_value, *tmp_tb;
        PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb);
        Py_INCREF(tb);
        PyErr_Restore(tmp_type, tmp_value, tb);
        Py_XDECREF(tmp_tb);
#else
        PyThreadState *tstate = __Pyx_PyThreadState_Current;
        PyObject* tmp_tb = tstate->curexc_traceback;
        if (tb != tmp_tb) {
            Py_INCREF(tb);
            tstate->curexc_traceback = tb;
            Py_XDECREF(tmp_tb);
        }
#endif
    }
bad:
    Py_XDECREF(owned_instance);
    return;
}
#endif
/* GetException */
/* Implements `sys.exc_info()`-style capture at the start of an except
   block: moves the pending ("current") exception into *type/*value/*tb
   (normalized, new references) and installs it as the "handled"
   exception in the thread state's exc_info so that bare `raise` and
   nested handlers see it. Returns 0 on success, -1 on failure (with
   the out parameters zeroed). */
#if CYTHON_FAST_THREAD_STATE
static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb)
#else
static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb)
#endif
{
    PyObject *local_type, *local_value, *local_tb;
#if CYTHON_FAST_THREAD_STATE
    PyObject *tmp_type, *tmp_value, *tmp_tb;
    /* Steal the pending exception directly out of the thread state
       (equivalent of PyErr_Fetch without the call overhead). */
    local_type = tstate->curexc_type;
    local_value = tstate->curexc_value;
    local_tb = tstate->curexc_traceback;
    tstate->curexc_type = 0;
    tstate->curexc_value = 0;
    tstate->curexc_traceback = 0;
#else
    PyErr_Fetch(&local_type, &local_value, &local_tb);
#endif
    PyErr_NormalizeException(&local_type, &local_value, &local_tb);
#if CYTHON_FAST_THREAD_STATE
    if (unlikely(tstate->curexc_type))
#else
    if (unlikely(PyErr_Occurred()))
#endif
        /* Normalization itself raised: bail out. */
        goto bad;
#if PY_MAJOR_VERSION >= 3
    if (local_tb) {
        if (unlikely(PyException_SetTraceback(local_value, local_tb) < 0))
            goto bad;
    }
#endif
    /* Hand one set of references to the caller... */
    Py_XINCREF(local_tb);
    Py_XINCREF(local_type);
    Py_XINCREF(local_value);
    *type = local_type;
    *value = local_value;
    *tb = local_tb;
    /* ...and move the other set into the thread's handled-exception
       slot, releasing whatever was there before. */
#if CYTHON_FAST_THREAD_STATE
    #if CYTHON_USE_EXC_INFO_STACK
    {
        _PyErr_StackItem *exc_info = tstate->exc_info;
        tmp_type = exc_info->exc_type;
        tmp_value = exc_info->exc_value;
        tmp_tb = exc_info->exc_traceback;
        exc_info->exc_type = local_type;
        exc_info->exc_value = local_value;
        exc_info->exc_traceback = local_tb;
    }
    #else
    tmp_type = tstate->exc_type;
    tmp_value = tstate->exc_value;
    tmp_tb = tstate->exc_traceback;
    tstate->exc_type = local_type;
    tstate->exc_value = local_value;
    tstate->exc_traceback = local_tb;
    #endif
    Py_XDECREF(tmp_type);
    Py_XDECREF(tmp_value);
    Py_XDECREF(tmp_tb);
#else
    PyErr_SetExcInfo(local_type, local_value, local_tb);
#endif
    return 0;
bad:
    *type = 0;
    *value = 0;
    *tb = 0;
    Py_XDECREF(local_type);
    Py_XDECREF(local_value);
    Py_XDECREF(local_tb);
    return -1;
}
/* SwapException */
/* Exchange the caller's saved exception triple with the thread state's
   currently *handled* exception (exc_info), passing reference ownership
   in both directions. Used around code that must temporarily replace
   the active handled exception. */
#if CYTHON_FAST_THREAD_STATE
static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) {
    PyObject *tmp_type, *tmp_value, *tmp_tb;
    #if CYTHON_USE_EXC_INFO_STACK
    _PyErr_StackItem *exc_info = tstate->exc_info;
    tmp_type = exc_info->exc_type;
    tmp_value = exc_info->exc_value;
    tmp_tb = exc_info->exc_traceback;
    exc_info->exc_type = *type;
    exc_info->exc_value = *value;
    exc_info->exc_traceback = *tb;
    #else
    tmp_type = tstate->exc_type;
    tmp_value = tstate->exc_value;
    tmp_tb = tstate->exc_traceback;
    tstate->exc_type = *type;
    tstate->exc_value = *value;
    tstate->exc_traceback = *tb;
    #endif
    *type = tmp_type;
    *value = tmp_value;
    *tb = tmp_tb;
}
#else
/* Portable fallback using the public exc-info API. */
static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb) {
    PyObject *tmp_type, *tmp_value, *tmp_tb;
    PyErr_GetExcInfo(&tmp_type, &tmp_value, &tmp_tb);
    PyErr_SetExcInfo(*type, *value, *tb);
    *type = tmp_type;
    *value = tmp_value;
    *tb = tmp_tb;
}
#endif
/* GetTopmostException */
#if CYTHON_USE_EXC_INFO_STACK
/* Walk the thread state's exc_info stack past frames whose exception
   slot is empty (NULL or None) and return the first entry that actually
   holds an exception — i.e. the exception sys.exc_info() would report.
   Returns a borrowed pointer into the thread state. */
static _PyErr_StackItem *
__Pyx_PyErr_GetTopmostException(PyThreadState *tstate)
{
    _PyErr_StackItem *exc_info = tstate->exc_info;
    while ((exc_info->exc_type == NULL || exc_info->exc_type == Py_None) &&
           exc_info->previous_item != NULL)
    {
        exc_info = exc_info->previous_item;
    }
    return exc_info;
}
#endif
/* SaveResetException */
#if CYTHON_FAST_THREAD_STATE
/* Save the currently handled exception (the sys.exc_info() triple) into
   *type/*value/*tb as new references, without clearing it. Paired with
   __Pyx__ExceptionReset below around try/except/finally blocks. */
static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) {
    #if CYTHON_USE_EXC_INFO_STACK
    _PyErr_StackItem *exc_info = __Pyx_PyErr_GetTopmostException(tstate);
    *type = exc_info->exc_type;
    *value = exc_info->exc_value;
    *tb = exc_info->exc_traceback;
    #else
    *type = tstate->exc_type;
    *value = tstate->exc_value;
    *tb = tstate->exc_traceback;
    #endif
    Py_XINCREF(*type);
    Py_XINCREF(*value);
    Py_XINCREF(*tb);
}
/* Restore a previously saved handled-exception triple into the thread
   state, stealing the passed references and releasing the replaced
   ones. */
static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) {
    PyObject *tmp_type, *tmp_value, *tmp_tb;
    #if CYTHON_USE_EXC_INFO_STACK
    _PyErr_StackItem *exc_info = tstate->exc_info;
    tmp_type = exc_info->exc_type;
    tmp_value = exc_info->exc_value;
    tmp_tb = exc_info->exc_traceback;
    exc_info->exc_type = type;
    exc_info->exc_value = value;
    exc_info->exc_traceback = tb;
    #else
    tmp_type = tstate->exc_type;
    tmp_value = tstate->exc_value;
    tmp_tb = tstate->exc_traceback;
    tstate->exc_type = type;
    tstate->exc_value = value;
    tstate->exc_traceback = tb;
    #endif
    Py_XDECREF(tmp_type);
    Py_XDECREF(tmp_value);
    Py_XDECREF(tmp_tb);
}
#endif
/* PyObjectSetAttrStr */
#if CYTHON_USE_TYPE_SLOTS
/* setattr(obj, attr_name, value) via the type's tp_setattro slot
   directly, skipping the generic PyObject_SetAttr dispatch when
   possible. attr_name must be an (interned) string object.
   Returns 0 on success, -1 with an exception set on failure. */
static CYTHON_INLINE int __Pyx_PyObject_SetAttrStr(PyObject* obj, PyObject* attr_name, PyObject* value) {
    PyTypeObject* tp = Py_TYPE(obj);
    if (likely(tp->tp_setattro))
        return tp->tp_setattro(obj, attr_name, value);
#if PY_MAJOR_VERSION < 3
    if (likely(tp->tp_setattr))
        return tp->tp_setattr(obj, PyString_AS_STRING(attr_name), value);
#endif
    return PyObject_SetAttr(obj, attr_name, value);
}
#endif
/* RaiseDoubleKeywords */
/* Set the TypeError raised when a keyword argument duplicates a
   positional one ("got multiple values for keyword argument ...").
   Used by the argument-unpacking code below. */
static void __Pyx_RaiseDoubleKeywordsError(
    const char* func_name,
    PyObject* kw_name)
{
    PyErr_Format(PyExc_TypeError,
        #if PY_MAJOR_VERSION >= 3
        "%s() got multiple values for keyword argument '%U'", func_name, kw_name);
        #else
        "%s() got multiple values for keyword argument '%s'", func_name,
        PyString_AsString(kw_name));
        #endif
}
/* ParseKeywords */
/* Distribute the entries of a kwds dict over the `values` array
   according to the NULL-terminated `argnames` table. Keywords matching
   a declared argument name land in values[]; unknown keywords go into
   kwds2 (the **kwargs dict) if given, otherwise raise TypeError.
   A keyword matching one of the first num_pos_args names (already
   filled positionally) raises the duplicate-argument TypeError.
   Returns 0 on success, -1 with an exception set on failure. */
static int __Pyx_ParseOptionalKeywords(
    PyObject *kwds,
    PyObject **argnames[],
    PyObject *kwds2,
    PyObject *values[],
    Py_ssize_t num_pos_args,
    const char* function_name)
{
    PyObject *key = 0, *value = 0;
    Py_ssize_t pos = 0;
    PyObject*** name;
    PyObject*** first_kw_arg = argnames + num_pos_args;
    while (PyDict_Next(kwds, &pos, &key, &value)) {
        name = first_kw_arg;
        /* Fast path: argument-name strings are interned, so pointer
           identity usually suffices. */
        while (*name && (**name != key)) name++;
        if (*name) {
            values[name-argnames] = value;
            continue;
        }
        /* Slow path: compare by string value, cheapest checks first. */
        name = first_kw_arg;
        #if PY_MAJOR_VERSION < 3
        if (likely(PyString_Check(key))) {
            while (*name) {
                if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key))
                        && _PyString_Eq(**name, key)) {
                    values[name-argnames] = value;
                    break;
                }
                name++;
            }
            if (*name) continue;
            else {
                /* Not a keyword-capable arg: check the positional-only
                   names to produce the duplicate-argument error. */
                PyObject*** argname = argnames;
                while (argname != first_kw_arg) {
                    if ((**argname == key) || (
                            (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key))
                             && _PyString_Eq(**argname, key))) {
                        goto arg_passed_twice;
                    }
                    argname++;
                }
            }
        } else
        #endif
        if (likely(PyUnicode_Check(key))) {
            while (*name) {
                int cmp = (**name == key) ? 0 :
                #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3
                    (__Pyx_PyUnicode_GET_LENGTH(**name) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 :
                #endif
                    PyUnicode_Compare(**name, key);
                if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad;
                if (cmp == 0) {
                    values[name-argnames] = value;
                    break;
                }
                name++;
            }
            if (*name) continue;
            else {
                PyObject*** argname = argnames;
                while (argname != first_kw_arg) {
                    int cmp = (**argname == key) ? 0 :
                    #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3
                        (__Pyx_PyUnicode_GET_LENGTH(**argname) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 :
                    #endif
                        PyUnicode_Compare(**argname, key);
                    if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad;
                    if (cmp == 0) goto arg_passed_twice;
                    argname++;
                }
            }
        } else
            goto invalid_keyword_type;
        if (kwds2) {
            if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad;
        } else {
            goto invalid_keyword;
        }
    }
    return 0;
arg_passed_twice:
    __Pyx_RaiseDoubleKeywordsError(function_name, key);
    goto bad;
invalid_keyword_type:
    PyErr_Format(PyExc_TypeError,
        "%.200s() keywords must be strings", function_name);
    goto bad;
invalid_keyword:
    PyErr_Format(PyExc_TypeError,
    #if PY_MAJOR_VERSION < 3
        "%.200s() got an unexpected keyword argument '%.200s'",
        function_name, PyString_AsString(key));
    #else
        "%s() got an unexpected keyword argument '%U'",
        function_name, key);
    #endif
bad:
    return -1;
}
/* decode_c_bytes */
/* Decode cstring[start:stop] (Python-slice semantics: negative indices
   count from `length`, out-of-range values are clamped) into a unicode
   object. Uses the specialized decode_func when supplied (e.g.
   PyUnicode_DecodeUTF8), otherwise the generic PyUnicode_Decode with
   the given encoding name. An empty slice yields the shared empty
   unicode object. Returns a new reference or NULL. */
static CYTHON_INLINE PyObject* __Pyx_decode_c_bytes(
         const char* cstring, Py_ssize_t length, Py_ssize_t start, Py_ssize_t stop,
         const char* encoding, const char* errors,
         PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)) {
    if (unlikely((start < 0) | (stop < 0))) {
        if (start < 0) {
            start += length;
            if (start < 0)
                start = 0;
        }
        if (stop < 0)
            stop += length;
    }
    if (stop > length)
        stop = length;
    if (unlikely(stop <= start))
        return __Pyx_NewRef(__pyx_empty_unicode);
    length = stop - start;
    cstring += start;
    if (decode_func) {
        return decode_func(cstring, length, errors);
    } else {
        return PyUnicode_Decode(cstring, length, encoding, errors);
    }
}
/* PyObject_GenericGetAttrNoDict */
#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000
static PyObject *__Pyx_RaiseGenericGetAttributeError(PyTypeObject *tp, PyObject *attr_name) {
PyErr_Format(PyExc_AttributeError,
#if PY_MAJOR_VERSION >= 3
"'%.50s' object has no attribute '%U'",
tp->tp_name, attr_name);
#else
"'%.50s' object has no attribute '%.400s'",
tp->tp_name, PyString_AS_STRING(attr_name));
#endif
return NULL;
}
static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name) {
PyObject *descr;
PyTypeObject *tp = Py_TYPE(obj);
if (unlikely(!PyString_Check(attr_name))) {
return PyObject_GenericGetAttr(obj, attr_name);
}
assert(!tp->tp_dictoffset);
descr = _PyType_Lookup(tp, attr_name);
if (unlikely(!descr)) {
return __Pyx_RaiseGenericGetAttributeError(tp, attr_name);
}
Py_INCREF(descr);
#if PY_MAJOR_VERSION < 3
if (likely(PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_HAVE_CLASS)))
#endif
{
descrgetfunc f = Py_TYPE(descr)->tp_descr_get;
if (unlikely(f)) {
PyObject *res = f(descr, obj, (PyObject *)tp);
Py_DECREF(descr);
return res;
}
}
return descr;
}
#endif
/* PyObject_GenericGetAttr */
#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000
/* tp_getattro replacement: use the no-dict fast path above when the
   type has no instance __dict__, otherwise fall back to CPython's
   generic lookup. */
static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name) {
    if (unlikely(Py_TYPE(obj)->tp_dictoffset)) {
        return PyObject_GenericGetAttr(obj, attr_name);
    }
    return __Pyx_PyObject_GenericGetAttrNoDict(obj, attr_name);
}
#endif
/* SetVTable */
/* Store an extension type's C-method vtable pointer in its tp_dict
   under the "__pyx_vtable__" key, wrapped in a PyCapsule (PyCObject on
   ancient Pythons). Other Cython modules retrieve it for fast
   cross-module method calls. Returns 0 on success, -1 on failure. */
static int __Pyx_SetVtable(PyObject *dict, void *vtable) {
#if PY_VERSION_HEX >= 0x02070000
    PyObject *ob = PyCapsule_New(vtable, 0, 0);
#else
    PyObject *ob = PyCObject_FromVoidPtr(vtable, 0);
#endif
    if (!ob)
        goto bad;
    if (PyDict_SetItem(dict, __pyx_n_s_pyx_vtable, ob) < 0)
        goto bad;
    Py_DECREF(ob);
    return 0;
bad:
    Py_XDECREF(ob);
    return -1;
}
/* PyObjectGetAttrStrNoError */
/* Clear the pending exception only if it is an AttributeError;
   anything else is left set for the caller to see. */
static void __Pyx_PyObject_GetAttrStr_ClearAttributeError(void) {
    __Pyx_PyThreadState_declare
    __Pyx_PyThreadState_assign
    if (likely(__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError)))
        __Pyx_PyErr_Clear();
}
/* Attribute lookup that suppresses AttributeError: returns NULL with
   NO exception set when the attribute is missing (other errors remain
   pending). On 3.7+ with generic getattr, uses the suppress_missing
   flavor of _PyObject_GenericGetAttrWithDict to avoid raising at all. */
static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name) {
    PyObject *result;
#if CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_TYPE_SLOTS && PY_VERSION_HEX >= 0x030700B1
    PyTypeObject* tp = Py_TYPE(obj);
    if (likely(tp->tp_getattro == PyObject_GenericGetAttr)) {
        return _PyObject_GenericGetAttrWithDict(obj, attr_name, NULL, 1);
    }
#endif
    result = __Pyx_PyObject_GetAttrStr(obj, attr_name);
    if (unlikely(!result)) {
        __Pyx_PyObject_GetAttrStr_ClearAttributeError();
    }
    return result;
}
/* SetupReduce */
/* True (1) iff the bound/unbound method object's __name__ equals
   `name`; 0 otherwise or on comparison error (errors are cleared). */
static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) {
  int ret;
  PyObject *name_attr;
  name_attr = __Pyx_PyObject_GetAttrStr(meth, __pyx_n_s_name);
  if (likely(name_attr)) {
      ret = PyObject_RichCompareBool(name_attr, name, Py_EQ);
  } else {
      ret = -1;
  }
  if (unlikely(ret < 0)) {
      PyErr_Clear();
      ret = 0;
  }
  Py_XDECREF(name_attr);
  return ret;
}
/* Wire up pickling for a Cython extension type: if the type defines the
   generated __reduce_cython__/__setstate_cython__ helpers and has not
   customized __getstate__/__reduce__/__reduce_ex__ itself, install the
   helpers as __reduce__/__setstate__ in tp_dict and remove the
   *_cython names. A type with its own __getstate__ is left untouched.
   Returns 0 on success, -1 with a RuntimeError (or the original
   exception) on failure. */
static int __Pyx_setup_reduce(PyObject* type_obj) {
    int ret = 0;
    PyObject *object_reduce = NULL;
    PyObject *object_reduce_ex = NULL;
    PyObject *reduce = NULL;
    PyObject *reduce_ex = NULL;
    PyObject *reduce_cython = NULL;
    PyObject *setstate = NULL;
    PyObject *setstate_cython = NULL;
#if CYTHON_USE_PYTYPE_LOOKUP
    if (_PyType_Lookup((PyTypeObject*)type_obj, __pyx_n_s_getstate)) goto __PYX_GOOD;
#else
    if (PyObject_HasAttr(type_obj, __pyx_n_s_getstate)) goto __PYX_GOOD;
#endif
    /* Only act when the type still inherits object.__reduce_ex__ /
       object.__reduce__ (i.e. no user-defined pickling protocol). */
#if CYTHON_USE_PYTYPE_LOOKUP
    object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD;
#else
    object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD;
#endif
    reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_ex); if (unlikely(!reduce_ex)) goto __PYX_BAD;
    if (reduce_ex == object_reduce_ex) {
#if CYTHON_USE_PYTYPE_LOOKUP
        object_reduce = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD;
#else
        object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD;
#endif
        reduce = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce); if (unlikely(!reduce)) goto __PYX_BAD;
        if (reduce == object_reduce || __Pyx_setup_reduce_is_named(reduce, __pyx_n_s_reduce_cython)) {
            reduce_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_reduce_cython);
            if (likely(reduce_cython)) {
                /* Promote the generated helper to __reduce__ and drop
                   the helper name from the type dict. */
                ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce, reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD;
                ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD;
            } else if (reduce == object_reduce || PyErr_Occurred()) {
                goto __PYX_BAD;
            }
            setstate = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_setstate);
            if (!setstate) PyErr_Clear();
            if (!setstate || __Pyx_setup_reduce_is_named(setstate, __pyx_n_s_setstate_cython)) {
                setstate_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_setstate_cython);
                if (likely(setstate_cython)) {
                    ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate, setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD;
                    ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD;
                } else if (!setstate || PyErr_Occurred()) {
                    goto __PYX_BAD;
                }
            }
            /* Invalidate method caches after mutating tp_dict. */
            PyType_Modified((PyTypeObject*)type_obj);
        }
    }
    goto __PYX_GOOD;
__PYX_BAD:
    if (!PyErr_Occurred())
        PyErr_Format(PyExc_RuntimeError, "Unable to initialize pickling for %s", ((PyTypeObject*)type_obj)->tp_name);
    ret = -1;
__PYX_GOOD:
#if !CYTHON_USE_PYTYPE_LOOKUP
    Py_XDECREF(object_reduce);
    Py_XDECREF(object_reduce_ex);
#endif
    Py_XDECREF(reduce);
    Py_XDECREF(reduce_ex);
    Py_XDECREF(reduce_cython);
    Py_XDECREF(setstate);
    Py_XDECREF(setstate_cython);
    return ret;
}
/* TypeImport */
#ifndef __PYX_HAVE_RT_ImportType
#define __PYX_HAVE_RT_ImportType
/* Import a type object from another module and verify its tp_basicsize
   against the size this module was compiled with, to catch binary-
   incompatible ABI changes. check_size selects the policy: Error
   requires an exact match, Warn allows a larger runtime size with a
   warning; a smaller runtime size is always an error. Returns a new
   reference to the type, or NULL with an exception set. */
static PyTypeObject *__Pyx_ImportType(PyObject *module, const char *module_name, const char *class_name,
    size_t size, enum __Pyx_ImportType_CheckSize check_size)
{
    PyObject *result = 0;
    char warning[200];
    Py_ssize_t basicsize;
#ifdef Py_LIMITED_API
    PyObject *py_basicsize;
#endif
    result = PyObject_GetAttrString(module, class_name);
    if (!result)
        goto bad;
    if (!PyType_Check(result)) {
        PyErr_Format(PyExc_TypeError,
            "%.200s.%.200s is not a type object",
            module_name, class_name);
        goto bad;
    }
#ifndef Py_LIMITED_API
    basicsize = ((PyTypeObject *)result)->tp_basicsize;
#else
    /* Limited API: tp_basicsize is not directly accessible, read the
       __basicsize__ attribute instead. */
    py_basicsize = PyObject_GetAttrString(result, "__basicsize__");
    if (!py_basicsize)
        goto bad;
    basicsize = PyLong_AsSsize_t(py_basicsize);
    Py_DECREF(py_basicsize);
    py_basicsize = 0;
    if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred())
        goto bad;
#endif
    if ((size_t)basicsize < size) {
        PyErr_Format(PyExc_ValueError,
            "%.200s.%.200s size changed, may indicate binary incompatibility. "
            "Expected %zd from C header, got %zd from PyObject",
            module_name, class_name, size, basicsize);
        goto bad;
    }
    if (check_size == __Pyx_ImportType_CheckSize_Error && (size_t)basicsize != size) {
        PyErr_Format(PyExc_ValueError,
            "%.200s.%.200s size changed, may indicate binary incompatibility. "
            "Expected %zd from C header, got %zd from PyObject",
            module_name, class_name, size, basicsize);
        goto bad;
    }
    else if (check_size == __Pyx_ImportType_CheckSize_Warn && (size_t)basicsize > size) {
        PyOS_snprintf(warning, sizeof(warning),
            "%s.%s size changed, may indicate binary incompatibility. "
            "Expected %zd from C header, got %zd from PyObject",
            module_name, class_name, size, basicsize);
        if (PyErr_WarnEx(NULL, warning, 0) < 0) goto bad;
    }
    return (PyTypeObject *)result;
bad:
    Py_XDECREF(result);
    return NULL;
}
#endif
/* Import */
/* Implements the `import` statement for this module: import `name` with
   the given from_list and relative-import level, using this module's
   globals for package resolution. level == -1 means "try relative, then
   absolute" (Py2 semantics emulated on Py3 by first attempting a
   level-1 import and swallowing ImportError). Returns a new reference
   to the module, or NULL with an exception set. */
static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) {
    PyObject *empty_list = 0;
    PyObject *module = 0;
    PyObject *global_dict = 0;
    PyObject *empty_dict = 0;
    PyObject *list;
    #if PY_MAJOR_VERSION < 3
    /* Py2: go through __builtin__.__import__ so import hooks apply. */
    PyObject *py_import;
    py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import);
    if (!py_import)
        goto bad;
    #endif
    if (from_list)
        list = from_list;
    else {
        empty_list = PyList_New(0);
        if (!empty_list)
            goto bad;
        list = empty_list;
    }
    global_dict = PyModule_GetDict(__pyx_m);
    if (!global_dict)
        goto bad;
    empty_dict = PyDict_New();
    if (!empty_dict)
        goto bad;
    {
        #if PY_MAJOR_VERSION >= 3
        if (level == -1) {
            /* Only attempt a relative import if we are inside a package
               (module name contains a dot). */
            if ((1) && (strchr(__Pyx_MODULE_NAME, '.'))) {
                module = PyImport_ImportModuleLevelObject(
                    name, global_dict, empty_dict, list, 1);
                if (!module) {
                    if (!PyErr_ExceptionMatches(PyExc_ImportError))
                        goto bad;
                    PyErr_Clear();
                }
            }
            level = 0;
        }
        #endif
        if (!module) {
            #if PY_MAJOR_VERSION < 3
            PyObject *py_level = PyInt_FromLong(level);
            if (!py_level)
                goto bad;
            module = PyObject_CallFunctionObjArgs(py_import,
                name, global_dict, empty_dict, list, py_level, (PyObject *)NULL);
            Py_DECREF(py_level);
            #else
            module = PyImport_ImportModuleLevelObject(
                name, global_dict, empty_dict, list, level);
            #endif
        }
    }
bad:
    #if PY_MAJOR_VERSION < 3
    Py_XDECREF(py_import);
    #endif
    Py_XDECREF(empty_list);
    Py_XDECREF(empty_dict);
    return module;
}
/* ImportFrom */
/* Implements `from module import name`: fetch the attribute, converting
   a missing-attribute AttributeError into the ImportError ("cannot
   import name ...") that the statement is expected to raise.
   Returns a new reference or NULL. */
static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) {
    PyObject* value = __Pyx_PyObject_GetAttrStr(module, name);
    if (unlikely(!value) && PyErr_ExceptionMatches(PyExc_AttributeError)) {
        PyErr_Format(PyExc_ImportError,
        #if PY_MAJOR_VERSION < 3
            "cannot import name %.230s", PyString_AS_STRING(name));
        #else
            "cannot import name %S", name);
        #endif
    }
    return value;
}
/* CLineInTraceback */
#ifndef CYTHON_CLINE_IN_TRACEBACK
/* Decide whether to show the C source line number in generated
   tracebacks, controlled by the runtime flag
   cython_runtime.cline_in_traceback. Returns c_line if enabled, 0 to
   suppress it. Preserves any in-flight exception around the lookup.
   If the flag is unset, it is initialized to False. */
static int __Pyx_CLineForTraceback(CYTHON_NCP_UNUSED PyThreadState *tstate, int c_line) {
    PyObject *use_cline;
    PyObject *ptype, *pvalue, *ptraceback;
#if CYTHON_COMPILING_IN_CPYTHON
    PyObject **cython_runtime_dict;
#endif
    if (unlikely(!__pyx_cython_runtime)) {
        return c_line;
    }
    /* Stash the pending exception: this helper runs during traceback
       construction and must not clobber it. */
    __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback);
#if CYTHON_COMPILING_IN_CPYTHON
    cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime);
    if (likely(cython_runtime_dict)) {
        __PYX_PY_DICT_LOOKUP_IF_MODIFIED(
            use_cline, *cython_runtime_dict,
            __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback))
    } else
#endif
    {
      PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback);
      if (use_cline_obj) {
        use_cline = PyObject_Not(use_cline_obj) ? Py_False : Py_True;
        Py_DECREF(use_cline_obj);
      } else {
        PyErr_Clear();
        use_cline = NULL;
      }
    }
    if (!use_cline) {
        c_line = 0;
        (void) PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False);
    }
    else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) {
        c_line = 0;
    }
    __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback);
    return c_line;
}
#endif
/* CodeObjectCache */
/* Cache of fake PyCodeObjects keyed by (C) source line, kept as a
   sorted array for binary search. Used by AddTraceback to avoid
   recreating code objects for repeated traceback entries. */
/* Binary search: index of the entry with code_line, or the insertion
   point if absent (count when past the end). */
static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) {
    int start = 0, mid = 0, end = count - 1;
    if (end >= 0 && code_line > entries[end].code_line) {
        return count;
    }
    while (start < end) {
        mid = start + (end - start) / 2;
        if (code_line < entries[mid].code_line) {
            end = mid;
        } else if (code_line > entries[mid].code_line) {
             start = mid + 1;
        } else {
            return mid;
        }
    }
    if (code_line <= entries[mid].code_line) {
        return mid;
    } else {
        return mid + 1;
    }
}
/* Look up the cached code object for code_line. Returns a NEW reference
   or NULL when absent (code_line 0 is never cached). */
static PyCodeObject *__pyx_find_code_object(int code_line) {
    PyCodeObject* code_object;
    int pos;
    if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) {
        return NULL;
    }
    pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line);
    if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) {
        return NULL;
    }
    code_object = __pyx_code_cache.entries[pos].code_object;
    Py_INCREF(code_object);
    return code_object;
}
/* Insert (or replace) the cache entry for code_line, keeping the array
   sorted. The cache takes its own reference to code_object. Allocation
   failures are silently ignored — the cache is an optimization only. */
static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) {
    int pos, i;
    __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries;
    if (unlikely(!code_line)) {
        return;
    }
    if (unlikely(!entries)) {
        /* First insertion: allocate the initial 64-entry array. */
        entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry));
        if (likely(entries)) {
            __pyx_code_cache.entries = entries;
            __pyx_code_cache.max_count = 64;
            __pyx_code_cache.count = 1;
            entries[0].code_line = code_line;
            entries[0].code_object = code_object;
            Py_INCREF(code_object);
        }
        return;
    }
    pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line);
    if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) {
        /* Same line already cached: swap in the new code object. */
        PyCodeObject* tmp = entries[pos].code_object;
        entries[pos].code_object = code_object;
        Py_DECREF(tmp);
        return;
    }
    if (__pyx_code_cache.count == __pyx_code_cache.max_count) {
        /* Grow linearly by 64 entries. */
        int new_max = __pyx_code_cache.max_count + 64;
        entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc(
            __pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry));
        if (unlikely(!entries)) {
            return;
        }
        __pyx_code_cache.entries = entries;
        __pyx_code_cache.max_count = new_max;
    }
    /* Shift the tail up one slot and insert in sorted position. */
    for (i=__pyx_code_cache.count; i>pos; i--) {
        entries[i] = entries[i-1];
    }
    entries[pos].code_line = code_line;
    entries[pos].code_object = code_object;
    __pyx_code_cache.count++;
    Py_INCREF(code_object);
}
/* AddTraceback */
#include "compile.h"
#include "frameobject.h"
#include "traceback.h"
/* Build a minimal PyCodeObject describing (funcname, filename, line)
   so a synthetic traceback frame can be created for Cython code.
   When `c_line` is non-zero the C source location is appended to the
   displayed function name.  Returns a new reference or NULL on error. */
static PyCodeObject* __Pyx_CreateCodeObjectForTraceback(
            const char *funcname, int c_line,
            int py_line, const char *filename) {
    PyCodeObject *py_code = NULL;
    PyObject *py_funcname = NULL;
    #if PY_MAJOR_VERSION < 3
    PyObject *py_srcfile = NULL;
    py_srcfile = PyString_FromString(filename);
    if (!py_srcfile) goto bad;
    #endif
    if (c_line) {
        /* embed "(file.c:123)" in the function name shown in the traceback */
        #if PY_MAJOR_VERSION < 3
        py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line);
        if (!py_funcname) goto bad;
        #else
        py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line);
        if (!py_funcname) goto bad;
        funcname = PyUnicode_AsUTF8(py_funcname);
        if (!funcname) goto bad;
        #endif
    }
    else {
        #if PY_MAJOR_VERSION < 3
        py_funcname = PyString_FromString(funcname);
        if (!py_funcname) goto bad;
        #endif
    }
    #if PY_MAJOR_VERSION < 3
    /* Py2 has no PyCode_NewEmpty; assemble an empty code object by hand */
    py_code = __Pyx_PyCode_New(
        0,
        0,
        0,
        0,
        0,
        __pyx_empty_bytes, /*PyObject *code,*/
        __pyx_empty_tuple, /*PyObject *consts,*/
        __pyx_empty_tuple, /*PyObject *names,*/
        __pyx_empty_tuple, /*PyObject *varnames,*/
        __pyx_empty_tuple, /*PyObject *freevars,*/
        __pyx_empty_tuple, /*PyObject *cellvars,*/
        py_srcfile, /*PyObject *filename,*/
        py_funcname, /*PyObject *name,*/
        py_line,
        __pyx_empty_bytes  /*PyObject *lnotab*/
    );
    Py_DECREF(py_srcfile);
    #else
    py_code = PyCode_NewEmpty(filename, funcname, py_line);
    #endif
    Py_XDECREF(py_funcname);  // XDECREF since it's only set on Py3 if cline
    return py_code;
bad:
    Py_XDECREF(py_funcname);
    #if PY_MAJOR_VERSION < 3
    Py_XDECREF(py_srcfile);
    #endif
    return NULL;
}
/* Append a synthetic frame for (funcname, filename, line) to the
   traceback of the currently raised exception so that Cython-level
   frames appear in Python tracebacks.  Best-effort: any failure to
   build the code object or frame is silently ignored. */
static void __Pyx_AddTraceback(const char *funcname, int c_line,
                               int py_line, const char *filename) {
    PyCodeObject *py_code = 0;
    PyFrameObject *py_frame = 0;
    PyThreadState *tstate = __Pyx_PyThreadState_Current;
    if (c_line) {
        c_line = __Pyx_CLineForTraceback(tstate, c_line);
    }
    /* C lines are cached under negative keys so they cannot collide
       with Python line numbers */
    py_code = __pyx_find_code_object(c_line ? -c_line : py_line);
    if (!py_code) {
        py_code = __Pyx_CreateCodeObjectForTraceback(
            funcname, c_line, py_line, filename);
        if (!py_code) goto bad;
        __pyx_insert_code_object(c_line ? -c_line : py_line, py_code);
    }
    py_frame = PyFrame_New(
        tstate,            /*PyThreadState *tstate,*/
        py_code,           /*PyCodeObject *code,*/
        __pyx_d,    /*PyObject *globals,*/
        0                  /*PyObject *locals*/
    );
    if (!py_frame) goto bad;
    __Pyx_PyFrame_SetLineNumber(py_frame, py_line);
    PyTraceBack_Here(py_frame);
bad:
    Py_XDECREF(py_code);
    Py_XDECREF(py_frame);
}
/* CIntToPy */
/* Convert a C uint8_t to a new Python integer object.  The first
   constructor wide enough for the source type is used; anything that
   does not fit goes through _PyLong_FromByteArray with the raw bytes
   in native endianness. */
static CYTHON_INLINE PyObject* __Pyx_PyInt_From_uint8_t(uint8_t value) {
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wconversion"
#endif
    const uint8_t neg_one = (uint8_t) -1, const_zero = (uint8_t) 0;
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic pop
#endif
    const int is_unsigned = neg_one > const_zero;
    if (is_unsigned) {
        /* unsigned source: smallest fitting unsigned constructor */
        if (sizeof(uint8_t) < sizeof(long))
            return PyInt_FromLong((long) value);
        if (sizeof(uint8_t) <= sizeof(unsigned long))
            return PyLong_FromUnsignedLong((unsigned long) value);
#ifdef HAVE_LONG_LONG
        if (sizeof(uint8_t) <= sizeof(unsigned PY_LONG_LONG))
            return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value);
#endif
    } else {
        /* signed source: smallest fitting signed constructor */
        if (sizeof(uint8_t) <= sizeof(long))
            return PyInt_FromLong((long) value);
#ifdef HAVE_LONG_LONG
        if (sizeof(uint8_t) <= sizeof(PY_LONG_LONG))
            return PyLong_FromLongLong((PY_LONG_LONG) value);
#endif
    }
    {
        /* generic fallback: pass the raw bytes to CPython */
        int one = 1; int little = (int)*(unsigned char *)&one;
        unsigned char *bytes = (unsigned char *)&value;
        return _PyLong_FromByteArray(bytes, sizeof(uint8_t),
                                     little, !is_unsigned);
    }
}
/* CIntToPy */
/* Convert a C int to a new Python integer object.  The first
   constructor wide enough for the source type is used; anything that
   does not fit goes through _PyLong_FromByteArray with the raw bytes
   in native endianness. */
static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value) {
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wconversion"
#endif
    const int neg_one = (int) -1, const_zero = (int) 0;
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic pop
#endif
    const int is_unsigned = neg_one > const_zero;
    if (is_unsigned) {
        /* unsigned source: smallest fitting unsigned constructor */
        if (sizeof(int) < sizeof(long))
            return PyInt_FromLong((long) value);
        if (sizeof(int) <= sizeof(unsigned long))
            return PyLong_FromUnsignedLong((unsigned long) value);
#ifdef HAVE_LONG_LONG
        if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG))
            return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value);
#endif
    } else {
        /* signed source: smallest fitting signed constructor */
        if (sizeof(int) <= sizeof(long))
            return PyInt_FromLong((long) value);
#ifdef HAVE_LONG_LONG
        if (sizeof(int) <= sizeof(PY_LONG_LONG))
            return PyLong_FromLongLong((PY_LONG_LONG) value);
#endif
    }
    {
        /* generic fallback: pass the raw bytes to CPython */
        int one = 1; int little = (int)*(unsigned char *)&one;
        unsigned char *bytes = (unsigned char *)&value;
        return _PyLong_FromByteArray(bytes, sizeof(int),
                                     little, !is_unsigned);
    }
}
/* CIntToPy */
/* Convert a C ptrdiff_t to a new Python integer object.  The first
   constructor wide enough for the source type is used; anything that
   does not fit goes through _PyLong_FromByteArray with the raw bytes
   in native endianness. */
static CYTHON_INLINE PyObject* __Pyx_PyInt_From_ptrdiff_t(ptrdiff_t value) {
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wconversion"
#endif
    const ptrdiff_t neg_one = (ptrdiff_t) -1, const_zero = (ptrdiff_t) 0;
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic pop
#endif
    const int is_unsigned = neg_one > const_zero;
    if (is_unsigned) {
        /* unsigned source: smallest fitting unsigned constructor */
        if (sizeof(ptrdiff_t) < sizeof(long))
            return PyInt_FromLong((long) value);
        if (sizeof(ptrdiff_t) <= sizeof(unsigned long))
            return PyLong_FromUnsignedLong((unsigned long) value);
#ifdef HAVE_LONG_LONG
        if (sizeof(ptrdiff_t) <= sizeof(unsigned PY_LONG_LONG))
            return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value);
#endif
    } else {
        /* signed source: smallest fitting signed constructor */
        if (sizeof(ptrdiff_t) <= sizeof(long))
            return PyInt_FromLong((long) value);
#ifdef HAVE_LONG_LONG
        if (sizeof(ptrdiff_t) <= sizeof(PY_LONG_LONG))
            return PyLong_FromLongLong((PY_LONG_LONG) value);
#endif
    }
    {
        /* generic fallback: pass the raw bytes to CPython */
        int one = 1; int little = (int)*(unsigned char *)&one;
        unsigned char *bytes = (unsigned char *)&value;
        return _PyLong_FromByteArray(bytes, sizeof(ptrdiff_t),
                                     little, !is_unsigned);
    }
}
/* CIntToPy */
/* Convert a C uint16_t to a new Python integer object.  The first
   constructor wide enough for the source type is used; anything that
   does not fit goes through _PyLong_FromByteArray with the raw bytes
   in native endianness. */
static CYTHON_INLINE PyObject* __Pyx_PyInt_From_uint16_t(uint16_t value) {
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wconversion"
#endif
    const uint16_t neg_one = (uint16_t) -1, const_zero = (uint16_t) 0;
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic pop
#endif
    const int is_unsigned = neg_one > const_zero;
    if (is_unsigned) {
        /* unsigned source: smallest fitting unsigned constructor */
        if (sizeof(uint16_t) < sizeof(long))
            return PyInt_FromLong((long) value);
        if (sizeof(uint16_t) <= sizeof(unsigned long))
            return PyLong_FromUnsignedLong((unsigned long) value);
#ifdef HAVE_LONG_LONG
        if (sizeof(uint16_t) <= sizeof(unsigned PY_LONG_LONG))
            return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value);
#endif
    } else {
        /* signed source: smallest fitting signed constructor */
        if (sizeof(uint16_t) <= sizeof(long))
            return PyInt_FromLong((long) value);
#ifdef HAVE_LONG_LONG
        if (sizeof(uint16_t) <= sizeof(PY_LONG_LONG))
            return PyLong_FromLongLong((PY_LONG_LONG) value);
#endif
    }
    {
        /* generic fallback: pass the raw bytes to CPython */
        int one = 1; int little = (int)*(unsigned char *)&one;
        unsigned char *bytes = (unsigned char *)&value;
        return _PyLong_FromByteArray(bytes, sizeof(uint16_t),
                                     little, !is_unsigned);
    }
}
/* CIntToPy */
/* Convert a C long to a new Python integer object.  The first
   constructor wide enough for the source type is used; anything that
   does not fit goes through _PyLong_FromByteArray with the raw bytes
   in native endianness. */
static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) {
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wconversion"
#endif
    const long neg_one = (long) -1, const_zero = (long) 0;
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic pop
#endif
    const int is_unsigned = neg_one > const_zero;
    if (is_unsigned) {
        /* unsigned source: smallest fitting unsigned constructor */
        if (sizeof(long) < sizeof(long))
            return PyInt_FromLong((long) value);
        if (sizeof(long) <= sizeof(unsigned long))
            return PyLong_FromUnsignedLong((unsigned long) value);
#ifdef HAVE_LONG_LONG
        if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG))
            return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value);
#endif
    } else {
        /* signed source: smallest fitting signed constructor */
        if (sizeof(long) <= sizeof(long))
            return PyInt_FromLong((long) value);
#ifdef HAVE_LONG_LONG
        if (sizeof(long) <= sizeof(PY_LONG_LONG))
            return PyLong_FromLongLong((PY_LONG_LONG) value);
#endif
    }
    {
        /* generic fallback: pass the raw bytes to CPython */
        int one = 1; int little = (int)*(unsigned char *)&one;
        unsigned char *bytes = (unsigned char *)&value;
        return _PyLong_FromByteArray(bytes, sizeof(long),
                                     little, !is_unsigned);
    }
}
/* CIntFromPyVerify */
/* Helper macros for the __Pyx_PyInt_As_* functions below: evaluate
   `func_value` once, verify it round-trips through `target_type`
   (jumping to the enclosing function's raise_overflow /
   raise_neg_overflow labels otherwise), and return it.  The _EXC
   variant additionally propagates a pending Python error signalled by
   a -1 return from the converter. */
#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\
    __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0)
#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\
    __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1)
#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\
    {\
        func_type value = func_value;\
        if (sizeof(target_type) < sizeof(func_type)) {\
            if (unlikely(value != (func_type) (target_type) value)) {\
                func_type zero = 0;\
                if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\
                    return (target_type) -1;\
                if (is_unsigned && unlikely(value < zero))\
                    goto raise_neg_overflow;\
                else\
                    goto raise_overflow;\
            }\
        }\
        return (target_type) value;\
    }
/* CIntFromPy */
/* Convert a Python object to a C long.  Fast paths read the PyLong
   digit array directly for small values (CYTHON_USE_PYLONG_INTERNALS);
   otherwise the generic PyLong_As* converters are used, and arbitrary
   objects are first coerced via __Pyx_PyNumber_IntOrLong.  Raises
   OverflowError when the value does not fit; returns (long)-1 with a
   Python error set on failure. */
static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) {
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wconversion"
#endif
    const long neg_one = (long) -1, const_zero = (long) 0;
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic pop
#endif
    const int is_unsigned = neg_one > const_zero;
#if PY_MAJOR_VERSION < 3
    if (likely(PyInt_Check(x))) {
        if (sizeof(long) < sizeof(long)) {
            __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x))
        } else {
            long val = PyInt_AS_LONG(x);
            if (is_unsigned && unlikely(val < 0)) {
                goto raise_neg_overflow;
            }
            return (long) val;
        }
    } else
#endif
    if (likely(PyLong_Check(x))) {
        if (is_unsigned) {
#if CYTHON_USE_PYLONG_INTERNALS
            /* assemble the value from up to 4 PyLong digits in place */
            const digit* digits = ((PyLongObject*)x)->ob_digit;
            switch (Py_SIZE(x)) {
                case  0: return (long) 0;
                case  1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0])
                case 2:
                    if (8 * sizeof(long) > 1 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) {
                            return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]));
                        }
                    }
                    break;
                case 3:
                    if (8 * sizeof(long) > 2 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) {
                            return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]));
                        }
                    }
                    break;
                case 4:
                    if (8 * sizeof(long) > 3 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) {
                            return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]));
                        }
                    }
                    break;
            }
#endif
#if CYTHON_COMPILING_IN_CPYTHON
            /* negative PyLong cannot convert to an unsigned target */
            if (unlikely(Py_SIZE(x) < 0)) {
                goto raise_neg_overflow;
            }
#else
            {
                int result = PyObject_RichCompareBool(x, Py_False, Py_LT);
                if (unlikely(result < 0))
                    return (long) -1;
                if (unlikely(result == 1))
                    goto raise_neg_overflow;
            }
#endif
            if (sizeof(long) <= sizeof(unsigned long)) {
                __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x))
#ifdef HAVE_LONG_LONG
            } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) {
                __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x))
#endif
            }
        } else {
#if CYTHON_USE_PYLONG_INTERNALS
            /* signed target: also handle small negative digit counts */
            const digit* digits = ((PyLongObject*)x)->ob_digit;
            switch (Py_SIZE(x)) {
                case  0: return (long) 0;
                case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0]))
                case  1: __PYX_VERIFY_RETURN_INT(long,  digit, +digits[0])
                case -2:
                    if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) {
                            return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
                        }
                    }
                    break;
                case 2:
                    if (8 * sizeof(long) > 1 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) {
                            return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
                        }
                    }
                    break;
                case -3:
                    if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) {
                            return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
                        }
                    }
                    break;
                case 3:
                    if (8 * sizeof(long) > 2 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) {
                            return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
                        }
                    }
                    break;
                case -4:
                    if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) {
                            return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
                        }
                    }
                    break;
                case 4:
                    if (8 * sizeof(long) > 3 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) {
                            return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
                        }
                    }
                    break;
            }
#endif
            if (sizeof(long) <= sizeof(long)) {
                __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x))
#ifdef HAVE_LONG_LONG
            } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) {
                __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x))
#endif
            }
        }
        {
            /* last resort for huge values: copy the raw bytes out */
#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray)
            PyErr_SetString(PyExc_RuntimeError,
                            "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers");
#else
            long val;
            PyObject *v = __Pyx_PyNumber_IntOrLong(x);
 #if PY_MAJOR_VERSION < 3
            if (likely(v) && !PyLong_Check(v)) {
                PyObject *tmp = v;
                v = PyNumber_Long(tmp);
                Py_DECREF(tmp);
            }
 #endif
            if (likely(v)) {
                int one = 1; int is_little = (int)*(unsigned char *)&one;
                unsigned char *bytes = (unsigned char *)&val;
                int ret = _PyLong_AsByteArray((PyLongObject *)v,
                                              bytes, sizeof(val),
                                              is_little, !is_unsigned);
                Py_DECREF(v);
                if (likely(!ret))
                    return val;
            }
#endif
            return (long) -1;
        }
    } else {
        /* not an int/long: coerce via __int__/__index__ and retry */
        long val;
        PyObject *tmp = __Pyx_PyNumber_IntOrLong(x);
        if (!tmp) return (long) -1;
        val = __Pyx_PyInt_As_long(tmp);
        Py_DECREF(tmp);
        return val;
    }
raise_overflow:
    PyErr_SetString(PyExc_OverflowError,
        "value too large to convert to long");
    return (long) -1;
raise_neg_overflow:
    PyErr_SetString(PyExc_OverflowError,
        "can't convert negative value to long");
    return (long) -1;
}
/* CIntFromPy */
/* Convert a Python object to a C int.  Mirrors __Pyx_PyInt_As_long:
   direct PyLong digit access for small values, generic PyLong_As*
   converters otherwise, and coercion of arbitrary objects through
   __Pyx_PyNumber_IntOrLong.  Raises OverflowError when the value does
   not fit; returns (int)-1 with a Python error set on failure. */
static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) {
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wconversion"
#endif
    const int neg_one = (int) -1, const_zero = (int) 0;
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic pop
#endif
    const int is_unsigned = neg_one > const_zero;
#if PY_MAJOR_VERSION < 3
    if (likely(PyInt_Check(x))) {
        if (sizeof(int) < sizeof(long)) {
            __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x))
        } else {
            long val = PyInt_AS_LONG(x);
            if (is_unsigned && unlikely(val < 0)) {
                goto raise_neg_overflow;
            }
            return (int) val;
        }
    } else
#endif
    if (likely(PyLong_Check(x))) {
        if (is_unsigned) {
#if CYTHON_USE_PYLONG_INTERNALS
            /* assemble the value from up to 4 PyLong digits in place */
            const digit* digits = ((PyLongObject*)x)->ob_digit;
            switch (Py_SIZE(x)) {
                case  0: return (int) 0;
                case  1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0])
                case 2:
                    if (8 * sizeof(int) > 1 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) {
                            return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]));
                        }
                    }
                    break;
                case 3:
                    if (8 * sizeof(int) > 2 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) {
                            return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]));
                        }
                    }
                    break;
                case 4:
                    if (8 * sizeof(int) > 3 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) {
                            return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]));
                        }
                    }
                    break;
            }
#endif
#if CYTHON_COMPILING_IN_CPYTHON
            /* negative PyLong cannot convert to an unsigned target */
            if (unlikely(Py_SIZE(x) < 0)) {
                goto raise_neg_overflow;
            }
#else
            {
                int result = PyObject_RichCompareBool(x, Py_False, Py_LT);
                if (unlikely(result < 0))
                    return (int) -1;
                if (unlikely(result == 1))
                    goto raise_neg_overflow;
            }
#endif
            if (sizeof(int) <= sizeof(unsigned long)) {
                __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x))
#ifdef HAVE_LONG_LONG
            } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) {
                __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x))
#endif
            }
        } else {
#if CYTHON_USE_PYLONG_INTERNALS
            /* signed target: also handle small negative digit counts */
            const digit* digits = ((PyLongObject*)x)->ob_digit;
            switch (Py_SIZE(x)) {
                case  0: return (int) 0;
                case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0]))
                case  1: __PYX_VERIFY_RETURN_INT(int,  digit, +digits[0])
                case -2:
                    if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) {
                            return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
                        }
                    }
                    break;
                case 2:
                    if (8 * sizeof(int) > 1 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) {
                            return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
                        }
                    }
                    break;
                case -3:
                    if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) {
                            return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
                        }
                    }
                    break;
                case 3:
                    if (8 * sizeof(int) > 2 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) {
                            return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
                        }
                    }
                    break;
                case -4:
                    if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) {
                            return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
                        }
                    }
                    break;
                case 4:
                    if (8 * sizeof(int) > 3 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) {
                            return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
                        }
                    }
                    break;
            }
#endif
            if (sizeof(int) <= sizeof(long)) {
                __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x))
#ifdef HAVE_LONG_LONG
            } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) {
                __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x))
#endif
            }
        }
        {
            /* last resort for huge values: copy the raw bytes out */
#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray)
            PyErr_SetString(PyExc_RuntimeError,
                            "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers");
#else
            int val;
            PyObject *v = __Pyx_PyNumber_IntOrLong(x);
 #if PY_MAJOR_VERSION < 3
            if (likely(v) && !PyLong_Check(v)) {
                PyObject *tmp = v;
                v = PyNumber_Long(tmp);
                Py_DECREF(tmp);
            }
 #endif
            if (likely(v)) {
                int one = 1; int is_little = (int)*(unsigned char *)&one;
                unsigned char *bytes = (unsigned char *)&val;
                int ret = _PyLong_AsByteArray((PyLongObject *)v,
                                              bytes, sizeof(val),
                                              is_little, !is_unsigned);
                Py_DECREF(v);
                if (likely(!ret))
                    return val;
            }
#endif
            return (int) -1;
        }
    } else {
        /* not an int/long: coerce via __int__/__index__ and retry */
        int val;
        PyObject *tmp = __Pyx_PyNumber_IntOrLong(x);
        if (!tmp) return (int) -1;
        val = __Pyx_PyInt_As_int(tmp);
        Py_DECREF(tmp);
        return val;
    }
raise_overflow:
    PyErr_SetString(PyExc_OverflowError,
        "value too large to convert to int");
    return (int) -1;
raise_neg_overflow:
    PyErr_SetString(PyExc_OverflowError,
        "can't convert negative value to int");
    return (int) -1;
}
/* FastTypeChecks */
/* Fast re-implementations of PyErr_GivenExceptionMatches and friends
   that walk tp_mro / tp_base directly instead of going through the
   generic subclass protocol.  Only enabled on CPython, where the type
   layout is known. */
#if CYTHON_COMPILING_IN_CPYTHON
/* Walk the tp_base chain of `a` looking for `b` (used when no MRO
   tuple is available). */
static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) {
    while (a) {
        a = a->tp_base;
        if (a == b)
            return 1;
    }
    return b == &PyBaseObject_Type;
}
/* Is type `a` a subtype of type `b`?  Prefers a linear scan of a's
   MRO tuple; falls back to the tp_base walk when the MRO is unset. */
static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) {
    PyObject *mro;
    if (a == b) return 1;
    mro = a->tp_mro;
    if (likely(mro)) {
        Py_ssize_t i, n;
        n = PyTuple_GET_SIZE(mro);
        for (i = 0; i < n; i++) {
            if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b)
                return 1;
        }
        return 0;
    }
    return __Pyx_InBases(a, b);
}
#if PY_MAJOR_VERSION == 2
/* Py2 variant: PyObject_IsSubclass may execute Python code, so the
   current error state is saved and restored around the calls. */
static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) {
    PyObject *exception, *value, *tb;
    int res;
    __Pyx_PyThreadState_declare
    __Pyx_PyThreadState_assign
    __Pyx_ErrFetch(&exception, &value, &tb);
    res = exc_type1 ? PyObject_IsSubclass(err, exc_type1) : 0;
    if (unlikely(res == -1)) {
        PyErr_WriteUnraisable(err);
        res = 0;
    }
    if (!res) {
        res = PyObject_IsSubclass(err, exc_type2);
        if (unlikely(res == -1)) {
            PyErr_WriteUnraisable(err);
            res = 0;
        }
    }
    __Pyx_ErrRestore(exception, value, tb);
    return res;
}
#else
/* Py3 variant: exception classes are plain types, so the cheap
   __Pyx_IsSubtype walk suffices and no error state is touched. */
static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) {
    int res = exc_type1 ? __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0;
    if (!res) {
        res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2);
    }
    return res;
}
#endif
/* Match `exc_type` against a tuple of exception classes, with an
   identity-only fast pass on Py3 before the subtype checks. */
static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) {
    Py_ssize_t i, n;
    assert(PyExceptionClass_Check(exc_type));
    n = PyTuple_GET_SIZE(tuple);
#if PY_MAJOR_VERSION >= 3
    for (i=0; i<n; i++) {
        if (exc_type == PyTuple_GET_ITEM(tuple, i)) return 1;
    }
#endif
    for (i=0; i<n; i++) {
        PyObject *t = PyTuple_GET_ITEM(tuple, i);
        #if PY_MAJOR_VERSION < 3
        if (likely(exc_type == t)) return 1;
        #endif
        if (likely(PyExceptionClass_Check(t))) {
            if (__Pyx_inner_PyErr_GivenExceptionMatches2(exc_type, NULL, t)) return 1;
        } else {
        }
    }
    return 0;
}
/* Drop-in fast path for PyErr_GivenExceptionMatches (single class or
   tuple); defers to CPython's version for anything unusual. */
static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject* exc_type) {
    if (likely(err == exc_type)) return 1;
    if (likely(PyExceptionClass_Check(err))) {
        if (likely(PyExceptionClass_Check(exc_type))) {
            return __Pyx_inner_PyErr_GivenExceptionMatches2(err, NULL, exc_type);
        } else if (likely(PyTuple_Check(exc_type))) {
            return __Pyx_PyErr_GivenExceptionMatchesTuple(err, exc_type);
        } else {
        }
    }
    return PyErr_GivenExceptionMatches(err, exc_type);
}
/* Match `err` against exactly two exception classes in one pass. */
static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *exc_type1, PyObject *exc_type2) {
    assert(PyExceptionClass_Check(exc_type1));
    assert(PyExceptionClass_Check(exc_type2));
    if (likely(err == exc_type1 || err == exc_type2)) return 1;
    if (likely(PyExceptionClass_Check(err))) {
        return __Pyx_inner_PyErr_GivenExceptionMatches2(err, exc_type1, exc_type2);
    }
    return (PyErr_GivenExceptionMatches(err, exc_type1) || PyErr_GivenExceptionMatches(err, exc_type2));
}
#endif
/* CheckBinaryVersion */
/* Warn (via PyErr_WarnEx) when the Python major.minor version this
   module was compiled against differs from the interpreter running
   it.  Returns 0 on match or successful warning, -1 if the warning
   was turned into an error. */
static int __Pyx_check_binary_version(void) {
    char compiled[4], running[4];
    PyOS_snprintf(compiled, 4, "%d.%d", PY_MAJOR_VERSION, PY_MINOR_VERSION);
    PyOS_snprintf(running, 4, "%s", Py_GetVersion());
    /* compare only "X.Y": index 0 is the major digit, index 2 the minor */
    if (compiled[0] == running[0] && compiled[2] == running[2])
        return 0;
    {
        char message[200];
        PyOS_snprintf(message, sizeof(message),
                      "compiletime version %s of module '%.100s' "
                      "does not match runtime version %s",
                      compiled, __Pyx_MODULE_NAME, running);
        return PyErr_WarnEx(NULL, message, 1);
    }
}
/* InitStrings */
/* Intern/create all module-level string constants from the generated
   string table.  Each entry is decoded according to its flags
   (unicode vs. bytes, interned or not, explicit encoding) and hashed
   eagerly so later dict lookups cannot fail.  Returns 0 on success,
   -1 with an error set on failure. */
static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) {
    while (t->p) {
        #if PY_MAJOR_VERSION < 3
        if (t->is_unicode) {
            *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL);
        } else if (t->intern) {
            *t->p = PyString_InternFromString(t->s);
        } else {
            *t->p = PyString_FromStringAndSize(t->s, t->n - 1);
        }
        #else
        if (t->is_unicode | t->is_str) {
            if (t->intern) {
                *t->p = PyUnicode_InternFromString(t->s);
            } else if (t->encoding) {
                *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL);
            } else {
                *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1);
            }
        } else {
            *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1);
        }
        #endif
        if (!*t->p)
            return -1;
        /* pre-compute the hash; PyObject_Hash returns -1 on error */
        if (PyObject_Hash(*t->p) == -1)
            return -1;
        ++t;
    }
    return 0;
}
/* Build a Python str from a NUL-terminated C string. */
static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) {
    const Py_ssize_t n = (Py_ssize_t)strlen(c_str);
    return __Pyx_PyUnicode_FromStringAndSize(c_str, n);
}
/* Borrowed char* view of a Python string-like object, discarding the
   length that __Pyx_PyObject_AsStringAndSize also reports. */
static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) {
    Py_ssize_t discarded_length;
    return __Pyx_PyObject_AsStringAndSize(o, &discarded_length);
}
#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT
#if !CYTHON_PEP393_ENABLED
/* Pre-PEP393 builds: expose a unicode object as char* via its cached
   default-encoded bytes object.  Returns a pointer borrowed from that
   cache, or NULL with an error set (including a UnicodeEncodeError
   when ASCII is required but a non-ASCII byte is present). */
static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) {
    char* defenc_c;
    PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL);
    if (!defenc) return NULL;
    defenc_c = PyBytes_AS_STRING(defenc);
#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
    {
        /* reject non-ASCII bytes; PyUnicode_AsASCIIString sets the error */
        char* end = defenc_c + PyBytes_GET_SIZE(defenc);
        char* c;
        for (c = defenc_c; c < end; c++) {
            if ((unsigned char) (*c) >= 128) {
                PyUnicode_AsASCIIString(o);
                return NULL;
            }
        }
    }
#endif
    *length = PyBytes_GET_SIZE(defenc);
    return defenc_c;
}
#else
/* PEP393 builds: use the compact-unicode UTF-8/ASCII accessors. */
static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) {
    if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL;
#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
    if (likely(PyUnicode_IS_ASCII(o))) {
        *length = PyUnicode_GET_LENGTH(o);
        return PyUnicode_AsUTF8(o);
    } else {
        /* non-ASCII with ASCII default encoding: raise via the encoder */
        PyUnicode_AsASCIIString(o);
        return NULL;
    }
#else
    return PyUnicode_AsUTF8AndSize(o, length);
#endif
}
#endif
#endif
/* Borrowed char* view plus length of a string-like object: unicode
   (when the default encoding permits), bytearray, or bytes.  Returns
   NULL with an error set for other types or on encoding failure. */
static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) {
#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT
    if (
#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
            __Pyx_sys_getdefaultencoding_not_ascii &&
#endif
            PyUnicode_Check(o)) {
        return __Pyx_PyUnicode_AsStringAndSize(o, length);
    } else
#endif
#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE))
    if (PyByteArray_Check(o)) {
        *length = PyByteArray_GET_SIZE(o);
        return PyByteArray_AS_STRING(o);
    } else
#endif
    {
        /* fall through to bytes; sets TypeError for anything else */
        char* result;
        int r = PyBytes_AsStringAndSize(o, &result, length);
        if (unlikely(r < 0)) {
            return NULL;
        } else {
            return result;
        }
    }
}
/* Truth value of `x` with fast paths for the three singletons
   True/False/None; anything else goes through PyObject_IsTrue
   (which may return -1 on error). */
static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) {
    if (x == Py_True) return 1;
    if ((x == Py_False) | (x == Py_None)) return 0;
    return PyObject_IsTrue(x);
}
/* Truth value of `x`, consuming one reference to it.  A NULL `x`
   (error already set by the producer) yields -1. */
static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) {
    if (likely(x)) {
        const int truth = __Pyx_PyObject_IsTrue(x);
        Py_DECREF(x);
        return truth;
    }
    return -1;
}
/* Handle a __int__/__long__ slot that returned something other than
   an exact int.  On Py3 a strict int subclass only earns a
   DeprecationWarning (the result is kept unless warnings are errors);
   any other type raises TypeError.  Consumes `result`; returns it or
   NULL. */
static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) {
#if PY_MAJOR_VERSION >= 3
    if (PyLong_Check(result)) {
        /* int subclass: tolerated with a deprecation warning */
        if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1,
                "__int__ returned non-int (type %.200s).  "
                "The ability to return an instance of a strict subclass of int "
                "is deprecated, and may be removed in a future version of Python.",
                Py_TYPE(result)->tp_name)) {
            Py_DECREF(result);
            return NULL;
        }
        return result;
    }
#endif
    PyErr_Format(PyExc_TypeError,
                 "__%.4s__ returned non-%.4s (type %.200s)",
                 type_name, type_name, Py_TYPE(result)->tp_name);
    Py_DECREF(result);
    return NULL;
}
/* Coerce `x` to a Python int (or long on Py2) via its nb_int/nb_long
   type slots, returning a new reference.  Exact ints are returned
   as-is; wrong-typed slot results are routed through
   __Pyx_PyNumber_IntOrLongWrongResultType; objects without a slot
   raise TypeError. */
static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) {
#if CYTHON_USE_TYPE_SLOTS
  PyNumberMethods *m;
#endif
  const char *name = NULL;
  PyObject *res = NULL;
#if PY_MAJOR_VERSION < 3
  if (likely(PyInt_Check(x) || PyLong_Check(x)))
#else
  if (likely(PyLong_Check(x)))
#endif
    return __Pyx_NewRef(x);
#if CYTHON_USE_TYPE_SLOTS
  m = Py_TYPE(x)->tp_as_number;
  #if PY_MAJOR_VERSION < 3
  if (m && m->nb_int) {
    name = "int";
    res = m->nb_int(x);
  }
  else if (m && m->nb_long) {
    name = "long";
    res = m->nb_long(x);
  }
  #else
  if (likely(m && m->nb_int)) {
    name = "int";
    res = m->nb_int(x);
  }
  #endif
#else
  /* no direct slot access (e.g. PyPy): use the generic protocol */
  if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) {
    res = PyNumber_Int(x);
  }
#endif
  if (likely(res)) {
#if PY_MAJOR_VERSION < 3
    if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) {
#else
    if (unlikely(!PyLong_CheckExact(res))) {
#endif
      return __Pyx_PyNumber_IntOrLongWrongResultType(res, name);
    }
  }
  else if (!PyErr_Occurred()) {
    PyErr_SetString(PyExc_TypeError,
                    "an integer is required");
  }
  return res;
}
/* Convert an object to Py_ssize_t via the __index__ protocol, with a fast
 * inline path that reads small CPython longs (|size| <= 4 digits) straight
 * from their internal digit array.  Returns -1 with an exception set on
 * failure.  (Cython-generated utility code -- do not edit by hand.) */
static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) {
  Py_ssize_t ival;
  PyObject *x;
#if PY_MAJOR_VERSION < 3
  if (likely(PyInt_CheckExact(b))) {
    if (sizeof(Py_ssize_t) >= sizeof(long))
        return PyInt_AS_LONG(b);
    else
        return PyInt_AsSsize_t(b);
  }
#endif
  if (likely(PyLong_CheckExact(b))) {
    #if CYTHON_USE_PYLONG_INTERNALS
    const digit* digits = ((PyLongObject*)b)->ob_digit;
    const Py_ssize_t size = Py_SIZE(b);
    /* ob_size carries the sign; |size| is the number of 15/30-bit digits. */
    if (likely(__Pyx_sst_abs(size) <= 1)) {
      ival = likely(size) ? digits[0] : 0;
      if (size == -1) ival = -ival;
      return ival;
    } else {
      switch (size) {
         case 2:
           if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) {
             return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
           }
           break;
         case -2:
           if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) {
             return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
           }
           break;
         case 3:
           if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) {
             return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
           }
           break;
         case -3:
           if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) {
             return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
           }
           break;
         case 4:
           if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) {
             return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
           }
           break;
         case -4:
           if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) {
             return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
           }
           break;
      }
    }
    #endif
    /* Digit fast path not applicable: use the generic conversion. */
    return PyLong_AsSsize_t(b);
  }
  x = PyNumber_Index(b);
  if (!x) return -1;
  ival = PyInt_AsSsize_t(x);
  Py_DECREF(x);
  return ival;
}
/* Convert an object to Py_hash_t via __index__; on the common build where
 * Py_hash_t and Py_ssize_t are the same width, reuse the helper above. */
static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject* o) {
  if (sizeof(Py_hash_t) == sizeof(Py_ssize_t)) {
    return (Py_hash_t) __Pyx_PyIndex_AsSsize_t(o);
#if PY_MAJOR_VERSION < 3
  } else if (likely(PyInt_CheckExact(o))) {
    return PyInt_AS_LONG(o);
#endif
  } else {
    Py_ssize_t ival;
    PyObject *x;
    x = PyNumber_Index(o);
    if (!x) return -1;
    ival = PyInt_AsLong(x);
    Py_DECREF(x);
    return ival;
  }
}
/* New reference to the appropriate bool singleton. */
static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) {
  return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False);
}
/* size_t -> Python int (PyInt_FromSize_t aliases PyLong_FromSize_t on Py3). */
static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) {
    return PyInt_FromSize_t(ival);
}
#endif /* Py_PYTHON_H */
| 400,467 | C | 37.425254 | 310 | 0.588341 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pycparser/_build_tables.py | #-----------------------------------------------------------------
# pycparser: _build_tables.py
#
# A dummy for generating the lexing/parsing tables and and
# compiling them into .pyc for faster execution in optimized mode.
# Also generates AST code from the configuration file.
# Should be called from the pycparser directory.
#
# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
#-----------------------------------------------------------------
# Insert '.' and '..' as first entries to the search path for modules.
# Restricted environments like embeddable python do not include the
# current working directory on startup.
import sys
sys.path[0:0] = ['.', '..']

# Generate c_ast.py from the node configuration file.
from _ast_gen import ASTCodeGenerator
ast_gen = ASTCodeGenerator('_c_ast.cfg')
# Use a context manager so the generated file is flushed and closed even if
# generation fails (the previous code leaked the open file handle).
with open('c_ast.py', 'w') as ast_file:
    ast_gen.generate(ast_file)

from pycparser import c_parser

# Instantiating CParser with optimization enabled generates the
# lextab/yacctab table modules as a side effect.
c_parser.CParser(
    lex_optimize=True,
    yacc_debug=False,
    yacc_optimize=True)

# Import the generated modules so they are byte-compiled to .pyc for
# faster execution in optimized mode.
import lextab
import yacctab
import c_ast
| 1,039 | Python | 26.36842 | 70 | 0.639076 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pycparser/_ast_gen.py | #-----------------------------------------------------------------
# _ast_gen.py
#
# Generates the AST Node classes from a specification given in
# a configuration file
#
# The design of this module was inspired by astgen.py from the
# Python 2.5 code-base.
#
# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
#-----------------------------------------------------------------
from string import Template
class ASTCodeGenerator(object):
    """Emits Python source for the AST ``Node`` classes described by a
    pycparser node-configuration file (``_c_ast.cfg`` by default).
    """

    def __init__(self, cfg_filename='_c_ast.cfg'):
        """Read the configuration file and build one NodeCfg per entry."""
        self.cfg_filename = cfg_filename
        self.node_cfg = [
            NodeCfg(name, contents)
            for name, contents in self.parse_cfgfile(cfg_filename)
        ]

    def generate(self, file=None):
        """Write the generated module source into *file*, an open buffer."""
        chunks = [
            Template(_PROLOGUE_COMMENT).substitute(
                cfg_filename=self.cfg_filename),
            _PROLOGUE_CODE,
        ]
        # One class definition per configured node, separated by blank lines.
        chunks.extend(cfg.generate_source() + '\n\n' for cfg in self.node_cfg)
        file.write(''.join(chunks))

    def parse_cfgfile(self, filename):
        """Yield (name, contents) pairs for each node in the config file.

        Blank lines and '#' comment lines are skipped; any line that does
        not look like ``Name: [entry, entry, ...]`` raises RuntimeError.
        """
        with open(filename, "r") as cfg:
            for raw in cfg:
                line = raw.strip()
                if not line or line.startswith('#'):
                    continue
                colon_i = line.find(':')
                lbracket_i = line.find('[')
                rbracket_i = line.find(']')
                malformed = (colon_i < 1
                             or lbracket_i <= colon_i
                             or rbracket_i <= lbracket_i)
                if malformed:
                    raise RuntimeError("Invalid line in %s:\n%s\n" % (filename, line))
                name = line[:colon_i]
                val = line[lbracket_i + 1:rbracket_i]
                vallist = [v.strip() for v in val.split(',')] if val else []
                yield name, vallist
class NodeCfg(object):
    """Configuration of a single AST node class.

    name:     node (class) name
    contents: entry names from the config file; a trailing '*' marks a
              child node, '**' a sequence of children, no suffix a plain
              attribute (see the comment at the top of the config file).
    """

    def __init__(self, name, contents):
        self.name = name
        self.all_entries = []
        self.attr = []
        self.child = []
        self.seq_child = []
        for raw in contents:
            base = raw.rstrip('*')
            self.all_entries.append(base)
            if raw.endswith('**'):
                self.seq_child.append(base)
            elif raw.endswith('*'):
                self.child.append(base)
            else:
                self.attr.append(raw)

    def generate_source(self):
        """Return the full Python source text for this node class."""
        pieces = [
            self._gen_init(),
            self._gen_children(),
            self._gen_iter(),
            self._gen_attr_names(),
        ]
        return '\n'.join(pieces)

    def _gen_init(self):
        # Class header, __slots__, and an __init__ that stores every entry.
        if self.all_entries:
            args = ', '.join(self.all_entries)
            slots = ', '.join("'{0}'".format(e) for e in self.all_entries)
            slots += ", 'coord', '__weakref__'"
            arglist = '(self, %s, coord=None)' % args
        else:
            slots = "'coord', '__weakref__'"
            arglist = '(self, coord=None)'
        lines = [
            "class %s(Node):\n" % self.name,
            "    __slots__ = (%s)\n" % slots,
            "    def __init__%s:\n" % arglist,
        ]
        lines.extend("        self.%s = %s\n" % (nm, nm)
                     for nm in self.all_entries + ['coord'])
        return ''.join(lines)

    def _gen_children(self):
        # children(): single children first, then sequence children.
        lines = ['    def children(self):\n']
        if self.all_entries:
            lines.append('        nodelist = []\n')
            for nm in self.child:
                lines.append(
                    ('        if self.%(child)s is not None:' +
                     ' nodelist.append(("%(child)s", self.%(child)s))\n') %
                    dict(child=nm))
            for nm in self.seq_child:
                lines.append(
                    ('        for i, child in enumerate(self.%(child)s or []):\n'
                     '            nodelist.append(("%(child)s[%%d]" %% i, child))\n') %
                    dict(child=nm))
            lines.append('        return tuple(nodelist)\n')
        else:
            lines.append('        return ()\n')
        return ''.join(lines)

    def _gen_iter(self):
        # __iter__(): yield each child; nodes without children become an
        # empty generator via the 'return' before 'yield' idiom.
        lines = ['    def __iter__(self):\n']
        if self.all_entries:
            for nm in self.child:
                lines.append(
                    ('        if self.%(child)s is not None:\n' +
                     '            yield self.%(child)s\n') % dict(child=nm))
            for nm in self.seq_child:
                lines.append(
                    ('        for child in (self.%(child)s or []):\n'
                     '            yield child\n') % dict(child=nm))
            if not (self.child or self.seq_child):
                # Empty generator
                lines.append('        return\n' + '        yield\n')
        else:
            # Empty generator
            lines.append('        return\n' + '        yield\n')
        return ''.join(lines)

    def _gen_attr_names(self):
        return "    attr_names = (" + ''.join("%r, " % nm for nm in self.attr) + ')'
_PROLOGUE_COMMENT = \
r'''#-----------------------------------------------------------------
# ** ATTENTION **
# This code was automatically generated from the file:
# $cfg_filename
#
# Do not modify it directly. Modify the configuration file and
# run the generator again.
# ** ** *** ** **
#
# pycparser: c_ast.py
#
# AST Node classes.
#
# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
#-----------------------------------------------------------------
'''
_PROLOGUE_CODE = r'''
import sys
def _repr(obj):
"""
Get the representation of an object, with dedicated pprint-like format for lists.
"""
if isinstance(obj, list):
return '[' + (',\n '.join((_repr(e).replace('\n', '\n ') for e in obj))) + '\n]'
else:
return repr(obj)
class Node(object):
__slots__ = ()
""" Abstract base class for AST nodes.
"""
def __repr__(self):
""" Generates a python representation of the current node
"""
result = self.__class__.__name__ + '('
indent = ''
separator = ''
for name in self.__slots__[:-2]:
result += separator
result += indent
result += name + '=' + (_repr(getattr(self, name)).replace('\n', '\n ' + (' ' * (len(name) + len(self.__class__.__name__)))))
separator = ','
indent = '\n ' + (' ' * len(self.__class__.__name__))
result += indent + ')'
return result
def children(self):
""" A sequence of all children that are Nodes
"""
pass
def show(self, buf=sys.stdout, offset=0, attrnames=False, nodenames=False, showcoord=False, _my_node_name=None):
""" Pretty print the Node and all its attributes and
children (recursively) to a buffer.
buf:
Open IO buffer into which the Node is printed.
offset:
Initial offset (amount of leading spaces)
attrnames:
True if you want to see the attribute names in
name=value pairs. False to only see the values.
nodenames:
True if you want to see the actual node names
within their parents.
showcoord:
Do you want the coordinates of each Node to be
displayed.
"""
lead = ' ' * offset
if nodenames and _my_node_name is not None:
buf.write(lead + self.__class__.__name__+ ' <' + _my_node_name + '>: ')
else:
buf.write(lead + self.__class__.__name__+ ': ')
if self.attr_names:
if attrnames:
nvlist = [(n, getattr(self,n)) for n in self.attr_names]
attrstr = ', '.join('%s=%s' % nv for nv in nvlist)
else:
vlist = [getattr(self, n) for n in self.attr_names]
attrstr = ', '.join('%s' % v for v in vlist)
buf.write(attrstr)
if showcoord:
buf.write(' (at %s)' % self.coord)
buf.write('\n')
for (child_name, child) in self.children():
child.show(
buf,
offset=offset + 2,
attrnames=attrnames,
nodenames=nodenames,
showcoord=showcoord,
_my_node_name=child_name)
class NodeVisitor(object):
""" A base NodeVisitor class for visiting c_ast nodes.
Subclass it and define your own visit_XXX methods, where
XXX is the class name you want to visit with these
methods.
For example:
class ConstantVisitor(NodeVisitor):
def __init__(self):
self.values = []
def visit_Constant(self, node):
self.values.append(node.value)
Creates a list of values of all the constant nodes
encountered below the given node. To use it:
cv = ConstantVisitor()
cv.visit(node)
Notes:
* generic_visit() will be called for AST nodes for which
no visit_XXX method was defined.
* The children of nodes for which a visit_XXX was
defined will not be visited - if you need this, call
generic_visit() on the node.
You can use:
NodeVisitor.generic_visit(self, node)
* Modeled after Python's own AST visiting facilities
(the ast module of Python 3.0)
"""
_method_cache = None
def visit(self, node):
""" Visit a node.
"""
if self._method_cache is None:
self._method_cache = {}
visitor = self._method_cache.get(node.__class__.__name__, None)
if visitor is None:
method = 'visit_' + node.__class__.__name__
visitor = getattr(self, method, self.generic_visit)
self._method_cache[node.__class__.__name__] = visitor
return visitor(node)
def generic_visit(self, node):
""" Called if no explicit visitor function exists for a
node. Implements preorder visiting of the node.
"""
for c in node:
self.visit(c)
'''
| 10,555 | Python | 30.323442 | 138 | 0.484889 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pycparser/c_ast.py | #-----------------------------------------------------------------
# ** ATTENTION **
# This code was automatically generated from the file:
# _c_ast.cfg
#
# Do not modify it directly. Modify the configuration file and
# run the generator again.
# ** ** *** ** **
#
# pycparser: c_ast.py
#
# AST Node classes.
#
# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
#-----------------------------------------------------------------
import sys
def _repr(obj):
"""
Get the representation of an object, with dedicated pprint-like format for lists.
"""
if isinstance(obj, list):
return '[' + (',\n '.join((_repr(e).replace('\n', '\n ') for e in obj))) + '\n]'
else:
return repr(obj)
class Node(object):
    # NOTE(review): because __slots__ comes first, the string below is a
    # plain no-op expression statement, not the class docstring (a quirk of
    # the _ast_gen.py template this file is generated from).
    __slots__ = ()
    """ Abstract base class for AST nodes.
    """
    def __repr__(self):
        """ Generates a python representation of the current node
        """
        # Walk the data slots, skipping the trailing 'coord'/'__weakref__'
        # pair, and render name=value pairs aligned under the class name.
        result = self.__class__.__name__ + '('
        indent = ''
        separator = ''
        for name in self.__slots__[:-2]:
            result += separator
            result += indent
            result += name + '=' + (_repr(getattr(self, name)).replace('\n', '\n  ' + (' ' * (len(name) + len(self.__class__.__name__)))))
            separator = ','
            indent = '\n ' + (' ' * len(self.__class__.__name__))
        result += indent + ')'
        return result
    def children(self):
        """ A sequence of all children that are Nodes.

            Overridden by every generated subclass; the base implementation
            returns None.
        """
        pass
    def show(self, buf=sys.stdout, offset=0, attrnames=False, nodenames=False, showcoord=False, _my_node_name=None):
        """ Pretty print the Node and all its attributes and
            children (recursively) to a buffer.

            buf:
                Open IO buffer into which the Node is printed.

            offset:
                Initial offset (amount of leading spaces)

            attrnames:
                True if you want to see the attribute names in
                name=value pairs. False to only see the values.

            nodenames:
                True if you want to see the actual node names
                within their parents.

            showcoord:
                Do you want the coordinates of each Node to be
                displayed.

            _my_node_name:
                Internal: the name of this node within its parent,
                passed down by the recursive calls below.
        """
        lead = ' ' * offset
        if nodenames and _my_node_name is not None:
            buf.write(lead + self.__class__.__name__+ ' <' + _my_node_name + '>: ')
        else:
            buf.write(lead + self.__class__.__name__+ ': ')
        if self.attr_names:
            if attrnames:
                nvlist = [(n, getattr(self,n)) for n in self.attr_names]
                attrstr = ', '.join('%s=%s' % nv for nv in nvlist)
            else:
                vlist = [getattr(self, n) for n in self.attr_names]
                attrstr = ', '.join('%s' % v for v in vlist)
            buf.write(attrstr)
        if showcoord:
            buf.write(' (at %s)' % self.coord)
        buf.write('\n')
        # Recurse into the children, indenting two spaces per level.
        for (child_name, child) in self.children():
            child.show(
                buf,
                offset=offset + 2,
                attrnames=attrnames,
                nodenames=nodenames,
                showcoord=showcoord,
                _my_node_name=child_name)
class NodeVisitor(object):
    """ A base NodeVisitor class for visiting c_ast nodes.

        Subclass it and define your own visit_XXX methods, where XXX is the
        class name of the node you want to handle.  For example::

            class ConstantVisitor(NodeVisitor):
                def __init__(self):
                    self.values = []

                def visit_Constant(self, node):
                    self.values.append(node.value)

        Then ``ConstantVisitor().visit(node)`` collects the values of all
        Constant nodes below *node*.

        Notes:

        *   generic_visit() is called for nodes with no matching visit_XXX
            method, and traverses their children in preorder.
        *   Children of a node handled by a visit_XXX method are NOT
            visited automatically - call ``NodeVisitor.generic_visit(self,
            node)`` from the method if you need that.
        *   Modeled after Python's own AST visiting facilities (the ast
            module of Python 3.0).
    """

    # Per-instance memo of node-class-name -> bound handler, created lazily
    # on the first visit() call (the class-level None is never mutated).
    _method_cache = None

    def visit(self, node):
        """ Visit a node.
        """
        cache = self._method_cache
        if cache is None:
            cache = self._method_cache = {}
        klass = node.__class__.__name__
        try:
            handler = cache[klass]
        except KeyError:
            handler = getattr(self, 'visit_' + klass, self.generic_visit)
            cache[klass] = handler
        return handler(node)

    def generic_visit(self, node):
        """ Called if no explicit visitor function exists for a
            node. Implements preorder visiting of the node.
        """
        for child in node:
            self.visit(child)
class ArrayDecl(Node):
__slots__ = ('type', 'dim', 'dim_quals', 'coord', '__weakref__')
def __init__(self, type, dim, dim_quals, coord=None):
self.type = type
self.dim = dim
self.dim_quals = dim_quals
self.coord = coord
def children(self):
nodelist = []
if self.type is not None: nodelist.append(("type", self.type))
if self.dim is not None: nodelist.append(("dim", self.dim))
return tuple(nodelist)
def __iter__(self):
if self.type is not None:
yield self.type
if self.dim is not None:
yield self.dim
attr_names = ('dim_quals', )
class ArrayRef(Node):
__slots__ = ('name', 'subscript', 'coord', '__weakref__')
def __init__(self, name, subscript, coord=None):
self.name = name
self.subscript = subscript
self.coord = coord
def children(self):
nodelist = []
if self.name is not None: nodelist.append(("name", self.name))
if self.subscript is not None: nodelist.append(("subscript", self.subscript))
return tuple(nodelist)
def __iter__(self):
if self.name is not None:
yield self.name
if self.subscript is not None:
yield self.subscript
attr_names = ()
class Assignment(Node):
__slots__ = ('op', 'lvalue', 'rvalue', 'coord', '__weakref__')
def __init__(self, op, lvalue, rvalue, coord=None):
self.op = op
self.lvalue = lvalue
self.rvalue = rvalue
self.coord = coord
def children(self):
nodelist = []
if self.lvalue is not None: nodelist.append(("lvalue", self.lvalue))
if self.rvalue is not None: nodelist.append(("rvalue", self.rvalue))
return tuple(nodelist)
def __iter__(self):
if self.lvalue is not None:
yield self.lvalue
if self.rvalue is not None:
yield self.rvalue
attr_names = ('op', )
class Alignas(Node):
__slots__ = ('alignment', 'coord', '__weakref__')
def __init__(self, alignment, coord=None):
self.alignment = alignment
self.coord = coord
def children(self):
nodelist = []
if self.alignment is not None: nodelist.append(("alignment", self.alignment))
return tuple(nodelist)
def __iter__(self):
if self.alignment is not None:
yield self.alignment
attr_names = ()
class BinaryOp(Node):
__slots__ = ('op', 'left', 'right', 'coord', '__weakref__')
def __init__(self, op, left, right, coord=None):
self.op = op
self.left = left
self.right = right
self.coord = coord
def children(self):
nodelist = []
if self.left is not None: nodelist.append(("left", self.left))
if self.right is not None: nodelist.append(("right", self.right))
return tuple(nodelist)
def __iter__(self):
if self.left is not None:
yield self.left
if self.right is not None:
yield self.right
attr_names = ('op', )
class Break(Node):
__slots__ = ('coord', '__weakref__')
def __init__(self, coord=None):
self.coord = coord
def children(self):
return ()
def __iter__(self):
return
yield
attr_names = ()
class Case(Node):
__slots__ = ('expr', 'stmts', 'coord', '__weakref__')
def __init__(self, expr, stmts, coord=None):
self.expr = expr
self.stmts = stmts
self.coord = coord
def children(self):
nodelist = []
if self.expr is not None: nodelist.append(("expr", self.expr))
for i, child in enumerate(self.stmts or []):
nodelist.append(("stmts[%d]" % i, child))
return tuple(nodelist)
def __iter__(self):
if self.expr is not None:
yield self.expr
for child in (self.stmts or []):
yield child
attr_names = ()
class Cast(Node):
__slots__ = ('to_type', 'expr', 'coord', '__weakref__')
def __init__(self, to_type, expr, coord=None):
self.to_type = to_type
self.expr = expr
self.coord = coord
def children(self):
nodelist = []
if self.to_type is not None: nodelist.append(("to_type", self.to_type))
if self.expr is not None: nodelist.append(("expr", self.expr))
return tuple(nodelist)
def __iter__(self):
if self.to_type is not None:
yield self.to_type
if self.expr is not None:
yield self.expr
attr_names = ()
class Compound(Node):
__slots__ = ('block_items', 'coord', '__weakref__')
def __init__(self, block_items, coord=None):
self.block_items = block_items
self.coord = coord
def children(self):
nodelist = []
for i, child in enumerate(self.block_items or []):
nodelist.append(("block_items[%d]" % i, child))
return tuple(nodelist)
def __iter__(self):
for child in (self.block_items or []):
yield child
attr_names = ()
class CompoundLiteral(Node):
__slots__ = ('type', 'init', 'coord', '__weakref__')
def __init__(self, type, init, coord=None):
self.type = type
self.init = init
self.coord = coord
def children(self):
nodelist = []
if self.type is not None: nodelist.append(("type", self.type))
if self.init is not None: nodelist.append(("init", self.init))
return tuple(nodelist)
def __iter__(self):
if self.type is not None:
yield self.type
if self.init is not None:
yield self.init
attr_names = ()
class Constant(Node):
__slots__ = ('type', 'value', 'coord', '__weakref__')
def __init__(self, type, value, coord=None):
self.type = type
self.value = value
self.coord = coord
def children(self):
nodelist = []
return tuple(nodelist)
def __iter__(self):
return
yield
attr_names = ('type', 'value', )
class Continue(Node):
__slots__ = ('coord', '__weakref__')
def __init__(self, coord=None):
self.coord = coord
def children(self):
return ()
def __iter__(self):
return
yield
attr_names = ()
class Decl(Node):
__slots__ = ('name', 'quals', 'align', 'storage', 'funcspec', 'type', 'init', 'bitsize', 'coord', '__weakref__')
def __init__(self, name, quals, align, storage, funcspec, type, init, bitsize, coord=None):
self.name = name
self.quals = quals
self.align = align
self.storage = storage
self.funcspec = funcspec
self.type = type
self.init = init
self.bitsize = bitsize
self.coord = coord
def children(self):
nodelist = []
if self.type is not None: nodelist.append(("type", self.type))
if self.init is not None: nodelist.append(("init", self.init))
if self.bitsize is not None: nodelist.append(("bitsize", self.bitsize))
return tuple(nodelist)
def __iter__(self):
if self.type is not None:
yield self.type
if self.init is not None:
yield self.init
if self.bitsize is not None:
yield self.bitsize
attr_names = ('name', 'quals', 'align', 'storage', 'funcspec', )
class DeclList(Node):
__slots__ = ('decls', 'coord', '__weakref__')
def __init__(self, decls, coord=None):
self.decls = decls
self.coord = coord
def children(self):
nodelist = []
for i, child in enumerate(self.decls or []):
nodelist.append(("decls[%d]" % i, child))
return tuple(nodelist)
def __iter__(self):
for child in (self.decls or []):
yield child
attr_names = ()
class Default(Node):
__slots__ = ('stmts', 'coord', '__weakref__')
def __init__(self, stmts, coord=None):
self.stmts = stmts
self.coord = coord
def children(self):
nodelist = []
for i, child in enumerate(self.stmts or []):
nodelist.append(("stmts[%d]" % i, child))
return tuple(nodelist)
def __iter__(self):
for child in (self.stmts or []):
yield child
attr_names = ()
class DoWhile(Node):
__slots__ = ('cond', 'stmt', 'coord', '__weakref__')
def __init__(self, cond, stmt, coord=None):
self.cond = cond
self.stmt = stmt
self.coord = coord
def children(self):
nodelist = []
if self.cond is not None: nodelist.append(("cond", self.cond))
if self.stmt is not None: nodelist.append(("stmt", self.stmt))
return tuple(nodelist)
def __iter__(self):
if self.cond is not None:
yield self.cond
if self.stmt is not None:
yield self.stmt
attr_names = ()
class EllipsisParam(Node):
__slots__ = ('coord', '__weakref__')
def __init__(self, coord=None):
self.coord = coord
def children(self):
return ()
def __iter__(self):
return
yield
attr_names = ()
class EmptyStatement(Node):
__slots__ = ('coord', '__weakref__')
def __init__(self, coord=None):
self.coord = coord
def children(self):
return ()
def __iter__(self):
return
yield
attr_names = ()
class Enum(Node):
__slots__ = ('name', 'values', 'coord', '__weakref__')
def __init__(self, name, values, coord=None):
self.name = name
self.values = values
self.coord = coord
def children(self):
nodelist = []
if self.values is not None: nodelist.append(("values", self.values))
return tuple(nodelist)
def __iter__(self):
if self.values is not None:
yield self.values
attr_names = ('name', )
class Enumerator(Node):
__slots__ = ('name', 'value', 'coord', '__weakref__')
def __init__(self, name, value, coord=None):
self.name = name
self.value = value
self.coord = coord
def children(self):
nodelist = []
if self.value is not None: nodelist.append(("value", self.value))
return tuple(nodelist)
def __iter__(self):
if self.value is not None:
yield self.value
attr_names = ('name', )
class EnumeratorList(Node):
__slots__ = ('enumerators', 'coord', '__weakref__')
def __init__(self, enumerators, coord=None):
self.enumerators = enumerators
self.coord = coord
def children(self):
nodelist = []
for i, child in enumerate(self.enumerators or []):
nodelist.append(("enumerators[%d]" % i, child))
return tuple(nodelist)
def __iter__(self):
for child in (self.enumerators or []):
yield child
attr_names = ()
class ExprList(Node):
__slots__ = ('exprs', 'coord', '__weakref__')
def __init__(self, exprs, coord=None):
self.exprs = exprs
self.coord = coord
def children(self):
nodelist = []
for i, child in enumerate(self.exprs or []):
nodelist.append(("exprs[%d]" % i, child))
return tuple(nodelist)
def __iter__(self):
for child in (self.exprs or []):
yield child
attr_names = ()
class FileAST(Node):
__slots__ = ('ext', 'coord', '__weakref__')
def __init__(self, ext, coord=None):
self.ext = ext
self.coord = coord
def children(self):
nodelist = []
for i, child in enumerate(self.ext or []):
nodelist.append(("ext[%d]" % i, child))
return tuple(nodelist)
def __iter__(self):
for child in (self.ext or []):
yield child
attr_names = ()
class For(Node):
__slots__ = ('init', 'cond', 'next', 'stmt', 'coord', '__weakref__')
def __init__(self, init, cond, next, stmt, coord=None):
self.init = init
self.cond = cond
self.next = next
self.stmt = stmt
self.coord = coord
def children(self):
nodelist = []
if self.init is not None: nodelist.append(("init", self.init))
if self.cond is not None: nodelist.append(("cond", self.cond))
if self.next is not None: nodelist.append(("next", self.next))
if self.stmt is not None: nodelist.append(("stmt", self.stmt))
return tuple(nodelist)
def __iter__(self):
if self.init is not None:
yield self.init
if self.cond is not None:
yield self.cond
if self.next is not None:
yield self.next
if self.stmt is not None:
yield self.stmt
attr_names = ()
class FuncCall(Node):
__slots__ = ('name', 'args', 'coord', '__weakref__')
def __init__(self, name, args, coord=None):
self.name = name
self.args = args
self.coord = coord
def children(self):
nodelist = []
if self.name is not None: nodelist.append(("name", self.name))
if self.args is not None: nodelist.append(("args", self.args))
return tuple(nodelist)
def __iter__(self):
if self.name is not None:
yield self.name
if self.args is not None:
yield self.args
attr_names = ()
class FuncDecl(Node):
__slots__ = ('args', 'type', 'coord', '__weakref__')
def __init__(self, args, type, coord=None):
self.args = args
self.type = type
self.coord = coord
def children(self):
nodelist = []
if self.args is not None: nodelist.append(("args", self.args))
if self.type is not None: nodelist.append(("type", self.type))
return tuple(nodelist)
def __iter__(self):
if self.args is not None:
yield self.args
if self.type is not None:
yield self.type
attr_names = ()
class FuncDef(Node):
__slots__ = ('decl', 'param_decls', 'body', 'coord', '__weakref__')
def __init__(self, decl, param_decls, body, coord=None):
self.decl = decl
self.param_decls = param_decls
self.body = body
self.coord = coord
def children(self):
nodelist = []
if self.decl is not None: nodelist.append(("decl", self.decl))
if self.body is not None: nodelist.append(("body", self.body))
for i, child in enumerate(self.param_decls or []):
nodelist.append(("param_decls[%d]" % i, child))
return tuple(nodelist)
def __iter__(self):
if self.decl is not None:
yield self.decl
if self.body is not None:
yield self.body
for child in (self.param_decls or []):
yield child
attr_names = ()
class Goto(Node):
__slots__ = ('name', 'coord', '__weakref__')
def __init__(self, name, coord=None):
self.name = name
self.coord = coord
def children(self):
nodelist = []
return tuple(nodelist)
def __iter__(self):
return
yield
attr_names = ('name', )
class ID(Node):
__slots__ = ('name', 'coord', '__weakref__')
def __init__(self, name, coord=None):
self.name = name
self.coord = coord
def children(self):
nodelist = []
return tuple(nodelist)
def __iter__(self):
return
yield
attr_names = ('name', )
class IdentifierType(Node):
__slots__ = ('names', 'coord', '__weakref__')
def __init__(self, names, coord=None):
self.names = names
self.coord = coord
def children(self):
nodelist = []
return tuple(nodelist)
def __iter__(self):
return
yield
attr_names = ('names', )
class If(Node):
__slots__ = ('cond', 'iftrue', 'iffalse', 'coord', '__weakref__')
def __init__(self, cond, iftrue, iffalse, coord=None):
self.cond = cond
self.iftrue = iftrue
self.iffalse = iffalse
self.coord = coord
def children(self):
nodelist = []
if self.cond is not None: nodelist.append(("cond", self.cond))
if self.iftrue is not None: nodelist.append(("iftrue", self.iftrue))
if self.iffalse is not None: nodelist.append(("iffalse", self.iffalse))
return tuple(nodelist)
def __iter__(self):
if self.cond is not None:
yield self.cond
if self.iftrue is not None:
yield self.iftrue
if self.iffalse is not None:
yield self.iffalse
attr_names = ()
class InitList(Node):
__slots__ = ('exprs', 'coord', '__weakref__')
def __init__(self, exprs, coord=None):
self.exprs = exprs
self.coord = coord
def children(self):
nodelist = []
for i, child in enumerate(self.exprs or []):
nodelist.append(("exprs[%d]" % i, child))
return tuple(nodelist)
def __iter__(self):
for child in (self.exprs or []):
yield child
attr_names = ()
class Label(Node):
__slots__ = ('name', 'stmt', 'coord', '__weakref__')
def __init__(self, name, stmt, coord=None):
self.name = name
self.stmt = stmt
self.coord = coord
def children(self):
nodelist = []
if self.stmt is not None: nodelist.append(("stmt", self.stmt))
return tuple(nodelist)
def __iter__(self):
if self.stmt is not None:
yield self.stmt
attr_names = ('name', )
class NamedInitializer(Node):
__slots__ = ('name', 'expr', 'coord', '__weakref__')
def __init__(self, name, expr, coord=None):
self.name = name
self.expr = expr
self.coord = coord
def children(self):
nodelist = []
if self.expr is not None: nodelist.append(("expr", self.expr))
for i, child in enumerate(self.name or []):
nodelist.append(("name[%d]" % i, child))
return tuple(nodelist)
def __iter__(self):
if self.expr is not None:
yield self.expr
for child in (self.name or []):
yield child
attr_names = ()
class ParamList(Node):
__slots__ = ('params', 'coord', '__weakref__')
def __init__(self, params, coord=None):
self.params = params
self.coord = coord
def children(self):
nodelist = []
for i, child in enumerate(self.params or []):
nodelist.append(("params[%d]" % i, child))
return tuple(nodelist)
def __iter__(self):
for child in (self.params or []):
yield child
attr_names = ()
class PtrDecl(Node):
__slots__ = ('quals', 'type', 'coord', '__weakref__')
def __init__(self, quals, type, coord=None):
self.quals = quals
self.type = type
self.coord = coord
def children(self):
nodelist = []
if self.type is not None: nodelist.append(("type", self.type))
return tuple(nodelist)
def __iter__(self):
if self.type is not None:
yield self.type
attr_names = ('quals', )
class Return(Node):
__slots__ = ('expr', 'coord', '__weakref__')
def __init__(self, expr, coord=None):
self.expr = expr
self.coord = coord
def children(self):
nodelist = []
if self.expr is not None: nodelist.append(("expr", self.expr))
return tuple(nodelist)
def __iter__(self):
if self.expr is not None:
yield self.expr
attr_names = ()
class StaticAssert(Node):
    """``_Static_assert(cond, message)``; ``message`` may be None."""
    __slots__ = ('cond', 'message', 'coord', '__weakref__')

    def __init__(self, cond, message, coord=None):
        self.cond = cond
        self.message = message
        self.coord = coord

    def children(self):
        pairs = [(label, node)
                 for label, node in (("cond", self.cond),
                                     ("message", self.message))
                 if node is not None]
        return tuple(pairs)

    def __iter__(self):
        for node in (self.cond, self.message):
            if node is not None:
                yield node

    attr_names = ()
class Struct(Node):
    """``struct`` type: optional tag ``name`` plus a list of declarations."""
    __slots__ = ('name', 'decls', 'coord', '__weakref__')

    def __init__(self, name, decls, coord=None):
        self.name = name
        self.decls = decls
        self.coord = coord

    def children(self):
        return tuple(("decls[%d]" % idx, node)
                     for idx, node in enumerate(self.decls or []))

    def __iter__(self):
        for node in (self.decls or []):
            yield node

    attr_names = ('name', )
class StructRef(Node):
    """Member access ``name.field`` or ``name->field``; ``type`` is the
    access operator string and is reported via attr_names, not children."""
    __slots__ = ('name', 'type', 'field', 'coord', '__weakref__')

    def __init__(self, name, type, field, coord=None):
        self.name = name
        self.type = type
        self.field = field
        self.coord = coord

    def children(self):
        pairs = [(label, node)
                 for label, node in (("name", self.name),
                                     ("field", self.field))
                 if node is not None]
        return tuple(pairs)

    def __iter__(self):
        for node in (self.name, self.field):
            if node is not None:
                yield node

    attr_names = ('type', )
class Switch(Node):
    """``switch (cond) stmt`` statement."""
    __slots__ = ('cond', 'stmt', 'coord', '__weakref__')

    def __init__(self, cond, stmt, coord=None):
        self.cond = cond
        self.stmt = stmt
        self.coord = coord

    def children(self):
        pairs = [(label, node)
                 for label, node in (("cond", self.cond),
                                     ("stmt", self.stmt))
                 if node is not None]
        return tuple(pairs)

    def __iter__(self):
        for node in (self.cond, self.stmt):
            if node is not None:
                yield node

    attr_names = ()
class TernaryOp(Node):
    """Conditional expression ``cond ? iftrue : iffalse``."""
    __slots__ = ('cond', 'iftrue', 'iffalse', 'coord', '__weakref__')

    def __init__(self, cond, iftrue, iffalse, coord=None):
        self.cond = cond
        self.iftrue = iftrue
        self.iffalse = iffalse
        self.coord = coord

    def children(self):
        pairs = [(label, node)
                 for label, node in (("cond", self.cond),
                                     ("iftrue", self.iftrue),
                                     ("iffalse", self.iffalse))
                 if node is not None]
        return tuple(pairs)

    def __iter__(self):
        for node in (self.cond, self.iftrue, self.iffalse):
            if node is not None:
                yield node

    attr_names = ()
class TypeDecl(Node):
    """Base declarator carrying the declared name, qualifier list and
    alignment; the underlying type hangs off ``type``."""
    __slots__ = ('declname', 'quals', 'align', 'type', 'coord', '__weakref__')

    def __init__(self, declname, quals, align, type, coord=None):
        self.declname = declname
        self.quals = quals
        self.align = align
        self.type = type
        self.coord = coord

    def children(self):
        if self.type is None:
            return ()
        return (("type", self.type),)

    def __iter__(self):
        if self.type is not None:
            yield self.type

    attr_names = ('declname', 'quals', 'align', )
class Typedef(Node):
    """``typedef`` declaration binding ``name`` to ``type``."""
    __slots__ = ('name', 'quals', 'storage', 'type', 'coord', '__weakref__')

    def __init__(self, name, quals, storage, type, coord=None):
        self.name = name
        self.quals = quals
        self.storage = storage
        self.type = type
        self.coord = coord

    def children(self):
        if self.type is None:
            return ()
        return (("type", self.type),)

    def __iter__(self):
        if self.type is not None:
            yield self.type

    attr_names = ('name', 'quals', 'storage', )
class Typename(Node):
    """Type name (e.g. in a cast or sizeof), possibly anonymous."""
    __slots__ = ('name', 'quals', 'align', 'type', 'coord', '__weakref__')

    def __init__(self, name, quals, align, type, coord=None):
        self.name = name
        self.quals = quals
        self.align = align
        self.type = type
        self.coord = coord

    def children(self):
        if self.type is None:
            return ()
        return (("type", self.type),)

    def __iter__(self):
        if self.type is not None:
            yield self.type

    attr_names = ('name', 'quals', 'align', )
class UnaryOp(Node):
    """Unary operator application; ``op`` is the operator string."""
    __slots__ = ('op', 'expr', 'coord', '__weakref__')

    def __init__(self, op, expr, coord=None):
        self.op = op
        self.expr = expr
        self.coord = coord

    def children(self):
        if self.expr is None:
            return ()
        return (("expr", self.expr),)

    def __iter__(self):
        if self.expr is not None:
            yield self.expr

    attr_names = ('op', )
class Union(Node):
    """``union`` type: optional tag ``name`` plus a list of declarations."""
    __slots__ = ('name', 'decls', 'coord', '__weakref__')

    def __init__(self, name, decls, coord=None):
        self.name = name
        self.decls = decls
        self.coord = coord

    def children(self):
        return tuple(("decls[%d]" % idx, node)
                     for idx, node in enumerate(self.decls or []))

    def __iter__(self):
        for node in (self.decls or []):
            yield node

    attr_names = ('name', )
class While(Node):
    """``while (cond) stmt`` loop."""
    __slots__ = ('cond', 'stmt', 'coord', '__weakref__')

    def __init__(self, cond, stmt, coord=None):
        self.cond = cond
        self.stmt = stmt
        self.coord = coord

    def children(self):
        pairs = [(label, node)
                 for label, node in (("cond", self.cond),
                                     ("stmt", self.stmt))
                 if node is not None]
        return tuple(pairs)

    def __iter__(self):
        for node in (self.cond, self.stmt):
            if node is not None:
                yield node

    attr_names = ()
class Pragma(Node):
    """``#pragma`` directive; the raw text lives in ``string``."""
    __slots__ = ('string', 'coord', '__weakref__')

    def __init__(self, string, coord=None):
        self.string = string
        self.coord = coord

    def children(self):
        # A pragma has no child nodes.
        return ()

    def __iter__(self):
        # Empty generator: the bare yield after return keeps this a
        # generator function without ever producing a value.
        return
        yield

    attr_names = ('string', )
| 31,445 | Python | 26.927176 | 138 | 0.536747 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pycparser/__init__.py | #-----------------------------------------------------------------
# pycparser: __init__.py
#
# This package file exports some convenience functions for
# interacting with pycparser
#
# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
#-----------------------------------------------------------------
__all__ = ['c_lexer', 'c_parser', 'c_ast']
__version__ = '2.21'
import io
from subprocess import check_output
from .c_parser import CParser
def preprocess_file(filename, cpp_path='cpp', cpp_args=''):
    """ Run the C preprocessor over a file and return its output.
    filename:
        Name of the file you want to preprocess.
    cpp_path:
    cpp_args:
        Refer to the documentation of parse_file for the meaning of these
        arguments.
    When successful, returns the preprocessed file's contents.
    Errors from cpp will be printed out.
    """
    # Assemble the command line: cpp binary, optional extra args, filename.
    command = [cpp_path]
    if isinstance(cpp_args, list):
        command.extend(cpp_args)
    elif cpp_args != '':
        command.append(cpp_args)
    command.append(filename)
    try:
        # universal_newlines=True makes check_output return text with
        # all newlines normalized to '\n'.
        return check_output(command, universal_newlines=True)
    except OSError as e:
        # The cpp binary could not be executed at all.
        raise RuntimeError("Unable to invoke 'cpp'. " +
            'Make sure its path was passed correctly\n' +
            ('Original error: %s' % e))
def parse_file(filename, use_cpp=False, cpp_path='cpp', cpp_args='',
               parser=None):
    """ Parse a C file using pycparser.
    filename:
        Name of the file you want to parse.
    use_cpp:
        Set to True if you want to execute the C pre-processor
        on the file prior to parsing it.
    cpp_path:
        If use_cpp is True, this is the path to 'cpp' on your
        system. If no path is provided, it attempts to just
        execute 'cpp', so it must be in your PATH.
    cpp_args:
        If use_cpp is True, set this to the command line arguments strings
        to cpp. Be careful with quotes - it's best to pass a raw string
        (r'') here. For example:
        r'-I../utils/fake_libc_include'
        If several arguments are required, pass a list of strings.
    parser:
        Optional parser object to be used instead of the default CParser
    When successful, an AST is returned. ParseError can be
    thrown if the file doesn't parse successfully.
    Errors from cpp will be printed out.
    """
    # Obtain the source text, either preprocessed or read verbatim.
    if use_cpp:
        source = preprocess_file(filename, cpp_path, cpp_args)
    else:
        with io.open(filename) as stream:
            source = stream.read()
    active_parser = CParser() if parser is None else parser
    return active_parser.parse(source, filename)
| 2,815 | Python | 29.945055 | 78 | 0.572647 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pycparser/c_lexer.py | #------------------------------------------------------------------------------
# pycparser: c_lexer.py
#
# CLexer class: lexer for the C language
#
# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
#------------------------------------------------------------------------------
import re
from .ply import lex
from .ply.lex import TOKEN
class CLexer(object):
    """ A lexer for the C language. After building it, set the
        input text with input(), and call token() to get new
        tokens.
        The public attribute filename can be set to an initial
        filename, but the lexer will update it upon #line
        directives.
    """
    # NOTE(review): PLY builds the lexer by reflecting over the t_* names,
    # their definition order, and rule-function docstrings (which are the
    # token regexes). Do not rename, reorder, or add docstrings to rules.
    def __init__(self, error_func, on_lbrace_func, on_rbrace_func,
                 type_lookup_func):
        """ Create a new Lexer.
            error_func:
                An error function. Will be called with an error
                message, line and column as arguments, in case of
                an error during lexing.
            on_lbrace_func, on_rbrace_func:
                Called when an LBRACE or RBRACE is encountered
                (likely to push/pop type_lookup_func's scope)
            type_lookup_func:
                A type lookup function. Given a string, it must
                return True IFF this string is a name of a type
                that was defined with a typedef earlier.
        """
        self.error_func = error_func
        self.on_lbrace_func = on_lbrace_func
        self.on_rbrace_func = on_rbrace_func
        self.type_lookup_func = type_lookup_func
        self.filename = ''
        # Keeps track of the last token returned from self.token()
        self.last_token = None
        # Allow either "# line" or "# <num>" to support GCC's
        # cpp output
        #
        self.line_pattern = re.compile(r'([ \t]*line\W)|([ \t]*\d+)')
        self.pragma_pattern = re.compile(r'[ \t]*pragma\W')
    def build(self, **kwargs):
        """ Builds the lexer from the specification. Must be
            called after the lexer object is created.
            This method exists separately, because the PLY
            manual warns against calling lex.lex inside
            __init__
        """
        self.lexer = lex.lex(object=self, **kwargs)
    def reset_lineno(self):
        """ Resets the internal line number counter of the lexer.
        """
        self.lexer.lineno = 1
    def input(self, text):
        self.lexer.input(text)
    def token(self):
        # Remember the token so the parser can query the lookahead later.
        self.last_token = self.lexer.token()
        return self.last_token
    def find_tok_column(self, token):
        """ Find the column of the token in its line.
        """
        last_cr = self.lexer.lexdata.rfind('\n', 0, token.lexpos)
        return token.lexpos - last_cr
    ######################-- PRIVATE --######################
    ##
    ## Internal auxiliary methods
    ##
    def _error(self, msg, token):
        # Report through the user-supplied error_func, then skip the
        # offending character so lexing can continue.
        location = self._make_tok_location(token)
        self.error_func(msg, location[0], location[1])
        self.lexer.skip(1)
    def _make_tok_location(self, token):
        # (line, column) pair for a token.
        return (token.lineno, self.find_tok_column(token))
    ##
    ## Reserved keywords
    ##
    keywords = (
        'AUTO', 'BREAK', 'CASE', 'CHAR', 'CONST',
        'CONTINUE', 'DEFAULT', 'DO', 'DOUBLE', 'ELSE', 'ENUM', 'EXTERN',
        'FLOAT', 'FOR', 'GOTO', 'IF', 'INLINE', 'INT', 'LONG',
        'REGISTER', 'OFFSETOF',
        'RESTRICT', 'RETURN', 'SHORT', 'SIGNED', 'SIZEOF', 'STATIC', 'STRUCT',
        'SWITCH', 'TYPEDEF', 'UNION', 'UNSIGNED', 'VOID',
        'VOLATILE', 'WHILE', '__INT128',
    )
    keywords_new = (
        '_BOOL', '_COMPLEX',
        '_NORETURN', '_THREAD_LOCAL', '_STATIC_ASSERT',
        '_ATOMIC', '_ALIGNOF', '_ALIGNAS',
    )
    keyword_map = {}
    # Classic keywords are spelled all-lowercase in C source.
    for keyword in keywords:
        keyword_map[keyword.lower()] = keyword
    # C11 keywords keep the leading underscore and a capital letter,
    # e.g. '_BOOL' maps from the source spelling '_Bool'.
    for keyword in keywords_new:
        keyword_map[keyword[:2].upper() + keyword[2:].lower()] = keyword
    ##
    ## All the tokens recognized by the lexer
    ##
    tokens = keywords + keywords_new + (
        # Identifiers
        'ID',
        # Type identifiers (identifiers previously defined as
        # types with typedef)
        'TYPEID',
        # constants
        'INT_CONST_DEC', 'INT_CONST_OCT', 'INT_CONST_HEX', 'INT_CONST_BIN', 'INT_CONST_CHAR',
        'FLOAT_CONST', 'HEX_FLOAT_CONST',
        'CHAR_CONST',
        'WCHAR_CONST',
        'U8CHAR_CONST',
        'U16CHAR_CONST',
        'U32CHAR_CONST',
        # String literals
        'STRING_LITERAL',
        'WSTRING_LITERAL',
        'U8STRING_LITERAL',
        'U16STRING_LITERAL',
        'U32STRING_LITERAL',
        # Operators
        'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'MOD',
        'OR', 'AND', 'NOT', 'XOR', 'LSHIFT', 'RSHIFT',
        'LOR', 'LAND', 'LNOT',
        'LT', 'LE', 'GT', 'GE', 'EQ', 'NE',
        # Assignment
        'EQUALS', 'TIMESEQUAL', 'DIVEQUAL', 'MODEQUAL',
        'PLUSEQUAL', 'MINUSEQUAL',
        'LSHIFTEQUAL','RSHIFTEQUAL', 'ANDEQUAL', 'XOREQUAL',
        'OREQUAL',
        # Increment/decrement
        'PLUSPLUS', 'MINUSMINUS',
        # Structure dereference (->)
        'ARROW',
        # Conditional operator (?)
        'CONDOP',
        # Delimiters
        'LPAREN', 'RPAREN', # ( )
        'LBRACKET', 'RBRACKET', # [ ]
        'LBRACE', 'RBRACE', # { }
        'COMMA', 'PERIOD', # . ,
        'SEMI', 'COLON', # ; :
        # Ellipsis (...)
        'ELLIPSIS',
        # pre-processor
        'PPHASH', # '#'
        'PPPRAGMA', # 'pragma'
        'PPPRAGMASTR',
    )
    ##
    ## Regexes for use in tokens
    ##
    ##
    # valid C identifiers (K&R2: A.2.3), plus '$' (supported by some compilers)
    identifier = r'[a-zA-Z_$][0-9a-zA-Z_$]*'
    hex_prefix = '0[xX]'
    hex_digits = '[0-9a-fA-F]+'
    bin_prefix = '0[bB]'
    bin_digits = '[01]+'
    # integer constants (K&R2: A.2.5.1)
    integer_suffix_opt = r'(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?'
    decimal_constant = '(0'+integer_suffix_opt+')|([1-9][0-9]*'+integer_suffix_opt+')'
    octal_constant = '0[0-7]*'+integer_suffix_opt
    hex_constant = hex_prefix+hex_digits+integer_suffix_opt
    bin_constant = bin_prefix+bin_digits+integer_suffix_opt
    bad_octal_constant = '0[0-7]*[89]'
    # character constants (K&R2: A.2.5.2)
    # Note: a-zA-Z and '.-~^_!=&;,' are allowed as escape chars to support #line
    # directives with Windows paths as filenames (..\..\dir\file)
    # For the same reason, decimal_escape allows all digit sequences. We want to
    # parse all correct code, even if it means to sometimes parse incorrect
    # code.
    #
    # The original regexes were taken verbatim from the C syntax definition,
    # and were later modified to avoid worst-case exponential running time.
    #
    # simple_escape = r"""([a-zA-Z._~!=&\^\-\\?'"])"""
    # decimal_escape = r"""(\d+)"""
    # hex_escape = r"""(x[0-9a-fA-F]+)"""
    # bad_escape = r"""([\\][^a-zA-Z._~^!=&\^\-\\?'"x0-7])"""
    #
    # The following modifications were made to avoid the ambiguity that allowed backtracking:
    # (https://github.com/eliben/pycparser/issues/61)
    #
    # - \x was removed from simple_escape, unless it was not followed by a hex digit, to avoid ambiguity with hex_escape.
    # - hex_escape allows one or more hex characters, but requires that the next character(if any) is not hex
    # - decimal_escape allows one or more decimal characters, but requires that the next character(if any) is not a decimal
    # - bad_escape does not allow any decimals (8-9), to avoid conflicting with the permissive decimal_escape.
    #
    # Without this change, python's `re` module would recursively try parsing each ambiguous escape sequence in multiple ways.
    # e.g. `\123` could be parsed as `\1`+`23`, `\12`+`3`, and `\123`.
    simple_escape = r"""([a-wyzA-Z._~!=&\^\-\\?'"]|x(?![0-9a-fA-F]))"""
    decimal_escape = r"""(\d+)(?!\d)"""
    hex_escape = r"""(x[0-9a-fA-F]+)(?![0-9a-fA-F])"""
    bad_escape = r"""([\\][^a-zA-Z._~^!=&\^\-\\?'"x0-9])"""
    escape_sequence = r"""(\\("""+simple_escape+'|'+decimal_escape+'|'+hex_escape+'))'
    # This complicated regex with lookahead might be slow for strings, so because all of the valid escapes (including \x) allowed
    # 0 or more non-escaped characters after the first character, simple_escape+decimal_escape+hex_escape got simplified to
    escape_sequence_start_in_string = r"""(\\[0-9a-zA-Z._~!=&\^\-\\?'"])"""
    cconst_char = r"""([^'\\\n]|"""+escape_sequence+')'
    char_const = "'"+cconst_char+"'"
    wchar_const = 'L'+char_const
    u8char_const = 'u8'+char_const
    u16char_const = 'u'+char_const
    u32char_const = 'U'+char_const
    multicharacter_constant = "'"+cconst_char+"{2,4}'"
    unmatched_quote = "('"+cconst_char+"*\\n)|('"+cconst_char+"*$)"
    bad_char_const = r"""('"""+cconst_char+"""[^'\n]+')|('')|('"""+bad_escape+r"""[^'\n]*')"""
    # string literals (K&R2: A.2.6)
    string_char = r"""([^"\\\n]|"""+escape_sequence_start_in_string+')'
    string_literal = '"'+string_char+'*"'
    wstring_literal = 'L'+string_literal
    u8string_literal = 'u8'+string_literal
    u16string_literal = 'u'+string_literal
    u32string_literal = 'U'+string_literal
    bad_string_literal = '"'+string_char+'*'+bad_escape+string_char+'*"'
    # floating constants (K&R2: A.2.5.3)
    exponent_part = r"""([eE][-+]?[0-9]+)"""
    fractional_constant = r"""([0-9]*\.[0-9]+)|([0-9]+\.)"""
    floating_constant = '(((('+fractional_constant+')'+exponent_part+'?)|([0-9]+'+exponent_part+'))[FfLl]?)'
    binary_exponent_part = r'''([pP][+-]?[0-9]+)'''
    hex_fractional_constant = '((('+hex_digits+r""")?\."""+hex_digits+')|('+hex_digits+r"""\.))"""
    hex_floating_constant = '('+hex_prefix+'('+hex_digits+'|'+hex_fractional_constant+')'+binary_exponent_part+'[FfLl]?)'
    ##
    ## Lexer states: used for preprocessor \n-terminated directives
    ##
    states = (
        # ppline: preprocessor line directives
        #
        ('ppline', 'exclusive'),
        # pppragma: pragma
        #
        ('pppragma', 'exclusive'),
    )
    def t_PPHASH(self, t):
        r'[ \t]*\#'
        # Dispatch on what follows '#': a #line directive, a #pragma,
        # or a bare hash token.
        if self.line_pattern.match(t.lexer.lexdata, pos=t.lexer.lexpos):
            t.lexer.begin('ppline')
            self.pp_line = self.pp_filename = None
        elif self.pragma_pattern.match(t.lexer.lexdata, pos=t.lexer.lexpos):
            t.lexer.begin('pppragma')
        else:
            t.type = 'PPHASH'
            return t
    ##
    ## Rules for the ppline state
    ##
    @TOKEN(string_literal)
    def t_ppline_FILENAME(self, t):
        # The filename must come after the line number in a #line directive.
        if self.pp_line is None:
            self._error('filename before line number in #line', t)
        else:
            self.pp_filename = t.value.lstrip('"').rstrip('"')
    @TOKEN(decimal_constant)
    def t_ppline_LINE_NUMBER(self, t):
        if self.pp_line is None:
            self.pp_line = t.value
        else:
            # Ignore: GCC's cpp sometimes inserts a numeric flag
            # after the file name
            pass
    def t_ppline_NEWLINE(self, t):
        r'\n'
        # End of the #line directive: apply the collected line/filename.
        if self.pp_line is None:
            self._error('line number missing in #line', t)
        else:
            self.lexer.lineno = int(self.pp_line)
            if self.pp_filename is not None:
                self.filename = self.pp_filename
        t.lexer.begin('INITIAL')
    def t_ppline_PPLINE(self, t):
        r'line'
        pass
    t_ppline_ignore = ' \t'
    def t_ppline_error(self, t):
        self._error('invalid #line directive', t)
    ##
    ## Rules for the pppragma state
    ##
    def t_pppragma_NEWLINE(self, t):
        r'\n'
        t.lexer.lineno += 1
        t.lexer.begin('INITIAL')
    def t_pppragma_PPPRAGMA(self, t):
        r'pragma'
        return t
    t_pppragma_ignore = ' \t'
    def t_pppragma_STR(self, t):
        '.+'
        # Everything up to the end of the line becomes the pragma string.
        t.type = 'PPPRAGMASTR'
        return t
    def t_pppragma_error(self, t):
        self._error('invalid #pragma directive', t)
    ##
    ## Rules for the normal state
    ##
    t_ignore = ' \t'
    # Newlines
    def t_NEWLINE(self, t):
        r'\n+'
        t.lexer.lineno += t.value.count("\n")
    # Operators
    t_PLUS = r'\+'
    t_MINUS = r'-'
    t_TIMES = r'\*'
    t_DIVIDE = r'/'
    t_MOD = r'%'
    t_OR = r'\|'
    t_AND = r'&'
    t_NOT = r'~'
    t_XOR = r'\^'
    t_LSHIFT = r'<<'
    t_RSHIFT = r'>>'
    t_LOR = r'\|\|'
    t_LAND = r'&&'
    t_LNOT = r'!'
    t_LT = r'<'
    t_GT = r'>'
    t_LE = r'<='
    t_GE = r'>='
    t_EQ = r'=='
    t_NE = r'!='
    # Assignment operators
    t_EQUALS = r'='
    t_TIMESEQUAL = r'\*='
    t_DIVEQUAL = r'/='
    t_MODEQUAL = r'%='
    t_PLUSEQUAL = r'\+='
    t_MINUSEQUAL = r'-='
    t_LSHIFTEQUAL = r'<<='
    t_RSHIFTEQUAL = r'>>='
    t_ANDEQUAL = r'&='
    t_OREQUAL = r'\|='
    t_XOREQUAL = r'\^='
    # Increment/decrement
    t_PLUSPLUS = r'\+\+'
    t_MINUSMINUS = r'--'
    # ->
    t_ARROW = r'->'
    # ?
    t_CONDOP = r'\?'
    # Delimiters
    t_LPAREN = r'\('
    t_RPAREN = r'\)'
    t_LBRACKET = r'\['
    t_RBRACKET = r'\]'
    t_COMMA = r','
    t_PERIOD = r'\.'
    t_SEMI = r';'
    t_COLON = r':'
    t_ELLIPSIS = r'\.\.\.'
    # Scope delimiters
    # To see why on_lbrace_func is needed, consider:
    # typedef char TT;
    # void foo(int TT) { TT = 10; }
    # TT x = 5;
    # Outside the function, TT is a typedef, but inside (starting and ending
    # with the braces) it's a parameter. The trouble begins with yacc's
    # lookahead token. If we open a new scope in brace_open, then TT has
    # already been read and incorrectly interpreted as TYPEID. So, we need
    # to open and close scopes from within the lexer.
    # Similar for the TT immediately outside the end of the function.
    #
    @TOKEN(r'\{')
    def t_LBRACE(self, t):
        self.on_lbrace_func()
        return t
    @TOKEN(r'\}')
    def t_RBRACE(self, t):
        self.on_rbrace_func()
        return t
    t_STRING_LITERAL = string_literal
    # The following floating and integer constants are defined as
    # functions to impose a strict order (otherwise, decimal
    # is placed before the others because its regex is longer,
    # and this is bad)
    #
    @TOKEN(floating_constant)
    def t_FLOAT_CONST(self, t):
        return t
    @TOKEN(hex_floating_constant)
    def t_HEX_FLOAT_CONST(self, t):
        return t
    @TOKEN(hex_constant)
    def t_INT_CONST_HEX(self, t):
        return t
    @TOKEN(bin_constant)
    def t_INT_CONST_BIN(self, t):
        return t
    @TOKEN(bad_octal_constant)
    def t_BAD_CONST_OCT(self, t):
        msg = "Invalid octal constant"
        self._error(msg, t)
    @TOKEN(octal_constant)
    def t_INT_CONST_OCT(self, t):
        return t
    @TOKEN(decimal_constant)
    def t_INT_CONST_DEC(self, t):
        return t
    # Must come before bad_char_const, to prevent it from
    # catching valid char constants as invalid
    #
    @TOKEN(multicharacter_constant)
    def t_INT_CONST_CHAR(self, t):
        return t
    @TOKEN(char_const)
    def t_CHAR_CONST(self, t):
        return t
    @TOKEN(wchar_const)
    def t_WCHAR_CONST(self, t):
        return t
    @TOKEN(u8char_const)
    def t_U8CHAR_CONST(self, t):
        return t
    @TOKEN(u16char_const)
    def t_U16CHAR_CONST(self, t):
        return t
    @TOKEN(u32char_const)
    def t_U32CHAR_CONST(self, t):
        return t
    @TOKEN(unmatched_quote)
    def t_UNMATCHED_QUOTE(self, t):
        msg = "Unmatched '"
        self._error(msg, t)
    @TOKEN(bad_char_const)
    def t_BAD_CHAR_CONST(self, t):
        msg = "Invalid char constant %s" % t.value
        self._error(msg, t)
    @TOKEN(wstring_literal)
    def t_WSTRING_LITERAL(self, t):
        return t
    @TOKEN(u8string_literal)
    def t_U8STRING_LITERAL(self, t):
        return t
    @TOKEN(u16string_literal)
    def t_U16STRING_LITERAL(self, t):
        return t
    @TOKEN(u32string_literal)
    def t_U32STRING_LITERAL(self, t):
        return t
    # unmatched string literals are caught by the preprocessor
    @TOKEN(bad_string_literal)
    def t_BAD_STRING_LITERAL(self, t):
        msg = "String contains invalid escape code"
        self._error(msg, t)
    @TOKEN(identifier)
    def t_ID(self, t):
        # Keywords and previously typedef'd names take precedence over
        # plain identifiers.
        t.type = self.keyword_map.get(t.value, "ID")
        if t.type == 'ID' and self.type_lookup_func(t.value):
            t.type = "TYPEID"
        return t
    def t_error(self, t):
        msg = 'Illegal character %s' % repr(t.value[0])
        self._error(msg, t)
| 17,167 | Python | 29.933333 | 129 | 0.528747 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pycparser/c_parser.py | #------------------------------------------------------------------------------
# pycparser: c_parser.py
#
# CParser class: Parser and AST builder for the C language
#
# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
#------------------------------------------------------------------------------
from .ply import yacc
from . import c_ast
from .c_lexer import CLexer
from .plyparser import PLYParser, ParseError, parameterized, template
from .ast_transforms import fix_switch_cases, fix_atomic_specifiers
@template
class CParser(PLYParser):
def __init__(
self,
lex_optimize=True,
lexer=CLexer,
lextab='pycparser.lextab',
yacc_optimize=True,
yacctab='pycparser.yacctab',
yacc_debug=False,
taboutputdir=''):
""" Create a new CParser.
Some arguments for controlling the debug/optimization
level of the parser are provided. The defaults are
tuned for release/performance mode.
The simple rules for using them are:
*) When tweaking CParser/CLexer, set these to False
*) When releasing a stable parser, set to True
lex_optimize:
Set to False when you're modifying the lexer.
Otherwise, changes in the lexer won't be used, if
some lextab.py file exists.
When releasing with a stable lexer, set to True
to save the re-generation of the lexer table on
each run.
lexer:
Set this parameter to define the lexer to use if
you're not using the default CLexer.
lextab:
Points to the lex table that's used for optimized
mode. Only if you're modifying the lexer and want
some tests to avoid re-generating the table, make
this point to a local lex table file (that's been
earlier generated with lex_optimize=True)
yacc_optimize:
Set to False when you're modifying the parser.
Otherwise, changes in the parser won't be used, if
some parsetab.py file exists.
When releasing with a stable parser, set to True
to save the re-generation of the parser table on
each run.
yacctab:
Points to the yacc table that's used for optimized
mode. Only if you're modifying the parser, make
this point to a local yacc table file
yacc_debug:
Generate a parser.out file that explains how yacc
built the parsing table from the grammar.
taboutputdir:
Set this parameter to control the location of generated
lextab and yacctab files.
"""
self.clex = lexer(
error_func=self._lex_error_func,
on_lbrace_func=self._lex_on_lbrace_func,
on_rbrace_func=self._lex_on_rbrace_func,
type_lookup_func=self._lex_type_lookup_func)
self.clex.build(
optimize=lex_optimize,
lextab=lextab,
outputdir=taboutputdir)
self.tokens = self.clex.tokens
rules_with_opt = [
'abstract_declarator',
'assignment_expression',
'declaration_list',
'declaration_specifiers_no_type',
'designation',
'expression',
'identifier_list',
'init_declarator_list',
'id_init_declarator_list',
'initializer_list',
'parameter_type_list',
'block_item_list',
'type_qualifier_list',
'struct_declarator_list'
]
for rule in rules_with_opt:
self._create_opt_rule(rule)
self.cparser = yacc.yacc(
module=self,
start='translation_unit_or_empty',
debug=yacc_debug,
optimize=yacc_optimize,
tabmodule=yacctab,
outputdir=taboutputdir)
# Stack of scopes for keeping track of symbols. _scope_stack[-1] is
# the current (topmost) scope. Each scope is a dictionary that
# specifies whether a name is a type. If _scope_stack[n][name] is
# True, 'name' is currently a type in the scope. If it's False,
# 'name' is used in the scope but not as a type (for instance, if we
# saw: int name;
# If 'name' is not a key in _scope_stack[n] then 'name' was not defined
# in this scope at all.
self._scope_stack = [dict()]
# Keeps track of the last token given to yacc (the lookahead token)
self._last_yielded_token = None
def parse(self, text, filename='', debug=False):
""" Parses C code and returns an AST.
text:
A string containing the C source code
filename:
Name of the file being parsed (for meaningful
error messages)
debug:
Debug flag to YACC
"""
self.clex.filename = filename
self.clex.reset_lineno()
self._scope_stack = [dict()]
self._last_yielded_token = None
return self.cparser.parse(
input=text,
lexer=self.clex,
debug=debug)
######################-- PRIVATE --######################
def _push_scope(self):
self._scope_stack.append(dict())
def _pop_scope(self):
assert len(self._scope_stack) > 1
self._scope_stack.pop()
def _add_typedef_name(self, name, coord):
""" Add a new typedef name (ie a TYPEID) to the current scope
"""
if not self._scope_stack[-1].get(name, True):
self._parse_error(
"Typedef %r previously declared as non-typedef "
"in this scope" % name, coord)
self._scope_stack[-1][name] = True
def _add_identifier(self, name, coord):
""" Add a new object, function, or enum member name (ie an ID) to the
current scope
"""
if self._scope_stack[-1].get(name, False):
self._parse_error(
"Non-typedef %r previously declared as typedef "
"in this scope" % name, coord)
self._scope_stack[-1][name] = False
def _is_type_in_scope(self, name):
""" Is *name* a typedef-name in the current scope?
"""
for scope in reversed(self._scope_stack):
# If name is an identifier in this scope it shadows typedefs in
# higher scopes.
in_scope = scope.get(name)
if in_scope is not None: return in_scope
return False
def _lex_error_func(self, msg, line, column):
self._parse_error(msg, self._coord(line, column))
def _lex_on_lbrace_func(self):
self._push_scope()
def _lex_on_rbrace_func(self):
self._pop_scope()
def _lex_type_lookup_func(self, name):
""" Looks up types that were previously defined with
typedef.
Passed to the lexer for recognizing identifiers that
are types.
"""
is_type = self._is_type_in_scope(name)
return is_type
def _get_yacc_lookahead_token(self):
""" We need access to yacc's lookahead token in certain cases.
This is the last token yacc requested from the lexer, so we
ask the lexer.
"""
return self.clex.last_token
# To understand what's going on here, read sections A.8.5 and
# A.8.6 of K&R2 very carefully.
#
# A C type consists of a basic type declaration, with a list
# of modifiers. For example:
#
# int *c[5];
#
# The basic declaration here is 'int c', and the pointer and
# the array are the modifiers.
#
# Basic declarations are represented by TypeDecl (from module c_ast) and the
# modifiers are FuncDecl, PtrDecl and ArrayDecl.
#
# The standard states that whenever a new modifier is parsed, it should be
# added to the end of the list of modifiers. For example:
#
# K&R2 A.8.6.2: Array Declarators
#
# In a declaration T D where D has the form
# D1 [constant-expression-opt]
# and the type of the identifier in the declaration T D1 is
# "type-modifier T", the type of the
# identifier of D is "type-modifier array of T"
#
# This is what this method does. The declarator it receives
# can be a list of declarators ending with TypeDecl. It
# tacks the modifier to the end of this list, just before
# the TypeDecl.
#
# Additionally, the modifier may be a list itself. This is
# useful for pointers, that can come as a chain from the rule
# p_pointer. In this case, the whole modifier list is spliced
# into the new location.
def _type_modify_decl(self, decl, modifier):
""" Tacks a type modifier on a declarator, and returns
the modified declarator.
Note: the declarator and modifier may be modified
"""
#~ print '****'
#~ decl.show(offset=3)
#~ modifier.show(offset=3)
#~ print '****'
modifier_head = modifier
modifier_tail = modifier
# The modifier may be a nested list. Reach its tail.
while modifier_tail.type:
modifier_tail = modifier_tail.type
# If the decl is a basic type, just tack the modifier onto it.
if isinstance(decl, c_ast.TypeDecl):
modifier_tail.type = decl
return modifier
else:
# Otherwise, the decl is a list of modifiers. Reach
# its tail and splice the modifier onto the tail,
# pointing to the underlying basic type.
decl_tail = decl
while not isinstance(decl_tail.type, c_ast.TypeDecl):
decl_tail = decl_tail.type
modifier_tail.type = decl_tail.type
decl_tail.type = modifier_head
return decl
# Due to the order in which declarators are constructed,
# they have to be fixed in order to look like a normal AST.
#
# When a declaration arrives from syntax construction, it has
# these problems:
# * The innermost TypeDecl has no type (because the basic
# type is only known at the uppermost declaration level)
# * The declaration has no variable name, since that is saved
# in the innermost TypeDecl
# * The typename of the declaration is a list of type
# specifiers, and not a node. Here, basic identifier types
# should be separated from more complex types like enums
# and structs.
#
# This method fixes these problems.
def _fix_decl_name_type(self, decl, typename):
""" Fixes a declaration. Modifies decl.
"""
# Reach the underlying basic type
#
type = decl
while not isinstance(type, c_ast.TypeDecl):
type = type.type
decl.name = type.declname
type.quals = decl.quals[:]
# The typename is a list of types. If any type in this
# list isn't an IdentifierType, it must be the only
# type in the list (it's illegal to declare "int enum ..")
# If all the types are basic, they're collected in the
# IdentifierType holder.
for tn in typename:
if not isinstance(tn, c_ast.IdentifierType):
if len(typename) > 1:
self._parse_error(
"Invalid multiple types specified", tn.coord)
else:
type.type = tn
return decl
if not typename:
# Functions default to returning int
#
if not isinstance(decl.type, c_ast.FuncDecl):
self._parse_error(
"Missing type in declaration", decl.coord)
type.type = c_ast.IdentifierType(
['int'],
coord=decl.coord)
else:
# At this point, we know that typename is a list of IdentifierType
# nodes. Concatenate all the names into a single list.
#
type.type = c_ast.IdentifierType(
[name for id in typename for name in id.names],
coord=typename[0].coord)
return decl
def _add_declaration_specifier(self, declspec, newspec, kind, append=False):
""" Declaration specifiers are represented by a dictionary
with the entries:
* qual: a list of type qualifiers
* storage: a list of storage type qualifiers
* type: a list of type specifiers
* function: a list of function specifiers
* alignment: a list of alignment specifiers
This method is given a declaration specifier, and a
new specifier of a given kind.
If `append` is True, the new specifier is added to the end of
the specifiers list, otherwise it's added at the beginning.
Returns the declaration specifier, with the new
specifier incorporated.
"""
spec = declspec or dict(qual=[], storage=[], type=[], function=[], alignment=[])
if append:
spec[kind].append(newspec)
else:
spec[kind].insert(0, newspec)
return spec
    def _build_declarations(self, spec, decls, typedef_namespace=False):
        """ Builds a list of declarations all sharing the given specifiers.
            If typedef_namespace is true, each declared name is added
            to the "typedef namespace", which also includes objects,
            functions, and enum constants.
        """
        is_typedef = 'typedef' in spec['storage']
        declarations = []
        # Bit-fields are allowed to be unnamed.
        if decls[0].get('bitsize') is not None:
            pass
        # When redeclaring typedef names as identifiers in inner scopes, a
        # problem can occur where the identifier gets grouped into
        # spec['type'], leaving decl as None. This can only occur for the
        # first declarator.
        elif decls[0]['decl'] is None:
            # Only valid if the last spec['type'] entry is a single name
            # that is a typedef currently in scope; otherwise this really
            # is a malformed declaration.
            if len(spec['type']) < 2 or len(spec['type'][-1].names) != 1 or \
                    not self._is_type_in_scope(spec['type'][-1].names[0]):
                coord = '?'
                for t in spec['type']:
                    if hasattr(t, 'coord'):
                        coord = t.coord
                        break
                self._parse_error('Invalid declaration', coord)
            # Make this look as if it came from "direct_declarator:ID"
            decls[0]['decl'] = c_ast.TypeDecl(
                declname=spec['type'][-1].names[0],
                type=None,
                quals=None,
                align=spec['alignment'],
                coord=spec['type'][-1].coord)
            # Remove the "new" type's name from the end of spec['type']
            del spec['type'][-1]
        # A similar problem can occur where the declaration ends up looking
        # like an abstract declarator. Give it a name if this is the case.
        elif not isinstance(decls[0]['decl'], (
                c_ast.Enum, c_ast.Struct, c_ast.Union, c_ast.IdentifierType)):
            decls_0_tail = decls[0]['decl']
            while not isinstance(decls_0_tail, c_ast.TypeDecl):
                decls_0_tail = decls_0_tail.type
            if decls_0_tail.declname is None:
                decls_0_tail.declname = spec['type'][-1].names[0]
                del spec['type'][-1]
        # Wrap each declarator in a Typedef or Decl node; all of them
        # share the specifier lists from spec.
        for decl in decls:
            assert decl['decl'] is not None
            if is_typedef:
                declaration = c_ast.Typedef(
                    name=None,
                    quals=spec['qual'],
                    storage=spec['storage'],
                    type=decl['decl'],
                    coord=decl['decl'].coord)
            else:
                declaration = c_ast.Decl(
                    name=None,
                    quals=spec['qual'],
                    align=spec['alignment'],
                    storage=spec['storage'],
                    funcspec=spec['function'],
                    type=decl['decl'],
                    init=decl.get('init'),
                    bitsize=decl.get('bitsize'),
                    coord=decl['decl'].coord)
            # Bare type nodes need no name fixing; everything else gets
            # its name/type normalized by _fix_decl_name_type.
            if isinstance(declaration.type, (
                    c_ast.Enum, c_ast.Struct, c_ast.Union,
                    c_ast.IdentifierType)):
                fixed_decl = declaration
            else:
                fixed_decl = self._fix_decl_name_type(declaration, spec['type'])
            # Add the type name defined by typedef to a
            # symbol table (for usage in the lexer)
            if typedef_namespace:
                if is_typedef:
                    self._add_typedef_name(fixed_decl.name, fixed_decl.coord)
                else:
                    self._add_identifier(fixed_decl.name, fixed_decl.coord)
            fixed_decl = fix_atomic_specifiers(fixed_decl)
            declarations.append(fixed_decl)
        return declarations
def _build_function_definition(self, spec, decl, param_decls, body):
""" Builds a function definition.
"""
if 'typedef' in spec['storage']:
self._parse_error("Invalid typedef", decl.coord)
declaration = self._build_declarations(
spec=spec,
decls=[dict(decl=decl, init=None)],
typedef_namespace=True)[0]
return c_ast.FuncDef(
decl=declaration,
param_decls=param_decls,
body=body,
coord=decl.coord)
def _select_struct_union_class(self, token):
""" Given a token (either STRUCT or UNION), selects the
appropriate AST class.
"""
if token == 'struct':
return c_ast.Struct
else:
return c_ast.Union
##
## Precedence and associativity of operators
##
# If this changes, c_generator.CGenerator.precedence_map needs to change as
# well
precedence = (
('left', 'LOR'),
('left', 'LAND'),
('left', 'OR'),
('left', 'XOR'),
('left', 'AND'),
('left', 'EQ', 'NE'),
('left', 'GT', 'GE', 'LT', 'LE'),
('left', 'RSHIFT', 'LSHIFT'),
('left', 'PLUS', 'MINUS'),
('left', 'TIMES', 'DIVIDE', 'MOD')
)
##
## Grammar productions
## Implementation of the BNF defined in K&R2 A.13
##
# Wrapper around a translation unit, to allow for empty input.
# Not strictly part of the C99 Grammar, but useful in practice.
    # NOTE: the docstrings of all p_* methods below are PLY grammar
    # productions and must not be edited.
    def p_translation_unit_or_empty(self, p):
        """ translation_unit_or_empty   : translation_unit
                                        | empty
        """
        # An empty input still yields a (empty) FileAST.
        if p[1] is None:
            p[0] = c_ast.FileAST([])
        else:
            p[0] = c_ast.FileAST(p[1])
    def p_translation_unit_1(self, p):
        """ translation_unit    : external_declaration
        """
        # Note: external_declaration is already a list
        p[0] = p[1]
    def p_translation_unit_2(self, p):
        """ translation_unit    : translation_unit external_declaration
        """
        # Accumulate all external declarations into one flat list.
        p[1].extend(p[2])
        p[0] = p[1]
    # Declarations always come as lists (because they can be
    # several in one line), so we wrap the function definition
    # into a list as well, to make the return value of
    # external_declaration homogeneous.
    def p_external_declaration_1(self, p):
        """ external_declaration    : function_definition
        """
        p[0] = [p[1]]
    def p_external_declaration_2(self, p):
        """ external_declaration    : declaration
        """
        # declaration already produces a list of Decl nodes.
        p[0] = p[1]
    def p_external_declaration_3(self, p):
        """ external_declaration    : pp_directive
                                    | pppragma_directive
        """
        p[0] = [p[1]]
    def p_external_declaration_4(self, p):
        """ external_declaration    : SEMI
        """
        # A stray ';' at file scope contributes nothing.
        p[0] = []
    def p_external_declaration_5(self, p):
        """ external_declaration    : static_assert
        """
        p[0] = p[1]
    def p_static_assert_declaration(self, p):
        """ static_assert           : _STATIC_ASSERT LPAREN constant_expression COMMA unified_string_literal RPAREN
                                    | _STATIC_ASSERT LPAREN constant_expression RPAREN
        """
        # With len(p) == 5 only the condition was given (no message string).
        if len(p) == 5:
            p[0] = [c_ast.StaticAssert(p[3], None, self._token_coord(p, 1))]
        else:
            p[0] = [c_ast.StaticAssert(p[3], p[5], self._token_coord(p, 1))]
    def p_pp_directive(self, p):
        """ pp_directive  : PPHASH
        """
        # Raw '#' preprocessor directives cannot be parsed here.
        self._parse_error('Directives not supported yet',
                          self._token_coord(p, 1))
    def p_pppragma_directive(self, p):
        """ pppragma_directive      : PPPRAGMA
                                    | PPPRAGMA PPPRAGMASTR
        """
        # len(p) == 3 means a pragma string followed the keyword.
        if len(p) == 3:
            p[0] = c_ast.Pragma(p[2], self._token_coord(p, 2))
        else:
            p[0] = c_ast.Pragma("", self._token_coord(p, 1))
# In function definitions, the declarator can be followed by
# a declaration list, for old "K&R style" function definitios.
    def p_function_definition_1(self, p):
        """ function_definition : id_declarator declaration_list_opt compound_statement
        """
        # no declaration specifiers - 'int' becomes the default type
        spec = dict(
            qual=[],
            alignment=[],
            storage=[],
            type=[c_ast.IdentifierType(['int'],
                                       coord=self._token_coord(p, 1))],
            function=[])
        p[0] = self._build_function_definition(
            spec=spec,
            decl=p[1],
            param_decls=p[2],
            body=p[3])
    def p_function_definition_2(self, p):
        """ function_definition : declaration_specifiers id_declarator declaration_list_opt compound_statement
        """
        # Normal (non-K&R-default) definition: specifiers given explicitly.
        spec = p[1]
        p[0] = self._build_function_definition(
            spec=spec,
            decl=p[2],
            param_decls=p[3],
            body=p[4])
    # Note, according to C18 A.2.2 6.7.10 static_assert-declaration _Static_assert
    # is a declaration, not a statement. We additionally recognise it as a statement
    # to fix parsing of _Static_assert inside the functions.
    #
    def p_statement(self, p):
        """ statement   : labeled_statement
                        | expression_statement
                        | compound_statement
                        | selection_statement
                        | iteration_statement
                        | jump_statement
                        | pppragma_directive
                        | static_assert
        """
        # Every statement kind passes through unchanged.
        p[0] = p[1]
# A pragma is generally considered a decorator rather than an actual
# statement. Still, for the purposes of analyzing an abstract syntax tree of
# C code, pragma's should not be ignored and were previously treated as a
# statement. This presents a problem for constructs that take a statement
# such as labeled_statements, selection_statements, and
# iteration_statements, causing a misleading structure in the AST. For
# example, consider the following C code.
#
# for (int i = 0; i < 3; i++)
# #pragma omp critical
# sum += 1;
#
# This code will compile and execute "sum += 1;" as the body of the for
# loop. Previous implementations of PyCParser would render the AST for this
# block of code as follows:
#
# For:
# DeclList:
# Decl: i, [], [], []
# TypeDecl: i, []
# IdentifierType: ['int']
# Constant: int, 0
# BinaryOp: <
# ID: i
# Constant: int, 3
# UnaryOp: p++
# ID: i
# Pragma: omp critical
# Assignment: +=
# ID: sum
# Constant: int, 1
#
# This AST misleadingly takes the Pragma as the body of the loop and the
# assignment then becomes a sibling of the loop.
#
# To solve edge cases like these, the pragmacomp_or_statement rule groups
# a pragma and its following statement (which would otherwise be orphaned)
# using a compound block, effectively turning the above code into:
#
# for (int i = 0; i < 3; i++) {
# #pragma omp critical
# sum += 1;
# }
    def p_pragmacomp_or_statement(self, p):
        """ pragmacomp_or_statement     : pppragma_directive statement
                                        | statement
        """
        # A pragma directly before a statement is grouped with it in a
        # synthetic Compound so the statement is not orphaned (see the
        # long explanation in the comment block above this rule).
        if isinstance(p[1], c_ast.Pragma) and len(p) == 3:
            p[0] = c_ast.Compound(
                block_items=[p[1], p[2]],
                coord=self._token_coord(p, 1))
        else:
            p[0] = p[1]
# In C, declarations can come several in a line:
# int x, *px, romulo = 5;
#
# However, for the AST, we will split them to separate Decl
# nodes.
#
# This rule splits its declarations and always returns a list
# of Decl nodes, even if it's one element long.
#
    def p_decl_body(self, p):
        """ decl_body : declaration_specifiers init_declarator_list_opt
                      | declaration_specifiers_no_type id_init_declarator_list_opt
        """
        spec = p[1]
        # p[2] (init_declarator_list_opt) is either a list or None
        #
        if p[2] is None:
            # By the standard, you must have at least one declarator unless
            # declaring a structure tag, a union tag, or the members of an
            # enumeration.
            #
            ty = spec['type']
            s_u_or_e = (c_ast.Struct, c_ast.Union, c_ast.Enum)
            if len(ty) == 1 and isinstance(ty[0], s_u_or_e):
                # Tag-only declaration, e.g. "struct foo;".
                decls = [c_ast.Decl(
                    name=None,
                    quals=spec['qual'],
                    align=spec['alignment'],
                    storage=spec['storage'],
                    funcspec=spec['function'],
                    type=ty[0],
                    init=None,
                    bitsize=None,
                    coord=ty[0].coord)]
            # However, this case can also occur on redeclared identifiers in
            # an inner scope.  The trouble is that the redeclared type's name
            # gets grouped into declaration_specifiers; _build_declarations
            # compensates for this.
            #
            else:
                decls = self._build_declarations(
                    spec=spec,
                    decls=[dict(decl=None, init=None)],
                    typedef_namespace=True)
        else:
            decls = self._build_declarations(
                spec=spec,
                decls=p[2],
                typedef_namespace=True)
        p[0] = decls
# The declaration has been split to a decl_body sub-rule and
# SEMI, because having them in a single rule created a problem
# for defining typedefs.
#
# If a typedef line was directly followed by a line using the
# type defined with the typedef, the type would not be
# recognized. This is because to reduce the declaration rule,
# the parser's lookahead asked for the token after SEMI, which
# was the type from the next line, and the lexer had no chance
# to see the updated type symbol table.
#
# Splitting solves this problem, because after seeing SEMI,
# the parser reduces decl_body, which actually adds the new
# type into the table to be seen by the lexer before the next
# line is reached.
    def p_declaration(self, p):
        """ declaration : decl_body SEMI
        """
        # decl_body already produced the list of Decl nodes; SEMI is only
        # needed so the typedef symbol table is updated in time (see the
        # comment block above this rule).
        p[0] = p[1]
    # Since each declaration is a list of declarations, this
    # rule will combine all the declarations and return a single
    # list
    #
    def p_declaration_list(self, p):
        """ declaration_list    : declaration
                                | declaration_list declaration
        """
        p[0] = p[1] if len(p) == 2 else p[1] + p[2]
# To know when declaration-specifiers end and declarators begin,
# we require declaration-specifiers to have at least one
# type-specifier, and disallow typedef-names after we've seen any
# type-specifier. These are both required by the spec.
#
    # Each of the rules below folds one more specifier into the running
    # specifier dict via _add_declaration_specifier.
    def p_declaration_specifiers_no_type_1(self, p):
        """ declaration_specifiers_no_type  : type_qualifier declaration_specifiers_no_type_opt
        """
        p[0] = self._add_declaration_specifier(p[2], p[1], 'qual')
    def p_declaration_specifiers_no_type_2(self, p):
        """ declaration_specifiers_no_type  : storage_class_specifier declaration_specifiers_no_type_opt
        """
        p[0] = self._add_declaration_specifier(p[2], p[1], 'storage')
    def p_declaration_specifiers_no_type_3(self, p):
        """ declaration_specifiers_no_type  : function_specifier declaration_specifiers_no_type_opt
        """
        p[0] = self._add_declaration_specifier(p[2], p[1], 'function')
    # Without this, `typedef _Atomic(T) U` will parse incorrectly because the
    # _Atomic qualifier will match, instead of the specifier.
    def p_declaration_specifiers_no_type_4(self, p):
        """ declaration_specifiers_no_type  : atomic_specifier declaration_specifiers_no_type_opt
        """
        p[0] = self._add_declaration_specifier(p[2], p[1], 'type')
    def p_declaration_specifiers_no_type_5(self, p):
        """ declaration_specifiers_no_type  : alignment_specifier declaration_specifiers_no_type_opt
        """
        p[0] = self._add_declaration_specifier(p[2], p[1], 'alignment')
    def p_declaration_specifiers_1(self, p):
        """ declaration_specifiers  : declaration_specifiers type_qualifier
        """
        p[0] = self._add_declaration_specifier(p[1], p[2], 'qual', append=True)
    def p_declaration_specifiers_2(self, p):
        """ declaration_specifiers  : declaration_specifiers storage_class_specifier
        """
        p[0] = self._add_declaration_specifier(p[1], p[2], 'storage', append=True)
    def p_declaration_specifiers_3(self, p):
        """ declaration_specifiers  : declaration_specifiers function_specifier
        """
        p[0] = self._add_declaration_specifier(p[1], p[2], 'function', append=True)
    def p_declaration_specifiers_4(self, p):
        """ declaration_specifiers  : declaration_specifiers type_specifier_no_typeid
        """
        p[0] = self._add_declaration_specifier(p[1], p[2], 'type', append=True)
    def p_declaration_specifiers_5(self, p):
        """ declaration_specifiers  : type_specifier
        """
        # First (and possibly only) type specifier starts a fresh dict.
        p[0] = self._add_declaration_specifier(None, p[1], 'type')
    def p_declaration_specifiers_6(self, p):
        """ declaration_specifiers  : declaration_specifiers_no_type type_specifier
        """
        p[0] = self._add_declaration_specifier(p[1], p[2], 'type', append=True)
    def p_declaration_specifiers_7(self, p):
        """ declaration_specifiers  : declaration_specifiers alignment_specifier
        """
        p[0] = self._add_declaration_specifier(p[1], p[2], 'alignment', append=True)
    def p_storage_class_specifier(self, p):
        """ storage_class_specifier : AUTO
                                    | REGISTER
                                    | STATIC
                                    | EXTERN
                                    | TYPEDEF
                                    | _THREAD_LOCAL
        """
        # Storage classes are kept as their plain keyword strings.
        p[0] = p[1]
    def p_function_specifier(self, p):
        """ function_specifier  : INLINE
                                | _NORETURN
        """
        p[0] = p[1]
    def p_type_specifier_no_typeid(self, p):
        """ type_specifier_no_typeid  : VOID
                                      | _BOOL
                                      | CHAR
                                      | SHORT
                                      | INT
                                      | LONG
                                      | FLOAT
                                      | DOUBLE
                                      | _COMPLEX
                                      | SIGNED
                                      | UNSIGNED
                                      | __INT128
        """
        # Basic type keywords become single-name IdentifierType nodes.
        p[0] = c_ast.IdentifierType([p[1]], coord=self._token_coord(p, 1))
    def p_type_specifier(self, p):
        """ type_specifier  : typedef_name
                            | enum_specifier
                            | struct_or_union_specifier
                            | type_specifier_no_typeid
                            | atomic_specifier
        """
        p[0] = p[1]
    # See section 6.7.2.4 of the C11 standard.
    def p_atomic_specifier(self, p):
        """ atomic_specifier  : _ATOMIC LPAREN type_name RPAREN
        """
        # _Atomic(T) is represented as T with an '_Atomic' qualifier.
        typ = p[3]
        typ.quals.append('_Atomic')
        p[0] = typ
    def p_type_qualifier(self, p):
        """ type_qualifier  : CONST
                            | RESTRICT
                            | VOLATILE
                            | _ATOMIC
        """
        p[0] = p[1]
    def p_init_declarator_list(self, p):
        """ init_declarator_list    : init_declarator
                                    | init_declarator_list COMMA init_declarator
        """
        p[0] = p[1] + [p[3]] if len(p) == 4 else [p[1]]
    # Returns a {decl=<declarator> : init=<initializer>} dictionary
    # If there's no initializer, uses None
    #
    def p_init_declarator(self, p):
        """ init_declarator : declarator
                            | declarator EQUALS initializer
        """
        p[0] = dict(decl=p[1], init=(p[3] if len(p) > 2 else None))
    def p_id_init_declarator_list(self, p):
        """ id_init_declarator_list    : id_init_declarator
                                       | id_init_declarator_list COMMA init_declarator
        """
        p[0] = p[1] + [p[3]] if len(p) == 4 else [p[1]]
    def p_id_init_declarator(self, p):
        """ id_init_declarator : id_declarator
                               | id_declarator EQUALS initializer
        """
        p[0] = dict(decl=p[1], init=(p[3] if len(p) > 2 else None))
# Require at least one type specifier in a specifier-qualifier-list
#
    def p_specifier_qualifier_list_1(self, p):
        """ specifier_qualifier_list    : specifier_qualifier_list type_specifier_no_typeid
        """
        p[0] = self._add_declaration_specifier(p[1], p[2], 'type', append=True)
    def p_specifier_qualifier_list_2(self, p):
        """ specifier_qualifier_list    : specifier_qualifier_list type_qualifier
        """
        p[0] = self._add_declaration_specifier(p[1], p[2], 'qual', append=True)
    def p_specifier_qualifier_list_3(self, p):
        """ specifier_qualifier_list  : type_specifier
        """
        p[0] = self._add_declaration_specifier(None, p[1], 'type')
    def p_specifier_qualifier_list_4(self, p):
        """ specifier_qualifier_list  : type_qualifier_list type_specifier
        """
        # Builds the specifier dict directly (qualifiers already a list).
        p[0] = dict(qual=p[1], alignment=[], storage=[], type=[p[2]], function=[])
    def p_specifier_qualifier_list_5(self, p):
        """ specifier_qualifier_list  : alignment_specifier
        """
        p[0] = dict(qual=[], alignment=[p[1]], storage=[], type=[], function=[])
    def p_specifier_qualifier_list_6(self, p):
        """ specifier_qualifier_list  : specifier_qualifier_list alignment_specifier
        """
        p[0] = self._add_declaration_specifier(p[1], p[2], 'alignment')
    # TYPEID is allowed here (and in other struct/enum related tag names), because
    # struct/enum tags reside in their own namespace and can be named the same as types
    #
    def p_struct_or_union_specifier_1(self, p):
        """ struct_or_union_specifier   : struct_or_union ID
                                        | struct_or_union TYPEID
        """
        klass = self._select_struct_union_class(p[1])
        # None means no list of members
        p[0] = klass(
            name=p[2],
            decls=None,
            coord=self._token_coord(p, 2))
    def p_struct_or_union_specifier_2(self, p):
        """ struct_or_union_specifier : struct_or_union brace_open struct_declaration_list brace_close
                                      | struct_or_union brace_open brace_close
        """
        # Anonymous struct/union with a (possibly empty) member list.
        klass = self._select_struct_union_class(p[1])
        if len(p) == 4:
            # Empty sequence means an empty list of members
            p[0] = klass(
                name=None,
                decls=[],
                coord=self._token_coord(p, 2))
        else:
            p[0] = klass(
                name=None,
                decls=p[3],
                coord=self._token_coord(p, 2))
    def p_struct_or_union_specifier_3(self, p):
        """ struct_or_union_specifier   : struct_or_union ID brace_open struct_declaration_list brace_close
                                        | struct_or_union ID brace_open brace_close
                                        | struct_or_union TYPEID brace_open struct_declaration_list brace_close
                                        | struct_or_union TYPEID brace_open brace_close
        """
        # Named struct/union with a (possibly empty) member list.
        klass = self._select_struct_union_class(p[1])
        if len(p) == 5:
            # Empty sequence means an empty list of members
            p[0] = klass(
                name=p[2],
                decls=[],
                coord=self._token_coord(p, 2))
        else:
            p[0] = klass(
                name=p[2],
                decls=p[4],
                coord=self._token_coord(p, 2))
    def p_struct_or_union(self, p):
        """ struct_or_union : STRUCT
                            | UNION
        """
        p[0] = p[1]
# Combine all declarations into a single list
#
    def p_struct_declaration_list(self, p):
        """ struct_declaration_list     : struct_declaration
                                        | struct_declaration_list struct_declaration
        """
        # struct_declaration may be None (a lone SEMI); treat it as [].
        if len(p) == 2:
            p[0] = p[1] or []
        else:
            p[0] = p[1] + (p[2] or [])
    def p_struct_declaration_1(self, p):
        """ struct_declaration : specifier_qualifier_list struct_declarator_list_opt SEMI
        """
        spec = p[1]
        assert 'typedef' not in spec['storage']
        if p[2] is not None:
            decls = self._build_declarations(
                spec=spec,
                decls=p[2])
        elif len(spec['type']) == 1:
            # Anonymous struct/union, gcc extension, C1x feature.
            # Although the standard only allows structs/unions here, I see no
            # reason to disallow other types since some compilers have typedefs
            # here, and pycparser isn't about rejecting all invalid code.
            #
            node = spec['type'][0]
            if isinstance(node, c_ast.Node):
                decl_type = node
            else:
                decl_type = c_ast.IdentifierType(node)
            decls = self._build_declarations(
                spec=spec,
                decls=[dict(decl=decl_type)])
        else:
            # Structure/union members can have the same names as typedefs.
            # The trouble is that the member's name gets grouped into
            # specifier_qualifier_list; _build_declarations compensates.
            #
            decls = self._build_declarations(
                spec=spec,
                decls=[dict(decl=None, init=None)])
        p[0] = decls
    def p_struct_declaration_2(self, p):
        """ struct_declaration : SEMI
        """
        # An empty member declaration contributes nothing.
        p[0] = None
    def p_struct_declaration_3(self, p):
        """ struct_declaration : pppragma_directive
        """
        p[0] = [p[1]]
    def p_struct_declarator_list(self, p):
        """ struct_declarator_list  : struct_declarator
                                    | struct_declarator_list COMMA struct_declarator
        """
        p[0] = p[1] + [p[3]] if len(p) == 4 else [p[1]]
    # struct_declarator passes up a dict with the keys: decl (for
    # the underlying declarator) and bitsize (for the bitsize)
    #
    def p_struct_declarator_1(self, p):
        """ struct_declarator : declarator
        """
        p[0] = {'decl': p[1], 'bitsize': None}
    def p_struct_declarator_2(self, p):
        """ struct_declarator   : declarator COLON constant_expression
                                | COLON constant_expression
        """
        # An unnamed bit-field ("COLON constant_expression") gets an
        # anonymous TypeDecl placeholder.
        if len(p) > 3:
            p[0] = {'decl': p[1], 'bitsize': p[3]}
        else:
            p[0] = {'decl': c_ast.TypeDecl(None, None, None, None), 'bitsize': p[2]}
    def p_enum_specifier_1(self, p):
        """ enum_specifier  : ENUM ID
                            | ENUM TYPEID
        """
        # Reference to a previously-declared enum: no enumerator list.
        p[0] = c_ast.Enum(p[2], None, self._token_coord(p, 1))
    def p_enum_specifier_2(self, p):
        """ enum_specifier  : ENUM brace_open enumerator_list brace_close
        """
        # Anonymous enum definition.
        p[0] = c_ast.Enum(None, p[3], self._token_coord(p, 1))
    def p_enum_specifier_3(self, p):
        """ enum_specifier  : ENUM ID brace_open enumerator_list brace_close
                            | ENUM TYPEID brace_open enumerator_list brace_close
        """
        # Named enum definition.
        p[0] = c_ast.Enum(p[2], p[4], self._token_coord(p, 1))
    def p_enumerator_list(self, p):
        """ enumerator_list : enumerator
                            | enumerator_list COMMA
                            | enumerator_list COMMA enumerator
        """
        # The middle alternative allows a trailing comma after the last
        # enumerator (C99).
        if len(p) == 2:
            p[0] = c_ast.EnumeratorList([p[1]], p[1].coord)
        elif len(p) == 3:
            p[0] = p[1]
        else:
            p[1].enumerators.append(p[3])
            p[0] = p[1]
    def p_alignment_specifier(self, p):
        """ alignment_specifier  : _ALIGNAS LPAREN type_name RPAREN
                                 | _ALIGNAS LPAREN constant_expression RPAREN
        """
        p[0] = c_ast.Alignas(p[3], self._token_coord(p, 1))
    def p_enumerator(self, p):
        """ enumerator  : ID
                        | ID EQUALS constant_expression
        """
        # With no '=' the enumerator's value node is None.
        if len(p) == 2:
            enumerator = c_ast.Enumerator(
                        p[1], None,
                        self._token_coord(p, 1))
        else:
            enumerator = c_ast.Enumerator(
                        p[1], p[3],
                        self._token_coord(p, 1))
        # Enum constants share the ordinary identifier namespace.
        self._add_identifier(enumerator.name, enumerator.coord)
        p[0] = enumerator
    def p_declarator(self, p):
        """ declarator  : id_declarator
                        | typeid_declarator
        """
        p[0] = p[1]
    # In the rules below, 'xxx' and 'yyy' in the docstrings appear to be
    # template placeholders expanded by the @parameterized decorator into
    # concrete rule names / token names -- TODO confirm against the
    # decorator's implementation.
    @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
    def p_xxx_declarator_1(self, p):
        """ xxx_declarator  : direct_xxx_declarator
        """
        p[0] = p[1]
    @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
    def p_xxx_declarator_2(self, p):
        """ xxx_declarator  : pointer direct_xxx_declarator
        """
        p[0] = self._type_modify_decl(p[2], p[1])
    @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
    def p_direct_xxx_declarator_1(self, p):
        """ direct_xxx_declarator   : yyy
        """
        # A bare identifier: innermost TypeDecl carrying the name.
        p[0] = c_ast.TypeDecl(
            declname=p[1],
            type=None,
            quals=None,
            align=None,
            coord=self._token_coord(p, 1))
    @parameterized(('id', 'ID'), ('typeid', 'TYPEID'))
    def p_direct_xxx_declarator_2(self, p):
        """ direct_xxx_declarator   : LPAREN xxx_declarator RPAREN
        """
        p[0] = p[2]
    @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
    def p_direct_xxx_declarator_3(self, p):
        """ direct_xxx_declarator   : direct_xxx_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET
        """
        quals = (p[3] if len(p) > 5 else []) or []
        # Accept dimension qualifiers
        # Per C99 6.7.5.3 p7
        arr = c_ast.ArrayDecl(
            type=None,
            dim=p[4] if len(p) > 5 else p[3],
            dim_quals=quals,
            coord=p[1].coord)
        p[0] = self._type_modify_decl(decl=p[1], modifier=arr)
    @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
    def p_direct_xxx_declarator_4(self, p):
        """ direct_xxx_declarator   : direct_xxx_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET
                                    | direct_xxx_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET
        """
        # Using slice notation for PLY objects doesn't work in Python 3 for the
        # version of PLY embedded with pycparser; see PLY Google Code issue 30.
        # Work around that here by listing the two elements separately.
        listed_quals = [item if isinstance(item, list) else [item]
            for item in [p[3],p[4]]]
        dim_quals = [qual for sublist in listed_quals for qual in sublist
            if qual is not None]
        arr = c_ast.ArrayDecl(
            type=None,
            dim=p[5],
            dim_quals=dim_quals,
            coord=p[1].coord)
        p[0] = self._type_modify_decl(decl=p[1], modifier=arr)
    # Special for VLAs
    #
    @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
    def p_direct_xxx_declarator_5(self, p):
        """ direct_xxx_declarator   : direct_xxx_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET
        """
        # The '*' dimension of a VLA is stored as an ID node.
        arr = c_ast.ArrayDecl(
            type=None,
            dim=c_ast.ID(p[4], self._token_coord(p, 4)),
            dim_quals=p[3] if p[3] is not None else [],
            coord=p[1].coord)
        p[0] = self._type_modify_decl(decl=p[1], modifier=arr)
    @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
    def p_direct_xxx_declarator_6(self, p):
        """ direct_xxx_declarator   : direct_xxx_declarator LPAREN parameter_type_list RPAREN
                                    | direct_xxx_declarator LPAREN identifier_list_opt RPAREN
        """
        func = c_ast.FuncDecl(
            args=p[3],
            type=None,
            coord=p[1].coord)
        # To see why _get_yacc_lookahead_token is needed, consider:
        #   typedef char TT;
        #   void foo(int TT) { TT = 10; }
        # Outside the function, TT is a typedef, but inside (starting and
        # ending with the braces) it's a parameter.  The trouble begins with
        # yacc's lookahead token.  We don't know if we're declaring or
        # defining a function until we see LBRACE, but if we wait for yacc to
        # trigger a rule on that token, then TT will have already been read
        # and incorrectly interpreted as TYPEID.  We need to add the
        # parameters to the scope the moment the lexer sees LBRACE.
        #
        if self._get_yacc_lookahead_token().type == "LBRACE":
            if func.args is not None:
                for param in func.args.params:
                    if isinstance(param, c_ast.EllipsisParam): break
                    self._add_identifier(param.name, param.coord)
        p[0] = self._type_modify_decl(decl=p[1], modifier=func)
    def p_pointer(self, p):
        """ pointer : TIMES type_qualifier_list_opt
                    | TIMES type_qualifier_list_opt pointer
        """
        coord = self._token_coord(p, 1)
        # Pointer decls nest from inside out. This is important when different
        # levels have different qualifiers. For example:
        #
        #  char * const * p;
        #
        # Means "pointer to const pointer to char"
        #
        # While:
        #
        #  char ** const p;
        #
        # Means "const pointer to pointer to char"
        #
        # So when we construct PtrDecl nestings, the leftmost pointer goes in
        # as the most nested type.
        nested_type = c_ast.PtrDecl(quals=p[2] or [], type=None, coord=coord)
        if len(p) > 3:
            # Splice this level in as the innermost type of the tail chain.
            tail_type = p[3]
            while tail_type.type is not None:
                tail_type = tail_type.type
            tail_type.type = nested_type
            p[0] = p[3]
        else:
            p[0] = nested_type
    def p_type_qualifier_list(self, p):
        """ type_qualifier_list : type_qualifier
                                | type_qualifier_list type_qualifier
        """
        p[0] = [p[1]] if len(p) == 2 else p[1] + [p[2]]
    def p_parameter_type_list(self, p):
        """ parameter_type_list : parameter_list
                                | parameter_list COMMA ELLIPSIS
        """
        # A trailing "..." becomes an EllipsisParam in the param list.
        if len(p) > 2:
            p[1].params.append(c_ast.EllipsisParam(self._token_coord(p, 3)))
        p[0] = p[1]
    def p_parameter_list(self, p):
        """ parameter_list  : parameter_declaration
                            | parameter_list COMMA parameter_declaration
        """
        if len(p) == 2: # single parameter
            p[0] = c_ast.ParamList([p[1]], p[1].coord)
        else:
            p[1].params.append(p[3])
            p[0] = p[1]
# From ISO/IEC 9899:TC2, 6.7.5.3.11:
# "If, in a parameter declaration, an identifier can be treated either
# as a typedef name or as a parameter name, it shall be taken as a
# typedef name."
#
# Inside a parameter declaration, once we've reduced declaration specifiers,
# if we shift in an LPAREN and see a TYPEID, it could be either an abstract
# declarator or a declarator nested inside parens. This rule tells us to
# always treat it as an abstract declarator. Therefore, we only accept
# `id_declarator`s and `typeid_noparen_declarator`s.
    def p_parameter_declaration_1(self, p):
        """ parameter_declaration   : declaration_specifiers id_declarator
                                    | declaration_specifiers typeid_noparen_declarator
        """
        spec = p[1]
        # Parameters without a type specifier default to int.
        if not spec['type']:
            spec['type'] = [c_ast.IdentifierType(['int'],
                coord=self._token_coord(p, 1))]
        p[0] = self._build_declarations(
            spec=spec,
            decls=[dict(decl=p[2])])[0]
    def p_parameter_declaration_2(self, p):
        """ parameter_declaration   : declaration_specifiers abstract_declarator_opt
        """
        spec = p[1]
        if not spec['type']:
            spec['type'] = [c_ast.IdentifierType(['int'],
                coord=self._token_coord(p, 1))]
        # Parameters can have the same names as typedefs.  The trouble is that
        # the parameter's name gets grouped into declaration_specifiers, making
        # it look like an old-style declaration; compensate.
        #
        if len(spec['type']) > 1 and len(spec['type'][-1].names) == 1 and \
                self._is_type_in_scope(spec['type'][-1].names[0]):
            decl = self._build_declarations(
                    spec=spec,
                    decls=[dict(decl=p[2], init=None)])[0]
        # This truly is an old-style parameter declaration
        #
        else:
            # Abstract (unnamed) parameter: wrap in an anonymous Typename.
            decl = c_ast.Typename(
                name='',
                quals=spec['qual'],
                align=None,
                type=p[2] or c_ast.TypeDecl(None, None, None, None),
                coord=self._token_coord(p, 2))
            typename = spec['type']
            decl = self._fix_decl_name_type(decl, typename)
        p[0] = decl
    def p_identifier_list(self, p):
        """ identifier_list : identifier
                            | identifier_list COMMA identifier
        """
        if len(p) == 2: # single parameter
            p[0] = c_ast.ParamList([p[1]], p[1].coord)
        else:
            p[1].params.append(p[3])
            p[0] = p[1]
    def p_initializer_1(self, p):
        """ initializer : assignment_expression
        """
        p[0] = p[1]
    def p_initializer_2(self, p):
        """ initializer : brace_open initializer_list_opt brace_close
                        | brace_open initializer_list COMMA brace_close
        """
        # "{}" (empty braces) yields an empty InitList.
        if p[2] is None:
            p[0] = c_ast.InitList([], self._token_coord(p, 1))
        else:
            p[0] = p[2]
    def p_initializer_list(self, p):
        """ initializer_list    : designation_opt initializer
                                | initializer_list COMMA designation_opt initializer
        """
        # A designation (".field =" / "[idx] =") wraps its initializer in
        # a NamedInitializer node.
        if len(p) == 3: # single initializer
            init = p[2] if p[1] is None else c_ast.NamedInitializer(p[1], p[2])
            p[0] = c_ast.InitList([init], p[2].coord)
        else:
            init = p[4] if p[3] is None else c_ast.NamedInitializer(p[3], p[4])
            p[1].exprs.append(init)
            p[0] = p[1]
    def p_designation(self, p):
        """ designation : designator_list EQUALS
        """
        p[0] = p[1]
    # Designators are represented as a list of nodes, in the order in which
    # they're written in the code.
    #
    def p_designator_list(self, p):
        """ designator_list : designator
                            | designator_list designator
        """
        p[0] = [p[1]] if len(p) == 2 else p[1] + [p[2]]
    def p_designator(self, p):
        """ designator  : LBRACKET constant_expression RBRACKET
                        | PERIOD identifier
        """
        p[0] = p[2]
    def p_type_name(self, p):
        """ type_name   : specifier_qualifier_list abstract_declarator_opt
        """
        # Type names (casts, sizeof, _Alignas, ...) are anonymous Typenames.
        typename = c_ast.Typename(
            name='',
            quals=p[1]['qual'][:],
            align=None,
            type=p[2] or c_ast.TypeDecl(None, None, None, None),
            coord=self._token_coord(p, 2))
        p[0] = self._fix_decl_name_type(typename, p[1]['type'])
    def p_abstract_declarator_1(self, p):
        """ abstract_declarator     : pointer
        """
        # No direct declarator: hang the pointer off an anonymous TypeDecl.
        dummytype = c_ast.TypeDecl(None, None, None, None)
        p[0] = self._type_modify_decl(
            decl=dummytype,
            modifier=p[1])
    def p_abstract_declarator_2(self, p):
        """ abstract_declarator     : pointer direct_abstract_declarator
        """
        p[0] = self._type_modify_decl(p[2], p[1])
    def p_abstract_declarator_3(self, p):
        """ abstract_declarator     : direct_abstract_declarator
        """
        p[0] = p[1]
# Creating and using direct_abstract_declarator_opt here
# instead of listing both direct_abstract_declarator and the
# lack of it in the beginning of _1 and _2 caused two
# shift/reduce errors.
#
    def p_direct_abstract_declarator_1(self, p):
        """ direct_abstract_declarator  : LPAREN abstract_declarator RPAREN """
        p[0] = p[2]
    def p_direct_abstract_declarator_2(self, p):
        """ direct_abstract_declarator  : direct_abstract_declarator LBRACKET assignment_expression_opt RBRACKET
        """
        arr = c_ast.ArrayDecl(
            type=None,
            dim=p[3],
            dim_quals=[],
            coord=p[1].coord)
        p[0] = self._type_modify_decl(decl=p[1], modifier=arr)
    def p_direct_abstract_declarator_3(self, p):
        """ direct_abstract_declarator  : LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET
        """
        quals = (p[2] if len(p) > 4 else []) or []
        # Array with no direct declarator: anonymous TypeDecl as the base.
        p[0] = c_ast.ArrayDecl(
            type=c_ast.TypeDecl(None, None, None, None),
            dim=p[3] if len(p) > 4 else p[2],
            dim_quals=quals,
            coord=self._token_coord(p, 1))
    def p_direct_abstract_declarator_4(self, p):
        """ direct_abstract_declarator  : direct_abstract_declarator LBRACKET TIMES RBRACKET
        """
        # VLA form: the '*' dimension is stored as an ID node.
        arr = c_ast.ArrayDecl(
            type=None,
            dim=c_ast.ID(p[3], self._token_coord(p, 3)),
            dim_quals=[],
            coord=p[1].coord)
        p[0] = self._type_modify_decl(decl=p[1], modifier=arr)
    def p_direct_abstract_declarator_5(self, p):
        """ direct_abstract_declarator  : LBRACKET TIMES RBRACKET
        """
        p[0] = c_ast.ArrayDecl(
            type=c_ast.TypeDecl(None, None, None, None),
            dim=c_ast.ID(p[3], self._token_coord(p, 3)),
            dim_quals=[],
            coord=self._token_coord(p, 1))
    def p_direct_abstract_declarator_6(self, p):
        """ direct_abstract_declarator  : direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN
        """
        func = c_ast.FuncDecl(
            args=p[3],
            type=None,
            coord=p[1].coord)
        p[0] = self._type_modify_decl(decl=p[1], modifier=func)
    def p_direct_abstract_declarator_7(self, p):
        """ direct_abstract_declarator  : LPAREN parameter_type_list_opt RPAREN
        """
        # Function type with no direct declarator: anonymous TypeDecl base.
        p[0] = c_ast.FuncDecl(
            args=p[2],
            type=c_ast.TypeDecl(None, None, None, None),
            coord=self._token_coord(p, 1))
# declaration is a list, statement isn't. To make it consistent, block_item
# will always be a list
#
def p_block_item(self, p):
""" block_item : declaration
| statement
"""
p[0] = p[1] if isinstance(p[1], list) else [p[1]]
# Since we made block_item a list, this just combines lists
#
def p_block_item_list(self, p):
""" block_item_list : block_item
| block_item_list block_item
"""
# Empty block items (plain ';') produce [None], so ignore them
p[0] = p[1] if (len(p) == 2 or p[2] == [None]) else p[1] + p[2]
def p_compound_statement_1(self, p):
""" compound_statement : brace_open block_item_list_opt brace_close """
p[0] = c_ast.Compound(
block_items=p[2],
coord=self._token_coord(p, 1))
def p_labeled_statement_1(self, p):
""" labeled_statement : ID COLON pragmacomp_or_statement """
p[0] = c_ast.Label(p[1], p[3], self._token_coord(p, 1))
def p_labeled_statement_2(self, p):
""" labeled_statement : CASE constant_expression COLON pragmacomp_or_statement """
p[0] = c_ast.Case(p[2], [p[4]], self._token_coord(p, 1))
def p_labeled_statement_3(self, p):
""" labeled_statement : DEFAULT COLON pragmacomp_or_statement """
p[0] = c_ast.Default([p[3]], self._token_coord(p, 1))
def p_selection_statement_1(self, p):
""" selection_statement : IF LPAREN expression RPAREN pragmacomp_or_statement """
p[0] = c_ast.If(p[3], p[5], None, self._token_coord(p, 1))
def p_selection_statement_2(self, p):
""" selection_statement : IF LPAREN expression RPAREN statement ELSE pragmacomp_or_statement """
p[0] = c_ast.If(p[3], p[5], p[7], self._token_coord(p, 1))
def p_selection_statement_3(self, p):
""" selection_statement : SWITCH LPAREN expression RPAREN pragmacomp_or_statement """
p[0] = fix_switch_cases(
c_ast.Switch(p[3], p[5], self._token_coord(p, 1)))
def p_iteration_statement_1(self, p):
""" iteration_statement : WHILE LPAREN expression RPAREN pragmacomp_or_statement """
p[0] = c_ast.While(p[3], p[5], self._token_coord(p, 1))
def p_iteration_statement_2(self, p):
""" iteration_statement : DO pragmacomp_or_statement WHILE LPAREN expression RPAREN SEMI """
p[0] = c_ast.DoWhile(p[5], p[2], self._token_coord(p, 1))
def p_iteration_statement_3(self, p):
""" iteration_statement : FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement """
p[0] = c_ast.For(p[3], p[5], p[7], p[9], self._token_coord(p, 1))
def p_iteration_statement_4(self, p):
""" iteration_statement : FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement """
p[0] = c_ast.For(c_ast.DeclList(p[3], self._token_coord(p, 1)),
p[4], p[6], p[8], self._token_coord(p, 1))
def p_jump_statement_1(self, p):
""" jump_statement : GOTO ID SEMI """
p[0] = c_ast.Goto(p[2], self._token_coord(p, 1))
def p_jump_statement_2(self, p):
""" jump_statement : BREAK SEMI """
p[0] = c_ast.Break(self._token_coord(p, 1))
def p_jump_statement_3(self, p):
""" jump_statement : CONTINUE SEMI """
p[0] = c_ast.Continue(self._token_coord(p, 1))
def p_jump_statement_4(self, p):
""" jump_statement : RETURN expression SEMI
| RETURN SEMI
"""
p[0] = c_ast.Return(p[2] if len(p) == 4 else None, self._token_coord(p, 1))
def p_expression_statement(self, p):
""" expression_statement : expression_opt SEMI """
if p[1] is None:
p[0] = c_ast.EmptyStatement(self._token_coord(p, 2))
else:
p[0] = p[1]
def p_expression(self, p):
""" expression : assignment_expression
| expression COMMA assignment_expression
"""
if len(p) == 2:
p[0] = p[1]
else:
if not isinstance(p[1], c_ast.ExprList):
p[1] = c_ast.ExprList([p[1]], p[1].coord)
p[1].exprs.append(p[3])
p[0] = p[1]
def p_parenthesized_compound_expression(self, p):
""" assignment_expression : LPAREN compound_statement RPAREN """
p[0] = p[2]
def p_typedef_name(self, p):
""" typedef_name : TYPEID """
p[0] = c_ast.IdentifierType([p[1]], coord=self._token_coord(p, 1))
def p_assignment_expression(self, p):
""" assignment_expression : conditional_expression
| unary_expression assignment_operator assignment_expression
"""
if len(p) == 2:
p[0] = p[1]
else:
p[0] = c_ast.Assignment(p[2], p[1], p[3], p[1].coord)
# K&R2 defines these as many separate rules, to encode
# precedence and associativity. Why work hard ? I'll just use
# the built in precedence/associativity specification feature
# of PLY. (see precedence declaration above)
#
def p_assignment_operator(self, p):
""" assignment_operator : EQUALS
| XOREQUAL
| TIMESEQUAL
| DIVEQUAL
| MODEQUAL
| PLUSEQUAL
| MINUSEQUAL
| LSHIFTEQUAL
| RSHIFTEQUAL
| ANDEQUAL
| OREQUAL
"""
p[0] = p[1]
def p_constant_expression(self, p):
""" constant_expression : conditional_expression """
p[0] = p[1]
def p_conditional_expression(self, p):
""" conditional_expression : binary_expression
| binary_expression CONDOP expression COLON conditional_expression
"""
if len(p) == 2:
p[0] = p[1]
else:
p[0] = c_ast.TernaryOp(p[1], p[3], p[5], p[1].coord)
def p_binary_expression(self, p):
""" binary_expression : cast_expression
| binary_expression TIMES binary_expression
| binary_expression DIVIDE binary_expression
| binary_expression MOD binary_expression
| binary_expression PLUS binary_expression
| binary_expression MINUS binary_expression
| binary_expression RSHIFT binary_expression
| binary_expression LSHIFT binary_expression
| binary_expression LT binary_expression
| binary_expression LE binary_expression
| binary_expression GE binary_expression
| binary_expression GT binary_expression
| binary_expression EQ binary_expression
| binary_expression NE binary_expression
| binary_expression AND binary_expression
| binary_expression OR binary_expression
| binary_expression XOR binary_expression
| binary_expression LAND binary_expression
| binary_expression LOR binary_expression
"""
if len(p) == 2:
p[0] = p[1]
else:
p[0] = c_ast.BinaryOp(p[2], p[1], p[3], p[1].coord)
def p_cast_expression_1(self, p):
""" cast_expression : unary_expression """
p[0] = p[1]
def p_cast_expression_2(self, p):
""" cast_expression : LPAREN type_name RPAREN cast_expression """
p[0] = c_ast.Cast(p[2], p[4], self._token_coord(p, 1))
def p_unary_expression_1(self, p):
""" unary_expression : postfix_expression """
p[0] = p[1]
def p_unary_expression_2(self, p):
""" unary_expression : PLUSPLUS unary_expression
| MINUSMINUS unary_expression
| unary_operator cast_expression
"""
p[0] = c_ast.UnaryOp(p[1], p[2], p[2].coord)
def p_unary_expression_3(self, p):
""" unary_expression : SIZEOF unary_expression
| SIZEOF LPAREN type_name RPAREN
| _ALIGNOF LPAREN type_name RPAREN
"""
p[0] = c_ast.UnaryOp(
p[1],
p[2] if len(p) == 3 else p[3],
self._token_coord(p, 1))
def p_unary_operator(self, p):
""" unary_operator : AND
| TIMES
| PLUS
| MINUS
| NOT
| LNOT
"""
p[0] = p[1]
def p_postfix_expression_1(self, p):
""" postfix_expression : primary_expression """
p[0] = p[1]
def p_postfix_expression_2(self, p):
""" postfix_expression : postfix_expression LBRACKET expression RBRACKET """
p[0] = c_ast.ArrayRef(p[1], p[3], p[1].coord)
def p_postfix_expression_3(self, p):
""" postfix_expression : postfix_expression LPAREN argument_expression_list RPAREN
| postfix_expression LPAREN RPAREN
"""
p[0] = c_ast.FuncCall(p[1], p[3] if len(p) == 5 else None, p[1].coord)
def p_postfix_expression_4(self, p):
""" postfix_expression : postfix_expression PERIOD ID
| postfix_expression PERIOD TYPEID
| postfix_expression ARROW ID
| postfix_expression ARROW TYPEID
"""
field = c_ast.ID(p[3], self._token_coord(p, 3))
p[0] = c_ast.StructRef(p[1], p[2], field, p[1].coord)
    def p_postfix_expression_5(self, p):
        """ postfix_expression : postfix_expression PLUSPLUS
                               | postfix_expression MINUSMINUS
        """
        # Postfix increment/decrement is encoded as 'p++' / 'p--' (prefix 'p'
        # added to the operator text) to distinguish it from the prefix forms
        # handled in p_unary_expression_2.
        p[0] = c_ast.UnaryOp('p' + p[2], p[1], p[1].coord)
def p_postfix_expression_6(self, p):
""" postfix_expression : LPAREN type_name RPAREN brace_open initializer_list brace_close
| LPAREN type_name RPAREN brace_open initializer_list COMMA brace_close
"""
p[0] = c_ast.CompoundLiteral(p[2], p[5])
def p_primary_expression_1(self, p):
""" primary_expression : identifier """
p[0] = p[1]
def p_primary_expression_2(self, p):
""" primary_expression : constant """
p[0] = p[1]
def p_primary_expression_3(self, p):
""" primary_expression : unified_string_literal
| unified_wstring_literal
"""
p[0] = p[1]
def p_primary_expression_4(self, p):
""" primary_expression : LPAREN expression RPAREN """
p[0] = p[2]
def p_primary_expression_5(self, p):
""" primary_expression : OFFSETOF LPAREN type_name COMMA offsetof_member_designator RPAREN
"""
coord = self._token_coord(p, 1)
p[0] = c_ast.FuncCall(c_ast.ID(p[1], coord),
c_ast.ExprList([p[3], p[5]], coord),
coord)
    def p_offsetof_member_designator(self, p):
        """ offsetof_member_designator : identifier
                                         | offsetof_member_designator PERIOD identifier
                                         | offsetof_member_designator LBRACKET expression RBRACKET
        """
        if len(p) == 2:
            # Plain member name.
            p[0] = p[1]
        elif len(p) == 4:
            # Nested member access: designator '.' identifier
            p[0] = c_ast.StructRef(p[1], p[2], p[3], p[1].coord)
        elif len(p) == 5:
            # Array element designator: designator '[' expression ']'
            p[0] = c_ast.ArrayRef(p[1], p[3], p[1].coord)
        else:
            raise NotImplementedError("Unexpected parsing state. len(p): %u" % len(p))
def p_argument_expression_list(self, p):
""" argument_expression_list : assignment_expression
| argument_expression_list COMMA assignment_expression
"""
if len(p) == 2: # single expr
p[0] = c_ast.ExprList([p[1]], p[1].coord)
else:
p[1].exprs.append(p[3])
p[0] = p[1]
def p_identifier(self, p):
""" identifier : ID """
p[0] = c_ast.ID(p[1], self._token_coord(p, 1))
def p_constant_1(self, p):
""" constant : INT_CONST_DEC
| INT_CONST_OCT
| INT_CONST_HEX
| INT_CONST_BIN
| INT_CONST_CHAR
"""
uCount = 0
lCount = 0
for x in p[1][-3:]:
if x in ('l', 'L'):
lCount += 1
elif x in ('u', 'U'):
uCount += 1
t = ''
if uCount > 1:
raise ValueError('Constant cannot have more than one u/U suffix.')
elif lCount > 2:
raise ValueError('Constant cannot have more than two l/L suffix.')
prefix = 'unsigned ' * uCount + 'long ' * lCount
p[0] = c_ast.Constant(
prefix + 'int', p[1], self._token_coord(p, 1))
def p_constant_2(self, p):
""" constant : FLOAT_CONST
| HEX_FLOAT_CONST
"""
if 'x' in p[1].lower():
t = 'float'
else:
if p[1][-1] in ('f', 'F'):
t = 'float'
elif p[1][-1] in ('l', 'L'):
t = 'long double'
else:
t = 'double'
p[0] = c_ast.Constant(
t, p[1], self._token_coord(p, 1))
def p_constant_3(self, p):
""" constant : CHAR_CONST
| WCHAR_CONST
| U8CHAR_CONST
| U16CHAR_CONST
| U32CHAR_CONST
"""
p[0] = c_ast.Constant(
'char', p[1], self._token_coord(p, 1))
# The "unified" string and wstring literal rules are for supporting
# concatenation of adjacent string literals.
# I.e. "hello " "world" is seen by the C compiler as a single string literal
# with the value "hello world"
#
    def p_unified_string_literal(self, p):
        """ unified_string_literal : STRING_LITERAL
                                   | unified_string_literal STRING_LITERAL
        """
        if len(p) == 2: # single literal
            p[0] = c_ast.Constant(
                'string', p[1], self._token_coord(p, 1))
        else:
            # Concatenate adjacent literals into one Constant: drop the
            # closing quote of the accumulated value and the opening quote of
            # the new literal, keeping the coordinate of the first one.
            p[1].value = p[1].value[:-1] + p[2][1:]
            p[0] = p[1]
def p_unified_wstring_literal(self, p):
""" unified_wstring_literal : WSTRING_LITERAL
| U8STRING_LITERAL
| U16STRING_LITERAL
| U32STRING_LITERAL
| unified_wstring_literal WSTRING_LITERAL
| unified_wstring_literal U8STRING_LITERAL
| unified_wstring_literal U16STRING_LITERAL
| unified_wstring_literal U32STRING_LITERAL
"""
if len(p) == 2: # single literal
p[0] = c_ast.Constant(
'string', p[1], self._token_coord(p, 1))
else:
p[1].value = p[1].value.rstrip()[:-1] + p[2][2:]
p[0] = p[1]
    def p_brace_open(self, p):
        """ brace_open : LBRACE
        """
        p[0] = p[1]
        # Propagate the brace's line number to the production itself (slot 0)
        # so rules that contain a brace can coordinate their nodes at it.
        p.set_lineno(0, p.lineno(1))
    def p_brace_close(self, p):
        """ brace_close : RBRACE
        """
        p[0] = p[1]
        # Same line-number propagation as p_brace_open.
        p.set_lineno(0, p.lineno(1))
def p_empty(self, p):
'empty : '
p[0] = None
    def p_error(self, p):
        # If error recovery is added here in the future, make sure
        # _get_yacc_lookahead_token still works!
        #
        if p:
            # Report the offending token's value and position.
            self._parse_error(
                'before: %s' % p.value,
                self._coord(lineno=p.lineno,
                            column=self.clex.find_tok_column(p)))
        else:
            # PLY invokes p_error with None on unexpected end of input.
            self._parse_error('At end of input', self.clex.filename)
| 73,680 | Python | 37.03872 | 134 | 0.525841 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pycparser/c_generator.py | #------------------------------------------------------------------------------
# pycparser: c_generator.py
#
# C code generator from pycparser AST nodes.
#
# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
#------------------------------------------------------------------------------
from . import c_ast
class CGenerator(object):
    """ Uses the same visitor pattern as c_ast.NodeVisitor, but modified to
        return a value from each visit method, using string accumulation in
        generic_visit.
    """
    def __init__(self, reduce_parentheses=False):
        """ Constructs C-code generator
            reduce_parentheses:
                if True, eliminates needless parentheses on binary operators
        """
        # Statements start with indentation of self.indent_level spaces, using
        # the _make_indent method.
        self.indent_level = 0
        self.reduce_parentheses = reduce_parentheses
    def _make_indent(self):
        # indent_level is always bumped/dropped in steps of 2 spaces.
        return ' ' * self.indent_level
    def visit(self, node):
        # Dispatch to visit_<NodeClassName>; unknown node types fall back to
        # generic_visit, which just concatenates the children's output.
        method = 'visit_' + node.__class__.__name__
        return getattr(self, method, self.generic_visit)(node)
    def generic_visit(self, node):
        if node is None:
            return ''
        else:
            return ''.join(self.visit(c) for c_name, c in node.children())
    def visit_Constant(self, n):
        return n.value
    def visit_ID(self, n):
        return n.name
    def visit_Pragma(self, n):
        ret = '#pragma'
        if n.string:
            ret += ' ' + n.string
        return ret
    def visit_ArrayRef(self, n):
        arrref = self._parenthesize_unless_simple(n.name)
        return arrref + '[' + self.visit(n.subscript) + ']'
    def visit_StructRef(self, n):
        # n.type holds the access operator text itself ('.' or '->').
        sref = self._parenthesize_unless_simple(n.name)
        return sref + n.type + self.visit(n.field)
    def visit_FuncCall(self, n):
        fref = self._parenthesize_unless_simple(n.name)
        return fref + '(' + self.visit(n.args) + ')'
    def visit_UnaryOp(self, n):
        if n.op == 'sizeof':
            # Always parenthesize the argument of sizeof since it can be
            # a name.
            return 'sizeof(%s)' % self.visit(n.expr)
        else:
            operand = self._parenthesize_unless_simple(n.expr)
            # 'p++'/'p--' are the parser's encoding of the postfix forms.
            if n.op == 'p++':
                return '%s++' % operand
            elif n.op == 'p--':
                return '%s--' % operand
            else:
                return '%s%s' % (n.op, operand)
    # Precedence map of binary operators:
    precedence_map = {
        # Should be in sync with c_parser.CParser.precedence
        # Higher numbers are stronger binding
        '||': 0, # weakest binding
        '&&': 1,
        '|': 2,
        '^': 3,
        '&': 4,
        '==': 5, '!=': 5,
        '>': 6, '>=': 6, '<': 6, '<=': 6,
        '>>': 7, '<<': 7,
        '+': 8, '-': 8,
        '*': 9, '/': 9, '%': 9 # strongest binding
    }
    def visit_BinaryOp(self, n):
        # Note: all binary operators are left-to-right associative
        #
        # If `n.left.op` has a stronger or equally binding precedence in
        # comparison to `n.op`, no parenthesis are needed for the left:
        # e.g., `(a*b) + c` is equivalent to `a*b + c`, as well as
        #       `(a+b) - c` is equivalent to `a+b - c` (same precedence).
        # If the left operator is weaker binding than the current, then
        # parentheses are necessary:
        # e.g., `(a+b) * c` is NOT equivalent to `a+b * c`.
        lval_str = self._parenthesize_if(
            n.left,
            lambda d: not (self._is_simple_node(d) or
                      self.reduce_parentheses and isinstance(d, c_ast.BinaryOp) and
                      self.precedence_map[d.op] >= self.precedence_map[n.op]))
        # If `n.right.op` has a stronger -but not equal- binding precedence,
        # parenthesis can be omitted on the right:
        # e.g., `a + (b*c)` is equivalent to `a + b*c`.
        # If the right operator is weaker or equally binding, then parentheses
        # are necessary:
        # e.g., `a * (b+c)` is NOT equivalent to `a * b+c` and
        #       `a - (b+c)` is NOT equivalent to `a - b+c` (same precedence).
        rval_str = self._parenthesize_if(
            n.right,
            lambda d: not (self._is_simple_node(d) or
                      self.reduce_parentheses and isinstance(d, c_ast.BinaryOp) and
                      self.precedence_map[d.op] > self.precedence_map[n.op]))
        return '%s %s %s' % (lval_str, n.op, rval_str)
    def visit_Assignment(self, n):
        # Parenthesize chained assignments on the right-hand side.
        rval_str = self._parenthesize_if(
                            n.rvalue,
                            lambda n: isinstance(n, c_ast.Assignment))
        return '%s %s %s' % (self.visit(n.lvalue), n.op, rval_str)
    def visit_IdentifierType(self, n):
        return ' '.join(n.names)
    def _visit_expr(self, n):
        # Like visit(), but wraps initializer lists in braces and expression
        # lists in parentheses, as required in expression context.
        if isinstance(n, c_ast.InitList):
            return '{' + self.visit(n) + '}'
        elif isinstance(n, c_ast.ExprList):
            return '(' + self.visit(n) + ')'
        else:
            return self.visit(n)
    def visit_Decl(self, n, no_type=False):
        # no_type is used when a Decl is part of a DeclList, where the type is
        # explicitly only for the first declaration in a list.
        #
        s = n.name if no_type else self._generate_decl(n)
        if n.bitsize: s += ' : ' + self.visit(n.bitsize)
        if n.init:
            s += ' = ' + self._visit_expr(n.init)
        return s
    def visit_DeclList(self, n):
        s = self.visit(n.decls[0])
        if len(n.decls) > 1:
            s += ', ' + ', '.join(self.visit_Decl(decl, no_type=True)
                                    for decl in n.decls[1:])
        return s
    def visit_Typedef(self, n):
        s = ''
        if n.storage: s += ' '.join(n.storage) + ' '
        s += self._generate_type(n.type)
        return s
    def visit_Cast(self, n):
        s = '(' + self._generate_type(n.to_type, emit_declname=False) + ')'
        return s + ' ' + self._parenthesize_unless_simple(n.expr)
    def visit_ExprList(self, n):
        visited_subexprs = []
        for expr in n.exprs:
            visited_subexprs.append(self._visit_expr(expr))
        return ', '.join(visited_subexprs)
    def visit_InitList(self, n):
        visited_subexprs = []
        for expr in n.exprs:
            visited_subexprs.append(self._visit_expr(expr))
        return ', '.join(visited_subexprs)
    def visit_Enum(self, n):
        return self._generate_struct_union_enum(n, name='enum')
    def visit_Alignas(self, n):
        return '_Alignas({})'.format(self.visit(n.alignment))
    def visit_Enumerator(self, n):
        # Each enumerator is emitted on its own line with a trailing comma;
        # _generate_enum_body strips the final comma afterwards.
        if not n.value:
            return '{indent}{name},\n'.format(
                indent=self._make_indent(),
                name=n.name,
            )
        else:
            return '{indent}{name} = {value},\n'.format(
                indent=self._make_indent(),
                name=n.name,
                value=self.visit(n.value),
            )
    def visit_FuncDef(self, n):
        decl = self.visit(n.decl)
        self.indent_level = 0
        body = self.visit(n.body)
        if n.param_decls:
            # K&R-style parameter declarations between header and body.
            knrdecls = ';\n'.join(self.visit(p) for p in n.param_decls)
            return decl + '\n' + knrdecls + ';\n' + body + '\n'
        else:
            return decl + '\n' + body + '\n'
    def visit_FileAST(self, n):
        s = ''
        for ext in n.ext:
            if isinstance(ext, c_ast.FuncDef):
                s += self.visit(ext)
            elif isinstance(ext, c_ast.Pragma):
                s += self.visit(ext) + '\n'
            else:
                s += self.visit(ext) + ';\n'
        return s
    def visit_Compound(self, n):
        s = self._make_indent() + '{\n'
        self.indent_level += 2
        if n.block_items:
            s += ''.join(self._generate_stmt(stmt) for stmt in n.block_items)
        self.indent_level -= 2
        s += self._make_indent() + '}\n'
        return s
    def visit_CompoundLiteral(self, n):
        return '(' + self.visit(n.type) + '){' + self.visit(n.init) + '}'
    def visit_EmptyStatement(self, n):
        return ';'
    def visit_ParamList(self, n):
        return ', '.join(self.visit(param) for param in n.params)
    def visit_Return(self, n):
        s = 'return'
        if n.expr: s += ' ' + self.visit(n.expr)
        return s + ';'
    def visit_Break(self, n):
        return 'break;'
    def visit_Continue(self, n):
        return 'continue;'
    def visit_TernaryOp(self, n):
        s  = '(' + self._visit_expr(n.cond) + ') ? '
        s += '(' + self._visit_expr(n.iftrue) + ') : '
        s += '(' + self._visit_expr(n.iffalse) + ')'
        return s
    def visit_If(self, n):
        s = 'if ('
        if n.cond: s += self.visit(n.cond)
        s += ')\n'
        s += self._generate_stmt(n.iftrue, add_indent=True)
        if n.iffalse:
            s += self._make_indent() + 'else\n'
            s += self._generate_stmt(n.iffalse, add_indent=True)
        return s
    def visit_For(self, n):
        s = 'for ('
        if n.init: s += self.visit(n.init)
        s += ';'
        if n.cond: s += ' ' + self.visit(n.cond)
        s += ';'
        if n.next: s += ' ' + self.visit(n.next)
        s += ')\n'
        s += self._generate_stmt(n.stmt, add_indent=True)
        return s
    def visit_While(self, n):
        s = 'while ('
        if n.cond: s += self.visit(n.cond)
        s += ')\n'
        s += self._generate_stmt(n.stmt, add_indent=True)
        return s
    def visit_DoWhile(self, n):
        s = 'do\n'
        s += self._generate_stmt(n.stmt, add_indent=True)
        s += self._make_indent() + 'while ('
        if n.cond: s += self.visit(n.cond)
        s += ');'
        return s
    def visit_StaticAssert(self, n):
        s = '_Static_assert('
        s += self.visit(n.cond)
        if n.message:
            s += ','
            s += self.visit(n.message)
        s += ')'
        return s
    def visit_Switch(self, n):
        s = 'switch (' + self.visit(n.cond) + ')\n'
        s += self._generate_stmt(n.stmt, add_indent=True)
        return s
    def visit_Case(self, n):
        s = 'case ' + self.visit(n.expr) + ':\n'
        for stmt in n.stmts:
            s += self._generate_stmt(stmt, add_indent=True)
        return s
    def visit_Default(self, n):
        s = 'default:\n'
        for stmt in n.stmts:
            s += self._generate_stmt(stmt, add_indent=True)
        return s
    def visit_Label(self, n):
        return n.name + ':\n' + self._generate_stmt(n.stmt)
    def visit_Goto(self, n):
        return 'goto ' + n.name + ';'
    def visit_EllipsisParam(self, n):
        return '...'
    def visit_Struct(self, n):
        return self._generate_struct_union_enum(n, 'struct')
    def visit_Typename(self, n):
        return self._generate_type(n.type)
    def visit_Union(self, n):
        return self._generate_struct_union_enum(n, 'union')
    def visit_NamedInitializer(self, n):
        # Designated initializer, e.g. `.field = v` or `[idx] = v`.
        s = ''
        for name in n.name:
            if isinstance(name, c_ast.ID):
                s += '.' + name.name
            else:
                s += '[' + self.visit(name) + ']'
        s += ' = ' + self._visit_expr(n.expr)
        return s
    def visit_FuncDecl(self, n):
        return self._generate_type(n)
    def visit_ArrayDecl(self, n):
        return self._generate_type(n, emit_declname=False)
    def visit_TypeDecl(self, n):
        return self._generate_type(n, emit_declname=False)
    def visit_PtrDecl(self, n):
        return self._generate_type(n, emit_declname=False)
    def _generate_struct_union_enum(self, n, name):
        """ Generates code for structs, unions, and enums. name should be
            'struct', 'union', or 'enum'.
        """
        if name in ('struct', 'union'):
            members = n.decls
            body_function = self._generate_struct_union_body
        else:
            assert name == 'enum'
            members = None if n.values is None else n.values.enumerators
            body_function = self._generate_enum_body
        s = name + ' ' + (n.name or '')
        if members is not None:
            # None means no members
            # Empty sequence means an empty list of members
            s += '\n'
            s += self._make_indent()
            self.indent_level += 2
            s += '{\n'
            s += body_function(members)
            self.indent_level -= 2
            s += self._make_indent() + '}'
        return s
    def _generate_struct_union_body(self, members):
        return ''.join(self._generate_stmt(decl) for decl in members)
    def _generate_enum_body(self, members):
        # `[:-2] + '\n'` removes the final `,` from the enumerator list
        return ''.join(self.visit(value) for value in members)[:-2] + '\n'
    def _generate_stmt(self, n, add_indent=False):
        """ Generation from a statement node. This method exists as a wrapper
            for individual visit_* methods to handle different treatment of
            some statements in this context.
        """
        typ = type(n)
        if add_indent: self.indent_level += 2
        indent = self._make_indent()
        if add_indent: self.indent_level -= 2
        if typ in (
                c_ast.Decl, c_ast.Assignment, c_ast.Cast, c_ast.UnaryOp,
                c_ast.BinaryOp, c_ast.TernaryOp, c_ast.FuncCall, c_ast.ArrayRef,
                c_ast.StructRef, c_ast.Constant, c_ast.ID, c_ast.Typedef,
                c_ast.ExprList):
            # These can also appear in an expression context so no semicolon
            # is added to them automatically
            #
            return indent + self.visit(n) + ';\n'
        elif typ in (c_ast.Compound,):
            # No extra indentation required before the opening brace of a
            # compound - because it consists of multiple lines it has to
            # compute its own indentation.
            #
            return self.visit(n)
        elif typ in (c_ast.If,):
            return indent + self.visit(n)
        else:
            return indent + self.visit(n) + '\n'
    def _generate_decl(self, n):
        """ Generation from a Decl node.
        """
        s = ''
        if n.funcspec: s = ' '.join(n.funcspec) + ' '
        if n.storage: s += ' '.join(n.storage) + ' '
        if n.align: s += self.visit(n.align[0]) + ' '
        s += self._generate_type(n.type)
        return s
    def _generate_type(self, n, modifiers=[], emit_declname = True):
        """ Recursive generation from a type node. n is the type node.
            modifiers collects the PtrDecl, ArrayDecl and FuncDecl modifiers
            encountered on the way down to a TypeDecl, to allow proper
            generation from it.
        """
        # NOTE: the mutable default for `modifiers` is safe here - it is never
        # mutated, only extended via `modifiers + [n]` below.
        typ = type(n)
        #~ print(n, modifiers)
        if typ == c_ast.TypeDecl:
            s = ''
            if n.quals: s += ' '.join(n.quals) + ' '
            s += self.visit(n.type)
            nstr = n.declname if n.declname and emit_declname else ''
            # Resolve modifiers.
            # Wrap in parens to distinguish pointer to array and pointer to
            # function syntax.
            #
            for i, modifier in enumerate(modifiers):
                if isinstance(modifier, c_ast.ArrayDecl):
                    if (i != 0 and
                        isinstance(modifiers[i - 1], c_ast.PtrDecl)):
                            nstr = '(' + nstr + ')'
                    nstr += '['
                    if modifier.dim_quals:
                        nstr += ' '.join(modifier.dim_quals) + ' '
                    nstr += self.visit(modifier.dim) + ']'
                elif isinstance(modifier, c_ast.FuncDecl):
                    if (i != 0 and
                        isinstance(modifiers[i - 1], c_ast.PtrDecl)):
                            nstr = '(' + nstr + ')'
                    nstr += '(' + self.visit(modifier.args) + ')'
                elif isinstance(modifier, c_ast.PtrDecl):
                    if modifier.quals:
                        nstr = '* %s%s' % (' '.join(modifier.quals),
                                           ' ' + nstr if nstr else '')
                    else:
                        nstr = '*' + nstr
            if nstr: s += ' ' + nstr
            return s
        elif typ == c_ast.Decl:
            return self._generate_decl(n.type)
        elif typ == c_ast.Typename:
            return self._generate_type(n.type, emit_declname = emit_declname)
        elif typ == c_ast.IdentifierType:
            return ' '.join(n.names) + ' '
        elif typ in (c_ast.ArrayDecl, c_ast.PtrDecl, c_ast.FuncDecl):
            return self._generate_type(n.type, modifiers + [n],
                                       emit_declname = emit_declname)
        else:
            return self.visit(n)
    def _parenthesize_if(self, n, condition):
        """ Visits 'n' and returns its string representation, parenthesized
            if the condition function applied to the node returns True.
        """
        s = self._visit_expr(n)
        if condition(n):
            return '(' + s + ')'
        else:
            return s
    def _parenthesize_unless_simple(self, n):
        """ Common use case for _parenthesize_if
        """
        return self._parenthesize_if(n, lambda d: not self._is_simple_node(d))
    def _is_simple_node(self, n):
        """ Returns True for nodes that are "simple" - i.e. nodes that always
            have higher precedence than operators.
        """
        return isinstance(n, (c_ast.Constant, c_ast.ID, c_ast.ArrayRef,
                              c_ast.StructRef, c_ast.FuncCall))
| 17,772 | Python | 34.333996 | 83 | 0.507934 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pycparser/plyparser.py | #-----------------------------------------------------------------
# plyparser.py
#
# PLYParser class and other utilities for simplifying programming
# parsers with PLY
#
# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
#-----------------------------------------------------------------
import warnings
class Coord(object):
    """ Coordinates of a syntactic element. Consists of:
            - File name
            - Line number
            - (optional) column number, for the Lexer
    """
    __slots__ = ('file', 'line', 'column', '__weakref__')

    def __init__(self, file, line, column=None):
        self.file = file
        self.line = line
        self.column = column

    def __str__(self):
        # Format is "file:line" or "file:line:column". The local was renamed
        # from `str` so it no longer shadows the builtin.
        location = "%s:%s" % (self.file, self.line)
        if self.column:
            location += ":%s" % self.column
        return location
# Raised by PLYParser._parse_error on any syntax error; the message carries
# the "coord: msg" location information.
class ParseError(Exception): pass
class PLYParser(object):
    """ Base class with helpers shared by PLY-based parsers: dynamic creation
        of optional grammar rules and source-coordinate computation.
    """
    def _create_opt_rule(self, rulename):
        """ Given a rule name, creates an optional ply.yacc rule
            for it. The name of the optional rule is
            <rulename>_opt
        """
        optname = rulename + '_opt'
        def optrule(self, p):
            p[0] = p[1]
        # PLY reads the grammar from the method's docstring, so build it here.
        optrule.__doc__ = '%s : empty\n| %s' % (optname, rulename)
        optrule.__name__ = 'p_%s' % optname
        setattr(self.__class__, optrule.__name__, optrule)
    def _coord(self, lineno, column=None):
        return Coord(
                file=self.clex.filename,
                line=lineno,
                column=column)
    def _token_coord(self, p, token_idx):
        """ Returns the coordinates for the YaccProduction object 'p' indexed
            with 'token_idx'. The coordinate includes the 'lineno' and
            'column'. Both follow the lex semantic, starting from 1.
        """
        # Column = distance from the last newline before the token; with
        # last_cr forced to -1 when none exists, columns start at 1.
        last_cr = p.lexer.lexer.lexdata.rfind('\n', 0, p.lexpos(token_idx))
        if last_cr < 0:
            last_cr = -1
        column = (p.lexpos(token_idx) - (last_cr))
        return self._coord(p.lineno(token_idx), column)
    def _parse_error(self, msg, coord):
        raise ParseError("%s: %s" % (coord, msg))
def parameterized(*params):
    """ Decorator factory for parameterized grammar-rule templates.

    Parameterized rule methods must be named starting with 'p_' and contain
    'xxx', and their docstrings may contain 'xxx' and 'yyy'. Each positional
    argument is a substitution tuple; the decorated template method is simply
    tagged with them (via a `_params` attribute) and returned unchanged, so
    that the `template` class decorator can later expand it into one concrete
    rule per tuple. For example, ``p_xxx_rule()`` with docstring
    'xxx_rule : yyy', decorated with ``@parameterized(('id', 'ID'))``,
    produces ``p_id_rule()`` with docstring 'id_rule : ID'.
    """
    def tag_with_params(template_func):
        # Only record the substitutions; expansion happens in `template`.
        template_func._params = params
        return template_func
    return tag_with_params
def template(cls):
    """ Class decorator to generate rules from parameterized rule templates.
        See `parameterized` for more information on parameterized rules.
    """
    issued_nodoc_warning = False
    for attr_name in dir(cls):
        if attr_name.startswith('p_'):
            method = getattr(cls, attr_name)
            # Only methods tagged by @parameterized carry `_params`.
            if hasattr(method, '_params'):
                # Remove the template method
                delattr(cls, attr_name)
                # Create parameterized rules from this method; only run this if
                # the method has a docstring. This is to address an issue when
                # pycparser's users are installed in -OO mode which strips
                # docstrings away.
                # See: https://github.com/eliben/pycparser/pull/198/ and
                #      https://github.com/eliben/pycparser/issues/197
                # for discussion.
                if method.__doc__ is not None:
                    _create_param_rules(cls, method)
                elif not issued_nodoc_warning:
                    # Warn only once per class, not once per template method.
                    warnings.warn(
                        'parsing methods must have __doc__ for pycparser to work properly',
                        RuntimeWarning,
                        stacklevel=2)
                    issued_nodoc_warning = True
    return cls
def _create_param_rules(cls, func):
    """ Create ply.yacc rules based on a parameterized rule function
        Generates new methods (one per each pair of parameters) based on the
        template rule function `func`, and attaches them to `cls`. The rule
        function's parameters must be accessible via its `_params` attribute.
    """
    for xxx, yyy in func._params:
        # Use the template method's body for each new method
        # (closing over `func` itself, which stays fixed across iterations).
        def param_rule(self, p):
            func(self, p)
        # Substitute in the params for the grammar rule and function name
        # (PLY derives the grammar from __doc__ and the rule name from
        # __name__, so both must be rewritten).
        param_rule.__doc__ = func.__doc__.replace('xxx', xxx).replace('yyy', yyy)
        param_rule.__name__ = func.__name__.replace('xxx', xxx)
        # Attach the new method to the class
        setattr(cls, param_rule.__name__, param_rule)
| 4,875 | Python | 35.388059 | 91 | 0.568615 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pycparser/ast_transforms.py | #------------------------------------------------------------------------------
# pycparser: ast_transforms.py
#
# Some utilities used by the parser to create a friendlier AST.
#
# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
#------------------------------------------------------------------------------
from . import c_ast
def fix_switch_cases(switch_node):
    """ The 'case' statements in a 'switch' come out of parsing with one
        child node, so subsequent statements are just tucked to the parent
        Compound. Additionally, consecutive (fall-through) case statements
        come out messy. This is a peculiarity of the C grammar. The following:
            switch (myvar) {
                case 10:
                    k = 10;
                    p = k + 1;
                    return 10;
                case 20:
                case 30:
                    return 20;
                default:
                    break;
            }
        Creates this tree (pseudo-dump):
            Switch
                ID: myvar
                Compound:
                    Case 10:
                        k = 10
                        p = k + 1
                        return 10
                    Case 20:
                        Case 30:
                            return 20
                    Default:
                        break
        The goal of this transform is to fix this mess, turning it into the
        following:
            Switch
                ID: myvar
                Compound:
                    Case 10:
                        k = 10
                        p = k + 1
                        return 10
                    Case 20:
                    Case 30:
                        return 20
                    Default:
                        break
        A fixed AST node is returned. The argument may be modified.
    """
    assert isinstance(switch_node, c_ast.Switch)
    if not isinstance(switch_node.stmt, c_ast.Compound):
        # e.g. `switch (c) case 1: f();` - a single statement, nothing to fix.
        return switch_node
    # The new Compound child for the Switch, which will collect children in the
    # correct order
    new_compound = c_ast.Compound([], switch_node.stmt.coord)
    # The last Case/Default node
    last_case = None
    # Goes over the children of the Compound below the Switch, adding them
    # either directly below new_compound or below the last Case as appropriate
    # (for `switch(cond) {}`, block_items would have been None)
    for child in (switch_node.stmt.block_items or []):
        if isinstance(child, (c_ast.Case, c_ast.Default)):
            # If it's a Case/Default:
            # 1. Add it to the Compound and mark as "last case"
            # 2. If its immediate child is also a Case or Default, promote it
            #    to a sibling.
            new_compound.block_items.append(child)
            _extract_nested_case(child, new_compound.block_items)
            last_case = new_compound.block_items[-1]
        else:
            # Other statements are added as children to the last case, if it
            # exists.
            if last_case is None:
                new_compound.block_items.append(child)
            else:
                last_case.stmts.append(child)
    switch_node.stmt = new_compound
    return switch_node
def _extract_nested_case(case_node, stmts_list):
    """ Recursively extract consecutive Case statements that are made nested
        by the parser and add them to the stmts_list.
    """
    # A freshly-parsed Case holds exactly one statement; if that statement is
    # itself a Case/Default, promote it to a sibling and recurse on it.
    innermost = case_node.stmts[0]
    if isinstance(innermost, (c_ast.Case, c_ast.Default)):
        stmts_list.append(case_node.stmts.pop())
        _extract_nested_case(stmts_list[-1], stmts_list)
def fix_atomic_specifiers(decl):
    """ Atomic specifiers like _Atomic(type) are unusually structured,
        conferring a qualifier upon the contained type.

        This function fixes a decl with atomic specifiers to have a sane AST
        structure, by removing spurious Typename->TypeDecl pairs and attaching
        the _Atomic qualifier in the right place.
    """
    # There can be multiple levels of _Atomic in a decl; fix them until a
    # fixed point is reached.
    while True:
        decl, found = _fix_atomic_specifiers_once(decl)
        if not found:
            break
    # Make sure to add an _Atomic qual on the topmost decl if needed. Also
    # restore the declname on the innermost TypeDecl (it gets placed in the
    # wrong place during construction).
    typ = decl
    while not isinstance(typ, c_ast.TypeDecl):
        try:
            typ = typ.type
        except AttributeError:
            # No TypeDecl anywhere in the chain: nothing further to fix.
            return decl
    if '_Atomic' in typ.quals and '_Atomic' not in decl.quals:
        decl.quals.append('_Atomic')
    if typ.declname is None:
        typ.declname = decl.name
    return decl
def _fix_atomic_specifiers_once(decl):
    """ Performs one 'fix' round of atomic specifiers.
        Returns (modified_decl, found) where found is True iff a fix was made.
    """
    parent = decl
    grandparent = None
    # Walk down the .type chain looking for a Typename carrying '_Atomic'.
    node = decl.type
    while node is not None:
        if isinstance(node, c_ast.Typename) and '_Atomic' in node.quals:
            break
        try:
            grandparent = parent
            parent = node
            node = node.type
        except AttributeError:
            # If we've reached a node without a `type` field, it means we won't
            # find what we're looking for at this point; give up the search
            # and return the original decl unmodified.
            return decl, False
    # The atomic Typename always sits under a spurious TypeDecl; splice it out
    # by pointing the grandparent directly at the Typename's inner type.
    assert isinstance(parent, c_ast.TypeDecl)
    grandparent.type = node.type
    if '_Atomic' not in node.type.quals:
        node.type.quals.append('_Atomic')
    return decl, True
| 5,691 | Python | 33.496969 | 79 | 0.54718 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pycparser/ply/cpp.py | # -----------------------------------------------------------------------------
# cpp.py
#
# Author: David Beazley (http://www.dabeaz.com)
# Copyright (C) 2017
# All rights reserved
#
# This module implements an ANSI-C style lexical preprocessor for PLY.
# -----------------------------------------------------------------------------
import sys

# Python 2/3 compatibility aliases: a common string base type and xrange.
if sys.version_info.major >= 3:
    STRING_TYPES = str
    xrange = range
else:
    STRING_TYPES = (str, unicode)
# -----------------------------------------------------------------------------
# Default preprocessor lexer definitions. These tokens are enough to get
# a basic preprocessor working. Other modules may import these if they want
# -----------------------------------------------------------------------------
# Token names understood by the preprocessor lexer, plus single-character literals.
tokens = (
    'CPP_ID',
    'CPP_INTEGER',
    'CPP_FLOAT',
    'CPP_STRING',
    'CPP_CHAR',
    'CPP_WS',
    'CPP_COMMENT1',
    'CPP_COMMENT2',
    'CPP_POUND',
    'CPP_DPOUND',
)

literals = "+-*/%|&~^<>=!?()[]{}.,;:\\\'\""
# Whitespace
def t_CPP_WS(t):
r'\s+'
t.lexer.lineno += t.value.count("\n")
return t
# '#' (stringize) and '##' (paste) operators.  PLY tries string rules in order
# of decreasing regex length, so '##' is matched before '#'.
t_CPP_POUND = r'\#'
t_CPP_DPOUND = r'\#\#'
# Identifier
t_CPP_ID = r'[A-Za-z_][\w_]*'
# Integer literal
# Integer literal: decimal or hex with optional u/l suffixes; value kept as text.
def CPP_INTEGER(t):
    r'(((((0x)|(0X))[0-9a-fA-F]+)|(\d+))([uU][lL]|[lL][uU]|[uU]|[lL])?)'
    return t
# The function form gives the rule its regex docstring; expose it under the
# conventional t_ name so PLY picks it up.
t_CPP_INTEGER = CPP_INTEGER
# Floating literal
t_CPP_FLOAT = r'((\d+)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?'
# String literal
def t_CPP_STRING(t):
r'\"([^\\\n]|(\\(.|\n)))*?\"'
t.lexer.lineno += t.value.count("\n")
return t
# Character constant 'c' or L'c'
def t_CPP_CHAR(t):
r'(L)?\'([^\\\n]|(\\(.|\n)))*?\''
t.lexer.lineno += t.value.count("\n")
return t
# Comment
def t_CPP_COMMENT1(t):
r'(/\*(.|\n)*?\*/)'
ncr = t.value.count("\n")
t.lexer.lineno += ncr
# replace with one space or a number of '\n'
t.type = 'CPP_WS'; t.value = '\n' * ncr if ncr else ' '
return t
# Line comment
def t_CPP_COMMENT2(t):
r'(//.*?(\n|$))'
# replace with '/n'
t.type = 'CPP_WS'; t.value = '\n'
return t
def t_error(t):
    """Fallback for unmatched input: emit the offending character as its own
    single-character token and advance the lexer past it."""
    bad_char = t.value[0]
    t.type = bad_char
    t.value = bad_char
    t.lexer.skip(1)
    return t
import re
import copy
import time
import os.path
# -----------------------------------------------------------------------------
# trigraph()
#
# Given an input string, this function replaces all trigraph sequences.
# The following mapping is used:
#
# ??= #
# ??/ \
# ??' ^
# ??( [
# ??) ]
# ??! |
# ??< {
# ??> }
# ??- ~
# -----------------------------------------------------------------------------
# Compiled pattern matching any of the nine ANSI C trigraph sequences ("??x").
_trigraph_pat = re.compile(r'''\?\?[=/\'\(\)\!<>\-]''')

# Final character of a trigraph -> its single-character replacement.
_trigraph_rep = {
    '=':'#',
    '/':'\\',
    "'":'^',
    '(':'[',
    ')':']',
    '!':'|',
    '<':'{',
    '>':'}',
    '-':'~'
}

def trigraph(input):
    """Return *input* with every ANSI C trigraph replaced by its equivalent
    single character (e.g. '??=' -> '#')."""
    def _replace(match):
        return _trigraph_rep[match.group()[-1]]
    return _trigraph_pat.sub(_replace, input)
# ------------------------------------------------------------------
# Macro object
#
# This object holds information about preprocessor macros
#
# .name - Macro name (string)
# .value - Macro value (a list of tokens)
# .arglist - List of argument names
# .variadic - Boolean indicating whether or not variadic macro
# .vararg - Name of the variadic parameter
#
# When a macro is created, the macro replacement token sequence is
# pre-scanned and used to create patch lists that are later used
# during macro expansion
# ------------------------------------------------------------------
class Macro(object):
    """A single preprocessor macro definition.

    Attributes:
        name     -- macro name (string)
        value    -- replacement body as a list of tokens
        arglist  -- parameter names, or None for an object-like macro
        variadic -- True if the macro takes a variable argument list
        vararg   -- name of the variadic parameter (set only when variadic)
        source   -- filename the macro was defined in (filled in elsewhere)
    """
    def __init__(self, name, value, arglist=None, variadic=False):
        self.name = name
        self.value = value
        self.arglist = arglist
        self.variadic = variadic
        self.source = None
        if variadic:
            self.vararg = arglist[-1]
# ------------------------------------------------------------------
# Preprocessor object
#
# Object representing a preprocessor. Contains macro definitions,
# include directories, and other information
# ------------------------------------------------------------------
class Preprocessor(object):
    def __init__(self,lexer=None):
        """Build a preprocessor around *lexer* (defaults to the module-level
        ``lex.lexer``) and predefine the __DATE__ and __TIME__ macros."""
        if lexer is None:
            lexer = lex.lexer
        self.lexer = lexer
        self.macros = { }        # macro name -> Macro object
        self.path = []           # user-supplied #include search directories
        self.temp_path = []      # dirs of files currently being included ("" includes)
        # Probe the lexer for selected tokens
        self.lexprobe()
        tm = time.localtime()
        self.define("__DATE__ \"%s\"" % time.strftime("%b %d %Y",tm))
        self.define("__TIME__ \"%s\"" % time.strftime("%H:%M:%S",tm))
        self.parser = None       # token generator installed by parse()
# -----------------------------------------------------------------------------
# tokenize()
#
# Utility function. Given a string of text, tokenize into a list of tokens
# -----------------------------------------------------------------------------
def tokenize(self,text):
tokens = []
self.lexer.input(text)
while True:
tok = self.lexer.token()
if not tok: break
tokens.append(tok)
return tokens
# ---------------------------------------------------------------------
# error()
#
# Report a preprocessor error/warning of some kind
# ----------------------------------------------------------------------
def error(self,file,line,msg):
print("%s:%d %s" % (file,line,msg))
# ----------------------------------------------------------------------
# lexprobe()
#
# This method probes the preprocessor lexer object to discover
# the token types of symbols that are important to the preprocessor.
# If this works right, the preprocessor will simply "work"
# with any suitable lexer regardless of how tokens have been named.
# ----------------------------------------------------------------------
    def lexprobe(self):
        """Feed sample inputs to the lexer to discover the token type names it
        uses for identifiers, integers, strings, whitespace and newlines.

        This lets the preprocessor work with any lexer regardless of how its
        tokens are named.  Results are stored as self.t_ID, self.t_INTEGER,
        self.t_INTEGER_TYPE, self.t_STRING, self.t_SPACE, self.t_NEWLINE and
        self.t_WS.
        """
        # Determine the token type for identifiers
        self.lexer.input("identifier")
        tok = self.lexer.token()
        if not tok or tok.value != "identifier":
            print("Couldn't determine identifier type")
        else:
            self.t_ID = tok.type
        # Determine the token type for integers
        self.lexer.input("12345")
        tok = self.lexer.token()
        if not tok or int(tok.value) != 12345:
            print("Couldn't determine integer type")
        else:
            self.t_INTEGER = tok.type
            self.t_INTEGER_TYPE = type(tok.value)
        # Determine the token type for strings enclosed in double quotes
        self.lexer.input("\"filename\"")
        tok = self.lexer.token()
        if not tok or tok.value != "\"filename\"":
            print("Couldn't determine string type")
        else:
            self.t_STRING = tok.type
        # Determine the token type for whitespace--if any
        self.lexer.input(" ")
        tok = self.lexer.token()
        if not tok or tok.value != " ":
            self.t_SPACE = None
        else:
            self.t_SPACE = tok.type
        # Determine the token type for newlines
        self.lexer.input("\n")
        tok = self.lexer.token()
        if not tok or tok.value != "\n":
            self.t_NEWLINE = None
            print("Couldn't determine token for newlines")
        else:
            self.t_NEWLINE = tok.type
        # NOTE(review): t_WS may contain None entries when the lexer has no
        # space/newline token; later `tok.type in self.t_WS` checks rely on that.
        self.t_WS = (self.t_SPACE, self.t_NEWLINE)
        # Check for other characters used by the preprocessor
        chars = [ '<','>','#','##','\\','(',')',',','.']
        for c in chars:
            self.lexer.input(c)
            tok = self.lexer.token()
            if not tok or tok.value != c:
                print("Unable to lex '%s' required for preprocessor" % c)
# ----------------------------------------------------------------------
# add_path()
#
# Adds a search path to the preprocessor.
# ----------------------------------------------------------------------
def add_path(self,path):
self.path.append(path)
# ----------------------------------------------------------------------
# group_lines()
#
# Given an input string, this function splits it into lines. Trailing whitespace
# is removed. Any line ending with \ is grouped with the next line. This
# function forms the lowest level of the preprocessor---grouping into text into
# a line-by-line format.
# ----------------------------------------------------------------------
    def group_lines(self,input):
        """Split *input* into logical lines, yielding one list of tokens per line.

        Trailing whitespace is stripped and any line ending in a backslash is
        spliced onto the following line before tokenizing, so continuations
        arrive as a single logical line.  This is the lowest level of the
        preprocessor.
        """
        lex = self.lexer.clone()
        lines = [x.rstrip() for x in input.splitlines()]
        # Fold backslash continuations into the first line; the consumed
        # lines are blanked (not removed) so line numbering is preserved.
        for i in xrange(len(lines)):
            j = i+1
            while lines[i].endswith('\\') and (j < len(lines)):
                lines[i] = lines[i][:-1]+lines[j]
                lines[j] = ""
                j += 1
        input = "\n".join(lines)
        lex.input(input)
        lex.lineno = 1
        current_line = []
        while True:
            tok = lex.token()
            if not tok:
                break
            current_line.append(tok)
            # A whitespace token containing '\n' terminates the logical line.
            if tok.type in self.t_WS and '\n' in tok.value:
                yield current_line
                current_line = []
        if current_line:
            yield current_line
# ----------------------------------------------------------------------
# tokenstrip()
#
# Remove leading/trailing whitespace tokens from a token list
# ----------------------------------------------------------------------
def tokenstrip(self,tokens):
i = 0
while i < len(tokens) and tokens[i].type in self.t_WS:
i += 1
del tokens[:i]
i = len(tokens)-1
while i >= 0 and tokens[i].type in self.t_WS:
i -= 1
del tokens[i+1:]
return tokens
# ----------------------------------------------------------------------
# collect_args()
#
# Collects comma separated arguments from a list of tokens. The arguments
# must be enclosed in parenthesis. Returns a tuple (tokencount,args,positions)
# where tokencount is the number of tokens consumed, args is a list of arguments,
# and positions is a list of integers containing the starting index of each
# argument. Each argument is represented by a list of tokens.
#
# When collecting arguments, leading and trailing whitespace is removed
# from each argument.
#
# This function properly handles nested parenthesis and commas---these do not
# define new arguments.
# ----------------------------------------------------------------------
    def collect_args(self,tokenlist):
        """Collect comma-separated macro arguments from *tokenlist*.

        The arguments must be enclosed in parentheses.  Returns a tuple
        (tokencount, args, positions) where tokencount is the number of tokens
        consumed, args is a list of arguments (each itself a token list with
        leading/trailing whitespace stripped), and positions holds the start
        index of each argument.  Nested parentheses are honoured: commas
        inside them do not split arguments.  On error, reports via
        self.error() and returns (0, [], []).
        """
        args = []
        positions = []
        current_arg = []
        nesting = 1
        tokenlen = len(tokenlist)
        # Search for the opening '('.
        i = 0
        while (i < tokenlen) and (tokenlist[i].type in self.t_WS):
            i += 1
        if (i < tokenlen) and (tokenlist[i].value == '('):
            positions.append(i+1)
        else:
            self.error(self.source,tokenlist[0].lineno,"Missing '(' in macro arguments")
            return 0, [], []
        i += 1
        while i < tokenlen:
            t = tokenlist[i]
            if t.value == '(':
                current_arg.append(t)
                nesting += 1
            elif t.value == ')':
                nesting -= 1
                if nesting == 0:
                    # Closing paren of the call: flush the final argument.
                    if current_arg:
                        args.append(self.tokenstrip(current_arg))
                    positions.append(i)
                    return i+1,args,positions
                current_arg.append(t)
            elif t.value == ',' and nesting == 1:
                # Top-level comma separates arguments.
                args.append(self.tokenstrip(current_arg))
                positions.append(i+1)
                current_arg = []
            else:
                current_arg.append(t)
            i += 1
        # Missing end argument
        self.error(self.source,tokenlist[-1].lineno,"Missing ')' in macro arguments")
        return 0, [],[]
# ----------------------------------------------------------------------
# macro_prescan()
#
# Examine the macro value (token sequence) and identify patch points
# This is used to speed up macro expansion later on---we'll know
# right away where to apply patches to the value to form the expansion
# ----------------------------------------------------------------------
    def macro_prescan(self,macro):
        """Pre-scan *macro*'s replacement tokens and record patch points.

        Computing where '#' stringization, '##' concatenation and plain
        argument substitution occur up front makes macro_expand_args() a
        simple splice operation later on.
        """
        macro.patch = [] # Standard macro arguments
        macro.str_patch = [] # String conversion expansion
        macro.var_comma_patch = [] # Variadic macro comma patch
        i = 0
        while i < len(macro.value):
            if macro.value[i].type == self.t_ID and macro.value[i].value in macro.arglist:
                argnum = macro.arglist.index(macro.value[i].value)
                # Conversion of argument to a string
                if i > 0 and macro.value[i-1].value == '#':
                    macro.value[i] = copy.copy(macro.value[i])
                    macro.value[i].type = self.t_STRING
                    del macro.value[i-1]
                    macro.str_patch.append((argnum,i-1))
                    continue
                # Concatenation
                elif (i > 0 and macro.value[i-1].value == '##'):
                    macro.patch.append(('c',argnum,i-1))
                    del macro.value[i-1]
                    continue
                elif ((i+1) < len(macro.value) and macro.value[i+1].value == '##'):
                    macro.patch.append(('c',argnum,i))
                    i += 1
                    continue
                # Standard expansion
                else:
                    macro.patch.append(('e',argnum,i))
            elif macro.value[i].value == '##':
                # Record ', ## __VA_ARGS__' commas so they can be dropped when
                # the variadic argument turns out to be empty.
                if macro.variadic and (i > 0) and (macro.value[i-1].value == ',') and \
                        ((i+1) < len(macro.value)) and (macro.value[i+1].type == self.t_ID) and \
                        (macro.value[i+1].value == macro.vararg):
                    macro.var_comma_patch.append(i-1)
            i += 1
        # Sort descending by position so later splices don't shift the
        # indices of patches still to be applied.
        macro.patch.sort(key=lambda x: x[2],reverse=True)
# ----------------------------------------------------------------------
# macro_expand_args()
#
# Given a Macro and list of arguments (each a token list), this method
# returns an expanded version of a macro. The return value is a token sequence
# representing the replacement macro tokens
# ----------------------------------------------------------------------
    def macro_expand_args(self,macro,args):
        """Expand *macro* with the given *args* (each a token list).

        Returns a fresh token sequence representing the replacement text,
        applying the patch lists computed by macro_prescan().
        """
        # Make a copy of the macro token sequence
        rep = [copy.copy(_x) for _x in macro.value]
        # Make string expansion patches. These do not alter the length of the replacement sequence
        str_expansion = {}
        for argnum, i in macro.str_patch:
            if argnum not in str_expansion:
                str_expansion[argnum] = ('"%s"' % "".join([x.value for x in args[argnum]])).replace("\\","\\\\")
            rep[i] = copy.copy(rep[i])
            rep[i].value = str_expansion[argnum]
        # Make the variadic macro comma patch. If the variadic macro argument is empty, we get rid
        comma_patch = False
        if macro.variadic and not args[-1]:
            for i in macro.var_comma_patch:
                rep[i] = None
                comma_patch = True
        # Make all other patches. The order of these matters. It is assumed that the patch list
        # has been sorted in reverse order of patch location since replacements will cause the
        # size of the replacement sequence to expand from the patch point.
        expanded = { }
        for ptype, argnum, i in macro.patch:
            # Concatenation. Argument is left unexpanded
            if ptype == 'c':
                rep[i:i+1] = args[argnum]
            # Normal expansion. Argument is macro expanded first
            elif ptype == 'e':
                if argnum not in expanded:
                    expanded[argnum] = self.expand_macros(args[argnum])
                rep[i:i+1] = expanded[argnum]
        # Get rid of removed comma if necessary
        if comma_patch:
            rep = [_i for _i in rep if _i]
        return rep
# ----------------------------------------------------------------------
# expand_macros()
#
# Given a list of tokens, this function performs macro expansion.
# The expanded argument is a dictionary that contains macros already
# expanded. This is used to prevent infinite recursion.
# ----------------------------------------------------------------------
    def expand_macros(self,tokens,expanded=None):
        """Perform macro expansion on a list of tokens, in place, and return it.

        *expanded* tracks macro names already being expanded to prevent
        infinite recursion (a macro is never re-expanded inside itself).
        """
        if expanded is None:
            expanded = {}
        i = 0
        while i < len(tokens):
            t = tokens[i]
            if t.type == self.t_ID:
                if t.value in self.macros and t.value not in expanded:
                    # Yes, we found a macro match
                    expanded[t.value] = True
                    m = self.macros[t.value]
                    if not m.arglist:
                        # A simple macro
                        ex = self.expand_macros([copy.copy(_x) for _x in m.value],expanded)
                        for e in ex:
                            e.lineno = t.lineno
                        tokens[i:i+1] = ex
                        i += len(ex)
                    else:
                        # A macro with arguments
                        j = i + 1
                        while j < len(tokens) and tokens[j].type in self.t_WS:
                            j += 1
                        # NOTE(review): if a function-like macro name is the
                        # last token, tokens[j] raises IndexError; and when the
                        # next token is not '(', the fall-through re-enters this
                        # branch with i unchanged -- confirm against upstream PLY.
                        if tokens[j].value == '(':
                            tokcount,args,positions = self.collect_args(tokens[j:])
                            if not m.variadic and len(args) != len(m.arglist):
                                self.error(self.source,t.lineno,"Macro %s requires %d arguments" % (t.value,len(m.arglist)))
                                i = j + tokcount
                            elif m.variadic and len(args) < len(m.arglist)-1:
                                if len(m.arglist) > 2:
                                    self.error(self.source,t.lineno,"Macro %s must have at least %d arguments" % (t.value, len(m.arglist)-1))
                                else:
                                    self.error(self.source,t.lineno,"Macro %s must have at least %d argument" % (t.value, len(m.arglist)-1))
                                i = j + tokcount
                            else:
                                if m.variadic:
                                    if len(args) == len(m.arglist)-1:
                                        args.append([])
                                    else:
                                        args[len(m.arglist)-1] = tokens[j+positions[len(m.arglist)-1]:j+tokcount-1]
                                        del args[len(m.arglist):]
                                # Get macro replacement text
                                rep = self.macro_expand_args(m,args)
                                rep = self.expand_macros(rep,expanded)
                                for r in rep:
                                    r.lineno = t.lineno
                                tokens[i:j+tokcount] = rep
                                i += len(rep)
                        del expanded[t.value]
                        continue
                elif t.value == '__LINE__':
                    t.type = self.t_INTEGER
                    t.value = self.t_INTEGER_TYPE(t.lineno)
            i += 1
        return tokens
# ----------------------------------------------------------------------
# evalexpr()
#
# Evaluate an expression token sequence for the purposes of evaluating
# integral expressions.
# ----------------------------------------------------------------------
    def evalexpr(self,tokens):
        """Evaluate a #if/#elif expression (a token list) to an integer result.

        defined(X) is resolved first, macros are expanded, remaining
        identifiers become 0L, and the expression is handed to Python's
        eval() after translating &&/||/! to and/or/not.
        """
        # tokens = tokenize(line)
        # Search for defined macros
        i = 0
        while i < len(tokens):
            if tokens[i].type == self.t_ID and tokens[i].value == 'defined':
                j = i + 1
                needparen = False
                result = "0L"
                while j < len(tokens):
                    if tokens[j].type in self.t_WS:
                        j += 1
                        continue
                    elif tokens[j].type == self.t_ID:
                        if tokens[j].value in self.macros:
                            result = "1L"
                        else:
                            result = "0L"
                        if not needparen: break
                    elif tokens[j].value == '(':
                        needparen = True
                    elif tokens[j].value == ')':
                        break
                    else:
                        self.error(self.source,tokens[i].lineno,"Malformed defined()")
                    j += 1
                # Collapse the whole defined(...) construct to a single 0L/1L.
                tokens[i].type = self.t_INTEGER
                tokens[i].value = self.t_INTEGER_TYPE(result)
                del tokens[i+1:j+1]
            i += 1
        tokens = self.expand_macros(tokens)
        for i,t in enumerate(tokens):
            if t.type == self.t_ID:
                # Unknown identifiers evaluate as zero, like a C preprocessor.
                tokens[i] = copy.copy(t)
                tokens[i].type = self.t_INTEGER
                tokens[i].value = self.t_INTEGER_TYPE("0L")
            elif t.type == self.t_INTEGER:
                tokens[i] = copy.copy(t)
                # Strip off any trailing suffixes
                tokens[i].value = str(tokens[i].value)
                while tokens[i].value[-1] not in "0123456789abcdefABCDEF":
                    tokens[i].value = tokens[i].value[:-1]
        expr = "".join([str(x.value) for x in tokens])
        expr = expr.replace("&&"," and ")
        expr = expr.replace("||"," or ")
        expr = expr.replace("!"," not ")
        try:
            # NOTE(review): eval() on preprocessor text -- only safe for
            # trusted input; do not feed untrusted sources through #if.
            result = eval(expr)
        except Exception:
            self.error(self.source,tokens[0].lineno,"Couldn't evaluate expression")
            result = 0
        return result
# ----------------------------------------------------------------------
# parsegen()
#
# Parse an input string/
# ----------------------------------------------------------------------
    def parsegen(self,input,source=None):
        """Generator driving the whole preprocessor: yields the expanded token
        stream for *input*, handling directives, conditionals and includes.

        *enable* tracks whether the current conditional region emits tokens;
        *iftrigger* records whether any branch of the current #if chain has
        fired; *ifstack* saves (enable, iftrigger) across nested conditionals.
        """
        # Replace trigraph sequences
        t = trigraph(input)
        lines = self.group_lines(t)
        if not source:
            source = ""
        self.define("__FILE__ \"%s\"" % source)
        self.source = source
        chunk = []
        enable = True
        iftrigger = False
        ifstack = []
        for x in lines:
            # Find the first non-whitespace token of the logical line.
            for i,tok in enumerate(x):
                if tok.type not in self.t_WS: break
            if tok.value == '#':
                # Preprocessor directive
                # insert necessary whitespace instead of eaten tokens
                for tok in x:
                    if tok.type in self.t_WS and '\n' in tok.value:
                        chunk.append(tok)
                dirtokens = self.tokenstrip(x[i+1:])
                if dirtokens:
                    name = dirtokens[0].value
                    args = self.tokenstrip(dirtokens[1:])
                else:
                    name = ""
                    args = []
                if name == 'define':
                    if enable:
                        for tok in self.expand_macros(chunk):
                            yield tok
                        chunk = []
                        self.define(args)
                elif name == 'include':
                    if enable:
                        for tok in self.expand_macros(chunk):
                            yield tok
                        chunk = []
                        # Save/restore __FILE__ around the nested include.
                        oldfile = self.macros['__FILE__']
                        for tok in self.include(args):
                            yield tok
                        self.macros['__FILE__'] = oldfile
                        self.source = source
                elif name == 'undef':
                    if enable:
                        for tok in self.expand_macros(chunk):
                            yield tok
                        chunk = []
                        self.undef(args)
                elif name == 'ifdef':
                    ifstack.append((enable,iftrigger))
                    if enable:
                        if not args[0].value in self.macros:
                            enable = False
                            iftrigger = False
                        else:
                            iftrigger = True
                elif name == 'ifndef':
                    ifstack.append((enable,iftrigger))
                    if enable:
                        if args[0].value in self.macros:
                            enable = False
                            iftrigger = False
                        else:
                            iftrigger = True
                elif name == 'if':
                    ifstack.append((enable,iftrigger))
                    if enable:
                        result = self.evalexpr(args)
                        if not result:
                            enable = False
                            iftrigger = False
                        else:
                            iftrigger = True
                elif name == 'elif':
                    if ifstack:
                        if ifstack[-1][0]: # We only pay attention if outer "if" allows this
                            if enable: # If already true, we flip enable False
                                enable = False
                            elif not iftrigger: # If False, but not triggered yet, we'll check expression
                                result = self.evalexpr(args)
                                if result:
                                    enable = True
                                    iftrigger = True
                    else:
                        self.error(self.source,dirtokens[0].lineno,"Misplaced #elif")
                elif name == 'else':
                    if ifstack:
                        if ifstack[-1][0]:
                            if enable:
                                enable = False
                            elif not iftrigger:
                                enable = True
                                iftrigger = True
                    else:
                        self.error(self.source,dirtokens[0].lineno,"Misplaced #else")
                elif name == 'endif':
                    if ifstack:
                        enable,iftrigger = ifstack.pop()
                    else:
                        self.error(self.source,dirtokens[0].lineno,"Misplaced #endif")
                else:
                    # Unknown preprocessor directive
                    pass
            else:
                # Normal text
                if enable:
                    chunk.extend(x)
        # Flush whatever normal text remains after the last directive.
        for tok in self.expand_macros(chunk):
            yield tok
        chunk = []
# ----------------------------------------------------------------------
# include()
#
# Implementation of file-inclusion
# ----------------------------------------------------------------------
    def include(self,tokens):
        """Process a #include directive, yielding the included file's tokens.

        Handles both <...> (system path first) and "..." (local path first)
        forms.  The directory of the included file is temporarily pushed onto
        self.temp_path so nested relative includes resolve correctly.
        """
        # Try to extract the filename and then process an include file
        if not tokens:
            return
        if tokens:
            if tokens[0].value != '<' and tokens[0].type != self.t_STRING:
                # The filename may itself come from a macro; expand first.
                tokens = self.expand_macros(tokens)
            if tokens[0].value == '<':
                # Include <...>
                i = 1
                while i < len(tokens):
                    if tokens[i].value == '>':
                        break
                    i += 1
                else:
                    print("Malformed #include <...>")
                    return
                filename = "".join([x.value for x in tokens[1:i]])
                path = self.path + [""] + self.temp_path
            elif tokens[0].type == self.t_STRING:
                filename = tokens[0].value[1:-1]
                path = self.temp_path + [""] + self.path
            else:
                print("Malformed #include statement")
                return
        for p in path:
            iname = os.path.join(p,filename)
            try:
                # NOTE(review): the file handle is never explicitly closed;
                # relies on garbage collection.
                data = open(iname,"r").read()
                dname = os.path.dirname(iname)
                if dname:
                    self.temp_path.insert(0,dname)
                for tok in self.parsegen(data,filename):
                    yield tok
                if dname:
                    del self.temp_path[0]
                break
            except IOError:
                pass
        else:
            print("Couldn't find '%s'" % filename)
# ----------------------------------------------------------------------
# define()
#
# Define a new macro
# ----------------------------------------------------------------------
    def define(self,tokens):
        """Define a new macro from *tokens* (a token list or a raw string).

        Handles object-like macros, function-like macros with arguments, and
        variadic macros ('...' or 'name...').  The parsed Macro is stored in
        self.macros keyed by its name.
        """
        if isinstance(tokens,STRING_TYPES):
            tokens = self.tokenize(tokens)
        linetok = tokens
        try:
            name = linetok[0]
            if len(linetok) > 1:
                mtype = linetok[1]
            else:
                mtype = None
            if not mtype:
                # Bare name: object-like macro with an empty body.
                m = Macro(name.value,[])
                self.macros[name.value] = m
            elif mtype.type in self.t_WS:
                # A normal macro
                m = Macro(name.value,self.tokenstrip(linetok[2:]))
                self.macros[name.value] = m
            elif mtype.value == '(':
                # A macro with arguments
                tokcount, args, positions = self.collect_args(linetok[1:])
                variadic = False
                for a in args:
                    if variadic:
                        print("No more arguments may follow a variadic argument")
                        break
                    astr = "".join([str(_i.value) for _i in a])
                    if astr == "...":
                        # Anonymous variadic parameter: rename to __VA_ARGS__.
                        variadic = True
                        a[0].type = self.t_ID
                        a[0].value = '__VA_ARGS__'
                        variadic = True
                        del a[1:]
                        continue
                    elif astr[-3:] == "..." and a[0].type == self.t_ID:
                        variadic = True
                        del a[1:]
                        # If, for some reason, "." is part of the identifier, strip off the name for the purposes
                        # of macro expansion
                        if a[0].value[-3:] == '...':
                            a[0].value = a[0].value[:-3]
                        continue
                    if len(a) > 1 or a[0].type != self.t_ID:
                        print("Invalid macro argument")
                        break
                else:
                    mvalue = self.tokenstrip(linetok[1+tokcount:])
                    # Normalize whitespace around '##' so concatenation patches
                    # line up during prescan.
                    i = 0
                    while i < len(mvalue):
                        if i+1 < len(mvalue):
                            if mvalue[i].type in self.t_WS and mvalue[i+1].value == '##':
                                del mvalue[i]
                                continue
                            elif mvalue[i].value == '##' and mvalue[i+1].type in self.t_WS:
                                del mvalue[i+1]
                        i += 1
                    m = Macro(name.value,mvalue,[x[0].value for x in args],variadic)
                    self.macro_prescan(m)
                    self.macros[name.value] = m
            else:
                print("Bad macro definition")
        except LookupError:
            print("Bad macro definition")
# ----------------------------------------------------------------------
# undef()
#
# Undefine a macro
# ----------------------------------------------------------------------
def undef(self,tokens):
id = tokens[0].value
try:
del self.macros[id]
except LookupError:
pass
# ----------------------------------------------------------------------
# parse()
#
# Parse input text.
# ----------------------------------------------------------------------
    def parse(self,input,source=None,ignore={}):
        """Start preprocessing *input*; tokens are then pulled via token().

        *ignore* is a collection of token types to suppress.  (The shared {}
        default is only ever read, never mutated, so the mutable-default
        pitfall does not apply here.)
        """
        self.ignore = ignore
        self.parser = self.parsegen(input,source)
# ----------------------------------------------------------------------
# token()
#
# Method to return individual tokens
# ----------------------------------------------------------------------
def token(self):
try:
while True:
tok = next(self.parser)
if tok.type not in self.ignore: return tok
except StopIteration:
self.parser = None
return None
# Demo driver: preprocess the file named on the command line and dump tokens.
if __name__ == '__main__':
    import ply.lex as lex
    # Build a lexer from this module's token definitions.
    lexer = lex.lex()
    # Run a preprocessor
    import sys
    # NOTE(review): the file handle is never closed; acceptable for a demo script.
    f = open(sys.argv[1])
    input = f.read()
    p = Preprocessor(lexer)
    p.parse(input,sys.argv[1])
    while True:
        tok = p.token()
        if not tok: break
        print(p.source, tok)
| 33,282 | Python | 35.736203 | 141 | 0.420588 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pycparser/ply/yacc.py | # -----------------------------------------------------------------------------
# ply: yacc.py
#
# Copyright (C) 2001-2017
# David M. Beazley (Dabeaz LLC)
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the David Beazley or Dabeaz LLC may be used to
# endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# -----------------------------------------------------------------------------
#
# This implements an LR parser that is constructed from grammar rules defined
# as Python functions. The grammer is specified by supplying the BNF inside
# Python documentation strings. The inspiration for this technique was borrowed
# from John Aycock's Spark parsing system. PLY might be viewed as cross between
# Spark and the GNU bison utility.
#
# The current implementation is only somewhat object-oriented. The
# LR parser itself is defined in terms of an object (which allows multiple
# parsers to co-exist). However, most of the variables used during table
# construction are defined in terms of global variables. Users shouldn't
# notice unless they are trying to define multiple parsers at the same
# time using threads (in which case they should have their head examined).
#
# This implementation supports both SLR and LALR(1) parsing. LALR(1)
# support was originally implemented by Elias Ioup ([email protected]),
# using the algorithm found in Aho, Sethi, and Ullman "Compilers: Principles,
# Techniques, and Tools" (The Dragon Book). LALR(1) has since been replaced
# by the more efficient DeRemer and Pennello algorithm.
#
# :::::::: WARNING :::::::
#
# Construction of LR parsing tables is fairly complicated and expensive.
# To make this module run fast, a *LOT* of work has been put into
# optimization---often at the expensive of readability and what might
# consider to be good Python "coding style." Modify the code at your
# own risk!
# ----------------------------------------------------------------------------
import re
import types
import sys
import os.path
import inspect
import base64
import warnings
__version__ = '3.10'
__tabversion__ = '3.10'
#-----------------------------------------------------------------------------
# === User configurable parameters ===
#
# Change these to modify the default behavior of yacc (if you wish)
#-----------------------------------------------------------------------------
yaccdebug = True             # Debugging mode. If set, yacc generates a
                             # 'parser.out' file in the current directory
debug_file = 'parser.out'    # Default name of the debugging file
tab_module = 'parsetab'      # Default name of the table module
default_lr = 'LALR'          # Default LR table generation method
error_count = 3              # Number of symbols that must be shifted to leave recovery mode
yaccdevel = False            # Set to True if developing yacc. This turns off optimized
                             # implementations of certain functions.
resultlimit = 40             # Size limit of results when running in debug mode.
pickle_protocol = 0          # Protocol to use when writing pickle files
# String type-checking compatibility
if sys.version_info[0] < 3:
    string_types = basestring
else:
    string_types = str
MAXINT = sys.maxsize         # Largest native int; used as an "infinity" sentinel
# This object is a stand-in for a logging object created by the
# logging module. PLY will use this by default to create things
# such as the parser.out file. If a user wants more detailed
# information, they can create their own logging object and pass
# it into PLY.
class PlyLogger(object):
    """File-backed stand-in for a ``logging`` logger.

    PLY uses this by default to produce artifacts such as the parser.out
    file; users wanting richer output can pass a real logging object instead.
    """

    def __init__(self, f):
        self.f = f

    def debug(self, msg, *args, **kwargs):
        self.f.write('%s\n' % (msg % args))

    info = debug

    def warning(self, msg, *args, **kwargs):
        self.f.write('WARNING: %s\n' % (msg % args))

    def error(self, msg, *args, **kwargs):
        self.f.write('ERROR: %s\n' % (msg % args))

    # Mirrors the original implementation: critical messages are emitted
    # with no prefix, exactly like debug messages.
    critical = debug
# Null logger is used when no output is generated. Does nothing.
class NullLogger(object):
def __getattribute__(self, name):
return self
def __call__(self, *args, **kwargs):
return self
# Exception raised for yacc-related errors
class YaccError(Exception):
pass
# Format the result message that the parser produces when running in debug mode.
def format_result(r):
repr_str = repr(r)
if '\n' in repr_str:
repr_str = repr(repr_str)
if len(repr_str) > resultlimit:
repr_str = repr_str[:resultlimit] + ' ...'
result = '<%s @ 0x%x> (%s)' % (type(r).__name__, id(r), repr_str)
return result
# Format stack entries when the parser is running in debug mode
def format_stack_entry(r):
repr_str = repr(r)
if '\n' in repr_str:
repr_str = repr(repr_str)
if len(repr_str) < 16:
return repr_str
else:
return '<%s @ 0x%x>' % (type(r).__name__, id(r))
# Panic mode error recovery support. This feature is being reworked--much of the
# code here is to offer a deprecation/backwards compatible transition
# Panic mode error recovery support. This feature is being reworked--much of the
# code here is to offer a deprecation/backwards compatible transition
_errok = None      # Bound to parser.errok by call_errorfunc() during p_error()
_token = None      # Bound to parser.token by call_errorfunc() during p_error()
_restart = None    # Bound to parser.restart by call_errorfunc() during p_error()
_warnmsg = '''PLY: Don't use global functions errok(), token(), and restart() in p_error().
Instead, invoke the methods on the associated parser instance:
def p_error(p):
...
# Use parser.errok(), parser.token(), parser.restart()
...
parser = yacc.yacc()
'''
def errok():
    # Deprecated module-level shim (see _warnmsg): forwards to the active
    # parser's errok() bound by call_errorfunc() during p_error().
    warnings.warn(_warnmsg)
    return _errok()
def restart():
    # Deprecated module-level shim (see _warnmsg): forwards to the active
    # parser's restart() bound by call_errorfunc() during p_error().
    warnings.warn(_warnmsg)
    return _restart()
def token():
    # Deprecated module-level shim (see _warnmsg): forwards to the active
    # parser's token() bound by call_errorfunc() during p_error().
    warnings.warn(_warnmsg)
    return _token()
# Utility function to call the p_error() function with some deprecation hacks
def call_errorfunc(errorfunc, token, parser):
    """Invoke the user-defined p_error() function.

    While the call is in flight, the deprecated module-level errok/token/
    restart hooks are bound to the given parser's methods so that legacy
    p_error() implementations keep working; they are removed afterwards.
    Returns whatever errorfunc(token) returns.
    """
    global _errok, _token, _restart
    _errok, _token, _restart = parser.errok, parser.token, parser.restart
    result = errorfunc(token)
    try:
        del _errok, _token, _restart
    except NameError:
        # Already gone (e.g. removed by the error function itself).
        pass
    return result
#-----------------------------------------------------------------------------
# === LR Parsing Engine ===
#
# The following classes are used for the LR parser itself. These are not
# used during table construction and are independent of the actual LR
# table generation algorithm
#-----------------------------------------------------------------------------
# This class is used to hold non-terminal grammar symbols during parsing.
# It normally has the following attributes set:
# .type = Grammar symbol type
# .value = Symbol value
# .lineno = Starting line number
# .endlineno = Ending line number (optional, set automatically)
# .lexpos = Starting lex position
# .endlexpos = Ending lex position (optional, set automatically)
class YaccSymbol:
    """Holder for a grammar symbol on the parser's symbol stack.

    Attributes such as .type, .value, .lineno/.endlineno and
    .lexpos/.endlexpos are attached dynamically by the parsing engine.
    """
    def __str__(self):
        return self.type
    def __repr__(self):
        return self.__str__()
# This class is a wrapper around the objects actually passed to each
# grammar rule. Index lookup and assignment actually assign the
# .value attribute of the underlying YaccSymbol object.
# The lineno() method returns the line number of a given
# item (or 0 if not defined). The linespan() method returns
# a tuple of (startline,endline) representing the range of lines
# for a symbol. The lexspan() method returns a tuple (lexpos,endlexpos)
# representing the range of positional information for a symbol.
class YaccProduction:
    """Wrapper passed to grammar rule functions as ``p``.

    Index access reads/writes the .value attribute of the underlying
    YaccSymbol objects in ``slice``; negative indices fall through to the
    parser's symbol ``stack``. lineno()/linespan() and lexpos()/lexspan()
    expose position information recorded on the symbols (0 when absent).
    """
    def __init__(self, s, stack=None):
        self.slice = s
        self.stack = stack
        self.lexer = None
        self.parser = None
    def __getitem__(self, n):
        if isinstance(n, slice):
            return [sym.value for sym in self.slice[n]]
        if n >= 0:
            return self.slice[n].value
        # Negative indices index the live parser stack, not this production.
        return self.stack[n].value
    def __setitem__(self, n, v):
        self.slice[n].value = v
    def __getslice__(self, i, j):
        # Python 2 compatibility only; Python 3 routes slices to __getitem__.
        return [sym.value for sym in self.slice[i:j]]
    def __len__(self):
        return len(self.slice)
    def lineno(self, n):
        return getattr(self.slice[n], 'lineno', 0)
    def set_lineno(self, n, lineno):
        self.slice[n].lineno = lineno
    def linespan(self, n):
        first = getattr(self.slice[n], 'lineno', 0)
        last = getattr(self.slice[n], 'endlineno', first)
        return first, last
    def lexpos(self, n):
        return getattr(self.slice[n], 'lexpos', 0)
    def lexspan(self, n):
        first = getattr(self.slice[n], 'lexpos', 0)
        last = getattr(self.slice[n], 'endlexpos', first)
        return first, last
    def error(self):
        raise SyntaxError
# -----------------------------------------------------------------------------
# == LRParser ==
#
# The LR Parsing engine.
# -----------------------------------------------------------------------------
class LRParser:
    def __init__(self, lrtab, errorf):
        # lrtab supplies the precomputed LR tables (production list, ACTION
        # and GOTO tables); errorf is the user's p_error() function or None.
        self.productions = lrtab.lr_productions
        self.action = lrtab.lr_action
        self.goto = lrtab.lr_goto
        self.errorfunc = errorf
        self.set_defaulted_states()
        self.errorok = True
    def errok(self):
        # Called from within p_error() to signal that error recovery succeeded.
        self.errorok = True
    def restart(self):
        # Reset the parser to its initial configuration: empty both stacks
        # and push the implicit (state 0, $end) start entry.
        del self.statestack[:]
        del self.symstack[:]
        sym = YaccSymbol()
        sym.type = '$end'
        self.symstack.append(sym)
        self.statestack.append(0)
# Defaulted state support.
# This method identifies parser states where there is only one possible reduction action.
# For such states, the parser can make a choose to make a rule reduction without consuming
# the next look-ahead token. This delayed invocation of the tokenizer can be useful in
# certain kinds of advanced parsing situations where the lexer and parser interact with
# each other or change states (i.e., manipulation of scope, lexer states, etc.).
#
# See: https://www.gnu.org/software/bison/manual/html_node/Default-Reductions.html#Default-Reductions
    def set_defaulted_states(self):
        # A state is "defaulted" when its only possible action is a single
        # reduction (action value < 0); the parse loops can then reduce in
        # that state without first fetching a lookahead token.
        self.defaulted_states = {}
        for state, actions in self.action.items():
            rules = list(actions.values())
            if len(rules) == 1 and rules[0] < 0:
                self.defaulted_states[state] = rules[0]
    def disable_defaulted_states(self):
        # Turn off delayed-lookahead reductions; every state then consults
        # the lookahead token normally.
        self.defaulted_states = {}
    def parse(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None):
        # Front-end dispatcher that selects one of the three parse-loop
        # variants:
        #   debug truthy (or module yaccdevel set) -> parsedebug()
        #   tracking only                          -> parseopt()
        #   otherwise                              -> parseopt_notrack()
        # NOTE: isinstance(debug, int) is also True for bools, so debug=True
        # is replaced here with a PlyLogger writing to stderr.
        if debug or yaccdevel:
            if isinstance(debug, int):
                debug = PlyLogger(sys.stderr)
            return self.parsedebug(input, lexer, debug, tracking, tokenfunc)
        elif tracking:
            return self.parseopt(input, lexer, debug, tracking, tokenfunc)
        else:
            return self.parseopt_notrack(input, lexer, debug, tracking, tokenfunc)
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# parsedebug().
#
# This is the debugging enabled version of parse(). All changes made to the
# parsing engine should be made here. Optimized versions of this function
# are automatically created by the ply/ygen.py script. This script cuts out
# sections enclosed in markers such as this:
#
# #--! DEBUG
# statements
# #--! DEBUG
#
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    def parsedebug(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None):
        """Debug-instrumented LR parse loop.

        This is the reference implementation of the parsing engine: the
        parseopt() and parseopt_notrack() variants are generated from this
        method by the ply/ygen.py script (see the banner comment above), so
        functional changes must be made here. ``debug`` is expected to be a
        logger-like object (e.g. PlyLogger). Returns the value attached to
        the start symbol, or None on unrecoverable error.
        """
        #--! parsedebug-start
        lookahead = None # Current lookahead symbol
        lookaheadstack = [] # Stack of lookahead symbols
        actions = self.action # Local reference to action table (to avoid lookup on self.)
        goto = self.goto # Local reference to goto table (to avoid lookup on self.)
        prod = self.productions # Local reference to production list (to avoid lookup on self.)
        defaulted_states = self.defaulted_states # Local reference to defaulted states
        pslice = YaccProduction(None) # Production object passed to grammar rules
        errorcount = 0 # Used during error recovery
        #--! DEBUG
        debug.info('PLY: PARSE DEBUG START')
        #--! DEBUG
        # If no lexer was given, we will try to use the lex module
        if not lexer:
            from . import lex
            lexer = lex.lexer
        # Set up the lexer and parser objects on pslice
        pslice.lexer = lexer
        pslice.parser = self
        # If input was supplied, pass to lexer
        if input is not None:
            lexer.input(input)
        if tokenfunc is None:
            # Tokenize function
            get_token = lexer.token
        else:
            get_token = tokenfunc
        # Set the parser() token method (sometimes used in error recovery)
        self.token = get_token
        # Set up the state and symbol stacks
        statestack = [] # Stack of parsing states
        self.statestack = statestack
        symstack = [] # Stack of grammar symbols
        self.symstack = symstack
        pslice.stack = symstack # Put in the production
        errtoken = None # Err token
        # The start state is assumed to be (0,$end)
        statestack.append(0)
        sym = YaccSymbol()
        sym.type = '$end'
        symstack.append(sym)
        state = 0
        while True:
            # Get the next symbol on the input. If a lookahead symbol
            # is already set, we just use that. Otherwise, we'll pull
            # the next token off of the lookaheadstack or from the lexer
            #--! DEBUG
            debug.debug('')
            debug.debug('State : %s', state)
            #--! DEBUG
            if state not in defaulted_states:
                if not lookahead:
                    if not lookaheadstack:
                        lookahead = get_token() # Get the next token
                    else:
                        lookahead = lookaheadstack.pop()
                    if not lookahead:
                        lookahead = YaccSymbol()
                        lookahead.type = '$end'
                # Check the action table
                ltype = lookahead.type
                t = actions[state].get(ltype)
            else:
                t = defaulted_states[state]
                #--! DEBUG
                debug.debug('Defaulted state %s: Reduce using %d', state, -t)
                #--! DEBUG
            #--! DEBUG
            debug.debug('Stack : %s',
                        ('%s . %s' % (' '.join([xx.type for xx in symstack][1:]), str(lookahead))).lstrip())
            #--! DEBUG
            if t is not None:
                if t > 0:
                    # shift a symbol on the stack
                    statestack.append(t)
                    state = t
                    #--! DEBUG
                    debug.debug('Action : Shift and goto state %s', t)
                    #--! DEBUG
                    symstack.append(lookahead)
                    lookahead = None
                    # Decrease error count on successful shift
                    if errorcount:
                        errorcount -= 1
                    continue
                if t < 0:
                    # reduce a symbol on the stack, emit a production
                    p = prod[-t]
                    pname = p.name
                    plen = p.len
                    # Get production function
                    sym = YaccSymbol()
                    sym.type = pname # Production name
                    sym.value = None
                    #--! DEBUG
                    if plen:
                        debug.info('Action : Reduce rule [%s] with %s and goto state %d', p.str,
                                   '['+','.join([format_stack_entry(_v.value) for _v in symstack[-plen:]])+']',
                                   goto[statestack[-1-plen]][pname])
                    else:
                        debug.info('Action : Reduce rule [%s] with %s and goto state %d', p.str, [],
                                   goto[statestack[-1]][pname])
                    #--! DEBUG
                    if plen:
                        targ = symstack[-plen-1:]
                        targ[0] = sym
                        #--! TRACKING
                        if tracking:
                            t1 = targ[1]
                            sym.lineno = t1.lineno
                            sym.lexpos = t1.lexpos
                            t1 = targ[-1]
                            sym.endlineno = getattr(t1, 'endlineno', t1.lineno)
                            sym.endlexpos = getattr(t1, 'endlexpos', t1.lexpos)
                        #--! TRACKING
                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
                        # The code enclosed in this section is duplicated
                        # below as a performance optimization. Make sure
                        # changes get made in both locations.
                        pslice.slice = targ
                        try:
                            # Call the grammar rule with our special slice object
                            del symstack[-plen:]
                            self.state = state
                            p.callable(pslice)
                            del statestack[-plen:]
                            #--! DEBUG
                            debug.info('Result : %s', format_result(pslice[0]))
                            #--! DEBUG
                            symstack.append(sym)
                            state = goto[statestack[-1]][pname]
                            statestack.append(state)
                        except SyntaxError:
                            # If an error was set. Enter error recovery state
                            lookaheadstack.append(lookahead) # Save the current lookahead token
                            symstack.extend(targ[1:-1]) # Put the production slice back on the stack
                            statestack.pop() # Pop back one state (before the reduce)
                            state = statestack[-1]
                            sym.type = 'error'
                            sym.value = 'error'
                            lookahead = sym
                            errorcount = error_count
                            self.errorok = False
                            continue
                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
                    else:
                        #--! TRACKING
                        if tracking:
                            sym.lineno = lexer.lineno
                            sym.lexpos = lexer.lexpos
                        #--! TRACKING
                        targ = [sym]
                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
                        # The code enclosed in this section is duplicated
                        # above as a performance optimization. Make sure
                        # changes get made in both locations.
                        pslice.slice = targ
                        try:
                            # Call the grammar rule with our special slice object
                            self.state = state
                            p.callable(pslice)
                            #--! DEBUG
                            debug.info('Result : %s', format_result(pslice[0]))
                            #--! DEBUG
                            symstack.append(sym)
                            state = goto[statestack[-1]][pname]
                            statestack.append(state)
                        except SyntaxError:
                            # If an error was set. Enter error recovery state
                            lookaheadstack.append(lookahead) # Save the current lookahead token
                            statestack.pop() # Pop back one state (before the reduce)
                            state = statestack[-1]
                            sym.type = 'error'
                            sym.value = 'error'
                            lookahead = sym
                            errorcount = error_count
                            self.errorok = False
                            continue
                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
                if t == 0:
                    n = symstack[-1]
                    result = getattr(n, 'value', None)
                    #--! DEBUG
                    debug.info('Done : Returning %s', format_result(result))
                    debug.info('PLY: PARSE DEBUG END')
                    #--! DEBUG
                    return result
            if t is None:
                #--! DEBUG
                debug.error('Error : %s',
                            ('%s . %s' % (' '.join([xx.type for xx in symstack][1:]), str(lookahead))).lstrip())
                #--! DEBUG
                # We have some kind of parsing error here. To handle
                # this, we are going to push the current token onto
                # the tokenstack and replace it with an 'error' token.
                # If there are any synchronization rules, they may
                # catch it.
                #
                # In addition to pushing the error token, we call call
                # the user defined p_error() function if this is the
                # first syntax error. This function is only called if
                # errorcount == 0.
                if errorcount == 0 or self.errorok:
                    errorcount = error_count
                    self.errorok = False
                    errtoken = lookahead
                    if errtoken.type == '$end':
                        errtoken = None # End of file!
                    if self.errorfunc:
                        if errtoken and not hasattr(errtoken, 'lexer'):
                            errtoken.lexer = lexer
                        self.state = state
                        tok = call_errorfunc(self.errorfunc, errtoken, self)
                        if self.errorok:
                            # User must have done some kind of panic
                            # mode recovery on their own. The
                            # returned token is the next lookahead
                            lookahead = tok
                            errtoken = None
                            continue
                    else:
                        if errtoken:
                            if hasattr(errtoken, 'lineno'):
                                lineno = lookahead.lineno
                            else:
                                lineno = 0
                            if lineno:
                                sys.stderr.write('yacc: Syntax error at line %d, token=%s\n' % (lineno, errtoken.type))
                            else:
                                sys.stderr.write('yacc: Syntax error, token=%s' % errtoken.type)
                        else:
                            sys.stderr.write('yacc: Parse error in input. EOF\n')
                        return
                else:
                    errorcount = error_count
                # case 1: the statestack only has 1 entry on it. If we're in this state, the
                # entire parse has been rolled back and we're completely hosed. The token is
                # discarded and we just keep going.
                if len(statestack) <= 1 and lookahead.type != '$end':
                    lookahead = None
                    errtoken = None
                    state = 0
                    # Nuke the pushback stack
                    del lookaheadstack[:]
                    continue
                # case 2: the statestack has a couple of entries on it, but we're
                # at the end of the file. nuke the top entry and generate an error token
                # Start nuking entries on the stack
                if lookahead.type == '$end':
                    # Whoa. We're really hosed here. Bail out
                    return
                if lookahead.type != 'error':
                    sym = symstack[-1]
                    if sym.type == 'error':
                        # Hmmm. Error is on top of stack, we'll just nuke input
                        # symbol and continue
                        #--! TRACKING
                        if tracking:
                            sym.endlineno = getattr(lookahead, 'lineno', sym.lineno)
                            sym.endlexpos = getattr(lookahead, 'lexpos', sym.lexpos)
                        #--! TRACKING
                        lookahead = None
                        continue
                    # Create the error symbol for the first time and make it the new lookahead symbol
                    t = YaccSymbol()
                    t.type = 'error'
                    if hasattr(lookahead, 'lineno'):
                        t.lineno = t.endlineno = lookahead.lineno
                    if hasattr(lookahead, 'lexpos'):
                        t.lexpos = t.endlexpos = lookahead.lexpos
                    t.value = lookahead
                    lookaheadstack.append(lookahead)
                    lookahead = t
                else:
                    sym = symstack.pop()
                    #--! TRACKING
                    if tracking:
                        lookahead.lineno = sym.lineno
                        lookahead.lexpos = sym.lexpos
                    #--! TRACKING
                    statestack.pop()
                    state = statestack[-1]
                continue
            # Call an error function here
            raise RuntimeError('yacc: internal parser error!!!\n')
        #--! parsedebug-end
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# parseopt().
#
# Optimized version of parse() method. DO NOT EDIT THIS CODE DIRECTLY!
# This code is automatically generated by the ply/ygen.py script. Make
# changes to the parsedebug() method instead.
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    def parseopt(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None):
        """Optimized LR parse loop with position tracking support.

        Auto-generated from parsedebug() by the ply/ygen.py script (see the
        banner comment above) -- do not edit directly; make changes in
        parsedebug() instead.
        """
        #--! parseopt-start
        lookahead = None # Current lookahead symbol
        lookaheadstack = [] # Stack of lookahead symbols
        actions = self.action # Local reference to action table (to avoid lookup on self.)
        goto = self.goto # Local reference to goto table (to avoid lookup on self.)
        prod = self.productions # Local reference to production list (to avoid lookup on self.)
        defaulted_states = self.defaulted_states # Local reference to defaulted states
        pslice = YaccProduction(None) # Production object passed to grammar rules
        errorcount = 0 # Used during error recovery
        # If no lexer was given, we will try to use the lex module
        if not lexer:
            from . import lex
            lexer = lex.lexer
        # Set up the lexer and parser objects on pslice
        pslice.lexer = lexer
        pslice.parser = self
        # If input was supplied, pass to lexer
        if input is not None:
            lexer.input(input)
        if tokenfunc is None:
            # Tokenize function
            get_token = lexer.token
        else:
            get_token = tokenfunc
        # Set the parser() token method (sometimes used in error recovery)
        self.token = get_token
        # Set up the state and symbol stacks
        statestack = [] # Stack of parsing states
        self.statestack = statestack
        symstack = [] # Stack of grammar symbols
        self.symstack = symstack
        pslice.stack = symstack # Put in the production
        errtoken = None # Err token
        # The start state is assumed to be (0,$end)
        statestack.append(0)
        sym = YaccSymbol()
        sym.type = '$end'
        symstack.append(sym)
        state = 0
        while True:
            # Get the next symbol on the input. If a lookahead symbol
            # is already set, we just use that. Otherwise, we'll pull
            # the next token off of the lookaheadstack or from the lexer
            if state not in defaulted_states:
                if not lookahead:
                    if not lookaheadstack:
                        lookahead = get_token() # Get the next token
                    else:
                        lookahead = lookaheadstack.pop()
                    if not lookahead:
                        lookahead = YaccSymbol()
                        lookahead.type = '$end'
                # Check the action table
                ltype = lookahead.type
                t = actions[state].get(ltype)
            else:
                t = defaulted_states[state]
            if t is not None:
                if t > 0:
                    # shift a symbol on the stack
                    statestack.append(t)
                    state = t
                    symstack.append(lookahead)
                    lookahead = None
                    # Decrease error count on successful shift
                    if errorcount:
                        errorcount -= 1
                    continue
                if t < 0:
                    # reduce a symbol on the stack, emit a production
                    p = prod[-t]
                    pname = p.name
                    plen = p.len
                    # Get production function
                    sym = YaccSymbol()
                    sym.type = pname # Production name
                    sym.value = None
                    if plen:
                        targ = symstack[-plen-1:]
                        targ[0] = sym
                        #--! TRACKING
                        if tracking:
                            t1 = targ[1]
                            sym.lineno = t1.lineno
                            sym.lexpos = t1.lexpos
                            t1 = targ[-1]
                            sym.endlineno = getattr(t1, 'endlineno', t1.lineno)
                            sym.endlexpos = getattr(t1, 'endlexpos', t1.lexpos)
                        #--! TRACKING
                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
                        # The code enclosed in this section is duplicated
                        # below as a performance optimization. Make sure
                        # changes get made in both locations.
                        pslice.slice = targ
                        try:
                            # Call the grammar rule with our special slice object
                            del symstack[-plen:]
                            self.state = state
                            p.callable(pslice)
                            del statestack[-plen:]
                            symstack.append(sym)
                            state = goto[statestack[-1]][pname]
                            statestack.append(state)
                        except SyntaxError:
                            # If an error was set. Enter error recovery state
                            lookaheadstack.append(lookahead) # Save the current lookahead token
                            symstack.extend(targ[1:-1]) # Put the production slice back on the stack
                            statestack.pop() # Pop back one state (before the reduce)
                            state = statestack[-1]
                            sym.type = 'error'
                            sym.value = 'error'
                            lookahead = sym
                            errorcount = error_count
                            self.errorok = False
                            continue
                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
                    else:
                        #--! TRACKING
                        if tracking:
                            sym.lineno = lexer.lineno
                            sym.lexpos = lexer.lexpos
                        #--! TRACKING
                        targ = [sym]
                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
                        # The code enclosed in this section is duplicated
                        # above as a performance optimization. Make sure
                        # changes get made in both locations.
                        pslice.slice = targ
                        try:
                            # Call the grammar rule with our special slice object
                            self.state = state
                            p.callable(pslice)
                            symstack.append(sym)
                            state = goto[statestack[-1]][pname]
                            statestack.append(state)
                        except SyntaxError:
                            # If an error was set. Enter error recovery state
                            lookaheadstack.append(lookahead) # Save the current lookahead token
                            statestack.pop() # Pop back one state (before the reduce)
                            state = statestack[-1]
                            sym.type = 'error'
                            sym.value = 'error'
                            lookahead = sym
                            errorcount = error_count
                            self.errorok = False
                            continue
                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
                if t == 0:
                    n = symstack[-1]
                    result = getattr(n, 'value', None)
                    return result
            if t is None:
                # We have some kind of parsing error here. To handle
                # this, we are going to push the current token onto
                # the tokenstack and replace it with an 'error' token.
                # If there are any synchronization rules, they may
                # catch it.
                #
                # In addition to pushing the error token, we call call
                # the user defined p_error() function if this is the
                # first syntax error. This function is only called if
                # errorcount == 0.
                if errorcount == 0 or self.errorok:
                    errorcount = error_count
                    self.errorok = False
                    errtoken = lookahead
                    if errtoken.type == '$end':
                        errtoken = None # End of file!
                    if self.errorfunc:
                        if errtoken and not hasattr(errtoken, 'lexer'):
                            errtoken.lexer = lexer
                        self.state = state
                        tok = call_errorfunc(self.errorfunc, errtoken, self)
                        if self.errorok:
                            # User must have done some kind of panic
                            # mode recovery on their own. The
                            # returned token is the next lookahead
                            lookahead = tok
                            errtoken = None
                            continue
                    else:
                        if errtoken:
                            if hasattr(errtoken, 'lineno'):
                                lineno = lookahead.lineno
                            else:
                                lineno = 0
                            if lineno:
                                sys.stderr.write('yacc: Syntax error at line %d, token=%s\n' % (lineno, errtoken.type))
                            else:
                                sys.stderr.write('yacc: Syntax error, token=%s' % errtoken.type)
                        else:
                            sys.stderr.write('yacc: Parse error in input. EOF\n')
                        return
                else:
                    errorcount = error_count
                # case 1: the statestack only has 1 entry on it. If we're in this state, the
                # entire parse has been rolled back and we're completely hosed. The token is
                # discarded and we just keep going.
                if len(statestack) <= 1 and lookahead.type != '$end':
                    lookahead = None
                    errtoken = None
                    state = 0
                    # Nuke the pushback stack
                    del lookaheadstack[:]
                    continue
                # case 2: the statestack has a couple of entries on it, but we're
                # at the end of the file. nuke the top entry and generate an error token
                # Start nuking entries on the stack
                if lookahead.type == '$end':
                    # Whoa. We're really hosed here. Bail out
                    return
                if lookahead.type != 'error':
                    sym = symstack[-1]
                    if sym.type == 'error':
                        # Hmmm. Error is on top of stack, we'll just nuke input
                        # symbol and continue
                        #--! TRACKING
                        if tracking:
                            sym.endlineno = getattr(lookahead, 'lineno', sym.lineno)
                            sym.endlexpos = getattr(lookahead, 'lexpos', sym.lexpos)
                        #--! TRACKING
                        lookahead = None
                        continue
                    # Create the error symbol for the first time and make it the new lookahead symbol
                    t = YaccSymbol()
                    t.type = 'error'
                    if hasattr(lookahead, 'lineno'):
                        t.lineno = t.endlineno = lookahead.lineno
                    if hasattr(lookahead, 'lexpos'):
                        t.lexpos = t.endlexpos = lookahead.lexpos
                    t.value = lookahead
                    lookaheadstack.append(lookahead)
                    lookahead = t
                else:
                    sym = symstack.pop()
                    #--! TRACKING
                    if tracking:
                        lookahead.lineno = sym.lineno
                        lookahead.lexpos = sym.lexpos
                    #--! TRACKING
                    statestack.pop()
                    state = statestack[-1]
                continue
            # Call an error function here
            raise RuntimeError('yacc: internal parser error!!!\n')
        #--! parseopt-end
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# parseopt_notrack().
#
# Optimized version of parseopt() with line number tracking removed.
# DO NOT EDIT THIS CODE DIRECTLY. This code is automatically generated
# by the ply/ygen.py script. Make changes to the parsedebug() method instead.
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    def parseopt_notrack(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None):
        """Optimized LR parse loop with debug logging and position tracking removed.

        Auto-generated from parsedebug() by the ply/ygen.py script (see the
        banner comment above) -- do not edit directly; make changes in
        parsedebug() instead.
        """
        #--! parseopt-notrack-start
        lookahead = None # Current lookahead symbol
        lookaheadstack = [] # Stack of lookahead symbols
        actions = self.action # Local reference to action table (to avoid lookup on self.)
        goto = self.goto # Local reference to goto table (to avoid lookup on self.)
        prod = self.productions # Local reference to production list (to avoid lookup on self.)
        defaulted_states = self.defaulted_states # Local reference to defaulted states
        pslice = YaccProduction(None) # Production object passed to grammar rules
        errorcount = 0 # Used during error recovery
        # If no lexer was given, we will try to use the lex module
        if not lexer:
            from . import lex
            lexer = lex.lexer
        # Set up the lexer and parser objects on pslice
        pslice.lexer = lexer
        pslice.parser = self
        # If input was supplied, pass to lexer
        if input is not None:
            lexer.input(input)
        if tokenfunc is None:
            # Tokenize function
            get_token = lexer.token
        else:
            get_token = tokenfunc
        # Set the parser() token method (sometimes used in error recovery)
        self.token = get_token
        # Set up the state and symbol stacks
        statestack = [] # Stack of parsing states
        self.statestack = statestack
        symstack = [] # Stack of grammar symbols
        self.symstack = symstack
        pslice.stack = symstack # Put in the production
        errtoken = None # Err token
        # The start state is assumed to be (0,$end)
        statestack.append(0)
        sym = YaccSymbol()
        sym.type = '$end'
        symstack.append(sym)
        state = 0
        while True:
            # Get the next symbol on the input. If a lookahead symbol
            # is already set, we just use that. Otherwise, we'll pull
            # the next token off of the lookaheadstack or from the lexer
            if state not in defaulted_states:
                if not lookahead:
                    if not lookaheadstack:
                        lookahead = get_token() # Get the next token
                    else:
                        lookahead = lookaheadstack.pop()
                    if not lookahead:
                        lookahead = YaccSymbol()
                        lookahead.type = '$end'
                # Check the action table
                ltype = lookahead.type
                t = actions[state].get(ltype)
            else:
                t = defaulted_states[state]
            if t is not None:
                if t > 0:
                    # shift a symbol on the stack
                    statestack.append(t)
                    state = t
                    symstack.append(lookahead)
                    lookahead = None
                    # Decrease error count on successful shift
                    if errorcount:
                        errorcount -= 1
                    continue
                if t < 0:
                    # reduce a symbol on the stack, emit a production
                    p = prod[-t]
                    pname = p.name
                    plen = p.len
                    # Get production function
                    sym = YaccSymbol()
                    sym.type = pname # Production name
                    sym.value = None
                    if plen:
                        targ = symstack[-plen-1:]
                        targ[0] = sym
                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
                        # The code enclosed in this section is duplicated
                        # below as a performance optimization. Make sure
                        # changes get made in both locations.
                        pslice.slice = targ
                        try:
                            # Call the grammar rule with our special slice object
                            del symstack[-plen:]
                            self.state = state
                            p.callable(pslice)
                            del statestack[-plen:]
                            symstack.append(sym)
                            state = goto[statestack[-1]][pname]
                            statestack.append(state)
                        except SyntaxError:
                            # If an error was set. Enter error recovery state
                            lookaheadstack.append(lookahead) # Save the current lookahead token
                            symstack.extend(targ[1:-1]) # Put the production slice back on the stack
                            statestack.pop() # Pop back one state (before the reduce)
                            state = statestack[-1]
                            sym.type = 'error'
                            sym.value = 'error'
                            lookahead = sym
                            errorcount = error_count
                            self.errorok = False
                            continue
                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
                    else:
                        targ = [sym]
                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
                        # The code enclosed in this section is duplicated
                        # above as a performance optimization. Make sure
                        # changes get made in both locations.
                        pslice.slice = targ
                        try:
                            # Call the grammar rule with our special slice object
                            self.state = state
                            p.callable(pslice)
                            symstack.append(sym)
                            state = goto[statestack[-1]][pname]
                            statestack.append(state)
                        except SyntaxError:
                            # If an error was set. Enter error recovery state
                            lookaheadstack.append(lookahead) # Save the current lookahead token
                            statestack.pop() # Pop back one state (before the reduce)
                            state = statestack[-1]
                            sym.type = 'error'
                            sym.value = 'error'
                            lookahead = sym
                            errorcount = error_count
                            self.errorok = False
                            continue
                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
                if t == 0:
                    n = symstack[-1]
                    result = getattr(n, 'value', None)
                    return result
            if t is None:
                # We have some kind of parsing error here. To handle
                # this, we are going to push the current token onto
                # the tokenstack and replace it with an 'error' token.
                # If there are any synchronization rules, they may
                # catch it.
                #
                # In addition to pushing the error token, we call call
                # the user defined p_error() function if this is the
                # first syntax error. This function is only called if
                # errorcount == 0.
                if errorcount == 0 or self.errorok:
                    errorcount = error_count
                    self.errorok = False
                    errtoken = lookahead
                    if errtoken.type == '$end':
                        errtoken = None # End of file!
                    if self.errorfunc:
                        if errtoken and not hasattr(errtoken, 'lexer'):
                            errtoken.lexer = lexer
                        self.state = state
                        tok = call_errorfunc(self.errorfunc, errtoken, self)
                        if self.errorok:
                            # User must have done some kind of panic
                            # mode recovery on their own. The
                            # returned token is the next lookahead
                            lookahead = tok
                            errtoken = None
                            continue
                    else:
                        if errtoken:
                            if hasattr(errtoken, 'lineno'):
                                lineno = lookahead.lineno
                            else:
                                lineno = 0
                            if lineno:
                                sys.stderr.write('yacc: Syntax error at line %d, token=%s\n' % (lineno, errtoken.type))
                            else:
                                sys.stderr.write('yacc: Syntax error, token=%s' % errtoken.type)
                        else:
                            sys.stderr.write('yacc: Parse error in input. EOF\n')
                        return
                else:
                    errorcount = error_count
                # case 1: the statestack only has 1 entry on it. If we're in this state, the
                # entire parse has been rolled back and we're completely hosed. The token is
                # discarded and we just keep going.
                if len(statestack) <= 1 and lookahead.type != '$end':
                    lookahead = None
                    errtoken = None
                    state = 0
                    # Nuke the pushback stack
                    del lookaheadstack[:]
                    continue
                # case 2: the statestack has a couple of entries on it, but we're
                # at the end of the file. nuke the top entry and generate an error token
                # Start nuking entries on the stack
                if lookahead.type == '$end':
                    # Whoa. We're really hosed here. Bail out
                    return
                if lookahead.type != 'error':
                    sym = symstack[-1]
                    if sym.type == 'error':
                        # Hmmm. Error is on top of stack, we'll just nuke input
                        # symbol and continue
                        lookahead = None
                        continue
                    # Create the error symbol for the first time and make it the new lookahead symbol
                    t = YaccSymbol()
                    t.type = 'error'
                    if hasattr(lookahead, 'lineno'):
                        t.lineno = t.endlineno = lookahead.lineno
                    if hasattr(lookahead, 'lexpos'):
                        t.lexpos = t.endlexpos = lookahead.lexpos
                    t.value = lookahead
                    lookaheadstack.append(lookahead)
                    lookahead = t
                else:
                    sym = symstack.pop()
                    statestack.pop()
                    state = statestack[-1]
                continue
            # Call an error function here
            raise RuntimeError('yacc: internal parser error!!!\n')
        #--! parseopt-notrack-end
# -----------------------------------------------------------------------------
# === Grammar Representation ===
#
# The following functions, classes, and variables are used to represent and
# manipulate the rules that make up a grammar.
# -----------------------------------------------------------------------------
# regex matching identifiers
# NOTE: deliberately permissive -- also accepts '-' and purely numeric names.
_is_identifier = re.compile(r'^[a-zA-Z0-9_-]+$')
# -----------------------------------------------------------------------------
# class Production:
#
# This class stores the raw information about a single production or grammar rule.
# A grammar rule refers to a specification such as this:
#
# expr : expr PLUS term
#
# Here are the basic attributes defined on all productions
#
# name - Name of the production. For example 'expr'
# prod - A list of symbols on the right side ['expr','PLUS','term']
# prec - Production precedence level
# number - Production number.
# func - Function that executes on reduce
# file - File where production function is defined
# lineno - Line number where production function is defined
#
# The following attributes are defined or optional.
#
# len - Length of the production (number of symbols on right hand side)
# usyms - Set of unique symbols found in the production
# -----------------------------------------------------------------------------
class Production(object):
    """Raw information about a single grammar rule (see banner comment above).

    Stores the rule name, right-hand-side symbols, precedence, production
    number, and the reduce function plus its source location.
    """
    # Count of how many times this production has been reduced (class default).
    reduced = 0
    def __init__(self, number, name, prod, precedence=('right', 0), func=None, file='', line=0):
        self.name = name
        self.prod = tuple(prod)
        self.number = number
        self.func = func
        # Resolved by bind(); func holds only the function *name* until then.
        self.callable = None
        self.file = file
        self.line = line
        self.prec = precedence
        # Internal settings used during table construction
        self.len = len(self.prod) # Length of the production
        # Create a list of unique production symbols used in the production
        self.usyms = []
        for s in self.prod:
            if s not in self.usyms:
                self.usyms.append(s)
        # List of all LR items for the production
        self.lr_items = []
        self.lr_next = None
        # Create a string representation
        if self.prod:
            self.str = '%s -> %s' % (self.name, ' '.join(self.prod))
        else:
            self.str = '%s -> <empty>' % self.name
    def __str__(self):
        return self.str
    def __repr__(self):
        return 'Production(' + str(self) + ')'
    def __len__(self):
        return len(self.prod)
    def __nonzero__(self):
        # Python 2 truth protocol: a Production is always truthy.
        return 1
    def __getitem__(self, index):
        return self.prod[index]
    # Return the nth lr_item from the production (or None if at the end)
    def lr_item(self, n):
        if n > len(self.prod):
            return None
        p = LRItem(self, n)
        # Precompute the list of productions immediately following.
        try:
            # NOTE(review): `Prodnames` is not defined anywhere visible in this
            # module -- this method appears to rely on a global populated
            # elsewhere (or may be legacy/unused); confirm before calling.
            p.lr_after = Prodnames[p.prod[n+1]]
        except (IndexError, KeyError):
            p.lr_after = []
        try:
            p.lr_before = p.prod[n-1]
        except IndexError:
            p.lr_before = None
        return p
    # Bind the production function name to a callable
    def bind(self, pdict):
        if self.func:
            self.callable = pdict[self.func]
# This class serves as a minimal standin for Production objects when
# reading table data from files. It only contains information
# actually used by the LR parsing engine, plus some additional
# debugging information.
class MiniProduction(object):
    """Lightweight stand-in for Production objects loaded from table files.

    Carries only the fields the LR engine uses at runtime, plus a little
    debugging information.
    """
    def __init__(self, str, name, len, func, file, line):
        # Parameter names shadow builtins; kept to match the table-file format.
        self.str = str
        self.name = name
        self.len = len
        self.func = func
        self.callable = None
        self.file = file
        self.line = line
    def __str__(self):
        return self.str
    def __repr__(self):
        return 'MiniProduction(%s)' % self.str
    def bind(self, pdict):
        """Resolve the stored function name to a callable found in *pdict*."""
        if self.func:
            self.callable = pdict[self.func]
# -----------------------------------------------------------------------------
# class LRItem
#
# This class represents a specific stage of parsing a production rule. For
# example:
#
# expr : expr . PLUS term
#
# In the above, the "." represents the current location of the parse.  Here
# are its basic attributes:
#
# name - Name of the production. For example 'expr'
# prod - A list of symbols on the right side ['expr','.', 'PLUS','term']
# number - Production number.
#
# lr_next Next LR item. Example, if we are ' expr -> expr . PLUS term'
# then lr_next refers to 'expr -> expr PLUS . term'
# lr_index - LR item index (location of the ".") in the prod list.
# lookaheads - LALR lookahead symbols for this item
# len - Length of the production (number of symbols on right hand side)
# lr_after - List of all productions that immediately follow
# lr_before - Grammar symbol immediately before
# -----------------------------------------------------------------------------
class LRItem(object):
    """A production with a parse-position marker: e.g. 'expr -> expr . PLUS term'.

    Built from a Production *p* with the dot placed before symbol *n*.
    """
    def __init__(self, p, n):
        self.name = p.name
        self.number = p.number
        self.lr_index = n
        self.lookaheads = {}
        marked = list(p.prod)
        marked.insert(n, '.')
        self.prod = tuple(marked)
        self.len = len(self.prod)
        self.usyms = p.usyms
    def __str__(self):
        if not self.prod:
            return '%s -> <empty>' % self.name
        return '%s -> %s' % (self.name, ' '.join(self.prod))
    def __repr__(self):
        return 'LRItem(%s)' % str(self)
# -----------------------------------------------------------------------------
# rightmost_terminal()
#
# Return the rightmost terminal from a list of symbols. Used in add_production()
# -----------------------------------------------------------------------------
def rightmost_terminal(symbols, terminals):
    """Return the rightmost member of *symbols* found in *terminals*, or None.

    Used by add_production() to derive a rule's default precedence.
    """
    for sym in reversed(symbols):
        if sym in terminals:
            return sym
    return None
# -----------------------------------------------------------------------------
# === GRAMMAR CLASS ===
#
# The following class represents the contents of the specified grammar along
# with various computed properties such as first sets, follow sets, LR items, etc.
# This data is used for critical parts of the table generation process later.
# -----------------------------------------------------------------------------
class GrammarError(YaccError):
    """Raised when an error is detected in the grammar specification."""
    pass
class Grammar(object):
    """Holds the contents of a grammar (productions, terminals, nonterminals,
    precedence rules) along with computed properties such as FIRST and FOLLOW
    sets and LR items.  This data drives the LR table generation process.
    """
    def __init__(self, terminals):
        """Initialize an empty grammar over the given terminal symbols."""
        self.Productions = [None] # A list of all of the productions. The first
                                  # entry is always reserved for the purpose of
                                  # building an augmented grammar
        self.Prodnames = {} # A dictionary mapping the names of nonterminals to a list of all
                            # productions of that nonterminal.
        self.Prodmap = {} # A dictionary that is only used to detect duplicate
                          # productions.
        self.Terminals = {} # A dictionary mapping the names of terminal symbols to a
                            # list of the rules where they are used.
        for term in terminals:
            self.Terminals[term] = []
        self.Terminals['error'] = []
        self.Nonterminals = {} # A dictionary mapping names of nonterminals to a list
                               # of rule numbers where they are used.
        self.First = {} # A dictionary of precomputed FIRST(x) symbols
        self.Follow = {} # A dictionary of precomputed FOLLOW(x) symbols
        self.Precedence = {} # Precedence rules for each terminal. Contains tuples of the
                             # form ('right',level) or ('nonassoc', level) or ('left',level)
        self.UsedPrecedence = set() # Precedence rules that were actually used by the grammar.
                                    # This is only used to provide error checking and to generate
                                    # a warning about unused precedence rules.
        self.Start = None # Starting symbol for the grammar
    def __len__(self):
        """Number of productions (including the reserved slot 0)."""
        return len(self.Productions)
    def __getitem__(self, index):
        """Index directly into the production list."""
        return self.Productions[index]
    # -----------------------------------------------------------------------------
    # set_precedence()
    #
    # Sets the precedence for a given terminal. assoc is the associativity such as
    # 'left','right', or 'nonassoc'. level is a numeric level.
    #
    # -----------------------------------------------------------------------------
    def set_precedence(self, term, assoc, level):
        """Record the (assoc, level) precedence for terminal *term*.

        Must be called before any add_production(); raises GrammarError on
        duplicate terminals or an invalid associativity.
        """
        assert self.Productions == [None], 'Must call set_precedence() before add_production()'
        if term in self.Precedence:
            raise GrammarError('Precedence already specified for terminal %r' % term)
        if assoc not in ['left', 'right', 'nonassoc']:
            raise GrammarError("Associativity must be one of 'left','right', or 'nonassoc'")
        self.Precedence[term] = (assoc, level)
    # -----------------------------------------------------------------------------
    # add_production()
    #
    # Given an action function, this function assembles a production rule and
    # computes its precedence level.
    #
    # The production rule is supplied as a list of symbols. For example,
    # a rule such as 'expr : expr PLUS term' has a production name of 'expr' and
    # symbols ['expr','PLUS','term'].
    #
    # Precedence is determined by the precedence of the right-most non-terminal
    # or the precedence of a terminal specified by %prec.
    #
    # A variety of error checks are performed to make sure production symbols
    # are valid and that %prec is used correctly.
    # -----------------------------------------------------------------------------
    def add_production(self, prodname, syms, func=None, file='', line=0):
        """Add one production rule *prodname* -> *syms* to the grammar.

        Validates names, resolves literal tokens and %prec markers, assigns a
        precedence, and registers the rule in all internal tables.  Raises
        GrammarError for any malformed rule.  Note: *syms* is modified in
        place (literals are normalized, %prec markers are removed).
        """
        if prodname in self.Terminals:
            raise GrammarError('%s:%d: Illegal rule name %r. Already defined as a token' % (file, line, prodname))
        if prodname == 'error':
            raise GrammarError('%s:%d: Illegal rule name %r. error is a reserved word' % (file, line, prodname))
        if not _is_identifier.match(prodname):
            raise GrammarError('%s:%d: Illegal rule name %r' % (file, line, prodname))
        # Look for literal tokens
        for n, s in enumerate(syms):
            if s[0] in "'\"":
                try:
                    # NOTE: eval() is applied only to quoted literal characters
                    # taken from the developer's own grammar specification.
                    c = eval(s)
                    if (len(c) > 1):
                        raise GrammarError('%s:%d: Literal token %s in rule %r may only be a single character' %
                                           (file, line, s, prodname))
                    if c not in self.Terminals:
                        self.Terminals[c] = []
                    syms[n] = c
                    continue
                except SyntaxError:
                    pass
            if not _is_identifier.match(s) and s != '%prec':
                raise GrammarError('%s:%d: Illegal name %r in rule %r' % (file, line, s, prodname))
        # Determine the precedence level
        if '%prec' in syms:
            if syms[-1] == '%prec':
                raise GrammarError('%s:%d: Syntax error. Nothing follows %%prec' % (file, line))
            if syms[-2] != '%prec':
                raise GrammarError('%s:%d: Syntax error. %%prec can only appear at the end of a grammar rule' %
                                   (file, line))
            precname = syms[-1]
            prodprec = self.Precedence.get(precname)
            if not prodprec:
                raise GrammarError('%s:%d: Nothing known about the precedence of %r' % (file, line, precname))
            else:
                self.UsedPrecedence.add(precname)
            del syms[-2:]     # Drop %prec from the rule
        else:
            # If no %prec, precedence is determined by the rightmost terminal symbol
            precname = rightmost_terminal(syms, self.Terminals)
            prodprec = self.Precedence.get(precname, ('right', 0))
        # See if the rule is already in the rulemap
        map = '%s -> %s' % (prodname, syms)
        if map in self.Prodmap:
            m = self.Prodmap[map]
            raise GrammarError('%s:%d: Duplicate rule %s. ' % (file, line, m) +
                               'Previous definition at %s:%d' % (m.file, m.line))
        # From this point on, everything is valid.  Create a new Production instance
        pnumber = len(self.Productions)
        if prodname not in self.Nonterminals:
            self.Nonterminals[prodname] = []
        # Add the production number to Terminals and Nonterminals
        for t in syms:
            if t in self.Terminals:
                self.Terminals[t].append(pnumber)
            else:
                if t not in self.Nonterminals:
                    self.Nonterminals[t] = []
                self.Nonterminals[t].append(pnumber)
        # Create a production and add it to the list of productions
        p = Production(pnumber, prodname, syms, prodprec, func, file, line)
        self.Productions.append(p)
        self.Prodmap[map] = p
        # Add to the global productions list
        try:
            self.Prodnames[prodname].append(p)
        except KeyError:
            self.Prodnames[prodname] = [p]
    # -----------------------------------------------------------------------------
    # set_start()
    #
    # Sets the starting symbol and creates the augmented grammar. Production
    # rule 0 is S' -> start where start is the start symbol.
    # -----------------------------------------------------------------------------
    def set_start(self, start=None):
        """Set the grammar's start symbol (default: LHS of the first rule)
        and install the augmented production S' -> start in slot 0.
        """
        if not start:
            start = self.Productions[1].name
        if start not in self.Nonterminals:
            raise GrammarError('start symbol %s undefined' % start)
        self.Productions[0] = Production(0, "S'", [start])
        self.Nonterminals[start].append(0)
        self.Start = start
    # -----------------------------------------------------------------------------
    # find_unreachable()
    #
    # Find all of the nonterminal symbols that can't be reached from the starting
    # symbol.  Returns a list of nonterminals that can't be reached.
    # -----------------------------------------------------------------------------
    def find_unreachable(self):
        """Return the nonterminals unreachable from the start symbol."""
        # Mark all symbols that are reachable from a symbol s
        def mark_reachable_from(s):
            if s in reachable:
                # Already reached; avoids infinite recursion on cycles
                return
            reachable.add(s)
            for p in self.Prodnames.get(s, []):
                for r in p.prod:
                    mark_reachable_from(r)
        reachable = set()
        mark_reachable_from(self.Productions[0].prod[0])
        return [s for s in self.Nonterminals if s not in reachable]
    # -----------------------------------------------------------------------------
    # infinite_cycles()
    #
    # This function looks at the various parsing rules and tries to detect
    # infinite recursion cycles (grammar rules where there is no possible way
    # to derive a string of only terminals).
    # -----------------------------------------------------------------------------
    def infinite_cycles(self):
        """Return symbols that can never derive a string of only terminals."""
        terminates = {}
        # Terminals:
        for t in self.Terminals:
            terminates[t] = True
        terminates['$end'] = True
        # Nonterminals:
        # Initialize to false:
        for n in self.Nonterminals:
            terminates[n] = False
        # Then propagate termination until no change:
        while True:
            some_change = False
            for (n, pl) in self.Prodnames.items():
                # Nonterminal n terminates iff any of its productions terminates.
                for p in pl:
                    # Production p terminates iff all of its rhs symbols terminate.
                    for s in p.prod:
                        if not terminates[s]:
                            # The symbol s does not terminate,
                            # so production p does not terminate.
                            p_terminates = False
                            break
                    else:
                        # didn't break from the loop,
                        # so every symbol s terminates
                        # so production p terminates.
                        p_terminates = True
                    if p_terminates:
                        # symbol n terminates!
                        if not terminates[n]:
                            terminates[n] = True
                            some_change = True
                        # Don't need to consider any more productions for this n.
                        break
            if not some_change:
                break
        infinite = []
        for (s, term) in terminates.items():
            if not term:
                if s not in self.Prodnames and s not in self.Terminals and s != 'error':
                    # s is used-but-not-defined, and we've already warned of that,
                    # so it would be overkill to say that it's also non-terminating.
                    pass
                else:
                    infinite.append(s)
        return infinite
    # -----------------------------------------------------------------------------
    # undefined_symbols()
    #
    # Find all symbols that were used in the grammar, but not defined as tokens or
    # grammar rules.  Returns a list of tuples (sym, prod) where sym is the symbol
    # and prod is the production where the symbol was used.
    # -----------------------------------------------------------------------------
    def undefined_symbols(self):
        """Return (symbol, production) pairs for symbols used but never defined."""
        result = []
        for p in self.Productions:
            if not p:
                # Slot 0 is None until set_start() is called
                continue
            for s in p.prod:
                if s not in self.Prodnames and s not in self.Terminals and s != 'error':
                    result.append((s, p))
        return result
    # -----------------------------------------------------------------------------
    # unused_terminals()
    #
    # Find all terminals that were defined, but not used by the grammar.  Returns
    # a list of all symbols.
    # -----------------------------------------------------------------------------
    def unused_terminals(self):
        """Return terminals that appear in no production (excluding 'error')."""
        unused_tok = []
        for s, v in self.Terminals.items():
            if s != 'error' and not v:
                unused_tok.append(s)
        return unused_tok
    # ------------------------------------------------------------------------------
    # unused_rules()
    #
    # Find all grammar rules that were defined,  but not used (maybe not reachable)
    # Returns a list of productions.
    # ------------------------------------------------------------------------------
    def unused_rules(self):
        """Return one Production per nonterminal that is never referenced."""
        unused_prod = []
        for s, v in self.Nonterminals.items():
            if not v:
                p = self.Prodnames[s][0]
                unused_prod.append(p)
        return unused_prod
    # -----------------------------------------------------------------------------
    # unused_precedence()
    #
    # Returns a list of tuples (term,precedence) corresponding to precedence
    # rules that were never used by the grammar.  term is the name of the terminal
    # on which precedence was applied and precedence is a string such as 'left' or
    # 'right' corresponding to the type of precedence.
    # -----------------------------------------------------------------------------
    def unused_precedence(self):
        """Return (terminal, assoc) pairs for precedence rules never used."""
        unused = []
        for termname in self.Precedence:
            if not (termname in self.Terminals or termname in self.UsedPrecedence):
                unused.append((termname, self.Precedence[termname][0]))
        return unused
    # -------------------------------------------------------------------------
    # _first()
    #
    # Compute the value of FIRST1(beta) where beta is a tuple of symbols.
    #
    # During execution of compute_first1, the result may be incomplete.
    # Afterward (e.g., when called from compute_follow()), it will be complete.
    # -------------------------------------------------------------------------
    def _first(self, beta):
        """Return FIRST(beta) for a symbol sequence, using self.First."""
        # We are computing First(x1,x2,x3,...,xn)
        result = []
        for x in beta:
            x_produces_empty = False
            # Add all the non-<empty> symbols of First[x] to the result.
            for f in self.First[x]:
                if f == '<empty>':
                    x_produces_empty = True
                else:
                    if f not in result:
                        result.append(f)
            if x_produces_empty:
                # We have to consider the next x in beta,
                # i.e. stay in the loop.
                pass
            else:
                # We don't have to consider any further symbols in beta.
                break
        else:
            # There was no 'break' from the loop,
            # so x_produces_empty was true for all x in beta,
            # so beta produces empty as well.
            result.append('<empty>')
        return result
    # -------------------------------------------------------------------------
    # compute_first()
    #
    # Compute the value of FIRST1(X) for all symbols
    # -------------------------------------------------------------------------
    def compute_first(self):
        """Compute and cache FIRST sets for every grammar symbol."""
        if self.First:
            # Already computed; reuse the cached result
            return self.First
        # Terminals:
        for t in self.Terminals:
            self.First[t] = [t]
        self.First['$end'] = ['$end']
        # Nonterminals:
        # Initialize to the empty set:
        for n in self.Nonterminals:
            self.First[n] = []
        # Then propagate symbols until no change:
        while True:
            some_change = False
            for n in self.Nonterminals:
                for p in self.Prodnames[n]:
                    for f in self._first(p.prod):
                        if f not in self.First[n]:
                            self.First[n].append(f)
                            some_change = True
            if not some_change:
                break
        return self.First
    # ---------------------------------------------------------------------
    # compute_follow()
    #
    # Computes all of the follow sets for every non-terminal symbol.  The
    # follow set is the set of all symbols that might follow a given
    # non-terminal.  See the Dragon book, 2nd Ed. p. 189.
    # ---------------------------------------------------------------------
    def compute_follow(self, start=None):
        """Compute and cache FOLLOW sets for every nonterminal."""
        # If already computed, return the result
        if self.Follow:
            return self.Follow
        # If first sets not computed yet, do that first.
        if not self.First:
            self.compute_first()
        # Add '$end' to the follow list of the start symbol
        for k in self.Nonterminals:
            self.Follow[k] = []
        if not start:
            start = self.Productions[1].name
        self.Follow[start] = ['$end']
        while True:
            didadd = False
            for p in self.Productions[1:]:
                # Here is the production set
                for i, B in enumerate(p.prod):
                    if B in self.Nonterminals:
                        # Okay. We got a non-terminal in a production
                        fst = self._first(p.prod[i+1:])
                        hasempty = False
                        for f in fst:
                            if f != '<empty>' and f not in self.Follow[B]:
                                self.Follow[B].append(f)
                                didadd = True
                            if f == '<empty>':
                                hasempty = True
                        if hasempty or i == (len(p.prod)-1):
                            # Add elements of follow(a) to follow(b)
                            for f in self.Follow[p.name]:
                                if f not in self.Follow[B]:
                                    self.Follow[B].append(f)
                                    didadd = True
            if not didadd:
                break
        return self.Follow
    # -----------------------------------------------------------------------------
    # build_lritems()
    #
    # This function walks the list of productions and builds a complete set of the
    # LR items.  The LR items are stored in two ways:  First, they are uniquely
    # numbered and placed in the list _lritems.  Second, a linked list of LR items
    # is built for each production.  For example:
    #
    #   E -> E PLUS E
    #
    # Creates the list
    #
    #  [E -> . E PLUS E, E -> E . PLUS E, E -> E PLUS . E, E -> E PLUS E . ]
    # -----------------------------------------------------------------------------
    def build_lritems(self):
        """Build, for each production, its full chain of LR items.

        Each production ends up with p.lr_items (the list) and a linked list
        threaded through the lr_next attributes, starting at the production
        itself and terminated by None.
        """
        for p in self.Productions:
            lastlri = p
            i = 0
            lr_items = []
            while True:
                if i > len(p):
                    # Dot is past the end of the production; terminate the chain
                    lri = None
                else:
                    lri = LRItem(p, i)
                    # Precompute the list of productions immediately following
                    try:
                        lri.lr_after = self.Prodnames[lri.prod[i+1]]
                    except (IndexError, KeyError):
                        lri.lr_after = []
                    try:
                        lri.lr_before = lri.prod[i-1]
                    except IndexError:
                        lri.lr_before = None
                lastlri.lr_next = lri
                if not lri:
                    break
                lr_items.append(lri)
                lastlri = lri
                i += 1
            p.lr_items = lr_items
# -----------------------------------------------------------------------------
# == Class LRTable ==
#
# This basic class represents a basic table of LR parsing information.
# Methods for generating the tables are not defined here. They are defined
# in the derived class LRGeneratedTable.
# -----------------------------------------------------------------------------
class VersionError(YaccError):
    """Raised when a cached parsing table was written by an incompatible
    version of this module (table version != __tabversion__)."""
    pass
class LRTable(object):
    """A basic table of LR parsing information.

    Holds the action table, goto table, production list, and parsing method.
    Methods for *generating* the tables live in the derived class
    LRGeneratedTable; this class only loads previously generated tables.
    """
    def __init__(self):
        self.lr_action = None       # Action table
        self.lr_goto = None         # Goto table
        self.lr_productions = None  # List of MiniProduction objects
        self.lr_method = None       # 'LALR' or 'SLR'
    def read_table(self, module):
        """Load tables from an already-generated parsetab module.

        *module* may be a module object or an importable module name.
        Returns the stored grammar signature; raises VersionError if the
        table was written by an incompatible module version.
        """
        if isinstance(module, types.ModuleType):
            parsetab = module
        else:
            exec('import %s' % module)
            parsetab = sys.modules[module]
        if parsetab._tabversion != __tabversion__:
            raise VersionError('yacc table file version is out of date')
        self.lr_action = parsetab._lr_action
        self.lr_goto = parsetab._lr_goto
        self.lr_productions = []
        for p in parsetab._lr_productions:
            self.lr_productions.append(MiniProduction(*p))
        self.lr_method = parsetab._lr_method
        return parsetab._lr_signature
    def read_pickle(self, filename):
        """Load tables from a pickle file written by a previous run.

        Returns the stored grammar signature; raises ImportError if the file
        does not exist and VersionError on a table-version mismatch.
        """
        try:
            import cPickle as pickle
        except ImportError:
            import pickle
        if not os.path.exists(filename):
            raise ImportError
        # Use a context manager so the file is closed even when VersionError
        # or an unpickling error is raised (the previous code leaked the
        # handle on those paths).
        with open(filename, 'rb') as in_f:
            tabversion = pickle.load(in_f)
            if tabversion != __tabversion__:
                raise VersionError('yacc table file version is out of date')
            self.lr_method = pickle.load(in_f)
            signature = pickle.load(in_f)
            self.lr_action = pickle.load(in_f)
            self.lr_goto = pickle.load(in_f)
            productions = pickle.load(in_f)
        self.lr_productions = []
        for p in productions:
            self.lr_productions.append(MiniProduction(*p))
        return signature
    def bind_callables(self, pdict):
        """Bind all production function names to callable objects in pdict."""
        for p in self.lr_productions:
            p.bind(pdict)
# -----------------------------------------------------------------------------
# === LR Generator ===
#
# The following classes and functions are used to generate LR parsing tables on
# a grammar.
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# digraph()
# traverse()
#
# The following two functions are used to compute set valued functions
# of the form:
#
# F(x) = F'(x) U U{F(y) | x R y}
#
# This is used to compute the values of Read() sets as well as FOLLOW sets
# in LALR(1) generation.
#
# Inputs: X - An input set
# R - A relation
# FP - Set-valued function
# ------------------------------------------------------------------------------
def digraph(X, R, FP):
    """Solve F(x) = FP(x) U union{ F(y) : x R y } over the input set X.

    X  - input set
    R  - relation (callable returning the y's related to x)
    FP - set-valued seed function F'(x)

    The recursive work is done by traverse(); returns the dict F.
    """
    N = dict.fromkeys(X, 0)
    stack = []
    F = {}
    for x in X:
        if not N[x]:
            traverse(x, N, stack, F, X, R, FP)
    return F
def traverse(x, N, stack, F, X, R, FP):
    """Depth-first helper for digraph().

    Implements the DeRemer-Pennello strongly-connected-component walk:
    N[x] records the depth at which x was first visited, and every member
    of a completed component receives the root's F value.  MAXINT is a
    module-level constant used as a 'component finished' marker in N.
    """
    stack.append(x)
    d = len(stack)
    N[x] = d
    F[x] = FP(x) # F(X) <- F'(x)
    rel = R(x) # Get y's related to x
    for y in rel:
        if N[y] == 0:
            traverse(y, N, stack, F, X, R, FP)
        # Propagate the lowest depth reachable from x (SCC detection)
        N[x] = min(N[x], N[y])
        for a in F.get(y, []):
            if a not in F[x]:
                F[x].append(a)
    if N[x] == d:
        # x is the root of a strongly connected component: mark every
        # member finished and share the root's F value among them.
        N[stack[-1]] = MAXINT
        F[stack[-1]] = F[x]
        element = stack.pop()
        while element != x:
            N[stack[-1]] = MAXINT
            F[stack[-1]] = F[x]
            element = stack.pop()
class LALRError(YaccError):
    """Raised for errors encountered during LR table generation."""
    pass
# -----------------------------------------------------------------------------
# == LRGeneratedTable ==
#
# This class implements the LR table generation algorithm. There are no
# public methods except for write()
# -----------------------------------------------------------------------------
class LRGeneratedTable(LRTable):
    def __init__(self, grammar, method='LALR', log=None):
        """Build LR parsing tables for *grammar*.

        grammar - a fully populated Grammar instance
        method  - 'LALR' or 'SLR' (anything else raises LALRError)
        log     - optional logger; defaults to a NullLogger
        """
        if method not in ['SLR', 'LALR']:
            raise LALRError('Unsupported method %s' % method)
        self.grammar = grammar
        self.lr_method = method
        # Set up the logger
        if not log:
            log = NullLogger()
        self.log = log
        # Internal attributes
        self.lr_action = {} # Action table
        self.lr_goto = {} # Goto table
        self.lr_productions = grammar.Productions # Copy of grammar Production array
        self.lr_goto_cache = {} # Cache of computed gotos
        self.lr0_cidhash = {} # Cache of closures
        self._add_count = 0 # Internal counter used to detect cycles
        # Diagnostic information filled in by the table generator
        self.sr_conflict = 0
        self.rr_conflict = 0
        self.conflicts = [] # List of conflicts
        self.sr_conflicts = []
        self.rr_conflicts = []
        # Build the tables
        self.grammar.build_lritems()
        self.grammar.compute_first()
        self.grammar.compute_follow()
        self.lr_parse_table()
# Compute the LR(0) closure operation on I, where I is a set of LR(0) items.
    def lr0_closure(self, I):
        """Return the LR(0) closure of the item set *I*.

        Items reachable through nonterminal dots (each item's lr_after list)
        are appended until a fixed point.  The lr0_added attribute, stamped
        with the per-call _add_count, prevents adding the same production's
        items twice within one closure computation.
        """
        self._add_count += 1
        # Add everything in I to J
        J = I[:]
        didadd = True
        while didadd:
            didadd = False
            for j in J:
                for x in j.lr_after:
                    if getattr(x, 'lr0_added', 0) == self._add_count:
                        # Already added during this closure pass
                        continue
                    # Add B --> .G to J
                    J.append(x.lr_next)
                    x.lr0_added = self._add_count
                    didadd = True
        return J
# Compute the LR(0) goto function goto(I,X) where I is a set
# of LR(0) items and X is a grammar symbol. This function is written
# in a way that guarantees uniqueness of the generated goto sets
# (i.e. the same goto set will never be returned as two different Python
# objects). With uniqueness, we can later do fast set comparisons using
# id(obj) instead of element-wise comparison.
    def lr0_goto(self, I, x):
        """Compute goto(I, x) for item set *I* and grammar symbol *x*.

        Written so the same goto set is never returned as two distinct
        objects: results are cached both by (id(I), x) and through a trie
        of item identities keyed under lr_goto_cache[x].  Uniqueness lets
        later code compare sets by id() instead of element-wise.
        """
        # First we look for a previously cached entry
        g = self.lr_goto_cache.get((id(I), x))
        if g:
            return g
        # Now we generate the goto set in a way that guarantees uniqueness
        # of the result
        s = self.lr_goto_cache.get(x)
        if not s:
            s = {}
            self.lr_goto_cache[x] = s
        gs = []
        for p in I:
            n = p.lr_next
            if n and n.lr_before == x:
                # Descend one level in the identity trie for this item
                s1 = s.get(id(n))
                if not s1:
                    s1 = {}
                    s[id(n)] = s1
                gs.append(n)
                s = s1
        # '$end' marks the trie node holding the finished goto set
        g = s.get('$end')
        if not g:
            if gs:
                g = self.lr0_closure(gs)
                s['$end'] = g
            else:
                s['$end'] = gs
        self.lr_goto_cache[(id(I), x)] = g
        return g
# Compute the LR(0) sets of item function
    def lr0_items(self):
        """Compute the canonical collection C of LR(0) item sets.

        Starts from the closure of the augmented start production and keeps
        taking gotos; new sets are appended to C while it is being scanned
        by index, so the loop naturally processes them too.  Each set's
        state number is recorded in lr0_cidhash keyed by id(set).
        """
        C = [self.lr0_closure([self.grammar.Productions[0].lr_next])]
        i = 0
        for I in C:
            self.lr0_cidhash[id(I)] = i
            i += 1
        # Loop over the items in C and each grammar symbols
        i = 0
        while i < len(C):
            I = C[i]
            i += 1
            # Collect all of the symbols that could possibly be in the goto(I,X) sets
            asyms = {}
            for ii in I:
                for s in ii.usyms:
                    asyms[s] = None
            for x in asyms:
                g = self.lr0_goto(I, x)
                if not g or id(g) in self.lr0_cidhash:
                    # Empty goto, or a set we have already numbered
                    continue
                self.lr0_cidhash[id(g)] = len(C)
                C.append(g)
        return C
# -----------------------------------------------------------------------------
# ==== LALR(1) Parsing ====
#
# LALR(1) parsing is almost exactly the same as SLR except that instead of
# relying upon Follow() sets when performing reductions, a more selective
# lookahead set that incorporates the state of the LR(0) machine is utilized.
# Thus, we mainly just have to focus on calculating the lookahead sets.
#
# The method used here is due to DeRemer and Pennelo (1982).
#
# DeRemer, F. L., and T. J. Pennelo: "Efficient Computation of LALR(1)
# Lookahead Sets", ACM Transactions on Programming Languages and Systems,
# Vol. 4, No. 4, Oct. 1982, pp. 615-649
#
# Further details can also be found in:
#
# J. Tremblay and P. Sorenson, "The Theory and Practice of Compiler Writing",
# McGraw-Hill Book Company, (1985).
#
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# compute_nullable_nonterminals()
#
# Creates a dictionary containing all of the non-terminals that might produce
# an empty production.
# -----------------------------------------------------------------------------
def compute_nullable_nonterminals(self):
nullable = set()
num_nullable = 0
while True:
for p in self.grammar.Productions[1:]:
if p.len == 0:
nullable.add(p.name)
continue
for t in p.prod:
if t not in nullable:
break
else:
nullable.add(p.name)
if len(nullable) == num_nullable:
break
num_nullable = len(nullable)
return nullable
# -----------------------------------------------------------------------------
# find_nonterminal_transitions(C)
#
# Given a set of LR(0) items, this functions finds all of the non-terminal
# transitions. These are transitions in which a dot appears immediately before
# a non-terminal. Returns a list of tuples of the form (state,N) where state
# is the state number and N is the nonterminal symbol.
#
# The input C is the set of LR(0) items.
# -----------------------------------------------------------------------------
def find_nonterminal_transitions(self, C):
trans = []
for stateno, state in enumerate(C):
for p in state:
if p.lr_index < p.len - 1:
t = (stateno, p.prod[p.lr_index+1])
if t[1] in self.grammar.Nonterminals:
if t not in trans:
trans.append(t)
return trans
# -----------------------------------------------------------------------------
# dr_relation()
#
# Computes the DR(p,A) relationships for non-terminal transitions. The input
# is a tuple (state,N) where state is a number and N is a nonterminal symbol.
#
# Returns a list of terminals.
# -----------------------------------------------------------------------------
def dr_relation(self, C, trans, nullable):
dr_set = {}
state, N = trans
terms = []
g = self.lr0_goto(C[state], N)
for p in g:
if p.lr_index < p.len - 1:
a = p.prod[p.lr_index+1]
if a in self.grammar.Terminals:
if a not in terms:
terms.append(a)
# This extra bit is to handle the start state
if state == 0 and N == self.grammar.Productions[0].prod[0]:
terms.append('$end')
return terms
# -----------------------------------------------------------------------------
# reads_relation()
#
# Computes the READS() relation (p,A) READS (t,C).
# -----------------------------------------------------------------------------
    def reads_relation(self, C, trans, empty):
        """Compute the READS() relation: (p,A) READS (t,C).

        C     - list of LR(0) item sets
        trans - a (state, N) nonterminal transition
        empty - set of nullable nonterminals

        Returns (state, symbol) pairs for nullable symbols that appear
        immediately after a dot in the goto of *trans*.
        """
        # Look for empty transitions
        rel = []
        state, N = trans
        g = self.lr0_goto(C[state], N)
        j = self.lr0_cidhash.get(id(g), -1)
        for p in g:
            if p.lr_index < p.len - 1:
                a = p.prod[p.lr_index + 1]
                if a in empty:
                    rel.append((j, a))
        return rel
# -----------------------------------------------------------------------------
# compute_lookback_includes()
#
# Determines the lookback and includes relations
#
# LOOKBACK:
#
# This relation is determined by running the LR(0) state machine forward.
# For example, starting with a production "N : . A B C", we run it forward
# to obtain "N : A B C ." We then build a relationship between this final
# state and the starting state. These relationships are stored in a dictionary
# lookdict.
#
# INCLUDES:
#
# Computes the INCLUDE() relation (p,A) INCLUDES (p',B).
#
# This relation is used to determine non-terminal transitions that occur
# inside of other non-terminal transition states. (p,A) INCLUDES (p', B)
# if the following holds:
#
# B -> LAT, where T -> epsilon and p' -L-> p
#
# L is essentially a prefix (which may be empty), T is a suffix that must be
# able to derive an empty string. State p' must lead to state p with the string L.
#
# -----------------------------------------------------------------------------
    def compute_lookback_includes(self, C, trans, nullable):
        """Compute the LOOKBACK and INCLUDES relations for LALR(1) generation.

        C        - list of LR(0) item sets
        trans    - list of (state, N) nonterminal transitions
        nullable - set of nullable nonterminals

        Returns (lookdict, includedict):
        lookdict maps each (state, N) to the (final_state, item) pairs it
        looks back to; includedict maps each transition to the transitions
        whose follow sets it must include.
        """
        lookdict = {} # Dictionary of lookback relations
        includedict = {} # Dictionary of include relations
        # Make a dictionary of non-terminal transitions
        dtrans = {}
        for t in trans:
            dtrans[t] = 1
        # Loop over all transitions and compute lookbacks and includes
        for state, N in trans:
            lookb = []
            includes = []
            for p in C[state]:
                if p.name != N:
                    continue
                # Okay, we have a name match. We now follow the production all the way
                # through the state machine until we get the . on the right hand side
                lr_index = p.lr_index
                j = state
                while lr_index < p.len - 1:
                    lr_index = lr_index + 1
                    t = p.prod[lr_index]
                    # Check to see if this symbol and state are a non-terminal transition
                    if (j, t) in dtrans:
                        # Yes.  Okay, there is some chance that this is an includes relation
                        # the only way to know for certain is whether the rest of the
                        # production derives empty
                        li = lr_index + 1
                        while li < p.len:
                            if p.prod[li] in self.grammar.Terminals:
                                break      # No, forget it: a terminal can't derive empty
                            if p.prod[li] not in nullable:
                                break
                            li = li + 1
                        else:
                            # Appears to be a relation between (j,t) and (state,N)
                            includes.append((j, t))
                    g = self.lr0_goto(C[j], t) # Go to next set
                    j = self.lr0_cidhash.get(id(g), -1) # Go to next state
                # When we get here, j is the final state, now we have to locate the production
                for r in C[j]:
                    if r.name != p.name:
                        continue
                    if r.len != p.len:
                        continue
                    i = 0
                    # This loop is comparing a production ". A B C" with "A B C ."
                    while i < r.lr_index:
                        if r.prod[i] != p.prod[i+1]:
                            break
                        i = i + 1
                    else:
                        lookb.append((j, r))
            for i in includes:
                if i not in includedict:
                    includedict[i] = []
                includedict[i].append((state, N))
            lookdict[(state, N)] = lookb
        return lookdict, includedict
# -----------------------------------------------------------------------------
# compute_read_sets()
#
# Given a set of LR(0) items, this function computes the read sets.
#
# Inputs: C = Set of LR(0) items
# ntrans = Set of nonterminal transitions
# nullable = Set of empty transitions
#
# Returns a set containing the read sets
# -----------------------------------------------------------------------------
def compute_read_sets(self, C, ntrans, nullable):
FP = lambda x: self.dr_relation(C, x, nullable)
R = lambda x: self.reads_relation(C, x, nullable)
F = digraph(ntrans, R, FP)
return F
# -----------------------------------------------------------------------------
# compute_follow_sets()
#
# Given a set of LR(0) items, a set of non-terminal transitions, a readset,
# and an include set, this function computes the follow sets
#
# Follow(p,A) = Read(p,A) U U {Follow(p',B) | (p,A) INCLUDES (p',B)}
#
# Inputs:
# ntrans = Set of nonterminal transitions
# readsets = Readset (previously computed)
# inclsets = Include sets (previously computed)
#
# Returns a set containing the follow sets
# -----------------------------------------------------------------------------
def compute_follow_sets(self, ntrans, readsets, inclsets):
FP = lambda x: readsets[x]
R = lambda x: inclsets.get(x, [])
F = digraph(ntrans, R, FP)
return F
# -----------------------------------------------------------------------------
# add_lookaheads()
#
# Attaches the lookahead symbols to grammar rules.
#
# Inputs: lookbacks - Set of lookback relations
# followset - Computed follow set
#
# This function directly attaches the lookaheads to productions contained
# in the lookbacks set
# -----------------------------------------------------------------------------
def add_lookaheads(self, lookbacks, followset):
for trans, lb in lookbacks.items():
# Loop over productions in lookback
for state, p in lb:
if state not in p.lookaheads:
p.lookaheads[state] = []
f = followset.get(trans, [])
for a in f:
if a not in p.lookaheads[state]:
p.lookaheads[state].append(a)
# -----------------------------------------------------------------------------
# add_lalr_lookaheads()
#
# This function does all of the work of adding lookahead information for use
# with LALR parsing
# -----------------------------------------------------------------------------
def add_lalr_lookaheads(self, C):
    """Run the full LALR lookahead computation over the LR(0) item sets *C*
    and attach the resulting lookahead symbols to the grammar productions."""
    # Determine all of the nullable nonterminals
    nullable = self.compute_nullable_nonterminals()

    # Find all non-terminal transitions
    trans = self.find_nonterminal_transitions(C)

    # Compute read sets
    readsets = self.compute_read_sets(C, trans, nullable)

    # Compute lookback/includes relations
    lookd, included = self.compute_lookback_includes(C, trans, nullable)

    # Compute LALR FOLLOW sets
    followsets = self.compute_follow_sets(trans, readsets, included)

    # Add all of the lookaheads
    self.add_lookaheads(lookd, followsets)
# -----------------------------------------------------------------------------
# lr_parse_table()
#
# This function constructs the parse tables for SLR or LALR
# -----------------------------------------------------------------------------
def lr_parse_table(self):
    """Build the LR action and goto tables from the LR(0) item sets.

    Populates self.lr_action / self.lr_goto, records conflicts in
    self.sr_conflicts / self.rr_conflicts, and writes a full trace of the
    construction to self.log.  Action encoding: positive = shift to that
    state, negative = reduce by that rule number, 0 = accept, None = error
    (nonassoc).
    """
    Productions = self.grammar.Productions
    Precedence = self.grammar.Precedence
    goto = self.lr_goto  # Goto array
    action = self.lr_action  # Action array
    log = self.log  # Logger for output
    actionp = {}  # Action production array (temporary)

    log.info('Parsing method: %s', self.lr_method)

    # Step 1: Construct C = { I0, I1, ... IN}, collection of LR(0) items
    # This determines the number of states
    C = self.lr0_items()

    if self.lr_method == 'LALR':
        self.add_lalr_lookaheads(C)

    # Build the parser table, state by state
    st = 0
    for I in C:
        # Loop over each production in I
        actlist = []  # List of actions
        st_action = {}
        st_actionp = {}
        st_goto = {}
        log.info('')
        log.info('state %d', st)
        log.info('')
        for p in I:
            log.info(' (%d) %s', p.number, p)
        log.info('')

        for p in I:
            if p.len == p.lr_index + 1:
                if p.name == "S'":
                    # Start symbol. Accept!
                    st_action['$end'] = 0
                    st_actionp['$end'] = p
                else:
                    # We are at the end of a production. Reduce!
                    if self.lr_method == 'LALR':
                        laheads = p.lookaheads[st]
                    else:
                        laheads = self.grammar.Follow[p.name]
                    for a in laheads:
                        actlist.append((a, p, 'reduce using rule %d (%s)' % (p.number, p)))
                        r = st_action.get(a)
                        if r is not None:
                            # Whoa. Have a shift/reduce or reduce/reduce conflict
                            if r > 0:
                                # Need to decide on shift or reduce here
                                # By default we favor shifting. Need to add
                                # some precedence rules here.

                                # Shift precedence comes from the token
                                sprec, slevel = Precedence.get(a, ('right', 0))

                                # Reduce precedence comes from rule being reduced (p)
                                rprec, rlevel = Productions[p.number].prec

                                if (slevel < rlevel) or ((slevel == rlevel) and (rprec == 'left')):
                                    # We really need to reduce here.
                                    st_action[a] = -p.number
                                    st_actionp[a] = p
                                    if not slevel and not rlevel:
                                        log.info(' ! shift/reduce conflict for %s resolved as reduce', a)
                                        self.sr_conflicts.append((st, a, 'reduce'))
                                    Productions[p.number].reduced += 1
                                elif (slevel == rlevel) and (rprec == 'nonassoc'):
                                    st_action[a] = None
                                else:
                                    # Hmmm. Guess we'll keep the shift
                                    if not rlevel:
                                        log.info(' ! shift/reduce conflict for %s resolved as shift', a)
                                        self.sr_conflicts.append((st, a, 'shift'))
                            elif r < 0:
                                # Reduce/reduce conflict. In this case, we favor the rule
                                # that was defined first in the grammar file
                                oldp = Productions[-r]
                                pp = Productions[p.number]
                                if oldp.line > pp.line:
                                    st_action[a] = -p.number
                                    st_actionp[a] = p
                                    chosenp, rejectp = pp, oldp
                                    Productions[p.number].reduced += 1
                                    Productions[oldp.number].reduced -= 1
                                else:
                                    chosenp, rejectp = oldp, pp
                                self.rr_conflicts.append((st, chosenp, rejectp))
                                log.info(' ! reduce/reduce conflict for %s resolved using rule %d (%s)',
                                         a, st_actionp[a].number, st_actionp[a])
                            else:
                                raise LALRError('Unknown conflict in state %d' % st)
                        else:
                            st_action[a] = -p.number
                            st_actionp[a] = p
                            Productions[p.number].reduced += 1
            else:
                i = p.lr_index
                a = p.prod[i+1]  # Get symbol right after the "."
                if a in self.grammar.Terminals:
                    g = self.lr0_goto(I, a)
                    j = self.lr0_cidhash.get(id(g), -1)
                    if j >= 0:
                        # We are in a shift state
                        actlist.append((a, p, 'shift and go to state %d' % j))
                        r = st_action.get(a)
                        if r is not None:
                            # Whoa have a shift/reduce or shift/shift conflict
                            if r > 0:
                                if r != j:
                                    raise LALRError('Shift/shift conflict in state %d' % st)
                            elif r < 0:
                                # Do a precedence check.
                                # - if precedence of reduce rule is higher, we reduce.
                                # - if precedence of reduce is same and left assoc, we reduce.
                                # - otherwise we shift

                                # Shift precedence comes from the token
                                sprec, slevel = Precedence.get(a, ('right', 0))

                                # Reduce precedence comes from the rule that could have been reduced
                                rprec, rlevel = Productions[st_actionp[a].number].prec

                                if (slevel > rlevel) or ((slevel == rlevel) and (rprec == 'right')):
                                    # We decide to shift here... highest precedence to shift
                                    Productions[st_actionp[a].number].reduced -= 1
                                    st_action[a] = j
                                    st_actionp[a] = p
                                    if not rlevel:
                                        log.info(' ! shift/reduce conflict for %s resolved as shift', a)
                                        self.sr_conflicts.append((st, a, 'shift'))
                                elif (slevel == rlevel) and (rprec == 'nonassoc'):
                                    st_action[a] = None
                                else:
                                    # Hmmm. Guess we'll keep the reduce
                                    if not slevel and not rlevel:
                                        log.info(' ! shift/reduce conflict for %s resolved as reduce', a)
                                        self.sr_conflicts.append((st, a, 'reduce'))
                            else:
                                raise LALRError('Unknown conflict in state %d' % st)
                        else:
                            st_action[a] = j
                            st_actionp[a] = p

        # Print the actions associated with each terminal
        _actprint = {}
        for a, p, m in actlist:
            if a in st_action:
                if p is st_actionp[a]:
                    log.info(' %-15s %s', a, m)
                    _actprint[(a, m)] = 1
        log.info('')

        # Print the actions that were not used. (debugging)
        not_used = 0
        for a, p, m in actlist:
            if a in st_action:
                if p is not st_actionp[a]:
                    if not (a, m) in _actprint:
                        log.debug(' ! %-15s [ %s ]', a, m)
                        not_used = 1
                        _actprint[(a, m)] = 1
        if not_used:
            log.debug('')

        # Construct the goto table for this state
        nkeys = {}
        for ii in I:
            for s in ii.usyms:
                if s in self.grammar.Nonterminals:
                    nkeys[s] = None
        for n in nkeys:
            g = self.lr0_goto(I, n)
            j = self.lr0_cidhash.get(id(g), -1)
            if j >= 0:
                st_goto[n] = j
                log.info(' %-30s shift and go to state %d', n, j)

        action[st] = st_action
        actionp[st] = st_actionp
        goto[st] = st_goto
        st += 1
# -----------------------------------------------------------------------------
# write()
#
# This function writes the LR parsing tables to a file
# -----------------------------------------------------------------------------
def write_table(self, tabmodule, outputdir='', signature=''):
    """Write the LR parsing tables out as an importable Python module.

    The emitted module contains _tabversion/_lr_method/_lr_signature
    headers followed by the action table, goto table and production list.

    FIX: the triple-quoted code templates written into the generated file
    had lost the indentation of their loop bodies, so the generated
    ``_lr_action``/``_lr_goto`` reconstruction loops were not valid Python;
    the templates below restore it.
    """
    if isinstance(tabmodule, types.ModuleType):
        raise IOError("Won't overwrite existing tabmodule")

    basemodulename = tabmodule.split('.')[-1]
    filename = os.path.join(outputdir, basemodulename) + '.py'
    try:
        f = open(filename, 'w')
        f.write('''
# %s
# This file is automatically generated. Do not edit.
_tabversion = %r

_lr_method = %r

_lr_signature = %r
''' % (os.path.basename(filename), __tabversion__, self.lr_method, signature))

        # Change smaller to 0 to go back to original tables
        smaller = 1

        # Factor out names to try and make smaller
        if smaller:
            # Invert state->name->value into name->([states], [values]) so
            # each symbol name is written once instead of once per state.
            items = {}
            for s, nd in self.lr_action.items():
                for name, v in nd.items():
                    i = items.get(name)
                    if not i:
                        i = ([], [])
                        items[name] = i
                    i[0].append(s)
                    i[1].append(v)

            f.write('\n_lr_action_items = {')
            for k, v in items.items():
                f.write('%r:([' % k)
                for i in v[0]:
                    f.write('%r,' % i)
                f.write('],[')
                for i in v[1]:
                    f.write('%r,' % i)
                f.write(']),')
            f.write('}\n')

            f.write('''
_lr_action = {}
for _k, _v in _lr_action_items.items():
    for _x,_y in zip(_v[0],_v[1]):
        if not _x in _lr_action: _lr_action[_x] = {}
        _lr_action[_x][_k] = _y
del _lr_action_items
''')
        else:
            f.write('\n_lr_action = { ')
            for k, v in self.lr_action.items():
                f.write('(%r,%r):%r,' % (k[0], k[1], v))
            f.write('}\n')

        if smaller:
            # Factor out names to try and make smaller
            items = {}
            for s, nd in self.lr_goto.items():
                for name, v in nd.items():
                    i = items.get(name)
                    if not i:
                        i = ([], [])
                        items[name] = i
                    i[0].append(s)
                    i[1].append(v)

            f.write('\n_lr_goto_items = {')
            for k, v in items.items():
                f.write('%r:([' % k)
                for i in v[0]:
                    f.write('%r,' % i)
                f.write('],[')
                for i in v[1]:
                    f.write('%r,' % i)
                f.write(']),')
            f.write('}\n')

            f.write('''
_lr_goto = {}
for _k, _v in _lr_goto_items.items():
    for _x, _y in zip(_v[0], _v[1]):
        if not _x in _lr_goto: _lr_goto[_x] = {}
        _lr_goto[_x][_k] = _y
del _lr_goto_items
''')
        else:
            f.write('\n_lr_goto = { ')
            for k, v in self.lr_goto.items():
                f.write('(%r,%r):%r,' % (k[0], k[1], v))
            f.write('}\n')

        # Write production table
        f.write('_lr_productions = [\n')
        for p in self.lr_productions:
            if p.func:
                f.write('  (%r,%r,%d,%r,%r,%d),\n' % (p.str, p.name, p.len,
                                                      p.func, os.path.basename(p.file), p.line))
            else:
                f.write('  (%r,%r,%d,None,None,None),\n' % (str(p), p.name, p.len))
        f.write(']\n')
        f.close()
    except IOError:
        raise
# -----------------------------------------------------------------------------
# pickle_table()
#
# This function pickles the LR parsing tables to a supplied file object
# -----------------------------------------------------------------------------
def pickle_table(self, filename, signature=''):
    """Serialize the LR tables to *filename* using pickle.

    Record order mirrors write_table(): table version, parse method,
    grammar signature, action table, goto table, then the production list
    (each production flattened to a plain tuple).
    """
    try:
        import cPickle as pickle  # Python 2 accelerated pickler, if present
    except ImportError:
        import pickle
    with open(filename, 'wb') as outf:
        pickle.dump(__tabversion__, outf, pickle_protocol)
        pickle.dump(self.lr_method, outf, pickle_protocol)
        pickle.dump(signature, outf, pickle_protocol)
        pickle.dump(self.lr_action, outf, pickle_protocol)
        pickle.dump(self.lr_goto, outf, pickle_protocol)
        outp = []
        for p in self.lr_productions:
            if p.func:
                outp.append((p.str, p.name, p.len, p.func, os.path.basename(p.file), p.line))
            else:
                # Production has no action function; pad with Nones.
                outp.append((str(p), p.name, p.len, None, None, None))
        pickle.dump(outp, outf, pickle_protocol)
# -----------------------------------------------------------------------------
# === INTROSPECTION ===
#
# The following functions and classes are used to implement the PLY
# introspection features followed by the yacc() function itself.
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# get_caller_module_dict()
#
# Returns a dictionary containing all of the symbols defined within a caller
# further down the call stack.  Used to pick up the environment of the
# yacc() call when no module is supplied explicitly.
# -----------------------------------------------------------------------------
def get_caller_module_dict(levels):
    """Return a merged globals+locals snapshot of the frame *levels* frames
    up the call stack; local bindings win on key collisions."""
    frame = sys._getframe(levels)
    symbols = dict(frame.f_globals)
    if frame.f_locals != frame.f_globals:
        symbols.update(frame.f_locals)
    return symbols
# -----------------------------------------------------------------------------
# parse_grammar()
#
# This takes a raw grammar rule string and parses it into production data
# -----------------------------------------------------------------------------
def parse_grammar(doc, file, line):
    """Parse the rule specification in docstring *doc* into a list of
    (file, lineno, prodname, syms) tuples.

    *line* is the line number just before the docstring; a leading '|'
    continues the most recent rule.  Raises SyntaxError on malformed rules.
    """
    rules = []
    prev_name = None
    for lineno, raw in enumerate(doc.splitlines(), start=line + 1):
        tokens = raw.split()
        if not tokens:
            continue
        try:
            if tokens[0] == '|':
                # Continuation of the previous rule
                if not prev_name:
                    raise SyntaxError("%s:%d: Misplaced '|'" % (file, lineno))
                name = prev_name
                syms = tokens[1:]
            else:
                name = tokens[0]
                prev_name = name
                syms = tokens[2:]
                sep = tokens[1]
                if sep != ':' and sep != '::=':
                    raise SyntaxError("%s:%d: Syntax error. Expected ':'" % (file, lineno))
            rules.append((file, lineno, name, syms))
        except SyntaxError:
            raise
        except Exception:
            # e.g. a bare rule name with no separator (IndexError above)
            raise SyntaxError('%s:%d: Syntax error in rule %r' % (file, lineno, raw.strip()))
    return rules
# -----------------------------------------------------------------------------
# ParserReflect()
#
# This class represents information extracted for building a parser including
# start symbol, error function, tokens, precedence list, action functions,
# etc.
# -----------------------------------------------------------------------------
class ParserReflect(object):
    """Collects and validates parser-definition information (start symbol,
    tokens, precedence, p_* rule functions) from a symbol dictionary,
    typically a module namespace handed to yacc()."""

    def __init__(self, pdict, log=None):
        self.pdict = pdict          # symbol table the parser spec is read from
        self.start = None           # start symbol name, if declared
        self.error_func = None      # user p_error handler, if declared
        self.tokens = None          # declared token list
        self.modules = set()        # modules contributing rule functions
        self.grammar = []           # parsed (funcname, rule-tuple) entries
        self.error = False          # sticky flag set by any validation failure
        if log is None:
            self.log = PlyLogger(sys.stderr)
        else:
            self.log = log

    # Get all of the basic information
    def get_all(self):
        self.get_start()
        self.get_error_func()
        self.get_tokens()
        self.get_precedence()
        self.get_pfunctions()

    # Validate all of the information
    def validate_all(self):
        self.validate_start()
        self.validate_error_func()
        self.validate_tokens()
        self.validate_precedence()
        self.validate_pfunctions()
        self.validate_modules()
        return self.error

    # Compute a signature over the grammar
    def signature(self):
        """Return a string fingerprint of the grammar, used to decide
        whether previously generated tables are still valid."""
        parts = []
        try:
            if self.start:
                parts.append(self.start)
            if self.prec:
                parts.append(''.join([''.join(p) for p in self.prec]))
            if self.tokens:
                parts.append(' '.join(self.tokens))
            for f in self.pfuncs:
                if f[3]:
                    parts.append(f[3])
        except (TypeError, ValueError):
            pass
        return ''.join(parts)

    # -----------------------------------------------------------------------------
    # validate_modules()
    #
    # This method checks to see if there are duplicated p_rulename() functions
    # in the parser module file. Without this function, it is really easy for
    # users to make mistakes by cutting and pasting code fragments (and it's a real
    # bugger to try and figure out why the resulting parser doesn't work). Therefore,
    # we just do a little regular expression pattern matching of def statements
    # to try and detect duplicates.
    # -----------------------------------------------------------------------------
    def validate_modules(self):
        # Match def p_funcname(
        fre = re.compile(r'\s*def\s+(p_[a-zA-Z_0-9]*)\(')

        for module in self.modules:
            try:
                lines, linen = inspect.getsourcelines(module)
            except IOError:
                # Source not available (e.g. frozen module); nothing to scan.
                continue

            counthash = {}
            for linen, line in enumerate(lines):
                linen += 1
                m = fre.match(line)
                if m:
                    name = m.group(1)
                    prev = counthash.get(name)
                    if not prev:
                        counthash[name] = linen
                    else:
                        filename = inspect.getsourcefile(module)
                        self.log.warning('%s:%d: Function %s redefined. Previously defined on line %d',
                                         filename, linen, name, prev)

    # Get the start symbol
    def get_start(self):
        self.start = self.pdict.get('start')

    # Validate the start symbol
    def validate_start(self):
        if self.start is not None:
            if not isinstance(self.start, string_types):
                self.log.error("'start' must be a string")

    # Look for error handler
    def get_error_func(self):
        self.error_func = self.pdict.get('p_error')

    # Validate the error function
    def validate_error_func(self):
        if self.error_func:
            if isinstance(self.error_func, types.FunctionType):
                ismethod = 0
            elif isinstance(self.error_func, types.MethodType):
                ismethod = 1
            else:
                self.log.error("'p_error' defined, but is not a function or method")
                self.error = True
                return

            eline = self.error_func.__code__.co_firstlineno
            efile = self.error_func.__code__.co_filename
            module = inspect.getmodule(self.error_func)
            self.modules.add(module)

            # Bound methods carry an implicit self, hence the adjustment.
            argcount = self.error_func.__code__.co_argcount - ismethod
            if argcount != 1:
                self.log.error('%s:%d: p_error() requires 1 argument', efile, eline)
                self.error = True

    # Get the tokens map
    def get_tokens(self):
        tokens = self.pdict.get('tokens')
        if not tokens:
            self.log.error('No token list is defined')
            self.error = True
            return

        if not isinstance(tokens, (list, tuple)):
            self.log.error('tokens must be a list or tuple')
            self.error = True
            return

        if not tokens:
            self.log.error('tokens is empty')
            self.error = True
            return

        self.tokens = tokens

    # Validate the tokens
    def validate_tokens(self):
        # Validate the tokens.
        if 'error' in self.tokens:
            self.log.error("Illegal token name 'error'. Is a reserved word")
            self.error = True
            return

        terminals = set()
        for n in self.tokens:
            if n in terminals:
                self.log.warning('Token %r multiply defined', n)
            terminals.add(n)

    # Get the precedence map (if any)
    def get_precedence(self):
        self.prec = self.pdict.get('precedence')

    # Validate and parse the precedence map
    def validate_precedence(self):
        preclist = []
        if self.prec:
            if not isinstance(self.prec, (list, tuple)):
                self.log.error('precedence must be a list or tuple')
                self.error = True
                return
            for level, p in enumerate(self.prec):
                if not isinstance(p, (list, tuple)):
                    self.log.error('Bad precedence table')
                    self.error = True
                    return

                if len(p) < 2:
                    self.log.error('Malformed precedence entry %s. Must be (assoc, term, ..., term)', p)
                    self.error = True
                    return
                assoc = p[0]
                if not isinstance(assoc, string_types):
                    self.log.error('precedence associativity must be a string')
                    self.error = True
                    return
                for term in p[1:]:
                    if not isinstance(term, string_types):
                        self.log.error('precedence items must be strings')
                        self.error = True
                        return
                    # level+1: levels are 1-based so that 0 means "no precedence"
                    preclist.append((term, assoc, level+1))
        self.preclist = preclist

    # Get all p_functions from the grammar
    def get_pfunctions(self):
        p_functions = []
        for name, item in self.pdict.items():
            if not name.startswith('p_') or name == 'p_error':
                continue
            if isinstance(item, (types.FunctionType, types.MethodType)):
                line = getattr(item, 'co_firstlineno', item.__code__.co_firstlineno)
                module = inspect.getmodule(item)
                p_functions.append((line, module, name, item.__doc__))

        # Sort all of the actions by line number; make sure to stringify
        # modules to make them sortable, since `line` may not uniquely sort all
        # p functions
        p_functions.sort(key=lambda p_function: (
            p_function[0],
            str(p_function[1]),
            p_function[2],
            p_function[3]))
        self.pfuncs = p_functions

    # Validate all of the p_functions
    def validate_pfunctions(self):
        grammar = []
        # Check for non-empty symbols
        if len(self.pfuncs) == 0:
            self.log.error('no rules of the form p_rulename are defined')
            self.error = True
            return

        for line, module, name, doc in self.pfuncs:
            file = inspect.getsourcefile(module)
            func = self.pdict[name]
            # Methods take (self, p); plain functions take (p).
            if isinstance(func, types.MethodType):
                reqargs = 2
            else:
                reqargs = 1
            if func.__code__.co_argcount > reqargs:
                self.log.error('%s:%d: Rule %r has too many arguments', file, line, func.__name__)
                self.error = True
            elif func.__code__.co_argcount < reqargs:
                self.log.error('%s:%d: Rule %r requires an argument', file, line, func.__name__)
                self.error = True
            elif not func.__doc__:
                self.log.warning('%s:%d: No documentation string specified in function %r (ignored)',
                                 file, line, func.__name__)
            else:
                try:
                    parsed_g = parse_grammar(doc, file, line)
                    for g in parsed_g:
                        grammar.append((name, g))
                except SyntaxError as e:
                    self.log.error(str(e))
                    self.error = True

                # Looks like a valid grammar rule
                # Mark the file in which defined.
                self.modules.add(module)

        # Secondary validation step that looks for p_ definitions that are not functions
        # or functions that look like they might be grammar rules.
        for n, v in self.pdict.items():
            if n.startswith('p_') and isinstance(v, (types.FunctionType, types.MethodType)):
                continue
            if n.startswith('t_'):
                continue
            if n.startswith('p_') and n != 'p_error':
                self.log.warning('%r not defined as a function', n)
            if ((isinstance(v, types.FunctionType) and v.__code__.co_argcount == 1) or
                    (isinstance(v, types.MethodType) and v.__func__.__code__.co_argcount == 2)):
                if v.__doc__:
                    try:
                        doc = v.__doc__.split(' ')
                        if doc[1] == ':':
                            self.log.warning('%s:%d: Possible grammar rule %r defined without p_ prefix',
                                             v.__code__.co_filename, v.__code__.co_firstlineno, n)
                    except IndexError:
                        pass

        self.grammar = grammar
# -----------------------------------------------------------------------------
# yacc(module)
#
# Build a parser
# -----------------------------------------------------------------------------
def yacc(method='LALR', debug=yaccdebug, module=None, tabmodule=tab_module, start=None,
         check_recursion=True, optimize=False, write_tables=True, debugfile=debug_file,
         outputdir=None, debuglog=None, errorlog=None, picklefile=None):
    """Build and return an LRParser from the grammar rules found in *module*
    (or, by default, the caller's namespace).

    Previously generated tables are reused when their stored signature
    matches the current grammar; otherwise the tables are regenerated,
    validated, and optionally written back out (as a module or a pickle).
    Raises YaccError if the grammar cannot be turned into a parser.
    """
    if tabmodule is None:
        tabmodule = tab_module

    # Reference to the parsing method of the last built parser
    global parse

    # If pickling is enabled, table files are not created
    if picklefile:
        write_tables = 0

    if errorlog is None:
        errorlog = PlyLogger(sys.stderr)

    # Get the module dictionary used for the parser
    if module:
        _items = [(k, getattr(module, k)) for k in dir(module)]
        pdict = dict(_items)
        # If no __file__ attribute is available, try to obtain it from the __module__ instead
        if '__file__' not in pdict:
            pdict['__file__'] = sys.modules[pdict['__module__']].__file__
    else:
        pdict = get_caller_module_dict(2)

    if outputdir is None:
        # If no output directory is set, the location of the output files
        # is determined according to the following rules:
        #     - If tabmodule specifies a package, files go into that package directory
        #     - Otherwise, files go in the same directory as the specifying module
        if isinstance(tabmodule, types.ModuleType):
            srcfile = tabmodule.__file__
        else:
            if '.' not in tabmodule:
                srcfile = pdict['__file__']
            else:
                parts = tabmodule.split('.')
                pkgname = '.'.join(parts[:-1])
                exec('import %s' % pkgname)
                srcfile = getattr(sys.modules[pkgname], '__file__', '')
        outputdir = os.path.dirname(srcfile)

    # Determine if the module is package of a package or not.
    # If so, fix the tabmodule setting so that tables load correctly
    pkg = pdict.get('__package__')
    if pkg and isinstance(tabmodule, str):
        if '.' not in tabmodule:
            tabmodule = pkg + '.' + tabmodule

    # Set start symbol if it's specified directly using an argument
    if start is not None:
        pdict['start'] = start

    # Collect parser information from the dictionary
    pinfo = ParserReflect(pdict, log=errorlog)
    pinfo.get_all()

    if pinfo.error:
        raise YaccError('Unable to build parser')

    # Check signature against table files (if any)
    signature = pinfo.signature()

    # Read the tables
    try:
        lr = LRTable()
        if picklefile:
            read_signature = lr.read_pickle(picklefile)
        else:
            read_signature = lr.read_table(tabmodule)
        if optimize or (read_signature == signature):
            try:
                lr.bind_callables(pinfo.pdict)
                parser = LRParser(lr, pinfo.error_func)
                parse = parser.parse
                return parser
            except Exception as e:
                errorlog.warning('There was a problem loading the table file: %r', e)
    except VersionError as e:
        errorlog.warning(str(e))
    except ImportError:
        # No pre-generated tables available; fall through and build them.
        pass

    if debuglog is None:
        if debug:
            try:
                debuglog = PlyLogger(open(os.path.join(outputdir, debugfile), 'w'))
            except IOError as e:
                errorlog.warning("Couldn't open %r. %s" % (debugfile, e))
                debuglog = NullLogger()
        else:
            debuglog = NullLogger()

    debuglog.info('Created by PLY version %s (http://www.dabeaz.com/ply)', __version__)

    errors = False

    # Validate the parser information
    if pinfo.validate_all():
        raise YaccError('Unable to build parser')

    if not pinfo.error_func:
        errorlog.warning('no p_error() function is defined')

    # Create a grammar object
    grammar = Grammar(pinfo.tokens)

    # Set precedence level for terminals
    for term, assoc, level in pinfo.preclist:
        try:
            grammar.set_precedence(term, assoc, level)
        except GrammarError as e:
            errorlog.warning('%s', e)

    # Add productions to the grammar
    for funcname, gram in pinfo.grammar:
        file, line, prodname, syms = gram
        try:
            grammar.add_production(prodname, syms, funcname, file, line)
        except GrammarError as e:
            errorlog.error('%s', e)
            errors = True

    # Set the grammar start symbols
    try:
        if start is None:
            grammar.set_start(pinfo.start)
        else:
            grammar.set_start(start)
    except GrammarError as e:
        errorlog.error(str(e))
        errors = True

    if errors:
        raise YaccError('Unable to build parser')

    # Verify the grammar structure
    undefined_symbols = grammar.undefined_symbols()
    for sym, prod in undefined_symbols:
        errorlog.error('%s:%d: Symbol %r used, but not defined as a token or a rule', prod.file, prod.line, sym)
        errors = True

    unused_terminals = grammar.unused_terminals()
    if unused_terminals:
        debuglog.info('')
        debuglog.info('Unused terminals:')
        debuglog.info('')
        for term in unused_terminals:
            errorlog.warning('Token %r defined, but not used', term)
            debuglog.info(' %s', term)

    # Print out all productions to the debug log
    if debug:
        debuglog.info('')
        debuglog.info('Grammar')
        debuglog.info('')
        for n, p in enumerate(grammar.Productions):
            debuglog.info('Rule %-5d %s', n, p)

    # Find unused non-terminals
    unused_rules = grammar.unused_rules()
    for prod in unused_rules:
        errorlog.warning('%s:%d: Rule %r defined, but not used', prod.file, prod.line, prod.name)

    if len(unused_terminals) == 1:
        errorlog.warning('There is 1 unused token')
    if len(unused_terminals) > 1:
        errorlog.warning('There are %d unused tokens', len(unused_terminals))

    if len(unused_rules) == 1:
        errorlog.warning('There is 1 unused rule')
    if len(unused_rules) > 1:
        errorlog.warning('There are %d unused rules', len(unused_rules))

    if debug:
        debuglog.info('')
        debuglog.info('Terminals, with rules where they appear')
        debuglog.info('')
        terms = list(grammar.Terminals)
        terms.sort()
        for term in terms:
            debuglog.info('%-20s : %s', term, ' '.join([str(s) for s in grammar.Terminals[term]]))

        debuglog.info('')
        debuglog.info('Nonterminals, with rules where they appear')
        debuglog.info('')
        nonterms = list(grammar.Nonterminals)
        nonterms.sort()
        for nonterm in nonterms:
            debuglog.info('%-20s : %s', nonterm, ' '.join([str(s) for s in grammar.Nonterminals[nonterm]]))
        debuglog.info('')

    if check_recursion:
        unreachable = grammar.find_unreachable()
        for u in unreachable:
            errorlog.warning('Symbol %r is unreachable', u)

        infinite = grammar.infinite_cycles()
        for inf in infinite:
            errorlog.error('Infinite recursion detected for symbol %r', inf)
            errors = True

    unused_prec = grammar.unused_precedence()
    for term, assoc in unused_prec:
        errorlog.error('Precedence rule %r defined for unknown symbol %r', assoc, term)
        errors = True

    if errors:
        raise YaccError('Unable to build parser')

    # Run the LRGeneratedTable on the grammar
    if debug:
        errorlog.debug('Generating %s tables', method)

    lr = LRGeneratedTable(grammar, method, debuglog)

    if debug:
        num_sr = len(lr.sr_conflicts)

        # Report shift/reduce and reduce/reduce conflicts
        if num_sr == 1:
            errorlog.warning('1 shift/reduce conflict')
        elif num_sr > 1:
            errorlog.warning('%d shift/reduce conflicts', num_sr)

        num_rr = len(lr.rr_conflicts)
        if num_rr == 1:
            errorlog.warning('1 reduce/reduce conflict')
        elif num_rr > 1:
            errorlog.warning('%d reduce/reduce conflicts', num_rr)

    # Write out conflicts to the output file
    if debug and (lr.sr_conflicts or lr.rr_conflicts):
        debuglog.warning('')
        debuglog.warning('Conflicts:')
        debuglog.warning('')

        for state, tok, resolution in lr.sr_conflicts:
            debuglog.warning('shift/reduce conflict for %s in state %d resolved as %s', tok, state, resolution)

        already_reported = set()
        for state, rule, rejected in lr.rr_conflicts:
            if (state, id(rule), id(rejected)) in already_reported:
                continue
            debuglog.warning('reduce/reduce conflict in state %d resolved using rule (%s)', state, rule)
            debuglog.warning('rejected rule (%s) in state %d', rejected, state)
            errorlog.warning('reduce/reduce conflict in state %d resolved using rule (%s)', state, rule)
            errorlog.warning('rejected rule (%s) in state %d', rejected, state)
            already_reported.add((state, id(rule), id(rejected)))

        warned_never = []
        for state, rule, rejected in lr.rr_conflicts:
            if not rejected.reduced and (rejected not in warned_never):
                debuglog.warning('Rule (%s) is never reduced', rejected)
                errorlog.warning('Rule (%s) is never reduced', rejected)
                warned_never.append(rejected)

    # Write the table file if requested
    if write_tables:
        try:
            lr.write_table(tabmodule, outputdir, signature)
        except IOError as e:
            errorlog.warning("Couldn't create %r. %s" % (tabmodule, e))

    # Write a pickled version of the tables
    if picklefile:
        try:
            lr.pickle_table(picklefile, signature)
        except IOError as e:
            errorlog.warning("Couldn't create %r. %s" % (picklefile, e))

    # Build the parser
    lr.bind_callables(pinfo.pdict)
    parser = LRParser(lr, pinfo.error_func)
    parse = parser.parse
    return parser
| 137,323 | Python | 38.291559 | 119 | 0.467358 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pycparser/ply/__init__.py | # PLY package
# Author: David Beazley ([email protected])
__version__ = '3.9'  # PLY release that this vendored copy corresponds to
__all__ = ['lex','yacc']  # public submodules re-exported by the package
| 102 | Python | 16.166664 | 41 | 0.588235 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pycparser/ply/ctokens.py | # ----------------------------------------------------------------------
# ctokens.py
#
# Token specifications for symbols in ANSI C and C++. This file is
# meant to be used as a library in other tokenizers.
# ----------------------------------------------------------------------
# Reserved words
# Master token-name list consumed by ply.lex/ply.yacc; importing modules may
# copy and extend this list before building their own lexer.
tokens = [
    # Literals (identifier, integer constant, float constant, string constant, char const)
    'ID', 'TYPEID', 'INTEGER', 'FLOAT', 'STRING', 'CHARACTER',

    # Operators (+,-,*,/,%,|,&,~,^,<<,>>, ||, &&, !, <, <=, >, >=, ==, !=)
    'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'MODULO',
    'OR', 'AND', 'NOT', 'XOR', 'LSHIFT', 'RSHIFT',
    'LOR', 'LAND', 'LNOT',
    'LT', 'LE', 'GT', 'GE', 'EQ', 'NE',

    # Assignment (=, *=, /=, %=, +=, -=, <<=, >>=, &=, ^=, |=)
    'EQUALS', 'TIMESEQUAL', 'DIVEQUAL', 'MODEQUAL', 'PLUSEQUAL', 'MINUSEQUAL',
    'LSHIFTEQUAL','RSHIFTEQUAL', 'ANDEQUAL', 'XOREQUAL', 'OREQUAL',

    # Increment/decrement (++,--)
    'INCREMENT', 'DECREMENT',

    # Structure dereference (->)
    'ARROW',

    # Ternary operator (?)
    'TERNARY',

    # Delimeters ( ) [ ] { } , . ; :
    'LPAREN', 'RPAREN',
    'LBRACKET', 'RBRACKET',
    'LBRACE', 'RBRACE',
    'COMMA', 'PERIOD', 'SEMI', 'COLON',

    # Ellipsis (...)
    'ELLIPSIS',
]
# NOTE: ply.lex collects every module-level name beginning with "t_"; the raw
# string assigned to each is the regular expression used to match that token.

# Operators
t_PLUS = r'\+'
t_MINUS = r'-'
t_TIMES = r'\*'
t_DIVIDE = r'/'
t_MODULO = r'%'
t_OR = r'\|'
t_AND = r'&'
t_NOT = r'~'
t_XOR = r'\^'
t_LSHIFT = r'<<'
t_RSHIFT = r'>>'
t_LOR = r'\|\|'
t_LAND = r'&&'
t_LNOT = r'!'
t_LT = r'<'
t_GT = r'>'
t_LE = r'<='
t_GE = r'>='
t_EQ = r'=='
t_NE = r'!='

# Assignment operators
t_EQUALS = r'='
t_TIMESEQUAL = r'\*='
t_DIVEQUAL = r'/='
t_MODEQUAL = r'%='
t_PLUSEQUAL = r'\+='
t_MINUSEQUAL = r'-='
t_LSHIFTEQUAL = r'<<='
t_RSHIFTEQUAL = r'>>='
t_ANDEQUAL = r'&='
t_OREQUAL = r'\|='
t_XOREQUAL = r'\^='

# Increment/decrement
t_INCREMENT = r'\+\+'
t_DECREMENT = r'--'

# ->
t_ARROW = r'->'

# ?
t_TERNARY = r'\?'

# Delimeters
t_LPAREN = r'\('
t_RPAREN = r'\)'
t_LBRACKET = r'\['
t_RBRACKET = r'\]'
t_LBRACE = r'\{'
t_RBRACE = r'\}'
t_COMMA = r','
t_PERIOD = r'\.'
t_SEMI = r';'
t_COLON = r':'
t_ELLIPSIS = r'\.\.\.'

# Identifiers
t_ID = r'[A-Za-z_][A-Za-z0-9_]*'

# Integer literal
t_INTEGER = r'\d+([uU]|[lL]|[uU][lL]|[lL][uU])?'

# Floating literal
t_FLOAT = r'((\d+)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?'

# String literal
t_STRING = r'\"([^\\\n]|(\\.))*?\"'

# Character constant 'c' or L'c'
t_CHARACTER = r'(L)?\'([^\\\n]|(\\.))*?\''
# Comment (C-Style)
def t_COMMENT(t):
    r'/\*(.|\n)*?\*/'
    # NOTE: the "docstring" above is not documentation -- ply.lex uses it as
    # this token's regular expression.  Do not edit it casually.
    # Keep the lexer's line counter accurate across multi-line comments.
    t.lexer.lineno += t.value.count('\n')
    return t
# Comment (C++-Style)
def t_CPPCOMMENT(t):
    r'//.*\n'
    # NOTE: ply.lex takes the docstring above as the token's regex.  It
    # requires a trailing newline, so a // comment on the very last line of
    # input without a final newline will not match -- presumably acceptable
    # for this library's inputs; verify against callers before changing.
    t.lexer.lineno += 1
    return t
| 3,177 | Python | 22.716418 | 90 | 0.393768 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pycparser/ply/ygen.py | # ply: ygen.py
#
# This is a support program that auto-generates different versions of the YACC parsing
# function with different features removed for the purposes of performance.
#
# Users should edit the method LParser.parsedebug() in yacc.py. The source code
# for that method is then used to create the other methods. See the comments in
# yacc.py for further details.
import os.path
import shutil
def get_source_range(lines, tag):
    """Return (start, end) slice indices bracketing the region delimited by
    '#--! <tag>-start' and '#--! <tag>-end' marker lines in *lines*.

    The returned start index points just past the start marker and the end
    index points at the end marker, so lines[start:end] is the region body.
    """
    numbered = enumerate(lines)
    open_marker = '#--! %s-start' % tag
    close_marker = '#--! %s-end' % tag
    # The two scans deliberately share one iterator: the search for the
    # end marker resumes immediately after the start marker.
    for begin, text in numbered:
        if text.strip().startswith(open_marker):
            break
    for finish, text in numbered:
        if text.strip().endswith(close_marker):
            break
    return (begin + 1, finish)
def filter_section(lines, tag):
    """Return *lines* with every '#--! <tag>' marker line removed along
    with all lines lying between successive marker pairs."""
    marker = '#--! %s' % tag
    keep = True
    kept = []
    for text in lines:
        if text.strip().startswith(marker):
            # Marker lines toggle visibility and are themselves dropped.
            keep = not keep
            continue
        if keep:
            kept.append(text)
    return kept
def main():
    """Regenerate the parseopt()/parseopt_notrack() bodies inside yacc.py.

    Reads yacc.py (after saving a .bak copy), takes the hand-maintained
    parsedebug() source, strips the DEBUG and TRACKING instrumentation from
    it, and splices the filtered versions back over the corresponding
    marked sections, rewriting yacc.py in place.
    """
    dirname = os.path.dirname(__file__)
    shutil.copy2(os.path.join(dirname, 'yacc.py'), os.path.join(dirname, 'yacc.py.bak'))
    with open(os.path.join(dirname, 'yacc.py'), 'r') as f:
        lines = f.readlines()
    parse_start, parse_end = get_source_range(lines, 'parsedebug')
    parseopt_start, parseopt_end = get_source_range(lines, 'parseopt')
    parseopt_notrack_start, parseopt_notrack_end = get_source_range(lines, 'parseopt-notrack')
    # Get the original source
    orig_lines = lines[parse_start:parse_end]
    # Filter the DEBUG sections out
    parseopt_lines = filter_section(orig_lines, 'DEBUG')
    # Filter the TRACKING sections out
    parseopt_notrack_lines = filter_section(parseopt_lines, 'TRACKING')
    # Replace the parser source sections with updated versions.
    # NOTE(review): the notrack section is spliced first, presumably because
    # it follows the parseopt section in the file so the earlier slice
    # indices stay valid -- confirm against yacc.py's section order.
    lines[parseopt_notrack_start:parseopt_notrack_end] = parseopt_notrack_lines
    lines[parseopt_start:parseopt_end] = parseopt_lines
    lines = [line.rstrip()+'\n' for line in lines]
    with open(os.path.join(dirname, 'yacc.py'), 'w') as f:
        f.writelines(lines)
    print('Updated yacc.py')
if __name__ == '__main__':
    main()
| 2,251 | Python | 29.026666 | 94 | 0.657486 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pycparser/ply/lex.py | # -----------------------------------------------------------------------------
# ply: lex.py
#
# Copyright (C) 2001-2017
# David M. Beazley (Dabeaz LLC)
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the David Beazley or Dabeaz LLC may be used to
# endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# -----------------------------------------------------------------------------
# PLY release version and the on-disk lextab table format version.
__version__ = '3.10'
__tabversion__ = '3.10'
import re
import sys
import types
import copy
import os
import inspect
# This tuple contains known string types
try:
    # Python 2.6
    StringTypes = (types.StringType, types.UnicodeType)
except AttributeError:
    # Python 3.0
    StringTypes = (str, bytes)
# This regular expression is used to match valid token names
_is_identifier = re.compile(r'^[a-zA-Z0-9_]+$')
# Raised when the lexer hits input that no rule matches and the user has
# not supplied a t_error() handler.
class LexError(Exception):
    """Lexing failure; ``text`` holds the remaining unmatched input."""
    def __init__(self, message, s):
        super().__init__(message)   # stores (message,) in self.args
        self.text = s               # unconsumed portion of the input
# A single token produced by the lexer.  The attributes (type, value,
# lineno, lexpos) are assigned externally by the matching engine.
class LexToken(object):
    """Plain record for one token; printable as LexToken(type,value,line,pos)."""
    def __repr__(self):
        return self.__str__()
    def __str__(self):
        return 'LexToken({0},{1!r},{2:d},{3:d})'.format(
            self.type, self.value, self.lineno, self.lexpos)
# Minimal stand-in for a logging.Logger: writes formatted messages to a
# file-like object handed to the constructor.
class PlyLogger(object):
    """Write %-formatted diagnostics to ``f``; mirrors the logging API."""
    def __init__(self, f):
        self.f = f
    def _emit(self, prefix, msg, args):
        # All levels funnel through here: prefix + %-interpolation + newline.
        self.f.write(prefix + (msg % args) + '\n')
    def critical(self, msg, *args, **kwargs):
        self._emit('', msg, args)
    def warning(self, msg, *args, **kwargs):
        self._emit('WARNING: ', msg, args)
    def error(self, msg, *args, **kwargs):
        self._emit('ERROR: ', msg, args)
    info = critical
    debug = critical
# Null logger is used when no output is generated. Does nothing.
class NullLogger(object):
    """Silently swallow any logging call.

    Every attribute lookup returns the instance itself and calling the
    instance is a no-op returning it, so arbitrary chains such as
    ``log.error(...)`` vanish without output.
    """
    def __getattribute__(self, name):
        # Any attribute access (info, error, warning, ...) yields self.
        return self
    def __call__(self, *args, **kwargs):
        return self
# -----------------------------------------------------------------------------
# === Lexing Engine ===
#
# The following Lexer class implements the lexer runtime. There are only
# a few public methods and attributes:
#
# input() - Store a new string in the lexer
# token() - Get the next token
# clone() - Clone the lexer
#
# lineno - Current line number
# lexpos - Current position in the input string
# -----------------------------------------------------------------------------
class Lexer:
    """PLY lexer run-time engine.

    Holds the compiled master regular expressions for every lexer state
    plus the current scanning position.  Public API: input(), token(),
    clone(), begin()/push_state()/pop_state(), plus the ``lineno`` and
    ``lexpos`` attributes.  Instances are normally built by lex(), not
    constructed directly.
    """
    def __init__(self):
        self.lexre = None             # Master regular expression. This is a list of
                                      # tuples (re, findex) where re is a compiled
                                      # regular expression and findex is a list
                                      # mapping regex group numbers to rules
        self.lexretext = None         # Current regular expression strings
        self.lexstatere = {}          # Dictionary mapping lexer states to master regexs
        self.lexstateretext = {}      # Dictionary mapping lexer states to regex strings
        self.lexstaterenames = {}     # Dictionary mapping lexer states to symbol names
        self.lexstate = 'INITIAL'     # Current lexer state
        self.lexstatestack = []       # Stack of lexer states
        self.lexstateinfo = None      # State information
        self.lexstateignore = {}      # Dictionary of ignored characters for each state
        self.lexstateerrorf = {}      # Dictionary of error functions for each state
        self.lexstateeoff = {}        # Dictionary of eof functions for each state
        self.lexreflags = 0           # Optional re compile flags
        self.lexdata = None           # Actual input data (as a string)
        self.lexpos = 0               # Current position in input text
        self.lexlen = 0               # Length of the input text
        self.lexerrorf = None         # Error rule (if any)
        self.lexeoff = None           # EOF rule (if any)
        self.lextokens = None         # List of valid tokens
        self.lexignore = ''           # Ignored characters
        self.lexliterals = ''         # Literal characters that can be passed through
        self.lexmodule = None         # Module
        self.lineno = 1               # Current line number
        self.lexoptimize = False      # Optimized mode
    def clone(self, object=None):
        c = copy.copy(self)
        # If the object parameter has been supplied, it means we are attaching the
        # lexer to a new object.  In this case, we have to rebind all methods in
        # the lexstatere and lexstateerrorf tables.
        if object:
            newtab = {}
            for key, ritem in self.lexstatere.items():
                newre = []
                for cre, findex in ritem:
                    newfindex = []
                    for f in findex:
                        if not f or not f[0]:
                            newfindex.append(f)
                            continue
                        newfindex.append((getattr(object, f[0].__name__), f[1]))
                    newre.append((cre, newfindex))
                newtab[key] = newre
            c.lexstatere = newtab
            c.lexstateerrorf = {}
            for key, ef in self.lexstateerrorf.items():
                c.lexstateerrorf[key] = getattr(object, ef.__name__)
            c.lexmodule = object
        return c
    # ------------------------------------------------------------
    # writetab() - Write lexer information to a table file
    # ------------------------------------------------------------
    def writetab(self, lextab, outputdir=''):
        if isinstance(lextab, types.ModuleType):
            raise IOError("Won't overwrite existing lextab module")
        basetabmodule = lextab.split('.')[-1]
        filename = os.path.join(outputdir, basetabmodule) + '.py'
        with open(filename, 'w') as tf:
            tf.write('# %s.py. This file automatically created by PLY (version %s). Don\'t edit!\n' % (basetabmodule, __version__))
            tf.write('_tabversion   = %s\n' % repr(__tabversion__))
            tf.write('_lextokens    = set(%s)\n' % repr(tuple(self.lextokens)))
            tf.write('_lexreflags   = %s\n' % repr(self.lexreflags))
            tf.write('_lexliterals  = %s\n' % repr(self.lexliterals))
            tf.write('_lexstateinfo = %s\n' % repr(self.lexstateinfo))
            # Rewrite the lexstatere table, replacing function objects with function names
            tabre = {}
            for statename, lre in self.lexstatere.items():
                titem = []
                for (pat, func), retext, renames in zip(lre, self.lexstateretext[statename], self.lexstaterenames[statename]):
                    titem.append((retext, _funcs_to_names(func, renames)))
                tabre[statename] = titem
            tf.write('_lexstatere   = %s\n' % repr(tabre))
            tf.write('_lexstateignore = %s\n' % repr(self.lexstateignore))
            taberr = {}
            for statename, ef in self.lexstateerrorf.items():
                taberr[statename] = ef.__name__ if ef else None
            tf.write('_lexstateerrorf = %s\n' % repr(taberr))
            tabeof = {}
            for statename, ef in self.lexstateeoff.items():
                tabeof[statename] = ef.__name__ if ef else None
            tf.write('_lexstateeoff = %s\n' % repr(tabeof))
    # ------------------------------------------------------------
    # readtab() - Read lexer information from a tab file
    # ------------------------------------------------------------
    def readtab(self, tabfile, fdict):
        if isinstance(tabfile, types.ModuleType):
            lextab = tabfile
        else:
            exec('import %s' % tabfile)
            lextab = sys.modules[tabfile]
        if getattr(lextab, '_tabversion', '0.0') != __tabversion__:
            raise ImportError('Inconsistent PLY version')
        self.lextokens      = lextab._lextokens
        self.lexreflags     = lextab._lexreflags
        self.lexliterals    = lextab._lexliterals
        self.lextokens_all  = self.lextokens | set(self.lexliterals)
        self.lexstateinfo   = lextab._lexstateinfo
        self.lexstateignore = lextab._lexstateignore
        self.lexstatere     = {}
        self.lexstateretext = {}
        for statename, lre in lextab._lexstatere.items():
            titem = []
            txtitem = []
            for pat, func_name in lre:
                titem.append((re.compile(pat, lextab._lexreflags), _names_to_funcs(func_name, fdict)))
            self.lexstatere[statename] = titem
            self.lexstateretext[statename] = txtitem
        self.lexstateerrorf = {}
        for statename, ef in lextab._lexstateerrorf.items():
            self.lexstateerrorf[statename] = fdict[ef]
        self.lexstateeoff = {}
        for statename, ef in lextab._lexstateeoff.items():
            self.lexstateeoff[statename] = fdict[ef]
        self.begin('INITIAL')
    # ------------------------------------------------------------
    # input() - Push a new string into the lexer
    # ------------------------------------------------------------
    def input(self, s):
        # Pull off the first character to see if s looks like a string
        c = s[:1]
        if not isinstance(c, StringTypes):
            raise ValueError('Expected a string')
        self.lexdata = s
        self.lexpos = 0
        self.lexlen = len(s)
    # ------------------------------------------------------------
    # begin() - Changes the lexing state
    # ------------------------------------------------------------
    def begin(self, state):
        if state not in self.lexstatere:
            raise ValueError('Undefined state')
        self.lexre = self.lexstatere[state]
        self.lexretext = self.lexstateretext[state]
        self.lexignore = self.lexstateignore.get(state, '')
        self.lexerrorf = self.lexstateerrorf.get(state, None)
        self.lexeoff = self.lexstateeoff.get(state, None)
        self.lexstate = state
    # ------------------------------------------------------------
    # push_state() - Changes the lexing state and saves old on stack
    # ------------------------------------------------------------
    def push_state(self, state):
        self.lexstatestack.append(self.lexstate)
        self.begin(state)
    # ------------------------------------------------------------
    # pop_state() - Restores the previous state
    # ------------------------------------------------------------
    def pop_state(self):
        self.begin(self.lexstatestack.pop())
    # ------------------------------------------------------------
    # current_state() - Returns the current lexing state
    # ------------------------------------------------------------
    def current_state(self):
        return self.lexstate
    # ------------------------------------------------------------
    # skip() - Skip ahead n characters
    # ------------------------------------------------------------
    def skip(self, n):
        self.lexpos += n
    # ------------------------------------------------------------
    # opttoken() - Return the next token from the Lexer
    #
    # Note: This function has been carefully implemented to be as fast
    # as possible.  Don't make changes unless you really know what
    # you are doing
    # ------------------------------------------------------------
    def token(self):
        # Make local copies of frequently referenced attributes
        lexpos    = self.lexpos
        lexlen    = self.lexlen
        lexignore = self.lexignore
        lexdata   = self.lexdata
        while lexpos < lexlen:
            # This code provides some short-circuit code for whitespace, tabs, and other ignored characters
            if lexdata[lexpos] in lexignore:
                lexpos += 1
                continue
            # Look for a regular expression match
            for lexre, lexindexfunc in self.lexre:
                m = lexre.match(lexdata, lexpos)
                if not m:
                    continue
                # Create a token for return
                tok = LexToken()
                tok.value = m.group()
                tok.lineno = self.lineno
                tok.lexpos = lexpos
                i = m.lastindex
                func, tok.type = lexindexfunc[i]
                if not func:
                    # If no token type was set, it's an ignored token
                    if tok.type:
                        self.lexpos = m.end()
                        return tok
                    else:
                        lexpos = m.end()
                        break
                lexpos = m.end()
                # If token is processed by a function, call it
                tok.lexer = self      # Set additional attributes useful in token rules
                self.lexmatch = m
                self.lexpos = lexpos
                newtok = func(tok)
                # Every function must return a token, if nothing, we just move to next token
                if not newtok:
                    lexpos    = self.lexpos         # This is here in case user has updated lexpos.
                    lexignore = self.lexignore      # This is here in case there was a state change
                    break
                # Verify type of the token.  If not in the token map, raise an error
                if not self.lexoptimize:
                    if newtok.type not in self.lextokens_all:
                        raise LexError("%s:%d: Rule '%s' returned an unknown token type '%s'" % (
                            func.__code__.co_filename, func.__code__.co_firstlineno,
                            func.__name__, newtok.type), lexdata[lexpos:])
                return newtok
            else:
                # No match, see if in literals
                if lexdata[lexpos] in self.lexliterals:
                    tok = LexToken()
                    tok.value = lexdata[lexpos]
                    tok.lineno = self.lineno
                    tok.type = tok.value
                    tok.lexpos = lexpos
                    self.lexpos = lexpos + 1
                    return tok
                # No match. Call t_error() if defined.
                if self.lexerrorf:
                    tok = LexToken()
                    tok.value = self.lexdata[lexpos:]
                    tok.lineno = self.lineno
                    tok.type = 'error'
                    tok.lexer = self
                    tok.lexpos = lexpos
                    self.lexpos = lexpos
                    newtok = self.lexerrorf(tok)
                    if lexpos == self.lexpos:
                        # Error method didn't change text position at all. This is an error.
                        raise LexError("Scanning error. Illegal character '%s'" % (lexdata[lexpos]), lexdata[lexpos:])
                    lexpos = self.lexpos
                    if not newtok:
                        continue
                    return newtok
                self.lexpos = lexpos
                raise LexError("Illegal character '%s' at index %d" % (lexdata[lexpos], lexpos), lexdata[lexpos:])
        if self.lexeoff:
            tok = LexToken()
            tok.type = 'eof'
            tok.value = ''
            tok.lineno = self.lineno
            tok.lexpos = lexpos
            tok.lexer = self
            self.lexpos = lexpos
            newtok = self.lexeoff(tok)
            return newtok
        self.lexpos = lexpos + 1
        if self.lexdata is None:
            raise RuntimeError('No input string given with input()')
        return None
    # Iterator interface
    def __iter__(self):
        return self
    def next(self):
        t = self.token()
        if t is None:
            raise StopIteration
        return t
    __next__ = next
# -----------------------------------------------------------------------------
# ==== Lex Builder ===
#
# The functions and classes below are used to collect lexing information
# and build a Lexer object from it.
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# _get_regex(func)
#
# Returns the regular expression assigned to a function either as a doc string
# or as a .regex attribute attached by the @TOKEN decorator.
# -----------------------------------------------------------------------------
def _get_regex(func):
return getattr(func, 'regex', func.__doc__)
# -----------------------------------------------------------------------------
# get_caller_module_dict()
#
# This function returns a dictionary containing all of the symbols defined within
# a caller further down the call stack. This is used to get the environment
# associated with the yacc() call if none was provided.
# -----------------------------------------------------------------------------
def get_caller_module_dict(levels):
    """Return the symbol table of the frame ``levels`` calls up the stack.

    Globals are copied first, then shadowed by locals when the frame is not
    executing at module level (where globals and locals are the same dict).
    """
    frame = sys._getframe(levels)
    symbols = dict(frame.f_globals)
    if frame.f_globals != frame.f_locals:
        symbols.update(frame.f_locals)
    return symbols
# -----------------------------------------------------------------------------
# _funcs_to_names()
#
# Given a list of regular expression functions, this converts it to a list
# suitable for output to a table file
# -----------------------------------------------------------------------------
def _funcs_to_names(funclist, namelist):
result = []
for f, name in zip(funclist, namelist):
if f and f[0]:
result.append((name, f[1]))
else:
result.append(f)
return result
# -----------------------------------------------------------------------------
# _names_to_funcs()
#
# Given a list of regular expression function names, this converts it back to
# functions.
# -----------------------------------------------------------------------------
def _names_to_funcs(namelist, fdict):
result = []
for n in namelist:
if n and n[0]:
result.append((fdict[n[0]], n[1]))
else:
result.append(n)
return result
# -----------------------------------------------------------------------------
# _form_master_re()
#
# This function takes a list of all of the regex components and attempts to
# form the master regular expression. Given limitations in the Python re
# module, it may be necessary to break the master regex into separate expressions.
# -----------------------------------------------------------------------------
def _form_master_re(relist, reflags, ldict, toknames):
    """Combine individual rule regexes into master regular expressions.

    Joins all patterns with '|' and compiles them; if the combined pattern
    exceeds Python's re limits (compile raises), the list is split in half
    and each half is combined recursively.  Returns three parallel lists:
    compiled (regex, group-index-table) pairs, the regex source strings,
    and the rule names per group index.
    """
    if not relist:
        return []
    regex = '|'.join(relist)
    try:
        lexre = re.compile(regex, reflags)
        # Build the index to function map for the matching engine
        lexindexfunc = [None] * (max(lexre.groupindex.values()) + 1)
        lexindexnames = lexindexfunc[:]
        for f, i in lexre.groupindex.items():
            handle = ldict.get(f, None)
            if type(handle) in (types.FunctionType, types.MethodType):
                lexindexfunc[i] = (handle, toknames[f])
                lexindexnames[i] = f
            elif handle is not None:
                lexindexnames[i] = f
                # Rules named t_..._ignore_... carry no token; the (None, None)
                # entry tells the engine to discard the match silently.
                if f.find('ignore_') > 0:
                    lexindexfunc[i] = (None, None)
                else:
                    lexindexfunc[i] = (None, toknames[f])
        return [(lexre, lexindexfunc)], [regex], [lexindexnames]
    except Exception:
        m = int(len(relist)/2)
        if m == 0:
            m = 1
        llist, lre, lnames = _form_master_re(relist[:m], reflags, ldict, toknames)
        rlist, rre, rnames = _form_master_re(relist[m:], reflags, ldict, toknames)
        return (llist+rlist), (lre+rre), (lnames+rnames)
# -----------------------------------------------------------------------------
# def _statetoken(s,names)
#
# Given a declaration name s of the form "t_" and a dictionary whose keys are
# state names, this function returns a tuple (states,tokenname) where states
# is a tuple of state names and tokenname is the name of the token. For example,
# calling this with s = "t_foo_bar_SPAM" might return (('foo','bar'),'SPAM')
# -----------------------------------------------------------------------------
def _statetoken(s, names):
nonstate = 1
parts = s.split('_')
for i, part in enumerate(parts[1:], 1):
if part not in names and part != 'ANY':
break
if i > 1:
states = tuple(parts[1:i])
else:
states = ('INITIAL',)
if 'ANY' in states:
states = tuple(names)
tokenname = '_'.join(parts[i:])
return (states, tokenname)
# -----------------------------------------------------------------------------
# LexerReflect()
#
# This class represents information needed to build a lexer as extracted from a
# user's input file.
# -----------------------------------------------------------------------------
class LexerReflect(object):
    """Collects and validates lexer rules from a module's symbol table.

    ``ldict`` is the dictionary of the module (or object) defining the
    rules.  get_all() harvests tokens/literals/states/t_ rules into
    attributes; validate_all() cross-checks them, logging problems via
    ``log`` and setting ``self.error``.  Used by lex() to build a Lexer.
    """
    def __init__(self, ldict, log=None, reflags=0):
        self.ldict      = ldict
        self.error_func = None
        self.tokens     = []
        self.reflags    = reflags
        self.stateinfo  = {'INITIAL': 'inclusive'}
        self.modules    = set()
        self.error      = False
        self.log        = PlyLogger(sys.stderr) if log is None else log
    # Get all of the basic information
    def get_all(self):
        self.get_tokens()
        self.get_literals()
        self.get_states()
        self.get_rules()
    # Validate all of the information
    def validate_all(self):
        self.validate_tokens()
        self.validate_literals()
        self.validate_rules()
        return self.error
    # Get the tokens map
    def get_tokens(self):
        tokens = self.ldict.get('tokens', None)
        if not tokens:
            self.log.error('No token list is defined')
            self.error = True
            return
        if not isinstance(tokens, (list, tuple)):
            self.log.error('tokens must be a list or tuple')
            self.error = True
            return
        if not tokens:
            self.log.error('tokens is empty')
            self.error = True
            return
        self.tokens = tokens
    # Validate the tokens
    def validate_tokens(self):
        terminals = {}
        for n in self.tokens:
            if not _is_identifier.match(n):
                self.log.error("Bad token name '%s'", n)
                self.error = True
            if n in terminals:
                self.log.warning("Token '%s' multiply defined", n)
            terminals[n] = 1
    # Get the literals specifier
    def get_literals(self):
        self.literals = self.ldict.get('literals', '')
        if not self.literals:
            self.literals = ''
    # Validate literals
    def validate_literals(self):
        try:
            for c in self.literals:
                if not isinstance(c, StringTypes) or len(c) > 1:
                    self.log.error('Invalid literal %s. Must be a single character', repr(c))
                    self.error = True
        except TypeError:
            self.log.error('Invalid literals specification. literals must be a sequence of characters')
            self.error = True
    def get_states(self):
        self.states = self.ldict.get('states', None)
        # Build statemap
        if self.states:
            if not isinstance(self.states, (tuple, list)):
                self.log.error('states must be defined as a tuple or list')
                self.error = True
            else:
                for s in self.states:
                    if not isinstance(s, tuple) or len(s) != 2:
                        self.log.error("Invalid state specifier %s. Must be a tuple (statename,'exclusive|inclusive')", repr(s))
                        self.error = True
                        continue
                    name, statetype = s
                    if not isinstance(name, StringTypes):
                        self.log.error('State name %s must be a string', repr(name))
                        self.error = True
                        continue
                    if not (statetype == 'inclusive' or statetype == 'exclusive'):
                        self.log.error("State type for state %s must be 'inclusive' or 'exclusive'", name)
                        self.error = True
                        continue
                    if name in self.stateinfo:
                        self.log.error("State '%s' already defined", name)
                        self.error = True
                        continue
                    self.stateinfo[name] = statetype
    # Get all of the symbols with a t_ prefix and sort them into various
    # categories (functions, strings, error functions, and ignore characters)
    def get_rules(self):
        tsymbols = [f for f in self.ldict if f[:2] == 't_']
        # Now build up a list of functions and a list of strings
        self.toknames = {}        # Mapping of symbols to token names
        self.funcsym  = {}        # Symbols defined as functions
        self.strsym   = {}        # Symbols defined as strings
        self.ignore   = {}        # Ignore strings by state
        self.errorf   = {}        # Error functions by state
        self.eoff     = {}        # EOF functions by state
        for s in self.stateinfo:
            self.funcsym[s] = []
            self.strsym[s] = []
        if len(tsymbols) == 0:
            self.log.error('No rules of the form t_rulename are defined')
            self.error = True
            return
        for f in tsymbols:
            t = self.ldict[f]
            states, tokname = _statetoken(f, self.stateinfo)
            self.toknames[f] = tokname
            if hasattr(t, '__call__'):
                if tokname == 'error':
                    for s in states:
                        self.errorf[s] = t
                elif tokname == 'eof':
                    for s in states:
                        self.eoff[s] = t
                elif tokname == 'ignore':
                    line = t.__code__.co_firstlineno
                    file = t.__code__.co_filename
                    self.log.error("%s:%d: Rule '%s' must be defined as a string", file, line, t.__name__)
                    self.error = True
                else:
                    for s in states:
                        self.funcsym[s].append((f, t))
            elif isinstance(t, StringTypes):
                if tokname == 'ignore':
                    for s in states:
                        self.ignore[s] = t
                    if '\\' in t:
                        self.log.warning("%s contains a literal backslash '\\'", f)
                elif tokname == 'error':
                    self.log.error("Rule '%s' must be defined as a function", f)
                    self.error = True
                else:
                    for s in states:
                        self.strsym[s].append((f, t))
            else:
                self.log.error('%s not defined as a function or string', f)
                self.error = True
        # Sort the functions by line number
        for f in self.funcsym.values():
            f.sort(key=lambda x: x[1].__code__.co_firstlineno)
        # Sort the strings by regular expression length
        for s in self.strsym.values():
            s.sort(key=lambda x: len(x[1]), reverse=True)
    # Validate all of the t_rules collected
    def validate_rules(self):
        for state in self.stateinfo:
            # Validate all rules defined by functions
            for fname, f in self.funcsym[state]:
                line = f.__code__.co_firstlineno
                file = f.__code__.co_filename
                module = inspect.getmodule(f)
                self.modules.add(module)
                tokname = self.toknames[fname]
                if isinstance(f, types.MethodType):
                    reqargs = 2
                else:
                    reqargs = 1
                nargs = f.__code__.co_argcount
                if nargs > reqargs:
                    self.log.error("%s:%d: Rule '%s' has too many arguments", file, line, f.__name__)
                    self.error = True
                    continue
                if nargs < reqargs:
                    self.log.error("%s:%d: Rule '%s' requires an argument", file, line, f.__name__)
                    self.error = True
                    continue
                if not _get_regex(f):
                    self.log.error("%s:%d: No regular expression defined for rule '%s'", file, line, f.__name__)
                    self.error = True
                    continue
                try:
                    c = re.compile('(?P<%s>%s)' % (fname, _get_regex(f)), self.reflags)
                    if c.match(''):
                        self.log.error("%s:%d: Regular expression for rule '%s' matches empty string", file, line, f.__name__)
                        self.error = True
                except re.error as e:
                    self.log.error("%s:%d: Invalid regular expression for rule '%s'. %s", file, line, f.__name__, e)
                    if '#' in _get_regex(f):
                        self.log.error("%s:%d. Make sure '#' in rule '%s' is escaped with '\\#'", file, line, f.__name__)
                    self.error = True
            # Validate all rules defined by strings
            for name, r in self.strsym[state]:
                tokname = self.toknames[name]
                if tokname == 'error':
                    self.log.error("Rule '%s' must be defined as a function", name)
                    self.error = True
                    continue
                if tokname not in self.tokens and tokname.find('ignore_') < 0:
                    self.log.error("Rule '%s' defined for an unspecified token %s", name, tokname)
                    self.error = True
                    continue
                try:
                    c = re.compile('(?P<%s>%s)' % (name, r), self.reflags)
                    if (c.match('')):
                        self.log.error("Regular expression for rule '%s' matches empty string", name)
                        self.error = True
                except re.error as e:
                    self.log.error("Invalid regular expression for rule '%s'. %s", name, e)
                    if '#' in r:
                        self.log.error("Make sure '#' in rule '%s' is escaped with '\\#'", name)
                    self.error = True
            if not self.funcsym[state] and not self.strsym[state]:
                self.log.error("No rules defined for state '%s'", state)
                self.error = True
            # Validate the error function
            efunc = self.errorf.get(state, None)
            if efunc:
                f = efunc
                line = f.__code__.co_firstlineno
                file = f.__code__.co_filename
                module = inspect.getmodule(f)
                self.modules.add(module)
                if isinstance(f, types.MethodType):
                    reqargs = 2
                else:
                    reqargs = 1
                nargs = f.__code__.co_argcount
                if nargs > reqargs:
                    self.log.error("%s:%d: Rule '%s' has too many arguments", file, line, f.__name__)
                    self.error = True
                if nargs < reqargs:
                    self.log.error("%s:%d: Rule '%s' requires an argument", file, line, f.__name__)
                    self.error = True
        for module in self.modules:
            self.validate_module(module)
    # -----------------------------------------------------------------------------
    # validate_module()
    #
    # This checks to see if there are duplicated t_rulename() functions or strings
    # in the parser input file.  This is done using a simple regular expression
    # match on each line in the source code of the given module.
    # -----------------------------------------------------------------------------
    def validate_module(self, module):
        try:
            lines, linen = inspect.getsourcelines(module)
        except IOError:
            return
        fre = re.compile(r'\s*def\s+(t_[a-zA-Z_0-9]*)\(')
        sre = re.compile(r'\s*(t_[a-zA-Z_0-9]*)\s*=')
        counthash = {}
        linen += 1
        for line in lines:
            m = fre.match(line)
            if not m:
                m = sre.match(line)
            if m:
                name = m.group(1)
                prev = counthash.get(name)
                if not prev:
                    counthash[name] = linen
                else:
                    filename = inspect.getsourcefile(module)
                    self.log.error('%s:%d: Rule %s redefined. Previously defined on line %d', filename, linen, name, prev)
                    self.error = True
            linen += 1
# -----------------------------------------------------------------------------
# lex(module)
#
# Build all of the regular expression rules from definitions in the supplied module
# -----------------------------------------------------------------------------
def lex(module=None, object=None, debug=False, optimize=False, lextab='lextab',
        reflags=int(re.VERBOSE), nowarn=False, outputdir=None, debuglog=None, errorlog=None):
    """Build and return a Lexer from rule definitions.

    Rules are harvested from ``object`` if given, else ``module``, else the
    caller's namespace.  With ``optimize`` true, a previously written
    ``lextab`` table module is loaded when compatible (skipping
    validation), and the tables are written back out afterwards.
    Also rebinds the module-level ``lexer``, ``token`` and ``input``
    shortcuts to the new Lexer instance.
    """
    if lextab is None:
        lextab = 'lextab'
    global lexer
    ldict = None
    stateinfo  = {'INITIAL': 'inclusive'}
    lexobj = Lexer()
    lexobj.lexoptimize = optimize
    global token, input
    if errorlog is None:
        errorlog = PlyLogger(sys.stderr)
    if debug:
        if debuglog is None:
            debuglog = PlyLogger(sys.stderr)
    # Get the module dictionary used for the lexer
    if object:
        module = object
    # Get the module dictionary used for the parser
    if module:
        _items = [(k, getattr(module, k)) for k in dir(module)]
        ldict = dict(_items)
        # If no __file__ attribute is available, try to obtain it from the __module__ instead
        if '__file__' not in ldict:
            ldict['__file__'] = sys.modules[ldict['__module__']].__file__
    else:
        ldict = get_caller_module_dict(2)
    # Determine if the module is package of a package or not.
    # If so, fix the tabmodule setting so that tables load correctly
    pkg = ldict.get('__package__')
    if pkg and isinstance(lextab, str):
        if '.' not in lextab:
            lextab = pkg + '.' + lextab
    # Collect parser information from the dictionary
    linfo = LexerReflect(ldict, log=errorlog, reflags=reflags)
    linfo.get_all()
    if not optimize:
        if linfo.validate_all():
            raise SyntaxError("Can't build lexer")
    if optimize and lextab:
        try:
            lexobj.readtab(lextab, ldict)
            token = lexobj.token
            input = lexobj.input
            lexer = lexobj
            return lexobj
        except ImportError:
            pass
    # Dump some basic debugging information
    if debug:
        debuglog.info('lex: tokens   = %r', linfo.tokens)
        debuglog.info('lex: literals = %r', linfo.literals)
        debuglog.info('lex: states   = %r', linfo.stateinfo)
    # Build a dictionary of valid token names
    lexobj.lextokens = set()
    for n in linfo.tokens:
        lexobj.lextokens.add(n)
    # Get literals specification
    if isinstance(linfo.literals, (list, tuple)):
        lexobj.lexliterals = type(linfo.literals[0])().join(linfo.literals)
    else:
        lexobj.lexliterals = linfo.literals
    lexobj.lextokens_all = lexobj.lextokens | set(lexobj.lexliterals)
    # Get the stateinfo dictionary
    stateinfo = linfo.stateinfo
    regexs = {}
    # Build the master regular expressions
    for state in stateinfo:
        regex_list = []
        # Add rules defined by functions first
        for fname, f in linfo.funcsym[state]:
            line = f.__code__.co_firstlineno
            file = f.__code__.co_filename
            regex_list.append('(?P<%s>%s)' % (fname, _get_regex(f)))
            if debug:
                debuglog.info("lex: Adding rule %s -> '%s' (state '%s')", fname, _get_regex(f), state)
        # Now add all of the simple rules
        for name, r in linfo.strsym[state]:
            regex_list.append('(?P<%s>%s)' % (name, r))
            if debug:
                debuglog.info("lex: Adding rule %s -> '%s' (state '%s')", name, r, state)
        regexs[state] = regex_list
    # Build the master regular expressions
    if debug:
        debuglog.info('lex: ==== MASTER REGEXS FOLLOW ====')
    for state in regexs:
        lexre, re_text, re_names = _form_master_re(regexs[state], reflags, ldict, linfo.toknames)
        lexobj.lexstatere[state] = lexre
        lexobj.lexstateretext[state] = re_text
        lexobj.lexstaterenames[state] = re_names
        if debug:
            for i, text in enumerate(re_text):
                debuglog.info("lex: state '%s' : regex[%d] = '%s'", state, i, text)
    # For inclusive states, we need to add the regular expressions from the INITIAL state
    for state, stype in stateinfo.items():
        if state != 'INITIAL' and stype == 'inclusive':
            lexobj.lexstatere[state].extend(lexobj.lexstatere['INITIAL'])
            lexobj.lexstateretext[state].extend(lexobj.lexstateretext['INITIAL'])
            lexobj.lexstaterenames[state].extend(lexobj.lexstaterenames['INITIAL'])
    lexobj.lexstateinfo = stateinfo
    lexobj.lexre = lexobj.lexstatere['INITIAL']
    lexobj.lexretext = lexobj.lexstateretext['INITIAL']
    lexobj.lexreflags = reflags
    # Set up ignore variables
    lexobj.lexstateignore = linfo.ignore
    lexobj.lexignore = lexobj.lexstateignore.get('INITIAL', '')
    # Set up error functions
    lexobj.lexstateerrorf = linfo.errorf
    lexobj.lexerrorf = linfo.errorf.get('INITIAL', None)
    if not lexobj.lexerrorf:
        errorlog.warning('No t_error rule is defined')
    # Set up eof functions
    lexobj.lexstateeoff = linfo.eoff
    lexobj.lexeoff = linfo.eoff.get('INITIAL', None)
    # Check state information for ignore and error rules
    for s, stype in stateinfo.items():
        if stype == 'exclusive':
            if s not in linfo.errorf:
                errorlog.warning("No error rule is defined for exclusive state '%s'", s)
            if s not in linfo.ignore and lexobj.lexignore:
                errorlog.warning("No ignore rule is defined for exclusive state '%s'", s)
        elif stype == 'inclusive':
            if s not in linfo.errorf:
                linfo.errorf[s] = linfo.errorf.get('INITIAL', None)
            if s not in linfo.ignore:
                linfo.ignore[s] = linfo.ignore.get('INITIAL', '')
    # Create global versions of the token() and input() functions
    token = lexobj.token
    input = lexobj.input
    lexer = lexobj
    # If in optimize mode, we write the lextab
    if lextab and optimize:
        if outputdir is None:
            # If no output directory is set, the location of the output files
            # is determined according to the following rules:
            #     - If lextab specifies a package, files go into that package directory
            #     - Otherwise, files go in the same directory as the specifying module
            if isinstance(lextab, types.ModuleType):
                srcfile = lextab.__file__
            else:
                if '.' not in lextab:
                    srcfile = ldict['__file__']
                else:
                    parts = lextab.split('.')
                    pkgname = '.'.join(parts[:-1])
                    exec('import %s' % pkgname)
                    srcfile = getattr(sys.modules[pkgname], '__file__', '')
            outputdir = os.path.dirname(srcfile)
        try:
            lexobj.writetab(lextab, outputdir)
        except IOError as e:
            errorlog.warning("Couldn't write lextab module %r. %s" % (lextab, e))
    return lexobj
# -----------------------------------------------------------------------------
# runmain()
#
# This runs the lexer as a main program
# -----------------------------------------------------------------------------
def runmain(lexer=None, data=None):
    """Drive a lexer as a standalone program.

    Tokenizes *data* (or, if not given, the file named in sys.argv[1],
    falling back to stdin) and prints one ``(type,value,lineno,lexpos)``
    line per token to stdout.

    :param lexer: lexer object providing ``input()`` and ``token()``;
                  defaults to the module-level lexer built by ``lex()``.
    :param data:  input text; read from a file/stdin when falsy.
    """
    if not data:
        try:
            filename = sys.argv[1]
            # Context manager guarantees the file is closed even if read() fails
            # (the original leaked the handle on a read error).
            with open(filename) as f:
                data = f.read()
        except IndexError:
            # No filename on the command line -> interactive stdin.
            sys.stdout.write('Reading from standard input (type EOF to end):\n')
            data = sys.stdin.read()
    # Fall back to the module-level global functions set up by lex().
    if lexer:
        _input = lexer.input
    else:
        _input = input
    _input(data)
    if lexer:
        _token = lexer.token
    else:
        _token = token
    while True:
        tok = _token()
        if not tok:
            break
        sys.stdout.write('(%s,%r,%d,%d)\n' % (tok.type, tok.value, tok.lineno, tok.lexpos))
# -----------------------------------------------------------------------------
# @TOKEN(regex)
#
# Decorator that attaches a regular expression to a token rule function, for
# use when the function's docstring cannot be set directly.
# -----------------------------------------------------------------------------
def TOKEN(r):
    def set_regex(f):
        # A callable argument is another rule function: reuse its regex;
        # anything else (a string) is taken as the regex itself.
        f.regex = _get_regex(r) if callable(r) else r
        return f
    return set_regex


# Alternative spelling of the TOKEN decorator
Token = TOKEN
| 42,918 | Python | 38.017273 | 131 | 0.507386 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/sniffio/_version.py | # This file is imported from __init__.py and exec'd from setup.py
__version__ = "1.3.0"
| 89 | Python | 21.499995 | 65 | 0.640449 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/sniffio/_impl.py | from contextvars import ContextVar
from typing import Optional
import sys
import threading
current_async_library_cvar = ContextVar(
    "current_async_library_cvar", default=None
)  # type: ContextVar[Optional[str]]


class _ThreadLocal(threading.local):
    # threading.local offers no way to declare a default, so a class
    # attribute serves as the per-thread fallback value.
    name: Optional[str] = None


thread_local = _ThreadLocal()


class AsyncLibraryNotFoundError(RuntimeError):
    pass


def current_async_library() -> str:
    """Detect which async library is currently running.

    The following libraries are currently supported:

    ================ =========== ============================
    Library          Requires    Magic string
    ================ =========== ============================
    **Trio**         Trio v0.6+  ``"trio"``
    **Curio**        -           ``"curio"``
    **asyncio**                  ``"asyncio"``
    **Trio-asyncio** v0.8.2+     ``"trio"`` or ``"asyncio"``,
                                 depending on current mode
    ================ =========== ============================

    Returns:
      A string like ``"trio"``.

    Raises:
      AsyncLibraryNotFoundError: if called from synchronous context,
        or if the current async library was not recognized.
    """
    # Explicit declarations win: first the thread-local override, then the
    # ContextVar (set by libraries such as trio/trio-asyncio).
    for declared in (thread_local.name, current_async_library_cvar.get()):
        if declared is not None:
            return declared
    # No declaration -> sniff for a running asyncio task.
    if "asyncio" in sys.modules:
        import asyncio
        get_task = getattr(asyncio, "current_task", None)
        if get_task is None:
            # Pre-3.7 spelling of the same accessor.
            get_task = asyncio.Task.current_task
        try:
            running = get_task() is not None
        except RuntimeError:
            # No running event loop in this thread.
            running = False
        if running:
            return "asyncio"
    # Sniff for curio (for now)
    if 'curio' in sys.modules:
        from curio.meta import curio_running
        if curio_running():
            return 'curio'
    raise AsyncLibraryNotFoundError(
        "unknown async library, or not in async context"
    )
| 2,843 | Python | 28.625 | 82 | 0.539923 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/sniffio/__init__.py | """Top-level package for sniffio."""
__all__ = [
"current_async_library", "AsyncLibraryNotFoundError",
"current_async_library_cvar"
]
from ._version import __version__
from ._impl import (
current_async_library,
AsyncLibraryNotFoundError,
current_async_library_cvar,
thread_local,
)
| 310 | Python | 18.437499 | 57 | 0.680645 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/sniffio/_tests/test_sniffio.py | import os
import sys
import pytest
from .. import (
current_async_library, AsyncLibraryNotFoundError,
current_async_library_cvar, thread_local
)
def test_basics_cvar():
    """ContextVar override: visible while set, detection fails after reset."""
    with pytest.raises(AsyncLibraryNotFoundError):
        current_async_library()
    token = current_async_library_cvar.set("generic-lib")
    try:
        assert current_async_library() == "generic-lib"
    finally:
        # Always undo the override so later tests see a clean state.
        current_async_library_cvar.reset(token)
    with pytest.raises(AsyncLibraryNotFoundError):
        current_async_library()
def test_basics_tlocal():
    """Thread-local override: visible while set, detection fails after restore."""
    with pytest.raises(AsyncLibraryNotFoundError):
        current_async_library()
    old_name, thread_local.name = thread_local.name, "generic-lib"
    try:
        assert current_async_library() == "generic-lib"
    finally:
        # Restore the previous value even if the assertion fails.
        thread_local.name = old_name
    with pytest.raises(AsyncLibraryNotFoundError):
        current_async_library()
def test_asyncio():
    """Detection returns "asyncio" inside a running asyncio task, not outside."""
    import asyncio
    with pytest.raises(AsyncLibraryNotFoundError):
        current_async_library()
    ran = []
    async def this_is_asyncio():
        assert current_async_library() == "asyncio"
        # Call it a second time to exercise the caching logic
        assert current_async_library() == "asyncio"
        ran.append(True)
    asyncio.run(this_is_asyncio())
    # Prove the coroutine body actually executed.
    assert ran == [True]
    with pytest.raises(AsyncLibraryNotFoundError):
        current_async_library()
# https://github.com/dabeaz/curio/pull/354
@pytest.mark.skipif(
    os.name == "nt" and sys.version_info >= (3, 9),
    reason="Curio breaks on Python 3.9+ on Windows. Fix was not released yet",
)
def test_curio():
    """Detection returns "curio" inside a running curio kernel, not outside."""
    import curio
    with pytest.raises(AsyncLibraryNotFoundError):
        current_async_library()
    ran = []
    async def this_is_curio():
        assert current_async_library() == "curio"
        # Call it a second time to exercise the caching logic
        assert current_async_library() == "curio"
        ran.append(True)
    curio.run(this_is_curio)
    # Prove the coroutine body actually executed.
    assert ran == [True]
    with pytest.raises(AsyncLibraryNotFoundError):
        current_async_library()
| 2,110 | Python | 23.835294 | 78 | 0.661137 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pyrsistent/_pclass.py | from pyrsistent._checked_types import (InvariantException, CheckedType, _restore_pickle, store_invariants)
from pyrsistent._field_common import (
set_fields, check_type, is_field_ignore_extra_complaint, PFIELD_NO_INITIAL, serialize, check_global_invariants
)
from pyrsistent._transformations import transform
def _is_pclass(bases):
    # True only when the class derives directly (and solely) from CheckedType,
    # i.e. for PClass itself rather than its subclasses.
    if len(bases) != 1:
        return False
    return bases[0] == CheckedType
class PClassMeta(type):
    # Metaclass that collects field()/invariant declarations from the class
    # body (and bases) into _pclass_fields/_pclass_invariants and builds a
    # matching __slots__ tuple so instances carry no __dict__.
    def __new__(mcs, name, bases, dct):
        set_fields(dct, bases, name='_pclass_fields')
        store_invariants(dct, bases, '_pclass_invariants', '__invariant__')
        # One slot per declared field, plus the freeze flag set by PClass.__new__.
        dct['__slots__'] = ('_pclass_frozen',) + tuple(key for key in dct['_pclass_fields'])
        # There must only be one __weakref__ entry in the inheritance hierarchy,
        # lets put it on the top level class.
        if _is_pclass(bases):
            dct['__slots__'] += ('__weakref__',)
        return super(PClassMeta, mcs).__new__(mcs, name, bases, dct)
# Sentinel distinguishing "field not set" from a legitimate None value.
_MISSING_VALUE = object()


def _check_and_set_attr(cls, field, name, value, result, invariant_errors):
    # Type-check first (raises on mismatch), then run the field invariant;
    # the value is only stored when the invariant holds.
    check_type(cls, field, name, value)
    ok, error_code = field.invariant(value)
    if ok:
        setattr(result, name, value)
    else:
        invariant_errors.append(error_code)
class PClass(CheckedType, metaclass=PClassMeta):
    """
    A PClass is a python class with a fixed set of specified fields. PClasses are declared as python classes inheriting
    from PClass. It is defined the same way that PRecords are and behaves like a PRecord in all aspects except that it
    is not a PMap and hence not a collection but rather a plain Python object.
    More documentation and examples of PClass usage is available at https://github.com/tobgu/pyrsistent
    """
    def __new__(cls, **kwargs): # Support *args?
        result = super(PClass, cls).__new__(cls)
        # _factory_fields: internal kwarg from create()/evolver listing the
        # fields whose factories should run; others are stored as-is.
        factory_fields = kwargs.pop('_factory_fields', None)
        ignore_extra = kwargs.pop('ignore_extra', None)
        missing_fields = []
        invariant_errors = []
        for name, field in cls._pclass_fields.items():
            if name in kwargs:
                if factory_fields is None or name in factory_fields:
                    if is_field_ignore_extra_complaint(PClass, field, ignore_extra):
                        value = field.factory(kwargs[name], ignore_extra=ignore_extra)
                    else:
                        value = field.factory(kwargs[name])
                else:
                    value = kwargs[name]
                _check_and_set_attr(cls, field, name, value, result, invariant_errors)
                del kwargs[name]
            elif field.initial is not PFIELD_NO_INITIAL:
                # Callable initials produce a fresh value per instance.
                initial = field.initial() if callable(field.initial) else field.initial
                _check_and_set_attr(
                    cls, field, name, initial, result, invariant_errors)
            elif field.mandatory:
                missing_fields.append('{0}.{1}'.format(cls.__name__, name))
        if invariant_errors or missing_fields:
            raise InvariantException(tuple(invariant_errors), tuple(missing_fields), 'Field invariant failed')
        # Any kwargs left over do not correspond to declared fields.
        if kwargs:
            raise AttributeError("'{0}' are not among the specified fields for {1}".format(
                ', '.join(kwargs), cls.__name__))
        check_global_invariants(result, cls._pclass_invariants)
        # Freeze: from here on __setattr__ refuses further mutation.
        result._pclass_frozen = True
        return result
    def set(self, *args, **kwargs):
        """
        Set a field in the instance. Returns a new instance with the updated value. The original instance remains
        unmodified. Accepts key-value pairs or single string representing the field name and a value.
        >>> from pyrsistent import PClass, field
        >>> class AClass(PClass):
        ...     x = field()
        ...
        >>> a = AClass(x=1)
        >>> a2 = a.set(x=2)
        >>> a3 = a.set('x', 3)
        >>> a
        AClass(x=1)
        >>> a2
        AClass(x=2)
        >>> a3
        AClass(x=3)
        """
        if args:
            kwargs[args[0]] = args[1]
        # Only the explicitly updated fields go through their factories.
        factory_fields = set(kwargs)
        for key in self._pclass_fields:
            if key not in kwargs:
                value = getattr(self, key, _MISSING_VALUE)
                if value is not _MISSING_VALUE:
                    kwargs[key] = value
        return self.__class__(_factory_fields=factory_fields, **kwargs)
    @classmethod
    def create(cls, kwargs, _factory_fields=None, ignore_extra=False):
        """
        Factory method. Will create a new PClass of the current type and assign the values
        specified in kwargs.
        :param ignore_extra: A boolean which when set to True will ignore any keys which appear in kwargs that are not
                             in the set of fields on the PClass.
        """
        if isinstance(kwargs, cls):
            return kwargs
        if ignore_extra:
            kwargs = {k: kwargs[k] for k in cls._pclass_fields if k in kwargs}
        return cls(_factory_fields=_factory_fields, ignore_extra=ignore_extra, **kwargs)
    def serialize(self, format=None):
        """
        Serialize the current PClass using custom serializer functions for fields where
        such have been supplied.
        """
        result = {}
        for name in self._pclass_fields:
            value = getattr(self, name, _MISSING_VALUE)
            if value is not _MISSING_VALUE:
                result[name] = serialize(self._pclass_fields[name].serializer, format, value)
        return result
    def transform(self, *transformations):
        """
        Apply transformations to the currency PClass. For more details on transformations see
        the documentation for PMap. Transformations on PClasses do not support key matching
        since the PClass is not a collection. Apart from that the transformations available
        for other persistent types work as expected.
        """
        return transform(self, transformations)
    def __eq__(self, other):
        # Field-wise comparison; unset fields compare equal only to unset fields.
        if isinstance(other, self.__class__):
            for name in self._pclass_fields:
                if getattr(self, name, _MISSING_VALUE) != getattr(other, name, _MISSING_VALUE):
                    return False
            return True
        return NotImplemented
    def __ne__(self, other):
        return not self == other
    def __hash__(self):
        # May want to optimize this by caching the hash somehow
        return hash(tuple((key, getattr(self, key, _MISSING_VALUE)) for key in self._pclass_fields))
    def __setattr__(self, key, value):
        # Instances are immutable once __new__ has set _pclass_frozen.
        if getattr(self, '_pclass_frozen', False):
            raise AttributeError("Can't set attribute, key={0}, value={1}".format(key, value))
        super(PClass, self).__setattr__(key, value)
    def __delattr__(self, key):
        raise AttributeError("Can't delete attribute, key={0}, use remove()".format(key))
    def _to_dict(self):
        # Plain dict of the currently set fields (unset fields omitted).
        result = {}
        for key in self._pclass_fields:
            value = getattr(self, key, _MISSING_VALUE)
            if value is not _MISSING_VALUE:
                result[key] = value
        return result
    def __repr__(self):
        return "{0}({1})".format(self.__class__.__name__,
                                 ', '.join('{0}={1}'.format(k, repr(v)) for k, v in self._to_dict().items()))
    def __reduce__(self):
        # Pickling support
        data = dict((key, getattr(self, key)) for key in self._pclass_fields if hasattr(self, key))
        return _restore_pickle, (self.__class__, data,)
    def evolver(self):
        """
        Returns an evolver for this object.
        """
        return _PClassEvolver(self, self._to_dict())
    def remove(self, name):
        """
        Remove attribute given by name from the current instance. Raises AttributeError if the
        attribute doesn't exist.
        """
        evolver = self.evolver()
        del evolver[name]
        return evolver.persistent()
class _PClassEvolver(object):
    # Mutable builder for PClass instances: accumulates changes in a plain
    # dict and only constructs a new PClass when persistent() is called and
    # something actually changed.
    __slots__ = ('_pclass_evolver_original', '_pclass_evolver_data', '_pclass_evolver_data_is_dirty', '_factory_fields')
    def __init__(self, original, initial_dict):
        self._pclass_evolver_original = original
        self._pclass_evolver_data = initial_dict
        self._pclass_evolver_data_is_dirty = False
        # Fields set through the evolver; their factories run on persistent().
        self._factory_fields = set()
    def __getitem__(self, item):
        return self._pclass_evolver_data[item]
    def set(self, key, value):
        # Identity check: assigning the very same object is not a change.
        if self._pclass_evolver_data.get(key, _MISSING_VALUE) is not value:
            self._pclass_evolver_data[key] = value
            self._factory_fields.add(key)
            self._pclass_evolver_data_is_dirty = True
        return self
    def __setitem__(self, key, value):
        self.set(key, value)
    def remove(self, item):
        if item in self._pclass_evolver_data:
            del self._pclass_evolver_data[item]
            self._factory_fields.discard(item)
            self._pclass_evolver_data_is_dirty = True
            return self
        raise AttributeError(item)
    def __delitem__(self, item):
        self.remove(item)
    def persistent(self):
        # Reuse the original instance when nothing changed.
        if self._pclass_evolver_data_is_dirty:
            return self._pclass_evolver_original.__class__(_factory_fields=self._factory_fields,
                                                           **self._pclass_evolver_data)
        return self._pclass_evolver_original
    def __setattr__(self, key, value):
        # Attribute writes that are not internal slots are treated as field sets.
        if key not in self.__slots__:
            self.set(key, value)
        else:
            super(_PClassEvolver, self).__setattr__(key, value)
    def __getattr__(self, item):
        return self[item]
| 9,702 | Python | 35.893536 | 120 | 0.591012 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pyrsistent/_immutable.py | import sys
def immutable(members='', name='Immutable', verbose=False):
    """
    Produces a class that either can be used standalone or as a base class for persistent classes.
    This is a thin wrapper around a named tuple.
    Constructing a type and using it to instantiate objects:
    >>> Point = immutable('x, y', name='Point')
    >>> p = Point(1, 2)
    >>> p2 = p.set(x=3)
    >>> p
    Point(x=1, y=2)
    >>> p2
    Point(x=3, y=2)
    Inheriting from a constructed type. In this case no type name needs to be supplied:
    >>> class PositivePoint(immutable('x, y')):
    ...     __slots__ = tuple()
    ...     def __new__(cls, x, y):
    ...         if x > 0 and y > 0:
    ...             return super(PositivePoint, cls).__new__(cls, x, y)
    ...         raise Exception('Coordinates must be positive!')
    ...
    >>> p = PositivePoint(1, 2)
    >>> p.set(x=3)
    PositivePoint(x=3, y=2)
    >>> p.set(y=-3)
    Traceback (most recent call last):
    Exception: Coordinates must be positive!
    The persistent class also supports the notion of frozen members. The value of a frozen member
    cannot be updated. For example it could be used to implement an ID that should remain the same
    over time. A frozen member is denoted by a trailing underscore.
    >>> Point = immutable('x, y, id_', name='Point')
    >>> p = Point(1, 2, id_=17)
    >>> p.set(x=3)
    Point(x=3, y=2, id_=17)
    >>> p.set(id_=18)
    Traceback (most recent call last):
    AttributeError: Cannot set frozen members id_
    """
    # Accept either a comma/space separated string or an iterable of names.
    if isinstance(members, str):
        members = members.replace(',', ' ').split()
    # Builds the code fragment (spliced into the template below) that rejects
    # updates to frozen members; empty string when there are none.
    def frozen_member_test():
        frozen_members = ["'%s'" % f for f in members if f.endswith('_')]
        if frozen_members:
            return """
        frozen_fields = fields_to_modify & set([{frozen_members}])
        if frozen_fields:
            raise AttributeError('Cannot set frozen members %s' % ', '.join(frozen_fields))
    """.format(frozen_members=', '.join(frozen_members))
        return ''
    quoted_members = ', '.join("'%s'" % m for m in members)
    # Source code for the generated class; formatted and exec'd below.
    # NOTE: edits here change the behavior of every generated class.
    template = """
class {class_name}(namedtuple('ImmutableBase', [{quoted_members}])):
    __slots__ = tuple()
    def __repr__(self):
        return super({class_name}, self).__repr__().replace('ImmutableBase', self.__class__.__name__)
    def set(self, **kwargs):
        if not kwargs:
            return self
        fields_to_modify = set(kwargs.keys())
        if not fields_to_modify <= {member_set}:
            raise AttributeError("'%s' is not a member" % ', '.join(fields_to_modify - {member_set}))
{frozen_member_test}
        return self.__class__.__new__(self.__class__, *map(kwargs.pop, [{quoted_members}], self))
""".format(quoted_members=quoted_members,
           member_set="set([%s])" % quoted_members if quoted_members else 'set()',
           frozen_member_test=frozen_member_test(),
           class_name=name)
    if verbose:
        print(template)
    from collections import namedtuple
    namespace = dict(namedtuple=namedtuple, __name__='pyrsistent_immutable')
    try:
        exec(template, namespace)
    except SyntaxError as e:
        # Include the generated source to make template bugs diagnosable.
        raise SyntaxError(str(e) + ':\n' + template) from e
    return namespace[name]
| 3,287 | Python | 32.55102 | 101 | 0.585336 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pyrsistent/_pmap.py | from collections.abc import Mapping, Hashable
from itertools import chain
from pyrsistent._pvector import pvector
from pyrsistent._transformations import transform
class PMapView:
    """Base view type over a persistent map (`PMap`).

    Plays the role that `dict_values` / `dict_items` play for the built-in
    dict; the concrete behavior lives in the `PMapValues` and `PMapItems`
    subclasses. (There is no key view here because `PMap.keys()` returns a
    `PSet` directly.)

    Parameters
    ----------
    m : mapping
        The mapping/dict-like object to view, normally a `PMap`.
    """
    def __init__(self, m):
        # Accept any Mapping but normalize to a PMap up front.
        if not isinstance(m, PMap):
            if not isinstance(m, Mapping):
                raise TypeError("PViewMap requires a Mapping object")
            m = pmap(m)
        # __setattr__ below forbids assignment, so bypass it here.
        object.__setattr__(self, '_map', m)

    def __len__(self):
        return len(self._map)

    def __setattr__(self, attr, value):
        raise TypeError("%s is immutable" % (type(self),))

    def __reversed__(self):
        raise TypeError("Persistent maps are not reversible")
class PMapValues(PMapView):
    """View over the values of a `PMap`, the analogue of `dict_values`.

    Parameters
    ----------
    m : mapping
        The mapping/dict-like object to view, normally a `PMap`.
    """
    def __iter__(self):
        return self._map.itervalues()

    def __contains__(self, value):
        return value in self._map.itervalues()

    # str/repr imitate the dict_view presentation.
    def __str__(self):
        return f"pmap_values({list(self)})"

    def __repr__(self):
        return f"pmap_values({list(self)})"

    def __eq__(self, other):
        # Mirrors dict_values, which never compares equal to anything
        # except the identical object.
        return other is self
class PMapItems(PMapView):
    """View over the (key, value) pairs of a `PMap`, the analogue of `dict_items`.

    Parameters
    ----------
    m : mapping
        The mapping/dict-like object to view, normally a `PMap`.
    """
    def __iter__(self):
        return self._map.iteritems()

    def __contains__(self, item):
        # Anything that does not unpack into exactly (key, value) is not an item.
        try:
            key, value = item
        except Exception:
            return False
        return key in self._map and self._map[key] == value

    # str/repr imitate the dict_view presentation.
    def __str__(self):
        return f"pmap_items({list(self)})"

    def __repr__(self):
        return f"pmap_items({list(self)})"

    def __eq__(self, other):
        return other is self or (isinstance(other, type(self)) and self._map == other._map)
class PMap(object):
    """
    Persistent map/dict. Tries to follow the same naming conventions as the built in dict where feasible.
    Do not instantiate directly, instead use the factory functions :py:func:`m` or :py:func:`pmap` to
    create an instance.
    Was originally written as a very close copy of the Clojure equivalent but was later rewritten to closer
    re-assemble the python dict. This means that a sparse vector (a PVector) of buckets is used. The keys are
    hashed and the elements inserted at position hash % len(bucket_vector). Whenever the map size exceeds 2/3 of
    the containing vectors size the map is reallocated to a vector of double the size. This is done to avoid
    excessive hash collisions.
    This structure corresponds most closely to the built in dict type and is intended as a replacement. Where the
    semantics are the same (more or less) the same function names have been used but for some cases it is not possible,
    for example assignments and deletion of values.
    PMap implements the Mapping protocol and is Hashable. It also supports dot-notation for
    element access.
    Random access and insert is log32(n) where n is the size of the map.
    The following are examples of some common operations on persistent maps
    >>> m1 = m(a=1, b=3)
    >>> m2 = m1.set('c', 3)
    >>> m3 = m2.remove('a')
    >>> m1 == {'a': 1, 'b': 3}
    True
    >>> m2 == {'a': 1, 'b': 3, 'c': 3}
    True
    >>> m3 == {'b': 3, 'c': 3}
    True
    >>> m3['c']
    3
    >>> m3.c
    3
    """
    __slots__ = ('_size', '_buckets', '__weakref__', '_cached_hash')
    def __new__(cls, size, buckets):
        # size: number of key/value pairs; buckets: PVector of
        # None-or-list-of-(key, value) slots addressed by hash(key) % len.
        self = super(PMap, cls).__new__(cls)
        self._size = size
        self._buckets = buckets
        return self
    @staticmethod
    def _get_bucket(buckets, key):
        # Open-hashing slot lookup; bucket is None or a list of (k, v) pairs.
        index = hash(key) % len(buckets)
        bucket = buckets[index]
        return index, bucket
    @staticmethod
    def _getitem(buckets, key):
        _, bucket = PMap._get_bucket(buckets, key)
        if bucket:
            for k, v in bucket:
                if k == key:
                    return v
        raise KeyError(key)
    def __getitem__(self, key):
        return PMap._getitem(self._buckets, key)
    @staticmethod
    def _contains(buckets, key):
        _, bucket = PMap._get_bucket(buckets, key)
        if bucket:
            for k, _ in bucket:
                if k == key:
                    return True
            return False
        return False
    def __contains__(self, key):
        return self._contains(self._buckets, key)
    get = Mapping.get
    def __iter__(self):
        return self.iterkeys()
    # If this method is not defined, then reversed(pmap) will attempt to reverse
    # the map using len() and getitem, usually resulting in a mysterious
    # KeyError.
    def __reversed__(self):
        raise TypeError("Persistent maps are not reversible")
    def __getattr__(self, key):
        # Dot-notation access: pmap.a is pmap['a'].
        try:
            return self[key]
        except KeyError as e:
            raise AttributeError(
                "{0} has no attribute '{1}'".format(type(self).__name__, key)
            ) from e
    def iterkeys(self):
        for k, _ in self.iteritems():
            yield k
    # These are more efficient implementations compared to the original
    # methods that are based on the keys iterator and then calls the
    # accessor functions to access the value for the corresponding key
    def itervalues(self):
        for _, v in self.iteritems():
            yield v
    def iteritems(self):
        for bucket in self._buckets:
            if bucket:
                for k, v in bucket:
                    yield k, v
    def values(self):
        return PMapValues(self)
    def keys(self):
        # Local import avoids a circular import between _pmap and _pset.
        from ._pset import PSet
        return PSet(self)
    def items(self):
        return PMapItems(self)
    def __len__(self):
        return self._size
    def __repr__(self):
        return 'pmap({0})'.format(str(dict(self)))
    def __eq__(self, other):
        if self is other:
            return True
        if not isinstance(other, Mapping):
            return NotImplemented
        if len(self) != len(other):
            return False
        if isinstance(other, PMap):
            # Differing cached hashes prove inequality without comparing items.
            if (hasattr(self, '_cached_hash') and hasattr(other, '_cached_hash')
                    and self._cached_hash != other._cached_hash):
                return False
            if self._buckets == other._buckets:
                return True
            return dict(self.iteritems()) == dict(other.iteritems())
        elif isinstance(other, dict):
            return dict(self.iteritems()) == other
        return dict(self.iteritems()) == dict(other.items())
    __ne__ = Mapping.__ne__
    def __lt__(self, other):
        raise TypeError('PMaps are not orderable')
    __le__ = __lt__
    __gt__ = __lt__
    __ge__ = __lt__
    def __str__(self):
        return self.__repr__()
    def __hash__(self):
        # Computed lazily and memoized in the _cached_hash slot.
        if not hasattr(self, '_cached_hash'):
            self._cached_hash = hash(frozenset(self.iteritems()))
        return self._cached_hash
    def set(self, key, val):
        """
        Return a new PMap with key and val inserted.
        >>> m1 = m(a=1, b=2)
        >>> m2 = m1.set('a', 3)
        >>> m3 = m1.set('c' ,4)
        >>> m1 == {'a': 1, 'b': 2}
        True
        >>> m2 == {'a': 3, 'b': 2}
        True
        >>> m3 == {'a': 1, 'b': 2, 'c': 4}
        True
        """
        return self.evolver().set(key, val).persistent()
    def remove(self, key):
        """
        Return a new PMap without the element specified by key. Raises KeyError if the element
        is not present.
        >>> m1 = m(a=1, b=2)
        >>> m1.remove('a')
        pmap({'b': 2})
        """
        return self.evolver().remove(key).persistent()
    def discard(self, key):
        """
        Return a new PMap without the element specified by key. Returns reference to itself
        if element is not present.
        >>> m1 = m(a=1, b=2)
        >>> m1.discard('a')
        pmap({'b': 2})
        >>> m1 is m1.discard('c')
        True
        """
        try:
            return self.remove(key)
        except KeyError:
            return self
    def update(self, *maps):
        """
        Return a new PMap with the items in Mappings inserted. If the same key is present in multiple
        maps the rightmost (last) value is inserted.
        >>> m1 = m(a=1, b=2)
        >>> m1.update(m(a=2, c=3), {'a': 17, 'd': 35}) == {'a': 17, 'b': 2, 'c': 3, 'd': 35}
        True
        """
        return self.update_with(lambda l, r: r, *maps)
    def update_with(self, update_fn, *maps):
        """
        Return a new PMap with the items in Mappings maps inserted. If the same key is present in multiple
        maps the values will be merged using merge_fn going from left to right.
        >>> from operator import add
        >>> m1 = m(a=1, b=2)
        >>> m1.update_with(add, m(a=2)) == {'a': 3, 'b': 2}
        True
        The reverse behaviour of the regular merge. Keep the leftmost element instead of the rightmost.
        >>> m1 = m(a=1)
        >>> m1.update_with(lambda l, r: l, m(a=2), {'a':3})
        pmap({'a': 1})
        """
        evolver = self.evolver()
        for map in maps:
            for key, value in map.items():
                evolver.set(key, update_fn(evolver[key], value) if key in evolver else value)
        return evolver.persistent()
    def __add__(self, other):
        return self.update(other)
    __or__ = __add__
    def __reduce__(self):
        # Pickling support
        return pmap, (dict(self),)
    def transform(self, *transformations):
        """
        Transform arbitrarily complex combinations of PVectors and PMaps. A transformation
        consists of two parts. One match expression that specifies which elements to transform
        and one transformation function that performs the actual transformation.
        >>> from pyrsistent import freeze, ny
        >>> news_paper = freeze({'articles': [{'author': 'Sara', 'content': 'A short article'},
        ...                                   {'author': 'Steve', 'content': 'A slightly longer article'}],
        ...                      'weather': {'temperature': '11C', 'wind': '5m/s'}})
        >>> short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:25] + '...' if len(c) > 25 else c)
        >>> very_short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:15] + '...' if len(c) > 15 else c)
        >>> very_short_news.articles[0].content
        'A short article'
        >>> very_short_news.articles[1].content
        'A slightly long...'
        When nothing has been transformed the original data structure is kept
        >>> short_news is news_paper
        True
        >>> very_short_news is news_paper
        False
        >>> very_short_news.articles[0] is news_paper.articles[0]
        True
        """
        return transform(self, transformations)
    def copy(self):
        return self
    class _Evolver(object):
        # Mutable builder over an evolving bucket vector; shares structure
        # with the original PMap until persistent() is called.
        __slots__ = ('_buckets_evolver', '_size', '_original_pmap')
        def __init__(self, original_pmap):
            self._original_pmap = original_pmap
            self._buckets_evolver = original_pmap._buckets.evolver()
            self._size = original_pmap._size
        def __getitem__(self, key):
            return PMap._getitem(self._buckets_evolver, key)
        def __setitem__(self, key, val):
            self.set(key, val)
        def set(self, key, val):
            kv = (key, val)
            index, bucket = PMap._get_bucket(self._buckets_evolver, key)
            # Grow when the load factor would exceed ~2/3 (i.e. bucket count
            # has fallen below 0.67 * size).
            reallocation_required = len(self._buckets_evolver) < 0.67 * self._size
            if bucket:
                for k, v in bucket:
                    if k == key:
                        if v is not val:
                            new_bucket = [(k2, v2) if k2 != k else (k2, val) for k2, v2 in bucket]
                            self._buckets_evolver[index] = new_bucket
                        return self
                # Only check and perform reallocation if not replacing an existing value.
                # This is a performance tweak, see #247.
                if reallocation_required:
                    self._reallocate()
                    return self.set(key, val)
                new_bucket = [kv]
                new_bucket.extend(bucket)
                self._buckets_evolver[index] = new_bucket
                self._size += 1
            else:
                if reallocation_required:
                    self._reallocate()
                    return self.set(key, val)
                self._buckets_evolver[index] = [kv]
                self._size += 1
            return self
        def _reallocate(self):
            # Double the bucket count and re-distribute every pair.
            new_size = 2 * len(self._buckets_evolver)
            new_list = new_size * [None]
            buckets = self._buckets_evolver.persistent()
            for k, v in chain.from_iterable(x for x in buckets if x):
                index = hash(k) % new_size
                if new_list[index]:
                    new_list[index].append((k, v))
                else:
                    new_list[index] = [(k, v)]
            # A reallocation should always result in a dirty buckets evolver to avoid
            # possible loss of elements when doing the reallocation.
            self._buckets_evolver = pvector().evolver()
            self._buckets_evolver.extend(new_list)
        def is_dirty(self):
            return self._buckets_evolver.is_dirty()
        def persistent(self):
            if self.is_dirty():
                self._original_pmap = PMap(self._size, self._buckets_evolver.persistent())
            return self._original_pmap
        def __len__(self):
            return self._size
        def __contains__(self, key):
            return PMap._contains(self._buckets_evolver, key)
        def __delitem__(self, key):
            self.remove(key)
        def remove(self, key):
            index, bucket = PMap._get_bucket(self._buckets_evolver, key)
            if bucket:
                new_bucket = [(k, v) for (k, v) in bucket if k != key]
                if len(bucket) > len(new_bucket):
                    # Empty buckets are stored as None, not as empty lists.
                    self._buckets_evolver[index] = new_bucket if new_bucket else None
                    self._size -= 1
                    return self
            raise KeyError('{0}'.format(key))
    def evolver(self):
        """
        Create a new evolver for this pmap. For a discussion on evolvers in general see the
        documentation for the pvector evolver.
        Create the evolver and perform various mutating updates to it:
        >>> m1 = m(a=1, b=2)
        >>> e = m1.evolver()
        >>> e['c'] = 3
        >>> len(e)
        3
        >>> del e['a']
        The underlying pmap remains the same:
        >>> m1 == {'a': 1, 'b': 2}
        True
        The changes are kept in the evolver. An updated pmap can be created using the
        persistent() function on the evolver.
        >>> m2 = e.persistent()
        >>> m2 == {'b': 2, 'c': 3}
        True
        The new pmap will share data with the original pmap in the same way that would have
        been done if only using operations on the pmap.
        """
        return self._Evolver(self)
# PMap satisfies both ABCs structurally; register it so isinstance checks pass.
Mapping.register(PMap)
Hashable.register(PMap)


def _turbo_mapping(initial, pre_size):
    # Build a PMap from *initial* in one pass, pre-sizing the bucket vector.
    if pre_size:
        size = pre_size
    else:
        # Aim for ~2x the element count (minimum 8) to keep collisions rare.
        try:
            size = 2 * len(initial) or 8
        except Exception:
            # No usable __len__; start small and let the map reallocate later.
            size = 8
    buckets = [None] * size
    if not isinstance(initial, Mapping):
        # Normalizing through dict guarantees unique keys below.
        initial = dict(initial)
    for key, value in initial.items():
        index = hash(key) % size
        if buckets[index] is None:
            buckets[index] = [(key, value)]
        else:
            buckets[index].append((key, value))
    return PMap(len(initial), pvector().extend(buckets))
# Single shared instance representing the empty map.
_EMPTY_PMAP = _turbo_mapping({}, 0)


def pmap(initial={}, pre_size=0):
    """
    Create new persistent map, inserts all elements in initial into the newly created map.
    The optional argument pre_size may be used to specify an initial size of the underlying bucket vector. This
    may have a positive performance impact in the cases where you know beforehand that a large number of elements
    will be inserted into the map eventually since it will reduce the number of reallocations required.
    >>> pmap({'a': 13, 'b': 14}) == {'a': 13, 'b': 14}
    True
    """
    if initial or pre_size != 0:
        return _turbo_mapping(initial, pre_size)
    # Nothing to insert and no explicit sizing: hand out the shared empty map.
    return _EMPTY_PMAP
def m(**kwargs):
    """
    Shorthand factory: build a persistent map from keyword arguments.
    >>> m(a=13, b=14) == {'a': 13, 'b': 14}
    True
    """
    return pmap(kwargs)
| 18,781 | Python | 31.551126 | 127 | 0.559235 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pyrsistent/_pbag.py | from collections.abc import Container, Iterable, Sized, Hashable
from functools import reduce
from pyrsistent._pmap import pmap
def _add_to_counters(counters, element):
return counters.set(element, counters.get(element, 0) + 1)
class PBag(object):
    """
    A persistent bag/multiset type.
    Requires elements to be hashable, and allows duplicates, but has no
    ordering. Bags are hashable.
    Do not instantiate directly, instead use the factory functions :py:func:`b`
    or :py:func:`pbag` to create an instance.
    Some examples:
    >>> s = pbag([1, 2, 3, 1])
    >>> s2 = s.add(4)
    >>> s3 = s2.remove(1)
    >>> s
    pbag([1, 1, 2, 3])
    >>> s2
    pbag([1, 1, 2, 3, 4])
    >>> s3
    pbag([1, 2, 3, 4])
    """
    # _counts is a persistent map of element -> occurrence count; entries are
    # removed entirely when a count would drop to zero (see remove/__sub__).
    __slots__ = ('_counts', '__weakref__')
    def __init__(self, counts):
        self._counts = counts
    def add(self, element):
        """
        Add an element to the bag.
        >>> s = pbag([1])
        >>> s2 = s.add(1)
        >>> s3 = s.add(2)
        >>> s2
        pbag([1, 1])
        >>> s3
        pbag([1, 2])
        """
        return PBag(_add_to_counters(self._counts, element))
    def update(self, iterable):
        """
        Update bag with all elements in iterable.
        >>> s = pbag([1])
        >>> s.update([1, 2])
        pbag([1, 1, 2])
        """
        if iterable:
            return PBag(reduce(_add_to_counters, iterable, self._counts))
        return self
    def remove(self, element):
        """
        Remove an element from the bag.
        >>> s = pbag([1, 1, 2])
        >>> s2 = s.remove(1)
        >>> s3 = s.remove(2)
        >>> s2
        pbag([1, 2])
        >>> s3
        pbag([1, 1])
        """
        if element not in self._counts:
            raise KeyError(element)
        elif self._counts[element] == 1:
            # Last occurrence: drop the entry instead of keeping a zero count.
            newc = self._counts.remove(element)
        else:
            newc = self._counts.set(element, self._counts[element] - 1)
        return PBag(newc)
    def count(self, element):
        """
        Return the number of times an element appears.
        >>> pbag([]).count('non-existent')
        0
        >>> pbag([1, 1, 2]).count(1)
        2
        """
        return self._counts.get(element, 0)
    def __len__(self):
        """
        Return the length including duplicates.
        >>> len(pbag([1, 1, 2]))
        3
        """
        return sum(self._counts.itervalues())
    def __iter__(self):
        """
        Return an iterator of all elements, including duplicates.
        >>> list(pbag([1, 1, 2]))
        [1, 1, 2]
        >>> list(pbag([1, 2]))
        [1, 2]
        """
        for elt, count in self._counts.iteritems():
            for i in range(count):
                yield elt
    def __contains__(self, elt):
        """
        Check if an element is in the bag.
        >>> 1 in pbag([1, 1, 2])
        True
        >>> 0 in pbag([1, 2])
        False
        """
        return elt in self._counts
    def __repr__(self):
        return "pbag({0})".format(list(self))
    def __eq__(self, other):
        """
        Check if two bags are equivalent, honoring the number of duplicates,
        and ignoring insertion order.
        >>> pbag([1, 1, 2]) == pbag([1, 2])
        False
        >>> pbag([2, 1, 0]) == pbag([0, 1, 2])
        True
        """
        # NB: raises instead of returning NotImplemented, so comparing a
        # PBag with any other type is an error rather than simply unequal.
        if type(other) is not PBag:
            raise TypeError("Can only compare PBag with PBags")
        return self._counts == other._counts
    def __lt__(self, other):
        raise TypeError('PBags are not orderable')
    # All ordering comparisons are rejected the same way.
    __le__ = __lt__
    __gt__ = __lt__
    __ge__ = __lt__
    # Multiset-style operations similar to collections.Counter
    def __add__(self, other):
        """
        Combine elements from two PBags.
        >>> pbag([1, 2, 2]) + pbag([2, 3, 3])
        pbag([1, 2, 2, 2, 3, 3])
        """
        if not isinstance(other, PBag):
            return NotImplemented
        result = self._counts.evolver()
        for elem, other_count in other._counts.iteritems():
            result[elem] = self.count(elem) + other_count
        return PBag(result.persistent())
    def __sub__(self, other):
        """
        Remove elements from one PBag that are present in another.
        >>> pbag([1, 2, 2, 2, 3]) - pbag([2, 3, 3, 4])
        pbag([1, 2, 2])
        """
        if not isinstance(other, PBag):
            return NotImplemented
        result = self._counts.evolver()
        for elem, other_count in other._counts.iteritems():
            newcount = self.count(elem) - other_count
            if newcount > 0:
                result[elem] = newcount
            elif elem in self:
                # Count fell to zero or below: remove the entry entirely.
                result.remove(elem)
        return PBag(result.persistent())
    def __or__(self, other):
        """
        Union: Keep elements that are present in either of two PBags.
        >>> pbag([1, 2, 2, 2]) | pbag([2, 3, 3])
        pbag([1, 2, 2, 2, 3, 3])
        """
        if not isinstance(other, PBag):
            return NotImplemented
        result = self._counts.evolver()
        for elem, other_count in other._counts.iteritems():
            count = self.count(elem)
            newcount = max(count, other_count)
            result[elem] = newcount
        return PBag(result.persistent())
    def __and__(self, other):
        """
        Intersection: Only keep elements that are present in both PBags.
        >>> pbag([1, 2, 2, 2]) & pbag([2, 3, 3])
        pbag([2])
        """
        if not isinstance(other, PBag):
            return NotImplemented
        # Built from a fresh empty map since only common elements survive.
        result = pmap().evolver()
        for elem, count in self._counts.iteritems():
            newcount = min(count, other.count(elem))
            if newcount > 0:
                result[elem] = newcount
        return PBag(result.persistent())
    def __hash__(self):
        """
        Hash based on value of elements.
        >>> m = pmap({pbag([1, 2]): "it's here!"})
        >>> m[pbag([2, 1])]
        "it's here!"
        >>> pbag([1, 1, 2]) in m
        False
        """
        return hash(self._counts)
# Register PBag as a virtual subclass of the relevant standard ABCs.
Container.register(PBag)
Iterable.register(PBag)
Sized.register(PBag)
Hashable.register(PBag)
def b(*elements):
    """
    Construct a persistent bag.
    Takes an arbitrary number of arguments to insert into the new persistent
    bag.
    >>> b(1, 2, 3, 2)
    pbag([1, 2, 2, 3])
    """
    return pbag(elements)
def pbag(elements):
    """
    Convert an iterable to a persistent bag.
    Takes an iterable with elements to insert.
    >>> pbag([1, 2, 3, 2])
    pbag([1, 2, 2, 3])
    """
    # NB: truthiness shortcut — an exhausted generator is still truthy, so it
    # takes the reduce path below, which also yields an empty bag.
    if not elements:
        return _EMPTY_PBAG
    return PBag(reduce(_add_to_counters, elements, pmap()))
# Shared singleton for the empty bag.
_EMPTY_PBAG = PBag(pmap())
| 6,730 | Python | 24.115672 | 79 | 0.504903 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pyrsistent/_checked_types.py | from enum import Enum
from abc import abstractmethod, ABCMeta
from collections.abc import Iterable
from pyrsistent._pmap import PMap, pmap
from pyrsistent._pset import PSet, pset
from pyrsistent._pvector import PythonPVector, python_pvector
class CheckedType(object):
    """
    Marker class to enable creation and serialization of checked object graphs.
    """
    __slots__ = ()
    @classmethod
    @abstractmethod
    def create(cls, source_data, _factory_fields=None):
        # Factory: build an instance of the checked type from plain data.
        raise NotImplementedError()
    @abstractmethod
    def serialize(self, format=None):
        # Inverse of create(): return a plain-data representation.
        raise NotImplementedError()
def _restore_pickle(cls, data):
    # Pickle helper: rebuild an instance through the class's create() factory.
    # _factory_fields=set() disables per-field factory application on restore.
    return cls.create(data, _factory_fields=set())
class InvariantException(Exception):
    """
    Raised from a :py:class:`CheckedType` when invariant tests fail or a
    mandatory field is missing.
    Two attributes carry the details:
    invariant_errors, a tuple of error data for the failing invariants
    missing_fields, a tuple of strings specifying the missing names
    """
    def __init__(self, error_codes=(), missing_fields=(), *args, **kwargs):
        # Error codes may be supplied as callables that lazily produce the
        # actual error data; resolve them eagerly here.
        resolved = tuple(code() if callable(code) else code for code in error_codes)
        self.invariant_errors = resolved
        self.missing_fields = missing_fields
        super(InvariantException, self).__init__(*args, **kwargs)
    def __str__(self):
        base = super(InvariantException, self).__str__()
        details = ", invariant_errors=[{invariant_errors}], missing_fields=[{missing_fields}]".format(
            invariant_errors=', '.join(str(e) for e in self.invariant_errors),
            missing_fields=', '.join(self.missing_fields))
        return base + details
# Iterable types that nevertheless stand for themselves in type specs
# (see maybe_parse_user_type below).
_preserved_iterable_types = (
    Enum,
)
"""Some types are themselves iterable, but we want to use the type itself and
not its members for the type specification. This defines a set of such types
that we explicitly preserve.
Note that strings are not such types because the string inputs we pass in are
values, not types.
"""
def maybe_parse_user_type(t):
    """Try to coerce a user-supplied type directive into a list of types.
    This function should be used in all places where a user specifies a type,
    for consistency.
    The policy for what defines valid user input should be clear from the implementation.
    """
    if isinstance(t, type):
        # A preserved iterable type (e.g. Enum) or any plain, non-iterable
        # type stands for itself.
        if issubclass(t, _preserved_iterable_types):
            return [t]
        if not isinstance(t, Iterable):
            return [t]
    elif isinstance(t, str):
        # Strings name types to be resolved lazily elsewhere.
        return [t]
    if isinstance(t, Iterable):
        # Recur so each contained directive is validated as well.
        return tuple(parsed for sub in t for parsed in maybe_parse_user_type(sub))
    # If this raises because `t` cannot be formatted, so be it.
    raise TypeError(
        'Type specifications must be types or strings. Input: {}'.format(t)
    )
def maybe_parse_many_user_types(ts):
    # Just a different name to communicate that you're parsing multiple user
    # inputs. `maybe_parse_user_type` handles the iterable case anyway.
    return maybe_parse_user_type(ts)
def _store_types(dct, bases, destination_name, source_name):
    # Gather type directives named ``source_name`` from the class dict and
    # every base class dict, normalize them, and store the result in the
    # class dict under ``destination_name``.
    maybe_types = maybe_parse_many_user_types([
        d[source_name]
        for d in ([dct] + [b.__dict__ for b in bases]) if source_name in d
    ])
    dct[destination_name] = maybe_types
def _merge_invariant_results(result):
verdict = True
data = []
for verd, dat in result:
if not verd:
verdict = False
data.append(dat)
return verdict, tuple(data)
def wrap_invariant(invariant):
    """Wrap *invariant* so multi-test outcomes are merged to a single pair."""
    # An invariant may return one (bool, data) pair or an iterable of such
    # pairs; the merged form is what clients expect back.
    def wrapped(*args, **kwargs):
        outcome = invariant(*args, **kwargs)
        if isinstance(outcome[0], bool):
            return outcome
        return _merge_invariant_results(outcome)
    return wrapped
def _all_dicts(bases, seen=None):
"""
Yield each class in ``bases`` and each of their base classes.
"""
if seen is None:
seen = set()
for cls in bases:
if cls in seen:
continue
seen.add(cls)
yield cls.__dict__
for b in _all_dicts(cls.__bases__, seen):
yield b
def store_invariants(dct, bases, destination_name, source_name):
    # Collect invariant callables named ``source_name`` from the class dict
    # and the whole base-class hierarchy (invariants are inherited), wrap
    # them for result merging, and store under ``destination_name``.
    invariants = []
    for ns in [dct] + list(_all_dicts(bases)):
        try:
            invariant = ns[source_name]
        except KeyError:
            continue
        invariants.append(invariant)
    if not all(callable(invariant) for invariant in invariants):
        raise TypeError('Invariants must be callable')
    dct[destination_name] = tuple(wrap_invariant(inv) for inv in invariants)
class _CheckedTypeMeta(ABCMeta):
    # Metaclass that translates the user-facing __type__/__invariant__
    # declarations into the internal _checked_* attributes at class creation.
    def __new__(mcs, name, bases, dct):
        _store_types(dct, bases, '_checked_types', '__type__')
        store_invariants(dct, bases, '_checked_invariants', '__invariant__')
        def default_serializer(self, _, value):
            # Nested checked types serialize themselves; leaves pass through.
            if isinstance(value, CheckedType):
                return value.serialize()
            return value
        dct.setdefault('__serializer__', default_serializer)
        # Checked collections carry no per-instance state of their own.
        dct['__slots__'] = ()
        return super(_CheckedTypeMeta, mcs).__new__(mcs, name, bases, dct)
class CheckedTypeError(TypeError):
    # Common base for key/value type violations; carries the offending
    # collection class, the allowed types, and the actual type and value.
    def __init__(self, source_class, expected_types, actual_type, actual_value, *args, **kwargs):
        super(CheckedTypeError, self).__init__(*args, **kwargs)
        self.source_class = source_class
        self.expected_types = expected_types
        self.actual_type = actual_type
        self.actual_value = actual_value
class CheckedKeyTypeError(CheckedTypeError):
    """
    Raised when trying to set a value using a key whose type doesn't match the declared key type.
    Attributes:
    source_class -- The class of the collection
    expected_types -- Allowed types
    actual_type -- The non matching type
    actual_value -- Value of the variable with the non matching type
    """
    pass
class CheckedValueTypeError(CheckedTypeError):
    """
    Raised when trying to set a value with a type that doesn't match the declared value type.
    Attributes:
    source_class -- The class of the collection
    expected_types -- Allowed types
    actual_type -- The non matching type
    actual_value -- Value of the variable with the non matching type
    """
    pass
def _get_class(type_name):
    # Resolve a dotted "module.Class" string to the class object it names.
    module_name, class_name = type_name.rsplit('.', 1)
    # fromlist makes __import__ return the leaf module rather than the root package.
    module = __import__(module_name, fromlist=[class_name])
    return getattr(module, class_name)
def get_type(typ):
    # A string directive is resolved lazily to the class it names;
    # an actual type passes through untouched.
    return typ if isinstance(typ, type) else _get_class(typ)
def get_types(typs):
return [get_type(typ) for typ in typs]
def _check_types(it, expected_types, source_class, exception_type=CheckedValueTypeError):
    # Verify each element of ``it`` matches at least one of the declared
    # types; an empty declaration means "anything goes".
    if expected_types:
        for e in it:
            if not any(isinstance(e, get_type(t)) for t in expected_types):
                actual_type = type(e)
                msg = "Type {source_class} can only be used with {expected_types}, not {actual_type}".format(
                    source_class=source_class.__name__,
                    expected_types=tuple(get_type(et).__name__ for et in expected_types),
                    actual_type=actual_type.__name__)
                raise exception_type(source_class, expected_types, actual_type, e, msg)
def _invariant_errors(elem, invariants):
return [data for valid, data in (invariant(elem) for invariant in invariants) if not valid]
def _invariant_errors_iterable(it, invariants):
    # Flatten per-element error lists into a single list. A flattening
    # comprehension replaces sum(lists, []), which copies the accumulator on
    # every step and is quadratic in the total number of errors.
    return [error for elem in it for error in _invariant_errors(elem, invariants)]
def optional(*typs):
    """ Convenience function to specify that a value may be of any of the types in type 'typs' or None """
    # NoneType is appended so None always passes the type check.
    return (*typs, type(None))
def _checked_type_create(cls, source_data, _factory_fields=None, ignore_extra=False):
    # Shared ``create`` implementation for checked sequence/set types.
    if isinstance(source_data, cls):
        return source_data
    # Recursively apply create methods of checked types if the types of the supplied data
    # does not match any of the valid types.
    types = get_types(cls._checked_types)
    checked_type = next((t for t in types if issubclass(t, CheckedType)), None)
    if checked_type:
        return cls([checked_type.create(data, ignore_extra=ignore_extra)
                    if not any(isinstance(data, t) for t in types) else data
                    for data in source_data])
    return cls(source_data)
class CheckedPVector(PythonPVector, CheckedType, metaclass=_CheckedTypeMeta):
    """
    A CheckedPVector is a PVector which allows specifying type and invariant checks.
    >>> class Positives(CheckedPVector):
    ...     __type__ = (int, float)
    ...     __invariant__ = lambda n: (n >= 0, 'Negative')
    ...
    >>> Positives([1, 2, 3])
    Positives([1, 2, 3])
    """
    __slots__ = ()
    def __new__(cls, initial=()):
        # Reuse the internal trie directly when handed a plain PythonPVector;
        # any other input is built through the checking evolver.
        if type(initial) == PythonPVector:
            return super(CheckedPVector, cls).__new__(cls, initial._count, initial._shift, initial._root, initial._tail)
        return CheckedPVector.Evolver(cls, python_pvector()).extend(initial).persistent()
    def set(self, key, value):
        return self.evolver().set(key, value).persistent()
    def append(self, val):
        return self.evolver().append(val).persistent()
    def extend(self, it):
        return self.evolver().extend(it).persistent()
    create = classmethod(_checked_type_create)
    def serialize(self, format=None):
        serializer = self.__serializer__
        return list(serializer(format, v) for v in self)
    def __reduce__(self):
        # Pickling support
        return _restore_pickle, (self.__class__, list(self),)
    class Evolver(PythonPVector.Evolver):
        __slots__ = ('_destination_class', '_invariant_errors')
        def __init__(self, destination_class, vector):
            super(CheckedPVector.Evolver, self).__init__(vector)
            self._destination_class = destination_class
            self._invariant_errors = []
        def _check(self, it):
            # Type violations raise immediately; invariant failures are
            # accumulated and raised together in persistent().
            _check_types(it, self._destination_class._checked_types, self._destination_class)
            error_data = _invariant_errors_iterable(it, self._destination_class._checked_invariants)
            self._invariant_errors.extend(error_data)
        def __setitem__(self, key, value):
            self._check([value])
            return super(CheckedPVector.Evolver, self).__setitem__(key, value)
        def append(self, elem):
            self._check([elem])
            return super(CheckedPVector.Evolver, self).append(elem)
        def extend(self, it):
            # Materialize so the iterable can be both checked and inserted.
            it = list(it)
            self._check(it)
            return super(CheckedPVector.Evolver, self).extend(it)
        def persistent(self):
            if self._invariant_errors:
                raise InvariantException(error_codes=self._invariant_errors)
            result = self._orig_pvector
            if self.is_dirty() or (self._destination_class != type(self._orig_pvector)):
                pv = super(CheckedPVector.Evolver, self).persistent().extend(self._extra_tail)
                result = self._destination_class(pv)
                self._reset(result)
            return result
    def __repr__(self):
        return self.__class__.__name__ + "({0})".format(self.tolist())
    __str__ = __repr__
    def evolver(self):
        return CheckedPVector.Evolver(self.__class__, self)
class CheckedPSet(PSet, CheckedType, metaclass=_CheckedTypeMeta):
    """
    A CheckedPSet is a PSet which allows specifying type and invariant checks.
    >>> class Positives(CheckedPSet):
    ...     __type__ = (int, float)
    ...     __invariant__ = lambda n: (n >= 0, 'Negative')
    ...
    >>> Positives([1, 2, 3])
    Positives([1, 2, 3])
    """
    __slots__ = ()
    def __new__(cls, initial=()):
        # A PMap is taken to be a pre-checked backing map; anything else is
        # added element by element through the checking evolver.
        if type(initial) is PMap:
            return super(CheckedPSet, cls).__new__(cls, initial)
        evolver = CheckedPSet.Evolver(cls, pset())
        for e in initial:
            evolver.add(e)
        return evolver.persistent()
    def __repr__(self):
        # Replace the leading "pset" of the base repr with the subclass name.
        return self.__class__.__name__ + super(CheckedPSet, self).__repr__()[4:]
    def __str__(self):
        return self.__repr__()
    def serialize(self, format=None):
        serializer = self.__serializer__
        return set(serializer(format, v) for v in self)
    create = classmethod(_checked_type_create)
    def __reduce__(self):
        # Pickling support
        return _restore_pickle, (self.__class__, list(self),)
    def evolver(self):
        return CheckedPSet.Evolver(self.__class__, self)
    class Evolver(PSet._Evolver):
        __slots__ = ('_destination_class', '_invariant_errors')
        def __init__(self, destination_class, original_set):
            super(CheckedPSet.Evolver, self).__init__(original_set)
            self._destination_class = destination_class
            self._invariant_errors = []
        def _check(self, it):
            # Type violations raise immediately; invariant failures are
            # accumulated and raised together in persistent().
            _check_types(it, self._destination_class._checked_types, self._destination_class)
            error_data = _invariant_errors_iterable(it, self._destination_class._checked_invariants)
            self._invariant_errors.extend(error_data)
        def add(self, element):
            self._check([element])
            self._pmap_evolver[element] = True
            return self
        def persistent(self):
            if self._invariant_errors:
                raise InvariantException(error_codes=self._invariant_errors)
            if self.is_dirty() or self._destination_class != type(self._original_pset):
                return self._destination_class(self._pmap_evolver.persistent())
            return self._original_pset
class _CheckedMapTypeMeta(type):
    # Map variant of _CheckedTypeMeta: keys and values carry separate type
    # declarations (__key_type__/__value_type__).
    def __new__(mcs, name, bases, dct):
        _store_types(dct, bases, '_checked_key_types', '__key_type__')
        _store_types(dct, bases, '_checked_value_types', '__value_type__')
        store_invariants(dct, bases, '_checked_invariants', '__invariant__')
        def default_serializer(self, _, key, value):
            # Serialize nested checked keys/values; leaves pass through.
            sk = key
            if isinstance(key, CheckedType):
                sk = key.serialize()
            sv = value
            if isinstance(value, CheckedType):
                sv = value.serialize()
            return sk, sv
        dct.setdefault('__serializer__', default_serializer)
        dct['__slots__'] = ()
        return super(_CheckedMapTypeMeta, mcs).__new__(mcs, name, bases, dct)
# Marker object: distinguishes "size not given" from any real size argument.
_UNDEFINED_CHECKED_PMAP_SIZE = object()
class CheckedPMap(PMap, CheckedType, metaclass=_CheckedMapTypeMeta):
    """
    A CheckedPMap is a PMap which allows specifying type and invariant checks.
    >>> class IntToFloatMap(CheckedPMap):
    ...     __key_type__ = int
    ...     __value_type__ = float
    ...     __invariant__ = lambda k, v: (int(v) == k, 'Invalid mapping')
    ...
    >>> IntToFloatMap({1: 1.5, 2: 2.25})
    IntToFloatMap({1: 1.5, 2: 2.25})
    """
    __slots__ = ()
    def __new__(cls, initial={}, size=_UNDEFINED_CHECKED_PMAP_SIZE):
        # When a size is supplied, `initial` is a pre-built bucket structure;
        # otherwise each item goes through the checking evolver.
        if size is not _UNDEFINED_CHECKED_PMAP_SIZE:
            return super(CheckedPMap, cls).__new__(cls, size, initial)
        evolver = CheckedPMap.Evolver(cls, pmap())
        for k, v in initial.items():
            evolver.set(k, v)
        return evolver.persistent()
    def evolver(self):
        return CheckedPMap.Evolver(self.__class__, self)
    def __repr__(self):
        return self.__class__.__name__ + "({0})".format(str(dict(self)))
    __str__ = __repr__
    def serialize(self, format=None):
        serializer = self.__serializer__
        return dict(serializer(format, k, v) for k, v in self.items())
    @classmethod
    def create(cls, source_data, _factory_fields=None):
        if isinstance(source_data, cls):
            return source_data
        # Recursively apply create methods of checked types if the types of the supplied data
        # does not match any of the valid types.
        key_types = get_types(cls._checked_key_types)
        checked_key_type = next((t for t in key_types if issubclass(t, CheckedType)), None)
        value_types = get_types(cls._checked_value_types)
        checked_value_type = next((t for t in value_types if issubclass(t, CheckedType)), None)
        if checked_key_type or checked_value_type:
            return cls(dict((checked_key_type.create(key) if checked_key_type and not any(isinstance(key, t) for t in key_types) else key,
                             checked_value_type.create(value) if checked_value_type and not any(isinstance(value, t) for t in value_types) else value)
                            for key, value in source_data.items()))
        return cls(source_data)
    def __reduce__(self):
        # Pickling support
        return _restore_pickle, (self.__class__, dict(self),)
    class Evolver(PMap._Evolver):
        __slots__ = ('_destination_class', '_invariant_errors')
        def __init__(self, destination_class, original_map):
            super(CheckedPMap.Evolver, self).__init__(original_map)
            self._destination_class = destination_class
            self._invariant_errors = []
        def set(self, key, value):
            # Key and value are checked against their own type declarations;
            # invariant failures are deferred until persistent().
            _check_types([key], self._destination_class._checked_key_types, self._destination_class, CheckedKeyTypeError)
            _check_types([value], self._destination_class._checked_value_types, self._destination_class)
            self._invariant_errors.extend(data for valid, data in (invariant(key, value)
                                                                   for invariant in self._destination_class._checked_invariants)
                                          if not valid)
            return super(CheckedPMap.Evolver, self).set(key, value)
        def persistent(self):
            if self._invariant_errors:
                raise InvariantException(error_codes=self._invariant_errors)
            if self.is_dirty() or type(self._original_pmap) != self._destination_class:
                return self._destination_class(self._buckets_evolver.persistent(), self._size)
            return self._original_pmap
| 18,372 | Python | 32.836096 | 150 | 0.617897 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pyrsistent/_plist.py | from collections.abc import Sequence, Hashable
from numbers import Integral
from functools import reduce
class _PListBuilder(object):
    """
    Helper class to allow construction of a list without
    having to reverse it in the end.
    """
    __slots__ = ('_head', '_tail')
    def __init__(self):
        self._head = _EMPTY_PLIST
        self._tail = _EMPTY_PLIST
    def _append(self, elem, constructor):
        # Mutates the tail node's ``rest`` slot in place; this is safe only
        # because nodes created here have not yet escaped the builder.
        if not self._tail:
            self._head = constructor(elem)
            self._tail = self._head
        else:
            self._tail.rest = constructor(elem)
            self._tail = self._tail.rest
        return self._head
    def append_elem(self, elem):
        # Append a single element as a new tail node.
        return self._append(elem, lambda e: PList(e, _EMPTY_PLIST))
    def append_plist(self, pl):
        # Splice an existing plist in as the tail (shared, not copied).
        return self._append(pl, lambda l: l)
    def build(self):
        return self._head
class _PListBase(object):
    # Shared behavior of PList nodes and the empty-list singleton. Subclasses
    # provide ``first``/``rest`` and truthiness (non-empty vs empty).
    __slots__ = ('__weakref__',)
    # Selected implementations can be taken straight from the Sequence
    # class, other are less suitable. Especially those that work with
    # index lookups.
    count = Sequence.count
    index = Sequence.index
    def __reduce__(self):
        # Pickling support
        return plist, (list(self),)
    def __len__(self):
        """
        Return the length of the list, computed by traversing it.
        This is obviously O(n) but with the current implementation
        where a list is also a node the overhead of storing the length
        in every node would be quite significant.
        """
        return sum(1 for _ in self)
    def __repr__(self):
        return "plist({0})".format(list(self))
    __str__ = __repr__
    def cons(self, elem):
        """
        Return a new list with elem inserted as new head.
        >>> plist([1, 2]).cons(3)
        plist([3, 1, 2])
        """
        return PList(elem, self)
    def mcons(self, iterable):
        """
        Return a new list with all elements of iterable repeatedly cons:ed to the current list.
        NB! The elements will be inserted in the reverse order of the iterable.
        Runs in O(len(iterable)).
        >>> plist([1, 2]).mcons([3, 4])
        plist([4, 3, 1, 2])
        """
        head = self
        for elem in iterable:
            head = head.cons(elem)
        return head
    def reverse(self):
        """
        Return a reversed version of list. Runs in O(n) where n is the length of the list.
        >>> plist([1, 2, 3]).reverse()
        plist([3, 2, 1])
        Also supports the standard reversed function.
        >>> reversed(plist([1, 2, 3]))
        plist([3, 2, 1])
        """
        result = plist()
        head = self
        while head:
            result = result.cons(head.first)
            head = head.rest
        return result
    __reversed__ = reverse
    def split(self, index):
        """
        Split the list at position specified by index. Returns a tuple containing the
        list up until index and the list after the index. Runs in O(index).
        >>> plist([1, 2, 3, 4]).split(2)
        (plist([1, 2]), plist([3, 4]))
        """
        lb = _PListBuilder()
        right_list = self
        i = 0
        while right_list and i < index:
            lb.append_elem(right_list.first)
            right_list = right_list.rest
            i += 1
        if not right_list:
            # Just a small optimization in the cases where no split occurred
            return self, _EMPTY_PLIST
        return lb.build(), right_list
    def __iter__(self):
        li = self
        while li:
            yield li.first
            li = li.rest
    def __lt__(self, other):
        if not isinstance(other, _PListBase):
            return NotImplemented
        return tuple(self) < tuple(other)
    def __eq__(self, other):
        """
        Traverses the lists, checking equality of elements.
        This is an O(n) operation, but preserves the standard semantics of list equality.
        """
        if not isinstance(other, _PListBase):
            return NotImplemented
        self_head = self
        other_head = other
        while self_head and other_head:
            if not self_head.first == other_head.first:
                return False
            self_head = self_head.rest
            other_head = other_head.rest
        # Equal only if both lists were exhausted at the same time.
        return not self_head and not other_head
    def __getitem__(self, index):
        # Don't use this data structure if you plan to do a lot of indexing, it is
        # very inefficient! Use a PVector instead!
        if isinstance(index, slice):
            if index.start is not None and index.stop is None and (index.step is None or index.step == 1):
                # Open-ended [n:] slice shares the tail structurally.
                return self._drop(index.start)
            # Take the easy way out for all other slicing cases, not much structural reuse possible anyway
            return plist(tuple(self)[index])
        if not isinstance(index, Integral):
            raise TypeError("'%s' object cannot be interpreted as an index" % type(index).__name__)
        if index < 0:
            # NB: O(n)!
            index += len(self)
        try:
            return self._drop(index).first
        except AttributeError as e:
            # Walking past the end reaches _EmptyPList, whose ``first``
            # raises AttributeError; translate to the sequence convention.
            raise IndexError("PList index out of range") from e
    def _drop(self, count):
        # Advance ``count`` nodes and return the remaining tail.
        if count < 0:
            raise IndexError("PList index out of range")
        head = self
        while count > 0:
            head = head.rest
            count -= 1
        return head
    def __hash__(self):
        return hash(tuple(self))
    def remove(self, elem):
        """
        Return new list with first element equal to elem removed. O(k) where k is the position
        of the element that is removed.
        Raises ValueError if no matching element is found.
        >>> plist([1, 2, 1]).remove(1)
        plist([2, 1])
        """
        builder = _PListBuilder()
        head = self
        while head:
            if head.first == elem:
                # Prefix is rebuilt; the tail after elem is shared.
                return builder.append_plist(head.rest)
            builder.append_elem(head.first)
            head = head.rest
        raise ValueError('{0} not found in PList'.format(elem))
class PList(_PListBase):
    """
    Classical Lisp style singly linked list. Adding elements to the head using cons is O(1).
    Element access is O(k) where k is the position of the element in the list. Taking the
    length of the list is O(n).
    Fully supports the Sequence and Hashable protocols including indexing and slicing but
    if you need fast random access go for the PVector instead.
    Do not instantiate directly, instead use the factory functions :py:func:`l` or :py:func:`plist` to
    create an instance.
    Some examples:
    >>> x = plist([1, 2])
    >>> y = x.cons(3)
    >>> x
    plist([1, 2])
    >>> y
    plist([3, 1, 2])
    >>> y.first
    3
    >>> y.rest == x
    True
    >>> y[:2]
    plist([3, 1])
    """
    __slots__ = ('first', 'rest')
    def __new__(cls, first, rest):
        # Attributes are assigned directly in __new__; there is no __init__.
        instance = super(PList, cls).__new__(cls)
        instance.first = first
        instance.rest = rest
        return instance
    def __bool__(self):
        # A PList node always represents a non-empty list; emptiness is
        # modelled by the _EmptyPList singleton below.
        return True
    __nonzero__ = __bool__
# Register as virtual subclasses of the standard ABCs.
Sequence.register(PList)
Hashable.register(PList)
class _EmptyPList(_PListBase):
    """Singleton representing the empty list; its ``rest`` is itself."""
    __slots__ = ()
    def __bool__(self):
        return False
    __nonzero__ = __bool__
    @property
    def first(self):
        # This AttributeError is what _PListBase.__getitem__ translates
        # into IndexError when indexing past the end.
        raise AttributeError("Empty PList has no first")
    @property
    def rest(self):
        return self
Sequence.register(_EmptyPList)
Hashable.register(_EmptyPList)
# Shared singleton for the empty list.
_EMPTY_PLIST = _EmptyPList()
def plist(iterable=(), reverse=False):
    """
    Creates a new persistent list containing all elements of iterable.
    Optional parameter reverse specifies if the elements should be inserted in
    reverse order or not.
    >>> plist([1, 2, 3])
    plist([1, 2, 3])
    >>> plist([1, 2, 3], reverse=True)
    plist([3, 2, 1])
    """
    # Cons:ing naturally reverses insertion order, so to preserve the input
    # order the elements are consumed back to front.
    items = iterable if reverse else reversed(list(iterable))
    result = _EMPTY_PLIST
    for elem in items:
        result = result.cons(elem)
    return result
def l(*elements):
    """
    Creates a new persistent list containing all arguments.
    >>> l(1, 2, 3)
    plist([1, 2, 3])
    """
    return plist(elements)
| 8,293 | Python | 25.414013 | 106 | 0.566381 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pyrsistent/_helpers.py | from functools import wraps
from pyrsistent._pmap import PMap, pmap
from pyrsistent._pset import PSet, pset
from pyrsistent._pvector import PVector, pvector
def freeze(o, strict=True):
    """
    Recursively convert simple Python containers into pyrsistent versions
    of those containers.
    - list is converted to pvector, recursively
    - dict is converted to pmap, recursively on values (but not keys)
    - set is converted to pset, but not recursively
    - tuple is converted to tuple, recursively.
    If strict == True (default):
    - freeze is called on elements of pvectors
    - freeze is called on values of pmaps
    Sets and dict keys are not recursively frozen because they do not contain
    mutable data by convention. The main exception to this rule is that
    dict keys and set elements are often instances of mutable objects that
    support hash-by-id, which this function can't convert anyway.
    >>> freeze(set([1, 2]))
    pset([1, 2])
    >>> freeze([1, {'a': 3}])
    pvector([1, pmap({'a': 3})])
    >>> freeze((1, []))
    (1, pvector([]))
    """
    def _freeze_child(child):
        return freeze(child, strict)
    kind = type(o)
    if kind is dict or (strict and isinstance(o, PMap)):
        return pmap({key: _freeze_child(value) for key, value in o.items()})
    if kind is list or (strict and isinstance(o, PVector)):
        return pvector(_freeze_child(item) for item in o)
    if kind is tuple:
        return tuple(_freeze_child(item) for item in o)
    if kind is set:
        # impossible to have anything that needs freezing inside a set or pset
        return pset(o)
    return o
def thaw(o, strict=True):
    """
    Recursively convert pyrsistent containers into simple Python containers.
    - pvector is converted to list, recursively
    - pmap is converted to dict, recursively on values (but not keys)
    - pset is converted to set, but not recursively
    - tuple is converted to tuple, recursively.
    If strict == True (the default):
    - thaw is called on elements of lists
    - thaw is called on values in dicts
    >>> from pyrsistent import s, m, v
    >>> thaw(s(1, 2))
    {1, 2}
    >>> thaw(v(1, m(a=3)))
    [1, {'a': 3}]
    >>> thaw((1, v()))
    (1, [])
    """
    def _thaw_child(child):
        return thaw(child, strict)
    kind = type(o)
    if isinstance(o, PVector) or (strict and kind is list):
        return [_thaw_child(item) for item in o]
    if isinstance(o, PMap) or (strict and kind is dict):
        return {key: _thaw_child(value) for key, value in o.items()}
    if kind is tuple:
        return tuple(_thaw_child(item) for item in o)
    if isinstance(o, PSet):
        # impossible to thaw inside psets or sets
        return set(o)
    return o
def mutant(fn):
    """
    Convenience decorator to isolate mutation to within the decorated function (with respect
    to the input arguments).
    All arguments to the decorated function will be frozen so that they are guaranteed not to change.
    The return value is also frozen.
    """
    @wraps(fn)
    def inner_f(*args, **kwargs):
        # Freeze positional arguments, keyword items, and the result alike.
        return freeze(fn(*[freeze(e) for e in args], **dict(freeze(item) for item in kwargs.items())))
    return inner_f
| 3,232 | Python | 31.989796 | 102 | 0.641708 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pyrsistent/__init__.py | # -*- coding: utf-8 -*-
from pyrsistent._pmap import pmap, m, PMap
from pyrsistent._pvector import pvector, v, PVector
from pyrsistent._pset import pset, s, PSet
from pyrsistent._pbag import pbag, b, PBag
from pyrsistent._plist import plist, l, PList
from pyrsistent._pdeque import pdeque, dq, PDeque
from pyrsistent._checked_types import (
CheckedPMap, CheckedPVector, CheckedPSet, InvariantException, CheckedKeyTypeError,
CheckedValueTypeError, CheckedType, optional)
from pyrsistent._field_common import (
field, PTypeError, pset_field, pmap_field, pvector_field)
from pyrsistent._precord import PRecord
from pyrsistent._pclass import PClass, PClassMeta
from pyrsistent._immutable import immutable
from pyrsistent._helpers import freeze, thaw, mutant
from pyrsistent._transformations import inc, discard, rex, ny
from pyrsistent._toolz import get_in
__all__ = ('pmap', 'm', 'PMap',
'pvector', 'v', 'PVector',
'pset', 's', 'PSet',
'pbag', 'b', 'PBag',
'plist', 'l', 'PList',
'pdeque', 'dq', 'PDeque',
'CheckedPMap', 'CheckedPVector', 'CheckedPSet', 'InvariantException', 'CheckedKeyTypeError', 'CheckedValueTypeError', 'CheckedType', 'optional',
'PRecord', 'field', 'pset_field', 'pmap_field', 'pvector_field',
'PClass', 'PClassMeta',
'immutable',
'freeze', 'thaw', 'mutant',
'get_in',
'inc', 'discard', 'rex', 'ny')
| 1,479 | Python | 29.833333 | 155 | 0.656525 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pyrsistent/_toolz.py | """
Functionality copied from the toolz package to avoid having
to add toolz as a dependency.
See https://github.com/pytoolz/toolz/.
toolz is released under BSD licence. Below is the licence text
from toolz as it appeared when copying the code.
--------------------------------------------------------------
Copyright (c) 2013 Matthew Rocklin
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
a. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
b. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
c. Neither the name of toolz nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
DAMAGE.
"""
import operator
from functools import reduce
def get_in(keys, coll, default=None, no_default=False):
    """
    Fetch the nested value ``coll[k0][k1]...[kn]`` for ``keys == [k0, ..., kn]``.

    NB: Straight port of the ``get_in`` found in the toolz library
    (https://github.com/pytoolz/toolz/). It works with persistent data
    structures as well as the corresponding structures from the stdlib.

    If any lookup step fails, ``default`` is returned instead, unless
    ``no_default`` is true, in which case the underlying KeyError,
    IndexError or TypeError propagates.

    ``get_in`` is a generalization of ``operator.getitem`` for nested data
    structures such as dictionaries and lists.

    >>> from pyrsistent import freeze
    >>> transaction = freeze({'name': 'Alice',
    ...                       'purchase': {'items': ['Apple', 'Orange'],
    ...                                    'costs': [0.50, 1.25]},
    ...                       'credit card': '5555-1234-1234-1234'})
    >>> get_in(['purchase', 'items', 0], transaction)
    'Apple'
    >>> get_in(['name'], transaction)
    'Alice'
    >>> get_in(['purchase', 'total'], transaction)
    >>> get_in(['purchase', 'total'], transaction, 0)
    0
    >>> get_in(['y'], {}, no_default=True)
    Traceback (most recent call last):
        ...
    KeyError: 'y'
    """
    try:
        # Walk the key path one step at a time (equivalent to
        # reduce(operator.getitem, keys, coll)).
        node = coll
        for key in keys:
            node = node[key]
        return node
    except (KeyError, IndexError, TypeError):
        if no_default:
            raise
        return default
| 3,425 | Python | 39.785714 | 75 | 0.676204 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pyrsistent/_pvector.py | from abc import abstractmethod, ABCMeta
from collections.abc import Sequence, Hashable
from numbers import Integral
import operator
from pyrsistent._transformations import transform
def _bitcount(val):
return bin(val).count("1")
# Trie geometry: nodes have 32 children, so each level of the trie consumes
# SHIFT (= 5) bits of the index and BIT_MASK extracts the within-node offset.
BRANCH_FACTOR = 32
BIT_MASK = BRANCH_FACTOR - 1
SHIFT = _bitcount(BIT_MASK)
def compare_pvector(v, other, operator):
    """Apply ``operator`` to ``v`` and ``other``, comparing by list content."""
    # Realize the vector as a plain list; a PVector operand is realized too,
    # anything else is handed to the operator unchanged.
    lhs = v.tolist()
    rhs = other.tolist() if isinstance(other, PVector) else other
    return operator(lhs, rhs)
def _index_or_slice(index, stop):
if stop is None:
return index
return slice(index, stop)
class PythonPVector(object):
    """
    Support structure for PVector that implements structural sharing for vectors using a trie.

    Internal layout (see ``__slots__``):
      _count       -- total number of elements in the vector
      _shift       -- number of index bits consumed above the leaf level of the trie
      _root        -- root node of the trie (nested lists, BRANCH_FACTOR wide)
      _tail        -- rightmost, not-yet-pushed leaf (a plain list, < BRANCH_FACTOR items)
      _tail_offset -- index of the first element stored in _tail
    """
    __slots__ = ('_count', '_shift', '_root', '_tail', '_tail_offset', '__weakref__')
    def __new__(cls, count, shift, root, tail):
        self = super(PythonPVector, cls).__new__(cls)
        self._count = count
        self._shift = shift
        self._root = root
        self._tail = tail
        # Derived attribute stored for performance
        self._tail_offset = self._count - len(self._tail)
        return self
    def __len__(self):
        return self._count
    def __getitem__(self, index):
        if isinstance(index, slice):
            # There are more conditions than the below where it would be OK to
            # return ourselves, implement those...
            if index.start is None and index.stop is None and index.step is None:
                return self
            # This is a bit nasty realizing the whole structure as a list before
            # slicing it but it is the fastest way I've found to date, and it's easy :-)
            return _EMPTY_PVECTOR.extend(self.tolist()[index])
        if index < 0:
            index += self._count
        # Locate the leaf holding the element, then pick the slot within it.
        return PythonPVector._node_for(self, index)[index & BIT_MASK]
    def __add__(self, other):
        return self.extend(other)
    def __repr__(self):
        return 'pvector({0})'.format(str(self.tolist()))
    def __str__(self):
        return self.__repr__()
    def __iter__(self):
        # This is kind of lazy and will produce some memory overhead but it is the fastest
        # method by far of those tried since it uses the speed of the built in python list directly.
        return iter(self.tolist())
    def __ne__(self, other):
        return not self.__eq__(other)
    def __eq__(self, other):
        # The cheap length comparison short-circuits before the O(n) element compare.
        return self is other or (hasattr(other, '__len__') and self._count == len(other)) and compare_pvector(self, other, operator.eq)
    def __gt__(self, other):
        return compare_pvector(self, other, operator.gt)
    def __lt__(self, other):
        return compare_pvector(self, other, operator.lt)
    def __ge__(self, other):
        return compare_pvector(self, other, operator.ge)
    def __le__(self, other):
        return compare_pvector(self, other, operator.le)
    def __mul__(self, times):
        if times <= 0 or self is _EMPTY_PVECTOR:
            return _EMPTY_PVECTOR
        if times == 1:
            return self
        return _EMPTY_PVECTOR.extend(times * self.tolist())
    __rmul__ = __mul__
    def _fill_list(self, node, shift, the_list):
        # Depth-first, left-to-right traversal; shift == 0 means `node` is a leaf list.
        if shift:
            shift -= SHIFT
            for n in node:
                self._fill_list(n, shift, the_list)
        else:
            the_list.extend(node)
    def tolist(self):
        """
        The fastest way to convert the vector into a python list.
        """
        the_list = []
        self._fill_list(self._root, self._shift, the_list)
        the_list.extend(self._tail)
        return the_list
    def _totuple(self):
        """
        Returns the content as a python tuple.
        """
        return tuple(self.tolist())
    def __hash__(self):
        # Taking the easy way out again...
        return hash(self._totuple())
    def transform(self, *transformations):
        return transform(self, transformations)
    def __reduce__(self):
        # Pickling support
        return pvector, (self.tolist(),)
    def mset(self, *args):
        # args is a flat sequence of alternating index, value pairs.
        if len(args) % 2:
            raise TypeError("mset expected an even number of arguments")
        evolver = self.evolver()
        for i in range(0, len(args), 2):
            evolver[args[i]] = args[i+1]
        return evolver.persistent()
    class Evolver(object):
        """
        Mutable view over a PythonPVector. Appended elements beyond the original
        _count live in _extra_tail; _dirty_nodes tracks trie nodes that have
        already been privately copied and may therefore be mutated in place;
        _cached_leafs memoizes copied leaves by (index >> SHIFT) for fast re-writes.
        """
        __slots__ = ('_count', '_shift', '_root', '_tail', '_tail_offset', '_dirty_nodes',
                     '_extra_tail', '_cached_leafs', '_orig_pvector')
        def __init__(self, v):
            self._reset(v)
        def __getitem__(self, index):
            if not isinstance(index, Integral):
                raise TypeError("'%s' object cannot be interpreted as an index" % type(index).__name__)
            if index < 0:
                index += self._count + len(self._extra_tail)
            if self._count <= index < self._count + len(self._extra_tail):
                return self._extra_tail[index - self._count]
            return PythonPVector._node_for(self, index)[index & BIT_MASK]
        def _reset(self, v):
            # Re-base the evolver on vector v, discarding all pending changes.
            self._count = v._count
            self._shift = v._shift
            self._root = v._root
            self._tail = v._tail
            self._tail_offset = v._tail_offset
            self._dirty_nodes = {}
            self._cached_leafs = {}
            self._extra_tail = []
            self._orig_pvector = v
        def append(self, element):
            self._extra_tail.append(element)
            return self
        def extend(self, iterable):
            self._extra_tail.extend(iterable)
            return self
        def set(self, index, val):
            self[index] = val
            return self
        def __setitem__(self, index, val):
            if not isinstance(index, Integral):
                raise TypeError("'%s' object cannot be interpreted as an index" % type(index).__name__)
            if index < 0:
                index += self._count + len(self._extra_tail)
            if 0 <= index < self._count:
                # Index falls inside the original vector: write through a
                # privately copied leaf (cached), the tail, or the trie.
                node = self._cached_leafs.get(index >> SHIFT)
                if node:
                    node[index & BIT_MASK] = val
                elif index >= self._tail_offset:
                    if id(self._tail) not in self._dirty_nodes:
                        # Copy-on-first-write of the shared tail.
                        self._tail = list(self._tail)
                        self._dirty_nodes[id(self._tail)] = True
                        self._cached_leafs[index >> SHIFT] = self._tail
                    self._tail[index & BIT_MASK] = val
                else:
                    self._root = self._do_set(self._shift, self._root, index, val)
            elif self._count <= index < self._count + len(self._extra_tail):
                self._extra_tail[index - self._count] = val
            elif index == self._count + len(self._extra_tail):
                # Setting one step past the end is an append.
                self._extra_tail.append(val)
            else:
                raise IndexError("Index out of range: %s" % (index,))
        def _do_set(self, level, node, i, val):
            # Copy-on-first-write down the trie path; already-dirty nodes are reused.
            if id(node) in self._dirty_nodes:
                ret = node
            else:
                ret = list(node)
                self._dirty_nodes[id(ret)] = True
            if level == 0:
                ret[i & BIT_MASK] = val
                self._cached_leafs[i >> SHIFT] = ret
            else:
                sub_index = (i >> level) & BIT_MASK # >>>
                ret[sub_index] = self._do_set(level - SHIFT, node[sub_index], i, val)
            return ret
        def delete(self, index):
            del self[index]
            return self
        def __delitem__(self, key):
            if self._orig_pvector:
                # All structural sharing bets are off, base evolver on _extra_tail only
                l = PythonPVector(self._count, self._shift, self._root, self._tail).tolist()
                l.extend(self._extra_tail)
                self._reset(_EMPTY_PVECTOR)
                self._extra_tail = l
            del self._extra_tail[key]
        def persistent(self):
            result = self._orig_pvector
            if self.is_dirty():
                result = PythonPVector(self._count, self._shift, self._root, self._tail).extend(self._extra_tail)
                self._reset(result)
            return result
        def __len__(self):
            return self._count + len(self._extra_tail)
        def is_dirty(self):
            return bool(self._dirty_nodes or self._extra_tail)
    def evolver(self):
        return PythonPVector.Evolver(self)
    def set(self, i, val):
        # This method could be implemented by a call to mset() but doing so would cause
        # a ~5 X performance penalty on PyPy (considered the primary platform for this implementation
        # of PVector) so we're keeping this implementation for now.
        if not isinstance(i, Integral):
            raise TypeError("'%s' object cannot be interpreted as an index" % type(i).__name__)
        if i < 0:
            i += self._count
        if 0 <= i < self._count:
            if i >= self._tail_offset:
                # Element lives in the tail: copy the tail and write the slot.
                new_tail = list(self._tail)
                new_tail[i & BIT_MASK] = val
                return PythonPVector(self._count, self._shift, self._root, new_tail)
            return PythonPVector(self._count, self._shift, self._do_set(self._shift, self._root, i, val), self._tail)
        if i == self._count:
            # Setting one step past the end is an append.
            return self.append(val)
        raise IndexError("Index out of range: %s" % (i,))
    def _do_set(self, level, node, i, val):
        # Path-copying write: copy each node on the way down to the leaf.
        ret = list(node)
        if level == 0:
            ret[i & BIT_MASK] = val
        else:
            sub_index = (i >> level) & BIT_MASK # >>>
            ret[sub_index] = self._do_set(level - SHIFT, node[sub_index], i, val)
        return ret
    @staticmethod
    def _node_for(pvector_like, i):
        # Return the leaf (a plain list) holding index i, walking from the root
        # and consuming SHIFT bits of the index per trie level.
        if 0 <= i < pvector_like._count:
            if i >= pvector_like._tail_offset:
                return pvector_like._tail
            node = pvector_like._root
            for level in range(pvector_like._shift, 0, -SHIFT):
                node = node[(i >> level) & BIT_MASK] # >>>
            return node
        raise IndexError("Index out of range: %s" % (i,))
    def _create_new_root(self):
        new_shift = self._shift
        # Overflow root?
        if (self._count >> SHIFT) > (1 << self._shift): # >>>
            new_root = [self._root, self._new_path(self._shift, self._tail)]
            new_shift += SHIFT
        else:
            new_root = self._push_tail(self._shift, self._root, self._tail)
        return new_root, new_shift
    def append(self, val):
        if len(self._tail) < BRANCH_FACTOR:
            new_tail = list(self._tail)
            new_tail.append(val)
            return PythonPVector(self._count + 1, self._shift, self._root, new_tail)
        # Full tail, push into tree
        new_root, new_shift = self._create_new_root()
        return PythonPVector(self._count + 1, new_shift, new_root, [val])
    def _new_path(self, level, node):
        # Build a chain of single-child nodes of the required depth ending in `node`.
        if level == 0:
            return node
        return [self._new_path(level - SHIFT, node)]
    def _mutating_insert_tail(self):
        self._root, self._shift = self._create_new_root()
        self._tail = []
    def _mutating_fill_tail(self, offset, sequence):
        # Copy as much of sequence[offset:] as fits into the current tail.
        max_delta_len = BRANCH_FACTOR - len(self._tail)
        delta = sequence[offset:offset + max_delta_len]
        self._tail.extend(delta)
        delta_len = len(delta)
        self._count += delta_len
        return offset + delta_len
    def _mutating_extend(self, sequence):
        offset = 0
        sequence_len = len(sequence)
        while offset < sequence_len:
            offset = self._mutating_fill_tail(offset, sequence)
            if len(self._tail) == BRANCH_FACTOR:
                self._mutating_insert_tail()
        self._tail_offset = self._count - len(self._tail)
    def extend(self, obj):
        # Mutates the new vector directly for efficiency but that's only an
        # implementation detail, once it is returned it should be considered immutable
        l = obj.tolist() if isinstance(obj, PythonPVector) else list(obj)
        if l:
            new_vector = self.append(l[0])
            new_vector._mutating_extend(l[1:])
            return new_vector
        return self
    def _push_tail(self, level, parent, tail_node):
        """
        Insert ``tail_node`` as the rightmost leaf of the trie rooted at ``parent``:
        if parent is one level above the leaves, append the node directly;
        else, if the index maps to an existing child, push the node one more level down;
        else, allocate a fresh single-child path of the remaining depth.
        Returns a copy of ``parent`` with the node placed.
        """
        ret = list(parent)
        if level == SHIFT:
            ret.append(tail_node)
            return ret
        sub_index = ((self._count - 1) >> level) & BIT_MASK # >>>
        if len(parent) > sub_index:
            ret[sub_index] = self._push_tail(level - SHIFT, parent[sub_index], tail_node)
            return ret
        ret.append(self._new_path(level - SHIFT, tail_node))
        return ret
    def index(self, value, *args, **kwargs):
        return self.tolist().index(value, *args, **kwargs)
    def count(self, value):
        return self.tolist().count(value)
    def delete(self, index, stop=None):
        l = self.tolist()
        del l[_index_or_slice(index, stop)]
        return _EMPTY_PVECTOR.extend(l)
    def remove(self, value):
        l = self.tolist()
        l.remove(value)
        return _EMPTY_PVECTOR.extend(l)
class PVector(metaclass=ABCMeta):
    """
    Persistent vector implementation. Meant as a replacement for the cases where you would normally
    use a Python list.
    Do not instantiate directly, instead use the factory functions :py:func:`v` and :py:func:`pvector` to
    create an instance.
    Heavily influenced by the persistent vector available in Clojure. Initially this was more or
    less just a port of the Java code for the Clojure vector. It has since been modified and to
    some extent optimized for usage in Python.
    The vector is organized as a trie, any mutating method will return a new vector that contains the changes. No
    updates are done to the original vector. Structural sharing between vectors are applied where possible to save
    space and to avoid making complete copies.
    This structure corresponds most closely to the built in list type and is intended as a replacement. Where the
    semantics are the same (more or less) the same function names have been used but for some cases it is not possible,
    for example assignments.
    The PVector implements the Sequence protocol and is Hashable.
    Inserts are amortized O(1). Random access is log32(n) where n is the size of the vector.
    The following are examples of some common operations on persistent vectors:
    >>> p = v(1, 2, 3)
    >>> p2 = p.append(4)
    >>> p3 = p2.extend([5, 6, 7])
    >>> p
    pvector([1, 2, 3])
    >>> p2
    pvector([1, 2, 3, 4])
    >>> p3
    pvector([1, 2, 3, 4, 5, 6, 7])
    >>> p3[5]
    6
    >>> p.set(1, 99)
    pvector([1, 99, 3])
    >>>
    """
    @abstractmethod
    def __len__(self):
        """
        >>> len(v(1, 2, 3))
        3
        """
    @abstractmethod
    def __getitem__(self, index):
        """
        Get value at index. Full slicing support.
        >>> v1 = v(5, 6, 7, 8)
        >>> v1[2]
        7
        >>> v1[1:3]
        pvector([6, 7])
        """
    @abstractmethod
    def __add__(self, other):
        """
        >>> v1 = v(1, 2)
        >>> v2 = v(3, 4)
        >>> v1 + v2
        pvector([1, 2, 3, 4])
        """
    @abstractmethod
    def __mul__(self, times):
        """
        >>> v1 = v(1, 2)
        >>> 3 * v1
        pvector([1, 2, 1, 2, 1, 2])
        """
    @abstractmethod
    def __hash__(self):
        """
        >>> v1 = v(1, 2, 3)
        >>> v2 = v(1, 2, 3)
        >>> hash(v1) == hash(v2)
        True
        """
    @abstractmethod
    def evolver(self):
        """
        Create a new evolver for this pvector. The evolver acts as a mutable view of the vector
        with "transaction like" semantics. No part of the underlying vector is updated, it is still
        fully immutable. Furthermore multiple evolvers created from the same pvector do not
        interfere with each other.
        You may want to use an evolver instead of working directly with the pvector in the
        following cases:
        * Multiple updates are done to the same vector and the intermediate results are of no
          interest. In this case using an evolver may be a more efficient and easier to work with.
        * You need to pass a vector into a legacy function or a function that you have no control
          over which performs in place mutations of lists. In this case pass an evolver instance
          instead and then create a new pvector from the evolver once the function returns.
        The following example illustrates a typical workflow when working with evolvers. It also
        displays most of the API (which is kept small by design, you should not be tempted to
        use evolvers in excess ;-)).
        Create the evolver and perform various mutating updates to it:
        >>> v1 = v(1, 2, 3, 4, 5)
        >>> e = v1.evolver()
        >>> e[1] = 22
        >>> _ = e.append(6)
        >>> _ = e.extend([7, 8, 9])
        >>> e[8] += 1
        >>> len(e)
        9
        The underlying pvector remains the same:
        >>> v1
        pvector([1, 2, 3, 4, 5])
        The changes are kept in the evolver. An updated pvector can be created using the
        persistent() function on the evolver.
        >>> v2 = e.persistent()
        >>> v2
        pvector([1, 22, 3, 4, 5, 6, 7, 8, 10])
        The new pvector will share data with the original pvector in the same way that would have
        been done if only using operations on the pvector.
        """
    @abstractmethod
    def mset(self, *args):
        """
        Return a new vector with elements in specified positions replaced by values (multi set).
        Elements on even positions in the argument list are interpreted as indexes while
        elements on odd positions are considered values.
        >>> v1 = v(1, 2, 3)
        >>> v1.mset(0, 11, 2, 33)
        pvector([11, 2, 33])
        """
    @abstractmethod
    def set(self, i, val):
        """
        Return a new vector with element at position i replaced with val. The original vector remains unchanged.
        Setting a value one step beyond the end of the vector is equal to appending. Setting beyond that will
        result in an IndexError.
        >>> v1 = v(1, 2, 3)
        >>> v1.set(1, 4)
        pvector([1, 4, 3])
        >>> v1.set(3, 4)
        pvector([1, 2, 3, 4])
        >>> v1.set(-1, 4)
        pvector([1, 2, 4])
        """
    @abstractmethod
    def append(self, val):
        """
        Return a new vector with val appended.
        >>> v1 = v(1, 2)
        >>> v1.append(3)
        pvector([1, 2, 3])
        """
    @abstractmethod
    def extend(self, obj):
        """
        Return a new vector with all values in obj appended to it. Obj may be another
        PVector or any other Iterable.
        >>> v1 = v(1, 2, 3)
        >>> v1.extend([4, 5])
        pvector([1, 2, 3, 4, 5])
        """
    @abstractmethod
    def index(self, value, *args, **kwargs):
        """
        Return first index of value. Additional indexes may be supplied to limit the search to a
        sub range of the vector.
        >>> v1 = v(1, 2, 3, 4, 3)
        >>> v1.index(3)
        2
        >>> v1.index(3, 3, 5)
        4
        """
    @abstractmethod
    def count(self, value):
        """
        Return the number of times that value appears in the vector.
        >>> v1 = v(1, 4, 3, 4)
        >>> v1.count(4)
        2
        """
    @abstractmethod
    def transform(self, *transformations):
        """
        Transform arbitrarily complex combinations of PVectors and PMaps. A transformation
        consists of two parts. One match expression that specifies which elements to transform
        and one transformation function that performs the actual transformation.
        >>> from pyrsistent import freeze, ny
        >>> news_paper = freeze({'articles': [{'author': 'Sara', 'content': 'A short article'},
        ...                                   {'author': 'Steve', 'content': 'A slightly longer article'}],
        ...                      'weather': {'temperature': '11C', 'wind': '5m/s'}})
        >>> short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:25] + '...' if len(c) > 25 else c)
        >>> very_short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:15] + '...' if len(c) > 15 else c)
        >>> very_short_news.articles[0].content
        'A short article'
        >>> very_short_news.articles[1].content
        'A slightly long...'
        When nothing has been transformed the original data structure is kept
        >>> short_news is news_paper
        True
        >>> very_short_news is news_paper
        False
        >>> very_short_news.articles[0] is news_paper.articles[0]
        True
        """
    @abstractmethod
    def delete(self, index, stop=None):
        """
        Delete a portion of the vector by index or range.
        >>> v1 = v(1, 2, 3, 4, 5)
        >>> v1.delete(1)
        pvector([1, 3, 4, 5])
        >>> v1.delete(1, 3)
        pvector([1, 4, 5])
        """
    @abstractmethod
    def remove(self, value):
        """
        Remove the first occurrence of a value from the vector.
        >>> v1 = v(1, 2, 3, 2, 1)
        >>> v2 = v1.remove(1)
        >>> v2
        pvector([2, 3, 2, 1])
        >>> v2.remove(1)
        pvector([2, 3, 2])
        """
# Shared empty-vector singleton plus ABC registrations: PythonPVector is a
# virtual subclass of PVector, which in turn registers as a Sequence/Hashable.
_EMPTY_PVECTOR = PythonPVector(0, SHIFT, [], [])
PVector.register(PythonPVector)
Sequence.register(PVector)
Hashable.register(PVector)
def python_pvector(iterable=()):
    """
    Build a persistent vector holding the elements of ``iterable``
    using the pure-Python trie implementation.
    >>> v1 = pvector([1, 2, 3])
    >>> v1
    pvector([1, 2, 3])
    """
    return _EMPTY_PVECTOR.extend(iterable)
try:
    # Use the C extension as underlying trie implementation if it is available,
    # unless the user has explicitly opted out via the environment variable.
    import os
    if os.environ.get('PYRSISTENT_NO_C_EXTENSION'):
        pvector = python_pvector
    else:
        from pvectorc import pvector
        # Register the C implementation's concrete type as a PVector too.
        PVector.register(type(pvector()))
except ImportError:
    # No C extension built/installed: fall back to the pure-Python version.
    pvector = python_pvector
def v(*elements):
    """
    Shorthand factory: build a persistent vector from the positional arguments.
    >>> v1 = v(1, 2, 3)
    >>> v1
    pvector([1, 2, 3])
    """
    return pvector(elements)
| 22,694 | Python | 30.875 | 135 | 0.553935 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pyrsistent/_pset.py | from collections.abc import Set, Hashable
import sys
from pyrsistent._pmap import pmap
class PSet(object):
    """
    Persistent set implementation. Built on top of the persistent map. The set supports all operations
    in the Set protocol and is Hashable.
    Do not instantiate directly, instead use the factory functions :py:func:`s` or :py:func:`pset`
    to create an instance.
    Random access and insert is log32(n) where n is the size of the set.
    Some examples:
    >>> s = pset([1, 2, 3, 1])
    >>> s2 = s.add(4)
    >>> s3 = s2.remove(2)
    >>> s
    pset([1, 2, 3])
    >>> s2
    pset([1, 2, 3, 4])
    >>> s3
    pset([1, 3, 4])
    """
    # _map is a PMap whose keys are the set members (values are always True).
    __slots__ = ('_map', '__weakref__')
    def __new__(cls, m):
        self = super(PSet, cls).__new__(cls)
        self._map = m
        return self
    def __contains__(self, element):
        return element in self._map
    def __iter__(self):
        return iter(self._map)
    def __len__(self):
        return len(self._map)
    def __repr__(self):
        if not self:
            return 'p' + str(set(self))
        return 'pset([{0}])'.format(str(set(self))[1:-1])
    def __str__(self):
        return self.__repr__()
    def __hash__(self):
        return hash(self._map)
    def __reduce__(self):
        # Pickling support
        return pset, (list(self),)
    @classmethod
    def _from_iterable(cls, it, pre_size=8):
        # Build the backing map with every member mapped to True.
        return PSet(pmap(dict((k, True) for k in it), pre_size=pre_size))
    def add(self, element):
        """
        Return a new PSet with element added
        >>> s1 = s(1, 2)
        >>> s1.add(3)
        pset([1, 2, 3])
        """
        return self.evolver().add(element).persistent()
    def update(self, iterable):
        """
        Return a new PSet with elements in iterable added
        >>> s1 = s(1, 2)
        >>> s1.update([3, 4, 4])
        pset([1, 2, 3, 4])
        """
        e = self.evolver()
        for element in iterable:
            e.add(element)
        return e.persistent()
    def remove(self, element):
        """
        Return a new PSet with element removed. Raises KeyError if element is not present.
        >>> s1 = s(1, 2)
        >>> s1.remove(2)
        pset([1])
        """
        if element in self._map:
            return self.evolver().remove(element).persistent()
        raise KeyError("Element '%s' not present in PSet" % repr(element))
    def discard(self, element):
        """
        Return a new PSet with element removed. Returns itself if element is not present.
        """
        if element in self._map:
            return self.evolver().remove(element).persistent()
        return self
    class _Evolver(object):
        # Thin wrapper delegating to the underlying PMap's evolver.
        __slots__ = ('_original_pset', '_pmap_evolver')
        def __init__(self, original_pset):
            self._original_pset = original_pset
            self._pmap_evolver = original_pset._map.evolver()
        def add(self, element):
            self._pmap_evolver[element] = True
            return self
        def remove(self, element):
            del self._pmap_evolver[element]
            return self
        def is_dirty(self):
            return self._pmap_evolver.is_dirty()
        def persistent(self):
            if not self.is_dirty():
                return self._original_pset
            return PSet(self._pmap_evolver.persistent())
        def __len__(self):
            return len(self._pmap_evolver)
    def copy(self):
        # Safe because the set is immutable.
        return self
    def evolver(self):
        """
        Create a new evolver for this pset. For a discussion on evolvers in general see the
        documentation for the pvector evolver.
        Create the evolver and perform various mutating updates to it:
        >>> s1 = s(1, 2, 3)
        >>> e = s1.evolver()
        >>> _ = e.add(4)
        >>> len(e)
        4
        >>> _ = e.remove(1)
        The underlying pset remains the same:
        >>> s1
        pset([1, 2, 3])
        The changes are kept in the evolver. An updated pmap can be created using the
        persistent() function on the evolver.
        >>> s2 = e.persistent()
        >>> s2
        pset([2, 3, 4])
        The new pset will share data with the original pset in the same way that would have
        been done if only using operations on the pset.
        """
        return PSet._Evolver(self)
    # All the operations and comparisons you would expect on a set.
    #
    # This is not very beautiful. If we avoid inheriting from PSet we can use the
    # __slots__ concepts (which requires a new style class) and hopefully save some memory.
    __le__ = Set.__le__
    __lt__ = Set.__lt__
    __gt__ = Set.__gt__
    __ge__ = Set.__ge__
    __eq__ = Set.__eq__
    __ne__ = Set.__ne__
    __and__ = Set.__and__
    __or__ = Set.__or__
    __sub__ = Set.__sub__
    __xor__ = Set.__xor__
    # Method-name aliases matching the built-in set API.
    issubset = __le__
    issuperset = __ge__
    union = __or__
    intersection = __and__
    difference = __sub__
    symmetric_difference = __xor__
    isdisjoint = Set.isdisjoint
# Register PSet as a virtual Set/Hashable and create the shared empty singleton.
Set.register(PSet)
Hashable.register(PSet)
_EMPTY_PSET = PSet(pmap())
def pset(iterable=(), pre_size=8):
    """
    Create a persistent set from ``iterable``. The optional ``pre_size``
    is a sizing hint equivalent to the one accepted by :py:func:`pmap`.
    >>> s1 = pset([1, 2, 3, 2])
    >>> s1
    pset([1, 2, 3])
    """
    if iterable:
        return PSet._from_iterable(iterable, pre_size=pre_size)
    # Empty (or falsy) input: hand back the shared empty singleton.
    return _EMPTY_PSET
def s(*elements):
    """
    Shorthand factory: create a persistent set from the positional arguments.
    >>> s1 = s(1, 2, 3, 2)
    >>> s1
    pset([1, 2, 3])
    """
    return pset(elements)
| 5,693 | Python | 23.973684 | 102 | 0.540664 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pyrsistent/typing.pyi | # flake8: noqa: E704
# from https://gist.github.com/WuTheFWasThat/091a17d4b5cab597dfd5d4c2d96faf09
# Stubs for pyrsistent (Python 3.6)
#
from typing import Any
from typing import Callable
from typing import Dict
from typing import Generic
from typing import Hashable
from typing import Iterator
from typing import Iterable
from typing import List
from typing import Mapping
from typing import Optional
from typing import Sequence
from typing import AbstractSet
from typing import Sized
from typing import Set
from typing import Tuple
from typing import TypeVar
from typing import Type
from typing import Union
from typing import overload
# Type variables shared by all the generic stubs below.
T = TypeVar('T')
KT = TypeVar('KT')
VT = TypeVar('VT')
class PMap(Mapping[KT, VT], Hashable):
    # Stub for the persistent (immutable) mapping; all "mutators" return a new PMap.
    def __add__(self, other: PMap[KT, VT]) -> PMap[KT, VT]: ...
    def __getitem__(self, key: KT) -> VT: ...
    def __getattr__(self, key: str) -> VT: ...
    def __hash__(self) -> int: ...
    def __iter__(self) -> Iterator[KT]: ...
    def __len__(self) -> int: ...
    def copy(self) -> PMap[KT, VT]: ...
    def discard(self, key: KT) -> PMap[KT, VT]: ...
    def evolver(self) -> PMapEvolver[KT, VT]: ...
    def iteritems(self) -> Iterable[Tuple[KT, VT]]: ...
    def iterkeys(self) -> Iterable[KT]: ...
    def itervalues(self) -> Iterable[VT]: ...
    def remove(self, key: KT) -> PMap[KT, VT]: ...
    def set(self, key: KT, val: VT) -> PMap[KT, VT]: ...
    def transform(self, *transformations: Any) -> PMap[KT, VT]: ...
    # NOTE(review): update/update_with lack precise return annotations — confirm against _pmap.py.
    def update(self, *args: Mapping): ...
    def update_with(self, update_fn: Callable[[VT, VT], VT], *args: Mapping) -> Any: ...
class PMapEvolver(Generic[KT, VT]):
    # Stub for the mutable "transaction" view of a PMap.
    def __delitem__(self, key: KT) -> None: ...
    def __getitem__(self, key: KT) -> VT: ...
    def __len__(self) -> int: ...
    def __setitem__(self, key: KT, val: VT) -> None: ...
    def is_dirty(self) -> bool: ...
    def persistent(self) -> PMap[KT, VT]: ...
    def remove(self, key: KT) -> PMapEvolver[KT, VT]: ...
    def set(self, key: KT, val: VT) -> PMapEvolver[KT, VT]: ...
class PVector(Sequence[T], Hashable):
    # Stub for the persistent (immutable) vector; all "mutators" return a new PVector.
    def __add__(self, other: PVector[T]) -> PVector[T]: ...
    @overload
    def __getitem__(self, index: int) -> T: ...
    @overload
    def __getitem__(self, index: slice) -> PVector[T]: ...
    def __hash__(self) -> int: ...
    def __len__(self) -> int: ...
    # NOTE(review): the implementation multiplies by an int (repeat count), not
    # by another PVector — this annotation looks wrong; confirm against _pvector.py.
    def __mul__(self, other: PVector[T]) -> PVector[T]: ...
    def append(self, val: T) -> PVector[T]: ...
    def delete(self, index: int, stop: Optional[int] = None) -> PVector[T]: ...
    def evolver(self) -> PVectorEvolver[T]: ...
    def extend(self, obj: Iterable[T]) -> PVector[T]: ...
    def tolist(self) -> List[T]: ...
    def mset(self, *args: Iterable[Union[T, int]]) -> PVector[T]: ...
    def remove(self, value: T) -> PVector[T]: ...
    # Not compatible with MutableSequence
    def set(self, i: int, val: T) -> PVector[T]: ...
    def transform(self, *transformations: Any) -> PVector[T]: ...
class PVectorEvolver(Sequence[T], Sized):
    # Stub for the mutable "transaction" view of a PVector.
    def __delitem__(self, i: Union[int, slice]) -> None: ...
    @overload
    def __getitem__(self, index: int) -> T: ...
    # Not actually supported
    @overload
    def __getitem__(self, index: slice) -> PVectorEvolver[T]: ...
    def __len__(self) -> int: ...
    def __setitem__(self, index: int, val: T) -> None: ...
    def append(self, val: T) -> PVectorEvolver[T]: ...
    # NOTE(review): the implementation's delete() takes an *index*, not a value —
    # this parameter annotation looks wrong; confirm against _pvector.py.
    def delete(self, value: T) -> PVectorEvolver[T]: ...
    def extend(self, obj: Iterable[T]) -> PVectorEvolver[T]: ...
    def is_dirty(self) -> bool: ...
    def persistent(self) -> PVector[T]: ...
    def set(self, i: int, val: T) -> PVectorEvolver[T]: ...
class PSet(AbstractSet[T], Hashable):
    # Stub for the persistent (immutable) set; all "mutators" return a new PSet.
    def __contains__(self, element: object) -> bool: ...
    def __hash__(self) -> int: ...
    def __iter__(self) -> Iterator[T]: ...
    def __len__(self) -> int: ...
    def add(self, element: T) -> PSet[T]: ...
    def copy(self) -> PSet[T]: ...
    def difference(self, iterable: Iterable) -> PSet[T]: ...
    def discard(self, element: T) -> PSet[T]: ...
    def evolver(self) -> PSetEvolver[T]: ...
    def intersection(self, iterable: Iterable) -> PSet[T]: ...
    def issubset(self, iterable: Iterable) -> bool: ...
    def issuperset(self, iterable: Iterable) -> bool: ...
    def remove(self, element: T) -> PSet[T]: ...
    def symmetric_difference(self, iterable: Iterable[T]) -> PSet[T]: ...
    def union(self, iterable: Iterable[T]) -> PSet[T]: ...
    def update(self, iterable: Iterable[T]) -> PSet[T]: ...
class PSetEvolver(Generic[T], Sized):
    # Stub for the mutable "transaction" view of a PSet.
    def __len__(self) -> int: ...
    def add(self, element: T) -> PSetEvolver[T]: ...
    def is_dirty(self) -> bool: ...
    def persistent(self) -> PSet[T]: ...
    def remove(self, element: T) -> PSetEvolver[T]: ...
class PBag(Generic[T], Sized, Hashable):
    # Stub for the persistent multiset (bag).
    def __add__(self, other: PBag[T]) -> PBag[T]: ...
    def __and__(self, other: PBag[T]) -> PBag[T]: ...
    def __contains__(self, elem: object) -> bool: ...
    def __hash__(self) -> int: ...
    def __iter__(self) -> Iterator[T]: ...
    def __len__(self) -> int: ...
    def __or__(self, other: PBag[T]) -> PBag[T]: ...
    def __sub__(self, other: PBag[T]) -> PBag[T]: ...
    def add(self, elem: T) -> PBag[T]: ...
    def count(self, elem: T) -> int: ...
    def remove(self, elem: T) -> PBag[T]: ...
    def update(self, iterable: Iterable[T]) -> PBag[T]: ...
class PDeque(Sequence[T], Hashable):
    # Stub for the persistent double-ended queue.
    @overload
    def __getitem__(self, index: int) -> T: ...
    @overload
    def __getitem__(self, index: slice) -> PDeque[T]: ...
    def __hash__(self) -> int: ...
    def __len__(self) -> int: ...
    def __lt__(self, other: PDeque[T]) -> bool: ...
    def append(self, elem: T) -> PDeque[T]: ...
    def appendleft(self, elem: T) -> PDeque[T]: ...
    def extend(self, iterable: Iterable[T]) -> PDeque[T]: ...
    def extendleft(self, iterable: Iterable[T]) -> PDeque[T]: ...
    @property
    def left(self) -> T: ...
    # The real return type is Integral according to what pyrsistent
    # checks at runtime but mypy doesn't deal in numeric.*:
    # https://github.com/python/mypy/issues/2636
    @property
    def maxlen(self) -> int: ...
    def pop(self, count: int = 1) -> PDeque[T]: ...
    def popleft(self, count: int = 1) -> PDeque[T]: ...
    def remove(self, elem: T) -> PDeque[T]: ...
    def reverse(self) -> PDeque[T]: ...
    @property
    def right(self) -> T: ...
    def rotate(self, steps: int) -> PDeque[T]: ...
class PList(Sequence[T], Hashable):
    # Stub for the persistent singly-linked list (cons list).
    @overload
    def __getitem__(self, index: int) -> T: ...
    @overload
    def __getitem__(self, index: slice) -> PList[T]: ...
    def __hash__(self) -> int: ...
    def __len__(self) -> int: ...
    def __lt__(self, other: PList[T]) -> bool: ...
    def __gt__(self, other: PList[T]) -> bool: ...
    def cons(self, elem: T) -> PList[T]: ...
    @property
    def first(self) -> T: ...
    def mcons(self, iterable: Iterable[T]) -> PList[T]: ...
    def remove(self, elem: T) -> PList[T]: ...
    @property
    def rest(self) -> PList[T]: ...
    def reverse(self) -> PList[T]: ...
    def split(self, index: int) -> Tuple[PList[T], PList[T]]: ...
T_PClass = TypeVar('T_PClass', bound='PClass')
class PClass(Hashable):
    # Immutable record base class; fields are declared with field() on subclasses.
    def __new__(cls, **kwargs: Any): ...
    def set(self: T_PClass, *args: Any, **kwargs: Any) -> T_PClass: ...
    @classmethod
    def create(
        cls: Type[T_PClass],
        kwargs: Any,
        _factory_fields: Optional[Any] = ...,
        ignore_extra: bool = ...,
    ) -> T_PClass: ...
    def serialize(self, format: Optional[Any] = ...): ...
    def transform(self, *transformations: Any): ...
    def __eq__(self, other: object): ...
    def __ne__(self, other: object): ...
    def __hash__(self): ...
    def __reduce__(self): ...
    def evolver(self) -> PClassEvolver: ...
    def remove(self: T_PClass, name: Any) -> T_PClass: ...
class PClassEvolver:
    # Mutable batch-edit view over a PClass; persistent() freezes the result.
    def __init__(self, original: Any, initial_dict: Any) -> None: ...
    def __getitem__(self, item: Any): ...
    def set(self, key: Any, value: Any): ...
    def __setitem__(self, key: Any, value: Any) -> None: ...
    def remove(self, item: Any): ...
    def __delitem__(self, item: Any) -> None: ...
    def persistent(self) -> PClass: ...
    def __getattr__(self, item: Any): ...
class CheckedPMap(PMap[KT, VT]):
    # PMap variant that validates key/value types declared on the subclass.
    __key_type__: Type[KT]
    __value_type__: Type[VT]
    def __new__(cls, source: Mapping[KT, VT] = ..., size: int = ...) -> CheckedPMap: ...
    @classmethod
    def create(cls, source_data: Mapping[KT, VT], _factory_fields: Any = ...) -> CheckedPMap[KT, VT]: ...
    def serialize(self, format: Optional[Any] = ...) -> Dict[KT, VT]: ...
class CheckedPVector(PVector[T]):
    # PVector variant that validates element types declared on the subclass.
    __type__: Type[T]
    def __new__(self, initial: Iterable[T] = ...) -> CheckedPVector: ...
    @classmethod
    def create(cls, source_data: Iterable[T], _factory_fields: Any = ...) -> CheckedPVector[T]: ...
    def serialize(self, format: Optional[Any] = ...) -> List[T]: ...
class CheckedPSet(PSet[T]):
    # PSet variant that validates element types declared on the subclass.
    __type__: Type[T]
    def __new__(cls, initial: Iterable[T] = ...) -> CheckedPSet: ...
    @classmethod
    def create(cls, source_data: Iterable[T], _factory_fields: Any = ...) -> CheckedPSet[T]: ...
    def serialize(self, format: Optional[Any] = ...) -> Set[T]: ...
class InvariantException(Exception):
    # Raised when a record/class invariant fails or mandatory fields are missing.
    invariant_errors: Tuple[Any, ...] = ...  # possibly nested tuple
    missing_fields: Tuple[str, ...] = ...
    def __init__(
        self,
        error_codes: Any = ...,
        missing_fields: Any = ...,
        *args: Any,
        **kwargs: Any
    ) -> None: ...
class CheckedTypeError(TypeError):
    # Raised by Checked* collections when an element fails its type check.
    source_class: Type[Any]
    expected_types: Tuple[Any, ...]
    actual_type: Type[Any]
    actual_value: Any
    def __init__(
        self,
        source_class: Any,
        expected_types: Any,
        actual_type: Any,
        actual_value: Any,
        *args: Any,
        **kwargs: Any
    ) -> None: ...
class CheckedKeyTypeError(CheckedTypeError): ...  # wrong key type in a CheckedPMap
class CheckedValueTypeError(CheckedTypeError): ...  # wrong value/element type
class CheckedType: ...  # marker base for type-checked pyrsistent collections
class PTypeError(TypeError):
    # Raised when a record field is assigned a value of a disallowed type.
    source_class: Type[Any] = ...
    field: str = ...
    expected_types: Tuple[Any, ...] = ...
    actual_type: Type[Any] = ...
    def __init__(
        self,
        source_class: Any,
        field: Any,
        expected_types: Any,
        actual_type: Any,
        *args: Any,
        **kwargs: Any
    ) -> None: ...
| 10,416 | unknown | 34.552901 | 105 | 0.562884 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pyrsistent/_transformations.py | import re
try:
from inspect import Parameter, signature
except ImportError:
signature = None
from inspect import getfullargspec
_EMPTY_SENTINEL = object()
def inc(x):
    """Return the current value increased by one."""
    incremented = x + 1
    return incremented
def dec(x):
    """Return the current value decreased by one."""
    decremented = x - 1
    return decremented
def discard(evolver, key):
    """ Discard the element and returns a structure without the discarded elements """
    try:
        del evolver[key]
    except KeyError:
        # Discarding an absent key is a deliberate no-op, not an error.
        pass
# Matchers
def rex(expr):
    """Regular expression matcher to use together with transform functions.

    Returns a predicate that matches string keys against the compiled pattern;
    non-string keys never match.
    """
    pattern = re.compile(expr)

    def _matches(key):
        return isinstance(key, str) and pattern.match(key)

    return _matches
def ny(_):
    """ Matcher that matches any value (wildcard path segment) """
    return True
# Support functions
def _chunks(l, n):
for i in range(0, len(l), n):
yield l[i:i + n]
def transform(structure, transformations):
    """Apply consecutive (path, command) pairs to ``structure``.

    ``transformations`` is a flat sequence alternating paths and commands;
    each pair is applied to the result of the previous one.
    """
    result = structure
    for path, command in _chunks(transformations, 2):
        result = _do_to_path(result, path, command)
    return result
def _do_to_path(structure, path, command):
    # Recursively descend `path`, one segment per call.
    if not path:
        # End of path: apply the command (callables transform the current
        # value; anything else replaces it verbatim).
        return command(structure) if callable(command) else command

    kvs = _get_keys_and_values(structure, path[0])
    return _update_structure(structure, kvs, path[1:], command)
def _items(structure):
    # Yield (key, value) pairs; sequences are treated as index -> element maps.
    try:
        return structure.items()
    except AttributeError:
        # Support wider range of structures by adding a transform_items() or similar?
        return list(enumerate(structure))
def _get(structure, key, default):
try:
if hasattr(structure, '__getitem__'):
return structure[key]
return getattr(structure, key)
except (IndexError, KeyError):
return default
def _get_keys_and_values(structure, key_spec):
    # Resolve one path segment into the concrete (key, value) pairs it selects.
    if callable(key_spec):
        # Support predicates as callable objects in the path
        arity = _get_arity(key_spec)
        if arity == 1:
            # Unary predicates are called with the "key" of the path
            # - eg a key in a mapping, an index in a sequence.
            return [(k, v) for k, v in _items(structure) if key_spec(k)]
        elif arity == 2:
            # Binary predicates are called with the key and the corresponding
            # value.
            return [(k, v) for k, v in _items(structure) if key_spec(k, v)]
        else:
            # Other arities are an error.
            raise ValueError(
                "callable in transform path must take 1 or 2 arguments"
            )

    # Non-callables are used as-is as a key.
    return [(key_spec, _get(structure, key_spec, _EMPTY_SENTINEL))]
if signature is None:
    # Fallback for interpreters without inspect.signature (see the guarded
    # import at the top of the module): arity = positional parameters that
    # lack a default value.
    def _get_arity(f):
        argspec = getfullargspec(f)
        return len(argspec.args) - len(argspec.defaults or ())
else:
    def _get_arity(f):
        # Count parameters that must be supplied positionally (no default).
        return sum(
            1
            for p
            in signature(f).parameters.values()
            if p.default is Parameter.empty
            and p.kind in (Parameter.POSITIONAL_ONLY, Parameter.POSITIONAL_OR_KEYWORD)
        )
def _update_structure(structure, kvs, path, command):
    # Rebuild `structure` through an evolver, applying `command` below each
    # selected (key, value) pair at the remaining `path`.
    from pyrsistent._pmap import pmap
    e = structure.evolver()
    if not path and command is discard:
        # Do this in reverse to avoid index problems with vectors. See #92.
        for k, v in reversed(kvs):
            discard(e, k)
    else:
        for k, v in kvs:
            is_empty = False
            if v is _EMPTY_SENTINEL:
                # Allow expansion of structure but make sure to cover the case
                # when an empty pmap is added as leaf node. See #154.
                is_empty = True
                v = pmap()

            result = _do_to_path(v, path, command)
            if result is not v or is_empty:
                # Only write back when something actually changed (or a new
                # leaf was created) to preserve structural sharing.
                e[k] = result

    return e.persistent()
| 3,800 | Python | 26.15 | 86 | 0.598421 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pyrsistent/__init__.pyi | # flake8: noqa: E704
# from https://gist.github.com/WuTheFWasThat/091a17d4b5cab597dfd5d4c2d96faf09
# Stubs for pyrsistent (Python 3.6)
from typing import Any
from typing import AnyStr
from typing import Callable
from typing import Iterable
from typing import Iterator
from typing import List
from typing import Optional
from typing import Mapping
from typing import MutableMapping
from typing import Sequence
from typing import Set
from typing import Union
from typing import Tuple
from typing import Type
from typing import TypeVar
from typing import overload
# see commit 08519aa for explanation of the re-export
from pyrsistent.typing import CheckedKeyTypeError as CheckedKeyTypeError
from pyrsistent.typing import CheckedPMap as CheckedPMap
from pyrsistent.typing import CheckedPSet as CheckedPSet
from pyrsistent.typing import CheckedPVector as CheckedPVector
from pyrsistent.typing import CheckedType as CheckedType
from pyrsistent.typing import CheckedValueTypeError as CheckedValueTypeError
from pyrsistent.typing import InvariantException as InvariantException
from pyrsistent.typing import PClass as PClass
from pyrsistent.typing import PBag as PBag
from pyrsistent.typing import PDeque as PDeque
from pyrsistent.typing import PList as PList
from pyrsistent.typing import PMap as PMap
from pyrsistent.typing import PMapEvolver as PMapEvolver
from pyrsistent.typing import PSet as PSet
from pyrsistent.typing import PSetEvolver as PSetEvolver
from pyrsistent.typing import PTypeError as PTypeError
from pyrsistent.typing import PVector as PVector
from pyrsistent.typing import PVectorEvolver as PVectorEvolver
T = TypeVar('T')
KT = TypeVar('KT')
VT = TypeVar('VT')
def pmap(initial: Union[Mapping[KT, VT], Iterable[Tuple[KT, VT]]] = {}, pre_size: int = 0) -> PMap[KT, VT]: ...
def m(**kwargs: VT) -> PMap[str, VT]: ...
def pvector(iterable: Iterable[T] = ...) -> PVector[T]: ...
def v(*iterable: T) -> PVector[T]: ...
def pset(iterable: Iterable[T] = (), pre_size: int = 8) -> PSet[T]: ...
def s(*iterable: T) -> PSet[T]: ...
# see class_test.py for use cases
Invariant = Tuple[bool, Optional[Union[str, Callable[[], str]]]]
@overload
def field(
type: Union[Type[T], Sequence[Type[T]]] = ...,
invariant: Callable[[Any], Union[Invariant, Iterable[Invariant]]] = lambda _: (True, None),
initial: Any = object(),
mandatory: bool = False,
factory: Callable[[Any], T] = lambda x: x,
serializer: Callable[[Any, T], Any] = lambda _, value: value,
) -> T: ...
# The actual return value (_PField) is irrelevant after a PRecord has been instantiated,
# see https://github.com/tobgu/pyrsistent/blob/master/pyrsistent/_precord.py#L10
@overload
def field(
type: Any = ...,
invariant: Callable[[Any], Union[Invariant, Iterable[Invariant]]] = lambda _: (True, None),
initial: Any = object(),
mandatory: bool = False,
factory: Callable[[Any], Any] = lambda x: x,
serializer: Callable[[Any, Any], Any] = lambda _, value: value,
) -> Any: ...
# Use precise types for the simplest use cases, but fall back to Any for
# everything else. See record_test.py for the wide range of possible types for
# item_type
@overload
def pset_field(
item_type: Type[T],
optional: bool = False,
initial: Iterable[T] = ...,
) -> PSet[T]: ...
@overload
def pset_field(
item_type: Any,
optional: bool = False,
initial: Any = (),
) -> PSet[Any]: ...
@overload
def pmap_field(
key_type: Type[KT],
value_type: Type[VT],
optional: bool = False,
invariant: Callable[[Any], Tuple[bool, Optional[str]]] = lambda _: (True, None),
) -> PMap[KT, VT]: ...
@overload
def pmap_field(
key_type: Any,
value_type: Any,
optional: bool = False,
invariant: Callable[[Any], Tuple[bool, Optional[str]]] = lambda _: (True, None),
) -> PMap[Any, Any]: ...
@overload
def pvector_field(
item_type: Type[T],
optional: bool = False,
initial: Iterable[T] = ...,
) -> PVector[T]: ...
@overload
def pvector_field(
item_type: Any,
optional: bool = False,
initial: Any = (),
) -> PVector[Any]: ...
def pbag(elements: Iterable[T]) -> PBag[T]: ...
def b(*elements: T) -> PBag[T]: ...
def plist(iterable: Iterable[T] = (), reverse: bool = False) -> PList[T]: ...
def l(*elements: T) -> PList[T]: ...
def pdeque(iterable: Optional[Iterable[T]] = None, maxlen: Optional[int] = None) -> PDeque[T]: ...
def dq(*iterable: T) -> PDeque[T]: ...
@overload
def optional(type: T) -> Tuple[T, Type[None]]: ...
@overload
def optional(*typs: Any) -> Tuple[Any, ...]: ...
T_PRecord = TypeVar('T_PRecord', bound='PRecord')
class PRecord(PMap[AnyStr, Any]):
    # Stub for the PRecord runtime class: a PMap with a fixed set of declared fields.
    _precord_fields: Mapping
    _precord_initial_values: Mapping
    def __hash__(self) -> int: ...
    def __init__(self, **kwargs: Any) -> None: ...
    def __iter__(self) -> Iterator[Any]: ...
    def __len__(self) -> int: ...
    @classmethod
    def create(
        cls: Type[T_PRecord],
        kwargs: Mapping,
        _factory_fields: Optional[Iterable] = None,
        ignore_extra: bool = False,
    ) -> T_PRecord: ...
    # This is OK because T_PRecord is a concrete type
    def discard(self: T_PRecord, key: KT) -> T_PRecord: ...
    def remove(self: T_PRecord, key: KT) -> T_PRecord: ...
    def serialize(self, format: Optional[Any] = ...) -> MutableMapping: ...
    # From pyrsistent documentation:
    # This set function differs slightly from that in the PMap
    # class. First of all it accepts key-value pairs. Second it accepts multiple key-value
    # pairs to perform one, atomic, update of multiple fields.
    @overload
    def set(self, key: KT, val: VT) -> Any: ...
    @overload
    def set(self, **kwargs: VT) -> Any: ...
def immutable(
members: Union[str, Iterable[str]] = '',
name: str = 'Immutable',
verbose: bool = False,
) -> Tuple: ... # actually a namedtuple
# ignore mypy warning "Overloaded function signatures 1 and 5 overlap with
# incompatible return types"
@overload
def freeze(o: Mapping[KT, VT]) -> PMap[KT, VT]: ... # type: ignore
@overload
def freeze(o: List[T]) -> PVector[T]: ... # type: ignore
@overload
def freeze(o: Tuple[T, ...]) -> Tuple[T, ...]: ...
@overload
def freeze(o: Set[T]) -> PSet[T]: ... # type: ignore
@overload
def freeze(o: T) -> T: ...
@overload
def thaw(o: PMap[KT, VT]) -> MutableMapping[KT, VT]: ... # type: ignore
@overload
def thaw(o: PVector[T]) -> List[T]: ... # type: ignore
@overload
def thaw(o: Tuple[T, ...]) -> Tuple[T, ...]: ...
# collections.abc.MutableSet is kind of garbage:
# https://stackoverflow.com/questions/24977898/why-does-collections-mutableset-not-bestow-an-update-method
@overload
def thaw(o: PSet[T]) -> Set[T]: ... # type: ignore
@overload
def thaw(o: T) -> T: ...
def mutant(fn: Callable) -> Callable: ...
def inc(x: int) -> int: ...
@overload
def discard(evolver: PMapEvolver[KT, VT], key: KT) -> None: ...
@overload
def discard(evolver: PVectorEvolver[T], key: int) -> None: ...
@overload
def discard(evolver: PSetEvolver[T], key: T) -> None: ...
def rex(expr: str) -> Callable[[Any], bool]: ...
def ny(_: Any) -> bool: ...
def get_in(keys: Iterable, coll: Mapping, default: Optional[Any] = None, no_default: bool = False) -> Any: ...
| 7,188 | unknown | 32.593458 | 111 | 0.668753 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pyrsistent/typing.py | """Helpers for use with type annotation.
Use the empty classes in this module when annotating the types of Pyrsistent
objects, instead of using the actual collection class.
For example,
from pyrsistent import pvector
from pyrsistent.typing import PVector
myvector: PVector[str] = pvector(['a', 'b', 'c'])
"""
from __future__ import absolute_import
try:
    # Everything below only exists when the stdlib `typing` module is
    # importable; these empty classes are annotation-only stand-ins for the
    # concrete pyrsistent collection types.
    from typing import Container
    from typing import Hashable
    from typing import Generic
    from typing import Iterable
    from typing import Mapping
    from typing import Sequence
    from typing import Sized
    from typing import TypeVar

    __all__ = [
        'CheckedPMap',
        'CheckedPSet',
        'CheckedPVector',
        'PBag',
        'PDeque',
        'PList',
        'PMap',
        'PSet',
        'PVector',
    ]

    T = TypeVar('T')
    KT = TypeVar('KT')
    VT = TypeVar('VT')

    class CheckedPMap(Mapping[KT, VT], Hashable):
        pass

    # PSet.add and PSet.discard have different type signatures than that of Set.
    class CheckedPSet(Generic[T], Hashable):
        pass

    class CheckedPVector(Sequence[T], Hashable):
        pass

    class PBag(Container[T], Iterable[T], Sized, Hashable):
        pass

    class PDeque(Sequence[T], Hashable):
        pass

    class PList(Sequence[T], Hashable):
        pass

    class PMap(Mapping[KT, VT], Hashable):
        pass

    # PSet.add and PSet.discard have different type signatures than that of Set.
    class PSet(Generic[T], Hashable):
        pass

    class PVector(Sequence[T], Hashable):
        pass

    class PVectorEvolver(Generic[T]):
        pass

    class PMapEvolver(Generic[KT, VT]):
        pass

    class PSetEvolver(Generic[T]):
        pass
except ImportError:
    # `typing` is unavailable (very old interpreters): expose no aliases; the
    # concrete pyrsistent classes still work without them.
    pass
| 1,767 | Python | 20.82716 | 80 | 0.627051 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pyrsistent/_precord.py | from pyrsistent._checked_types import CheckedType, _restore_pickle, InvariantException, store_invariants
from pyrsistent._field_common import (
set_fields, check_type, is_field_ignore_extra_complaint, PFIELD_NO_INITIAL, serialize, check_global_invariants
)
from pyrsistent._pmap import PMap, pmap
class _PRecordMeta(type):
    # Metaclass for PRecord: gathers _PField declarations (including those on
    # base classes) into _precord_fields and invariants into _precord_invariants.
    def __new__(mcs, name, bases, dct):
        set_fields(dct, bases, name='_precord_fields')
        store_invariants(dct, bases, '_precord_invariants', '__invariant__')

        # Names of fields that must always be present on instances.
        dct['_precord_mandatory_fields'] = \
            set(name for name, field in dct['_precord_fields'].items() if field.mandatory)

        # Default values (or zero-arg factories) for fields that declare one.
        dct['_precord_initial_values'] = \
            dict((k, field.initial) for k, field in dct['_precord_fields'].items() if field.initial is not PFIELD_NO_INITIAL)

        # All state lives in the underlying PMap buckets; no instance __dict__.
        dct['__slots__'] = ()

        return super(_PRecordMeta, mcs).__new__(mcs, name, bases, dct)
class PRecord(PMap, CheckedType, metaclass=_PRecordMeta):
    """
    A PRecord is a PMap with a fixed set of specified fields. Records are declared as python classes inheriting
    from PRecord. Because it is a PMap it has full support for all Mapping methods such as iteration and element
    access using subscript notation.

    More documentation and examples of PRecord usage is available at https://github.com/tobgu/pyrsistent
    """
    def __new__(cls, **kwargs):
        # Hack total! If these two special attributes exist that means we can create
        # ourselves. Otherwise we need to go through the Evolver to create the structures
        # for us.
        if '_precord_size' in kwargs and '_precord_buckets' in kwargs:
            return super(PRecord, cls).__new__(cls, kwargs['_precord_size'], kwargs['_precord_buckets'])

        # Internal keyword channels used by create()/the evolver; never real fields.
        factory_fields = kwargs.pop('_factory_fields', None)
        ignore_extra = kwargs.pop('_ignore_extra', False)

        # Start from declared initial values (calling zero-arg factories), then
        # let explicit kwargs override them.
        initial_values = kwargs
        if cls._precord_initial_values:
            initial_values = dict((k, v() if callable(v) else v)
                                  for k, v in cls._precord_initial_values.items())
            initial_values.update(kwargs)

        # Route every assignment through the evolver so type checks, factories
        # and invariants all run exactly once.
        e = _PRecordEvolver(cls, pmap(pre_size=len(cls._precord_fields)), _factory_fields=factory_fields, _ignore_extra=ignore_extra)
        for k, v in initial_values.items():
            e[k] = v

        return e.persistent()

    def set(self, *args, **kwargs):
        """
        Set a field in the record. This set function differs slightly from that in the PMap
        class. First of all it accepts key-value pairs. Second it accepts multiple key-value
        pairs to perform one, atomic, update of multiple fields.
        """
        # The PRecord set() can accept kwargs since all fields that have been declared are
        # valid python identifiers. Also allow multiple fields to be set in one operation.
        if args:
            return super(PRecord, self).set(args[0], args[1])

        return self.update(kwargs)

    def evolver(self):
        """
        Returns an evolver of this object.
        """
        return _PRecordEvolver(self.__class__, self)

    def __repr__(self):
        return "{0}({1})".format(self.__class__.__name__,
                                 ', '.join('{0}={1}'.format(k, repr(v)) for k, v in self.items()))

    @classmethod
    def create(cls, kwargs, _factory_fields=None, ignore_extra=False):
        """
        Factory method. Will create a new PRecord of the current type and assign the values
        specified in kwargs.

        :param ignore_extra: A boolean which when set to True will ignore any keys which appear in kwargs that are not
                             in the set of fields on the PRecord.
        """
        if isinstance(kwargs, cls):
            return kwargs

        if ignore_extra:
            kwargs = {k: kwargs[k] for k in cls._precord_fields if k in kwargs}

        return cls(_factory_fields=_factory_fields, _ignore_extra=ignore_extra, **kwargs)

    def __reduce__(self):
        # Pickling support
        return _restore_pickle, (self.__class__, dict(self),)

    def serialize(self, format=None):
        """
        Serialize the current PRecord using custom serializer functions for fields where
        such have been supplied.
        """
        return dict((k, serialize(self._precord_fields[k].serializer, format, v)) for k, v in self.items())
class _PRecordEvolver(PMap._Evolver):
    # Evolver that enforces PRecord semantics on every assignment: field must
    # be declared, value is run through the field factory and type check, and
    # invariant failures are collected (not raised) until persistent().
    __slots__ = ('_destination_cls', '_invariant_error_codes', '_missing_fields', '_factory_fields', '_ignore_extra')

    def __init__(self, cls, original_pmap, _factory_fields=None, _ignore_extra=False):
        super(_PRecordEvolver, self).__init__(original_pmap)
        self._destination_cls = cls
        self._invariant_error_codes = []
        self._missing_fields = []
        self._factory_fields = _factory_fields
        self._ignore_extra = _ignore_extra

    def __setitem__(self, key, original_value):
        self.set(key, original_value)

    def set(self, key, original_value):
        field = self._destination_cls._precord_fields.get(key)
        if field:
            if self._factory_fields is None or field in self._factory_fields:
                try:
                    if is_field_ignore_extra_complaint(PRecord, field, self._ignore_extra):
                        value = field.factory(original_value, ignore_extra=self._ignore_extra)
                    else:
                        value = field.factory(original_value)
                except InvariantException as e:
                    # Defer nested invariant failures; they surface from persistent().
                    self._invariant_error_codes += e.invariant_errors
                    self._missing_fields += e.missing_fields
                    return self
            else:
                value = original_value

            check_type(self._destination_cls, field, key, value)

            is_ok, error_code = field.invariant(value)
            if not is_ok:
                self._invariant_error_codes.append(error_code)

            return super(_PRecordEvolver, self).set(key, value)
        else:
            raise AttributeError("'{0}' is not among the specified fields for {1}".format(key, self._destination_cls.__name__))

    def persistent(self):
        cls = self._destination_cls
        is_dirty = self.is_dirty()
        pm = super(_PRecordEvolver, self).persistent()
        if is_dirty or not isinstance(pm, cls):
            # Re-wrap the plain PMap result in the record class via the
            # _precord_buckets/_precord_size fast path of PRecord.__new__.
            result = cls(_precord_buckets=pm._buckets, _precord_size=pm._size)
        else:
            result = pm

        if cls._precord_mandatory_fields:
            self._missing_fields += tuple('{0}.{1}'.format(cls.__name__, f) for f
                                          in (cls._precord_mandatory_fields - set(result.keys())))

        if self._invariant_error_codes or self._missing_fields:
            raise InvariantException(tuple(self._invariant_error_codes), tuple(self._missing_fields),
                                     'Field invariant failed')

        check_global_invariants(result, cls._precord_invariants)

        return result
| 7,032 | Python | 40.863095 | 133 | 0.607651 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pyrsistent/_pdeque.py | from collections.abc import Sequence, Hashable
from itertools import islice, chain
from numbers import Integral
from pyrsistent._plist import plist
class PDeque(object):
"""
Persistent double ended queue (deque). Allows quick appends and pops in both ends. Implemented
using two persistent lists.
A maximum length can be specified to create a bounded queue.
Fully supports the Sequence and Hashable protocols including indexing and slicing but
if you need fast random access go for the PVector instead.
Do not instantiate directly, instead use the factory functions :py:func:`dq` or :py:func:`pdeque` to
create an instance.
Some examples:
>>> x = pdeque([1, 2, 3])
>>> x.left
1
>>> x.right
3
>>> x[0] == x.left
True
>>> x[-1] == x.right
True
>>> x.pop()
pdeque([1, 2])
>>> x.pop() == x[:-1]
True
>>> x.popleft()
pdeque([2, 3])
>>> x.append(4)
pdeque([1, 2, 3, 4])
>>> x.appendleft(4)
pdeque([4, 1, 2, 3])
>>> y = pdeque([1, 2, 3], maxlen=3)
>>> y.append(4)
pdeque([2, 3, 4], maxlen=3)
>>> y.appendleft(4)
pdeque([4, 1, 2], maxlen=3)
"""
__slots__ = ('_left_list', '_right_list', '_length', '_maxlen', '__weakref__')
    def __new__(cls, left_list, right_list, length, maxlen=None):
        # Internal constructor; callers use pdeque()/dq(). The deque is two
        # persistent lists facing each other, with `length` cached for O(1) len().
        instance = super(PDeque, cls).__new__(cls)
        instance._left_list = left_list
        instance._right_list = right_list
        instance._length = length

        if maxlen is not None:
            if not isinstance(maxlen, Integral):
                raise TypeError('An integer is required as maxlen')

            if maxlen < 0:
                raise ValueError("maxlen must be non-negative")

        instance._maxlen = maxlen
        return instance
@property
def right(self):
"""
Rightmost element in dqueue.
"""
return PDeque._tip_from_lists(self._right_list, self._left_list)
@property
def left(self):
"""
Leftmost element in dqueue.
"""
return PDeque._tip_from_lists(self._left_list, self._right_list)
@staticmethod
def _tip_from_lists(primary_list, secondary_list):
if primary_list:
return primary_list.first
if secondary_list:
return secondary_list[-1]
raise IndexError('No elements in empty deque')
def __iter__(self):
return chain(self._left_list, self._right_list.reverse())
def __repr__(self):
return "pdeque({0}{1})".format(list(self),
', maxlen={0}'.format(self._maxlen) if self._maxlen is not None else '')
__str__ = __repr__
@property
def maxlen(self):
"""
Maximum length of the queue.
"""
return self._maxlen
def pop(self, count=1):
"""
Return new deque with rightmost element removed. Popping the empty queue
will return the empty queue. A optional count can be given to indicate the
number of elements to pop. Popping with a negative index is the same as
popleft. Executes in amortized O(k) where k is the number of elements to pop.
>>> pdeque([1, 2]).pop()
pdeque([1])
>>> pdeque([1, 2]).pop(2)
pdeque([])
>>> pdeque([1, 2]).pop(-1)
pdeque([2])
"""
if count < 0:
return self.popleft(-count)
new_right_list, new_left_list = PDeque._pop_lists(self._right_list, self._left_list, count)
return PDeque(new_left_list, new_right_list, max(self._length - count, 0), self._maxlen)
def popleft(self, count=1):
"""
Return new deque with leftmost element removed. Otherwise functionally
equivalent to pop().
>>> pdeque([1, 2]).popleft()
pdeque([2])
"""
if count < 0:
return self.pop(-count)
new_left_list, new_right_list = PDeque._pop_lists(self._left_list, self._right_list, count)
return PDeque(new_left_list, new_right_list, max(self._length - count, 0), self._maxlen)
    @staticmethod
    def _pop_lists(primary_list, secondary_list, count):
        # Drop `count` elements from the primary side, refilling from the
        # reversed secondary list whenever the primary side runs dry.
        new_primary_list = primary_list
        new_secondary_list = secondary_list

        while count > 0 and (new_primary_list or new_secondary_list):
            count -= 1
            if new_primary_list.rest:
                new_primary_list = new_primary_list.rest
            elif new_primary_list:
                # Exactly one element left on the primary side: dropping it
                # leaves the reversed secondary list as the new primary.
                new_primary_list = new_secondary_list.reverse()
                new_secondary_list = plist()
            else:
                # Primary already empty: reverse the secondary and drop its head.
                new_primary_list = new_secondary_list.reverse().rest
                new_secondary_list = plist()

        return new_primary_list, new_secondary_list
def _is_empty(self):
return not self._left_list and not self._right_list
def __lt__(self, other):
if not isinstance(other, PDeque):
return NotImplemented
return tuple(self) < tuple(other)
def __eq__(self, other):
if not isinstance(other, PDeque):
return NotImplemented
if tuple(self) == tuple(other):
# Sanity check of the length value since it is redundant (there for performance)
assert len(self) == len(other)
return True
return False
def __hash__(self):
return hash(tuple(self))
def __len__(self):
return self._length
def append(self, elem):
"""
Return new deque with elem as the rightmost element.
>>> pdeque([1, 2]).append(3)
pdeque([1, 2, 3])
"""
new_left_list, new_right_list, new_length = self._append(self._left_list, self._right_list, elem)
return PDeque(new_left_list, new_right_list, new_length, self._maxlen)
def appendleft(self, elem):
"""
Return new deque with elem as the leftmost element.
>>> pdeque([1, 2]).appendleft(3)
pdeque([3, 1, 2])
"""
new_right_list, new_left_list, new_length = self._append(self._right_list, self._left_list, elem)
return PDeque(new_left_list, new_right_list, new_length, self._maxlen)
def _append(self, primary_list, secondary_list, elem):
if self._maxlen is not None and self._length == self._maxlen:
if self._maxlen == 0:
return primary_list, secondary_list, 0
new_primary_list, new_secondary_list = PDeque._pop_lists(primary_list, secondary_list, 1)
return new_primary_list, new_secondary_list.cons(elem), self._length
return primary_list, secondary_list.cons(elem), self._length + 1
@staticmethod
def _extend_list(the_list, iterable):
count = 0
for elem in iterable:
the_list = the_list.cons(elem)
count += 1
return the_list, count
def _extend(self, primary_list, secondary_list, iterable):
new_primary_list, extend_count = PDeque._extend_list(primary_list, iterable)
new_secondary_list = secondary_list
current_len = self._length + extend_count
if self._maxlen is not None and current_len > self._maxlen:
pop_len = current_len - self._maxlen
new_secondary_list, new_primary_list = PDeque._pop_lists(new_secondary_list, new_primary_list, pop_len)
extend_count -= pop_len
return new_primary_list, new_secondary_list, extend_count
def extend(self, iterable):
"""
Return new deque with all elements of iterable appended to the right.
>>> pdeque([1, 2]).extend([3, 4])
pdeque([1, 2, 3, 4])
"""
new_right_list, new_left_list, extend_count = self._extend(self._right_list, self._left_list, iterable)
return PDeque(new_left_list, new_right_list, self._length + extend_count, self._maxlen)
def extendleft(self, iterable):
"""
Return new deque with all elements of iterable appended to the left.
NB! The elements will be inserted in reverse order compared to the order in the iterable.
>>> pdeque([1, 2]).extendleft([3, 4])
pdeque([4, 3, 1, 2])
"""
new_left_list, new_right_list, extend_count = self._extend(self._left_list, self._right_list, iterable)
return PDeque(new_left_list, new_right_list, self._length + extend_count, self._maxlen)
def count(self, elem):
"""
Return the number of elements equal to elem present in the queue
>>> pdeque([1, 2, 1]).count(1)
2
"""
return self._left_list.count(elem) + self._right_list.count(elem)
def remove(self, elem):
"""
Return new deque with first element from left equal to elem removed. If no such element is found
a ValueError is raised.
>>> pdeque([2, 1, 2]).remove(2)
pdeque([1, 2])
"""
try:
return PDeque(self._left_list.remove(elem), self._right_list, self._length - 1)
except ValueError:
# Value not found in left list, try the right list
try:
# This is severely inefficient with a double reverse, should perhaps implement a remove_last()?
return PDeque(self._left_list,
self._right_list.reverse().remove(elem).reverse(), self._length - 1)
except ValueError as e:
raise ValueError('{0} not found in PDeque'.format(elem)) from e
def reverse(self):
"""
Return reversed deque.
>>> pdeque([1, 2, 3]).reverse()
pdeque([3, 2, 1])
Also supports the standard python reverse function.
>>> reversed(pdeque([1, 2, 3]))
pdeque([3, 2, 1])
"""
return PDeque(self._right_list, self._left_list, self._length)
__reversed__ = reverse
def rotate(self, steps):
"""
Return deque with elements rotated steps steps.
>>> x = pdeque([1, 2, 3])
>>> x.rotate(1)
pdeque([3, 1, 2])
>>> x.rotate(-2)
pdeque([3, 1, 2])
"""
popped_deque = self.pop(steps)
if steps >= 0:
return popped_deque.extendleft(islice(self.reverse(), steps))
return popped_deque.extend(islice(self, -steps))
def __reduce__(self):
# Pickling support
return pdeque, (list(self), self._maxlen)
def __getitem__(self, index):
if isinstance(index, slice):
if index.step is not None and index.step != 1:
# Too difficult, no structural sharing possible
return pdeque(tuple(self)[index], maxlen=self._maxlen)
result = self
if index.start is not None:
result = result.popleft(index.start % self._length)
if index.stop is not None:
result = result.pop(self._length - (index.stop % self._length))
return result
if not isinstance(index, Integral):
raise TypeError("'%s' object cannot be interpreted as an index" % type(index).__name__)
if index >= 0:
return self.popleft(index).left
shifted = len(self) + index
if shifted < 0:
raise IndexError(
"pdeque index {0} out of range {1}".format(index, len(self)),
)
return self.popleft(shifted).left
index = Sequence.index
Sequence.register(PDeque)
Hashable.register(PDeque)
def pdeque(iterable=(), maxlen=None):
    """
    Return deque containing the elements of iterable. If maxlen is specified then
    len(iterable) - maxlen elements are discarded from the left to if len(iterable) > maxlen.

    >>> pdeque([1, 2, 3])
    pdeque([1, 2, 3])
    >>> pdeque([1, 2, 3, 4], maxlen=2)
    pdeque([3, 4], maxlen=2)
    """
    t = tuple(iterable)
    if maxlen is not None:
        # t[-maxlen:] is t[0:] when maxlen == 0 (since -0 == 0), which kept
        # every element despite the zero bound; mirror collections.deque and
        # discard everything instead.
        t = t[-maxlen:] if maxlen else ()
    length = len(t)
    pivot = length // 2
    # Split roughly in half: the right half is stored reversed so both ends
    # are cheap to reach.
    left = plist(t[:pivot])
    right = plist(t[pivot:], reverse=True)
    return PDeque(left, right, length, maxlen)
def dq(*elements):
    """
    Return deque containing all arguments.

    >>> dq(1, 2, 3)
    pdeque([1, 2, 3])
    """
    return pdeque(iterable=elements)
| 12,203 | Python | 31.371353 | 115 | 0.574695 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pyrsistent/_field_common.py | from pyrsistent._checked_types import (
CheckedPMap,
CheckedPSet,
CheckedPVector,
CheckedType,
InvariantException,
_restore_pickle,
get_type,
maybe_parse_user_type,
maybe_parse_many_user_types,
)
from pyrsistent._checked_types import optional as optional_type
from pyrsistent._checked_types import wrap_invariant
import inspect
def set_fields(dct, bases, name):
    """Collect inherited and locally declared _PField objects under dct[name]."""
    inherited = []
    for base in bases:
        inherited.extend(base.__dict__.get(name, {}).items())
    dct[name] = dict(inherited)

    for key, value in list(dct.items()):
        if isinstance(value, _PField):
            # Move the field declaration out of the class namespace and into
            # the field registry so it never shadows the record attribute.
            dct[name][key] = value
            del dct[key]
def check_global_invariants(subject, invariants):
    """Run every invariant against subject; raise InvariantException on failures."""
    failures = []
    for invariant in invariants:
        is_ok, error_code = invariant(subject)
        if not is_ok:
            failures.append(error_code)

    if failures:
        raise InvariantException(tuple(failures), (), 'Global invariant failed')
def serialize(serializer, format, value):
    """Serialize *value* for the given *format*.

    A CheckedType value serializes itself when no explicit serializer was
    configured for the field; otherwise the configured serializer is used.
    """
    use_own_serializer = serializer is PFIELD_NO_SERIALIZER and isinstance(value, CheckedType)
    if use_own_serializer:
        return value.serialize(format)
    return serializer(format, value)
def check_type(destination_cls, field, name, value):
    """Raise PTypeError if *value* matches none of *field*'s declared types.

    Fields with no declared type accept any value.
    """
    if not field.type:
        return
    if any(isinstance(value, get_type(t)) for t in field.type):
        return
    actual_type = type(value)
    message = "Invalid type for field {0}.{1}, was {2}".format(
        destination_cls.__name__, name, actual_type.__name__)
    raise PTypeError(destination_cls, name, field.type, actual_type, message)
def is_type_cls(type_cls, field_type):
    """Return True if the first declared type in *field_type* subclasses *type_cls*.

    A plain ``set`` of types is accepted unconditionally; an empty
    declaration is rejected.
    """
    if type(field_type) is set:
        return True
    candidates = tuple(field_type)
    return bool(candidates) and issubclass(get_type(candidates[0]), type_cls)
def is_field_ignore_extra_complaint(type_cls, field, ignore_extra):
    """Return True when *field*'s factory should be forwarded the
    ``ignore_extra`` flag.

    ``ignore_extra`` defaults to False, so the cheap flag check runs first
    and the signature inspection is only paid when actually needed.
    """
    return (bool(ignore_extra)
            and is_type_cls(type_cls, field.type)
            and 'ignore_extra' in inspect.signature(field.factory).parameters)
class _PField(object):
    """Internal specification of a single PRecord field.

    Holds the declared types, invariant, initial value, mandatory flag,
    factory and serializer produced by :func:`field`.
    """
    __slots__ = ('type', 'invariant', 'initial', 'mandatory', '_factory', 'serializer')
    def __init__(self, type, invariant, initial, mandatory, factory, serializer):
        self.type = type
        self.invariant = invariant
        self.initial = initial
        self.mandatory = mandatory
        self._factory = factory
        self.serializer = serializer
    @property
    def factory(self):
        """Factory callable used when the field is set."""
        # If no factory is specified and the type is another CheckedType use the factory method of that CheckedType
        if self._factory is PFIELD_NO_FACTORY and len(self.type) == 1:
            typ = get_type(tuple(self.type)[0])
            if issubclass(typ, CheckedType):
                return typ.create
        return self._factory
# Sentinel defaults used by ``field`` to mean "not specified".
PFIELD_NO_TYPE = ()
PFIELD_NO_INVARIANT = lambda _: (True, None)
PFIELD_NO_FACTORY = lambda x: x
# Unique object so that ``None`` remains a legal initial value.
PFIELD_NO_INITIAL = object()
PFIELD_NO_SERIALIZER = lambda _, value: value
def field(type=PFIELD_NO_TYPE, invariant=PFIELD_NO_INVARIANT, initial=PFIELD_NO_INITIAL,
          mandatory=False, factory=PFIELD_NO_FACTORY, serializer=PFIELD_NO_SERIALIZER):
    """
    Field specification factory for :py:class:`PRecord`.
    :param type: a type or iterable with types that are allowed for this field
    :param invariant: a function specifying an invariant that must hold for the field
    :param initial: value of field if not specified when instantiating the record
    :param mandatory: boolean specifying if the field is mandatory or not
    :param factory: function called when field is set.
    :param serializer: function that returns a serialized version of the field
    """
    # NB: We have to check this predicate separately from the predicates in
    # `maybe_parse_user_type` et al. because this one is related to supporting
    # the argspec for `field`, while those are related to supporting the valid
    # ways to specify types.
    # Multiple types must be passed in one of the following containers. Note
    # that a type that is a subclass of one of these containers, like a
    # `collections.namedtuple`, will work as expected, since we check
    # `isinstance` and not `issubclass`.
    if isinstance(type, (list, set, tuple)):
        types = set(maybe_parse_many_user_types(type))
    else:
        types = set(maybe_parse_user_type(type))
    # Custom invariants are wrapped so they report through InvariantException;
    # the no-op sentinel is passed through untouched.
    invariant_function = wrap_invariant(invariant) if invariant != PFIELD_NO_INVARIANT and callable(invariant) else invariant
    field = _PField(type=types, invariant=invariant_function, initial=initial,
                    mandatory=mandatory, factory=factory, serializer=serializer)
    # Validate the spec eagerly so misconfigured fields fail at class
    # definition time rather than on first assignment.
    _check_field_parameters(field)
    return field
def _check_field_parameters(field):
    """Validate a freshly built _PField, raising TypeError for bad parameters."""
    for declared in field.type:
        # Strings are allowed as forward references to types.
        if not isinstance(declared, (type, str)):
            raise TypeError('Type parameter expected, not {0}'.format(type(declared)))
    has_initial = field.initial is not PFIELD_NO_INITIAL
    if (has_initial
            and not callable(field.initial)
            and field.type
            and not any(isinstance(field.initial, t) for t in field.type)):
        raise TypeError('Initial has invalid type {0}'.format(type(field.initial)))
    for attr_name in ('invariant', 'factory', 'serializer'):
        if not callable(getattr(field, attr_name)):
            raise TypeError('{0} must be callable'.format(attr_name.capitalize()))
class PTypeError(TypeError):
    """
    Raised when a field is assigned a value whose type does not match the
    field's declared type(s).

    Attributes:
        source_class -- The class of the record
        field -- Field name
        expected_types -- Types allowed for the field
        actual_type -- The non matching type
    """
    def __init__(self, source_class, field, expected_types, actual_type, *args, **kwargs):
        super(PTypeError, self).__init__(*args, **kwargs)
        (self.source_class,
         self.field,
         self.expected_types,
         self.actual_type) = source_class, field, expected_types, actual_type
# Suffix appended to auto-generated sequence field type names, keyed by the
# checked base class being specialized.
SEQ_FIELD_TYPE_SUFFIXES = {
    CheckedPVector: "PVector",
    CheckedPSet: "PSet",
}
# Global dictionary to hold auto-generated field types: used for unpickling
_seq_field_types = {}
def _restore_seq_field_pickle(checked_class, item_type, data):
    """Unpickling function for auto-generated PVec/PSet field types."""
    # The concrete subclass was registered at creation time by
    # _make_seq_field_type, so look it up rather than rebuilding it.
    return _restore_pickle(_seq_field_types[checked_class, item_type], data)
def _types_to_names(types):
    """Convert a tuple of types to a human-readable string."""
    capitalized = (get_type(declared).__name__.capitalize() for declared in types)
    return "".join(capitalized)
def _make_seq_field_type(checked_class, item_type, item_invariant):
    """Create a subclass of the given checked class with the given item type."""
    # Memoized so repeated field declarations share one class and so that
    # unpickling (see _restore_seq_field_pickle) can find it again.
    type_ = _seq_field_types.get((checked_class, item_type))
    if type_ is not None:
        return type_
    class TheType(checked_class):
        __type__ = item_type
        __invariant__ = item_invariant
        def __reduce__(self):
            # Pickle via the registry lookup since this class is generated
            # dynamically and cannot be found by module path.
            return (_restore_seq_field_pickle,
                    (checked_class, item_type, list(self)))
    suffix = SEQ_FIELD_TYPE_SUFFIXES[checked_class]
    TheType.__name__ = _types_to_names(TheType._checked_types) + suffix
    _seq_field_types[checked_class, item_type] = TheType
    return TheType
def _sequence_field(checked_class, item_type, optional, initial,
                    invariant=PFIELD_NO_INVARIANT,
                    item_invariant=PFIELD_NO_INVARIANT):
    """
    Create checked field for either ``PSet`` or ``PVector``.
    :param checked_class: ``CheckedPSet`` or ``CheckedPVector``.
    :param item_type: The required type for the items in the set.
    :param optional: If true, ``None`` can be used as a value for
        this field.
    :param initial: Initial value to pass to factory.
    :return: A ``field`` containing a checked class.
    """
    TheType = _make_seq_field_type(checked_class, item_type, item_invariant)
    if optional:
        # Wrap the factory so an explicit None passes through unconverted.
        def factory(argument, _factory_fields=None, ignore_extra=False):
            if argument is None:
                return None
            else:
                return TheType.create(argument, _factory_fields=_factory_fields, ignore_extra=ignore_extra)
    else:
        factory = TheType.create
    return field(type=optional_type(TheType) if optional else TheType,
                 factory=factory, mandatory=True,
                 invariant=invariant,
                 # The initial value is converted eagerly so the field starts
                 # out holding a checked collection, not a plain iterable.
                 initial=factory(initial))
def pset_field(item_type, optional=False, initial=(),
               invariant=PFIELD_NO_INVARIANT,
               item_invariant=PFIELD_NO_INVARIANT):
    """
    Create checked ``PSet`` field.
    :param item_type: The required type for the items in the set.
    :param optional: If true, ``None`` can be used as a value for
        this field.
    :param initial: Initial value to pass to factory if no value is given
        for the field.
    :return: A ``field`` containing a ``CheckedPSet`` of the given type.
    """
    return _sequence_field(
        CheckedPSet,
        item_type,
        optional,
        initial,
        invariant=invariant,
        item_invariant=item_invariant,
    )
def pvector_field(item_type, optional=False, initial=(),
                  invariant=PFIELD_NO_INVARIANT,
                  item_invariant=PFIELD_NO_INVARIANT):
    """
    Create checked ``PVector`` field.
    :param item_type: The required type for the items in the vector.
    :param optional: If true, ``None`` can be used as a value for
        this field.
    :param initial: Initial value to pass to factory if no value is given
        for the field.
    :return: A ``field`` containing a ``CheckedPVector`` of the given type.
    """
    return _sequence_field(
        CheckedPVector,
        item_type,
        optional,
        initial,
        invariant=invariant,
        item_invariant=item_invariant,
    )
# Invariant that always passes; kept as a module-level helper.
_valid = lambda item: (True, "")
# Global dictionary to hold auto-generated field types: used for unpickling
_pmap_field_types = {}
def _restore_pmap_field_pickle(key_type, value_type, data):
    """Unpickling function for auto-generated PMap field types."""
    # Look up the generated class registered by _make_pmap_field_type.
    map_type = _pmap_field_types[key_type, value_type]
    return _restore_pickle(map_type, data)
def _make_pmap_field_type(key_type, value_type):
    """Create a subclass of CheckedPMap with the given key and value types."""
    # Memoized so repeated field declarations share one class and so that
    # unpickling (see _restore_pmap_field_pickle) can find it again.
    type_ = _pmap_field_types.get((key_type, value_type))
    if type_ is not None:
        return type_
    class TheMap(CheckedPMap):
        __key_type__ = key_type
        __value_type__ = value_type
        def __reduce__(self):
            # Pickle via the registry lookup since this class is generated
            # dynamically and cannot be found by module path.
            return (_restore_pmap_field_pickle,
                    (self.__key_type__, self.__value_type__, dict(self)))
    TheMap.__name__ = "{0}To{1}PMap".format(
        _types_to_names(TheMap._checked_key_types),
        _types_to_names(TheMap._checked_value_types))
    _pmap_field_types[key_type, value_type] = TheMap
    return TheMap
def pmap_field(key_type, value_type, optional=False, invariant=PFIELD_NO_INVARIANT):
    """
    Create a checked ``PMap`` field.
    :param key: The required type for the keys of the map.
    :param value: The required type for the values of the map.
    :param optional: If true, ``None`` can be used as a value for
        this field.
    :param invariant: Pass-through to ``field``.
    :return: A ``field`` containing a ``CheckedPMap``.
    """
    map_type = _make_pmap_field_type(key_type, value_type)
    if not optional:
        factory = map_type.create
    else:
        # Let an explicit None pass through unconverted.
        def factory(argument):
            return None if argument is None else map_type.create(argument)
    return field(mandatory=True, initial=map_type(),
                 type=optional_type(map_type) if optional else map_type,
                 factory=factory, invariant=invariant)
| 11,963 | Python | 34.927928 | 125 | 0.65084 |
omniverse-code/kit/exts/omni.kit.property.usd_clipboard_test/PACKAGE-LICENSES/omni.kit.property.usd_clipboard_test-LICENSE.md | Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
NVIDIA CORPORATION and its licensors retain all intellectual property
and proprietary rights in and to this software, related documentation
and any modifications thereto. Any use, reproduction, disclosure or
distribution of this software and related documentation without an express
license agreement from NVIDIA CORPORATION is strictly prohibited. | 412 | Markdown | 57.999992 | 74 | 0.839806 |
omniverse-code/kit/exts/omni.kit.property.usd_clipboard_test/config/extension.toml | [package]
# Semantic Versioning is used: https://semver.org/
version = "1.0.1"
category = "Internal"
# Lists people or organizations that are considered the "authors" of the package.
authors = ["NVIDIA"]
# The title and description fields are primarily for displaying extension info in UI
title = "USD Property Clipboard Tests"
description="Clipboard tests that relate to usd properties and need to show a window."
# URL of the extension source repository.
repository = ""
# Preview image. Folder named "data" automatically goes in git lfs (see .gitattributes file).
preview_image = "data/preview.png"
# Icon is shown in Extensions window, it is recommended to be square, of size 256x256.
icon = "data/icon.png"
# Keywords for the extension
keywords = ["kit", "usd", "property"]
# Location of change log file in target (final) folder of extension, relative to the root.
# More info on writing changelog: https://keepachangelog.com/en/1.0.0/
changelog="docs/CHANGELOG.md"
# Path (relative to the root) or content of readme markdown file for UI.
readme = "docs/README.md"
[dependencies]
"omni.usd" = {}
"omni.ui" = {}
"omni.kit.window.property" = {}
"omni.kit.widget.stage" = {}
"omni.kit.context_menu" = {}
# Main python module this extension provides, it will be publicly available as "import omni.kit.property.usd".
[[python.module]]
name = "omni.kit.property.usd_clipboard_test"
[[test]]
timeout = 1200
args = [
"--/renderer/enabled=pxr",
"--/renderer/active=pxr",
"--/renderer/multiGpu/enabled=false",
"--/renderer/multiGpu/autoEnable=false", # Disable mGPU with PXR due to OM-51026, OM-53611
"--/renderer/multiGpu/maxGpuCount=1",
"--/app/asyncRendering=false",
"--/app/window/dpiScaleOverride=1.0",
"--/app/window/scaleToMonitor=false",
"--/app/file/ignoreUnsavedOnExit=true",
"--/persistent/app/stage/dragDropImport='reference'",
"--/persistent/app/material/dragDropMaterialPath='absolute'",
"--/persistent/app/omniverse/filepicker/options_menu/show_details=false",
# "--no-window" - NOTE: using no-window causes exception in MousePressed cast function
]
dependencies = [
"omni.hydra.pxr",
"omni.usd",
"omni.kit.window.content_browser",
"omni.kit.window.stage",
"omni.kit.property.material",
"omni.kit.ui_test",
"omni.kit.test_suite.helpers",
"omni.kit.window.viewport",
]
stdoutFailPatterns.exclude = [
"*HydraRenderer failed to render this frame*", # Can drop a frame or two rendering with OpenGL interop
"*Cannot use omni.hydra.pxr without OpenGL interop*" # Linux TC configs with multi-GPU might not have OpenGL available
]
| 2,643 | TOML | 31.641975 | 122 | 0.710556 |
omniverse-code/kit/exts/omni.kit.property.usd_clipboard_test/omni/kit/property/usd_clipboard_test/tests/__init__.py | from .test_property_context_menu import *
| 42 | Python | 20.49999 | 41 | 0.785714 |
omniverse-code/kit/exts/omni.kit.property.usd_clipboard_test/omni/kit/property/usd_clipboard_test/tests/test_property_context_menu.py | ## Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
##
## NVIDIA CORPORATION and its licensors retain all intellectual property
## and proprietary rights in and to this software, related documentation
## and any modifications thereto. Any use, reproduction, disclosure or
## distribution of this software and related documentation without an express
## license agreement from NVIDIA CORPORATION is strictly prohibited.
##
import omni.kit.test
import os
import sys
import unittest
import omni.kit.app
import omni.kit.window.property.managed_frame
from omni.kit.test.async_unittest import AsyncTestCase
import omni.usd
from omni.kit import ui_test
from pxr import Gf
from omni.kit.test_suite.helpers import (
open_stage,
get_test_data_path,
select_prims,
wait_stage_loading,
arrange_windows
)
class PropertyContextMenu(AsyncTestCase):
    """UI tests for the property-window context menu's Copy/Paste actions.

    These run against a real window (see extension README): the clipboard
    code requires one, so ``--no-window`` cannot be used.
    """
    # Before running each test
    async def setUp(self):
        await arrange_windows("Stage", 64)
        await open_stage(get_test_data_path(__name__, "usd/bound_shapes.usda"))
        # Expand "Raw USD Properties" so the xformOp rows are reachable.
        omni.kit.window.property.managed_frame.reset_collapsed_state()
        omni.kit.window.property.managed_frame.set_collapsed_state("Property/Raw USD Properties", False)
    # After running each test
    async def tearDown(self):
        await wait_stage_loading()
        omni.kit.window.property.managed_frame.reset_collapsed_state()
    # @unittest.skipIf(sys.platform.startswith("linux"), "Pyperclip fails on some TeamCity agents")
    async def test_property_context_menu(self):
        """Copy xformOp:translate from one prim and paste it onto another."""
        await ui_test.find("Content").focus()
        stage_window = ui_test.find("Stage")
        await stage_window.focus()
        usd_context = omni.usd.get_context()
        stage = usd_context.get_stage()
        await wait_stage_loading()
        # get prim attributes
        cube_attr = stage.GetPrimAtPath("/World/Cube").GetAttribute('xformOp:translate')
        cone_attr = stage.GetPrimAtPath("/World/Cone").GetAttribute('xformOp:translate')
        # verify transforms different
        self.assertEqual(cube_attr.Get(), Gf.Vec3d(119.899608, -1.138346, -118.761261))
        self.assertEqual(cone_attr.Get(), Gf.Vec3d( 0.0, 0.0, 0.0))
        # select cube
        await select_prims(["/World/Cube"])
        await ui_test.human_delay()
        # scroll window to xformOp:translate
        ui_test.find("Property//Frame/**/Label[*].text=='xformOp:translate'").widget.scroll_here_y(0.5)
        await ui_test.human_delay()
        # right click on xformOp:translate
        await ui_test.find("Property//Frame/**/Label[*].text=='xformOp:translate'").click(right_click=True)
        await ui_test.human_delay()
        # context menu copy
        await ui_test.select_context_menu("Copy", offset=ui_test.Vec2(10, 10))
        # select cone
        await select_prims(["/World/Cone"])
        await ui_test.human_delay()
        # scroll window to xformOp:translate
        ui_test.find("Property//Frame/**/Label[*].text=='xformOp:translate'").widget.scroll_here_y(0.5)
        await ui_test.human_delay()
        # right click on xformOp:translate
        await ui_test.find("Property//Frame/**/Label[*].text=='xformOp:translate'").click(right_click=True)
        await ui_test.human_delay()
        # context menu paste
        await ui_test.select_context_menu("Paste", offset=ui_test.Vec2(10, 10))
        # verify transforms same
        self.assertEqual(cube_attr.Get(), Gf.Vec3d(119.899608, -1.138346, -118.761261))
        self.assertEqual(cone_attr.Get(), Gf.Vec3d(119.899608, -1.138346, -118.761261))
    async def test_property_context_menu_paste(self):
        """Ensure clipboard text is treated as data, never executed as code."""
        await ui_test.find("Content").focus()
        stage_window = ui_test.find("Stage")
        await stage_window.focus()
        usd_context = omni.usd.get_context()
        stage = usd_context.get_stage()
        await wait_stage_loading()
        # select cube
        await select_prims(["/World/Cube"])
        await ui_test.human_delay(10)
        # scroll window to xformOp:translate
        ui_test.find("Property//Frame/**/Label[*].text=='xformOp:translate'").widget.scroll_here_y(0.5)
        await ui_test.human_delay()
        # verify code on clipboard is NOT getting executed
        omni.kit.clipboard.copy("omni.kit.stage_templates.new_stage()")
        # right click on xformOp:translate
        await ui_test.find("Property//Frame/**/Label[*].text=='xformOp:translate'").click(right_click=True)
        await ui_test.human_delay()
        await ui_test.find("Property//Frame/**/Label[*].text=='xformOp:translate'").click()
        # if code was executed a new stage will have been created, so verify prims
        await ui_test.human_delay(250)
        prims = [prim.GetPath().pathString for prim in stage.TraverseAll() if not omni.usd.is_hidden_type(prim)]
        prims.sort()
        self.assertEqual(prims, ['/World', '/World/Cone', '/World/Cube', '/World/Cylinder', '/World/Looks', '/World/Looks/OmniGlass', '/World/Looks/OmniGlass/Shader', '/World/Looks/OmniPBR', '/World/Looks/OmniPBR/Shader', '/World/Looks/OmniSurface_Plastic', '/World/Looks/OmniSurface_Plastic/Shader', '/World/Sphere', '/World/defaultLight'])
omniverse-code/kit/exts/omni.kit.property.usd_clipboard_test/docs/CHANGELOG.md | # Changelog
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
## [1.0.1] - 2022-12-21
### Added
- Stability fix
## [1.0.0] - 2022-11-28
### Added
- Test added.
| 195 | Markdown | 15.333332 | 80 | 0.625641 |
omniverse-code/kit/exts/omni.kit.property.usd_clipboard_test/docs/README.md | # omni.kit.property.usd_clipboard_test
## Introduction
This extension exists purely to hold tests for omni.kit.property.usd that need to show a real window during automated runs. This is mostly because the clipboard copy and paste code requires an actual window.
| 278 | Markdown | 45.499992 | 220 | 0.794964 |
omniverse-code/kit/exts/omni.kit.property.usd_clipboard_test/docs/index.rst | omni.kit.property.usd_clipboard_test: USD Property Clipboard Test Extension
###########################################################################
.. toctree::
:maxdepth: 1
CHANGELOG
USD Property Widget
===================
.. automodule:: omni.kit.property.usd_clipboard_test
:platform: Windows-x86_64, Linux-x86_64
:members:
:undoc-members:
:show-inheritance:
| 391 | reStructuredText | 23.499999 | 75 | 0.539642 |
omniverse-code/kit/exts/omni.kit.viewport.menubar.settings/omni/kit/viewport/menubar/settings/setting_menu_container.py | # Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
__all__ = ["SettingMenuContainer"]
from omni.kit.viewport.menubar.core import (
IconMenuDelegate,
SliderMenuDelegate,
CheckboxMenuDelegate,
SettingModel,
SettingModelWithDefaultValue,
ViewportMenuContainer,
FloatArraySettingColorMenuItem,
menu_is_tearable,
)
from .menu_item.settings_renderer_menu_item import SettingsRendererMenuItem
from .menu_item.settings_transform_manipulator import SettingsTransformManipulator
from .style import UI_STYLE
import carb
import carb.settings
import omni.ui as ui
from omni.ui import color as cl
from typing import Any, Dict, List, Union
from functools import partial
class ViewportSetting:
    """A carb setting identified by *key* with a known *default* value.

    When *read_incoming* is True, a value already present in the settings
    store takes precedence over the supplied default. When *set_default* is
    True the default is registered with carb immediately.
    """
    def __init__(self, key: str, default: Any, set_default: bool = True, read_incoming: bool = False):
        settings = carb.settings.get_settings()
        if read_incoming:
            existing = settings.get(key)
            if existing is not None:
                default = existing
        self.key = key
        self.default = default
        if set_default:
            settings.set_default(key, default)

    def reset(self, settings):
        """Restore the setting to its recorded default value."""
        settings.set(self.key, self.default)
class SelectionColorSetting(ViewportSetting):
    """Selection outline color, stored as a slice of a larger float array.

    The outline setting is one flat float array; this viewport's color lives
    at ``self.index`` (4 floats, RGBA). Reset splices the default back into
    that slice and also resets the intersection color.
    """
    OUTLINE = "/persistent/app/viewport/outline/color"
    INTERSECTION = "/persistent/app/viewport/outline/intersection/color"
    def __init__(self, default: Any):
        # set_default=False: the full array is owned elsewhere, only a slice is ours.
        super().__init__(self.OUTLINE, default, False)
        self.index = 1020
    def reset(self, settings):
        # Replace only our 4-float slice of the shared outline color array.
        float_array = settings.get(self.key)
        float_array = float_array[0 : self.index] + self.default + float_array[self.index + len(self.default) :]
        carb.settings.get_settings().set(self.OUTLINE, float_array)
        carb.settings.get_settings().set(self.INTERSECTION, self.default)
class VIEWPORT_SETTINGS:
    """Namespace of every ViewportSetting this menu exposes (key + default)."""
    # Camera navigation
    NAVIGATION_SPEED = ViewportSetting("/persistent/app/viewport/camMoveVelocity", 5.0)
    NAVIGATION_SPEED_MULTAMOUNT = ViewportSetting("/persistent/app/viewport/camVelocityScalerMultAmount", 1.1)
    SHOW_SPEED_ON_START = ViewportSetting("/persistent/app/viewport/camShowSpeedOnStart", True)
    ADAPTIVE_SPEED = ViewportSetting("/persistent/app/viewport/camVelocityCOINormalization", 0.0)
    GAMEPAD_CONTROL = ViewportSetting("/persistent/app/omniverse/gamepadCameraControl", True)
    CAMERA_STOP_ON_UP = ViewportSetting("/persistent/app/viewport/camStopOnMouseUp", True)
    # read_incoming: honor a value the manipulator extension may have set already.
    CAM_UPDATE_CLAMPING = ViewportSetting("/ext/omni.kit.manipulator.camera/clampUpdates", 0.15, read_incoming=True)
    INERTIA_ENABLED = ViewportSetting("/persistent/app/viewport/camInertiaEnabled", False)
    INERTIA_ANOUNT = ViewportSetting("/persistent/app/viewport/camInertiaAmount", 0.55)
    ROTATION_SMOOTH_ENABLED = ViewportSetting("/persistent/app/viewport/camRotSmoothEnabled", True)
    ROTATION_SMOOTH_SCALE = ViewportSetting("/persistent/app/viewport/camRotSmoothScale", 20.0)
    ROTATION_SMOOTH_ALWAYS = ViewportSetting("/persistent/app/viewport/camRotSmoothAlways", False)
    GESTURE_ENABLED = ViewportSetting("/persistent/app/viewport/camGestureEnabled", False)
    GESTURE_TIME = ViewportSetting("/persistent/app/viewport/camGestureTime", 0.12)
    GESTURE_RADIUS = ViewportSetting("/persistent/app/viewport/camGestureRadius", 20)
    # Selection / grid / gizmo display
    SELECTION_LINE_WIDTH = ViewportSetting("/persistent/app/viewport/outline/width", 2)
    GRID_LINE_WIDTH = ViewportSetting("/persistent/app/viewport/grid/lineWidth", 1)
    GRID_SCALE = ViewportSetting("/persistent/app/viewport/grid/scale", 100.0)
    GRID_FADE = ViewportSetting("/persistent/app/viewport/grid/lineFadeOutStartDistance", 10.0)
    GIZMO_LINE_WIDTH = ViewportSetting("/persistent/app/viewport/gizmo/lineWidth", 1.0)
    GIZMO_SCALE_ENABLED = ViewportSetting("/persistent/app/viewport/gizmo/constantScaleEnabled", True)
    GIZMO_SCALE = ViewportSetting("/persistent/app/viewport/gizmo/constantScale", 10.0)
    GIZMO_GLOBAL_SCALE = ViewportSetting("/persistent/app/viewport/gizmo/scale", 1.0)
    GIZMO_MIN_FADEOUT = ViewportSetting("/persistent/app/viewport/gizmo/minFadeOut", 1.0)
    GIZMO_MAX_FADEOUT = ViewportSetting("/persistent/app/viewport/gizmo/maxFadeOut", 50)
    # Viewport UI appearance
    UI_BACKGROUND_OPACITY = ViewportSetting("/persistent/app/viewport/ui/background/opacity", 1.0)
    UI_BRIGHTNESS = ViewportSetting("/persistent/app/viewport/ui/brightness", 0.84)
    OBJECT_CENTRIC = ViewportSetting("/persistent/app/viewport/objectCentricNavigation", 0)
    DOUBLE_CLICK_COI = ViewportSetting("/persistent/app/viewport/coiDoubleClick", False)
    # Colors
    BBOX_LINE_COLOR = ViewportSetting("/persistent/app/viewport/boundingBoxes/lineColor", [0.886, 0.447, 0.447])
    GRID_LINE_COLOR = ViewportSetting("/persistent/app/viewport/grid/lineColor", [0.3, 0.3, 0.3])
    OUTLINE_COLOR = SelectionColorSetting([1.0, 0.6, 0.0, 1.0])
    # Manipulator speeds
    LOOK_SPEED_HORIZ = ViewportSetting("/persistent/exts/omni.kit.manipulator.camera/lookSpeed/0", 180.0)
    LOOK_SPEED_VERT = ViewportSetting("/persistent/exts/omni.kit.manipulator.camera/lookSpeed/1", 90.0)
    TUMBLE_SPEED = ViewportSetting("/persistent/exts/omni.kit.manipulator.camera/tumbleSpeed", 360.0)
    ZOOM_SPEED = ViewportSetting("/persistent/exts/omni.kit.manipulator.camera/moveSpeed/2", 1.0)
    FLY_IGNORE_VIEW_DIRECTION = ViewportSetting("/persistent/exts/omni.kit.manipulator.camera/flyViewLock", False)
class ViewportSettingModel(SettingModelWithDefaultValue):
    """Setting model bound to a ViewportSetting's carb key and default value."""
    def __init__(self, viewport_setting: ViewportSetting, draggable: bool = False):
        super().__init__(viewport_setting.key, viewport_setting.default, draggable=draggable)
# Carb keys holding the min/max bounds for the navigation-speed sliders.
CAM_VELOCITY_MIN = "/persistent/app/viewport/camVelocityMin"
CAM_VELOCITY_MAX = "/persistent/app/viewport/camVelocityMax"
CAM_VELOCITY_SCALER_MIN = "/persistent/app/viewport/camVelocityScalerMin"
CAM_VELOCITY_SCALER_MAX = "/persistent/app/viewport/camVelocityScalerMax"
# Carb keys and fallback range for the UI brightness slider.
SETTING_UI_BRIGHTNESS_MIN = "/app/viewport/ui/minBrightness"
SETTING_UI_BRIGHTNESS_MAX = "/app/viewport/ui/maxBrightness"
BRIGHTNESS_VALUE_RANGE_MIN = 0.25
BRIGHTNESS_VALUE_RANGE_MAX = 1.0
# Offset of this viewport's RGBA slice in the shared outline color array.
OUTLINE_COLOR_INDEX = 1020
class SelectionColorMenuItem(FloatArraySettingColorMenuItem):
    """Color-picker menu item for the selection outline color."""
    def __init__(self):
        setting = VIEWPORT_SETTINGS.OUTLINE_COLOR
        super().__init__(
            setting.key, setting.default, name="Selection Color", start_index=setting.index, has_reset=True
        )
    def on_color_changed(self, colors: List[float]) -> None:
        # Set the default exterior color
        super().on_color_changed(colors)
        # Set the interior intersection color too
        carb.settings.get_settings().set(VIEWPORT_SETTINGS.OUTLINE_COLOR.INTERSECTION, colors)
class BoundingColorMenuItem(FloatArraySettingColorMenuItem):
    """Color-picker menu item for the bounding-box line color."""
    def __init__(self):
        setting = VIEWPORT_SETTINGS.BBOX_LINE_COLOR
        super().__init__(setting.key, setting.default, name="Bounding Box Color", has_reset=True)
class GridColorMenuItem(FloatArraySettingColorMenuItem):
    """Color-picker menu item for the grid line color."""
    def __init__(self):
        setting = VIEWPORT_SETTINGS.GRID_LINE_COLOR
        super().__init__(setting.key, setting.default, name="Grid Color", has_reset=True)
class MenuContext:
    """Per-viewport state for the settings menu.

    Owns the renderer menu item and any carb setting subscriptions so they
    can be released together via :meth:`destroy`.
    """
    def __init__(self):
        self.__renderer_menu_item: Union[SettingsRendererMenuItem, None] = None
        self.__settings = carb.settings.get_settings()
        self.__carb_subscriptions = []
    @property
    def settings(self):
        """The carb settings interface."""
        return self.__settings
    @property
    def renderer_menu_item(self) -> Union[SettingsRendererMenuItem, None]:
        """The per-viewport renderer menu item, if one has been built."""
        return self.__renderer_menu_item
    @renderer_menu_item.setter
    def renderer_menu_item(self, render_menu_item: Union[SettingsRendererMenuItem, None]) -> None:
        # Destroy any previous item before replacing it to avoid leaks.
        if self.__renderer_menu_item:
            self.__renderer_menu_item.destroy()
        self.__renderer_menu_item = render_menu_item
    def add_carb_subscription(self, carb_sub: carb.settings.SubscriptionId):
        """Track a carb subscription for release on destroy."""
        self.__carb_subscriptions.append(carb_sub)
    def destroy(self):
        """Release the renderer menu item and unsubscribe all carb subscriptions."""
        self.renderer_menu_item = None
        for sub in self.__carb_subscriptions:
            sub.unsubscribe()
        self.__carb_subscriptions = []
class SettingMenuContainer(ViewportMenuContainer):
"""The menu with the viewport settings"""
    def __init__(self):
        """Register the Settings menu and the defaults for the speed-slider bounds."""
        super().__init__(
            name="Settings",
            delegate=IconMenuDelegate("Settings"),
            visible_setting_path="/exts/omni.kit.viewport.menubar.settings/visible",
            order_setting_path="/exts/omni.kit.viewport.menubar.settings/order",
            style=UI_STYLE,
        )
        # One MenuContext per viewport_api id; populated lazily in _build_menu.
        self.__menu_context: Dict[str, MenuContext] = {}
        settings = carb.settings.get_settings()
        settings.set_default(CAM_VELOCITY_MIN, 0.01)
        settings.set_default(CAM_VELOCITY_MAX, 50)
        settings.set_default(CAM_VELOCITY_SCALER_MIN, 1)
        settings.set_default(CAM_VELOCITY_SCALER_MAX, 10)
    def destroy(self):
        """Tear down all per-viewport menu contexts, then the container itself."""
        for menu_ctx in self.__menu_context.values():
            menu_ctx.destroy()
        self.__menu_context = {}
        super().destroy()
    def build_fn(self, factory: Dict):
        """Build the top-level Settings menu; contents are built lazily by _build_menu."""
        ui.Menu(self.name, delegate=self._delegate, on_build_fn=partial(self._build_menu, factory), style=self._style)
    def _build_menu(self, factory: Dict) -> None:
        """Populate the Settings menu for the viewport described by *factory*.

        Replaces any previous MenuContext for the same viewport so rebuilt
        menus do not leak subscriptions or renderer items.
        """
        viewport_api = factory.get("viewport_api")
        if not viewport_api:
            return
        viewport_api_id = viewport_api.id
        menu_ctx = self.__menu_context.get(viewport_api_id)
        if menu_ctx:
            menu_ctx.destroy()
        menu_ctx = MenuContext()
        self.__menu_context[viewport_api_id] = menu_ctx
        ui.Menu(
            "Navigation",
            on_build_fn=lambda: self.__build_navigation_menu_items(menu_ctx),
            tearable=menu_is_tearable("omni.kit.viewport.menubar.settings.Navigation"),
        )
        ui.Menu(
            "Selection",
            on_build_fn=lambda: self.__build_selection_menu_items(menu_ctx),
            tearable=menu_is_tearable("omni.kit.viewport.menubar.settings.Selection"),
        )
        ui.Menu(
            "Grid",
            on_build_fn=lambda: self.__build_grid_menu_items(menu_ctx),
            tearable=menu_is_tearable("omni.kit.viewport.menubar.settings.Grid"),
        )
        ui.Menu(
            "Gizmos",
            on_build_fn=lambda: self.__build_gizmo_menu_items(menu_ctx),
            tearable=menu_is_tearable("omni.kit.viewport.menubar.settings.Gizmos"),
        )
        menu_ctx.renderer_menu_item = SettingsRendererMenuItem(
            "Viewport", factory=factory, tearable=menu_is_tearable("omni.kit.viewport.menubar.settings.Viewport")
        )
        ui.Menu(
            "Viewport UI",
            on_build_fn=lambda: self.__build_ui_menu_items(menu_ctx),
            tearable=menu_is_tearable("omni.kit.viewport.menubar.settings.ViewportUI"),
        )
        SettingsTransformManipulator(
            "Manipulator Transform",
            factory=factory,
            tearable=menu_is_tearable("omni.kit.viewport.menubar.settings.ManipulatorTransform"),
        )
        ui.Separator()
        ui.MenuItem(
            "Reset To Defaults",
            hide_on_click=False,
            triggered_fn=lambda vid=viewport_api_id: self.__reset_settings(vid),
        )
        ui.Separator()
        ui.MenuItem("Preferences", hide_on_click=False, triggered_fn=self._show_viewport_preference)
    def __build_navigation_menu_items(self, menu_ctx: MenuContext) -> None:
        """Build the Navigation sub-menu: speed, gamepad, COI, and advanced items."""
        settings = carb.settings.get_settings()
        ui.MenuItem(
            "Navigation Speed",
            hide_on_click=False,
            delegate=SliderMenuDelegate(
                model=ViewportSettingModel(VIEWPORT_SETTINGS.NAVIGATION_SPEED, draggable=True),
                min=settings.get(CAM_VELOCITY_MIN),
                max=settings.get(CAM_VELOCITY_MAX),
                tooltip="Set the Fly Mode navigation speed",
                has_reset=True,
            ),
        )
        ui.MenuItem(
            "Navigation Speed Scalar",
            hide_on_click=False,
            delegate=SliderMenuDelegate(
                model=ViewportSettingModel(VIEWPORT_SETTINGS.NAVIGATION_SPEED_MULTAMOUNT, draggable=True),
                min=settings.get(CAM_VELOCITY_SCALER_MIN),
                max=settings.get(CAM_VELOCITY_SCALER_MAX),
                tooltip="Change the Fly Mode navigation speed by this amount",
                has_reset=True,
            ),
        )
        ui.MenuItem(
            "Lock Navigation Height",
            hide_on_click=False,
            delegate=CheckboxMenuDelegate(
                model=ViewportSettingModel(VIEWPORT_SETTINGS.FLY_IGNORE_VIEW_DIRECTION),
                tooltip="Whether forward/backward and up/down movements ignore camera-view direction (similar to left/right strafe)",
                has_reset=True,
            )
        )
        ui.MenuItem(
            "Gamepad Camera Control",
            hide_on_click=False,
            delegate=CheckboxMenuDelegate(
                model=ViewportSettingModel(VIEWPORT_SETTINGS.GAMEPAD_CONTROL),
                tooltip="Enable gamepad navigation for this Viewport",
                has_reset=True,
            ),
        )
        ui.Separator()
        ui.MenuItem(
            "Object Centric Navigation",
            hide_on_click=False,
            delegate=CheckboxMenuDelegate(
                model=ViewportSettingModel(VIEWPORT_SETTINGS.OBJECT_CENTRIC),
                tooltip="Set camera's center of interest to center of object under mouse when camera manipulation begins",
                has_reset=True,
            ),
        )
        ui.MenuItem(
            "Double Click Sets Interest",
            hide_on_click=False,
            delegate=CheckboxMenuDelegate(
                model=ViewportSettingModel(VIEWPORT_SETTINGS.DOUBLE_CLICK_COI),
                tooltip="Double click will set the center of interest to the object under mouse." +
                        "\nEnabling this may make click-to-select less responsive.",
                has_reset=True,
            )
        )
        ui.Separator()
        self.__build_advanced_navigation_items(menu_ctx)
        ui.Separator()
        self.__build_navigation_speed_items(menu_ctx)
        self.__build_debug_settings(menu_ctx)
def __build_navigation_speed_items(self, menu_ctx: MenuContext):
ui.MenuItem(
"Look Speed Horizontal",
hide_on_click=False,
delegate=SliderMenuDelegate(
model=ViewportSettingModel(VIEWPORT_SETTINGS.LOOK_SPEED_HORIZ, draggable=True),
min=0,
max=360,
step=1,
tooltip="Set the Look Mode navigation speed as degrees rotated over a drag across the Viepwort horizonatally.",
has_reset=True,
),
)
ui.MenuItem(
"Look Speed Vertical",
hide_on_click=False,
delegate=SliderMenuDelegate(
model=ViewportSettingModel(VIEWPORT_SETTINGS.LOOK_SPEED_VERT, draggable=True),
min=0,
max=180,
step=1,
tooltip="Set the Look Mode navigation speed as degrees rotated over a drag across the Viepwort vertically.",
has_reset=True,
),
)
ui.MenuItem(
"Tumble Speed",
hide_on_click=False,
delegate=SliderMenuDelegate(
model=ViewportSettingModel(VIEWPORT_SETTINGS.TUMBLE_SPEED, draggable=True),
min=0,
max=720,
step=1,
tooltip="Set the Tumble Mode navigation speed as degrees rotated over a drag across the Viepwort.",
has_reset=True,
),
)
ui.MenuItem(
"Zoom Speed",
hide_on_click=False,
delegate=SliderMenuDelegate(
model=ViewportSettingModel(VIEWPORT_SETTINGS.ZOOM_SPEED, draggable=True),
min=0,
max=2,
tooltip="Set the Zoom Mode navigation speed",
has_reset=True,
),
)
def __build_advanced_navigation_items(self, menu_ctx: MenuContext):
settings = menu_ctx.settings
inertia_enable_model = ViewportSettingModel(VIEWPORT_SETTINGS.INERTIA_ENABLED)
ui.MenuItem(
"Inertia Mode",
hide_on_click=False,
delegate=CheckboxMenuDelegate(
model=inertia_enable_model,
tooltip="Enable advanced settings to control camera inertia and gestures for mouse manipulation",
has_reset=True,
),
)
inertia_menu_item = ui.MenuItem(
"Camera Inertia",
hide_on_click=False,
delegate=SliderMenuDelegate(
model=ViewportSettingModel(VIEWPORT_SETTINGS.INERTIA_ANOUNT, draggable=True),
tooltip="Seconds the inertia is active for",
min=0.0,
max=4.0,
has_reset=True,
),
)
# Show an entry for enabling disabling inertia on all modes if this value is set
inertia_modes = settings.get("/exts/omni.kit.manipulator.camera/inertiaModesEnabled")
inertia_modes_menu_item = None
if inertia_modes:
# Odd setting to control inertai always, but its what View was using, so preserve as it is persistant
legacy_all_interia_model = ViewportSettingModel(VIEWPORT_SETTINGS.ROTATION_SMOOTH_ALWAYS)
inertia_modes_menu_item = ui.MenuItem(
"Inertia For Other Movements",
hide_on_click=False,
delegate=CheckboxMenuDelegate(
model=legacy_all_interia_model,
tooltip="Apply inertia to other camera movements or only WASD navigation",
has_reset=True,
),
)
def _toggle_inertia_always(model: ui.AbstractValueModel):
if model.as_bool:
# Allow a user specified preference to enable ceratin modes only, otherwise default to all
inertia_modes = settings.get("/app/viewport/inertiaModesEnabled")
inertia_modes = inertia_modes or [1, 1, 1, 1]
else:
inertia_modes = [1, 0, 0, 0]
settings.set("/exts/omni.kit.manipulator.camera/inertiaModesEnabled", inertia_modes)
_toggle_inertia_always(legacy_all_interia_model)
menu_ctx.add_carb_subscription(
legacy_all_interia_model.subscribe_value_changed_fn(_toggle_inertia_always)
)
def __on_inertial_changed(model: ui.AbstractValueModel):
inertia_enabled = model.as_bool
inertia_menu_item.visible = inertia_enabled
if inertia_modes_menu_item:
inertia_modes_menu_item.visible = inertia_enabled
# Sync the state now
__on_inertial_changed(inertia_enable_model)
menu_ctx.add_carb_subscription(
inertia_enable_model.subscribe_value_changed_fn(__on_inertial_changed)
)
def __build_debug_settings(self, menu_ctx: MenuContext):
settings = menu_ctx.settings
_added_initial_separator = False
def add_initial_separator():
nonlocal _added_initial_separator
if not _added_initial_separator:
_added_initial_separator = True
ui.Separator()
if settings.get("/exts/omni.kit.viewport.menubar.settings/show/camera/clamping"):
add_initial_separator()
ui.MenuItem(
"Animation clamp",
hide_on_click=False,
delegate=SliderMenuDelegate(
model=ViewportSettingModel(VIEWPORT_SETTINGS.CAM_UPDATE_CLAMPING),
tooltip="Clamp animation to this maximum number of seconds",
min=0.0001,
max=1.0,
has_reset=True,
),
)
def __build_selection_menu_items(self, menu_ctx: MenuContext):
SelectionColorMenuItem()
ui.MenuItem(
"Selection Line Width",
hide_on_click=False,
delegate=SliderMenuDelegate(
model=ViewportSettingModel(VIEWPORT_SETTINGS.SELECTION_LINE_WIDTH, draggable=True),
min=1,
max=15,
slider_class=ui.IntSlider,
has_reset=True,
),
)
BoundingColorMenuItem()
def __build_grid_menu_items(self, menu_ctx: MenuContext):
GridColorMenuItem()
ui.MenuItem(
"Grid Line Width",
hide_on_click=False,
delegate=SliderMenuDelegate(
model=ViewportSettingModel(VIEWPORT_SETTINGS.GRID_LINE_WIDTH, draggable=True),
min=1,
max=10,
slider_class=ui.IntSlider,
has_reset=True,
),
)
ui.MenuItem(
"Grid Size",
hide_on_click=False,
delegate=SliderMenuDelegate(
model=ViewportSettingModel(VIEWPORT_SETTINGS.GRID_SCALE, draggable=True),
min=1.0,
max=1000.0,
has_reset=True,
),
)
fadeout_model = ViewportSettingModel(VIEWPORT_SETTINGS.GRID_FADE, draggable=True)
def __on_fadeout_changed(model: ui.AbstractValueModel):
carb.settings.get_settings().set("/persistent/app/viewport/grid/lineFadeOutEndDistance", model.as_float * 4)
ui.MenuItem(
"Grid Fade",
hide_on_click=False,
delegate=SliderMenuDelegate(model=fadeout_model, min=0.5, max=50.0, has_reset=True),
)
menu_ctx.add_carb_subscription(
fadeout_model.subscribe_value_changed_fn(__on_fadeout_changed)
)
    def __build_ui_menu_items(self, menu_ctx: MenuContext):
        """Build the menubar-appearance entries: background opacity and control brightness."""
        def __ui_background_opacity_changed(model: ui.AbstractValueModel) -> None:
            # Fold the 0..1 slider value into the alpha byte of the stored
            # 0xAARRGGBB menubar background color, preserving the RGB bytes.
            alpha = int(model.as_float * 255)
            name = "viewport_menubar_background"
            color = cl._find(name)
            color = (color & 0x00FFFFFF) + (alpha << 24)
            cl._store(name, color)
        ui_background_opacity_model = ViewportSettingModel(VIEWPORT_SETTINGS.UI_BACKGROUND_OPACITY, draggable=True)
        ui.MenuItem(
            "UI Background Opacity",
            hide_on_click=False,
            delegate=SliderMenuDelegate(model=ui_background_opacity_model, min=0.0, max=1.0, has_reset=True),
        )
        # Apply the persisted opacity immediately so the UI matches the setting.
        __ui_background_opacity_changed(ui_background_opacity_model)
        settings = carb.settings.get_settings()
        min_brightness = settings.get(SETTING_UI_BRIGHTNESS_MIN)
        max_brightness = settings.get(SETTING_UI_BRIGHTNESS_MAX)
        def __ui_brightness_changed(model: ui.AbstractValueModel) -> None:
            def __gray_to_color(gray: int):
                # Build an opaque gray 0xAARRGGBB color from a single channel value.
                return 0xFF000000 + (gray << 16) + (gray << 8) + gray
            # Normalize the slider value to 0..1 within the allowed brightness range.
            value = (model.as_float - BRIGHTNESS_VALUE_RANGE_MIN) / (
                BRIGHTNESS_VALUE_RANGE_MAX - BRIGHTNESS_VALUE_RANGE_MIN
            )
            light_gray = int(value * 255)
            color = __gray_to_color(light_gray)
            cl._store("viewport_menubar_light", color)
            # The "medium" shade is a fixed fraction (0.539) of the light shade.
            medium_gray = int(light_gray * 0.539)
            color = __gray_to_color(medium_gray)
            cl._store("viewport_menubar_medium", color)
        ui_brightness_model = ViewportSettingModel(VIEWPORT_SETTINGS.UI_BRIGHTNESS, draggable=True)
        ui.MenuItem(
            "UI Control Brightness",
            hide_on_click=False,
            delegate=SliderMenuDelegate(model=ui_brightness_model, min=min_brightness, max=max_brightness, has_reset=True),
        )
        # Apply the persisted brightness immediately so the UI matches the setting.
        __ui_brightness_changed(ui_brightness_model)
        menu_ctx.add_carb_subscription(
            ui_background_opacity_model.subscribe_value_changed_fn(__ui_background_opacity_changed)
        )
        menu_ctx.add_carb_subscription(
            ui_brightness_model.subscribe_value_changed_fn(__ui_brightness_changed)
        )
def __build_gizmo_menu_items(self, menu_ctx: MenuContext):
ui.MenuItem(
"Gizmo Line Width",
hide_on_click=False,
delegate=SliderMenuDelegate(
model=ViewportSettingModel(VIEWPORT_SETTINGS.GIZMO_LINE_WIDTH, draggable=True),
min=1.0,
max=25.0,
has_reset=True,
),
)
scale_enabled_model = ViewportSettingModel(VIEWPORT_SETTINGS.GIZMO_SCALE_ENABLED)
ui.MenuItem(
"Gizmo Constant Scale Enabled",
hide_on_click=False,
delegate=CheckboxMenuDelegate(model=scale_enabled_model, has_reset=True),
)
constant_scale_menu_item = ui.MenuItem(
"Gizmo Constant Scale",
hide_on_click=False,
delegate=SliderMenuDelegate(
model=ViewportSettingModel(VIEWPORT_SETTINGS.GIZMO_SCALE, draggable=True),
min=0.5,
max=100.0,
has_reset=True,
),
)
global_scale_menu_item = ui.MenuItem(
"Gizmo Camera Scale" if scale_enabled_model.as_bool else "Gizmo Global Scale",
hide_on_click=False,
delegate=SliderMenuDelegate(
model=ViewportSettingModel(VIEWPORT_SETTINGS.GIZMO_GLOBAL_SCALE, draggable=True),
min=0.01,
max=4.0,
has_reset=True,
),
)
def __on_gizmo_enabled_changed(model: SettingModel):
is_constant_scale = model.as_bool
constant_scale_menu_item.visible = is_constant_scale
global_scale_menu_item.text = "Gizmo Camera Scale" if is_constant_scale else "Gizmo Global Scale"
__on_gizmo_enabled_changed(scale_enabled_model)
menu_ctx.add_carb_subscription(
scale_enabled_model.subscribe_value_changed_fn(__on_gizmo_enabled_changed)
)
ui.MenuItem(
"Gizmo Min FadeOut",
hide_on_click=False,
delegate=SliderMenuDelegate(
model=ViewportSettingModel(VIEWPORT_SETTINGS.GIZMO_MIN_FADEOUT, draggable=True),
min=1.0,
max=1000.0,
has_reset=True,
),
)
ui.MenuItem(
"Gizmo Max FadeOut",
hide_on_click=False,
delegate=SliderMenuDelegate(
model=ViewportSettingModel(VIEWPORT_SETTINGS.GIZMO_MAX_FADEOUT, draggable=True),
min=1.0,
max=1000.0,
has_reset=True,
),
)
def __reset_settings(self, viewport_api_id: str):
settings = carb.settings.get_settings()
for value in VIEWPORT_SETTINGS.__dict__.values():
if isinstance(value, ViewportSetting):
value.reset(settings)
# Only reset renderer settings of current viewport
menu_ctx = self.__menu_context.get(viewport_api_id)
renderer_menu_item = menu_ctx.renderer_menu_item if menu_ctx else None
if renderer_menu_item:
renderer_menu_item.reset()
def _show_viewport_preference(self) -> None:
try:
import omni.kit.window.preferences as preferences
import asyncio
async def focus_async():
pref_window = ui.Workspace.get_window("Preferences")
if pref_window:
pref_window.focus()
PAGE_TITLE = "Viewport"
inst = preferences.get_instance()
if not inst:
carb.log_error("Preferences extension is not loaded yet")
return
pages = preferences.get_page_list()
for page in pages:
if page.get_title() == PAGE_TITLE:
inst.select_page(page)
# Show the Window
inst.show_preferences_window()
# Force the tab to be the active/focused tab (this currently needs to be done in async)
asyncio.ensure_future(focus_async())
return page
else:
carb.log_error("Viewport Preferences page not found!")
except ImportError:
carb.log_error("omni.kit.window.preferences not enabled!")
| 28,966 | Python | 40.263533 | 133 | 0.617483 |
omniverse-code/kit/exts/omni.kit.viewport.menubar.settings/omni/kit/viewport/menubar/settings/style.py | from omni.ui import color as cl
from omni.ui import constant as fl
from pathlib import Path
# Directory of this module; icons live under the extension's data/icons folder.
CURRENT_PATH = Path(__file__).parent
ICON_PATH = CURRENT_PATH.parent.parent.parent.parent.parent.joinpath("data").joinpath("icons")
# omni.ui style overrides for the settings menu entries.
# NOTE(review): "Resolution.Text" references cl.input_hint before it is assigned
# below -- presumably the omni.ui color store resolves names lazily; verify.
UI_STYLE = {
    "Menu.Item.Icon::Settings": {"image_url": f"{ICON_PATH}/viewport_settings.svg"},
    "ResolutionLink": {"background_color": 0, "margin": 0, "padding": 2},
    "ResolutionLink.Image": {"image_url": f"{ICON_PATH}/link_dark.svg", "margin": 0},
    "ResolutionLink.Image:checked": {"image_url": f"{ICON_PATH}/link.svg"},
    "ComboBox::ratio": {"background_color": 0x0, "padding": 4, "margin": 0},
    "Menu.Item.Button::save": {"padding": 0, "margin": 0, "background_color": 0},
    "Menu.Item.Button.Image::save": {"image_url": f"{ICON_PATH}/save.svg", "color": cl.viewport_menubar_light},
    "Menu.Item.Button.Image::save:checked": {"color": cl.shade(cl("#0697cd"))},
    "Ratio.Background": {"background_color": 0xFF444444, "border_color": 0xFFA1701B, "border_width": 1},
    "Resolution.Text": {"color": cl.input_hint},
    "Resolution.Name": {"color": cl.viewport_menubar_light},
    "Resolution.Del": {"image_url": f"{ICON_PATH}/delete.svg"},
}
# Named shades shared by the save-resolution window style below.
cl.save_background = cl.shade(cl("#1F2123"))
cl.input_hint = cl.shade(cl('#5A5A5A'))
# Style for the "save custom resolution" popup window.
SAVE_WINDOW_STYLE = {
    "Window": {"secondary_background_color": 0x0},
    "Titlebar.Background": {"background_color": cl.save_background},
    "Input.Hint": {"color": cl.input_hint},
    "Image::close": {"image_url": f"{ICON_PATH}/close.svg"},
    "Button": {"background_color": cl.save_background},
}
| 1,585 | Python | 45.647057 | 111 | 0.65489 |
omniverse-code/kit/exts/omni.kit.viewport.menubar.settings/omni/kit/viewport/menubar/settings/extension.py | # Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
__all__ = ["ViewportSettingsMenuBarExtension"]
from .setting_menu_container import SettingMenuContainer
import omni.ext
class ViewportSettingsMenuBarExtension(omni.ext.IExt):
    """The Entry Point for the Viewport Settings in Viewport Menu Bar"""

    def on_startup(self, ext_id):
        """Create the settings menu container when the extension starts."""
        self._settings_menu = SettingMenuContainer()

    def on_shutdown(self):
        """Destroy the settings menu container and drop the reference."""
        menu, self._settings_menu = self._settings_menu, None
        menu.destroy()
| 873 | Python | 35.416665 | 76 | 0.761741 |
omniverse-code/kit/exts/omni.kit.viewport.menubar.settings/omni/kit/viewport/menubar/settings/__init__.py | from .extension import *
| 25 | Python | 11.999994 | 24 | 0.76 |
omniverse-code/kit/exts/omni.kit.viewport.menubar.settings/omni/kit/viewport/menubar/settings/menu_item/settings_transform_manipulator.py | # Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
__all__ = ["SettingsTransformManipulator"]
from omni.kit.viewport.menubar.core import (
CheckboxMenuDelegate,
ComboBoxMenuDelegate,
SliderMenuDelegate,
SettingComboBoxModel,
ComboBoxItem,
SettingModelWithDefaultValue,
ResetHelper,
)
import omni.ui as ui
import carb.settings
from typing import Any, Dict, Tuple, List, Optional, Union
from functools import partial
SETTING_SCALE = "/persistent/exts/omni.kit.manipulator.transform/manipulator/scaleMultiplier"
SETTING_FREE_ROTATION_ENABLED = "/persistent/exts/omni.kit.manipulator.transform/manipulator/freeRotationEnabled"
SETTING_FREE_ROTATION_TYPE = "/persistent/exts/omni.kit.manipulator.transform/manipulator/freeRotationType"
SETTING_INTERSECTION_THICKNESS = "/persistent/exts/omni.kit.manipulator.transform/manipulator/intersectionThickness"
FREE_ROTATION_TYPE_CLAMPED = "Clamped"
FREE_ROTATION_TYPE_CONTINUOUS = "Continuous"
MENU_WIDTH = 350
class _ManipulatorRotationTypeModel(SettingComboBoxModel, ResetHelper):
    """Combo-box model selecting the manipulator free-rotation type (clamped/continuous)."""

    def __init__(self):
        super().__init__(
            SETTING_FREE_ROTATION_TYPE,
            [FREE_ROTATION_TYPE_CLAMPED, FREE_ROTATION_TYPE_CONTINUOUS],
        )

    def _on_current_item_changed(self, item: ComboBoxItem) -> None:
        super()._on_current_item_changed(item)
        # Keep the reset arrow in sync with whether the value matches the default.
        self._update_reset_button()

    def get_default(self):
        """The value the reset button restores: clamped rotation."""
        return FREE_ROTATION_TYPE_CLAMPED

    def get_value(self):
        """Current value read straight from the settings store."""
        return carb.settings.get_settings().get(SETTING_FREE_ROTATION_TYPE)

    def restore_default(self) -> None:
        """Select the combo-box entry holding the default rotation type."""
        index_model = self.current_index
        if not index_model:
            return
        entries = self.get_item_children(None)
        # Nothing to do when the default entry is already selected.
        if entries[index_model.as_int].value == FREE_ROTATION_TYPE_CLAMPED:
            return
        for position, entry in enumerate(entries):
            if entry.value == FREE_ROTATION_TYPE_CLAMPED:
                index_model.set_value(position)
                return
class SettingsTransformManipulator(ui.Menu):
    """The menu with the transform manipulator settings"""

    def __init__(self, text: str = "", factory: Dict = {}, **kwargs):
        # Seed defaults so every reset arrow has a well-defined target value.
        settings = carb.settings.get_settings()
        settings.set_default_float(SETTING_SCALE, 1.4)
        settings.set_default_bool(SETTING_FREE_ROTATION_ENABLED, True)
        settings.set_default_string(SETTING_FREE_ROTATION_TYPE, FREE_ROTATION_TYPE_CLAMPED)
        settings.set_default_float(SETTING_INTERSECTION_THICKNESS, 10.0)
        super().__init__(text, on_build_fn=partial(self.build_fn, factory), **kwargs)

    def build_fn(self, factory: Dict):
        """Populate the menu with the manipulator scale/rotation/thickness entries."""
        scale_model = SettingModelWithDefaultValue(SETTING_SCALE, 1.4, draggable=True)
        ui.MenuItem(
            "Transform Manipulator Scale",
            hide_on_click=False,
            delegate=SliderMenuDelegate(
                model=scale_model,
                width=MENU_WIDTH,
                min=0.0,
                max=25.0,
                has_reset=True,
            ),
        )
        free_rotation_model = SettingModelWithDefaultValue(SETTING_FREE_ROTATION_ENABLED, True, draggable=True)
        ui.MenuItem(
            "Enable Free Rotation",
            hide_on_click=False,
            delegate=CheckboxMenuDelegate(
                model=free_rotation_model,
                width=MENU_WIDTH,
                has_reset=True,
            ),
        )
        rotation_type_model = _ManipulatorRotationTypeModel()
        ui.MenuItem(
            "Free Rotation Type",
            hide_on_click=False,
            delegate=ComboBoxMenuDelegate(
                model=rotation_type_model,
                width=MENU_WIDTH,
                has_reset=True,
            ),
        )
        thickness_model = SettingModelWithDefaultValue(SETTING_INTERSECTION_THICKNESS, 10.0, True)
        ui.MenuItem(
            "Manipulator Intersection Thickness",
            hide_on_click=False,
            delegate=SliderMenuDelegate(
                model=thickness_model,
                width=MENU_WIDTH,
                min=1.0,
                max=50.0,
                has_reset=True,
            ),
        )
omniverse-code/kit/exts/omni.kit.viewport.menubar.settings/omni/kit/viewport/menubar/settings/menu_item/settings_renderer_menu_item.py | # Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
__all__ = ["SettingsRendererMenuItem"]
from .custom_resolution.custom_resolution_menu_item import CustomResolutionMenuItem
from .resolution_collection.model import ComboBoxResolutionModel
from .resolution_collection.menu import ResolutionCollectionMenu
from omni.kit.viewport.menubar.core import (
ViewportMenuItem,
CheckboxMenuDelegate,
ComboBoxMenuDelegate,
ComboBoxModel,
SettingComboBoxModel,
ComboBoxItem,
ResetHelper,
RadioMenuCollection,
)
import omni.ui as ui
import omni.kit.app
import carb
from pxr import Sdf
from typing import Any, Dict, Tuple, List, Optional, Union
from functools import partial
SETTING_APERTURE = "/app/hydra/aperture/conform"
SETTING_RENDER_SCALE_LIST = "/app/renderer/resolution/multiplierList"
def _resolve_viewport_setting(viewport_id: str, setting_name: str, isettings: carb.settings.ISettings,
legacy_key: Union[str, None] = None):
# Resolve a default Viewport setting from the most specific to the most general
# /app/viewport/Viewport/Viewport0/setting => Startup value for this specific Viewport
# /app/viewport/defaults/setting => Startup value targetting all Viewports
# Next check if a non-persitent viewport-specific default exists via toml / start-up settings
dflt_setting_key = f"/app/viewport/{viewport_id}/{setting_name}"
pers_setting_key = "/persistent" + dflt_setting_key
# 1. Get the persistant per-viewport value that is saved (may be non-existant)
cur_value = isettings.get(pers_setting_key)
# 2. Get the per-viewport default that the setting should restore to
dflt_value = isettings.get(f"/app/viewport/{viewport_id}/{setting_name}")
# 3. If there is no per-viewport default, try to restore to a value for all Viewports
if dflt_value is None:
dflt_value = isettings.get(f"/app/viewport/defaults/{setting_name}")
# 4. If still no value to restore to, check for a legacy setting that represnts this
if dflt_value is None:
if legacy_key:
dflt_value = isettings.get(legacy_key)
elif setting_name == "resolution":
width = isettings.get("/app/renderer/resolution/width")
height = isettings.get("/app/renderer/resolution/height")
if (width is not None) and (height is not None):
# When either width or height is 0 or less, Viewport will be set to use UI size
if (width > 0) and (height > 0):
dflt_value = (width, height)
if dflt_value is None:
dflt_value = (0, 0)
if cur_value is None:
cur_value = dflt_value
return (
(pers_setting_key, cur_value),
(dflt_setting_key, dflt_value)
)
class _ViewportResolutionSetter:
"""Simple class that forwards resolution menu item changes to the proper underlying object"""
def __init__(self, factory_dict: dict, fill_viewport: bool):
self.__factory_dict = factory_dict
# Set the Viewport's fill_frame to False as we are controlling it fully
viewport_api = self.viewport_api
if viewport_api and viewport_api.fill_frame:
viewport_api.fill_frame = False
viewport_widget = self.viewport_widget
if viewport_widget:
viewport_widget.expand_viewport = fill_viewport
@property
def viewport_api(self):
return self.__factory_dict.get("viewport_api")
@property
def viewport_widget(self):
return self.__factory_dict.get("layer_provider").viewport_widget
@property
def fill_frame(self) -> bool:
return self.viewport_widget.fill_frame
@property
def fill_viewport(self) -> bool:
return self.viewport_widget.expand_viewport
@fill_viewport.setter
def fill_viewport(self, value: bool):
self.viewport_widget.expand_viewport = value
def set_resolution(self, resolution) -> None:
self.viewport_widget.set_resolution(resolution)
@property
def full_resolution(self) -> Tuple[float, float]:
return self.viewport_widget.full_resolution
class _ComboBoxResolutionScaleModel(SettingComboBoxModel, ResetHelper):
    """The resolution scale model has all the resolution scales and sets the viewport resolution scale"""
    def __init__(self, viewport_api, resolution_scale_setting, settings):
        # viewport_api: the Viewport to drive.
        # resolution_scale_setting: output of _resolve_viewport_setting for "resolutionScale".
        self.__viewport_api = viewport_api
        # Get the list of available multipliers or a default
        values = settings.get(SETTING_RENDER_SCALE_LIST) or [2.0, 1.0, 0.666666666666, 0.5, 0.333333333333, 0.25]
        # Resolved default to restore to; fall back to 1.0 when unset or non-positive
        default = resolution_scale_setting[1][1]
        self.__default = default if default and default > 0 else 1.0
        # Persisted per-viewport value (None when never saved)
        current_value = resolution_scale_setting[0][1]
        # Push current_value into resolution_scale if not set to it already
        if (current_value is not None) and (current_value > 0) and (current_value != self.__viewport_api.resolution_scale):
            self.__viewport_api.resolution_scale = current_value
        SettingComboBoxModel.__init__(
            self,
            # Persist selection changes to the per-viewport persistent key
            setting_path=resolution_scale_setting[0][0],
            texts=[str(int(value * 100)) + "%" for value in values],
            values=values,
            # This is passed to avoid defaulting the per-viewport persistent key to a value so that changes to the
            # setting when not adjusted/saved will pick up the new default
            current_value=self.__viewport_api.resolution_scale,
        )
        ResetHelper.__init__(self)
    def _on_current_item_changed(self, item: ComboBoxItem) -> None:
        # Forward the selection to the Viewport and refresh the reset-arrow state.
        super()._on_current_item_changed(item)
        self.__viewport_api.resolution_scale = item.value
        self._update_reset_button()
    # for ResetHelper
    def get_default(self):
        """Return the resolved default resolution scale."""
        return self.__default
    def restore_default(self) -> None:
        """Select the combo-box entry matching the default scale, if any."""
        if self.__default is not None:
            current_index = self.current_index
            if current_index:
                current = current_index.as_int
                items = self.get_item_children(None)
                # Early exit if the model is already correct
                if items[current].value == self.__default:
                    return
                # Iterate all items, and select the first match to the real value
                for index, item in enumerate(items):
                    if item.value == self.__default:
                        current_index.set_value(index)
                        return
    def get_value(self):
        """Return the Viewport's current resolution scale."""
        return self.__viewport_api.resolution_scale
class _ComboBoxApertureFitModel(ComboBoxModel):
    """The aperture model"""

    def __init__(self, viewport_api, settings):
        self.__viewport_api = viewport_api
        labels = ["Match Vertical", "Match Horizontal", "Fit", "Crop", "Stretch"]
        # Fall back to "Match Horizontal" (1) when no policy has been set.
        policy = settings.get(SETTING_APERTURE) or 1
        super().__init__(labels, values=[0, 1, 2, 3, 4], current_value=policy)

    def _on_current_item_changed(self, item: ComboBoxItem) -> None:
        # TODO: Add to Python bindings for UsdContext or HydraTexture
        # self.__viewport_api.set_aperture_conform_policy(item.value)
        pass
class _FillViewportModel(ui.AbstractValueModel, ResetHelper):
    """Bool model bridging the 'Fill Viewport' checkbox, the resolution setter and the persisted setting."""
    def __init__(self, resolution_setter, fill_viewport_settings, isettings: carb.settings.ISettings):
        # resolution_setter: _ViewportResolutionSetter owning the actual widget state.
        # fill_viewport_settings: output of _resolve_viewport_setting for "fillViewport".
        self.__resolution_setter = resolution_setter
        # Get the default value that this item should reset/restore to
        self.__default = bool(fill_viewport_settings[1][1])
        self.__saved_value = self.__default
        # This is the per-viewport persistent path this item will save to
        self.__setting_path = fill_viewport_settings[0][0]
        ui.AbstractValueModel.__init__(self)
        ResetHelper.__init__(self)
        # Two-way sync: model -> setting and setting -> model.
        self.__sub_model = self.subscribe_value_changed_fn(self.__on_value_changed)
        self.__sub_setting = isettings.subscribe_to_node_change_events(self.__setting_path, self.__on_setting_changed)
    def destroy(self):
        # Drop both subscriptions; safe to call more than once.
        self.__sub_model = None
        if self.__sub_setting:
            carb.settings.get_settings().unsubscribe_to_change_events(self.__sub_setting)
            self.__sub_setting = None
    def get_value_as_bool(self) -> bool:
        # The widget (via the resolution setter) is the source of truth, not the setting.
        return self.__resolution_setter.fill_viewport
    def set_value(self, value: bool, save_restore: bool = False):
        # save_restore=True restores the previously saved user value when turning
        # on, and remembers the current value before turning off.
        value = bool(value)
        if save_restore:
            if value:
                value = self.__saved_value
            else:
                self.__saved_value = self.get_value_as_bool()
        if value != self.get_value_as_bool():
            self.__resolution_setter.fill_viewport = value
            self._value_changed()
    # for ResetHelper
    def get_default(self):
        return self.__default
    def restore_default(self) -> None:
        self.set_value(self.__default)
    def get_value(self):
        return self.get_value_as_bool()
    def __on_setting_changed(self, *args, **kwargs):
        # Only react while the model subscription is active; it is suppressed
        # while __on_value_changed is writing the setting below.
        if self.__sub_model:
            self.set_value(carb.settings.get_settings().get(self.__setting_path))
    def __on_value_changed(self, model: ui.AbstractValueModel):
        # Temporarily clear self.__sub_model so __on_setting_changed ignores the
        # echo produced by writing the setting here.
        settings = carb.settings.get_settings()
        model_sub, self.__sub_model = self.__sub_model, None
        try:
            value = model.as_bool
            if bool(settings.get(self.__setting_path)) != value:
                settings.set(self.__setting_path, value)
            self._update_reset_button()
        finally:
            # Make sure to put the subscription back
            self.__sub_model = model_sub
class SettingsRendererMenuItem(ui.Menu):
    """The menu with the viewport settings"""
    def __init__(self, text: str = "", factory: Dict = {}, **kwargs):
        # NOTE(review): mutable default for `factory` -- it is only read and
        # forwarded to build_fn here, so it is shared but not mutated; confirm.
        self.__resolution_model: Union[ComboBoxResolutionModel, None] = None
        self.__render_scale_model: Union[_ComboBoxResolutionScaleModel, None] = None
        self.__fill_viewport_model: Union[_FillViewportModel, None] = None
        self.__custom_menu_item: Union[CustomResolutionMenuItem, None] = None
        self.__viewport_api_id: Union[str, None] = None
        super().__init__(text, on_build_fn=partial(self.build_fn, factory), **kwargs)
    def build_fn(self, factory: Dict):
        """Build the per-viewport resolution, render-scale and fill-viewport entries."""
        # Create the model and the delegate here, not in __init__ to make the
        # objects unique per viewport.
        viewport_api = factory["viewport_api"]
        viewport_api_id = viewport_api.id
        isettings = carb.settings.get_settings()
        self.__viewport_api_id = viewport_api_id
        # Resolve each setting's (persistent current, restore default) pair.
        resolution_settings = _resolve_viewport_setting(viewport_api_id, "resolution", isettings)
        fill_viewport_settings = _resolve_viewport_setting(viewport_api_id, "fillViewport", isettings)
        resolution_scale_settings = _resolve_viewport_setting(viewport_api_id, "resolutionScale", isettings)
        resolution_delegate = _ViewportResolutionSetter(factory, fill_viewport_settings[0][1])
        self.__resolution_model = ComboBoxResolutionModel(resolution_delegate, resolution_settings, isettings)
        ResolutionCollectionMenu("Render Resolution", self.__resolution_model)
        self.__custom_menu_item = CustomResolutionMenuItem(self.__resolution_model, resolution_delegate)
        self.__custom_menu_item.resolution = resolution_delegate.full_resolution
        self.__render_scale_model = _ComboBoxResolutionScaleModel(viewport_api, resolution_scale_settings, isettings)
        ui.MenuItem(
            "Render Scale",
            delegate=ComboBoxMenuDelegate(model=self.__render_scale_model, has_reset=True),
            hide_on_click=False
        )
        # Requires Python bindings to set this through to the renderer
        # ui.MenuItem(
        #     "Aperture Policy",
        #     delegate=ComboBoxMenuDelegate(model=_ComboBoxApertureFitModel(viewport_api, settings)),
        #     hide_on_click=False,
        # )
        self.__fill_viewport_model = _FillViewportModel(resolution_delegate, fill_viewport_settings, isettings)
        self.__fill_viewport_item = ui.MenuItem(
            "Fill Viewport",
            delegate=CheckboxMenuDelegate(model=self.__fill_viewport_model, width=310, has_reset=True),
            hide_on_click=False,
        )
        # Watch for an index change to disable / enable 'Fill Viewport' checkbox
        self.__sub_resolution_index = self.__resolution_model.current_index.subscribe_value_changed_fn(
            self.__on_resolution_index_changed
        )
        # Viewport can be changed externally, watch for any resolution changes to sync back into our models
        self.__sub_render_settings = viewport_api.subscribe_to_render_settings_change(
            self.__on_render_settings_changed
        )
    def __del__(self):
        self.destroy()
    def destroy(self):
        """Release subscriptions and child models; safe to call multiple times."""
        self.__sub_render_settings = None
        self.__sub_resolution_index = None
        if self.__resolution_model:
            self.__resolution_model.destroy()
            self.__resolution_model = None
        if self.__render_scale_model:
            self.__render_scale_model.destroy()
            self.__render_scale_model = None
        if self.__fill_viewport_model:
            self.__fill_viewport_model.destroy()
            self.__fill_viewport_model = None
        super().destroy()
    def reset(self) -> None:
        """Restore fill-viewport, render-scale and resolution to their defaults."""
        # Restore fill-viewport based on setting
        if self.__fill_viewport_model:
            self.__fill_viewport_model.restore_default()
        # Restore resolution scale based on setting
        if self.__render_scale_model:
            self.__render_scale_model.restore_default()
        # Restore resolution based on setting
        if self.__resolution_model:
            self.__resolution_model.restore_default()
    def __sync_model(self, combo_model: ComboBoxModel, value: Any, select_first: bool = False):
        # Select the combo-box entry matching `value`, falling back to "Custom".
        current_index = combo_model.current_index
        # Special case for forcing "Viewport" selection to be checked
        if select_first:
            if current_index.as_int != 0:
                current_index.set_value(0)
            return
        items = combo_model.get_item_children(None)
        if items and items[current_index.as_int].value != value:
            # Iterate all items, and select the first match to the real value
            index_custom = -1
            for index, item in enumerate(items):
                if item.value == value:
                    current_index.set_value(index)
                    return
                if item.model.as_string == "Custom":
                    index_custom = index
            # No exact match: select the "Custom" entry when one exists.
            if index_custom != -1:
                current_index.set_value(index_custom)
    def __on_resolution_index_changed(self, index_model: ui.SimpleIntModel) -> None:
        # Enable or disable the 'Fill Viewport' option based on whether using Widget size for render resolution
        # XXX: Changing visibility causes the menu to resize, which isn't great
        index = index_model.as_int
        fill_enabled = index != 0 if index_model else False
        if fill_enabled != self.__fill_viewport_item.delegate.enabled:
            self.__fill_viewport_model.set_value(fill_enabled, True)
            self.__fill_viewport_item.delegate.enabled = fill_enabled
        # When fillViewport is turned off, try to restore to last resolution
        if not fill_enabled:
            resolution = carb.settings.get_settings().get(f"/persistent/app/viewport/{self.__viewport_api_id}/resolution")
            if resolution:
                self.__sync_model(self.__resolution_model, tuple(resolution))
        # Keep the custom-resolution entry boxes in step with the selection.
        items = self.__resolution_model.get_item_children(None)
        if index >= 0 and index < len(items):
            item = items[index]
            self.__custom_menu_item.resolution = item.value
    def __on_render_settings_changed(self, camera_path: Sdf.Path, resolution: Tuple[int, int], viewport_api):
        # External render-settings change: mirror the new resolution into our models.
        full_resolution = viewport_api.full_resolution
        if self.__custom_menu_item.resolution != full_resolution:
            # Update the custom_menu_item resolution entry boxes.
            self.__custom_menu_item.resolution = full_resolution
        # Sync the resolution to any existing settings (accounting for "Viewport" special case)
        self.__sync_model(self.__resolution_model, full_resolution, self.__resolution_model.fill_frame)
        # Sync the resolution scale menu item
        self.__sync_model(self.__render_scale_model, viewport_api.resolution_scale)
| 17,346 | Python | 42.47619 | 126 | 0.644414 |
omniverse-code/kit/exts/omni.kit.viewport.menubar.settings/omni/kit/viewport/menubar/settings/menu_item/resolution_collection/model.py | import asyncio
import carb.settings
import omni.kit.app
from omni.kit.viewport.menubar.core import ComboBoxItem, SettingComboBoxModel, ResetHelper
from typing import Tuple, List, Optional
# Settings key holding the app-provided flat list of resolutions (pairs of ints).
SETTING_RESOLUTION_LIST = "/app/renderer/resolution/list"
# Persistent settings key holding user-defined custom resolutions
# (list of dicts with "name", "width", "height" keys).
SETTING_CUSTOM_RESOLUTION_LIST = "/persistent/app/renderer/resolution/custom/list"
# Friendly display names for well-known (width, height) resolutions.
NAME_RESOLUTIONS = {
    "Icon": (512, 512),
    "Square": (1024, 1024),
    "SD": (1280, 960),
    "HD720P": (1280, 720),
    "HD1080P": (1920, 1080),
    "2K": (2048, 1080),
    "1440P": (2560, 1440),
    "UHD": (3840, 2160),
    "Ultra Wide": (3440, 1440),
    "Super Ultra Wide": (3840, 1440),
    "5K Wide": (5120, 2880),
}
class ResolutionComboBoxItem(ComboBoxItem):
    """One entry of the resolution combo box.

    An item carries a ``(width, height)`` resolution, a display name and a flag
    marking user-defined ("custom") entries. Two sentinel values are used by the
    model: ``None`` renders as a separator and ``(-1, -1)`` is the "Custom" entry;
    ``(0, 0)`` means "use the viewport widget size".
    """

    def __init__(self, resolution: Optional[Tuple[int, int]], name: Optional[str] = None, custom: bool = False) -> None:
        """
        Args:
            resolution: (width, height) pair, or None for a separator item.
            name: Display name; when omitted, looked up from NAME_RESOLUTIONS.
            custom: True for user-defined resolutions (shows a delete button).
        """
        self.resolution = resolution
        self.name = name if name else self.get_name_from_resolution(resolution)
        # Valid resolutions display as "WxH"; sentinels display their name.
        text = f"{resolution[0]}x{resolution[1]}" if self.is_valid_resolution() else self.name
        self.custom = custom
        super().__init__(text, resolution if resolution else "")

    def get_name_from_resolution(self, resolution: Tuple[int, int]) -> str:
        """Return the friendly name for *resolution*, or "" when unknown."""
        # Iterate key/value pairs directly instead of keys + repeated lookups.
        return next((name for name, res in NAME_RESOLUTIONS.items() if res == resolution), "")

    def is_valid_resolution(self) -> bool:
        """True when this item holds a real, positive (width, height) pair."""
        return bool(self.resolution and self.resolution[0] > 0 and self.resolution[1] > 0)
class ComboBoxResolutionModel(SettingComboBoxModel, ResetHelper):
    """The resolution model has all the resolutions and sets the viewport resolution"""
    def __init__(self, resolution_setter, resolution_setting, settings):
        """
        Args:
            resolution_setter: Object used to apply a resolution to the viewport
                (may be None in tests; see XXX below).
            resolution_setting: Nested tuple of setting keys/defaults; [0][0] is
                the persistent per-viewport key, [1][1] the resolved default value.
            settings: carb settings interface to read the resolution list from.
        """
        # Parse the incoming resolution list via settings
        self.__resolution_setter = resolution_setter
        # Set the default restore to value based on the resolved default pref-key
        self.__default = resolution_setting[1][1]
        self.__default = tuple(self.__default) if self.__default else (0, 0)
        self.__custom_items: List[ResolutionComboBoxItem] = []
        # XXX: For test-suite which passes None!
        full_resolution = resolution_setter.full_resolution if resolution_setter else (0, 0)
        values = None
        try:
            # The setting is a flat list [w0, h0, w1, h1, ...]; pair it up.
            sttg_values = settings.get(SETTING_RESOLUTION_LIST)
            if sttg_values is not None:
                num_values = len(sttg_values)
                if num_values > 0 and num_values % 2 == 0:
                    values = [(sttg_values[i*2 + 0], sttg_values[i*2 + 1]) for i in range(int(num_values / 2))]
                else:
                    raise RuntimeError(f"Resolution list has invalid length of {num_values}")
        except Exception as e:
            import traceback
            carb.log_error(f"{e}")
            carb.log_error(f"{traceback.format_exc()}")
        if values is None:
            # Fall back to a built-in list when the setting is absent or malformed.
            values = [(3840, 2160), (2560, 1440), (2048, 1080), (1920, 1080), (1280, 720), (1024, 1024), (512, 512)]
        SettingComboBoxModel.__init__(
            self,
            # Set the key to set to to the persistent per-viewport key
            setting_path=resolution_setting[0][0],
            # Filled in below
            texts=[],
            values=[],
            # Set the current value to the resolved persistent per-viewport value
            # This is passed to avoid defaulting the per-viewport persitent key to a value so that changes to the
            # setting when not adjusted/saved will pick up the new default
            current_value=full_resolution,
        )
        ResetHelper.__init__(self)
        # (0, 0) is the "use widget size" entry.
        self._items.append(ResolutionComboBoxItem((0, 0), name="Viewport"))
        for value in values:
            self._items.append(ResolutionComboBoxItem(value))
        # Separator
        self._items.append(ResolutionComboBoxItem(None))
        self._items.append(ResolutionComboBoxItem((-1, -1), "Custom"))
        # Custom is the last one
        self.__index_custom = len(self._items) - 1
        current = self._get_current_index_by_value(full_resolution)
        self.current_index.set_value(current)
        # Watch the persistent custom-resolution list and refresh on change.
        self.__update_setting = omni.kit.app.SettingChangeSubscription(SETTING_CUSTOM_RESOLUTION_LIST, self.__on_custom_change)
        self.__on_custom_change(None, carb.settings.ChangeEventType.CHANGED)
    def destroy(self):
        # Release the settings subscription and external references.
        self.__update_setting = None
        self.__resolution_setter = None
        self.__custom_items = []
    def _on_current_item_changed(self, item: ResolutionComboBoxItem) -> None:
        # Ignore the sentinel items (separator / "Custom" placeholder with (-1, -1)).
        value = item.value
        if value[0] >= 0 and value[1] >= 0:
            super()._on_current_item_changed(item)
            if self.__resolution_setter:
                self.__resolution_setter.set_resolution(value)
            self._update_reset_button()
    def get_item_children(self, item) -> List[ResolutionComboBoxItem]:
        """Return built-in items followed by user-defined custom items."""
        #return super().get_item_children(item)
        if item is None:
            items = []
            items.extend(self._items)
            items.extend(self.__custom_items)
            return items
        else:
            return []
    # for ResetHelper
    def get_default(self):
        # Default (width, height) the reset button restores to.
        return self.__default
    def restore_default(self) -> None:
        """Select the item matching the default resolution, if any."""
        if self.__default is None:
            return
        current_index = self.current_index
        if current_index:
            current = current_index.as_int
            items = self.get_item_children(None)
            # Early exit if the model is already correct
            if items[current].value == self.__default:
                return
            # Iterate all items, and select the first match to the real value
            for index, item in enumerate(items):
                if item.value == self.__default:
                    current_index.set_value(index)
                    return
            # NOTE(review): magic fallback index 3 — presumably the 1920x1080
            # entry of the default list; breaks if the list setting changes. TODO confirm.
            current_index.set_value(3)
    def get_value(self) -> Optional[Tuple[int, int]]:
        """Current full resolution from the setter, or None when detached."""
        if self.__resolution_setter:
            return self.__resolution_setter.full_resolution
        return None
    def is_custom(self, resolution: Tuple[int, int]) -> bool:
        """True when *resolution* matches one of the user-defined entries."""
        for custom in self.__custom_items:
            if custom.value == resolution:
                return True
        return False
    @property
    def fill_frame(self) -> bool:
        # Whether the viewport is currently sized to the widget (fill mode).
        return self.__resolution_setter.fill_frame if self.__resolution_setter else False
    def __on_custom_change(self, value, event_type) -> None:
        # Rebuild the custom-item list from the persistent setting (async).
        async def __refresh_custom():
            # It is strange that sometimes it is triggered with not all fields updated.
            # Update a frame to make sure full information filled
            await omni.kit.app.get_app().next_update_async()
            self.__custom_items = []
            custom_list = carb.settings.get_settings().get(SETTING_CUSTOM_RESOLUTION_LIST) or []
            if custom_list:
                # Separator
                self.__custom_items.append(ResolutionComboBoxItem(None))
                for custom in custom_list:
                    # NOTE(review): pop() mutates the dicts returned by settings;
                    # appears harmless since the list is re-read each time — TODO confirm.
                    name = custom.pop("name", "")
                    width = custom.pop("width", -1)
                    height = custom.pop("height", -1)
                    if name and width > 0 and height > 0:
                        self.__custom_items.append(ResolutionComboBoxItem((width, height), name=name, custom=True))
            self._item_changed(None)
            if self.__resolution_setter:
                # Re-resolve the selection; unknown values select the "Custom" entry.
                current = self._get_current_index_by_value(self.__resolution_setter.full_resolution, default=self.__index_custom)
                self.current_index.set_value(current)
        asyncio.ensure_future(__refresh_custom())
| 7,629 | Python | 39.157895 | 129 | 0.602307 |
omniverse-code/kit/exts/omni.kit.viewport.menubar.settings/omni/kit/viewport/menubar/settings/menu_item/resolution_collection/menu.py | from .model import ResolutionComboBoxItem, ComboBoxResolutionModel
import carb.settings
import omni.ui as ui
from omni.kit.viewport.menubar.core import RadioMenuCollection, ViewportMenuDelegate, AbstractWidgetMenuDelegate
import math
from typing import List, Union
import weakref
# Persistent settings key holding user-defined custom resolutions.
SETTING_CUSTOM_RESOLUTION_LIST = "/persistent/app/renderer/resolution/custom/list"
# Well-known aspect ratios, label -> width/height value, used to badge menu entries.
DEFAULT_RATIOS = {
    "16:9": float(16)/9,
    "1:1": 1,
    "32:9": float(32)/9,
    "4:3": float(4)/3,
    "21:9": float(21)/9,
}
class ResolutionCollectionDelegate(AbstractWidgetMenuDelegate):
    """Menu delegate that renders the "Resolution" entry with the currently
    selected resolution name shown on the right-hand side."""
    def __init__(self, model: ComboBoxResolutionModel):
        """
        Args:
            model: Resolution model whose current selection is displayed.
        """
        # don't use content_clipping as submenu hovering becomes inconsistent
        super().__init__(model=model, has_reset=True, content_clipping=False)
        self.__resolution_label: Union[ui.Label, None] = None
        index_model = model.get_item_value_model(None, 0)
        # Keep the label in sync with the selected index; weakref.proxy avoids
        # the subscription keeping this delegate alive.
        self.__sub_index_change = index_model.subscribe_value_changed_fn(
            lambda m, this=weakref.proxy(self): this.__on_index_changed(m)
        )
    def destroy(self):
        # Drop the index-change subscription.
        self.__sub_index_change = None
    def build_widget(self, item: ui.MenuHelper):
        """Build the row: menu text on the left, current resolution on the right."""
        ui.Spacer(width=4)
        ui.Label(item.text, width=0)
        ui.Spacer()
        self.__resolution_label = ui.Label(self.__get_current_resolution(), width=70)
    def __get_current_resolution(self):
        # Resolve the display name of the currently selected item.
        index = self._model.get_item_value_model(None, 0).as_int
        items: List[ResolutionComboBoxItem] = self._model.get_item_children(None)
        if index >= 0 and index < len(items):
            return items[index].name
        else:
            return "Unknown"
    def __on_index_changed(self, model: ui.SimpleIntModel) -> None:
        # Refresh the label when the selection changes (only after build_widget ran).
        if self.__resolution_label:
            self.__resolution_label.text = self.__get_current_resolution()
class ResolutionCollectionMenu(RadioMenuCollection):
    """Radio-style submenu listing all resolutions, with aspect-ratio badges and
    a delete button on user-defined ("custom") entries."""

    # Height in pixels of one menu row; also scales the ratio badge width.
    ITEM_HEIGHT = 20

    def __init__(self, text: str, model: ComboBoxResolutionModel):
        """
        Args:
            text: Menu title.
            model: Resolution model providing the items.
        """
        super().__init__(
            text,
            model,
            delegate = ResolutionCollectionDelegate(model),
        )
        # name -> ui.MenuItem for custom entries, so they can be hidden on delete.
        self.__custom_menu_items = {}

    def build_menu_item(self, item: ResolutionComboBoxItem) -> ui.MenuItem:
        """Build one menu row; a separator is built for the None-resolution item."""
        if item.resolution is None:
            return ui.Separator(
                delegate=ui.MenuDelegate(
                    on_build_item=lambda _: ui.Line(
                        height=0, alignment=ui.Alignment.V_CENTER, style_type_name_override="Menu.Separator"
                    )
                )
            )
        else:
            menu_item = ui.MenuItem(
                item.name,
                delegate = ViewportMenuDelegate(build_custom_widgets=lambda d, m, i=item: self.__build_resolution_menuitem_widgets(i))
            )
            if item.custom:
                self.__custom_menu_items[item.name] = menu_item
            return menu_item

    def __build_resolution_menuitem_widgets(self, item: ResolutionComboBoxItem):
        # Right-hand side of a row: "WxH" text, aspect-ratio badge, delete button
        # (custom items only).
        if item.is_valid_resolution():
            ui.Spacer()
            ui.Spacer(width=20)
            ui.Label(item.model.as_string, width=80, style_type_name_override="Resolution.Text")
            with ui.HStack(width=60):
                ratio = float(item.resolution[0]) / item.resolution[1]
                # Badge width is proportional to the aspect ratio.
                width = self.ITEM_HEIGHT * ratio
                with ui.ZStack(width=width):
                    ui.Rectangle(style_type_name_override="Ratio.Background")
                    ui.Label(self.get_ratio_text(ratio), alignment=ui.Alignment.CENTER, style_type_name_override="Ratio.Text")
                ui.Spacer()
            if item.custom:
                with ui.VStack(content_clipping=1, width=0):
                    ui.Image(width=20, style_type_name_override="Resolution.Del", mouse_pressed_fn=lambda x, y, b, f, i=item: self.__delete_resolution(i))
            else:
                ui.Spacer(width=20)

    def get_ratio_text(self, ratio: float):
        """Return a friendly label ("16:9", ...) for *ratio*, or "x.xx:1"."""
        found = [key for (key, value) in DEFAULT_RATIOS.items() if math.isclose(value, ratio, rel_tol=1e-2)]
        if found:
            return found[0]
        else:
            return f"{ratio: .2f}:1"

    def __delete_resolution(self, item: ResolutionComboBoxItem):
        """Remove *item* from the persisted custom list and hide its menu entry."""
        settings = carb.settings.get_settings()
        custom_list = settings.get(SETTING_CUSTOM_RESOLUTION_LIST) or []
        # Build a filtered list instead of calling list.remove() while iterating
        # the same list (the original pattern skips elements after a removal).
        custom_list = [custom for custom in custom_list if custom["name"] != item.name]
        settings.set(SETTING_CUSTOM_RESOLUTION_LIST, custom_list)
        if item.name in self.__custom_menu_items:
            self.__custom_menu_items[item.name].visible = False
| 4,698 | Python | 38.487395 | 154 | 0.603874 |
omniverse-code/kit/exts/omni.kit.viewport.menubar.settings/omni/kit/viewport/menubar/settings/menu_item/custom_resolution/save_window.py | import omni.ui as ui
from typing import Tuple, Callable
from ...style import SAVE_WINDOW_STYLE
class SaveWindow(ui.Window):
    """
    Modal window to name and save a custom resolution.
    """

    # Inner padding, in pixels, used by the title bar layout.
    PADDING = 8

    def __init__(self, resolution: Tuple[int, int], on_save_fn: Callable[[str, Tuple[int, int]], bool]):
        """
        Args:
            resolution: (width, height) pair being saved.
            on_save_fn: Callback(name, resolution) -> bool; return True to
                accept the save and close the window.
        """
        self.name_model = ui.SimpleStringModel()
        self.__resolution = resolution
        self.__on_save_fn = on_save_fn
        # Created lazily in _build_input; initialize here so __del__ is safe
        # even when the frame was never built.
        self.__sub_begin_edit = None
        flags = ui.WINDOW_FLAGS_NO_TITLE_BAR | ui.WINDOW_FLAGS_NO_RESIZE | ui.WINDOW_FLAGS_NO_MOVE | ui.WINDOW_FLAGS_MODAL
        # NOTE(review): misspelled window title ("Resoluiton") kept as-is since it
        # is the window's identity ("###" hides it from display) — confirm no
        # external lookup relies on it before renaming.
        super().__init__("###Resoluiton Save", width=400, height=180, flags=flags, auto_resize=False, padding_x=0, padding_y=0)
        self.frame.set_style(SAVE_WINDOW_STYLE)
        self.frame.set_build_fn(self.__build_ui)

    def __del__(self):
        self.__sub_begin_edit = None
        self.destroy()

    def __build_ui(self):
        # Overall layout: title bar, name/resolution input row, Save/Cancel buttons.
        with self.frame:
            with ui.VStack(height=0):
                self._build_titlebar()
                ui.Spacer(height=30)
                self._build_input()
                ui.Spacer(height=30)
                self._build_buttons()
                ui.Spacer(height=15)

    def _build_titlebar(self):
        # Custom title bar (the native one is disabled) with a close button.
        with ui.ZStack(height=0):
            # Fixed typo: was "style_tyle_name_override", which silently dropped the style.
            ui.Rectangle(style_type_name_override="Titlebar.Background")
            with ui.VStack():
                ui.Spacer(height=self.PADDING)
                with ui.HStack():
                    ui.Spacer(width=self.PADDING)
                    ui.Label("Save Custom Viewport Resolution", width=0, style_type_name_override="Titlebar.Title")
                    ui.Spacer()
                    ui.Image(width=20, height=20, mouse_released_fn=lambda x, y, b, f: self.__on_cancel(), name="close")
                    ui.Spacer(width=self.PADDING)
                ui.Spacer(height=self.PADDING)

    def _build_input(self):
        # Name field with placeholder hint, plus the fixed resolution readout.
        with ui.HStack():
            ui.Spacer()
            with ui.ZStack(width=160):
                name_input = ui.StringField(self.name_model)
                hint_label = ui.Label("Type Name", style_type_name_override="Input.Hint")
            ui.Spacer(width=20)
            ui.Label(f"{self.__resolution[0]} x {self.__resolution[1]}")
            ui.Spacer()
        name_input.focus_keyboard()
        def __hide_hint():
            # Hide the placeholder as soon as the user starts typing.
            hint_label.visible = False
        self.__sub_begin_edit = self.name_model.subscribe_begin_edit_fn(lambda m: __hide_hint())

    def _build_buttons(self):
        with ui.HStack():
            ui.Spacer()
            ui.Button("Save", width=80, clicked_fn=self.__on_save)
            ui.Spacer(width=20)
            ui.Button("Cancel", width=80, clicked_fn=self.__on_cancel)
            ui.Spacer()

    def __on_save(self) -> None:
        # Close only when the callback accepts the (name, resolution) pair.
        if self.__on_save_fn(self.name_model.as_string, self.__resolution):
            self.visible = False

    def __on_cancel(self) -> None:
        self.visible = False
| 2,934 | Python | 35.6875 | 128 | 0.557941 |
omniverse-code/kit/exts/omni.kit.viewport.menubar.settings/omni/kit/viewport/menubar/settings/menu_item/custom_resolution/custom_resolution_menu_item.py | import omni.ui as ui
from typing import Tuple
from .custom_resolution_delegate import CustomResolutionDelegate
class CustomResolutionMenuItem(ui.MenuItem):
    """
    Menu item to edit/save custom resolution.
    """
    def __init__(self, res_model, res_setter):
        """
        Args:
            res_model: Resolution model the editor reads existing entries from.
            res_setter: Object used to apply an edited resolution to the viewport.
        """
        self.__delegate = CustomResolutionDelegate(res_model, res_setter)
        # NOTE(review): a ui.MenuItem is created here with the delegate and then
        # super().__init__ creates this item without it — looks like the first
        # item is the one actually rendered; confirm the duplication is intended.
        ui.MenuItem(
            "Custom Resolution",
            delegate=self.__delegate,
            hide_on_click=False,
        )
        super().__init__("Custom Resolution")
    def destroy(self):
        # Release delegate subscriptions/windows.
        self.__delegate.destroy()
    @property
    def resolution(self) -> Tuple[int, int]:
        # Current (width, height) shown in the editor fields.
        return self.__delegate.resolution
    @resolution.setter
    def resolution(self, res: Tuple[int, int]) -> None:
        self.__delegate.resolution = res
| 803 | Python | 26.724137 | 73 | 0.633873 |
omniverse-code/kit/exts/omni.kit.viewport.menubar.settings/omni/kit/viewport/menubar/settings/menu_item/custom_resolution/custom_resolution_delegate.py | from omni.kit.viewport.menubar.core import AbstractWidgetMenuDelegate
import omni.ui as ui
import omni.kit.app
from .save_window import SaveWindow
import carb.settings
from typing import List, Optional, Callable, Tuple
import weakref
import math
import asyncio
# Persistent settings key holding user-defined custom resolutions.
SETTING_CUSTOM_RESOLUTION_LIST = "/persistent/app/renderer/resolution/custom/list"
# Extension setting with the minimum allowed [width, height]; defaults to 64x64.
SETTING_MIN_RESOLUTION = "/exts/omni.kit.viewport.menubar.settings/min_resolution"
class RatioItem(ui.AbstractItem):
    """One aspect-ratio entry for the ratio combo box (label + float value)."""
    def __init__(self, text: str, value: float) -> None:
        """
        Args:
            text: Display label, e.g. "16:9".
            value: Numeric width/height ratio.
        """
        super().__init__()
        self.model = ui.SimpleStringModel(text)
        self.value = value
class RatioModel(ui.AbstractItemModel):
    """
    The model used for ratio combobox
    """
    def __init__(self):
        super().__init__()
        # List items
        self.__default_items = [
            RatioItem("16:9", 16.0/9),
            RatioItem("4:3", 4.0/3),
            RatioItem("1:1", 1.0)
        ]
        # Extra item for a ratio not in the defaults; shown first when present.
        self.__custom_item: Optional[RatioItem] = None
        # Current value
        self.current_index = ui.SimpleIntModel(-1)
        # weakref.proxy avoids the subscription keeping the model alive.
        self._sub = self.current_index.subscribe_value_changed_fn(
            lambda _, this=weakref.proxy(self): this.__on_index_changed()
        )
    def destroy(self):
        # Drop the subscription and index model.
        self._sub = None
        self.current_index = None
    @property
    def ratio(self) -> float:
        """Currently selected width/height ratio."""
        items = self.get_item_children(None)
        return items[self.current_index.as_int].value
    @ratio.setter
    def ratio(self, value: float) -> None:
        # Select a matching default item (within 1% tolerance) or create a
        # custom item for the exact value and select it (it sits at index 0).
        found = [index for (index, item) in enumerate(self.__default_items) if math.isclose(item.value, value, rel_tol=1e-2)]
        if found:
            self.__custom_item = None
            self.current_index.set_value(found[0])
            self._item_changed(None)
        else:
            ratio_text = f"{value: .2f}:1"
            self.__custom_item = RatioItem(ratio_text, value)
            self.current_index.set_value(0)
            self._item_changed(None)
    def subscribe_ratio_changed_fn(self, on_ratio_changed_fn: Callable[[float], None]):
        """Subscribe to ratio changes; returns the subscription (keep it alive)."""
        def __on_sub_index_changed(this, callback):
            current_index = this.current_index.as_int
            items = this.get_item_children(None)
            callback(items[current_index].value)
        return self.current_index.subscribe_value_changed_fn(
            lambda _, this=weakref.proxy(self), callback=on_ratio_changed_fn: __on_sub_index_changed(this, callback)
        )
    def get_item_children(self, item) -> List[RatioItem]:
        # Custom item (when any) first, then the default ratios.
        items = []
        if self.__custom_item:
            items.append(self.__custom_item)
        items.extend(self.__default_items)
        return items
    def get_item_value_model(self, item, column_id):
        if item is None:
            return self.current_index
        return item.model
    def __on_index_changed(self):
        # Propagate selection changes to item-model listeners.
        self._item_changed(None)
class CustomResolutionDelegate(AbstractWidgetMenuDelegate):
    """
    Delegate to edit/save a custom resolution: width/height fields, an
    aspect-ratio lock button, a ratio combo box and a save button.
    """

    def __init__(self, resolution_model, resolution_setter):
        """
        Args:
            resolution_model: Model of existing resolutions (used to decide
                whether the current value is already known, i.e. savable).
            resolution_setter: Object used to apply the edited resolution.
        """
        super().__init__(width=310, has_reset=False)
        self.__resolution_model = resolution_model
        self.__resolution_setter = resolution_setter
        self.__link_button: Optional[ui.Button] = None
        self.__save_button: Optional[ui.Button] = None
        self.__save_window: Optional[SaveWindow] = None
        self.__settings = carb.settings.get_settings()
        (self.__resolution_min_width, self.__resolution_min_height) = self.__settings.get(SETTING_MIN_RESOLUTION) or [64, 64]
        self.width_model = ui.SimpleIntModel(1920)
        self.__sub_width_begin_edit = self.width_model.subscribe_begin_edit_fn(lambda _: self.__on_begin_edit())
        self.__sub_width_end_edit = self.width_model.subscribe_end_edit_fn(lambda _: self.__on_width_end_edit())
        self.height_model = ui.SimpleIntModel(1080)
        self.__sub_height_begin_edit = self.height_model.subscribe_begin_edit_fn(lambda _: self.__on_begin_edit())
        self.__sub_height_end_edit = self.height_model.subscribe_end_edit_fn(lambda _: self.__on_height_end_edit())
        # Initialize the restore values so the end-edit handlers are safe even
        # if they ever fire without a preceding begin-edit.
        self.__saved_width = self.width_model.as_int
        self.__saved_height = self.height_model.as_int
        self.ratio_model = RatioModel()
        self.__sub_ratio_change = None
        self.__subscribe_ratio_change()

    def destroy(self):
        # Drop all model subscriptions.
        self.__sub_ratio_change = None
        self.__sub_width_begin_edit = None
        self.__sub_width_end_edit = None
        self.__sub_height_begin_edit = None
        self.__sub_height_end_edit = None

    @property
    def resolution(self) -> Tuple[int, int]:
        """Current (width, height) shown in the edit fields."""
        return (self.width_model.as_int, self.height_model.as_int)

    @resolution.setter
    def resolution(self, res: Tuple[int, int]) -> None:
        if res[0] == -1 and res[1] == -1:
            # "Custom" selected
            self.__update_save_image_state()
        elif res[0] > 0 and res[1] > 0:
            if self.width_model.as_int == res[0] and self.height_model.as_int == res[1]:
                return
            # Suspend ratio-change notifications while syncing all three models.
            was_r_subscibed = self.__subscribe_ratio_change(enable=False)
            self.ratio_model.ratio = res[0] / res[1]
            self.width_model.set_value(res[0])
            self.height_model.set_value(res[1])
            self.__subscribe_ratio_change(enable=was_r_subscibed)
            self.__update_save_image_state()

    def build_widget(self, item: ui.MenuHelper):
        """Build the editor row: width field, link button, height field, ratio combo, save."""
        with ui.VStack(spacing=0):
            ui.Spacer(height=0, spacing=4)
            with ui.HStack():
                ui.Spacer(width=8)
                ui.IntField(self.width_model, width=60, height=20)
                ui.Spacer(width=10)
                # "checked" means width/height are locked to the current ratio.
                self.__link_button = ui.Button(
                    width=35,
                    image_height=20,
                    image_width=24,
                    checked=True,
                    clicked_fn=self.__on_link_clicked,
                    style_type_name_override="ResolutionLink",
                )
                ui.Spacer(width=10)
                ui.IntField(self.height_model, width=60, height=20)
                ui.Spacer(width=10)
                ui.ComboBox(self.ratio_model, name="ratio")
                ui.Spacer(width=10)
                with ui.VStack(width=0, content_clipping=True):
                    self.__save_button = ui.Button(
                        style_type_name_override="Menu.Item.Button",
                        name="save",
                        width=20,
                        height=20,
                        image_width=20,
                        image_height=20,
                        clicked_fn=self.__save
                    )
                ui.Spacer(width=4)
            with ui.HStack():
                ui.Spacer(width=8)
                ui.Label("Width", alignment=ui.Alignment.LEFT, width=60)
                ui.Spacer(width=54)
                ui.Label("Height", alignment=ui.Alignment.LEFT, width=60)
                ui.Spacer()

    def __on_width_changed(self, model):
        # Clamp to the minimum, then either adjust height (ratio locked) or
        # recompute the ratio from the new width.
        width = model.as_int
        if width < self.__resolution_min_width:
            self.__post_resolution_warning()
            model.set_value(self.__resolution_min_width)
            width = model.as_int
        if self.__link_button:
            if self.__link_button.checked:
                height = int(width/self.ratio_model.ratio)
                if height < self.__resolution_min_height:
                    # Height is too small, change width to match the min height
                    self.__post_resolution_warning()
                    height = self.__resolution_min_height
                    width = int(height * self.ratio_model.ratio)
                    model.set_value(width)
                if height != self.height_model.as_int:
                    self.height_model.set_value(height)
            else:
                self.ratio_model.ratio = float(width) / self.height_model.as_int
        self.__set_render_resolution(self.resolution)
        self.__update_save_image_state()

    def __on_height_changed(self, model):
        # Mirror of __on_width_changed for the height field.
        height = model.as_int
        if height < self.__resolution_min_height:
            self.__post_resolution_warning()
            model.set_value(self.__resolution_min_height)
            height = model.as_int
        if self.__link_button:
            if self.__link_button.checked:
                width = int(height * self.ratio_model.ratio)
                if width < self.__resolution_min_width:
                    # Width is too small, change height to match min width
                    self.__post_resolution_warning()
                    width = self.__resolution_min_width
                    height = int(width / self.ratio_model.ratio)
                    model.set_value(height)
                if width != self.width_model.as_int:
                    self.width_model.set_value(width)
            else:
                self.ratio_model.ratio = float(self.width_model.as_int) / height
        self.__set_render_resolution(self.resolution)
        self.__update_save_image_state()

    def __on_ratio_changed(self, ratio: float):
        # Keep width fixed and recompute height from the new ratio.
        height = int(self.width_model.as_int/self.ratio_model.ratio)
        if height != self.height_model.as_int:
            self.height_model.set_value(height)
            self.__set_render_resolution(self.resolution)
        self.__update_save_image_state()

    def __on_link_clicked(self):
        # Toggle the width/height ratio lock.
        self.__link_button.checked = not self.__link_button.checked

    def __subscribe_ratio_change(self, enable: bool = True) -> bool:
        """Enable/disable the ratio subscription; return the previous state."""
        was_subscribed = self.__sub_ratio_change is not None
        if enable:
            if not was_subscribed:
                self.__sub_ratio_change = self.ratio_model.subscribe_ratio_changed_fn(self.__on_ratio_changed)
        elif was_subscribed:
            self.__sub_ratio_change = None
        return was_subscribed

    def __save(self):
        # Only open the save dialog when the value is unknown (button "checked").
        if self.__save_button.checked:
            if self.__save_window:
                self.__save_window = None
            self.__save_window = SaveWindow(self.resolution, self.__on_save_resolution)

    def __update_save_image_state(self):
        # Save button is "checked" (enabled look) only when the current
        # resolution is not already in the model's item list.
        if not self.__save_button:
            return
        for item in self.__resolution_model.get_item_children(None):
            if self.resolution == item.value:
                self.__save_button.checked = False
                break
        else:
            self.__save_button.checked = True

    def __on_save_resolution(self, new_name: str, resolution: Tuple[int, int]) -> bool:
        """Append (name, resolution) to the persisted custom list.

        Returns:
            True when saved; False when the name already exists.
        """
        custom_list = self.__settings.get(SETTING_CUSTOM_RESOLUTION_LIST) or []
        for custom in custom_list:
            name = custom["name"]
            if name == new_name:
                # Fixed: the "f" prefix was inside the quotes, so the literal
                # text "f{new_name}" was logged instead of the name.
                carb.log_warn(f"{new_name} already exists!")
                return False
        custom_list.append(
            {
                "name": new_name,
                "width": resolution[0],
                "height": resolution[1]
            }
        )
        self.__settings.set(SETTING_CUSTOM_RESOLUTION_LIST, custom_list)
        self.__save_button.checked = False
        return True

    def __set_render_resolution(self, resolution: Tuple[int, int]):
        async def __delay_async(res: Tuple[int, int]):
            # Delay a frame to make sure current changes from UI are saved
            await omni.kit.app.get_app().next_update_async()
            self.__resolution_setter.set_resolution(res)
        asyncio.ensure_future(__delay_async(resolution))

    def __on_begin_edit(self):
        # Remember the values so an invalid edit can be rolled back on end-edit.
        self.__saved_width = self.width_model.as_int
        self.__saved_height = self.height_model.as_int

    def __on_width_end_edit(self):
        # Roll back non-positive input, then apply the (possibly restored) value.
        if self.width_model.as_int <= 0:
            self.width_model.set_value(self.__saved_width)
        self.__on_width_changed(self.width_model)

    def __on_height_end_edit(self):
        if self.height_model.as_int <= 0:
            self.height_model.set_value(self.__saved_height)
        self.__on_height_changed(self.height_model)

    def __post_resolution_warning(self):
        # Notification manager is optional; fall back to the log when absent.
        try:
            import omni.kit.notification_manager as nm
            nm.post_notification(f"Resolution cannot be lower than {self.__resolution_min_width}x{self.__resolution_min_height}", status=nm.NotificationStatus.WARNING)
        except ImportError:
            carb.log_warn(f"Resolution cannot be lower than {self.__resolution_min_width}x{self.__resolution_min_height}")
omniverse-code/kit/exts/omni.kit.viewport.menubar.settings/omni/kit/viewport/menubar/settings/tests/test_custom_resolution.py | import carb.settings
import omni.kit.test
from ..menu_item.resolution_collection.model import ComboBoxResolutionModel, ResolutionComboBoxItem
SETTING_CUSTOM_RESOLUTION_LIST = "/persistent/app/renderer/resolution/custom/list"
class TestCustomResolution(omni.kit.test.AsyncTestCase):
    """Tests that custom resolutions persisted in settings appear/disappear in
    ComboBoxResolutionModel's item list."""
    async def setUp(self):
        self.__settings = carb.settings.get_settings()
        # Useless fake data that needs to go to ComboBoxResolutionModel
        resolution_setter = None
        resolution_settings = (("setting", (0,0)), ("setting", (0,0)))
        self.__model = ComboBoxResolutionModel(None, resolution_settings, self.__settings)
        # NOTE(review): super().setUp() is not awaited — fine if the base setUp
        # is synchronous; confirm against omni.kit.test.AsyncTestCase.
        super().setUp()
    async def tearDown(self):
        # Clear any custom resolutions written by a test.
        self.__settings.set(SETTING_CUSTOM_RESOLUTION_LIST, [])
        super().tearDown()
    async def test_custom_resolutions(self):
        items = self.__model.get_item_children(None)
        num_items = len(items)
        # Adding one custom resolution should add two items: a separator + the entry.
        self.__settings.set(SETTING_CUSTOM_RESOLUTION_LIST, [{"name": "test", "width": 100, "height": 200}])
        # The model refreshes asynchronously; wait a couple of frames.
        for _ in range(2):
            await omni.kit.app.get_app().next_update_async()
        items = self.__model.get_item_children(None)
        self.assertEqual(num_items + 2, len(items))
        new_item: ResolutionComboBoxItem = items[-1]
        self.assertEqual(new_item.name, "test")
        self.assertEqual(new_item.resolution, (100, 200))
        self.assertTrue(new_item.custom)
        # Clearing the setting should restore the original item count.
        self.__settings.set(SETTING_CUSTOM_RESOLUTION_LIST, [])
        for _ in range(2):
            await omni.kit.app.get_app().next_update_async()
        items = self.__model.get_item_children(None)
        self.assertEqual(num_items, len(items))
omniverse-code/kit/exts/omni.kit.viewport.menubar.settings/omni/kit/viewport/menubar/settings/tests/__init__.py | from .test_ui import *
from .test_custom_resolution import *
| 61 | Python | 19.66666 | 37 | 0.754098 |
omniverse-code/kit/exts/omni.kit.viewport.menubar.settings/omni/kit/viewport/menubar/settings/tests/test_ui.py | import omni.kit.test
from re import I
from omni.ui.tests.test_base import OmniUiTest
import omni.kit.ui_test as ui_test
from omni.kit.ui_test import Vec2
import omni.usd
import omni.kit.app
from omni.kit.test.teamcity import is_running_in_teamcity
from pathlib import Path
import carb.input
import asyncio
import unittest
import sys
CURRENT_PATH = Path(__file__).parent
TEST_DATA_PATH = CURRENT_PATH.parent.parent.parent.parent.parent.parent.joinpath("data").joinpath("tests")
TEST_WIDTH, TEST_HEIGHT = 600, 600
class TestSettingMenuWindow(OmniUiTest):
    """Golden-image UI tests for the viewport settings menu: each test opens the
    menu, hovers a sub-item at a given y offset and compares a screenshot."""
    async def setUp(self):
        self._golden_img_dir = TEST_DATA_PATH.absolute().joinpath("golden_img").absolute()
        await self.create_test_area(width=TEST_WIDTH, height=TEST_HEIGHT)
        await omni.kit.app.get_app().next_update_async()
    async def test_navigation(self):
        await self.__show_subitem("menubar_setting_navigation.png", 86)
    async def test_selection(self):
        await self.__show_subitem("menubar_setting_selection.png", 106)
    async def test_grid(self):
        await self.__show_subitem("menubar_setting_grid.png", 126)
    async def test_gizmo(self):
        await self.__show_subitem("menubar_setting_gizmo.png", 146)
    @unittest.skipIf(
        (sys.platform == "linux" and is_running_in_teamcity()),
        "OM-64377: Delegate for RadioMenuCollection does not work in Linux",
    )
    async def test_viewport(self):
        await self.__show_subitem("menubar_setting_viewport.png", 166)
    async def test_viewport_ui(self):
        await self.__show_subitem("menubar_setting_viewport_ui.png", 186)
    async def test_viewport_manipulate(self):
        await self.__show_subitem("menubar_setting_viewport_manipulator.png", 206)
    async def test_reset_item(self):
        """Clicking a reset item restores the two camera settings it covers."""
        settings = carb.settings.get_settings()
        cam_vel = settings.get("/persistent/app/viewport/camMoveVelocity")
        in_enabled = settings.get("/persistent/app/viewport/camInertiaEnabled")
        # Perturb both settings, then click reset and verify they come back.
        settings.set("/persistent/app/viewport/camMoveVelocity", cam_vel * 2)
        settings.set("/persistent/app/viewport/camInertiaEnabled", not in_enabled)
        try:
            await self.__do_ui_test(ui_test.emulate_mouse_click, 225)
            self.assertEqual(settings.get("/persistent/app/viewport/camMoveVelocity"), cam_vel)
            self.assertEqual(settings.get("/persistent/app/viewport/camInertiaEnabled"), in_enabled)
        finally:
            # Always restore the originals, even when the assertions fail.
            settings.set("/persistent/app/viewport/camMoveVelocity", cam_vel)
            settings.set("/persistent/app/viewport/camInertiaEnabled", in_enabled)
    async def __show_subitem(self, golden_img_name: str, y: int) -> None:
        # Hover the menu entry at height y, then capture and compare the image.
        async def gloden_compare():
            await self.finalize_test(golden_img_dir=self._golden_img_dir, golden_img_name=golden_img_name)
        await self.__do_ui_test(gloden_compare, y)
    async def __do_ui_test(self, test_operation, y: int, frame_wait: int = 3) -> None:
        """Open the settings menu, move to (20, y), run *test_operation*, then
        close the menu again — always, via the finally block."""
        # Enable mouse input
        app = omni.kit.app.get_app()
        app_window = omni.appwindow.get_default_app_window()
        for device in [carb.input.DeviceType.MOUSE]:
            app_window.set_input_blocking_state(device, None)
        try:
            # (20, 46) is the settings menubar button; click opens the menu.
            await ui_test.emulate_mouse_move(Vec2(20, 46), human_delay_speed=4)
            await ui_test.emulate_mouse_click()
            await ui_test.emulate_mouse_move(Vec2(20, y))
            for _ in range(frame_wait):
                await app.next_update_async()
            await test_operation()
        finally:
            for _ in range(frame_wait):
                await app.next_update_async()
            # Click elsewhere to dismiss the menu.
            await ui_test.emulate_mouse_move(Vec2(300, 26))
            await ui_test.emulate_mouse_click()
            for _ in range(frame_wait):
                await app.next_update_async()
| 3,830 | Python | 36.930693 | 106 | 0.661358 |
omniverse-code/kit/exts/omni.kit.viewport.menubar.settings/docs/index.rst | omni.kit.viewport.menubar.settings
###################################
Viewport MenuBar Settings
.. toctree::
:maxdepth: 1
README
CHANGELOG
.. automodule:: omni.kit.viewport.menubar.settings
:platform: Windows-x86_64, Linux-x86_64
:members:
:undoc-members:
:show-inheritance:
:imported-members:
| 330 | reStructuredText | 15.549999 | 50 | 0.615152 |
omniverse-code/kit/exts/omni.kit.window.status_bar/PACKAGE-LICENSES/omni.kit.window.status_bar-LICENSE.md | Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
NVIDIA CORPORATION and its licensors retain all intellectual property
and proprietary rights in and to this software, related documentation
and any modifications thereto. Any use, reproduction, disclosure or
distribution of this software and related documentation without an express
license agreement from NVIDIA CORPORATION is strictly prohibited. | 412 | Markdown | 57.999992 | 74 | 0.839806 |
omniverse-code/kit/exts/omni.kit.window.status_bar/config/extension.toml | [package]
version = "0.1.5"
title = "Status Bar"
changelog = "docs/CHANGELOG.md"
[dependencies]
"omni.usd" = {}
"omni.ui" = {}
"omni.kit.mainwindow" = { optional=true }
[[native.plugin]]
path = "bin/*.plugin"
recursive = false
# That will make tests auto-discoverable by test_runner:
[[python.module]]
name = "omni.kit.window.status_bar.tests"
[[test]]
args = [
"--/app/window/dpiScaleOverride=1.0",
"--/app/window/scaleToMonitor=false",
"--no-window"
]
dependencies = [
"omni.kit.renderer.capture",
"omni.ui",
]
| 538 | TOML | 16.966666 | 56 | 0.644981 |
omniverse-code/kit/exts/omni.kit.window.status_bar/omni/kit/window/status_bar/tests/test_status_bar.py | ## Copyright (c) 2018-2022, NVIDIA CORPORATION. All rights reserved.
##
## NVIDIA CORPORATION and its licensors retain all intellectual property
## and proprietary rights in and to this software, related documentation
## and any modifications thereto. Any use, reproduction, disclosure or
## distribution of this software and related documentation without an express
## license agreement from NVIDIA CORPORATION is strictly prohibited.
##
import omni.kit.test
from omni.ui.tests.test_base import OmniUiTest
import carb
import asyncio
class TestStatusBar(OmniUiTest):
    """UI snapshot tests for the status bar window.

    The status bar is driven by pushing events onto the app message bus
    using the extension's well-known event names (progress / activity).
    """
    # Before running each test
    async def setUp(self):
        # Event types the status bar extension listens for on the message bus.
        self.name_progress = carb.events.type_from_string("omni.kit.window.status_bar@progress")
        self.name_activity = carb.events.type_from_string("omni.kit.window.status_bar@activity")
        self.message_bus = omni.kit.app.get_app().get_message_bus_event_stream()
        # Reset the status bar to an empty/idle state so every test starts clean
        # ("-1" progress presumably means "no progress bar" — matches the reset intent).
        self.message_bus.push(self.name_activity, payload={"text": ""})
        self.message_bus.push(self.name_progress, payload={"progress": "-1"})
    # After running each test
    async def tearDown(self):
        pass
    async def test_general(self):
        """Golden-image test of the status bar showing a warning message."""
        await self.create_test_area(256, 64)
        async def log():
            # Delayed log because self.finalize_test logs things
            carb.log_warn("StatusBar test")
        asyncio.ensure_future(log())
        await self.finalize_test()
    async def test_activity(self):
        """Golden-image test with an activity name and progress displayed."""
        await self.create_test_area(512, 64)
        async def log():
            # Delayed log because self.finalize_test logs things
            carb.log_warn("StatusBar test")
            # Test activity name with spaces URL-encoded
            self.message_bus.push(self.name_activity, payload={"text": "MFC%20For%20NvidiaAnimated.usd"})
            self.message_bus.push(self.name_progress, payload={"progress": "0.2"})
        asyncio.ensure_future(log())
        await self.finalize_test()
| 1,941 | Python | 36.346153 | 101 | 0.686244 |
omniverse-code/kit/exts/omni.kit.window.status_bar/omni/kit/window/status_bar/tests/__init__.py | ## Copyright (c) 2018-2022, NVIDIA CORPORATION. All rights reserved.
##
## NVIDIA CORPORATION and its licensors retain all intellectual property
## and proprietary rights in and to this software, related documentation
## and any modifications thereto. Any use, reproduction, disclosure or
## distribution of this software and related documentation without an express
## license agreement from NVIDIA CORPORATION is strictly prohibited.
##
from .test_status_bar import TestStatusBar
| 484 | Python | 47.499995 | 77 | 0.797521 |
omniverse-code/kit/exts/omni.kit.primitive.mesh/PACKAGE-LICENSES/omni.kit.primitive.mesh-LICENSE.md | Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
NVIDIA CORPORATION and its licensors retain all intellectual property
and proprietary rights in and to this software, related documentation
and any modifications thereto. Any use, reproduction, disclosure or
distribution of this software and related documentation without an express
license agreement from NVIDIA CORPORATION is strictly prohibited. | 412 | Markdown | 57.999992 | 74 | 0.839806 |
omniverse-code/kit/exts/omni.kit.primitive.mesh/config/extension.toml | [package]
# Semantic Versioning is used: https://semver.org/
version = "1.0.8"
# Lists people or organizations that are considered the "authors" of the package.
authors = ["NVIDIA"]
category = "Internal"
# The title and description fields are primarly for displaying extension info in UI
title = "Kit Mesh Primitives Generator"
description="Generators for basic mesh geometry."
# Keywords for the extension
keywords = ["kit", "mesh primitive"]
# URL of the extension source repository.
repository = ""
# Location of change log file in target (final) folder of extension, relative to the root.
# More info on writing changelog: https://keepachangelog.com/en/1.0.0/
changelog = "docs/CHANGELOG.md"
# Preview image. Folder named "data" automatically goes in git lfs (see .gitattributes file).
preview_image = "data/preview.png"
[dependencies]
"omni.kit.commands" = {}
"omni.usd" = {}
"omni.ui" = {optional = true}
"omni.kit.menu.utils" = {optional = true}
"omni.kit.usd.layers" = {}
"omni.kit.actions.core" = {}
[[python.module]]
name = "omni.kit.primitive.mesh"
[[test]]
timeout=300
args = [
"--/app/file/ignoreUnsavedOnExit=true",
"--/app/asyncRendering=false",
"--/app/window/dpiScaleOverride=1.0",
"--/app/window/scaleToMonitor=false",
"--no-window"
]
dependencies = [
"omni.hydra.pxr",
"omni.kit.commands",
"omni.kit.renderer.capture",
"omni.kit.mainwindow",
"omni.kit.test",
"omni.ui",
"omni.kit.menu.utils"
]
| 1,473 | TOML | 24.413793 | 93 | 0.691785 |
omniverse-code/kit/exts/omni.kit.primitive.mesh/omni/kit/primitive/mesh/extension.py | __all__ = ["PrimitiveMeshExtension"]
import omni
from .mesh_actions import register_actions, deregister_actions
from pxr import Usd
class PrimitiveMeshExtension(omni.ext.IExt):
    """Extension entry point.

    Registers the mesh-creation actions and, when the optional UI extensions
    (omni.ui / omni.kit.menu.utils) are present, the Create->Mesh menu.
    """

    def __init__(self):
        super().__init__()
        # Initialized here so on_shutdown() is safe even if on_startup()
        # never ran or failed part-way through.
        self._ext_name = None
        self._mesh_generator = None

    def on_startup(self, ext_id):
        self._ext_name = omni.ext.get_extension_name(ext_id)
        self._mesh_generator = None
        try:
            # MeshGenerator pulls in optional UI dependencies; headless apps
            # simply skip menu/window registration.
            from .generator import MeshGenerator

            self._mesh_generator = MeshGenerator()
            self._mesh_generator.register_menu()
        except ImportError:
            pass
        register_actions(self._ext_name, PrimitiveMeshExtension, lambda: self._mesh_generator)

    def on_shutdown(self):
        # Guard: on_startup() may not have run (e.g. startup failure).
        if self._ext_name is not None:
            deregister_actions(self._ext_name)
        if self._mesh_generator:
            self._mesh_generator.destroy()
            # Drop the reference so the window/UI objects can be collected.
            self._mesh_generator = None
| 814 | Python | 27.103447 | 94 | 0.635135 |
omniverse-code/kit/exts/omni.kit.primitive.mesh/omni/kit/primitive/mesh/__init__.py | from .evaluators import get_geometry_mesh_prim_list, AbstractShapeEvaluator
from .command import CreateMeshPrimCommand, CreateMeshPrimWithDefaultXformCommand
from .extension import PrimitiveMeshExtension
| 204 | Python | 50.249987 | 81 | 0.887255 |
omniverse-code/kit/exts/omni.kit.primitive.mesh/omni/kit/primitive/mesh/mesh_actions.py | import omni.usd
import omni.kit.commands
import omni.kit.actions.core
from .evaluators import get_geometry_mesh_prim_list
def register_actions(extension_id, cls, get_self_fn):
    """Register one "create mesh" action per primitive type, plus the
    settings-window action when a generator instance is available."""

    def create_mesh_prim(prim_type):
        usd_context = omni.usd.get_context()
        with omni.kit.usd.layers.active_authoring_layer_context(usd_context):
            omni.kit.commands.execute("CreateMeshPrimWithDefaultXform", prim_type=prim_type, above_ground=True)

    registry = omni.kit.actions.core.get_action_registry()
    # One action per supported primitive; default-arg binding pins the name
    # for each lambda at definition time.
    for prim_name in get_geometry_mesh_prim_list():
        registry.register_action(
            extension_id,
            f"create_mesh_prim_{prim_name.lower()}",
            lambda p=prim_name: create_mesh_prim(p),
            display_name=f"Create Mesh Prim {prim_name}",
            description=f"Create Mesh Prim {prim_name}",
            tag="Create Mesh Prim",
        )
    if get_self_fn() is not None:
        registry.register_action(
            extension_id,
            "show_setting_window",
            get_self_fn().show_setting_window,
            display_name="Show Settings Window",
            description="Show Settings Window",
            tag="Show Settings Window",
        )
def deregister_actions(extension_id):
    """Remove every action this extension registered."""
    omni.kit.actions.core.get_action_registry().deregister_all_actions_for_extension(extension_id)
| 1,391 | Python | 34.692307 | 111 | 0.646298 |
omniverse-code/kit/exts/omni.kit.primitive.mesh/omni/kit/primitive/mesh/generator.py | import carb.settings
import omni
import omni.kit
from omni import ui
from .evaluators import _get_all_evaluators, get_geometry_mesh_prim_list
from omni.kit.menu.utils import MenuItemDescription, remove_menu_items, add_menu_items
class MeshGenerator:
    """Owns the "Mesh Generation Settings" window and the Create->Mesh menu."""

    def __init__(self):
        self._settings = carb.settings.get_settings()
        self._window = None
        # Maps mesh type name -> the VStack holding that type's setting widgets.
        self._mesh_setting_ui = {}
        self._current_setting_index = 0
        self._mesh_menu_list = []

    def destroy(self):
        self._window = None
        remove_menu_items(self._mesh_menu_list, "Create")

    def register_menu(self):
        """Add Create->Mesh->{<prim types>, Settings} to the main menu."""
        sub_menu = []
        for prim in get_geometry_mesh_prim_list():
            sub_menu.append(MenuItemDescription(name=prim, onclick_action=("omni.kit.primitive.mesh", f"create_mesh_prim_{prim.lower()}")))
        sub_menu.append(MenuItemDescription())
        sub_menu.append(MenuItemDescription(name="Settings", onclick_action=("omni.kit.primitive.mesh", "show_setting_window")))
        self._mesh_menu_list = [
            MenuItemDescription(name="Mesh", glyph="menu_prim.svg", sub_menu=sub_menu)
        ]
        add_menu_items(self._mesh_menu_list, "Create")

    def on_primitive_type_selected(self, model, item):
        """Swap the visible settings pane to the newly selected primitive type."""
        names = get_geometry_mesh_prim_list()
        old_mesh_name = names[self._current_setting_index]
        if old_mesh_name in self._mesh_setting_ui:
            self._mesh_setting_ui[old_mesh_name].visible = False
        idx = model.get_item_value_model().as_int
        mesh_name = names[idx]
        if mesh_name in self._mesh_setting_ui:
            # BUGFIX: this branch previously hid the *old* pane a second time
            # (copy-paste of the line above) before showing the new one; only
            # revealing the newly selected pane is needed here.
            self._mesh_setting_ui[mesh_name].visible = True
        self._current_setting_index = idx

    def show_setting_window(self):
        """Create the settings window on first use, then make it visible."""
        flags = ui.WINDOW_FLAGS_NO_COLLAPSE | ui.WINDOW_FLAGS_NO_SCROLLBAR
        if not self._window:
            self._window = ui.Window(
                "Mesh Generation Settings",
                ui.DockPreference.DISABLED,
                width=400,
                height=260,
                flags=flags,
                padding_x=0,
                padding_y=0,
            )
            with self._window.frame:
                with ui.VStack(height=0):
                    ui.Spacer(width=0, height=20)
                    with ui.HStack(height=0):
                        ui.Spacer(width=20, height=0)
                        ui.Label("Primitive Type", name="text", height=0)
                        model = ui.ComboBox(0, *get_geometry_mesh_prim_list(), name="primitive_type").model
                        model.add_item_changed_fn(self.on_primitive_type_selected)
                        ui.Spacer(width=20, height=0)
                    ui.Spacer(width=0, height=10)
                    ui.Separator(height=0, name="text")
                    ui.Spacer(width=0, height=10)
                    # One stacked pane per primitive type; only the pane of the
                    # currently selected type is visible.
                    with ui.ZStack(height=0):
                        mesh_names = get_geometry_mesh_prim_list()
                        for i in range(len(mesh_names)):
                            mesh_name = mesh_names[i]
                            stack = ui.VStack(spacing=0)
                            self._mesh_setting_ui[mesh_name] = stack
                            with stack:
                                ui.Spacer(height=20)
                                evaluator_class = _get_all_evaluators()[mesh_name]
                                evaluator_class.build_setting_ui()
                                ui.Spacer(height=5)
                            if i != 0:
                                stack.visible = False
                    ui.Spacer(width=0, height=20)
                    with ui.HStack(height=0):
                        ui.Spacer()
                        ui.Button(
                            "Create",
                            alignment=ui.Alignment.H_CENTER,
                            name="create",
                            width=120,
                            height=0,
                            mouse_pressed_fn=lambda *args: self._create_shape(),
                        )
                        ui.Button(
                            "Reset Settings",
                            alignment=ui.Alignment.H_CENTER,
                            name="reset",
                            width=120,
                            height=0,
                            mouse_pressed_fn=lambda *args: self._reset_settings(),
                        )
                        ui.Spacer()
            self._current_setting_index = 0
        self._window.visible = True

    def _create_shape(self):
        """Create a prim of the currently selected type in the active layer."""
        names = get_geometry_mesh_prim_list()
        mesh_type = names[self._current_setting_index]
        usd_context = omni.usd.get_context()
        with omni.kit.usd.layers.active_authoring_layer_context(usd_context):
            omni.kit.commands.execute("CreateMeshPrimWithDefaultXform", prim_type=mesh_type, above_ground=True)

    def _reset_settings(self):
        """Reset the selected type's persistent settings to their defaults."""
        names = get_geometry_mesh_prim_list()
        mesh_type = names[self._current_setting_index]
        evaluator_class = _get_all_evaluators()[mesh_type]
        evaluator_class.reset_setting()
| 5,205 | Python | 39.992126 | 139 | 0.510086 |
omniverse-code/kit/exts/omni.kit.primitive.mesh/omni/kit/primitive/mesh/command.py | __all__ = ["CreateMeshPrimWithDefaultXformCommand", "CreateMeshPrimCommand"]
import omni
import carb.settings
from pxr import UsdGeom, Usd, Vt, Kind, Sdf, Gf
from .evaluators import _get_all_evaluators
PERSISTENT_SETTINGS_PREFIX = "/persistent"
class CreateMeshPrimWithDefaultXformCommand(omni.kit.commands.Command):
    def __init__(self, prim_type: str, **kwargs):
        """
        Creates primitive.
        Args:
            prim_type (str): It supports Plane/Sphere/Cone/Cylinder/Disk/Torus/Cube.
            kwargs:
                object_origin (Gf.Vec3f): Position of mesh center in stage units.
                u_patches (int): The number of patches to tessellate U direction.
                v_patches (int): The number of patches to tessellate V direction.
                w_patches (int): The number of patches to tessellate W direction.
                                 It only works for Cone/Cylinder/Cube.
                half_scale (float): Half size of mesh in centimeters. Default is None, which means it's controlled by settings.
                u_verts_scale (int): Tessellation Level of U. It's a multiplier of u_patches.
                v_verts_scale (int): Tessellation Level of V. It's a multiplier of v_patches.
                w_verts_scale (int): Tessellation Level of W. It's a multiplier of w_patches.
                                     It only works for Cone/Cylinder/Cube.
                                     For Cone/Cylinder, it's to tessellate the caps.
                                     For Cube, it's to tessellate along z-axis.
                above_ground (bool): It will offset the center of mesh above the ground plane if it's True,
                    False otherwise. It's False by default. This param only works when param object_origin is not given.
                    Otherwise, it will be ignored.
        """
        # Normalize e.g. "sphere"/"SPHERE" -> "Sphere" to match evaluator keys.
        self._prim_type = prim_type[0:1].upper() + prim_type[1:].lower()
        self._usd_context = omni.usd.get_context(kwargs.get("context_name", ""))
        self._selection = self._usd_context.get_selection()
        self._stage = self._usd_context.get_stage()
        self._settings = carb.settings.get_settings()
        self._default_path = kwargs.get("prim_path", None)
        self._select_new_prim = kwargs.get("select_new_prim", True)
        self._prepend_default_prim = kwargs.get("prepend_default_prim", True)
        # NOTE(review): attribute name has a typo ("round" vs "ground"); kept
        # as-is since it is internal-only, but worth renaming eventually.
        self._above_round = kwargs.get("above_ground", False)
        self._attributes = {**kwargs}
        # Supported mesh types should have an associated evaluator class
        self._evaluator_class = _get_all_evaluators()[prim_type]
        assert isinstance(self._evaluator_class, type)
    def do(self):
        """Create the mesh prim, author its default xform ops, tessellate it,
        and return the created prim path (also stored for undo)."""
        self._prim_path = None
        if self._default_path:
            path = omni.usd.get_stage_next_free_path(self._stage, self._default_path, self._prepend_default_prim)
        else:
            path = omni.usd.get_stage_next_free_path(self._stage, "/" + self._prim_type, self._prepend_default_prim)
        mesh = UsdGeom.Mesh.Define(self._stage, path)
        prim = mesh.GetPrim()
        # Xform authoring style (op type/order/precision) comes from the
        # user's persistent prim-creation preferences.
        defaultXformOpType = self._settings.get(PERSISTENT_SETTINGS_PREFIX + "/app/primCreation/DefaultXformOpType")
        defaultRotationOrder = self._settings.get(
            PERSISTENT_SETTINGS_PREFIX + "/app/primCreation/DefaultRotationOrder"
        )
        defaultXformOpOrder = self._settings.get(
            PERSISTENT_SETTINGS_PREFIX + "/app/primCreation/DefaultXformOpOrder"
        )
        defaultXformPrecision = self._settings.get(
            PERSISTENT_SETTINGS_PREFIX + "/app/primCreation/DefaultXformOpPrecision"
        )
        vec3_type = Sdf.ValueTypeNames.Double3 if defaultXformPrecision == "Double" else Sdf.ValueTypeNames.Float3
        quat_type = Sdf.ValueTypeNames.Quatd if defaultXformPrecision == "Double" else Sdf.ValueTypeNames.Quatf
        up_axis = UsdGeom.GetStageUpAxis(self._stage)
        self._attributes["up_axis"] = up_axis
        half_scale = self._attributes.get("half_scale", None)
        if half_scale is None or half_scale <= 0.0:
            half_scale = self._evaluator_class.get_default_half_scale()
        object_origin = self._attributes.get("object_origin", None)
        if object_origin is None and self._above_round:
            # To move the mesh above the ground.
            if self._prim_type != "Disk" and self._prim_type != "Plane":
                if self._prim_type != "Torus":
                    offset = half_scale
                else:
                    # The tube of torus is half of the half_scale.
                    offset = half_scale / 2.0
                # Scale it to make sure it matches stage units.
                units = UsdGeom.GetStageMetersPerUnit(mesh.GetPrim().GetStage())
                if Gf.IsClose(units, 0.0, 1e-6):
                    units = 0.01
                scale = 0.01 / units
                offset = offset * scale
                if up_axis == "Y":
                    object_origin = Gf.Vec3f(0.0, offset, 0.0)
                else:
                    object_origin = Gf.Vec3f(0.0, 0.0, offset)
            else:
                object_origin = Gf.Vec3f(0.0)
        elif isinstance(object_origin, list):
            object_origin = Gf.Vec3f(*object_origin)
        else:
            object_origin = Gf.Vec3f(0.0)
        default_translate = Gf.Vec3d(object_origin) if defaultXformPrecision == "Double" else object_origin
        default_euler = Gf.Vec3d(0.0, 0.0, 0.0) if defaultXformPrecision == "Double" else Gf.Vec3f(0.0, 0.0, 0.0)
        default_scale = Gf.Vec3d(1.0, 1.0, 1.0) if defaultXformPrecision == "Double" else Gf.Vec3f(1.0, 1.0, 1.0)
        default_orient = (
            Gf.Quatd(1.0, Gf.Vec3d(0.0, 0.0, 0.0))
            if defaultXformPrecision == "Double"
            else Gf.Quatf(1.0, Gf.Vec3f(0.0, 0.0, 0.0))
        )
        mat4_type = Sdf.ValueTypeNames.Matrix4d  # there is no Matrix4f in SdfValueTypeNames
        if defaultXformOpType == "Scale, Rotate, Translate":
            attr_translate = prim.CreateAttribute("xformOp:translate", vec3_type, False)
            attr_translate.Set(default_translate)
            attr_rotate_name = "xformOp:rotate" + defaultRotationOrder
            attr_rotate = prim.CreateAttribute(attr_rotate_name, vec3_type, False)
            attr_rotate.Set(default_euler)
            attr_scale = prim.CreateAttribute("xformOp:scale", vec3_type, False)
            attr_scale.Set(default_scale)
            attr_order = prim.CreateAttribute("xformOpOrder", Sdf.ValueTypeNames.TokenArray, False)
            attr_order.Set(["xformOp:translate", attr_rotate_name, "xformOp:scale"])
        if defaultXformOpType == "Scale, Orient, Translate":
            attr_translate = prim.CreateAttribute("xformOp:translate", vec3_type, False)
            attr_translate.Set(default_translate)
            attr_rotate = prim.CreateAttribute("xformOp:orient", quat_type, False)
            attr_rotate.Set(default_orient)
            attr_scale = prim.CreateAttribute("xformOp:scale", vec3_type, False)
            attr_scale.Set(default_scale)
            attr_order = prim.CreateAttribute("xformOpOrder", Sdf.ValueTypeNames.TokenArray, False)
            attr_order.Set(["xformOp:translate", "xformOp:orient", "xformOp:scale"])
        if defaultXformOpType == "Transform":
            attr_matrix = prim.CreateAttribute("xformOp:transform", mat4_type, False)
            attr_matrix.Set(Gf.Matrix4d(1.0))
            attr_order = prim.CreateAttribute("xformOpOrder", Sdf.ValueTypeNames.TokenArray, False)
            attr_order.Set(["xformOp:transform"])
        self._prim_path = path
        if self._select_new_prim:
            self._selection.set_prim_path_selected(path, True, False, True, True)
        self._define_mesh(mesh)
        return self._prim_path
    def undo(self):
        """Remove the prim created by do(), if any."""
        if self._prim_path:
            self._stage.RemovePrim(self._prim_path)
    def _define_mesh(self, mesh):
        """Tessellate the mesh via the evaluator and author points, normals,
        UVs, topology, extent, and (optionally) the selection."""
        evaluator = self._evaluator_class(self._attributes)
        points = []
        normals = []
        sts = []
        point_indices = []
        face_vertex_counts = []
        points, normals, sts, point_indices, face_vertex_counts = evaluator.eval(**self._attributes)
        units = UsdGeom.GetStageMetersPerUnit(mesh.GetPrim().GetStage())
        if Gf.IsClose(units, 0.0, 1e-6):
            units = 0.01
        # Scale points to make sure it's already in centimeters
        scale = 0.01 / units
        points = [point * scale for point in points]
        mesh.GetPointsAttr().Set(Vt.Vec3fArray(points))
        mesh.GetNormalsAttr().Set(Vt.Vec3fArray(normals))
        mesh.GetFaceVertexIndicesAttr().Set(point_indices)
        mesh.GetFaceVertexCountsAttr().Set(face_vertex_counts)
        mesh.SetNormalsInterpolation("faceVarying")
        prim = mesh.GetPrim()
        # https://github.com/PixarAnimationStudios/USD/commit/592b4d39edf5daf0534d467e970c95462a65d44b
        # UsdGeom.Imageable.CreatePrimvar deprecated in v19.03 and removed in v22.08
        sts_primvar = UsdGeom.PrimvarsAPI(prim).CreatePrimvar("st", Sdf.ValueTypeNames.TexCoord2fArray)
        sts_primvar.SetInterpolation("faceVarying")
        sts_primvar.Set(Vt.Vec2fArray(sts))
        mesh.CreateSubdivisionSchemeAttr("none")
        attr = prim.GetAttribute(UsdGeom.Tokens.extent)
        if attr:
            bounds = UsdGeom.Boundable.ComputeExtentFromPlugins(UsdGeom.Boundable(prim), Usd.TimeCode.Default())
            if bounds:
                attr.Set(bounds)
        # set the new prim as the active selection
        if self._select_new_prim:
            self._selection.set_selected_prim_paths([prim.GetPath().pathString], False)
# For back compatibility.
class CreateMeshPrimCommand(CreateMeshPrimWithDefaultXformCommand):
    """Alias kept for backward compatibility; behaves exactly like
    ``CreateMeshPrimWithDefaultXformCommand``."""
    def __init__(self, prim_type: str, **kwargs):
        super().__init__(prim_type, **kwargs)
omni.kit.commands.register(CreateMeshPrimCommand)
omni.kit.commands.register(CreateMeshPrimWithDefaultXformCommand)
| 9,980 | Python | 44.995391 | 123 | 0.62505 |
omniverse-code/kit/exts/omni.kit.primitive.mesh/omni/kit/primitive/mesh/evaluators/cone.py | import math
from .utils import (
get_int_setting, build_int_slider, modify_winding_order,
transform_point, inverse_u, inverse_v, generate_disk
)
from .abstract_shape_evaluator import AbstractShapeEvaluator
from pxr import Gf
from typing import List, Tuple
class ConeEvaluator(AbstractShapeEvaluator):
    """Tessellates a unit cone (radius 1, height 2, apex at +up) into a
    quad-dominant mesh with a bottom cap and a tiny top cap closing the apex."""
    SETTING_OBJECT_HALF_SCALE = "/persistent/app/mesh_generator/shapes/cone/object_half_scale"
    SETTING_U_SCALE = "/persistent/app/mesh_generator/shapes/cone/u_scale"
    SETTING_V_SCALE = "/persistent/app/mesh_generator/shapes/cone/v_scale"
    SETTING_W_SCALE = "/persistent/app/mesh_generator/shapes/cone/w_scale"
    def __init__(self, attributes: dict):
        super().__init__(attributes)
        self.radius = 1.0
        self.height = 2.0
    # The sequence must be kept in the same as generate_circle_points
    # in the u direction to share points with the cap.
    def _eval(self, up_axis, u, v) -> Tuple[Gf.Vec3f, Gf.Vec3f]:
        """Evaluate the lateral surface at (u, v) in [0,1)x[0,1); returns the
        point and its normalized surface normal (from the partial-derivative
        cross product). v must stay < 1 to avoid dividing by zero at the apex."""
        theta = u * 2.0 * math.pi
        x = (1 - v) * math.cos(theta)
        h = v * self.height - 1
        if up_axis == "Y":
            z = (1 - v) * math.sin(theta)
            point = Gf.Vec3f(x, h, z)
            dpdu = Gf.Vec3f(-2.0 * math.pi * z, 0.0, 2.0 * math.pi * x)
            dpdv = Gf.Vec3f(-x / (1 - v), self.height, -z / (1 - v))
            normal = dpdv ^ dpdu
            normal = normal.GetNormalized()
        else:
            y = (1 - v) * math.sin(theta)
            point = Gf.Vec3f(x, y, h)
            dpdu = Gf.Vec3f(-2.0 * math.pi * y, 2.0 * math.pi * x, 0)
            dpdv = Gf.Vec3f(-x / (1 - v), -y / (1 - v), self.height)
            normal = dpdu ^ dpdv
            normal = normal.GetNormalized()
        return point, normal
    def eval(self, **kwargs):
        """Build the cone geometry; see AbstractShapeEvaluator.eval for the
        return contract. Patch counts are multiplied by the persistent
        u/v/w verts-scale settings unless overridden via kwargs."""
        half_scale = kwargs.get("half_scale", None)
        if half_scale is None or half_scale <= 0:
            half_scale = self.get_default_half_scale()
        num_u_verts_scale = kwargs.get("u_verts_scale", None)
        if num_u_verts_scale is None or num_u_verts_scale <= 0:
            num_u_verts_scale = get_int_setting(ConeEvaluator.SETTING_U_SCALE, 1)
        num_v_verts_scale = kwargs.get("v_verts_scale", None)
        if num_v_verts_scale is None or num_v_verts_scale <= 0:
            num_v_verts_scale = get_int_setting(ConeEvaluator.SETTING_V_SCALE, 3)
        num_w_verts_scale = kwargs.get("w_verts_scale", None)
        if num_w_verts_scale is None or num_w_verts_scale <= 0:
            num_w_verts_scale = get_int_setting(ConeEvaluator.SETTING_W_SCALE, 1)
        num_u_verts_scale = max(num_u_verts_scale, 1)
        num_v_verts_scale = max(num_v_verts_scale, 1)
        num_w_verts_scale = max(num_w_verts_scale, 1)
        up_axis = kwargs.get("up_axis", "Y")
        origin = Gf.Vec3f(0.0)
        u_patches = kwargs.get("u_patches", 64)
        v_patches = kwargs.get("v_patches", 1)
        w_patches = kwargs.get("w_patches", 1)
        u_patches = u_patches * num_u_verts_scale
        v_patches = v_patches * num_v_verts_scale
        w_patches = w_patches * num_w_verts_scale
        u_patches = max(int(u_patches), 3)
        v_patches = max(int(v_patches), 1)
        w_patches = max(int(w_patches), 1)
        # Keep v strictly below 1 so _eval never divides by zero at the apex;
        # the tiny gap is closed by the top "hat" below.
        accuracy = 0.00001
        u_delta = 1.0 / u_patches
        v_delta = (1.0 - accuracy) / v_patches
        num_u_verts = u_patches
        num_v_verts = v_patches + 1
        points: List[Gf.Vec3f] = []
        point_normals: List[Gf.Vec3f] = []
        normals: List[Gf.Vec3f] = []
        sts: List[Gf.Vec2f] = []
        face_indices: List[int] = []
        face_vertex_counts: List[int] = []
        for j in range(num_v_verts):
            for i in range(num_u_verts):
                u = i * u_delta
                v = j * v_delta
                point, normal = self._eval(up_axis, u, v)
                point = transform_point(point, origin, half_scale)
                points.append(point)
                point_normals.append(normal)
        def calc_index(i, j):
            # Wrap the seam: column num_u_verts maps back to column 0.
            i = i if i < num_u_verts else 0
            base_index = j * num_u_verts
            point_index = base_index + i
            return point_index
        def get_uv(i, j):
            u = 1 - i * u_delta if i < num_u_verts else 0.0
            v = j * v_delta if j != num_v_verts - 1 else 1.0
            return Gf.Vec2f(u, v)
        for j in range(v_patches):
            for i in range(u_patches):
                vindex00 = calc_index(i, j)
                vindex10 = calc_index(i + 1, j)
                vindex11 = calc_index(i + 1, j + 1)
                vindex01 = calc_index(i, j + 1)
                uv00 = get_uv(i, j)
                uv10 = get_uv(i + 1, j)
                uv11 = get_uv(i + 1, j + 1)
                uv01 = get_uv(i, j + 1)
                # Right-hand order
                if up_axis == "Y":
                    sts.extend([uv00, uv01, uv11, uv10])
                    face_indices.extend((vindex00, vindex01, vindex11, vindex10))
                    normals.extend(
                        [
                            point_normals[vindex00],
                            point_normals[vindex01],
                            point_normals[vindex11],
                            point_normals[vindex10],
                        ]
                    )
                else:
                    sts.extend([inverse_u(uv00), inverse_u(uv10), inverse_u(uv11), inverse_u(uv01)])
                    face_indices.extend((vindex00, vindex10, vindex11, vindex01))
                    normals.extend(
                        [
                            point_normals[vindex00],
                            point_normals[vindex10],
                            point_normals[vindex11],
                            point_normals[vindex01],
                        ]
                    )
                face_vertex_counts.append(4)
        # Add hat
        if up_axis == "Y":
            bottom_center_point = Gf.Vec3f(0, -1, 0)
            top_center_point = Gf.Vec3f(0, 1 - accuracy, 0)
        else:
            bottom_center_point = Gf.Vec3f(0, 0, -1)
            top_center_point = Gf.Vec3f(0, 0, 1 - accuracy)
        def add_hat(center_point, rim_points_start_index, w_patches, invert_wind_order=False):
            """Close the shape with a disk whose rim re-uses the existing ring
            of lateral points starting at rim_points_start_index."""
            bt_points, _, bt_sts, bt_face_indices, bt_face_vertex_counts = generate_disk(
                center_point, u_patches, w_patches, origin, half_scale, up_axis
            )
            # Total points before adding hat
            total_points = len(points)
            # Skips shared points
            points.extend(bt_points[num_u_verts:])
            if invert_wind_order:
                modify_winding_order(bt_face_vertex_counts, bt_sts)
                for st in bt_sts:
                    sts.append(inverse_v(st))
            else:
                sts.extend(bt_sts)
            face_vertex_counts.extend(bt_face_vertex_counts)
            # Cap normal equals the (unit-space) center direction for all verts.
            normals.extend([center_point] * len(bt_face_indices))
            # Remapping cap points
            for i, index in enumerate(bt_face_indices):
                if index >= num_u_verts:
                    bt_face_indices[i] += total_points - num_u_verts
                else:
                    bt_face_indices[i] += rim_points_start_index
            if invert_wind_order:
                modify_winding_order(bt_face_vertex_counts, bt_face_indices)
            face_indices.extend(bt_face_indices)
        # Add top hat to close shape
        top_hat_start_index = len(points) - num_u_verts
        add_hat(top_center_point, top_hat_start_index, 1)
        # Add bottom hat to close shape
        add_hat(bottom_center_point, 0, w_patches, True)
        return points, normals, sts, face_indices, face_vertex_counts
    @staticmethod
    def build_setting_ui():
        """Build the cone's rows in the settings window (sliders persist to carb settings)."""
        from omni import ui
        ConeEvaluator._half_scale_slider = build_int_slider(
            "Object Half Scale", ConeEvaluator.SETTING_OBJECT_HALF_SCALE, 50, 10, 1000
        )
        ui.Spacer(height=5)
        ConeEvaluator._u_scale_slider = build_int_slider(
            "U Verts Scale", ConeEvaluator.SETTING_U_SCALE, 1, 1, 10,
            "Tessellation Level in Horizontal Direction"
        )
        ui.Spacer(height=5)
        ConeEvaluator._v_scale_slider = build_int_slider(
            "V Verts Scale", ConeEvaluator.SETTING_V_SCALE, 1, 1, 10, "Tessellation Level in Vertical Direction"
        )
        ui.Spacer(height=5)
        ConeEvaluator._w_scale_slider = build_int_slider(
            "W Verts Scale", ConeEvaluator.SETTING_W_SCALE, 1, 1, 10, "Tessellation Level of Bottom Cap"
        )
    @staticmethod
    def reset_setting():
        """Restore the cone sliders (and backing settings) to their defaults."""
        ConeEvaluator._half_scale_slider.set_value(ConeEvaluator.get_default_half_scale())
        ConeEvaluator._u_scale_slider.set_value(1)
        ConeEvaluator._v_scale_slider.set_value(1)
        ConeEvaluator._w_scale_slider.set_value(1)
    @staticmethod
    def get_default_half_scale():
        """Half scale (centimeters) from persistent settings; defaults to 50."""
        half_scale = get_int_setting(ConeEvaluator.SETTING_OBJECT_HALF_SCALE, 50)
        return half_scale
| 9,127 | Python | 38.008547 | 112 | 0.5315 |
omniverse-code/kit/exts/omni.kit.primitive.mesh/omni/kit/primitive/mesh/evaluators/__init__.py | __all__ = ["get_geometry_mesh_prim_list", "AbstractShapeEvaluator"]
import re
from .abstract_shape_evaluator import AbstractShapeEvaluator
from .cone import ConeEvaluator
from .disk import DiskEvaluator
from .cube import CubeEvaluator
from .cylinder import CylinderEvaluator
from .sphere import SphereEvaluator
from .torus import TorusEvaluator
from .plane import PlaneEvaluator
_all_evaluators = {}
def _get_all_evaluators():
    """Lazily build and cache the {prim name: evaluator class} mapping from
    the *Evaluator classes imported into this module."""
    global _all_evaluators
    if not _all_evaluators:
        suffix = "Evaluator"
        for symbol in list(globals().keys()):
            # Keep names of the form "<Shape>Evaluator", excluding the base class.
            if not symbol.endswith(suffix) or symbol == suffix:
                continue
            if symbol == AbstractShapeEvaluator.__name__:
                continue
            _all_evaluators[symbol[: -len(suffix)]] = globals()[symbol]
    return _all_evaluators
def get_geometry_mesh_prim_list():
    """Return the supported mesh primitive type names, alphabetically sorted."""
    return sorted(_get_all_evaluators().keys())
| 973 | Python | 27.647058 | 99 | 0.706064 |
omniverse-code/kit/exts/omni.kit.primitive.mesh/omni/kit/primitive/mesh/evaluators/abstract_shape_evaluator.py | from typing import List, Tuple
from pxr import Gf
class AbstractShapeEvaluator: # pragma: no cover
    """Base class for mesh primitive tessellators (sphere, cone, cube, ...)."""
    def __init__(self, attributes: dict):
        # Creation parameters forwarded from the CreateMeshPrim* command.
        self._attributes = attributes
    def eval(self, **kwargs) -> Tuple[
        List[Gf.Vec3f], List[Gf.Vec3f],
        List[Gf.Vec2f], List[int], List[int]
    ]:
        """It must be implemented to return tuple
        [points, normals, uvs, face_indices, face_vertex_counts], where:
        * points and normals are array of Gf.Vec3f.
        * uvs are array of Gf.Vec2f that represents uv coordinates.
        * face_indexes are array of int that represents face indices.
        * face_vertex_counts are array of int that represents vertex count per face.
        * Normals and uvs must be face varying.
        """
        raise NotImplementedError("Eval must be implemented for this shape.")
    @staticmethod
    def build_setting_ui():
        # Optional override: build this shape's rows in the settings window.
        pass
    @staticmethod
    def reset_setting():
        # Optional override: restore this shape's persistent settings to defaults.
        pass
    @staticmethod
    def get_default_half_scale():
        # Default half size (centimeters) when no setting/kwarg overrides it.
        return 50
| 1,039 | Python | 28.714285 | 84 | 0.636189 |
omniverse-code/kit/exts/omni.kit.primitive.mesh/omni/kit/primitive/mesh/evaluators/utils.py | import math
import carb.settings
from pxr import Gf
from typing import List, Tuple
from numbers import Number
def _save_settings(model, setting):
    """Persist the slider model's integer value to the given carb setting path."""
    carb.settings.get_settings().set(setting, model.get_value_as_int())
def build_int_slider(name, setting, default_value, min_value, max_value, tooltip=None):
    """Build a labeled IntSlider row whose value is loaded from and persisted
    to the given carb setting path.

    Args:
        name: Label text shown left of the slider.
        setting: Persistent carb settings path backing the slider.
        default_value: Value used when the setting has not been written yet.
        min_value/max_value: Slider range.
        tooltip: Optional tooltip for the whole row.

    Returns:
        The slider's value model.
    """
    from omni import ui
    layout = ui.HStack(height=0)
    with layout:
        ui.Spacer(width=20, height=0)
        ui.Label(name, height=0, name="text")
        # BUGFIX: keyword was misspelled "aligment", so the requested LEFT
        # alignment was silently ignored by the widget.
        model = ui.IntSlider(name="text", min=min_value, max=max_value, height=0, alignment=ui.Alignment.LEFT).model
        value = get_int_setting(setting, default_value)
        model.set_value(value)
        ui.Spacer(width=20, height=0)
    # Write-through: any user change is persisted immediately.
    model.add_value_changed_fn(lambda m: _save_settings(m, setting))
    if tooltip:
        layout.set_tooltip(tooltip)
    return model
def inverse_u(uv) -> Gf.Vec2f:
    """Mirror a UV coordinate horizontally (u -> 1 - u)."""
    u, v = uv[0], uv[1]
    return Gf.Vec2f(1 - u, v)
def inverse_v(uv) -> Gf.Vec2f:
    """Mirror a UV coordinate vertically (v -> 1 - v)."""
    u, v = uv[0], uv[1]
    return Gf.Vec2f(u, 1 - v)
def inverse_uv(uv) -> Gf.Vec2f:
    """Mirror a UV coordinate in both directions (u -> 1 - u, v -> 1 - v)."""
    u, v = uv[0], uv[1]
    return Gf.Vec2f(1 - u, 1 - v)
def transform_point(point: Gf.Vec3f, origin: Gf.Vec3f, half_scale: float) -> Gf.Vec3f:
    """Scale a unit-space point by half_scale, then translate it by origin."""
    scaled = half_scale * point
    return scaled + origin
def generate_circle_points(
    up_axis, num_points, delta, center_point=None
) -> Tuple[List[Gf.Vec3f], List[Gf.Vec2f]]:
    """Generate num_points unit-circle points (and their UVs) around center_point.

    Args:
        up_axis: "Y" puts the circle in the XZ plane, anything else in XY.
        num_points: Number of samples around the circle.
        delta: Fraction of a full turn between samples (i-th angle = i*delta*2pi).
        center_point: Optional Gf.Vec3f offset added to every point.

    Returns:
        (points, point_sts) lists of equal length.
    """
    # BUGFIX: the default used to be a module-level Gf.Vec3f(0.0) instance
    # (mutable default argument) that callers could corrupt for everyone.
    if center_point is None:
        center_point = Gf.Vec3f(0.0)
    points: List[Gf.Vec3f] = []
    point_sts: List[Gf.Vec2f] = []
    for i in range(num_points):
        theta = i * delta * math.pi * 2
        if up_axis == "Y":
            point = Gf.Vec3f(math.cos(theta), 0.0, math.sin(theta))
            # NOTE(review): unlike the branch below, u here is 1 - x/2 (range
            # [0.5, 1.5]) rather than (1 - x)/2 — possibly missing parens.
            # Kept as-is to preserve existing texture mapping; verify intent.
            st = Gf.Vec2f(1.0 - point[0] / 2.0, (1.0 + point[2]) / 2.0)
        else:
            point = Gf.Vec3f(math.cos(theta), math.sin(theta), 0.0)
            st = Gf.Vec2f((1.0 - point[0]) / 2.0, (1.0 + point[1]) / 2.0)
        point_sts.append(st)
        points.append(point + center_point)
    return points, point_sts
def get_int_setting(key, default_value):
    """Read an integer carb setting, registering default_value as its default first."""
    settings = carb.settings.get_settings()
    settings.set_default(key, default_value)
    return settings.get_as_int(key)
def generate_disk(
    center_point: Gf.Vec3f, u_patches: int, v_patches: int,
    origin: Gf.Vec3f, half_scale: float, up_axis="Y"
) -> Tuple[List[Gf.Vec3f], List[Gf.Vec3f], List[Gf.Vec2f], List[int], List[int]]:
    """Tessellate a disk of radius half_scale around center_point.

    Points are laid out as v_patches concentric rings of u_patches points
    (outermost ring first) plus the center point last, so callers (e.g. the
    cone's caps) can share the outer ring with existing geometry.
    Returns (points, normals, sts, face_indices, face_vertex_counts) with
    face-varying normals/UVs; the innermost ring produces triangles.
    """
    u_delta = 1.0 / u_patches
    v_delta = 1.0 / v_patches
    num_u_verts = u_patches
    num_v_verts = v_patches + 1
    points: List[Gf.Vec3f] = []
    normals: List[Gf.Vec3f] = []
    sts: List[Gf.Vec2f] = []
    face_indices: List[int] = []
    face_vertex_counts: List[int] = []
    center_point = transform_point(center_point, origin, half_scale)
    circle_points, _ = generate_circle_points(up_axis, u_patches, 1.0 / u_patches)
    for i in range(num_v_verts - 1):
        v = v_delta * i
        for j in range(num_u_verts):
            # NOTE(review): origin is passed as a plain tuple here; relies on
            # Gf.Vec3f accepting tuple operands — confirm against pxr.Gf.
            point = transform_point(circle_points[j], (0, 0, 0), half_scale * (1 - v))
            points.append(point + center_point)
    # Center point
    points.append(center_point)
    def calc_index(i, j):
        # Wrap the seam in u; the last ring index collapses to the center point.
        ii = i if i < num_u_verts else 0
        base_index = j * num_u_verts
        if j == num_v_verts - 1:
            return base_index
        else:
            return base_index + ii
    def get_uv(i, j):
        vindex = calc_index(i, j)
        # Ensure all axis to be [-1, 1]
        point = (points[vindex] - origin) / half_scale
        if up_axis == "Y":
            st = (Gf.Vec2f(-point[0], -point[2]) + Gf.Vec2f(1, 1)) / 2
        else:
            st = (Gf.Vec2f(point[0], point[1]) + Gf.Vec2f(1)) / 2
        return st
    # Generating quads or triangles of the center
    for j in range(v_patches):
        for i in range(u_patches):
            vindex00 = calc_index(i, j)
            vindex10 = calc_index(i + 1, j)
            vindex11 = calc_index(i + 1, j + 1)
            vindex01 = calc_index(i, j + 1)
            uv00 = get_uv(i, j)
            uv10 = get_uv(i + 1, j)
            uv11 = get_uv(i + 1, j + 1)
            uv01 = get_uv(i, j + 1)
            # Right-hand order
            if up_axis == "Y":
                if vindex11 == vindex01:
                    sts.extend([inverse_u(uv00), inverse_u(uv01), inverse_u(uv10)])
                    face_indices.extend((vindex00, vindex01, vindex10))
                else:
                    sts.extend([inverse_u(uv00), inverse_u(uv01), inverse_u(uv11), inverse_u(uv10)])
                    face_indices.extend((vindex00, vindex01, vindex11, vindex10))
                normal = Gf.Vec3f(0.0, 1.0, 0.0)
            else:
                if vindex11 == vindex01:
                    sts.extend([uv00, uv10, uv01])
                    face_indices.extend((vindex00, vindex10, vindex01))
                else:
                    sts.extend([uv00, uv10, uv11, uv01])
                    face_indices.extend((vindex00, vindex10, vindex11, vindex01))
                normal = Gf.Vec3f(0.0, 0.0, 1.0)
            if vindex11 == vindex01:
                face_vertex_counts.append(3)
                normals.extend([normal] * 3)
            else:
                face_vertex_counts.append(4)
                normals.extend([normal] * 4)
    return points, normals, sts, face_indices, face_vertex_counts
def generate_plane(origin, half_scale, u_patches, v_patches, up_axis):
    """Tessellate an axis-aligned rectangle into u_patches x v_patches quads.

    Args:
        origin: Center of the plane (indexable as x, y, z).
        half_scale: Half extent; a single number or a (w, h, d) triple.
        u_patches: Number of quads in the U direction.
        v_patches: Number of quads in the V direction.
        up_axis: "Y" (plane in XZ), "Z" (plane in XY), otherwise X-up (plane in YZ).

    Returns:
        (points, normals, sts, face_indices, face_vertex_counts) with
        face-varying normals/UVs and right-hand quad winding.
    """
    if isinstance(half_scale, Number):
        [w, h, d] = half_scale, half_scale, half_scale
    else:
        [w, h, d] = half_scale
    [x, y, z] = origin[0], origin[1], origin[2]
    num_u_verts = u_patches + 1
    num_v_verts = v_patches + 1
    points = []
    normals = []
    sts = []
    face_indices = []
    face_vertex_counts = []
    u_delta = 1.0 / u_patches
    v_delta = 1.0 / v_patches
    if up_axis == "Y":
        # Vertices laid out row-major from the -X/-Z corner.
        w_delta = 2.0 * w * u_delta
        h_delta = 2.0 * d * v_delta
        bottom_left = Gf.Vec3f(x - w, y, z - d)
        for i in range(num_v_verts):
            for j in range(num_u_verts):
                point = bottom_left + Gf.Vec3f(j * w_delta, 0.0, i * h_delta)
                points.append(point)
    elif up_axis == "Z":
        w_delta = 2.0 * w / u_patches
        h_delta = 2.0 * h / v_patches
        bottom_left = Gf.Vec3f(x - w, y - h, z)
        for i in range(num_v_verts):
            for j in range(num_u_verts):
                point = bottom_left + Gf.Vec3f(j * w_delta, i * h_delta, 0.0)
                points.append(point)
    else:  # X up
        w_delta = 2.0 * h / u_patches
        h_delta = 2.0 * d / v_patches
        bottom_left = Gf.Vec3f(x, y - h, z - d)
        for i in range(num_v_verts):
            for j in range(num_u_verts):
                point = bottom_left + Gf.Vec3f(0, j * w_delta, i * h_delta)
                points.append(point)
    def calc_index(i, j):
        ii = i if i < num_u_verts else 0
        jj = j if j < num_v_verts else 0
        return jj * num_u_verts + ii
    def get_uv(i, j):
        u = i * u_delta if i < num_u_verts else 1.0
        if up_axis == "Y":
            v = 1 - j * v_delta if j < num_v_verts else 0.0
        else:
            v = j * v_delta if j < num_v_verts else 1.0
        return Gf.Vec2f(u, v)
    # Generating quads
    for j in range(v_patches):
        for i in range(u_patches):
            vindex00 = calc_index(i, j)
            vindex10 = calc_index(i + 1, j)
            vindex11 = calc_index(i + 1, j + 1)
            vindex01 = calc_index(i, j + 1)
            uv00 = get_uv(i, j)
            uv10 = get_uv(i + 1, j)
            uv11 = get_uv(i + 1, j + 1)
            uv01 = get_uv(i, j + 1)
            # Right-hand order
            if up_axis == "Y":
                sts.extend([uv00, uv01, uv11, uv10])
                face_indices.extend((vindex00, vindex01, vindex11, vindex10))
                normal = Gf.Vec3f(0.0, 1.0, 0.0)
            elif up_axis == "Z":
                sts.extend([uv00, uv10, uv11, uv01])
                face_indices.extend((vindex00, vindex10, vindex11, vindex01))
                normal = Gf.Vec3f(0.0, 0.0, 1.0)
            else:  # X
                # BUGFIX: this branch previously reused the Y-up winding with a
                # +Y normal. An X-up plane spans YZ, so the outward normal is
                # +X, and the winding below matches it by the right-hand rule.
                sts.extend([uv00, uv10, uv11, uv01])
                face_indices.extend((vindex00, vindex10, vindex11, vindex01))
                normal = Gf.Vec3f(1.0, 0.0, 0.0)
            face_vertex_counts.append(4)
            normals.extend([normal] * 4)
    return points, normals, sts, face_indices, face_vertex_counts
def modify_winding_order(face_counts, face_indices):
    """Reverse each face's winding in-place, keeping its first vertex fixed.

    Args:
        face_counts: Vertex count of every face, in order.
        face_indices: Flat per-face-vertex list (indices, UVs, ...); mutated
            in place. Faces with fewer than 3 vertices are left untouched.
    """
    offset = 0
    for vert_count in face_counts:
        if vert_count >= 3:
            # Keep entry `offset` (the face's first vertex) and flip the rest.
            lo, hi = offset + 1, offset + vert_count
            face_indices[lo:hi] = reversed(face_indices[lo:hi])
        offset += vert_count
| 8,670 | Python | 32.608527 | 115 | 0.529873 |
omniverse-code/kit/exts/omni.kit.primitive.mesh/omni/kit/primitive/mesh/evaluators/plane.py | from .utils import get_int_setting, build_int_slider, inverse_u, generate_plane
from .abstract_shape_evaluator import AbstractShapeEvaluator
from pxr import Gf
class PlaneEvaluator(AbstractShapeEvaluator):
    """Evaluator that produces a tessellated rectangular plane mesh."""

    # Persistent settings backing the generator UI sliders.
    SETTING_OBJECT_HALF_SCALE = "/persistent/app/mesh_generator/shapes/plane/object_half_scale"
    SETTING_U_SCALE = "/persistent/app/mesh_generator/shapes/plane/u_scale"
    SETTING_V_SCALE = "/persistent/app/mesh_generator/shapes/plane/v_scale"

    def __init__(self, attributes: dict):
        super().__init__(attributes)

    def eval(self, **kwargs):
        """Build the plane geometry.

        Recognized kwargs: half_scale, u_verts_scale, v_verts_scale,
        up_axis ("Y" default), u_patches, v_patches. Missing or non-positive
        overrides fall back to the persistent settings.

        Returns:
            (points, normals, sts, face_indices, face_vertex_counts).
        """
        def _positive(value, fallback):
            # Treat a missing or non-positive override as "use the fallback";
            # the fallback is a callable so settings are only read when needed.
            return fallback() if value is None or value <= 0 else value

        half = _positive(kwargs.get("half_scale", None), self.get_default_half_scale)
        u_scale = _positive(
            kwargs.get("u_verts_scale", None),
            lambda: get_int_setting(PlaneEvaluator.SETTING_U_SCALE, 1),
        )
        v_scale = _positive(
            kwargs.get("v_verts_scale", None),
            lambda: get_int_setting(PlaneEvaluator.SETTING_V_SCALE, 1),
        )
        u_patches = max(int(kwargs.get("u_patches", 1) * u_scale), 1)
        v_patches = max(int(kwargs.get("v_patches", 1) * v_scale), 1)
        return generate_plane(
            Gf.Vec3f(0.0),
            [half, half, half],
            u_patches,
            v_patches,
            kwargs.get("up_axis", "Y"),
        )

    @staticmethod
    def build_setting_ui():
        from omni import ui
        # (attribute name, label, setting path, default, min, max)
        specs = (
            ("_half_scale_slider", "Object Half Scale", PlaneEvaluator.SETTING_OBJECT_HALF_SCALE, 50, 10, 1000),
            ("_u_scale_slider", "U Verts Scale", PlaneEvaluator.SETTING_U_SCALE, 1, 1, 10),
            ("_v_scale_slider", "V Verts Scale", PlaneEvaluator.SETTING_V_SCALE, 1, 1, 10),
        )
        for pos, (attr, label, setting, default, lo, hi) in enumerate(specs):
            if pos:
                ui.Spacer(height=5)
            setattr(PlaneEvaluator, attr, build_int_slider(label, setting, default, lo, hi))

    @staticmethod
    def reset_setting():
        PlaneEvaluator._half_scale_slider.set_value(PlaneEvaluator.get_default_half_scale())
        for slider in (PlaneEvaluator._u_scale_slider, PlaneEvaluator._v_scale_slider):
            slider.set_value(1)

    @staticmethod
    def get_default_half_scale():
        return get_int_setting(PlaneEvaluator.SETTING_OBJECT_HALF_SCALE, 50)
| 2,585 | Python | 39.406249 | 116 | 0.649903 |
omniverse-code/kit/exts/omni.kit.primitive.mesh/omni/kit/primitive/mesh/evaluators/cylinder.py | from .utils import (
get_int_setting, build_int_slider, modify_winding_order,
generate_circle_points, transform_point, inverse_u, inverse_v, generate_disk
)
from .abstract_shape_evaluator import AbstractShapeEvaluator
from pxr import Gf
from typing import List
class CylinderEvaluator(AbstractShapeEvaluator):
    """Generates a closed cylinder mesh: a tube body plus bottom and top caps.

    Tessellation levels default to persistent settings and can be overridden
    per call via `eval` kwargs.
    """
    # Persistent settings backing the generator UI sliders.
    SETTING_OBJECT_HALF_SCALE = "/persistent/app/mesh_generator/shapes/cylinder/object_half_scale"
    SETTING_U_SCALE = "/persistent/app/mesh_generator/shapes/cylinder/u_scale"
    SETTING_V_SCALE = "/persistent/app/mesh_generator/shapes/cylinder/v_scale"
    SETTING_W_SCALE = "/persistent/app/mesh_generator/shapes/cylinder/w_scale"
    def __init__(self, attributes: dict):
        super().__init__(attributes)
    def eval(self, **kwargs):
        """Build the cylinder geometry.

        Recognized kwargs: half_scale, u_verts_scale, v_verts_scale,
        w_verts_scale, up_axis ("Y" default), u_patches (around the
        circumference), v_patches (along the axis), w_patches (cap rings).
        Missing or non-positive values fall back to persistent settings.

        Returns:
            (points, normals, sts, face_indices, face_vertex_counts);
            `normals` and `sts` are face-varying (one entry per face-vertex).
        """
        half_scale = kwargs.get("half_scale", None)
        if half_scale is None or half_scale <= 0:
            half_scale = self.get_default_half_scale()
        num_u_verts_scale = kwargs.get("u_verts_scale", None)
        if num_u_verts_scale is None or num_u_verts_scale <= 0:
            num_u_verts_scale = get_int_setting(CylinderEvaluator.SETTING_U_SCALE, 1)
        num_v_verts_scale = kwargs.get("v_verts_scale", None)
        if num_v_verts_scale is None or num_v_verts_scale <= 0:
            num_v_verts_scale = get_int_setting(CylinderEvaluator.SETTING_V_SCALE, 1)
        num_w_verts_scale = kwargs.get("w_verts_scale", None)
        if num_w_verts_scale is None or num_w_verts_scale <= 0:
            num_w_verts_scale = get_int_setting(CylinderEvaluator.SETTING_W_SCALE, 1)
        up_axis = kwargs.get("up_axis", "Y")
        origin = Gf.Vec3f(0.0)
        u_patches = kwargs.get("u_patches", 32)
        v_patches = kwargs.get("v_patches", 1)
        w_patches = kwargs.get("w_patches", 1)
        u_patches = u_patches * num_u_verts_scale
        v_patches = v_patches * num_v_verts_scale
        w_patches = w_patches * num_w_verts_scale
        # At least 3 segments around the rim so the cross-section is a polygon.
        u_patches = max(int(u_patches), 3)
        v_patches = max(int(v_patches), 1)
        w_patches = max(int(w_patches), 1)
        u_delta = 1.0 / (u_patches if u_patches != 0 else 1)
        v_delta = 1.0 / (v_patches if v_patches != 0 else 1)
        # open meshes need an extra vert on the end to create the last patch
        # closed meshes reuse the vert at index 0 to close their final patch
        num_u_verts = u_patches
        num_v_verts = v_patches + 1
        points: List[Gf.Vec3f] = []
        normals: List[Gf.Vec3f] = []
        sts: List[Gf.Vec2f] = []
        face_indices: List[int] = []
        face_vertex_counts: List[int] = []
        # generate circle points
        circle_points, _ = generate_circle_points(up_axis, num_u_verts, u_delta)
        # Stack `num_v_verts` copies of the rim ring along the up axis, mapping
        # v in [0, 1] to [-1, 1]. NOTE(review): the shared circle point is
        # mutated in place before transform_point — assumes transform_point
        # returns a new point; confirm against utils.transform_point.
        for j in range(num_v_verts):
            for i in range(num_u_verts):
                v = j * v_delta
                point = circle_points[i]
                if up_axis == "Y":
                    point[1] = 2.0 * (v - 0.5)
                else:
                    point[2] = 2.0 * (v - 0.5)
                point = transform_point(point, origin, half_scale)
                points.append(point)
        def calc_index(i, j):
            # Wrap i past the last rim vertex back to 0 to close the tube.
            ii = i if i < num_u_verts else 0
            jj = j if j < num_v_verts else 0
            return jj * num_u_verts + ii
        def get_uv(i, j):
            # U runs backwards so the texture is not mirrored on the outside.
            u = 1 - i * u_delta if i < num_u_verts else 0.0
            v = j * v_delta if j < num_v_verts else 1.0
            return Gf.Vec2f(u, v)
        # Tube body: one quad per (u, v) patch, with radial normals.
        for j in range(v_patches):
            for i in range(u_patches):
                vindex00 = calc_index(i, j)
                vindex10 = calc_index(i + 1, j)
                vindex11 = calc_index(i + 1, j + 1)
                vindex01 = calc_index(i, j + 1)
                uv00 = get_uv(i, j)
                uv10 = get_uv(i + 1, j)
                uv11 = get_uv(i + 1, j + 1)
                uv01 = get_uv(i, j + 1)
                p00 = points[vindex00]
                p10 = points[vindex10]
                p11 = points[vindex11]
                p01 = points[vindex01]
                # Right-hand order
                if up_axis == "Y":
                    sts.extend([uv00, uv01, uv11, uv10])
                    face_indices.extend((vindex00, vindex01, vindex11, vindex10))
                    # Radial normal: drop the axial (Y) component of the point.
                    normals.append(Gf.Vec3f(p00[0], 0, p00[2]))
                    normals.append(Gf.Vec3f(p01[0], 0, p01[2]))
                    normals.append(Gf.Vec3f(p11[0], 0, p11[2]))
                    normals.append(Gf.Vec3f(p10[0], 0, p10[2]))
                else:
                    sts.extend([inverse_u(uv00), inverse_u(uv10), inverse_u(uv11), inverse_u(uv01)])
                    face_indices.extend((vindex00, vindex10, vindex11, vindex01))
                    normals.append(Gf.Vec3f(p00[0], p00[1], 0))
                    normals.append(Gf.Vec3f(p10[0], p10[1], 0))
                    normals.append(Gf.Vec3f(p11[0], p11[1], 0))
                    normals.append(Gf.Vec3f(p01[0], p01[1], 0))
                face_vertex_counts.append(4)
        # Add hat
        if up_axis == "Y":
            bottom_center_point = Gf.Vec3f(0, -1, 0)
            top_center_point = Gf.Vec3f(0, 1, 0)
        else:
            bottom_center_point = Gf.Vec3f(0, 0, -1)
            top_center_point = Gf.Vec3f(0, 0, 1)
        def add_hat(center_point, rim_points_start_index, w_patches, invert_wind_order=False):
            """Append a cap disk, welding its outer ring to an existing tube rim.

            `rim_points_start_index` is where that rim ring starts in `points`;
            `invert_wind_order` flips the cap so the bottom faces outward.
            Assumes generate_disk returns its first num_u_verts points as the
            outer rim ring, matching the tube's rim (see utils.generate_disk).
            """
            bt_points, _, bt_sts, bt_face_indices, bt_face_vertex_counts = generate_disk(
                center_point, u_patches, w_patches, origin, half_scale, up_axis
            )
            total_points = len(points)
            # Skips shared points
            points.extend(bt_points[num_u_verts:])
            if invert_wind_order:
                modify_winding_order(bt_face_vertex_counts, bt_sts)
                for st in bt_sts:
                    sts.append(inverse_v(st))
            else:
                sts.extend(bt_sts)
            face_vertex_counts.extend(bt_face_vertex_counts)
            # Caps are flat: every face-vertex normal is the axis direction,
            # which for a unit cylinder equals the cap's center point.
            normals.extend([center_point] * len(bt_face_indices))
            # Remapping cap points
            for i, index in enumerate(bt_face_indices):
                if index >= num_u_verts:
                    # Interior disk point: shift past the skipped shared ring.
                    bt_face_indices[i] += total_points - num_u_verts
                else:
                    # Rim point: reuse the tube's existing ring.
                    bt_face_indices[i] += rim_points_start_index
            if invert_wind_order:
                modify_winding_order(bt_face_vertex_counts, bt_face_indices)
            face_indices.extend(bt_face_indices)
        # The top rim is the last ring appended by the body loop above.
        top_hat_start_index = len(points) - num_u_verts
        # Add bottom hat to close shape
        add_hat(bottom_center_point, 0, w_patches, True)
        # Add top hat to close shape
        add_hat(top_center_point, top_hat_start_index, w_patches)
        return points, normals, sts, face_indices, face_vertex_counts
    @staticmethod
    def build_setting_ui():
        """Build the sliders that edit this shape's persistent settings."""
        from omni import ui
        CylinderEvaluator._half_scale_slider = build_int_slider(
            "Object Half Scale", CylinderEvaluator.SETTING_OBJECT_HALF_SCALE, 50, 10, 1000
        )
        ui.Spacer(height=5)
        CylinderEvaluator._u_scale_slider = build_int_slider(
            "U Verts Scale", CylinderEvaluator.SETTING_U_SCALE, 1, 1, 10,
            "Tessellation Level in Horizontal Direction"
        )
        ui.Spacer(height=5)
        CylinderEvaluator._v_scale_slider = build_int_slider(
            "V Verts Scale", CylinderEvaluator.SETTING_V_SCALE, 1, 1, 10,
            "Tessellation Level in Vertical Direction"
        )
        ui.Spacer(height=5)
        CylinderEvaluator._w_scale_slider = build_int_slider(
            "W Verts Scale", CylinderEvaluator.SETTING_W_SCALE, 1, 1, 10,
            "Tessellation Level of Bottom and Top Caps"
        )
    @staticmethod
    def reset_setting():
        """Reset the UI sliders (and hence the settings) to their defaults."""
        CylinderEvaluator._half_scale_slider.set_value(CylinderEvaluator.get_default_half_scale())
        CylinderEvaluator._u_scale_slider.set_value(1)
        CylinderEvaluator._v_scale_slider.set_value(1)
        CylinderEvaluator._w_scale_slider.set_value(1)
    @staticmethod
    def get_default_half_scale():
        """Return the persisted default half scale (50 if never set)."""
        half_scale = get_int_setting(CylinderEvaluator.SETTING_OBJECT_HALF_SCALE, 50)
        return half_scale
| 8,285 | Python | 40.019802 | 100 | 0.555462 |
omniverse-code/kit/exts/omni.kit.primitive.mesh/omni/kit/primitive/mesh/evaluators/sphere.py | import math
from .utils import get_int_setting, build_int_slider
from .utils import transform_point
from .abstract_shape_evaluator import AbstractShapeEvaluator
from pxr import Gf
class SphereEvaluator(AbstractShapeEvaluator):
    """Generates a UV sphere mesh (latitude/longitude tessellation).

    The two poles are single shared vertices; the adjacent rows emit
    triangles instead of quads.
    """
    # NOTE(review): "shpere" is a typo, but the key is persisted user state —
    # renaming it would orphan existing saved settings, so it stays.
    SETTING_OBJECT_HALF_SCALE = "/persistent/app/mesh_generator/shapes/shpere/object_half_scale"
    SETTING_U_SCALE = "/persistent/app/mesh_generator/shapes/sphere/u_scale"
    SETTING_V_SCALE = "/persistent/app/mesh_generator/shapes/sphere/v_scale"
    def __init__(self, attributes: dict):
        super().__init__(attributes)
    def _eval(self, u, v, up_axis):
        """Map (u, v) in [0, 1]^2 onto the unit sphere.

        u is the longitude fraction; v runs from the bottom pole (v=0) to
        the top pole (v=1). Returns a Gf.Vec3f on the unit sphere.
        """
        theta = u * 2.0 * math.pi
        phi = (v - 0.5) * math.pi
        cos_phi = math.cos(phi)
        if up_axis == "Y":
            x = cos_phi * math.cos(theta)
            y = math.sin(phi)
            z = cos_phi * math.sin(theta)
        else:
            x = cos_phi * math.cos(theta)
            y = cos_phi * math.sin(theta)
            z = math.sin(phi)
        return Gf.Vec3f(x, y, z)
    def eval(self, **kwargs):
        """Build the sphere geometry.

        Recognized kwargs: half_scale, u_verts_scale, v_verts_scale,
        up_axis ("Y" default), u_patches (longitude), v_patches (latitude).
        Missing or non-positive values fall back to persistent settings.

        Returns:
            (points, normals, sts, face_indices, face_vertex_counts);
            `normals` and `sts` are face-varying.
        """
        half_scale = kwargs.get("half_scale", None)
        if half_scale is None or half_scale <= 0:
            half_scale = self.get_default_half_scale()
        num_u_verts_scale = kwargs.get("u_verts_scale", None)
        if num_u_verts_scale is None or num_u_verts_scale <= 0:
            num_u_verts_scale = get_int_setting(SphereEvaluator.SETTING_U_SCALE, 1)
        num_v_verts_scale = kwargs.get("v_verts_scale", None)
        if num_v_verts_scale is None or num_v_verts_scale <= 0:
            num_v_verts_scale = get_int_setting(SphereEvaluator.SETTING_V_SCALE, 1)
        up_axis = kwargs.get("up_axis", "Y")
        origin = Gf.Vec3f(0.0)
        u_patches = kwargs.get("u_patches", 32)
        v_patches = kwargs.get("v_patches", 16)
        num_u_verts_scale = max(num_u_verts_scale, 1)
        num_v_verts_scale = max(num_v_verts_scale, 1)
        u_patches = u_patches * num_u_verts_scale
        v_patches = v_patches * num_v_verts_scale
        # At least 3 longitudes and 2 latitudes for a non-degenerate sphere.
        u_patches = max(int(u_patches), 3)
        v_patches = max(int(v_patches), 2)
        u_delta = 1.0 / u_patches
        v_delta = 1.0 / v_patches
        num_u_verts = u_patches
        num_v_verts = v_patches + 1
        points = []
        normals = []
        sts = []
        face_indices = []
        face_vertex_counts = []
        # Single shared vertex at the bottom pole (index 0).
        if up_axis == "Y":
            bottom_point = Gf.Vec3f(0.0, -1.0, 0.0)
        else:
            bottom_point = Gf.Vec3f(0.0, 0.0, -1.0)
        point = transform_point(bottom_point, origin, half_scale)
        points.append(point)
        # Interior latitude rings (excluding both poles).
        for j in range(1, num_v_verts - 1):
            v = j * v_delta
            for i in range(num_u_verts):
                u = i * u_delta
                point = self._eval(u, v, up_axis)
                point = transform_point(point, origin, half_scale)
                points.append(Gf.Vec3f(point))
        # Single shared vertex at the top pole (last index).
        if up_axis == "Y":
            top_point = Gf.Vec3f(0.0, 1.0, 0.0)
        else:
            top_point = Gf.Vec3f(0.0, 0.0, 1.0)
        point = transform_point(top_point, origin, half_scale)
        points.append(point)
        def calc_index(i, j):
            # Rows 0 and num_v_verts-1 collapse onto the pole vertices;
            # i wraps around the ring to close the longitude seam.
            if j == 0:
                return 0
            elif j == num_v_verts - 1:
                return len(points) - 1
            else:
                i = i if i < num_u_verts else 0
                return (j - 1) * num_u_verts + i + 1
        def get_uv(i, j):
            if up_axis == "Y":
                u = 1 - i * u_delta
                v = j * v_delta
            else:
                u = i * u_delta
                v = j * v_delta
            return Gf.Vec2f(u, v)
        # Generate body
        for j in range(v_patches):
            for i in range(u_patches):
                # Index 0 is the bottom hat point
                vindex00 = calc_index(i, j)
                vindex10 = calc_index(i + 1, j)
                vindex11 = calc_index(i + 1, j + 1)
                vindex01 = calc_index(i, j + 1)
                st00 = get_uv(i, j)
                st10 = get_uv(i + 1, j)
                st11 = get_uv(i + 1, j + 1)
                st01 = get_uv(i, j + 1)
                # Unit-sphere points double as normals (before scaling they
                # equal the surface direction from the center).
                p0 = points[vindex00]
                p1 = points[vindex10]
                p2 = points[vindex11]
                p3 = points[vindex01]
                # Use face varying uv
                # Degenerate quads at the poles (two corners sharing a pole
                # vertex) are emitted as triangles instead.
                if up_axis == "Y":
                    if vindex11 == vindex01:
                        sts.extend([st00, st01, st10])
                        face_indices.extend((vindex00, vindex01, vindex10))
                        face_vertex_counts.append(3)
                        normals.extend([p0, p3, p1])
                    elif vindex00 == vindex10:
                        sts.extend([st00, st01, st11])
                        face_indices.extend((vindex00, vindex01, vindex11))
                        face_vertex_counts.append(3)
                        normals.extend([p0, p3, p2])
                    else:
                        sts.extend([st00, st01, st11, st10])
                        face_indices.extend((vindex00, vindex01, vindex11, vindex10))
                        face_vertex_counts.append(4)
                        normals.extend([p0, p3, p2, p1])
                else:
                    if vindex11 == vindex01:
                        sts.extend([st00, st10, st01])
                        face_indices.extend((vindex00, vindex10, vindex01))
                        face_vertex_counts.append(3)
                        normals.extend([p0, p1, p3])
                    elif vindex00 == vindex10:
                        sts.extend([st00, st11, st01])
                        face_indices.extend((vindex00, vindex11, vindex01))
                        face_vertex_counts.append(3)
                        normals.extend([p0, p2, p3])
                    else:
                        sts.extend([st00, st10, st11, st01])
                        face_indices.extend((vindex00, vindex10, vindex11, vindex01))
                        face_vertex_counts.append(4)
                        normals.extend([p0, p1, p2, p3])
        return points, normals, sts, face_indices, face_vertex_counts
    @staticmethod
    def build_setting_ui():
        """Build the sliders that edit this shape's persistent settings."""
        from omni import ui
        SphereEvaluator._half_scale_slider = build_int_slider(
            "Object Half Scale", SphereEvaluator.SETTING_OBJECT_HALF_SCALE, 50, 10, 1000
        )
        ui.Spacer(height=5)
        SphereEvaluator._u_scale_slider = build_int_slider(
            "U Verts Scale", SphereEvaluator.SETTING_U_SCALE, 1, 1, 10
        )
        ui.Spacer(height=5)
        SphereEvaluator._v_scale_slider = build_int_slider(
            "V Verts Scale", SphereEvaluator.SETTING_V_SCALE, 1, 1, 10
        )
    @staticmethod
    def reset_setting():
        """Reset the UI sliders (and hence the settings) to their defaults."""
        SphereEvaluator._half_scale_slider.set_value(SphereEvaluator.get_default_half_scale())
        SphereEvaluator._u_scale_slider.set_value(1)
        SphereEvaluator._v_scale_slider.set_value(1)
    @staticmethod
    def get_default_half_scale():
        """Return the persisted default half scale (50 if never set)."""
        half_scale = get_int_setting(SphereEvaluator.SETTING_OBJECT_HALF_SCALE, 50)
        return half_scale
| 7,142 | Python | 36.397906 | 96 | 0.506301 |
omniverse-code/kit/exts/omni.kit.primitive.mesh/omni/kit/primitive/mesh/evaluators/cube.py | from .utils import get_int_setting, build_int_slider, generate_plane, modify_winding_order
from .abstract_shape_evaluator import AbstractShapeEvaluator
from pxr import Gf
class CubeEvaluator(AbstractShapeEvaluator):
    """Generates a cube mesh by assembling six tessellated planes.

    Each axis-aligned face pair is produced from one `generate_plane` call;
    the opposite face is the same grid translated across the cube, with its
    winding reversed so both faces point outward. Shared edge/corner vertices
    are welded at the end.
    """
    # Persistent settings backing the generator UI sliders.
    SETTING_OBJECT_HALF_SCALE = "/persistent/app/mesh_generator/shapes/cube/object_half_scale"
    SETTING_U_SCALE = "/persistent/app/mesh_generator/shapes/cube/u_scale"
    SETTING_V_SCALE = "/persistent/app/mesh_generator/shapes/cube/v_scale"
    SETTING_W_SCALE = "/persistent/app/mesh_generator/shapes/cube/w_scale"
    def __init__(self, attributes: dict):
        super().__init__(attributes)
    def eval(self, **kwargs):
        """Build the cube geometry.

        Recognized kwargs: half_scale, u_verts_scale, v_verts_scale,
        w_verts_scale, up_axis (unused by the cube itself), u_patches,
        v_patches, w_patches (tessellation along X, Y, Z respectively).
        Missing or non-positive values fall back to persistent settings.

        Returns:
            (points, normals, sts, face_indices, face_vertex_counts);
            `normals` and `sts` are face-varying.
        """
        half_scale = kwargs.get("half_scale", None)
        if half_scale is None or half_scale <= 0:
            half_scale = self.get_default_half_scale()
        num_u_verts_scale = kwargs.get("u_verts_scale", None)
        if num_u_verts_scale is None or num_u_verts_scale <= 0:
            num_u_verts_scale = get_int_setting(CubeEvaluator.SETTING_U_SCALE, 1)
        num_v_verts_scale = kwargs.get("v_verts_scale", None)
        if num_v_verts_scale is None or num_v_verts_scale <= 0:
            num_v_verts_scale = get_int_setting(CubeEvaluator.SETTING_V_SCALE, 1)
        num_w_verts_scale = kwargs.get("w_verts_scale", None)
        if num_w_verts_scale is None or num_w_verts_scale <= 0:
            num_w_verts_scale = get_int_setting(CubeEvaluator.SETTING_W_SCALE, 1)
        up_axis = kwargs.get("up_axis", "Y")
        origin = Gf.Vec3f(0.0)
        u_patches = kwargs.get("u_patches", 1)
        v_patches = kwargs.get("v_patches", 1)
        w_patches = kwargs.get("w_patches", 1)
        u_patches = u_patches * num_u_verts_scale
        v_patches = v_patches * num_v_verts_scale
        w_patches = w_patches * num_w_verts_scale
        u_patches = max(int(u_patches), 1)
        v_patches = max(int(v_patches), 1)
        w_patches = max(int(w_patches), 1)
        [x, y, z] = origin
        # One source plane per face pair, positioned on the +Z, -Y, -X sides.
        (
            xy_plane_points, xy_plane_normals, xy_plane_sts,
            xy_plane_face_indices, xy_plane_face_vertex_counts
        ) = generate_plane(Gf.Vec3f(x, y, z + half_scale), half_scale, u_patches, v_patches, "Z")
        (
            xz_plane_points, xz_plane_normals, xz_plane_sts,
            xz_plane_face_indices, xz_plane_face_vertex_counts
        ) = generate_plane(Gf.Vec3f(x, y - half_scale, z), half_scale, u_patches, w_patches, "Y")
        (
            yz_plane_points, yz_plane_normals, yz_plane_sts,
            yz_plane_face_indices, yz_plane_face_vertex_counts
        ) = generate_plane(Gf.Vec3f(x - half_scale, y, z), half_scale, v_patches, w_patches, "X")
        points = []
        normals = []
        sts = []
        face_indices = []
        face_vertex_counts = []
        # XY planes
        # Front (+Z) face: used as generated.
        points.extend(xy_plane_points)
        normals.extend([Gf.Vec3f(0, 0, 1)] * len(xy_plane_normals))
        sts.extend(xy_plane_sts)
        face_indices.extend(xy_plane_face_indices)
        face_vertex_counts.extend(xy_plane_face_vertex_counts)
        # Back (-Z) face: translate across the cube, reverse winding and
        # mirror U so the texture reads correctly from outside.
        total_indices = len(points)
        plane_points = [point + Gf.Vec3f(0, 0, -2.0 * half_scale) for point in xy_plane_points]
        points.extend(plane_points)
        normals.extend([Gf.Vec3f(0, 0, -1)] * len(xy_plane_normals))
        modify_winding_order(xy_plane_face_vertex_counts, xy_plane_sts)
        plane_sts = [Gf.Vec2f(1 - st[0], st[1]) for st in xy_plane_sts]
        sts.extend(plane_sts)
        plane_face_indices = [index + total_indices for index in xy_plane_face_indices]
        modify_winding_order(xy_plane_face_vertex_counts, plane_face_indices)
        face_indices.extend(plane_face_indices)
        face_vertex_counts.extend(xy_plane_face_vertex_counts)
        # xz planes
        # Top (+Y) face: translate the -Y source plane up.
        total_indices = len(points)
        plane_points = [point + Gf.Vec3f(0, 2.0 * half_scale, 0) for point in xz_plane_points]
        points.extend(plane_points)
        normals.extend([Gf.Vec3f(0, 1, 0)] * len(xz_plane_normals))
        sts.extend(xz_plane_sts)
        plane_face_indices = [index + total_indices for index in xz_plane_face_indices]
        face_indices.extend(plane_face_indices)
        face_vertex_counts.extend(xz_plane_face_vertex_counts)
        # Bottom (-Y) face: as generated, winding reversed, V mirrored.
        total_indices = len(points)
        points.extend(xz_plane_points)
        normals.extend([Gf.Vec3f(0, -1, 0)] * len(xz_plane_normals))
        modify_winding_order(xz_plane_face_vertex_counts, xz_plane_sts)
        plane_sts = [Gf.Vec2f(st[0], 1 - st[1]) for st in xz_plane_sts]
        sts.extend(plane_sts)
        plane_face_indices = [index + total_indices for index in xz_plane_face_indices]
        modify_winding_order(xz_plane_face_vertex_counts, plane_face_indices)
        face_indices.extend(plane_face_indices)
        face_vertex_counts.extend(xz_plane_face_vertex_counts)
        # yz planes
        # Left (-X) face: as generated, with UV axes swapped.
        total_indices = len(points)
        points.extend(yz_plane_points)
        normals.extend([Gf.Vec3f(-1, 0, 0)] * len(yz_plane_normals))
        plane_sts = [Gf.Vec2f(st[1], st[0]) for st in yz_plane_sts]
        sts.extend(plane_sts)
        plane_face_indices = [index + total_indices for index in yz_plane_face_indices]
        face_indices.extend(plane_face_indices)
        face_vertex_counts.extend(yz_plane_face_vertex_counts)
        # Right (+X) face: translated, winding reversed, UVs swapped+mirrored.
        total_indices = len(points)
        plane_points = [point + Gf.Vec3f(2.0 * half_scale, 0, 0) for point in yz_plane_points]
        points.extend(plane_points)
        normals.extend([Gf.Vec3f(1, 0, 0)] * len(yz_plane_normals))
        modify_winding_order(yz_plane_face_vertex_counts, yz_plane_sts)
        plane_sts = [Gf.Vec2f(1 - st[1], st[0]) for st in yz_plane_sts]
        sts.extend(plane_sts)
        plane_face_indices = [index + total_indices for index in yz_plane_face_indices]
        modify_winding_order(yz_plane_face_vertex_counts, plane_face_indices)
        face_indices.extend(plane_face_indices)
        face_vertex_counts.extend(yz_plane_face_vertex_counts)
        # Welds the edges of cube
        # Pairwise O(n^2) weld of coincident points (within 1e-6); fine for
        # typical tessellation counts but grows quickly with patch density.
        keep = [True] * len(points)
        index_remap = [-1] * len(points)
        keep_points = []
        for i in range(0, len(points)):
            if not keep[i]:
                continue
            keep_points.append(points[i])
            index_remap[i] = len(keep_points) - 1
            for j in range(i + 1, len(points)):
                if Gf.IsClose(points[j], points[i], 1e-6):
                    keep[j] = False
                    index_remap[j] = len(keep_points) - 1
        for i in range(len(face_indices)):
            face_indices[i] = index_remap[face_indices[i]]
        return keep_points, normals, sts, face_indices, face_vertex_counts
    @staticmethod
    def build_setting_ui():
        """Build the sliders that edit this shape's persistent settings."""
        from omni import ui
        CubeEvaluator._half_scale_slider = build_int_slider(
            "Object Half Scale", CubeEvaluator.SETTING_OBJECT_HALF_SCALE, 50, 10, 1000
        )
        ui.Spacer(height=5)
        CubeEvaluator._u_scale_slider = build_int_slider(
            "U Verts Scale", CubeEvaluator.SETTING_U_SCALE, 1, 1, 10,
            "Tessellation Level along X Axis"
        )
        ui.Spacer(height=5)
        CubeEvaluator._v_scale_slider = build_int_slider(
            "V Verts Scale", CubeEvaluator.SETTING_V_SCALE, 1, 1, 10,
            "Tessellation Level along Y Axis"
        )
        ui.Spacer(height=5)
        CubeEvaluator._w_scale_slider = build_int_slider(
            "W Verts Scale", CubeEvaluator.SETTING_W_SCALE, 1, 1, 10,
            "Tessellation Level along Z Axis"
        )
    @staticmethod
    def reset_setting():
        """Reset the UI sliders (and hence the settings) to their defaults."""
        CubeEvaluator._half_scale_slider.set_value(CubeEvaluator.get_default_half_scale())
        CubeEvaluator._u_scale_slider.set_value(1)
        CubeEvaluator._v_scale_slider.set_value(1)
        CubeEvaluator._w_scale_slider.set_value(1)
    @staticmethod
    def get_default_half_scale():
        """Return the persisted default half scale (50 if never set)."""
        half_scale = get_int_setting(CubeEvaluator.SETTING_OBJECT_HALF_SCALE, 50)
        return half_scale
| 7,997 | Python | 41.770053 | 97 | 0.614731 |
omniverse-code/kit/exts/omni.kit.primitive.mesh/omni/kit/primitive/mesh/evaluators/disk.py | from .utils import get_int_setting, build_int_slider
from .utils import generate_disk
from .abstract_shape_evaluator import AbstractShapeEvaluator
from pxr import Gf
class DiskEvaluator(AbstractShapeEvaluator):
    """Evaluator that produces a flat, tessellated disk mesh."""

    # Persistent settings backing the generator UI sliders.
    SETTING_OBJECT_HALF_SCALE = "/persistent/app/mesh_generator/shapes/disk/object_half_scale"
    SETTING_U_SCALE = "/persistent/app/mesh_generator/shapes/disk/u_scale"
    SETTING_V_SCALE = "/persistent/app/mesh_generator/shapes/disk/v_scale"

    def __init__(self, attributes: dict):
        super().__init__(attributes)

    def eval(self, **kwargs):
        """Build the disk geometry.

        The disk is approximated by rings of quads: `u_patches` is the
        number of segments around the circle, `v_patches` the number of
        concentric rings from center to rim. Missing or non-positive
        overrides fall back to the persistent settings.

        Returns:
            (points, normals, sts, face_indices, face_vertex_counts).
        """
        def _positive(value, fallback):
            # Treat a missing or non-positive override as "use the fallback".
            return fallback() if value is None or value <= 0 else value

        half = _positive(kwargs.get("half_scale", None), self.get_default_half_scale)
        u_scale = max(
            _positive(
                kwargs.get("u_verts_scale", None),
                lambda: get_int_setting(DiskEvaluator.SETTING_U_SCALE, 1),
            ),
            1,
        )
        v_scale = max(
            _positive(
                kwargs.get("v_verts_scale", None),
                lambda: get_int_setting(DiskEvaluator.SETTING_V_SCALE, 1),
            ),
            1,
        )
        # At least 3 rim segments so the disk is a valid polygon.
        u_patches = max(int(kwargs.get("u_patches", 32) * u_scale), 3)
        v_patches = max(int(kwargs.get("v_patches", 1) * v_scale), 1)
        return generate_disk(
            Gf.Vec3f(0.0),
            u_patches,
            v_patches,
            Gf.Vec3f(0.0),
            half,
            kwargs.get("up_axis", "Y"),
        )

    @staticmethod
    def build_setting_ui():
        from omni import ui
        # (attribute name, label, setting path, default, min, max)
        specs = (
            ("_half_scale_slider", "Object Half Scale", DiskEvaluator.SETTING_OBJECT_HALF_SCALE, 50, 10, 1000),
            ("_u_scale_slider", "U Verts Scale", DiskEvaluator.SETTING_U_SCALE, 1, 1, 10),
            ("_v_scale_slider", "V Verts Scale", DiskEvaluator.SETTING_V_SCALE, 1, 1, 10),
        )
        for pos, (attr, label, setting, default, lo, hi) in enumerate(specs):
            if pos:
                ui.Spacer(height=5)
            setattr(DiskEvaluator, attr, build_int_slider(label, setting, default, lo, hi))

    @staticmethod
    def reset_setting():
        DiskEvaluator._half_scale_slider.set_value(DiskEvaluator.get_default_half_scale())
        for slider in (DiskEvaluator._u_scale_slider, DiskEvaluator._v_scale_slider):
            slider.set_value(1)

    @staticmethod
    def get_default_half_scale():
        return get_int_setting(DiskEvaluator.SETTING_OBJECT_HALF_SCALE, 50)
| 2,917 | Python | 39.527777 | 114 | 0.649983 |
omniverse-code/kit/exts/omni.kit.primitive.mesh/omni/kit/primitive/mesh/evaluators/torus.py | import math
from .utils import get_int_setting, build_int_slider
from .utils import transform_point
from .abstract_shape_evaluator import AbstractShapeEvaluator
from pxr import Gf
class TorusEvaluator(AbstractShapeEvaluator):
    """Generates a torus mesh with smooth per-face-vertex normals."""
    # Persistent settings backing the generator UI sliders.
    SETTING_OBJECT_HALF_SCALE = "/persistent/app/mesh_generator/shapes/torus/object_half_scale"
    SETTING_U_SCALE = "/persistent/app/mesh_generator/shapes/torus/u_scale"
    SETTING_V_SCALE = "/persistent/app/mesh_generator/shapes/torus/v_scale"
    def __init__(self, attributes: dict):
        super().__init__(attributes)
        # hole_radius: distance from torus center to the tube's centerline;
        # tube_radius: radius of the tube's circular cross-section.
        self.hole_radius = 1.0
        self.tube_radius = 0.5
    def _eval(self, up_axis, u, v):
        """Map (u, v) in [0, 1]^2 onto the torus surface.

        u sweeps around the main ring, v around the tube cross-section.
        Returns (point, unit_normal) for the given parameters.
        """
        theta = u * 2.0 * math.pi
        # Offset phi by -pi/2 so v=0 starts at the bottom of the tube.
        phi = v * 2.0 * math.pi - 0.5 * math.pi
        rad_cos_phi = self.tube_radius * math.cos(phi)
        cos_theta = math.cos(theta)
        sin_phi = math.sin(phi)
        sin_theta = math.sin(theta)
        x = (self.hole_radius + rad_cos_phi) * cos_theta
        # (nx, ny, nz) is the point on the tube centerline at this theta.
        nx = self.hole_radius * cos_theta
        if up_axis == "Y":
            y = self.tube_radius * sin_phi
            z = (self.hole_radius + rad_cos_phi) * sin_theta
            ny = 0
            nz = self.hole_radius * sin_theta
        else:
            y = (self.hole_radius + rad_cos_phi) * sin_theta
            z = self.tube_radius * sin_phi
            ny = self.hole_radius * sin_theta
            nz = 0
        point = Gf.Vec3f(x, y, z)
        # construct the normal by creating a vector from the center point of the tube to the surface
        normal = Gf.Vec3f(x - nx, y - ny, z - nz)
        normal = normal.GetNormalized()
        return point, normal
    def eval(self, **kwargs):
        """Build the torus geometry.

        Recognized kwargs: half_scale, u_verts_scale, v_verts_scale,
        up_axis ("Y" default), u_patches (around the main ring), v_patches
        (around the tube). Missing or non-positive values fall back to the
        persistent settings.

        Returns:
            (points, normals, sts, face_indices, face_vertex_counts);
            `normals` and `sts` are face-varying.
        """
        half_scale = kwargs.get("half_scale", None)
        if half_scale is None or half_scale <= 0:
            half_scale = self.get_default_half_scale()
        num_u_verts_scale = kwargs.get("u_verts_scale", None)
        if num_u_verts_scale is None or num_u_verts_scale <= 0:
            num_u_verts_scale = get_int_setting(TorusEvaluator.SETTING_U_SCALE, 1)
        num_v_verts_scale = kwargs.get("v_verts_scale", None)
        if num_v_verts_scale is None or num_v_verts_scale <= 0:
            num_v_verts_scale = get_int_setting(TorusEvaluator.SETTING_V_SCALE, 1)
        up_axis = kwargs.get("up_axis", "Y")
        origin = Gf.Vec3f(0.0)
        u_patches = kwargs.get("u_patches", 32)
        v_patches = kwargs.get("v_patches", 32)
        num_u_verts_scale = max(num_u_verts_scale, 1)
        num_v_verts_scale = max(num_v_verts_scale, 1)
        u_patches = u_patches * num_u_verts_scale
        v_patches = v_patches * num_v_verts_scale
        # Both directions are closed rings, so each needs at least 3 patches.
        u_patches = max(int(u_patches), 3)
        v_patches = max(int(v_patches), 3)
        u_delta = 1.0 / u_patches
        v_delta = 1.0 / v_patches
        # Closed in both directions: the last patch reuses the first ring.
        num_u_verts = u_patches
        num_v_verts = v_patches
        points = []
        point_normals = []
        sts = []
        face_indices = []
        face_vertex_counts = []
        for j in range(num_v_verts):
            v = j * v_delta
            for i in range(num_u_verts):
                u = i * u_delta
                point, point_normal = self._eval(up_axis, u, v)
                point = transform_point(point, origin, half_scale)
                points.append(point)
                point_normals.append(point_normal)
        def calc_index(i, j):
            # Wrap both directions to close the torus seam.
            ii = i if i < num_u_verts else 0
            jj = j if j < num_v_verts else 0
            return jj * num_u_verts + ii
        def get_uv(i, j):
            if up_axis == "Y":
                # Mirror U for Y-up so the texture reads correctly outside.
                u = 1 - i * u_delta if i < num_u_verts else 0.0
            else:
                u = i * u_delta if i < num_u_verts else 1.0
            v = j * v_delta if j < num_v_verts else 1.0
            return Gf.Vec2f(u, v)
        # Last patch from last vert to first vert to close shape
        normals = []
        for j in range(v_patches):
            for i in range(u_patches):
                vindex00 = calc_index(i, j)
                vindex10 = calc_index(i + 1, j)
                vindex11 = calc_index(i + 1, j + 1)
                vindex01 = calc_index(i, j + 1)
                # Use face varying uv
                face_vertex_counts.append(4)
                # Winding differs per up axis so faces stay outward-facing.
                if up_axis == "Y":
                    sts.append(get_uv(i, j))
                    sts.append(get_uv(i, j + 1))
                    sts.append(get_uv(i + 1, j + 1))
                    sts.append(get_uv(i + 1, j))
                    face_indices.extend((vindex00, vindex01, vindex11, vindex10))
                    normals.extend(
                        [
                            point_normals[vindex00],
                            point_normals[vindex01],
                            point_normals[vindex11],
                            point_normals[vindex10],
                        ]
                    )
                else:
                    sts.append(get_uv(i, j))
                    sts.append(get_uv(i + 1, j))
                    sts.append(get_uv(i + 1, j + 1))
                    sts.append(get_uv(i, j + 1))
                    face_indices.extend((vindex00, vindex10, vindex11, vindex01))
                    normals.extend(
                        [
                            point_normals[vindex00],
                            point_normals[vindex10],
                            point_normals[vindex11],
                            point_normals[vindex01],
                        ]
                    )
        return points, normals, sts, face_indices, face_vertex_counts
    @staticmethod
    def build_setting_ui():
        """Build the sliders that edit this shape's persistent settings."""
        from omni import ui
        TorusEvaluator._half_scale_slider = build_int_slider(
            "Object Half Scale", TorusEvaluator.SETTING_OBJECT_HALF_SCALE, 50, 10, 1000
        )
        ui.Spacer(height=5)
        TorusEvaluator._u_scale_slider = build_int_slider("U Verts Scale", TorusEvaluator.SETTING_U_SCALE, 1, 1, 10)
        ui.Spacer(height=5)
        TorusEvaluator._v_scale_slider = build_int_slider("V Verts Scale", TorusEvaluator.SETTING_V_SCALE, 1, 1, 10)
    @staticmethod
    def reset_setting():
        """Reset the UI sliders (and hence the settings) to their defaults."""
        TorusEvaluator._half_scale_slider.set_value(TorusEvaluator.get_default_half_scale())
        TorusEvaluator._u_scale_slider.set_value(1)
        TorusEvaluator._v_scale_slider.set_value(1)
    @staticmethod
    def get_default_half_scale():
        """Return the persisted default half scale (50 if never set)."""
        half_scale = get_int_setting(TorusEvaluator.SETTING_OBJECT_HALF_SCALE, 50)
        return half_scale
| 6,485 | Python | 36.275862 | 116 | 0.523516 |
omniverse-code/kit/exts/omni.kit.primitive.mesh/omni/kit/primitive/mesh/tests/__init__.py | from .test_mesh_prims import *
| 31 | Python | 14.999993 | 30 | 0.741935 |
omniverse-code/kit/exts/omni.kit.primitive.mesh/omni/kit/primitive/mesh/tests/test_mesh_prims.py | import omni.kit.test
import omni.usd
import omni.kit.app
import omni.kit.primitive.mesh
import omni.kit.commands
import omni.kit.actions.core
from pathlib import Path
from pxr import Gf, Kind, Sdf, Usd, UsdGeom, UsdShade
EXTENSION_FOLDER_PATH = Path(omni.kit.app.get_app().get_extension_manager().get_extension_path_by_module(__name__))
TEST_DATA_PATH = EXTENSION_FOLDER_PATH.joinpath("data/tests")
# NOTE: those tests belong to omni.kit.primitive.mesh extension.
class TestMeshPrims(omni.kit.test.AsyncTestCase):
    """Tests for the mesh generator prims provided by omni.kit.primitive.mesh."""

    def _verify_undo_redo(self, stage, path, check_exist):
        # Shared verification used by several tests: the prim exists, undo
        # removes it, redo restores it, and a final undo removes it again.
        def check_does_not_exist():
            self.assertFalse(stage.GetPrimAtPath(path))

        check_exist()
        omni.kit.undo.undo()
        check_does_not_exist()
        omni.kit.undo.redo()
        check_exist()
        omni.kit.undo.undo()
        check_does_not_exist()

    async def test_tessellation_params(self):
        """Generate prims with varied tessellation params and compare against golden USD data."""
        test_data = {
            "Cube": [
                {
                    "params": {"half_scale": 100, "u_verts_scale": 2, "v_verts_scale": 1, "w_verts_scale": 1},
                },
                {
                    "params": {"half_scale": 200, "u_verts_scale": 2, "v_verts_scale": 2, "w_verts_scale": 1},
                },
                {
                    "params": {"half_scale": 400, "u_verts_scale": 2, "v_verts_scale": 2, "w_verts_scale": 2},
                },
                {
                    "params": {
                        "half_scale": 100, "u_verts_scale": 1, "v_verts_scale": 1, "w_verts_scale": 1,
                        "u_patches": 2, "v_patches": 2, "w_patches": 2
                    },
                },
            ],
            "Cone": [
                {
                    "params": {"half_scale": 100, "u_verts_scale": 2, "v_verts_scale": 1, "w_verts_scale": 1},
                },
                {
                    "params": {"half_scale": 200, "u_verts_scale": 2, "v_verts_scale": 2, "w_verts_scale": 1},
                },
                {
                    "params": {"half_scale": 400, "u_verts_scale": 2, "v_verts_scale": 2, "w_verts_scale": 2},
                },
                {
                    "params": {
                        "half_scale": 100, "u_verts_scale": 1, "v_verts_scale": 1, "w_verts_scale": 1,
                        "u_patches": 2, "v_patches": 2, "w_patches": 2
                    },
                },
            ],
            "Cylinder": [
                {
                    "params": {"half_scale": 100, "u_verts_scale": 2, "v_verts_scale": 1, "w_verts_scale": 1},
                },
                {
                    "params": {"half_scale": 200, "u_verts_scale": 2, "v_verts_scale": 2, "w_verts_scale": 1},
                },
                {
                    "params": {"half_scale": 400, "u_verts_scale": 2, "v_verts_scale": 2, "w_verts_scale": 2},
                },
                {
                    "params": {
                        "half_scale": 100, "u_verts_scale": 1, "v_verts_scale": 1, "w_verts_scale": 1,
                        "u_patches": 2, "v_patches": 2, "w_patches": 2
                    },
                },
            ],
            "Disk": [
                {
                    "params": {"half_scale": 100, "u_verts_scale": 2, "v_verts_scale": 1},
                },
                {
                    "params": {"half_scale": 200, "u_verts_scale": 2, "v_verts_scale": 2},
                },
                {
                    "params": {
                        "half_scale": 100, "u_verts_scale": 1, "v_verts_scale": 1,
                        "u_patches": 2, "v_patches": 2
                    },
                },
            ],
            "Plane": [
                {
                    "params": {"half_scale": 100, "u_verts_scale": 2, "v_verts_scale": 1},
                },
                {
                    "params": {"half_scale": 200, "u_verts_scale": 2, "v_verts_scale": 2},
                },
                {
                    "params": {
                        "half_scale": 100, "u_verts_scale": 1, "v_verts_scale": 1,
                        "u_patches": 2, "v_patches": 2
                    },
                },
            ],
            "Sphere": [
                {
                    "params": {"half_scale": 100, "u_verts_scale": 2, "v_verts_scale": 1},
                },
                {
                    "params": {"half_scale": 200, "u_verts_scale": 2, "v_verts_scale": 2},
                },
                {
                    "params": {
                        "half_scale": 100, "u_verts_scale": 1, "v_verts_scale": 1,
                        "u_patches": 2, "v_patches": 2
                    },
                },
            ],
            "Torus": [
                {
                    "params": {"half_scale": 100, "u_verts_scale": 2, "v_verts_scale": 1},
                },
                {
                    "params": {"half_scale": 200, "u_verts_scale": 2, "v_verts_scale": 2},
                },
                {
                    "params": {
                        "half_scale": 100, "u_verts_scale": 1, "v_verts_scale": 1,
                        "u_patches": 2, "v_patches": 2
                    },
                },
            ],
        }
        golden_file = TEST_DATA_PATH.joinpath("golden.usd")
        golden_stage = Usd.Stage.Open(str(golden_file))
        self.assertTrue(golden_stage)
        await omni.usd.get_context().new_stage_async()
        stage = omni.usd.get_context().get_stage()
        for prim_type, test_cases in test_data.items():
            for test_case in test_cases:
                params = test_case["params"]
                result, path = omni.kit.commands.execute(
                    "CreateMeshPrim", prim_type=prim_type, above_ground=True, **params
                )
                self.assertTrue(result)
                mesh_prim = stage.GetPrimAtPath(path)
                self.assertTrue(mesh_prim)
                golden_prim = golden_stage.GetPrimAtPath(path)
                self.assertTrue(golden_prim)
                # The generated prim must expose exactly the golden property set,
                # and each (attribute) property must match type and value.
                property_names = mesh_prim.GetPropertyNames()
                golden_property_names = golden_prim.GetPropertyNames()
                self.assertEqual(property_names, golden_property_names)
                path = Sdf.Path(path)
                for property_name in property_names:
                    property_path = path.AppendProperty(property_name)
                    prop = mesh_prim.GetPropertyAtPath(property_path)
                    golden_prop = golden_prim.GetPropertyAtPath(property_path)
                    # Skips relationship
                    if hasattr(prop, "GetTypeName"):
                        self.assertTrue(prop.GetTypeName(), golden_prop.GetTypeName())
                        self.assertEqual(prop.Get(), golden_prop.Get())

    async def test_mesh_prims(self):
        """Test all mesh generator prims for both Y-up and Z-up stages."""
        for y_axis in [True, False]:
            await omni.usd.get_context().new_stage_async()
            stage = omni.usd.get_context().get_stage()
            axis = UsdGeom.Tokens.y if y_axis else UsdGeom.Tokens.z
            UsdGeom.SetStageUpAxis(stage, axis)
            for prim_type in omni.kit.primitive.mesh.get_geometry_mesh_prim_list():
                result, path = omni.kit.commands.execute("CreateMeshPrim", prim_type=prim_type, above_ground=True)
                self.assertTrue(result)

                def check_exist(path=path):
                    prim = stage.GetPrimAtPath(path)
                    attr = prim.GetAttribute(UsdGeom.Tokens.extent)
                    self.assertTrue(attr and attr.Get())
                    self.assertTrue(prim)
                    self.assertTrue(prim.IsA(UsdGeom.Mesh))
                    self.assertTrue(prim.IsA(UsdGeom.Xformable))
                    mesh_prim = UsdGeom.Mesh(prim)
                    points = mesh_prim.GetPointsAttr().Get()
                    face_indices = mesh_prim.GetFaceVertexIndicesAttr().Get()
                    normals = mesh_prim.GetNormalsAttr().Get()
                    face_counts = mesh_prim.GetFaceVertexCountsAttr().Get()
                    # Every face-vertex must carry a normal and an index, and
                    # every point must be referenced by at least one face.
                    total = sum(face_counts)
                    unique_indices = set(face_indices)
                    self.assertTrue(len(points) == len(unique_indices))
                    self.assertTrue(total == len(normals))
                    self.assertTrue(total == len(face_indices))

                self._verify_undo_redo(stage, path, check_exist)

    async def test_meshes_creation_from_menu(self):
        """Create each mesh prim through the Create menu and verify undo/redo."""
        import omni.kit.ui_test as ui_test
        await omni.usd.get_context().new_stage_async()
        stage = omni.usd.get_context().get_stage()
        for prim_type in omni.kit.primitive.mesh.get_geometry_mesh_prim_list():
            await ui_test.menu_click(f"Create/Mesh/{prim_type.capitalize()}")
            path = f"/{prim_type}"

            def check_exist(path=path):
                self.assertTrue(stage.GetPrimAtPath(path))

            self._verify_undo_redo(stage, path, check_exist)

    async def test_mesh_settings(self):
        """Drive the Mesh Generation Settings window UI and create each prim type."""
        import omni.kit.ui_test as ui_test
        await omni.usd.get_context().new_stage_async()
        stage = omni.usd.get_context().get_stage()
        await ui_test.menu_click("Create/Mesh/Settings")
        window = ui_test.find("Mesh Generation Settings")
        self.assertTrue(window)
        await window.focus()
        primitive_type_combobox = window.find("**/ComboBox[*].name=='primitive_type'")
        self.assertTrue(primitive_type_combobox)
        create_button = window.find("**/Button[*].name=='create'")
        self.assertTrue(create_button)
        model = primitive_type_combobox.model
        value_model = model.get_item_value_model()
        for i, prim_type in enumerate(omni.kit.primitive.mesh.get_geometry_mesh_prim_list()):
            value_model.set_value(i)
            # Let the UI react to the combobox change before clicking create.
            await omni.kit.app.get_app().next_update_async()
            await omni.kit.app.get_app().next_update_async()
            await create_button.click()
            path = f"/{prim_type}"
            self.assertTrue(stage.GetPrimAtPath(path))

    async def test_actions(self):
        """Create each mesh prim through its registered action and verify undo/redo."""
        await omni.usd.get_context().new_stage_async()
        stage = omni.usd.get_context().get_stage()
        for prim_type in omni.kit.primitive.mesh.get_geometry_mesh_prim_list():
            omni.kit.actions.core.execute_action(
                "omni.kit.primitive.mesh",
                f"create_mesh_prim_{prim_type.lower()}"
            )
            path = f"/{prim_type}"

            def check_exist(path=path):
                self.assertTrue(stage.GetPrimAtPath(path))

            self._verify_undo_redo(stage, path, check_exist)
| 11,354 | Python | 38.702797 | 115 | 0.468822 |
omniverse-code/kit/exts/omni.kit.primitive.mesh/docs/CHANGELOG.md | # Changelog
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
## [1.0.8] - 2022-11-25
### Changes
- Improve mesh primitives for physx needs.
- Make sure cone and cylinder are watertight.
- Fix normal issues at the tip of Cone.
- Add more tessellation settings for caps of cone and cylinder.
- Add more tessellation settings for cube to tesselate cube with axis.
## [1.0.7] - 2022-11-22
### Changes
- Fix to avoid crash at shutdown when loading optional slice
## [1.0.6] - 2022-11-22
### Changes
- Make UI dependency optional
## [1.0.5] - 2022-11-12
### Changes
- Export extent attr for mesh.
## [1.0.4] - 2022-11-11
### Changes
- Clean up dependencies.
## [1.0.3] - 2022-10-25
### Changes
- Added prepend_default_prim parameter to CreateMeshPrimWithDefaultXformCommand
## [1.0.2] - 2022-08-12
### Changes
- Added select_new_prim & prim_path parameters to CreateMeshPrimWithDefaultXformCommand
## [1.0.1] - 2022-06-08
### Changes
- Updated menus to use actions.
## [1.0.0] - 2020-09-09
### Changes
- Supports cube, cone, cylinder, disk, plane, sphere, torus generation.
- Supports subdivision of meshes.
| 1,143 | Markdown | 24.999999 | 87 | 0.702537 |
omniverse-code/kit/exts/omni.kit.primitive.mesh/docs/index.rst | omni.kit.primitive.mesh: omni.kit.mesh_generator
#################################################
Python Extension Mesh Generator
| 132 | reStructuredText | 25.599995 | 49 | 0.515152 |
omniverse-code/kit/exts/omni.kit.manipulator.camera/omni/kit/manipulator/camera/animation.py | # Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
__all__ = ['AnimationEventStream']
import carb
import omni.kit.app
import traceback
from typing import Any, Callable
class AnimationEventStream:
    """Ref-counted shared stream that fans app-update ticks out to animation callbacks.

    A single underlying instance is shared via :meth:`get_instance`; each call
    increments a reference count that :meth:`destroy` decrements, so the update
    subscription is only torn down when the last user releases it.
    """
    # [instance, ref_count] pair; None when no instance is alive.
    __g_instance = None
    @staticmethod
    def get_instance():
        """Return the shared instance, creating it or bumping its ref count."""
        if AnimationEventStream.__g_instance is None:
            AnimationEventStream.__g_instance = [AnimationEventStream(), 1]
        else:
            AnimationEventStream.__g_instance[1] = AnimationEventStream.__g_instance[1] + 1
        return AnimationEventStream.__g_instance[0]
    def __init__(self):
        # Lazily-created update-event subscription (see __init).
        self.__event_sub = None
        # Maps an arbitrary key to the list of per-frame callbacks for that key.
        self.__callbacks = {}
    def __del__(self):
        self.destroy()
    def destroy(self):
        """Release one reference; tear down the stream when the count hits zero."""
        if AnimationEventStream.__g_instance and AnimationEventStream.__g_instance[0] == self:
            AnimationEventStream.__g_instance[1] = AnimationEventStream.__g_instance[1] - 1
            if AnimationEventStream.__g_instance[1] > 0:
                return
            AnimationEventStream.__g_instance = None
        self.__event_sub = None
        self.__callbacks = {}
    def __on_event(self, e: carb.events.IEvent):
        # Deliver the frame delta-time to every registered callback; a failing
        # callback is logged but must not break delivery to the others.
        dt = e.payload['dt']
        for _, callbacks in self.__callbacks.items():
            for cb_fn in callbacks:
                try:
                    cb_fn(dt)
                except Exception:
                    carb.log_error(traceback.format_exc())
    def __init(self):
        # Subscribe to the app update stream once; subsequent calls are no-ops.
        if self.__event_sub:
            return
        self.__event_sub = omni.kit.app.get_app().get_update_event_stream().create_subscription_to_pop(
            self.__on_event,
            name="omni.kit.manipulator.camera.AnimationEventStream",
            order=omni.kit.app.UPDATE_ORDER_PYTHON_ASYNC_FUTURE_END_UPDATE
        )
    def add_animation(self, animation_fn: Callable, key: Any, remove_others: bool = True):
        """Register a per-frame callback under *key*.

        When *remove_others* is True (default) the new callback replaces any
        existing callbacks for that key; otherwise it is appended.
        """
        if remove_others:
            self.__callbacks[key] = [animation_fn]
        else:
            prev_fns = self.__callbacks.get(key) or []
            if prev_fns:
                prev_fns.append(animation_fn)
            else:
                self.__callbacks[key] = [animation_fn]
        self.__init()
    def remove_animation(self, key: Any, animation_fn: Callable = None):
        """Remove *animation_fn* for *key* (or all of *key*'s callbacks when None).

        The update subscription is dropped once no callbacks remain.
        """
        if animation_fn:
            prev_fns = self.__callbacks.get(key)
            if prev_fns:
                try:
                    prev_fns.remove(animation_fn)
                except ValueError:
                    pass
        else:
            prev_fns = None
        if not prev_fns:
            try:
                del self.__callbacks[key]
            except KeyError:
                pass
        if not self.__callbacks:
            self.__event_sub = None
| 3,114 | Python | 31.447916 | 103 | 0.582531 |
omniverse-code/kit/exts/omni.kit.manipulator.camera/omni/kit/manipulator/camera/viewport_camera_manipulator.py | # Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
from .model import CameraManipulatorModel, _flatten_matrix, _optional_bool, _optional_int
from .usd_camera_manipulator import (
UsdCameraManipulator,
KIT_COI_ATTRIBUTE,
KIT_LOOKTHROUGH_ATTRIBUTE,
KIT_CAMERA_LOCK_ATTRIBUTE,
_compute_local_transform
)
from omni.ui import scene as sc
from pxr import Usd, UsdGeom, Sdf, Gf
import carb
import math
__all__ = ['ViewportCameraManipulator']
# More advanced implementation for a Viewport that can use picked objects and -look through- arbitrary scene items
#
def _check_for_camera_forwarding(imageable: UsdGeom.Imageable):
    """Resolve the prim the manipulator should actually drive.

    A camera can be set up (via the LookAtCommand) with a relationship that
    forwards manipulation to another prim. If such a relationship exists and
    targets a valid UsdGeom.Imageable, that target is returned; otherwise the
    original *imageable* is returned unchanged.

    Args:
        imageable: The camera's UsdGeom.Imageable.

    Returns:
        The UsdGeom.Imageable to manipulate (target or the input itself).
    """
    # Look for the relationship setup via LookAtCommand
    prim = imageable.GetPrim()
    look_through = prim.GetRelationship(KIT_LOOKTHROUGH_ATTRIBUTE).GetForwardedTargets()
    if look_through:
        stage = prim.GetStage()
        # Loop over all targets (should really be only one) and see if we can get a valid UsdGeom.Imageable
        for target in look_through:
            target_prim = stage.GetPrimAtPath(target)
            if not target_prim:
                continue
            target_imageable = UsdGeom.Imageable(target_prim)
            if target_imageable:
                return target_imageable
        # Fixed typo in the original warning message ("look-thorugh").
        carb.log_warn(f'{prim.GetPath()} was set up for look-through, but no valid prim was found for targets: {look_through}')
    return imageable
def _setup_center_of_interest(model: sc.AbstractManipulatorModel, prim: Usd.Prim, time: Usd.TimeCode,
                              object_centric: int = 0, viewport_api=None, mouse=None):
    """Initialize the manipulator model's center-of-interest (COI) from *prim*.

    When *object_centric* is non-zero, an asynchronous scene query at *mouse*
    (NDC coordinates, resolved through *viewport_api*) refines the COI to the
    picked position/object. All camera movement is disabled in the model until
    the query completes, then restored to its prior state.
    """
    def get_center_of_interest():
        # Read the persisted COI attribute; when missing/unauthored, derive it
        # from the camera's focusDistance or the distance to the world origin.
        coi_attr = prim.GetAttribute(KIT_COI_ATTRIBUTE)
        if not coi_attr or not coi_attr.IsAuthored():
            # Use UsdGeomCamera.focusDistance is present
            distance = 0
            fcs_dist = prim.GetAttribute('focusDistance')
            if fcs_dist and fcs_dist.IsAuthored():
                distance = fcs_dist.Get(time)
            # distance 0 is invalid, so create the atribute based on length from origin
            if not fcs_dist or distance == 0:
                origin = Gf.Matrix4d(*model.get_as_floats('initial_transform')).Transform((0, 0, 0))
                distance = origin.GetLength()
            coi_attr = prim.CreateAttribute(KIT_COI_ATTRIBUTE, Sdf.ValueTypeNames.Vector3d, True, Sdf.VariabilityUniform)
            coi_attr.Set(Gf.Vec3d(0, 0, -distance))
        # Make sure COI isn't ridiculously low
        coi_val = coi_attr.Get()
        length = coi_val.GetLength()
        if length < 0.000001 or not math.isfinite(length):
            coi_val = Gf.Vec3d(0, 0, -100)
        return coi_val
    def query_completed(path, pos, *args):
        # Callback for the async scene query: refine the COI from the picked
        # world-space position, then re-enable the movement flags saved below.
        # Reset center-of-interest if there's an obect and world-space position
        if path and pos:
            # Convert carb value to Gf.Vec3d
            pos = Gf.Vec3d(pos.x, pos.y, pos.z)
            # Object centric 1 will use the object-center, so replace pos with the UsdGeom.Imageable's (0, 0, 0) coord
            if object_centric == 1:
                picked_prim = prim.GetStage().GetPrimAtPath(path)
                imageable = UsdGeom.Imageable(picked_prim) if picked_prim else None
                if imageable:
                    pos = imageable.ComputeLocalToWorldTransform(time).Transform(Gf.Vec3d(0, 0, 0))
            if math.isfinite(pos[0]) and math.isfinite(pos[1]) and math.isfinite(pos[2]):
                inv_xform = Gf.Matrix4d(*model.get_as_floats('transform')).GetInverse()
                coi = inv_xform.Transform(pos)
                model.set_floats('center_of_interest_picked', [pos[0], pos[1], pos[2]])
                # Also need to trigger a recomputation of ndc_speed based on our new center of interest
                coi_item = model.get_item('center_of_interest')
                model.set_floats(coi_item, [coi[0], coi[1], coi[2]])
                model._item_changed(coi_item)
        # Re-enable all movement that we previouly disabled
        model.set_ints('disable_pan', [disable_pan])
        model.set_ints('disable_tumble', [disable_tumble])
        model.set_ints('disable_look', [disable_look])
        model.set_ints('disable_zoom', [disable_zoom])
    coi = get_center_of_interest()
    model.set_floats('center_of_interest', [coi[0], coi[1], coi[2]])
    if object_centric != 0:
        # Map the NDC co-ordinates to a viewport's texture-space
        mouse, viewport_api = viewport_api.map_ndc_to_texture_pixel(mouse)
        if (mouse is None) or (viewport_api is None):
            object_centric = 0
    if object_centric == 0:
        model.set_floats('center_of_interest_picked', [])
        return
    # Block all movement until the query completes
    disable_pan = _optional_bool(model, 'disable_pan')
    disable_tumble = _optional_bool(model, 'disable_tumble')
    disable_look = _optional_bool(model, 'disable_look')
    disable_zoom = _optional_bool(model, 'disable_zoom')
    model.set_ints('disable_pan', [1])
    model.set_ints('disable_tumble', [1])
    model.set_ints('disable_look', [1])
    model.set_ints('disable_zoom', [1])
    # Start the query
    viewport_api.request_query(mouse, query_completed)
class ViewportCameraManipulator(UsdCameraManipulator):
    """Camera manipulator bound to a Viewport: seeds the model from the viewport's
    camera/stage state and can use picked objects or look-through targets."""
    def __init__(self, viewport_api, bindings: dict = None, *args, **kwargs):
        super().__init__(bindings, viewport_api.usd_context_name)
        self.__viewport_api = viewport_api
        # def view_changed(*args):
        # return
        # from .gesturebase import set_frame_delivered
        # set_frame_delivered(True)
        # self.__vc_change = viewport_api.subscribe_to_frame_change(view_changed)
    def _on_began(self, model: CameraManipulatorModel, mouse):
        """Populate *model* from the viewport's camera when a gesture begins.

        Pushes projection, initial transform, up-axis, aperture/orthographic
        state, lock/undo flags and the center of interest into the model, and
        redirects edits to a look-through target prim when one is configured.
        """
        # We need a viewport and a stage to start. If either are missing disable any further processing.
        viewport_api = self.__viewport_api
        stage = viewport_api.stage if viewport_api else None
        settings = carb.settings.get_settings()
        # Store the viewport_id in the model for use later if necessary
        model.set_ints('viewport_id', [viewport_api.id if viewport_api else 0])
        if not stage:
            # TODO: Could we forward this to adjust the viewport_api->omni.scene.ui ?
            model.set_ints('disable_tumble', [1])
            model.set_ints('disable_look', [1])
            model.set_ints('disable_pan', [1])
            model.set_ints('disable_zoom', [1])
            model.set_ints('disable_fly', [1])
            return
        cam_path = viewport_api.camera_path
        if hasattr(model, '_set_animation_key'):
            model._set_animation_key(cam_path)
        time = viewport_api.time
        cam_prim = stage.GetPrimAtPath(cam_path)
        cam_imageable = UsdGeom.Imageable(cam_prim)
        camera = UsdGeom.Camera(cam_prim) if cam_imageable else None
        if not cam_imageable or not cam_imageable.GetPrim().IsValid():
            raise RuntimeError('ViewportCameraManipulator with an invalid UsdGeom.Imageable or Usd.Prim')
        # Push the viewport's projection into the model
        projection = _flatten_matrix(viewport_api.projection)
        model.set_floats('projection', projection)
        # Check if we should actaully keep camera at identity and forward our movements to another object
        target_imageable = _check_for_camera_forwarding(cam_imageable)
        local_xform, parent_xform = _compute_local_transform(target_imageable, time)
        model.set_floats('initial_transform', _flatten_matrix(local_xform))
        model.set_floats('transform', _flatten_matrix(local_xform))
        # Setup the model if the camera is orthographic (where for Usd we must edit apertures)
        # We do this before center-of-interest query to get disabled-state pushed into the model
        if camera:
            orthographic = int(camera.GetProjectionAttr().Get(time) == 'orthographic')
            if orthographic:
                model.set_floats('initial_aperture', [camera.GetHorizontalApertureAttr().Get(time),
                                                      camera.GetVerticalApertureAttr().Get(time)])
        else:
            # No UsdGeom.Camera: infer orthographic from the projection matrix.
            orthographic = int(projection[15] == 1 if projection else False)
            model.set_floats('initial_aperture', [])
        up_axis = UsdGeom.GetStageUpAxis(stage)
        if up_axis == UsdGeom.Tokens.x:
            up_axis = Gf.Vec3d(1, 0, 0)
        elif up_axis == UsdGeom.Tokens.y:
            up_axis = Gf.Vec3d(0, 1, 0)
        elif up_axis == UsdGeom.Tokens.z:
            up_axis = Gf.Vec3d(0, 0, 1)
        if not bool(settings.get("exts/omni.kit.manipulator.camera/forceStageUp")):
            up_axis = parent_xform.TransformDir(up_axis).GetNormalized()
        model.set_floats('up_axis', [up_axis[0], up_axis[1], up_axis[2]])
        # Disable undo for implict cameras. This might be better handled with custom meta-data / attribute long term
        disable_undo = cam_path.pathString in ['/OmniverseKit_Persp', '/OmniverseKit_Front', '/OmniverseKit_Right', '/OmniverseKit_Top']
        model.set_ints('disable_undo', [int(disable_undo)])
        # Test whether this camera is locked
        cam_lock = cam_prim.GetAttribute(KIT_CAMERA_LOCK_ATTRIBUTE)
        if cam_lock and cam_lock.Get():
            model.set_ints('disable_tumble', [1])
            model.set_ints('disable_look', [1])
            model.set_ints('disable_pan', [1])
            model.set_ints('disable_zoom', [1])
            model.set_ints('disable_fly', [1])
        else:
            # Orthographic cameras cannot tumble or look.
            model.set_ints('orthographic', [orthographic])
            model.set_ints('disable_tumble', [orthographic])
            model.set_ints('disable_look', [orthographic])
            model.set_ints('disable_pan', [0])
            model.set_ints('disable_zoom', [0])
            model.set_ints('disable_fly', [0])
        # Extract the camera's center of interest, from a property or world-space query
        # model.set_ints('object_centric_movement', [1])
        object_centric = settings.get('/exts/omni.kit.manipulator.camera/objectCentric/type') or 0
        object_centric = _optional_int(self.model, 'object_centric_movement', object_centric)
        _setup_center_of_interest(model, target_imageable.GetPrim(), time, object_centric, viewport_api, mouse)
        # Setup the model for command execution on key-framed data
        had_transform_at_key = False
        if not time.IsDefault():
            xformable = UsdGeom.Xformable(target_imageable)
            if xformable:
                for xformOp in xformable.GetOrderedXformOps():
                    had_transform_at_key = time in xformOp.GetTimeSamples()
                    if had_transform_at_key:
                        break
        model.set_ints('had_transform_at_key', [had_transform_at_key])
        # Set the pan/zoom speed equivalent to the world space travel of the mouse
        model.set_floats('world_speed', [1, 1, 1])
        # Make a full drag across the viewport equal to a 180 tumble
        uv_space = viewport_api.map_ndc_to_texture((1, 1))[0]
        model.set_floats('rotation_speed', [((v * 2.0) - 1.0) for v in uv_space] + [1])
        # Tell the USD manipulator the context and prim to operate on
        self._set_context(viewport_api.usd_context_name, target_imageable.GetPath())
    def destroy(self):
        """Release the viewport reference and tear down the base manipulator."""
        self.__vc_change = None
        self.__viewport_api = None
        super().destroy()
import omni.kit.app
import time
class ZoomEvents:
    """Short-lived zoom helper driven by app-update events.

    One instance exists per viewport (see :meth:`get_instance`); it destroys
    itself automatically once no zoom input has arrived for 0.1 seconds.
    """
    __instances = set()

    @staticmethod
    def get_instance(viewport_api):
        """Return the live instance for *viewport_api*, creating one if needed."""
        instance = None
        for inst in ZoomEvents.__instances:
            if inst.__viewport_api == viewport_api:
                instance = inst
                break
        if instance is None:
            instance = ZoomEvents(viewport_api)
            ZoomEvents.__instances.add(instance)
        else:
            instance.__mark_time()
        return instance

    def __init__(self, viewport_api):
        self.__viewport_api = viewport_api
        self.__mouse = [0, 0]
        # Fix: initialize the timestamp before subscribing to updates; otherwise
        # __on_event could call __time_since_last before __last_time exists and
        # raise AttributeError if an update tick arrives before update().
        self.__mark_time()
        self.__manipulator = ViewportCameraManipulator(viewport_api, bindings={'ZoomGesture': 'LeftButton'})
        self.__manipulator.on_build()
        self.__zoom_gesture = self.__manipulator._screen.gestures[0]
        self.__zoom_gesture._disable_flight()
        self.__zoom_gesture.on_began(self.__mouse)
        # 1030
        if hasattr(omni.kit.app, 'UPDATE_ORDER_PYTHON_ASYNC_FUTURE_END_UPDATE'):
            update_order = omni.kit.app.UPDATE_ORDER_PYTHON_ASYNC_FUTURE_END_UPDATE
        else:
            update_order = 50
        self.__event_sub = omni.kit.app.get_app().get_update_event_stream().create_subscription_to_pop(
            self.__on_event, name="omni.kit.manipulator.camera.ZoomEvents", order=update_order
        )

    def update(self, x, y):
        """Advance the zoom by a mouse delta, scaled by distance to the COI."""
        self.__mark_time()
        coi = Gf.Vec3d(*self.__manipulator.model.get_as_floats('center_of_interest'))
        # Scale input by (log of) distance so zoom speed feels uniform near and far.
        scale = math.log10(max(10, coi.GetLength())) / 40
        self.__mouse = (self.__mouse[0] + x * scale, self.__mouse[1] + y * scale)
        self.__zoom_gesture.on_changed(self.__mouse)
        self.__mark_time()

    def __mark_time(self):
        # Record the time of the most recent zoom input.
        self.__last_time = time.time()

    def __time_since_last(self):
        return time.time() - self.__last_time

    def __on_event(self, e: carb.events.IEvent):
        # Auto-destroy after 0.1s of inactivity.
        delta = self.__time_since_last()
        if delta > 0.1:
            self.destroy()

    def destroy(self):
        """End the gesture, drop the subscription and unregister this instance."""
        self.__event_sub = None
        self.__zoom_gesture.on_ended()
        self.__manipulator.destroy()
        try:
            ZoomEvents.__instances.remove(self)
        except KeyError:
            pass
# Helper function to do single a zoom-operation, from a scroll-wheel for example
def _zoom_operation(x, y, viewport_api):
    """Apply a single zoom step (e.g. from a scroll wheel) to *viewport_api*.

    Returns True when the zoom was applied, None when no viewport is given.
    """
    if viewport_api:
        ZoomEvents.get_instance(viewport_api).update(x, y)
        return True
    return None
| 14,470 | Python | 43.118902 | 136 | 0.622391 |
omniverse-code/kit/exts/omni.kit.manipulator.camera/omni/kit/manipulator/camera/__init__.py | # Expose these for easier import via from omni.kit.manipulator.camera import XXX
from .manipulator import SceneViewCameraManipulator, CameraManipulatorBase, adjust_center_of_interest
from .usd_camera_manipulator import UsdCameraManipulator
from .viewport_camera_manipulator import ViewportCameraManipulator
| 308 | Python | 50.499992 | 101 | 0.863636 |
omniverse-code/kit/exts/omni.kit.manipulator.camera/omni/kit/manipulator/camera/flight_mode.py | # Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
__all__ = ['FlightModeKeyboard', 'get_keyboard_input']
from .model import CameraManipulatorModel, _accumulate_values, _optional_floats
from omni.ui import scene as sc
import omni.appwindow
from pxr import Gf
import carb
import carb.input
class FlightModeValues:
    """Tracks opposing key-pair states per axis and derives the net movement vector."""

    def __init__(self):
        # One [negative_key, positive_key, net] triple per axis (x, y, z).
        self._axes = (
            [0, 0, 0],
            [0, 0, 0],
            [0, 0, 0],
        )

    def update(self, i0, i1, value) -> bool:
        """Record key state at (axis *i0*, slot *i1*); return True if any axis moves."""
        self._axes[i0][i1] = value
        active = 0
        for axis in self._axes:
            axis[2] = axis[1] - axis[0]
            if axis[2] != 0:
                active += 1
        return active != 0

    @property
    def value(self):
        """The current (x, y, z) net movement tuple."""
        return tuple(axis[2] for axis in self._axes)
class FlightModeKeyboard:
    """WASDQE keyboard handling for camera flight mode.

    Subscribes to keyboard events while the right mouse button is held,
    accumulates per-axis movement into a FlightModeValues and feeds it to the
    manipulator model's 'fly' item. Left Shift / Left Control scale the flight
    speed via the persistent camMoveVelocity setting.
    """
    # Cached carb.input key -> (axis, slot) map, built lazily by get_char_map.
    __g_char_map = None
    @staticmethod
    def get_char_map():
        """Yield (carb key, (axis, slot)) pairs for the WASDQE bindings."""
        if not FlightModeKeyboard.__g_char_map:
            key_char_map = {
                'w': (2, 0),
                's': (2, 1),
                'a': (0, 0),
                'd': (0, 1),
                'q': (1, 0),
                'e': (1, 1),
            }
            # eval on hard-coded, trusted strings only (maps 'w' -> KeyboardInput.W).
            carb_key_map = {eval(f'carb.input.KeyboardInput.{ascii_val.upper()}'): index for ascii_val, index in key_char_map.items()}
            FlightModeKeyboard.__g_char_map = carb_key_map
        for k, v in FlightModeKeyboard.__g_char_map.items():
            yield k, v
    def __init__(self):
        self.__input = None
        self.__model = None
        # True while we have told the model to run external (per-frame) events.
        self.__stop_events = False
        self.__keyboard_sub = None
        # Unmodified camMoveVelocity, restored on destroy.
        self.__initial_speed = None
        # Current speed multiplier from the modifier keys.
        self.__current_adjusted_speed = 1
    def init(self, model, iinput, mouse, mouse_button, app_window) -> None:
        """Attach to *model* and start (or refresh) keyboard-driven flight."""
        self.__model = model
        if self.__input is None:
            self.__input = iinput
            self.__keyboard = app_window.get_keyboard()
            self.__keyboard_sub = iinput.subscribe_to_keyboard_events(self.__keyboard, self.__on_key)
            self.__mouse = mouse
            # XXX: This isn't working
            # self.__mouse_sub = iinput.subscribe_to_mouse_events(mouse, self.__on_mouse)
            # So just query the state on key-down
            self.__mouse_button = mouse_button
            self.__key_index = {k: v for k, v in FlightModeKeyboard.get_char_map()}
            self.__values = FlightModeValues()
            # Setup for modifier keys adjusting speed
            self.__settings = carb.settings.get_settings()
            # Shift or Control can modify flight speed, get the current state
            self.__setup_speed_modifiers()
        # Need to update all input key states on start
        for key, index in self.__key_index.items():
            # Read the key and update the value. Update has to occur whether key is down or not as numeric field
            # might have text focus; causing carbonite not to deliver __on_key messages
            key_val = self.__input.get_keyboard_value(self.__keyboard, key)
            self.__values.update(*index, 1 if key_val else 0)
        # Record whether a previous invocation had started external events
        prev_stop = self.__stop_events
        # Test if any interesting key-pair result in a value
        key_down = any(self.__values.value)
        # If a key is no longer down, it may have not gotten to __on_key subscription if a numeric entry id focused
        # In that case there is no more key down so kill any external trigger
        if prev_stop and not key_down:
            prev_stop = False
            self.__model._stop_external_events()
        self.__stop_events = key_down or prev_stop
        self.__model.set_floats('fly', self.__values.value)
        if self.__stop_events:
            self.__model._start_external_events(True)
    def _cancel(self) -> bool:
        # Flight ends when the driving mouse button is released.
        return self.__input.get_mouse_value(self.__mouse, self.__mouse_button) == 0 if self.__input else True
    @property
    def active(self) -> bool:
        """Returns if Flight mode is active or not"""
        return bool(self.__stop_events)
    def __adjust_speed_modifiers(self, cur_speed_mod: float, prev_speed_mod: float):
        """Apply a new speed multiplier to the persistent camMoveVelocity setting."""
        # Get the current state from
        initial_speed = self.__settings.get('/persistent/app/viewport/camMoveVelocity') or 1
        # Undo any previos speed modification based on key state
        if prev_speed_mod and prev_speed_mod != 1:
            initial_speed /= prev_speed_mod
        # Store the unadjusted values for restoration later (camMoveVelocity may change underneath modifiers)
        self.__initial_speed = initial_speed
        # Set the new speed if it is different
        cur_speed = initial_speed * cur_speed_mod
        self.__settings.set('/persistent/app/viewport/camMoveVelocity', cur_speed)
    def __setup_speed_modifiers(self):
        """Initialize the speed multiplier from the current modifier-key state."""
        # Default to legacy value of modifying speed by doubling / halving
        self.__speed_modifier_amount = self.__settings.get('/exts/omni.kit.manipulator.camera/flightMode/keyModifierAmount')
        if not self.__speed_modifier_amount:
            return
        # Store the current_adjusted_speed as inital_speed
        prev_speed_mod = self.__current_adjusted_speed
        cur_speed_mod = prev_speed_mod
        # Scan the input keys that modify speed and adjust current_adjusted_speed
        if self.__input.get_keyboard_value(self.__keyboard, carb.input.KeyboardInput.LEFT_SHIFT):
            cur_speed_mod *= self.__speed_modifier_amount
        if self.__input.get_keyboard_value(self.__keyboard, carb.input.KeyboardInput.LEFT_CONTROL):
            if self.__speed_modifier_amount != 0:
                cur_speed_mod /= self.__speed_modifier_amount
        # Store new speed into proper place
        if prev_speed_mod != cur_speed_mod:
            self.__current_adjusted_speed = cur_speed_mod
            self.__adjust_speed_modifiers(cur_speed_mod, prev_speed_mod)
    def __process_speed_modifier(self, key: carb.input.KeyboardEventType, is_down: bool):
        """Handle a Shift/Control press or release; returns True if speed changed."""
        if not self.__speed_modifier_amount:
            return
        def speed_adjustment(increase: bool):
            return self.__speed_modifier_amount if increase else (1 / self.__speed_modifier_amount)
        prev_speed_mod = self.__current_adjusted_speed
        cur_speed_mod = prev_speed_mod
        if key == carb.input.KeyboardInput.LEFT_SHIFT:
            cur_speed_mod *= speed_adjustment(is_down)
        if key == carb.input.KeyboardInput.LEFT_CONTROL:
            cur_speed_mod *= speed_adjustment(not is_down)
        if prev_speed_mod != cur_speed_mod:
            self.__current_adjusted_speed = cur_speed_mod
            self.__adjust_speed_modifiers(cur_speed_mod, prev_speed_mod)
            return True
        return False
    def __on_key(self, e) -> bool:
        """Keyboard subscription callback; returns False to consume the event."""
        index, value, speed_changed = None, None, False
        event_type = e.type
        KeyboardEventType = carb.input.KeyboardEventType
        if event_type == KeyboardEventType.KEY_PRESS or event_type == KeyboardEventType.KEY_REPEAT:
            index, value = self.__key_index.get(e.input), 1
            if event_type == KeyboardEventType.KEY_PRESS:
                speed_changed = self.__process_speed_modifier(e.input, True)
        elif event_type == KeyboardEventType.KEY_RELEASE:
            index, value = self.__key_index.get(e.input), 0
            speed_changed = self.__process_speed_modifier(e.input, False)
        # If not a navigation key, pass it on to another handler (unless it was a speed-moficiation key).
        if not index:
            return not speed_changed
        canceled = self._cancel()
        if canceled:
            value = 0
        has_data = self.__values.update(*index, value)
        if hasattr(self.__model, '_start_external_events'):
            if has_data:
                self.__stop_events = True
                self.__model._start_external_events(True)
            elif self.__stop_events:
                self.__stop_events = False
                self.__model._stop_external_events(True)
        self.__model.set_floats('fly', self.__values.value)
        # self.__model._item_changed(None)
        if canceled:
            self.destroy()
        return False
    def end(self):
        """Explicitly terminate flight mode; always returns None."""
        self.destroy()
        return None
    def __del__(self):
        self.destroy()
    def destroy(self) -> None:
        """Restore speed settings, clear model flight state and unsubscribe."""
        if self.__initial_speed is not None:
            self.__settings.set('/persistent/app/viewport/camMoveVelocity', self.__initial_speed)
        self.__initial_speed = None
        self.__current_adjusted_speed = 1
        if self.__model:
            self.__model.set_floats('fly', None)
            if self.__stop_events:
                self.__model._stop_external_events()
        if self.__keyboard_sub:
            self.__input.unsubscribe_to_keyboard_events(self.__keyboard, self.__keyboard_sub)
            self.__keyboard_sub = None
            self.__keyboard = None
        # if self.__mouse_sub:
        # self.__input.unsubscribe_to_mouse_events(self.__mouse, self.__mouse_sub)
        # self.__mouse_sub = None
        self.__mouse = None
        self.__input = None
        self.__values = None
        self.__key_index = None
def get_keyboard_input(model, walk_through: FlightModeKeyboard = None, end_with_mouse_ended: bool = False, mouse_button=carb.input.MouseInput.RIGHT_BUTTON):
    """Create, refresh, or tear down a FlightModeKeyboard based on mouse state.

    While *mouse_button* is held, a FlightModeKeyboard is created (or re-used)
    and (re)initialized. When the button is up and *end_with_mouse_ended* is
    set, an existing instance is destroyed. Returns the live instance or None.
    """
    input_iface = carb.input.acquire_input_interface()
    window = omni.appwindow.get_default_app_window()
    mouse_dev = window.get_mouse()
    if input_iface.get_mouse_value(mouse_dev, mouse_button):
        walk_through = walk_through if walk_through is not None else FlightModeKeyboard()
        walk_through.init(model, input_iface, mouse_dev, mouse_button, window)
        return walk_through
    if walk_through and end_with_mouse_ended:
        walk_through.destroy()
        return None
    return walk_through
| 10,350 | Python | 39.913043 | 156 | 0.604444 |
omniverse-code/kit/exts/omni.kit.manipulator.camera/omni/kit/manipulator/camera/math.py | # Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
__all__ = ['TransformAccumulator']
from pxr import Gf
class TransformAccumulator:
    """Builds incremental camera-space transform deltas relative to an initial
    camera transform (used for tumble / look / translate gestures)."""

    def __init__(self, initial_xform: Gf.Matrix4d):
        # Cache the inverse so world directions can be mapped into camera space.
        self.__inverse_xform = initial_xform.GetInverse() if initial_xform else None

    def get_rotation_axis(self, up_axis: Gf.Vec3d):
        """Return the stage up-axis expressed in camera-local space (+Y default)."""
        axis = up_axis if up_axis else Gf.Vec3d(0, 1, 0)
        return self.__inverse_xform.TransformDir(axis)

    def get_translation(self, amount: Gf.Vec3d):
        """Translation-only delta matrix."""
        return Gf.Matrix4d().SetTranslate(amount)

    def get_tumble(self, degrees: Gf.Vec3d, center_of_interest: Gf.Vec3d, up_axis: Gf.Vec3d):
        """Orbit rotation around the center of interest.

        Mouse X and Y are swapped relative to the rotation axes: horizontal
        movement rotates about the (scene) up axis, vertical about camera X.
        """
        axis = self.get_rotation_axis(up_axis)
        # Translate to the center of interest, rotate, then translate back.
        to_coi = Gf.Matrix4d().SetTranslate(-center_of_interest)
        pitch = Gf.Matrix4d().SetRotate(Gf.Rotation(Gf.Vec3d(1, 0, 0), degrees[1]))
        yaw = Gf.Matrix4d().SetRotate(Gf.Rotation(axis, degrees[0]))
        return to_coi * pitch * yaw * to_coi.GetInverse()

    def get_look(self, degrees: Gf.Vec3d, up_axis: Gf.Vec3d):
        """Look-around rotation about the camera position (same axis mapping
        as get_tumble, without the center-of-interest pivot)."""
        axis = self.get_rotation_axis(up_axis)
        pitch = Gf.Matrix4d().SetRotate(Gf.Rotation(Gf.Vec3d(1, 0, 0), degrees[1]))
        yaw = Gf.Matrix4d().SetRotate(Gf.Rotation(axis, degrees[0]))
        return pitch * yaw
| 2,166 | Python | 44.145832 | 97 | 0.686057 |
omniverse-code/kit/exts/omni.kit.manipulator.camera/omni/kit/manipulator/camera/usd_camera_manipulator.py | # Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
from .manipulator import CameraManipulatorBase, adjust_center_of_interest
from .model import _optional_bool, _flatten_matrix
from omni.kit import commands, undo
import omni.usd
from pxr import Usd, UsdGeom, Sdf, Tf, Gf
import carb.profiler
import carb.settings
import math
from typing import List
__all__ = ['UsdCameraManipulator']
KIT_COI_ATTRIBUTE = 'omni:kit:centerOfInterest'
KIT_LOOKTHROUGH_ATTRIBUTE = 'omni:kit:viewport:lookThrough:target'
KIT_CAMERA_LOCK_ATTRIBUTE = 'omni:kit:cameraLock'
def _get_context_stage(usd_context_name: str):
    """Return the Usd.Stage attached to the named UsdContext (may be None)."""
    context = omni.usd.get_context(usd_context_name)
    return context.get_stage()
def _compute_local_transform(imageable: UsdGeom.Imageable, time: Usd.TimeCode):
    """Return (local transform, inverse parent transform) for *imageable* at *time*.

    The local transform is derived from the world transform re-based into the
    parent frame, which is robust for prims without an explicit xformOp stack.
    """
    world_xform = imageable.ComputeLocalToWorldTransform(time)
    inverse_parent = imageable.ComputeParentToWorldTransform(time).GetInverse()
    return (world_xform * inverse_parent), inverse_parent
class SRTDecomposer:
    """Decomposes camera transforms into Scale/Rotate/Translate components.

    Captures the prim's SRT at construction; each `update` extracts the new
    translation and picks the euler-angle representation closest to the
    starting angles (avoiding 180-degree flips mid-drag). Scale and rotation
    order are intentionally held constant across the interaction.
    """
    def __init__(self, prim: Usd.Prim, time: Usd.TimeCode = None):
        if time is None:
            time = Usd.TimeCode.Default()
        xform_srt = omni.usd.get_local_transform_SRT(prim, time)
        xform_srt = (Gf.Vec3d(xform_srt[0]), Gf.Vec3d(xform_srt[1]), Gf.Vec3i(xform_srt[2]), Gf.Vec3d(xform_srt[3]))
        self.__start_scale, self.__start_rotation_euler, self.__start_rotation_order, self.__start_translation = xform_srt
        self.__current_scale, self.__current_rotation_euler, self.__current_rotation_order, self.__current_translation = xform_srt

    @staticmethod
    def __repeat(t: float, length: float) -> float:
        # Wrap t into the half-open interval [0, length).
        return t - (math.floor(t / length) * length)

    @staticmethod
    def __generate_compatible_euler_angles(euler: Gf.Vec3d, rotation_order: Gf.Vec3i) -> List[Gf.Vec3d]:
        """Return euler triples representing the same rotation as *euler*."""
        equal_eulers = [euler]
        mid_order = rotation_order[1]
        # Flip the middle axis and offset the outer axes by 180 degrees.
        equal = Gf.Vec3d()
        for i in range(3):
            if i == mid_order:
                equal[i] = 180 - euler[i]
            else:
                equal[i] = euler[i] + 180
        # BUGFIX: append a copy. Previously the same mutable Gf.Vec3d instance
        # was appended twice and then modified in place, so the +180 candidate
        # was silently replaced by the -360 variant.
        equal_eulers.append(Gf.Vec3d(equal))
        for i in range(3):
            equal[i] -= 360
        equal_eulers.append(equal)
        return equal_eulers

    @staticmethod
    def __find_best_euler_angles(old_rot_vec: Gf.Vec3d, new_rot_vec: Gf.Vec3d, rotation_order: Gf.Vec3i) -> Gf.Vec3d:
        """Pick the equivalent euler representation nearest to *old_rot_vec*."""
        equal_eulers = SRTDecomposer.__generate_compatible_euler_angles(new_rot_vec, rotation_order)
        nearest_euler = None
        for euler in equal_eulers:
            # Shift each component into +/-180 degrees around the old value.
            for i in range(3):
                euler[i] = SRTDecomposer.__repeat(euler[i] - old_rot_vec[i] + 180.0, 360.0) + old_rot_vec[i] - 180.0
            if nearest_euler is None:
                nearest_euler = euler
            else:
                distance_1 = (nearest_euler - old_rot_vec).GetLength()
                distance_2 = (euler - old_rot_vec).GetLength()
                if distance_2 < distance_1:
                    nearest_euler = euler
        return nearest_euler

    def update(self, xform: Gf.Matrix4d):
        """Refresh current translation/rotation from a new local transform."""
        # Extract new translation
        self.__current_translation = xform.ExtractTranslation()
        # Extract new euler rotation (strip the start scale/translation first)
        ro = self.__start_rotation_order
        old_s_mtx = Gf.Matrix4d().SetScale(self.__start_scale)
        old_t_mtx = Gf.Matrix4d().SetTranslate(self.__start_translation)
        rot_new = (old_s_mtx.GetInverse() * xform * old_t_mtx.GetInverse()).ExtractRotation()
        axes = [Gf.Vec3d.XAxis(), Gf.Vec3d.YAxis(), Gf.Vec3d.ZAxis()]
        decomp_rot = rot_new.Decompose(axes[ro[2]], axes[ro[1]], axes[ro[0]])
        index_order = Gf.Vec3i()
        for i in range(3):
            index_order[ro[i]] = 2 - i
        new_rot_vec = Gf.Vec3d(decomp_rot[index_order[0]], decomp_rot[index_order[1]], decomp_rot[index_order[2]])
        new_rot_vec = self.__find_best_euler_angles(self.__start_rotation_euler, new_rot_vec, self.__start_rotation_order)
        self.__current_rotation_euler = new_rot_vec
        # Because this is a camera manipulation, we purposefully ignore scale and
        # rotation-order changes; they remain constant across the interaction.
        return self

    @property
    def translation(self):
        return self.__current_translation

    @property
    def rotation(self):
        return self.__current_rotation_euler

    @property
    def start_translation(self):
        return self.__start_translation

    @property
    def start_rotation(self):
        # BUGFIX: this property was missing its return and always yielded None,
        # so undo data (old_rotation_euler) was silently wrong.
        return self.__start_rotation_euler
class ExternalUsdCameraChange():
    """Watches a camera prim for transform edits made outside the manipulator
    (e.g. the property panel) and kills in-flight inertia when one occurs."""
    def __init__(self, time: Usd.TimeCode):
        self.__tf_listener = None
        self.__usd_context_name, self.__prim_path = None, None
        self.__updates_paused = False
        self.__kill_external_animation = None
        self.__time = time
    def __del__(self):
        self.destroy()
    def update(self, model, usd_context_name: str, prim_path: Sdf.Path):
        """(Re)register the Tf.Notice listener for the given context and prim."""
        # Models without _kill_external_animation cannot be interrupted; bail out.
        self.__kill_external_animation = getattr(model, '_kill_external_animation', None)
        if self.__kill_external_animation is None:
            return
        self.__prim_path = prim_path
        # A context change invalidates the listener bound to the previous stage.
        if usd_context_name != self.__usd_context_name:
            self.__usd_context_name = usd_context_name
            if self.__tf_listener:
                self.__tf_listener.Revoke()
                self.__tf_listener = None
        if not self.__tf_listener:
            try:
                stage = _get_context_stage(self.__usd_context_name)
                if stage:
                    self.__tf_listener = Tf.Notice.Register(Usd.Notice.ObjectsChanged, self.__object_changed, stage)
            except ImportError:
                pass
    def destroy(self):
        """Revoke the Tf.Notice listener and drop all references."""
        if self.__tf_listener:
            self.__tf_listener.Revoke()
            self.__tf_listener = None
        self.__usd_context_name, self.__prim_path = None, None
        self.__kill_external_animation = None
    @carb.profiler.profile
    def __object_changed(self, notice, sender):
        """Tf.Notice callback: forward external transform edits to the model."""
        # Ignore notifications caused by our own writes (see pause_tracking).
        if self.__updates_paused:
            return
        if not sender or sender != _get_context_stage(self.__usd_context_name):
            return
        for p in notice.GetChangedInfoOnlyPaths():
            # Only react to attribute changes on the watched prim that affect
            # its transformation (xformOp* attributes).
            if (p.IsPropertyPath()
                and p.GetPrimPath() == self.__prim_path
                and UsdGeom.Xformable.IsTransformationAffectedByAttrNamed(p.name)):
                xformable = UsdGeom.Xformable(sender.GetPrimAtPath(self.__prim_path))
                xform = _flatten_matrix(xformable.GetLocalTransformation(self.__time)) if xformable else None
                self.__kill_external_animation(True, xform)
                break
    def pause_tracking(self):
        # Suppress notifications while the manipulator itself writes to USD.
        self.__updates_paused = True
    def start_tracking(self):
        self.__updates_paused = False
# Base Usd implementation that will set model back to Usd data via kit-commands
class UsdCameraManipulator(CameraManipulatorBase):
    """Camera manipulator that persists camera movement back into USD.

    Model changes are converted into kit commands (TransformPrimCommand,
    TransformPrimSRTCommand, ChangePropertyCommand) so edits participate in
    undo/redo, and are coordinated with legacy Viewport-1 windows if present.
    """
    def __init__(self, bindings: dict = None, usd_context_name: str = '', prim_path: Sdf.Path = None, *args, **kwargs):
        self.__usd_context_name, self.__prim_path = None, None
        self.__external_change_tracker = None
        super().__init__(bindings, *args, **kwargs)
        self._set_context(usd_context_name, prim_path)
    def _set_context(self, usd_context_name: str, prim_path: Sdf.Path):
        """Bind this manipulator to a UsdContext name and camera prim path."""
        self.__usd_context_name = usd_context_name
        self.__prim_path = prim_path
        self.__srt_decompose = None
        # When enabled, write edits as Scale/Rotate/Translate components rather
        # than a full matrix (preserves the prim's authored xformOp stack).
        if prim_path and carb.settings.get_settings().get('/persistent/app/camera/controllerUseSRT'):
            stage = _get_context_stage(self.__usd_context_name)
            if stage:
                prim = stage.GetPrimAtPath(prim_path)
                if prim:
                    model = self.model
                    time = model.get_as_floats('time') if model else None
                    time = Usd.TimeCode(time[0]) if time else Usd.TimeCode.Default()
                    # NOTE(review): 'time' is computed but never forwarded, so the
                    # decomposer samples at Usd.TimeCode.Default() — confirm intended.
                    self.__srt_decompose = SRTDecomposer(prim)
    def _on_began(self, model, *args, **kwargs):
        """Interaction start: seed the model with the camera's current state."""
        super()._on_began(model, *args, **kwargs)
        stage = _get_context_stage(self.__usd_context_name)
        if not stage:
            # No stage: disable every mode so the gesture becomes a no-op.
            # TODO: Could we forward this to adjust the viewport_api->omni.scene.ui ?
            model.set_ints('disable_tumble', [1])
            model.set_ints('disable_look', [1])
            model.set_ints('disable_pan', [1])
            model.set_ints('disable_zoom', [1])
            model.set_ints('disable_fly', [1])
            return
        cam_prim = stage.GetPrimAtPath(self.__prim_path)
        cam_imageable = UsdGeom.Imageable(cam_prim) if bool(cam_prim) else None
        if not cam_imageable or not cam_imageable.GetPrim().IsValid():
            raise RuntimeError('ViewportCameraManipulator with an invalid UsdGeom.Imageable or Usd.Prim')
        # Check if we should actually keep camera at identity and forward our movements to another object
        local_xform, parent_xform = _compute_local_transform(cam_imageable, Usd.TimeCode.Default())
        model.set_floats('initial_transform', _flatten_matrix(local_xform))
        model.set_floats('transform', _flatten_matrix(local_xform))
        # Express the stage up-axis in the camera parent's space unless forced to stage up.
        up_axis = UsdGeom.GetStageUpAxis(stage)
        if up_axis == UsdGeom.Tokens.x:
            up_axis = Gf.Vec3d(1, 0, 0)
        elif up_axis == UsdGeom.Tokens.y:
            up_axis = Gf.Vec3d(0, 1, 0)
        elif up_axis == UsdGeom.Tokens.z:
            up_axis = Gf.Vec3d(0, 0, 1)
        if not bool(carb.settings.get_settings().get("exts/omni.kit.manipulator.camera/forceStageUp")):
            up_axis = parent_xform.TransformDir(up_axis).GetNormalized()
        model.set_floats('up_axis', [up_axis[0], up_axis[1], up_axis[2]])
    @carb.profiler.profile
    def __vp1_cooperation(self, prim_path, time, usd_context_name: str, center_of_interest_end):
        """Sync camera target/position into any legacy Viewport-1 windows.

        Returns the (possibly re-based) center-of-interest for Viewport-2.
        Best-effort: any failure (e.g. viewport_legacy absent) is swallowed.
        """
        try:
            from omni.kit import viewport_legacy
            vp1_iface = viewport_legacy.get_viewport_interface()
            final_transform, coi_world, pos_world, cam_path = None, None, None, None
            for vp1_handle in vp1_iface.get_instance_list():
                vp1_window = vp1_iface.get_viewport_window(vp1_handle)
                if not vp1_window or (vp1_window.get_usd_context_name() != usd_context_name):
                    continue
                if not final_transform:
                    # Save the path's string representation
                    cam_path = prim_path.pathString
                    # We need to calculate world-space transform for VP-1, important for nested camera's
                    # TODO: UsdBBoxCache.ComputeWorldBound in compute_path_world_transform doesn't seem to work for non-geometry:
                    # final_transform = omni.usd.get_context(usd_context_name).compute_path_world_transform(cam_path)
                    # final_transform = Gf.Matrix4d(*final_transform)
                    # NOTE(review): UsdGeom.Imageable is constructed from an Sdf.Path
                    # here, not a Usd.Prim — verify this resolves as intended (errors
                    # are hidden by the enclosing except).
                    final_transform = UsdGeom.Imageable(prim_path).ComputeLocalToWorldTransform(time)
                    # center_of_interest_end is adjusted and returned for VP-2
                    center_of_interest_end = Gf.Vec3d(0, 0, -center_of_interest_end.GetLength())
                    # Pass world center-of-interest to VP-1 set_camera_target
                    coi_world = final_transform.Transform(center_of_interest_end)
                    # Pass world position to VP-1 set_camera_position
                    pos_world = final_transform.Transform(Gf.Vec3d(0, 0, 0))
                # False for first call to set target only, True for second to trigger radius re-calculation
                # This isn't particularly efficient; but 'has to be' for now due to some Viewport-1 internals
                vp1_window.set_camera_target(cam_path, coi_world[0], coi_world[1], coi_world[2], False)
                vp1_window.set_camera_position(cam_path, pos_world[0], pos_world[1], pos_world[2], True)
        except Exception:
            pass
        return center_of_interest_end
    @carb.profiler.profile
    def on_model_updated(self, item):
        """Persist model changes ('transform', 'current_aperture', animation
        state) to USD via kit commands, grouped for undo when appropriate."""
        # Handle case of inertia being applied through a new stage-open
        usd_context_name = self.__usd_context_name
        if usd_context_name is None or _get_context_stage(usd_context_name) is None:
            return
        model = self.model
        prim_path = self.__prim_path
        time = model.get_as_floats('time')
        time = Usd.TimeCode(time[0]) if time else Usd.TimeCode.Default()
        undoable = False
        def run_command(cmd_name, **kwargs):
            # Route through the undo system only for the final, undoable change;
            # intermediate drags execute the command directly.
            carb.profiler.begin(1, cmd_name)
            if undoable:
                commands.execute(cmd_name, **kwargs)
            else:
                commands.create(cmd_name, **kwargs).do()
            carb.profiler.end(1)
        try:
            if item == model.get_item('transform'):
                if self.__external_change_tracker:
                    self.__external_change_tracker.update(model, usd_context_name, prim_path)
                    self.__external_change_tracker.pause_tracking()
                # We are undoable on the final event if undo hasn't been disabled on the model
                undoable = _optional_bool(self.model, 'interaction_ended') and not _optional_bool(self.model, 'disable_undo')
                if undoable:
                    undo.begin_group()
                final_transform = Gf.Matrix4d(*model.get_as_floats('transform'))
                initial_transform = model.get_as_floats('initial_transform')
                initial_transform = Gf.Matrix4d(*initial_transform) if initial_transform else initial_transform
                had_transform_at_key = _optional_bool(self.model, 'had_transform_at_key')
                if self.__srt_decompose:
                    srt_deompose = self.__srt_decompose.update(final_transform)
                    run_command(
                        'TransformPrimSRTCommand',
                        path=prim_path,
                        new_translation=srt_deompose.translation,
                        new_rotation_euler=srt_deompose.rotation,
                        # new_scale=srt_deompose.scale,
                        # new_rotation_order=srt_deompose.rotation_order,
                        old_translation=srt_deompose.start_translation,
                        old_rotation_euler=srt_deompose.start_rotation,
                        # old_rotation_order=srt_deompose.start_rotation_order,
                        # old_scale=srt_deompose.start_scale,
                        time_code=time,
                        had_transform_at_key=had_transform_at_key,
                        usd_context_name=usd_context_name
                    )
                else:
                    run_command(
                        'TransformPrimCommand',
                        path=prim_path,
                        new_transform_matrix=final_transform,
                        old_transform_matrix=initial_transform,
                        time_code=time,
                        had_transform_at_key=had_transform_at_key,
                        usd_context_name=usd_context_name
                    )
                center_of_interest_start, center_of_interest_end = adjust_center_of_interest(model, initial_transform, final_transform)
                if center_of_interest_start and center_of_interest_end:
                    # See if we need to adjust center-of-interest to cooperate with Viewport-1, which can only do a 1 dimensional version
                    center_of_interest_end = self.__vp1_cooperation(prim_path, time, usd_context_name, center_of_interest_end)
                    run_command(
                        'ChangePropertyCommand',
                        prop_path=prim_path.AppendProperty(KIT_COI_ATTRIBUTE),
                        value=center_of_interest_end,
                        prev=center_of_interest_start,
                        usd_context_name=usd_context_name
                    )
            elif item == model.get_item('current_aperture'):
                # We are undoable on the final event if undo hasn't been disabled on the model
                undoable = _optional_bool(self.model, 'interaction_ended') and not _optional_bool(self.model, 'disable_undo')
                if undoable:
                    undo.begin_group()
                initial_aperture = model.get_as_floats('initial_aperture')
                current_aperture = model.get_as_floats('current_aperture')
                prop_names = ('horizontalAperture', 'verticalAperture')
                for initial_value, current_value, prop_name in zip(initial_aperture, current_aperture, prop_names):
                    run_command(
                        'ChangePropertyCommand',
                        prop_path=prim_path.AppendProperty(prop_name),
                        value=current_value,
                        prev=initial_value,
                        timecode=time,
                        usd_context_name=usd_context_name
                    )
            elif item == model.get_item('interaction_animating'):
                # Track external USD edits only while an animation (inertia) runs.
                interaction_animating = model.get_as_ints(item)
                if interaction_animating and interaction_animating[0]:
                    if not self.__external_change_tracker:
                        self.__external_change_tracker = ExternalUsdCameraChange(time)
                    self.__external_change_tracker.update(model, usd_context_name, prim_path)
                    self.__external_change_tracker.pause_tracking()
                elif self.__external_change_tracker:
                    self.__external_change_tracker.destroy()
                    self.__external_change_tracker = None
        finally:
            if undoable:
                undo.end_group()
            if self.__external_change_tracker:
                self.__external_change_tracker.start_tracking()
| 18,397 | Python | 44.539604 | 137 | 0.594717 |
omniverse-code/kit/exts/omni.kit.manipulator.camera/omni/kit/manipulator/camera/model.py | # Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
__all__ = ['CameraManipulatorModel']
from omni.ui import scene as sc
from pxr import Gf
from typing import Any, Callable, List, Sequence, Union
from .math import TransformAccumulator
from .animation import AnimationEventStream
import time
import carb.profiler
import carb.settings
ALMOST_ZERO = 1.e-4
def _flatten_matrix(matrix: Gf.Matrix4d):
return [matrix[0][0], matrix[0][1], matrix[0][2], matrix[0][3],
matrix[1][0], matrix[1][1], matrix[1][2], matrix[1][3],
matrix[2][0], matrix[2][1], matrix[2][2], matrix[2][3],
matrix[3][0], matrix[3][1], matrix[3][2], matrix[3][3]]
def _optional_floats(model: sc.AbstractManipulatorModel, item: str, default_value: Sequence[float] = None):
item = model.get_item(item)
if item:
values = model.get_as_floats(item)
if values:
return values
return default_value
def _optional_float(model: sc.AbstractManipulatorModel, item: str, default_value: float = 0):
item = model.get_item(item)
if item:
values = model.get_as_floats(item)
if values:
return values[0]
return default_value
def _optional_int(model: sc.AbstractManipulatorModel, item: str, default_value: int = 0):
item = model.get_item(item)
if item:
values = model.get_as_ints(item)
if values:
return values[0]
return default_value
def _optional_bool(model: sc.AbstractManipulatorModel, item: str, default_value: bool = False):
    # Booleans are stored as ints in the model, so simply reuse the int accessor.
    return _optional_int(model, item, default_value)
def _accumulate_values(model: sc.AbstractManipulatorModel, name: str, x: float, y: float, z: float):
item = model.get_item(name)
if item:
values = model.get_as_floats(item)
model.set_floats(item, [values[0] + x, values[1] + y, values[2] + z] if values else [x, y, z])
return item
def _scalar_or_vector(value: 'Sequence[float]'):
    """Promote a 1-, 2- or 3-component sequence to a Gf.Vec3d.

    A scalar is broadcast to all three axes; a 2-vector gets z=1.
    """
    count = len(value)
    if count == 1:
        return Gf.Vec3d(value[0], value[0], value[0])
    if count == 2:
        return Gf.Vec3d(value[0], value[1], 1)
    return Gf.Vec3d(value[0], value[1], value[2])
class ModelState:
    """Snapshot of pending camera motion (tumble / look / move / fly vectors).

    All-zero vectors are normalized to None so truthiness reflects "motion
    present". Expected component types are Gf.Vec3d (any 3-indexable works).
    """

    @staticmethod
    def __prune(vec):
        # Treat an all-zero vector as "no motion".
        if vec and (vec[0] == 0 and vec[1] == 0 and vec[2] == 0):
            return None
        return vec

    @staticmethod
    def __scaled(vec, alpha):
        # Scale by alpha and collapse to None when the result is all zeros.
        if not vec:
            return None
        scaled = tuple(component * alpha for component in vec)
        if scaled[0] == 0 and scaled[1] == 0 and scaled[2] == 0:
            return None
        return scaled

    def __init__(self, tumble: 'Gf.Vec3d' = None, look: 'Gf.Vec3d' = None, move: 'Gf.Vec3d' = None, fly: 'Gf.Vec3d' = None):
        self.__tumble = self.__prune(tumble)
        self.__look = self.__prune(look)
        self.__move = self.__prune(move)
        self.__fly = self.__prune(fly)

    def any_values(self):
        """Truthy when any motion component is present."""
        return self.__tumble or self.__look or self.__move or self.__fly

    def apply_alpha(self, alpha: float):
        """Return all four components scaled by *alpha* (inertia decay factor)."""
        return (self.__scaled(self.__tumble, alpha),
                self.__scaled(self.__look, alpha),
                self.__scaled(self.__move, alpha),
                self.__scaled(self.__fly, alpha))

    @property
    def tumble(self):
        return self.__tumble

    @property
    def look(self):
        return self.__look

    @property
    def move(self):
        return self.__move

    @property
    def fly(self):
        return self.__fly
class Velocity:
    """Integrates input into a damped velocity vector (camera inertia)."""

    def __init__(self, acceleration: Sequence[float], dampening: Sequence[float] = (10,), clamp_dt: float = 0.15):
        self.__velocity = Gf.Vec3d(0, 0, 0)
        self.__acceleration_rate = _scalar_or_vector(acceleration)
        self.__dampening = _scalar_or_vector(dampening)
        self.__clamp_dt = clamp_dt

    def apply(self, value: 'Gf.Vec3d', dt: float, alpha: float = 1):
        """Advance the velocity by *dt*, optionally accelerated by *value*.

        Returns the resulting displacement (velocity * dt).
        """
        # Events are not frame-locked and can arrive spuriously, so clamp the
        # maximum delta-time (too high a clamp value introduces lag).
        if (dt > 0) and (dt > self.__clamp_dt):
            dt = self.__clamp_dt
        if value:
            self.__velocity += (Gf.CompMult(value, self.__acceleration_rate) * alpha) * dt
        # Damping per axis, clamped so one step never removes more than 75%.
        damping = tuple(max(min(d * dt, 0.75), 0) for d in self.__dampening)
        self.__velocity += Gf.CompMult(-self.__velocity, Gf.Vec3d(*damping))
        # Snap to zero once the remaining motion is negligible.
        if Gf.Dot(self.__velocity, self.__velocity) < ALMOST_ZERO:
            self.__velocity = Gf.Vec3d(0, 0, 0)
        return self.__velocity * dt

    @staticmethod
    def create(model: 'sc.AbstractManipulatorModel', mode: str, clamp_dt: float = 0.15):
        """Build a Velocity from the '<mode>_acceleration' / '<mode>_dampening'
        model items; returns None when no acceleration is configured."""
        acceleration = _optional_floats(model, f'{mode}_acceleration')
        if acceleration is None:
            return None
        dampening = _optional_floats(model, f'{mode}_dampening')
        return Velocity(acceleration, dampening or (10, 10, 10), clamp_dt)
class Decay:
    """Stateless stand-in for Velocity: scales the input by alpha, ignoring dt."""

    def __init__(self):
        pass

    def apply(self, value: 'Gf.Vec3d', dt: float, alpha: float = 1):
        """Return value * alpha; falsy input (None or zero) yields None."""
        if value:
            return value * alpha
        return None
class CameraManipulatorModel(sc.AbstractManipulatorModel):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.__settings = carb.settings.get_settings()
self.__items = {
# 'view': (sc.AbstractManipulatorItem(), 16),
'projection': (sc.AbstractManipulatorItem(), 16),
'transform': (sc.AbstractManipulatorItem(), 16),
'orthographic': (sc.AbstractManipulatorItem(), 1),
'center_of_interest': (sc.AbstractManipulatorItem(), 3),
# Accumulated movement
'move': (sc.AbstractManipulatorItem(), 3),
'tumble': (sc.AbstractManipulatorItem(), 3),
'look': (sc.AbstractManipulatorItem(), 3),
'fly': (sc.AbstractManipulatorItem(), 3),
# Optional speed for world (pan, truck) and rotation (tumble, look) operation
# Can be set individually for x, y, z or as a scalar
'world_speed': (sc.AbstractManipulatorItem(), (3, 1)),
'move_speed': (sc.AbstractManipulatorItem(), (3, 1)),
'rotation_speed': (sc.AbstractManipulatorItem(), (3, 1)),
'tumble_speed': (sc.AbstractManipulatorItem(), (3, 1)),
'look_speed': (sc.AbstractManipulatorItem(), (3, 1)),
'fly_speed': (sc.AbstractManipulatorItem(), (3, 1)),
# Inertia enabled, and amoint of second to apply it for
'inertia_enabled': (sc.AbstractManipulatorItem(), 1),
'inertia_seconds': (sc.AbstractManipulatorItem(), 1),
# Power of ineratia decay (for an ease-out) 0 and 1 are linear
'inertia_decay': (sc.AbstractManipulatorItem(), 1),
# Acceleration and dampening values
'tumble_acceleration': (sc.AbstractManipulatorItem(), (3, 1)),
'look_acceleration': (sc.AbstractManipulatorItem(), (3, 1)),
'move_acceleration': (sc.AbstractManipulatorItem(), (3, 1)),
'fly_acceleration': (sc.AbstractManipulatorItem(), (3, 1)),
'tumble_dampening': (sc.AbstractManipulatorItem(), (3, 1)),
'look_dampening': (sc.AbstractManipulatorItem(), (3, 1)),
'move_dampening': (sc.AbstractManipulatorItem(), (3, 1)),
'fly_dampening': (sc.AbstractManipulatorItem(), (3, 1)),
'fly_mode_lock_view': (sc.AbstractManipulatorItem(), 1),
# Decimal precision of rotation operations
'rotation_precision': (sc.AbstractManipulatorItem(), 1),
# Mapping of units from input to world
'ndc_scale': (sc.AbstractManipulatorItem(), 3),
# Optional int-as-bool items
'disable_pan': (sc.AbstractManipulatorItem(), 1),
'disable_tumble': (sc.AbstractManipulatorItem(), 1),
'disable_look': (sc.AbstractManipulatorItem(), 1),
'disable_zoom': (sc.AbstractManipulatorItem(), 1),
'disable_fly': (sc.AbstractManipulatorItem(), 1),
'disable_undo': (sc.AbstractManipulatorItem(), 1),
'object_centric_movement': (sc.AbstractManipulatorItem(), 1),
'viewport_id': (sc.AbstractManipulatorItem(), 1),
# USD specific concepts
'up_axis': (sc.AbstractManipulatorItem(), 3),
'current_aperture': (sc.AbstractManipulatorItem(), 2),
'initial_aperture': (sc.AbstractManipulatorItem(), 2),
'had_transform_at_key': (sc.AbstractManipulatorItem(), 1),
'time': (sc.AbstractManipulatorItem(), 1),
# Internal signal for final application of the changes, use disable_undo for user-control
'interaction_ended': (sc.AbstractManipulatorItem(), 1), # Signal that undo should be applied
'interaction_active': (sc.AbstractManipulatorItem(), 1), # Signal that a gesture is manipualting camera
'interaction_animating': (sc.AbstractManipulatorItem(), 1), # Signal that an animation is manipulating camera
'center_of_interest_start': (sc.AbstractManipulatorItem(), 3),
'center_of_interest_picked': (sc.AbstractManipulatorItem(), 3),
'adjust_center_of_interest': (sc.AbstractManipulatorItem(), 1),
'initial_transform': (sc.AbstractManipulatorItem(), 16),
}
self.__values = {item: [] for item, _ in self.__items.values()}
self.__values[self.__items.get('look_speed')[0]] = [1, 0.5]
self.__values[self.__items.get('fly_speed')[0]] = [1]
self.__values[self.__items.get('inertia_seconds')[0]] = [0.5]
self.__values[self.__items.get('inertia_enabled')[0]] = [0]
# self.__values[self.__items.get('interaction_active')[0]] = [0]
# self.__values[self.__items.get('interaction_animating')[0]] = [0]
self.__settings_changed_subs = []
def read_inertia_setting(mode: str, setting_scale: float):
global_speed_key = f'/persistent/exts/omni.kit.manipulator.camera/{mode}Speed'
subscribe = self.__settings.subscribe_to_tree_change_events
self.__settings_changed_subs.append(
subscribe(global_speed_key,
lambda *args, **kwargs: self.__speed_setting_changed(*args, **kwargs,
mode=mode, setting_scale=setting_scale)),
)
self.__speed_setting_changed(None, None, carb.settings.ChangeEventType.CHANGED, mode, setting_scale)
accel = self.__settings.get(f'/exts/omni.kit.manipulator.camera/{mode}Acceleration')
damp = self.__settings.get(f'/exts/omni.kit.manipulator.camera/{mode}Dampening')
if accel is None or damp is None:
if accel is None and damp is not None:
pass
elif damp is None and accel is not None:
pass
return
self.__values[self.__items.get(f'{mode}_acceleration')[0]] = [accel]
self.__values[self.__items.get(f'{mode}_dampening')[0]] = [damp]
read_inertia_setting('fly', 1)
read_inertia_setting('look', 180)
read_inertia_setting('move', 1)
read_inertia_setting('tumble', 360)
self.__settings_changed_subs.append(
self.__settings.subscribe_to_node_change_events('/persistent/exts/omni.kit.manipulator.camera/flyViewLock',
self.__fly_mode_lock_view_changed)
)
self.__fly_mode_lock_view_changed(None, carb.settings.ChangeEventType.CHANGED)
self.__animation_key = id(self)
self.__flight_inertia_active = False
self.__last_applied = None
# Faster access for key-values looked up during animation
self.__move = self.__items.get('move')[0]
self.__tumble = self.__items.get('tumble')[0]
self.__look = self.__items.get('look')[0]
self.__fly = self.__items.get('fly')[0]
self.__transform = self.__items.get('transform')[0]
self.__projection = self.__items.get('projection')[0]
self.__center_of_interest = self.__items.get('center_of_interest')[0]
self.__adjust_center_of_interest = self.__items.get('adjust_center_of_interest')[0]
self.__inertia_enabled = self.__items.get('inertia_enabled')[0]
self.__inertia_seconds = self.__items.get('inertia_seconds')[0]
self.__tumble_velocity = None
self.__look_velocity = None
self.__move_velocity = None
self.__fly_velocity = None
self.__intertia_state = None
self.__anim_stream = None
self.__anim_stopped = 0
self.__mode = None
    def __speed_setting_changed(self, tree_item: carb.dictionary.Item, changed_item: carb.dictionary.Item,
                                event_type: carb.settings.ChangeEventType, mode: str, setting_scale: float = 1):
        """Mirror the persistent '<mode>Speed' setting into the '<mode>_speed' item."""
        # tree_item is None on the initial manual invocation from __init__.
        if tree_item is None:
            speed = self.__settings.get(f'/persistent/exts/omni.kit.manipulator.camera/{mode}Speed')
        else:
            speed = tree_item.get_dict()
        if speed:
            # Settings may hold a scalar or a per-axis sequence; normalize to a list.
            if (not isinstance(speed, tuple)) and (not isinstance(speed, list)):
                speed = [speed]
            self.__values[self.__items.get(f'{mode}_speed')[0]] = [float(x) / setting_scale for x in speed]
def __fly_mode_lock_view_changed(self, changed_item: carb.dictionary.Item, event_type: carb.settings.ChangeEventType):
model_key = self.__items.get('fly_mode_lock_view')[0]
setting_key = '/persistent/exts/omni.kit.manipulator.camera/flyViewLock'
self.__values[model_key] = [self.__settings.get(setting_key)]
    def __del__(self):
        # Defensive cleanup: release subscriptions even if destroy() was skipped.
        self.destroy()
    def destroy(self):
        """Stop any running animation and release all settings subscriptions.
        Safe to call more than once."""
        self.__destroy_animation()
        if self.__settings and self.__settings_changed_subs:
            for subscription in self.__settings_changed_subs:
                self.__settings.unsubscribe_to_change_events(subscription)
            self.__settings_changed_subs = None
        self.__settings = None
    def __destroy_animation(self):
        # Tear down the inertia/animation stream and clear the animating flag.
        if self.__anim_stream:
            self.__anim_stream.destroy()
            self.__anim_stream = None
        self.__mark_animating(0)
def __validate_arguments(self, name: Union[str, sc.AbstractManipulatorItem],
values: Sequence[Union[int, float]] = None) -> sc.AbstractManipulatorItem:
if isinstance(name, sc.AbstractManipulatorItem):
return name
item, expected_len = self.__items.get(name, (None, None))
if item is None:
raise KeyError(f"CameraManipulatorModel doesn't understand values of {name}")
if values and (len(values) != expected_len):
if (not isinstance(expected_len, tuple)) or (not len(values) in expected_len):
raise ValueError(f"CameraManipulatorModel {name} takes {expected_len} values, got {len(values)}")
return item
def get_item(self, name: str) -> sc.AbstractManipulatorItem():
return self.__items.get(name, (None, None))[0]
    def set_ints(self, item: Union[str, sc.AbstractManipulatorItem], values: Sequence[int]):
        """Store integer values for *item* (name or item), validating the count."""
        item = self.__validate_arguments(item, values)
        self.__values[item] = values
    def set_floats(self, item: Union[str, sc.AbstractManipulatorItem], values: Sequence[int]):
        """Store float values for *item* (name or item), validating the count."""
        item = self.__validate_arguments(item, values)
        self.__values[item] = values
    def get_as_ints(self, item: Union[str, sc.AbstractManipulatorItem]) -> List[int]:
        """Return the stored values for *item* (name or item) as a list of ints."""
        item = self.__validate_arguments(item)
        return self.__values[item]
    def get_as_floats(self, item: Union[str, sc.AbstractManipulatorItem]) -> List[float]:
        """Return the stored values for *item* (name or item) as a list of floats."""
        item = self.__validate_arguments(item)
        return self.__values[item]
    @carb.profiler.profile
    def _item_changed(self, item: Union[str, sc.AbstractManipulatorItem], delta_time: float = None, alpha: float = None):
        """Route a model-value change into camera motion.

        *item* may be a name, an item, or None; None signals that all pending
        tumble/look/move/fly values should be folded into the final 'transform'.
        *delta_time* and *alpha* are supplied only when ticked from the
        animation stream (inertia / flight).
        """
        # item == None is the signal to push all model values into a final matrix at 'transform'
        if item is not None:
            if not isinstance(item, sc.AbstractManipulatorItem):
                item = self.__items.get(item)
                item = item[0] if item else None
            # Either of these adjust the pixel-to-world mapping
            if item == self.__center_of_interest or item == self.__projection:
                # NOTE(review): Gf.Vec3d receives the list directly (no *unpack),
                # unlike other call sites in this class — confirm Gf accepts a sequence.
                self.calculate_pixel_to_world(Gf.Vec3d(self.get_as_floats(self.__center_of_interest)))
                super()._item_changed(item)
                return
        if self.__anim_stream and delta_time is None:
            # If this is the end of an interaction (mouse up), return and let animation/inertia continue as is.
            if _optional_int(self, 'interaction_ended', 0) or (self.__intertia_state is None):
                return
            # If inertia is active, look values should be passed through; so as camera is drifting the look-rotation
            # is still applied. If there is no look applied, then inertia is killed for any other movement.
            look = self.get_as_floats(self.__look) if self.__flight_inertia_active else None
            if look:
                # Destroy the look-velocity correction; otherwise look will lag as camera drifts through inertia
                self.__look_velocity = None
            else:
                self._kill_external_animation(False)
                return
        # Drain each pending delta (tumble/look/move/fly) and clear it in the model
        tumble, look, move, fly = None, None, None, None
        if item is None or item == self.__tumble:
            tumble = self.get_as_floats(self.__tumble)
            if tumble:
                tumble = Gf.Vec3d(*tumble)
                self.set_floats(self.__tumble, None)
        if item is None or item == self.__look:
            look = self.get_as_floats(self.__look)
            if look:
                look = Gf.Vec3d(*look)
                self.set_floats(self.__look, None)
        if item is None or item == self.__move:
            move = self.get_as_floats(self.__move)
            if move:
                move = Gf.Vec3d(*move)
                self.set_floats(self.__move, None)
        if item is None or item == self.__fly:
            fly = self.get_as_floats(self.__fly)
            if fly:
                fly = Gf.Vec3d(*fly)
                fly_speed = _optional_floats(self, 'fly_speed')
                if fly_speed:
                    # 'fly_speed' may be a scalar or a per-axis triple
                    if len(fly_speed) == 1:
                        fly_speed = Gf.Vec3d(fly_speed[0], fly_speed[0], fly_speed[0])
                    else:
                        fly_speed = Gf.Vec3d(*fly_speed)
                    # Flight speed is multiplied by 5 for VP-1 compatibility
                    fly = Gf.CompMult(fly, fly_speed * 5)
        self.__last_applied = ModelState(tumble, look, move, fly)
        if (delta_time is not None) or self.__last_applied.any_values():
            self._apply_state(self.__last_applied, delta_time, alpha)
        else:
            super()._item_changed(item)
    def calculate_pixel_to_world(self, pos):
        """Update the model's 'ndc_scale' from the projection at world position *pos*.

        Stores the world-space extent of the full NDC range at pos's depth.
        """
        projection = Gf.Matrix4d(*self.get_as_floats(self.__projection))
        top_left, bot_right = self._calculate_pixel_to_world(pos, projection, projection.GetInverse())
        x = top_left[0] - bot_right[0]
        y = top_left[1] - bot_right[1]
        # For NDC-z we don't want to use the clip range which could be huge,
        # so average the X-Y scales instead.
        self.set_floats('ndc_scale', [x, y, (x + y) * 0.5])
def _calculate_pixel_to_world(self, pos, projection, inv_projection):
ndc = projection.Transform(pos)
top_left = inv_projection.Transform(Gf.Vec3d(-1, -1, ndc[2]))
bot_right = inv_projection.Transform(Gf.Vec3d(1, 1, ndc[2]))
return (top_left, bot_right)
    def _set_animation_key(self, key: str):
        # Key used to group this model's callbacks in the AnimationEventStream.
        self.__animation_key = key
def _start_external_events(self, flight_mode: bool = False):
# If flight mode is already doing inertia, do nothing.
# This is for the case where right-click for WASD navigation end with a mouse up and global inertia is enabled.
if self.__flight_inertia_active and not flight_mode:
return False
# Quick check that inertia is enabled for any mode other than flight
if not flight_mode:
inertia_modes = self.__settings.get('/exts/omni.kit.manipulator.camera/inertiaModesEnabled')
len_inertia_enabled = len(inertia_modes) if inertia_modes else 0
if len_inertia_enabled == 0:
return
if len_inertia_enabled == 1:
self.__inertia_modes = [inertia_modes[0], 0, 0, 0]
elif len_inertia_enabled == 2:
self.__inertia_modes = [inertia_modes[0], inertia_modes[1], 0, 0]
elif len_inertia_enabled == 3:
self.__inertia_modes = [inertia_modes[0], inertia_modes[1], inertia_modes[2], 0]
else:
self.__inertia_modes = inertia_modes
else:
self.__inertia_modes = [1, 0, 1, 0]
# Setup the animation state
self.__anim_stopped = 0
self.__intertia_state = None
self.__flight_inertia_active = flight_mode
# Pull more infor from inertai settings fro what is to be created
create_tumble = self.__inertia_modes[1]
create_look = flight_mode or self.__inertia_modes[2]
create_move = self.__inertia_modes[3]
create_fly = flight_mode
if self.__anim_stream:
# Handle case where key was down, then lifted, then pushed again by recreating look_velocity / flight correction.
create_tumble = create_tumble and not self.__tumble_velocity
create_look = create_look and not self.__look_velocity
create_move = create_move and not self.__move_velocity
create_fly = False
clamp_dt = self.__settings.get('/ext/omni.kit.manipulator.camera/clampUpdates') or 0.15
if create_look:
self.__look_velocity = Velocity.create(self, 'look', clamp_dt)
if create_tumble:
self.__tumble_velocity = Velocity.create(self, 'tumble', clamp_dt)
if create_move:
self.__move_velocity = Velocity.create(self, 'move', clamp_dt)
if create_fly:
self.__fly_velocity = Velocity.create(self, 'fly', clamp_dt)
# If any velocities are valid, then setup an animation to apply it.
if self.__tumble_velocity or self.__look_velocity or self.__move_velocity or self.__fly_velocity:
# Only set up the animation in flight-mode, let _stop_external_events set it up otherwise
if flight_mode and not self.__anim_stream:
self.__anim_stream = AnimationEventStream.get_instance()
self.__anim_stream.add_animation(self._apply_state_tick, self.__animation_key)
return True
if self.__anim_stream:
anim_stream, self.__anim_stream = self.__anim_stream, None
anim_stream.destroy()
return False
    def _stop_external_events(self, flight_mode: bool = False):
        """End the interactive phase; hand any remaining motion to inertia.

        Records the stop time so _apply_state_time can decay the motion.
        """
        # Setup animation for inertia in non-flight mode
        if not flight_mode and not self.__anim_stream:
            # Keep only the deltas whose mode is inertia-enabled
            tumble, look, move = None, None, None
            if self.__last_applied and (self.__tumble_velocity or self.__look_velocity or self.__move_velocity or self.__fly_velocity):
                if self.__tumble_velocity and self.__inertia_modes[1]:
                    tumble = self.__last_applied.tumble
                if self.__look_velocity and self.__inertia_modes[2]:
                    look = self.__last_applied.look
                if self.__move_velocity and self.__inertia_modes[3]:
                    move = self.__last_applied.move
            if tumble or look or move:
                # Something to coast on: attach to the shared animation stream
                self.__last_applied = ModelState(tumble, look, move, self.__last_applied.fly)
                self.__anim_stream = AnimationEventStream.get_instance()
                self.__anim_stream.add_animation(self._apply_state_tick, self.__animation_key)
            else:
                # Nothing to coast on: drop all velocity trackers and bail
                self.__tumble_velocity = None
                self.__look_velocity = None
                self.__move_velocity = None
                self.__fly_velocity = None
                self.__intertia_state = None
                return
        self.__anim_stopped = time.time()
        self.__intertia_state = self.__last_applied
        self.__mark_animating(1)
    def __mark_animating(self, interaction_animating: int):
        # Push the 'interaction_animating' flag (0/1) into the model and notify listeners.
        item, _ = self.__items.get('interaction_animating', (None, None))
        self.set_ints(item, [interaction_animating])
        super()._item_changed(item)
    def _apply_state_time(self, dt: float, apply_fn: Callable):
        """Apply one animation tick, decaying *dt* by the inertia envelope.

        Returns True while the animation should continue, False once inertia
        has fully decayed (at which point all velocity state is cleared).
        """
        alpha = 1
        if self.__anim_stopped:
            # Interaction has ended: compute the inertia decay factor in [0, 1]
            now = time.time()
            inertia_enabled = _optional_int(self, 'inertia_enabled', 0)
            inertia_seconds = _optional_float(self, 'inertia_seconds', 0)
            if inertia_enabled and inertia_seconds > 0:
                alpha = 1.0 - ((now - self.__anim_stopped) / inertia_seconds)
                if alpha > ALMOST_ZERO:
                    # Optional exponential shaping of the linear falloff
                    decay = self.__settings.get('/exts/omni.kit.manipulator.camera/inertiaDecay')
                    decay = _optional_int(self, 'inertia_decay', decay)
                    alpha = pow(alpha, decay) if decay else 1
                else:
                    alpha = 0
            else:
                alpha = 0
            if alpha == 0:
                # Fully decayed: tear down the stream before the final apply
                if self.__anim_stream:
                    anim_stream, self.__anim_stream = self.__anim_stream, None
                    anim_stream.destroy()
                self.set_ints('interaction_ended', [1])
        apply_fn(dt * alpha, 1)
        if alpha == 0:
            # Final tick: clear the flags and all velocity/inertia state
            self.set_ints('interaction_ended', [0])
            self.__mark_animating(0)
            self.__tumble_velocity = None
            self.__look_velocity = None
            self.__move_velocity = None
            self.__fly_velocity = None
            self.__intertia_state = None
            self.__flight_inertia_active = False
            return False
        return True
    def _apply_state_tick(self, dt: float = None):
        """Animation-stream callback: apply one frame of inertia/flight motion."""
        keep_anim = True
        istate = self.__intertia_state
        if istate:
            if self.__flight_inertia_active:
                # See _item_changed, but during an inertia move, look should still be applied (but without any velocity)
                look = self.get_as_floats(self.__look)
                if look:
                    self.set_floats(self.__look, None)
                state = ModelState(None, look, None, istate.fly)
            else:
                # Coast on the saved deltas, per-mode, unless fresher input exists
                tumble = (self.get_as_floats(self.__tumble) or istate.tumble) if self.__inertia_modes[1] else None
                look = (self.get_as_floats(self.__look) or istate.look) if self.__inertia_modes[2] else None
                move = (self.get_as_floats(self.__move) or istate.move) if self.__inertia_modes[3] else None
                state = ModelState(tumble, look, move)
            keep_anim = self._apply_state_time(dt, lambda dt, alpha: self._apply_state(state, dt, alpha))
        else:
            keep_anim = self._apply_state_time(dt, lambda dt, alpha: self._item_changed(None, dt, alpha))
        if not keep_anim and self.__anim_stream:
            self.__destroy_animation()
    def _kill_external_animation(self, kill_stream: bool = True, initial_transform = None):
        """Abort any running inertia/flight animation and clear its state.

        When *initial_transform* is given, the model's transform is reset to it.
        """
        if kill_stream:
            self.__destroy_animation()
        # self._stop_external_events()
        self.__tumble_velocity = None
        self.__look_velocity = None
        self.__move_velocity = None
        self.__fly_velocity = None
        self.__intertia_state = None
        self.__flight_inertia_active = False
        # Reset internal transform if provided
        if initial_transform:
            self.set_floats('transform', initial_transform)
            self.set_floats('initial_transform', initial_transform)
    @carb.profiler.profile
    def _apply_state(self, state: ModelState, dt: float = None, alpha: float = None):
        """Compose one frame of tumble/look/move/fly deltas onto 'transform'.

        Each delta is first passed through its Velocity tracker (when present)
        so inertia can scale it by *dt*/*alpha*; the result is accumulated via
        TransformAccumulator and written back to the model.
        """
        up_axis = _optional_floats(self, 'up_axis')
        rotation_precision = _optional_int(self, 'rotation_precision', 5)
        last_transform = Gf.Matrix4d(*self.get_as_floats(self.__transform))
        xforms = TransformAccumulator(last_transform)
        center_of_interest = None
        tumble = state.tumble
        if self.__tumble_velocity:
            tumble = self.__tumble_velocity.apply(tumble, dt, alpha)
        if tumble:
            center_of_interest = Gf.Vec3d(*self.get_as_floats(self.__center_of_interest))
            # Rounding keeps repeated small rotations numerically stable
            tumble = Gf.Vec3d(round(tumble[0], rotation_precision), round(tumble[1], rotation_precision), round(tumble[2], rotation_precision))
            final_xf = xforms.get_tumble(tumble, center_of_interest, up_axis)
        else:
            final_xf = Gf.Matrix4d(1)
        look = state.look
        if self.__look_velocity:
            look = self.__look_velocity.apply(look, dt, alpha)
        if look:
            look = Gf.Vec3d(round(look[0], rotation_precision), round(look[1], rotation_precision), round(look[2], rotation_precision))
            final_xf = final_xf * xforms.get_look(look, up_axis)
        move = state.move
        if self.__move_velocity:
            move = self.__move_velocity.apply(move, dt, alpha)
        if move:
            final_xf = xforms.get_translation(move) * final_xf
            # Moving along camera-Z changes the distance to the center-of-interest
            adjust_coi = move[2] != 0
        else:
            adjust_coi = False
        fly = None if _optional_int(self, 'disable_fly', 0) else state.fly
        if self.__fly_velocity:
            fly = self.__fly_velocity.apply(fly, dt, alpha)
        if fly:
            if _optional_bool(self, 'fly_mode_lock_view', False):
                # View-locked flight: translate in a frame that ignores the
                # camera's roll/pitch so WASD stays world-plane aligned.
                decomp_rot = last_transform.ExtractRotation().Decompose(Gf.Vec3d.ZAxis(), Gf.Vec3d.YAxis(), Gf.Vec3d.XAxis())
                rot_z = Gf.Rotation(Gf.Vec3d.ZAxis(), decomp_rot[0])
                rot_y = Gf.Rotation(Gf.Vec3d.YAxis(), decomp_rot[1])
                rot_x = Gf.Rotation(Gf.Vec3d.XAxis(), decomp_rot[2])
                last_transform_tr = Gf.Matrix4d().SetTranslate(last_transform.ExtractTranslation())
                last_transform_rt_0 = Gf.Matrix4d().SetRotate(rot_x)
                last_transform_rt_1 = Gf.Matrix4d().SetRotate(rot_y * rot_z)
                # Swap/flip the forward and vertical axes for Z-up stages or a
                # camera looking back along +Z
                if up_axis[2]:
                    fly[1], fly[2] = -fly[2], fly[1]
                elif Gf.Dot(Gf.Vec3d.ZAxis(), last_transform.TransformDir((0, 0, 1))) < 0:
                    fly[1], fly[2] = -fly[1], -fly[2]
                flight_xf = xforms.get_translation(fly)
                last_transform = last_transform_rt_0 * flight_xf * last_transform_rt_1 * last_transform_tr
            else:
                final_xf = xforms.get_translation(fly) * final_xf
        transform = final_xf * last_transform
        # If zooming out in Z, adjust the center-of-interest and pixel-to-world in 'ndc_scale'
        self.set_ints(self.__adjust_center_of_interest, [adjust_coi])
        if adjust_coi:
            center_of_interest = center_of_interest or Gf.Vec3d(*self.get_as_floats(self.__center_of_interest))
            coi = Gf.Matrix4d(*self.get_as_floats('initial_transform')).Transform(center_of_interest)
            coi = transform.GetInverse().Transform(coi)
            self.calculate_pixel_to_world(coi)
        self.set_floats(self.__transform, _flatten_matrix(transform))
        super()._item_changed(self.__transform)
def _broadcast_mode(self, mode: str):
if mode == self.__mode:
return
viewport_id = _optional_int(self, 'viewport_id', None)
if viewport_id is None:
return
# Send a signal that contains the viewport_id and mode (carb requires a homogenous array, so as strings)
self.__settings.set("/exts/omni.kit.manipulator.camera/viewportMode", [str(viewport_id), mode])
self.__mode = mode
| 32,559 | Python | 45.781609 | 143 | 0.589729 |
omniverse-code/kit/exts/omni.kit.manipulator.camera/omni/kit/manipulator/camera/manipulator.py | # Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
__all__ = ['CameraManipulatorBase', 'adjust_center_of_interest']
from omni.ui import scene as sc
from .gestures import build_gestures
from .model import CameraManipulatorModel, _optional_bool, _flatten_matrix
from pxr import Gf
# Common math to adjust the center-of-interest
def adjust_center_of_interest(model: CameraManipulatorModel, initial_transform: Gf.Matrix4d, final_transform: Gf.Matrix4d):
    """Recompute the camera-space center-of-interest after a transform change.

    Returns (start, end) center-of-interest vectors in camera space, or
    (None, None) when no adjustment was requested by the model.
    """
    # Adjust the center-of-interest if requested.
    # For object-centric movement we always adjust it if an object was hit
    object_centric = _optional_bool(model, 'object_centric_movement')
    coi_picked = model.get_as_floats('center_of_interest_picked') if object_centric else False
    adjust_center_of_interest = (object_centric and coi_picked) or _optional_bool(model, 'adjust_center_of_interest')
    if not adjust_center_of_interest:
        return None, None
    # When adjusting the center of interest we'll operate on a direction and length (in camera-space)
    # Which helps to not introduce -drift- as we jump through the different spaces to update it.
    # Final camera position
    world_cam_pos = final_transform.Transform(Gf.Vec3d(0, 0, 0))
    # center_of_interest_start is in camera-space
    center_of_interest_start = Gf.Vec3d(*model.get_as_floats('center_of_interest_start'))
    # Save the direction
    center_of_interest_dir = center_of_interest_start.GetNormalized()
    if coi_picked:
        # Keep original center-of-interest direction, but adjust its length to the picked position
        world_coi = Gf.Vec3d(coi_picked[0], coi_picked[1], coi_picked[2])
        # TODO: Setting to keep subsequent movement focused on screen-center or move it to the object.
        if False:
            # Save the center-of-interest to the hit-point by adjusting direction
            center_of_interest_dir = final_transform.GetInverse().Transform(world_coi).GetNormalized()
    else:
        # Move center-of-interest to world space at initial transform
        world_coi = initial_transform.Transform(center_of_interest_start)
    # Now get the length between final camera-position and the world-space-coi,
    # and apply that to the direction.
    center_of_interest_end = center_of_interest_dir * (world_cam_pos - world_coi).GetLength()
    return center_of_interest_start, center_of_interest_end
# Base class, responsible for building up the gestures
class CameraManipulatorBase(sc.Manipulator):
    """Base camera manipulator: owns the model and builds gestures on an sc.Screen."""
    def __init__(self, bindings: dict = None, model: sc.AbstractManipulatorModel = None, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._screen = None
        # Provide some defaults
        self.model = model or CameraManipulatorModel()
        self.bindings = bindings
        # Provide a slot for a user to fill in with a GestureManager but don't use anything by default
        self.manager = None
        self.gestures = []
        self.__transform = None
        self.__gamepad = None
    def _on_began(self, model: CameraManipulatorModel, *args, **kwargs):
        # Hook for subclasses: called when a gesture begins (default no-op).
        pass
    def on_build(self):
        # Need to hold a reference to this or the sc.Screen would be destroyed when out of scope
        self.__transform = sc.Transform()
        with self.__transform:
            self._screen = sc.Screen(gestures=self.gestures or build_gestures(self.model, self.bindings, self.manager, self._on_began))
    def destroy(self):
        """Release the gamepad, scene-graph objects, and the model (idempotent)."""
        if self.__gamepad:
            self.__gamepad.destroy()
            self.__gamepad = None
        if self.__transform:
            self.__transform.clear()
            self.__transform = None
        self._screen = None
        if hasattr(self.model, 'destroy'):
            self.model.destroy()
    @property
    def gamepad_enabled(self) -> bool:
        # True while a GamePadController is attached to this manipulator.
        return self.__gamepad is not None
    @gamepad_enabled.setter
    def gamepad_enabled(self, value: bool):
        if value:
            if not self.__gamepad:
                # Imported lazily so gamepad support is only paid for when enabled
                from .gamepad import GamePadController
                self.__gamepad = GamePadController(self)
        elif self.__gamepad:
            self.__gamepad.destroy()
            self.__gamepad = None
# We have all the imports already, so provide a simple omni.ui.scene camera manipulator that one can use.
# Takes an omni.ui.scene view and center-of-interest and applies model changes to that view
class SceneViewCameraManipulator(CameraManipulatorBase):
    """Camera manipulator that drives an omni.ui.scene SceneView's 'view' matrix."""
    def __init__(self, center_of_interest, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # World/camera-space focal point the gestures orbit and zoom around
        self.__center_of_interest = center_of_interest
    def _on_began(self, model: CameraManipulatorModel, mouse):
        model.set_floats('center_of_interest', [self.__center_of_interest[0], self.__center_of_interest[1], self.__center_of_interest[2]])
        # Orthographic cameras cannot tumble or look
        if _optional_bool(model, 'orthographic'):
            model.set_ints('disable_tumble', [1])
            model.set_ints('disable_look', [1])
    def on_model_updated(self, item):
        model = self.model
        if item == model.get_item('transform'):
            final_transform = Gf.Matrix4d(*model.get_as_floats(item))
            initial_transform = Gf.Matrix4d(*model.get_as_floats('initial_transform'))
            # Adjust our center-of-interest
            coi_start, coi_end = adjust_center_of_interest(model, initial_transform, final_transform)
            if coi_end:
                self.__center_of_interest = coi_end
            # omni.ui.scene.SceneView.CameraModel expects 'view', but we operate on 'transform'
            # The following will push our transform changes into the SceneView.model.view
            sv_model = self.scene_view.model
            view = sv_model.get_item('view')
            sv_model.set_floats(view, _flatten_matrix(final_transform.GetInverse()))
            sv_model._item_changed(view)
| 6,222 | Python | 46.503816 | 138 | 0.674381 |
omniverse-code/kit/exts/omni.kit.manipulator.camera/omni/kit/manipulator/camera/gesturebase.py | # Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
__all__ = ['CameraGestureBase']
from omni.ui import scene as sc
from .model import _accumulate_values, _optional_bool, _optional_floats, _flatten_matrix
from .flight_mode import get_keyboard_input
import carb.settings
from pxr import Gf
import time
from typing import Callable, Sequence
# Base class for camera transform manipulation/gesture
#
class CameraGestureBase(sc.DragGesture):
    """Base drag-gesture for camera transform manipulation.

    Subclasses implement on_mouse_move() and accumulate deltas into the shared
    camera model; this base handles begin/change/end bookkeeping, optional
    flight (keyboard) input, and pushing an undoable final transform.
    """
    def __init__(self, model: sc.AbstractManipulatorModel, configure_model: Callable = None, name: str = None, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Gesture name defaults to the subclass name (e.g. 'LookGesture')
        self.name = name if name else self.__class__.__name__
        self.model = model
        # XXX: Need a manipulator on_began method
        self.__configure_model = configure_model
        self.__prev_mouse = None
        self.__prev_mouse_time = None
        self.__keyboard = None
        self.__fly_active = None
    def destroy(self):
        self.model = None
        self._disable_flight()
        super().destroy()
    # --- read-only accessors over the shared camera model ---
    @property
    def center_of_interest(self):
        # NOTE(review): passes the list straight to Gf.Vec3d (no *unpack),
        # unlike initial_transform below — confirm Gf accepts a sequence here.
        return Gf.Vec3d(self.model.get_as_floats('center_of_interest'))
    @property
    def initial_transform(self):
        return Gf.Matrix4d(*self.model.get_as_floats('initial_transform'))
    @property
    def last_transform(self):
        return Gf.Matrix4d(*self.model.get_as_floats('transform'))
    @property
    def projection(self):
        return Gf.Matrix4d(*self.model.get_as_floats('projection'))
    @property
    def orthographic(self):
        return _optional_bool(self.model, 'orthographic')
    @property
    def disable_pan(self):
        return _optional_bool(self.model, 'disable_pan')
    @property
    def disable_tumble(self):
        return _optional_bool(self.model, 'disable_tumble')
    @property
    def disable_look(self):
        return _optional_bool(self.model, 'disable_look')
    @property
    def disable_zoom(self):
        return _optional_bool(self.model, 'disable_zoom')
    @property
    def intertia(self):
        # Inertia duration in seconds; 0 when disabled. (Property name kept
        # as-is for backward compatibility despite the spelling.)
        inertia = _optional_bool(self.model, 'inertia_enabled')
        if not inertia:
            return 0
        inertia = _optional_floats(self.model, 'inertia_seconds')
        return inertia[0] if inertia else 0
    @property
    def up_axis(self):
        # Assume Y-up if not specified
        # NOTE(review): reads 'up_axis' via _optional_bool while other code
        # treats it as a float triple — confirm the intended semantics.
        return _optional_bool(self.model, 'up_axis', 1)
    @staticmethod
    def __conform_speed(values):
        # Normalize a 1/2/3-component speed into a 3-tuple; default is (1, 1, 1)
        if values:
            vlen = len(values)
            if vlen == 1:
                return (values[0], values[0], values[0])
            if vlen == 2:
                return (values[0], values[1], 0)
            return values
        return (1, 1, 1)
    def get_rotation_speed(self, secondary):
        # Component-wise product of the global 'rotation_speed' and a secondary
        # per-gesture speed item (e.g. 'look_speed' / 'tumble_speed')
        model = self.model
        rotation_speed = self.__conform_speed(_optional_floats(model, 'rotation_speed'))
        secondary_speed = self.__conform_speed(_optional_floats(model, secondary))
        return (rotation_speed[0] * secondary_speed[0],
                rotation_speed[1] * secondary_speed[1],
                rotation_speed[2] * secondary_speed[2])
    @property
    def tumble_speed(self):
        return self.get_rotation_speed('tumble_speed')
    @property
    def look_speed(self):
        return self.get_rotation_speed('look_speed')
    @property
    def move_speed(self):
        return self.__conform_speed(_optional_floats(self.model, 'move_speed'))
    @property
    def world_speed(self):
        # World-space speed scaled by the model's pixel-to-world ('ndc_scale')
        model = self.model
        ndc_scale = self.__conform_speed(_optional_floats(model, 'ndc_scale'))
        world_speed = self.__conform_speed(_optional_floats(model, 'world_speed'))
        return Gf.CompMult(world_speed, ndc_scale)
    def _disable_flight(self):
        if self.__keyboard:
            self.__keyboard.destroy()
    def _setup_keyboard(self, model, exit_mode: bool) -> bool:
        """Set up keyboard input and return whether a manipulator mode (fly) was broadcast to consumers."""
        self.__keyboard = get_keyboard_input(model, self.__keyboard)
        if self.__keyboard:
            # If the keyboard is active, broadcast that fly mode has been entered
            if self.__keyboard.active:
                self.__fly_active = True
                model._broadcast_mode("fly")
                return True
            # Check if fly mode was exited
            if self.__fly_active:
                exit_mode = self.name.replace('Gesture', '').lower() if exit_mode else ""
                model._broadcast_mode(exit_mode)
                return True
        return False
    # omni.ui.scene Gesture interface
    # We abstract on top of this due to asynchronous picking, in that we
    # don't want a gesture to begin until the object/world-space query has completed
    # This 'delay' could be a setting, but will wind up 'snapping' from the transition
    # from a Camera's centerOfInterest to the new world-space position
    def on_began(self, mouse: Sequence[float] = None):
        model = self.model
        # Setup flight mode and possibly broadcast that mode to any consumers
        was_brodcast = self._setup_keyboard(model, False)
        # If fly mode was not broadcast, then broadcast this gesture's mode
        if not was_brodcast:
            # LookGesture => look
            manip_mode = self.name.replace('Gesture', '').lower()
            model._broadcast_mode(manip_mode)
        mouse = mouse if mouse else self.sender.gesture_payload.mouse
        if self.__configure_model:
            self.__configure_model(model, mouse)
        self.__prev_mouse = mouse
        xf = model.get_as_floats('transform')
        if xf:
            # Save an immutable copy of transform for undoable end-event
            model.set_floats('initial_transform', xf.copy())
        coi = model.get_as_floats('center_of_interest')
        if coi:
            # Save an immutable copy of center_of_interest for end adjustment if desired (avoiding space conversions)
            model.set_floats('center_of_interest_start', coi.copy())
            model._item_changed('center_of_interest')
        model.set_ints('interaction_active', [1])
    def on_changed(self, mouse: Sequence[float] = None):
        # Forward the mouse delta since the previous event to the subclass
        self._setup_keyboard(self.model, True)
        self.__last_change = time.time()
        cur_mouse = mouse if mouse else self.sender.gesture_payload.mouse
        mouse_moved = (cur_mouse[0] - self.__prev_mouse[0], cur_mouse[1] - self.__prev_mouse[1])
        # if (mouse_moved[0] != 0) or (mouse_moved[1] != 0):
        self.__prev_mouse = cur_mouse
        self.on_mouse_move(mouse_moved)
    def on_ended(self):
        model = self.model
        final_position = True
        # Broadcast that the camera manipulation mode is now none
        model._broadcast_mode("")
        if self.__keyboard:
            # Flight keys may keep the camera moving after mouse-up
            self.__keyboard = self.__keyboard.end()
            final_position = self.__keyboard is None
        self.__prev_mouse = None
        self.__prev_mouse_time = None
        if final_position:
            # Let inertia run if enabled, then commit the final transform as undoable
            if model._start_external_events(False):
                model._stop_external_events(False)
            self.__apply_as_undoable()
            model.set_ints('adjust_center_of_interest', [])
            model.set_floats('current_aperture', [])
        model.set_ints('interaction_active', [0])
        # model.set_floats('center_of_interest_start', [])
        # model.set_floats('center_of_interest_picked', [])
    def dirty_items(self, model: sc.AbstractManipulatorModel):
        """Return the items changed during this interaction, or None."""
        # NOTE(review): the model parameter is immediately shadowed by self.model.
        model = self.model
        cur_item = model.get_item('transform')
        if model.get_as_floats('initial_transform') != model.get_as_floats(cur_item):
            return [cur_item]
    def __apply_as_undoable(self):
        # Re-fire the final transform change with 'interaction_ended' set so
        # listeners can record it as a single undoable operation.
        model = self.model
        dirty_items = self.dirty_items(model)
        if dirty_items:
            model.set_ints('interaction_ended', [1])
            try:
                for item in dirty_items:
                    model._item_changed(item)
            except:
                raise
            finally:
                model.set_ints('interaction_ended', [0])
    def _accumulate_values(self, key: str, x: float, y: float, z: float):
        # Add a delta into the model; while flight keys are held, force a full
        # (item=None) update so mouse motion composes with keyboard motion.
        item = _accumulate_values(self.model, key, x, y, z)
        if item:
            self.model._item_changed(None if self.__keyboard else item)
| 8,715 | Python | 35.316667 | 128 | 0.616753 |
omniverse-code/kit/exts/omni.kit.manipulator.camera/omni/kit/manipulator/camera/gamepad.py | # Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
from .model import _accumulate_values
import omni.kit.app
from omni.ui import scene as sc
import carb
import asyncio
from typing import Dict, List, Sequence, Set
# Per-action-mode settings (one value per camera action mode), e.g.:
#   /exts/omni.kit.manipulator.camera/gamePad/fly/deadZone
#   /exts/omni.kit.manipulator.camera/gamePad/look/deadZone
ACTION_MODE_SETTING_KEYS = {"scale", "deadZone"}
# Root path under which all gamepad manipulator settings live
ACTION_MODE_SETTING_ROOT = "/exts/omni.kit.manipulator.camera/gamePad"
# Per-action-trigger settings (one value per physical input), e.g.:
#   /exts/omni.kit.manipulator.camera/gamePad/button/a/scale
# NOTE(review): not referenced in the visible portion of this module — confirm usage.
ACTION_TRIGGER_SETTING_KEYS = {"scale"}
__all__ = ['GamePadController']
class ValueMapper:
    """Read-only binding of one gamepad input to a camera action.

    Records the action mode the input drives (e.g. 'fly', 'look'), the trigger
    it came from, and the component indices of the action value it feeds.
    """

    def __init__(self, mode: str, trigger: str, index: int, sub_index: int):
        # Keep everything in a single immutable tuple; the properties below
        # expose the individual fields under their original names.
        self.__state = (mode, trigger, index, sub_index)

    @property
    def mode(self) -> str:
        return self.__state[0]

    @property
    def trigger(self) -> str:
        return self.__state[1]

    @property
    def index(self) -> int:
        return self.__state[2]

    @property
    def sub_index(self) -> int:
        return self.__state[3]
class ModeSettings:
    """Per-action-mode gamepad tuning (scale and dead-zone), synced with carb settings.

    Subscribes to .../gamePad/<action_mode>/{scale,deadZone} and caches the
    values so get_value() is cheap to call per gamepad event.
    """
    def __init__(self, action_mode: str, settings: carb.settings.ISettings):
        self.__scale: float = 1.0        # output multiplier
        self.__dead_zone: float = 1e-04  # magnitude below which the input is at rest
        self.__action_mode = action_mode
        self.__setting_subs: Sequence[carb.settings.SubscriptionId] = []
        for setting_key in ACTION_MODE_SETTING_KEYS:
            sp = self.__get_setting_path(setting_key)
            self.__setting_subs.append(
                settings.subscribe_to_node_change_events(sp, lambda *args, k=setting_key: self.__setting_changed(*args, setting_key=k))
            )
            # Prime the cached value from the current setting state
            self.__setting_changed(None, carb.settings.ChangeEventType.CHANGED, setting_key=setting_key)
    def __del__(self):
        # Best-effort unsubscribe on garbage collection
        self.destroy()
    def __get_setting_path(self, setting_key: str):
        # e.g. /exts/omni.kit.manipulator.camera/gamePad/fly/deadZone
        return f"{ACTION_MODE_SETTING_ROOT}/{self.__action_mode}/{setting_key}"
    def __setting_changed(self, item: carb.dictionary.Item, event_type: carb.settings.ChangeEventType, setting_key: str):
        # Re-read the changed setting into the cached attribute
        if event_type == carb.settings.ChangeEventType.CHANGED:
            setting_path = self.__get_setting_path(setting_key)
            if setting_key == "scale":
                self.__scale = carb.settings.get_settings().get(setting_path)
                if self.__scale is None:
                    self.__scale = 1.0
            elif setting_key == "deadZone":
                # Use absolute value, no negative dead-zones and clamp to 1.0
                dead_zone = carb.settings.get_settings().get(setting_path)
                self.__dead_zone = min(abs(dead_zone) or 1e-04, 1.0) if (dead_zone is not None) else 0.0
    def destroy(self, settings: carb.settings.ISettings = None):
        """Unsubscribe all setting-change subscriptions (idempotent)."""
        settings = settings or carb.settings.get_settings()
        for setting_sub in self.__setting_subs:
            settings.unsubscribe_to_change_events(setting_sub)
        self.__setting_subs = tuple()
    def get_value(self, value: float, axis_idx: int) -> float:
        """Map a raw input magnitude through the dead-zone and scale.

        The usable range above the dead-zone is re-normalized to [0, 1] and
        multiplied by the configured scale. *axis_idx* is kept for interface
        compatibility but is not currently used.
        """
        # BUGFIX: removed an unreachable alternative implementation that
        # followed the return statement (dead code).
        value = (value - self.__dead_zone) / (1.0 - self.__dead_zone)
        value = max(0, min(1, value))
        return value * self.__scale
def _limit_camera_velocity(value: float, settings: carb.settings.ISettings, context_name: str):
cam_limit = settings.get('/exts/omni.kit.viewport.window/cameraSpeedLimit')
if context_name in cam_limit:
vel_min = settings.get('/persistent/app/viewport/camVelocityMin')
if vel_min is not None:
value = max(vel_min, value)
vel_max = settings.get('/persistent/app/viewport/camVelocityMax')
if vel_max is not None:
value = min(vel_max, value)
return value
def _adjust_flight_speed(xyz_value: Sequence[float]):
    """Scale the persistent fly-speed setting up or down from a stick axis.

    The Y component of *xyz_value* selects direction (sign) and rate
    (magnitude); the result is clamped by _limit_camera_velocity.
    """
    axis = xyz_value[1]
    if axis == 0.0:
        return
    import math
    settings = carb.settings.get_settings()
    speed = settings.get('/persistent/app/viewport/camMoveVelocity') or 1
    multiplier = settings.get('/persistent/app/viewport/camVelocityScalerMultAmount') or 1.1
    # Keep the multiplier strictly above 1 and modulate it by the stick deflection
    multiplier = 1.0 + (max(multiplier, 1.0 + 1e-8) - 1.0) * abs(axis)
    speed = (speed / multiplier) if axis < 0 else (speed * multiplier)
    if math.isfinite(speed) and (speed > 1e-8):
        speed = _limit_camera_velocity(speed, settings, 'gamepad')
        settings.set('/persistent/app/viewport/camMoveVelocity', speed)
class GamePadController:
    """Drives a camera manipulator from carb gamepad input.

    Raw gamepad events are compressed to one value per input, mapped to
    manipulator actions (e.g. 'fly', 'look', 'speed') through settings under
    ACTION_MODE_SETTING_ROOT, accumulated into per-action xyz values, and
    pushed into the manipulator's model.
    """
    def __init__(self, manipulator: sc.Manipulator):
        self.__manipulator: sc.Manipulator = manipulator
        # Per-gamepad event-subscription ids, managed in __gamepad_connection.
        self.__gp_event_sub: Dict[carb.input.Gamepad, int] = {}
        # Latest value seen per input since the last __apply_events pass.
        self.__compressed_events: Dict[int, float] = {}
        # Accumulated xyz value per action mode (e.g. 'fly', 'look').
        self.__action_modes: Dict[str, List[float]] = {}
        # App-update subscription used to re-apply state every frame while active.
        self.__app_event_sub: carb.events.ISubscription = None
        self.__mode_settings: Dict[str, ModeSettings] = {}
        self.__value_actions: Dict[carb.input.GamepadInput, ValueMapper] = {}
        self.__setting_subs: Sequence[carb.settings.SubscriptionId] = []
        # Some button presses need synthetic events because unlike keyboard input, carb gamepad doesn't repeat.
        # event 1 left pressed: value = 0.5
        # event 2 right pressed: value = 0.5
        # these should cancel, but there is no notification of the left event until it changes from 0.5
        # This is all handled in __gamepad_event
        trigger_synth = {carb.input.GamepadInput.RIGHT_TRIGGER, carb.input.GamepadInput.LEFT_TRIGGER}
        shoulder_synth = {carb.input.GamepadInput.RIGHT_SHOULDER, carb.input.GamepadInput.LEFT_SHOULDER}
        self.__synthetic_state_init = {
            carb.input.GamepadInput.RIGHT_TRIGGER: trigger_synth,
            carb.input.GamepadInput.LEFT_TRIGGER: trigger_synth,
            carb.input.GamepadInput.RIGHT_SHOULDER: shoulder_synth,
            carb.input.GamepadInput.LEFT_SHOULDER: shoulder_synth,
        }
        self.__synthetic_state = self.__synthetic_state_init.copy()
        # Build the input->action table now, and subscribe to gamepad connections.
        self.__init_gamepad_action(None, carb.settings.ChangeEventType.CHANGED)
        self.__gp_connect_sub = self._iinput.subscribe_to_gamepad_connection_events(self.__gamepad_connection)

    def __init_gamepad_action(self, item: carb.dictionary.Item, event_type: carb.settings.ChangeEventType):
        """(Re)build the GamepadInput -> ValueMapper table from settings.

        Also called as a settings change callback; on first run it subscribes
        to changes of every per-input action setting.
        """
        if event_type != carb.settings.ChangeEventType.CHANGED:
            return
        self.__value_actions: Dict[carb.input.GamepadInput, ValueMapper] = {}
        settings = carb.settings.get_settings()
        # Only create the setting subscriptions once (first invocation).
        create_subs = not bool(self.__setting_subs)
        gamepad_action_paths = []
        gamepad_input_names = ["rightStick", "leftStick", "dPad", "trigger", "shoulder", "button/a", "button/b", "button/x", "button/y"]
        for gamepad_input in gamepad_input_names:
            action_setting_path = f"{ACTION_MODE_SETTING_ROOT}/{gamepad_input}/action"
            gamepad_action_paths.append(action_setting_path)
            if create_subs:
                self.__setting_subs.append(
                    settings.subscribe_to_node_change_events(action_setting_path, self.__init_gamepad_action)
                )
        # TODO: Maybe need more configurable/robust action mapping

        def action_mapping_4(action_mode: str):
            # Quad input (stick/dpad): returns (mode, axis-pair, +/- slot per direction).
            action_modes = action_mode.split(".")
            if len(action_modes) != 1:
                carb.log_error(f"Action mapping '{action_mode}' for quad input is invalid, using '{action_modes[0]}'")
                action_mode = action_modes[0]
            if action_mode == "look":
                return action_mode, (0, 1), (0, 1, 0, 1)
            return action_mode, (0, 2), (1, 0, 1, 0)

        def action_mapping_2(action_mode: str):
            # Dual input (trigger/shoulder pair): 'mode.axis' with x/y/z axis suffix.
            action_modes = action_mode.split(".")
            if len(action_modes) != 2:
                action_modes = (action_modes[0], "x")
                carb.log_error(f"Action mapping '{action_mode}' for dual input is invalid, using '{action_modes[0]}.x'")
            axis = {'x': 0, 'y': 1, 'z': 2}.get(action_modes[1], 0)
            return action_modes[0], axis, (0, 1)

        def action_mapping_1(action_mode: str):
            # Single button: 'mode.axis', always mapped into the positive slot.
            action_modes = action_mode.split(".")
            if len(action_modes) != 2:
                action_modes = (action_modes[0], "x")
                carb.log_error(f"Action mapping '{action_mode}' for dual input is invalid, using '{action_modes[0]}.x'")
            axis = {'x': 0, 'y': 1, 'z': 2}.get(action_modes[1], 0)
            return action_modes[0], axis, 0

        # Go through the list of named events and setup the action based on its value
        right_stick_action = settings.get(gamepad_action_paths[0])
        if right_stick_action:
            right_stick_action, axis, sub_idx = action_mapping_4(right_stick_action)
            self.__value_actions[carb.input.GamepadInput.RIGHT_STICK_LEFT] = ValueMapper(right_stick_action, gamepad_input_names[0], axis[0], sub_idx[0])
            self.__value_actions[carb.input.GamepadInput.RIGHT_STICK_RIGHT] = ValueMapper(right_stick_action, gamepad_input_names[0], axis[0], sub_idx[1])
            self.__value_actions[carb.input.GamepadInput.RIGHT_STICK_UP] = ValueMapper(right_stick_action, gamepad_input_names[0], axis[1], sub_idx[2])
            self.__value_actions[carb.input.GamepadInput.RIGHT_STICK_DOWN] = ValueMapper(right_stick_action, gamepad_input_names[0], axis[1], sub_idx[3])
        left_stick_action = settings.get(gamepad_action_paths[1])
        if left_stick_action:
            left_stick_action, axis, sub_idx = action_mapping_4(left_stick_action)
            self.__value_actions[carb.input.GamepadInput.LEFT_STICK_LEFT] = ValueMapper(left_stick_action, gamepad_input_names[1], axis[0], sub_idx[0])
            self.__value_actions[carb.input.GamepadInput.LEFT_STICK_RIGHT] = ValueMapper(left_stick_action, gamepad_input_names[1], axis[0], sub_idx[1])
            self.__value_actions[carb.input.GamepadInput.LEFT_STICK_UP] = ValueMapper(left_stick_action, gamepad_input_names[1], axis[1], sub_idx[2])
            self.__value_actions[carb.input.GamepadInput.LEFT_STICK_DOWN] = ValueMapper(left_stick_action, gamepad_input_names[1], axis[1], sub_idx[3])
        dpad_action = settings.get(gamepad_action_paths[2])
        if dpad_action:
            dpad_action, axis, sub_idx = action_mapping_4(dpad_action)
            self.__value_actions[carb.input.GamepadInput.DPAD_LEFT] = ValueMapper(dpad_action, gamepad_input_names[2], axis[0], sub_idx[0])
            self.__value_actions[carb.input.GamepadInput.DPAD_RIGHT] = ValueMapper(dpad_action, gamepad_input_names[2], axis[0], sub_idx[1])
            self.__value_actions[carb.input.GamepadInput.DPAD_UP] = ValueMapper(dpad_action, gamepad_input_names[2], axis[1], sub_idx[2])
            self.__value_actions[carb.input.GamepadInput.DPAD_DOWN] = ValueMapper(dpad_action, gamepad_input_names[2], axis[1], sub_idx[3])
        trigger_action = settings.get(gamepad_action_paths[3])
        if trigger_action:
            trigger_action, axis, sub_idx = action_mapping_2(trigger_action)
            self.__value_actions[carb.input.GamepadInput.RIGHT_TRIGGER] = ValueMapper(trigger_action, gamepad_input_names[3], axis, sub_idx[0])
            self.__value_actions[carb.input.GamepadInput.LEFT_TRIGGER] = ValueMapper(trigger_action, gamepad_input_names[3], axis, sub_idx[1])
        shoulder_action = settings.get(gamepad_action_paths[4])
        if shoulder_action:
            shoulder_action, axis, sub_idx = action_mapping_2(shoulder_action)
            self.__value_actions[carb.input.GamepadInput.RIGHT_SHOULDER] = ValueMapper(shoulder_action, gamepad_input_names[4], axis, sub_idx[0])
            self.__value_actions[carb.input.GamepadInput.LEFT_SHOULDER] = ValueMapper(shoulder_action, gamepad_input_names[4], axis, sub_idx[1])
        button_action = settings.get(gamepad_action_paths[5])
        if button_action:
            button_action, axis, sub_idx = action_mapping_1(button_action)
            self.__value_actions[carb.input.GamepadInput.A] = ValueMapper(button_action, gamepad_input_names[5], axis, sub_idx)
        button_action = settings.get(gamepad_action_paths[6])
        if button_action:
            button_action, axis, sub_idx = action_mapping_1(button_action)
            self.__value_actions[carb.input.GamepadInput.B] = ValueMapper(button_action, gamepad_input_names[6], axis, sub_idx)
        button_action = settings.get(gamepad_action_paths[7])
        if button_action:
            button_action, axis, sub_idx = action_mapping_1(button_action)
            self.__value_actions[carb.input.GamepadInput.X] = ValueMapper(button_action, gamepad_input_names[7], axis, sub_idx)
        button_action = settings.get(gamepad_action_paths[8])
        if button_action:
            button_action, axis, sub_idx = action_mapping_1(button_action)
            self.__value_actions[carb.input.GamepadInput.Y] = ValueMapper(button_action, gamepad_input_names[8], axis, sub_idx)
        # Lazily create the ModeSettings object for every mode/trigger in use.
        for value_mapper in self.__value_actions.values():
            action_mode = value_mapper.mode
            if self.__mode_settings.get(action_mode) is None:
                self.__mode_settings[action_mode] = ModeSettings(action_mode, settings)
            action_trigger = value_mapper.trigger
            if self.__mode_settings.get(action_trigger) is None:
                self.__mode_settings[action_trigger] = ModeSettings(action_trigger, settings)

    def __del__(self):
        self.destroy()

    @property
    def _iinput(self):
        """Acquire the carb input interface (fetched per use)."""
        return carb.input.acquire_input_interface()

    async def __apply_events(self):
        """Fold compressed per-input events into per-action xyz values and apply them."""
        # Grab the events to apply and reset the state to empty
        events, self.__compressed_events = self.__compressed_events, {}
        # Reset the synthetic state
        self.__synthetic_state = self.__synthetic_state_init.copy()
        if not events:
            return
        manipulator = self.__manipulator
        if not manipulator:
            return
        model = manipulator.model
        manipulator._on_began(model, None)
        # Map the action to +/- values per x, y, z components
        action_modes: Dict[str, Dict[int, List[float]]] = {}
        for input, value in events.items():
            action = self.__value_actions.get(input)
            if not action:
                continue
            # Must exist, KeyError otherwise
            mode_seting = self.__mode_settings[action.mode]
            trigger_setting = self.__mode_settings[action.trigger]
            # Get the dict for this action storing +/- values per x, y, z
            pos_neg_value_dict = action_modes.get(action.mode) or {}
            # Get the +/- values for the x, y, z component
            pos_neg_values = pos_neg_value_dict.get(action.index) or [0, 0]
            # Scale the value by the action's scaling factor
            value = mode_seting.get_value(value, action.index)
            # Scale the value by the trigger's scaling factor
            value = trigger_setting.get_value(value, action.index)
            # Store the +/- value into the proper slot: '+' into 0, '-' into 1
            pos_neg_values[action.sub_index] += value
            # Store back into the dict mapping x, y, z to +/- values
            pos_neg_value_dict[action.index] = pos_neg_values
            # Store back into the dict storing the +/- values per x, y, z into the action
            action_modes[action.mode] = pos_neg_value_dict
        # Collapse the +/- values per individual action and x, y, z into a single total
        for action_mode, pos_neg_value_dict in action_modes.items():
            # Some components may not have been touched but need to preserve last value
            xyz_value = self.__action_modes.get(action_mode) or [0, 0, 0]
            for xyz_index, pos_neg_value in pos_neg_value_dict.items():
                xyz_value[xyz_index] = pos_neg_value[0] - pos_neg_value[1]
            # Apply model speed to anything but fly (that is handled by model itself)
            if action_mode != "fly":
                model_speed = model.get_item(f"{action_mode}_speed")
                if model_speed is not None:
                    model_speed = model.get_as_floats(model_speed)
                    if model_speed is not None:
                        for i in range(len(model_speed)):
                            xyz_value[i] *= model_speed[i]
            # Store the final values
            self.__action_modes[action_mode] = xyz_value
        # Prune any actions that now do nothing (has 0 for x, y, and z)
        self.__action_modes = {
            action_mode: xyz_value for action_mode, xyz_value in self.__action_modes.items() if (xyz_value[0] or xyz_value[1] or xyz_value[2])
        }
        has_data: bool = bool(self.__action_modes)
        if has_data:
            self.__apply_gamepad_state()
        # Start/stop the per-frame update subscription when the model supports it.
        if hasattr(model, '_start_external_events'):
            if has_data:
                self.___start_external_events(model)
            else:
                self.__stop_external_events(model)

    def ___start_external_events(self, model):
        """Begin per-frame application of gamepad state (no-op if already running)."""
        if self.__app_event_sub:
            return
        _broadcast_mode = getattr(model, '_broadcast_mode', None)
        if _broadcast_mode:
            _broadcast_mode("gamepad")
        self.__app_event_sub = omni.kit.app.get_app().get_update_event_stream().create_subscription_to_pop(
            self.__apply_gamepad_state,
            name=f"omni.kit.manipulator.camera.GamePadController.{id(self)}",
            # order=omni.kit.app.UPDATE_ORDER_PYTHON_ASYNC_FUTURE_END_UPDATE
        )
        model._start_external_events(True)

    def __stop_external_events(self, model):
        """Tear down the per-frame subscription and clear accumulated action state."""
        if self.__app_event_sub:
            _broadcast_mode = getattr(model, '_broadcast_mode', None)
            if _broadcast_mode:
                _broadcast_mode("")
            self.__app_event_sub = None
            self.__action_modes = {}
            model._stop_external_events(True)

    def __apply_gamepad_state(self, *args, **kwargs):
        """Push the accumulated per-action xyz values into the manipulator model.

        Also used directly as the app-update subscription callback.
        """
        manipulator = self.__manipulator
        model = manipulator.model
        # manipulator._on_began(model, None)
        for action_mode, xyz_value in self.__action_modes.items():
            if action_mode == "fly":
                model.set_floats("fly", xyz_value)
                continue
            elif action_mode == "speed":
                _adjust_flight_speed(xyz_value)
                continue
            item = _accumulate_values(model, action_mode, xyz_value[0], xyz_value[1], xyz_value[2])
            if item:
                model._item_changed(item)

    def __gamepad_event(self, event: carb.input.GamepadEvent):
        """Record one raw gamepad event and schedule an async apply pass."""
        event_input = event.input
        self.__compressed_events[event_input] = event.value
        # Gamepad does not get repeat events, so on certain button presses there needs to be a 'synthetic' event
        # that represents the inverse-key (left/right) based on its last/current state.
        synth_state = self.__synthetic_state.get(event.input)
        if synth_state:
            for synth_input in synth_state:
                del self.__synthetic_state[synth_input]
                if synth_input != event_input:
                    self.__compressed_events[synth_input] = self._iinput.get_gamepad_value(event.gamepad, synth_input)
        asyncio.ensure_future(self.__apply_events())

    def __gamepad_connection(self, event: carb.input.GamepadConnectionEvent):
        """Subscribe/unsubscribe per-gamepad event handlers on connect/disconnect."""
        e_type = event.type
        e_gamepad = event.gamepad
        if e_type == carb.input.GamepadConnectionEventType.DISCONNECTED:
            e_gamepad_sub = self.__gp_event_sub.get(e_gamepad)
            if e_gamepad_sub:
                self._iinput.unsubscribe_to_gamepad_events(e_gamepad, e_gamepad_sub)
                del self.__gp_event_sub[e_gamepad]
            pass
        elif e_type == carb.input.GamepadConnectionEventType.CONNECTED:
            if self.__gp_event_sub.get(e_gamepad):
                carb.log_error("Gamepad connected event, but already subscribed")
                return
            gp_event_sub = self._iinput.subscribe_to_gamepad_events(e_gamepad, self.__gamepad_event)
            if gp_event_sub:
                self.__gp_event_sub[e_gamepad] = gp_event_sub

    def destroy(self):
        """Release all subscriptions and detach from the manipulator (idempotent)."""
        iinput = self._iinput
        settings = carb.settings.get_settings()
        # Remove gamepad connected subscriptions
        if self.__gp_connect_sub:
            iinput.unsubscribe_to_gamepad_connection_events(self.__gp_connect_sub)
            self.__gp_connect_sub = None
        # Remove gamepad event subscriptions
        for gamepad, gamepad_sub in self.__gp_event_sub.items():
            iinput.unsubscribe_to_gamepad_events(gamepad, gamepad_sub)
        self.__gp_event_sub = {}
        # Remove any pending state on the model
        model = self.__manipulator.model if self.__manipulator else None
        if model:
            self.__stop_external_events(model)
        self.__manipulator = None
        # Remove any settings subscriptions
        for setting_sub in self.__setting_subs:
            settings.unsubscribe_to_change_events(setting_sub)
        self.__setting_subs = []
        # Destroy any mode/action specific settings
        for action_mode, mode_settings in self.__mode_settings.items():
            mode_settings.destroy(settings)
        self.__mode_settings = {}
| 22,082 | Python | 47.427631 | 154 | 0.623675 |
omniverse-code/kit/exts/omni.kit.manipulator.camera/omni/kit/manipulator/camera/gestures.py | # Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
__all__ = ['build_gestures', 'PanGesture', 'TumbleGesture', 'LookGesture', 'ZoomGesture']
from omni.ui import scene as sc
from .gesturebase import CameraGestureBase
from pxr import Gf
import carb
from typing import Callable
# Default mouse bindings: gesture class name -> 'Modifier... MouseButton' string,
# parsed by build_gestures ('Any' matches any modifier state).
kDefaultKeyBindings = {
    'PanGesture': 'Any MiddleButton',
    'TumbleGesture': 'Alt LeftButton',
    'ZoomGesture': 'Alt RightButton',
    'LookGesture': 'RightButton'
}
def build_gestures(model: sc.AbstractManipulatorModel,
                   bindings: dict = None,
                   manager: sc.GestureManager = None,
                   configure_model: Callable = None):
    """Instantiate the camera gestures described by ``bindings``.

    Each binding maps a gesture class name (looked up in this module's
    globals) to a string of zero or more modifier names followed by a
    mouse-button name, e.g. 'Alt LeftButton'. Falls back to
    kDefaultKeyBindings when ``bindings`` is empty/None. Returns the list
    of constructed gesture instances.
    """
    button_table = {
        'LeftButton': 0,
        'RightButton': 1,
        'MiddleButton': 2
    }
    modifier_table = {
        'Shift': carb.input.KEYBOARD_MODIFIER_FLAG_SHIFT,
        'Ctrl': carb.input.KEYBOARD_MODIFIER_FLAG_CONTROL,
        'Alt': carb.input.KEYBOARD_MODIFIER_FLAG_ALT,
        'Super': carb.input.KEYBOARD_MODIFIER_FLAG_SUPER,
        'Any': 0xffffffff,
    }

    def _parse_binding(binding_str: str):
        # Last token is the mouse button; all preceding tokens are modifiers.
        tokens = binding_str.split(' ')
        mouse_button = button_table.get(tokens.pop())
        modifier_bits = 0
        for token in tokens:
            bit = modifier_table.get(token)
            if not bit:
                raise RuntimeError(f'Unparseable binding: {binding_str}')
            modifier_bits |= bit
        return (mouse_button, modifier_bits)

    gesture_list = []
    for name, binding in (bindings or kDefaultKeyBindings).items():
        gesture_cls = globals().get(name)
        if not gesture_cls:
            carb.log_warn(f'Gesture "{name}" was not found for key-binding: "{binding}"')
            continue
        mouse_button, modifier_bits = _parse_binding(binding)
        gesture_list.append(gesture_cls(model, configure_model, mouse_button=mouse_button, modifiers=modifier_bits, manager=manager))
    return gesture_list
class PanGesture(CameraGestureBase):
    """Pan (truck) the camera in its view plane from a mouse drag."""
    def on_mouse_move(self, mouse_moved):
        if self.disable_pan:
            return
        world_speed = self.world_speed
        move_speed = self.move_speed
        # mouse_moved is normalized; half-speed scaling per axis.
        delta_x = mouse_moved[0] * 0.5 * world_speed[0] * move_speed[0]
        delta_y = mouse_moved[1] * 0.5 * world_speed[1] * move_speed[1]
        self._accumulate_values('move', delta_x, delta_y, 0)
class TumbleGesture(CameraGestureBase):
    """Tumble (orbit) the camera around its center of interest."""
    def on_mouse_move(self, mouse_moved):
        if self.disable_tumble:
            return
        # Mouse moved is [-1,1], so a full drag across the viewport is a 180 tumble
        speed = self.tumble_speed
        yaw = mouse_moved[0] * speed[0] * -90
        pitch = mouse_moved[1] * speed[1] * 90
        self._accumulate_values('tumble', yaw, pitch, 0)
class LookGesture(CameraGestureBase):
    """Rotate the camera in place (first-person look)."""
    def on_mouse_move(self, mouse_moved):
        if self.disable_look:
            return
        # Mouse moved is [-1,1], so a full drag across the viewport is a 180 look
        speed = self.look_speed
        yaw = mouse_moved[0] * speed[0] * -90
        pitch = mouse_moved[1] * speed[1] * 90
        self._accumulate_values('look', yaw, pitch, 0)
class OrthoZoomAperture():
    """Zoom an orthographic camera by shrinking/growing its aperture values."""
    def __init__(self, model: sc.AbstractManipulatorModel, apertures):
        # Work on a private copy; the model is only updated in apply().
        self.__apertures = apertures.copy()

    def apply(self, model: sc.AbstractManipulatorModel, distance: float):
        """Adjust both aperture components by ``distance`` and push to the model."""
        # TODO ortho-speed
        for axis in (0, 1):
            self.__apertures[axis] -= distance * 2
        model.set_floats('current_aperture', self.__apertures)
        model._item_changed(model.get_item('current_aperture'))

    def dirty_items(self, model: sc.AbstractManipulatorModel):
        """Return [current_aperture item] when it differs from the initial aperture."""
        cur_ap = model.get_item('current_aperture')
        changed = model.get_as_floats('initial_aperture') != model.get_as_floats(cur_ap)
        return [cur_ap] if changed else None
class OrthoZoomProjection():
    """Zoom an orthographic camera by scaling its projection matrix directly.

    Used by ZoomGesture when the model exposes a projection but no aperture.
    """
    def __init__(self, model: sc.AbstractManipulatorModel, projection):
        # Work on a private copy; the model is only updated in apply().
        self.__projection = projection.copy()

    def apply(self, model: sc.AbstractManipulatorModel, distance: float):
        """Shrink/grow the orthographic frustum by ``distance`` and push the
        updated projection into the model."""
        # TODO ortho-speed
        distance /= 3.0
        # Recover frustum width/height (and center offsets) from the projection terms.
        rml = (2.0 / self.__projection[0])
        tmb = (2.0 / self.__projection[5])
        aspect = tmb / rml
        rpl = rml * -self.__projection[12]
        tpb = tmb * self.__projection[13]
        rml -= distance
        tmb -= distance * aspect
        rpl += distance
        tpb += distance
        self.__projection[0] = 2.0 / rml
        self.__projection[5] = 2.0 / tmb
        # NOTE: recentering via the offset terms is intentionally disabled:
        # self.__projection[12] = -rpl / rml
        # self.__projection[13] = tpb / tmb
        model.set_floats('projection', self.__projection)
        # Trigger recomputation of ndc_speed
        model._item_changed(model.get_item('projection'))

    def dirty_items(self, model: sc.AbstractManipulatorModel):
        """Always report the projection item as dirty.

        Fix: the original body had an unconditional ``return [proj]`` followed
        by unreachable comparison code; that dead code compared the item's
        floats against themselves (it could never report a difference), so it
        is removed while preserving the effective always-dirty behavior.
        """
        return [model.get_item('projection')]
class ZoomGesture(CameraGestureBase):
    """Dolly the camera (perspective) or adjust aperture/projection (orthographic)."""
    def dirty_items(self, model: sc.AbstractManipulatorModel):
        # Orthographic zoom dirties aperture/projection items, not the transform.
        return super().dirty_items(model) if not self.__orth_zoom else self.__orth_zoom.dirty_items(model)

    def __setup_ortho_zoom(self):
        """Pick an orthographic zoom strategy: aperture if available, else projection."""
        apertures = self.model.get_as_floats('initial_aperture')
        if apertures:
            self.__orth_zoom = OrthoZoomAperture(self.model, apertures)
            return True
        projection = self.model.get_as_floats('projection')
        if projection:
            self.__orth_zoom = OrthoZoomProjection(self.model, projection)
            return True
        carb.log_warn("Orthographic zoom needs a projection or aperture")
        return False

    def on_began(self, *args, **kwargs):
        super().on_began(*args, **kwargs)
        # Setup an orthographic movement (aperture adjustment) if needed
        self.__orth_zoom = False
        if self.orthographic:
            self.__setup_ortho_zoom()
        # self.model.set_ints('adjust_center_of_interest', [1])
        # Zoom into center of view or mouse interest; the `if False` toggle
        # currently pins this to None, so the direction is recomputed per move.
        self.__direction = Gf.Vec3d(self.center_of_interest.GetNormalized()) if False else None

    def on_mouse_move(self, mouse_moved):
        if self.disable_zoom:
            return
        # Compute length/radius from gesture start (1.41421356 ~= sqrt(2)).
        distance = (mouse_moved[0] + mouse_moved[1]) * self.world_speed.GetLength() * 1.41421356
        distance *= self.move_speed[2]
        if self.__orth_zoom:
            self.__orth_zoom.apply(self.model, distance)
            return
        # Zoom into view-center or current mouse/world interest
        direction = self.__direction if self.__direction else Gf.Vec3d(self.center_of_interest.GetNormalized())
        amount = direction * distance
        self._accumulate_values('move', amount[0], amount[1], amount[2])
| 7,324 | Python | 36.372449 | 119 | 0.604178 |
omniverse-code/kit/exts/omni.kit.manipulator.camera/omni/kit/manipulator/camera/tests/test_manipulator_camera.py | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
__all__ = ['TestManipulatorCamera', 'TestFlightMode']
from omni.ui.tests.test_base import OmniUiTest
from pathlib import Path
from omni.ui import scene as sc
from omni.ui import color as cl
import carb
import omni.kit
import omni.kit.app
import omni.ui as ui
from pxr import Gf
from omni.kit.manipulator.camera.manipulator import CameraManipulatorBase, SceneViewCameraManipulator, adjust_center_of_interest
import omni.kit.ui_test as ui_test
from carb.input import MouseEventType, KeyboardEventType, KeyboardInput
# Extension data directory, resolved through carb tokens.
CURRENT_PATH = Path(carb.tokens.get_tokens_interface().resolve("${omni.kit.manipulator.camera}/data"))
# Size of the off-screen test window and its center in UI coordinates.
TEST_WIDTH, TEST_HEIGHT = 500, 500
TEST_UI_CENTER = ui_test.Vec2(TEST_WIDTH / 2, TEST_HEIGHT / 2)
def _flatten_matrix(matrix: Gf.Matrix4d):
    """Return the 16 entries of a 4x4 matrix as a flat row-major list."""
    return [matrix[row][col] for row in range(4) for col in range(4)]
class SimpleGrid():
    """A flat grid of scene lines on the XZ plane, centered on the origin."""
    def __init__(self, lineCount: float = 100, lineStep: float = 10, thicknes: float = 1, color: ui.color = ui.color(0.25)):
        self.__transform = ui.scene.Transform()
        extent = lineCount * lineStep
        with self.__transform:
            for index in range(lineCount * 2 + 1):
                offset = (index - lineCount) * lineStep
                # Line parallel to Z at x = offset.
                ui.scene.Line(
                    (offset, 0, -extent), (offset, 0, extent),
                    color=color, thickness=thicknes,
                )
                # Line parallel to X at z = offset.
                ui.scene.Line(
                    (-extent, 0, offset), (extent, 0, offset),
                    color=color, thickness=thicknes,
                )
class SimpleOrigin():
    """Red/green/blue axis lines of the given length drawn at the world origin."""
    def __init__(self, length: float = 5, thickness: float = 4):
        origin = (0, 0, 0)
        axes = (
            ((length, 0, 0), ui.color.red),
            ((0, length, 0), ui.color.green),
            ((0, 0, length), ui.color.blue),
        )
        with ui.scene.Transform():
            for end_point, axis_color in axes:
                ui.scene.Line(origin, end_point, color=axis_color, thickness=thickness)
# Create a few scenes with different camera-manipulators (a general ui.scene manip and one that allows ortho-tumble)
class SimpleScene:
    """Test scene: grid + arcs + origin, wired to a camera manipulator whose
    model changes are forwarded into the SceneView's view/projection."""
    def __init__(self, ortho: bool = False, custom: bool = False, *args, **kwargs):
        self.__scene_view = ui.scene.SceneView(*args, **kwargs)
        # Hard-coded start view/projection matrices (flat, row-major).
        if ortho:
            view = [-1, 0, 0, 0, 0, 0, 0.9999999999999998, 0, 0, 0.9999999999999998, 0, 0, 0, 0, -1000, 1]
            projection = [0.008, 0, 0, 0, 0, 0.008, 0, 0, 0, 0, -2.000002000002e-06, 0, 0, 0, -1.000002000002, 1]
        else:
            view = [0.7071067811865476, -0.40557978767263897, 0.5792279653395693, 0, -2.775557561562892e-17, 0.8191520442889919, 0.5735764363510462, 0, -0.7071067811865477, -0.4055797876726389, 0.5792279653395692, 0, 6.838973831690966e-14, -3.996234471857009, -866.0161835150924, 1.0000000000000002]
            projection = [4.7602203407949375, 0, 0, 0, 0, 8.483787309173106, 0, 0, 0, 0, -1.000002000002, -1, 0, 0, -2.000002000002, 0]
        view = Gf.Matrix4d(*view)
        # Center of interest: distance from the camera to the world origin, along -Z.
        center_of_interest = [0, 0, -view.Transform((0, 0, 0)).GetLength()]
        with self.__scene_view.scene:
            self.items = [SimpleGrid(), ui.scene.Arc(100, axis=1, wireframe=True), SimpleOrigin()]
            with ui.scene.Transform(transform = [1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1000, 0, 1000, 1]):
                self.items.append(ui.scene.Arc(100, axis=1, wireframe=True, color=ui.color.green))
            with ui.scene.Transform(transform = [1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, -260, 0, 260, 1]):
                self.items.append(ui.scene.Arc(100, axis=1, wireframe=True, color=ui.color.blue))
            if custom:
                self.items.append(CameraManipulatorBase())
            else:
                self.items.append(SceneViewCameraManipulator(center_of_interest))
            # Push the start values into the CameraManipulator
            self.setup_camera_model(self.items[-1].model, view, projection, center_of_interest, ortho)

    def setup_camera_model(self, cam_model, view, projection, center_of_interest, ortho):
        """Seed the camera-manipulator model and forward its changes to the SceneView."""
        cam_model.set_floats('transform', _flatten_matrix(view.GetInverse()))
        cam_model.set_floats('projection', projection)
        cam_model.set_floats('center_of_interest', [0, 0, -view.Transform((0, 0, 0)).GetLength()])
        if ortho:
            cam_model.set_ints('orthographic', [ortho])
        # Setup up the subscription to the CameraModel so changes here get pushed to SceneView
        self.model_changed_sub = cam_model.subscribe_item_changed_fn(self.model_changed)
        # And push the view and projection into the SceneView.model
        cam_model._item_changed(cam_model.get_item('transform'))
        cam_model._item_changed(cam_model.get_item('projection'))

    def model_changed(self, model, item):
        """Mirror manipulator transform/projection changes into the SceneView model."""
        if item == model.get_item('transform'):
            transform = Gf.Matrix4d(*model.get_as_floats(item))
            # Signal that this is the final change block; adjust our center-of-interest then
            interaction_ended = model.get_as_ints('interaction_ended')
            if interaction_ended and interaction_ended[0]:
                transform = Gf.Matrix4d(*model.get_as_floats(item))
                # Adjust the center-of-interest if requested (zoom out in perspective does this)
                initial_transform = Gf.Matrix4d(*model.get_as_floats('initial_transform'))
                coi_start, coi_end = adjust_center_of_interest(model, initial_transform, transform)
                if coi_end:
                    model.set_floats('center_of_interest', [coi_end[0], coi_end[1], coi_end[2]])
            # Push the start values into the SceneView
            self.__scene_view.model.set_floats('view', _flatten_matrix(transform.GetInverse()))
        elif item == model.get_item('projection'):
            self.__scene_view.model.set_floats('projection', model.get_as_floats('projection'))

    @property
    def scene(self):
        # The underlying ui.scene container.
        return self.__scene_view.scene

    @property
    def model(self):
        # The SceneView's model (holds 'view' and 'projection').
        return self.__scene_view.model
async def wait_human_delay(delay=1):
    """Pause test execution for `delay` human-scale ticks via ui_test."""
    await ui_test.human_delay(delay)
class TestManipulatorCamera(OmniUiTest):
    """UI tests exercising the camera manipulator with emulated mouse input."""
    # Before running each test
    async def setUp(self):
        await super().setUp()
        self._golden_img_dir = CURRENT_PATH.absolute().resolve().joinpath("tests")

    # After running each test
    async def tearDown(self):
        self._golden_img_dir = None
        await super().tearDown()

    async def create_test_view(self, name: str, ortho: bool = False, custom: bool = False):
        """Create an off-screen test window containing a SimpleScene; returns (window, scene)."""
        window = await self.create_test_window(width=TEST_WIDTH, height=TEST_HEIGHT, block_devices=False)
        with window.frame:
            scene_view = SimpleScene(ortho, custom)
        return (window, scene_view)

    # NOTE(review): the underscore-prefixed methods below are presumably kept
    # out of test discovery on purpose (golden-image captures) — confirm.
    async def _test_perspective_camera(self):
        objects = await self.create_test_view('Perspective')
        await self.finalize_test(golden_img_dir=self._golden_img_dir, golden_img_name='test_perspective_camera.png')

    async def _test_orthographic_camera(self):
        objects = await self.create_test_view('Orthographic', True)
        await self.finalize_test(golden_img_dir=self._golden_img_dir, golden_img_name='test_orthographic_camera.png')

    async def _test_custom_camera(self):
        objects = await self.create_test_view('Custom Orthographic', True, True)
        await self.finalize_test(golden_img_dir=self._golden_img_dir, golden_img_name='test_custom_camera.png')

    async def _test_mouse_across_screen(self, mouse_down, mouse_up):
        """Drag with the given button from the left to the right edge of the view."""
        objects = await self.create_test_view('WASD Movement')
        mouse_begin = ui_test.Vec2(0, TEST_UI_CENTER.y)
        mouse_end = ui_test.Vec2(TEST_WIDTH , TEST_UI_CENTER.y)
        await ui_test.input.emulate_mouse(MouseEventType.MOVE, mouse_begin)
        await wait_human_delay()
        try:
            await ui_test.input.emulate_mouse(mouse_down, mouse_begin)
            await ui_test.input.emulate_mouse_slow_move(mouse_begin, mouse_end)
            await wait_human_delay()
        finally:
            # Always release the button so a failure doesn't leak mouse state.
            await ui_test.input.emulate_mouse(mouse_up, mouse_end)
            await wait_human_delay()
        return objects

    async def test_pan_across_screen(self):
        """Test pan across X is a full move across NDC by default"""
        objects = await self._test_mouse_across_screen(MouseEventType.MIDDLE_BUTTON_DOWN, MouseEventType.MIDDLE_BUTTON_UP)
        await self.finalize_test(golden_img_dir=self._golden_img_dir, golden_img_name='test_pan_across_screen.png')

    async def test_look_across_screen(self):
        """Test look rotation across X is 180 degrees by default"""
        objects = await self._test_mouse_across_screen(MouseEventType.RIGHT_BUTTON_DOWN, MouseEventType.RIGHT_BUTTON_UP)
        await self.finalize_test(golden_img_dir=self._golden_img_dir, golden_img_name='test_look_across_screen.png')
class TestFlightMode(OmniUiTest):
    """Tests for WASD flight movement and flight-speed modifier keys."""
    async def create_test_view(self, name: str, custom=False, ortho: bool = False):
        """Create an off-screen test window containing a SimpleScene; returns (window, scene)."""
        window = await self.create_test_window(width=TEST_WIDTH, height=TEST_HEIGHT, block_devices=False)
        with window.frame:
            simple_scene = SimpleScene()
        return (window, simple_scene)

    def get_translation(self, model):
        """Return the translation row (elements 12-14) of the flat view matrix."""
        matrix = model.view
        return (matrix[12], matrix[13], matrix[14])

    async def do_key_press(self, key: KeyboardInput, operation = None):
        """Press `key`, run `operation` while held, and always release it."""
        try:
            await ui_test.input.emulate_keyboard(KeyboardEventType.KEY_PRESS, key)
            await wait_human_delay()
            if operation:
                operation()
        finally:
            await ui_test.input.emulate_keyboard(KeyboardEventType.KEY_RELEASE, key)
            await wait_human_delay()

    async def test_movement(self):
        """Test flight movement via WASD keyboard."""
        window, simple_scene = await self.create_test_view('WASD Movement')
        model = simple_scene.model
        await ui_test.input.emulate_mouse(MouseEventType.MOVE, TEST_UI_CENTER)
        await wait_human_delay()
        start_pos = self.get_translation(model)
        try:
            # Flight mode is active while the right mouse button is held.
            await ui_test.input.emulate_mouse(MouseEventType.RIGHT_BUTTON_DOWN, TEST_UI_CENTER)
            await wait_human_delay()
            await self.do_key_press(KeyboardInput.W)
            after_w = self.get_translation(model)
            # W should have moved Z forward
            self.assertAlmostEqual(after_w[0], start_pos[0], places=5)
            self.assertAlmostEqual(after_w[1], start_pos[1], places=5)
            self.assertTrue(after_w[2] > start_pos[2])
            await self.do_key_press(KeyboardInput.A)
            after_wa = self.get_translation(model)
            # A should have moved X left
            self.assertTrue(after_wa[0] > after_w[0])
            self.assertAlmostEqual(after_wa[1], after_w[1], places=5)
            self.assertAlmostEqual(after_wa[2], after_w[2], places=5)
            await self.do_key_press(KeyboardInput.S)
            after_was = self.get_translation(model)
            # S should have moved Z back
            self.assertAlmostEqual(after_was[0], after_wa[0], places=5)
            self.assertAlmostEqual(after_was[1], after_wa[1], places=5)
            self.assertTrue(after_was[2] < after_wa[2])
            await self.do_key_press(KeyboardInput.D)
            after_wasd = self.get_translation(model)
            # D should have moved X right
            self.assertTrue(after_wasd[0] < after_was[0])
            self.assertAlmostEqual(after_wasd[1], after_was[1], places=5)
            self.assertAlmostEqual(after_wasd[2], after_was[2], places=5)
            # Test disabling flight-mode in the model would stop keyboard from doing anything
            before_wasd = self.get_translation(model)
            simple_scene.items[-1].model.set_ints('disable_fly', [1])
            await self.do_key_press(KeyboardInput.W)
            await self.do_key_press(KeyboardInput.A)
            await self.do_key_press(KeyboardInput.S)
            await self.do_key_press(KeyboardInput.D)
            await wait_human_delay()
            after_wasd = self.get_translation(model)
            simple_scene.items[-1].model.set_ints('disable_fly', [0])
            self.assertTrue(Gf.IsClose(before_wasd, after_wasd, 1e-5))
        finally:
            # Always release the button so a failure doesn't leak mouse state.
            await ui_test.input.emulate_mouse(MouseEventType.RIGHT_BUTTON_UP)
            await wait_human_delay()

    async def _test_speed_modifier(self, value_a, value_b):
        """Shared body: hold Shift/Ctrl during flight and check the velocity setting
        becomes `value_a` / `value_b` respectively."""
        vel_key = '/persistent/app/viewport/camMoveVelocity'
        mod_amount_key = '/exts/omni.kit.manipulator.camera/flightMode/keyModifierAmount'
        settings = carb.settings.get_settings()
        window, simple_scene = await self.create_test_view('WASD Movement')
        model = simple_scene.model
        settings.set(vel_key, 5)
        await ui_test.input.emulate_mouse(MouseEventType.MOVE, TEST_UI_CENTER)
        await wait_human_delay()
        def compare_velocity(velocity):
            vel_value = settings.get(vel_key)
            self.assertEqual(vel_value, velocity)
        try:
            compare_velocity(5)
            await ui_test.input.emulate_mouse(MouseEventType.RIGHT_BUTTON_DOWN, TEST_UI_CENTER)
            # By default Shift should double speed
            await self.do_key_press(KeyboardInput.LEFT_SHIFT, lambda: compare_velocity(value_a))
            # By default Ctrl should halve speed
            await self.do_key_press(KeyboardInput.LEFT_CONTROL, lambda: compare_velocity(value_b))
            await ui_test.input.emulate_mouse(MouseEventType.RIGHT_BUTTON_UP)
            await wait_human_delay()
            compare_velocity(5)
        finally:
            # Restore settings and mouse state even on failure.
            settings.set(vel_key, 5)
            settings.set(mod_amount_key, 2)
            await ui_test.input.emulate_mouse(MouseEventType.RIGHT_BUTTON_UP)
            await wait_human_delay()

    async def test_speed_modifier_a(self):
        """Test default flight speed adjustment: 2x"""
        await self._test_speed_modifier(10, 2.5)

    async def test_speed_modifier_b(self):
        """Test custom flight speed adjustment: 4x"""
        carb.settings.get_settings().set('/exts/omni.kit.manipulator.camera/flightMode/keyModifierAmount', 4)
        await self._test_speed_modifier(20, 1.25)

    async def test_speed_modifier_c(self):
        """Test custom flight speed adjustment: 0x"""
        # Test when set to 0
        carb.settings.get_settings().set('/exts/omni.kit.manipulator.camera/flightMode/keyModifierAmount', 0)
        await self._test_speed_modifier(5, 5)
omniverse-code/kit/exts/omni.kit.manipulator.camera/omni/kit/manipulator/camera/tests/test_manipulator_usd.py | ## Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
##
## NVIDIA CORPORATION and its licensors retain all intellectual property
## and proprietary rights in and to this software, related documentation
## and any modifications thereto. Any use, reproduction, disclosure or
## distribution of this software and related documentation without an express
## license agreement from NVIDIA CORPORATION is strictly prohibited.
##
__all__ = ['TestManipulatorUSDCamera']
from omni.kit.manipulator.camera.usd_camera_manipulator import UsdCameraManipulator
from omni.kit.manipulator.camera.model import CameraManipulatorModel, _flatten_matrix
import omni.usd
import omni.kit.test
import carb.settings
from pxr import Gf, Sdf, UsdGeom
from pathlib import Path
from typing import List, Sequence
import sys
import unittest
TESTS_PATH = Path(carb.tokens.get_tokens_interface().resolve("${omni.kit.manipulator.camera}/data")).absolute().resolve()
USD_FILES = TESTS_PATH.joinpath("tests", "usd")
class TestManipulatorUSDCamera(omni.kit.test.AsyncTestCase):
    """Drives UsdCameraManipulator directly (no UI) and validates the USD xformOp
    values it writes, including up-axis conversion across referenced layers."""

    # Before running each test
    async def setUp(self):
        # Every test starts from a fresh, empty stage.
        await omni.usd.get_context().new_stage_async()
        self.stage = omni.usd.get_context().get_stage()
        super().setUp()

    # After running each test
    async def tearDown(self):
        super().tearDown()

    def __reset_initial_xf(self, usd_manip, initial_transform_item, prim):
        """Reset the manipulator's initial transform to *prim*'s current world transform.

        This synthesizes the start of a new manipulation event, so successive
        tumble deltas accumulate the way repeated user gestures would.
        """
        # Reset the initial transform to the current transform
        matrix = omni.usd.get_world_transform_matrix(prim)
        usd_manip.model.set_floats(initial_transform_item, _flatten_matrix(matrix))
        # This synthesizes the start of a new event
        usd_manip._set_context('', prim.GetPath())

    def __setup_usdmanip_tumble_test(self, prim_path: Sdf.Path):
        """Build a UsdCameraManipulator for *prim_path* primed for tumble testing.

        Returns (usd_manip, cam_prim, initial_transform_item, tumble_item).
        """
        usd_manip = UsdCameraManipulator(prim_path=prim_path)
        usd_manip.model = CameraManipulatorModel()
        usd_manip._on_began(usd_manip.model)

        cam_prim = self.stage.GetPrimAtPath(prim_path)
        self.assertTrue(bool(cam_prim))

        initial_transform_item = usd_manip.model.get_item('initial_transform')
        tumble_item = usd_manip.model.get_item('tumble')
        transform_item = usd_manip.model.get_item('transform')

        self.__reset_initial_xf(usd_manip, initial_transform_item, cam_prim)
        # Push an identity transform through the model so the manipulator starts
        # from a known state before any tumble is applied.
        usd_manip.model.set_floats(transform_item, _flatten_matrix(Gf.Matrix4d(1)))
        usd_manip.on_model_updated(transform_item)
        return (usd_manip, cam_prim, initial_transform_item, tumble_item)

    async def __test_tumble_camera(self, prim_path: Sdf.Path, rotations: List[Sequence[float]], epsilon: float = 1.0e-5):
        """Tumble the camera by -90 degrees per step and compare the resulting
        'xformOp:rotateYXZ' attribute against each expected rotation in *rotations*.

        Angles that differ only by a sign on 180 or 360 degrees are treated as
        equal, because platforms decompose equivalent rotations differently.
        """
        (usd_manip, cam_prim,
         initial_transform_item, tumble_item) = self.__setup_usdmanip_tumble_test(prim_path)

        rotateYXZ = cam_prim.GetAttribute('xformOp:rotateYXZ')
        self.assertIsNotNone(rotateYXZ)
        # The camera must start un-rotated.
        cur_rot = Gf.Vec3d(rotateYXZ.Get())
        self.assertTrue(Gf.IsClose(cur_rot, Gf.Vec3d(0, 0, 0), epsilon))

        # NOTE: removed an unused `is_linux = sys.platform.startswith('linux')`
        # local and the unused enumerate() index from the original.
        for rotation in rotations:
            usd_manip.model.set_floats(tumble_item, [-90, 0, 0])
            usd_manip.model._item_changed(tumble_item)
            self.__reset_initial_xf(usd_manip, initial_transform_item, cam_prim)
            cur_rot = Gf.Vec3d(rotateYXZ.Get())
            is_equal = Gf.IsClose(cur_rot, Gf.Vec3d(rotation), epsilon)
            if is_equal:
                continue

            # Linux and Windows are returning different results for some rotations
            # that are essentially equivalent (e.g. +180 vs -180 degrees).
            is_equal = True
            for current, expected in zip(cur_rot, rotation):
                if not Gf.IsClose(current, expected, epsilon):
                    is_equal = abs(expected) in (180, 360)
                    if not is_equal:
                        break
            self.assertTrue(is_equal,
                            msg=f"Rotation values differ: current: {cur_rot}, expected: {rotation}")

    async def __test_camera_YXZ_edit(self, rotations: List[Sequence[float]]):
        """Create a default camera at /Camera and run the tumble test against it."""
        camera = UsdGeom.Camera.Define(self.stage, '/Camera')
        cam_prim = camera.GetPrim()
        # The manipulator needs a center-of-interest to orbit around.
        cam_prim.CreateAttribute('omni:kit:centerOfInterest', Sdf.ValueTypeNames.Vector3d,
                                 True, Sdf.VariabilityUniform).Set(Gf.Vec3d(0, 0, -10))
        await self.__test_tumble_camera(cam_prim.GetPath(), rotations)

    async def test_camera_rotate(self):
        '''Test rotation values in USD (with controllerUseSRT set to False)'''
        await self.__test_camera_YXZ_edit([
            (0, -90, 0),
            (0, 180, 0),
            (0, 90, 0),
            (0, 0, 0)
        ])

    async def test_camera_rotate_SRT(self):
        '''Test rotation accumulation in USD with controllerUseSRT set to True'''
        settings = carb.settings.get_settings()
        try:
            settings.set('/persistent/app/camera/controllerUseSRT', True)
            # With SRT enabled the rotation keeps accumulating past +/-180.
            await self.__test_camera_YXZ_edit([
                (0, -90, 0),
                (0, -180, 0),
                (0, -270, 0),
                (0, -360, 0)
            ])
        finally:
            # Always restore the persistent setting so other tests are unaffected.
            settings.destroy_item('/persistent/app/camera/controllerUseSRT')

    async def test_camera_yup_in_zup(self):
        '''Test Viewport rotation of a camera from a y-up layer, referenced in a z-up stage'''
        await omni.usd.get_context().open_stage_async(str(USD_FILES.joinpath('yup_in_zup.usda')))
        self.stage = omni.usd.get_context().get_stage()
        await self.__test_tumble_camera(Sdf.Path('/World/yup_ref/Camera'),
                                        [
                                            (0, -90, 0),
                                            (0, 180, 0),
                                            (0, 90, 0),
                                            (0, 0, 0)
                                        ]
                                        )

    async def test_camera_zup_in_yup(self):
        '''Test Viewport rotation of a camera from a z-up layer, referenced in a y-up stage'''
        await omni.usd.get_context().open_stage_async(str(USD_FILES.joinpath('zup_in_yup.usda')))
        self.stage = omni.usd.get_context().get_stage()
        await self.__test_tumble_camera(Sdf.Path('/World/zup_ref/Camera'),
                                        [
                                            (0, 0, -90),
                                            (0, 0, 180),
                                            (0, 0, 90),
                                            (0, 0, 0)
                                        ]
                                        )
| 6,352 | Python | 38.216049 | 121 | 0.615712 |
omniverse-code/kit/exts/omni.kit.manipulator.camera/omni/kit/manipulator/camera/tests/__init__.py | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
from .test_manipulator_camera import *
from .test_manipulator_gamepad import *
from .test_manipulator_usd import *
from .test_viewport_manipulator import *
| 585 | Python | 40.85714 | 76 | 0.803419 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.