file_path (string, length 20-207) | content (string, length 5-3.85M) | size (int64, 5-3.85M) | lang (string, 9 distinct values) | avg_line_length (float64, 1.33-100) | max_line_length (int64, 4-993) | alphanum_fraction (float64, 0.26-0.93)
---|---|---|---|---|---|---|
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/apdlexer.py | """
pygments.lexers.apdlexer
~~~~~~~~~~~~~~~~~~~~~~~~
Lexers for ANSYS Parametric Design Language.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, include, words
from pygments.token import Comment, Keyword, Name, Number, Operator, \
String, Generic, Punctuation, Whitespace
__all__ = ['apdlexer']
class apdlexer(RegexLexer):
"""
For APDL source code.
.. versionadded:: 2.9
"""
name = 'ANSYS parametric design language'
aliases = ['ansys', 'apdl']
filenames = ['*.ans']
flags = re.IGNORECASE
# list of elements
elafunb = ("SURF152", "SURF153", "SURF154", "SURF156", "SHELL157",
"SURF159", "LINK160", "BEAM161", "PLANE162",
"SHELL163", "SOLID164", "COMBI165", "MASS166",
"LINK167", "SOLID168", "TARGE169", "TARGE170",
"CONTA171", "CONTA172", "CONTA173", "CONTA174",
"CONTA175", "CONTA176", "CONTA177", "CONTA178",
"PRETS179", "LINK180", "SHELL181", "PLANE182",
"PLANE183", "MPC184", "SOLID185", "SOLID186",
"SOLID187", "BEAM188", "BEAM189", "SOLSH190",
"INTER192", "INTER193", "INTER194", "INTER195",
"MESH200", "FOLLW201", "INTER202", "INTER203",
"INTER204", "INTER205", "SHELL208", "SHELL209",
"CPT212", "CPT213", "COMBI214", "CPT215", "CPT216",
"CPT217", "FLUID220", "FLUID221", "PLANE223",
"SOLID226", "SOLID227", "PLANE230", "SOLID231",
"SOLID232", "PLANE233", "SOLID236", "SOLID237",
"PLANE238", "SOLID239", "SOLID240", "HSFLD241",
"HSFLD242", "SURF251", "SURF252", "REINF263",
"REINF264", "REINF265", "SOLID272", "SOLID273",
"SOLID278", "SOLID279", "SHELL281", "SOLID285",
"PIPE288", "PIPE289", "ELBOW290", "USER300", "BEAM3",
"BEAM4", "BEAM23", "BEAM24", "BEAM44", "BEAM54",
"COMBIN7", "FLUID79", "FLUID80", "FLUID81", "FLUID141",
"FLUID142", "INFIN9", "INFIN47", "PLANE13", "PLANE25",
"PLANE42", "PLANE53", "PLANE67", "PLANE82", "PLANE83",
"PLANE145", "PLANE146", "CONTAC12", "CONTAC52",
"LINK1", "LINK8", "LINK10", "LINK32", "PIPE16",
"PIPE17", "PIPE18", "PIPE20", "PIPE59", "PIPE60",
"SHELL41", "SHELL43", "SHELL57", "SHELL63", "SHELL91",
"SHELL93", "SHELL99", "SHELL150", "SOLID5", "SOLID45",
"SOLID46", "SOLID65", "SOLID69", "SOLID92", "SOLID95",
"SOLID117", "SOLID127", "SOLID128", "SOLID147",
"SOLID148", "SOLID191", "VISCO88", "VISCO89",
"VISCO106", "VISCO107", "VISCO108", "TRANS109")
elafunc = ("PGRAPH", "/VT", "VTIN", "VTRFIL", "VTTEMP", "PGRSET",
"VTCLR", "VTMETH", "VTRSLT", "VTVMOD", "PGSELE",
"VTDISC", "VTMP", "VTSEC", "PGWRITE", "VTEVAL", "VTOP",
"VTSFE", "POUTRES", "VTFREQ", "VTPOST", "VTSL",
"FLDATA1-40", "HFPCSWP", "MSDATA", "MSVARY", "QFACT",
"FLOCHECK", "HFPOWER", "MSMASS", "PERI", "SPADP",
"FLREAD", "HFPORT", "MSMETH", "PLFSS", "SPARM",
"FLOTRAN", "HFSCAT", "MSMIR", "PLSCH", "SPFSS",
"HFADP", "ICE", "MSNOMF", "PLSYZ", "SPICE", "HFARRAY",
"ICEDELE", "MSPROP", "PLTD", "SPSCAN", "HFDEEM",
"ICELIST", "MSQUAD", "PLTLINE", "SPSWP", "HFEIGOPT",
"ICVFRC", "MSRELAX", "PLVFRC", "HFEREFINE", "LPRT",
"MSSOLU", "/PICE", "HFMODPRT", "MSADV", "MSSPEC",
"PLWAVE", "HFPA", "MSCAP", "MSTERM", "PRSYZ")
elafund = ("*VOPER", "VOVLAP", "*VPLOT", "VPLOT", "VPTN", "*VPUT",
"VPUT", "*VREAD", "VROTAT", "VSBA", "VSBV", "VSBW",
"/VSCALE", "*VSCFUN", "VSEL", "VSLA", "*VSTAT", "VSUM",
"VSWEEP", "VSYMM", "VTRAN", "VTYPE", "/VUP", "*VWRITE",
"/WAIT", "WAVES", "WERASE", "WFRONT", "/WINDOW",
"WMID", "WMORE", "WPAVE", "WPCSYS", "WPLANE", "WPOFFS",
"WPROTA", "WPSTYL", "WRFULL", "WRITE", "WRITEMAP",
"*WRK", "WSORT", "WSPRINGS", "WSTART", "WTBCREATE",
"XFDATA", "XFENRICH", "XFLIST", "/XFRM", "/XRANGE",
"XVAR", "/YRANGE", "/ZOOM", "/WB", "XMLO", "/XML",
"CNTR", "EBLOCK", "CMBLOCK", "NBLOCK", "/TRACK",
"CWZPLOT", "~EUI", "NELE", "EALL", "NALL", "FLITEM",
"LSLN", "PSOLVE", "ASLN", "/VERIFY", "/SSS", "~CFIN",
"*EVAL", "*MOONEY", "/RUNSTAT", "ALPFILL",
"ARCOLLAPSE", "ARDETACH", "ARFILL", "ARMERGE",
"ARSPLIT", "FIPLOT", "GAPFINISH", "GAPLIST",
"GAPMERGE", "GAPOPT", "GAPPLOT", "LNCOLLAPSE",
"LNDETACH", "LNFILL", "LNMERGE", "LNSPLIT", "PCONV",
"PLCONV", "PEMOPTS", "PEXCLUDE", "PINCLUDE", "PMETH",
"/PMETH", "PMOPTS", "PPLOT", "PPRANGE", "PRCONV",
"PRECISION", "RALL", "RFILSZ", "RITER", "RMEMRY",
"RSPEED", "RSTAT", "RTIMST", "/RUNST", "RWFRNT",
"SARPLOT", "SHSD", "SLPPLOT", "SLSPLOT", "VCVFILL",
"/OPT", "OPEQN", "OPFACT", "OPFRST", "OPGRAD",
"OPKEEP", "OPLOOP", "OPPRNT", "OPRAND", "OPSUBP",
"OPSWEEP", "OPTYPE", "OPUSER", "OPVAR", "OPADD",
"OPCLR", "OPDEL", "OPMAKE", "OPSEL", "OPANL", "OPDATA",
"OPRESU", "OPSAVE", "OPEXE", "OPLFA", "OPLGR",
"OPLIST", "OPLSW", "OPRFA", "OPRGR", "OPRSW",
"PILECALC", "PILEDISPSET", "PILEGEN", "PILELOAD",
"PILEMASS", "PILERUN", "PILESEL", "PILESTIF",
"PLVAROPT", "PRVAROPT", "TOCOMP", "TODEF", "TOFREQ",
"TOTYPE", "TOVAR", "TOEXE", "TOLOOP", "TOGRAPH",
"TOLIST", "TOPLOT", "TOPRINT", "TOSTAT", "TZAMESH",
"TZDELE", "TZEGEN", "XVAROPT", "PGSAVE", "SOLCONTROL",
"TOTAL", "VTGEOM", "VTREAL", "VTSTAT")
elafune = ("/ANUM", "AOFFST", "AOVLAP", "APLOT", "APPEND", "APTN",
"ARCLEN", "ARCTRM", "AREAS", "AREFINE", "AREMESH",
"AREVERSE", "AROTAT", "ARSCALE", "ARSYM", "ASBA",
"ASBL", "ASBV", "ASBW", "ASCRES", "ASEL", "ASIFILE",
"*ASK", "ASKIN", "ASLL", "ASLV", "ASOL", "/ASSIGN",
"ASUB", "ASUM", "ATAN", "ATRAN", "ATYPE", "/AUTO",
"AUTOTS", "/AUX2", "/AUX3", "/AUX12", "/AUX15",
"AVPRIN", "AVRES", "AWAVE", "/AXLAB", "*AXPY",
"/BATCH", "BCSOPTION", "BETAD", "BF", "BFA", "BFADELE",
"BFALIST", "BFCUM", "BFDELE", "BFE", "BFECUM",
"BFEDELE", "BFELIST", "BFESCAL", "BFINT", "BFK",
"BFKDELE", "BFKLIST", "BFL", "BFLDELE", "BFLIST",
"BFLLIST", "BFSCALE", "BFTRAN", "BFUNIF", "BFV",
"BFVDELE", "BFVLIST", "BIOOPT", "BIOT", "BLC4", "BLC5",
"BLOCK", "BOOL", "BOPTN", "BSAX", "BSMD", "BSM1",
"BSM2", "BSPLIN", "BSS1", "BSS2", "BSTE", "BSTQ",
"BTOL", "BUCOPT", "C", "CALC", "CAMPBELL", "CBDOF",
"CBMD", "CBMX", "CBTE", "CBTMP", "CDOPT", "CDREAD",
"CDWRITE", "CE", "CECHECK", "CECMOD", "CECYC",
"CEDELE", "CEINTF", "CELIST", "CENTER", "CEQN",
"CERIG", "CESGEN", "CFACT", "*CFCLOS", "*CFOPEN",
"*CFWRITE", "/CFORMAT", "CGLOC", "CGOMGA", "CGROW",
"CHECK", "CHKMSH", "CINT", "CIRCLE", "CISOL",
"/CLABEL", "/CLEAR", "CLOCAL", "CLOG", "/CLOG",
"CLRMSHLN", "CM", "CMACEL", "/CMAP", "CMATRIX",
"CMDELE", "CMDOMEGA", "CMEDIT", "CMGRP", "CMLIST",
"CMMOD", "CMOMEGA", "CMPLOT", "CMROTATE", "CMSEL",
"CMSFILE", "CMSOPT", "CMWRITE", "CNCHECK", "CNKMOD",
"CNTR", "CNVTOL", "/COLOR", "/COM", "*COMP", "COMBINE",
"COMPRESS", "CON4", "CONE", "/CONFIG", "CONJUG",
"/CONTOUR", "/COPY", "CORIOLIS", "COUPLE", "COVAL",
"CP", "CPCYC", "CPDELE", "CPINTF", "/CPLANE", "CPLGEN",
"CPLIST", "CPMERGE", "CPNGEN", "CPSGEN", "CQC",
"*CREATE", "CRPLIM", "CS", "CSCIR", "CSDELE", "CSKP",
"CSLIST", "CSWPLA", "CSYS", "/CTYPE", "CURR2D",
"CUTCONTROL", "/CVAL", "CVAR", "/CWD", "CYCCALC",
"/CYCEXPAND", "CYCFILES", "CYCFREQ", "*CYCLE",
"CYCLIC", "CYCOPT", "CYCPHASE", "CYCSPEC", "CYL4",
"CYL5", "CYLIND", "CZDEL", "CZMESH", "D", "DA",
"DADELE", "DALIST", "DAMORPH", "DATA", "DATADEF",
"DCGOMG", "DCUM", "DCVSWP", "DDASPEC", "DDELE",
"DDOPTION", "DEACT", "DEFINE", "*DEL", "DELETE",
"/DELETE", "DELTIM", "DEMORPH", "DERIV", "DESIZE",
"DESOL", "DETAB", "/DEVDISP", "/DEVICE", "/DFLAB",
"DFLX", "DFSWAVE", "DIG", "DIGIT", "*DIM",
"/DIRECTORY", "DISPLAY", "/DIST", "DJ", "DJDELE",
"DJLIST", "DK", "DKDELE", "DKLIST", "DL", "DLDELE",
"DLIST", "DLLIST", "*DMAT", "DMOVE", "DMPEXT",
"DMPOPTION", "DMPRAT", "DMPSTR", "DNSOL", "*DO", "DOF",
"DOFSEL", "DOMEGA", "*DOT", "*DOWHILE", "DSCALE",
"/DSCALE", "DSET", "DSPOPTION", "DSUM", "DSURF",
"DSYM", "DSYS", "DTRAN", "DUMP", "/DV3D", "DVAL",
"DVMORPH", "DYNOPT", "E", "EALIVE", "EDADAPT", "EDALE",
"EDASMP", "EDBOUND", "EDBX", "EDBVIS", "EDCADAPT",
"EDCGEN", "EDCLIST", "EDCMORE", "EDCNSTR", "EDCONTACT",
"EDCPU", "EDCRB", "EDCSC", "EDCTS", "EDCURVE",
"EDDAMP", "EDDBL", "EDDC", "EDDRELAX", "EDDUMP",
"EDELE", "EDENERGY", "EDFPLOT", "EDGCALE", "/EDGE",
"EDHGLS", "EDHIST", "EDHTIME", "EDINT", "EDIPART",
"EDIS", "EDLCS", "EDLOAD", "EDMP", "EDNB", "EDNDTSD",
"EDNROT", "EDOPT", "EDOUT", "EDPART", "EDPC", "EDPL",
"EDPVEL", "EDRC", "EDRD", "EDREAD", "EDRI", "EDRST",
"EDRUN", "EDSHELL", "EDSOLV", "EDSP", "EDSTART",
"EDTERM", "EDTP", "EDVEL", "EDWELD", "EDWRITE",
"EEXTRUDE", "/EFACET", "EGEN", "*EIGEN", "EINFIN",
"EINTF", "EKILL", "ELBOW", "ELEM", "ELIST", "*ELSE",
"*ELSEIF", "EMAGERR", "EMATWRITE", "EMF", "EMFT",
"EMID", "EMIS", "EMODIF", "EMORE", "EMSYM", "EMTGEN",
"EMUNIT", "EN", "*END", "*ENDDO", "*ENDIF",
"ENDRELEASE", "ENERSOL", "ENGEN", "ENORM", "ENSYM",
"EORIENT", "EPLOT", "EQSLV", "ERASE", "/ERASE",
"EREAD", "EREFINE", "EREINF", "ERESX", "ERNORM",
"ERRANG", "ESCHECK", "ESEL", "/ESHAPE", "ESIZE",
"ESLA", "ESLL", "ESLN", "ESLV", "ESOL", "ESORT",
"ESSOLV", "ESTIF", "ESURF", "ESYM", "ESYS", "ET",
"ETABLE", "ETCHG", "ETCONTROL", "ETDELE", "ETLIST",
"ETYPE", "EUSORT", "EWRITE", "*EXIT", "/EXIT", "EXP",
"EXPAND", "/EXPAND", "EXPASS", "*EXPORT", "EXPROFILE",
"EXPSOL", "EXTOPT", "EXTREM", "EXUNIT", "F", "/FACET",
"FATIGUE", "FC", "FCCHECK", "FCDELE", "FCLIST", "FCUM",
"FCTYP", "FDELE", "/FDELE", "FE", "FEBODY", "FECONS",
"FEFOR", "FELIST", "FESURF", "*FFT", "FILE",
"FILEAUX2", "FILEAUX3", "FILEDISP", "FILL", "FILLDATA",
"/FILNAME", "FINISH", "FITEM", "FJ", "FJDELE",
"FJLIST", "FK", "FKDELE", "FKLIST", "FL", "FLIST",
"FLLIST", "FLST", "FLUXV", "FLUREAD", "FMAGBC",
"FMAGSUM", "/FOCUS", "FOR2D", "FORCE", "FORM",
"/FORMAT", "FP", "FPLIST", "*FREE", "FREQ", "FRQSCL",
"FS", "FSCALE", "FSDELE", "FSLIST", "FSNODE", "FSPLOT",
"FSSECT", "FSSPARM", "FSUM", "FTCALC", "FTRAN",
"FTSIZE", "FTWRITE", "FTYPE", "FVMESH", "GAP", "GAPF",
"GAUGE", "GCDEF", "GCGEN", "/GCMD", "/GCOLUMN",
"GENOPT", "GEOM", "GEOMETRY", "*GET", "/GFILE",
"/GFORMAT", "/GLINE", "/GMARKER", "GMATRIX", "GMFACE",
"*GO", "/GO", "/GOLIST", "/GOPR", "GP", "GPDELE",
"GPLIST", "GPLOT", "/GRAPHICS", "/GRESUME", "/GRID",
"/GROPT", "GRP", "/GRTYP", "/GSAVE", "GSBDATA",
"GSGDATA", "GSLIST", "GSSOL", "/GST", "GSUM", "/GTHK",
"/GTYPE", "HARFRQ", "/HBC", "HBMAT", "/HEADER", "HELP",
"HELPDISP", "HEMIOPT", "HFANG", "HFSYM", "HMAGSOLV",
"HPGL", "HPTCREATE", "HPTDELETE", "HRCPLX", "HREXP",
"HROPT", "HROCEAN", "HROUT", "IC", "ICDELE", "ICLIST",
"/ICLWID", "/ICSCALE", "*IF", "IGESIN", "IGESOUT",
"/IMAGE", "IMAGIN", "IMESH", "IMMED", "IMPD",
"INISTATE", "*INIT", "/INPUT", "/INQUIRE", "INRES",
"INRTIA", "INT1", "INTSRF", "IOPTN", "IRLF", "IRLIST",
"*ITENGINE", "JPEG", "JSOL", "K", "KATT", "KBC",
"KBETW", "KCALC", "KCENTER", "KCLEAR", "KDELE",
"KDIST", "KEEP", "KESIZE", "KEYOPT", "KEYPTS", "KEYW",
"KFILL", "KGEN", "KL", "KLIST", "KMESH", "KMODIF",
"KMOVE", "KNODE", "KPLOT", "KPSCALE", "KREFINE",
"KSCALE", "KSCON", "KSEL", "KSLL", "KSLN", "KSUM",
"KSYMM", "KTRAN", "KUSE", "KWPAVE", "KWPLAN", "L",
"L2ANG", "L2TAN", "LANG", "LARC", "/LARC", "LAREA",
"LARGE", "LATT", "LAYER", "LAYERP26", "LAYLIST",
"LAYPLOT", "LCABS", "LCASE", "LCCALC", "LCCAT",
"LCDEF", "LCFACT", "LCFILE", "LCLEAR", "LCOMB",
"LCOPER", "LCSEL", "LCSL", "LCSUM", "LCWRITE",
"LCZERO", "LDELE", "LDIV", "LDRAG", "LDREAD", "LESIZE",
"LEXTND", "LFILLT", "LFSURF", "LGEN", "LGLUE",
"LGWRITE", "/LIGHT", "LINA", "LINE", "/LINE", "LINES",
"LINL", "LINP", "LINV", "LIST", "*LIST", "LLIST",
"LMATRIX", "LMESH", "LNSRCH", "LOCAL", "LOVLAP",
"LPLOT", "LPTN", "LREFINE", "LREVERSE", "LROTAT",
"LSBA", "*LSBAC", "LSBL", "LSBV", "LSBW", "LSCLEAR",
"LSDELE", "*LSDUMP", "LSEL", "*LSENGINE", "*LSFACTOR",
"LSLA", "LSLK", "LSOPER", "/LSPEC", "LSREAD",
"*LSRESTORE", "LSSCALE", "LSSOLVE", "LSTR", "LSUM",
"LSWRITE", "/LSYMBOL", "LSYMM", "LTAN", "LTRAN",
"LUMPM", "LVSCALE", "LWPLAN", "M", "MADAPT", "MAGOPT",
"MAGSOLV", "/MAIL", "MAP", "/MAP", "MAP2DTO3D",
"MAPSOLVE", "MAPVAR", "MASTER", "MAT", "MATER",
"MCHECK", "MDAMP", "MDELE", "MDPLOT", "MEMM", "/MENU",
"MESHING", "MFANALYSIS", "MFBUCKET", "MFCALC", "MFCI",
"MFCLEAR", "MFCMMAND", "MFCONV", "MFDTIME", "MFELEM",
"MFEM", "MFEXTER", "MFFNAME", "MFFR", "MFIMPORT",
"MFINTER", "MFITER", "MFLCOMM", "MFLIST", "MFMAP",
"MFORDER", "MFOUTPUT", "*MFOURI", "MFPSIMUL", "MFRC",
"MFRELAX", "MFRSTART", "MFSORDER", "MFSURFACE",
"MFTIME", "MFTOL", "*MFUN", "MFVOLUME", "MFWRITE",
"MGEN", "MIDTOL", "/MKDIR", "MLIST", "MMASS", "MMF",
"MODCONT", "MODE", "MODIFY", "MODMSH", "MODSELOPTION",
"MODOPT", "MONITOR", "*MOPER", "MOPT", "MORPH", "MOVE",
"MP", "MPAMOD", "MPCHG", "MPCOPY", "MPDATA", "MPDELE",
"MPDRES", "/MPLIB", "MPLIST", "MPPLOT", "MPREAD",
"MPRINT", "MPTEMP", "MPTGEN", "MPTRES", "MPWRITE",
"/MREP", "MSAVE", "*MSG", "MSHAPE", "MSHCOPY",
"MSHKEY", "MSHMID", "MSHPATTERN", "MSOLVE", "/MSTART",
"MSTOLE", "*MULT", "*MWRITE", "MXPAND", "N", "NANG",
"NAXIS", "NCNV", "NDELE", "NDIST", "NDSURF", "NEQIT",
"/NERR", "NFORCE", "NGEN", "NKPT", "NLADAPTIVE",
"NLDIAG", "NLDPOST", "NLGEOM", "NLHIST", "NLIST",
"NLMESH", "NLOG", "NLOPT", "NMODIF", "NOCOLOR",
"NODES", "/NOERASE", "/NOLIST", "NOOFFSET", "NOORDER",
"/NOPR", "NORA", "NORL", "/NORMAL", "NPLOT", "NPRINT",
"NREAD", "NREFINE", "NRLSUM", "*NRM", "NROPT",
"NROTAT", "NRRANG", "NSCALE", "NSEL", "NSLA", "NSLE",
"NSLK", "NSLL", "NSLV", "NSMOOTH", "NSOL", "NSORT",
"NSTORE", "NSUBST", "NSVR", "NSYM", "/NUMBER",
"NUMCMP", "NUMEXP", "NUMMRG", "NUMOFF", "NUMSTR",
"NUMVAR", "NUSORT", "NWPAVE", "NWPLAN", "NWRITE",
"OCDATA", "OCDELETE", "OCLIST", "OCREAD", "OCTABLE",
"OCTYPE", "OCZONE", "OMEGA", "OPERATE", "OPNCONTROL",
"OUTAERO", "OUTOPT", "OUTPR", "/OUTPUT", "OUTRES",
"OVCHECK", "PADELE", "/PAGE", "PAGET", "PAPUT",
"PARESU", "PARTSEL", "PARRES", "PARSAV", "PASAVE",
"PATH", "PAUSE", "/PBC", "/PBF", "PCALC", "PCGOPT",
"PCIRC", "/PCIRCLE", "/PCOPY", "PCROSS", "PDANL",
"PDCDF", "PDCFLD", "PDCLR", "PDCMAT", "PDCORR",
"PDDMCS", "PDDOEL", "PDEF", "PDEXE", "PDHIST",
"PDINQR", "PDLHS", "PDMETH", "PDOT", "PDPINV",
"PDPLOT", "PDPROB", "PDRESU", "PDROPT", "/PDS",
"PDSAVE", "PDSCAT", "PDSENS", "PDSHIS", "PDUSER",
"PDVAR", "PDWRITE", "PERBC2D", "PERTURB", "PFACT",
"PHYSICS", "PIVCHECK", "PLCAMP", "PLCFREQ", "PLCHIST",
"PLCINT", "PLCPLX", "PLCRACK", "PLDISP", "PLESOL",
"PLETAB", "PLFAR", "PLF2D", "PLGEOM", "PLLS", "PLMAP",
"PLMC", "PLNEAR", "PLNSOL", "/PLOPTS", "PLORB", "PLOT",
"PLOTTING", "PLPAGM", "PLPATH", "PLSECT", "PLST",
"PLTIME", "PLTRAC", "PLVAR", "PLVECT", "PLZZ",
"/PMACRO", "PMAP", "PMGTRAN", "PMLOPT", "PMLSIZE",
"/PMORE", "PNGR", "/PNUM", "POINT", "POLY", "/POLYGON",
"/POST1", "/POST26", "POWERH", "PPATH", "PRANGE",
"PRAS", "PRCAMP", "PRCINT", "PRCPLX", "PRED",
"PRENERGY", "/PREP7", "PRERR", "PRESOL", "PRETAB",
"PRFAR", "PRI2", "PRIM", "PRINT", "*PRINT", "PRISM",
"PRITER", "PRJSOL", "PRNEAR", "PRNLD", "PRNSOL",
"PROD", "PRORB", "PRPATH", "PRRFOR", "PRRSOL",
"PRSCONTROL", "PRSECT", "PRTIME", "PRVAR", "PRVECT",
"PSCONTROL", "PSCR", "PSDCOM", "PSDFRQ", "PSDGRAPH",
"PSDRES", "PSDSPL", "PSDUNIT", "PSDVAL", "PSDWAV",
"/PSEARCH", "PSEL", "/PSF", "PSMAT", "PSMESH",
"/PSPEC", "/PSTATUS", "PSTRES", "/PSYMB", "PTR",
"PTXY", "PVECT", "/PWEDGE", "QDVAL", "QRDOPT", "QSOPT",
"QUAD", "/QUIT", "QUOT", "R", "RACE", "RADOPT",
"RAPPND", "RATE", "/RATIO", "RBE3", "RCON", "RCYC",
"RDEC", "RDELE", "READ", "REAL", "REALVAR", "RECTNG",
"REMESH", "/RENAME", "REORDER", "*REPEAT", "/REPLOT",
"RESCOMBINE", "RESCONTROL", "RESET", "/RESET", "RESP",
"RESUME", "RESVEC", "RESWRITE", "*RETURN", "REXPORT",
"REZONE", "RFORCE", "/RGB", "RIGID", "RIGRESP",
"RIMPORT", "RLIST", "RMALIST", "RMANL", "RMASTER",
"RMCAP", "RMCLIST", "/RMDIR", "RMFLVEC", "RMLVSCALE",
"RMMLIST", "RMMRANGE", "RMMSELECT", "RMNDISP",
"RMNEVEC", "RMODIF", "RMORE", "RMPORDER", "RMRESUME",
"RMRGENERATE", "RMROPTIONS", "RMRPLOT", "RMRSTATUS",
"RMSAVE", "RMSMPLE", "RMUSE", "RMXPORT", "ROCK",
"ROSE", "RPOLY", "RPR4", "RPRISM", "RPSD", "RSFIT",
"RSOPT", "RSPLIT", "RSPLOT", "RSPRNT", "RSSIMS",
"RSTMAC", "RSTOFF", "RSURF", "RSYMM", "RSYS", "RTHICK",
"SABS", "SADD", "SALLOW", "SAVE", "SBCLIST", "SBCTRAN",
"SDELETE", "SE", "SECCONTROL", "SECDATA",
"SECFUNCTION", "SECJOINT", "/SECLIB", "SECLOCK",
"SECMODIF", "SECNUM", "SECOFFSET", "SECPLOT",
"SECREAD", "SECSTOP", "SECTYPE", "SECWRITE", "SED",
"SEDLIST", "SEEXP", "/SEG", "SEGEN", "SELIST", "SELM",
"SELTOL", "SENERGY", "SEOPT", "SESYMM", "*SET", "SET",
"SETFGAP", "SETRAN", "SEXP", "SF", "SFA", "SFACT",
"SFADELE", "SFALIST", "SFBEAM", "SFCALC", "SFCUM",
"SFDELE", "SFE", "SFEDELE", "SFELIST", "SFFUN",
"SFGRAD", "SFL", "SFLDELE", "SFLEX", "SFLIST",
"SFLLIST", "SFSCALE", "SFTRAN", "/SHADE", "SHELL",
"/SHOW", "/SHOWDISP", "SHPP", "/SHRINK", "SLIST",
"SLOAD", "SMALL", "*SMAT", "SMAX", "/SMBC", "SMBODY",
"SMCONS", "SMFOR", "SMIN", "SMOOTH", "SMRTSIZE",
"SMSURF", "SMULT", "SNOPTION", "SOLU", "/SOLU",
"SOLUOPT", "SOLVE", "SORT", "SOURCE", "SPACE",
"SPCNOD", "SPCTEMP", "SPDAMP", "SPEC", "SPFREQ",
"SPGRAPH", "SPH4", "SPH5", "SPHERE", "SPLINE", "SPLOT",
"SPMWRITE", "SPOINT", "SPOPT", "SPREAD", "SPTOPT",
"SPOWER", "SPUNIT", "SPVAL", "SQRT", "*SREAD", "SRSS",
"SSBT", "/SSCALE", "SSLN", "SSMT", "SSPA", "SSPB",
"SSPD", "SSPE", "SSPM", "SSUM", "SSTATE", "STABILIZE",
"STAOPT", "STAT", "*STATUS", "/STATUS", "STEF",
"/STITLE", "STORE", "SUBOPT", "SUBSET", "SUCALC",
"SUCR", "SUDEL", "SUEVAL", "SUGET", "SUMAP", "SUMTYPE",
"SUPL", "SUPR", "SURESU", "SUSAVE", "SUSEL", "SUVECT",
"SV", "SVPLOT", "SVTYP", "SWADD", "SWDEL", "SWGEN",
"SWLIST", "SYNCHRO", "/SYP", "/SYS", "TALLOW",
"TARGET", "*TAXIS", "TB", "TBCOPY", "TBDATA", "TBDELE",
"TBEO", "TBIN", "TBFIELD", "TBFT", "TBLE", "TBLIST",
"TBMODIF", "TBPLOT", "TBPT", "TBTEMP", "TCHG", "/TEE",
"TERM", "THEXPAND", "THOPT", "TIFF", "TIME",
"TIMERANGE", "TIMINT", "TIMP", "TINTP", "/TITLE",
"/TLABEL", "TOFFST", "*TOPER", "TORQ2D", "TORQC2D",
"TORQSUM", "TORUS", "TRANS", "TRANSFER", "*TREAD",
"TREF", "/TRIAD", "/TRLCY", "TRNOPT", "TRPDEL",
"TRPLIS", "TRPOIN", "TRTIME", "TSHAP", "/TSPEC",
"TSRES", "TUNIF", "TVAR", "/TXTRE", "/TYPE", "TYPE",
"/UCMD", "/UDOC", "/UI", "UIMP", "/UIS", "*ULIB",
"UNDELETE", "UNDO", "/UNITS", "UNPAUSE", "UPCOORD",
"UPGEOM", "*USE", "/USER", "USRCAL", "USRDOF",
"USRELEM", "V", "V2DOPT", "VA", "*VABS", "VADD",
"VARDEL", "VARNAM", "VATT", "VCLEAR", "*VCOL",
"/VCONE", "VCROSS", "*VCUM", "VDDAM", "VDELE", "VDGL",
"VDOT", "VDRAG", "*VEC", "*VEDIT", "VEORIENT", "VEXT",
"*VFACT", "*VFILL", "VFOPT", "VFQUERY", "VFSM",
"*VFUN", "VGEN", "*VGET", "VGET", "VGLUE", "/VIEW",
"VIMP", "VINP", "VINV", "*VITRP", "*VLEN", "VLIST",
"VLSCALE", "*VMASK", "VMESH", "VOFFST", "VOLUMES")
# list of in-built () functions
elafunf = ("NX()", "NY()", "NZ()", "KX()", "KY()", "KZ()", "LX()",
"LY()", "LZ()", "LSX()", "LSY()", "LSZ()", "NODE()",
"KP()", "DISTND()", "DISTKP()", "DISTEN()", "ANGLEN()",
"ANGLEK()", "NNEAR()", "KNEAR()", "ENEARN()",
"AREAND()", "AREAKP()", "ARNODE()", "NORMNX()",
"NORMNY()", "NORMNZ()", "NORMKX()", "NORMKY()",
"NORMKZ()", "ENEXTN()", "NELEM()", "NODEDOF()",
"ELADJ()", "NDFACE()", "NMFACE()", "ARFACE()", "UX()",
"UY()", "UZ()", "ROTX()", "ROTY()", "ROTZ()", "TEMP()",
"PRES()", "VX()", "VY()", "VZ()", "ENKE()", "ENDS()",
"VOLT()", "MAG()", "AX()", "AY()", "AZ()",
"VIRTINQR()", "KWGET()", "VALCHR()", "VALHEX()",
"CHRHEX()", "STRFILL()", "STRCOMP()", "STRPOS()",
"STRLENG()", "UPCASE()", "LWCASE()", "JOIN()",
"SPLIT()", "ABS()", "SIGN()", "CXABS()", "EXP()",
"LOG()", "LOG10()", "SQRT()", "NINT()", "MOD()",
"RAND()", "GDIS()", "SIN()", "COS()", "TAN()",
"SINH()", "COSH()", "TANH()", "ASIN()", "ACOS()",
"ATAN()", "ATAN2()")
elafung = ("NSEL()", "ESEL()", "KSEL()", "LSEL()", "ASEL()",
"VSEL()", "NDNEXT()", "ELNEXT()", "KPNEXT()",
"LSNEXT()", "ARNEXT()", "VLNEXT()", "CENTRX()",
"CENTRY()", "CENTRZ()")
elafunh = ("~CAT5IN", "~CATIAIN", "~PARAIN", "~PROEIN", "~SATIN",
"~UGIN", "A", "AADD", "AATT", "ABEXTRACT", "*ABBR",
"ABBRES", "ABBSAV", "ABS", "ACCAT", "ACCOPTION",
"ACEL", "ACLEAR", "ADAMS", "ADAPT", "ADD", "ADDAM",
"ADELE", "ADGL", "ADRAG", "AESIZE", "AFILLT", "AFLIST",
"AFSURF", "*AFUN", "AGEN", "AGLUE", "AINA", "AINP",
"AINV", "AL", "ALIST", "ALLSEL", "ALPHAD", "AMAP",
"AMESH", "/AN3D", "ANCNTR", "ANCUT", "ANCYC", "ANDATA",
"ANDSCL", "ANDYNA", "/ANFILE", "ANFLOW", "/ANGLE",
"ANHARM", "ANIM", "ANISOS", "ANMODE", "ANMRES",
"/ANNOT", "ANORM", "ANPRES", "ANSOL", "ANSTOAQWA",
"ANSTOASAS", "ANTIME", "ANTYPE")
tokens = {
'root': [
(r'!.*\n', Comment),
include('strings'),
include('core'),
include('nums'),
(words((elafunb+elafunc+elafund+elafune+elafunh), suffix=r'\b'), Keyword),
(words((elafunf+elafung), suffix=r'\b'), Name.Builtin),
(r'AR[0-9]+', Name.Variable.Instance),
(r'[a-z][a-z0-9_]*', Name.Variable),
(r'[\s]+', Whitespace),
],
'core': [
# Operators
(r'(\*\*|\*|\+|-|\/|<|>|<=|>=|==|\/=|=)', Operator),
(r'/EOF', Generic.Emph),
(r'[(),:&;]', Punctuation),
],
'strings': [
(r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
(r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
(r'[$%]', String.Symbol),
],
'nums': [
(r'\d+(?![.ef])', Number.Integer),
(r'[+-]?\d*\.?\d+([ef][-+]?\d+)?', Number.Float),
(r'[+-]?\d+\.?\d*([ef][-+]?\d+)?', Number.Float),
]
}
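# Illustrative usage sketch (not part of the upstream Pygments module): it
# assumes Pygments is importable and uses an invented two-command APDL snippet
# purely to show how the rules above drive tokenisation.
if __name__ == "__main__":
    _demo = "/PREP7\nET,1,SOLID185  ! define an element type\nFINISH\n"
    # get_tokens() yields (token_type, text) pairs for the demo source.
    for _ttype, _value in apdlexer().get_tokens(_demo):
        print(_ttype, repr(_value))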
| 26,654 | Python | 58.497768 | 86 | 0.441172 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/factor.py | """
pygments.lexers.factor
~~~~~~~~~~~~~~~~~~~~~~
Lexers for the Factor language.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, bygroups, default, words
from pygments.token import Text, Comment, Keyword, Name, String, Number, \
Whitespace, Punctuation
__all__ = ['FactorLexer']
class FactorLexer(RegexLexer):
"""
Lexer for the Factor language.
.. versionadded:: 1.4
"""
name = 'Factor'
url = 'http://factorcode.org'
aliases = ['factor']
filenames = ['*.factor']
mimetypes = ['text/x-factor']
builtin_kernel = words((
'-rot', '2bi', '2bi@', '2bi*', '2curry', '2dip', '2drop', '2dup', '2keep', '2nip',
'2over', '2tri', '2tri@', '2tri*', '3bi', '3curry', '3dip', '3drop', '3dup', '3keep',
'3tri', '4dip', '4drop', '4dup', '4keep', '<wrapper>', '=', '>boolean', 'clone',
'?', '?execute', '?if', 'and', 'assert', 'assert=', 'assert?', 'bi', 'bi-curry',
'bi-curry@', 'bi-curry*', 'bi@', 'bi*', 'boa', 'boolean', 'boolean?', 'both?',
'build', 'call', 'callstack', 'callstack>array', 'callstack?', 'clear', '(clone)',
'compose', 'compose?', 'curry', 'curry?', 'datastack', 'die', 'dip', 'do', 'drop',
'dup', 'dupd', 'either?', 'eq?', 'equal?', 'execute', 'hashcode', 'hashcode*',
'identity-hashcode', 'identity-tuple', 'identity-tuple?', 'if', 'if*',
'keep', 'loop', 'most', 'new', 'nip', 'not', 'null', 'object', 'or', 'over',
'pick', 'prepose', 'retainstack', 'rot', 'same?', 'swap', 'swapd', 'throw',
'tri', 'tri-curry', 'tri-curry@', 'tri-curry*', 'tri@', 'tri*', 'tuple',
'tuple?', 'unless', 'unless*', 'until', 'when', 'when*', 'while', 'with',
'wrapper', 'wrapper?', 'xor'), suffix=r'(\s+)')
builtin_assocs = words((
'2cache', '<enum>', '>alist', '?at', '?of', 'assoc', 'assoc-all?',
'assoc-any?', 'assoc-clone-like', 'assoc-combine', 'assoc-diff',
'assoc-diff!', 'assoc-differ', 'assoc-each', 'assoc-empty?',
'assoc-filter', 'assoc-filter!', 'assoc-filter-as', 'assoc-find',
'assoc-hashcode', 'assoc-intersect', 'assoc-like', 'assoc-map',
'assoc-map-as', 'assoc-partition', 'assoc-refine', 'assoc-size',
'assoc-stack', 'assoc-subset?', 'assoc-union', 'assoc-union!',
'assoc=', 'assoc>map', 'assoc?', 'at', 'at+', 'at*', 'cache', 'change-at',
'clear-assoc', 'delete-at', 'delete-at*', 'enum', 'enum?', 'extract-keys',
'inc-at', 'key?', 'keys', 'map>assoc', 'maybe-set-at', 'new-assoc', 'of',
'push-at', 'rename-at', 'set-at', 'sift-keys', 'sift-values', 'substitute',
'unzip', 'value-at', 'value-at*', 'value?', 'values', 'zip'), suffix=r'(\s+)')
builtin_combinators = words((
'2cleave', '2cleave>quot', '3cleave', '3cleave>quot', '4cleave',
'4cleave>quot', 'alist>quot', 'call-effect', 'case', 'case-find',
'case>quot', 'cleave', 'cleave>quot', 'cond', 'cond>quot', 'deep-spread>quot',
'execute-effect', 'linear-case-quot', 'no-case', 'no-case?', 'no-cond',
'no-cond?', 'recursive-hashcode', 'shallow-spread>quot', 'spread',
'to-fixed-point', 'wrong-values', 'wrong-values?'), suffix=r'(\s+)')
builtin_math = words((
'-', '/', '/f', '/i', '/mod', '2/', '2^', '<', '<=', '<fp-nan>', '>',
'>=', '>bignum', '>fixnum', '>float', '>integer', '(all-integers?)',
'(each-integer)', '(find-integer)', '*', '+', '?1+',
'abs', 'align', 'all-integers?', 'bignum', 'bignum?', 'bit?', 'bitand',
'bitnot', 'bitor', 'bits>double', 'bits>float', 'bitxor', 'complex',
'complex?', 'denominator', 'double>bits', 'each-integer', 'even?',
'find-integer', 'find-last-integer', 'fixnum', 'fixnum?', 'float',
'float>bits', 'float?', 'fp-bitwise=', 'fp-infinity?', 'fp-nan-payload',
'fp-nan?', 'fp-qnan?', 'fp-sign', 'fp-snan?', 'fp-special?',
'if-zero', 'imaginary-part', 'integer', 'integer>fixnum',
'integer>fixnum-strict', 'integer?', 'log2', 'log2-expects-positive',
'log2-expects-positive?', 'mod', 'neg', 'neg?', 'next-float',
'next-power-of-2', 'number', 'number=', 'number?', 'numerator', 'odd?',
'out-of-fixnum-range', 'out-of-fixnum-range?', 'power-of-2?',
'prev-float', 'ratio', 'ratio?', 'rational', 'rational?', 'real',
'real-part', 'real?', 'recip', 'rem', 'sgn', 'shift', 'sq', 'times',
'u<', 'u<=', 'u>', 'u>=', 'unless-zero', 'unordered?', 'when-zero',
'zero?'), suffix=r'(\s+)')
builtin_sequences = words((
'1sequence', '2all?', '2each', '2map', '2map-as', '2map-reduce', '2reduce',
'2selector', '2sequence', '3append', '3append-as', '3each', '3map', '3map-as',
'3sequence', '4sequence', '<repetition>', '<reversed>', '<slice>', '?first',
'?last', '?nth', '?second', '?set-nth', 'accumulate', 'accumulate!',
'accumulate-as', 'all?', 'any?', 'append', 'append!', 'append-as',
'assert-sequence', 'assert-sequence=', 'assert-sequence?',
'binary-reduce', 'bounds-check', 'bounds-check?', 'bounds-error',
'bounds-error?', 'but-last', 'but-last-slice', 'cartesian-each',
'cartesian-map', 'cartesian-product', 'change-nth', 'check-slice',
'check-slice-error', 'clone-like', 'collapse-slice', 'collector',
'collector-for', 'concat', 'concat-as', 'copy', 'count', 'cut', 'cut-slice',
'cut*', 'delete-all', 'delete-slice', 'drop-prefix', 'each', 'each-from',
'each-index', 'empty?', 'exchange', 'filter', 'filter!', 'filter-as', 'find',
'find-from', 'find-index', 'find-index-from', 'find-last', 'find-last-from',
'first', 'first2', 'first3', 'first4', 'flip', 'follow', 'fourth', 'glue', 'halves',
'harvest', 'head', 'head-slice', 'head-slice*', 'head*', 'head?',
'if-empty', 'immutable', 'immutable-sequence', 'immutable-sequence?',
'immutable?', 'index', 'index-from', 'indices', 'infimum', 'infimum-by',
'insert-nth', 'interleave', 'iota', 'iota-tuple', 'iota-tuple?', 'join',
'join-as', 'last', 'last-index', 'last-index-from', 'length', 'lengthen',
'like', 'longer', 'longer?', 'longest', 'map', 'map!', 'map-as', 'map-find',
'map-find-last', 'map-index', 'map-integers', 'map-reduce', 'map-sum',
'max-length', 'member-eq?', 'member?', 'midpoint@', 'min-length',
'mismatch', 'move', 'new-like', 'new-resizable', 'new-sequence',
'non-negative-integer-expected', 'non-negative-integer-expected?',
'nth', 'nths', 'pad-head', 'pad-tail', 'padding', 'partition', 'pop', 'pop*',
'prefix', 'prepend', 'prepend-as', 'produce', 'produce-as', 'product', 'push',
'push-all', 'push-either', 'push-if', 'reduce', 'reduce-index', 'remove',
'remove!', 'remove-eq', 'remove-eq!', 'remove-nth', 'remove-nth!', 'repetition',
'repetition?', 'replace-slice', 'replicate', 'replicate-as', 'rest',
'rest-slice', 'reverse', 'reverse!', 'reversed', 'reversed?', 'second',
'selector', 'selector-for', 'sequence', 'sequence-hashcode', 'sequence=',
'sequence?', 'set-first', 'set-fourth', 'set-last', 'set-length', 'set-nth',
'set-second', 'set-third', 'short', 'shorten', 'shorter', 'shorter?',
'shortest', 'sift', 'slice', 'slice-error', 'slice-error?', 'slice?',
'snip', 'snip-slice', 'start', 'start*', 'subseq', 'subseq?', 'suffix',
'suffix!', 'sum', 'sum-lengths', 'supremum', 'supremum-by', 'surround', 'tail',
'tail-slice', 'tail-slice*', 'tail*', 'tail?', 'third', 'trim',
'trim-head', 'trim-head-slice', 'trim-slice', 'trim-tail', 'trim-tail-slice',
'unclip', 'unclip-last', 'unclip-last-slice', 'unclip-slice', 'unless-empty',
'virtual-exemplar', 'virtual-sequence', 'virtual-sequence?', 'virtual@',
'when-empty'), suffix=r'(\s+)')
builtin_namespaces = words((
'+@', 'change', 'change-global', 'counter', 'dec', 'get', 'get-global',
'global', 'inc', 'init-namespaces', 'initialize', 'is-global', 'make-assoc',
'namespace', 'namestack', 'off', 'on', 'set', 'set-global', 'set-namestack',
'toggle', 'with-global', 'with-scope', 'with-variable', 'with-variables'),
suffix=r'(\s+)')
builtin_arrays = words((
'1array', '2array', '3array', '4array', '<array>', '>array', 'array',
'array?', 'pair', 'pair?', 'resize-array'), suffix=r'(\s+)')
builtin_io = words((
'(each-stream-block-slice)', '(each-stream-block)',
'(stream-contents-by-block)', '(stream-contents-by-element)',
'(stream-contents-by-length-or-block)',
'(stream-contents-by-length)', '+byte+', '+character+',
'bad-seek-type', 'bad-seek-type?', 'bl', 'contents', 'each-block',
'each-block-size', 'each-block-slice', 'each-line', 'each-morsel',
'each-stream-block', 'each-stream-block-slice', 'each-stream-line',
'error-stream', 'flush', 'input-stream', 'input-stream?',
'invalid-read-buffer', 'invalid-read-buffer?', 'lines', 'nl',
'output-stream', 'output-stream?', 'print', 'read', 'read-into',
'read-partial', 'read-partial-into', 'read-until', 'read1', 'readln',
'seek-absolute', 'seek-absolute?', 'seek-end', 'seek-end?',
'seek-input', 'seek-output', 'seek-relative', 'seek-relative?',
'stream-bl', 'stream-contents', 'stream-contents*', 'stream-copy',
'stream-copy*', 'stream-element-type', 'stream-flush',
'stream-length', 'stream-lines', 'stream-nl', 'stream-print',
'stream-read', 'stream-read-into', 'stream-read-partial',
'stream-read-partial-into', 'stream-read-partial-unsafe',
'stream-read-unsafe', 'stream-read-until', 'stream-read1',
'stream-readln', 'stream-seek', 'stream-seekable?', 'stream-tell',
'stream-write', 'stream-write1', 'tell-input', 'tell-output',
'with-error-stream', 'with-error-stream*', 'with-error>output',
'with-input-output+error-streams',
'with-input-output+error-streams*', 'with-input-stream',
'with-input-stream*', 'with-output-stream', 'with-output-stream*',
'with-output>error', 'with-output+error-stream',
'with-output+error-stream*', 'with-streams', 'with-streams*',
'write', 'write1'), suffix=r'(\s+)')
builtin_strings = words((
'1string', '<string>', '>string', 'resize-string', 'string',
'string?'), suffix=r'(\s+)')
builtin_vectors = words((
'1vector', '<vector>', '>vector', '?push', 'vector', 'vector?'),
suffix=r'(\s+)')
builtin_continuations = words((
'<condition>', '<continuation>', '<restart>', 'attempt-all',
'attempt-all-error', 'attempt-all-error?', 'callback-error-hook',
'callcc0', 'callcc1', 'cleanup', 'compute-restarts', 'condition',
'condition?', 'continuation', 'continuation?', 'continue',
'continue-restart', 'continue-with', 'current-continuation',
'error', 'error-continuation', 'error-in-thread', 'error-thread',
'ifcc', 'ignore-errors', 'in-callback?', 'original-error', 'recover',
'restart', 'restart?', 'restarts', 'rethrow', 'rethrow-restarts',
'return', 'return-continuation', 'thread-error-hook', 'throw-continue',
'throw-restarts', 'with-datastack', 'with-return'), suffix=r'(\s+)')
tokens = {
'root': [
# factor allows a file to start with a shebang
(r'#!.*$', Comment.Preproc),
default('base'),
],
'base': [
(r'\s+', Whitespace),
# defining words
(r'((?:MACRO|MEMO|TYPED)?:[:]?)(\s+)(\S+)',
bygroups(Keyword, Whitespace, Name.Function)),
(r'(M:[:]?)(\s+)(\S+)(\s+)(\S+)',
bygroups(Keyword, Whitespace, Name.Class, Whitespace,
Name.Function)),
(r'(C:)(\s+)(\S+)(\s+)(\S+)',
bygroups(Keyword, Whitespace, Name.Function, Whitespace,
Name.Class)),
(r'(GENERIC:)(\s+)(\S+)',
bygroups(Keyword, Whitespace, Name.Function)),
(r'(HOOK:|GENERIC#)(\s+)(\S+)(\s+)(\S+)',
bygroups(Keyword, Whitespace, Name.Function, Whitespace,
Name.Function)),
(r'(\()(\s)', bygroups(Name.Function, Whitespace), 'stackeffect'),
(r'(;)(\s)', bygroups(Keyword, Whitespace)),
# imports and namespaces
(r'(USING:)(\s+)',
bygroups(Keyword.Namespace, Whitespace), 'vocabs'),
(r'(USE:|UNUSE:|IN:|QUALIFIED:)(\s+)(\S+)',
bygroups(Keyword.Namespace, Whitespace, Name.Namespace)),
(r'(QUALIFIED-WITH:)(\s+)(\S+)(\s+)(\S+)',
bygroups(Keyword.Namespace, Whitespace, Name.Namespace,
Whitespace, Name.Namespace)),
(r'(FROM:|EXCLUDE:)(\s+)(\S+)(\s+=>\s)',
bygroups(Keyword.Namespace, Whitespace, Name.Namespace,
Whitespace), 'words'),
(r'(RENAME:)(\s+)(\S+)(\s+)(\S+)(\s+)(=>)(\s+)(\S+)',
bygroups(Keyword.Namespace, Whitespace, Name.Function, Whitespace,
Name.Namespace, Whitespace, Punctuation, Whitespace,
Name.Function)),
(r'(ALIAS:|TYPEDEF:)(\s+)(\S+)(\s+)(\S+)',
bygroups(Keyword.Namespace, Whitespace, Name.Function, Whitespace,
Name.Function)),
(r'(DEFER:|FORGET:|POSTPONE:)(\s+)(\S+)',
bygroups(Keyword.Namespace, Whitespace, Name.Function)),
# tuples and classes
(r'(TUPLE:|ERROR:)(\s+)(\S+)(\s+)(<)(\s+)(\S+)',
bygroups(Keyword, Whitespace, Name.Class, Whitespace, Punctuation,
Whitespace, Name.Class), 'slots'),
(r'(TUPLE:|ERROR:|BUILTIN:)(\s+)(\S+)',
bygroups(Keyword, Whitespace, Name.Class), 'slots'),
(r'(MIXIN:|UNION:|INTERSECTION:)(\s+)(\S+)',
bygroups(Keyword, Whitespace, Name.Class)),
(r'(PREDICATE:)(\s+)(\S+)(\s+)(<)(\s+)(\S+)',
bygroups(Keyword, Whitespace, Name.Class, Whitespace,
Punctuation, Whitespace, Name.Class)),
(r'(C:)(\s+)(\S+)(\s+)(\S+)',
bygroups(Keyword, Whitespace, Name.Function, Whitespace, Name.Class)),
(r'(INSTANCE:)(\s+)(\S+)(\s+)(\S+)',
bygroups(Keyword, Whitespace, Name.Class, Whitespace, Name.Class)),
(r'(SLOT:)(\s+)(\S+)', bygroups(Keyword, Whitespace, Name.Function)),
(r'(SINGLETON:)(\s+)(\S+)', bygroups(Keyword, Whitespace, Name.Class)),
(r'SINGLETONS:', Keyword, 'classes'),
# other syntax
(r'(CONSTANT:|SYMBOL:|MAIN:|HELP:)(\s+)(\S+)',
bygroups(Keyword, Whitespace, Name.Function)),
(r'(SYMBOLS:)(\s+)', bygroups(Keyword, Whitespace), 'words'),
(r'(SYNTAX:)(\s+)', bygroups(Keyword, Whitespace)),
(r'(ALIEN:)(\s+)', bygroups(Keyword, Whitespace)),
(r'(STRUCT:)(\s+)(\S+)', bygroups(Keyword, Whitespace, Name.Class)),
(r'(FUNCTION:)(\s+)'
r'(\S+)(\s+)(\S+)(\s+)'
r'(\()(\s+)([^)]+)(\))(\s)',
bygroups(Keyword.Namespace, Whitespace,
Text, Whitespace, Name.Function, Whitespace,
Punctuation, Whitespace, Text, Punctuation, Whitespace)),
(r'(FUNCTION-ALIAS:)(\s+)'
r'(\S+)(\s+)(\S+)(\s+)'
r'(\S+)(\s+)'
r'(\()(\s+)([^)]+)(\))(\s)',
bygroups(Keyword.Namespace, Whitespace,
Text, Whitespace, Name.Function, Whitespace,
Name.Function, Whitespace,
Punctuation, Whitespace, Text, Punctuation, Whitespace)),
# vocab.private
(r'(<PRIVATE|PRIVATE>)(\s)', bygroups(Keyword.Namespace, Whitespace)),
# strings
(r'"""\s(?:.|\n)*?\s"""', String),
(r'"(?:\\\\|\\"|[^"])*"', String),
(r'(\S+")(\s+)((?:\\\\|\\"|[^"])*")',
bygroups(String, Whitespace, String)),
(r'(CHAR:)(\s+)(\\[\\abfnrstv]|[^\\]\S*)(\s)',
bygroups(String.Char, Whitespace, String.Char, Whitespace)),
# comments
(r'!\s+.*$', Comment),
(r'#!\s+.*$', Comment),
(r'/\*\s+(?:.|\n)*?\s\*/', Comment),
# boolean constants
(r'[tf]\b', Name.Constant),
# symbols and literals
(r'[\\$]\s+\S+', Name.Constant),
(r'M\\\s+\S+\s+\S+', Name.Constant),
# numbers
(r'[+-]?(?:[\d,]*\d)?\.(?:\d([\d,]*\d)?)?(?:[eE][+-]?\d+)?\s', Number),
(r'[+-]?\d(?:[\d,]*\d)?(?:[eE][+-]?\d+)?\s', Number),
(r'0x[a-fA-F\d](?:[a-fA-F\d,]*[a-fA-F\d])?(?:p\d([\d,]*\d)?)?\s', Number),
(r'NAN:\s+[a-fA-F\d](?:[a-fA-F\d,]*[a-fA-F\d])?(?:p\d([\d,]*\d)?)?\s', Number),
(r'0b[01]+\s', Number.Bin),
(r'0o[0-7]+\s', Number.Oct),
(r'(?:\d([\d,]*\d)?)?\+\d(?:[\d,]*\d)?/\d(?:[\d,]*\d)?\s', Number),
(r'(?:\-\d([\d,]*\d)?)?\-\d(?:[\d,]*\d)?/\d(?:[\d,]*\d)?\s', Number),
# keywords
(r'(?:deprecated|final|foldable|flushable|inline|recursive)\s',
Keyword),
# builtins
(builtin_kernel, bygroups(Name.Builtin, Whitespace)),
(builtin_assocs, bygroups(Name.Builtin, Whitespace)),
(builtin_combinators, bygroups(Name.Builtin, Whitespace)),
(builtin_math, bygroups(Name.Builtin, Whitespace)),
(builtin_sequences, bygroups(Name.Builtin, Whitespace)),
(builtin_namespaces, bygroups(Name.Builtin, Whitespace)),
(builtin_arrays, bygroups(Name.Builtin, Whitespace)),
(builtin_io, bygroups(Name.Builtin, Whitespace)),
(builtin_strings, bygroups(Name.Builtin, Whitespace)),
(builtin_vectors, bygroups(Name.Builtin, Whitespace)),
(builtin_continuations, bygroups(Name.Builtin, Whitespace)),
# everything else is text
(r'\S+', Text),
],
'stackeffect': [
(r'\s+', Whitespace),
(r'(\()(\s+)', bygroups(Name.Function, Whitespace), 'stackeffect'),
(r'(\))(\s+)', bygroups(Name.Function, Whitespace), '#pop'),
(r'(--)(\s+)', bygroups(Name.Function, Whitespace)),
(r'\S+', Name.Variable),
],
'slots': [
(r'\s+', Whitespace),
(r'(;)(\s+)', bygroups(Keyword, Whitespace), '#pop'),
(r'(\{)(\s+)(\S+)(\s+)([^}]+)(\s+)(\})(\s+)',
bygroups(Text, Whitespace, Name.Variable, Whitespace,
Text, Whitespace, Text, Whitespace)),
(r'\S+', Name.Variable),
],
'vocabs': [
(r'\s+', Whitespace),
(r'(;)(\s+)', bygroups(Keyword, Whitespace), '#pop'),
(r'\S+', Name.Namespace),
],
'classes': [
(r'\s+', Whitespace),
(r'(;)(\s+)', bygroups(Keyword, Whitespace), '#pop'),
(r'\S+', Name.Class),
],
'words': [
(r'\s+', Whitespace),
(r'(;)(\s+)', bygroups(Keyword, Whitespace), '#pop'),
(r'\S+', Name.Function),
],
}
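# Illustrative usage sketch (not part of the upstream Pygments module): it
# assumes Pygments is importable; the one-line Factor word definition below is
# invented and simply exercises the defining-word and builtin rules above.
if __name__ == "__main__":
    _demo = ": double ( x -- y ) 2 * ;\n"
    for _ttype, _value in FactorLexer().get_tokens(_demo):
        print(_ttype, repr(_value))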
| 19,531 | Python | 52.512329 | 93 | 0.515027 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/scdoc.py | """
pygments.lexers.scdoc
~~~~~~~~~~~~~~~~~~~~~
Lexer for scdoc, a simple man page generator.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, include, bygroups, using, this
from pygments.token import Text, Comment, Keyword, String, Generic
__all__ = ['ScdocLexer']
class ScdocLexer(RegexLexer):
"""
`scdoc` is a simple man page generator for POSIX systems written in C99.
.. versionadded:: 2.5
"""
name = 'scdoc'
url = 'https://git.sr.ht/~sircmpwn/scdoc'
aliases = ['scdoc', 'scd']
filenames = ['*.scd', '*.scdoc']
flags = re.MULTILINE
tokens = {
'root': [
# comment
(r'^(;.+\n)', bygroups(Comment)),
# heading with pound prefix
(r'^(#)([^#].+\n)', bygroups(Generic.Heading, Text)),
(r'^(#{2})(.+\n)', bygroups(Generic.Subheading, Text)),
# bulleted lists
(r'^(\s*)([*-])(\s)(.+\n)',
bygroups(Text, Keyword, Text, using(this, state='inline'))),
# numbered lists
(r'^(\s*)(\.+\.)( .+\n)',
bygroups(Text, Keyword, using(this, state='inline'))),
# quote
(r'^(\s*>\s)(.+\n)', bygroups(Keyword, Generic.Emph)),
# text block
(r'^(```\n)([\w\W]*?)(^```$)', bygroups(String, Text, String)),
include('inline'),
],
'inline': [
# escape
(r'\\.', Text),
# underlines
(r'(\s)(_[^_]+_)(\W|\n)', bygroups(Text, Generic.Emph, Text)),
# bold
(r'(\s)(\*[^*]+\*)(\W|\n)', bygroups(Text, Generic.Strong, Text)),
# inline code
(r'`[^`]+`', String.Backtick),
# general text, must come last!
(r'[^\\\s]+', Text),
(r'.', Text),
],
}
def analyse_text(text):
"""This is very similar to markdown, save for the escape characters
needed for * and _."""
result = 0
if '\\*' in text:
result += 0.01
if '\\_' in text:
result += 0.01
return result
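# Illustrative usage sketch (not part of the upstream Pygments module): it
# assumes Pygments is importable; the scdoc fragment is invented and shows the
# heading, emphasis and inline-code rules in action.
if __name__ == "__main__":
    _demo = "example(1)\n\n# NAME\n\nexample - demonstrate *bold*, _underlined_ and `code` text\n"
    for _ttype, _value in ScdocLexer().get_tokens(_demo):
        print(_ttype, repr(_value))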
| 2,239 | Python | 27 | 78 | 0.472086 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/varnish.py | """
pygments.lexers.varnish
~~~~~~~~~~~~~~~~~~~~~~~
Lexers for Varnish configuration.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, include, bygroups, using, this, \
inherit, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Literal, Whitespace
__all__ = ['VCLLexer', 'VCLSnippetLexer']
class VCLLexer(RegexLexer):
"""
For Varnish Configuration Language (VCL).
.. versionadded:: 2.2
"""
name = 'VCL'
aliases = ['vcl']
filenames = ['*.vcl']
mimetypes = ['text/x-vclsrc']
def analyse_text(text):
# If the very first line is 'vcl 4.0;' it's pretty much guaranteed
# that this is VCL
if text.startswith('vcl 4.0;'):
return 1.0
# Skip over comments and blank lines
# This is accurate enough that returning 0.9 is reasonable.
# Almost no VCL files start without some comments.
elif '\nvcl 4.0;' in text[:1000]:
return 0.9
tokens = {
'probe': [
include('whitespace'),
include('comments'),
(r'(\.\w+)(\s*=\s*)([^;]*)(;)',
bygroups(Name.Attribute, Operator, using(this), Punctuation)),
(r'\}', Punctuation, '#pop'),
],
'acl': [
include('whitespace'),
include('comments'),
(r'[!/]+', Operator),
(r';', Punctuation),
(r'\d+', Number),
(r'\}', Punctuation, '#pop'),
],
'backend': [
include('whitespace'),
(r'(\.probe)(\s*=\s*)(\w+)(;)',
bygroups(Name.Attribute, Operator, Name.Variable.Global, Punctuation)),
(r'(\.probe)(\s*=\s*)(\{)',
bygroups(Name.Attribute, Operator, Punctuation), 'probe'),
(r'(\.\w+\b)(\s*=\s*)([^;\s]*)(\s*;)',
bygroups(Name.Attribute, Operator, using(this), Punctuation)),
(r'\{', Punctuation, '#push'),
(r'\}', Punctuation, '#pop'),
],
'statements': [
(r'(\d\.)?\d+[sdwhmy]', Literal.Date),
(r'(\d\.)?\d+ms', Literal.Date),
(r'(vcl_pass|vcl_hash|vcl_hit|vcl_init|vcl_backend_fetch|vcl_pipe|'
r'vcl_backend_response|vcl_synth|vcl_deliver|vcl_backend_error|'
r'vcl_fini|vcl_recv|vcl_purge|vcl_miss)\b', Name.Function),
(r'(pipe|retry|hash|synth|deliver|purge|abandon|lookup|pass|fail|ok|'
r'miss|fetch|restart)\b', Name.Constant),
(r'(beresp|obj|resp|req|req_top|bereq)\.http\.[a-zA-Z_-]+\b', Name.Variable),
(words((
'obj.status', 'req.hash_always_miss', 'beresp.backend', 'req.esi_level',
'req.can_gzip', 'beresp.ttl', 'obj.uncacheable', 'req.ttl', 'obj.hits',
'client.identity', 'req.hash_ignore_busy', 'obj.reason', 'req.xid',
'req_top.proto', 'beresp.age', 'obj.proto', 'obj.age', 'local.ip',
'beresp.uncacheable', 'req.method', 'beresp.backend.ip', 'now',
'obj.grace', 'req.restarts', 'beresp.keep', 'req.proto', 'resp.proto',
'bereq.xid', 'bereq.between_bytes_timeout', 'req.esi',
'bereq.first_byte_timeout', 'bereq.method', 'bereq.connect_timeout',
'beresp.do_gzip', 'resp.status', 'beresp.do_gunzip',
'beresp.storage_hint', 'resp.is_streaming', 'beresp.do_stream',
'req_top.method', 'bereq.backend', 'beresp.backend.name', 'beresp.status',
'req.url', 'obj.keep', 'obj.ttl', 'beresp.reason', 'bereq.retries',
'resp.reason', 'bereq.url', 'beresp.do_esi', 'beresp.proto', 'client.ip',
'bereq.proto', 'server.hostname', 'remote.ip', 'req.backend_hint',
'server.identity', 'req_top.url', 'beresp.grace', 'beresp.was_304',
'server.ip', 'bereq.uncacheable'), suffix=r'\b'),
Name.Variable),
(r'[!%&+*\-,/<.}{>=|~]+', Operator),
(r'[();]', Punctuation),
(r'[,]+', Punctuation),
(words(('hash_data', 'regsub', 'regsuball', 'if', 'else',
'elsif', 'elif', 'synth', 'synthetic', 'ban',
'return', 'set', 'unset', 'import', 'include', 'new',
'rollback', 'call'), suffix=r'\b'),
Keyword),
(r'storage\.\w+\.\w+\b', Name.Variable),
(words(('true', 'false')), Name.Builtin),
(r'\d+\b', Number),
(r'(backend)(\s+\w+)(\s*\{)',
bygroups(Keyword, Name.Variable.Global, Punctuation), 'backend'),
(r'(probe\s)(\s*\w+\s)(\{)',
bygroups(Keyword, Name.Variable.Global, Punctuation), 'probe'),
(r'(acl\s)(\s*\w+\s)(\{)',
bygroups(Keyword, Name.Variable.Global, Punctuation), 'acl'),
(r'(vcl )(4.0)(;)$',
bygroups(Keyword.Reserved, Name.Constant, Punctuation)),
(r'(sub\s+)([a-zA-Z]\w*)(\s*\{)',
bygroups(Keyword, Name.Function, Punctuation)),
(r'([a-zA-Z_]\w*)'
r'(\.)'
r'([a-zA-Z_]\w*)'
r'(\s*\(.*\))',
bygroups(Name.Function, Punctuation, Name.Function, using(this))),
(r'[a-zA-Z_]\w*', Name),
],
'comment': [
(r'[^*/]+', Comment.Multiline),
(r'/\*', Comment.Multiline, '#push'),
(r'\*/', Comment.Multiline, '#pop'),
(r'[*/]', Comment.Multiline),
],
'comments': [
(r'#.*$', Comment),
(r'/\*', Comment.Multiline, 'comment'),
(r'//.*$', Comment),
],
'string': [
(r'"', String, '#pop'),
(r'[^"\n]+', String), # all other characters
],
'multistring': [
(r'[^"}]', String),
(r'"\}', String, '#pop'),
(r'["}]', String),
],
'whitespace': [
(r'L?"', String, 'string'),
(r'\{"', String, 'multistring'),
(r'\n', Whitespace),
(r'\s+', Whitespace),
(r'\\\n', Text), # line continuation
],
'root': [
include('whitespace'),
include('comments'),
include('statements'),
(r'\s+', Whitespace),
],
}
class VCLSnippetLexer(VCLLexer):
"""
For Varnish Configuration Language snippets.
.. versionadded:: 2.2
"""
name = 'VCLSnippets'
aliases = ['vclsnippets', 'vclsnippet']
mimetypes = ['text/x-vclsnippet']
filenames = []
def analyse_text(text):
# override method inherited from VCLLexer
return 0
tokens = {
'snippetspre': [
(r'\.\.\.+', Comment),
(r'(bereq|req|req_top|resp|beresp|obj|client|server|local|remote|'
r'storage)($|\.\*)', Name.Variable),
],
'snippetspost': [
(r'(backend)\b', Keyword.Reserved),
],
'root': [
include('snippetspre'),
inherit,
include('snippetspost'),
],
}
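# Illustrative usage sketch (not part of the upstream Pygments module): it
# assumes Pygments is importable; the small VCL fragment is invented.  Because
# it begins with 'vcl 4.0;', analyse_text() above should score it 1.0.
if __name__ == "__main__":
    _demo = 'vcl 4.0;\nsub vcl_recv {\n    set req.http.X-Demo = "1";\n}\n'
    print(VCLLexer.analyse_text(_demo))
    for _ttype, _value in VCLLexer().get_tokens(_demo):
        print(_ttype, repr(_value))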
| 7,273 | Python | 37.28421 | 90 | 0.47917 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/elpi.py | """
pygments.lexers.elpi
~~~~~~~~~~~~~~~~~~~~
Lexer for the `Elpi <http://github.com/LPCIC/elpi>`_ programming language.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, bygroups, include
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number
__all__ = ['ElpiLexer']
class ElpiLexer(RegexLexer):
"""
Lexer for the Elpi programming language.
.. versionadded:: 2.11
"""
name = 'Elpi'
url = 'http://github.com/LPCIC/elpi'
aliases = ['elpi']
filenames = ['*.elpi']
mimetypes = ['text/x-elpi']
lcase_re = r"[a-z]"
ucase_re = r"[A-Z]"
digit_re = r"[0-9]"
schar2_re = r"([+*^?/<>`'@#~=&!])"
schar_re = r"({}|-|\$|_)".format(schar2_re)
idchar_re = r"({}|{}|{}|{})".format(lcase_re,ucase_re,digit_re,schar_re)
idcharstarns_re = r"({}*(\.({}|{}){}*)*)".format(idchar_re, lcase_re, ucase_re, idchar_re)
symbchar_re = r"({}|{}|{}|{}|:)".format(lcase_re, ucase_re, digit_re, schar_re)
constant_re = r"({}{}*|{}{}|{}{}*|_{}+)".format(ucase_re, idchar_re, lcase_re, idcharstarns_re, schar2_re, symbchar_re, idchar_re)
symbol_re = r"(,|<=>|->|:-|;|\?-|->|&|=>|\bas\b|\buvar\b|<|=<|=|==|>=|>|\bi<|\bi=<|\bi>=|\bi>|\bis\b|\br<|\br=<|\br>=|\br>|\bs<|\bs=<|\bs>=|\bs>|@|::|\[\]|`->|`:|`:=|\^|-|\+|\bi-|\bi\+|r-|r\+|/|\*|\bdiv\b|\bi\*|\bmod\b|\br\*|~|\bi~|\br~)"
escape_re = r"\(({}|{})\)".format(constant_re,symbol_re)
const_sym_re = r"({}|{}|{})".format(constant_re,symbol_re,escape_re)
tokens = {
'root': [
include('elpi')
],
'elpi': [
include('_elpi-comment'),
(r"(:before|:after|:if|:name)(\s*)(\")",
bygroups(Keyword.Mode, Text.Whitespace, String.Double),
'elpi-string'),
(r"(:index)(\s*\()", bygroups(Keyword.Mode, Text.Whitespace),
'elpi-indexing-expr'),
(r"\b(external pred|pred)(\s+)({})".format(const_sym_re),
bygroups(Keyword.Declaration, Text.Whitespace, Name.Function),
'elpi-pred-item'),
(r"\b(external type|type)(\s+)(({}(,\s*)?)+)".format(const_sym_re),
bygroups(Keyword.Declaration, Text.Whitespace, Name.Function),
'elpi-type'),
(r"\b(kind)(\s+)(({}|,)+)".format(const_sym_re),
bygroups(Keyword.Declaration, Text.Whitespace, Name.Function),
'elpi-type'),
(r"\b(typeabbrev)(\s+)({})".format(const_sym_re),
bygroups(Keyword.Declaration, Text.Whitespace, Name.Function),
'elpi-type'),
(r"\b(accumulate)(\s+)(\")",
bygroups(Keyword.Declaration, Text.Whitespace, String.Double),
'elpi-string'),
(r"\b(accumulate|namespace|local)(\s+)({})".format(constant_re),
bygroups(Keyword.Declaration, Text.Whitespace, Text)),
(r"\b(shorten)(\s+)({}\.)".format(constant_re),
bygroups(Keyword.Declaration, Text.Whitespace, Text)),
(r"\b(pi|sigma)(\s+)([a-zA-Z][A-Za-z0-9_ ]*)(\\)",
bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable, Text)),
(r"\b(constraint)(\s+)(({}(\s+)?)+)".format(const_sym_re),
bygroups(Keyword.Declaration, Text.Whitespace, Name.Function),
'elpi-chr-rule-start'),
(r"(?=[A-Z_]){}".format(constant_re), Name.Variable),
(r"(?=[a-z_]){}\\".format(constant_re), Name.Variable),
(r"_", Name.Variable),
(r"({}|!|=>|;)".format(symbol_re), Keyword.Declaration),
(constant_re, Text),
(r"\[|\]|\||=>", Keyword.Declaration),
(r'"', String.Double, 'elpi-string'),
(r'`', String.Double, 'elpi-btick'),
(r'\'', String.Double, 'elpi-tick'),
(r'\{[^\{]', Text, 'elpi-spill'),
(r"\(", Text, 'elpi-in-parens'),
(r'\d[\d_]*', Number.Integer),
(r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
(r"[\+\*\-/\^\.]", Operator),
],
'_elpi-comment': [
(r'%[^\n]*\n', Comment),
(r'/\*', Comment, 'elpi-multiline-comment'),
(r"\s+", Text.Whitespace),
],
'elpi-multiline-comment': [
(r'\*/', Comment, '#pop'),
(r'.', Comment)
],
'elpi-indexing-expr':[
(r'[0-9 _]+', Number.Integer),
(r'\)', Text, '#pop'),
],
'elpi-type': [
(r"(ctype\s+)(\")", bygroups(Keyword.Type, String.Double), 'elpi-string'),
(r'->', Keyword.Type),
(constant_re, Keyword.Type),
(r"\(|\)", Keyword.Type),
(r"\.", Text, '#pop'),
include('_elpi-comment'),
],
'elpi-chr-rule-start': [
(r"\{", Text, 'elpi-chr-rule'),
include('_elpi-comment'),
],
'elpi-chr-rule': [
(r"\brule\b", Keyword.Declaration),
(r"\\", Keyword.Declaration),
(r"\}", Text, '#pop:2'),
include('elpi'),
],
'elpi-pred-item': [
(r"[io]:", Keyword.Mode, 'elpi-ctype'),
(r"\.", Text, '#pop'),
include('_elpi-comment'),
],
'elpi-ctype': [
(r"(ctype\s+)(\")", bygroups(Keyword.Type, String.Double), 'elpi-string'),
(r'->', Keyword.Type),
(constant_re, Keyword.Type),
(r"\(|\)", Keyword.Type),
(r",", Text, '#pop'),
(r"\.", Text, '#pop:2'),
include('_elpi-comment'),
],
'elpi-btick': [
(r'[^` ]+', String.Double),
(r'`', String.Double, '#pop'),
],
'elpi-tick': [
(r'[^\' ]+', String.Double),
(r'\'', String.Double, '#pop'),
],
'elpi-string': [
(r'[^\"]+', String.Double),
(r'"', String.Double, '#pop'),
],
'elpi-spill': [
(r'\{[^\{]', Text, '#push'),
(r'\}[^\}]', Text, '#pop'),
include('elpi'),
],
'elpi-in-parens': [
(r"\(", Operator, '#push'),
(r"\)", Operator, '#pop'),
include('elpi'),
],
}
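# Illustrative usage sketch (not part of the upstream Pygments module): it
# assumes Pygments is importable; the one-clause Elpi program below is invented.
if __name__ == "__main__":
    _demo = "% relate a list with itself\nsame L L.\n"
    for _ttype, _value in ElpiLexer().get_tokens(_demo):
        print(_ttype, repr(_value))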
| 6,370 | Python | 37.379518 | 242 | 0.451648 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/snobol.py | """
pygments.lexers.snobol
~~~~~~~~~~~~~~~~~~~~~~
Lexers for the SNOBOL language.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, bygroups
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation
__all__ = ['SnobolLexer']
class SnobolLexer(RegexLexer):
"""
Lexer for the SNOBOL4 programming language.
Recognizes the common ASCII equivalents of the original SNOBOL4 operators.
Does not require spaces around binary operators.
.. versionadded:: 1.5
"""
name = "Snobol"
aliases = ["snobol"]
filenames = ['*.snobol']
mimetypes = ['text/x-snobol']
tokens = {
# root state, start of line
# comments, continuation lines, and directives start in column 1
# as do labels
'root': [
(r'\*.*\n', Comment),
(r'[+.] ', Punctuation, 'statement'),
(r'-.*\n', Comment),
(r'END\s*\n', Name.Label, 'heredoc'),
(r'[A-Za-z$][\w$]*', Name.Label, 'statement'),
(r'\s+', Text, 'statement'),
],
# statement state, line after continuation or label
'statement': [
(r'\s*\n', Text, '#pop'),
(r'\s+', Text),
(r'(?<=[^\w.])(LT|LE|EQ|NE|GE|GT|INTEGER|IDENT|DIFFER|LGT|SIZE|'
r'REPLACE|TRIM|DUPL|REMDR|DATE|TIME|EVAL|APPLY|OPSYN|LOAD|UNLOAD|'
r'LEN|SPAN|BREAK|ANY|NOTANY|TAB|RTAB|REM|POS|RPOS|FAIL|FENCE|'
r'ABORT|ARB|ARBNO|BAL|SUCCEED|INPUT|OUTPUT|TERMINAL)(?=[^\w.])',
Name.Builtin),
(r'[A-Za-z][\w.]*', Name),
# ASCII equivalents of original operators
# | for the EBCDIC equivalent, ! likewise
# \ for EBCDIC negation
(r'\*\*|[?$.!%*/#+\-@|&\\=]', Operator),
(r'"[^"]*"', String),
(r"'[^']*'", String),
# Accept SPITBOL syntax for real numbers
# as well as Macro SNOBOL4
(r'[0-9]+(?=[^.EeDd])', Number.Integer),
(r'[0-9]+(\.[0-9]*)?([EDed][-+]?[0-9]+)?', Number.Float),
# Goto
(r':', Punctuation, 'goto'),
(r'[()<>,;]', Punctuation),
],
# Goto block
'goto': [
(r'\s*\n', Text, "#pop:2"),
(r'\s+', Text),
(r'F|S', Keyword),
(r'(\()([A-Za-z][\w.]*)(\))',
bygroups(Punctuation, Name.Label, Punctuation))
],
# everything after the END statement is basically one
# big heredoc.
'heredoc': [
(r'.*\n', String.Heredoc)
]
}
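# Illustrative usage sketch (not part of the upstream Pygments module): it
# assumes Pygments is importable; the two-line SNOBOL4 program is invented.
if __name__ == "__main__":
    _demo = "          OUTPUT = 'HELLO, WORLD'\nEND\n"
    for _ttype, _value in SnobolLexer().get_tokens(_demo):
        print(_ttype, repr(_value))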
| 2,732 | Python | 31.92771 | 79 | 0.496706 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/ezhil.py | """
pygments.lexers.ezhil
~~~~~~~~~~~~~~~~~~~~~
Pygments lexers for Ezhil language.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, include, words
from pygments.token import Keyword, Comment, Name, String, Number, \
Punctuation, Operator, Whitespace
__all__ = ['EzhilLexer']
class EzhilLexer(RegexLexer):
"""
Lexer for Ezhil, a Tamil script-based programming language.
.. versionadded:: 2.1
"""
name = 'Ezhil'
url = 'http://ezhillang.org'
aliases = ['ezhil']
filenames = ['*.n']
mimetypes = ['text/x-ezhil']
# Refer to tamil.utf8.tamil_letters from open-tamil for a stricter version of this.
# This much simpler version is close enough, and includes combining marks.
_TALETTERS = '[a-zA-Z_]|[\u0b80-\u0bff]'
tokens = {
'root': [
include('keywords'),
(r'#.*$', Comment.Single),
(r'[@+/*,^\-%]|[!<>=]=?|&&?|\|\|?', Operator),
('இல்', Operator.Word),
(words(('assert', 'max', 'min',
'நீளம்', 'சரம்_இடமாற்று', 'சரம்_கண்டுபிடி',
'பட்டியல்', 'பின்இணை', 'வரிசைப்படுத்து',
'எடு', 'தலைகீழ்', 'நீட்டிக்க', 'நுழைக்க', 'வை',
'கோப்பை_திற', 'கோப்பை_எழுது', 'கோப்பை_மூடு',
'pi', 'sin', 'cos', 'tan', 'sqrt', 'hypot', 'pow',
'exp', 'log', 'log10', 'exit',
), suffix=r'\b'), Name.Builtin),
(r'(True|False)\b', Keyword.Constant),
(r'[^\S\n]+', Whitespace),
include('identifier'),
include('literal'),
(r'[(){}\[\]:;.]', Punctuation),
],
'keywords': [
('பதிப்பி|தேர்ந்தெடு|தேர்வு|ஏதேனில்|ஆனால்|இல்லைஆனால்|இல்லை|ஆக|ஒவ்வொன்றாக|இல்|வரை|செய்|முடியேனில்|பின்கொடு|முடி|நிரல்பாகம்|தொடர்|நிறுத்து|நிரல்பாகம்', Keyword),
],
'identifier': [
('(?:'+_TALETTERS+')(?:[0-9]|'+_TALETTERS+')*', Name),
],
'literal': [
(r'".*?"', String),
(r'\d+((\.\d*)?[eE][+-]?\d+|\.\d*)', Number.Float),
(r'\d+', Number.Integer),
]
}
def analyse_text(text):
"""This language uses Tamil-script. We'll assume that if there's a
decent amount of Tamil-characters, it's this language. This assumption
is obviously horribly off if someone uses string literals in tamil
in another language."""
if len(re.findall(r'[\u0b80-\u0bff]', text)) > 10:
return 0.25
def __init__(self, **options):
super().__init__(**options)
self.encoding = options.get('encoding', 'utf-8')
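# Illustrative usage sketch (not part of the upstream Pygments module): it
# assumes Pygments is importable; the one-line Ezhil program is invented.  With
# more than ten Tamil characters present, analyse_text() above returns 0.25.
if __name__ == "__main__":
    _demo = 'பதிப்பி "வணக்கம், உலகம்"\n'
    print(EzhilLexer.analyse_text(_demo))
    for _ttype, _value in EzhilLexer().get_tokens(_demo):
        print(_ttype, repr(_value))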
| 2,773 | Python | 34.564102 | 171 | 0.485755 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/q.py | """
pygments.lexers.q
~~~~~~~~~~~~~~~~~
Lexer for the Q programming language.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, words, include, bygroups, inherit
from pygments.token import Comment, Name, Number, Operator, Punctuation, \
String, Whitespace, Literal, Generic
__all__ = ["KLexer", "QLexer"]
class KLexer(RegexLexer):
"""
For `K <https://code.kx.com/>`_ source code.
.. versionadded:: 2.12
"""
name = "K"
aliases = ["k"]
filenames = ["*.k"]
tokens = {
"whitespace": [
# hashbang script
(r"^#!.*", Comment.Hashbang),
# Comments
(r"^/\s*\n", Comment.Multiline, "comments"),
(r"(?<!\S)/.*", Comment.Single),
# Whitespace
(r"\s+", Whitespace),
# Strings
(r"\"", String.Double, "strings"),
],
"root": [
include("whitespace"),
include("keywords"),
include("declarations"),
],
"keywords": [
(words(("abs", "acos", "asin", "atan", "avg", "bin",
"binr", "by", "cor", "cos", "cov", "dev",
"delete", "div", "do", "enlist", "exec", "exit",
"exp", "from", "getenv", "hopen", "if", "in",
"insert", "last", "like", "log", "max", "min",
"prd", "select", "setenv", "sin", "sqrt", "ss",
"sum", "tan", "update", "var", "wavg", "while",
"within", "wsum", "xexp"),
suffix=r"\b"), Operator.Word),
],
"declarations": [
# Timing
(r"^\\ts?", Comment.Preproc),
(r"^(\\\w\s+[^/\n]*?)(/.*)",
bygroups(Comment.Preproc, Comment.Single)),
# Generic System Commands
(r"^\\\w.*", Comment.Preproc),
# Prompt
(r"^[a-zA-Z]\)", Generic.Prompt),
# Function Names
(r"([.]?[a-zA-Z][\w.]*)(\s*)([-.~=!@#$%^&*_+|,<>?/\\:']?:)(\s*)(\{)",
bygroups(Name.Function, Whitespace, Operator, Whitespace, Punctuation),
"functions"),
# Variable Names
(r"([.]?[a-zA-Z][\w.]*)(\s*)([-.~=!@#$%^&*_+|,<>?/\\:']?:)",
bygroups(Name.Variable, Whitespace, Operator)),
# Functions
(r"\{", Punctuation, "functions"),
# Parentheses
(r"\(", Punctuation, "parentheses"),
# Brackets
(r"\[", Punctuation, "brackets"),
# Errors
(r"'`([a-zA-Z][\w.]*)?", Name.Exception),
# File Symbols
(r"`:([a-zA-Z/][\w./]*)?", String.Symbol),
# Symbols
(r"`([a-zA-Z][\w.]*)?", String.Symbol),
# Numbers
include("numbers"),
# Variable Names
(r"[a-zA-Z][\w.]*", Name),
# Operators
(r"[-=+*#$%@!~^&:.,<>'\\|/?_]", Operator),
# Punctuation
(r";", Punctuation),
],
"functions": [
include("root"),
(r"\}", Punctuation, "#pop"),
],
"parentheses": [
include("root"),
(r"\)", Punctuation, "#pop"),
],
"brackets": [
include("root"),
(r"\]", Punctuation, "#pop"),
],
"numbers": [
# Binary Values
(r"[01]+b", Number.Bin),
# Nulls/Infinities
(r"0[nNwW][cefghijmndzuvtp]?", Number),
# Timestamps
((r"(?:[0-9]{4}[.][0-9]{2}[.][0-9]{2}|[0-9]+)"
"D(?:[0-9](?:[0-9](?::[0-9]{2}"
"(?::[0-9]{2}(?:[.][0-9]*)?)?)?)?)?"), Literal.Date),
# Datetimes
((r"[0-9]{4}[.][0-9]{2}"
"(?:m|[.][0-9]{2}(?:T(?:[0-9]{2}:[0-9]{2}"
"(?::[0-9]{2}(?:[.][0-9]*)?)?)?)?)"), Literal.Date),
# Times
(r"[0-9]{2}:[0-9]{2}(?::[0-9]{2}(?:[.][0-9]{1,3})?)?",
Literal.Date),
# GUIDs
(r"[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}",
Number.Hex),
# Byte Vectors
(r"0x[0-9a-fA-F]+", Number.Hex),
# Floats
(r"([0-9]*[.]?[0-9]+|[0-9]+[.]?[0-9]*)[eE][+-]?[0-9]+[ef]?",
Number.Float),
(r"([0-9]*[.][0-9]+|[0-9]+[.][0-9]*)[ef]?", Number.Float),
(r"[0-9]+[ef]", Number.Float),
# Characters
(r"[0-9]+c", Number),
# Integers
(r"[0-9]+[ihtuv]", Number.Integer),
# Long Integers
(r"[0-9]+[jnp]?", Number.Integer.Long),
],
"comments": [
(r"[^\\]+", Comment.Multiline),
(r"^\\", Comment.Multiline, "#pop"),
(r"\\", Comment.Multiline),
],
"strings": [
(r'[^"\\]+', String.Double),
(r"\\.", String.Escape),
(r'"', String.Double, "#pop"),
],
}
class QLexer(KLexer):
"""
For `Q <https://code.kx.com/>`_ source code.
.. versionadded:: 2.12
"""
name = "Q"
aliases = ["q"]
filenames = ["*.q"]
tokens = {
"root": [
(words(("aj", "aj0", "ajf", "ajf0", "all", "and", "any", "asc",
"asof", "attr", "avgs", "ceiling", "cols", "count", "cross",
"csv", "cut", "deltas", "desc", "differ", "distinct", "dsave",
"each", "ej", "ema", "eval", "except", "fby", "fills", "first",
"fkeys", "flip", "floor", "get", "group", "gtime", "hclose",
"hcount", "hdel", "hsym", "iasc", "idesc", "ij", "ijf",
"inter", "inv", "key", "keys", "lj", "ljf", "load", "lower",
"lsq", "ltime", "ltrim", "mavg", "maxs", "mcount", "md5",
"mdev", "med", "meta", "mins", "mmax", "mmin", "mmu", "mod",
"msum", "neg", "next", "not", "null", "or", "over", "parse",
"peach", "pj", "prds", "prior", "prev", "rand", "rank", "ratios",
"raze", "read0", "read1", "reciprocal", "reval", "reverse",
"rload", "rotate", "rsave", "rtrim", "save", "scan", "scov",
"sdev", "set", "show", "signum", "ssr", "string", "sublist",
"sums", "sv", "svar", "system", "tables", "til", "trim", "txf",
"type", "uj", "ujf", "ungroup", "union", "upper", "upsert",
"value", "view", "views", "vs", "where", "wj", "wj1", "ww",
"xasc", "xbar", "xcol", "xcols", "xdesc", "xgroup", "xkey",
"xlog", "xprev", "xrank"),
suffix=r"\b"), Name.Builtin,
),
inherit,
],
}
| 6,932 | Python | 35.682539 | 85 | 0.383151 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/email.py | """
pygments.lexers.email
~~~~~~~~~~~~~~~~~~~~~
    Lexer for raw E-mail.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, DelegatingLexer, bygroups
from pygments.lexers.mime import MIMELexer
from pygments.token import Text, Keyword, Name, String, Number, Comment
from pygments.util import get_bool_opt
__all__ = ["EmailLexer"]
class EmailHeaderLexer(RegexLexer):
"""
    Sub-lexer for raw E-mail. This lexer only processes the header part of an e-mail.
.. versionadded:: 2.5
"""
def __init__(self, **options):
super().__init__(**options)
self.highlight_x = get_bool_opt(options, "highlight-X-header", False)
def get_x_header_tokens(self, match):
if self.highlight_x:
# field
yield match.start(1), Name.Tag, match.group(1)
# content
default_actions = self.get_tokens_unprocessed(
match.group(2), stack=("root", "header"))
yield from default_actions
else:
# lowlight
yield match.start(1), Comment.Special, match.group(1)
yield match.start(2), Comment.Multiline, match.group(2)
tokens = {
"root": [
(r"^(?:[A-WYZ]|X400)[\w\-]*:", Name.Tag, "header"),
(r"^(X-(?:\w[\w\-]*:))([\s\S]*?\n)(?![ \t])", get_x_header_tokens),
],
"header": [
# folding
(r"\n[ \t]", Text.Whitespace),
(r"\n(?![ \t])", Text.Whitespace, "#pop"),
# keywords
(r"\bE?SMTPS?\b", Keyword),
(r"\b(?:HE|EH)LO\b", Keyword),
# mailbox
(r"[\w\.\-\+=]+@[\w\.\-]+", Name.Label),
(r"<[\w\.\-\+=]+@[\w\.\-]+>", Name.Label),
# domain
(r"\b(\w[\w\.-]*\.[\w\.-]*\w[a-zA-Z]+)\b", Name.Function),
# IPv4
(r"(?<=\b)(?:(?:25[0-5]|2[0-4][0-9]|1?[0-9][0-9]?)\.){3}(?:25[0"
r"-5]|2[0-4][0-9]|1?[0-9][0-9]?)(?=\b)",
Number.Integer),
# IPv6
(r"(?<=\b)([0-9a-fA-F]{1,4}:){1,7}:(?!\b)", Number.Hex),
(r"(?<=\b):((:[0-9a-fA-F]{1,4}){1,7}|:)(?=\b)", Number.Hex),
(r"(?<=\b)([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}(?=\b)", Number.Hex),
(r"(?<=\b)([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}(?=\b)", Number.Hex),
(r"(?<=\b)[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})(?=\b)", Number.Hex),
(r"(?<=\b)fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}(?=\b)", Number.Hex),
(r"(?<=\b)([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}(?=\b)", Number.Hex),
(r"(?<=\b)([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}(?=\b)",
Number.Hex),
(r"(?<=\b)([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}(?=\b)",
Number.Hex),
(r"(?<=\b)([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}(?=\b)",
Number.Hex),
(r"(?<=\b)::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}"
r"[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}"
r"[0-9])(?=\b)",
Number.Hex),
(r"(?<=\b)([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9])"
r"{0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])(?=\b)",
Number.Hex),
# Date time
(r"(?:(Sun|Mon|Tue|Wed|Thu|Fri|Sat),\s+)?(0[1-9]|[1-2]?[0-9]|3["
r"01])\s+(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s+("
r"19[0-9]{2}|[2-9][0-9]{3})\s+(2[0-3]|[0-1][0-9]):([0-5][0-9])"
r"(?::(60|[0-5][0-9]))?(?:\.\d{1,5})?\s+([-\+][0-9]{2}[0-5][0-"
r"9]|\(?(?:UTC?|GMT|(?:E|C|M|P)(?:ST|ET|DT)|[A-IK-Z])\)?)",
Name.Decorator),
# RFC-2047 encoded string
(r"(=\?)([\w-]+)(\?)([BbQq])(\?)([\[\w!\"#$%&\'()*+,-./:;<=>@[\\"
r"\]^_`{|}~]+)(\?=)",
bygroups(String.Affix, Name.Constant, String.Affix, Keyword.Constant,
String.Affix, Number.Hex, String.Affix)),
# others
(r'[\s]+', Text.Whitespace),
(r'[\S]', Text),
],
}
class EmailLexer(DelegatingLexer):
"""
Lexer for raw E-mail.
Additional options accepted:
`highlight-X-header`
        Highlight the fields of user-defined ``X-`` email headers. (default:
``False``).
.. versionadded:: 2.5
"""
name = "E-mail"
aliases = ["email", "eml"]
filenames = ["*.eml"]
mimetypes = ["message/rfc822"]
def __init__(self, **options):
super().__init__(EmailHeaderLexer, MIMELexer, Comment, **options)
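# ---------------------------------------------------------------------------
# Hedged usage sketch (added for illustration; not part of the original
# module). It exercises the ``highlight-X-header`` option documented above on
# a tiny made-up header block; the sample text is an assumption. The option
# is read by EmailHeaderLexer and forwarded by EmailLexer to its sub-lexers.
if __name__ == "__main__":
    sample = "Subject: hello\nX-Mailer: demo-agent 1.0\n"
    # Default: the X- header is lowlighted (Comment.Special / Comment.Multiline).
    for tok, value in EmailHeaderLexer().get_tokens(sample):
        print(tok, repr(value))
    print("---")
    # Opt in: the X- field name becomes Name.Tag and its value is re-lexed
    # like any other header content.
    for tok, value in EmailHeaderLexer(**{"highlight-X-header": True}).get_tokens(sample):
        print(tok, repr(value))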
| 4,742 | Python | 34.661654 | 91 | 0.427457 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/arturo.py | """
pygments.lexers.arturo
~~~~~~~~~~~~~~~~~~~~~~
Lexer for the Arturo language.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, bygroups, do_insertions, include, \
this, using, words
from pygments.token import Comment, Error, Keyword, Name, Number, Operator, \
Punctuation, String, Text
from pygments.util import ClassNotFound, get_bool_opt
__all__ = ['ArturoLexer']
class ArturoLexer(RegexLexer):
"""
For Arturo source code.
See `Arturo's Github <https://github.com/arturo-lang/arturo>`_
and `Arturo's Website <https://arturo-lang.io/>`_.
.. versionadded:: 2.14.0
"""
name = 'Arturo'
aliases = ['arturo', 'art']
filenames = ['*.art']
url = 'https://arturo-lang.io/'
def __init__(self, **options):
self.handle_annotateds = get_bool_opt(options, 'handle_annotateds',
True)
RegexLexer.__init__(self, **options)
def handle_annotated_strings(self, match):
"""Adds syntax from another languages inside annotated strings
match args:
1:open_string,
2:exclamation_mark,
3:lang_name,
4:space_or_newline,
5:code,
6:close_string
"""
from pygments.lexers import get_lexer_by_name
# Header's section
yield match.start(1), String.Double, match.group(1)
yield match.start(2), String.Interpol, match.group(2)
yield match.start(3), String.Interpol, match.group(3)
yield match.start(4), Text.Whitespace, match.group(4)
lexer = None
if self.handle_annotateds:
try:
lexer = get_lexer_by_name(match.group(3).strip())
except ClassNotFound:
pass
code = match.group(5)
if lexer is None:
            yield match.start(5), String, code
else:
yield from do_insertions([], lexer.get_tokens_unprocessed(code))
yield match.start(6), String.Double, match.group(6)
tokens = {
'root': [
(r';.*?$', Comment.Single),
(r'^((\s#!)|(#!)).*?$', Comment.Hashbang),
# Constants
(words(('false', 'true', 'maybe'), # boolean
suffix=r'\b'), Name.Constant),
(words(('this', 'init'), # class related keywords
prefix=r'\b', suffix=r'\b\??:?'), Name.Builtin.Pseudo),
(r'`.`', String.Char), # character
(r'\\\w+\b\??:?', Name.Property), # array index
(r'#\w+', Name.Constant), # color
(r'\b[0-9]+\.[0-9]+', Number.Float), # float
(r'\b[0-9]+', Number.Integer), # integer
(r'\w+\b\??:', Name.Label), # label
# Note: Literals can be labeled too
(r'\'(?:\w+\b\??:?)', Keyword.Declaration), # literal
(r'\:\w+', Keyword.Type), # type
# Note: Attributes can be labeled too
(r'\.\w+\??:?', Name.Attribute), # attributes
# Switch structure
(r'(\()(.*?)(\)\?)',
bygroups(Punctuation, using(this), Punctuation)),
# Single Line Strings
(r'"', String.Double, 'inside-simple-string'),
(r'»', String.Single, 'inside-smart-string'),
(r'«««', String.Double, 'inside-safe-string'),
(r'\{\/', String.Single, 'inside-regex-string'),
# Multi Line Strings
(r'\{\:', String.Double, 'inside-curly-verb-string'),
(r'(\{)(\!)(\w+)(\s|\n)([\w\W]*?)(^\})', handle_annotated_strings),
(r'\{', String.Single, 'inside-curly-string'),
(r'\-{3,}', String.Single, 'inside-eof-string'),
include('builtin-functions'),
# Operators
(r'[()[\],]', Punctuation),
(words(('->', '==>', '|', '::', '@', '#', # sugar syntax
'$', '&', '!', '!!', './')), Name.Decorator),
(words(('<:', ':>', ':<', '>:', '<\\', '<>', '<', '>',
'ø', '∞',
'+', '-', '*', '~', '=', '^', '%', '/', '//',
'==>', '<=>', '<==>',
'=>>', '<<=>>', '<<==>>',
'-->', '<->', '<-->',
'=|', '|=', '-:', ':-',
'_', '.', '..', '\\')), Operator),
(r'\b\w+', Name),
(r'\s+', Text.Whitespace),
(r'.+$', Error),
],
'inside-interpol': [
(r'\|', String.Interpol, '#pop'),
(r'[^|]+', using(this)),
],
'inside-template': [
(r'\|\|\>', String.Interpol, '#pop'),
(r'[^|]+', using(this)),
],
'string-escape': [
(words(('\\\\', '\\n', '\\t', '\\"')), String.Escape),
],
'inside-simple-string': [
include('string-escape'),
(r'\|', String.Interpol, 'inside-interpol'), # Interpolation
(r'\<\|\|', String.Interpol, 'inside-template'), # Templates
(r'"', String.Double, '#pop'), # Closing Quote
(r'[^|"]+', String) # String Content
],
'inside-smart-string': [
include('string-escape'),
(r'\|', String.Interpol, 'inside-interpol'), # Interpolation
(r'\<\|\|', String.Interpol, 'inside-template'), # Templates
(r'\n', String.Single, '#pop'), # Closing Quote
(r'[^|\n]+', String) # String Content
],
'inside-safe-string': [
include('string-escape'),
(r'\|', String.Interpol, 'inside-interpol'), # Interpolation
(r'\<\|\|', String.Interpol, 'inside-template'), # Templates
(r'»»»', String.Double, '#pop'), # Closing Quote
(r'[^|»]+', String) # String Content
],
'inside-regex-string': [
(r'\\[sSwWdDbBZApPxucItnvfr0]+', String.Escape),
(r'\|', String.Interpol, 'inside-interpol'), # Interpolation
(r'\<\|\|', String.Interpol, 'inside-template'), # Templates
(r'\/\}', String.Single, '#pop'), # Closing Quote
(r'[^|\/]+', String.Regex), # String Content
],
'inside-curly-verb-string': [
include('string-escape'),
(r'\|', String.Interpol, 'inside-interpol'), # Interpolation
(r'\<\|\|', String.Interpol, 'inside-template'), # Templates
(r'\:\}', String.Double, '#pop'), # Closing Quote
(r'[^|<:]+', String), # String Content
],
'inside-curly-string': [
include('string-escape'),
(r'\|', String.Interpol, 'inside-interpol'), # Interpolation
(r'\<\|\|', String.Interpol, 'inside-template'), # Templates
(r'\}', String.Single, '#pop'), # Closing Quote
(r'[^|<}]+', String), # String Content
],
'inside-eof-string': [
include('string-escape'),
(r'\|', String.Interpol, 'inside-interpol'), # Interpolation
(r'\<\|\|', String.Interpol, 'inside-template'), # Templates
(r'\Z', String.Single, '#pop'), # Closing Quote
(r'[^|<]+', String), # String Content
],
'builtin-functions': [
(words((
'all', 'and', 'any', 'ascii', 'attr', 'attribute',
                'attributeLabel', 'binary', 'block', 'char', 'contains',
'database', 'date', 'dictionary', 'empty', 'equal', 'even',
'every', 'exists', 'false', 'floatin', 'function', 'greater',
'greaterOrEqual', 'if', 'in', 'inline', 'integer', 'is',
'key', 'label', 'leap', 'less', 'lessOrEqual', 'literal',
'logical', 'lower', 'nand', 'negative', 'nor', 'not',
'notEqual', 'null', 'numeric', 'odd', 'or', 'path',
'pathLabel', 'positive', 'prefix', 'prime', 'set', 'some',
'sorted', 'standalone', 'string', 'subset', 'suffix',
'superset', 'ymbol', 'true', 'try', 'type', 'unless', 'upper',
'when', 'whitespace', 'word', 'xnor', 'xor', 'zero',
), prefix=r'\b', suffix=r'\b\?'), Name.Builtin),
(words((
'abs', 'acos', 'acosh', 'acsec', 'acsech', 'actan', 'actanh',
'add', 'after', 'alphabet', 'and', 'angle', 'append', 'arg',
'args', 'arity', 'array', 'as', 'asec', 'asech', 'asin',
'asinh', 'atan', 'atan2', 'atanh', 'attr', 'attrs', 'average',
'before', 'benchmark', 'blend', 'break', 'builtins1',
'builtins2', 'call', 'capitalize', 'case', 'ceil', 'chop',
'chunk', 'clear', 'close', 'cluster', 'color', 'combine',
'conj', 'continue', 'copy', 'cos', 'cosh', 'couple', 'csec',
'csech', 'ctan', 'ctanh', 'cursor', 'darken', 'dec', 'decode',
'decouple', 'define', 'delete', 'desaturate', 'deviation',
'dictionary', 'difference', 'digest', 'digits', 'div', 'do',
'download', 'drop', 'dup', 'e', 'else', 'empty', 'encode',
'ensure', 'env', 'epsilon', 'escape', 'execute', 'exit', 'exp',
'extend', 'extract', 'factors', 'false', 'fdiv', 'filter',
'first', 'flatten', 'floor', 'fold', 'from', 'function',
'gamma', 'gcd', 'get', 'goto', 'hash', 'help', 'hypot', 'if',
'in', 'inc', 'indent', 'index', 'infinity', 'info', 'input',
'insert', 'inspect', 'intersection', 'invert', 'join', 'keys',
'kurtosis', 'last', 'let', 'levenshtein', 'lighten', 'list',
'ln', 'log', 'loop', 'lower', 'mail', 'map', 'match', 'max',
'maybe', 'median', 'min', 'mod', 'module', 'mul', 'nand',
'neg', 'new', 'nor', 'normalize', 'not', 'now', 'null', 'open',
'or', 'outdent', 'pad', 'panic', 'path', 'pause',
'permissions', 'permutate', 'pi', 'pop', 'pow', 'powerset',
'powmod', 'prefix', 'print', 'prints', 'process', 'product',
'query', 'random', 'range', 'read', 'relative', 'remove',
'rename', 'render', 'repeat', 'replace', 'request', 'return',
'reverse', 'round', 'sample', 'saturate', 'script', 'sec',
'sech', 'select', 'serve', 'set', 'shl', 'shr', 'shuffle',
'sin', 'sinh', 'size', 'skewness', 'slice', 'sort', 'split',
'sqrt', 'squeeze', 'stack', 'strip', 'sub', 'suffix', 'sum',
'switch', 'symbols', 'symlink', 'sys', 'take', 'tan', 'tanh',
'terminal', 'to', 'true', 'truncate', 'try', 'type', 'union',
'unique', 'unless', 'until', 'unzip', 'upper', 'values', 'var',
'variance', 'volume', 'webview', 'while', 'with', 'wordwrap',
'write', 'xnor', 'xor', 'zip'
), prefix=r'\b', suffix=r'\b'), Name.Builtin)
],
}
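# ---------------------------------------------------------------------------
# Hedged usage sketch (added for illustration; not part of the original
# module). It shows the annotated-string delegation implemented by
# handle_annotated_strings() above: a ``{!lang ...}`` block is re-lexed with
# the named language's lexer unless ``handle_annotateds`` is disabled. The
# snippet is a made-up assumption, not real Arturo project code.
if __name__ == "__main__":
    snippet = "{!python\nprint(40 + 2)\n}\n"
    # Default: the inner code is delegated to the Python lexer.
    for tok, value in ArturoLexer().get_tokens(snippet):
        print(tok, repr(value))
    print("---")
    # With handle_annotateds=False the inner code stays a plain String token.
    for tok, value in ArturoLexer(handle_annotateds=False).get_tokens(snippet):
        print(tok, repr(value))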
| 11,406 | Python | 44.446215 | 79 | 0.445818 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/c_cpp.py | """
pygments.lexers.c_cpp
~~~~~~~~~~~~~~~~~~~~~
Lexers for C/C++ languages.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, include, bygroups, using, \
this, inherit, default, words
from pygments.util import get_bool_opt
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Whitespace
__all__ = ['CLexer', 'CppLexer']
class CFamilyLexer(RegexLexer):
"""
For C family source code. This is used as a base class to avoid repetitious
definitions.
"""
# The trailing ?, rather than *, avoids a geometric performance drop here.
#: only one /* */ style comment
_ws1 = r'\s*(?:/[*].*?[*]/\s*)?'
# Hexadecimal part in an hexadecimal integer/floating-point literal.
# This includes decimal separators matching.
_hexpart = r'[0-9a-fA-F](\'?[0-9a-fA-F])*'
# Decimal part in an decimal integer/floating-point literal.
# This includes decimal separators matching.
_decpart = r'\d(\'?\d)*'
# Integer literal suffix (e.g. 'ull' or 'll').
_intsuffix = r'(([uU][lL]{0,2})|[lL]{1,2}[uU]?)?'
# Identifier regex with C and C++ Universal Character Name (UCN) support.
_ident = r'(?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8})+'
_namespaced_ident = r'(?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|::)+'
# Single and multiline comment regexes
# Beware not to use *? for the inner content! When these regexes
# are embedded in larger regexes, that can cause the stuff*? to
# match more than it would have if the regex had been used in
# a standalone way ...
_comment_single = r'//(?:.|(?<=\\)\n)*\n'
_comment_multiline = r'/(?:\\\n)?[*](?:[^*]|[*](?!(?:\\\n)?/))*[*](?:\\\n)?/'
# Regex to match optional comments
_possible_comments = rf'\s*(?:(?:(?:{_comment_single})|(?:{_comment_multiline}))\s*)*'
tokens = {
'whitespace': [
# preprocessor directives: without whitespace
(r'^#if\s+0', Comment.Preproc, 'if0'),
('^#', Comment.Preproc, 'macro'),
# or with whitespace
('^(' + _ws1 + r')(#if\s+0)',
bygroups(using(this), Comment.Preproc), 'if0'),
('^(' + _ws1 + ')(#)',
bygroups(using(this), Comment.Preproc), 'macro'),
# Labels:
# Line start and possible indentation.
(r'(^[ \t]*)'
# Not followed by keywords which can be mistaken as labels.
r'(?!(?:public|private|protected|default)\b)'
# Actual label, followed by a single colon.
r'(' + _ident + r')(\s*)(:)(?!:)',
bygroups(Whitespace, Name.Label, Whitespace, Punctuation)),
(r'\n', Whitespace),
(r'[^\S\n]+', Whitespace),
(r'\\\n', Text), # line continuation
(_comment_single, Comment.Single),
(_comment_multiline, Comment.Multiline),
# Open until EOF, so no ending delimiter
(r'/(\\\n)?[*][\w\W]*', Comment.Multiline),
],
'statements': [
include('keywords'),
include('types'),
(r'([LuU]|u8)?(")', bygroups(String.Affix, String), 'string'),
(r"([LuU]|u8)?(')(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])(')",
bygroups(String.Affix, String.Char, String.Char, String.Char)),
# Hexadecimal floating-point literals (C11, C++17)
(r'0[xX](' + _hexpart + r'\.' + _hexpart + r'|\.' + _hexpart +
r'|' + _hexpart + r')[pP][+-]?' + _hexpart + r'[lL]?', Number.Float),
(r'(-)?(' + _decpart + r'\.' + _decpart + r'|\.' + _decpart + r'|' +
_decpart + r')[eE][+-]?' + _decpart + r'[fFlL]?', Number.Float),
(r'(-)?((' + _decpart + r'\.(' + _decpart + r')?|\.' +
_decpart + r')[fFlL]?)|(' + _decpart + r'[fFlL])', Number.Float),
(r'(-)?0[xX]' + _hexpart + _intsuffix, Number.Hex),
(r'(-)?0[bB][01](\'?[01])*' + _intsuffix, Number.Bin),
(r'(-)?0(\'?[0-7])+' + _intsuffix, Number.Oct),
(r'(-)?' + _decpart + _intsuffix, Number.Integer),
(r'[~!%^&*+=|?:<>/-]', Operator),
(r'[()\[\],.]', Punctuation),
(r'(true|false|NULL)\b', Name.Builtin),
(_ident, Name)
],
'types': [
(words(('int8', 'int16', 'int32', 'int64', 'wchar_t'), prefix=r'__',
suffix=r'\b'), Keyword.Reserved),
(words(('bool', 'int', 'long', 'float', 'short', 'double', 'char',
'unsigned', 'signed', 'void'), suffix=r'\b'), Keyword.Type)
],
'keywords': [
(r'(struct|union)(\s+)', bygroups(Keyword, Whitespace), 'classname'),
(r'case\b', Keyword, 'case-value'),
(words(('asm', 'auto', 'break', 'const', 'continue', 'default',
'do', 'else', 'enum', 'extern', 'for', 'goto', 'if',
'register', 'restricted', 'return', 'sizeof', 'struct',
'static', 'switch', 'typedef', 'volatile', 'while', 'union',
'thread_local', 'alignas', 'alignof', 'static_assert', '_Pragma'),
suffix=r'\b'), Keyword),
(words(('inline', '_inline', '__inline', 'naked', 'restrict',
'thread'), suffix=r'\b'), Keyword.Reserved),
# Vector intrinsics
(r'(__m(128i|128d|128|64))\b', Keyword.Reserved),
# Microsoft-isms
(words((
'asm', 'based', 'except', 'stdcall', 'cdecl',
'fastcall', 'declspec', 'finally', 'try',
'leave', 'w64', 'unaligned', 'raise', 'noop',
'identifier', 'forceinline', 'assume'),
prefix=r'__', suffix=r'\b'), Keyword.Reserved)
],
'root': [
include('whitespace'),
include('keywords'),
# functions
(r'(' + _namespaced_ident + r'(?:[&*\s])+)' # return arguments
r'(' + _possible_comments + r')'
r'(' + _namespaced_ident + r')' # method name
r'(' + _possible_comments + r')'
r'(\([^;"\')]*?\))' # signature
r'(' + _possible_comments + r')'
r'([^;{/"\']*)(\{)',
bygroups(using(this), using(this, state='whitespace'),
Name.Function, using(this, state='whitespace'),
using(this), using(this, state='whitespace'),
using(this), Punctuation),
'function'),
# function declarations
(r'(' + _namespaced_ident + r'(?:[&*\s])+)' # return arguments
r'(' + _possible_comments + r')'
r'(' + _namespaced_ident + r')' # method name
r'(' + _possible_comments + r')'
r'(\([^;"\')]*?\))' # signature
r'(' + _possible_comments + r')'
r'([^;/"\']*)(;)',
bygroups(using(this), using(this, state='whitespace'),
Name.Function, using(this, state='whitespace'),
using(this), using(this, state='whitespace'),
using(this), Punctuation)),
include('types'),
default('statement'),
],
'statement': [
include('whitespace'),
include('statements'),
(r'\}', Punctuation),
(r'[{;]', Punctuation, '#pop'),
],
'function': [
include('whitespace'),
include('statements'),
(';', Punctuation),
(r'\{', Punctuation, '#push'),
(r'\}', Punctuation, '#pop'),
],
'string': [
(r'"', String, '#pop'),
(r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|'
r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape),
(r'[^\\"\n]+', String), # all other characters
(r'\\\n', String), # line continuation
(r'\\', String), # stray backslash
],
'macro': [
(r'('+_ws1+r')(include)('+_ws1+r')("[^"]+")([^\n]*)',
bygroups(using(this), Comment.Preproc, using(this),
Comment.PreprocFile, Comment.Single)),
(r'('+_ws1+r')(include)('+_ws1+r')(<[^>]+>)([^\n]*)',
bygroups(using(this), Comment.Preproc, using(this),
Comment.PreprocFile, Comment.Single)),
(r'[^/\n]+', Comment.Preproc),
(r'/[*](.|\n)*?[*]/', Comment.Multiline),
(r'//.*?\n', Comment.Single, '#pop'),
(r'/', Comment.Preproc),
(r'(?<=\\)\n', Comment.Preproc),
(r'\n', Comment.Preproc, '#pop'),
],
'if0': [
(r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
(r'^\s*#el(?:se|if).*\n', Comment.Preproc, '#pop'),
(r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
(r'.*?\n', Comment),
],
'classname': [
(_ident, Name.Class, '#pop'),
# template specification
(r'\s*(?=>)', Text, '#pop'),
default('#pop')
],
# Mark identifiers preceded by `case` keyword as constants.
'case-value': [
(r'(?<!:)(:)(?!:)', Punctuation, '#pop'),
(_ident, Name.Constant),
include('whitespace'),
include('statements'),
]
}
stdlib_types = {
'size_t', 'ssize_t', 'off_t', 'wchar_t', 'ptrdiff_t', 'sig_atomic_t', 'fpos_t',
'clock_t', 'time_t', 'va_list', 'jmp_buf', 'FILE', 'DIR', 'div_t', 'ldiv_t',
'mbstate_t', 'wctrans_t', 'wint_t', 'wctype_t'}
c99_types = {
'int8_t', 'int16_t', 'int32_t', 'int64_t', 'uint8_t',
'uint16_t', 'uint32_t', 'uint64_t', 'int_least8_t', 'int_least16_t',
'int_least32_t', 'int_least64_t', 'uint_least8_t', 'uint_least16_t',
'uint_least32_t', 'uint_least64_t', 'int_fast8_t', 'int_fast16_t', 'int_fast32_t',
'int_fast64_t', 'uint_fast8_t', 'uint_fast16_t', 'uint_fast32_t', 'uint_fast64_t',
'intptr_t', 'uintptr_t', 'intmax_t', 'uintmax_t'}
linux_types = {
'clockid_t', 'cpu_set_t', 'cpumask_t', 'dev_t', 'gid_t', 'id_t', 'ino_t', 'key_t',
'mode_t', 'nfds_t', 'pid_t', 'rlim_t', 'sig_t', 'sighandler_t', 'siginfo_t',
'sigset_t', 'sigval_t', 'socklen_t', 'timer_t', 'uid_t'}
c11_atomic_types = {
'atomic_bool', 'atomic_char', 'atomic_schar', 'atomic_uchar', 'atomic_short',
'atomic_ushort', 'atomic_int', 'atomic_uint', 'atomic_long', 'atomic_ulong',
'atomic_llong', 'atomic_ullong', 'atomic_char16_t', 'atomic_char32_t', 'atomic_wchar_t',
'atomic_int_least8_t', 'atomic_uint_least8_t', 'atomic_int_least16_t',
'atomic_uint_least16_t', 'atomic_int_least32_t', 'atomic_uint_least32_t',
'atomic_int_least64_t', 'atomic_uint_least64_t', 'atomic_int_fast8_t',
'atomic_uint_fast8_t', 'atomic_int_fast16_t', 'atomic_uint_fast16_t',
'atomic_int_fast32_t', 'atomic_uint_fast32_t', 'atomic_int_fast64_t',
'atomic_uint_fast64_t', 'atomic_intptr_t', 'atomic_uintptr_t', 'atomic_size_t',
'atomic_ptrdiff_t', 'atomic_intmax_t', 'atomic_uintmax_t'}
def __init__(self, **options):
self.stdlibhighlighting = get_bool_opt(options, 'stdlibhighlighting', True)
self.c99highlighting = get_bool_opt(options, 'c99highlighting', True)
self.c11highlighting = get_bool_opt(options, 'c11highlighting', True)
self.platformhighlighting = get_bool_opt(options, 'platformhighlighting', True)
RegexLexer.__init__(self, **options)
def get_tokens_unprocessed(self, text, stack=('root',)):
for index, token, value in \
RegexLexer.get_tokens_unprocessed(self, text, stack):
if token is Name:
if self.stdlibhighlighting and value in self.stdlib_types:
token = Keyword.Type
elif self.c99highlighting and value in self.c99_types:
token = Keyword.Type
elif self.c11highlighting and value in self.c11_atomic_types:
token = Keyword.Type
elif self.platformhighlighting and value in self.linux_types:
token = Keyword.Type
yield index, token, value
class CLexer(CFamilyLexer):
"""
For C source code with preprocessor directives.
Additional options accepted:
`stdlibhighlighting`
Highlight common types found in the C/C++ standard library (e.g. `size_t`).
(default: ``True``).
`c99highlighting`
Highlight common types found in the C99 standard library (e.g. `int8_t`).
Actually, this includes all fixed-width integer types.
(default: ``True``).
`c11highlighting`
Highlight atomic types found in the C11 standard library (e.g. `atomic_bool`).
(default: ``True``).
`platformhighlighting`
Highlight common types found in the platform SDK headers (e.g. `clockid_t` on Linux).
(default: ``True``).
"""
name = 'C'
aliases = ['c']
filenames = ['*.c', '*.h', '*.idc', '*.x[bp]m']
mimetypes = ['text/x-chdr', 'text/x-csrc', 'image/x-xbitmap', 'image/x-xpixmap']
priority = 0.1
tokens = {
'keywords': [
(words((
'_Alignas', '_Alignof', '_Noreturn', '_Generic', '_Thread_local',
'_Static_assert', '_Imaginary', 'noreturn', 'imaginary', 'complex'),
suffix=r'\b'), Keyword),
inherit
],
'types': [
(words(('_Bool', '_Complex', '_Atomic'), suffix=r'\b'), Keyword.Type),
inherit
]
}
def analyse_text(text):
if re.search(r'^\s*#include [<"]', text, re.MULTILINE):
return 0.1
if re.search(r'^\s*#ifn?def ', text, re.MULTILINE):
return 0.1
class CppLexer(CFamilyLexer):
"""
For C++ source code with preprocessor directives.
Additional options accepted:
`stdlibhighlighting`
Highlight common types found in the C/C++ standard library (e.g. `size_t`).
(default: ``True``).
`c99highlighting`
Highlight common types found in the C99 standard library (e.g. `int8_t`).
Actually, this includes all fixed-width integer types.
(default: ``True``).
`c11highlighting`
Highlight atomic types found in the C11 standard library (e.g. `atomic_bool`).
(default: ``True``).
`platformhighlighting`
Highlight common types found in the platform SDK headers (e.g. `clockid_t` on Linux).
(default: ``True``).
"""
name = 'C++'
url = 'https://isocpp.org/'
aliases = ['cpp', 'c++']
filenames = ['*.cpp', '*.hpp', '*.c++', '*.h++',
'*.cc', '*.hh', '*.cxx', '*.hxx',
'*.C', '*.H', '*.cp', '*.CPP', '*.tpp']
mimetypes = ['text/x-c++hdr', 'text/x-c++src']
priority = 0.1
tokens = {
'statements': [
# C++11 raw strings
(r'((?:[LuU]|u8)?R)(")([^\\()\s]{,16})(\()((?:.|\n)*?)(\)\3)(")',
bygroups(String.Affix, String, String.Delimiter, String.Delimiter,
String, String.Delimiter, String)),
inherit,
],
'root': [
inherit,
# C++ Microsoft-isms
(words(('virtual_inheritance', 'uuidof', 'super', 'single_inheritance',
'multiple_inheritance', 'interface', 'event'),
prefix=r'__', suffix=r'\b'), Keyword.Reserved),
# Offload C++ extensions, http://offload.codeplay.com/
(r'__(offload|blockingoffload|outer)\b', Keyword.Pseudo),
],
'enumname': [
include('whitespace'),
# 'enum class' and 'enum struct' C++11 support
(words(('class', 'struct'), suffix=r'\b'), Keyword),
(CFamilyLexer._ident, Name.Class, '#pop'),
# template specification
(r'\s*(?=>)', Text, '#pop'),
default('#pop')
],
'keywords': [
(r'(class|concept|typename)(\s+)', bygroups(Keyword, Whitespace), 'classname'),
(words((
'catch', 'const_cast', 'delete', 'dynamic_cast', 'explicit',
'export', 'friend', 'mutable', 'new', 'operator',
'private', 'protected', 'public', 'reinterpret_cast', 'class',
'restrict', 'static_cast', 'template', 'this', 'throw', 'throws',
'try', 'typeid', 'using', 'virtual', 'constexpr', 'nullptr', 'concept',
'decltype', 'noexcept', 'override', 'final', 'constinit', 'consteval',
'co_await', 'co_return', 'co_yield', 'requires', 'import', 'module',
'typename'),
suffix=r'\b'), Keyword),
(r'namespace\b', Keyword, 'namespace'),
(r'(enum)(\s+)', bygroups(Keyword, Whitespace), 'enumname'),
inherit
],
'types': [
(r'char(16_t|32_t|8_t)\b', Keyword.Type),
inherit
],
'namespace': [
(r'[;{]', Punctuation, ('#pop', 'root')),
(r'inline\b', Keyword.Reserved),
(CFamilyLexer._ident, Name.Namespace),
include('statement')
]
}
def analyse_text(text):
if re.search('#include <[a-z_]+>', text):
return 0.2
if re.search('using namespace ', text):
return 0.4
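# ---------------------------------------------------------------------------
# Hedged usage sketch (added for illustration; not part of the original
# module). It demonstrates the ``stdlibhighlighting`` option documented on
# CLexer/CppLexer above; the C snippet is a made-up assumption.
if __name__ == "__main__":
    snippet = "#include <stddef.h>\nsize_t n = 0;\n"
    # Default: ``size_t`` is promoted from Name to Keyword.Type.
    default_tokens = list(CLexer().get_tokens(snippet))
    # Disabled: the same identifier is left as a plain Name token.
    plain_tokens = list(CLexer(stdlibhighlighting=False).get_tokens(snippet))
    for (tok_a, val_a), (tok_b, val_b) in zip(default_tokens, plain_tokens):
        if tok_a != tok_b:
            print(repr(val_a), tok_a, "->", tok_b)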
| 17,791 | Python | 42.395122 | 96 | 0.490417 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/gsql.py | """
pygments.lexers.gsql
~~~~~~~~~~~~~~~~~~~~
    Lexers for the TigerGraph GSQL graph query language.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, include, bygroups, using, this, words
from pygments.token import Keyword, Punctuation, Comment, Operator, Name, \
String, Number, Whitespace
__all__ = ["GSQLLexer"]
class GSQLLexer(RegexLexer):
"""
For GSQL queries (version 3.x).
.. versionadded:: 2.10
"""
name = 'GSQL'
url = 'https://docs.tigergraph.com/dev/gsql-ref'
aliases = ['gsql']
filenames = ['*.gsql']
flags = re.MULTILINE | re.IGNORECASE
tokens = {
'root': [
include('comment'),
include('keywords'),
include('clauses'),
include('accums'),
include('relations'),
include('strings'),
include('whitespace'),
include('barewords'),
include('operators'),
],
'comment': [
(r'\#.*', Comment.Single),
(r'/\*(.|\n)*?\*/', Comment.Multiline),
],
'keywords': [
(words((
'ACCUM', 'AND', 'ANY', 'API', 'AS', 'ASC', 'AVG', 'BAG', 'BATCH',
'BETWEEN', 'BOOL', 'BOTH', 'BREAK', 'BY', 'CASE', 'CATCH', 'COALESCE',
'COMPRESS', 'CONTINUE', 'COUNT', 'CREATE', 'DATETIME', 'DATETIME_ADD',
'DATETIME_SUB', 'DELETE', 'DESC', 'DISTRIBUTED', 'DO', 'DOUBLE',
'EDGE', 'ELSE', 'END', 'ESCAPE', 'EXCEPTION', 'FALSE', 'FILE',
'FILTER', 'FLOAT', 'FOREACH', 'FOR', 'FROM', 'GRAPH', 'GROUP',
'GSQL_INT_MAX', 'GSQL_INT_MIN', 'GSQL_UINT_MAX', 'HAVING', 'IF',
'IN', 'INSERT', 'INT', 'INTERPRET', 'INTERSECT', 'INTERVAL', 'INTO',
'IS', 'ISEMPTY', 'JSONARRAY', 'JSONOBJECT', 'LASTHOP', 'LEADING',
'LIKE', 'LIMIT', 'LIST', 'LOAD_ACCUM', 'LOG', 'MAP', 'MATCH', 'MAX',
'MIN', 'MINUS', 'NOT', 'NOW', 'NULL', 'OFFSET', 'OR', 'ORDER', 'PATH',
'PER', 'PINNED', 'POST_ACCUM', 'POST-ACCUM', 'PRIMARY_ID', 'PRINT',
'QUERY', 'RAISE', 'RANGE', 'REPLACE', 'RESET_COLLECTION_ACCUM',
'RETURN', 'RETURNS', 'RUN', 'SAMPLE', 'SELECT', 'SELECT_VERTEX',
'SET', 'SRC', 'STATIC', 'STRING', 'SUM', 'SYNTAX', 'TARGET',
'TAGSTGT', 'THEN', 'TO', 'TO_CSV', 'TO_DATETIME', 'TRAILING',
'TRIM', 'TRUE', 'TRY', 'TUPLE', 'TYPEDEF', 'UINT', 'UNION', 'UPDATE',
'VALUES', 'VERTEX', 'WHEN', 'WHERE', 'WHILE', 'WITH'),
prefix=r'(?<!\.)', suffix=r'\b'), Keyword),
],
'clauses': [
(words(('accum', 'having', 'limit', 'order', 'postAccum', 'sample', 'where')),
Name.Builtin),
],
'accums': [
(words(('andaccum', 'arrayaccum', 'avgaccum', 'bagaccum', 'bitwiseandaccum',
'bitwiseoraccum', 'groupbyaccum', 'heapaccum', 'listaccum',
'MapAccum', 'maxaccum', 'minaccum', 'oraccum', 'setaccum',
'sumaccum')), Name.Builtin),
],
'relations': [
(r'(-\s?)(\(.*\:\w?\))(\s?-)', bygroups(Operator, using(this), Operator)),
(r'->|<-', Operator),
(r'[.*{}\[\]\<\>\_]', Punctuation),
],
'strings': [
(r'"([^"\\]|\\.)*"', String),
(r'@{1,2}\w+', Name.Variable),
],
'whitespace': [
(r'\s+', Whitespace),
],
'barewords': [
(r'[a-z]\w*', Name),
(r'(\d+\.\d+|\d+)', Number),
],
'operators': [
(r'\$|[^0-9|\/|\-](\-\=|\+\=|\*\=|\\\=|\=|\=\=|\=\=\=|'
r'\+|\-|\*|\\|\+\=|\>|\<)[^\>|\/]', Operator),
(r'(\||\(|\)|\,|\;|\=|\-|\+|\*|\/|\>|\<|\:)', Operator),
],
}
| 3,991 | Python | 37.019047 | 90 | 0.441493 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/bdd.py | """
pygments.lexers.bdd
~~~~~~~~~~~~~~~~~~~
    Lexer for BDD (Behavior-driven development).
More information: https://en.wikipedia.org/wiki/Behavior-driven_development
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, include
from pygments.token import Comment, Keyword, Name, String, Number, Text, \
Punctuation, Whitespace
__all__ = ['BddLexer']
class BddLexer(RegexLexer):
"""
    Lexer for BDD (Behavior-driven development), which highlights not only
    keywords, but also comments, punctuation, strings, numbers, and variables.
.. versionadded:: 2.11
"""
name = 'Bdd'
aliases = ['bdd']
filenames = ['*.feature']
mimetypes = ['text/x-bdd']
step_keywords = (r'Given|When|Then|Add|And|Feature|Scenario Outline|'
r'Scenario|Background|Examples|But')
tokens = {
'comments': [
(r'^\s*#.*$', Comment),
],
'miscellaneous': [
(r'(<|>|\[|\]|=|\||:|\(|\)|\{|\}|,|\.|;|-|_|\$)', Punctuation),
(r'((?<=\<)[^\\>]+(?=\>))', Name.Variable),
(r'"([^\"]*)"', String),
(r'^@\S+', Name.Label),
],
'numbers': [
(r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number),
],
'root': [
(r'\n|\s+', Whitespace),
(step_keywords, Keyword),
include('comments'),
include('miscellaneous'),
include('numbers'),
(r'\S+', Text),
]
}
def analyse_text(self, text):
return
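# ---------------------------------------------------------------------------
# Hedged usage sketch (added for illustration; not part of the original
# module). Per the docstring above, step keywords, quoted strings and
# ``<placeholder>`` outline variables all receive distinct token types. The
# feature line is a made-up assumption.
if __name__ == "__main__":
    line = 'Given the user "alice" has <count> items\n'
    for tok, value in BddLexer().get_tokens(line):
        print(tok, repr(value))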
| 1,652 | Python | 27.016949 | 79 | 0.507869 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/supercollider.py | """
pygments.lexers.supercollider
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    Lexer for SuperCollider.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, include, words, default
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation
__all__ = ['SuperColliderLexer']
class SuperColliderLexer(RegexLexer):
"""
For SuperCollider source code.
.. versionadded:: 2.1
"""
name = 'SuperCollider'
url = 'http://supercollider.github.io/'
aliases = ['supercollider', 'sc']
filenames = ['*.sc', '*.scd']
mimetypes = ['application/supercollider', 'text/supercollider']
flags = re.DOTALL | re.MULTILINE
tokens = {
'commentsandwhitespace': [
(r'\s+', Text),
(r'<!--', Comment),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline)
],
'slashstartsregex': [
include('commentsandwhitespace'),
(r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
r'([gim]+\b|\B)', String.Regex, '#pop'),
(r'(?=/)', Text, ('#pop', 'badregex')),
default('#pop'),
],
'badregex': [
(r'\n', Text, '#pop')
],
'root': [
(r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
include('commentsandwhitespace'),
(r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
(r'[{(\[;,]', Punctuation, 'slashstartsregex'),
(r'[})\].]', Punctuation),
(words((
'for', 'in', 'while', 'do', 'break', 'return', 'continue',
'switch', 'case', 'default', 'if', 'else', 'throw', 'try',
'catch', 'finally', 'new', 'delete', 'typeof', 'instanceof',
'void'), suffix=r'\b'),
Keyword, 'slashstartsregex'),
(words(('var', 'let', 'with', 'function', 'arg'), suffix=r'\b'),
Keyword.Declaration, 'slashstartsregex'),
(words((
                'abstract', 'boolean', 'byte', 'char', 'class', 'const',
'debugger', 'double', 'enum', 'export', 'extends', 'final',
'float', 'goto', 'implements', 'import', 'int', 'interface',
'long', 'native', 'package', 'private', 'protected', 'public',
'short', 'static', 'super', 'synchronized', 'throws',
'transient', 'volatile'), suffix=r'\b'),
Keyword.Reserved),
(words(('true', 'false', 'nil', 'inf'), suffix=r'\b'), Keyword.Constant),
(words((
'Array', 'Boolean', 'Date', 'Error', 'Function', 'Number',
'Object', 'Packages', 'RegExp', 'String',
'isFinite', 'isNaN', 'parseFloat', 'parseInt', 'super',
'thisFunctionDef', 'thisFunction', 'thisMethod', 'thisProcess',
'thisThread', 'this'), suffix=r'\b'),
Name.Builtin),
(r'[$a-zA-Z_]\w*', Name.Other),
(r'\\?[$a-zA-Z_]\w*', String.Symbol),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'[0-9]+', Number.Integer),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
]
}
def analyse_text(text):
"""We're searching for a common function and a unique keyword here."""
if 'SinOsc' in text or 'thisFunctionDef' in text:
return 0.1
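# ---------------------------------------------------------------------------
# Hedged usage sketch (added for illustration; not part of the original
# module). analyse_text() above returns a small score when either marker name
# appears, which pygments' lexer-guessing machinery can use as a tie-breaker.
# Both snippets are made-up assumptions.
if __name__ == "__main__":
    print(SuperColliderLexer.analyse_text("{ SinOsc.ar(440, 0, 0.2) }.play;"))  # 0.1
    print(SuperColliderLexer.analyse_text("puts 'no marker here'"))  # None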
| 3,698 | Python | 37.53125 | 85 | 0.462142 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/nit.py | """
pygments.lexers.nit
~~~~~~~~~~~~~~~~~~~
Lexer for the Nit language.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation
__all__ = ['NitLexer']
class NitLexer(RegexLexer):
"""
    For Nit source.
.. versionadded:: 2.0
"""
name = 'Nit'
url = 'http://nitlanguage.org'
aliases = ['nit']
filenames = ['*.nit']
tokens = {
'root': [
(r'#.*?$', Comment.Single),
(words((
'package', 'module', 'import', 'class', 'abstract', 'interface',
'universal', 'enum', 'end', 'fun', 'type', 'init', 'redef',
'isa', 'do', 'readable', 'writable', 'var', 'intern', 'extern',
'public', 'protected', 'private', 'intrude', 'if', 'then',
'else', 'while', 'loop', 'for', 'in', 'and', 'or', 'not',
'implies', 'return', 'continue', 'break', 'abort', 'assert',
'new', 'is', 'once', 'super', 'self', 'true', 'false', 'nullable',
'null', 'as', 'isset', 'label', '__debug__'), suffix=r'(?=[\r\n\t( ])'),
Keyword),
(r'[A-Z]\w*', Name.Class),
(r'"""(([^\'\\]|\\.)|\\r|\\n)*((\{\{?)?(""?\{\{?)*""""*)', String), # Simple long string
(r'\'\'\'(((\\.|[^\'\\])|\\r|\\n)|\'((\\.|[^\'\\])|\\r|\\n)|'
r'\'\'((\\.|[^\'\\])|\\r|\\n))*\'\'\'', String), # Simple long string alt
(r'"""(([^\'\\]|\\.)|\\r|\\n)*((""?)?(\{\{?""?)*\{\{\{\{*)', String), # Start long string
(r'\}\}\}(((\\.|[^\'\\])|\\r|\\n))*(""?)?(\{\{?""?)*\{\{\{\{*', String), # Mid long string
(r'\}\}\}(((\\.|[^\'\\])|\\r|\\n))*(\{\{?)?(""?\{\{?)*""""*', String), # End long string
(r'"(\\.|([^"}{\\]))*"', String), # Simple String
(r'"(\\.|([^"}{\\]))*\{', String), # Start string
(r'\}(\\.|([^"}{\\]))*\{', String), # Mid String
(r'\}(\\.|([^"}{\\]))*"', String), # End String
(r'(\'[^\'\\]\')|(\'\\.\')', String.Char),
(r'[0-9]+', Number.Integer),
            (r'[0-9]*\.[0-9]+', Number.Float),
(r'0(x|X)[0-9A-Fa-f]+', Number.Hex),
(r'[a-z]\w*', Name),
(r'_\w+', Name.Variable.Instance),
(r'==|!=|<==>|>=|>>|>|<=|<<|<|\+|-|=|/|\*|%|\+=|-=|!|@', Operator),
(r'\(|\)|\[|\]|,|\.\.\.|\.\.|\.|::|:', Punctuation),
(r'`\{[^`]*`\}', Text), # Extern blocks won't be Lexed by Nit
(r'[\r\n\t ]+', Text),
],
}
| 2,726 | Python | 40.953846 | 103 | 0.373074 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/hexdump.py | """
pygments.lexers.hexdump
~~~~~~~~~~~~~~~~~~~~~~~
Lexers for hexadecimal dumps.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, bygroups, include
from pygments.token import Name, Number, String, Punctuation, Whitespace
__all__ = ['HexdumpLexer']
class HexdumpLexer(RegexLexer):
"""
For typical hex dump output formats by the UNIX and GNU/Linux tools ``hexdump``,
``hd``, ``hexcat``, ``od`` and ``xxd``, and the DOS tool ``DEBUG``. For example:
.. sourcecode:: hexdump
00000000 7f 45 4c 46 02 01 01 00 00 00 00 00 00 00 00 00 |.ELF............|
00000010 02 00 3e 00 01 00 00 00 c5 48 40 00 00 00 00 00 |..>......H@.....|
The specific supported formats are the outputs of:
* ``hexdump FILE``
* ``hexdump -C FILE`` -- the `canonical` format used in the example.
* ``hd FILE`` -- same as ``hexdump -C FILE``.
* ``hexcat FILE``
* ``od -t x1z FILE``
* ``xxd FILE``
* ``DEBUG.EXE FILE.COM`` and entering ``d`` to the prompt.
.. versionadded:: 2.1
"""
name = 'Hexdump'
aliases = ['hexdump']
hd = r'[0-9A-Ha-h]'
tokens = {
'root': [
(r'\n', Whitespace),
include('offset'),
(r'('+hd+r'{2})(\-)('+hd+r'{2})',
bygroups(Number.Hex, Punctuation, Number.Hex)),
(hd+r'{2}', Number.Hex),
(r'(\s{2,3})(\>)(.{16})(\<)$',
bygroups(Whitespace, Punctuation, String, Punctuation), 'bracket-strings'),
(r'(\s{2,3})(\|)(.{16})(\|)$',
bygroups(Whitespace, Punctuation, String, Punctuation), 'piped-strings'),
(r'(\s{2,3})(\>)(.{1,15})(\<)$',
bygroups(Whitespace, Punctuation, String, Punctuation)),
(r'(\s{2,3})(\|)(.{1,15})(\|)$',
bygroups(Whitespace, Punctuation, String, Punctuation)),
(r'(\s{2,3})(.{1,15})$', bygroups(Whitespace, String)),
(r'(\s{2,3})(.{16}|.{20})$', bygroups(Whitespace, String), 'nonpiped-strings'),
(r'\s', Whitespace),
(r'^\*', Punctuation),
],
'offset': [
(r'^('+hd+'+)(:)', bygroups(Name.Label, Punctuation), 'offset-mode'),
(r'^'+hd+'+', Name.Label),
],
'offset-mode': [
(r'\s', Whitespace, '#pop'),
(hd+'+', Name.Label),
(r':', Punctuation)
],
'piped-strings': [
(r'\n', Whitespace),
include('offset'),
(hd+r'{2}', Number.Hex),
(r'(\s{2,3})(\|)(.{1,16})(\|)$',
bygroups(Whitespace, Punctuation, String, Punctuation)),
(r'\s', Whitespace),
(r'^\*', Punctuation),
],
'bracket-strings': [
(r'\n', Whitespace),
include('offset'),
(hd+r'{2}', Number.Hex),
(r'(\s{2,3})(\>)(.{1,16})(\<)$',
bygroups(Whitespace, Punctuation, String, Punctuation)),
(r'\s', Whitespace),
(r'^\*', Punctuation),
],
'nonpiped-strings': [
(r'\n', Whitespace),
include('offset'),
(r'('+hd+r'{2})(\-)('+hd+r'{2})',
bygroups(Number.Hex, Punctuation, Number.Hex)),
(hd+r'{2}', Number.Hex),
(r'(\s{19,})(.{1,20}?)$', bygroups(Whitespace, String)),
(r'(\s{2,3})(.{1,20})$', bygroups(Whitespace, String)),
(r'\s', Whitespace),
(r'^\*', Punctuation),
],
}
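# ---------------------------------------------------------------------------
# Hedged usage sketch (added for illustration; not part of the original
# module). It tokenizes a canonical ``hexdump -C`` style line like the one in
# the docstring above: the offset becomes Name.Label, the byte pairs
# Number.Hex, and the piped ASCII column a String.
if __name__ == "__main__":
    dump = ("00000000  7f 45 4c 46 02 01 01 00  00 00 00 00 00 00 00 00  "
            "|.ELF............|\n")
    for tok, value in HexdumpLexer().get_tokens(dump):
        print(tok, repr(value))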
| 3,603 | Python | 33.990291 | 91 | 0.472384 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/_css_builtins.py | """
pygments.lexers._css_builtins
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This file is autogenerated by scripts/get_css_properties.py
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
_css_properties = (
'-webkit-line-clamp',
'accent-color',
'align-content',
'align-items',
'align-self',
'alignment-baseline',
'all',
'animation',
'animation-delay',
'animation-direction',
'animation-duration',
'animation-fill-mode',
'animation-iteration-count',
'animation-name',
'animation-play-state',
'animation-timing-function',
'appearance',
'aspect-ratio',
'azimuth',
'backface-visibility',
'background',
'background-attachment',
'background-blend-mode',
'background-clip',
'background-color',
'background-image',
'background-origin',
'background-position',
'background-repeat',
'background-size',
'baseline-shift',
'baseline-source',
'block-ellipsis',
'block-size',
'block-step',
'block-step-align',
'block-step-insert',
'block-step-round',
'block-step-size',
'bookmark-label',
'bookmark-level',
'bookmark-state',
'border',
'border-block',
'border-block-color',
'border-block-end',
'border-block-end-color',
'border-block-end-style',
'border-block-end-width',
'border-block-start',
'border-block-start-color',
'border-block-start-style',
'border-block-start-width',
'border-block-style',
'border-block-width',
'border-bottom',
'border-bottom-color',
'border-bottom-left-radius',
'border-bottom-right-radius',
'border-bottom-style',
'border-bottom-width',
'border-boundary',
'border-collapse',
'border-color',
'border-end-end-radius',
'border-end-start-radius',
'border-image',
'border-image-outset',
'border-image-repeat',
'border-image-slice',
'border-image-source',
'border-image-width',
'border-inline',
'border-inline-color',
'border-inline-end',
'border-inline-end-color',
'border-inline-end-style',
'border-inline-end-width',
'border-inline-start',
'border-inline-start-color',
'border-inline-start-style',
'border-inline-start-width',
'border-inline-style',
'border-inline-width',
'border-left',
'border-left-color',
'border-left-style',
'border-left-width',
'border-radius',
'border-right',
'border-right-color',
'border-right-style',
'border-right-width',
'border-spacing',
'border-start-end-radius',
'border-start-start-radius',
'border-style',
'border-top',
'border-top-color',
'border-top-left-radius',
'border-top-right-radius',
'border-top-style',
'border-top-width',
'border-width',
'bottom',
'box-decoration-break',
'box-shadow',
'box-sizing',
'box-snap',
'break-after',
'break-before',
'break-inside',
'caption-side',
'caret',
'caret-color',
'caret-shape',
'chains',
'clear',
'clip',
'clip-path',
'clip-rule',
'color',
'color-adjust',
'color-interpolation-filters',
'color-scheme',
'column-count',
'column-fill',
'column-gap',
'column-rule',
'column-rule-color',
'column-rule-style',
'column-rule-width',
'column-span',
'column-width',
'columns',
'contain',
'contain-intrinsic-block-size',
'contain-intrinsic-height',
'contain-intrinsic-inline-size',
'contain-intrinsic-size',
'contain-intrinsic-width',
'container',
'container-name',
'container-type',
'content',
'content-visibility',
'continue',
'counter-increment',
'counter-reset',
'counter-set',
'cue',
'cue-after',
'cue-before',
'cursor',
'direction',
'display',
'dominant-baseline',
'elevation',
'empty-cells',
'fill',
'fill-break',
'fill-color',
'fill-image',
'fill-opacity',
'fill-origin',
'fill-position',
'fill-repeat',
'fill-rule',
'fill-size',
'filter',
'flex',
'flex-basis',
'flex-direction',
'flex-flow',
'flex-grow',
'flex-shrink',
'flex-wrap',
'float',
'float-defer',
'float-offset',
'float-reference',
'flood-color',
'flood-opacity',
'flow',
'flow-from',
'flow-into',
'font',
'font-family',
'font-feature-settings',
'font-kerning',
'font-language-override',
'font-optical-sizing',
'font-palette',
'font-size',
'font-size-adjust',
'font-stretch',
'font-style',
'font-synthesis',
'font-synthesis-small-caps',
'font-synthesis-style',
'font-synthesis-weight',
'font-variant',
'font-variant-alternates',
'font-variant-caps',
'font-variant-east-asian',
'font-variant-emoji',
'font-variant-ligatures',
'font-variant-numeric',
'font-variant-position',
'font-variation-settings',
'font-weight',
'footnote-display',
'footnote-policy',
'forced-color-adjust',
'gap',
'glyph-orientation-vertical',
'grid',
'grid-area',
'grid-auto-columns',
'grid-auto-flow',
'grid-auto-rows',
'grid-column',
'grid-column-end',
'grid-column-start',
'grid-row',
'grid-row-end',
'grid-row-start',
'grid-template',
'grid-template-areas',
'grid-template-columns',
'grid-template-rows',
'hanging-punctuation',
'height',
'hyphenate-character',
'hyphenate-limit-chars',
'hyphenate-limit-last',
'hyphenate-limit-lines',
'hyphenate-limit-zone',
'hyphens',
'image-orientation',
'image-rendering',
'image-resolution',
'initial-letter',
'initial-letter-align',
'initial-letter-wrap',
'inline-size',
'inline-sizing',
'input-security',
'inset',
'inset-block',
'inset-block-end',
'inset-block-start',
'inset-inline',
'inset-inline-end',
'inset-inline-start',
'isolation',
'justify-content',
'justify-items',
'justify-self',
'leading-trim',
'left',
'letter-spacing',
'lighting-color',
'line-break',
'line-clamp',
'line-grid',
'line-height',
'line-height-step',
'line-padding',
'line-snap',
'list-style',
'list-style-image',
'list-style-position',
'list-style-type',
'margin',
'margin-block',
'margin-block-end',
'margin-block-start',
'margin-bottom',
'margin-break',
'margin-inline',
'margin-inline-end',
'margin-inline-start',
'margin-left',
'margin-right',
'margin-top',
'margin-trim',
'marker',
'marker-end',
'marker-knockout-left',
'marker-knockout-right',
'marker-mid',
'marker-pattern',
'marker-segment',
'marker-side',
'marker-start',
'mask',
'mask-border',
'mask-border-mode',
'mask-border-outset',
'mask-border-repeat',
'mask-border-slice',
'mask-border-source',
'mask-border-width',
'mask-clip',
'mask-composite',
'mask-image',
'mask-mode',
'mask-origin',
'mask-position',
'mask-repeat',
'mask-size',
'mask-type',
'max-block-size',
'max-height',
'max-inline-size',
'max-lines',
'max-width',
'min-block-size',
'min-height',
'min-inline-size',
'min-intrinsic-sizing',
'min-width',
'mix-blend-mode',
'nav-down',
'nav-left',
'nav-right',
'nav-up',
'object-fit',
'object-overflow',
'object-position',
'object-view-box',
'offset',
'offset-anchor',
'offset-distance',
'offset-path',
'offset-position',
'offset-rotate',
'opacity',
'order',
'orphans',
'outline',
'outline-color',
'outline-offset',
'outline-style',
'outline-width',
'overflow',
'overflow-anchor',
'overflow-block',
'overflow-clip-margin',
'overflow-inline',
'overflow-wrap',
'overflow-x',
'overflow-y',
'overscroll-behavior',
'overscroll-behavior-block',
'overscroll-behavior-inline',
'overscroll-behavior-x',
'overscroll-behavior-y',
'padding',
'padding-block',
'padding-block-end',
'padding-block-start',
'padding-bottom',
'padding-inline',
'padding-inline-end',
'padding-inline-start',
'padding-left',
'padding-right',
'padding-top',
'page',
'page-break-after',
'page-break-before',
'page-break-inside',
'pause',
'pause-after',
'pause-before',
'perspective',
'perspective-origin',
'pitch',
'pitch-range',
'place-content',
'place-items',
'place-self',
'play-during',
'pointer-events',
'position',
'print-color-adjust',
'property-name',
'quotes',
'region-fragment',
'resize',
'rest',
'rest-after',
'rest-before',
'richness',
'right',
'rotate',
'row-gap',
'ruby-align',
'ruby-merge',
'ruby-overhang',
'ruby-position',
'running',
'scale',
'scroll-behavior',
'scroll-margin',
'scroll-margin-block',
'scroll-margin-block-end',
'scroll-margin-block-start',
'scroll-margin-bottom',
'scroll-margin-inline',
'scroll-margin-inline-end',
'scroll-margin-inline-start',
'scroll-margin-left',
'scroll-margin-right',
'scroll-margin-top',
'scroll-padding',
'scroll-padding-block',
'scroll-padding-block-end',
'scroll-padding-block-start',
'scroll-padding-bottom',
'scroll-padding-inline',
'scroll-padding-inline-end',
'scroll-padding-inline-start',
'scroll-padding-left',
'scroll-padding-right',
'scroll-padding-top',
'scroll-snap-align',
'scroll-snap-stop',
'scroll-snap-type',
'scrollbar-color',
'scrollbar-gutter',
'scrollbar-width',
'shape-image-threshold',
'shape-inside',
'shape-margin',
'shape-outside',
'spatial-navigation-action',
'spatial-navigation-contain',
'spatial-navigation-function',
'speak',
'speak-as',
'speak-header',
'speak-numeral',
'speak-punctuation',
'speech-rate',
'stress',
'string-set',
'stroke',
'stroke-align',
'stroke-alignment',
'stroke-break',
'stroke-color',
'stroke-dash-corner',
'stroke-dash-justify',
'stroke-dashadjust',
'stroke-dasharray',
'stroke-dashcorner',
'stroke-dashoffset',
'stroke-image',
'stroke-linecap',
'stroke-linejoin',
'stroke-miterlimit',
'stroke-opacity',
'stroke-origin',
'stroke-position',
'stroke-repeat',
'stroke-size',
'stroke-width',
'tab-size',
'table-layout',
'text-align',
'text-align-all',
'text-align-last',
'text-combine-upright',
'text-decoration',
'text-decoration-color',
'text-decoration-line',
'text-decoration-skip',
'text-decoration-skip-box',
'text-decoration-skip-ink',
'text-decoration-skip-inset',
'text-decoration-skip-self',
'text-decoration-skip-spaces',
'text-decoration-style',
'text-decoration-thickness',
'text-edge',
'text-emphasis',
'text-emphasis-color',
'text-emphasis-position',
'text-emphasis-skip',
'text-emphasis-style',
'text-group-align',
'text-indent',
'text-justify',
'text-orientation',
'text-overflow',
'text-shadow',
'text-space-collapse',
'text-space-trim',
'text-spacing',
'text-transform',
'text-underline-offset',
'text-underline-position',
'text-wrap',
'top',
'transform',
'transform-box',
'transform-origin',
'transform-style',
'transition',
'transition-delay',
'transition-duration',
'transition-property',
'transition-timing-function',
'translate',
'unicode-bidi',
'user-select',
'vertical-align',
'visibility',
'voice-balance',
'voice-duration',
'voice-family',
'voice-pitch',
'voice-range',
'voice-rate',
'voice-stress',
'voice-volume',
'volume',
'white-space',
'widows',
'width',
'will-change',
'word-boundary-detection',
'word-boundary-expansion',
'word-break',
'word-spacing',
'word-wrap',
'wrap-after',
'wrap-before',
'wrap-flow',
'wrap-inside',
'wrap-through',
'writing-mode',
'z-index',
) | 12,446 | Python | 21.306452 | 70 | 0.5834 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/savi.py | """
pygments.lexers.savi
~~~~~~~~~~~~~~~~~~~~
Lexer for Savi.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, bygroups, include
from pygments.token import Whitespace, Keyword, Name, String, Number, \
Operator, Punctuation, Comment, Generic, Error
__all__ = ['SaviLexer']
# The canonical version of this file can be found in the following repository,
# where it is kept in sync with any language changes, as well as the other
# pygments-like lexers that are maintained for use with other tools:
# - https://github.com/savi-lang/savi/blob/main/tooling/pygments/lexers/savi.py
#
# If you're changing this file in the pygments repository, please ensure that
# any changes you make are also propagated to the official Savi repository,
# in order to avoid accidental clobbering of your changes later when an update
# from the Savi repository flows forward into the pygments repository.
#
# If you're changing this file in the Savi repository, please ensure that
# any changes you make are also reflected in the other pygments-like lexers
# (rouge, vscode, etc) so that all of the lexers can be kept cleanly in sync.
class SaviLexer(RegexLexer):
"""
For Savi source code.
    .. versionadded:: 2.10
"""
name = 'Savi'
url = 'https://github.com/savi-lang/savi'
aliases = ['savi']
filenames = ['*.savi']
tokens = {
"root": [
# Line Comment
(r'//.*?$', Comment.Single),
# Doc Comment
(r'::.*?$', Comment.Single),
# Capability Operator
(r'(\')(\w+)(?=[^\'])', bygroups(Operator, Name)),
# Double-Quote String
(r'\w?"', String.Double, "string.double"),
# Single-Char String
(r"'", String.Char, "string.char"),
# Type Name
(r'(_?[A-Z]\w*)', Name.Class),
# Nested Type Name
(r'(\.)(\s*)(_?[A-Z]\w*)', bygroups(Punctuation, Whitespace, Name.Class)),
# Declare
(r'^([ \t]*)(:\w+)',
bygroups(Whitespace, Name.Tag),
"decl"),
# Error-Raising Calls/Names
(r'((\w+|\+|\-|\*)\!)', Generic.Deleted),
# Numeric Values
(r'\b\d([\d_]*(\.[\d_]+)?)\b', Number),
# Hex Numeric Values
(r'\b0x([0-9a-fA-F_]+)\b', Number.Hex),
# Binary Numeric Values
(r'\b0b([01_]+)\b', Number.Bin),
# Function Call (with braces)
(r'\w+(?=\()', Name.Function),
# Function Call (with receiver)
(r'(\.)(\s*)(\w+)', bygroups(Punctuation, Whitespace, Name.Function)),
# Function Call (with self receiver)
(r'(@)(\w+)', bygroups(Punctuation, Name.Function)),
# Parenthesis
(r'\(', Punctuation, "root"),
(r'\)', Punctuation, "#pop"),
# Brace
(r'\{', Punctuation, "root"),
(r'\}', Punctuation, "#pop"),
# Bracket
(r'\[', Punctuation, "root"),
(r'(\])(\!)', bygroups(Punctuation, Generic.Deleted), "#pop"),
(r'\]', Punctuation, "#pop"),
# Punctuation
(r'[,;:\.@]', Punctuation),
# Piping Operators
(r'(\|\>)', Operator),
# Branching Operators
(r'(\&\&|\|\||\?\?|\&\?|\|\?|\.\?)', Operator),
# Comparison Operators
(r'(\<\=\>|\=\~|\=\=|\<\=|\>\=|\<|\>)', Operator),
# Arithmetic Operators
(r'(\+|\-|\/|\*|\%)', Operator),
# Assignment Operators
(r'(\=)', Operator),
# Other Operators
(r'(\!|\<\<|\<|\&|\|)', Operator),
# Identifiers
(r'\b\w+\b', Name),
# Whitespace
(r'[ \t\r]+\n*|\n+', Whitespace),
],
# Declare (nested rules)
"decl": [
(r'\b[a-z_]\w*\b(?!\!)', Keyword.Declaration),
(r':', Punctuation, "#pop"),
(r'\n', Whitespace, "#pop"),
include("root"),
],
# Double-Quote String (nested rules)
"string.double": [
(r'\\\(', String.Interpol, "string.interpolation"),
(r'\\u[0-9a-fA-F]{4}', String.Escape),
(r'\\x[0-9a-fA-F]{2}', String.Escape),
(r'\\[bfnrt\\\']', String.Escape),
(r'\\"', String.Escape),
(r'"', String.Double, "#pop"),
(r'[^\\"]+', String.Double),
(r'.', Error),
],
# Single-Char String (nested rules)
"string.char": [
(r'\\u[0-9a-fA-F]{4}', String.Escape),
(r'\\x[0-9a-fA-F]{2}', String.Escape),
(r'\\[bfnrt\\\']', String.Escape),
(r"\\'", String.Escape),
(r"'", String.Char, "#pop"),
(r"[^\\']+", String.Char),
(r'.', Error),
],
# Interpolation inside String (nested rules)
"string.interpolation": [
(r"\)", String.Interpol, "#pop"),
include("root"),
]
}
| 4,645 | Python | 26.16959 | 80 | 0.53972 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/dsls.py | """
pygments.lexers.dsls
~~~~~~~~~~~~~~~~~~~~
Lexers for various domain-specific languages.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import ExtendedRegexLexer, RegexLexer, bygroups, words, \
include, default, this, using, combined
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Whitespace
__all__ = ['ProtoBufLexer', 'ZeekLexer', 'PuppetLexer', 'RslLexer',
'MscgenLexer', 'VGLLexer', 'AlloyLexer', 'PanLexer',
'CrmshLexer', 'ThriftLexer', 'FlatlineLexer', 'SnowballLexer']
class ProtoBufLexer(RegexLexer):
"""
Lexer for Protocol Buffer definition files.
.. versionadded:: 1.4
"""
name = 'Protocol Buffer'
url = 'https://developers.google.com/protocol-buffers/'
aliases = ['protobuf', 'proto']
filenames = ['*.proto']
tokens = {
'root': [
(r'[ \t]+', Whitespace),
(r'[,;{}\[\]()<>]', Punctuation),
(r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
(r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
(words((
'import', 'option', 'optional', 'required', 'repeated',
'reserved', 'default', 'packed', 'ctype', 'extensions', 'to',
'max', 'rpc', 'returns', 'oneof', 'syntax'), prefix=r'\b', suffix=r'\b'),
Keyword),
(words((
'int32', 'int64', 'uint32', 'uint64', 'sint32', 'sint64',
'fixed32', 'fixed64', 'sfixed32', 'sfixed64',
'float', 'double', 'bool', 'string', 'bytes'), suffix=r'\b'),
Keyword.Type),
(r'(true|false)\b', Keyword.Constant),
(r'(package)(\s+)', bygroups(Keyword.Namespace, Whitespace), 'package'),
(r'(message|extend)(\s+)',
bygroups(Keyword.Declaration, Whitespace), 'message'),
(r'(enum|group|service)(\s+)',
bygroups(Keyword.Declaration, Whitespace), 'type'),
(r'\".*?\"', String),
(r'\'.*?\'', String),
(r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
(r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
(r'(\-?(inf|nan))\b', Number.Float),
(r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
(r'0[0-7]+[LlUu]*', Number.Oct),
(r'\d+[LlUu]*', Number.Integer),
(r'[+-=]', Operator),
(r'([a-zA-Z_][\w.]*)([ \t]*)(=)',
bygroups(Name.Attribute, Whitespace, Operator)),
(r'[a-zA-Z_][\w.]*', Name),
],
'package': [
(r'[a-zA-Z_]\w*', Name.Namespace, '#pop'),
default('#pop'),
],
'message': [
(r'[a-zA-Z_]\w*', Name.Class, '#pop'),
default('#pop'),
],
'type': [
(r'[a-zA-Z_]\w*', Name, '#pop'),
default('#pop'),
],
}
class ThriftLexer(RegexLexer):
"""
For Thrift interface definitions.
.. versionadded:: 2.1
"""
name = 'Thrift'
url = 'https://thrift.apache.org/'
aliases = ['thrift']
filenames = ['*.thrift']
mimetypes = ['application/x-thrift']
tokens = {
'root': [
include('whitespace'),
include('comments'),
(r'"', String.Double, combined('stringescape', 'dqs')),
(r'\'', String.Single, combined('stringescape', 'sqs')),
(r'(namespace)(\s+)',
bygroups(Keyword.Namespace, Whitespace), 'namespace'),
(r'(enum|union|struct|service|exception)(\s+)',
bygroups(Keyword.Declaration, Whitespace), 'class'),
(r'((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)' # return arguments
r'((?:[^\W\d]|\$)[\w$]*)' # method name
r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Whitespace, Operator)),
include('keywords'),
include('numbers'),
(r'[&=]', Operator),
(r'[:;,{}()<>\[\]]', Punctuation),
(r'[a-zA-Z_](\.\w|\w)*', Name),
],
'whitespace': [
(r'\n', Whitespace),
(r'\s+', Whitespace),
],
'comments': [
(r'#.*$', Comment),
(r'//.*?\n', Comment),
(r'/\*[\w\W]*?\*/', Comment.Multiline),
],
'stringescape': [
(r'\\([\\nrt"\'])', String.Escape),
],
'dqs': [
(r'"', String.Double, '#pop'),
(r'[^\\"\n]+', String.Double),
],
'sqs': [
(r"'", String.Single, '#pop'),
(r'[^\\\'\n]+', String.Single),
],
'namespace': [
(r'[a-z*](\.\w|\w)*', Name.Namespace, '#pop'),
default('#pop'),
],
'class': [
(r'[a-zA-Z_]\w*', Name.Class, '#pop'),
default('#pop'),
],
'keywords': [
(r'(async|oneway|extends|throws|required|optional)\b', Keyword),
(r'(true|false)\b', Keyword.Constant),
(r'(const|typedef)\b', Keyword.Declaration),
(words((
'cpp_namespace', 'cpp_include', 'cpp_type', 'java_package',
'cocoa_prefix', 'csharp_namespace', 'delphi_namespace',
'php_namespace', 'py_module', 'perl_package',
'ruby_namespace', 'smalltalk_category', 'smalltalk_prefix',
'xsd_all', 'xsd_optional', 'xsd_nillable', 'xsd_namespace',
'xsd_attrs', 'include'), suffix=r'\b'),
Keyword.Namespace),
(words((
'void', 'bool', 'byte', 'i16', 'i32', 'i64', 'double',
'string', 'binary', 'map', 'list', 'set', 'slist',
'senum'), suffix=r'\b'),
Keyword.Type),
(words((
'BEGIN', 'END', '__CLASS__', '__DIR__', '__FILE__',
'__FUNCTION__', '__LINE__', '__METHOD__', '__NAMESPACE__',
'abstract', 'alias', 'and', 'args', 'as', 'assert', 'begin',
'break', 'case', 'catch', 'class', 'clone', 'continue',
'declare', 'def', 'default', 'del', 'delete', 'do', 'dynamic',
'elif', 'else', 'elseif', 'elsif', 'end', 'enddeclare',
'endfor', 'endforeach', 'endif', 'endswitch', 'endwhile',
'ensure', 'except', 'exec', 'finally', 'float', 'for',
'foreach', 'function', 'global', 'goto', 'if', 'implements',
'import', 'in', 'inline', 'instanceof', 'interface', 'is',
'lambda', 'module', 'native', 'new', 'next', 'nil', 'not',
'or', 'pass', 'public', 'print', 'private', 'protected',
'raise', 'redo', 'rescue', 'retry', 'register', 'return',
'self', 'sizeof', 'static', 'super', 'switch', 'synchronized',
'then', 'this', 'throw', 'transient', 'try', 'undef',
'unless', 'unsigned', 'until', 'use', 'var', 'virtual',
'volatile', 'when', 'while', 'with', 'xor', 'yield'),
prefix=r'\b', suffix=r'\b'),
Keyword.Reserved),
],
'numbers': [
(r'[+-]?(\d+\.\d+([eE][+-]?\d+)?|\.?\d+[eE][+-]?\d+)', Number.Float),
(r'[+-]?0x[0-9A-Fa-f]+', Number.Hex),
(r'[+-]?[0-9]+', Number.Integer),
],
}
class ZeekLexer(RegexLexer):
"""
For Zeek scripts.
.. versionadded:: 2.5
"""
name = 'Zeek'
url = 'https://www.zeek.org/'
aliases = ['zeek', 'bro']
filenames = ['*.zeek', '*.bro']
_hex = r'[0-9a-fA-F]'
_float = r'((\d*\.?\d+)|(\d+\.?\d*))([eE][-+]?\d+)?'
_h = r'[A-Za-z0-9][-A-Za-z0-9]*'
tokens = {
'root': [
include('whitespace'),
include('comments'),
include('directives'),
include('attributes'),
include('types'),
include('keywords'),
include('literals'),
include('operators'),
include('punctuation'),
(r'((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)(?=\s*\()',
Name.Function),
include('identifiers'),
],
'whitespace': [
(r'\n', Whitespace),
(r'\s+', Whitespace),
(r'(\\)(\n)', bygroups(Text, Whitespace)),
],
'comments': [
(r'#.*$', Comment),
],
'directives': [
(r'@(load-plugin|load-sigs|load|unload)\b.*$', Comment.Preproc),
(r'@(DEBUG|DIR|FILENAME|deprecated|if|ifdef|ifndef|else|endif)\b', Comment.Preproc),
(r'(@prefixes)(\s*)((\+?=).*)$', bygroups(Comment.Preproc,
Whitespace, Comment.Preproc)),
],
'attributes': [
(words(('redef', 'priority', 'log', 'optional', 'default', 'add_func',
'delete_func', 'expire_func', 'read_expire', 'write_expire',
'create_expire', 'synchronized', 'persistent', 'rotate_interval',
'rotate_size', 'encrypt', 'raw_output', 'mergeable', 'error_handler',
'type_column', 'deprecated'),
prefix=r'&', suffix=r'\b'),
Keyword.Pseudo),
],
'types': [
(words(('any',
'enum', 'record', 'set', 'table', 'vector',
'function', 'hook', 'event',
'addr', 'bool', 'count', 'double', 'file', 'int', 'interval',
'pattern', 'port', 'string', 'subnet', 'time'),
suffix=r'\b'),
Keyword.Type),
(r'(opaque)(\s+)(of)(\s+)((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)\b',
bygroups(Keyword.Type, Whitespace, Operator.Word, Whitespace, Keyword.Type)),
(r'(type)(\s+)((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)(\s*)(:)(\s*)\b(record|enum)\b',
bygroups(Keyword, Whitespace, Name.Class, Whitespace, Operator, Whitespace, Keyword.Type)),
(r'(type)(\s+)((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)(\s*)(:)',
bygroups(Keyword, Whitespace, Name, Whitespace, Operator)),
(r'(redef)(\s+)(record|enum)(\s+)((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)\b',
bygroups(Keyword, Whitespace, Keyword.Type, Whitespace, Name.Class)),
],
'keywords': [
(words(('redef', 'export', 'if', 'else', 'for', 'while',
'return', 'break', 'next', 'continue', 'fallthrough',
'switch', 'default', 'case',
'add', 'delete',
'when', 'timeout', 'schedule'),
suffix=r'\b'),
Keyword),
(r'(print)\b', Keyword),
(r'(global|local|const|option)\b', Keyword.Declaration),
(r'(module)(\s+)(([A-Za-z_]\w*)(?:::([A-Za-z_]\w*))*)\b',
bygroups(Keyword.Namespace, Whitespace, Name.Namespace)),
],
'literals': [
(r'"', String, 'string'),
# Not the greatest match for patterns, but generally helps
# disambiguate between start of a pattern and just a division
# operator.
(r'/(?=.*/)', String.Regex, 'regex'),
(r'(T|F)\b', Keyword.Constant),
# Port
(r'\d{1,5}/(udp|tcp|icmp|unknown)\b', Number),
# IPv4 Address
            (r'(\d{1,3}\.){3}(\d{1,3})\b', Number),
# IPv6 Address
            (r'\[([0-9a-fA-F]{0,4}:){2,7}([0-9a-fA-F]{0,4})?((\d{1,3}\.){3}(\d{1,3}))?\]', Number),
# Numeric
(r'0[xX]' + _hex + r'+\b', Number.Hex),
(_float + r'\s*(day|hr|min|sec|msec|usec)s?\b', Number.Float),
(_float + r'\b', Number.Float),
(r'(\d+)\b', Number.Integer),
# Hostnames
(_h + r'(\.' + _h + r')+', String),
],
'operators': [
(r'[!%*/+<=>~|&^-]', Operator),
(r'([-+=&|]{2}|[+=!><-]=)', Operator),
(r'(in|as|is|of)\b', Operator.Word),
(r'\??\$', Operator),
],
'punctuation': [
(r'[{}()\[\],;.]', Punctuation),
# The "ternary if", which uses '?' and ':', could instead be
# treated as an Operator, but colons are more frequently used to
# separate field/identifier names from their types, so the (often)
# less-prominent Punctuation is used even with '?' for consistency.
(r'[?:]', Punctuation),
],
'identifiers': [
(r'([a-zA-Z_]\w*)(::)', bygroups(Name, Punctuation)),
(r'[a-zA-Z_]\w*', Name)
],
'string': [
(r'\\.', String.Escape),
(r'%-?[0-9]*(\.[0-9]+)?[DTd-gsx]', String.Escape),
(r'"', String, '#pop'),
(r'.', String),
],
'regex': [
(r'\\.', String.Escape),
(r'/', String.Regex, '#pop'),
(r'.', String.Regex),
],
}
BroLexer = ZeekLexer
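# Illustrative sketch (not part of the upstream lexer): the `/(?=.*/)` rule in
# 'literals' above only treats a leading slash as the start of a regex when a
# second slash follows on the same line, so a lone `/` still lexes as division.
# The helper name and sample lines below are hypothetical.
def _demo_zeek_regex_vs_division():
    lexer = ZeekLexer()
    for line in ('local p = /foo|bar/;', 'local x = 10 / 2;'):
        # First line: /foo|bar/ comes out as String.Regex tokens; second line:
        # the '/' falls through to the generic Operator rule.
        print([(str(tok), val) for tok, val in lexer.get_tokens(line) if val.strip()])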
class PuppetLexer(RegexLexer):
"""
For Puppet configuration DSL.
.. versionadded:: 1.6
"""
name = 'Puppet'
url = 'https://puppet.com/'
aliases = ['puppet']
filenames = ['*.pp']
tokens = {
'root': [
include('comments'),
include('keywords'),
include('names'),
include('numbers'),
include('operators'),
include('strings'),
(r'[]{}:(),;[]', Punctuation),
(r'\s+', Whitespace),
],
'comments': [
(r'(\s*)(#.*)$', bygroups(Whitespace, Comment)),
(r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
],
'operators': [
(r'(=>|\?|<|>|=|\+|-|/|\*|~|!|\|)', Operator),
(r'(in|and|or|not)\b', Operator.Word),
],
'names': [
(r'[a-zA-Z_]\w*', Name.Attribute),
(r'(\$\S+)(\[)(\S+)(\])', bygroups(Name.Variable, Punctuation,
String, Punctuation)),
(r'\$\S+', Name.Variable),
],
'numbers': [
# Copypasta from the Python lexer
(r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?', Number.Float),
(r'\d+[eE][+-]?[0-9]+j?', Number.Float),
(r'0[0-7]+j?', Number.Oct),
(r'0[xX][a-fA-F0-9]+', Number.Hex),
(r'\d+L', Number.Integer.Long),
(r'\d+j?', Number.Integer)
],
'keywords': [
# Left out 'group' and 'require'
# Since they're often used as attributes
(words((
'absent', 'alert', 'alias', 'audit', 'augeas', 'before', 'case',
'check', 'class', 'computer', 'configured', 'contained',
'create_resources', 'crit', 'cron', 'debug', 'default',
'define', 'defined', 'directory', 'else', 'elsif', 'emerg',
'err', 'exec', 'extlookup', 'fail', 'false', 'file',
'filebucket', 'fqdn_rand', 'generate', 'host', 'if', 'import',
'include', 'info', 'inherits', 'inline_template', 'installed',
'interface', 'k5login', 'latest', 'link', 'loglevel',
'macauthorization', 'mailalias', 'maillist', 'mcx', 'md5',
'mount', 'mounted', 'nagios_command', 'nagios_contact',
'nagios_contactgroup', 'nagios_host', 'nagios_hostdependency',
'nagios_hostescalation', 'nagios_hostextinfo', 'nagios_hostgroup',
'nagios_service', 'nagios_servicedependency', 'nagios_serviceescalation',
'nagios_serviceextinfo', 'nagios_servicegroup', 'nagios_timeperiod',
'node', 'noop', 'notice', 'notify', 'package', 'present', 'purged',
'realize', 'regsubst', 'resources', 'role', 'router', 'running',
'schedule', 'scheduled_task', 'search', 'selboolean', 'selmodule',
'service', 'sha1', 'shellquote', 'split', 'sprintf',
'ssh_authorized_key', 'sshkey', 'stage', 'stopped', 'subscribe',
'tag', 'tagged', 'template', 'tidy', 'true', 'undef', 'unmounted',
'user', 'versioncmp', 'vlan', 'warning', 'yumrepo', 'zfs', 'zone',
'zpool'), prefix='(?i)', suffix=r'\b'),
Keyword),
],
'strings': [
(r'"([^"])*"', String),
(r"'(\\'|[^'])*'", String),
],
}
class RslLexer(RegexLexer):
"""
    RSL is the formal specification language used in the RAISE
    (Rigorous Approach to Industrial Software Engineering) method.
.. versionadded:: 2.0
"""
name = 'RSL'
url = 'http://en.wikipedia.org/wiki/RAISE'
aliases = ['rsl']
filenames = ['*.rsl']
mimetypes = ['text/rsl']
flags = re.MULTILINE | re.DOTALL
tokens = {
'root': [
(words((
'Bool', 'Char', 'Int', 'Nat', 'Real', 'Text', 'Unit', 'abs',
'all', 'always', 'any', 'as', 'axiom', 'card', 'case', 'channel',
'chaos', 'class', 'devt_relation', 'dom', 'elems', 'else', 'elif',
'end', 'exists', 'extend', 'false', 'for', 'hd', 'hide', 'if',
'in', 'is', 'inds', 'initialise', 'int', 'inter', 'isin', 'len',
'let', 'local', 'ltl_assertion', 'object', 'of', 'out', 'post',
'pre', 'read', 'real', 'rng', 'scheme', 'skip', 'stop', 'swap',
'then', 'theory', 'test_case', 'tl', 'transition_system', 'true',
'type', 'union', 'until', 'use', 'value', 'variable', 'while',
'with', 'write', '~isin', '-inflist', '-infset', '-list',
'-set'), prefix=r'\b', suffix=r'\b'),
Keyword),
(r'(variable|value)\b', Keyword.Declaration),
(r'--.*?\n', Comment),
(r'<:.*?:>', Comment),
(r'\{!.*?!\}', Comment),
(r'/\*.*?\*/', Comment),
(r'^([ \t]*)([\w]+)([ \t]*)(:[^:])', bygroups(Whitespace,
Name.Function, Whitespace, Name.Function)),
(r'(^[ \t]*)([\w]+)([ \t]*)(\([\w\s,]*\))([ \t]*)(is|as)',
bygroups(Whitespace, Name.Function, Whitespace, Text,
Whitespace, Keyword)),
(r'\b[A-Z]\w*\b', Keyword.Type),
(r'(true|false)\b', Keyword.Constant),
(r'".*"', String),
(r'\'.\'', String.Char),
(r'(><|->|-m->|/\\|<=|<<=|<\.|\|\||\|\^\||-~->|-~m->|\\/|>=|>>|'
r'\.>|\+\+|-\\|<->|=>|:-|~=|\*\*|<<|>>=|\+>|!!|\|=\||#)',
Operator),
(r'[0-9]+\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-f]+', Number.Hex),
(r'[0-9]+', Number.Integer),
(r'\s+', Whitespace),
(r'.', Text),
],
}
def analyse_text(text):
"""
Check for the most common text in the beginning of a RSL file.
"""
if re.search(r'scheme\s*.*?=\s*class\s*type', text, re.I) is not None:
return 1.0
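# Illustrative sketch (not part of the upstream lexer): analyse_text above is a
# guessing heuristic, so it only reports a hit when a "scheme ... = class type"
# header is present. The helper name and the sample strings are hypothetical.
def _demo_rsl_analyse_text():
    # Expected to print 1.0 for the first call and 0.0 for the second, once
    # pygments' analysator wrapper normalises the bare/None return values.
    print(RslLexer.analyse_text('scheme DATABASE = class type Database = ...'))
    print(RslLexer.analyse_text('no RAISE header in this text'))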
class MscgenLexer(RegexLexer):
"""
For Mscgen files.
.. versionadded:: 1.6
"""
name = 'Mscgen'
url = 'http://www.mcternan.me.uk/mscgen/'
aliases = ['mscgen', 'msc']
filenames = ['*.msc']
_var = r'(\w+|"(?:\\"|[^"])*")'
tokens = {
'root': [
(r'msc\b', Keyword.Type),
# Options
(r'(hscale|HSCALE|width|WIDTH|wordwraparcs|WORDWRAPARCS'
r'|arcgradient|ARCGRADIENT)\b', Name.Property),
# Operators
(r'(abox|ABOX|rbox|RBOX|box|BOX|note|NOTE)\b', Operator.Word),
(r'(\.|-|\|){3}', Keyword),
(r'(?:-|=|\.|:){2}'
r'|<<=>>|<->|<=>|<<>>|<:>'
r'|->|=>>|>>|=>|:>|-x|-X'
r'|<-|<<=|<<|<=|<:|x-|X-|=', Operator),
# Names
(r'\*', Name.Builtin),
(_var, Name.Variable),
# Other
(r'\[', Punctuation, 'attrs'),
(r'\{|\}|,|;', Punctuation),
include('comments')
],
'attrs': [
(r'\]', Punctuation, '#pop'),
(_var + r'(\s*)(=)(\s*)' + _var,
bygroups(Name.Attribute, Whitespace, Operator, Whitespace,
String)),
(r',', Punctuation),
include('comments')
],
'comments': [
(r'(?://|#).*?\n', Comment.Single),
(r'/\*(?:.|\n)*?\*/', Comment.Multiline),
(r'[ \t\r\n]+', Whitespace)
]
}
class VGLLexer(RegexLexer):
"""
For SampleManager VGL source code.
.. versionadded:: 1.6
"""
name = 'VGL'
url = 'http://www.thermoscientific.com/samplemanager'
aliases = ['vgl']
filenames = ['*.rpf']
flags = re.MULTILINE | re.DOTALL | re.IGNORECASE
tokens = {
'root': [
(r'\{[^}]*\}', Comment.Multiline),
(r'declare', Keyword.Constant),
(r'(if|then|else|endif|while|do|endwhile|and|or|prompt|object'
r'|create|on|line|with|global|routine|value|endroutine|constant'
r'|global|set|join|library|compile_option|file|exists|create|copy'
r'|delete|enable|windows|name|notprotected)(?! *[=<>.,()])',
Keyword),
(r'(true|false|null|empty|error|locked)', Keyword.Constant),
(r'[~^*#!%&\[\]()<>|+=:;,./?-]', Operator),
(r'"[^"]*"', String),
(r'(\.)([a-z_$][\w$]*)', bygroups(Operator, Name.Attribute)),
(r'[0-9][0-9]*(\.[0-9]+(e[+\-]?[0-9]+)?)?', Number),
(r'[a-z_$][\w$]*', Name),
(r'[\r\n]+', Whitespace),
(r'\s+', Whitespace)
]
}
class AlloyLexer(RegexLexer):
"""
For Alloy source code.
.. versionadded:: 2.0
"""
name = 'Alloy'
url = 'http://alloy.mit.edu'
aliases = ['alloy']
filenames = ['*.als']
mimetypes = ['text/x-alloy']
flags = re.MULTILINE | re.DOTALL
iden_rex = r'[a-zA-Z_][\w]*"*'
string_rex = r'"\b(\\\\|\\[^\\]|[^"\\])*"'
text_tuple = (r'[^\S\n]+', Whitespace)
tokens = {
'sig': [
(r'(extends)\b', Keyword, '#pop'),
(iden_rex, Name),
text_tuple,
(r',', Punctuation),
(r'\{', Operator, '#pop'),
],
'module': [
text_tuple,
(iden_rex, Name, '#pop'),
],
'fun': [
text_tuple,
(r'\{', Operator, '#pop'),
(iden_rex, Name, '#pop'),
],
'fact': [
include('fun'),
(string_rex, String, '#pop'),
],
'root': [
(r'--.*?$', Comment.Single),
(r'//.*?$', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
text_tuple,
(r'(module|open)(\s+)', bygroups(Keyword.Namespace, Whitespace),
'module'),
(r'(sig|enum)(\s+)', bygroups(Keyword.Declaration, Whitespace), 'sig'),
(r'(iden|univ|none)\b', Keyword.Constant),
(r'(int|Int)\b', Keyword.Type),
(r'(var|this|abstract|extends|set|seq|one|lone|let)\b', Keyword),
(r'(all|some|no|sum|disj|when|else)\b', Keyword),
(r'(run|check|for|but|exactly|expect|as|steps)\b', Keyword),
(r'(always|after|eventually|until|release)\b', Keyword), # future time operators
(r'(historically|before|once|since|triggered)\b', Keyword), # past time operators
(r'(and|or|implies|iff|in)\b', Operator.Word),
(r'(fun|pred|assert)(\s+)', bygroups(Keyword, Whitespace), 'fun'),
(r'(fact)(\s+)', bygroups(Keyword, Whitespace), 'fact'),
(r'!|#|&&|\+\+|<<|>>|>=|<=>|<=|\.\.|\.|->', Operator),
(r'[-+/*%=<>&!^|~{}\[\]().\';]', Operator),
(iden_rex, Name),
(r'[:,]', Punctuation),
(r'[0-9]+', Number.Integer),
(string_rex, String),
(r'\n', Whitespace),
]
}
class PanLexer(RegexLexer):
"""
Lexer for pan source files.
    Based on the tcsh lexer.
.. versionadded:: 2.0
"""
name = 'Pan'
url = 'https://github.com/quattor/pan/'
aliases = ['pan']
filenames = ['*.pan']
tokens = {
'root': [
include('basic'),
(r'\(', Keyword, 'paren'),
(r'\{', Keyword, 'curly'),
include('data'),
],
'basic': [
(words((
'if', 'for', 'with', 'else', 'type', 'bind', 'while', 'valid', 'final',
'prefix', 'unique', 'object', 'foreach', 'include', 'template',
'function', 'variable', 'structure', 'extensible', 'declaration'),
prefix=r'\b', suffix=r'\b'),
Keyword),
(words((
'file_contents', 'format', 'index', 'length', 'match', 'matches',
'replace', 'splice', 'split', 'substr', 'to_lowercase', 'to_uppercase',
'debug', 'error', 'traceback', 'deprecated', 'base64_decode',
'base64_encode', 'digest', 'escape', 'unescape', 'append', 'create',
'first', 'nlist', 'key', 'list', 'merge', 'next', 'prepend', 'is_boolean',
'is_defined', 'is_double', 'is_list', 'is_long', 'is_nlist', 'is_null',
'is_number', 'is_property', 'is_resource', 'is_string', 'to_boolean',
'to_double', 'to_long', 'to_string', 'clone', 'delete', 'exists',
'path_exists', 'if_exists', 'return', 'value'),
prefix=r'\b', suffix=r'\b'),
Name.Builtin),
(r'#.*', Comment),
(r'\\[\w\W]', String.Escape),
(r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Whitespace, Operator)),
(r'[\[\]{}()=]+', Operator),
(r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
(r';', Punctuation),
],
'data': [
(r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
(r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
(r'\s+', Whitespace),
(r'[^=\s\[\]{}()$"\'`\\;#]+', Text),
(r'\d+(?= |\Z)', Number),
],
'curly': [
(r'\}', Keyword, '#pop'),
(r':-', Keyword),
(r'\w+', Name.Variable),
(r'[^}:"\'`$]+', Punctuation),
(r':', Punctuation),
include('root'),
],
'paren': [
(r'\)', Keyword, '#pop'),
include('root'),
],
}
class CrmshLexer(RegexLexer):
"""
Lexer for crmsh configuration files for Pacemaker clusters.
.. versionadded:: 2.1
"""
name = 'Crmsh'
url = 'http://crmsh.github.io/'
aliases = ['crmsh', 'pcmk']
filenames = ['*.crmsh', '*.pcmk']
mimetypes = []
elem = words((
'node', 'primitive', 'group', 'clone', 'ms', 'location',
'colocation', 'order', 'fencing_topology', 'rsc_ticket',
'rsc_template', 'property', 'rsc_defaults',
'op_defaults', 'acl_target', 'acl_group', 'user', 'role',
'tag'), suffix=r'(?![\w#$-])')
sub = words((
'params', 'meta', 'operations', 'op', 'rule',
'attributes', 'utilization'), suffix=r'(?![\w#$-])')
acl = words(('read', 'write', 'deny'), suffix=r'(?![\w#$-])')
bin_rel = words(('and', 'or'), suffix=r'(?![\w#$-])')
un_ops = words(('defined', 'not_defined'), suffix=r'(?![\w#$-])')
date_exp = words(('in_range', 'date', 'spec', 'in'), suffix=r'(?![\w#$-])')
acl_mod = (r'(?:tag|ref|reference|attribute|type|xpath)')
bin_ops = (r'(?:lt|gt|lte|gte|eq|ne)')
val_qual = (r'(?:string|version|number)')
rsc_role_action = (r'(?:Master|Started|Slave|Stopped|'
r'start|promote|demote|stop)')
tokens = {
'root': [
(r'^(#.*)(\n)?', bygroups(Comment, Whitespace)),
# attr=value (nvpair)
(r'([\w#$-]+)(=)("(?:""|[^"])*"|\S+)',
bygroups(Name.Attribute, Punctuation, String)),
# need this construct, otherwise numeric node ids
# are matched as scores
# elem id:
(r'(node)(\s+)([\w#$-]+)(:)',
bygroups(Keyword, Whitespace, Name, Punctuation)),
# scores
(r'([+-]?([0-9]+|inf)):', Number),
# keywords (elements and other)
(elem, Keyword),
(sub, Keyword),
(acl, Keyword),
# binary operators
(r'(?:%s:)?(%s)(?![\w#$-])' % (val_qual, bin_ops), Operator.Word),
# other operators
(bin_rel, Operator.Word),
(un_ops, Operator.Word),
(date_exp, Operator.Word),
# builtin attributes (e.g. #uname)
(r'#[a-z]+(?![\w#$-])', Name.Builtin),
# acl_mod:blah
(r'(%s)(:)("(?:""|[^"])*"|\S+)' % acl_mod,
bygroups(Keyword, Punctuation, Name)),
# rsc_id[:(role|action)]
# NB: this matches all other identifiers
(r'([\w#$-]+)(?:(:)(%s))?(?![\w#$-])' % rsc_role_action,
bygroups(Name, Punctuation, Operator.Word)),
# punctuation
(r'(\\(?=\n)|[\[\](){}/:@])', Punctuation),
(r'\s+|\n', Whitespace),
],
}
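# Illustrative sketch (not part of the upstream lexer): the dedicated
# "node <id>:" rule above keeps numeric node ids from being lexed as scores,
# while a bare "100:" in a constraint still matches the score rule. The helper
# name and the two sample lines are hypothetical.
def _demo_crmsh_node_id_vs_score():
    lexer = CrmshLexer()
    for line in ('node 178: standby=off', 'location l1 rsc1 100: node1'):
        # '178' is emitted as a Name after the node keyword; '100:' as Number.
        print([(str(tok), val) for tok, val in lexer.get_tokens(line) if val.strip()])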
class FlatlineLexer(RegexLexer):
"""
Lexer for Flatline expressions.
.. versionadded:: 2.2
"""
name = 'Flatline'
url = 'https://github.com/bigmlcom/flatline'
aliases = ['flatline']
filenames = []
mimetypes = ['text/x-flatline']
special_forms = ('let',)
builtins = (
"!=", "*", "+", "-", "<", "<=", "=", ">", ">=", "abs", "acos", "all",
"all-but", "all-with-defaults", "all-with-numeric-default", "and",
"asin", "atan", "avg", "avg-window", "bin-center", "bin-count", "call",
"category-count", "ceil", "cond", "cond-window", "cons", "cos", "cosh",
"count", "diff-window", "div", "ensure-value", "ensure-weighted-value",
"epoch", "epoch-day", "epoch-fields", "epoch-hour", "epoch-millisecond",
"epoch-minute", "epoch-month", "epoch-second", "epoch-weekday",
"epoch-year", "exp", "f", "field", "field-prop", "fields", "filter",
"first", "floor", "head", "if", "in", "integer", "language", "length",
"levenshtein", "linear-regression", "list", "ln", "log", "log10", "map",
"matches", "matches?", "max", "maximum", "md5", "mean", "median", "min",
"minimum", "missing", "missing-count", "missing?", "missing_count",
"mod", "mode", "normalize", "not", "nth", "occurrences", "or",
"percentile", "percentile-label", "population", "population-fraction",
"pow", "preferred", "preferred?", "quantile-label", "rand", "rand-int",
"random-value", "re-quote", "real", "replace", "replace-first", "rest",
"round", "row-number", "segment-label", "sha1", "sha256", "sin", "sinh",
"sqrt", "square", "standard-deviation", "standard_deviation", "str",
"subs", "sum", "sum-squares", "sum-window", "sum_squares", "summary",
"summary-no", "summary-str", "tail", "tan", "tanh", "to-degrees",
"to-radians", "variance", "vectorize", "weighted-random-value", "window",
"winnow", "within-percentiles?", "z-score",
)
valid_name = r'(?!#)[\w!$%*+<=>?/.#-]+'
tokens = {
'root': [
# whitespaces - usually not relevant
(r'[,]+', Text),
(r'\s+', Whitespace),
# numbers
(r'-?\d+\.\d+', Number.Float),
(r'-?\d+', Number.Integer),
(r'0x-?[a-f\d]+', Number.Hex),
# strings, symbols and characters
(r'"(\\\\|\\[^\\]|[^"\\])*"', String),
(r"\\(.|[a-z]+)", String.Char),
# expression template placeholder
(r'_', String.Symbol),
# highlight the special forms
(words(special_forms, suffix=' '), Keyword),
# highlight the builtins
(words(builtins, suffix=' '), Name.Builtin),
# the remaining functions
(r'(?<=\()' + valid_name, Name.Function),
# find the remaining variables
(valid_name, Name.Variable),
# parentheses
(r'(\(|\))', Punctuation),
],
}
class SnowballLexer(ExtendedRegexLexer):
"""
Lexer for Snowball source code.
.. versionadded:: 2.2
"""
name = 'Snowball'
url = 'http://snowballstem.org/'
aliases = ['snowball']
filenames = ['*.sbl']
_ws = r'\n\r\t '
def __init__(self, **options):
self._reset_stringescapes()
ExtendedRegexLexer.__init__(self, **options)
def _reset_stringescapes(self):
self._start = "'"
self._end = "'"
def _string(do_string_first):
def callback(lexer, match, ctx):
s = match.start()
text = match.group()
string = re.compile(r'([^%s]*)(.)' % re.escape(lexer._start)).match
escape = re.compile(r'([^%s]*)(.)' % re.escape(lexer._end)).match
pos = 0
do_string = do_string_first
while pos < len(text):
if do_string:
match = string(text, pos)
yield s + match.start(1), String.Single, match.group(1)
if match.group(2) == "'":
yield s + match.start(2), String.Single, match.group(2)
ctx.stack.pop()
break
yield s + match.start(2), String.Escape, match.group(2)
pos = match.end()
match = escape(text, pos)
yield s + match.start(), String.Escape, match.group()
if match.group(2) != lexer._end:
ctx.stack[-1] = 'escape'
break
pos = match.end()
do_string = True
ctx.pos = s + match.end()
return callback
def _stringescapes(lexer, match, ctx):
lexer._start = match.group(3)
lexer._end = match.group(5)
return bygroups(Keyword.Reserved, Whitespace, String.Escape, Whitespace,
String.Escape)(lexer, match, ctx)
tokens = {
'root': [
(words(('len', 'lenof'), suffix=r'\b'), Operator.Word),
include('root1'),
],
'root1': [
(r'[%s]+' % _ws, Whitespace),
(r'\d+', Number.Integer),
(r"'", String.Single, 'string'),
(r'[()]', Punctuation),
(r'/\*[\w\W]*?\*/', Comment.Multiline),
(r'//.*', Comment.Single),
(r'[!*+\-/<=>]=|[-=]>|<[+-]|[$*+\-/<=>?\[\]]', Operator),
(words(('as', 'get', 'hex', 'among', 'define', 'decimal',
'backwardmode'), suffix=r'\b'),
Keyword.Reserved),
(words(('strings', 'booleans', 'integers', 'routines', 'externals',
'groupings'), suffix=r'\b'),
Keyword.Reserved, 'declaration'),
(words(('do', 'or', 'and', 'for', 'hop', 'non', 'not', 'set', 'try',
'fail', 'goto', 'loop', 'next', 'test', 'true',
'false', 'unset', 'atmark', 'attach', 'delete', 'gopast',
'insert', 'repeat', 'sizeof', 'tomark', 'atleast',
'atlimit', 'reverse', 'setmark', 'tolimit', 'setlimit',
'backwards', 'substring'), suffix=r'\b'),
Operator.Word),
(words(('size', 'limit', 'cursor', 'maxint', 'minint'),
suffix=r'\b'),
Name.Builtin),
(r'(stringdef\b)([%s]*)([^%s]+)' % (_ws, _ws),
bygroups(Keyword.Reserved, Whitespace, String.Escape)),
(r'(stringescapes\b)([%s]*)(.)([%s]*)(.)' % (_ws, _ws),
_stringescapes),
(r'[A-Za-z]\w*', Name),
],
'declaration': [
(r'\)', Punctuation, '#pop'),
(words(('len', 'lenof'), suffix=r'\b'), Name,
('root1', 'declaration')),
include('root1'),
],
'string': [
(r"[^']*'", _string(True)),
],
'escape': [
(r"[^']*'", _string(False)),
],
}
def get_tokens_unprocessed(self, text=None, context=None):
self._reset_stringescapes()
return ExtendedRegexLexer.get_tokens_unprocessed(self, text, context)
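# Illustrative sketch (not part of the upstream lexer): the stringescapes
# directive is stateful, so _stringescapes/_string above remember the chosen
# delimiters and emit everything between them inside string literals as
# String.Escape. The helper name and the Snowball snippet are hypothetical.
def _demo_snowball_stringescapes():
    lexer = SnowballLexer()
    src = "stringescapes {}\n'a{U+0061}b'\n"
    # 'a' and 'b' come out as String.Single, '{' and 'U+0061}' as String.Escape.
    print([(str(tok), val) for tok, val in lexer.get_tokens(src) if val.strip()])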
| 36,774 | Python | 36.449083 | 107 | 0.437891 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/elm.py | """
pygments.lexers.elm
~~~~~~~~~~~~~~~~~~~
Lexer for the Elm programming language.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, words, include, bygroups
from pygments.token import Comment, Keyword, Name, Number, Punctuation, \
String, Whitespace
__all__ = ['ElmLexer']
class ElmLexer(RegexLexer):
"""
For Elm source code.
.. versionadded:: 2.1
"""
name = 'Elm'
url = 'http://elm-lang.org/'
aliases = ['elm']
filenames = ['*.elm']
mimetypes = ['text/x-elm']
validName = r'[a-z_][a-zA-Z0-9_\']*'
specialName = r'^main '
builtinOps = (
'~', '||', '|>', '|', '`', '^', '\\', '\'', '>>', '>=', '>', '==',
'=', '<~', '<|', '<=', '<<', '<-', '<', '::', ':', '/=', '//', '/',
'..', '.', '->', '-', '++', '+', '*', '&&', '%',
)
reservedWords = words((
'alias', 'as', 'case', 'else', 'if', 'import', 'in',
'let', 'module', 'of', 'port', 'then', 'type', 'where',
), suffix=r'\b')
tokens = {
'root': [
# Comments
(r'\{-', Comment.Multiline, 'comment'),
(r'--.*', Comment.Single),
# Whitespace
(r'\s+', Whitespace),
# Strings
(r'"', String, 'doublequote'),
# Modules
(r'^(\s*)(module)(\s*)', bygroups(Whitespace, Keyword.Namespace,
Whitespace), 'imports'),
# Imports
(r'^(\s*)(import)(\s*)', bygroups(Whitespace, Keyword.Namespace,
Whitespace), 'imports'),
# Shaders
(r'\[glsl\|.*', Name.Entity, 'shader'),
# Keywords
(reservedWords, Keyword.Reserved),
# Types
(r'[A-Z][a-zA-Z0-9_]*', Keyword.Type),
# Main
(specialName, Keyword.Reserved),
# Prefix Operators
(words((builtinOps), prefix=r'\(', suffix=r'\)'), Name.Function),
# Infix Operators
(words(builtinOps), Name.Function),
# Numbers
include('numbers'),
# Variable Names
(validName, Name.Variable),
# Parens
(r'[,()\[\]{}]', Punctuation),
],
'comment': [
(r'-(?!\})', Comment.Multiline),
(r'\{-', Comment.Multiline, 'comment'),
(r'[^-}]', Comment.Multiline),
(r'-\}', Comment.Multiline, '#pop'),
],
'doublequote': [
(r'\\u[0-9a-fA-F]{4}', String.Escape),
(r'\\[nrfvb\\"]', String.Escape),
(r'[^"]', String),
(r'"', String, '#pop'),
],
'imports': [
(r'\w+(\.\w+)*', Name.Class, '#pop'),
],
'numbers': [
(r'_?\d+\.(?=\d+)', Number.Float),
(r'_?\d+', Number.Integer),
],
'shader': [
(r'\|(?!\])', Name.Entity),
(r'\|\]', Name.Entity, '#pop'),
(r'(.*)(\n)', bygroups(Name.Entity, Whitespace)),
],
}
| 3,152 | Python | 24.224 | 77 | 0.414657 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/jmespath.py | """
pygments.lexers.jmespath
~~~~~~~~~~~~~~~~~~~~~~~~
    Lexers for the JMESPath language.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, bygroups, include
from pygments.token import String, Punctuation, Whitespace, Name, Operator, \
Number, Literal, Keyword
__all__ = ['JMESPathLexer']
class JMESPathLexer(RegexLexer):
"""
For JMESPath queries.
"""
name = 'JMESPath'
url = 'https://jmespath.org'
filenames = ['*.jp']
aliases = ['jmespath', 'jp']
tokens = {
'string': [
(r"'(\\(.|\n)|[^'\\])*'", String),
],
'punctuation': [
(r'(\[\?|[\.\*\[\],:\(\)\{\}\|])', Punctuation),
],
'ws': [
(r" |\t|\n|\r", Whitespace)
],
"dq-identifier": [
(r'[^\\"]+', Name.Variable),
(r'\\"', Name.Variable),
(r'.', Punctuation, '#pop'),
],
'identifier': [
(r'(&)?(")', bygroups(Name.Variable, Punctuation), 'dq-identifier'),
(r'(")?(&?[A-Za-z][A-Za-z0-9_-]*)(")?', bygroups(Punctuation, Name.Variable, Punctuation)),
],
'root': [
include('ws'),
include('string'),
(r'(==|!=|<=|>=|<|>|&&|\|\||!)', Operator),
include('punctuation'),
(r'@', Name.Variable.Global),
(r'(&?[A-Za-z][A-Za-z0-9_]*)(\()', bygroups(Name.Function, Punctuation)),
(r'(&)(\()', bygroups(Name.Variable, Punctuation)),
include('identifier'),
(r'-?\d+', Number),
(r'`', Literal, 'literal'),
],
'literal': [
include('ws'),
include('string'),
include('punctuation'),
(r'(false|true|null)\b', Keyword.Constant),
include('identifier'),
(r'-?\d+\.?\d*([eE][-+]\d+)?', Number),
(r'\\`', Literal),
(r'`', Literal, '#pop'),
]
}
| 2,059 | Python | 28.855072 | 103 | 0.443905 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/ampl.py | """
pygments.lexers.ampl
~~~~~~~~~~~~~~~~~~~~
Lexers for the AMPL language.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, bygroups, using, this, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Whitespace
__all__ = ['AmplLexer']
class AmplLexer(RegexLexer):
"""
For AMPL source code.
.. versionadded:: 2.2
"""
name = 'Ampl'
url = 'http://ampl.com/'
aliases = ['ampl']
filenames = ['*.run']
tokens = {
'root': [
(r'\n', Text),
(r'\s+', Whitespace),
(r'#.*?\n', Comment.Single),
(r'/[*](.|\n)*?[*]/', Comment.Multiline),
(words((
'call', 'cd', 'close', 'commands', 'data', 'delete', 'display',
'drop', 'end', 'environ', 'exit', 'expand', 'include', 'load',
'model', 'objective', 'option', 'problem', 'purge', 'quit',
'redeclare', 'reload', 'remove', 'reset', 'restore', 'shell',
'show', 'solexpand', 'solution', 'solve', 'update', 'unload',
'xref', 'coeff', 'coef', 'cover', 'obj', 'interval', 'default',
'from', 'to', 'to_come', 'net_in', 'net_out', 'dimen',
'dimension', 'check', 'complements', 'write', 'function',
'pipe', 'format', 'if', 'then', 'else', 'in', 'while', 'repeat',
'for'), suffix=r'\b'), Keyword.Reserved),
(r'(integer|binary|symbolic|ordered|circular|reversed|INOUT|IN|OUT|LOCAL)',
Keyword.Type),
(r'\".*?\"', String.Double),
(r'\'.*?\'', String.Single),
(r'[()\[\]{},;:]+', Punctuation),
(r'\b(\w+)(\.)(astatus|init0|init|lb0|lb1|lb2|lb|lrc|'
r'lslack|rc|relax|slack|sstatus|status|ub0|ub1|ub2|'
r'ub|urc|uslack|val)',
bygroups(Name.Variable, Punctuation, Keyword.Reserved)),
(r'(set|param|var|arc|minimize|maximize|subject to|s\.t\.|subj to|'
r'node|table|suffix|read table|write table)(\s+)(\w+)',
bygroups(Keyword.Declaration, Whitespace, Name.Variable)),
(r'(param)(\s*)(:)(\s*)(\w+)(\s*)(:)(\s*)((\w|\s)+)',
bygroups(Keyword.Declaration, Whitespace, Punctuation, Whitespace,
Name.Variable, Whitespace, Punctuation, Whitespace, Name.Variable)),
(r'(let|fix|unfix)(\s*)((?:\{.*\})?)(\s*)(\w+)',
bygroups(Keyword.Declaration, Whitespace, using(this), Whitespace,
Name.Variable)),
(words((
'abs', 'acos', 'acosh', 'alias', 'asin', 'asinh', 'atan', 'atan2',
'atanh', 'ceil', 'ctime', 'cos', 'exp', 'floor', 'log', 'log10',
'max', 'min', 'precision', 'round', 'sin', 'sinh', 'sqrt', 'tan',
'tanh', 'time', 'trunc', 'Beta', 'Cauchy', 'Exponential', 'Gamma',
'Irand224', 'Normal', 'Normal01', 'Poisson', 'Uniform', 'Uniform01',
'num', 'num0', 'ichar', 'char', 'length', 'substr', 'sprintf',
'match', 'sub', 'gsub', 'print', 'printf', 'next', 'nextw', 'prev',
'prevw', 'first', 'last', 'ord', 'ord0', 'card', 'arity',
'indexarity'), prefix=r'\b', suffix=r'\b'), Name.Builtin),
(r'(\+|\-|\*|/|\*\*|=|<=|>=|==|\||\^|<|>|\!|\.\.|:=|\&|\!=|<<|>>)',
Operator),
(words((
'or', 'exists', 'forall', 'and', 'in', 'not', 'within', 'union',
'diff', 'difference', 'symdiff', 'inter', 'intersect',
'intersection', 'cross', 'setof', 'by', 'less', 'sum', 'prod',
'product', 'div', 'mod'), suffix=r'\b'),
Keyword.Reserved), # Operator.Name but not enough emphasized with that
            (r'(\d+\.(?!\.)\d*|\.(?!\.)\d+)([eE][+-]?\d+)?', Number.Float),
(r'\d+([eE][+-]?\d+)?', Number.Integer),
(r'[+-]?Infinity', Number.Integer),
(r'(\w+|(\.(?!\.)))', Text)
]
}
| 4,177 | Python | 45.94382 | 90 | 0.469236 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/crystal.py | """
pygments.lexers.crystal
~~~~~~~~~~~~~~~~~~~~~~~
Lexer for Crystal.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import ExtendedRegexLexer, include, bygroups, default, \
words, line_re
from pygments.token import Comment, Operator, Keyword, Name, String, Number, \
Punctuation, Error, Whitespace
__all__ = ['CrystalLexer']
CRYSTAL_OPERATORS = [
'!=', '!~', '!', '%', '&&', '&', '**', '*', '+', '-', '/', '<=>', '<<', '<=', '<',
'===', '==', '=~', '=', '>=', '>>', '>', '[]=', '[]?', '[]', '^', '||', '|', '~'
]
class CrystalLexer(ExtendedRegexLexer):
"""
For Crystal source code.
.. versionadded:: 2.2
"""
name = 'Crystal'
url = 'http://crystal-lang.org'
aliases = ['cr', 'crystal']
filenames = ['*.cr']
mimetypes = ['text/x-crystal']
flags = re.DOTALL | re.MULTILINE
def heredoc_callback(self, match, ctx):
# okay, this is the hardest part of parsing Crystal...
# match: 1 = <<-?, 2 = quote? 3 = name 4 = quote? 5 = rest of line
start = match.start(1)
yield start, Operator, match.group(1) # <<-?
yield match.start(2), String.Heredoc, match.group(2) # quote ", ', `
yield match.start(3), String.Delimiter, match.group(3) # heredoc name
yield match.start(4), String.Heredoc, match.group(4) # quote again
heredocstack = ctx.__dict__.setdefault('heredocstack', [])
outermost = not bool(heredocstack)
heredocstack.append((match.group(1) == '<<-', match.group(3)))
ctx.pos = match.start(5)
ctx.end = match.end(5)
# this may find other heredocs, so limit the recursion depth
if len(heredocstack) < 100:
yield from self.get_tokens_unprocessed(context=ctx)
else:
yield ctx.pos, String.Heredoc, match.group(5)
ctx.pos = match.end()
if outermost:
# this is the outer heredoc again, now we can process them all
for tolerant, hdname in heredocstack:
lines = []
for match in line_re.finditer(ctx.text, ctx.pos):
if tolerant:
check = match.group().strip()
else:
check = match.group().rstrip()
if check == hdname:
for amatch in lines:
yield amatch.start(), String.Heredoc, amatch.group()
yield match.start(), String.Delimiter, match.group()
ctx.pos = match.end()
break
else:
lines.append(match)
else:
# end of heredoc not found -- error!
for amatch in lines:
yield amatch.start(), Error, amatch.group()
ctx.end = len(ctx.text)
del heredocstack[:]
def gen_crystalstrings_rules():
states = {}
states['strings'] = [
(r'\:\w+[!?]?', String.Symbol),
(words(CRYSTAL_OPERATORS, prefix=r'\:'), String.Symbol),
(r":'(\\\\|\\[^\\]|[^'\\])*'", String.Symbol),
# This allows arbitrary text after '\ for simplicity
(r"'(\\\\|\\'|[^']|\\[^'\\]+)'", String.Char),
(r':"', String.Symbol, 'simple-sym'),
# Crystal doesn't have "symbol:"s but this simplifies function args
(r'([a-zA-Z_]\w*)(:)(?!:)', bygroups(String.Symbol, Punctuation)),
(r'"', String.Double, 'simple-string'),
(r'(?<!\.)`', String.Backtick, 'simple-backtick'),
]
# double-quoted string and symbol
for name, ttype, end in ('string', String.Double, '"'), \
('sym', String.Symbol, '"'), \
('backtick', String.Backtick, '`'):
states['simple-'+name] = [
include('string-escaped' if name == 'sym' else 'string-intp-escaped'),
(r'[^\\%s#]+' % end, ttype),
(r'[\\#]', ttype),
(end, ttype, '#pop'),
]
# https://crystal-lang.org/docs/syntax_and_semantics/literals/string.html#percent-string-literals
for lbrace, rbrace, bracecc, name in \
('\\{', '\\}', '{}', 'cb'), \
('\\[', '\\]', '\\[\\]', 'sb'), \
('\\(', '\\)', '()', 'pa'), \
('<', '>', '<>', 'ab'), \
('\\|', '\\|', '\\|', 'pi'):
states[name+'-intp-string'] = [
(r'\\' + lbrace, String.Other),
] + (lbrace != rbrace) * [
(lbrace, String.Other, '#push'),
] + [
(rbrace, String.Other, '#pop'),
include('string-intp-escaped'),
(r'[\\#' + bracecc + ']', String.Other),
(r'[^\\#' + bracecc + ']+', String.Other),
]
states['strings'].append((r'%Q?' + lbrace, String.Other,
name+'-intp-string'))
states[name+'-string'] = [
(r'\\[\\' + bracecc + ']', String.Other),
] + (lbrace != rbrace) * [
(lbrace, String.Other, '#push'),
] + [
(rbrace, String.Other, '#pop'),
(r'[\\#' + bracecc + ']', String.Other),
(r'[^\\#' + bracecc + ']+', String.Other),
]
# https://crystal-lang.org/docs/syntax_and_semantics/literals/array.html#percent-array-literals
states['strings'].append((r'%[qwi]' + lbrace, String.Other,
name+'-string'))
states[name+'-regex'] = [
(r'\\[\\' + bracecc + ']', String.Regex),
] + (lbrace != rbrace) * [
(lbrace, String.Regex, '#push'),
] + [
(rbrace + '[imsx]*', String.Regex, '#pop'),
include('string-intp'),
(r'[\\#' + bracecc + ']', String.Regex),
(r'[^\\#' + bracecc + ']+', String.Regex),
]
states['strings'].append((r'%r' + lbrace, String.Regex,
name+'-regex'))
return states
tokens = {
'root': [
(r'#.*?$', Comment.Single),
# keywords
(words('''
abstract asm begin break case do else elsif end ensure extend if in
include next of private protected require rescue return select self super
then unless until when while with yield
'''.split(), suffix=r'\b'), Keyword),
(words('''
previous_def forall out uninitialized __DIR__ __FILE__ __LINE__
__END_LINE__
'''.split(), prefix=r'(?<!\.)', suffix=r'\b'), Keyword.Pseudo),
# https://crystal-lang.org/docs/syntax_and_semantics/is_a.html
(r'\.(is_a\?|nil\?|responds_to\?|as\?|as\b)', Keyword.Pseudo),
(words(['true', 'false', 'nil'], suffix=r'\b'), Keyword.Constant),
# start of function, class and module names
(r'(module|lib)(\s+)([a-zA-Z_]\w*(?:::[a-zA-Z_]\w*)*)',
bygroups(Keyword, Whitespace, Name.Namespace)),
(r'(def|fun|macro)(\s+)((?:[a-zA-Z_]\w*::)*)',
bygroups(Keyword, Whitespace, Name.Namespace), 'funcname'),
(r'def(?=[*%&^`~+-/\[<>=])', Keyword, 'funcname'),
(r'(annotation|class|struct|union|type|alias|enum)(\s+)((?:[a-zA-Z_]\w*::)*)',
bygroups(Keyword, Whitespace, Name.Namespace), 'classname'),
# https://crystal-lang.org/api/toplevel.html
(words('''
instance_sizeof offsetof pointerof sizeof typeof
'''.split(), prefix=r'(?<!\.)', suffix=r'\b'), Keyword.Pseudo),
# macros
(r'(?<!\.)(debugger\b|p!|pp!|record\b|spawn\b)', Name.Builtin.Pseudo),
# builtins
(words('''
abort at_exit caller exit gets loop main p pp print printf puts
raise rand read_line sleep spawn sprintf system
'''.split(), prefix=r'(?<!\.)', suffix=r'\b'), Name.Builtin),
# https://crystal-lang.org/api/Object.html#macro-summary
(r'(?<!\.)(((class_)?((getter|property)\b[!?]?|setter\b))|'
r'(def_(clone|equals|equals_and_hash|hash)|delegate|forward_missing_to)\b)',
Name.Builtin.Pseudo),
# normal heredocs
(r'(?<!\w)(<<-?)(["`\']?)([a-zA-Z_]\w*)(\2)(.*?\n)',
heredoc_callback),
# empty string heredocs
(r'(<<-?)("|\')()(\2)(.*?\n)', heredoc_callback),
(r'__END__', Comment.Preproc, 'end-part'),
# multiline regex (after keywords or assignments)
(r'(?:^|(?<=[=<>~!:])|'
r'(?<=(?:\s|;)when\s)|'
r'(?<=(?:\s|;)or\s)|'
r'(?<=(?:\s|;)and\s)|'
r'(?<=\.index\s)|'
r'(?<=\.scan\s)|'
r'(?<=\.sub\s)|'
r'(?<=\.sub!\s)|'
r'(?<=\.gsub\s)|'
r'(?<=\.gsub!\s)|'
r'(?<=\.match\s)|'
r'(?<=(?:\s|;)if\s)|'
r'(?<=(?:\s|;)elsif\s)|'
r'(?<=^when\s)|'
r'(?<=^index\s)|'
r'(?<=^scan\s)|'
r'(?<=^sub\s)|'
r'(?<=^gsub\s)|'
r'(?<=^sub!\s)|'
r'(?<=^gsub!\s)|'
r'(?<=^match\s)|'
r'(?<=^if\s)|'
r'(?<=^elsif\s)'
r')(\s*)(/)', bygroups(Whitespace, String.Regex), 'multiline-regex'),
# multiline regex (in method calls or subscripts)
(r'(?<=\(|,|\[)/', String.Regex, 'multiline-regex'),
# multiline regex (this time the funny no whitespace rule)
(r'(\s+)(/)(?![\s=])', bygroups(Whitespace, String.Regex),
'multiline-regex'),
# lex numbers and ignore following regular expressions which
# are division operators in fact (grrrr. i hate that. any
# better ideas?)
# since pygments 0.7 we also eat a "?" operator after numbers
# so that the char operator does not work. Chars are not allowed
# there so that you can use the ternary operator.
# stupid example:
# x>=0?n[x]:""
(r'(0o[0-7]+(?:_[0-7]+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?',
bygroups(Number.Oct, Whitespace, Operator)),
(r'(0x[0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?',
bygroups(Number.Hex, Whitespace, Operator)),
(r'(0b[01]+(?:_[01]+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?',
bygroups(Number.Bin, Whitespace, Operator)),
# 3 separate expressions for floats because any of the 3 optional
# parts makes it a float
(r'((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)(?:e[+-]?[0-9]+)?'
r'(?:_?f[0-9]+)?)(\s*)([/?])?',
bygroups(Number.Float, Whitespace, Operator)),
(r'((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)?(?:e[+-]?[0-9]+)'
r'(?:_?f[0-9]+)?)(\s*)([/?])?',
bygroups(Number.Float, Whitespace, Operator)),
(r'((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)?(?:e[+-]?[0-9]+)?'
r'(?:_?f[0-9]+))(\s*)([/?])?',
bygroups(Number.Float, Whitespace, Operator)),
(r'(0\b|[1-9][\d]*(?:_\d+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?',
bygroups(Number.Integer, Whitespace, Operator)),
# Names
(r'@@[a-zA-Z_]\w*', Name.Variable.Class),
(r'@[a-zA-Z_]\w*', Name.Variable.Instance),
(r'\$\w+', Name.Variable.Global),
(r'\$[!@&`\'+~=/\\,;.<>_*$?:"^-]', Name.Variable.Global),
(r'\$-[0adFiIlpvw]', Name.Variable.Global),
(r'::', Operator),
include('strings'),
# https://crystal-lang.org/reference/syntax_and_semantics/literals/char.html
(r'\?(\\[MC]-)*' # modifiers
r'(\\([\\abefnrtv#"\']|[0-7]{1,3}|x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|u\{[a-fA-F0-9 ]+\})|\S)'
r'(?!\w)',
String.Char),
(r'[A-Z][A-Z_]+\b(?!::|\.)', Name.Constant),
# macro expansion
(r'\{%', String.Interpol, 'in-macro-control'),
(r'\{\{', String.Interpol, 'in-macro-expr'),
# annotations
(r'(@\[)(\s*)([A-Z]\w*(::[A-Z]\w*)*)',
bygroups(Operator, Whitespace, Name.Decorator), 'in-annot'),
# this is needed because Crystal attributes can look
# like keywords (class) or like this: ` ?!?
(words(CRYSTAL_OPERATORS, prefix=r'(\.|::)'),
bygroups(Operator, Name.Operator)),
(r'(\.|::)([a-zA-Z_]\w*[!?]?|[*%&^`~+\-/\[<>=])',
bygroups(Operator, Name)),
# Names can end with [!?] unless it's "!="
(r'[a-zA-Z_]\w*(?:[!?](?!=))?', Name),
(r'(\[|\]\??|\*\*|<=>?|>=|<<?|>>?|=~|===|'
r'!~|&&?|\|\||\.{1,3})', Operator),
(r'[-+/*%=<>&!^|~]=?', Operator),
(r'[(){};,/?:\\]', Punctuation),
(r'\s+', Whitespace)
],
'funcname': [
(r'(?:([a-zA-Z_]\w*)(\.))?'
r'([a-zA-Z_]\w*[!?]?|\*\*?|[-+]@?|'
r'[/%&|^`~]|\[\]=?|<<|>>|<=?>|>=?|===?)',
bygroups(Name.Class, Operator, Name.Function), '#pop'),
default('#pop')
],
'classname': [
(r'[A-Z_]\w*', Name.Class),
(r'(\()(\s*)([A-Z_]\w*)(\s*)(\))',
bygroups(Punctuation, Whitespace, Name.Class, Whitespace, Punctuation)),
default('#pop')
],
'in-intp': [
(r'\{', String.Interpol, '#push'),
(r'\}', String.Interpol, '#pop'),
include('root'),
],
'string-intp': [
(r'#\{', String.Interpol, 'in-intp'),
],
'string-escaped': [
# https://crystal-lang.org/reference/syntax_and_semantics/literals/string.html
(r'\\([\\abefnrtv#"\']|[0-7]{1,3}|x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|u\{[a-fA-F0-9 ]+\})',
String.Escape)
],
'string-intp-escaped': [
include('string-intp'),
include('string-escaped'),
],
'interpolated-regex': [
include('string-intp'),
(r'[\\#]', String.Regex),
(r'[^\\#]+', String.Regex),
],
'interpolated-string': [
include('string-intp'),
(r'[\\#]', String.Other),
(r'[^\\#]+', String.Other),
],
'multiline-regex': [
include('string-intp'),
(r'\\\\', String.Regex),
(r'\\/', String.Regex),
(r'[\\#]', String.Regex),
(r'[^\\/#]+', String.Regex),
(r'/[imsx]*', String.Regex, '#pop'),
],
'end-part': [
(r'.+', Comment.Preproc, '#pop')
],
'in-macro-control': [
(r'\{%', String.Interpol, '#push'),
(r'%\}', String.Interpol, '#pop'),
(r'(for|verbatim)\b', Keyword),
include('root'),
],
'in-macro-expr': [
(r'\{\{', String.Interpol, '#push'),
(r'\}\}', String.Interpol, '#pop'),
include('root'),
],
'in-annot': [
(r'\[', Operator, '#push'),
(r'\]', Operator, '#pop'),
include('root'),
],
}
tokens.update(gen_crystalstrings_rules())
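# Illustrative sketch (not part of the upstream lexer): heredoc_callback above
# defers lexing of heredoc bodies and emits the terminator as String.Delimiter,
# tolerating leading indentation for the <<- form. The helper name and the
# Crystal snippet are hypothetical.
def _demo_crystal_heredoc():
    lexer = CrystalLexer()
    src = 'text = <<-EOS\n  hello\n  EOS\n'
    # '  hello\n' is emitted as String.Heredoc and '  EOS\n' as String.Delimiter.
    print([(str(tok), val) for tok, val in lexer.get_tokens(src) if val.strip()])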
| 15,756 | Python | 42.051912 | 107 | 0.424092 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/jsonnet.py | """
pygments.lexers.jsonnet
~~~~~~~~~~~~~~~~~~~~~~~
Lexer for Jsonnet data templating language.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import include, RegexLexer, words
from pygments.token import Comment, Keyword, Name, Number, Operator, \
Punctuation, String, Text, Whitespace
__all__ = ['JsonnetLexer']
jsonnet_token = r'[^\W\d]\w*'
jsonnet_function_token = jsonnet_token + r'(?=\()'
def string_rules(quote_mark):
return [
(r"[^{}\\]".format(quote_mark), String),
(r"\\.", String.Escape),
(quote_mark, String, '#pop'),
]
def quoted_field_name(quote_mark):
return [
(r'([^{quote}\\]|\\.)*{quote}'.format(quote=quote_mark),
Name.Variable, 'field_separator')
]
class JsonnetLexer(RegexLexer):
"""Lexer for Jsonnet source code."""
name = 'Jsonnet'
aliases = ['jsonnet']
filenames = ['*.jsonnet', '*.libsonnet']
url = "https://jsonnet.org"
tokens = {
# Not used by itself
'_comments': [
(r'(//|#).*\n', Comment.Single),
(r'/\*\*([^/]|/(?!\*))*\*/', String.Doc),
(r'/\*([^/]|/(?!\*))*\*/', Comment),
],
'root': [
include('_comments'),
(r"@'.*'", String),
(r'@".*"', String),
(r"'", String, 'singlestring'),
(r'"', String, 'doublestring'),
(r'\|\|\|(.|\n)*\|\|\|', String),
# Jsonnet has no integers, only an IEEE754 64-bit float
            (r'[+-]?[0-9]+(\.[0-9])?', Number.Float),
# Omit : despite spec because it appears to be used as a field
# separator
(r'[!$~+\-&|^=<>*/%]', Operator),
(r'\{', Punctuation, 'object'),
(r'\[', Punctuation, 'array'),
(r'local\b', Keyword, ('local_name')),
(r'assert\b', Keyword, 'assert'),
(words([
'assert', 'else', 'error', 'false', 'for', 'if', 'import',
'importstr', 'in', 'null', 'tailstrict', 'then', 'self',
'super', 'true',
], suffix=r'\b'), Keyword),
(r'\s+', Whitespace),
(r'function(?=\()', Keyword, 'function_params'),
(r'std\.' + jsonnet_function_token, Name.Builtin, 'function_args'),
(jsonnet_function_token, Name.Function, 'function_args'),
(jsonnet_token, Name.Variable),
(r'[\.()]', Punctuation),
],
'singlestring': string_rules("'"),
'doublestring': string_rules('"'),
'array': [
(r',', Punctuation),
(r'\]', Punctuation, '#pop'),
include('root'),
],
'local_name': [
(jsonnet_function_token, Name.Function, 'function_params'),
(jsonnet_token, Name.Variable),
(r'\s+', Whitespace),
('(?==)', Whitespace, ('#pop', 'local_value')),
],
'local_value': [
(r'=', Operator),
(r';', Punctuation, '#pop'),
include('root'),
],
'assert': [
(r':', Punctuation),
(r';', Punctuation, '#pop'),
include('root'),
],
'function_params': [
(jsonnet_token, Name.Variable),
(r'\(', Punctuation),
(r'\)', Punctuation, '#pop'),
(r',', Punctuation),
(r'\s+', Whitespace),
(r'=', Operator, 'function_param_default'),
],
'function_args': [
(r'\(', Punctuation),
(r'\)', Punctuation, '#pop'),
(r',', Punctuation),
(r'\s+', Whitespace),
include('root'),
],
'object': [
(r'\s+', Whitespace),
(r'local\b', Keyword, 'object_local_name'),
(r'assert\b', Keyword, 'object_assert'),
(r'\[', Operator, 'field_name_expr'),
(fr'(?={jsonnet_token})', Text, 'field_name'),
(r'\}', Punctuation, '#pop'),
(r'"', Name.Variable, 'double_field_name'),
(r"'", Name.Variable, 'single_field_name'),
include('_comments'),
],
'field_name': [
(jsonnet_function_token, Name.Function,
('field_separator', 'function_params')
),
(jsonnet_token, Name.Variable, 'field_separator'),
],
'double_field_name': quoted_field_name('"'),
'single_field_name': quoted_field_name("'"),
'field_name_expr': [
(r'\]', Operator, 'field_separator'),
include('root'),
],
'function_param_default': [
(r'(?=[,\)])', Whitespace, '#pop'),
include('root'),
],
'field_separator': [
(r'\s+', Whitespace),
(r'\+?::?:?', Punctuation, ('#pop', '#pop', 'field_value')),
include('_comments'),
],
'field_value': [
(r',', Punctuation, '#pop'),
(r'\}', Punctuation, '#pop:2'),
include('root'),
],
'object_assert': [
(r':', Punctuation),
(r',', Punctuation, '#pop'),
include('root'),
],
'object_local_name': [
(jsonnet_token, Name.Variable, ('#pop', 'object_local_value')),
(r'\s+', Whitespace),
],
'object_local_value': [
(r'=', Operator),
(r',', Punctuation, '#pop'),
(r'\}', Punctuation, '#pop:2'),
include('root'),
],
}
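# Illustrative sketch (not part of the upstream lexer): since Jsonnet only has
# IEEE 754 doubles, the integer-looking literal below is emitted as
# Number.Float. The helper name and the snippet are hypothetical.
def _demo_jsonnet_numbers():
    lexer = JsonnetLexer()
    print([(str(tok), val) for tok, val in lexer.get_tokens('{ x: 10 }') if val.strip()])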
| 5,635 | Python | 32.349112 | 79 | 0.442591 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/rnc.py | """
pygments.lexers.rnc
~~~~~~~~~~~~~~~~~~~
    Lexer for Relax-NG Compact syntax.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Punctuation
__all__ = ['RNCCompactLexer']
class RNCCompactLexer(RegexLexer):
"""
For RelaxNG-compact syntax.
.. versionadded:: 2.2
"""
name = 'Relax-NG Compact'
url = 'http://relaxng.org'
aliases = ['rng-compact', 'rnc']
filenames = ['*.rnc']
tokens = {
'root': [
(r'namespace\b', Keyword.Namespace),
(r'(?:default|datatypes)\b', Keyword.Declaration),
(r'##.*$', Comment.Preproc),
(r'#.*$', Comment.Single),
(r'"[^"]*"', String.Double),
# TODO single quoted strings and escape sequences outside of
# double-quoted strings
(r'(?:element|attribute|mixed)\b', Keyword.Declaration, 'variable'),
(r'(text\b|xsd:[^ ]+)', Keyword.Type, 'maybe_xsdattributes'),
(r'[,?&*=|~]|>>', Operator),
(r'[(){}]', Punctuation),
(r'.', Text),
],
# a variable has been declared using `element` or `attribute`
'variable': [
(r'[^{]+', Name.Variable),
(r'\{', Punctuation, '#pop'),
],
# after an xsd:<datatype> declaration there may be attributes
'maybe_xsdattributes': [
(r'\{', Punctuation, 'xsdattributes'),
(r'\}', Punctuation, '#pop'),
(r'.', Text),
],
# attributes take the form { key1 = value1 key2 = value2 ... }
'xsdattributes': [
(r'[^ =}]', Name.Attribute),
(r'=', Operator),
(r'"[^"]*"', String.Double),
(r'\}', Punctuation, '#pop'),
(r'.', Text),
],
}
| 1,973 | Python | 28.029411 | 80 | 0.49924 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/ruby.py | """
pygments.lexers.ruby
~~~~~~~~~~~~~~~~~~~~
Lexers for Ruby and related languages.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import Lexer, RegexLexer, ExtendedRegexLexer, include, \
bygroups, default, LexerContext, do_insertions, words, line_re
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Error, Generic, Whitespace
from pygments.util import shebang_matches
__all__ = ['RubyLexer', 'RubyConsoleLexer', 'FancyLexer']
RUBY_OPERATORS = (
'*', '**', '-', '+', '-@', '+@', '/', '%', '&', '|', '^', '`', '~',
'[]', '[]=', '<<', '>>', '<', '<>', '<=>', '>', '>=', '==', '==='
)
class RubyLexer(ExtendedRegexLexer):
"""
For Ruby source code.
"""
name = 'Ruby'
url = 'http://www.ruby-lang.org'
aliases = ['ruby', 'rb', 'duby']
filenames = ['*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec',
'*.rbx', '*.duby', 'Gemfile', 'Vagrantfile']
mimetypes = ['text/x-ruby', 'application/x-ruby']
flags = re.DOTALL | re.MULTILINE
def heredoc_callback(self, match, ctx):
# okay, this is the hardest part of parsing Ruby...
# match: 1 = <<[-~]?, 2 = quote? 3 = name 4 = quote? 5 = rest of line
start = match.start(1)
yield start, Operator, match.group(1) # <<[-~]?
yield match.start(2), String.Heredoc, match.group(2) # quote ", ', `
yield match.start(3), String.Delimiter, match.group(3) # heredoc name
yield match.start(4), String.Heredoc, match.group(4) # quote again
heredocstack = ctx.__dict__.setdefault('heredocstack', [])
outermost = not bool(heredocstack)
heredocstack.append((match.group(1) in ('<<-', '<<~'), match.group(3)))
ctx.pos = match.start(5)
ctx.end = match.end(5)
# this may find other heredocs, so limit the recursion depth
if len(heredocstack) < 100:
yield from self.get_tokens_unprocessed(context=ctx)
else:
yield ctx.pos, String.Heredoc, match.group(5)
ctx.pos = match.end()
if outermost:
# this is the outer heredoc again, now we can process them all
for tolerant, hdname in heredocstack:
lines = []
for match in line_re.finditer(ctx.text, ctx.pos):
if tolerant:
check = match.group().strip()
else:
check = match.group().rstrip()
if check == hdname:
for amatch in lines:
yield amatch.start(), String.Heredoc, amatch.group()
yield match.start(), String.Delimiter, match.group()
ctx.pos = match.end()
break
else:
lines.append(match)
else:
# end of heredoc not found -- error!
for amatch in lines:
yield amatch.start(), Error, amatch.group()
ctx.end = len(ctx.text)
del heredocstack[:]
def gen_rubystrings_rules():
def intp_regex_callback(self, match, ctx):
yield match.start(1), String.Regex, match.group(1) # begin
nctx = LexerContext(match.group(3), 0, ['interpolated-regex'])
for i, t, v in self.get_tokens_unprocessed(context=nctx):
yield match.start(3)+i, t, v
yield match.start(4), String.Regex, match.group(4) # end[mixounse]*
ctx.pos = match.end()
def intp_string_callback(self, match, ctx):
yield match.start(1), String.Other, match.group(1)
nctx = LexerContext(match.group(3), 0, ['interpolated-string'])
for i, t, v in self.get_tokens_unprocessed(context=nctx):
yield match.start(3)+i, t, v
yield match.start(4), String.Other, match.group(4) # end
ctx.pos = match.end()
states = {}
states['strings'] = [
# easy ones
(r'\:@{0,2}[a-zA-Z_]\w*[!?]?', String.Symbol),
(words(RUBY_OPERATORS, prefix=r'\:@{0,2}'), String.Symbol),
(r":'(\\\\|\\[^\\]|[^'\\])*'", String.Symbol),
(r':"', String.Symbol, 'simple-sym'),
(r'([a-zA-Z_]\w*)(:)(?!:)',
bygroups(String.Symbol, Punctuation)), # Since Ruby 1.9
(r'"', String.Double, 'simple-string-double'),
(r"'", String.Single, 'simple-string-single'),
(r'(?<!\.)`', String.Backtick, 'simple-backtick'),
]
# quoted string and symbol
for name, ttype, end in ('string-double', String.Double, '"'), \
('string-single', String.Single, "'"),\
('sym', String.Symbol, '"'), \
('backtick', String.Backtick, '`'):
states['simple-'+name] = [
include('string-intp-escaped'),
(r'[^\\%s#]+' % end, ttype),
(r'[\\#]', ttype),
(end, ttype, '#pop'),
]
# braced quoted strings
for lbrace, rbrace, bracecc, name in \
('\\{', '\\}', '{}', 'cb'), \
('\\[', '\\]', '\\[\\]', 'sb'), \
('\\(', '\\)', '()', 'pa'), \
('<', '>', '<>', 'ab'):
states[name+'-intp-string'] = [
(r'\\[\\' + bracecc + ']', String.Other),
(lbrace, String.Other, '#push'),
(rbrace, String.Other, '#pop'),
include('string-intp-escaped'),
(r'[\\#' + bracecc + ']', String.Other),
(r'[^\\#' + bracecc + ']+', String.Other),
]
states['strings'].append((r'%[QWx]?' + lbrace, String.Other,
name+'-intp-string'))
states[name+'-string'] = [
(r'\\[\\' + bracecc + ']', String.Other),
(lbrace, String.Other, '#push'),
(rbrace, String.Other, '#pop'),
(r'[\\#' + bracecc + ']', String.Other),
(r'[^\\#' + bracecc + ']+', String.Other),
]
states['strings'].append((r'%[qsw]' + lbrace, String.Other,
name+'-string'))
states[name+'-regex'] = [
(r'\\[\\' + bracecc + ']', String.Regex),
(lbrace, String.Regex, '#push'),
(rbrace + '[mixounse]*', String.Regex, '#pop'),
include('string-intp'),
(r'[\\#' + bracecc + ']', String.Regex),
(r'[^\\#' + bracecc + ']+', String.Regex),
]
states['strings'].append((r'%r' + lbrace, String.Regex,
name+'-regex'))
# these must come after %<brace>!
states['strings'] += [
# %r regex
(r'(%r([\W_]))((?:\\\2|(?!\2).)*)(\2[mixounse]*)',
intp_regex_callback),
# regular fancy strings with qsw
(r'%[qsw]([\W_])((?:\\\1|(?!\1).)*)\1', String.Other),
(r'(%[QWx]([\W_]))((?:\\\2|(?!\2).)*)(\2)',
intp_string_callback),
# special forms of fancy strings after operators or
# in method calls with braces
(r'(?<=[-+/*%=<>&!^|~,(])(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)',
bygroups(Whitespace, String.Other, None)),
            # and, because lookbehinds must have a fixed width, the whole
            # thing is repeated a second time for line beginnings...
(r'^(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)',
bygroups(Whitespace, String.Other, None)),
# all regular fancy strings without qsw
(r'(%([^a-zA-Z0-9\s]))((?:\\\2|(?!\2).)*)(\2)',
intp_string_callback),
]
return states
tokens = {
'root': [
(r'\A#!.+?$', Comment.Hashbang),
(r'#.*?$', Comment.Single),
(r'=begin\s.*?\n=end.*?$', Comment.Multiline),
# keywords
(words((
'BEGIN', 'END', 'alias', 'begin', 'break', 'case', 'defined?',
'do', 'else', 'elsif', 'end', 'ensure', 'for', 'if', 'in', 'next', 'redo',
'rescue', 'raise', 'retry', 'return', 'super', 'then', 'undef',
'unless', 'until', 'when', 'while', 'yield'), suffix=r'\b'),
Keyword),
# start of function, class and module names
(r'(module)(\s+)([a-zA-Z_]\w*'
r'(?:::[a-zA-Z_]\w*)*)',
bygroups(Keyword, Whitespace, Name.Namespace)),
(r'(def)(\s+)', bygroups(Keyword, Whitespace), 'funcname'),
(r'def(?=[*%&^`~+-/\[<>=])', Keyword, 'funcname'),
(r'(class)(\s+)', bygroups(Keyword, Whitespace), 'classname'),
# special methods
(words((
'initialize', 'new', 'loop', 'include', 'extend', 'raise', 'attr_reader',
'attr_writer', 'attr_accessor', 'attr', 'catch', 'throw', 'private',
'module_function', 'public', 'protected', 'true', 'false', 'nil'),
suffix=r'\b'),
Keyword.Pseudo),
(r'(not|and|or)\b', Operator.Word),
(words((
'autoload', 'block_given', 'const_defined', 'eql', 'equal', 'frozen', 'include',
'instance_of', 'is_a', 'iterator', 'kind_of', 'method_defined', 'nil',
'private_method_defined', 'protected_method_defined',
'public_method_defined', 'respond_to', 'tainted'), suffix=r'\?'),
Name.Builtin),
(r'(chomp|chop|exit|gsub|sub)!', Name.Builtin),
(words((
'Array', 'Float', 'Integer', 'String', '__id__', '__send__', 'abort',
'ancestors', 'at_exit', 'autoload', 'binding', 'callcc', 'caller',
'catch', 'chomp', 'chop', 'class_eval', 'class_variables',
'clone', 'const_defined?', 'const_get', 'const_missing', 'const_set',
'constants', 'display', 'dup', 'eval', 'exec', 'exit', 'extend', 'fail', 'fork',
'format', 'freeze', 'getc', 'gets', 'global_variables', 'gsub',
'hash', 'id', 'included_modules', 'inspect', 'instance_eval',
'instance_method', 'instance_methods',
'instance_variable_get', 'instance_variable_set', 'instance_variables',
'lambda', 'load', 'local_variables', 'loop',
'method', 'method_missing', 'methods', 'module_eval', 'name',
'object_id', 'open', 'p', 'print', 'printf', 'private_class_method',
'private_instance_methods',
'private_methods', 'proc', 'protected_instance_methods',
'protected_methods', 'public_class_method',
'public_instance_methods', 'public_methods',
'putc', 'puts', 'raise', 'rand', 'readline', 'readlines', 'require',
'scan', 'select', 'self', 'send', 'set_trace_func', 'singleton_methods', 'sleep',
'split', 'sprintf', 'srand', 'sub', 'syscall', 'system', 'taint',
'test', 'throw', 'to_a', 'to_s', 'trace_var', 'trap', 'untaint',
'untrace_var', 'warn'), prefix=r'(?<!\.)', suffix=r'\b'),
Name.Builtin),
(r'__(FILE|LINE)__\b', Name.Builtin.Pseudo),
# normal heredocs
(r'(?<!\w)(<<[-~]?)(["`\']?)([a-zA-Z_]\w*)(\2)(.*?\n)',
heredoc_callback),
# empty string heredocs
(r'(<<[-~]?)("|\')()(\2)(.*?\n)', heredoc_callback),
(r'__END__', Comment.Preproc, 'end-part'),
# multiline regex (after keywords or assignments)
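            # e.g. the slash in ``a = /pattern/`` starts a regex literal here
            # rather than a division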
(r'(?:^|(?<=[=<>~!:])|'
r'(?<=(?:\s|;)when\s)|'
r'(?<=(?:\s|;)or\s)|'
r'(?<=(?:\s|;)and\s)|'
r'(?<=\.index\s)|'
r'(?<=\.scan\s)|'
r'(?<=\.sub\s)|'
r'(?<=\.sub!\s)|'
r'(?<=\.gsub\s)|'
r'(?<=\.gsub!\s)|'
r'(?<=\.match\s)|'
r'(?<=(?:\s|;)if\s)|'
r'(?<=(?:\s|;)elsif\s)|'
r'(?<=^when\s)|'
r'(?<=^index\s)|'
r'(?<=^scan\s)|'
r'(?<=^sub\s)|'
r'(?<=^gsub\s)|'
r'(?<=^sub!\s)|'
r'(?<=^gsub!\s)|'
r'(?<=^match\s)|'
r'(?<=^if\s)|'
r'(?<=^elsif\s)'
r')(\s*)(/)', bygroups(Text, String.Regex), 'multiline-regex'),
# multiline regex (in method calls or subscripts)
(r'(?<=\(|,|\[)/', String.Regex, 'multiline-regex'),
# multiline regex (this time the funny no whitespace rule)
(r'(\s+)(/)(?![\s=])', bygroups(Whitespace, String.Regex),
'multiline-regex'),
# lex numbers and ignore following regular expressions which
# are division operators in fact (grrrr. i hate that. any
# better ideas?)
# since pygments 0.7 we also eat a "?" operator after numbers
# so that the char operator does not work. Chars are not allowed
# there so that you can use the ternary operator.
# stupid example:
# x>=0?n[x]:""
(r'(0_?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?',
bygroups(Number.Oct, Whitespace, Operator)),
(r'(0x[0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?',
bygroups(Number.Hex, Whitespace, Operator)),
(r'(0b[01]+(?:_[01]+)*)(\s*)([/?])?',
bygroups(Number.Bin, Whitespace, Operator)),
(r'([\d]+(?:_\d+)*)(\s*)([/?])?',
bygroups(Number.Integer, Whitespace, Operator)),
# Names
(r'@@[a-zA-Z_]\w*', Name.Variable.Class),
(r'@[a-zA-Z_]\w*', Name.Variable.Instance),
(r'\$\w+', Name.Variable.Global),
(r'\$[!@&`\'+~=/\\,;.<>_*$?:"^-]', Name.Variable.Global),
(r'\$-[0adFiIlpvw]', Name.Variable.Global),
(r'::', Operator),
include('strings'),
# chars
(r'\?(\\[MC]-)*' # modifiers
r'(\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})|\S)'
r'(?!\w)',
String.Char),
(r'[A-Z]\w+', Name.Constant),
# this is needed because ruby attributes can look
# like keywords (class) or like this: ` ?!?
(words(RUBY_OPERATORS, prefix=r'(\.|::)'),
bygroups(Operator, Name.Operator)),
(r'(\.|::)([a-zA-Z_]\w*[!?]?|[*%&^`~+\-/\[<>=])',
bygroups(Operator, Name)),
(r'[a-zA-Z_]\w*[!?]?', Name),
(r'(\[|\]|\*\*|<<?|>>?|>=|<=|<=>|=~|={3}|'
r'!~|&&?|\|\||\.{1,3})', Operator),
(r'[-+/*%=<>&!^|~]=?', Operator),
(r'[(){};,/?:\\]', Punctuation),
(r'\s+', Whitespace)
],
'funcname': [
(r'\(', Punctuation, 'defexpr'),
(r'(?:([a-zA-Z_]\w*)(\.))?' # optional scope name, like "self."
r'('
r'[a-zA-Z\u0080-\uffff][a-zA-Z0-9_\u0080-\uffff]*[!?=]?' # method name
r'|!=|!~|=~|\*\*?|[-+!~]@?|[/%&|^]|<=>|<[<=]?|>[>=]?|===?' # or operator override
r'|\[\]=?' # or element reference/assignment override
r'|`' # or the undocumented backtick override
r')',
bygroups(Name.Class, Operator, Name.Function), '#pop'),
default('#pop')
],
'classname': [
(r'\(', Punctuation, 'defexpr'),
(r'<<', Operator, '#pop'),
(r'[A-Z_]\w*', Name.Class, '#pop'),
default('#pop')
],
'defexpr': [
(r'(\))(\.|::)?', bygroups(Punctuation, Operator), '#pop'),
(r'\(', Operator, '#push'),
include('root')
],
'in-intp': [
(r'\{', String.Interpol, '#push'),
(r'\}', String.Interpol, '#pop'),
include('root'),
],
'string-intp': [
(r'#\{', String.Interpol, 'in-intp'),
(r'#@@?[a-zA-Z_]\w*', String.Interpol),
(r'#\$[a-zA-Z_]\w*', String.Interpol)
],
'string-intp-escaped': [
include('string-intp'),
(r'\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})',
String.Escape)
],
'interpolated-regex': [
include('string-intp'),
(r'[\\#]', String.Regex),
(r'[^\\#]+', String.Regex),
],
'interpolated-string': [
include('string-intp'),
(r'[\\#]', String.Other),
(r'[^\\#]+', String.Other),
],
'multiline-regex': [
include('string-intp'),
(r'\\\\', String.Regex),
(r'\\/', String.Regex),
(r'[\\#]', String.Regex),
(r'[^\\/#]+', String.Regex),
(r'/[mixounse]*', String.Regex, '#pop'),
],
'end-part': [
(r'.+', Comment.Preproc, '#pop')
]
}
tokens.update(gen_rubystrings_rules())
def analyse_text(text):
return shebang_matches(text, r'ruby(1\.\d)?')
class RubyConsoleLexer(Lexer):
"""
For Ruby interactive console (**irb**) output like:
.. sourcecode:: rbcon
irb(main):001:0> a = 1
=> 1
irb(main):002:0> puts a
1
=> nil
"""
name = 'Ruby irb session'
aliases = ['rbcon', 'irb']
mimetypes = ['text/x-ruby-shellsession']
_prompt_re = re.compile(r'irb\([a-zA-Z_]\w*\):\d{3}:\d+[>*"\'] '
r'|>> |\?> ')
def get_tokens_unprocessed(self, text):
rblexer = RubyLexer(**self.options)
curcode = ''
insertions = []
for match in line_re.finditer(text):
line = match.group()
m = self._prompt_re.match(line)
if m is not None:
end = m.end()
insertions.append((len(curcode),
[(0, Generic.Prompt, line[:end])]))
curcode += line[end:]
else:
if curcode:
yield from do_insertions(
insertions, rblexer.get_tokens_unprocessed(curcode))
curcode = ''
insertions = []
yield match.start(), Generic.Output, line
if curcode:
yield from do_insertions(
insertions, rblexer.get_tokens_unprocessed(curcode))
class FancyLexer(RegexLexer):
"""
Pygments Lexer For Fancy.
Fancy is a self-hosted, pure object-oriented, dynamic,
class-based, concurrent general-purpose programming language
running on Rubinius, the Ruby VM.
.. versionadded:: 1.5
"""
name = 'Fancy'
url = 'https://github.com/bakkdoor/fancy'
filenames = ['*.fy', '*.fancypack']
aliases = ['fancy', 'fy']
mimetypes = ['text/x-fancysrc']
tokens = {
# copied from PerlLexer:
'balanced-regex': [
(r'/(\\\\|\\[^\\]|[^/\\])*/[egimosx]*', String.Regex, '#pop'),
(r'!(\\\\|\\[^\\]|[^!\\])*![egimosx]*', String.Regex, '#pop'),
(r'\\(\\\\|[^\\])*\\[egimosx]*', String.Regex, '#pop'),
(r'\{(\\\\|\\[^\\]|[^}\\])*\}[egimosx]*', String.Regex, '#pop'),
(r'<(\\\\|\\[^\\]|[^>\\])*>[egimosx]*', String.Regex, '#pop'),
(r'\[(\\\\|\\[^\\]|[^\]\\])*\][egimosx]*', String.Regex, '#pop'),
(r'\((\\\\|\\[^\\]|[^)\\])*\)[egimosx]*', String.Regex, '#pop'),
(r'@(\\\\|\\[^\\]|[^@\\])*@[egimosx]*', String.Regex, '#pop'),
(r'%(\\\\|\\[^\\]|[^%\\])*%[egimosx]*', String.Regex, '#pop'),
(r'\$(\\\\|\\[^\\]|[^$\\])*\$[egimosx]*', String.Regex, '#pop'),
],
'root': [
(r'\s+', Whitespace),
# balanced delimiters (copied from PerlLexer):
(r's\{(\\\\|\\[^\\]|[^}\\])*\}\s*', String.Regex, 'balanced-regex'),
(r's<(\\\\|\\[^\\]|[^>\\])*>\s*', String.Regex, 'balanced-regex'),
(r's\[(\\\\|\\[^\\]|[^\]\\])*\]\s*', String.Regex, 'balanced-regex'),
(r's\((\\\\|\\[^\\]|[^)\\])*\)\s*', String.Regex, 'balanced-regex'),
(r'm?/(\\\\|\\[^\\]|[^///\n])*/[gcimosx]*', String.Regex),
(r'm(?=[/!\\{<\[(@%$])', String.Regex, 'balanced-regex'),
# Comments
(r'#(.*?)\n', Comment.Single),
# Symbols
(r'\'([^\'\s\[\](){}]+|\[\])', String.Symbol),
# Multi-line DoubleQuotedString
(r'"""(\\\\|\\[^\\]|[^\\])*?"""', String),
# DoubleQuotedString
(r'"(\\\\|\\[^\\]|[^"\\])*"', String),
# keywords
(r'(def|class|try|catch|finally|retry|return|return_local|match|'
r'case|->|=>)\b', Keyword),
# constants
(r'(self|super|nil|false|true)\b', Name.Constant),
(r'[(){};,/?|:\\]', Punctuation),
# names
(words((
'Object', 'Array', 'Hash', 'Directory', 'File', 'Class', 'String',
'Number', 'Enumerable', 'FancyEnumerable', 'Block', 'TrueClass',
'NilClass', 'FalseClass', 'Tuple', 'Symbol', 'Stack', 'Set',
'FancySpec', 'Method', 'Package', 'Range'), suffix=r'\b'),
Name.Builtin),
# functions
(r'[a-zA-Z](\w|[-+?!=*/^><%])*:', Name.Function),
# operators, must be below functions
(r'[-+*/~,<>=&!?%^\[\].$]+', Operator),
(r'[A-Z]\w*', Name.Constant),
(r'@[a-zA-Z_]\w*', Name.Variable.Instance),
(r'@@[a-zA-Z_]\w*', Name.Variable.Class),
('@@?', Operator),
(r'[a-zA-Z_]\w*', Name),
# numbers - / checks are necessary to avoid mismarking regexes,
# see comment in RubyLexer
(r'(0[oO]?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?',
bygroups(Number.Oct, Whitespace, Operator)),
(r'(0[xX][0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?',
bygroups(Number.Hex, Whitespace, Operator)),
(r'(0[bB][01]+(?:_[01]+)*)(\s*)([/?])?',
bygroups(Number.Bin, Whitespace, Operator)),
(r'([\d]+(?:_\d+)*)(\s*)([/?])?',
bygroups(Number.Integer, Whitespace, Operator)),
(r'\d+([eE][+-]?[0-9]+)|\d+\.\d+([eE][+-]?[0-9]+)?', Number.Float),
(r'\d+', Number.Integer)
]
}
| 22,775 | Python | 42.465649 | 98 | 0.429199 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/gcodelexer.py | """
pygments.lexers.gcodelexer
~~~~~~~~~~~~~~~~~~~~~~~~~~
Lexers for the G Code Language.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, bygroups
from pygments.token import Comment, Name, Text, Keyword, Number
__all__ = ['GcodeLexer']
class GcodeLexer(RegexLexer):
"""
For gcode source code.
.. versionadded:: 2.9
"""
name = 'g-code'
aliases = ['gcode']
filenames = ['*.gcode']
tokens = {
'root': [
(r';.*\n', Comment),
(r'^[gmGM]\d{1,4}\s', Name.Builtin), # M or G commands
(r'([^gGmM])([+-]?\d*[.]?\d+)', bygroups(Keyword, Number)),
(r'\s', Text.Whitespace),
(r'.*\n', Text),
]
}
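    # for illustration: a block such as ``G1 X10.5 F1500 ; feed`` lexes as a
    # Name.Builtin command word, Keyword/Number pairs for the axis words and
    # a Comment for the '; feed' part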
| 826 | Python | 21.972222 | 71 | 0.520581 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/ambient.py | """
pygments.lexers.ambient
~~~~~~~~~~~~~~~~~~~~~~~
Lexers for AmbientTalk language.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, include, words, bygroups
from pygments.token import Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Whitespace
__all__ = ['AmbientTalkLexer']
class AmbientTalkLexer(RegexLexer):
"""
Lexer for AmbientTalk source code.
.. versionadded:: 2.0
"""
name = 'AmbientTalk'
url = 'https://code.google.com/p/ambienttalk'
filenames = ['*.at']
aliases = ['ambienttalk', 'ambienttalk/2', 'at']
mimetypes = ['text/x-ambienttalk']
flags = re.MULTILINE | re.DOTALL
builtin = words(('if:', 'then:', 'else:', 'when:', 'whenever:', 'discovered:',
'disconnected:', 'reconnected:', 'takenOffline:', 'becomes:',
'export:', 'as:', 'object:', 'actor:', 'mirror:', 'taggedAs:',
'mirroredBy:', 'is:'))
tokens = {
'root': [
(r'\s+', Whitespace),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
(r'(def|deftype|import|alias|exclude)\b', Keyword),
(builtin, Name.Builtin),
(r'(true|false|nil)\b', Keyword.Constant),
(r'(~|lobby|jlobby|/)\.', Keyword.Constant, 'namespace'),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String),
(r'\|', Punctuation, 'arglist'),
(r'<:|[*^!%&<>+=,./?-]|:=', Operator),
(r"`[a-zA-Z_]\w*", String.Symbol),
(r"[a-zA-Z_]\w*:", Name.Function),
(r"[{}()\[\];`]", Punctuation),
(r'(self|super)\b', Name.Variable.Instance),
(r"[a-zA-Z_]\w*", Name.Variable),
(r"@[a-zA-Z_]\w*", Name.Class),
(r"@\[", Name.Class, 'annotations'),
include('numbers'),
],
'numbers': [
(r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
(r'\d+', Number.Integer)
],
'namespace': [
(r'[a-zA-Z_]\w*\.', Name.Namespace),
(r'[a-zA-Z_]\w*:', Name.Function, '#pop'),
(r'[a-zA-Z_]\w*(?!\.)', Name.Function, '#pop')
],
'annotations': [
(r"(.*?)\]", Name.Class, '#pop')
],
'arglist': [
(r'\|', Punctuation, '#pop'),
(r'(\s*)(,)(\s*)', bygroups(Whitespace, Punctuation, Whitespace)),
(r'[a-zA-Z_]\w*', Name.Variable),
],
}
| 2,606 | Python | 32.857142 | 83 | 0.465464 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/asc.py | """
pygments.lexers.asc
~~~~~~~~~~~~~~~~~~~
Lexer for various ASCII armored files.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, bygroups
from pygments.token import Comment, Generic, Name, Operator, String, Whitespace
__all__ = ['AscLexer']
class AscLexer(RegexLexer):
"""
Lexer for ASCII armored files, containing `-----BEGIN/END ...-----` wrapped
base64 data.
.. versionadded:: 2.10
"""
name = 'ASCII armored'
aliases = ['asc', 'pem']
filenames = [
'*.asc', # PGP; *.gpg, *.pgp, and *.sig too, but those can be binary
        '*.pem', # X.509; *.cer, *.crt, *.csr, key files etc. too, but those can be binary
'id_dsa', 'id_ecdsa', 'id_ecdsa_sk', 'id_ed25519', 'id_ed25519_sk',
'id_rsa', # SSH private keys
]
mimetypes = ['application/pgp-keys', 'application/pgp-encrypted',
'application/pgp-signature']
flags = re.MULTILINE
tokens = {
'root': [
(r'\s+', Whitespace),
(r'^-----BEGIN [^\n]+-----$', Generic.Heading, 'data'),
(r'\S+', Comment),
],
'data': [
(r'\s+', Whitespace),
(r'^([^:]+)(:)([ \t]+)(.*)',
bygroups(Name.Attribute, Operator, Whitespace, String)),
(r'^-----END [^\n]+-----$', Generic.Heading, 'root'),
(r'\S+', String),
],
}
def analyse_text(text):
if re.search(r'^-----BEGIN [^\n]+-----\r?\n', text):
return True
| 1,621 | Python | 27.964285 | 88 | 0.51203 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/ml.py | """
pygments.lexers.ml
~~~~~~~~~~~~~~~~~~
Lexers for ML family languages.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, include, bygroups, default, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Error
__all__ = ['SMLLexer', 'OcamlLexer', 'OpaLexer', 'ReasonLexer', 'FStarLexer']
class SMLLexer(RegexLexer):
"""
For the Standard ML language.
.. versionadded:: 1.5
"""
name = 'Standard ML'
aliases = ['sml']
filenames = ['*.sml', '*.sig', '*.fun']
mimetypes = ['text/x-standardml', 'application/x-standardml']
alphanumid_reserved = {
# Core
'abstype', 'and', 'andalso', 'as', 'case', 'datatype', 'do', 'else',
'end', 'exception', 'fn', 'fun', 'handle', 'if', 'in', 'infix',
'infixr', 'let', 'local', 'nonfix', 'of', 'op', 'open', 'orelse',
'raise', 'rec', 'then', 'type', 'val', 'with', 'withtype', 'while',
# Modules
'eqtype', 'functor', 'include', 'sharing', 'sig', 'signature',
'struct', 'structure', 'where',
}
symbolicid_reserved = {
# Core
':', r'\|', '=', '=>', '->', '#',
# Modules
':>',
}
nonid_reserved = {'(', ')', '[', ']', '{', '}', ',', ';', '...', '_'}
alphanumid_re = r"[a-zA-Z][\w']*"
symbolicid_re = r"[!%&$#+\-/:<=>?@\\~`^|*]+"
# A character constant is a sequence of the form #s, where s is a string
# constant denoting a string of size one character. This setup just parses
# the entire string as either a String.Double or a String.Char (depending
# on the argument), even if the String.Char is an erroneous
# multiple-character string.
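    # e.g. ``#"a"`` is a character constant and ``"abc"`` an ordinary string;
    # an erroneous ``#"abc"`` is still lexed here, merely as a String.Char.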
def stringy(whatkind):
return [
(r'[^"\\]', whatkind),
(r'\\[\\"abtnvfr]', String.Escape),
# Control-character notation is used for codes < 32,
# where \^@ == \000
(r'\\\^[\x40-\x5e]', String.Escape),
# Docs say 'decimal digits'
(r'\\[0-9]{3}', String.Escape),
(r'\\u[0-9a-fA-F]{4}', String.Escape),
(r'\\\s+\\', String.Interpol),
(r'"', whatkind, '#pop'),
]
# Callbacks for distinguishing tokens and reserved words
def long_id_callback(self, match):
if match.group(1) in self.alphanumid_reserved:
token = Error
else:
token = Name.Namespace
yield match.start(1), token, match.group(1)
yield match.start(2), Punctuation, match.group(2)
def end_id_callback(self, match):
if match.group(1) in self.alphanumid_reserved:
token = Error
elif match.group(1) in self.symbolicid_reserved:
token = Error
else:
token = Name
yield match.start(1), token, match.group(1)
def id_callback(self, match):
str = match.group(1)
if str in self.alphanumid_reserved:
token = Keyword.Reserved
elif str in self.symbolicid_reserved:
token = Punctuation
else:
token = Name
yield match.start(1), token, str
tokens = {
# Whitespace and comments are (almost) everywhere
'whitespace': [
(r'\s+', Text),
(r'\(\*', Comment.Multiline, 'comment'),
],
'delimiters': [
# This lexer treats these delimiters specially:
# Delimiters define scopes, and the scope is how the meaning of
# the `|' is resolved - is it a case/handle expression, or function
# definition by cases? (This is not how the Definition works, but
# it's how MLton behaves, see http://mlton.org/SMLNJDeviations)
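            # e.g. the `|' in ``fun f 0 = 1 | f n = n`` introduces another
            # function clause, while the `|' in ``case x of 0 => a | _ => b``
            # separates match rules; the enclosing scope decides which it is.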
(r'\(|\[|\{', Punctuation, 'main'),
(r'\)|\]|\}', Punctuation, '#pop'),
(r'\b(let|if|local)\b(?!\')', Keyword.Reserved, ('main', 'main')),
(r'\b(struct|sig|while)\b(?!\')', Keyword.Reserved, 'main'),
(r'\b(do|else|end|in|then)\b(?!\')', Keyword.Reserved, '#pop'),
],
'core': [
# Punctuation that doesn't overlap symbolic identifiers
(r'(%s)' % '|'.join(re.escape(z) for z in nonid_reserved),
Punctuation),
# Special constants: strings, floats, numbers in decimal and hex
(r'#"', String.Char, 'char'),
(r'"', String.Double, 'string'),
(r'~?0x[0-9a-fA-F]+', Number.Hex),
(r'0wx[0-9a-fA-F]+', Number.Hex),
(r'0w\d+', Number.Integer),
(r'~?\d+\.\d+[eE]~?\d+', Number.Float),
(r'~?\d+\.\d+', Number.Float),
(r'~?\d+[eE]~?\d+', Number.Float),
(r'~?\d+', Number.Integer),
# Labels
(r'#\s*[1-9][0-9]*', Name.Label),
(r'#\s*(%s)' % alphanumid_re, Name.Label),
(r'#\s+(%s)' % symbolicid_re, Name.Label),
# Some reserved words trigger a special, local lexer state change
(r'\b(datatype|abstype)\b(?!\')', Keyword.Reserved, 'dname'),
(r'\b(exception)\b(?!\')', Keyword.Reserved, 'ename'),
(r'\b(functor|include|open|signature|structure)\b(?!\')',
Keyword.Reserved, 'sname'),
(r'\b(type|eqtype)\b(?!\')', Keyword.Reserved, 'tname'),
# Regular identifiers, long and otherwise
(r'\'[\w\']*', Name.Decorator),
(r'(%s)(\.)' % alphanumid_re, long_id_callback, "dotted"),
(r'(%s)' % alphanumid_re, id_callback),
(r'(%s)' % symbolicid_re, id_callback),
],
'dotted': [
(r'(%s)(\.)' % alphanumid_re, long_id_callback),
(r'(%s)' % alphanumid_re, end_id_callback, "#pop"),
(r'(%s)' % symbolicid_re, end_id_callback, "#pop"),
(r'\s+', Error),
(r'\S+', Error),
],
# Main parser (prevents errors in files that have scoping errors)
'root': [
default('main')
],
# In this scope, I expect '|' to not be followed by a function name,
# and I expect 'and' to be followed by a binding site
'main': [
include('whitespace'),
# Special behavior of val/and/fun
(r'\b(val|and)\b(?!\')', Keyword.Reserved, 'vname'),
(r'\b(fun)\b(?!\')', Keyword.Reserved,
('#pop', 'main-fun', 'fname')),
include('delimiters'),
include('core'),
(r'\S+', Error),
],
# In this scope, I expect '|' and 'and' to be followed by a function
'main-fun': [
include('whitespace'),
(r'\s', Text),
(r'\(\*', Comment.Multiline, 'comment'),
# Special behavior of val/and/fun
(r'\b(fun|and)\b(?!\')', Keyword.Reserved, 'fname'),
(r'\b(val)\b(?!\')', Keyword.Reserved,
('#pop', 'main', 'vname')),
# Special behavior of '|' and '|'-manipulating keywords
(r'\|', Punctuation, 'fname'),
(r'\b(case|handle)\b(?!\')', Keyword.Reserved,
('#pop', 'main')),
include('delimiters'),
include('core'),
(r'\S+', Error),
],
# Character and string parsers
'char': stringy(String.Char),
'string': stringy(String.Double),
'breakout': [
(r'(?=\b(%s)\b(?!\'))' % '|'.join(alphanumid_reserved), Text, '#pop'),
],
# Dealing with what comes after module system keywords
'sname': [
include('whitespace'),
include('breakout'),
(r'(%s)' % alphanumid_re, Name.Namespace),
default('#pop'),
],
# Dealing with what comes after the 'fun' (or 'and' or '|') keyword
'fname': [
include('whitespace'),
(r'\'[\w\']*', Name.Decorator),
(r'\(', Punctuation, 'tyvarseq'),
(r'(%s)' % alphanumid_re, Name.Function, '#pop'),
(r'(%s)' % symbolicid_re, Name.Function, '#pop'),
# Ignore interesting function declarations like "fun (x + y) = ..."
default('#pop'),
],
# Dealing with what comes after the 'val' (or 'and') keyword
'vname': [
include('whitespace'),
(r'\'[\w\']*', Name.Decorator),
(r'\(', Punctuation, 'tyvarseq'),
(r'(%s)(\s*)(=(?!%s))' % (alphanumid_re, symbolicid_re),
bygroups(Name.Variable, Text, Punctuation), '#pop'),
(r'(%s)(\s*)(=(?!%s))' % (symbolicid_re, symbolicid_re),
bygroups(Name.Variable, Text, Punctuation), '#pop'),
(r'(%s)' % alphanumid_re, Name.Variable, '#pop'),
(r'(%s)' % symbolicid_re, Name.Variable, '#pop'),
# Ignore interesting patterns like 'val (x, y)'
default('#pop'),
],
# Dealing with what comes after the 'type' (or 'and') keyword
'tname': [
include('whitespace'),
include('breakout'),
(r'\'[\w\']*', Name.Decorator),
(r'\(', Punctuation, 'tyvarseq'),
(r'=(?!%s)' % symbolicid_re, Punctuation, ('#pop', 'typbind')),
(r'(%s)' % alphanumid_re, Keyword.Type),
(r'(%s)' % symbolicid_re, Keyword.Type),
(r'\S+', Error, '#pop'),
],
# A type binding includes most identifiers
'typbind': [
include('whitespace'),
(r'\b(and)\b(?!\')', Keyword.Reserved, ('#pop', 'tname')),
include('breakout'),
include('core'),
(r'\S+', Error, '#pop'),
],
# Dealing with what comes after the 'datatype' (or 'and') keyword
'dname': [
include('whitespace'),
include('breakout'),
(r'\'[\w\']*', Name.Decorator),
(r'\(', Punctuation, 'tyvarseq'),
(r'(=)(\s*)(datatype)',
bygroups(Punctuation, Text, Keyword.Reserved), '#pop'),
(r'=(?!%s)' % symbolicid_re, Punctuation,
('#pop', 'datbind', 'datcon')),
(r'(%s)' % alphanumid_re, Keyword.Type),
(r'(%s)' % symbolicid_re, Keyword.Type),
(r'\S+', Error, '#pop'),
],
# common case - A | B | C of int
'datbind': [
include('whitespace'),
(r'\b(and)\b(?!\')', Keyword.Reserved, ('#pop', 'dname')),
(r'\b(withtype)\b(?!\')', Keyword.Reserved, ('#pop', 'tname')),
(r'\b(of)\b(?!\')', Keyword.Reserved),
(r'(\|)(\s*)(%s)' % alphanumid_re,
bygroups(Punctuation, Text, Name.Class)),
(r'(\|)(\s+)(%s)' % symbolicid_re,
bygroups(Punctuation, Text, Name.Class)),
include('breakout'),
include('core'),
(r'\S+', Error),
],
# Dealing with what comes after an exception
'ename': [
include('whitespace'),
(r'(and\b)(\s+)(%s)' % alphanumid_re,
bygroups(Keyword.Reserved, Text, Name.Class)),
(r'(and\b)(\s*)(%s)' % symbolicid_re,
bygroups(Keyword.Reserved, Text, Name.Class)),
(r'\b(of)\b(?!\')', Keyword.Reserved),
(r'(%s)|(%s)' % (alphanumid_re, symbolicid_re), Name.Class),
default('#pop'),
],
'datcon': [
include('whitespace'),
(r'(%s)' % alphanumid_re, Name.Class, '#pop'),
(r'(%s)' % symbolicid_re, Name.Class, '#pop'),
(r'\S+', Error, '#pop'),
],
# Series of type variables
'tyvarseq': [
(r'\s', Text),
(r'\(\*', Comment.Multiline, 'comment'),
(r'\'[\w\']*', Name.Decorator),
(alphanumid_re, Name),
(r',', Punctuation),
(r'\)', Punctuation, '#pop'),
(symbolicid_re, Name),
],
'comment': [
(r'[^(*)]', Comment.Multiline),
(r'\(\*', Comment.Multiline, '#push'),
(r'\*\)', Comment.Multiline, '#pop'),
(r'[(*)]', Comment.Multiline),
],
}
class OcamlLexer(RegexLexer):
"""
For the OCaml language.
.. versionadded:: 0.7
"""
name = 'OCaml'
url = 'https://ocaml.org/'
aliases = ['ocaml']
filenames = ['*.ml', '*.mli', '*.mll', '*.mly']
mimetypes = ['text/x-ocaml']
keywords = (
'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done',
'downto', 'else', 'end', 'exception', 'external', 'false',
'for', 'fun', 'function', 'functor', 'if', 'in', 'include',
'inherit', 'initializer', 'lazy', 'let', 'match', 'method',
'module', 'mutable', 'new', 'object', 'of', 'open', 'private',
'raise', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
'type', 'value', 'val', 'virtual', 'when', 'while', 'with',
)
keyopts = (
'!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-',
r'-\.', '->', r'\.', r'\.\.', ':', '::', ':=', ':>', ';', ';;', '<',
'<-', '=', '>', '>]', r'>\}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
r'\[\|', ']', '_', '`', r'\{', r'\{<', r'\|', r'\|]', r'\}', '~'
)
operators = r'[!$%&*+\./:<=>?@^|~-]'
word_operators = ('and', 'asr', 'land', 'lor', 'lsl', 'lxor', 'mod', 'or')
prefix_syms = r'[!?~]'
infix_syms = r'[=<>@^|&+\*/$%-]'
primitives = ('unit', 'int', 'float', 'bool', 'string', 'char', 'list', 'array')
tokens = {
'escape-sequence': [
(r'\\[\\"\'ntbr]', String.Escape),
(r'\\[0-9]{3}', String.Escape),
(r'\\x[0-9a-fA-F]{2}', String.Escape),
],
'root': [
(r'\s+', Text),
(r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
(r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Namespace, 'dotted'),
(r'\b([A-Z][\w\']*)', Name.Class),
(r'\(\*(?![)])', Comment, 'comment'),
(r'\b(%s)\b' % '|'.join(keywords), Keyword),
(r'(%s)' % '|'.join(keyopts[::-1]), Operator),
(r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
(r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
(r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
(r"[^\W\d][\w']*", Name),
(r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
(r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
(r'0[oO][0-7][0-7_]*', Number.Oct),
(r'0[bB][01][01_]*', Number.Bin),
(r'\d[\d_]*', Number.Integer),
(r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
String.Char),
(r"'.'", String.Char),
(r"'", Keyword), # a stray quote is another syntax element
(r'"', String.Double, 'string'),
(r'[~?][a-z][\w\']*:', Name.Variable),
],
'comment': [
(r'[^(*)]+', Comment),
(r'\(\*', Comment, '#push'),
(r'\*\)', Comment, '#pop'),
(r'[(*)]', Comment),
],
'string': [
(r'[^\\"]+', String.Double),
include('escape-sequence'),
(r'\\\n', String.Double),
(r'"', String.Double, '#pop'),
],
'dotted': [
(r'\s+', Text),
(r'\.', Punctuation),
(r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
(r'[A-Z][\w\']*', Name.Class, '#pop'),
(r'[a-z_][\w\']*', Name, '#pop'),
default('#pop'),
],
}
class OpaLexer(RegexLexer):
"""
Lexer for the Opa language.
.. versionadded:: 1.5
"""
name = 'Opa'
aliases = ['opa']
filenames = ['*.opa']
mimetypes = ['text/x-opa']
# most of these aren't strictly keywords
# but if you color only real keywords, you might just
# as well not color anything
keywords = (
'and', 'as', 'begin', 'case', 'client', 'css', 'database', 'db', 'do',
'else', 'end', 'external', 'forall', 'function', 'if', 'import',
'match', 'module', 'or', 'package', 'parser', 'rec', 'server', 'then',
'type', 'val', 'with', 'xml_parser',
)
# matches both stuff and `stuff`
ident_re = r'(([a-zA-Z_]\w*)|(`[^`]*`))'
op_re = r'[.=\-<>,@~%/+?*&^!]'
punc_re = r'[()\[\],;|]' # '{' and '}' are treated elsewhere
# because they are also used for inserts
tokens = {
# copied from the caml lexer, should be adapted
'escape-sequence': [
(r'\\[\\"\'ntr}]', String.Escape),
(r'\\[0-9]{3}', String.Escape),
(r'\\x[0-9a-fA-F]{2}', String.Escape),
],
# factorizing these rules, because they are inserted many times
'comments': [
(r'/\*', Comment, 'nested-comment'),
(r'//.*?$', Comment),
],
'comments-and-spaces': [
include('comments'),
(r'\s+', Text),
],
'root': [
include('comments-and-spaces'),
# keywords
(words(keywords, prefix=r'\b', suffix=r'\b'), Keyword),
# directives
# we could parse the actual set of directives instead of anything
# starting with @, but this is troublesome
# because it needs to be adjusted all the time
# and assuming we parse only sources that compile, it is useless
(r'@' + ident_re + r'\b', Name.Builtin.Pseudo),
# number literals
(r'-?.[\d]+([eE][+\-]?\d+)', Number.Float),
(r'-?\d+.\d*([eE][+\-]?\d+)', Number.Float),
(r'-?\d+[eE][+\-]?\d+', Number.Float),
(r'0[xX][\da-fA-F]+', Number.Hex),
(r'0[oO][0-7]+', Number.Oct),
(r'0[bB][01]+', Number.Bin),
(r'\d+', Number.Integer),
# color literals
(r'#[\da-fA-F]{3,6}', Number.Integer),
# string literals
(r'"', String.Double, 'string'),
# char literal, should be checked because this is the regexp from
# the caml lexer
(r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2})|.)'",
String.Char),
# this is meant to deal with embedded exprs in strings
# every time we find a '}' we pop a state so that if we were
# inside a string, we are back in the string state
# as a consequence, we must also push a state every time we find a
# '{' or else we will have errors when parsing {} for instance
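            # e.g. while lexing ``"total: {x + y}"`` the '{' re-enters 'root'
            # for the embedded expression and the matching '}' pops back into
            # the enclosing 'string' state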
(r'\{', Operator, '#push'),
(r'\}', Operator, '#pop'),
# html literals
            # this is much stricter than the actual parser,
# since a<b would not be parsed as html
# but then again, the parser is way too lax, and we can't hope
# to have something as tolerant
(r'<(?=[a-zA-Z>])', String.Single, 'html-open-tag'),
# db path
# matching the '[_]' in '/a[_]' because it is a part
# of the syntax of the db path definition
# unfortunately, i don't know how to match the ']' in
# /a[1], so this is somewhat inconsistent
(r'[@?!]?(/\w+)+(\[_\])?', Name.Variable),
# putting the same color on <- as on db path, since
# it can be used only to mean Db.write
(r'<-(?!'+op_re+r')', Name.Variable),
# 'modules'
# although modules are not distinguished by their names as in caml
            # the standard library seems to follow the convention that only
            # module names are capitalized
(r'\b([A-Z]\w*)(?=\.)', Name.Namespace),
# operators
# = has a special role because this is the only
# way to syntactic distinguish binding constructions
# unfortunately, this colors the equal in {x=2} too
(r'=(?!'+op_re+r')', Keyword),
(r'(%s)+' % op_re, Operator),
(r'(%s)+' % punc_re, Operator),
# coercions
(r':', Operator, 'type'),
# type variables
            # we need this rule because we don't specially parse type
            # definitions, so in "type t('a) = ...", "'a" is parsed by 'root'
("'"+ident_re, Keyword.Type),
# id literal, #something, or #{expr}
(r'#'+ident_re, String.Single),
(r'#(?=\{)', String.Single),
# identifiers
            # this avoids coloring '2' in 'a2' as an integer
(ident_re, Text),
# default, not sure if that is needed or not
# (r'.', Text),
],
# it is quite painful to have to parse types to know where they end
# this is the general rule for a type
# a type is either:
# * -> ty
# * type-with-slash
# * type-with-slash -> ty
# * type-with-slash (, type-with-slash)+ -> ty
#
# the code is pretty funky in here, but this code would roughly
# translate in caml to:
# let rec type stream =
# match stream with
# | [< "->"; stream >] -> type stream
# | [< ""; stream >] ->
# type_with_slash stream
# type_lhs_1 stream;
# and type_1 stream = ...
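        # for illustration (an assumed example, not from the grammar notes
        # above): after a coercion like ``x : list(int) -> bool`` the lexer
        # enters 'type', reads ``list(int)`` via 'type-with-slash'/'type-1'
        # (handing the parenthesised part to 'type-tuple'), then meets the
        # ``->`` in 'type-lhs-1' and loops back into 'type' for ``bool``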
'type': [
include('comments-and-spaces'),
(r'->', Keyword.Type),
default(('#pop', 'type-lhs-1', 'type-with-slash')),
],
# parses all the atomic or closed constructions in the syntax of type
# expressions: record types, tuple types, type constructors, basic type
# and type variables
'type-1': [
include('comments-and-spaces'),
(r'\(', Keyword.Type, ('#pop', 'type-tuple')),
(r'~?\{', Keyword.Type, ('#pop', 'type-record')),
(ident_re+r'\(', Keyword.Type, ('#pop', 'type-tuple')),
(ident_re, Keyword.Type, '#pop'),
("'"+ident_re, Keyword.Type),
# this case is not in the syntax but sometimes
# we think we are parsing types when in fact we are parsing
# some css, so we just pop the states until we get back into
# the root state
default('#pop'),
],
# type-with-slash is either:
# * type-1
# * type-1 (/ type-1)+
'type-with-slash': [
include('comments-and-spaces'),
default(('#pop', 'slash-type-1', 'type-1')),
],
'slash-type-1': [
include('comments-and-spaces'),
('/', Keyword.Type, ('#pop', 'type-1')),
# same remark as above
default('#pop'),
],
# we go in this state after having parsed a type-with-slash
# while trying to parse a type
# and at this point we must determine if we are parsing an arrow
# type (in which case we must continue parsing) or not (in which
# case we stop)
'type-lhs-1': [
include('comments-and-spaces'),
(r'->', Keyword.Type, ('#pop', 'type')),
(r'(?=,)', Keyword.Type, ('#pop', 'type-arrow')),
default('#pop'),
],
'type-arrow': [
include('comments-and-spaces'),
            # the lookahead here allows parsing f(x : int, y : float -> truc)
# correctly
(r',(?=[^:]*?->)', Keyword.Type, 'type-with-slash'),
(r'->', Keyword.Type, ('#pop', 'type')),
# same remark as above
default('#pop'),
],
# no need to do precise parsing for tuples and records
# because they are closed constructions, so we can simply
# find the closing delimiter
        # note that this function would not work if the source
# contained identifiers like `{)` (although it could be patched
# to support it)
'type-tuple': [
include('comments-and-spaces'),
(r'[^()/*]+', Keyword.Type),
(r'[/*]', Keyword.Type),
(r'\(', Keyword.Type, '#push'),
(r'\)', Keyword.Type, '#pop'),
],
'type-record': [
include('comments-and-spaces'),
(r'[^{}/*]+', Keyword.Type),
(r'[/*]', Keyword.Type),
(r'\{', Keyword.Type, '#push'),
(r'\}', Keyword.Type, '#pop'),
],
# 'type-tuple': [
# include('comments-and-spaces'),
# (r'\)', Keyword.Type, '#pop'),
# default(('#pop', 'type-tuple-1', 'type-1')),
# ],
# 'type-tuple-1': [
# include('comments-and-spaces'),
# (r',?\s*\)', Keyword.Type, '#pop'), # ,) is a valid end of tuple, in (1,)
# (r',', Keyword.Type, 'type-1'),
# ],
# 'type-record':[
# include('comments-and-spaces'),
# (r'\}', Keyword.Type, '#pop'),
# (r'~?(?:\w+|`[^`]*`)', Keyword.Type, 'type-record-field-expr'),
# ],
# 'type-record-field-expr': [
#
# ],
'nested-comment': [
(r'[^/*]+', Comment),
(r'/\*', Comment, '#push'),
(r'\*/', Comment, '#pop'),
(r'[/*]', Comment),
],
# the copy pasting between string and single-string
# is kinda sad. Is there a way to avoid that??
'string': [
(r'[^\\"{]+', String.Double),
(r'"', String.Double, '#pop'),
(r'\{', Operator, 'root'),
include('escape-sequence'),
],
'single-string': [
(r'[^\\\'{]+', String.Double),
(r'\'', String.Double, '#pop'),
(r'\{', Operator, 'root'),
include('escape-sequence'),
],
# all the html stuff
# can't really reuse some existing html parser
# because we must be able to parse embedded expressions
# we are in this state after someone parsed the '<' that
# started the html literal
'html-open-tag': [
(r'[\w\-:]+', String.Single, ('#pop', 'html-attr')),
(r'>', String.Single, ('#pop', 'html-content')),
],
# we are in this state after someone parsed the '</' that
# started the end of the closing tag
'html-end-tag': [
# this is a star, because </> is allowed
(r'[\w\-:]*>', String.Single, '#pop'),
],
# we are in this state after having parsed '<ident(:ident)?'
# we thus parse a possibly empty list of attributes
'html-attr': [
(r'\s+', Text),
(r'[\w\-:]+=', String.Single, 'html-attr-value'),
(r'/>', String.Single, '#pop'),
(r'>', String.Single, ('#pop', 'html-content')),
],
'html-attr-value': [
(r"'", String.Single, ('#pop', 'single-string')),
(r'"', String.Single, ('#pop', 'string')),
(r'#'+ident_re, String.Single, '#pop'),
(r'#(?=\{)', String.Single, ('#pop', 'root')),
(r'[^"\'{`=<>]+', String.Single, '#pop'),
(r'\{', Operator, ('#pop', 'root')), # this is a tail call!
],
# we should probably deal with '\' escapes here
'html-content': [
(r'<!--', Comment, 'html-comment'),
(r'</', String.Single, ('#pop', 'html-end-tag')),
(r'<', String.Single, 'html-open-tag'),
(r'\{', Operator, 'root'),
(r'[^<{]+', String.Single),
],
'html-comment': [
(r'-->', Comment, '#pop'),
(r'[^\-]+|-', Comment),
],
}
class ReasonLexer(RegexLexer):
"""
For the ReasonML language.
.. versionadded:: 2.6
"""
name = 'ReasonML'
url = 'https://reasonml.github.io/'
aliases = ['reasonml', 'reason']
filenames = ['*.re', '*.rei']
mimetypes = ['text/x-reasonml']
keywords = (
'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done', 'downto',
'else', 'end', 'exception', 'external', 'false', 'for', 'fun', 'esfun',
'function', 'functor', 'if', 'in', 'include', 'inherit', 'initializer', 'lazy',
'let', 'switch', 'module', 'pub', 'mutable', 'new', 'nonrec', 'object', 'of',
'open', 'pri', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
'type', 'val', 'virtual', 'when', 'while', 'with',
)
keyopts = (
'!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-',
r'-\.', '=>', r'\.', r'\.\.', r'\.\.\.', ':', '::', ':=', ':>', ';', ';;', '<',
'<-', '=', '>', '>]', r'>\}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
r'\[\|', ']', '_', '`', r'\{', r'\{<', r'\|', r'\|\|', r'\|]', r'\}', '~'
)
operators = r'[!$%&*+\./:<=>?@^|~-]'
word_operators = ('and', 'asr', 'land', 'lor', 'lsl', 'lsr', 'lxor', 'mod', 'or')
prefix_syms = r'[!?~]'
infix_syms = r'[=<>@^|&+\*/$%-]'
primitives = ('unit', 'int', 'float', 'bool', 'string', 'char', 'list', 'array')
tokens = {
'escape-sequence': [
(r'\\[\\"\'ntbr]', String.Escape),
(r'\\[0-9]{3}', String.Escape),
(r'\\x[0-9a-fA-F]{2}', String.Escape),
],
'root': [
(r'\s+', Text),
(r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
(r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Namespace, 'dotted'),
(r'\b([A-Z][\w\']*)', Name.Class),
(r'//.*?\n', Comment.Single),
(r'\/\*(?!/)', Comment.Multiline, 'comment'),
(r'\b(%s)\b' % '|'.join(keywords), Keyword),
(r'(%s)' % '|'.join(keyopts[::-1]), Operator.Word),
(r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
(r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
(r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
(r"[^\W\d][\w']*", Name),
(r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
(r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
(r'0[oO][0-7][0-7_]*', Number.Oct),
(r'0[bB][01][01_]*', Number.Bin),
(r'\d[\d_]*', Number.Integer),
(r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
String.Char),
(r"'.'", String.Char),
(r"'", Keyword),
(r'"', String.Double, 'string'),
(r'[~?][a-z][\w\']*:', Name.Variable),
],
'comment': [
(r'[^/*]+', Comment.Multiline),
(r'\/\*', Comment.Multiline, '#push'),
(r'\*\/', Comment.Multiline, '#pop'),
(r'\*', Comment.Multiline),
],
'string': [
(r'[^\\"]+', String.Double),
include('escape-sequence'),
(r'\\\n', String.Double),
(r'"', String.Double, '#pop'),
],
'dotted': [
(r'\s+', Text),
(r'\.', Punctuation),
(r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
(r'[A-Z][\w\']*', Name.Class, '#pop'),
(r'[a-z_][\w\']*', Name, '#pop'),
default('#pop'),
],
}
class FStarLexer(RegexLexer):
"""
For the F* language.
.. versionadded:: 2.7
"""
name = 'FStar'
url = 'https://www.fstar-lang.org/'
aliases = ['fstar']
filenames = ['*.fst', '*.fsti']
mimetypes = ['text/x-fstar']
keywords = (
        'abstract', 'attributes', 'noeq', 'unopteq', 'and',
'begin', 'by', 'default', 'effect', 'else', 'end', 'ensures',
'exception', 'exists', 'false', 'forall', 'fun', 'function', 'if',
'in', 'include', 'inline', 'inline_for_extraction', 'irreducible',
'logic', 'match', 'module', 'mutable', 'new', 'new_effect', 'noextract',
'of', 'open', 'opaque', 'private', 'range_of', 'reifiable',
'reify', 'reflectable', 'requires', 'set_range_of', 'sub_effect',
'synth', 'then', 'total', 'true', 'try', 'type', 'unfold', 'unfoldable',
'val', 'when', 'with', 'not'
)
decl_keywords = ('let', 'rec')
assume_keywords = ('assume', 'admit', 'assert', 'calc')
keyopts = (
r'~', r'-', r'/\\', r'\\/', r'<:', r'<@', r'\(\|', r'\|\)', r'#', r'u#',
r'&', r'\(', r'\)', r'\(\)', r',', r'~>', r'->', r'<-', r'<--', r'<==>',
r'==>', r'\.', r'\?', r'\?\.', r'\.\[', r'\.\(', r'\.\(\|', r'\.\[\|',
r'\{:pattern', r':', r'::', r':=', r';', r';;', r'=', r'%\[', r'!\{',
r'\[', r'\[@', r'\[\|', r'\|>', r'\]', r'\|\]', r'\{', r'\|', r'\}', r'\$'
)
operators = r'[!$%&*+\./:<=>?@^|~-]'
prefix_syms = r'[!?~]'
infix_syms = r'[=<>@^|&+\*/$%-]'
primitives = ('unit', 'int', 'float', 'bool', 'string', 'char', 'list', 'array')
tokens = {
'escape-sequence': [
(r'\\[\\"\'ntbr]', String.Escape),
(r'\\[0-9]{3}', String.Escape),
(r'\\x[0-9a-fA-F]{2}', String.Escape),
],
'root': [
(r'\s+', Text),
(r'false|true|False|True|\(\)|\[\]', Name.Builtin.Pseudo),
(r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Namespace, 'dotted'),
(r'\b([A-Z][\w\']*)', Name.Class),
(r'\(\*(?![)])', Comment, 'comment'),
(r'\/\/.+$', Comment),
(r'\b(%s)\b' % '|'.join(keywords), Keyword),
(r'\b(%s)\b' % '|'.join(assume_keywords), Name.Exception),
(r'\b(%s)\b' % '|'.join(decl_keywords), Keyword.Declaration),
(r'(%s)' % '|'.join(keyopts[::-1]), Operator),
(r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
(r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
(r"[^\W\d][\w']*", Name),
(r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
(r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
(r'0[oO][0-7][0-7_]*', Number.Oct),
(r'0[bB][01][01_]*', Number.Bin),
(r'\d[\d_]*', Number.Integer),
(r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
String.Char),
(r"'.'", String.Char),
(r"'", Keyword), # a stray quote is another syntax element
(r"\`([\w\'.]+)\`", Operator.Word), # for infix applications
(r"\`", Keyword), # for quoting
(r'"', String.Double, 'string'),
(r'[~?][a-z][\w\']*:', Name.Variable),
],
'comment': [
(r'[^(*)]+', Comment),
(r'\(\*', Comment, '#push'),
(r'\*\)', Comment, '#pop'),
(r'[(*)]', Comment),
],
'string': [
(r'[^\\"]+', String.Double),
include('escape-sequence'),
(r'\\\n', String.Double),
(r'"', String.Double, '#pop'),
],
'dotted': [
(r'\s+', Text),
(r'\.', Punctuation),
(r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
(r'[A-Z][\w\']*', Name.Class, '#pop'),
(r'[a-z_][\w\']*', Name, '#pop'),
default('#pop'),
],
}
| 35,324 | Python | 35.758585 | 87 | 0.439729 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/go.py | """
pygments.lexers.go
~~~~~~~~~~~~~~~~~~
Lexers for the Google Go language.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, bygroups, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Whitespace
__all__ = ['GoLexer']
class GoLexer(RegexLexer):
"""
For Go source.
.. versionadded:: 1.2
"""
name = 'Go'
url = 'https://go.dev/'
filenames = ['*.go']
aliases = ['go', 'golang']
mimetypes = ['text/x-gosrc']
tokens = {
'root': [
(r'\n', Whitespace),
(r'\s+', Whitespace),
(r'(\\)(\n)', bygroups(Text, Whitespace)), # line continuations
(r'//(.*?)$', Comment.Single),
(r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
(r'(import|package)\b', Keyword.Namespace),
(r'(var|func|struct|map|chan|type|interface|const)\b',
Keyword.Declaration),
(words((
'break', 'default', 'select', 'case', 'defer', 'go',
'else', 'goto', 'switch', 'fallthrough', 'if', 'range',
'continue', 'for', 'return'), suffix=r'\b'),
Keyword),
(r'(true|false|iota|nil)\b', Keyword.Constant),
# It seems the builtin types aren't actually keywords, but
# can be used as functions. So we need two declarations.
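            # e.g. ``float64(x)`` is a conversion written like a call (first
            # rule below, Name.Builtin), while ``var x float64`` uses the bare
            # word as a type (second rule, Keyword.Type)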
(words((
'uint', 'uint8', 'uint16', 'uint32', 'uint64',
'int', 'int8', 'int16', 'int32', 'int64',
'float', 'float32', 'float64',
'complex64', 'complex128', 'byte', 'rune',
'string', 'bool', 'error', 'uintptr', 'any', 'comparable',
'print', 'println', 'panic', 'recover', 'close', 'complex',
'real', 'imag', 'len', 'cap', 'append', 'copy', 'delete',
'new', 'make'), suffix=r'\b(\()'),
bygroups(Name.Builtin, Punctuation)),
(words((
'uint', 'uint8', 'uint16', 'uint32', 'uint64',
'int', 'int8', 'int16', 'int32', 'int64',
'float', 'float32', 'float64',
'complex64', 'complex128', 'byte', 'rune',
'string', 'bool', 'error', 'uintptr', 'any', 'comparable'), suffix=r'\b'),
Keyword.Type),
# imaginary_lit
(r'\d+i', Number),
(r'\d+\.\d*([Ee][-+]\d+)?i', Number),
(r'\.\d+([Ee][-+]\d+)?i', Number),
(r'\d+[Ee][-+]\d+i', Number),
# float_lit
(r'\d+(\.\d+[eE][+\-]?\d+|'
r'\.\d*|[eE][+\-]?\d+)', Number.Float),
(r'\.\d+([eE][+\-]?\d+)?', Number.Float),
# int_lit
# -- octal_lit
(r'0[0-7]+', Number.Oct),
# -- hex_lit
(r'0[xX][0-9a-fA-F]+', Number.Hex),
# -- decimal_lit
(r'(0|[1-9][0-9]*)', Number.Integer),
# char_lit
(r"""'(\\['"\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])'""",
String.Char),
# StringLiteral
# -- raw_string_lit
(r'`[^`]*`', String),
# -- interpreted_string_lit
(r'"(\\\\|\\[^\\]|[^"\\])*"', String),
# Tokens
(r'(<<=|>>=|<<|>>|<=|>=|&\^=|&\^|\+=|-=|\*=|/=|%=|&=|\|=|&&|\|\|'
r'|<-|\+\+|--|==|!=|:=|\.\.\.|[+\-*/%&]'
r'|~|\|)', Operator),
(r'[|^<>=!()\[\]{}.,;:]', Punctuation),
# identifier
(r'[^\W\d]\w*', Name.Other),
]
}
| 3,761 | Python | 37 | 90 | 0.411593 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/boa.py | """
pygments.lexers.boa
~~~~~~~~~~~~~~~~~~~
Lexers for the Boa language.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, words
from pygments.token import String, Comment, Keyword, Name, Number, Operator, \
Punctuation, Whitespace
__all__ = ['BoaLexer']
class BoaLexer(RegexLexer):
"""
Lexer for the `Boa <http://boa.cs.iastate.edu/docs/>`_ language.
.. versionadded:: 2.4
"""
name = 'Boa'
aliases = ['boa']
filenames = ['*.boa']
reserved = words(
('input', 'output', 'of', 'weight', 'before', 'after', 'stop',
'ifall', 'foreach', 'exists', 'function', 'break', 'switch', 'case',
'visitor', 'default', 'return', 'visit', 'while', 'if', 'else'),
suffix=r'\b', prefix=r'\b')
keywords = words(
('bottom', 'collection', 'maximum', 'mean', 'minimum', 'set', 'sum',
'top', 'string', 'int', 'bool', 'float', 'time', 'false', 'true',
'array', 'map', 'stack', 'enum', 'type'), suffix=r'\b', prefix=r'\b')
classes = words(
('Project', 'ForgeKind', 'CodeRepository', 'Revision', 'RepositoryKind',
'ChangedFile', 'FileKind', 'ASTRoot', 'Namespace', 'Declaration', 'Type',
'Method', 'Variable', 'Statement', 'Expression', 'Modifier',
'StatementKind', 'ExpressionKind', 'ModifierKind', 'Visibility',
'TypeKind', 'Person', 'ChangeKind'),
suffix=r'\b', prefix=r'\b')
operators = ('->', ':=', ':', '=', '<<', '!', '++', '||',
'&&', '+', '-', '*', ">", "<")
string_sep = ('`', '\"')
built_in_functions = words(
(
# Array functions
'new', 'sort',
# Date & Time functions
'yearof', 'dayofyear', 'hourof', 'minuteof', 'secondof', 'now',
'addday', 'addmonth', 'addweek', 'addyear', 'dayofmonth', 'dayofweek',
'dayofyear', 'formattime', 'trunctoday', 'trunctohour', 'trunctominute',
'trunctomonth', 'trunctosecond', 'trunctoyear',
# Map functions
'clear', 'haskey', 'keys', 'lookup', 'remove', 'values',
# Math functions
'abs', 'acos', 'acosh', 'asin', 'asinh', 'atan', 'atan2', 'atanh',
'ceil', 'cos', 'cosh', 'exp', 'floor', 'highbit', 'isfinite', 'isinf',
'isnan', 'isnormal', 'log', 'log10', 'max', 'min', 'nrand', 'pow',
'rand', 'round', 'sin', 'sinh', 'sqrt', 'tan', 'tanh', 'trunc',
# Other functions
'def', 'hash', 'len',
# Set functions
'add', 'contains', 'remove',
# String functions
'format', 'lowercase', 'match', 'matchposns', 'matchstrs', 'regex',
'split', 'splitall', 'splitn', 'strfind', 'strreplace', 'strrfind',
'substring', 'trim', 'uppercase',
# Type Conversion functions
'bool', 'float', 'int', 'string', 'time',
# Domain-Specific functions
'getast', 'getsnapshot', 'hasfiletype', 'isfixingrevision', 'iskind',
'isliteral',
),
prefix=r'\b',
suffix=r'\(')
tokens = {
'root': [
(r'#.*?$', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
(reserved, Keyword.Reserved),
(built_in_functions, Name.Function),
(keywords, Keyword.Type),
(classes, Name.Classes),
(words(operators), Operator),
(r'[][(),;{}\\.]', Punctuation),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r"`(\\\\|\\[^\\]|[^`\\])*`", String.Backtick),
(words(string_sep), String.Delimiter),
(r'[a-zA-Z_]+', Name.Variable),
(r'[0-9]+', Number.Integer),
(r'\s+', Whitespace), # Whitespace
]
}
| 3,915 | Python | 38.959183 | 84 | 0.490421 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/graph.py | """
pygments.lexers.graph
~~~~~~~~~~~~~~~~~~~~~
Lexers for graph query languages.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, include, bygroups, using, this, words
from pygments.token import Keyword, Punctuation, Comment, Operator, Name,\
String, Number, Whitespace
__all__ = ['CypherLexer']
class CypherLexer(RegexLexer):
"""
    For the Cypher Query Language, as implemented in Neo4j 3.3.
.. versionadded:: 2.0
"""
name = 'Cypher'
url = 'https://neo4j.com/docs/developer-manual/3.3/cypher/'
aliases = ['cypher']
filenames = ['*.cyp', '*.cypher']
flags = re.MULTILINE | re.IGNORECASE
tokens = {
'root': [
include('comment'),
include('clauses'),
include('keywords'),
include('relations'),
include('strings'),
include('whitespace'),
include('barewords'),
],
'comment': [
(r'^.*//.*$', Comment.Single),
],
'keywords': [
(r'(create|order|match|limit|set|skip|start|return|with|where|'
r'delete|foreach|not|by|true|false)\b', Keyword),
],
'clauses': [
# based on https://neo4j.com/docs/cypher-refcard/3.3/
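            # multi-word clauses are matched as keyword/whitespace/keyword
            # groups, e.g. ``CREATE INDEX``, ``ORDER BY`` or ``LOAD CSV FROM``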
(r'(create)(\s+)(index|unique)\b',
bygroups(Keyword, Whitespace, Keyword)),
            (r'(drop)(\s+)(constraint|index)(\s+)(on)\b',
bygroups(Keyword, Whitespace, Keyword, Whitespace, Keyword)),
(r'(ends)(\s+)(with)\b',
bygroups(Keyword, Whitespace, Keyword)),
(r'(is)(\s+)(node)(\s+)(key)\b',
bygroups(Keyword, Whitespace, Keyword, Whitespace, Keyword)),
(r'(is)(\s+)(null|unique)\b',
bygroups(Keyword, Whitespace, Keyword)),
(r'(load)(\s+)(csv)(\s+)(from)\b',
bygroups(Keyword, Whitespace, Keyword, Whitespace, Keyword)),
(r'(on)(\s+)(match|create)\b',
bygroups(Keyword, Whitespace, Keyword)),
(r'(optional)(\s+)(match)\b',
bygroups(Keyword, Whitespace, Keyword)),
(r'(order)(\s+)(by)\b',
bygroups(Keyword, Whitespace, Keyword)),
(r'(starts)(\s+)(with)\b',
bygroups(Keyword, Whitespace, Keyword)),
(r'(union)(\s+)(all)\b',
bygroups(Keyword, Whitespace, Keyword)),
(r'(using)(\s+)(periodic)(\s+)(commit)\b',
bygroups(Keyword, Whitespace, Keyword, Whitespace, Keyword)),
(words((
'all', 'any', 'as', 'asc', 'ascending', 'assert', 'call', 'case', 'create',
'delete', 'desc', 'descending', 'distinct', 'end', 'fieldterminator',
'foreach', 'in', 'limit', 'match', 'merge', 'none', 'not', 'null',
'remove', 'return', 'set', 'skip', 'single', 'start', 'then', 'union',
'unwind', 'yield', 'where', 'when', 'with'), suffix=r'\b'), Keyword),
],
'relations': [
(r'(-\[)(.*?)(\]->)', bygroups(Operator, using(this), Operator)),
(r'(<-\[)(.*?)(\]-)', bygroups(Operator, using(this), Operator)),
(r'(-\[)(.*?)(\]-)', bygroups(Operator, using(this), Operator)),
(r'-->|<--|\[|\]', Operator),
(r'<|>|<>|=|<=|=>|\(|\)|\||:|,|;', Punctuation),
(r'[.*{}]', Punctuation),
],
'strings': [
(r'"(?:\\[tbnrf\'"\\]|[^\\"])*"', String),
(r'`(?:``|[^`])+`', Name.Variable),
],
'whitespace': [
(r'\s+', Whitespace),
],
'barewords': [
(r'[a-z]\w*', Name),
(r'\d+', Number),
],
}
| 3,861 | Python | 35.433962 | 91 | 0.47656 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/tal.py | """
pygments.lexers.tal
~~~~~~~~~~~~~~~~~~~
Lexer for Uxntal
.. versionadded:: 2.12
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, words
from pygments.token import Comment, Keyword, Name, String, Number, \
Punctuation, Whitespace, Literal
__all__ = ['TalLexer']
class TalLexer(RegexLexer):
"""
For `Uxntal <https://wiki.xxiivv.com/site/uxntal.html>`_ source code.
.. versionadded:: 2.12
"""
name = 'Tal'
aliases = ['tal', 'uxntal']
filenames = ['*.tal']
mimetypes = ['text/x-uxntal']
instructions = [
'BRK', 'LIT', 'INC', 'POP', 'DUP', 'NIP', 'SWP', 'OVR', 'ROT',
'EQU', 'NEQ', 'GTH', 'LTH', 'JMP', 'JCN', 'JSR', 'STH',
'LDZ', 'STZ', 'LDR', 'STR', 'LDA', 'STA', 'DEI', 'DEO',
'ADD', 'SUB', 'MUL', 'DIV', 'AND', 'ORA', 'EOR', 'SFT'
]
tokens = {
# the comment delimiters must not be adjacent to non-space characters.
# this means ( foo ) is a valid comment but (foo) is not. this also
# applies to nested comments.
'comment': [
(r'(?<!\S)\((?!\S)', Comment.Multiline, '#push'), # nested comments
(r'(?<!\S)\)(?!\S)', Comment.Multiline, '#pop'), # nested comments
(r'[^()]+', Comment.Multiline), # comments
(r'[()]+', Comment.Multiline), # comments
],
'root': [
(r'\s+', Whitespace), # spaces
(r'(?<!\S)\((?!\S)', Comment.Multiline, 'comment'), # comments
(words(instructions, prefix=r'(?<!\S)', suffix=r'2?k?r?(?!\S)'),
Keyword.Reserved), # instructions
(r'[][{}](?!\S)', Punctuation), # delimiters
(r'#([0-9a-f]{2}){1,2}(?!\S)', Number.Hex), # integer
(r'"\S+', String), # raw string
(r"'\S(?!\S)", String.Char), # raw char
(r'([0-9a-f]{2}){1,2}(?!\S)', Literal), # raw integer
(r'[|$][0-9a-f]{1,4}(?!\S)', Keyword.Declaration), # abs/rel pad
(r'%\S+', Name.Decorator), # macro
(r'@\S+', Name.Function), # label
(r'&\S+', Name.Label), # sublabel
(r'/\S+', Name.Tag), # spacer
(r'\.\S+', Name.Variable.Magic), # zero page addr
(r',\S+', Name.Variable.Instance), # rel addr
(r';\S+', Name.Variable.Global), # abs addr
(r':\S+', Literal), # raw addr
(r'~\S+', Keyword.Namespace), # include
(r'\S+', Name),
]
}
def analyse_text(text):
return '|0100' in text[:500]
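if __name__ == '__main__':
    # Illustrative usage sketch, not part of the lexer itself: it exercises the
    # whitespace-sensitive comment rule documented in the tokens table above.
    # The Uxntal snippet is made up for demonstration purposes.
    demo = '|0100 ( a comment ( nested ) here ) #05 BRK (not-a-comment)\n'
    for token, value in TalLexer().get_tokens(demo):
        print(token, repr(value))
    # "( a comment ( nested ) here )" comes back as Comment.Multiline pieces,
    # while "(not-a-comment)" falls through to the generic Name rule because
    # its parentheses are not surrounded by whitespace.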
| 2,639 | Python | 34.2 | 79 | 0.486927 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/dylan.py | """
pygments.lexers.dylan
~~~~~~~~~~~~~~~~~~~~~
Lexers for the Dylan language.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import Lexer, RegexLexer, bygroups, do_insertions, \
default, line_re
from pygments.token import Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Generic, Literal, Whitespace
__all__ = ['DylanLexer', 'DylanConsoleLexer', 'DylanLidLexer']
class DylanLexer(RegexLexer):
"""
For the Dylan language.
.. versionadded:: 0.7
"""
name = 'Dylan'
url = 'http://www.opendylan.org/'
aliases = ['dylan']
filenames = ['*.dylan', '*.dyl', '*.intr']
mimetypes = ['text/x-dylan']
flags = re.IGNORECASE
builtins = {
'subclass', 'abstract', 'block', 'concrete', 'constant', 'class',
'compiler-open', 'compiler-sideways', 'domain', 'dynamic',
'each-subclass', 'exception', 'exclude', 'function', 'generic',
'handler', 'inherited', 'inline', 'inline-only', 'instance',
'interface', 'import', 'keyword', 'library', 'macro', 'method',
'module', 'open', 'primary', 'required', 'sealed', 'sideways',
'singleton', 'slot', 'thread', 'variable', 'virtual'}
keywords = {
'above', 'afterwards', 'begin', 'below', 'by', 'case', 'cleanup',
'create', 'define', 'else', 'elseif', 'end', 'export', 'finally',
'for', 'from', 'if', 'in', 'let', 'local', 'otherwise', 'rename',
'select', 'signal', 'then', 'to', 'unless', 'until', 'use', 'when',
'while'}
operators = {
'~', '+', '-', '*', '|', '^', '=', '==', '~=', '~==', '<', '<=',
'>', '>=', '&', '|'}
functions = {
'abort', 'abs', 'add', 'add!', 'add-method', 'add-new', 'add-new!',
'all-superclasses', 'always', 'any?', 'applicable-method?', 'apply',
'aref', 'aref-setter', 'as', 'as-lowercase', 'as-lowercase!',
'as-uppercase', 'as-uppercase!', 'ash', 'backward-iteration-protocol',
'break', 'ceiling', 'ceiling/', 'cerror', 'check-type', 'choose',
'choose-by', 'complement', 'compose', 'concatenate', 'concatenate-as',
'condition-format-arguments', 'condition-format-string', 'conjoin',
'copy-sequence', 'curry', 'default-handler', 'dimension', 'dimensions',
'direct-subclasses', 'direct-superclasses', 'disjoin', 'do',
'do-handlers', 'element', 'element-setter', 'empty?', 'error', 'even?',
'every?', 'false-or', 'fill!', 'find-key', 'find-method', 'first',
'first-setter', 'floor', 'floor/', 'forward-iteration-protocol',
'function-arguments', 'function-return-values',
'function-specializers', 'gcd', 'generic-function-mandatory-keywords',
'generic-function-methods', 'head', 'head-setter', 'identity',
'initialize', 'instance?', 'integral?', 'intersection',
'key-sequence', 'key-test', 'last', 'last-setter', 'lcm', 'limited',
'list', 'logand', 'logbit?', 'logior', 'lognot', 'logxor', 'make',
'map', 'map-as', 'map-into', 'max', 'member?', 'merge-hash-codes',
'min', 'modulo', 'negative', 'negative?', 'next-method',
'object-class', 'object-hash', 'odd?', 'one-of', 'pair', 'pop',
'pop-last', 'positive?', 'push', 'push-last', 'range', 'rank',
'rcurry', 'reduce', 'reduce1', 'remainder', 'remove', 'remove!',
'remove-duplicates', 'remove-duplicates!', 'remove-key!',
'remove-method', 'replace-elements!', 'replace-subsequence!',
'restart-query', 'return-allowed?', 'return-description',
'return-query', 'reverse', 'reverse!', 'round', 'round/',
'row-major-index', 'second', 'second-setter', 'shallow-copy',
'signal', 'singleton', 'size', 'size-setter', 'slot-initialized?',
'sort', 'sort!', 'sorted-applicable-methods', 'subsequence-position',
'subtype?', 'table-protocol', 'tail', 'tail-setter', 'third',
'third-setter', 'truncate', 'truncate/', 'type-error-expected-type',
'type-error-value', 'type-for-copy', 'type-union', 'union', 'values',
'vector', 'zero?'}
valid_name = '\\\\?[\\w!&*<>|^$%@\\-+~?/=]+'
def get_tokens_unprocessed(self, text):
for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
if token is Name:
lowercase_value = value.lower()
if lowercase_value in self.builtins:
yield index, Name.Builtin, value
continue
if lowercase_value in self.keywords:
yield index, Keyword, value
continue
if lowercase_value in self.functions:
yield index, Name.Builtin, value
continue
if lowercase_value in self.operators:
yield index, Operator, value
continue
yield index, token, value
tokens = {
'root': [
# Whitespace
(r'\s+', Whitespace),
# single line comment
(r'//.*?\n', Comment.Single),
# lid header
(r'([a-z0-9-]+)(:)([ \t]*)(.*(?:\n[ \t].+)*)',
bygroups(Name.Attribute, Operator, Whitespace, String)),
default('code') # no header match, switch to code
],
'code': [
# Whitespace
(r'\s+', Whitespace),
# single line comment
(r'(//.*?)(\n)', bygroups(Comment.Single, Whitespace)),
# multi-line comment
(r'/\*', Comment.Multiline, 'comment'),
# strings and characters
(r'"', String, 'string'),
(r"'(\\.|\\[0-7]{1,3}|\\x[a-f0-9]{1,2}|[^\\\'\n])'", String.Char),
# binary integer
(r'#b[01]+', Number.Bin),
# octal integer
(r'#o[0-7]+', Number.Oct),
# floating point
(r'[-+]?(\d*\.\d+(e[-+]?\d+)?|\d+(\.\d*)?e[-+]?\d+)', Number.Float),
# decimal integer
(r'[-+]?\d+', Number.Integer),
# hex integer
(r'#x[0-9a-f]+', Number.Hex),
# Macro parameters
(r'(\?' + valid_name + ')(:)'
r'(token|name|variable|expression|body|case-body|\*)',
bygroups(Name.Tag, Operator, Name.Builtin)),
(r'(\?)(:)(token|name|variable|expression|body|case-body|\*)',
bygroups(Name.Tag, Operator, Name.Builtin)),
(r'\?' + valid_name, Name.Tag),
# Punctuation
(r'(=>|::|#\(|#\[|##|\?\?|\?=|\?|[(){}\[\],.;])', Punctuation),
# Most operators are picked up as names and then re-flagged.
# This one isn't valid in a name though, so we pick it up now.
(r':=', Operator),
# Pick up #t / #f before we match other stuff with #.
(r'#[tf]', Literal),
# #"foo" style keywords
(r'#"', String.Symbol, 'keyword'),
# #rest, #key, #all-keys, etc.
(r'#[a-z0-9-]+', Keyword),
# required-init-keyword: style keywords.
(valid_name + ':', Keyword),
# class names
('<' + valid_name + '>', Name.Class),
# define variable forms.
(r'\*' + valid_name + r'\*', Name.Variable.Global),
# define constant forms.
(r'\$' + valid_name, Name.Constant),
# everything else. We re-flag some of these in the method above.
(valid_name, Name),
],
'comment': [
(r'[^*/]+', Comment.Multiline),
(r'/\*', Comment.Multiline, '#push'),
(r'\*/', Comment.Multiline, '#pop'),
(r'[*/]', Comment.Multiline)
],
'keyword': [
(r'"', String.Symbol, '#pop'),
(r'[^\\"]+', String.Symbol), # all other characters
],
'string': [
(r'"', String, '#pop'),
(r'\\([\\abfnrtv"\']|x[a-f0-9]{2,4}|[0-7]{1,3})', String.Escape),
(r'[^\\"\n]+', String), # all other characters
(r'\\\n', String), # line continuation
(r'\\', String), # stray backslash
]
}
class DylanLidLexer(RegexLexer):
"""
For Dylan LID (Library Interchange Definition) files.
.. versionadded:: 1.6
"""
name = 'DylanLID'
aliases = ['dylan-lid', 'lid']
filenames = ['*.lid', '*.hdp']
mimetypes = ['text/x-dylan-lid']
flags = re.IGNORECASE
tokens = {
'root': [
# Whitespace
(r'\s+', Whitespace),
# single line comment
(r'(//.*?)(\n)', bygroups(Comment.Single, Whitespace)),
# lid header
(r'(.*?)(:)([ \t]*)(.*(?:\n[ \t].+)*)',
bygroups(Name.Attribute, Operator, Whitespace, String)),
]
}
class DylanConsoleLexer(Lexer):
"""
For Dylan interactive console output like:
.. sourcecode:: dylan-console
? let a = 1;
=> 1
? a
=> 1
This is based on a copy of the RubyConsoleLexer.
.. versionadded:: 1.6
"""
name = 'Dylan session'
aliases = ['dylan-console', 'dylan-repl']
filenames = ['*.dylan-console']
mimetypes = ['text/x-dylan-console']
_prompt_re = re.compile(r'\?| ')
def get_tokens_unprocessed(self, text):
dylexer = DylanLexer(**self.options)
curcode = ''
insertions = []
for match in line_re.finditer(text):
line = match.group()
m = self._prompt_re.match(line)
if m is not None:
end = m.end()
insertions.append((len(curcode),
[(0, Generic.Prompt, line[:end])]))
curcode += line[end:]
else:
if curcode:
yield from do_insertions(insertions,
dylexer.get_tokens_unprocessed(curcode))
curcode = ''
insertions = []
yield match.start(), Generic.Output, line
if curcode:
yield from do_insertions(insertions,
dylexer.get_tokens_unprocessed(curcode))
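if __name__ == '__main__':
    # Illustrative usage sketch, not part of the lexers above: feeding the
    # sample session from the DylanConsoleLexer docstring through the lexer
    # shows how prompt lines are split into Generic.Prompt plus Dylan code,
    # while the "=>" lines come back as Generic.Output.
    session = '? let a = 1;\n=> 1\n? a\n=> 1\n'
    for token, value in DylanConsoleLexer().get_tokens(session):
        print(token, repr(value))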
| 10,380 | Python | 35.045139 | 85 | 0.495665 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/graphviz.py | """
pygments.lexers.graphviz
~~~~~~~~~~~~~~~~~~~~~~~~
Lexer for the DOT language (graphviz).
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, bygroups
from pygments.token import Comment, Keyword, Operator, Name, String, Number, \
Punctuation, Whitespace
__all__ = ['GraphvizLexer']
class GraphvizLexer(RegexLexer):
"""
For graphviz DOT graph description language.
.. versionadded:: 2.8
"""
name = 'Graphviz'
url = 'https://www.graphviz.org/doc/info/lang.html'
aliases = ['graphviz', 'dot']
filenames = ['*.gv', '*.dot']
mimetypes = ['text/x-graphviz', 'text/vnd.graphviz']
tokens = {
'root': [
(r'\s+', Whitespace),
(r'(#|//).*?$', Comment.Single),
(r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
(r'(?i)(node|edge|graph|digraph|subgraph|strict)\b', Keyword),
(r'--|->', Operator),
(r'[{}[\]:;,]', Punctuation),
(r'(\b\D\w*)(\s*)(=)(\s*)',
bygroups(Name.Attribute, Whitespace, Punctuation, Whitespace),
'attr_id'),
(r'\b(n|ne|e|se|s|sw|w|nw|c|_)\b', Name.Builtin),
(r'\b\D\w*', Name.Tag), # node
(r'[-]?((\.[0-9]+)|([0-9]+(\.[0-9]*)?))', Number),
(r'"(\\"|[^"])*?"', Name.Tag), # quoted node
(r'<', Punctuation, 'xml'),
],
'attr_id': [
(r'\b\D\w*', String, '#pop'),
(r'[-]?((\.[0-9]+)|([0-9]+(\.[0-9]*)?))', Number, '#pop'),
(r'"(\\"|[^"])*?"', String.Double, '#pop'),
(r'<', Punctuation, ('#pop', 'xml')),
],
'xml': [
(r'<', Punctuation, '#push'),
(r'>', Punctuation, '#pop'),
(r'\s+', Whitespace),
(r'[^<>\s]', Name.Tag),
]
}
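if __name__ == '__main__':
    # Illustrative usage sketch, not part of the lexer: tokenising a minimal
    # DOT graph. The graph itself is made up for demonstration purposes.
    demo = 'digraph G {\n  a -> b [label="demo"];\n}\n'
    for token, value in GraphvizLexer().get_tokens(demo):
        print(token, repr(value))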
| 1,935 | Python | 31.266666 | 78 | 0.449096 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/tnt.py | """
pygments.lexers.tnt
~~~~~~~~~~~~~~~~~~~
Lexer for Typographic Number Theory.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import Lexer
from pygments.token import Text, Comment, Operator, Keyword, Name, Number, \
Punctuation, Error
__all__ = ['TNTLexer']
class TNTLexer(Lexer):
"""
Lexer for Typographic Number Theory, as described in the book
Gödel, Escher, Bach, by Douglas R. Hofstadter
.. versionadded:: 2.7
"""
name = 'Typographic Number Theory'
url = 'https://github.com/Kenny2github/language-tnt'
aliases = ['tnt']
filenames = ['*.tnt']
cur = []
LOGIC = set('⊃→]&∧^|∨Vv')
OPERATORS = set('+.⋅*')
VARIABLES = set('abcde')
PRIMES = set("'′")
NEGATORS = set('~!')
QUANTIFIERS = set('AE∀∃')
NUMBERS = set('0123456789')
WHITESPACE = set('\t \v\n')
RULES = re.compile('''(?xi)
joining | separation | double-tilde | fantasy\\ rule
| carry[- ]over(?:\\ of)?(?:\\ line)?\\ ([0-9]+) | detachment
| contrapositive | De\\ Morgan | switcheroo
| specification | generalization | interchange
| existence | symmetry | transitivity
| add\\ S | drop\\ S | induction
| axiom\\ ([1-5]) | premise | push | pop
''')
LINENOS = re.compile(r'(?:[0-9]+)(?:(?:, ?|,? and )(?:[0-9]+))*')
COMMENT = re.compile(r'\[[^\n\]]+\]')
def __init__(self, *args, **kwargs):
Lexer.__init__(self, *args, **kwargs)
self.cur = []
def whitespace(self, start, text, required=False):
"""Tokenize whitespace."""
end = start
try:
while text[end] in self.WHITESPACE:
end += 1
except IndexError:
end = len(text)
if required and end == start:
raise AssertionError
if end != start:
self.cur.append((start, Text, text[start:end]))
return end
def variable(self, start, text):
"""Tokenize a variable."""
if text[start] not in self.VARIABLES:
raise AssertionError
end = start+1
while text[end] in self.PRIMES:
end += 1
self.cur.append((start, Name.Variable, text[start:end]))
return end
def term(self, start, text):
"""Tokenize a term."""
if text[start] == 'S': # S...S(...) or S...0
end = start+1
while text[end] == 'S':
end += 1
self.cur.append((start, Number.Integer, text[start:end]))
return self.term(end, text)
if text[start] == '0': # the singleton 0
self.cur.append((start, Number.Integer, text[start]))
return start+1
if text[start] in self.VARIABLES: # a''...
return self.variable(start, text)
if text[start] == '(': # (...+...)
self.cur.append((start, Punctuation, text[start]))
start = self.term(start+1, text)
if text[start] not in self.OPERATORS:
raise AssertionError
self.cur.append((start, Operator, text[start]))
start = self.term(start+1, text)
if text[start] != ')':
raise AssertionError
self.cur.append((start, Punctuation, text[start]))
return start+1
raise AssertionError # no matches
def formula(self, start, text):
"""Tokenize a formula."""
if text[start] in self.NEGATORS: # ~<...>
end = start+1
while text[end] in self.NEGATORS:
end += 1
self.cur.append((start, Operator, text[start:end]))
return self.formula(end, text)
if text[start] in self.QUANTIFIERS: # Aa:<...>
self.cur.append((start, Keyword.Declaration, text[start]))
start = self.variable(start+1, text)
if text[start] != ':':
raise AssertionError
self.cur.append((start, Punctuation, text[start]))
return self.formula(start+1, text)
if text[start] == '<': # <...&...>
self.cur.append((start, Punctuation, text[start]))
start = self.formula(start+1, text)
if text[start] not in self.LOGIC:
raise AssertionError
self.cur.append((start, Operator, text[start]))
start = self.formula(start+1, text)
if text[start] != '>':
raise AssertionError
self.cur.append((start, Punctuation, text[start]))
return start+1
# ...=...
start = self.term(start, text)
if text[start] != '=':
raise AssertionError
self.cur.append((start, Operator, text[start]))
start = self.term(start+1, text)
return start
def rule(self, start, text):
"""Tokenize a rule."""
match = self.RULES.match(text, start)
if match is None:
raise AssertionError
groups = sorted(match.regs[1:]) # exclude whole match
for group in groups:
if group[0] >= 0: # this group matched
self.cur.append((start, Keyword, text[start:group[0]]))
self.cur.append((group[0], Number.Integer,
text[group[0]:group[1]]))
if group[1] != match.end():
self.cur.append((group[1], Keyword,
text[group[1]:match.end()]))
break
else:
self.cur.append((start, Keyword, text[start:match.end()]))
return match.end()
def lineno(self, start, text):
"""Tokenize a line referral."""
end = start
while text[end] not in self.NUMBERS:
end += 1
self.cur.append((start, Punctuation, text[start]))
self.cur.append((start+1, Text, text[start+1:end]))
start = end
match = self.LINENOS.match(text, start)
if match is None:
raise AssertionError
if text[match.end()] != ')':
raise AssertionError
self.cur.append((match.start(), Number.Integer, match.group(0)))
self.cur.append((match.end(), Punctuation, text[match.end()]))
return match.end() + 1
def error_till_line_end(self, start, text):
"""Mark everything from ``start`` to the end of the line as Error."""
end = start
try:
while text[end] != '\n': # there's whitespace in rules
end += 1
except IndexError:
end = len(text)
if end != start:
self.cur.append((start, Error, text[start:end]))
end = self.whitespace(end, text)
return end
def get_tokens_unprocessed(self, text):
"""Returns a list of TNT tokens."""
self.cur = []
start = end = self.whitespace(0, text)
while start <= end < len(text):
try:
# try line number
while text[end] in self.NUMBERS:
end += 1
if end != start: # actual number present
self.cur.append((start, Number.Integer, text[start:end]))
# whitespace is required after a line number
orig = len(self.cur)
try:
start = end = self.whitespace(end, text, True)
except AssertionError:
del self.cur[orig:]
start = end = self.error_till_line_end(end, text)
continue
# at this point it could be a comment
match = self.COMMENT.match(text, start)
if match is not None:
self.cur.append((start, Comment, text[start:match.end()]))
start = end = match.end()
# anything after the closing bracket is invalid
start = end = self.error_till_line_end(start, text)
# do not attempt to process the rest
continue
del match
if text[start] in '[]': # fantasy push or pop
self.cur.append((start, Keyword, text[start]))
start += 1
end += 1
else:
# one formula, possibly containing subformulae
orig = len(self.cur)
try:
start = end = self.formula(start, text)
except (AssertionError, RecursionError): # not well-formed
del self.cur[orig:]
while text[end] not in self.WHITESPACE:
end += 1
self.cur.append((start, Error, text[start:end]))
start = end
# skip whitespace after formula
orig = len(self.cur)
try:
start = end = self.whitespace(end, text, True)
except AssertionError:
del self.cur[orig:]
start = end = self.error_till_line_end(start, text)
continue
# rule proving this formula a theorem
orig = len(self.cur)
try:
start = end = self.rule(start, text)
except AssertionError:
del self.cur[orig:]
start = end = self.error_till_line_end(start, text)
continue
# skip whitespace after rule
start = end = self.whitespace(end, text)
# line marker
if text[start] == '(':
orig = len(self.cur)
try:
start = end = self.lineno(start, text)
except AssertionError:
del self.cur[orig:]
start = end = self.error_till_line_end(start, text)
continue
start = end = self.whitespace(start, text)
except IndexError:
try:
del self.cur[orig:]
except NameError:
pass # if orig was never defined, fine
self.error_till_line_end(start, text)
return self.cur
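if __name__ == '__main__':
    # Illustrative usage sketch, not part of the lexer: a tiny two-line TNT
    # derivation in the ASCII spelling ("A" for the universal quantifier).
    # The derivation text is made up for demonstration purposes.
    derivation = 'Aa:~Sa=0\taxiom 1\n~S0=0\tspecification (line 1)\n'
    for index, token, value in TNTLexer().get_tokens_unprocessed(derivation):
        print(index, token, repr(value))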
| 10,440 | Python | 37.386029 | 79 | 0.494444 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/business.py | """
pygments.lexers.business
~~~~~~~~~~~~~~~~~~~~~~~~
Lexers for "business-oriented" languages.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, include, words, bygroups
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Error, Whitespace
from pygments.lexers._openedge_builtins import OPENEDGEKEYWORDS
__all__ = ['CobolLexer', 'CobolFreeformatLexer', 'ABAPLexer', 'OpenEdgeLexer',
'GoodDataCLLexer', 'MaqlLexer']
class CobolLexer(RegexLexer):
"""
Lexer for OpenCOBOL code.
.. versionadded:: 1.6
"""
name = 'COBOL'
aliases = ['cobol']
filenames = ['*.cob', '*.COB', '*.cpy', '*.CPY']
mimetypes = ['text/x-cobol']
flags = re.IGNORECASE | re.MULTILINE
# Data Types: by PICTURE and USAGE
# Operators: **, *, +, -, /, <, >, <=, >=, =, <>
# Logical (?): NOT, AND, OR
# Reserved words:
# http://opencobol.add1tocobol.com/#reserved-words
# Intrinsics:
# http://opencobol.add1tocobol.com/#does-opencobol-implement-any-intrinsic-functions
tokens = {
'root': [
include('comment'),
include('strings'),
include('core'),
include('nums'),
(r'[a-z0-9]([\w\-]*[a-z0-9]+)?', Name.Variable),
# (r'[\s]+', Text),
(r'[ \t]+', Whitespace),
],
'comment': [
(r'(^.{6}[*/].*\n|^.{6}|\*>.*\n)', Comment),
],
'core': [
# Figurative constants
(r'(^|(?<=[^\w\-]))(ALL\s+)?'
r'((ZEROES)|(HIGH-VALUE|LOW-VALUE|QUOTE|SPACE|ZERO)(S)?)'
r'\s*($|(?=[^\w\-]))',
Name.Constant),
            # Reserved words: STATEMENTS and other keywords shown in bold
(words((
'ACCEPT', 'ADD', 'ALLOCATE', 'CALL', 'CANCEL', 'CLOSE', 'COMPUTE',
'CONFIGURATION', 'CONTINUE', 'DATA', 'DELETE', 'DISPLAY', 'DIVIDE',
'DIVISION', 'ELSE', 'END', 'END-ACCEPT',
'END-ADD', 'END-CALL', 'END-COMPUTE', 'END-DELETE', 'END-DISPLAY',
'END-DIVIDE', 'END-EVALUATE', 'END-IF', 'END-MULTIPLY', 'END-OF-PAGE',
'END-PERFORM', 'END-READ', 'END-RETURN', 'END-REWRITE', 'END-SEARCH',
'END-START', 'END-STRING', 'END-SUBTRACT', 'END-UNSTRING', 'END-WRITE',
'ENVIRONMENT', 'EVALUATE', 'EXIT', 'FD', 'FILE', 'FILE-CONTROL', 'FOREVER',
'FREE', 'GENERATE', 'GO', 'GOBACK', 'IDENTIFICATION', 'IF', 'INITIALIZE',
'INITIATE', 'INPUT-OUTPUT', 'INSPECT', 'INVOKE', 'I-O-CONTROL', 'LINKAGE',
'LOCAL-STORAGE', 'MERGE', 'MOVE', 'MULTIPLY', 'OPEN', 'PERFORM',
'PROCEDURE', 'PROGRAM-ID', 'RAISE', 'READ', 'RELEASE', 'RESUME',
'RETURN', 'REWRITE', 'SCREEN', 'SD', 'SEARCH', 'SECTION', 'SET',
'SORT', 'START', 'STOP', 'STRING', 'SUBTRACT', 'SUPPRESS',
'TERMINATE', 'THEN', 'UNLOCK', 'UNSTRING', 'USE', 'VALIDATE',
'WORKING-STORAGE', 'WRITE'), prefix=r'(^|(?<=[^\w\-]))',
suffix=r'\s*($|(?=[^\w\-]))'),
Keyword.Reserved),
# Reserved words
(words((
'ACCESS', 'ADDRESS', 'ADVANCING', 'AFTER', 'ALL',
'ALPHABET', 'ALPHABETIC', 'ALPHABETIC-LOWER', 'ALPHABETIC-UPPER',
                'ALPHANUMERIC', 'ALPHANUMERIC-EDITED', 'ALSO', 'ALTER', 'ALTERNATE',
'ANY', 'ARE', 'AREA', 'AREAS', 'ARGUMENT-NUMBER', 'ARGUMENT-VALUE', 'AS',
'ASCENDING', 'ASSIGN', 'AT', 'AUTO', 'AUTO-SKIP', 'AUTOMATIC',
'AUTOTERMINATE', 'BACKGROUND-COLOR', 'BASED', 'BEEP', 'BEFORE', 'BELL',
'BLANK', 'BLINK', 'BLOCK', 'BOTTOM', 'BY', 'BYTE-LENGTH', 'CHAINING',
'CHARACTER', 'CHARACTERS', 'CLASS', 'CODE', 'CODE-SET', 'COL',
'COLLATING', 'COLS', 'COLUMN', 'COLUMNS', 'COMMA', 'COMMAND-LINE',
'COMMIT', 'COMMON', 'CONSTANT', 'CONTAINS', 'CONTENT', 'CONTROL',
'CONTROLS', 'CONVERTING', 'COPY', 'CORR', 'CORRESPONDING', 'COUNT', 'CRT',
'CURRENCY', 'CURSOR', 'CYCLE', 'DATE', 'DAY', 'DAY-OF-WEEK', 'DE',
'DEBUGGING', 'DECIMAL-POINT', 'DECLARATIVES', 'DEFAULT', 'DELIMITED',
'DELIMITER', 'DEPENDING', 'DESCENDING', 'DETAIL', 'DISK',
'DOWN', 'DUPLICATES', 'DYNAMIC', 'EBCDIC',
'ENTRY', 'ENVIRONMENT-NAME', 'ENVIRONMENT-VALUE', 'EOL', 'EOP',
'EOS', 'ERASE', 'ERROR', 'ESCAPE', 'EXCEPTION',
'EXCLUSIVE', 'EXTEND', 'EXTERNAL', 'FILE-ID', 'FILLER', 'FINAL',
'FIRST', 'FIXED', 'FLOAT-LONG', 'FLOAT-SHORT',
'FOOTING', 'FOR', 'FOREGROUND-COLOR', 'FORMAT', 'FROM', 'FULL',
'FUNCTION', 'FUNCTION-ID', 'GIVING', 'GLOBAL', 'GROUP',
'HEADING', 'HIGHLIGHT', 'I-O', 'ID',
'IGNORE', 'IGNORING', 'IN', 'INDEX', 'INDEXED', 'INDICATE',
'INITIAL', 'INITIALIZED', 'INPUT', 'INTO', 'INTRINSIC', 'INVALID',
'IS', 'JUST', 'JUSTIFIED', 'KEY', 'LABEL',
'LAST', 'LEADING', 'LEFT', 'LENGTH', 'LIMIT', 'LIMITS', 'LINAGE',
'LINAGE-COUNTER', 'LINE', 'LINES', 'LOCALE', 'LOCK',
'LOWLIGHT', 'MANUAL', 'MEMORY', 'MINUS', 'MODE', 'MULTIPLE',
'NATIONAL', 'NATIONAL-EDITED', 'NATIVE', 'NEGATIVE', 'NEXT', 'NO',
'NULL', 'NULLS', 'NUMBER', 'NUMBERS', 'NUMERIC', 'NUMERIC-EDITED',
'OBJECT-COMPUTER', 'OCCURS', 'OF', 'OFF', 'OMITTED', 'ON', 'ONLY',
'OPTIONAL', 'ORDER', 'ORGANIZATION', 'OTHER', 'OUTPUT', 'OVERFLOW',
'OVERLINE', 'PACKED-DECIMAL', 'PADDING', 'PAGE', 'PARAGRAPH',
'PLUS', 'POINTER', 'POSITION', 'POSITIVE', 'PRESENT', 'PREVIOUS',
'PRINTER', 'PRINTING', 'PROCEDURE-POINTER', 'PROCEDURES',
'PROCEED', 'PROGRAM', 'PROGRAM-POINTER', 'PROMPT', 'QUOTE',
'QUOTES', 'RANDOM', 'RD', 'RECORD', 'RECORDING', 'RECORDS', 'RECURSIVE',
'REDEFINES', 'REEL', 'REFERENCE', 'RELATIVE', 'REMAINDER', 'REMOVAL',
'RENAMES', 'REPLACING', 'REPORT', 'REPORTING', 'REPORTS', 'REPOSITORY',
'REQUIRED', 'RESERVE', 'RETURNING', 'REVERSE-VIDEO', 'REWIND',
'RIGHT', 'ROLLBACK', 'ROUNDED', 'RUN', 'SAME', 'SCROLL',
'SECURE', 'SEGMENT-LIMIT', 'SELECT', 'SENTENCE', 'SEPARATE',
'SEQUENCE', 'SEQUENTIAL', 'SHARING', 'SIGN', 'SIGNED', 'SIGNED-INT',
'SIGNED-LONG', 'SIGNED-SHORT', 'SIZE', 'SORT-MERGE', 'SOURCE',
'SOURCE-COMPUTER', 'SPECIAL-NAMES', 'STANDARD',
'STANDARD-1', 'STANDARD-2', 'STATUS', 'SUBKEY', 'SUM',
'SYMBOLIC', 'SYNC', 'SYNCHRONIZED', 'TALLYING', 'TAPE',
'TEST', 'THROUGH', 'THRU', 'TIME', 'TIMES', 'TO', 'TOP', 'TRAILING',
'TRANSFORM', 'TYPE', 'UNDERLINE', 'UNIT', 'UNSIGNED',
'UNSIGNED-INT', 'UNSIGNED-LONG', 'UNSIGNED-SHORT', 'UNTIL', 'UP',
'UPDATE', 'UPON', 'USAGE', 'USING', 'VALUE', 'VALUES', 'VARYING',
'WAIT', 'WHEN', 'WITH', 'WORDS', 'YYYYDDD', 'YYYYMMDD'),
prefix=r'(^|(?<=[^\w\-]))', suffix=r'\s*($|(?=[^\w\-]))'),
Keyword.Pseudo),
# inactive reserved words
(words((
'ACTIVE-CLASS', 'ALIGNED', 'ANYCASE', 'ARITHMETIC', 'ATTRIBUTE',
'B-AND', 'B-NOT', 'B-OR', 'B-XOR', 'BIT', 'BOOLEAN', 'CD', 'CENTER',
'CF', 'CH', 'CHAIN', 'CLASS-ID', 'CLASSIFICATION', 'COMMUNICATION',
'CONDITION', 'DATA-POINTER', 'DESTINATION', 'DISABLE', 'EC', 'EGI',
'EMI', 'ENABLE', 'END-RECEIVE', 'ENTRY-CONVENTION', 'EO', 'ESI',
'EXCEPTION-OBJECT', 'EXPANDS', 'FACTORY', 'FLOAT-BINARY-16',
'FLOAT-BINARY-34', 'FLOAT-BINARY-7', 'FLOAT-DECIMAL-16',
'FLOAT-DECIMAL-34', 'FLOAT-EXTENDED', 'FORMAT', 'FUNCTION-POINTER',
'GET', 'GROUP-USAGE', 'IMPLEMENTS', 'INFINITY', 'INHERITS',
'INTERFACE', 'INTERFACE-ID', 'INVOKE', 'LC_ALL', 'LC_COLLATE',
'LC_CTYPE', 'LC_MESSAGES', 'LC_MONETARY', 'LC_NUMERIC', 'LC_TIME',
'LINE-COUNTER', 'MESSAGE', 'METHOD', 'METHOD-ID', 'NESTED', 'NONE',
'NORMAL', 'OBJECT', 'OBJECT-REFERENCE', 'OPTIONS', 'OVERRIDE',
'PAGE-COUNTER', 'PF', 'PH', 'PROPERTY', 'PROTOTYPE', 'PURGE',
'QUEUE', 'RAISE', 'RAISING', 'RECEIVE', 'RELATION', 'REPLACE',
'REPRESENTS-NOT-A-NUMBER', 'RESET', 'RESUME', 'RETRY', 'RF', 'RH',
'SECONDS', 'SEGMENT', 'SELF', 'SEND', 'SOURCES', 'STATEMENT',
'STEP', 'STRONG', 'SUB-QUEUE-1', 'SUB-QUEUE-2', 'SUB-QUEUE-3',
'SUPER', 'SYMBOL', 'SYSTEM-DEFAULT', 'TABLE', 'TERMINAL', 'TEXT',
'TYPEDEF', 'UCS-4', 'UNIVERSAL', 'USER-DEFAULT', 'UTF-16', 'UTF-8',
'VAL-STATUS', 'VALID', 'VALIDATE', 'VALIDATE-STATUS'),
prefix=r'(^|(?<=[^\w\-]))', suffix=r'\s*($|(?=[^\w\-]))'),
Error),
# Data Types
(r'(^|(?<=[^\w\-]))'
r'(PIC\s+.+?(?=(\s|\.\s))|PICTURE\s+.+?(?=(\s|\.\s))|'
r'(COMPUTATIONAL)(-[1-5X])?|(COMP)(-[1-5X])?|'
r'BINARY-C-LONG|'
r'BINARY-CHAR|BINARY-DOUBLE|BINARY-LONG|BINARY-SHORT|'
r'BINARY)\s*($|(?=[^\w\-]))', Keyword.Type),
# Operators
(r'(\*\*|\*|\+|-|/|<=|>=|<|>|==|/=|=)', Operator),
# (r'(::)', Keyword.Declaration),
(r'([(),;:&%.])', Punctuation),
# Intrinsics
(r'(^|(?<=[^\w\-]))(ABS|ACOS|ANNUITY|ASIN|ATAN|BYTE-LENGTH|'
r'CHAR|COMBINED-DATETIME|CONCATENATE|COS|CURRENT-DATE|'
r'DATE-OF-INTEGER|DATE-TO-YYYYMMDD|DAY-OF-INTEGER|DAY-TO-YYYYDDD|'
r'EXCEPTION-(?:FILE|LOCATION|STATEMENT|STATUS)|EXP10|EXP|E|'
r'FACTORIAL|FRACTION-PART|INTEGER-OF-(?:DATE|DAY|PART)|INTEGER|'
r'LENGTH|LOCALE-(?:DATE|TIME(?:-FROM-SECONDS)?)|LOG(?:10)?|'
r'LOWER-CASE|MAX|MEAN|MEDIAN|MIDRANGE|MIN|MOD|NUMVAL(?:-C)?|'
r'ORD(?:-MAX|-MIN)?|PI|PRESENT-VALUE|RANDOM|RANGE|REM|REVERSE|'
r'SECONDS-FROM-FORMATTED-TIME|SECONDS-PAST-MIDNIGHT|SIGN|SIN|SQRT|'
r'STANDARD-DEVIATION|STORED-CHAR-LENGTH|SUBSTITUTE(?:-CASE)?|'
r'SUM|TAN|TEST-DATE-YYYYMMDD|TEST-DAY-YYYYDDD|TRIM|'
r'UPPER-CASE|VARIANCE|WHEN-COMPILED|YEAR-TO-YYYY)\s*'
r'($|(?=[^\w\-]))', Name.Function),
# Booleans
(r'(^|(?<=[^\w\-]))(true|false)\s*($|(?=[^\w\-]))', Name.Builtin),
# Comparing Operators
(r'(^|(?<=[^\w\-]))(equal|equals|ne|lt|le|gt|ge|'
r'greater|less|than|not|and|or)\s*($|(?=[^\w\-]))', Operator.Word),
],
# \"[^\"\n]*\"|\'[^\'\n]*\'
'strings': [
# apparently strings can be delimited by EOL if they are continued
# in the next line
(r'"[^"\n]*("|\n)', String.Double),
(r"'[^'\n]*('|\n)", String.Single),
],
'nums': [
(r'\d+(\s*|\.$|$)', Number.Integer),
(r'[+-]?\d*\.\d+(E[-+]?\d+)?', Number.Float),
(r'[+-]?\d+\.\d*(E[-+]?\d+)?', Number.Float),
],
}
class CobolFreeformatLexer(CobolLexer):
"""
Lexer for Free format OpenCOBOL code.
.. versionadded:: 1.6
"""
name = 'COBOLFree'
aliases = ['cobolfree']
filenames = ['*.cbl', '*.CBL']
mimetypes = []
flags = re.IGNORECASE | re.MULTILINE
tokens = {
'comment': [
(r'(\*>.*\n|^\w*\*.*$)', Comment),
],
}
class ABAPLexer(RegexLexer):
"""
Lexer for ABAP, SAP's integrated language.
.. versionadded:: 1.1
"""
name = 'ABAP'
aliases = ['abap']
filenames = ['*.abap', '*.ABAP']
mimetypes = ['text/x-abap']
flags = re.IGNORECASE | re.MULTILINE
tokens = {
'common': [
(r'\s+', Whitespace),
(r'^\*.*$', Comment.Single),
(r'\".*?\n', Comment.Single),
(r'##\w+', Comment.Special),
],
'variable-names': [
(r'<\S+>', Name.Variable),
(r'\w[\w~]*(?:(\[\])|->\*)?', Name.Variable),
],
'root': [
include('common'),
# function calls
(r'CALL\s+(?:BADI|CUSTOMER-FUNCTION|FUNCTION)',
Keyword),
(r'(CALL\s+(?:DIALOG|SCREEN|SUBSCREEN|SELECTION-SCREEN|'
r'TRANSACTION|TRANSFORMATION))\b',
Keyword),
(r'(FORM|PERFORM)(\s+)(\w+)',
bygroups(Keyword, Whitespace, Name.Function)),
(r'(PERFORM)(\s+)(\()(\w+)(\))',
bygroups(Keyword, Whitespace, Punctuation, Name.Variable, Punctuation)),
(r'(MODULE)(\s+)(\S+)(\s+)(INPUT|OUTPUT)',
bygroups(Keyword, Whitespace, Name.Function, Whitespace, Keyword)),
# method implementation
(r'(METHOD)(\s+)([\w~]+)',
bygroups(Keyword, Whitespace, Name.Function)),
# method calls
(r'(\s+)([\w\-]+)([=\-]>)([\w\-~]+)',
bygroups(Whitespace, Name.Variable, Operator, Name.Function)),
# call methodnames returning style
(r'(?<=(=|-)>)([\w\-~]+)(?=\()', Name.Function),
# text elements
(r'(TEXT)(-)(\d{3})',
bygroups(Keyword, Punctuation, Number.Integer)),
(r'(TEXT)(-)(\w{3})',
bygroups(Keyword, Punctuation, Name.Variable)),
# keywords with dashes in them.
# these need to be first, because for instance the -ID part
# of MESSAGE-ID wouldn't get highlighted if MESSAGE was
# first in the list of keywords.
(r'(ADD-CORRESPONDING|AUTHORITY-CHECK|'
r'CLASS-DATA|CLASS-EVENTS|CLASS-METHODS|CLASS-POOL|'
r'DELETE-ADJACENT|DIVIDE-CORRESPONDING|'
r'EDITOR-CALL|ENHANCEMENT-POINT|ENHANCEMENT-SECTION|EXIT-COMMAND|'
r'FIELD-GROUPS|FIELD-SYMBOLS|FIELD-SYMBOL|FUNCTION-POOL|'
r'INTERFACE-POOL|INVERTED-DATE|'
r'LOAD-OF-PROGRAM|LOG-POINT|'
r'MESSAGE-ID|MOVE-CORRESPONDING|MULTIPLY-CORRESPONDING|'
r'NEW-LINE|NEW-PAGE|NEW-SECTION|NO-EXTENSION|'
r'OUTPUT-LENGTH|PRINT-CONTROL|'
r'SELECT-OPTIONS|START-OF-SELECTION|SUBTRACT-CORRESPONDING|'
r'SYNTAX-CHECK|SYSTEM-EXCEPTIONS|'
r'TYPE-POOL|TYPE-POOLS|NO-DISPLAY'
r')\b', Keyword),
            # keyword combinations
(r'(?<![-\>])(CREATE\s+(PUBLIC|PRIVATE|DATA|OBJECT)|'
r'(PUBLIC|PRIVATE|PROTECTED)\s+SECTION|'
r'(TYPE|LIKE)\s+((LINE\s+OF|REF\s+TO|'
r'(SORTED|STANDARD|HASHED)\s+TABLE\s+OF))?|'
r'FROM\s+(DATABASE|MEMORY)|CALL\s+METHOD|'
r'(GROUP|ORDER) BY|HAVING|SEPARATED BY|'
r'GET\s+(BADI|BIT|CURSOR|DATASET|LOCALE|PARAMETER|'
r'PF-STATUS|(PROPERTY|REFERENCE)\s+OF|'
r'RUN\s+TIME|TIME\s+(STAMP)?)?|'
r'SET\s+(BIT|BLANK\s+LINES|COUNTRY|CURSOR|DATASET|EXTENDED\s+CHECK|'
r'HANDLER|HOLD\s+DATA|LANGUAGE|LEFT\s+SCROLL-BOUNDARY|'
r'LOCALE|MARGIN|PARAMETER|PF-STATUS|PROPERTY\s+OF|'
r'RUN\s+TIME\s+(ANALYZER|CLOCK\s+RESOLUTION)|SCREEN|'
             r'TITLEBAR|UPDATE\s+TASK\s+LOCAL|USER-COMMAND)|'
r'CONVERT\s+((INVERTED-)?DATE|TIME|TIME\s+STAMP|TEXT)|'
r'(CLOSE|OPEN)\s+(DATASET|CURSOR)|'
r'(TO|FROM)\s+(DATA BUFFER|INTERNAL TABLE|MEMORY ID|'
r'DATABASE|SHARED\s+(MEMORY|BUFFER))|'
r'DESCRIBE\s+(DISTANCE\s+BETWEEN|FIELD|LIST|TABLE)|'
r'FREE\s(MEMORY|OBJECT)?|'
r'PROCESS\s+(BEFORE\s+OUTPUT|AFTER\s+INPUT|'
r'ON\s+(VALUE-REQUEST|HELP-REQUEST))|'
r'AT\s+(LINE-SELECTION|USER-COMMAND|END\s+OF|NEW)|'
r'AT\s+SELECTION-SCREEN(\s+(ON(\s+(BLOCK|(HELP|VALUE)-REQUEST\s+FOR|'
r'END\s+OF|RADIOBUTTON\s+GROUP))?|OUTPUT))?|'
r'SELECTION-SCREEN:?\s+((BEGIN|END)\s+OF\s+((TABBED\s+)?BLOCK|LINE|'
r'SCREEN)|COMMENT|FUNCTION\s+KEY|'
r'INCLUDE\s+BLOCKS|POSITION|PUSHBUTTON|'
r'SKIP|ULINE)|'
r'LEAVE\s+(LIST-PROCESSING|PROGRAM|SCREEN|'
r'TO LIST-PROCESSING|TO TRANSACTION)'
r'(ENDING|STARTING)\s+AT|'
r'FORMAT\s+(COLOR|INTENSIFIED|INVERSE|HOTSPOT|INPUT|FRAMES|RESET)|'
r'AS\s+(CHECKBOX|SUBSCREEN|WINDOW)|'
r'WITH\s+(((NON-)?UNIQUE)?\s+KEY|FRAME)|'
r'(BEGIN|END)\s+OF|'
r'DELETE(\s+ADJACENT\s+DUPLICATES\sFROM)?|'
r'COMPARING(\s+ALL\s+FIELDS)?|'
r'(INSERT|APPEND)(\s+INITIAL\s+LINE\s+(IN)?TO|\s+LINES\s+OF)?|'
r'IN\s+((BYTE|CHARACTER)\s+MODE|PROGRAM)|'
r'END-OF-(DEFINITION|PAGE|SELECTION)|'
r'WITH\s+FRAME(\s+TITLE)|'
r'(REPLACE|FIND)\s+((FIRST|ALL)\s+OCCURRENCES?\s+OF\s+)?(SUBSTRING|REGEX)?|'
r'MATCH\s+(LENGTH|COUNT|LINE|OFFSET)|'
r'(RESPECTING|IGNORING)\s+CASE|'
r'IN\s+UPDATE\s+TASK|'
r'(SOURCE|RESULT)\s+(XML)?|'
r'REFERENCE\s+INTO|'
             # simple combinations
r'AND\s+(MARK|RETURN)|CLIENT\s+SPECIFIED|CORRESPONDING\s+FIELDS\s+OF|'
r'IF\s+FOUND|FOR\s+EVENT|INHERITING\s+FROM|LEAVE\s+TO\s+SCREEN|'
r'LOOP\s+AT\s+(SCREEN)?|LOWER\s+CASE|MATCHCODE\s+OBJECT|MODIF\s+ID|'
r'MODIFY\s+SCREEN|NESTING\s+LEVEL|NO\s+INTERVALS|OF\s+STRUCTURE|'
r'RADIOBUTTON\s+GROUP|RANGE\s+OF|REF\s+TO|SUPPRESS DIALOG|'
r'TABLE\s+OF|UPPER\s+CASE|TRANSPORTING\s+NO\s+FIELDS|'
r'VALUE\s+CHECK|VISIBLE\s+LENGTH|HEADER\s+LINE|COMMON\s+PART)\b', Keyword),
# single word keywords.
(r'(^|(?<=(\s|\.)))(ABBREVIATED|ABSTRACT|ADD|ALIASES|ALIGN|ALPHA|'
r'ASSERT|AS|ASSIGN(ING)?|AT(\s+FIRST)?|'
r'BACK|BLOCK|BREAK-POINT|'
r'CASE|CAST|CATCH|CHANGING|CHECK|CLASS|CLEAR|COLLECT|COLOR|COMMIT|COND|CONV|'
r'CREATE|COMMUNICATION|COMPONENTS?|COMPUTE|CONCATENATE|CONDENSE|'
r'CONSTANTS|CONTEXTS|CONTINUE|CONTROLS|COUNTRY|CURRENCY|'
r'DATA|DATE|DECIMALS|DEFAULT|DEFINE|DEFINITION|DEFERRED|DEMAND|'
r'DETAIL|DIRECTORY|DIVIDE|DO|DUMMY|'
r'ELSE(IF)?|ENDAT|ENDCASE|ENDCATCH|ENDCLASS|ENDDO|ENDFORM|ENDFUNCTION|'
r'ENDIF|ENDINTERFACE|ENDLOOP|ENDMETHOD|ENDMODULE|ENDSELECT|ENDTRY|ENDWHILE|'
r'ENHANCEMENT|EVENTS|EXACT|EXCEPTIONS?|EXIT|EXPONENT|EXPORT|EXPORTING|EXTRACT|'
r'FETCH|FIELDS?|FOR|FORM|FORMAT|FREE|FROM|FUNCTION|'
r'HIDE|'
r'ID|IF|IMPORT|IMPLEMENTATION|IMPORTING|IN|INCLUDE|INCLUDING|'
r'INDEX|INFOTYPES|INITIALIZATION|INTERFACE|INTERFACES|INTO|'
r'LANGUAGE|LEAVE|LENGTH|LINES|LOAD|LOCAL|'
r'JOIN|'
r'KEY|'
r'NEW|NEXT|'
r'MAXIMUM|MESSAGE|METHOD[S]?|MINIMUM|MODULE|MODIFIER|MODIFY|MOVE|MULTIPLY|'
r'NODES|NUMBER|'
r'OBLIGATORY|OBJECT|OF|OFF|ON|OTHERS|OVERLAY|'
r'PACK|PAD|PARAMETERS|PERCENTAGE|POSITION|PROGRAM|PROVIDE|PUBLIC|PUT|PF\d\d|'
r'RAISE|RAISING|RANGES?|READ|RECEIVE|REDEFINITION|REFRESH|REJECT|REPORT|RESERVE|'
r'REF|RESUME|RETRY|RETURN|RETURNING|RIGHT|ROLLBACK|REPLACE|'
r'SCROLL|SEARCH|SELECT|SHIFT|SIGN|SINGLE|SIZE|SKIP|SORT|SPLIT|STATICS|STOP|'
r'STYLE|SUBMATCHES|SUBMIT|SUBTRACT|SUM(?!\()|SUMMARY|SUMMING|SUPPLY|SWITCH|'
r'TABLE|TABLES|TIMESTAMP|TIMES?|TIMEZONE|TITLE|\??TO|'
r'TOP-OF-PAGE|TRANSFER|TRANSLATE|TRY|TYPES|'
r'ULINE|UNDER|UNPACK|UPDATE|USING|'
r'VALUE|VALUES|VIA|VARYING|VARY|'
r'WAIT|WHEN|WHERE|WIDTH|WHILE|WITH|WINDOW|WRITE|XSD|ZERO)\b', Keyword),
# builtins
(r'(abs|acos|asin|atan|'
r'boolc|boolx|bit_set|'
r'char_off|charlen|ceil|cmax|cmin|condense|contains|'
r'contains_any_of|contains_any_not_of|concat_lines_of|cos|cosh|'
r'count|count_any_of|count_any_not_of|'
r'dbmaxlen|distance|'
r'escape|exp|'
r'find|find_end|find_any_of|find_any_not_of|floor|frac|from_mixed|'
r'insert|'
r'lines|log|log10|'
r'match|matches|'
r'nmax|nmin|numofchar|'
r'repeat|replace|rescale|reverse|round|'
r'segment|shift_left|shift_right|sign|sin|sinh|sqrt|strlen|'
r'substring|substring_after|substring_from|substring_before|substring_to|'
r'tan|tanh|to_upper|to_lower|to_mixed|translate|trunc|'
r'xstrlen)(\()\b', bygroups(Name.Builtin, Punctuation)),
(r'&[0-9]', Name),
(r'[0-9]+', Number.Integer),
# operators which look like variable names before
# parsing variable names.
(r'(?<=(\s|.))(AND|OR|EQ|NE|GT|LT|GE|LE|CO|CN|CA|NA|CS|NOT|NS|CP|NP|'
r'BYTE-CO|BYTE-CN|BYTE-CA|BYTE-NA|BYTE-CS|BYTE-NS|'
r'IS\s+(NOT\s+)?(INITIAL|ASSIGNED|REQUESTED|BOUND))\b', Operator.Word),
include('variable-names'),
# standard operators after variable names,
# because < and > are part of field symbols.
(r'[?*<>=\-+&]', Operator),
(r"'(''|[^'])*'", String.Single),
(r"`([^`])*`", String.Single),
(r"([|}])([^{}|]*?)([|{])",
bygroups(Punctuation, String.Single, Punctuation)),
(r'[/;:()\[\],.]', Punctuation),
(r'(!)(\w+)', bygroups(Operator, Name)),
],
}
class OpenEdgeLexer(RegexLexer):
"""
Lexer for `OpenEdge ABL (formerly Progress)
<http://web.progress.com/en/openedge/abl.html>`_ source code.
.. versionadded:: 1.5
"""
name = 'OpenEdge ABL'
aliases = ['openedge', 'abl', 'progress']
filenames = ['*.p', '*.cls']
mimetypes = ['text/x-openedge', 'application/x-openedge']
types = (r'(?i)(^|(?<=[^\w\-]))(CHARACTER|CHAR|CHARA|CHARAC|CHARACT|CHARACTE|'
r'COM-HANDLE|DATE|DATETIME|DATETIME-TZ|'
r'DECIMAL|DEC|DECI|DECIM|DECIMA|HANDLE|'
r'INT64|INTEGER|INT|INTE|INTEG|INTEGE|'
r'LOGICAL|LONGCHAR|MEMPTR|RAW|RECID|ROWID)\s*($|(?=[^\w\-]))')
keywords = words(OPENEDGEKEYWORDS,
prefix=r'(?i)(^|(?<=[^\w\-]))',
suffix=r'\s*($|(?=[^\w\-]))')
tokens = {
'root': [
(r'/\*', Comment.Multiline, 'comment'),
(r'\{', Comment.Preproc, 'preprocessor'),
(r'\s*&.*', Comment.Preproc),
(r'0[xX][0-9a-fA-F]+[LlUu]*', Number.Hex),
(r'(?i)(DEFINE|DEF|DEFI|DEFIN)\b', Keyword.Declaration),
(types, Keyword.Type),
(keywords, Name.Builtin),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'[0-9]+', Number.Integer),
(r'\s+', Whitespace),
(r'[+*/=-]', Operator),
(r'[.:()]', Punctuation),
(r'.', Name.Variable), # Lazy catch-all
],
'comment': [
(r'[^*/]', Comment.Multiline),
(r'/\*', Comment.Multiline, '#push'),
(r'\*/', Comment.Multiline, '#pop'),
(r'[*/]', Comment.Multiline)
],
'preprocessor': [
(r'[^{}]', Comment.Preproc),
(r'\{', Comment.Preproc, '#push'),
(r'\}', Comment.Preproc, '#pop'),
],
}
def analyse_text(text):
"""Try to identify OpenEdge ABL based on a few common constructs."""
result = 0
if 'END.' in text:
result += 0.05
if 'END PROCEDURE.' in text:
result += 0.05
if 'ELSE DO:' in text:
result += 0.05
return result
class GoodDataCLLexer(RegexLexer):
"""
Lexer for `GoodData-CL
<https://github.com/gooddata/GoodData-CL/raw/master/cli/src/main/resources/\
com/gooddata/processor/COMMANDS.txt>`_
script files.
.. versionadded:: 1.4
"""
name = 'GoodData-CL'
aliases = ['gooddata-cl']
filenames = ['*.gdc']
mimetypes = ['text/x-gooddata-cl']
flags = re.IGNORECASE
tokens = {
'root': [
# Comments
(r'#.*', Comment.Single),
# Function call
(r'[a-z]\w*', Name.Function),
# Argument list
(r'\(', Punctuation, 'args-list'),
# Punctuation
(r';', Punctuation),
# Space is not significant
(r'\s+', Text)
],
'args-list': [
(r'\)', Punctuation, '#pop'),
(r',', Punctuation),
(r'[a-z]\w*', Name.Variable),
(r'=', Operator),
(r'"', String, 'string-literal'),
(r'[0-9]+(?:\.[0-9]+)?(?:e[+-]?[0-9]{1,3})?', Number),
# Space is not significant
(r'\s', Whitespace)
],
'string-literal': [
(r'\\[tnrfbae"\\]', String.Escape),
(r'"', String, '#pop'),
(r'[^\\"]+', String)
]
}
class MaqlLexer(RegexLexer):
"""
Lexer for `GoodData MAQL
<https://secure.gooddata.com/docs/html/advanced.metric.tutorial.html>`_
scripts.
.. versionadded:: 1.4
"""
name = 'MAQL'
aliases = ['maql']
filenames = ['*.maql']
mimetypes = ['text/x-gooddata-maql', 'application/x-gooddata-maql']
flags = re.IGNORECASE
tokens = {
'root': [
# IDENTITY
(r'IDENTIFIER\b', Name.Builtin),
# IDENTIFIER
(r'\{[^}]+\}', Name.Variable),
# NUMBER
(r'[0-9]+(?:\.[0-9]+)?(?:e[+-]?[0-9]{1,3})?', Number),
# STRING
(r'"', String, 'string-literal'),
# RELATION
(r'\<\>|\!\=', Operator),
(r'\=|\>\=|\>|\<\=|\<', Operator),
# :=
(r'\:\=', Operator),
# OBJECT
(r'\[[^]]+\]', Name.Variable.Class),
# keywords
(words((
'DIMENSION', 'DIMENSIONS', 'BOTTOM', 'METRIC', 'COUNT', 'OTHER',
'FACT', 'WITH', 'TOP', 'OR', 'ATTRIBUTE', 'CREATE', 'PARENT',
'FALSE', 'ROW', 'ROWS', 'FROM', 'ALL', 'AS', 'PF', 'COLUMN',
'COLUMNS', 'DEFINE', 'REPORT', 'LIMIT', 'TABLE', 'LIKE', 'AND',
'BY', 'BETWEEN', 'EXCEPT', 'SELECT', 'MATCH', 'WHERE', 'TRUE',
'FOR', 'IN', 'WITHOUT', 'FILTER', 'ALIAS', 'WHEN', 'NOT', 'ON',
'KEYS', 'KEY', 'FULLSET', 'PRIMARY', 'LABELS', 'LABEL',
'VISUAL', 'TITLE', 'DESCRIPTION', 'FOLDER', 'ALTER', 'DROP',
'ADD', 'DATASET', 'DATATYPE', 'INT', 'BIGINT', 'DOUBLE', 'DATE',
'VARCHAR', 'DECIMAL', 'SYNCHRONIZE', 'TYPE', 'DEFAULT', 'ORDER',
'ASC', 'DESC', 'HYPERLINK', 'INCLUDE', 'TEMPLATE', 'MODIFY'),
suffix=r'\b'),
Keyword),
# FUNCNAME
(r'[a-z]\w*\b', Name.Function),
# Comments
(r'#.*', Comment.Single),
# Punctuation
(r'[,;()]', Punctuation),
# Space is not significant
(r'\s+', Whitespace)
],
'string-literal': [
(r'\\[tnrfbae"\\]', String.Escape),
(r'"', String, '#pop'),
(r'[^\\"]+', String)
],
}
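if __name__ == '__main__':
    # Illustrative usage sketch, not part of the lexers above: exercising the
    # OpenEdge ABL heuristic in OpenEdgeLexer.analyse_text. The fragment is
    # made up; each recognised construct contributes 0.05 to the score.
    sample = 'DEFINE VARIABLE i AS INTEGER.\nIF i > 0 THEN DO:\nEND.\n'
    print(OpenEdgeLexer.analyse_text(sample))  # -> 0.05 (only "END." matches)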
| 28,112 | Python | 43.837321 | 94 | 0.498755 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/ncl.py | """
pygments.lexers.ncl
~~~~~~~~~~~~~~~~~~~
Lexers for NCAR Command Language.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, include, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation
__all__ = ['NCLLexer']
class NCLLexer(RegexLexer):
"""
Lexer for NCL code.
.. versionadded:: 2.2
"""
name = 'NCL'
aliases = ['ncl']
filenames = ['*.ncl']
mimetypes = ['text/ncl']
flags = re.MULTILINE
tokens = {
'root': [
(r';.*\n', Comment),
include('strings'),
include('core'),
(r'[a-zA-Z_]\w*', Name),
include('nums'),
(r'[\s]+', Text),
],
'core': [
# Statements
(words((
'begin', 'break', 'continue', 'create', 'defaultapp', 'do',
'else', 'end', 'external', 'exit', 'True', 'False', 'file', 'function',
'getvalues', 'graphic', 'group', 'if', 'list', 'load', 'local',
'new', '_Missing', 'Missing', 'noparent', 'procedure',
'quit', 'QUIT', 'Quit', 'record', 'return', 'setvalues', 'stop',
'then', 'while'), prefix=r'\b', suffix=r'\s*\b'),
Keyword),
# Data Types
(words((
'ubyte', 'uint', 'uint64', 'ulong', 'string', 'byte',
'character', 'double', 'float', 'integer', 'int64', 'logical',
'long', 'short', 'ushort', 'enumeric', 'numeric', 'snumeric'),
prefix=r'\b', suffix=r'\s*\b'),
Keyword.Type),
# Operators
(r'[\%^*+\-/<>]', Operator),
# punctuation:
(r'[\[\]():@$!&|.,\\{}]', Punctuation),
(r'[=:]', Punctuation),
# Intrinsics
(words((
'abs', 'acos', 'addfile', 'addfiles', 'all', 'angmom_atm', 'any',
'area_conserve_remap', 'area_hi2lores', 'area_poly_sphere',
'asciiread', 'asciiwrite', 'asin', 'atan', 'atan2', 'attsetvalues',
'avg', 'betainc', 'bin_avg', 'bin_sum', 'bw_bandpass_filter',
'cancor', 'cbinread', 'cbinwrite', 'cd_calendar', 'cd_inv_calendar',
'cdfbin_p', 'cdfbin_pr', 'cdfbin_s', 'cdfbin_xn', 'cdfchi_p',
'cdfchi_x', 'cdfgam_p', 'cdfgam_x', 'cdfnor_p', 'cdfnor_x',
'cdft_p', 'cdft_t', 'ceil', 'center_finite_diff',
'center_finite_diff_n', 'cfftb', 'cfftf', 'cfftf_frq_reorder',
'charactertodouble', 'charactertofloat', 'charactertointeger',
'charactertolong', 'charactertoshort', 'charactertostring',
'chartodouble', 'chartofloat', 'chartoint', 'chartointeger',
'chartolong', 'chartoshort', 'chartostring', 'chiinv', 'clear',
'color_index_to_rgba', 'conform', 'conform_dims', 'cos', 'cosh',
'count_unique_values', 'covcorm', 'covcorm_xy', 'craybinnumrec',
'craybinrecread', 'create_graphic', 'csa1', 'csa1d', 'csa1s',
'csa1x', 'csa1xd', 'csa1xs', 'csa2', 'csa2d', 'csa2l', 'csa2ld',
'csa2ls', 'csa2lx', 'csa2lxd', 'csa2lxs', 'csa2s', 'csa2x',
'csa2xd', 'csa2xs', 'csa3', 'csa3d', 'csa3l', 'csa3ld', 'csa3ls',
'csa3lx', 'csa3lxd', 'csa3lxs', 'csa3s', 'csa3x', 'csa3xd',
'csa3xs', 'csc2s', 'csgetp', 'css2c', 'cssetp', 'cssgrid', 'csstri',
'csvoro', 'cumsum', 'cz2ccm', 'datatondc', 'day_of_week',
'day_of_year', 'days_in_month', 'default_fillvalue', 'delete',
'depth_to_pres', 'destroy', 'determinant', 'dewtemp_trh',
'dgeevx_lapack', 'dim_acumrun_n', 'dim_avg', 'dim_avg_n',
'dim_avg_wgt', 'dim_avg_wgt_n', 'dim_cumsum', 'dim_cumsum_n',
'dim_gamfit_n', 'dim_gbits', 'dim_max', 'dim_max_n', 'dim_median',
'dim_median_n', 'dim_min', 'dim_min_n', 'dim_num', 'dim_num_n',
'dim_numrun_n', 'dim_pqsort', 'dim_pqsort_n', 'dim_product',
'dim_product_n', 'dim_rmsd', 'dim_rmsd_n', 'dim_rmvmean',
'dim_rmvmean_n', 'dim_rmvmed', 'dim_rmvmed_n', 'dim_spi_n',
'dim_standardize', 'dim_standardize_n', 'dim_stat4', 'dim_stat4_n',
'dim_stddev', 'dim_stddev_n', 'dim_sum', 'dim_sum_n', 'dim_sum_wgt',
'dim_sum_wgt_n', 'dim_variance', 'dim_variance_n', 'dimsizes',
'doubletobyte', 'doubletochar', 'doubletocharacter',
'doubletofloat', 'doubletoint', 'doubletointeger', 'doubletolong',
'doubletoshort', 'dpres_hybrid_ccm', 'dpres_plevel', 'draw',
'draw_color_palette', 'dsgetp', 'dsgrid2', 'dsgrid2d', 'dsgrid2s',
'dsgrid3', 'dsgrid3d', 'dsgrid3s', 'dspnt2', 'dspnt2d', 'dspnt2s',
'dspnt3', 'dspnt3d', 'dspnt3s', 'dssetp', 'dtrend', 'dtrend_msg',
'dtrend_msg_n', 'dtrend_n', 'dtrend_quadratic',
'dtrend_quadratic_msg_n', 'dv2uvf', 'dv2uvg', 'dz_height',
'echo_off', 'echo_on', 'eof2data', 'eof_varimax', 'eofcor',
'eofcor_pcmsg', 'eofcor_ts', 'eofcov', 'eofcov_pcmsg', 'eofcov_ts',
'eofunc', 'eofunc_ts', 'eofunc_varimax', 'equiv_sample_size', 'erf',
'erfc', 'esacr', 'esacv', 'esccr', 'esccv', 'escorc', 'escorc_n',
'escovc', 'exit', 'exp', 'exp_tapersh', 'exp_tapersh_wgts',
'exp_tapershC', 'ezfftb', 'ezfftb_n', 'ezfftf', 'ezfftf_n',
'f2fosh', 'f2foshv', 'f2fsh', 'f2fshv', 'f2gsh', 'f2gshv', 'fabs',
'fbindirread', 'fbindirwrite', 'fbinnumrec', 'fbinread',
'fbinrecread', 'fbinrecwrite', 'fbinwrite', 'fft2db', 'fft2df',
'fftshift', 'fileattdef', 'filechunkdimdef', 'filedimdef',
'fileexists', 'filegrpdef', 'filevarattdef', 'filevarchunkdef',
'filevarcompressleveldef', 'filevardef', 'filevardimsizes',
'filwgts_lancos', 'filwgts_lanczos', 'filwgts_normal',
'floattobyte', 'floattochar', 'floattocharacter', 'floattoint',
'floattointeger', 'floattolong', 'floattoshort', 'floor',
'fluxEddy', 'fo2fsh', 'fo2fshv', 'fourier_info', 'frame', 'fspan',
'ftcurv', 'ftcurvd', 'ftcurvi', 'ftcurvp', 'ftcurvpi', 'ftcurvps',
'ftcurvs', 'ftest', 'ftgetp', 'ftkurv', 'ftkurvd', 'ftkurvp',
'ftkurvpd', 'ftsetp', 'ftsurf', 'g2fsh', 'g2fshv', 'g2gsh',
'g2gshv', 'gamma', 'gammainc', 'gaus', 'gaus_lobat',
'gaus_lobat_wgt', 'gc_aangle', 'gc_clkwise', 'gc_dangle',
'gc_inout', 'gc_latlon', 'gc_onarc', 'gc_pnt2gc', 'gc_qarea',
'gc_tarea', 'generate_2d_array', 'get_color_index',
'get_color_rgba', 'get_cpu_time', 'get_isolines', 'get_ncl_version',
'get_script_name', 'get_script_prefix_name', 'get_sphere_radius',
'get_unique_values', 'getbitsone', 'getenv', 'getfiledimsizes',
'getfilegrpnames', 'getfilepath', 'getfilevaratts',
'getfilevarchunkdimsizes', 'getfilevardims', 'getfilevardimsizes',
'getfilevarnames', 'getfilevartypes', 'getvaratts', 'getvardims',
'gradsf', 'gradsg', 'greg2jul', 'grid2triple', 'hlsrgb', 'hsvrgb',
'hydro', 'hyi2hyo', 'idsfft', 'igradsf', 'igradsg', 'ilapsf',
'ilapsg', 'ilapvf', 'ilapvg', 'ind', 'ind_resolve', 'int2p',
'int2p_n', 'integertobyte', 'integertochar', 'integertocharacter',
'integertoshort', 'inttobyte', 'inttochar', 'inttoshort',
'inverse_matrix', 'isatt', 'isbigendian', 'isbyte', 'ischar',
'iscoord', 'isdefined', 'isdim', 'isdimnamed', 'isdouble',
'isenumeric', 'isfile', 'isfilepresent', 'isfilevar',
'isfilevaratt', 'isfilevarcoord', 'isfilevardim', 'isfloat',
'isfunc', 'isgraphic', 'isint', 'isint64', 'isinteger',
'isleapyear', 'islogical', 'islong', 'ismissing', 'isnan_ieee',
'isnumeric', 'ispan', 'isproc', 'isshort', 'issnumeric', 'isstring',
'isubyte', 'isuint', 'isuint64', 'isulong', 'isunlimited',
'isunsigned', 'isushort', 'isvar', 'jul2greg', 'kmeans_as136',
'kolsm2_n', 'kron_product', 'lapsf', 'lapsg', 'lapvf', 'lapvg',
'latlon2utm', 'lclvl', 'lderuvf', 'lderuvg', 'linint1', 'linint1_n',
'linint2', 'linint2_points', 'linmsg', 'linmsg_n', 'linrood_latwgt',
'linrood_wgt', 'list_files', 'list_filevars', 'list_hlus',
'list_procfuncs', 'list_vars', 'ListAppend', 'ListCount',
'ListGetType', 'ListIndex', 'ListIndexFromName', 'ListPop',
'ListPush', 'ListSetType', 'loadscript', 'local_max', 'local_min',
'log', 'log10', 'longtobyte', 'longtochar', 'longtocharacter',
'longtoint', 'longtointeger', 'longtoshort', 'lspoly', 'lspoly_n',
'mask', 'max', 'maxind', 'min', 'minind', 'mixed_layer_depth',
'mixhum_ptd', 'mixhum_ptrh', 'mjo_cross_coh2pha',
'mjo_cross_segment', 'moc_globe_atl', 'monthday', 'natgrid',
'natgridd', 'natgrids', 'ncargpath', 'ncargversion', 'ndctodata',
'ndtooned', 'new', 'NewList', 'ngezlogo', 'nggcog', 'nggetp',
'nglogo', 'ngsetp', 'NhlAddAnnotation', 'NhlAddData',
'NhlAddOverlay', 'NhlAddPrimitive', 'NhlAppGetDefaultParentId',
'NhlChangeWorkstation', 'NhlClassName', 'NhlClearWorkstation',
'NhlDataPolygon', 'NhlDataPolyline', 'NhlDataPolymarker',
'NhlDataToNDC', 'NhlDestroy', 'NhlDraw', 'NhlFrame', 'NhlFreeColor',
'NhlGetBB', 'NhlGetClassResources', 'NhlGetErrorObjectId',
'NhlGetNamedColorIndex', 'NhlGetParentId',
'NhlGetParentWorkstation', 'NhlGetWorkspaceObjectId',
'NhlIsAllocatedColor', 'NhlIsApp', 'NhlIsDataComm', 'NhlIsDataItem',
'NhlIsDataSpec', 'NhlIsTransform', 'NhlIsView', 'NhlIsWorkstation',
'NhlName', 'NhlNDCPolygon', 'NhlNDCPolyline', 'NhlNDCPolymarker',
'NhlNDCToData', 'NhlNewColor', 'NhlNewDashPattern', 'NhlNewMarker',
'NhlPalGetDefined', 'NhlRemoveAnnotation', 'NhlRemoveData',
'NhlRemoveOverlay', 'NhlRemovePrimitive', 'NhlSetColor',
'NhlSetDashPattern', 'NhlSetMarker', 'NhlUpdateData',
'NhlUpdateWorkstation', 'nice_mnmxintvl', 'nngetaspectd',
'nngetaspects', 'nngetp', 'nngetsloped', 'nngetslopes', 'nngetwts',
'nngetwtsd', 'nnpnt', 'nnpntd', 'nnpntend', 'nnpntendd',
'nnpntinit', 'nnpntinitd', 'nnpntinits', 'nnpnts', 'nnsetp', 'num',
'obj_anal_ic', 'omega_ccm', 'onedtond', 'overlay', 'paleo_outline',
'pdfxy_bin', 'poisson_grid_fill', 'pop_remap', 'potmp_insitu_ocn',
'prcwater_dp', 'pres2hybrid', 'pres_hybrid_ccm', 'pres_sigma',
'print', 'print_table', 'printFileVarSummary', 'printVarSummary',
'product', 'pslec', 'pslhor', 'pslhyp', 'qsort', 'rand',
'random_chi', 'random_gamma', 'random_normal', 'random_setallseed',
'random_uniform', 'rcm2points', 'rcm2rgrid', 'rdsstoi',
'read_colormap_file', 'reg_multlin', 'regcoef', 'regCoef_n',
'regline', 'relhum', 'replace_ieeenan', 'reshape', 'reshape_ind',
'rgba_to_color_index', 'rgbhls', 'rgbhsv', 'rgbyiq', 'rgrid2rcm',
'rhomb_trunc', 'rip_cape_2d', 'rip_cape_3d', 'round', 'rtest',
'runave', 'runave_n', 'set_default_fillvalue', 'set_sphere_radius',
'setfileoption', 'sfvp2uvf', 'sfvp2uvg', 'shaec', 'shagc',
'shgetnp', 'shgetp', 'shgrid', 'shorttobyte', 'shorttochar',
'shorttocharacter', 'show_ascii', 'shsec', 'shsetp', 'shsgc',
'shsgc_R42', 'sigma2hybrid', 'simpeq', 'simpne', 'sin',
'sindex_yrmo', 'sinh', 'sizeof', 'sleep', 'smth9', 'snindex_yrmo',
'solve_linsys', 'span_color_indexes', 'span_color_rgba',
'sparse_matrix_mult', 'spcorr', 'spcorr_n', 'specx_anal',
'specxy_anal', 'spei', 'sprintf', 'sprinti', 'sqrt', 'sqsort',
'srand', 'stat2', 'stat4', 'stat_medrng', 'stat_trim',
'status_exit', 'stdatmus_p2tdz', 'stdatmus_z2tdp', 'stddev',
'str_capital', 'str_concat', 'str_fields_count', 'str_get_cols',
'str_get_dq', 'str_get_field', 'str_get_nl', 'str_get_sq',
'str_get_tab', 'str_index_of_substr', 'str_insert', 'str_is_blank',
'str_join', 'str_left_strip', 'str_lower', 'str_match',
'str_match_ic', 'str_match_ic_regex', 'str_match_ind',
'str_match_ind_ic', 'str_match_ind_ic_regex', 'str_match_ind_regex',
'str_match_regex', 'str_right_strip', 'str_split',
'str_split_by_length', 'str_split_csv', 'str_squeeze', 'str_strip',
'str_sub_str', 'str_switch', 'str_upper', 'stringtochar',
'stringtocharacter', 'stringtodouble', 'stringtofloat',
'stringtoint', 'stringtointeger', 'stringtolong', 'stringtoshort',
'strlen', 'student_t', 'sum', 'svd_lapack', 'svdcov', 'svdcov_sv',
'svdstd', 'svdstd_sv', 'system', 'systemfunc', 'tan', 'tanh',
'taper', 'taper_n', 'tdclrs', 'tdctri', 'tdcudp', 'tdcurv',
'tddtri', 'tdez2d', 'tdez3d', 'tdgetp', 'tdgrds', 'tdgrid',
'tdgtrs', 'tdinit', 'tditri', 'tdlbla', 'tdlblp', 'tdlbls',
'tdline', 'tdlndp', 'tdlnpa', 'tdlpdp', 'tdmtri', 'tdotri',
'tdpara', 'tdplch', 'tdprpa', 'tdprpi', 'tdprpt', 'tdsetp',
'tdsort', 'tdstri', 'tdstrs', 'tdttri', 'thornthwaite', 'tobyte',
'tochar', 'todouble', 'tofloat', 'toint', 'toint64', 'tointeger',
'tolong', 'toshort', 'tosigned', 'tostring', 'tostring_with_format',
'totype', 'toubyte', 'touint', 'touint64', 'toulong', 'tounsigned',
'toushort', 'trend_manken', 'tri_trunc', 'triple2grid',
'triple2grid2d', 'trop_wmo', 'ttest', 'typeof', 'undef',
'unique_string', 'update', 'ushorttoint', 'ut_calendar',
'ut_inv_calendar', 'utm2latlon', 'uv2dv_cfd', 'uv2dvf', 'uv2dvg',
'uv2sfvpf', 'uv2sfvpg', 'uv2vr_cfd', 'uv2vrdvf', 'uv2vrdvg',
'uv2vrf', 'uv2vrg', 'v5d_close', 'v5d_create', 'v5d_setLowLev',
'v5d_setUnits', 'v5d_write', 'v5d_write_var', 'variance', 'vhaec',
'vhagc', 'vhsec', 'vhsgc', 'vibeta', 'vinth2p', 'vinth2p_ecmwf',
'vinth2p_ecmwf_nodes', 'vinth2p_nodes', 'vintp2p_ecmwf', 'vr2uvf',
'vr2uvg', 'vrdv2uvf', 'vrdv2uvg', 'wavelet', 'wavelet_default',
'weibull', 'wgt_area_smooth', 'wgt_areaave', 'wgt_areaave2',
'wgt_arearmse', 'wgt_arearmse2', 'wgt_areasum2', 'wgt_runave',
'wgt_runave_n', 'wgt_vert_avg_beta', 'wgt_volave', 'wgt_volave_ccm',
'wgt_volrmse', 'wgt_volrmse_ccm', 'where', 'wk_smooth121', 'wmbarb',
'wmbarbmap', 'wmdrft', 'wmgetp', 'wmlabs', 'wmsetp', 'wmstnm',
'wmvect', 'wmvectmap', 'wmvlbl', 'wrf_avo', 'wrf_cape_2d',
'wrf_cape_3d', 'wrf_dbz', 'wrf_eth', 'wrf_helicity', 'wrf_ij_to_ll',
'wrf_interp_1d', 'wrf_interp_2d_xy', 'wrf_interp_3d_z',
'wrf_latlon_to_ij', 'wrf_ll_to_ij', 'wrf_omega', 'wrf_pvo',
'wrf_rh', 'wrf_slp', 'wrf_smooth_2d', 'wrf_td', 'wrf_tk',
'wrf_updraft_helicity', 'wrf_uvmet', 'wrf_virtual_temp',
'wrf_wetbulb', 'wrf_wps_close_int', 'wrf_wps_open_int',
'wrf_wps_rddata_int', 'wrf_wps_rdhead_int', 'wrf_wps_read_int',
'wrf_wps_write_int', 'write_matrix', 'write_table', 'yiqrgb',
'z2geouv', 'zonal_mpsi', 'addfiles_GetVar', 'advect_variable',
'area_conserve_remap_Wrap', 'area_hi2lores_Wrap',
'array_append_record', 'assignFillValue', 'byte2flt',
'byte2flt_hdf', 'calcDayAnomTLL', 'calcMonAnomLLLT',
'calcMonAnomLLT', 'calcMonAnomTLL', 'calcMonAnomTLLL',
'calculate_monthly_values', 'cd_convert', 'changeCase',
'changeCaseChar', 'clmDayTLL', 'clmDayTLLL', 'clmMon2clmDay',
'clmMonLLLT', 'clmMonLLT', 'clmMonTLL', 'clmMonTLLL', 'closest_val',
'copy_VarAtts', 'copy_VarCoords', 'copy_VarCoords_1',
'copy_VarCoords_2', 'copy_VarMeta', 'copyatt', 'crossp3',
'cshstringtolist', 'cssgrid_Wrap', 'dble2flt', 'decimalPlaces',
'delete_VarAtts', 'dim_avg_n_Wrap', 'dim_avg_wgt_n_Wrap',
'dim_avg_wgt_Wrap', 'dim_avg_Wrap', 'dim_cumsum_n_Wrap',
'dim_cumsum_Wrap', 'dim_max_n_Wrap', 'dim_min_n_Wrap',
'dim_rmsd_n_Wrap', 'dim_rmsd_Wrap', 'dim_rmvmean_n_Wrap',
'dim_rmvmean_Wrap', 'dim_rmvmed_n_Wrap', 'dim_rmvmed_Wrap',
'dim_standardize_n_Wrap', 'dim_standardize_Wrap',
'dim_stddev_n_Wrap', 'dim_stddev_Wrap', 'dim_sum_n_Wrap',
'dim_sum_wgt_n_Wrap', 'dim_sum_wgt_Wrap', 'dim_sum_Wrap',
'dim_variance_n_Wrap', 'dim_variance_Wrap', 'dpres_plevel_Wrap',
'dtrend_leftdim', 'dv2uvF_Wrap', 'dv2uvG_Wrap', 'eof_north',
'eofcor_Wrap', 'eofcov_Wrap', 'eofunc_north', 'eofunc_ts_Wrap',
'eofunc_varimax_reorder', 'eofunc_varimax_Wrap', 'eofunc_Wrap',
'epsZero', 'f2fosh_Wrap', 'f2foshv_Wrap', 'f2fsh_Wrap',
'f2fshv_Wrap', 'f2gsh_Wrap', 'f2gshv_Wrap', 'fbindirSwap',
'fbinseqSwap1', 'fbinseqSwap2', 'flt2dble', 'flt2string',
'fo2fsh_Wrap', 'fo2fshv_Wrap', 'g2fsh_Wrap', 'g2fshv_Wrap',
'g2gsh_Wrap', 'g2gshv_Wrap', 'generate_resample_indices',
'generate_sample_indices', 'generate_unique_indices',
'genNormalDist', 'get1Dindex', 'get1Dindex_Collapse',
'get1Dindex_Exclude', 'get_file_suffix', 'GetFillColor',
'GetFillColorIndex', 'getFillValue', 'getind_latlon2d',
'getVarDimNames', 'getVarFillValue', 'grib_stime2itime',
'hyi2hyo_Wrap', 'ilapsF_Wrap', 'ilapsG_Wrap', 'ind_nearest_coord',
'indStrSubset', 'int2dble', 'int2flt', 'int2p_n_Wrap', 'int2p_Wrap',
'isMonotonic', 'isStrSubset', 'latGau', 'latGauWgt', 'latGlobeF',
'latGlobeFo', 'latRegWgt', 'linint1_n_Wrap', 'linint1_Wrap',
'linint2_points_Wrap', 'linint2_Wrap', 'local_max_1d',
'local_min_1d', 'lonFlip', 'lonGlobeF', 'lonGlobeFo', 'lonPivot',
'merge_levels_sfc', 'mod', 'month_to_annual',
'month_to_annual_weighted', 'month_to_season', 'month_to_season12',
'month_to_seasonN', 'monthly_total_to_daily_mean', 'nameDim',
'natgrid_Wrap', 'NewCosWeight', 'niceLatLon2D', 'NormCosWgtGlobe',
'numAsciiCol', 'numAsciiRow', 'numeric2int',
'obj_anal_ic_deprecated', 'obj_anal_ic_Wrap', 'omega_ccm_driver',
'omega_to_w', 'oneDtostring', 'pack_values', 'pattern_cor', 'pdfx',
'pdfxy', 'pdfxy_conform', 'pot_temp', 'pot_vort_hybrid',
'pot_vort_isobaric', 'pres2hybrid_Wrap', 'print_clock',
'printMinMax', 'quadroots', 'rcm2points_Wrap', 'rcm2rgrid_Wrap',
'readAsciiHead', 'readAsciiTable', 'reg_multlin_stats',
'region_ind', 'regline_stats', 'relhum_ttd', 'replaceSingleChar',
'RGBtoCmap', 'rgrid2rcm_Wrap', 'rho_mwjf', 'rm_single_dims',
'rmAnnCycle1D', 'rmInsufData', 'rmMonAnnCycLLLT', 'rmMonAnnCycLLT',
'rmMonAnnCycTLL', 'runave_n_Wrap', 'runave_Wrap', 'short2flt',
'short2flt_hdf', 'shsgc_R42_Wrap', 'sign_f90', 'sign_matlab',
'smth9_Wrap', 'smthClmDayTLL', 'smthClmDayTLLL', 'SqrtCosWeight',
'stat_dispersion', 'static_stability', 'stdMonLLLT', 'stdMonLLT',
'stdMonTLL', 'stdMonTLLL', 'symMinMaxPlt', 'table_attach_columns',
'table_attach_rows', 'time_to_newtime', 'transpose',
'triple2grid_Wrap', 'ut_convert', 'uv2dvF_Wrap', 'uv2dvG_Wrap',
'uv2vrF_Wrap', 'uv2vrG_Wrap', 'vr2uvF_Wrap', 'vr2uvG_Wrap',
'w_to_omega', 'wallClockElapseTime', 'wave_number_spc',
'wgt_areaave_Wrap', 'wgt_runave_leftdim', 'wgt_runave_n_Wrap',
'wgt_runave_Wrap', 'wgt_vertical_n', 'wind_component',
'wind_direction', 'yyyyddd_to_yyyymmdd', 'yyyymm_time',
'yyyymm_to_yyyyfrac', 'yyyymmdd_time', 'yyyymmdd_to_yyyyddd',
'yyyymmdd_to_yyyyfrac', 'yyyymmddhh_time', 'yyyymmddhh_to_yyyyfrac',
'zonal_mpsi_Wrap', 'zonalAve', 'calendar_decode2', 'cd_string',
'kf_filter', 'run_cor', 'time_axis_labels', 'ut_string',
'wrf_contour', 'wrf_map', 'wrf_map_overlay', 'wrf_map_overlays',
'wrf_map_resources', 'wrf_map_zoom', 'wrf_overlay', 'wrf_overlays',
'wrf_user_getvar', 'wrf_user_ij_to_ll', 'wrf_user_intrp2d',
'wrf_user_intrp3d', 'wrf_user_latlon_to_ij', 'wrf_user_list_times',
'wrf_user_ll_to_ij', 'wrf_user_unstagger', 'wrf_user_vert_interp',
'wrf_vector', 'gsn_add_annotation', 'gsn_add_polygon',
'gsn_add_polyline', 'gsn_add_polymarker',
'gsn_add_shapefile_polygons', 'gsn_add_shapefile_polylines',
'gsn_add_shapefile_polymarkers', 'gsn_add_text', 'gsn_attach_plots',
'gsn_blank_plot', 'gsn_contour', 'gsn_contour_map',
'gsn_contour_shade', 'gsn_coordinates', 'gsn_create_labelbar',
'gsn_create_legend', 'gsn_create_text',
'gsn_csm_attach_zonal_means', 'gsn_csm_blank_plot',
'gsn_csm_contour', 'gsn_csm_contour_map', 'gsn_csm_contour_map_ce',
'gsn_csm_contour_map_overlay', 'gsn_csm_contour_map_polar',
'gsn_csm_hov', 'gsn_csm_lat_time', 'gsn_csm_map', 'gsn_csm_map_ce',
'gsn_csm_map_polar', 'gsn_csm_pres_hgt',
'gsn_csm_pres_hgt_streamline', 'gsn_csm_pres_hgt_vector',
'gsn_csm_streamline', 'gsn_csm_streamline_contour_map',
'gsn_csm_streamline_contour_map_ce',
'gsn_csm_streamline_contour_map_polar', 'gsn_csm_streamline_map',
'gsn_csm_streamline_map_ce', 'gsn_csm_streamline_map_polar',
'gsn_csm_streamline_scalar', 'gsn_csm_streamline_scalar_map',
'gsn_csm_streamline_scalar_map_ce',
'gsn_csm_streamline_scalar_map_polar', 'gsn_csm_time_lat',
'gsn_csm_vector', 'gsn_csm_vector_map', 'gsn_csm_vector_map_ce',
'gsn_csm_vector_map_polar', 'gsn_csm_vector_scalar',
'gsn_csm_vector_scalar_map', 'gsn_csm_vector_scalar_map_ce',
'gsn_csm_vector_scalar_map_polar', 'gsn_csm_x2y', 'gsn_csm_x2y2',
'gsn_csm_xy', 'gsn_csm_xy2', 'gsn_csm_xy3', 'gsn_csm_y',
'gsn_define_colormap', 'gsn_draw_colormap', 'gsn_draw_named_colors',
'gsn_histogram', 'gsn_labelbar_ndc', 'gsn_legend_ndc', 'gsn_map',
'gsn_merge_colormaps', 'gsn_open_wks', 'gsn_panel', 'gsn_polygon',
'gsn_polygon_ndc', 'gsn_polyline', 'gsn_polyline_ndc',
'gsn_polymarker', 'gsn_polymarker_ndc', 'gsn_retrieve_colormap',
'gsn_reverse_colormap', 'gsn_streamline', 'gsn_streamline_map',
'gsn_streamline_scalar', 'gsn_streamline_scalar_map', 'gsn_table',
'gsn_text', 'gsn_text_ndc', 'gsn_vector', 'gsn_vector_map',
'gsn_vector_scalar', 'gsn_vector_scalar_map', 'gsn_xy', 'gsn_y',
'hsv2rgb', 'maximize_output', 'namedcolor2rgb', 'namedcolor2rgba',
'reset_device_coordinates', 'span_named_colors'), prefix=r'\b'),
Name.Builtin),
# Resources
(words((
'amDataXF', 'amDataYF', 'amJust', 'amOn', 'amOrthogonalPosF',
'amParallelPosF', 'amResizeNotify', 'amSide', 'amTrackData',
'amViewId', 'amZone', 'appDefaultParent', 'appFileSuffix',
'appResources', 'appSysDir', 'appUsrDir', 'caCopyArrays',
'caXArray', 'caXCast', 'caXMaxV', 'caXMinV', 'caXMissingV',
'caYArray', 'caYCast', 'caYMaxV', 'caYMinV', 'caYMissingV',
'cnCellFillEdgeColor', 'cnCellFillMissingValEdgeColor',
'cnConpackParams', 'cnConstFEnableFill', 'cnConstFLabelAngleF',
'cnConstFLabelBackgroundColor', 'cnConstFLabelConstantSpacingF',
'cnConstFLabelFont', 'cnConstFLabelFontAspectF',
'cnConstFLabelFontColor', 'cnConstFLabelFontHeightF',
'cnConstFLabelFontQuality', 'cnConstFLabelFontThicknessF',
'cnConstFLabelFormat', 'cnConstFLabelFuncCode', 'cnConstFLabelJust',
'cnConstFLabelOn', 'cnConstFLabelOrthogonalPosF',
'cnConstFLabelParallelPosF', 'cnConstFLabelPerimColor',
'cnConstFLabelPerimOn', 'cnConstFLabelPerimSpaceF',
'cnConstFLabelPerimThicknessF', 'cnConstFLabelSide',
'cnConstFLabelString', 'cnConstFLabelTextDirection',
'cnConstFLabelZone', 'cnConstFUseInfoLabelRes',
'cnExplicitLabelBarLabelsOn', 'cnExplicitLegendLabelsOn',
'cnExplicitLineLabelsOn', 'cnFillBackgroundColor', 'cnFillColor',
'cnFillColors', 'cnFillDotSizeF', 'cnFillDrawOrder', 'cnFillMode',
'cnFillOn', 'cnFillOpacityF', 'cnFillPalette', 'cnFillPattern',
'cnFillPatterns', 'cnFillScaleF', 'cnFillScales', 'cnFixFillBleed',
'cnGridBoundFillColor', 'cnGridBoundFillPattern',
'cnGridBoundFillScaleF', 'cnGridBoundPerimColor',
'cnGridBoundPerimDashPattern', 'cnGridBoundPerimOn',
'cnGridBoundPerimThicknessF', 'cnHighLabelAngleF',
'cnHighLabelBackgroundColor', 'cnHighLabelConstantSpacingF',
'cnHighLabelCount', 'cnHighLabelFont', 'cnHighLabelFontAspectF',
'cnHighLabelFontColor', 'cnHighLabelFontHeightF',
'cnHighLabelFontQuality', 'cnHighLabelFontThicknessF',
'cnHighLabelFormat', 'cnHighLabelFuncCode', 'cnHighLabelPerimColor',
'cnHighLabelPerimOn', 'cnHighLabelPerimSpaceF',
'cnHighLabelPerimThicknessF', 'cnHighLabelString', 'cnHighLabelsOn',
'cnHighLowLabelOverlapMode', 'cnHighUseLineLabelRes',
'cnInfoLabelAngleF', 'cnInfoLabelBackgroundColor',
'cnInfoLabelConstantSpacingF', 'cnInfoLabelFont',
'cnInfoLabelFontAspectF', 'cnInfoLabelFontColor',
'cnInfoLabelFontHeightF', 'cnInfoLabelFontQuality',
'cnInfoLabelFontThicknessF', 'cnInfoLabelFormat',
'cnInfoLabelFuncCode', 'cnInfoLabelJust', 'cnInfoLabelOn',
'cnInfoLabelOrthogonalPosF', 'cnInfoLabelParallelPosF',
'cnInfoLabelPerimColor', 'cnInfoLabelPerimOn',
'cnInfoLabelPerimSpaceF', 'cnInfoLabelPerimThicknessF',
'cnInfoLabelSide', 'cnInfoLabelString', 'cnInfoLabelTextDirection',
'cnInfoLabelZone', 'cnLabelBarEndLabelsOn', 'cnLabelBarEndStyle',
'cnLabelDrawOrder', 'cnLabelMasking', 'cnLabelScaleFactorF',
'cnLabelScaleValueF', 'cnLabelScalingMode', 'cnLegendLevelFlags',
'cnLevelCount', 'cnLevelFlag', 'cnLevelFlags', 'cnLevelSelectionMode',
'cnLevelSpacingF', 'cnLevels', 'cnLineColor', 'cnLineColors',
'cnLineDashPattern', 'cnLineDashPatterns', 'cnLineDashSegLenF',
'cnLineDrawOrder', 'cnLineLabelAngleF', 'cnLineLabelBackgroundColor',
'cnLineLabelConstantSpacingF', 'cnLineLabelCount',
'cnLineLabelDensityF', 'cnLineLabelFont', 'cnLineLabelFontAspectF',
'cnLineLabelFontColor', 'cnLineLabelFontColors',
'cnLineLabelFontHeightF', 'cnLineLabelFontQuality',
'cnLineLabelFontThicknessF', 'cnLineLabelFormat',
'cnLineLabelFuncCode', 'cnLineLabelInterval', 'cnLineLabelPerimColor',
'cnLineLabelPerimOn', 'cnLineLabelPerimSpaceF',
'cnLineLabelPerimThicknessF', 'cnLineLabelPlacementMode',
'cnLineLabelStrings', 'cnLineLabelsOn', 'cnLinePalette',
'cnLineThicknessF', 'cnLineThicknesses', 'cnLinesOn',
'cnLowLabelAngleF', 'cnLowLabelBackgroundColor',
'cnLowLabelConstantSpacingF', 'cnLowLabelCount', 'cnLowLabelFont',
'cnLowLabelFontAspectF', 'cnLowLabelFontColor',
'cnLowLabelFontHeightF', 'cnLowLabelFontQuality',
'cnLowLabelFontThicknessF', 'cnLowLabelFormat', 'cnLowLabelFuncCode',
'cnLowLabelPerimColor', 'cnLowLabelPerimOn', 'cnLowLabelPerimSpaceF',
'cnLowLabelPerimThicknessF', 'cnLowLabelString', 'cnLowLabelsOn',
'cnLowUseHighLabelRes', 'cnMaxDataValueFormat', 'cnMaxLevelCount',
'cnMaxLevelValF', 'cnMaxPointDistanceF', 'cnMinLevelValF',
'cnMissingValFillColor', 'cnMissingValFillPattern',
'cnMissingValFillScaleF', 'cnMissingValPerimColor',
'cnMissingValPerimDashPattern', 'cnMissingValPerimGridBoundOn',
'cnMissingValPerimOn', 'cnMissingValPerimThicknessF',
'cnMonoFillColor', 'cnMonoFillPattern', 'cnMonoFillScale',
'cnMonoLevelFlag', 'cnMonoLineColor', 'cnMonoLineDashPattern',
'cnMonoLineLabelFontColor', 'cnMonoLineThickness', 'cnNoDataLabelOn',
'cnNoDataLabelString', 'cnOutOfRangeFillColor',
'cnOutOfRangeFillPattern', 'cnOutOfRangeFillScaleF',
'cnOutOfRangePerimColor', 'cnOutOfRangePerimDashPattern',
'cnOutOfRangePerimOn', 'cnOutOfRangePerimThicknessF',
'cnRasterCellSizeF', 'cnRasterMinCellSizeF', 'cnRasterModeOn',
'cnRasterSampleFactorF', 'cnRasterSmoothingOn', 'cnScalarFieldData',
'cnSmoothingDistanceF', 'cnSmoothingOn', 'cnSmoothingTensionF',
'cnSpanFillPalette', 'cnSpanLinePalette', 'ctCopyTables',
'ctXElementSize', 'ctXMaxV', 'ctXMinV', 'ctXMissingV', 'ctXTable',
'ctXTableLengths', 'ctXTableType', 'ctYElementSize', 'ctYMaxV',
'ctYMinV', 'ctYMissingV', 'ctYTable', 'ctYTableLengths',
'ctYTableType', 'dcDelayCompute', 'errBuffer',
'errFileName', 'errFilePtr', 'errLevel', 'errPrint', 'errUnitNumber',
'gsClipOn', 'gsColors', 'gsEdgeColor', 'gsEdgeDashPattern',
'gsEdgeDashSegLenF', 'gsEdgeThicknessF', 'gsEdgesOn',
'gsFillBackgroundColor', 'gsFillColor', 'gsFillDotSizeF',
'gsFillIndex', 'gsFillLineThicknessF', 'gsFillOpacityF',
'gsFillScaleF', 'gsFont', 'gsFontAspectF', 'gsFontColor',
'gsFontHeightF', 'gsFontOpacityF', 'gsFontQuality',
'gsFontThicknessF', 'gsLineColor', 'gsLineDashPattern',
'gsLineDashSegLenF', 'gsLineLabelConstantSpacingF', 'gsLineLabelFont',
'gsLineLabelFontAspectF', 'gsLineLabelFontColor',
'gsLineLabelFontHeightF', 'gsLineLabelFontQuality',
'gsLineLabelFontThicknessF', 'gsLineLabelFuncCode',
'gsLineLabelString', 'gsLineOpacityF', 'gsLineThicknessF',
'gsMarkerColor', 'gsMarkerIndex', 'gsMarkerOpacityF', 'gsMarkerSizeF',
'gsMarkerThicknessF', 'gsSegments', 'gsTextAngleF',
'gsTextConstantSpacingF', 'gsTextDirection', 'gsTextFuncCode',
'gsTextJustification', 'gsnAboveYRefLineBarColors',
'gsnAboveYRefLineBarFillScales', 'gsnAboveYRefLineBarPatterns',
'gsnAboveYRefLineColor', 'gsnAddCyclic', 'gsnAttachBorderOn',
'gsnAttachPlotsXAxis', 'gsnBelowYRefLineBarColors',
'gsnBelowYRefLineBarFillScales', 'gsnBelowYRefLineBarPatterns',
'gsnBelowYRefLineColor', 'gsnBoxMargin', 'gsnCenterString',
'gsnCenterStringFontColor', 'gsnCenterStringFontHeightF',
'gsnCenterStringFuncCode', 'gsnCenterStringOrthogonalPosF',
'gsnCenterStringParallelPosF', 'gsnContourLineThicknessesScale',
'gsnContourNegLineDashPattern', 'gsnContourPosLineDashPattern',
'gsnContourZeroLineThicknessF', 'gsnDebugWriteFileName', 'gsnDraw',
'gsnFrame', 'gsnHistogramBarWidthPercent', 'gsnHistogramBinIntervals',
'gsnHistogramBinMissing', 'gsnHistogramBinWidth',
'gsnHistogramClassIntervals', 'gsnHistogramCompare',
'gsnHistogramComputePercentages',
'gsnHistogramComputePercentagesNoMissing',
'gsnHistogramDiscreteBinValues', 'gsnHistogramDiscreteClassValues',
'gsnHistogramHorizontal', 'gsnHistogramMinMaxBinsOn',
'gsnHistogramNumberOfBins', 'gsnHistogramPercentSign',
'gsnHistogramSelectNiceIntervals', 'gsnLeftString',
'gsnLeftStringFontColor', 'gsnLeftStringFontHeightF',
'gsnLeftStringFuncCode', 'gsnLeftStringOrthogonalPosF',
'gsnLeftStringParallelPosF', 'gsnMajorLatSpacing',
'gsnMajorLonSpacing', 'gsnMaskLambertConformal',
'gsnMaskLambertConformalOutlineOn', 'gsnMaximize',
'gsnMinorLatSpacing', 'gsnMinorLonSpacing', 'gsnPanelBottom',
'gsnPanelCenter', 'gsnPanelDebug', 'gsnPanelFigureStrings',
'gsnPanelFigureStringsBackgroundFillColor',
'gsnPanelFigureStringsFontHeightF', 'gsnPanelFigureStringsJust',
'gsnPanelFigureStringsPerimOn', 'gsnPanelLabelBar', 'gsnPanelLeft',
'gsnPanelMainFont', 'gsnPanelMainFontColor',
'gsnPanelMainFontHeightF', 'gsnPanelMainString', 'gsnPanelRight',
'gsnPanelRowSpec', 'gsnPanelScalePlotIndex', 'gsnPanelTop',
'gsnPanelXF', 'gsnPanelXWhiteSpacePercent', 'gsnPanelYF',
'gsnPanelYWhiteSpacePercent', 'gsnPaperHeight', 'gsnPaperMargin',
'gsnPaperOrientation', 'gsnPaperWidth', 'gsnPolar',
'gsnPolarLabelDistance', 'gsnPolarLabelFont',
'gsnPolarLabelFontHeightF', 'gsnPolarLabelSpacing', 'gsnPolarTime',
'gsnPolarUT', 'gsnRightString', 'gsnRightStringFontColor',
'gsnRightStringFontHeightF', 'gsnRightStringFuncCode',
'gsnRightStringOrthogonalPosF', 'gsnRightStringParallelPosF',
'gsnScalarContour', 'gsnScale', 'gsnShape', 'gsnSpreadColorEnd',
'gsnSpreadColorStart', 'gsnSpreadColors', 'gsnStringFont',
'gsnStringFontColor', 'gsnStringFontHeightF', 'gsnStringFuncCode',
'gsnTickMarksOn', 'gsnXAxisIrregular2Linear', 'gsnXAxisIrregular2Log',
'gsnXRefLine', 'gsnXRefLineColor', 'gsnXRefLineDashPattern',
'gsnXRefLineThicknessF', 'gsnXYAboveFillColors', 'gsnXYBarChart',
'gsnXYBarChartBarWidth', 'gsnXYBarChartColors',
'gsnXYBarChartColors2', 'gsnXYBarChartFillDotSizeF',
'gsnXYBarChartFillLineThicknessF', 'gsnXYBarChartFillOpacityF',
'gsnXYBarChartFillScaleF', 'gsnXYBarChartOutlineOnly',
'gsnXYBarChartOutlineThicknessF', 'gsnXYBarChartPatterns',
'gsnXYBarChartPatterns2', 'gsnXYBelowFillColors', 'gsnXYFillColors',
'gsnXYFillOpacities', 'gsnXYLeftFillColors', 'gsnXYRightFillColors',
'gsnYAxisIrregular2Linear', 'gsnYAxisIrregular2Log', 'gsnYRefLine',
'gsnYRefLineColor', 'gsnYRefLineColors', 'gsnYRefLineDashPattern',
'gsnYRefLineDashPatterns', 'gsnYRefLineThicknessF',
'gsnYRefLineThicknesses', 'gsnZonalMean', 'gsnZonalMeanXMaxF',
'gsnZonalMeanXMinF', 'gsnZonalMeanYRefLine', 'lbAutoManage',
'lbBottomMarginF', 'lbBoxCount', 'lbBoxEndCapStyle', 'lbBoxFractions',
'lbBoxLineColor', 'lbBoxLineDashPattern', 'lbBoxLineDashSegLenF',
'lbBoxLineThicknessF', 'lbBoxLinesOn', 'lbBoxMajorExtentF',
'lbBoxMinorExtentF', 'lbBoxSeparatorLinesOn', 'lbBoxSizing',
'lbFillBackground', 'lbFillColor', 'lbFillColors', 'lbFillDotSizeF',
'lbFillLineThicknessF', 'lbFillPattern', 'lbFillPatterns',
'lbFillScaleF', 'lbFillScales', 'lbJustification', 'lbLabelAlignment',
'lbLabelAngleF', 'lbLabelAutoStride', 'lbLabelBarOn',
'lbLabelConstantSpacingF', 'lbLabelDirection', 'lbLabelFont',
'lbLabelFontAspectF', 'lbLabelFontColor', 'lbLabelFontHeightF',
'lbLabelFontQuality', 'lbLabelFontThicknessF', 'lbLabelFuncCode',
'lbLabelJust', 'lbLabelOffsetF', 'lbLabelPosition', 'lbLabelStride',
'lbLabelStrings', 'lbLabelsOn', 'lbLeftMarginF', 'lbMaxLabelLenF',
'lbMinLabelSpacingF', 'lbMonoFillColor', 'lbMonoFillPattern',
'lbMonoFillScale', 'lbOrientation', 'lbPerimColor',
'lbPerimDashPattern', 'lbPerimDashSegLenF', 'lbPerimFill',
'lbPerimFillColor', 'lbPerimOn', 'lbPerimThicknessF',
'lbRasterFillOn', 'lbRightMarginF', 'lbTitleAngleF',
'lbTitleConstantSpacingF', 'lbTitleDirection', 'lbTitleExtentF',
'lbTitleFont', 'lbTitleFontAspectF', 'lbTitleFontColor',
'lbTitleFontHeightF', 'lbTitleFontQuality', 'lbTitleFontThicknessF',
'lbTitleFuncCode', 'lbTitleJust', 'lbTitleOffsetF', 'lbTitleOn',
'lbTitlePosition', 'lbTitleString', 'lbTopMarginF', 'lgAutoManage',
'lgBottomMarginF', 'lgBoxBackground', 'lgBoxLineColor',
'lgBoxLineDashPattern', 'lgBoxLineDashSegLenF', 'lgBoxLineThicknessF',
'lgBoxLinesOn', 'lgBoxMajorExtentF', 'lgBoxMinorExtentF',
'lgDashIndex', 'lgDashIndexes', 'lgItemCount', 'lgItemOrder',
'lgItemPlacement', 'lgItemPositions', 'lgItemType', 'lgItemTypes',
'lgJustification', 'lgLabelAlignment', 'lgLabelAngleF',
'lgLabelAutoStride', 'lgLabelConstantSpacingF', 'lgLabelDirection',
'lgLabelFont', 'lgLabelFontAspectF', 'lgLabelFontColor',
'lgLabelFontHeightF', 'lgLabelFontQuality', 'lgLabelFontThicknessF',
'lgLabelFuncCode', 'lgLabelJust', 'lgLabelOffsetF', 'lgLabelPosition',
'lgLabelStride', 'lgLabelStrings', 'lgLabelsOn', 'lgLeftMarginF',
'lgLegendOn', 'lgLineColor', 'lgLineColors', 'lgLineDashSegLenF',
'lgLineDashSegLens', 'lgLineLabelConstantSpacingF', 'lgLineLabelFont',
'lgLineLabelFontAspectF', 'lgLineLabelFontColor',
'lgLineLabelFontColors', 'lgLineLabelFontHeightF',
'lgLineLabelFontHeights', 'lgLineLabelFontQuality',
'lgLineLabelFontThicknessF', 'lgLineLabelFuncCode',
'lgLineLabelStrings', 'lgLineLabelsOn', 'lgLineThicknessF',
'lgLineThicknesses', 'lgMarkerColor', 'lgMarkerColors',
'lgMarkerIndex', 'lgMarkerIndexes', 'lgMarkerSizeF', 'lgMarkerSizes',
'lgMarkerThicknessF', 'lgMarkerThicknesses', 'lgMonoDashIndex',
'lgMonoItemType', 'lgMonoLineColor', 'lgMonoLineDashSegLen',
'lgMonoLineLabelFontColor', 'lgMonoLineLabelFontHeight',
'lgMonoLineThickness', 'lgMonoMarkerColor', 'lgMonoMarkerIndex',
'lgMonoMarkerSize', 'lgMonoMarkerThickness', 'lgOrientation',
'lgPerimColor', 'lgPerimDashPattern', 'lgPerimDashSegLenF',
'lgPerimFill', 'lgPerimFillColor', 'lgPerimOn', 'lgPerimThicknessF',
'lgRightMarginF', 'lgTitleAngleF', 'lgTitleConstantSpacingF',
'lgTitleDirection', 'lgTitleExtentF', 'lgTitleFont',
'lgTitleFontAspectF', 'lgTitleFontColor', 'lgTitleFontHeightF',
'lgTitleFontQuality', 'lgTitleFontThicknessF', 'lgTitleFuncCode',
'lgTitleJust', 'lgTitleOffsetF', 'lgTitleOn', 'lgTitlePosition',
'lgTitleString', 'lgTopMarginF', 'mpAreaGroupCount',
'mpAreaMaskingOn', 'mpAreaNames', 'mpAreaTypes', 'mpBottomAngleF',
'mpBottomMapPosF', 'mpBottomNDCF', 'mpBottomNPCF',
'mpBottomPointLatF', 'mpBottomPointLonF', 'mpBottomWindowF',
'mpCenterLatF', 'mpCenterLonF', 'mpCenterRotF', 'mpCountyLineColor',
'mpCountyLineDashPattern', 'mpCountyLineDashSegLenF',
'mpCountyLineThicknessF', 'mpDataBaseVersion', 'mpDataResolution',
'mpDataSetName', 'mpDefaultFillColor', 'mpDefaultFillPattern',
'mpDefaultFillScaleF', 'mpDynamicAreaGroups', 'mpEllipticalBoundary',
'mpFillAreaSpecifiers', 'mpFillBoundarySets', 'mpFillColor',
'mpFillColors', 'mpFillColors-default', 'mpFillDotSizeF',
'mpFillDrawOrder', 'mpFillOn', 'mpFillPatternBackground',
'mpFillPattern', 'mpFillPatterns', 'mpFillPatterns-default',
'mpFillScaleF', 'mpFillScales', 'mpFillScales-default',
'mpFixedAreaGroups', 'mpGeophysicalLineColor',
'mpGeophysicalLineDashPattern', 'mpGeophysicalLineDashSegLenF',
'mpGeophysicalLineThicknessF', 'mpGreatCircleLinesOn',
'mpGridAndLimbDrawOrder', 'mpGridAndLimbOn', 'mpGridLatSpacingF',
'mpGridLineColor', 'mpGridLineDashPattern', 'mpGridLineDashSegLenF',
'mpGridLineThicknessF', 'mpGridLonSpacingF', 'mpGridMaskMode',
'mpGridMaxLatF', 'mpGridPolarLonSpacingF', 'mpGridSpacingF',
'mpInlandWaterFillColor', 'mpInlandWaterFillPattern',
'mpInlandWaterFillScaleF', 'mpLabelDrawOrder', 'mpLabelFontColor',
'mpLabelFontHeightF', 'mpLabelsOn', 'mpLambertMeridianF',
'mpLambertParallel1F', 'mpLambertParallel2F', 'mpLandFillColor',
'mpLandFillPattern', 'mpLandFillScaleF', 'mpLeftAngleF',
'mpLeftCornerLatF', 'mpLeftCornerLonF', 'mpLeftMapPosF',
'mpLeftNDCF', 'mpLeftNPCF', 'mpLeftPointLatF',
'mpLeftPointLonF', 'mpLeftWindowF', 'mpLimbLineColor',
'mpLimbLineDashPattern', 'mpLimbLineDashSegLenF',
'mpLimbLineThicknessF', 'mpLimitMode', 'mpMaskAreaSpecifiers',
'mpMaskOutlineSpecifiers', 'mpMaxLatF', 'mpMaxLonF',
'mpMinLatF', 'mpMinLonF', 'mpMonoFillColor', 'mpMonoFillPattern',
'mpMonoFillScale', 'mpNationalLineColor', 'mpNationalLineDashPattern',
'mpNationalLineThicknessF', 'mpOceanFillColor', 'mpOceanFillPattern',
'mpOceanFillScaleF', 'mpOutlineBoundarySets', 'mpOutlineDrawOrder',
'mpOutlineMaskingOn', 'mpOutlineOn', 'mpOutlineSpecifiers',
'mpPerimDrawOrder', 'mpPerimLineColor', 'mpPerimLineDashPattern',
'mpPerimLineDashSegLenF', 'mpPerimLineThicknessF', 'mpPerimOn',
'mpPolyMode', 'mpProjection', 'mpProvincialLineColor',
'mpProvincialLineDashPattern', 'mpProvincialLineDashSegLenF',
'mpProvincialLineThicknessF', 'mpRelativeCenterLat',
'mpRelativeCenterLon', 'mpRightAngleF', 'mpRightCornerLatF',
'mpRightCornerLonF', 'mpRightMapPosF', 'mpRightNDCF',
'mpRightNPCF', 'mpRightPointLatF', 'mpRightPointLonF',
'mpRightWindowF', 'mpSatelliteAngle1F', 'mpSatelliteAngle2F',
'mpSatelliteDistF', 'mpShapeMode', 'mpSpecifiedFillColors',
'mpSpecifiedFillDirectIndexing', 'mpSpecifiedFillPatterns',
'mpSpecifiedFillPriority', 'mpSpecifiedFillScales',
'mpTopAngleF', 'mpTopMapPosF', 'mpTopNDCF', 'mpTopNPCF',
'mpTopPointLatF', 'mpTopPointLonF', 'mpTopWindowF',
'mpUSStateLineColor', 'mpUSStateLineDashPattern',
'mpUSStateLineDashSegLenF', 'mpUSStateLineThicknessF',
'pmAnnoManagers', 'pmAnnoViews', 'pmLabelBarDisplayMode',
'pmLabelBarHeightF', 'pmLabelBarKeepAspect', 'pmLabelBarOrthogonalPosF',
'pmLabelBarParallelPosF', 'pmLabelBarSide', 'pmLabelBarWidthF',
'pmLabelBarZone', 'pmLegendDisplayMode', 'pmLegendHeightF',
'pmLegendKeepAspect', 'pmLegendOrthogonalPosF',
'pmLegendParallelPosF', 'pmLegendSide', 'pmLegendWidthF',
'pmLegendZone', 'pmOverlaySequenceIds', 'pmTickMarkDisplayMode',
'pmTickMarkZone', 'pmTitleDisplayMode', 'pmTitleZone',
'prGraphicStyle', 'prPolyType', 'prXArray', 'prYArray',
'sfCopyData', 'sfDataArray', 'sfDataMaxV', 'sfDataMinV',
'sfElementNodes', 'sfExchangeDimensions', 'sfFirstNodeIndex',
'sfMissingValueV', 'sfXArray', 'sfXCActualEndF', 'sfXCActualStartF',
'sfXCEndIndex', 'sfXCEndSubsetV', 'sfXCEndV', 'sfXCStartIndex',
'sfXCStartSubsetV', 'sfXCStartV', 'sfXCStride', 'sfXCellBounds',
'sfYArray', 'sfYCActualEndF', 'sfYCActualStartF', 'sfYCEndIndex',
'sfYCEndSubsetV', 'sfYCEndV', 'sfYCStartIndex', 'sfYCStartSubsetV',
'sfYCStartV', 'sfYCStride', 'sfYCellBounds', 'stArrowLengthF',
'stArrowStride', 'stCrossoverCheckCount',
'stExplicitLabelBarLabelsOn', 'stLabelBarEndLabelsOn',
'stLabelFormat', 'stLengthCheckCount', 'stLevelColors',
'stLevelCount', 'stLevelPalette', 'stLevelSelectionMode',
'stLevelSpacingF', 'stLevels', 'stLineColor', 'stLineOpacityF',
'stLineStartStride', 'stLineThicknessF', 'stMapDirection',
'stMaxLevelCount', 'stMaxLevelValF', 'stMinArrowSpacingF',
'stMinDistanceF', 'stMinLevelValF', 'stMinLineSpacingF',
'stMinStepFactorF', 'stMonoLineColor', 'stNoDataLabelOn',
'stNoDataLabelString', 'stScalarFieldData', 'stScalarMissingValColor',
'stSpanLevelPalette', 'stStepSizeF', 'stStreamlineDrawOrder',
'stUseScalarArray', 'stVectorFieldData', 'stZeroFLabelAngleF',
'stZeroFLabelBackgroundColor', 'stZeroFLabelConstantSpacingF',
'stZeroFLabelFont', 'stZeroFLabelFontAspectF',
'stZeroFLabelFontColor', 'stZeroFLabelFontHeightF',
'stZeroFLabelFontQuality', 'stZeroFLabelFontThicknessF',
'stZeroFLabelFuncCode', 'stZeroFLabelJust', 'stZeroFLabelOn',
'stZeroFLabelOrthogonalPosF', 'stZeroFLabelParallelPosF',
'stZeroFLabelPerimColor', 'stZeroFLabelPerimOn',
'stZeroFLabelPerimSpaceF', 'stZeroFLabelPerimThicknessF',
'stZeroFLabelSide', 'stZeroFLabelString', 'stZeroFLabelTextDirection',
'stZeroFLabelZone', 'tfDoNDCOverlay', 'tfPlotManagerOn',
'tfPolyDrawList', 'tfPolyDrawOrder', 'tiDeltaF', 'tiMainAngleF',
'tiMainConstantSpacingF', 'tiMainDirection', 'tiMainFont',
'tiMainFontAspectF', 'tiMainFontColor', 'tiMainFontHeightF',
'tiMainFontQuality', 'tiMainFontThicknessF', 'tiMainFuncCode',
'tiMainJust', 'tiMainOffsetXF', 'tiMainOffsetYF', 'tiMainOn',
'tiMainPosition', 'tiMainSide', 'tiMainString', 'tiUseMainAttributes',
'tiXAxisAngleF', 'tiXAxisConstantSpacingF', 'tiXAxisDirection',
'tiXAxisFont', 'tiXAxisFontAspectF', 'tiXAxisFontColor',
'tiXAxisFontHeightF', 'tiXAxisFontQuality', 'tiXAxisFontThicknessF',
'tiXAxisFuncCode', 'tiXAxisJust', 'tiXAxisOffsetXF',
'tiXAxisOffsetYF', 'tiXAxisOn', 'tiXAxisPosition', 'tiXAxisSide',
'tiXAxisString', 'tiYAxisAngleF', 'tiYAxisConstantSpacingF',
'tiYAxisDirection', 'tiYAxisFont', 'tiYAxisFontAspectF',
'tiYAxisFontColor', 'tiYAxisFontHeightF', 'tiYAxisFontQuality',
'tiYAxisFontThicknessF', 'tiYAxisFuncCode', 'tiYAxisJust',
'tiYAxisOffsetXF', 'tiYAxisOffsetYF', 'tiYAxisOn', 'tiYAxisPosition',
'tiYAxisSide', 'tiYAxisString', 'tmBorderLineColor',
'tmBorderThicknessF', 'tmEqualizeXYSizes', 'tmLabelAutoStride',
'tmSciNoteCutoff', 'tmXBAutoPrecision', 'tmXBBorderOn',
'tmXBDataLeftF', 'tmXBDataRightF', 'tmXBFormat', 'tmXBIrrTensionF',
'tmXBIrregularPoints', 'tmXBLabelAngleF', 'tmXBLabelConstantSpacingF',
'tmXBLabelDeltaF', 'tmXBLabelDirection', 'tmXBLabelFont',
'tmXBLabelFontAspectF', 'tmXBLabelFontColor', 'tmXBLabelFontHeightF',
'tmXBLabelFontQuality', 'tmXBLabelFontThicknessF',
'tmXBLabelFuncCode', 'tmXBLabelJust', 'tmXBLabelStride', 'tmXBLabels',
'tmXBLabelsOn', 'tmXBMajorLengthF', 'tmXBMajorLineColor',
'tmXBMajorOutwardLengthF', 'tmXBMajorThicknessF', 'tmXBMaxLabelLenF',
'tmXBMaxTicks', 'tmXBMinLabelSpacingF', 'tmXBMinorLengthF',
'tmXBMinorLineColor', 'tmXBMinorOn', 'tmXBMinorOutwardLengthF',
'tmXBMinorPerMajor', 'tmXBMinorThicknessF', 'tmXBMinorValues',
'tmXBMode', 'tmXBOn', 'tmXBPrecision', 'tmXBStyle', 'tmXBTickEndF',
'tmXBTickSpacingF', 'tmXBTickStartF', 'tmXBValues', 'tmXMajorGrid',
'tmXMajorGridLineColor', 'tmXMajorGridLineDashPattern',
'tmXMajorGridThicknessF', 'tmXMinorGrid', 'tmXMinorGridLineColor',
'tmXMinorGridLineDashPattern', 'tmXMinorGridThicknessF',
'tmXTAutoPrecision', 'tmXTBorderOn', 'tmXTDataLeftF',
'tmXTDataRightF', 'tmXTFormat', 'tmXTIrrTensionF',
'tmXTIrregularPoints', 'tmXTLabelAngleF', 'tmXTLabelConstantSpacingF',
'tmXTLabelDeltaF', 'tmXTLabelDirection', 'tmXTLabelFont',
'tmXTLabelFontAspectF', 'tmXTLabelFontColor', 'tmXTLabelFontHeightF',
'tmXTLabelFontQuality', 'tmXTLabelFontThicknessF',
'tmXTLabelFuncCode', 'tmXTLabelJust', 'tmXTLabelStride', 'tmXTLabels',
'tmXTLabelsOn', 'tmXTMajorLengthF', 'tmXTMajorLineColor',
'tmXTMajorOutwardLengthF', 'tmXTMajorThicknessF', 'tmXTMaxLabelLenF',
'tmXTMaxTicks', 'tmXTMinLabelSpacingF', 'tmXTMinorLengthF',
'tmXTMinorLineColor', 'tmXTMinorOn', 'tmXTMinorOutwardLengthF',
'tmXTMinorPerMajor', 'tmXTMinorThicknessF', 'tmXTMinorValues',
'tmXTMode', 'tmXTOn', 'tmXTPrecision', 'tmXTStyle', 'tmXTTickEndF',
'tmXTTickSpacingF', 'tmXTTickStartF', 'tmXTValues', 'tmXUseBottom',
'tmYLAutoPrecision', 'tmYLBorderOn', 'tmYLDataBottomF',
'tmYLDataTopF', 'tmYLFormat', 'tmYLIrrTensionF',
'tmYLIrregularPoints', 'tmYLLabelAngleF', 'tmYLLabelConstantSpacingF',
'tmYLLabelDeltaF', 'tmYLLabelDirection', 'tmYLLabelFont',
'tmYLLabelFontAspectF', 'tmYLLabelFontColor', 'tmYLLabelFontHeightF',
'tmYLLabelFontQuality', 'tmYLLabelFontThicknessF',
'tmYLLabelFuncCode', 'tmYLLabelJust', 'tmYLLabelStride', 'tmYLLabels',
'tmYLLabelsOn', 'tmYLMajorLengthF', 'tmYLMajorLineColor',
'tmYLMajorOutwardLengthF', 'tmYLMajorThicknessF', 'tmYLMaxLabelLenF',
'tmYLMaxTicks', 'tmYLMinLabelSpacingF', 'tmYLMinorLengthF',
'tmYLMinorLineColor', 'tmYLMinorOn', 'tmYLMinorOutwardLengthF',
'tmYLMinorPerMajor', 'tmYLMinorThicknessF', 'tmYLMinorValues',
'tmYLMode', 'tmYLOn', 'tmYLPrecision', 'tmYLStyle', 'tmYLTickEndF',
'tmYLTickSpacingF', 'tmYLTickStartF', 'tmYLValues', 'tmYMajorGrid',
'tmYMajorGridLineColor', 'tmYMajorGridLineDashPattern',
'tmYMajorGridThicknessF', 'tmYMinorGrid', 'tmYMinorGridLineColor',
'tmYMinorGridLineDashPattern', 'tmYMinorGridThicknessF',
'tmYRAutoPrecision', 'tmYRBorderOn', 'tmYRDataBottomF',
'tmYRDataTopF', 'tmYRFormat', 'tmYRIrrTensionF',
'tmYRIrregularPoints', 'tmYRLabelAngleF', 'tmYRLabelConstantSpacingF',
'tmYRLabelDeltaF', 'tmYRLabelDirection', 'tmYRLabelFont',
'tmYRLabelFontAspectF', 'tmYRLabelFontColor', 'tmYRLabelFontHeightF',
'tmYRLabelFontQuality', 'tmYRLabelFontThicknessF',
'tmYRLabelFuncCode', 'tmYRLabelJust', 'tmYRLabelStride', 'tmYRLabels',
'tmYRLabelsOn', 'tmYRMajorLengthF', 'tmYRMajorLineColor',
'tmYRMajorOutwardLengthF', 'tmYRMajorThicknessF', 'tmYRMaxLabelLenF',
'tmYRMaxTicks', 'tmYRMinLabelSpacingF', 'tmYRMinorLengthF',
'tmYRMinorLineColor', 'tmYRMinorOn', 'tmYRMinorOutwardLengthF',
'tmYRMinorPerMajor', 'tmYRMinorThicknessF', 'tmYRMinorValues',
'tmYRMode', 'tmYROn', 'tmYRPrecision', 'tmYRStyle', 'tmYRTickEndF',
'tmYRTickSpacingF', 'tmYRTickStartF', 'tmYRValues', 'tmYUseLeft',
'trGridType', 'trLineInterpolationOn',
'trXAxisType', 'trXCoordPoints', 'trXInterPoints', 'trXLog',
'trXMaxF', 'trXMinF', 'trXReverse', 'trXSamples', 'trXTensionF',
'trYAxisType', 'trYCoordPoints', 'trYInterPoints', 'trYLog',
'trYMaxF', 'trYMinF', 'trYReverse', 'trYSamples', 'trYTensionF',
'txAngleF', 'txBackgroundFillColor', 'txConstantSpacingF', 'txDirection',
'txFont', 'HLU-Fonts', 'txFontAspectF', 'txFontColor',
'txFontHeightF', 'txFontOpacityF', 'txFontQuality',
'txFontThicknessF', 'txFuncCode', 'txJust', 'txPerimColor',
'txPerimDashLengthF', 'txPerimDashPattern', 'txPerimOn',
'txPerimSpaceF', 'txPerimThicknessF', 'txPosXF', 'txPosYF',
'txString', 'vcExplicitLabelBarLabelsOn', 'vcFillArrowEdgeColor',
'vcFillArrowEdgeThicknessF', 'vcFillArrowFillColor',
'vcFillArrowHeadInteriorXF', 'vcFillArrowHeadMinFracXF',
'vcFillArrowHeadMinFracYF', 'vcFillArrowHeadXF', 'vcFillArrowHeadYF',
'vcFillArrowMinFracWidthF', 'vcFillArrowWidthF', 'vcFillArrowsOn',
'vcFillOverEdge', 'vcGlyphOpacityF', 'vcGlyphStyle',
'vcLabelBarEndLabelsOn', 'vcLabelFontColor', 'vcLabelFontHeightF',
'vcLabelsOn', 'vcLabelsUseVectorColor', 'vcLevelColors',
'vcLevelCount', 'vcLevelPalette', 'vcLevelSelectionMode',
'vcLevelSpacingF', 'vcLevels', 'vcLineArrowColor',
'vcLineArrowHeadMaxSizeF', 'vcLineArrowHeadMinSizeF',
'vcLineArrowThicknessF', 'vcMagnitudeFormat',
'vcMagnitudeScaleFactorF', 'vcMagnitudeScaleValueF',
'vcMagnitudeScalingMode', 'vcMapDirection', 'vcMaxLevelCount',
'vcMaxLevelValF', 'vcMaxMagnitudeF', 'vcMinAnnoAngleF',
'vcMinAnnoArrowAngleF', 'vcMinAnnoArrowEdgeColor',
'vcMinAnnoArrowFillColor', 'vcMinAnnoArrowLineColor',
'vcMinAnnoArrowMinOffsetF', 'vcMinAnnoArrowSpaceF',
'vcMinAnnoArrowUseVecColor', 'vcMinAnnoBackgroundColor',
'vcMinAnnoConstantSpacingF', 'vcMinAnnoExplicitMagnitudeF',
'vcMinAnnoFont', 'vcMinAnnoFontAspectF', 'vcMinAnnoFontColor',
'vcMinAnnoFontHeightF', 'vcMinAnnoFontQuality',
'vcMinAnnoFontThicknessF', 'vcMinAnnoFuncCode', 'vcMinAnnoJust',
'vcMinAnnoOn', 'vcMinAnnoOrientation', 'vcMinAnnoOrthogonalPosF',
'vcMinAnnoParallelPosF', 'vcMinAnnoPerimColor', 'vcMinAnnoPerimOn',
'vcMinAnnoPerimSpaceF', 'vcMinAnnoPerimThicknessF', 'vcMinAnnoSide',
'vcMinAnnoString1', 'vcMinAnnoString1On', 'vcMinAnnoString2',
'vcMinAnnoString2On', 'vcMinAnnoTextDirection', 'vcMinAnnoZone',
'vcMinDistanceF', 'vcMinFracLengthF', 'vcMinLevelValF',
'vcMinMagnitudeF', 'vcMonoFillArrowEdgeColor',
'vcMonoFillArrowFillColor', 'vcMonoLineArrowColor',
'vcMonoWindBarbColor', 'vcNoDataLabelOn', 'vcNoDataLabelString',
'vcPositionMode', 'vcRefAnnoAngleF', 'vcRefAnnoArrowAngleF',
'vcRefAnnoArrowEdgeColor', 'vcRefAnnoArrowFillColor',
'vcRefAnnoArrowLineColor', 'vcRefAnnoArrowMinOffsetF',
'vcRefAnnoArrowSpaceF', 'vcRefAnnoArrowUseVecColor',
'vcRefAnnoBackgroundColor', 'vcRefAnnoConstantSpacingF',
'vcRefAnnoExplicitMagnitudeF', 'vcRefAnnoFont',
'vcRefAnnoFontAspectF', 'vcRefAnnoFontColor', 'vcRefAnnoFontHeightF',
'vcRefAnnoFontQuality', 'vcRefAnnoFontThicknessF',
'vcRefAnnoFuncCode', 'vcRefAnnoJust', 'vcRefAnnoOn',
'vcRefAnnoOrientation', 'vcRefAnnoOrthogonalPosF',
'vcRefAnnoParallelPosF', 'vcRefAnnoPerimColor', 'vcRefAnnoPerimOn',
'vcRefAnnoPerimSpaceF', 'vcRefAnnoPerimThicknessF', 'vcRefAnnoSide',
'vcRefAnnoString1', 'vcRefAnnoString1On', 'vcRefAnnoString2',
'vcRefAnnoString2On', 'vcRefAnnoTextDirection', 'vcRefAnnoZone',
'vcRefLengthF', 'vcRefMagnitudeF', 'vcScalarFieldData',
'vcScalarMissingValColor', 'vcScalarValueFormat',
'vcScalarValueScaleFactorF', 'vcScalarValueScaleValueF',
'vcScalarValueScalingMode', 'vcSpanLevelPalette', 'vcUseRefAnnoRes',
'vcUseScalarArray', 'vcVectorDrawOrder', 'vcVectorFieldData',
'vcWindBarbCalmCircleSizeF', 'vcWindBarbColor',
'vcWindBarbLineThicknessF', 'vcWindBarbScaleFactorF',
'vcWindBarbTickAngleF', 'vcWindBarbTickLengthF',
'vcWindBarbTickSpacingF', 'vcZeroFLabelAngleF',
'vcZeroFLabelBackgroundColor', 'vcZeroFLabelConstantSpacingF',
'vcZeroFLabelFont', 'vcZeroFLabelFontAspectF',
'vcZeroFLabelFontColor', 'vcZeroFLabelFontHeightF',
'vcZeroFLabelFontQuality', 'vcZeroFLabelFontThicknessF',
'vcZeroFLabelFuncCode', 'vcZeroFLabelJust', 'vcZeroFLabelOn',
'vcZeroFLabelOrthogonalPosF', 'vcZeroFLabelParallelPosF',
'vcZeroFLabelPerimColor', 'vcZeroFLabelPerimOn',
'vcZeroFLabelPerimSpaceF', 'vcZeroFLabelPerimThicknessF',
'vcZeroFLabelSide', 'vcZeroFLabelString', 'vcZeroFLabelTextDirection',
'vcZeroFLabelZone', 'vfCopyData', 'vfDataArray',
'vfExchangeDimensions', 'vfExchangeUVData', 'vfMagMaxV', 'vfMagMinV',
'vfMissingUValueV', 'vfMissingVValueV', 'vfPolarData',
'vfSingleMissingValue', 'vfUDataArray', 'vfUMaxV', 'vfUMinV',
'vfVDataArray', 'vfVMaxV', 'vfVMinV', 'vfXArray', 'vfXCActualEndF',
'vfXCActualStartF', 'vfXCEndIndex', 'vfXCEndSubsetV', 'vfXCEndV',
'vfXCStartIndex', 'vfXCStartSubsetV', 'vfXCStartV', 'vfXCStride',
'vfYArray', 'vfYCActualEndF', 'vfYCActualStartF', 'vfYCEndIndex',
'vfYCEndSubsetV', 'vfYCEndV', 'vfYCStartIndex', 'vfYCStartSubsetV',
'vfYCStartV', 'vfYCStride', 'vpAnnoManagerId', 'vpClipOn',
'vpHeightF', 'vpKeepAspect', 'vpOn', 'vpUseSegments', 'vpWidthF',
'vpXF', 'vpYF', 'wkAntiAlias', 'wkBackgroundColor', 'wkBackgroundOpacityF',
'wkColorMapLen', 'wkColorMap', 'wkColorModel', 'wkDashTableLength',
'wkDefGraphicStyleId', 'wkDeviceLowerX', 'wkDeviceLowerY',
'wkDeviceUpperX', 'wkDeviceUpperY', 'wkFileName', 'wkFillTableLength',
'wkForegroundColor', 'wkFormat', 'wkFullBackground', 'wkGksWorkId',
'wkHeight', 'wkMarkerTableLength', 'wkMetaName', 'wkOrientation',
'wkPDFFileName', 'wkPDFFormat', 'wkPDFResolution', 'wkPSFileName',
'wkPSFormat', 'wkPSResolution', 'wkPaperHeightF', 'wkPaperSize',
'wkPaperWidthF', 'wkPause', 'wkTopLevelViews', 'wkViews',
'wkVisualType', 'wkWidth', 'wkWindowId', 'wkXColorMode', 'wsCurrentSize',
'wsMaximumSize', 'wsThresholdSize', 'xyComputeXMax',
'xyComputeXMin', 'xyComputeYMax', 'xyComputeYMin', 'xyCoordData',
'xyCoordDataSpec', 'xyCurveDrawOrder', 'xyDashPattern',
'xyDashPatterns', 'xyExplicitLabels', 'xyExplicitLegendLabels',
'xyLabelMode', 'xyLineColor', 'xyLineColors', 'xyLineDashSegLenF',
'xyLineLabelConstantSpacingF', 'xyLineLabelFont',
'xyLineLabelFontAspectF', 'xyLineLabelFontColor',
'xyLineLabelFontColors', 'xyLineLabelFontHeightF',
'xyLineLabelFontQuality', 'xyLineLabelFontThicknessF',
'xyLineLabelFuncCode', 'xyLineThicknessF', 'xyLineThicknesses',
'xyMarkLineMode', 'xyMarkLineModes', 'xyMarker', 'xyMarkerColor',
'xyMarkerColors', 'xyMarkerSizeF', 'xyMarkerSizes',
'xyMarkerThicknessF', 'xyMarkerThicknesses', 'xyMarkers',
'xyMonoDashPattern', 'xyMonoLineColor', 'xyMonoLineLabelFontColor',
'xyMonoLineThickness', 'xyMonoMarkLineMode', 'xyMonoMarker',
'xyMonoMarkerColor', 'xyMonoMarkerSize', 'xyMonoMarkerThickness',
'xyXIrrTensionF', 'xyXIrregularPoints', 'xyXStyle', 'xyYIrrTensionF',
'xyYIrregularPoints', 'xyYStyle'), prefix=r'\b'),
Name.Builtin),
# Booleans
(r'\.(True|False)\.', Name.Builtin),
# Comparing Operators
(r'\.(eq|ne|lt|le|gt|ge|not|and|or|xor)\.', Operator.Word),
],
'strings': [
(r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
],
'nums': [
(r'\d+(?![.e])(_[a-z]\w+)?', Number.Integer),
(r'[+-]?\d*\.\d+(e[-+]?\d+)?(_[a-z]\w+)?', Number.Float),
(r'[+-]?\d+\.\d*(e[-+]?\d+)?(_[a-z]\w+)?', Number.Float),
],
}
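# Illustrative sketch (not part of the lexer definition above; the three names are
# picked from the lists purely for demonstration): each large tuple is wrapped in
# pygments.lexer.words(), which resolves to a single optimized regex alternation,
# so one rule can test every builtin/resource name in a single regex match.
if __name__ == '__main__':
    import re
    from pygments.lexer import words

    _pattern = words(('cnFillOn', 'cnLinesOn', 'mpFillOn'), prefix=r'\b').get()
    assert re.match(_pattern, 'cnFillOn')            # a listed name matches
    assert re.match(_pattern, 'plotTitle') is None   # a name outside the tuple does not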
| 63,962 | Python | 70.54698 | 91 | 0.592868 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/mosel.py | """
pygments.lexers.mosel
~~~~~~~~~~~~~~~~~~~~~
Lexers for the mosel language.
http://www.fico.com/en/products/fico-xpress-optimization
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation
__all__ = ['MoselLexer']
FUNCTIONS = (
# core functions
'_',
'abs',
'arctan',
'asproc',
'assert',
'bitflip',
'bitneg',
'bitset',
'bitshift',
'bittest',
'bitval',
'ceil',
'cos',
'create',
'currentdate',
'currenttime',
'cutelt',
'cutfirst',
'cuthead',
'cutlast',
'cuttail',
'datablock',
'delcell',
'exists',
'exit',
'exp',
'exportprob',
'fclose',
'fflush',
'finalize',
'findfirst',
'findlast',
'floor',
'fopen',
'fselect',
'fskipline',
'fwrite',
'fwrite_',
'fwriteln',
'fwriteln_',
'getact',
'getcoeff',
'getcoeffs',
'getdual',
'getelt',
'getfid',
'getfirst',
'getfname',
'gethead',
'getlast',
'getobjval',
'getparam',
'getrcost',
'getreadcnt',
'getreverse',
'getsize',
'getslack',
'getsol',
'gettail',
'gettype',
'getvars',
'isdynamic',
'iseof',
'isfinite',
'ishidden',
'isinf',
'isnan',
'isodd',
'ln',
'localsetparam',
'log',
'makesos1',
'makesos2',
'maxlist',
'memoryuse',
'minlist',
'newmuid',
'publish',
'random',
'read',
'readln',
'reset',
'restoreparam',
'reverse',
'round',
'setcoeff',
'sethidden',
'setioerr',
'setmatherr',
'setname',
'setparam',
'setrandseed',
'setrange',
'settype',
'sin',
'splithead',
'splittail',
'sqrt',
'strfmt',
'substr',
'timestamp',
'unpublish',
'versionnum',
'versionstr',
'write',
'write_',
'writeln',
'writeln_',
# mosel exam mmxprs | sed -n -e "s/ [pf][a-z]* \([a-zA-Z0-9_]*\).*/'\1',/p" | sort -u
'addcut',
'addcuts',
'addmipsol',
'basisstability',
'calcsolinfo',
'clearmipdir',
'clearmodcut',
'command',
'copysoltoinit',
'crossoverlpsol',
'defdelayedrows',
'defsecurevecs',
'delcuts',
'dropcuts',
'estimatemarginals',
'fixglobal',
'flushmsgq',
'getbstat',
'getcnlist',
'getcplist',
'getdualray',
'getiis',
'getiissense',
'getiistype',
'getinfcause',
'getinfeas',
'getlb',
'getlct',
'getleft',
'getloadedlinctrs',
'getloadedmpvars',
'getname',
'getprimalray',
'getprobstat',
'getrange',
'getright',
'getsensrng',
'getsize',
'getsol',
'gettype',
'getub',
'getvars',
'gety',
'hasfeature',
'implies',
'indicator',
'initglobal',
'ishidden',
'isiisvalid',
'isintegral',
'loadbasis',
'loadcuts',
'loadlpsol',
'loadmipsol',
'loadprob',
'maximise',
'maximize',
'minimise',
'minimize',
'postsolve',
'readbasis',
'readdirs',
'readsol',
'refinemipsol',
'rejectintsol',
'repairinfeas',
'repairinfeas_deprec',
'resetbasis',
'resetiis',
'resetsol',
'savebasis',
'savemipsol',
'savesol',
'savestate',
'selectsol',
'setarchconsistency',
'setbstat',
'setcallback',
'setcbcutoff',
'setgndata',
'sethidden',
'setlb',
'setmipdir',
'setmodcut',
'setsol',
'setub',
'setucbdata',
'stopoptimise',
'stopoptimize',
'storecut',
'storecuts',
'unloadprob',
'uselastbarsol',
'writebasis',
'writedirs',
'writeprob',
'writesol',
'xor',
'xprs_addctr',
'xprs_addindic',
# mosel exam mmsystem | sed -n -e "s/ [pf][a-z]* \([a-zA-Z0-9_]*\).*/'\1',/p" | sort -u
'addmonths',
'copytext',
'cuttext',
'deltext',
'endswith',
'erase',
'expandpath',
'fcopy',
'fdelete',
'findfiles',
'findtext',
'fmove',
'formattext',
'getasnumber',
'getchar',
'getcwd',
'getdate',
'getday',
'getdaynum',
'getdays',
'getdirsep',
'getdsoparam',
'getendparse',
'getenv',
'getfsize',
'getfstat',
'getftime',
'gethour',
'getminute',
'getmonth',
'getmsec',
'getoserrmsg',
'getoserror',
'getpathsep',
'getqtype',
'getsecond',
'getsepchar',
'getsize',
'getstart',
'getsucc',
'getsysinfo',
'getsysstat',
'gettime',
'gettmpdir',
'gettrim',
'getweekday',
'getyear',
'inserttext',
'isvalid',
'jointext',
'makedir',
'makepath',
'newtar',
'newzip',
'nextfield',
'openpipe',
'parseextn',
'parseint',
'parsereal',
'parsetext',
'pastetext',
'pathmatch',
'pathsplit',
'qsort',
'quote',
'readtextline',
'regmatch',
'regreplace',
'removedir',
'removefiles',
'setchar',
'setdate',
'setday',
'setdsoparam',
'setendparse',
'setenv',
'sethour',
'setminute',
'setmonth',
'setmsec',
'setoserror',
'setqtype',
'setsecond',
'setsepchar',
'setstart',
'setsucc',
'settime',
'settrim',
'setyear',
'sleep',
'splittext',
'startswith',
'system',
'tarlist',
'textfmt',
'tolower',
'toupper',
'trim',
'untar',
'unzip',
'ziplist',
# mosel exam mmjobs | sed -n -e "s/ [pf][a-z]* \([a-zA-Z0-9_]*\).*/'\1',/p" | sort -u
'canceltimer',
'clearaliases',
'compile',
'connect',
'detach',
'disconnect',
'dropnextevent',
'findxsrvs',
'getaliases',
'getannidents',
'getannotations',
'getbanner',
'getclass',
'getdsoprop',
'getdsopropnum',
'getexitcode',
'getfromgid',
'getfromid',
'getfromuid',
'getgid',
'gethostalias',
'getid',
'getmodprop',
'getmodpropnum',
'getnextevent',
'getnode',
'getrmtid',
'getstatus',
'getsysinfo',
'gettimer',
'getuid',
'getvalue',
'isqueueempty',
'load',
'nullevent',
'peeknextevent',
'resetmodpar',
'run',
'send',
'setcontrol',
'setdefstream',
'setgid',
'sethostalias',
'setmodpar',
'settimer',
'setuid',
'setworkdir',
'stop',
'unload',
'wait',
'waitexpired',
'waitfor',
'waitforend',
)
class MoselLexer(RegexLexer):
"""
For the Mosel optimization language.
.. versionadded:: 2.6
"""
name = 'Mosel'
aliases = ['mosel']
filenames = ['*.mos']
tokens = {
'root': [
(r'\n', Text),
(r'\s+', Text.Whitespace),
(r'!.*?\n', Comment.Single),
(r'\(!(.|\n)*?!\)', Comment.Multiline),
(words((
'and', 'as', 'break', 'case', 'count', 'declarations', 'do',
'dynamic', 'elif', 'else', 'end-', 'end', 'evaluation', 'false',
'forall', 'forward', 'from', 'function', 'hashmap', 'if',
'imports', 'include', 'initialisations', 'initializations', 'inter',
'max', 'min', 'model', 'namespace', 'next', 'not', 'nsgroup',
'nssearch', 'of', 'options', 'or', 'package', 'parameters',
'procedure', 'public', 'prod', 'record', 'repeat', 'requirements',
'return', 'sum', 'then', 'to', 'true', 'union', 'until', 'uses',
'version', 'while', 'with'), prefix=r'\b', suffix=r'\b'),
Keyword.Builtin),
(words((
'range', 'array', 'set', 'list', 'mpvar', 'mpproblem', 'linctr',
'nlctr', 'integer', 'string', 'real', 'boolean', 'text', 'time',
'date', 'datetime', 'returned', 'Model', 'Mosel', 'counter',
'xmldoc', 'is_sos1', 'is_sos2', 'is_integer', 'is_binary',
'is_continuous', 'is_free', 'is_semcont', 'is_semint',
'is_partint'), prefix=r'\b', suffix=r'\b'),
Keyword.Type),
(r'(\+|\-|\*|/|=|<=|>=|\||\^|<|>|<>|\.\.|\.|:=|::|:|in|mod|div)',
Operator),
(r'[()\[\]{},;]+', Punctuation),
(words(FUNCTIONS, prefix=r'\b', suffix=r'\b'), Name.Function),
(r'(\d+\.(?!\.)\d*|\.(?!.)\d+)([eE][+-]?\d+)?', Number.Float),
(r'\d+([eE][+-]?\d+)?', Number.Integer),
(r'[+-]?Infinity', Number.Integer),
(r'0[xX][0-9a-fA-F]+', Number),
(r'"', String.Double, 'double_quote'),
(r'\'', String.Single, 'single_quote'),
(r'(\w+|(\.(?!\.)))', Text),
],
'single_quote': [
(r'\'', String.Single, '#pop'),
(r'[^\']+', String.Single),
],
'double_quote': [
(r'(\\"|\\[0-7]{1,3}\D|\\[abfnrtv]|\\\\)', String.Escape),
(r'\"', String.Double, '#pop'),
(r'[^"\\]+', String.Double),
],
}
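# Minimal usage sketch (illustrative only; the sample program and the choice of
# formatter are arbitrary): the lexer is driven through the standard Pygments
# highlight() pipeline like any other RegexLexer.
if __name__ == '__main__':
    from pygments import highlight
    from pygments.formatters import TerminalFormatter

    _sample = 'model "demo"\n  writeln("hello world")\nend-model\n'
    print(highlight(_sample, MoselLexer(), TerminalFormatter()))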
| 9,187 | Python | 19.508929 | 91 | 0.484816 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/templates.py | """
pygments.lexers.templates
~~~~~~~~~~~~~~~~~~~~~~~~~
Lexers for various template engines' markup.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexers.html import HtmlLexer, XmlLexer
from pygments.lexers.javascript import JavascriptLexer, LassoLexer
from pygments.lexers.css import CssLexer
from pygments.lexers.php import PhpLexer
from pygments.lexers.python import PythonLexer
from pygments.lexers.perl import PerlLexer
from pygments.lexers.jvm import JavaLexer, TeaLangLexer
from pygments.lexers.data import YamlLexer
from pygments.lexers.sql import SqlLexer
from pygments.lexer import Lexer, DelegatingLexer, RegexLexer, bygroups, \
include, using, this, default, combined
from pygments.token import Error, Punctuation, Whitespace, \
Text, Comment, Operator, Keyword, Name, String, Number, Other, Token
from pygments.util import html_doctype_matches, looks_like_xml
__all__ = ['HtmlPhpLexer', 'XmlPhpLexer', 'CssPhpLexer',
'JavascriptPhpLexer', 'ErbLexer', 'RhtmlLexer',
'XmlErbLexer', 'CssErbLexer', 'JavascriptErbLexer',
'SmartyLexer', 'HtmlSmartyLexer', 'XmlSmartyLexer',
'CssSmartyLexer', 'JavascriptSmartyLexer', 'DjangoLexer',
'HtmlDjangoLexer', 'CssDjangoLexer', 'XmlDjangoLexer',
'JavascriptDjangoLexer', 'GenshiLexer', 'HtmlGenshiLexer',
'GenshiTextLexer', 'CssGenshiLexer', 'JavascriptGenshiLexer',
'MyghtyLexer', 'MyghtyHtmlLexer', 'MyghtyXmlLexer',
'MyghtyCssLexer', 'MyghtyJavascriptLexer', 'MasonLexer', 'MakoLexer',
'MakoHtmlLexer', 'MakoXmlLexer', 'MakoJavascriptLexer',
'MakoCssLexer', 'JspLexer', 'CheetahLexer', 'CheetahHtmlLexer',
'CheetahXmlLexer', 'CheetahJavascriptLexer', 'EvoqueLexer',
'EvoqueHtmlLexer', 'EvoqueXmlLexer', 'ColdfusionLexer',
'ColdfusionHtmlLexer', 'ColdfusionCFCLexer', 'VelocityLexer',
'VelocityHtmlLexer', 'VelocityXmlLexer', 'SspLexer',
'TeaTemplateLexer', 'LassoHtmlLexer', 'LassoXmlLexer',
'LassoCssLexer', 'LassoJavascriptLexer', 'HandlebarsLexer',
'HandlebarsHtmlLexer', 'YamlJinjaLexer', 'LiquidLexer',
'TwigLexer', 'TwigHtmlLexer', 'Angular2Lexer', 'Angular2HtmlLexer',
'SqlJinjaLexer']
class ErbLexer(Lexer):
"""
Generic ERB (Ruby Templating) lexer.
Just highlights ruby code between the preprocessor directives, other data
is left untouched by the lexer.
All options are also forwarded to the `RubyLexer`.
"""
name = 'ERB'
url = 'https://github.com/ruby/erb'
aliases = ['erb']
mimetypes = ['application/x-ruby-templating']
_block_re = re.compile(r'(<%%|%%>|<%=|<%#|<%-|<%|-%>|%>|^%[^%].*?$)', re.M)
def __init__(self, **options):
from pygments.lexers.ruby import RubyLexer
self.ruby_lexer = RubyLexer(**options)
Lexer.__init__(self, **options)
def get_tokens_unprocessed(self, text):
"""
        Since ERB doesn't allow "<%" and other tags inside of Ruby
        blocks, a simple split-based approach works here; it would only
        break on such nested tags, which ERB itself forbids anyway.
"""
tokens = self._block_re.split(text)
tokens.reverse()
state = idx = 0
try:
while True:
# text
if state == 0:
val = tokens.pop()
yield idx, Other, val
idx += len(val)
state = 1
# block starts
elif state == 1:
tag = tokens.pop()
# literals
if tag in ('<%%', '%%>'):
yield idx, Other, tag
idx += 3
state = 0
# comment
elif tag == '<%#':
yield idx, Comment.Preproc, tag
val = tokens.pop()
yield idx + 3, Comment, val
idx += 3 + len(val)
state = 2
# blocks or output
elif tag in ('<%', '<%=', '<%-'):
yield idx, Comment.Preproc, tag
idx += len(tag)
data = tokens.pop()
r_idx = 0
for r_idx, r_token, r_value in \
self.ruby_lexer.get_tokens_unprocessed(data):
yield r_idx + idx, r_token, r_value
idx += len(data)
state = 2
elif tag in ('%>', '-%>'):
yield idx, Error, tag
idx += len(tag)
state = 0
# % raw ruby statements
else:
yield idx, Comment.Preproc, tag[0]
r_idx = 0
for r_idx, r_token, r_value in \
self.ruby_lexer.get_tokens_unprocessed(tag[1:]):
yield idx + 1 + r_idx, r_token, r_value
idx += len(tag)
state = 0
# block ends
elif state == 2:
tag = tokens.pop()
if tag not in ('%>', '-%>'):
yield idx, Other, tag
else:
yield idx, Comment.Preproc, tag
idx += len(tag)
state = 0
except IndexError:
return
def analyse_text(text):
if '<%' in text and '%>' in text:
return 0.4
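# Illustrative helper (not part of the original module): demonstrates the
# split-based behaviour described above -- surrounding markup is yielded as
# Token.Other while the code inside "<%= ... %>" is re-lexed with RubyLexer.
# The sample string is arbitrary.
def _demo_erb_tokens(source='<p>Hello, <%= user.name %>!</p>'):
    """Return the (token_type, value) pairs ErbLexer yields for *source*."""
    return list(ErbLexer().get_tokens(source))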
class SmartyLexer(RegexLexer):
"""
Generic Smarty template lexer.
Just highlights smarty code between the preprocessor directives, other
data is left untouched by the lexer.
"""
name = 'Smarty'
url = 'https://www.smarty.net/'
aliases = ['smarty']
filenames = ['*.tpl']
mimetypes = ['application/x-smarty']
flags = re.MULTILINE | re.DOTALL
tokens = {
'root': [
(r'[^{]+', Other),
(r'(\{)(\*.*?\*)(\})',
bygroups(Comment.Preproc, Comment, Comment.Preproc)),
(r'(\{php\})(.*?)(\{/php\})',
bygroups(Comment.Preproc, using(PhpLexer, startinline=True),
Comment.Preproc)),
(r'(\{)(/?[a-zA-Z_]\w*)(\s*)',
bygroups(Comment.Preproc, Name.Function, Text), 'smarty'),
(r'\{', Comment.Preproc, 'smarty')
],
'smarty': [
(r'\s+', Text),
(r'\{', Comment.Preproc, '#push'),
(r'\}', Comment.Preproc, '#pop'),
(r'#[a-zA-Z_]\w*#', Name.Variable),
(r'\$[a-zA-Z_]\w*(\.\w+)*', Name.Variable),
(r'[~!%^&*()+=|\[\]:;,.<>/?@-]', Operator),
(r'(true|false|null)\b', Keyword.Constant),
(r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
r"0[xX][0-9a-fA-F]+[Ll]?", Number),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
(r'[a-zA-Z_]\w*', Name.Attribute)
]
}
def analyse_text(text):
rv = 0.0
if re.search(r'\{if\s+.*?\}.*?\{/if\}', text):
rv += 0.15
if re.search(r'\{include\s+file=.*?\}', text):
rv += 0.15
if re.search(r'\{foreach\s+.*?\}.*?\{/foreach\}', text):
rv += 0.15
if re.search(r'\{\$.*?\}', text):
rv += 0.01
return rv
class VelocityLexer(RegexLexer):
"""
Generic Velocity template lexer.
Just highlights velocity directives and variable references, other
data is left untouched by the lexer.
"""
name = 'Velocity'
url = 'https://velocity.apache.org/'
aliases = ['velocity']
filenames = ['*.vm', '*.fhtml']
flags = re.MULTILINE | re.DOTALL
identifier = r'[a-zA-Z_]\w*'
tokens = {
'root': [
(r'[^{#$]+', Other),
(r'(#)(\*.*?\*)(#)',
bygroups(Comment.Preproc, Comment, Comment.Preproc)),
(r'(##)(.*?$)',
bygroups(Comment.Preproc, Comment)),
(r'(#\{?)(' + identifier + r')(\}?)(\s?\()',
bygroups(Comment.Preproc, Name.Function, Comment.Preproc, Punctuation),
'directiveparams'),
(r'(#\{?)(' + identifier + r')(\}|\b)',
bygroups(Comment.Preproc, Name.Function, Comment.Preproc)),
(r'\$!?\{?', Punctuation, 'variable')
],
'variable': [
(identifier, Name.Variable),
(r'\(', Punctuation, 'funcparams'),
(r'(\.)(' + identifier + r')',
bygroups(Punctuation, Name.Variable), '#push'),
(r'\}', Punctuation, '#pop'),
default('#pop')
],
'directiveparams': [
(r'(&&|\|\||==?|!=?|[-<>+*%&|^/])|\b(eq|ne|gt|lt|ge|le|not|in)\b',
Operator),
(r'\[', Operator, 'rangeoperator'),
(r'\b' + identifier + r'\b', Name.Function),
include('funcparams')
],
'rangeoperator': [
(r'\.\.', Operator),
include('funcparams'),
(r'\]', Operator, '#pop')
],
'funcparams': [
(r'\$!?\{?', Punctuation, 'variable'),
(r'\s+', Text),
(r'[,:]', Punctuation),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
(r"0[xX][0-9a-fA-F]+[Ll]?", Number),
(r"\b[0-9]+\b", Number),
(r'(true|false|null)\b', Keyword.Constant),
(r'\(', Punctuation, '#push'),
(r'\)', Punctuation, '#pop'),
(r'\{', Punctuation, '#push'),
(r'\}', Punctuation, '#pop'),
(r'\[', Punctuation, '#push'),
(r'\]', Punctuation, '#pop'),
]
}
def analyse_text(text):
rv = 0.0
if re.search(r'#\{?macro\}?\(.*?\).*?#\{?end\}?', text, re.DOTALL):
rv += 0.25
if re.search(r'#\{?if\}?\(.+?\).*?#\{?end\}?', text, re.DOTALL):
rv += 0.15
if re.search(r'#\{?foreach\}?\(.+?\).*?#\{?end\}?', text, re.DOTALL):
rv += 0.15
if re.search(r'\$!?\{?[a-zA-Z_]\w*(\([^)]*\))?'
r'(\.\w+(\([^)]*\))?)*\}?', text):
rv += 0.01
return rv
class VelocityHtmlLexer(DelegatingLexer):
"""
Subclass of the `VelocityLexer` that highlights unlexed data
with the `HtmlLexer`.
"""
name = 'HTML+Velocity'
aliases = ['html+velocity']
alias_filenames = ['*.html', '*.fhtml']
mimetypes = ['text/html+velocity']
def __init__(self, **options):
super().__init__(HtmlLexer, VelocityLexer, **options)
class VelocityXmlLexer(DelegatingLexer):
"""
Subclass of the `VelocityLexer` that highlights unlexed data
with the `XmlLexer`.
"""
name = 'XML+Velocity'
aliases = ['xml+velocity']
alias_filenames = ['*.xml', '*.vm']
mimetypes = ['application/xml+velocity']
def __init__(self, **options):
super().__init__(XmlLexer, VelocityLexer, **options)
def analyse_text(text):
rv = VelocityLexer.analyse_text(text) - 0.01
if looks_like_xml(text):
rv += 0.4
return rv
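# Illustrative helper (not part of the original module): a DelegatingLexer runs
# the template lexer first and then re-lexes every span it emitted as
# Token.Other with the root lexer (HtmlLexer or XmlLexer above), so markup and
# Velocity directives are highlighted together. The sample string is arbitrary.
def _demo_html_velocity(source='<b>$customer.name</b>'):
    """Return the tokens the combined HTML+Velocity lexer produces for *source*."""
    return list(VelocityHtmlLexer().get_tokens(source))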
class DjangoLexer(RegexLexer):
"""
Generic `django <http://www.djangoproject.com/documentation/templates/>`_
and `jinja <https://jinja.pocoo.org/jinja/>`_ template lexer.
It just highlights django/jinja code between the preprocessor directives,
other data is left untouched by the lexer.
"""
name = 'Django/Jinja'
aliases = ['django', 'jinja']
mimetypes = ['application/x-django-templating', 'application/x-jinja']
flags = re.M | re.S
tokens = {
'root': [
(r'[^{]+', Other),
(r'\{\{', Comment.Preproc, 'var'),
# jinja/django comments
(r'\{#.*?#\}', Comment),
# django comments
(r'(\{%)(-?\s*)(comment)(\s*-?)(%\})(.*?)'
r'(\{%)(-?\s*)(endcomment)(\s*-?)(%\})',
bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
Comment, Comment.Preproc, Text, Keyword, Text,
Comment.Preproc)),
# raw jinja blocks
(r'(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)'
r'(\{%)(-?\s*)(endraw)(\s*-?)(%\})',
bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
Text, Comment.Preproc, Text, Keyword, Text,
Comment.Preproc)),
# filter blocks
(r'(\{%)(-?\s*)(filter)(\s+)([a-zA-Z_]\w*)',
bygroups(Comment.Preproc, Text, Keyword, Text, Name.Function),
'block'),
(r'(\{%)(-?\s*)([a-zA-Z_]\w*)',
bygroups(Comment.Preproc, Text, Keyword), 'block'),
(r'\{', Other)
],
'varnames': [
(r'(\|)(\s*)([a-zA-Z_]\w*)',
bygroups(Operator, Text, Name.Function)),
(r'(is)(\s+)(not)?(\s+)?([a-zA-Z_]\w*)',
bygroups(Keyword, Text, Keyword, Text, Name.Function)),
(r'(_|true|false|none|True|False|None)\b', Keyword.Pseudo),
(r'(in|as|reversed|recursive|not|and|or|is|if|else|import|'
r'with(?:(?:out)?\s*context)?|scoped|ignore\s+missing)\b',
Keyword),
(r'(loop|block|super|forloop)\b', Name.Builtin),
(r'[a-zA-Z_][\w-]*', Name.Variable),
(r'\.\w+', Name.Variable),
(r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
(r'([{}()\[\]+\-*/%,:~]|[><=]=?|!=)', Operator),
(r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
r"0[xX][0-9a-fA-F]+[Ll]?", Number),
],
'var': [
(r'\s+', Text),
(r'(-?)(\}\})', bygroups(Text, Comment.Preproc), '#pop'),
include('varnames')
],
'block': [
(r'\s+', Text),
(r'(-?)(%\})', bygroups(Text, Comment.Preproc), '#pop'),
include('varnames'),
(r'.', Punctuation)
]
}
def analyse_text(text):
rv = 0.0
if re.search(r'\{%\s*(block|extends)', text) is not None:
rv += 0.4
if re.search(r'\{%\s*if\s*.*?%\}', text) is not None:
rv += 0.1
if re.search(r'\{\{.*?\}\}', text) is not None:
rv += 0.1
return rv
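# Illustrative helper (not part of the original module): analyse_text() above is
# the hook guess_lexer() consults; it returns a rough confidence score. Under
# the heuristics defined above, a sample containing a "{% block %}" tag and a
# "{{ var }}" substitution scores 0.4 + 0.1 = 0.5.
def _demo_django_score(text='{% block body %}{{ greeting }}{% endblock %}'):
    """Return DjangoLexer's analyse_text() confidence for *text*."""
    return DjangoLexer.analyse_text(text)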
class MyghtyLexer(RegexLexer):
"""
Generic myghty templates lexer. Code that isn't Myghty
markup is yielded as `Token.Other`.
.. versionadded:: 0.6
"""
name = 'Myghty'
url = 'http://www.myghty.org/'
aliases = ['myghty']
filenames = ['*.myt', 'autodelegate']
mimetypes = ['application/x-myghty']
tokens = {
'root': [
(r'\s+', Text),
(r'(?s)(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)',
bygroups(Name.Tag, Text, Name.Function, Name.Tag,
using(this), Name.Tag)),
(r'(?s)(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)',
bygroups(Name.Tag, Name.Function, Name.Tag,
using(PythonLexer), Name.Tag)),
(r'(<&[^|])(.*?)(,.*?)?(&>)',
bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)),
(r'(?s)(<&\|)(.*?)(,.*?)?(&>)',
bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)),
(r'</&>', Name.Tag),
(r'(?s)(<%!?)(.*?)(%>)',
bygroups(Name.Tag, using(PythonLexer), Name.Tag)),
(r'(?<=^)#[^\n]*(\n|\Z)', Comment),
(r'(?<=^)(%)([^\n]*)(\n|\Z)',
bygroups(Name.Tag, using(PythonLexer), Other)),
(r"""(?sx)
(.+?) # anything, followed by:
(?:
(?<=\n)(?=[%#]) | # an eval or comment line
(?=</?[%&]) | # a substitution or block or
# call start or end
# - don't consume
(\\\n) | # an escaped newline
\Z # end of string
)""", bygroups(Other, Operator)),
]
}
class MyghtyHtmlLexer(DelegatingLexer):
"""
Subclass of the `MyghtyLexer` that highlights unlexed data
with the `HtmlLexer`.
.. versionadded:: 0.6
"""
name = 'HTML+Myghty'
aliases = ['html+myghty']
mimetypes = ['text/html+myghty']
def __init__(self, **options):
super().__init__(HtmlLexer, MyghtyLexer, **options)
class MyghtyXmlLexer(DelegatingLexer):
"""
Subclass of the `MyghtyLexer` that highlights unlexed data
with the `XmlLexer`.
.. versionadded:: 0.6
"""
name = 'XML+Myghty'
aliases = ['xml+myghty']
mimetypes = ['application/xml+myghty']
def __init__(self, **options):
super().__init__(XmlLexer, MyghtyLexer, **options)
class MyghtyJavascriptLexer(DelegatingLexer):
"""
Subclass of the `MyghtyLexer` that highlights unlexed data
with the `JavascriptLexer`.
.. versionadded:: 0.6
"""
name = 'JavaScript+Myghty'
aliases = ['javascript+myghty', 'js+myghty']
mimetypes = ['application/x-javascript+myghty',
'text/x-javascript+myghty',
                 'text/javascript+myghty']
def __init__(self, **options):
super().__init__(JavascriptLexer, MyghtyLexer, **options)
class MyghtyCssLexer(DelegatingLexer):
"""
Subclass of the `MyghtyLexer` that highlights unlexed data
with the `CssLexer`.
.. versionadded:: 0.6
"""
name = 'CSS+Myghty'
aliases = ['css+myghty']
mimetypes = ['text/css+myghty']
def __init__(self, **options):
super().__init__(CssLexer, MyghtyLexer, **options)
class MasonLexer(RegexLexer):
"""
Generic mason templates lexer. Stolen from Myghty lexer. Code that isn't
Mason markup is HTML.
.. versionadded:: 1.4
"""
name = 'Mason'
url = 'http://www.masonhq.com/'
aliases = ['mason']
filenames = ['*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler']
mimetypes = ['application/x-mason']
tokens = {
'root': [
(r'\s+', Whitespace),
(r'(?s)(<%doc>)(.*?)(</%doc>)',
bygroups(Name.Tag, Comment.Multiline, Name.Tag)),
(r'(?s)(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)',
bygroups(Name.Tag, Whitespace, Name.Function, Name.Tag,
using(this), Name.Tag)),
(r'(?s)(<%(\w+)(.*?)(>))(.*?)(</%\2\s*>)',
bygroups(Name.Tag, None, None, None, using(PerlLexer), Name.Tag)),
(r'(?s)(<&[^|])(.*?)(,.*?)?(&>)',
bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
(r'(?s)(<&\|)(.*?)(,.*?)?(&>)',
bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
(r'</&>', Name.Tag),
(r'(?s)(<%!?)(.*?)(%>)',
bygroups(Name.Tag, using(PerlLexer), Name.Tag)),
(r'(?<=^)#[^\n]*(\n|\Z)', Comment),
(r'(?<=^)(%)([^\n]*)(\n|\Z)',
bygroups(Name.Tag, using(PerlLexer), Other)),
(r"""(?sx)
(.+?) # anything, followed by:
(?:
(?<=\n)(?=[%#]) | # an eval or comment line
(?=</?[%&]) | # a substitution or block or
# call start or end
# - don't consume
(\\\n) | # an escaped newline
\Z # end of string
)""", bygroups(using(HtmlLexer), Operator)),
]
}
def analyse_text(text):
result = 0.0
if re.search(r'</%(class|doc|init)>', text) is not None:
result = 1.0
elif re.search(r'<&.+&>', text, re.DOTALL) is not None:
result = 0.11
return result
class MakoLexer(RegexLexer):
"""
Generic mako templates lexer. Code that isn't Mako
markup is yielded as `Token.Other`.
.. versionadded:: 0.7
"""
name = 'Mako'
url = 'http://www.makotemplates.org/'
aliases = ['mako']
filenames = ['*.mao']
mimetypes = ['application/x-mako']
tokens = {
'root': [
(r'(\s*)(%)(\s*end(?:\w+))(\n|\Z)',
bygroups(Text.Whitespace, Comment.Preproc, Keyword, Other)),
(r'(\s*)(%)([^\n]*)(\n|\Z)',
bygroups(Text.Whitespace, Comment.Preproc, using(PythonLexer), Other)),
(r'(\s*)(##[^\n]*)(\n|\Z)',
bygroups(Text.Whitespace, Comment.Single, Text.Whitespace)),
(r'(?s)<%doc>.*?</%doc>', Comment.Multiline),
(r'(<%)([\w.:]+)',
bygroups(Comment.Preproc, Name.Builtin), 'tag'),
(r'(</%)([\w.:]+)(>)',
bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc)),
(r'<%(?=([\w.:]+))', Comment.Preproc, 'ondeftags'),
(r'(?s)(<%(?:!?))(.*?)(%>)',
bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
(r'(\$\{)(.*?)(\})',
bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
(r'''(?sx)
(.+?) # anything, followed by:
(?:
(?<=\n)(?=%|\#\#) | # an eval or comment line
(?=\#\*) | # multiline comment
(?=</?%) | # a python block
# call start or end
(?=\$\{) | # a substitution
(?<=\n)(?=\s*%) |
# - don't consume
(\\\n) | # an escaped newline
\Z # end of string
)
''', bygroups(Other, Operator)),
(r'\s+', Text),
],
'ondeftags': [
(r'<%', Comment.Preproc),
(r'(?<=<%)(include|inherit|namespace|page)', Name.Builtin),
include('tag'),
],
'tag': [
(r'((?:\w+)\s*=)(\s*)(".*?")',
bygroups(Name.Attribute, Text, String)),
(r'/?\s*>', Comment.Preproc, '#pop'),
(r'\s+', Text),
],
'attr': [
('".*?"', String, '#pop'),
("'.*?'", String, '#pop'),
(r'[^\s>]+', String, '#pop'),
],
}
class MakoHtmlLexer(DelegatingLexer):
"""
Subclass of the `MakoLexer` that highlights unlexed data
with the `HtmlLexer`.
.. versionadded:: 0.7
"""
name = 'HTML+Mako'
aliases = ['html+mako']
mimetypes = ['text/html+mako']
def __init__(self, **options):
super().__init__(HtmlLexer, MakoLexer, **options)
class MakoXmlLexer(DelegatingLexer):
"""
Subclass of the `MakoLexer` that highlights unlexed data
with the `XmlLexer`.
.. versionadded:: 0.7
"""
name = 'XML+Mako'
aliases = ['xml+mako']
mimetypes = ['application/xml+mako']
def __init__(self, **options):
super().__init__(XmlLexer, MakoLexer, **options)
class MakoJavascriptLexer(DelegatingLexer):
"""
Subclass of the `MakoLexer` that highlights unlexed data
with the `JavascriptLexer`.
.. versionadded:: 0.7
"""
name = 'JavaScript+Mako'
aliases = ['javascript+mako', 'js+mako']
mimetypes = ['application/x-javascript+mako',
'text/x-javascript+mako',
'text/javascript+mako']
def __init__(self, **options):
super().__init__(JavascriptLexer, MakoLexer, **options)
class MakoCssLexer(DelegatingLexer):
"""
Subclass of the `MakoLexer` that highlights unlexed data
with the `CssLexer`.
.. versionadded:: 0.7
"""
name = 'CSS+Mako'
aliases = ['css+mako']
mimetypes = ['text/css+mako']
def __init__(self, **options):
super().__init__(CssLexer, MakoLexer, **options)
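# --- Illustrative sketch (an editorial addition): the Mako*Lexer wrappers above all
# follow the same DelegatingLexer pattern -- MakoLexer yields Token.Other for anything
# that is not Mako markup, and the wrapper re-lexes those spans with HTML, XML,
# JavaScript or CSS. Minimal demonstration on a made-up template string:
def _demo_mako_html_tokens():
    from pygments.lexers.templates import MakoHtmlLexer

    tpl = '<p>Hello, ${name}!</p>\n'
    for ttype, value in MakoHtmlLexer().get_tokens(tpl):
        print(ttype, repr(value))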
# Genshi and Cheetah lexers courtesy of Matt Good.
class CheetahPythonLexer(Lexer):
"""
Lexer for handling Cheetah's special $ tokens in Python syntax.
"""
def get_tokens_unprocessed(self, text):
pylexer = PythonLexer(**self.options)
for pos, type_, value in pylexer.get_tokens_unprocessed(text):
if type_ == Token.Error and value == '$':
type_ = Comment.Preproc
yield pos, type_, value
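# --- Illustrative sketch (an editorial addition): the wrapper above only re-tags the
# '$' characters that PythonLexer reports as errors, so Cheetah placeholders such as
# $name lex cleanly inside embedded Python. Made-up input:
def _demo_cheetah_dollar():
    from pygments.lexers.templates import CheetahPythonLexer

    for _, ttype, value in CheetahPythonLexer().get_tokens_unprocessed('$name.strip()'):
        print(ttype, repr(value))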
class CheetahLexer(RegexLexer):
"""
Generic cheetah templates lexer. Code that isn't Cheetah
markup is yielded as `Token.Other`. This also works for
`spitfire templates`_ which use the same syntax.
.. _spitfire templates: http://code.google.com/p/spitfire/
"""
name = 'Cheetah'
url = 'http://www.cheetahtemplate.org/'
aliases = ['cheetah', 'spitfire']
filenames = ['*.tmpl', '*.spt']
mimetypes = ['application/x-cheetah', 'application/x-spitfire']
tokens = {
'root': [
(r'(##[^\n]*)$',
(bygroups(Comment))),
(r'#[*](.|\n)*?[*]#', Comment),
(r'#end[^#\n]*(?:#|$)', Comment.Preproc),
(r'#slurp$', Comment.Preproc),
(r'(#[a-zA-Z]+)([^#\n]*)(#|$)',
(bygroups(Comment.Preproc, using(CheetahPythonLexer),
Comment.Preproc))),
# TODO support other Python syntax like $foo['bar']
(r'(\$)([a-zA-Z_][\w.]*\w)',
bygroups(Comment.Preproc, using(CheetahPythonLexer))),
(r'(?s)(\$\{!?)(.*?)(\})',
bygroups(Comment.Preproc, using(CheetahPythonLexer),
Comment.Preproc)),
(r'''(?sx)
(.+?) # anything, followed by:
(?:
(?=\#[#a-zA-Z]*) | # an eval comment
(?=\$[a-zA-Z_{]) | # a substitution
\Z # end of string
)
''', Other),
(r'\s+', Text),
],
}
class CheetahHtmlLexer(DelegatingLexer):
"""
Subclass of the `CheetahLexer` that highlights unlexed data
with the `HtmlLexer`.
"""
name = 'HTML+Cheetah'
aliases = ['html+cheetah', 'html+spitfire', 'htmlcheetah']
mimetypes = ['text/html+cheetah', 'text/html+spitfire']
def __init__(self, **options):
super().__init__(HtmlLexer, CheetahLexer, **options)
class CheetahXmlLexer(DelegatingLexer):
"""
Subclass of the `CheetahLexer` that highlights unlexed data
with the `XmlLexer`.
"""
name = 'XML+Cheetah'
aliases = ['xml+cheetah', 'xml+spitfire']
mimetypes = ['application/xml+cheetah', 'application/xml+spitfire']
def __init__(self, **options):
super().__init__(XmlLexer, CheetahLexer, **options)
class CheetahJavascriptLexer(DelegatingLexer):
"""
Subclass of the `CheetahLexer` that highlights unlexed data
with the `JavascriptLexer`.
"""
name = 'JavaScript+Cheetah'
aliases = ['javascript+cheetah', 'js+cheetah',
'javascript+spitfire', 'js+spitfire']
mimetypes = ['application/x-javascript+cheetah',
'text/x-javascript+cheetah',
'text/javascript+cheetah',
'application/x-javascript+spitfire',
'text/x-javascript+spitfire',
'text/javascript+spitfire']
def __init__(self, **options):
super().__init__(JavascriptLexer, CheetahLexer, **options)
class GenshiTextLexer(RegexLexer):
"""
A lexer that highlights genshi text templates.
"""
name = 'Genshi Text'
url = 'http://genshi.edgewall.org/'
aliases = ['genshitext']
mimetypes = ['application/x-genshi-text', 'text/x-genshi']
tokens = {
'root': [
(r'[^#$\s]+', Other),
(r'^(\s*)(##.*)$', bygroups(Text, Comment)),
(r'^(\s*)(#)', bygroups(Text, Comment.Preproc), 'directive'),
include('variable'),
(r'[#$\s]', Other),
],
'directive': [
(r'\n', Text, '#pop'),
(r'(?:def|for|if)\s+.*', using(PythonLexer), '#pop'),
(r'(choose|when|with)([^\S\n]+)(.*)',
bygroups(Keyword, Text, using(PythonLexer)), '#pop'),
(r'(choose|otherwise)\b', Keyword, '#pop'),
(r'(end\w*)([^\S\n]*)(.*)', bygroups(Keyword, Text, Comment), '#pop'),
],
'variable': [
(r'(?<!\$)(\$\{)(.+?)(\})',
bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
(r'(?<!\$)(\$)([a-zA-Z_][\w.]*)',
Name.Variable),
]
}
class GenshiMarkupLexer(RegexLexer):
"""
Base lexer for Genshi markup, used by `HtmlGenshiLexer` and
`GenshiLexer`.
"""
flags = re.DOTALL
tokens = {
'root': [
(r'[^<$]+', Other),
(r'(<\?python)(.*?)(\?>)',
bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
# yield style and script blocks as Other
(r'<\s*(script|style)\s*.*?>.*?<\s*/\1\s*>', Other),
(r'<\s*py:[a-zA-Z0-9]+', Name.Tag, 'pytag'),
(r'<\s*[a-zA-Z0-9:.]+', Name.Tag, 'tag'),
include('variable'),
(r'[<$]', Other),
],
'pytag': [
(r'\s+', Text),
(r'[\w:-]+\s*=', Name.Attribute, 'pyattr'),
(r'/?\s*>', Name.Tag, '#pop'),
],
'pyattr': [
('(")(.*?)(")', bygroups(String, using(PythonLexer), String), '#pop'),
("(')(.*?)(')", bygroups(String, using(PythonLexer), String), '#pop'),
(r'[^\s>]+', String, '#pop'),
],
'tag': [
(r'\s+', Text),
(r'py:[\w-]+\s*=', Name.Attribute, 'pyattr'),
(r'[\w:-]+\s*=', Name.Attribute, 'attr'),
(r'/?\s*>', Name.Tag, '#pop'),
],
'attr': [
('"', String, 'attr-dstring'),
("'", String, 'attr-sstring'),
(r'[^\s>]*', String, '#pop')
],
'attr-dstring': [
('"', String, '#pop'),
include('strings'),
("'", String)
],
'attr-sstring': [
("'", String, '#pop'),
include('strings'),
("'", String)
],
'strings': [
('[^"\'$]+', String),
include('variable')
],
'variable': [
(r'(?<!\$)(\$\{)(.+?)(\})',
bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
(r'(?<!\$)(\$)([a-zA-Z_][\w\.]*)',
Name.Variable),
]
}
class HtmlGenshiLexer(DelegatingLexer):
"""
A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ and
    `kid <http://kid-templating.org/>`_ HTML templates.
"""
name = 'HTML+Genshi'
aliases = ['html+genshi', 'html+kid']
alias_filenames = ['*.html', '*.htm', '*.xhtml']
mimetypes = ['text/html+genshi']
def __init__(self, **options):
super().__init__(HtmlLexer, GenshiMarkupLexer, **options)
def analyse_text(text):
rv = 0.0
if re.search(r'\$\{.*?\}', text) is not None:
rv += 0.2
if re.search(r'py:(.*?)=["\']', text) is not None:
rv += 0.2
return rv + HtmlLexer.analyse_text(text) - 0.01
class GenshiLexer(DelegatingLexer):
"""
A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ and
    `kid <http://kid-templating.org/>`_ XML templates.
"""
name = 'Genshi'
aliases = ['genshi', 'kid', 'xml+genshi', 'xml+kid']
filenames = ['*.kid']
alias_filenames = ['*.xml']
mimetypes = ['application/x-genshi', 'application/x-kid']
def __init__(self, **options):
super().__init__(XmlLexer, GenshiMarkupLexer, **options)
def analyse_text(text):
rv = 0.0
if re.search(r'\$\{.*?\}', text) is not None:
rv += 0.2
if re.search(r'py:(.*?)=["\']', text) is not None:
rv += 0.2
return rv + XmlLexer.analyse_text(text) - 0.01
class JavascriptGenshiLexer(DelegatingLexer):
"""
A lexer that highlights javascript code in genshi text templates.
"""
name = 'JavaScript+Genshi Text'
aliases = ['js+genshitext', 'js+genshi', 'javascript+genshitext',
'javascript+genshi']
alias_filenames = ['*.js']
mimetypes = ['application/x-javascript+genshi',
'text/x-javascript+genshi',
'text/javascript+genshi']
def __init__(self, **options):
super().__init__(JavascriptLexer, GenshiTextLexer, **options)
def analyse_text(text):
return GenshiLexer.analyse_text(text) - 0.05
class CssGenshiLexer(DelegatingLexer):
"""
A lexer that highlights CSS definitions in genshi text templates.
"""
name = 'CSS+Genshi Text'
aliases = ['css+genshitext', 'css+genshi']
alias_filenames = ['*.css']
mimetypes = ['text/css+genshi']
def __init__(self, **options):
super().__init__(CssLexer, GenshiTextLexer, **options)
def analyse_text(text):
return GenshiLexer.analyse_text(text) - 0.05
class RhtmlLexer(DelegatingLexer):
"""
Subclass of the ERB lexer that highlights the unlexed data with the
html lexer.
Nested Javascript and CSS is highlighted too.
"""
name = 'RHTML'
aliases = ['rhtml', 'html+erb', 'html+ruby']
filenames = ['*.rhtml']
alias_filenames = ['*.html', '*.htm', '*.xhtml']
mimetypes = ['text/html+ruby']
def __init__(self, **options):
super().__init__(HtmlLexer, ErbLexer, **options)
def analyse_text(text):
rv = ErbLexer.analyse_text(text) - 0.01
if html_doctype_matches(text):
# one more than the XmlErbLexer returns
rv += 0.5
return rv
class XmlErbLexer(DelegatingLexer):
"""
Subclass of `ErbLexer` which highlights data outside preprocessor
directives with the `XmlLexer`.
"""
name = 'XML+Ruby'
aliases = ['xml+ruby', 'xml+erb']
alias_filenames = ['*.xml']
mimetypes = ['application/xml+ruby']
def __init__(self, **options):
super().__init__(XmlLexer, ErbLexer, **options)
def analyse_text(text):
rv = ErbLexer.analyse_text(text) - 0.01
if looks_like_xml(text):
rv += 0.4
return rv
class CssErbLexer(DelegatingLexer):
"""
Subclass of `ErbLexer` which highlights unlexed data with the `CssLexer`.
"""
name = 'CSS+Ruby'
aliases = ['css+ruby', 'css+erb']
alias_filenames = ['*.css']
mimetypes = ['text/css+ruby']
def __init__(self, **options):
super().__init__(CssLexer, ErbLexer, **options)
def analyse_text(text):
return ErbLexer.analyse_text(text) - 0.05
class JavascriptErbLexer(DelegatingLexer):
"""
Subclass of `ErbLexer` which highlights unlexed data with the
`JavascriptLexer`.
"""
name = 'JavaScript+Ruby'
aliases = ['javascript+ruby', 'js+ruby', 'javascript+erb', 'js+erb']
alias_filenames = ['*.js']
mimetypes = ['application/x-javascript+ruby',
'text/x-javascript+ruby',
'text/javascript+ruby']
def __init__(self, **options):
super().__init__(JavascriptLexer, ErbLexer, **options)
def analyse_text(text):
return ErbLexer.analyse_text(text) - 0.05
class HtmlPhpLexer(DelegatingLexer):
"""
Subclass of `PhpLexer` that highlights unhandled data with the `HtmlLexer`.
Nested Javascript and CSS is highlighted too.
"""
name = 'HTML+PHP'
aliases = ['html+php']
filenames = ['*.phtml']
alias_filenames = ['*.php', '*.html', '*.htm', '*.xhtml',
'*.php[345]']
mimetypes = ['application/x-php',
'application/x-httpd-php', 'application/x-httpd-php3',
'application/x-httpd-php4', 'application/x-httpd-php5']
def __init__(self, **options):
super().__init__(HtmlLexer, PhpLexer, **options)
def analyse_text(text):
rv = PhpLexer.analyse_text(text) - 0.01
if html_doctype_matches(text):
rv += 0.5
return rv
class XmlPhpLexer(DelegatingLexer):
"""
Subclass of `PhpLexer` that highlights unhandled data with the `XmlLexer`.
"""
name = 'XML+PHP'
aliases = ['xml+php']
alias_filenames = ['*.xml', '*.php', '*.php[345]']
mimetypes = ['application/xml+php']
def __init__(self, **options):
super().__init__(XmlLexer, PhpLexer, **options)
def analyse_text(text):
rv = PhpLexer.analyse_text(text) - 0.01
if looks_like_xml(text):
rv += 0.4
return rv
class CssPhpLexer(DelegatingLexer):
"""
Subclass of `PhpLexer` which highlights unmatched data with the `CssLexer`.
"""
name = 'CSS+PHP'
aliases = ['css+php']
alias_filenames = ['*.css']
mimetypes = ['text/css+php']
def __init__(self, **options):
super().__init__(CssLexer, PhpLexer, **options)
def analyse_text(text):
return PhpLexer.analyse_text(text) - 0.05
class JavascriptPhpLexer(DelegatingLexer):
"""
Subclass of `PhpLexer` which highlights unmatched data with the
`JavascriptLexer`.
"""
name = 'JavaScript+PHP'
aliases = ['javascript+php', 'js+php']
alias_filenames = ['*.js']
mimetypes = ['application/x-javascript+php',
'text/x-javascript+php',
'text/javascript+php']
def __init__(self, **options):
super().__init__(JavascriptLexer, PhpLexer, **options)
def analyse_text(text):
return PhpLexer.analyse_text(text)
class HtmlSmartyLexer(DelegatingLexer):
"""
Subclass of the `SmartyLexer` that highlights unlexed data with the
`HtmlLexer`.
Nested Javascript and CSS is highlighted too.
"""
name = 'HTML+Smarty'
aliases = ['html+smarty']
alias_filenames = ['*.html', '*.htm', '*.xhtml', '*.tpl']
mimetypes = ['text/html+smarty']
def __init__(self, **options):
super().__init__(HtmlLexer, SmartyLexer, **options)
def analyse_text(text):
rv = SmartyLexer.analyse_text(text) - 0.01
if html_doctype_matches(text):
rv += 0.5
return rv
class XmlSmartyLexer(DelegatingLexer):
"""
Subclass of the `SmartyLexer` that highlights unlexed data with the
`XmlLexer`.
"""
name = 'XML+Smarty'
aliases = ['xml+smarty']
alias_filenames = ['*.xml', '*.tpl']
mimetypes = ['application/xml+smarty']
def __init__(self, **options):
super().__init__(XmlLexer, SmartyLexer, **options)
def analyse_text(text):
rv = SmartyLexer.analyse_text(text) - 0.01
if looks_like_xml(text):
rv += 0.4
return rv
class CssSmartyLexer(DelegatingLexer):
"""
Subclass of the `SmartyLexer` that highlights unlexed data with the
`CssLexer`.
"""
name = 'CSS+Smarty'
aliases = ['css+smarty']
alias_filenames = ['*.css', '*.tpl']
mimetypes = ['text/css+smarty']
def __init__(self, **options):
super().__init__(CssLexer, SmartyLexer, **options)
def analyse_text(text):
return SmartyLexer.analyse_text(text) - 0.05
class JavascriptSmartyLexer(DelegatingLexer):
"""
Subclass of the `SmartyLexer` that highlights unlexed data with the
`JavascriptLexer`.
"""
name = 'JavaScript+Smarty'
aliases = ['javascript+smarty', 'js+smarty']
alias_filenames = ['*.js', '*.tpl']
mimetypes = ['application/x-javascript+smarty',
'text/x-javascript+smarty',
'text/javascript+smarty']
def __init__(self, **options):
super().__init__(JavascriptLexer, SmartyLexer, **options)
def analyse_text(text):
return SmartyLexer.analyse_text(text) - 0.05
class HtmlDjangoLexer(DelegatingLexer):
"""
Subclass of the `DjangoLexer` that highlights unlexed data with the
`HtmlLexer`.
Nested Javascript and CSS is highlighted too.
"""
name = 'HTML+Django/Jinja'
aliases = ['html+django', 'html+jinja', 'htmldjango']
filenames = ['*.html.j2', '*.htm.j2', '*.xhtml.j2', '*.html.jinja2', '*.htm.jinja2', '*.xhtml.jinja2']
alias_filenames = ['*.html', '*.htm', '*.xhtml']
mimetypes = ['text/html+django', 'text/html+jinja']
def __init__(self, **options):
super().__init__(HtmlLexer, DjangoLexer, **options)
def analyse_text(text):
rv = DjangoLexer.analyse_text(text) - 0.01
if html_doctype_matches(text):
rv += 0.5
return rv
class XmlDjangoLexer(DelegatingLexer):
"""
Subclass of the `DjangoLexer` that highlights unlexed data with the
`XmlLexer`.
"""
name = 'XML+Django/Jinja'
aliases = ['xml+django', 'xml+jinja']
filenames = ['*.xml.j2', '*.xml.jinja2']
alias_filenames = ['*.xml']
mimetypes = ['application/xml+django', 'application/xml+jinja']
def __init__(self, **options):
super().__init__(XmlLexer, DjangoLexer, **options)
def analyse_text(text):
rv = DjangoLexer.analyse_text(text) - 0.01
if looks_like_xml(text):
rv += 0.4
return rv
class CssDjangoLexer(DelegatingLexer):
"""
Subclass of the `DjangoLexer` that highlights unlexed data with the
`CssLexer`.
"""
name = 'CSS+Django/Jinja'
aliases = ['css+django', 'css+jinja']
filenames = ['*.css.j2', '*.css.jinja2']
alias_filenames = ['*.css']
mimetypes = ['text/css+django', 'text/css+jinja']
def __init__(self, **options):
super().__init__(CssLexer, DjangoLexer, **options)
def analyse_text(text):
return DjangoLexer.analyse_text(text) - 0.05
class JavascriptDjangoLexer(DelegatingLexer):
"""
Subclass of the `DjangoLexer` that highlights unlexed data with the
`JavascriptLexer`.
"""
name = 'JavaScript+Django/Jinja'
aliases = ['javascript+django', 'js+django',
'javascript+jinja', 'js+jinja']
filenames = ['*.js.j2', '*.js.jinja2']
alias_filenames = ['*.js']
mimetypes = ['application/x-javascript+django',
'application/x-javascript+jinja',
'text/x-javascript+django',
'text/x-javascript+jinja',
'text/javascript+django',
'text/javascript+jinja']
def __init__(self, **options):
super().__init__(JavascriptLexer, DjangoLexer, **options)
def analyse_text(text):
return DjangoLexer.analyse_text(text) - 0.05
class JspRootLexer(RegexLexer):
"""
Base for the `JspLexer`. Yields `Token.Other` for area outside of
JSP tags.
.. versionadded:: 0.7
"""
tokens = {
'root': [
(r'<%\S?', Keyword, 'sec'),
# FIXME: I want to make these keywords but still parse attributes.
(r'</?jsp:(forward|getProperty|include|plugin|setProperty|useBean).*?>',
Keyword),
(r'[^<]+', Other),
(r'<', Other),
],
'sec': [
(r'%>', Keyword, '#pop'),
# note: '\w\W' != '.' without DOTALL.
(r'[\w\W]+?(?=%>|\Z)', using(JavaLexer)),
],
}
class JspLexer(DelegatingLexer):
"""
Lexer for Java Server Pages.
.. versionadded:: 0.7
"""
name = 'Java Server Page'
aliases = ['jsp']
filenames = ['*.jsp']
mimetypes = ['application/x-jsp']
def __init__(self, **options):
super().__init__(XmlLexer, JspRootLexer, **options)
def analyse_text(text):
rv = JavaLexer.analyse_text(text) - 0.01
if looks_like_xml(text):
rv += 0.4
if '<%' in text and '%>' in text:
rv += 0.1
return rv
class EvoqueLexer(RegexLexer):
"""
For files using the Evoque templating system.
.. versionadded:: 1.1
"""
name = 'Evoque'
aliases = ['evoque']
filenames = ['*.evoque']
mimetypes = ['application/x-evoque']
flags = re.DOTALL
tokens = {
'root': [
(r'[^#$]+', Other),
(r'#\[', Comment.Multiline, 'comment'),
(r'\$\$', Other),
# svn keywords
(r'\$\w+:[^$\n]*\$', Comment.Multiline),
# directives: begin, end
(r'(\$)(begin|end)(\{(%)?)(.*?)((?(4)%)\})',
bygroups(Punctuation, Name.Builtin, Punctuation, None,
String, Punctuation)),
# directives: evoque, overlay
# see doc for handling first name arg: /directives/evoque/
# + minor inconsistency: the "name" in e.g. $overlay{name=site_base}
# should be using(PythonLexer), not passed out as String
(r'(\$)(evoque|overlay)(\{(%)?)(\s*[#\w\-"\'.]+)?'
r'(.*?)((?(4)%)\})',
bygroups(Punctuation, Name.Builtin, Punctuation, None,
String, using(PythonLexer), Punctuation)),
# directives: if, for, prefer, test
(r'(\$)(\w+)(\{(%)?)(.*?)((?(4)%)\})',
bygroups(Punctuation, Name.Builtin, Punctuation, None,
using(PythonLexer), Punctuation)),
# directive clauses (no {} expression)
(r'(\$)(else|rof|fi)', bygroups(Punctuation, Name.Builtin)),
# expressions
(r'(\$\{(%)?)(.*?)((!)(.*?))?((?(2)%)\})',
bygroups(Punctuation, None, using(PythonLexer),
Name.Builtin, None, None, Punctuation)),
(r'#', Other),
],
'comment': [
(r'[^\]#]', Comment.Multiline),
(r'#\[', Comment.Multiline, '#push'),
(r'\]#', Comment.Multiline, '#pop'),
(r'[\]#]', Comment.Multiline)
],
}
def analyse_text(text):
"""Evoque templates use $evoque, which is unique."""
if '$evoque' in text:
return 1
class EvoqueHtmlLexer(DelegatingLexer):
"""
Subclass of the `EvoqueLexer` that highlights unlexed data with the
`HtmlLexer`.
.. versionadded:: 1.1
"""
name = 'HTML+Evoque'
aliases = ['html+evoque']
filenames = ['*.html']
mimetypes = ['text/html+evoque']
def __init__(self, **options):
super().__init__(HtmlLexer, EvoqueLexer, **options)
def analyse_text(text):
return EvoqueLexer.analyse_text(text)
class EvoqueXmlLexer(DelegatingLexer):
"""
Subclass of the `EvoqueLexer` that highlights unlexed data with the
`XmlLexer`.
.. versionadded:: 1.1
"""
name = 'XML+Evoque'
aliases = ['xml+evoque']
filenames = ['*.xml']
mimetypes = ['application/xml+evoque']
def __init__(self, **options):
super().__init__(XmlLexer, EvoqueLexer, **options)
def analyse_text(text):
return EvoqueLexer.analyse_text(text)
class ColdfusionLexer(RegexLexer):
"""
Coldfusion statements
"""
name = 'cfstatement'
aliases = ['cfs']
filenames = []
mimetypes = []
flags = re.IGNORECASE
tokens = {
'root': [
(r'//.*?\n', Comment.Single),
(r'/\*(?:.|\n)*?\*/', Comment.Multiline),
(r'\+\+|--', Operator),
(r'[-+*/^&=!]', Operator),
(r'<=|>=|<|>|==', Operator),
(r'mod\b', Operator),
(r'(eq|lt|gt|lte|gte|not|is|and|or)\b', Operator),
(r'\|\||&&', Operator),
(r'\?', Operator),
(r'"', String.Double, 'string'),
# There is a special rule for allowing html in single quoted
# strings, evidently.
(r"'.*?'", String.Single),
(r'\d+', Number),
(r'(if|else|len|var|xml|default|break|switch|component|property|function|do|'
r'try|catch|in|continue|for|return|while|required|any|array|binary|boolean|'
r'component|date|guid|numeric|query|string|struct|uuid|case)\b', Keyword),
(r'(true|false|null)\b', Keyword.Constant),
(r'(application|session|client|cookie|super|this|variables|arguments)\b',
Name.Constant),
(r'([a-z_$][\w.]*)(\s*)(\()',
bygroups(Name.Function, Text, Punctuation)),
(r'[a-z_$][\w.]*', Name.Variable),
(r'[()\[\]{};:,.\\]', Punctuation),
(r'\s+', Text),
],
'string': [
(r'""', String.Double),
(r'#.+?#', String.Interp),
(r'[^"#]+', String.Double),
(r'#', String.Double),
(r'"', String.Double, '#pop'),
],
}
class ColdfusionMarkupLexer(RegexLexer):
"""
Coldfusion markup only
"""
name = 'Coldfusion'
aliases = ['cf']
filenames = []
mimetypes = []
tokens = {
'root': [
(r'[^<]+', Other),
include('tags'),
(r'<[^<>]*', Other),
],
'tags': [
(r'<!---', Comment.Multiline, 'cfcomment'),
(r'(?s)<!--.*?-->', Comment),
(r'<cfoutput.*?>', Name.Builtin, 'cfoutput'),
(r'(?s)(<cfscript.*?>)(.+?)(</cfscript.*?>)',
bygroups(Name.Builtin, using(ColdfusionLexer), Name.Builtin)),
# negative lookbehind is for strings with embedded >
(r'(?s)(</?cf(?:component|include|if|else|elseif|loop|return|'
r'dbinfo|dump|abort|location|invoke|throw|file|savecontent|'
r'mailpart|mail|header|content|zip|image|lock|argument|try|'
r'catch|break|directory|http|set|function|param)\b)(.*?)((?<!\\)>)',
bygroups(Name.Builtin, using(ColdfusionLexer), Name.Builtin)),
],
'cfoutput': [
(r'[^#<]+', Other),
(r'(#)(.*?)(#)', bygroups(Punctuation, using(ColdfusionLexer),
Punctuation)),
# (r'<cfoutput.*?>', Name.Builtin, '#push'),
(r'</cfoutput.*?>', Name.Builtin, '#pop'),
include('tags'),
(r'(?s)<[^<>]*', Other),
(r'#', Other),
],
'cfcomment': [
(r'<!---', Comment.Multiline, '#push'),
(r'--->', Comment.Multiline, '#pop'),
(r'([^<-]|<(?!!---)|-(?!-->))+', Comment.Multiline),
],
}
class ColdfusionHtmlLexer(DelegatingLexer):
"""
Coldfusion markup in html
"""
name = 'Coldfusion HTML'
aliases = ['cfm']
filenames = ['*.cfm', '*.cfml']
mimetypes = ['application/x-coldfusion']
def __init__(self, **options):
super().__init__(HtmlLexer, ColdfusionMarkupLexer, **options)
class ColdfusionCFCLexer(DelegatingLexer):
"""
Coldfusion markup/script components
.. versionadded:: 2.0
"""
name = 'Coldfusion CFC'
aliases = ['cfc']
filenames = ['*.cfc']
mimetypes = []
def __init__(self, **options):
super().__init__(ColdfusionHtmlLexer, ColdfusionLexer, **options)
class SspLexer(DelegatingLexer):
"""
Lexer for Scalate Server Pages.
.. versionadded:: 1.4
"""
name = 'Scalate Server Page'
aliases = ['ssp']
filenames = ['*.ssp']
mimetypes = ['application/x-ssp']
def __init__(self, **options):
super().__init__(XmlLexer, JspRootLexer, **options)
def analyse_text(text):
rv = 0.0
if re.search(r'val \w+\s*:', text):
rv += 0.6
if looks_like_xml(text):
rv += 0.2
if '<%' in text and '%>' in text:
rv += 0.1
return rv
class TeaTemplateRootLexer(RegexLexer):
"""
Base for the `TeaTemplateLexer`. Yields `Token.Other` for area outside of
code blocks.
.. versionadded:: 1.5
"""
tokens = {
'root': [
(r'<%\S?', Keyword, 'sec'),
(r'[^<]+', Other),
(r'<', Other),
],
'sec': [
(r'%>', Keyword, '#pop'),
# note: '\w\W' != '.' without DOTALL.
(r'[\w\W]+?(?=%>|\Z)', using(TeaLangLexer)),
],
}
class TeaTemplateLexer(DelegatingLexer):
"""
Lexer for `Tea Templates <http://teatrove.org/>`_.
.. versionadded:: 1.5
"""
name = 'Tea'
aliases = ['tea']
filenames = ['*.tea']
mimetypes = ['text/x-tea']
def __init__(self, **options):
super().__init__(XmlLexer, TeaTemplateRootLexer, **options)
def analyse_text(text):
rv = TeaLangLexer.analyse_text(text) - 0.01
if looks_like_xml(text):
rv += 0.4
if '<%' in text and '%>' in text:
rv += 0.1
return rv
class LassoHtmlLexer(DelegatingLexer):
"""
Subclass of the `LassoLexer` which highlights unhandled data with the
`HtmlLexer`.
Nested JavaScript and CSS is also highlighted.
.. versionadded:: 1.6
"""
name = 'HTML+Lasso'
aliases = ['html+lasso']
alias_filenames = ['*.html', '*.htm', '*.xhtml', '*.lasso', '*.lasso[89]',
'*.incl', '*.inc', '*.las']
mimetypes = ['text/html+lasso',
'application/x-httpd-lasso',
'application/x-httpd-lasso[89]']
def __init__(self, **options):
super().__init__(HtmlLexer, LassoLexer, **options)
def analyse_text(text):
rv = LassoLexer.analyse_text(text) - 0.01
if html_doctype_matches(text): # same as HTML lexer
rv += 0.5
return rv
class LassoXmlLexer(DelegatingLexer):
"""
Subclass of the `LassoLexer` which highlights unhandled data with the
`XmlLexer`.
.. versionadded:: 1.6
"""
name = 'XML+Lasso'
aliases = ['xml+lasso']
alias_filenames = ['*.xml', '*.lasso', '*.lasso[89]',
'*.incl', '*.inc', '*.las']
mimetypes = ['application/xml+lasso']
def __init__(self, **options):
super().__init__(XmlLexer, LassoLexer, **options)
def analyse_text(text):
rv = LassoLexer.analyse_text(text) - 0.01
if looks_like_xml(text):
rv += 0.4
return rv
class LassoCssLexer(DelegatingLexer):
"""
Subclass of the `LassoLexer` which highlights unhandled data with the
`CssLexer`.
.. versionadded:: 1.6
"""
name = 'CSS+Lasso'
aliases = ['css+lasso']
alias_filenames = ['*.css']
mimetypes = ['text/css+lasso']
def __init__(self, **options):
options['requiredelimiters'] = True
super().__init__(CssLexer, LassoLexer, **options)
def analyse_text(text):
rv = LassoLexer.analyse_text(text) - 0.05
if re.search(r'\w+:[^;]+;', text):
rv += 0.1
if 'padding:' in text:
rv += 0.1
return rv
class LassoJavascriptLexer(DelegatingLexer):
"""
Subclass of the `LassoLexer` which highlights unhandled data with the
`JavascriptLexer`.
.. versionadded:: 1.6
"""
name = 'JavaScript+Lasso'
aliases = ['javascript+lasso', 'js+lasso']
alias_filenames = ['*.js']
mimetypes = ['application/x-javascript+lasso',
'text/x-javascript+lasso',
'text/javascript+lasso']
def __init__(self, **options):
options['requiredelimiters'] = True
super().__init__(JavascriptLexer, LassoLexer, **options)
def analyse_text(text):
rv = LassoLexer.analyse_text(text) - 0.05
return rv
class HandlebarsLexer(RegexLexer):
"""
Generic handlebars template lexer.
Highlights only the Handlebars template tags (stuff between `{{` and `}}`).
Everything else is left for a delegating lexer.
.. versionadded:: 2.0
"""
name = "Handlebars"
url = 'https://handlebarsjs.com/'
aliases = ['handlebars']
tokens = {
'root': [
(r'[^{]+', Other),
# Comment start {{! }} or {{!--
(r'\{\{!.*\}\}', Comment),
# HTML Escaping open {{{expression
(r'(\{\{\{)(\s*)', bygroups(Comment.Special, Text), 'tag'),
# {{blockOpen {{#blockOpen {{/blockClose with optional tilde ~
(r'(\{\{)([#~/]+)([^\s}]*)',
bygroups(Comment.Preproc, Number.Attribute, Number.Attribute), 'tag'),
(r'(\{\{)(\s*)', bygroups(Comment.Preproc, Text), 'tag'),
],
'tag': [
(r'\s+', Text),
# HTML Escaping close }}}
(r'\}\}\}', Comment.Special, '#pop'),
# blockClose}}, includes optional tilde ~
(r'(~?)(\}\})', bygroups(Number, Comment.Preproc), '#pop'),
# {{opt=something}}
(r'([^\s}]+)(=)', bygroups(Name.Attribute, Operator)),
# Partials {{> ...}}
(r'(>)(\s*)(@partial-block)', bygroups(Keyword, Text, Keyword)),
(r'(#?>)(\s*)([\w-]+)', bygroups(Keyword, Text, Name.Variable)),
(r'(>)(\s*)(\()', bygroups(Keyword, Text, Punctuation),
'dynamic-partial'),
include('generic'),
],
'dynamic-partial': [
(r'\s+', Text),
(r'\)', Punctuation, '#pop'),
(r'(lookup)(\s+)(\.|this)(\s+)', bygroups(Keyword, Text,
Name.Variable, Text)),
(r'(lookup)(\s+)(\S+)', bygroups(Keyword, Text,
using(this, state='variable'))),
(r'[\w-]+', Name.Function),
include('generic'),
],
'variable': [
(r'[()/@a-zA-Z][\w-]*', Name.Variable),
(r'\.[\w-]+', Name.Variable),
(r'(this\/|\.\/|(\.\.\/)+)[\w-]+', Name.Variable),
],
'generic': [
include('variable'),
# borrowed from DjangoLexer
(r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
(r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
r"0[xX][0-9a-fA-F]+[Ll]?", Number),
]
}
class HandlebarsHtmlLexer(DelegatingLexer):
"""
Subclass of the `HandlebarsLexer` that highlights unlexed data with the
`HtmlLexer`.
.. versionadded:: 2.0
"""
name = "HTML+Handlebars"
aliases = ["html+handlebars"]
filenames = ['*.handlebars', '*.hbs']
mimetypes = ['text/html+handlebars', 'text/x-handlebars-template']
def __init__(self, **options):
super().__init__(HtmlLexer, HandlebarsLexer, **options)
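# --- Illustrative sketch (an editorial addition): HandlebarsLexer only tags the
# {{ ... }} spans; everything else comes out as Token.Other, which the wrapper above
# hands to HtmlLexer. The template string is a made-up example.
def _demo_handlebars_html():
    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from pygments.lexers.templates import HandlebarsHtmlLexer

    tpl = '<ul>{{#each items}}<li>{{this}}</li>{{/each}}</ul>'
    print(highlight(tpl, HandlebarsHtmlLexer(), HtmlFormatter()))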
class YamlJinjaLexer(DelegatingLexer):
"""
Subclass of the `DjangoLexer` that highlights unlexed data with the
`YamlLexer`.
Commonly used in Saltstack salt states.
.. versionadded:: 2.0
"""
name = 'YAML+Jinja'
aliases = ['yaml+jinja', 'salt', 'sls']
filenames = ['*.sls', '*.yaml.j2', '*.yml.j2', '*.yaml.jinja2', '*.yml.jinja2']
mimetypes = ['text/x-yaml+jinja', 'text/x-sls']
def __init__(self, **options):
super().__init__(YamlLexer, DjangoLexer, **options)
class LiquidLexer(RegexLexer):
"""
Lexer for Liquid templates.
.. versionadded:: 2.0
"""
name = 'liquid'
url = 'https://www.rubydoc.info/github/Shopify/liquid'
aliases = ['liquid']
filenames = ['*.liquid']
tokens = {
'root': [
(r'[^{]+', Text),
# tags and block tags
(r'(\{%)(\s*)', bygroups(Punctuation, Whitespace), 'tag-or-block'),
# output tags
(r'(\{\{)(\s*)([^\s}]+)',
bygroups(Punctuation, Whitespace, using(this, state = 'generic')),
'output'),
(r'\{', Text)
],
'tag-or-block': [
# builtin logic blocks
(r'(if|unless|elsif|case)(?=\s+)', Keyword.Reserved, 'condition'),
(r'(when)(\s+)', bygroups(Keyword.Reserved, Whitespace),
combined('end-of-block', 'whitespace', 'generic')),
(r'(else)(\s*)(%\})',
bygroups(Keyword.Reserved, Whitespace, Punctuation), '#pop'),
# other builtin blocks
(r'(capture)(\s+)([^\s%]+)(\s*)(%\})',
bygroups(Name.Tag, Whitespace, using(this, state = 'variable'),
Whitespace, Punctuation), '#pop'),
(r'(comment)(\s*)(%\})',
bygroups(Name.Tag, Whitespace, Punctuation), 'comment'),
(r'(raw)(\s*)(%\})',
bygroups(Name.Tag, Whitespace, Punctuation), 'raw'),
# end of block
(r'(end(case|unless|if))(\s*)(%\})',
bygroups(Keyword.Reserved, None, Whitespace, Punctuation), '#pop'),
(r'(end([^\s%]+))(\s*)(%\})',
bygroups(Name.Tag, None, Whitespace, Punctuation), '#pop'),
# builtin tags (assign and include are handled together with usual tags)
(r'(cycle)(\s+)(?:([^\s:]*)(:))?(\s*)',
bygroups(Name.Tag, Whitespace,
using(this, state='generic'), Punctuation, Whitespace),
'variable-tag-markup'),
# other tags or blocks
(r'([^\s%]+)(\s*)', bygroups(Name.Tag, Whitespace), 'tag-markup')
],
'output': [
include('whitespace'),
(r'\}\}', Punctuation, '#pop'), # end of output
(r'\|', Punctuation, 'filters')
],
'filters': [
include('whitespace'),
(r'\}\}', Punctuation, ('#pop', '#pop')), # end of filters and output
(r'([^\s|:]+)(:?)(\s*)',
bygroups(Name.Function, Punctuation, Whitespace), 'filter-markup')
],
'filter-markup': [
(r'\|', Punctuation, '#pop'),
include('end-of-tag'),
include('default-param-markup')
],
'condition': [
include('end-of-block'),
include('whitespace'),
(r'([^\s=!><]+)(\s*)([=!><]=?)(\s*)(\S+)(\s*)(%\})',
bygroups(using(this, state = 'generic'), Whitespace, Operator,
Whitespace, using(this, state = 'generic'), Whitespace,
Punctuation)),
(r'\b!', Operator),
(r'\bnot\b', Operator.Word),
(r'([\w.\'"]+)(\s+)(contains)(\s+)([\w.\'"]+)',
bygroups(using(this, state = 'generic'), Whitespace, Operator.Word,
Whitespace, using(this, state = 'generic'))),
include('generic'),
include('whitespace')
],
'generic-value': [
include('generic'),
include('end-at-whitespace')
],
'operator': [
(r'(\s*)((=|!|>|<)=?)(\s*)',
bygroups(Whitespace, Operator, None, Whitespace), '#pop'),
(r'(\s*)(\bcontains\b)(\s*)',
bygroups(Whitespace, Operator.Word, Whitespace), '#pop'),
],
'end-of-tag': [
(r'\}\}', Punctuation, '#pop')
],
'end-of-block': [
(r'%\}', Punctuation, ('#pop', '#pop'))
],
'end-at-whitespace': [
(r'\s+', Whitespace, '#pop')
],
# states for unknown markup
'param-markup': [
include('whitespace'),
# params with colons or equals
(r'([^\s=:]+)(\s*)(=|:)',
bygroups(Name.Attribute, Whitespace, Operator)),
# explicit variables
(r'(\{\{)(\s*)([^\s}])(\s*)(\}\})',
bygroups(Punctuation, Whitespace, using(this, state = 'variable'),
Whitespace, Punctuation)),
include('string'),
include('number'),
include('keyword'),
(r',', Punctuation)
],
'default-param-markup': [
include('param-markup'),
(r'.', Text) # fallback for switches / variables / un-quoted strings / ...
],
'variable-param-markup': [
include('param-markup'),
include('variable'),
(r'.', Text) # fallback
],
'tag-markup': [
(r'%\}', Punctuation, ('#pop', '#pop')), # end of tag
include('default-param-markup')
],
'variable-tag-markup': [
(r'%\}', Punctuation, ('#pop', '#pop')), # end of tag
include('variable-param-markup')
],
# states for different values types
'keyword': [
(r'\b(false|true)\b', Keyword.Constant)
],
'variable': [
(r'[a-zA-Z_]\w*', Name.Variable),
(r'(?<=\w)\.(?=\w)', Punctuation)
],
'string': [
(r"'[^']*'", String.Single),
(r'"[^"]*"', String.Double)
],
'number': [
(r'\d+\.\d+', Number.Float),
(r'\d+', Number.Integer)
],
'generic': [ # decides for variable, string, keyword or number
include('keyword'),
include('string'),
include('number'),
include('variable')
],
'whitespace': [
(r'[ \t]+', Whitespace)
],
# states for builtin blocks
'comment': [
(r'(\{%)(\s*)(endcomment)(\s*)(%\})',
bygroups(Punctuation, Whitespace, Name.Tag, Whitespace,
Punctuation), ('#pop', '#pop')),
(r'.', Comment)
],
'raw': [
(r'[^{]+', Text),
(r'(\{%)(\s*)(endraw)(\s*)(%\})',
bygroups(Punctuation, Whitespace, Name.Tag, Whitespace,
Punctuation), '#pop'),
(r'\{', Text)
],
}
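# --- Illustrative sketch (an editorial addition): a quick look at how the state
# machine above tokenizes a small Liquid fragment (made-up input).
def _demo_liquid_tokens():
    from pygments.lexers.templates import LiquidLexer

    src = "{% if user %}Hello {{ user.name | capitalize }}!{% endif %}"
    for ttype, value in LiquidLexer().get_tokens(src):
        print(ttype, repr(value))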
class TwigLexer(RegexLexer):
"""
Twig template lexer.
It just highlights Twig code between the preprocessor directives,
other data is left untouched by the lexer.
.. versionadded:: 2.0
"""
name = 'Twig'
aliases = ['twig']
mimetypes = ['application/x-twig']
flags = re.M | re.S
# Note that a backslash is included in the following two patterns
# PHP uses a backslash as a namespace separator
_ident_char = r'[\\\w-]|[^\x00-\x7f]'
_ident_begin = r'(?:[\\_a-z]|[^\x00-\x7f])'
_ident_end = r'(?:' + _ident_char + ')*'
_ident_inner = _ident_begin + _ident_end
tokens = {
'root': [
(r'[^{]+', Other),
(r'\{\{', Comment.Preproc, 'var'),
# twig comments
(r'\{\#.*?\#\}', Comment),
# raw twig blocks
(r'(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)'
r'(\{%)(-?\s*)(endraw)(\s*-?)(%\})',
bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
Other, Comment.Preproc, Text, Keyword, Text,
Comment.Preproc)),
(r'(\{%)(-?\s*)(verbatim)(\s*-?)(%\})(.*?)'
r'(\{%)(-?\s*)(endverbatim)(\s*-?)(%\})',
bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
Other, Comment.Preproc, Text, Keyword, Text,
Comment.Preproc)),
# filter blocks
(r'(\{%%)(-?\s*)(filter)(\s+)(%s)' % _ident_inner,
bygroups(Comment.Preproc, Text, Keyword, Text, Name.Function),
'tag'),
(r'(\{%)(-?\s*)([a-zA-Z_]\w*)',
bygroups(Comment.Preproc, Text, Keyword), 'tag'),
(r'\{', Other),
],
'varnames': [
(r'(\|)(\s*)(%s)' % _ident_inner,
bygroups(Operator, Text, Name.Function)),
(r'(is)(\s+)(not)?(\s*)(%s)' % _ident_inner,
bygroups(Keyword, Text, Keyword, Text, Name.Function)),
(r'(?i)(true|false|none|null)\b', Keyword.Pseudo),
(r'(in|not|and|b-and|or|b-or|b-xor|is'
             r'|if|elseif|else|import'
             r'|constant|defined|divisibleby|empty|even|iterable|odd|sameas'
             r'|matches|starts\s+with|ends\s+with)\b',
Keyword),
(r'(loop|block|parent)\b', Name.Builtin),
(_ident_inner, Name.Variable),
(r'\.' + _ident_inner, Name.Variable),
(r'\.[0-9]+', Number),
(r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
(r'([{}()\[\]+\-*/,:~%]|\.\.|\?|:|\*\*|\/\/|!=|[><=]=?)', Operator),
(r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
r"0[xX][0-9a-fA-F]+[Ll]?", Number),
],
'var': [
(r'\s+', Text),
(r'(-?)(\}\})', bygroups(Text, Comment.Preproc), '#pop'),
include('varnames')
],
'tag': [
(r'\s+', Text),
(r'(-?)(%\})', bygroups(Text, Comment.Preproc), '#pop'),
include('varnames'),
(r'.', Punctuation),
],
}
class TwigHtmlLexer(DelegatingLexer):
"""
Subclass of the `TwigLexer` that highlights unlexed data with the
`HtmlLexer`.
.. versionadded:: 2.0
"""
name = "HTML+Twig"
aliases = ["html+twig"]
filenames = ['*.twig']
mimetypes = ['text/html+twig']
def __init__(self, **options):
super().__init__(HtmlLexer, TwigLexer, **options)
class Angular2Lexer(RegexLexer):
"""
Generic angular2 template lexer.
Highlights only the Angular template tags (stuff between `{{` and `}}` and
special attributes: '(event)=', '[property]=', '[(twoWayBinding)]=').
Everything else is left for a delegating lexer.
.. versionadded:: 2.1
"""
name = "Angular2"
url = 'https://angular.io/guide/template-syntax'
aliases = ['ng2']
tokens = {
'root': [
(r'[^{([*#]+', Other),
# {{meal.name}}
(r'(\{\{)(\s*)', bygroups(Comment.Preproc, Text), 'ngExpression'),
# (click)="deleteOrder()"; [value]="test"; [(twoWayTest)]="foo.bar"
(r'([([]+)([\w:.-]+)([\])]+)(\s*)(=)(\s*)',
bygroups(Punctuation, Name.Attribute, Punctuation, Text, Operator, Text),
'attr'),
(r'([([]+)([\w:.-]+)([\])]+)(\s*)',
bygroups(Punctuation, Name.Attribute, Punctuation, Text)),
# *ngIf="..."; #f="ngForm"
(r'([*#])([\w:.-]+)(\s*)(=)(\s*)',
bygroups(Punctuation, Name.Attribute, Text, Operator, Text), 'attr'),
(r'([*#])([\w:.-]+)(\s*)',
bygroups(Punctuation, Name.Attribute, Text)),
],
'ngExpression': [
(r'\s+(\|\s+)?', Text),
(r'\}\}', Comment.Preproc, '#pop'),
# Literals
(r':?(true|false)', String.Boolean),
(r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
(r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
r"0[xX][0-9a-fA-F]+[Ll]?", Number),
            # Variable text
(r'[a-zA-Z][\w-]*(\(.*\))?', Name.Variable),
(r'\.[\w-]+(\(.*\))?', Name.Variable),
# inline If
(r'(\?)(\s*)([^}\s]+)(\s*)(:)(\s*)([^}\s]+)(\s*)',
bygroups(Operator, Text, String, Text, Operator, Text, String, Text)),
],
'attr': [
('".*?"', String, '#pop'),
("'.*?'", String, '#pop'),
(r'[^\s>]+', String, '#pop'),
],
}
class Angular2HtmlLexer(DelegatingLexer):
"""
Subclass of the `Angular2Lexer` that highlights unlexed data with the
`HtmlLexer`.
.. versionadded:: 2.0
"""
name = "HTML + Angular2"
aliases = ["html+ng2"]
filenames = ['*.ng2']
def __init__(self, **options):
super().__init__(HtmlLexer, Angular2Lexer, **options)
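# --- Illustrative sketch (an editorial addition): Angular2Lexer only tokenizes the
# {{ }} interpolations and the (event)= / [property]= / *directive= bindings; the rest
# of the markup is handed to HtmlLexer by the wrapper above. Made-up template input:
def _demo_angular2_tokens():
    from pygments.lexers.templates import Angular2HtmlLexer

    src = '<li *ngFor="let meal of meals" (click)="pick(meal)">{{ meal.name }}</li>'
    for ttype, value in Angular2HtmlLexer().get_tokens(src):
        print(ttype, repr(value))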
class SqlJinjaLexer(DelegatingLexer):
"""
Templated SQL lexer.
.. versionadded:: 2.13
"""
name = 'SQL+Jinja'
aliases = ['sql+jinja']
filenames = ['*.sql', '*.sql.j2', '*.sql.jinja2']
def __init__(self, **options):
super().__init__(SqlLexer, DjangoLexer, **options)
def analyse_text(text):
rv = 0.0
# dbt's ref function
if re.search(r'\{\{\s*ref\(.*\)\s*\}\}', text):
rv += 0.4
# dbt's source function
if re.search(r'\{\{\s*source\(.*\)\s*\}\}', text):
rv += 0.25
# Jinja macro
if re.search(
r'\{%-?\s*macro \w+\(.*\)\s*-?%\}\s+.*\s+\{%-?\s*endmacro\s*-?%\}',
text,
re.S,
):
rv += 0.15
return rv
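# --- Illustrative sketch (an editorial addition): the scoring above targets dbt-style
# models, where {{ ref(...) }} and {{ source(...) }} are strong signals. Made-up input:
def _demo_sql_jinja_score():
    from pygments.lexers.templates import SqlJinjaLexer

    model = "select * from {{ ref('orders') }} where amount > 0"
    print(SqlJinjaLexer.analyse_text(model))  # should print roughly 0.4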
| 72,695 | Python | 30.59322 | 106 | 0.487255 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/fift.py | """
pygments.lexers.fift
~~~~~~~~~~~~~~~~~~~~
Lexers for fift.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, include
from pygments.token import Literal, Comment, Name, String, Number, Whitespace
__all__ = ['FiftLexer']
class FiftLexer(RegexLexer):
"""
For Fift source code.
"""
name = 'Fift'
aliases = ['fift', 'fif']
filenames = ['*.fif']
url = 'https://ton-blockchain.github.io/docs/fiftbase.pdf'
tokens = {
'root': [
(r'\s+', Whitespace),
include('comments'),
(r'[\.+]?\"', String, 'string'),
# numbers
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'0b[01]+', Number.Bin),
(r'-?[0-9]+("/"-?[0-9]+)?', Number.Decimal),
# slices
(r'b\{[01]+\}', Literal),
(r'x\{[0-9a-fA-F_]+\}', Literal),
# byte literal
(r'B\{[0-9a-fA-F_]+\}', Literal),
# treat anything as word
(r'\S+', Name)
],
'string': [
(r'\\.', String.Escape),
(r'\"', String, '#pop'),
(r'[^\"\r\n\\]+', String)
],
'comments': [
(r'//.*', Comment.Singleline),
(r'/\*', Comment.Multiline, 'comment'),
],
'comment': [
(r'[^/*]+', Comment.Multiline),
(r'/\*', Comment.Multiline, '#push'),
(r'\*/', Comment.Multiline, '#pop'),
(r'[*/]', Comment.Multiline),
],
}
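# --- Illustrative sketch (an editorial addition): a quick look at how the rules above
# classify Fift-style literals. The fragment is made up purely to exercise the lexer
# and is not meant to be a meaningful Fift program.
def _demo_fift_tokens():
    from pygments.lexers.fift import FiftLexer

    src = '// literals demo\nx{ABCD_} b{0101} 0x10 -17 ."hello" cr\n'
    for ttype, value in FiftLexer().get_tokens(src):
        print(ttype, repr(value))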
| 1,621 | Python | 22.852941 | 77 | 0.436767 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/_qlik_builtins.py | """
pygments.lexers._qlik_builtins
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Qlik builtins.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
# operators
# see https://help.qlik.com/en-US/sense/August2021/Subsystems/Hub/Content/Sense_Hub/Scripting/Operators/operators.htm
OPERATORS_LIST = {
"words": [
# Bit operators
"bitnot",
"bitand",
"bitor",
"bitxor",
# Logical operators
"and",
"or",
"not",
"xor",
# Relational operators
"precedes",
"follows",
# String operators
"like",
],
"symbols": [
# Bit operators
">>",
"<<",
# Logical operators
# Numeric operators
"+",
"-",
"/",
"*",
# Relational operators
"<",
"<=",
">",
">=",
"=",
"<>",
# String operators
"&",
],
}
# SCRIPT STATEMENTS
# see https://help.qlik.com/en-US/sense/August2021/Subsystems/Hub/Content/Sense_Hub/Scripting/
STATEMENT_LIST = [
# control statements
"for",
"each",
"in",
"next",
"do",
"while",
"until",
"unless",
"loop",
"return",
"switch",
"case",
"default",
"if",
"else",
"endif",
"then",
"end",
"exit",
"script",
"switch",
# prefixes
"Add",
"Buffer",
"Concatenate",
"Crosstable",
"First",
"Generic",
"Hierarchy",
"HierarchyBelongsTo",
"Inner",
"IntervalMatch",
"Join",
"Keep",
"Left",
"Mapping",
"Merge",
"NoConcatenate",
"Outer",
"Partial reload",
"Replace",
"Right",
"Sample",
"Semantic",
"Unless",
"When",
# regular statements
"Alias", # alias ... as ...
"as",
"AutoNumber",
"Binary",
"Comment field", # comment fields ... using ...
"Comment fields", # comment field ... with ...
"using",
"with",
"Comment table", # comment table ... with ...
"Comment tables", # comment tables ... using ...
"Connect",
"ODBC", # ODBC CONNECT TO ...
"OLEBD", # OLEDB CONNECT TO ...
"CUSTOM", # CUSTOM CONNECT TO ...
"LIB", # LIB CONNECT TO ...
"Declare",
"Derive",
"From",
"explicit",
"implicit",
"Direct Query",
"dimension",
"measure",
"Directory",
"Disconnect",
"Drop field",
"Drop fields",
"Drop table",
"Drop tables",
"Execute",
"FlushLog",
"Force",
"capitalization",
"case upper",
"case lower",
"case mixed",
"Load",
"distinct",
"from",
"inline",
"resident",
"from_field",
"autogenerate",
"extension",
"where",
"group by",
"order by",
"asc",
"desc",
"Let",
"Loosen Table",
"Map",
"NullAsNull",
"NullAsValue",
"Qualify",
"Rem",
"Rename field",
"Rename fields",
"Rename table",
"Rename tables",
"Search",
"include",
"exclude",
"Section",
"access",
"application",
"Select",
"Set",
"Sleep",
"SQL",
"SQLColumns",
"SQLTables",
"SQLTypes",
"Star",
"Store",
"Tag",
"Trace",
"Unmap",
"Unqualify",
"Untag",
# Qualifiers
"total",
]
# Script functions
# see https://help.qlik.com/en-US/sense/August2021/Subsystems/Hub/Content/Sense_Hub/Scripting/functions-in-scripts-chart-expressions.htm
SCRIPT_FUNCTIONS = [
# Basic aggregation functions in the data load script
"FirstSortedValue",
"Max",
"Min",
"Mode",
"Only",
"Sum",
# Counter aggregation functions in the data load script
"Count",
"MissingCount",
"NullCount",
"NumericCount",
"TextCount",
# Financial aggregation functions in the data load script
"IRR",
"XIRR",
"NPV",
"XNPV",
# Statistical aggregation functions in the data load script
"Avg",
"Correl",
"Fractile",
"FractileExc",
"Kurtosis",
"LINEST_B" "LINEST_df",
"LINEST_f",
"LINEST_m",
"LINEST_r2",
"LINEST_seb",
"LINEST_sem",
"LINEST_sey",
"LINEST_ssreg",
"Linest_ssresid",
"Median",
"Skew",
"Stdev",
"Sterr",
"STEYX",
# Statistical test functions
"Chi2Test_chi2",
"Chi2Test_df",
"Chi2Test_p",
# Two independent samples t-tests
"ttest_conf",
"ttest_df",
"ttest_dif",
"ttest_lower",
"ttest_sig",
"ttest_sterr",
"ttest_t",
"ttest_upper",
# Two independent weighted samples t-tests
"ttestw_conf",
"ttestw_df",
"ttestw_dif",
"ttestw_lower",
"ttestw_sig",
"ttestw_sterr",
"ttestw_t",
"ttestw_upper",
# One sample t-tests
"ttest1_conf",
"ttest1_df",
"ttest1_dif",
"ttest1_lower",
"ttest1_sig",
"ttest1_sterr",
"ttest1_t",
"ttest1_upper",
# One weighted sample t-tests
"ttest1w_conf",
"ttest1w_df",
"ttest1w_dif",
"ttest1w_lower",
"ttest1w_sig",
"ttest1w_sterr",
"ttest1w_t",
"ttest1w_upper",
# One column format functions
"ztest_conf",
"ztest_dif",
"ztest_sig",
"ztest_sterr",
"ztest_z",
"ztest_lower",
"ztest_upper",
# Weighted two-column format functions
"ztestw_conf",
"ztestw_dif",
"ztestw_lower",
"ztestw_sig",
"ztestw_sterr",
"ztestw_upper",
"ztestw_z",
# String aggregation functions in the data load script
"Concat",
"FirstValue",
"LastValue",
"MaxString",
"MinString",
# Synthetic dimension functions
"ValueList",
"ValueLoop",
# Color functions
"ARGB",
"HSL",
"RGB",
"Color",
"Colormix1",
"Colormix2",
"SysColor",
"ColorMapHue",
"ColorMapJet",
"black",
"blue",
"brown",
"cyan",
"darkgray",
"green",
"lightblue",
"lightcyan",
"lightgray",
"lightgreen",
"lightmagenta",
"lightred",
"magenta",
"red",
"white",
"yellow",
# Conditional functions
"alt",
"class",
"coalesce",
"if",
"match",
"mixmatch",
"pick",
"wildmatch",
# Counter functions
"autonumber",
"autonumberhash128",
"autonumberhash256",
"IterNo",
"RecNo",
"RowNo",
# Integer expressions of time
"second",
"minute",
"hour",
"day",
"week",
"month",
"year",
"weekyear",
"weekday",
# Timestamp functions
"now",
"today",
"LocalTime",
# Make functions
"makedate",
"makeweekdate",
"maketime",
# Other date functions
"AddMonths",
"AddYears",
"yeartodate",
# Timezone functions
"timezone",
"GMT",
"UTC",
"daylightsaving",
"converttolocaltime",
# Set time functions
"setdateyear",
"setdateyearmonth",
# In... functions
"inyear",
"inyeartodate",
"inquarter",
"inquartertodate",
"inmonth",
"inmonthtodate",
"inmonths",
"inmonthstodate",
"inweek",
"inweektodate",
"inlunarweek",
"inlunarweektodate",
"inday",
"indaytotime",
# Start ... end functions
"yearstart",
"yearend",
"yearname",
"quarterstart",
"quarterend",
"quartername",
"monthstart",
"monthend",
"monthname",
"monthsstart",
"monthsend",
"monthsname",
"weekstart",
"weekend",
"weekname",
"lunarweekstart",
"lunarweekend",
"lunarweekname",
"daystart",
"dayend",
"dayname",
# Day numbering functions
"age",
"networkdays",
"firstworkdate",
"lastworkdate",
"daynumberofyear",
"daynumberofquarter",
# Exponential and logarithmic
"exp",
"log",
"log10",
"pow",
"sqr",
"sqrt",
# Count functions
"GetAlternativeCount",
"GetExcludedCount",
"GetNotSelectedCount",
"GetPossibleCount",
"GetSelectedCount",
# Field and selection functions
"GetCurrentSelections",
"GetFieldSelections",
"GetObjectDimension",
"GetObjectField",
"GetObjectMeasure",
# File functions
"Attribute",
"ConnectString",
"FileBaseName",
"FileDir",
"FileExtension",
"FileName",
"FilePath",
"FileSize",
"FileTime",
"GetFolderPath",
"QvdCreateTime",
"QvdFieldName",
"QvdNoOfFields",
"QvdNoOfRecords",
"QvdTableName",
# Financial functions
"FV",
"nPer",
"Pmt",
"PV",
"Rate",
# Formatting functions
"ApplyCodepage",
"Date",
"Dual",
"Interval",
"Money",
"Num",
"Time",
"Timestamp",
# General numeric functions
"bitcount",
"div",
"fabs",
"fact",
"frac",
"sign",
# Combination and permutation functions
"combin",
"permut",
# Modulo functions
"fmod",
"mod",
# Parity functions
"even",
"odd",
# Rounding functions
"ceil",
"floor",
"round",
# Geospatial functions
"GeoAggrGeometry",
"GeoBoundingBox",
"GeoCountVertex",
"GeoInvProjectGeometry",
"GeoProjectGeometry",
"GeoReduceGeometry",
"GeoGetBoundingBox",
"GeoGetPolygonCenter",
"GeoMakePoint",
"GeoProject",
# Interpretation functions
"Date#",
"Interval#",
"Money#",
"Num#",
"Text",
"Time#",
"Timestamp#",
# Field functions
"FieldIndex",
"FieldValue",
"FieldValueCount",
# Inter-record functions in the data load script
"Exists",
"LookUp",
"Peek",
"Previous",
# Logical functions
"IsNum",
"IsText",
# Mapping functions
"ApplyMap",
"MapSubstring",
# Mathematical functions
"e",
"false",
"pi",
"rand",
"true",
# NULL functions
"EmptyIsNull",
"IsNull",
"Null",
# Basic range functions
"RangeMax",
"RangeMaxString",
"RangeMin",
"RangeMinString",
"RangeMode",
"RangeOnly",
"RangeSum",
# Counter range functions
"RangeCount",
"RangeMissingCount",
"RangeNullCount",
"RangeNumericCount",
"RangeTextCount",
# Statistical range functions
"RangeAvg",
"RangeCorrel",
"RangeFractile",
"RangeKurtosis",
"RangeSkew",
"RangeStdev",
# Financial range functions
"RangeIRR",
"RangeNPV",
"RangeXIRR",
"RangeXNPV",
# Statistical distribution
"CHIDIST",
"CHIINV",
"NORMDIST",
"NORMINV",
"TDIST",
"TINV",
"FDIST",
"FINV",
# String functions
"Capitalize",
"Chr",
"Evaluate",
"FindOneOf",
"Hash128",
"Hash160",
"Hash256",
"Index",
"KeepChar",
"Left",
"Len",
"LevenshteinDist",
"Lower",
"LTrim",
"Mid",
"Ord",
"PurgeChar",
"Repeat",
"Replace",
"Right",
"RTrim",
"SubField",
"SubStringCount",
"TextBetween",
"Trim",
"Upper",
# System functions
"Author",
"ClientPlatform",
"ComputerName",
"DocumentName",
"DocumentPath",
"DocumentTitle",
"EngineVersion",
"GetCollationLocale",
"GetObjectField",
"GetRegistryString",
"IsPartialReload",
"OSUser",
"ProductVersion",
"ReloadTime",
"StateName",
# Table functions
"FieldName",
"FieldNumber",
"NoOfFields",
"NoOfRows",
"NoOfTables",
"TableName",
"TableNumber",
]
# System variables and constants
# see https://help.qlik.com/en-US/sense/August2021/Subsystems/Hub/Content/Sense_Hub/Scripting/work-with-variables-in-data-load-editor.htm
CONSTANT_LIST = [
# System Variables
"floppy",
"cd",
"include",
"must_include",
"hideprefix",
"hidesuffix",
"qvpath",
"qvroot",
"QvWorkPath",
"QvWorkRoot",
"StripComments",
"Verbatim",
"OpenUrlTimeout",
"WinPath",
"WinRoot",
"CollationLocale",
"CreateSearchIndexOnReload",
# value handling variables
"NullDisplay",
"NullInterpret",
"NullValue",
"OtherSymbol",
# Currency formatting
"MoneyDecimalSep",
"MoneyFormat",
"MoneyThousandSep",
# Number formatting
"DecimalSep",
"ThousandSep",
"NumericalAbbreviation",
# Time formatting
"DateFormat",
"TimeFormat",
"TimestampFormat",
"MonthNames",
"LongMonthNames",
"DayNames",
"LongDayNames",
"FirstWeekDay",
"BrokenWeeks",
"ReferenceDay",
"FirstMonthOfYear",
# Error variables
"errormode",
"scripterror",
"scripterrorcount",
"scripterrorlist",
# Other
"null",
]
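# --- Illustrative sketch (an editorial addition): these lists are data for the Qlik
# lexer, which typically folds them into single rules with pygments.lexer.words().
# The rule below is a simplified assumption of that usage, not the real lexer's
# token table (the real lexer also applies re.IGNORECASE and finer token types).
def _demo_qlik_function_rule():
    from pygments.lexer import words
    from pygments.token import Name

    return (words(SCRIPT_FUNCTIONS, suffix=r'\b'), Name.Function)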
| 12,595 | Python | 17.884558 | 139 | 0.541882 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/special.py | """
pygments.lexers.special
~~~~~~~~~~~~~~~~~~~~~~~
Special lexers.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import ast
from pygments.lexer import Lexer, line_re
from pygments.token import Token, Error, Text, Generic
from pygments.util import get_choice_opt
__all__ = ['TextLexer', 'OutputLexer', 'RawTokenLexer']
class TextLexer(Lexer):
"""
"Null" lexer, doesn't highlight anything.
"""
name = 'Text only'
aliases = ['text']
filenames = ['*.txt']
mimetypes = ['text/plain']
priority = 0.01
def get_tokens_unprocessed(self, text):
yield 0, Text, text
def analyse_text(text):
return TextLexer.priority
class OutputLexer(Lexer):
"""
Simple lexer that highlights everything as ``Token.Generic.Output``.
.. versionadded:: 2.10
"""
name = 'Text output'
aliases = ['output']
def get_tokens_unprocessed(self, text):
yield 0, Generic.Output, text
_ttype_cache = {}
class RawTokenLexer(Lexer):
"""
Recreate a token stream formatted with the `RawTokenFormatter`.
Additional options accepted:
`compress`
If set to ``"gz"`` or ``"bz2"``, decompress the token stream with
the given compression algorithm before lexing (default: ``""``).
"""
name = 'Raw token data'
aliases = []
filenames = []
mimetypes = ['application/x-pygments-tokens']
def __init__(self, **options):
self.compress = get_choice_opt(options, 'compress',
['', 'none', 'gz', 'bz2'], '')
Lexer.__init__(self, **options)
def get_tokens(self, text):
if self.compress:
if isinstance(text, str):
text = text.encode('latin1')
try:
if self.compress == 'gz':
import gzip
text = gzip.decompress(text)
elif self.compress == 'bz2':
import bz2
text = bz2.decompress(text)
except OSError:
yield Error, text.decode('latin1')
if isinstance(text, bytes):
text = text.decode('latin1')
# do not call Lexer.get_tokens() because stripping is not optional.
text = text.strip('\n') + '\n'
for i, t, v in self.get_tokens_unprocessed(text):
yield t, v
def get_tokens_unprocessed(self, text):
length = 0
for match in line_re.finditer(text):
try:
ttypestr, val = match.group().rstrip().split('\t', 1)
ttype = _ttype_cache.get(ttypestr)
if not ttype:
ttype = Token
ttypes = ttypestr.split('.')[1:]
for ttype_ in ttypes:
if not ttype_ or not ttype_[0].isupper():
raise ValueError('malformed token name')
ttype = getattr(ttype, ttype_)
_ttype_cache[ttypestr] = ttype
val = ast.literal_eval(val)
if not isinstance(val, str):
raise ValueError('expected str')
except (SyntaxError, ValueError):
val = match.group()
ttype = Error
yield length, ttype, val
length += len(val)
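# --- Illustrative sketch (an editorial addition): RawTokenLexer is the reading half of
# a round trip whose writing half is pygments.formatters.RawTokenFormatter. A minimal
# round trip (the highlighted Python snippet is a made-up example):
def _demo_raw_token_roundtrip():
    from pygments import highlight
    from pygments.formatters import RawTokenFormatter
    from pygments.lexers.python import PythonLexer

    raw = highlight('print("hi")\n', PythonLexer(), RawTokenFormatter())  # bytes
    for ttype, value in RawTokenLexer().get_tokens(raw):
        print(ttype, repr(value))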
| 3,414 | Python | 28.188034 | 75 | 0.53017 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/asm.py | """
pygments.lexers.asm
~~~~~~~~~~~~~~~~~~~
Lexers for assembly languages.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, include, bygroups, using, words, \
DelegatingLexer, default
from pygments.lexers.c_cpp import CppLexer, CLexer
from pygments.lexers.d import DLexer
from pygments.token import Text, Name, Number, String, Comment, Punctuation, \
Other, Keyword, Operator, Whitespace
__all__ = ['GasLexer', 'ObjdumpLexer', 'DObjdumpLexer', 'CppObjdumpLexer',
'CObjdumpLexer', 'HsailLexer', 'LlvmLexer', 'LlvmMirBodyLexer',
'LlvmMirLexer', 'NasmLexer', 'NasmObjdumpLexer', 'TasmLexer',
'Ca65Lexer', 'Dasm16Lexer']
class GasLexer(RegexLexer):
"""
For Gas (AT&T) assembly code.
"""
name = 'GAS'
aliases = ['gas', 'asm']
filenames = ['*.s', '*.S']
mimetypes = ['text/x-gas']
#: optional Comment or Whitespace
string = r'"(\\"|[^"])*"'
char = r'[\w$.@-]'
identifier = r'(?:[a-zA-Z$_]' + char + r'*|\.' + char + '+)'
number = r'(?:0[xX][a-fA-F0-9]+|#?-?\d+)'
register = '%' + identifier + r'\b'
tokens = {
'root': [
include('whitespace'),
(identifier + ':', Name.Label),
(r'\.' + identifier, Name.Attribute, 'directive-args'),
(r'lock|rep(n?z)?|data\d+', Name.Attribute),
(identifier, Name.Function, 'instruction-args'),
(r'[\r\n]+', Text)
],
'directive-args': [
(identifier, Name.Constant),
(string, String),
('@' + identifier, Name.Attribute),
(number, Number.Integer),
(register, Name.Variable),
(r'[\r\n]+', Whitespace, '#pop'),
(r'([;#]|//).*?\n', Comment.Single, '#pop'),
(r'/[*].*?[*]/', Comment.Multiline),
(r'/[*].*?\n[\w\W]*?[*]/', Comment.Multiline, '#pop'),
include('punctuation'),
include('whitespace')
],
'instruction-args': [
# For objdump-disassembled code, shouldn't occur in
# actual assembler input
('([a-z0-9]+)( )(<)('+identifier+')(>)',
bygroups(Number.Hex, Text, Punctuation, Name.Constant,
Punctuation)),
('([a-z0-9]+)( )(<)('+identifier+')([-+])('+number+')(>)',
bygroups(Number.Hex, Text, Punctuation, Name.Constant,
Punctuation, Number.Integer, Punctuation)),
# Address constants
(identifier, Name.Constant),
(number, Number.Integer),
# Registers
(register, Name.Variable),
# Numeric constants
('$'+number, Number.Integer),
(r"$'(.|\\')'", String.Char),
(r'[\r\n]+', Whitespace, '#pop'),
(r'([;#]|//).*?\n', Comment.Single, '#pop'),
(r'/[*].*?[*]/', Comment.Multiline),
(r'/[*].*?\n[\w\W]*?[*]/', Comment.Multiline, '#pop'),
include('punctuation'),
include('whitespace')
],
'whitespace': [
(r'\n', Whitespace),
(r'\s+', Whitespace),
(r'([;#]|//).*?\n', Comment.Single),
(r'/[*][\w\W]*?[*]/', Comment.Multiline)
],
'punctuation': [
(r'[-*,.()\[\]!:{}]+', Punctuation)
]
}
def analyse_text(text):
if re.search(r'^\.(text|data|section)', text, re.M):
return True
elif re.search(r'^\.\w+', text, re.M):
return 0.1
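# --- Illustrative sketch (an editorial addition): a quick check of the directive /
# label / instruction split above on a tiny AT&T-syntax fragment (made-up input).
def _demo_gas_tokens():
    src = ".text\nmain:\n\tmovl $1, %eax\n\tret\n"
    for ttype, value in GasLexer().get_tokens(src):
        print(ttype, repr(value))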
def _objdump_lexer_tokens(asm_lexer):
"""
Common objdump lexer tokens to wrap an ASM lexer.
"""
hex_re = r'[0-9A-Za-z]'
return {
'root': [
# File name & format:
('(.*?)(:)( +file format )(.*?)$',
bygroups(Name.Label, Punctuation, Text, String)),
# Section header
('(Disassembly of section )(.*?)(:)$',
bygroups(Text, Name.Label, Punctuation)),
# Function labels
# (With offset)
('('+hex_re+'+)( )(<)(.*?)([-+])(0[xX][A-Za-z0-9]+)(>:)$',
bygroups(Number.Hex, Whitespace, Punctuation, Name.Function,
Punctuation, Number.Hex, Punctuation)),
# (Without offset)
('('+hex_re+'+)( )(<)(.*?)(>:)$',
bygroups(Number.Hex, Whitespace, Punctuation, Name.Function,
Punctuation)),
# Code line with disassembled instructions
('( *)('+hex_re+r'+:)(\t)((?:'+hex_re+hex_re+' )+)( *\t)([a-zA-Z].*?)$',
bygroups(Whitespace, Name.Label, Whitespace, Number.Hex, Whitespace,
using(asm_lexer))),
# Code line without raw instructions (objdump --no-show-raw-insn)
('( *)('+hex_re+r'+:)( *\t)([a-zA-Z].*?)$',
bygroups(Whitespace, Name.Label, Whitespace,
using(asm_lexer))),
# Code line with ascii
('( *)('+hex_re+r'+:)(\t)((?:'+hex_re+hex_re+' )+)( *)(.*?)$',
bygroups(Whitespace, Name.Label, Whitespace, Number.Hex, Whitespace, String)),
# Continued code line, only raw opcodes without disassembled
# instruction
('( *)('+hex_re+r'+:)(\t)((?:'+hex_re+hex_re+' )+)$',
bygroups(Whitespace, Name.Label, Whitespace, Number.Hex)),
# Skipped a few bytes
(r'\t\.\.\.$', Text),
# Relocation line
# (With offset)
(r'(\t\t\t)('+hex_re+r'+:)( )([^\t]+)(\t)(.*?)([-+])(0x'+hex_re+'+)$',
bygroups(Whitespace, Name.Label, Whitespace, Name.Property, Whitespace,
Name.Constant, Punctuation, Number.Hex)),
# (Without offset)
(r'(\t\t\t)('+hex_re+r'+:)( )([^\t]+)(\t)(.*?)$',
bygroups(Whitespace, Name.Label, Whitespace, Name.Property, Whitespace,
Name.Constant)),
(r'[^\n]+\n', Other)
]
}
class ObjdumpLexer(RegexLexer):
"""
For the output of ``objdump -dr``.
"""
name = 'objdump'
aliases = ['objdump']
filenames = ['*.objdump']
mimetypes = ['text/x-objdump']
tokens = _objdump_lexer_tokens(GasLexer)
class DObjdumpLexer(DelegatingLexer):
"""
For the output of ``objdump -Sr`` on compiled D files.
"""
name = 'd-objdump'
aliases = ['d-objdump']
filenames = ['*.d-objdump']
mimetypes = ['text/x-d-objdump']
def __init__(self, **options):
super().__init__(DLexer, ObjdumpLexer, **options)
class CppObjdumpLexer(DelegatingLexer):
"""
For the output of ``objdump -Sr`` on compiled C++ files.
"""
name = 'cpp-objdump'
aliases = ['cpp-objdump', 'c++-objdumb', 'cxx-objdump']
filenames = ['*.cpp-objdump', '*.c++-objdump', '*.cxx-objdump']
mimetypes = ['text/x-cpp-objdump']
def __init__(self, **options):
super().__init__(CppLexer, ObjdumpLexer, **options)
class CObjdumpLexer(DelegatingLexer):
"""
For the output of ``objdump -Sr`` on compiled C files.
"""
name = 'c-objdump'
aliases = ['c-objdump']
filenames = ['*.c-objdump']
mimetypes = ['text/x-c-objdump']
def __init__(self, **options):
super().__init__(CLexer, ObjdumpLexer, **options)
class HsailLexer(RegexLexer):
"""
For HSAIL assembly code.
.. versionadded:: 2.2
"""
name = 'HSAIL'
aliases = ['hsail', 'hsa']
filenames = ['*.hsail']
mimetypes = ['text/x-hsail']
string = r'"[^"]*?"'
identifier = r'[a-zA-Z_][\w.]*'
# Registers
register_number = r'[0-9]+'
register = r'(\$(c|s|d|q)' + register_number + r')\b'
# Qualifiers
alignQual = r'(align\(\d+\))'
widthQual = r'(width\((\d+|all)\))'
allocQual = r'(alloc\(agent\))'
# Instruction Modifiers
roundingMod = (r'((_ftz)?(_up|_down|_zero|_near))')
datatypeMod = (r'_('
# packedTypes
r'u8x4|s8x4|u16x2|s16x2|u8x8|s8x8|u16x4|s16x4|u32x2|s32x2|'
r'u8x16|s8x16|u16x8|s16x8|u32x4|s32x4|u64x2|s64x2|'
r'f16x2|f16x4|f16x8|f32x2|f32x4|f64x2|'
# baseTypes
r'u8|s8|u16|s16|u32|s32|u64|s64|'
r'b128|b8|b16|b32|b64|b1|'
r'f16|f32|f64|'
# opaqueType
r'roimg|woimg|rwimg|samp|sig32|sig64)')
# Numeric Constant
float = r'((\d+\.)|(\d*\.\d+))[eE][+-]?\d+'
hexfloat = r'0[xX](([0-9a-fA-F]+\.[0-9a-fA-F]*)|([0-9a-fA-F]*\.[0-9a-fA-F]+))[pP][+-]?\d+'
ieeefloat = r'0((h|H)[0-9a-fA-F]{4}|(f|F)[0-9a-fA-F]{8}|(d|D)[0-9a-fA-F]{16})'
tokens = {
'root': [
include('whitespace'),
include('comments'),
(string, String),
(r'@' + identifier + ':?', Name.Label),
(register, Name.Variable.Anonymous),
include('keyword'),
(r'&' + identifier, Name.Variable.Global),
(r'%' + identifier, Name.Variable),
(hexfloat, Number.Hex),
(r'0[xX][a-fA-F0-9]+', Number.Hex),
(ieeefloat, Number.Float),
(float, Number.Float),
(r'\d+', Number.Integer),
(r'[=<>{}\[\]()*.,:;!]|x\b', Punctuation)
],
'whitespace': [
(r'(\n|\s)+', Whitespace),
],
'comments': [
(r'/\*.*?\*/', Comment.Multiline),
(r'//.*?\n', Comment.Single),
],
'keyword': [
# Types
(r'kernarg' + datatypeMod, Keyword.Type),
# Regular keywords
(r'\$(full|base|small|large|default|zero|near)', Keyword),
(words((
'module', 'extension', 'pragma', 'prog', 'indirect', 'signature',
'decl', 'kernel', 'function', 'enablebreakexceptions',
'enabledetectexceptions', 'maxdynamicgroupsize', 'maxflatgridsize',
'maxflatworkgroupsize', 'requireddim', 'requiredgridsize',
'requiredworkgroupsize', 'requirenopartialworkgroups'),
suffix=r'\b'), Keyword),
# instructions
(roundingMod, Keyword),
(datatypeMod, Keyword),
(r'_(' + alignQual + '|' + widthQual + ')', Keyword),
(r'_kernarg', Keyword),
(r'(nop|imagefence)\b', Keyword),
(words((
'cleardetectexcept', 'clock', 'cuid', 'debugtrap', 'dim',
'getdetectexcept', 'groupbaseptr', 'kernargbaseptr', 'laneid',
'maxcuid', 'maxwaveid', 'packetid', 'setdetectexcept', 'waveid',
'workitemflatabsid', 'workitemflatid', 'nullptr', 'abs', 'bitrev',
'currentworkgroupsize', 'currentworkitemflatid', 'fract', 'ncos',
'neg', 'nexp2', 'nlog2', 'nrcp', 'nrsqrt', 'nsin', 'nsqrt',
'gridgroups', 'gridsize', 'not', 'sqrt', 'workgroupid',
'workgroupsize', 'workitemabsid', 'workitemid', 'ceil', 'floor',
'rint', 'trunc', 'add', 'bitmask', 'borrow', 'carry', 'copysign',
'div', 'rem', 'sub', 'shl', 'shr', 'and', 'or', 'xor', 'unpackhi',
'unpacklo', 'max', 'min', 'fma', 'mad', 'bitextract', 'bitselect',
'shuffle', 'cmov', 'bitalign', 'bytealign', 'lerp', 'nfma', 'mul',
'mulhi', 'mul24hi', 'mul24', 'mad24', 'mad24hi', 'bitinsert',
'combine', 'expand', 'lda', 'mov', 'pack', 'unpack', 'packcvt',
'unpackcvt', 'sad', 'sementp', 'ftos', 'stof', 'cmp', 'ld', 'st',
'_eq', '_ne', '_lt', '_le', '_gt', '_ge', '_equ', '_neu', '_ltu',
'_leu', '_gtu', '_geu', '_num', '_nan', '_seq', '_sne', '_slt',
'_sle', '_sgt', '_sge', '_snum', '_snan', '_sequ', '_sneu', '_sltu',
'_sleu', '_sgtu', '_sgeu', 'atomic', '_ld', '_st', '_cas', '_add',
'_and', '_exch', '_max', '_min', '_or', '_sub', '_wrapdec',
'_wrapinc', '_xor', 'ret', 'cvt', '_readonly', '_kernarg', '_global',
'br', 'cbr', 'sbr', '_scacq', '_screl', '_scar', '_rlx', '_wave',
'_wg', '_agent', '_system', 'ldimage', 'stimage', '_v2', '_v3', '_v4',
'_1d', '_2d', '_3d', '_1da', '_2da', '_1db', '_2ddepth', '_2dadepth',
'_width', '_height', '_depth', '_array', '_channelorder',
'_channeltype', 'querysampler', '_coord', '_filter', '_addressing',
'barrier', 'wavebarrier', 'initfbar', 'joinfbar', 'waitfbar',
'arrivefbar', 'leavefbar', 'releasefbar', 'ldf', 'activelaneid',
'activelanecount', 'activelanemask', 'activelanepermute', 'call',
'scall', 'icall', 'alloca', 'packetcompletionsig',
'addqueuewriteindex', 'casqueuewriteindex', 'ldqueuereadindex',
'stqueuereadindex', 'readonly', 'global', 'private', 'group',
'spill', 'arg', '_upi', '_downi', '_zeroi', '_neari', '_upi_sat',
'_downi_sat', '_zeroi_sat', '_neari_sat', '_supi', '_sdowni',
'_szeroi', '_sneari', '_supi_sat', '_sdowni_sat', '_szeroi_sat',
'_sneari_sat', '_pp', '_ps', '_sp', '_ss', '_s', '_p', '_pp_sat',
'_ps_sat', '_sp_sat', '_ss_sat', '_s_sat', '_p_sat')), Keyword),
# Integer types
(r'i[1-9]\d*', Keyword)
]
}
class LlvmLexer(RegexLexer):
"""
For LLVM assembly code.
"""
name = 'LLVM'
url = 'https://llvm.org/docs/LangRef.html'
aliases = ['llvm']
filenames = ['*.ll']
mimetypes = ['text/x-llvm']
#: optional Comment or Whitespace
string = r'"[^"]*?"'
identifier = r'([-a-zA-Z$._][\w\-$.]*|' + string + ')'
block_label = r'(' + identifier + r'|(\d+))'
tokens = {
'root': [
include('whitespace'),
# Before keywords, because keywords are valid label names :(...
(block_label + r'\s*:', Name.Label),
include('keyword'),
(r'%' + identifier, Name.Variable),
(r'@' + identifier, Name.Variable.Global),
(r'%\d+', Name.Variable.Anonymous),
(r'@\d+', Name.Variable.Global),
(r'#\d+', Name.Variable.Global),
(r'!' + identifier, Name.Variable),
(r'!\d+', Name.Variable.Anonymous),
(r'c?' + string, String),
(r'0[xX][a-fA-F0-9]+', Number),
(r'-?\d+(?:[.]\d+)?(?:[eE][-+]?\d+(?:[.]\d+)?)?', Number),
(r'[=<>{}\[\]()*.,!]|x\b', Punctuation)
],
'whitespace': [
(r'(\n|\s+)+', Whitespace),
(r';.*?\n', Comment)
],
'keyword': [
# Regular keywords
(words((
'aarch64_sve_vector_pcs', 'aarch64_vector_pcs', 'acq_rel',
'acquire', 'add', 'addrspace', 'addrspacecast', 'afn', 'alias',
'aliasee', 'align', 'alignLog2', 'alignstack', 'alloca',
'allocsize', 'allOnes', 'alwaysinline', 'alwaysInline',
'amdgpu_cs', 'amdgpu_es', 'amdgpu_gfx', 'amdgpu_gs',
'amdgpu_hs', 'amdgpu_kernel', 'amdgpu_ls', 'amdgpu_ps',
'amdgpu_vs', 'and', 'any', 'anyregcc', 'appending', 'arcp',
'argmemonly', 'args', 'arm_aapcs_vfpcc', 'arm_aapcscc',
'arm_apcscc', 'ashr', 'asm', 'atomic', 'atomicrmw',
'attributes', 'available_externally', 'avr_intrcc',
'avr_signalcc', 'bit', 'bitcast', 'bitMask', 'blockaddress',
'blockcount', 'br', 'branchFunnel', 'builtin', 'byArg',
'byref', 'byte', 'byteArray', 'byval', 'c', 'call', 'callbr',
'callee', 'caller', 'calls', 'canAutoHide', 'catch',
'catchpad', 'catchret', 'catchswitch', 'cc', 'ccc',
'cfguard_checkcc', 'cleanup', 'cleanuppad', 'cleanupret',
'cmpxchg', 'cold', 'coldcc', 'comdat', 'common', 'constant',
'contract', 'convergent', 'critical', 'cxx_fast_tlscc',
'datalayout', 'declare', 'default', 'define', 'deplibs',
'dereferenceable', 'dereferenceable_or_null', 'distinct',
'dllexport', 'dllimport', 'dso_local', 'dso_local_equivalent',
'dso_preemptable', 'dsoLocal', 'eq', 'exact', 'exactmatch',
'extern_weak', 'external', 'externally_initialized',
'extractelement', 'extractvalue', 'fadd', 'false', 'fast',
'fastcc', 'fcmp', 'fdiv', 'fence', 'filter', 'flags', 'fmul',
'fneg', 'fpext', 'fptosi', 'fptoui', 'fptrunc', 'freeze',
'frem', 'from', 'fsub', 'funcFlags', 'function', 'gc',
'getelementptr', 'ghccc', 'global', 'guid', 'gv', 'hash',
'hhvm_ccc', 'hhvmcc', 'hidden', 'hot', 'hotness', 'icmp',
'ifunc', 'inaccessiblemem_or_argmemonly',
'inaccessiblememonly', 'inalloca', 'inbounds', 'indir',
'indirectbr', 'info', 'initialexec', 'inline', 'inlineBits',
'inlinehint', 'inrange', 'inreg', 'insertelement',
'insertvalue', 'insts', 'intel_ocl_bicc', 'inteldialect',
'internal', 'inttoptr', 'invoke', 'jumptable', 'kind',
'landingpad', 'largest', 'linkage', 'linkonce', 'linkonce_odr',
'live', 'load', 'local_unnamed_addr', 'localdynamic',
'localexec', 'lshr', 'max', 'metadata', 'min', 'minsize',
'module', 'monotonic', 'msp430_intrcc', 'mul', 'mustprogress',
'musttail', 'naked', 'name', 'nand', 'ne', 'nest', 'ninf',
'nnan', 'noalias', 'nobuiltin', 'nocallback', 'nocapture',
'nocf_check', 'noduplicate', 'noduplicates', 'nofree',
'noimplicitfloat', 'noinline', 'noInline', 'nomerge', 'none',
'nonlazybind', 'nonnull', 'noprofile', 'norecurse',
'noRecurse', 'noredzone', 'noreturn', 'nosync', 'notail',
'notEligibleToImport', 'noundef', 'nounwind', 'nsw',
'nsz', 'null', 'null_pointer_is_valid', 'nuw', 'oeq', 'offset',
'oge', 'ogt', 'ole', 'olt', 'one', 'opaque', 'optforfuzzing',
'optnone', 'optsize', 'or', 'ord', 'param', 'params',
'partition', 'path', 'personality', 'phi', 'poison',
'preallocated', 'prefix', 'preserve_allcc', 'preserve_mostcc',
'private', 'prologue', 'protected', 'ptrtoint', 'ptx_device',
'ptx_kernel', 'readnone', 'readNone', 'readonly', 'readOnly',
'reassoc', 'refs', 'relbf', 'release', 'resByArg', 'resume',
'ret', 'returnDoesNotAlias', 'returned', 'returns_twice',
'safestack', 'samesize', 'sanitize_address',
'sanitize_hwaddress', 'sanitize_memory', 'sanitize_memtag',
'sanitize_thread', 'sdiv', 'section', 'select', 'seq_cst',
'sext', 'sge', 'sgt', 'shadowcallstack', 'shl',
'shufflevector', 'sideeffect', 'signext', 'single',
'singleImpl', 'singleImplName', 'sitofp', 'sizeM1',
'sizeM1BitWidth', 'sle', 'slt', 'source_filename',
'speculatable', 'speculative_load_hardening', 'spir_func',
'spir_kernel', 'srem', 'sret', 'ssp', 'sspreq', 'sspstrong',
'store', 'strictfp', 'sub', 'summaries', 'summary', 'swiftcc',
'swifterror', 'swiftself', 'switch', 'syncscope', 'tail',
'tailcc', 'target', 'thread_local', 'to', 'token', 'triple',
'true', 'trunc', 'type', 'typeCheckedLoadConstVCalls',
'typeCheckedLoadVCalls', 'typeid', 'typeidCompatibleVTable',
'typeIdInfo', 'typeTestAssumeConstVCalls',
'typeTestAssumeVCalls', 'typeTestRes', 'typeTests', 'udiv',
'ueq', 'uge', 'ugt', 'uitofp', 'ule', 'ult', 'umax', 'umin',
'undef', 'une', 'uniformRetVal', 'uniqueRetVal', 'unknown',
'unnamed_addr', 'uno', 'unordered', 'unreachable', 'unsat',
'unwind', 'urem', 'uselistorder', 'uselistorder_bb', 'uwtable',
'va_arg', 'varFlags', 'variable', 'vcall_visibility',
'vFuncId', 'virtFunc', 'virtualConstProp', 'void', 'volatile',
'vscale', 'vTableFuncs', 'weak', 'weak_odr', 'webkit_jscc',
'win64cc', 'within', 'wpdRes', 'wpdResolutions', 'writeonly',
'x', 'x86_64_sysvcc', 'x86_fastcallcc', 'x86_intrcc',
'x86_mmx', 'x86_regcallcc', 'x86_stdcallcc', 'x86_thiscallcc',
'x86_vectorcallcc', 'xchg', 'xor', 'zeroext',
'zeroinitializer', 'zext', 'immarg', 'willreturn'),
suffix=r'\b'), Keyword),
# Types
(words(('void', 'half', 'bfloat', 'float', 'double', 'fp128',
'x86_fp80', 'ppc_fp128', 'label', 'metadata', 'x86_mmx',
'x86_amx', 'token', 'ptr')),
Keyword.Type),
# Integer types
(r'i[1-9]\d*', Keyword.Type)
]
}
class LlvmMirBodyLexer(RegexLexer):
"""
For LLVM MIR examples without the YAML wrapper.
.. versionadded:: 2.6
"""
name = 'LLVM-MIR Body'
url = 'https://llvm.org/docs/MIRLangRef.html'
aliases = ['llvm-mir-body']
filenames = []
mimetypes = []
tokens = {
'root': [
# Attributes on basic blocks
(words(('liveins', 'successors'), suffix=':'), Keyword),
# Basic Block Labels
(r'bb\.[0-9]+(\.[a-zA-Z0-9_.-]+)?( \(address-taken\))?:', Name.Label),
(r'bb\.[0-9]+ \(%[a-zA-Z0-9_.-]+\)( \(address-taken\))?:', Name.Label),
(r'%bb\.[0-9]+(\.\w+)?', Name.Label),
# Stack references
(r'%stack\.[0-9]+(\.\w+\.addr)?', Name),
# Subreg indices
(r'%subreg\.\w+', Name),
# Virtual registers
(r'%[a-zA-Z0-9_]+ *', Name.Variable, 'vreg'),
# Reference to LLVM-IR global
include('global'),
# Reference to Intrinsic
(r'intrinsic\(\@[a-zA-Z0-9_.]+\)', Name.Variable.Global),
# Comparison predicates
(words(('eq', 'ne', 'sgt', 'sge', 'slt', 'sle', 'ugt', 'uge', 'ult',
'ule'), prefix=r'intpred\(', suffix=r'\)'), Name.Builtin),
(words(('oeq', 'one', 'ogt', 'oge', 'olt', 'ole', 'ugt', 'uge',
'ult', 'ule'), prefix=r'floatpred\(', suffix=r'\)'),
Name.Builtin),
# Physical registers
(r'\$\w+', String.Single),
# Assignment operator
(r'=', Operator),
# gMIR Opcodes
(r'(G_ANYEXT|G_[SZ]EXT|G_SEXT_INREG|G_TRUNC|G_IMPLICIT_DEF|G_PHI|'
r'G_FRAME_INDEX|G_GLOBAL_VALUE|G_INTTOPTR|G_PTRTOINT|G_BITCAST|'
r'G_CONSTANT|G_FCONSTANT|G_VASTART|G_VAARG|G_CTLZ|G_CTLZ_ZERO_UNDEF|'
r'G_CTTZ|G_CTTZ_ZERO_UNDEF|G_CTPOP|G_BSWAP|G_BITREVERSE|'
r'G_ADDRSPACE_CAST|G_BLOCK_ADDR|G_JUMP_TABLE|G_DYN_STACKALLOC|'
r'G_ADD|G_SUB|G_MUL|G_[SU]DIV|G_[SU]REM|G_AND|G_OR|G_XOR|G_SHL|'
r'G_[LA]SHR|G_[IF]CMP|G_SELECT|G_GEP|G_PTR_MASK|G_SMIN|G_SMAX|'
r'G_UMIN|G_UMAX|G_[US]ADDO|G_[US]ADDE|G_[US]SUBO|G_[US]SUBE|'
r'G_[US]MULO|G_[US]MULH|G_FNEG|G_FPEXT|G_FPTRUNC|G_FPTO[US]I|'
r'G_[US]ITOFP|G_FABS|G_FCOPYSIGN|G_FCANONICALIZE|G_FMINNUM|'
r'G_FMAXNUM|G_FMINNUM_IEEE|G_FMAXNUM_IEEE|G_FMINIMUM|G_FMAXIMUM|'
r'G_FADD|G_FSUB|G_FMUL|G_FMA|G_FMAD|G_FDIV|G_FREM|G_FPOW|G_FEXP|'
r'G_FEXP2|G_FLOG|G_FLOG2|G_FLOG10|G_FCEIL|G_FCOS|G_FSIN|G_FSQRT|'
r'G_FFLOOR|G_FRINT|G_FNEARBYINT|G_INTRINSIC_TRUNC|'
r'G_INTRINSIC_ROUND|G_LOAD|G_[ZS]EXTLOAD|G_INDEXED_LOAD|'
r'G_INDEXED_[ZS]EXTLOAD|G_STORE|G_INDEXED_STORE|'
r'G_ATOMIC_CMPXCHG_WITH_SUCCESS|G_ATOMIC_CMPXCHG|'
r'G_ATOMICRMW_(XCHG|ADD|SUB|AND|NAND|OR|XOR|MAX|MIN|UMAX|UMIN|FADD|'
r'FSUB)'
r'|G_FENCE|G_EXTRACT|G_UNMERGE_VALUES|G_INSERT|G_MERGE_VALUES|'
r'G_BUILD_VECTOR|G_BUILD_VECTOR_TRUNC|G_CONCAT_VECTORS|'
r'G_INTRINSIC|G_INTRINSIC_W_SIDE_EFFECTS|G_BR|G_BRCOND|'
r'G_BRINDIRECT|G_BRJT|G_INSERT_VECTOR_ELT|G_EXTRACT_VECTOR_ELT|'
r'G_SHUFFLE_VECTOR)\b',
Name.Builtin),
# Target independent opcodes
(r'(COPY|PHI|INSERT_SUBREG|EXTRACT_SUBREG|REG_SEQUENCE)\b',
Name.Builtin),
# Flags
(words(('killed', 'implicit')), Keyword),
# ConstantInt values
(r'(i[0-9]+)( +)', bygroups(Keyword.Type, Whitespace), 'constantint'),
# ConstantFloat values
(r'(half|float|double) +', Keyword.Type, 'constantfloat'),
# Bare immediates
include('integer'),
# MMO's
(r'(::)( *)', bygroups(Operator, Whitespace), 'mmo'),
# MIR Comments
(r';.*', Comment),
# If we get here, assume it's a target instruction
(r'[a-zA-Z0-9_]+', Name),
# Everything else that isn't highlighted
(r'[(), \n]+', Text),
],
# The integer constant from a ConstantInt value
'constantint': [
include('integer'),
(r'(?=.)', Text, '#pop'),
],
# The floating point constant from a ConstantFloat value
'constantfloat': [
include('float'),
(r'(?=.)', Text, '#pop'),
],
'vreg': [
# The bank or class if there is one
(r'( *)(:(?!:))', bygroups(Whitespace, Keyword), ('#pop', 'vreg_bank_or_class')),
# The LLT if there is one
(r'( *)(\()', bygroups(Whitespace, Text), 'vreg_type'),
(r'(?=.)', Text, '#pop'),
],
'vreg_bank_or_class': [
# The unassigned bank/class
(r'( *)(_)', bygroups(Whitespace, Name.Variable.Magic)),
(r'( *)([a-zA-Z0-9_]+)', bygroups(Whitespace, Name.Variable)),
# The LLT if there is one
(r'( *)(\()', bygroups(Whitespace, Text), 'vreg_type'),
(r'(?=.)', Text, '#pop'),
],
'vreg_type': [
# Scalar and pointer types
(r'( *)([sp][0-9]+)', bygroups(Whitespace, Keyword.Type)),
(r'( *)(<[0-9]+ *x *[sp][0-9]+>)', bygroups(Whitespace, Keyword.Type)),
(r'\)', Text, '#pop'),
(r'(?=.)', Text, '#pop'),
],
'mmo': [
(r'\(', Text),
(r' +', Whitespace),
(words(('load', 'store', 'on', 'into', 'from', 'align', 'monotonic',
'acquire', 'release', 'acq_rel', 'seq_cst')),
Keyword),
# IR references
(r'%ir\.[a-zA-Z0-9_.-]+', Name),
(r'%ir-block\.[a-zA-Z0-9_.-]+', Name),
(r'[-+]', Operator),
include('integer'),
include('global'),
(r',', Punctuation),
(r'\), \(', Text),
(r'\)', Text, '#pop'),
],
'integer': [(r'-?[0-9]+', Number.Integer),],
'float': [(r'-?[0-9]+\.[0-9]+(e[+-][0-9]+)?', Number.Float)],
'global': [(r'\@[a-zA-Z0-9_.]+', Name.Variable.Global)],
}
class LlvmMirLexer(RegexLexer):
"""
Lexer for the overall LLVM MIR document format.
MIR is a human readable serialization format that's used to represent LLVM's
machine specific intermediate representation. It allows LLVM's developers to
see the state of the compilation process at various points, as well as test
individual pieces of the compiler.
.. versionadded:: 2.6
"""
name = 'LLVM-MIR'
url = 'https://llvm.org/docs/MIRLangRef.html'
aliases = ['llvm-mir']
filenames = ['*.mir']
tokens = {
'root': [
# Comments are hashes at the YAML level
(r'#.*', Comment),
# Documents starting with | are LLVM-IR
(r'--- \|$', Keyword, 'llvm_ir'),
# Other documents are MIR
(r'---', Keyword, 'llvm_mir'),
# Consume everything else in one token for efficiency
(r'[^-#]+|.', Text),
],
'llvm_ir': [
# Documents end with '...' or '---'
(r'(\.\.\.|(?=---))', Keyword, '#pop'),
# Delegate to the LlvmLexer
(r'((?:.|\n)+?)(?=(\.\.\.|---))', bygroups(using(LlvmLexer))),
],
'llvm_mir': [
# Comments are hashes at the YAML level
(r'#.*', Comment),
# Documents end with '...' or '---'
(r'(\.\.\.|(?=---))', Keyword, '#pop'),
# Handle the simple attributes
(r'name:', Keyword, 'name'),
(words(('alignment', ),
suffix=':'), Keyword, 'number'),
(words(('legalized', 'regBankSelected', 'tracksRegLiveness',
'selected', 'exposesReturnsTwice'),
suffix=':'), Keyword, 'boolean'),
# Handle the attributes don't highlight inside
(words(('registers', 'stack', 'fixedStack', 'liveins', 'frameInfo',
'machineFunctionInfo'),
suffix=':'), Keyword),
# Delegate the body block to the LlvmMirBodyLexer
(r'body: *\|', Keyword, 'llvm_mir_body'),
# Consume everything else
(r'.+', Text),
(r'\n', Whitespace),
],
'name': [
(r'[^\n]+', Name),
default('#pop'),
],
'boolean': [
(r' *(true|false)', Name.Builtin),
default('#pop'),
],
'number': [
(r' *[0-9]+', Number),
default('#pop'),
],
'llvm_mir_body': [
# Documents end with '...' or '---'.
# We have to pop llvm_mir_body and llvm_mir
(r'(\.\.\.|(?=---))', Keyword, '#pop:2'),
# Delegate the body block to the LlvmMirBodyLexer
(r'((?:.|\n)+?)(?=\.\.\.|---)', bygroups(using(LlvmMirBodyLexer))),
# The '...' is optional. If we didn't already find it then it isn't
# there. There might be a '---' instead though.
(r'(?!\.\.\.|---)((?:.|\n)+)', bygroups(using(LlvmMirBodyLexer))),
],
}
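# Editorial usage sketch (not part of the upstream module): LlvmMirLexer above
# splits a MIR document at the YAML level, delegating '--- |' sections to
# LlvmLexer and 'body: |' blocks to LlvmMirBodyLexer.  Assuming the standard
# pygments.highlight API, a small hypothetical document could be rendered as:
#
#     from pygments import highlight
#     from pygments.formatters import TerminalFormatter
#     mir = "---\nname: foo\nbody: |\n  bb.0:\n    RET 0\n...\n"
#     print(highlight(mir, LlvmMirLexer(), TerminalFormatter()))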
class NasmLexer(RegexLexer):
"""
For Nasm (Intel) assembly code.
"""
name = 'NASM'
aliases = ['nasm']
filenames = ['*.asm', '*.ASM', '*.nasm']
mimetypes = ['text/x-nasm']
# Tasm uses the same file endings, but TASM is not as common as NASM, so
# we prioritize NASM higher by default
priority = 1.0
identifier = r'[a-z$._?][\w$.?#@~]*'
hexn = r'(?:0x[0-9a-f]+|$0[0-9a-f]*|[0-9]+[0-9a-f]*h)'
octn = r'[0-7]+q'
binn = r'[01]+b'
decn = r'[0-9]+'
floatn = decn + r'\.e?' + decn
string = r'"(\\"|[^"\n])*"|' + r"'(\\'|[^'\n])*'|" + r"`(\\`|[^`\n])*`"
declkw = r'(?:res|d)[bwdqt]|times'
register = (r'(r[0-9][0-5]?[bwd]?|'
r'[a-d][lh]|[er]?[a-d]x|[er]?[sb]p|[er]?[sd]i|[c-gs]s|st[0-7]|'
r'mm[0-7]|cr[0-4]|dr[0-367]|tr[3-7]|k[0-7]|'
r'[xyz]mm(?:[12][0-9]?|3[01]?|[04-9]))\b')
wordop = r'seg|wrt|strict|rel|abs'
type = r'byte|[dq]?word'
# Directives must be followed by whitespace, otherwise CPU will match
# cpuid for instance.
directives = (r'(?:BITS|USE16|USE32|SECTION|SEGMENT|ABSOLUTE|EXTERN|GLOBAL|'
r'ORG|ALIGN|STRUC|ENDSTRUC|COMMON|CPU|GROUP|UPPERCASE|IMPORT|'
r'EXPORT|LIBRARY|MODULE)(?=\s)')
flags = re.IGNORECASE | re.MULTILINE
tokens = {
'root': [
(r'^\s*%', Comment.Preproc, 'preproc'),
include('whitespace'),
(identifier + ':', Name.Label),
(r'(%s)(\s+)(equ)' % identifier,
bygroups(Name.Constant, Whitespace, Keyword.Declaration),
'instruction-args'),
(directives, Keyword, 'instruction-args'),
(declkw, Keyword.Declaration, 'instruction-args'),
(identifier, Name.Function, 'instruction-args'),
(r'[\r\n]+', Whitespace)
],
'instruction-args': [
(string, String),
(hexn, Number.Hex),
(octn, Number.Oct),
(binn, Number.Bin),
(floatn, Number.Float),
(decn, Number.Integer),
include('punctuation'),
(register, Name.Builtin),
(identifier, Name.Variable),
(r'[\r\n]+', Whitespace, '#pop'),
include('whitespace')
],
'preproc': [
(r'[^;\n]+', Comment.Preproc),
(r';.*?\n', Comment.Single, '#pop'),
(r'\n', Comment.Preproc, '#pop'),
],
'whitespace': [
(r'\n', Whitespace),
(r'[ \t]+', Whitespace),
(r';.*', Comment.Single),
(r'#.*', Comment.Single)
],
'punctuation': [
(r'[,{}():\[\]]+', Punctuation),
(r'[&|^<>+*/%~-]+', Operator),
(r'[$]+', Keyword.Constant),
(wordop, Operator.Word),
(type, Keyword.Type)
],
}
def analyse_text(text):
# Probably TASM
if re.match(r'PROC', text, re.IGNORECASE):
return False
class NasmObjdumpLexer(ObjdumpLexer):
"""
For the output of ``objdump -d -M intel``.
.. versionadded:: 2.0
"""
name = 'objdump-nasm'
aliases = ['objdump-nasm']
filenames = ['*.objdump-intel']
mimetypes = ['text/x-nasm-objdump']
tokens = _objdump_lexer_tokens(NasmLexer)
class TasmLexer(RegexLexer):
"""
For Tasm (Turbo Assembler) assembly code.
"""
name = 'TASM'
aliases = ['tasm']
filenames = ['*.asm', '*.ASM', '*.tasm']
mimetypes = ['text/x-tasm']
identifier = r'[@a-z$._?][\w$.?#@~]*'
hexn = r'(?:0x[0-9a-f]+|$0[0-9a-f]*|[0-9]+[0-9a-f]*h)'
octn = r'[0-7]+q'
binn = r'[01]+b'
decn = r'[0-9]+'
floatn = decn + r'\.e?' + decn
string = r'"(\\"|[^"\n])*"|' + r"'(\\'|[^'\n])*'|" + r"`(\\`|[^`\n])*`"
declkw = r'(?:res|d)[bwdqt]|times'
register = (r'(r[0-9][0-5]?[bwd]|'
r'[a-d][lh]|[er]?[a-d]x|[er]?[sb]p|[er]?[sd]i|[c-gs]s|st[0-7]|'
r'mm[0-7]|cr[0-4]|dr[0-367]|tr[3-7])\b')
wordop = r'seg|wrt|strict'
type = r'byte|[dq]?word'
directives = (r'BITS|USE16|USE32|SECTION|SEGMENT|ABSOLUTE|EXTERN|GLOBAL|'
r'ORG|ALIGN|STRUC|ENDSTRUC|ENDS|COMMON|CPU|GROUP|UPPERCASE|INCLUDE|'
r'EXPORT|LIBRARY|MODULE|PROC|ENDP|USES|ARG|DATASEG|UDATASEG|END|IDEAL|'
r'P386|MODEL|ASSUME|CODESEG|SIZE')
# T[A-Z][a-z] is more of a convention. Lexer should filter out STRUC definitions
# and then 'add' them to datatype somehow.
datatype = (r'db|dd|dw|T[A-Z][a-z]+')
flags = re.IGNORECASE | re.MULTILINE
tokens = {
'root': [
(r'^\s*%', Comment.Preproc, 'preproc'),
include('whitespace'),
(identifier + ':', Name.Label),
(directives, Keyword, 'instruction-args'),
(r'(%s)(\s+)(%s)' % (identifier, datatype),
bygroups(Name.Constant, Whitespace, Keyword.Declaration),
'instruction-args'),
(declkw, Keyword.Declaration, 'instruction-args'),
(identifier, Name.Function, 'instruction-args'),
(r'[\r\n]+', Whitespace)
],
'instruction-args': [
(string, String),
(hexn, Number.Hex),
(octn, Number.Oct),
(binn, Number.Bin),
(floatn, Number.Float),
(decn, Number.Integer),
include('punctuation'),
(register, Name.Builtin),
(identifier, Name.Variable),
# Do not match newline when it's preceded by a backslash
(r'(\\)(\s*)(;.*)([\r\n])',
bygroups(Text, Whitespace, Comment.Single, Whitespace)),
(r'[\r\n]+', Whitespace, '#pop'),
include('whitespace')
],
'preproc': [
(r'[^;\n]+', Comment.Preproc),
(r';.*?\n', Comment.Single, '#pop'),
(r'\n', Comment.Preproc, '#pop'),
],
'whitespace': [
(r'[\n\r]', Whitespace),
(r'(\\)([\n\r])', bygroups(Text, Whitespace)),
(r'[ \t]+', Whitespace),
(r';.*', Comment.Single)
],
'punctuation': [
(r'[,():\[\]]+', Punctuation),
(r'[&|^<>+*=/%~-]+', Operator),
(r'[$]+', Keyword.Constant),
(wordop, Operator.Word),
(type, Keyword.Type)
],
}
def analyse_text(text):
# See above
if re.match(r'PROC', text, re.I):
return True
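# Editorial note (not part of the upstream module): this analyse_text mirrors
# NasmLexer.analyse_text above -- a file whose text begins with 'PROC' makes
# the NASM lexer bow out (return False) while the TASM lexer claims it, which
# is how the two lexers share the '*.asm' extension despite NASM's higher
# default priority.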
class Ca65Lexer(RegexLexer):
"""
For ca65 assembler sources.
.. versionadded:: 1.6
"""
name = 'ca65 assembler'
aliases = ['ca65']
filenames = ['*.s']
flags = re.IGNORECASE
tokens = {
'root': [
(r';.*', Comment.Single),
(r'\s+', Whitespace),
(r'[a-z_.@$][\w.@$]*:', Name.Label),
(r'((ld|st)[axy]|(in|de)[cxy]|asl|lsr|ro[lr]|adc|sbc|cmp|cp[xy]'
r'|cl[cvdi]|se[cdi]|jmp|jsr|bne|beq|bpl|bmi|bvc|bvs|bcc|bcs'
r'|p[lh][ap]|rt[is]|brk|nop|ta[xy]|t[xy]a|txs|tsx|and|ora|eor'
r'|bit)\b', Keyword),
(r'\.\w+', Keyword.Pseudo),
(r'[-+~*/^&|!<>=]', Operator),
(r'"[^"\n]*.', String),
(r"'[^'\n]*.", String.Char),
(r'\$[0-9a-f]+|[0-9a-f]+h\b', Number.Hex),
(r'\d+', Number.Integer),
(r'%[01]+', Number.Bin),
(r'[#,.:()=\[\]]', Punctuation),
(r'[a-z_.@$][\w.@$]*', Name),
]
}
def analyse_text(self, text):
# comments in GAS start with "#"
if re.search(r'^\s*;', text, re.MULTILINE):
return 0.9
class Dasm16Lexer(RegexLexer):
"""
For DCPU-16 Assembly.
.. versionadded:: 2.4
"""
name = 'DASM16'
url = 'http://0x10c.com/doc/dcpu-16.txt'
aliases = ['dasm16']
filenames = ['*.dasm16', '*.dasm']
mimetypes = ['text/x-dasm16']
INSTRUCTIONS = [
'SET',
'ADD', 'SUB',
'MUL', 'MLI',
'DIV', 'DVI',
'MOD', 'MDI',
'AND', 'BOR', 'XOR',
'SHR', 'ASR', 'SHL',
'IFB', 'IFC', 'IFE', 'IFN', 'IFG', 'IFA', 'IFL', 'IFU',
'ADX', 'SBX',
'STI', 'STD',
'JSR',
'INT', 'IAG', 'IAS', 'RFI', 'IAQ', 'HWN', 'HWQ', 'HWI',
]
REGISTERS = [
'A', 'B', 'C',
'X', 'Y', 'Z',
'I', 'J',
'SP', 'PC', 'EX',
'POP', 'PEEK', 'PUSH'
]
# Regexes yo
char = r'[a-zA-Z0-9_$@.]'
identifier = r'(?:[a-zA-Z$_]' + char + r'*|\.' + char + '+)'
number = r'[+-]?(?:0[xX][a-zA-Z0-9]+|\d+)'
binary_number = r'0b[01_]+'
instruction = r'(?i)(' + '|'.join(INSTRUCTIONS) + ')'
single_char = r"'\\?" + char + "'"
string = r'"(\\"|[^"])*"'
def guess_identifier(lexer, match):
ident = match.group(0)
klass = Name.Variable if ident.upper() in lexer.REGISTERS else Name.Label
yield match.start(), klass, ident
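    # Editorial note (not part of the upstream module): guess_identifier
    # resolves bare names context-sensitively -- e.g. 'PC' or 'pop' is emitted
    # as Name.Variable because its upper-cased form appears in REGISTERS, while
    # a hypothetical 'loop_start' falls through to Name.Label.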
tokens = {
'root': [
include('whitespace'),
(':' + identifier, Name.Label),
(identifier + ':', Name.Label),
(instruction, Name.Function, 'instruction-args'),
(r'\.' + identifier, Name.Function, 'data-args'),
(r'[\r\n]+', Whitespace)
],
'numeric' : [
(binary_number, Number.Integer),
(number, Number.Integer),
(single_char, String),
],
'arg' : [
(identifier, guess_identifier),
include('numeric')
],
'deref' : [
(r'\+', Punctuation),
(r'\]', Punctuation, '#pop'),
include('arg'),
include('whitespace')
],
'instruction-line' : [
(r'[\r\n]+', Whitespace, '#pop'),
(r';.*?$', Comment, '#pop'),
include('whitespace')
],
'instruction-args': [
(r',', Punctuation),
(r'\[', Punctuation, 'deref'),
include('arg'),
include('instruction-line')
],
'data-args' : [
(r',', Punctuation),
include('numeric'),
(string, String),
include('instruction-line')
],
'whitespace': [
(r'\n', Whitespace),
(r'\s+', Whitespace),
(r';.*?\n', Comment)
],
}
| 41,243 | Python | 38.734104 | 94 | 0.471159 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/rdf.py | """
pygments.lexers.rdf
~~~~~~~~~~~~~~~~~~~
Lexers for semantic web and RDF query languages and markup.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, bygroups, default
from pygments.token import Keyword, Punctuation, String, Number, Operator, \
Generic, Whitespace, Name, Literal, Comment, Text
__all__ = ['SparqlLexer', 'TurtleLexer', 'ShExCLexer']
class SparqlLexer(RegexLexer):
"""
Lexer for `SPARQL <https://www.w3.org/TR/sparql11-query/>`_ query language.
.. versionadded:: 2.0
"""
name = 'SPARQL'
aliases = ['sparql']
filenames = ['*.rq', '*.sparql']
mimetypes = ['application/sparql-query']
# character group definitions ::
PN_CHARS_BASE_GRP = ('a-zA-Z'
'\u00c0-\u00d6'
'\u00d8-\u00f6'
'\u00f8-\u02ff'
'\u0370-\u037d'
'\u037f-\u1fff'
'\u200c-\u200d'
'\u2070-\u218f'
'\u2c00-\u2fef'
'\u3001-\ud7ff'
'\uf900-\ufdcf'
'\ufdf0-\ufffd')
PN_CHARS_U_GRP = (PN_CHARS_BASE_GRP + '_')
PN_CHARS_GRP = (PN_CHARS_U_GRP +
r'\-' +
r'0-9' +
'\u00b7' +
'\u0300-\u036f' +
'\u203f-\u2040')
HEX_GRP = '0-9A-Fa-f'
PN_LOCAL_ESC_CHARS_GRP = r' _~.\-!$&"()*+,;=/?#@%'
# terminal productions ::
PN_CHARS_BASE = '[' + PN_CHARS_BASE_GRP + ']'
PN_CHARS_U = '[' + PN_CHARS_U_GRP + ']'
PN_CHARS = '[' + PN_CHARS_GRP + ']'
HEX = '[' + HEX_GRP + ']'
PN_LOCAL_ESC_CHARS = '[' + PN_LOCAL_ESC_CHARS_GRP + ']'
IRIREF = r'<(?:[^<>"{}|^`\\\x00-\x20])*>'
BLANK_NODE_LABEL = '_:[0-9' + PN_CHARS_U_GRP + '](?:[' + PN_CHARS_GRP + \
'.]*' + PN_CHARS + ')?'
PN_PREFIX = PN_CHARS_BASE + '(?:[' + PN_CHARS_GRP + '.]*' + PN_CHARS + ')?'
VARNAME = '[0-9' + PN_CHARS_U_GRP + '][' + PN_CHARS_U_GRP + \
'0-9\u00b7\u0300-\u036f\u203f-\u2040]*'
PERCENT = '%' + HEX + HEX
PN_LOCAL_ESC = r'\\' + PN_LOCAL_ESC_CHARS
PLX = '(?:' + PERCENT + ')|(?:' + PN_LOCAL_ESC + ')'
PN_LOCAL = ('(?:[' + PN_CHARS_U_GRP + ':0-9' + ']|' + PLX + ')' +
'(?:(?:[' + PN_CHARS_GRP + '.:]|' + PLX + ')*(?:[' +
PN_CHARS_GRP + ':]|' + PLX + '))?')
EXPONENT = r'[eE][+-]?\d+'
# Lexer token definitions ::
tokens = {
'root': [
(r'\s+', Text),
# keywords ::
(r'(?i)(select|construct|describe|ask|where|filter|group\s+by|minus|'
r'distinct|reduced|from\s+named|from|order\s+by|desc|asc|limit|'
r'offset|values|bindings|load|into|clear|drop|create|add|move|copy|'
r'insert\s+data|delete\s+data|delete\s+where|with|delete|insert|'
r'using\s+named|using|graph|default|named|all|optional|service|'
r'silent|bind|undef|union|not\s+in|in|as|having|to|prefix|base)\b', Keyword),
(r'(a)\b', Keyword),
# IRIs ::
('(' + IRIREF + ')', Name.Label),
# blank nodes ::
('(' + BLANK_NODE_LABEL + ')', Name.Label),
# # variables ::
('[?$]' + VARNAME, Name.Variable),
# prefixed names ::
(r'(' + PN_PREFIX + r')?(\:)(' + PN_LOCAL + r')?',
bygroups(Name.Namespace, Punctuation, Name.Tag)),
# function names ::
(r'(?i)(str|lang|langmatches|datatype|bound|iri|uri|bnode|rand|abs|'
r'ceil|floor|round|concat|strlen|ucase|lcase|encode_for_uri|'
r'contains|strstarts|strends|strbefore|strafter|year|month|day|'
r'hours|minutes|seconds|timezone|tz|now|uuid|struuid|md5|sha1|sha256|sha384|'
r'sha512|coalesce|if|strlang|strdt|sameterm|isiri|isuri|isblank|'
r'isliteral|isnumeric|regex|substr|replace|exists|not\s+exists|'
r'count|sum|min|max|avg|sample|group_concat|separator)\b',
Name.Function),
# boolean literals ::
(r'(true|false)', Keyword.Constant),
# double literals ::
(r'[+\-]?(\d+\.\d*' + EXPONENT + r'|\.?\d+' + EXPONENT + ')', Number.Float),
# decimal literals ::
(r'[+\-]?(\d+\.\d*|\.\d+)', Number.Float),
# integer literals ::
(r'[+\-]?\d+', Number.Integer),
# operators ::
(r'(\|\||&&|=|\*|\-|\+|/|!=|<=|>=|!|<|>)', Operator),
# punctuation characters ::
(r'[(){}.;,:^\[\]]', Punctuation),
# line comments ::
(r'#[^\n]*', Comment),
# strings ::
(r'"""', String, 'triple-double-quoted-string'),
(r'"', String, 'single-double-quoted-string'),
(r"'''", String, 'triple-single-quoted-string'),
(r"'", String, 'single-single-quoted-string'),
],
'triple-double-quoted-string': [
(r'"""', String, 'end-of-string'),
(r'[^\\]+', String),
(r'\\', String, 'string-escape'),
],
'single-double-quoted-string': [
(r'"', String, 'end-of-string'),
(r'[^"\\\n]+', String),
(r'\\', String, 'string-escape'),
],
'triple-single-quoted-string': [
(r"'''", String, 'end-of-string'),
(r'[^\\]+', String),
(r'\\', String.Escape, 'string-escape'),
],
'single-single-quoted-string': [
(r"'", String, 'end-of-string'),
(r"[^'\\\n]+", String),
(r'\\', String, 'string-escape'),
],
'string-escape': [
(r'u' + HEX + '{4}', String.Escape, '#pop'),
(r'U' + HEX + '{8}', String.Escape, '#pop'),
(r'.', String.Escape, '#pop'),
],
'end-of-string': [
(r'(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)',
bygroups(Operator, Name.Function), '#pop:2'),
(r'\^\^', Operator, '#pop:2'),
default('#pop:2'),
],
}
class TurtleLexer(RegexLexer):
"""
Lexer for `Turtle <http://www.w3.org/TR/turtle/>`_ data language.
.. versionadded:: 2.1
"""
name = 'Turtle'
aliases = ['turtle']
filenames = ['*.ttl']
mimetypes = ['text/turtle', 'application/x-turtle']
# character group definitions ::
PN_CHARS_BASE_GRP = ('a-zA-Z'
'\u00c0-\u00d6'
'\u00d8-\u00f6'
'\u00f8-\u02ff'
'\u0370-\u037d'
'\u037f-\u1fff'
'\u200c-\u200d'
'\u2070-\u218f'
'\u2c00-\u2fef'
'\u3001-\ud7ff'
'\uf900-\ufdcf'
'\ufdf0-\ufffd')
PN_CHARS_U_GRP = (PN_CHARS_BASE_GRP + '_')
PN_CHARS_GRP = (PN_CHARS_U_GRP +
r'\-' +
r'0-9' +
'\u00b7' +
'\u0300-\u036f' +
'\u203f-\u2040')
PN_CHARS = '[' + PN_CHARS_GRP + ']'
PN_CHARS_BASE = '[' + PN_CHARS_BASE_GRP + ']'
PN_PREFIX = PN_CHARS_BASE + '(?:[' + PN_CHARS_GRP + '.]*' + PN_CHARS + ')?'
HEX_GRP = '0-9A-Fa-f'
HEX = '[' + HEX_GRP + ']'
PERCENT = '%' + HEX + HEX
PN_LOCAL_ESC_CHARS_GRP = r' _~.\-!$&"()*+,;=/?#@%'
PN_LOCAL_ESC_CHARS = '[' + PN_LOCAL_ESC_CHARS_GRP + ']'
PN_LOCAL_ESC = r'\\' + PN_LOCAL_ESC_CHARS
PLX = '(?:' + PERCENT + ')|(?:' + PN_LOCAL_ESC + ')'
PN_LOCAL = ('(?:[' + PN_CHARS_U_GRP + ':0-9' + ']|' + PLX + ')' +
'(?:(?:[' + PN_CHARS_GRP + '.:]|' + PLX + ')*(?:[' +
PN_CHARS_GRP + ':]|' + PLX + '))?')
patterns = {
'PNAME_NS': r'((?:[a-zA-Z][\w-]*)?\:)', # Simplified character range
'IRIREF': r'(<[^<>"{}|^`\\\x00-\x20]*>)'
}
tokens = {
'root': [
(r'\s+', Text),
# Base / prefix
(r'(@base|BASE)(\s+)%(IRIREF)s(\s*)(\.?)' % patterns,
bygroups(Keyword, Whitespace, Name.Variable, Whitespace,
Punctuation)),
(r'(@prefix|PREFIX)(\s+)%(PNAME_NS)s(\s+)%(IRIREF)s(\s*)(\.?)' % patterns,
bygroups(Keyword, Whitespace, Name.Namespace, Whitespace,
Name.Variable, Whitespace, Punctuation)),
# The shorthand predicate 'a'
(r'(?<=\s)a(?=\s)', Keyword.Type),
# IRIREF
(r'%(IRIREF)s' % patterns, Name.Variable),
# PrefixedName
(r'(' + PN_PREFIX + r')?(\:)(' + PN_LOCAL + r')?',
bygroups(Name.Namespace, Punctuation, Name.Tag)),
# Comment
(r'#[^\n]+', Comment),
(r'\b(true|false)\b', Literal),
(r'[+\-]?\d*\.\d+', Number.Float),
(r'[+\-]?\d*(:?\.\d+)?E[+\-]?\d+', Number.Float),
(r'[+\-]?\d+', Number.Integer),
(r'[\[\](){}.;,:^]', Punctuation),
(r'"""', String, 'triple-double-quoted-string'),
(r'"', String, 'single-double-quoted-string'),
(r"'''", String, 'triple-single-quoted-string'),
(r"'", String, 'single-single-quoted-string'),
],
'triple-double-quoted-string': [
(r'"""', String, 'end-of-string'),
(r'[^\\]+', String),
(r'\\', String, 'string-escape'),
],
'single-double-quoted-string': [
(r'"', String, 'end-of-string'),
(r'[^"\\\n]+', String),
(r'\\', String, 'string-escape'),
],
'triple-single-quoted-string': [
(r"'''", String, 'end-of-string'),
(r'[^\\]+', String),
(r'\\', String, 'string-escape'),
],
'single-single-quoted-string': [
(r"'", String, 'end-of-string'),
(r"[^'\\\n]+", String),
(r'\\', String, 'string-escape'),
],
'string-escape': [
(r'.', String, '#pop'),
],
'end-of-string': [
(r'(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)',
bygroups(Operator, Generic.Emph), '#pop:2'),
(r'(\^\^)%(IRIREF)s' % patterns, bygroups(Operator, Generic.Emph), '#pop:2'),
default('#pop:2'),
],
}
# Turtle and Tera Term macro files share the same file extension
# but each has a recognizable and distinct syntax.
def analyse_text(text):
for t in ('@base ', 'BASE ', '@prefix ', 'PREFIX '):
if re.search(r'^\s*%s' % t, text):
return 0.80
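# Editorial note (not part of the upstream module): a Turtle file that opens
# with a directive such as '@prefix ex: <http://example.org/> .' is enough for
# analyse_text above to return the 0.80 score; files that do not start with a
# @base/BASE/@prefix/PREFIX directive return None, so other lexers (e.g. the
# Tera Term macro lexer) can claim the '*.ttl' extension instead.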
class ShExCLexer(RegexLexer):
"""
Lexer for `ShExC <https://shex.io/shex-semantics/#shexc>`_ shape expressions language syntax.
"""
name = 'ShExC'
aliases = ['shexc', 'shex']
filenames = ['*.shex']
mimetypes = ['text/shex']
# character group definitions ::
PN_CHARS_BASE_GRP = ('a-zA-Z'
'\u00c0-\u00d6'
'\u00d8-\u00f6'
'\u00f8-\u02ff'
'\u0370-\u037d'
'\u037f-\u1fff'
'\u200c-\u200d'
'\u2070-\u218f'
'\u2c00-\u2fef'
'\u3001-\ud7ff'
'\uf900-\ufdcf'
'\ufdf0-\ufffd')
PN_CHARS_U_GRP = (PN_CHARS_BASE_GRP + '_')
PN_CHARS_GRP = (PN_CHARS_U_GRP +
r'\-' +
r'0-9' +
'\u00b7' +
'\u0300-\u036f' +
'\u203f-\u2040')
HEX_GRP = '0-9A-Fa-f'
PN_LOCAL_ESC_CHARS_GRP = r"_~.\-!$&'()*+,;=/?#@%"
# terminal productions ::
PN_CHARS_BASE = '[' + PN_CHARS_BASE_GRP + ']'
PN_CHARS_U = '[' + PN_CHARS_U_GRP + ']'
PN_CHARS = '[' + PN_CHARS_GRP + ']'
HEX = '[' + HEX_GRP + ']'
PN_LOCAL_ESC_CHARS = '[' + PN_LOCAL_ESC_CHARS_GRP + ']'
UCHAR_NO_BACKSLASH = '(?:u' + HEX + '{4}|U' + HEX + '{8})'
UCHAR = r'\\' + UCHAR_NO_BACKSLASH
IRIREF = r'<(?:[^\x00-\x20<>"{}|^`\\]|' + UCHAR + ')*>'
BLANK_NODE_LABEL = '_:[0-9' + PN_CHARS_U_GRP + '](?:[' + PN_CHARS_GRP + \
'.]*' + PN_CHARS + ')?'
PN_PREFIX = PN_CHARS_BASE + '(?:[' + PN_CHARS_GRP + '.]*' + PN_CHARS + ')?'
PERCENT = '%' + HEX + HEX
PN_LOCAL_ESC = r'\\' + PN_LOCAL_ESC_CHARS
PLX = '(?:' + PERCENT + ')|(?:' + PN_LOCAL_ESC + ')'
PN_LOCAL = ('(?:[' + PN_CHARS_U_GRP + ':0-9' + ']|' + PLX + ')' +
'(?:(?:[' + PN_CHARS_GRP + '.:]|' + PLX + ')*(?:[' +
PN_CHARS_GRP + ':]|' + PLX + '))?')
EXPONENT = r'[eE][+-]?\d+'
# Lexer token definitions ::
tokens = {
'root': [
(r'\s+', Text),
# keywords ::
(r'(?i)(base|prefix|start|external|'
r'literal|iri|bnode|nonliteral|length|minlength|maxlength|'
r'mininclusive|minexclusive|maxinclusive|maxexclusive|'
r'totaldigits|fractiondigits|'
r'closed|extra)\b', Keyword),
(r'(a)\b', Keyword),
# IRIs ::
('(' + IRIREF + ')', Name.Label),
# blank nodes ::
('(' + BLANK_NODE_LABEL + ')', Name.Label),
# prefixed names ::
(r'(' + PN_PREFIX + r')?(\:)(' + PN_LOCAL + ')?',
bygroups(Name.Namespace, Punctuation, Name.Tag)),
# boolean literals ::
(r'(true|false)', Keyword.Constant),
# double literals ::
(r'[+\-]?(\d+\.\d*' + EXPONENT + r'|\.?\d+' + EXPONENT + ')', Number.Float),
# decimal literals ::
(r'[+\-]?(\d+\.\d*|\.\d+)', Number.Float),
# integer literals ::
(r'[+\-]?\d+', Number.Integer),
# operators ::
(r'[@|$&=*+?^\-~]', Operator),
# operator keywords ::
(r'(?i)(and|or|not)\b', Operator.Word),
# punctuation characters ::
(r'[(){}.;,:^\[\]]', Punctuation),
# line comments ::
(r'#[^\n]*', Comment),
# strings ::
(r'"""', String, 'triple-double-quoted-string'),
(r'"', String, 'single-double-quoted-string'),
(r"'''", String, 'triple-single-quoted-string'),
(r"'", String, 'single-single-quoted-string'),
],
'triple-double-quoted-string': [
(r'"""', String, 'end-of-string'),
(r'[^\\]+', String),
(r'\\', String, 'string-escape'),
],
'single-double-quoted-string': [
(r'"', String, 'end-of-string'),
(r'[^"\\\n]+', String),
(r'\\', String, 'string-escape'),
],
'triple-single-quoted-string': [
(r"'''", String, 'end-of-string'),
(r'[^\\]+', String),
(r'\\', String.Escape, 'string-escape'),
],
'single-single-quoted-string': [
(r"'", String, 'end-of-string'),
(r"[^'\\\n]+", String),
(r'\\', String, 'string-escape'),
],
'string-escape': [
(UCHAR_NO_BACKSLASH, String.Escape, '#pop'),
(r'.', String.Escape, '#pop'),
],
'end-of-string': [
(r'(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)',
bygroups(Operator, Name.Function), '#pop:2'),
(r'\^\^', Operator, '#pop:2'),
default('#pop:2'),
],
}
| 15,790 | Python | 33.105831 | 97 | 0.421786 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/_mql_builtins.py | """
pygments.lexers._mql_builtins
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Builtins for the MqlLexer.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
types = (
'AccountBalance',
'AccountCompany',
'AccountCredit',
'AccountCurrency',
'AccountEquity',
'AccountFreeMarginCheck',
'AccountFreeMarginMode',
'AccountFreeMargin',
'AccountInfoDouble',
'AccountInfoInteger',
'AccountInfoString',
'AccountLeverage',
'AccountMargin',
'AccountName',
'AccountNumber',
'AccountProfit',
'AccountServer',
'AccountStopoutLevel',
'AccountStopoutMode',
'Alert',
'ArrayBsearch',
'ArrayCompare',
'ArrayCopyRates',
'ArrayCopySeries',
'ArrayCopy',
'ArrayDimension',
'ArrayFill',
'ArrayFree',
'ArrayGetAsSeries',
'ArrayInitialize',
'ArrayIsDynamic',
'ArrayIsSeries',
'ArrayMaximum',
'ArrayMinimum',
'ArrayRange',
'ArrayResize',
'ArraySetAsSeries',
'ArraySize',
'ArraySort',
'CharArrayToString',
'CharToString',
'CharToStr',
'CheckPointer',
'ColorToARGB',
'ColorToString',
'Comment',
'CopyClose',
'CopyHigh',
'CopyLow',
'CopyOpen',
'CopyRates',
'CopyRealVolume',
'CopySpread',
'CopyTickVolume',
'CopyTime',
'DayOfWeek',
'DayOfYear',
'Day',
'DebugBreak',
'Digits',
'DoubleToString',
'DoubleToStr',
'EnumToString',
'EventChartCustom',
'EventKillTimer',
'EventSetMillisecondTimer',
'EventSetTimer',
'ExpertRemove',
'FileClose',
'FileCopy',
'FileDelete',
'FileFindClose',
'FileFindFirst',
'FileFindNext',
'FileFlush',
'FileGetInteger',
'FileIsEnding',
'FileIsExist',
'FileIsLineEnding',
'FileMove',
'FileOpenHistory',
'FileOpen',
'FileReadArray',
'FileReadBool',
'FileReadDatetime',
'FileReadDouble',
'FileReadFloat',
'FileReadInteger',
'FileReadLong',
'FileReadNumber',
'FileReadString',
'FileReadStruct',
'FileSeek',
'FileSize',
'FileTell',
'FileWriteArray',
'FileWriteDouble',
'FileWriteFloat',
'FileWriteInteger',
'FileWriteLong',
'FileWriteString',
'FileWriteStruct',
'FileWrite',
'FolderClean',
'FolderCreate',
'FolderDelete',
'GetLastError',
'GetPointer',
'GetTickCount',
'GlobalVariableCheck',
'GlobalVariableDel',
'GlobalVariableGet',
'GlobalVariableName',
'GlobalVariableSetOnCondition',
'GlobalVariableSet',
'GlobalVariableTemp',
'GlobalVariableTime',
'GlobalVariablesDeleteAll',
'GlobalVariablesFlush',
'GlobalVariablesTotal',
'HideTestIndicators',
'Hour',
'IndicatorBuffers',
'IndicatorCounted',
'IndicatorDigits',
'IndicatorSetDouble',
'IndicatorSetInteger',
'IndicatorSetString',
'IndicatorShortName',
'IntegerToString',
'IsConnected',
'IsDemo',
'IsDllsAllowed',
'IsExpertEnabled',
'IsLibrariesAllowed',
'IsOptimization',
'IsStopped',
'IsTesting',
'IsTradeAllowed',
'IsTradeContextBusy',
'IsVisualMode',
'MQLInfoInteger',
'MQLInfoString',
'MarketInfo',
'MathAbs',
'MathArccos',
'MathArcsin',
'MathArctan',
'MathCeil',
'MathCos',
'MathExp',
'MathFloor',
'MathIsValidNumber',
'MathLog',
'MathMax',
'MathMin',
'MathMod',
'MathPow',
'MathRand',
'MathRound',
'MathSin',
'MathSqrt',
'MathSrand',
'MathTan',
'MessageBox',
'Minute',
'Month',
'NormalizeDouble',
'ObjectCreate',
'ObjectDelete',
'ObjectDescription',
'ObjectFind',
'ObjectGetDouble',
'ObjectGetFiboDescription',
'ObjectGetInteger',
'ObjectGetShiftByValue',
'ObjectGetString',
'ObjectGetTimeByValue',
'ObjectGetValueByShift',
'ObjectGetValueByTime',
'ObjectGet',
'ObjectMove',
'ObjectName',
'ObjectSetDouble',
'ObjectSetFiboDescription',
'ObjectSetInteger',
'ObjectSetString',
'ObjectSetText',
'ObjectSet',
'ObjectType',
'ObjectsDeleteAll',
'ObjectsTotal',
'OrderCloseBy',
'OrderClosePrice',
'OrderCloseTime',
'OrderClose',
'OrderComment',
'OrderCommission',
'OrderDelete',
'OrderExpiration',
'OrderLots',
'OrderMagicNumber',
'OrderModify',
'OrderOpenPrice',
'OrderOpenTime',
'OrderPrint',
'OrderProfit',
'OrderSelect',
'OrderSend',
'OrderStopLoss',
'OrderSwap',
'OrderSymbol',
'OrderTakeProfit',
'OrderTicket',
'OrderType',
'OrdersHistoryTotal',
'OrdersTotal',
'PeriodSeconds',
'Period',
'PlaySound',
'Point',
'PrintFormat',
'Print',
'RefreshRates',
'ResetLastError',
'ResourceCreate',
'ResourceFree',
'ResourceReadImage',
'ResourceSave',
'Seconds',
'SendFTP',
'SendMail',
'SendNotification',
'SeriesInfoInteger',
'SetIndexArrow',
'SetIndexBuffer',
'SetIndexDrawBegin',
'SetIndexEmptyValue',
'SetIndexLabel',
'SetIndexShift',
'SetIndexStyle',
'SetLevelStyle',
'SetLevelValue',
'ShortArrayToString',
'ShortToString',
'Sleep',
'StrToDouble',
'StrToInteger',
'StrToTime',
'StringAdd',
'StringBufferLen',
'StringCompare',
'StringConcatenate',
'StringFill',
'StringFind',
'StringFormat',
'StringGetCharacter',
'StringGetChar',
'StringInit',
'StringLen',
'StringReplace',
'StringSetCharacter',
'StringSetChar',
'StringSplit',
'StringSubstr',
'StringToCharArray',
'StringToColor',
'StringToDouble',
'StringToInteger',
'StringToLower',
'StringToShortArray',
'StringToTime',
'StringToUpper',
'StringTrimLeft',
'StringTrimRight',
'StructToTime',
'SymbolInfoDouble',
'SymbolInfoInteger',
'SymbolInfoSessionQuote',
'SymbolInfoSessionTrade',
'SymbolInfoString',
'SymbolInfoTick',
'SymbolIsSynchronized',
'SymbolName',
'SymbolSelect',
'SymbolsTotal',
'Symbol',
'TerminalClose',
'TerminalCompany',
'TerminalName',
'TerminalPath',
'TesterStatistics',
'TextGetSize',
'TextOut',
'TextSetFont',
'TimeCurrent',
'TimeDayOfWeek',
'TimeDayOfYear',
'TimeDaylightSavings',
'TimeDay',
'TimeGMTOffset',
'TimeGMT',
'TimeHour',
'TimeLocal',
'TimeMinute',
'TimeMonth',
'TimeSeconds',
'TimeToString',
'TimeToStruct',
'TimeToStr',
'TimeTradeServer',
'TimeYear',
'UninitializeReason',
'WindowBarsPerChart',
'WindowExpertName',
'WindowFind',
'WindowFirstVisibleBar',
'WindowHandle',
'WindowIsVisible',
'WindowOnDropped',
'WindowPriceMax',
'WindowPriceMin',
'WindowPriceOnDropped',
'WindowRedraw',
'WindowScreenShot',
'WindowTimeOnDropped',
'WindowXOnDropped',
'WindowYOnDropped',
'WindowsTotal',
'Year',
'ZeroMemory',
'iAC',
'iADX',
'iAD',
'iAO',
'iATR',
'iAlligator',
'iBWMFI',
'iBandsOnArray',
'iBands',
'iBarShift',
'iBars',
'iBearsPower',
'iBullsPower',
'iCCIOnArray',
'iCCI',
'iClose',
'iCustom',
'iDeMarker',
'iEnvelopesOnArray',
'iEnvelopes',
'iForce',
'iFractals',
'iGator',
'iHighest',
'iHigh',
'iIchimoku',
'iLowest',
'iLow',
'iMACD',
'iMAOnArray',
'iMA',
'iMFI',
'iMomentumOnArray',
'iMomentum',
'iOBV',
'iOpen',
'iOsMA',
'iRSIOnArray',
'iRSI',
'iRVI',
'iSAR',
'iStdDevOnArray',
'iStdDev',
'iStochastic',
'iTime',
'iVolume',
'iWPR',
)
constants = (
'ACCOUNT_BALANCE',
'ACCOUNT_COMPANY',
'ACCOUNT_CREDIT',
'ACCOUNT_CURRENCY',
'ACCOUNT_EQUITY',
'ACCOUNT_FREEMARGIN',
'ACCOUNT_LEVERAGE',
'ACCOUNT_LIMIT_ORDERS',
'ACCOUNT_LOGIN',
'ACCOUNT_MARGIN',
'ACCOUNT_MARGIN_LEVEL',
'ACCOUNT_MARGIN_SO_CALL',
'ACCOUNT_MARGIN_SO_MODE',
'ACCOUNT_MARGIN_SO_SO',
'ACCOUNT_NAME',
'ACCOUNT_PROFIT',
'ACCOUNT_SERVER',
'ACCOUNT_STOPOUT_MODE_MONEY',
'ACCOUNT_STOPOUT_MODE_PERCENT',
'ACCOUNT_TRADE_ALLOWED',
'ACCOUNT_TRADE_EXPERT',
'ACCOUNT_TRADE_MODE',
'ACCOUNT_TRADE_MODE_CONTEST',
'ACCOUNT_TRADE_MODE_DEMO',
'ACCOUNT_TRADE_MODE_REAL',
'ALIGN_CENTER',
'ALIGN_LEFT',
'ALIGN_RIGHT',
'ANCHOR_BOTTOM',
'ANCHOR_CENTER',
'ANCHOR_LEFT',
'ANCHOR_LEFT_LOWER',
'ANCHOR_LEFT_UPPER',
'ANCHOR_LOWER',
'ANCHOR_RIGHT',
'ANCHOR_RIGHT_LOWER',
'ANCHOR_RIGHT_UPPER',
'ANCHOR_TOP',
'ANCHOR_UPPER',
'BORDER_FLAT',
'BORDER_RAISED',
'BORDER_SUNKEN',
'CHARTEVENT_CHART_CHANGE',
'CHARTEVENT_CLICK',
'CHARTEVENT_CUSTOM',
'CHARTEVENT_CUSTOM_LAST',
'CHARTEVENT_KEYDOWN',
'CHARTEVENT_MOUSE_MOVE',
'CHARTEVENT_OBJECT_CHANGE',
'CHARTEVENT_OBJECT_CLICK',
'CHARTEVENT_OBJECT_CREATE',
'CHARTEVENT_OBJECT_DELETE',
'CHARTEVENT_OBJECT_DRAG',
'CHARTEVENT_OBJECT_ENDEDIT',
'CHARTS_MAX',
'CHART_AUTOSCROLL',
'CHART_BARS',
'CHART_BEGIN',
'CHART_BRING_TO_TOP',
'CHART_CANDLES',
'CHART_COLOR_ASK',
'CHART_COLOR_BACKGROUND',
'CHART_COLOR_BID',
'CHART_COLOR_CANDLE_BEAR',
'CHART_COLOR_CANDLE_BULL',
'CHART_COLOR_CHART_DOWN',
'CHART_COLOR_CHART_LINE',
'CHART_COLOR_CHART_UP',
'CHART_COLOR_FOREGROUND',
'CHART_COLOR_GRID',
'CHART_COLOR_LAST',
'CHART_COLOR_STOP_LEVEL',
'CHART_COLOR_VOLUME',
'CHART_COMMENT',
'CHART_CURRENT_POS',
'CHART_DRAG_TRADE_LEVELS',
'CHART_END',
'CHART_EVENT_MOUSE_MOVE',
'CHART_EVENT_OBJECT_CREATE',
'CHART_EVENT_OBJECT_DELETE',
'CHART_FIRST_VISIBLE_BAR',
'CHART_FIXED_MAX',
'CHART_FIXED_MIN',
'CHART_FIXED_POSITION',
'CHART_FOREGROUND',
'CHART_HEIGHT_IN_PIXELS',
'CHART_IS_OBJECT',
'CHART_LINE',
'CHART_MODE',
'CHART_MOUSE_SCROLL',
'CHART_POINTS_PER_BAR',
'CHART_PRICE_MAX',
'CHART_PRICE_MIN',
'CHART_SCALEFIX',
'CHART_SCALEFIX_11',
'CHART_SCALE',
'CHART_SCALE_PT_PER_BAR',
'CHART_SHIFT',
'CHART_SHIFT_SIZE',
'CHART_SHOW_ASK_LINE',
'CHART_SHOW_BID_LINE',
'CHART_SHOW_DATE_SCALE',
'CHART_SHOW_GRID',
'CHART_SHOW_LAST_LINE',
'CHART_SHOW_OBJECT_DESCR',
'CHART_SHOW_OHLC',
'CHART_SHOW_PERIOD_SEP',
'CHART_SHOW_PRICE_SCALE',
'CHART_SHOW_TRADE_LEVELS',
'CHART_SHOW_VOLUMES',
'CHART_VISIBLE_BARS',
'CHART_VOLUME_HIDE',
'CHART_VOLUME_REAL',
'CHART_VOLUME_TICK',
'CHART_WIDTH_IN_BARS',
'CHART_WIDTH_IN_PIXELS',
'CHART_WINDOWS_TOTAL',
'CHART_WINDOW_HANDLE',
'CHART_WINDOW_IS_VISIBLE',
'CHART_WINDOW_YDISTANCE',
'CHAR_MAX',
'CHAR_MIN',
'CLR_NONE',
'CORNER_LEFT_LOWER',
'CORNER_LEFT_UPPER',
'CORNER_RIGHT_LOWER',
'CORNER_RIGHT_UPPER',
'CP_ACP',
'CP_MACCP',
'CP_OEMCP',
'CP_SYMBOL',
'CP_THREAD_ACP',
'CP_UTF7',
'CP_UTF8',
'DBL_DIG',
'DBL_EPSILON',
'DBL_MANT_DIG',
'DBL_MAX',
'DBL_MAX_10_EXP',
'DBL_MAX_EXP',
'DBL_MIN',
'DBL_MIN_10_EXP',
'DBL_MIN_EXP',
'DRAW_ARROW',
'DRAW_FILLING',
'DRAW_HISTOGRAM',
'DRAW_LINE',
'DRAW_NONE',
'DRAW_SECTION',
'DRAW_ZIGZAG',
'EMPTY',
'EMPTY_VALUE',
'ERR_ACCOUNT_DISABLED',
'ERR_BROKER_BUSY',
'ERR_COMMON_ERROR',
'ERR_INVALID_ACCOUNT',
'ERR_INVALID_PRICE',
'ERR_INVALID_STOPS',
'ERR_INVALID_TRADE_PARAMETERS',
'ERR_INVALID_TRADE_VOLUME',
'ERR_LONG_POSITIONS_ONLY_ALLOWED',
'ERR_MALFUNCTIONAL_TRADE',
'ERR_MARKET_CLOSED',
'ERR_NOT_ENOUGH_MONEY',
'ERR_NOT_ENOUGH_RIGHTS',
'ERR_NO_CONNECTION',
'ERR_NO_ERROR',
'ERR_NO_RESULT',
'ERR_OFF_QUOTES',
'ERR_OLD_VERSION',
'ERR_ORDER_LOCKED',
'ERR_PRICE_CHANGED',
'ERR_REQUOTE',
'ERR_SERVER_BUSY',
'ERR_TOO_FREQUENT_REQUESTS',
'ERR_TOO_MANY_REQUESTS',
'ERR_TRADE_CONTEXT_BUSY',
'ERR_TRADE_DISABLED',
'ERR_TRADE_EXPIRATION_DENIED',
'ERR_TRADE_HEDGE_PROHIBITED',
'ERR_TRADE_MODIFY_DENIED',
'ERR_TRADE_PROHIBITED_BY_FIFO',
'ERR_TRADE_TIMEOUT',
'ERR_TRADE_TOO_MANY_ORDERS',
'FILE_ACCESS_DATE',
'FILE_ANSI',
'FILE_BIN',
'FILE_COMMON',
'FILE_CREATE_DATE',
'FILE_CSV',
'FILE_END',
'FILE_EXISTS',
'FILE_IS_ANSI',
'FILE_IS_BINARY',
'FILE_IS_COMMON',
'FILE_IS_CSV',
'FILE_IS_READABLE',
'FILE_IS_TEXT',
'FILE_IS_WRITABLE',
'FILE_LINE_END',
'FILE_MODIFY_DATE',
'FILE_POSITION',
'FILE_READ',
'FILE_REWRITE',
'FILE_SHARE_READ',
'FILE_SHARE_WRITE',
'FILE_SIZE',
'FILE_TXT',
'FILE_UNICODE',
'FILE_WRITE',
'FLT_DIG',
'FLT_EPSILON',
'FLT_MANT_DIG',
'FLT_MAX',
'FLT_MAX_10_EXP',
'FLT_MAX_EXP',
'FLT_MIN',
'FLT_MIN_10_EXP',
'FLT_MIN_EXP',
'FRIDAY',
'GANN_DOWN_TREND',
'GANN_UP_TREND',
'IDABORT',
'IDCANCEL',
'IDCONTINUE',
'IDIGNORE',
'IDNO',
'IDOK',
'IDRETRY',
'IDTRYAGAIN',
'IDYES',
'INDICATOR_CALCULATIONS',
'INDICATOR_COLOR_INDEX',
'INDICATOR_DATA',
'INDICATOR_DIGITS',
'INDICATOR_HEIGHT',
'INDICATOR_LEVELCOLOR',
'INDICATOR_LEVELSTYLE',
'INDICATOR_LEVELS',
'INDICATOR_LEVELTEXT',
'INDICATOR_LEVELVALUE',
'INDICATOR_LEVELWIDTH',
'INDICATOR_MAXIMUM',
'INDICATOR_MINIMUM',
'INDICATOR_SHORTNAME',
'INT_MAX',
'INT_MIN',
'INVALID_HANDLE',
'IS_DEBUG_MODE',
'IS_PROFILE_MODE',
'LICENSE_DEMO',
'LICENSE_FREE',
'LICENSE_FULL',
'LICENSE_TIME',
'LONG_MAX',
'LONG_MIN',
'MB_ABORTRETRYIGNORE',
'MB_CANCELTRYCONTINUE',
'MB_DEFBUTTON1',
'MB_DEFBUTTON2',
'MB_DEFBUTTON3',
'MB_DEFBUTTON4',
'MB_ICONASTERISK',
'MB_ICONERROR',
'MB_ICONEXCLAMATION',
'MB_ICONHAND',
'MB_ICONINFORMATION',
'MB_ICONQUESTION',
'MB_ICONSTOP',
'MB_ICONWARNING',
'MB_OKCANCEL',
'MB_OK',
'MB_RETRYCANCEL',
'MB_YESNOCANCEL',
'MB_YESNO',
'MODE_ASK',
'MODE_BID',
'MODE_CHINKOUSPAN',
'MODE_CLOSE',
'MODE_DIGITS',
'MODE_EMA',
'MODE_EXPIRATION',
'MODE_FREEZELEVEL',
'MODE_GATORJAW',
'MODE_GATORLIPS',
'MODE_GATORTEETH',
'MODE_HIGH',
'MODE_KIJUNSEN',
'MODE_LOTSIZE',
'MODE_LOTSTEP',
'MODE_LOWER',
'MODE_LOW',
'MODE_LWMA',
'MODE_MAIN',
'MODE_MARGINCALCMODE',
'MODE_MARGINHEDGED',
'MODE_MARGININIT',
'MODE_MARGINMAINTENANCE',
'MODE_MARGINREQUIRED',
'MODE_MAXLOT',
'MODE_MINLOT',
'MODE_MINUSDI',
'MODE_OPEN',
'MODE_PLUSDI',
'MODE_POINT',
'MODE_PROFITCALCMODE',
'MODE_SENKOUSPANA',
'MODE_SENKOUSPANB',
'MODE_SIGNAL',
'MODE_SMA',
'MODE_SMMA',
'MODE_SPREAD',
'MODE_STARTING',
'MODE_STOPLEVEL',
'MODE_SWAPLONG',
'MODE_SWAPSHORT',
'MODE_SWAPTYPE',
'MODE_TENKANSEN',
'MODE_TICKSIZE',
'MODE_TICKVALUE',
'MODE_TIME',
'MODE_TRADEALLOWED',
'MODE_UPPER',
'MODE_VOLUME',
'MONDAY',
'MQL_DEBUG',
'MQL_DLLS_ALLOWED',
'MQL_FRAME_MODE',
'MQL_LICENSE_TYPE',
'MQL_OPTIMIZATION',
'MQL_PROFILER',
'MQL_PROGRAM_NAME',
'MQL_PROGRAM_PATH',
'MQL_PROGRAM_TYPE',
'MQL_TESTER',
'MQL_TRADE_ALLOWED',
'MQL_VISUAL_MODE',
'M_1_PI',
'M_2_PI',
'M_2_SQRTPI',
'M_E',
'M_LN2',
'M_LN10',
'M_LOG2E',
'M_LOG10E',
'M_PI',
'M_PI_2',
'M_PI_4',
'M_SQRT1_2',
'M_SQRT2',
'NULL',
'OBJPROP_ALIGN',
'OBJPROP_ANCHOR',
'OBJPROP_ANGLE',
'OBJPROP_ARROWCODE',
'OBJPROP_BACK',
'OBJPROP_BGCOLOR',
'OBJPROP_BMPFILE',
'OBJPROP_BORDER_COLOR',
'OBJPROP_BORDER_TYPE',
'OBJPROP_CHART_ID',
'OBJPROP_CHART_SCALE',
'OBJPROP_COLOR',
'OBJPROP_CORNER',
'OBJPROP_CREATETIME',
'OBJPROP_DATE_SCALE',
'OBJPROP_DEVIATION',
'OBJPROP_DRAWLINES',
'OBJPROP_ELLIPSE',
'OBJPROP_FIBOLEVELS',
'OBJPROP_FILL',
'OBJPROP_FIRSTLEVEL',
'OBJPROP_FONTSIZE',
'OBJPROP_FONT',
'OBJPROP_HIDDEN',
'OBJPROP_LEVELCOLOR',
'OBJPROP_LEVELSTYLE',
'OBJPROP_LEVELS',
'OBJPROP_LEVELTEXT',
'OBJPROP_LEVELVALUE',
'OBJPROP_LEVELWIDTH',
'OBJPROP_NAME',
'OBJPROP_PERIOD',
'OBJPROP_PRICE1',
'OBJPROP_PRICE2',
'OBJPROP_PRICE3',
'OBJPROP_PRICE',
'OBJPROP_PRICE_SCALE',
'OBJPROP_RAY',
'OBJPROP_RAY_RIGHT',
'OBJPROP_READONLY',
'OBJPROP_SCALE',
'OBJPROP_SELECTABLE',
'OBJPROP_SELECTED',
'OBJPROP_STATE',
'OBJPROP_STYLE',
'OBJPROP_SYMBOL',
'OBJPROP_TEXT',
'OBJPROP_TIME1',
'OBJPROP_TIME2',
'OBJPROP_TIME3',
'OBJPROP_TIMEFRAMES',
'OBJPROP_TIME',
'OBJPROP_TOOLTIP',
'OBJPROP_TYPE',
'OBJPROP_WIDTH',
'OBJPROP_XDISTANCE',
'OBJPROP_XOFFSET',
'OBJPROP_XSIZE',
'OBJPROP_YDISTANCE',
'OBJPROP_YOFFSET',
'OBJPROP_YSIZE',
'OBJPROP_ZORDER',
'OBJ_ALL_PERIODS',
'OBJ_ARROW',
'OBJ_ARROW_BUY',
'OBJ_ARROW_CHECK',
'OBJ_ARROW_DOWN',
'OBJ_ARROW_LEFT_PRICE',
'OBJ_ARROW_RIGHT_PRICE',
'OBJ_ARROW_SELL',
'OBJ_ARROW_STOP',
'OBJ_ARROW_THUMB_DOWN',
'OBJ_ARROW_THUMB_UP',
'OBJ_ARROW_UP',
'OBJ_BITMAP',
'OBJ_BITMAP_LABEL',
'OBJ_BUTTON',
'OBJ_CHANNEL',
'OBJ_CYCLES',
'OBJ_EDIT',
'OBJ_ELLIPSE',
'OBJ_EVENT',
'OBJ_EXPANSION',
'OBJ_FIBOARC',
'OBJ_FIBOCHANNEL',
'OBJ_FIBOFAN',
'OBJ_FIBOTIMES',
'OBJ_FIBO',
'OBJ_GANNFAN',
'OBJ_GANNGRID',
'OBJ_GANNLINE',
'OBJ_HLINE',
'OBJ_LABEL',
'OBJ_NO_PERIODS',
'OBJ_PERIOD_D1',
'OBJ_PERIOD_H1',
'OBJ_PERIOD_H4',
'OBJ_PERIOD_M1',
'OBJ_PERIOD_M5',
'OBJ_PERIOD_M15',
'OBJ_PERIOD_M30',
'OBJ_PERIOD_MN1',
'OBJ_PERIOD_W1',
'OBJ_PITCHFORK',
'OBJ_RECTANGLE',
'OBJ_RECTANGLE_LABEL',
'OBJ_REGRESSION',
'OBJ_STDDEVCHANNEL',
'OBJ_TEXT',
'OBJ_TRENDBYANGLE',
'OBJ_TREND',
'OBJ_TRIANGLE',
'OBJ_VLINE',
'OP_BUYLIMIT',
'OP_BUYSTOP',
'OP_BUY',
'OP_SELLLIMIT',
'OP_SELLSTOP',
'OP_SELL',
'PERIOD_CURRENT',
'PERIOD_D1',
'PERIOD_H1',
'PERIOD_H2',
'PERIOD_H3',
'PERIOD_H4',
'PERIOD_H6',
'PERIOD_H8',
'PERIOD_H12',
'PERIOD_M1',
'PERIOD_M2',
'PERIOD_M3',
'PERIOD_M4',
'PERIOD_M5',
'PERIOD_M6',
'PERIOD_M10',
'PERIOD_M12',
'PERIOD_M15',
'PERIOD_M20',
'PERIOD_M30',
'PERIOD_MN1',
'PERIOD_W1',
'POINTER_AUTOMATIC',
'POINTER_DYNAMIC',
'POINTER_INVALID',
'PRICE_CLOSE',
'PRICE_HIGH',
'PRICE_LOW',
'PRICE_MEDIAN',
'PRICE_OPEN',
'PRICE_TYPICAL',
'PRICE_WEIGHTED',
'PROGRAM_EXPERT',
'PROGRAM_INDICATOR',
'PROGRAM_SCRIPT',
'REASON_ACCOUNT',
'REASON_CHARTCHANGE',
'REASON_CHARTCLOSE',
'REASON_CLOSE',
'REASON_INITFAILED',
'REASON_PARAMETERS',
    'REASON_PROGRAM',
    'REASON_RECOMPILE',
'REASON_REMOVE',
'REASON_TEMPLATE',
'SATURDAY',
'SEEK_CUR',
'SEEK_END',
'SEEK_SET',
'SERIES_BARS_COUNT',
'SERIES_FIRSTDATE',
'SERIES_LASTBAR_DATE',
'SERIES_SERVER_FIRSTDATE',
'SERIES_SYNCHRONIZED',
'SERIES_TERMINAL_FIRSTDATE',
'SHORT_MAX',
'SHORT_MIN',
'STAT_BALANCEDD_PERCENT',
'STAT_BALANCEMIN',
'STAT_BALANCE_DDREL_PERCENT',
'STAT_BALANCE_DD',
'STAT_BALANCE_DD_RELATIVE',
'STAT_CONLOSSMAX',
'STAT_CONLOSSMAX_TRADES',
'STAT_CONPROFITMAX',
'STAT_CONPROFITMAX_TRADES',
'STAT_CUSTOM_ONTESTER',
'STAT_DEALS',
'STAT_EQUITYDD_PERCENT',
'STAT_EQUITYMIN',
'STAT_EQUITY_DDREL_PERCENT',
'STAT_EQUITY_DD',
'STAT_EQUITY_DD_RELATIVE',
'STAT_EXPECTED_PAYOFF',
'STAT_GROSS_LOSS',
'STAT_GROSS_PROFIT',
'STAT_INITIAL_DEPOSIT',
'STAT_LONG_TRADES',
'STAT_LOSSTRADES_AVGCON',
'STAT_LOSS_TRADES',
'STAT_MAX_CONLOSSES',
'STAT_MAX_CONLOSS_TRADES',
'STAT_MAX_CONPROFIT_TRADES',
'STAT_MAX_CONWINS',
'STAT_MAX_LOSSTRADE',
'STAT_MAX_PROFITTRADE',
'STAT_MIN_MARGINLEVEL',
'STAT_PROFITTRADES_AVGCON',
'STAT_PROFIT',
'STAT_PROFIT_FACTOR',
'STAT_PROFIT_LONGTRADES',
'STAT_PROFIT_SHORTTRADES',
'STAT_PROFIT_TRADES',
'STAT_RECOVERY_FACTOR',
'STAT_SHARPE_RATIO',
'STAT_SHORT_TRADES',
'STAT_TRADES',
'STAT_WITHDRAWAL',
'STO_CLOSECLOSE',
'STO_LOWHIGH',
'STYLE_DASHDOTDOT',
'STYLE_DASHDOT',
'STYLE_DASH',
'STYLE_DOT',
'STYLE_SOLID',
'SUNDAY',
'SYMBOL_ARROWDOWN',
'SYMBOL_ARROWUP',
'SYMBOL_CHECKSIGN',
'SYMBOL_LEFTPRICE',
'SYMBOL_RIGHTPRICE',
'SYMBOL_STOPSIGN',
'SYMBOL_THUMBSDOWN',
'SYMBOL_THUMBSUP',
'TERMINAL_BUILD',
'TERMINAL_CODEPAGE',
'TERMINAL_COMMONDATA_PATH',
'TERMINAL_COMPANY',
'TERMINAL_CONNECTED',
'TERMINAL_CPU_CORES',
'TERMINAL_DATA_PATH',
'TERMINAL_DISK_SPACE',
'TERMINAL_DLLS_ALLOWED',
'TERMINAL_EMAIL_ENABLED',
'TERMINAL_FTP_ENABLED',
'TERMINAL_LANGUAGE',
'TERMINAL_MAXBARS',
'TERMINAL_MEMORY_AVAILABLE',
'TERMINAL_MEMORY_PHYSICAL',
'TERMINAL_MEMORY_TOTAL',
'TERMINAL_MEMORY_USED',
'TERMINAL_NAME',
'TERMINAL_OPENCL_SUPPORT',
'TERMINAL_PATH',
'TERMINAL_TRADE_ALLOWED',
'TERMINAL_X64',
'THURSDAY',
'TRADE_ACTION_DEAL',
'TRADE_ACTION_MODIFY',
'TRADE_ACTION_PENDING',
'TRADE_ACTION_REMOVE',
'TRADE_ACTION_SLTP',
'TUESDAY',
'UCHAR_MAX',
'UINT_MAX',
'ULONG_MAX',
'USHORT_MAX',
'VOLUME_REAL',
'VOLUME_TICK',
'WEDNESDAY',
'WHOLE_ARRAY',
'WRONG_VALUE',
'clrNONE',
'__DATETIME__',
'__DATE__',
'__FILE__',
'__FUNCSIG__',
'__FUNCTION__',
'__LINE__',
'__MQL4BUILD__',
'__MQLBUILD__',
'__PATH__',
)
colors = (
'AliceBlue',
'AntiqueWhite',
'Aquamarine',
'Aqua',
'Beige',
'Bisque',
'Black',
'BlanchedAlmond',
'BlueViolet',
'Blue',
'Brown',
'BurlyWood',
'CadetBlue',
'Chartreuse',
'Chocolate',
'Coral',
'CornflowerBlue',
'Cornsilk',
'Crimson',
'DarkBlue',
'DarkGoldenrod',
'DarkGray',
'DarkGreen',
'DarkKhaki',
'DarkOliveGreen',
'DarkOrange',
'DarkOrchid',
'DarkSalmon',
'DarkSeaGreen',
'DarkSlateBlue',
'DarkSlateGray',
'DarkTurquoise',
'DarkViolet',
'DeepPink',
'DeepSkyBlue',
'DimGray',
'DodgerBlue',
'FireBrick',
'ForestGreen',
'Gainsboro',
'Goldenrod',
'Gold',
'Gray',
'GreenYellow',
'Green',
'Honeydew',
'HotPink',
'IndianRed',
'Indigo',
'Ivory',
'Khaki',
'LavenderBlush',
'Lavender',
'LawnGreen',
'LemonChiffon',
'LightBlue',
'LightCoral',
'LightCyan',
'LightGoldenrod',
'LightGray',
'LightGreen',
'LightPink',
'LightSalmon',
'LightSeaGreen',
'LightSkyBlue',
'LightSlateGray',
'LightSteelBlue',
'LightYellow',
'LimeGreen',
'Lime',
'Linen',
'Magenta',
'Maroon',
'MediumAquamarine',
'MediumBlue',
'MediumOrchid',
'MediumPurple',
'MediumSeaGreen',
'MediumSlateBlue',
'MediumSpringGreen',
'MediumTurquoise',
'MediumVioletRed',
'MidnightBlue',
'MintCream',
'MistyRose',
'Moccasin',
'NavajoWhite',
'Navy',
'OldLace',
'OliveDrab',
'Olive',
'OrangeRed',
'Orange',
'Orchid',
'PaleGoldenrod',
'PaleGreen',
'PaleTurquoise',
'PaleVioletRed',
'PapayaWhip',
'PeachPuff',
'Peru',
'Pink',
'Plum',
'PowderBlue',
'Purple',
'Red',
'RosyBrown',
'RoyalBlue',
'SaddleBrown',
'Salmon',
'SandyBrown',
'SeaGreen',
'Seashell',
'Sienna',
'Silver',
'SkyBlue',
'SlateBlue',
'SlateGray',
'Snow',
'SpringGreen',
'SteelBlue',
'Tan',
'Teal',
'Thistle',
'Tomato',
'Turquoise',
'Violet',
'Wheat',
'WhiteSmoke',
'White',
'YellowGreen',
'Yellow',
)
keywords = (
'input', '_Digits', '_Point', '_LastError', '_Period', '_RandomSeed',
'_StopFlag', '_Symbol', '_UninitReason', 'Ask', 'Bars', 'Bid',
'Close', 'Digits', 'High', 'Low', 'Open', 'Point', 'Time',
'Volume',
)
c_types = (
'void', 'char', 'uchar', 'bool', 'short', 'ushort', 'int', 'uint',
'color', 'long', 'ulong', 'datetime', 'float', 'double',
'string',
)
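# ----------------------------------------------------------------------
# Illustrative sketch only (not part of the builtin tables above): tuples
# such as `keywords` and `c_types` are meant to be imported by a lexer and
# turned into token rules via pygments.lexer.words(). The class below is
# hypothetical and exists purely to show that consumption pattern; the real
# MQL lexer lives elsewhere in Pygments.
if __name__ == '__main__':
    from pygments.lexer import RegexLexer, words
    from pygments.token import Keyword, Name, Text

    class _DemoMqlLexer(RegexLexer):
        """Hypothetical consumer of the tuples above, for illustration only."""
        tokens = {
            'root': [
                (words(keywords, suffix=r'\b'), Keyword),      # e.g. 'input'
                (words(c_types, suffix=r'\b'), Keyword.Type),  # e.g. 'int'
                (r'\s+', Text),
                (r'\w+', Name),
                (r'.', Text),
            ]
        }

    for token, value in _DemoMqlLexer().get_tokens('int counter = input'):
        print(token, repr(value))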
| 24,713 | Python | 20.087031 | 73 | 0.579533 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/yang.py | """
pygments.lexers.yang
~~~~~~~~~~~~~~~~~~~~
Lexer for the YANG 1.1 modeling language. See :rfc:`7950`.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, bygroups, words
from pygments.token import Text, Token, Name, String, Comment, Number
__all__ = ['YangLexer']
class YangLexer(RegexLexer):
"""
Lexer for YANG, based on RFC7950.
.. versionadded:: 2.7
"""
name = 'YANG'
url = 'https://tools.ietf.org/html/rfc7950/'
aliases = ['yang']
filenames = ['*.yang']
mimetypes = ['application/yang']
#Keywords from RFC7950 ; oriented at BNF style
TOP_STMTS_KEYWORDS = ("module", "submodule")
MODULE_HEADER_STMT_KEYWORDS = ("belongs-to", "namespace", "prefix", "yang-version")
META_STMT_KEYWORDS = ("contact", "description", "organization",
"reference", "revision")
LINKAGE_STMTS_KEYWORDS = ("import", "include", "revision-date")
BODY_STMT_KEYWORDS = ("action", "argument", "augment", "deviation",
"extension", "feature", "grouping", "identity",
"if-feature", "input", "notification", "output",
"rpc", "typedef")
DATA_DEF_STMT_KEYWORDS = ("anydata", "anyxml", "case", "choice",
"config", "container", "deviate", "leaf",
"leaf-list", "list", "must", "presence",
"refine", "uses", "when")
TYPE_STMT_KEYWORDS = ("base", "bit", "default", "enum", "error-app-tag",
"error-message", "fraction-digits", "length",
"max-elements", "min-elements", "modifier",
"ordered-by", "path", "pattern", "position",
"range", "require-instance", "status", "type",
"units", "value", "yin-element")
LIST_STMT_KEYWORDS = ("key", "mandatory", "unique")
#RFC7950 other keywords
CONSTANTS_KEYWORDS = ("add", "current", "delete", "deprecated", "false",
"invert-match", "max", "min", "not-supported",
"obsolete", "replace", "true", "unbounded", "user")
#RFC7950 Built-In Types
TYPES = ("binary", "bits", "boolean", "decimal64", "empty", "enumeration",
"identityref", "instance-identifier", "int16", "int32", "int64",
"int8", "leafref", "string", "uint16", "uint32", "uint64",
"uint8", "union")
suffix_re_pattern = r'(?=[^\w\-:])'
tokens = {
'comments': [
(r'[^*/]', Comment),
(r'/\*', Comment, '#push'),
(r'\*/', Comment, '#pop'),
(r'[*/]', Comment),
],
"root": [
(r'\s+', Text.Whitespace),
(r'[{};]+', Token.Punctuation),
(r'(?<![\-\w])(and|or|not|\+|\.)(?![\-\w])', Token.Operator),
(r'"(?:\\"|[^"])*?"', String.Double),
(r"'(?:\\'|[^'])*?'", String.Single),
(r'/\*', Comment, 'comments'),
(r'//.*?$', Comment),
#match BNF stmt for `node-identifier` with [ prefix ":"]
(r'(?:^|(?<=[\s{};]))([\w.-]+)(:)([\w.-]+)(?=[\s{};])',
bygroups(Name.Namespace, Token.Punctuation, Name.Variable)),
#match BNF stmt `date-arg-str`
(r'([0-9]{4}\-[0-9]{2}\-[0-9]{2})(?=[\s{};])', Name.Label),
(r'([0-9]+\.[0-9]+)(?=[\s{};])', Number.Float),
(r'([0-9]+)(?=[\s{};])', Number.Integer),
(words(TOP_STMTS_KEYWORDS, suffix=suffix_re_pattern), Token.Keyword),
(words(MODULE_HEADER_STMT_KEYWORDS, suffix=suffix_re_pattern), Token.Keyword),
(words(META_STMT_KEYWORDS, suffix=suffix_re_pattern), Token.Keyword),
(words(LINKAGE_STMTS_KEYWORDS, suffix=suffix_re_pattern), Token.Keyword),
(words(BODY_STMT_KEYWORDS, suffix=suffix_re_pattern), Token.Keyword),
(words(DATA_DEF_STMT_KEYWORDS, suffix=suffix_re_pattern), Token.Keyword),
(words(TYPE_STMT_KEYWORDS, suffix=suffix_re_pattern), Token.Keyword),
(words(LIST_STMT_KEYWORDS, suffix=suffix_re_pattern), Token.Keyword),
(words(TYPES, suffix=suffix_re_pattern), Name.Class),
(words(CONSTANTS_KEYWORDS, suffix=suffix_re_pattern), Name.Class),
(r'[^;{}\s\'"]+', Name.Variable),
]
}
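# ----------------------------------------------------------------------
# Minimal usage sketch, kept behind a __main__ guard so importing the module
# stays side-effect free. Assumes a standard Pygments installation; the YANG
# snippet is illustrative only and does not come from any real data model.
if __name__ == '__main__':
    from pygments import highlight
    from pygments.formatters import TerminalFormatter

    sample = (
        'module example-system {\n'
        '  namespace "urn:example:system";\n'
        '  prefix sys;\n'
        '  leaf hostname { type string; }\n'
        '}\n'
    )
    print(highlight(sample, YangLexer(), TerminalFormatter()))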
| 4,500 | Python | 41.866666 | 90 | 0.505556 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/typoscript.py | """
pygments.lexers.typoscript
~~~~~~~~~~~~~~~~~~~~~~~~~~
Lexers for TypoScript
`TypoScriptLexer`
A TypoScript lexer.
`TypoScriptCssDataLexer`
Lexer that highlights markers, constants and registers within css.
`TypoScriptHtmlDataLexer`
Lexer that highlights markers, constants and registers within html tags.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, include, bygroups, using
from pygments.token import Text, Comment, Name, String, Number, \
Operator, Punctuation
__all__ = ['TypoScriptLexer', 'TypoScriptCssDataLexer', 'TypoScriptHtmlDataLexer']
class TypoScriptCssDataLexer(RegexLexer):
"""
Lexer that highlights markers, constants and registers within css blocks.
.. versionadded:: 2.2
"""
name = 'TypoScriptCssData'
aliases = ['typoscriptcssdata']
tokens = {
'root': [
# marker: ###MARK###
(r'(.*)(###\w+###)(.*)', bygroups(String, Name.Constant, String)),
# constant: {$some.constant}
(r'(\{)(\$)((?:[\w\-]+\.)*)([\w\-]+)(\})',
bygroups(String.Symbol, Operator, Name.Constant,
Name.Constant, String.Symbol)), # constant
# constant: {register:somevalue}
(r'(.*)(\{)([\w\-]+)(\s*:\s*)([\w\-]+)(\})(.*)',
bygroups(String, String.Symbol, Name.Constant, Operator,
Name.Constant, String.Symbol, String)), # constant
# whitespace
(r'\s+', Text),
# comments
(r'/\*(?:(?!\*/).)*\*/', Comment),
(r'(?<!(#|\'|"))(?:#(?!(?:[a-fA-F0-9]{6}|[a-fA-F0-9]{3}))[^\n#]+|//[^\n]*)',
Comment),
# other
(r'[<>,:=.*%+|]', String),
(r'[\w"\-!/&;(){}]+', String),
]
}
class TypoScriptHtmlDataLexer(RegexLexer):
"""
Lexer that highlights markers, constants and registers within html tags.
.. versionadded:: 2.2
"""
name = 'TypoScriptHtmlData'
aliases = ['typoscripthtmldata']
tokens = {
'root': [
# INCLUDE_TYPOSCRIPT
(r'(INCLUDE_TYPOSCRIPT)', Name.Class),
# Language label or extension resource FILE:... or LLL:... or EXT:...
(r'(EXT|FILE|LLL):[^}\n"]*', String),
# marker: ###MARK###
(r'(.*)(###\w+###)(.*)', bygroups(String, Name.Constant, String)),
# constant: {$some.constant}
(r'(\{)(\$)((?:[\w\-]+\.)*)([\w\-]+)(\})',
bygroups(String.Symbol, Operator, Name.Constant,
Name.Constant, String.Symbol)), # constant
# constant: {register:somevalue}
(r'(.*)(\{)([\w\-]+)(\s*:\s*)([\w\-]+)(\})(.*)',
bygroups(String, String.Symbol, Name.Constant, Operator,
Name.Constant, String.Symbol, String)), # constant
# whitespace
(r'\s+', Text),
# other
(r'[<>,:=.*%+|]', String),
(r'[\w"\-!/&;(){}#]+', String),
]
}
class TypoScriptLexer(RegexLexer):
"""
Lexer for TypoScript code.
.. versionadded:: 2.2
"""
name = 'TypoScript'
url = 'http://docs.typo3.org/typo3cms/TyposcriptReference/'
aliases = ['typoscript']
filenames = ['*.typoscript']
mimetypes = ['text/x-typoscript']
flags = re.DOTALL | re.MULTILINE
tokens = {
'root': [
include('comment'),
include('constant'),
include('html'),
include('label'),
include('whitespace'),
include('keywords'),
include('punctuation'),
include('operator'),
include('structure'),
include('literal'),
include('other'),
],
'keywords': [
# Conditions
(r'(?i)(\[)(browser|compatVersion|dayofmonth|dayofweek|dayofyear|'
r'device|ELSE|END|GLOBAL|globalString|globalVar|hostname|hour|IP|'
r'language|loginUser|loginuser|minute|month|page|PIDinRootline|'
r'PIDupinRootline|system|treeLevel|useragent|userFunc|usergroup|'
r'version)([^\]]*)(\])',
bygroups(String.Symbol, Name.Constant, Text, String.Symbol)),
# Functions
(r'(?=[\w\-])(HTMLparser|HTMLparser_tags|addParams|cache|encapsLines|'
r'filelink|if|imageLinkWrap|imgResource|makelinks|numRows|numberFormat|'
r'parseFunc|replacement|round|select|split|stdWrap|strPad|tableStyle|'
r'tags|textStyle|typolink)(?![\w\-])', Name.Function),
# Toplevel objects and _*
(r'(?:(=?\s*<?\s+|^\s*))(cObj|field|config|content|constants|FEData|'
r'file|frameset|includeLibs|lib|page|plugin|register|resources|sitemap|'
r'sitetitle|styles|temp|tt_[^:.\s]*|types|xmlnews|INCLUDE_TYPOSCRIPT|'
r'_CSS_DEFAULT_STYLE|_DEFAULT_PI_VARS|_LOCAL_LANG)(?![\w\-])',
bygroups(Operator, Name.Builtin)),
# Content objects
(r'(?=[\w\-])(CASE|CLEARGIF|COA|COA_INT|COBJ_ARRAY|COLUMNS|CONTENT|'
r'CTABLE|EDITPANEL|FILE|FILES|FLUIDTEMPLATE|FORM|HMENU|HRULER|HTML|'
r'IMAGE|IMGTEXT|IMG_RESOURCE|LOAD_REGISTER|MEDIA|MULTIMEDIA|OTABLE|'
r'PAGE|QTOBJECT|RECORDS|RESTORE_REGISTER|SEARCHRESULT|SVG|SWFOBJECT|'
r'TEMPLATE|TEXT|USER|USER_INT)(?![\w\-])', Name.Class),
# Menu states
(r'(?=[\w\-])(ACTIFSUBRO|ACTIFSUB|ACTRO|ACT|CURIFSUBRO|CURIFSUB|CURRO|'
r'CUR|IFSUBRO|IFSUB|NO|SPC|USERDEF1RO|USERDEF1|USERDEF2RO|USERDEF2|'
r'USRRO|USR)', Name.Class),
# Menu objects
(r'(?=[\w\-])(GMENU_FOLDOUT|GMENU_LAYERS|GMENU|IMGMENUITEM|IMGMENU|'
r'JSMENUITEM|JSMENU|TMENUITEM|TMENU_LAYERS|TMENU)', Name.Class),
# PHP objects
(r'(?=[\w\-])(PHP_SCRIPT(_EXT|_INT)?)', Name.Class),
(r'(?=[\w\-])(userFunc)(?![\w\-])', Name.Function),
],
'whitespace': [
(r'\s+', Text),
],
'html': [
(r'<\S[^\n>]*>', using(TypoScriptHtmlDataLexer)),
(r'&[^;\n]*;', String),
(r'(?s)(_CSS_DEFAULT_STYLE)(\s*)(\()(.*(?=\n\)))',
bygroups(Name.Class, Text, String.Symbol, using(TypoScriptCssDataLexer))),
],
'literal': [
(r'0x[0-9A-Fa-f]+t?', Number.Hex),
# (r'[0-9]*\.[0-9]+([eE][0-9]+)?[fd]?\s*(?:[^=])', Number.Float),
(r'[0-9]+', Number.Integer),
(r'(###\w+###)', Name.Constant),
],
'label': [
# Language label or extension resource FILE:... or LLL:... or EXT:...
(r'(EXT|FILE|LLL):[^}\n"]*', String),
# Path to a resource
(r'(?![^\w\-])([\w\-]+(?:/[\w\-]+)+/?)(\S*\n)',
bygroups(String, String)),
],
'punctuation': [
(r'[,.]', Punctuation),
],
'operator': [
(r'[<>,:=.*%+|]', Operator),
],
'structure': [
# Brackets and braces
(r'[{}()\[\]\\]', String.Symbol),
],
'constant': [
# Constant: {$some.constant}
(r'(\{)(\$)((?:[\w\-]+\.)*)([\w\-]+)(\})',
bygroups(String.Symbol, Operator, Name.Constant,
Name.Constant, String.Symbol)), # constant
# Constant: {register:somevalue}
(r'(\{)([\w\-]+)(\s*:\s*)([\w\-]+)(\})',
bygroups(String.Symbol, Name.Constant, Operator,
Name.Constant, String.Symbol)), # constant
# Hex color: #ff0077
(r'(#[a-fA-F0-9]{6}\b|#[a-fA-F0-9]{3}\b)', String.Char)
],
'comment': [
(r'(?<!(#|\'|"))(?:#(?!(?:[a-fA-F0-9]{6}|[a-fA-F0-9]{3}))[^\n#]+|//[^\n]*)',
Comment),
(r'/\*(?:(?!\*/).)*\*/', Comment),
(r'(\s*#\s*\n)', Comment),
],
'other': [
(r'[\w"\-!/&;]+', Text),
],
}
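# ----------------------------------------------------------------------
# Minimal usage sketch, behind a __main__ guard so importing stays side-effect
# free. Assumes a standard Pygments installation; the TypoScript snippet is
# illustrative only.
if __name__ == '__main__':
    from pygments import highlight
    from pygments.formatters import TerminalFormatter

    sample = 'page = PAGE\npage.10 = TEXT\npage.10.value = Hello TYPO3\n'
    print(highlight(sample, TypoScriptLexer(), TerminalFormatter()))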
| 8,207 | Python | 36.651376 | 88 | 0.485805 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/sophia.py | """
pygments.lexers.sophia
~~~~~~~~~~~~~~~~~~~~~~
Lexer for Sophia.
Derived from pygments/lexers/reason.py.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, include, default, words
from pygments.token import Comment, Keyword, Name, Number, Operator, \
Punctuation, String, Text
__all__ = ['SophiaLexer']
class SophiaLexer(RegexLexer):
"""
A Sophia lexer.
.. versionadded:: 2.11
"""
name = 'Sophia'
aliases = ['sophia']
filenames = ['*.aes']
mimetypes = []
keywords = (
'contract', 'include', 'let', 'switch', 'type', 'record', 'datatype',
'if', 'elif', 'else', 'function', 'stateful', 'payable', 'public',
'entrypoint', 'private', 'indexed', 'namespace', 'interface', 'main',
'using', 'as', 'for', 'hiding',
)
builtins = ('state', 'put', 'abort', 'require')
word_operators = ('mod', 'band', 'bor', 'bxor', 'bnot')
primitive_types = ('int', 'address', 'bool', 'bits', 'bytes', 'string',
'list', 'option', 'char', 'unit', 'map', 'event',
'hash', 'signature', 'oracle', 'oracle_query')
tokens = {
'escape-sequence': [
(r'\\[\\"\'ntbr]', String.Escape),
(r'\\[0-9]{3}', String.Escape),
(r'\\x[0-9a-fA-F]{2}', String.Escape),
],
'root': [
(r'\s+', Text.Whitespace),
(r'(true|false)\b', Keyword.Constant),
(r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Class, 'dotted'),
(r'\b([A-Z][\w\']*)', Name.Function),
(r'//.*?\n', Comment.Single),
(r'\/\*(?!/)', Comment.Multiline, 'comment'),
(r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
(r'#[\da-fA-F][\da-fA-F_]*', Name.Label),
(r'\d[\d_]*', Number.Integer),
(words(keywords, suffix=r'\b'), Keyword),
(words(builtins, suffix=r'\b'), Name.Builtin),
(words(word_operators, prefix=r'\b', suffix=r'\b'), Operator.Word),
(words(primitive_types, prefix=r'\b', suffix=r'\b'), Keyword.Type),
(r'[=!<>+\\*/:&|?~@^-]', Operator.Word),
(r'[.;:{}(),\[\]]', Punctuation),
(r"(ak_|ok_|oq_|ct_)[\w']*", Name.Label),
(r"[^\W\d][\w']*", Name),
(r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
String.Char),
(r"'.'", String.Char),
(r"'[a-z][\w]*", Name.Variable),
(r'"', String.Double, 'string')
],
'comment': [
(r'[^/*]+', Comment.Multiline),
(r'\/\*', Comment.Multiline, '#push'),
(r'\*\/', Comment.Multiline, '#pop'),
(r'\*', Comment.Multiline),
],
'string': [
(r'[^\\"]+', String.Double),
include('escape-sequence'),
(r'\\\n', String.Double),
(r'"', String.Double, '#pop'),
],
'dotted': [
(r'\s+', Text),
(r'\.', Punctuation),
(r'[A-Z][\w\']*(?=\s*\.)', Name.Function),
(r'[A-Z][\w\']*', Name.Function, '#pop'),
(r'[a-z_][\w\']*', Name, '#pop'),
default('#pop'),
],
}
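# ----------------------------------------------------------------------
# Minimal usage sketch, behind a __main__ guard so importing stays side-effect
# free. Assumes a standard Pygments installation; the Sophia contract below is
# illustrative only.
if __name__ == '__main__':
    from pygments import highlight
    from pygments.formatters import TerminalFormatter

    sample = (
        'contract Counter =\n'
        '  record state = { count : int }\n'
        '  entrypoint init() = { count = 0 }\n'
    )
    print(highlight(sample, SophiaLexer(), TerminalFormatter()))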
| 3,330 | Python | 31.028846 | 79 | 0.439339 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/int_fiction.py | """
pygments.lexers.int_fiction
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Lexers for interactive fiction languages.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, include, bygroups, using, \
this, default, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Error, Generic
__all__ = ['Inform6Lexer', 'Inform6TemplateLexer', 'Inform7Lexer',
'Tads3Lexer']
class Inform6Lexer(RegexLexer):
"""
For Inform 6 source code.
.. versionadded:: 2.0
"""
name = 'Inform 6'
url = 'http://inform-fiction.org/'
aliases = ['inform6', 'i6']
filenames = ['*.inf']
flags = re.MULTILINE | re.DOTALL
_name = r'[a-zA-Z_]\w*'
# Inform 7 maps these four character classes to their ASCII
# equivalents. To support Inform 6 inclusions within Inform 7,
# Inform6Lexer maps them too.
_dash = '\\-\u2010-\u2014'
_dquote = '"\u201c\u201d'
_squote = "'\u2018\u2019"
_newline = '\\n\u0085\u2028\u2029'
tokens = {
'root': [
(r'\A(!%%[^%s]*[%s])+' % (_newline, _newline), Comment.Preproc,
'directive'),
default('directive')
],
'_whitespace': [
(r'\s+', Text),
(r'![^%s]*' % _newline, Comment.Single)
],
'default': [
include('_whitespace'),
(r'\[', Punctuation, 'many-values'), # Array initialization
(r':|(?=;)', Punctuation, '#pop'),
(r'<', Punctuation), # Second angle bracket in an action statement
default(('expression', '_expression'))
],
# Expressions
'_expression': [
include('_whitespace'),
(r'(?=sp\b)', Text, '#pop'),
(r'(?=[%s%s$0-9#a-zA-Z_])' % (_dquote, _squote), Text,
('#pop', 'value')),
(r'\+\+|[%s]{1,2}(?!>)|~~?' % _dash, Operator),
(r'(?=[()\[%s,?@{:;])' % _dash, Text, '#pop')
],
'expression': [
include('_whitespace'),
(r'\(', Punctuation, ('expression', '_expression')),
(r'\)', Punctuation, '#pop'),
(r'\[', Punctuation, ('#pop', 'statements', 'locals')),
(r'>(?=(\s+|(![^%s]*))*[>;])' % _newline, Punctuation),
(r'\+\+|[%s]{2}(?!>)' % _dash, Operator),
(r',', Punctuation, '_expression'),
(r'&&?|\|\|?|[=~><]?=|[%s]{1,2}>?|\.\.?[&#]?|::|[<>+*/%%]' % _dash,
Operator, '_expression'),
(r'(has|hasnt|in|notin|ofclass|or|provides)\b', Operator.Word,
'_expression'),
(r'sp\b', Name),
(r'\?~?', Name.Label, 'label?'),
(r'[@{]', Error),
default('#pop')
],
'_assembly-expression': [
(r'\(', Punctuation, ('#push', '_expression')),
(r'[\[\]]', Punctuation),
(r'[%s]>' % _dash, Punctuation, '_expression'),
(r'sp\b', Keyword.Pseudo),
(r';', Punctuation, '#pop:3'),
include('expression')
],
'_for-expression': [
(r'\)', Punctuation, '#pop:2'),
(r':', Punctuation, '#pop'),
include('expression')
],
'_keyword-expression': [
(r'(from|near|to)\b', Keyword, '_expression'),
include('expression')
],
'_list-expression': [
(r',', Punctuation, '#pop'),
include('expression')
],
'_object-expression': [
(r'has\b', Keyword.Declaration, '#pop'),
include('_list-expression')
],
# Values
'value': [
include('_whitespace'),
# Strings
(r'[%s][^@][%s]' % (_squote, _squote), String.Char, '#pop'),
(r'([%s])(@\{[0-9a-fA-F]*\})([%s])' % (_squote, _squote),
bygroups(String.Char, String.Escape, String.Char), '#pop'),
(r'([%s])(@.{2})([%s])' % (_squote, _squote),
bygroups(String.Char, String.Escape, String.Char), '#pop'),
(r'[%s]' % _squote, String.Single, ('#pop', 'dictionary-word')),
(r'[%s]' % _dquote, String.Double, ('#pop', 'string')),
# Numbers
(r'\$[<>]?[+%s][0-9]*\.?[0-9]*([eE][+%s]?[0-9]+)?' % (_dash, _dash),
Number.Float, '#pop'),
(r'\$[0-9a-fA-F]+', Number.Hex, '#pop'),
(r'\$\$[01]+', Number.Bin, '#pop'),
(r'[0-9]+', Number.Integer, '#pop'),
# Values prefixed by hashes
(r'(##|#a\$)(%s)' % _name, bygroups(Operator, Name), '#pop'),
(r'(#g\$)(%s)' % _name,
bygroups(Operator, Name.Variable.Global), '#pop'),
(r'#[nw]\$', Operator, ('#pop', 'obsolete-dictionary-word')),
(r'(#r\$)(%s)' % _name, bygroups(Operator, Name.Function), '#pop'),
(r'#', Name.Builtin, ('#pop', 'system-constant')),
# System functions
(words((
'child', 'children', 'elder', 'eldest', 'glk', 'indirect', 'metaclass',
'parent', 'random', 'sibling', 'younger', 'youngest'), suffix=r'\b'),
Name.Builtin, '#pop'),
# Metaclasses
(r'(?i)(Class|Object|Routine|String)\b', Name.Builtin, '#pop'),
# Veneer routines
(words((
'Box__Routine', 'CA__Pr', 'CDefArt', 'CInDefArt', 'Cl__Ms',
'Copy__Primitive', 'CP__Tab', 'DA__Pr', 'DB__Pr', 'DefArt', 'Dynam__String',
'EnglishNumber', 'Glk__Wrap', 'IA__Pr', 'IB__Pr', 'InDefArt', 'Main__',
'Meta__class', 'OB__Move', 'OB__Remove', 'OC__Cl', 'OP__Pr', 'Print__Addr',
'Print__PName', 'PrintShortName', 'RA__Pr', 'RA__Sc', 'RL__Pr', 'R_Process',
'RT__ChG', 'RT__ChGt', 'RT__ChLDB', 'RT__ChLDW', 'RT__ChPR', 'RT__ChPrintA',
'RT__ChPrintC', 'RT__ChPrintO', 'RT__ChPrintS', 'RT__ChPS', 'RT__ChR',
'RT__ChSTB', 'RT__ChSTW', 'RT__ChT', 'RT__Err', 'RT__TrPS', 'RV__Pr',
'Symb__Tab', 'Unsigned__Compare', 'WV__Pr', 'Z__Region'),
prefix='(?i)', suffix=r'\b'),
Name.Builtin, '#pop'),
# Other built-in symbols
(words((
'call', 'copy', 'create', 'DEBUG', 'destroy', 'DICT_CHAR_SIZE',
'DICT_ENTRY_BYTES', 'DICT_IS_UNICODE', 'DICT_WORD_SIZE', 'DOUBLE_HI_INFINITY',
'DOUBLE_HI_NAN', 'DOUBLE_HI_NINFINITY', 'DOUBLE_LO_INFINITY', 'DOUBLE_LO_NAN',
'DOUBLE_LO_NINFINITY', 'false', 'FLOAT_INFINITY', 'FLOAT_NAN', 'FLOAT_NINFINITY',
'GOBJFIELD_CHAIN', 'GOBJFIELD_CHILD', 'GOBJFIELD_NAME', 'GOBJFIELD_PARENT',
'GOBJFIELD_PROPTAB', 'GOBJFIELD_SIBLING', 'GOBJ_EXT_START',
'GOBJ_TOTAL_LENGTH', 'Grammar__Version', 'INDIV_PROP_START', 'INFIX',
'infix__watching', 'MODULE_MODE', 'name', 'nothing', 'NUM_ATTR_BYTES', 'print',
'print_to_array', 'recreate', 'remaining', 'self', 'sender', 'STRICT_MODE',
'sw__var', 'sys__glob0', 'sys__glob1', 'sys__glob2', 'sys_statusline_flag',
'TARGET_GLULX', 'TARGET_ZCODE', 'temp__global2', 'temp__global3',
'temp__global4', 'temp_global', 'true', 'USE_MODULES', 'WORDSIZE'),
prefix='(?i)', suffix=r'\b'),
Name.Builtin, '#pop'),
# Other values
(_name, Name, '#pop')
],
'value?': [
include('value'),
default('#pop')
],
# Strings
'dictionary-word': [
(r'[~^]+', String.Escape),
(r'[^~^\\@({%s]+' % _squote, String.Single),
(r'[({]', String.Single),
(r'@\{[0-9a-fA-F]*\}', String.Escape),
(r'@.{2}', String.Escape),
(r'[%s]' % _squote, String.Single, '#pop')
],
'string': [
(r'[~^]+', String.Escape),
(r'[^~^\\@({%s]+' % _dquote, String.Double),
(r'[({]', String.Double),
(r'\\', String.Escape),
(r'@(\\\s*[%s]\s*)*@((\\\s*[%s]\s*)*[0-9])*' %
(_newline, _newline), String.Escape),
(r'@(\\\s*[%s]\s*)*[({]((\\\s*[%s]\s*)*[0-9a-zA-Z_])*'
r'(\\\s*[%s]\s*)*[)}]' % (_newline, _newline, _newline),
String.Escape),
(r'@(\\\s*[%s]\s*)*.(\\\s*[%s]\s*)*.' % (_newline, _newline),
String.Escape),
(r'[%s]' % _dquote, String.Double, '#pop')
],
'plain-string': [
(r'[^~^\\({\[\]%s]+' % _dquote, String.Double),
(r'[~^({\[\]]', String.Double),
(r'\\', String.Escape),
(r'[%s]' % _dquote, String.Double, '#pop')
],
# Names
'_constant': [
include('_whitespace'),
(_name, Name.Constant, '#pop'),
include('value')
],
'constant*': [
include('_whitespace'),
(r',', Punctuation),
(r'=', Punctuation, 'value?'),
(_name, Name.Constant, 'value?'),
default('#pop')
],
'_global': [
include('_whitespace'),
(_name, Name.Variable.Global, '#pop'),
include('value')
],
'label?': [
include('_whitespace'),
(_name, Name.Label, '#pop'),
default('#pop')
],
'variable?': [
include('_whitespace'),
(_name, Name.Variable, '#pop'),
default('#pop')
],
# Values after hashes
'obsolete-dictionary-word': [
(r'\S\w*', String.Other, '#pop')
],
'system-constant': [
include('_whitespace'),
(_name, Name.Builtin, '#pop')
],
# Directives
'directive': [
include('_whitespace'),
(r'#', Punctuation),
(r';', Punctuation, '#pop'),
(r'\[', Punctuation,
('default', 'statements', 'locals', 'routine-name?')),
(words((
'abbreviate', 'endif', 'dictionary', 'ifdef', 'iffalse', 'ifndef', 'ifnot',
'iftrue', 'ifv3', 'ifv5', 'release', 'serial', 'switches', 'system_file',
'version'), prefix='(?i)', suffix=r'\b'),
Keyword, 'default'),
(r'(?i)(array|global)\b', Keyword,
('default', 'directive-keyword?', '_global')),
(r'(?i)attribute\b', Keyword, ('default', 'alias?', '_constant')),
(r'(?i)class\b', Keyword,
('object-body', 'duplicates', 'class-name')),
(r'(?i)(constant|default)\b', Keyword,
('default', 'constant*')),
(r'(?i)(end\b)(.*)', bygroups(Keyword, Text)),
(r'(?i)(extend|verb)\b', Keyword, 'grammar'),
(r'(?i)fake_action\b', Keyword, ('default', '_constant')),
(r'(?i)import\b', Keyword, 'manifest'),
(r'(?i)(include|link|origsource)\b', Keyword,
('default', 'before-plain-string?')),
(r'(?i)(lowstring|undef)\b', Keyword, ('default', '_constant')),
(r'(?i)message\b', Keyword, ('default', 'diagnostic')),
(r'(?i)(nearby|object)\b', Keyword,
('object-body', '_object-head')),
(r'(?i)property\b', Keyword,
('default', 'alias?', '_constant', 'property-keyword*')),
(r'(?i)replace\b', Keyword,
('default', 'routine-name?', 'routine-name?')),
(r'(?i)statusline\b', Keyword, ('default', 'directive-keyword?')),
(r'(?i)stub\b', Keyword, ('default', 'routine-name?')),
(r'(?i)trace\b', Keyword,
('default', 'trace-keyword?', 'trace-keyword?')),
(r'(?i)zcharacter\b', Keyword,
('default', 'directive-keyword?', 'directive-keyword?')),
(_name, Name.Class, ('object-body', '_object-head'))
],
# [, Replace, Stub
'routine-name?': [
include('_whitespace'),
(_name, Name.Function, '#pop'),
default('#pop')
],
'locals': [
include('_whitespace'),
(r';', Punctuation, '#pop'),
(r'\*', Punctuation),
(r'"', String.Double, 'plain-string'),
(_name, Name.Variable)
],
# Array
'many-values': [
include('_whitespace'),
(r';', Punctuation),
(r'\]', Punctuation, '#pop'),
(r':', Error),
default(('expression', '_expression'))
],
# Attribute, Property
'alias?': [
include('_whitespace'),
(r'alias\b', Keyword, ('#pop', '_constant')),
default('#pop')
],
# Class, Object, Nearby
'class-name': [
include('_whitespace'),
(r'(?=[,;]|(class|has|private|with)\b)', Text, '#pop'),
(_name, Name.Class, '#pop')
],
'duplicates': [
include('_whitespace'),
(r'\(', Punctuation, ('#pop', 'expression', '_expression')),
default('#pop')
],
'_object-head': [
(r'[%s]>' % _dash, Punctuation),
(r'(class|has|private|with)\b', Keyword.Declaration, '#pop'),
include('_global')
],
'object-body': [
include('_whitespace'),
(r';', Punctuation, '#pop:2'),
(r',', Punctuation),
(r'class\b', Keyword.Declaration, 'class-segment'),
(r'(has|private|with)\b', Keyword.Declaration),
(r':', Error),
default(('_object-expression', '_expression'))
],
'class-segment': [
include('_whitespace'),
(r'(?=[,;]|(class|has|private|with)\b)', Text, '#pop'),
(_name, Name.Class),
default('value')
],
# Extend, Verb
'grammar': [
include('_whitespace'),
(r'=', Punctuation, ('#pop', 'default')),
(r'\*', Punctuation, ('#pop', 'grammar-line')),
default('_directive-keyword')
],
'grammar-line': [
include('_whitespace'),
(r';', Punctuation, '#pop'),
(r'[/*]', Punctuation),
(r'[%s]>' % _dash, Punctuation, 'value'),
(r'(noun|scope)\b', Keyword, '=routine'),
default('_directive-keyword')
],
'=routine': [
include('_whitespace'),
(r'=', Punctuation, 'routine-name?'),
default('#pop')
],
# Import
'manifest': [
include('_whitespace'),
(r';', Punctuation, '#pop'),
(r',', Punctuation),
(r'(?i)global\b', Keyword, '_global'),
default('_global')
],
# Include, Link, Message
'diagnostic': [
include('_whitespace'),
(r'[%s]' % _dquote, String.Double, ('#pop', 'message-string')),
default(('#pop', 'before-plain-string?', 'directive-keyword?'))
],
'before-plain-string?': [
include('_whitespace'),
(r'[%s]' % _dquote, String.Double, ('#pop', 'plain-string')),
default('#pop')
],
'message-string': [
(r'[~^]+', String.Escape),
include('plain-string')
],
# Keywords used in directives
'_directive-keyword!': [
include('_whitespace'),
(words((
'additive', 'alias', 'buffer', 'class', 'creature', 'data', 'error', 'fatalerror',
'first', 'has', 'held', 'individual', 'initial', 'initstr', 'last', 'long', 'meta',
'multi', 'multiexcept', 'multiheld', 'multiinside', 'noun', 'number', 'only',
'private', 'replace', 'reverse', 'scope', 'score', 'special', 'string', 'table',
'terminating', 'time', 'topic', 'warning', 'with'), suffix=r'\b'),
Keyword, '#pop'),
(r'static\b', Keyword),
(r'[%s]{1,2}>|[+=]' % _dash, Punctuation, '#pop')
],
'_directive-keyword': [
include('_directive-keyword!'),
include('value')
],
'directive-keyword?': [
include('_directive-keyword!'),
default('#pop')
],
'property-keyword*': [
include('_whitespace'),
(words(('additive', 'individual', 'long'),
suffix=r'\b(?=(\s*|(![^%s]*[%s]))*[_a-zA-Z])' % (_newline, _newline)),
Keyword),
default('#pop')
],
'trace-keyword?': [
include('_whitespace'),
(words((
'assembly', 'dictionary', 'expressions', 'lines', 'linker',
'objects', 'off', 'on', 'symbols', 'tokens', 'verbs'), suffix=r'\b'),
Keyword, '#pop'),
default('#pop')
],
# Statements
'statements': [
include('_whitespace'),
(r'\]', Punctuation, '#pop'),
(r'[;{}]', Punctuation),
(words((
'box', 'break', 'continue', 'default', 'give', 'inversion',
'new_line', 'quit', 'read', 'remove', 'return', 'rfalse', 'rtrue',
'spaces', 'string', 'until'), suffix=r'\b'),
Keyword, 'default'),
(r'(do|else)\b', Keyword),
(r'(font|style)\b', Keyword,
('default', 'miscellaneous-keyword?')),
(r'for\b', Keyword, ('for', '(?')),
(r'(if|switch|while)', Keyword,
('expression', '_expression', '(?')),
(r'(jump|save|restore)\b', Keyword, ('default', 'label?')),
(r'objectloop\b', Keyword,
('_keyword-expression', 'variable?', '(?')),
(r'print(_ret)?\b|(?=[%s])' % _dquote, Keyword, 'print-list'),
(r'\.', Name.Label, 'label?'),
(r'@', Keyword, 'opcode'),
(r'#(?![agrnw]\$|#)', Punctuation, 'directive'),
(r'<', Punctuation, 'default'),
(r'move\b', Keyword,
('default', '_keyword-expression', '_expression')),
default(('default', '_keyword-expression', '_expression'))
],
'miscellaneous-keyword?': [
include('_whitespace'),
(r'(bold|fixed|from|near|off|on|reverse|roman|to|underline)\b',
Keyword, '#pop'),
(r'(a|A|an|address|char|name|number|object|property|string|the|'
r'The)\b(?=(\s+|(![^%s]*))*\))' % _newline, Keyword.Pseudo,
'#pop'),
(r'%s(?=(\s+|(![^%s]*))*\))' % (_name, _newline), Name.Function,
'#pop'),
default('#pop')
],
'(?': [
include('_whitespace'),
(r'\(', Punctuation, '#pop'),
default('#pop')
],
'for': [
include('_whitespace'),
(r';', Punctuation, ('_for-expression', '_expression')),
default(('_for-expression', '_expression'))
],
'print-list': [
include('_whitespace'),
(r';', Punctuation, '#pop'),
(r':', Error),
default(('_list-expression', '_expression', '_list-expression', 'form'))
],
'form': [
include('_whitespace'),
(r'\(', Punctuation, ('#pop', 'miscellaneous-keyword?')),
default('#pop')
],
# Assembly
'opcode': [
include('_whitespace'),
(r'[%s]' % _dquote, String.Double, ('operands', 'plain-string')),
(_name, Keyword, 'operands')
],
'operands': [
(r':', Error),
default(('_assembly-expression', '_expression'))
]
}
def get_tokens_unprocessed(self, text):
# 'in' is either a keyword or an operator.
# If the token two tokens after 'in' is ')', 'in' is a keyword:
# objectloop(a in b)
# Otherwise, it is an operator:
# objectloop(a in b && true)
objectloop_queue = []
objectloop_token_count = -1
previous_token = None
for index, token, value in RegexLexer.get_tokens_unprocessed(self,
text):
if previous_token is Name.Variable and value == 'in':
objectloop_queue = [[index, token, value]]
objectloop_token_count = 2
elif objectloop_token_count > 0:
if token not in Comment and token not in Text:
objectloop_token_count -= 1
objectloop_queue.append((index, token, value))
else:
if objectloop_token_count == 0:
if objectloop_queue[-1][2] == ')':
objectloop_queue[0][1] = Keyword
while objectloop_queue:
yield objectloop_queue.pop(0)
objectloop_token_count = -1
yield index, token, value
if token not in Comment and token not in Text:
previous_token = token
while objectloop_queue:
yield objectloop_queue.pop(0)
def analyse_text(text):
"""We try to find a keyword which seem relatively common, unfortunately
there is a decent overlap with Smalltalk keywords otherwise here.."""
result = 0
        if re.search(r'\borigsource\b', text, re.IGNORECASE):
result += 0.05
return result
class Inform7Lexer(RegexLexer):
"""
For Inform 7 source code.
.. versionadded:: 2.0
"""
name = 'Inform 7'
url = 'http://inform7.com/'
aliases = ['inform7', 'i7']
filenames = ['*.ni', '*.i7x']
flags = re.MULTILINE | re.DOTALL
_dash = Inform6Lexer._dash
_dquote = Inform6Lexer._dquote
_newline = Inform6Lexer._newline
_start = r'\A|(?<=[%s])' % _newline
# There are three variants of Inform 7, differing in how to
# interpret at signs and braces in I6T. In top-level inclusions, at
# signs in the first column are inweb syntax. In phrase definitions
# and use options, tokens in braces are treated as I7. Use options
# also interpret "{N}".
tokens = {}
token_variants = ['+i6t-not-inline', '+i6t-inline', '+i6t-use-option']
for level in token_variants:
tokens[level] = {
'+i6-root': list(Inform6Lexer.tokens['root']),
'+i6t-root': [ # For Inform6TemplateLexer
(r'[^%s]*' % Inform6Lexer._newline, Comment.Preproc,
('directive', '+p'))
],
'root': [
(r'(\|?\s)+', Text),
(r'\[', Comment.Multiline, '+comment'),
(r'[%s]' % _dquote, Generic.Heading,
('+main', '+titling', '+titling-string')),
default(('+main', '+heading?'))
],
'+titling-string': [
(r'[^%s]+' % _dquote, Generic.Heading),
(r'[%s]' % _dquote, Generic.Heading, '#pop')
],
'+titling': [
(r'\[', Comment.Multiline, '+comment'),
(r'[^%s.;:|%s]+' % (_dquote, _newline), Generic.Heading),
(r'[%s]' % _dquote, Generic.Heading, '+titling-string'),
(r'[%s]{2}|(?<=[\s%s])\|[\s%s]' % (_newline, _dquote, _dquote),
Text, ('#pop', '+heading?')),
(r'[.;:]|(?<=[\s%s])\|' % _dquote, Text, '#pop'),
(r'[|%s]' % _newline, Generic.Heading)
],
'+main': [
(r'(?i)[^%s:a\[(|%s]+' % (_dquote, _newline), Text),
(r'[%s]' % _dquote, String.Double, '+text'),
(r':', Text, '+phrase-definition'),
(r'(?i)\bas\b', Text, '+use-option'),
(r'\[', Comment.Multiline, '+comment'),
(r'(\([%s])(.*?)([%s]\))' % (_dash, _dash),
bygroups(Punctuation,
using(this, state=('+i6-root', 'directive'),
i6t='+i6t-not-inline'), Punctuation)),
(r'(%s|(?<=[\s;:.%s]))\|\s|[%s]{2,}' %
(_start, _dquote, _newline), Text, '+heading?'),
(r'(?i)[a(|%s]' % _newline, Text)
],
'+phrase-definition': [
(r'\s+', Text),
(r'\[', Comment.Multiline, '+comment'),
(r'(\([%s])(.*?)([%s]\))' % (_dash, _dash),
bygroups(Punctuation,
using(this, state=('+i6-root', 'directive',
'default', 'statements'),
i6t='+i6t-inline'), Punctuation), '#pop'),
default('#pop')
],
'+use-option': [
(r'\s+', Text),
(r'\[', Comment.Multiline, '+comment'),
(r'(\([%s])(.*?)([%s]\))' % (_dash, _dash),
bygroups(Punctuation,
using(this, state=('+i6-root', 'directive'),
i6t='+i6t-use-option'), Punctuation), '#pop'),
default('#pop')
],
'+comment': [
(r'[^\[\]]+', Comment.Multiline),
(r'\[', Comment.Multiline, '#push'),
(r'\]', Comment.Multiline, '#pop')
],
'+text': [
(r'[^\[%s]+' % _dquote, String.Double),
(r'\[.*?\]', String.Interpol),
(r'[%s]' % _dquote, String.Double, '#pop')
],
'+heading?': [
(r'(\|?\s)+', Text),
(r'\[', Comment.Multiline, '+comment'),
(r'[%s]{4}\s+' % _dash, Text, '+documentation-heading'),
(r'[%s]{1,3}' % _dash, Text),
(r'(?i)(volume|book|part|chapter|section)\b[^%s]*' % _newline,
Generic.Heading, '#pop'),
default('#pop')
],
'+documentation-heading': [
(r'\s+', Text),
(r'\[', Comment.Multiline, '+comment'),
(r'(?i)documentation\s+', Text, '+documentation-heading2'),
default('#pop')
],
'+documentation-heading2': [
(r'\s+', Text),
(r'\[', Comment.Multiline, '+comment'),
(r'[%s]{4}\s' % _dash, Text, '+documentation'),
default('#pop:2')
],
'+documentation': [
(r'(?i)(%s)\s*(chapter|example)\s*:[^%s]*' %
(_start, _newline), Generic.Heading),
(r'(?i)(%s)\s*section\s*:[^%s]*' % (_start, _newline),
Generic.Subheading),
(r'((%s)\t.*?[%s])+' % (_start, _newline),
using(this, state='+main')),
(r'[^%s\[]+|[%s\[]' % (_newline, _newline), Text),
(r'\[', Comment.Multiline, '+comment'),
],
'+i6t-not-inline': [
(r'(%s)@c( .*?)?([%s]|\Z)' % (_start, _newline),
Comment.Preproc),
(r'(%s)@([%s]+|Purpose:)[^%s]*' % (_start, _dash, _newline),
Comment.Preproc),
(r'(%s)@p( .*?)?([%s]|\Z)' % (_start, _newline),
Generic.Heading, '+p')
],
'+i6t-use-option': [
include('+i6t-not-inline'),
(r'(\{)(N)(\})', bygroups(Punctuation, Text, Punctuation))
],
'+i6t-inline': [
(r'(\{)(\S[^}]*)?(\})',
bygroups(Punctuation, using(this, state='+main'),
Punctuation))
],
'+i6t': [
(r'(\{[%s])(![^}]*)(\}?)' % _dash,
bygroups(Punctuation, Comment.Single, Punctuation)),
(r'(\{[%s])(lines)(:)([^}]*)(\}?)' % _dash,
bygroups(Punctuation, Keyword, Punctuation, Text,
Punctuation), '+lines'),
(r'(\{[%s])([^:}]*)(:?)([^}]*)(\}?)' % _dash,
bygroups(Punctuation, Keyword, Punctuation, Text,
Punctuation)),
(r'(\(\+)(.*?)(\+\)|\Z)',
bygroups(Punctuation, using(this, state='+main'),
Punctuation))
],
'+p': [
(r'[^@]+', Comment.Preproc),
(r'(%s)@c( .*?)?([%s]|\Z)' % (_start, _newline),
Comment.Preproc, '#pop'),
(r'(%s)@([%s]|Purpose:)' % (_start, _dash), Comment.Preproc),
(r'(%s)@p( .*?)?([%s]|\Z)' % (_start, _newline),
Generic.Heading),
(r'@', Comment.Preproc)
],
'+lines': [
(r'(%s)@c( .*?)?([%s]|\Z)' % (_start, _newline),
Comment.Preproc),
(r'(%s)@([%s]|Purpose:)[^%s]*' % (_start, _dash, _newline),
Comment.Preproc),
(r'(%s)@p( .*?)?([%s]|\Z)' % (_start, _newline),
Generic.Heading, '+p'),
(r'(%s)@\w*[ %s]' % (_start, _newline), Keyword),
(r'![^%s]*' % _newline, Comment.Single),
(r'(\{)([%s]endlines)(\})' % _dash,
bygroups(Punctuation, Keyword, Punctuation), '#pop'),
(r'[^@!{]+?([%s]|\Z)|.' % _newline, Text)
]
}
# Inform 7 can include snippets of Inform 6 template language,
# so all of Inform6Lexer's states are copied here, with
# modifications to account for template syntax. Inform7Lexer's
# own states begin with '+' to avoid name conflicts. Some of
# Inform6Lexer's states begin with '_': these are not modified.
# They deal with template syntax either by including modified
# states, or by matching r'' then pushing to modified states.
for token in Inform6Lexer.tokens:
if token == 'root':
continue
tokens[level][token] = list(Inform6Lexer.tokens[token])
if not token.startswith('_'):
tokens[level][token][:0] = [include('+i6t'), include(level)]
def __init__(self, **options):
level = options.get('i6t', '+i6t-not-inline')
if level not in self._all_tokens:
self._tokens = self.__class__.process_tokendef(level)
else:
self._tokens = self._all_tokens[level]
RegexLexer.__init__(self, **options)
class Inform6TemplateLexer(Inform7Lexer):
"""
For Inform 6 template code.
.. versionadded:: 2.0
"""
name = 'Inform 6 template'
aliases = ['i6t']
filenames = ['*.i6t']
def get_tokens_unprocessed(self, text, stack=('+i6t-root',)):
return Inform7Lexer.get_tokens_unprocessed(self, text, stack)
class Tads3Lexer(RegexLexer):
"""
For TADS 3 source code.
"""
name = 'TADS 3'
aliases = ['tads3']
filenames = ['*.t']
flags = re.DOTALL | re.MULTILINE
_comment_single = r'(?://(?:[^\\\n]|\\+[\w\W])*$)'
_comment_multiline = r'(?:/\*(?:[^*]|\*(?!/))*\*/)'
_escape = (r'(?:\\(?:[\n\\<>"\'^v bnrt]|u[\da-fA-F]{,4}|x[\da-fA-F]{,2}|'
r'[0-3]?[0-7]{1,2}))')
_name = r'(?:[_a-zA-Z]\w*)'
_no_quote = r'(?=\s|\\?>)'
_operator = (r'(?:&&|\|\||\+\+|--|\?\?|::|[.,@\[\]~]|'
r'(?:[=+\-*/%!&|^]|<<?|>>?>?)=?)')
_ws = r'(?:\\|\s|%s|%s)' % (_comment_single, _comment_multiline)
_ws_pp = r'(?:\\\n|[^\S\n]|%s|%s)' % (_comment_single, _comment_multiline)
def _make_string_state(triple, double, verbatim=None, _escape=_escape):
if verbatim:
verbatim = ''.join(['(?:%s|%s)' % (re.escape(c.lower()),
re.escape(c.upper()))
for c in verbatim])
char = r'"' if double else r"'"
token = String.Double if double else String.Single
escaped_quotes = r'+|%s(?!%s{2})' % (char, char) if triple else r''
prefix = '%s%s' % ('t' if triple else '', 'd' if double else 's')
tag_state_name = '%sqt' % prefix
state = []
if triple:
state += [
(r'%s{3,}' % char, token, '#pop'),
(r'\\%s+' % char, String.Escape),
(char, token)
]
else:
state.append((char, token, '#pop'))
state += [
include('s/verbatim'),
(r'[^\\<&{}%s]+' % char, token)
]
if verbatim:
# This regex can't use `(?i)` because escape sequences are
# case-sensitive. `<\XMP>` works; `<\xmp>` doesn't.
state.append((r'\\?<(/|\\\\|(?!%s)\\)%s(?=[\s=>])' %
(_escape, verbatim),
Name.Tag, ('#pop', '%sqs' % prefix, tag_state_name)))
else:
state += [
(r'\\?<!([^><\\%s]|<(?!<)|\\%s%s|%s|\\.)*>?' %
(char, char, escaped_quotes, _escape), Comment.Multiline),
(r'(?i)\\?<listing(?=[\s=>]|\\>)', Name.Tag,
('#pop', '%sqs/listing' % prefix, tag_state_name)),
(r'(?i)\\?<xmp(?=[\s=>]|\\>)', Name.Tag,
('#pop', '%sqs/xmp' % prefix, tag_state_name)),
(r'\\?<([^\s=><\\%s]|<(?!<)|\\%s%s|%s|\\.)*' %
(char, char, escaped_quotes, _escape), Name.Tag,
tag_state_name),
include('s/entity')
]
state += [
include('s/escape'),
(r'\{([^}<\\%s]|<(?!<)|\\%s%s|%s|\\.)*\}' %
(char, char, escaped_quotes, _escape), String.Interpol),
(r'[\\&{}<]', token)
]
return state
def _make_tag_state(triple, double, _escape=_escape):
char = r'"' if double else r"'"
quantifier = r'{3,}' if triple else r''
state_name = '%s%sqt' % ('t' if triple else '', 'd' if double else 's')
token = String.Double if double else String.Single
escaped_quotes = r'+|%s(?!%s{2})' % (char, char) if triple else r''
return [
(r'%s%s' % (char, quantifier), token, '#pop:2'),
(r'(\s|\\\n)+', Text),
(r'(=)(\\?")', bygroups(Punctuation, String.Double),
'dqs/%s' % state_name),
(r"(=)(\\?')", bygroups(Punctuation, String.Single),
'sqs/%s' % state_name),
(r'=', Punctuation, 'uqs/%s' % state_name),
(r'\\?>', Name.Tag, '#pop'),
(r'\{([^}<\\%s]|<(?!<)|\\%s%s|%s|\\.)*\}' %
(char, char, escaped_quotes, _escape), String.Interpol),
(r'([^\s=><\\%s]|<(?!<)|\\%s%s|%s|\\.)+' %
(char, char, escaped_quotes, _escape), Name.Attribute),
include('s/escape'),
include('s/verbatim'),
include('s/entity'),
(r'[\\{}&]', Name.Attribute)
]
def _make_attribute_value_state(terminator, host_triple, host_double,
_escape=_escape):
token = (String.Double if terminator == r'"' else
String.Single if terminator == r"'" else String.Other)
host_char = r'"' if host_double else r"'"
host_quantifier = r'{3,}' if host_triple else r''
host_token = String.Double if host_double else String.Single
escaped_quotes = (r'+|%s(?!%s{2})' % (host_char, host_char)
if host_triple else r'')
return [
(r'%s%s' % (host_char, host_quantifier), host_token, '#pop:3'),
(r'%s%s' % (r'' if token is String.Other else r'\\?', terminator),
token, '#pop'),
include('s/verbatim'),
include('s/entity'),
(r'\{([^}<\\%s]|<(?!<)|\\%s%s|%s|\\.)*\}' %
(host_char, host_char, escaped_quotes, _escape), String.Interpol),
(r'([^\s"\'<%s{}\\&])+' % (r'>' if token is String.Other else r''),
token),
include('s/escape'),
(r'["\'\s&{<}\\]', token)
]
tokens = {
'root': [
('\ufeff', Text),
(r'\{', Punctuation, 'object-body'),
(r';+', Punctuation),
(r'(?=(argcount|break|case|catch|continue|default|definingobj|'
r'delegated|do|else|for|foreach|finally|goto|if|inherited|'
r'invokee|local|nil|new|operator|replaced|return|self|switch|'
r'targetobj|targetprop|throw|true|try|while)\b)', Text, 'block'),
(r'(%s)(%s*)(\()' % (_name, _ws),
bygroups(Name.Function, using(this, state='whitespace'),
Punctuation),
('block?/root', 'more/parameters', 'main/parameters')),
include('whitespace'),
(r'\++', Punctuation),
(r'[^\s!"%-(*->@-_a-z{-~]+', Error), # Averts an infinite loop
(r'(?!\Z)', Text, 'main/root')
],
'main/root': [
include('main/basic'),
default(('#pop', 'object-body/no-braces', 'classes', 'class'))
],
'object-body/no-braces': [
(r';', Punctuation, '#pop'),
(r'\{', Punctuation, ('#pop', 'object-body')),
include('object-body')
],
'object-body': [
(r';', Punctuation),
(r'\{', Punctuation, '#push'),
(r'\}', Punctuation, '#pop'),
(r':', Punctuation, ('classes', 'class')),
(r'(%s?)(%s*)(\()' % (_name, _ws),
bygroups(Name.Function, using(this, state='whitespace'),
Punctuation),
('block?', 'more/parameters', 'main/parameters')),
(r'(%s)(%s*)(\{)' % (_name, _ws),
bygroups(Name.Function, using(this, state='whitespace'),
Punctuation), 'block'),
(r'(%s)(%s*)(:)' % (_name, _ws),
bygroups(Name.Variable, using(this, state='whitespace'),
Punctuation),
('object-body/no-braces', 'classes', 'class')),
include('whitespace'),
(r'->|%s' % _operator, Punctuation, 'main'),
default('main/object-body')
],
'main/object-body': [
include('main/basic'),
(r'(%s)(%s*)(=?)' % (_name, _ws),
bygroups(Name.Variable, using(this, state='whitespace'),
Punctuation), ('#pop', 'more', 'main')),
default('#pop:2')
],
'block?/root': [
(r'\{', Punctuation, ('#pop', 'block')),
include('whitespace'),
(r'(?=[\[\'"<(:])', Text, # It might be a VerbRule macro.
('#pop', 'object-body/no-braces', 'grammar', 'grammar-rules')),
# It might be a macro like DefineAction.
default(('#pop', 'object-body/no-braces'))
],
'block?': [
(r'\{', Punctuation, ('#pop', 'block')),
include('whitespace'),
default('#pop')
],
'block/basic': [
(r'[;:]+', Punctuation),
(r'\{', Punctuation, '#push'),
(r'\}', Punctuation, '#pop'),
(r'default\b', Keyword.Reserved),
(r'(%s)(%s*)(:)' % (_name, _ws),
bygroups(Name.Label, using(this, state='whitespace'),
Punctuation)),
include('whitespace')
],
'block': [
include('block/basic'),
(r'(?!\Z)', Text, ('more', 'main'))
],
'block/embed': [
(r'>>', String.Interpol, '#pop'),
include('block/basic'),
(r'(?!\Z)', Text, ('more/embed', 'main'))
],
'main/basic': [
include('whitespace'),
(r'\(', Punctuation, ('#pop', 'more', 'main')),
(r'\[', Punctuation, ('#pop', 'more/list', 'main')),
(r'\{', Punctuation, ('#pop', 'more/inner', 'main/inner',
'more/parameters', 'main/parameters')),
(r'\*|\.{3}', Punctuation, '#pop'),
(r'(?i)0x[\da-f]+', Number.Hex, '#pop'),
(r'(\d+\.(?!\.)\d*|\.\d+)([eE][-+]?\d+)?|\d+[eE][-+]?\d+',
Number.Float, '#pop'),
(r'0[0-7]+', Number.Oct, '#pop'),
(r'\d+', Number.Integer, '#pop'),
(r'"""', String.Double, ('#pop', 'tdqs')),
(r"'''", String.Single, ('#pop', 'tsqs')),
(r'"', String.Double, ('#pop', 'dqs')),
(r"'", String.Single, ('#pop', 'sqs')),
(r'R"""', String.Regex, ('#pop', 'tdqr')),
(r"R'''", String.Regex, ('#pop', 'tsqr')),
(r'R"', String.Regex, ('#pop', 'dqr')),
(r"R'", String.Regex, ('#pop', 'sqr')),
# Two-token keywords
(r'(extern)(%s+)(object\b)' % _ws,
bygroups(Keyword.Reserved, using(this, state='whitespace'),
Keyword.Reserved)),
(r'(function|method)(%s*)(\()' % _ws,
bygroups(Keyword.Reserved, using(this, state='whitespace'),
Punctuation),
('#pop', 'block?', 'more/parameters', 'main/parameters')),
(r'(modify)(%s+)(grammar\b)' % _ws,
bygroups(Keyword.Reserved, using(this, state='whitespace'),
Keyword.Reserved),
('#pop', 'object-body/no-braces', ':', 'grammar')),
(r'(new)(%s+(?=(?:function|method)\b))' % _ws,
bygroups(Keyword.Reserved, using(this, state='whitespace'))),
(r'(object)(%s+)(template\b)' % _ws,
bygroups(Keyword.Reserved, using(this, state='whitespace'),
Keyword.Reserved), ('#pop', 'template')),
(r'(string)(%s+)(template\b)' % _ws,
bygroups(Keyword, using(this, state='whitespace'),
Keyword.Reserved), ('#pop', 'function-name')),
# Keywords
(r'(argcount|definingobj|invokee|replaced|targetobj|targetprop)\b',
Name.Builtin, '#pop'),
(r'(break|continue|goto)\b', Keyword.Reserved, ('#pop', 'label')),
(r'(case|extern|if|intrinsic|return|static|while)\b',
Keyword.Reserved),
(r'catch\b', Keyword.Reserved, ('#pop', 'catch')),
(r'class\b', Keyword.Reserved,
('#pop', 'object-body/no-braces', 'class')),
(r'(default|do|else|finally|try)\b', Keyword.Reserved, '#pop'),
(r'(dictionary|property)\b', Keyword.Reserved,
('#pop', 'constants')),
(r'enum\b', Keyword.Reserved, ('#pop', 'enum')),
(r'export\b', Keyword.Reserved, ('#pop', 'main')),
(r'(for|foreach)\b', Keyword.Reserved,
('#pop', 'more/inner', 'main/inner')),
(r'(function|method)\b', Keyword.Reserved,
('#pop', 'block?', 'function-name')),
(r'grammar\b', Keyword.Reserved,
('#pop', 'object-body/no-braces', 'grammar')),
(r'inherited\b', Keyword.Reserved, ('#pop', 'inherited')),
(r'local\b', Keyword.Reserved,
('#pop', 'more/local', 'main/local')),
(r'(modify|replace|switch|throw|transient)\b', Keyword.Reserved,
'#pop'),
(r'new\b', Keyword.Reserved, ('#pop', 'class')),
(r'(nil|true)\b', Keyword.Constant, '#pop'),
(r'object\b', Keyword.Reserved, ('#pop', 'object-body/no-braces')),
(r'operator\b', Keyword.Reserved, ('#pop', 'operator')),
(r'propertyset\b', Keyword.Reserved,
('#pop', 'propertyset', 'main')),
(r'self\b', Name.Builtin.Pseudo, '#pop'),
(r'template\b', Keyword.Reserved, ('#pop', 'template')),
# Operators
(r'(__objref|defined)(%s*)(\()' % _ws,
bygroups(Operator.Word, using(this, state='whitespace'),
Operator), ('#pop', 'more/__objref', 'main')),
(r'delegated\b', Operator.Word),
# Compiler-defined macros and built-in properties
(r'(__DATE__|__DEBUG|__LINE__|__FILE__|'
r'__TADS_MACRO_FORMAT_VERSION|__TADS_SYS_\w*|__TADS_SYSTEM_NAME|'
r'__TADS_VERSION_MAJOR|__TADS_VERSION_MINOR|__TADS3|__TIME__|'
r'construct|finalize|grammarInfo|grammarTag|lexicalParent|'
r'miscVocab|sourceTextGroup|sourceTextGroupName|'
r'sourceTextGroupOrder|sourceTextOrder)\b', Name.Builtin, '#pop')
],
'main': [
include('main/basic'),
(_name, Name, '#pop'),
default('#pop')
],
'more/basic': [
(r'\(', Punctuation, ('more/list', 'main')),
(r'\[', Punctuation, ('more', 'main')),
(r'\.{3}', Punctuation),
(r'->|\.\.', Punctuation, 'main'),
(r'(?=;)|[:)\]]', Punctuation, '#pop'),
include('whitespace'),
(_operator, Operator, 'main'),
(r'\?', Operator, ('main', 'more/conditional', 'main')),
(r'(is|not)(%s+)(in\b)' % _ws,
bygroups(Operator.Word, using(this, state='whitespace'),
Operator.Word)),
(r'[^\s!"%-_a-z{-~]+', Error) # Averts an infinite loop
],
'more': [
include('more/basic'),
default('#pop')
],
# Then expression (conditional operator)
'more/conditional': [
(r':(?!:)', Operator, '#pop'),
include('more')
],
# Embedded expressions
'more/embed': [
(r'>>', String.Interpol, '#pop:2'),
include('more')
],
# For/foreach loop initializer or short-form anonymous function
'main/inner': [
(r'\(', Punctuation, ('#pop', 'more/inner', 'main/inner')),
(r'local\b', Keyword.Reserved, ('#pop', 'main/local')),
include('main')
],
'more/inner': [
(r'\}', Punctuation, '#pop'),
(r',', Punctuation, 'main/inner'),
(r'(in|step)\b', Keyword, 'main/inner'),
include('more')
],
# Local
'main/local': [
(_name, Name.Variable, '#pop'),
include('whitespace')
],
'more/local': [
(r',', Punctuation, 'main/local'),
include('more')
],
# List
'more/list': [
(r'[,:]', Punctuation, 'main'),
include('more')
],
# Parameter list
'main/parameters': [
(r'(%s)(%s*)(?=:)' % (_name, _ws),
bygroups(Name.Variable, using(this, state='whitespace')), '#pop'),
(r'(%s)(%s+)(%s)' % (_name, _ws, _name),
bygroups(Name.Class, using(this, state='whitespace'),
Name.Variable), '#pop'),
(r'\[+', Punctuation),
include('main/basic'),
(_name, Name.Variable, '#pop'),
default('#pop')
],
'more/parameters': [
(r'(:)(%s*(?=[?=,:)]))' % _ws,
bygroups(Punctuation, using(this, state='whitespace'))),
(r'[?\]]+', Punctuation),
(r'[:)]', Punctuation, ('#pop', 'multimethod?')),
(r',', Punctuation, 'main/parameters'),
(r'=', Punctuation, ('more/parameter', 'main')),
include('more')
],
'more/parameter': [
(r'(?=[,)])', Text, '#pop'),
include('more')
],
'multimethod?': [
(r'multimethod\b', Keyword, '#pop'),
include('whitespace'),
default('#pop')
],
# Statements and expressions
'more/__objref': [
(r',', Punctuation, 'mode'),
(r'\)', Operator, '#pop'),
include('more')
],
'mode': [
(r'(error|warn)\b', Keyword, '#pop'),
include('whitespace')
],
'catch': [
(r'\(+', Punctuation),
(_name, Name.Exception, ('#pop', 'variables')),
include('whitespace')
],
'enum': [
include('whitespace'),
(r'token\b', Keyword, ('#pop', 'constants')),
default(('#pop', 'constants'))
],
'grammar': [
(r'\)+', Punctuation),
(r'\(', Punctuation, 'grammar-tag'),
(r':', Punctuation, 'grammar-rules'),
(_name, Name.Class),
include('whitespace')
],
'grammar-tag': [
include('whitespace'),
(r'"""([^\\"<]|""?(?!")|\\"+|\\.|<(?!<))+("{3,}|<<)|'
r'R"""([^\\"]|""?(?!")|\\"+|\\.)+"{3,}|'
r"'''([^\\'<]|''?(?!')|\\'+|\\.|<(?!<))+('{3,}|<<)|"
r"R'''([^\\']|''?(?!')|\\'+|\\.)+'{3,}|"
r'"([^\\"<]|\\.|<(?!<))+("|<<)|R"([^\\"]|\\.)+"|'
r"'([^\\'<]|\\.|<(?!<))+('|<<)|R'([^\\']|\\.)+'|"
r"([^)\s\\/]|/(?![/*]))+|\)", String.Other, '#pop')
],
'grammar-rules': [
include('string'),
include('whitespace'),
(r'(\[)(%s*)(badness)' % _ws,
bygroups(Punctuation, using(this, state='whitespace'), Keyword),
'main'),
(r'->|%s|[()]' % _operator, Punctuation),
(_name, Name.Constant),
default('#pop:2')
],
':': [
(r':', Punctuation, '#pop')
],
'function-name': [
(r'(<<([^>]|>>>|>(?!>))*>>)+', String.Interpol),
(r'(?=%s?%s*[({])' % (_name, _ws), Text, '#pop'),
(_name, Name.Function, '#pop'),
include('whitespace')
],
'inherited': [
(r'<', Punctuation, ('#pop', 'classes', 'class')),
include('whitespace'),
(_name, Name.Class, '#pop'),
default('#pop')
],
'operator': [
(r'negate\b', Operator.Word, '#pop'),
include('whitespace'),
(_operator, Operator),
default('#pop')
],
'propertyset': [
(r'\(', Punctuation, ('more/parameters', 'main/parameters')),
(r'\{', Punctuation, ('#pop', 'object-body')),
include('whitespace')
],
'template': [
(r'(?=;)', Text, '#pop'),
include('string'),
(r'inherited\b', Keyword.Reserved),
include('whitespace'),
(r'->|\?|%s' % _operator, Punctuation),
(_name, Name.Variable)
],
# Identifiers
'class': [
(r'\*|\.{3}', Punctuation, '#pop'),
(r'object\b', Keyword.Reserved, '#pop'),
(r'transient\b', Keyword.Reserved),
(_name, Name.Class, '#pop'),
include('whitespace'),
default('#pop')
],
'classes': [
(r'[:,]', Punctuation, 'class'),
include('whitespace'),
(r'>', Punctuation, '#pop'),
default('#pop')
],
'constants': [
(r',+', Punctuation),
(r';', Punctuation, '#pop'),
(r'property\b', Keyword.Reserved),
(_name, Name.Constant),
include('whitespace')
],
'label': [
(_name, Name.Label, '#pop'),
include('whitespace'),
default('#pop')
],
'variables': [
(r',+', Punctuation),
(r'\)', Punctuation, '#pop'),
include('whitespace'),
(_name, Name.Variable)
],
# Whitespace and comments
'whitespace': [
(r'^%s*#(%s|[^\n]|(?<=\\)\n)*\n?' % (_ws_pp, _comment_multiline),
Comment.Preproc),
(_comment_single, Comment.Single),
(_comment_multiline, Comment.Multiline),
(r'\\+\n+%s*#?|\n+|([^\S\n]|\\)+' % _ws_pp, Text)
],
# Strings
'string': [
(r'"""', String.Double, 'tdqs'),
(r"'''", String.Single, 'tsqs'),
(r'"', String.Double, 'dqs'),
(r"'", String.Single, 'sqs')
],
's/escape': [
(r'\{\{|\}\}|%s' % _escape, String.Escape)
],
's/verbatim': [
(r'<<\s*(as\s+decreasingly\s+likely\s+outcomes|cycling|else|end|'
r'first\s+time|one\s+of|only|or|otherwise|'
r'(sticky|(then\s+)?(purely\s+)?at)\s+random|stopping|'
r'(then\s+)?(half\s+)?shuffled|\|\|)\s*>>', String.Interpol),
(r'<<(%%(_(%s|\\?.)|[\-+ ,#]|\[\d*\]?)*\d*\.?\d*(%s|\\?.)|'
r'\s*((else|otherwise)\s+)?(if|unless)\b)?' % (_escape, _escape),
String.Interpol, ('block/embed', 'more/embed', 'main'))
],
's/entity': [
(r'(?i)&(#(x[\da-f]+|\d+)|[a-z][\da-z]*);?', Name.Entity)
],
'tdqs': _make_string_state(True, True),
'tsqs': _make_string_state(True, False),
'dqs': _make_string_state(False, True),
'sqs': _make_string_state(False, False),
'tdqs/listing': _make_string_state(True, True, 'listing'),
'tsqs/listing': _make_string_state(True, False, 'listing'),
'dqs/listing': _make_string_state(False, True, 'listing'),
'sqs/listing': _make_string_state(False, False, 'listing'),
'tdqs/xmp': _make_string_state(True, True, 'xmp'),
'tsqs/xmp': _make_string_state(True, False, 'xmp'),
'dqs/xmp': _make_string_state(False, True, 'xmp'),
'sqs/xmp': _make_string_state(False, False, 'xmp'),
# Tags
'tdqt': _make_tag_state(True, True),
'tsqt': _make_tag_state(True, False),
'dqt': _make_tag_state(False, True),
'sqt': _make_tag_state(False, False),
'dqs/tdqt': _make_attribute_value_state(r'"', True, True),
'dqs/tsqt': _make_attribute_value_state(r'"', True, False),
'dqs/dqt': _make_attribute_value_state(r'"', False, True),
'dqs/sqt': _make_attribute_value_state(r'"', False, False),
'sqs/tdqt': _make_attribute_value_state(r"'", True, True),
'sqs/tsqt': _make_attribute_value_state(r"'", True, False),
'sqs/dqt': _make_attribute_value_state(r"'", False, True),
'sqs/sqt': _make_attribute_value_state(r"'", False, False),
'uqs/tdqt': _make_attribute_value_state(_no_quote, True, True),
'uqs/tsqt': _make_attribute_value_state(_no_quote, True, False),
'uqs/dqt': _make_attribute_value_state(_no_quote, False, True),
'uqs/sqt': _make_attribute_value_state(_no_quote, False, False),
# Regular expressions
'tdqr': [
(r'[^\\"]+', String.Regex),
(r'\\"*', String.Regex),
(r'"{3,}', String.Regex, '#pop'),
(r'"', String.Regex)
],
'tsqr': [
(r"[^\\']+", String.Regex),
(r"\\'*", String.Regex),
(r"'{3,}", String.Regex, '#pop'),
(r"'", String.Regex)
],
'dqr': [
(r'[^\\"]+', String.Regex),
(r'\\"?', String.Regex),
(r'"', String.Regex, '#pop')
],
'sqr': [
(r"[^\\']+", String.Regex),
(r"\\'?", String.Regex),
(r"'", String.Regex, '#pop')
]
}
def get_tokens_unprocessed(self, text, **kwargs):
pp = r'^%s*#%s*' % (self._ws_pp, self._ws_pp)
if_false_level = 0
for index, token, value in (
RegexLexer.get_tokens_unprocessed(self, text, **kwargs)):
if if_false_level == 0: # Not in a false #if
if (token is Comment.Preproc and
re.match(r'%sif%s+(0|nil)%s*$\n?' %
(pp, self._ws_pp, self._ws_pp), value)):
if_false_level = 1
else: # In a false #if
if token is Comment.Preproc:
if (if_false_level == 1 and
re.match(r'%sel(if|se)\b' % pp, value)):
if_false_level = 0
elif re.match(r'%sif' % pp, value):
if_false_level += 1
elif re.match(r'%sendif\b' % pp, value):
if_false_level -= 1
else:
token = Comment
yield index, token, value
def analyse_text(text):
"""This is a rather generic descriptive language without strong
identifiers. It looks like a 'GameMainDef' has to be present,
and/or a 'versionInfo' with an 'IFID' field."""
result = 0
if '__TADS' in text or 'GameMainDef' in text:
result += 0.2
        # These identifiers are fairly distinctive and likely to appear in source as well
if 'versionInfo' in text and 'IFID' in text:
result += 0.1
return result
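    # Illustrative scores from the heuristic above (the inputs are made-up
    # snippets, not real game sources):
    #
    #     analyse_text("GameMainDef\n")                     # -> 0.2
    #     analyse_text("versionInfo: ... IFID = '...'")     # -> 0.1
    #     analyse_text("print('hello')")                    # -> 0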
| 57,119 | Python | 40.301518 | 99 | 0.431117 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/forth.py | """
pygments.lexers.forth
~~~~~~~~~~~~~~~~~~~~~
Lexer for the Forth language.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, bygroups
from pygments.token import Text, Comment, Keyword, Name, String, Number, \
Whitespace
__all__ = ['ForthLexer']
class ForthLexer(RegexLexer):
"""
Lexer for Forth files.
.. versionadded:: 2.2
"""
name = 'Forth'
url = 'https://www.forth.com/forth/'
aliases = ['forth']
filenames = ['*.frt', '*.fs']
mimetypes = ['application/x-forth']
flags = re.IGNORECASE | re.MULTILINE
tokens = {
'root': [
(r'\s+', Whitespace),
# All comment types
(r'\\.*?$', Comment.Single),
(r'\([\s].*?\)', Comment.Single),
# defining words. The next word is a new command name
(r'(:|variable|constant|value|buffer:)(\s+)',
bygroups(Keyword.Namespace, Whitespace), 'worddef'),
# strings are rather simple
(r'([.sc]")(\s+?)', bygroups(String, Whitespace), 'stringdef'),
# keywords from the various wordsets
# *** Wordset BLOCK
(r'(blk|block|buffer|evaluate|flush|load|save-buffers|update|'
# *** Wordset BLOCK-EXT
r'empty-buffers|list|refill|scr|thru|'
# *** Wordset CORE
r'\#s|\*\/mod|\+loop|\/mod|0<|0=|1\+|1-|2!|'
r'2\*|2\/|2@|2drop|2dup|2over|2swap|>body|'
r'>in|>number|>r|\?dup|abort|abort\"|abs|'
r'accept|align|aligned|allot|and|base|begin|'
r'bl|c!|c,|c@|cell\+|cells|char|char\+|'
r'chars|constant|count|cr|create|decimal|'
r'depth|do|does>|drop|dup|else|emit|environment\?|'
r'evaluate|execute|exit|fill|find|fm\/mod|'
r'here|hold|i|if|immediate|invert|j|key|'
r'leave|literal|loop|lshift|m\*|max|min|'
r'mod|move|negate|or|over|postpone|quit|'
r'r>|r@|recurse|repeat|rot|rshift|s\"|s>d|'
r'sign|sm\/rem|source|space|spaces|state|swap|'
r'then|type|u\.|u\<|um\*|um\/mod|unloop|until|'
r'variable|while|word|xor|\[char\]|\[\'\]|'
r'@|!|\#|<\#|\#>|:|;|\+|-|\*|\/|,|<|>|\|1\+|1-|\.|'
# *** Wordset CORE-EXT
r'\.r|0<>|'
r'0>|2>r|2r>|2r@|:noname|\?do|again|c\"|'
r'case|compile,|endcase|endof|erase|false|'
r'hex|marker|nip|of|pad|parse|pick|refill|'
r'restore-input|roll|save-input|source-id|to|'
r'true|tuck|u\.r|u>|unused|value|within|'
r'\[compile\]|'
# *** Wordset CORE-EXT-obsolescent
r'\#tib|convert|expect|query|span|'
r'tib|'
# *** Wordset DOUBLE
r'2constant|2literal|2variable|d\+|d-|'
r'd\.|d\.r|d0<|d0=|d2\*|d2\/|d<|d=|d>s|'
r'dabs|dmax|dmin|dnegate|m\*\/|m\+|'
# *** Wordset DOUBLE-EXT
r'2rot|du<|'
# *** Wordset EXCEPTION
r'catch|throw|'
# *** Wordset EXCEPTION-EXT
r'abort|abort\"|'
# *** Wordset FACILITY
r'at-xy|key\?|page|'
# *** Wordset FACILITY-EXT
r'ekey|ekey>char|ekey\?|emit\?|ms|time&date|'
# *** Wordset FILE
r'BIN|CLOSE-FILE|CREATE-FILE|DELETE-FILE|FILE-POSITION|'
r'FILE-SIZE|INCLUDE-FILE|INCLUDED|OPEN-FILE|R\/O|'
r'R\/W|READ-FILE|READ-LINE|REPOSITION-FILE|RESIZE-FILE|'
r'S\"|SOURCE-ID|W/O|WRITE-FILE|WRITE-LINE|'
# *** Wordset FILE-EXT
r'FILE-STATUS|FLUSH-FILE|REFILL|RENAME-FILE|'
# *** Wordset FLOAT
r'>float|d>f|'
r'f!|f\*|f\+|f-|f\/|f0<|f0=|f<|f>d|f@|'
r'falign|faligned|fconstant|fdepth|fdrop|fdup|'
r'fliteral|float\+|floats|floor|fmax|fmin|'
r'fnegate|fover|frot|fround|fswap|fvariable|'
r'represent|'
# *** Wordset FLOAT-EXT
r'df!|df@|dfalign|dfaligned|dfloat\+|'
r'dfloats|f\*\*|f\.|fabs|facos|facosh|falog|'
r'fasin|fasinh|fatan|fatan2|fatanh|fcos|fcosh|'
r'fe\.|fexp|fexpm1|fln|flnp1|flog|fs\.|fsin|'
r'fsincos|fsinh|fsqrt|ftan|ftanh|f~|precision|'
r'set-precision|sf!|sf@|sfalign|sfaligned|sfloat\+|'
r'sfloats|'
# *** Wordset LOCAL
r'\(local\)|to|'
# *** Wordset LOCAL-EXT
r'locals\||'
# *** Wordset MEMORY
r'allocate|free|resize|'
# *** Wordset SEARCH
r'definitions|find|forth-wordlist|get-current|'
r'get-order|search-wordlist|set-current|set-order|'
r'wordlist|'
# *** Wordset SEARCH-EXT
r'also|forth|only|order|previous|'
# *** Wordset STRING
r'-trailing|\/string|blank|cmove|cmove>|compare|'
r'search|sliteral|'
# *** Wordset TOOLS
r'.s|dump|see|words|'
# *** Wordset TOOLS-EXT
r';code|'
r'ahead|assembler|bye|code|cs-pick|cs-roll|'
r'editor|state|\[else\]|\[if\]|\[then\]|'
# *** Wordset TOOLS-EXT-obsolescent
r'forget|'
# Forth 2012
r'defer|defer@|defer!|action-of|begin-structure|field:|buffer:|'
r'parse-name|buffer:|traverse-wordlist|n>r|nr>|2value|fvalue|'
r'name>interpret|name>compile|name>string|'
r'cfield:|end-structure)(?!\S)', Keyword),
# Numbers
(r'(\$[0-9A-F]+)', Number.Hex),
(r'(\#|%|&|\-|\+)?[0-9]+', Number.Integer),
(r'(\#|%|&|\-|\+)?[0-9.]+', Keyword.Type),
# amforth specific
(r'(@i|!i|@e|!e|pause|noop|turnkey|sleep|'
r'itype|icompare|sp@|sp!|rp@|rp!|up@|up!|'
r'>a|a>|a@|a!|a@+|a@-|>b|b>|b@|b!|b@+|b@-|'
r'find-name|1ms|'
r'sp0|rp0|\(evaluate\)|int-trap|int!)(?!\S)',
Name.Constant),
# a proposal
(r'(do-recognizer|r:fail|recognizer:|get-recognizers|'
r'set-recognizers|r:float|r>comp|r>int|r>post|'
r'r:name|r:word|r:dnum|r:num|recognizer|forth-recognizer|'
r'rec:num|rec:float|rec:word)(?!\S)', Name.Decorator),
# defining words. The next word is a new command name
(r'(Evalue|Rvalue|Uvalue|Edefer|Rdefer|Udefer)(\s+)',
bygroups(Keyword.Namespace, Text), 'worddef'),
(r'\S+', Name.Function), # Anything else is executed
],
'worddef': [
(r'\S+', Name.Class, '#pop'),
],
'stringdef': [
(r'[^"]+', String, '#pop'),
],
}
def analyse_text(text):
"""Forth uses : COMMAND ; quite a lot in a single line, so we're trying
to find that."""
if re.search('\n:[^\n]+;\n', text):
return 0.3
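    # Illustrative check of the heuristic above: a colon definition on its own
    # line triggers the score, anything else falls through to None.
    #
    #     analyse_text("\n: SQUARE DUP * ;\n")   # -> 0.3
    #     analyse_text("1 2 + .")                # -> None (no colon definition)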
| 7,194 | Python | 38.972222 | 79 | 0.493328 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/_lua_builtins.py | """
pygments.lexers._lua_builtins
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    This file contains the names and modules of Lua functions.
It is able to re-generate itself, but for adding new functions you
probably have to add some callbacks (see function module_callbacks).
Do not edit the MODULES dict by hand.
Run with `python -I` to regenerate.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
MODULES = {'basic': ('_G',
'_VERSION',
'assert',
'collectgarbage',
'dofile',
'error',
'getmetatable',
'ipairs',
'load',
'loadfile',
'next',
'pairs',
'pcall',
'print',
'rawequal',
'rawget',
'rawlen',
'rawset',
'select',
'setmetatable',
'tonumber',
'tostring',
'type',
'warn',
'xpcall'),
'bit32': ('bit32.arshift',
'bit32.band',
'bit32.bnot',
'bit32.bor',
'bit32.btest',
'bit32.bxor',
'bit32.extract',
'bit32.lrotate',
'bit32.lshift',
'bit32.replace',
'bit32.rrotate',
'bit32.rshift'),
'coroutine': ('coroutine.close',
'coroutine.create',
'coroutine.isyieldable',
'coroutine.resume',
'coroutine.running',
'coroutine.status',
'coroutine.wrap',
'coroutine.yield'),
'debug': ('debug.debug',
'debug.gethook',
'debug.getinfo',
'debug.getlocal',
'debug.getmetatable',
'debug.getregistry',
'debug.getupvalue',
'debug.getuservalue',
'debug.sethook',
'debug.setlocal',
'debug.setmetatable',
'debug.setupvalue',
'debug.setuservalue',
'debug.traceback',
'debug.upvalueid',
'debug.upvaluejoin'),
'io': ('io.close',
'io.flush',
'io.input',
'io.lines',
'io.open',
'io.output',
'io.popen',
'io.read',
'io.stderr',
'io.stdin',
'io.stdout',
'io.tmpfile',
'io.type',
'io.write'),
'math': ('math.abs',
'math.acos',
'math.asin',
'math.atan',
'math.atan2',
'math.ceil',
'math.cos',
'math.cosh',
'math.deg',
'math.exp',
'math.floor',
'math.fmod',
'math.frexp',
'math.huge',
'math.ldexp',
'math.log',
'math.max',
'math.maxinteger',
'math.min',
'math.mininteger',
'math.modf',
'math.pi',
'math.pow',
'math.rad',
'math.random',
'math.randomseed',
'math.sin',
'math.sinh',
'math.sqrt',
'math.tan',
'math.tanh',
'math.tointeger',
'math.type',
'math.ult'),
'modules': ('package.config',
'package.cpath',
'package.loaded',
'package.loadlib',
'package.path',
'package.preload',
'package.searchers',
'package.searchpath',
'require'),
'os': ('os.clock',
'os.date',
'os.difftime',
'os.execute',
'os.exit',
'os.getenv',
'os.remove',
'os.rename',
'os.setlocale',
'os.time',
'os.tmpname'),
'string': ('string.byte',
'string.char',
'string.dump',
'string.find',
'string.format',
'string.gmatch',
'string.gsub',
'string.len',
'string.lower',
'string.match',
'string.pack',
'string.packsize',
'string.rep',
'string.reverse',
'string.sub',
'string.unpack',
'string.upper'),
'table': ('table.concat',
'table.insert',
'table.move',
'table.pack',
'table.remove',
'table.sort',
'table.unpack'),
'utf8': ('utf8.char',
'utf8.charpattern',
'utf8.codepoint',
'utf8.codes',
'utf8.len',
'utf8.offset')}
if __name__ == '__main__': # pragma: no cover
import re
from urllib.request import urlopen
import pprint
# you can't generally find out what module a function belongs to if you
# have only its name. Because of this, here are some callback functions
    # that recognize if a given function belongs to a specific module
def module_callbacks():
def is_in_coroutine_module(name):
return name.startswith('coroutine.')
def is_in_modules_module(name):
if name in ['require', 'module'] or name.startswith('package'):
return True
else:
return False
def is_in_string_module(name):
return name.startswith('string.')
def is_in_table_module(name):
return name.startswith('table.')
def is_in_math_module(name):
return name.startswith('math')
def is_in_io_module(name):
return name.startswith('io.')
def is_in_os_module(name):
return name.startswith('os.')
def is_in_debug_module(name):
return name.startswith('debug.')
return {'coroutine': is_in_coroutine_module,
'modules': is_in_modules_module,
'string': is_in_string_module,
'table': is_in_table_module,
'math': is_in_math_module,
'io': is_in_io_module,
'os': is_in_os_module,
'debug': is_in_debug_module}
def get_newest_version():
f = urlopen('http://www.lua.org/manual/')
r = re.compile(r'^<A HREF="(\d\.\d)/">(Lua )?\1</A>')
for line in f:
m = r.match(line.decode('iso-8859-1'))
if m is not None:
return m.groups()[0]
def get_lua_functions(version):
f = urlopen('http://www.lua.org/manual/%s/' % version)
r = re.compile(r'^<A HREF="manual.html#pdf-(?!lua|LUA)([^:]+)">\1</A>')
functions = []
for line in f:
m = r.match(line.decode('iso-8859-1'))
if m is not None:
functions.append(m.groups()[0])
return functions
def get_function_module(name):
for mod, cb in module_callbacks().items():
if cb(name):
return mod
if '.' in name:
return name.split('.')[0]
else:
return 'basic'
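    # For illustration, the resolution above maps (given the callbacks defined
    # in module_callbacks()):
    #
    #     get_function_module('coroutine.wrap')  # -> 'coroutine'
    #     get_function_module('require')         # -> 'modules'
    #     get_function_module('print')           # -> 'basic'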
def regenerate(filename, modules):
with open(filename) as fp:
content = fp.read()
header = content[:content.find('MODULES = {')]
footer = content[content.find("if __name__ == '__main__':"):]
with open(filename, 'w') as fp:
fp.write(header)
fp.write('MODULES = %s\n\n' % pprint.pformat(modules))
fp.write(footer)
def run():
version = get_newest_version()
functions = set()
for v in ('5.2', version):
print('> Downloading function index for Lua %s' % v)
f = get_lua_functions(v)
print('> %d functions found, %d new:' %
(len(f), len(set(f) - functions)))
functions |= set(f)
functions = sorted(functions)
modules = {}
for full_function_name in functions:
print('>> %s' % full_function_name)
m = get_function_module(full_function_name)
modules.setdefault(m, []).append(full_function_name)
modules = {k: tuple(v) for k, v in modules.items()}
regenerate(__file__, modules)
run()
| 8,080 | Python | 27.255245 | 79 | 0.469431 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/erlang.py | """
pygments.lexers.erlang
~~~~~~~~~~~~~~~~~~~~~~
Lexers for Erlang.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import Lexer, RegexLexer, bygroups, words, do_insertions, \
include, default, line_re
from pygments.token import Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Generic, Whitespace
__all__ = ['ErlangLexer', 'ErlangShellLexer', 'ElixirConsoleLexer',
'ElixirLexer']
class ErlangLexer(RegexLexer):
"""
For the Erlang functional programming language.
.. versionadded:: 0.9
"""
name = 'Erlang'
url = 'https://www.erlang.org/'
aliases = ['erlang']
filenames = ['*.erl', '*.hrl', '*.es', '*.escript']
mimetypes = ['text/x-erlang']
keywords = (
'after', 'begin', 'case', 'catch', 'cond', 'end', 'fun', 'if',
'let', 'of', 'query', 'receive', 'try', 'when',
)
builtins = ( # See erlang(3) man page
'abs', 'append_element', 'apply', 'atom_to_list', 'binary_to_list',
'bitstring_to_list', 'binary_to_term', 'bit_size', 'bump_reductions',
'byte_size', 'cancel_timer', 'check_process_code', 'delete_module',
'demonitor', 'disconnect_node', 'display', 'element', 'erase', 'exit',
'float', 'float_to_list', 'fun_info', 'fun_to_list',
'function_exported', 'garbage_collect', 'get', 'get_keys',
'group_leader', 'hash', 'hd', 'integer_to_list', 'iolist_to_binary',
'iolist_size', 'is_atom', 'is_binary', 'is_bitstring', 'is_boolean',
'is_builtin', 'is_float', 'is_function', 'is_integer', 'is_list',
'is_number', 'is_pid', 'is_port', 'is_process_alive', 'is_record',
'is_reference', 'is_tuple', 'length', 'link', 'list_to_atom',
'list_to_binary', 'list_to_bitstring', 'list_to_existing_atom',
'list_to_float', 'list_to_integer', 'list_to_pid', 'list_to_tuple',
'load_module', 'localtime_to_universaltime', 'make_tuple', 'md5',
'md5_final', 'md5_update', 'memory', 'module_loaded', 'monitor',
'monitor_node', 'node', 'nodes', 'open_port', 'phash', 'phash2',
'pid_to_list', 'port_close', 'port_command', 'port_connect',
'port_control', 'port_call', 'port_info', 'port_to_list',
'process_display', 'process_flag', 'process_info', 'purge_module',
'put', 'read_timer', 'ref_to_list', 'register', 'resume_process',
'round', 'send', 'send_after', 'send_nosuspend', 'set_cookie',
'setelement', 'size', 'spawn', 'spawn_link', 'spawn_monitor',
'spawn_opt', 'split_binary', 'start_timer', 'statistics',
'suspend_process', 'system_flag', 'system_info', 'system_monitor',
'system_profile', 'term_to_binary', 'tl', 'trace', 'trace_delivered',
'trace_info', 'trace_pattern', 'trunc', 'tuple_size', 'tuple_to_list',
'universaltime_to_localtime', 'unlink', 'unregister', 'whereis'
)
operators = r'(\+\+?|--?|\*|/|<|>|/=|=:=|=/=|=<|>=|==?|<-|!|\?)'
word_operators = (
'and', 'andalso', 'band', 'bnot', 'bor', 'bsl', 'bsr', 'bxor',
'div', 'not', 'or', 'orelse', 'rem', 'xor'
)
atom_re = r"(?:[a-z]\w*|'[^\n']*[^\\]')"
variable_re = r'(?:[A-Z_]\w*)'
esc_char_re = r'[bdefnrstv\'"\\]'
esc_octal_re = r'[0-7][0-7]?[0-7]?'
esc_hex_re = r'(?:x[0-9a-fA-F]{2}|x\{[0-9a-fA-F]+\})'
esc_ctrl_re = r'\^[a-zA-Z]'
escape_re = r'(?:\\(?:'+esc_char_re+r'|'+esc_octal_re+r'|'+esc_hex_re+r'|'+esc_ctrl_re+r'))'
macro_re = r'(?:'+variable_re+r'|'+atom_re+r')'
base_re = r'(?:[2-9]|[12][0-9]|3[0-6])'
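    # For reference, these building blocks let the rules below recognise, for
    # example, base-prefixed integers such as 16#ff or 2#1010 (via base_re),
    # macro uses such as ?MODULE (via macro_re), and string escapes such as
    # \x{263A} (via esc_hex_re).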
tokens = {
'root': [
(r'\s+', Whitespace),
(r'(%.*)(\n)', bygroups(Comment, Whitespace)),
(words(keywords, suffix=r'\b'), Keyword),
(words(builtins, suffix=r'\b'), Name.Builtin),
(words(word_operators, suffix=r'\b'), Operator.Word),
(r'^-', Punctuation, 'directive'),
(operators, Operator),
(r'"', String, 'string'),
(r'<<', Name.Label),
(r'>>', Name.Label),
('(' + atom_re + ')(:)', bygroups(Name.Namespace, Punctuation)),
('(?:^|(?<=:))(' + atom_re + r')(\s*)(\()',
bygroups(Name.Function, Whitespace, Punctuation)),
(r'[+-]?' + base_re + r'#[0-9a-zA-Z]+', Number.Integer),
(r'[+-]?\d+', Number.Integer),
(r'[+-]?\d+.\d+', Number.Float),
(r'[]\[:_@\".{}()|;,]', Punctuation),
(variable_re, Name.Variable),
(atom_re, Name),
(r'\?'+macro_re, Name.Constant),
(r'\$(?:'+escape_re+r'|\\[ %]|[^\\])', String.Char),
(r'#'+atom_re+r'(:?\.'+atom_re+r')?', Name.Label),
# Erlang script shebang
(r'\A#!.+\n', Comment.Hashbang),
# EEP 43: Maps
# http://www.erlang.org/eeps/eep-0043.html
(r'#\{', Punctuation, 'map_key'),
],
'string': [
(escape_re, String.Escape),
(r'"', String, '#pop'),
(r'~[0-9.*]*[~#+BPWXb-ginpswx]', String.Interpol),
(r'[^"\\~]+', String),
(r'~', String),
],
'directive': [
(r'(define)(\s*)(\()('+macro_re+r')',
bygroups(Name.Entity, Whitespace, Punctuation, Name.Constant), '#pop'),
(r'(record)(\s*)(\()('+macro_re+r')',
bygroups(Name.Entity, Whitespace, Punctuation, Name.Label), '#pop'),
(atom_re, Name.Entity, '#pop'),
],
'map_key': [
include('root'),
(r'=>', Punctuation, 'map_val'),
(r':=', Punctuation, 'map_val'),
(r'\}', Punctuation, '#pop'),
],
'map_val': [
include('root'),
(r',', Punctuation, '#pop'),
(r'(?=\})', Punctuation, '#pop'),
],
}
class ErlangShellLexer(Lexer):
"""
Shell sessions in erl (for Erlang code).
.. versionadded:: 1.1
"""
name = 'Erlang erl session'
aliases = ['erl']
filenames = ['*.erl-sh']
mimetypes = ['text/x-erl-shellsession']
_prompt_re = re.compile(r'(?:\([\w@_.]+\))?\d+>(?=\s|\Z)')
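    # For example (illustrative), in a transcript such as
    #
    #     1> lists:seq(1, 3).
    #     [1,2,3]
    #
    # the leading "1>" is emitted as a Generic.Prompt token, the expression is
    # delegated to ErlangLexer, and the result line becomes Generic.Output.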
def get_tokens_unprocessed(self, text):
erlexer = ErlangLexer(**self.options)
curcode = ''
insertions = []
for match in line_re.finditer(text):
line = match.group()
m = self._prompt_re.match(line)
if m is not None:
end = m.end()
insertions.append((len(curcode),
[(0, Generic.Prompt, line[:end])]))
curcode += line[end:]
else:
if curcode:
yield from do_insertions(insertions,
erlexer.get_tokens_unprocessed(curcode))
curcode = ''
insertions = []
if line.startswith('*'):
yield match.start(), Generic.Traceback, line
else:
yield match.start(), Generic.Output, line
if curcode:
yield from do_insertions(insertions,
erlexer.get_tokens_unprocessed(curcode))
def gen_elixir_string_rules(name, symbol, token):
states = {}
states['string_' + name] = [
(r'[^#%s\\]+' % (symbol,), token),
include('escapes'),
(r'\\.', token),
(r'(%s)' % (symbol,), bygroups(token), "#pop"),
include('interpol')
]
return states
def gen_elixir_sigstr_rules(term, term_class, token, interpol=True):
if interpol:
return [
(r'[^#%s\\]+' % (term_class,), token),
include('escapes'),
(r'\\.', token),
(r'%s[a-zA-Z]*' % (term,), token, '#pop'),
include('interpol')
]
else:
return [
(r'[^%s\\]+' % (term_class,), token),
(r'\\.', token),
(r'%s[a-zA-Z]*' % (term,), token, '#pop'),
]
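# For illustration, gen_elixir_string_rules('double', '"', String.Double)
# produces (schematically):
#
#     {'string_double': [
#         (r'[^#"\\]+', String.Double),
#         include('escapes'),
#         (r'\\.', String.Double),
#         (r'(")', bygroups(String.Double), '#pop'),
#         include('interpol'),
#     ]}
#
# gen_elixir_sigstr_rules builds the analogous rule lists for sigil bodies,
# with interpolation optional.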
class ElixirLexer(RegexLexer):
"""
For the Elixir language.
.. versionadded:: 1.5
"""
name = 'Elixir'
url = 'http://elixir-lang.org'
aliases = ['elixir', 'ex', 'exs']
filenames = ['*.ex', '*.eex', '*.exs', '*.leex']
mimetypes = ['text/x-elixir']
KEYWORD = ('fn', 'do', 'end', 'after', 'else', 'rescue', 'catch')
KEYWORD_OPERATOR = ('not', 'and', 'or', 'when', 'in')
BUILTIN = (
'case', 'cond', 'for', 'if', 'unless', 'try', 'receive', 'raise',
'quote', 'unquote', 'unquote_splicing', 'throw', 'super',
)
BUILTIN_DECLARATION = (
'def', 'defp', 'defmodule', 'defprotocol', 'defmacro', 'defmacrop',
'defdelegate', 'defexception', 'defstruct', 'defimpl', 'defcallback',
)
BUILTIN_NAMESPACE = ('import', 'require', 'use', 'alias')
CONSTANT = ('nil', 'true', 'false')
PSEUDO_VAR = ('_', '__MODULE__', '__DIR__', '__ENV__', '__CALLER__')
OPERATORS3 = (
'<<<', '>>>', '|||', '&&&', '^^^', '~~~', '===', '!==',
'~>>', '<~>', '|~>', '<|>',
)
OPERATORS2 = (
'==', '!=', '<=', '>=', '&&', '||', '<>', '++', '--', '|>', '=~',
'->', '<-', '|', '.', '=', '~>', '<~',
)
OPERATORS1 = ('<', '>', '+', '-', '*', '/', '!', '^', '&')
PUNCTUATION = (
'\\\\', '<<', '>>', '=>', '(', ')', ':', ';', ',', '[', ']',
)
def get_tokens_unprocessed(self, text):
for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
if token is Name:
if value in self.KEYWORD:
yield index, Keyword, value
elif value in self.KEYWORD_OPERATOR:
yield index, Operator.Word, value
elif value in self.BUILTIN:
yield index, Keyword, value
elif value in self.BUILTIN_DECLARATION:
yield index, Keyword.Declaration, value
elif value in self.BUILTIN_NAMESPACE:
yield index, Keyword.Namespace, value
elif value in self.CONSTANT:
yield index, Name.Constant, value
elif value in self.PSEUDO_VAR:
yield index, Name.Builtin.Pseudo, value
else:
yield index, token, value
else:
yield index, token, value
def gen_elixir_sigil_rules():
# all valid sigil terminators (excluding heredocs)
terminators = [
(r'\{', r'\}', '}', 'cb'),
(r'\[', r'\]', r'\]', 'sb'),
(r'\(', r'\)', ')', 'pa'),
('<', '>', '>', 'ab'),
('/', '/', '/', 'slas'),
(r'\|', r'\|', '|', 'pipe'),
('"', '"', '"', 'quot'),
("'", "'", "'", 'apos'),
]
# heredocs have slightly different rules
triquotes = [(r'"""', 'triquot'), (r"'''", 'triapos')]
token = String.Other
states = {'sigils': []}
for term, name in triquotes:
states['sigils'] += [
(r'(~[a-z])(%s)' % (term,), bygroups(token, String.Heredoc),
(name + '-end', name + '-intp')),
(r'(~[A-Z])(%s)' % (term,), bygroups(token, String.Heredoc),
(name + '-end', name + '-no-intp')),
]
states[name + '-end'] = [
(r'[a-zA-Z]+', token, '#pop'),
default('#pop'),
]
states[name + '-intp'] = [
(r'^(\s*)(' + term + ')', bygroups(Whitespace, String.Heredoc), '#pop'),
include('heredoc_interpol'),
]
states[name + '-no-intp'] = [
(r'^(\s*)(' + term +')', bygroups(Whitespace, String.Heredoc), '#pop'),
include('heredoc_no_interpol'),
]
for lterm, rterm, rterm_class, name in terminators:
states['sigils'] += [
(r'~[a-z]' + lterm, token, name + '-intp'),
(r'~[A-Z]' + lterm, token, name + '-no-intp'),
]
states[name + '-intp'] = \
gen_elixir_sigstr_rules(rterm, rterm_class, token)
states[name + '-no-intp'] = \
gen_elixir_sigstr_rules(rterm, rterm_class, token, interpol=False)
return states
op3_re = "|".join(re.escape(s) for s in OPERATORS3)
op2_re = "|".join(re.escape(s) for s in OPERATORS2)
op1_re = "|".join(re.escape(s) for s in OPERATORS1)
ops_re = r'(?:%s|%s|%s)' % (op3_re, op2_re, op1_re)
punctuation_re = "|".join(re.escape(s) for s in PUNCTUATION)
alnum = r'\w'
name_re = r'(?:\.\.\.|[a-z_]%s*[!?]?)' % alnum
modname_re = r'[A-Z]%(alnum)s*(?:\.[A-Z]%(alnum)s*)*' % {'alnum': alnum}
complex_name_re = r'(?:%s|%s|%s)' % (name_re, modname_re, ops_re)
special_atom_re = r'(?:\.\.\.|<<>>|%\{\}|%|\{\})'
long_hex_char_re = r'(\\x\{)([\da-fA-F]+)(\})'
hex_char_re = r'(\\x[\da-fA-F]{1,2})'
escape_char_re = r'(\\[abdefnrstv])'
tokens = {
'root': [
(r'\s+', Whitespace),
(r'#.*$', Comment.Single),
# Various kinds of characters
(r'(\?)' + long_hex_char_re,
bygroups(String.Char,
String.Escape, Number.Hex, String.Escape)),
(r'(\?)' + hex_char_re,
bygroups(String.Char, String.Escape)),
(r'(\?)' + escape_char_re,
bygroups(String.Char, String.Escape)),
(r'\?\\?.', String.Char),
# '::' has to go before atoms
(r':::', String.Symbol),
(r'::', Operator),
# atoms
(r':' + special_atom_re, String.Symbol),
(r':' + complex_name_re, String.Symbol),
(r':"', String.Symbol, 'string_double_atom'),
(r":'", String.Symbol, 'string_single_atom'),
# [keywords: ...]
(r'(%s|%s)(:)(?=\s|\n)' % (special_atom_re, complex_name_re),
bygroups(String.Symbol, Punctuation)),
# @attributes
(r'@' + name_re, Name.Attribute),
# identifiers
(name_re, Name),
(r'(%%?)(%s)' % (modname_re,), bygroups(Punctuation, Name.Class)),
# operators and punctuation
(op3_re, Operator),
(op2_re, Operator),
(punctuation_re, Punctuation),
(r'&\d', Name.Entity), # anon func arguments
(op1_re, Operator),
# numbers
(r'0b[01]+', Number.Bin),
(r'0o[0-7]+', Number.Oct),
(r'0x[\da-fA-F]+', Number.Hex),
(r'\d(_?\d)*\.\d(_?\d)*([eE][-+]?\d(_?\d)*)?', Number.Float),
(r'\d(_?\d)*', Number.Integer),
# strings and heredocs
(r'(""")(\s*)', bygroups(String.Heredoc, Whitespace),
'heredoc_double'),
(r"(''')(\s*)$", bygroups(String.Heredoc, Whitespace),
'heredoc_single'),
(r'"', String.Double, 'string_double'),
(r"'", String.Single, 'string_single'),
include('sigils'),
(r'%\{', Punctuation, 'map_key'),
(r'\{', Punctuation, 'tuple'),
],
'heredoc_double': [
(r'^(\s*)(""")', bygroups(Whitespace, String.Heredoc), '#pop'),
include('heredoc_interpol'),
],
'heredoc_single': [
(r"^\s*'''", String.Heredoc, '#pop'),
include('heredoc_interpol'),
],
'heredoc_interpol': [
(r'[^#\\\n]+', String.Heredoc),
include('escapes'),
(r'\\.', String.Heredoc),
(r'\n+', String.Heredoc),
include('interpol'),
],
'heredoc_no_interpol': [
(r'[^\\\n]+', String.Heredoc),
(r'\\.', String.Heredoc),
(r'\n+', Whitespace),
],
'escapes': [
(long_hex_char_re,
bygroups(String.Escape, Number.Hex, String.Escape)),
(hex_char_re, String.Escape),
(escape_char_re, String.Escape),
],
'interpol': [
(r'#\{', String.Interpol, 'interpol_string'),
],
'interpol_string': [
(r'\}', String.Interpol, "#pop"),
include('root')
],
'map_key': [
include('root'),
(r':', Punctuation, 'map_val'),
(r'=>', Punctuation, 'map_val'),
(r'\}', Punctuation, '#pop'),
],
'map_val': [
include('root'),
(r',', Punctuation, '#pop'),
(r'(?=\})', Punctuation, '#pop'),
],
'tuple': [
include('root'),
(r'\}', Punctuation, '#pop'),
],
}
tokens.update(gen_elixir_string_rules('double', '"', String.Double))
tokens.update(gen_elixir_string_rules('single', "'", String.Single))
tokens.update(gen_elixir_string_rules('double_atom', '"', String.Symbol))
tokens.update(gen_elixir_string_rules('single_atom', "'", String.Symbol))
tokens.update(gen_elixir_sigil_rules())
class ElixirConsoleLexer(Lexer):
"""
For Elixir interactive console (iex) output like:
.. sourcecode:: iex
iex> [head | tail] = [1,2,3]
[1,2,3]
iex> head
1
iex> tail
[2,3]
iex> [head | tail]
[1,2,3]
iex> length [head | tail]
3
.. versionadded:: 1.5
"""
name = 'Elixir iex session'
aliases = ['iex']
mimetypes = ['text/x-elixir-shellsession']
_prompt_re = re.compile(r'(iex|\.{3})((?:\([\w@_.]+\))?\d+|\(\d+\))?> ')
def get_tokens_unprocessed(self, text):
exlexer = ElixirLexer(**self.options)
curcode = ''
in_error = False
insertions = []
for match in line_re.finditer(text):
line = match.group()
if line.startswith('** '):
in_error = True
insertions.append((len(curcode),
[(0, Generic.Error, line[:-1])]))
curcode += line[-1:]
else:
m = self._prompt_re.match(line)
if m is not None:
in_error = False
end = m.end()
insertions.append((len(curcode),
[(0, Generic.Prompt, line[:end])]))
curcode += line[end:]
else:
if curcode:
yield from do_insertions(
insertions, exlexer.get_tokens_unprocessed(curcode))
curcode = ''
insertions = []
token = Generic.Error if in_error else Generic.Output
yield match.start(), token, line
if curcode:
yield from do_insertions(
insertions, exlexer.get_tokens_unprocessed(curcode))
| 19,170 | Python | 35.240076 | 96 | 0.452374 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/ul4.py | """
pygments.lexers.ul4
~~~~~~~~~~~~~~~~~~~
Lexer for the UL4 templating language.
More information: https://python.livinglogic.de/UL4.html
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, DelegatingLexer, bygroups, words, include
from pygments.token import Comment, Text, Keyword, String, Number, Literal, \
Name, Other, Operator
from pygments.lexers.web import HtmlLexer, XmlLexer, CssLexer, JavascriptLexer
from pygments.lexers.python import PythonLexer
__all__ = ['UL4Lexer', 'HTMLUL4Lexer', 'XMLUL4Lexer', 'CSSUL4Lexer',
'JavascriptUL4Lexer', 'PythonUL4Lexer']
class UL4Lexer(RegexLexer):
"""
Generic lexer for UL4.
.. versionadded:: 2.12
"""
flags = re.MULTILINE | re.DOTALL
name = 'UL4'
aliases = ['ul4']
filenames = ['*.ul4']
tokens = {
"root": [
(
# Template header without name:
# ``<?ul4?>``
r"(<\?)(\s*)(ul4)(\s*)(\?>)",
bygroups(Comment.Preproc, Text.Whitespace, Keyword,
Text.Whitespace, Comment.Preproc),
),
(
# Template header with name (potentially followed by the signature):
# ``<?ul4 foo(bar=42)?>``
r"(<\?)(\s*)(ul4)(\s*)([a-zA-Z_][a-zA-Z_0-9]*)?",
bygroups(Comment.Preproc, Text.Whitespace, Keyword,
Text.Whitespace, Name.Function),
"ul4", # Switch to "expression" mode
),
(
# Comment:
# ``<?note foobar?>``
r"<\?\s*note\s.*?\?>",
Comment,
),
(
# Template documentation:
# ``<?doc foobar?>``
r"<\?\s*doc\s.*?\?>",
String.Doc,
),
(
# ``<?ignore?>`` tag for commenting out code:
# ``<?ignore?>...<?end ignore?>``
r"<\?\s*ignore\s*\?>",
Comment,
"ignore", # Switch to "ignore" mode
),
(
# ``<?def?>`` tag for defining local templates
# ``<?def foo(bar=42)?>...<?end def?>``
r"(<\?)(\s*)(def)(\s*)([a-zA-Z_][a-zA-Z_0-9]*)?",
bygroups(Comment.Preproc, Text.Whitespace, Keyword,
Text.Whitespace, Name.Function),
"ul4", # Switch to "expression" mode
),
(
# The rest of the supported tags
r"(<\?)(\s*)(printx|print|for|if|elif|else|while|code|renderblocks?|render)\b",
bygroups(Comment.Preproc, Text.Whitespace, Keyword),
"ul4", # Switch to "expression" mode
),
(
# ``<?end?>`` tag for ending ``<?def?>``, ``<?for?>``,
# ``<?if?>``, ``<?while?>``, ``<?renderblock?>`` and
# ``<?renderblocks?>`` blocks.
r"(<\?)(\s*)(end)\b",
bygroups(Comment.Preproc, Text.Whitespace, Keyword),
"end", # Switch to "end tag" mode
),
(
                # ``<?whitespace?>`` tag for configuring whitespace handling
r"(<\?)(\s*)(whitespace)\b",
bygroups(Comment.Preproc, Text.Whitespace, Keyword),
"whitespace", # Switch to "whitespace" mode
),
# Plain text
(r"[^<]+", Other),
(r"<", Other),
],
        # Ignore mode ignores everything up to the matching ``<?end ignore?>`` tag
"ignore": [
# Nested ``<?ignore?>`` tag
(r"<\?\s*ignore\s*\?>", Comment, "#push"),
# ``<?end ignore?>`` tag
(r"<\?\s*end\s+ignore\s*\?>", Comment, "#pop"),
# Everything else
(r"[^<]+", Comment),
(r".", Comment),
],
# UL4 expressions
"ul4": [
# End the tag
(r"\?>", Comment.Preproc, "#pop"),
# Start triple quoted string constant
("'''", String, "string13"),
('"""', String, "string23"),
# Start single quoted string constant
("'", String, "string1"),
('"', String, "string2"),
# Floating point number
(r"\d+\.\d*([eE][+-]?\d+)?", Number.Float),
(r"\.\d+([eE][+-]?\d+)?", Number.Float),
(r"\d+[eE][+-]?\d+", Number.Float),
# Binary integer: ``0b101010``
(r"0[bB][01]+", Number.Bin),
# Octal integer: ``0o52``
(r"0[oO][0-7]+", Number.Oct),
# Hexadecimal integer: ``0x2a``
(r"0[xX][0-9a-fA-F]+", Number.Hex),
# Date or datetime: ``@(2000-02-29)``/``@(2000-02-29T12:34:56.987654)``
(r"@\(\d\d\d\d-\d\d-\d\d(T(\d\d:\d\d(:\d\d(\.\d{6})?)?)?)?\)", Literal.Date),
# Color: ``#fff``, ``#fff8f0`` etc.
(r"#[0-9a-fA-F]{8}", Literal.Color),
(r"#[0-9a-fA-F]{6}", Literal.Color),
(r"#[0-9a-fA-F]{3,4}", Literal.Color),
# Decimal integer: ``42``
(r"\d+", Number.Integer),
# Operators
(r"//|==|!=|>=|<=|<<|>>|\+=|-=|\*=|/=|//=|<<=|>>=|&=|\|=|^=|=|[\[\]{},:*/().~%&|<>^+-]", Operator),
# Keywords
(words(("for", "in", "if", "else", "not", "is", "and", "or"), suffix=r"\b"), Keyword),
# Builtin constants
(words(("None", "False", "True"), suffix=r"\b"), Keyword.Constant),
# Variable names
(r"[a-zA-Z_][a-zA-Z0-9_]*", Name),
# Whitespace
(r"\s+", Text.Whitespace),
],
# ``<?end ...?>`` tag for closing the last open block
"end": [
(r"\?>", Comment.Preproc, "#pop"),
(words(("for", "if", "def", "while", "renderblock", "renderblocks"), suffix=r"\b"), Keyword),
(r"\s+", Text),
],
# Content of the ``<?whitespace ...?>`` tag:
# ``keep``, ``strip`` or ``smart``
"whitespace": [
(r"\?>", Comment.Preproc, "#pop"),
(words(("keep", "strip", "smart"), suffix=r"\b"), Comment.Preproc),
(r"\s+", Text.Whitespace),
],
# Inside a string constant
"stringescapes": [
(r"""\\[\\'"abtnfr]""", String.Escape),
(r"\\x[0-9a-fA-F]{2}", String.Escape),
(r"\\u[0-9a-fA-F]{4}", String.Escape),
(r"\\U[0-9a-fA-F]{8}", String.Escape),
],
# Inside a triple quoted string started with ``'''``
"string13": [
(r"'''", String, "#pop"),
include("stringescapes"),
(r"[^\\']+", String),
(r'.', String),
],
# Inside a triple quoted string started with ``"""``
"string23": [
(r'"""', String, "#pop"),
include("stringescapes"),
(r'[^\\"]+', String),
(r'.', String),
],
# Inside a single quoted string started with ``'``
"string1": [
(r"'", String, "#pop"),
include("stringescapes"),
(r"[^\\']+", String),
(r'.', String),
],
        # Inside a double quoted string started with ``"``
"string2": [
(r'"', String, "#pop"),
include("stringescapes"),
(r'[^\\"]+', String),
(r'.', String),
],
}
class HTMLUL4Lexer(DelegatingLexer):
"""
Lexer for UL4 embedded in HTML.
"""
name = 'HTML+UL4'
aliases = ['html+ul4']
filenames = ['*.htmlul4']
def __init__(self, **options):
super().__init__(HtmlLexer, UL4Lexer, **options)
class XMLUL4Lexer(DelegatingLexer):
"""
Lexer for UL4 embedded in XML.
"""
name = 'XML+UL4'
aliases = ['xml+ul4']
filenames = ['*.xmlul4']
def __init__(self, **options):
super().__init__(XmlLexer, UL4Lexer, **options)
class CSSUL4Lexer(DelegatingLexer):
"""
Lexer for UL4 embedded in CSS.
"""
name = 'CSS+UL4'
aliases = ['css+ul4']
filenames = ['*.cssul4']
def __init__(self, **options):
super().__init__(CssLexer, UL4Lexer, **options)
class JavascriptUL4Lexer(DelegatingLexer):
"""
Lexer for UL4 embedded in Javascript.
"""
name = 'Javascript+UL4'
aliases = ['js+ul4']
filenames = ['*.jsul4']
def __init__(self, **options):
super().__init__(JavascriptLexer, UL4Lexer, **options)
class PythonUL4Lexer(DelegatingLexer):
"""
Lexer for UL4 embedded in Python.
"""
name = 'Python+UL4'
aliases = ['py+ul4']
filenames = ['*.pyul4']
def __init__(self, **options):
super().__init__(PythonLexer, UL4Lexer, **options)
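# Minimal usage sketch (illustrative; assumes Pygments is installed and this
# module is importable as pygments.lexers.ul4):
#
#     from pygments import highlight
#     from pygments.formatters import TerminalFormatter
#     from pygments.lexers.ul4 import UL4Lexer
#
#     print(highlight("<?if user?>Hello, <?print user?>!<?end if?>",
#                     UL4Lexer(), TerminalFormatter()))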
| 8,956 | Python | 32.421642 | 111 | 0.44473 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/futhark.py | """
pygments.lexers.futhark
~~~~~~~~~~~~~~~~~~~~~~~
Lexer for the Futhark language
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, bygroups
from pygments.token import Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Whitespace
from pygments import unistring as uni
__all__ = ['FutharkLexer']
class FutharkLexer(RegexLexer):
"""
A Futhark lexer
.. versionadded:: 2.8
"""
name = 'Futhark'
url = 'https://futhark-lang.org/'
aliases = ['futhark']
filenames = ['*.fut']
mimetypes = ['text/x-futhark']
num_types = ('i8', 'i16', 'i32', 'i64', 'u8', 'u16', 'u32', 'u64', 'f32', 'f64')
other_types = ('bool', )
reserved = ('if', 'then', 'else', 'def', 'let', 'loop', 'in', 'with',
'type', 'type~', 'type^',
'val', 'entry', 'for', 'while', 'do', 'case', 'match',
'include', 'import', 'module', 'open', 'local', 'assert', '_')
ascii = ('NUL', 'SOH', '[SE]TX', 'EOT', 'ENQ', 'ACK',
'BEL', 'BS', 'HT', 'LF', 'VT', 'FF', 'CR', 'S[OI]', 'DLE',
'DC[1-4]', 'NAK', 'SYN', 'ETB', 'CAN',
'EM', 'SUB', 'ESC', '[FGRU]S', 'SP', 'DEL')
num_postfix = r'(%s)?' % '|'.join(num_types)
identifier_re = '[a-zA-Z_][a-zA-Z_0-9\']*'
# opstart_re = '+\-\*/%=\!><\|&\^'
tokens = {
'root': [
(r'--(.*?)$', Comment.Single),
(r'\s+', Whitespace),
(r'\(\)', Punctuation),
(r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
(r'\b(%s)(?!\')\b' % '|'.join(num_types + other_types), Keyword.Type),
# Identifiers
(r'#\[([a-zA-Z_\(\) ]*)\]', Comment.Preproc),
(r'[#!]?(%s\.)*%s' % (identifier_re, identifier_re), Name),
(r'\\', Operator),
(r'[-+/%=!><|&*^][-+/%=!><|&*^.]*', Operator),
(r'[][(),:;`{}?.\'~^]', Punctuation),
# Numbers
(r'0[xX]_*[\da-fA-F](_*[\da-fA-F])*_*[pP][+-]?\d(_*\d)*' + num_postfix,
Number.Float),
(r'0[xX]_*[\da-fA-F](_*[\da-fA-F])*\.[\da-fA-F](_*[\da-fA-F])*'
r'(_*[pP][+-]?\d(_*\d)*)?' + num_postfix, Number.Float),
(r'\d(_*\d)*_*[eE][+-]?\d(_*\d)*' + num_postfix, Number.Float),
(r'\d(_*\d)*\.\d(_*\d)*(_*[eE][+-]?\d(_*\d)*)?' + num_postfix, Number.Float),
(r'0[bB]_*[01](_*[01])*' + num_postfix, Number.Bin),
(r'0[xX]_*[\da-fA-F](_*[\da-fA-F])*' + num_postfix, Number.Hex),
(r'\d(_*\d)*' + num_postfix, Number.Integer),
# Character/String Literals
(r"'", String.Char, 'character'),
(r'"', String, 'string'),
# Special
(r'\[[a-zA-Z_\d]*\]', Keyword.Type),
(r'\(\)', Name.Builtin),
],
'character': [
# Allows multi-chars, incorrectly.
(r"[^\\']'", String.Char, '#pop'),
(r"\\", String.Escape, 'escape'),
("'", String.Char, '#pop'),
],
'string': [
(r'[^\\"]+', String),
(r"\\", String.Escape, 'escape'),
('"', String, '#pop'),
],
'escape': [
(r'[abfnrtv"\'&\\]', String.Escape, '#pop'),
(r'\^[][' + uni.Lu + r'@^_]', String.Escape, '#pop'),
('|'.join(ascii), String.Escape, '#pop'),
(r'o[0-7]+', String.Escape, '#pop'),
(r'x[\da-fA-F]+', String.Escape, '#pop'),
(r'\d+', String.Escape, '#pop'),
(r'(\s+)(\\)', bygroups(Whitespace, String.Escape), '#pop'),
],
}
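    # The numeric rules above accept optionally type-suffixed literals, for
    # example (illustrative): 1_000i64, 2.5e3f64, 0b1010u8 and 0xFFu16.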
| 3,732 | Python | 33.88785 | 89 | 0.416667 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/configs.py | """
pygments.lexers.configs
~~~~~~~~~~~~~~~~~~~~~~~
Lexers for configuration file formats.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import ExtendedRegexLexer, RegexLexer, default, words, \
bygroups, include, using, line_re
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Whitespace, Literal, Error, Generic
from pygments.lexers.shell import BashLexer
from pygments.lexers.data import JsonLexer
__all__ = ['IniLexer', 'RegeditLexer', 'PropertiesLexer', 'KconfigLexer',
'Cfengine3Lexer', 'ApacheConfLexer', 'SquidConfLexer',
'NginxConfLexer', 'LighttpdConfLexer', 'DockerLexer',
'TerraformLexer', 'TermcapLexer', 'TerminfoLexer',
'PkgConfigLexer', 'PacmanConfLexer', 'AugeasLexer', 'TOMLLexer',
'NestedTextLexer', 'SingularityLexer', 'UnixConfigLexer']
class IniLexer(RegexLexer):
"""
Lexer for configuration files in INI style.
"""
name = 'INI'
aliases = ['ini', 'cfg', 'dosini']
filenames = [
'*.ini', '*.cfg', '*.inf', '.editorconfig',
# systemd unit files
# https://www.freedesktop.org/software/systemd/man/systemd.unit.html
'*.service', '*.socket', '*.device', '*.mount', '*.automount',
'*.swap', '*.target', '*.path', '*.timer', '*.slice', '*.scope',
]
mimetypes = ['text/x-ini', 'text/inf']
tokens = {
'root': [
(r'\s+', Whitespace),
(r'[;#].*', Comment.Single),
(r'(\[.*?\])([ \t]*)$', bygroups(Keyword, Whitespace)),
(r'(.*?)([ \t]*)([=:])([ \t]*)([^;#\n]*)(\\)(\s+)',
bygroups(Name.Attribute, Whitespace, Operator, Whitespace, String,
Text, Whitespace),
"value"),
(r'(.*?)([ \t]*)([=:])([ \t]*)([^ ;#\n]*(?: +[^ ;#\n]+)*)',
bygroups(Name.Attribute, Whitespace, Operator, Whitespace, String)),
# standalone option, supported by some INI parsers
(r'(.+?)$', Name.Attribute),
],
'value': [ # line continuation
(r'\s+', Whitespace),
(r'(\s*)(.*)(\\)([ \t]*)',
bygroups(Whitespace, String, Text, Whitespace)),
(r'.*$', String, "#pop"),
],
}
def analyse_text(text):
npos = text.find('\n')
if npos < 3:
return False
return text[0] == '[' and text[npos-1] == ']'
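    # Illustrative behaviour of the heuristic above:
    #
    #     analyse_text("[section]\nkey = value\n")   # -> True
    #     analyse_text("x\n")                        # -> False (first newline too early)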
class RegeditLexer(RegexLexer):
"""
Lexer for Windows Registry files produced by regedit.
.. versionadded:: 1.6
"""
name = 'reg'
url = 'http://en.wikipedia.org/wiki/Windows_Registry#.REG_files'
aliases = ['registry']
filenames = ['*.reg']
mimetypes = ['text/x-windows-registry']
tokens = {
'root': [
(r'Windows Registry Editor.*', Text),
(r'\s+', Whitespace),
(r'[;#].*', Comment.Single),
(r'(\[)(-?)(HKEY_[A-Z_]+)(.*?\])$',
bygroups(Keyword, Operator, Name.Builtin, Keyword)),
# String keys, which obey somewhat normal escaping
(r'("(?:\\"|\\\\|[^"])+")([ \t]*)(=)([ \t]*)',
bygroups(Name.Attribute, Whitespace, Operator, Whitespace),
'value'),
# Bare keys (includes @)
(r'(.*?)([ \t]*)(=)([ \t]*)',
bygroups(Name.Attribute, Whitespace, Operator, Whitespace),
'value'),
],
'value': [
(r'-', Operator, '#pop'), # delete value
(r'(dword|hex(?:\([0-9a-fA-F]\))?)(:)([0-9a-fA-F,]+)',
bygroups(Name.Variable, Punctuation, Number), '#pop'),
# As far as I know, .reg files do not support line continuation.
(r'.+', String, '#pop'),
default('#pop'),
]
}
def analyse_text(text):
return text.startswith('Windows Registry Editor')
class PropertiesLexer(RegexLexer):
"""
Lexer for configuration files in Java's properties format.
Note: trailing whitespace counts as part of the value as per spec
.. versionadded:: 1.4
"""
name = 'Properties'
aliases = ['properties', 'jproperties']
filenames = ['*.properties']
mimetypes = ['text/x-java-properties']
tokens = {
'root': [
(r'\s+', Whitespace),
(r'[!#].*|/{2}.*', Comment.Single),
# search for first separator
(r'([^\\\n]|\\.)*?(?=[ \f\t=:])', Name.Attribute, "separator"),
# empty key
(r'.+?$', Name.Attribute),
],
'separator': [
# search for line continuation escape
(r'([ \f\t]*)([=:]*)([ \f\t]*)(.*(?<!\\)(?:\\{2})*)(\\)(?!\\)$',
bygroups(Whitespace, Operator, Whitespace, String, Text), "value", "#pop"),
(r'([ \f\t]*)([=:]*)([ \f\t]*)(.*)',
bygroups(Whitespace, Operator, Whitespace, String), "#pop"),
],
'value': [ # line continuation
(r'\s+', Whitespace),
# search for line continuation escape
(r'(\s*)(.*(?<!\\)(?:\\{2})*)(\\)(?!\\)([ \t]*)',
bygroups(Whitespace, String, Text, Whitespace)),
(r'.*$', String, "#pop"),
],
}
def _rx_indent(level):
# Kconfig *always* interprets a tab as 8 spaces, so this is the default.
# Edit this if you are in an environment where KconfigLexer gets expanded
# input (tabs expanded to spaces) and the expansion tab width is != 8,
# e.g. in connection with Trac (trac.ini, [mimeviewer], tab_width).
# Value range here is 2 <= {tab_width} <= 8.
tab_width = 8
# Regex matching a given indentation {level}, assuming that indentation is
# a multiple of {tab_width}. In other cases there might be problems.
if tab_width == 2:
space_repeat = '+'
else:
space_repeat = '{1,%d}' % (tab_width - 1)
if level == 1:
level_repeat = ''
else:
level_repeat = '{%s}' % level
return r'(?:\t| %s\t| {%s})%s.*\n' % (space_repeat, tab_width, level_repeat)
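# With the default tab_width of 8 this yields, for example:
#
#     _rx_indent(1) == r'(?:\t| {1,7}\t| {8}).*\n'
#     _rx_indent(2) == r'(?:\t| {1,7}\t| {8}){2}.*\n'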
class KconfigLexer(RegexLexer):
"""
For Linux-style Kconfig files.
.. versionadded:: 1.6
"""
name = 'Kconfig'
aliases = ['kconfig', 'menuconfig', 'linux-config', 'kernel-config']
# Adjust this if new kconfig file names appear in your environment
filenames = ['Kconfig*', '*Config.in*', 'external.in*',
'standard-modules.in']
mimetypes = ['text/x-kconfig']
# No re.MULTILINE, indentation-aware help text needs line-by-line handling
flags = 0
def call_indent(level):
# If indentation >= {level} is detected, enter state 'indent{level}'
return (_rx_indent(level), String.Doc, 'indent%s' % level)
def do_indent(level):
# Print paragraphs of indentation level >= {level} as String.Doc,
# ignoring blank lines. Then return to 'root' state.
return [
(_rx_indent(level), String.Doc),
(r'\s*\n', Text),
default('#pop:2')
]
tokens = {
'root': [
(r'\s+', Whitespace),
(r'#.*?\n', Comment.Single),
(words((
'mainmenu', 'config', 'menuconfig', 'choice', 'endchoice',
'comment', 'menu', 'endmenu', 'visible if', 'if', 'endif',
'source', 'prompt', 'select', 'depends on', 'default',
'range', 'option'), suffix=r'\b'),
Keyword),
(r'(---help---|help)[\t ]*\n', Keyword, 'help'),
(r'(bool|tristate|string|hex|int|defconfig_list|modules|env)\b',
Name.Builtin),
(r'[!=&|]', Operator),
(r'[()]', Punctuation),
(r'[0-9]+', Number.Integer),
(r"'(''|[^'])*'", String.Single),
(r'"(""|[^"])*"', String.Double),
(r'\S+', Text),
],
# Help text is indented, multi-line and ends when a lower indentation
# level is detected.
'help': [
# Skip blank lines after help token, if any
(r'\s*\n', Text),
# Determine the first help line's indentation level heuristically(!).
# Attention: this is not perfect, but works for 99% of "normal"
# indentation schemes up to a max. indentation level of 7.
call_indent(7),
call_indent(6),
call_indent(5),
call_indent(4),
call_indent(3),
call_indent(2),
call_indent(1),
default('#pop'), # for incomplete help sections without text
],
# Handle text for indentation levels 7 to 1
'indent7': do_indent(7),
'indent6': do_indent(6),
'indent5': do_indent(5),
'indent4': do_indent(4),
'indent3': do_indent(3),
'indent2': do_indent(2),
'indent1': do_indent(1),
}
class Cfengine3Lexer(RegexLexer):
"""
Lexer for CFEngine3 policy files.
.. versionadded:: 1.5
"""
name = 'CFEngine3'
url = 'http://cfengine.org'
aliases = ['cfengine3', 'cf3']
filenames = ['*.cf']
mimetypes = []
tokens = {
'root': [
(r'#.*?\n', Comment),
(r'(body)(\s+)(\S+)(\s+)(control)',
bygroups(Keyword, Whitespace, Keyword, Whitespace, Keyword)),
(r'(body|bundle)(\s+)(\S+)(\s+)(\w+)(\()',
bygroups(Keyword, Whitespace, Keyword, Whitespace, Name.Function, Punctuation),
'arglist'),
(r'(body|bundle)(\s+)(\S+)(\s+)(\w+)',
bygroups(Keyword, Whitespace, Keyword, Whitespace, Name.Function)),
(r'(")([^"]+)(")(\s+)(string|slist|int|real)(\s*)(=>)(\s*)',
bygroups(Punctuation, Name.Variable, Punctuation,
Whitespace, Keyword.Type, Whitespace, Operator, Whitespace)),
(r'(\S+)(\s*)(=>)(\s*)',
bygroups(Keyword.Reserved, Whitespace, Operator, Text)),
(r'"', String, 'string'),
(r'(\w+)(\()', bygroups(Name.Function, Punctuation)),
(r'([\w.!&|()]+)(::)', bygroups(Name.Class, Punctuation)),
(r'(\w+)(:)', bygroups(Keyword.Declaration, Punctuation)),
(r'@[{(][^)}]+[})]', Name.Variable),
(r'[(){},;]', Punctuation),
(r'=>', Operator),
(r'->', Operator),
(r'\d+\.\d+', Number.Float),
(r'\d+', Number.Integer),
(r'\w+', Name.Function),
(r'\s+', Whitespace),
],
'string': [
(r'\$[{(]', String.Interpol, 'interpol'),
(r'\\.', String.Escape),
(r'"', String, '#pop'),
(r'\n', String),
(r'.', String),
],
'interpol': [
(r'\$[{(]', String.Interpol, '#push'),
(r'[})]', String.Interpol, '#pop'),
(r'[^${()}]+', String.Interpol),
],
'arglist': [
(r'\)', Punctuation, '#pop'),
(r',', Punctuation),
(r'\w+', Name.Variable),
(r'\s+', Whitespace),
],
}
class ApacheConfLexer(RegexLexer):
"""
Lexer for configuration files following the Apache config file
format.
.. versionadded:: 0.6
"""
name = 'ApacheConf'
aliases = ['apacheconf', 'aconf', 'apache']
filenames = ['.htaccess', 'apache.conf', 'apache2.conf']
mimetypes = ['text/x-apacheconf']
flags = re.MULTILINE | re.IGNORECASE
tokens = {
'root': [
(r'\s+', Whitespace),
(r'#(.*\\\n)+.*$|(#.*?)$', Comment),
(r'(<[^\s>/][^\s>]*)(?:(\s+)(.*))?(>)',
bygroups(Name.Tag, Whitespace, String, Name.Tag)),
(r'(</[^\s>]+)(>)',
bygroups(Name.Tag, Name.Tag)),
(r'[a-z]\w*', Name.Builtin, 'value'),
(r'\.+', Text),
],
'value': [
(r'\\\n', Text),
(r'\n+', Whitespace, '#pop'),
(r'\\', Text),
(r'[^\S\n]+', Whitespace),
(r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number),
(r'\d+', Number),
(r'/([*a-z0-9][*\w./-]+)', String.Other),
(r'(on|off|none|any|all|double|email|dns|min|minimal|'
r'os|productonly|full|emerg|alert|crit|error|warn|'
r'notice|info|debug|registry|script|inetd|standalone|'
r'user|group)\b', Keyword),
(r'"([^"\\]*(?:\\(.|\n)[^"\\]*)*)"', String.Double),
(r'[^\s"\\]+', Text)
],
}
class SquidConfLexer(RegexLexer):
"""
Lexer for squid configuration files.
.. versionadded:: 0.9
"""
name = 'SquidConf'
url = 'http://www.squid-cache.org/'
aliases = ['squidconf', 'squid.conf', 'squid']
filenames = ['squid.conf']
mimetypes = ['text/x-squidconf']
flags = re.IGNORECASE
keywords = (
"access_log", "acl", "always_direct", "announce_host",
"announce_period", "announce_port", "announce_to", "anonymize_headers",
"append_domain", "as_whois_server", "auth_param_basic",
"authenticate_children", "authenticate_program", "authenticate_ttl",
"broken_posts", "buffered_logs", "cache_access_log", "cache_announce",
"cache_dir", "cache_dns_program", "cache_effective_group",
"cache_effective_user", "cache_host", "cache_host_acl",
"cache_host_domain", "cache_log", "cache_mem", "cache_mem_high",
"cache_mem_low", "cache_mgr", "cachemgr_passwd", "cache_peer",
"cache_peer_access", "cache_replacement_policy", "cache_stoplist",
"cache_stoplist_pattern", "cache_store_log", "cache_swap",
"cache_swap_high", "cache_swap_log", "cache_swap_low", "client_db",
"client_lifetime", "client_netmask", "connect_timeout", "coredump_dir",
"dead_peer_timeout", "debug_options", "delay_access", "delay_class",
"delay_initial_bucket_level", "delay_parameters", "delay_pools",
"deny_info", "dns_children", "dns_defnames", "dns_nameservers",
"dns_testnames", "emulate_httpd_log", "err_html_text",
"fake_user_agent", "firewall_ip", "forwarded_for", "forward_snmpd_port",
"fqdncache_size", "ftpget_options", "ftpget_program", "ftp_list_width",
"ftp_passive", "ftp_user", "half_closed_clients", "header_access",
"header_replace", "hierarchy_stoplist", "high_response_time_warning",
"high_page_fault_warning", "hosts_file", "htcp_port", "http_access",
"http_anonymizer", "httpd_accel", "httpd_accel_host",
"httpd_accel_port", "httpd_accel_uses_host_header",
"httpd_accel_with_proxy", "http_port", "http_reply_access",
"icp_access", "icp_hit_stale", "icp_port", "icp_query_timeout",
"ident_lookup", "ident_lookup_access", "ident_timeout",
"incoming_http_average", "incoming_icp_average", "inside_firewall",
"ipcache_high", "ipcache_low", "ipcache_size", "local_domain",
"local_ip", "logfile_rotate", "log_fqdn", "log_icp_queries",
"log_mime_hdrs", "maximum_object_size", "maximum_single_addr_tries",
"mcast_groups", "mcast_icp_query_timeout", "mcast_miss_addr",
"mcast_miss_encode_key", "mcast_miss_port", "memory_pools",
"memory_pools_limit", "memory_replacement_policy", "mime_table",
"min_http_poll_cnt", "min_icp_poll_cnt", "minimum_direct_hops",
"minimum_object_size", "minimum_retry_timeout", "miss_access",
"negative_dns_ttl", "negative_ttl", "neighbor_timeout",
"neighbor_type_domain", "netdb_high", "netdb_low", "netdb_ping_period",
"netdb_ping_rate", "never_direct", "no_cache", "passthrough_proxy",
"pconn_timeout", "pid_filename", "pinger_program", "positive_dns_ttl",
"prefer_direct", "proxy_auth", "proxy_auth_realm", "query_icmp",
"quick_abort", "quick_abort_max", "quick_abort_min",
"quick_abort_pct", "range_offset_limit", "read_timeout",
"redirect_children", "redirect_program",
"redirect_rewrites_host_header", "reference_age",
"refresh_pattern", "reload_into_ims", "request_body_max_size",
"request_size", "request_timeout", "shutdown_lifetime",
"single_parent_bypass", "siteselect_timeout", "snmp_access",
"snmp_incoming_address", "snmp_port", "source_ping", "ssl_proxy",
"store_avg_object_size", "store_objects_per_bucket",
"strip_query_terms", "swap_level1_dirs", "swap_level2_dirs",
"tcp_incoming_address", "tcp_outgoing_address", "tcp_recv_bufsize",
"test_reachability", "udp_hit_obj", "udp_hit_obj_size",
"udp_incoming_address", "udp_outgoing_address", "unique_hostname",
"unlinkd_program", "uri_whitespace", "useragent_log",
"visible_hostname", "wais_relay", "wais_relay_host", "wais_relay_port",
)
opts = (
"proxy-only", "weight", "ttl", "no-query", "default", "round-robin",
"multicast-responder", "on", "off", "all", "deny", "allow", "via",
"parent", "no-digest", "heap", "lru", "realm", "children", "q1", "q2",
"credentialsttl", "none", "disable", "offline_toggle", "diskd",
)
actions = (
"shutdown", "info", "parameter", "server_list", "client_list",
r'squid.conf',
)
actions_stats = (
"objects", "vm_objects", "utilization", "ipcache", "fqdncache", "dns",
"redirector", "io", "reply_headers", "filedescriptors", "netdb",
)
actions_log = ("status", "enable", "disable", "clear")
acls = (
"url_regex", "urlpath_regex", "referer_regex", "port", "proto",
"req_mime_type", "rep_mime_type", "method", "browser", "user", "src",
"dst", "time", "dstdomain", "ident", "snmp_community",
)
ip_re = (
r'(?:(?:(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|0x0*[0-9a-f]{1,2}|'
r'0+[1-3]?[0-7]{0,2})(?:\.(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|'
r'0x0*[0-9a-f]{1,2}|0+[1-3]?[0-7]{0,2})){3})|(?!.*::.*::)(?:(?!:)|'
r':(?=:))(?:[0-9a-f]{0,4}(?:(?<=::)|(?<!::):)){6}(?:[0-9a-f]{0,4}'
r'(?:(?<=::)|(?<!::):)[0-9a-f]{0,4}(?:(?<=::)|(?<!:)|(?<=:)(?<!::):)|'
r'(?:25[0-4]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-4]|2[0-4]\d|1\d\d|'
r'[1-9]?\d)){3}))'
)
tokens = {
'root': [
(r'\s+', Whitespace),
(r'#', Comment, 'comment'),
(words(keywords, prefix=r'\b', suffix=r'\b'), Keyword),
(words(opts, prefix=r'\b', suffix=r'\b'), Name.Constant),
# Actions
(words(actions, prefix=r'\b', suffix=r'\b'), String),
(words(actions_stats, prefix=r'stats/', suffix=r'\b'), String),
(words(actions_log, prefix=r'log/', suffix=r'='), String),
(words(acls, prefix=r'\b', suffix=r'\b'), Keyword),
(ip_re + r'(?:/(?:' + ip_re + r'|\b\d+\b))?', Number.Float),
(r'(?:\b\d+\b(?:-\b\d+|%)?)', Number),
(r'\S+', Text),
],
'comment': [
(r'\s*TAG:.*', String.Escape, '#pop'),
(r'.+', Comment, '#pop'),
default('#pop'),
],
}
class NginxConfLexer(RegexLexer):
"""
Lexer for Nginx configuration files.
.. versionadded:: 0.11
"""
name = 'Nginx configuration file'
url = 'http://nginx.net/'
aliases = ['nginx']
filenames = ['nginx.conf']
mimetypes = ['text/x-nginx-conf']
tokens = {
'root': [
(r'(include)(\s+)([^\s;]+)', bygroups(Keyword, Whitespace, Name)),
(r'[^\s;#]+', Keyword, 'stmt'),
include('base'),
],
'block': [
(r'\}', Punctuation, '#pop:2'),
(r'[^\s;#]+', Keyword.Namespace, 'stmt'),
include('base'),
],
'stmt': [
(r'\{', Punctuation, 'block'),
(r';', Punctuation, '#pop'),
include('base'),
],
'base': [
(r'#.*\n', Comment.Single),
(r'on|off', Name.Constant),
(r'\$[^\s;#()]+', Name.Variable),
(r'([a-z0-9.-]+)(:)([0-9]+)',
bygroups(Name, Punctuation, Number.Integer)),
(r'[a-z-]+/[a-z-+]+', String), # mimetype
# (r'[a-zA-Z._-]+', Keyword),
(r'[0-9]+[km]?\b', Number.Integer),
(r'(~)(\s*)([^\s{]+)', bygroups(Punctuation, Whitespace, String.Regex)),
(r'[:=~]', Punctuation),
(r'[^\s;#{}$]+', String), # catch all
(r'/[^\s;#]*', Name), # pathname
(r'\s+', Whitespace),
(r'[$;]', Text), # leftover characters
],
}
class LighttpdConfLexer(RegexLexer):
"""
Lexer for Lighttpd configuration files.
.. versionadded:: 0.11
"""
name = 'Lighttpd configuration file'
url = 'http://lighttpd.net/'
aliases = ['lighttpd', 'lighty']
filenames = ['lighttpd.conf']
mimetypes = ['text/x-lighttpd-conf']
tokens = {
'root': [
(r'#.*\n', Comment.Single),
(r'/\S*', Name), # pathname
(r'[a-zA-Z._-]+', Keyword),
(r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number),
(r'[0-9]+', Number),
(r'=>|=~|\+=|==|=|\+', Operator),
(r'\$[A-Z]+', Name.Builtin),
(r'[(){}\[\],]', Punctuation),
(r'"([^"\\]*(?:\\.[^"\\]*)*)"', String.Double),
(r'\s+', Whitespace),
],
}
class DockerLexer(RegexLexer):
"""
Lexer for Docker configuration files.
.. versionadded:: 2.0
"""
name = 'Docker'
url = 'http://docker.io'
aliases = ['docker', 'dockerfile']
filenames = ['Dockerfile', '*.docker']
mimetypes = ['text/x-dockerfile-config']
_keywords = (r'(?:MAINTAINER|EXPOSE|WORKDIR|USER|STOPSIGNAL)')
_bash_keywords = (r'(?:RUN|CMD|ENTRYPOINT|ENV|ARG|LABEL|ADD|COPY)')
_lb = r'(?:\s*\\?\s*)' # dockerfile line break regex
flags = re.IGNORECASE | re.MULTILINE
tokens = {
'root': [
(r'#.*', Comment),
(r'(FROM)([ \t]*)(\S*)([ \t]*)(?:(AS)([ \t]*)(\S*))?',
bygroups(Keyword, Whitespace, String, Whitespace, Keyword, Whitespace, String)),
(r'(ONBUILD)(\s+)(%s)' % (_lb,), bygroups(Keyword, Whitespace, using(BashLexer))),
(r'(HEALTHCHECK)(\s+)((%s--\w+=\w+%s)*)' % (_lb, _lb),
bygroups(Keyword, Whitespace, using(BashLexer))),
(r'(VOLUME|ENTRYPOINT|CMD|SHELL)(\s+)(%s)(\[.*?\])' % (_lb,),
bygroups(Keyword, Whitespace, using(BashLexer), using(JsonLexer))),
(r'(LABEL|ENV|ARG)(\s+)((%s\w+=\w+%s)*)' % (_lb, _lb),
bygroups(Keyword, Whitespace, using(BashLexer))),
(r'(%s|VOLUME)\b(\s+)(.*)' % (_keywords), bygroups(Keyword, Whitespace, String)),
(r'(%s)(\s+)' % (_bash_keywords,), bygroups(Keyword, Whitespace)),
(r'(.*\\\n)*.+', using(BashLexer)),
]
}
class TerraformLexer(ExtendedRegexLexer):
"""
    Lexer for Terraform ``.tf`` files.
.. versionadded:: 2.1
"""
name = 'Terraform'
url = 'https://www.terraform.io/'
aliases = ['terraform', 'tf']
filenames = ['*.tf']
mimetypes = ['application/x-tf', 'application/x-terraform']
classes = ('backend', 'data', 'module', 'output', 'provider',
'provisioner', 'resource', 'variable')
classes_re = "({})".format(('|').join(classes))
types = ('string', 'number', 'bool', 'list', 'tuple', 'map', 'set', 'object', 'null')
numeric_functions = ('abs', 'ceil', 'floor', 'log', 'max',
'mix', 'parseint', 'pow', 'signum')
string_functions = ('chomp', 'format', 'formatlist', 'indent',
'join', 'lower', 'regex', 'regexall', 'replace',
'split', 'strrev', 'substr', 'title', 'trim',
'trimprefix', 'trimsuffix', 'trimspace', 'upper'
)
collection_functions = ('alltrue', 'anytrue', 'chunklist', 'coalesce',
'coalescelist', 'compact', 'concat', 'contains',
'distinct', 'element', 'flatten', 'index', 'keys',
'length', 'list', 'lookup', 'map', 'matchkeys',
'merge', 'range', 'reverse', 'setintersection',
'setproduct', 'setsubtract', 'setunion', 'slice',
'sort', 'sum', 'transpose', 'values', 'zipmap'
)
encoding_functions = ('base64decode', 'base64encode', 'base64gzip',
'csvdecode', 'jsondecode', 'jsonencode', 'textdecodebase64',
'textencodebase64', 'urlencode', 'yamldecode', 'yamlencode')
filesystem_functions = ('abspath', 'dirname', 'pathexpand', 'basename',
'file', 'fileexists', 'fileset', 'filebase64', 'templatefile')
date_time_functions = ('formatdate', 'timeadd', 'timestamp')
hash_crypto_functions = ('base64sha256', 'base64sha512', 'bcrypt', 'filebase64sha256',
'filebase64sha512', 'filemd5', 'filesha1', 'filesha256', 'filesha512',
'md5', 'rsadecrypt', 'sha1', 'sha256', 'sha512', 'uuid', 'uuidv5')
ip_network_functions = ('cidrhost', 'cidrnetmask', 'cidrsubnet', 'cidrsubnets')
type_conversion_functions = ('can', 'defaults', 'tobool', 'tolist', 'tomap',
'tonumber', 'toset', 'tostring', 'try')
builtins = numeric_functions + string_functions + collection_functions + encoding_functions +\
filesystem_functions + date_time_functions + hash_crypto_functions + ip_network_functions +\
type_conversion_functions
builtins_re = "({})".format(('|').join(builtins))
def heredoc_callback(self, match, ctx):
# Parse a terraform heredoc
        # match: 1 = <<[-]?, 2 = name, 3 = rest of line
start = match.start(1)
yield start, Operator, match.group(1) # <<[-]?
yield match.start(2), String.Delimiter, match.group(2) # heredoc name
ctx.pos = match.start(3)
ctx.end = match.end(3)
yield ctx.pos, String.Heredoc, match.group(3)
ctx.pos = match.end()
hdname = match.group(2)
tolerant = True # leading whitespace is always accepted
lines = []
for match in line_re.finditer(ctx.text, ctx.pos):
if tolerant:
check = match.group().strip()
else:
check = match.group().rstrip()
if check == hdname:
for amatch in lines:
yield amatch.start(), String.Heredoc, amatch.group()
yield match.start(), String.Delimiter, match.group()
ctx.pos = match.end()
break
else:
lines.append(match)
else:
# end of heredoc not found -- error!
for amatch in lines:
yield amatch.start(), Error, amatch.group()
ctx.end = len(ctx.text)
tokens = {
'root': [
include('basic'),
include('whitespace'),
# Strings
(r'(".*")', bygroups(String.Double)),
# Constants
(words(('true', 'false'), prefix=r'\b', suffix=r'\b'), Name.Constant),
# Types
(words(types, prefix=r'\b', suffix=r'\b'), Keyword.Type),
include('identifier'),
include('punctuation'),
(r'[0-9]+', Number),
],
'basic': [
(r'\s*/\*', Comment.Multiline, 'comment'),
(r'\s*(#|//).*\n', Comment.Single),
include('whitespace'),
# e.g. terraform {
# e.g. egress {
(r'(\s*)([0-9a-zA-Z-_]+)(\s*)(=?)(\s*)(\{)',
bygroups(Whitespace, Name.Builtin, Whitespace, Operator, Whitespace, Punctuation)),
# Assignment with attributes, e.g. something = ...
(r'(\s*)([0-9a-zA-Z-_]+)(\s*)(=)(\s*)',
bygroups(Whitespace, Name.Attribute, Whitespace, Operator, Whitespace)),
# Assignment with environment variables and similar, e.g. "something" = ...
# or key value assignment, e.g. "SlotName" : ...
(r'(\s*)("\S+")(\s*)([=:])(\s*)',
bygroups(Whitespace, Literal.String.Double, Whitespace, Operator, Whitespace)),
# Functions, e.g. jsonencode(element("value"))
(builtins_re + r'(\()', bygroups(Name.Function, Punctuation)),
# List of attributes, e.g. ignore_changes = [last_modified, filename]
(r'(\[)([a-z_,\s]+)(\])', bygroups(Punctuation, Name.Builtin, Punctuation)),
# e.g. resource "aws_security_group" "allow_tls" {
# e.g. backend "consul" {
(classes_re + r'(\s+)("[0-9a-zA-Z-_]+")?(\s*)("[0-9a-zA-Z-_]+")(\s+)(\{)',
bygroups(Keyword.Reserved, Whitespace, Name.Class, Whitespace, Name.Variable, Whitespace, Punctuation)),
# here-doc style delimited strings
(r'(<<-?)\s*([a-zA-Z_]\w*)(.*?\n)', heredoc_callback),
],
'identifier': [
(r'\b(var\.[0-9a-zA-Z-_\.\[\]]+)\b', bygroups(Name.Variable)),
(r'\b([0-9a-zA-Z-_\[\]]+\.[0-9a-zA-Z-_\.\[\]]+)\b',
bygroups(Name.Variable)),
],
'punctuation': [
(r'[\[\]()\{\},.?:!=]', Punctuation),
],
'comment': [
(r'[^*/]', Comment.Multiline),
(r'/\*', Comment.Multiline, '#push'),
(r'\*/', Comment.Multiline, '#pop'),
(r'[*/]', Comment.Multiline)
],
'whitespace': [
(r'\n', Whitespace),
(r'\s+', Whitespace),
(r'(\\)(\n)', bygroups(Text, Whitespace)),
],
}
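# --- Added illustrative sketch; not part of the original Pygments source. ---
# heredoc_callback above yields Operator/String.Delimiter for the '<<EOF'
# line and String.Heredoc for every body line until the terminator. A small,
# hypothetical check of that behaviour (sample text and names are invented):
def _example_terraform_heredoc():
    from pygments.token import String
    sample = 'user_data = <<EOF\necho "hello"\nEOF\n'
    tokens = list(TerraformLexer().get_tokens(sample))
    body = [value for tokentype, value in tokens if tokentype in String.Heredoc]
    print(body)  # expected to contain the 'echo "hello"' line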
class TermcapLexer(RegexLexer):
"""
Lexer for termcap database source.
This is very simple and minimal.
.. versionadded:: 2.1
"""
name = 'Termcap'
aliases = ['termcap']
filenames = ['termcap', 'termcap.src']
mimetypes = []
# NOTE:
# * multiline with trailing backslash
# * separator is ':'
# * to embed colon as data, we must use \072
    # * space after separator is not allowed (maybe)
tokens = {
'root': [
(r'^#.*', Comment),
(r'^[^\s#:|]+', Name.Tag, 'names'),
(r'\s+', Whitespace),
],
'names': [
(r'\n', Whitespace, '#pop'),
(r':', Punctuation, 'defs'),
(r'\|', Punctuation),
(r'[^:|]+', Name.Attribute),
],
'defs': [
(r'(\\)(\n[ \t]*)', bygroups(Text, Whitespace)),
(r'\n[ \t]*', Whitespace, '#pop:2'),
(r'(#)([0-9]+)', bygroups(Operator, Number)),
(r'=', Operator, 'data'),
(r':', Punctuation),
(r'[^\s:=#]+', Name.Class),
],
'data': [
(r'\\072', Literal),
(r':', Punctuation, '#pop'),
(r'[^:\\]+', Literal), # for performance
(r'.', Literal),
],
}
class TerminfoLexer(RegexLexer):
"""
Lexer for terminfo database source.
This is very simple and minimal.
.. versionadded:: 2.1
"""
name = 'Terminfo'
aliases = ['terminfo']
filenames = ['terminfo', 'terminfo.src']
mimetypes = []
# NOTE:
# * multiline with leading whitespace
# * separator is ','
# * to embed comma as data, we can use \,
# * space after separator is allowed
tokens = {
'root': [
(r'^#.*$', Comment),
(r'^[^\s#,|]+', Name.Tag, 'names'),
(r'\s+', Whitespace),
],
'names': [
(r'\n', Whitespace, '#pop'),
(r'(,)([ \t]*)', bygroups(Punctuation, Whitespace), 'defs'),
(r'\|', Punctuation),
(r'[^,|]+', Name.Attribute),
],
'defs': [
(r'\n[ \t]+', Whitespace),
(r'\n', Whitespace, '#pop:2'),
(r'(#)([0-9]+)', bygroups(Operator, Number)),
(r'=', Operator, 'data'),
(r'(,)([ \t]*)', bygroups(Punctuation, Whitespace)),
(r'[^\s,=#]+', Name.Class),
],
'data': [
(r'\\[,\\]', Literal),
(r'(,)([ \t]*)', bygroups(Punctuation, Whitespace), '#pop'),
(r'[^\\,]+', Literal), # for performance
(r'.', Literal),
],
}
class PkgConfigLexer(RegexLexer):
"""
Lexer for pkg-config
(see also `manual page <http://linux.die.net/man/1/pkg-config>`_).
.. versionadded:: 2.1
"""
name = 'PkgConfig'
url = 'http://www.freedesktop.org/wiki/Software/pkg-config/'
aliases = ['pkgconfig']
filenames = ['*.pc']
mimetypes = []
tokens = {
'root': [
(r'#.*$', Comment.Single),
# variable definitions
(r'^(\w+)(=)', bygroups(Name.Attribute, Operator)),
# keyword lines
(r'^([\w.]+)(:)',
bygroups(Name.Tag, Punctuation), 'spvalue'),
# variable references
include('interp'),
# fallback
(r'\s+', Whitespace),
(r'[^${}#=:\n.]+', Text),
(r'.', Text),
],
'interp': [
# you can escape literal "$" as "$$"
(r'\$\$', Text),
# variable references
(r'\$\{', String.Interpol, 'curly'),
],
'curly': [
(r'\}', String.Interpol, '#pop'),
(r'\w+', Name.Attribute),
],
'spvalue': [
include('interp'),
(r'#.*$', Comment.Single, '#pop'),
(r'\n', Whitespace, '#pop'),
# fallback
(r'\s+', Whitespace),
(r'[^${}#\n\s]+', Text),
(r'.', Text),
],
}
class PacmanConfLexer(RegexLexer):
"""
Lexer for pacman.conf.
    Actually, IniLexer works almost fine for this format,
    but it yields error tokens. This is because pacman.conf has
    a form without assignment, like:
UseSyslog
Color
TotalDownload
CheckSpace
VerbosePkgLists
These are flags to switch on.
.. versionadded:: 2.1
"""
name = 'PacmanConf'
url = 'https://www.archlinux.org/pacman/pacman.conf.5.html'
aliases = ['pacmanconf']
filenames = ['pacman.conf']
mimetypes = []
tokens = {
'root': [
# comment
(r'#.*$', Comment.Single),
# section header
(r'^(\s*)(\[.*?\])(\s*)$', bygroups(Whitespace, Keyword, Whitespace)),
# variable definitions
# (Leading space is allowed...)
(r'(\w+)(\s*)(=)',
bygroups(Name.Attribute, Whitespace, Operator)),
# flags to on
(r'^(\s*)(\w+)(\s*)$',
bygroups(Whitespace, Name.Attribute, Whitespace)),
# built-in special values
(words((
'$repo', # repository
'$arch', # architecture
'%o', # outfile
'%u', # url
), suffix=r'\b'),
Name.Variable),
# fallback
(r'\s+', Whitespace),
(r'.', Text),
],
}
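# --- Added illustrative sketch; not part of the original Pygments source. ---
# As the docstring notes, pacman.conf mixes 'key = value' lines with bare
# flags such as 'Color'. The helper below (an invented name, with an invented
# sample) prints the tokens for both forms:
def _example_pacmanconf_tokens():
    sample = '[options]\nHoldPkg = pacman glibc\nColor\n'
    for tokentype, value in PacmanConfLexer().get_tokens(sample):
        print(tokentype, repr(value))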
class AugeasLexer(RegexLexer):
"""
Lexer for Augeas.
.. versionadded:: 2.4
"""
name = 'Augeas'
url = 'http://augeas.net'
aliases = ['augeas']
filenames = ['*.aug']
tokens = {
'root': [
(r'(module)(\s*)([^\s=]+)', bygroups(Keyword.Namespace, Whitespace, Name.Namespace)),
(r'(let)(\s*)([^\s=]+)', bygroups(Keyword.Declaration, Whitespace, Name.Variable)),
(r'(del|store|value|counter|seq|key|label|autoload|incl|excl|transform|test|get|put)(\s+)', bygroups(Name.Builtin, Whitespace)),
(r'(\()([^:]+)(\:)(unit|string|regexp|lens|tree|filter)(\))', bygroups(Punctuation, Name.Variable, Punctuation, Keyword.Type, Punctuation)),
(r'\(\*', Comment.Multiline, 'comment'),
(r'[*+\-.;=?|]', Operator),
(r'[()\[\]{}]', Operator),
(r'"', String.Double, 'string'),
(r'\/', String.Regex, 'regex'),
(r'([A-Z]\w*)(\.)(\w+)', bygroups(Name.Namespace, Punctuation, Name.Variable)),
(r'.', Name.Variable),
(r'\s+', Whitespace),
],
'string': [
(r'\\.', String.Escape),
(r'[^"]', String.Double),
(r'"', String.Double, '#pop'),
],
'regex': [
(r'\\.', String.Escape),
(r'[^/]', String.Regex),
(r'\/', String.Regex, '#pop'),
],
'comment': [
(r'[^*)]', Comment.Multiline),
(r'\(\*', Comment.Multiline, '#push'),
(r'\*\)', Comment.Multiline, '#pop'),
(r'[)*]', Comment.Multiline)
],
}
class TOMLLexer(RegexLexer):
"""
Lexer for TOML, a simple language
for config files.
.. versionadded:: 2.4
"""
name = 'TOML'
url = 'https://github.com/toml-lang/toml'
aliases = ['toml']
filenames = ['*.toml', 'Pipfile', 'poetry.lock']
tokens = {
'root': [
# Table
(r'^(\s*)(\[.*?\])$', bygroups(Whitespace, Keyword)),
# Basics, comments, strings
(r'[ \t]+', Whitespace),
(r'\n', Whitespace),
(r'#.*?$', Comment.Single),
# Basic string
(r'"(\\\\|\\[^\\]|[^"\\])*"', String),
# Literal string
(r'\'\'\'(.*)\'\'\'', String),
(r'\'[^\']*\'', String),
(r'(true|false)$', Keyword.Constant),
(r'[a-zA-Z_][\w\-]*', Name),
# Datetime
# TODO this needs to be expanded, as TOML is rather flexible:
# https://github.com/toml-lang/toml#offset-date-time
(r'\d{4}-\d{2}-\d{2}(?:T| )\d{2}:\d{2}:\d{2}(?:Z|[-+]\d{2}:\d{2})', Number.Integer),
# Numbers
(r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?', Number.Float),
(r'\d+[eE][+-]?[0-9]+j?', Number.Float),
# Handle +-inf, +-infinity, +-nan
(r'[+-]?(?:(inf(?:inity)?)|nan)', Number.Float),
(r'[+-]?\d+', Number.Integer),
# Punctuation
(r'[]{}:(),;[]', Punctuation),
(r'\.', Punctuation),
# Operators
(r'=', Operator)
]
}
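# --- Added illustrative sketch; not part of the original Pygments source. ---
# A short sanity check of the TOML rules above: the table header, a string
# value and an integer each hit a different rule. The sample data is invented.
def _example_toml_tokens():
    sample = '[server]\nhost = "localhost"\nport = 8080\n'
    for tokentype, value in TOMLLexer().get_tokens(sample):
        print(tokentype, repr(value))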
class NestedTextLexer(RegexLexer):
"""
    Lexer for NestedText, a human-friendly data
format.
.. versionadded:: 2.9
"""
name = 'NestedText'
url = 'https://nestedtext.org'
aliases = ['nestedtext', 'nt']
filenames = ['*.nt']
_quoted_dict_item = r'^(\s*)({0})(.*?)({0}: ?)(.*?)(\s*)$'
tokens = {
'root': [
(r'^(\s*)(#.*?)$', bygroups(Whitespace, Comment)),
(r'^(\s*)(>)( ?)(.*?)(\s*)$', bygroups(Whitespace, Punctuation, Whitespace, String, Whitespace)),
(r'^(\s*)(-)( ?)(.*?)(\s*)$', bygroups(Whitespace, Punctuation, Whitespace, String, Whitespace)),
(_quoted_dict_item.format("'"), bygroups(Whitespace, Punctuation, Name, Punctuation, String, Whitespace)),
(_quoted_dict_item.format('"'), bygroups(Whitespace, Punctuation, Name, Punctuation, String, Whitespace)),
(r'^(\s*)(.*?)(:)( ?)(.*?)(\s*)$', bygroups(Whitespace, Name, Punctuation, Whitespace, String, Whitespace)),
],
}
class SingularityLexer(RegexLexer):
"""
Lexer for Singularity definition files.
.. versionadded:: 2.6
"""
name = 'Singularity'
url = 'https://www.sylabs.io/guides/3.0/user-guide/definition_files.html'
aliases = ['singularity']
filenames = ['*.def', 'Singularity']
flags = re.IGNORECASE | re.MULTILINE | re.DOTALL
_headers = r'^(\s*)(bootstrap|from|osversion|mirrorurl|include|registry|namespace|includecmd)(:)'
_section = r'^(%(?:pre|post|setup|environment|help|labels|test|runscript|files|startscript))(\s*)'
_appsect = r'^(%app(?:install|help|run|labels|env|test|files))(\s*)'
tokens = {
'root': [
(_section, bygroups(Generic.Heading, Whitespace), 'script'),
(_appsect, bygroups(Generic.Heading, Whitespace), 'script'),
(_headers, bygroups(Whitespace, Keyword, Text)),
(r'\s*#.*?\n', Comment),
(r'\b(([0-9]+\.?[0-9]*)|(\.[0-9]+))\b', Number),
(r'[ \t]+', Whitespace),
(r'(?!^\s*%).', Text),
],
'script': [
(r'(.+?(?=^\s*%))|(.*)', using(BashLexer), '#pop'),
],
}
def analyse_text(text):
"""This is a quite simple script file, but there are a few keywords
which seem unique to this language."""
result = 0
if re.search(r'\b(?:osversion|includecmd|mirrorurl)\b', text, re.IGNORECASE):
result += 0.5
if re.search(SingularityLexer._section[1:], text):
result += 0.49
return result
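# --- Added illustrative sketch; not part of the original Pygments source. ---
# analyse_text above adds 0.5 for header keywords (osversion/includecmd/
# mirrorurl) and 0.49 for a %section, which lets guess_lexer() prefer this
# lexer for definition files. The sample below is invented:
def _example_singularity_score():
    sample = 'Bootstrap: yum\nOSVersion: 7\nMirrorURL: http://example.invalid/\n\n%post\n    echo hi\n'
    print(SingularityLexer.analyse_text(sample))  # expected: 0.99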
class UnixConfigLexer(RegexLexer):
"""
Lexer for Unix/Linux config files using colon-separated values, e.g.
* ``/etc/group``
* ``/etc/passwd``
* ``/etc/shadow``
.. versionadded:: 2.12
"""
name = 'Unix/Linux config files'
aliases = ['unixconfig', 'linuxconfig']
filenames = []
tokens = {
'root': [
(r'^#.*', Comment),
(r'\n', Whitespace),
(r':', Punctuation),
(r'[0-9]+', Number),
(r'((?!\n)[a-zA-Z0-9\_\-\s\(\),]){2,}', Text),
(r'[^:\n]+', String),
],
}
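# --- Added illustrative sketch; not part of the original Pygments source. ---
# Colon-separated records such as /etc/passwd entries split into Number, Text
# and String runs under the rules above. The record below is made up:
def _example_unixconfig_tokens():
    sample = 'alice:x:1000:1000:Alice Example:/home/alice:/bin/bash\n'
    for tokentype, value in UnixConfigLexer().get_tokens(sample):
        print(tokentype, repr(value))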
| 41,823 | Python | 34.594894 | 152 | 0.485331 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/_asy_builtins.py | """
pygments.lexers._asy_builtins
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This file contains the asy-function names and asy-variable names of
Asymptote.
Do not edit the ASYFUNCNAME and ASYVARNAME sets by hand.
TODO: perl/python script in Asymptote SVN similar to asy-list.pl but only
for function and variable names.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
ASYFUNCNAME = {
'AND',
'Arc',
'ArcArrow',
'ArcArrows',
'Arrow',
'Arrows',
'Automatic',
'AvantGarde',
'BBox',
'BWRainbow',
'BWRainbow2',
'Bar',
'Bars',
'BeginArcArrow',
'BeginArrow',
'BeginBar',
'BeginDotMargin',
'BeginMargin',
'BeginPenMargin',
'Blank',
'Bookman',
'Bottom',
'BottomTop',
'Bounds',
'Break',
'Broken',
'BrokenLog',
'Ceil',
'Circle',
'CircleBarIntervalMarker',
'Cos',
'Courier',
'CrossIntervalMarker',
'DefaultFormat',
'DefaultLogFormat',
'Degrees',
'Dir',
'DotMargin',
'DotMargins',
'Dotted',
'Draw',
'Drawline',
'Embed',
'EndArcArrow',
'EndArrow',
'EndBar',
'EndDotMargin',
'EndMargin',
'EndPenMargin',
'Fill',
'FillDraw',
'Floor',
'Format',
'Full',
'Gaussian',
'Gaussrand',
'Gaussrandpair',
'Gradient',
'Grayscale',
'Helvetica',
'Hermite',
'HookHead',
'InOutTicks',
'InTicks',
'J',
'Label',
'Landscape',
'Left',
'LeftRight',
'LeftTicks',
'Legend',
'Linear',
'Link',
'Log',
'LogFormat',
'Margin',
'Margins',
'Mark',
'MidArcArrow',
'MidArrow',
'NOT',
'NewCenturySchoolBook',
'NoBox',
'NoMargin',
'NoModifier',
'NoTicks',
'NoTicks3',
'NoZero',
'NoZeroFormat',
'None',
'OR',
'OmitFormat',
'OmitTick',
'OutTicks',
'Ox',
'Oy',
'Palatino',
'PaletteTicks',
'Pen',
'PenMargin',
'PenMargins',
'Pentype',
'Portrait',
'RadialShade',
'Rainbow',
'Range',
'Relative',
'Right',
'RightTicks',
'Rotate',
'Round',
'SQR',
'Scale',
'ScaleX',
'ScaleY',
'ScaleZ',
'Seascape',
'Shift',
'Sin',
'Slant',
'Spline',
'StickIntervalMarker',
'Straight',
'Symbol',
'Tan',
'TeXify',
'Ticks',
'Ticks3',
'TildeIntervalMarker',
'TimesRoman',
'Top',
'TrueMargin',
'UnFill',
'UpsideDown',
'Wheel',
'X',
'XEquals',
'XOR',
'XY',
'XYEquals',
'XYZero',
'XYgrid',
'XZEquals',
'XZZero',
'XZero',
'XZgrid',
'Y',
'YEquals',
'YXgrid',
'YZ',
'YZEquals',
'YZZero',
'YZero',
'YZgrid',
'Z',
'ZX',
'ZXgrid',
'ZYgrid',
'ZapfChancery',
'ZapfDingbats',
'_cputime',
'_draw',
'_eval',
'_image',
'_labelpath',
'_projection',
'_strokepath',
'_texpath',
'aCos',
'aSin',
'aTan',
'abort',
'abs',
'accel',
'acos',
'acosh',
'acot',
'acsc',
'add',
'addArrow',
'addMargins',
'addSaveFunction',
'addnode',
'addnodes',
'addpenarc',
'addpenline',
'addseg',
'adjust',
'alias',
'align',
'all',
'altitude',
'angabscissa',
'angle',
'angpoint',
'animate',
'annotate',
'anticomplementary',
'antipedal',
'apply',
'approximate',
'arc',
'arcarrowsize',
'arccircle',
'arcdir',
'arcfromcenter',
'arcfromfocus',
'arclength',
'arcnodesnumber',
'arcpoint',
'arcsubtended',
'arcsubtendedcenter',
'arctime',
'arctopath',
'array',
'arrow',
'arrow2',
'arrowbase',
'arrowbasepoints',
'arrowsize',
'asec',
'asin',
'asinh',
'ask',
'assert',
'asy',
'asycode',
'asydir',
'asyfigure',
'asyfilecode',
'asyinclude',
'asywrite',
'atan',
'atan2',
'atanh',
'atbreakpoint',
'atexit',
'atime',
'attach',
'attract',
'atupdate',
'autoformat',
'autoscale',
'autoscale3',
'axes',
'axes3',
'axialshade',
'axis',
'axiscoverage',
'azimuth',
'babel',
'background',
'bangles',
'bar',
'barmarksize',
'barsize',
'basealign',
'baseline',
'bbox',
'beep',
'begin',
'beginclip',
'begingroup',
'beginpoint',
'between',
'bevel',
'bezier',
'bezierP',
'bezierPP',
'bezierPPP',
'bezulate',
'bibliography',
'bibliographystyle',
'binarytree',
'binarytreeNode',
'binomial',
'binput',
'bins',
'bisector',
'bisectorpoint',
'blend',
'boutput',
'box',
'bqe',
'breakpoint',
'breakpoints',
'brick',
'buildRestoreDefaults',
'buildRestoreThunk',
'buildcycle',
'bulletcolor',
'canonical',
'canonicalcartesiansystem',
'cartesiansystem',
'case1',
'case2',
'case3',
'cbrt',
'cd',
'ceil',
'center',
'centerToFocus',
'centroid',
'cevian',
'change2',
'changecoordsys',
'checkSegment',
'checkconditionlength',
'checker',
'checklengths',
'checkposition',
'checktriangle',
'choose',
'circle',
'circlebarframe',
'circlemarkradius',
'circlenodesnumber',
'circumcenter',
'circumcircle',
'clamped',
'clear',
'clip',
'clipdraw',
'close',
'cmyk',
'code',
'colatitude',
'collect',
'collinear',
'color',
'colorless',
'colors',
'colorspace',
'comma',
'compassmark',
'complement',
'complementary',
'concat',
'concurrent',
'cone',
'conic',
'conicnodesnumber',
'conictype',
'conj',
'connect',
'containmentTree',
'contains',
'contour',
'contour3',
'controlSpecifier',
'convert',
'coordinates',
'coordsys',
'copy',
'cos',
'cosh',
'cot',
'countIntersections',
'cputime',
'crop',
'cropcode',
'cross',
'crossframe',
'crosshatch',
'crossmarksize',
'csc',
'cubicroots',
'curabscissa',
'curlSpecifier',
'curpoint',
'currentarrow',
'currentexitfunction',
'currentmomarrow',
'currentpolarconicroutine',
'curve',
'cut',
'cutafter',
'cutbefore',
'cyclic',
'cylinder',
'debugger',
'deconstruct',
'defaultdir',
'defaultformat',
'defaultpen',
'defined',
'degenerate',
'degrees',
'delete',
'deletepreamble',
'determinant',
'diagonal',
'diamond',
'diffdiv',
'dir',
'dirSpecifier',
'dirtime',
'display',
'distance',
'divisors',
'do_overpaint',
'dot',
'dotframe',
'dotsize',
'downcase',
'draw',
'drawAll',
'drawDoubleLine',
'drawFermion',
'drawGhost',
'drawGluon',
'drawMomArrow',
'drawPhoton',
'drawScalar',
'drawVertex',
'drawVertexBox',
'drawVertexBoxO',
'drawVertexBoxX',
'drawVertexO',
'drawVertexOX',
'drawVertexTriangle',
'drawVertexTriangleO',
'drawVertexX',
'drawarrow',
'drawarrow2',
'drawline',
'drawtick',
'duplicate',
'elle',
'ellipse',
'ellipsenodesnumber',
'embed',
'embed3',
'empty',
'enclose',
'end',
'endScript',
'endclip',
'endgroup',
'endl',
'endpoint',
'endpoints',
'eof',
'eol',
'equation',
'equations',
'erase',
'erasestep',
'erf',
'erfc',
'error',
'errorbar',
'errorbars',
'eval',
'excenter',
'excircle',
'exit',
'exitXasyMode',
'exitfunction',
'exp',
'expfactors',
'expi',
'expm1',
'exradius',
'extend',
'extension',
'extouch',
'fabs',
'factorial',
'fermat',
'fft',
'fhorner',
'figure',
'file',
'filecode',
'fill',
'filldraw',
'filloutside',
'fillrule',
'filltype',
'find',
'finite',
'finiteDifferenceJacobian',
'firstcut',
'firstframe',
'fit',
'fit2',
'fixedscaling',
'floor',
'flush',
'fmdefaults',
'fmod',
'focusToCenter',
'font',
'fontcommand',
'fontsize',
'foot',
'format',
'frac',
'frequency',
'fromCenter',
'fromFocus',
'fspline',
'functionshade',
'gamma',
'generate_random_backtrace',
'generateticks',
'gergonne',
'getc',
'getint',
'getpair',
'getreal',
'getstring',
'gettriple',
'gluon',
'gouraudshade',
'graph',
'graphic',
'gray',
'grestore',
'grid',
'grid3',
'gsave',
'halfbox',
'hatch',
'hdiffdiv',
'hermite',
'hex',
'histogram',
'history',
'hline',
'hprojection',
'hsv',
'hyperbola',
'hyperbolanodesnumber',
'hyperlink',
'hypot',
'identity',
'image',
'incenter',
'incentral',
'incircle',
'increasing',
'incrementposition',
'indexedTransform',
'indexedfigure',
'initXasyMode',
'initdefaults',
'input',
'inradius',
'insert',
'inside',
'integrate',
'interactive',
'interior',
'interp',
'interpolate',
'intersect',
'intersection',
'intersectionpoint',
'intersectionpoints',
'intersections',
'intouch',
'inverse',
'inversion',
'invisible',
'is3D',
'isDuplicate',
'isogonal',
'isogonalconjugate',
'isotomic',
'isotomicconjugate',
'isparabola',
'italic',
'item',
'key',
'kurtosis',
'kurtosisexcess',
'label',
'labelaxis',
'labelmargin',
'labelpath',
'labels',
'labeltick',
'labelx',
'labelx3',
'labely',
'labely3',
'labelz',
'labelz3',
'lastcut',
'latex',
'latitude',
'latticeshade',
'layer',
'layout',
'ldexp',
'leastsquares',
'legend',
'legenditem',
'length',
'lift',
'light',
'limits',
'line',
'linear',
'linecap',
'lineinversion',
'linejoin',
'linemargin',
'lineskip',
'linetype',
'linewidth',
'link',
'list',
'lm_enorm',
'lm_evaluate_default',
'lm_lmdif',
'lm_lmpar',
'lm_minimize',
'lm_print_default',
'lm_print_quiet',
'lm_qrfac',
'lm_qrsolv',
'locale',
'locate',
'locatefile',
'location',
'log',
'log10',
'log1p',
'logaxiscoverage',
'longitude',
'lookup',
'magnetize',
'makeNode',
'makedraw',
'makepen',
'map',
'margin',
'markangle',
'markangleradius',
'markanglespace',
'markarc',
'marker',
'markinterval',
'marknodes',
'markrightangle',
'markuniform',
'mass',
'masscenter',
'massformat',
'math',
'max',
'max3',
'maxbezier',
'maxbound',
'maxcoords',
'maxlength',
'maxratio',
'maxtimes',
'mean',
'medial',
'median',
'midpoint',
'min',
'min3',
'minbezier',
'minbound',
'minipage',
'minratio',
'mintimes',
'miterlimit',
'momArrowPath',
'momarrowsize',
'monotonic',
'multifigure',
'nativeformat',
'natural',
'needshipout',
'newl',
'newpage',
'newslide',
'newton',
'newtree',
'nextframe',
'nextnormal',
'nextpage',
'nib',
'nodabscissa',
'none',
'norm',
'normalvideo',
'notaknot',
'nowarn',
'numberpage',
'nurb',
'object',
'offset',
'onpath',
'opacity',
'opposite',
'orientation',
'orig_circlenodesnumber',
'orig_circlenodesnumber1',
'orig_draw',
'orig_ellipsenodesnumber',
'orig_ellipsenodesnumber1',
'orig_hyperbolanodesnumber',
'orig_parabolanodesnumber',
'origin',
'orthic',
'orthocentercenter',
'outformat',
'outline',
'outprefix',
'output',
'overloadedMessage',
'overwrite',
'pack',
'pad',
'pairs',
'palette',
'parabola',
'parabolanodesnumber',
'parallel',
'partialsum',
'path',
'path3',
'pattern',
'pause',
'pdf',
'pedal',
'periodic',
'perp',
'perpendicular',
'perpendicularmark',
'phantom',
'phi1',
'phi2',
'phi3',
'photon',
'piecewisestraight',
'point',
'polar',
'polarconicroutine',
'polargraph',
'polygon',
'postcontrol',
'postscript',
'pow10',
'ppoint',
'prc',
'prc0',
'precision',
'precontrol',
'prepend',
'print_random_addresses',
'project',
'projection',
'purge',
'pwhermite',
'quadrant',
'quadraticroots',
'quantize',
'quarticroots',
'quotient',
'radialshade',
'radians',
'radicalcenter',
'radicalline',
'radius',
'rand',
'randompath',
'rd',
'readline',
'realmult',
'realquarticroots',
'rectangle',
'rectangular',
'rectify',
'reflect',
'relabscissa',
'relative',
'relativedistance',
'reldir',
'relpoint',
'reltime',
'remainder',
'remark',
'removeDuplicates',
'rename',
'replace',
'report',
'resetdefaultpen',
'restore',
'restoredefaults',
'reverse',
'reversevideo',
'rf',
'rfind',
'rgb',
'rgba',
'rgbint',
'rms',
'rotate',
'rotateO',
'rotation',
'round',
'roundbox',
'roundedpath',
'roundrectangle',
'samecoordsys',
'sameside',
'sample',
'save',
'savedefaults',
'saveline',
'scale',
'scale3',
'scaleO',
'scaleT',
'scaleless',
'scientific',
'search',
'searchtree',
'sec',
'secondaryX',
'secondaryY',
'seconds',
'section',
'sector',
'seek',
'seekeof',
'segment',
'sequence',
'setpens',
'sgn',
'sgnd',
'sharpangle',
'sharpdegrees',
'shift',
'shiftless',
'shipout',
'shipout3',
'show',
'side',
'simeq',
'simpson',
'sin',
'single',
'sinh',
'size',
'size3',
'skewness',
'skip',
'slant',
'sleep',
'slope',
'slopefield',
'solve',
'solveBVP',
'sort',
'sourceline',
'sphere',
'split',
'sqrt',
'square',
'srand',
'standardizecoordsys',
'startScript',
'startTrembling',
'stdev',
'step',
'stickframe',
'stickmarksize',
'stickmarkspace',
'stop',
'straight',
'straightness',
'string',
'stripdirectory',
'stripextension',
'stripfile',
'strokepath',
'subdivide',
'subitem',
'subpath',
'substr',
'sum',
'surface',
'symmedial',
'symmedian',
'system',
'tab',
'tableau',
'tan',
'tangent',
'tangential',
'tangents',
'tanh',
'tell',
'tensionSpecifier',
'tensorshade',
'tex',
'texcolor',
'texify',
'texpath',
'texpreamble',
'texreset',
'texshipout',
'texsize',
'textpath',
'thick',
'thin',
'tick',
'tickMax',
'tickMax3',
'tickMin',
'tickMin3',
'ticklabelshift',
'ticklocate',
'tildeframe',
'tildemarksize',
'tile',
'tiling',
'time',
'times',
'title',
'titlepage',
'topbox',
'transform',
'transformation',
'transpose',
'tremble',
'trembleFuzz',
'tremble_circlenodesnumber',
'tremble_circlenodesnumber1',
'tremble_draw',
'tremble_ellipsenodesnumber',
'tremble_ellipsenodesnumber1',
'tremble_hyperbolanodesnumber',
'tremble_marknodes',
'tremble_markuniform',
'tremble_parabolanodesnumber',
'triangle',
'triangleAbc',
'triangleabc',
'triangulate',
'tricoef',
'tridiagonal',
'trilinear',
'trim',
'trueMagnetize',
'truepoint',
'tube',
'uncycle',
'unfill',
'uniform',
'unit',
'unitrand',
'unitsize',
'unityroot',
'unstraighten',
'upcase',
'updatefunction',
'uperiodic',
'upscale',
'uptodate',
'usepackage',
'usersetting',
'usetypescript',
'usleep',
'value',
'variance',
'variancebiased',
'vbox',
'vector',
'vectorfield',
'verbatim',
'view',
'vline',
'vperiodic',
'vprojection',
'warn',
'warning',
'windingnumber',
'write',
'xaxis',
'xaxis3',
'xaxis3At',
'xaxisAt',
'xequals',
'xinput',
'xlimits',
'xoutput',
'xpart',
'xscale',
'xscaleO',
'xtick',
'xtick3',
'xtrans',
'yaxis',
'yaxis3',
'yaxis3At',
'yaxisAt',
'yequals',
'ylimits',
'ypart',
'yscale',
'yscaleO',
'ytick',
'ytick3',
'ytrans',
'zaxis3',
'zaxis3At',
'zero',
'zero3',
'zlimits',
'zpart',
'ztick',
'ztick3',
'ztrans'
}
ASYVARNAME = {
'AliceBlue',
'Align',
'Allow',
'AntiqueWhite',
'Apricot',
'Aqua',
'Aquamarine',
'Aspect',
'Azure',
'BeginPoint',
'Beige',
'Bisque',
'Bittersweet',
'Black',
'BlanchedAlmond',
'Blue',
'BlueGreen',
'BlueViolet',
'Both',
'Break',
'BrickRed',
'Brown',
'BurlyWood',
'BurntOrange',
'CCW',
'CW',
'CadetBlue',
'CarnationPink',
'Center',
'Centered',
'Cerulean',
'Chartreuse',
'Chocolate',
'Coeff',
'Coral',
'CornflowerBlue',
'Cornsilk',
'Crimson',
'Crop',
'Cyan',
'Dandelion',
'DarkBlue',
'DarkCyan',
'DarkGoldenrod',
'DarkGray',
'DarkGreen',
'DarkKhaki',
'DarkMagenta',
'DarkOliveGreen',
'DarkOrange',
'DarkOrchid',
'DarkRed',
'DarkSalmon',
'DarkSeaGreen',
'DarkSlateBlue',
'DarkSlateGray',
'DarkTurquoise',
'DarkViolet',
'DeepPink',
'DeepSkyBlue',
'DefaultHead',
'DimGray',
'DodgerBlue',
'Dotted',
'Draw',
'E',
'ENE',
'EPS',
'ESE',
'E_Euler',
'E_PC',
'E_RK2',
'E_RK3BS',
'Emerald',
'EndPoint',
'Euler',
'Fill',
'FillDraw',
'FireBrick',
'FloralWhite',
'ForestGreen',
'Fuchsia',
'Gainsboro',
'GhostWhite',
'Gold',
'Goldenrod',
'Gray',
'Green',
'GreenYellow',
'Honeydew',
'HookHead',
'Horizontal',
'HotPink',
'I',
'IgnoreAspect',
'IndianRed',
'Indigo',
'Ivory',
'JOIN_IN',
'JOIN_OUT',
'JungleGreen',
'Khaki',
'LM_DWARF',
'LM_MACHEP',
'LM_SQRT_DWARF',
'LM_SQRT_GIANT',
'LM_USERTOL',
'Label',
'Lavender',
'LavenderBlush',
'LawnGreen',
'LeftJustified',
'LeftSide',
'LemonChiffon',
'LightBlue',
'LightCoral',
'LightCyan',
'LightGoldenrodYellow',
'LightGreen',
'LightGrey',
'LightPink',
'LightSalmon',
'LightSeaGreen',
'LightSkyBlue',
'LightSlateGray',
'LightSteelBlue',
'LightYellow',
'Lime',
'LimeGreen',
'Linear',
'Linen',
'Log',
'Logarithmic',
'Magenta',
'Mahogany',
'Mark',
'MarkFill',
'Maroon',
'Max',
'MediumAquamarine',
'MediumBlue',
'MediumOrchid',
'MediumPurple',
'MediumSeaGreen',
'MediumSlateBlue',
'MediumSpringGreen',
'MediumTurquoise',
'MediumVioletRed',
'Melon',
'MidPoint',
'MidnightBlue',
'Min',
'MintCream',
'MistyRose',
'Moccasin',
'Move',
'MoveQuiet',
'Mulberry',
'N',
'NE',
'NNE',
'NNW',
'NW',
'NavajoWhite',
'Navy',
'NavyBlue',
'NoAlign',
'NoCrop',
'NoFill',
'NoSide',
'OldLace',
'Olive',
'OliveDrab',
'OliveGreen',
'Orange',
'OrangeRed',
'Orchid',
'Ox',
'Oy',
'PC',
'PaleGoldenrod',
'PaleGreen',
'PaleTurquoise',
'PaleVioletRed',
'PapayaWhip',
'Peach',
'PeachPuff',
'Periwinkle',
'Peru',
'PineGreen',
'Pink',
'Plum',
'PowderBlue',
'ProcessBlue',
'Purple',
'RK2',
'RK3',
'RK3BS',
'RK4',
'RK5',
'RK5DP',
'RK5F',
'RawSienna',
'Red',
'RedOrange',
'RedViolet',
'Rhodamine',
'RightJustified',
'RightSide',
'RosyBrown',
'RoyalBlue',
'RoyalPurple',
'RubineRed',
'S',
'SE',
'SSE',
'SSW',
'SW',
'SaddleBrown',
'Salmon',
'SandyBrown',
'SeaGreen',
'Seashell',
'Sepia',
'Sienna',
'Silver',
'SimpleHead',
'SkyBlue',
'SlateBlue',
'SlateGray',
'Snow',
'SpringGreen',
'SteelBlue',
'Suppress',
'SuppressQuiet',
'Tan',
'TeXHead',
'Teal',
'TealBlue',
'Thistle',
'Ticksize',
'Tomato',
'Turquoise',
'UnFill',
'VERSION',
'Value',
'Vertical',
'Violet',
'VioletRed',
'W',
'WNW',
'WSW',
'Wheat',
'White',
'WhiteSmoke',
'WildStrawberry',
'XYAlign',
'YAlign',
'Yellow',
'YellowGreen',
'YellowOrange',
'addpenarc',
'addpenline',
'align',
'allowstepping',
'angularsystem',
'animationdelay',
'appendsuffix',
'arcarrowangle',
'arcarrowfactor',
'arrow2sizelimit',
'arrowangle',
'arrowbarb',
'arrowdir',
'arrowfactor',
'arrowhookfactor',
'arrowlength',
'arrowsizelimit',
'arrowtexfactor',
'authorpen',
'axis',
'axiscoverage',
'axislabelfactor',
'background',
'backgroundcolor',
'backgroundpen',
'barfactor',
'barmarksizefactor',
'basealign',
'baselinetemplate',
'beveljoin',
'bigvertexpen',
'bigvertexsize',
'black',
'blue',
'bm',
'bottom',
'bp',
'brown',
'bullet',
'byfoci',
'byvertices',
'camerafactor',
'chartreuse',
'circlemarkradiusfactor',
'circlenodesnumberfactor',
'circleprecision',
'circlescale',
'cm',
'codefile',
'codepen',
'codeskip',
'colorPen',
'coloredNodes',
'coloredSegments',
'conditionlength',
'conicnodesfactor',
'count',
'cputimeformat',
'crossmarksizefactor',
'currentcoordsys',
'currentlight',
'currentpatterns',
'currentpen',
'currentpicture',
'currentposition',
'currentprojection',
'curvilinearsystem',
'cuttings',
'cyan',
'darkblue',
'darkbrown',
'darkcyan',
'darkgray',
'darkgreen',
'darkgrey',
'darkmagenta',
'darkolive',
'darkred',
'dashdotted',
'dashed',
'datepen',
'dateskip',
'debuggerlines',
'debugging',
'deepblue',
'deepcyan',
'deepgray',
'deepgreen',
'deepgrey',
'deepmagenta',
'deepred',
'default',
'defaultControl',
'defaultS',
'defaultbackpen',
'defaultcoordsys',
'defaultfilename',
'defaultformat',
'defaultmassformat',
'defaultpen',
'diagnostics',
'differentlengths',
'dot',
'dotfactor',
'dotframe',
'dotted',
'doublelinepen',
'doublelinespacing',
'down',
'duplicateFuzz',
'ellipsenodesnumberfactor',
'eps',
'epsgeo',
'epsilon',
'evenodd',
'extendcap',
'fermionpen',
'figureborder',
'figuremattpen',
'firstnode',
'firststep',
'foregroundcolor',
'fuchsia',
'fuzz',
'gapfactor',
'ghostpen',
'gluonamplitude',
'gluonpen',
'gluonratio',
'gray',
'green',
'grey',
'hatchepsilon',
'havepagenumber',
'heavyblue',
'heavycyan',
'heavygray',
'heavygreen',
'heavygrey',
'heavymagenta',
'heavyred',
'hline',
'hwratio',
'hyperbolanodesnumberfactor',
'identity4',
'ignore',
'inXasyMode',
'inch',
'inches',
'includegraphicscommand',
'inf',
'infinity',
'institutionpen',
'intMax',
'intMin',
'invert',
'invisible',
'itempen',
'itemskip',
'itemstep',
'labelmargin',
'landscape',
'lastnode',
'left',
'legendhskip',
'legendlinelength',
'legendmargin',
'legendmarkersize',
'legendmaxrelativewidth',
'legendvskip',
'lightblue',
'lightcyan',
'lightgray',
'lightgreen',
'lightgrey',
'lightmagenta',
'lightolive',
'lightred',
'lightyellow',
'linemargin',
'lm_infmsg',
'lm_shortmsg',
'longdashdotted',
'longdashed',
'magenta',
'magneticPoints',
'magneticRadius',
'mantissaBits',
'markangleradius',
'markangleradiusfactor',
'markanglespace',
'markanglespacefactor',
'mediumblue',
'mediumcyan',
'mediumgray',
'mediumgreen',
'mediumgrey',
'mediummagenta',
'mediumred',
'mediumyellow',
'middle',
'minDistDefault',
'minblockheight',
'minblockwidth',
'mincirclediameter',
'minipagemargin',
'minipagewidth',
'minvertexangle',
'miterjoin',
'mm',
'momarrowfactor',
'momarrowlength',
'momarrowmargin',
'momarrowoffset',
'momarrowpen',
'monoPen',
'morepoints',
'nCircle',
'newbulletcolor',
'ngraph',
'nil',
'nmesh',
'nobasealign',
'nodeMarginDefault',
'nodesystem',
'nomarker',
'nopoint',
'noprimary',
'nullpath',
'nullpen',
'numarray',
'ocgindex',
'oldbulletcolor',
'olive',
'orange',
'origin',
'overpaint',
'page',
'pageheight',
'pagemargin',
'pagenumberalign',
'pagenumberpen',
'pagenumberposition',
'pagewidth',
'paleblue',
'palecyan',
'palegray',
'palegreen',
'palegrey',
'palemagenta',
'palered',
'paleyellow',
'parabolanodesnumberfactor',
'perpfactor',
'phi',
'photonamplitude',
'photonpen',
'photonratio',
'pi',
'pink',
'plain',
'plus',
'preamblenodes',
'pt',
'purple',
'r3',
'r4a',
'r4b',
'randMax',
'realDigits',
'realEpsilon',
'realMax',
'realMin',
'red',
'relativesystem',
'reverse',
'right',
'roundcap',
'roundjoin',
'royalblue',
'salmon',
'saveFunctions',
'scalarpen',
'sequencereal',
'settings',
'shipped',
'signedtrailingzero',
'solid',
'springgreen',
'sqrtEpsilon',
'squarecap',
'squarepen',
'startposition',
'stdin',
'stdout',
'stepfactor',
'stepfraction',
'steppagenumberpen',
'stepping',
'stickframe',
'stickmarksizefactor',
'stickmarkspacefactor',
'textpen',
'ticksize',
'tildeframe',
'tildemarksizefactor',
'tinv',
'titlealign',
'titlepagepen',
'titlepageposition',
'titlepen',
'titleskip',
'top',
'trailingzero',
'treeLevelStep',
'treeMinNodeWidth',
'treeNodeStep',
'trembleAngle',
'trembleFrequency',
'trembleRandom',
'tremblingMode',
'undefined',
'unitcircle',
'unitsquare',
'up',
'urlpen',
'urlskip',
'version',
'vertexpen',
'vertexsize',
'viewportmargin',
'viewportsize',
'vline',
'white',
'wye',
'xformStack',
'yellow',
'ylabelwidth',
'zerotickfuzz',
'zerowinding'
}
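# --- Added illustrative sketch; not part of the original _asy_builtins.py. ---
# These sets exist so that the Asymptote lexer can decide, per identifier,
# whether to emit Name.Function, Name.Variable or plain Name. The helper
# below is a hypothetical illustration of that lookup, not the actual lexer
# code:
def _example_classify(word):
    from pygments.token import Name
    if word in ASYFUNCNAME:
        return Name.Function
    if word in ASYVARNAME:
        return Name.Variable
    return Name
# e.g. _example_classify('arclength') would return Token.Name.Function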
| 27,287 | Python | 15.58845 | 77 | 0.51574 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/berry.py | """
pygments.lexers.berry
~~~~~~~~~~~~~~~~~~~~~
Lexer for Berry.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, words, include, bygroups
from pygments.token import Comment, Whitespace, Operator, Keyword, Name, \
String, Number, Punctuation
__all__ = ['BerryLexer']
class BerryLexer(RegexLexer):
"""
For `berry <http://github.com/berry-lang/berry>`_ source code.
.. versionadded:: 2.12.0
"""
name = 'Berry'
aliases = ['berry', 'be']
filenames = ['*.be']
mimetypes = ['text/x-berry', 'application/x-berry']
_name = r'\b[^\W\d]\w*'
tokens = {
'root': [
include('whitespace'),
include('numbers'),
include('keywords'),
(rf'(def)(\s+)({_name})',
bygroups(Keyword.Declaration, Whitespace, Name.Function)),
(rf'\b(class)(\s+)({_name})',
bygroups(Keyword.Declaration, Whitespace, Name.Class)),
(rf'\b(import)(\s+)({_name})',
bygroups(Keyword.Namespace, Whitespace, Name.Namespace)),
include('expr')
],
'expr': [
(r'[^\S\n]+', Whitespace),
(r'\.\.|[~!%^&*+=|?:<>/-]', Operator),
(r'[(){}\[\],.;]', Punctuation),
include('controls'),
include('builtins'),
include('funccall'),
include('member'),
include('name'),
include('strings')
],
'whitespace': [
(r'\s+', Whitespace),
(r'#-(.|\n)*?-#', Comment.Multiline),
(r'#.*?$', Comment.Single)
],
'keywords': [
(words((
'as', 'break', 'continue', 'import', 'static', 'self', 'super'),
suffix=r'\b'), Keyword.Reserved),
(r'(true|false|nil)\b', Keyword.Constant),
(r'(var|def)\b', Keyword.Declaration)
],
'controls': [
(words((
'if', 'elif', 'else', 'for', 'while', 'do', 'end', 'break',
'continue', 'return', 'try', 'except', 'raise'),
suffix=r'\b'), Keyword)
],
'builtins': [
(words((
'assert', 'bool', 'input', 'classname', 'classof', 'number', 'real',
'bytes', 'compile', 'map', 'list', 'int', 'isinstance', 'print',
'range', 'str', 'super', 'module', 'size', 'issubclass', 'open',
'file', 'type', 'call'),
suffix=r'\b'), Name.Builtin)
],
'numbers': [
(r'0[xX][a-fA-F0-9]+', Number.Hex),
(r'-?\d+', Number.Integer),
(r'(-?\d+\.?|\.\d)\d*([eE][+-]?\d+)?', Number.Float)
],
'name': [
(_name, Name)
],
'funccall': [
(rf'{_name}(?=\s*\()', Name.Function, '#pop')
],
'member': [
(rf'(?<=\.){_name}\b(?!\()', Name.Attribute, '#pop')
],
'strings': [
(r'"([^\\]|\\.)*?"', String.Double, '#pop'),
(r'\'([^\\]|\\.)*?\'', String.Single, '#pop')
]
}
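# --- Added illustrative sketch; not part of the original Pygments source. ---
# A quick check of the Berry rules above: 'def' triggers the declaration
# rule, 'print' is in the builtins list and 'end' is a control keyword.
# The snippet is invented:
def _example_berry_tokens():
    sample = 'def greet(name)\n  print("hello " + name)\nend\n'
    for tokentype, value in BerryLexer().get_tokens(sample):
        print(tokentype, repr(value))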
| 3,211 | Python | 31.12 | 84 | 0.431641 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/xorg.py | """
pygments.lexers.xorg
~~~~~~~~~~~~~~~~~~~~
Lexers for Xorg configs.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, bygroups
from pygments.token import Comment, String, Name, Text
__all__ = ['XorgLexer']
class XorgLexer(RegexLexer):
"""Lexer for xorg.conf files."""
name = 'Xorg'
url = 'https://www.x.org/wiki/'
aliases = ['xorg.conf']
filenames = ['xorg.conf']
mimetypes = []
tokens = {
'root': [
(r'\s+', Text),
(r'#.*$', Comment),
(r'((?:Sub)?Section)(\s+)("\w+")',
bygroups(String.Escape, Text, String.Escape)),
(r'(End(?:Sub)?Section)', String.Escape),
(r'(\w+)(\s+)([^\n#]+)',
bygroups(Name.Builtin, Text, Name.Constant)),
],
}
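# --- Added illustrative sketch; not part of the original Pygments source. ---
# Section/EndSection pairs and 'Option value' lines are the two main shapes
# in xorg.conf; the invented fragment below exercises both:
def _example_xorg_tokens():
    sample = 'Section "Device"\n    Identifier "Card0"\nEndSection\n'
    for tokentype, value in XorgLexer().get_tokens(sample):
        print(tokentype, repr(value))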
| 902 | Python | 22.763157 | 70 | 0.519956 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/parsers.py | """
pygments.lexers.parsers
~~~~~~~~~~~~~~~~~~~~~~~
Lexers for parser generators.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, DelegatingLexer, \
include, bygroups, using
from pygments.token import Punctuation, Other, Text, Comment, Operator, \
Keyword, Name, String, Number, Whitespace
from pygments.lexers.jvm import JavaLexer
from pygments.lexers.c_cpp import CLexer, CppLexer
from pygments.lexers.objective import ObjectiveCLexer
from pygments.lexers.d import DLexer
from pygments.lexers.dotnet import CSharpLexer
from pygments.lexers.ruby import RubyLexer
from pygments.lexers.python import PythonLexer
from pygments.lexers.perl import PerlLexer
__all__ = ['RagelLexer', 'RagelEmbeddedLexer', 'RagelCLexer', 'RagelDLexer',
'RagelCppLexer', 'RagelObjectiveCLexer', 'RagelRubyLexer',
'RagelJavaLexer', 'AntlrLexer', 'AntlrPythonLexer',
'AntlrPerlLexer', 'AntlrRubyLexer', 'AntlrCppLexer',
'AntlrCSharpLexer', 'AntlrObjectiveCLexer',
'AntlrJavaLexer', 'AntlrActionScriptLexer',
'TreetopLexer', 'EbnfLexer']
class RagelLexer(RegexLexer):
"""A pure `Ragel <www.colm.net/open-source/ragel>`_ lexer. Use this
for fragments of Ragel. For ``.rl`` files, use
:class:`RagelEmbeddedLexer` instead (or one of the
language-specific subclasses).
.. versionadded:: 1.1
"""
name = 'Ragel'
url = 'http://www.colm.net/open-source/ragel/'
aliases = ['ragel']
filenames = []
tokens = {
'whitespace': [
(r'\s+', Whitespace)
],
'comments': [
(r'\#.*$', Comment),
],
'keywords': [
(r'(access|action|alphtype)\b', Keyword),
(r'(getkey|write|machine|include)\b', Keyword),
(r'(any|ascii|extend|alpha|digit|alnum|lower|upper)\b', Keyword),
(r'(xdigit|cntrl|graph|print|punct|space|zlen|empty)\b', Keyword)
],
'numbers': [
(r'0x[0-9A-Fa-f]+', Number.Hex),
(r'[+-]?[0-9]+', Number.Integer),
],
'literals': [
(r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
(r'\[(\\\\|\\[^\\]|[^\\\]])*\]', String), # square bracket literals
(r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/', String.Regex), # regular expressions
],
'identifiers': [
(r'[a-zA-Z_]\w*', Name.Variable),
],
'operators': [
(r',', Operator), # Join
(r'\||&|--?', Operator), # Union, Intersection and Subtraction
            (r'\.|<:|:>>?', Operator),                 # Concatenation
(r':', Operator), # Label
(r'->', Operator), # Epsilon Transition
(r'(>|\$|%|<|@|<>)(/|eof\b)', Operator), # EOF Actions
(r'(>|\$|%|<|@|<>)(!|err\b)', Operator), # Global Error Actions
(r'(>|\$|%|<|@|<>)(\^|lerr\b)', Operator), # Local Error Actions
(r'(>|\$|%|<|@|<>)(~|to\b)', Operator), # To-State Actions
(r'(>|\$|%|<|@|<>)(\*|from\b)', Operator), # From-State Actions
(r'>|@|\$|%', Operator), # Transition Actions and Priorities
(r'\*|\?|\+|\{[0-9]*,[0-9]*\}', Operator), # Repetition
(r'!|\^', Operator), # Negation
(r'\(|\)', Operator), # Grouping
],
'root': [
include('literals'),
include('whitespace'),
include('comments'),
include('keywords'),
include('numbers'),
include('identifiers'),
include('operators'),
(r'\{', Punctuation, 'host'),
(r'=', Operator),
(r';', Punctuation),
],
'host': [
(r'(' + r'|'.join(( # keep host code in largest possible chunks
r'[^{}\'"/#]+', # exclude unsafe characters
r'[^\\]\\[{}]', # allow escaped { or }
# strings and comments may safely contain unsafe characters
r'"(\\\\|\\[^\\]|[^"\\])*"',
r"'(\\\\|\\[^\\]|[^'\\])*'",
r'//.*$\n?', # single line comment
r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
r'\#.*$\n?', # ruby comment
# regular expression: There's no reason for it to start
# with a * and this stops confusion with comments.
r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/',
# / is safe now that we've handled regex and javadoc comments
r'/',
)) + r')+', Other),
(r'\{', Punctuation, '#push'),
(r'\}', Punctuation, '#pop'),
],
}
class RagelEmbeddedLexer(RegexLexer):
"""
A lexer for Ragel embedded in a host language file.
This will only highlight Ragel statements. If you want host language
highlighting then call the language-specific Ragel lexer.
.. versionadded:: 1.1
"""
name = 'Embedded Ragel'
aliases = ['ragel-em']
filenames = ['*.rl']
tokens = {
'root': [
(r'(' + r'|'.join(( # keep host code in largest possible chunks
r'[^%\'"/#]+', # exclude unsafe characters
r'%(?=[^%]|$)', # a single % sign is okay, just not 2 of them
# strings and comments may safely contain unsafe characters
r'"(\\\\|\\[^\\]|[^"\\])*"',
r"'(\\\\|\\[^\\]|[^'\\])*'",
r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
r'//.*$\n?', # single line comment
r'\#.*$\n?', # ruby/ragel comment
r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/', # regular expression
# / is safe now that we've handled regex and javadoc comments
r'/',
)) + r')+', Other),
# Single Line FSM.
# Please don't put a quoted newline in a single line FSM.
# That's just mean. It will break this.
(r'(%%)(?![{%])(.*)($|;)(\n?)', bygroups(Punctuation,
using(RagelLexer),
Punctuation, Text)),
# Multi Line FSM.
(r'(%%%%|%%)\{', Punctuation, 'multi-line-fsm'),
],
'multi-line-fsm': [
(r'(' + r'|'.join(( # keep ragel code in largest possible chunks.
r'(' + r'|'.join((
r'[^}\'"\[/#]', # exclude unsafe characters
r'\}(?=[^%]|$)', # } is okay as long as it's not followed by %
r'\}%(?=[^%]|$)', # ...well, one %'s okay, just not two...
r'[^\\]\\[{}]', # ...and } is okay if it's escaped
# allow / if it's preceded with one of these symbols
# (ragel EOF actions)
r'(>|\$|%|<|@|<>)/',
# specifically allow regex followed immediately by *
# so it doesn't get mistaken for a comment
r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/\*',
# allow / as long as it's not followed by another / or by a *
r'/(?=[^/*]|$)',
# We want to match as many of these as we can in one block.
# Not sure if we need the + sign here,
# does it help performance?
)) + r')+',
# strings and comments may safely contain unsafe characters
r'"(\\\\|\\[^\\]|[^"\\])*"',
r"'(\\\\|\\[^\\]|[^'\\])*'",
r"\[(\\\\|\\[^\\]|[^\]\\])*\]", # square bracket literal
r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
r'//.*$\n?', # single line comment
r'\#.*$\n?', # ruby/ragel comment
)) + r')+', using(RagelLexer)),
(r'\}%%', Punctuation, '#pop'),
]
}
def analyse_text(text):
return '@LANG: indep' in text
class RagelRubyLexer(DelegatingLexer):
"""
A lexer for Ragel in a Ruby host file.
.. versionadded:: 1.1
"""
name = 'Ragel in Ruby Host'
aliases = ['ragel-ruby', 'ragel-rb']
filenames = ['*.rl']
def __init__(self, **options):
super().__init__(RubyLexer, RagelEmbeddedLexer, **options)
def analyse_text(text):
return '@LANG: ruby' in text
class RagelCLexer(DelegatingLexer):
"""
A lexer for Ragel in a C host file.
.. versionadded:: 1.1
"""
name = 'Ragel in C Host'
aliases = ['ragel-c']
filenames = ['*.rl']
def __init__(self, **options):
super().__init__(CLexer, RagelEmbeddedLexer, **options)
def analyse_text(text):
return '@LANG: c' in text
class RagelDLexer(DelegatingLexer):
"""
A lexer for Ragel in a D host file.
.. versionadded:: 1.1
"""
name = 'Ragel in D Host'
aliases = ['ragel-d']
filenames = ['*.rl']
def __init__(self, **options):
super().__init__(DLexer, RagelEmbeddedLexer, **options)
def analyse_text(text):
return '@LANG: d' in text
class RagelCppLexer(DelegatingLexer):
"""
A lexer for Ragel in a C++ host file.
.. versionadded:: 1.1
"""
name = 'Ragel in CPP Host'
aliases = ['ragel-cpp']
filenames = ['*.rl']
def __init__(self, **options):
super().__init__(CppLexer, RagelEmbeddedLexer, **options)
def analyse_text(text):
return '@LANG: c++' in text
class RagelObjectiveCLexer(DelegatingLexer):
"""
A lexer for Ragel in an Objective C host file.
.. versionadded:: 1.1
"""
name = 'Ragel in Objective C Host'
aliases = ['ragel-objc']
filenames = ['*.rl']
def __init__(self, **options):
super().__init__(ObjectiveCLexer, RagelEmbeddedLexer, **options)
def analyse_text(text):
return '@LANG: objc' in text
class RagelJavaLexer(DelegatingLexer):
"""
A lexer for Ragel in a Java host file.
.. versionadded:: 1.1
"""
name = 'Ragel in Java Host'
aliases = ['ragel-java']
filenames = ['*.rl']
def __init__(self, **options):
super().__init__(JavaLexer, RagelEmbeddedLexer, **options)
def analyse_text(text):
return '@LANG: java' in text
class AntlrLexer(RegexLexer):
"""
Generic `ANTLR`_ Lexer.
    Should not be called directly; instead,
    use a DelegatingLexer for your target language.
.. versionadded:: 1.1
.. _ANTLR: http://www.antlr.org/
"""
name = 'ANTLR'
aliases = ['antlr']
filenames = []
_id = r'[A-Za-z]\w*'
_TOKEN_REF = r'[A-Z]\w*'
_RULE_REF = r'[a-z]\w*'
_STRING_LITERAL = r'\'(?:\\\\|\\\'|[^\']*)\''
_INT = r'[0-9]+'
tokens = {
'whitespace': [
(r'\s+', Whitespace),
],
'comments': [
(r'//.*$', Comment),
(r'/\*(.|\n)*?\*/', Comment),
],
'root': [
include('whitespace'),
include('comments'),
(r'(lexer|parser|tree)?(\s*)(grammar\b)(\s*)(' + _id + ')(;)',
bygroups(Keyword, Whitespace, Keyword, Whitespace, Name.Class,
Punctuation)),
# optionsSpec
(r'options\b', Keyword, 'options'),
# tokensSpec
(r'tokens\b', Keyword, 'tokens'),
# attrScope
(r'(scope)(\s*)(' + _id + r')(\s*)(\{)',
bygroups(Keyword, Whitespace, Name.Variable, Whitespace,
Punctuation), 'action'),
# exception
(r'(catch|finally)\b', Keyword, 'exception'),
# action
(r'(@' + _id + r')(\s*)(::)?(\s*)(' + _id + r')(\s*)(\{)',
bygroups(Name.Label, Whitespace, Punctuation, Whitespace,
Name.Label, Whitespace, Punctuation), 'action'),
# rule
(r'((?:protected|private|public|fragment)\b)?(\s*)(' + _id + ')(!)?',
bygroups(Keyword, Whitespace, Name.Label, Punctuation),
('rule-alts', 'rule-prelims')),
],
'exception': [
(r'\n', Whitespace, '#pop'),
(r'\s', Whitespace),
include('comments'),
(r'\[', Punctuation, 'nested-arg-action'),
(r'\{', Punctuation, 'action'),
],
'rule-prelims': [
include('whitespace'),
include('comments'),
(r'returns\b', Keyword),
(r'\[', Punctuation, 'nested-arg-action'),
(r'\{', Punctuation, 'action'),
# throwsSpec
(r'(throws)(\s+)(' + _id + ')',
bygroups(Keyword, Whitespace, Name.Label)),
(r'(,)(\s*)(' + _id + ')',
bygroups(Punctuation, Whitespace, Name.Label)), # Additional throws
# optionsSpec
(r'options\b', Keyword, 'options'),
# ruleScopeSpec - scope followed by target language code or name of action
# TODO finish implementing other possibilities for scope
# L173 ANTLRv3.g from ANTLR book
(r'(scope)(\s+)(\{)', bygroups(Keyword, Whitespace, Punctuation),
'action'),
(r'(scope)(\s+)(' + _id + r')(\s*)(;)',
bygroups(Keyword, Whitespace, Name.Label, Whitespace, Punctuation)),
# ruleAction
(r'(@' + _id + r')(\s*)(\{)',
bygroups(Name.Label, Whitespace, Punctuation), 'action'),
# finished prelims, go to rule alts!
(r':', Punctuation, '#pop')
],
'rule-alts': [
include('whitespace'),
include('comments'),
# These might need to go in a separate 'block' state triggered by (
(r'options\b', Keyword, 'options'),
(r':', Punctuation),
# literals
(r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
(r'<<([^>]|>[^>])>>', String),
# identifiers
# Tokens start with capital letter.
(r'\$?[A-Z_]\w*', Name.Constant),
# Rules start with small letter.
(r'\$?[a-z_]\w*', Name.Variable),
# operators
(r'(\+|\||->|=>|=|\(|\)|\.\.|\.|\?|\*|\^|!|\#|~)', Operator),
(r',', Punctuation),
(r'\[', Punctuation, 'nested-arg-action'),
(r'\{', Punctuation, 'action'),
(r';', Punctuation, '#pop')
],
'tokens': [
include('whitespace'),
include('comments'),
(r'\{', Punctuation),
(r'(' + _TOKEN_REF + r')(\s*)(=)?(\s*)(' + _STRING_LITERAL
+ r')?(\s*)(;)',
bygroups(Name.Label, Whitespace, Punctuation, Whitespace,
String, Whitespace, Punctuation)),
(r'\}', Punctuation, '#pop'),
],
'options': [
include('whitespace'),
include('comments'),
(r'\{', Punctuation),
(r'(' + _id + r')(\s*)(=)(\s*)(' +
'|'.join((_id, _STRING_LITERAL, _INT, r'\*')) + r')(\s*)(;)',
bygroups(Name.Variable, Whitespace, Punctuation, Whitespace,
Text, Whitespace, Punctuation)),
(r'\}', Punctuation, '#pop'),
],
'action': [
(r'(' + r'|'.join(( # keep host code in largest possible chunks
r'[^${}\'"/\\]+', # exclude unsafe characters
# strings and comments may safely contain unsafe characters
r'"(\\\\|\\[^\\]|[^"\\])*"',
r"'(\\\\|\\[^\\]|[^'\\])*'",
r'//.*$\n?', # single line comment
r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
# regular expression: There's no reason for it to start
# with a * and this stops confusion with comments.
r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/',
# backslashes are okay, as long as we are not backslashing a %
r'\\(?!%)',
# Now that we've handled regex and javadoc comments
# it's safe to let / through.
r'/',
)) + r')+', Other),
(r'(\\)(%)', bygroups(Punctuation, Other)),
(r'(\$[a-zA-Z]+)(\.?)(text|value)?',
bygroups(Name.Variable, Punctuation, Name.Property)),
(r'\{', Punctuation, '#push'),
(r'\}', Punctuation, '#pop'),
],
'nested-arg-action': [
(r'(' + r'|'.join(( # keep host code in largest possible chunks.
r'[^$\[\]\'"/]+', # exclude unsafe characters
# strings and comments may safely contain unsafe characters
r'"(\\\\|\\[^\\]|[^"\\])*"',
r"'(\\\\|\\[^\\]|[^'\\])*'",
r'//.*$\n?', # single line comment
r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
# regular expression: There's no reason for it to start
# with a * and this stops confusion with comments.
r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/',
# Now that we've handled regex and javadoc comments
# it's safe to let / through.
r'/',
)) + r')+', Other),
(r'\[', Punctuation, '#push'),
(r'\]', Punctuation, '#pop'),
(r'(\$[a-zA-Z]+)(\.?)(text|value)?',
bygroups(Name.Variable, Punctuation, Name.Property)),
(r'(\\\\|\\\]|\\\[|[^\[\]])+', Other),
]
}
def analyse_text(text):
return re.search(r'^\s*grammar\s+[a-zA-Z0-9]+\s*;', text, re.M)
# http://www.antlr.org/wiki/display/ANTLR3/Code+Generation+Targets
class AntlrCppLexer(DelegatingLexer):
"""
ANTLR with C++ Target
.. versionadded:: 1.1
"""
name = 'ANTLR With CPP Target'
aliases = ['antlr-cpp']
filenames = ['*.G', '*.g']
def __init__(self, **options):
super().__init__(CppLexer, AntlrLexer, **options)
def analyse_text(text):
return AntlrLexer.analyse_text(text) and \
re.search(r'^\s*language\s*=\s*C\s*;', text, re.M)
class AntlrObjectiveCLexer(DelegatingLexer):
"""
ANTLR with Objective-C Target
.. versionadded:: 1.1
"""
name = 'ANTLR With ObjectiveC Target'
aliases = ['antlr-objc']
filenames = ['*.G', '*.g']
def __init__(self, **options):
super().__init__(ObjectiveCLexer, AntlrLexer, **options)
def analyse_text(text):
return AntlrLexer.analyse_text(text) and \
re.search(r'^\s*language\s*=\s*ObjC\s*;', text)
class AntlrCSharpLexer(DelegatingLexer):
"""
ANTLR with C# Target
.. versionadded:: 1.1
"""
name = 'ANTLR With C# Target'
aliases = ['antlr-csharp', 'antlr-c#']
filenames = ['*.G', '*.g']
def __init__(self, **options):
super().__init__(CSharpLexer, AntlrLexer, **options)
def analyse_text(text):
return AntlrLexer.analyse_text(text) and \
re.search(r'^\s*language\s*=\s*CSharp2\s*;', text, re.M)
class AntlrPythonLexer(DelegatingLexer):
"""
ANTLR with Python Target
.. versionadded:: 1.1
"""
name = 'ANTLR With Python Target'
aliases = ['antlr-python']
filenames = ['*.G', '*.g']
def __init__(self, **options):
super().__init__(PythonLexer, AntlrLexer, **options)
def analyse_text(text):
return AntlrLexer.analyse_text(text) and \
re.search(r'^\s*language\s*=\s*Python\s*;', text, re.M)
class AntlrJavaLexer(DelegatingLexer):
"""
ANTLR with Java Target
    .. versionadded:: 1.1
"""
name = 'ANTLR With Java Target'
aliases = ['antlr-java']
filenames = ['*.G', '*.g']
def __init__(self, **options):
super().__init__(JavaLexer, AntlrLexer, **options)
def analyse_text(text):
# Antlr language is Java by default
return AntlrLexer.analyse_text(text) and 0.9
class AntlrRubyLexer(DelegatingLexer):
"""
ANTLR with Ruby Target
.. versionadded:: 1.1
"""
name = 'ANTLR With Ruby Target'
aliases = ['antlr-ruby', 'antlr-rb']
filenames = ['*.G', '*.g']
def __init__(self, **options):
super().__init__(RubyLexer, AntlrLexer, **options)
def analyse_text(text):
return AntlrLexer.analyse_text(text) and \
re.search(r'^\s*language\s*=\s*Ruby\s*;', text, re.M)
class AntlrPerlLexer(DelegatingLexer):
"""
ANTLR with Perl Target
.. versionadded:: 1.1
"""
name = 'ANTLR With Perl Target'
aliases = ['antlr-perl']
filenames = ['*.G', '*.g']
def __init__(self, **options):
super().__init__(PerlLexer, AntlrLexer, **options)
def analyse_text(text):
return AntlrLexer.analyse_text(text) and \
re.search(r'^\s*language\s*=\s*Perl5\s*;', text, re.M)
class AntlrActionScriptLexer(DelegatingLexer):
"""
ANTLR with ActionScript Target
.. versionadded:: 1.1
"""
name = 'ANTLR With ActionScript Target'
aliases = ['antlr-actionscript', 'antlr-as']
filenames = ['*.G', '*.g']
def __init__(self, **options):
from pygments.lexers.actionscript import ActionScriptLexer
super().__init__(ActionScriptLexer, AntlrLexer, **options)
def analyse_text(text):
return AntlrLexer.analyse_text(text) and \
re.search(r'^\s*language\s*=\s*ActionScript\s*;', text, re.M)
class TreetopBaseLexer(RegexLexer):
"""
A base lexer for `Treetop <http://treetop.rubyforge.org/>`_ grammars.
Not for direct use; use :class:`TreetopLexer` instead.
.. versionadded:: 1.6
"""
tokens = {
'root': [
include('space'),
(r'require[ \t]+[^\n\r]+[\n\r]', Other),
(r'module\b', Keyword.Namespace, 'module'),
(r'grammar\b', Keyword, 'grammar'),
],
'module': [
include('space'),
include('end'),
(r'module\b', Keyword, '#push'),
(r'grammar\b', Keyword, 'grammar'),
(r'[A-Z]\w*(?:::[A-Z]\w*)*', Name.Namespace),
],
'grammar': [
include('space'),
include('end'),
(r'rule\b', Keyword, 'rule'),
(r'include\b', Keyword, 'include'),
(r'[A-Z]\w*', Name),
],
'include': [
include('space'),
(r'[A-Z]\w*(?:::[A-Z]\w*)*', Name.Class, '#pop'),
],
'rule': [
include('space'),
include('end'),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
(r'([A-Za-z_]\w*)(:)', bygroups(Name.Label, Punctuation)),
(r'[A-Za-z_]\w*', Name),
(r'[()]', Punctuation),
(r'[?+*/&!~]', Operator),
(r'\[(?:\\.|\[:\^?[a-z]+:\]|[^\\\]])+\]', String.Regex),
(r'([0-9]*)(\.\.)([0-9]*)',
bygroups(Number.Integer, Operator, Number.Integer)),
(r'(<)([^>]+)(>)', bygroups(Punctuation, Name.Class, Punctuation)),
(r'\{', Punctuation, 'inline_module'),
(r'\.', String.Regex),
],
'inline_module': [
(r'\{', Other, 'ruby'),
(r'\}', Punctuation, '#pop'),
(r'[^{}]+', Other),
],
'ruby': [
(r'\{', Other, '#push'),
(r'\}', Other, '#pop'),
(r'[^{}]+', Other),
],
'space': [
(r'[ \t\n\r]+', Whitespace),
(r'#[^\n]*', Comment.Single),
],
'end': [
(r'end\b', Keyword, '#pop'),
],
}
class TreetopLexer(DelegatingLexer):
"""
A lexer for `Treetop <http://treetop.rubyforge.org/>`_ grammars.
.. versionadded:: 1.6
"""
name = 'Treetop'
aliases = ['treetop']
filenames = ['*.treetop', '*.tt']
def __init__(self, **options):
super().__init__(RubyLexer, TreetopBaseLexer, **options)
class EbnfLexer(RegexLexer):
"""
Lexer for `ISO/IEC 14977 EBNF
<http://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_Form>`_
grammars.
.. versionadded:: 2.0
"""
name = 'EBNF'
aliases = ['ebnf']
filenames = ['*.ebnf']
mimetypes = ['text/x-ebnf']
tokens = {
'root': [
include('whitespace'),
include('comment_start'),
include('identifier'),
(r'=', Operator, 'production'),
],
'production': [
include('whitespace'),
include('comment_start'),
include('identifier'),
(r'"[^"]*"', String.Double),
(r"'[^']*'", String.Single),
(r'(\?[^?]*\?)', Name.Entity),
(r'[\[\]{}(),|]', Punctuation),
(r'-', Operator),
(r';', Punctuation, '#pop'),
(r'\.', Punctuation, '#pop'),
],
'whitespace': [
(r'\s+', Text),
],
'comment_start': [
(r'\(\*', Comment.Multiline, 'comment'),
],
'comment': [
(r'[^*)]', Comment.Multiline),
include('comment_start'),
(r'\*\)', Comment.Multiline, '#pop'),
(r'[*)]', Comment.Multiline),
],
'identifier': [
(r'([a-zA-Z][\w \-]*)', Keyword),
],
}
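# Illustrative usage sketch (not part of the original module): running this
# file directly tokenizes a tiny, made-up EBNF rule with EbnfLexer, using only
# the standard pygments lexer API.
if __name__ == '__main__':
    sample = 'digit = "0" | "1" | "2" ; (* three of the ten digits *)\n'
    for token_type, value in EbnfLexer().get_tokens(sample):
        print(token_type, repr(value))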
| 25,904 | Python | 31.300499 | 93 | 0.462438 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/r.py | """
pygments.lexers.r
~~~~~~~~~~~~~~~~~
Lexers for the R/S languages.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import Lexer, RegexLexer, include, do_insertions
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Generic, Whitespace
__all__ = ['RConsoleLexer', 'SLexer', 'RdLexer']
line_re = re.compile('.*?\n')
class RConsoleLexer(Lexer):
"""
For R console transcripts or R CMD BATCH output files.
"""
name = 'RConsole'
aliases = ['rconsole', 'rout']
filenames = ['*.Rout']
def get_tokens_unprocessed(self, text):
slexer = SLexer(**self.options)
current_code_block = ''
insertions = []
for match in line_re.finditer(text):
line = match.group()
if line.startswith('>') or line.startswith('+'):
# Colorize the prompt as such,
# then put rest of line into current_code_block
insertions.append((len(current_code_block),
[(0, Generic.Prompt, line[:2])]))
current_code_block += line[2:]
else:
# We have reached a non-prompt line!
# If we have stored prompt lines, need to process them first.
if current_code_block:
# Weave together the prompts and highlight code.
yield from do_insertions(
insertions, slexer.get_tokens_unprocessed(current_code_block))
# Reset vars for next code block.
current_code_block = ''
insertions = []
# Now process the actual line itself, this is output from R.
yield match.start(), Generic.Output, line
# If we happen to end on a code block with nothing after it, need to
# process the last code block. This is neither elegant nor DRY so
# should be changed.
if current_code_block:
yield from do_insertions(
insertions, slexer.get_tokens_unprocessed(current_code_block))
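# Illustrative sketch (not part of the original module) of the weaving done
# above. Given a transcript such as
#
#     > x <- 1
#     > x
#     [1] 1
#
# the leading "> " of the first two lines is yielded as Generic.Prompt, the
# rest of those lines is buffered and highlighted with SLexer through
# do_insertions(), and the final line is yielded unchanged as Generic.Output.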
class SLexer(RegexLexer):
"""
For S, S-plus, and R source code.
.. versionadded:: 0.10
"""
name = 'S'
aliases = ['splus', 's', 'r']
filenames = ['*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron']
mimetypes = ['text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r',
'text/x-R', 'text/x-r-history', 'text/x-r-profile']
valid_name = r'`[^`\\]*(?:\\.[^`\\]*)*`|(?:[a-zA-Z]|\.[A-Za-z_.])[\w.]*|\.'
tokens = {
'comments': [
(r'#.*$', Comment.Single),
],
'valid_name': [
(valid_name, Name),
],
'punctuation': [
(r'\[{1,2}|\]{1,2}|\(|\)|;|,', Punctuation),
],
'keywords': [
(r'(if|else|for|while|repeat|in|next|break|return|switch|function)'
r'(?![\w.])',
Keyword.Reserved),
],
'operators': [
(r'<<?-|->>?|-|==|<=|>=|<|>|&&?|!=|\|\|?|\?', Operator),
(r'\*|\+|\^|/|!|%[^%]*%|=|~|\$|@|:{1,3}', Operator),
],
'builtin_symbols': [
(r'(NULL|NA(_(integer|real|complex|character)_)?|'
r'letters|LETTERS|Inf|TRUE|FALSE|NaN|pi|\.\.(\.|[0-9]+))'
r'(?![\w.])',
Keyword.Constant),
(r'(T|F)\b', Name.Builtin.Pseudo),
],
'numbers': [
# hex number
(r'0[xX][a-fA-F0-9]+([pP][0-9]+)?[Li]?', Number.Hex),
# decimal number
(r'[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+|\.)([eE][+-]?[0-9]+)?[Li]?',
Number),
],
'statements': [
include('comments'),
# whitespaces
(r'\s+', Whitespace),
(r'\'', String, 'string_squote'),
(r'\"', String, 'string_dquote'),
include('builtin_symbols'),
include('valid_name'),
include('numbers'),
include('keywords'),
include('punctuation'),
include('operators'),
],
'root': [
# calls:
(r'(%s)\s*(?=\()' % valid_name, Name.Function),
include('statements'),
# blocks:
(r'\{|\}', Punctuation),
# (r'\{', Punctuation, 'block'),
(r'.', Text),
],
# 'block': [
# include('statements'),
# ('\{', Punctuation, '#push'),
# ('\}', Punctuation, '#pop')
# ],
'string_squote': [
(r'([^\'\\]|\\.)*\'', String, '#pop'),
],
'string_dquote': [
(r'([^"\\]|\\.)*"', String, '#pop'),
],
}
def analyse_text(text):
if re.search(r'[a-z0-9_\])\s]<-(?!-)', text):
return 0.11
class RdLexer(RegexLexer):
"""
Pygments Lexer for R documentation (Rd) files
This is a very minimal implementation, highlighting little more
than the macros. A description of Rd syntax is found in `Writing R
Extensions <http://cran.r-project.org/doc/manuals/R-exts.html>`_
and `Parsing Rd files <http://developer.r-project.org/parseRd.pdf>`_.
.. versionadded:: 1.6
"""
name = 'Rd'
aliases = ['rd']
filenames = ['*.Rd']
mimetypes = ['text/x-r-doc']
# To account for verbatim / LaTeX-like / and R-like areas
# would require parsing.
tokens = {
'root': [
# catch escaped brackets and percent sign
(r'\\[\\{}%]', String.Escape),
# comments
(r'%.*$', Comment),
# special macros with no arguments
(r'\\(?:cr|l?dots|R|tab)\b', Keyword.Constant),
# macros
(r'\\[a-zA-Z]+\b', Keyword),
# special preprocessor macros
(r'^\s*#(?:ifn?def|endif).*\b', Comment.Preproc),
# non-escaped brackets
(r'[{}]', Name.Builtin),
# everything else
(r'[^\\%\n{}]+', Text),
(r'.', Text),
]
}
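# Illustrative usage sketch (not part of the original module): running this
# file directly tokenizes a small, made-up R snippet with SLexer.
if __name__ == '__main__':
    sample = 'abs2 <- function(x) {\n  if (x > 0) x else -x  # absolute value\n}\n'
    for token_type, value in SLexer().get_tokens(sample):
        print(token_type, repr(value))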
| 6,185 | Python | 31.387434 | 86 | 0.470008 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/bibtex.py | """
pygments.lexers.bibtex
~~~~~~~~~~~~~~~~~~~~~~
Lexers for BibTeX bibliography data and styles
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, ExtendedRegexLexer, include, default, \
words
from pygments.token import Name, Comment, String, Error, Number, Keyword, \
Punctuation, Whitespace
__all__ = ['BibTeXLexer', 'BSTLexer']
class BibTeXLexer(ExtendedRegexLexer):
"""
A lexer for BibTeX bibliography data format.
.. versionadded:: 2.2
"""
name = 'BibTeX'
aliases = ['bibtex', 'bib']
filenames = ['*.bib']
mimetypes = ["text/x-bibtex"]
flags = re.IGNORECASE
ALLOWED_CHARS = r'@!$&*+\-./:;<>?\[\\\]^`|~'
IDENTIFIER = '[{}][{}]*'.format('a-z_' + ALLOWED_CHARS, r'\w' + ALLOWED_CHARS)
def open_brace_callback(self, match, ctx):
opening_brace = match.group()
ctx.opening_brace = opening_brace
yield match.start(), Punctuation, opening_brace
ctx.pos = match.end()
def close_brace_callback(self, match, ctx):
closing_brace = match.group()
if (
ctx.opening_brace == '{' and closing_brace != '}' or
ctx.opening_brace == '(' and closing_brace != ')'
):
yield match.start(), Error, closing_brace
else:
yield match.start(), Punctuation, closing_brace
del ctx.opening_brace
ctx.pos = match.end()
tokens = {
'root': [
include('whitespace'),
(r'@comment(?!ary)', Comment),
('@preamble', Name.Class, ('closing-brace', 'value', 'opening-brace')),
('@string', Name.Class, ('closing-brace', 'field', 'opening-brace')),
('@' + IDENTIFIER, Name.Class,
('closing-brace', 'command-body', 'opening-brace')),
('.+', Comment),
],
'opening-brace': [
include('whitespace'),
(r'[{(]', open_brace_callback, '#pop'),
],
'closing-brace': [
include('whitespace'),
(r'[})]', close_brace_callback, '#pop'),
],
'command-body': [
include('whitespace'),
(r'[^\s\,\}]+', Name.Label, ('#pop', 'fields')),
],
'fields': [
include('whitespace'),
(',', Punctuation, 'field'),
default('#pop'),
],
'field': [
include('whitespace'),
(IDENTIFIER, Name.Attribute, ('value', '=')),
default('#pop'),
],
'=': [
include('whitespace'),
('=', Punctuation, '#pop'),
],
'value': [
include('whitespace'),
(IDENTIFIER, Name.Variable),
('"', String, 'quoted-string'),
(r'\{', String, 'braced-string'),
(r'[\d]+', Number),
('#', Punctuation),
default('#pop'),
],
'quoted-string': [
(r'\{', String, 'braced-string'),
('"', String, '#pop'),
(r'[^\{\"]+', String),
],
'braced-string': [
(r'\{', String, '#push'),
(r'\}', String, '#pop'),
(r'[^\{\}]+', String),
],
'whitespace': [
(r'\s+', Whitespace),
],
}

class BSTLexer(RegexLexer):
"""
A lexer for BibTeX bibliography styles.
.. versionadded:: 2.2
"""
name = 'BST'
aliases = ['bst', 'bst-pybtex']
filenames = ['*.bst']
flags = re.IGNORECASE | re.MULTILINE
tokens = {
'root': [
include('whitespace'),
(words(['read', 'sort']), Keyword),
(words(['execute', 'integers', 'iterate', 'reverse', 'strings']),
Keyword, ('group')),
(words(['function', 'macro']), Keyword, ('group', 'group')),
(words(['entry']), Keyword, ('group', 'group', 'group')),
],
'group': [
include('whitespace'),
(r'\{', Punctuation, ('#pop', 'group-end', 'body')),
],
'group-end': [
include('whitespace'),
(r'\}', Punctuation, '#pop'),
],
'body': [
include('whitespace'),
(r"\'[^#\"\{\}\s]+", Name.Function),
(r'[^#\"\{\}\s]+\$', Name.Builtin),
(r'[^#\"\{\}\s]+', Name.Variable),
(r'"[^\"]*"', String),
(r'#-?\d+', Number),
(r'\{', Punctuation, ('group-end', 'body')),
default('#pop'),
],
'whitespace': [
(r'\s+', Whitespace),
('%.*?$', Comment.Single),
],
}
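# Illustrative usage sketch (not part of the original module): running this
# file directly tokenizes a minimal, made-up BibTeX entry with BibTeXLexer.
if __name__ == '__main__':
    sample = '@article{key2022,\n  title = {An Example},\n  year = 2022,\n}\n'
    for token_type, value in BibTeXLexer().get_tokens(sample):
        print(token_type, repr(value))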
| 4,723 | Python | 28.525 | 83 | 0.45289 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/pygments/lexers/_csound_builtins.py | """
pygments.lexers._csound_builtins
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
REMOVED_OPCODES = set('''
OSCsendA
beadsynt
beosc
buchla
getrowlin
lua_exec
lua_iaopcall
lua_iaopcall_off
lua_ikopcall
lua_ikopcall_off
lua_iopcall
lua_iopcall_off
lua_opdef
mp3scal_check
mp3scal_load
mp3scal_load2
mp3scal_play
mp3scal_play2
pvsgendy
socksend_k
signalflowgraph
sumTableFilter
systime
tabrowlin
vbap1move
'''.split())
# Opcodes in Csound 6.18.0 using:
# python3 -c "
# import re
# from subprocess import Popen, PIPE
# output = Popen(['csound', '--list-opcodes0'], stderr=PIPE, text=True).communicate()[1]
# opcodes = output[re.search(r'^\$', output, re.M).end() : re.search(r'^\d+ opcodes\$', output, re.M).start()].split()
# output = Popen(['csound', '--list-opcodes2'], stderr=PIPE, text=True).communicate()[1]
# all_opcodes = output[re.search(r'^\$', output, re.M).end() : re.search(r'^\d+ opcodes\$', output, re.M).start()].split()
# deprecated_opcodes = [opcode for opcode in all_opcodes if opcode not in opcodes]
# # Remove opcodes that csound.py treats as keywords.
# keyword_opcodes = [
# 'cggoto', # https://csound.com/docs/manual/cggoto.html
# 'cigoto', # https://csound.com/docs/manual/cigoto.html
# 'cingoto', # (undocumented)
# 'ckgoto', # https://csound.com/docs/manual/ckgoto.html
# 'cngoto', # https://csound.com/docs/manual/cngoto.html
# 'cnkgoto', # (undocumented)
# 'endin', # https://csound.com/docs/manual/endin.html
# 'endop', # https://csound.com/docs/manual/endop.html
# 'goto', # https://csound.com/docs/manual/goto.html
# 'igoto', # https://csound.com/docs/manual/igoto.html
# 'instr', # https://csound.com/docs/manual/instr.html
# 'kgoto', # https://csound.com/docs/manual/kgoto.html
# 'loop_ge', # https://csound.com/docs/manual/loop_ge.html
# 'loop_gt', # https://csound.com/docs/manual/loop_gt.html
# 'loop_le', # https://csound.com/docs/manual/loop_le.html
# 'loop_lt', # https://csound.com/docs/manual/loop_lt.html
# 'opcode', # https://csound.com/docs/manual/opcode.html
# 'reinit', # https://csound.com/docs/manual/reinit.html
# 'return', # https://csound.com/docs/manual/return.html
# 'rireturn', # https://csound.com/docs/manual/rireturn.html
# 'rigoto', # https://csound.com/docs/manual/rigoto.html
# 'tigoto', # https://csound.com/docs/manual/tigoto.html
# 'timout' # https://csound.com/docs/manual/timout.html
# ]
# opcodes = [opcode for opcode in opcodes if opcode not in keyword_opcodes]
# newline = '\n'
# print(f'''OPCODES = set(\'''
# {newline.join(opcodes)}
# \'''.split())
#
# DEPRECATED_OPCODES = set(\'''
# {newline.join(deprecated_opcodes)}
# \'''.split())
# ''')
# "
OPCODES = set('''
ATSadd
ATSaddnz
ATSbufread
ATScross
ATSinfo
ATSinterpread
ATSpartialtap
ATSread
ATSreadnz
ATSsinnoi
FLbox
FLbutBank
FLbutton
FLcloseButton
FLcolor
FLcolor2
FLcount
FLexecButton
FLgetsnap
FLgroup
FLgroupEnd
FLgroup_end
FLhide
FLhvsBox
FLhvsBoxSetValue
FLjoy
FLkeyIn
FLknob
FLlabel
FLloadsnap
FLmouse
FLpack
FLpackEnd
FLpack_end
FLpanel
FLpanelEnd
FLpanel_end
FLprintk
FLprintk2
FLroller
FLrun
FLsavesnap
FLscroll
FLscrollEnd
FLscroll_end
FLsetAlign
FLsetBox
FLsetColor
FLsetColor2
FLsetFont
FLsetPosition
FLsetSize
FLsetSnapGroup
FLsetText
FLsetTextColor
FLsetTextSize
FLsetTextType
FLsetVal
FLsetVal_i
FLsetVali
FLsetsnap
FLshow
FLslidBnk
FLslidBnk2
FLslidBnk2Set
FLslidBnk2Setk
FLslidBnkGetHandle
FLslidBnkSet
FLslidBnkSetk
FLslider
FLtabs
FLtabsEnd
FLtabs_end
FLtext
FLupdate
FLvalue
FLvkeybd
FLvslidBnk
FLvslidBnk2
FLxyin
JackoAudioIn
JackoAudioInConnect
JackoAudioOut
JackoAudioOutConnect
JackoFreewheel
JackoInfo
JackoInit
JackoMidiInConnect
JackoMidiOut
JackoMidiOutConnect
JackoNoteOut
JackoOn
JackoTransport
K35_hpf
K35_lpf
MixerClear
MixerGetLevel
MixerReceive
MixerSend
MixerSetLevel
MixerSetLevel_i
OSCbundle
OSCcount
OSCinit
OSCinitM
OSClisten
OSCraw
OSCsend
OSCsend_lo
S
STKBandedWG
STKBeeThree
STKBlowBotl
STKBlowHole
STKBowed
STKBrass
STKClarinet
STKDrummer
STKFMVoices
STKFlute
STKHevyMetl
STKMandolin
STKModalBar
STKMoog
STKPercFlut
STKPlucked
STKResonate
STKRhodey
STKSaxofony
STKShakers
STKSimple
STKSitar
STKStifKarp
STKTubeBell
STKVoicForm
STKWhistle
STKWurley
a
abs
active
adsr
adsyn
adsynt
adsynt2
aftouch
allpole
alpass
alwayson
ampdb
ampdbfs
ampmidi
ampmidicurve
ampmidid
apoleparams
arduinoRead
arduinoReadF
arduinoStart
arduinoStop
areson
aresonk
atone
atonek
atonex
autocorr
babo
balance
balance2
bamboo
barmodel
bbcutm
bbcuts
betarand
bexprnd
bformdec1
bformdec2
bformenc1
binit
biquad
biquada
birnd
bob
bpf
bpfcos
bqrez
butbp
butbr
buthp
butlp
butterbp
butterbr
butterhp
butterlp
button
buzz
c2r
cabasa
cauchy
cauchyi
cbrt
ceil
cell
cent
centroid
ceps
cepsinv
chanctrl
changed
changed2
chani
chano
chebyshevpoly
checkbox
chn_S
chn_a
chn_k
chnclear
chnexport
chnget
chngeta
chngeti
chngetk
chngetks
chngets
chnmix
chnparams
chnset
chnseta
chnseti
chnsetk
chnsetks
chnsets
chuap
clear
clfilt
clip
clockoff
clockon
cmp
cmplxprod
cntCreate
cntCycles
cntDelete
cntDelete_i
cntRead
cntReset
cntState
comb
combinv
compilecsd
compileorc
compilestr
compress
compress2
connect
control
convle
convolve
copya2ftab
copyf2array
cos
cosh
cosinv
cosseg
cossegb
cossegr
count
count_i
cps2pch
cpsmidi
cpsmidib
cpsmidinn
cpsoct
cpspch
cpstmid
cpstun
cpstuni
cpsxpch
cpumeter
cpuprc
cross2
crossfm
crossfmi
crossfmpm
crossfmpmi
crosspm
crosspmi
crunch
ctlchn
ctrl14
ctrl21
ctrl7
ctrlinit
ctrlpreset
ctrlprint
ctrlprintpresets
ctrlsave
ctrlselect
cuserrnd
dam
date
dates
db
dbamp
dbfsamp
dcblock
dcblock2
dconv
dct
dctinv
deinterleave
delay
delay1
delayk
delayr
delayw
deltap
deltap3
deltapi
deltapn
deltapx
deltapxw
denorm
diff
diode_ladder
directory
diskgrain
diskin
diskin2
dispfft
display
distort
distort1
divz
doppler
dot
downsamp
dripwater
dssiactivate
dssiaudio
dssictls
dssiinit
dssilist
dumpk
dumpk2
dumpk3
dumpk4
duserrnd
dust
dust2
elapsedcycles
elapsedtime
envlpx
envlpxr
ephasor
eqfil
evalstr
event
event_i
eventcycles
eventtime
exciter
exitnow
exp
expcurve
expon
exprand
exprandi
expseg
expsega
expsegb
expsegba
expsegr
fareylen
fareyleni
faustaudio
faustcompile
faustctl
faustdsp
faustgen
faustplay
fft
fftinv
ficlose
filebit
filelen
filenchnls
filepeak
filescal
filesr
filevalid
fillarray
filter2
fin
fini
fink
fiopen
flanger
flashtxt
flooper
flooper2
floor
fluidAllOut
fluidCCi
fluidCCk
fluidControl
fluidEngine
fluidInfo
fluidLoad
fluidNote
fluidOut
fluidProgramSelect
fluidSetInterpMethod
fmanal
fmax
fmb3
fmbell
fmin
fmmetal
fmod
fmpercfl
fmrhode
fmvoice
fmwurlie
fof
fof2
fofilter
fog
fold
follow
follow2
foscil
foscili
fout
fouti
foutir
foutk
fprintks
fprints
frac
fractalnoise
framebuffer
freeverb
ftaudio
ftchnls
ftconv
ftcps
ftexists
ftfree
ftgen
ftgenonce
ftgentmp
ftlen
ftload
ftloadk
ftlptim
ftmorf
ftom
ftprint
ftresize
ftresizei
ftsamplebank
ftsave
ftsavek
ftset
ftslice
ftslicei
ftsr
gain
gainslider
gauss
gaussi
gausstrig
gbuzz
genarray
genarray_i
gendy
gendyc
gendyx
getcfg
getcol
getftargs
getrow
getseed
gogobel
grain
grain2
grain3
granule
gtadsr
gtf
guiro
harmon
harmon2
harmon3
harmon4
hdf5read
hdf5write
hilbert
hilbert2
hrtfearly
hrtfmove
hrtfmove2
hrtfreverb
hrtfstat
hsboscil
hvs1
hvs2
hvs3
hypot
i
ihold
imagecreate
imagefree
imagegetpixel
imageload
imagesave
imagesetpixel
imagesize
in
in32
inch
inh
init
initc14
initc21
initc7
inleta
inletf
inletk
inletkid
inletv
ino
inq
inrg
ins
insglobal
insremot
int
integ
interleave
interp
invalue
inx
inz
jacktransport
jitter
jitter2
joystick
jspline
k
la_i_add_mc
la_i_add_mr
la_i_add_vc
la_i_add_vr
la_i_assign_mc
la_i_assign_mr
la_i_assign_t
la_i_assign_vc
la_i_assign_vr
la_i_conjugate_mc
la_i_conjugate_mr
la_i_conjugate_vc
la_i_conjugate_vr
la_i_distance_vc
la_i_distance_vr
la_i_divide_mc
la_i_divide_mr
la_i_divide_vc
la_i_divide_vr
la_i_dot_mc
la_i_dot_mc_vc
la_i_dot_mr
la_i_dot_mr_vr
la_i_dot_vc
la_i_dot_vr
la_i_get_mc
la_i_get_mr
la_i_get_vc
la_i_get_vr
la_i_invert_mc
la_i_invert_mr
la_i_lower_solve_mc
la_i_lower_solve_mr
la_i_lu_det_mc
la_i_lu_det_mr
la_i_lu_factor_mc
la_i_lu_factor_mr
la_i_lu_solve_mc
la_i_lu_solve_mr
la_i_mc_create
la_i_mc_set
la_i_mr_create
la_i_mr_set
la_i_multiply_mc
la_i_multiply_mr
la_i_multiply_vc
la_i_multiply_vr
la_i_norm1_mc
la_i_norm1_mr
la_i_norm1_vc
la_i_norm1_vr
la_i_norm_euclid_mc
la_i_norm_euclid_mr
la_i_norm_euclid_vc
la_i_norm_euclid_vr
la_i_norm_inf_mc
la_i_norm_inf_mr
la_i_norm_inf_vc
la_i_norm_inf_vr
la_i_norm_max_mc
la_i_norm_max_mr
la_i_print_mc
la_i_print_mr
la_i_print_vc
la_i_print_vr
la_i_qr_eigen_mc
la_i_qr_eigen_mr
la_i_qr_factor_mc
la_i_qr_factor_mr
la_i_qr_sym_eigen_mc
la_i_qr_sym_eigen_mr
la_i_random_mc
la_i_random_mr
la_i_random_vc
la_i_random_vr
la_i_size_mc
la_i_size_mr
la_i_size_vc
la_i_size_vr
la_i_subtract_mc
la_i_subtract_mr
la_i_subtract_vc
la_i_subtract_vr
la_i_t_assign
la_i_trace_mc
la_i_trace_mr
la_i_transpose_mc
la_i_transpose_mr
la_i_upper_solve_mc
la_i_upper_solve_mr
la_i_vc_create
la_i_vc_set
la_i_vr_create
la_i_vr_set
la_k_a_assign
la_k_add_mc
la_k_add_mr
la_k_add_vc
la_k_add_vr
la_k_assign_a
la_k_assign_f
la_k_assign_mc
la_k_assign_mr
la_k_assign_t
la_k_assign_vc
la_k_assign_vr
la_k_conjugate_mc
la_k_conjugate_mr
la_k_conjugate_vc
la_k_conjugate_vr
la_k_current_f
la_k_current_vr
la_k_distance_vc
la_k_distance_vr
la_k_divide_mc
la_k_divide_mr
la_k_divide_vc
la_k_divide_vr
la_k_dot_mc
la_k_dot_mc_vc
la_k_dot_mr
la_k_dot_mr_vr
la_k_dot_vc
la_k_dot_vr
la_k_f_assign
la_k_get_mc
la_k_get_mr
la_k_get_vc
la_k_get_vr
la_k_invert_mc
la_k_invert_mr
la_k_lower_solve_mc
la_k_lower_solve_mr
la_k_lu_det_mc
la_k_lu_det_mr
la_k_lu_factor_mc
la_k_lu_factor_mr
la_k_lu_solve_mc
la_k_lu_solve_mr
la_k_mc_set
la_k_mr_set
la_k_multiply_mc
la_k_multiply_mr
la_k_multiply_vc
la_k_multiply_vr
la_k_norm1_mc
la_k_norm1_mr
la_k_norm1_vc
la_k_norm1_vr
la_k_norm_euclid_mc
la_k_norm_euclid_mr
la_k_norm_euclid_vc
la_k_norm_euclid_vr
la_k_norm_inf_mc
la_k_norm_inf_mr
la_k_norm_inf_vc
la_k_norm_inf_vr
la_k_norm_max_mc
la_k_norm_max_mr
la_k_qr_eigen_mc
la_k_qr_eigen_mr
la_k_qr_factor_mc
la_k_qr_factor_mr
la_k_qr_sym_eigen_mc
la_k_qr_sym_eigen_mr
la_k_random_mc
la_k_random_mr
la_k_random_vc
la_k_random_vr
la_k_subtract_mc
la_k_subtract_mr
la_k_subtract_vc
la_k_subtract_vr
la_k_t_assign
la_k_trace_mc
la_k_trace_mr
la_k_upper_solve_mc
la_k_upper_solve_mr
la_k_vc_set
la_k_vr_set
lag
lagud
lastcycle
lenarray
lfo
lfsr
limit
limit1
lincos
line
linen
linenr
lineto
link_beat_force
link_beat_get
link_beat_request
link_create
link_enable
link_is_enabled
link_metro
link_peers
link_tempo_get
link_tempo_set
linlin
linrand
linseg
linsegb
linsegr
liveconv
locsend
locsig
log
log10
log2
logbtwo
logcurve
loopseg
loopsegp
looptseg
loopxseg
lorenz
loscil
loscil3
loscil3phs
loscilphs
loscilx
lowpass2
lowres
lowresx
lpcanal
lpcfilter
lpf18
lpform
lpfreson
lphasor
lpinterp
lposcil
lposcil3
lposcila
lposcilsa
lposcilsa2
lpread
lpreson
lpshold
lpsholdp
lpslot
lufs
mac
maca
madsr
mags
mandel
mandol
maparray
maparray_i
marimba
massign
max
max_k
maxabs
maxabsaccum
maxaccum
maxalloc
maxarray
mclock
mdelay
median
mediank
metro
metro2
metrobpm
mfb
midglobal
midiarp
midic14
midic21
midic7
midichannelaftertouch
midichn
midicontrolchange
midictrl
mididefault
midifilestatus
midiin
midinoteoff
midinoteoncps
midinoteonkey
midinoteonoct
midinoteonpch
midion
midion2
midiout
midiout_i
midipgm
midipitchbend
midipolyaftertouch
midiprogramchange
miditempo
midremot
min
minabs
minabsaccum
minaccum
minarray
mincer
mirror
mode
modmatrix
monitor
moog
moogladder
moogladder2
moogvcf
moogvcf2
moscil
mp3bitrate
mp3in
mp3len
mp3nchnls
mp3out
mp3scal
mp3sr
mpulse
mrtmsg
ms2st
mtof
mton
multitap
mute
mvchpf
mvclpf1
mvclpf2
mvclpf3
mvclpf4
mvmfilter
mxadsr
nchnls_hw
nestedap
nlalp
nlfilt
nlfilt2
noise
noteoff
noteon
noteondur
noteondur2
notnum
nreverb
nrpn
nsamp
nstance
nstrnum
nstrstr
ntof
ntom
ntrpol
nxtpow2
octave
octcps
octmidi
octmidib
octmidinn
octpch
olabuffer
oscbnk
oscil
oscil1
oscil1i
oscil3
oscili
oscilikt
osciliktp
oscilikts
osciln
oscils
oscilx
out
out32
outall
outc
outch
outh
outiat
outic
outic14
outipat
outipb
outipc
outkat
outkc
outkc14
outkpat
outkpb
outkpc
outleta
outletf
outletk
outletkid
outletv
outo
outq
outq1
outq2
outq3
outq4
outrg
outs
outs1
outs2
outvalue
outx
outz
p
p5gconnect
p5gdata
pan
pan2
pareq
part2txt
partials
partikkel
partikkelget
partikkelset
partikkelsync
passign
paulstretch
pcauchy
pchbend
pchmidi
pchmidib
pchmidinn
pchoct
pchtom
pconvolve
pcount
pdclip
pdhalf
pdhalfy
peak
pgmassign
pgmchn
phaser1
phaser2
phasor
phasorbnk
phs
pindex
pinker
pinkish
pitch
pitchac
pitchamdf
planet
platerev
plltrack
pluck
poisson
pol2rect
polyaft
polynomial
port
portk
poscil
poscil3
pow
powershape
powoftwo
pows
prealloc
prepiano
print
print_type
printarray
printf
printf_i
printk
printk2
printks
printks2
println
prints
printsk
product
pset
ptablew
ptrack
puts
pvadd
pvbufread
pvcross
pvinterp
pvoc
pvread
pvs2array
pvs2tab
pvsadsyn
pvsanal
pvsarp
pvsbandp
pvsbandr
pvsbandwidth
pvsbin
pvsblur
pvsbuffer
pvsbufread
pvsbufread2
pvscale
pvscent
pvsceps
pvscfs
pvscross
pvsdemix
pvsdiskin
pvsdisp
pvsenvftw
pvsfilter
pvsfread
pvsfreeze
pvsfromarray
pvsftr
pvsftw
pvsfwrite
pvsgain
pvsgendy
pvshift
pvsifd
pvsin
pvsinfo
pvsinit
pvslock
pvslpc
pvsmaska
pvsmix
pvsmooth
pvsmorph
pvsosc
pvsout
pvspitch
pvstanal
pvstencil
pvstrace
pvsvoc
pvswarp
pvsynth
pwd
pyassign
pyassigni
pyassignt
pycall
pycall1
pycall1i
pycall1t
pycall2
pycall2i
pycall2t
pycall3
pycall3i
pycall3t
pycall4
pycall4i
pycall4t
pycall5
pycall5i
pycall5t
pycall6
pycall6i
pycall6t
pycall7
pycall7i
pycall7t
pycall8
pycall8i
pycall8t
pycalli
pycalln
pycallni
pycallt
pyeval
pyevali
pyevalt
pyexec
pyexeci
pyexect
pyinit
pylassign
pylassigni
pylassignt
pylcall
pylcall1
pylcall1i
pylcall1t
pylcall2
pylcall2i
pylcall2t
pylcall3
pylcall3i
pylcall3t
pylcall4
pylcall4i
pylcall4t
pylcall5
pylcall5i
pylcall5t
pylcall6
pylcall6i
pylcall6t
pylcall7
pylcall7i
pylcall7t
pylcall8
pylcall8i
pylcall8t
pylcalli
pylcalln
pylcallni
pylcallt
pyleval
pylevali
pylevalt
pylexec
pylexeci
pylexect
pylrun
pylruni
pylrunt
pyrun
pyruni
pyrunt
qinf
qnan
r2c
rand
randc
randh
randi
random
randomh
randomi
rbjeq
readclock
readf
readfi
readk
readk2
readk3
readk4
readks
readscore
readscratch
rect2pol
release
remoteport
remove
repluck
reshapearray
reson
resonbnk
resonk
resonr
resonx
resonxk
resony
resonz
resyn
reverb
reverb2
reverbsc
rewindscore
rezzy
rfft
rifft
rms
rnd
rnd31
rndseed
round
rspline
rtclock
s16b14
s32b14
samphold
sandpaper
sc_lag
sc_lagud
sc_phasor
sc_trig
scale
scale2
scalearray
scanhammer
scanmap
scans
scansmap
scantable
scanu
scanu2
schedkwhen
schedkwhennamed
schedule
schedulek
schedwhen
scoreline
scoreline_i
seed
sekere
select
semitone
sense
sensekey
seqtime
seqtime2
sequ
sequstate
serialBegin
serialEnd
serialFlush
serialPrint
serialRead
serialWrite
serialWrite_i
setcol
setctrl
setksmps
setrow
setscorepos
sfilist
sfinstr
sfinstr3
sfinstr3m
sfinstrm
sfload
sflooper
sfpassign
sfplay
sfplay3
sfplay3m
sfplaym
sfplist
sfpreset
shaker
shiftin
shiftout
signum
sin
sinh
sininv
sinsyn
skf
sleighbells
slicearray
slicearray_i
slider16
slider16f
slider16table
slider16tablef
slider32
slider32f
slider32table
slider32tablef
slider64
slider64f
slider64table
slider64tablef
slider8
slider8f
slider8table
slider8tablef
sliderKawai
sndloop
sndwarp
sndwarpst
sockrecv
sockrecvs
socksend
socksends
sorta
sortd
soundin
space
spat3d
spat3di
spat3dt
spdist
spf
splitrig
sprintf
sprintfk
spsend
sqrt
squinewave
st2ms
statevar
sterrain
stix
strcat
strcatk
strchar
strchark
strcmp
strcmpk
strcpy
strcpyk
strecv
streson
strfromurl
strget
strindex
strindexk
string2array
strlen
strlenk
strlower
strlowerk
strrindex
strrindexk
strset
strstrip
strsub
strsubk
strtod
strtodk
strtol
strtolk
strupper
strupperk
stsend
subinstr
subinstrinit
sum
sumarray
svfilter
svn
syncgrain
syncloop
syncphasor
system
system_i
tab
tab2array
tab2pvs
tab_i
tabifd
table
table3
table3kt
tablecopy
tablefilter
tablefilteri
tablegpw
tablei
tableicopy
tableigpw
tableikt
tableimix
tablekt
tablemix
tableng
tablera
tableseg
tableshuffle
tableshufflei
tablew
tablewa
tablewkt
tablexkt
tablexseg
tabmorph
tabmorpha
tabmorphak
tabmorphi
tabplay
tabrec
tabsum
tabw
tabw_i
tambourine
tan
tanh
taninv
taninv2
tbvcf
tempest
tempo
temposcal
tempoval
timedseq
timeinstk
timeinsts
timek
times
tival
tlineto
tone
tonek
tonex
tradsyn
trandom
transeg
transegb
transegr
trcross
trfilter
trhighest
trigExpseg
trigLinseg
trigexpseg
trigger
trighold
triglinseg
trigphasor
trigseq
trim
trim_i
trirand
trlowest
trmix
trscale
trshift
trsplit
turnoff
turnoff2
turnoff2_i
turnoff3
turnon
tvconv
unirand
unwrap
upsamp
urandom
urd
vactrol
vadd
vadd_i
vaddv
vaddv_i
vaget
valpass
vaset
vbap
vbapg
vbapgmove
vbaplsinit
vbapmove
vbapz
vbapzmove
vcella
vclpf
vco
vco2
vco2ft
vco2ift
vco2init
vcomb
vcopy
vcopy_i
vdel_k
vdelay
vdelay3
vdelayk
vdelayx
vdelayxq
vdelayxs
vdelayxw
vdelayxwq
vdelayxws
vdivv
vdivv_i
vecdelay
veloc
vexp
vexp_i
vexpseg
vexpv
vexpv_i
vibes
vibr
vibrato
vincr
vlimit
vlinseg
vlowres
vmap
vmirror
vmult
vmult_i
vmultv
vmultv_i
voice
vosim
vphaseseg
vport
vpow
vpow_i
vpowv
vpowv_i
vps
vpvoc
vrandh
vrandi
vsubv
vsubv_i
vtaba
vtabi
vtabk
vtable1k
vtablea
vtablei
vtablek
vtablewa
vtablewi
vtablewk
vtabwa
vtabwi
vtabwk
vwrap
waveset
websocket
weibull
wgbow
wgbowedbar
wgbrass
wgclar
wgflute
wgpluck
wgpluck2
wguide1
wguide2
wiiconnect
wiidata
wiirange
wiisend
window
wrap
writescratch
wterrain
wterrain2
xadsr
xin
xout
xtratim
xyscale
zacl
zakinit
zamod
zar
zarg
zaw
zawm
zdf_1pole
zdf_1pole_mode
zdf_2pole
zdf_2pole_mode
zdf_ladder
zfilter2
zir
ziw
ziwm
zkcl
zkmod
zkr
zkw
zkwm
'''.split())
DEPRECATED_OPCODES = set('''
array
bformdec
bformenc
copy2ftab
copy2ttab
hrtfer
ktableseg
lentab
maxtab
mintab
pop
pop_f
ptable
ptable3
ptablei
ptableiw
push
push_f
scalet
sndload
soundout
soundouts
specaddm
specdiff
specdisp
specfilt
spechist
specptrk
specscal
specsum
spectrum
stack
sumtab
tabgen
tableiw
tabmap
tabmap_i
tabslice
tb0
tb0_init
tb1
tb10
tb10_init
tb11
tb11_init
tb12
tb12_init
tb13
tb13_init
tb14
tb14_init
tb15
tb15_init
tb1_init
tb2
tb2_init
tb3
tb3_init
tb4
tb4_init
tb5
tb5_init
tb6
tb6_init
tb7
tb7_init
tb8
tb8_init
tb9
tb9_init
vbap16
vbap4
vbap4move
vbap8
vbap8move
xscanmap
xscans
xscansmap
xscanu
xyin
'''.split())
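# Illustrative sketch (not part of the original module): these names are plain
# ``set`` objects, so a consumer (presumably the Csound lexer elsewhere in
# pygments) can classify an identifier with simple membership tests. Running
# this file directly prints a few such lookups; 'not_an_opcode' is made up.
if __name__ == '__main__':
    for name in ('oscili', 'soundout', 'lua_exec', 'not_an_opcode'):
        print(name, name in OPCODES, name in DEPRECATED_OPCODES, name in REMOVED_OPCODES)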
| 18,414 | Python | 9.339697 | 124 | 0.809113 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/wcwidth/wcwidth.py | """
This is a python implementation of wcwidth() and wcswidth().
https://github.com/jquast/wcwidth
from Markus Kuhn's C code, retrieved from:
http://www.cl.cam.ac.uk/~mgk25/ucs/wcwidth.c
This is an implementation of wcwidth() and wcswidth() (defined in
IEEE Std 1003.1-2001) for Unicode.
http://www.opengroup.org/onlinepubs/007904975/functions/wcwidth.html
http://www.opengroup.org/onlinepubs/007904975/functions/wcswidth.html
In fixed-width output devices, Latin characters all occupy a single
"cell" position of equal width, whereas ideographic CJK characters
occupy two such cells. Interoperability between terminal-line
applications and (teletype-style) character terminals using the
UTF-8 encoding requires agreement on which character should advance
the cursor by how many cell positions. No established formal
standards exist at present on which Unicode character shall occupy
how many cell positions on character terminals. These routines are
a first attempt of defining such behavior based on simple rules
applied to data provided by the Unicode Consortium.
For some graphical characters, the Unicode standard explicitly
defines a character-cell width via the definition of the East Asian
FullWidth (F), Wide (W), Half-width (H), and Narrow (Na) classes.
In all these cases, there is no ambiguity about which width a
terminal shall use. For characters in the East Asian Ambiguous (A)
class, the width choice depends purely on a preference of backward
compatibility with either historic CJK or Western practice.
Choosing single-width for these characters is easy to justify as
the appropriate long-term solution, as the CJK practice of
displaying these characters as double-width comes from historic
implementation simplicity (8-bit encoded characters were displayed
single-width and 16-bit ones double-width, even for Greek,
Cyrillic, etc.) and not any typographic considerations.
Much less clear is the choice of width for the Not East Asian
(Neutral) class. Existing practice does not dictate a width for any
of these characters. It would nevertheless make sense
typographically to allocate two character cells to characters such
as for instance EM SPACE or VOLUME INTEGRAL, which cannot be
represented adequately with a single-width glyph. The following
routines at present merely assign a single-cell width to all
neutral characters, in the interest of simplicity. This is not
entirely satisfactory and should be reconsidered before
establishing a formal standard in this area. At the moment, the
decision which Not East Asian (Neutral) characters should be
represented by double-width glyphs cannot yet be answered by
applying a simple rule from the Unicode database content. Setting
up a proper standard for the behavior of UTF-8 character terminals
will require a careful analysis not only of each Unicode character,
but also of each presentation form, something the author of these
routines has avoided to do so far.
http://www.unicode.org/unicode/reports/tr11/
Latest version: http://www.cl.cam.ac.uk/~mgk25/ucs/wcwidth.c
"""
from __future__ import division
# std imports
import os
import sys
import warnings
# local
from .table_wide import WIDE_EASTASIAN
from .table_zero import ZERO_WIDTH
from .unicode_versions import list_versions
try:
# std imports
from functools import lru_cache
except ImportError:
# lru_cache was added in Python 3.2
# 3rd party
from backports.functools_lru_cache import lru_cache
# global cache
_UNICODE_CMPTABLE = None
_PY3 = (sys.version_info[0] >= 3)
# NOTE: created by hand, there isn't anything identifiable other than
# general Cf category code to identify these, and some characters in Cf
# category code are of non-zero width.
# Also includes some Cc, Mn, Zl, and Zp characters
ZERO_WIDTH_CF = set([
0, # Null (Cc)
0x034F, # Combining grapheme joiner (Mn)
0x200B, # Zero width space
0x200C, # Zero width non-joiner
0x200D, # Zero width joiner
0x200E, # Left-to-right mark
0x200F, # Right-to-left mark
0x2028, # Line separator (Zl)
0x2029, # Paragraph separator (Zp)
0x202A, # Left-to-right embedding
0x202B, # Right-to-left embedding
0x202C, # Pop directional formatting
0x202D, # Left-to-right override
0x202E, # Right-to-left override
0x2060, # Word joiner
0x2061, # Function application
0x2062, # Invisible times
0x2063, # Invisible separator
])
def _bisearch(ucs, table):
"""
Auxiliary function for binary search in interval table.
:arg int ucs: Ordinal value of unicode character.
:arg list table: List of starting and ending ranges of ordinal values,
in form of ``[(start, end), ...]``.
:rtype: int
:returns: 1 if ordinal value ucs is found within lookup table, else 0.
"""
lbound = 0
ubound = len(table) - 1
if ucs < table[0][0] or ucs > table[ubound][1]:
return 0
while ubound >= lbound:
mid = (lbound + ubound) // 2
if ucs > table[mid][1]:
lbound = mid + 1
elif ucs < table[mid][0]:
ubound = mid - 1
else:
return 1
return 0
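# Illustrative examples (not part of the original module) of _bisearch() with a
# one-interval table covering the Combining Diacritical Marks block:
#
#     >>> _bisearch(0x0301, [(0x0300, 0x036f)])   # COMBINING ACUTE ACCENT
#     1
#     >>> _bisearch(0x0041, [(0x0300, 0x036f)])   # LATIN CAPITAL LETTER A
#     0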
@lru_cache(maxsize=1000)
def wcwidth(wc, unicode_version='auto'):
r"""
Given one Unicode character, return its printable length on a terminal.
:param str wc: A single Unicode character.
:param str unicode_version: A Unicode version number, such as
        ``'6.0.0'``; the list of available version levels may be
        listed by pairing function :func:`list_versions`.
        Any version string may be specified without error -- the nearest
        matching version is selected.  When ``auto`` (default), the
        ``UNICODE_VERSION`` environment variable is used if set, otherwise
        the highest available Unicode version level is used.
:return: The width, in cells, necessary to display the character of
Unicode string character, ``wc``. Returns 0 if the ``wc`` argument has
no printable effect on a terminal (such as NUL '\0'), -1 if ``wc`` is
not printable, or has an indeterminate effect on the terminal, such as
a control character. Otherwise, the number of column positions the
character occupies on a graphic terminal (1 or 2) is returned.
:rtype: int
The following have a column width of -1:
    - C0 control characters (U+0001 through U+001F).
    - C1 control characters and DEL (U+007F through U+00A0).
The following have a column width of 0:
- Non-spacing and enclosing combining characters (general
category code Mn or Me in the Unicode database).
- NULL (``U+0000``).
- COMBINING GRAPHEME JOINER (``U+034F``).
- ZERO WIDTH SPACE (``U+200B``) *through*
RIGHT-TO-LEFT MARK (``U+200F``).
- LINE SEPARATOR (``U+2028``) *and*
PARAGRAPH SEPARATOR (``U+2029``).
- LEFT-TO-RIGHT EMBEDDING (``U+202A``) *through*
RIGHT-TO-LEFT OVERRIDE (``U+202E``).
- WORD JOINER (``U+2060``) *through*
INVISIBLE SEPARATOR (``U+2063``).
The following have a column width of 1:
- SOFT HYPHEN (``U+00AD``).
- All remaining characters, including all printable ISO 8859-1
and WGL4 characters, Unicode control characters, etc.
The following have a column width of 2:
- Spacing characters in the East Asian Wide (W) or East Asian
Full-width (F) category as defined in Unicode Technical
Report #11 have a column width of 2.
- Some kinds of Emoji or symbols.
"""
# NOTE: created by hand, there isn't anything identifiable other than
# general Cf category code to identify these, and some characters in Cf
# category code are of non-zero width.
ucs = ord(wc)
if ucs in ZERO_WIDTH_CF:
return 0
# C0/C1 control characters
if ucs < 32 or 0x07F <= ucs < 0x0A0:
return -1
_unicode_version = _wcmatch_version(unicode_version)
# combining characters with zero width
if _bisearch(ucs, ZERO_WIDTH[_unicode_version]):
return 0
    # "Wide EastAsian" (and emojis)
return 1 + _bisearch(ucs, WIDE_EASTASIAN[_unicode_version])
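# Illustrative examples (not part of the original module) of the width classes
# documented in the docstring above, using the default ('auto') version lookup:
#
#     >>> wcwidth('a')        # an ordinary narrow character
#     1
#     >>> wcwidth('\u30b3')   # KATAKANA LETTER KO, East Asian Wide
#     2
#     >>> wcwidth('\u0301')   # COMBINING ACUTE ACCENT, zero width
#     0
#     >>> wcwidth('\x07')     # BEL, a C0 control character
#     -1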
def wcswidth(pwcs, n=None, unicode_version='auto'):
"""
Given a unicode string, return its printable length on a terminal.
:param str pwcs: Measure width of given unicode string.
:param int n: When ``n`` is None (default), return the length of the
        entire string, otherwise return the width of only the first ``n``
        characters specified.
:param str unicode_version: An explicit definition of the unicode version
level to use for determination, may be ``auto`` (default), which uses
the Environment Variable, ``UNICODE_VERSION`` if defined, or the latest
available unicode version, otherwise.
:rtype: int
:returns: The width, in cells, necessary to display the first ``n``
characters of the unicode string ``pwcs``. Returns ``-1`` if
a non-printable character is encountered.
"""
# pylint: disable=C0103
# Invalid argument name "n"
end = len(pwcs) if n is None else n
idx = slice(0, end)
width = 0
for char in pwcs[idx]:
wcw = wcwidth(char, unicode_version)
if wcw < 0:
return -1
width += wcw
return width
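# Illustrative examples (not part of the original module): wcswidth() sums the
# per-character widths and returns -1 as soon as any character is unprintable:
#
#     >>> wcswidth('abc')
#     3
#     >>> wcswidth('\u30b3\u30f3\u30cb\u30c1\u30cf')   # five wide characters
#     10
#     >>> wcswidth('a\tb')                             # TAB is a control character
#     -1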
@lru_cache(maxsize=128)
def _wcversion_value(ver_string):
"""
Integer-mapped value of given dotted version string.
:param str ver_string: Unicode version string, of form ``n.n.n``.
:rtype: tuple(int)
:returns: tuple of digit tuples, ``tuple(int, [...])``.
"""
retval = tuple(map(int, (ver_string.split('.'))))
return retval
@lru_cache(maxsize=8)
def _wcmatch_version(given_version):
"""
Return nearest matching supported Unicode version level.
If an exact match is not determined, the nearest lowest version level is
returned after a warning is emitted. For example, given supported levels
``4.1.0`` and ``5.0.0``, and a version string of ``4.9.9``, then ``4.1.0``
is selected and returned:
>>> _wcmatch_version('4.9.9')
'4.1.0'
>>> _wcmatch_version('8.0')
'8.0.0'
>>> _wcmatch_version('1')
'4.1.0'
:param str given_version: given version for compare, may be ``auto``
(default), to select Unicode Version from Environment Variable,
``UNICODE_VERSION``. If the environment variable is not set, then the
latest is used.
:rtype: str
:returns: unicode string, or non-unicode ``str`` type for python 2
when given ``version`` is also type ``str``.
"""
# Design note: the choice to return the same type that is given certainly
# complicates it for python 2 str-type, but allows us to define an api that
    # uses 'string-type' for unicode version level definitions, so all of our
# example code works with all versions of python. That, along with the
# string-to-numeric and comparisons of earliest, latest, matching, or
# nearest, greatly complicates this function.
_return_str = not _PY3 and isinstance(given_version, str)
if _return_str:
unicode_versions = [ucs.encode() for ucs in list_versions()]
else:
unicode_versions = list_versions()
latest_version = unicode_versions[-1]
if given_version in (u'auto', 'auto'):
given_version = os.environ.get(
'UNICODE_VERSION',
'latest' if not _return_str else latest_version.encode())
if given_version in (u'latest', 'latest'):
# default match, when given as 'latest', use the most latest unicode
# version specification level supported.
return latest_version if not _return_str else latest_version.encode()
if given_version in unicode_versions:
# exact match, downstream has specified an explicit matching version
# matching any value of list_versions().
return given_version if not _return_str else given_version.encode()
# The user's version is not supported by ours. We return the newest unicode
# version level that we support below their given value.
try:
cmp_given = _wcversion_value(given_version)
except ValueError:
# submitted value raises ValueError in int(), warn and use latest.
warnings.warn("UNICODE_VERSION value, {given_version!r}, is invalid. "
"Value should be in form of `integer[.]+', the latest "
"supported unicode version {latest_version!r} has been "
"inferred.".format(given_version=given_version,
latest_version=latest_version))
return latest_version if not _return_str else latest_version.encode()
# given version is less than any available version, return earliest
# version.
earliest_version = unicode_versions[0]
cmp_earliest_version = _wcversion_value(earliest_version)
if cmp_given <= cmp_earliest_version:
# this probably isn't what you wanted, the oldest wcwidth.c you will
# find in the wild is likely version 5 or 6, which we both support,
# but it's better than not saying anything at all.
warnings.warn("UNICODE_VERSION value, {given_version!r}, is lower "
"than any available unicode version. Returning lowest "
"version level, {earliest_version!r}".format(
given_version=given_version,
earliest_version=earliest_version))
return earliest_version if not _return_str else earliest_version.encode()
    # create list of versions which are less than or equal to given version,
# and return the tail value, which is the highest level we may support,
# or the latest value we support, when completely unmatched or higher
# than any supported version.
#
# function will never complete, always returns.
for idx, unicode_version in enumerate(unicode_versions):
# look ahead to next value
try:
cmp_next_version = _wcversion_value(unicode_versions[idx + 1])
except IndexError:
# at end of list, return latest version
return latest_version if not _return_str else latest_version.encode()
# Maybe our given version has less parts, as in tuple(8, 0), than the
# next compare version tuple(8, 0, 0). Test for an exact match by
# comparison of only the leading dotted piece(s): (8, 0) == (8, 0).
if cmp_given == cmp_next_version[:len(cmp_given)]:
return unicode_versions[idx + 1]
# Or, if any next value is greater than our given support level
        # version, return the value at the current index.  Even though it must
        # be less than the given value, it's our closest possible match.  That
# is, 4.1 is returned for given 4.9.9, where 4.1 and 5.0 are available.
if cmp_next_version > cmp_given:
return unicode_version
assert False, ("Code path unreachable", given_version, unicode_versions)
| 14,942 | Python | 38.427441 | 81 | 0.67849 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/wcwidth/__init__.py | """
wcwidth module.
https://github.com/jquast/wcwidth
"""
# re-export all functions & definitions, even private ones, from top-level
# module path, to allow for 'from wcwidth import _private_func'. Of course,
# user beware that any _private function may disappear or change signature at
# any future version.
# local
from .wcwidth import ZERO_WIDTH # noqa
from .wcwidth import (WIDE_EASTASIAN,
wcwidth,
wcswidth,
_bisearch,
list_versions,
_wcmatch_version,
_wcversion_value)
# The __all__ attribute defines the items exported by the statement
# 'from wcwidth import *', but also serves to say, "This is the public API".
__all__ = ('wcwidth', 'wcswidth', 'list_versions')
# We also used pkg_resources to load unicode version tables from version.json,
# generated by bin/update-tables.py, but some environments are unable to
# import pkg_resources for one reason or another, yikes!
__version__ = '0.2.6'
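# Illustrative usage sketch (not part of the original module), exercising the
# public API re-exported above:
#
#     >>> from wcwidth import wcswidth, list_versions
#     >>> wcswidth('wide: \u30b3\u30f3')
#     10
#     >>> list_versions()[-1]   # newest supported level; value depends on the bundled tables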
| 1,032 | Python | 34.620688 | 78 | 0.643411 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/wcwidth/table_zero.py | """
Exports ZERO_WIDTH table keyed by supporting unicode version level.
This code generated by wcwidth/bin/update-tables.py on 2023-01-14 03:25:41 UTC.
"""
ZERO_WIDTH = {
'4.1.0': (
# Source: DerivedGeneralCategory-4.1.0.txt
# Date: 2005-02-26, 02:35:50 GMT [MD]
#
(0x00300, 0x0036f,), # Combining Grave Accent ..Combining Latin Small Le
(0x00483, 0x00486,), # Combining Cyrillic Titlo..Combining Cyrillic Psili
(0x00488, 0x00489,), # Combining Cyrillic Hundr..Combining Cyrillic Milli
(0x00591, 0x005b9,), # Hebrew Accent Etnahta ..Hebrew Point Holam
(0x005bb, 0x005bd,), # Hebrew Point Qubuts ..Hebrew Point Meteg
(0x005bf, 0x005bf,), # Hebrew Point Rafe
(0x005c1, 0x005c2,), # Hebrew Point Shin Dot ..Hebrew Point Sin Dot
(0x005c4, 0x005c5,), # Hebrew Mark Upper Dot ..Hebrew Mark Lower Dot
(0x005c7, 0x005c7,), # Hebrew Point Qamats Qatan
(0x00610, 0x00615,), # Arabic Sign Sallallahou ..Arabic Small High Tah
(0x0064b, 0x0065e,), # Arabic Fathatan ..Arabic Fatha With Two Do
(0x00670, 0x00670,), # Arabic Letter Superscript Alef
(0x006d6, 0x006dc,), # Arabic Small High Ligatu..Arabic Small High Seen
(0x006de, 0x006e4,), # Arabic Start Of Rub El H..Arabic Small High Madda
(0x006e7, 0x006e8,), # Arabic Small High Yeh ..Arabic Small High Noon
(0x006ea, 0x006ed,), # Arabic Empty Centre Low ..Arabic Small Low Meem
(0x00711, 0x00711,), # Syriac Letter Superscript Alaph
(0x00730, 0x0074a,), # Syriac Pthaha Above ..Syriac Barrekh
(0x007a6, 0x007b0,), # Thaana Abafili ..Thaana Sukun
(0x00901, 0x00902,), # Devanagari Sign Candrabi..Devanagari Sign Anusvara
(0x0093c, 0x0093c,), # Devanagari Sign Nukta
(0x00941, 0x00948,), # Devanagari Vowel Sign U ..Devanagari Vowel Sign Ai
(0x0094d, 0x0094d,), # Devanagari Sign Virama
(0x00951, 0x00954,), # Devanagari Stress Sign U..Devanagari Acute Accent
(0x00962, 0x00963,), # Devanagari Vowel Sign Vo..Devanagari Vowel Sign Vo
(0x00981, 0x00981,), # Bengali Sign Candrabindu
(0x009bc, 0x009bc,), # Bengali Sign Nukta
(0x009c1, 0x009c4,), # Bengali Vowel Sign U ..Bengali Vowel Sign Vocal
(0x009cd, 0x009cd,), # Bengali Sign Virama
(0x009e2, 0x009e3,), # Bengali Vowel Sign Vocal..Bengali Vowel Sign Vocal
(0x00a01, 0x00a02,), # Gurmukhi Sign Adak Bindi..Gurmukhi Sign Bindi
(0x00a3c, 0x00a3c,), # Gurmukhi Sign Nukta
(0x00a41, 0x00a42,), # Gurmukhi Vowel Sign U ..Gurmukhi Vowel Sign Uu
(0x00a47, 0x00a48,), # Gurmukhi Vowel Sign Ee ..Gurmukhi Vowel Sign Ai
(0x00a4b, 0x00a4d,), # Gurmukhi Vowel Sign Oo ..Gurmukhi Sign Virama
(0x00a70, 0x00a71,), # Gurmukhi Tippi ..Gurmukhi Addak
(0x00a81, 0x00a82,), # Gujarati Sign Candrabind..Gujarati Sign Anusvara
(0x00abc, 0x00abc,), # Gujarati Sign Nukta
(0x00ac1, 0x00ac5,), # Gujarati Vowel Sign U ..Gujarati Vowel Sign Cand
(0x00ac7, 0x00ac8,), # Gujarati Vowel Sign E ..Gujarati Vowel Sign Ai
(0x00acd, 0x00acd,), # Gujarati Sign Virama
(0x00ae2, 0x00ae3,), # Gujarati Vowel Sign Voca..Gujarati Vowel Sign Voca
(0x00b01, 0x00b01,), # Oriya Sign Candrabindu
(0x00b3c, 0x00b3c,), # Oriya Sign Nukta
(0x00b3f, 0x00b3f,), # Oriya Vowel Sign I
(0x00b41, 0x00b43,), # Oriya Vowel Sign U ..Oriya Vowel Sign Vocalic
(0x00b4d, 0x00b4d,), # Oriya Sign Virama
(0x00b56, 0x00b56,), # Oriya Ai Length Mark
(0x00b82, 0x00b82,), # Tamil Sign Anusvara
(0x00bc0, 0x00bc0,), # Tamil Vowel Sign Ii
(0x00bcd, 0x00bcd,), # Tamil Sign Virama
(0x00c3e, 0x00c40,), # Telugu Vowel Sign Aa ..Telugu Vowel Sign Ii
(0x00c46, 0x00c48,), # Telugu Vowel Sign E ..Telugu Vowel Sign Ai
(0x00c4a, 0x00c4d,), # Telugu Vowel Sign O ..Telugu Sign Virama
(0x00c55, 0x00c56,), # Telugu Length Mark ..Telugu Ai Length Mark
(0x00cbc, 0x00cbc,), # Kannada Sign Nukta
(0x00cbf, 0x00cbf,), # Kannada Vowel Sign I
(0x00cc6, 0x00cc6,), # Kannada Vowel Sign E
(0x00ccc, 0x00ccd,), # Kannada Vowel Sign Au ..Kannada Sign Virama
(0x00d41, 0x00d43,), # Malayalam Vowel Sign U ..Malayalam Vowel Sign Voc
(0x00d4d, 0x00d4d,), # Malayalam Sign Virama
(0x00dca, 0x00dca,), # Sinhala Sign Al-lakuna
(0x00dd2, 0x00dd4,), # Sinhala Vowel Sign Ketti..Sinhala Vowel Sign Ketti
(0x00dd6, 0x00dd6,), # Sinhala Vowel Sign Diga Paa-pilla
(0x00e31, 0x00e31,), # Thai Character Mai Han-akat
(0x00e34, 0x00e3a,), # Thai Character Sara I ..Thai Character Phinthu
(0x00e47, 0x00e4e,), # Thai Character Maitaikhu..Thai Character Yamakkan
(0x00eb1, 0x00eb1,), # Lao Vowel Sign Mai Kan
(0x00eb4, 0x00eb9,), # Lao Vowel Sign I ..Lao Vowel Sign Uu
(0x00ebb, 0x00ebc,), # Lao Vowel Sign Mai Kon ..Lao Semivowel Sign Lo
(0x00ec8, 0x00ecd,), # Lao Tone Mai Ek ..Lao Niggahita
(0x00f18, 0x00f19,), # Tibetan Astrological Sig..Tibetan Astrological Sig
(0x00f35, 0x00f35,), # Tibetan Mark Ngas Bzung Nyi Zla
(0x00f37, 0x00f37,), # Tibetan Mark Ngas Bzung Sgor Rtags
(0x00f39, 0x00f39,), # Tibetan Mark Tsa -phru
(0x00f71, 0x00f7e,), # Tibetan Vowel Sign Aa ..Tibetan Sign Rjes Su Nga
(0x00f80, 0x00f84,), # Tibetan Vowel Sign Rever..Tibetan Mark Halanta
(0x00f86, 0x00f87,), # Tibetan Sign Lci Rtags ..Tibetan Sign Yang Rtags
(0x00f90, 0x00f97,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00f99, 0x00fbc,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00fc6, 0x00fc6,), # Tibetan Symbol Padma Gdan
(0x0102d, 0x01030,), # Myanmar Vowel Sign I ..Myanmar Vowel Sign Uu
(0x01032, 0x01032,), # Myanmar Vowel Sign Ai
(0x01036, 0x01037,), # Myanmar Sign Anusvara ..Myanmar Sign Dot Below
(0x01039, 0x01039,), # Myanmar Sign Virama
(0x01058, 0x01059,), # Myanmar Vowel Sign Vocal..Myanmar Vowel Sign Vocal
(0x0135f, 0x0135f,), # Ethiopic Combining Gemination Mark
(0x01712, 0x01714,), # Tagalog Vowel Sign I ..Tagalog Sign Virama
(0x01732, 0x01734,), # Hanunoo Vowel Sign I ..Hanunoo Sign Pamudpod
(0x01752, 0x01753,), # Buhid Vowel Sign I ..Buhid Vowel Sign U
(0x01772, 0x01773,), # Tagbanwa Vowel Sign I ..Tagbanwa Vowel Sign U
(0x017b7, 0x017bd,), # Khmer Vowel Sign I ..Khmer Vowel Sign Ua
(0x017c6, 0x017c6,), # Khmer Sign Nikahit
(0x017c9, 0x017d3,), # Khmer Sign Muusikatoan ..Khmer Sign Bathamasat
(0x017dd, 0x017dd,), # Khmer Sign Atthacan
(0x0180b, 0x0180d,), # Mongolian Free Variation..Mongolian Free Variation
(0x018a9, 0x018a9,), # Mongolian Letter Ali Gali Dagalga
(0x01920, 0x01922,), # Limbu Vowel Sign A ..Limbu Vowel Sign U
(0x01927, 0x01928,), # Limbu Vowel Sign E ..Limbu Vowel Sign O
(0x01932, 0x01932,), # Limbu Small Letter Anusvara
(0x01939, 0x0193b,), # Limbu Sign Mukphreng ..Limbu Sign Sa-i
(0x01a17, 0x01a18,), # Buginese Vowel Sign I ..Buginese Vowel Sign U
(0x01dc0, 0x01dc3,), # Combining Dotted Grave A..Combining Suspension Mar
(0x020d0, 0x020eb,), # Combining Left Harpoon A..Combining Long Double So
(0x0302a, 0x0302f,), # Ideographic Level Tone M..Hangul Double Dot Tone M
(0x03099, 0x0309a,), # Combining Katakana-hirag..Combining Katakana-hirag
(0x0a806, 0x0a806,), # Syloti Nagri Sign Hasanta
(0x0a80b, 0x0a80b,), # Syloti Nagri Sign Anusvara
(0x0a825, 0x0a826,), # Syloti Nagri Vowel Sign ..Syloti Nagri Vowel Sign
(0x0fb1e, 0x0fb1e,), # Hebrew Point Judeo-spanish Varika
(0x0fe00, 0x0fe0f,), # Variation Selector-1 ..Variation Selector-16
(0x0fe20, 0x0fe23,), # Combining Ligature Left ..Combining Double Tilde R
(0x10a01, 0x10a03,), # Kharoshthi Vowel Sign I ..Kharoshthi Vowel Sign Vo
(0x10a05, 0x10a06,), # Kharoshthi Vowel Sign E ..Kharoshthi Vowel Sign O
(0x10a0c, 0x10a0f,), # Kharoshthi Vowel Length ..Kharoshthi Sign Visarga
(0x10a38, 0x10a3a,), # Kharoshthi Sign Bar Abov..Kharoshthi Sign Dot Belo
(0x10a3f, 0x10a3f,), # Kharoshthi Virama
(0x1d167, 0x1d169,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d17b, 0x1d182,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d185, 0x1d18b,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d1aa, 0x1d1ad,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d242, 0x1d244,), # Combining Greek Musical ..Combining Greek Musical
(0xe0100, 0xe01ef,), # Variation Selector-17 ..Variation Selector-256
),
'5.0.0': (
# Source: DerivedGeneralCategory-5.0.0.txt
# Date: 2006-02-27, 23:41:27 GMT [MD]
#
(0x00300, 0x0036f,), # Combining Grave Accent ..Combining Latin Small Le
(0x00483, 0x00486,), # Combining Cyrillic Titlo..Combining Cyrillic Psili
(0x00488, 0x00489,), # Combining Cyrillic Hundr..Combining Cyrillic Milli
(0x00591, 0x005bd,), # Hebrew Accent Etnahta ..Hebrew Point Meteg
(0x005bf, 0x005bf,), # Hebrew Point Rafe
(0x005c1, 0x005c2,), # Hebrew Point Shin Dot ..Hebrew Point Sin Dot
(0x005c4, 0x005c5,), # Hebrew Mark Upper Dot ..Hebrew Mark Lower Dot
(0x005c7, 0x005c7,), # Hebrew Point Qamats Qatan
(0x00610, 0x00615,), # Arabic Sign Sallallahou ..Arabic Small High Tah
(0x0064b, 0x0065e,), # Arabic Fathatan ..Arabic Fatha With Two Do
(0x00670, 0x00670,), # Arabic Letter Superscript Alef
(0x006d6, 0x006dc,), # Arabic Small High Ligatu..Arabic Small High Seen
(0x006de, 0x006e4,), # Arabic Start Of Rub El H..Arabic Small High Madda
(0x006e7, 0x006e8,), # Arabic Small High Yeh ..Arabic Small High Noon
(0x006ea, 0x006ed,), # Arabic Empty Centre Low ..Arabic Small Low Meem
(0x00711, 0x00711,), # Syriac Letter Superscript Alaph
(0x00730, 0x0074a,), # Syriac Pthaha Above ..Syriac Barrekh
(0x007a6, 0x007b0,), # Thaana Abafili ..Thaana Sukun
(0x007eb, 0x007f3,), # Nko Combining Short High..Nko Combining Double Dot
(0x00901, 0x00902,), # Devanagari Sign Candrabi..Devanagari Sign Anusvara
(0x0093c, 0x0093c,), # Devanagari Sign Nukta
(0x00941, 0x00948,), # Devanagari Vowel Sign U ..Devanagari Vowel Sign Ai
(0x0094d, 0x0094d,), # Devanagari Sign Virama
(0x00951, 0x00954,), # Devanagari Stress Sign U..Devanagari Acute Accent
(0x00962, 0x00963,), # Devanagari Vowel Sign Vo..Devanagari Vowel Sign Vo
(0x00981, 0x00981,), # Bengali Sign Candrabindu
(0x009bc, 0x009bc,), # Bengali Sign Nukta
(0x009c1, 0x009c4,), # Bengali Vowel Sign U ..Bengali Vowel Sign Vocal
(0x009cd, 0x009cd,), # Bengali Sign Virama
(0x009e2, 0x009e3,), # Bengali Vowel Sign Vocal..Bengali Vowel Sign Vocal
(0x00a01, 0x00a02,), # Gurmukhi Sign Adak Bindi..Gurmukhi Sign Bindi
(0x00a3c, 0x00a3c,), # Gurmukhi Sign Nukta
(0x00a41, 0x00a42,), # Gurmukhi Vowel Sign U ..Gurmukhi Vowel Sign Uu
(0x00a47, 0x00a48,), # Gurmukhi Vowel Sign Ee ..Gurmukhi Vowel Sign Ai
(0x00a4b, 0x00a4d,), # Gurmukhi Vowel Sign Oo ..Gurmukhi Sign Virama
(0x00a70, 0x00a71,), # Gurmukhi Tippi ..Gurmukhi Addak
(0x00a81, 0x00a82,), # Gujarati Sign Candrabind..Gujarati Sign Anusvara
(0x00abc, 0x00abc,), # Gujarati Sign Nukta
(0x00ac1, 0x00ac5,), # Gujarati Vowel Sign U ..Gujarati Vowel Sign Cand
(0x00ac7, 0x00ac8,), # Gujarati Vowel Sign E ..Gujarati Vowel Sign Ai
(0x00acd, 0x00acd,), # Gujarati Sign Virama
(0x00ae2, 0x00ae3,), # Gujarati Vowel Sign Voca..Gujarati Vowel Sign Voca
(0x00b01, 0x00b01,), # Oriya Sign Candrabindu
(0x00b3c, 0x00b3c,), # Oriya Sign Nukta
(0x00b3f, 0x00b3f,), # Oriya Vowel Sign I
(0x00b41, 0x00b43,), # Oriya Vowel Sign U ..Oriya Vowel Sign Vocalic
(0x00b4d, 0x00b4d,), # Oriya Sign Virama
(0x00b56, 0x00b56,), # Oriya Ai Length Mark
(0x00b82, 0x00b82,), # Tamil Sign Anusvara
(0x00bc0, 0x00bc0,), # Tamil Vowel Sign Ii
(0x00bcd, 0x00bcd,), # Tamil Sign Virama
(0x00c3e, 0x00c40,), # Telugu Vowel Sign Aa ..Telugu Vowel Sign Ii
(0x00c46, 0x00c48,), # Telugu Vowel Sign E ..Telugu Vowel Sign Ai
(0x00c4a, 0x00c4d,), # Telugu Vowel Sign O ..Telugu Sign Virama
(0x00c55, 0x00c56,), # Telugu Length Mark ..Telugu Ai Length Mark
(0x00cbc, 0x00cbc,), # Kannada Sign Nukta
(0x00cbf, 0x00cbf,), # Kannada Vowel Sign I
(0x00cc6, 0x00cc6,), # Kannada Vowel Sign E
(0x00ccc, 0x00ccd,), # Kannada Vowel Sign Au ..Kannada Sign Virama
(0x00ce2, 0x00ce3,), # Kannada Vowel Sign Vocal..Kannada Vowel Sign Vocal
(0x00d41, 0x00d43,), # Malayalam Vowel Sign U ..Malayalam Vowel Sign Voc
(0x00d4d, 0x00d4d,), # Malayalam Sign Virama
(0x00dca, 0x00dca,), # Sinhala Sign Al-lakuna
(0x00dd2, 0x00dd4,), # Sinhala Vowel Sign Ketti..Sinhala Vowel Sign Ketti
(0x00dd6, 0x00dd6,), # Sinhala Vowel Sign Diga Paa-pilla
(0x00e31, 0x00e31,), # Thai Character Mai Han-akat
(0x00e34, 0x00e3a,), # Thai Character Sara I ..Thai Character Phinthu
(0x00e47, 0x00e4e,), # Thai Character Maitaikhu..Thai Character Yamakkan
(0x00eb1, 0x00eb1,), # Lao Vowel Sign Mai Kan
(0x00eb4, 0x00eb9,), # Lao Vowel Sign I ..Lao Vowel Sign Uu
(0x00ebb, 0x00ebc,), # Lao Vowel Sign Mai Kon ..Lao Semivowel Sign Lo
(0x00ec8, 0x00ecd,), # Lao Tone Mai Ek ..Lao Niggahita
(0x00f18, 0x00f19,), # Tibetan Astrological Sig..Tibetan Astrological Sig
(0x00f35, 0x00f35,), # Tibetan Mark Ngas Bzung Nyi Zla
(0x00f37, 0x00f37,), # Tibetan Mark Ngas Bzung Sgor Rtags
(0x00f39, 0x00f39,), # Tibetan Mark Tsa -phru
(0x00f71, 0x00f7e,), # Tibetan Vowel Sign Aa ..Tibetan Sign Rjes Su Nga
(0x00f80, 0x00f84,), # Tibetan Vowel Sign Rever..Tibetan Mark Halanta
(0x00f86, 0x00f87,), # Tibetan Sign Lci Rtags ..Tibetan Sign Yang Rtags
(0x00f90, 0x00f97,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00f99, 0x00fbc,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00fc6, 0x00fc6,), # Tibetan Symbol Padma Gdan
(0x0102d, 0x01030,), # Myanmar Vowel Sign I ..Myanmar Vowel Sign Uu
(0x01032, 0x01032,), # Myanmar Vowel Sign Ai
(0x01036, 0x01037,), # Myanmar Sign Anusvara ..Myanmar Sign Dot Below
(0x01039, 0x01039,), # Myanmar Sign Virama
(0x01058, 0x01059,), # Myanmar Vowel Sign Vocal..Myanmar Vowel Sign Vocal
(0x0135f, 0x0135f,), # Ethiopic Combining Gemination Mark
(0x01712, 0x01714,), # Tagalog Vowel Sign I ..Tagalog Sign Virama
(0x01732, 0x01734,), # Hanunoo Vowel Sign I ..Hanunoo Sign Pamudpod
(0x01752, 0x01753,), # Buhid Vowel Sign I ..Buhid Vowel Sign U
(0x01772, 0x01773,), # Tagbanwa Vowel Sign I ..Tagbanwa Vowel Sign U
(0x017b7, 0x017bd,), # Khmer Vowel Sign I ..Khmer Vowel Sign Ua
(0x017c6, 0x017c6,), # Khmer Sign Nikahit
(0x017c9, 0x017d3,), # Khmer Sign Muusikatoan ..Khmer Sign Bathamasat
(0x017dd, 0x017dd,), # Khmer Sign Atthacan
(0x0180b, 0x0180d,), # Mongolian Free Variation..Mongolian Free Variation
(0x018a9, 0x018a9,), # Mongolian Letter Ali Gali Dagalga
(0x01920, 0x01922,), # Limbu Vowel Sign A ..Limbu Vowel Sign U
(0x01927, 0x01928,), # Limbu Vowel Sign E ..Limbu Vowel Sign O
(0x01932, 0x01932,), # Limbu Small Letter Anusvara
(0x01939, 0x0193b,), # Limbu Sign Mukphreng ..Limbu Sign Sa-i
(0x01a17, 0x01a18,), # Buginese Vowel Sign I ..Buginese Vowel Sign U
(0x01b00, 0x01b03,), # Balinese Sign Ulu Ricem ..Balinese Sign Surang
(0x01b34, 0x01b34,), # Balinese Sign Rerekan
(0x01b36, 0x01b3a,), # Balinese Vowel Sign Ulu ..Balinese Vowel Sign Ra R
(0x01b3c, 0x01b3c,), # Balinese Vowel Sign La Lenga
(0x01b42, 0x01b42,), # Balinese Vowel Sign Pepet
(0x01b6b, 0x01b73,), # Balinese Musical Symbol ..Balinese Musical Symbol
(0x01dc0, 0x01dca,), # Combining Dotted Grave A..Combining Latin Small Le
(0x01dfe, 0x01dff,), # Combining Left Arrowhead..Combining Right Arrowhea
(0x020d0, 0x020ef,), # Combining Left Harpoon A..Combining Right Arrow Be
(0x0302a, 0x0302f,), # Ideographic Level Tone M..Hangul Double Dot Tone M
(0x03099, 0x0309a,), # Combining Katakana-hirag..Combining Katakana-hirag
(0x0a806, 0x0a806,), # Syloti Nagri Sign Hasanta
(0x0a80b, 0x0a80b,), # Syloti Nagri Sign Anusvara
(0x0a825, 0x0a826,), # Syloti Nagri Vowel Sign ..Syloti Nagri Vowel Sign
(0x0fb1e, 0x0fb1e,), # Hebrew Point Judeo-spanish Varika
(0x0fe00, 0x0fe0f,), # Variation Selector-1 ..Variation Selector-16
(0x0fe20, 0x0fe23,), # Combining Ligature Left ..Combining Double Tilde R
(0x10a01, 0x10a03,), # Kharoshthi Vowel Sign I ..Kharoshthi Vowel Sign Vo
(0x10a05, 0x10a06,), # Kharoshthi Vowel Sign E ..Kharoshthi Vowel Sign O
(0x10a0c, 0x10a0f,), # Kharoshthi Vowel Length ..Kharoshthi Sign Visarga
(0x10a38, 0x10a3a,), # Kharoshthi Sign Bar Abov..Kharoshthi Sign Dot Belo
(0x10a3f, 0x10a3f,), # Kharoshthi Virama
(0x1d167, 0x1d169,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d17b, 0x1d182,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d185, 0x1d18b,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d1aa, 0x1d1ad,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d242, 0x1d244,), # Combining Greek Musical ..Combining Greek Musical
(0xe0100, 0xe01ef,), # Variation Selector-17 ..Variation Selector-256
),
'5.1.0': (
# Source: DerivedGeneralCategory-5.1.0.txt
# Date: 2008-03-20, 17:54:57 GMT [MD]
#
(0x00300, 0x0036f,), # Combining Grave Accent ..Combining Latin Small Le
(0x00483, 0x00489,), # Combining Cyrillic Titlo..Combining Cyrillic Milli
(0x00591, 0x005bd,), # Hebrew Accent Etnahta ..Hebrew Point Meteg
(0x005bf, 0x005bf,), # Hebrew Point Rafe
(0x005c1, 0x005c2,), # Hebrew Point Shin Dot ..Hebrew Point Sin Dot
(0x005c4, 0x005c5,), # Hebrew Mark Upper Dot ..Hebrew Mark Lower Dot
(0x005c7, 0x005c7,), # Hebrew Point Qamats Qatan
(0x00610, 0x0061a,), # Arabic Sign Sallallahou ..Arabic Small Kasra
(0x0064b, 0x0065e,), # Arabic Fathatan ..Arabic Fatha With Two Do
(0x00670, 0x00670,), # Arabic Letter Superscript Alef
(0x006d6, 0x006dc,), # Arabic Small High Ligatu..Arabic Small High Seen
(0x006de, 0x006e4,), # Arabic Start Of Rub El H..Arabic Small High Madda
(0x006e7, 0x006e8,), # Arabic Small High Yeh ..Arabic Small High Noon
(0x006ea, 0x006ed,), # Arabic Empty Centre Low ..Arabic Small Low Meem
(0x00711, 0x00711,), # Syriac Letter Superscript Alaph
(0x00730, 0x0074a,), # Syriac Pthaha Above ..Syriac Barrekh
(0x007a6, 0x007b0,), # Thaana Abafili ..Thaana Sukun
(0x007eb, 0x007f3,), # Nko Combining Short High..Nko Combining Double Dot
(0x00901, 0x00902,), # Devanagari Sign Candrabi..Devanagari Sign Anusvara
(0x0093c, 0x0093c,), # Devanagari Sign Nukta
(0x00941, 0x00948,), # Devanagari Vowel Sign U ..Devanagari Vowel Sign Ai
(0x0094d, 0x0094d,), # Devanagari Sign Virama
(0x00951, 0x00954,), # Devanagari Stress Sign U..Devanagari Acute Accent
(0x00962, 0x00963,), # Devanagari Vowel Sign Vo..Devanagari Vowel Sign Vo
(0x00981, 0x00981,), # Bengali Sign Candrabindu
(0x009bc, 0x009bc,), # Bengali Sign Nukta
(0x009c1, 0x009c4,), # Bengali Vowel Sign U ..Bengali Vowel Sign Vocal
(0x009cd, 0x009cd,), # Bengali Sign Virama
(0x009e2, 0x009e3,), # Bengali Vowel Sign Vocal..Bengali Vowel Sign Vocal
(0x00a01, 0x00a02,), # Gurmukhi Sign Adak Bindi..Gurmukhi Sign Bindi
(0x00a3c, 0x00a3c,), # Gurmukhi Sign Nukta
(0x00a41, 0x00a42,), # Gurmukhi Vowel Sign U ..Gurmukhi Vowel Sign Uu
(0x00a47, 0x00a48,), # Gurmukhi Vowel Sign Ee ..Gurmukhi Vowel Sign Ai
(0x00a4b, 0x00a4d,), # Gurmukhi Vowel Sign Oo ..Gurmukhi Sign Virama
(0x00a51, 0x00a51,), # Gurmukhi Sign Udaat
(0x00a70, 0x00a71,), # Gurmukhi Tippi ..Gurmukhi Addak
(0x00a75, 0x00a75,), # Gurmukhi Sign Yakash
(0x00a81, 0x00a82,), # Gujarati Sign Candrabind..Gujarati Sign Anusvara
(0x00abc, 0x00abc,), # Gujarati Sign Nukta
(0x00ac1, 0x00ac5,), # Gujarati Vowel Sign U ..Gujarati Vowel Sign Cand
(0x00ac7, 0x00ac8,), # Gujarati Vowel Sign E ..Gujarati Vowel Sign Ai
(0x00acd, 0x00acd,), # Gujarati Sign Virama
(0x00ae2, 0x00ae3,), # Gujarati Vowel Sign Voca..Gujarati Vowel Sign Voca
(0x00b01, 0x00b01,), # Oriya Sign Candrabindu
(0x00b3c, 0x00b3c,), # Oriya Sign Nukta
(0x00b3f, 0x00b3f,), # Oriya Vowel Sign I
(0x00b41, 0x00b44,), # Oriya Vowel Sign U ..Oriya Vowel Sign Vocalic
(0x00b4d, 0x00b4d,), # Oriya Sign Virama
(0x00b56, 0x00b56,), # Oriya Ai Length Mark
(0x00b62, 0x00b63,), # Oriya Vowel Sign Vocalic..Oriya Vowel Sign Vocalic
(0x00b82, 0x00b82,), # Tamil Sign Anusvara
(0x00bc0, 0x00bc0,), # Tamil Vowel Sign Ii
(0x00bcd, 0x00bcd,), # Tamil Sign Virama
(0x00c3e, 0x00c40,), # Telugu Vowel Sign Aa ..Telugu Vowel Sign Ii
(0x00c46, 0x00c48,), # Telugu Vowel Sign E ..Telugu Vowel Sign Ai
(0x00c4a, 0x00c4d,), # Telugu Vowel Sign O ..Telugu Sign Virama
(0x00c55, 0x00c56,), # Telugu Length Mark ..Telugu Ai Length Mark
(0x00c62, 0x00c63,), # Telugu Vowel Sign Vocali..Telugu Vowel Sign Vocali
(0x00cbc, 0x00cbc,), # Kannada Sign Nukta
(0x00cbf, 0x00cbf,), # Kannada Vowel Sign I
(0x00cc6, 0x00cc6,), # Kannada Vowel Sign E
(0x00ccc, 0x00ccd,), # Kannada Vowel Sign Au ..Kannada Sign Virama
(0x00ce2, 0x00ce3,), # Kannada Vowel Sign Vocal..Kannada Vowel Sign Vocal
(0x00d41, 0x00d44,), # Malayalam Vowel Sign U ..Malayalam Vowel Sign Voc
(0x00d4d, 0x00d4d,), # Malayalam Sign Virama
(0x00d62, 0x00d63,), # Malayalam Vowel Sign Voc..Malayalam Vowel Sign Voc
(0x00dca, 0x00dca,), # Sinhala Sign Al-lakuna
(0x00dd2, 0x00dd4,), # Sinhala Vowel Sign Ketti..Sinhala Vowel Sign Ketti
(0x00dd6, 0x00dd6,), # Sinhala Vowel Sign Diga Paa-pilla
(0x00e31, 0x00e31,), # Thai Character Mai Han-akat
(0x00e34, 0x00e3a,), # Thai Character Sara I ..Thai Character Phinthu
(0x00e47, 0x00e4e,), # Thai Character Maitaikhu..Thai Character Yamakkan
(0x00eb1, 0x00eb1,), # Lao Vowel Sign Mai Kan
(0x00eb4, 0x00eb9,), # Lao Vowel Sign I ..Lao Vowel Sign Uu
(0x00ebb, 0x00ebc,), # Lao Vowel Sign Mai Kon ..Lao Semivowel Sign Lo
(0x00ec8, 0x00ecd,), # Lao Tone Mai Ek ..Lao Niggahita
(0x00f18, 0x00f19,), # Tibetan Astrological Sig..Tibetan Astrological Sig
(0x00f35, 0x00f35,), # Tibetan Mark Ngas Bzung Nyi Zla
(0x00f37, 0x00f37,), # Tibetan Mark Ngas Bzung Sgor Rtags
(0x00f39, 0x00f39,), # Tibetan Mark Tsa -phru
(0x00f71, 0x00f7e,), # Tibetan Vowel Sign Aa ..Tibetan Sign Rjes Su Nga
(0x00f80, 0x00f84,), # Tibetan Vowel Sign Rever..Tibetan Mark Halanta
(0x00f86, 0x00f87,), # Tibetan Sign Lci Rtags ..Tibetan Sign Yang Rtags
(0x00f90, 0x00f97,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00f99, 0x00fbc,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00fc6, 0x00fc6,), # Tibetan Symbol Padma Gdan
(0x0102d, 0x01030,), # Myanmar Vowel Sign I ..Myanmar Vowel Sign Uu
(0x01032, 0x01037,), # Myanmar Vowel Sign Ai ..Myanmar Sign Dot Below
(0x01039, 0x0103a,), # Myanmar Sign Virama ..Myanmar Sign Asat
(0x0103d, 0x0103e,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01058, 0x01059,), # Myanmar Vowel Sign Vocal..Myanmar Vowel Sign Vocal
(0x0105e, 0x01060,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01071, 0x01074,), # Myanmar Vowel Sign Geba ..Myanmar Vowel Sign Kayah
(0x01082, 0x01082,), # Myanmar Consonant Sign Shan Medial Wa
(0x01085, 0x01086,), # Myanmar Vowel Sign Shan ..Myanmar Vowel Sign Shan
(0x0108d, 0x0108d,), # Myanmar Sign Shan Council Emphatic Tone
(0x0135f, 0x0135f,), # Ethiopic Combining Gemination Mark
(0x01712, 0x01714,), # Tagalog Vowel Sign I ..Tagalog Sign Virama
(0x01732, 0x01734,), # Hanunoo Vowel Sign I ..Hanunoo Sign Pamudpod
(0x01752, 0x01753,), # Buhid Vowel Sign I ..Buhid Vowel Sign U
(0x01772, 0x01773,), # Tagbanwa Vowel Sign I ..Tagbanwa Vowel Sign U
(0x017b7, 0x017bd,), # Khmer Vowel Sign I ..Khmer Vowel Sign Ua
(0x017c6, 0x017c6,), # Khmer Sign Nikahit
(0x017c9, 0x017d3,), # Khmer Sign Muusikatoan ..Khmer Sign Bathamasat
(0x017dd, 0x017dd,), # Khmer Sign Atthacan
(0x0180b, 0x0180d,), # Mongolian Free Variation..Mongolian Free Variation
(0x018a9, 0x018a9,), # Mongolian Letter Ali Gali Dagalga
(0x01920, 0x01922,), # Limbu Vowel Sign A ..Limbu Vowel Sign U
(0x01927, 0x01928,), # Limbu Vowel Sign E ..Limbu Vowel Sign O
(0x01932, 0x01932,), # Limbu Small Letter Anusvara
(0x01939, 0x0193b,), # Limbu Sign Mukphreng ..Limbu Sign Sa-i
(0x01a17, 0x01a18,), # Buginese Vowel Sign I ..Buginese Vowel Sign U
(0x01b00, 0x01b03,), # Balinese Sign Ulu Ricem ..Balinese Sign Surang
(0x01b34, 0x01b34,), # Balinese Sign Rerekan
(0x01b36, 0x01b3a,), # Balinese Vowel Sign Ulu ..Balinese Vowel Sign Ra R
(0x01b3c, 0x01b3c,), # Balinese Vowel Sign La Lenga
(0x01b42, 0x01b42,), # Balinese Vowel Sign Pepet
(0x01b6b, 0x01b73,), # Balinese Musical Symbol ..Balinese Musical Symbol
(0x01b80, 0x01b81,), # Sundanese Sign Panyecek ..Sundanese Sign Panglayar
(0x01ba2, 0x01ba5,), # Sundanese Consonant Sign..Sundanese Vowel Sign Pan
(0x01ba8, 0x01ba9,), # Sundanese Vowel Sign Pam..Sundanese Vowel Sign Pan
(0x01c2c, 0x01c33,), # Lepcha Vowel Sign E ..Lepcha Consonant Sign T
(0x01c36, 0x01c37,), # Lepcha Sign Ran ..Lepcha Sign Nukta
(0x01dc0, 0x01de6,), # Combining Dotted Grave A..Combining Latin Small Le
(0x01dfe, 0x01dff,), # Combining Left Arrowhead..Combining Right Arrowhea
(0x020d0, 0x020f0,), # Combining Left Harpoon A..Combining Asterisk Above
(0x02de0, 0x02dff,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0302a, 0x0302f,), # Ideographic Level Tone M..Hangul Double Dot Tone M
(0x03099, 0x0309a,), # Combining Katakana-hirag..Combining Katakana-hirag
(0x0a66f, 0x0a672,), # Combining Cyrillic Vzmet..Combining Cyrillic Thous
(0x0a67c, 0x0a67d,), # Combining Cyrillic Kavyk..Combining Cyrillic Payer
(0x0a802, 0x0a802,), # Syloti Nagri Sign Dvisvara
(0x0a806, 0x0a806,), # Syloti Nagri Sign Hasanta
(0x0a80b, 0x0a80b,), # Syloti Nagri Sign Anusvara
(0x0a825, 0x0a826,), # Syloti Nagri Vowel Sign ..Syloti Nagri Vowel Sign
(0x0a8c4, 0x0a8c4,), # Saurashtra Sign Virama
(0x0a926, 0x0a92d,), # Kayah Li Vowel Ue ..Kayah Li Tone Calya Plop
(0x0a947, 0x0a951,), # Rejang Vowel Sign I ..Rejang Consonant Sign R
(0x0aa29, 0x0aa2e,), # Cham Vowel Sign Aa ..Cham Vowel Sign Oe
(0x0aa31, 0x0aa32,), # Cham Vowel Sign Au ..Cham Vowel Sign Ue
(0x0aa35, 0x0aa36,), # Cham Consonant Sign La ..Cham Consonant Sign Wa
(0x0aa43, 0x0aa43,), # Cham Consonant Sign Final Ng
(0x0aa4c, 0x0aa4c,), # Cham Consonant Sign Final M
(0x0fb1e, 0x0fb1e,), # Hebrew Point Judeo-spanish Varika
(0x0fe00, 0x0fe0f,), # Variation Selector-1 ..Variation Selector-16
(0x0fe20, 0x0fe26,), # Combining Ligature Left ..Combining Conjoining Mac
(0x101fd, 0x101fd,), # Phaistos Disc Sign Combining Oblique Stroke
(0x10a01, 0x10a03,), # Kharoshthi Vowel Sign I ..Kharoshthi Vowel Sign Vo
(0x10a05, 0x10a06,), # Kharoshthi Vowel Sign E ..Kharoshthi Vowel Sign O
(0x10a0c, 0x10a0f,), # Kharoshthi Vowel Length ..Kharoshthi Sign Visarga
(0x10a38, 0x10a3a,), # Kharoshthi Sign Bar Abov..Kharoshthi Sign Dot Belo
(0x10a3f, 0x10a3f,), # Kharoshthi Virama
(0x1d167, 0x1d169,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d17b, 0x1d182,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d185, 0x1d18b,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d1aa, 0x1d1ad,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d242, 0x1d244,), # Combining Greek Musical ..Combining Greek Musical
(0xe0100, 0xe01ef,), # Variation Selector-17 ..Variation Selector-256
),
'5.2.0': (
# Source: DerivedGeneralCategory-5.2.0.txt
# Date: 2009-08-22, 04:58:21 GMT [MD]
#
(0x00300, 0x0036f,), # Combining Grave Accent ..Combining Latin Small Le
(0x00483, 0x00489,), # Combining Cyrillic Titlo..Combining Cyrillic Milli
(0x00591, 0x005bd,), # Hebrew Accent Etnahta ..Hebrew Point Meteg
(0x005bf, 0x005bf,), # Hebrew Point Rafe
(0x005c1, 0x005c2,), # Hebrew Point Shin Dot ..Hebrew Point Sin Dot
(0x005c4, 0x005c5,), # Hebrew Mark Upper Dot ..Hebrew Mark Lower Dot
(0x005c7, 0x005c7,), # Hebrew Point Qamats Qatan
(0x00610, 0x0061a,), # Arabic Sign Sallallahou ..Arabic Small Kasra
(0x0064b, 0x0065e,), # Arabic Fathatan ..Arabic Fatha With Two Do
(0x00670, 0x00670,), # Arabic Letter Superscript Alef
(0x006d6, 0x006dc,), # Arabic Small High Ligatu..Arabic Small High Seen
(0x006de, 0x006e4,), # Arabic Start Of Rub El H..Arabic Small High Madda
(0x006e7, 0x006e8,), # Arabic Small High Yeh ..Arabic Small High Noon
(0x006ea, 0x006ed,), # Arabic Empty Centre Low ..Arabic Small Low Meem
(0x00711, 0x00711,), # Syriac Letter Superscript Alaph
(0x00730, 0x0074a,), # Syriac Pthaha Above ..Syriac Barrekh
(0x007a6, 0x007b0,), # Thaana Abafili ..Thaana Sukun
(0x007eb, 0x007f3,), # Nko Combining Short High..Nko Combining Double Dot
(0x00816, 0x00819,), # Samaritan Mark In ..Samaritan Mark Dagesh
(0x0081b, 0x00823,), # Samaritan Mark Epentheti..Samaritan Vowel Sign A
(0x00825, 0x00827,), # Samaritan Vowel Sign Sho..Samaritan Vowel Sign U
(0x00829, 0x0082d,), # Samaritan Vowel Sign Lon..Samaritan Mark Nequdaa
(0x00900, 0x00902,), # Devanagari Sign Inverted..Devanagari Sign Anusvara
(0x0093c, 0x0093c,), # Devanagari Sign Nukta
(0x00941, 0x00948,), # Devanagari Vowel Sign U ..Devanagari Vowel Sign Ai
(0x0094d, 0x0094d,), # Devanagari Sign Virama
(0x00951, 0x00955,), # Devanagari Stress Sign U..Devanagari Vowel Sign Ca
(0x00962, 0x00963,), # Devanagari Vowel Sign Vo..Devanagari Vowel Sign Vo
(0x00981, 0x00981,), # Bengali Sign Candrabindu
(0x009bc, 0x009bc,), # Bengali Sign Nukta
(0x009c1, 0x009c4,), # Bengali Vowel Sign U ..Bengali Vowel Sign Vocal
(0x009cd, 0x009cd,), # Bengali Sign Virama
(0x009e2, 0x009e3,), # Bengali Vowel Sign Vocal..Bengali Vowel Sign Vocal
(0x00a01, 0x00a02,), # Gurmukhi Sign Adak Bindi..Gurmukhi Sign Bindi
(0x00a3c, 0x00a3c,), # Gurmukhi Sign Nukta
(0x00a41, 0x00a42,), # Gurmukhi Vowel Sign U ..Gurmukhi Vowel Sign Uu
(0x00a47, 0x00a48,), # Gurmukhi Vowel Sign Ee ..Gurmukhi Vowel Sign Ai
(0x00a4b, 0x00a4d,), # Gurmukhi Vowel Sign Oo ..Gurmukhi Sign Virama
(0x00a51, 0x00a51,), # Gurmukhi Sign Udaat
(0x00a70, 0x00a71,), # Gurmukhi Tippi ..Gurmukhi Addak
(0x00a75, 0x00a75,), # Gurmukhi Sign Yakash
(0x00a81, 0x00a82,), # Gujarati Sign Candrabind..Gujarati Sign Anusvara
(0x00abc, 0x00abc,), # Gujarati Sign Nukta
(0x00ac1, 0x00ac5,), # Gujarati Vowel Sign U ..Gujarati Vowel Sign Cand
(0x00ac7, 0x00ac8,), # Gujarati Vowel Sign E ..Gujarati Vowel Sign Ai
(0x00acd, 0x00acd,), # Gujarati Sign Virama
(0x00ae2, 0x00ae3,), # Gujarati Vowel Sign Voca..Gujarati Vowel Sign Voca
(0x00b01, 0x00b01,), # Oriya Sign Candrabindu
(0x00b3c, 0x00b3c,), # Oriya Sign Nukta
(0x00b3f, 0x00b3f,), # Oriya Vowel Sign I
(0x00b41, 0x00b44,), # Oriya Vowel Sign U ..Oriya Vowel Sign Vocalic
(0x00b4d, 0x00b4d,), # Oriya Sign Virama
(0x00b56, 0x00b56,), # Oriya Ai Length Mark
(0x00b62, 0x00b63,), # Oriya Vowel Sign Vocalic..Oriya Vowel Sign Vocalic
(0x00b82, 0x00b82,), # Tamil Sign Anusvara
(0x00bc0, 0x00bc0,), # Tamil Vowel Sign Ii
(0x00bcd, 0x00bcd,), # Tamil Sign Virama
(0x00c3e, 0x00c40,), # Telugu Vowel Sign Aa ..Telugu Vowel Sign Ii
(0x00c46, 0x00c48,), # Telugu Vowel Sign E ..Telugu Vowel Sign Ai
(0x00c4a, 0x00c4d,), # Telugu Vowel Sign O ..Telugu Sign Virama
(0x00c55, 0x00c56,), # Telugu Length Mark ..Telugu Ai Length Mark
(0x00c62, 0x00c63,), # Telugu Vowel Sign Vocali..Telugu Vowel Sign Vocali
(0x00cbc, 0x00cbc,), # Kannada Sign Nukta
(0x00cbf, 0x00cbf,), # Kannada Vowel Sign I
(0x00cc6, 0x00cc6,), # Kannada Vowel Sign E
(0x00ccc, 0x00ccd,), # Kannada Vowel Sign Au ..Kannada Sign Virama
(0x00ce2, 0x00ce3,), # Kannada Vowel Sign Vocal..Kannada Vowel Sign Vocal
(0x00d41, 0x00d44,), # Malayalam Vowel Sign U ..Malayalam Vowel Sign Voc
(0x00d4d, 0x00d4d,), # Malayalam Sign Virama
(0x00d62, 0x00d63,), # Malayalam Vowel Sign Voc..Malayalam Vowel Sign Voc
(0x00dca, 0x00dca,), # Sinhala Sign Al-lakuna
(0x00dd2, 0x00dd4,), # Sinhala Vowel Sign Ketti..Sinhala Vowel Sign Ketti
(0x00dd6, 0x00dd6,), # Sinhala Vowel Sign Diga Paa-pilla
(0x00e31, 0x00e31,), # Thai Character Mai Han-akat
(0x00e34, 0x00e3a,), # Thai Character Sara I ..Thai Character Phinthu
(0x00e47, 0x00e4e,), # Thai Character Maitaikhu..Thai Character Yamakkan
(0x00eb1, 0x00eb1,), # Lao Vowel Sign Mai Kan
(0x00eb4, 0x00eb9,), # Lao Vowel Sign I ..Lao Vowel Sign Uu
(0x00ebb, 0x00ebc,), # Lao Vowel Sign Mai Kon ..Lao Semivowel Sign Lo
(0x00ec8, 0x00ecd,), # Lao Tone Mai Ek ..Lao Niggahita
(0x00f18, 0x00f19,), # Tibetan Astrological Sig..Tibetan Astrological Sig
(0x00f35, 0x00f35,), # Tibetan Mark Ngas Bzung Nyi Zla
(0x00f37, 0x00f37,), # Tibetan Mark Ngas Bzung Sgor Rtags
(0x00f39, 0x00f39,), # Tibetan Mark Tsa -phru
(0x00f71, 0x00f7e,), # Tibetan Vowel Sign Aa ..Tibetan Sign Rjes Su Nga
(0x00f80, 0x00f84,), # Tibetan Vowel Sign Rever..Tibetan Mark Halanta
(0x00f86, 0x00f87,), # Tibetan Sign Lci Rtags ..Tibetan Sign Yang Rtags
(0x00f90, 0x00f97,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00f99, 0x00fbc,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00fc6, 0x00fc6,), # Tibetan Symbol Padma Gdan
(0x0102d, 0x01030,), # Myanmar Vowel Sign I ..Myanmar Vowel Sign Uu
(0x01032, 0x01037,), # Myanmar Vowel Sign Ai ..Myanmar Sign Dot Below
(0x01039, 0x0103a,), # Myanmar Sign Virama ..Myanmar Sign Asat
(0x0103d, 0x0103e,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01058, 0x01059,), # Myanmar Vowel Sign Vocal..Myanmar Vowel Sign Vocal
(0x0105e, 0x01060,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01071, 0x01074,), # Myanmar Vowel Sign Geba ..Myanmar Vowel Sign Kayah
(0x01082, 0x01082,), # Myanmar Consonant Sign Shan Medial Wa
(0x01085, 0x01086,), # Myanmar Vowel Sign Shan ..Myanmar Vowel Sign Shan
(0x0108d, 0x0108d,), # Myanmar Sign Shan Council Emphatic Tone
(0x0109d, 0x0109d,), # Myanmar Vowel Sign Aiton Ai
(0x0135f, 0x0135f,), # Ethiopic Combining Gemination Mark
(0x01712, 0x01714,), # Tagalog Vowel Sign I ..Tagalog Sign Virama
(0x01732, 0x01734,), # Hanunoo Vowel Sign I ..Hanunoo Sign Pamudpod
(0x01752, 0x01753,), # Buhid Vowel Sign I ..Buhid Vowel Sign U
(0x01772, 0x01773,), # Tagbanwa Vowel Sign I ..Tagbanwa Vowel Sign U
(0x017b7, 0x017bd,), # Khmer Vowel Sign I ..Khmer Vowel Sign Ua
(0x017c6, 0x017c6,), # Khmer Sign Nikahit
(0x017c9, 0x017d3,), # Khmer Sign Muusikatoan ..Khmer Sign Bathamasat
(0x017dd, 0x017dd,), # Khmer Sign Atthacan
(0x0180b, 0x0180d,), # Mongolian Free Variation..Mongolian Free Variation
(0x018a9, 0x018a9,), # Mongolian Letter Ali Gali Dagalga
(0x01920, 0x01922,), # Limbu Vowel Sign A ..Limbu Vowel Sign U
(0x01927, 0x01928,), # Limbu Vowel Sign E ..Limbu Vowel Sign O
(0x01932, 0x01932,), # Limbu Small Letter Anusvara
(0x01939, 0x0193b,), # Limbu Sign Mukphreng ..Limbu Sign Sa-i
(0x01a17, 0x01a18,), # Buginese Vowel Sign I ..Buginese Vowel Sign U
(0x01a56, 0x01a56,), # Tai Tham Consonant Sign Medial La
(0x01a58, 0x01a5e,), # Tai Tham Sign Mai Kang L..Tai Tham Consonant Sign
(0x01a60, 0x01a60,), # Tai Tham Sign Sakot
(0x01a62, 0x01a62,), # Tai Tham Vowel Sign Mai Sat
(0x01a65, 0x01a6c,), # Tai Tham Vowel Sign I ..Tai Tham Vowel Sign Oa B
(0x01a73, 0x01a7c,), # Tai Tham Vowel Sign Oa A..Tai Tham Sign Khuen-lue
(0x01a7f, 0x01a7f,), # Tai Tham Combining Cryptogrammic Dot
(0x01b00, 0x01b03,), # Balinese Sign Ulu Ricem ..Balinese Sign Surang
(0x01b34, 0x01b34,), # Balinese Sign Rerekan
(0x01b36, 0x01b3a,), # Balinese Vowel Sign Ulu ..Balinese Vowel Sign Ra R
(0x01b3c, 0x01b3c,), # Balinese Vowel Sign La Lenga
(0x01b42, 0x01b42,), # Balinese Vowel Sign Pepet
(0x01b6b, 0x01b73,), # Balinese Musical Symbol ..Balinese Musical Symbol
(0x01b80, 0x01b81,), # Sundanese Sign Panyecek ..Sundanese Sign Panglayar
(0x01ba2, 0x01ba5,), # Sundanese Consonant Sign..Sundanese Vowel Sign Pan
(0x01ba8, 0x01ba9,), # Sundanese Vowel Sign Pam..Sundanese Vowel Sign Pan
(0x01c2c, 0x01c33,), # Lepcha Vowel Sign E ..Lepcha Consonant Sign T
(0x01c36, 0x01c37,), # Lepcha Sign Ran ..Lepcha Sign Nukta
(0x01cd0, 0x01cd2,), # Vedic Tone Karshana ..Vedic Tone Prenkha
(0x01cd4, 0x01ce0,), # Vedic Sign Yajurvedic Mi..Vedic Tone Rigvedic Kash
(0x01ce2, 0x01ce8,), # Vedic Sign Visarga Svari..Vedic Sign Visarga Anuda
(0x01ced, 0x01ced,), # Vedic Sign Tiryak
(0x01dc0, 0x01de6,), # Combining Dotted Grave A..Combining Latin Small Le
(0x01dfd, 0x01dff,), # Combining Almost Equal T..Combining Right Arrowhea
(0x020d0, 0x020f0,), # Combining Left Harpoon A..Combining Asterisk Above
(0x02cef, 0x02cf1,), # Coptic Combining Ni Abov..Coptic Combining Spiritu
(0x02de0, 0x02dff,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0302a, 0x0302f,), # Ideographic Level Tone M..Hangul Double Dot Tone M
(0x03099, 0x0309a,), # Combining Katakana-hirag..Combining Katakana-hirag
(0x0a66f, 0x0a672,), # Combining Cyrillic Vzmet..Combining Cyrillic Thous
(0x0a67c, 0x0a67d,), # Combining Cyrillic Kavyk..Combining Cyrillic Payer
(0x0a6f0, 0x0a6f1,), # Bamum Combining Mark Koq..Bamum Combining Mark Tuk
(0x0a802, 0x0a802,), # Syloti Nagri Sign Dvisvara
(0x0a806, 0x0a806,), # Syloti Nagri Sign Hasanta
(0x0a80b, 0x0a80b,), # Syloti Nagri Sign Anusvara
(0x0a825, 0x0a826,), # Syloti Nagri Vowel Sign ..Syloti Nagri Vowel Sign
(0x0a8c4, 0x0a8c4,), # Saurashtra Sign Virama
(0x0a8e0, 0x0a8f1,), # Combining Devanagari Dig..Combining Devanagari Sig
(0x0a926, 0x0a92d,), # Kayah Li Vowel Ue ..Kayah Li Tone Calya Plop
(0x0a947, 0x0a951,), # Rejang Vowel Sign I ..Rejang Consonant Sign R
(0x0a980, 0x0a982,), # Javanese Sign Panyangga ..Javanese Sign Layar
(0x0a9b3, 0x0a9b3,), # Javanese Sign Cecak Telu
(0x0a9b6, 0x0a9b9,), # Javanese Vowel Sign Wulu..Javanese Vowel Sign Suku
(0x0a9bc, 0x0a9bc,), # Javanese Vowel Sign Pepet
(0x0aa29, 0x0aa2e,), # Cham Vowel Sign Aa ..Cham Vowel Sign Oe
(0x0aa31, 0x0aa32,), # Cham Vowel Sign Au ..Cham Vowel Sign Ue
(0x0aa35, 0x0aa36,), # Cham Consonant Sign La ..Cham Consonant Sign Wa
(0x0aa43, 0x0aa43,), # Cham Consonant Sign Final Ng
(0x0aa4c, 0x0aa4c,), # Cham Consonant Sign Final M
(0x0aab0, 0x0aab0,), # Tai Viet Mai Kang
(0x0aab2, 0x0aab4,), # Tai Viet Vowel I ..Tai Viet Vowel U
(0x0aab7, 0x0aab8,), # Tai Viet Mai Khit ..Tai Viet Vowel Ia
(0x0aabe, 0x0aabf,), # Tai Viet Vowel Am ..Tai Viet Tone Mai Ek
(0x0aac1, 0x0aac1,), # Tai Viet Tone Mai Tho
(0x0abe5, 0x0abe5,), # Meetei Mayek Vowel Sign Anap
(0x0abe8, 0x0abe8,), # Meetei Mayek Vowel Sign Unap
(0x0abed, 0x0abed,), # Meetei Mayek Apun Iyek
(0x0fb1e, 0x0fb1e,), # Hebrew Point Judeo-spanish Varika
(0x0fe00, 0x0fe0f,), # Variation Selector-1 ..Variation Selector-16
(0x0fe20, 0x0fe26,), # Combining Ligature Left ..Combining Conjoining Mac
(0x101fd, 0x101fd,), # Phaistos Disc Sign Combining Oblique Stroke
(0x10a01, 0x10a03,), # Kharoshthi Vowel Sign I ..Kharoshthi Vowel Sign Vo
(0x10a05, 0x10a06,), # Kharoshthi Vowel Sign E ..Kharoshthi Vowel Sign O
(0x10a0c, 0x10a0f,), # Kharoshthi Vowel Length ..Kharoshthi Sign Visarga
(0x10a38, 0x10a3a,), # Kharoshthi Sign Bar Abov..Kharoshthi Sign Dot Belo
(0x10a3f, 0x10a3f,), # Kharoshthi Virama
(0x11080, 0x11081,), # Kaithi Sign Candrabindu ..Kaithi Sign Anusvara
(0x110b3, 0x110b6,), # Kaithi Vowel Sign U ..Kaithi Vowel Sign Ai
(0x110b9, 0x110ba,), # Kaithi Sign Virama ..Kaithi Sign Nukta
(0x1d167, 0x1d169,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d17b, 0x1d182,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d185, 0x1d18b,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d1aa, 0x1d1ad,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d242, 0x1d244,), # Combining Greek Musical ..Combining Greek Musical
(0xe0100, 0xe01ef,), # Variation Selector-17 ..Variation Selector-256
),
'6.0.0': (
# Source: DerivedGeneralCategory-6.0.0.txt
# Date: 2010-08-19, 00:48:09 GMT [MD]
#
(0x00300, 0x0036f,), # Combining Grave Accent ..Combining Latin Small Le
(0x00483, 0x00489,), # Combining Cyrillic Titlo..Combining Cyrillic Milli
(0x00591, 0x005bd,), # Hebrew Accent Etnahta ..Hebrew Point Meteg
(0x005bf, 0x005bf,), # Hebrew Point Rafe
(0x005c1, 0x005c2,), # Hebrew Point Shin Dot ..Hebrew Point Sin Dot
(0x005c4, 0x005c5,), # Hebrew Mark Upper Dot ..Hebrew Mark Lower Dot
(0x005c7, 0x005c7,), # Hebrew Point Qamats Qatan
(0x00610, 0x0061a,), # Arabic Sign Sallallahou ..Arabic Small Kasra
(0x0064b, 0x0065f,), # Arabic Fathatan ..Arabic Wavy Hamza Below
(0x00670, 0x00670,), # Arabic Letter Superscript Alef
(0x006d6, 0x006dc,), # Arabic Small High Ligatu..Arabic Small High Seen
(0x006df, 0x006e4,), # Arabic Small High Rounde..Arabic Small High Madda
(0x006e7, 0x006e8,), # Arabic Small High Yeh ..Arabic Small High Noon
(0x006ea, 0x006ed,), # Arabic Empty Centre Low ..Arabic Small Low Meem
(0x00711, 0x00711,), # Syriac Letter Superscript Alaph
(0x00730, 0x0074a,), # Syriac Pthaha Above ..Syriac Barrekh
(0x007a6, 0x007b0,), # Thaana Abafili ..Thaana Sukun
(0x007eb, 0x007f3,), # Nko Combining Short High..Nko Combining Double Dot
(0x00816, 0x00819,), # Samaritan Mark In ..Samaritan Mark Dagesh
(0x0081b, 0x00823,), # Samaritan Mark Epentheti..Samaritan Vowel Sign A
(0x00825, 0x00827,), # Samaritan Vowel Sign Sho..Samaritan Vowel Sign U
(0x00829, 0x0082d,), # Samaritan Vowel Sign Lon..Samaritan Mark Nequdaa
(0x00859, 0x0085b,), # Mandaic Affrication Mark..Mandaic Gemination Mark
(0x00900, 0x00902,), # Devanagari Sign Inverted..Devanagari Sign Anusvara
(0x0093a, 0x0093a,), # Devanagari Vowel Sign Oe
(0x0093c, 0x0093c,), # Devanagari Sign Nukta
(0x00941, 0x00948,), # Devanagari Vowel Sign U ..Devanagari Vowel Sign Ai
(0x0094d, 0x0094d,), # Devanagari Sign Virama
(0x00951, 0x00957,), # Devanagari Stress Sign U..Devanagari Vowel Sign Uu
(0x00962, 0x00963,), # Devanagari Vowel Sign Vo..Devanagari Vowel Sign Vo
(0x00981, 0x00981,), # Bengali Sign Candrabindu
(0x009bc, 0x009bc,), # Bengali Sign Nukta
(0x009c1, 0x009c4,), # Bengali Vowel Sign U ..Bengali Vowel Sign Vocal
(0x009cd, 0x009cd,), # Bengali Sign Virama
(0x009e2, 0x009e3,), # Bengali Vowel Sign Vocal..Bengali Vowel Sign Vocal
(0x00a01, 0x00a02,), # Gurmukhi Sign Adak Bindi..Gurmukhi Sign Bindi
(0x00a3c, 0x00a3c,), # Gurmukhi Sign Nukta
(0x00a41, 0x00a42,), # Gurmukhi Vowel Sign U ..Gurmukhi Vowel Sign Uu
(0x00a47, 0x00a48,), # Gurmukhi Vowel Sign Ee ..Gurmukhi Vowel Sign Ai
(0x00a4b, 0x00a4d,), # Gurmukhi Vowel Sign Oo ..Gurmukhi Sign Virama
(0x00a51, 0x00a51,), # Gurmukhi Sign Udaat
(0x00a70, 0x00a71,), # Gurmukhi Tippi ..Gurmukhi Addak
(0x00a75, 0x00a75,), # Gurmukhi Sign Yakash
(0x00a81, 0x00a82,), # Gujarati Sign Candrabind..Gujarati Sign Anusvara
(0x00abc, 0x00abc,), # Gujarati Sign Nukta
(0x00ac1, 0x00ac5,), # Gujarati Vowel Sign U ..Gujarati Vowel Sign Cand
(0x00ac7, 0x00ac8,), # Gujarati Vowel Sign E ..Gujarati Vowel Sign Ai
(0x00acd, 0x00acd,), # Gujarati Sign Virama
(0x00ae2, 0x00ae3,), # Gujarati Vowel Sign Voca..Gujarati Vowel Sign Voca
(0x00b01, 0x00b01,), # Oriya Sign Candrabindu
(0x00b3c, 0x00b3c,), # Oriya Sign Nukta
(0x00b3f, 0x00b3f,), # Oriya Vowel Sign I
(0x00b41, 0x00b44,), # Oriya Vowel Sign U ..Oriya Vowel Sign Vocalic
(0x00b4d, 0x00b4d,), # Oriya Sign Virama
(0x00b56, 0x00b56,), # Oriya Ai Length Mark
(0x00b62, 0x00b63,), # Oriya Vowel Sign Vocalic..Oriya Vowel Sign Vocalic
(0x00b82, 0x00b82,), # Tamil Sign Anusvara
(0x00bc0, 0x00bc0,), # Tamil Vowel Sign Ii
(0x00bcd, 0x00bcd,), # Tamil Sign Virama
(0x00c3e, 0x00c40,), # Telugu Vowel Sign Aa ..Telugu Vowel Sign Ii
(0x00c46, 0x00c48,), # Telugu Vowel Sign E ..Telugu Vowel Sign Ai
(0x00c4a, 0x00c4d,), # Telugu Vowel Sign O ..Telugu Sign Virama
(0x00c55, 0x00c56,), # Telugu Length Mark ..Telugu Ai Length Mark
(0x00c62, 0x00c63,), # Telugu Vowel Sign Vocali..Telugu Vowel Sign Vocali
(0x00cbc, 0x00cbc,), # Kannada Sign Nukta
(0x00cbf, 0x00cbf,), # Kannada Vowel Sign I
(0x00cc6, 0x00cc6,), # Kannada Vowel Sign E
(0x00ccc, 0x00ccd,), # Kannada Vowel Sign Au ..Kannada Sign Virama
(0x00ce2, 0x00ce3,), # Kannada Vowel Sign Vocal..Kannada Vowel Sign Vocal
(0x00d41, 0x00d44,), # Malayalam Vowel Sign U ..Malayalam Vowel Sign Voc
(0x00d4d, 0x00d4d,), # Malayalam Sign Virama
(0x00d62, 0x00d63,), # Malayalam Vowel Sign Voc..Malayalam Vowel Sign Voc
(0x00dca, 0x00dca,), # Sinhala Sign Al-lakuna
(0x00dd2, 0x00dd4,), # Sinhala Vowel Sign Ketti..Sinhala Vowel Sign Ketti
(0x00dd6, 0x00dd6,), # Sinhala Vowel Sign Diga Paa-pilla
(0x00e31, 0x00e31,), # Thai Character Mai Han-akat
(0x00e34, 0x00e3a,), # Thai Character Sara I ..Thai Character Phinthu
(0x00e47, 0x00e4e,), # Thai Character Maitaikhu..Thai Character Yamakkan
(0x00eb1, 0x00eb1,), # Lao Vowel Sign Mai Kan
(0x00eb4, 0x00eb9,), # Lao Vowel Sign I ..Lao Vowel Sign Uu
(0x00ebb, 0x00ebc,), # Lao Vowel Sign Mai Kon ..Lao Semivowel Sign Lo
(0x00ec8, 0x00ecd,), # Lao Tone Mai Ek ..Lao Niggahita
(0x00f18, 0x00f19,), # Tibetan Astrological Sig..Tibetan Astrological Sig
(0x00f35, 0x00f35,), # Tibetan Mark Ngas Bzung Nyi Zla
(0x00f37, 0x00f37,), # Tibetan Mark Ngas Bzung Sgor Rtags
(0x00f39, 0x00f39,), # Tibetan Mark Tsa -phru
(0x00f71, 0x00f7e,), # Tibetan Vowel Sign Aa ..Tibetan Sign Rjes Su Nga
(0x00f80, 0x00f84,), # Tibetan Vowel Sign Rever..Tibetan Mark Halanta
(0x00f86, 0x00f87,), # Tibetan Sign Lci Rtags ..Tibetan Sign Yang Rtags
(0x00f8d, 0x00f97,), # Tibetan Subjoined Sign L..Tibetan Subjoined Letter
(0x00f99, 0x00fbc,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00fc6, 0x00fc6,), # Tibetan Symbol Padma Gdan
(0x0102d, 0x01030,), # Myanmar Vowel Sign I ..Myanmar Vowel Sign Uu
(0x01032, 0x01037,), # Myanmar Vowel Sign Ai ..Myanmar Sign Dot Below
(0x01039, 0x0103a,), # Myanmar Sign Virama ..Myanmar Sign Asat
(0x0103d, 0x0103e,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01058, 0x01059,), # Myanmar Vowel Sign Vocal..Myanmar Vowel Sign Vocal
(0x0105e, 0x01060,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01071, 0x01074,), # Myanmar Vowel Sign Geba ..Myanmar Vowel Sign Kayah
(0x01082, 0x01082,), # Myanmar Consonant Sign Shan Medial Wa
(0x01085, 0x01086,), # Myanmar Vowel Sign Shan ..Myanmar Vowel Sign Shan
(0x0108d, 0x0108d,), # Myanmar Sign Shan Council Emphatic Tone
(0x0109d, 0x0109d,), # Myanmar Vowel Sign Aiton Ai
(0x0135d, 0x0135f,), # Ethiopic Combining Gemin..Ethiopic Combining Gemin
(0x01712, 0x01714,), # Tagalog Vowel Sign I ..Tagalog Sign Virama
(0x01732, 0x01734,), # Hanunoo Vowel Sign I ..Hanunoo Sign Pamudpod
(0x01752, 0x01753,), # Buhid Vowel Sign I ..Buhid Vowel Sign U
(0x01772, 0x01773,), # Tagbanwa Vowel Sign I ..Tagbanwa Vowel Sign U
(0x017b7, 0x017bd,), # Khmer Vowel Sign I ..Khmer Vowel Sign Ua
(0x017c6, 0x017c6,), # Khmer Sign Nikahit
(0x017c9, 0x017d3,), # Khmer Sign Muusikatoan ..Khmer Sign Bathamasat
(0x017dd, 0x017dd,), # Khmer Sign Atthacan
(0x0180b, 0x0180d,), # Mongolian Free Variation..Mongolian Free Variation
(0x018a9, 0x018a9,), # Mongolian Letter Ali Gali Dagalga
(0x01920, 0x01922,), # Limbu Vowel Sign A ..Limbu Vowel Sign U
(0x01927, 0x01928,), # Limbu Vowel Sign E ..Limbu Vowel Sign O
(0x01932, 0x01932,), # Limbu Small Letter Anusvara
(0x01939, 0x0193b,), # Limbu Sign Mukphreng ..Limbu Sign Sa-i
(0x01a17, 0x01a18,), # Buginese Vowel Sign I ..Buginese Vowel Sign U
(0x01a56, 0x01a56,), # Tai Tham Consonant Sign Medial La
(0x01a58, 0x01a5e,), # Tai Tham Sign Mai Kang L..Tai Tham Consonant Sign
(0x01a60, 0x01a60,), # Tai Tham Sign Sakot
(0x01a62, 0x01a62,), # Tai Tham Vowel Sign Mai Sat
(0x01a65, 0x01a6c,), # Tai Tham Vowel Sign I ..Tai Tham Vowel Sign Oa B
(0x01a73, 0x01a7c,), # Tai Tham Vowel Sign Oa A..Tai Tham Sign Khuen-lue
(0x01a7f, 0x01a7f,), # Tai Tham Combining Cryptogrammic Dot
(0x01b00, 0x01b03,), # Balinese Sign Ulu Ricem ..Balinese Sign Surang
(0x01b34, 0x01b34,), # Balinese Sign Rerekan
(0x01b36, 0x01b3a,), # Balinese Vowel Sign Ulu ..Balinese Vowel Sign Ra R
(0x01b3c, 0x01b3c,), # Balinese Vowel Sign La Lenga
(0x01b42, 0x01b42,), # Balinese Vowel Sign Pepet
(0x01b6b, 0x01b73,), # Balinese Musical Symbol ..Balinese Musical Symbol
(0x01b80, 0x01b81,), # Sundanese Sign Panyecek ..Sundanese Sign Panglayar
(0x01ba2, 0x01ba5,), # Sundanese Consonant Sign..Sundanese Vowel Sign Pan
(0x01ba8, 0x01ba9,), # Sundanese Vowel Sign Pam..Sundanese Vowel Sign Pan
(0x01be6, 0x01be6,), # Batak Sign Tompi
(0x01be8, 0x01be9,), # Batak Vowel Sign Pakpak ..Batak Vowel Sign Ee
(0x01bed, 0x01bed,), # Batak Vowel Sign Karo O
(0x01bef, 0x01bf1,), # Batak Vowel Sign U For S..Batak Consonant Sign H
(0x01c2c, 0x01c33,), # Lepcha Vowel Sign E ..Lepcha Consonant Sign T
(0x01c36, 0x01c37,), # Lepcha Sign Ran ..Lepcha Sign Nukta
(0x01cd0, 0x01cd2,), # Vedic Tone Karshana ..Vedic Tone Prenkha
(0x01cd4, 0x01ce0,), # Vedic Sign Yajurvedic Mi..Vedic Tone Rigvedic Kash
(0x01ce2, 0x01ce8,), # Vedic Sign Visarga Svari..Vedic Sign Visarga Anuda
(0x01ced, 0x01ced,), # Vedic Sign Tiryak
(0x01dc0, 0x01de6,), # Combining Dotted Grave A..Combining Latin Small Le
(0x01dfc, 0x01dff,), # Combining Double Inverte..Combining Right Arrowhea
(0x020d0, 0x020f0,), # Combining Left Harpoon A..Combining Asterisk Above
(0x02cef, 0x02cf1,), # Coptic Combining Ni Abov..Coptic Combining Spiritu
(0x02d7f, 0x02d7f,), # Tifinagh Consonant Joiner
(0x02de0, 0x02dff,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0302a, 0x0302f,), # Ideographic Level Tone M..Hangul Double Dot Tone M
(0x03099, 0x0309a,), # Combining Katakana-hirag..Combining Katakana-hirag
(0x0a66f, 0x0a672,), # Combining Cyrillic Vzmet..Combining Cyrillic Thous
(0x0a67c, 0x0a67d,), # Combining Cyrillic Kavyk..Combining Cyrillic Payer
(0x0a6f0, 0x0a6f1,), # Bamum Combining Mark Koq..Bamum Combining Mark Tuk
(0x0a802, 0x0a802,), # Syloti Nagri Sign Dvisvara
(0x0a806, 0x0a806,), # Syloti Nagri Sign Hasanta
(0x0a80b, 0x0a80b,), # Syloti Nagri Sign Anusvara
(0x0a825, 0x0a826,), # Syloti Nagri Vowel Sign ..Syloti Nagri Vowel Sign
(0x0a8c4, 0x0a8c4,), # Saurashtra Sign Virama
(0x0a8e0, 0x0a8f1,), # Combining Devanagari Dig..Combining Devanagari Sig
(0x0a926, 0x0a92d,), # Kayah Li Vowel Ue ..Kayah Li Tone Calya Plop
(0x0a947, 0x0a951,), # Rejang Vowel Sign I ..Rejang Consonant Sign R
(0x0a980, 0x0a982,), # Javanese Sign Panyangga ..Javanese Sign Layar
(0x0a9b3, 0x0a9b3,), # Javanese Sign Cecak Telu
(0x0a9b6, 0x0a9b9,), # Javanese Vowel Sign Wulu..Javanese Vowel Sign Suku
(0x0a9bc, 0x0a9bc,), # Javanese Vowel Sign Pepet
(0x0aa29, 0x0aa2e,), # Cham Vowel Sign Aa ..Cham Vowel Sign Oe
(0x0aa31, 0x0aa32,), # Cham Vowel Sign Au ..Cham Vowel Sign Ue
(0x0aa35, 0x0aa36,), # Cham Consonant Sign La ..Cham Consonant Sign Wa
(0x0aa43, 0x0aa43,), # Cham Consonant Sign Final Ng
(0x0aa4c, 0x0aa4c,), # Cham Consonant Sign Final M
(0x0aab0, 0x0aab0,), # Tai Viet Mai Kang
(0x0aab2, 0x0aab4,), # Tai Viet Vowel I ..Tai Viet Vowel U
(0x0aab7, 0x0aab8,), # Tai Viet Mai Khit ..Tai Viet Vowel Ia
(0x0aabe, 0x0aabf,), # Tai Viet Vowel Am ..Tai Viet Tone Mai Ek
(0x0aac1, 0x0aac1,), # Tai Viet Tone Mai Tho
(0x0abe5, 0x0abe5,), # Meetei Mayek Vowel Sign Anap
(0x0abe8, 0x0abe8,), # Meetei Mayek Vowel Sign Unap
(0x0abed, 0x0abed,), # Meetei Mayek Apun Iyek
(0x0fb1e, 0x0fb1e,), # Hebrew Point Judeo-spanish Varika
(0x0fe00, 0x0fe0f,), # Variation Selector-1 ..Variation Selector-16
(0x0fe20, 0x0fe26,), # Combining Ligature Left ..Combining Conjoining Mac
(0x101fd, 0x101fd,), # Phaistos Disc Sign Combining Oblique Stroke
(0x10a01, 0x10a03,), # Kharoshthi Vowel Sign I ..Kharoshthi Vowel Sign Vo
(0x10a05, 0x10a06,), # Kharoshthi Vowel Sign E ..Kharoshthi Vowel Sign O
(0x10a0c, 0x10a0f,), # Kharoshthi Vowel Length ..Kharoshthi Sign Visarga
(0x10a38, 0x10a3a,), # Kharoshthi Sign Bar Abov..Kharoshthi Sign Dot Belo
(0x10a3f, 0x10a3f,), # Kharoshthi Virama
(0x11001, 0x11001,), # Brahmi Sign Anusvara
(0x11038, 0x11046,), # Brahmi Vowel Sign Aa ..Brahmi Virama
(0x11080, 0x11081,), # Kaithi Sign Candrabindu ..Kaithi Sign Anusvara
(0x110b3, 0x110b6,), # Kaithi Vowel Sign U ..Kaithi Vowel Sign Ai
(0x110b9, 0x110ba,), # Kaithi Sign Virama ..Kaithi Sign Nukta
(0x1d167, 0x1d169,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d17b, 0x1d182,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d185, 0x1d18b,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d1aa, 0x1d1ad,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d242, 0x1d244,), # Combining Greek Musical ..Combining Greek Musical
(0xe0100, 0xe01ef,), # Variation Selector-17 ..Variation Selector-256
),
'6.1.0': (
# Source: DerivedGeneralCategory-6.1.0.txt
# Date: 2011-11-27, 05:10:22 GMT [MD]
#
(0x00300, 0x0036f,), # Combining Grave Accent ..Combining Latin Small Le
(0x00483, 0x00489,), # Combining Cyrillic Titlo..Combining Cyrillic Milli
(0x00591, 0x005bd,), # Hebrew Accent Etnahta ..Hebrew Point Meteg
(0x005bf, 0x005bf,), # Hebrew Point Rafe
(0x005c1, 0x005c2,), # Hebrew Point Shin Dot ..Hebrew Point Sin Dot
(0x005c4, 0x005c5,), # Hebrew Mark Upper Dot ..Hebrew Mark Lower Dot
(0x005c7, 0x005c7,), # Hebrew Point Qamats Qatan
(0x00610, 0x0061a,), # Arabic Sign Sallallahou ..Arabic Small Kasra
(0x0064b, 0x0065f,), # Arabic Fathatan ..Arabic Wavy Hamza Below
(0x00670, 0x00670,), # Arabic Letter Superscript Alef
(0x006d6, 0x006dc,), # Arabic Small High Ligatu..Arabic Small High Seen
(0x006df, 0x006e4,), # Arabic Small High Rounde..Arabic Small High Madda
(0x006e7, 0x006e8,), # Arabic Small High Yeh ..Arabic Small High Noon
(0x006ea, 0x006ed,), # Arabic Empty Centre Low ..Arabic Small Low Meem
(0x00711, 0x00711,), # Syriac Letter Superscript Alaph
(0x00730, 0x0074a,), # Syriac Pthaha Above ..Syriac Barrekh
(0x007a6, 0x007b0,), # Thaana Abafili ..Thaana Sukun
(0x007eb, 0x007f3,), # Nko Combining Short High..Nko Combining Double Dot
(0x00816, 0x00819,), # Samaritan Mark In ..Samaritan Mark Dagesh
(0x0081b, 0x00823,), # Samaritan Mark Epentheti..Samaritan Vowel Sign A
(0x00825, 0x00827,), # Samaritan Vowel Sign Sho..Samaritan Vowel Sign U
(0x00829, 0x0082d,), # Samaritan Vowel Sign Lon..Samaritan Mark Nequdaa
(0x00859, 0x0085b,), # Mandaic Affrication Mark..Mandaic Gemination Mark
(0x008e4, 0x008fe,), # Arabic Curly Fatha ..Arabic Damma With Dot
(0x00900, 0x00902,), # Devanagari Sign Inverted..Devanagari Sign Anusvara
(0x0093a, 0x0093a,), # Devanagari Vowel Sign Oe
(0x0093c, 0x0093c,), # Devanagari Sign Nukta
(0x00941, 0x00948,), # Devanagari Vowel Sign U ..Devanagari Vowel Sign Ai
(0x0094d, 0x0094d,), # Devanagari Sign Virama
(0x00951, 0x00957,), # Devanagari Stress Sign U..Devanagari Vowel Sign Uu
(0x00962, 0x00963,), # Devanagari Vowel Sign Vo..Devanagari Vowel Sign Vo
(0x00981, 0x00981,), # Bengali Sign Candrabindu
(0x009bc, 0x009bc,), # Bengali Sign Nukta
(0x009c1, 0x009c4,), # Bengali Vowel Sign U ..Bengali Vowel Sign Vocal
(0x009cd, 0x009cd,), # Bengali Sign Virama
(0x009e2, 0x009e3,), # Bengali Vowel Sign Vocal..Bengali Vowel Sign Vocal
(0x00a01, 0x00a02,), # Gurmukhi Sign Adak Bindi..Gurmukhi Sign Bindi
(0x00a3c, 0x00a3c,), # Gurmukhi Sign Nukta
(0x00a41, 0x00a42,), # Gurmukhi Vowel Sign U ..Gurmukhi Vowel Sign Uu
(0x00a47, 0x00a48,), # Gurmukhi Vowel Sign Ee ..Gurmukhi Vowel Sign Ai
(0x00a4b, 0x00a4d,), # Gurmukhi Vowel Sign Oo ..Gurmukhi Sign Virama
(0x00a51, 0x00a51,), # Gurmukhi Sign Udaat
(0x00a70, 0x00a71,), # Gurmukhi Tippi ..Gurmukhi Addak
(0x00a75, 0x00a75,), # Gurmukhi Sign Yakash
(0x00a81, 0x00a82,), # Gujarati Sign Candrabind..Gujarati Sign Anusvara
(0x00abc, 0x00abc,), # Gujarati Sign Nukta
(0x00ac1, 0x00ac5,), # Gujarati Vowel Sign U ..Gujarati Vowel Sign Cand
(0x00ac7, 0x00ac8,), # Gujarati Vowel Sign E ..Gujarati Vowel Sign Ai
(0x00acd, 0x00acd,), # Gujarati Sign Virama
(0x00ae2, 0x00ae3,), # Gujarati Vowel Sign Voca..Gujarati Vowel Sign Voca
(0x00b01, 0x00b01,), # Oriya Sign Candrabindu
(0x00b3c, 0x00b3c,), # Oriya Sign Nukta
(0x00b3f, 0x00b3f,), # Oriya Vowel Sign I
(0x00b41, 0x00b44,), # Oriya Vowel Sign U ..Oriya Vowel Sign Vocalic
(0x00b4d, 0x00b4d,), # Oriya Sign Virama
(0x00b56, 0x00b56,), # Oriya Ai Length Mark
(0x00b62, 0x00b63,), # Oriya Vowel Sign Vocalic..Oriya Vowel Sign Vocalic
(0x00b82, 0x00b82,), # Tamil Sign Anusvara
(0x00bc0, 0x00bc0,), # Tamil Vowel Sign Ii
(0x00bcd, 0x00bcd,), # Tamil Sign Virama
(0x00c3e, 0x00c40,), # Telugu Vowel Sign Aa ..Telugu Vowel Sign Ii
(0x00c46, 0x00c48,), # Telugu Vowel Sign E ..Telugu Vowel Sign Ai
(0x00c4a, 0x00c4d,), # Telugu Vowel Sign O ..Telugu Sign Virama
(0x00c55, 0x00c56,), # Telugu Length Mark ..Telugu Ai Length Mark
(0x00c62, 0x00c63,), # Telugu Vowel Sign Vocali..Telugu Vowel Sign Vocali
(0x00cbc, 0x00cbc,), # Kannada Sign Nukta
(0x00cbf, 0x00cbf,), # Kannada Vowel Sign I
(0x00cc6, 0x00cc6,), # Kannada Vowel Sign E
(0x00ccc, 0x00ccd,), # Kannada Vowel Sign Au ..Kannada Sign Virama
(0x00ce2, 0x00ce3,), # Kannada Vowel Sign Vocal..Kannada Vowel Sign Vocal
(0x00d41, 0x00d44,), # Malayalam Vowel Sign U ..Malayalam Vowel Sign Voc
(0x00d4d, 0x00d4d,), # Malayalam Sign Virama
(0x00d62, 0x00d63,), # Malayalam Vowel Sign Voc..Malayalam Vowel Sign Voc
(0x00dca, 0x00dca,), # Sinhala Sign Al-lakuna
(0x00dd2, 0x00dd4,), # Sinhala Vowel Sign Ketti..Sinhala Vowel Sign Ketti
(0x00dd6, 0x00dd6,), # Sinhala Vowel Sign Diga Paa-pilla
(0x00e31, 0x00e31,), # Thai Character Mai Han-akat
(0x00e34, 0x00e3a,), # Thai Character Sara I ..Thai Character Phinthu
(0x00e47, 0x00e4e,), # Thai Character Maitaikhu..Thai Character Yamakkan
(0x00eb1, 0x00eb1,), # Lao Vowel Sign Mai Kan
(0x00eb4, 0x00eb9,), # Lao Vowel Sign I ..Lao Vowel Sign Uu
(0x00ebb, 0x00ebc,), # Lao Vowel Sign Mai Kon ..Lao Semivowel Sign Lo
(0x00ec8, 0x00ecd,), # Lao Tone Mai Ek ..Lao Niggahita
(0x00f18, 0x00f19,), # Tibetan Astrological Sig..Tibetan Astrological Sig
(0x00f35, 0x00f35,), # Tibetan Mark Ngas Bzung Nyi Zla
(0x00f37, 0x00f37,), # Tibetan Mark Ngas Bzung Sgor Rtags
(0x00f39, 0x00f39,), # Tibetan Mark Tsa -phru
(0x00f71, 0x00f7e,), # Tibetan Vowel Sign Aa ..Tibetan Sign Rjes Su Nga
(0x00f80, 0x00f84,), # Tibetan Vowel Sign Rever..Tibetan Mark Halanta
(0x00f86, 0x00f87,), # Tibetan Sign Lci Rtags ..Tibetan Sign Yang Rtags
(0x00f8d, 0x00f97,), # Tibetan Subjoined Sign L..Tibetan Subjoined Letter
(0x00f99, 0x00fbc,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00fc6, 0x00fc6,), # Tibetan Symbol Padma Gdan
(0x0102d, 0x01030,), # Myanmar Vowel Sign I ..Myanmar Vowel Sign Uu
(0x01032, 0x01037,), # Myanmar Vowel Sign Ai ..Myanmar Sign Dot Below
(0x01039, 0x0103a,), # Myanmar Sign Virama ..Myanmar Sign Asat
(0x0103d, 0x0103e,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01058, 0x01059,), # Myanmar Vowel Sign Vocal..Myanmar Vowel Sign Vocal
(0x0105e, 0x01060,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01071, 0x01074,), # Myanmar Vowel Sign Geba ..Myanmar Vowel Sign Kayah
(0x01082, 0x01082,), # Myanmar Consonant Sign Shan Medial Wa
(0x01085, 0x01086,), # Myanmar Vowel Sign Shan ..Myanmar Vowel Sign Shan
(0x0108d, 0x0108d,), # Myanmar Sign Shan Council Emphatic Tone
(0x0109d, 0x0109d,), # Myanmar Vowel Sign Aiton Ai
(0x0135d, 0x0135f,), # Ethiopic Combining Gemin..Ethiopic Combining Gemin
(0x01712, 0x01714,), # Tagalog Vowel Sign I ..Tagalog Sign Virama
(0x01732, 0x01734,), # Hanunoo Vowel Sign I ..Hanunoo Sign Pamudpod
(0x01752, 0x01753,), # Buhid Vowel Sign I ..Buhid Vowel Sign U
(0x01772, 0x01773,), # Tagbanwa Vowel Sign I ..Tagbanwa Vowel Sign U
(0x017b4, 0x017b5,), # Khmer Vowel Inherent Aq ..Khmer Vowel Inherent Aa
(0x017b7, 0x017bd,), # Khmer Vowel Sign I ..Khmer Vowel Sign Ua
(0x017c6, 0x017c6,), # Khmer Sign Nikahit
(0x017c9, 0x017d3,), # Khmer Sign Muusikatoan ..Khmer Sign Bathamasat
(0x017dd, 0x017dd,), # Khmer Sign Atthacan
(0x0180b, 0x0180d,), # Mongolian Free Variation..Mongolian Free Variation
(0x018a9, 0x018a9,), # Mongolian Letter Ali Gali Dagalga
(0x01920, 0x01922,), # Limbu Vowel Sign A ..Limbu Vowel Sign U
(0x01927, 0x01928,), # Limbu Vowel Sign E ..Limbu Vowel Sign O
(0x01932, 0x01932,), # Limbu Small Letter Anusvara
(0x01939, 0x0193b,), # Limbu Sign Mukphreng ..Limbu Sign Sa-i
(0x01a17, 0x01a18,), # Buginese Vowel Sign I ..Buginese Vowel Sign U
(0x01a56, 0x01a56,), # Tai Tham Consonant Sign Medial La
(0x01a58, 0x01a5e,), # Tai Tham Sign Mai Kang L..Tai Tham Consonant Sign
(0x01a60, 0x01a60,), # Tai Tham Sign Sakot
(0x01a62, 0x01a62,), # Tai Tham Vowel Sign Mai Sat
(0x01a65, 0x01a6c,), # Tai Tham Vowel Sign I ..Tai Tham Vowel Sign Oa B
(0x01a73, 0x01a7c,), # Tai Tham Vowel Sign Oa A..Tai Tham Sign Khuen-lue
(0x01a7f, 0x01a7f,), # Tai Tham Combining Cryptogrammic Dot
(0x01b00, 0x01b03,), # Balinese Sign Ulu Ricem ..Balinese Sign Surang
(0x01b34, 0x01b34,), # Balinese Sign Rerekan
(0x01b36, 0x01b3a,), # Balinese Vowel Sign Ulu ..Balinese Vowel Sign Ra R
(0x01b3c, 0x01b3c,), # Balinese Vowel Sign La Lenga
(0x01b42, 0x01b42,), # Balinese Vowel Sign Pepet
(0x01b6b, 0x01b73,), # Balinese Musical Symbol ..Balinese Musical Symbol
(0x01b80, 0x01b81,), # Sundanese Sign Panyecek ..Sundanese Sign Panglayar
(0x01ba2, 0x01ba5,), # Sundanese Consonant Sign..Sundanese Vowel Sign Pan
(0x01ba8, 0x01ba9,), # Sundanese Vowel Sign Pam..Sundanese Vowel Sign Pan
(0x01bab, 0x01bab,), # Sundanese Sign Virama
(0x01be6, 0x01be6,), # Batak Sign Tompi
(0x01be8, 0x01be9,), # Batak Vowel Sign Pakpak ..Batak Vowel Sign Ee
(0x01bed, 0x01bed,), # Batak Vowel Sign Karo O
(0x01bef, 0x01bf1,), # Batak Vowel Sign U For S..Batak Consonant Sign H
(0x01c2c, 0x01c33,), # Lepcha Vowel Sign E ..Lepcha Consonant Sign T
(0x01c36, 0x01c37,), # Lepcha Sign Ran ..Lepcha Sign Nukta
(0x01cd0, 0x01cd2,), # Vedic Tone Karshana ..Vedic Tone Prenkha
(0x01cd4, 0x01ce0,), # Vedic Sign Yajurvedic Mi..Vedic Tone Rigvedic Kash
(0x01ce2, 0x01ce8,), # Vedic Sign Visarga Svari..Vedic Sign Visarga Anuda
(0x01ced, 0x01ced,), # Vedic Sign Tiryak
(0x01cf4, 0x01cf4,), # Vedic Tone Candra Above
(0x01dc0, 0x01de6,), # Combining Dotted Grave A..Combining Latin Small Le
(0x01dfc, 0x01dff,), # Combining Double Inverte..Combining Right Arrowhea
(0x020d0, 0x020f0,), # Combining Left Harpoon A..Combining Asterisk Above
(0x02cef, 0x02cf1,), # Coptic Combining Ni Abov..Coptic Combining Spiritu
(0x02d7f, 0x02d7f,), # Tifinagh Consonant Joiner
(0x02de0, 0x02dff,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0302a, 0x0302d,), # Ideographic Level Tone M..Ideographic Entering Ton
(0x03099, 0x0309a,), # Combining Katakana-hirag..Combining Katakana-hirag
(0x0a66f, 0x0a672,), # Combining Cyrillic Vzmet..Combining Cyrillic Thous
(0x0a674, 0x0a67d,), # Combining Cyrillic Lette..Combining Cyrillic Payer
(0x0a69f, 0x0a69f,), # Combining Cyrillic Letter Iotified E
(0x0a6f0, 0x0a6f1,), # Bamum Combining Mark Koq..Bamum Combining Mark Tuk
(0x0a802, 0x0a802,), # Syloti Nagri Sign Dvisvara
(0x0a806, 0x0a806,), # Syloti Nagri Sign Hasanta
(0x0a80b, 0x0a80b,), # Syloti Nagri Sign Anusvara
(0x0a825, 0x0a826,), # Syloti Nagri Vowel Sign ..Syloti Nagri Vowel Sign
(0x0a8c4, 0x0a8c4,), # Saurashtra Sign Virama
(0x0a8e0, 0x0a8f1,), # Combining Devanagari Dig..Combining Devanagari Sig
(0x0a926, 0x0a92d,), # Kayah Li Vowel Ue ..Kayah Li Tone Calya Plop
(0x0a947, 0x0a951,), # Rejang Vowel Sign I ..Rejang Consonant Sign R
(0x0a980, 0x0a982,), # Javanese Sign Panyangga ..Javanese Sign Layar
(0x0a9b3, 0x0a9b3,), # Javanese Sign Cecak Telu
(0x0a9b6, 0x0a9b9,), # Javanese Vowel Sign Wulu..Javanese Vowel Sign Suku
(0x0a9bc, 0x0a9bc,), # Javanese Vowel Sign Pepet
(0x0aa29, 0x0aa2e,), # Cham Vowel Sign Aa ..Cham Vowel Sign Oe
(0x0aa31, 0x0aa32,), # Cham Vowel Sign Au ..Cham Vowel Sign Ue
(0x0aa35, 0x0aa36,), # Cham Consonant Sign La ..Cham Consonant Sign Wa
(0x0aa43, 0x0aa43,), # Cham Consonant Sign Final Ng
(0x0aa4c, 0x0aa4c,), # Cham Consonant Sign Final M
(0x0aab0, 0x0aab0,), # Tai Viet Mai Kang
(0x0aab2, 0x0aab4,), # Tai Viet Vowel I ..Tai Viet Vowel U
(0x0aab7, 0x0aab8,), # Tai Viet Mai Khit ..Tai Viet Vowel Ia
(0x0aabe, 0x0aabf,), # Tai Viet Vowel Am ..Tai Viet Tone Mai Ek
(0x0aac1, 0x0aac1,), # Tai Viet Tone Mai Tho
(0x0aaec, 0x0aaed,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0aaf6, 0x0aaf6,), # Meetei Mayek Virama
(0x0abe5, 0x0abe5,), # Meetei Mayek Vowel Sign Anap
(0x0abe8, 0x0abe8,), # Meetei Mayek Vowel Sign Unap
(0x0abed, 0x0abed,), # Meetei Mayek Apun Iyek
(0x0fb1e, 0x0fb1e,), # Hebrew Point Judeo-spanish Varika
(0x0fe00, 0x0fe0f,), # Variation Selector-1 ..Variation Selector-16
(0x0fe20, 0x0fe26,), # Combining Ligature Left ..Combining Conjoining Mac
(0x101fd, 0x101fd,), # Phaistos Disc Sign Combining Oblique Stroke
(0x10a01, 0x10a03,), # Kharoshthi Vowel Sign I ..Kharoshthi Vowel Sign Vo
(0x10a05, 0x10a06,), # Kharoshthi Vowel Sign E ..Kharoshthi Vowel Sign O
(0x10a0c, 0x10a0f,), # Kharoshthi Vowel Length ..Kharoshthi Sign Visarga
(0x10a38, 0x10a3a,), # Kharoshthi Sign Bar Abov..Kharoshthi Sign Dot Belo
(0x10a3f, 0x10a3f,), # Kharoshthi Virama
(0x11001, 0x11001,), # Brahmi Sign Anusvara
(0x11038, 0x11046,), # Brahmi Vowel Sign Aa ..Brahmi Virama
(0x11080, 0x11081,), # Kaithi Sign Candrabindu ..Kaithi Sign Anusvara
(0x110b3, 0x110b6,), # Kaithi Vowel Sign U ..Kaithi Vowel Sign Ai
(0x110b9, 0x110ba,), # Kaithi Sign Virama ..Kaithi Sign Nukta
(0x11100, 0x11102,), # Chakma Sign Candrabindu ..Chakma Sign Visarga
(0x11127, 0x1112b,), # Chakma Vowel Sign A ..Chakma Vowel Sign Uu
(0x1112d, 0x11134,), # Chakma Vowel Sign Ai ..Chakma Maayyaa
(0x11180, 0x11181,), # Sharada Sign Candrabindu..Sharada Sign Anusvara
(0x111b6, 0x111be,), # Sharada Vowel Sign U ..Sharada Vowel Sign O
(0x116ab, 0x116ab,), # Takri Sign Anusvara
(0x116ad, 0x116ad,), # Takri Vowel Sign Aa
(0x116b0, 0x116b5,), # Takri Vowel Sign U ..Takri Vowel Sign Au
(0x116b7, 0x116b7,), # Takri Sign Nukta
(0x16f8f, 0x16f92,), # Miao Tone Right ..Miao Tone Below
(0x1d167, 0x1d169,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d17b, 0x1d182,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d185, 0x1d18b,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d1aa, 0x1d1ad,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d242, 0x1d244,), # Combining Greek Musical ..Combining Greek Musical
(0xe0100, 0xe01ef,), # Variation Selector-17 ..Variation Selector-256
),
'6.2.0': (
# Source: DerivedGeneralCategory-6.2.0.txt
# Date: 2012-05-20, 00:42:34 GMT [MD]
#
(0x00300, 0x0036f,), # Combining Grave Accent ..Combining Latin Small Le
(0x00483, 0x00489,), # Combining Cyrillic Titlo..Combining Cyrillic Milli
(0x00591, 0x005bd,), # Hebrew Accent Etnahta ..Hebrew Point Meteg
(0x005bf, 0x005bf,), # Hebrew Point Rafe
(0x005c1, 0x005c2,), # Hebrew Point Shin Dot ..Hebrew Point Sin Dot
(0x005c4, 0x005c5,), # Hebrew Mark Upper Dot ..Hebrew Mark Lower Dot
(0x005c7, 0x005c7,), # Hebrew Point Qamats Qatan
(0x00610, 0x0061a,), # Arabic Sign Sallallahou ..Arabic Small Kasra
(0x0064b, 0x0065f,), # Arabic Fathatan ..Arabic Wavy Hamza Below
(0x00670, 0x00670,), # Arabic Letter Superscript Alef
(0x006d6, 0x006dc,), # Arabic Small High Ligatu..Arabic Small High Seen
(0x006df, 0x006e4,), # Arabic Small High Rounde..Arabic Small High Madda
(0x006e7, 0x006e8,), # Arabic Small High Yeh ..Arabic Small High Noon
(0x006ea, 0x006ed,), # Arabic Empty Centre Low ..Arabic Small Low Meem
(0x00711, 0x00711,), # Syriac Letter Superscript Alaph
(0x00730, 0x0074a,), # Syriac Pthaha Above ..Syriac Barrekh
(0x007a6, 0x007b0,), # Thaana Abafili ..Thaana Sukun
(0x007eb, 0x007f3,), # Nko Combining Short High..Nko Combining Double Dot
(0x00816, 0x00819,), # Samaritan Mark In ..Samaritan Mark Dagesh
(0x0081b, 0x00823,), # Samaritan Mark Epentheti..Samaritan Vowel Sign A
(0x00825, 0x00827,), # Samaritan Vowel Sign Sho..Samaritan Vowel Sign U
(0x00829, 0x0082d,), # Samaritan Vowel Sign Lon..Samaritan Mark Nequdaa
(0x00859, 0x0085b,), # Mandaic Affrication Mark..Mandaic Gemination Mark
(0x008e4, 0x008fe,), # Arabic Curly Fatha ..Arabic Damma With Dot
(0x00900, 0x00902,), # Devanagari Sign Inverted..Devanagari Sign Anusvara
(0x0093a, 0x0093a,), # Devanagari Vowel Sign Oe
(0x0093c, 0x0093c,), # Devanagari Sign Nukta
(0x00941, 0x00948,), # Devanagari Vowel Sign U ..Devanagari Vowel Sign Ai
(0x0094d, 0x0094d,), # Devanagari Sign Virama
(0x00951, 0x00957,), # Devanagari Stress Sign U..Devanagari Vowel Sign Uu
(0x00962, 0x00963,), # Devanagari Vowel Sign Vo..Devanagari Vowel Sign Vo
(0x00981, 0x00981,), # Bengali Sign Candrabindu
(0x009bc, 0x009bc,), # Bengali Sign Nukta
(0x009c1, 0x009c4,), # Bengali Vowel Sign U ..Bengali Vowel Sign Vocal
(0x009cd, 0x009cd,), # Bengali Sign Virama
(0x009e2, 0x009e3,), # Bengali Vowel Sign Vocal..Bengali Vowel Sign Vocal
(0x00a01, 0x00a02,), # Gurmukhi Sign Adak Bindi..Gurmukhi Sign Bindi
(0x00a3c, 0x00a3c,), # Gurmukhi Sign Nukta
(0x00a41, 0x00a42,), # Gurmukhi Vowel Sign U ..Gurmukhi Vowel Sign Uu
(0x00a47, 0x00a48,), # Gurmukhi Vowel Sign Ee ..Gurmukhi Vowel Sign Ai
(0x00a4b, 0x00a4d,), # Gurmukhi Vowel Sign Oo ..Gurmukhi Sign Virama
(0x00a51, 0x00a51,), # Gurmukhi Sign Udaat
(0x00a70, 0x00a71,), # Gurmukhi Tippi ..Gurmukhi Addak
(0x00a75, 0x00a75,), # Gurmukhi Sign Yakash
(0x00a81, 0x00a82,), # Gujarati Sign Candrabind..Gujarati Sign Anusvara
(0x00abc, 0x00abc,), # Gujarati Sign Nukta
(0x00ac1, 0x00ac5,), # Gujarati Vowel Sign U ..Gujarati Vowel Sign Cand
(0x00ac7, 0x00ac8,), # Gujarati Vowel Sign E ..Gujarati Vowel Sign Ai
(0x00acd, 0x00acd,), # Gujarati Sign Virama
(0x00ae2, 0x00ae3,), # Gujarati Vowel Sign Voca..Gujarati Vowel Sign Voca
(0x00b01, 0x00b01,), # Oriya Sign Candrabindu
(0x00b3c, 0x00b3c,), # Oriya Sign Nukta
(0x00b3f, 0x00b3f,), # Oriya Vowel Sign I
(0x00b41, 0x00b44,), # Oriya Vowel Sign U ..Oriya Vowel Sign Vocalic
(0x00b4d, 0x00b4d,), # Oriya Sign Virama
(0x00b56, 0x00b56,), # Oriya Ai Length Mark
(0x00b62, 0x00b63,), # Oriya Vowel Sign Vocalic..Oriya Vowel Sign Vocalic
(0x00b82, 0x00b82,), # Tamil Sign Anusvara
(0x00bc0, 0x00bc0,), # Tamil Vowel Sign Ii
(0x00bcd, 0x00bcd,), # Tamil Sign Virama
(0x00c3e, 0x00c40,), # Telugu Vowel Sign Aa ..Telugu Vowel Sign Ii
(0x00c46, 0x00c48,), # Telugu Vowel Sign E ..Telugu Vowel Sign Ai
(0x00c4a, 0x00c4d,), # Telugu Vowel Sign O ..Telugu Sign Virama
(0x00c55, 0x00c56,), # Telugu Length Mark ..Telugu Ai Length Mark
(0x00c62, 0x00c63,), # Telugu Vowel Sign Vocali..Telugu Vowel Sign Vocali
(0x00cbc, 0x00cbc,), # Kannada Sign Nukta
(0x00cbf, 0x00cbf,), # Kannada Vowel Sign I
(0x00cc6, 0x00cc6,), # Kannada Vowel Sign E
(0x00ccc, 0x00ccd,), # Kannada Vowel Sign Au ..Kannada Sign Virama
(0x00ce2, 0x00ce3,), # Kannada Vowel Sign Vocal..Kannada Vowel Sign Vocal
(0x00d41, 0x00d44,), # Malayalam Vowel Sign U ..Malayalam Vowel Sign Voc
(0x00d4d, 0x00d4d,), # Malayalam Sign Virama
(0x00d62, 0x00d63,), # Malayalam Vowel Sign Voc..Malayalam Vowel Sign Voc
(0x00dca, 0x00dca,), # Sinhala Sign Al-lakuna
(0x00dd2, 0x00dd4,), # Sinhala Vowel Sign Ketti..Sinhala Vowel Sign Ketti
(0x00dd6, 0x00dd6,), # Sinhala Vowel Sign Diga Paa-pilla
(0x00e31, 0x00e31,), # Thai Character Mai Han-akat
(0x00e34, 0x00e3a,), # Thai Character Sara I ..Thai Character Phinthu
(0x00e47, 0x00e4e,), # Thai Character Maitaikhu..Thai Character Yamakkan
(0x00eb1, 0x00eb1,), # Lao Vowel Sign Mai Kan
(0x00eb4, 0x00eb9,), # Lao Vowel Sign I ..Lao Vowel Sign Uu
(0x00ebb, 0x00ebc,), # Lao Vowel Sign Mai Kon ..Lao Semivowel Sign Lo
(0x00ec8, 0x00ecd,), # Lao Tone Mai Ek ..Lao Niggahita
(0x00f18, 0x00f19,), # Tibetan Astrological Sig..Tibetan Astrological Sig
(0x00f35, 0x00f35,), # Tibetan Mark Ngas Bzung Nyi Zla
(0x00f37, 0x00f37,), # Tibetan Mark Ngas Bzung Sgor Rtags
(0x00f39, 0x00f39,), # Tibetan Mark Tsa -phru
(0x00f71, 0x00f7e,), # Tibetan Vowel Sign Aa ..Tibetan Sign Rjes Su Nga
(0x00f80, 0x00f84,), # Tibetan Vowel Sign Rever..Tibetan Mark Halanta
(0x00f86, 0x00f87,), # Tibetan Sign Lci Rtags ..Tibetan Sign Yang Rtags
(0x00f8d, 0x00f97,), # Tibetan Subjoined Sign L..Tibetan Subjoined Letter
(0x00f99, 0x00fbc,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00fc6, 0x00fc6,), # Tibetan Symbol Padma Gdan
(0x0102d, 0x01030,), # Myanmar Vowel Sign I ..Myanmar Vowel Sign Uu
(0x01032, 0x01037,), # Myanmar Vowel Sign Ai ..Myanmar Sign Dot Below
(0x01039, 0x0103a,), # Myanmar Sign Virama ..Myanmar Sign Asat
(0x0103d, 0x0103e,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01058, 0x01059,), # Myanmar Vowel Sign Vocal..Myanmar Vowel Sign Vocal
(0x0105e, 0x01060,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01071, 0x01074,), # Myanmar Vowel Sign Geba ..Myanmar Vowel Sign Kayah
(0x01082, 0x01082,), # Myanmar Consonant Sign Shan Medial Wa
(0x01085, 0x01086,), # Myanmar Vowel Sign Shan ..Myanmar Vowel Sign Shan
(0x0108d, 0x0108d,), # Myanmar Sign Shan Council Emphatic Tone
(0x0109d, 0x0109d,), # Myanmar Vowel Sign Aiton Ai
(0x0135d, 0x0135f,), # Ethiopic Combining Gemin..Ethiopic Combining Gemin
(0x01712, 0x01714,), # Tagalog Vowel Sign I ..Tagalog Sign Virama
(0x01732, 0x01734,), # Hanunoo Vowel Sign I ..Hanunoo Sign Pamudpod
(0x01752, 0x01753,), # Buhid Vowel Sign I ..Buhid Vowel Sign U
(0x01772, 0x01773,), # Tagbanwa Vowel Sign I ..Tagbanwa Vowel Sign U
(0x017b4, 0x017b5,), # Khmer Vowel Inherent Aq ..Khmer Vowel Inherent Aa
(0x017b7, 0x017bd,), # Khmer Vowel Sign I ..Khmer Vowel Sign Ua
(0x017c6, 0x017c6,), # Khmer Sign Nikahit
(0x017c9, 0x017d3,), # Khmer Sign Muusikatoan ..Khmer Sign Bathamasat
(0x017dd, 0x017dd,), # Khmer Sign Atthacan
(0x0180b, 0x0180d,), # Mongolian Free Variation..Mongolian Free Variation
(0x018a9, 0x018a9,), # Mongolian Letter Ali Gali Dagalga
(0x01920, 0x01922,), # Limbu Vowel Sign A ..Limbu Vowel Sign U
(0x01927, 0x01928,), # Limbu Vowel Sign E ..Limbu Vowel Sign O
(0x01932, 0x01932,), # Limbu Small Letter Anusvara
(0x01939, 0x0193b,), # Limbu Sign Mukphreng ..Limbu Sign Sa-i
(0x01a17, 0x01a18,), # Buginese Vowel Sign I ..Buginese Vowel Sign U
(0x01a56, 0x01a56,), # Tai Tham Consonant Sign Medial La
(0x01a58, 0x01a5e,), # Tai Tham Sign Mai Kang L..Tai Tham Consonant Sign
(0x01a60, 0x01a60,), # Tai Tham Sign Sakot
(0x01a62, 0x01a62,), # Tai Tham Vowel Sign Mai Sat
(0x01a65, 0x01a6c,), # Tai Tham Vowel Sign I ..Tai Tham Vowel Sign Oa B
(0x01a73, 0x01a7c,), # Tai Tham Vowel Sign Oa A..Tai Tham Sign Khuen-lue
(0x01a7f, 0x01a7f,), # Tai Tham Combining Cryptogrammic Dot
(0x01b00, 0x01b03,), # Balinese Sign Ulu Ricem ..Balinese Sign Surang
(0x01b34, 0x01b34,), # Balinese Sign Rerekan
(0x01b36, 0x01b3a,), # Balinese Vowel Sign Ulu ..Balinese Vowel Sign Ra R
(0x01b3c, 0x01b3c,), # Balinese Vowel Sign La Lenga
(0x01b42, 0x01b42,), # Balinese Vowel Sign Pepet
(0x01b6b, 0x01b73,), # Balinese Musical Symbol ..Balinese Musical Symbol
(0x01b80, 0x01b81,), # Sundanese Sign Panyecek ..Sundanese Sign Panglayar
(0x01ba2, 0x01ba5,), # Sundanese Consonant Sign..Sundanese Vowel Sign Pan
(0x01ba8, 0x01ba9,), # Sundanese Vowel Sign Pam..Sundanese Vowel Sign Pan
(0x01bab, 0x01bab,), # Sundanese Sign Virama
(0x01be6, 0x01be6,), # Batak Sign Tompi
(0x01be8, 0x01be9,), # Batak Vowel Sign Pakpak ..Batak Vowel Sign Ee
(0x01bed, 0x01bed,), # Batak Vowel Sign Karo O
(0x01bef, 0x01bf1,), # Batak Vowel Sign U For S..Batak Consonant Sign H
(0x01c2c, 0x01c33,), # Lepcha Vowel Sign E ..Lepcha Consonant Sign T
(0x01c36, 0x01c37,), # Lepcha Sign Ran ..Lepcha Sign Nukta
(0x01cd0, 0x01cd2,), # Vedic Tone Karshana ..Vedic Tone Prenkha
(0x01cd4, 0x01ce0,), # Vedic Sign Yajurvedic Mi..Vedic Tone Rigvedic Kash
(0x01ce2, 0x01ce8,), # Vedic Sign Visarga Svari..Vedic Sign Visarga Anuda
(0x01ced, 0x01ced,), # Vedic Sign Tiryak
(0x01cf4, 0x01cf4,), # Vedic Tone Candra Above
(0x01dc0, 0x01de6,), # Combining Dotted Grave A..Combining Latin Small Le
(0x01dfc, 0x01dff,), # Combining Double Inverte..Combining Right Arrowhea
(0x020d0, 0x020f0,), # Combining Left Harpoon A..Combining Asterisk Above
(0x02cef, 0x02cf1,), # Coptic Combining Ni Abov..Coptic Combining Spiritu
(0x02d7f, 0x02d7f,), # Tifinagh Consonant Joiner
(0x02de0, 0x02dff,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0302a, 0x0302d,), # Ideographic Level Tone M..Ideographic Entering Ton
(0x03099, 0x0309a,), # Combining Katakana-hirag..Combining Katakana-hirag
(0x0a66f, 0x0a672,), # Combining Cyrillic Vzmet..Combining Cyrillic Thous
(0x0a674, 0x0a67d,), # Combining Cyrillic Lette..Combining Cyrillic Payer
(0x0a69f, 0x0a69f,), # Combining Cyrillic Letter Iotified E
(0x0a6f0, 0x0a6f1,), # Bamum Combining Mark Koq..Bamum Combining Mark Tuk
(0x0a802, 0x0a802,), # Syloti Nagri Sign Dvisvara
(0x0a806, 0x0a806,), # Syloti Nagri Sign Hasanta
(0x0a80b, 0x0a80b,), # Syloti Nagri Sign Anusvara
(0x0a825, 0x0a826,), # Syloti Nagri Vowel Sign ..Syloti Nagri Vowel Sign
(0x0a8c4, 0x0a8c4,), # Saurashtra Sign Virama
(0x0a8e0, 0x0a8f1,), # Combining Devanagari Dig..Combining Devanagari Sig
(0x0a926, 0x0a92d,), # Kayah Li Vowel Ue ..Kayah Li Tone Calya Plop
(0x0a947, 0x0a951,), # Rejang Vowel Sign I ..Rejang Consonant Sign R
(0x0a980, 0x0a982,), # Javanese Sign Panyangga ..Javanese Sign Layar
(0x0a9b3, 0x0a9b3,), # Javanese Sign Cecak Telu
(0x0a9b6, 0x0a9b9,), # Javanese Vowel Sign Wulu..Javanese Vowel Sign Suku
(0x0a9bc, 0x0a9bc,), # Javanese Vowel Sign Pepet
(0x0aa29, 0x0aa2e,), # Cham Vowel Sign Aa ..Cham Vowel Sign Oe
(0x0aa31, 0x0aa32,), # Cham Vowel Sign Au ..Cham Vowel Sign Ue
(0x0aa35, 0x0aa36,), # Cham Consonant Sign La ..Cham Consonant Sign Wa
(0x0aa43, 0x0aa43,), # Cham Consonant Sign Final Ng
(0x0aa4c, 0x0aa4c,), # Cham Consonant Sign Final M
(0x0aab0, 0x0aab0,), # Tai Viet Mai Kang
(0x0aab2, 0x0aab4,), # Tai Viet Vowel I ..Tai Viet Vowel U
(0x0aab7, 0x0aab8,), # Tai Viet Mai Khit ..Tai Viet Vowel Ia
(0x0aabe, 0x0aabf,), # Tai Viet Vowel Am ..Tai Viet Tone Mai Ek
(0x0aac1, 0x0aac1,), # Tai Viet Tone Mai Tho
(0x0aaec, 0x0aaed,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0aaf6, 0x0aaf6,), # Meetei Mayek Virama
(0x0abe5, 0x0abe5,), # Meetei Mayek Vowel Sign Anap
(0x0abe8, 0x0abe8,), # Meetei Mayek Vowel Sign Unap
(0x0abed, 0x0abed,), # Meetei Mayek Apun Iyek
(0x0fb1e, 0x0fb1e,), # Hebrew Point Judeo-spanish Varika
(0x0fe00, 0x0fe0f,), # Variation Selector-1 ..Variation Selector-16
(0x0fe20, 0x0fe26,), # Combining Ligature Left ..Combining Conjoining Mac
(0x101fd, 0x101fd,), # Phaistos Disc Sign Combining Oblique Stroke
(0x10a01, 0x10a03,), # Kharoshthi Vowel Sign I ..Kharoshthi Vowel Sign Vo
(0x10a05, 0x10a06,), # Kharoshthi Vowel Sign E ..Kharoshthi Vowel Sign O
(0x10a0c, 0x10a0f,), # Kharoshthi Vowel Length ..Kharoshthi Sign Visarga
(0x10a38, 0x10a3a,), # Kharoshthi Sign Bar Abov..Kharoshthi Sign Dot Belo
(0x10a3f, 0x10a3f,), # Kharoshthi Virama
(0x11001, 0x11001,), # Brahmi Sign Anusvara
(0x11038, 0x11046,), # Brahmi Vowel Sign Aa ..Brahmi Virama
(0x11080, 0x11081,), # Kaithi Sign Candrabindu ..Kaithi Sign Anusvara
(0x110b3, 0x110b6,), # Kaithi Vowel Sign U ..Kaithi Vowel Sign Ai
(0x110b9, 0x110ba,), # Kaithi Sign Virama ..Kaithi Sign Nukta
(0x11100, 0x11102,), # Chakma Sign Candrabindu ..Chakma Sign Visarga
(0x11127, 0x1112b,), # Chakma Vowel Sign A ..Chakma Vowel Sign Uu
(0x1112d, 0x11134,), # Chakma Vowel Sign Ai ..Chakma Maayyaa
(0x11180, 0x11181,), # Sharada Sign Candrabindu..Sharada Sign Anusvara
(0x111b6, 0x111be,), # Sharada Vowel Sign U ..Sharada Vowel Sign O
(0x116ab, 0x116ab,), # Takri Sign Anusvara
(0x116ad, 0x116ad,), # Takri Vowel Sign Aa
(0x116b0, 0x116b5,), # Takri Vowel Sign U ..Takri Vowel Sign Au
(0x116b7, 0x116b7,), # Takri Sign Nukta
(0x16f8f, 0x16f92,), # Miao Tone Right ..Miao Tone Below
(0x1d167, 0x1d169,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d17b, 0x1d182,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d185, 0x1d18b,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d1aa, 0x1d1ad,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d242, 0x1d244,), # Combining Greek Musical ..Combining Greek Musical
(0xe0100, 0xe01ef,), # Variation Selector-17 ..Variation Selector-256
),
'6.3.0': (
# Source: DerivedGeneralCategory-6.3.0.txt
# Date: 2013-07-05, 14:08:45 GMT [MD]
#
(0x00300, 0x0036f,), # Combining Grave Accent ..Combining Latin Small Le
(0x00483, 0x00489,), # Combining Cyrillic Titlo..Combining Cyrillic Milli
(0x00591, 0x005bd,), # Hebrew Accent Etnahta ..Hebrew Point Meteg
(0x005bf, 0x005bf,), # Hebrew Point Rafe
(0x005c1, 0x005c2,), # Hebrew Point Shin Dot ..Hebrew Point Sin Dot
(0x005c4, 0x005c5,), # Hebrew Mark Upper Dot ..Hebrew Mark Lower Dot
(0x005c7, 0x005c7,), # Hebrew Point Qamats Qatan
(0x00610, 0x0061a,), # Arabic Sign Sallallahou ..Arabic Small Kasra
(0x0064b, 0x0065f,), # Arabic Fathatan ..Arabic Wavy Hamza Below
(0x00670, 0x00670,), # Arabic Letter Superscript Alef
(0x006d6, 0x006dc,), # Arabic Small High Ligatu..Arabic Small High Seen
(0x006df, 0x006e4,), # Arabic Small High Rounde..Arabic Small High Madda
(0x006e7, 0x006e8,), # Arabic Small High Yeh ..Arabic Small High Noon
(0x006ea, 0x006ed,), # Arabic Empty Centre Low ..Arabic Small Low Meem
(0x00711, 0x00711,), # Syriac Letter Superscript Alaph
(0x00730, 0x0074a,), # Syriac Pthaha Above ..Syriac Barrekh
(0x007a6, 0x007b0,), # Thaana Abafili ..Thaana Sukun
(0x007eb, 0x007f3,), # Nko Combining Short High..Nko Combining Double Dot
(0x00816, 0x00819,), # Samaritan Mark In ..Samaritan Mark Dagesh
(0x0081b, 0x00823,), # Samaritan Mark Epentheti..Samaritan Vowel Sign A
(0x00825, 0x00827,), # Samaritan Vowel Sign Sho..Samaritan Vowel Sign U
(0x00829, 0x0082d,), # Samaritan Vowel Sign Lon..Samaritan Mark Nequdaa
(0x00859, 0x0085b,), # Mandaic Affrication Mark..Mandaic Gemination Mark
(0x008e4, 0x008fe,), # Arabic Curly Fatha ..Arabic Damma With Dot
(0x00900, 0x00902,), # Devanagari Sign Inverted..Devanagari Sign Anusvara
(0x0093a, 0x0093a,), # Devanagari Vowel Sign Oe
(0x0093c, 0x0093c,), # Devanagari Sign Nukta
(0x00941, 0x00948,), # Devanagari Vowel Sign U ..Devanagari Vowel Sign Ai
(0x0094d, 0x0094d,), # Devanagari Sign Virama
(0x00951, 0x00957,), # Devanagari Stress Sign U..Devanagari Vowel Sign Uu
(0x00962, 0x00963,), # Devanagari Vowel Sign Vo..Devanagari Vowel Sign Vo
(0x00981, 0x00981,), # Bengali Sign Candrabindu
(0x009bc, 0x009bc,), # Bengali Sign Nukta
(0x009c1, 0x009c4,), # Bengali Vowel Sign U ..Bengali Vowel Sign Vocal
(0x009cd, 0x009cd,), # Bengali Sign Virama
(0x009e2, 0x009e3,), # Bengali Vowel Sign Vocal..Bengali Vowel Sign Vocal
(0x00a01, 0x00a02,), # Gurmukhi Sign Adak Bindi..Gurmukhi Sign Bindi
(0x00a3c, 0x00a3c,), # Gurmukhi Sign Nukta
(0x00a41, 0x00a42,), # Gurmukhi Vowel Sign U ..Gurmukhi Vowel Sign Uu
(0x00a47, 0x00a48,), # Gurmukhi Vowel Sign Ee ..Gurmukhi Vowel Sign Ai
(0x00a4b, 0x00a4d,), # Gurmukhi Vowel Sign Oo ..Gurmukhi Sign Virama
(0x00a51, 0x00a51,), # Gurmukhi Sign Udaat
(0x00a70, 0x00a71,), # Gurmukhi Tippi ..Gurmukhi Addak
(0x00a75, 0x00a75,), # Gurmukhi Sign Yakash
(0x00a81, 0x00a82,), # Gujarati Sign Candrabind..Gujarati Sign Anusvara
(0x00abc, 0x00abc,), # Gujarati Sign Nukta
(0x00ac1, 0x00ac5,), # Gujarati Vowel Sign U ..Gujarati Vowel Sign Cand
(0x00ac7, 0x00ac8,), # Gujarati Vowel Sign E ..Gujarati Vowel Sign Ai
(0x00acd, 0x00acd,), # Gujarati Sign Virama
(0x00ae2, 0x00ae3,), # Gujarati Vowel Sign Voca..Gujarati Vowel Sign Voca
(0x00b01, 0x00b01,), # Oriya Sign Candrabindu
(0x00b3c, 0x00b3c,), # Oriya Sign Nukta
(0x00b3f, 0x00b3f,), # Oriya Vowel Sign I
(0x00b41, 0x00b44,), # Oriya Vowel Sign U ..Oriya Vowel Sign Vocalic
(0x00b4d, 0x00b4d,), # Oriya Sign Virama
(0x00b56, 0x00b56,), # Oriya Ai Length Mark
(0x00b62, 0x00b63,), # Oriya Vowel Sign Vocalic..Oriya Vowel Sign Vocalic
(0x00b82, 0x00b82,), # Tamil Sign Anusvara
(0x00bc0, 0x00bc0,), # Tamil Vowel Sign Ii
(0x00bcd, 0x00bcd,), # Tamil Sign Virama
(0x00c3e, 0x00c40,), # Telugu Vowel Sign Aa ..Telugu Vowel Sign Ii
(0x00c46, 0x00c48,), # Telugu Vowel Sign E ..Telugu Vowel Sign Ai
(0x00c4a, 0x00c4d,), # Telugu Vowel Sign O ..Telugu Sign Virama
(0x00c55, 0x00c56,), # Telugu Length Mark ..Telugu Ai Length Mark
(0x00c62, 0x00c63,), # Telugu Vowel Sign Vocali..Telugu Vowel Sign Vocali
(0x00cbc, 0x00cbc,), # Kannada Sign Nukta
(0x00cbf, 0x00cbf,), # Kannada Vowel Sign I
(0x00cc6, 0x00cc6,), # Kannada Vowel Sign E
(0x00ccc, 0x00ccd,), # Kannada Vowel Sign Au ..Kannada Sign Virama
(0x00ce2, 0x00ce3,), # Kannada Vowel Sign Vocal..Kannada Vowel Sign Vocal
(0x00d41, 0x00d44,), # Malayalam Vowel Sign U ..Malayalam Vowel Sign Voc
(0x00d4d, 0x00d4d,), # Malayalam Sign Virama
(0x00d62, 0x00d63,), # Malayalam Vowel Sign Voc..Malayalam Vowel Sign Voc
(0x00dca, 0x00dca,), # Sinhala Sign Al-lakuna
(0x00dd2, 0x00dd4,), # Sinhala Vowel Sign Ketti..Sinhala Vowel Sign Ketti
(0x00dd6, 0x00dd6,), # Sinhala Vowel Sign Diga Paa-pilla
(0x00e31, 0x00e31,), # Thai Character Mai Han-akat
(0x00e34, 0x00e3a,), # Thai Character Sara I ..Thai Character Phinthu
(0x00e47, 0x00e4e,), # Thai Character Maitaikhu..Thai Character Yamakkan
(0x00eb1, 0x00eb1,), # Lao Vowel Sign Mai Kan
(0x00eb4, 0x00eb9,), # Lao Vowel Sign I ..Lao Vowel Sign Uu
(0x00ebb, 0x00ebc,), # Lao Vowel Sign Mai Kon ..Lao Semivowel Sign Lo
(0x00ec8, 0x00ecd,), # Lao Tone Mai Ek ..Lao Niggahita
(0x00f18, 0x00f19,), # Tibetan Astrological Sig..Tibetan Astrological Sig
(0x00f35, 0x00f35,), # Tibetan Mark Ngas Bzung Nyi Zla
(0x00f37, 0x00f37,), # Tibetan Mark Ngas Bzung Sgor Rtags
(0x00f39, 0x00f39,), # Tibetan Mark Tsa -phru
(0x00f71, 0x00f7e,), # Tibetan Vowel Sign Aa ..Tibetan Sign Rjes Su Nga
(0x00f80, 0x00f84,), # Tibetan Vowel Sign Rever..Tibetan Mark Halanta
(0x00f86, 0x00f87,), # Tibetan Sign Lci Rtags ..Tibetan Sign Yang Rtags
(0x00f8d, 0x00f97,), # Tibetan Subjoined Sign L..Tibetan Subjoined Letter
(0x00f99, 0x00fbc,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00fc6, 0x00fc6,), # Tibetan Symbol Padma Gdan
(0x0102d, 0x01030,), # Myanmar Vowel Sign I ..Myanmar Vowel Sign Uu
(0x01032, 0x01037,), # Myanmar Vowel Sign Ai ..Myanmar Sign Dot Below
(0x01039, 0x0103a,), # Myanmar Sign Virama ..Myanmar Sign Asat
(0x0103d, 0x0103e,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01058, 0x01059,), # Myanmar Vowel Sign Vocal..Myanmar Vowel Sign Vocal
(0x0105e, 0x01060,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01071, 0x01074,), # Myanmar Vowel Sign Geba ..Myanmar Vowel Sign Kayah
(0x01082, 0x01082,), # Myanmar Consonant Sign Shan Medial Wa
(0x01085, 0x01086,), # Myanmar Vowel Sign Shan ..Myanmar Vowel Sign Shan
(0x0108d, 0x0108d,), # Myanmar Sign Shan Council Emphatic Tone
(0x0109d, 0x0109d,), # Myanmar Vowel Sign Aiton Ai
(0x0135d, 0x0135f,), # Ethiopic Combining Gemin..Ethiopic Combining Gemin
(0x01712, 0x01714,), # Tagalog Vowel Sign I ..Tagalog Sign Virama
(0x01732, 0x01734,), # Hanunoo Vowel Sign I ..Hanunoo Sign Pamudpod
(0x01752, 0x01753,), # Buhid Vowel Sign I ..Buhid Vowel Sign U
(0x01772, 0x01773,), # Tagbanwa Vowel Sign I ..Tagbanwa Vowel Sign U
(0x017b4, 0x017b5,), # Khmer Vowel Inherent Aq ..Khmer Vowel Inherent Aa
(0x017b7, 0x017bd,), # Khmer Vowel Sign I ..Khmer Vowel Sign Ua
(0x017c6, 0x017c6,), # Khmer Sign Nikahit
(0x017c9, 0x017d3,), # Khmer Sign Muusikatoan ..Khmer Sign Bathamasat
(0x017dd, 0x017dd,), # Khmer Sign Atthacan
(0x0180b, 0x0180d,), # Mongolian Free Variation..Mongolian Free Variation
(0x018a9, 0x018a9,), # Mongolian Letter Ali Gali Dagalga
(0x01920, 0x01922,), # Limbu Vowel Sign A ..Limbu Vowel Sign U
(0x01927, 0x01928,), # Limbu Vowel Sign E ..Limbu Vowel Sign O
(0x01932, 0x01932,), # Limbu Small Letter Anusvara
(0x01939, 0x0193b,), # Limbu Sign Mukphreng ..Limbu Sign Sa-i
(0x01a17, 0x01a18,), # Buginese Vowel Sign I ..Buginese Vowel Sign U
(0x01a1b, 0x01a1b,), # Buginese Vowel Sign Ae
(0x01a56, 0x01a56,), # Tai Tham Consonant Sign Medial La
(0x01a58, 0x01a5e,), # Tai Tham Sign Mai Kang L..Tai Tham Consonant Sign
(0x01a60, 0x01a60,), # Tai Tham Sign Sakot
(0x01a62, 0x01a62,), # Tai Tham Vowel Sign Mai Sat
(0x01a65, 0x01a6c,), # Tai Tham Vowel Sign I ..Tai Tham Vowel Sign Oa B
(0x01a73, 0x01a7c,), # Tai Tham Vowel Sign Oa A..Tai Tham Sign Khuen-lue
(0x01a7f, 0x01a7f,), # Tai Tham Combining Cryptogrammic Dot
(0x01b00, 0x01b03,), # Balinese Sign Ulu Ricem ..Balinese Sign Surang
(0x01b34, 0x01b34,), # Balinese Sign Rerekan
(0x01b36, 0x01b3a,), # Balinese Vowel Sign Ulu ..Balinese Vowel Sign Ra R
(0x01b3c, 0x01b3c,), # Balinese Vowel Sign La Lenga
(0x01b42, 0x01b42,), # Balinese Vowel Sign Pepet
(0x01b6b, 0x01b73,), # Balinese Musical Symbol ..Balinese Musical Symbol
(0x01b80, 0x01b81,), # Sundanese Sign Panyecek ..Sundanese Sign Panglayar
(0x01ba2, 0x01ba5,), # Sundanese Consonant Sign..Sundanese Vowel Sign Pan
(0x01ba8, 0x01ba9,), # Sundanese Vowel Sign Pam..Sundanese Vowel Sign Pan
(0x01bab, 0x01bab,), # Sundanese Sign Virama
(0x01be6, 0x01be6,), # Batak Sign Tompi
(0x01be8, 0x01be9,), # Batak Vowel Sign Pakpak ..Batak Vowel Sign Ee
(0x01bed, 0x01bed,), # Batak Vowel Sign Karo O
(0x01bef, 0x01bf1,), # Batak Vowel Sign U For S..Batak Consonant Sign H
(0x01c2c, 0x01c33,), # Lepcha Vowel Sign E ..Lepcha Consonant Sign T
(0x01c36, 0x01c37,), # Lepcha Sign Ran ..Lepcha Sign Nukta
(0x01cd0, 0x01cd2,), # Vedic Tone Karshana ..Vedic Tone Prenkha
(0x01cd4, 0x01ce0,), # Vedic Sign Yajurvedic Mi..Vedic Tone Rigvedic Kash
(0x01ce2, 0x01ce8,), # Vedic Sign Visarga Svari..Vedic Sign Visarga Anuda
(0x01ced, 0x01ced,), # Vedic Sign Tiryak
(0x01cf4, 0x01cf4,), # Vedic Tone Candra Above
(0x01dc0, 0x01de6,), # Combining Dotted Grave A..Combining Latin Small Le
(0x01dfc, 0x01dff,), # Combining Double Inverte..Combining Right Arrowhea
(0x020d0, 0x020f0,), # Combining Left Harpoon A..Combining Asterisk Above
(0x02cef, 0x02cf1,), # Coptic Combining Ni Abov..Coptic Combining Spiritu
(0x02d7f, 0x02d7f,), # Tifinagh Consonant Joiner
(0x02de0, 0x02dff,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0302a, 0x0302d,), # Ideographic Level Tone M..Ideographic Entering Ton
(0x03099, 0x0309a,), # Combining Katakana-hirag..Combining Katakana-hirag
(0x0a66f, 0x0a672,), # Combining Cyrillic Vzmet..Combining Cyrillic Thous
(0x0a674, 0x0a67d,), # Combining Cyrillic Lette..Combining Cyrillic Payer
(0x0a69f, 0x0a69f,), # Combining Cyrillic Letter Iotified E
(0x0a6f0, 0x0a6f1,), # Bamum Combining Mark Koq..Bamum Combining Mark Tuk
(0x0a802, 0x0a802,), # Syloti Nagri Sign Dvisvara
(0x0a806, 0x0a806,), # Syloti Nagri Sign Hasanta
(0x0a80b, 0x0a80b,), # Syloti Nagri Sign Anusvara
(0x0a825, 0x0a826,), # Syloti Nagri Vowel Sign ..Syloti Nagri Vowel Sign
(0x0a8c4, 0x0a8c4,), # Saurashtra Sign Virama
(0x0a8e0, 0x0a8f1,), # Combining Devanagari Dig..Combining Devanagari Sig
(0x0a926, 0x0a92d,), # Kayah Li Vowel Ue ..Kayah Li Tone Calya Plop
(0x0a947, 0x0a951,), # Rejang Vowel Sign I ..Rejang Consonant Sign R
(0x0a980, 0x0a982,), # Javanese Sign Panyangga ..Javanese Sign Layar
(0x0a9b3, 0x0a9b3,), # Javanese Sign Cecak Telu
(0x0a9b6, 0x0a9b9,), # Javanese Vowel Sign Wulu..Javanese Vowel Sign Suku
(0x0a9bc, 0x0a9bc,), # Javanese Vowel Sign Pepet
(0x0aa29, 0x0aa2e,), # Cham Vowel Sign Aa ..Cham Vowel Sign Oe
(0x0aa31, 0x0aa32,), # Cham Vowel Sign Au ..Cham Vowel Sign Ue
(0x0aa35, 0x0aa36,), # Cham Consonant Sign La ..Cham Consonant Sign Wa
(0x0aa43, 0x0aa43,), # Cham Consonant Sign Final Ng
(0x0aa4c, 0x0aa4c,), # Cham Consonant Sign Final M
(0x0aab0, 0x0aab0,), # Tai Viet Mai Kang
(0x0aab2, 0x0aab4,), # Tai Viet Vowel I ..Tai Viet Vowel U
(0x0aab7, 0x0aab8,), # Tai Viet Mai Khit ..Tai Viet Vowel Ia
(0x0aabe, 0x0aabf,), # Tai Viet Vowel Am ..Tai Viet Tone Mai Ek
(0x0aac1, 0x0aac1,), # Tai Viet Tone Mai Tho
(0x0aaec, 0x0aaed,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0aaf6, 0x0aaf6,), # Meetei Mayek Virama
(0x0abe5, 0x0abe5,), # Meetei Mayek Vowel Sign Anap
(0x0abe8, 0x0abe8,), # Meetei Mayek Vowel Sign Unap
(0x0abed, 0x0abed,), # Meetei Mayek Apun Iyek
(0x0fb1e, 0x0fb1e,), # Hebrew Point Judeo-spanish Varika
(0x0fe00, 0x0fe0f,), # Variation Selector-1 ..Variation Selector-16
(0x0fe20, 0x0fe26,), # Combining Ligature Left ..Combining Conjoining Mac
(0x101fd, 0x101fd,), # Phaistos Disc Sign Combining Oblique Stroke
(0x10a01, 0x10a03,), # Kharoshthi Vowel Sign I ..Kharoshthi Vowel Sign Vo
(0x10a05, 0x10a06,), # Kharoshthi Vowel Sign E ..Kharoshthi Vowel Sign O
(0x10a0c, 0x10a0f,), # Kharoshthi Vowel Length ..Kharoshthi Sign Visarga
(0x10a38, 0x10a3a,), # Kharoshthi Sign Bar Abov..Kharoshthi Sign Dot Belo
(0x10a3f, 0x10a3f,), # Kharoshthi Virama
(0x11001, 0x11001,), # Brahmi Sign Anusvara
(0x11038, 0x11046,), # Brahmi Vowel Sign Aa ..Brahmi Virama
(0x11080, 0x11081,), # Kaithi Sign Candrabindu ..Kaithi Sign Anusvara
(0x110b3, 0x110b6,), # Kaithi Vowel Sign U ..Kaithi Vowel Sign Ai
(0x110b9, 0x110ba,), # Kaithi Sign Virama ..Kaithi Sign Nukta
(0x11100, 0x11102,), # Chakma Sign Candrabindu ..Chakma Sign Visarga
(0x11127, 0x1112b,), # Chakma Vowel Sign A ..Chakma Vowel Sign Uu
(0x1112d, 0x11134,), # Chakma Vowel Sign Ai ..Chakma Maayyaa
(0x11180, 0x11181,), # Sharada Sign Candrabindu..Sharada Sign Anusvara
(0x111b6, 0x111be,), # Sharada Vowel Sign U ..Sharada Vowel Sign O
(0x116ab, 0x116ab,), # Takri Sign Anusvara
(0x116ad, 0x116ad,), # Takri Vowel Sign Aa
(0x116b0, 0x116b5,), # Takri Vowel Sign U ..Takri Vowel Sign Au
(0x116b7, 0x116b7,), # Takri Sign Nukta
(0x16f8f, 0x16f92,), # Miao Tone Right ..Miao Tone Below
(0x1d167, 0x1d169,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d17b, 0x1d182,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d185, 0x1d18b,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d1aa, 0x1d1ad,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d242, 0x1d244,), # Combining Greek Musical ..Combining Greek Musical
(0xe0100, 0xe01ef,), # Variation Selector-17 ..Variation Selector-256
),
'7.0.0': (
# Source: DerivedGeneralCategory-7.0.0.txt
# Date: 2014-02-07, 18:42:12 GMT [MD]
#
(0x00300, 0x0036f,), # Combining Grave Accent ..Combining Latin Small Le
(0x00483, 0x00489,), # Combining Cyrillic Titlo..Combining Cyrillic Milli
(0x00591, 0x005bd,), # Hebrew Accent Etnahta ..Hebrew Point Meteg
(0x005bf, 0x005bf,), # Hebrew Point Rafe
(0x005c1, 0x005c2,), # Hebrew Point Shin Dot ..Hebrew Point Sin Dot
(0x005c4, 0x005c5,), # Hebrew Mark Upper Dot ..Hebrew Mark Lower Dot
(0x005c7, 0x005c7,), # Hebrew Point Qamats Qatan
(0x00610, 0x0061a,), # Arabic Sign Sallallahou ..Arabic Small Kasra
(0x0064b, 0x0065f,), # Arabic Fathatan ..Arabic Wavy Hamza Below
(0x00670, 0x00670,), # Arabic Letter Superscript Alef
(0x006d6, 0x006dc,), # Arabic Small High Ligatu..Arabic Small High Seen
(0x006df, 0x006e4,), # Arabic Small High Rounde..Arabic Small High Madda
(0x006e7, 0x006e8,), # Arabic Small High Yeh ..Arabic Small High Noon
(0x006ea, 0x006ed,), # Arabic Empty Centre Low ..Arabic Small Low Meem
(0x00711, 0x00711,), # Syriac Letter Superscript Alaph
(0x00730, 0x0074a,), # Syriac Pthaha Above ..Syriac Barrekh
(0x007a6, 0x007b0,), # Thaana Abafili ..Thaana Sukun
(0x007eb, 0x007f3,), # Nko Combining Short High..Nko Combining Double Dot
(0x00816, 0x00819,), # Samaritan Mark In ..Samaritan Mark Dagesh
(0x0081b, 0x00823,), # Samaritan Mark Epentheti..Samaritan Vowel Sign A
(0x00825, 0x00827,), # Samaritan Vowel Sign Sho..Samaritan Vowel Sign U
(0x00829, 0x0082d,), # Samaritan Vowel Sign Lon..Samaritan Mark Nequdaa
(0x00859, 0x0085b,), # Mandaic Affrication Mark..Mandaic Gemination Mark
(0x008e4, 0x00902,), # Arabic Curly Fatha ..Devanagari Sign Anusvara
(0x0093a, 0x0093a,), # Devanagari Vowel Sign Oe
(0x0093c, 0x0093c,), # Devanagari Sign Nukta
(0x00941, 0x00948,), # Devanagari Vowel Sign U ..Devanagari Vowel Sign Ai
(0x0094d, 0x0094d,), # Devanagari Sign Virama
(0x00951, 0x00957,), # Devanagari Stress Sign U..Devanagari Vowel Sign Uu
(0x00962, 0x00963,), # Devanagari Vowel Sign Vo..Devanagari Vowel Sign Vo
(0x00981, 0x00981,), # Bengali Sign Candrabindu
(0x009bc, 0x009bc,), # Bengali Sign Nukta
(0x009c1, 0x009c4,), # Bengali Vowel Sign U ..Bengali Vowel Sign Vocal
(0x009cd, 0x009cd,), # Bengali Sign Virama
(0x009e2, 0x009e3,), # Bengali Vowel Sign Vocal..Bengali Vowel Sign Vocal
(0x00a01, 0x00a02,), # Gurmukhi Sign Adak Bindi..Gurmukhi Sign Bindi
(0x00a3c, 0x00a3c,), # Gurmukhi Sign Nukta
(0x00a41, 0x00a42,), # Gurmukhi Vowel Sign U ..Gurmukhi Vowel Sign Uu
(0x00a47, 0x00a48,), # Gurmukhi Vowel Sign Ee ..Gurmukhi Vowel Sign Ai
(0x00a4b, 0x00a4d,), # Gurmukhi Vowel Sign Oo ..Gurmukhi Sign Virama
(0x00a51, 0x00a51,), # Gurmukhi Sign Udaat
(0x00a70, 0x00a71,), # Gurmukhi Tippi ..Gurmukhi Addak
(0x00a75, 0x00a75,), # Gurmukhi Sign Yakash
(0x00a81, 0x00a82,), # Gujarati Sign Candrabind..Gujarati Sign Anusvara
(0x00abc, 0x00abc,), # Gujarati Sign Nukta
(0x00ac1, 0x00ac5,), # Gujarati Vowel Sign U ..Gujarati Vowel Sign Cand
(0x00ac7, 0x00ac8,), # Gujarati Vowel Sign E ..Gujarati Vowel Sign Ai
(0x00acd, 0x00acd,), # Gujarati Sign Virama
(0x00ae2, 0x00ae3,), # Gujarati Vowel Sign Voca..Gujarati Vowel Sign Voca
(0x00b01, 0x00b01,), # Oriya Sign Candrabindu
(0x00b3c, 0x00b3c,), # Oriya Sign Nukta
(0x00b3f, 0x00b3f,), # Oriya Vowel Sign I
(0x00b41, 0x00b44,), # Oriya Vowel Sign U ..Oriya Vowel Sign Vocalic
(0x00b4d, 0x00b4d,), # Oriya Sign Virama
(0x00b56, 0x00b56,), # Oriya Ai Length Mark
(0x00b62, 0x00b63,), # Oriya Vowel Sign Vocalic..Oriya Vowel Sign Vocalic
(0x00b82, 0x00b82,), # Tamil Sign Anusvara
(0x00bc0, 0x00bc0,), # Tamil Vowel Sign Ii
(0x00bcd, 0x00bcd,), # Tamil Sign Virama
(0x00c00, 0x00c00,), # Telugu Sign Combining Candrabindu Above
(0x00c3e, 0x00c40,), # Telugu Vowel Sign Aa ..Telugu Vowel Sign Ii
(0x00c46, 0x00c48,), # Telugu Vowel Sign E ..Telugu Vowel Sign Ai
(0x00c4a, 0x00c4d,), # Telugu Vowel Sign O ..Telugu Sign Virama
(0x00c55, 0x00c56,), # Telugu Length Mark ..Telugu Ai Length Mark
(0x00c62, 0x00c63,), # Telugu Vowel Sign Vocali..Telugu Vowel Sign Vocali
(0x00c81, 0x00c81,), # Kannada Sign Candrabindu
(0x00cbc, 0x00cbc,), # Kannada Sign Nukta
(0x00cbf, 0x00cbf,), # Kannada Vowel Sign I
(0x00cc6, 0x00cc6,), # Kannada Vowel Sign E
(0x00ccc, 0x00ccd,), # Kannada Vowel Sign Au ..Kannada Sign Virama
(0x00ce2, 0x00ce3,), # Kannada Vowel Sign Vocal..Kannada Vowel Sign Vocal
(0x00d01, 0x00d01,), # Malayalam Sign Candrabindu
(0x00d41, 0x00d44,), # Malayalam Vowel Sign U ..Malayalam Vowel Sign Voc
(0x00d4d, 0x00d4d,), # Malayalam Sign Virama
(0x00d62, 0x00d63,), # Malayalam Vowel Sign Voc..Malayalam Vowel Sign Voc
(0x00dca, 0x00dca,), # Sinhala Sign Al-lakuna
(0x00dd2, 0x00dd4,), # Sinhala Vowel Sign Ketti..Sinhala Vowel Sign Ketti
(0x00dd6, 0x00dd6,), # Sinhala Vowel Sign Diga Paa-pilla
(0x00e31, 0x00e31,), # Thai Character Mai Han-akat
(0x00e34, 0x00e3a,), # Thai Character Sara I ..Thai Character Phinthu
(0x00e47, 0x00e4e,), # Thai Character Maitaikhu..Thai Character Yamakkan
(0x00eb1, 0x00eb1,), # Lao Vowel Sign Mai Kan
(0x00eb4, 0x00eb9,), # Lao Vowel Sign I ..Lao Vowel Sign Uu
(0x00ebb, 0x00ebc,), # Lao Vowel Sign Mai Kon ..Lao Semivowel Sign Lo
(0x00ec8, 0x00ecd,), # Lao Tone Mai Ek ..Lao Niggahita
(0x00f18, 0x00f19,), # Tibetan Astrological Sig..Tibetan Astrological Sig
(0x00f35, 0x00f35,), # Tibetan Mark Ngas Bzung Nyi Zla
(0x00f37, 0x00f37,), # Tibetan Mark Ngas Bzung Sgor Rtags
(0x00f39, 0x00f39,), # Tibetan Mark Tsa -phru
(0x00f71, 0x00f7e,), # Tibetan Vowel Sign Aa ..Tibetan Sign Rjes Su Nga
(0x00f80, 0x00f84,), # Tibetan Vowel Sign Rever..Tibetan Mark Halanta
(0x00f86, 0x00f87,), # Tibetan Sign Lci Rtags ..Tibetan Sign Yang Rtags
(0x00f8d, 0x00f97,), # Tibetan Subjoined Sign L..Tibetan Subjoined Letter
(0x00f99, 0x00fbc,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00fc6, 0x00fc6,), # Tibetan Symbol Padma Gdan
(0x0102d, 0x01030,), # Myanmar Vowel Sign I ..Myanmar Vowel Sign Uu
(0x01032, 0x01037,), # Myanmar Vowel Sign Ai ..Myanmar Sign Dot Below
(0x01039, 0x0103a,), # Myanmar Sign Virama ..Myanmar Sign Asat
(0x0103d, 0x0103e,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01058, 0x01059,), # Myanmar Vowel Sign Vocal..Myanmar Vowel Sign Vocal
(0x0105e, 0x01060,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01071, 0x01074,), # Myanmar Vowel Sign Geba ..Myanmar Vowel Sign Kayah
(0x01082, 0x01082,), # Myanmar Consonant Sign Shan Medial Wa
(0x01085, 0x01086,), # Myanmar Vowel Sign Shan ..Myanmar Vowel Sign Shan
(0x0108d, 0x0108d,), # Myanmar Sign Shan Council Emphatic Tone
(0x0109d, 0x0109d,), # Myanmar Vowel Sign Aiton Ai
(0x0135d, 0x0135f,), # Ethiopic Combining Gemin..Ethiopic Combining Gemin
(0x01712, 0x01714,), # Tagalog Vowel Sign I ..Tagalog Sign Virama
(0x01732, 0x01734,), # Hanunoo Vowel Sign I ..Hanunoo Sign Pamudpod
(0x01752, 0x01753,), # Buhid Vowel Sign I ..Buhid Vowel Sign U
(0x01772, 0x01773,), # Tagbanwa Vowel Sign I ..Tagbanwa Vowel Sign U
(0x017b4, 0x017b5,), # Khmer Vowel Inherent Aq ..Khmer Vowel Inherent Aa
(0x017b7, 0x017bd,), # Khmer Vowel Sign I ..Khmer Vowel Sign Ua
(0x017c6, 0x017c6,), # Khmer Sign Nikahit
(0x017c9, 0x017d3,), # Khmer Sign Muusikatoan ..Khmer Sign Bathamasat
(0x017dd, 0x017dd,), # Khmer Sign Atthacan
(0x0180b, 0x0180d,), # Mongolian Free Variation..Mongolian Free Variation
(0x018a9, 0x018a9,), # Mongolian Letter Ali Gali Dagalga
(0x01920, 0x01922,), # Limbu Vowel Sign A ..Limbu Vowel Sign U
(0x01927, 0x01928,), # Limbu Vowel Sign E ..Limbu Vowel Sign O
(0x01932, 0x01932,), # Limbu Small Letter Anusvara
(0x01939, 0x0193b,), # Limbu Sign Mukphreng ..Limbu Sign Sa-i
(0x01a17, 0x01a18,), # Buginese Vowel Sign I ..Buginese Vowel Sign U
(0x01a1b, 0x01a1b,), # Buginese Vowel Sign Ae
(0x01a56, 0x01a56,), # Tai Tham Consonant Sign Medial La
(0x01a58, 0x01a5e,), # Tai Tham Sign Mai Kang L..Tai Tham Consonant Sign
(0x01a60, 0x01a60,), # Tai Tham Sign Sakot
(0x01a62, 0x01a62,), # Tai Tham Vowel Sign Mai Sat
(0x01a65, 0x01a6c,), # Tai Tham Vowel Sign I ..Tai Tham Vowel Sign Oa B
(0x01a73, 0x01a7c,), # Tai Tham Vowel Sign Oa A..Tai Tham Sign Khuen-lue
(0x01a7f, 0x01a7f,), # Tai Tham Combining Cryptogrammic Dot
(0x01ab0, 0x01abe,), # Combining Doubled Circum..Combining Parentheses Ov
(0x01b00, 0x01b03,), # Balinese Sign Ulu Ricem ..Balinese Sign Surang
(0x01b34, 0x01b34,), # Balinese Sign Rerekan
(0x01b36, 0x01b3a,), # Balinese Vowel Sign Ulu ..Balinese Vowel Sign Ra R
(0x01b3c, 0x01b3c,), # Balinese Vowel Sign La Lenga
(0x01b42, 0x01b42,), # Balinese Vowel Sign Pepet
(0x01b6b, 0x01b73,), # Balinese Musical Symbol ..Balinese Musical Symbol
(0x01b80, 0x01b81,), # Sundanese Sign Panyecek ..Sundanese Sign Panglayar
(0x01ba2, 0x01ba5,), # Sundanese Consonant Sign..Sundanese Vowel Sign Pan
(0x01ba8, 0x01ba9,), # Sundanese Vowel Sign Pam..Sundanese Vowel Sign Pan
(0x01bab, 0x01bad,), # Sundanese Sign Virama ..Sundanese Consonant Sign
(0x01be6, 0x01be6,), # Batak Sign Tompi
(0x01be8, 0x01be9,), # Batak Vowel Sign Pakpak ..Batak Vowel Sign Ee
(0x01bed, 0x01bed,), # Batak Vowel Sign Karo O
(0x01bef, 0x01bf1,), # Batak Vowel Sign U For S..Batak Consonant Sign H
(0x01c2c, 0x01c33,), # Lepcha Vowel Sign E ..Lepcha Consonant Sign T
(0x01c36, 0x01c37,), # Lepcha Sign Ran ..Lepcha Sign Nukta
(0x01cd0, 0x01cd2,), # Vedic Tone Karshana ..Vedic Tone Prenkha
(0x01cd4, 0x01ce0,), # Vedic Sign Yajurvedic Mi..Vedic Tone Rigvedic Kash
(0x01ce2, 0x01ce8,), # Vedic Sign Visarga Svari..Vedic Sign Visarga Anuda
(0x01ced, 0x01ced,), # Vedic Sign Tiryak
(0x01cf4, 0x01cf4,), # Vedic Tone Candra Above
(0x01cf8, 0x01cf9,), # Vedic Tone Ring Above ..Vedic Tone Double Ring A
(0x01dc0, 0x01df5,), # Combining Dotted Grave A..Combining Up Tack Above
(0x01dfc, 0x01dff,), # Combining Double Inverte..Combining Right Arrowhea
(0x020d0, 0x020f0,), # Combining Left Harpoon A..Combining Asterisk Above
(0x02cef, 0x02cf1,), # Coptic Combining Ni Abov..Coptic Combining Spiritu
(0x02d7f, 0x02d7f,), # Tifinagh Consonant Joiner
(0x02de0, 0x02dff,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0302a, 0x0302d,), # Ideographic Level Tone M..Ideographic Entering Ton
(0x03099, 0x0309a,), # Combining Katakana-hirag..Combining Katakana-hirag
(0x0a66f, 0x0a672,), # Combining Cyrillic Vzmet..Combining Cyrillic Thous
(0x0a674, 0x0a67d,), # Combining Cyrillic Lette..Combining Cyrillic Payer
(0x0a69f, 0x0a69f,), # Combining Cyrillic Letter Iotified E
(0x0a6f0, 0x0a6f1,), # Bamum Combining Mark Koq..Bamum Combining Mark Tuk
(0x0a802, 0x0a802,), # Syloti Nagri Sign Dvisvara
(0x0a806, 0x0a806,), # Syloti Nagri Sign Hasanta
(0x0a80b, 0x0a80b,), # Syloti Nagri Sign Anusvara
(0x0a825, 0x0a826,), # Syloti Nagri Vowel Sign ..Syloti Nagri Vowel Sign
(0x0a8c4, 0x0a8c4,), # Saurashtra Sign Virama
(0x0a8e0, 0x0a8f1,), # Combining Devanagari Dig..Combining Devanagari Sig
(0x0a926, 0x0a92d,), # Kayah Li Vowel Ue ..Kayah Li Tone Calya Plop
(0x0a947, 0x0a951,), # Rejang Vowel Sign I ..Rejang Consonant Sign R
(0x0a980, 0x0a982,), # Javanese Sign Panyangga ..Javanese Sign Layar
(0x0a9b3, 0x0a9b3,), # Javanese Sign Cecak Telu
(0x0a9b6, 0x0a9b9,), # Javanese Vowel Sign Wulu..Javanese Vowel Sign Suku
(0x0a9bc, 0x0a9bc,), # Javanese Vowel Sign Pepet
(0x0a9e5, 0x0a9e5,), # Myanmar Sign Shan Saw
(0x0aa29, 0x0aa2e,), # Cham Vowel Sign Aa ..Cham Vowel Sign Oe
(0x0aa31, 0x0aa32,), # Cham Vowel Sign Au ..Cham Vowel Sign Ue
(0x0aa35, 0x0aa36,), # Cham Consonant Sign La ..Cham Consonant Sign Wa
(0x0aa43, 0x0aa43,), # Cham Consonant Sign Final Ng
(0x0aa4c, 0x0aa4c,), # Cham Consonant Sign Final M
(0x0aa7c, 0x0aa7c,), # Myanmar Sign Tai Laing Tone-2
(0x0aab0, 0x0aab0,), # Tai Viet Mai Kang
(0x0aab2, 0x0aab4,), # Tai Viet Vowel I ..Tai Viet Vowel U
(0x0aab7, 0x0aab8,), # Tai Viet Mai Khit ..Tai Viet Vowel Ia
(0x0aabe, 0x0aabf,), # Tai Viet Vowel Am ..Tai Viet Tone Mai Ek
(0x0aac1, 0x0aac1,), # Tai Viet Tone Mai Tho
(0x0aaec, 0x0aaed,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0aaf6, 0x0aaf6,), # Meetei Mayek Virama
(0x0abe5, 0x0abe5,), # Meetei Mayek Vowel Sign Anap
(0x0abe8, 0x0abe8,), # Meetei Mayek Vowel Sign Unap
(0x0abed, 0x0abed,), # Meetei Mayek Apun Iyek
(0x0fb1e, 0x0fb1e,), # Hebrew Point Judeo-spanish Varika
(0x0fe00, 0x0fe0f,), # Variation Selector-1 ..Variation Selector-16
(0x0fe20, 0x0fe2d,), # Combining Ligature Left ..Combining Conjoining Mac
(0x101fd, 0x101fd,), # Phaistos Disc Sign Combining Oblique Stroke
(0x102e0, 0x102e0,), # Coptic Epact Thousands Mark
(0x10376, 0x1037a,), # Combining Old Permic Let..Combining Old Permic Let
(0x10a01, 0x10a03,), # Kharoshthi Vowel Sign I ..Kharoshthi Vowel Sign Vo
(0x10a05, 0x10a06,), # Kharoshthi Vowel Sign E ..Kharoshthi Vowel Sign O
(0x10a0c, 0x10a0f,), # Kharoshthi Vowel Length ..Kharoshthi Sign Visarga
(0x10a38, 0x10a3a,), # Kharoshthi Sign Bar Abov..Kharoshthi Sign Dot Belo
(0x10a3f, 0x10a3f,), # Kharoshthi Virama
(0x10ae5, 0x10ae6,), # Manichaean Abbreviation ..Manichaean Abbreviation
(0x11001, 0x11001,), # Brahmi Sign Anusvara
(0x11038, 0x11046,), # Brahmi Vowel Sign Aa ..Brahmi Virama
(0x1107f, 0x11081,), # Brahmi Number Joiner ..Kaithi Sign Anusvara
(0x110b3, 0x110b6,), # Kaithi Vowel Sign U ..Kaithi Vowel Sign Ai
(0x110b9, 0x110ba,), # Kaithi Sign Virama ..Kaithi Sign Nukta
(0x11100, 0x11102,), # Chakma Sign Candrabindu ..Chakma Sign Visarga
(0x11127, 0x1112b,), # Chakma Vowel Sign A ..Chakma Vowel Sign Uu
(0x1112d, 0x11134,), # Chakma Vowel Sign Ai ..Chakma Maayyaa
(0x11173, 0x11173,), # Mahajani Sign Nukta
(0x11180, 0x11181,), # Sharada Sign Candrabindu..Sharada Sign Anusvara
(0x111b6, 0x111be,), # Sharada Vowel Sign U ..Sharada Vowel Sign O
(0x1122f, 0x11231,), # Khojki Vowel Sign U ..Khojki Vowel Sign Ai
(0x11234, 0x11234,), # Khojki Sign Anusvara
(0x11236, 0x11237,), # Khojki Sign Nukta ..Khojki Sign Shadda
(0x112df, 0x112df,), # Khudawadi Sign Anusvara
(0x112e3, 0x112ea,), # Khudawadi Vowel Sign U ..Khudawadi Sign Virama
(0x11301, 0x11301,), # Grantha Sign Candrabindu
(0x1133c, 0x1133c,), # Grantha Sign Nukta
(0x11340, 0x11340,), # Grantha Vowel Sign Ii
(0x11366, 0x1136c,), # Combining Grantha Digit ..Combining Grantha Digit
(0x11370, 0x11374,), # Combining Grantha Letter..Combining Grantha Letter
(0x114b3, 0x114b8,), # Tirhuta Vowel Sign U ..Tirhuta Vowel Sign Vocal
(0x114ba, 0x114ba,), # Tirhuta Vowel Sign Short E
(0x114bf, 0x114c0,), # Tirhuta Sign Candrabindu..Tirhuta Sign Anusvara
(0x114c2, 0x114c3,), # Tirhuta Sign Virama ..Tirhuta Sign Nukta
(0x115b2, 0x115b5,), # Siddham Vowel Sign U ..Siddham Vowel Sign Vocal
(0x115bc, 0x115bd,), # Siddham Sign Candrabindu..Siddham Sign Anusvara
(0x115bf, 0x115c0,), # Siddham Sign Virama ..Siddham Sign Nukta
(0x11633, 0x1163a,), # Modi Vowel Sign U ..Modi Vowel Sign Ai
(0x1163d, 0x1163d,), # Modi Sign Anusvara
(0x1163f, 0x11640,), # Modi Sign Virama ..Modi Sign Ardhacandra
(0x116ab, 0x116ab,), # Takri Sign Anusvara
(0x116ad, 0x116ad,), # Takri Vowel Sign Aa
(0x116b0, 0x116b5,), # Takri Vowel Sign U ..Takri Vowel Sign Au
(0x116b7, 0x116b7,), # Takri Sign Nukta
(0x16af0, 0x16af4,), # Bassa Vah Combining High..Bassa Vah Combining High
(0x16b30, 0x16b36,), # Pahawh Hmong Mark Cim Tu..Pahawh Hmong Mark Cim Ta
(0x16f8f, 0x16f92,), # Miao Tone Right ..Miao Tone Below
(0x1bc9d, 0x1bc9e,), # Duployan Thick Letter Se..Duployan Double Mark
(0x1d167, 0x1d169,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d17b, 0x1d182,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d185, 0x1d18b,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d1aa, 0x1d1ad,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d242, 0x1d244,), # Combining Greek Musical ..Combining Greek Musical
(0x1e8d0, 0x1e8d6,), # Mende Kikakui Combining ..Mende Kikakui Combining
(0xe0100, 0xe01ef,), # Variation Selector-17 ..Variation Selector-256
),
'8.0.0': (
# Source: DerivedGeneralCategory-8.0.0.txt
# Date: 2015-02-13, 13:47:11 GMT [MD]
#
(0x00300, 0x0036f,), # Combining Grave Accent ..Combining Latin Small Le
(0x00483, 0x00489,), # Combining Cyrillic Titlo..Combining Cyrillic Milli
(0x00591, 0x005bd,), # Hebrew Accent Etnahta ..Hebrew Point Meteg
(0x005bf, 0x005bf,), # Hebrew Point Rafe
(0x005c1, 0x005c2,), # Hebrew Point Shin Dot ..Hebrew Point Sin Dot
(0x005c4, 0x005c5,), # Hebrew Mark Upper Dot ..Hebrew Mark Lower Dot
(0x005c7, 0x005c7,), # Hebrew Point Qamats Qatan
(0x00610, 0x0061a,), # Arabic Sign Sallallahou ..Arabic Small Kasra
(0x0064b, 0x0065f,), # Arabic Fathatan ..Arabic Wavy Hamza Below
(0x00670, 0x00670,), # Arabic Letter Superscript Alef
(0x006d6, 0x006dc,), # Arabic Small High Ligatu..Arabic Small High Seen
(0x006df, 0x006e4,), # Arabic Small High Rounde..Arabic Small High Madda
(0x006e7, 0x006e8,), # Arabic Small High Yeh ..Arabic Small High Noon
(0x006ea, 0x006ed,), # Arabic Empty Centre Low ..Arabic Small Low Meem
(0x00711, 0x00711,), # Syriac Letter Superscript Alaph
(0x00730, 0x0074a,), # Syriac Pthaha Above ..Syriac Barrekh
(0x007a6, 0x007b0,), # Thaana Abafili ..Thaana Sukun
(0x007eb, 0x007f3,), # Nko Combining Short High..Nko Combining Double Dot
(0x00816, 0x00819,), # Samaritan Mark In ..Samaritan Mark Dagesh
(0x0081b, 0x00823,), # Samaritan Mark Epentheti..Samaritan Vowel Sign A
(0x00825, 0x00827,), # Samaritan Vowel Sign Sho..Samaritan Vowel Sign U
(0x00829, 0x0082d,), # Samaritan Vowel Sign Lon..Samaritan Mark Nequdaa
(0x00859, 0x0085b,), # Mandaic Affrication Mark..Mandaic Gemination Mark
(0x008e3, 0x00902,), # Arabic Turned Damma Belo..Devanagari Sign Anusvara
(0x0093a, 0x0093a,), # Devanagari Vowel Sign Oe
(0x0093c, 0x0093c,), # Devanagari Sign Nukta
(0x00941, 0x00948,), # Devanagari Vowel Sign U ..Devanagari Vowel Sign Ai
(0x0094d, 0x0094d,), # Devanagari Sign Virama
(0x00951, 0x00957,), # Devanagari Stress Sign U..Devanagari Vowel Sign Uu
(0x00962, 0x00963,), # Devanagari Vowel Sign Vo..Devanagari Vowel Sign Vo
(0x00981, 0x00981,), # Bengali Sign Candrabindu
(0x009bc, 0x009bc,), # Bengali Sign Nukta
(0x009c1, 0x009c4,), # Bengali Vowel Sign U ..Bengali Vowel Sign Vocal
(0x009cd, 0x009cd,), # Bengali Sign Virama
(0x009e2, 0x009e3,), # Bengali Vowel Sign Vocal..Bengali Vowel Sign Vocal
(0x00a01, 0x00a02,), # Gurmukhi Sign Adak Bindi..Gurmukhi Sign Bindi
(0x00a3c, 0x00a3c,), # Gurmukhi Sign Nukta
(0x00a41, 0x00a42,), # Gurmukhi Vowel Sign U ..Gurmukhi Vowel Sign Uu
(0x00a47, 0x00a48,), # Gurmukhi Vowel Sign Ee ..Gurmukhi Vowel Sign Ai
(0x00a4b, 0x00a4d,), # Gurmukhi Vowel Sign Oo ..Gurmukhi Sign Virama
(0x00a51, 0x00a51,), # Gurmukhi Sign Udaat
(0x00a70, 0x00a71,), # Gurmukhi Tippi ..Gurmukhi Addak
(0x00a75, 0x00a75,), # Gurmukhi Sign Yakash
(0x00a81, 0x00a82,), # Gujarati Sign Candrabind..Gujarati Sign Anusvara
(0x00abc, 0x00abc,), # Gujarati Sign Nukta
(0x00ac1, 0x00ac5,), # Gujarati Vowel Sign U ..Gujarati Vowel Sign Cand
(0x00ac7, 0x00ac8,), # Gujarati Vowel Sign E ..Gujarati Vowel Sign Ai
(0x00acd, 0x00acd,), # Gujarati Sign Virama
(0x00ae2, 0x00ae3,), # Gujarati Vowel Sign Voca..Gujarati Vowel Sign Voca
(0x00b01, 0x00b01,), # Oriya Sign Candrabindu
(0x00b3c, 0x00b3c,), # Oriya Sign Nukta
(0x00b3f, 0x00b3f,), # Oriya Vowel Sign I
(0x00b41, 0x00b44,), # Oriya Vowel Sign U ..Oriya Vowel Sign Vocalic
(0x00b4d, 0x00b4d,), # Oriya Sign Virama
(0x00b56, 0x00b56,), # Oriya Ai Length Mark
(0x00b62, 0x00b63,), # Oriya Vowel Sign Vocalic..Oriya Vowel Sign Vocalic
(0x00b82, 0x00b82,), # Tamil Sign Anusvara
(0x00bc0, 0x00bc0,), # Tamil Vowel Sign Ii
(0x00bcd, 0x00bcd,), # Tamil Sign Virama
(0x00c00, 0x00c00,), # Telugu Sign Combining Candrabindu Above
(0x00c3e, 0x00c40,), # Telugu Vowel Sign Aa ..Telugu Vowel Sign Ii
(0x00c46, 0x00c48,), # Telugu Vowel Sign E ..Telugu Vowel Sign Ai
(0x00c4a, 0x00c4d,), # Telugu Vowel Sign O ..Telugu Sign Virama
(0x00c55, 0x00c56,), # Telugu Length Mark ..Telugu Ai Length Mark
(0x00c62, 0x00c63,), # Telugu Vowel Sign Vocali..Telugu Vowel Sign Vocali
(0x00c81, 0x00c81,), # Kannada Sign Candrabindu
(0x00cbc, 0x00cbc,), # Kannada Sign Nukta
(0x00cbf, 0x00cbf,), # Kannada Vowel Sign I
(0x00cc6, 0x00cc6,), # Kannada Vowel Sign E
(0x00ccc, 0x00ccd,), # Kannada Vowel Sign Au ..Kannada Sign Virama
(0x00ce2, 0x00ce3,), # Kannada Vowel Sign Vocal..Kannada Vowel Sign Vocal
(0x00d01, 0x00d01,), # Malayalam Sign Candrabindu
(0x00d41, 0x00d44,), # Malayalam Vowel Sign U ..Malayalam Vowel Sign Voc
(0x00d4d, 0x00d4d,), # Malayalam Sign Virama
(0x00d62, 0x00d63,), # Malayalam Vowel Sign Voc..Malayalam Vowel Sign Voc
(0x00dca, 0x00dca,), # Sinhala Sign Al-lakuna
(0x00dd2, 0x00dd4,), # Sinhala Vowel Sign Ketti..Sinhala Vowel Sign Ketti
(0x00dd6, 0x00dd6,), # Sinhala Vowel Sign Diga Paa-pilla
(0x00e31, 0x00e31,), # Thai Character Mai Han-akat
(0x00e34, 0x00e3a,), # Thai Character Sara I ..Thai Character Phinthu
(0x00e47, 0x00e4e,), # Thai Character Maitaikhu..Thai Character Yamakkan
(0x00eb1, 0x00eb1,), # Lao Vowel Sign Mai Kan
(0x00eb4, 0x00eb9,), # Lao Vowel Sign I ..Lao Vowel Sign Uu
(0x00ebb, 0x00ebc,), # Lao Vowel Sign Mai Kon ..Lao Semivowel Sign Lo
(0x00ec8, 0x00ecd,), # Lao Tone Mai Ek ..Lao Niggahita
(0x00f18, 0x00f19,), # Tibetan Astrological Sig..Tibetan Astrological Sig
(0x00f35, 0x00f35,), # Tibetan Mark Ngas Bzung Nyi Zla
(0x00f37, 0x00f37,), # Tibetan Mark Ngas Bzung Sgor Rtags
(0x00f39, 0x00f39,), # Tibetan Mark Tsa -phru
(0x00f71, 0x00f7e,), # Tibetan Vowel Sign Aa ..Tibetan Sign Rjes Su Nga
(0x00f80, 0x00f84,), # Tibetan Vowel Sign Rever..Tibetan Mark Halanta
(0x00f86, 0x00f87,), # Tibetan Sign Lci Rtags ..Tibetan Sign Yang Rtags
(0x00f8d, 0x00f97,), # Tibetan Subjoined Sign L..Tibetan Subjoined Letter
(0x00f99, 0x00fbc,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00fc6, 0x00fc6,), # Tibetan Symbol Padma Gdan
(0x0102d, 0x01030,), # Myanmar Vowel Sign I ..Myanmar Vowel Sign Uu
(0x01032, 0x01037,), # Myanmar Vowel Sign Ai ..Myanmar Sign Dot Below
(0x01039, 0x0103a,), # Myanmar Sign Virama ..Myanmar Sign Asat
(0x0103d, 0x0103e,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01058, 0x01059,), # Myanmar Vowel Sign Vocal..Myanmar Vowel Sign Vocal
(0x0105e, 0x01060,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01071, 0x01074,), # Myanmar Vowel Sign Geba ..Myanmar Vowel Sign Kayah
(0x01082, 0x01082,), # Myanmar Consonant Sign Shan Medial Wa
(0x01085, 0x01086,), # Myanmar Vowel Sign Shan ..Myanmar Vowel Sign Shan
(0x0108d, 0x0108d,), # Myanmar Sign Shan Council Emphatic Tone
(0x0109d, 0x0109d,), # Myanmar Vowel Sign Aiton Ai
(0x0135d, 0x0135f,), # Ethiopic Combining Gemin..Ethiopic Combining Gemin
(0x01712, 0x01714,), # Tagalog Vowel Sign I ..Tagalog Sign Virama
(0x01732, 0x01734,), # Hanunoo Vowel Sign I ..Hanunoo Sign Pamudpod
(0x01752, 0x01753,), # Buhid Vowel Sign I ..Buhid Vowel Sign U
(0x01772, 0x01773,), # Tagbanwa Vowel Sign I ..Tagbanwa Vowel Sign U
(0x017b4, 0x017b5,), # Khmer Vowel Inherent Aq ..Khmer Vowel Inherent Aa
(0x017b7, 0x017bd,), # Khmer Vowel Sign I ..Khmer Vowel Sign Ua
(0x017c6, 0x017c6,), # Khmer Sign Nikahit
(0x017c9, 0x017d3,), # Khmer Sign Muusikatoan ..Khmer Sign Bathamasat
(0x017dd, 0x017dd,), # Khmer Sign Atthacan
(0x0180b, 0x0180d,), # Mongolian Free Variation..Mongolian Free Variation
(0x018a9, 0x018a9,), # Mongolian Letter Ali Gali Dagalga
(0x01920, 0x01922,), # Limbu Vowel Sign A ..Limbu Vowel Sign U
(0x01927, 0x01928,), # Limbu Vowel Sign E ..Limbu Vowel Sign O
(0x01932, 0x01932,), # Limbu Small Letter Anusvara
(0x01939, 0x0193b,), # Limbu Sign Mukphreng ..Limbu Sign Sa-i
(0x01a17, 0x01a18,), # Buginese Vowel Sign I ..Buginese Vowel Sign U
(0x01a1b, 0x01a1b,), # Buginese Vowel Sign Ae
(0x01a56, 0x01a56,), # Tai Tham Consonant Sign Medial La
(0x01a58, 0x01a5e,), # Tai Tham Sign Mai Kang L..Tai Tham Consonant Sign
(0x01a60, 0x01a60,), # Tai Tham Sign Sakot
(0x01a62, 0x01a62,), # Tai Tham Vowel Sign Mai Sat
(0x01a65, 0x01a6c,), # Tai Tham Vowel Sign I ..Tai Tham Vowel Sign Oa B
(0x01a73, 0x01a7c,), # Tai Tham Vowel Sign Oa A..Tai Tham Sign Khuen-lue
(0x01a7f, 0x01a7f,), # Tai Tham Combining Cryptogrammic Dot
(0x01ab0, 0x01abe,), # Combining Doubled Circum..Combining Parentheses Ov
(0x01b00, 0x01b03,), # Balinese Sign Ulu Ricem ..Balinese Sign Surang
(0x01b34, 0x01b34,), # Balinese Sign Rerekan
(0x01b36, 0x01b3a,), # Balinese Vowel Sign Ulu ..Balinese Vowel Sign Ra R
(0x01b3c, 0x01b3c,), # Balinese Vowel Sign La Lenga
(0x01b42, 0x01b42,), # Balinese Vowel Sign Pepet
(0x01b6b, 0x01b73,), # Balinese Musical Symbol ..Balinese Musical Symbol
(0x01b80, 0x01b81,), # Sundanese Sign Panyecek ..Sundanese Sign Panglayar
(0x01ba2, 0x01ba5,), # Sundanese Consonant Sign..Sundanese Vowel Sign Pan
(0x01ba8, 0x01ba9,), # Sundanese Vowel Sign Pam..Sundanese Vowel Sign Pan
(0x01bab, 0x01bad,), # Sundanese Sign Virama ..Sundanese Consonant Sign
(0x01be6, 0x01be6,), # Batak Sign Tompi
(0x01be8, 0x01be9,), # Batak Vowel Sign Pakpak ..Batak Vowel Sign Ee
(0x01bed, 0x01bed,), # Batak Vowel Sign Karo O
(0x01bef, 0x01bf1,), # Batak Vowel Sign U For S..Batak Consonant Sign H
(0x01c2c, 0x01c33,), # Lepcha Vowel Sign E ..Lepcha Consonant Sign T
(0x01c36, 0x01c37,), # Lepcha Sign Ran ..Lepcha Sign Nukta
(0x01cd0, 0x01cd2,), # Vedic Tone Karshana ..Vedic Tone Prenkha
(0x01cd4, 0x01ce0,), # Vedic Sign Yajurvedic Mi..Vedic Tone Rigvedic Kash
(0x01ce2, 0x01ce8,), # Vedic Sign Visarga Svari..Vedic Sign Visarga Anuda
(0x01ced, 0x01ced,), # Vedic Sign Tiryak
(0x01cf4, 0x01cf4,), # Vedic Tone Candra Above
(0x01cf8, 0x01cf9,), # Vedic Tone Ring Above ..Vedic Tone Double Ring A
(0x01dc0, 0x01df5,), # Combining Dotted Grave A..Combining Up Tack Above
(0x01dfc, 0x01dff,), # Combining Double Inverte..Combining Right Arrowhea
(0x020d0, 0x020f0,), # Combining Left Harpoon A..Combining Asterisk Above
(0x02cef, 0x02cf1,), # Coptic Combining Ni Abov..Coptic Combining Spiritu
(0x02d7f, 0x02d7f,), # Tifinagh Consonant Joiner
(0x02de0, 0x02dff,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0302a, 0x0302d,), # Ideographic Level Tone M..Ideographic Entering Ton
(0x03099, 0x0309a,), # Combining Katakana-hirag..Combining Katakana-hirag
(0x0a66f, 0x0a672,), # Combining Cyrillic Vzmet..Combining Cyrillic Thous
(0x0a674, 0x0a67d,), # Combining Cyrillic Lette..Combining Cyrillic Payer
(0x0a69e, 0x0a69f,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0a6f0, 0x0a6f1,), # Bamum Combining Mark Koq..Bamum Combining Mark Tuk
(0x0a802, 0x0a802,), # Syloti Nagri Sign Dvisvara
(0x0a806, 0x0a806,), # Syloti Nagri Sign Hasanta
(0x0a80b, 0x0a80b,), # Syloti Nagri Sign Anusvara
(0x0a825, 0x0a826,), # Syloti Nagri Vowel Sign ..Syloti Nagri Vowel Sign
(0x0a8c4, 0x0a8c4,), # Saurashtra Sign Virama
(0x0a8e0, 0x0a8f1,), # Combining Devanagari Dig..Combining Devanagari Sig
(0x0a926, 0x0a92d,), # Kayah Li Vowel Ue ..Kayah Li Tone Calya Plop
(0x0a947, 0x0a951,), # Rejang Vowel Sign I ..Rejang Consonant Sign R
(0x0a980, 0x0a982,), # Javanese Sign Panyangga ..Javanese Sign Layar
(0x0a9b3, 0x0a9b3,), # Javanese Sign Cecak Telu
(0x0a9b6, 0x0a9b9,), # Javanese Vowel Sign Wulu..Javanese Vowel Sign Suku
(0x0a9bc, 0x0a9bc,), # Javanese Vowel Sign Pepet
(0x0a9e5, 0x0a9e5,), # Myanmar Sign Shan Saw
(0x0aa29, 0x0aa2e,), # Cham Vowel Sign Aa ..Cham Vowel Sign Oe
(0x0aa31, 0x0aa32,), # Cham Vowel Sign Au ..Cham Vowel Sign Ue
(0x0aa35, 0x0aa36,), # Cham Consonant Sign La ..Cham Consonant Sign Wa
(0x0aa43, 0x0aa43,), # Cham Consonant Sign Final Ng
(0x0aa4c, 0x0aa4c,), # Cham Consonant Sign Final M
(0x0aa7c, 0x0aa7c,), # Myanmar Sign Tai Laing Tone-2
(0x0aab0, 0x0aab0,), # Tai Viet Mai Kang
(0x0aab2, 0x0aab4,), # Tai Viet Vowel I ..Tai Viet Vowel U
(0x0aab7, 0x0aab8,), # Tai Viet Mai Khit ..Tai Viet Vowel Ia
(0x0aabe, 0x0aabf,), # Tai Viet Vowel Am ..Tai Viet Tone Mai Ek
(0x0aac1, 0x0aac1,), # Tai Viet Tone Mai Tho
(0x0aaec, 0x0aaed,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0aaf6, 0x0aaf6,), # Meetei Mayek Virama
(0x0abe5, 0x0abe5,), # Meetei Mayek Vowel Sign Anap
(0x0abe8, 0x0abe8,), # Meetei Mayek Vowel Sign Unap
(0x0abed, 0x0abed,), # Meetei Mayek Apun Iyek
(0x0fb1e, 0x0fb1e,), # Hebrew Point Judeo-spanish Varika
(0x0fe00, 0x0fe0f,), # Variation Selector-1 ..Variation Selector-16
(0x0fe20, 0x0fe2f,), # Combining Ligature Left ..Combining Cyrillic Titlo
(0x101fd, 0x101fd,), # Phaistos Disc Sign Combining Oblique Stroke
(0x102e0, 0x102e0,), # Coptic Epact Thousands Mark
(0x10376, 0x1037a,), # Combining Old Permic Let..Combining Old Permic Let
(0x10a01, 0x10a03,), # Kharoshthi Vowel Sign I ..Kharoshthi Vowel Sign Vo
(0x10a05, 0x10a06,), # Kharoshthi Vowel Sign E ..Kharoshthi Vowel Sign O
(0x10a0c, 0x10a0f,), # Kharoshthi Vowel Length ..Kharoshthi Sign Visarga
(0x10a38, 0x10a3a,), # Kharoshthi Sign Bar Abov..Kharoshthi Sign Dot Belo
(0x10a3f, 0x10a3f,), # Kharoshthi Virama
(0x10ae5, 0x10ae6,), # Manichaean Abbreviation ..Manichaean Abbreviation
(0x11001, 0x11001,), # Brahmi Sign Anusvara
(0x11038, 0x11046,), # Brahmi Vowel Sign Aa ..Brahmi Virama
(0x1107f, 0x11081,), # Brahmi Number Joiner ..Kaithi Sign Anusvara
(0x110b3, 0x110b6,), # Kaithi Vowel Sign U ..Kaithi Vowel Sign Ai
(0x110b9, 0x110ba,), # Kaithi Sign Virama ..Kaithi Sign Nukta
(0x11100, 0x11102,), # Chakma Sign Candrabindu ..Chakma Sign Visarga
(0x11127, 0x1112b,), # Chakma Vowel Sign A ..Chakma Vowel Sign Uu
(0x1112d, 0x11134,), # Chakma Vowel Sign Ai ..Chakma Maayyaa
(0x11173, 0x11173,), # Mahajani Sign Nukta
(0x11180, 0x11181,), # Sharada Sign Candrabindu..Sharada Sign Anusvara
(0x111b6, 0x111be,), # Sharada Vowel Sign U ..Sharada Vowel Sign O
(0x111ca, 0x111cc,), # Sharada Sign Nukta ..Sharada Extra Short Vowe
(0x1122f, 0x11231,), # Khojki Vowel Sign U ..Khojki Vowel Sign Ai
(0x11234, 0x11234,), # Khojki Sign Anusvara
(0x11236, 0x11237,), # Khojki Sign Nukta ..Khojki Sign Shadda
(0x112df, 0x112df,), # Khudawadi Sign Anusvara
(0x112e3, 0x112ea,), # Khudawadi Vowel Sign U ..Khudawadi Sign Virama
(0x11300, 0x11301,), # Grantha Sign Combining A..Grantha Sign Candrabindu
(0x1133c, 0x1133c,), # Grantha Sign Nukta
(0x11340, 0x11340,), # Grantha Vowel Sign Ii
(0x11366, 0x1136c,), # Combining Grantha Digit ..Combining Grantha Digit
(0x11370, 0x11374,), # Combining Grantha Letter..Combining Grantha Letter
(0x114b3, 0x114b8,), # Tirhuta Vowel Sign U ..Tirhuta Vowel Sign Vocal
(0x114ba, 0x114ba,), # Tirhuta Vowel Sign Short E
(0x114bf, 0x114c0,), # Tirhuta Sign Candrabindu..Tirhuta Sign Anusvara
(0x114c2, 0x114c3,), # Tirhuta Sign Virama ..Tirhuta Sign Nukta
(0x115b2, 0x115b5,), # Siddham Vowel Sign U ..Siddham Vowel Sign Vocal
(0x115bc, 0x115bd,), # Siddham Sign Candrabindu..Siddham Sign Anusvara
(0x115bf, 0x115c0,), # Siddham Sign Virama ..Siddham Sign Nukta
(0x115dc, 0x115dd,), # Siddham Vowel Sign Alter..Siddham Vowel Sign Alter
(0x11633, 0x1163a,), # Modi Vowel Sign U ..Modi Vowel Sign Ai
(0x1163d, 0x1163d,), # Modi Sign Anusvara
(0x1163f, 0x11640,), # Modi Sign Virama ..Modi Sign Ardhacandra
(0x116ab, 0x116ab,), # Takri Sign Anusvara
(0x116ad, 0x116ad,), # Takri Vowel Sign Aa
(0x116b0, 0x116b5,), # Takri Vowel Sign U ..Takri Vowel Sign Au
(0x116b7, 0x116b7,), # Takri Sign Nukta
(0x1171d, 0x1171f,), # Ahom Consonant Sign Medi..Ahom Consonant Sign Medi
(0x11722, 0x11725,), # Ahom Vowel Sign I ..Ahom Vowel Sign Uu
(0x11727, 0x1172b,), # Ahom Vowel Sign Aw ..Ahom Sign Killer
(0x16af0, 0x16af4,), # Bassa Vah Combining High..Bassa Vah Combining High
(0x16b30, 0x16b36,), # Pahawh Hmong Mark Cim Tu..Pahawh Hmong Mark Cim Ta
(0x16f8f, 0x16f92,), # Miao Tone Right ..Miao Tone Below
(0x1bc9d, 0x1bc9e,), # Duployan Thick Letter Se..Duployan Double Mark
(0x1d167, 0x1d169,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d17b, 0x1d182,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d185, 0x1d18b,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d1aa, 0x1d1ad,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d242, 0x1d244,), # Combining Greek Musical ..Combining Greek Musical
(0x1da00, 0x1da36,), # Signwriting Head Rim ..Signwriting Air Sucking
(0x1da3b, 0x1da6c,), # Signwriting Mouth Closed..Signwriting Excitement
(0x1da75, 0x1da75,), # Signwriting Upper Body Tilting From Hip Joints
(0x1da84, 0x1da84,), # Signwriting Location Head Neck
(0x1da9b, 0x1da9f,), # Signwriting Fill Modifie..Signwriting Fill Modifie
(0x1daa1, 0x1daaf,), # Signwriting Rotation Mod..Signwriting Rotation Mod
(0x1e8d0, 0x1e8d6,), # Mende Kikakui Combining ..Mende Kikakui Combining
(0xe0100, 0xe01ef,), # Variation Selector-17 ..Variation Selector-256
),
'9.0.0': (
# Source: DerivedGeneralCategory-9.0.0.txt
# Date: 2016-06-01, 10:34:26 GMT
#
(0x00300, 0x0036f,), # Combining Grave Accent ..Combining Latin Small Le
(0x00483, 0x00489,), # Combining Cyrillic Titlo..Combining Cyrillic Milli
(0x00591, 0x005bd,), # Hebrew Accent Etnahta ..Hebrew Point Meteg
(0x005bf, 0x005bf,), # Hebrew Point Rafe
(0x005c1, 0x005c2,), # Hebrew Point Shin Dot ..Hebrew Point Sin Dot
(0x005c4, 0x005c5,), # Hebrew Mark Upper Dot ..Hebrew Mark Lower Dot
(0x005c7, 0x005c7,), # Hebrew Point Qamats Qatan
(0x00610, 0x0061a,), # Arabic Sign Sallallahou ..Arabic Small Kasra
(0x0064b, 0x0065f,), # Arabic Fathatan ..Arabic Wavy Hamza Below
(0x00670, 0x00670,), # Arabic Letter Superscript Alef
(0x006d6, 0x006dc,), # Arabic Small High Ligatu..Arabic Small High Seen
(0x006df, 0x006e4,), # Arabic Small High Rounde..Arabic Small High Madda
(0x006e7, 0x006e8,), # Arabic Small High Yeh ..Arabic Small High Noon
(0x006ea, 0x006ed,), # Arabic Empty Centre Low ..Arabic Small Low Meem
(0x00711, 0x00711,), # Syriac Letter Superscript Alaph
(0x00730, 0x0074a,), # Syriac Pthaha Above ..Syriac Barrekh
(0x007a6, 0x007b0,), # Thaana Abafili ..Thaana Sukun
(0x007eb, 0x007f3,), # Nko Combining Short High..Nko Combining Double Dot
(0x00816, 0x00819,), # Samaritan Mark In ..Samaritan Mark Dagesh
(0x0081b, 0x00823,), # Samaritan Mark Epentheti..Samaritan Vowel Sign A
(0x00825, 0x00827,), # Samaritan Vowel Sign Sho..Samaritan Vowel Sign U
(0x00829, 0x0082d,), # Samaritan Vowel Sign Lon..Samaritan Mark Nequdaa
(0x00859, 0x0085b,), # Mandaic Affrication Mark..Mandaic Gemination Mark
(0x008d4, 0x008e1,), # Arabic Small High Word A..Arabic Small High Sign S
(0x008e3, 0x00902,), # Arabic Turned Damma Belo..Devanagari Sign Anusvara
(0x0093a, 0x0093a,), # Devanagari Vowel Sign Oe
(0x0093c, 0x0093c,), # Devanagari Sign Nukta
(0x00941, 0x00948,), # Devanagari Vowel Sign U ..Devanagari Vowel Sign Ai
(0x0094d, 0x0094d,), # Devanagari Sign Virama
(0x00951, 0x00957,), # Devanagari Stress Sign U..Devanagari Vowel Sign Uu
(0x00962, 0x00963,), # Devanagari Vowel Sign Vo..Devanagari Vowel Sign Vo
(0x00981, 0x00981,), # Bengali Sign Candrabindu
(0x009bc, 0x009bc,), # Bengali Sign Nukta
(0x009c1, 0x009c4,), # Bengali Vowel Sign U ..Bengali Vowel Sign Vocal
(0x009cd, 0x009cd,), # Bengali Sign Virama
(0x009e2, 0x009e3,), # Bengali Vowel Sign Vocal..Bengali Vowel Sign Vocal
(0x00a01, 0x00a02,), # Gurmukhi Sign Adak Bindi..Gurmukhi Sign Bindi
(0x00a3c, 0x00a3c,), # Gurmukhi Sign Nukta
(0x00a41, 0x00a42,), # Gurmukhi Vowel Sign U ..Gurmukhi Vowel Sign Uu
(0x00a47, 0x00a48,), # Gurmukhi Vowel Sign Ee ..Gurmukhi Vowel Sign Ai
(0x00a4b, 0x00a4d,), # Gurmukhi Vowel Sign Oo ..Gurmukhi Sign Virama
(0x00a51, 0x00a51,), # Gurmukhi Sign Udaat
(0x00a70, 0x00a71,), # Gurmukhi Tippi ..Gurmukhi Addak
(0x00a75, 0x00a75,), # Gurmukhi Sign Yakash
(0x00a81, 0x00a82,), # Gujarati Sign Candrabind..Gujarati Sign Anusvara
(0x00abc, 0x00abc,), # Gujarati Sign Nukta
(0x00ac1, 0x00ac5,), # Gujarati Vowel Sign U ..Gujarati Vowel Sign Cand
(0x00ac7, 0x00ac8,), # Gujarati Vowel Sign E ..Gujarati Vowel Sign Ai
(0x00acd, 0x00acd,), # Gujarati Sign Virama
(0x00ae2, 0x00ae3,), # Gujarati Vowel Sign Voca..Gujarati Vowel Sign Voca
(0x00b01, 0x00b01,), # Oriya Sign Candrabindu
(0x00b3c, 0x00b3c,), # Oriya Sign Nukta
(0x00b3f, 0x00b3f,), # Oriya Vowel Sign I
(0x00b41, 0x00b44,), # Oriya Vowel Sign U ..Oriya Vowel Sign Vocalic
(0x00b4d, 0x00b4d,), # Oriya Sign Virama
(0x00b56, 0x00b56,), # Oriya Ai Length Mark
(0x00b62, 0x00b63,), # Oriya Vowel Sign Vocalic..Oriya Vowel Sign Vocalic
(0x00b82, 0x00b82,), # Tamil Sign Anusvara
(0x00bc0, 0x00bc0,), # Tamil Vowel Sign Ii
(0x00bcd, 0x00bcd,), # Tamil Sign Virama
(0x00c00, 0x00c00,), # Telugu Sign Combining Candrabindu Above
(0x00c3e, 0x00c40,), # Telugu Vowel Sign Aa ..Telugu Vowel Sign Ii
(0x00c46, 0x00c48,), # Telugu Vowel Sign E ..Telugu Vowel Sign Ai
(0x00c4a, 0x00c4d,), # Telugu Vowel Sign O ..Telugu Sign Virama
(0x00c55, 0x00c56,), # Telugu Length Mark ..Telugu Ai Length Mark
(0x00c62, 0x00c63,), # Telugu Vowel Sign Vocali..Telugu Vowel Sign Vocali
(0x00c81, 0x00c81,), # Kannada Sign Candrabindu
(0x00cbc, 0x00cbc,), # Kannada Sign Nukta
(0x00cbf, 0x00cbf,), # Kannada Vowel Sign I
(0x00cc6, 0x00cc6,), # Kannada Vowel Sign E
(0x00ccc, 0x00ccd,), # Kannada Vowel Sign Au ..Kannada Sign Virama
(0x00ce2, 0x00ce3,), # Kannada Vowel Sign Vocal..Kannada Vowel Sign Vocal
(0x00d01, 0x00d01,), # Malayalam Sign Candrabindu
(0x00d41, 0x00d44,), # Malayalam Vowel Sign U ..Malayalam Vowel Sign Voc
(0x00d4d, 0x00d4d,), # Malayalam Sign Virama
(0x00d62, 0x00d63,), # Malayalam Vowel Sign Voc..Malayalam Vowel Sign Voc
(0x00dca, 0x00dca,), # Sinhala Sign Al-lakuna
(0x00dd2, 0x00dd4,), # Sinhala Vowel Sign Ketti..Sinhala Vowel Sign Ketti
(0x00dd6, 0x00dd6,), # Sinhala Vowel Sign Diga Paa-pilla
(0x00e31, 0x00e31,), # Thai Character Mai Han-akat
(0x00e34, 0x00e3a,), # Thai Character Sara I ..Thai Character Phinthu
(0x00e47, 0x00e4e,), # Thai Character Maitaikhu..Thai Character Yamakkan
(0x00eb1, 0x00eb1,), # Lao Vowel Sign Mai Kan
(0x00eb4, 0x00eb9,), # Lao Vowel Sign I ..Lao Vowel Sign Uu
(0x00ebb, 0x00ebc,), # Lao Vowel Sign Mai Kon ..Lao Semivowel Sign Lo
(0x00ec8, 0x00ecd,), # Lao Tone Mai Ek ..Lao Niggahita
(0x00f18, 0x00f19,), # Tibetan Astrological Sig..Tibetan Astrological Sig
(0x00f35, 0x00f35,), # Tibetan Mark Ngas Bzung Nyi Zla
(0x00f37, 0x00f37,), # Tibetan Mark Ngas Bzung Sgor Rtags
(0x00f39, 0x00f39,), # Tibetan Mark Tsa -phru
(0x00f71, 0x00f7e,), # Tibetan Vowel Sign Aa ..Tibetan Sign Rjes Su Nga
(0x00f80, 0x00f84,), # Tibetan Vowel Sign Rever..Tibetan Mark Halanta
(0x00f86, 0x00f87,), # Tibetan Sign Lci Rtags ..Tibetan Sign Yang Rtags
(0x00f8d, 0x00f97,), # Tibetan Subjoined Sign L..Tibetan Subjoined Letter
(0x00f99, 0x00fbc,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00fc6, 0x00fc6,), # Tibetan Symbol Padma Gdan
(0x0102d, 0x01030,), # Myanmar Vowel Sign I ..Myanmar Vowel Sign Uu
(0x01032, 0x01037,), # Myanmar Vowel Sign Ai ..Myanmar Sign Dot Below
(0x01039, 0x0103a,), # Myanmar Sign Virama ..Myanmar Sign Asat
(0x0103d, 0x0103e,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01058, 0x01059,), # Myanmar Vowel Sign Vocal..Myanmar Vowel Sign Vocal
(0x0105e, 0x01060,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01071, 0x01074,), # Myanmar Vowel Sign Geba ..Myanmar Vowel Sign Kayah
(0x01082, 0x01082,), # Myanmar Consonant Sign Shan Medial Wa
(0x01085, 0x01086,), # Myanmar Vowel Sign Shan ..Myanmar Vowel Sign Shan
(0x0108d, 0x0108d,), # Myanmar Sign Shan Council Emphatic Tone
(0x0109d, 0x0109d,), # Myanmar Vowel Sign Aiton Ai
(0x0135d, 0x0135f,), # Ethiopic Combining Gemin..Ethiopic Combining Gemin
(0x01712, 0x01714,), # Tagalog Vowel Sign I ..Tagalog Sign Virama
(0x01732, 0x01734,), # Hanunoo Vowel Sign I ..Hanunoo Sign Pamudpod
(0x01752, 0x01753,), # Buhid Vowel Sign I ..Buhid Vowel Sign U
(0x01772, 0x01773,), # Tagbanwa Vowel Sign I ..Tagbanwa Vowel Sign U
(0x017b4, 0x017b5,), # Khmer Vowel Inherent Aq ..Khmer Vowel Inherent Aa
(0x017b7, 0x017bd,), # Khmer Vowel Sign I ..Khmer Vowel Sign Ua
(0x017c6, 0x017c6,), # Khmer Sign Nikahit
(0x017c9, 0x017d3,), # Khmer Sign Muusikatoan ..Khmer Sign Bathamasat
(0x017dd, 0x017dd,), # Khmer Sign Atthacan
(0x0180b, 0x0180d,), # Mongolian Free Variation..Mongolian Free Variation
(0x01885, 0x01886,), # Mongolian Letter Ali Gal..Mongolian Letter Ali Gal
(0x018a9, 0x018a9,), # Mongolian Letter Ali Gali Dagalga
(0x01920, 0x01922,), # Limbu Vowel Sign A ..Limbu Vowel Sign U
(0x01927, 0x01928,), # Limbu Vowel Sign E ..Limbu Vowel Sign O
(0x01932, 0x01932,), # Limbu Small Letter Anusvara
(0x01939, 0x0193b,), # Limbu Sign Mukphreng ..Limbu Sign Sa-i
(0x01a17, 0x01a18,), # Buginese Vowel Sign I ..Buginese Vowel Sign U
(0x01a1b, 0x01a1b,), # Buginese Vowel Sign Ae
(0x01a56, 0x01a56,), # Tai Tham Consonant Sign Medial La
(0x01a58, 0x01a5e,), # Tai Tham Sign Mai Kang L..Tai Tham Consonant Sign
(0x01a60, 0x01a60,), # Tai Tham Sign Sakot
(0x01a62, 0x01a62,), # Tai Tham Vowel Sign Mai Sat
(0x01a65, 0x01a6c,), # Tai Tham Vowel Sign I ..Tai Tham Vowel Sign Oa B
(0x01a73, 0x01a7c,), # Tai Tham Vowel Sign Oa A..Tai Tham Sign Khuen-lue
(0x01a7f, 0x01a7f,), # Tai Tham Combining Cryptogrammic Dot
(0x01ab0, 0x01abe,), # Combining Doubled Circum..Combining Parentheses Ov
(0x01b00, 0x01b03,), # Balinese Sign Ulu Ricem ..Balinese Sign Surang
(0x01b34, 0x01b34,), # Balinese Sign Rerekan
(0x01b36, 0x01b3a,), # Balinese Vowel Sign Ulu ..Balinese Vowel Sign Ra R
(0x01b3c, 0x01b3c,), # Balinese Vowel Sign La Lenga
(0x01b42, 0x01b42,), # Balinese Vowel Sign Pepet
(0x01b6b, 0x01b73,), # Balinese Musical Symbol ..Balinese Musical Symbol
(0x01b80, 0x01b81,), # Sundanese Sign Panyecek ..Sundanese Sign Panglayar
(0x01ba2, 0x01ba5,), # Sundanese Consonant Sign..Sundanese Vowel Sign Pan
(0x01ba8, 0x01ba9,), # Sundanese Vowel Sign Pam..Sundanese Vowel Sign Pan
(0x01bab, 0x01bad,), # Sundanese Sign Virama ..Sundanese Consonant Sign
(0x01be6, 0x01be6,), # Batak Sign Tompi
(0x01be8, 0x01be9,), # Batak Vowel Sign Pakpak ..Batak Vowel Sign Ee
(0x01bed, 0x01bed,), # Batak Vowel Sign Karo O
(0x01bef, 0x01bf1,), # Batak Vowel Sign U For S..Batak Consonant Sign H
(0x01c2c, 0x01c33,), # Lepcha Vowel Sign E ..Lepcha Consonant Sign T
(0x01c36, 0x01c37,), # Lepcha Sign Ran ..Lepcha Sign Nukta
(0x01cd0, 0x01cd2,), # Vedic Tone Karshana ..Vedic Tone Prenkha
(0x01cd4, 0x01ce0,), # Vedic Sign Yajurvedic Mi..Vedic Tone Rigvedic Kash
(0x01ce2, 0x01ce8,), # Vedic Sign Visarga Svari..Vedic Sign Visarga Anuda
(0x01ced, 0x01ced,), # Vedic Sign Tiryak
(0x01cf4, 0x01cf4,), # Vedic Tone Candra Above
(0x01cf8, 0x01cf9,), # Vedic Tone Ring Above ..Vedic Tone Double Ring A
(0x01dc0, 0x01df5,), # Combining Dotted Grave A..Combining Up Tack Above
(0x01dfb, 0x01dff,), # Combining Deletion Mark ..Combining Right Arrowhea
(0x020d0, 0x020f0,), # Combining Left Harpoon A..Combining Asterisk Above
(0x02cef, 0x02cf1,), # Coptic Combining Ni Abov..Coptic Combining Spiritu
(0x02d7f, 0x02d7f,), # Tifinagh Consonant Joiner
(0x02de0, 0x02dff,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0302a, 0x0302d,), # Ideographic Level Tone M..Ideographic Entering Ton
(0x03099, 0x0309a,), # Combining Katakana-hirag..Combining Katakana-hirag
(0x0a66f, 0x0a672,), # Combining Cyrillic Vzmet..Combining Cyrillic Thous
(0x0a674, 0x0a67d,), # Combining Cyrillic Lette..Combining Cyrillic Payer
(0x0a69e, 0x0a69f,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0a6f0, 0x0a6f1,), # Bamum Combining Mark Koq..Bamum Combining Mark Tuk
(0x0a802, 0x0a802,), # Syloti Nagri Sign Dvisvara
(0x0a806, 0x0a806,), # Syloti Nagri Sign Hasanta
(0x0a80b, 0x0a80b,), # Syloti Nagri Sign Anusvara
(0x0a825, 0x0a826,), # Syloti Nagri Vowel Sign ..Syloti Nagri Vowel Sign
(0x0a8c4, 0x0a8c5,), # Saurashtra Sign Virama ..Saurashtra Sign Candrabi
(0x0a8e0, 0x0a8f1,), # Combining Devanagari Dig..Combining Devanagari Sig
(0x0a926, 0x0a92d,), # Kayah Li Vowel Ue ..Kayah Li Tone Calya Plop
(0x0a947, 0x0a951,), # Rejang Vowel Sign I ..Rejang Consonant Sign R
(0x0a980, 0x0a982,), # Javanese Sign Panyangga ..Javanese Sign Layar
(0x0a9b3, 0x0a9b3,), # Javanese Sign Cecak Telu
(0x0a9b6, 0x0a9b9,), # Javanese Vowel Sign Wulu..Javanese Vowel Sign Suku
(0x0a9bc, 0x0a9bc,), # Javanese Vowel Sign Pepet
(0x0a9e5, 0x0a9e5,), # Myanmar Sign Shan Saw
(0x0aa29, 0x0aa2e,), # Cham Vowel Sign Aa ..Cham Vowel Sign Oe
(0x0aa31, 0x0aa32,), # Cham Vowel Sign Au ..Cham Vowel Sign Ue
(0x0aa35, 0x0aa36,), # Cham Consonant Sign La ..Cham Consonant Sign Wa
(0x0aa43, 0x0aa43,), # Cham Consonant Sign Final Ng
(0x0aa4c, 0x0aa4c,), # Cham Consonant Sign Final M
(0x0aa7c, 0x0aa7c,), # Myanmar Sign Tai Laing Tone-2
(0x0aab0, 0x0aab0,), # Tai Viet Mai Kang
(0x0aab2, 0x0aab4,), # Tai Viet Vowel I ..Tai Viet Vowel U
(0x0aab7, 0x0aab8,), # Tai Viet Mai Khit ..Tai Viet Vowel Ia
(0x0aabe, 0x0aabf,), # Tai Viet Vowel Am ..Tai Viet Tone Mai Ek
(0x0aac1, 0x0aac1,), # Tai Viet Tone Mai Tho
(0x0aaec, 0x0aaed,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0aaf6, 0x0aaf6,), # Meetei Mayek Virama
(0x0abe5, 0x0abe5,), # Meetei Mayek Vowel Sign Anap
(0x0abe8, 0x0abe8,), # Meetei Mayek Vowel Sign Unap
(0x0abed, 0x0abed,), # Meetei Mayek Apun Iyek
(0x0fb1e, 0x0fb1e,), # Hebrew Point Judeo-spanish Varika
(0x0fe00, 0x0fe0f,), # Variation Selector-1 ..Variation Selector-16
(0x0fe20, 0x0fe2f,), # Combining Ligature Left ..Combining Cyrillic Titlo
(0x101fd, 0x101fd,), # Phaistos Disc Sign Combining Oblique Stroke
(0x102e0, 0x102e0,), # Coptic Epact Thousands Mark
(0x10376, 0x1037a,), # Combining Old Permic Let..Combining Old Permic Let
(0x10a01, 0x10a03,), # Kharoshthi Vowel Sign I ..Kharoshthi Vowel Sign Vo
(0x10a05, 0x10a06,), # Kharoshthi Vowel Sign E ..Kharoshthi Vowel Sign O
(0x10a0c, 0x10a0f,), # Kharoshthi Vowel Length ..Kharoshthi Sign Visarga
(0x10a38, 0x10a3a,), # Kharoshthi Sign Bar Abov..Kharoshthi Sign Dot Belo
(0x10a3f, 0x10a3f,), # Kharoshthi Virama
(0x10ae5, 0x10ae6,), # Manichaean Abbreviation ..Manichaean Abbreviation
(0x11001, 0x11001,), # Brahmi Sign Anusvara
(0x11038, 0x11046,), # Brahmi Vowel Sign Aa ..Brahmi Virama
(0x1107f, 0x11081,), # Brahmi Number Joiner ..Kaithi Sign Anusvara
(0x110b3, 0x110b6,), # Kaithi Vowel Sign U ..Kaithi Vowel Sign Ai
(0x110b9, 0x110ba,), # Kaithi Sign Virama ..Kaithi Sign Nukta
(0x11100, 0x11102,), # Chakma Sign Candrabindu ..Chakma Sign Visarga
(0x11127, 0x1112b,), # Chakma Vowel Sign A ..Chakma Vowel Sign Uu
(0x1112d, 0x11134,), # Chakma Vowel Sign Ai ..Chakma Maayyaa
(0x11173, 0x11173,), # Mahajani Sign Nukta
(0x11180, 0x11181,), # Sharada Sign Candrabindu..Sharada Sign Anusvara
(0x111b6, 0x111be,), # Sharada Vowel Sign U ..Sharada Vowel Sign O
(0x111ca, 0x111cc,), # Sharada Sign Nukta ..Sharada Extra Short Vowe
(0x1122f, 0x11231,), # Khojki Vowel Sign U ..Khojki Vowel Sign Ai
(0x11234, 0x11234,), # Khojki Sign Anusvara
(0x11236, 0x11237,), # Khojki Sign Nukta ..Khojki Sign Shadda
(0x1123e, 0x1123e,), # Khojki Sign Sukun
(0x112df, 0x112df,), # Khudawadi Sign Anusvara
(0x112e3, 0x112ea,), # Khudawadi Vowel Sign U ..Khudawadi Sign Virama
(0x11300, 0x11301,), # Grantha Sign Combining A..Grantha Sign Candrabindu
(0x1133c, 0x1133c,), # Grantha Sign Nukta
(0x11340, 0x11340,), # Grantha Vowel Sign Ii
(0x11366, 0x1136c,), # Combining Grantha Digit ..Combining Grantha Digit
(0x11370, 0x11374,), # Combining Grantha Letter..Combining Grantha Letter
(0x11438, 0x1143f,), # Newa Vowel Sign U ..Newa Vowel Sign Ai
(0x11442, 0x11444,), # Newa Sign Virama ..Newa Sign Anusvara
(0x11446, 0x11446,), # Newa Sign Nukta
(0x114b3, 0x114b8,), # Tirhuta Vowel Sign U ..Tirhuta Vowel Sign Vocal
(0x114ba, 0x114ba,), # Tirhuta Vowel Sign Short E
(0x114bf, 0x114c0,), # Tirhuta Sign Candrabindu..Tirhuta Sign Anusvara
(0x114c2, 0x114c3,), # Tirhuta Sign Virama ..Tirhuta Sign Nukta
(0x115b2, 0x115b5,), # Siddham Vowel Sign U ..Siddham Vowel Sign Vocal
(0x115bc, 0x115bd,), # Siddham Sign Candrabindu..Siddham Sign Anusvara
(0x115bf, 0x115c0,), # Siddham Sign Virama ..Siddham Sign Nukta
(0x115dc, 0x115dd,), # Siddham Vowel Sign Alter..Siddham Vowel Sign Alter
(0x11633, 0x1163a,), # Modi Vowel Sign U ..Modi Vowel Sign Ai
(0x1163d, 0x1163d,), # Modi Sign Anusvara
(0x1163f, 0x11640,), # Modi Sign Virama ..Modi Sign Ardhacandra
(0x116ab, 0x116ab,), # Takri Sign Anusvara
(0x116ad, 0x116ad,), # Takri Vowel Sign Aa
(0x116b0, 0x116b5,), # Takri Vowel Sign U ..Takri Vowel Sign Au
(0x116b7, 0x116b7,), # Takri Sign Nukta
(0x1171d, 0x1171f,), # Ahom Consonant Sign Medi..Ahom Consonant Sign Medi
(0x11722, 0x11725,), # Ahom Vowel Sign I ..Ahom Vowel Sign Uu
(0x11727, 0x1172b,), # Ahom Vowel Sign Aw ..Ahom Sign Killer
(0x11c30, 0x11c36,), # Bhaiksuki Vowel Sign I ..Bhaiksuki Vowel Sign Voc
(0x11c38, 0x11c3d,), # Bhaiksuki Vowel Sign E ..Bhaiksuki Sign Anusvara
(0x11c3f, 0x11c3f,), # Bhaiksuki Sign Virama
(0x11c92, 0x11ca7,), # Marchen Subjoined Letter..Marchen Subjoined Letter
(0x11caa, 0x11cb0,), # Marchen Subjoined Letter..Marchen Vowel Sign Aa
(0x11cb2, 0x11cb3,), # Marchen Vowel Sign U ..Marchen Vowel Sign E
(0x11cb5, 0x11cb6,), # Marchen Sign Anusvara ..Marchen Sign Candrabindu
(0x16af0, 0x16af4,), # Bassa Vah Combining High..Bassa Vah Combining High
(0x16b30, 0x16b36,), # Pahawh Hmong Mark Cim Tu..Pahawh Hmong Mark Cim Ta
(0x16f8f, 0x16f92,), # Miao Tone Right ..Miao Tone Below
(0x1bc9d, 0x1bc9e,), # Duployan Thick Letter Se..Duployan Double Mark
(0x1d167, 0x1d169,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d17b, 0x1d182,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d185, 0x1d18b,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d1aa, 0x1d1ad,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d242, 0x1d244,), # Combining Greek Musical ..Combining Greek Musical
(0x1da00, 0x1da36,), # Signwriting Head Rim ..Signwriting Air Sucking
(0x1da3b, 0x1da6c,), # Signwriting Mouth Closed..Signwriting Excitement
(0x1da75, 0x1da75,), # Signwriting Upper Body Tilting From Hip Joints
(0x1da84, 0x1da84,), # Signwriting Location Head Neck
(0x1da9b, 0x1da9f,), # Signwriting Fill Modifie..Signwriting Fill Modifie
(0x1daa1, 0x1daaf,), # Signwriting Rotation Mod..Signwriting Rotation Mod
(0x1e000, 0x1e006,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e008, 0x1e018,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e01b, 0x1e021,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e023, 0x1e024,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e026, 0x1e02a,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e8d0, 0x1e8d6,), # Mende Kikakui Combining ..Mende Kikakui Combining
(0x1e944, 0x1e94a,), # Adlam Alif Lengthener ..Adlam Nukta
(0xe0100, 0xe01ef,), # Variation Selector-17 ..Variation Selector-256
),
'10.0.0': (
# Source: DerivedGeneralCategory-10.0.0.txt
# Date: 2017-03-08, 08:41:49 GMT
#
(0x00300, 0x0036f,), # Combining Grave Accent ..Combining Latin Small Le
(0x00483, 0x00489,), # Combining Cyrillic Titlo..Combining Cyrillic Milli
(0x00591, 0x005bd,), # Hebrew Accent Etnahta ..Hebrew Point Meteg
(0x005bf, 0x005bf,), # Hebrew Point Rafe
(0x005c1, 0x005c2,), # Hebrew Point Shin Dot ..Hebrew Point Sin Dot
(0x005c4, 0x005c5,), # Hebrew Mark Upper Dot ..Hebrew Mark Lower Dot
(0x005c7, 0x005c7,), # Hebrew Point Qamats Qatan
(0x00610, 0x0061a,), # Arabic Sign Sallallahou ..Arabic Small Kasra
(0x0064b, 0x0065f,), # Arabic Fathatan ..Arabic Wavy Hamza Below
(0x00670, 0x00670,), # Arabic Letter Superscript Alef
(0x006d6, 0x006dc,), # Arabic Small High Ligatu..Arabic Small High Seen
(0x006df, 0x006e4,), # Arabic Small High Rounde..Arabic Small High Madda
(0x006e7, 0x006e8,), # Arabic Small High Yeh ..Arabic Small High Noon
(0x006ea, 0x006ed,), # Arabic Empty Centre Low ..Arabic Small Low Meem
(0x00711, 0x00711,), # Syriac Letter Superscript Alaph
(0x00730, 0x0074a,), # Syriac Pthaha Above ..Syriac Barrekh
(0x007a6, 0x007b0,), # Thaana Abafili ..Thaana Sukun
(0x007eb, 0x007f3,), # Nko Combining Short High..Nko Combining Double Dot
(0x00816, 0x00819,), # Samaritan Mark In ..Samaritan Mark Dagesh
(0x0081b, 0x00823,), # Samaritan Mark Epentheti..Samaritan Vowel Sign A
(0x00825, 0x00827,), # Samaritan Vowel Sign Sho..Samaritan Vowel Sign U
(0x00829, 0x0082d,), # Samaritan Vowel Sign Lon..Samaritan Mark Nequdaa
(0x00859, 0x0085b,), # Mandaic Affrication Mark..Mandaic Gemination Mark
(0x008d4, 0x008e1,), # Arabic Small High Word A..Arabic Small High Sign S
(0x008e3, 0x00902,), # Arabic Turned Damma Belo..Devanagari Sign Anusvara
(0x0093a, 0x0093a,), # Devanagari Vowel Sign Oe
(0x0093c, 0x0093c,), # Devanagari Sign Nukta
(0x00941, 0x00948,), # Devanagari Vowel Sign U ..Devanagari Vowel Sign Ai
(0x0094d, 0x0094d,), # Devanagari Sign Virama
(0x00951, 0x00957,), # Devanagari Stress Sign U..Devanagari Vowel Sign Uu
(0x00962, 0x00963,), # Devanagari Vowel Sign Vo..Devanagari Vowel Sign Vo
(0x00981, 0x00981,), # Bengali Sign Candrabindu
(0x009bc, 0x009bc,), # Bengali Sign Nukta
(0x009c1, 0x009c4,), # Bengali Vowel Sign U ..Bengali Vowel Sign Vocal
(0x009cd, 0x009cd,), # Bengali Sign Virama
(0x009e2, 0x009e3,), # Bengali Vowel Sign Vocal..Bengali Vowel Sign Vocal
(0x00a01, 0x00a02,), # Gurmukhi Sign Adak Bindi..Gurmukhi Sign Bindi
(0x00a3c, 0x00a3c,), # Gurmukhi Sign Nukta
(0x00a41, 0x00a42,), # Gurmukhi Vowel Sign U ..Gurmukhi Vowel Sign Uu
(0x00a47, 0x00a48,), # Gurmukhi Vowel Sign Ee ..Gurmukhi Vowel Sign Ai
(0x00a4b, 0x00a4d,), # Gurmukhi Vowel Sign Oo ..Gurmukhi Sign Virama
(0x00a51, 0x00a51,), # Gurmukhi Sign Udaat
(0x00a70, 0x00a71,), # Gurmukhi Tippi ..Gurmukhi Addak
(0x00a75, 0x00a75,), # Gurmukhi Sign Yakash
(0x00a81, 0x00a82,), # Gujarati Sign Candrabind..Gujarati Sign Anusvara
(0x00abc, 0x00abc,), # Gujarati Sign Nukta
(0x00ac1, 0x00ac5,), # Gujarati Vowel Sign U ..Gujarati Vowel Sign Cand
(0x00ac7, 0x00ac8,), # Gujarati Vowel Sign E ..Gujarati Vowel Sign Ai
(0x00acd, 0x00acd,), # Gujarati Sign Virama
(0x00ae2, 0x00ae3,), # Gujarati Vowel Sign Voca..Gujarati Vowel Sign Voca
(0x00afa, 0x00aff,), # Gujarati Sign Sukun ..Gujarati Sign Two-circle
(0x00b01, 0x00b01,), # Oriya Sign Candrabindu
(0x00b3c, 0x00b3c,), # Oriya Sign Nukta
(0x00b3f, 0x00b3f,), # Oriya Vowel Sign I
(0x00b41, 0x00b44,), # Oriya Vowel Sign U ..Oriya Vowel Sign Vocalic
(0x00b4d, 0x00b4d,), # Oriya Sign Virama
(0x00b56, 0x00b56,), # Oriya Ai Length Mark
(0x00b62, 0x00b63,), # Oriya Vowel Sign Vocalic..Oriya Vowel Sign Vocalic
(0x00b82, 0x00b82,), # Tamil Sign Anusvara
(0x00bc0, 0x00bc0,), # Tamil Vowel Sign Ii
(0x00bcd, 0x00bcd,), # Tamil Sign Virama
(0x00c00, 0x00c00,), # Telugu Sign Combining Candrabindu Above
(0x00c3e, 0x00c40,), # Telugu Vowel Sign Aa ..Telugu Vowel Sign Ii
(0x00c46, 0x00c48,), # Telugu Vowel Sign E ..Telugu Vowel Sign Ai
(0x00c4a, 0x00c4d,), # Telugu Vowel Sign O ..Telugu Sign Virama
(0x00c55, 0x00c56,), # Telugu Length Mark ..Telugu Ai Length Mark
(0x00c62, 0x00c63,), # Telugu Vowel Sign Vocali..Telugu Vowel Sign Vocali
(0x00c81, 0x00c81,), # Kannada Sign Candrabindu
(0x00cbc, 0x00cbc,), # Kannada Sign Nukta
(0x00cbf, 0x00cbf,), # Kannada Vowel Sign I
(0x00cc6, 0x00cc6,), # Kannada Vowel Sign E
(0x00ccc, 0x00ccd,), # Kannada Vowel Sign Au ..Kannada Sign Virama
(0x00ce2, 0x00ce3,), # Kannada Vowel Sign Vocal..Kannada Vowel Sign Vocal
(0x00d00, 0x00d01,), # Malayalam Sign Combining..Malayalam Sign Candrabin
(0x00d3b, 0x00d3c,), # Malayalam Sign Vertical ..Malayalam Sign Circular
(0x00d41, 0x00d44,), # Malayalam Vowel Sign U ..Malayalam Vowel Sign Voc
(0x00d4d, 0x00d4d,), # Malayalam Sign Virama
(0x00d62, 0x00d63,), # Malayalam Vowel Sign Voc..Malayalam Vowel Sign Voc
(0x00dca, 0x00dca,), # Sinhala Sign Al-lakuna
(0x00dd2, 0x00dd4,), # Sinhala Vowel Sign Ketti..Sinhala Vowel Sign Ketti
(0x00dd6, 0x00dd6,), # Sinhala Vowel Sign Diga Paa-pilla
(0x00e31, 0x00e31,), # Thai Character Mai Han-akat
(0x00e34, 0x00e3a,), # Thai Character Sara I ..Thai Character Phinthu
(0x00e47, 0x00e4e,), # Thai Character Maitaikhu..Thai Character Yamakkan
(0x00eb1, 0x00eb1,), # Lao Vowel Sign Mai Kan
(0x00eb4, 0x00eb9,), # Lao Vowel Sign I ..Lao Vowel Sign Uu
(0x00ebb, 0x00ebc,), # Lao Vowel Sign Mai Kon ..Lao Semivowel Sign Lo
(0x00ec8, 0x00ecd,), # Lao Tone Mai Ek ..Lao Niggahita
(0x00f18, 0x00f19,), # Tibetan Astrological Sig..Tibetan Astrological Sig
(0x00f35, 0x00f35,), # Tibetan Mark Ngas Bzung Nyi Zla
(0x00f37, 0x00f37,), # Tibetan Mark Ngas Bzung Sgor Rtags
(0x00f39, 0x00f39,), # Tibetan Mark Tsa -phru
(0x00f71, 0x00f7e,), # Tibetan Vowel Sign Aa ..Tibetan Sign Rjes Su Nga
(0x00f80, 0x00f84,), # Tibetan Vowel Sign Rever..Tibetan Mark Halanta
(0x00f86, 0x00f87,), # Tibetan Sign Lci Rtags ..Tibetan Sign Yang Rtags
(0x00f8d, 0x00f97,), # Tibetan Subjoined Sign L..Tibetan Subjoined Letter
(0x00f99, 0x00fbc,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00fc6, 0x00fc6,), # Tibetan Symbol Padma Gdan
(0x0102d, 0x01030,), # Myanmar Vowel Sign I ..Myanmar Vowel Sign Uu
(0x01032, 0x01037,), # Myanmar Vowel Sign Ai ..Myanmar Sign Dot Below
(0x01039, 0x0103a,), # Myanmar Sign Virama ..Myanmar Sign Asat
(0x0103d, 0x0103e,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01058, 0x01059,), # Myanmar Vowel Sign Vocal..Myanmar Vowel Sign Vocal
(0x0105e, 0x01060,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01071, 0x01074,), # Myanmar Vowel Sign Geba ..Myanmar Vowel Sign Kayah
(0x01082, 0x01082,), # Myanmar Consonant Sign Shan Medial Wa
(0x01085, 0x01086,), # Myanmar Vowel Sign Shan ..Myanmar Vowel Sign Shan
(0x0108d, 0x0108d,), # Myanmar Sign Shan Council Emphatic Tone
(0x0109d, 0x0109d,), # Myanmar Vowel Sign Aiton Ai
(0x0135d, 0x0135f,), # Ethiopic Combining Gemin..Ethiopic Combining Gemin
(0x01712, 0x01714,), # Tagalog Vowel Sign I ..Tagalog Sign Virama
(0x01732, 0x01734,), # Hanunoo Vowel Sign I ..Hanunoo Sign Pamudpod
(0x01752, 0x01753,), # Buhid Vowel Sign I ..Buhid Vowel Sign U
(0x01772, 0x01773,), # Tagbanwa Vowel Sign I ..Tagbanwa Vowel Sign U
(0x017b4, 0x017b5,), # Khmer Vowel Inherent Aq ..Khmer Vowel Inherent Aa
(0x017b7, 0x017bd,), # Khmer Vowel Sign I ..Khmer Vowel Sign Ua
(0x017c6, 0x017c6,), # Khmer Sign Nikahit
(0x017c9, 0x017d3,), # Khmer Sign Muusikatoan ..Khmer Sign Bathamasat
(0x017dd, 0x017dd,), # Khmer Sign Atthacan
(0x0180b, 0x0180d,), # Mongolian Free Variation..Mongolian Free Variation
(0x01885, 0x01886,), # Mongolian Letter Ali Gal..Mongolian Letter Ali Gal
(0x018a9, 0x018a9,), # Mongolian Letter Ali Gali Dagalga
(0x01920, 0x01922,), # Limbu Vowel Sign A ..Limbu Vowel Sign U
(0x01927, 0x01928,), # Limbu Vowel Sign E ..Limbu Vowel Sign O
(0x01932, 0x01932,), # Limbu Small Letter Anusvara
(0x01939, 0x0193b,), # Limbu Sign Mukphreng ..Limbu Sign Sa-i
(0x01a17, 0x01a18,), # Buginese Vowel Sign I ..Buginese Vowel Sign U
(0x01a1b, 0x01a1b,), # Buginese Vowel Sign Ae
(0x01a56, 0x01a56,), # Tai Tham Consonant Sign Medial La
(0x01a58, 0x01a5e,), # Tai Tham Sign Mai Kang L..Tai Tham Consonant Sign
(0x01a60, 0x01a60,), # Tai Tham Sign Sakot
(0x01a62, 0x01a62,), # Tai Tham Vowel Sign Mai Sat
(0x01a65, 0x01a6c,), # Tai Tham Vowel Sign I ..Tai Tham Vowel Sign Oa B
(0x01a73, 0x01a7c,), # Tai Tham Vowel Sign Oa A..Tai Tham Sign Khuen-lue
(0x01a7f, 0x01a7f,), # Tai Tham Combining Cryptogrammic Dot
(0x01ab0, 0x01abe,), # Combining Doubled Circum..Combining Parentheses Ov
(0x01b00, 0x01b03,), # Balinese Sign Ulu Ricem ..Balinese Sign Surang
(0x01b34, 0x01b34,), # Balinese Sign Rerekan
(0x01b36, 0x01b3a,), # Balinese Vowel Sign Ulu ..Balinese Vowel Sign Ra R
(0x01b3c, 0x01b3c,), # Balinese Vowel Sign La Lenga
(0x01b42, 0x01b42,), # Balinese Vowel Sign Pepet
(0x01b6b, 0x01b73,), # Balinese Musical Symbol ..Balinese Musical Symbol
(0x01b80, 0x01b81,), # Sundanese Sign Panyecek ..Sundanese Sign Panglayar
(0x01ba2, 0x01ba5,), # Sundanese Consonant Sign..Sundanese Vowel Sign Pan
(0x01ba8, 0x01ba9,), # Sundanese Vowel Sign Pam..Sundanese Vowel Sign Pan
(0x01bab, 0x01bad,), # Sundanese Sign Virama ..Sundanese Consonant Sign
(0x01be6, 0x01be6,), # Batak Sign Tompi
(0x01be8, 0x01be9,), # Batak Vowel Sign Pakpak ..Batak Vowel Sign Ee
(0x01bed, 0x01bed,), # Batak Vowel Sign Karo O
(0x01bef, 0x01bf1,), # Batak Vowel Sign U For S..Batak Consonant Sign H
(0x01c2c, 0x01c33,), # Lepcha Vowel Sign E ..Lepcha Consonant Sign T
(0x01c36, 0x01c37,), # Lepcha Sign Ran ..Lepcha Sign Nukta
(0x01cd0, 0x01cd2,), # Vedic Tone Karshana ..Vedic Tone Prenkha
(0x01cd4, 0x01ce0,), # Vedic Sign Yajurvedic Mi..Vedic Tone Rigvedic Kash
(0x01ce2, 0x01ce8,), # Vedic Sign Visarga Svari..Vedic Sign Visarga Anuda
(0x01ced, 0x01ced,), # Vedic Sign Tiryak
(0x01cf4, 0x01cf4,), # Vedic Tone Candra Above
(0x01cf8, 0x01cf9,), # Vedic Tone Ring Above ..Vedic Tone Double Ring A
(0x01dc0, 0x01df9,), # Combining Dotted Grave A..Combining Wide Inverted
(0x01dfb, 0x01dff,), # Combining Deletion Mark ..Combining Right Arrowhea
(0x020d0, 0x020f0,), # Combining Left Harpoon A..Combining Asterisk Above
(0x02cef, 0x02cf1,), # Coptic Combining Ni Abov..Coptic Combining Spiritu
(0x02d7f, 0x02d7f,), # Tifinagh Consonant Joiner
(0x02de0, 0x02dff,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0302a, 0x0302d,), # Ideographic Level Tone M..Ideographic Entering Ton
(0x03099, 0x0309a,), # Combining Katakana-hirag..Combining Katakana-hirag
(0x0a66f, 0x0a672,), # Combining Cyrillic Vzmet..Combining Cyrillic Thous
(0x0a674, 0x0a67d,), # Combining Cyrillic Lette..Combining Cyrillic Payer
(0x0a69e, 0x0a69f,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0a6f0, 0x0a6f1,), # Bamum Combining Mark Koq..Bamum Combining Mark Tuk
(0x0a802, 0x0a802,), # Syloti Nagri Sign Dvisvara
(0x0a806, 0x0a806,), # Syloti Nagri Sign Hasanta
(0x0a80b, 0x0a80b,), # Syloti Nagri Sign Anusvara
(0x0a825, 0x0a826,), # Syloti Nagri Vowel Sign ..Syloti Nagri Vowel Sign
(0x0a8c4, 0x0a8c5,), # Saurashtra Sign Virama ..Saurashtra Sign Candrabi
(0x0a8e0, 0x0a8f1,), # Combining Devanagari Dig..Combining Devanagari Sig
(0x0a926, 0x0a92d,), # Kayah Li Vowel Ue ..Kayah Li Tone Calya Plop
(0x0a947, 0x0a951,), # Rejang Vowel Sign I ..Rejang Consonant Sign R
(0x0a980, 0x0a982,), # Javanese Sign Panyangga ..Javanese Sign Layar
(0x0a9b3, 0x0a9b3,), # Javanese Sign Cecak Telu
(0x0a9b6, 0x0a9b9,), # Javanese Vowel Sign Wulu..Javanese Vowel Sign Suku
(0x0a9bc, 0x0a9bc,), # Javanese Vowel Sign Pepet
(0x0a9e5, 0x0a9e5,), # Myanmar Sign Shan Saw
(0x0aa29, 0x0aa2e,), # Cham Vowel Sign Aa ..Cham Vowel Sign Oe
(0x0aa31, 0x0aa32,), # Cham Vowel Sign Au ..Cham Vowel Sign Ue
(0x0aa35, 0x0aa36,), # Cham Consonant Sign La ..Cham Consonant Sign Wa
(0x0aa43, 0x0aa43,), # Cham Consonant Sign Final Ng
(0x0aa4c, 0x0aa4c,), # Cham Consonant Sign Final M
(0x0aa7c, 0x0aa7c,), # Myanmar Sign Tai Laing Tone-2
(0x0aab0, 0x0aab0,), # Tai Viet Mai Kang
(0x0aab2, 0x0aab4,), # Tai Viet Vowel I ..Tai Viet Vowel U
(0x0aab7, 0x0aab8,), # Tai Viet Mai Khit ..Tai Viet Vowel Ia
(0x0aabe, 0x0aabf,), # Tai Viet Vowel Am ..Tai Viet Tone Mai Ek
(0x0aac1, 0x0aac1,), # Tai Viet Tone Mai Tho
(0x0aaec, 0x0aaed,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0aaf6, 0x0aaf6,), # Meetei Mayek Virama
(0x0abe5, 0x0abe5,), # Meetei Mayek Vowel Sign Anap
(0x0abe8, 0x0abe8,), # Meetei Mayek Vowel Sign Unap
(0x0abed, 0x0abed,), # Meetei Mayek Apun Iyek
(0x0fb1e, 0x0fb1e,), # Hebrew Point Judeo-spanish Varika
(0x0fe00, 0x0fe0f,), # Variation Selector-1 ..Variation Selector-16
(0x0fe20, 0x0fe2f,), # Combining Ligature Left ..Combining Cyrillic Titlo
(0x101fd, 0x101fd,), # Phaistos Disc Sign Combining Oblique Stroke
(0x102e0, 0x102e0,), # Coptic Epact Thousands Mark
(0x10376, 0x1037a,), # Combining Old Permic Let..Combining Old Permic Let
(0x10a01, 0x10a03,), # Kharoshthi Vowel Sign I ..Kharoshthi Vowel Sign Vo
(0x10a05, 0x10a06,), # Kharoshthi Vowel Sign E ..Kharoshthi Vowel Sign O
(0x10a0c, 0x10a0f,), # Kharoshthi Vowel Length ..Kharoshthi Sign Visarga
(0x10a38, 0x10a3a,), # Kharoshthi Sign Bar Abov..Kharoshthi Sign Dot Belo
(0x10a3f, 0x10a3f,), # Kharoshthi Virama
(0x10ae5, 0x10ae6,), # Manichaean Abbreviation ..Manichaean Abbreviation
(0x11001, 0x11001,), # Brahmi Sign Anusvara
(0x11038, 0x11046,), # Brahmi Vowel Sign Aa ..Brahmi Virama
(0x1107f, 0x11081,), # Brahmi Number Joiner ..Kaithi Sign Anusvara
(0x110b3, 0x110b6,), # Kaithi Vowel Sign U ..Kaithi Vowel Sign Ai
(0x110b9, 0x110ba,), # Kaithi Sign Virama ..Kaithi Sign Nukta
(0x11100, 0x11102,), # Chakma Sign Candrabindu ..Chakma Sign Visarga
(0x11127, 0x1112b,), # Chakma Vowel Sign A ..Chakma Vowel Sign Uu
(0x1112d, 0x11134,), # Chakma Vowel Sign Ai ..Chakma Maayyaa
(0x11173, 0x11173,), # Mahajani Sign Nukta
(0x11180, 0x11181,), # Sharada Sign Candrabindu..Sharada Sign Anusvara
(0x111b6, 0x111be,), # Sharada Vowel Sign U ..Sharada Vowel Sign O
(0x111ca, 0x111cc,), # Sharada Sign Nukta ..Sharada Extra Short Vowe
(0x1122f, 0x11231,), # Khojki Vowel Sign U ..Khojki Vowel Sign Ai
(0x11234, 0x11234,), # Khojki Sign Anusvara
(0x11236, 0x11237,), # Khojki Sign Nukta ..Khojki Sign Shadda
(0x1123e, 0x1123e,), # Khojki Sign Sukun
(0x112df, 0x112df,), # Khudawadi Sign Anusvara
(0x112e3, 0x112ea,), # Khudawadi Vowel Sign U ..Khudawadi Sign Virama
(0x11300, 0x11301,), # Grantha Sign Combining A..Grantha Sign Candrabindu
(0x1133c, 0x1133c,), # Grantha Sign Nukta
(0x11340, 0x11340,), # Grantha Vowel Sign Ii
(0x11366, 0x1136c,), # Combining Grantha Digit ..Combining Grantha Digit
(0x11370, 0x11374,), # Combining Grantha Letter..Combining Grantha Letter
(0x11438, 0x1143f,), # Newa Vowel Sign U ..Newa Vowel Sign Ai
(0x11442, 0x11444,), # Newa Sign Virama ..Newa Sign Anusvara
(0x11446, 0x11446,), # Newa Sign Nukta
(0x114b3, 0x114b8,), # Tirhuta Vowel Sign U ..Tirhuta Vowel Sign Vocal
(0x114ba, 0x114ba,), # Tirhuta Vowel Sign Short E
(0x114bf, 0x114c0,), # Tirhuta Sign Candrabindu..Tirhuta Sign Anusvara
(0x114c2, 0x114c3,), # Tirhuta Sign Virama ..Tirhuta Sign Nukta
(0x115b2, 0x115b5,), # Siddham Vowel Sign U ..Siddham Vowel Sign Vocal
(0x115bc, 0x115bd,), # Siddham Sign Candrabindu..Siddham Sign Anusvara
(0x115bf, 0x115c0,), # Siddham Sign Virama ..Siddham Sign Nukta
(0x115dc, 0x115dd,), # Siddham Vowel Sign Alter..Siddham Vowel Sign Alter
(0x11633, 0x1163a,), # Modi Vowel Sign U ..Modi Vowel Sign Ai
(0x1163d, 0x1163d,), # Modi Sign Anusvara
(0x1163f, 0x11640,), # Modi Sign Virama ..Modi Sign Ardhacandra
(0x116ab, 0x116ab,), # Takri Sign Anusvara
(0x116ad, 0x116ad,), # Takri Vowel Sign Aa
(0x116b0, 0x116b5,), # Takri Vowel Sign U ..Takri Vowel Sign Au
(0x116b7, 0x116b7,), # Takri Sign Nukta
(0x1171d, 0x1171f,), # Ahom Consonant Sign Medi..Ahom Consonant Sign Medi
(0x11722, 0x11725,), # Ahom Vowel Sign I ..Ahom Vowel Sign Uu
(0x11727, 0x1172b,), # Ahom Vowel Sign Aw ..Ahom Sign Killer
(0x11a01, 0x11a06,), # Zanabazar Square Vowel S..Zanabazar Square Vowel S
(0x11a09, 0x11a0a,), # Zanabazar Square Vowel S..Zanabazar Square Vowel L
(0x11a33, 0x11a38,), # Zanabazar Square Final C..Zanabazar Square Sign An
(0x11a3b, 0x11a3e,), # Zanabazar Square Cluster..Zanabazar Square Cluster
(0x11a47, 0x11a47,), # Zanabazar Square Subjoiner
(0x11a51, 0x11a56,), # Soyombo Vowel Sign I ..Soyombo Vowel Sign Oe
(0x11a59, 0x11a5b,), # Soyombo Vowel Sign Vocal..Soyombo Vowel Length Mar
(0x11a8a, 0x11a96,), # Soyombo Final Consonant ..Soyombo Sign Anusvara
(0x11a98, 0x11a99,), # Soyombo Gemination Mark ..Soyombo Subjoiner
(0x11c30, 0x11c36,), # Bhaiksuki Vowel Sign I ..Bhaiksuki Vowel Sign Voc
(0x11c38, 0x11c3d,), # Bhaiksuki Vowel Sign E ..Bhaiksuki Sign Anusvara
(0x11c3f, 0x11c3f,), # Bhaiksuki Sign Virama
(0x11c92, 0x11ca7,), # Marchen Subjoined Letter..Marchen Subjoined Letter
(0x11caa, 0x11cb0,), # Marchen Subjoined Letter..Marchen Vowel Sign Aa
(0x11cb2, 0x11cb3,), # Marchen Vowel Sign U ..Marchen Vowel Sign E
(0x11cb5, 0x11cb6,), # Marchen Sign Anusvara ..Marchen Sign Candrabindu
(0x11d31, 0x11d36,), # Masaram Gondi Vowel Sign..Masaram Gondi Vowel Sign
(0x11d3a, 0x11d3a,), # Masaram Gondi Vowel Sign E
(0x11d3c, 0x11d3d,), # Masaram Gondi Vowel Sign..Masaram Gondi Vowel Sign
(0x11d3f, 0x11d45,), # Masaram Gondi Vowel Sign..Masaram Gondi Virama
(0x11d47, 0x11d47,), # Masaram Gondi Ra-kara
(0x16af0, 0x16af4,), # Bassa Vah Combining High..Bassa Vah Combining High
(0x16b30, 0x16b36,), # Pahawh Hmong Mark Cim Tu..Pahawh Hmong Mark Cim Ta
(0x16f8f, 0x16f92,), # Miao Tone Right ..Miao Tone Below
(0x1bc9d, 0x1bc9e,), # Duployan Thick Letter Se..Duployan Double Mark
(0x1d167, 0x1d169,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d17b, 0x1d182,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d185, 0x1d18b,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d1aa, 0x1d1ad,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d242, 0x1d244,), # Combining Greek Musical ..Combining Greek Musical
(0x1da00, 0x1da36,), # Signwriting Head Rim ..Signwriting Air Sucking
(0x1da3b, 0x1da6c,), # Signwriting Mouth Closed..Signwriting Excitement
(0x1da75, 0x1da75,), # Signwriting Upper Body Tilting From Hip Joints
(0x1da84, 0x1da84,), # Signwriting Location Head Neck
(0x1da9b, 0x1da9f,), # Signwriting Fill Modifie..Signwriting Fill Modifie
(0x1daa1, 0x1daaf,), # Signwriting Rotation Mod..Signwriting Rotation Mod
(0x1e000, 0x1e006,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e008, 0x1e018,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e01b, 0x1e021,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e023, 0x1e024,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e026, 0x1e02a,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e8d0, 0x1e8d6,), # Mende Kikakui Combining ..Mende Kikakui Combining
(0x1e944, 0x1e94a,), # Adlam Alif Lengthener ..Adlam Nukta
(0xe0100, 0xe01ef,), # Variation Selector-17 ..Variation Selector-256
),
'11.0.0': (
# Source: DerivedGeneralCategory-11.0.0.txt
# Date: 2018-02-21, 05:34:04 GMT
#
(0x00300, 0x0036f,), # Combining Grave Accent ..Combining Latin Small Le
(0x00483, 0x00489,), # Combining Cyrillic Titlo..Combining Cyrillic Milli
(0x00591, 0x005bd,), # Hebrew Accent Etnahta ..Hebrew Point Meteg
(0x005bf, 0x005bf,), # Hebrew Point Rafe
(0x005c1, 0x005c2,), # Hebrew Point Shin Dot ..Hebrew Point Sin Dot
(0x005c4, 0x005c5,), # Hebrew Mark Upper Dot ..Hebrew Mark Lower Dot
(0x005c7, 0x005c7,), # Hebrew Point Qamats Qatan
(0x00610, 0x0061a,), # Arabic Sign Sallallahou ..Arabic Small Kasra
(0x0064b, 0x0065f,), # Arabic Fathatan ..Arabic Wavy Hamza Below
(0x00670, 0x00670,), # Arabic Letter Superscript Alef
(0x006d6, 0x006dc,), # Arabic Small High Ligatu..Arabic Small High Seen
(0x006df, 0x006e4,), # Arabic Small High Rounde..Arabic Small High Madda
(0x006e7, 0x006e8,), # Arabic Small High Yeh ..Arabic Small High Noon
(0x006ea, 0x006ed,), # Arabic Empty Centre Low ..Arabic Small Low Meem
(0x00711, 0x00711,), # Syriac Letter Superscript Alaph
(0x00730, 0x0074a,), # Syriac Pthaha Above ..Syriac Barrekh
(0x007a6, 0x007b0,), # Thaana Abafili ..Thaana Sukun
(0x007eb, 0x007f3,), # Nko Combining Short High..Nko Combining Double Dot
(0x007fd, 0x007fd,), # Nko Dantayalan
(0x00816, 0x00819,), # Samaritan Mark In ..Samaritan Mark Dagesh
(0x0081b, 0x00823,), # Samaritan Mark Epentheti..Samaritan Vowel Sign A
(0x00825, 0x00827,), # Samaritan Vowel Sign Sho..Samaritan Vowel Sign U
(0x00829, 0x0082d,), # Samaritan Vowel Sign Lon..Samaritan Mark Nequdaa
(0x00859, 0x0085b,), # Mandaic Affrication Mark..Mandaic Gemination Mark
(0x008d3, 0x008e1,), # Arabic Small Low Waw ..Arabic Small High Sign S
(0x008e3, 0x00902,), # Arabic Turned Damma Belo..Devanagari Sign Anusvara
(0x0093a, 0x0093a,), # Devanagari Vowel Sign Oe
(0x0093c, 0x0093c,), # Devanagari Sign Nukta
(0x00941, 0x00948,), # Devanagari Vowel Sign U ..Devanagari Vowel Sign Ai
(0x0094d, 0x0094d,), # Devanagari Sign Virama
(0x00951, 0x00957,), # Devanagari Stress Sign U..Devanagari Vowel Sign Uu
(0x00962, 0x00963,), # Devanagari Vowel Sign Vo..Devanagari Vowel Sign Vo
(0x00981, 0x00981,), # Bengali Sign Candrabindu
(0x009bc, 0x009bc,), # Bengali Sign Nukta
(0x009c1, 0x009c4,), # Bengali Vowel Sign U ..Bengali Vowel Sign Vocal
(0x009cd, 0x009cd,), # Bengali Sign Virama
(0x009e2, 0x009e3,), # Bengali Vowel Sign Vocal..Bengali Vowel Sign Vocal
(0x009fe, 0x009fe,), # Bengali Sandhi Mark
(0x00a01, 0x00a02,), # Gurmukhi Sign Adak Bindi..Gurmukhi Sign Bindi
(0x00a3c, 0x00a3c,), # Gurmukhi Sign Nukta
(0x00a41, 0x00a42,), # Gurmukhi Vowel Sign U ..Gurmukhi Vowel Sign Uu
(0x00a47, 0x00a48,), # Gurmukhi Vowel Sign Ee ..Gurmukhi Vowel Sign Ai
(0x00a4b, 0x00a4d,), # Gurmukhi Vowel Sign Oo ..Gurmukhi Sign Virama
(0x00a51, 0x00a51,), # Gurmukhi Sign Udaat
(0x00a70, 0x00a71,), # Gurmukhi Tippi ..Gurmukhi Addak
(0x00a75, 0x00a75,), # Gurmukhi Sign Yakash
(0x00a81, 0x00a82,), # Gujarati Sign Candrabind..Gujarati Sign Anusvara
(0x00abc, 0x00abc,), # Gujarati Sign Nukta
(0x00ac1, 0x00ac5,), # Gujarati Vowel Sign U ..Gujarati Vowel Sign Cand
(0x00ac7, 0x00ac8,), # Gujarati Vowel Sign E ..Gujarati Vowel Sign Ai
(0x00acd, 0x00acd,), # Gujarati Sign Virama
(0x00ae2, 0x00ae3,), # Gujarati Vowel Sign Voca..Gujarati Vowel Sign Voca
(0x00afa, 0x00aff,), # Gujarati Sign Sukun ..Gujarati Sign Two-circle
(0x00b01, 0x00b01,), # Oriya Sign Candrabindu
(0x00b3c, 0x00b3c,), # Oriya Sign Nukta
(0x00b3f, 0x00b3f,), # Oriya Vowel Sign I
(0x00b41, 0x00b44,), # Oriya Vowel Sign U ..Oriya Vowel Sign Vocalic
(0x00b4d, 0x00b4d,), # Oriya Sign Virama
(0x00b56, 0x00b56,), # Oriya Ai Length Mark
(0x00b62, 0x00b63,), # Oriya Vowel Sign Vocalic..Oriya Vowel Sign Vocalic
(0x00b82, 0x00b82,), # Tamil Sign Anusvara
(0x00bc0, 0x00bc0,), # Tamil Vowel Sign Ii
(0x00bcd, 0x00bcd,), # Tamil Sign Virama
(0x00c00, 0x00c00,), # Telugu Sign Combining Candrabindu Above
(0x00c04, 0x00c04,), # Telugu Sign Combining Anusvara Above
(0x00c3e, 0x00c40,), # Telugu Vowel Sign Aa ..Telugu Vowel Sign Ii
(0x00c46, 0x00c48,), # Telugu Vowel Sign E ..Telugu Vowel Sign Ai
(0x00c4a, 0x00c4d,), # Telugu Vowel Sign O ..Telugu Sign Virama
(0x00c55, 0x00c56,), # Telugu Length Mark ..Telugu Ai Length Mark
(0x00c62, 0x00c63,), # Telugu Vowel Sign Vocali..Telugu Vowel Sign Vocali
(0x00c81, 0x00c81,), # Kannada Sign Candrabindu
(0x00cbc, 0x00cbc,), # Kannada Sign Nukta
(0x00cbf, 0x00cbf,), # Kannada Vowel Sign I
(0x00cc6, 0x00cc6,), # Kannada Vowel Sign E
(0x00ccc, 0x00ccd,), # Kannada Vowel Sign Au ..Kannada Sign Virama
(0x00ce2, 0x00ce3,), # Kannada Vowel Sign Vocal..Kannada Vowel Sign Vocal
(0x00d00, 0x00d01,), # Malayalam Sign Combining..Malayalam Sign Candrabin
(0x00d3b, 0x00d3c,), # Malayalam Sign Vertical ..Malayalam Sign Circular
(0x00d41, 0x00d44,), # Malayalam Vowel Sign U ..Malayalam Vowel Sign Voc
(0x00d4d, 0x00d4d,), # Malayalam Sign Virama
(0x00d62, 0x00d63,), # Malayalam Vowel Sign Voc..Malayalam Vowel Sign Voc
(0x00dca, 0x00dca,), # Sinhala Sign Al-lakuna
(0x00dd2, 0x00dd4,), # Sinhala Vowel Sign Ketti..Sinhala Vowel Sign Ketti
(0x00dd6, 0x00dd6,), # Sinhala Vowel Sign Diga Paa-pilla
(0x00e31, 0x00e31,), # Thai Character Mai Han-akat
(0x00e34, 0x00e3a,), # Thai Character Sara I ..Thai Character Phinthu
(0x00e47, 0x00e4e,), # Thai Character Maitaikhu..Thai Character Yamakkan
(0x00eb1, 0x00eb1,), # Lao Vowel Sign Mai Kan
(0x00eb4, 0x00eb9,), # Lao Vowel Sign I ..Lao Vowel Sign Uu
(0x00ebb, 0x00ebc,), # Lao Vowel Sign Mai Kon ..Lao Semivowel Sign Lo
(0x00ec8, 0x00ecd,), # Lao Tone Mai Ek ..Lao Niggahita
(0x00f18, 0x00f19,), # Tibetan Astrological Sig..Tibetan Astrological Sig
(0x00f35, 0x00f35,), # Tibetan Mark Ngas Bzung Nyi Zla
(0x00f37, 0x00f37,), # Tibetan Mark Ngas Bzung Sgor Rtags
(0x00f39, 0x00f39,), # Tibetan Mark Tsa -phru
(0x00f71, 0x00f7e,), # Tibetan Vowel Sign Aa ..Tibetan Sign Rjes Su Nga
(0x00f80, 0x00f84,), # Tibetan Vowel Sign Rever..Tibetan Mark Halanta
(0x00f86, 0x00f87,), # Tibetan Sign Lci Rtags ..Tibetan Sign Yang Rtags
(0x00f8d, 0x00f97,), # Tibetan Subjoined Sign L..Tibetan Subjoined Letter
(0x00f99, 0x00fbc,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00fc6, 0x00fc6,), # Tibetan Symbol Padma Gdan
(0x0102d, 0x01030,), # Myanmar Vowel Sign I ..Myanmar Vowel Sign Uu
(0x01032, 0x01037,), # Myanmar Vowel Sign Ai ..Myanmar Sign Dot Below
(0x01039, 0x0103a,), # Myanmar Sign Virama ..Myanmar Sign Asat
(0x0103d, 0x0103e,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01058, 0x01059,), # Myanmar Vowel Sign Vocal..Myanmar Vowel Sign Vocal
(0x0105e, 0x01060,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01071, 0x01074,), # Myanmar Vowel Sign Geba ..Myanmar Vowel Sign Kayah
(0x01082, 0x01082,), # Myanmar Consonant Sign Shan Medial Wa
(0x01085, 0x01086,), # Myanmar Vowel Sign Shan ..Myanmar Vowel Sign Shan
(0x0108d, 0x0108d,), # Myanmar Sign Shan Council Emphatic Tone
(0x0109d, 0x0109d,), # Myanmar Vowel Sign Aiton Ai
(0x0135d, 0x0135f,), # Ethiopic Combining Gemin..Ethiopic Combining Gemin
(0x01712, 0x01714,), # Tagalog Vowel Sign I ..Tagalog Sign Virama
(0x01732, 0x01734,), # Hanunoo Vowel Sign I ..Hanunoo Sign Pamudpod
(0x01752, 0x01753,), # Buhid Vowel Sign I ..Buhid Vowel Sign U
(0x01772, 0x01773,), # Tagbanwa Vowel Sign I ..Tagbanwa Vowel Sign U
(0x017b4, 0x017b5,), # Khmer Vowel Inherent Aq ..Khmer Vowel Inherent Aa
(0x017b7, 0x017bd,), # Khmer Vowel Sign I ..Khmer Vowel Sign Ua
(0x017c6, 0x017c6,), # Khmer Sign Nikahit
(0x017c9, 0x017d3,), # Khmer Sign Muusikatoan ..Khmer Sign Bathamasat
(0x017dd, 0x017dd,), # Khmer Sign Atthacan
(0x0180b, 0x0180d,), # Mongolian Free Variation..Mongolian Free Variation
(0x01885, 0x01886,), # Mongolian Letter Ali Gal..Mongolian Letter Ali Gal
(0x018a9, 0x018a9,), # Mongolian Letter Ali Gali Dagalga
(0x01920, 0x01922,), # Limbu Vowel Sign A ..Limbu Vowel Sign U
(0x01927, 0x01928,), # Limbu Vowel Sign E ..Limbu Vowel Sign O
(0x01932, 0x01932,), # Limbu Small Letter Anusvara
(0x01939, 0x0193b,), # Limbu Sign Mukphreng ..Limbu Sign Sa-i
(0x01a17, 0x01a18,), # Buginese Vowel Sign I ..Buginese Vowel Sign U
(0x01a1b, 0x01a1b,), # Buginese Vowel Sign Ae
(0x01a56, 0x01a56,), # Tai Tham Consonant Sign Medial La
(0x01a58, 0x01a5e,), # Tai Tham Sign Mai Kang L..Tai Tham Consonant Sign
(0x01a60, 0x01a60,), # Tai Tham Sign Sakot
(0x01a62, 0x01a62,), # Tai Tham Vowel Sign Mai Sat
(0x01a65, 0x01a6c,), # Tai Tham Vowel Sign I ..Tai Tham Vowel Sign Oa B
(0x01a73, 0x01a7c,), # Tai Tham Vowel Sign Oa A..Tai Tham Sign Khuen-lue
(0x01a7f, 0x01a7f,), # Tai Tham Combining Cryptogrammic Dot
(0x01ab0, 0x01abe,), # Combining Doubled Circum..Combining Parentheses Ov
(0x01b00, 0x01b03,), # Balinese Sign Ulu Ricem ..Balinese Sign Surang
(0x01b34, 0x01b34,), # Balinese Sign Rerekan
(0x01b36, 0x01b3a,), # Balinese Vowel Sign Ulu ..Balinese Vowel Sign Ra R
(0x01b3c, 0x01b3c,), # Balinese Vowel Sign La Lenga
(0x01b42, 0x01b42,), # Balinese Vowel Sign Pepet
(0x01b6b, 0x01b73,), # Balinese Musical Symbol ..Balinese Musical Symbol
(0x01b80, 0x01b81,), # Sundanese Sign Panyecek ..Sundanese Sign Panglayar
(0x01ba2, 0x01ba5,), # Sundanese Consonant Sign..Sundanese Vowel Sign Pan
(0x01ba8, 0x01ba9,), # Sundanese Vowel Sign Pam..Sundanese Vowel Sign Pan
(0x01bab, 0x01bad,), # Sundanese Sign Virama ..Sundanese Consonant Sign
(0x01be6, 0x01be6,), # Batak Sign Tompi
(0x01be8, 0x01be9,), # Batak Vowel Sign Pakpak ..Batak Vowel Sign Ee
(0x01bed, 0x01bed,), # Batak Vowel Sign Karo O
(0x01bef, 0x01bf1,), # Batak Vowel Sign U For S..Batak Consonant Sign H
(0x01c2c, 0x01c33,), # Lepcha Vowel Sign E ..Lepcha Consonant Sign T
(0x01c36, 0x01c37,), # Lepcha Sign Ran ..Lepcha Sign Nukta
(0x01cd0, 0x01cd2,), # Vedic Tone Karshana ..Vedic Tone Prenkha
(0x01cd4, 0x01ce0,), # Vedic Sign Yajurvedic Mi..Vedic Tone Rigvedic Kash
(0x01ce2, 0x01ce8,), # Vedic Sign Visarga Svari..Vedic Sign Visarga Anuda
(0x01ced, 0x01ced,), # Vedic Sign Tiryak
(0x01cf4, 0x01cf4,), # Vedic Tone Candra Above
(0x01cf8, 0x01cf9,), # Vedic Tone Ring Above ..Vedic Tone Double Ring A
(0x01dc0, 0x01df9,), # Combining Dotted Grave A..Combining Wide Inverted
(0x01dfb, 0x01dff,), # Combining Deletion Mark ..Combining Right Arrowhea
(0x020d0, 0x020f0,), # Combining Left Harpoon A..Combining Asterisk Above
(0x02cef, 0x02cf1,), # Coptic Combining Ni Abov..Coptic Combining Spiritu
(0x02d7f, 0x02d7f,), # Tifinagh Consonant Joiner
(0x02de0, 0x02dff,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0302a, 0x0302d,), # Ideographic Level Tone M..Ideographic Entering Ton
(0x03099, 0x0309a,), # Combining Katakana-hirag..Combining Katakana-hirag
(0x0a66f, 0x0a672,), # Combining Cyrillic Vzmet..Combining Cyrillic Thous
(0x0a674, 0x0a67d,), # Combining Cyrillic Lette..Combining Cyrillic Payer
(0x0a69e, 0x0a69f,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0a6f0, 0x0a6f1,), # Bamum Combining Mark Koq..Bamum Combining Mark Tuk
(0x0a802, 0x0a802,), # Syloti Nagri Sign Dvisvara
(0x0a806, 0x0a806,), # Syloti Nagri Sign Hasanta
(0x0a80b, 0x0a80b,), # Syloti Nagri Sign Anusvara
(0x0a825, 0x0a826,), # Syloti Nagri Vowel Sign ..Syloti Nagri Vowel Sign
(0x0a8c4, 0x0a8c5,), # Saurashtra Sign Virama ..Saurashtra Sign Candrabi
(0x0a8e0, 0x0a8f1,), # Combining Devanagari Dig..Combining Devanagari Sig
(0x0a8ff, 0x0a8ff,), # Devanagari Vowel Sign Ay
(0x0a926, 0x0a92d,), # Kayah Li Vowel Ue ..Kayah Li Tone Calya Plop
(0x0a947, 0x0a951,), # Rejang Vowel Sign I ..Rejang Consonant Sign R
(0x0a980, 0x0a982,), # Javanese Sign Panyangga ..Javanese Sign Layar
(0x0a9b3, 0x0a9b3,), # Javanese Sign Cecak Telu
(0x0a9b6, 0x0a9b9,), # Javanese Vowel Sign Wulu..Javanese Vowel Sign Suku
(0x0a9bc, 0x0a9bc,), # Javanese Vowel Sign Pepet
(0x0a9e5, 0x0a9e5,), # Myanmar Sign Shan Saw
(0x0aa29, 0x0aa2e,), # Cham Vowel Sign Aa ..Cham Vowel Sign Oe
(0x0aa31, 0x0aa32,), # Cham Vowel Sign Au ..Cham Vowel Sign Ue
(0x0aa35, 0x0aa36,), # Cham Consonant Sign La ..Cham Consonant Sign Wa
(0x0aa43, 0x0aa43,), # Cham Consonant Sign Final Ng
(0x0aa4c, 0x0aa4c,), # Cham Consonant Sign Final M
(0x0aa7c, 0x0aa7c,), # Myanmar Sign Tai Laing Tone-2
(0x0aab0, 0x0aab0,), # Tai Viet Mai Kang
(0x0aab2, 0x0aab4,), # Tai Viet Vowel I ..Tai Viet Vowel U
(0x0aab7, 0x0aab8,), # Tai Viet Mai Khit ..Tai Viet Vowel Ia
(0x0aabe, 0x0aabf,), # Tai Viet Vowel Am ..Tai Viet Tone Mai Ek
(0x0aac1, 0x0aac1,), # Tai Viet Tone Mai Tho
(0x0aaec, 0x0aaed,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0aaf6, 0x0aaf6,), # Meetei Mayek Virama
(0x0abe5, 0x0abe5,), # Meetei Mayek Vowel Sign Anap
(0x0abe8, 0x0abe8,), # Meetei Mayek Vowel Sign Unap
(0x0abed, 0x0abed,), # Meetei Mayek Apun Iyek
(0x0fb1e, 0x0fb1e,), # Hebrew Point Judeo-spanish Varika
(0x0fe00, 0x0fe0f,), # Variation Selector-1 ..Variation Selector-16
(0x0fe20, 0x0fe2f,), # Combining Ligature Left ..Combining Cyrillic Titlo
(0x101fd, 0x101fd,), # Phaistos Disc Sign Combining Oblique Stroke
(0x102e0, 0x102e0,), # Coptic Epact Thousands Mark
(0x10376, 0x1037a,), # Combining Old Permic Let..Combining Old Permic Let
(0x10a01, 0x10a03,), # Kharoshthi Vowel Sign I ..Kharoshthi Vowel Sign Vo
(0x10a05, 0x10a06,), # Kharoshthi Vowel Sign E ..Kharoshthi Vowel Sign O
(0x10a0c, 0x10a0f,), # Kharoshthi Vowel Length ..Kharoshthi Sign Visarga
(0x10a38, 0x10a3a,), # Kharoshthi Sign Bar Abov..Kharoshthi Sign Dot Belo
(0x10a3f, 0x10a3f,), # Kharoshthi Virama
(0x10ae5, 0x10ae6,), # Manichaean Abbreviation ..Manichaean Abbreviation
(0x10d24, 0x10d27,), # Hanifi Rohingya Sign Har..Hanifi Rohingya Sign Tas
(0x10f46, 0x10f50,), # Sogdian Combining Dot Be..Sogdian Combining Stroke
(0x11001, 0x11001,), # Brahmi Sign Anusvara
(0x11038, 0x11046,), # Brahmi Vowel Sign Aa ..Brahmi Virama
(0x1107f, 0x11081,), # Brahmi Number Joiner ..Kaithi Sign Anusvara
(0x110b3, 0x110b6,), # Kaithi Vowel Sign U ..Kaithi Vowel Sign Ai
(0x110b9, 0x110ba,), # Kaithi Sign Virama ..Kaithi Sign Nukta
(0x11100, 0x11102,), # Chakma Sign Candrabindu ..Chakma Sign Visarga
(0x11127, 0x1112b,), # Chakma Vowel Sign A ..Chakma Vowel Sign Uu
(0x1112d, 0x11134,), # Chakma Vowel Sign Ai ..Chakma Maayyaa
(0x11173, 0x11173,), # Mahajani Sign Nukta
(0x11180, 0x11181,), # Sharada Sign Candrabindu..Sharada Sign Anusvara
(0x111b6, 0x111be,), # Sharada Vowel Sign U ..Sharada Vowel Sign O
(0x111c9, 0x111cc,), # Sharada Sandhi Mark ..Sharada Extra Short Vowe
(0x1122f, 0x11231,), # Khojki Vowel Sign U ..Khojki Vowel Sign Ai
(0x11234, 0x11234,), # Khojki Sign Anusvara
(0x11236, 0x11237,), # Khojki Sign Nukta ..Khojki Sign Shadda
(0x1123e, 0x1123e,), # Khojki Sign Sukun
(0x112df, 0x112df,), # Khudawadi Sign Anusvara
(0x112e3, 0x112ea,), # Khudawadi Vowel Sign U ..Khudawadi Sign Virama
(0x11300, 0x11301,), # Grantha Sign Combining A..Grantha Sign Candrabindu
(0x1133b, 0x1133c,), # Combining Bindu Below ..Grantha Sign Nukta
(0x11340, 0x11340,), # Grantha Vowel Sign Ii
(0x11366, 0x1136c,), # Combining Grantha Digit ..Combining Grantha Digit
(0x11370, 0x11374,), # Combining Grantha Letter..Combining Grantha Letter
(0x11438, 0x1143f,), # Newa Vowel Sign U ..Newa Vowel Sign Ai
(0x11442, 0x11444,), # Newa Sign Virama ..Newa Sign Anusvara
(0x11446, 0x11446,), # Newa Sign Nukta
(0x1145e, 0x1145e,), # Newa Sandhi Mark
(0x114b3, 0x114b8,), # Tirhuta Vowel Sign U ..Tirhuta Vowel Sign Vocal
(0x114ba, 0x114ba,), # Tirhuta Vowel Sign Short E
(0x114bf, 0x114c0,), # Tirhuta Sign Candrabindu..Tirhuta Sign Anusvara
(0x114c2, 0x114c3,), # Tirhuta Sign Virama ..Tirhuta Sign Nukta
(0x115b2, 0x115b5,), # Siddham Vowel Sign U ..Siddham Vowel Sign Vocal
(0x115bc, 0x115bd,), # Siddham Sign Candrabindu..Siddham Sign Anusvara
(0x115bf, 0x115c0,), # Siddham Sign Virama ..Siddham Sign Nukta
(0x115dc, 0x115dd,), # Siddham Vowel Sign Alter..Siddham Vowel Sign Alter
(0x11633, 0x1163a,), # Modi Vowel Sign U ..Modi Vowel Sign Ai
(0x1163d, 0x1163d,), # Modi Sign Anusvara
(0x1163f, 0x11640,), # Modi Sign Virama ..Modi Sign Ardhacandra
(0x116ab, 0x116ab,), # Takri Sign Anusvara
(0x116ad, 0x116ad,), # Takri Vowel Sign Aa
(0x116b0, 0x116b5,), # Takri Vowel Sign U ..Takri Vowel Sign Au
(0x116b7, 0x116b7,), # Takri Sign Nukta
(0x1171d, 0x1171f,), # Ahom Consonant Sign Medi..Ahom Consonant Sign Medi
(0x11722, 0x11725,), # Ahom Vowel Sign I ..Ahom Vowel Sign Uu
(0x11727, 0x1172b,), # Ahom Vowel Sign Aw ..Ahom Sign Killer
(0x1182f, 0x11837,), # Dogra Vowel Sign U ..Dogra Sign Anusvara
(0x11839, 0x1183a,), # Dogra Sign Virama ..Dogra Sign Nukta
(0x11a01, 0x11a0a,), # Zanabazar Square Vowel S..Zanabazar Square Vowel L
(0x11a33, 0x11a38,), # Zanabazar Square Final C..Zanabazar Square Sign An
(0x11a3b, 0x11a3e,), # Zanabazar Square Cluster..Zanabazar Square Cluster
(0x11a47, 0x11a47,), # Zanabazar Square Subjoiner
(0x11a51, 0x11a56,), # Soyombo Vowel Sign I ..Soyombo Vowel Sign Oe
(0x11a59, 0x11a5b,), # Soyombo Vowel Sign Vocal..Soyombo Vowel Length Mar
(0x11a8a, 0x11a96,), # Soyombo Final Consonant ..Soyombo Sign Anusvara
(0x11a98, 0x11a99,), # Soyombo Gemination Mark ..Soyombo Subjoiner
(0x11c30, 0x11c36,), # Bhaiksuki Vowel Sign I ..Bhaiksuki Vowel Sign Voc
(0x11c38, 0x11c3d,), # Bhaiksuki Vowel Sign E ..Bhaiksuki Sign Anusvara
(0x11c3f, 0x11c3f,), # Bhaiksuki Sign Virama
(0x11c92, 0x11ca7,), # Marchen Subjoined Letter..Marchen Subjoined Letter
(0x11caa, 0x11cb0,), # Marchen Subjoined Letter..Marchen Vowel Sign Aa
(0x11cb2, 0x11cb3,), # Marchen Vowel Sign U ..Marchen Vowel Sign E
(0x11cb5, 0x11cb6,), # Marchen Sign Anusvara ..Marchen Sign Candrabindu
(0x11d31, 0x11d36,), # Masaram Gondi Vowel Sign..Masaram Gondi Vowel Sign
(0x11d3a, 0x11d3a,), # Masaram Gondi Vowel Sign E
(0x11d3c, 0x11d3d,), # Masaram Gondi Vowel Sign..Masaram Gondi Vowel Sign
(0x11d3f, 0x11d45,), # Masaram Gondi Vowel Sign..Masaram Gondi Virama
(0x11d47, 0x11d47,), # Masaram Gondi Ra-kara
(0x11d90, 0x11d91,), # Gunjala Gondi Vowel Sign..Gunjala Gondi Vowel Sign
(0x11d95, 0x11d95,), # Gunjala Gondi Sign Anusvara
(0x11d97, 0x11d97,), # Gunjala Gondi Virama
(0x11ef3, 0x11ef4,), # Makasar Vowel Sign I ..Makasar Vowel Sign U
(0x16af0, 0x16af4,), # Bassa Vah Combining High..Bassa Vah Combining High
(0x16b30, 0x16b36,), # Pahawh Hmong Mark Cim Tu..Pahawh Hmong Mark Cim Ta
(0x16f8f, 0x16f92,), # Miao Tone Right ..Miao Tone Below
(0x1bc9d, 0x1bc9e,), # Duployan Thick Letter Se..Duployan Double Mark
(0x1d167, 0x1d169,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d17b, 0x1d182,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d185, 0x1d18b,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d1aa, 0x1d1ad,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d242, 0x1d244,), # Combining Greek Musical ..Combining Greek Musical
(0x1da00, 0x1da36,), # Signwriting Head Rim ..Signwriting Air Sucking
(0x1da3b, 0x1da6c,), # Signwriting Mouth Closed..Signwriting Excitement
(0x1da75, 0x1da75,), # Signwriting Upper Body Tilting From Hip Joints
(0x1da84, 0x1da84,), # Signwriting Location Head Neck
(0x1da9b, 0x1da9f,), # Signwriting Fill Modifie..Signwriting Fill Modifie
(0x1daa1, 0x1daaf,), # Signwriting Rotation Mod..Signwriting Rotation Mod
(0x1e000, 0x1e006,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e008, 0x1e018,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e01b, 0x1e021,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e023, 0x1e024,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e026, 0x1e02a,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e8d0, 0x1e8d6,), # Mende Kikakui Combining ..Mende Kikakui Combining
(0x1e944, 0x1e94a,), # Adlam Alif Lengthener ..Adlam Nukta
(0xe0100, 0xe01ef,), # Variation Selector-17 ..Variation Selector-256
),
'12.0.0': (
# Source: DerivedGeneralCategory-12.0.0.txt
# Date: 2019-01-22, 08:18:28 GMT
#
(0x00300, 0x0036f,), # Combining Grave Accent ..Combining Latin Small Le
(0x00483, 0x00489,), # Combining Cyrillic Titlo..Combining Cyrillic Milli
(0x00591, 0x005bd,), # Hebrew Accent Etnahta ..Hebrew Point Meteg
(0x005bf, 0x005bf,), # Hebrew Point Rafe
(0x005c1, 0x005c2,), # Hebrew Point Shin Dot ..Hebrew Point Sin Dot
(0x005c4, 0x005c5,), # Hebrew Mark Upper Dot ..Hebrew Mark Lower Dot
(0x005c7, 0x005c7,), # Hebrew Point Qamats Qatan
(0x00610, 0x0061a,), # Arabic Sign Sallallahou ..Arabic Small Kasra
(0x0064b, 0x0065f,), # Arabic Fathatan ..Arabic Wavy Hamza Below
(0x00670, 0x00670,), # Arabic Letter Superscript Alef
(0x006d6, 0x006dc,), # Arabic Small High Ligatu..Arabic Small High Seen
(0x006df, 0x006e4,), # Arabic Small High Rounde..Arabic Small High Madda
(0x006e7, 0x006e8,), # Arabic Small High Yeh ..Arabic Small High Noon
(0x006ea, 0x006ed,), # Arabic Empty Centre Low ..Arabic Small Low Meem
(0x00711, 0x00711,), # Syriac Letter Superscript Alaph
(0x00730, 0x0074a,), # Syriac Pthaha Above ..Syriac Barrekh
(0x007a6, 0x007b0,), # Thaana Abafili ..Thaana Sukun
(0x007eb, 0x007f3,), # Nko Combining Short High..Nko Combining Double Dot
(0x007fd, 0x007fd,), # Nko Dantayalan
(0x00816, 0x00819,), # Samaritan Mark In ..Samaritan Mark Dagesh
(0x0081b, 0x00823,), # Samaritan Mark Epentheti..Samaritan Vowel Sign A
(0x00825, 0x00827,), # Samaritan Vowel Sign Sho..Samaritan Vowel Sign U
(0x00829, 0x0082d,), # Samaritan Vowel Sign Lon..Samaritan Mark Nequdaa
(0x00859, 0x0085b,), # Mandaic Affrication Mark..Mandaic Gemination Mark
(0x008d3, 0x008e1,), # Arabic Small Low Waw ..Arabic Small High Sign S
(0x008e3, 0x00902,), # Arabic Turned Damma Belo..Devanagari Sign Anusvara
(0x0093a, 0x0093a,), # Devanagari Vowel Sign Oe
(0x0093c, 0x0093c,), # Devanagari Sign Nukta
(0x00941, 0x00948,), # Devanagari Vowel Sign U ..Devanagari Vowel Sign Ai
(0x0094d, 0x0094d,), # Devanagari Sign Virama
(0x00951, 0x00957,), # Devanagari Stress Sign U..Devanagari Vowel Sign Uu
(0x00962, 0x00963,), # Devanagari Vowel Sign Vo..Devanagari Vowel Sign Vo
(0x00981, 0x00981,), # Bengali Sign Candrabindu
(0x009bc, 0x009bc,), # Bengali Sign Nukta
(0x009c1, 0x009c4,), # Bengali Vowel Sign U ..Bengali Vowel Sign Vocal
(0x009cd, 0x009cd,), # Bengali Sign Virama
(0x009e2, 0x009e3,), # Bengali Vowel Sign Vocal..Bengali Vowel Sign Vocal
(0x009fe, 0x009fe,), # Bengali Sandhi Mark
(0x00a01, 0x00a02,), # Gurmukhi Sign Adak Bindi..Gurmukhi Sign Bindi
(0x00a3c, 0x00a3c,), # Gurmukhi Sign Nukta
(0x00a41, 0x00a42,), # Gurmukhi Vowel Sign U ..Gurmukhi Vowel Sign Uu
(0x00a47, 0x00a48,), # Gurmukhi Vowel Sign Ee ..Gurmukhi Vowel Sign Ai
(0x00a4b, 0x00a4d,), # Gurmukhi Vowel Sign Oo ..Gurmukhi Sign Virama
(0x00a51, 0x00a51,), # Gurmukhi Sign Udaat
(0x00a70, 0x00a71,), # Gurmukhi Tippi ..Gurmukhi Addak
(0x00a75, 0x00a75,), # Gurmukhi Sign Yakash
(0x00a81, 0x00a82,), # Gujarati Sign Candrabind..Gujarati Sign Anusvara
(0x00abc, 0x00abc,), # Gujarati Sign Nukta
(0x00ac1, 0x00ac5,), # Gujarati Vowel Sign U ..Gujarati Vowel Sign Cand
(0x00ac7, 0x00ac8,), # Gujarati Vowel Sign E ..Gujarati Vowel Sign Ai
(0x00acd, 0x00acd,), # Gujarati Sign Virama
(0x00ae2, 0x00ae3,), # Gujarati Vowel Sign Voca..Gujarati Vowel Sign Voca
(0x00afa, 0x00aff,), # Gujarati Sign Sukun ..Gujarati Sign Two-circle
(0x00b01, 0x00b01,), # Oriya Sign Candrabindu
(0x00b3c, 0x00b3c,), # Oriya Sign Nukta
(0x00b3f, 0x00b3f,), # Oriya Vowel Sign I
(0x00b41, 0x00b44,), # Oriya Vowel Sign U ..Oriya Vowel Sign Vocalic
(0x00b4d, 0x00b4d,), # Oriya Sign Virama
(0x00b56, 0x00b56,), # Oriya Ai Length Mark
(0x00b62, 0x00b63,), # Oriya Vowel Sign Vocalic..Oriya Vowel Sign Vocalic
(0x00b82, 0x00b82,), # Tamil Sign Anusvara
(0x00bc0, 0x00bc0,), # Tamil Vowel Sign Ii
(0x00bcd, 0x00bcd,), # Tamil Sign Virama
(0x00c00, 0x00c00,), # Telugu Sign Combining Candrabindu Above
(0x00c04, 0x00c04,), # Telugu Sign Combining Anusvara Above
(0x00c3e, 0x00c40,), # Telugu Vowel Sign Aa ..Telugu Vowel Sign Ii
(0x00c46, 0x00c48,), # Telugu Vowel Sign E ..Telugu Vowel Sign Ai
(0x00c4a, 0x00c4d,), # Telugu Vowel Sign O ..Telugu Sign Virama
(0x00c55, 0x00c56,), # Telugu Length Mark ..Telugu Ai Length Mark
(0x00c62, 0x00c63,), # Telugu Vowel Sign Vocali..Telugu Vowel Sign Vocali
(0x00c81, 0x00c81,), # Kannada Sign Candrabindu
(0x00cbc, 0x00cbc,), # Kannada Sign Nukta
(0x00cbf, 0x00cbf,), # Kannada Vowel Sign I
(0x00cc6, 0x00cc6,), # Kannada Vowel Sign E
(0x00ccc, 0x00ccd,), # Kannada Vowel Sign Au ..Kannada Sign Virama
(0x00ce2, 0x00ce3,), # Kannada Vowel Sign Vocal..Kannada Vowel Sign Vocal
(0x00d00, 0x00d01,), # Malayalam Sign Combining..Malayalam Sign Candrabin
(0x00d3b, 0x00d3c,), # Malayalam Sign Vertical ..Malayalam Sign Circular
(0x00d41, 0x00d44,), # Malayalam Vowel Sign U ..Malayalam Vowel Sign Voc
(0x00d4d, 0x00d4d,), # Malayalam Sign Virama
(0x00d62, 0x00d63,), # Malayalam Vowel Sign Voc..Malayalam Vowel Sign Voc
(0x00dca, 0x00dca,), # Sinhala Sign Al-lakuna
(0x00dd2, 0x00dd4,), # Sinhala Vowel Sign Ketti..Sinhala Vowel Sign Ketti
(0x00dd6, 0x00dd6,), # Sinhala Vowel Sign Diga Paa-pilla
(0x00e31, 0x00e31,), # Thai Character Mai Han-akat
(0x00e34, 0x00e3a,), # Thai Character Sara I ..Thai Character Phinthu
(0x00e47, 0x00e4e,), # Thai Character Maitaikhu..Thai Character Yamakkan
(0x00eb1, 0x00eb1,), # Lao Vowel Sign Mai Kan
(0x00eb4, 0x00ebc,), # Lao Vowel Sign I ..Lao Semivowel Sign Lo
(0x00ec8, 0x00ecd,), # Lao Tone Mai Ek ..Lao Niggahita
(0x00f18, 0x00f19,), # Tibetan Astrological Sig..Tibetan Astrological Sig
(0x00f35, 0x00f35,), # Tibetan Mark Ngas Bzung Nyi Zla
(0x00f37, 0x00f37,), # Tibetan Mark Ngas Bzung Sgor Rtags
(0x00f39, 0x00f39,), # Tibetan Mark Tsa -phru
(0x00f71, 0x00f7e,), # Tibetan Vowel Sign Aa ..Tibetan Sign Rjes Su Nga
(0x00f80, 0x00f84,), # Tibetan Vowel Sign Rever..Tibetan Mark Halanta
(0x00f86, 0x00f87,), # Tibetan Sign Lci Rtags ..Tibetan Sign Yang Rtags
(0x00f8d, 0x00f97,), # Tibetan Subjoined Sign L..Tibetan Subjoined Letter
(0x00f99, 0x00fbc,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00fc6, 0x00fc6,), # Tibetan Symbol Padma Gdan
(0x0102d, 0x01030,), # Myanmar Vowel Sign I ..Myanmar Vowel Sign Uu
(0x01032, 0x01037,), # Myanmar Vowel Sign Ai ..Myanmar Sign Dot Below
(0x01039, 0x0103a,), # Myanmar Sign Virama ..Myanmar Sign Asat
(0x0103d, 0x0103e,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01058, 0x01059,), # Myanmar Vowel Sign Vocal..Myanmar Vowel Sign Vocal
(0x0105e, 0x01060,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01071, 0x01074,), # Myanmar Vowel Sign Geba ..Myanmar Vowel Sign Kayah
(0x01082, 0x01082,), # Myanmar Consonant Sign Shan Medial Wa
(0x01085, 0x01086,), # Myanmar Vowel Sign Shan ..Myanmar Vowel Sign Shan
(0x0108d, 0x0108d,), # Myanmar Sign Shan Council Emphatic Tone
(0x0109d, 0x0109d,), # Myanmar Vowel Sign Aiton Ai
(0x0135d, 0x0135f,), # Ethiopic Combining Gemin..Ethiopic Combining Gemin
(0x01712, 0x01714,), # Tagalog Vowel Sign I ..Tagalog Sign Virama
(0x01732, 0x01734,), # Hanunoo Vowel Sign I ..Hanunoo Sign Pamudpod
(0x01752, 0x01753,), # Buhid Vowel Sign I ..Buhid Vowel Sign U
(0x01772, 0x01773,), # Tagbanwa Vowel Sign I ..Tagbanwa Vowel Sign U
(0x017b4, 0x017b5,), # Khmer Vowel Inherent Aq ..Khmer Vowel Inherent Aa
(0x017b7, 0x017bd,), # Khmer Vowel Sign I ..Khmer Vowel Sign Ua
(0x017c6, 0x017c6,), # Khmer Sign Nikahit
(0x017c9, 0x017d3,), # Khmer Sign Muusikatoan ..Khmer Sign Bathamasat
(0x017dd, 0x017dd,), # Khmer Sign Atthacan
(0x0180b, 0x0180d,), # Mongolian Free Variation..Mongolian Free Variation
(0x01885, 0x01886,), # Mongolian Letter Ali Gal..Mongolian Letter Ali Gal
(0x018a9, 0x018a9,), # Mongolian Letter Ali Gali Dagalga
(0x01920, 0x01922,), # Limbu Vowel Sign A ..Limbu Vowel Sign U
(0x01927, 0x01928,), # Limbu Vowel Sign E ..Limbu Vowel Sign O
(0x01932, 0x01932,), # Limbu Small Letter Anusvara
(0x01939, 0x0193b,), # Limbu Sign Mukphreng ..Limbu Sign Sa-i
(0x01a17, 0x01a18,), # Buginese Vowel Sign I ..Buginese Vowel Sign U
(0x01a1b, 0x01a1b,), # Buginese Vowel Sign Ae
(0x01a56, 0x01a56,), # Tai Tham Consonant Sign Medial La
(0x01a58, 0x01a5e,), # Tai Tham Sign Mai Kang L..Tai Tham Consonant Sign
(0x01a60, 0x01a60,), # Tai Tham Sign Sakot
(0x01a62, 0x01a62,), # Tai Tham Vowel Sign Mai Sat
(0x01a65, 0x01a6c,), # Tai Tham Vowel Sign I ..Tai Tham Vowel Sign Oa B
(0x01a73, 0x01a7c,), # Tai Tham Vowel Sign Oa A..Tai Tham Sign Khuen-lue
(0x01a7f, 0x01a7f,), # Tai Tham Combining Cryptogrammic Dot
(0x01ab0, 0x01abe,), # Combining Doubled Circum..Combining Parentheses Ov
(0x01b00, 0x01b03,), # Balinese Sign Ulu Ricem ..Balinese Sign Surang
(0x01b34, 0x01b34,), # Balinese Sign Rerekan
(0x01b36, 0x01b3a,), # Balinese Vowel Sign Ulu ..Balinese Vowel Sign Ra R
(0x01b3c, 0x01b3c,), # Balinese Vowel Sign La Lenga
(0x01b42, 0x01b42,), # Balinese Vowel Sign Pepet
(0x01b6b, 0x01b73,), # Balinese Musical Symbol ..Balinese Musical Symbol
(0x01b80, 0x01b81,), # Sundanese Sign Panyecek ..Sundanese Sign Panglayar
(0x01ba2, 0x01ba5,), # Sundanese Consonant Sign..Sundanese Vowel Sign Pan
(0x01ba8, 0x01ba9,), # Sundanese Vowel Sign Pam..Sundanese Vowel Sign Pan
(0x01bab, 0x01bad,), # Sundanese Sign Virama ..Sundanese Consonant Sign
(0x01be6, 0x01be6,), # Batak Sign Tompi
(0x01be8, 0x01be9,), # Batak Vowel Sign Pakpak ..Batak Vowel Sign Ee
(0x01bed, 0x01bed,), # Batak Vowel Sign Karo O
(0x01bef, 0x01bf1,), # Batak Vowel Sign U For S..Batak Consonant Sign H
(0x01c2c, 0x01c33,), # Lepcha Vowel Sign E ..Lepcha Consonant Sign T
(0x01c36, 0x01c37,), # Lepcha Sign Ran ..Lepcha Sign Nukta
(0x01cd0, 0x01cd2,), # Vedic Tone Karshana ..Vedic Tone Prenkha
(0x01cd4, 0x01ce0,), # Vedic Sign Yajurvedic Mi..Vedic Tone Rigvedic Kash
(0x01ce2, 0x01ce8,), # Vedic Sign Visarga Svari..Vedic Sign Visarga Anuda
(0x01ced, 0x01ced,), # Vedic Sign Tiryak
(0x01cf4, 0x01cf4,), # Vedic Tone Candra Above
(0x01cf8, 0x01cf9,), # Vedic Tone Ring Above ..Vedic Tone Double Ring A
(0x01dc0, 0x01df9,), # Combining Dotted Grave A..Combining Wide Inverted
(0x01dfb, 0x01dff,), # Combining Deletion Mark ..Combining Right Arrowhea
(0x020d0, 0x020f0,), # Combining Left Harpoon A..Combining Asterisk Above
(0x02cef, 0x02cf1,), # Coptic Combining Ni Abov..Coptic Combining Spiritu
(0x02d7f, 0x02d7f,), # Tifinagh Consonant Joiner
(0x02de0, 0x02dff,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0302a, 0x0302d,), # Ideographic Level Tone M..Ideographic Entering Ton
(0x03099, 0x0309a,), # Combining Katakana-hirag..Combining Katakana-hirag
(0x0a66f, 0x0a672,), # Combining Cyrillic Vzmet..Combining Cyrillic Thous
(0x0a674, 0x0a67d,), # Combining Cyrillic Lette..Combining Cyrillic Payer
(0x0a69e, 0x0a69f,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0a6f0, 0x0a6f1,), # Bamum Combining Mark Koq..Bamum Combining Mark Tuk
(0x0a802, 0x0a802,), # Syloti Nagri Sign Dvisvara
(0x0a806, 0x0a806,), # Syloti Nagri Sign Hasanta
(0x0a80b, 0x0a80b,), # Syloti Nagri Sign Anusvara
(0x0a825, 0x0a826,), # Syloti Nagri Vowel Sign ..Syloti Nagri Vowel Sign
(0x0a8c4, 0x0a8c5,), # Saurashtra Sign Virama ..Saurashtra Sign Candrabi
(0x0a8e0, 0x0a8f1,), # Combining Devanagari Dig..Combining Devanagari Sig
(0x0a8ff, 0x0a8ff,), # Devanagari Vowel Sign Ay
(0x0a926, 0x0a92d,), # Kayah Li Vowel Ue ..Kayah Li Tone Calya Plop
(0x0a947, 0x0a951,), # Rejang Vowel Sign I ..Rejang Consonant Sign R
(0x0a980, 0x0a982,), # Javanese Sign Panyangga ..Javanese Sign Layar
(0x0a9b3, 0x0a9b3,), # Javanese Sign Cecak Telu
(0x0a9b6, 0x0a9b9,), # Javanese Vowel Sign Wulu..Javanese Vowel Sign Suku
(0x0a9bc, 0x0a9bd,), # Javanese Vowel Sign Pepe..Javanese Consonant Sign
(0x0a9e5, 0x0a9e5,), # Myanmar Sign Shan Saw
(0x0aa29, 0x0aa2e,), # Cham Vowel Sign Aa ..Cham Vowel Sign Oe
(0x0aa31, 0x0aa32,), # Cham Vowel Sign Au ..Cham Vowel Sign Ue
(0x0aa35, 0x0aa36,), # Cham Consonant Sign La ..Cham Consonant Sign Wa
(0x0aa43, 0x0aa43,), # Cham Consonant Sign Final Ng
(0x0aa4c, 0x0aa4c,), # Cham Consonant Sign Final M
(0x0aa7c, 0x0aa7c,), # Myanmar Sign Tai Laing Tone-2
(0x0aab0, 0x0aab0,), # Tai Viet Mai Kang
(0x0aab2, 0x0aab4,), # Tai Viet Vowel I ..Tai Viet Vowel U
(0x0aab7, 0x0aab8,), # Tai Viet Mai Khit ..Tai Viet Vowel Ia
(0x0aabe, 0x0aabf,), # Tai Viet Vowel Am ..Tai Viet Tone Mai Ek
(0x0aac1, 0x0aac1,), # Tai Viet Tone Mai Tho
(0x0aaec, 0x0aaed,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0aaf6, 0x0aaf6,), # Meetei Mayek Virama
(0x0abe5, 0x0abe5,), # Meetei Mayek Vowel Sign Anap
(0x0abe8, 0x0abe8,), # Meetei Mayek Vowel Sign Unap
(0x0abed, 0x0abed,), # Meetei Mayek Apun Iyek
(0x0fb1e, 0x0fb1e,), # Hebrew Point Judeo-spanish Varika
(0x0fe00, 0x0fe0f,), # Variation Selector-1 ..Variation Selector-16
(0x0fe20, 0x0fe2f,), # Combining Ligature Left ..Combining Cyrillic Titlo
(0x101fd, 0x101fd,), # Phaistos Disc Sign Combining Oblique Stroke
(0x102e0, 0x102e0,), # Coptic Epact Thousands Mark
(0x10376, 0x1037a,), # Combining Old Permic Let..Combining Old Permic Let
(0x10a01, 0x10a03,), # Kharoshthi Vowel Sign I ..Kharoshthi Vowel Sign Vo
(0x10a05, 0x10a06,), # Kharoshthi Vowel Sign E ..Kharoshthi Vowel Sign O
(0x10a0c, 0x10a0f,), # Kharoshthi Vowel Length ..Kharoshthi Sign Visarga
(0x10a38, 0x10a3a,), # Kharoshthi Sign Bar Abov..Kharoshthi Sign Dot Belo
(0x10a3f, 0x10a3f,), # Kharoshthi Virama
(0x10ae5, 0x10ae6,), # Manichaean Abbreviation ..Manichaean Abbreviation
(0x10d24, 0x10d27,), # Hanifi Rohingya Sign Har..Hanifi Rohingya Sign Tas
(0x10f46, 0x10f50,), # Sogdian Combining Dot Be..Sogdian Combining Stroke
(0x11001, 0x11001,), # Brahmi Sign Anusvara
(0x11038, 0x11046,), # Brahmi Vowel Sign Aa ..Brahmi Virama
(0x1107f, 0x11081,), # Brahmi Number Joiner ..Kaithi Sign Anusvara
(0x110b3, 0x110b6,), # Kaithi Vowel Sign U ..Kaithi Vowel Sign Ai
(0x110b9, 0x110ba,), # Kaithi Sign Virama ..Kaithi Sign Nukta
(0x11100, 0x11102,), # Chakma Sign Candrabindu ..Chakma Sign Visarga
(0x11127, 0x1112b,), # Chakma Vowel Sign A ..Chakma Vowel Sign Uu
(0x1112d, 0x11134,), # Chakma Vowel Sign Ai ..Chakma Maayyaa
(0x11173, 0x11173,), # Mahajani Sign Nukta
(0x11180, 0x11181,), # Sharada Sign Candrabindu..Sharada Sign Anusvara
(0x111b6, 0x111be,), # Sharada Vowel Sign U ..Sharada Vowel Sign O
(0x111c9, 0x111cc,), # Sharada Sandhi Mark ..Sharada Extra Short Vowe
(0x1122f, 0x11231,), # Khojki Vowel Sign U ..Khojki Vowel Sign Ai
(0x11234, 0x11234,), # Khojki Sign Anusvara
(0x11236, 0x11237,), # Khojki Sign Nukta ..Khojki Sign Shadda
(0x1123e, 0x1123e,), # Khojki Sign Sukun
(0x112df, 0x112df,), # Khudawadi Sign Anusvara
(0x112e3, 0x112ea,), # Khudawadi Vowel Sign U ..Khudawadi Sign Virama
(0x11300, 0x11301,), # Grantha Sign Combining A..Grantha Sign Candrabindu
(0x1133b, 0x1133c,), # Combining Bindu Below ..Grantha Sign Nukta
(0x11340, 0x11340,), # Grantha Vowel Sign Ii
(0x11366, 0x1136c,), # Combining Grantha Digit ..Combining Grantha Digit
(0x11370, 0x11374,), # Combining Grantha Letter..Combining Grantha Letter
(0x11438, 0x1143f,), # Newa Vowel Sign U ..Newa Vowel Sign Ai
(0x11442, 0x11444,), # Newa Sign Virama ..Newa Sign Anusvara
(0x11446, 0x11446,), # Newa Sign Nukta
(0x1145e, 0x1145e,), # Newa Sandhi Mark
(0x114b3, 0x114b8,), # Tirhuta Vowel Sign U ..Tirhuta Vowel Sign Vocal
(0x114ba, 0x114ba,), # Tirhuta Vowel Sign Short E
(0x114bf, 0x114c0,), # Tirhuta Sign Candrabindu..Tirhuta Sign Anusvara
(0x114c2, 0x114c3,), # Tirhuta Sign Virama ..Tirhuta Sign Nukta
(0x115b2, 0x115b5,), # Siddham Vowel Sign U ..Siddham Vowel Sign Vocal
(0x115bc, 0x115bd,), # Siddham Sign Candrabindu..Siddham Sign Anusvara
(0x115bf, 0x115c0,), # Siddham Sign Virama ..Siddham Sign Nukta
(0x115dc, 0x115dd,), # Siddham Vowel Sign Alter..Siddham Vowel Sign Alter
(0x11633, 0x1163a,), # Modi Vowel Sign U ..Modi Vowel Sign Ai
(0x1163d, 0x1163d,), # Modi Sign Anusvara
(0x1163f, 0x11640,), # Modi Sign Virama ..Modi Sign Ardhacandra
(0x116ab, 0x116ab,), # Takri Sign Anusvara
(0x116ad, 0x116ad,), # Takri Vowel Sign Aa
(0x116b0, 0x116b5,), # Takri Vowel Sign U ..Takri Vowel Sign Au
(0x116b7, 0x116b7,), # Takri Sign Nukta
(0x1171d, 0x1171f,), # Ahom Consonant Sign Medi..Ahom Consonant Sign Medi
(0x11722, 0x11725,), # Ahom Vowel Sign I ..Ahom Vowel Sign Uu
(0x11727, 0x1172b,), # Ahom Vowel Sign Aw ..Ahom Sign Killer
(0x1182f, 0x11837,), # Dogra Vowel Sign U ..Dogra Sign Anusvara
(0x11839, 0x1183a,), # Dogra Sign Virama ..Dogra Sign Nukta
(0x119d4, 0x119d7,), # Nandinagari Vowel Sign U..Nandinagari Vowel Sign V
(0x119da, 0x119db,), # Nandinagari Vowel Sign E..Nandinagari Vowel Sign A
(0x119e0, 0x119e0,), # Nandinagari Sign Virama
(0x11a01, 0x11a0a,), # Zanabazar Square Vowel S..Zanabazar Square Vowel L
(0x11a33, 0x11a38,), # Zanabazar Square Final C..Zanabazar Square Sign An
(0x11a3b, 0x11a3e,), # Zanabazar Square Cluster..Zanabazar Square Cluster
(0x11a47, 0x11a47,), # Zanabazar Square Subjoiner
(0x11a51, 0x11a56,), # Soyombo Vowel Sign I ..Soyombo Vowel Sign Oe
(0x11a59, 0x11a5b,), # Soyombo Vowel Sign Vocal..Soyombo Vowel Length Mar
(0x11a8a, 0x11a96,), # Soyombo Final Consonant ..Soyombo Sign Anusvara
(0x11a98, 0x11a99,), # Soyombo Gemination Mark ..Soyombo Subjoiner
(0x11c30, 0x11c36,), # Bhaiksuki Vowel Sign I ..Bhaiksuki Vowel Sign Voc
(0x11c38, 0x11c3d,), # Bhaiksuki Vowel Sign E ..Bhaiksuki Sign Anusvara
(0x11c3f, 0x11c3f,), # Bhaiksuki Sign Virama
(0x11c92, 0x11ca7,), # Marchen Subjoined Letter..Marchen Subjoined Letter
(0x11caa, 0x11cb0,), # Marchen Subjoined Letter..Marchen Vowel Sign Aa
(0x11cb2, 0x11cb3,), # Marchen Vowel Sign U ..Marchen Vowel Sign E
(0x11cb5, 0x11cb6,), # Marchen Sign Anusvara ..Marchen Sign Candrabindu
(0x11d31, 0x11d36,), # Masaram Gondi Vowel Sign..Masaram Gondi Vowel Sign
(0x11d3a, 0x11d3a,), # Masaram Gondi Vowel Sign E
(0x11d3c, 0x11d3d,), # Masaram Gondi Vowel Sign..Masaram Gondi Vowel Sign
(0x11d3f, 0x11d45,), # Masaram Gondi Vowel Sign..Masaram Gondi Virama
(0x11d47, 0x11d47,), # Masaram Gondi Ra-kara
(0x11d90, 0x11d91,), # Gunjala Gondi Vowel Sign..Gunjala Gondi Vowel Sign
(0x11d95, 0x11d95,), # Gunjala Gondi Sign Anusvara
(0x11d97, 0x11d97,), # Gunjala Gondi Virama
(0x11ef3, 0x11ef4,), # Makasar Vowel Sign I ..Makasar Vowel Sign U
(0x16af0, 0x16af4,), # Bassa Vah Combining High..Bassa Vah Combining High
(0x16b30, 0x16b36,), # Pahawh Hmong Mark Cim Tu..Pahawh Hmong Mark Cim Ta
(0x16f4f, 0x16f4f,), # Miao Sign Consonant Modifier Bar
(0x16f8f, 0x16f92,), # Miao Tone Right ..Miao Tone Below
(0x1bc9d, 0x1bc9e,), # Duployan Thick Letter Se..Duployan Double Mark
(0x1d167, 0x1d169,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d17b, 0x1d182,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d185, 0x1d18b,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d1aa, 0x1d1ad,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d242, 0x1d244,), # Combining Greek Musical ..Combining Greek Musical
(0x1da00, 0x1da36,), # Signwriting Head Rim ..Signwriting Air Sucking
(0x1da3b, 0x1da6c,), # Signwriting Mouth Closed..Signwriting Excitement
(0x1da75, 0x1da75,), # Signwriting Upper Body Tilting From Hip Joints
(0x1da84, 0x1da84,), # Signwriting Location Head Neck
(0x1da9b, 0x1da9f,), # Signwriting Fill Modifie..Signwriting Fill Modifie
(0x1daa1, 0x1daaf,), # Signwriting Rotation Mod..Signwriting Rotation Mod
(0x1e000, 0x1e006,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e008, 0x1e018,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e01b, 0x1e021,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e023, 0x1e024,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e026, 0x1e02a,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e130, 0x1e136,), # Nyiakeng Puachue Hmong T..Nyiakeng Puachue Hmong T
(0x1e2ec, 0x1e2ef,), # Wancho Tone Tup ..Wancho Tone Koini
(0x1e8d0, 0x1e8d6,), # Mende Kikakui Combining ..Mende Kikakui Combining
(0x1e944, 0x1e94a,), # Adlam Alif Lengthener ..Adlam Nukta
(0xe0100, 0xe01ef,), # Variation Selector-17 ..Variation Selector-256
),
'12.1.0': (
# Source: DerivedGeneralCategory-12.1.0.txt
# Date: 2019-03-10, 10:53:08 GMT
#
(0x00300, 0x0036f,), # Combining Grave Accent ..Combining Latin Small Le
(0x00483, 0x00489,), # Combining Cyrillic Titlo..Combining Cyrillic Milli
(0x00591, 0x005bd,), # Hebrew Accent Etnahta ..Hebrew Point Meteg
(0x005bf, 0x005bf,), # Hebrew Point Rafe
(0x005c1, 0x005c2,), # Hebrew Point Shin Dot ..Hebrew Point Sin Dot
(0x005c4, 0x005c5,), # Hebrew Mark Upper Dot ..Hebrew Mark Lower Dot
(0x005c7, 0x005c7,), # Hebrew Point Qamats Qatan
(0x00610, 0x0061a,), # Arabic Sign Sallallahou ..Arabic Small Kasra
(0x0064b, 0x0065f,), # Arabic Fathatan ..Arabic Wavy Hamza Below
(0x00670, 0x00670,), # Arabic Letter Superscript Alef
(0x006d6, 0x006dc,), # Arabic Small High Ligatu..Arabic Small High Seen
(0x006df, 0x006e4,), # Arabic Small High Rounde..Arabic Small High Madda
(0x006e7, 0x006e8,), # Arabic Small High Yeh ..Arabic Small High Noon
(0x006ea, 0x006ed,), # Arabic Empty Centre Low ..Arabic Small Low Meem
(0x00711, 0x00711,), # Syriac Letter Superscript Alaph
(0x00730, 0x0074a,), # Syriac Pthaha Above ..Syriac Barrekh
(0x007a6, 0x007b0,), # Thaana Abafili ..Thaana Sukun
(0x007eb, 0x007f3,), # Nko Combining Short High..Nko Combining Double Dot
(0x007fd, 0x007fd,), # Nko Dantayalan
(0x00816, 0x00819,), # Samaritan Mark In ..Samaritan Mark Dagesh
(0x0081b, 0x00823,), # Samaritan Mark Epentheti..Samaritan Vowel Sign A
(0x00825, 0x00827,), # Samaritan Vowel Sign Sho..Samaritan Vowel Sign U
(0x00829, 0x0082d,), # Samaritan Vowel Sign Lon..Samaritan Mark Nequdaa
(0x00859, 0x0085b,), # Mandaic Affrication Mark..Mandaic Gemination Mark
(0x008d3, 0x008e1,), # Arabic Small Low Waw ..Arabic Small High Sign S
(0x008e3, 0x00902,), # Arabic Turned Damma Belo..Devanagari Sign Anusvara
(0x0093a, 0x0093a,), # Devanagari Vowel Sign Oe
(0x0093c, 0x0093c,), # Devanagari Sign Nukta
(0x00941, 0x00948,), # Devanagari Vowel Sign U ..Devanagari Vowel Sign Ai
(0x0094d, 0x0094d,), # Devanagari Sign Virama
(0x00951, 0x00957,), # Devanagari Stress Sign U..Devanagari Vowel Sign Uu
(0x00962, 0x00963,), # Devanagari Vowel Sign Vo..Devanagari Vowel Sign Vo
(0x00981, 0x00981,), # Bengali Sign Candrabindu
(0x009bc, 0x009bc,), # Bengali Sign Nukta
(0x009c1, 0x009c4,), # Bengali Vowel Sign U ..Bengali Vowel Sign Vocal
(0x009cd, 0x009cd,), # Bengali Sign Virama
(0x009e2, 0x009e3,), # Bengali Vowel Sign Vocal..Bengali Vowel Sign Vocal
(0x009fe, 0x009fe,), # Bengali Sandhi Mark
(0x00a01, 0x00a02,), # Gurmukhi Sign Adak Bindi..Gurmukhi Sign Bindi
(0x00a3c, 0x00a3c,), # Gurmukhi Sign Nukta
(0x00a41, 0x00a42,), # Gurmukhi Vowel Sign U ..Gurmukhi Vowel Sign Uu
(0x00a47, 0x00a48,), # Gurmukhi Vowel Sign Ee ..Gurmukhi Vowel Sign Ai
(0x00a4b, 0x00a4d,), # Gurmukhi Vowel Sign Oo ..Gurmukhi Sign Virama
(0x00a51, 0x00a51,), # Gurmukhi Sign Udaat
(0x00a70, 0x00a71,), # Gurmukhi Tippi ..Gurmukhi Addak
(0x00a75, 0x00a75,), # Gurmukhi Sign Yakash
(0x00a81, 0x00a82,), # Gujarati Sign Candrabind..Gujarati Sign Anusvara
(0x00abc, 0x00abc,), # Gujarati Sign Nukta
(0x00ac1, 0x00ac5,), # Gujarati Vowel Sign U ..Gujarati Vowel Sign Cand
(0x00ac7, 0x00ac8,), # Gujarati Vowel Sign E ..Gujarati Vowel Sign Ai
(0x00acd, 0x00acd,), # Gujarati Sign Virama
(0x00ae2, 0x00ae3,), # Gujarati Vowel Sign Voca..Gujarati Vowel Sign Voca
(0x00afa, 0x00aff,), # Gujarati Sign Sukun ..Gujarati Sign Two-circle
(0x00b01, 0x00b01,), # Oriya Sign Candrabindu
(0x00b3c, 0x00b3c,), # Oriya Sign Nukta
(0x00b3f, 0x00b3f,), # Oriya Vowel Sign I
(0x00b41, 0x00b44,), # Oriya Vowel Sign U ..Oriya Vowel Sign Vocalic
(0x00b4d, 0x00b4d,), # Oriya Sign Virama
(0x00b56, 0x00b56,), # Oriya Ai Length Mark
(0x00b62, 0x00b63,), # Oriya Vowel Sign Vocalic..Oriya Vowel Sign Vocalic
(0x00b82, 0x00b82,), # Tamil Sign Anusvara
(0x00bc0, 0x00bc0,), # Tamil Vowel Sign Ii
(0x00bcd, 0x00bcd,), # Tamil Sign Virama
(0x00c00, 0x00c00,), # Telugu Sign Combining Candrabindu Above
(0x00c04, 0x00c04,), # Telugu Sign Combining Anusvara Above
(0x00c3e, 0x00c40,), # Telugu Vowel Sign Aa ..Telugu Vowel Sign Ii
(0x00c46, 0x00c48,), # Telugu Vowel Sign E ..Telugu Vowel Sign Ai
(0x00c4a, 0x00c4d,), # Telugu Vowel Sign O ..Telugu Sign Virama
(0x00c55, 0x00c56,), # Telugu Length Mark ..Telugu Ai Length Mark
(0x00c62, 0x00c63,), # Telugu Vowel Sign Vocali..Telugu Vowel Sign Vocali
(0x00c81, 0x00c81,), # Kannada Sign Candrabindu
(0x00cbc, 0x00cbc,), # Kannada Sign Nukta
(0x00cbf, 0x00cbf,), # Kannada Vowel Sign I
(0x00cc6, 0x00cc6,), # Kannada Vowel Sign E
(0x00ccc, 0x00ccd,), # Kannada Vowel Sign Au ..Kannada Sign Virama
(0x00ce2, 0x00ce3,), # Kannada Vowel Sign Vocal..Kannada Vowel Sign Vocal
(0x00d00, 0x00d01,), # Malayalam Sign Combining..Malayalam Sign Candrabin
(0x00d3b, 0x00d3c,), # Malayalam Sign Vertical ..Malayalam Sign Circular
(0x00d41, 0x00d44,), # Malayalam Vowel Sign U ..Malayalam Vowel Sign Voc
(0x00d4d, 0x00d4d,), # Malayalam Sign Virama
(0x00d62, 0x00d63,), # Malayalam Vowel Sign Voc..Malayalam Vowel Sign Voc
(0x00dca, 0x00dca,), # Sinhala Sign Al-lakuna
(0x00dd2, 0x00dd4,), # Sinhala Vowel Sign Ketti..Sinhala Vowel Sign Ketti
(0x00dd6, 0x00dd6,), # Sinhala Vowel Sign Diga Paa-pilla
(0x00e31, 0x00e31,), # Thai Character Mai Han-akat
(0x00e34, 0x00e3a,), # Thai Character Sara I ..Thai Character Phinthu
(0x00e47, 0x00e4e,), # Thai Character Maitaikhu..Thai Character Yamakkan
(0x00eb1, 0x00eb1,), # Lao Vowel Sign Mai Kan
(0x00eb4, 0x00ebc,), # Lao Vowel Sign I ..Lao Semivowel Sign Lo
(0x00ec8, 0x00ecd,), # Lao Tone Mai Ek ..Lao Niggahita
(0x00f18, 0x00f19,), # Tibetan Astrological Sig..Tibetan Astrological Sig
(0x00f35, 0x00f35,), # Tibetan Mark Ngas Bzung Nyi Zla
(0x00f37, 0x00f37,), # Tibetan Mark Ngas Bzung Sgor Rtags
(0x00f39, 0x00f39,), # Tibetan Mark Tsa -phru
(0x00f71, 0x00f7e,), # Tibetan Vowel Sign Aa ..Tibetan Sign Rjes Su Nga
(0x00f80, 0x00f84,), # Tibetan Vowel Sign Rever..Tibetan Mark Halanta
(0x00f86, 0x00f87,), # Tibetan Sign Lci Rtags ..Tibetan Sign Yang Rtags
(0x00f8d, 0x00f97,), # Tibetan Subjoined Sign L..Tibetan Subjoined Letter
(0x00f99, 0x00fbc,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00fc6, 0x00fc6,), # Tibetan Symbol Padma Gdan
(0x0102d, 0x01030,), # Myanmar Vowel Sign I ..Myanmar Vowel Sign Uu
(0x01032, 0x01037,), # Myanmar Vowel Sign Ai ..Myanmar Sign Dot Below
(0x01039, 0x0103a,), # Myanmar Sign Virama ..Myanmar Sign Asat
(0x0103d, 0x0103e,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01058, 0x01059,), # Myanmar Vowel Sign Vocal..Myanmar Vowel Sign Vocal
(0x0105e, 0x01060,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01071, 0x01074,), # Myanmar Vowel Sign Geba ..Myanmar Vowel Sign Kayah
(0x01082, 0x01082,), # Myanmar Consonant Sign Shan Medial Wa
(0x01085, 0x01086,), # Myanmar Vowel Sign Shan ..Myanmar Vowel Sign Shan
(0x0108d, 0x0108d,), # Myanmar Sign Shan Council Emphatic Tone
(0x0109d, 0x0109d,), # Myanmar Vowel Sign Aiton Ai
(0x0135d, 0x0135f,), # Ethiopic Combining Gemin..Ethiopic Combining Gemin
(0x01712, 0x01714,), # Tagalog Vowel Sign I ..Tagalog Sign Virama
(0x01732, 0x01734,), # Hanunoo Vowel Sign I ..Hanunoo Sign Pamudpod
(0x01752, 0x01753,), # Buhid Vowel Sign I ..Buhid Vowel Sign U
(0x01772, 0x01773,), # Tagbanwa Vowel Sign I ..Tagbanwa Vowel Sign U
(0x017b4, 0x017b5,), # Khmer Vowel Inherent Aq ..Khmer Vowel Inherent Aa
(0x017b7, 0x017bd,), # Khmer Vowel Sign I ..Khmer Vowel Sign Ua
(0x017c6, 0x017c6,), # Khmer Sign Nikahit
(0x017c9, 0x017d3,), # Khmer Sign Muusikatoan ..Khmer Sign Bathamasat
(0x017dd, 0x017dd,), # Khmer Sign Atthacan
(0x0180b, 0x0180d,), # Mongolian Free Variation..Mongolian Free Variation
(0x01885, 0x01886,), # Mongolian Letter Ali Gal..Mongolian Letter Ali Gal
(0x018a9, 0x018a9,), # Mongolian Letter Ali Gali Dagalga
(0x01920, 0x01922,), # Limbu Vowel Sign A ..Limbu Vowel Sign U
(0x01927, 0x01928,), # Limbu Vowel Sign E ..Limbu Vowel Sign O
(0x01932, 0x01932,), # Limbu Small Letter Anusvara
(0x01939, 0x0193b,), # Limbu Sign Mukphreng ..Limbu Sign Sa-i
(0x01a17, 0x01a18,), # Buginese Vowel Sign I ..Buginese Vowel Sign U
(0x01a1b, 0x01a1b,), # Buginese Vowel Sign Ae
(0x01a56, 0x01a56,), # Tai Tham Consonant Sign Medial La
(0x01a58, 0x01a5e,), # Tai Tham Sign Mai Kang L..Tai Tham Consonant Sign
(0x01a60, 0x01a60,), # Tai Tham Sign Sakot
(0x01a62, 0x01a62,), # Tai Tham Vowel Sign Mai Sat
(0x01a65, 0x01a6c,), # Tai Tham Vowel Sign I ..Tai Tham Vowel Sign Oa B
(0x01a73, 0x01a7c,), # Tai Tham Vowel Sign Oa A..Tai Tham Sign Khuen-lue
(0x01a7f, 0x01a7f,), # Tai Tham Combining Cryptogrammic Dot
(0x01ab0, 0x01abe,), # Combining Doubled Circum..Combining Parentheses Ov
(0x01b00, 0x01b03,), # Balinese Sign Ulu Ricem ..Balinese Sign Surang
(0x01b34, 0x01b34,), # Balinese Sign Rerekan
(0x01b36, 0x01b3a,), # Balinese Vowel Sign Ulu ..Balinese Vowel Sign Ra R
(0x01b3c, 0x01b3c,), # Balinese Vowel Sign La Lenga
(0x01b42, 0x01b42,), # Balinese Vowel Sign Pepet
(0x01b6b, 0x01b73,), # Balinese Musical Symbol ..Balinese Musical Symbol
(0x01b80, 0x01b81,), # Sundanese Sign Panyecek ..Sundanese Sign Panglayar
(0x01ba2, 0x01ba5,), # Sundanese Consonant Sign..Sundanese Vowel Sign Pan
(0x01ba8, 0x01ba9,), # Sundanese Vowel Sign Pam..Sundanese Vowel Sign Pan
(0x01bab, 0x01bad,), # Sundanese Sign Virama ..Sundanese Consonant Sign
(0x01be6, 0x01be6,), # Batak Sign Tompi
(0x01be8, 0x01be9,), # Batak Vowel Sign Pakpak ..Batak Vowel Sign Ee
(0x01bed, 0x01bed,), # Batak Vowel Sign Karo O
(0x01bef, 0x01bf1,), # Batak Vowel Sign U For S..Batak Consonant Sign H
(0x01c2c, 0x01c33,), # Lepcha Vowel Sign E ..Lepcha Consonant Sign T
(0x01c36, 0x01c37,), # Lepcha Sign Ran ..Lepcha Sign Nukta
(0x01cd0, 0x01cd2,), # Vedic Tone Karshana ..Vedic Tone Prenkha
(0x01cd4, 0x01ce0,), # Vedic Sign Yajurvedic Mi..Vedic Tone Rigvedic Kash
(0x01ce2, 0x01ce8,), # Vedic Sign Visarga Svari..Vedic Sign Visarga Anuda
(0x01ced, 0x01ced,), # Vedic Sign Tiryak
(0x01cf4, 0x01cf4,), # Vedic Tone Candra Above
(0x01cf8, 0x01cf9,), # Vedic Tone Ring Above ..Vedic Tone Double Ring A
(0x01dc0, 0x01df9,), # Combining Dotted Grave A..Combining Wide Inverted
(0x01dfb, 0x01dff,), # Combining Deletion Mark ..Combining Right Arrowhea
(0x020d0, 0x020f0,), # Combining Left Harpoon A..Combining Asterisk Above
(0x02cef, 0x02cf1,), # Coptic Combining Ni Abov..Coptic Combining Spiritu
(0x02d7f, 0x02d7f,), # Tifinagh Consonant Joiner
(0x02de0, 0x02dff,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0302a, 0x0302d,), # Ideographic Level Tone M..Ideographic Entering Ton
(0x03099, 0x0309a,), # Combining Katakana-hirag..Combining Katakana-hirag
(0x0a66f, 0x0a672,), # Combining Cyrillic Vzmet..Combining Cyrillic Thous
(0x0a674, 0x0a67d,), # Combining Cyrillic Lette..Combining Cyrillic Payer
(0x0a69e, 0x0a69f,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0a6f0, 0x0a6f1,), # Bamum Combining Mark Koq..Bamum Combining Mark Tuk
(0x0a802, 0x0a802,), # Syloti Nagri Sign Dvisvara
(0x0a806, 0x0a806,), # Syloti Nagri Sign Hasanta
(0x0a80b, 0x0a80b,), # Syloti Nagri Sign Anusvara
(0x0a825, 0x0a826,), # Syloti Nagri Vowel Sign ..Syloti Nagri Vowel Sign
(0x0a8c4, 0x0a8c5,), # Saurashtra Sign Virama ..Saurashtra Sign Candrabi
(0x0a8e0, 0x0a8f1,), # Combining Devanagari Dig..Combining Devanagari Sig
(0x0a8ff, 0x0a8ff,), # Devanagari Vowel Sign Ay
(0x0a926, 0x0a92d,), # Kayah Li Vowel Ue ..Kayah Li Tone Calya Plop
(0x0a947, 0x0a951,), # Rejang Vowel Sign I ..Rejang Consonant Sign R
(0x0a980, 0x0a982,), # Javanese Sign Panyangga ..Javanese Sign Layar
(0x0a9b3, 0x0a9b3,), # Javanese Sign Cecak Telu
(0x0a9b6, 0x0a9b9,), # Javanese Vowel Sign Wulu..Javanese Vowel Sign Suku
(0x0a9bc, 0x0a9bd,), # Javanese Vowel Sign Pepe..Javanese Consonant Sign
(0x0a9e5, 0x0a9e5,), # Myanmar Sign Shan Saw
(0x0aa29, 0x0aa2e,), # Cham Vowel Sign Aa ..Cham Vowel Sign Oe
(0x0aa31, 0x0aa32,), # Cham Vowel Sign Au ..Cham Vowel Sign Ue
(0x0aa35, 0x0aa36,), # Cham Consonant Sign La ..Cham Consonant Sign Wa
(0x0aa43, 0x0aa43,), # Cham Consonant Sign Final Ng
(0x0aa4c, 0x0aa4c,), # Cham Consonant Sign Final M
(0x0aa7c, 0x0aa7c,), # Myanmar Sign Tai Laing Tone-2
(0x0aab0, 0x0aab0,), # Tai Viet Mai Kang
(0x0aab2, 0x0aab4,), # Tai Viet Vowel I ..Tai Viet Vowel U
(0x0aab7, 0x0aab8,), # Tai Viet Mai Khit ..Tai Viet Vowel Ia
(0x0aabe, 0x0aabf,), # Tai Viet Vowel Am ..Tai Viet Tone Mai Ek
(0x0aac1, 0x0aac1,), # Tai Viet Tone Mai Tho
(0x0aaec, 0x0aaed,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0aaf6, 0x0aaf6,), # Meetei Mayek Virama
(0x0abe5, 0x0abe5,), # Meetei Mayek Vowel Sign Anap
(0x0abe8, 0x0abe8,), # Meetei Mayek Vowel Sign Unap
(0x0abed, 0x0abed,), # Meetei Mayek Apun Iyek
(0x0fb1e, 0x0fb1e,), # Hebrew Point Judeo-spanish Varika
(0x0fe00, 0x0fe0f,), # Variation Selector-1 ..Variation Selector-16
(0x0fe20, 0x0fe2f,), # Combining Ligature Left ..Combining Cyrillic Titlo
(0x101fd, 0x101fd,), # Phaistos Disc Sign Combining Oblique Stroke
(0x102e0, 0x102e0,), # Coptic Epact Thousands Mark
(0x10376, 0x1037a,), # Combining Old Permic Let..Combining Old Permic Let
(0x10a01, 0x10a03,), # Kharoshthi Vowel Sign I ..Kharoshthi Vowel Sign Vo
(0x10a05, 0x10a06,), # Kharoshthi Vowel Sign E ..Kharoshthi Vowel Sign O
(0x10a0c, 0x10a0f,), # Kharoshthi Vowel Length ..Kharoshthi Sign Visarga
(0x10a38, 0x10a3a,), # Kharoshthi Sign Bar Abov..Kharoshthi Sign Dot Belo
(0x10a3f, 0x10a3f,), # Kharoshthi Virama
(0x10ae5, 0x10ae6,), # Manichaean Abbreviation ..Manichaean Abbreviation
(0x10d24, 0x10d27,), # Hanifi Rohingya Sign Har..Hanifi Rohingya Sign Tas
(0x10f46, 0x10f50,), # Sogdian Combining Dot Be..Sogdian Combining Stroke
(0x11001, 0x11001,), # Brahmi Sign Anusvara
(0x11038, 0x11046,), # Brahmi Vowel Sign Aa ..Brahmi Virama
(0x1107f, 0x11081,), # Brahmi Number Joiner ..Kaithi Sign Anusvara
(0x110b3, 0x110b6,), # Kaithi Vowel Sign U ..Kaithi Vowel Sign Ai
(0x110b9, 0x110ba,), # Kaithi Sign Virama ..Kaithi Sign Nukta
(0x11100, 0x11102,), # Chakma Sign Candrabindu ..Chakma Sign Visarga
(0x11127, 0x1112b,), # Chakma Vowel Sign A ..Chakma Vowel Sign Uu
(0x1112d, 0x11134,), # Chakma Vowel Sign Ai ..Chakma Maayyaa
(0x11173, 0x11173,), # Mahajani Sign Nukta
(0x11180, 0x11181,), # Sharada Sign Candrabindu..Sharada Sign Anusvara
(0x111b6, 0x111be,), # Sharada Vowel Sign U ..Sharada Vowel Sign O
(0x111c9, 0x111cc,), # Sharada Sandhi Mark ..Sharada Extra Short Vowe
(0x1122f, 0x11231,), # Khojki Vowel Sign U ..Khojki Vowel Sign Ai
(0x11234, 0x11234,), # Khojki Sign Anusvara
(0x11236, 0x11237,), # Khojki Sign Nukta ..Khojki Sign Shadda
(0x1123e, 0x1123e,), # Khojki Sign Sukun
(0x112df, 0x112df,), # Khudawadi Sign Anusvara
(0x112e3, 0x112ea,), # Khudawadi Vowel Sign U ..Khudawadi Sign Virama
(0x11300, 0x11301,), # Grantha Sign Combining A..Grantha Sign Candrabindu
(0x1133b, 0x1133c,), # Combining Bindu Below ..Grantha Sign Nukta
(0x11340, 0x11340,), # Grantha Vowel Sign Ii
(0x11366, 0x1136c,), # Combining Grantha Digit ..Combining Grantha Digit
(0x11370, 0x11374,), # Combining Grantha Letter..Combining Grantha Letter
(0x11438, 0x1143f,), # Newa Vowel Sign U ..Newa Vowel Sign Ai
(0x11442, 0x11444,), # Newa Sign Virama ..Newa Sign Anusvara
(0x11446, 0x11446,), # Newa Sign Nukta
(0x1145e, 0x1145e,), # Newa Sandhi Mark
(0x114b3, 0x114b8,), # Tirhuta Vowel Sign U ..Tirhuta Vowel Sign Vocal
(0x114ba, 0x114ba,), # Tirhuta Vowel Sign Short E
(0x114bf, 0x114c0,), # Tirhuta Sign Candrabindu..Tirhuta Sign Anusvara
(0x114c2, 0x114c3,), # Tirhuta Sign Virama ..Tirhuta Sign Nukta
(0x115b2, 0x115b5,), # Siddham Vowel Sign U ..Siddham Vowel Sign Vocal
(0x115bc, 0x115bd,), # Siddham Sign Candrabindu..Siddham Sign Anusvara
(0x115bf, 0x115c0,), # Siddham Sign Virama ..Siddham Sign Nukta
(0x115dc, 0x115dd,), # Siddham Vowel Sign Alter..Siddham Vowel Sign Alter
(0x11633, 0x1163a,), # Modi Vowel Sign U ..Modi Vowel Sign Ai
(0x1163d, 0x1163d,), # Modi Sign Anusvara
(0x1163f, 0x11640,), # Modi Sign Virama ..Modi Sign Ardhacandra
(0x116ab, 0x116ab,), # Takri Sign Anusvara
(0x116ad, 0x116ad,), # Takri Vowel Sign Aa
(0x116b0, 0x116b5,), # Takri Vowel Sign U ..Takri Vowel Sign Au
(0x116b7, 0x116b7,), # Takri Sign Nukta
(0x1171d, 0x1171f,), # Ahom Consonant Sign Medi..Ahom Consonant Sign Medi
(0x11722, 0x11725,), # Ahom Vowel Sign I ..Ahom Vowel Sign Uu
(0x11727, 0x1172b,), # Ahom Vowel Sign Aw ..Ahom Sign Killer
(0x1182f, 0x11837,), # Dogra Vowel Sign U ..Dogra Sign Anusvara
(0x11839, 0x1183a,), # Dogra Sign Virama ..Dogra Sign Nukta
(0x119d4, 0x119d7,), # Nandinagari Vowel Sign U..Nandinagari Vowel Sign V
(0x119da, 0x119db,), # Nandinagari Vowel Sign E..Nandinagari Vowel Sign A
(0x119e0, 0x119e0,), # Nandinagari Sign Virama
(0x11a01, 0x11a0a,), # Zanabazar Square Vowel S..Zanabazar Square Vowel L
(0x11a33, 0x11a38,), # Zanabazar Square Final C..Zanabazar Square Sign An
(0x11a3b, 0x11a3e,), # Zanabazar Square Cluster..Zanabazar Square Cluster
(0x11a47, 0x11a47,), # Zanabazar Square Subjoiner
(0x11a51, 0x11a56,), # Soyombo Vowel Sign I ..Soyombo Vowel Sign Oe
(0x11a59, 0x11a5b,), # Soyombo Vowel Sign Vocal..Soyombo Vowel Length Mar
(0x11a8a, 0x11a96,), # Soyombo Final Consonant ..Soyombo Sign Anusvara
(0x11a98, 0x11a99,), # Soyombo Gemination Mark ..Soyombo Subjoiner
(0x11c30, 0x11c36,), # Bhaiksuki Vowel Sign I ..Bhaiksuki Vowel Sign Voc
(0x11c38, 0x11c3d,), # Bhaiksuki Vowel Sign E ..Bhaiksuki Sign Anusvara
(0x11c3f, 0x11c3f,), # Bhaiksuki Sign Virama
(0x11c92, 0x11ca7,), # Marchen Subjoined Letter..Marchen Subjoined Letter
(0x11caa, 0x11cb0,), # Marchen Subjoined Letter..Marchen Vowel Sign Aa
(0x11cb2, 0x11cb3,), # Marchen Vowel Sign U ..Marchen Vowel Sign E
(0x11cb5, 0x11cb6,), # Marchen Sign Anusvara ..Marchen Sign Candrabindu
(0x11d31, 0x11d36,), # Masaram Gondi Vowel Sign..Masaram Gondi Vowel Sign
(0x11d3a, 0x11d3a,), # Masaram Gondi Vowel Sign E
(0x11d3c, 0x11d3d,), # Masaram Gondi Vowel Sign..Masaram Gondi Vowel Sign
(0x11d3f, 0x11d45,), # Masaram Gondi Vowel Sign..Masaram Gondi Virama
(0x11d47, 0x11d47,), # Masaram Gondi Ra-kara
(0x11d90, 0x11d91,), # Gunjala Gondi Vowel Sign..Gunjala Gondi Vowel Sign
(0x11d95, 0x11d95,), # Gunjala Gondi Sign Anusvara
(0x11d97, 0x11d97,), # Gunjala Gondi Virama
(0x11ef3, 0x11ef4,), # Makasar Vowel Sign I ..Makasar Vowel Sign U
(0x16af0, 0x16af4,), # Bassa Vah Combining High..Bassa Vah Combining High
(0x16b30, 0x16b36,), # Pahawh Hmong Mark Cim Tu..Pahawh Hmong Mark Cim Ta
(0x16f4f, 0x16f4f,), # Miao Sign Consonant Modifier Bar
(0x16f8f, 0x16f92,), # Miao Tone Right ..Miao Tone Below
(0x1bc9d, 0x1bc9e,), # Duployan Thick Letter Se..Duployan Double Mark
(0x1d167, 0x1d169,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d17b, 0x1d182,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d185, 0x1d18b,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d1aa, 0x1d1ad,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d242, 0x1d244,), # Combining Greek Musical ..Combining Greek Musical
(0x1da00, 0x1da36,), # Signwriting Head Rim ..Signwriting Air Sucking
(0x1da3b, 0x1da6c,), # Signwriting Mouth Closed..Signwriting Excitement
(0x1da75, 0x1da75,), # Signwriting Upper Body Tilting From Hip Joints
(0x1da84, 0x1da84,), # Signwriting Location Head Neck
(0x1da9b, 0x1da9f,), # Signwriting Fill Modifie..Signwriting Fill Modifie
(0x1daa1, 0x1daaf,), # Signwriting Rotation Mod..Signwriting Rotation Mod
(0x1e000, 0x1e006,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e008, 0x1e018,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e01b, 0x1e021,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e023, 0x1e024,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e026, 0x1e02a,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e130, 0x1e136,), # Nyiakeng Puachue Hmong T..Nyiakeng Puachue Hmong T
(0x1e2ec, 0x1e2ef,), # Wancho Tone Tup ..Wancho Tone Koini
(0x1e8d0, 0x1e8d6,), # Mende Kikakui Combining ..Mende Kikakui Combining
(0x1e944, 0x1e94a,), # Adlam Alif Lengthener ..Adlam Nukta
(0xe0100, 0xe01ef,), # Variation Selector-17 ..Variation Selector-256
),
'13.0.0': (
# Source: DerivedGeneralCategory-13.0.0.txt
# Date: 2019-10-21, 14:30:32 GMT
#
(0x00300, 0x0036f,), # Combining Grave Accent ..Combining Latin Small Le
(0x00483, 0x00489,), # Combining Cyrillic Titlo..Combining Cyrillic Milli
(0x00591, 0x005bd,), # Hebrew Accent Etnahta ..Hebrew Point Meteg
(0x005bf, 0x005bf,), # Hebrew Point Rafe
(0x005c1, 0x005c2,), # Hebrew Point Shin Dot ..Hebrew Point Sin Dot
(0x005c4, 0x005c5,), # Hebrew Mark Upper Dot ..Hebrew Mark Lower Dot
(0x005c7, 0x005c7,), # Hebrew Point Qamats Qatan
(0x00610, 0x0061a,), # Arabic Sign Sallallahou ..Arabic Small Kasra
(0x0064b, 0x0065f,), # Arabic Fathatan ..Arabic Wavy Hamza Below
(0x00670, 0x00670,), # Arabic Letter Superscript Alef
(0x006d6, 0x006dc,), # Arabic Small High Ligatu..Arabic Small High Seen
(0x006df, 0x006e4,), # Arabic Small High Rounde..Arabic Small High Madda
(0x006e7, 0x006e8,), # Arabic Small High Yeh ..Arabic Small High Noon
(0x006ea, 0x006ed,), # Arabic Empty Centre Low ..Arabic Small Low Meem
(0x00711, 0x00711,), # Syriac Letter Superscript Alaph
(0x00730, 0x0074a,), # Syriac Pthaha Above ..Syriac Barrekh
(0x007a6, 0x007b0,), # Thaana Abafili ..Thaana Sukun
(0x007eb, 0x007f3,), # Nko Combining Short High..Nko Combining Double Dot
(0x007fd, 0x007fd,), # Nko Dantayalan
(0x00816, 0x00819,), # Samaritan Mark In ..Samaritan Mark Dagesh
(0x0081b, 0x00823,), # Samaritan Mark Epentheti..Samaritan Vowel Sign A
(0x00825, 0x00827,), # Samaritan Vowel Sign Sho..Samaritan Vowel Sign U
(0x00829, 0x0082d,), # Samaritan Vowel Sign Lon..Samaritan Mark Nequdaa
(0x00859, 0x0085b,), # Mandaic Affrication Mark..Mandaic Gemination Mark
(0x008d3, 0x008e1,), # Arabic Small Low Waw ..Arabic Small High Sign S
(0x008e3, 0x00902,), # Arabic Turned Damma Belo..Devanagari Sign Anusvara
(0x0093a, 0x0093a,), # Devanagari Vowel Sign Oe
(0x0093c, 0x0093c,), # Devanagari Sign Nukta
(0x00941, 0x00948,), # Devanagari Vowel Sign U ..Devanagari Vowel Sign Ai
(0x0094d, 0x0094d,), # Devanagari Sign Virama
(0x00951, 0x00957,), # Devanagari Stress Sign U..Devanagari Vowel Sign Uu
(0x00962, 0x00963,), # Devanagari Vowel Sign Vo..Devanagari Vowel Sign Vo
(0x00981, 0x00981,), # Bengali Sign Candrabindu
(0x009bc, 0x009bc,), # Bengali Sign Nukta
(0x009c1, 0x009c4,), # Bengali Vowel Sign U ..Bengali Vowel Sign Vocal
(0x009cd, 0x009cd,), # Bengali Sign Virama
(0x009e2, 0x009e3,), # Bengali Vowel Sign Vocal..Bengali Vowel Sign Vocal
(0x009fe, 0x009fe,), # Bengali Sandhi Mark
(0x00a01, 0x00a02,), # Gurmukhi Sign Adak Bindi..Gurmukhi Sign Bindi
(0x00a3c, 0x00a3c,), # Gurmukhi Sign Nukta
(0x00a41, 0x00a42,), # Gurmukhi Vowel Sign U ..Gurmukhi Vowel Sign Uu
(0x00a47, 0x00a48,), # Gurmukhi Vowel Sign Ee ..Gurmukhi Vowel Sign Ai
(0x00a4b, 0x00a4d,), # Gurmukhi Vowel Sign Oo ..Gurmukhi Sign Virama
(0x00a51, 0x00a51,), # Gurmukhi Sign Udaat
(0x00a70, 0x00a71,), # Gurmukhi Tippi ..Gurmukhi Addak
(0x00a75, 0x00a75,), # Gurmukhi Sign Yakash
(0x00a81, 0x00a82,), # Gujarati Sign Candrabind..Gujarati Sign Anusvara
(0x00abc, 0x00abc,), # Gujarati Sign Nukta
(0x00ac1, 0x00ac5,), # Gujarati Vowel Sign U ..Gujarati Vowel Sign Cand
(0x00ac7, 0x00ac8,), # Gujarati Vowel Sign E ..Gujarati Vowel Sign Ai
(0x00acd, 0x00acd,), # Gujarati Sign Virama
(0x00ae2, 0x00ae3,), # Gujarati Vowel Sign Voca..Gujarati Vowel Sign Voca
(0x00afa, 0x00aff,), # Gujarati Sign Sukun ..Gujarati Sign Two-circle
(0x00b01, 0x00b01,), # Oriya Sign Candrabindu
(0x00b3c, 0x00b3c,), # Oriya Sign Nukta
(0x00b3f, 0x00b3f,), # Oriya Vowel Sign I
(0x00b41, 0x00b44,), # Oriya Vowel Sign U ..Oriya Vowel Sign Vocalic
(0x00b4d, 0x00b4d,), # Oriya Sign Virama
(0x00b55, 0x00b56,), # Oriya Sign Overline ..Oriya Ai Length Mark
(0x00b62, 0x00b63,), # Oriya Vowel Sign Vocalic..Oriya Vowel Sign Vocalic
(0x00b82, 0x00b82,), # Tamil Sign Anusvara
(0x00bc0, 0x00bc0,), # Tamil Vowel Sign Ii
(0x00bcd, 0x00bcd,), # Tamil Sign Virama
(0x00c00, 0x00c00,), # Telugu Sign Combining Candrabindu Above
(0x00c04, 0x00c04,), # Telugu Sign Combining Anusvara Above
(0x00c3e, 0x00c40,), # Telugu Vowel Sign Aa ..Telugu Vowel Sign Ii
(0x00c46, 0x00c48,), # Telugu Vowel Sign E ..Telugu Vowel Sign Ai
(0x00c4a, 0x00c4d,), # Telugu Vowel Sign O ..Telugu Sign Virama
(0x00c55, 0x00c56,), # Telugu Length Mark ..Telugu Ai Length Mark
(0x00c62, 0x00c63,), # Telugu Vowel Sign Vocali..Telugu Vowel Sign Vocali
(0x00c81, 0x00c81,), # Kannada Sign Candrabindu
(0x00cbc, 0x00cbc,), # Kannada Sign Nukta
(0x00cbf, 0x00cbf,), # Kannada Vowel Sign I
(0x00cc6, 0x00cc6,), # Kannada Vowel Sign E
(0x00ccc, 0x00ccd,), # Kannada Vowel Sign Au ..Kannada Sign Virama
(0x00ce2, 0x00ce3,), # Kannada Vowel Sign Vocal..Kannada Vowel Sign Vocal
(0x00d00, 0x00d01,), # Malayalam Sign Combining..Malayalam Sign Candrabin
(0x00d3b, 0x00d3c,), # Malayalam Sign Vertical ..Malayalam Sign Circular
(0x00d41, 0x00d44,), # Malayalam Vowel Sign U ..Malayalam Vowel Sign Voc
(0x00d4d, 0x00d4d,), # Malayalam Sign Virama
(0x00d62, 0x00d63,), # Malayalam Vowel Sign Voc..Malayalam Vowel Sign Voc
(0x00d81, 0x00d81,), # Sinhala Sign Candrabindu
(0x00dca, 0x00dca,), # Sinhala Sign Al-lakuna
(0x00dd2, 0x00dd4,), # Sinhala Vowel Sign Ketti..Sinhala Vowel Sign Ketti
(0x00dd6, 0x00dd6,), # Sinhala Vowel Sign Diga Paa-pilla
(0x00e31, 0x00e31,), # Thai Character Mai Han-akat
(0x00e34, 0x00e3a,), # Thai Character Sara I ..Thai Character Phinthu
(0x00e47, 0x00e4e,), # Thai Character Maitaikhu..Thai Character Yamakkan
(0x00eb1, 0x00eb1,), # Lao Vowel Sign Mai Kan
(0x00eb4, 0x00ebc,), # Lao Vowel Sign I ..Lao Semivowel Sign Lo
(0x00ec8, 0x00ecd,), # Lao Tone Mai Ek ..Lao Niggahita
(0x00f18, 0x00f19,), # Tibetan Astrological Sig..Tibetan Astrological Sig
(0x00f35, 0x00f35,), # Tibetan Mark Ngas Bzung Nyi Zla
(0x00f37, 0x00f37,), # Tibetan Mark Ngas Bzung Sgor Rtags
(0x00f39, 0x00f39,), # Tibetan Mark Tsa -phru
(0x00f71, 0x00f7e,), # Tibetan Vowel Sign Aa ..Tibetan Sign Rjes Su Nga
(0x00f80, 0x00f84,), # Tibetan Vowel Sign Rever..Tibetan Mark Halanta
(0x00f86, 0x00f87,), # Tibetan Sign Lci Rtags ..Tibetan Sign Yang Rtags
(0x00f8d, 0x00f97,), # Tibetan Subjoined Sign L..Tibetan Subjoined Letter
(0x00f99, 0x00fbc,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00fc6, 0x00fc6,), # Tibetan Symbol Padma Gdan
(0x0102d, 0x01030,), # Myanmar Vowel Sign I ..Myanmar Vowel Sign Uu
(0x01032, 0x01037,), # Myanmar Vowel Sign Ai ..Myanmar Sign Dot Below
(0x01039, 0x0103a,), # Myanmar Sign Virama ..Myanmar Sign Asat
(0x0103d, 0x0103e,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01058, 0x01059,), # Myanmar Vowel Sign Vocal..Myanmar Vowel Sign Vocal
(0x0105e, 0x01060,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01071, 0x01074,), # Myanmar Vowel Sign Geba ..Myanmar Vowel Sign Kayah
(0x01082, 0x01082,), # Myanmar Consonant Sign Shan Medial Wa
(0x01085, 0x01086,), # Myanmar Vowel Sign Shan ..Myanmar Vowel Sign Shan
(0x0108d, 0x0108d,), # Myanmar Sign Shan Council Emphatic Tone
(0x0109d, 0x0109d,), # Myanmar Vowel Sign Aiton Ai
(0x0135d, 0x0135f,), # Ethiopic Combining Gemin..Ethiopic Combining Gemin
(0x01712, 0x01714,), # Tagalog Vowel Sign I ..Tagalog Sign Virama
(0x01732, 0x01734,), # Hanunoo Vowel Sign I ..Hanunoo Sign Pamudpod
(0x01752, 0x01753,), # Buhid Vowel Sign I ..Buhid Vowel Sign U
(0x01772, 0x01773,), # Tagbanwa Vowel Sign I ..Tagbanwa Vowel Sign U
(0x017b4, 0x017b5,), # Khmer Vowel Inherent Aq ..Khmer Vowel Inherent Aa
(0x017b7, 0x017bd,), # Khmer Vowel Sign I ..Khmer Vowel Sign Ua
(0x017c6, 0x017c6,), # Khmer Sign Nikahit
(0x017c9, 0x017d3,), # Khmer Sign Muusikatoan ..Khmer Sign Bathamasat
(0x017dd, 0x017dd,), # Khmer Sign Atthacan
(0x0180b, 0x0180d,), # Mongolian Free Variation..Mongolian Free Variation
(0x01885, 0x01886,), # Mongolian Letter Ali Gal..Mongolian Letter Ali Gal
(0x018a9, 0x018a9,), # Mongolian Letter Ali Gali Dagalga
(0x01920, 0x01922,), # Limbu Vowel Sign A ..Limbu Vowel Sign U
(0x01927, 0x01928,), # Limbu Vowel Sign E ..Limbu Vowel Sign O
(0x01932, 0x01932,), # Limbu Small Letter Anusvara
(0x01939, 0x0193b,), # Limbu Sign Mukphreng ..Limbu Sign Sa-i
(0x01a17, 0x01a18,), # Buginese Vowel Sign I ..Buginese Vowel Sign U
(0x01a1b, 0x01a1b,), # Buginese Vowel Sign Ae
(0x01a56, 0x01a56,), # Tai Tham Consonant Sign Medial La
(0x01a58, 0x01a5e,), # Tai Tham Sign Mai Kang L..Tai Tham Consonant Sign
(0x01a60, 0x01a60,), # Tai Tham Sign Sakot
(0x01a62, 0x01a62,), # Tai Tham Vowel Sign Mai Sat
(0x01a65, 0x01a6c,), # Tai Tham Vowel Sign I ..Tai Tham Vowel Sign Oa B
(0x01a73, 0x01a7c,), # Tai Tham Vowel Sign Oa A..Tai Tham Sign Khuen-lue
(0x01a7f, 0x01a7f,), # Tai Tham Combining Cryptogrammic Dot
(0x01ab0, 0x01ac0,), # Combining Doubled Circum..Combining Latin Small Le
(0x01b00, 0x01b03,), # Balinese Sign Ulu Ricem ..Balinese Sign Surang
(0x01b34, 0x01b34,), # Balinese Sign Rerekan
(0x01b36, 0x01b3a,), # Balinese Vowel Sign Ulu ..Balinese Vowel Sign Ra R
(0x01b3c, 0x01b3c,), # Balinese Vowel Sign La Lenga
(0x01b42, 0x01b42,), # Balinese Vowel Sign Pepet
(0x01b6b, 0x01b73,), # Balinese Musical Symbol ..Balinese Musical Symbol
(0x01b80, 0x01b81,), # Sundanese Sign Panyecek ..Sundanese Sign Panglayar
(0x01ba2, 0x01ba5,), # Sundanese Consonant Sign..Sundanese Vowel Sign Pan
(0x01ba8, 0x01ba9,), # Sundanese Vowel Sign Pam..Sundanese Vowel Sign Pan
(0x01bab, 0x01bad,), # Sundanese Sign Virama ..Sundanese Consonant Sign
(0x01be6, 0x01be6,), # Batak Sign Tompi
(0x01be8, 0x01be9,), # Batak Vowel Sign Pakpak ..Batak Vowel Sign Ee
(0x01bed, 0x01bed,), # Batak Vowel Sign Karo O
(0x01bef, 0x01bf1,), # Batak Vowel Sign U For S..Batak Consonant Sign H
(0x01c2c, 0x01c33,), # Lepcha Vowel Sign E ..Lepcha Consonant Sign T
(0x01c36, 0x01c37,), # Lepcha Sign Ran ..Lepcha Sign Nukta
(0x01cd0, 0x01cd2,), # Vedic Tone Karshana ..Vedic Tone Prenkha
(0x01cd4, 0x01ce0,), # Vedic Sign Yajurvedic Mi..Vedic Tone Rigvedic Kash
(0x01ce2, 0x01ce8,), # Vedic Sign Visarga Svari..Vedic Sign Visarga Anuda
(0x01ced, 0x01ced,), # Vedic Sign Tiryak
(0x01cf4, 0x01cf4,), # Vedic Tone Candra Above
(0x01cf8, 0x01cf9,), # Vedic Tone Ring Above ..Vedic Tone Double Ring A
(0x01dc0, 0x01df9,), # Combining Dotted Grave A..Combining Wide Inverted
(0x01dfb, 0x01dff,), # Combining Deletion Mark ..Combining Right Arrowhea
(0x020d0, 0x020f0,), # Combining Left Harpoon A..Combining Asterisk Above
(0x02cef, 0x02cf1,), # Coptic Combining Ni Abov..Coptic Combining Spiritu
(0x02d7f, 0x02d7f,), # Tifinagh Consonant Joiner
(0x02de0, 0x02dff,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0302a, 0x0302d,), # Ideographic Level Tone M..Ideographic Entering Ton
(0x03099, 0x0309a,), # Combining Katakana-hirag..Combining Katakana-hirag
(0x0a66f, 0x0a672,), # Combining Cyrillic Vzmet..Combining Cyrillic Thous
(0x0a674, 0x0a67d,), # Combining Cyrillic Lette..Combining Cyrillic Payer
(0x0a69e, 0x0a69f,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0a6f0, 0x0a6f1,), # Bamum Combining Mark Koq..Bamum Combining Mark Tuk
(0x0a802, 0x0a802,), # Syloti Nagri Sign Dvisvara
(0x0a806, 0x0a806,), # Syloti Nagri Sign Hasanta
(0x0a80b, 0x0a80b,), # Syloti Nagri Sign Anusvara
(0x0a825, 0x0a826,), # Syloti Nagri Vowel Sign ..Syloti Nagri Vowel Sign
(0x0a82c, 0x0a82c,), # Syloti Nagri Sign Alternate Hasanta
(0x0a8c4, 0x0a8c5,), # Saurashtra Sign Virama ..Saurashtra Sign Candrabi
(0x0a8e0, 0x0a8f1,), # Combining Devanagari Dig..Combining Devanagari Sig
(0x0a8ff, 0x0a8ff,), # Devanagari Vowel Sign Ay
(0x0a926, 0x0a92d,), # Kayah Li Vowel Ue ..Kayah Li Tone Calya Plop
(0x0a947, 0x0a951,), # Rejang Vowel Sign I ..Rejang Consonant Sign R
(0x0a980, 0x0a982,), # Javanese Sign Panyangga ..Javanese Sign Layar
(0x0a9b3, 0x0a9b3,), # Javanese Sign Cecak Telu
(0x0a9b6, 0x0a9b9,), # Javanese Vowel Sign Wulu..Javanese Vowel Sign Suku
(0x0a9bc, 0x0a9bd,), # Javanese Vowel Sign Pepe..Javanese Consonant Sign
(0x0a9e5, 0x0a9e5,), # Myanmar Sign Shan Saw
(0x0aa29, 0x0aa2e,), # Cham Vowel Sign Aa ..Cham Vowel Sign Oe
(0x0aa31, 0x0aa32,), # Cham Vowel Sign Au ..Cham Vowel Sign Ue
(0x0aa35, 0x0aa36,), # Cham Consonant Sign La ..Cham Consonant Sign Wa
(0x0aa43, 0x0aa43,), # Cham Consonant Sign Final Ng
(0x0aa4c, 0x0aa4c,), # Cham Consonant Sign Final M
(0x0aa7c, 0x0aa7c,), # Myanmar Sign Tai Laing Tone-2
(0x0aab0, 0x0aab0,), # Tai Viet Mai Kang
(0x0aab2, 0x0aab4,), # Tai Viet Vowel I ..Tai Viet Vowel U
(0x0aab7, 0x0aab8,), # Tai Viet Mai Khit ..Tai Viet Vowel Ia
(0x0aabe, 0x0aabf,), # Tai Viet Vowel Am ..Tai Viet Tone Mai Ek
(0x0aac1, 0x0aac1,), # Tai Viet Tone Mai Tho
(0x0aaec, 0x0aaed,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0aaf6, 0x0aaf6,), # Meetei Mayek Virama
(0x0abe5, 0x0abe5,), # Meetei Mayek Vowel Sign Anap
(0x0abe8, 0x0abe8,), # Meetei Mayek Vowel Sign Unap
(0x0abed, 0x0abed,), # Meetei Mayek Apun Iyek
(0x0fb1e, 0x0fb1e,), # Hebrew Point Judeo-spanish Varika
(0x0fe00, 0x0fe0f,), # Variation Selector-1 ..Variation Selector-16
(0x0fe20, 0x0fe2f,), # Combining Ligature Left ..Combining Cyrillic Titlo
(0x101fd, 0x101fd,), # Phaistos Disc Sign Combining Oblique Stroke
(0x102e0, 0x102e0,), # Coptic Epact Thousands Mark
(0x10376, 0x1037a,), # Combining Old Permic Let..Combining Old Permic Let
(0x10a01, 0x10a03,), # Kharoshthi Vowel Sign I ..Kharoshthi Vowel Sign Vo
(0x10a05, 0x10a06,), # Kharoshthi Vowel Sign E ..Kharoshthi Vowel Sign O
(0x10a0c, 0x10a0f,), # Kharoshthi Vowel Length ..Kharoshthi Sign Visarga
(0x10a38, 0x10a3a,), # Kharoshthi Sign Bar Abov..Kharoshthi Sign Dot Belo
(0x10a3f, 0x10a3f,), # Kharoshthi Virama
(0x10ae5, 0x10ae6,), # Manichaean Abbreviation ..Manichaean Abbreviation
(0x10d24, 0x10d27,), # Hanifi Rohingya Sign Har..Hanifi Rohingya Sign Tas
(0x10eab, 0x10eac,), # Yezidi Combining Hamza M..Yezidi Combining Madda M
(0x10f46, 0x10f50,), # Sogdian Combining Dot Be..Sogdian Combining Stroke
(0x11001, 0x11001,), # Brahmi Sign Anusvara
(0x11038, 0x11046,), # Brahmi Vowel Sign Aa ..Brahmi Virama
(0x1107f, 0x11081,), # Brahmi Number Joiner ..Kaithi Sign Anusvara
(0x110b3, 0x110b6,), # Kaithi Vowel Sign U ..Kaithi Vowel Sign Ai
(0x110b9, 0x110ba,), # Kaithi Sign Virama ..Kaithi Sign Nukta
(0x11100, 0x11102,), # Chakma Sign Candrabindu ..Chakma Sign Visarga
(0x11127, 0x1112b,), # Chakma Vowel Sign A ..Chakma Vowel Sign Uu
(0x1112d, 0x11134,), # Chakma Vowel Sign Ai ..Chakma Maayyaa
(0x11173, 0x11173,), # Mahajani Sign Nukta
(0x11180, 0x11181,), # Sharada Sign Candrabindu..Sharada Sign Anusvara
(0x111b6, 0x111be,), # Sharada Vowel Sign U ..Sharada Vowel Sign O
(0x111c9, 0x111cc,), # Sharada Sandhi Mark ..Sharada Extra Short Vowe
(0x111cf, 0x111cf,), # Sharada Sign Inverted Candrabindu
(0x1122f, 0x11231,), # Khojki Vowel Sign U ..Khojki Vowel Sign Ai
(0x11234, 0x11234,), # Khojki Sign Anusvara
(0x11236, 0x11237,), # Khojki Sign Nukta ..Khojki Sign Shadda
(0x1123e, 0x1123e,), # Khojki Sign Sukun
(0x112df, 0x112df,), # Khudawadi Sign Anusvara
(0x112e3, 0x112ea,), # Khudawadi Vowel Sign U ..Khudawadi Sign Virama
(0x11300, 0x11301,), # Grantha Sign Combining A..Grantha Sign Candrabindu
(0x1133b, 0x1133c,), # Combining Bindu Below ..Grantha Sign Nukta
(0x11340, 0x11340,), # Grantha Vowel Sign Ii
(0x11366, 0x1136c,), # Combining Grantha Digit ..Combining Grantha Digit
(0x11370, 0x11374,), # Combining Grantha Letter..Combining Grantha Letter
(0x11438, 0x1143f,), # Newa Vowel Sign U ..Newa Vowel Sign Ai
(0x11442, 0x11444,), # Newa Sign Virama ..Newa Sign Anusvara
(0x11446, 0x11446,), # Newa Sign Nukta
(0x1145e, 0x1145e,), # Newa Sandhi Mark
(0x114b3, 0x114b8,), # Tirhuta Vowel Sign U ..Tirhuta Vowel Sign Vocal
(0x114ba, 0x114ba,), # Tirhuta Vowel Sign Short E
(0x114bf, 0x114c0,), # Tirhuta Sign Candrabindu..Tirhuta Sign Anusvara
(0x114c2, 0x114c3,), # Tirhuta Sign Virama ..Tirhuta Sign Nukta
(0x115b2, 0x115b5,), # Siddham Vowel Sign U ..Siddham Vowel Sign Vocal
(0x115bc, 0x115bd,), # Siddham Sign Candrabindu..Siddham Sign Anusvara
(0x115bf, 0x115c0,), # Siddham Sign Virama ..Siddham Sign Nukta
(0x115dc, 0x115dd,), # Siddham Vowel Sign Alter..Siddham Vowel Sign Alter
(0x11633, 0x1163a,), # Modi Vowel Sign U ..Modi Vowel Sign Ai
(0x1163d, 0x1163d,), # Modi Sign Anusvara
(0x1163f, 0x11640,), # Modi Sign Virama ..Modi Sign Ardhacandra
(0x116ab, 0x116ab,), # Takri Sign Anusvara
(0x116ad, 0x116ad,), # Takri Vowel Sign Aa
(0x116b0, 0x116b5,), # Takri Vowel Sign U ..Takri Vowel Sign Au
(0x116b7, 0x116b7,), # Takri Sign Nukta
(0x1171d, 0x1171f,), # Ahom Consonant Sign Medi..Ahom Consonant Sign Medi
(0x11722, 0x11725,), # Ahom Vowel Sign I ..Ahom Vowel Sign Uu
(0x11727, 0x1172b,), # Ahom Vowel Sign Aw ..Ahom Sign Killer
(0x1182f, 0x11837,), # Dogra Vowel Sign U ..Dogra Sign Anusvara
(0x11839, 0x1183a,), # Dogra Sign Virama ..Dogra Sign Nukta
(0x1193b, 0x1193c,), # Dives Akuru Sign Anusvar..Dives Akuru Sign Candrab
(0x1193e, 0x1193e,), # Dives Akuru Virama
(0x11943, 0x11943,), # Dives Akuru Sign Nukta
(0x119d4, 0x119d7,), # Nandinagari Vowel Sign U..Nandinagari Vowel Sign V
(0x119da, 0x119db,), # Nandinagari Vowel Sign E..Nandinagari Vowel Sign A
(0x119e0, 0x119e0,), # Nandinagari Sign Virama
(0x11a01, 0x11a0a,), # Zanabazar Square Vowel S..Zanabazar Square Vowel L
(0x11a33, 0x11a38,), # Zanabazar Square Final C..Zanabazar Square Sign An
(0x11a3b, 0x11a3e,), # Zanabazar Square Cluster..Zanabazar Square Cluster
(0x11a47, 0x11a47,), # Zanabazar Square Subjoiner
(0x11a51, 0x11a56,), # Soyombo Vowel Sign I ..Soyombo Vowel Sign Oe
(0x11a59, 0x11a5b,), # Soyombo Vowel Sign Vocal..Soyombo Vowel Length Mar
(0x11a8a, 0x11a96,), # Soyombo Final Consonant ..Soyombo Sign Anusvara
(0x11a98, 0x11a99,), # Soyombo Gemination Mark ..Soyombo Subjoiner
(0x11c30, 0x11c36,), # Bhaiksuki Vowel Sign I ..Bhaiksuki Vowel Sign Voc
(0x11c38, 0x11c3d,), # Bhaiksuki Vowel Sign E ..Bhaiksuki Sign Anusvara
(0x11c3f, 0x11c3f,), # Bhaiksuki Sign Virama
(0x11c92, 0x11ca7,), # Marchen Subjoined Letter..Marchen Subjoined Letter
(0x11caa, 0x11cb0,), # Marchen Subjoined Letter..Marchen Vowel Sign Aa
(0x11cb2, 0x11cb3,), # Marchen Vowel Sign U ..Marchen Vowel Sign E
(0x11cb5, 0x11cb6,), # Marchen Sign Anusvara ..Marchen Sign Candrabindu
(0x11d31, 0x11d36,), # Masaram Gondi Vowel Sign..Masaram Gondi Vowel Sign
(0x11d3a, 0x11d3a,), # Masaram Gondi Vowel Sign E
(0x11d3c, 0x11d3d,), # Masaram Gondi Vowel Sign..Masaram Gondi Vowel Sign
(0x11d3f, 0x11d45,), # Masaram Gondi Vowel Sign..Masaram Gondi Virama
(0x11d47, 0x11d47,), # Masaram Gondi Ra-kara
(0x11d90, 0x11d91,), # Gunjala Gondi Vowel Sign..Gunjala Gondi Vowel Sign
(0x11d95, 0x11d95,), # Gunjala Gondi Sign Anusvara
(0x11d97, 0x11d97,), # Gunjala Gondi Virama
(0x11ef3, 0x11ef4,), # Makasar Vowel Sign I ..Makasar Vowel Sign U
(0x16af0, 0x16af4,), # Bassa Vah Combining High..Bassa Vah Combining High
(0x16b30, 0x16b36,), # Pahawh Hmong Mark Cim Tu..Pahawh Hmong Mark Cim Ta
(0x16f4f, 0x16f4f,), # Miao Sign Consonant Modifier Bar
(0x16f8f, 0x16f92,), # Miao Tone Right ..Miao Tone Below
(0x16fe4, 0x16fe4,), # Khitan Small Script Filler
(0x1bc9d, 0x1bc9e,), # Duployan Thick Letter Se..Duployan Double Mark
(0x1d167, 0x1d169,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d17b, 0x1d182,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d185, 0x1d18b,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d1aa, 0x1d1ad,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d242, 0x1d244,), # Combining Greek Musical ..Combining Greek Musical
(0x1da00, 0x1da36,), # Signwriting Head Rim ..Signwriting Air Sucking
(0x1da3b, 0x1da6c,), # Signwriting Mouth Closed..Signwriting Excitement
(0x1da75, 0x1da75,), # Signwriting Upper Body Tilting From Hip Joints
(0x1da84, 0x1da84,), # Signwriting Location Head Neck
(0x1da9b, 0x1da9f,), # Signwriting Fill Modifie..Signwriting Fill Modifie
(0x1daa1, 0x1daaf,), # Signwriting Rotation Mod..Signwriting Rotation Mod
(0x1e000, 0x1e006,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e008, 0x1e018,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e01b, 0x1e021,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e023, 0x1e024,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e026, 0x1e02a,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e130, 0x1e136,), # Nyiakeng Puachue Hmong T..Nyiakeng Puachue Hmong T
(0x1e2ec, 0x1e2ef,), # Wancho Tone Tup ..Wancho Tone Koini
(0x1e8d0, 0x1e8d6,), # Mende Kikakui Combining ..Mende Kikakui Combining
(0x1e944, 0x1e94a,), # Adlam Alif Lengthener ..Adlam Nukta
(0xe0100, 0xe01ef,), # Variation Selector-17 ..Variation Selector-256
),
'14.0.0': (
# Source: DerivedGeneralCategory-14.0.0.txt
# Date: 2021-07-10, 00:35:08 GMT
#
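        # Each entry below is an inclusive (start, end) code point range.
        # A minimal sketch, assuming sorted, non-overlapping ranges and a
        # hypothetical helper name `_in_ranges`, of how such a table can
        # be queried:
        #
        #     import bisect
        #
        #     def _in_ranges(codepoint, table):
        #         # Index of the last range whose start is <= codepoint.
        #         starts = [start for start, _end in table]
        #         idx = bisect.bisect_right(starts, codepoint) - 1
        #         return idx >= 0 and codepoint <= table[idx][1]
        #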
(0x00300, 0x0036f,), # Combining Grave Accent ..Combining Latin Small Le
(0x00483, 0x00489,), # Combining Cyrillic Titlo..Combining Cyrillic Milli
(0x00591, 0x005bd,), # Hebrew Accent Etnahta ..Hebrew Point Meteg
(0x005bf, 0x005bf,), # Hebrew Point Rafe
(0x005c1, 0x005c2,), # Hebrew Point Shin Dot ..Hebrew Point Sin Dot
(0x005c4, 0x005c5,), # Hebrew Mark Upper Dot ..Hebrew Mark Lower Dot
(0x005c7, 0x005c7,), # Hebrew Point Qamats Qatan
(0x00610, 0x0061a,), # Arabic Sign Sallallahou ..Arabic Small Kasra
(0x0064b, 0x0065f,), # Arabic Fathatan ..Arabic Wavy Hamza Below
(0x00670, 0x00670,), # Arabic Letter Superscript Alef
(0x006d6, 0x006dc,), # Arabic Small High Ligatu..Arabic Small High Seen
(0x006df, 0x006e4,), # Arabic Small High Rounde..Arabic Small High Madda
(0x006e7, 0x006e8,), # Arabic Small High Yeh ..Arabic Small High Noon
(0x006ea, 0x006ed,), # Arabic Empty Centre Low ..Arabic Small Low Meem
(0x00711, 0x00711,), # Syriac Letter Superscript Alaph
(0x00730, 0x0074a,), # Syriac Pthaha Above ..Syriac Barrekh
(0x007a6, 0x007b0,), # Thaana Abafili ..Thaana Sukun
(0x007eb, 0x007f3,), # Nko Combining Short High..Nko Combining Double Dot
(0x007fd, 0x007fd,), # Nko Dantayalan
(0x00816, 0x00819,), # Samaritan Mark In ..Samaritan Mark Dagesh
(0x0081b, 0x00823,), # Samaritan Mark Epentheti..Samaritan Vowel Sign A
(0x00825, 0x00827,), # Samaritan Vowel Sign Sho..Samaritan Vowel Sign U
(0x00829, 0x0082d,), # Samaritan Vowel Sign Lon..Samaritan Mark Nequdaa
(0x00859, 0x0085b,), # Mandaic Affrication Mark..Mandaic Gemination Mark
(0x00898, 0x0089f,), # Arabic Small High Word A..Arabic Half Madda Over M
(0x008ca, 0x008e1,), # Arabic Small High Farsi ..Arabic Small High Sign S
(0x008e3, 0x00902,), # Arabic Turned Damma Belo..Devanagari Sign Anusvara
(0x0093a, 0x0093a,), # Devanagari Vowel Sign Oe
(0x0093c, 0x0093c,), # Devanagari Sign Nukta
(0x00941, 0x00948,), # Devanagari Vowel Sign U ..Devanagari Vowel Sign Ai
(0x0094d, 0x0094d,), # Devanagari Sign Virama
(0x00951, 0x00957,), # Devanagari Stress Sign U..Devanagari Vowel Sign Uu
(0x00962, 0x00963,), # Devanagari Vowel Sign Vo..Devanagari Vowel Sign Vo
(0x00981, 0x00981,), # Bengali Sign Candrabindu
(0x009bc, 0x009bc,), # Bengali Sign Nukta
(0x009c1, 0x009c4,), # Bengali Vowel Sign U ..Bengali Vowel Sign Vocal
(0x009cd, 0x009cd,), # Bengali Sign Virama
(0x009e2, 0x009e3,), # Bengali Vowel Sign Vocal..Bengali Vowel Sign Vocal
(0x009fe, 0x009fe,), # Bengali Sandhi Mark
(0x00a01, 0x00a02,), # Gurmukhi Sign Adak Bindi..Gurmukhi Sign Bindi
(0x00a3c, 0x00a3c,), # Gurmukhi Sign Nukta
(0x00a41, 0x00a42,), # Gurmukhi Vowel Sign U ..Gurmukhi Vowel Sign Uu
(0x00a47, 0x00a48,), # Gurmukhi Vowel Sign Ee ..Gurmukhi Vowel Sign Ai
(0x00a4b, 0x00a4d,), # Gurmukhi Vowel Sign Oo ..Gurmukhi Sign Virama
(0x00a51, 0x00a51,), # Gurmukhi Sign Udaat
(0x00a70, 0x00a71,), # Gurmukhi Tippi ..Gurmukhi Addak
(0x00a75, 0x00a75,), # Gurmukhi Sign Yakash
(0x00a81, 0x00a82,), # Gujarati Sign Candrabind..Gujarati Sign Anusvara
(0x00abc, 0x00abc,), # Gujarati Sign Nukta
(0x00ac1, 0x00ac5,), # Gujarati Vowel Sign U ..Gujarati Vowel Sign Cand
(0x00ac7, 0x00ac8,), # Gujarati Vowel Sign E ..Gujarati Vowel Sign Ai
(0x00acd, 0x00acd,), # Gujarati Sign Virama
(0x00ae2, 0x00ae3,), # Gujarati Vowel Sign Voca..Gujarati Vowel Sign Voca
(0x00afa, 0x00aff,), # Gujarati Sign Sukun ..Gujarati Sign Two-circle
(0x00b01, 0x00b01,), # Oriya Sign Candrabindu
(0x00b3c, 0x00b3c,), # Oriya Sign Nukta
(0x00b3f, 0x00b3f,), # Oriya Vowel Sign I
(0x00b41, 0x00b44,), # Oriya Vowel Sign U ..Oriya Vowel Sign Vocalic
(0x00b4d, 0x00b4d,), # Oriya Sign Virama
(0x00b55, 0x00b56,), # Oriya Sign Overline ..Oriya Ai Length Mark
(0x00b62, 0x00b63,), # Oriya Vowel Sign Vocalic..Oriya Vowel Sign Vocalic
(0x00b82, 0x00b82,), # Tamil Sign Anusvara
(0x00bc0, 0x00bc0,), # Tamil Vowel Sign Ii
(0x00bcd, 0x00bcd,), # Tamil Sign Virama
(0x00c00, 0x00c00,), # Telugu Sign Combining Candrabindu Above
(0x00c04, 0x00c04,), # Telugu Sign Combining Anusvara Above
(0x00c3c, 0x00c3c,), # Telugu Sign Nukta
(0x00c3e, 0x00c40,), # Telugu Vowel Sign Aa ..Telugu Vowel Sign Ii
(0x00c46, 0x00c48,), # Telugu Vowel Sign E ..Telugu Vowel Sign Ai
(0x00c4a, 0x00c4d,), # Telugu Vowel Sign O ..Telugu Sign Virama
(0x00c55, 0x00c56,), # Telugu Length Mark ..Telugu Ai Length Mark
(0x00c62, 0x00c63,), # Telugu Vowel Sign Vocali..Telugu Vowel Sign Vocali
(0x00c81, 0x00c81,), # Kannada Sign Candrabindu
(0x00cbc, 0x00cbc,), # Kannada Sign Nukta
(0x00cbf, 0x00cbf,), # Kannada Vowel Sign I
(0x00cc6, 0x00cc6,), # Kannada Vowel Sign E
(0x00ccc, 0x00ccd,), # Kannada Vowel Sign Au ..Kannada Sign Virama
(0x00ce2, 0x00ce3,), # Kannada Vowel Sign Vocal..Kannada Vowel Sign Vocal
(0x00d00, 0x00d01,), # Malayalam Sign Combining..Malayalam Sign Candrabin
(0x00d3b, 0x00d3c,), # Malayalam Sign Vertical ..Malayalam Sign Circular
(0x00d41, 0x00d44,), # Malayalam Vowel Sign U ..Malayalam Vowel Sign Voc
(0x00d4d, 0x00d4d,), # Malayalam Sign Virama
(0x00d62, 0x00d63,), # Malayalam Vowel Sign Voc..Malayalam Vowel Sign Voc
(0x00d81, 0x00d81,), # Sinhala Sign Candrabindu
(0x00dca, 0x00dca,), # Sinhala Sign Al-lakuna
(0x00dd2, 0x00dd4,), # Sinhala Vowel Sign Ketti..Sinhala Vowel Sign Ketti
(0x00dd6, 0x00dd6,), # Sinhala Vowel Sign Diga Paa-pilla
(0x00e31, 0x00e31,), # Thai Character Mai Han-akat
(0x00e34, 0x00e3a,), # Thai Character Sara I ..Thai Character Phinthu
(0x00e47, 0x00e4e,), # Thai Character Maitaikhu..Thai Character Yamakkan
(0x00eb1, 0x00eb1,), # Lao Vowel Sign Mai Kan
(0x00eb4, 0x00ebc,), # Lao Vowel Sign I ..Lao Semivowel Sign Lo
(0x00ec8, 0x00ecd,), # Lao Tone Mai Ek ..Lao Niggahita
(0x00f18, 0x00f19,), # Tibetan Astrological Sig..Tibetan Astrological Sig
(0x00f35, 0x00f35,), # Tibetan Mark Ngas Bzung Nyi Zla
(0x00f37, 0x00f37,), # Tibetan Mark Ngas Bzung Sgor Rtags
(0x00f39, 0x00f39,), # Tibetan Mark Tsa -phru
(0x00f71, 0x00f7e,), # Tibetan Vowel Sign Aa ..Tibetan Sign Rjes Su Nga
(0x00f80, 0x00f84,), # Tibetan Vowel Sign Rever..Tibetan Mark Halanta
(0x00f86, 0x00f87,), # Tibetan Sign Lci Rtags ..Tibetan Sign Yang Rtags
(0x00f8d, 0x00f97,), # Tibetan Subjoined Sign L..Tibetan Subjoined Letter
(0x00f99, 0x00fbc,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00fc6, 0x00fc6,), # Tibetan Symbol Padma Gdan
(0x0102d, 0x01030,), # Myanmar Vowel Sign I ..Myanmar Vowel Sign Uu
(0x01032, 0x01037,), # Myanmar Vowel Sign Ai ..Myanmar Sign Dot Below
(0x01039, 0x0103a,), # Myanmar Sign Virama ..Myanmar Sign Asat
(0x0103d, 0x0103e,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01058, 0x01059,), # Myanmar Vowel Sign Vocal..Myanmar Vowel Sign Vocal
(0x0105e, 0x01060,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01071, 0x01074,), # Myanmar Vowel Sign Geba ..Myanmar Vowel Sign Kayah
(0x01082, 0x01082,), # Myanmar Consonant Sign Shan Medial Wa
(0x01085, 0x01086,), # Myanmar Vowel Sign Shan ..Myanmar Vowel Sign Shan
(0x0108d, 0x0108d,), # Myanmar Sign Shan Council Emphatic Tone
(0x0109d, 0x0109d,), # Myanmar Vowel Sign Aiton Ai
(0x0135d, 0x0135f,), # Ethiopic Combining Gemin..Ethiopic Combining Gemin
(0x01712, 0x01714,), # Tagalog Vowel Sign I ..Tagalog Sign Virama
(0x01732, 0x01733,), # Hanunoo Vowel Sign I ..Hanunoo Vowel Sign U
(0x01752, 0x01753,), # Buhid Vowel Sign I ..Buhid Vowel Sign U
(0x01772, 0x01773,), # Tagbanwa Vowel Sign I ..Tagbanwa Vowel Sign U
(0x017b4, 0x017b5,), # Khmer Vowel Inherent Aq ..Khmer Vowel Inherent Aa
(0x017b7, 0x017bd,), # Khmer Vowel Sign I ..Khmer Vowel Sign Ua
(0x017c6, 0x017c6,), # Khmer Sign Nikahit
(0x017c9, 0x017d3,), # Khmer Sign Muusikatoan ..Khmer Sign Bathamasat
(0x017dd, 0x017dd,), # Khmer Sign Atthacan
(0x0180b, 0x0180d,), # Mongolian Free Variation..Mongolian Free Variation
(0x0180f, 0x0180f,), # Mongolian Free Variation Selector Four
(0x01885, 0x01886,), # Mongolian Letter Ali Gal..Mongolian Letter Ali Gal
(0x018a9, 0x018a9,), # Mongolian Letter Ali Gali Dagalga
(0x01920, 0x01922,), # Limbu Vowel Sign A ..Limbu Vowel Sign U
(0x01927, 0x01928,), # Limbu Vowel Sign E ..Limbu Vowel Sign O
(0x01932, 0x01932,), # Limbu Small Letter Anusvara
(0x01939, 0x0193b,), # Limbu Sign Mukphreng ..Limbu Sign Sa-i
(0x01a17, 0x01a18,), # Buginese Vowel Sign I ..Buginese Vowel Sign U
(0x01a1b, 0x01a1b,), # Buginese Vowel Sign Ae
(0x01a56, 0x01a56,), # Tai Tham Consonant Sign Medial La
(0x01a58, 0x01a5e,), # Tai Tham Sign Mai Kang L..Tai Tham Consonant Sign
(0x01a60, 0x01a60,), # Tai Tham Sign Sakot
(0x01a62, 0x01a62,), # Tai Tham Vowel Sign Mai Sat
(0x01a65, 0x01a6c,), # Tai Tham Vowel Sign I ..Tai Tham Vowel Sign Oa B
(0x01a73, 0x01a7c,), # Tai Tham Vowel Sign Oa A..Tai Tham Sign Khuen-lue
(0x01a7f, 0x01a7f,), # Tai Tham Combining Cryptogrammic Dot
(0x01ab0, 0x01ace,), # Combining Doubled Circum..Combining Latin Small Le
(0x01b00, 0x01b03,), # Balinese Sign Ulu Ricem ..Balinese Sign Surang
(0x01b34, 0x01b34,), # Balinese Sign Rerekan
(0x01b36, 0x01b3a,), # Balinese Vowel Sign Ulu ..Balinese Vowel Sign Ra R
(0x01b3c, 0x01b3c,), # Balinese Vowel Sign La Lenga
(0x01b42, 0x01b42,), # Balinese Vowel Sign Pepet
(0x01b6b, 0x01b73,), # Balinese Musical Symbol ..Balinese Musical Symbol
(0x01b80, 0x01b81,), # Sundanese Sign Panyecek ..Sundanese Sign Panglayar
(0x01ba2, 0x01ba5,), # Sundanese Consonant Sign..Sundanese Vowel Sign Pan
(0x01ba8, 0x01ba9,), # Sundanese Vowel Sign Pam..Sundanese Vowel Sign Pan
(0x01bab, 0x01bad,), # Sundanese Sign Virama ..Sundanese Consonant Sign
(0x01be6, 0x01be6,), # Batak Sign Tompi
(0x01be8, 0x01be9,), # Batak Vowel Sign Pakpak ..Batak Vowel Sign Ee
(0x01bed, 0x01bed,), # Batak Vowel Sign Karo O
(0x01bef, 0x01bf1,), # Batak Vowel Sign U For S..Batak Consonant Sign H
(0x01c2c, 0x01c33,), # Lepcha Vowel Sign E ..Lepcha Consonant Sign T
(0x01c36, 0x01c37,), # Lepcha Sign Ran ..Lepcha Sign Nukta
(0x01cd0, 0x01cd2,), # Vedic Tone Karshana ..Vedic Tone Prenkha
(0x01cd4, 0x01ce0,), # Vedic Sign Yajurvedic Mi..Vedic Tone Rigvedic Kash
(0x01ce2, 0x01ce8,), # Vedic Sign Visarga Svari..Vedic Sign Visarga Anuda
(0x01ced, 0x01ced,), # Vedic Sign Tiryak
(0x01cf4, 0x01cf4,), # Vedic Tone Candra Above
(0x01cf8, 0x01cf9,), # Vedic Tone Ring Above ..Vedic Tone Double Ring A
(0x01dc0, 0x01dff,), # Combining Dotted Grave A..Combining Right Arrowhea
(0x020d0, 0x020f0,), # Combining Left Harpoon A..Combining Asterisk Above
(0x02cef, 0x02cf1,), # Coptic Combining Ni Abov..Coptic Combining Spiritu
(0x02d7f, 0x02d7f,), # Tifinagh Consonant Joiner
(0x02de0, 0x02dff,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0302a, 0x0302d,), # Ideographic Level Tone M..Ideographic Entering Ton
(0x03099, 0x0309a,), # Combining Katakana-hirag..Combining Katakana-hirag
(0x0a66f, 0x0a672,), # Combining Cyrillic Vzmet..Combining Cyrillic Thous
(0x0a674, 0x0a67d,), # Combining Cyrillic Lette..Combining Cyrillic Payer
(0x0a69e, 0x0a69f,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0a6f0, 0x0a6f1,), # Bamum Combining Mark Koq..Bamum Combining Mark Tuk
(0x0a802, 0x0a802,), # Syloti Nagri Sign Dvisvara
(0x0a806, 0x0a806,), # Syloti Nagri Sign Hasanta
(0x0a80b, 0x0a80b,), # Syloti Nagri Sign Anusvara
(0x0a825, 0x0a826,), # Syloti Nagri Vowel Sign ..Syloti Nagri Vowel Sign
(0x0a82c, 0x0a82c,), # Syloti Nagri Sign Alternate Hasanta
(0x0a8c4, 0x0a8c5,), # Saurashtra Sign Virama ..Saurashtra Sign Candrabi
(0x0a8e0, 0x0a8f1,), # Combining Devanagari Dig..Combining Devanagari Sig
(0x0a8ff, 0x0a8ff,), # Devanagari Vowel Sign Ay
(0x0a926, 0x0a92d,), # Kayah Li Vowel Ue ..Kayah Li Tone Calya Plop
(0x0a947, 0x0a951,), # Rejang Vowel Sign I ..Rejang Consonant Sign R
(0x0a980, 0x0a982,), # Javanese Sign Panyangga ..Javanese Sign Layar
(0x0a9b3, 0x0a9b3,), # Javanese Sign Cecak Telu
(0x0a9b6, 0x0a9b9,), # Javanese Vowel Sign Wulu..Javanese Vowel Sign Suku
(0x0a9bc, 0x0a9bd,), # Javanese Vowel Sign Pepe..Javanese Consonant Sign
(0x0a9e5, 0x0a9e5,), # Myanmar Sign Shan Saw
(0x0aa29, 0x0aa2e,), # Cham Vowel Sign Aa ..Cham Vowel Sign Oe
(0x0aa31, 0x0aa32,), # Cham Vowel Sign Au ..Cham Vowel Sign Ue
(0x0aa35, 0x0aa36,), # Cham Consonant Sign La ..Cham Consonant Sign Wa
(0x0aa43, 0x0aa43,), # Cham Consonant Sign Final Ng
(0x0aa4c, 0x0aa4c,), # Cham Consonant Sign Final M
(0x0aa7c, 0x0aa7c,), # Myanmar Sign Tai Laing Tone-2
(0x0aab0, 0x0aab0,), # Tai Viet Mai Kang
(0x0aab2, 0x0aab4,), # Tai Viet Vowel I ..Tai Viet Vowel U
(0x0aab7, 0x0aab8,), # Tai Viet Mai Khit ..Tai Viet Vowel Ia
(0x0aabe, 0x0aabf,), # Tai Viet Vowel Am ..Tai Viet Tone Mai Ek
(0x0aac1, 0x0aac1,), # Tai Viet Tone Mai Tho
(0x0aaec, 0x0aaed,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0aaf6, 0x0aaf6,), # Meetei Mayek Virama
(0x0abe5, 0x0abe5,), # Meetei Mayek Vowel Sign Anap
(0x0abe8, 0x0abe8,), # Meetei Mayek Vowel Sign Unap
(0x0abed, 0x0abed,), # Meetei Mayek Apun Iyek
(0x0fb1e, 0x0fb1e,), # Hebrew Point Judeo-spanish Varika
(0x0fe00, 0x0fe0f,), # Variation Selector-1 ..Variation Selector-16
(0x0fe20, 0x0fe2f,), # Combining Ligature Left ..Combining Cyrillic Titlo
(0x101fd, 0x101fd,), # Phaistos Disc Sign Combining Oblique Stroke
(0x102e0, 0x102e0,), # Coptic Epact Thousands Mark
(0x10376, 0x1037a,), # Combining Old Permic Let..Combining Old Permic Let
(0x10a01, 0x10a03,), # Kharoshthi Vowel Sign I ..Kharoshthi Vowel Sign Vo
(0x10a05, 0x10a06,), # Kharoshthi Vowel Sign E ..Kharoshthi Vowel Sign O
(0x10a0c, 0x10a0f,), # Kharoshthi Vowel Length ..Kharoshthi Sign Visarga
(0x10a38, 0x10a3a,), # Kharoshthi Sign Bar Abov..Kharoshthi Sign Dot Belo
(0x10a3f, 0x10a3f,), # Kharoshthi Virama
(0x10ae5, 0x10ae6,), # Manichaean Abbreviation ..Manichaean Abbreviation
(0x10d24, 0x10d27,), # Hanifi Rohingya Sign Har..Hanifi Rohingya Sign Tas
(0x10eab, 0x10eac,), # Yezidi Combining Hamza M..Yezidi Combining Madda M
(0x10f46, 0x10f50,), # Sogdian Combining Dot Be..Sogdian Combining Stroke
(0x10f82, 0x10f85,), # Old Uyghur Combining Dot..Old Uyghur Combining Two
(0x11001, 0x11001,), # Brahmi Sign Anusvara
(0x11038, 0x11046,), # Brahmi Vowel Sign Aa ..Brahmi Virama
(0x11070, 0x11070,), # Brahmi Sign Old Tamil Virama
(0x11073, 0x11074,), # Brahmi Vowel Sign Old Ta..Brahmi Vowel Sign Old Ta
(0x1107f, 0x11081,), # Brahmi Number Joiner ..Kaithi Sign Anusvara
(0x110b3, 0x110b6,), # Kaithi Vowel Sign U ..Kaithi Vowel Sign Ai
(0x110b9, 0x110ba,), # Kaithi Sign Virama ..Kaithi Sign Nukta
(0x110c2, 0x110c2,), # Kaithi Vowel Sign Vocalic R
(0x11100, 0x11102,), # Chakma Sign Candrabindu ..Chakma Sign Visarga
(0x11127, 0x1112b,), # Chakma Vowel Sign A ..Chakma Vowel Sign Uu
(0x1112d, 0x11134,), # Chakma Vowel Sign Ai ..Chakma Maayyaa
(0x11173, 0x11173,), # Mahajani Sign Nukta
(0x11180, 0x11181,), # Sharada Sign Candrabindu..Sharada Sign Anusvara
(0x111b6, 0x111be,), # Sharada Vowel Sign U ..Sharada Vowel Sign O
(0x111c9, 0x111cc,), # Sharada Sandhi Mark ..Sharada Extra Short Vowe
(0x111cf, 0x111cf,), # Sharada Sign Inverted Candrabindu
(0x1122f, 0x11231,), # Khojki Vowel Sign U ..Khojki Vowel Sign Ai
(0x11234, 0x11234,), # Khojki Sign Anusvara
(0x11236, 0x11237,), # Khojki Sign Nukta ..Khojki Sign Shadda
(0x1123e, 0x1123e,), # Khojki Sign Sukun
(0x112df, 0x112df,), # Khudawadi Sign Anusvara
(0x112e3, 0x112ea,), # Khudawadi Vowel Sign U ..Khudawadi Sign Virama
(0x11300, 0x11301,), # Grantha Sign Combining A..Grantha Sign Candrabindu
(0x1133b, 0x1133c,), # Combining Bindu Below ..Grantha Sign Nukta
(0x11340, 0x11340,), # Grantha Vowel Sign Ii
(0x11366, 0x1136c,), # Combining Grantha Digit ..Combining Grantha Digit
(0x11370, 0x11374,), # Combining Grantha Letter..Combining Grantha Letter
(0x11438, 0x1143f,), # Newa Vowel Sign U ..Newa Vowel Sign Ai
(0x11442, 0x11444,), # Newa Sign Virama ..Newa Sign Anusvara
(0x11446, 0x11446,), # Newa Sign Nukta
(0x1145e, 0x1145e,), # Newa Sandhi Mark
(0x114b3, 0x114b8,), # Tirhuta Vowel Sign U ..Tirhuta Vowel Sign Vocal
(0x114ba, 0x114ba,), # Tirhuta Vowel Sign Short E
(0x114bf, 0x114c0,), # Tirhuta Sign Candrabindu..Tirhuta Sign Anusvara
(0x114c2, 0x114c3,), # Tirhuta Sign Virama ..Tirhuta Sign Nukta
(0x115b2, 0x115b5,), # Siddham Vowel Sign U ..Siddham Vowel Sign Vocal
(0x115bc, 0x115bd,), # Siddham Sign Candrabindu..Siddham Sign Anusvara
(0x115bf, 0x115c0,), # Siddham Sign Virama ..Siddham Sign Nukta
(0x115dc, 0x115dd,), # Siddham Vowel Sign Alter..Siddham Vowel Sign Alter
(0x11633, 0x1163a,), # Modi Vowel Sign U ..Modi Vowel Sign Ai
(0x1163d, 0x1163d,), # Modi Sign Anusvara
(0x1163f, 0x11640,), # Modi Sign Virama ..Modi Sign Ardhacandra
(0x116ab, 0x116ab,), # Takri Sign Anusvara
(0x116ad, 0x116ad,), # Takri Vowel Sign Aa
(0x116b0, 0x116b5,), # Takri Vowel Sign U ..Takri Vowel Sign Au
(0x116b7, 0x116b7,), # Takri Sign Nukta
(0x1171d, 0x1171f,), # Ahom Consonant Sign Medi..Ahom Consonant Sign Medi
(0x11722, 0x11725,), # Ahom Vowel Sign I ..Ahom Vowel Sign Uu
(0x11727, 0x1172b,), # Ahom Vowel Sign Aw ..Ahom Sign Killer
(0x1182f, 0x11837,), # Dogra Vowel Sign U ..Dogra Sign Anusvara
(0x11839, 0x1183a,), # Dogra Sign Virama ..Dogra Sign Nukta
(0x1193b, 0x1193c,), # Dives Akuru Sign Anusvar..Dives Akuru Sign Candrab
(0x1193e, 0x1193e,), # Dives Akuru Virama
(0x11943, 0x11943,), # Dives Akuru Sign Nukta
(0x119d4, 0x119d7,), # Nandinagari Vowel Sign U..Nandinagari Vowel Sign V
(0x119da, 0x119db,), # Nandinagari Vowel Sign E..Nandinagari Vowel Sign A
(0x119e0, 0x119e0,), # Nandinagari Sign Virama
(0x11a01, 0x11a0a,), # Zanabazar Square Vowel S..Zanabazar Square Vowel L
(0x11a33, 0x11a38,), # Zanabazar Square Final C..Zanabazar Square Sign An
(0x11a3b, 0x11a3e,), # Zanabazar Square Cluster..Zanabazar Square Cluster
(0x11a47, 0x11a47,), # Zanabazar Square Subjoiner
(0x11a51, 0x11a56,), # Soyombo Vowel Sign I ..Soyombo Vowel Sign Oe
(0x11a59, 0x11a5b,), # Soyombo Vowel Sign Vocal..Soyombo Vowel Length Mar
(0x11a8a, 0x11a96,), # Soyombo Final Consonant ..Soyombo Sign Anusvara
(0x11a98, 0x11a99,), # Soyombo Gemination Mark ..Soyombo Subjoiner
(0x11c30, 0x11c36,), # Bhaiksuki Vowel Sign I ..Bhaiksuki Vowel Sign Voc
(0x11c38, 0x11c3d,), # Bhaiksuki Vowel Sign E ..Bhaiksuki Sign Anusvara
(0x11c3f, 0x11c3f,), # Bhaiksuki Sign Virama
(0x11c92, 0x11ca7,), # Marchen Subjoined Letter..Marchen Subjoined Letter
(0x11caa, 0x11cb0,), # Marchen Subjoined Letter..Marchen Vowel Sign Aa
(0x11cb2, 0x11cb3,), # Marchen Vowel Sign U ..Marchen Vowel Sign E
(0x11cb5, 0x11cb6,), # Marchen Sign Anusvara ..Marchen Sign Candrabindu
(0x11d31, 0x11d36,), # Masaram Gondi Vowel Sign..Masaram Gondi Vowel Sign
(0x11d3a, 0x11d3a,), # Masaram Gondi Vowel Sign E
(0x11d3c, 0x11d3d,), # Masaram Gondi Vowel Sign..Masaram Gondi Vowel Sign
(0x11d3f, 0x11d45,), # Masaram Gondi Vowel Sign..Masaram Gondi Virama
(0x11d47, 0x11d47,), # Masaram Gondi Ra-kara
(0x11d90, 0x11d91,), # Gunjala Gondi Vowel Sign..Gunjala Gondi Vowel Sign
(0x11d95, 0x11d95,), # Gunjala Gondi Sign Anusvara
(0x11d97, 0x11d97,), # Gunjala Gondi Virama
(0x11ef3, 0x11ef4,), # Makasar Vowel Sign I ..Makasar Vowel Sign U
(0x16af0, 0x16af4,), # Bassa Vah Combining High..Bassa Vah Combining High
(0x16b30, 0x16b36,), # Pahawh Hmong Mark Cim Tu..Pahawh Hmong Mark Cim Ta
(0x16f4f, 0x16f4f,), # Miao Sign Consonant Modifier Bar
(0x16f8f, 0x16f92,), # Miao Tone Right ..Miao Tone Below
(0x16fe4, 0x16fe4,), # Khitan Small Script Filler
(0x1bc9d, 0x1bc9e,), # Duployan Thick Letter Se..Duployan Double Mark
(0x1cf00, 0x1cf2d,), # Znamenny Combining Mark ..Znamenny Combining Mark
(0x1cf30, 0x1cf46,), # Znamenny Combining Tonal..Znamenny Priznak Modifie
(0x1d167, 0x1d169,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d17b, 0x1d182,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d185, 0x1d18b,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d1aa, 0x1d1ad,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d242, 0x1d244,), # Combining Greek Musical ..Combining Greek Musical
(0x1da00, 0x1da36,), # Signwriting Head Rim ..Signwriting Air Sucking
(0x1da3b, 0x1da6c,), # Signwriting Mouth Closed..Signwriting Excitement
(0x1da75, 0x1da75,), # Signwriting Upper Body Tilting From Hip Joints
(0x1da84, 0x1da84,), # Signwriting Location Head Neck
(0x1da9b, 0x1da9f,), # Signwriting Fill Modifie..Signwriting Fill Modifie
(0x1daa1, 0x1daaf,), # Signwriting Rotation Mod..Signwriting Rotation Mod
(0x1e000, 0x1e006,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e008, 0x1e018,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e01b, 0x1e021,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e023, 0x1e024,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e026, 0x1e02a,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e130, 0x1e136,), # Nyiakeng Puachue Hmong T..Nyiakeng Puachue Hmong T
(0x1e2ae, 0x1e2ae,), # Toto Sign Rising Tone
(0x1e2ec, 0x1e2ef,), # Wancho Tone Tup ..Wancho Tone Koini
(0x1e8d0, 0x1e8d6,), # Mende Kikakui Combining ..Mende Kikakui Combining
(0x1e944, 0x1e94a,), # Adlam Alif Lengthener ..Adlam Nukta
(0xe0100, 0xe01ef,), # Variation Selector-17 ..Variation Selector-256
),
'15.0.0': (
# Source: DerivedGeneralCategory-15.0.0.txt
# Date: 2022-04-26, 23:14:35 GMT
#
(0x00300, 0x0036f,), # Combining Grave Accent ..Combining Latin Small Le
(0x00483, 0x00489,), # Combining Cyrillic Titlo..Combining Cyrillic Milli
(0x00591, 0x005bd,), # Hebrew Accent Etnahta ..Hebrew Point Meteg
(0x005bf, 0x005bf,), # Hebrew Point Rafe
(0x005c1, 0x005c2,), # Hebrew Point Shin Dot ..Hebrew Point Sin Dot
(0x005c4, 0x005c5,), # Hebrew Mark Upper Dot ..Hebrew Mark Lower Dot
(0x005c7, 0x005c7,), # Hebrew Point Qamats Qatan
(0x00610, 0x0061a,), # Arabic Sign Sallallahou ..Arabic Small Kasra
(0x0064b, 0x0065f,), # Arabic Fathatan ..Arabic Wavy Hamza Below
(0x00670, 0x00670,), # Arabic Letter Superscript Alef
(0x006d6, 0x006dc,), # Arabic Small High Ligatu..Arabic Small High Seen
(0x006df, 0x006e4,), # Arabic Small High Rounde..Arabic Small High Madda
(0x006e7, 0x006e8,), # Arabic Small High Yeh ..Arabic Small High Noon
(0x006ea, 0x006ed,), # Arabic Empty Centre Low ..Arabic Small Low Meem
(0x00711, 0x00711,), # Syriac Letter Superscript Alaph
(0x00730, 0x0074a,), # Syriac Pthaha Above ..Syriac Barrekh
(0x007a6, 0x007b0,), # Thaana Abafili ..Thaana Sukun
(0x007eb, 0x007f3,), # Nko Combining Short High..Nko Combining Double Dot
(0x007fd, 0x007fd,), # Nko Dantayalan
(0x00816, 0x00819,), # Samaritan Mark In ..Samaritan Mark Dagesh
(0x0081b, 0x00823,), # Samaritan Mark Epentheti..Samaritan Vowel Sign A
(0x00825, 0x00827,), # Samaritan Vowel Sign Sho..Samaritan Vowel Sign U
(0x00829, 0x0082d,), # Samaritan Vowel Sign Lon..Samaritan Mark Nequdaa
(0x00859, 0x0085b,), # Mandaic Affrication Mark..Mandaic Gemination Mark
(0x00898, 0x0089f,), # Arabic Small High Word A..Arabic Half Madda Over M
(0x008ca, 0x008e1,), # Arabic Small High Farsi ..Arabic Small High Sign S
(0x008e3, 0x00902,), # Arabic Turned Damma Belo..Devanagari Sign Anusvara
(0x0093a, 0x0093a,), # Devanagari Vowel Sign Oe
(0x0093c, 0x0093c,), # Devanagari Sign Nukta
(0x00941, 0x00948,), # Devanagari Vowel Sign U ..Devanagari Vowel Sign Ai
(0x0094d, 0x0094d,), # Devanagari Sign Virama
(0x00951, 0x00957,), # Devanagari Stress Sign U..Devanagari Vowel Sign Uu
(0x00962, 0x00963,), # Devanagari Vowel Sign Vo..Devanagari Vowel Sign Vo
(0x00981, 0x00981,), # Bengali Sign Candrabindu
(0x009bc, 0x009bc,), # Bengali Sign Nukta
(0x009c1, 0x009c4,), # Bengali Vowel Sign U ..Bengali Vowel Sign Vocal
(0x009cd, 0x009cd,), # Bengali Sign Virama
(0x009e2, 0x009e3,), # Bengali Vowel Sign Vocal..Bengali Vowel Sign Vocal
(0x009fe, 0x009fe,), # Bengali Sandhi Mark
(0x00a01, 0x00a02,), # Gurmukhi Sign Adak Bindi..Gurmukhi Sign Bindi
(0x00a3c, 0x00a3c,), # Gurmukhi Sign Nukta
(0x00a41, 0x00a42,), # Gurmukhi Vowel Sign U ..Gurmukhi Vowel Sign Uu
(0x00a47, 0x00a48,), # Gurmukhi Vowel Sign Ee ..Gurmukhi Vowel Sign Ai
(0x00a4b, 0x00a4d,), # Gurmukhi Vowel Sign Oo ..Gurmukhi Sign Virama
(0x00a51, 0x00a51,), # Gurmukhi Sign Udaat
(0x00a70, 0x00a71,), # Gurmukhi Tippi ..Gurmukhi Addak
(0x00a75, 0x00a75,), # Gurmukhi Sign Yakash
(0x00a81, 0x00a82,), # Gujarati Sign Candrabind..Gujarati Sign Anusvara
(0x00abc, 0x00abc,), # Gujarati Sign Nukta
(0x00ac1, 0x00ac5,), # Gujarati Vowel Sign U ..Gujarati Vowel Sign Cand
(0x00ac7, 0x00ac8,), # Gujarati Vowel Sign E ..Gujarati Vowel Sign Ai
(0x00acd, 0x00acd,), # Gujarati Sign Virama
(0x00ae2, 0x00ae3,), # Gujarati Vowel Sign Voca..Gujarati Vowel Sign Voca
(0x00afa, 0x00aff,), # Gujarati Sign Sukun ..Gujarati Sign Two-circle
(0x00b01, 0x00b01,), # Oriya Sign Candrabindu
(0x00b3c, 0x00b3c,), # Oriya Sign Nukta
(0x00b3f, 0x00b3f,), # Oriya Vowel Sign I
(0x00b41, 0x00b44,), # Oriya Vowel Sign U ..Oriya Vowel Sign Vocalic
(0x00b4d, 0x00b4d,), # Oriya Sign Virama
(0x00b55, 0x00b56,), # Oriya Sign Overline ..Oriya Ai Length Mark
(0x00b62, 0x00b63,), # Oriya Vowel Sign Vocalic..Oriya Vowel Sign Vocalic
(0x00b82, 0x00b82,), # Tamil Sign Anusvara
(0x00bc0, 0x00bc0,), # Tamil Vowel Sign Ii
(0x00bcd, 0x00bcd,), # Tamil Sign Virama
(0x00c00, 0x00c00,), # Telugu Sign Combining Candrabindu Above
(0x00c04, 0x00c04,), # Telugu Sign Combining Anusvara Above
(0x00c3c, 0x00c3c,), # Telugu Sign Nukta
(0x00c3e, 0x00c40,), # Telugu Vowel Sign Aa ..Telugu Vowel Sign Ii
(0x00c46, 0x00c48,), # Telugu Vowel Sign E ..Telugu Vowel Sign Ai
(0x00c4a, 0x00c4d,), # Telugu Vowel Sign O ..Telugu Sign Virama
(0x00c55, 0x00c56,), # Telugu Length Mark ..Telugu Ai Length Mark
(0x00c62, 0x00c63,), # Telugu Vowel Sign Vocali..Telugu Vowel Sign Vocali
(0x00c81, 0x00c81,), # Kannada Sign Candrabindu
(0x00cbc, 0x00cbc,), # Kannada Sign Nukta
(0x00cbf, 0x00cbf,), # Kannada Vowel Sign I
(0x00cc6, 0x00cc6,), # Kannada Vowel Sign E
(0x00ccc, 0x00ccd,), # Kannada Vowel Sign Au ..Kannada Sign Virama
(0x00ce2, 0x00ce3,), # Kannada Vowel Sign Vocal..Kannada Vowel Sign Vocal
(0x00d00, 0x00d01,), # Malayalam Sign Combining..Malayalam Sign Candrabin
(0x00d3b, 0x00d3c,), # Malayalam Sign Vertical ..Malayalam Sign Circular
(0x00d41, 0x00d44,), # Malayalam Vowel Sign U ..Malayalam Vowel Sign Voc
(0x00d4d, 0x00d4d,), # Malayalam Sign Virama
(0x00d62, 0x00d63,), # Malayalam Vowel Sign Voc..Malayalam Vowel Sign Voc
(0x00d81, 0x00d81,), # Sinhala Sign Candrabindu
(0x00dca, 0x00dca,), # Sinhala Sign Al-lakuna
(0x00dd2, 0x00dd4,), # Sinhala Vowel Sign Ketti..Sinhala Vowel Sign Ketti
(0x00dd6, 0x00dd6,), # Sinhala Vowel Sign Diga Paa-pilla
(0x00e31, 0x00e31,), # Thai Character Mai Han-akat
(0x00e34, 0x00e3a,), # Thai Character Sara I ..Thai Character Phinthu
(0x00e47, 0x00e4e,), # Thai Character Maitaikhu..Thai Character Yamakkan
(0x00eb1, 0x00eb1,), # Lao Vowel Sign Mai Kan
(0x00eb4, 0x00ebc,), # Lao Vowel Sign I ..Lao Semivowel Sign Lo
(0x00ec8, 0x00ece,), # Lao Tone Mai Ek ..(nil)
(0x00f18, 0x00f19,), # Tibetan Astrological Sig..Tibetan Astrological Sig
(0x00f35, 0x00f35,), # Tibetan Mark Ngas Bzung Nyi Zla
(0x00f37, 0x00f37,), # Tibetan Mark Ngas Bzung Sgor Rtags
(0x00f39, 0x00f39,), # Tibetan Mark Tsa -phru
(0x00f71, 0x00f7e,), # Tibetan Vowel Sign Aa ..Tibetan Sign Rjes Su Nga
(0x00f80, 0x00f84,), # Tibetan Vowel Sign Rever..Tibetan Mark Halanta
(0x00f86, 0x00f87,), # Tibetan Sign Lci Rtags ..Tibetan Sign Yang Rtags
(0x00f8d, 0x00f97,), # Tibetan Subjoined Sign L..Tibetan Subjoined Letter
(0x00f99, 0x00fbc,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00fc6, 0x00fc6,), # Tibetan Symbol Padma Gdan
(0x0102d, 0x01030,), # Myanmar Vowel Sign I ..Myanmar Vowel Sign Uu
(0x01032, 0x01037,), # Myanmar Vowel Sign Ai ..Myanmar Sign Dot Below
(0x01039, 0x0103a,), # Myanmar Sign Virama ..Myanmar Sign Asat
(0x0103d, 0x0103e,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01058, 0x01059,), # Myanmar Vowel Sign Vocal..Myanmar Vowel Sign Vocal
(0x0105e, 0x01060,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01071, 0x01074,), # Myanmar Vowel Sign Geba ..Myanmar Vowel Sign Kayah
(0x01082, 0x01082,), # Myanmar Consonant Sign Shan Medial Wa
(0x01085, 0x01086,), # Myanmar Vowel Sign Shan ..Myanmar Vowel Sign Shan
(0x0108d, 0x0108d,), # Myanmar Sign Shan Council Emphatic Tone
(0x0109d, 0x0109d,), # Myanmar Vowel Sign Aiton Ai
(0x0135d, 0x0135f,), # Ethiopic Combining Gemin..Ethiopic Combining Gemin
(0x01712, 0x01714,), # Tagalog Vowel Sign I ..Tagalog Sign Virama
(0x01732, 0x01733,), # Hanunoo Vowel Sign I ..Hanunoo Vowel Sign U
(0x01752, 0x01753,), # Buhid Vowel Sign I ..Buhid Vowel Sign U
(0x01772, 0x01773,), # Tagbanwa Vowel Sign I ..Tagbanwa Vowel Sign U
(0x017b4, 0x017b5,), # Khmer Vowel Inherent Aq ..Khmer Vowel Inherent Aa
(0x017b7, 0x017bd,), # Khmer Vowel Sign I ..Khmer Vowel Sign Ua
(0x017c6, 0x017c6,), # Khmer Sign Nikahit
(0x017c9, 0x017d3,), # Khmer Sign Muusikatoan ..Khmer Sign Bathamasat
(0x017dd, 0x017dd,), # Khmer Sign Atthacan
(0x0180b, 0x0180d,), # Mongolian Free Variation..Mongolian Free Variation
(0x0180f, 0x0180f,), # Mongolian Free Variation Selector Four
(0x01885, 0x01886,), # Mongolian Letter Ali Gal..Mongolian Letter Ali Gal
(0x018a9, 0x018a9,), # Mongolian Letter Ali Gali Dagalga
(0x01920, 0x01922,), # Limbu Vowel Sign A ..Limbu Vowel Sign U
(0x01927, 0x01928,), # Limbu Vowel Sign E ..Limbu Vowel Sign O
(0x01932, 0x01932,), # Limbu Small Letter Anusvara
(0x01939, 0x0193b,), # Limbu Sign Mukphreng ..Limbu Sign Sa-i
(0x01a17, 0x01a18,), # Buginese Vowel Sign I ..Buginese Vowel Sign U
(0x01a1b, 0x01a1b,), # Buginese Vowel Sign Ae
(0x01a56, 0x01a56,), # Tai Tham Consonant Sign Medial La
(0x01a58, 0x01a5e,), # Tai Tham Sign Mai Kang L..Tai Tham Consonant Sign
(0x01a60, 0x01a60,), # Tai Tham Sign Sakot
(0x01a62, 0x01a62,), # Tai Tham Vowel Sign Mai Sat
(0x01a65, 0x01a6c,), # Tai Tham Vowel Sign I ..Tai Tham Vowel Sign Oa B
(0x01a73, 0x01a7c,), # Tai Tham Vowel Sign Oa A..Tai Tham Sign Khuen-lue
(0x01a7f, 0x01a7f,), # Tai Tham Combining Cryptogrammic Dot
(0x01ab0, 0x01ace,), # Combining Doubled Circum..Combining Latin Small Le
(0x01b00, 0x01b03,), # Balinese Sign Ulu Ricem ..Balinese Sign Surang
(0x01b34, 0x01b34,), # Balinese Sign Rerekan
(0x01b36, 0x01b3a,), # Balinese Vowel Sign Ulu ..Balinese Vowel Sign Ra R
(0x01b3c, 0x01b3c,), # Balinese Vowel Sign La Lenga
(0x01b42, 0x01b42,), # Balinese Vowel Sign Pepet
(0x01b6b, 0x01b73,), # Balinese Musical Symbol ..Balinese Musical Symbol
(0x01b80, 0x01b81,), # Sundanese Sign Panyecek ..Sundanese Sign Panglayar
(0x01ba2, 0x01ba5,), # Sundanese Consonant Sign..Sundanese Vowel Sign Pan
(0x01ba8, 0x01ba9,), # Sundanese Vowel Sign Pam..Sundanese Vowel Sign Pan
(0x01bab, 0x01bad,), # Sundanese Sign Virama ..Sundanese Consonant Sign
(0x01be6, 0x01be6,), # Batak Sign Tompi
(0x01be8, 0x01be9,), # Batak Vowel Sign Pakpak ..Batak Vowel Sign Ee
(0x01bed, 0x01bed,), # Batak Vowel Sign Karo O
(0x01bef, 0x01bf1,), # Batak Vowel Sign U For S..Batak Consonant Sign H
(0x01c2c, 0x01c33,), # Lepcha Vowel Sign E ..Lepcha Consonant Sign T
(0x01c36, 0x01c37,), # Lepcha Sign Ran ..Lepcha Sign Nukta
(0x01cd0, 0x01cd2,), # Vedic Tone Karshana ..Vedic Tone Prenkha
(0x01cd4, 0x01ce0,), # Vedic Sign Yajurvedic Mi..Vedic Tone Rigvedic Kash
(0x01ce2, 0x01ce8,), # Vedic Sign Visarga Svari..Vedic Sign Visarga Anuda
(0x01ced, 0x01ced,), # Vedic Sign Tiryak
(0x01cf4, 0x01cf4,), # Vedic Tone Candra Above
(0x01cf8, 0x01cf9,), # Vedic Tone Ring Above ..Vedic Tone Double Ring A
(0x01dc0, 0x01dff,), # Combining Dotted Grave A..Combining Right Arrowhea
(0x020d0, 0x020f0,), # Combining Left Harpoon A..Combining Asterisk Above
(0x02cef, 0x02cf1,), # Coptic Combining Ni Abov..Coptic Combining Spiritu
(0x02d7f, 0x02d7f,), # Tifinagh Consonant Joiner
(0x02de0, 0x02dff,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0302a, 0x0302d,), # Ideographic Level Tone M..Ideographic Entering Ton
(0x03099, 0x0309a,), # Combining Katakana-hirag..Combining Katakana-hirag
(0x0a66f, 0x0a672,), # Combining Cyrillic Vzmet..Combining Cyrillic Thous
(0x0a674, 0x0a67d,), # Combining Cyrillic Lette..Combining Cyrillic Payer
(0x0a69e, 0x0a69f,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0a6f0, 0x0a6f1,), # Bamum Combining Mark Koq..Bamum Combining Mark Tuk
(0x0a802, 0x0a802,), # Syloti Nagri Sign Dvisvara
(0x0a806, 0x0a806,), # Syloti Nagri Sign Hasanta
(0x0a80b, 0x0a80b,), # Syloti Nagri Sign Anusvara
(0x0a825, 0x0a826,), # Syloti Nagri Vowel Sign ..Syloti Nagri Vowel Sign
(0x0a82c, 0x0a82c,), # Syloti Nagri Sign Alternate Hasanta
(0x0a8c4, 0x0a8c5,), # Saurashtra Sign Virama ..Saurashtra Sign Candrabi
(0x0a8e0, 0x0a8f1,), # Combining Devanagari Dig..Combining Devanagari Sig
(0x0a8ff, 0x0a8ff,), # Devanagari Vowel Sign Ay
(0x0a926, 0x0a92d,), # Kayah Li Vowel Ue ..Kayah Li Tone Calya Plop
(0x0a947, 0x0a951,), # Rejang Vowel Sign I ..Rejang Consonant Sign R
(0x0a980, 0x0a982,), # Javanese Sign Panyangga ..Javanese Sign Layar
(0x0a9b3, 0x0a9b3,), # Javanese Sign Cecak Telu
(0x0a9b6, 0x0a9b9,), # Javanese Vowel Sign Wulu..Javanese Vowel Sign Suku
(0x0a9bc, 0x0a9bd,), # Javanese Vowel Sign Pepe..Javanese Consonant Sign
(0x0a9e5, 0x0a9e5,), # Myanmar Sign Shan Saw
(0x0aa29, 0x0aa2e,), # Cham Vowel Sign Aa ..Cham Vowel Sign Oe
(0x0aa31, 0x0aa32,), # Cham Vowel Sign Au ..Cham Vowel Sign Ue
(0x0aa35, 0x0aa36,), # Cham Consonant Sign La ..Cham Consonant Sign Wa
(0x0aa43, 0x0aa43,), # Cham Consonant Sign Final Ng
(0x0aa4c, 0x0aa4c,), # Cham Consonant Sign Final M
(0x0aa7c, 0x0aa7c,), # Myanmar Sign Tai Laing Tone-2
(0x0aab0, 0x0aab0,), # Tai Viet Mai Kang
(0x0aab2, 0x0aab4,), # Tai Viet Vowel I ..Tai Viet Vowel U
(0x0aab7, 0x0aab8,), # Tai Viet Mai Khit ..Tai Viet Vowel Ia
(0x0aabe, 0x0aabf,), # Tai Viet Vowel Am ..Tai Viet Tone Mai Ek
(0x0aac1, 0x0aac1,), # Tai Viet Tone Mai Tho
(0x0aaec, 0x0aaed,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0aaf6, 0x0aaf6,), # Meetei Mayek Virama
(0x0abe5, 0x0abe5,), # Meetei Mayek Vowel Sign Anap
(0x0abe8, 0x0abe8,), # Meetei Mayek Vowel Sign Unap
(0x0abed, 0x0abed,), # Meetei Mayek Apun Iyek
(0x0fb1e, 0x0fb1e,), # Hebrew Point Judeo-spanish Varika
(0x0fe00, 0x0fe0f,), # Variation Selector-1 ..Variation Selector-16
(0x0fe20, 0x0fe2f,), # Combining Ligature Left ..Combining Cyrillic Titlo
(0x101fd, 0x101fd,), # Phaistos Disc Sign Combining Oblique Stroke
(0x102e0, 0x102e0,), # Coptic Epact Thousands Mark
(0x10376, 0x1037a,), # Combining Old Permic Let..Combining Old Permic Let
(0x10a01, 0x10a03,), # Kharoshthi Vowel Sign I ..Kharoshthi Vowel Sign Vo
(0x10a05, 0x10a06,), # Kharoshthi Vowel Sign E ..Kharoshthi Vowel Sign O
(0x10a0c, 0x10a0f,), # Kharoshthi Vowel Length ..Kharoshthi Sign Visarga
(0x10a38, 0x10a3a,), # Kharoshthi Sign Bar Abov..Kharoshthi Sign Dot Belo
(0x10a3f, 0x10a3f,), # Kharoshthi Virama
(0x10ae5, 0x10ae6,), # Manichaean Abbreviation ..Manichaean Abbreviation
(0x10d24, 0x10d27,), # Hanifi Rohingya Sign Har..Hanifi Rohingya Sign Tas
(0x10eab, 0x10eac,), # Yezidi Combining Hamza M..Yezidi Combining Madda M
(0x10efd, 0x10eff,), # (nil)
(0x10f46, 0x10f50,), # Sogdian Combining Dot Be..Sogdian Combining Stroke
(0x10f82, 0x10f85,), # Old Uyghur Combining Dot..Old Uyghur Combining Two
(0x11001, 0x11001,), # Brahmi Sign Anusvara
(0x11038, 0x11046,), # Brahmi Vowel Sign Aa ..Brahmi Virama
(0x11070, 0x11070,), # Brahmi Sign Old Tamil Virama
(0x11073, 0x11074,), # Brahmi Vowel Sign Old Ta..Brahmi Vowel Sign Old Ta
(0x1107f, 0x11081,), # Brahmi Number Joiner ..Kaithi Sign Anusvara
(0x110b3, 0x110b6,), # Kaithi Vowel Sign U ..Kaithi Vowel Sign Ai
(0x110b9, 0x110ba,), # Kaithi Sign Virama ..Kaithi Sign Nukta
(0x110c2, 0x110c2,), # Kaithi Vowel Sign Vocalic R
(0x11100, 0x11102,), # Chakma Sign Candrabindu ..Chakma Sign Visarga
(0x11127, 0x1112b,), # Chakma Vowel Sign A ..Chakma Vowel Sign Uu
(0x1112d, 0x11134,), # Chakma Vowel Sign Ai ..Chakma Maayyaa
(0x11173, 0x11173,), # Mahajani Sign Nukta
(0x11180, 0x11181,), # Sharada Sign Candrabindu..Sharada Sign Anusvara
(0x111b6, 0x111be,), # Sharada Vowel Sign U ..Sharada Vowel Sign O
(0x111c9, 0x111cc,), # Sharada Sandhi Mark ..Sharada Extra Short Vowe
(0x111cf, 0x111cf,), # Sharada Sign Inverted Candrabindu
(0x1122f, 0x11231,), # Khojki Vowel Sign U ..Khojki Vowel Sign Ai
(0x11234, 0x11234,), # Khojki Sign Anusvara
(0x11236, 0x11237,), # Khojki Sign Nukta ..Khojki Sign Shadda
(0x1123e, 0x1123e,), # Khojki Sign Sukun
(0x11241, 0x11241,), # (nil)
(0x112df, 0x112df,), # Khudawadi Sign Anusvara
(0x112e3, 0x112ea,), # Khudawadi Vowel Sign U ..Khudawadi Sign Virama
(0x11300, 0x11301,), # Grantha Sign Combining A..Grantha Sign Candrabindu
(0x1133b, 0x1133c,), # Combining Bindu Below ..Grantha Sign Nukta
(0x11340, 0x11340,), # Grantha Vowel Sign Ii
(0x11366, 0x1136c,), # Combining Grantha Digit ..Combining Grantha Digit
(0x11370, 0x11374,), # Combining Grantha Letter..Combining Grantha Letter
(0x11438, 0x1143f,), # Newa Vowel Sign U ..Newa Vowel Sign Ai
(0x11442, 0x11444,), # Newa Sign Virama ..Newa Sign Anusvara
(0x11446, 0x11446,), # Newa Sign Nukta
(0x1145e, 0x1145e,), # Newa Sandhi Mark
(0x114b3, 0x114b8,), # Tirhuta Vowel Sign U ..Tirhuta Vowel Sign Vocal
(0x114ba, 0x114ba,), # Tirhuta Vowel Sign Short E
(0x114bf, 0x114c0,), # Tirhuta Sign Candrabindu..Tirhuta Sign Anusvara
(0x114c2, 0x114c3,), # Tirhuta Sign Virama ..Tirhuta Sign Nukta
(0x115b2, 0x115b5,), # Siddham Vowel Sign U ..Siddham Vowel Sign Vocal
(0x115bc, 0x115bd,), # Siddham Sign Candrabindu..Siddham Sign Anusvara
(0x115bf, 0x115c0,), # Siddham Sign Virama ..Siddham Sign Nukta
(0x115dc, 0x115dd,), # Siddham Vowel Sign Alter..Siddham Vowel Sign Alter
(0x11633, 0x1163a,), # Modi Vowel Sign U ..Modi Vowel Sign Ai
(0x1163d, 0x1163d,), # Modi Sign Anusvara
(0x1163f, 0x11640,), # Modi Sign Virama ..Modi Sign Ardhacandra
(0x116ab, 0x116ab,), # Takri Sign Anusvara
(0x116ad, 0x116ad,), # Takri Vowel Sign Aa
(0x116b0, 0x116b5,), # Takri Vowel Sign U ..Takri Vowel Sign Au
(0x116b7, 0x116b7,), # Takri Sign Nukta
(0x1171d, 0x1171f,), # Ahom Consonant Sign Medi..Ahom Consonant Sign Medi
(0x11722, 0x11725,), # Ahom Vowel Sign I ..Ahom Vowel Sign Uu
(0x11727, 0x1172b,), # Ahom Vowel Sign Aw ..Ahom Sign Killer
(0x1182f, 0x11837,), # Dogra Vowel Sign U ..Dogra Sign Anusvara
(0x11839, 0x1183a,), # Dogra Sign Virama ..Dogra Sign Nukta
(0x1193b, 0x1193c,), # Dives Akuru Sign Anusvar..Dives Akuru Sign Candrab
(0x1193e, 0x1193e,), # Dives Akuru Virama
(0x11943, 0x11943,), # Dives Akuru Sign Nukta
(0x119d4, 0x119d7,), # Nandinagari Vowel Sign U..Nandinagari Vowel Sign V
(0x119da, 0x119db,), # Nandinagari Vowel Sign E..Nandinagari Vowel Sign A
(0x119e0, 0x119e0,), # Nandinagari Sign Virama
(0x11a01, 0x11a0a,), # Zanabazar Square Vowel S..Zanabazar Square Vowel L
(0x11a33, 0x11a38,), # Zanabazar Square Final C..Zanabazar Square Sign An
(0x11a3b, 0x11a3e,), # Zanabazar Square Cluster..Zanabazar Square Cluster
(0x11a47, 0x11a47,), # Zanabazar Square Subjoiner
(0x11a51, 0x11a56,), # Soyombo Vowel Sign I ..Soyombo Vowel Sign Oe
(0x11a59, 0x11a5b,), # Soyombo Vowel Sign Vocal..Soyombo Vowel Length Mar
(0x11a8a, 0x11a96,), # Soyombo Final Consonant ..Soyombo Sign Anusvara
(0x11a98, 0x11a99,), # Soyombo Gemination Mark ..Soyombo Subjoiner
(0x11c30, 0x11c36,), # Bhaiksuki Vowel Sign I ..Bhaiksuki Vowel Sign Voc
(0x11c38, 0x11c3d,), # Bhaiksuki Vowel Sign E ..Bhaiksuki Sign Anusvara
(0x11c3f, 0x11c3f,), # Bhaiksuki Sign Virama
(0x11c92, 0x11ca7,), # Marchen Subjoined Letter..Marchen Subjoined Letter
(0x11caa, 0x11cb0,), # Marchen Subjoined Letter..Marchen Vowel Sign Aa
(0x11cb2, 0x11cb3,), # Marchen Vowel Sign U ..Marchen Vowel Sign E
(0x11cb5, 0x11cb6,), # Marchen Sign Anusvara ..Marchen Sign Candrabindu
(0x11d31, 0x11d36,), # Masaram Gondi Vowel Sign..Masaram Gondi Vowel Sign
(0x11d3a, 0x11d3a,), # Masaram Gondi Vowel Sign E
(0x11d3c, 0x11d3d,), # Masaram Gondi Vowel Sign..Masaram Gondi Vowel Sign
(0x11d3f, 0x11d45,), # Masaram Gondi Vowel Sign..Masaram Gondi Virama
(0x11d47, 0x11d47,), # Masaram Gondi Ra-kara
(0x11d90, 0x11d91,), # Gunjala Gondi Vowel Sign..Gunjala Gondi Vowel Sign
(0x11d95, 0x11d95,), # Gunjala Gondi Sign Anusvara
(0x11d97, 0x11d97,), # Gunjala Gondi Virama
(0x11ef3, 0x11ef4,), # Makasar Vowel Sign I ..Makasar Vowel Sign U
(0x11f00, 0x11f01,), # (nil)
(0x11f36, 0x11f3a,), # (nil)
(0x11f40, 0x11f40,), # (nil)
(0x11f42, 0x11f42,), # (nil)
(0x13440, 0x13440,), # (nil)
(0x13447, 0x13455,), # (nil)
(0x16af0, 0x16af4,), # Bassa Vah Combining High..Bassa Vah Combining High
(0x16b30, 0x16b36,), # Pahawh Hmong Mark Cim Tu..Pahawh Hmong Mark Cim Ta
(0x16f4f, 0x16f4f,), # Miao Sign Consonant Modifier Bar
(0x16f8f, 0x16f92,), # Miao Tone Right ..Miao Tone Below
(0x16fe4, 0x16fe4,), # Khitan Small Script Filler
(0x1bc9d, 0x1bc9e,), # Duployan Thick Letter Se..Duployan Double Mark
(0x1cf00, 0x1cf2d,), # Znamenny Combining Mark ..Znamenny Combining Mark
(0x1cf30, 0x1cf46,), # Znamenny Combining Tonal..Znamenny Priznak Modifie
(0x1d167, 0x1d169,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d17b, 0x1d182,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d185, 0x1d18b,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d1aa, 0x1d1ad,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d242, 0x1d244,), # Combining Greek Musical ..Combining Greek Musical
(0x1da00, 0x1da36,), # Signwriting Head Rim ..Signwriting Air Sucking
(0x1da3b, 0x1da6c,), # Signwriting Mouth Closed..Signwriting Excitement
(0x1da75, 0x1da75,), # Signwriting Upper Body Tilting From Hip Joints
(0x1da84, 0x1da84,), # Signwriting Location Head Neck
(0x1da9b, 0x1da9f,), # Signwriting Fill Modifie..Signwriting Fill Modifie
(0x1daa1, 0x1daaf,), # Signwriting Rotation Mod..Signwriting Rotation Mod
(0x1e000, 0x1e006,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e008, 0x1e018,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e01b, 0x1e021,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e023, 0x1e024,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e026, 0x1e02a,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e08f, 0x1e08f,), # (nil)
(0x1e130, 0x1e136,), # Nyiakeng Puachue Hmong T..Nyiakeng Puachue Hmong T
(0x1e2ae, 0x1e2ae,), # Toto Sign Rising Tone
(0x1e2ec, 0x1e2ef,), # Wancho Tone Tup ..Wancho Tone Koini
(0x1e4ec, 0x1e4ef,), # (nil)
(0x1e8d0, 0x1e8d6,), # Mende Kikakui Combining ..Mende Kikakui Combining
(0x1e944, 0x1e94a,), # Adlam Alif Lengthener ..Adlam Nukta
(0xe0100, 0xe01ef,), # Variation Selector-17 ..Variation Selector-256
),
}
| 329,095 | Python | 70.542609 | 82 | 0.63678 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/wcwidth/unicode_versions.py | """
Exports function list_versions() for unicode version level support.
This code was generated by wcwidth/bin/update-tables.py on 2023-01-14 00:53:07 UTC.
"""
def list_versions():
"""
Return Unicode version levels supported by this module release.
Any of the version strings returned may be used as keyword argument
``unicode_version`` to the ``wcwidth()`` family of functions.
:returns: Supported Unicode version numbers in ascending sorted order.
:rtype: list[str]
"""
return (
"4.1.0",
"5.0.0",
"5.1.0",
"5.2.0",
"6.0.0",
"6.1.0",
"6.2.0",
"6.3.0",
"7.0.0",
"8.0.0",
"9.0.0",
"10.0.0",
"11.0.0",
"12.0.0",
"12.1.0",
"13.0.0",
"14.0.0",
"15.0.0",
)
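# A minimal usage sketch (illustrative only, not emitted by update-tables.py):
# per the docstring above, any string returned by list_versions() is a valid
# value for the ``unicode_version`` keyword accepted by this package's
# wcwidth()/wcswidth() functions.  Assumes the wcwidth package is importable.
if __name__ == "__main__":
    from wcwidth import wcswidth
    for version in list_versions():
        # 'コンニチハ' is five wide (double-cell) katakana characters.
        assert wcswidth('コンニチハ', unicode_version=version) == 10
    print('checked', len(list_versions()), 'unicode versions')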
| 833 | Python | 20.947368 | 79 | 0.509004 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.repl/pip_prebundle/wcwidth/table_wide.py | """
Exports WIDE_EASTASIAN table keyed by supporting unicode version level.
This code was generated by wcwidth/bin/update-tables.py on 2023-01-14 03:25:41 UTC.
"""
WIDE_EASTASIAN = {
'4.1.0': (
# Source: EastAsianWidth-4.1.0.txt
# Date: 2005-03-17, 15:21:00 PST [KW]
#
(0x01100, 0x01159,), # Hangul Choseong Kiyeok ..Hangul Choseong Yeorinhi
(0x0115f, 0x0115f,), # Hangul Choseong Filler
(0x02329, 0x0232a,), # Left-pointing Angle Brac..Right-pointing Angle Bra
(0x02e80, 0x02e99,), # Cjk Radical Repeat ..Cjk Radical Rap
(0x02e9b, 0x02ef3,), # Cjk Radical Choke ..Cjk Radical C-simplified
(0x02f00, 0x02fd5,), # Kangxi Radical One ..Kangxi Radical Flute
(0x02ff0, 0x02ffb,), # Ideographic Description ..Ideographic Description
(0x03000, 0x0303e,), # Ideographic Space ..Ideographic Variation In
(0x03041, 0x03096,), # Hiragana Letter Small A ..Hiragana Letter Small Ke
(0x03099, 0x030ff,), # Combining Katakana-hirag..Katakana Digraph Koto
(0x03105, 0x0312c,), # Bopomofo Letter B ..Bopomofo Letter Gn
(0x03131, 0x0318e,), # Hangul Letter Kiyeok ..Hangul Letter Araeae
(0x03190, 0x031b7,), # Ideographic Annotation L..Bopomofo Final Letter H
(0x031c0, 0x031cf,), # Cjk Stroke T ..Cjk Stroke N
(0x031f0, 0x0321e,), # Katakana Letter Small Ku..Parenthesized Korean Cha
(0x03220, 0x03243,), # Parenthesized Ideograph ..Parenthesized Ideograph
(0x03250, 0x032fe,), # Partnership Sign ..Circled Katakana Wo
(0x03300, 0x04db5,), # Square Apaato ..Cjk Unified Ideograph-4d
(0x04e00, 0x09fbb,), # Cjk Unified Ideograph-4e..Cjk Unified Ideograph-9f
(0x0a000, 0x0a48c,), # Yi Syllable It ..Yi Syllable Yyr
(0x0a490, 0x0a4c6,), # Yi Radical Qot ..Yi Radical Ke
(0x0ac00, 0x0d7a3,), # Hangul Syllable Ga ..Hangul Syllable Hih
(0x0f900, 0x0fa2d,), # Cjk Compatibility Ideogr..Cjk Compatibility Ideogr
(0x0fa30, 0x0fa6a,), # Cjk Compatibility Ideogr..Cjk Compatibility Ideogr
(0x0fa70, 0x0fad9,), # Cjk Compatibility Ideogr..Cjk Compatibility Ideogr
(0x0fe10, 0x0fe19,), # Presentation Form For Ve..Presentation Form For Ve
(0x0fe30, 0x0fe52,), # Presentation Form For Ve..Small Full Stop
(0x0fe54, 0x0fe66,), # Small Semicolon ..Small Equals Sign
(0x0fe68, 0x0fe6b,), # Small Reverse Solidus ..Small Commercial At
(0x0ff01, 0x0ff60,), # Fullwidth Exclamation Ma..Fullwidth Right White Pa
(0x0ffe0, 0x0ffe6,), # Fullwidth Cent Sign ..Fullwidth Won Sign
(0x20000, 0x2fffd,), # Cjk Unified Ideograph-20..(nil)
(0x30000, 0x3fffd,), # Cjk Unified Ideograph-30..(nil)
),
'5.0.0': (
# Source: EastAsianWidth-5.0.0.txt
# Date: 2006-02-15, 14:39:00 PST [KW]
#
(0x01100, 0x01159,), # Hangul Choseong Kiyeok ..Hangul Choseong Yeorinhi
(0x0115f, 0x0115f,), # Hangul Choseong Filler
(0x02329, 0x0232a,), # Left-pointing Angle Brac..Right-pointing Angle Bra
(0x02e80, 0x02e99,), # Cjk Radical Repeat ..Cjk Radical Rap
(0x02e9b, 0x02ef3,), # Cjk Radical Choke ..Cjk Radical C-simplified
(0x02f00, 0x02fd5,), # Kangxi Radical One ..Kangxi Radical Flute
(0x02ff0, 0x02ffb,), # Ideographic Description ..Ideographic Description
(0x03000, 0x0303e,), # Ideographic Space ..Ideographic Variation In
(0x03041, 0x03096,), # Hiragana Letter Small A ..Hiragana Letter Small Ke
(0x03099, 0x030ff,), # Combining Katakana-hirag..Katakana Digraph Koto
(0x03105, 0x0312c,), # Bopomofo Letter B ..Bopomofo Letter Gn
(0x03131, 0x0318e,), # Hangul Letter Kiyeok ..Hangul Letter Araeae
(0x03190, 0x031b7,), # Ideographic Annotation L..Bopomofo Final Letter H
(0x031c0, 0x031cf,), # Cjk Stroke T ..Cjk Stroke N
(0x031f0, 0x0321e,), # Katakana Letter Small Ku..Parenthesized Korean Cha
(0x03220, 0x03243,), # Parenthesized Ideograph ..Parenthesized Ideograph
(0x03250, 0x032fe,), # Partnership Sign ..Circled Katakana Wo
(0x03300, 0x04db5,), # Square Apaato ..Cjk Unified Ideograph-4d
(0x04e00, 0x09fbb,), # Cjk Unified Ideograph-4e..Cjk Unified Ideograph-9f
(0x0a000, 0x0a48c,), # Yi Syllable It ..Yi Syllable Yyr
(0x0a490, 0x0a4c6,), # Yi Radical Qot ..Yi Radical Ke
(0x0ac00, 0x0d7a3,), # Hangul Syllable Ga ..Hangul Syllable Hih
(0x0f900, 0x0fa2d,), # Cjk Compatibility Ideogr..Cjk Compatibility Ideogr
(0x0fa30, 0x0fa6a,), # Cjk Compatibility Ideogr..Cjk Compatibility Ideogr
(0x0fa70, 0x0fad9,), # Cjk Compatibility Ideogr..Cjk Compatibility Ideogr
(0x0fe10, 0x0fe19,), # Presentation Form For Ve..Presentation Form For Ve
(0x0fe30, 0x0fe52,), # Presentation Form For Ve..Small Full Stop
(0x0fe54, 0x0fe66,), # Small Semicolon ..Small Equals Sign
(0x0fe68, 0x0fe6b,), # Small Reverse Solidus ..Small Commercial At
(0x0ff01, 0x0ff60,), # Fullwidth Exclamation Ma..Fullwidth Right White Pa
(0x0ffe0, 0x0ffe6,), # Fullwidth Cent Sign ..Fullwidth Won Sign
(0x20000, 0x2fffd,), # Cjk Unified Ideograph-20..(nil)
(0x30000, 0x3fffd,), # Cjk Unified Ideograph-30..(nil)
),
'5.1.0': (
# Source: EastAsianWidth-5.1.0.txt
# Date: 2008-03-20, 17:42:00 PDT [KW]
#
(0x01100, 0x01159,), # Hangul Choseong Kiyeok ..Hangul Choseong Yeorinhi
(0x0115f, 0x0115f,), # Hangul Choseong Filler
(0x02329, 0x0232a,), # Left-pointing Angle Brac..Right-pointing Angle Bra
(0x02e80, 0x02e99,), # Cjk Radical Repeat ..Cjk Radical Rap
(0x02e9b, 0x02ef3,), # Cjk Radical Choke ..Cjk Radical C-simplified
(0x02f00, 0x02fd5,), # Kangxi Radical One ..Kangxi Radical Flute
(0x02ff0, 0x02ffb,), # Ideographic Description ..Ideographic Description
(0x03000, 0x0303e,), # Ideographic Space ..Ideographic Variation In
(0x03041, 0x03096,), # Hiragana Letter Small A ..Hiragana Letter Small Ke
(0x03099, 0x030ff,), # Combining Katakana-hirag..Katakana Digraph Koto
(0x03105, 0x0312d,), # Bopomofo Letter B ..Bopomofo Letter Ih
(0x03131, 0x0318e,), # Hangul Letter Kiyeok ..Hangul Letter Araeae
(0x03190, 0x031b7,), # Ideographic Annotation L..Bopomofo Final Letter H
(0x031c0, 0x031e3,), # Cjk Stroke T ..Cjk Stroke Q
(0x031f0, 0x0321e,), # Katakana Letter Small Ku..Parenthesized Korean Cha
(0x03220, 0x03243,), # Parenthesized Ideograph ..Parenthesized Ideograph
(0x03250, 0x032fe,), # Partnership Sign ..Circled Katakana Wo
(0x03300, 0x04db5,), # Square Apaato ..Cjk Unified Ideograph-4d
(0x04e00, 0x09fc3,), # Cjk Unified Ideograph-4e..Cjk Unified Ideograph-9f
(0x0a000, 0x0a48c,), # Yi Syllable It ..Yi Syllable Yyr
(0x0a490, 0x0a4c6,), # Yi Radical Qot ..Yi Radical Ke
(0x0ac00, 0x0d7a3,), # Hangul Syllable Ga ..Hangul Syllable Hih
(0x0f900, 0x0fa2d,), # Cjk Compatibility Ideogr..Cjk Compatibility Ideogr
(0x0fa30, 0x0fa6a,), # Cjk Compatibility Ideogr..Cjk Compatibility Ideogr
(0x0fa70, 0x0fad9,), # Cjk Compatibility Ideogr..Cjk Compatibility Ideogr
(0x0fe10, 0x0fe19,), # Presentation Form For Ve..Presentation Form For Ve
(0x0fe30, 0x0fe52,), # Presentation Form For Ve..Small Full Stop
(0x0fe54, 0x0fe66,), # Small Semicolon ..Small Equals Sign
(0x0fe68, 0x0fe6b,), # Small Reverse Solidus ..Small Commercial At
(0x0ff01, 0x0ff60,), # Fullwidth Exclamation Ma..Fullwidth Right White Pa
(0x0ffe0, 0x0ffe6,), # Fullwidth Cent Sign ..Fullwidth Won Sign
(0x20000, 0x2fffd,), # Cjk Unified Ideograph-20..(nil)
(0x30000, 0x3fffd,), # Cjk Unified Ideograph-30..(nil)
),
'5.2.0': (
# Source: EastAsianWidth-5.2.0.txt
# Date: 2009-06-09, 17:47:00 PDT [KW]
#
(0x01100, 0x0115f,), # Hangul Choseong Kiyeok ..Hangul Choseong Filler
(0x011a3, 0x011a7,), # Hangul Jungseong A-eu ..Hangul Jungseong O-yae
(0x011fa, 0x011ff,), # Hangul Jongseong Kiyeok-..Hangul Jongseong Ssangni
(0x02329, 0x0232a,), # Left-pointing Angle Brac..Right-pointing Angle Bra
(0x02e80, 0x02e99,), # Cjk Radical Repeat ..Cjk Radical Rap
(0x02e9b, 0x02ef3,), # Cjk Radical Choke ..Cjk Radical C-simplified
(0x02f00, 0x02fd5,), # Kangxi Radical One ..Kangxi Radical Flute
(0x02ff0, 0x02ffb,), # Ideographic Description ..Ideographic Description
(0x03000, 0x0303e,), # Ideographic Space ..Ideographic Variation In
(0x03041, 0x03096,), # Hiragana Letter Small A ..Hiragana Letter Small Ke
(0x03099, 0x030ff,), # Combining Katakana-hirag..Katakana Digraph Koto
(0x03105, 0x0312d,), # Bopomofo Letter B ..Bopomofo Letter Ih
(0x03131, 0x0318e,), # Hangul Letter Kiyeok ..Hangul Letter Araeae
(0x03190, 0x031b7,), # Ideographic Annotation L..Bopomofo Final Letter H
(0x031c0, 0x031e3,), # Cjk Stroke T ..Cjk Stroke Q
(0x031f0, 0x0321e,), # Katakana Letter Small Ku..Parenthesized Korean Cha
(0x03220, 0x03247,), # Parenthesized Ideograph ..Circled Ideograph Koto
(0x03250, 0x032fe,), # Partnership Sign ..Circled Katakana Wo
(0x03300, 0x04dbf,), # Square Apaato ..Cjk Unified Ideograph-4d
(0x04e00, 0x0a48c,), # Cjk Unified Ideograph-4e..Yi Syllable Yyr
(0x0a490, 0x0a4c6,), # Yi Radical Qot ..Yi Radical Ke
(0x0a960, 0x0a97c,), # Hangul Choseong Tikeut-m..Hangul Choseong Ssangyeo
(0x0ac00, 0x0d7a3,), # Hangul Syllable Ga ..Hangul Syllable Hih
(0x0d7b0, 0x0d7c6,), # Hangul Jungseong O-yeo ..Hangul Jungseong Araea-e
(0x0d7cb, 0x0d7fb,), # Hangul Jongseong Nieun-r..Hangul Jongseong Phieuph
(0x0f900, 0x0faff,), # Cjk Compatibility Ideogr..(nil)
(0x0fe10, 0x0fe19,), # Presentation Form For Ve..Presentation Form For Ve
(0x0fe30, 0x0fe52,), # Presentation Form For Ve..Small Full Stop
(0x0fe54, 0x0fe66,), # Small Semicolon ..Small Equals Sign
(0x0fe68, 0x0fe6b,), # Small Reverse Solidus ..Small Commercial At
(0x0ff01, 0x0ff60,), # Fullwidth Exclamation Ma..Fullwidth Right White Pa
(0x0ffe0, 0x0ffe6,), # Fullwidth Cent Sign ..Fullwidth Won Sign
(0x1f200, 0x1f200,), # Square Hiragana Hoka
(0x1f210, 0x1f231,), # Squared Cjk Unified Ideo..Squared Cjk Unified Ideo
(0x1f240, 0x1f248,), # Tortoise Shell Bracketed..Tortoise Shell Bracketed
(0x20000, 0x2fffd,), # Cjk Unified Ideograph-20..(nil)
(0x30000, 0x3fffd,), # Cjk Unified Ideograph-30..(nil)
),
'6.0.0': (
# Source: EastAsianWidth-6.0.0.txt
# Date: 2010-08-17, 12:17:00 PDT [KW]
#
(0x01100, 0x0115f,), # Hangul Choseong Kiyeok ..Hangul Choseong Filler
(0x011a3, 0x011a7,), # Hangul Jungseong A-eu ..Hangul Jungseong O-yae
(0x011fa, 0x011ff,), # Hangul Jongseong Kiyeok-..Hangul Jongseong Ssangni
(0x02329, 0x0232a,), # Left-pointing Angle Brac..Right-pointing Angle Bra
(0x02e80, 0x02e99,), # Cjk Radical Repeat ..Cjk Radical Rap
(0x02e9b, 0x02ef3,), # Cjk Radical Choke ..Cjk Radical C-simplified
(0x02f00, 0x02fd5,), # Kangxi Radical One ..Kangxi Radical Flute
(0x02ff0, 0x02ffb,), # Ideographic Description ..Ideographic Description
(0x03000, 0x0303e,), # Ideographic Space ..Ideographic Variation In
(0x03041, 0x03096,), # Hiragana Letter Small A ..Hiragana Letter Small Ke
(0x03099, 0x030ff,), # Combining Katakana-hirag..Katakana Digraph Koto
(0x03105, 0x0312d,), # Bopomofo Letter B ..Bopomofo Letter Ih
(0x03131, 0x0318e,), # Hangul Letter Kiyeok ..Hangul Letter Araeae
(0x03190, 0x031ba,), # Ideographic Annotation L..Bopomofo Letter Zy
(0x031c0, 0x031e3,), # Cjk Stroke T ..Cjk Stroke Q
(0x031f0, 0x0321e,), # Katakana Letter Small Ku..Parenthesized Korean Cha
(0x03220, 0x03247,), # Parenthesized Ideograph ..Circled Ideograph Koto
(0x03250, 0x032fe,), # Partnership Sign ..Circled Katakana Wo
(0x03300, 0x04dbf,), # Square Apaato ..Cjk Unified Ideograph-4d
(0x04e00, 0x0a48c,), # Cjk Unified Ideograph-4e..Yi Syllable Yyr
(0x0a490, 0x0a4c6,), # Yi Radical Qot ..Yi Radical Ke
(0x0a960, 0x0a97c,), # Hangul Choseong Tikeut-m..Hangul Choseong Ssangyeo
(0x0ac00, 0x0d7a3,), # Hangul Syllable Ga ..Hangul Syllable Hih
(0x0d7b0, 0x0d7c6,), # Hangul Jungseong O-yeo ..Hangul Jungseong Araea-e
(0x0d7cb, 0x0d7fb,), # Hangul Jongseong Nieun-r..Hangul Jongseong Phieuph
(0x0f900, 0x0faff,), # Cjk Compatibility Ideogr..(nil)
(0x0fe10, 0x0fe19,), # Presentation Form For Ve..Presentation Form For Ve
(0x0fe30, 0x0fe52,), # Presentation Form For Ve..Small Full Stop
(0x0fe54, 0x0fe66,), # Small Semicolon ..Small Equals Sign
(0x0fe68, 0x0fe6b,), # Small Reverse Solidus ..Small Commercial At
(0x0ff01, 0x0ff60,), # Fullwidth Exclamation Ma..Fullwidth Right White Pa
(0x0ffe0, 0x0ffe6,), # Fullwidth Cent Sign ..Fullwidth Won Sign
(0x1b000, 0x1b001,), # Katakana Letter Archaic ..Hiragana Letter Archaic
(0x1f200, 0x1f202,), # Square Hiragana Hoka ..Squared Katakana Sa
(0x1f210, 0x1f23a,), # Squared Cjk Unified Ideo..Squared Cjk Unified Ideo
(0x1f240, 0x1f248,), # Tortoise Shell Bracketed..Tortoise Shell Bracketed
(0x1f250, 0x1f251,), # Circled Ideograph Advant..Circled Ideograph Accept
(0x20000, 0x2fffd,), # Cjk Unified Ideograph-20..(nil)
(0x30000, 0x3fffd,), # Cjk Unified Ideograph-30..(nil)
),
'6.1.0': (
# Source: EastAsianWidth-6.1.0.txt
# Date: 2011-09-19, 18:46:00 GMT [KW]
#
(0x01100, 0x0115f,), # Hangul Choseong Kiyeok ..Hangul Choseong Filler
(0x011a3, 0x011a7,), # Hangul Jungseong A-eu ..Hangul Jungseong O-yae
(0x011fa, 0x011ff,), # Hangul Jongseong Kiyeok-..Hangul Jongseong Ssangni
(0x02329, 0x0232a,), # Left-pointing Angle Brac..Right-pointing Angle Bra
(0x02e80, 0x02e99,), # Cjk Radical Repeat ..Cjk Radical Rap
(0x02e9b, 0x02ef3,), # Cjk Radical Choke ..Cjk Radical C-simplified
(0x02f00, 0x02fd5,), # Kangxi Radical One ..Kangxi Radical Flute
(0x02ff0, 0x02ffb,), # Ideographic Description ..Ideographic Description
(0x03000, 0x0303e,), # Ideographic Space ..Ideographic Variation In
(0x03041, 0x03096,), # Hiragana Letter Small A ..Hiragana Letter Small Ke
(0x03099, 0x030ff,), # Combining Katakana-hirag..Katakana Digraph Koto
(0x03105, 0x0312d,), # Bopomofo Letter B ..Bopomofo Letter Ih
(0x03131, 0x0318e,), # Hangul Letter Kiyeok ..Hangul Letter Araeae
(0x03190, 0x031ba,), # Ideographic Annotation L..Bopomofo Letter Zy
(0x031c0, 0x031e3,), # Cjk Stroke T ..Cjk Stroke Q
(0x031f0, 0x0321e,), # Katakana Letter Small Ku..Parenthesized Korean Cha
(0x03220, 0x03247,), # Parenthesized Ideograph ..Circled Ideograph Koto
(0x03250, 0x032fe,), # Partnership Sign ..Circled Katakana Wo
(0x03300, 0x04dbf,), # Square Apaato ..Cjk Unified Ideograph-4d
(0x04e00, 0x0a48c,), # Cjk Unified Ideograph-4e..Yi Syllable Yyr
(0x0a490, 0x0a4c6,), # Yi Radical Qot ..Yi Radical Ke
(0x0a960, 0x0a97c,), # Hangul Choseong Tikeut-m..Hangul Choseong Ssangyeo
(0x0ac00, 0x0d7a3,), # Hangul Syllable Ga ..Hangul Syllable Hih
(0x0d7b0, 0x0d7c6,), # Hangul Jungseong O-yeo ..Hangul Jungseong Araea-e
(0x0d7cb, 0x0d7fb,), # Hangul Jongseong Nieun-r..Hangul Jongseong Phieuph
(0x0f900, 0x0faff,), # Cjk Compatibility Ideogr..(nil)
(0x0fe10, 0x0fe19,), # Presentation Form For Ve..Presentation Form For Ve
(0x0fe30, 0x0fe52,), # Presentation Form For Ve..Small Full Stop
(0x0fe54, 0x0fe66,), # Small Semicolon ..Small Equals Sign
(0x0fe68, 0x0fe6b,), # Small Reverse Solidus ..Small Commercial At
(0x0ff01, 0x0ff60,), # Fullwidth Exclamation Ma..Fullwidth Right White Pa
(0x0ffe0, 0x0ffe6,), # Fullwidth Cent Sign ..Fullwidth Won Sign
(0x1b000, 0x1b001,), # Katakana Letter Archaic ..Hiragana Letter Archaic
(0x1f200, 0x1f202,), # Square Hiragana Hoka ..Squared Katakana Sa
(0x1f210, 0x1f23a,), # Squared Cjk Unified Ideo..Squared Cjk Unified Ideo
(0x1f240, 0x1f248,), # Tortoise Shell Bracketed..Tortoise Shell Bracketed
(0x1f250, 0x1f251,), # Circled Ideograph Advant..Circled Ideograph Accept
(0x20000, 0x2fffd,), # Cjk Unified Ideograph-20..(nil)
(0x30000, 0x3fffd,), # Cjk Unified Ideograph-30..(nil)
),
'6.2.0': (
# Source: EastAsianWidth-6.2.0.txt
# Date: 2012-05-15, 18:30:00 GMT [KW]
#
(0x01100, 0x0115f,), # Hangul Choseong Kiyeok ..Hangul Choseong Filler
(0x02329, 0x0232a,), # Left-pointing Angle Brac..Right-pointing Angle Bra
(0x02e80, 0x02e99,), # Cjk Radical Repeat ..Cjk Radical Rap
(0x02e9b, 0x02ef3,), # Cjk Radical Choke ..Cjk Radical C-simplified
(0x02f00, 0x02fd5,), # Kangxi Radical One ..Kangxi Radical Flute
(0x02ff0, 0x02ffb,), # Ideographic Description ..Ideographic Description
(0x03000, 0x0303e,), # Ideographic Space ..Ideographic Variation In
(0x03041, 0x03096,), # Hiragana Letter Small A ..Hiragana Letter Small Ke
(0x03099, 0x030ff,), # Combining Katakana-hirag..Katakana Digraph Koto
(0x03105, 0x0312d,), # Bopomofo Letter B ..Bopomofo Letter Ih
(0x03131, 0x0318e,), # Hangul Letter Kiyeok ..Hangul Letter Araeae
(0x03190, 0x031ba,), # Ideographic Annotation L..Bopomofo Letter Zy
(0x031c0, 0x031e3,), # Cjk Stroke T ..Cjk Stroke Q
(0x031f0, 0x0321e,), # Katakana Letter Small Ku..Parenthesized Korean Cha
(0x03220, 0x03247,), # Parenthesized Ideograph ..Circled Ideograph Koto
(0x03250, 0x032fe,), # Partnership Sign ..Circled Katakana Wo
(0x03300, 0x04dbf,), # Square Apaato ..Cjk Unified Ideograph-4d
(0x04e00, 0x0a48c,), # Cjk Unified Ideograph-4e..Yi Syllable Yyr
(0x0a490, 0x0a4c6,), # Yi Radical Qot ..Yi Radical Ke
(0x0a960, 0x0a97c,), # Hangul Choseong Tikeut-m..Hangul Choseong Ssangyeo
(0x0ac00, 0x0d7a3,), # Hangul Syllable Ga ..Hangul Syllable Hih
(0x0f900, 0x0faff,), # Cjk Compatibility Ideogr..(nil)
(0x0fe10, 0x0fe19,), # Presentation Form For Ve..Presentation Form For Ve
(0x0fe30, 0x0fe52,), # Presentation Form For Ve..Small Full Stop
(0x0fe54, 0x0fe66,), # Small Semicolon ..Small Equals Sign
(0x0fe68, 0x0fe6b,), # Small Reverse Solidus ..Small Commercial At
(0x0ff01, 0x0ff60,), # Fullwidth Exclamation Ma..Fullwidth Right White Pa
(0x0ffe0, 0x0ffe6,), # Fullwidth Cent Sign ..Fullwidth Won Sign
(0x1b000, 0x1b001,), # Katakana Letter Archaic ..Hiragana Letter Archaic
(0x1f200, 0x1f202,), # Square Hiragana Hoka ..Squared Katakana Sa
(0x1f210, 0x1f23a,), # Squared Cjk Unified Ideo..Squared Cjk Unified Ideo
(0x1f240, 0x1f248,), # Tortoise Shell Bracketed..Tortoise Shell Bracketed
(0x1f250, 0x1f251,), # Circled Ideograph Advant..Circled Ideograph Accept
(0x20000, 0x2fffd,), # Cjk Unified Ideograph-20..(nil)
(0x30000, 0x3fffd,), # Cjk Unified Ideograph-30..(nil)
),
'6.3.0': (
# Source: EastAsianWidth-6.3.0.txt
# Date: 2013-02-05, 20:09:00 GMT [KW, LI]
#
(0x01100, 0x0115f,), # Hangul Choseong Kiyeok ..Hangul Choseong Filler
(0x02329, 0x0232a,), # Left-pointing Angle Brac..Right-pointing Angle Bra
(0x02e80, 0x02e99,), # Cjk Radical Repeat ..Cjk Radical Rap
(0x02e9b, 0x02ef3,), # Cjk Radical Choke ..Cjk Radical C-simplified
(0x02f00, 0x02fd5,), # Kangxi Radical One ..Kangxi Radical Flute
(0x02ff0, 0x02ffb,), # Ideographic Description ..Ideographic Description
(0x03000, 0x0303e,), # Ideographic Space ..Ideographic Variation In
(0x03041, 0x03096,), # Hiragana Letter Small A ..Hiragana Letter Small Ke
(0x03099, 0x030ff,), # Combining Katakana-hirag..Katakana Digraph Koto
(0x03105, 0x0312d,), # Bopomofo Letter B ..Bopomofo Letter Ih
(0x03131, 0x0318e,), # Hangul Letter Kiyeok ..Hangul Letter Araeae
(0x03190, 0x031ba,), # Ideographic Annotation L..Bopomofo Letter Zy
(0x031c0, 0x031e3,), # Cjk Stroke T ..Cjk Stroke Q
(0x031f0, 0x0321e,), # Katakana Letter Small Ku..Parenthesized Korean Cha
(0x03220, 0x03247,), # Parenthesized Ideograph ..Circled Ideograph Koto
(0x03250, 0x032fe,), # Partnership Sign ..Circled Katakana Wo
(0x03300, 0x04dbf,), # Square Apaato ..Cjk Unified Ideograph-4d
(0x04e00, 0x0a48c,), # Cjk Unified Ideograph-4e..Yi Syllable Yyr
(0x0a490, 0x0a4c6,), # Yi Radical Qot ..Yi Radical Ke
(0x0a960, 0x0a97c,), # Hangul Choseong Tikeut-m..Hangul Choseong Ssangyeo
(0x0ac00, 0x0d7a3,), # Hangul Syllable Ga ..Hangul Syllable Hih
(0x0f900, 0x0faff,), # Cjk Compatibility Ideogr..(nil)
(0x0fe10, 0x0fe19,), # Presentation Form For Ve..Presentation Form For Ve
(0x0fe30, 0x0fe52,), # Presentation Form For Ve..Small Full Stop
(0x0fe54, 0x0fe66,), # Small Semicolon ..Small Equals Sign
(0x0fe68, 0x0fe6b,), # Small Reverse Solidus ..Small Commercial At
(0x0ff01, 0x0ff60,), # Fullwidth Exclamation Ma..Fullwidth Right White Pa
(0x0ffe0, 0x0ffe6,), # Fullwidth Cent Sign ..Fullwidth Won Sign
(0x1b000, 0x1b001,), # Katakana Letter Archaic ..Hiragana Letter Archaic
(0x1f200, 0x1f202,), # Square Hiragana Hoka ..Squared Katakana Sa
(0x1f210, 0x1f23a,), # Squared Cjk Unified Ideo..Squared Cjk Unified Ideo
(0x1f240, 0x1f248,), # Tortoise Shell Bracketed..Tortoise Shell Bracketed
(0x1f250, 0x1f251,), # Circled Ideograph Advant..Circled Ideograph Accept
(0x20000, 0x2fffd,), # Cjk Unified Ideograph-20..(nil)
(0x30000, 0x3fffd,), # Cjk Unified Ideograph-30..(nil)
),
'7.0.0': (
# Source: EastAsianWidth-7.0.0.txt
# Date: 2014-02-28, 23:15:00 GMT [KW, LI]
#
(0x01100, 0x0115f,), # Hangul Choseong Kiyeok ..Hangul Choseong Filler
(0x02329, 0x0232a,), # Left-pointing Angle Brac..Right-pointing Angle Bra
(0x02e80, 0x02e99,), # Cjk Radical Repeat ..Cjk Radical Rap
(0x02e9b, 0x02ef3,), # Cjk Radical Choke ..Cjk Radical C-simplified
(0x02f00, 0x02fd5,), # Kangxi Radical One ..Kangxi Radical Flute
(0x02ff0, 0x02ffb,), # Ideographic Description ..Ideographic Description
(0x03000, 0x0303e,), # Ideographic Space ..Ideographic Variation In
(0x03041, 0x03096,), # Hiragana Letter Small A ..Hiragana Letter Small Ke
(0x03099, 0x030ff,), # Combining Katakana-hirag..Katakana Digraph Koto
(0x03105, 0x0312d,), # Bopomofo Letter B ..Bopomofo Letter Ih
(0x03131, 0x0318e,), # Hangul Letter Kiyeok ..Hangul Letter Araeae
(0x03190, 0x031ba,), # Ideographic Annotation L..Bopomofo Letter Zy
(0x031c0, 0x031e3,), # Cjk Stroke T ..Cjk Stroke Q
(0x031f0, 0x0321e,), # Katakana Letter Small Ku..Parenthesized Korean Cha
(0x03220, 0x03247,), # Parenthesized Ideograph ..Circled Ideograph Koto
(0x03250, 0x032fe,), # Partnership Sign ..Circled Katakana Wo
(0x03300, 0x04dbf,), # Square Apaato ..Cjk Unified Ideograph-4d
(0x04e00, 0x0a48c,), # Cjk Unified Ideograph-4e..Yi Syllable Yyr
(0x0a490, 0x0a4c6,), # Yi Radical Qot ..Yi Radical Ke
(0x0a960, 0x0a97c,), # Hangul Choseong Tikeut-m..Hangul Choseong Ssangyeo
(0x0ac00, 0x0d7a3,), # Hangul Syllable Ga ..Hangul Syllable Hih
(0x0f900, 0x0faff,), # Cjk Compatibility Ideogr..(nil)
(0x0fe10, 0x0fe19,), # Presentation Form For Ve..Presentation Form For Ve
(0x0fe30, 0x0fe52,), # Presentation Form For Ve..Small Full Stop
(0x0fe54, 0x0fe66,), # Small Semicolon ..Small Equals Sign
(0x0fe68, 0x0fe6b,), # Small Reverse Solidus ..Small Commercial At
(0x0ff01, 0x0ff60,), # Fullwidth Exclamation Ma..Fullwidth Right White Pa
(0x0ffe0, 0x0ffe6,), # Fullwidth Cent Sign ..Fullwidth Won Sign
(0x1b000, 0x1b001,), # Katakana Letter Archaic ..Hiragana Letter Archaic
(0x1f200, 0x1f202,), # Square Hiragana Hoka ..Squared Katakana Sa
(0x1f210, 0x1f23a,), # Squared Cjk Unified Ideo..Squared Cjk Unified Ideo
(0x1f240, 0x1f248,), # Tortoise Shell Bracketed..Tortoise Shell Bracketed
(0x1f250, 0x1f251,), # Circled Ideograph Advant..Circled Ideograph Accept
(0x20000, 0x2fffd,), # Cjk Unified Ideograph-20..(nil)
(0x30000, 0x3fffd,), # Cjk Unified Ideograph-30..(nil)
),
'8.0.0': (
# Source: EastAsianWidth-8.0.0.txt
# Date: 2015-02-10, 21:00:00 GMT [KW, LI]
#
(0x01100, 0x0115f,), # Hangul Choseong Kiyeok ..Hangul Choseong Filler
(0x02329, 0x0232a,), # Left-pointing Angle Brac..Right-pointing Angle Bra
(0x02e80, 0x02e99,), # Cjk Radical Repeat ..Cjk Radical Rap
(0x02e9b, 0x02ef3,), # Cjk Radical Choke ..Cjk Radical C-simplified
(0x02f00, 0x02fd5,), # Kangxi Radical One ..Kangxi Radical Flute
(0x02ff0, 0x02ffb,), # Ideographic Description ..Ideographic Description
(0x03000, 0x0303e,), # Ideographic Space ..Ideographic Variation In
(0x03041, 0x03096,), # Hiragana Letter Small A ..Hiragana Letter Small Ke
(0x03099, 0x030ff,), # Combining Katakana-hirag..Katakana Digraph Koto
(0x03105, 0x0312d,), # Bopomofo Letter B ..Bopomofo Letter Ih
(0x03131, 0x0318e,), # Hangul Letter Kiyeok ..Hangul Letter Araeae
(0x03190, 0x031ba,), # Ideographic Annotation L..Bopomofo Letter Zy
(0x031c0, 0x031e3,), # Cjk Stroke T ..Cjk Stroke Q
(0x031f0, 0x0321e,), # Katakana Letter Small Ku..Parenthesized Korean Cha
(0x03220, 0x03247,), # Parenthesized Ideograph ..Circled Ideograph Koto
(0x03250, 0x032fe,), # Partnership Sign ..Circled Katakana Wo
(0x03300, 0x04dbf,), # Square Apaato ..Cjk Unified Ideograph-4d
(0x04e00, 0x0a48c,), # Cjk Unified Ideograph-4e..Yi Syllable Yyr
(0x0a490, 0x0a4c6,), # Yi Radical Qot ..Yi Radical Ke
(0x0a960, 0x0a97c,), # Hangul Choseong Tikeut-m..Hangul Choseong Ssangyeo
(0x0ac00, 0x0d7a3,), # Hangul Syllable Ga ..Hangul Syllable Hih
(0x0f900, 0x0faff,), # Cjk Compatibility Ideogr..(nil)
(0x0fe10, 0x0fe19,), # Presentation Form For Ve..Presentation Form For Ve
(0x0fe30, 0x0fe52,), # Presentation Form For Ve..Small Full Stop
(0x0fe54, 0x0fe66,), # Small Semicolon ..Small Equals Sign
(0x0fe68, 0x0fe6b,), # Small Reverse Solidus ..Small Commercial At
(0x0ff01, 0x0ff60,), # Fullwidth Exclamation Ma..Fullwidth Right White Pa
(0x0ffe0, 0x0ffe6,), # Fullwidth Cent Sign ..Fullwidth Won Sign
(0x1b000, 0x1b001,), # Katakana Letter Archaic ..Hiragana Letter Archaic
(0x1f200, 0x1f202,), # Square Hiragana Hoka ..Squared Katakana Sa
(0x1f210, 0x1f23a,), # Squared Cjk Unified Ideo..Squared Cjk Unified Ideo
(0x1f240, 0x1f248,), # Tortoise Shell Bracketed..Tortoise Shell Bracketed
(0x1f250, 0x1f251,), # Circled Ideograph Advant..Circled Ideograph Accept
(0x20000, 0x2fffd,), # Cjk Unified Ideograph-20..(nil)
(0x30000, 0x3fffd,), # Cjk Unified Ideograph-30..(nil)
),
'9.0.0': (
# Source: EastAsianWidth-9.0.0.txt
# Date: 2016-05-27, 17:00:00 GMT [KW, LI]
#
(0x01100, 0x0115f,), # Hangul Choseong Kiyeok ..Hangul Choseong Filler
(0x0231a, 0x0231b,), # Watch ..Hourglass
(0x02329, 0x0232a,), # Left-pointing Angle Brac..Right-pointing Angle Bra
(0x023e9, 0x023ec,), # Black Right-pointing Dou..Black Down-pointing Doub
(0x023f0, 0x023f0,), # Alarm Clock
(0x023f3, 0x023f3,), # Hourglass With Flowing Sand
(0x025fd, 0x025fe,), # White Medium Small Squar..Black Medium Small Squar
(0x02614, 0x02615,), # Umbrella With Rain Drops..Hot Beverage
(0x02648, 0x02653,), # Aries ..Pisces
(0x0267f, 0x0267f,), # Wheelchair Symbol
(0x02693, 0x02693,), # Anchor
(0x026a1, 0x026a1,), # High Voltage Sign
(0x026aa, 0x026ab,), # Medium White Circle ..Medium Black Circle
(0x026bd, 0x026be,), # Soccer Ball ..Baseball
(0x026c4, 0x026c5,), # Snowman Without Snow ..Sun Behind Cloud
(0x026ce, 0x026ce,), # Ophiuchus
(0x026d4, 0x026d4,), # No Entry
(0x026ea, 0x026ea,), # Church
(0x026f2, 0x026f3,), # Fountain ..Flag In Hole
(0x026f5, 0x026f5,), # Sailboat
(0x026fa, 0x026fa,), # Tent
(0x026fd, 0x026fd,), # Fuel Pump
(0x02705, 0x02705,), # White Heavy Check Mark
(0x0270a, 0x0270b,), # Raised Fist ..Raised Hand
(0x02728, 0x02728,), # Sparkles
(0x0274c, 0x0274c,), # Cross Mark
(0x0274e, 0x0274e,), # Negative Squared Cross Mark
(0x02753, 0x02755,), # Black Question Mark Orna..White Exclamation Mark O
(0x02757, 0x02757,), # Heavy Exclamation Mark Symbol
(0x02795, 0x02797,), # Heavy Plus Sign ..Heavy Division Sign
(0x027b0, 0x027b0,), # Curly Loop
(0x027bf, 0x027bf,), # Double Curly Loop
(0x02b1b, 0x02b1c,), # Black Large Square ..White Large Square
(0x02b50, 0x02b50,), # White Medium Star
(0x02b55, 0x02b55,), # Heavy Large Circle
(0x02e80, 0x02e99,), # Cjk Radical Repeat ..Cjk Radical Rap
(0x02e9b, 0x02ef3,), # Cjk Radical Choke ..Cjk Radical C-simplified
(0x02f00, 0x02fd5,), # Kangxi Radical One ..Kangxi Radical Flute
(0x02ff0, 0x02ffb,), # Ideographic Description ..Ideographic Description
(0x03000, 0x0303e,), # Ideographic Space ..Ideographic Variation In
(0x03041, 0x03096,), # Hiragana Letter Small A ..Hiragana Letter Small Ke
(0x03099, 0x030ff,), # Combining Katakana-hirag..Katakana Digraph Koto
(0x03105, 0x0312d,), # Bopomofo Letter B ..Bopomofo Letter Ih
(0x03131, 0x0318e,), # Hangul Letter Kiyeok ..Hangul Letter Araeae
(0x03190, 0x031ba,), # Ideographic Annotation L..Bopomofo Letter Zy
(0x031c0, 0x031e3,), # Cjk Stroke T ..Cjk Stroke Q
(0x031f0, 0x0321e,), # Katakana Letter Small Ku..Parenthesized Korean Cha
(0x03220, 0x03247,), # Parenthesized Ideograph ..Circled Ideograph Koto
(0x03250, 0x032fe,), # Partnership Sign ..Circled Katakana Wo
(0x03300, 0x04dbf,), # Square Apaato ..Cjk Unified Ideograph-4d
(0x04e00, 0x0a48c,), # Cjk Unified Ideograph-4e..Yi Syllable Yyr
(0x0a490, 0x0a4c6,), # Yi Radical Qot ..Yi Radical Ke
(0x0a960, 0x0a97c,), # Hangul Choseong Tikeut-m..Hangul Choseong Ssangyeo
(0x0ac00, 0x0d7a3,), # Hangul Syllable Ga ..Hangul Syllable Hih
(0x0f900, 0x0faff,), # Cjk Compatibility Ideogr..(nil)
(0x0fe10, 0x0fe19,), # Presentation Form For Ve..Presentation Form For Ve
(0x0fe30, 0x0fe52,), # Presentation Form For Ve..Small Full Stop
(0x0fe54, 0x0fe66,), # Small Semicolon ..Small Equals Sign
(0x0fe68, 0x0fe6b,), # Small Reverse Solidus ..Small Commercial At
(0x0ff01, 0x0ff60,), # Fullwidth Exclamation Ma..Fullwidth Right White Pa
(0x0ffe0, 0x0ffe6,), # Fullwidth Cent Sign ..Fullwidth Won Sign
(0x16fe0, 0x16fe0,), # Tangut Iteration Mark
(0x17000, 0x187ec,), # (nil)
(0x18800, 0x18af2,), # Tangut Component-001 ..Tangut Component-755
(0x1b000, 0x1b001,), # Katakana Letter Archaic ..Hiragana Letter Archaic
(0x1f004, 0x1f004,), # Mahjong Tile Red Dragon
(0x1f0cf, 0x1f0cf,), # Playing Card Black Joker
(0x1f18e, 0x1f18e,), # Negative Squared Ab
(0x1f191, 0x1f19a,), # Squared Cl ..Squared Vs
(0x1f200, 0x1f202,), # Square Hiragana Hoka ..Squared Katakana Sa
(0x1f210, 0x1f23b,), # Squared Cjk Unified Ideo..Squared Cjk Unified Ideo
(0x1f240, 0x1f248,), # Tortoise Shell Bracketed..Tortoise Shell Bracketed
(0x1f250, 0x1f251,), # Circled Ideograph Advant..Circled Ideograph Accept
(0x1f300, 0x1f320,), # Cyclone ..Shooting Star
(0x1f32d, 0x1f335,), # Hot Dog ..Cactus
(0x1f337, 0x1f37c,), # Tulip ..Baby Bottle
(0x1f37e, 0x1f393,), # Bottle With Popping Cork..Graduation Cap
(0x1f3a0, 0x1f3ca,), # Carousel Horse ..Swimmer
(0x1f3cf, 0x1f3d3,), # Cricket Bat And Ball ..Table Tennis Paddle And
(0x1f3e0, 0x1f3f0,), # House Building ..European Castle
(0x1f3f4, 0x1f3f4,), # Waving Black Flag
(0x1f3f8, 0x1f43e,), # Badminton Racquet And Sh..Paw Prints
(0x1f440, 0x1f440,), # Eyes
(0x1f442, 0x1f4fc,), # Ear ..Videocassette
(0x1f4ff, 0x1f53d,), # Prayer Beads ..Down-pointing Small Red
(0x1f54b, 0x1f54e,), # Kaaba ..Menorah With Nine Branch
(0x1f550, 0x1f567,), # Clock Face One Oclock ..Clock Face Twelve-thirty
(0x1f57a, 0x1f57a,), # Man Dancing
(0x1f595, 0x1f596,), # Reversed Hand With Middl..Raised Hand With Part Be
(0x1f5a4, 0x1f5a4,), # Black Heart
(0x1f5fb, 0x1f64f,), # Mount Fuji ..Person With Folded Hands
(0x1f680, 0x1f6c5,), # Rocket ..Left Luggage
(0x1f6cc, 0x1f6cc,), # Sleeping Accommodation
(0x1f6d0, 0x1f6d2,), # Place Of Worship ..Shopping Trolley
(0x1f6eb, 0x1f6ec,), # Airplane Departure ..Airplane Arriving
(0x1f6f4, 0x1f6f6,), # Scooter ..Canoe
(0x1f910, 0x1f91e,), # Zipper-mouth Face ..Hand With Index And Midd
(0x1f920, 0x1f927,), # Face With Cowboy Hat ..Sneezing Face
(0x1f930, 0x1f930,), # Pregnant Woman
(0x1f933, 0x1f93e,), # Selfie ..Handball
(0x1f940, 0x1f94b,), # Wilted Flower ..Martial Arts Uniform
(0x1f950, 0x1f95e,), # Croissant ..Pancakes
(0x1f980, 0x1f991,), # Crab ..Squid
(0x1f9c0, 0x1f9c0,), # Cheese Wedge
(0x20000, 0x2fffd,), # Cjk Unified Ideograph-20..(nil)
(0x30000, 0x3fffd,), # Cjk Unified Ideograph-30..(nil)
),
'10.0.0': (
# Source: EastAsianWidth-10.0.0.txt
# Date: 2017-03-08, 02:00:00 GMT [KW, LI]
#
(0x01100, 0x0115f,), # Hangul Choseong Kiyeok ..Hangul Choseong Filler
(0x0231a, 0x0231b,), # Watch ..Hourglass
(0x02329, 0x0232a,), # Left-pointing Angle Brac..Right-pointing Angle Bra
(0x023e9, 0x023ec,), # Black Right-pointing Dou..Black Down-pointing Doub
(0x023f0, 0x023f0,), # Alarm Clock
(0x023f3, 0x023f3,), # Hourglass With Flowing Sand
(0x025fd, 0x025fe,), # White Medium Small Squar..Black Medium Small Squar
(0x02614, 0x02615,), # Umbrella With Rain Drops..Hot Beverage
(0x02648, 0x02653,), # Aries ..Pisces
(0x0267f, 0x0267f,), # Wheelchair Symbol
(0x02693, 0x02693,), # Anchor
(0x026a1, 0x026a1,), # High Voltage Sign
(0x026aa, 0x026ab,), # Medium White Circle ..Medium Black Circle
(0x026bd, 0x026be,), # Soccer Ball ..Baseball
(0x026c4, 0x026c5,), # Snowman Without Snow ..Sun Behind Cloud
(0x026ce, 0x026ce,), # Ophiuchus
(0x026d4, 0x026d4,), # No Entry
(0x026ea, 0x026ea,), # Church
(0x026f2, 0x026f3,), # Fountain ..Flag In Hole
(0x026f5, 0x026f5,), # Sailboat
(0x026fa, 0x026fa,), # Tent
(0x026fd, 0x026fd,), # Fuel Pump
(0x02705, 0x02705,), # White Heavy Check Mark
(0x0270a, 0x0270b,), # Raised Fist ..Raised Hand
(0x02728, 0x02728,), # Sparkles
(0x0274c, 0x0274c,), # Cross Mark
(0x0274e, 0x0274e,), # Negative Squared Cross Mark
(0x02753, 0x02755,), # Black Question Mark Orna..White Exclamation Mark O
(0x02757, 0x02757,), # Heavy Exclamation Mark Symbol
(0x02795, 0x02797,), # Heavy Plus Sign ..Heavy Division Sign
(0x027b0, 0x027b0,), # Curly Loop
(0x027bf, 0x027bf,), # Double Curly Loop
(0x02b1b, 0x02b1c,), # Black Large Square ..White Large Square
(0x02b50, 0x02b50,), # White Medium Star
(0x02b55, 0x02b55,), # Heavy Large Circle
(0x02e80, 0x02e99,), # Cjk Radical Repeat ..Cjk Radical Rap
(0x02e9b, 0x02ef3,), # Cjk Radical Choke ..Cjk Radical C-simplified
(0x02f00, 0x02fd5,), # Kangxi Radical One ..Kangxi Radical Flute
(0x02ff0, 0x02ffb,), # Ideographic Description ..Ideographic Description
(0x03000, 0x0303e,), # Ideographic Space ..Ideographic Variation In
(0x03041, 0x03096,), # Hiragana Letter Small A ..Hiragana Letter Small Ke
(0x03099, 0x030ff,), # Combining Katakana-hirag..Katakana Digraph Koto
(0x03105, 0x0312e,), # Bopomofo Letter B ..Bopomofo Letter O With D
(0x03131, 0x0318e,), # Hangul Letter Kiyeok ..Hangul Letter Araeae
(0x03190, 0x031ba,), # Ideographic Annotation L..Bopomofo Letter Zy
(0x031c0, 0x031e3,), # Cjk Stroke T ..Cjk Stroke Q
(0x031f0, 0x0321e,), # Katakana Letter Small Ku..Parenthesized Korean Cha
(0x03220, 0x03247,), # Parenthesized Ideograph ..Circled Ideograph Koto
(0x03250, 0x032fe,), # Partnership Sign ..Circled Katakana Wo
(0x03300, 0x04dbf,), # Square Apaato ..Cjk Unified Ideograph-4d
(0x04e00, 0x0a48c,), # Cjk Unified Ideograph-4e..Yi Syllable Yyr
(0x0a490, 0x0a4c6,), # Yi Radical Qot ..Yi Radical Ke
(0x0a960, 0x0a97c,), # Hangul Choseong Tikeut-m..Hangul Choseong Ssangyeo
(0x0ac00, 0x0d7a3,), # Hangul Syllable Ga ..Hangul Syllable Hih
(0x0f900, 0x0faff,), # Cjk Compatibility Ideogr..(nil)
(0x0fe10, 0x0fe19,), # Presentation Form For Ve..Presentation Form For Ve
(0x0fe30, 0x0fe52,), # Presentation Form For Ve..Small Full Stop
(0x0fe54, 0x0fe66,), # Small Semicolon ..Small Equals Sign
(0x0fe68, 0x0fe6b,), # Small Reverse Solidus ..Small Commercial At
(0x0ff01, 0x0ff60,), # Fullwidth Exclamation Ma..Fullwidth Right White Pa
(0x0ffe0, 0x0ffe6,), # Fullwidth Cent Sign ..Fullwidth Won Sign
(0x16fe0, 0x16fe1,), # Tangut Iteration Mark ..Nushu Iteration Mark
(0x17000, 0x187ec,), # (nil)
(0x18800, 0x18af2,), # Tangut Component-001 ..Tangut Component-755
(0x1b000, 0x1b11e,), # Katakana Letter Archaic ..Hentaigana Letter N-mu-m
(0x1b170, 0x1b2fb,), # Nushu Character-1b170 ..Nushu Character-1b2fb
(0x1f004, 0x1f004,), # Mahjong Tile Red Dragon
(0x1f0cf, 0x1f0cf,), # Playing Card Black Joker
(0x1f18e, 0x1f18e,), # Negative Squared Ab
(0x1f191, 0x1f19a,), # Squared Cl ..Squared Vs
(0x1f200, 0x1f202,), # Square Hiragana Hoka ..Squared Katakana Sa
(0x1f210, 0x1f23b,), # Squared Cjk Unified Ideo..Squared Cjk Unified Ideo
(0x1f240, 0x1f248,), # Tortoise Shell Bracketed..Tortoise Shell Bracketed
(0x1f250, 0x1f251,), # Circled Ideograph Advant..Circled Ideograph Accept
(0x1f260, 0x1f265,), # Rounded Symbol For Fu ..Rounded Symbol For Cai
(0x1f300, 0x1f320,), # Cyclone ..Shooting Star
(0x1f32d, 0x1f335,), # Hot Dog ..Cactus
(0x1f337, 0x1f37c,), # Tulip ..Baby Bottle
(0x1f37e, 0x1f393,), # Bottle With Popping Cork..Graduation Cap
(0x1f3a0, 0x1f3ca,), # Carousel Horse ..Swimmer
(0x1f3cf, 0x1f3d3,), # Cricket Bat And Ball ..Table Tennis Paddle And
(0x1f3e0, 0x1f3f0,), # House Building ..European Castle
(0x1f3f4, 0x1f3f4,), # Waving Black Flag
(0x1f3f8, 0x1f43e,), # Badminton Racquet And Sh..Paw Prints
(0x1f440, 0x1f440,), # Eyes
(0x1f442, 0x1f4fc,), # Ear ..Videocassette
(0x1f4ff, 0x1f53d,), # Prayer Beads ..Down-pointing Small Red
(0x1f54b, 0x1f54e,), # Kaaba ..Menorah With Nine Branch
(0x1f550, 0x1f567,), # Clock Face One Oclock ..Clock Face Twelve-thirty
(0x1f57a, 0x1f57a,), # Man Dancing
(0x1f595, 0x1f596,), # Reversed Hand With Middl..Raised Hand With Part Be
(0x1f5a4, 0x1f5a4,), # Black Heart
(0x1f5fb, 0x1f64f,), # Mount Fuji ..Person With Folded Hands
(0x1f680, 0x1f6c5,), # Rocket ..Left Luggage
(0x1f6cc, 0x1f6cc,), # Sleeping Accommodation
(0x1f6d0, 0x1f6d2,), # Place Of Worship ..Shopping Trolley
(0x1f6eb, 0x1f6ec,), # Airplane Departure ..Airplane Arriving
(0x1f6f4, 0x1f6f8,), # Scooter ..Flying Saucer
(0x1f910, 0x1f93e,), # Zipper-mouth Face ..Handball
(0x1f940, 0x1f94c,), # Wilted Flower ..Curling Stone
(0x1f950, 0x1f96b,), # Croissant ..Canned Food
(0x1f980, 0x1f997,), # Crab ..Cricket
(0x1f9c0, 0x1f9c0,), # Cheese Wedge
(0x1f9d0, 0x1f9e6,), # Face With Monocle ..Socks
(0x20000, 0x2fffd,), # Cjk Unified Ideograph-20..(nil)
(0x30000, 0x3fffd,), # Cjk Unified Ideograph-30..(nil)
),
'11.0.0': (
# Source: EastAsianWidth-11.0.0.txt
# Date: 2018-05-14, 09:41:59 GMT [KW, LI]
#
(0x01100, 0x0115f,), # Hangul Choseong Kiyeok ..Hangul Choseong Filler
(0x0231a, 0x0231b,), # Watch ..Hourglass
(0x02329, 0x0232a,), # Left-pointing Angle Brac..Right-pointing Angle Bra
(0x023e9, 0x023ec,), # Black Right-pointing Dou..Black Down-pointing Doub
(0x023f0, 0x023f0,), # Alarm Clock
(0x023f3, 0x023f3,), # Hourglass With Flowing Sand
(0x025fd, 0x025fe,), # White Medium Small Squar..Black Medium Small Squar
(0x02614, 0x02615,), # Umbrella With Rain Drops..Hot Beverage
(0x02648, 0x02653,), # Aries ..Pisces
(0x0267f, 0x0267f,), # Wheelchair Symbol
(0x02693, 0x02693,), # Anchor
(0x026a1, 0x026a1,), # High Voltage Sign
(0x026aa, 0x026ab,), # Medium White Circle ..Medium Black Circle
(0x026bd, 0x026be,), # Soccer Ball ..Baseball
(0x026c4, 0x026c5,), # Snowman Without Snow ..Sun Behind Cloud
(0x026ce, 0x026ce,), # Ophiuchus
(0x026d4, 0x026d4,), # No Entry
(0x026ea, 0x026ea,), # Church
(0x026f2, 0x026f3,), # Fountain ..Flag In Hole
(0x026f5, 0x026f5,), # Sailboat
(0x026fa, 0x026fa,), # Tent
(0x026fd, 0x026fd,), # Fuel Pump
(0x02705, 0x02705,), # White Heavy Check Mark
(0x0270a, 0x0270b,), # Raised Fist ..Raised Hand
(0x02728, 0x02728,), # Sparkles
(0x0274c, 0x0274c,), # Cross Mark
(0x0274e, 0x0274e,), # Negative Squared Cross Mark
(0x02753, 0x02755,), # Black Question Mark Orna..White Exclamation Mark O
(0x02757, 0x02757,), # Heavy Exclamation Mark Symbol
(0x02795, 0x02797,), # Heavy Plus Sign ..Heavy Division Sign
(0x027b0, 0x027b0,), # Curly Loop
(0x027bf, 0x027bf,), # Double Curly Loop
(0x02b1b, 0x02b1c,), # Black Large Square ..White Large Square
(0x02b50, 0x02b50,), # White Medium Star
(0x02b55, 0x02b55,), # Heavy Large Circle
(0x02e80, 0x02e99,), # Cjk Radical Repeat ..Cjk Radical Rap
(0x02e9b, 0x02ef3,), # Cjk Radical Choke ..Cjk Radical C-simplified
(0x02f00, 0x02fd5,), # Kangxi Radical One ..Kangxi Radical Flute
(0x02ff0, 0x02ffb,), # Ideographic Description ..Ideographic Description
(0x03000, 0x0303e,), # Ideographic Space ..Ideographic Variation In
(0x03041, 0x03096,), # Hiragana Letter Small A ..Hiragana Letter Small Ke
(0x03099, 0x030ff,), # Combining Katakana-hirag..Katakana Digraph Koto
(0x03105, 0x0312f,), # Bopomofo Letter B ..Bopomofo Letter Nn
(0x03131, 0x0318e,), # Hangul Letter Kiyeok ..Hangul Letter Araeae
(0x03190, 0x031ba,), # Ideographic Annotation L..Bopomofo Letter Zy
(0x031c0, 0x031e3,), # Cjk Stroke T ..Cjk Stroke Q
(0x031f0, 0x0321e,), # Katakana Letter Small Ku..Parenthesized Korean Cha
(0x03220, 0x03247,), # Parenthesized Ideograph ..Circled Ideograph Koto
(0x03250, 0x032fe,), # Partnership Sign ..Circled Katakana Wo
(0x03300, 0x04dbf,), # Square Apaato ..Cjk Unified Ideograph-4d
(0x04e00, 0x0a48c,), # Cjk Unified Ideograph-4e..Yi Syllable Yyr
(0x0a490, 0x0a4c6,), # Yi Radical Qot ..Yi Radical Ke
(0x0a960, 0x0a97c,), # Hangul Choseong Tikeut-m..Hangul Choseong Ssangyeo
(0x0ac00, 0x0d7a3,), # Hangul Syllable Ga ..Hangul Syllable Hih
(0x0f900, 0x0faff,), # Cjk Compatibility Ideogr..(nil)
(0x0fe10, 0x0fe19,), # Presentation Form For Ve..Presentation Form For Ve
(0x0fe30, 0x0fe52,), # Presentation Form For Ve..Small Full Stop
(0x0fe54, 0x0fe66,), # Small Semicolon ..Small Equals Sign
(0x0fe68, 0x0fe6b,), # Small Reverse Solidus ..Small Commercial At
(0x0ff01, 0x0ff60,), # Fullwidth Exclamation Ma..Fullwidth Right White Pa
(0x0ffe0, 0x0ffe6,), # Fullwidth Cent Sign ..Fullwidth Won Sign
(0x16fe0, 0x16fe1,), # Tangut Iteration Mark ..Nushu Iteration Mark
(0x17000, 0x187f1,), # (nil)
(0x18800, 0x18af2,), # Tangut Component-001 ..Tangut Component-755
(0x1b000, 0x1b11e,), # Katakana Letter Archaic ..Hentaigana Letter N-mu-m
(0x1b170, 0x1b2fb,), # Nushu Character-1b170 ..Nushu Character-1b2fb
(0x1f004, 0x1f004,), # Mahjong Tile Red Dragon
(0x1f0cf, 0x1f0cf,), # Playing Card Black Joker
(0x1f18e, 0x1f18e,), # Negative Squared Ab
(0x1f191, 0x1f19a,), # Squared Cl ..Squared Vs
(0x1f200, 0x1f202,), # Square Hiragana Hoka ..Squared Katakana Sa
(0x1f210, 0x1f23b,), # Squared Cjk Unified Ideo..Squared Cjk Unified Ideo
(0x1f240, 0x1f248,), # Tortoise Shell Bracketed..Tortoise Shell Bracketed
(0x1f250, 0x1f251,), # Circled Ideograph Advant..Circled Ideograph Accept
(0x1f260, 0x1f265,), # Rounded Symbol For Fu ..Rounded Symbol For Cai
(0x1f300, 0x1f320,), # Cyclone ..Shooting Star
(0x1f32d, 0x1f335,), # Hot Dog ..Cactus
(0x1f337, 0x1f37c,), # Tulip ..Baby Bottle
(0x1f37e, 0x1f393,), # Bottle With Popping Cork..Graduation Cap
(0x1f3a0, 0x1f3ca,), # Carousel Horse ..Swimmer
(0x1f3cf, 0x1f3d3,), # Cricket Bat And Ball ..Table Tennis Paddle And
(0x1f3e0, 0x1f3f0,), # House Building ..European Castle
(0x1f3f4, 0x1f3f4,), # Waving Black Flag
(0x1f3f8, 0x1f43e,), # Badminton Racquet And Sh..Paw Prints
(0x1f440, 0x1f440,), # Eyes
(0x1f442, 0x1f4fc,), # Ear ..Videocassette
(0x1f4ff, 0x1f53d,), # Prayer Beads ..Down-pointing Small Red
(0x1f54b, 0x1f54e,), # Kaaba ..Menorah With Nine Branch
(0x1f550, 0x1f567,), # Clock Face One Oclock ..Clock Face Twelve-thirty
(0x1f57a, 0x1f57a,), # Man Dancing
(0x1f595, 0x1f596,), # Reversed Hand With Middl..Raised Hand With Part Be
(0x1f5a4, 0x1f5a4,), # Black Heart
(0x1f5fb, 0x1f64f,), # Mount Fuji ..Person With Folded Hands
(0x1f680, 0x1f6c5,), # Rocket ..Left Luggage
(0x1f6cc, 0x1f6cc,), # Sleeping Accommodation
(0x1f6d0, 0x1f6d2,), # Place Of Worship ..Shopping Trolley
(0x1f6eb, 0x1f6ec,), # Airplane Departure ..Airplane Arriving
(0x1f6f4, 0x1f6f9,), # Scooter ..Skateboard
(0x1f910, 0x1f93e,), # Zipper-mouth Face ..Handball
(0x1f940, 0x1f970,), # Wilted Flower ..Smiling Face With Smilin
(0x1f973, 0x1f976,), # Face With Party Horn And..Freezing Face
(0x1f97a, 0x1f97a,), # Face With Pleading Eyes
(0x1f97c, 0x1f9a2,), # Lab Coat ..Swan
(0x1f9b0, 0x1f9b9,), # Emoji Component Red Hair..Supervillain
(0x1f9c0, 0x1f9c2,), # Cheese Wedge ..Salt Shaker
(0x1f9d0, 0x1f9ff,), # Face With Monocle ..Nazar Amulet
(0x20000, 0x2fffd,), # Cjk Unified Ideograph-20..(nil)
(0x30000, 0x3fffd,), # Cjk Unified Ideograph-30..(nil)
),
'12.0.0': (
# Source: EastAsianWidth-12.0.0.txt
# Date: 2019-01-21, 14:12:58 GMT [KW, LI]
#
(0x01100, 0x0115f,), # Hangul Choseong Kiyeok ..Hangul Choseong Filler
(0x0231a, 0x0231b,), # Watch ..Hourglass
(0x02329, 0x0232a,), # Left-pointing Angle Brac..Right-pointing Angle Bra
(0x023e9, 0x023ec,), # Black Right-pointing Dou..Black Down-pointing Doub
(0x023f0, 0x023f0,), # Alarm Clock
(0x023f3, 0x023f3,), # Hourglass With Flowing Sand
(0x025fd, 0x025fe,), # White Medium Small Squar..Black Medium Small Squar
(0x02614, 0x02615,), # Umbrella With Rain Drops..Hot Beverage
(0x02648, 0x02653,), # Aries ..Pisces
(0x0267f, 0x0267f,), # Wheelchair Symbol
(0x02693, 0x02693,), # Anchor
(0x026a1, 0x026a1,), # High Voltage Sign
(0x026aa, 0x026ab,), # Medium White Circle ..Medium Black Circle
(0x026bd, 0x026be,), # Soccer Ball ..Baseball
(0x026c4, 0x026c5,), # Snowman Without Snow ..Sun Behind Cloud
(0x026ce, 0x026ce,), # Ophiuchus
(0x026d4, 0x026d4,), # No Entry
(0x026ea, 0x026ea,), # Church
(0x026f2, 0x026f3,), # Fountain ..Flag In Hole
(0x026f5, 0x026f5,), # Sailboat
(0x026fa, 0x026fa,), # Tent
(0x026fd, 0x026fd,), # Fuel Pump
(0x02705, 0x02705,), # White Heavy Check Mark
(0x0270a, 0x0270b,), # Raised Fist ..Raised Hand
(0x02728, 0x02728,), # Sparkles
(0x0274c, 0x0274c,), # Cross Mark
(0x0274e, 0x0274e,), # Negative Squared Cross Mark
(0x02753, 0x02755,), # Black Question Mark Orna..White Exclamation Mark O
(0x02757, 0x02757,), # Heavy Exclamation Mark Symbol
(0x02795, 0x02797,), # Heavy Plus Sign ..Heavy Division Sign
(0x027b0, 0x027b0,), # Curly Loop
(0x027bf, 0x027bf,), # Double Curly Loop
(0x02b1b, 0x02b1c,), # Black Large Square ..White Large Square
(0x02b50, 0x02b50,), # White Medium Star
(0x02b55, 0x02b55,), # Heavy Large Circle
(0x02e80, 0x02e99,), # Cjk Radical Repeat ..Cjk Radical Rap
(0x02e9b, 0x02ef3,), # Cjk Radical Choke ..Cjk Radical C-simplified
(0x02f00, 0x02fd5,), # Kangxi Radical One ..Kangxi Radical Flute
(0x02ff0, 0x02ffb,), # Ideographic Description ..Ideographic Description
(0x03000, 0x0303e,), # Ideographic Space ..Ideographic Variation In
(0x03041, 0x03096,), # Hiragana Letter Small A ..Hiragana Letter Small Ke
(0x03099, 0x030ff,), # Combining Katakana-hirag..Katakana Digraph Koto
(0x03105, 0x0312f,), # Bopomofo Letter B ..Bopomofo Letter Nn
(0x03131, 0x0318e,), # Hangul Letter Kiyeok ..Hangul Letter Araeae
(0x03190, 0x031ba,), # Ideographic Annotation L..Bopomofo Letter Zy
(0x031c0, 0x031e3,), # Cjk Stroke T ..Cjk Stroke Q
(0x031f0, 0x0321e,), # Katakana Letter Small Ku..Parenthesized Korean Cha
(0x03220, 0x03247,), # Parenthesized Ideograph ..Circled Ideograph Koto
(0x03250, 0x032fe,), # Partnership Sign ..Circled Katakana Wo
(0x03300, 0x04dbf,), # Square Apaato ..Cjk Unified Ideograph-4d
(0x04e00, 0x0a48c,), # Cjk Unified Ideograph-4e..Yi Syllable Yyr
(0x0a490, 0x0a4c6,), # Yi Radical Qot ..Yi Radical Ke
(0x0a960, 0x0a97c,), # Hangul Choseong Tikeut-m..Hangul Choseong Ssangyeo
(0x0ac00, 0x0d7a3,), # Hangul Syllable Ga ..Hangul Syllable Hih
(0x0f900, 0x0faff,), # Cjk Compatibility Ideogr..(nil)
(0x0fe10, 0x0fe19,), # Presentation Form For Ve..Presentation Form For Ve
(0x0fe30, 0x0fe52,), # Presentation Form For Ve..Small Full Stop
(0x0fe54, 0x0fe66,), # Small Semicolon ..Small Equals Sign
(0x0fe68, 0x0fe6b,), # Small Reverse Solidus ..Small Commercial At
(0x0ff01, 0x0ff60,), # Fullwidth Exclamation Ma..Fullwidth Right White Pa
(0x0ffe0, 0x0ffe6,), # Fullwidth Cent Sign ..Fullwidth Won Sign
(0x16fe0, 0x16fe3,), # Tangut Iteration Mark ..Old Chinese Iteration Ma
(0x17000, 0x187f7,), # (nil)
(0x18800, 0x18af2,), # Tangut Component-001 ..Tangut Component-755
(0x1b000, 0x1b11e,), # Katakana Letter Archaic ..Hentaigana Letter N-mu-m
(0x1b150, 0x1b152,), # Hiragana Letter Small Wi..Hiragana Letter Small Wo
(0x1b164, 0x1b167,), # Katakana Letter Small Wi..Katakana Letter Small N
(0x1b170, 0x1b2fb,), # Nushu Character-1b170 ..Nushu Character-1b2fb
(0x1f004, 0x1f004,), # Mahjong Tile Red Dragon
(0x1f0cf, 0x1f0cf,), # Playing Card Black Joker
(0x1f18e, 0x1f18e,), # Negative Squared Ab
(0x1f191, 0x1f19a,), # Squared Cl ..Squared Vs
(0x1f200, 0x1f202,), # Square Hiragana Hoka ..Squared Katakana Sa
(0x1f210, 0x1f23b,), # Squared Cjk Unified Ideo..Squared Cjk Unified Ideo
(0x1f240, 0x1f248,), # Tortoise Shell Bracketed..Tortoise Shell Bracketed
(0x1f250, 0x1f251,), # Circled Ideograph Advant..Circled Ideograph Accept
(0x1f260, 0x1f265,), # Rounded Symbol For Fu ..Rounded Symbol For Cai
(0x1f300, 0x1f320,), # Cyclone ..Shooting Star
(0x1f32d, 0x1f335,), # Hot Dog ..Cactus
(0x1f337, 0x1f37c,), # Tulip ..Baby Bottle
(0x1f37e, 0x1f393,), # Bottle With Popping Cork..Graduation Cap
(0x1f3a0, 0x1f3ca,), # Carousel Horse ..Swimmer
(0x1f3cf, 0x1f3d3,), # Cricket Bat And Ball ..Table Tennis Paddle And
(0x1f3e0, 0x1f3f0,), # House Building ..European Castle
(0x1f3f4, 0x1f3f4,), # Waving Black Flag
(0x1f3f8, 0x1f43e,), # Badminton Racquet And Sh..Paw Prints
(0x1f440, 0x1f440,), # Eyes
(0x1f442, 0x1f4fc,), # Ear ..Videocassette
(0x1f4ff, 0x1f53d,), # Prayer Beads ..Down-pointing Small Red
(0x1f54b, 0x1f54e,), # Kaaba ..Menorah With Nine Branch
(0x1f550, 0x1f567,), # Clock Face One Oclock ..Clock Face Twelve-thirty
(0x1f57a, 0x1f57a,), # Man Dancing
(0x1f595, 0x1f596,), # Reversed Hand With Middl..Raised Hand With Part Be
(0x1f5a4, 0x1f5a4,), # Black Heart
(0x1f5fb, 0x1f64f,), # Mount Fuji ..Person With Folded Hands
(0x1f680, 0x1f6c5,), # Rocket ..Left Luggage
(0x1f6cc, 0x1f6cc,), # Sleeping Accommodation
(0x1f6d0, 0x1f6d2,), # Place Of Worship ..Shopping Trolley
(0x1f6d5, 0x1f6d5,), # Hindu Temple
(0x1f6eb, 0x1f6ec,), # Airplane Departure ..Airplane Arriving
(0x1f6f4, 0x1f6fa,), # Scooter ..Auto Rickshaw
(0x1f7e0, 0x1f7eb,), # Large Orange Circle ..Large Brown Square
(0x1f90d, 0x1f971,), # White Heart ..Yawning Face
(0x1f973, 0x1f976,), # Face With Party Horn And..Freezing Face
(0x1f97a, 0x1f9a2,), # Face With Pleading Eyes ..Swan
(0x1f9a5, 0x1f9aa,), # Sloth ..Oyster
(0x1f9ae, 0x1f9ca,), # Guide Dog ..Ice Cube
(0x1f9cd, 0x1f9ff,), # Standing Person ..Nazar Amulet
(0x1fa70, 0x1fa73,), # Ballet Shoes ..Shorts
(0x1fa78, 0x1fa7a,), # Drop Of Blood ..Stethoscope
(0x1fa80, 0x1fa82,), # Yo-yo ..Parachute
(0x1fa90, 0x1fa95,), # Ringed Planet ..Banjo
(0x20000, 0x2fffd,), # Cjk Unified Ideograph-20..(nil)
(0x30000, 0x3fffd,), # Cjk Unified Ideograph-30..(nil)
),
'12.1.0': (
# Source: EastAsianWidth-12.1.0.txt
# Date: 2019-03-31, 22:01:58 GMT [KW, LI]
#
(0x01100, 0x0115f,), # Hangul Choseong Kiyeok ..Hangul Choseong Filler
(0x0231a, 0x0231b,), # Watch ..Hourglass
(0x02329, 0x0232a,), # Left-pointing Angle Brac..Right-pointing Angle Bra
(0x023e9, 0x023ec,), # Black Right-pointing Dou..Black Down-pointing Doub
(0x023f0, 0x023f0,), # Alarm Clock
(0x023f3, 0x023f3,), # Hourglass With Flowing Sand
(0x025fd, 0x025fe,), # White Medium Small Squar..Black Medium Small Squar
(0x02614, 0x02615,), # Umbrella With Rain Drops..Hot Beverage
(0x02648, 0x02653,), # Aries ..Pisces
(0x0267f, 0x0267f,), # Wheelchair Symbol
(0x02693, 0x02693,), # Anchor
(0x026a1, 0x026a1,), # High Voltage Sign
(0x026aa, 0x026ab,), # Medium White Circle ..Medium Black Circle
(0x026bd, 0x026be,), # Soccer Ball ..Baseball
(0x026c4, 0x026c5,), # Snowman Without Snow ..Sun Behind Cloud
(0x026ce, 0x026ce,), # Ophiuchus
(0x026d4, 0x026d4,), # No Entry
(0x026ea, 0x026ea,), # Church
(0x026f2, 0x026f3,), # Fountain ..Flag In Hole
(0x026f5, 0x026f5,), # Sailboat
(0x026fa, 0x026fa,), # Tent
(0x026fd, 0x026fd,), # Fuel Pump
(0x02705, 0x02705,), # White Heavy Check Mark
(0x0270a, 0x0270b,), # Raised Fist ..Raised Hand
(0x02728, 0x02728,), # Sparkles
(0x0274c, 0x0274c,), # Cross Mark
(0x0274e, 0x0274e,), # Negative Squared Cross Mark
(0x02753, 0x02755,), # Black Question Mark Orna..White Exclamation Mark O
(0x02757, 0x02757,), # Heavy Exclamation Mark Symbol
(0x02795, 0x02797,), # Heavy Plus Sign ..Heavy Division Sign
(0x027b0, 0x027b0,), # Curly Loop
(0x027bf, 0x027bf,), # Double Curly Loop
(0x02b1b, 0x02b1c,), # Black Large Square ..White Large Square
(0x02b50, 0x02b50,), # White Medium Star
(0x02b55, 0x02b55,), # Heavy Large Circle
(0x02e80, 0x02e99,), # Cjk Radical Repeat ..Cjk Radical Rap
(0x02e9b, 0x02ef3,), # Cjk Radical Choke ..Cjk Radical C-simplified
(0x02f00, 0x02fd5,), # Kangxi Radical One ..Kangxi Radical Flute
(0x02ff0, 0x02ffb,), # Ideographic Description ..Ideographic Description
(0x03000, 0x0303e,), # Ideographic Space ..Ideographic Variation In
(0x03041, 0x03096,), # Hiragana Letter Small A ..Hiragana Letter Small Ke
(0x03099, 0x030ff,), # Combining Katakana-hirag..Katakana Digraph Koto
(0x03105, 0x0312f,), # Bopomofo Letter B ..Bopomofo Letter Nn
(0x03131, 0x0318e,), # Hangul Letter Kiyeok ..Hangul Letter Araeae
(0x03190, 0x031ba,), # Ideographic Annotation L..Bopomofo Letter Zy
(0x031c0, 0x031e3,), # Cjk Stroke T ..Cjk Stroke Q
(0x031f0, 0x0321e,), # Katakana Letter Small Ku..Parenthesized Korean Cha
(0x03220, 0x03247,), # Parenthesized Ideograph ..Circled Ideograph Koto
(0x03250, 0x04dbf,), # Partnership Sign ..Cjk Unified Ideograph-4d
(0x04e00, 0x0a48c,), # Cjk Unified Ideograph-4e..Yi Syllable Yyr
(0x0a490, 0x0a4c6,), # Yi Radical Qot ..Yi Radical Ke
(0x0a960, 0x0a97c,), # Hangul Choseong Tikeut-m..Hangul Choseong Ssangyeo
(0x0ac00, 0x0d7a3,), # Hangul Syllable Ga ..Hangul Syllable Hih
(0x0f900, 0x0faff,), # Cjk Compatibility Ideogr..(nil)
(0x0fe10, 0x0fe19,), # Presentation Form For Ve..Presentation Form For Ve
(0x0fe30, 0x0fe52,), # Presentation Form For Ve..Small Full Stop
(0x0fe54, 0x0fe66,), # Small Semicolon ..Small Equals Sign
(0x0fe68, 0x0fe6b,), # Small Reverse Solidus ..Small Commercial At
(0x0ff01, 0x0ff60,), # Fullwidth Exclamation Ma..Fullwidth Right White Pa
(0x0ffe0, 0x0ffe6,), # Fullwidth Cent Sign ..Fullwidth Won Sign
(0x16fe0, 0x16fe3,), # Tangut Iteration Mark ..Old Chinese Iteration Ma
(0x17000, 0x187f7,), # (nil)
(0x18800, 0x18af2,), # Tangut Component-001 ..Tangut Component-755
(0x1b000, 0x1b11e,), # Katakana Letter Archaic ..Hentaigana Letter N-mu-m
(0x1b150, 0x1b152,), # Hiragana Letter Small Wi..Hiragana Letter Small Wo
(0x1b164, 0x1b167,), # Katakana Letter Small Wi..Katakana Letter Small N
(0x1b170, 0x1b2fb,), # Nushu Character-1b170 ..Nushu Character-1b2fb
(0x1f004, 0x1f004,), # Mahjong Tile Red Dragon
(0x1f0cf, 0x1f0cf,), # Playing Card Black Joker
(0x1f18e, 0x1f18e,), # Negative Squared Ab
(0x1f191, 0x1f19a,), # Squared Cl ..Squared Vs
(0x1f200, 0x1f202,), # Square Hiragana Hoka ..Squared Katakana Sa
(0x1f210, 0x1f23b,), # Squared Cjk Unified Ideo..Squared Cjk Unified Ideo
(0x1f240, 0x1f248,), # Tortoise Shell Bracketed..Tortoise Shell Bracketed
(0x1f250, 0x1f251,), # Circled Ideograph Advant..Circled Ideograph Accept
(0x1f260, 0x1f265,), # Rounded Symbol For Fu ..Rounded Symbol For Cai
(0x1f300, 0x1f320,), # Cyclone ..Shooting Star
(0x1f32d, 0x1f335,), # Hot Dog ..Cactus
(0x1f337, 0x1f37c,), # Tulip ..Baby Bottle
(0x1f37e, 0x1f393,), # Bottle With Popping Cork..Graduation Cap
(0x1f3a0, 0x1f3ca,), # Carousel Horse ..Swimmer
(0x1f3cf, 0x1f3d3,), # Cricket Bat And Ball ..Table Tennis Paddle And
(0x1f3e0, 0x1f3f0,), # House Building ..European Castle
(0x1f3f4, 0x1f3f4,), # Waving Black Flag
(0x1f3f8, 0x1f43e,), # Badminton Racquet And Sh..Paw Prints
(0x1f440, 0x1f440,), # Eyes
(0x1f442, 0x1f4fc,), # Ear ..Videocassette
(0x1f4ff, 0x1f53d,), # Prayer Beads ..Down-pointing Small Red
(0x1f54b, 0x1f54e,), # Kaaba ..Menorah With Nine Branch
(0x1f550, 0x1f567,), # Clock Face One Oclock ..Clock Face Twelve-thirty
(0x1f57a, 0x1f57a,), # Man Dancing
(0x1f595, 0x1f596,), # Reversed Hand With Middl..Raised Hand With Part Be
(0x1f5a4, 0x1f5a4,), # Black Heart
(0x1f5fb, 0x1f64f,), # Mount Fuji ..Person With Folded Hands
(0x1f680, 0x1f6c5,), # Rocket ..Left Luggage
(0x1f6cc, 0x1f6cc,), # Sleeping Accommodation
(0x1f6d0, 0x1f6d2,), # Place Of Worship ..Shopping Trolley
(0x1f6d5, 0x1f6d5,), # Hindu Temple
(0x1f6eb, 0x1f6ec,), # Airplane Departure ..Airplane Arriving
(0x1f6f4, 0x1f6fa,), # Scooter ..Auto Rickshaw
(0x1f7e0, 0x1f7eb,), # Large Orange Circle ..Large Brown Square
(0x1f90d, 0x1f971,), # White Heart ..Yawning Face
(0x1f973, 0x1f976,), # Face With Party Horn And..Freezing Face
(0x1f97a, 0x1f9a2,), # Face With Pleading Eyes ..Swan
(0x1f9a5, 0x1f9aa,), # Sloth ..Oyster
(0x1f9ae, 0x1f9ca,), # Guide Dog ..Ice Cube
(0x1f9cd, 0x1f9ff,), # Standing Person ..Nazar Amulet
(0x1fa70, 0x1fa73,), # Ballet Shoes ..Shorts
(0x1fa78, 0x1fa7a,), # Drop Of Blood ..Stethoscope
(0x1fa80, 0x1fa82,), # Yo-yo ..Parachute
(0x1fa90, 0x1fa95,), # Ringed Planet ..Banjo
(0x20000, 0x2fffd,), # Cjk Unified Ideograph-20..(nil)
(0x30000, 0x3fffd,), # Cjk Unified Ideograph-30..(nil)
),
'13.0.0': (
# Source: EastAsianWidth-13.0.0.txt
    # Date: 2020-01-21, 18:14:00 GMT [KW, LI]
#
(0x01100, 0x0115f,), # Hangul Choseong Kiyeok ..Hangul Choseong Filler
(0x0231a, 0x0231b,), # Watch ..Hourglass
(0x02329, 0x0232a,), # Left-pointing Angle Brac..Right-pointing Angle Bra
(0x023e9, 0x023ec,), # Black Right-pointing Dou..Black Down-pointing Doub
(0x023f0, 0x023f0,), # Alarm Clock
(0x023f3, 0x023f3,), # Hourglass With Flowing Sand
(0x025fd, 0x025fe,), # White Medium Small Squar..Black Medium Small Squar
(0x02614, 0x02615,), # Umbrella With Rain Drops..Hot Beverage
(0x02648, 0x02653,), # Aries ..Pisces
(0x0267f, 0x0267f,), # Wheelchair Symbol
(0x02693, 0x02693,), # Anchor
(0x026a1, 0x026a1,), # High Voltage Sign
(0x026aa, 0x026ab,), # Medium White Circle ..Medium Black Circle
(0x026bd, 0x026be,), # Soccer Ball ..Baseball
(0x026c4, 0x026c5,), # Snowman Without Snow ..Sun Behind Cloud
(0x026ce, 0x026ce,), # Ophiuchus
(0x026d4, 0x026d4,), # No Entry
(0x026ea, 0x026ea,), # Church
(0x026f2, 0x026f3,), # Fountain ..Flag In Hole
(0x026f5, 0x026f5,), # Sailboat
(0x026fa, 0x026fa,), # Tent
(0x026fd, 0x026fd,), # Fuel Pump
(0x02705, 0x02705,), # White Heavy Check Mark
(0x0270a, 0x0270b,), # Raised Fist ..Raised Hand
(0x02728, 0x02728,), # Sparkles
(0x0274c, 0x0274c,), # Cross Mark
(0x0274e, 0x0274e,), # Negative Squared Cross Mark
(0x02753, 0x02755,), # Black Question Mark Orna..White Exclamation Mark O
(0x02757, 0x02757,), # Heavy Exclamation Mark Symbol
(0x02795, 0x02797,), # Heavy Plus Sign ..Heavy Division Sign
(0x027b0, 0x027b0,), # Curly Loop
(0x027bf, 0x027bf,), # Double Curly Loop
(0x02b1b, 0x02b1c,), # Black Large Square ..White Large Square
(0x02b50, 0x02b50,), # White Medium Star
(0x02b55, 0x02b55,), # Heavy Large Circle
(0x02e80, 0x02e99,), # Cjk Radical Repeat ..Cjk Radical Rap
(0x02e9b, 0x02ef3,), # Cjk Radical Choke ..Cjk Radical C-simplified
(0x02f00, 0x02fd5,), # Kangxi Radical One ..Kangxi Radical Flute
(0x02ff0, 0x02ffb,), # Ideographic Description ..Ideographic Description
(0x03000, 0x0303e,), # Ideographic Space ..Ideographic Variation In
(0x03041, 0x03096,), # Hiragana Letter Small A ..Hiragana Letter Small Ke
(0x03099, 0x030ff,), # Combining Katakana-hirag..Katakana Digraph Koto
(0x03105, 0x0312f,), # Bopomofo Letter B ..Bopomofo Letter Nn
(0x03131, 0x0318e,), # Hangul Letter Kiyeok ..Hangul Letter Araeae
(0x03190, 0x031e3,), # Ideographic Annotation L..Cjk Stroke Q
(0x031f0, 0x0321e,), # Katakana Letter Small Ku..Parenthesized Korean Cha
(0x03220, 0x03247,), # Parenthesized Ideograph ..Circled Ideograph Koto
(0x03250, 0x04dbf,), # Partnership Sign ..Cjk Unified Ideograph-4d
(0x04e00, 0x0a48c,), # Cjk Unified Ideograph-4e..Yi Syllable Yyr
(0x0a490, 0x0a4c6,), # Yi Radical Qot ..Yi Radical Ke
(0x0a960, 0x0a97c,), # Hangul Choseong Tikeut-m..Hangul Choseong Ssangyeo
(0x0ac00, 0x0d7a3,), # Hangul Syllable Ga ..Hangul Syllable Hih
(0x0f900, 0x0faff,), # Cjk Compatibility Ideogr..(nil)
(0x0fe10, 0x0fe19,), # Presentation Form For Ve..Presentation Form For Ve
(0x0fe30, 0x0fe52,), # Presentation Form For Ve..Small Full Stop
(0x0fe54, 0x0fe66,), # Small Semicolon ..Small Equals Sign
(0x0fe68, 0x0fe6b,), # Small Reverse Solidus ..Small Commercial At
(0x0ff01, 0x0ff60,), # Fullwidth Exclamation Ma..Fullwidth Right White Pa
(0x0ffe0, 0x0ffe6,), # Fullwidth Cent Sign ..Fullwidth Won Sign
(0x16fe0, 0x16fe4,), # Tangut Iteration Mark ..Khitan Small Script Fill
(0x16ff0, 0x16ff1,), # Vietnamese Alternate Rea..Vietnamese Alternate Rea
(0x17000, 0x187f7,), # (nil)
(0x18800, 0x18cd5,), # Tangut Component-001 ..Khitan Small Script Char
(0x18d00, 0x18d08,), # (nil)
(0x1b000, 0x1b11e,), # Katakana Letter Archaic ..Hentaigana Letter N-mu-m
(0x1b150, 0x1b152,), # Hiragana Letter Small Wi..Hiragana Letter Small Wo
(0x1b164, 0x1b167,), # Katakana Letter Small Wi..Katakana Letter Small N
(0x1b170, 0x1b2fb,), # Nushu Character-1b170 ..Nushu Character-1b2fb
(0x1f004, 0x1f004,), # Mahjong Tile Red Dragon
(0x1f0cf, 0x1f0cf,), # Playing Card Black Joker
(0x1f18e, 0x1f18e,), # Negative Squared Ab
(0x1f191, 0x1f19a,), # Squared Cl ..Squared Vs
(0x1f200, 0x1f202,), # Square Hiragana Hoka ..Squared Katakana Sa
(0x1f210, 0x1f23b,), # Squared Cjk Unified Ideo..Squared Cjk Unified Ideo
(0x1f240, 0x1f248,), # Tortoise Shell Bracketed..Tortoise Shell Bracketed
(0x1f250, 0x1f251,), # Circled Ideograph Advant..Circled Ideograph Accept
(0x1f260, 0x1f265,), # Rounded Symbol For Fu ..Rounded Symbol For Cai
(0x1f300, 0x1f320,), # Cyclone ..Shooting Star
(0x1f32d, 0x1f335,), # Hot Dog ..Cactus
(0x1f337, 0x1f37c,), # Tulip ..Baby Bottle
(0x1f37e, 0x1f393,), # Bottle With Popping Cork..Graduation Cap
(0x1f3a0, 0x1f3ca,), # Carousel Horse ..Swimmer
(0x1f3cf, 0x1f3d3,), # Cricket Bat And Ball ..Table Tennis Paddle And
(0x1f3e0, 0x1f3f0,), # House Building ..European Castle
(0x1f3f4, 0x1f3f4,), # Waving Black Flag
(0x1f3f8, 0x1f43e,), # Badminton Racquet And Sh..Paw Prints
(0x1f440, 0x1f440,), # Eyes
(0x1f442, 0x1f4fc,), # Ear ..Videocassette
(0x1f4ff, 0x1f53d,), # Prayer Beads ..Down-pointing Small Red
(0x1f54b, 0x1f54e,), # Kaaba ..Menorah With Nine Branch
(0x1f550, 0x1f567,), # Clock Face One Oclock ..Clock Face Twelve-thirty
(0x1f57a, 0x1f57a,), # Man Dancing
(0x1f595, 0x1f596,), # Reversed Hand With Middl..Raised Hand With Part Be
(0x1f5a4, 0x1f5a4,), # Black Heart
(0x1f5fb, 0x1f64f,), # Mount Fuji ..Person With Folded Hands
(0x1f680, 0x1f6c5,), # Rocket ..Left Luggage
(0x1f6cc, 0x1f6cc,), # Sleeping Accommodation
(0x1f6d0, 0x1f6d2,), # Place Of Worship ..Shopping Trolley
(0x1f6d5, 0x1f6d7,), # Hindu Temple ..Elevator
(0x1f6eb, 0x1f6ec,), # Airplane Departure ..Airplane Arriving
(0x1f6f4, 0x1f6fc,), # Scooter ..Roller Skate
(0x1f7e0, 0x1f7eb,), # Large Orange Circle ..Large Brown Square
(0x1f90c, 0x1f93a,), # Pinched Fingers ..Fencer
(0x1f93c, 0x1f945,), # Wrestlers ..Goal Net
(0x1f947, 0x1f978,), # First Place Medal ..Disguised Face
(0x1f97a, 0x1f9cb,), # Face With Pleading Eyes ..Bubble Tea
(0x1f9cd, 0x1f9ff,), # Standing Person ..Nazar Amulet
(0x1fa70, 0x1fa74,), # Ballet Shoes ..Thong Sandal
(0x1fa78, 0x1fa7a,), # Drop Of Blood ..Stethoscope
(0x1fa80, 0x1fa86,), # Yo-yo ..Nesting Dolls
(0x1fa90, 0x1faa8,), # Ringed Planet ..Rock
(0x1fab0, 0x1fab6,), # Fly ..Feather
(0x1fac0, 0x1fac2,), # Anatomical Heart ..People Hugging
(0x1fad0, 0x1fad6,), # Blueberries ..Teapot
(0x20000, 0x2fffd,), # Cjk Unified Ideograph-20..(nil)
(0x30000, 0x3fffd,), # Cjk Unified Ideograph-30..(nil)
),
'14.0.0': (
# Source: EastAsianWidth-14.0.0.txt
# Date: 2021-07-06, 09:58:53 GMT [KW, LI]
#
(0x01100, 0x0115f,), # Hangul Choseong Kiyeok ..Hangul Choseong Filler
(0x0231a, 0x0231b,), # Watch ..Hourglass
(0x02329, 0x0232a,), # Left-pointing Angle Brac..Right-pointing Angle Bra
(0x023e9, 0x023ec,), # Black Right-pointing Dou..Black Down-pointing Doub
(0x023f0, 0x023f0,), # Alarm Clock
(0x023f3, 0x023f3,), # Hourglass With Flowing Sand
(0x025fd, 0x025fe,), # White Medium Small Squar..Black Medium Small Squar
(0x02614, 0x02615,), # Umbrella With Rain Drops..Hot Beverage
(0x02648, 0x02653,), # Aries ..Pisces
(0x0267f, 0x0267f,), # Wheelchair Symbol
(0x02693, 0x02693,), # Anchor
(0x026a1, 0x026a1,), # High Voltage Sign
(0x026aa, 0x026ab,), # Medium White Circle ..Medium Black Circle
(0x026bd, 0x026be,), # Soccer Ball ..Baseball
(0x026c4, 0x026c5,), # Snowman Without Snow ..Sun Behind Cloud
(0x026ce, 0x026ce,), # Ophiuchus
(0x026d4, 0x026d4,), # No Entry
(0x026ea, 0x026ea,), # Church
(0x026f2, 0x026f3,), # Fountain ..Flag In Hole
(0x026f5, 0x026f5,), # Sailboat
(0x026fa, 0x026fa,), # Tent
(0x026fd, 0x026fd,), # Fuel Pump
(0x02705, 0x02705,), # White Heavy Check Mark
(0x0270a, 0x0270b,), # Raised Fist ..Raised Hand
(0x02728, 0x02728,), # Sparkles
(0x0274c, 0x0274c,), # Cross Mark
(0x0274e, 0x0274e,), # Negative Squared Cross Mark
(0x02753, 0x02755,), # Black Question Mark Orna..White Exclamation Mark O
(0x02757, 0x02757,), # Heavy Exclamation Mark Symbol
(0x02795, 0x02797,), # Heavy Plus Sign ..Heavy Division Sign
(0x027b0, 0x027b0,), # Curly Loop
(0x027bf, 0x027bf,), # Double Curly Loop
(0x02b1b, 0x02b1c,), # Black Large Square ..White Large Square
(0x02b50, 0x02b50,), # White Medium Star
(0x02b55, 0x02b55,), # Heavy Large Circle
(0x02e80, 0x02e99,), # Cjk Radical Repeat ..Cjk Radical Rap
(0x02e9b, 0x02ef3,), # Cjk Radical Choke ..Cjk Radical C-simplified
(0x02f00, 0x02fd5,), # Kangxi Radical One ..Kangxi Radical Flute
(0x02ff0, 0x02ffb,), # Ideographic Description ..Ideographic Description
(0x03000, 0x0303e,), # Ideographic Space ..Ideographic Variation In
(0x03041, 0x03096,), # Hiragana Letter Small A ..Hiragana Letter Small Ke
(0x03099, 0x030ff,), # Combining Katakana-hirag..Katakana Digraph Koto
(0x03105, 0x0312f,), # Bopomofo Letter B ..Bopomofo Letter Nn
(0x03131, 0x0318e,), # Hangul Letter Kiyeok ..Hangul Letter Araeae
(0x03190, 0x031e3,), # Ideographic Annotation L..Cjk Stroke Q
(0x031f0, 0x0321e,), # Katakana Letter Small Ku..Parenthesized Korean Cha
(0x03220, 0x03247,), # Parenthesized Ideograph ..Circled Ideograph Koto
(0x03250, 0x04dbf,), # Partnership Sign ..Cjk Unified Ideograph-4d
(0x04e00, 0x0a48c,), # Cjk Unified Ideograph-4e..Yi Syllable Yyr
(0x0a490, 0x0a4c6,), # Yi Radical Qot ..Yi Radical Ke
(0x0a960, 0x0a97c,), # Hangul Choseong Tikeut-m..Hangul Choseong Ssangyeo
(0x0ac00, 0x0d7a3,), # Hangul Syllable Ga ..Hangul Syllable Hih
(0x0f900, 0x0faff,), # Cjk Compatibility Ideogr..(nil)
(0x0fe10, 0x0fe19,), # Presentation Form For Ve..Presentation Form For Ve
(0x0fe30, 0x0fe52,), # Presentation Form For Ve..Small Full Stop
(0x0fe54, 0x0fe66,), # Small Semicolon ..Small Equals Sign
(0x0fe68, 0x0fe6b,), # Small Reverse Solidus ..Small Commercial At
(0x0ff01, 0x0ff60,), # Fullwidth Exclamation Ma..Fullwidth Right White Pa
(0x0ffe0, 0x0ffe6,), # Fullwidth Cent Sign ..Fullwidth Won Sign
(0x16fe0, 0x16fe4,), # Tangut Iteration Mark ..Khitan Small Script Fill
(0x16ff0, 0x16ff1,), # Vietnamese Alternate Rea..Vietnamese Alternate Rea
(0x17000, 0x187f7,), # (nil)
(0x18800, 0x18cd5,), # Tangut Component-001 ..Khitan Small Script Char
(0x18d00, 0x18d08,), # (nil)
(0x1aff0, 0x1aff3,), # Katakana Letter Minnan T..Katakana Letter Minnan T
(0x1aff5, 0x1affb,), # Katakana Letter Minnan T..Katakana Letter Minnan N
(0x1affd, 0x1affe,), # Katakana Letter Minnan N..Katakana Letter Minnan N
(0x1b000, 0x1b122,), # Katakana Letter Archaic ..Katakana Letter Archaic
(0x1b150, 0x1b152,), # Hiragana Letter Small Wi..Hiragana Letter Small Wo
(0x1b164, 0x1b167,), # Katakana Letter Small Wi..Katakana Letter Small N
(0x1b170, 0x1b2fb,), # Nushu Character-1b170 ..Nushu Character-1b2fb
(0x1f004, 0x1f004,), # Mahjong Tile Red Dragon
(0x1f0cf, 0x1f0cf,), # Playing Card Black Joker
(0x1f18e, 0x1f18e,), # Negative Squared Ab
(0x1f191, 0x1f19a,), # Squared Cl ..Squared Vs
(0x1f200, 0x1f202,), # Square Hiragana Hoka ..Squared Katakana Sa
(0x1f210, 0x1f23b,), # Squared Cjk Unified Ideo..Squared Cjk Unified Ideo
(0x1f240, 0x1f248,), # Tortoise Shell Bracketed..Tortoise Shell Bracketed
(0x1f250, 0x1f251,), # Circled Ideograph Advant..Circled Ideograph Accept
(0x1f260, 0x1f265,), # Rounded Symbol For Fu ..Rounded Symbol For Cai
(0x1f300, 0x1f320,), # Cyclone ..Shooting Star
(0x1f32d, 0x1f335,), # Hot Dog ..Cactus
(0x1f337, 0x1f37c,), # Tulip ..Baby Bottle
(0x1f37e, 0x1f393,), # Bottle With Popping Cork..Graduation Cap
(0x1f3a0, 0x1f3ca,), # Carousel Horse ..Swimmer
(0x1f3cf, 0x1f3d3,), # Cricket Bat And Ball ..Table Tennis Paddle And
(0x1f3e0, 0x1f3f0,), # House Building ..European Castle
(0x1f3f4, 0x1f3f4,), # Waving Black Flag
(0x1f3f8, 0x1f43e,), # Badminton Racquet And Sh..Paw Prints
(0x1f440, 0x1f440,), # Eyes
(0x1f442, 0x1f4fc,), # Ear ..Videocassette
(0x1f4ff, 0x1f53d,), # Prayer Beads ..Down-pointing Small Red
(0x1f54b, 0x1f54e,), # Kaaba ..Menorah With Nine Branch
(0x1f550, 0x1f567,), # Clock Face One Oclock ..Clock Face Twelve-thirty
(0x1f57a, 0x1f57a,), # Man Dancing
(0x1f595, 0x1f596,), # Reversed Hand With Middl..Raised Hand With Part Be
(0x1f5a4, 0x1f5a4,), # Black Heart
(0x1f5fb, 0x1f64f,), # Mount Fuji ..Person With Folded Hands
(0x1f680, 0x1f6c5,), # Rocket ..Left Luggage
(0x1f6cc, 0x1f6cc,), # Sleeping Accommodation
(0x1f6d0, 0x1f6d2,), # Place Of Worship ..Shopping Trolley
(0x1f6d5, 0x1f6d7,), # Hindu Temple ..Elevator
(0x1f6dd, 0x1f6df,), # Playground Slide ..Ring Buoy
(0x1f6eb, 0x1f6ec,), # Airplane Departure ..Airplane Arriving
(0x1f6f4, 0x1f6fc,), # Scooter ..Roller Skate
(0x1f7e0, 0x1f7eb,), # Large Orange Circle ..Large Brown Square
(0x1f7f0, 0x1f7f0,), # Heavy Equals Sign
(0x1f90c, 0x1f93a,), # Pinched Fingers ..Fencer
(0x1f93c, 0x1f945,), # Wrestlers ..Goal Net
(0x1f947, 0x1f9ff,), # First Place Medal ..Nazar Amulet
(0x1fa70, 0x1fa74,), # Ballet Shoes ..Thong Sandal
(0x1fa78, 0x1fa7c,), # Drop Of Blood ..Crutch
(0x1fa80, 0x1fa86,), # Yo-yo ..Nesting Dolls
(0x1fa90, 0x1faac,), # Ringed Planet ..Hamsa
(0x1fab0, 0x1faba,), # Fly ..Nest With Eggs
(0x1fac0, 0x1fac5,), # Anatomical Heart ..Person With Crown
(0x1fad0, 0x1fad9,), # Blueberries ..Jar
(0x1fae0, 0x1fae7,), # Melting Face ..Bubbles
(0x1faf0, 0x1faf6,), # Hand With Index Finger A..Heart Hands
(0x20000, 0x2fffd,), # Cjk Unified Ideograph-20..(nil)
(0x30000, 0x3fffd,), # Cjk Unified Ideograph-30..(nil)
),
'15.0.0': (
# Source: EastAsianWidth-15.0.0.txt
# Date: 2022-05-24, 17:40:20 GMT [KW, LI]
#
(0x01100, 0x0115f,), # Hangul Choseong Kiyeok ..Hangul Choseong Filler
(0x0231a, 0x0231b,), # Watch ..Hourglass
(0x02329, 0x0232a,), # Left-pointing Angle Brac..Right-pointing Angle Bra
(0x023e9, 0x023ec,), # Black Right-pointing Dou..Black Down-pointing Doub
(0x023f0, 0x023f0,), # Alarm Clock
(0x023f3, 0x023f3,), # Hourglass With Flowing Sand
(0x025fd, 0x025fe,), # White Medium Small Squar..Black Medium Small Squar
(0x02614, 0x02615,), # Umbrella With Rain Drops..Hot Beverage
(0x02648, 0x02653,), # Aries ..Pisces
(0x0267f, 0x0267f,), # Wheelchair Symbol
(0x02693, 0x02693,), # Anchor
(0x026a1, 0x026a1,), # High Voltage Sign
(0x026aa, 0x026ab,), # Medium White Circle ..Medium Black Circle
(0x026bd, 0x026be,), # Soccer Ball ..Baseball
(0x026c4, 0x026c5,), # Snowman Without Snow ..Sun Behind Cloud
(0x026ce, 0x026ce,), # Ophiuchus
(0x026d4, 0x026d4,), # No Entry
(0x026ea, 0x026ea,), # Church
(0x026f2, 0x026f3,), # Fountain ..Flag In Hole
(0x026f5, 0x026f5,), # Sailboat
(0x026fa, 0x026fa,), # Tent
(0x026fd, 0x026fd,), # Fuel Pump
(0x02705, 0x02705,), # White Heavy Check Mark
(0x0270a, 0x0270b,), # Raised Fist ..Raised Hand
(0x02728, 0x02728,), # Sparkles
(0x0274c, 0x0274c,), # Cross Mark
(0x0274e, 0x0274e,), # Negative Squared Cross Mark
(0x02753, 0x02755,), # Black Question Mark Orna..White Exclamation Mark O
(0x02757, 0x02757,), # Heavy Exclamation Mark Symbol
(0x02795, 0x02797,), # Heavy Plus Sign ..Heavy Division Sign
(0x027b0, 0x027b0,), # Curly Loop
(0x027bf, 0x027bf,), # Double Curly Loop
(0x02b1b, 0x02b1c,), # Black Large Square ..White Large Square
(0x02b50, 0x02b50,), # White Medium Star
(0x02b55, 0x02b55,), # Heavy Large Circle
(0x02e80, 0x02e99,), # Cjk Radical Repeat ..Cjk Radical Rap
(0x02e9b, 0x02ef3,), # Cjk Radical Choke ..Cjk Radical C-simplified
(0x02f00, 0x02fd5,), # Kangxi Radical One ..Kangxi Radical Flute
(0x02ff0, 0x02ffb,), # Ideographic Description ..Ideographic Description
(0x03000, 0x0303e,), # Ideographic Space ..Ideographic Variation In
(0x03041, 0x03096,), # Hiragana Letter Small A ..Hiragana Letter Small Ke
(0x03099, 0x030ff,), # Combining Katakana-hirag..Katakana Digraph Koto
(0x03105, 0x0312f,), # Bopomofo Letter B ..Bopomofo Letter Nn
(0x03131, 0x0318e,), # Hangul Letter Kiyeok ..Hangul Letter Araeae
(0x03190, 0x031e3,), # Ideographic Annotation L..Cjk Stroke Q
(0x031f0, 0x0321e,), # Katakana Letter Small Ku..Parenthesized Korean Cha
(0x03220, 0x03247,), # Parenthesized Ideograph ..Circled Ideograph Koto
(0x03250, 0x04dbf,), # Partnership Sign ..Cjk Unified Ideograph-4d
(0x04e00, 0x0a48c,), # Cjk Unified Ideograph-4e..Yi Syllable Yyr
(0x0a490, 0x0a4c6,), # Yi Radical Qot ..Yi Radical Ke
(0x0a960, 0x0a97c,), # Hangul Choseong Tikeut-m..Hangul Choseong Ssangyeo
(0x0ac00, 0x0d7a3,), # Hangul Syllable Ga ..Hangul Syllable Hih
(0x0f900, 0x0faff,), # Cjk Compatibility Ideogr..(nil)
(0x0fe10, 0x0fe19,), # Presentation Form For Ve..Presentation Form For Ve
(0x0fe30, 0x0fe52,), # Presentation Form For Ve..Small Full Stop
(0x0fe54, 0x0fe66,), # Small Semicolon ..Small Equals Sign
(0x0fe68, 0x0fe6b,), # Small Reverse Solidus ..Small Commercial At
(0x0ff01, 0x0ff60,), # Fullwidth Exclamation Ma..Fullwidth Right White Pa
(0x0ffe0, 0x0ffe6,), # Fullwidth Cent Sign ..Fullwidth Won Sign
(0x16fe0, 0x16fe4,), # Tangut Iteration Mark ..Khitan Small Script Fill
(0x16ff0, 0x16ff1,), # Vietnamese Alternate Rea..Vietnamese Alternate Rea
(0x17000, 0x187f7,), # (nil)
(0x18800, 0x18cd5,), # Tangut Component-001 ..Khitan Small Script Char
(0x18d00, 0x18d08,), # (nil)
(0x1aff0, 0x1aff3,), # Katakana Letter Minnan T..Katakana Letter Minnan T
(0x1aff5, 0x1affb,), # Katakana Letter Minnan T..Katakana Letter Minnan N
(0x1affd, 0x1affe,), # Katakana Letter Minnan N..Katakana Letter Minnan N
(0x1b000, 0x1b122,), # Katakana Letter Archaic ..Katakana Letter Archaic
(0x1b132, 0x1b132,), # (nil)
(0x1b150, 0x1b152,), # Hiragana Letter Small Wi..Hiragana Letter Small Wo
(0x1b155, 0x1b155,), # (nil)
(0x1b164, 0x1b167,), # Katakana Letter Small Wi..Katakana Letter Small N
(0x1b170, 0x1b2fb,), # Nushu Character-1b170 ..Nushu Character-1b2fb
(0x1f004, 0x1f004,), # Mahjong Tile Red Dragon
(0x1f0cf, 0x1f0cf,), # Playing Card Black Joker
(0x1f18e, 0x1f18e,), # Negative Squared Ab
(0x1f191, 0x1f19a,), # Squared Cl ..Squared Vs
(0x1f200, 0x1f202,), # Square Hiragana Hoka ..Squared Katakana Sa
(0x1f210, 0x1f23b,), # Squared Cjk Unified Ideo..Squared Cjk Unified Ideo
(0x1f240, 0x1f248,), # Tortoise Shell Bracketed..Tortoise Shell Bracketed
(0x1f250, 0x1f251,), # Circled Ideograph Advant..Circled Ideograph Accept
(0x1f260, 0x1f265,), # Rounded Symbol For Fu ..Rounded Symbol For Cai
(0x1f300, 0x1f320,), # Cyclone ..Shooting Star
(0x1f32d, 0x1f335,), # Hot Dog ..Cactus
(0x1f337, 0x1f37c,), # Tulip ..Baby Bottle
(0x1f37e, 0x1f393,), # Bottle With Popping Cork..Graduation Cap
(0x1f3a0, 0x1f3ca,), # Carousel Horse ..Swimmer
(0x1f3cf, 0x1f3d3,), # Cricket Bat And Ball ..Table Tennis Paddle And
(0x1f3e0, 0x1f3f0,), # House Building ..European Castle
(0x1f3f4, 0x1f3f4,), # Waving Black Flag
(0x1f3f8, 0x1f43e,), # Badminton Racquet And Sh..Paw Prints
(0x1f440, 0x1f440,), # Eyes
(0x1f442, 0x1f4fc,), # Ear ..Videocassette
(0x1f4ff, 0x1f53d,), # Prayer Beads ..Down-pointing Small Red
(0x1f54b, 0x1f54e,), # Kaaba ..Menorah With Nine Branch
(0x1f550, 0x1f567,), # Clock Face One Oclock ..Clock Face Twelve-thirty
(0x1f57a, 0x1f57a,), # Man Dancing
(0x1f595, 0x1f596,), # Reversed Hand With Middl..Raised Hand With Part Be
(0x1f5a4, 0x1f5a4,), # Black Heart
(0x1f5fb, 0x1f64f,), # Mount Fuji ..Person With Folded Hands
(0x1f680, 0x1f6c5,), # Rocket ..Left Luggage
(0x1f6cc, 0x1f6cc,), # Sleeping Accommodation
(0x1f6d0, 0x1f6d2,), # Place Of Worship ..Shopping Trolley
(0x1f6d5, 0x1f6d7,), # Hindu Temple ..Elevator
(0x1f6dc, 0x1f6df,), # (nil) ..Ring Buoy
(0x1f6eb, 0x1f6ec,), # Airplane Departure ..Airplane Arriving
(0x1f6f4, 0x1f6fc,), # Scooter ..Roller Skate
(0x1f7e0, 0x1f7eb,), # Large Orange Circle ..Large Brown Square
(0x1f7f0, 0x1f7f0,), # Heavy Equals Sign
(0x1f90c, 0x1f93a,), # Pinched Fingers ..Fencer
(0x1f93c, 0x1f945,), # Wrestlers ..Goal Net
(0x1f947, 0x1f9ff,), # First Place Medal ..Nazar Amulet
(0x1fa70, 0x1fa7c,), # Ballet Shoes ..Crutch
(0x1fa80, 0x1fa88,), # Yo-yo ..(nil)
(0x1fa90, 0x1fabd,), # Ringed Planet ..(nil)
(0x1fabf, 0x1fac5,), # (nil) ..Person With Crown
(0x1face, 0x1fadb,), # (nil)
(0x1fae0, 0x1fae8,), # Melting Face ..(nil)
(0x1faf0, 0x1faf8,), # Hand With Index Finger A..(nil)
(0x20000, 0x2fffd,), # Cjk Unified Ideograph-20..(nil)
(0x30000, 0x3fffd,), # Cjk Unified Ideograph-30..(nil)
),
}
| 91,708 | Python | 66.582166 | 82 | 0.597505 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.lula_test_widget/PACKAGE-LICENSES/omni.isaac.lula_test_widget-LICENSE.md | Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
NVIDIA CORPORATION and its licensors retain all intellectual property
and proprietary rights in and to this software, related documentation
and any modifications thereto. Any use, reproduction, disclosure or
distribution of this software and related documentation without an express
license agreement from NVIDIA CORPORATION is strictly prohibited. | 412 | Markdown | 57.999992 | 74 | 0.839806 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.lula_test_widget/config/extension.toml | [core]
reloadable = true
order = 0
[package]
version = "0.1.0"
category = "Simulation"
title = "Lula Test Widget"
description = "Run Simple Tests Using Lula Algorithms"
authors = ["NVIDIA"]
repository = ""
keywords = ["isaac", "lula"]
changelog = "docs/CHANGELOG.md"
readme = "docs/README.md"
icon = "data/icon.png"
[dependencies]
"omni.kit.uiapp" = {}
"omni.isaac.ui" = {}
"omni.isaac.core" = {}
"omni.isaac.lula" = {}
"omni.isaac.motion_generation" = {}
[[python.module]]
name = "omni.isaac.lula_test_widget" | 514 | TOML | 18.807692 | 54 | 0.669261 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.lula_test_widget/omni/isaac/lula_test_widget/test_scenarios.py | # Copyright (c) 2018-2021, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
from omni.isaac.core.objects.cuboid import VisualCuboid
from omni.isaac.core.objects.cylinder import VisualCylinder
from omni.isaac.core.objects.cone import VisualCone
from omni.isaac.core.prims import XFormPrim
from omni.isaac.core.utils.prims import is_prim_path_valid, delete_prim
from omni.isaac.core.utils.string import find_unique_string_name
from omni.isaac.core.utils.rotations import euler_angles_to_quat
from omni.isaac.core.utils.numpy import rot_matrices_to_quats
from omni.isaac.core.utils.types import ArticulationAction
from .controllers import KinematicsController, TrajectoryController
from omni.isaac.motion_generation import (
MotionPolicyController,
LulaKinematicsSolver,
LulaTaskSpaceTrajectoryGenerator,
ArticulationKinematicsSolver,
ArticulationMotionPolicy,
ArticulationTrajectory,
RmpFlow,
)
import numpy as np
import carb
class LulaTestScenarios:
def __init__(self):
self._target = None
self._obstacles = []
self._trajectory_base_frame = None
self._trajectory_targets = []
self._controller = None
self.timestep = 0
self.lula_ik = None
self.rmpflow = None
self.traj_gen = None
self.use_orientation = True
self.scenario_name = ""
self.rmpflow_debug_mode = False
self._ee_frame_prim = None
self.art_ik = None
def visualize_ee_frame(self, articulation, ee_frame):
if self.lula_ik is None or articulation is None:
return
if self._ee_frame_prim is not None:
delete_prim(self._ee_frame_prim.prim_path)
self.art_ik = ArticulationKinematicsSolver(articulation, self.lula_ik, ee_frame)
position, orientation = self.art_ik.compute_end_effector_pose()
orientation = rot_matrices_to_quats(orientation)
self._ee_frame_prim = self._create_frame_prim(position, orientation, "/Lula/end_effector")
def stop_visualize_ee_frame(self):
if self._ee_frame_prim is not None:
delete_prim(self._ee_frame_prim.prim_path)
self._ee_frame_prim = None
self.art_ik = None
def toggle_rmpflow_debug_mode(self):
self.rmpflow_debug_mode = not self.rmpflow_debug_mode
if self.rmpflow is None:
return
if self.rmpflow_debug_mode:
self.rmpflow.set_ignore_state_updates(True)
self.rmpflow.visualize_collision_spheres()
else:
self.rmpflow.set_ignore_state_updates(False)
self.rmpflow.stop_visualizing_collision_spheres()
def initialize_ik_solver(self, robot_description_path, urdf_path):
self.lula_ik = LulaKinematicsSolver(robot_description_path, urdf_path)
def get_ik_frames(self):
if self.lula_ik is None:
return []
return self.lula_ik.get_all_frame_names()
def on_ik_follow_target(self, articulation, ee_frame_name):
self.scenario_reset()
if self.lula_ik is None:
return
art_ik = ArticulationKinematicsSolver(articulation, self.lula_ik, ee_frame_name)
self._controller = KinematicsController("Lula Kinematics Controller", art_ik)
self._create_target()
def on_custom_trajectory(self, robot_description_path, urdf_path):
self.scenario_reset()
if self.lula_ik is None:
return
self.scenario_name = "Custom Trajectory"
orientation = np.array([0, 1, 0, 0])
rect_path = np.array([[0.3, -0.3, 0.1], [0.3, 0.3, 0.1], [0.3, 0.3, 0.5], [0.3, -0.3, 0.5], [0.3, -0.3, 0.1]])
self.traj_gen = LulaTaskSpaceTrajectoryGenerator(robot_description_path, urdf_path)
self._trajectory_base_frame = XFormPrim("/Trajectory", position=np.array([0, 0, 0]))
for i in range(4):
frame_prim = self._create_frame_prim(rect_path[i], orientation, f"/Trajectory/Target_{i+1}")
self._trajectory_targets.append(frame_prim)
def create_trajectory_controller(self, articulation, ee_frame):
if self.traj_gen is None:
return
positions = np.empty((len(self._trajectory_targets), 3))
orientations = np.empty((len(self._trajectory_targets), 4))
for i, target in enumerate(self._trajectory_targets):
positions[i], orientations[i] = target.get_world_pose()
trajectory = self.traj_gen.compute_task_space_trajectory_from_points(positions, orientations, ee_frame)
art_traj = ArticulationTrajectory(articulation, trajectory, 1 / 60)
self._controller = TrajectoryController("Trajectory Controller", art_traj)
def delete_waypoint(self):
if self.scenario_name == "Custom Trajectory" and len(self._trajectory_targets) > 2:
waypoint = self._trajectory_targets[-1]
delete_prim(waypoint.prim_path)
self._trajectory_targets = self._trajectory_targets[:-1]
def add_waypoint(self):
if self.scenario_name == "Custom Trajectory":
orientation = self._trajectory_targets[-1].get_world_pose()[1]
positions = []
for waypoint in self._trajectory_targets:
positions.append(waypoint.get_world_pose()[0])
waypoint = self._create_frame_prim(
np.mean(positions, axis=0), orientation, f"/Trajectory/Target_{len(self._trajectory_targets)+1}"
)
self._trajectory_targets.append(waypoint)
def on_rmpflow_follow_target_obstacles(self, articulation, **rmp_config):
self.scenario_reset()
self.rmpflow = RmpFlow(**rmp_config)
if self.rmpflow_debug_mode:
self.rmpflow.set_ignore_state_updates(True)
self.rmpflow.visualize_collision_spheres()
self.rmpflow.set_robot_base_pose(*articulation.get_world_pose())
art_rmp = ArticulationMotionPolicy(articulation, self.rmpflow, 1 / 60)
self._controller = MotionPolicyController("RmpFlow Controller", art_rmp)
self._create_target()
self._create_wall()
self._create_wall(position=np.array([0.4, 0, 0.1]), orientation=np.array([1, 0, 0, 0]))
for obstacle in self._obstacles:
self.rmpflow.add_obstacle(obstacle)
def on_rmpflow_follow_sinusoidal_target(self, articulation, **rmp_config):
self.scenario_reset()
self.scenario_name = "Sinusoidal Target"
self.rmpflow = RmpFlow(**rmp_config)
if self.rmpflow_debug_mode:
self.rmpflow.set_ignore_state_updates(True)
self.rmpflow.visualize_collision_spheres()
self.rmpflow.set_robot_base_pose(*articulation.get_world_pose())
art_rmp = ArticulationMotionPolicy(articulation, self.rmpflow, 1 / 60)
self._controller = MotionPolicyController("RmpFlow Controller", art_rmp)
self._create_target()
def get_rmpflow(self):
return self.rmpflow
def _create_target(self, position=None, orientation=None):
if position is None:
position = np.array([0.5, 0, 0.5])
if orientation is None:
orientation = np.array([0, -1, 0, 0])
self._target = VisualCuboid(
"/World/Target", size=0.05, position=position, orientation=orientation, color=np.array([1.0, 0, 0])
)
def _create_frame_prim(self, position, orientation, parent_prim_path):
frame_xform = XFormPrim(parent_prim_path, position=position, orientation=orientation)
line_len = 0.04
line_width = 0.004
cone_radius = 0.01
cone_len = 0.02
x_axis = VisualCylinder(
parent_prim_path + "/X_line",
translation=np.array([line_len / 2, 0, 0]),
orientation=euler_angles_to_quat([0, np.pi / 2, 0]),
color=np.array([1, 0, 0]),
height=line_len,
radius=line_width,
)
x_tip = VisualCone(
parent_prim_path + "/X_tip",
translation=np.array([line_len + cone_len / 2, 0, 0]),
orientation=euler_angles_to_quat([0, np.pi / 2, 0]),
color=np.array([1, 0, 0]),
height=cone_len,
radius=cone_radius,
)
y_axis = VisualCylinder(
parent_prim_path + "/Y_line",
translation=np.array([0, line_len / 2, 0]),
orientation=euler_angles_to_quat([-np.pi / 2, 0, 0]),
color=np.array([0, 1, 0]),
height=line_len,
radius=line_width,
)
y_tip = VisualCone(
parent_prim_path + "/Y_tip",
translation=np.array([0, line_len + cone_len / 2, 0]),
orientation=euler_angles_to_quat([-np.pi / 2, 0, 0]),
color=np.array([0, 1, 0]),
height=cone_len,
radius=cone_radius,
)
z_axis = VisualCylinder(
parent_prim_path + "/Z_line",
translation=np.array([0, 0, line_len / 2]),
orientation=euler_angles_to_quat([0, 0, 0]),
color=np.array([0, 0, 1]),
height=line_len,
radius=line_width,
)
z_tip = VisualCone(
parent_prim_path + "/Z_tip",
translation=np.array([0, 0, line_len + cone_len / 2]),
orientation=euler_angles_to_quat([0, 0, 0]),
color=np.array([0, 0, 1]),
height=cone_len,
radius=cone_radius,
)
return frame_xform
def _create_wall(self, position=None, orientation=None):
cube_prim_path = find_unique_string_name(
initial_name="/World/WallObstacle", is_unique_fn=lambda x: not is_prim_path_valid(x)
)
if position is None:
position = np.array([0.45, -0.15, 0.5])
if orientation is None:
orientation = euler_angles_to_quat(np.array([0, 0, np.pi / 2]))
cube = VisualCuboid(
prim_path=cube_prim_path,
position=position,
orientation=orientation,
size=1.0,
scale=np.array([0.1, 0.5, 0.6]),
color=np.array([0, 0, 1.0]),
)
self._obstacles.append(cube)
def set_use_orientation(self, use_orientation):
self.use_orientation = use_orientation
def full_reset(self):
self.scenario_reset()
self.lula_ik = None
self.use_orientation = True
if self._ee_frame_prim is not None:
delete_prim("/Lula")
self._ee_frame_prim = None
self.art_ik = None
def scenario_reset(self):
if self._target is not None:
delete_prim(self._target.prim_path)
if self._trajectory_base_frame is not None:
delete_prim(self._trajectory_base_frame.prim_path)
for obstacle in self._obstacles:
delete_prim(obstacle.prim_path)
self._target = None
self._obstacles = []
self._trajectory_targets = []
self._trajectory_base_frame = None
self._controller = None
if self.rmpflow is not None:
self.rmpflow.stop_visualizing_collision_spheres()
self.timestep = 0
self.scenario_name = ""
def update_scenario(self, **scenario_params):
if self.scenario_name == "Sinusoidal Target":
w_z = scenario_params["w_z"]
w_xy = scenario_params["w_xy"]
rad_z = scenario_params["rad_z"]
rad_xy = scenario_params["rad_xy"]
height = scenario_params["height"]
            # Vertical position: sinusoidal oscillation of amplitude rad_z about the configured height.
            z = height + rad_z * np.sin(2 * np.pi * w_z * self.timestep / 60)
            # Angle about the z axis; the sign flip below reverses the sweep direction every
            # other revolution so the target alternates between clockwise and counterclockwise.
            a = 2 * np.pi * w_xy * self.timestep / 60
            if (a / np.pi) % 4 > 2:
                a = -a
            x, y = rad_xy * np.cos(a), rad_xy * np.sin(a)
target_position = np.array([x, y, z])
target_orientation = euler_angles_to_quat(np.array([np.pi / 2, 0, np.pi / 2 + a]))
self._target.set_world_pose(target_position, target_orientation)
self.timestep += 1
def get_next_action(self, **scenario_params):
if self._ee_frame_prim is not None:
position, orientation = self.art_ik.compute_end_effector_pose()
orientation = rot_matrices_to_quats(orientation)
self._ee_frame_prim.set_world_pose(position, orientation)
if self._controller is None:
return ArticulationAction()
self.update_scenario(**scenario_params)
if self._target is not None:
position, orientation = self._target.get_local_pose()
if not self.use_orientation:
orientation = None
return self._controller.forward(position, orientation)
else:
return self._controller.forward(np.empty((3,)), None)
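# Usage sketch added for illustration (an assumption, not part of the original module):
# the widget drives LulaTestScenarios roughly as follows, given an initialized
# Articulation `robot` and paths to a Lula robot description YAML and URDF.
def _example_follow_target(robot, robot_description_path, urdf_path):
    scenarios = LulaTestScenarios()
    scenarios.initialize_ik_solver(robot_description_path, urdf_path)
    # Pick the last available frame, mirroring the widget's default selection.
    ee_frame = scenarios.get_ik_frames()[-1]
    scenarios.on_ik_follow_target(robot, ee_frame)
    # On each physics step, compute the next action and apply it to the robot.
    action = scenarios.get_next_action()
    robot.get_articulation_controller().apply_action(action)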
| 13,138 | Python | 36.433048 | 118 | 0.607018 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.lula_test_widget/omni/isaac/lula_test_widget/extension.py | # Copyright (c) 2018-2021, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
import weakref
import asyncio
import gc
import carb
import omni
from pxr import Usd
from omni.kit.window.property.templates import LABEL_WIDTH
import omni.ui as ui
import omni.usd
import omni.timeline
import omni.kit.commands
from omni.kit.menu.utils import add_menu_items, remove_menu_items
from omni.isaac.ui.menu import make_menu_item_description
from omni.isaac.core.utils.prims import get_prim_object_type
from omni.isaac.core.articulations import Articulation
from omni.isaac.ui.widgets import DynamicComboBoxModel
from .test_scenarios import LulaTestScenarios
from omni.isaac.ui.ui_utils import (
add_line_rect_flourish,
btn_builder,
state_btn_builder,
float_builder,
setup_ui_headers,
get_style,
str_builder,
)
from omni.kit.window.extensions import SimpleCheckBox
import omni.physx as _physx
import numpy as np
import os
EXTENSION_NAME = "Lula Test Widget"
MAX_DOF_NUM = 100
def is_yaml_file(path: str):
_, ext = os.path.splitext(path.lower())
return ext in [".yaml", ".YAML"]
def is_urdf_file(path: str):
_, ext = os.path.splitext(path.lower())
return ext in [".urdf", ".URDF"]
def on_filter_yaml_item(item) -> bool:
if not item or item.is_folder:
return not (item.name == "Omniverse" or item.path.startswith("omniverse:"))
return is_yaml_file(item.path)
def on_filter_urdf_item(item) -> bool:
if not item or item.is_folder:
return not (item.name == "Omniverse" or item.path.startswith("omniverse:"))
return is_urdf_file(item.path)
class Extension(omni.ext.IExt):
def on_startup(self, ext_id: str):
"""Initialize extension and UI elements"""
# Events
self._usd_context = omni.usd.get_context()
self._physxIFace = _physx.acquire_physx_interface()
self._physx_subscription = None
self._stage_event_sub = None
self._timeline = omni.timeline.get_timeline_interface()
# Build Window
self._window = ui.Window(
title=EXTENSION_NAME, width=600, height=500, visible=False, dockPreference=ui.DockPreference.LEFT_BOTTOM
)
self._window.set_visibility_changed_fn(self._on_window)
# UI
self._models = {}
self._ext_id = ext_id
self._menu_items = [
make_menu_item_description(ext_id, EXTENSION_NAME, lambda a=weakref.proxy(self): a._menu_callback())
]
add_menu_items(self._menu_items, "Isaac Utils")
# Selection
self._new_window = True
self.new_selection = True
self._selected_index = None
self._selected_prim_path = None
self._prev_art_prim_path = None
# Articulation
self.articulation = None
self.num_dof = 0
self.dof_names = []
self.link_names = []
# Lula Config Files
self._selected_robot_description_file = None
self._selected_robot_urdf_file = None
self._robot_description_file = None
self._robot_urdf_file = None
self._ee_frame_options = []
self._rmpflow_config_yaml = None
# Lula Test Scenarios
self._test_scenarios = LulaTestScenarios()
# Visualize End Effector
self._visualize_end_effector = True
def on_shutdown(self):
self._test_scenarios.full_reset()
self.articulation = None
self._usd_context = None
self._stage_event_sub = None
self._timeline_event_sub = None
self._physx_subscription = None
self._models = {}
remove_menu_items(self._menu_items, "Isaac Utils")
if self._window:
self._window = None
gc.collect()
def _on_window(self, visible):
if self._window.visible:
# Subscribe to Stage and Timeline Events
self._usd_context = omni.usd.get_context()
events = self._usd_context.get_stage_event_stream()
self._stage_event_sub = events.create_subscription_to_pop(self._on_stage_event)
stream = self._timeline.get_timeline_event_stream()
self._timeline_event_sub = stream.create_subscription_to_pop(self._on_timeline_event)
self._build_ui()
if not self._new_window and self.articulation:
self._refresh_ui(self.articulation)
self._new_window = False
else:
self._usd_context = None
self._stage_event_sub = None
self._timeline_event_sub = None
def _menu_callback(self):
self._window.visible = not self._window.visible
# Update the Selection Box if the Timeline is already playing
if self._timeline.is_playing():
self._refresh_selection_combobox()
def _build_ui(self):
# if not self._window:
with self._window.frame:
with ui.VStack(spacing=5, height=0):
self._build_info_ui()
self._build_selection_ui()
self._build_kinematics_ui()
self._build_trajectory_generation_ui()
self._build_rmpflow_ui()
async def dock_window():
await omni.kit.app.get_app().next_update_async()
def dock(space, name, location, pos=0.5):
window = omni.ui.Workspace.get_window(name)
if window and space:
window.dock_in(space, location, pos)
return window
tgt = ui.Workspace.get_window("Viewport")
dock(tgt, EXTENSION_NAME, omni.ui.DockPosition.LEFT, 0.33)
await omni.kit.app.get_app().next_update_async()
self._task = asyncio.ensure_future(dock_window())
def _on_selection(self, prim_path):
"""Creates an Articulation Object from the selected articulation prim path.
Updates the UI with the Selected articulation.
Args:
prim_path (string): path to selected articulation
"""
if prim_path == self._prev_art_prim_path:
return
else:
self._prev_art_prim_path = prim_path
self.new_selection = True
self._prev_link = None
if self.articulation_list and prim_path != "None":
# Create and Initialize the Articulation
self.articulation = Articulation(prim_path)
if not self.articulation.handles_initialized:
self.articulation.initialize()
            # Update the entire UI with the selected articulation
self._refresh_ui(self.articulation)
# start event subscriptions
if not self._physx_subscription:
self._physx_subscription = self._physxIFace.subscribe_physics_step_events(self._on_physics_step)
# Deselect and Reset
else:
if self.articulation is not None:
self._reset_ui()
self._refresh_selection_combobox()
self.articulation = None
# carb.log_warn("Resetting Articulation Inspector")
def _on_combobox_selection(self, model=None, val=None):
# index = model.get_item_value_model().as_int
index = self._models["ar_selection_model"].get_item_value_model().as_int
if index >= 0 and index < len(self.articulation_list):
self._selected_index = index
item = self.articulation_list[index]
self._selected_prim_path = item
self._on_selection(item)
def _refresh_selection_combobox(self):
self.articulation_list = self.get_all_articulations()
if self._prev_art_prim_path is not None and self._prev_art_prim_path not in self.articulation_list:
self._reset_ui()
self._models["ar_selection_model"] = DynamicComboBoxModel(self.articulation_list)
self._models["ar_selection_combobox"].model = self._models["ar_selection_model"]
self._models["ar_selection_combobox"].model.add_item_changed_fn(self._on_combobox_selection)
# If something was already selected, reselect after refresh
if self._selected_index is not None and self._selected_prim_path is not None:
# If the item is still in the articulation list
if self._selected_prim_path in self.articulation_list:
self._models["ar_selection_combobox"].model.set_item_value_model(
ui.SimpleIntModel(self._selected_index)
)
def _clear_selection_combobox(self):
self._selected_index = None
self._selected_prim_path = None
self.articulation_list = []
self._models["ar_selection_model"] = DynamicComboBoxModel(self.articulation_list)
self._models["ar_selection_combobox"].model = self._models["ar_selection_model"]
self._models["ar_selection_combobox"].model.add_item_changed_fn(self._on_combobox_selection)
def get_all_articulations(self):
"""Get all the articulation objects from the Stage.
Returns:
list(str): list of prim_paths as strings
"""
articulations = ["None"]
stage = self._usd_context.get_stage()
if stage:
for prim in Usd.PrimRange(stage.GetPrimAtPath("/")):
path = str(prim.GetPath())
# Get prim type get_prim_object_type
type = get_prim_object_type(path)
if type == "articulation":
articulations.append(path)
return articulations
def get_articulation_values(self, articulation):
"""Get and store the latest dof_properties from the articulation.
Update the Properties UI.
Args:
articulation (Articulation): Selected Articulation
"""
# Update static dof properties on new selection
if self.new_selection:
self.num_dof = articulation.num_dof
self.dof_names = articulation.dof_names
self.new_selection = False
self._joint_positions = articulation.get_joint_positions()
def _refresh_ee_frame_combobox(self):
if self._robot_description_file is not None and self._robot_urdf_file is not None:
self._test_scenarios.initialize_ik_solver(self._robot_description_file, self._robot_urdf_file)
ee_frames = self._test_scenarios.get_ik_frames()
else:
ee_frames = []
name = "ee_frame"
self._models[name] = DynamicComboBoxModel(ee_frames)
self._models[name + "_combobox"].model = self._models[name]
if len(ee_frames) > 0:
self._models[name].get_item_value_model().set_value(len(ee_frames) - 1)
self._models[name].add_item_changed_fn(self._reset_scenario)
self._ee_frame_options = ee_frames
def _reset_scenario(self, model=None, value=None):
self._enable_lula_dropdowns()
self._set_enable_trajectory_panel(False)
if self.articulation is not None:
self.articulation.post_reset()
def _refresh_ui(self, articulation):
"""Updates the GUI with a new Articulation's properties.
Args:
articulation (Articulation): [description]
"""
# Get the latest articulation values and update the Properties UI
self.get_articulation_values(articulation)
if is_yaml_file(self._models["input_robot_description_file"].get_value_as_string()):
            self._enable_load_button()
def _reset_ui(self):
"""Reset / Hide UI Elements.
"""
self._clear_selection_combobox()
self._test_scenarios.full_reset()
self._prev_art_prim_path = None
self._visualize_end_effector = True
##################################
# Callbacks
##################################
def _on_stage_event(self, event):
"""Callback for Stage Events
Args:
event (omni.usd.StageEventType): Event Type
"""
# On every stage event check if any articulations have been added/removed from the Stage
self._refresh_selection_combobox()
if event.type == int(omni.usd.StageEventType.SELECTION_CHANGED):
# self._on_selection_changed()
pass
elif event.type == int(omni.usd.StageEventType.OPENED) or event.type == int(omni.usd.StageEventType.CLOSED):
# stage was opened or closed, cleanup
self._physx_subscription = None
def _on_physics_step(self, step):
"""Callback for Physics Step.
Args:
step ([type]): [description]
"""
if self.articulation is not None:
if not self.articulation.handles_initialized:
self.articulation.initialize()
# Get the latest values from the articulation
self.get_articulation_values(self.articulation)
action = self._get_next_action()
self.articulation.get_articulation_controller().apply_action(action)
return
def _get_next_action(self):
if self._test_scenarios.scenario_name == "Sinusoidal Target":
w_xy = self._models["rmpflow_follow_sinusoid_w_xy"].get_value_as_float()
w_z = self._models["rmpflow_follow_sinusoid_w_z"].get_value_as_float()
rad_z = self._models["rmpflow_follow_sinusoid_rad_z"].get_value_as_float()
rad_xy = self._models["rmpflow_follow_sinusoid_rad_xy"].get_value_as_float()
height = self._models["rmpflow_follow_sinusoid_height"].get_value_as_float()
return self._test_scenarios.get_next_action(w_xy=w_xy, w_z=w_z, rad_z=rad_z, rad_xy=rad_xy, height=height)
else:
return self._test_scenarios.get_next_action()
def _on_timeline_event(self, e):
"""Callback for Timeline Events
Args:
event (omni.timeline.TimelineEventType): Event Type
"""
if e.type == int(omni.timeline.TimelineEventType.PLAY):
# BUG: get_all_articulations returns ['None'] after STOP/PLAY <-- articulations show up as xforms
self._refresh_selection_combobox()
elif e.type == int(omni.timeline.TimelineEventType.STOP):
self._reset_ui()
##################################
# UI Builders
##################################
def _build_info_ui(self):
title = EXTENSION_NAME
doc_link = "https://docs.omniverse.nvidia.com/app_isaacsim/app_isaacsim/overview.html"
overview = "This utility is used to help generate and refine the collision sphere representation of a robot. "
overview += "Select the Articulation for which you would like to edit spheres from the dropdown menu. Then select a link from the robot Articulation to begin using the Sphere Editor."
overview += "\n\nPress the 'Open in IDE' button to view the source code."
setup_ui_headers(self._ext_id, __file__, title, doc_link, overview)
def _build_selection_ui(self):
frame = ui.CollapsableFrame(
title="Selection Panel",
height=0,
collapsed=False,
style=get_style(),
style_type_name_override="CollapsableFrame",
horizontal_scrollbar_policy=ui.ScrollBarPolicy.SCROLLBAR_AS_NEEDED,
vertical_scrollbar_policy=ui.ScrollBarPolicy.SCROLLBAR_ALWAYS_ON,
)
with frame:
with ui.VStack(style=get_style(), spacing=5, height=0):
# Create a dynamic ComboBox for Articulation Selection
self.articulation_list = []
self._models["ar_selection_model"] = DynamicComboBoxModel(self.articulation_list)
with ui.HStack():
ui.Label(
"Select Articulation",
width=LABEL_WIDTH,
alignment=ui.Alignment.LEFT_CENTER,
tooltip="Select Articulation",
)
self._models["ar_selection_combobox"] = ui.ComboBox(self._models["ar_selection_model"])
add_line_rect_flourish(False)
self._models["ar_selection_combobox"].model.add_item_changed_fn(self._on_combobox_selection)
# Select Robot Description YAML file
def check_file_type(model=None):
path = model.get_value_as_string()
if is_yaml_file(path):
self._selected_robot_description_file = model.get_value_as_string()
self._enable_load_button()
else:
self._selected_robot_description_file = None
carb.log_warn(f"Invalid path to Robot Desctiption YAML: {path}")
kwargs = {
"label": "Robot Description YAML",
"default_val": "",
"tooltip": "Click the Folder Icon to Set Filepath",
"use_folder_picker": True,
"item_filter_fn": on_filter_yaml_item,
"folder_dialog_title": "Select Robot Description YAML file",
"folder_button_title": "Select YAML",
}
self._models["input_robot_description_file"] = str_builder(**kwargs)
self._models["input_robot_description_file"].add_value_changed_fn(check_file_type)
# Select Robot URDF file
def check_urdf_file_type(model=None):
path = model.get_value_as_string()
if is_urdf_file(path):
self._selected_robot_urdf_file = model.get_value_as_string()
self._enable_load_button()
else:
self._selected_robot_urdf_file = None
carb.log_warn(f"Invalid path to Robot URDF: {path}")
kwargs = {
"label": "Robot URDF",
"default_val": "",
"tooltip": "Click the Folder Icon to Set Filepath",
"use_folder_picker": True,
"item_filter_fn": on_filter_urdf_item,
"folder_dialog_title": "Select Robot URDF file",
"folder_button_title": "Select URDF",
}
self._models["input_robot_urdf_file"] = str_builder(**kwargs)
self._models["input_robot_urdf_file"].add_value_changed_fn(check_urdf_file_type)
# Load the currently selected config files
def on_load_config(model=None, val=None):
self._robot_description_file = self._selected_robot_description_file
self._robot_urdf_file = self._selected_robot_urdf_file
self._refresh_ee_frame_combobox()
self._enable_lula_dropdowns()
self._set_enable_trajectory_panel(False)
self._models["load_config_btn"] = btn_builder(
label="Load Selected Config",
text="Load",
tooltip="Load the selected Lula config files",
on_clicked_fn=on_load_config,
)
# Select End Effector Frame Name
name = "ee_frame"
self._models[name] = DynamicComboBoxModel([])
with ui.HStack():
ui.Label(
"Select End Effector Frame",
width=LABEL_WIDTH,
alignment=ui.Alignment.LEFT_CENTER,
tooltip="End Effector Frame to Use when following a target",
)
self._models[name + "_combobox"] = ui.ComboBox(self._models[name])
add_line_rect_flourish(False)
self._models[name].add_item_changed_fn(self._reset_scenario)
# Button for ignoring IK targets
def on_clicked_fn(use_orientation):
self._test_scenarios.set_use_orientation(use_orientation)
with ui.HStack(width=0):
label = "Use Orientation Targets"
ui.Label(label, width=LABEL_WIDTH - 12, alignment=ui.Alignment.LEFT_TOP)
cb = ui.SimpleBoolModel(default_value=1)
SimpleCheckBox(1, on_clicked_fn, model=cb)
# Button for visualizing end effector
def on_vis_ee_clicked_fn(visualize_ee):
                    self._visualize_end_effector = visualize_ee
if visualize_ee:
self._test_scenarios.visualize_ee_frame(self.articulation, self._get_selected_ee_frame())
else:
self._test_scenarios.stop_visualize_ee_frame()
with ui.HStack(width=0):
label = "Visualize End Effector Pose"
ui.Label(label, width=LABEL_WIDTH - 12, alignment=ui.Alignment.LEFT_TOP)
cb = ui.SimpleBoolModel(default_value=1)
SimpleCheckBox(1, on_vis_ee_clicked_fn, model=cb)
def _build_kinematics_ui(self):
frame = ui.CollapsableFrame(
title="Lula Kinematics Solver",
height=0,
collapsed=True,
enabled=False,
style=get_style(),
style_type_name_override="CollapsableFrame",
horizontal_scrollbar_policy=ui.ScrollBarPolicy.SCROLLBAR_AS_NEEDED,
vertical_scrollbar_policy=ui.ScrollBarPolicy.SCROLLBAR_ALWAYS_ON,
)
self._models["kinematics_frame"] = frame
with frame:
with ui.VStack(style=get_style(), spacing=5, height=0):
def ik_follow_target(model=None):
ee_frame = self._get_selected_ee_frame()
self.articulation.post_reset()
self._test_scenarios.on_ik_follow_target(self.articulation, ee_frame)
self._models["kinematics_follow_target_btn"] = btn_builder(
label="Follow Target",
text="Follow Target",
tooltip="Use IK to follow a target",
on_clicked_fn=ik_follow_target,
)
def _build_trajectory_generation_ui(self):
frame = ui.CollapsableFrame(
title="Lula Trajectory Generator",
height=0,
collapsed=True,
enabled=False,
style=get_style(),
style_type_name_override="CollapsableFrame",
horizontal_scrollbar_policy=ui.ScrollBarPolicy.SCROLLBAR_AS_NEEDED,
vertical_scrollbar_policy=ui.ScrollBarPolicy.SCROLLBAR_ALWAYS_ON,
)
self._models["trajectory_frame"] = frame
with frame:
with ui.VStack(style=get_style(), spacing=5, height=0):
def on_custom_trajectory(model=None, val=None):
self.articulation.post_reset()
self._test_scenarios.on_custom_trajectory(self._robot_description_file, self._robot_urdf_file)
self._set_enable_trajectory_panel(True)
self._models["custom_trajectory_btn"] = btn_builder(
label="Custom Trajectory",
text="Custom Trajectory",
tooltip="Create a basic customizable trajectory and unlock the Custom Trajectory Panel",
on_clicked_fn=on_custom_trajectory,
)
frame = ui.CollapsableFrame(
title="Custom Trajectory Panel",
height=0,
collapsed=True,
enabled=False,
style=get_style(),
style_type_name_override="CollapsableFrame",
horizontal_scrollbar_policy=ui.ScrollBarPolicy.SCROLLBAR_AS_NEEDED,
vertical_scrollbar_policy=ui.ScrollBarPolicy.SCROLLBAR_ALWAYS_ON,
)
self._models["trajectory_panel"] = frame
def follow_trajectory(model=None, val=None):
self._test_scenarios.create_trajectory_controller(self.articulation, self._get_selected_ee_frame())
def on_add_waypoint(model=None, val=None):
self._test_scenarios.add_waypoint()
def on_delete_waypoint(model=None, val=None):
self._test_scenarios.delete_waypoint()
with frame:
with ui.VStack(style=get_style(), spacing=5, height=0):
self._models["follow_trajectory_btn"] = btn_builder(
label="Follow Trajectory",
text="Follow Trajectory",
tooltip="Follow the trajectory shown in front of the robot",
on_clicked_fn=follow_trajectory,
)
self._models["add_trajectory_waypoint_btn"] = btn_builder(
label="Add Waypoint",
text="Add Waypoint",
tooltip="Add waypoint to trajectory",
on_clicked_fn=on_add_waypoint,
)
self._models["remove_trajectory_waypoint_btn"] = btn_builder(
label="Remove Waypoint",
text="Remove Waypoint",
tooltip="Remove waypoint from trajectory",
on_clicked_fn=on_delete_waypoint,
)
def _build_rmpflow_ui(self):
frame = ui.CollapsableFrame(
title="RmpFlow",
height=0,
collapsed=True,
enabled=False,
style=get_style(),
style_type_name_override="CollapsableFrame",
horizontal_scrollbar_policy=ui.ScrollBarPolicy.SCROLLBAR_AS_NEEDED,
vertical_scrollbar_policy=ui.ScrollBarPolicy.SCROLLBAR_ALWAYS_ON,
)
self._models["rmpflow_frame"] = frame
with frame:
with ui.VStack(style=get_style(), spacing=5, height=0):
def check_file_type(model=None):
path = model.get_value_as_string()
if is_yaml_file(path):
self._rmpflow_config_yaml = model.get_value_as_string()
self._set_enable_rmpflow_buttons(True)
else:
self._rmpflow_config_yaml = None
self._set_enable_rmpflow_buttons(False)
carb.log_warn(f"Invalid path to RmpFlow config YAML: {path}")
kwargs = {
"label": "RmpFlow Config YAML",
"default_val": "",
"tooltip": "Click the Folder Icon to Set Filepath",
"use_folder_picker": True,
"item_filter_fn": on_filter_yaml_item,
"folder_dialog_title": "Select RmpFlow config YAML file",
"folder_button_title": "Select YAML",
}
self._models["input_rmp_config_file"] = str_builder(**kwargs)
self._models["input_rmp_config_file"].add_value_changed_fn(check_file_type)
# TODO: remove hard coded line below
# self._rmpflow_config_yaml = (
# "/home/arudich/Desktop/Denso/Cobotta_Pro_900_Assets/cobotta_rmpflow_config_final.yaml"
# )
def toggle_rmpflow_debug_mode(model=None):
self._test_scenarios.toggle_rmpflow_debug_mode()
self._models["rmpflow_debug_mode"] = state_btn_builder(
label="Debugger",
a_text="Debugging Mode",
b_text="Normal Mode",
tooltip="Toggle Debugging Mode",
on_clicked_fn=toggle_rmpflow_debug_mode,
)
######################################################
# Follow Target
######################################################
def rmpflow_follow_target(model=None):
ee_frame = self._get_selected_ee_frame()
rmpflow_config_dict = {
"end_effector_frame_name": ee_frame,
"maximum_substep_size": 0.0034,
"ignore_robot_state_updates": False,
"robot_description_path": self._robot_description_file,
"urdf_path": self._robot_urdf_file,
"rmpflow_config_path": self._rmpflow_config_yaml,
}
self.articulation.post_reset()
self._test_scenarios.on_rmpflow_follow_target_obstacles(self.articulation, **rmpflow_config_dict)
self._models["rmpflow_follow_target_btn"] = btn_builder(
label="Follow Target",
text="Follow Target",
tooltip="Use RmpFlow to follow a target",
on_clicked_fn=rmpflow_follow_target,
)
self._models["rmpflow_follow_target_btn"].enabled = False
#######################################################
# Sinusoidal Target
#######################################################
def rmpflow_follow_sinusoidal_target(model=None):
ee_frame = self._get_selected_ee_frame()
rmpflow_config_dict = {
"end_effector_frame_name": ee_frame,
"maximum_substep_size": 0.0034,
"ignore_robot_state_updates": False,
"robot_description_path": self._robot_description_file,
"urdf_path": self._robot_urdf_file,
"rmpflow_config_path": self._rmpflow_config_yaml,
}
self.articulation.post_reset()
self._test_scenarios.on_rmpflow_follow_sinusoidal_target(self.articulation, **rmpflow_config_dict)
self._models["rmpflow_follow_sinusoid_btn"] = btn_builder(
label="Follow Sinusoid",
text="Follow Sinusoid",
tooltip="Use RmpFlow to follow a rotating sinusoidal target",
on_clicked_fn=rmpflow_follow_sinusoidal_target,
)
self._models["rmpflow_follow_sinusoid_btn"].enabled = False
frame = ui.CollapsableFrame(
title="Sinusoid Parameters",
height=0,
collapsed=True,
enabled=False,
style=get_style(),
style_type_name_override="CollapsableFrame",
horizontal_scrollbar_policy=ui.ScrollBarPolicy.SCROLLBAR_AS_NEEDED,
vertical_scrollbar_policy=ui.ScrollBarPolicy.SCROLLBAR_ALWAYS_ON,
)
self._models["rmpflow_sinusoidal_target_frame"] = frame
with frame:
with ui.VStack(style=get_style(), spacing=5, height=0):
self._models["rmpflow_follow_sinusoid_w_z"] = float_builder(
label="Vertical Wave Frequency",
default_val=0.05,
tooltip="Speed [rad/sec] at which the target makes vertical oscilations",
)
self._models["rmpflow_follow_sinusoid_rad_z"] = float_builder(
label="Vertical Wave Radius", default_val=0.2, tooltip="Height [m] of vertical oscilations"
)
self._models["rmpflow_follow_sinusoid_w_xy"] = float_builder(
label="Z Axis Rotation Frequency",
default_val=0.05,
tooltip="Speed [rad/sec] at which the target makes a full circle about the z axis",
)
self._models["rmpflow_follow_sinusoid_rad_xy"] = float_builder(
label="Distance From Origin",
default_val=0.5,
tooltip="Distance on the XY plane from the origin [m] of the target",
)
self._models["rmpflow_follow_sinusoid_height"] = float_builder(
label="Sinusoid Height", default_val=0.5, tooltip="Average height of target [m]"
)
def _disable_lula_dropdowns(self):
frame_names = ["kinematics_frame", "trajectory_frame", "rmpflow_frame", "trajectory_panel"]
for n in frame_names:
frame = self._models[n]
frame.enabled = False
frame.collapsed = True
def _enable_load_button(self):
self._models["load_config_btn"].enabled = True
def _enable_lula_dropdowns(self):
if self.articulation is None or self._robot_description_file is None or self._robot_urdf_file is None:
return
frame_names = ["kinematics_frame", "trajectory_frame", "rmpflow_frame"]
for n in frame_names:
frame = self._models[n]
frame.enabled = True
self._test_scenarios.scenario_reset()
self._test_scenarios.initialize_ik_solver(self._robot_description_file, self._robot_urdf_file)
if self._visualize_end_effector:
self._test_scenarios.visualize_ee_frame(self.articulation, self._get_selected_ee_frame())
def _set_enable_trajectory_panel(self, enable):
frame = self._models["trajectory_panel"]
frame.enabled = enable
frame.collapsed = not enable
def _set_enable_rmpflow_buttons(self, enable):
self._models["rmpflow_follow_target_btn"].enabled = enable
self._models["rmpflow_follow_sinusoid_btn"].enabled = enable
def _get_selected_ee_frame(self):
name = "ee_frame"
return self._ee_frame_options[self._models[name].get_item_value_model().as_int]
| 34,702 | Python | 40.810843 | 192 | 0.554464 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.lula_test_widget/omni/isaac/lula_test_widget/__init__.py | from .extension import *
| 25 | Python | 11.999994 | 24 | 0.76 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.lula_test_widget/omni/isaac/lula_test_widget/controllers.py | # Copyright (c) 2018-2021, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
from omni.isaac.motion_generation import ArticulationKinematicsSolver, PathPlannerVisualizer, ArticulationTrajectory
from omni.isaac.core.controllers import BaseController
import carb
from omni.isaac.core.utils.types import ArticulationAction
from omni.isaac.core.articulations import Articulation
from omni.isaac.core import objects
from typing import Optional
import numpy as np
class LulaController(BaseController):
def __init__(self):
pass
def forward(
self, target_end_effector_position: np.ndarray, target_end_effector_orientation: Optional[np.ndarray] = None
) -> ArticulationAction:
return
class KinematicsController(LulaController):
def __init__(self, name: str, art_kinematics: ArticulationKinematicsSolver):
BaseController.__init__(self, name)
self._art_kinematics = art_kinematics
def forward(
self, target_end_effector_position: np.ndarray, target_end_effector_orientation: Optional[np.ndarray] = None
) -> ArticulationAction:
action, succ = self._art_kinematics.compute_inverse_kinematics(
target_end_effector_position, target_end_effector_orientation
)
if succ:
return action
else:
carb.log_warn("Failed to compute Inverse Kinematics")
return ArticulationAction()
class TrajectoryController(LulaController):
def __init__(self, name: str, art_trajectory: ArticulationTrajectory):
BaseController.__init__(self, name)
self._art_trajectory = art_trajectory
self._actions = self._art_trajectory.get_action_sequence(1 / 60)
self._action_index = 0
def forward(
self, target_end_effector_position: np.ndarray, target_end_effector_orientation: Optional[np.ndarray] = None
):
if self._action_index == 0:
first_action = self._actions[0]
desired_joint_positions = first_action.joint_positions
robot_articulation = self._art_trajectory.get_robot_articulation()
current_joint_positions = robot_articulation.get_joint_positions()
            # Elementwise comparison with None (numpy): entries the trajectory leaves
            # unspecified fall back to the robot's current joint positions.
            is_none_mask = desired_joint_positions == None
desired_joint_positions[is_none_mask] = current_joint_positions[is_none_mask]
robot_articulation.set_joint_positions(desired_joint_positions)
action = first_action
elif self._action_index >= len(self._actions):
return ArticulationAction(
self._actions[-1].joint_positions, np.zeros_like(self._actions[-1].joint_velocities)
)
else:
action = self._actions[self._action_index]
self._action_index += 1
return action
class PathPlannerController(LulaController):
def __init__(
self,
name: str,
path_planner_visualizer: PathPlannerVisualizer,
cspace_interpolation_max_dist: float = 0.5,
frames_per_waypoint: int = 30,
):
BaseController.__init__(self, name)
self._path_planner_visualizer = path_planner_visualizer
self._path_planner = path_planner_visualizer.get_path_planner()
self._cspace_interpolation_max_dist = cspace_interpolation_max_dist
self._frames_per_waypoint = frames_per_waypoint
self._plan = None
self._frame_counter = 1
def make_new_plan(
self, target_end_effector_position: np.ndarray, target_end_effector_orientation: Optional[np.ndarray] = None
) -> None:
self._path_planner.set_end_effector_target(target_end_effector_position, target_end_effector_orientation)
self._path_planner.update_world()
self._plan = self._path_planner_visualizer.compute_plan_as_articulation_actions(
max_cspace_dist=self._cspace_interpolation_max_dist
)
if self._plan is None or self._plan == []:
carb.log_warn("No plan could be generated to target pose: " + str(target_end_effector_position))
def forward(
self, target_end_effector_position: np.ndarray, target_end_effector_orientation: Optional[np.ndarray] = None
) -> ArticulationAction:
if self._plan is None:
# This will only happen the first time the forward function is used
self.make_new_plan(target_end_effector_position, target_end_effector_orientation)
if len(self._plan) == 0:
# The plan is completed; return null action to remain in place
self._frame_counter = 1
return ArticulationAction()
if self._frame_counter % self._frames_per_waypoint != 0:
# Stop at each waypoint in the plan for self._frames_per_waypoint frames
self._frame_counter += 1
return self._plan[0]
else:
self._frame_counter += 1
return self._plan.pop(0)
def add_obstacle(self, obstacle: objects, static: bool = False) -> None:
self._path_planner.add_obstacle(obstacle, static)
def remove_obstacle(self, obstacle: objects) -> None:
self._path_planner.remove_obstacle(obstacle)
def reset(self) -> None:
# PathPlannerController will make one plan per reset
self._path_planner.reset()
self._plan = None
self._frame_counter = 1
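# Usage sketch added for illustration (an assumption, not part of the original module):
# a KinematicsController could be driven from a physics-step callback roughly as below,
# given an initialized Articulation `robot` and an ArticulationKinematicsSolver `art_ik`
# (for example, one built around a LulaKinematicsSolver).
def _example_drive_with_kinematics_controller(robot, art_ik):
    # Build the controller and compute one action toward a hypothetical target position.
    controller = KinematicsController("ik_controller", art_ik)
    action = controller.forward(np.array([0.4, 0.0, 0.3]))
    # Apply the resulting joint targets through the articulation's controller.
    robot.get_articulation_controller().apply_action(action)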
| 5,686 | Python | 38.220689 | 116 | 0.668484 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.lula_test_widget/docs/CHANGELOG.md | # Changelog
## [0.1.0] - 2023-01-06
### Added
- Initial version of Lula Test Widget
| 87 | Markdown | 9.999999 | 37 | 0.632184 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.lula_test_widget/docs/README.md | # Usage
To enable this extension, go to the Extension Manager menu and enable the omni.isaac.lula_test_widget extension.
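
The extension can also be enabled programmatically, for example from the Script Editor. This is a minimal sketch and assumes the `enable_extension` helper from `omni.isaac.core.utils.extensions` is available in your Isaac Sim install:

```python
from omni.isaac.core.utils.extensions import enable_extension

enable_extension("omni.isaac.lula_test_widget")
```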
| 119 | Markdown | 22.999995 | 108 | 0.789916 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.dynamic_control/PACKAGE-LICENSES/omni.isaac.dynamic_control-LICENSE.md | Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
NVIDIA CORPORATION and its licensors retain all intellectual property
and proprietary rights in and to this software, related documentation
and any modifications thereto. Any use, reproduction, disclosure or
distribution of this software and related documentation without an express
license agreement from NVIDIA CORPORATION is strictly prohibited. | 412 | Markdown | 57.999992 | 74 | 0.839806 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.dynamic_control/config/extension.toml | [core]
reloadable = true
order = 0
[package]
version = "1.2.3"
category = "Simulation"
title = "Isaac Sim Dynamic Control"
description = "Dynamic Control"
authors = ["NVIDIA"]
repository = ""
keywords = ["isaac", "physics"]
changelog = "docs/CHANGELOG.md"
readme = "docs/README.md"
icon = "data/icon.png"
writeTarget.kit = true
[dependencies]
"omni.physx" = {}
"omni.timeline" = {} # Needed for simulation to occur
"omni.kit.numpy.common" = {}
"omni.usd.libs" = {}
"omni.kit.commands" = {}
"omni.kit.pip_archive" = {} # pulls in numpy
"omni.kit.test" = {}
[[python.module]]
name = "omni.isaac.dynamic_control"
[[python.module]]
name = "omni.isaac.dynamic_control.tests"
[[native.plugin]]
path = "bin/*.plugin"
recursive = false
| 735 | TOML | 18.368421 | 53 | 0.672109 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.dynamic_control/omni/isaac/dynamic_control/__init__.py | # Copyright (c) 2018-2021, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
from .scripts import *
| 457 | Python | 40.63636 | 76 | 0.803063 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.dynamic_control/omni/isaac/dynamic_control/scripts/extension.py | # Copyright (c) 2018-2021, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
import omni.ext
import omni.kit.commands
import gc
from .. import _dynamic_control
EXTENSION_NAME = "Dynamic Control"
class Extension(omni.ext.IExt):
def on_startup(self):
self._dc = _dynamic_control.acquire_dynamic_control_interface()
def on_shutdown(self):
_dynamic_control.release_dynamic_control_interface(self._dc)
gc.collect()
| 803 | Python | 31.159999 | 76 | 0.759651 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.dynamic_control/omni/isaac/dynamic_control/scripts/__init__.py | # Copyright (c) 2018-2021, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
from .extension import *
from . import utils as utils
from . import conversions as conversions
| 529 | Python | 39.769228 | 76 | 0.803403 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.dynamic_control/omni/isaac/dynamic_control/scripts/utils.py | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
from pxr import Usd
def set_scene_physics_type(gpu=False, scene_path="/physicsScene"):
import omni
from pxr import PhysxSchema, UsdPhysics, UsdGeom, Gf
stage = omni.usd.get_context().get_stage()
scene = stage.GetPrimAtPath(scene_path)
if not scene:
scene = UsdPhysics.Scene.Define(stage, scene_path)
scene.CreateGravityDirectionAttr().Set(Gf.Vec3f(0.0, 0.0, -1.0))
scene.CreateGravityMagnitudeAttr().Set(9.81 / UsdGeom.GetStageMetersPerUnit(stage))
physxSceneAPI = PhysxSchema.PhysxSceneAPI.Get(stage, scene_path)
if physxSceneAPI.GetEnableCCDAttr().HasValue():
physxSceneAPI.GetEnableCCDAttr().Set(True)
else:
physxSceneAPI.CreateEnableCCDAttr(True)
if physxSceneAPI.GetEnableStabilizationAttr().HasValue():
physxSceneAPI.GetEnableStabilizationAttr().Set(True)
else:
physxSceneAPI.CreateEnableStabilizationAttr(True)
if physxSceneAPI.GetSolverTypeAttr().HasValue():
physxSceneAPI.GetSolverTypeAttr().Set("TGS")
else:
physxSceneAPI.CreateSolverTypeAttr("TGS")
if not physxSceneAPI.GetEnableGPUDynamicsAttr().HasValue():
physxSceneAPI.CreateEnableGPUDynamicsAttr(False)
if not physxSceneAPI.GetBroadphaseTypeAttr().HasValue():
physxSceneAPI.CreateBroadphaseTypeAttr("MBP")
if gpu:
physxSceneAPI.GetEnableGPUDynamicsAttr().Set(True)
physxSceneAPI.GetBroadphaseTypeAttr().Set("GPU")
else:
physxSceneAPI.GetEnableGPUDynamicsAttr().Set(False)
physxSceneAPI.GetBroadphaseTypeAttr().Set("MBP")
def set_physics_frequency(frequency=60):
import carb
carb.settings.get_settings().set_bool("/app/runLoops/main/rateLimitEnabled", True)
carb.settings.get_settings().set_int("/app/runLoops/main/rateLimitFrequency", int(frequency))
carb.settings.get_settings().set_int("/persistent/simulation/minFrameRate", int(frequency))
async def simulate(seconds, dc=None, art=None, steps_per_sec=60):
import omni
for frame in range(int(steps_per_sec * seconds)):
if art is not None and dc is not None:
dc.wake_up_articulation(art)
await omni.kit.app.get_app().next_update_async()
async def add_cube(stage, path, size, offset, physics=True, mass=0.0) -> Usd.Prim:
import omni
from pxr import UsdGeom, UsdPhysics
cube_geom = UsdGeom.Cube.Define(stage, path)
cube_prim = stage.GetPrimAtPath(path)
cube_geom.CreateSizeAttr(size)
cube_geom.AddTranslateOp().Set(offset)
await omni.kit.app.get_app().next_update_async() # Need this to avoid flatcache errors
if physics:
rigid_api = UsdPhysics.RigidBodyAPI.Apply(cube_prim)
rigid_api.CreateRigidBodyEnabledAttr(True)
if mass > 0:
mass_api = UsdPhysics.MassAPI.Apply(cube_prim)
mass_api.CreateMassAttr(mass)
UsdPhysics.CollisionAPI.Apply(cube_prim)
await omni.kit.app.get_app().next_update_async()
return cube_prim
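# Usage sketch added for illustration (an assumption, not part of the original module):
# a typical async test might combine these helpers roughly as follows. The stage handle,
# prim path, and cube dimensions are placeholders chosen for the example.
async def _example_setup_and_simulate():
    import omni
    from pxr import Gf
    stage = omni.usd.get_context().get_stage()
    # Configure the physics scene for CPU dynamics and a 60 Hz update rate.
    set_scene_physics_type(gpu=False)
    set_physics_frequency(60)
    # Drop a small dynamic cube slightly above the ground and let it settle for one second.
    await add_cube(stage, "/World/ExampleCube", 0.1, Gf.Vec3d(0, 0, 0.5), physics=True, mass=0.1)
    await simulate(1.0)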
| 3,406 | Python | 37.280898 | 97 | 0.719612 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.dynamic_control/omni/isaac/dynamic_control/scripts/conversions.py | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
from pxr import Gf
from omni.isaac.dynamic_control import _dynamic_control
def _vec3d_quatd_to_dctransform(translation: Gf.Vec3d, quat: Gf.Quatd) -> _dynamic_control.Transform:
pose_t = (translation[0], translation[1], translation[2])
pose_r = (quat.GetImaginary()[0], quat.GetImaginary()[1], quat.GetImaginary()[2], quat.GetReal())
return _dynamic_control.Transform(pose_t, pose_r)
def create_transform(translation, rotation) -> _dynamic_control.Transform:
if isinstance(rotation, Gf.Rotation):
return _vec3d_quatd_to_dctransform(translation, rotation.GetQuat())
if isinstance(rotation, Gf.Quatd):
return _vec3d_quatd_to_dctransform(translation, rotation)
def create_transform_from_mat(mat: Gf.Matrix4d) -> _dynamic_control.Transform:
trans = mat.ExtractTranslation()
q = mat.ExtractRotation().GetQuaternion()
(q_x, q_y, q_z) = q.GetImaginary()
quat = [q_x, q_y, q_z, q.GetReal()]
tr = _dynamic_control.Transform()
tr.p = trans
tr.r = quat
return tr
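# Usage sketch added for illustration (an assumption, not part of the original module):
# building dynamic_control Transforms from Gf types.
def _example_transforms():
    # From a translation plus either a Gf.Quatd or a Gf.Rotation.
    tr_a = create_transform(Gf.Vec3d(0.1, 0.0, 0.3), Gf.Quatd(1.0, Gf.Vec3d(0.0, 0.0, 0.0)))
    tr_b = create_transform(Gf.Vec3d(0.0, 0.0, 0.0), Gf.Rotation(Gf.Vec3d(0, 0, 1), 90.0))
    # From a full 4x4 transform matrix.
    tr_c = create_transform_from_mat(Gf.Matrix4d().SetTranslate(Gf.Vec3d(0.5, 0.0, 0.0)))
    return tr_a, tr_b, tr_c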
| 1,454 | Python | 39.416666 | 101 | 0.72696 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.dynamic_control/omni/isaac/dynamic_control/tests/common.py | # Copyright (c) 2018-2021, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
from pxr import Usd
import omni
import typing
import carb
import json
import omni.client
from omni.client._omniclient import Result
def build_server_list() -> typing.List:
"""Return list with all known servers to check
Returns:
all_servers (typing.List): List of servers found
"""
mounted_drives = carb.settings.get_settings().get_settings_dictionary("/persistent/app/omniverse/mountedDrives")
all_servers = []
if mounted_drives is not None:
mounted_dict = json.loads(mounted_drives.get_dict())
for drive in mounted_dict.items():
all_servers.append(drive[1])
else:
carb.log_warn("/persistent/app/omniverse/mountedDrives setting not found")
return all_servers
def check_server(server: str, path: str) -> bool:
"""Check a specific server for a path
Args:
server (str): Name of Nucleus server
path (str): Path to search
Returns:
bool: True if folder is found
"""
carb.log_info("Checking path: {}{}".format(server, path))
# Increase hang detection timeout
omni.client.set_hang_detection_time_ms(10000)
result, _ = omni.client.stat("{}{}".format(server, path))
if result == Result.OK:
carb.log_info("Success: {}{}".format(server, path))
return True
else:
carb.log_info("Failure: {}{} not accessible".format(server, path))
return False
def get_assets_root_path() -> typing.Union[str, None]:
"""Tries to find the root path to the Isaac Sim assets on a Nucleus server
Returns:
url (str): URL of Nucleus server with root path to assets folder.
Returns None if Nucleus server not found.
"""
# 1 - Check /persistent/isaac/asset_root/default setting
carb.log_info("Check /persistent/isaac/asset_root/default setting")
default_asset_root = carb.settings.get_settings().get("/persistent/isaac/asset_root/default")
if default_asset_root:
result = check_server(default_asset_root, "/Isaac")
if result:
result = check_server(default_asset_root, "/NVIDIA")
if result:
carb.log_info("Assets root found at {}".format(default_asset_root))
return default_asset_root
# 2 - Check root on mountedDrives setting
connected_servers = build_server_list()
if len(connected_servers):
for server_name in connected_servers:
# carb.log_info("Found {}".format(server_name))
result = check_server(server_name, "/Isaac")
if result:
result = check_server(server_name, "/NVIDIA")
if result:
carb.log_info("Assets root found at {}".format(server_name))
return server_name
# 3 - Check cloud for /Assets/Isaac/{version_major}.{version_minor} folder
cloud_assets_url = carb.settings.get_settings().get("/persistent/isaac/asset_root/cloud")
carb.log_info("Checking {}...".format(cloud_assets_url))
if cloud_assets_url:
result = check_server(cloud_assets_url, "/Isaac")
if result:
result = check_server(cloud_assets_url, "/NVIDIA")
if result:
carb.log_info("Assets root found at {}".format(cloud_assets_url))
return cloud_assets_url
carb.log_warn("Could not find assets root folder")
return None
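# Hedged example (added): a minimal sketch of how the tests in this extension combine
# get_assets_root_path() with an asset-relative path. The Franka USD path below is the
# one used by the tests and is illustrative only.
def _example_resolve_franka_usd() -> typing.Union[str, None]:
    assets_root_path = get_assets_root_path()
    if assets_root_path is None:
        carb.log_error("Could not find Isaac Sim assets folder")
        return None
    return assets_root_path + "/Isaac/Robots/Franka/franka.usd"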
async def open_stage_async(usd_path: str) -> typing.Tuple:
    """
    Open the given usd file and replace the currently opened stage
    Args:
        usd_path (str): Path to open
    Returns:
        (result, error): Tuple returned by omni.usd's open_stage_async
    """
if not Usd.Stage.IsSupportedFile(usd_path):
raise ValueError("Only USD files can be loaded with this method")
usd_context = omni.usd.get_context()
usd_context.disable_save_to_recent_files()
(result, error) = await omni.usd.get_context().open_stage_async(usd_path)
usd_context.enable_save_to_recent_files()
return (result, error)
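# Hedged example (added): a minimal sketch of calling open_stage_async() from an async
# test body. "usd_path" is a placeholder argument, not a path defined in this module.
async def _example_open_stage(usd_path: str) -> bool:
    (result, error) = await open_stage_async(usd_path)
    if not result:
        carb.log_error("Failed to open stage: {}".format(error))
    return result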
| 4,352 | Python | 35.88983 | 116 | 0.653263 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.dynamic_control/omni/isaac/dynamic_control/tests/test_core.py | # Copyright (c) 2018-2021, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
import omni.kit.test
import omni.usd
from omni.isaac.dynamic_control import _dynamic_control
from .common import get_assets_root_path
from pxr import Sdf
import carb
import asyncio
# Having a test class derived from omni.kit.test.AsyncTestCase declared at the root of a module will make it auto-discoverable by omni.kit.test
class TestCore(omni.kit.test.AsyncTestCase):
# Before running each test
async def setUp(self):
self._dc = _dynamic_control.acquire_dynamic_control_interface()
self._timeline = omni.timeline.get_timeline_interface()
await omni.kit.app.get_app().next_update_async()
await omni.usd.get_context().new_stage_async()
await omni.kit.app.get_app().next_update_async()
pass
# After running each test
async def tearDown(self):
self._timeline.stop()
while omni.usd.get_context().get_stage_loading_status()[2] > 0:
print("tearDown, assets still loading, waiting to finish...")
await asyncio.sleep(1.0)
await omni.kit.app.get_app().next_update_async()
pass
async def test_is_simulating(self):
await omni.kit.app.get_app().next_update_async()
self.assertFalse(self._dc.is_simulating())
# Start Simulation and wait
self._timeline.play()
await omni.kit.app.get_app().next_update_async()
self.assertTrue(self._dc.is_simulating())
self._timeline.stop()
await omni.kit.app.get_app().next_update_async()
self.assertFalse(self._dc.is_simulating())
await omni.kit.app.get_app().next_update_async()
pass
async def test_print(self):
t = _dynamic_control.Transform((1, 2, 3), (1, 2, 3, 4))
v = _dynamic_control.Velocity((1, 2, 3), (4, 5, 6))
self.assertEqual("(1, 2, 3), (1, 2, 3, 4)", str(t))
self.assertEqual("(1, 2, 3), (4, 5, 6)", str(v))
self.assertEqual("(1, 2, 3), (1, 2, 3, 4), (1, 2, 3), (4, 5, 6)", str(_dynamic_control.RigidBodyState(t, v)))
self.assertEqual("(1, 2, 3)", str(_dynamic_control.DofState(1, 2, 3)))
async def test_delete(self):
self._assets_root_path = get_assets_root_path()
if self._assets_root_path is None:
carb.log_error("Could not find Isaac Sim assets folder")
return
await omni.kit.app.get_app().next_update_async()
self._stage = omni.usd.get_context().get_stage()
await omni.kit.app.get_app().next_update_async()
prim_a = self._stage.DefinePrim("/World/Franka_1", "Xform")
prim_a.GetReferences().AddReference(self._assets_root_path + "/Isaac/Robots/Franka/franka.usd")
prim_b = self._stage.DefinePrim("/World/Franka_2", "Xform")
prim_b.GetReferences().AddReference(self._assets_root_path + "/Isaac/Robots/Franka/franka.usd")
self._timeline.play()
await omni.kit.app.get_app().next_update_async()
self._handle = self._dc.get_articulation("/World/Franka_1")
await omni.kit.app.get_app().next_update_async()
with Sdf.ChangeBlock():
omni.usd.commands.DeletePrimsCommand(["/World/Franka_1"]).do()
omni.usd.commands.DeletePrimsCommand(["/World/Franka_2"]).do()
await omni.kit.app.get_app().next_update_async()
| 3,714 | Python | 44.864197 | 142 | 0.650512 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.dynamic_control/omni/isaac/dynamic_control/tests/test_articulation_simple.py | # Copyright (c) 2018-2021, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
import omni.kit.test
import math
import carb # carb data types are used as return values, need this
import numpy as np
from pxr import Gf, UsdPhysics, Sdf
import omni.physx as _physx
import asyncio
from omni.isaac.dynamic_control import _dynamic_control
from omni.isaac.dynamic_control import utils as dc_utils
from omni.isaac.dynamic_control import conversions as dc_conversions
# from omni.isaac.core.utils.nucleus import get_assets_root_path
from .common import get_assets_root_path
from .common import open_stage_async
# Having a test class derived from omni.kit.test.AsyncTestCase declared at the root of a module will make it auto-discoverable by omni.kit.test
class TestArticulationSimple(omni.kit.test.AsyncTestCase):
# Before running each test
async def setUp(self):
self._dc = _dynamic_control.acquire_dynamic_control_interface()
self._physx_interface = omni.physx.acquire_physx_interface()
self._timeline = omni.timeline.get_timeline_interface()
ext_manager = omni.kit.app.get_app().get_extension_manager()
ext_id = ext_manager.get_enabled_extension_id("omni.isaac.dynamic_control")
self._extension_path = ext_manager.get_extension_path(ext_id)
self._assets_root_path = get_assets_root_path()
await omni.kit.app.get_app().next_update_async()
# open remote
self.usd_path = self._assets_root_path + "/Isaac/Robots/Simple/simple_articulation.usd"
(result, error) = await open_stage_async(self.usd_path)
await omni.kit.app.get_app().next_update_async()
self.assertTrue(result) # Make sure the stage loaded
self._stage = omni.usd.get_context().get_stage()
dc_utils.set_physics_frequency(60) # set this after loading
pass
# After running each test
async def tearDown(self):
self._timeline.stop()
while omni.usd.get_context().get_stage_loading_status()[2] > 0:
print("tearDown, assets still loading, waiting to finish...")
await asyncio.sleep(1.0)
await omni.kit.app.get_app().next_update_async()
pass
async def test_load(self, gpu=False):
dc_utils.set_scene_physics_type(gpu)
self._timeline.play()
await omni.kit.app.get_app().next_update_async()
        # Check the object type to make sure it's an articulation
obj_type = self._dc.peek_object_type("/Articulation")
self.assertEqual(obj_type, _dynamic_control.ObjectType.OBJECT_ARTICULATION)
        # Get handle to articulation and make sure it's valid
art = self._dc.get_articulation("/Articulation")
self.assertNotEqual(art, _dynamic_control.INVALID_HANDLE)
        # Use articulation handle to do something and make sure it works
dof_states = self._dc.get_articulation_dof_states(art, _dynamic_control.STATE_ALL)
self.assertTrue(dof_states is not None)
pass
async def test_non_sim(self, gpu=False):
dc_utils.set_scene_physics_type(gpu)
# Articulation should be invalid as sim has not started
obj_type = self._dc.peek_object_type("/Articulation")
self.assertEqual(obj_type, _dynamic_control.ObjectType.OBJECT_NONE)
art = self._dc.get_articulation("/Articulation")
self.assertEqual(art, _dynamic_control.INVALID_HANDLE)
        # force physics to load so that some information becomes valid
self._physx_interface.force_load_physics_from_usd()
obj_type = self._dc.peek_object_type("/Articulation")
self.assertEqual(obj_type, _dynamic_control.ObjectType.OBJECT_ARTICULATION)
art = self._dc.get_articulation("/Articulation")
self.assertNotEqual(art, _dynamic_control.INVALID_HANDLE)
rb = self._dc.get_rigid_body("/Articulation/Arm")
self.assertNotEqual(rb, _dynamic_control.INVALID_HANDLE)
# Not Implemented yet
# joint = self._dc.get_joint("/Articulation/Arm/RevoluteJoint")
# self.assertNotEqual(joint, _dynamic_control.INVALID_HANDLE)
# dof = self._dc.get_dof("/Articulation/Arm/RevoluteJoint")
# self.assertNotEqual(joint, _dynamic_control.INVALID_HANDLE)
        self.assertEqual(
            self._dc.peek_object_type("/Articulation/Arm/RevoluteJoint"), _dynamic_control.ObjectType.OBJECT_JOINT
        )
# Dof states will still be none
dof_states = self._dc.get_articulation_dof_states(art, _dynamic_control.STATE_ALL)
self.assertTrue(dof_states is None)
dof_props = self._dc.get_articulation_dof_properties(art)
self.assertTrue(dof_props is None)
async def test_physics_manual(self, gpu=False):
dc_utils.set_scene_physics_type(gpu)
self._physx_interface.start_simulation()
self._physx_interface.force_load_physics_from_usd()
dt = 1.0 / 60.0
# manually load and step physics
self._physx_interface.update_simulation(dt, 0)
art = self._dc.get_articulation("/Articulation")
dof_ptr = self._dc.find_articulation_dof(art, "RevoluteJoint")
slider_body = self._dc.find_articulation_body(art, "Slider")
props = self._dc.get_articulation_dof_properties(art)
num_dofs = self._dc.get_articulation_dof_count(art)
# drive in velocity mode for one second
for i in range(num_dofs):
props[i]["stiffness"] = 0
props[i]["damping"] = 1e15
self._dc.set_articulation_dof_properties(art, props)
new_state = [math.radians(45), 0]
self._dc.set_articulation_dof_velocity_targets(art, new_state)
for frame in range(0, 60):
self._physx_interface.update_simulation(dt, frame * dt)
state = self._dc.get_dof_state(dof_ptr, _dynamic_control.STATE_ALL)
# print(state)
new_pose = self._dc.get_rigid_body_pose(slider_body)
# after one second it should reach this pose
self.assertAlmostEqual(state.pos, math.radians(45), delta=1e-4, msg=f"{state.pos} != {math.radians(45)}")
self.assertAlmostEqual(state.vel, math.radians(45), delta=1e-4, msg=f"{state.vel} != {math.radians(45)}")
self.assertTrue(
np.allclose([new_pose.p.x, new_pose.p.y, new_pose.p.z], [1.06778, 1.06781, 0], atol=1e-2), f"{new_pose.p}"
)
self._physx_interface.update_transformations(
updateToFastCache=False, updateToUsd=True, updateVelocitiesToUsd=True, outputVelocitiesLocalSpace=False
)
self._physx_interface.reset_simulation()
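    # Hedged note (added): the drive-gain convention used throughout these tests for
    # PhysX articulation DOF drives is:
    #   position drive -> high stiffness and high damping
    #   velocity drive -> zero stiffness and high damping
    #   effort drive   -> zero stiffness and zero damping
    # The helper below is only an illustrative sketch of the velocity-drive setup used
    # in test_physics_manual above; it is not called by the tests.
    def _example_configure_velocity_drive(self, art):
        props = self._dc.get_articulation_dof_properties(art)
        for i in range(self._dc.get_articulation_dof_count(art)):
            props[i]["stiffness"] = 0
            props[i]["damping"] = 1e15
        self._dc.set_articulation_dof_properties(art, props)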
def call_all_articulation_apis(self, art, joint, dof):
self._dc.wake_up_articulation(art)
self._dc.get_articulation_name(art)
self._dc.get_articulation_path(art)
self._dc.get_articulation_body_count(art)
self._dc.get_articulation_body(art, 0)
self._dc.get_articulation_body(art, 100)
self._dc.find_articulation_body(art, "Arm")
self._dc.find_articulation_body(art, "DoesntExist")
self._dc.get_articulation_root_body(art)
self._dc.get_articulation_body_states(art, _dynamic_control.STATE_ALL)
self._dc.get_articulation_properties(art)
self._dc.set_articulation_properties(art, _dynamic_control.ArticulationProperties())
self._dc.get_articulation_joint_count(art)
self._dc.get_articulation_joint(art, 0)
self._dc.get_articulation_joint(art, 100)
self._dc.find_articulation_joint(art, "RevoluteJoint")
self._dc.get_articulation_dof_count(art)
self._dc.get_articulation_dof(art, 0)
self._dc.get_articulation_dof(art, 100)
self._dc.find_articulation_dof(art, "RevoluteJoint")
self._dc.find_articulation_dof(art, "DoesntExist")
self._dc.find_articulation_dof_index(art, "RevoluteJoint")
self._dc.find_articulation_dof_index(art, "DoesntExist")
self._dc.get_articulation_dof_properties(art)
self._dc.set_articulation_dof_properties(art, [])
self._dc.get_articulation_dof_states(art, _dynamic_control.STATE_ALL)
self._dc.set_articulation_dof_states(art, [], _dynamic_control.STATE_ALL)
self._dc.set_articulation_dof_position_targets(art, [])
self._dc.get_articulation_dof_position_targets(art)
self._dc.set_articulation_dof_velocity_targets(art, [])
self._dc.get_articulation_dof_velocity_targets(art)
self._dc.set_articulation_dof_efforts(art, [])
self._dc.get_articulation_dof_efforts(art)
self._dc.get_articulation_dof_masses(art)
self._dc.get_joint_name(joint)
self._dc.get_joint_path(joint)
self._dc.get_joint_type(joint)
self._dc.get_joint_dof_count(joint)
self._dc.get_joint_dof(joint, 0)
self._dc.get_joint_dof(joint, 100)
self._dc.get_joint_parent_body(joint)
self._dc.get_joint_child_body(joint)
self._dc.get_dof_name(dof)
self._dc.get_dof_path(dof)
self._dc.get_dof_type(dof)
self._dc.get_dof_joint(dof)
self._dc.get_dof_parent_body(dof)
self._dc.get_dof_child_body(dof)
self._dc.get_dof_state(dof, _dynamic_control.STATE_ALL)
self._dc.set_dof_state(dof, _dynamic_control.DofState(), _dynamic_control.STATE_ALL)
self._dc.get_dof_position(dof)
self._dc.set_dof_position(dof, 0)
self._dc.get_dof_velocity(dof)
self._dc.set_dof_velocity(dof, 0)
self._dc.get_dof_properties(dof)
self._dc.set_dof_properties(dof, _dynamic_control.DofProperties())
self._dc.set_dof_position_target(dof, 0)
self._dc.set_dof_velocity_target(dof, 0)
self._dc.get_dof_position_target(dof)
self._dc.get_dof_velocity_target(dof)
self._dc.set_dof_effort(dof, 0)
self._dc.get_dof_effort(dof)
async def test_start_stop(self, gpu=False):
dc_utils.set_scene_physics_type(gpu)
# Start Simulation and wait
self._timeline.play()
await omni.kit.app.get_app().next_update_async()
# get articulation handle
art = self._dc.get_articulation("/Articulation")
joint = self._dc.find_articulation_joint(art, "RevoluteJoint")
dof = self._dc.find_articulation_dof(art, "RevoluteJoint")
self.assertNotEqual(art, _dynamic_control.INVALID_HANDLE)
self.call_all_articulation_apis(art, joint, dof)
# make sure handle is still valid after a stop/play
self._timeline.stop()
await omni.kit.app.get_app().next_update_async()
self.call_all_articulation_apis(art, joint, dof)
# getting this while stopped should fail
dof_states = self._dc.get_articulation_dof_states(art, _dynamic_control.STATE_ALL)
self.assertTrue(dof_states is None)
self._timeline.play()
await omni.kit.app.get_app().next_update_async()
dof_states = self._dc.get_articulation_dof_states(art, _dynamic_control.STATE_ALL)
self.assertTrue(dof_states is not None)
async def test_delete_joint(self, gpu=False):
dc_utils.set_scene_physics_type(gpu)
self._timeline.play()
await omni.kit.app.get_app().next_update_async()
art = self._dc.get_articulation("/Articulation")
self.assertNotEqual(art, _dynamic_control.INVALID_HANDLE)
root_body = self._dc.get_articulation_root_body(art)
new_pose = self._dc.get_rigid_body_pose(root_body)
self.assertAlmostEqual(new_pose.p.z, 0, msg=f"new_pose.p.z = {new_pose.p.z}")
# test to make sure articulation falls when joint is deleted.
self._timeline.stop()
await omni.kit.app.get_app().next_update_async()
omni.usd.commands.DeletePrimsCommand(["/Articulation/CenterPivot/FixedJoint"]).do()
# Start Simulation and wait
self._timeline.play()
await omni.kit.app.get_app().next_update_async()
await dc_utils.simulate(0.1, self._dc, art)
new_pose = self._dc.get_rigid_body_pose(root_body)
self.assertAlmostEqual(new_pose.p.z, -0.076222, delta=0.02, msg=f"new_pose.p.z = {new_pose.p.z}")
pass
async def test_disable_joint(self, gpu=False):
dc_utils.set_scene_physics_type(gpu)
self._timeline.play()
await omni.kit.app.get_app().next_update_async()
art = self._dc.get_articulation("/Articulation")
self.assertNotEqual(art, _dynamic_control.INVALID_HANDLE)
root_body = self._dc.get_articulation_root_body(art)
new_pose = self._dc.get_rigid_body_pose(root_body)
self.assertAlmostEqual(new_pose.p.z, 0, msg=f"new_pose.p.z = {new_pose.p.z}")
# test to make sure articulation falls when joint is disabled.
self._timeline.stop()
await omni.kit.app.get_app().next_update_async()
omni.kit.commands.execute(
"ChangeProperty",
prop_path=Sdf.Path("/Articulation/CenterPivot/FixedJoint.physics:jointEnabled"),
value=False,
prev=None,
)
# Start Simulation and wait
self._timeline.play()
await omni.kit.app.get_app().next_update_async()
await dc_utils.simulate(0.1, self._dc, art)
new_pose = self._dc.get_rigid_body_pose(root_body)
self.assertAlmostEqual(new_pose.p.z, -0.076222, delta=0.02, msg=f"new_pose.p.z = {new_pose.p.z}")
pass
async def test_root_transform(self, gpu=False):
dc_utils.set_scene_physics_type(gpu)
# Start Simulation and wait
self._timeline.play()
await omni.kit.app.get_app().next_update_async()
art = self._dc.get_articulation("/Articulation")
self.assertNotEqual(art, _dynamic_control.INVALID_HANDLE)
root_body = self._dc.get_articulation_root_body(art)
pivot_body = self._dc.find_articulation_body(art, "CenterPivot")
new_pose = dc_conversions.create_transform(Gf.Vec3d(0.100, 0.200, 0.030), Gf.Rotation(Gf.Vec3d(0, 0, 1), 0))
self._dc.set_rigid_body_pose(root_body, new_pose)
await omni.kit.app.get_app().next_update_async()
arm_body = self._dc.find_articulation_body(art, "Arm")
# Check the arm body pose
self.assertEqual(
self._dc.get_rigid_body_pose(arm_body), _dynamic_control.Transform((0.60, 0.20, 0.03), (0, 0, 0, 1))
)
# Move the body that corresponds to the root, should act the same as above
new_pose = dc_conversions.create_transform(Gf.Vec3d(-0.100, 0.200, 0.030), Gf.Rotation(Gf.Vec3d(0, 0, 1), 0))
self._dc.set_rigid_body_pose(pivot_body, new_pose)
await omni.kit.app.get_app().next_update_async()
self.assertEqual(
self._dc.get_rigid_body_pose(arm_body), _dynamic_control.Transform((0.40, 0.20, 0.03), (0, 0, 0, 1))
)
# Rotate the body in place by 45 degrees, x,y of pose should be the same
new_pose = dc_conversions.create_transform(Gf.Vec3d(0, 0, 0), Gf.Rotation(Gf.Vec3d(0, 0, 1), 45))
self._dc.set_rigid_body_pose(pivot_body, new_pose)
await omni.kit.app.get_app().next_update_async()
new_pose = self._dc.get_rigid_body_pose(arm_body)
self.assertTrue(
np.allclose([new_pose.p.x, new_pose.p.y, new_pose.p.z], [0.3535535, 0.3535535, 0], atol=1e-5),
f"{new_pose.p}",
)
### This will fail as expected because its not a root link
# body = self._dc.find_articulation_body(art, "Arm")
# new_pose = dc_conversions.create_transform(Gf.Vec3d(10.0, 20.0, 3.0), Gf.Rotation(Gf.Vec3d(0, 0, 1), 90))
# self._dc.set_rigid_body_pose(body, new_pose)
pass
async def test_root_velocity(self, gpu=False):
dc_utils.set_scene_physics_type(gpu)
self._physics_scene = UsdPhysics.Scene(self._stage.GetPrimAtPath("/physicsScene"))
self._physics_scene.CreateGravityDirectionAttr().Set(Gf.Vec3f(0.0, 0.0, 0.0))
self._physics_scene.CreateGravityMagnitudeAttr().Set(0.0)
delete_cmd = omni.usd.commands.DeletePrimsCommand(["/Articulation/CenterPivot/FixedJoint"])
delete_cmd.do()
self._timeline.play()
await omni.kit.app.get_app().next_update_async()
art = self._dc.get_articulation("/Articulation")
self.assertNotEqual(art, _dynamic_control.INVALID_HANDLE)
root_body = self._dc.get_articulation_root_body(art)
pivot_body = self._dc.find_articulation_body(art, "CenterPivot")
self._dc.set_rigid_body_linear_velocity(root_body, (10, 0, 0))
await dc_utils.simulate(0.1, self._dc, art)
lin_vel = self._dc.get_rigid_body_linear_velocity(pivot_body)
self.assertAlmostEqual(lin_vel.x, 10, delta=1e-3, msg=f"lin_vel.x = {lin_vel.x}")
self._dc.set_rigid_body_linear_velocity(root_body, (0, 0, 0))
await dc_utils.simulate(0.1, self._dc, art)
lin_vel = self._dc.get_rigid_body_linear_velocity(pivot_body)
self.assertAlmostEqual(lin_vel.x, 0, delta=1e-3, msg=f"lin_vel.x = {lin_vel.x}")
self._dc.set_rigid_body_angular_velocity(root_body, (10, 0, 0))
await dc_utils.simulate(0.1, self._dc, art)
ang_vel = self._dc.get_rigid_body_angular_velocity(pivot_body)
self.assertTrue(np.allclose([ang_vel.x, ang_vel.y, ang_vel.z], [10, 0, 0], atol=1e-5), f"{ang_vel}")
pass
async def test_get_articulation_dof_states(self, gpu=False):
dc_utils.set_scene_physics_type(gpu)
self._physics_scene = UsdPhysics.Scene(self._stage.GetPrimAtPath("/physicsScene"))
# set gravity sideways to force articulation to have state
self._physics_scene.CreateGravityDirectionAttr().Set(Gf.Vec3f(0.0, 1.0, 0.0))
self._physics_scene.CreateGravityMagnitudeAttr().Set(9.81)
# Start Simulation and wait
self._timeline.play()
await omni.kit.app.get_app().next_update_async()
# get articulation handle
art = self._dc.get_articulation("/Articulation")
self.assertNotEqual(art, _dynamic_control.INVALID_HANDLE)
await dc_utils.simulate(0.1, self._dc, art)
state = self._dc.get_articulation_dof_states(art, _dynamic_control.STATE_NONE)
zeros = np.zeros(2, dtype=np.float32)
self.assertTrue(np.array_equal(state["pos"], zeros), f'{state["pos"]}')
self.assertTrue(np.array_equal(state["vel"], zeros), f'{state["vel"]}')
self.assertTrue(np.array_equal(state["effort"], zeros), f'{state["effort"]}')
state = self._dc.get_articulation_dof_states(art, _dynamic_control.STATE_POS)
self.assertFalse(np.array_equal(state["pos"], zeros), f'{state["pos"]}')
self.assertTrue(np.array_equal(state["vel"], zeros), f'{state["vel"]}')
self.assertTrue(np.array_equal(state["effort"], zeros), f'{state["effort"]}')
state = self._dc.get_articulation_dof_states(art, _dynamic_control.STATE_VEL)
self.assertTrue(np.array_equal(state["pos"], zeros), f'{state["pos"]}')
self.assertFalse(np.array_equal(state["vel"], zeros), f'{state["vel"]}')
self.assertTrue(np.array_equal(state["effort"], zeros), f'{state["effort"]}')
state = self._dc.get_articulation_dof_states(art, _dynamic_control.STATE_EFFORT)
self.assertTrue(np.array_equal(state["pos"], zeros), f'{state["pos"]}')
self.assertTrue(np.array_equal(state["vel"], zeros), f'{state["vel"]}')
self.assertFalse(np.array_equal(state["effort"], zeros), f'{state["effort"]}')
state = self._dc.get_articulation_dof_states(art, _dynamic_control.STATE_ALL)
self.assertFalse(np.array_equal(state["pos"], zeros), f'{state["pos"]}')
self.assertFalse(np.array_equal(state["vel"], zeros), f'{state["vel"]}')
self.assertFalse(np.array_equal(state["effort"], zeros), f'{state["effort"]}')
async def test_set_articulation_dof_states(self, gpu=False):
dc_utils.set_scene_physics_type(gpu)
# Start Simulation and wait
self._timeline.play()
await omni.kit.app.get_app().next_update_async()
# get articulation handle
art = self._dc.get_articulation("/Articulation")
self.assertNotEqual(art, _dynamic_control.INVALID_HANDLE)
slider_body = self._dc.find_articulation_body(art, "Slider")
await omni.kit.app.get_app().next_update_async()
state = self._dc.get_articulation_dof_states(art, _dynamic_control.STATE_ALL)
props = self._dc.get_articulation_dof_properties(art)
num_dofs = self._dc.get_articulation_dof_count(art)
# set both dof state and targets for position
for i in range(num_dofs):
props[i]["stiffness"] = 1e8
props[i]["damping"] = 1e8
self._dc.set_articulation_dof_properties(art, props)
        # Rotate 45 degrees and set the prismatic joint to 1.0
new_state = [math.radians(45), 1.00]
state["pos"] = new_state
self._dc.set_articulation_dof_states(art, state, _dynamic_control.STATE_POS)
self._dc.set_articulation_dof_position_targets(art, new_state)
await omni.kit.app.get_app().next_update_async()
state = self._dc.get_articulation_dof_states(art, _dynamic_control.STATE_POS)
# check that the states match
# print(new_state, state["pos"])
self.assertTrue(np.allclose(new_state, state["pos"]), f'{new_state}, {state["pos"]}')
new_pose = self._dc.get_rigid_body_pose(slider_body)
self.assertTrue(
np.allclose([new_pose.p.x, new_pose.p.y, new_pose.p.z], [1.76777, 1.76777, 0], atol=1e-5), f"{new_pose.p}"
)
# velocity control test
for i in range(num_dofs):
props[i]["stiffness"] = 0
props[i]["damping"] = 1e15
self._dc.set_articulation_dof_properties(art, props)
new_state = [0, -0.10]
state["vel"] = new_state
# set both state and target
self._dc.set_articulation_dof_states(art, state, _dynamic_control.STATE_VEL)
self._dc.set_articulation_dof_velocity_targets(art, new_state)
await omni.kit.app.get_app().next_update_async()
await omni.kit.app.get_app().next_update_async() # need a second step before dof_states are updated
state = self._dc.get_articulation_dof_states(art, _dynamic_control.STATE_ALL)
g_vel = self._dc.get_rigid_body_linear_velocity(slider_body)
l_vel = self._dc.get_rigid_body_local_linear_velocity(slider_body)
self.assertTrue(
np.allclose([g_vel.x, g_vel.y, g_vel.z], [-0.0707107, -0.0707107, 0], atol=1e-3), f"g_vel {g_vel}"
)
self.assertTrue(np.allclose([l_vel.x, l_vel.y, l_vel.z], [-0.10, 0, 0], atol=1e-3), f"l_vel {l_vel}")
self.assertTrue(np.allclose(new_state, state["vel"], atol=1e-3), f'new_state {new_state} ~= {state["vel"]}')
# effort control for first joint, second joint is position drive
props[0]["stiffness"] = 0
props[0]["damping"] = 0
props[1]["stiffness"] = 1e15
props[1]["damping"] = 1e15
self._dc.set_articulation_dof_properties(art, props)
# reset state of articulation and apply effort
state["pos"] = [0, 0]
state["vel"] = [0, 0]
state["effort"] = [1e1, 0]
self._dc.set_articulation_dof_position_targets(art, [0, 0])
self._dc.set_articulation_dof_velocity_targets(art, [0, 0])
self._dc.set_articulation_dof_states(art, state, _dynamic_control.STATE_ALL)
await dc_utils.simulate(1.0, self._dc, art)
state = self._dc.get_articulation_dof_states(art, _dynamic_control.STATE_VEL)
self.assertAlmostEqual(state["vel"][0], 3.5, delta=1e-2, msg=f'{state["vel"][0]}')
async def test_get_gravity_effort(self, gpu=False):
dc_utils.set_scene_physics_type(gpu)
self._physics_scene = UsdPhysics.Scene(self._stage.GetPrimAtPath("/physicsScene"))
gravity = -9.81
self._physics_scene.CreateGravityDirectionAttr().Set(Gf.Vec3f(0.0, 1.0, 0.0))
self._physics_scene.CreateGravityMagnitudeAttr().Set(gravity)
# Start Simulation and wait
self._timeline.play()
await omni.kit.app.get_app().next_update_async()
art = self._dc.get_articulation("/Articulation")
slider_body = self._dc.find_articulation_body(art, "Slider")
dof_ptr = self._dc.find_articulation_dof(art, "RevoluteJoint")
arm_body = self._dc.find_articulation_body(art, "Arm")
self.assertNotEqual(art, _dynamic_control.INVALID_HANDLE)
props = self._dc.get_articulation_dof_properties(art)
num_dofs = self._dc.get_articulation_dof_count(art)
for i in range(num_dofs):
props[i]["driveMode"] = _dynamic_control.DRIVE_FORCE
props[i]["stiffness"] = 1e10
props[i]["damping"] = 1e10
props[i]["maxEffort"] = 1e10
self._dc.set_articulation_dof_properties(art, props)
await omni.kit.app.get_app().next_update_async()
await dc_utils.simulate(1.0, self._dc, art)
# check both state apis
dof_states = self._dc.get_articulation_dof_states(art, _dynamic_control.STATE_EFFORT)
dof_state = self._dc.get_dof_state(dof_ptr, _dynamic_control.STATE_ALL)
# compute torque analytically
fg_slider = self._dc.get_rigid_body_properties(slider_body).mass * gravity
fg_arm = self._dc.get_rigid_body_properties(arm_body).mass * gravity
pose_slider = self._dc.get_rigid_body_pose(slider_body)
pose_arm = self._dc.get_rigid_body_pose(arm_body)
torque_0 = pose_arm.p.x * fg_arm + pose_slider.p.x * fg_slider
self.assertAlmostEqual(
-torque_0, dof_states["effort"][0], delta=6, msg=f'{-torque_0} != {dof_states["effort"][0]}'
)
self.assertAlmostEqual(-torque_0, dof_state.effort, delta=6, msg=f"{-torque_0} != {dof_state.effort}")
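    # Hedged note (added): the analytic reference above follows from static equilibrium
    # with gravity applied along +Y: each link contributes a moment about the revolute
    # axis equal to its lever arm times its weight, so
    #   torque_0 = x_arm * (m_arm * g) + x_slider * (m_slider * g)
    # and the reported joint effort is expected to be approximately -torque_0.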
async def test_dof_state(self, gpu=False):
dc_utils.set_scene_physics_type(gpu)
self._timeline.play()
await omni.kit.app.get_app().next_update_async()
art = self._dc.get_articulation("/Articulation")
self.assertNotEqual(art, _dynamic_control.INVALID_HANDLE)
# get handles
slider_body = self._dc.find_articulation_body(art, "Slider")
dof_ptr = self._dc.find_articulation_dof(art, "RevoluteJoint")
props = self._dc.get_dof_properties(dof_ptr)
pos_target = math.radians(45)
vel_target = math.radians(45)
# configure for position control
props.damping = 1e8
props.stiffness = 1e8
self._dc.set_dof_properties(dof_ptr, props)
# use set_dof_state api
self._dc.set_dof_state(dof_ptr, _dynamic_control.DofState(pos_target, 0, 0), _dynamic_control.STATE_ALL)
self._dc.set_dof_position_target(dof_ptr, pos_target)
await omni.kit.app.get_app().next_update_async()
new_pose = self._dc.get_rigid_body_pose(slider_body)
self.assertTrue(
np.allclose([new_pose.p.x, new_pose.p.y, new_pose.p.z], [1.06066, 1.06066, 0], atol=1e-5), f"{new_pose.p}"
)
# reset state before next test
self._dc.set_dof_state(dof_ptr, _dynamic_control.DofState(0, 0, 0), _dynamic_control.STATE_ALL)
self._dc.set_dof_position_target(dof_ptr, 0)
await omni.kit.app.get_app().next_update_async()
# use set_dof_position api
self._dc.set_dof_position(dof_ptr, pos_target)
self._dc.set_dof_position_target(dof_ptr, pos_target)
await omni.kit.app.get_app().next_update_async()
new_pose = self._dc.get_rigid_body_pose(slider_body)
self.assertTrue(
np.allclose([new_pose.p.x, new_pose.p.y, new_pose.p.z], [1.06066, 1.06066, 0], atol=1e-5), f"{new_pose.p}"
)
# reset state before next test
self._dc.set_dof_state(dof_ptr, _dynamic_control.DofState(0, 0, 0), _dynamic_control.STATE_ALL)
self._dc.set_dof_position_target(dof_ptr, 0)
await omni.kit.app.get_app().next_update_async()
# velocity control
props.damping = 1e15
props.stiffness = 0
self._dc.set_dof_properties(dof_ptr, props)
# use set_dof_state api
self._dc.set_dof_state(dof_ptr, _dynamic_control.DofState(0, vel_target, 0), _dynamic_control.STATE_ALL)
self._dc.set_dof_velocity_target(dof_ptr, vel_target)
await dc_utils.simulate(1.0, self._dc, art)
state = self._dc.get_dof_state(dof_ptr, _dynamic_control.STATE_ALL)
new_pose = self._dc.get_rigid_body_pose(slider_body)
# after one second it should reach this pose
self.assertAlmostEqual(state.pos, pos_target, delta=1e-4, msg=f"{state.pos} != {pos_target}")
self.assertAlmostEqual(state.vel, vel_target, delta=1e-4, msg=f"{state.vel} != {vel_target}")
self.assertTrue(
np.allclose([new_pose.p.x, new_pose.p.y, new_pose.p.z], [1.0607, 1.0607, 0], atol=1e-2), f"{new_pose.p}"
)
# reset state before next test
self._dc.set_dof_state(dof_ptr, _dynamic_control.DofState(0, 0, 0), _dynamic_control.STATE_ALL)
self._dc.set_dof_velocity_target(dof_ptr, 0)
await omni.kit.app.get_app().next_update_async()
# use set_dof_velocity api
self._dc.set_dof_velocity(dof_ptr, vel_target)
self._dc.set_dof_velocity_target(dof_ptr, vel_target)
await dc_utils.simulate(1.0, self._dc, art)
new_pose = self._dc.get_rigid_body_pose(slider_body)
# after one second it should reach this pose
self.assertAlmostEqual(state.pos, pos_target, delta=1e-4, msg=f"{state.pos} != {pos_target}")
self.assertAlmostEqual(state.vel, vel_target, delta=1e-4, msg=f"{state.vel} != {vel_target}")
self.assertTrue(
np.allclose([new_pose.p.x, new_pose.p.y, new_pose.p.z], [1.0607, 1.0607, 0], atol=1e-2), f"{new_pose.p}"
)
# reset state before next test
self._dc.set_dof_state(dof_ptr, _dynamic_control.DofState(0, 0, 0), _dynamic_control.STATE_ALL)
self._dc.set_dof_velocity_target(dof_ptr, 0)
await omni.kit.app.get_app().next_update_async()
# effort control
props.damping = 0
props.stiffness = 0
self._dc.set_dof_properties(dof_ptr, props)
self._dc.set_dof_state(dof_ptr, _dynamic_control.DofState(0, 0, 1e1), _dynamic_control.STATE_ALL)
await dc_utils.simulate(1.0, self._dc, art)
# use get_dof_state api
state = self._dc.get_dof_state(dof_ptr, _dynamic_control.STATE_ALL)
new_pose = self._dc.get_rigid_body_pose(slider_body)
self.assertAlmostEqual(state.pos, 1.8822, delta=1e-3, msg=f"state.pos = {state.pos}")
self.assertAlmostEqual(state.vel, 3.6385, delta=1e-3, msg=f"state.vel = {state.vel}")
self.assertTrue(
np.allclose([new_pose.p.x, new_pose.p.y, new_pose.p.z], [-0.46066, 1.4307, 2.34091e-05], atol=1e-2),
f"{new_pose.p}",
)
self._dc.set_dof_state(dof_ptr, _dynamic_control.DofState(0, 0, 0), _dynamic_control.STATE_ALL)
await omni.kit.app.get_app().next_update_async()
# use set_dof_effort api
self._dc.set_dof_effort(dof_ptr, 1e1)
self.assertEqual(self._dc.get_dof_effort(dof_ptr), 1e1)
await dc_utils.simulate(1.0, self._dc, art)
state = self._dc.get_dof_state(dof_ptr, _dynamic_control.STATE_ALL)
new_pose = self._dc.get_rigid_body_pose(slider_body)
self.assertAlmostEqual(state.pos, 1.8822, delta=1e-3, msg=f"state.pos = {state.pos}")
self.assertAlmostEqual(state.vel, 3.6385, delta=1e-3, msg=f"state.vel = {state.vel}")
self.assertTrue(
np.allclose([new_pose.p.x, new_pose.p.y, new_pose.p.z], [-0.46066, 1.4307, 2.34091e-05], atol=1e-2),
f"{new_pose.p}",
)
# reset state before next test
self._dc.set_dof_state(dof_ptr, _dynamic_control.DofState(0, 0, 0), _dynamic_control.STATE_ALL)
await omni.kit.app.get_app().next_update_async()
# use set_articulation_dof_efforts api
self._dc.set_articulation_dof_efforts(art, [1e1, 0])
self.assertTrue(
np.allclose(self._dc.get_articulation_dof_efforts(art), [1e1, 0]),
f"{self._dc.get_articulation_dof_efforts(art)}",
)
await dc_utils.simulate(1.0, self._dc, art)
state = self._dc.get_dof_state(dof_ptr, _dynamic_control.STATE_ALL)
new_pose = self._dc.get_rigid_body_pose(slider_body)
self.assertAlmostEqual(state.pos, 1.8822, delta=1e-3, msg=f"state.pos = {state.pos}")
self.assertAlmostEqual(state.vel, 3.6385, delta=1e-3, msg=f"state.vel = {state.vel}")
self.assertTrue(
np.allclose([new_pose.p.x, new_pose.p.y, new_pose.p.z], [-0.46066, 1.4307, 2.34091e-05], atol=1e-2),
f"new_pose.p = {new_pose.p}",
)
# async def test_get_effort(self, gpu=False):
# (result, error) = await open_stage_async(
# self._assets_root_path + "/Isaac/Robots/Simple/revolute_articulation.usd"
# )
# self.assertTrue(result) # Make sure the stage loaded
# self._stage = omni.usd.get_context().get_stage()
# dc_utils.set_scene_physics_type(gpu)
# self._physics_scene = UsdPhysics.Scene(self._stage.GetPrimAtPath("/physicsScene"))
# gravity = 9.81
# self._physics_scene.CreateGravityDirectionAttr().Set(Gf.Vec3f(0.0, 0.0, -1.0))
# self._physics_scene.CreateGravityMagnitudeAttr().Set(gravity)
# # sensorAPI = PhysxSchema.PhysxArticulationForceSensorAPI.Apply(self._stage.GetPrimAtPath("/Articulation/Arm"))
# # sensorAPI.CreateConstraintSolverForcesEnabledAttr().Set(True)
# # sensorAPI.CreateForwardDynamicsForcesEnabledAttr().Set(True)
# sensorAPI = PhysxSchema.PhysxArticulationForceSensorAPI.Apply(
# self._stage.GetPrimAtPath("/Articulation/CenterPivot")
# )
# sensorAPI.CreateConstraintSolverForcesEnabledAttr().Set(True)
# sensorAPI.CreateForwardDynamicsForcesEnabledAttr().Set(True)
# await dc_utils.add_cube(self._stage, "/cube", 10, (90, 0, 20), True, 5)
# # Start Simulation and wait
# self._timeline.play()
# await omni.kit.app.get_app().next_update_async()
# art = self._dc.get_articulation("/Articulation")
# slider_body = self._dc.find_articulation_body(art, "Arm")
# dof_ptr = self._dc.find_articulation_dof(art, "RevoluteJoint")
# self.assertNotEqual(art, _dynamic_control.INVALID_HANDLE)
# cube_handle = self._dc.get_rigid_body("/cube")
# cube_props = self._dc.get_rigid_body_properties(cube_handle)
# cube_props.solver_position_iteration_count = 32
# cube_props.solver_velocity_iteration_count = 32
# self._dc.set_rigid_body_properties(cube_handle, cube_props)
# self._timeline.play()
# await omni.kit.app.get_app().next_update_async()
# props = self._dc.get_articulation_dof_properties(art)
# num_dofs = self._dc.get_articulation_dof_count(art)
# props[0]["driveMode"] = _dynamic_control.DRIVE_FORCE
# props[0]["maxEffort"] = 1e10
# props[0]["stiffness"] = 1e15
# props[0]["damping"] = 1e15
# self._dc.set_articulation_dof_properties(art, props)
# # change dof target: modifying current state
# dof_vel = [math.radians(45)]
# # self._dc.set_articulation_dof_velocity_targets(art, dof_vel)
# # await dc_utils.simulate(1.0, self._dc, art)
# for frame in range(60 * 1):
# if art is not None:
# self._dc.wake_up_articulation(art)
# await omni.kit.app.get_app().next_update_async()
# dof_states = self._dc.get_articulation_dof_states(art, _dynamic_control.STATE_EFFORT)
# # print(dof_states["effort"])
# # fg_slider = self._dc.get_rigid_body_properties(slider_body).mass * gravity
# arm_body = self._dc.find_articulation_body(art, "Arm")
# fg_arm = self._dc.get_rigid_body_properties(arm_body).mass * gravity
# # pose_slider = self._dc.get_rigid_body_pose(slider_body)
# pose_arm = self._dc.get_rigid_body_pose(arm_body)
# torque_0 = pose_arm.p.x * fg_arm
# print(torque_0)
# if cube_handle is not _dynamic_control.INVALID_HANDLE:
# pose_cube = self._dc.get_rigid_body_pose(cube_handle)
# fg_cube = self._dc.get_rigid_body_properties(cube_handle).mass * gravity
# torque_body = fg_cube * pose_cube.p.x
# print(torque_body)
# print(torque_0 + torque_body)
# # self.assertLess(dof_states[0][2], -1000)
# # print(dof_states[0][2])
# # dof_states = self._dc.get_articulation_dof_states(art, _dynamic_control.STATE_EFFORT)
# # print(dof_states["effort"])
| 37,315 | Python | 50.827778 | 142 | 0.635133 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.dynamic_control/omni/isaac/dynamic_control/tests/test_articulation_franka.py | # Copyright (c) 2018-2021, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
import omni.kit.test
import carb # carb data types are used as return values, need this
import numpy as np
from pxr import Gf
import asyncio
from omni.isaac.dynamic_control import _dynamic_control
from omni.isaac.dynamic_control import utils as dc_utils
from omni.isaac.dynamic_control import conversions as dc_conversions
from .common import get_assets_root_path
# Having a test class derived from omni.kit.test.AsyncTestCase declared at the root of a module will make it auto-discoverable by omni.kit.test
class TestArticulationFranka(omni.kit.test.AsyncTestCase):
# Before running each test
async def setUp(self):
self._dc = _dynamic_control.acquire_dynamic_control_interface()
self._timeline = omni.timeline.get_timeline_interface()
await omni.usd.get_context().new_stage_async()
await omni.kit.app.get_app().next_update_async()
self._stage = omni.usd.get_context().get_stage()
prim = self._stage.DefinePrim("/panda", "Xform")
self._assets_root_path = get_assets_root_path()
if self._assets_root_path is None:
carb.log_error("Could not find Isaac Sim assets folder")
return
prim.GetReferences().AddReference(self._assets_root_path + "/Isaac/Robots/Franka/franka.usd")
dc_utils.set_physics_frequency(60)
pass
# After running each test
async def tearDown(self):
self._timeline.stop()
while omni.usd.get_context().get_stage_loading_status()[2] > 0:
print("tearDown, assets still loading, waiting to finish...")
await asyncio.sleep(1.0)
await omni.kit.app.get_app().next_update_async()
pass
async def test_load(self, gpu=False):
dc_utils.set_scene_physics_type(gpu)
self._timeline.play()
await omni.kit.app.get_app().next_update_async()
        # Check the object type to make sure it's an articulation
obj_type = self._dc.peek_object_type("/panda")
self.assertEqual(obj_type, _dynamic_control.ObjectType.OBJECT_ARTICULATION)
        # Get handle to articulation and make sure it's valid
art = self._dc.get_articulation("/panda")
self.assertNotEqual(art, _dynamic_control.INVALID_HANDLE)
        # Use articulation handle to do something and make sure it works
dof_states = self._dc.get_articulation_dof_states(art, _dynamic_control.STATE_ALL)
self.assertTrue(dof_states is not None)
# check basics for articulation
num_joints = self._dc.get_articulation_joint_count(art)
num_dofs = self._dc.get_articulation_dof_count(art)
num_bodies = self._dc.get_articulation_body_count(art)
self.assertEqual(num_joints, 11)
self.assertEqual(num_dofs, 9)
self.assertEqual(num_bodies, 12)
# difference between joint and dof
fixed_joint_ptr = self._dc.find_articulation_joint(art, "panda_hand_joint")
fixed_dof_ptr = self._dc.find_articulation_dof(art, "panda_hand_joint")
self.assertNotEqual(fixed_joint_ptr, _dynamic_control.INVALID_HANDLE)
self.assertEqual(fixed_dof_ptr, _dynamic_control.INVALID_HANDLE)
# get joint properties
joint_type = self._dc.get_joint_type(fixed_joint_ptr)
joint_dof_count = self._dc.get_joint_dof_count(fixed_joint_ptr) # dof of the joint
self.assertEqual(joint_type, _dynamic_control.JOINT_FIXED)
self.assertEqual(joint_dof_count, 0)
# get dof states
dof_ptr = self._dc.find_articulation_dof(art, "panda_finger_joint1")
dof_type = self._dc.get_dof_type(dof_ptr)
self.assertEqual(dof_type, _dynamic_control.DOF_TRANSLATION)
dof_state_v1 = self._dc.get_dof_state(dof_ptr, _dynamic_control.STATE_ALL)
# get all dof states for articulation
dof_idx = self._dc.find_articulation_dof_index(art, "panda_finger_joint1")
dof_states = self._dc.get_articulation_dof_states(art, _dynamic_control.STATE_ALL)
self.assertTrue(dof_states is not None)
dof_state_v2 = dof_states["pos"][dof_idx]
# make sure they both match
self.assertAlmostEqual(dof_state_v1.pos, dof_state_v2, msg=f"{dof_state_v1.pos} += {dof_state_v2}")
pass
async def test_teleport(self, gpu=False):
dc_utils.set_scene_physics_type(gpu)
self._timeline.play()
await omni.kit.app.get_app().next_update_async()
art = self._dc.get_articulation("/panda")
self.assertNotEqual(art, _dynamic_control.INVALID_HANDLE)
root_body = self._dc.get_articulation_root_body(art)
hand_idx = self._dc.find_articulation_body_index(art, "panda_hand")
# teleport joints to target pose
dof_states = self._dc.get_articulation_dof_states(art, _dynamic_control.STATE_POS)
targets = self._dc.get_articulation_dof_position_targets(art)
dof_states["pos"] = targets
self._dc.set_articulation_dof_states(art, dof_states, _dynamic_control.STATE_POS)
await omni.kit.app.get_app().next_update_async()
body_states = self._dc.get_articulation_body_states(art, _dynamic_control.STATE_POS)
expected_pos = body_states["pose"]["p"][hand_idx]
self.assertTrue(
np.allclose(
[expected_pos[0], expected_pos[1], expected_pos[2]], [0.36756575, 0.00441444, 0.42769492], atol=1e-5
),
f"[0.36756575, 0.00441444, 0.42769492] != {expected_pos}",
)
new_pose = dc_conversions.create_transform(Gf.Vec3d(0.10, 0.10, 0.10), Gf.Rotation(Gf.Vec3d(0, 0, 1), 90))
self._dc.set_rigid_body_pose(root_body, new_pose)
await omni.kit.app.get_app().next_update_async()
body_states = self._dc.get_articulation_body_states(art, _dynamic_control.STATE_POS)
expected_pos = body_states["pose"]["p"][hand_idx]
self.assertTrue(
np.allclose(
[expected_pos[0], expected_pos[1], expected_pos[2]], [0.09577966, 0.45144385, 0.4985129], atol=1e-5
),
f"[0.09577966, 0.45144385, 0.4985129] != {expected_pos}",
)
pass
async def test_teleport_target(self):
self._timeline.play()
await omni.kit.app.get_app().next_update_async()
art = self._dc.get_articulation("/panda")
self.assertNotEqual(art, _dynamic_control.INVALID_HANDLE)
# turn off gravity because velocity drives will act differently with different damping settings otherwise
body_count = self._dc.get_articulation_body_count(art)
for bodyIdx in range(body_count):
body = self._dc.get_articulation_body(art, bodyIdx)
self._dc.set_rigid_body_disable_gravity(body, True)
franka_joint_names = [
"panda_joint1",
"panda_joint2",
"panda_joint3",
"panda_joint4",
"panda_joint5",
"panda_joint6",
"panda_joint7",
]
# make change to dynamic_control params
props = _dynamic_control.DofProperties()
props.drive_mode = _dynamic_control.DRIVE_FORCE
props.damping = 1e1
props.stiffness = 0
for joint in franka_joint_names:
self._dc.set_dof_properties(self._dc.find_articulation_dof(art, joint), props)
await omni.kit.app.get_app().next_update_async()
# get states with efforts and velocity set to 0
dof_states = self._dc.get_articulation_dof_states(art, _dynamic_control.STATE_POS)
pos_targets = self._dc.get_articulation_dof_position_targets(art)
dof_states["pos"] = pos_targets
self._dc.set_articulation_dof_states(art, dof_states, _dynamic_control.STATE_ALL)
await omni.kit.app.get_app().next_update_async()
# record position
dof_states1 = np.array(self._dc.get_articulation_dof_states(art, _dynamic_control.STATE_ALL))
# teleport again from a different position without changing any dc params
dof_states = self._dc.get_articulation_dof_states(art, _dynamic_control.STATE_POS)
pos_targets = self._dc.get_articulation_dof_position_targets(art)
dof_states["pos"] = pos_targets
self._dc.set_articulation_dof_states(art, dof_states, _dynamic_control.STATE_ALL)
await omni.kit.app.get_app().next_update_async()
# record position
dof_states2 = np.array(self._dc.get_articulation_dof_states(art, _dynamic_control.STATE_ALL))
# make change to dynamic_control params
props = _dynamic_control.DofProperties()
props.drive_mode = _dynamic_control.DRIVE_FORCE
props.damping = 1e7
props.stiffness = 0
for joint in franka_joint_names:
self._dc.set_dof_properties(self._dc.find_articulation_dof(art, joint), props)
await omni.kit.app.get_app().next_update_async()
# teleport again
dof_states = self._dc.get_articulation_dof_states(art, _dynamic_control.STATE_POS)
pos_targets = self._dc.get_articulation_dof_position_targets(art)
dof_states["pos"] = pos_targets
self._dc.set_articulation_dof_states(art, dof_states, _dynamic_control.STATE_ALL)
await omni.kit.app.get_app().next_update_async()
dof_states3 = np.array(self._dc.get_articulation_dof_states(art, _dynamic_control.STATE_ALL))
# print("dof_states1:\n", dof_states1)
# print("dof_states2:\n", dof_states2)
# print("dof_states3:\n", dof_states3)
for i in range(len(dof_states1)):
for j in range(3):
self.assertAlmostEqual(
dof_states1[i][j], dof_states2[i][j], delta=1e-4, msg=f"{dof_states1[i][j]} != {dof_states2[i][j]}"
)
self.assertAlmostEqual(
dof_states1[i][j], dof_states3[i][j], delta=1e-4, msg=f"{dof_states1[i][j]} != {dof_states3[i][j]}"
)
pass
async def test_movement(self, gpu=False):
dc_utils.set_scene_physics_type(gpu)
# Start Simulation and wait
self._timeline.play()
await omni.kit.app.get_app().next_update_async()
art = self._dc.get_articulation("/panda")
self.assertNotEqual(art, _dynamic_control.INVALID_HANDLE)
        # in order for the test to pass, self-collisions must be disabled
art_props = _dynamic_control.ArticulationProperties()
art_props.solver_position_iteration_count = 32
art_props.solver_velocity_iteration_count = 32
art_props.enable_self_collisions = False
self._dc.set_articulation_properties(art, art_props)
dof_states = self._dc.get_articulation_dof_states(art, _dynamic_control.STATE_ALL)
props = self._dc.get_articulation_dof_properties(art)
num_dofs = self._dc.get_articulation_dof_count(art)
# set all joints to velocity mode
for i in range(num_dofs):
props["stiffness"][i] = 0
props["damping"][i] = 1e15
dof_states["vel"][i] = -10.0
self._dc.set_articulation_dof_properties(art, props)
self._dc.set_articulation_dof_states(art, dof_states, _dynamic_control.STATE_VEL)
self._dc.set_articulation_dof_velocity_targets(art, dof_states["vel"])
await dc_utils.simulate(1.5, self._dc, art)
# check that we are at the limits
dof_states = self._dc.get_articulation_dof_states(art, _dynamic_control.STATE_POS)
for i in range(num_dofs):
self.assertAlmostEqual(
dof_states["pos"][i],
props["lower"][i],
delta=1e-3,
msg=f'{dof_states["pos"][i]} += {props["lower"][i]}',
)
pass
async def test_position_franka(self, gpu=False):
dc_utils.set_scene_physics_type(gpu)
# Start Simulation and wait
self._timeline.play()
await omni.kit.app.get_app().next_update_async()
art = self._dc.get_articulation("/panda")
self.assertNotEqual(art, _dynamic_control.INVALID_HANDLE)
dof_ptr_left = self._dc.find_articulation_dof(art, "panda_finger_joint1")
dof_ptr_right = self._dc.find_articulation_dof(art, "panda_finger_joint2")
# set new dof pos target
new_pos_list = [0.02, 0.0, 0.04]
for new_pos in new_pos_list:
for dof_ptr in [dof_ptr_left, dof_ptr_right]:
self.assertTrue(self._dc.set_dof_position_target(dof_ptr, new_pos))
await dc_utils.simulate(2.0, self._dc, art)
            for dof_ptr in [dof_ptr_left, dof_ptr_right]:
self.assertAlmostEqual(
self._dc.get_dof_position(dof_ptr),
new_pos,
delta=0.01,
msg=f"{self._dc.get_dof_position(dof_ptr)} += {new_pos}",
)
self.assertAlmostEqual(
self._dc.get_dof_position_target(dof_ptr),
new_pos,
delta=0.01,
msg=f"{self._dc.get_dof_position_target(dof_ptr)} += {new_pos}",
)
# async def test_masses(self, gpu=False):
# dc_utils.set_scene_physics_type(gpu)
# self._physics_scene = UsdPhysics.Scene(self._stage.GetPrimAtPath("/physicsScene"))
# self._physics_scene.CreateGravityDirectionAttr().Set(Gf.Vec3f(0.0, 0.0, 0.0))
# self._physics_scene.CreateGravityMagnitudeAttr().Set(0.0)
# # Start Simulation and wait
# self._timeline.play()
# await omni.kit.app.get_app().next_update_async()
# art = self._dc.get_articulation("/panda")
# self.assertNotEqual(art, _dynamic_control.INVALID_HANDLE)
# dof_masses = self._dc.get_articulation_dof_masses(art)
# dof_props = self._dc.get_articulation_dof_properties(art)
# num_dofs = self._dc.get_articulation_dof_count(art)
# for i in range(num_dofs):
# print(dof_props[i], dof_masses[i])
# dof_props[i]["driveMode"] = _dynamic_control.DRIVE_FORCE
# dof_props[i]["damping"] = dof_props[i]["damping"] * dof_masses[i]
# dof_props[i]["stiffness"] = dof_props[i]["stiffness"] * dof_masses[i]
# print(dof_masses[i], dof_props[i]["damping"], dof_props[i]["stiffness"])
# self._dc.set_articulation_dof_properties(art, dof_props)
# await dc_utils.simulate(5.0)
# # TODO: Test each property
async def test_physics_no_render(self):
await omni.usd.get_context().new_stage_async()
self._stage = omni.usd.get_context().get_stage()
self._physx_interface = omni.physx.acquire_physx_interface()
self._physx_interface.start_simulation()
self._physx_interface.force_load_physics_from_usd()
prim = self._stage.DefinePrim("/panda", "Xform")
prim.GetReferences().AddReference(self._assets_root_path + "/Isaac/Robots/Franka/franka.usd")
self._physx_interface.force_load_physics_from_usd()
art = self._dc.get_articulation("/panda")
self.assertNotEqual(art, _dynamic_control.INVALID_HANDLE)
# do a zero time step, should not crash
self._timeline.play()
omni.physx.acquire_physx_interface().update_simulation(elapsedStep=0, currentTime=0)
self._timeline.stop()
self._timeline.play()
await omni.kit.app.get_app().next_update_async()
| 15,816 | Python | 46.498498 | 142 | 0.62892 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.dynamic_control/omni/isaac/dynamic_control/tests/__init__.py | # Copyright (c) 2018-2021, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
from .test_articulation_simple import *
from .test_articulation_franka import *
from .test_articulation_other import *
from .test_core import *
from .test_pickles import *
from .test_rigid import *
| 632 | Python | 38.562498 | 76 | 0.795886 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.dynamic_control/omni/isaac/dynamic_control/tests/test_articulation_other.py | # Copyright (c) 2018-2021, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
import omni.kit.test
import carb # carb data types are used as return values, need this
import numpy as np
from pxr import Gf, UsdPhysics
import omni.physx as _physx
import asyncio
from omni.isaac.dynamic_control import _dynamic_control
from omni.isaac.dynamic_control import utils as dc_utils
from .common import open_stage_async, get_assets_root_path
# Having a test class derived from omni.kit.test.AsyncTestCase declared at the root of a module will make it auto-discoverable by omni.kit.test
class TestArticulationOther(omni.kit.test.AsyncTestCase):
# Before running each test
async def setUp(self):
self._dc = _dynamic_control.acquire_dynamic_control_interface()
self._timeline = omni.timeline.get_timeline_interface()
ext_manager = omni.kit.app.get_app().get_extension_manager()
ext_id = ext_manager.get_enabled_extension_id("omni.isaac.dynamic_control")
self._extension_path = ext_manager.get_extension_path(ext_id)
self._assets_root_path = get_assets_root_path()
if self._assets_root_path is None:
carb.log_error("Could not find Isaac Sim assets folder")
return
dc_utils.set_physics_frequency(60)
await omni.kit.app.get_app().next_update_async()
pass
# After running each test
async def tearDown(self):
self._timeline.stop()
while omni.usd.get_context().get_stage_loading_status()[2] > 0:
print("tearDown, assets still loading, waiting to finish...")
await asyncio.sleep(1.0)
await omni.kit.app.get_app().next_update_async()
pass
async def test_articulation_wheeled(self, gpu=False):
(result, error) = await open_stage_async(self._assets_root_path + "/Isaac/Robots/Simple/differential_base.usd")
# Make sure the stage loaded
self.assertTrue(result)
dc_utils.set_scene_physics_type(gpu)
dc_utils.set_physics_frequency(60)
self._timeline.play()
await omni.kit.app.get_app().next_update_async()
# wait for robot to fall
art = self._dc.get_articulation("/differential_base")
self.assertNotEqual(art, _dynamic_control.INVALID_HANDLE)
await dc_utils.simulate(1.0, self._dc, art)
left_wheel_ptr = self._dc.find_articulation_dof(art, "left_wheel")
right_wheel_ptr = self._dc.find_articulation_dof(art, "right_wheel")
self._dc.set_dof_velocity_target(left_wheel_ptr, -2.5)
self._dc.set_dof_velocity_target(right_wheel_ptr, 2.5)
await dc_utils.simulate(2, self._dc, art)
root_body_ptr = self._dc.get_articulation_root_body(art)
lin_vel = self._dc.get_rigid_body_linear_velocity(root_body_ptr)
ang_vel = self._dc.get_rigid_body_angular_velocity(root_body_ptr)
self.assertAlmostEqual(0, np.linalg.norm(lin_vel), 1)
self.assertAlmostEqual(2.5, ang_vel.z, delta=1e-1)
async def test_articulation_carter(self, gpu=False):
(result, error) = await open_stage_async(self._assets_root_path + "/Isaac/Robots/Carter/carter_v1.usd")
# Make sure the stage loaded
self.assertTrue(result)
dc_utils.set_scene_physics_type(gpu)
dc_utils.set_physics_frequency(60)
self._timeline.play()
# wait for robot to fall
await dc_utils.simulate(1)
art = self._dc.get_articulation("/carter")
self.assertNotEqual(art, _dynamic_control.INVALID_HANDLE)
left_wheel_ptr = self._dc.find_articulation_dof(art, "left_wheel")
right_wheel_ptr = self._dc.find_articulation_dof(art, "right_wheel")
left_dof_idx = self._dc.find_articulation_dof_index(art, "left_wheel")
right_dof_idx = self._dc.find_articulation_dof_index(art, "right_wheel")
imu_body_ptr = self._dc.find_articulation_body(art, "imu")
# the wheels are offset 5cm from the wheel mesh, need to account for that in wheelbase
wheel_base = 0.31613607 - 0.05 # in m
wheel_radius = 0.240 # in m
        # Set drive target to a small linear value
drive_target = 0.05
self._dc.wake_up_articulation(art)
self._dc.set_dof_velocity_target(left_wheel_ptr, drive_target)
self._dc.set_dof_velocity_target(right_wheel_ptr, drive_target)
await dc_utils.simulate(2, self._dc, art)
dof_states = self._dc.get_articulation_dof_states(art, _dynamic_control.STATE_ALL)
# Check that the current velocity is close to target
self.assertAlmostEqual(drive_target, dof_states["vel"][left_dof_idx], delta=0.01)
self.assertAlmostEqual(drive_target, dof_states["vel"][right_dof_idx], delta=0.01)
# check chassis linear velocity, angular should be zero
lin_vel = self._dc.get_rigid_body_linear_velocity(imu_body_ptr)
ang_vel = self._dc.get_rigid_body_angular_velocity(imu_body_ptr)
self.assertAlmostEqual(drive_target * wheel_radius, np.linalg.norm([lin_vel.x, lin_vel.y, lin_vel.z]), 1)
self.assertAlmostEqual(0, np.linalg.norm([ang_vel.x, ang_vel.y, ang_vel.z]), 1)
# Set drive target to large linear value
self._dc.wake_up_articulation(art)
drive_target = 2.5
self._dc.set_dof_velocity_target(left_wheel_ptr, drive_target)
self._dc.set_dof_velocity_target(right_wheel_ptr, drive_target)
await dc_utils.simulate(1, self._dc, art)
dof_states = self._dc.get_articulation_dof_states(art, _dynamic_control.STATE_ALL)
self.assertAlmostEqual(drive_target, dof_states["vel"][left_dof_idx], delta=0.01)
self.assertAlmostEqual(drive_target, dof_states["vel"][right_dof_idx], delta=0.01)
lin_vel = self._dc.get_rigid_body_linear_velocity(imu_body_ptr)
ang_vel = self._dc.get_rigid_body_angular_velocity(imu_body_ptr)
self.assertAlmostEqual(
drive_target * wheel_radius, np.linalg.norm([lin_vel.x, lin_vel.y, lin_vel.z]), delta=0.2
)
self.assertAlmostEqual(0, np.linalg.norm([ang_vel.x, ang_vel.y, ang_vel.z]), 1)
# stop moving
self._dc.set_dof_velocity_target(left_wheel_ptr, 0)
self._dc.set_dof_velocity_target(right_wheel_ptr, 0)
await dc_utils.simulate(1, self._dc, art)
dof_states = self._dc.get_articulation_dof_states(art, _dynamic_control.STATE_ALL)
self.assertAlmostEqual(0, dof_states["vel"][left_dof_idx], delta=0.01)
self.assertAlmostEqual(0, dof_states["vel"][right_dof_idx], delta=0.01)
# spin at slow velocity
drive_target = 0.05
self._dc.wake_up_articulation(art)
self._dc.set_dof_velocity_target(left_wheel_ptr, -drive_target)
self._dc.set_dof_velocity_target(right_wheel_ptr, drive_target)
await dc_utils.simulate(2, self._dc, art)
lin_vel = self._dc.get_rigid_body_linear_velocity(imu_body_ptr)
ang_vel = self._dc.get_rigid_body_angular_velocity(imu_body_ptr)
# print(np.linalg.norm(lin_vel), ang_vel)
self.assertLess(np.linalg.norm([lin_vel.x, lin_vel.y, lin_vel.z]), 1.5)
# the wheels are offset 5cm from the wheel mesh, need to account for that in wheelbase
self.assertAlmostEqual(drive_target * wheel_radius / wheel_base, ang_vel[2], delta=0.1)
# spin at large velocity
drive_target = 1.0
self._dc.wake_up_articulation(art)
self._dc.set_dof_velocity_target(left_wheel_ptr, -drive_target)
self._dc.set_dof_velocity_target(right_wheel_ptr, drive_target)
await dc_utils.simulate(1, self._dc, art)
lin_vel = self._dc.get_rigid_body_linear_velocity(imu_body_ptr)
ang_vel = self._dc.get_rigid_body_angular_velocity(imu_body_ptr)
# print(np.linalg.norm(lin_vel), ang_vel)
self.assertLess(np.linalg.norm([lin_vel.x, lin_vel.y, lin_vel.z]), 3.5)
self.assertAlmostEqual(drive_target * wheel_radius / wheel_base, ang_vel[2], delta=0.1)
async def test_articulation_position_ur10(self, gpu=False):
(result, error) = await open_stage_async(self._assets_root_path + "/Isaac/Robots/UR10/ur10.usd")
# Make sure the stage loaded
self.assertTrue(result)
dc_utils.set_scene_physics_type(gpu)
dc_utils.set_physics_frequency(60)
# Start Simulation and wait
timeline = omni.timeline.get_timeline_interface()
timeline.play()
await omni.kit.app.get_app().next_update_async()
art = self._dc.get_articulation("/ur10")
self.assertNotEqual(art, _dynamic_control.INVALID_HANDLE)
dof_ptr = self._dc.find_articulation_dof(art, "wrist_1_joint")
new_pos_list = [4.0, 2.0, 0, -2, -4] # over pi, under pi, zero, and their negatives
for new_pos in new_pos_list:
# set new dof pos target
self.assertTrue(self._dc.set_dof_position_target(dof_ptr, new_pos))
await dc_utils.simulate(4.0, self._dc, art)
dof_pos_new = self._dc.get_dof_position(dof_ptr)
self.assertAlmostEqual(dof_pos_new, new_pos, delta=0.02)
dof_target_new = self._dc.get_dof_position_target(dof_ptr)
self.assertAlmostEqual(dof_target_new, new_pos, delta=0.02)
pass
async def test_articulation_position_str(self, gpu=False):
(result, error) = await open_stage_async(self._assets_root_path + "/Isaac/Robots/Transporter/transporter.usd")
# Make sure the stage loaded
self.assertTrue(result)
dc_utils.set_scene_physics_type(gpu)
dc_utils.set_physics_frequency(60)
# await asyncio.sleep(1.0)
self._timeline.play()
await omni.kit.app.get_app().next_update_async()
art = self._dc.get_articulation("/Transporter")
self.assertNotEqual(art, _dynamic_control.INVALID_HANDLE)
dof_ptr = self._dc.find_articulation_dof(art, "lift_joint")
# set new dof pos target
new_pos_list = [0.04, 0.0, 0.02]
for new_pos in new_pos_list:
self.assertTrue(self._dc.set_dof_position_target(dof_ptr, new_pos))
await dc_utils.simulate(0.5, self._dc, art)
self.assertAlmostEqual(self._dc.get_dof_position(dof_ptr), new_pos, delta=0.01)
self.assertAlmostEqual(self._dc.get_dof_position_target(dof_ptr), new_pos, delta=0.01)
async def test_revolute_masses(self, gpu=False):
(result, error) = await open_stage_async(
self._assets_root_path + "/Isaac/Robots/Simple/revolute_articulation.usd"
)
# Make sure the stage loaded
self.assertTrue(result)
self._stage = omni.usd.get_context().get_stage()
dc_utils.set_scene_physics_type(gpu)
dc_utils.set_physics_frequency(60)
self._physics_scene = UsdPhysics.Scene(self._stage.GetPrimAtPath("/physicsScene"))
self._physics_scene.CreateGravityDirectionAttr().Set(Gf.Vec3f(0.0, 0.0, -1.0))
self._physics_scene.CreateGravityMagnitudeAttr().Set(1000)
self._timeline.play()
await omni.kit.app.get_app().next_update_async()
art = self._dc.get_articulation("/Articulation")
self.assertNotEqual(art, _dynamic_control.INVALID_HANDLE)
dof_masses = self._dc.get_articulation_dof_masses(art)
self.assertAlmostEqual(dof_masses[0], 2.0001, delta=1e-2)
| 11,725 | Python | 48.68644 | 142 | 0.657569 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.dynamic_control/omni/isaac/dynamic_control/tests/test_rigid.py | # Copyright (c) 2018-2021, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
import omni.kit.test
from pxr import Gf, UsdPhysics, Sdf, PhysxSchema
from omni.isaac.dynamic_control import _dynamic_control
from omni.isaac.dynamic_control import utils as dc_utils
from omni.isaac.dynamic_control import conversions as dc_conversions
import numpy as np
import asyncio
class TestRigidBody(omni.kit.test.AsyncTestCase):
# Before running each test
async def setUp(self):
self._dc = _dynamic_control.acquire_dynamic_control_interface()
self._timeline = omni.timeline.get_timeline_interface()
ext_manager = omni.kit.app.get_app().get_extension_manager()
ext_id = ext_manager.get_enabled_extension_id("omni.isaac.dynamic_control")
self._extension_path = ext_manager.get_extension_path(ext_id)
await omni.usd.get_context().new_stage_async()
await omni.kit.app.get_app().next_update_async()
self._stage = omni.usd.get_context().get_stage()
dc_utils.set_physics_frequency(60)
self._physics_scene = UsdPhysics.Scene.Define(self._stage, Sdf.Path("/physicsScene"))
dc_utils.set_scene_physics_type(gpu=False, scene_path="/physicsScene")
await omni.kit.app.get_app().next_update_async()
pass
# After running each test
async def tearDown(self):
self._timeline.stop()
while omni.usd.get_context().get_stage_loading_status()[2] > 0:
print("tearDown, assets still loading, waiting to finish...")
await asyncio.sleep(1.0)
await omni.kit.app.get_app().next_update_async()
pass
async def test_pose(self, gpu=False):
self._physics_scene.CreateGravityDirectionAttr().Set(Gf.Vec3f(0.0, 0.0, 0.0))
self._physics_scene.CreateGravityMagnitudeAttr().Set(0.0)
await dc_utils.add_cube(self._stage, "/cube", 1.00, (0, 0, 1.00))
self._timeline.play()
await omni.kit.app.get_app().next_update_async()
handle = self._dc.get_rigid_body("/cube")
new_pose = _dynamic_control.Transform((1.00, 0, 0), (0, 0, 0, 1))
self._dc.set_rigid_body_pose(handle, new_pose)
await dc_utils.simulate(1.0)
pos = self._dc.get_rigid_body_pose(handle).p
self.assertAlmostEqual(pos.x, 1.00, delta=0.1)
async def test_linear_velocity(self, gpu=False):
self._physics_scene.CreateGravityDirectionAttr().Set(Gf.Vec3f(0.0, 0.0, 0.0))
self._physics_scene.CreateGravityMagnitudeAttr().Set(0.0)
await dc_utils.add_cube(self._stage, "/cube", 1.00, (0, 0, 1.00))
self._timeline.play()
await omni.kit.app.get_app().next_update_async()
handle = self._dc.get_rigid_body("/cube")
self._dc.set_rigid_body_linear_velocity(handle, (1.00, 0, 0))
await dc_utils.simulate(1.0)
vel = self._dc.get_rigid_body_linear_velocity(handle)
self.assertAlmostEqual(vel.x, 1.00, delta=0.1)
async def test_angular_velocity(self, gpu=False):
self._physics_scene.CreateGravityDirectionAttr().Set(Gf.Vec3f(0.0, 0.0, 0.0))
self._physics_scene.CreateGravityMagnitudeAttr().Set(0.0)
await dc_utils.add_cube(self._stage, "/cube", 1.00, (0, 0, 1.00))
self._timeline.play()
await omni.kit.app.get_app().next_update_async()
handle = self._dc.get_rigid_body("/cube")
self._dc.set_rigid_body_angular_velocity(handle, (5, 0, 0))
await dc_utils.simulate(1.0)
vel = self._dc.get_rigid_body_angular_velocity(handle)
# cube slows down due to angular damping
self.assertAlmostEqual(vel.x, 4.75, delta=0.1)
# Actual test, notice it is "async" function, so "await" can be used if needed
async def test_gravity(self, gpu=False):
self._physics_scene.CreateGravityDirectionAttr().Set(Gf.Vec3f(0.0, 0.0, -1.0))
self._physics_scene.CreateGravityMagnitudeAttr().Set(9.81)
await dc_utils.add_cube(self._stage, "/cube", 1.00, (0, 0, 1.00))
self._timeline.play()
await omni.kit.app.get_app().next_update_async()
handle = self._dc.get_rigid_body("/cube")
pos = self._dc.get_rigid_body_pose(handle).p
self._dc.set_rigid_body_disable_gravity(handle, True)
self._dc.wake_up_rigid_body(handle)
self._dc.set_rigid_body_linear_velocity(handle, (0, 0, 0))
await dc_utils.simulate(1.0)
pos = self._dc.get_rigid_body_pose(handle).p
self.assertAlmostEqual(pos.z, 0.999, delta=0.1)
self._dc.set_rigid_body_disable_gravity(handle, False)
self._dc.wake_up_rigid_body(handle)
await dc_utils.simulate(1.0)
pos = self._dc.get_rigid_body_pose(handle).p
self.assertLess(pos.z, 0)
pass
async def test_rigid_body_properties(self, gpu=False):
self._physics_scene.CreateGravityDirectionAttr().Set(Gf.Vec3f(0.0, 0.0, -1.0))
self._physics_scene.CreateGravityMagnitudeAttr().Set(9.81)
await dc_utils.add_cube(self._stage, "/cube", 1.00, (0, 0, 1.00))
self._timeline.play()
await omni.kit.app.get_app().next_update_async()
handle = self._dc.get_rigid_body("/cube")
props = self._dc.get_rigid_body_properties(handle)
self._dc.set_rigid_body_properties(handle, props)
await dc_utils.simulate(1.0)
# TODO: Test each property
def call_all_rigid_body_apis(self, handle):
self._dc.get_rigid_body_name(handle)
self._dc.get_rigid_body_path(handle)
self._dc.get_rigid_body_parent_joint(handle)
self._dc.get_rigid_body_child_joint_count(handle)
self._dc.get_rigid_body_child_joint(handle, 0)
self._dc.get_rigid_body_child_joint(handle, 100)
self._dc.get_rigid_body_pose(handle)
self._dc.set_rigid_body_pose(handle, _dynamic_control.Transform())
self._dc.set_rigid_body_disable_gravity(handle, True)
self._dc.set_rigid_body_disable_simulation(handle, False)
self._dc.get_rigid_body_linear_velocity(handle)
self._dc.get_rigid_body_local_linear_velocity(handle)
self._dc.set_rigid_body_linear_velocity(handle, (0, 0, 0))
self._dc.get_rigid_body_angular_velocity(handle)
self._dc.set_rigid_body_angular_velocity(handle, (0, 0, 0))
self._dc.apply_body_force(handle, (0, 0, 0), (0, 0, 0), True)
self._dc.apply_body_force(handle, (0, 0, 0), (0, 0, 0), False)
self._dc.get_relative_body_poses(handle, [handle])
self._dc.get_rigid_body_properties(handle)
self._dc.set_rigid_body_properties(handle, _dynamic_control.RigidBodyProperties())
async def test_start_stop(self, gpu=False):
self._physics_scene.CreateGravityDirectionAttr().Set(Gf.Vec3f(0.0, 0.0, -1.0))
self._physics_scene.CreateGravityMagnitudeAttr().Set(9.81)
await dc_utils.add_cube(self._stage, "/cube", 1.00, (0, 0, 1.00))
self._timeline.play()
await omni.kit.app.get_app().next_update_async()
handle = self._dc.get_rigid_body("/cube")
self.call_all_rigid_body_apis(handle)
self._timeline.stop()
await omni.kit.app.get_app().next_update_async()
self.call_all_rigid_body_apis(handle)
# compare values from dc to usd to see if they match
async def test_update_usd(self, gpu=False):
self._physics_scene.CreateGravityDirectionAttr().Set(Gf.Vec3f(0.0, 0.0, -1.0))
self._physics_scene.CreateGravityMagnitudeAttr().Set(9.81)
prim = await dc_utils.add_cube(self._stage, "/cube", 1.00, (0, 0, 1.00))
self._timeline.play()
await omni.kit.app.get_app().next_update_async()
handle = self._dc.get_rigid_body("/cube")
rigid_prim = UsdPhysics.RigidBodyAPI(prim)
await dc_utils.simulate(1.0)
dc_pose = self._dc.get_rigid_body_pose(handle)
usd_pose = dc_conversions.create_transform_from_mat(omni.usd.utils.get_world_transform_matrix(prim))
self.assertTrue(
np.allclose([dc_pose.p.x, dc_pose.p.y, dc_pose.p.z], [usd_pose.p.x, usd_pose.p.y, usd_pose.p.z], atol=1e-2)
)
dc_velocity = self._dc.get_rigid_body_linear_velocity(handle)
usd_velocity = rigid_prim.GetVelocityAttr().Get()
self.assertTrue(np.allclose([dc_velocity.x, dc_velocity.y, dc_velocity.z], usd_velocity, atol=1e-2))
rigid_prim.GetVelocityAttr().Set((0, 0, 0))
await omni.kit.app.get_app().next_update_async()
dc_velocity = self._dc.get_rigid_body_linear_velocity(handle)
usd_velocity = rigid_prim.GetVelocityAttr().Get()
self.assertTrue(np.allclose([dc_velocity.x, dc_velocity.y, dc_velocity.z], usd_velocity, atol=1e-2))
async def test_physics_no_render(self):
self._physics_scene.CreateGravityDirectionAttr().Set(Gf.Vec3f(0.0, 0.0, 0.0))
self._physics_scene.CreateGravityMagnitudeAttr().Set(0.0)
await dc_utils.add_cube(self._stage, "/cube", 1.00, (0, 0, 1.00))
self._physx_interface = omni.physx.acquire_physx_interface()
self._physx_interface.start_simulation()
self._physx_interface.force_load_physics_from_usd()
handle = self._dc.get_rigid_body("/cube")
self.assertNotEqual(handle, _dynamic_control.INVALID_HANDLE)
self._dc.get_rigid_body_name(handle)
self._dc.get_rigid_body_path(handle)
self._dc.get_rigid_body_parent_joint(handle)
self._dc.get_rigid_body_child_joint_count(handle)
self._dc.get_rigid_body_child_joint(handle, 0)
self._dc.get_rigid_body_child_joint(handle, 100)
self._dc.get_rigid_body_pose(handle)
self._dc.set_rigid_body_pose(handle, _dynamic_control.Transform())
self._dc.set_rigid_body_disable_gravity(handle, True)
self._dc.set_rigid_body_disable_simulation(handle, False)
self._dc.get_rigid_body_linear_velocity(handle)
self._dc.get_rigid_body_local_linear_velocity(handle)
self._dc.set_rigid_body_linear_velocity(handle, (0, 0, 0))
self._dc.get_rigid_body_angular_velocity(handle)
self._dc.set_rigid_body_angular_velocity(handle, (0, 0, 0))
self._dc.apply_body_force(handle, (0, 0, 0), (0, 0, 0), True)
self._dc.apply_body_force(handle, (0, 0, 0), (0, 0, 0), False)
self._dc.get_relative_body_poses(handle, [handle])
self._dc.get_rigid_body_properties(handle)
self._dc.set_rigid_body_properties(handle, _dynamic_control.RigidBodyProperties())
current_time = 0
self._physx_interface.update_simulation(elapsedStep=1.0 / 60.0, currentTime=current_time)
self._physx_interface.update_transformations(
updateToFastCache=True, updateToUsd=True, updateVelocitiesToUsd=True, outputVelocitiesLocalSpace=False
)
async def test_apply_body_force(self):
self._physics_scene.CreateGravityDirectionAttr().Set(Gf.Vec3f(0.0, 0.0, -1.0))
self._physics_scene.CreateGravityMagnitudeAttr().Set(9.81)
prim = await dc_utils.add_cube(self._stage, "/cube", 1.00, (2.00, 0, 1.00), True, 1)
# make sure that motion is not damped
physxRigidBodyAPI = PhysxSchema.PhysxRigidBodyAPI.Apply(prim)
physxRigidBodyAPI.CreateLinearDampingAttr(0)
physxRigidBodyAPI.CreateAngularDampingAttr(0)
self._timeline.play()
await omni.kit.app.get_app().next_update_async()
handle = self._dc.get_rigid_body("/cube")
pos = self._dc.get_rigid_body_pose(handle).p
self._dc.set_rigid_body_disable_gravity(handle, True)
self._dc.wake_up_rigid_body(handle)
self._dc.set_rigid_body_linear_velocity(handle, (0, 0, 0))
# rotate using local force
self._dc.apply_body_force(handle, (0, 0, -1), (-2.00, 0, 0), False)
await dc_utils.simulate(1.0)
vel = self._dc.get_rigid_body_angular_velocity(handle)
self.assertAlmostEqual(vel[1], -0.2, delta=0.001)
# clear all motion
await omni.kit.app.get_app().next_update_async()
self._dc.set_rigid_body_linear_velocity(handle, (0, 0, 0))
self._dc.set_rigid_body_angular_velocity(handle, (0, 0, 0))
new_pose = _dynamic_control.Transform((2.00, 0, 1.00), (0, 0, 0, 1))
self._dc.set_rigid_body_pose(handle, new_pose)
await omni.kit.app.get_app().next_update_async()
# make sure that we stop moving
await dc_utils.simulate(1.0)
vel = self._dc.get_rigid_body_angular_velocity(handle)
self.assertAlmostEqual(vel[1], 0.0, delta=0.001)
await omni.kit.app.get_app().next_update_async()
# rotate the opposite direction via global force
self._dc.apply_body_force(handle, (0, 0, 1), (0, 0, 0), True)
await dc_utils.simulate(1.0)
vel = self._dc.get_rigid_body_angular_velocity(handle)
self.assertAlmostEqual(vel[1], 0.2, delta=0.001)
async def test_apply_body_torque(self):
self._physics_scene.CreateGravityDirectionAttr().Set(Gf.Vec3f(0.0, 0.0, -1.0))
self._physics_scene.CreateGravityMagnitudeAttr().Set(9.81)
prim = await dc_utils.add_cube(self._stage, "/cube", 1.00, (2.00, 0, 1.00), True, 1)
# make sure that motion is not damped
physxRigidBodyAPI = PhysxSchema.PhysxRigidBodyAPI.Apply(prim)
physxRigidBodyAPI.CreateLinearDampingAttr(0)
physxRigidBodyAPI.CreateAngularDampingAttr(0)
self._timeline.play()
await omni.kit.app.get_app().next_update_async()
handle = self._dc.get_rigid_body("/cube")
pos = self._dc.get_rigid_body_pose(handle).p
self._dc.set_rigid_body_disable_gravity(handle, True)
self._dc.wake_up_rigid_body(handle)
self._dc.set_rigid_body_linear_velocity(handle, (0, 0, 0))
# rotate using world torque
self._dc.apply_body_torque(handle, (0, 0, -2.00), True)
await dc_utils.simulate(1.0)
vel = self._dc.get_rigid_body_angular_velocity(handle)
self.assertAlmostEqual(vel[2], -0.2, delta=0.001)
print(vel)
# clear all motion
await omni.kit.app.get_app().next_update_async()
self._dc.set_rigid_body_linear_velocity(handle, (0, 0, 0))
self._dc.set_rigid_body_angular_velocity(handle, (0, 0, 0))
# flip the rigid body 180 degrees around x so that applying a local torque rotates in the opposite direction
new_pose = _dynamic_control.Transform((2.00, 0, 1.00), (1, 0, 0, 0))
self._dc.set_rigid_body_pose(handle, new_pose)
await omni.kit.app.get_app().next_update_async()
# make sure that we stop moving
await dc_utils.simulate(1.0)
vel = self._dc.get_rigid_body_angular_velocity(handle)
self.assertAlmostEqual(vel[1], 0.0, delta=0.001)
await omni.kit.app.get_app().next_update_async()
# should rotate in the opposite direction
self._dc.apply_body_torque(handle, (0, 0, -2.00), False)
await dc_utils.simulate(1.0)
vel = self._dc.get_rigid_body_angular_velocity(handle)
self.assertAlmostEqual(vel[2], 0.2, delta=0.001)
print(vel)
| 15,435 | Python | 48.003174 | 119 | 0.645092 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.dynamic_control/omni/isaac/dynamic_control/tests/test_pickles.py | # Copyright (c) 2018-2021, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
# NOTE:
# omni.kit.test - std python's unittest module with additional wrapping to add support for async/await tests
# For most things refer to unittest docs: https://docs.python.org/3/library/unittest.html
import omni.kit.test
import carb.tokens
import os
import asyncio
import numpy as np
import carb
import pickle
# Import extension python module we are testing with absolute import path, as if we are external user (other extension)
from omni.isaac.dynamic_control import _dynamic_control
# Having a test class derived from omni.kit.test.AsyncTestCase declared at the root of the module will make it auto-discoverable by omni.kit.test
class TestPickles(omni.kit.test.AsyncTestCase):
# Before running each test
async def setUp(self):
await omni.kit.app.get_app().next_update_async()
pass
# After running each test
async def tearDown(self):
await omni.kit.app.get_app().next_update_async()
pass
async def test_pickle_float3(self):
f3_src = carb.Float3(2.0, -1.5, 13.37)
f3_bytes = pickle.dumps(f3_src)
f3_dst = pickle.loads(f3_bytes)
error_f3 = sum(abs(np.array(f3_src)) - abs(np.array(f3_dst)))
self.assertAlmostEqual(error_f3, 0)
async def test_pickle_float4(self):
f4_src = carb.Float4(2.0, -1.5, 13.37, 42)
f4_bytes = pickle.dumps(f4_src)
f4_dst = pickle.loads(f4_bytes)
error_f4 = sum(abs(np.array(f4_src)) - abs(np.array(f4_dst)))
self.assertAlmostEqual(error_f4, 0)
async def test_pickle_transform(self):
tx_src = _dynamic_control.Transform((0.5, 1.25, -1.0), (0.1, 0.2, 0.3, 0.4))
tx_bytes = pickle.dumps(tx_src)
tx_dst = pickle.loads(tx_bytes)
error_p = sum(abs(np.array(tx_src.p)) - abs(np.array(tx_dst.p)))
error_r = sum(abs(np.array(tx_src.r)) - abs(np.array(tx_dst.r)))
self.assertAlmostEqual(error_p, 0)
self.assertAlmostEqual(error_r, 0)
async def test_pickle_velocity(self):
vel_src = _dynamic_control.Velocity((-1.1, -2.2, -3.3), (17, 42, 33))
vel_bytes = pickle.dumps(vel_src)
vel_dst = pickle.loads(vel_bytes)
error_linear = sum(abs(np.array(vel_src.linear)) - abs(np.array(vel_dst.linear)))
error_angular = sum(abs(np.array(vel_src.angular)) - abs(np.array(vel_dst.angular)))
self.assertAlmostEqual(error_linear, 0)
self.assertAlmostEqual(error_angular, 0)
async def test_pickle_rigid_body_state(self):
rbs_src = _dynamic_control.RigidBodyState()
tx_src = _dynamic_control.Transform((0.5, 1.25, -1.0), (0.1, 0.2, 0.3, 0.4))
vel_src = _dynamic_control.Velocity((-1.1, -2.2, -3.3), (17, 42, 33))
rbs_src.pose = tx_src
rbs_src.vel = vel_src
rbs_bytes = pickle.dumps(rbs_src)
rbs_dst = pickle.loads(rbs_bytes)
error_pose_p = sum(abs(np.array(rbs_src.pose.p)) - abs(np.array(rbs_dst.pose.p)))
error_pose_r = sum(abs(np.array(rbs_src.pose.r)) - abs(np.array(rbs_dst.pose.r)))
error_vel_linear = sum(abs(np.array(rbs_src.vel.linear)) - abs(np.array(rbs_dst.vel.linear)))
error_vel_angular = sum(abs(np.array(rbs_src.vel.angular)) - abs(np.array(rbs_dst.vel.angular)))
self.assertAlmostEqual(error_pose_p, 0)
self.assertAlmostEqual(error_pose_r, 0)
self.assertAlmostEqual(error_vel_linear, 0)
self.assertAlmostEqual(error_vel_angular, 0)
async def test_pickle_dof_state(self):
ds_src = _dynamic_control.DofState(2.0, -1.5, 5.5)
ds_bytes = pickle.dumps(ds_src)
ds_dst = pickle.loads(ds_bytes)
error_pos = abs(np.array(ds_src.pos)) - abs(np.array(ds_dst.pos))
error_vel = abs(np.array(ds_src.vel)) - abs(np.array(ds_dst.vel))
error_effort = abs(np.array(ds_src.effort)) - abs(np.array(ds_dst.effort))
self.assertAlmostEqual(error_pos, 0)
self.assertAlmostEqual(error_vel, 0)
self.assertAlmostEqual(error_effort, 0)
async def test_pickle_dof_properties(self):
dp_src = _dynamic_control.DofProperties()
dp_src.type = _dynamic_control.DOF_ROTATION
dp_src.has_limits = True
dp_src.lower = -3.14
dp_src.upper = 1.57
dp_src.drive_mode = _dynamic_control.DRIVE_ACCELERATION
dp_src.max_velocity = 123.4
dp_src.max_effort = 1234.5
dp_src.stiffness = 1e4
dp_src.damping = 1e3
dp_bytes = pickle.dumps(dp_src)
dp_dst = pickle.loads(dp_bytes)
self.assertEqual(dp_dst.type, dp_src.type)
self.assertTrue(dp_dst.has_limits)
self.assertAlmostEqual(dp_dst.lower, dp_src.lower)
self.assertAlmostEqual(dp_dst.upper, dp_src.upper)
self.assertEqual(dp_dst.drive_mode, dp_src.drive_mode)
self.assertAlmostEqual(dp_dst.max_velocity, dp_src.max_velocity)
self.assertAlmostEqual(dp_dst.max_effort, dp_src.max_effort)
self.assertAlmostEqual(dp_dst.stiffness, dp_src.stiffness)
self.assertAlmostEqual(dp_dst.damping, dp_src.damping)
async def test_pickle_attractor_properties(self):
ap_src = _dynamic_control.AttractorProperties()
ap_src.body = 123456789
ap_src.axes = _dynamic_control.AXIS_ALL
ap_src.target.p = (-1, -2, -3)
ap_src.target.r = (1, 2, 3, 4)
ap_src.offset.p = (-0.1, -0.2, -0.3)
ap_src.offset.r = (0.1, 0.2, 0.3, 0.4)
ap_src.stiffness = 1e5
ap_src.damping = 1e4
ap_src.force_limit = 1e3
ap_bytes = pickle.dumps(ap_src)
ap_dst = pickle.loads(ap_bytes)
self.assertEqual(ap_dst.body, ap_src.body)
self.assertEqual(ap_dst.axes, ap_src.axes)
error_target_p = sum(abs(np.array(ap_src.target.p)) - abs(np.array(ap_dst.target.p)))
error_target_r = sum(abs(np.array(ap_src.target.r)) - abs(np.array(ap_dst.target.r)))
error_offset_p = sum(abs(np.array(ap_src.offset.p)) - abs(np.array(ap_dst.offset.p)))
error_offset_r = sum(abs(np.array(ap_src.offset.r)) - abs(np.array(ap_dst.offset.r)))
self.assertAlmostEqual(error_target_p, 0)
self.assertAlmostEqual(error_target_r, 0)
self.assertAlmostEqual(error_offset_p, 0)
self.assertAlmostEqual(error_offset_r, 0)
self.assertAlmostEqual(ap_dst.stiffness, ap_src.stiffness)
self.assertAlmostEqual(ap_dst.damping, ap_src.damping)
self.assertAlmostEqual(ap_dst.force_limit, ap_src.force_limit)
async def test_pickle_articulation_properties(self):
ap_src = _dynamic_control.ArticulationProperties()
ap_src.solver_position_iteration_count = 3
ap_src.solver_velocity_iteration_count = 4
ap_src.enable_self_collisions = True
ap_bytes = pickle.dumps(ap_src)
ap_dst = pickle.loads(ap_bytes)
self.assertEqual(ap_dst.solver_position_iteration_count, ap_src.solver_position_iteration_count)
self.assertEqual(ap_dst.solver_velocity_iteration_count, ap_src.solver_velocity_iteration_count)
self.assertEqual(ap_dst.enable_self_collisions, ap_src.enable_self_collisions)
async def test_pickle_rigid_body_properties(self):
rb_src = _dynamic_control.RigidBodyProperties()
rb_src.mass = 14.0
rb_src.moment = carb.Float3(1.0, 2.0, 3.0)
rb_src.max_depeneration_velocity = 2.0
rb_src.max_contact_impulse = 3.0
rb_src.solver_position_iteration_count = 4
rb_src.solver_velocity_iteration_count = 5
rb_bytes = pickle.dumps(rb_src)
rb_dst = pickle.loads(rb_bytes)
self.assertEqual(rb_dst.mass, rb_src.mass)
error_moment = sum(abs(np.array(rb_dst.moment)) - abs(np.array(rb_src.moment)))
self.assertAlmostEqual(error_moment, 0)
self.assertEqual(rb_dst.max_depeneration_velocity, rb_src.max_depeneration_velocity)
self.assertEqual(rb_dst.max_contact_impulse, rb_src.max_contact_impulse)
self.assertEqual(rb_dst.solver_position_iteration_count, rb_src.solver_position_iteration_count)
self.assertEqual(rb_dst.solver_velocity_iteration_count, rb_src.solver_velocity_iteration_count)
| 8,579 | Python | 46.932961 | 142 | 0.656836 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.dynamic_control/docs/CHANGELOG.md | # Changelog
## [1.2.3] - 2023-01-21
### Fixed
- Fixed get_name when multiple objects share the same prim name by using the isaac:nameOverride attribute
## [1.2.2] - 2022-10-20
### Fixed
- test golden values
## [1.2.1] - 2022-10-17
### Fixed
- explicitly handle prim deletion
## [1.2.0] - 2022-09-27
### Changed
- tests to use nucleus assets
### Removed
- usd files local to extension
## [1.1.1] - 2022-09-07
### Fixed
- Fixes for kit 103.5
## [1.1.0] - 2022-08-12
### Added
- cMassLocalPose to DcRigidBodyProperties
## [1.0.1] - 2022-08-09
### Changed
- Removed simple_articulation.usd, test_articulation_simple uses Nucleus asset
## [1.0.0] - 2022-05-11
### Changed
- non-backwards compatible change: dof indexing matches physx tensor API
## [0.2.2] - 2022-04-29
### Fixed
- Handle physx unwrapped revolute joints
## [0.2.1] - 2022-02-13
### Fixed
- Properly delete handles on prim deletion
## [0.2.0] - 2022-01-14
### Fixed
- Error message when waking up a kinematic rigid body
- Error message when setting linear velocity on a body with simulation disabled
- Error message when setting angular velocity on a body with simulation disabled
## [0.1.8] - 2021-08-16
### Added
- get_effort
- get_articulation_dof_efforts
- apply_body_torque
### Fixed
- inconsistent return types
- crash when stepping with a zero timestep as first step
### Changed
- apply_effort -> set_effort
- apply_articulation_dof_efforts -> set_articulation_dof_efforts
- handle refresh messages are printed out as info messages, instead of always printing
- apply_body_force now has a bool to specify if the force is global or local
## [0.1.7] - 2021-08-16
### Added
- Sleep functions for rigid bodies and articulations
### Changed
- return types use size_t instead of int where appropriate
## [0.1.6] - 2021-08-04
### Changed
- DriveMode is now either DRIVE_FORCE or DRIVE_ACCELERATION, default is acceleration
- Position/Velocity drive is not specified via DriveMode
- All API calls verify if simulating, return otherwise
- set_dof_properties will not enable or change drive limits
- set_dof_state takes StateFlags to apply specific states
- get_dof_state takes StateFlags to set which states to get
### Added
- State variables can be printed
- ArticulationProperties to control articulation settings
- RigidBodyProperties can control iteration counts and contact impulse settings
- get_articulation_properties
- set_articulation_properties
- get_articulation_dof_position_targets
- get_articulation_dof_velocity_targets
- get_articulation_dof_masses
- set_rigid_body_properties
- get_dof_properties
- unit tests for most articulation, rigid body, dof and joint apis
- utilities for common scene setup and testing
### Removed
- get_articulation_dof_state_derivatives
- DriveModes DRIVE_NONE, DRIVE_POS, DRIVE_VEL
### Fixed
- apply_body_force now applies a force at a point
- set_dof_properties does not break position/velocity drives
- dof efforts report correct forces/torques due to gravity
- when changing state of a dof or a root link, unrelated state values are not applied anymore
- set_dof_state applies efforts now
- get_dof_properties works correctly now
## [0.1.5] - 2021-07-23
### Added
- Split samples from extension
## [0.1.4] - 2021-07-14
### Added
- now works when running without editor/timeline and only physx events.
- fixed crash with setting dof properties
## [0.1.3] - 2021-05-24
### Added
- force and torque sensors
## [0.1.2] - 2021-02-17
### Added
- update to python 3.7
- update to omni.kit.uiapp
## [0.1.1] - 2020-12-11
### Added
- Add unit tests to extension
## [0.1.0] - 2020-12-03
### Added
- Initial version of Isaac Sim Dynamic Control Extension
| 3,695 | Markdown | 23.64 | 105 | 0.728552 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.dynamic_control/docs/README.md | # Usage
To enable this extension, go to the Extension Manager menu and enable the omni.isaac.dynamic_control extension.
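The extension can also be enabled from Python. The snippet below is a minimal sketch that assumes it runs inside a Kit/Isaac Sim scripting context where the Kit extension manager is available:
```python
import omni.kit.app
# Enable omni.isaac.dynamic_control through the Kit extension manager
ext_manager = omni.kit.app.get_app().get_extension_manager()
ext_manager.set_extension_enabled_immediate("omni.isaac.dynamic_control", True)
```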
| 118 | Markdown | 22.799995 | 107 | 0.79661 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.dynamic_control/docs/index.rst | Dynamic Control [omni.isaac.dynamic_control]
######################################################
The Dynamic Control extension provides a set of utilities to control physics objects.
It provides opaque handles for different physics objects that remain valid between PhysX scene resets, which occur whenever play or stop is pressed.
Basic Usage
===========
Start the physics simulation; at least one frame of simulation must occur before the Dynamic Control interface becomes fully active.
.. code-block:: python
:linenos:
import omni
omni.timeline.get_timeline_interface().play()
Acquire the Dynamic Control interface and interact with an articulation.
The code block below assumes a Franka Emika Panda robot is in the stage with a base path of /Franka.
.. code-block:: python
:linenos:
from omni.isaac.dynamic_control import _dynamic_control
dc = _dynamic_control.acquire_dynamic_control_interface()
# Get a handle to the Franka articulation
# This handle will automatically update if simulation is stopped and restarted
art = dc.get_articulation("/Franka")
# Get information about the structure of the articulation
num_joints = dc.get_articulation_joint_count(art)
num_dofs = dc.get_articulation_dof_count(art)
num_bodies = dc.get_articulation_body_count(art)
# Get a specific degree of freedom on an articulation
dof_ptr = dc.find_articulation_dof(art, "panda_joint2")
dof_state = dc.get_dof_state(dof_ptr)
# print position for the degree of freedom
print(dof_state.pos)
# This should be called each frame of simulation if state on the articulation is being changed.
dc.wake_up_articulation(art)
dc.set_dof_position_target(dof_ptr, -1.5)
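The same interface can be used to query and drive individual rigid bodies. The sketch below is illustrative and assumes a rigid body prim exists at /cube and that simulation is playing:
.. code-block:: python
:linenos:
from omni.isaac.dynamic_control import _dynamic_control
dc = _dynamic_control.acquire_dynamic_control_interface()
# Get a handle to the rigid body prim
body = dc.get_rigid_body("/cube")
# Read the current pose and velocities
pose = dc.get_rigid_body_pose(body)
lin_vel = dc.get_rigid_body_linear_velocity(body)
ang_vel = dc.get_rigid_body_angular_velocity(body)
# Teleport the body and give it a new linear velocity
dc.set_rigid_body_pose(body, _dynamic_control.Transform((1.0, 0, 0), (0, 0, 0, 1)))
dc.set_rigid_body_linear_velocity(body, (1.0, 0, 0))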
Acquiring Extension Interface
==============================
.. automethod:: omni.isaac.dynamic_control._dynamic_control.acquire_dynamic_control_interface
.. automethod:: omni.isaac.dynamic_control._dynamic_control.release_dynamic_control_interface
Dynamic Control API
====================
.. autoclass:: omni.isaac.dynamic_control._dynamic_control.DynamicControl
:members:
:undoc-members:
:exclude-members:
Transform and Velocity
======================
.. autoclass:: omni.isaac.dynamic_control._dynamic_control.Transform
:members:
:undoc-members:
:show-inheritance:
.. autoclass:: omni.isaac.dynamic_control._dynamic_control.Velocity
:members:
:undoc-members:
:show-inheritance:
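As used by the tests in this extension, a Transform is constructed from a position tuple and a rotation quaternion in (x, y, z, w) order, and a Velocity from linear and angular components. A minimal sketch:
.. code-block:: python
:linenos:
from omni.isaac.dynamic_control import _dynamic_control
# Position (x, y, z) and quaternion (x, y, z, w); (0, 0, 0, 1) is the identity rotation
pose = _dynamic_control.Transform((0.5, 1.25, -1.0), (0, 0, 0, 1))
# Linear and angular velocity components
vel = _dynamic_control.Velocity((-1.1, -2.2, -3.3), (0.1, 0.2, 0.3))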
Types
=====
.. autoclass:: omni.isaac.dynamic_control._dynamic_control.ObjectType
:members:
:show-inheritance:
:exclude-members: name
.. autoclass:: omni.isaac.dynamic_control._dynamic_control.DofType
:members:
:show-inheritance:
:exclude-members: name
.. autoclass:: omni.isaac.dynamic_control._dynamic_control.JointType
:members:
:show-inheritance:
:exclude-members: name
.. autoclass:: omni.isaac.dynamic_control._dynamic_control.DriveMode
:members:
:show-inheritance:
:exclude-members: name
Properties
==========
.. autoclass:: omni.isaac.dynamic_control._dynamic_control.ArticulationProperties
:members:
:undoc-members:
:show-inheritance:
.. autoclass:: omni.isaac.dynamic_control._dynamic_control.RigidBodyProperties
:members:
:undoc-members:
:show-inheritance:
.. autoclass:: omni.isaac.dynamic_control._dynamic_control.DofProperties
:members:
:undoc-members:
:show-inheritance:
.. autoclass:: omni.isaac.dynamic_control._dynamic_control.AttractorProperties
:members:
:undoc-members:
:show-inheritance:
.. autoclass:: omni.isaac.dynamic_control._dynamic_control.D6JointProperties
:members:
:undoc-members:
:show-inheritance:
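For example, DOF drive properties can be populated and applied to a degree of freedom. The sketch below follows the fields exercised by this extension's tests and assumes the Franka stage from the Basic Usage example:
.. code-block:: python
:linenos:
from omni.isaac.dynamic_control import _dynamic_control
dc = _dynamic_control.acquire_dynamic_control_interface()
art = dc.get_articulation("/Franka")
dof_ptr = dc.find_articulation_dof(art, "panda_joint2")
props = _dynamic_control.DofProperties()
props.drive_mode = _dynamic_control.DRIVE_ACCELERATION
props.max_velocity = 4.0
props.max_effort = 1000.0
props.stiffness = 1e4
props.damping = 1e3
# Apply the drive properties to the DOF
dc.set_dof_properties(dof_ptr, props)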
States
==========
.. autoclass:: omni.isaac.dynamic_control._dynamic_control.RigidBodyState
:members:
:undoc-members:
:show-inheritance:
.. autoclass:: omni.isaac.dynamic_control._dynamic_control.DofState
:members:
:undoc-members:
:show-inheritance:
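State structures are typically read in bulk. The sketch below, following the pattern used in this extension's tests, reads all DOF states of an articulation and inspects the velocity of a single joint (the articulation path and joint name are assumptions):
.. code-block:: python
:linenos:
from omni.isaac.dynamic_control import _dynamic_control
dc = _dynamic_control.acquire_dynamic_control_interface()
art = dc.get_articulation("/Franka")
dof_idx = dc.find_articulation_dof_index(art, "panda_joint2")
# Each entry carries position, velocity and effort (see DofState)
dof_states = dc.get_articulation_dof_states(art, _dynamic_control.STATE_ALL)
print(dof_states["vel"][dof_idx])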
Constants
=========
Object handles
--------------
.. autoattribute:: omni.isaac.dynamic_control._dynamic_control.INVALID_HANDLE
State Flags
-----------
.. autoattribute:: omni.isaac.dynamic_control._dynamic_control.STATE_NONE
.. autoattribute:: omni.isaac.dynamic_control._dynamic_control.STATE_POS
.. autoattribute:: omni.isaac.dynamic_control._dynamic_control.STATE_VEL
.. autoattribute:: omni.isaac.dynamic_control._dynamic_control.STATE_EFFORT
.. autoattribute:: omni.isaac.dynamic_control._dynamic_control.STATE_ALL
Axis Flags
----------
.. autoattribute:: omni.isaac.dynamic_control._dynamic_control.AXIS_NONE
.. autoattribute:: omni.isaac.dynamic_control._dynamic_control.AXIS_X
.. autoattribute:: omni.isaac.dynamic_control._dynamic_control.AXIS_Y
.. autoattribute:: omni.isaac.dynamic_control._dynamic_control.AXIS_Z
.. autoattribute:: omni.isaac.dynamic_control._dynamic_control.AXIS_TWIST
.. autoattribute:: omni.isaac.dynamic_control._dynamic_control.AXIS_SWING_1
.. autoattribute:: omni.isaac.dynamic_control._dynamic_control.AXIS_SWING_2
.. autoattribute:: omni.isaac.dynamic_control._dynamic_control.AXIS_ALL_TRANSLATION
.. autoattribute:: omni.isaac.dynamic_control._dynamic_control.AXIS_ALL_ROTATION
.. autoattribute:: omni.isaac.dynamic_control._dynamic_control.AXIS_ALL
| 5,319 | reStructuredText | 28.72067 | 148 | 0.715172 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.motion_generation/motion_policy_configs/franka/rmpflow/franka_rmpflow_common.yaml | # Copyright (c) 2019-2021, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
joint_limit_buffers: [.01, .03, .01, .01, .01, .01, .01]
rmp_params:
cspace_target_rmp:
metric_scalar: 50.
position_gain: 100.
damping_gain: 50.
robust_position_term_thresh: .5
inertia: 1.
cspace_trajectory_rmp:
p_gain: 100.
d_gain: 10.
ff_gain: .25
weight: 50.
cspace_affine_rmp:
final_handover_time_std_dev: .25
weight: 2000.
joint_limit_rmp:
metric_scalar: 1000.
metric_length_scale: .01
metric_exploder_eps: 1e-3
metric_velocity_gate_length_scale: .01
accel_damper_gain: 200.
accel_potential_gain: 1.
accel_potential_exploder_length_scale: .1
accel_potential_exploder_eps: 1e-2
joint_velocity_cap_rmp:
max_velocity: 4. # max_xd
velocity_damping_region: 1.5
damping_gain: 1000.0
metric_weight: 100. # metric_scalar
target_rmp:
accel_p_gain: 30.
accel_d_gain: 85.
accel_norm_eps: .075
metric_alpha_length_scale: .05
min_metric_alpha: .01
max_metric_scalar: 10000
min_metric_scalar: 2500
proximity_metric_boost_scalar: 20.
proximity_metric_boost_length_scale: .02
xi_estimator_gate_std_dev: 20000.
accept_user_weights: false # Values >= .5 are true and < .5 are false
axis_target_rmp:
accel_p_gain: 210.
accel_d_gain: 60.
metric_scalar: 10
proximity_metric_boost_scalar: 3000.
proximity_metric_boost_length_scale: .08
xi_estimator_gate_std_dev: 20000.
accept_user_weights: false
collision_rmp:
damping_gain: 50.
damping_std_dev: .04
damping_robustness_eps: 1e-2
damping_velocity_gate_length_scale: .01
repulsion_gain: 800.
repulsion_std_dev: .01
metric_modulation_radius: .5
metric_scalar: 10000. # Real value should be this.
#metric_scalar: 0. # Turns off collision avoidance.
metric_exploder_std_dev: .02
metric_exploder_eps: .001
damping_rmp:
accel_d_gain: 30.
metric_scalar: 50.
inertia: 100.
canonical_resolve:
max_acceleration_norm: 50.
projection_tolerance: .01
verbose: false
body_cylinders:
- name: base_stem
pt1: [0,0,.333]
pt2: [0,0,0.]
radius: .05
- name: base_tee
pt1: [0,0,.333]
pt2: [0,0,.333]
radius: .15
# Each arm is approx. 1m from (arm) base to gripper center.
# .1661 between links (approx .15)
body_collision_controllers:
- name: panda_link7
radius: .05
- name: panda_wrist_end_pt
radius: .05
- name: panda_hand
radius: .05
- name: panda_face_left
radius: .05
- name: panda_face_right
radius: .05
- name: panda_leftfingertip
radius: .075
- name: panda_rightfingertip
radius: .075
| 3,345 | YAML | 29.697247 | 78 | 0.612257 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.motion_generation/motion_policy_configs/Festo/Cobot/rmpflow/festo_cobot_rmpflow_config.yaml | # Artificially limit the robot joints. For example:
# A joint with range +-pi would be limited to +-(pi-.01)
joint_limit_buffers: [.01, .01, .01, .01, .01, .01]
# RMPflow has many modifiable parameters, but these serve as a great start.
# Most parameters will not need to be modified
rmp_params:
cspace_target_rmp:
metric_scalar: 50.
position_gain: 100.
damping_gain: 50.
robust_position_term_thresh: .5
inertia: 1.
cspace_trajectory_rmp:
p_gain: 100.
d_gain: 10.
ff_gain: .25
weight: 50.
cspace_affine_rmp:
final_handover_time_std_dev: .25
weight: 2000.
joint_limit_rmp:
metric_scalar: 1000.
metric_length_scale: .01
metric_exploder_eps: 1e-3
metric_velocity_gate_length_scale: .01
accel_damper_gain: 200.
accel_potential_gain: 1.
accel_potential_exploder_length_scale: .1
accel_potential_exploder_eps: 1e-2
joint_velocity_cap_rmp:
max_velocity: 4.
velocity_damping_region: 1.5
damping_gain: 1000.0
metric_weight: 100.
target_rmp:
accel_p_gain: 60.
accel_d_gain: 85.
accel_norm_eps: .075
metric_alpha_length_scale: .05
min_metric_alpha: .01
max_metric_scalar: 10000
min_metric_scalar: 2500
proximity_metric_boost_scalar: 20.
proximity_metric_boost_length_scale: .02
xi_estimator_gate_std_dev: 20000.
accept_user_weights: false
axis_target_rmp:
accel_p_gain: 210.
accel_d_gain: 60.
metric_scalar: 10
proximity_metric_boost_scalar: 3000.
proximity_metric_boost_length_scale: .08
xi_estimator_gate_std_dev: 20000.
accept_user_weights: false
collision_rmp:
damping_gain: 50.
damping_std_dev: .04
damping_robustness_eps: 1e-2
damping_velocity_gate_length_scale: .01
repulsion_gain: 800.
repulsion_std_dev: .01
metric_modulation_radius: .5
metric_scalar: 10000.
metric_exploder_std_dev: .02
metric_exploder_eps: .001
damping_rmp:
accel_d_gain: 30.
metric_scalar: 50.
inertia: 100.
canonical_resolve:
max_acceleration_norm: 50.
projection_tolerance: .01
verbose: false
# body_cylinders are used to promote self-collision avoidance between the robot and its base
# The example below defines the robot base to be a capsule defined by the absolute coordinates pt1 and pt2.
# The semantic name provided for each body_cylinder does not need to be present in the robot URDF.
body_cylinders:
- name: base
pt1: [0,0,.15]
pt2: [0,0,0.]
radius: .15
# body_collision_controllers defines spheres located at specified frames in the robot URDF
# These spheres will not be allowed to collide with the capsules enumerated under body_cylinders
# By design, most frames in industrial robots are kinematically unable to collide with the robot base.
# It is often only necessary to define body_collision_controllers near the end effector
body_collision_controllers:
- name: link_6
radius: .05
- name: link_5
radius: .07
| 3,231 | YAML | 32.319587 | 107 | 0.638193 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.motion_generation/motion_policy_configs/Festo/Cobot/rmpflow/festo_cobot_robot_description.yaml | # The robot descriptor defines the generalized coordinates and how to map those
# to the underlying URDF dofs.
api_version: 1.0
# Defines the generalized coordinates. Each generalized coordinate is assumed
# to have an entry in the URDF.
# Lula will only use these joints to control the robot position.
cspace:
- a1
- a2
- a3
- a4
- a5
- a6
default_q: [
0.0,0.0,0.0,-0.0,-0.0,-0.0
]
# Most dimensions of the cspace have a direct corresponding element
# in the URDF. This list of rules defines how unspecified coordinates
# should be extracted or how values in the URDF should be overwritten.
cspace_to_urdf_rules:
# Lula uses collision spheres to define the robot geometry in order to avoid
# collisions with external obstacles. If no spheres are specified, Lula will
# not be able to avoid obstacles.
collision_spheres:
- link_1:
- "center": [0.0, 0.032, 0.29]
"radius": 0.11
- "center": [0.0, 0.073, 0.331]
"radius": 0.08
- link_2:
- "center": [-0.0, 0.024, -0.0]
"radius": 0.08
- "center": [-0.0, 0.018, 0.11]
"radius": 0.07
- "center": [-0.0, 0.021, 0.051]
"radius": 0.08
- "center": [-0.0, 0.315, 0.132]
"radius": 0.06
- "center": [-0.0, 0.26, 0.128]
"radius": 0.062
- "center": [-0.0, 0.202, 0.124]
"radius": 0.064
- "center": [-0.0, 0.143, 0.12]
"radius": 0.066
- "center": [-0.0, 0.082, 0.115]
"radius": 0.068
- "center": [-0.0, 0.336, 0.057]
"radius": 0.06
- "center": [-0.0, 0.326, 0.095]
"radius": 0.06
- link_3:
- "center": [0.0, 0.035, 0.066]
"radius": 0.06
- "center": [0.0, 0.0, 0.0]
"radius": 0.07
- "center": [0.0, 0.001, 0.034]
"radius": 0.065
- link_4:
- "center": [0.0, -0.0, 0.124]
"radius": 0.06
- "center": [-0.0, 0.118, 0.163]
"radius": 0.07
- "center": [0.0, 0.037, 0.136]
"radius": 0.063
- "center": [-0.0, 0.077, 0.149]
"radius": 0.066
- "center": [-0.0, 0.131, 0.315]
"radius": 0.06
- "center": [-0.0, 0.122, 0.203]
"radius": 0.067
- "center": [-0.0, 0.125, 0.242]
"radius": 0.065
- "center": [-0.0, 0.128, 0.279]
"radius": 0.062
- "center": [0.0, 0.096, 0.327]
"radius": 0.05
- link_5:
- "center": [-0.0, -0.051, -0.0]
"radius": 0.06
- "center": [0.0, 0.068, 0.0]
"radius": 0.06
- "center": [-0.0, -0.011, -0.0]
"radius": 0.06
- "center": [0.0, 0.029, 0.0]
"radius": 0.06
- "center": [-0.0, 0.0, -0.028]
"radius": 0.06
- link_6:
- "center": [0.0, -0.0, 0.106]
"radius": 0.05
- "center": [0.017, 0.047, 0.118]
"radius": 0.02
- "center": [-0.008, 0.048, 0.12]
"radius": 0.02
| 2,769 | YAML | 26.425742 | 79 | 0.522571 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.motion_generation/motion_policy_configs/ur10/rmpflow/ur10_rmpflow_config.yaml | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
api_version: 1.0
joint_limit_buffers: [.01, .01, .01, .01, .01, .01]
rmp_params:
cspace_target_rmp:
metric_scalar: 50.
position_gain: 100.
damping_gain: 50.
robust_position_term_thresh: .5
inertia: 1.
cspace_trajectory_rmp:
p_gain: 80.
d_gain: 10.
ff_gain: .25
weight: 50.
cspace_affine_rmp:
final_handover_time_std_dev: .25
weight: 2000.
joint_limit_rmp:
metric_scalar: 1000.
metric_length_scale: .01
metric_exploder_eps: 1e-3
metric_velocity_gate_length_scale: .01
accel_damper_gain: 200.
accel_potential_gain: 1.
accel_potential_exploder_length_scale: .1
accel_potential_exploder_eps: 1e-2
joint_velocity_cap_rmp:
max_velocity: 2.15
velocity_damping_region: 0.5
damping_gain: 300.
metric_weight: 100.
target_rmp:
accel_p_gain: 80.
accel_d_gain: 120.
accel_norm_eps: .075
metric_alpha_length_scale: .05
min_metric_alpha: .01
max_metric_scalar: 10000.
min_metric_scalar: 2500.
proximity_metric_boost_scalar: 20.
proximity_metric_boost_length_scale: .02
accept_user_weights: false
axis_target_rmp:
accel_p_gain: 200.
accel_d_gain: 40.
metric_scalar: 10.
proximity_metric_boost_scalar: 3000.
proximity_metric_boost_length_scale: .05
accept_user_weights: false
collision_rmp:
damping_gain: 50.
damping_std_dev: .04
damping_robustness_eps: 1e-2
damping_velocity_gate_length_scale: .01
repulsion_gain: 1200.
repulsion_std_dev: .01
metric_modulation_radius: .5
metric_scalar: 10000.
metric_exploder_std_dev: .02
metric_exploder_eps: .001
damping_rmp:
accel_d_gain: 30.
metric_scalar: 50.
inertia: 100.
canonical_resolve:
max_acceleration_norm: 50.
projection_tolerance: .01
verbose: false
body_cylinders:
- name: base_link
pt1: [0, 0, 0.22]
pt2: [0, 0, 0]
radius: .09
body_collision_controllers:
- name: wrist_2_link
radius: .04
- name: wrist_3_link
radius: .04
- name: tool0
radius: .04
| 2,717 | YAML | 27.914893 | 76 | 0.616489 |
swadaskar/Isaac_Sim_Folder/exts/omni.isaac.motion_generation/motion_policy_configs/ur10/rmpflow/ur10_robot_description.yaml | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
# The robot description file defines the generalized coordinates and how to map
# those to the underlying URDF DOFs.
api_version: 1.0
# Defines the generalized coordinates. Each generalized coordinate is assumed
# to have an entry in the URDF, except when otherwise specified below under
# cspace_urdf_bridge.
cspace:
- shoulder_pan_joint
- shoulder_lift_joint
- elbow_joint
- wrist_1_joint
- wrist_2_joint
- wrist_3_joint
root_link: world
default_q: [-1.57, -1.57, -1.57, -1.57, 1.57, 0]
collision_spheres:
- upper_arm_link:
- center: [0.0, -0.045, 0.01]
radius: 0.1
- center: [0.0, -0.045, 0.06]
radius: 0.09
- center: [0.0, -0.045, 0.12]
radius: 0.06
- center: [0.0, -0.045, 0.18]
radius: 0.06
- center: [0.0, -0.045, 0.24]
radius: 0.06
- center: [0.0, -0.045, 0.3]
radius: 0.06
- center: [0.0, -0.045, 0.36]
radius: 0.06
- center: [0.0, -0.045, 0.42]
radius: 0.06
- center: [0.0, -0.045, 0.48]
radius: 0.06
- center: [0.0, -0.045, 0.54]
radius: 0.06
- center: [0.0, -0.045, 0.6]
radius: 0.08
- forearm_link:
- center: [0.0, 0.0, 0.0]
radius: 0.08
- center: [0.0, 0.0, 0.06]
radius: 0.07
- center: [0.0, 0.0, 0.12]
radius: 0.05
- center: [0.0, 0.0, 0.18]
radius: 0.05
- center: [0.0, 0.0, 0.24]
radius: 0.05
- center: [0.0, 0.0, 0.30]
radius: 0.05
- center: [0.0, 0.0, 0.36]
radius: 0.05
- center: [0.0, 0.0, 0.42]
radius: 0.05
- center: [0.0, 0.0, 0.48]
radius: 0.05
- center: [0.0, 0.0, 0.54]
radius: 0.05
- center: [0.0, 0.0, 0.57]
radius: 0.065
- wrist_1_link:
- center: [0.0, 0.0, 0.0]
radius: 0.05
- center: [0.0, 0.055, 0.0]
radius: 0.05
- center: [0.0, 0.11, 0.0]
radius: 0.065
- wrist_2_link:
- center: [0.0, 0.0, 0.0]
radius: 0.05
- center: [0.0, 0.0, 0.055]
radius: 0.05
- center: [0.0, 0, 0.11]
radius: 0.065
- wrist_3_link:
- center: [0.0, 0.0, 0.0]
radius: 0.045
- center: [0.0, 0.05, 0.0]
radius: 0.05
| 2,692 | YAML | 27.347368 | 79 | 0.543091 |