repo_name
stringlengths 4
116
| path
stringlengths 4
379
| size
stringlengths 1
7
| content
stringlengths 3
1.05M
| license
stringclasses 15
values |
---|---|---|---|---|
joshlory/droplet | vendor/ace/mode-matlab.js | 21661 | ace.define("ace/mode/matlab_highlight_rules",["require","exports","module","ace/lib/oop","ace/mode/text_highlight_rules"], function(require, exports, module) {
"use strict";
var oop = require("../lib/oop");
var TextHighlightRules = require("./text_highlight_rules").TextHighlightRules;
var MatlabHighlightRules = function() {
var keywords = (
"break|case|catch|classdef|continue|else|elseif|end|for|function|global|if|otherwise|parfor|persistent|return|spmd|switch|try|while"
);
var builtinConstants = (
"true|false|inf|Inf|nan|NaN|eps|pi|ans|nargin|nargout|varargin|varargout"
);
var builtinFunctions = (
"abs|accumarray|acos(?:d|h)?|acot(?:d|h)?|acsc(?:d|h)?|actxcontrol(?:list|select)?|actxGetRunningServer|actxserver|addlistener|addpath|addpref|addtodate|"+
"airy|align|alim|all|allchild|alpha|alphamap|amd|ancestor|and|angle|annotation|any|area|arrayfun|asec(?:d|h)?|asin(?:d|h)?|assert|assignin|atan(?:2|d|h)?|" +
"audiodevinfo|audioplayer|audiorecorder|aufinfo|auread|autumn|auwrite|avifile|aviinfo|aviread|axes|axis|balance|bar(?:3|3h|h)?|base2dec|beep|BeginInvoke|bench|"+
"bessel(?:h|i|j|k|y)|beta|betainc|betaincinv|betaln|bicg|bicgstab|bicgstabl|bin2dec|bitand|bitcmp|bitget|bitmax|bitnot|bitor|bitset|bitshift|bitxor|blanks|blkdiag|"+
"bone|box|brighten|brush|bsxfun|builddocsearchdb|builtin|bvp4c|bvp5c|bvpget|bvpinit|bvpset|bvpxtend|calendar|calllib|callSoapService|camdolly|cameratoolbar|camlight|"+
"camlookat|camorbit|campan|campos|camproj|camroll|camtarget|camup|camva|camzoom|cart2pol|cart2sph|cast|cat|caxis|cd|cdf2rdf|cdfepoch|cdfinfo|cdflib(?:\.(?:close|closeVar|"+
"computeEpoch|computeEpoch16|create|createAttr|createVar|delete|deleteAttr|deleteAttrEntry|deleteAttrgEntry|deleteVar|deleteVarRecords|epoch16Breakdown|epochBreakdown|getAttrEntry|"+
"getAttrgEntry|getAttrMaxEntry|getAttrMaxgEntry|getAttrName|getAttrNum|getAttrScope|getCacheSize|getChecksum|getCompression|getCompressionCacheSize|getConstantNames|"+
"getConstantValue|getCopyright|getFileBackward|getFormat|getLibraryCopyright|getLibraryVersion|getMajority|getName|getNumAttrEntries|getNumAttrgEntries|getNumAttributes|"+
"getNumgAttributes|getReadOnlyMode|getStageCacheSize|getValidate|getVarAllocRecords|getVarBlockingFactor|getVarCacheSize|getVarCompression|getVarData|getVarMaxAllocRecNum|"+
"getVarMaxWrittenRecNum|getVarName|getVarNum|getVarNumRecsWritten|getVarPadValue|getVarRecordData|getVarReservePercent|getVarsMaxWrittenRecNum|getVarSparseRecords|getVersion|"+
"hyperGetVarData|hyperPutVarData|inquire|inquireAttr|inquireAttrEntry|inquireAttrgEntry|inquireVar|open|putAttrEntry|putAttrgEntry|putVarData|putVarRecordData|renameAttr|"+
"renameVar|setCacheSize|setChecksum|setCompression|setCompressionCacheSize|setFileBackward|setFormat|setMajority|setReadOnlyMode|setStageCacheSize|setValidate|"+
"setVarAllocBlockRecords|setVarBlockingFactor|setVarCacheSize|setVarCompression|setVarInitialRecs|setVarPadValue|SetVarReservePercent|setVarsCacheSize|setVarSparseRecords))?|"+
"cdfread|cdfwrite|ceil|cell2mat|cell2struct|celldisp|cellfun|cellplot|cellstr|cgs|checkcode|checkin|checkout|chol|cholinc|cholupdate|circshift|cla|clabel|class|clc|clear|"+
"clearvars|clf|clipboard|clock|close|closereq|cmopts|cmpermute|cmunique|colamd|colon|colorbar|colordef|colormap|colormapeditor|colperm|Combine|comet|comet3|commandhistory|"+
"commandwindow|compan|compass|complex|computer|cond|condeig|condest|coneplot|conj|containers\.Map|contour(?:3|c|f|slice)?|contrast|conv|conv2|convhull|convhulln|convn|cool|"+
"copper|copyfile|copyobj|corrcoef|cos(?:d|h)?|cot(?:d|h)?|cov|cplxpair|cputime|createClassFromWsdl|createSoapMessage|cross|csc(?:d|h)?|csvread|csvwrite|ctranspose|cumprod|"+
"cumsum|cumtrapz|curl|customverctrl|cylinder|daqread|daspect|datacursormode|datatipinfo|date|datenum|datestr|datetick|datevec|dbclear|dbcont|dbdown|dblquad|dbmex|dbquit|"+
"dbstack|dbstatus|dbstep|dbstop|dbtype|dbup|dde23|ddeget|ddesd|ddeset|deal|deblank|dec2base|dec2bin|dec2hex|decic|deconv|del2|delaunay|delaunay3|delaunayn|DelaunayTri|delete|"+
"demo|depdir|depfun|det|detrend|deval|diag|dialog|diary|diff|diffuse|dir|disp|display|dither|divergence|dlmread|dlmwrite|dmperm|doc|docsearch|dos|dot|dragrect|drawnow|dsearch|"+
"dsearchn|dynamicprops|echo|echodemo|edit|eig|eigs|ellipj|ellipke|ellipsoid|empty|enableNETfromNetworkDrive|enableservice|EndInvoke|enumeration|eomday|eq|erf|erfc|erfcinv|"+
"erfcx|erfinv|error|errorbar|errordlg|etime|etree|etreeplot|eval|evalc|evalin|event\.(?:EventData|listener|PropertyEvent|proplistener)|exifread|exist|exit|exp|expint|expm|"+
"expm1|export2wsdlg|eye|ezcontour|ezcontourf|ezmesh|ezmeshc|ezplot|ezplot3|ezpolar|ezsurf|ezsurfc|factor|factorial|fclose|feather|feature|feof|ferror|feval|fft|fft2|fftn|"+
"fftshift|fftw|fgetl|fgets|fieldnames|figure|figurepalette|fileattrib|filebrowser|filemarker|fileparts|fileread|filesep|fill|fill3|filter|filter2|find|findall|findfigs|"+
"findobj|findstr|finish|fitsdisp|fitsinfo|fitsread|fitswrite|fix|flag|flipdim|fliplr|flipud|floor|flow|fminbnd|fminsearch|fopen|format|fplot|fprintf|frame2im|fread|freqspace|"+
"frewind|fscanf|fseek|ftell|FTP|full|fullfile|func2str|functions|funm|fwrite|fzero|gallery|gamma|gammainc|gammaincinv|gammaln|gca|gcbf|gcbo|gcd|gcf|gco|ge|genpath|genvarname|"+
"get|getappdata|getenv|getfield|getframe|getpixelposition|getpref|ginput|gmres|gplot|grabcode|gradient|gray|graymon|grid|griddata(?:3|n)?|griddedInterpolant|gsvd|gt|gtext|"+
"guidata|guide|guihandles|gunzip|gzip|h5create|h5disp|h5info|h5read|h5readatt|h5write|h5writeatt|hadamard|handle|hankel|hdf|hdf5|hdf5info|hdf5read|hdf5write|hdfinfo|"+
"hdfread|hdftool|help|helpbrowser|helpdesk|helpdlg|helpwin|hess|hex2dec|hex2num|hgexport|hggroup|hgload|hgsave|hgsetget|hgtransform|hidden|hilb|hist|histc|hold|home|horzcat|"+
"hostid|hot|hsv|hsv2rgb|hypot|ichol|idivide|ifft|ifft2|ifftn|ifftshift|ilu|im2frame|im2java|imag|image|imagesc|imapprox|imfinfo|imformats|import|importdata|imread|imwrite|"+
"ind2rgb|ind2sub|inferiorto|info|inline|inmem|inpolygon|input|inputdlg|inputname|inputParser|inspect|instrcallback|instrfind|instrfindall|int2str|integral(?:2|3)?|interp(?:1|"+
"1q|2|3|ft|n)|interpstreamspeed|intersect|intmax|intmin|inv|invhilb|ipermute|isa|isappdata|iscell|iscellstr|ischar|iscolumn|isdir|isempty|isequal|isequaln|isequalwithequalnans|"+
"isfield|isfinite|isfloat|isglobal|ishandle|ishghandle|ishold|isinf|isinteger|isjava|iskeyword|isletter|islogical|ismac|ismatrix|ismember|ismethod|isnan|isnumeric|isobject|"+
"isocaps|isocolors|isonormals|isosurface|ispc|ispref|isprime|isprop|isreal|isrow|isscalar|issorted|isspace|issparse|isstr|isstrprop|isstruct|isstudent|isunix|isvarname|"+
"isvector|javaaddpath|javaArray|javachk|javaclasspath|javacomponent|javaMethod|javaMethodEDT|javaObject|javaObjectEDT|javarmpath|jet|keyboard|kron|lasterr|lasterror|"+
"lastwarn|lcm|ldivide|ldl|le|legend|legendre|length|libfunctions|libfunctionsview|libisloaded|libpointer|libstruct|license|light|lightangle|lighting|lin2mu|line|lines|"+
"linkaxes|linkdata|linkprop|linsolve|linspace|listdlg|listfonts|load|loadlibrary|loadobj|log|log10|log1p|log2|loglog|logm|logspace|lookfor|lower|ls|lscov|lsqnonneg|lsqr|"+
"lt|lu|luinc|magic|makehgtform|mat2cell|mat2str|material|matfile|matlab\.io\.MatFile|matlab\.mixin\.(?:Copyable|Heterogeneous(?:\.getDefaultScalarElement)?)|matlabrc|"+
"matlabroot|max|maxNumCompThreads|mean|median|membrane|memmapfile|memory|menu|mesh|meshc|meshgrid|meshz|meta\.(?:class(?:\.fromName)?|DynamicProperty|EnumeratedValue|event|"+
"MetaData|method|package(?:\.(?:fromName|getAllPackages))?|property)|metaclass|methods|methodsview|mex(?:\.getCompilerConfigurations)?|MException|mexext|mfilename|min|minres|"+
"minus|mislocked|mkdir|mkpp|mldivide|mlint|mlintrpt|mlock|mmfileinfo|mmreader|mod|mode|more|move|movefile|movegui|movie|movie2avi|mpower|mrdivide|msgbox|mtimes|mu2lin|"+
"multibandread|multibandwrite|munlock|namelengthmax|nargchk|narginchk|nargoutchk|native2unicode|nccreate|ncdisp|nchoosek|ncinfo|ncread|ncreadatt|ncwrite|ncwriteatt|"+
"ncwriteschema|ndgrid|ndims|ne|NET(?:\.(?:addAssembly|Assembly|convertArray|createArray|createGeneric|disableAutoRelease|enableAutoRelease|GenericClass|invokeGenericMethod|"+
"NetException|setStaticProperty))?|netcdf\.(?:abort|close|copyAtt|create|defDim|defGrp|defVar|defVarChunking|defVarDeflate|defVarFill|defVarFletcher32|delAtt|endDef|getAtt|"+
"getChunkCache|getConstant|getConstantNames|getVar|inq|inqAtt|inqAttID|inqAttName|inqDim|inqDimID|inqDimIDs|inqFormat|inqGrpName|inqGrpNameFull|inqGrpParent|inqGrps|"+
"inqLibVers|inqNcid|inqUnlimDims|inqVar|inqVarChunking|inqVarDeflate|inqVarFill|inqVarFletcher32|inqVarID|inqVarIDs|open|putAtt|putVar|reDef|renameAtt|renameDim|renameVar|"+
"setChunkCache|setDefaultFormat|setFill|sync)|newplot|nextpow2|nnz|noanimate|nonzeros|norm|normest|not|notebook|now|nthroot|null|num2cell|num2hex|num2str|numel|nzmax|"+
"ode(?:113|15i|15s|23|23s|23t|23tb|45)|odeget|odeset|odextend|onCleanup|ones|open|openfig|opengl|openvar|optimget|optimset|or|ordeig|orderfields|ordqz|ordschur|orient|"+
"orth|pack|padecoef|pagesetupdlg|pan|pareto|parseSoapResponse|pascal|patch|path|path2rc|pathsep|pathtool|pause|pbaspect|pcg|pchip|pcode|pcolor|pdepe|pdeval|peaks|perl|perms|"+
"permute|pie|pink|pinv|planerot|playshow|plot|plot3|plotbrowser|plotedit|plotmatrix|plottools|plotyy|plus|pol2cart|polar|poly|polyarea|polyder|polyeig|polyfit|polyint|polyval|"+
"polyvalm|pow2|power|ppval|prefdir|preferences|primes|print|printdlg|printopt|printpreview|prod|profile|profsave|propedit|propertyeditor|psi|publish|PutCharArray|PutFullMatrix|"+
"PutWorkspaceData|pwd|qhull|qmr|qr|qrdelete|qrinsert|qrupdate|quad|quad2d|quadgk|quadl|quadv|questdlg|quit|quiver|quiver3|qz|rand|randi|randn|randperm|RandStream(?:\.(?:create|"+
"getDefaultStream|getGlobalStream|list|setDefaultStream|setGlobalStream))?|rank|rat|rats|rbbox|rcond|rdivide|readasync|real|reallog|realmax|realmin|realpow|realsqrt|record|"+
"rectangle|rectint|recycle|reducepatch|reducevolume|refresh|refreshdata|regexp|regexpi|regexprep|regexptranslate|rehash|rem|Remove|RemoveAll|repmat|reset|reshape|residue|"+
"restoredefaultpath|rethrow|rgb2hsv|rgb2ind|rgbplot|ribbon|rmappdata|rmdir|rmfield|rmpath|rmpref|rng|roots|rose|rosser|rot90|rotate|rotate3d|round|rref|rsf2csf|run|save|saveas|"+
"saveobj|savepath|scatter|scatter3|schur|sec|secd|sech|selectmoveresize|semilogx|semilogy|sendmail|serial|set|setappdata|setdiff|setenv|setfield|setpixelposition|setpref|setstr|"+
"setxor|shading|shg|shiftdim|showplottool|shrinkfaces|sign|sin(?:d|h)?|size|slice|smooth3|snapnow|sort|sortrows|sound|soundsc|spalloc|spaugment|spconvert|spdiags|specular|speye|"+
"spfun|sph2cart|sphere|spinmap|spline|spones|spparms|sprand|sprandn|sprandsym|sprank|spring|sprintf|spy|sqrt|sqrtm|squeeze|ss2tf|sscanf|stairs|startup|std|stem|stem3|stopasync|"+
"str2double|str2func|str2mat|str2num|strcat|strcmp|strcmpi|stream2|stream3|streamline|streamparticles|streamribbon|streamslice|streamtube|strfind|strjust|strmatch|strncmp|"+
"strncmpi|strread|strrep|strtok|strtrim|struct2cell|structfun|strvcat|sub2ind|subplot|subsasgn|subsindex|subspace|subsref|substruct|subvolume|sum|summer|superclasses|superiorto|"+
"support|surf|surf2patch|surface|surfc|surfl|surfnorm|svd|svds|swapbytes|symamd|symbfact|symmlq|symrcm|symvar|system|tan(?:d|h)?|tar|tempdir|tempname|tetramesh|texlabel|text|"+
"textread|textscan|textwrap|tfqmr|throw|tic|Tiff(?:\.(?:getTagNames|getVersion))?|timer|timerfind|timerfindall|times|timeseries|title|toc|todatenum|toeplitz|toolboxdir|trace|"+
"transpose|trapz|treelayout|treeplot|tril|trimesh|triplequad|triplot|TriRep|TriScatteredInterp|trisurf|triu|tscollection|tsearch|tsearchn|tstool|type|typecast|uibuttongroup|"+
"uicontextmenu|uicontrol|uigetdir|uigetfile|uigetpref|uiimport|uimenu|uiopen|uipanel|uipushtool|uiputfile|uiresume|uisave|uisetcolor|uisetfont|uisetpref|uistack|uitable|"+
"uitoggletool|uitoolbar|uiwait|uminus|undocheckout|unicode2native|union|unique|unix|unloadlibrary|unmesh|unmkpp|untar|unwrap|unzip|uplus|upper|urlread|urlwrite|usejava|"+
"userpath|validateattributes|validatestring|vander|var|vectorize|ver|verctrl|verLessThan|version|vertcat|VideoReader(?:\.isPlatformSupported)?|VideoWriter(?:\.getProfiles)?|"+
"view|viewmtx|visdiff|volumebounds|voronoi|voronoin|wait|waitbar|waitfor|waitforbuttonpress|warndlg|warning|waterfall|wavfinfo|wavplay|wavread|wavrecord|wavwrite|web|weekday|"+
"what|whatsnew|which|whitebg|who|whos|wilkinson|winopen|winqueryreg|winter|wk1finfo|wk1read|wk1write|workspace|xlabel|xlim|xlsfinfo|xlsread|xlswrite|xmlread|xmlwrite|xor|xslt|"+
"ylabel|ylim|zeros|zip|zlabel|zlim|zoom|addedvarplot|andrewsplot|anova(?:1|2|n)|ansaribradley|aoctool|barttest|bbdesign|beta(?:cdf|fit|inv|like|pdf|rnd|stat)|bino(?:cdf|fit|inv|"+
"pdf|rnd|stat)|biplot|bootci|bootstrp|boxplot|candexch|candgen|canoncorr|capability|capaplot|caseread|casewrite|categorical|ccdesign|cdfplot|chi2(?:cdf|gof|inv|pdf|rnd|stat)|"+
"cholcov|Classification(?:BaggedEnsemble|Discriminant(?:\.(?:fit|make|template))?|Ensemble|KNN(?:\.(?:fit|template))?|PartitionedEnsemble|PartitionedModel|Tree(?:\.(?:fit|"+
"template))?)|classify|classregtree|cluster|clusterdata|cmdscale|combnk|Compact(?:Classification(?:Discriminant|Ensemble|Tree)|Regression(?:Ensemble|Tree)|TreeBagger)|confusionmat|"+
"controlchart|controlrules|cophenet|copula(?:cdf|fit|param|pdf|rnd|stat)|cordexch|corr|corrcov|coxphfit|createns|crosstab|crossval|cvpartition|datasample|dataset|daugment|dcovary|"+
"dendrogram|dfittool|disttool|dummyvar|dwtest|ecdf|ecdfhist|ev(?:cdf|fit|inv|like|pdf|rnd|stat)|ExhaustiveSearcher|exp(?:cdf|fit|inv|like|pdf|rnd|stat)|factoran|fcdf|ff2n|finv|"+
"fitdist|fitensemble|fpdf|fracfact|fracfactgen|friedman|frnd|fstat|fsurfht|fullfact|gagerr|gam(?:cdf|fit|inv|like|pdf|rnd|stat)|GeneralizedLinearModel(?:\.fit)?|geo(?:cdf|inv|mean|"+
"pdf|rnd|stat)|gev(?:cdf|fit|inv|like|pdf|rnd|stat)|gline|glmfit|glmval|glyphplot|gmdistribution(?:\.fit)?|gname|gp(?:cdf|fit|inv|like|pdf|rnd|stat)|gplotmatrix|grp2idx|grpstats|"+
"gscatter|haltonset|harmmean|hist3|histfit|hmm(?:decode|estimate|generate|train|viterbi)|hougen|hyge(?:cdf|inv|pdf|rnd|stat)|icdf|inconsistent|interactionplot|invpred|iqr|iwishrnd|"+
"jackknife|jbtest|johnsrnd|KDTreeSearcher|kmeans|knnsearch|kruskalwallis|ksdensity|kstest|kstest2|kurtosis|lasso|lassoglm|lassoPlot|leverage|lhsdesign|lhsnorm|lillietest|"+
"LinearModel(?:\.fit)?|linhyptest|linkage|logn(?:cdf|fit|inv|like|pdf|rnd|stat)|lsline|mad|mahal|maineffectsplot|manova1|manovacluster|mdscale|mhsample|mle|mlecov|mnpdf|"+
"mnrfit|mnrnd|mnrval|moment|multcompare|multivarichart|mvn(?:cdf|pdf|rnd)|mvregress|mvregresslike|mvt(?:cdf|pdf|rnd)|NaiveBayes(?:\.fit)?|nan(?:cov|max|mean|median|min|std|"+
"sum|var)|nbin(?:cdf|fit|inv|pdf|rnd|stat)|ncf(?:cdf|inv|pdf|rnd|stat)|nct(?:cdf|inv|pdf|rnd|stat)|ncx2(?:cdf|inv|pdf|rnd|stat)|NeighborSearcher|nlinfit|nlintool|nlmefit|nlmefitsa|"+
"nlparci|nlpredci|nnmf|nominal|NonLinearModel(?:\.fit)?|norm(?:cdf|fit|inv|like|pdf|rnd|stat)|normplot|normspec|ordinal|outlierMeasure|parallelcoords|paretotails|partialcorr|"+
"pcacov|pcares|pdf|pdist|pdist2|pearsrnd|perfcurve|perms|piecewisedistribution|plsregress|poiss(?:cdf|fit|inv|pdf|rnd|tat)|polyconf|polytool|prctile|princomp|ProbDist(?:Kernel|"+
"Parametric|UnivKernel|UnivParam)?|probplot|procrustes|qqplot|qrandset|qrandstream|quantile|randg|random|randsample|randtool|range|rangesearch|ranksum|rayl(?:cdf|fit|inv|pdf|"+
"rnd|stat)|rcoplot|refcurve|refline|regress|Regression(?:BaggedEnsemble|Ensemble|PartitionedEnsemble|PartitionedModel|Tree(?:\.(?:fit|template))?)|regstats|relieff|ridge|"+
"robustdemo|robustfit|rotatefactors|rowexch|rsmdemo|rstool|runstest|sampsizepwr|scatterhist|sequentialfs|signrank|signtest|silhouette|skewness|slicesample|sobolset|squareform|"+
"statget|statset|stepwise|stepwisefit|surfht|tabulate|tblread|tblwrite|tcdf|tdfread|tiedrank|tinv|tpdf|TreeBagger|treedisp|treefit|treeprune|treetest|treeval|trimmean|trnd|tstat|"+
"ttest|ttest2|unid(?:cdf|inv|pdf|rnd|stat)|unif(?:cdf|inv|it|pdf|rnd|stat)|vartest(?:2|n)?|wbl(?:cdf|fit|inv|like|pdf|rnd|stat)|wblplot|wishrnd|x2fx|xptread|zscore|ztest"+
"adapthisteq|analyze75info|analyze75read|applycform|applylut|axes2pix|bestblk|blockproc|bwarea|bwareaopen|bwboundaries|bwconncomp|bwconvhull|bwdist|bwdistgeodesic|bweuler|"+
"bwhitmiss|bwlabel|bwlabeln|bwmorph|bwpack|bwperim|bwselect|bwtraceboundary|bwulterode|bwunpack|checkerboard|col2im|colfilt|conndef|convmtx2|corner|cornermetric|corr2|cp2tform|"+
"cpcorr|cpselect|cpstruct2pairs|dct2|dctmtx|deconvblind|deconvlucy|deconvreg|deconvwnr|decorrstretch|demosaic|dicom(?:anon|dict|info|lookup|read|uid|write)|edge|edgetaper|entropy|"+
"entropyfilt|fan2para|fanbeam|findbounds|fliptform|freqz2|fsamp2|fspecial|ftrans2|fwind1|fwind2|getheight|getimage|getimagemodel|getline|getneighbors|getnhood|getpts|"+
"getrangefromclass|getrect|getsequence|gray2ind|graycomatrix|graycoprops|graydist|grayslice|graythresh|hdrread|hdrwrite|histeq|hough|houghlines|houghpeaks|iccfind|iccread|"+
"iccroot|iccwrite|idct2|ifanbeam|im2bw|im2col|im2double|im2int16|im2java2d|im2single|im2uint16|im2uint8|imabsdiff|imadd|imadjust|ImageAdapter|imageinfo|imagemodel|imapplymatrix|"+
"imattributes|imbothat|imclearborder|imclose|imcolormaptool|imcomplement|imcontour|imcontrast|imcrop|imdilate|imdisplayrange|imdistline|imdivide|imellipse|imerode|imextendedmax|"+
"imextendedmin|imfill|imfilter|imfindcircles|imfreehand|imfuse|imgca|imgcf|imgetfile|imhandles|imhist|imhmax|imhmin|imimposemin|imlincomb|imline|immagbox|immovie|immultiply|imnoise|"+
"imopen|imoverview|imoverviewpanel|impixel|impixelinfo|impixelinfoval|impixelregion|impixelregionpanel|implay|impoint|impoly|impositionrect|improfile|imputfile|impyramid|"+
"imreconstruct|imrect|imregconfig|imregionalmax|imregionalmin|imregister|imresize|imroi|imrotate|imsave|imscrollpanel|imshow|imshowpair|imsubtract|imtool|imtophat|imtransform|"+
"imview|ind2gray|ind2rgb|interfileinfo|interfileread|intlut|ippl|iptaddcallback|iptcheckconn|iptcheckhandle|iptcheckinput|iptcheckmap|iptchecknargin|iptcheckstrs|iptdemos|iptgetapi|"+
"iptGetPointerBehavior|iptgetpref|ipticondir|iptnum2ordinal|iptPointerManager|iptprefs|iptremovecallback|iptSetPointerBehavior|iptsetpref|iptwindowalign|iradon|isbw|isflat|isgray|"+
"isicc|isind|isnitf|isrgb|isrset|lab2double|lab2uint16|lab2uint8|label2rgb|labelmatrix|makecform|makeConstrainToRectFcn|makehdr|makelut|makeresampler|maketform|mat2gray|mean2|"+
"medfilt2|montage|nitfinfo|nitfread|nlfilter|normxcorr2|ntsc2rgb|openrset|ordfilt2|otf2psf|padarray|para2fan|phantom|poly2mask|psf2otf|qtdecomp|qtgetblk|qtsetblk|radon|rangefilt|"+
"reflect|regionprops|registration\.metric\.(?:MattesMutualInformation|MeanSquares)|registration\.optimizer\.(?:OnePlusOneEvolutionary|RegularStepGradientDescent)|rgb2gray|"+
"rgb2ntsc|rgb2ycbcr|roicolor|roifill|roifilt2|roipoly|rsetwrite|std2|stdfilt|strel|stretchlim|subimage|tformarray|tformfwd|tforminv|tonemap|translate|truesize|uintlut|viscircles|"+
"warp|watershed|whitepoint|wiener2|xyz2double|xyz2uint16|ycbcr2rgb|bintprog|color|fgoalattain|fminbnd|fmincon|fminimax|fminsearch|fminunc|fseminf|fsolve|fzero|fzmult|gangstr|ktrlink|"+
"linprog|lsqcurvefit|lsqlin|lsqnonlin|lsqnonneg|optimget|optimset|optimtool|quadprog"
);
var storageType = (
"cell|struct|char|double|single|logical|u?int(?:8|16|32|64)|sparse"
);
var keywordMapper = this.createKeywordMapper({
"storage.type": storageType,
"support.function": builtinFunctions,
"keyword": keywords,
"constant.language": builtinConstants
}, "identifier", true);
this.$rules = {
"start" : [ {
token : "comment",
regex : "%[^\r\n]*"
}, {
token : "string", // " string
regex : '".*?"'
}, {
token : "string", // ' string
regex : "'.*?'"
}, {
token : "constant.numeric", // float
regex : "[+-]?\\d+(?:(?:\\.\\d*)?(?:[eE][+-]?\\d+)?)?\\b"
}, {
token : keywordMapper,
regex : "[a-zA-Z_$][a-zA-Z0-9_$]*\\b"
}, {
token : "keyword.operator",
regex : "\\+|\\-|\\/|\\/\\/|<@>|@>|<@|&|\\^|~|<|>|<=|=>|==|!=|<>|="
}, {
token : "punctuation.operator",
regex : "\\?|\\:|\\,|\\;|\\."
}, {
token : "paren.lparen",
regex : "[\\(]"
}, {
token : "paren.rparen",
regex : "[\\)]"
}, {
token : "text",
regex : "\\s+"
} ]
};
};
oop.inherits(MatlabHighlightRules, TextHighlightRules);
exports.MatlabHighlightRules = MatlabHighlightRules;
});
// Ace MATLAB editing mode: plain text mode plus MATLAB highlight rules and
// MATLAB comment delimiters.
ace.define("ace/mode/matlab",["require","exports","module","ace/lib/oop","ace/mode/text","ace/mode/matlab_highlight_rules","ace/range"], function(require, exports, module) {
"use strict";
var oop = require("../lib/oop");
var TextMode = require("./text").Mode;
var MatlabHighlightRules = require("./matlab_highlight_rules").MatlabHighlightRules;
// Note: the unused local binding `var Range = require("../range").Range;` was
// removed; "ace/range" remains in the AMD dependency list above so module
// loading behaviour is unchanged.

var Mode = function() {
    this.HighlightRules = MatlabHighlightRules;
};
oop.inherits(Mode, TextMode);

(function() {
    // "%" starts a line comment; "%{ ... %}" delimits a block comment.
    this.lineCommentStart = "%";
    this.blockComment = {start: "%{", end: "%}"};
    this.$id = "ace/mode/matlab";
}).call(Mode.prototype);

exports.Mode = Mode;
});
| mit |
helenagarcia90/moodle-29 | blog/rsslib.php | 11498 | <?php
// This file is part of Moodle - http://moodle.org/
//
// Moodle is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Moodle is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Moodle. If not, see <http://www.gnu.org/licenses/>.
/**
* Blog RSS Management
*
* @package core_blog
* @category rss
* @copyright 2010 Andrew Davis
* @license http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
*/
require_once($CFG->dirroot.'/lib/rsslib.php');
require_once($CFG->dirroot .'/blog/lib.php');
/**
* Build the URL for the RSS feed
*
* @param int $contextid The context under which the URL should be created
* @param int $userid The id of the user requesting the RSS Feed
* @param string $filtertype The source of the RSS feed (site/course/group/user)
* @param int $filterselect The id of the item defined by $filtertype
* @param int $tagid The id of the row in the tag table that identifies the RSS Feed
* @return string
*/
function blog_rss_get_url($contextid, $userid, $filtertype, $filterselect=0, $tagid=0) {
    $componentname = 'blog';

    // Build the feed-specific path segment. The site feed is always keyed by
    // SITEID; course/group/user feeds use the id supplied by the caller.
    // An unrecognised filter type leaves the segment as null.
    $additionalargs = null;
    switch ($filtertype) {
        case 'site':
            $additionalargs = 'site/' . SITEID;
            break;
        case 'course':
        case 'group':
        case 'user':
            $additionalargs = $filtertype . '/' . $filterselect;
            break;
    }

    // An optional tag id narrows the feed further.
    if ($tagid) {
        $additionalargs .= '/' . $tagid;
    }

    return rss_get_url($contextid, $userid, $componentname, $additionalargs);
}
/**
* Print the link for the RSS feed with the correct RSS icon (Theme based)
*
* @param stdClass $context The context under which the URL should be created
* @param string $filtertype The source of the RSS feed (site/course/group/user)
* @param int $filterselect The id of the item defined by $filtertype
* @param int $tagid The id of the row in the tag table that identifies the RSS Feed
* @param string $tooltiptext The tooltip to be displayed with the link
*/
function blog_rss_print_link($context, $filtertype, $filterselect=0, $tagid=0, $tooltiptext='') {
    global $CFG, $USER, $OUTPUT;

    // Anonymous visitors are keyed by the site guest account so the feed
    // token stays stable for them.
    $userid = isloggedin() ? $USER->id : $CFG->siteguest;

    $url = blog_rss_get_url($context->id, $userid, $filtertype, $filterselect, $tagid);
    $rsspix = $OUTPUT->pix_url('i/rss');

    // Emit the right-aligned RSS icon linking to the feed.
    $icon = '<img src="'. $rsspix .'" title="'. strip_tags($tooltiptext) .'" alt="'.get_string('rss').'" />';
    print '<div class="mdl-right"><a href="'. $url .'">' . $icon . '</a></div>';
}
/**
* Build the URL for the RSS feed amd add it as a header
*
* @param stdClass $context The context under which the URL should be created
* @param string $title Name for the link to be added to the page header
* @param string $filtertype The source of the RSS feed (site/course/group/user)
* @param int $filterselect The id of the item defined by $filtertype
* @param int $tagid The id of the row in the tag table that identifies the RSS Feed
*/
function blog_rss_add_http_header($context, $title, $filtertype, $filterselect=0, $tagid=0) {
    global $PAGE, $USER, $CFG;

    // Anonymous visitors are keyed by the site guest account.
    $userid = isloggedin() ? $USER->id : $CFG->siteguest;

    // Register the feed as an alternate version of the current page so
    // browsers/aggregators can discover it.
    $rsspath = blog_rss_get_url($context->id, $userid, $filtertype, $filterselect, $tagid);
    $PAGE->add_alternate_version($title, $rsspath, 'application/rss+xml');
}
/**
* Utility function to extract parameters needed to generate RSS URLs from the blog filters
*
* @param array $filters filters for the blog
* @return array array containing the id of the user/course/group, the relevant context and the filter type: site/user/course/group
*/
function blog_rss_get_params($filters) {
    // Defaults: no item id, no context, no filter type resolved yet.
    $thingid = $rsscontext = $filtertype = null;
    // NOTE(review): $sitecontext is computed here but never used below, and
    // $rsscontext is never reassigned, so the second element of the returned
    // array is always null even though the docblock promises "the relevant
    // context" — confirm whether any caller actually reads that slot.
    $sitecontext = context_system::instance();
    // Precedence when several filters are present: course, then user, then group.
    if (!$filters) {
        // No filters at all means the site-wide blog feed.
        $thingid = SITEID;
        $filtertype = 'site';
    } else if (array_key_exists('course', $filters)) {
        $thingid = $filters['course'];
        $filtertype = 'course';
    } else if (array_key_exists('user', $filters)) {
        $thingid = $filters['user'];
        $filtertype = 'user';
    } else if (array_key_exists('group', $filters)) {
        $thingid = $filters['group'];
        $filtertype = 'group';
    }
    return array($thingid, $rsscontext, $filtertype);
}
/**
* Generate any blog RSS feed via one function
*
* @param stdClass $context The context of the blog for which the feed it being generated
* @param array $args An array of arguements needed to build the feed (contextid, token, componentname, type, id, tagid)
*/
function blog_rss_get_feed($context, $args) {
    global $CFG, $SITE, $DB;

    // Blogs and RSS must both be enabled site-wide.
    if (empty($CFG->enableblogs)) {
        debugging('Blogging disabled on this site, RSS feeds are not available');
        return null;
    }
    if (empty($CFG->enablerssfeeds)) {
        debugging('Sorry, RSS feeds are disabled on this site');
        return '';
    }

    // Site-level blogs are not visible to guests.
    if ($CFG->bloglevel == BLOG_SITE_LEVEL) {
        if (isguestuser()) {
            debugging(get_string('nopermissiontoshow', 'error'));
            return '';
        }
    }

    $sitecontext = context_system::instance();
    if (!has_capability('moodle/blog:view', $sitecontext)) {
        return null;
    }

    $type = clean_param($args[3], PARAM_ALPHA);
    $id = clean_param($args[4], PARAM_INT); // Could be groupid / courseid / userid depending on $type.

    // The optional sixth path segment is either a tag id or the literal 'rss.xml'.
    $tagid = 0;
    if ($args[5] != 'rss.xml') {
        $tagid = clean_param($args[5], PARAM_INT);
    }

    $filename = blog_rss_file_name($type, $id, $tagid);

    // Serve the cached feed if it is less than an hour old.
    if (file_exists($filename)) {
        if (filemtime($filename) + 3600 > time()) {
            return $filename; // It's already done so we return cached version.
        }
    }

    $courseid = $groupid = $userid = null;
    switch ($type) {
        case 'site':
            break;
        case 'course':
            $courseid = $id;
            break;
        case 'group':
            $groupid = $id;
            break;
        case 'user':
            $userid = $id;
            break;
    }

    // Get all the entries from the database.
    require_once($CFG->dirroot .'/blog/locallib.php');
    $blogheaders = blog_get_headers($courseid, $groupid, $userid, $tagid);
    $bloglisting = new blog_listing($blogheaders['filters']);
    $blogentries = $bloglisting->get_entries();

    // Now generate an array of RSS items.
    if ($blogentries) {
        $items = array();
        foreach ($blogentries as $blogentry) {
            // Bug fix: this was "$item = null;" followed by property writes,
            // which raises "Creating default object from empty value" on
            // PHP 5/7 and is a fatal error on PHP 8.
            $item = new stdClass();
            $item->author = fullname($DB->get_record('user', array('id' => $blogentry->userid))); // TODO: this is slow.
            $item->title = $blogentry->subject;
            $item->pubdate = $blogentry->lastmodified;
            $item->link = $CFG->wwwroot.'/blog/index.php?entryid='.$blogentry->id;
            $summary = file_rewrite_pluginfile_urls($blogentry->summary, 'pluginfile.php',
                $sitecontext->id, 'blog', 'post', $blogentry->id);
            $item->description = format_text($summary, $blogentry->format);
            if ( !empty($CFG->usetags) && ($blogtags = tag_get_tags_array('post', $blogentry->id)) ) {
                // $blogtags is guaranteed non-empty by the condition above, so
                // the previous redundant inner check was dropped.
                $item->tags = $blogtags;
                $item->tagscheme = $CFG->wwwroot . '/tag';
            }
            $items[] = $item;
        }
        $articles = rss_add_items($items); // Change structure to XML.
    } else {
        $articles = '';
    }

    // Get header and footer information.
    switch ($type) {
        case 'user':
            $info = fullname($DB->get_record('user', array('id'=>$id), 'firstname,lastname'));
            break;
        case 'course':
            $info = $DB->get_field('course', 'fullname', array('id'=>$id));
            $info = format_string($info, true, array('context' => context_course::instance($id)));
            break;
        case 'site':
            $info = format_string($SITE->fullname, true, array('context' => context_course::instance(SITEID)));
            break;
        case 'group':
            $group = groups_get_group($id);
            $info = $group->name; // TODO: $DB->get_field('groups', 'name', array('id'=>$id)).
            break;
        default:
            $info = '';
            break;
    }

    if ($tagid) {
        // NOTE(review): core Moodle stores tags in the 'tag' table (with a
        // 'name' column); the 'tags'/'text' pair here looks suspect — confirm
        // against the installed schema before relying on tag titles in feeds.
        $info .= ': '.$DB->get_field('tags', 'text', array('id'=>$tagid));
    }

    $header = rss_standard_header(get_string($type.'blog', 'blog', $info),
                                  $CFG->wwwroot.'/blog/index.php',
                                  get_string('intro', 'blog'));

    $footer = rss_standard_footer();

    // Save the XML contents to file.
    $rssdata = $header.$articles.$footer;
    if (blog_rss_save_file($type, $id, $tagid, $rssdata)) {
        return $filename;
    } else {
        return false; // Couldn't find it or make it.
    }
}
/**
* Retrieve the location and file name of a cached RSS feed
*
* @param string $type The source of the RSS feed (site/course/group/user)
* @param int $id The id of the item defined by $type
* @param int $tagid The id of the row in the tag table that identifies the RSS Feed
* @return string
*/
function blog_rss_file_name($type, $id, $tagid=0) {
    global $CFG;

    // Base cache path; a non-zero tag id adds one more path component.
    $path = "$CFG->cachedir/rss/blog/$type/$id";
    if ($tagid) {
        $path .= "/$tagid";
    }
    return $path . '.xml';
}
/**
* This function saves to file the rss feed specified in the parameters
*
* @param string $type The source of the RSS feed (site/course/group/user)
* @param int $id The id of the item defined by $type
* @param int $tagid The id of the row in the tag table that identifies the RSS Feed
* @param string $contents The contents of the RSS Feed file
* @return bool whether the save was successful or not
*/
function blog_rss_save_file($type, $id, $tagid=0, $contents='') {
    global $CFG;

    // (The dead "$status = true;" initialiser was removed: the value was
    // unconditionally overwritten by rss_save_file() below.)

    // Blog creates some additional dirs within the rss cache so make sure they all exist.
    make_cache_directory('rss/blog');
    make_cache_directory('rss/blog/'.$type);

    $filename = blog_rss_file_name($type, $id, $tagid);
    $expandfilename = false; // We are supplying a full file path.

    return rss_save_file('blog', $filename, $contents, $expandfilename);
}
/**
* Delete the supplied user's cached blog post RSS feed.
* Only user blogs are available by RSS.
* This doesn't call rss_delete_file() as blog RSS caching uses it's own file structure.
*
* @param int $userid
*/
function blog_rss_delete_file($userid) {
    // Resolve the cached user-feed path; nothing to do when no cache exists.
    $filename = blog_rss_file_name('user', $userid);
    if (!file_exists($filename)) {
        return;
    }
    unlink($filename);
}
| gpl-3.0 |
toprockdk/battlemaster | node_modules/@angular/common/src/facade/errors.js | 3170 | var __extends = (this && this.__extends) || function (d, b) {
for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p];
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
/**
* Convenience to throw an Error with 'unimplemented' as the message.
* @return {?}
*/
// Convenience helper: always throws an Error whose message is 'unimplemented'.
export function unimplemented() {
    var reason = 'unimplemented';
    throw new Error(reason);
}
/**
* \@stable
*/
// ES5 class extending the native Error type. Because calling Error as a
// plain super constructor does not reliably initialise `this`, the
// constructor creates a separate native Error and all accessors below
// forward message/name/stack to it.
export var BaseError = (function (_super) {
    __extends(BaseError, _super);
    /**
     * @param {?} message
     */
    function BaseError(message) {
        _super.call(this, message);
        // Errors don't use current this, instead they create a new instance.
        // We have to do forward all of our api to the nativeInstance.
        // TODO(bradfordcsmith): Remove this hack when
        // google/closure-compiler/issues/2102 is fixed.
        var nativeError = new Error(message);
        this._nativeError = nativeError;
    }
    // `message` reads and writes are delegated to the wrapped native Error.
    Object.defineProperty(BaseError.prototype, "message", {
        /**
         * @return {?}
         */
        get: function () { return this._nativeError.message; },
        /**
         * @param {?} message
         * @return {?}
         */
        set: function (message) { this._nativeError.message = message; },
        enumerable: true,
        configurable: true
    });
    // `name` is read-only here and comes from the wrapped native Error.
    Object.defineProperty(BaseError.prototype, "name", {
        /**
         * @return {?}
         */
        get: function () { return this._nativeError.name; },
        enumerable: true,
        configurable: true
    });
    // `stack` is both readable and writable, again via the native Error.
    Object.defineProperty(BaseError.prototype, "stack", {
        /**
         * @return {?}
         */
        get: function () { return ((this._nativeError)).stack; },
        /**
         * @param {?} value
         * @return {?}
         */
        set: function (value) { ((this._nativeError)).stack = value; },
        enumerable: true,
        configurable: true
    });
    /**
     * @return {?}
     */
    BaseError.prototype.toString = function () { return this._nativeError.toString(); };
    return BaseError;
}(Error));
// Generated stub that exists only to carry Closure Compiler type annotations
// for BaseError's fields; it is never called at runtime.
function BaseError_tsickle_Closure_declarations() {
    /**
     * \@internal *
     * @type {?}
     */
    BaseError.prototype._nativeError;
}
/**
 * \@stable
 *
 * An error that wraps another (possibly non-Error) value as its cause.
 * The combined message embeds the original error's message, and the stack
 * getter prefers the original error's stack when one is available.
 */
export var WrappedError = (function (_super) {
    __extends(WrappedError, _super);
    /**
     * @param {?} message
     * @param {?} error the original cause; may be any value, not just Error
     */
    function WrappedError(message, error) {
        _super.call(this, message + " caused by: " + (error instanceof Error ? error.message : error));
        this.originalError = error;
    }
    Object.defineProperty(WrappedError.prototype, "stack", {
        /**
         * @return {?} the original error's stack when it is a real Error,
         *     otherwise the stack of the BaseError's internal native Error
         */
        get: function () {
            return (((this.originalError instanceof Error ? this.originalError : this._nativeError)))
                .stack;
        },
        enumerable: true,
        configurable: true
    });
    return WrappedError;
}(BaseError));
// Generated stub carrying Closure Compiler type annotations for
// WrappedError's fields; never called at runtime.
function WrappedError_tsickle_Closure_declarations() {
    /** @type {?} */
    WrappedError.prototype.originalError;
}
//# sourceMappingURL=errors.js.map | mit |
mlloewen/chinhama | wp-content/plugins/updraftplus/vendor/rackspace/php-opencloud/lib/OpenCloud/ObjectStore/Constants/UrlType.php | 947 | <?php
/**
* Copyright 2012-2014 Rackspace US, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
namespace OpenCloud\ObjectStore\Constants;
/**
 * Enumerated constants used in CloudFiles for URL types.
 *
 * Each constant names a flavor of URL that can be requested for a
 * CDN-enabled container or object.
 */
class UrlType
{
    const CDN = 'CDN';                      // Standard HTTP CDN URL.
    const SSL = 'SSL';                      // HTTPS CDN URL.
    const STREAMING = 'Streaming';          // Generic streaming URL.
    const IOS_STREAMING = 'IOS-Streaming';  // HLS streaming URL for iOS.
    const TAR = 'tar';                      // Bulk-extract archive formats below.
    const TAR_GZ = 'tar.gz';
    const TAR_BZ2 = 'tar.bz2';
}
| gpl-2.0 |
mattijsbliek/record-client | webpack/webpack-isomorphic-tools.js | 3598 | var WebpackIsomorphicToolsPlugin = require('webpack-isomorphic-tools/plugin');
// see this link for more info on what all of this means
// https://github.com/halt-hammerzeit/webpack-isomorphic-tools
module.exports = {
// when adding "js" extension to asset types
// and then enabling debug mode, it may cause a weird error:
//
// [0] npm run start-prod exited with code 1
// Sending SIGTERM to other processes..
//
// debug: true,
assets: {
images: {
extensions: [
'jpeg',
'jpg',
'png',
'gif'
],
parser: WebpackIsomorphicToolsPlugin.url_loader_parser
},
fonts: {
extensions: [
'woff',
'woff2',
'ttf',
'eot'
],
parser: WebpackIsomorphicToolsPlugin.url_loader_parser
},
svg: {
extension: 'svg',
parser: WebpackIsomorphicToolsPlugin.url_loader_parser
},
// this whole "bootstrap" asset type is only used once in development mode.
// the only place it's used is the Html.js file
// where a <style/> tag is created with the contents of the
// './src/theme/bootstrap.config.js' file.
// (the aforementioned <style/> tag can reduce the white flash
// when refreshing page in development mode)
//
// hooking into 'js' extension require()s isn't the best solution
// and I'm leaving this comment here in case anyone finds a better idea.
bootstrap: {
extension: 'js',
include: ['./src/theme/bootstrap.config.js'],
filter: function(module, regex, options, log) {
function is_bootstrap_style(name) {
return name.indexOf('./src/theme/bootstrap.config.js') >= 0;
}
if (options.development) {
return is_bootstrap_style(module.name) && WebpackIsomorphicToolsPlugin.style_loader_filter(module, regex, options, log);
}
// no need for it in production mode
},
// in development mode there's webpack "style-loader",
// so the module.name is not equal to module.name
path: WebpackIsomorphicToolsPlugin.style_loader_path_extractor,
parser: WebpackIsomorphicToolsPlugin.css_loader_parser
},
style_modules: {
extensions: ['less','scss'],
filter: function(module, regex, options, log) {
if (options.development) {
// in development mode there's webpack "style-loader",
// so the module.name is not equal to module.name
return WebpackIsomorphicToolsPlugin.style_loader_filter(module, regex, options, log);
} else {
// in production mode there's no webpack "style-loader",
// so the module.name will be equal to the asset path
return regex.test(module.name);
}
},
path: function(module, options, log) {
if (options.development) {
// in development mode there's webpack "style-loader",
// so the module.name is not equal to module.name
return WebpackIsomorphicToolsPlugin.style_loader_path_extractor(module, options, log);
} else {
// in production mode there's no webpack "style-loader",
// so the module.name will be equal to the asset path
return module.name;
}
},
parser: function(module, options, log) {
if (options.development) {
return WebpackIsomorphicToolsPlugin.css_modules_loader_parser(module, options, log);
} else {
// in production mode there's Extract Text Loader which extracts CSS text away
return module.source;
}
}
}
}
}
| mit |
IT2-sotuken/telma | platforms/ios/cordova/node_modules/lodash-node/compat/functions/partial.js | 1272 | /**
* Lo-Dash 2.4.1 (Custom Build) <http://lodash.com/>
* Build: `lodash modularize exports="node" -o ./compat/`
* Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/>
* Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE>
* Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors
* Available under MIT license <http://lodash.com/license>
*/
var createWrapper = require('../internals/createWrapper'),
slice = require('../internals/slice');
/**
 * Creates a function that, when called, invokes `func` with any additional
 * `partial` arguments prepended to those provided to the new function. This
 * method is similar to `_.bind` except it does **not** alter the `this` binding.
 *
 * @static
 * @memberOf _
 * @category Functions
 * @param {Function} func The function to partially apply arguments to.
 * @param {...*} [arg] Arguments to be partially applied.
 * @returns {Function} Returns the new partially applied function.
 * @example
 *
 * var greet = function(greeting, name) { return greeting + ' ' + name; };
 * var hi = _.partial(greet, 'hi');
 * hi('fred');
 * // => 'hi fred'
 */
function partial(func) {
  // Bit flag 16 marks a partial-application wrapper in createWrapper.
  var partialArgs = slice(arguments, 1);
  return createWrapper(func, 16, partialArgs);
}

module.exports = partial;
| apache-2.0 |
alexmandujano/django | tests/admin_inlines/urls.py | 183 | from __future__ import absolute_import
from django.conf.urls import patterns, include
from . import admin
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
)
| bsd-3-clause |
fabien-d/electron | chromium_src/chrome/common/print_messages.cc | 1470 | // Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/common/print_messages.h"
#include "base/basictypes.h"
#include "base/strings/string16.h"
#include "ui/gfx/geometry/size.h"
// Default-constructs a print-parameters struct with empty geometry and
// zeroed/false scalar fields. Keep this initializer list in sync with the
// member declarations and with Reset() below, which restores the same values.
PrintMsg_Print_Params::PrintMsg_Print_Params()
    : page_size(),
      content_size(),
      printable_area(),
      margin_top(0),
      margin_left(0),
      dpi(0),
      min_shrink(0),
      max_shrink(0),
      desired_dpi(0),
      document_cookie(0),
      selection_only(false),
      supports_alpha_blend(false),
      print_scaling_option(blink::WebPrintScalingOptionSourceSize),
      title(),
      url(),
      should_print_backgrounds(false) {
}

PrintMsg_Print_Params::~PrintMsg_Print_Params() {}
// Restores every member to its default-constructed value; must mirror the
// constructor's initializer list above.
void PrintMsg_Print_Params::Reset() {
  page_size = gfx::Size();
  content_size = gfx::Size();
  printable_area = gfx::Rect();
  margin_top = 0;
  margin_left = 0;
  dpi = 0;
  min_shrink = 0;
  max_shrink = 0;
  desired_dpi = 0;
  document_cookie = 0;
  selection_only = false;
  supports_alpha_blend = false;
  print_scaling_option = blink::WebPrintScalingOptionSourceSize;
  title.clear();
  url.clear();
  should_print_backgrounds = false;
}
// Default-constructs with an empty page list; `params` is value-initialized
// by its own default constructor.
PrintMsg_PrintPages_Params::PrintMsg_PrintPages_Params()
    : pages() {
}

PrintMsg_PrintPages_Params::~PrintMsg_PrintPages_Params() {}

// Clears both the shared print parameters and the page selection.
void PrintMsg_PrintPages_Params::Reset() {
  params.Reset();
  pages = std::vector<int>();
}
| mit |
nds32/gcc | libjava/classpath/javax/swing/text/html/MultiAttributeSet.java | 5343 | /* MultiAttributeSet.java -- Multiplexes between a set of AttributeSets
Copyright (C) 2006 Free Software Foundation, Inc.
This file is part of GNU Classpath.
GNU Classpath is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.
GNU Classpath is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with GNU Classpath; see the file COPYING. If not, write to the
Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301 USA.
Linking this library statically or dynamically with other modules is
making a combined work based on this library. Thus, the terms and
conditions of the GNU General Public License cover the whole
combination.
As a special exception, the copyright holders of this library give you
permission to link this library with independent modules to produce an
executable, regardless of the license terms of these independent
modules, and to copy and distribute the resulting executable under
terms of your choice, provided that you also meet, for each linked
independent module, the terms and conditions of the license of that
module. An independent module is a module which is not derived from
or based on this library. If you modify this library, you may extend
this exception to your version of the library, but you are not
obligated to do so. If you do not wish to do so, delete this
exception statement from your version. */
package javax.swing.text.html;
import java.util.Enumeration;
import java.util.NoSuchElementException;
import javax.swing.text.AttributeSet;
import javax.swing.text.SimpleAttributeSet;
/**
* An AttributeSet impl that multiplexes between a set of other AttributeSets.
*
* @author Roman Kennke ([email protected])
*/
class MultiAttributeSet
  implements AttributeSet
{

  /**
   * The Enumeration for the multiplexed names.
   * Walks the name enumerations of each underlying AttributeSet in order.
   */
  private class MultiNameEnumeration
    implements Enumeration<Object>
  {
    /**
     * The index of the current AttributeSet.
     */
    private int index;

    /**
     * The names Enumeration of the current AttributeSet.
     */
    private Enumeration<?> current;

    /**
     * Creates a new instance.
     */
    MultiNameEnumeration()
    {
      index = 0;
      current = multi[0].getAttributeNames();
    }

    public boolean hasMoreElements()
    {
      return current.hasMoreElements() || index < multi.length - 1;
    }

    public Object nextElement()
    {
      // Advance to the next underlying set's enumeration when the current
      // one is exhausted; throw if there is nothing left anywhere.
      if (! current.hasMoreElements())
        {
          if (index < multi.length - 1)
            {
              index++;
              current = multi[index].getAttributeNames();
            }
          else
            throw new NoSuchElementException();
        }
      return current.nextElement();
    }
  }

  /**
   * The AttributeSets to multiplex.
   */
  AttributeSet[] multi;

  /**
   * Provided for subclasses that need to initialize via {@link #init}.
   */
  MultiAttributeSet()
  {
    // Nothing to do here.
  }

  /**
   * Creates a new instance.
   *
   * @param m the AttributeSets to multiplex
   */
  MultiAttributeSet(AttributeSet[] m)
  {
    init(m);
  }

  /**
   * Provided for subclasses to initialize the attribute set.
   *
   * @param m the attributes to multiplex
   */
  void init(AttributeSet[] m)
  {
    multi = m;
  }

  /**
   * Returns true if any of the multiplexed sets contains the given
   * name/value pair.
   */
  public boolean containsAttribute(Object name, Object value)
  {
    boolean ret = false;
    for (int i = 0; i < multi.length && ret == false; i++)
      {
        if (multi[i].containsAttribute(name, value))
          ret = true;
      }
    return ret;
  }

  /**
   * Returns true if every attribute in <code>attributes</code> has an equal
   * value in this multiplexed set.
   */
  public boolean containsAttributes(AttributeSet attributes)
  {
    boolean ret = true;
    Enumeration<?> e = attributes.getAttributeNames();
    while (ret && e.hasMoreElements())
      {
        Object key = e.nextElement();
        // getAttribute(key) may be null when this set lacks the key, in
        // which case equals(null) yields false and the loop terminates.
        ret = attributes.getAttribute(key).equals(getAttribute(key));
      }
    return ret;
  }

  /**
   * Returns a flattened, mutable copy containing the attributes of all
   * multiplexed sets (later sets overwrite earlier ones on key collisions).
   */
  public AttributeSet copyAttributes()
  {
    SimpleAttributeSet copy = new SimpleAttributeSet();
    for (int i = 0; i < multi.length; i++)
      {
        copy.addAttributes(multi[i]);
      }
    return copy;
  }

  /**
   * Returns the value for <code>key</code> from the first multiplexed set
   * that defines it, or null if none does.
   */
  public Object getAttribute(Object key)
  {
    Object ret = null;
    for (int i = 0; i < multi.length && ret == null; i++)
      {
        ret = multi[i].getAttribute(key);
      }
    return ret;
  }

  /**
   * Returns the sum of the attribute counts of all multiplexed sets.
   * Note: duplicated keys across sets are counted once per set.
   */
  public int getAttributeCount()
  {
    int n = 0;
    for (int i = 0; i < multi.length; i++)
      {
        n += multi[i].getAttributeCount();
      }
    return n;
  }

  public Enumeration<?> getAttributeNames()
  {
    return new MultiNameEnumeration();
  }

  /**
   * Always null; a multiplexed set has no resolve parent of its own.
   */
  public AttributeSet getResolveParent()
  {
    return null;
  }

  /**
   * Returns true if any multiplexed set defines <code>attrName</code>.
   */
  public boolean isDefined(Object attrName)
  {
    boolean ret = false;
    for (int i = 0; i < multi.length && ! ret; i++)
      ret = multi[i].isDefined(attrName);
    return ret;
  }

  /**
   * Two sets are equal when they have the same attribute count and this set
   * contains every attribute of <code>attr</code>.
   */
  public boolean isEqual(AttributeSet attr)
  {
    return getAttributeCount() == attr.getAttributeCount()
           && containsAttributes(attr);
  }
}
| gpl-2.0 |
murilobr/spring-boot | spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/PropertyPlaceholderAutoConfigurationTests.java | 2853 | /*
* Copyright 2012-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.autoconfigure;
import org.junit.After;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.test.EnvironmentTestUtils;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;
import org.springframework.util.StringUtils;
import static org.junit.Assert.assertEquals;
/**
* Tests for {@link PropertyPlaceholderAutoConfiguration}.
*
* @author Dave Syer
*/
public class PropertyPlaceholderAutoConfigurationTests {

	private final AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();

	@After
	public void close() {
		if (this.context != null) {
			this.context.close();
		}
	}

	// The environment property "foo" should win over the placeholder default
	// ("bar") declared in PlaceholderConfig.
	@Test
	public void propertyPlaceholderse() throws Exception {
		this.context.register(PropertyPlaceholderAutoConfiguration.class,
				PlaceholderConfig.class);
		EnvironmentTestUtils.addEnvironment(this.context, "foo:two");
		this.context.refresh();
		assertEquals("two", this.context.getBean(PlaceholderConfig.class).getFoo());
	}

	// A locally-overriding PropertySourcesPlaceholderConfigurer with order 0
	// should take precedence over both the environment and the default.
	@Test
	public void propertyPlaceholdersOverride() throws Exception {
		this.context.register(PropertyPlaceholderAutoConfiguration.class,
				PlaceholderConfig.class, PlaceholdersOverride.class);
		EnvironmentTestUtils.addEnvironment(this.context, "foo:two");
		this.context.refresh();
		assertEquals("spam", this.context.getBean(PlaceholderConfig.class).getFoo());
	}

	@Configuration
	static class PlaceholderConfig {

		// Resolved from the environment; falls back to "bar".
		@Value("${foo:bar}")
		private String foo;

		public String getFoo() {
			return this.foo;
		}

	}

	@Configuration
	static class PlaceholdersOverride {

		// Must be static so it runs before regular bean instantiation.
		@Bean
		public static PropertySourcesPlaceholderConfigurer morePlaceholders() {
			PropertySourcesPlaceholderConfigurer configurer = new PropertySourcesPlaceholderConfigurer();
			configurer.setProperties(StringUtils.splitArrayElementsIntoProperties(
					new String[] { "foo=spam" }, "="));
			configurer.setLocalOverride(true);
			configurer.setOrder(0);
			return configurer;
		}

	}

}
| apache-2.0 |
feiskyer/kubernetes | staging/src/k8s.io/apimachinery/pkg/util/sets/string.go | 4801 | /*
Copyright The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by set-gen. DO NOT EDIT.
package sets
import (
"reflect"
"sort"
)
// sets.String is a set of strings, implemented via map[string]struct{} for minimal memory consumption.
type String map[string]Empty

// NewString creates a String from a list of values.
func NewString(items ...string) String {
	ss := String{}
	ss.Insert(items...)
	return ss
}

// StringKeySet creates a String from a keys of a map[string](? extends interface{}).
// If the value passed in is not actually a map, this will panic.
func StringKeySet(theMap interface{}) String {
	v := reflect.ValueOf(theMap)
	ret := String{}
	for _, keyValue := range v.MapKeys() {
		// Panics (type assertion) if the map's keys are not strings.
		ret.Insert(keyValue.Interface().(string))
	}
	return ret
}

// Insert adds items to the set and returns the (mutated) receiver for chaining.
func (s String) Insert(items ...string) String {
	for _, item := range items {
		s[item] = Empty{}
	}
	return s
}

// Delete removes the given items from the set and returns the receiver.
// NOTE(review): the generated comment previously said "removes all items";
// only the listed items are deleted.
func (s String) Delete(items ...string) String {
	for _, item := range items {
		delete(s, item)
	}
	return s
}

// Has returns true if and only if item is contained in the set.
func (s String) Has(item string) bool {
	_, contained := s[item]
	return contained
}

// HasAll returns true if and only if all items are contained in the set.
func (s String) HasAll(items ...string) bool {
	for _, item := range items {
		if !s.Has(item) {
			return false
		}
	}
	return true
}

// HasAny returns true if any items are contained in the set.
func (s String) HasAny(items ...string) bool {
	for _, item := range items {
		if s.Has(item) {
			return true
		}
	}
	return false
}
// Difference returns a set of objects that are not in s2
// For example:
// s1 = {a1, a2, a3}
// s2 = {a1, a2, a4, a5}
// s1.Difference(s2) = {a3}
// s2.Difference(s1) = {a4, a5}
func (s String) Difference(s2 String) String {
	result := NewString()
	for key := range s {
		if !s2.Has(key) {
			result.Insert(key)
		}
	}
	return result
}

// Union returns a new set which includes items in either s1 or s2.
// For example:
// s1 = {a1, a2}
// s2 = {a3, a4}
// s1.Union(s2) = {a1, a2, a3, a4}
// s2.Union(s1) = {a1, a2, a3, a4}
func (s1 String) Union(s2 String) String {
	result := NewString()
	for key := range s1 {
		result.Insert(key)
	}
	for key := range s2 {
		result.Insert(key)
	}
	return result
}

// Intersection returns a new set which includes the item in BOTH s1 and s2
// For example:
// s1 = {a1, a2}
// s2 = {a2, a3}
// s1.Intersection(s2) = {a2}
func (s1 String) Intersection(s2 String) String {
	var walk, other String
	result := NewString()
	// Iterate over the smaller set and probe the larger one, so the loop
	// runs min(len(s1), len(s2)) times.
	if s1.Len() < s2.Len() {
		walk = s1
		other = s2
	} else {
		walk = s2
		other = s1
	}
	for key := range walk {
		if other.Has(key) {
			result.Insert(key)
		}
	}
	return result
}

// IsSuperset returns true if and only if s1 is a superset of s2.
func (s1 String) IsSuperset(s2 String) bool {
	for item := range s2 {
		if !s1.Has(item) {
			return false
		}
	}
	return true
}

// Equal returns true if and only if s1 is equal (as a set) to s2.
// Two sets are equal if their membership is identical.
// (In practice, this means same elements, order doesn't matter)
func (s1 String) Equal(s2 String) bool {
	return len(s1) == len(s2) && s1.IsSuperset(s2)
}
// sortableSliceOfString adapts []string to sort.Interface using lessString.
type sortableSliceOfString []string

func (s sortableSliceOfString) Len() int           { return len(s) }
func (s sortableSliceOfString) Less(i, j int) bool { return lessString(s[i], s[j]) }
func (s sortableSliceOfString) Swap(i, j int)      { s[i], s[j] = s[j], s[i] }

// List returns the contents as a sorted string slice.
func (s String) List() []string {
	res := make(sortableSliceOfString, 0, len(s))
	for key := range s {
		res = append(res, key)
	}
	sort.Sort(res)
	return []string(res)
}

// UnsortedList returns the slice with contents in random order.
func (s String) UnsortedList() []string {
	res := make([]string, 0, len(s))
	for key := range s {
		res = append(res, key)
	}
	return res
}

// PopAny removes and returns an arbitrary element from the set; the second
// return value is false when the set is empty.
// NOTE(review): comment adjusted to start with the function name per Go
// convention; behavior unchanged.
func (s String) PopAny() (string, bool) {
	for key := range s {
		s.Delete(key)
		return key, true
	}
	var zeroValue string
	return zeroValue, false
}

// Len returns the size of the set.
func (s String) Len() int {
	return len(s)
}

// lessString is the ordering used by List (plain string comparison).
func lessString(lhs, rhs string) bool {
	return lhs < rhs
}
| apache-2.0 |
lassemon/eportfolio | node_modules/protractor/lib/driverProviders/mock.js | 1721 | /*
* This is an mock implementation of the Driver Provider.
* It returns a fake webdriver and never actually contacts a selenium
* server.
*/
var webdriver = require('selenium-webdriver'),
    q = require('q');

/**
 * A fake command executor that never talks to a selenium server.
 * @constructor
 */
var MockExecutor = function() {
  this.driver_ = null;
};

/**
 * Answers every webdriver command with a canned successful response.
 * @param {!webdriver.Command} command The command to execute.
 * @param {function(Error, !bot.response.ResponseObject=)} callback the function
 *     to invoke when the command response is ready.
 */
MockExecutor.prototype.execute = function(command, callback) {
  callback(null, {
    status: '0',
    value: 'test_response'
  });
};

/**
 * Driver provider backed by MockExecutor; used for testing without a
 * selenium server.
 * @constructor
 * @param {Object} config the protractor configuration (stored, not used here)
 */
var MockDriverProvider = function(config) {
  this.config_ = config;
};

/**
 * Configure and launch (if applicable) the object's environment.
 * @public
 * @return {q.promise} A promise which will resolve immediately.
 */
MockDriverProvider.prototype.setupEnv = function() {
  return q.fcall(function() {});
};

/**
 * Teardown and destroy the environment and do any associated cleanup.
 * Quits the mock driver created by getDriver().
 *
 * @public
 * @return {q.promise} A promise which will resolve immediately.
 */
MockDriverProvider.prototype.teardownEnv = function() {
  var deferred = q.defer();
  this.driver_.quit().then(function() {
    deferred.resolve();
  });
  return deferred.promise;
};

/**
 * Retrieve the webdriver for the runner. Builds a WebDriver over a fake
 * session and the MockExecutor, so no network traffic ever happens.
 * @public
 * @return webdriver instance
 */
MockDriverProvider.prototype.getDriver = function() {
  var mockSession = new webdriver.Session('test_session_id', {});
  this.driver_ = new webdriver.WebDriver(mockSession, new MockExecutor());
  return this.driver_;
};

// new instance w/ each include
module.exports = function(config) {
  return new MockDriverProvider(config);
};
| mit |
barcadictni/cdnjs | ajax/libs/dustjs-helpers/1.3.0/dust-helpers.js | 20263 | /*! dustjs-helpers - v1.3.0
* https://github.com/linkedin/dustjs-helpers
* Copyright (c) 2014 Aleksander Williams; Released under the MIT License */
(function(dust){
//using the built in logging method of dust when accessible
var _log = dust.log ? function(mssg) { dust.log(mssg, "INFO"); } : function() {};
// True when the current context frame was pushed by the @select helper
// (it flags itself with isSelect === true).
function isSelect(context) {
  var head = context.current();
  return typeof head === "object" && head.isSelect === true;
}
// Utility method : toString() equivalent for functions
function jsonFilter(key, value) {
if (typeof value === "function") {
//to make sure all environments format functions the same way
return value.toString()
//remove all leading and trailing whitespace
.replace(/(^\s+|\s+$)/mg, '')
//remove new line characters
.replace(/\n/mg, '')
//replace , and 0 or more spaces with ", "
.replace(/,\s*/mg, ', ')
//insert space between ){
.replace(/\)\{/mg, ') {')
;
}
return value;
}
// Utility method: to invoke the given filter operation such as eq/gt etc
function filter(chunk, context, bodies, params, filterOp) {
params = params || {};
var body = bodies.block,
actualKey,
expectedValue,
filterOpType = params.filterOpType || '';
// when @eq, @lt etc are used as standalone helpers, key is required and hence check for defined
if ( typeof params.key !== "undefined") {
actualKey = dust.helpers.tap(params.key, chunk, context);
}
else if (isSelect(context)) {
actualKey = context.current().selectKey;
// supports only one of the blocks in the select to be selected
if (context.current().isResolved) {
filterOp = function() { return false; };
}
}
else {
_log("No key specified for filter in:" + filterOpType + " helper ");
return chunk;
}
expectedValue = dust.helpers.tap(params.value, chunk, context);
// coerce both the actualKey and expectedValue to the same type for equality and non-equality compares
if (filterOp(coerce(expectedValue, params.type, context), coerce(actualKey, params.type, context))) {
if (isSelect(context)) {
context.current().isResolved = true;
}
// we want helpers without bodies to fail gracefully so check it first
if(body) {
return chunk.render(body, context);
}
else {
_log("No key specified for filter in:" + filterOpType + " helper ");
return chunk;
}
}
else if (bodies['else']) {
return chunk.render(bodies['else'], context);
}
return chunk;
}
// Coerces value to the requested type (or to its own typeof when no type is
// given). Falsy inputs ('' , 0, null, undefined, false) pass through
// untouched; unknown types also fall through to the original value.
function coerce (value, type, context) {
  if (!value) {
    return value;
  }
  var targetType = type || typeof(value);
  switch (targetType) {
    case 'number':
      return +value;
    case 'string':
      return String(value);
    case 'boolean':
      // the literal string 'false' must coerce to boolean false
      return Boolean(value === 'false' ? false : value);
    case 'date':
      return new Date(value);
    case 'context':
      // resolve the value as a dust context lookup
      return context.get(value);
  }
  return value;
}
var helpers = {
// Utility helping to resolve dust references in the given chunk
// uses the Chunk.render method to resolve value
/*
Reference resolution rules:
if value exists in JSON:
"" or '' will evaluate to false, boolean false, null, or undefined will evaluate to false,
numeric 0 evaluates to true, so does, string "0", string "null", string "undefined" and string "false".
Also note that empty array -> [] is evaluated to false and empty object -> {} and non-empty object are evaluated to true
The type of the return value is string ( since we concatenate to support interpolated references
if value does not exist in JSON and the input is a single reference: {x}
dust render emits empty string, and we then return false
if values does not exist in JSON and the input is interpolated references : {x} < {y}
dust render emits < and we return the partial output
*/
"tap": function(input, chunk, context) {
// return given input if there is no dust reference to resolve
// dust compiles a string/reference such as {foo} to a function
if (typeof input !== "function") {
return input;
}
var dustBodyOutput = '',
returnValue;
//use chunk render to evaluate output. For simple functions result will be returned from render call,
//for dust body functions result will be output via callback function
returnValue = chunk.tap(function(data) {
dustBodyOutput += data;
return '';
}).render(input, context);
chunk.untap();
//assume it's a simple function call if return result is not a chunk
if (returnValue.constructor !== chunk.constructor) {
//use returnValue as a result of tap
return returnValue;
} else if (dustBodyOutput === '') {
return false;
} else {
return dustBodyOutput;
}
},
"sep": function(chunk, context, bodies) {
var body = bodies.block;
if (context.stack.index === context.stack.of - 1) {
return chunk;
}
if(body) {
return bodies.block(chunk, context);
}
else {
return chunk;
}
},
"idx": function(chunk, context, bodies) {
var body = bodies.block;
if(body) {
return bodies.block(chunk, context.push(context.stack.index));
}
else {
return chunk;
}
},
/**
* contextDump helper
* @param key specifies how much to dump.
* "current" dumps current context. "full" dumps the full context stack.
* @param to specifies where to write dump output.
* Values can be "console" or "output". Default is output.
*/
"contextDump": function(chunk, context, bodies, params) {
var p = params || {},
to = p.to || 'output',
key = p.key || 'current',
dump;
to = dust.helpers.tap(to, chunk, context);
key = dust.helpers.tap(key, chunk, context);
if (key === 'full') {
dump = JSON.stringify(context.stack, jsonFilter, 2);
}
else {
dump = JSON.stringify(context.stack.head, jsonFilter, 2);
}
if (to === 'console') {
_log(dump);
return chunk;
}
else {
return chunk.write(dump);
}
},
/**
if helper for complex evaluation complex logic expressions.
Note : #1 if helper fails gracefully when there is no body block nor else block
#2 Undefined values and false values in the JSON need to be handled specially with .length check
for e.g @if cond=" '{a}'.length && '{b}'.length" is advised when there are chances of the a and b been
undefined or false in the context
#3 Use only when the default ? and ^ dust operators and the select fall short in addressing the given logic,
since eval executes in the global scope
#4 All dust references are default escaped as they are resolved, hence eval will block malicious scripts in the context
Be mindful of evaluating a expression that is passed through the unescape filter -> |s
@param cond, either a string literal value or a dust reference
a string literal value, is enclosed in double quotes, e.g. cond="2>3"
a dust reference is also enclosed in double quotes, e.g. cond="'{val}'' > 3"
cond argument should evaluate to a valid javascript expression
**/
"if": function( chunk, context, bodies, params ){
var body = bodies.block,
skip = bodies['else'];
if( params && params.cond){
var cond = params.cond;
cond = dust.helpers.tap(cond, chunk, context);
// eval expressions with given dust references
if(eval(cond)){
if(body) {
return chunk.render( bodies.block, context );
}
else {
_log("Missing body block in the if helper!");
return chunk;
}
}
if(skip){
return chunk.render( bodies['else'], context );
}
}
// no condition
else {
_log("No condition given in the if helper!");
}
return chunk;
},
/**
* math helper
* @param key is the value to perform math against
* @param method is the math method, is a valid string supported by math helper like mod, add, subtract
* @param operand is the second value needed for operations like mod, add, subtract, etc.
* @param round is a flag to assure that an integer is returned
*/
"math": function ( chunk, context, bodies, params ) {
//key and method are required for further processing
if( params && typeof params.key !== "undefined" && params.method ){
var key = params.key,
method = params.method,
// operand can be null for "abs", ceil and floor
operand = params.operand,
round = params.round,
mathOut = null,
operError = function(){
_log("operand is required for this math method");
return null;
};
key = dust.helpers.tap(key, chunk, context);
operand = dust.helpers.tap(operand, chunk, context);
// TODO: handle and tests for negatives and floats in all math operations
switch(method) {
case "mod":
if(operand === 0 || operand === -0) {
_log("operand for divide operation is 0/-0: expect Nan!");
}
mathOut = parseFloat(key) % parseFloat(operand);
break;
case "add":
mathOut = parseFloat(key) + parseFloat(operand);
break;
case "subtract":
mathOut = parseFloat(key) - parseFloat(operand);
break;
case "multiply":
mathOut = parseFloat(key) * parseFloat(operand);
break;
case "divide":
if(operand === 0 || operand === -0) {
_log("operand for divide operation is 0/-0: expect Nan/Infinity!");
}
mathOut = parseFloat(key) / parseFloat(operand);
break;
case "ceil":
mathOut = Math.ceil(parseFloat(key));
break;
case "floor":
mathOut = Math.floor(parseFloat(key));
break;
case "round":
mathOut = Math.round(parseFloat(key));
break;
case "abs":
mathOut = Math.abs(parseFloat(key));
break;
default:
_log("method passed is not supported");
}
if (mathOut !== null){
if (round) {
mathOut = Math.round(mathOut);
}
if (bodies && bodies.block) {
// with bodies act like the select helper with mathOut as the key
// like the select helper bodies['else'] is meaningless and is ignored
return chunk.render(bodies.block, context.push({ isSelect: true, isResolved: false, selectKey: mathOut }));
} else {
// self closing math helper will return the calculated output
return chunk.write(mathOut);
}
} else {
return chunk;
}
}
// no key parameter and no method
else {
_log("Key is a required parameter for math helper along with method/operand!");
}
return chunk;
},
/**
select helper works with one of the eq/ne/gt/gte/lt/lte/default providing the functionality
of branching conditions
@param key, ( required ) either a string literal value or a dust reference
a string literal value, is enclosed in double quotes, e.g. key="foo"
a dust reference may or may not be enclosed in double quotes, e.g. key="{val}" and key=val are both valid
@param type (optional), supported types are number, boolean, string, date, context, defaults to string
**/
"select": function(chunk, context, bodies, params) {
var body = bodies.block;
// key is required for processing, hence check for defined
if( params && typeof params.key !== "undefined"){
// returns given input as output, if the input is not a dust reference, else does a context lookup
var key = dust.helpers.tap(params.key, chunk, context);
// bodies['else'] is meaningless and is ignored
if( body ) {
return chunk.render(bodies.block, context.push({ isSelect: true, isResolved: false, selectKey: key }));
}
else {
_log("Missing body block in the select helper ");
return chunk;
}
}
// no key
else {
_log("No key given in the select helper!");
}
return chunk;
},
/**
eq helper compares the given key is same as the expected value
It can be used standalone or in conjunction with select for multiple branching
@param key, The actual key to be compared ( optional when helper used in conjunction with select)
either a string literal value or a dust reference
a string literal value, is enclosed in double quotes, e.g. key="foo"
a dust reference may or may not be enclosed in double quotes, e.g. key="{val}" and key=val are both valid
@param value, The expected value to compare to, when helper is used standalone or in conjunction with select
@param type (optional), supported types are number, boolean, string, date, context, defaults to string
Note : use type="number" when comparing numeric
**/
"eq": function(chunk, context, bodies, params) {
if(params) {
params.filterOpType = "eq";
}
return filter(chunk, context, bodies, params, function(expected, actual) { return actual === expected; });
},
/**
ne helper compares the given key is not the same as the expected value
It can be used standalone or in conjunction with select for multiple branching
@param key, The actual key to be compared ( optional when helper used in conjunction with select)
either a string literal value or a dust reference
a string literal value, is enclosed in double quotes, e.g. key="foo"
a dust reference may or may not be enclosed in double quotes, e.g. key="{val}" and key=val are both valid
@param value, The expected value to compare to, when helper is used standalone or in conjunction with select
@param type (optional), supported types are number, boolean, string, date, context, defaults to string
Note : use type="number" when comparing numeric
**/
"ne": function(chunk, context, bodies, params) {
if(params) {
params.filterOpType = "ne";
return filter(chunk, context, bodies, params, function(expected, actual) { return actual !== expected; });
}
return chunk;
},
/**
lt helper compares the given key is less than the expected value
It can be used standalone or in conjunction with select for multiple branching
@param key, The actual key to be compared ( optional when helper used in conjunction with select)
either a string literal value or a dust reference
a string literal value, is enclosed in double quotes, e.g. key="foo"
a dust reference may or may not be enclosed in double quotes, e.g. key="{val}" and key=val are both valid
@param value, The expected value to compare to, when helper is used standalone or in conjunction with select
@param type (optional), supported types are number, boolean, string, date, context, defaults to string
Note : use type="number" when comparing numeric
**/
"lt": function(chunk, context, bodies, params) {
if(params) {
params.filterOpType = "lt";
return filter(chunk, context, bodies, params, function(expected, actual) { return actual < expected; });
}
},
/**
lte helper compares the given key is less or equal to the expected value
It can be used standalone or in conjunction with select for multiple branching
@param key, The actual key to be compared ( optional when helper used in conjunction with select)
either a string literal value or a dust reference
a string literal value, is enclosed in double quotes, e.g. key="foo"
a dust reference may or may not be enclosed in double quotes, e.g. key="{val}" and key=val are both valid
@param value, The expected value to compare to, when helper is used standalone or in conjunction with select
@param type (optional), supported types are number, boolean, string, date, context, defaults to string
Note : use type="number" when comparing numeric
**/
"lte": function(chunk, context, bodies, params) {
if(params) {
params.filterOpType = "lte";
return filter(chunk, context, bodies, params, function(expected, actual) { return actual <= expected; });
}
return chunk;
},
/**
gt helper compares the given key is greater than the expected value
It can be used standalone or in conjunction with select for multiple branching
@param key, The actual key to be compared ( optional when helper used in conjunction with select)
either a string literal value or a dust reference
a string literal value, is enclosed in double quotes, e.g. key="foo"
a dust reference may or may not be enclosed in double quotes, e.g. key="{val}" and key=val are both valid
@param value, The expected value to compare to, when helper is used standalone or in conjunction with select
@param type (optional), supported types are number, boolean, string, date, context, defaults to string
Note : use type="number" when comparing numeric
**/
"gt": function(chunk, context, bodies, params) {
// if no params do no go further
if(params) {
params.filterOpType = "gt";
return filter(chunk, context, bodies, params, function(expected, actual) { return actual > expected; });
}
return chunk;
},
/**
gte helper, compares the given key is greater than or equal to the expected value
It can be used standalone or in conjunction with select for multiple branching
@param key, The actual key to be compared ( optional when helper used in conjunction with select)
either a string literal value or a dust reference
a string literal value, is enclosed in double quotes, e.g. key="foo"
a dust reference may or may not be enclosed in double quotes, e.g. key="{val}" and key=val are both valid
@param value, The expected value to compare to, when helper is used standalone or in conjunction with select
@param type (optional), supported types are number, boolean, string, date, context, defaults to string
Note : use type="number" when comparing numeric
**/
"gte": function(chunk, context, bodies, params) {
if(params) {
params.filterOpType = "gte";
return filter(chunk, context, bodies, params, function(expected, actual) { return actual >= expected; });
}
return chunk;
},
// to be used in conjunction with the select helper
// TODO: fix the helper to do nothing when used standalone
"default": function(chunk, context, bodies, params) {
// does not require any params
if(params) {
params.filterOpType = "default";
}
return filter(chunk, context, bodies, params, function(expected, actual) { return true; });
},
/**
* size helper prints the size of the given key
* Note : size helper is self closing and does not support bodies
* @param key, the element whose size is returned
*/
"size": function( chunk, context, bodies, params ) {
var key, value=0, nr, k;
params = params || {};
key = params.key;
if (!key || key === true) { //undefined, null, "", 0
value = 0;
}
else if(dust.isArray(key)) { //array
value = key.length;
}
else if (!isNaN(parseFloat(key)) && isFinite(key)) { //numeric values
value = key;
}
else if (typeof key === "object") { //object test
//objects, null and array all have typeof ojbect...
//null and array are already tested so typeof is sufficient http://jsperf.com/isobject-tests
nr = 0;
for(k in key){
if(Object.hasOwnProperty.call(key,k)){
nr++;
}
}
value = nr;
} else {
value = (key + '').length; //any other value (strings etc.)
}
return chunk.write(value);
}
};
for (var key in helpers) {
dust.helpers[key] = helpers[key];
}
if(typeof exports !== 'undefined') {
module.exports = dust;
}
})(typeof exports !== 'undefined' ? require('dustjs-linkedin') : dust); | mit |
justdotJS/rowboat | frontend/node_modules/babel-preset-env/node_modules/babel-plugin-transform-es2015-parameters/node_modules/babel-runtime/node_modules/core-js/library/modules/_same-value.js | 190 | // 7.2.9 SameValue(x, y)
module.exports = Object.is || function is(x, y) {
// eslint-disable-next-line no-self-compare
return x === y ? x !== 0 || 1 / x === 1 / y : x != x && y != y;
};
| mit |
lenw/ansible-modules-core | cloud/rackspace/rax_cbs_attachments.py | 6622 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# This is a DOCUMENTATION stub specific to this module, it extends
# a documentation fragment located in ansible.utils.module_docs_fragments
DOCUMENTATION = '''
---
module: rax_cbs_attachments
short_description: Manipulate Rackspace Cloud Block Storage Volume Attachments
description:
- Manipulate Rackspace Cloud Block Storage Volume Attachments
version_added: 1.6
options:
device:
description:
- The device path to attach the volume to, e.g. /dev/xvde
default: null
required: true
volume:
description:
- Name or id of the volume to attach/detach
default: null
required: true
server:
description:
- Name or id of the server to attach/detach
default: null
required: true
state:
description:
- Indicate desired state of the resource
choices:
- present
- absent
default: present
required: true
wait:
description:
- wait for the volume to be in 'in-use'/'available' state before returning
default: "no"
choices:
- "yes"
- "no"
wait_timeout:
description:
- how long before wait gives up, in seconds
default: 300
author:
- "Christopher H. Laco (@claco)"
- "Matt Martz (@sivel)"
extends_documentation_fragment: rackspace.openstack
'''
EXAMPLES = '''
- name: Attach a Block Storage Volume
gather_facts: False
hosts: local
connection: local
tasks:
- name: Storage volume attach request
local_action:
module: rax_cbs_attachments
credentials: ~/.raxpub
volume: my-volume
server: my-server
device: /dev/xvdd
region: DFW
wait: yes
state: present
register: my_volume
'''
try:
import pyrax
HAS_PYRAX = True
except ImportError:
HAS_PYRAX = False
def cloud_block_storage_attachments(module, state, volume, server, device,
                                    wait, wait_timeout):
    """Attach or detach a Cloud Block Storage volume to/from a server.

    NOTE: this module uses Python 2 syntax ('except Exception, e',
    'iteritems') and targets the py2 Ansible runtime.

    module -- AnsibleModule instance (used for params and exit/fail)
    state -- 'present' to attach, 'absent' to detach
    volume -- name or id of the CBS volume
    server -- name or id of the cloud server
    device -- device path to attach at, e.g. /dev/xvdd
    wait -- when True, poll until the volume reaches its target status
    wait_timeout -- seconds before giving up on the wait

    Always terminates via module.exit_json()/module.fail_json().
    """
    cbs = pyrax.cloud_blockstorage
    cs = pyrax.cloudservers

    # A None client typically means a bad or miscapitalized region name.
    if cbs is None or cs is None:
        module.fail_json(msg='Failed to instantiate client. This '
                             'typically indicates an invalid region or an '
                             'incorrectly capitalized region name.')

    changed = False
    instance = {}

    # Resolve the volume name/id to a pyrax volume object.
    volume = rax_find_volume(module, pyrax, volume)

    if not volume:
        module.fail_json(msg='No matching storage volumes were found')

    if state == 'present':
        server = rax_find_server(module, pyrax, server)

        if (volume.attachments and
                volume.attachments[0]['server_id'] == server.id):
            # Already attached to the requested server: nothing to do.
            changed = False
        elif volume.attachments:
            module.fail_json(msg='Volume is attached to another server')
        else:
            try:
                volume.attach_to_instance(server, mountpoint=device)
                changed = True
            except Exception, e:
                module.fail_json(msg='%s' % e.message)

        # Refresh the volume state after the (possible) attach.
        volume.get()

        # Copy public (non-callable, non-underscore) volume attributes
        # into the legacy 'instance' dict.
        for key, value in vars(volume).iteritems():
            if (isinstance(value, NON_CALLABLES) and
                    not key.startswith('_')):
                instance[key] = value

        result = dict(changed=changed)

        if volume.status == 'error':
            result['msg'] = '%s failed to build' % volume.id
        elif wait:
            # Py2 integer division: poll every 5s, wait_timeout/5 attempts.
            attempts = wait_timeout / 5
            pyrax.utils.wait_until(volume, 'status', 'in-use',
                                   interval=5, attempts=attempts)
            volume.get()

        result['volume'] = rax_to_dict(volume)

        if 'msg' in result:
            module.fail_json(**result)
        else:
            module.exit_json(**result)

    elif state == 'absent':
        server = rax_find_server(module, pyrax, server)

        if (volume.attachments and
                volume.attachments[0]['server_id'] == server.id):
            try:
                volume.detach()
                if wait:
                    # attempts=0 means wait indefinitely (pyrax semantics).
                    pyrax.utils.wait_until(volume, 'status', 'available',
                                           interval=3, attempts=0,
                                           verbose=False)
                changed = True
            except Exception, e:
                module.fail_json(msg='%s' % e.message)

            volume.get()
            # NOTE(review): changed is already True here; this second
            # assignment is redundant but harmless.
            changed = True
        elif volume.attachments:
            module.fail_json(msg='Volume is attached to another server')

        result = dict(changed=changed, volume=rax_to_dict(volume))

        if volume.status == 'error':
            result['msg'] = '%s failed to build' % volume.id

        if 'msg' in result:
            module.fail_json(**result)
        else:
            module.exit_json(**result)

    # Fallback exit (only reachable for unexpected state values).
    module.exit_json(changed=changed, volume=instance)
def main():
    """Module entry point: build the argument spec and run the request."""
    # Start from the shared Rackspace spec and layer on this module's options.
    spec = rax_argument_spec()
    spec.update(dict(
        device=dict(required=True),
        volume=dict(required=True),
        server=dict(required=True),
        state=dict(default='present', choices=['present', 'absent']),
        wait=dict(type='bool', default=False),
        wait_timeout=dict(type='int', default=300)
    ))

    module = AnsibleModule(
        argument_spec=spec,
        required_together=rax_required_together()
    )

    if not HAS_PYRAX:
        module.fail_json(msg='pyrax is required for this module')

    params = module.params
    device = params.get('device')
    volume = params.get('volume')
    server = params.get('server')
    state = params.get('state')
    wait = params.get('wait')
    wait_timeout = params.get('wait_timeout')

    # Authenticate/configure pyrax before making any API calls.
    setup_rax_module(module, pyrax)

    cloud_block_storage_attachments(module, state, volume, server, device,
                                    wait, wait_timeout)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.rax import *
### invoke the module
main()
| gpl-3.0 |
SrGio/GestorFCT | vendor/symfony/symfony/src/Symfony/Component/Form/Extension/Core/EventListener/ResizeFormListener.php | 5427 | <?php
/*
* This file is part of the Symfony package.
*
* (c) Fabien Potencier <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Symfony\Component\Form\Extension\Core\EventListener;
use Symfony\Component\Form\FormEvents;
use Symfony\Component\Form\FormEvent;
use Symfony\Component\Form\Exception\UnexpectedTypeException;
use Symfony\Component\EventDispatcher\EventSubscriberInterface;
/**
* Resize a collection form element based on the data sent from the client.
*
* @author Bernhard Schussek <[email protected]>
*/
class ResizeFormListener implements EventSubscriberInterface
{
    /**
     * @var string
     */
    protected $type;

    /**
     * @var array
     */
    protected $options;

    /**
     * Whether children could be added to the group.
     *
     * @var bool
     */
    protected $allowAdd;

    /**
     * Whether children could be removed from the group.
     *
     * @var bool
     */
    protected $allowDelete;

    /**
     * @var bool
     */
    private $deleteEmpty;

    /**
     * @param string $type        The form type of each child row
     * @param array  $options     The options passed to each child row
     * @param bool   $allowAdd    Whether children may be added on submit
     * @param bool   $allowDelete Whether children may be removed on submit
     * @param bool   $deleteEmpty Whether empty children should be removed on submit
     */
    public function __construct($type, array $options = array(), $allowAdd = false, $allowDelete = false, $deleteEmpty = false)
    {
        $this->type = $type;
        $this->allowAdd = $allowAdd;
        $this->allowDelete = $allowDelete;
        $this->options = $options;
        $this->deleteEmpty = $deleteEmpty;
    }

    /**
     * {@inheritdoc}
     */
    public static function getSubscribedEvents()
    {
        return array(
            FormEvents::PRE_SET_DATA => 'preSetData',
            FormEvents::PRE_SUBMIT => 'preSubmit',
            // (MergeCollectionListener, MergeDoctrineCollectionListener)
            FormEvents::SUBMIT => array('onSubmit', 50),
        );
    }

    /**
     * Rebuilds the child rows so they match the bound collection data.
     *
     * @param FormEvent $event
     *
     * @throws UnexpectedTypeException if the data is not an array or \Traversable+\ArrayAccess
     */
    public function preSetData(FormEvent $event)
    {
        $form = $event->getForm();
        $data = $event->getData();

        if (null === $data) {
            $data = array();
        }

        if (!is_array($data) && !($data instanceof \Traversable && $data instanceof \ArrayAccess)) {
            throw new UnexpectedTypeException($data, 'array or (\Traversable and \ArrayAccess)');
        }

        // First remove all rows
        foreach ($form as $name => $child) {
            $form->remove($name);
        }

        // Then add all rows again in the correct order
        foreach ($data as $name => $value) {
            $form->add($name, $this->type, array_replace(array(
                'property_path' => '['.$name.']',
            ), $this->options));
        }
    }

    /**
     * Adds/removes child rows to match the submitted data, honoring
     * the allowAdd/allowDelete flags.
     *
     * @param FormEvent $event
     */
    public function preSubmit(FormEvent $event)
    {
        $form = $event->getForm();
        $data = $event->getData();

        if ($data instanceof \Traversable && $data instanceof \ArrayAccess) {
            @trigger_error('Support for objects implementing both \Traversable and \ArrayAccess is deprecated since version 3.1 and will be removed in 4.0. Use an array instead.', E_USER_DEPRECATED);
        }

        if (!is_array($data) && !($data instanceof \Traversable && $data instanceof \ArrayAccess)) {
            $data = array();
        }

        // Remove all empty rows
        if ($this->allowDelete) {
            foreach ($form as $name => $child) {
                if (!isset($data[$name])) {
                    $form->remove($name);
                }
            }
        }

        // Add all additional rows
        if ($this->allowAdd) {
            foreach ($data as $name => $value) {
                if (!$form->has($name)) {
                    $form->add($name, $this->type, array_replace(array(
                        'property_path' => '['.$name.']',
                    ), $this->options));
                }
            }
        }
    }

    /**
     * Removes entries from the data that no longer have a matching child
     * form (the data mapper only adds, it never removes).
     *
     * @param FormEvent $event
     *
     * @throws UnexpectedTypeException if the data is not an array or \Traversable+\ArrayAccess
     */
    public function onSubmit(FormEvent $event)
    {
        $form = $event->getForm();
        $data = $event->getData();

        // At this point, $data is an array or an array-like object that already contains the
        // new entries, which were added by the data mapper. The data mapper ignores existing
        // entries, so we need to manually unset removed entries in the collection.

        if (null === $data) {
            $data = array();
        }

        if (!is_array($data) && !($data instanceof \Traversable && $data instanceof \ArrayAccess)) {
            throw new UnexpectedTypeException($data, 'array or (\Traversable and \ArrayAccess)');
        }

        if ($this->deleteEmpty) {
            $previousData = $event->getForm()->getData();
            foreach ($form as $name => $child) {
                $isNew = !isset($previousData[$name]);

                // $isNew can only be true if allowAdd is true, so we don't
                // need to check allowAdd again
                if ($child->isEmpty() && ($isNew || $this->allowDelete)) {
                    unset($data[$name]);
                    $form->remove($name);
                }
            }
        }

        // The data mapper only adds, but does not remove items, so do this
        // here
        if ($this->allowDelete) {
            $toDelete = array();

            foreach ($data as $name => $child) {
                if (!$form->has($name)) {
                    $toDelete[] = $name;
                }
            }

            foreach ($toDelete as $name) {
                unset($data[$name]);
            }
        }

        $event->setData($data);
    }
}
| mit |
Dignifiedquire/babel | packages/babel/test/fixtures/transformation/es6.modules-amd/export-specifier-default/actual.js | 36 | var a = 1;
export { a as default };
| mit |
elitetestnik/Back-track | libraries/domit/xml_saxy_parser.php | 25299 | <?php
/**
* SAXY is a non-validating, but lightweight and fast SAX parser for PHP, modelled on the Expat parser
* @package saxy-xmlparser
* @subpackage saxy-xmlparser-main
* @version 1.0
* @copyright (C) 2004 John Heinstein. All rights reserved
* @license http://www.gnu.org/copyleft/lesser.html LGPL License
* @author John Heinstein <[email protected]>
* @link http://www.engageinteractive.com/saxy/ SAXY Home Page
* SAXY is Free Software
**/
if (!defined('SAXY_INCLUDE_PATH')) {
define('SAXY_INCLUDE_PATH', (dirname(__FILE__) . "/"));
}
/** current version of SAXY */
define ('SAXY_VERSION', '1.0');
/** default XML namespace */
define ('SAXY_XML_NAMESPACE', 'http://www.w3.org/xml/1998/namespace');
/** saxy parse state, before prolog is encountered */
define('SAXY_STATE_PROLOG_NONE', 0);
/** saxy parse state, in processing instruction */
define('SAXY_STATE_PROLOG_PROCESSINGINSTRUCTION', 1);
/** saxy parse state, an exclamation mark has been encountered */
define('SAXY_STATE_PROLOG_EXCLAMATION', 2);
/** saxy parse state, in DTD */
define('SAXY_STATE_PROLOG_DTD', 3);
/** saxy parse state, an inline DTD */
define('SAXY_STATE_PROLOG_INLINEDTD', 4);
/** saxy parse state, a comment */
define('SAXY_STATE_PROLOG_COMMENT', 5);
/** saxy parse state, processing main document */
define('SAXY_STATE_PARSING', 6);
/** saxy parse state, processing comment in main document */
define('SAXY_STATE_PARSING_COMMENT', 7);
//SAXY error codes; same as EXPAT error codes
/** no error */
define('SAXY_XML_ERROR_NONE', 0);
/** out of memory error */
define('SAXY_XML_ERROR_NO_MEMORY', 1);
/** syntax error */
define('SAXY_XML_ERROR_SYNTAX', 2);
/** no elements in document */
define('SAXY_XML_ERROR_NO_ELEMENTS', 3);
/** invalid token encountered error */
define('SAXY_XML_ERROR_INVALID_TOKEN', 4);
/** unclosed token error */
define('SAXY_XML_ERROR_UNCLOSED_TOKEN', 5);
/** partial character error */
define('SAXY_XML_ERROR_PARTIAL_CHAR', 6);
/** mismatched tag error */
define('SAXY_XML_ERROR_TAG_MISMATCH', 7);
/** duplicate attribute error */
define('SAXY_XML_ERROR_DUPLICATE_ATTRIBUTE', 8);
/** junk after document element error */
define('SAXY_XML_ERROR_JUNK_AFTER_DOC_ELEMENT', 9);
/** parameter enitity reference error */
define('SAXY_XML_ERROR_PARAM_ENTITY_REF', 10);
/** undefined entity error */
define('SAXY_XML_ERROR_UNDEFINED_ENTITY', 11);
/** recursive entity error */
define('SAXY_XML_ERROR_RECURSIVE_ENTITY_REF', 12);
/** asynchronous entity error */
define('SAXY_XML_ERROR_ASYNC_ENTITY', 13);
/** bad character reference error */
define('SAXY_XML_ERROR_BAD_CHAR_REF', 14);
/** binary entity reference error */
define('SAXY_XML_ERROR_BINARY_ENTITY_REF', 15);
/** attribute external entity error */
define('SAXY_XML_ERROR_ATTRIBUTE_EXTERNAL_ENTITY_REF', 16);
/** misplaced processing instruction error */
define('SAXY_XML_ERROR_MISPLACED_XML_PI', 17);
/** unknown encoding error */
define('SAXY_XML_ERROR_UNKNOWN_ENCODING', 18);
/** incorrect encoding error */
define('SAXY_XML_ERROR_INCORRECT_ENCODING', 19);
/** unclosed CDATA Section error */
define('SAXY_XML_ERROR_UNCLOSED_CDATA_SECTION', 20);
/** external entity handling error */
define('SAXY_XML_ERROR_EXTERNAL_ENTITY_HANDLING', 21);
require_once(SAXY_INCLUDE_PATH . 'xml_saxy_shared.php');
/**
* The SAX Parser class
*
* @package saxy-xmlparser
* @subpackage saxy-xmlparser-main
* @author John Heinstein <[email protected]>
*/
class SAXY_Parser extends SAXY_Parser_Base {
/** @var int The current error number */
var $errorCode = SAXY_XML_ERROR_NONE;
/** @var Object A reference to the DocType event handler */
var $DTDHandler = null;
/** @var Object A reference to the Comment event handler */
var $commentHandler = null;
/** @var Object A reference to the Processing Instruction event handler */
var $processingInstructionHandler = null;
/** @var Object A reference to the Start Namespace Declaration event handler */
var $startNamespaceDeclarationHandler = null;
/** @var Object A reference to the End Namespace Declaration event handler */
var $endNamespaceDeclarationHandler = null;
/** @var boolean True if SAXY takes namespaces into consideration when parsing element tags */
var $isNamespaceAware = false;
/** @var array An indexed array containing associative arrays of namespace prefixes mapped to their namespace URIs */
var $namespaceMap = array();
/** @var array A stack used to determine when an end namespace event should be fired */
var $namespaceStack = array();
/** @var array A track used to track the uri of the current default namespace */
var $defaultNamespaceStack = array();
/** @var array A stack containing tag names of unclosed elements */
var $elementNameStack = array();
/**
* Constructor for SAX parser
*/
function SAXY_Parser() {
    // Chain to the shared base "constructor" (PHP4-style), then start the
    // state machine outside of the document prolog.
    $this->SAXY_Parser_Base();
    $this->state = SAXY_STATE_PROLOG_NONE;
} //SAXY_Parser
/**
* Sets a reference to the handler for the DocType event
* @param mixed A reference to the DocType handler
*/
function xml_set_doctype_handler($handler) {
    // =& stores a reference instead of a copy (PHP4 assigned objects by value)
    $this->DTDHandler =& $handler;
} //xml_set_doctype_handler
/**
* Sets a reference to the handler for the Comment event
* @param mixed A reference to the Comment handler
*/
function xml_set_comment_handler($handler) {
    // =& stores a reference instead of a copy (PHP4 assigned objects by value)
    $this->commentHandler =& $handler;
} //xml_set_comment_handler
/**
* Sets a reference to the handler for the Processing Instruction event
* @param mixed A reference to the Processing Instruction handler
*/
function xml_set_processing_instruction_handler($handler) {
    // =& stores a reference instead of a copy (PHP4 assigned objects by value)
    $this->processingInstructionHandler =& $handler;
} //xml_set_processing_instruction_handler
/**
* Sets a reference to the handler for the Start Namespace Declaration event
* @param mixed A reference to the Start Namespace Declaration handler
*/
function xml_set_start_namespace_decl_handler($handler) {
    // =& stores a reference instead of a copy (PHP4 assigned objects by value)
    $this->startNamespaceDeclarationHandler =& $handler;
} //xml_set_start_namespace_decl_handler
/**
* Sets a reference to the handler for the End Namespace Declaration event
* @param mixed A reference to the Start Namespace Declaration handler
*/
function xml_set_end_namespace_decl_handler($handler) {
    // =& stores a reference instead of a copy (PHP4 assigned objects by value)
    $this->endNamespaceDeclarationHandler =& $handler;
} //xml_set_end_namespace_decl_handler
/**
* Specifies whether SAXY is namespace sensitive
* @param boolean True if SAXY is namespace aware
*/
function setNamespaceAwareness($isNamespaceAware) {
    // Plain assignment: the parameter is passed by value, so the previous
    // "=& $isNamespaceAware" only aliased the local copy and bought nothing.
    // A boolean flag should be stored by value.
    $this->isNamespaceAware = $isNamespaceAware;
} //setNamespaceAwareness
/**
* Returns the current version of SAXY
* @return Object The current version of SAXY
*/
function getVersion() {
    // SAXY_VERSION is defined at the top of this file
    return SAXY_VERSION;
} //getVersion
/**
* Processes the xml prolog, doctype, and any other nodes that exist outside of the main xml document
* @param string The xml text to be processed
* @return string The preprocessed xml text
*/
function preprocessXML($xmlText) {
    //strip prolog
    $xmlText = trim($xmlText);
    $startChar = -1;

    $total = strlen($xmlText);

    for ($i = 0; $i < $total; $i++) {
        // Single-character reads use substr(): the curly-brace string offset
        // syntax ($str{$i}) is deprecated in PHP 7.4 and removed in PHP 8,
        // and this file was already partially migrated to substr().
        $currentChar = substr($xmlText, $i, 1);

        switch ($this->state) {
            case SAXY_STATE_PROLOG_NONE:
                if ($currentChar == '<') {
                    $nextChar = substr($xmlText, $i + 1, 1);

                    if ($nextChar == '?') {
                        $this->state = SAXY_STATE_PROLOG_PROCESSINGINSTRUCTION;
                        $this->charContainer = '';
                    }
                    else if ($nextChar == '!') {
                        $this->state = SAXY_STATE_PROLOG_EXCLAMATION;
                        $this->charContainer .= $currentChar;
                        break;
                    }
                    else {
                        // First element of the document proper: stop
                        // preprocessing and return the remainder.
                        $this->charContainer = '';
                        $startChar = $i;
                        $this->state = SAXY_STATE_PARSING;
                        return (substr($xmlText, $startChar));
                    }
                }
                break;

            case SAXY_STATE_PROLOG_EXCLAMATION:
                if ($currentChar == 'D') {
                    // "<!D..." assumed to start a DOCTYPE declaration
                    $this->state = SAXY_STATE_PROLOG_DTD;
                    $this->charContainer .= $currentChar;
                }
                else if ($currentChar == '-') {
                    // "<!-" starts a comment
                    $this->state = SAXY_STATE_PROLOG_COMMENT;
                    $this->charContainer = '';
                }
                else {
                    //will trap ! and add it
                    $this->charContainer .= $currentChar;
                }
                break;

            case SAXY_STATE_PROLOG_PROCESSINGINSTRUCTION:
                if ($currentChar == '>') {
                    $this->state = SAXY_STATE_PROLOG_NONE;
                    $this->parseProcessingInstruction($this->charContainer);
                    $this->charContainer = '';
                }
                else {
                    $this->charContainer .= $currentChar;
                }
                break;

            case SAXY_STATE_PROLOG_COMMENT:
                if ($currentChar == '>') {
                    $this->state = SAXY_STATE_PROLOG_NONE;
                    $this->parseComment($this->charContainer);
                    $this->charContainer = '';
                }
                else if ($currentChar == '-') {
                    // Swallow dashes that belong to the "<!--" / "-->" markers;
                    // keep dashes that are part of the comment text.
                    if (((substr($xmlText, $i + 1, 1) == '-') && (substr($xmlText, $i + 2, 1) == '>')) ||
                        (substr($xmlText, $i + 1, 1) == '>') ||
                        ((substr($xmlText, $i - 1, 1) == '-') && (substr($xmlText, $i - 2, 1) == '!'))) {
                        //do nothing
                    }
                    else {
                        $this->charContainer .= $currentChar;
                    }
                }
                else {
                    $this->charContainer .= $currentChar;
                }
                break;

            case SAXY_STATE_PROLOG_DTD:
                if ($currentChar == '[') {
                    // "[" opens an internal (inline) DTD subset
                    $this->charContainer .= $currentChar;
                    $this->state = SAXY_STATE_PROLOG_INLINEDTD;
                }
                else if ($currentChar == '>') {
                    $this->state = SAXY_STATE_PROLOG_NONE;

                    if ($this->DTDHandler != null) {
                        $this->fireDTDEvent($this->charContainer . $currentChar);
                    }

                    $this->charContainer = '';
                }
                else {
                    $this->charContainer .= $currentChar;
                }
                break;

            case SAXY_STATE_PROLOG_INLINEDTD:
                $previousChar = substr($xmlText, $i - 1, 1);

                if (($currentChar == '>') && ($previousChar == ']')) {
                    // "]>" terminates the inline DTD subset
                    $this->state = SAXY_STATE_PROLOG_NONE;

                    if ($this->DTDHandler != null) {
                        $this->fireDTDEvent($this->charContainer . $currentChar);
                    }

                    $this->charContainer = '';
                }
                else {
                    $this->charContainer .= $currentChar;
                }
                break;
        }
    }
} //preprocessXML
/**
* The controlling method for the parsing process
* @param string The xml text to be processed
* @return boolean True if parsing is successful
*/
function parse ($xmlText) {
    $xmlText = $this->preprocessXML($xmlText);
    $total = strlen($xmlText);

    for ($i = 0; $i < $total; $i++) {
        // Single-character reads use substr(): the curly-brace string offset
        // syntax ($str{$i}) is deprecated in PHP 7.4 and removed in PHP 8.
        $currentChar = substr($xmlText, $i, 1);

        switch ($this->state) {
            case SAXY_STATE_PARSING:
                switch ($currentChar) {
                    case '<':
                        if (substr($this->charContainer, 0, SAXY_CDATA_LEN) == SAXY_SEARCH_CDATA) {
                            // inside a CDATA section: '<' is literal text
                            $this->charContainer .= $currentChar;
                        }
                        else {
                            $this->parseBetweenTags($this->charContainer);
                            $this->charContainer = '';
                        }
                        break;

                    case '-':
                        // "<!--" seen (looking back three characters): comment mode
                        if ($i >= 3 && (substr($xmlText, $i - 1, 1) == '-') && (substr($xmlText, $i - 2, 1) == '!')
                            && (substr($xmlText, $i - 3, 1) == '<')) {
                            $this->state = SAXY_STATE_PARSING_COMMENT;
                            $this->charContainer = '';
                        }
                        else {
                            $this->charContainer .= $currentChar;
                        }
                        break;

                    case '>':
                        if ((substr($this->charContainer, 0, SAXY_CDATA_LEN) == SAXY_SEARCH_CDATA) &&
                            !(($this->getCharFromEnd($this->charContainer, 0) == ']') &&
                            ($this->getCharFromEnd($this->charContainer, 1) == ']'))) {
                            // '>' inside CDATA (no "]]" terminator yet) is literal
                            $this->charContainer .= $currentChar;
                        }
                        else {
                            $this->parseTag($this->charContainer);
                            $this->charContainer = '';
                        }
                        break;

                    default:
                        $this->charContainer .= $currentChar;
                }
                break;

            case SAXY_STATE_PARSING_COMMENT:
                switch ($currentChar) {
                    case '>':
                        if ((substr($xmlText, $i - 1, 1) == '-') && (substr($xmlText, $i - 2, 1) == '-')) {
                            // strip the trailing "--" before firing the comment event
                            $this->fireCommentEvent(substr($this->charContainer, 0,
                                (strlen($this->charContainer) - 2)));
                            $this->charContainer = '';
                            $this->state = SAXY_STATE_PARSING;
                        }
                        else {
                            $this->charContainer .= $currentChar;
                        }
                        break;

                    default:
                        $this->charContainer .= $currentChar;
                }
                break;
        }
    }

    return ($this->errorCode == 0);
} //parse
/**
* Parses an element tag
* @param string The interior text of the element tag
*/
function parseTag($tagText) {
    $tagText = trim($tagText);
    // curly-brace string offsets ($str{$i}) are removed in PHP 8; use substr()
    $firstChar = substr($tagText, 0, 1);
    $myAttributes = array();

    switch ($firstChar) {
        case '/':
            // end tag: everything after the slash is the tag name
            $tagName = substr($tagText, 1);
            $this->_fireEndElementEvent($tagName);
            break;

        case '!':
            $upperCaseTagText = strtoupper($tagText);

            if (strpos($upperCaseTagText, SAXY_SEARCH_CDATA) !== false) { //CDATA Section
                $total = strlen($tagText);
                $openBraceCount = 0;
                $textNodeText = '';

                for ($i = 0; $i < $total; $i++) {
                    $currentChar = substr($tagText, $i, 1);

                    if (($currentChar == ']') && (substr($tagText, $i + 1, 1) == ']')) {
                        // "]]" terminates the CDATA content
                        break;
                    }
                    else if ($openBraceCount > 1) {
                        // past "<![CDATA[" (two open braces): literal content
                        $textNodeText .= $currentChar;
                    }
                    else if ($currentChar == '[') { //this won't be reached after the first open brace is found
                        $openBraceCount ++;
                    }
                }

                if ($this->cDataSectionHandler == null) {
                    $this->fireCharacterDataEvent($textNodeText);
                }
                else {
                    $this->fireCDataSectionEvent($textNodeText);
                }
            }
            else if (strpos($upperCaseTagText, SAXY_SEARCH_NOTATION) !== false) { //NOTATION node, discard
                return;
            }
            /*
            else if (substr($tagText, 0, 2) == '!-') { //comment node
                if ($this->commentHandler != null) {
                    $this->fireCommentEvent(substr($tagText, 3, (strlen($tagText) - 5)));
                }
            }
            */
            break;

        case '?':
            //Processing Instruction node
            $this->parseProcessingInstruction($tagText);
            break;

        default:
            if ((strpos($tagText, '"') !== false) || (strpos($tagText, "'") !== false)) {
                // attributes present: split the tag name off at the first whitespace
                $total = strlen($tagText);
                $tagName = '';

                for ($i = 0; $i < $total; $i++) {
                    $currentChar = substr($tagText, $i, 1);

                    if (($currentChar == ' ') || ($currentChar == "\t") ||
                        ($currentChar == "\n") || ($currentChar == "\r") ||
                        ($currentChar == "\x0B")) {
                        $myAttributes = $this->parseAttributes(substr($tagText, $i));
                        break;
                    }
                    else {
                        $tagName .= $currentChar;
                    }
                }

                if (strrpos($tagText, '/') == (strlen($tagText) - 1)) { //check $tagText, but send $tagName
                    // self-closing element with attributes
                    $this->_fireStartElementEvent($tagName, $myAttributes);
                    $this->_fireEndElementEvent($tagName);
                }
                else {
                    $this->_fireStartElementEvent($tagName, $myAttributes);
                }
            }
            else {
                if (strpos($tagText, '/') !== false) {
                    // Self-closing element without attributes, e.g. "br/":
                    // cut the name at the last '/'. The original expression,
                    // (strrchr($tagText, '/') - 1), performed arithmetic on a
                    // string, which is a TypeError on PHP 8; strrpos() yields
                    // the intended cut point directly (identical result when
                    // the '/' is the final character, as in well-formed input).
                    $tagText = trim(substr($tagText, 0, strrpos($tagText, '/')));
                    $this->_fireStartElementEvent($tagText, $myAttributes);
                    $this->_fireEndElementEvent($tagText);
                }
                else {
                    $this->_fireStartElementEvent($tagText, $myAttributes);
                }
            }
    }
} //parseTag
	/**
	 * Fires a start element event and pushes the element name onto the
	 * elementName stack (popped again by _fireEndElementEvent).
	 *
	 * When namespace-aware, xmlns declarations in the attributes are
	 * recorded first (detectStartNamespaceDeclaration also removes the
	 * xmlns entries from the array), then the tag name and the remaining
	 * attribute names are expanded to full namespace URIs.
	 *
	 * @param string The start element tag name
	 * @param Array The start element attributes (modified in place)
	 */
	function _fireStartElementEvent($tagName, &$myAttributes) {
		$this->elementNameStack[] = $tagName;
		if ($this->isNamespaceAware) {
			$this->detectStartNamespaceDeclaration($myAttributes);
			$tagName = $this->expandNamespacePrefix($tagName);
			$this->expandAttributePrefixes($myAttributes);
		}
		$this->fireStartElementEvent($tagName, $myAttributes);
	} //_fireStartElementEvent
/**
* Expands attribute prefixes to full namespace uri
* @param Array The start element attributes
*/
function expandAttributePrefixes(&$myAttributes) {
$arTransform = array();
foreach ($myAttributes as $key => $value) {
if (strpos($key, 'xmlns') === false) {
if (strpos($key, ':') !== false) {
$expandedTag = $this->expandNamespacePrefix($key);
$arTransform[$key] = $expandedTag;
}
}
}
foreach ($arTransform as $key => $value) {
$myAttributes[$value] = $myAttributes[$key];
unset($myAttributes[$key]);
}
} //expandAttributePrefixes
/**
* Expands the namespace prefix (if one exists) to the full namespace uri
* @param string The tagName with the namespace prefix
* @return string The tagName, with the prefix expanded to the namespace uri
*/
function expandNamespacePrefix($tagName) {
$stackLen = count($this->defaultNamespaceStack);
$defaultNamespace = $this->defaultNamespaceStack[($stackLen - 1)];
$colonIndex = strpos($tagName, ':');
if ($colonIndex !== false) {
$prefix = substr($tagName, 0, $colonIndex);
if ($prefix != 'xml') {
$tagName = $this->getNamespaceURI($prefix) . substr($tagName, $colonIndex);
}
else {
$tagName = SAXY_XML_NAMESPACE . substr($tagName, $colonIndex);
}
}
else if ($defaultNamespace != '') {
$tagName = $defaultNamespace . ':' . $tagName;
}
return $tagName;
} //expandNamespacePrefix
/**
* Searches the namespaceMap for the specified prefix, and returns the full namespace URI
* @param string The namespace prefix
* @return string The namespace uri
*/
function getNamespaceURI($prefix) {
$total = count($this->namespaceMap);
$uri = $prefix; //in case uri can't be found, just send back prefix
//should really generate an error, but worry about this later
//reset($this->namespaceMap);
for ($i = ($total - 1); $i >= 0; $i--) {
$currMap =& $this->namespaceMap[$i];
if (isset($currMap[$prefix])) {
$uri = $currMap[$prefix];
break;
}
}
return $uri;
} //getNamespaceURI
/**
* Searches the attributes array for an xmlns declaration and fires an event if found
* @param Array The start element attributes
*/
function detectStartNamespaceDeclaration(&$myAttributes) {
$namespaceExists = false;
$namespaceMapUpper = 0;
$userDefinedDefaultNamespace = false;
$total = count($myAttributes);
foreach ($myAttributes as $key => $value) {
if (strpos($key, 'xmlns') !== false) {
//add an array to store all namespaces for the current element
if (!$namespaceExists) {
$this->namespaceMap[] = array();
$namespaceMapUpper = count($this->namespaceMap) - 1;
}
//check for default namespace override, i.e. xmlns='...'
if (strpos($key, ':') !== false) {
$prefix = $namespaceMapKey = substr($key, 6);
$this->namespaceMap[$namespaceMapUpper][$namespaceMapKey] = $value;
}
else {
$prefix = '';
$userDefinedDefaultNamespace = true;
//if default namespace '', store in map using key ':'
$this->namespaceMap[$namespaceMapUpper][':'] = $value;
$this->defaultNamespaceStack[] = $value;
}
$this->fireStartNamespaceDeclarationEvent($prefix, $value);
$namespaceExists = true;
unset($myAttributes[$key]);
}
}
//store the default namespace (inherited from the parent elements so grab last one)
if (!$userDefinedDefaultNamespace) {
$stackLen = count($this->defaultNamespaceStack);
if ($stackLen == 0) {
$this->defaultNamespaceStack[] = '';
}
else {
$this->defaultNamespaceStack[] =
$this->defaultNamespaceStack[($stackLen - 1)];
}
}
$this->namespaceStack[] = $namespaceExists;
} //detectStartNamespaceDeclaration
/**
* Fires an end element event and pops the element name from the elementName stack
* @param string The end element tag name
*/
function _fireEndElementEvent($tagName) {
$lastTagName = array_pop($this->elementNameStack);
//check for mismatched tag error
if ($lastTagName != $tagName) {
$this->errorCode = SAXY_XML_ERROR_TAG_MISMATCH;
}
if ($this->isNamespaceAware) {
$tagName = $this->expandNamespacePrefix($tagName);
$this->fireEndElementEvent($tagName);
$this->detectEndNamespaceDeclaration();
$defaultNamespace = array_pop($this->defaultNamespaceStack);
}
else {
$this->fireEndElementEvent($tagName);
}
} //_fireEndElementEvent
/**
* Determines whether an end namespace declaration event should be fired
*/
function detectEndNamespaceDeclaration() {
$isNamespaceEnded = array_pop($this->namespaceStack);
if ($isNamespaceEnded) {
$map = array_pop($this->namespaceMap);
foreach ($map as $key => $value) {
if ($key == ':') {
$key = '';
}
$this->fireEndNamespaceDeclarationEvent($key);
}
}
} //detectEndNamespaceDeclaration
/**
* Parses a processing instruction
* @param string The interior text of the processing instruction
*/
function parseProcessingInstruction($data) {
$endTarget = 0;
$total = strlen($data);
for ($x = 2; $x < $total; $x++) {
// if (trim($data{$x}) == '') {
if (trim(substr($data, $x, 1)) == '') {
$endTarget = $x;
break;
}
}
$target = substr($data, 1, ($endTarget - 1));
$data = substr($data, ($endTarget + 1), ($total - $endTarget - 2));
if ($this->processingInstructionHandler != null) {
$this->fireProcessingInstructionEvent($target, $data);
}
} //parseProcessingInstruction
/**
* Parses a comment
* @param string The interior text of the comment
*/
function parseComment($data) {
if ($this->commentHandler != null) {
$this->fireCommentEvent($data);
}
} //parseComment
	/**
	 * Fires a doctype event.
	 *
	 * The registered handler is invoked as handler($parser, $data).
	 *
	 * @param string The doctype data
	 */
	function fireDTDEvent($data) {
		call_user_func($this->DTDHandler, $this, $data);
	} //fireDTDEvent
	/**
	 * Fires a comment event.
	 *
	 * The registered handler is invoked as handler($parser, $data).
	 *
	 * @param string The text of the comment
	 */
	function fireCommentEvent($data) {
		call_user_func($this->commentHandler, $this, $data);
	} //fireCommentEvent
	/**
	 * Fires a processing instruction event.
	 *
	 * The registered handler is invoked as handler($parser, $target, $data).
	 *
	 * @param string The processing instruction target
	 * @param string The processing instruction data
	 */
	function fireProcessingInstructionEvent($target, $data) {
		call_user_func($this->processingInstructionHandler, $this, $target, $data);
	} //fireProcessingInstructionEvent
	/**
	 * Fires a start namespace declaration event.
	 *
	 * The registered handler is invoked as handler($parser, $prefix, $uri);
	 * the default namespace is reported with an empty prefix.
	 *
	 * @param string The namespace prefix
	 * @param string The namespace uri
	 */
	function fireStartNamespaceDeclarationEvent($prefix, $uri) {
		call_user_func($this->startNamespaceDeclarationHandler, $this, $prefix, $uri);
	} //fireStartNamespaceDeclarationEvent
	/**
	 * Fires an end namespace declaration event.
	 *
	 * The registered handler is invoked as handler($parser, $prefix).
	 *
	 * @param string The namespace prefix going out of scope
	 */
	function fireEndNamespaceDeclarationEvent($prefix) {
		call_user_func($this->endNamespaceDeclarationHandler, $this, $prefix);
	} //fireEndNamespaceDeclarationEvent
	/**
	 * Returns the current error code.
	 *
	 * A value of 0 is treated as success by parse(); any other value can be
	 * translated to text with xml_error_string().
	 *
	 * @return int The current error code
	 */
	function xml_get_error_code() {
		return $this->errorCode;
	} //xml_get_error_code
/**
* Returns a textual description of the error code
* @param int The error code
* @return string The error message
*/
function xml_error_string($code) {
switch ($code) {
case SAXY_XML_ERROR_NONE:
return "No error";
break;
case SAXY_XML_ERROR_NO_MEMORY:
return "Out of memory";
break;
case SAXY_XML_ERROR_SYNTAX:
return "Syntax error";
break;
case SAXY_XML_ERROR_NO_ELEMENTS:
return "No elements in document";
break;
case SAXY_XML_ERROR_INVALID_TOKEN:
return "Invalid token";
break;
case SAXY_XML_ERROR_UNCLOSED_TOKEN:
return "Unclosed token";
break;
case SAXY_XML_ERROR_PARTIAL_CHAR:
return "Partial character";
break;
case SAXY_XML_ERROR_TAG_MISMATCH:
return "Tag mismatch";
break;
case SAXY_XML_ERROR_DUPLICATE_ATTRIBUTE:
return "Duplicate attribute";
break;
case SAXY_XML_ERROR_JUNK_AFTER_DOC_ELEMENT:
return "Junk encountered after document element";
break;
case SAXY_XML_ERROR_PARAM_ENTITY_REF:
return "Parameter entity reference error";
break;
case SAXY_XML_ERROR_UNDEFINED_ENTITY:
return "Undefined entity";
break;
case SAXY_XML_ERROR_RECURSIVE_ENTITY_REF:
return "Recursive entity reference";
break;
case SAXY_XML_ERROR_ASYNC_ENTITY:
return "Asynchronous internal entity found in external entity";
break;
case SAXY_XML_ERROR_BAD_CHAR_REF:
return "Bad character reference";
break;
case SAXY_XML_ERROR_BINARY_ENTITY_REF:
return "Binary entity reference";
break;
case SAXY_XML_ERROR_ATTRIBUTE_EXTERNAL_ENTITY_REF:
return "Attribute external entity reference";
break;
case SAXY_XML_ERROR_MISPLACED_XML_PI:
return "Misplaced processing instruction";
break;
case SAXY_XML_ERROR_UNKNOWN_ENCODING:
return "Unknown encoding";
break;
case SAXY_XML_ERROR_INCORRECT_ENCODING:
return "Incorrect encoding";
break;
case SAXY_XML_ERROR_UNCLOSED_CDATA_SECTION:
return "Unclosed CDATA Section";
break;
case SAXY_XML_ERROR_EXTERNAL_ENTITY_HANDLING:
return "Problem in external entity handling";
break;
default:
return "No definition for error code " . $code;
break;
}
} //xml_error_string
} //SAXY_Parser
?>
| apache-2.0 |
pierreg/tensorflow | tensorflow/core/framework/op_segment.cc | 2984 | /* Copyright 2015 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/core/framework/op_segment.h"
#include "tensorflow/core/framework/op_kernel.h"
#include "tensorflow/core/lib/core/errors.h"
#include "tensorflow/core/lib/gtl/map_util.h"
#include "tensorflow/core/platform/logging.h"
#include "tensorflow/core/platform/mutex.h"
#include "tensorflow/core/platform/types.h"
namespace tensorflow {
// Destroys every kernel owned by this per-session item.
OpSegment::Item::~Item() {
  for (auto kv : name_kernel) delete kv.second;
}
OpSegment::OpSegment() {}
// Destroys all remaining per-session items (and, via ~Item, their kernels).
OpSegment::~OpSegment() {
  for (auto kv : sessions_) delete kv.second;
}
// Looks up the kernel registered under (session_handle, node_name); if none
// exists yet, builds one with create_fn and registers it. On success *kernel
// points at the registered kernel.
Status OpSegment::FindOrCreate(const string& session_handle,
                               const string& node_name, OpKernel** kernel,
                               CreateKernelFn create_fn) {
  // Fast path: look for an existing kernel under the lock.
  {
    mutex_lock l(mu_);
    auto item = gtl::FindPtrOrNull(sessions_, session_handle);
    if (item == nullptr) {
      return errors::NotFound("Session ", session_handle, " is not found.");
    }
    *kernel = gtl::FindPtrOrNull(item->name_kernel, node_name);
    if (*kernel != nullptr) {
      return Status::OK();
    }
  }
  // create_fn is invoked without holding mu_, so kernel construction does
  // not block concurrent lookups.
  Status s = create_fn(kernel);
  if (!s.ok()) {
    LOG(ERROR) << "Create kernel failed: " << s;
    return s;
  }
  // Re-acquire the lock and insert. Another thread may have registered a
  // kernel for the same node in the meantime; if so, keep theirs and
  // discard ours.
  {
    mutex_lock l(mu_);
    auto item = gtl::FindPtrOrNull(sessions_, session_handle);
    if (item == nullptr) {
      return errors::NotFound("Session ", session_handle, " is not found.");
    }
    OpKernel** p_kernel = &(item->name_kernel[node_name]);
    if (*p_kernel == nullptr) {
      *p_kernel = *kernel;  // Inserts 'kernel' in the map.
    } else {
      delete *kernel;
      *kernel = *p_kernel;
    }
  }
  return Status::OK();
}
// Adds one hold on the given session, creating its Item on first use.
void OpSegment::AddHold(const string& session_handle) {
  mutex_lock l(mu_);
  // operator[] default-constructs a null slot for a new session handle.
  Item*& item = sessions_[session_handle];
  if (item == nullptr) {
    item = new Item;  // A freshly created Item starts with num_holds == 1.
  } else {
    ++(item->num_holds);
  }
}
// Drops one hold on the session. When the last hold is released, the
// session's Item is removed from the map and destroyed.
void OpSegment::RemoveHold(const string& session_handle) {
  Item* item = nullptr;
  {
    mutex_lock l(mu_);
    auto siter = sessions_.find(session_handle);
    if (siter == sessions_.end()) {
      VLOG(1) << "Session " << session_handle << " is not found.";
      return;
    }
    item = siter->second;
    if (--(item->num_holds) > 0) {
      return;
    } else {
      sessions_.erase(siter);
    }
  }
  // Deletion happens after releasing mu_; ~Item deletes all owned kernels.
  delete item;
}
} // end namespace tensorflow
| apache-2.0 |
Samurais/kubernetes | pkg/apiserver/errors.go | 2585 | /*
Copyright 2014 The Kubernetes Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package apiserver
import (
"fmt"
"net/http"
"k8s.io/kubernetes/pkg/api"
etcdstorage "k8s.io/kubernetes/pkg/storage/etcd"
"k8s.io/kubernetes/pkg/util"
)
// statusError is an object that can be converted into an api.Status.
// errToAPIStatus uses it to recognize errors that carry their own API
// status representation.
type statusError interface {
	// Status returns the api.Status representation of the error.
	Status() api.Status
}
// errToAPIStatus converts an error to an api.Status object.
func errToAPIStatus(err error) *api.Status {
	// Errors that know their own api.Status take the dedicated path;
	// only defaults for Status and Code are filled in.
	if t, ok := err.(statusError); ok {
		status := t.Status()
		if len(status.Status) == 0 {
			status.Status = api.StatusFailure
		}
		if status.Code == 0 {
			switch status.Status {
			case api.StatusSuccess:
				status.Code = http.StatusOK
			case api.StatusFailure:
				status.Code = http.StatusInternalServerError
			}
		}
		//TODO: check for invalid responses
		return &status
	}

	code := http.StatusInternalServerError
	//TODO: replace me with NewConflictErr
	if etcdstorage.IsEtcdTestFailed(err) {
		code = http.StatusConflict
	}
	// Log errors that were not converted to an error status
	// by REST storage - these typically indicate programmer
	// error by not using pkg/api/errors, or unexpected failure
	// cases.
	util.HandleError(fmt.Errorf("apiserver received an error that is not an api.Status: %v", err))
	return &api.Status{
		Status:  api.StatusFailure,
		Code:    code,
		Reason:  api.StatusReasonUnknown,
		Message: err.Error(),
	}
}
// notFound renders a simple not found error.
// The request URI is formatted with %#v, so it appears quoted/escaped.
func notFound(w http.ResponseWriter, req *http.Request) {
	w.WriteHeader(http.StatusNotFound)
	fmt.Fprintf(w, "Not Found: %#v", req.RequestURI)
}
// badGatewayError renders a simple bad gateway error.
// The request URI is formatted with %#v, so it appears quoted/escaped.
func badGatewayError(w http.ResponseWriter, req *http.Request) {
	w.WriteHeader(http.StatusBadGateway)
	fmt.Fprintf(w, "Bad Gateway: %#v", req.RequestURI)
}
// forbidden renders a simple forbidden error.
// The request URI is formatted with %#v, so it appears quoted/escaped.
func forbidden(w http.ResponseWriter, req *http.Request) {
	w.WriteHeader(http.StatusForbidden)
	fmt.Fprintf(w, "Forbidden: %#v", req.RequestURI)
}
| apache-2.0 |
minhv1993/hatcai | concrete/vendor/doctrine/orm/lib/Doctrine/ORM/Persisters/OneToManyPersister.php | 8753 | <?php
/*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* This software consists of voluntary contributions made by many individuals
* and is licensed under the MIT license. For more information, see
* <http://www.doctrine-project.org>.
*/
namespace Doctrine\ORM\Persisters;
use Doctrine\ORM\PersistentCollection;
use Doctrine\ORM\UnitOfWork;
/**
 * Persister for one-to-many collections.
 *
 * @author Roman Borschel <[email protected]>
 * @author Guilherme Blanco <[email protected]>
 * @author Alexander <[email protected]>
 * @since 2.0
 */
class OneToManyPersister extends AbstractCollectionPersister
{
    /**
     * {@inheritdoc}
     *
     * @override
     */
    public function get(PersistentCollection $coll, $index)
    {
        $mapping   = $coll->getMapping();
        $uow       = $this->em->getUnitOfWork();
        $persister = $uow->getEntityPersister($mapping['targetEntity']);

        if (!isset($mapping['indexBy'])) {
            throw new \BadMethodCallException("Selecting a collection by index is only supported on indexed collections.");
        }

        // Load at most one entity matching both the owner and the index key.
        return $persister->load(array($mapping['mappedBy'] => $coll->getOwner(), $mapping['indexBy'] => $index), null, null, array(), 0, 1);
    }

    /**
     * Generates the SQL UPDATE that updates a particular row's foreign
     * key to null.
     *
     * @param \Doctrine\ORM\PersistentCollection $coll
     *
     * @return string
     *
     * @override
     */
    protected function getDeleteRowSQL(PersistentCollection $coll)
    {
        $mapping   = $coll->getMapping();
        $class     = $this->em->getClassMetadata($mapping['targetEntity']);
        $tableName = $this->quoteStrategy->getTableName($class, $this->platform);
        $idColumns = $class->getIdentifierColumnNames();

        return 'DELETE FROM ' . $tableName
             . ' WHERE ' . implode('= ? AND ', $idColumns) . ' = ?';
    }

    /**
     * {@inheritdoc}
     */
    protected function getDeleteRowSQLParameters(PersistentCollection $coll, $element)
    {
        return array_values($this->uow->getEntityIdentifier($element));
    }

    /**
     * {@inheritdoc}
     *
     * @throws \BadMethodCallException Not used for OneToManyPersister.
     */
    protected function getInsertRowSQL(PersistentCollection $coll)
    {
        throw new \BadMethodCallException("Insert Row SQL is not used for OneToManyPersister");
    }

    /**
     * {@inheritdoc}
     *
     * @throws \BadMethodCallException Not used for OneToManyPersister.
     */
    protected function getInsertRowSQLParameters(PersistentCollection $coll, $element)
    {
        throw new \BadMethodCallException("Insert Row SQL is not used for OneToManyPersister");
    }

    /**
     * {@inheritdoc}
     *
     * @throws \BadMethodCallException Not used for OneToManyPersister.
     */
    protected function getUpdateRowSQL(PersistentCollection $coll)
    {
        throw new \BadMethodCallException("Update Row SQL is not used for OneToManyPersister");
    }

    /**
     * {@inheritdoc}
     *
     * @throws \BadMethodCallException Not used for OneToManyPersister.
     */
    protected function getDeleteSQL(PersistentCollection $coll)
    {
        // Message fixed: it previously said "Update Row SQL", copy-pasted
        // from getUpdateRowSQL().
        throw new \BadMethodCallException("Delete SQL is not used for OneToManyPersister");
    }

    /**
     * {@inheritdoc}
     *
     * @throws \BadMethodCallException Not used for OneToManyPersister.
     */
    protected function getDeleteSQLParameters(PersistentCollection $coll)
    {
        // Message fixed: it previously said "Update Row SQL", copy-pasted
        // from getUpdateRowSQL().
        throw new \BadMethodCallException("Delete SQL is not used for OneToManyPersister");
    }

    /**
     * {@inheritdoc}
     */
    public function count(PersistentCollection $coll)
    {
        $mapping     = $coll->getMapping();
        $targetClass = $this->em->getClassMetadata($mapping['targetEntity']);
        $sourceClass = $this->em->getClassMetadata($mapping['sourceEntity']);
        $id          = $this->em->getUnitOfWork()->getEntityIdentifier($coll->getOwner());

        $whereClauses = array();
        $params       = array();

        $joinColumns = $targetClass->associationMappings[$mapping['mappedBy']]['joinColumns'];

        foreach ($joinColumns as $joinColumn) {
            $whereClauses[] = $joinColumn['name'] . ' = ?';

            $params[] = ($targetClass->containsForeignIdentifier)
                ? $id[$sourceClass->getFieldForColumn($joinColumn['referencedColumnName'])]
                : $id[$sourceClass->fieldNames[$joinColumn['referencedColumnName']]];
        }

        // Apply any enabled SQL filters against the root entity's table alias.
        $filterTargetClass = $this->em->getClassMetadata($targetClass->rootEntityName);
        foreach ($this->em->getFilters()->getEnabledFilters() as $filter) {
            if ($filterExpr = $filter->addFilterConstraint($filterTargetClass, 't')) {
                $whereClauses[] = '(' . $filterExpr . ')';
            }
        }

        $sql = 'SELECT count(*)'
             . ' FROM ' . $this->quoteStrategy->getTableName($targetClass, $this->platform) . ' t'
             . ' WHERE ' . implode(' AND ', $whereClauses);

        return $this->conn->fetchColumn($sql, $params);
    }

    /**
     * @param \Doctrine\ORM\PersistentCollection $coll
     * @param int                                $offset
     * @param int|null                           $length
     *
     * @return \Doctrine\Common\Collections\ArrayCollection
     */
    public function slice(PersistentCollection $coll, $offset, $length = null)
    {
        $mapping   = $coll->getMapping();
        $uow       = $this->em->getUnitOfWork();
        $persister = $uow->getEntityPersister($mapping['targetEntity']);

        return $persister->getOneToManyCollection($mapping, $coll->getOwner(), $offset, $length);
    }

    /**
     * @param \Doctrine\ORM\PersistentCollection $coll
     * @param object                             $element
     *
     * @return boolean
     */
    public function contains(PersistentCollection $coll, $element)
    {
        $mapping = $coll->getMapping();
        $uow     = $this->em->getUnitOfWork();

        // shortcut for new entities
        $entityState = $uow->getEntityState($element, UnitOfWork::STATE_NEW);

        if ($entityState === UnitOfWork::STATE_NEW) {
            return false;
        }

        // Entity is scheduled for inclusion
        if ($entityState === UnitOfWork::STATE_MANAGED && $uow->isScheduledForInsert($element)) {
            return false;
        }

        $persister = $uow->getEntityPersister($mapping['targetEntity']);

        // only works with single id identifier entities. Will throw an
        // exception in Entity Persisters if that is not the case for the
        // 'mappedBy' field.
        $id = current($uow->getEntityIdentifier($coll->getOwner()));

        return $persister->exists($element, array($mapping['mappedBy'] => $id));
    }

    /**
     * @param \Doctrine\ORM\PersistentCollection $coll
     * @param object                             $element
     *
     * @return boolean
     */
    public function removeElement(PersistentCollection $coll, $element)
    {
        $mapping = $coll->getMapping();

        if ( ! $mapping['orphanRemoval']) {
            // no-op: this is not the owning side, therefore no operations should be applied
            return false;
        }

        $uow = $this->em->getUnitOfWork();

        // shortcut for new entities
        $entityState = $uow->getEntityState($element, UnitOfWork::STATE_NEW);

        if ($entityState === UnitOfWork::STATE_NEW) {
            return false;
        }

        // If Entity is scheduled for inclusion, it is not in this collection.
        // We can assure that because it would have return true before on array check
        if ($entityState === UnitOfWork::STATE_MANAGED && $uow->isScheduledForInsert($element)) {
            return false;
        }

        $this
            ->uow
            ->getEntityPersister($mapping['targetEntity'])
            ->delete($element);

        return true;
    }
}
| mit |
Bucklash/turnerdesign-master | components/com_content/views/categories/tmpl/default_items.php | 2551 | <?php
/**
* @package Joomla.Site
* @subpackage com_content
*
* @copyright Copyright (C) 2005 - 2015 Open Source Matters, Inc. All rights reserved.
* @license GNU General Public License version 2 or later; see LICENSE.txt
*/
defined('_JEXEC') or die;
// $class tags the first rendered subcategory with class="first"; it is
// cleared after the first <div> and re-set to "last" for the final item.
$class = ' class="first"';
JHtml::_('bootstrap.tooltip');
$lang = JFactory::getLanguage();
// Render only when subcategories exist and the configured maximum
// category nesting depth has not been reached.
if (count($this->items[$this->parent->id]) > 0 && $this->maxLevelcat != 0) :
?>
	<?php foreach($this->items[$this->parent->id] as $id => $item) : ?>
		<?php
		// Show the category when empty categories are enabled, or it has
		// articles, or it has child categories.
		if ($this->params->get('show_empty_categories_cat') || $item->numitems || count($item->getChildren())) :
			// The last sibling (no item at $id + 1) gets class="last".
			if (!isset($this->items[$this->parent->id][$id + 1]))
			{
				$class = ' class="last"';
			}
		?>
		<div <?php echo $class; ?> >
		<?php $class = ''; ?>
			<h3 class="page-header item-title">
				<a href="<?php echo JRoute::_(ContentHelperRoute::getCategoryRoute($item->id));?>">
				<?php echo $this->escape($item->title); ?></a>
				<?php if ($this->params->get('show_cat_num_articles_cat') == 1) :?>
					<span class="badge badge-info tip hasTooltip" title="<?php echo JHtml::tooltipText('COM_CONTENT_NUM_ITEMS'); ?>">
						<?php echo $item->numitems; ?>
					</span>
				<?php endif; ?>
				<?php if (count($item->getChildren()) > 0 && $this->maxLevelcat > 1) : ?>
					<a id="category-btn-<?php echo $item->id;?>" href="#category-<?php echo $item->id;?>"
					data-toggle="collapse" data-toggle="button" class="btn btn-mini pull-right"><span class="icon-plus"></span></a>
				<?php endif;?>
			</h3>
			<?php if ($this->params->get('show_description_image') && $item->getParams()->get('image')) : ?>
				<img src="<?php echo $item->getParams()->get('image'); ?>" alt="<?php echo htmlspecialchars($item->getParams()->get('image_alt')); ?>" />
			<?php endif; ?>
			<?php if ($this->params->get('show_subcat_desc_cat') == 1) :?>
				<?php if ($item->description) : ?>
					<div class="category-desc">
						<?php echo JHtml::_('content.prepare', $item->description, '', 'com_content.categories'); ?>
					</div>
				<?php endif; ?>
			<?php endif; ?>
			<?php if (count($item->getChildren()) > 0 && $this->maxLevelcat > 1) :?>
				<div class="collapse fade" id="category-<?php echo $item->id;?>">
					<?php
					// Recurse one level down: temporarily repoint the view at
					// this child, decrement the remaining depth, render, then
					// restore the previous parent and depth.
					$this->items[$item->id] = $item->getChildren();
					$this->parent = $item;
					$this->maxLevelcat--;
					echo $this->loadTemplate('items');
					$this->parent = $item->getParent();
					$this->maxLevelcat++;
					?>
				</div>
			<?php endif; ?>
		</div>
		<?php endif; ?>
	<?php endforeach; ?>
<?php endif; ?>
| gpl-2.0 |
dcurado/mycenae | vendor/golang.org/x/text/internal/number/itoa.go | 2260 | // Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// TODO: use build tags once a low-level public API has been established in
// package strconv.
package number
const (
	// digits holds the digit characters for all supported bases (2..36).
	digits = "0123456789abcdefghijklmnopqrstuvwxyz"
)

// shifts maps power-of-two bases (2, 4, 8, 16, 32) to their shift amount
// (log2 of the base); entries are zero for every other base, which
// formatBits uses to decide whether shift/mask can replace division.
var shifts = [len(digits) + 1]uint{
	1 << 1: 1,
	1 << 2: 2,
	1 << 3: 3,
	1 << 4: 4,
	1 << 5: 5,
}
// formatBits computes the string representation of u in the given base.
// If neg is set, u is treated as negative int64 value. If append_ is
// set, the string is appended to dst and the resulting byte slice is
// returned as the first result value; otherwise the string is returned
// as the second result value.
//
func formatBits(dst []byte, u uint64, base int, neg, append_ bool) (d []byte, s string) {
	if base < 2 || base > len(digits) {
		panic("strconv: illegal AppendInt/FormatInt base")
	}
	// 2 <= base && base <= len(digits)

	// Digits are produced least-significant first and written from the end
	// of the buffer backwards; a[i:] holds the finished number.
	var a [64 + 1]byte // +1 for sign of 64bit value in base 2

	i := len(a)

	if neg {
		u = -u
	}

	// convert bits
	if base == 10 {
		// common case: use constants for / because
		// the compiler can optimize it into a multiply+shift
		if ^uintptr(0)>>32 == 0 {
			// 32-bit uintptr: peel off 9 decimal digits at a time with
			// 64-bit division so the inner loop below can run entirely
			// in cheap uintptr arithmetic.
			for u > uint64(^uintptr(0)) {
				q := u / 1e9
				us := uintptr(u - q*1e9) // us % 1e9 fits into a uintptr
				for j := 9; j > 0; j-- {
					i--
					qs := us / 10
					a[i] = byte(us - qs*10 + '0')
					us = qs
				}
				u = q
			}
		}
		// u guaranteed to fit into a uintptr
		us := uintptr(u)
		for us >= 10 {
			i--
			q := us / 10
			a[i] = byte(us - q*10 + '0')
			us = q
		}
		// u < 10
		i--
		a[i] = byte(us + '0')
	} else if s := shifts[base]; s > 0 {
		// base is power of 2: use shifts and masks instead of / and %
		b := uint64(base)
		m := uintptr(b) - 1 // == 1<<s - 1
		for u >= b {
			i--
			a[i] = digits[uintptr(u)&m]
			u >>= s
		}
		// u < base
		i--
		a[i] = digits[uintptr(u)]
	} else {
		// general case
		b := uint64(base)
		for u >= b {
			i--
			q := u / b
			a[i] = digits[uintptr(u-q*b)]
			u = q
		}
		// u < base
		i--
		a[i] = digits[uintptr(u)]
	}

	// add sign, if any
	if neg {
		i--
		a[i] = '-'
	}

	if append_ {
		d = append(dst, a[i:]...)
		return
	}
	s = string(a[i:])
	return
}
| gpl-3.0 |
ento/homebrew | Library/Formula/ccze.rb | 1656 | class Ccze < Formula
  desc "Robust and modular log colorizer"
  homepage "https://packages.debian.org/wheezy/ccze"
  url "https://mirrors.ocf.berkeley.edu/debian/pool/main/c/ccze/ccze_0.2.1.orig.tar.gz"
  mirror "https://mirrorservice.org/sites/ftp.debian.org/debian/pool/main/c/ccze/ccze_0.2.1.orig.tar.gz"
  sha256 "8263a11183fd356a033b6572958d5a6bb56bfd2dba801ed0bff276cfae528aa3"

  bottle do
    sha256 "7eb127c4017e7530a53e3258f6b013e80fca1a0d30c577813bdc326b8b0e30d3" => :el_capitan
    sha256 "3bf7f9c6ab3410d73348d4f0518f4778ca2e832904f992004bd3a438d2fcd036" => :yosemite
    sha256 "8714d3dbc5bc165b505180b9833fbcdda609e978c6c821ac7a503cd4226619aa" => :mavericks
  end

  depends_on "pcre"

  # Taken from debian: the inline diff (after __END__) removes the
  # -Wmulticharacter flag from src/Makefile.in.
  patch :DATA

  def install
    system "./configure", "--prefix=#{prefix}",
                          "--with-builtins=all"
    system "make", "install"
    # Strange but true: using --mandir above causes the build to fail!
    share.install prefix/"man"
  end

  # Smoke test: the binary runs and prints its help text.
  test do
    system "#{bin}/ccze", "--help"
  end
end
__END__
diff --git a/src/Makefile.in b/src/Makefile.in
index c6f9892..9b93b65 100644
--- a/src/Makefile.in
+++ b/src/Makefile.in
@@ -22,7 +22,7 @@ WFLAGS_GCC = -Wshadow -Wpointer-arith -Waggregate-return \
-Wbad-function-cast -Wsign-compare -Wchar-subscripts \
-Wcomment -Wformat -Wformat-nonliteral -Wformat-security \
-Wimplicit -Wmain -Wmissing-braces -Wparentheses \
- -Wreturn-type -Wswitch -Wmulticharacter \
+ -Wreturn-type -Wswitch \
-Wmissing-noreturn -Wmissing-declarations @WFLAGS_3X@
WFLAGS_ICC = -Wall -wd193,279,810,869,1418,1419
WFLAGS_3X = -Wsequence-point -Wdiv-by-zero -W -Wunused \
| bsd-2-clause |
asianventure1/comnews2 | administrator/components/com_media/layouts/toolbar/deletemedia.php | 493 | <?php
/**
* @package Joomla.Administrator
* @subpackage com_media
*
* @copyright Copyright (C) 2005 - 2015 Open Source Matters, Inc. All rights reserved.
* @license GNU General Public License version 2 or later; see LICENSE.txt
*/
defined('_JEXEC') or die;
// Localised label reused for both the icon tooltip and the button text.
$title = JText::_('JTOOLBAR_DELETE');
?>
<button onclick="MediaManager.submit('folder.delete');" class="btn btn-small">
	<span class="icon-remove" title="<?php echo $title; ?>"></span> <?php echo $title; ?>
</button>
| gpl-2.0 |
yoanngern/newsroom14 | vendor/doctrine/dbal/lib/Doctrine/DBAL/SQLParserUtils.php | 8598 | <?php
/*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* This software consists of voluntary contributions made by many individuals
* and is licensed under the MIT license. For more information, see
* <http://www.doctrine-project.org>.
*/
namespace Doctrine\DBAL;
/**
* Utility class that parses sql statements with regard to types and parameters.
*
* @link www.doctrine-project.org
* @since 2.0
* @author Benjamin Eberlei <[email protected]>
*/
class SQLParserUtils
{
    // Regex for a positional '?' placeholder.
    const POSITIONAL_TOKEN = '\?';
    // Regex for a named ':name' placeholder; the (?<!:) lookbehind keeps a
    // ':' that directly follows another ':' (e.g. PostgreSQL '::type'
    // casts) from being treated as a parameter.
    const NAMED_TOKEN = '(?<!:):[a-zA-Z_][a-zA-Z0-9_]*';

    // Quote characters within string literals can be preceded by a backslash.
    const ESCAPED_SINGLE_QUOTED_TEXT = "'(?:[^'\\\\]|\\\\'?)*'";
    const ESCAPED_DOUBLE_QUOTED_TEXT = '"(?:[^"\\\\]|\\\\"?)*"';
    const ESCAPED_BACKTICK_QUOTED_TEXT = '`(?:[^`\\\\]|\\\\`?)*`';
    /**
     * Gets an array of the placeholders in an sql statements as keys and their positions in the query string.
     *
     * Returns an integer => integer pair (indexed from zero) for a positional statement
     * and a string => int[] pair for a named statement.
     *
     * NOTE(review): for named statements the code below actually builds a
     * position => name map (one entry per occurrence), not string => int[]
     * as stated above.
     *
     * @param string  $statement
     * @param boolean $isPositional
     *
     * @return array
     */
    static public function getPlaceholderPositions($statement, $isPositional = true)
    {
        $match = ($isPositional) ? '?' : ':';
        // Fast path: the placeholder character does not occur at all.
        if (strpos($statement, $match) === false) {
            return array();
        }

        $token = ($isPositional) ? self::POSITIONAL_TOKEN : self::NAMED_TOKEN;
        $paramMap = array();

        // Scan each fragment returned by getUnquotedStatementFragments()
        // (per its name, the parts of the statement outside quoted string
        // literals); $fragment[1] is the fragment's offset within the full
        // statement, added to make match offsets absolute.
        foreach (self::getUnquotedStatementFragments($statement) as $fragment) {
            preg_match_all("/$token/", $fragment[0], $matches, PREG_OFFSET_CAPTURE);
            foreach ($matches[0] as $placeholder) {
                if ($isPositional) {
                    $paramMap[] = $placeholder[1] + $fragment[1];
                } else {
                    $pos = $placeholder[1] + $fragment[1];
                    // Strip the leading ':' from the matched token.
                    $paramMap[$pos] = substr($placeholder[0], 1, strlen($placeholder[0]));
                }
            }
        }

        return $paramMap;
    }
/**
* For a positional query this method can rewrite the sql statement with regard to array parameters.
*
* @param string $query The SQL query to execute.
* @param array $params The parameters to bind to the query.
* @param array $types The types the previous parameters are in.
*
* @return array
*
* @throws SQLParserUtilsException
*/
static public function expandListParameters($query, $params, $types)
{
$isPositional = is_int(key($params));
$arrayPositions = array();
$bindIndex = -1;
foreach ($types as $name => $type) {
++$bindIndex;
if ($type !== Connection::PARAM_INT_ARRAY && $type !== Connection::PARAM_STR_ARRAY) {
continue;
}
if ($isPositional) {
$name = $bindIndex;
}
$arrayPositions[$name] = false;
}
if (( ! $arrayPositions && $isPositional)) {
return array($query, $params, $types);
}
$paramPos = self::getPlaceholderPositions($query, $isPositional);
if ($isPositional) {
$paramOffset = 0;
$queryOffset = 0;
foreach ($paramPos as $needle => $needlePos) {
if ( ! isset($arrayPositions[$needle])) {
continue;
}
$needle += $paramOffset;
$needlePos += $queryOffset;
$count = count($params[$needle]);
$params = array_merge(
array_slice($params, 0, $needle),
$params[$needle],
array_slice($params, $needle + 1)
);
$types = array_merge(
array_slice($types, 0, $needle),
$count ?
array_fill(0, $count, $types[$needle] - Connection::ARRAY_PARAM_OFFSET) : // array needles are at PDO::PARAM_* + 100
array(),
array_slice($types, $needle + 1)
);
$expandStr = $count ? implode(", ", array_fill(0, $count, "?")) : 'NULL';
$query = substr($query, 0, $needlePos) . $expandStr . substr($query, $needlePos + 1);
$paramOffset += ($count - 1); // Grows larger by number of parameters minus the replaced needle.
$queryOffset += (strlen($expandStr) - 1);
}
return array($query, $params, $types);
}
$queryOffset = 0;
$typesOrd = array();
$paramsOrd = array();
foreach ($paramPos as $pos => $paramName) {
$paramLen = strlen($paramName) + 1;
$value = static::extractParam($paramName, $params, true);
if ( ! isset($arrayPositions[$paramName]) && ! isset($arrayPositions[':' . $paramName])) {
$pos += $queryOffset;
$queryOffset -= ($paramLen - 1);
$paramsOrd[] = $value;
$typesOrd[] = static::extractParam($paramName, $types, false, \PDO::PARAM_STR);
$query = substr($query, 0, $pos) . '?' . substr($query, ($pos + $paramLen));
continue;
}
$count = count($value);
$expandStr = $count > 0 ? implode(', ', array_fill(0, $count, '?')) : 'NULL';
foreach ($value as $val) {
$paramsOrd[] = $val;
$typesOrd[] = static::extractParam($paramName, $types, false) - Connection::ARRAY_PARAM_OFFSET;
}
$pos += $queryOffset;
$queryOffset += (strlen($expandStr) - $paramLen);
$query = substr($query, 0, $pos) . $expandStr . substr($query, ($pos + $paramLen));
}
return array($query, $paramsOrd, $typesOrd);
}
/**
* Slice the SQL statement around pairs of quotes and
* return string fragments of SQL outside of quoted literals.
* Each fragment is captured as a 2-element array:
*
* 0 => matched fragment string,
* 1 => offset of fragment in $statement
*
* @param string $statement
* @return array
*/
static private function getUnquotedStatementFragments($statement)
{
$literal = self::ESCAPED_SINGLE_QUOTED_TEXT . '|' .
self::ESCAPED_DOUBLE_QUOTED_TEXT . '|' .
self::ESCAPED_BACKTICK_QUOTED_TEXT;
preg_match_all("/([^'\"`]+)(?:$literal)?/s", $statement, $fragments, PREG_OFFSET_CAPTURE);
return $fragments[1];
}
/**
* @param string $paramName The name of the parameter (without a colon in front)
* @param array $paramsOrTypes A hash of parameters or types
* @param bool $isParam
* @param mixed $defaultValue An optional default value. If omitted, an exception is thrown
*
* @throws SQLParserUtilsException
* @return mixed
*/
static private function extractParam($paramName, $paramsOrTypes, $isParam, $defaultValue = null)
{
if (array_key_exists($paramName, $paramsOrTypes)) {
return $paramsOrTypes[$paramName];
}
// Hash keys can be prefixed with a colon for compatibility
if (array_key_exists(':' . $paramName, $paramsOrTypes)) {
return $paramsOrTypes[':' . $paramName];
}
if (null !== $defaultValue) {
return $defaultValue;
}
if ($isParam) {
throw SQLParserUtilsException::missingParam($paramName);
}
throw SQLParserUtilsException::missingType($paramName);
}
}
| mit |
soltysh/kubernetes | staging/src/k8s.io/code-generator/cmd/informer-gen/generators/versioninterface.go | 3381 | /*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package generators
import (
"io"
"k8s.io/gengo/generator"
"k8s.io/gengo/namer"
"k8s.io/gengo/types"
"k8s.io/code-generator/cmd/client-gen/generators/util"
)
// versionInterfaceGenerator generates the per-version interface file.
type versionInterfaceGenerator struct {
	generator.DefaultGen
	outputPackage string              // package the generated file belongs to (used by the "raw" namer)
	imports       namer.ImportTracker // accumulates imports needed by the rendered templates
	types         []*types.Type       // informer types exposed on the generated Interface
	filtered      bool                // set after the first Filter call so the file is generated once
	internalInterfacesPackage string  // package providing TweakListOptionsFunc / SharedInformerFactory
}

// Compile-time assertion that the generator implements generator.Generator.
var _ generator.Generator = &versionInterfaceGenerator{}
// Filter admits exactly one type: this generator emits a single file per
// version, so only the first type offered is accepted and every later one
// is rejected.
func (g *versionInterfaceGenerator) Filter(c *generator.Context, t *types.Type) bool {
	if g.filtered {
		return false
	}
	g.filtered = true
	return true
}
// Namers exposes the name system used by the templates in this file. The
// "raw" namer renders type references relative to g.outputPackage and
// records any imports it needs on the shared tracker.
func (g *versionInterfaceGenerator) Namers(c *generator.Context) namer.NameSystems {
	ns := namer.NameSystems{}
	ns["raw"] = namer.NewRawNamer(g.outputPackage, g.imports)
	return ns
}
// Imports returns the import lines collected by the tracker while the
// templates were rendered.
func (g *versionInterfaceGenerator) Imports(c *generator.Context) (imports []string) {
	lines := g.imports.ImportLines()
	imports = append(imports, lines...)
	return imports
}
// GenerateType writes the Interface definition for this group version plus
// one accessor method per informer type.
func (g *versionInterfaceGenerator) GenerateType(c *generator.Context, t *types.Type, w io.Writer) error {
	writer := generator.NewSnippetWriter(w, c, "$", "$")

	args := map[string]interface{}{
		"interfacesTweakListOptionsFunc":  c.Universe.Type(types.Name{Package: g.internalInterfacesPackage, Name: "TweakListOptionsFunc"}),
		"interfacesSharedInformerFactory": c.Universe.Type(types.Name{Package: g.internalInterfacesPackage, Name: "SharedInformerFactory"}),
		"types":                           g.types,
	}
	writer.Do(versionTemplate, args)

	for _, informerType := range g.types {
		// Client-gen tags may live on the type itself or right above it.
		commentLines := append(informerType.SecondClosestCommentLines, informerType.CommentLines...)
		tags, err := util.ParseClientGenTags(commentLines)
		if err != nil {
			return err
		}
		args["namespaced"] = !tags.NonNamespaced
		args["type"] = informerType
		writer.Do(versionFuncTemplate, args)
	}

	return writer.Error()
}
// versionTemplate renders the per-version Interface together with the
// unexported implementation struct and its New constructor.
var versionTemplate = `
// Interface provides access to all the informers in this group version.
type Interface interface {
$range .types -$
// $.|publicPlural$ returns a $.|public$Informer.
$.|publicPlural$() $.|public$Informer
$end$
}
type version struct {
factory $.interfacesSharedInformerFactory|raw$
namespace string
tweakListOptions $.interfacesTweakListOptionsFunc|raw$
}
// New returns a new Interface.
func New(f $.interfacesSharedInformerFactory|raw$, namespace string, tweakListOptions $.interfacesTweakListOptionsFunc|raw$) Interface {
return &version{factory: f, namespace: namespace, tweakListOptions: tweakListOptions}
}
`
// versionFuncTemplate renders one accessor method per informer type on the
// unexported version struct.
var versionFuncTemplate = `
// $.type|publicPlural$ returns a $.type|public$Informer.
func (v *version) $.type|publicPlural$() $.type|public$Informer {
return &$.type|private$Informer{factory: v.factory$if .namespaced$, namespace: v.namespace$end$, tweakListOptions: v.tweakListOptions}
}
`
| apache-2.0 |
beannguyen/viettravel | vendor/phpspec/phpspec/src/PhpSpec/Formatter/Html/ReportPassedItem.php | 1111 | <?php
/*
* This file is part of PhpSpec, A php toolset to drive emergent
* design by specification.
*
* (c) Marcello Duarte <[email protected]>
* (c) Konstantin Kudryashov <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace PhpSpec\Formatter\Html;
use PhpSpec\Event\ExampleEvent;
use PhpSpec\Formatter\Template as TemplateInterface;
class ReportPassedItem
{
    /**
     * @var TemplateInterface Renderer used to produce the report fragment.
     */
    private $template;

    /**
     * @var ExampleEvent The passed example this item reports on.
     */
    private $event;

    /**
     * @param TemplateInterface $template
     * @param ExampleEvent      $event
     */
    public function __construct(TemplateInterface $template, ExampleEvent $event)
    {
        $this->template = $template;
        $this->event = $event;
    }

    /**
     * Renders the "passed example" HTML fragment for the wrapped event.
     */
    public function write()
    {
        $templateVars = array(
            'title' => $this->event->getTitle()
        );

        $this->template->render(Template::DIR.'/Template/ReportPass.html', $templateVars);
    }
}
| apache-2.0 |
fanchlerouge/moodle | admin/tool/monitor/classes/rule.php | 8761 | <?php
// This file is part of Moodle - http://moodle.org/
//
// Moodle is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Moodle is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Moodle. If not, see <http://www.gnu.org/licenses/>.
/**
* Class represents a single rule.
*
* @package tool_monitor
* @copyright 2014 onwards Ankit Agarwal <[email protected]>
* @license http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
*/
namespace tool_monitor;
defined('MOODLE_INTERNAL') || die();
/**
* Class represents a single rule.
*
* @since Moodle 2.8
* @package tool_monitor
* @copyright 2014 onwards Ankit Agarwal <[email protected]>
* @license http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
*/
class rule {

    /**
     * @var \stdClass The rule object form database.
     */
    protected $rule;

    /**
     * Constructor.
     *
     * @param \stdClass $rule A rule object from database.
     */
    public function __construct($rule) {
        $this->rule = $rule;
    }

    /**
     * Can the current user manage this rule?
     *
     * @return bool true if the current user can manage this rule, else false.
     */
    public function can_manage_rule() {
        $courseid = $this->courseid;
        // Site-wide rules (no course id) are managed at system context level.
        $context = empty($courseid) ? \context_system::instance() : \context_course::instance($this->courseid);
        return has_capability('tool/monitor:managerules', $context);
    }

    /**
     * Api to duplicate a rule in a given courseid.
     *
     * @param int $finalcourseid Final course id.
     */
    public function duplicate_rule($finalcourseid) {
        $rule = fullclone($this->rule);
        // Unset the id so that rule_manager inserts a brand new record.
        unset($rule->id);
        $rule->courseid = $finalcourseid;
        $time = time();
        $rule->timecreated = $time;
        $rule->timemodified = $time;
        rule_manager::add_rule($rule);
    }

    /**
     * Delete this rule.
     *
     * Note: It also removes all associated subscriptions.
     */
    public function delete_rule() {
        rule_manager::delete_rule($this->id);
    }

    /**
     * Gets the rule subscribe options for a given course and rule.
     *
     * Could be a select drop down with a list of possible module
     * instances or a single link to subscribe if the rule plugin
     * is not a module.
     *
     * @param int $courseid course id
     *
     * @return \single_select|\moodle_url|string
     * @throws \coding_exception
     */
    public function get_subscribe_options($courseid) {
        global $CFG;

        $url = new \moodle_url($CFG->wwwroot. '/admin/tool/monitor/index.php', array(
            'courseid' => $courseid,
            'ruleid' => $this->id,
            'action' => 'subscribe',
            'sesskey' => sesskey()
        ));

        // Rules for non-activity plugins are subscribed to through a plain link.
        if (strpos($this->plugin, 'mod_') !== 0) {
            return $url;
        } else {
            // Single select when the plugin is an activity.
            $options = array();
            $options[0] = get_string('allmodules', 'tool_monitor');

            if ($courseid == 0) {
                // They need to be in a course to select module instance.
                return get_string('selectcourse', 'tool_monitor');
            }

            // Let them select an instance.
            $cms = get_fast_modinfo($courseid);
            $instances = $cms->get_instances_of(str_replace('mod_', '', $this->plugin));
            foreach ($instances as $cminfo) {
                // Don't list instances that are not visible or available to the user.
                if ($cminfo->uservisible && $cminfo->available) {
                    $options[$cminfo->id] = $cminfo->get_formatted_name();
                }
            }

            return new \single_select($url, 'cmid', $options);
        }
    }

    /**
     * Subscribe an user to this rule.
     *
     * @param int $courseid Course id.
     * @param int $cmid Course module id.
     * @param int $userid User id. Defaults to the logged-in user when 0.
     *
     * @throws \coding_exception
     */
    public function subscribe_user($courseid, $cmid, $userid = 0) {
        global $USER;
        if ($this->courseid != $courseid && $this->courseid != 0) {
            // Trying to subscribe to a rule that belongs to a different course. Should never happen.
            throw new \coding_exception('Can not subscribe to rules from a different course');
        }
        if ($cmid !== 0) {
            $cms = get_fast_modinfo($courseid);
            $cminfo = $cms->get_cm($cmid);
            if (!$cminfo->uservisible || !$cminfo->available) {
                // Trying to subscribe to a hidden or restricted cm. Should never happen.
                throw new \coding_exception('You cannot do that');
            }
        }
        $userid = empty($userid) ? $USER->id : $userid;
        subscription_manager::create_subscription($this->id, $courseid, $cmid, $userid);
    }

    /**
     * Magic get method.
     *
     * Proxies property reads to the underlying rule database record.
     *
     * @param string $prop property to get.
     *
     * @return mixed
     * @throws \coding_exception
     */
    public function __get($prop) {
        if (property_exists($this->rule, $prop)) {
            return $this->rule->$prop;
        }
        throw new \coding_exception('Property "' . $prop . '" doesn\'t exist');
    }

    /**
     * Return the rule data to be used while setting mform.
     *
     * @throws \coding_exception
     */
    public function get_mform_set_data() {
        if (!empty($this->rule)) {
            $rule = fullclone($this->rule);
            // Editor form fields expect an array of text + format.
            $rule->description = array('text' => $rule->description, 'format' => $rule->descriptionformat);
            $rule->template = array('text' => $rule->template, 'format' => $rule->templateformat);
            return $rule;
        }
        throw new \coding_exception('Invalid call to get_mform_set_data.');
    }

    /**
     * Method to get event name.
     *
     * @return string
     * @throws \coding_exception
     */
    public function get_event_name() {
        $eventclass = $this->eventname;
        if (class_exists($eventclass)) {
            return $eventclass::get_name();
        }
        // The event class the rule was built for no longer exists on this site.
        return get_string('eventnotfound', 'tool_monitor');
    }

    /**
     * Get filter description.
     *
     * @return string
     */
    public function get_filters_description() {
        $a = new \stdClass();
        $a->freq = $this->frequency;
        $mins = $this->timewindow / MINSECS; // Convert seconds to minutes.
        $a->mins = $mins;
        return get_string('freqdesc', 'tool_monitor', $a);
    }

    /**
     * Get properly formatted name of the course associated.
     *
     * @param \context $context context where this name would be displayed.
     * @return string The course fullname.
     */
    public function get_course_name($context) {
        $courseid = $this->courseid;
        if (empty($courseid)) {
            return get_string('site');
        } else {
            $course = get_course($courseid);
            return format_string($course->fullname, true, array('context' => $context));
        }
    }

    /**
     * Get properly formatted name of the rule associated.
     *
     * @param \context $context context where this name would be displayed.
     * @return string Formatted name of the rule.
     */
    public function get_name(\context $context) {
        return format_text($this->name, FORMAT_HTML, array('context' => $context));
    }

    /**
     * Get properly formatted description of the rule associated.
     *
     * @param \context $context context where this description would be displayed.
     * @return string Formatted description of the rule.
     */
    public function get_description(\context $context) {
        return format_text($this->description, $this->descriptionformat, array('context' => $context));
    }

    /**
     * Get name of the plugin associated with this rule
     *
     * @return string Plugin name.
     */
    public function get_plugin_name() {
        if ($this->plugin === 'core') {
            $string = get_string('core', 'tool_monitor');
        } else if (get_string_manager()->string_exists('pluginname', $this->plugin)) {
            $string = get_string('pluginname', $this->plugin);
        } else {
            // Fall back to the raw component name when no language string exists.
            $string = $this->plugin;
        }
        return $string;
    }
}
| gpl-3.0 |
--TEST--
GH-1216: PHPUnit bootstrap must take globals vars even when the file is specified in command line
--FILE--
<?php
// Simulate the CLI invocation: the XML configuration is given before the
// bootstrap file, so globals declared in the configuration must already be
// visible inside the bootstrap script.
$_SERVER['argv'][1] = '--configuration';
$_SERVER['argv'][2] = dirname(__FILE__).'/1216/phpunit1216.xml';
$_SERVER['argv'][3] = '--debug';
$_SERVER['argv'][4] = '--bootstrap';
$_SERVER['argv'][5] = dirname(__FILE__).'/1216/bootstrap1216.php';
$_SERVER['argv'][6] = dirname(__FILE__) . '/1216/Issue1216Test.php';
require __DIR__ . '/../../bootstrap.php';
PHPUnit_TextUI_Command::main();
?>
--EXPECTF--
PHPUnit %s by Sebastian Bergmann and contributors.
Configuration read from %s
Starting test 'Issue1216Test::testConfigAvailableInBootstrap'.
.
Time: %s, Memory: %sMb
OK (1 test, 1 assertion) | mit |
gaearon/cdnjs | ajax/libs/angular-gantt/1.2.6/angular-gantt-labels-plugin.js | 6309 | /*
Project: angular-gantt v1.2.6 - Gantt chart component for AngularJS
Authors: Marco Schweighauser, Rémi Alvergnat
License: MIT
Homepage: http://www.angular-gantt.com
Github: https://github.com/angular-gantt/angular-gantt.git
*/
(function(){
    'use strict';
    angular.module('gantt.labels', ['gantt', 'gantt.labels.templates']).directive('ganttLabels', ['ganttUtils', '$compile', '$document', '$log', function(utils, $compile, $document, $log) {
        // Deprecated labels plugin: injects a <gantt-side-content-labels>
        // element into the gantt side content area when enabled.
        // (The comment originally here described row sorting / sortableState;
        // that text was copied from the sortable plugin and did not apply.)
        return {
            restrict: 'E',
            require: '^gantt',
            scope: {
                enabled: '=?',
                header: '=?'
            },
            link: function(scope, element, attrs, ganttCtrl) {
                var api = ganttCtrl.gantt.api;
                $log.warn('Angular Gantt Labels plugin is deprecated. Please use Table plugin instead.');
                // Load options from global options attribute.
                // NOTE(review): the guard checks scope.options.sortable but the
                // loop copies keys of options.sortable from scope.options - this
                // looks copied from the sortable plugin; confirm which object
                // the option values should come from.
                if (scope.options && typeof(scope.options.sortable) === 'object') {
                    for (var option in scope.options.sortable) {
                        scope[option] = scope.options[option];
                    }
                }
                if (scope.enabled === undefined) {
                    scope.enabled = true;
                }
                if (scope.header === undefined) {
                    scope.header = 'Name';
                }
                // When the side content directive is instantiated, compile a
                // labels element into it, wrapped in an ng-if bound to the
                // plugin's "enabled" flag.
                api.directives.on.new(scope, function(directiveName, sideContentScope, sideContentElement) {
                    if (directiveName === 'ganttSideContent') {
                        var labelsScope = sideContentScope.$new();
                        labelsScope.pluginScope = scope;
                        var ifElement = $document[0].createElement('div');
                        angular.element(ifElement).attr('data-ng-if', 'pluginScope.enabled');
                        angular.element(ifElement).addClass('side-element');
                        var labelsElement = $document[0].createElement('gantt-side-content-labels');
                        angular.element(ifElement).append(labelsElement);
                        sideContentElement.append($compile(ifElement)(labelsScope));
                    }
                });
                // Resize the side area to the widest row label currently rendered.
                function fitSideWidthToLabels() {
                    var labels = ganttCtrl.gantt.side.$element[0].getElementsByClassName('gantt-row-label');
                    var newSideWidth = 0;
                    angular.forEach(labels, function (label) {
                        var width = label.children[0].offsetWidth;
                        newSideWidth = Math.max(newSideWidth, width);
                    });
                    if (newSideWidth >= 0) {
                        api.side.setWidth(newSideWidth);
                    }
                }
                // Exposed to consumers as api.labels.fitSideWidth().
                api.registerMethod('labels', 'fitSideWidth', fitSideWidthToLabels, this);
            }
        };
    }]);
}());
(function () {
    'use strict';
    // Body of the labels column: renders one label per visible row and keeps
    // its height in sync with the gantt body.
    angular.module('gantt.labels').directive('ganttLabelsBody', ['GanttDirectiveBuilder', 'ganttLayout', function (GanttDirectiveBuilder, ganttLayout) {
        var directiveBuilder = new GanttDirectiveBuilder('ganttLabelsBody', 'plugins/labels/labelsBody.tmpl.html');
        directiveBuilder.controller = function ($scope) {
            var scrollBarHeight = ganttLayout.getScrollBarHeight();
            // CSS for the labels body: caps its height so the labels column
            // lines up with the gantt body when a max height is configured.
            $scope.getLabelsCss = function () {
                if (!$scope.maxHeight) {
                    return {};
                }
                var horizontalScrollBar = $scope.gantt.scroll.isHScrollbarVisible() ? scrollBarHeight : 0;
                return {
                    'max-height': $scope.maxHeight - horizontalScrollBar - $scope.gantt.header.getHeight() + 'px'
                };
            };
        };
        return directiveBuilder.build();
    }]);
}());
(function () {
    'use strict';
    // Header cell shown above the labels column.
    angular.module('gantt.labels').directive('ganttLabelsHeader', ['GanttDirectiveBuilder', function (GanttDirectiveBuilder) {
        return new GanttDirectiveBuilder('ganttLabelsHeader', 'plugins/labels/labelsHeader.tmpl.html').build();
    }]);
}());
(function () {
    'use strict';
    // Container combining the labels header and body inside the side content.
    angular.module('gantt.labels').directive('ganttSideContentLabels', ['GanttDirectiveBuilder', function (GanttDirectiveBuilder) {
        return new GanttDirectiveBuilder('ganttSideContentLabels', 'plugins/labels/sideContentLabels.tmpl.html').build();
    }]);
}());
angular.module('gantt.labels.templates', []).run(['$templateCache', function($templateCache) {
$templateCache.put('plugins/labels/labelsBody.tmpl.html',
'<div class="gantt-labels-body" ng-style="getLabelsCss()">\n' +
' <div gantt-vertical-scroll-receiver>\n' +
' <div ng-repeat="row in gantt.rowsManager.visibleRows track by row.model.id">\n' +
' <div gantt-row-label\n' +
' class="gantt-row-label gantt-row-height"\n' +
' ng-class="row.model.classes"\n' +
' ng-style="{\'height\': row.model.height}">\n' +
' <span class="gantt-label-text">{{row.model.name}}</span>\n' +
' </div>\n' +
' </div>\n' +
' </div>\n' +
'</div>\n' +
'');
$templateCache.put('plugins/labels/labelsHeader.tmpl.html',
'<div class="gantt-labels-header">\n' +
' <div ng-show="gantt.columnsManager.columns.length > 0 && gantt.columnsManager.headers.length > 0">\n' +
' <div ng-repeat="header in gantt.columnsManager.headers">\n' +
' <div class="gantt-row-height" ng-class="{\'gantt-labels-header-row\': $last, \'gantt-labels-header-row-last\': $last}"><span>{{$last ? pluginScope.header : ""}}</span></div>\n' +
' </div>\n' +
' </div>\n' +
'</div>\n' +
'');
$templateCache.put('plugins/labels/sideContentLabels.tmpl.html',
'<div class="gantt-side-content-labels">\n' +
' <gantt-labels-header>\n' +
' </gantt-labels-header>\n' +
' <gantt-labels-body>\n' +
' </gantt-labels-body>\n' +
'</div>\n' +
'');
}]);
//# sourceMappingURL=angular-gantt-labels-plugin.js.map | mit |
// Shortcode-generator metadata for the [related_posts] shortcode: describes
// the dialog fields offered by the Woo shortcode generator. Consumed by the
// generator UI, not executed as logic.
wooShortcodeMeta={
	attributes:[
		{
			label:"Limit",
			id:"limit",
			help:"Number of posts to show (default: 5)."
		},
		{
			label:"Image",
			id:"image",
			help:"Thumbnail size, 0 = off (default: 0)."
		}
	],
	disablePreview:true,
	defaultContent:"",
	shortcode:"related_posts"
};
// Compile-only regression test for qualified calls: NS_1 declares a *class*
// named foo while NS_2 declares a function template foo. The explicitly
// qualified call NS_2::foo(a) must resolve to the NS_2 function template even
// though the argument's namespace (NS_1) contains a type with the same name.
// NOTE(review): presumably guards against argument-dependent lookup
// interfering with a qualified call - confirm against the original bug report.
namespace NS_1 {
  struct A {};
  struct foo {};
}
namespace NS_2 {
  template <typename T> void foo(T);
  template <typename T>
  void bar() {
    NS_1::A a;
    NS_2::foo(a);  // must compile: qualified lookup selects NS_2::foo
  }
  template void bar<int>();
}
nouveller/cdnjs | ajax/libs/codemirror/5.0.0/mode/haxe/haxe.js | 17064 | // CodeMirror, copyright (c) by Marijn Haverbeke and others
// Distributed under an MIT license: http://codemirror.net/LICENSE
(function(mod) {
if (typeof exports == "object" && typeof module == "object") // CommonJS
mod(require("../../lib/codemirror"));
else if (typeof define == "function" && define.amd) // AMD
define(["../../lib/codemirror"], mod);
else // Plain browser env
mod(CodeMirror);
})(function(CodeMirror) {
"use strict";
CodeMirror.defineMode("haxe", function(config, parserConfig) {
var indentUnit = config.indentUnit;
// Tokenizer
var keywords = function(){
function kw(type) {return {type: type, style: "keyword"};}
var A = kw("keyword a"), B = kw("keyword b"), C = kw("keyword c");
var operator = kw("operator"), atom = {type: "atom", style: "atom"}, attribute = {type:"attribute", style: "attribute"};
var type = kw("typedef");
return {
"if": A, "while": A, "else": B, "do": B, "try": B,
"return": C, "break": C, "continue": C, "new": C, "throw": C,
"var": kw("var"), "inline":attribute, "static": attribute, "using":kw("import"),
"public": attribute, "private": attribute, "cast": kw("cast"), "import": kw("import"), "macro": kw("macro"),
"function": kw("function"), "catch": kw("catch"), "untyped": kw("untyped"), "callback": kw("cb"),
"for": kw("for"), "switch": kw("switch"), "case": kw("case"), "default": kw("default"),
"in": operator, "never": kw("property_access"), "trace":kw("trace"),
"class": type, "abstract":type, "enum":type, "interface":type, "typedef":type, "extends":type, "implements":type, "dynamic":type,
"true": atom, "false": atom, "null": atom
};
}();
var isOperatorChar = /[+\-*&%=<>!?|]/;
function chain(stream, state, f) {
state.tokenize = f;
return f(stream, state);
}
// Consume characters from `stream` until an unescaped occurrence of `end`.
// Returns false once the unescaped terminator has been consumed; at end of
// stream it returns whether the final character was an unescaped backslash
// (i.e. whether the literal continues, escaped, onto the next line).
function nextUntilUnescaped(stream, end) {
  var escaped = false;
  for (var ch = stream.next(); ch != null; ch = stream.next()) {
    if (ch == end && !escaped) return false;
    escaped = !escaped && ch == "\\";
  }
  return escaped;
}
// Used as scratch variables to communicate multiple values without
// consing up tons of objects.
var type, content;
function ret(tp, style, cont) {
type = tp; content = cont;
return style;
}
function haxeTokenBase(stream, state) {
var ch = stream.next();
if (ch == '"' || ch == "'")
return chain(stream, state, haxeTokenString(ch));
else if (/[\[\]{}\(\),;\:\.]/.test(ch))
return ret(ch);
else if (ch == "0" && stream.eat(/x/i)) {
stream.eatWhile(/[\da-f]/i);
return ret("number", "number");
}
else if (/\d/.test(ch) || ch == "-" && stream.eat(/\d/)) {
stream.match(/^\d*(?:\.\d*)?(?:[eE][+\-]?\d+)?/);
return ret("number", "number");
}
else if (state.reAllowed && (ch == "~" && stream.eat(/\//))) {
nextUntilUnescaped(stream, "/");
stream.eatWhile(/[gimsu]/);
return ret("regexp", "string-2");
}
else if (ch == "/") {
if (stream.eat("*")) {
return chain(stream, state, haxeTokenComment);
}
else if (stream.eat("/")) {
stream.skipToEnd();
return ret("comment", "comment");
}
else {
stream.eatWhile(isOperatorChar);
return ret("operator", null, stream.current());
}
}
else if (ch == "#") {
stream.skipToEnd();
return ret("conditional", "meta");
}
else if (ch == "@") {
stream.eat(/:/);
stream.eatWhile(/[\w_]/);
return ret ("metadata", "meta");
}
else if (isOperatorChar.test(ch)) {
stream.eatWhile(isOperatorChar);
return ret("operator", null, stream.current());
}
else {
var word;
if(/[A-Z]/.test(ch))
{
stream.eatWhile(/[\w_<>]/);
word = stream.current();
return ret("type", "variable-3", word);
}
else
{
stream.eatWhile(/[\w_]/);
var word = stream.current(), known = keywords.propertyIsEnumerable(word) && keywords[word];
return (known && state.kwAllowed) ? ret(known.type, known.style, word) :
ret("variable", "variable", word);
}
}
}
// Returns a tokenizer for a string literal delimited by `quote`. The
// tokenizer stays installed across lines until the closing quote is consumed
// unescaped, at which point control returns to haxeTokenBase.
function haxeTokenString(quote) {
  return function(stream, state) {
    if (!nextUntilUnescaped(stream, quote))
      state.tokenize = haxeTokenBase;
    return ret("string", "string");
  };
}
// Tokenizer state for the interior of a /* ... */ block comment: consumes
// input until the closing "*/" (then reinstalls haxeTokenBase) or until the
// stream runs out on this line, styling everything as a comment.
function haxeTokenComment(stream, state) {
  var maybeEnd = false, ch;
  while (ch = stream.next()) {
    if (ch == "/" && maybeEnd) {
      // Saw "*" immediately before this "/": the comment is closed.
      state.tokenize = haxeTokenBase;
      break;
    }
    maybeEnd = (ch == "*");
  }
  return ret("comment", "comment");
}
// Parser
var atomicTypes = {"atom": true, "number": true, "variable": true, "string": true, "regexp": true};
function HaxeLexical(indented, column, type, align, prev, info) {
this.indented = indented;
this.column = column;
this.type = type;
this.prev = prev;
this.info = info;
if (align != null) this.align = align;
}
// Walk the linked list of local variables on the parser state and report
// whether `varname` is declared there. Returns true on a hit and
// (implicitly) undefined otherwise, matching the truthy use at call sites.
function inScope(state, varname) {
  var entry = state.localVars;
  while (entry) {
    if (entry.name == varname) return true;
    entry = entry.next;
  }
}
function parseHaxe(state, style, type, content, stream) {
var cc = state.cc;
// Communicate our context to the combinators.
// (Less wasteful than consing up a hundred closures on every call.)
cx.state = state; cx.stream = stream; cx.marked = null, cx.cc = cc;
if (!state.lexical.hasOwnProperty("align"))
state.lexical.align = true;
while(true) {
var combinator = cc.length ? cc.pop() : statement;
if (combinator(type, content)) {
while(cc.length && cc[cc.length - 1].lex)
cc.pop()();
if (cx.marked) return cx.marked;
if (type == "variable" && inScope(state, content)) return "variable-2";
if (type == "variable" && imported(state, content)) return "variable-3";
return style;
}
}
}
// Report whether `typename` is one of the imported type names on the state.
// Identifiers starting with a lower-case letter are never treated as types.
// NOTE(review): this reads state.importedtypes as an *array* (length/index),
// while registerimport() builds it as a linked list of {name, next} nodes.
// Only one representation can be live at runtime - confirm how startState
// initialises importedtypes before relying on this lookup.
function imported(state, typename)
{
  if (/[a-z]/.test(typename.charAt(0)))
    return false;
  var len = state.importedtypes.length;
  for (var i = 0; i<len; i++)
    if(state.importedtypes[i]==typename) return true;
}
// Record `importname` as an imported type on the shared parser state,
// skipping duplicates. Entries are kept as a singly linked list of
// {name, next} nodes with the newest entry first.
// NOTE(review): imported() reads importedtypes with array indexing, so
// nodes prepended here would not be found by it - confirm which
// representation the rest of the mode expects.
function registerimport(importname) {
  var state = cx.state;
  for (var t = state.importedtypes; t; t = t.next)
    if(t.name == importname) return;
  state.importedtypes = { name: importname, next: state.importedtypes };
}
// Combinator utils
var cx = {state: null, column: null, marked: null, cc: null};
function pass() {
for (var i = arguments.length - 1; i >= 0; i--) cx.cc.push(arguments[i]);
}
function cont() {
pass.apply(null, arguments);
return true;
}
function register(varname) {
var state = cx.state;
if (state.context) {
cx.marked = "def";
for (var v = state.localVars; v; v = v.next)
if (v.name == varname) return;
state.localVars = {name: varname, next: state.localVars};
}
}
// Combinators
var defaultVars = {name: "this", next: null};
function pushcontext() {
if (!cx.state.context) cx.state.localVars = defaultVars;
cx.state.context = {prev: cx.state.context, vars: cx.state.localVars};
}
function popcontext() {
cx.state.localVars = cx.state.context.vars;
cx.state.context = cx.state.context.prev;
}
function pushlex(type, info) {
var result = function() {
var state = cx.state;
state.lexical = new HaxeLexical(state.indented, cx.stream.column(), type, null, state.lexical, info);
};
result.lex = true;
return result;
}
function poplex() {
var state = cx.state;
if (state.lexical.prev) {
if (state.lexical.type == ")")
state.indented = state.lexical.indented;
state.lexical = state.lexical.prev;
}
}
poplex.lex = true;
function expect(wanted) {
function f(type) {
if (type == wanted) return cont();
else if (wanted == ";") return pass();
else return cont(f);
};
return f;
}
function statement(type) {
if (type == "@") return cont(metadef);
if (type == "var") return cont(pushlex("vardef"), vardef1, expect(";"), poplex);
if (type == "keyword a") return cont(pushlex("form"), expression, statement, poplex);
if (type == "keyword b") return cont(pushlex("form"), statement, poplex);
if (type == "{") return cont(pushlex("}"), pushcontext, block, poplex, popcontext);
if (type == ";") return cont();
if (type == "attribute") return cont(maybeattribute);
if (type == "function") return cont(functiondef);
if (type == "for") return cont(pushlex("form"), expect("("), pushlex(")"), forspec1, expect(")"),
poplex, statement, poplex);
if (type == "variable") return cont(pushlex("stat"), maybelabel);
if (type == "switch") return cont(pushlex("form"), expression, pushlex("}", "switch"), expect("{"),
block, poplex, poplex);
if (type == "case") return cont(expression, expect(":"));
if (type == "default") return cont(expect(":"));
if (type == "catch") return cont(pushlex("form"), pushcontext, expect("("), funarg, expect(")"),
statement, poplex, popcontext);
if (type == "import") return cont(importdef, expect(";"));
if (type == "typedef") return cont(typedef);
return pass(pushlex("stat"), expression, expect(";"), poplex);
}
function expression(type) {
if (atomicTypes.hasOwnProperty(type)) return cont(maybeoperator);
if (type == "function") return cont(functiondef);
if (type == "keyword c") return cont(maybeexpression);
if (type == "(") return cont(pushlex(")"), maybeexpression, expect(")"), poplex, maybeoperator);
if (type == "operator") return cont(expression);
if (type == "[") return cont(pushlex("]"), commasep(expression, "]"), poplex, maybeoperator);
if (type == "{") return cont(pushlex("}"), commasep(objprop, "}"), poplex, maybeoperator);
return cont();
}
function maybeexpression(type) {
if (type.match(/[;\}\)\],]/)) return pass();
return pass(expression);
}
// After a complete operand: accept ++/-- (staying in operator position),
// an infix operator or ":", call arguments, property access, or indexing.
// A ";" (or any unmatched token) ends the expression.
function maybeoperator(type, value) {
  if (type == "operator" && /\+\+|--/.test(value)) return cont(maybeoperator);
  if (type == "operator" || type == ":") return cont(expression);
  if (type == ";") return;
  if (type == "(") return cont(pushlex(")"), commasep(expression, ")"), poplex, maybeoperator);
  if (type == ".") return cont(property, maybeoperator);
  if (type == "[") return cont(pushlex("]"), expression, expect("]"), poplex, maybeoperator);
}
// One or more attributes may stack up before a function or var declaration.
function maybeattribute(type) {
  if (type == "attribute") return cont(maybeattribute);
  if (type == "function") return cont(functiondef);
  if (type == "var") return cont(vardef1);
}
// "@" metadata: a (possibly dotted) name, optionally followed by
// parenthesised arguments, then the statement being annotated.
function metadef(type) {
  if(type == ":") return cont(metadef);
  if(type == "variable") return cont(metadef);
  if(type == "(") return cont(pushlex(")"), commasep(metaargs, ")"), poplex, statement);
}
// A single metadata argument (a bare identifier).
function metaargs(type) {
  if(type == "variable") return cont();
}
// Import path: dotted lowercase segments or a "*" wildcard continue the
// import; a capitalised identifier is the imported type, which is
// recorded (registerimport) so later references highlight as known types.
function importdef (type, value) {
  if(type == "variable" && /[A-Z]/.test(value.charAt(0))) { registerimport(value); return cont(); }
  else if(type == "variable" || type == "property" || type == "." || value == "*") return cont(importdef);
}
// typedef introduces a new capitalised type name; register it like an
// import so subsequent uses are styled consistently.
function typedef (type, value)
{
  if(type == "variable" && /[A-Z]/.test(value.charAt(0))) { registerimport(value); return cont(); }
  else if (type == "type" && /[A-Z]/.test(value.charAt(0))) { return cont(); }
}
// A leading identifier followed by ":" is a label; otherwise re-parse it
// as the start of an expression statement.
function maybelabel(type) {
  if (type == ":") return cont(poplex, statement);
  return pass(maybeoperator, expect(";"), poplex);
}
// Style the identifier after "." as a property access.
function property(type) {
  if (type == "variable") {cx.marked = "property"; return cont();}
}
// Object-literal entry "key: value"; identifier keys are styled as
// properties before the value expression is parsed.
function objprop(type) {
  if (type == "variable") cx.marked = "property";
  if (atomicTypes.hasOwnProperty(type)) return cont(expect(":"), expression);
}
// Build a combinator that parses a comma-separated list of `what`
// productions, terminated by the `end` token. An immediately-seen `end`
// yields an empty list; a missing separator recovers via expect(end).
function commasep(what, end) {
  function step(type) {
    if (type == ",") return cont(what, step);
    if (type == end) return cont();
    return cont(expect(end));
  }
  return function(type) {
    return type == end ? cont() : pass(what, step);
  };
}
// Statements until the matching "}".
function block(type) {
  if (type == "}") return cont();
  return pass(statement, block);
}
// First variable of a var declaration: register it in the local scope,
// then allow a type annotation and an initialiser/continuation.
function vardef1(type, value) {
  if (type == "variable"){register(value); return cont(typeuse, vardef2);}
  return cont();
}
// Optional "= expr" initialiser and/or "," chaining to the next variable.
function vardef2(type, value) {
  if (value == "=") return cont(expression, vardef2);
  if (type == ",") return cont(vardef1);
}
// for-loop head: register the loop variable, then parse the
// "in <iterable>" part inside its own lexical scope and variable context.
function forspec1(type, value) {
  if (type == "variable") {
    register(value);
  }
  return cont(pushlex(")"), pushcontext, forin, expression, poplex, statement, popcontext);
}
// Consume the "in" keyword of a for-in loop, if present.
function forin(_type, value) {
  if (value == "in") return cont();
}
// Function declaration/expression: an optional name ("new" is skipped for
// constructors) is registered locally, then the parameter list is parsed
// in a fresh variable context, followed by an optional return-type
// annotation and the body statement.
function functiondef(type, value) {
  if (type == "variable") {register(value); return cont(functiondef);}
  if (value == "new") return cont(functiondef);
  if (type == "(") return cont(pushlex(")"), pushcontext, commasep(funarg, ")"), poplex, typeuse, statement, popcontext);
}
// Optional ": Type" annotation after a name or parameter list.
function typeuse(type) {
  if(type == ":") return cont(typestring);
}
// The annotation itself: a type name, a plain identifier, or an anonymous
// structure type "{ field: Type, ... }".
function typestring(type) {
  if(type == "type") return cont();
  if(type == "variable") return cont();
  if(type == "{") return cont(pushlex("}"), commasep(typeprop, "}"), poplex);
}
// One field of an anonymous structure type.
function typeprop(type) {
  if(type == "variable") return cont(typeuse);
}
// A function parameter: register it in the function's context, then
// allow a type annotation.
function funarg(type, value) {
  if (type == "variable") {register(value); return cont(typeuse);}
}
// Interface
return {
  // Fresh parser state for a document whose first line starts at basecolumn.
  startState: function(basecolumn) {
    var defaulttypes = ["Int", "Float", "String", "Void", "Std", "Bool", "Dynamic", "Array"];
    return {
      tokenize: haxeTokenBase,
      reAllowed: true,   // whether a "/" here may start a regexp literal
      kwAllowed: true,   // keywords allowed (cleared right after ".")
      cc: [],            // stack of pending combinator continuations
      lexical: new HaxeLexical((basecolumn || 0) - indentUnit, 0, "block", false),
      localVars: parserConfig.localVars,
      importedtypes: defaulttypes,
      context: parserConfig.localVars && {vars: parserConfig.localVars},
      indented: 0
    };
  },

  token: function(stream, state) {
    if (stream.sol()) {
      if (!state.lexical.hasOwnProperty("align"))
        state.lexical.align = false;
      state.indented = stream.indentation();
    }
    if (stream.eatSpace()) return null;
    var style = state.tokenize(stream, state);
    // NOTE(review): "type" and "content" are presumably mode-closure
    // variables set as a side effect of the tokenizer (they are not
    // locals of this function) — confirm against the top of this mode
    // definition, which is outside this excerpt.
    if (type == "comment") return style;
    state.reAllowed = !!(type == "operator" || type == "keyword c" || type.match(/^[\[{}\(,;:]$/));
    state.kwAllowed = type != '.';
    return parseHaxe(state, style, type, content, stream);
  },

  // Compute the indentation for a line that will start with textAfter.
  indent: function(state, textAfter) {
    if (state.tokenize != haxeTokenBase) return 0;
    var firstChar = textAfter && textAfter.charAt(0), lexical = state.lexical;
    if (lexical.type == "stat" && firstChar == "}") lexical = lexical.prev;
    var type = lexical.type, closing = firstChar == type;
    if (type == "vardef") return lexical.indented + 4;
    else if (type == "form" && firstChar == "{") return lexical.indented;
    else if (type == "stat" || type == "form") return lexical.indented + indentUnit;
    else if (lexical.info == "switch" && !closing)
      return lexical.indented + (/^(?:case|default)\b/.test(textAfter) ? indentUnit : 2 * indentUnit);
    else if (lexical.align) return lexical.column + (closing ? 0 : 1);
    else return lexical.indented + (closing ? 0 : indentUnit);
  },

  electricChars: "{}",
  blockCommentStart: "/*",
  blockCommentEnd: "*/",
  lineComment: "//"
};
});
// Register the conventional MIME type for the haxe mode defined above.
CodeMirror.defineMIME("text/x-haxe", "haxe");
// Mode for hxml (Haxe compiler argument) files: "#" comments, "-"/"--"/
// "-D" flags at line start, and single-quoted (possibly multi-line)
// strings. state.define tracks that a -D flag was seen.
CodeMirror.defineMode("hxml", function () {

  return {
    startState: function () {
      return {
        define: false,
        inString: false
      };
    },
    token: function (stream, state) {
      var next = stream.peek();

      // Line comments: "#" swallows the rest of the line.
      if (next == "#") {
        stream.skipToEnd();
        return "comment";
      }

      // Compiler flags must begin the line with "-"; "--" and "-D" get
      // their own styles, and "-D" flips the define flag.
      if (stream.sol() && next == "-") {
        var flagStyle = "variable-2";

        stream.eat(/-/);
        if (stream.peek() == "-") {
          stream.eat(/-/);
          flagStyle = "keyword a";
        }
        if (stream.peek() == "D") {
          stream.eat(/[D]/);
          flagStyle = "keyword c";
          state.define = true;
        }
        stream.eatWhile(/[A-Z]/i);
        return flagStyle;
      }

      // Single-quoted strings, which may span multiple lines.
      next = stream.peek();
      if (state.inString == false && next == "'") {
        state.inString = true;
        next = stream.next();
      }
      if (state.inString == true) {
        if (!stream.skipTo("'")) {
          stream.skipToEnd();
        }
        if (stream.peek() == "'") {
          stream.next();
          state.inString = false;
        }
        return "string";
      }

      stream.next();
      return null;
    },
    lineComment: "#"
  };
});
// Register the conventional MIME type for the hxml mode defined above.
CodeMirror.defineMIME("text/x-hxml", "hxml");
});
| mit |
sunny2601/spree | sample/db/samples/product_option_types.rb | 469 | Spree::Sample.load_sample("products")
# Attach the shared Size and Color option types to both sample jerseys.
size  = Spree::OptionType.find_by_presentation!("Size")
color = Spree::OptionType.find_by_presentation!("Color")

["Ruby on Rails Baseball Jersey", "Spree Baseball Jersey"].each do |name|
  jersey = Spree::Product.find_by_name!(name)
  jersey.option_types = [size, color]
  jersey.save!
end
| bsd-3-clause |
davidxkr/active_merchant | lib/active_merchant/billing/gateways/net_registry.rb | 6377 | module ActiveMerchant
module Billing
# Gateway for netregistry.com.au.
#
# Note that NetRegistry itself uses gateway service providers. At the
# time of this writing, there are at least two (Quest and Ingenico).
# This module has only been tested with Quest.
#
# Also note that NetRegistry does not offer a test mode, nor does it
# have support for the authorize/capture/void functionality by default
# (you may arrange for this as described in "Programming for
# NetRegistry's E-commerce Gateway." [http://rubyurl.com/hNG]), and no
# #void functionality is documented. As a result, the #authorize and
# #capture have not yet been tested through a live gateway, and #void
# will raise an error.
#
# If you have this functionality enabled, please consider contributing
# to ActiveMerchant by writing tests/code for these methods, and
# submitting a patch.
#
# In addition to the standard ActiveMerchant functionality, the
# response will contain a 'receipt' parameter
# (response.params['receipt']) if a receipt was issued by the gateway.
class NetRegistryGateway < Gateway
  # Endpoint of NetRegistry's external gateway CGI.
  URL = 'https://4tknox.au.com/cgi-bin/themerchant.au.com/ecom/external2.pl'

  # Response keys carrying sensitive card/receipt data; these are
  # stripped from parsed responses (see #parse).
  FILTERED_PARAMS = [ 'card_no', 'card_expiry', 'receipt_array' ]

  self.supported_countries = ['AU']

  # Note that support for Diners, Amex, and JCB require extra
  # steps in setting up your account, as detailed in
  # "Programming for NetRegistry's E-commerce Gateway."
  # [http://rubyurl.com/hNG]
  self.supported_cardtypes = [:visa, :master, :diners_club, :american_express, :jcb]
  self.display_name = 'NetRegistry'
  self.homepage_url = 'http://www.netregistry.com.au'

  # Maps ActiveMerchant actions onto the gateway's COMMAND values.
  TRANSACTIONS = {
    :authorization => 'preauth',
    :purchase => 'purchase',
    :capture => 'completion',
    :status => 'status',
    :credit => 'refund'
  }

  # Create a new NetRegistry gateway.
  #
  # Options :login and :password must be given.
  def initialize(options = {})
    requires!(options, :login, :password)
    @options = options
    super
  end
# Note that #authorize and #capture only work if your account
# vendor is St George, and if your account has been setup as
# described in "Programming for NetRegistry's E-commerce
# Gateway." [http://rubyurl.com/hNG]
def authorize(money, credit_card, options = {})
params = {
'AMOUNT' => amount(money),
'CCNUM' => credit_card.number,
'CCEXP' => expiry(credit_card)
}
add_request_details(params, options)
commit(:authorization, params)
end
# Note that #authorize and #capture only work if your account
# vendor is St George, and if your account has been setup as
# described in "Programming for NetRegistry's E-commerce
# Gateway." [http://rubyurl.com/hNG]
def capture(money, authorization, options = {})
requires!(options, :credit_card)
credit_card = options[:credit_card]
params = {
'PREAUTHNUM' => authorization,
'AMOUNT' => amount(money),
'CCNUM' => credit_card.number,
'CCEXP' => expiry(credit_card)
}
add_request_details(params, options)
commit(:capture, params)
end
def purchase(money, credit_card, options = {})
params = {
'AMOUNT' => amount(money),
'CCNUM' => credit_card.number,
'CCEXP' => expiry(credit_card)
}
add_request_details(params, options)
commit(:purchase, params)
end
def credit(money, identification, options = {})
params = {
'AMOUNT' => amount(money),
'TXNREF' => identification
}
add_request_details(params, options)
commit(:credit, params)
end
# Specific to NetRegistry.
#
# Run a 'status' command. This lets you view the status of a
# completed transaction.
#
def status(identification)
params = {
'TXNREF' => identification
}
commit(:status, params)
end
private
def add_request_details(params, options)
params['COMMENT'] = options[:description] unless options[:description].blank?
end
# Return the expiry for the given creditcard in the required
# format for a command.
def expiry(credit_card)
month = format(credit_card.month, :two_digits)
year = format(credit_card.year , :two_digits)
"#{month}/#{year}"
end
# Post the a request with the given parameters and return the
# response object.
#
# Login and password are added automatically, and the comment is
# omitted if nil.
def commit(action, params)
# get gateway response
response = parse( ssl_post(URL, post_data(action, params)) )
Response.new(response['status'] == 'approved', message_from(response), response,
:authorization => authorization_from(response, action)
)
end
def post_data(action, params)
params['COMMAND'] = TRANSACTIONS[action]
params['LOGIN'] = "#{@options[:login]}/#{@options[:password]}"
URI.encode(params.map{|k,v| "#{k}=#{v}"}.join('&'))
end
def parse(response)
params = {}
lines = response.split("\n")
# Just incase there is no real response returned
params['status'] = lines[0]
params['response_text'] = lines[1]
started = false
lines.each do |line|
if started
key, val = line.chomp.split(/=/, 2)
params[key] = val unless FILTERED_PARAMS.include?(key)
end
started = line.chomp =~ /^\.$/ unless started
end
params
end
def message_from(response)
response['response_text']
end
def authorization_from(response, command)
case command
when :purchase
response['txn_ref']
when :authorization
response['transaction_no']
end
end
end
end
end
| mit |
podgorskiy/TinyFEM | libs/boost/boost/variant/detail/apply_visitor_binary.hpp | 4654 | //-----------------------------------------------------------------------------
// boost variant/detail/apply_visitor_binary.hpp header file
// See http://www.boost.org for updates, documentation, and revision history.
//-----------------------------------------------------------------------------
//
// Copyright (c) 2002-2003
// Eric Friedman
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#ifndef BOOST_VARIANT_DETAIL_APPLY_VISITOR_BINARY_HPP
#define BOOST_VARIANT_DETAIL_APPLY_VISITOR_BINARY_HPP
#include "boost/config.hpp"
#include "boost/detail/workaround.hpp"
#include "boost/variant/detail/generic_result_type.hpp"
#include "boost/variant/detail/apply_visitor_unary.hpp"
#if BOOST_WORKAROUND(__EDG__, BOOST_TESTED_AT(302))
#include "boost/utility/enable_if.hpp"
#include "boost/mpl/not.hpp"
#include "boost/type_traits/is_const.hpp"
#endif
namespace boost {
//////////////////////////////////////////////////////////////////////////
// function template apply_visitor(visitor, visitable1, visitable2)
//
// Visits visitable1 and visitable2 such that their values (which we
// shall call x and y, respectively) are used as arguments in the
// expression visitor(x, y).
//
namespace detail { namespace variant {
// Unary visitor that closes over (visitor, value1) and, when applied to
// the second variant's held value, invokes visitor(value1, value2).
template <typename Visitor, typename Value1>
class apply_visitor_binary_invoke
{
public: // visitor typedefs

    typedef typename Visitor::result_type
        result_type;

private: // representation

    Visitor& visitor_;
    Value1& value1_;

public: // structors

    apply_visitor_binary_invoke(Visitor& visitor, Value1& value1)
        : visitor_(visitor)
        , value1_(value1)
    {
    }

public: // visitor interfaces

    template <typename Value2>
        BOOST_VARIANT_AUX_GENERIC_RESULT_TYPE(result_type)
    operator()(Value2& value2)
    {
        return visitor_(value1_, value2);
    }

private:
    // Declared but not defined: reference members make copy-assignment
    // meaningless, so it is intentionally suppressed.
    apply_visitor_binary_invoke& operator=(const apply_visitor_binary_invoke&);
};
// Unary visitor applied to the *first* visitable: captures (visitor,
// visitable2) and, for each possible value of the first variant, builds
// an apply_visitor_binary_invoke and applies it to the second visitable,
// reducing the binary visitation to two unary visitations.
template <typename Visitor, typename Visitable2>
class apply_visitor_binary_unwrap
{
public: // visitor typedefs

    typedef typename Visitor::result_type
        result_type;

private: // representation

    Visitor& visitor_;
    Visitable2& visitable2_;

public: // structors

    apply_visitor_binary_unwrap(Visitor& visitor, Visitable2& visitable2)
        : visitor_(visitor)
        , visitable2_(visitable2)
    {
    }

public: // visitor interfaces

    template <typename Value1>
        BOOST_VARIANT_AUX_GENERIC_RESULT_TYPE(result_type)
    operator()(Value1& value1)
    {
        apply_visitor_binary_invoke<
              Visitor
            , Value1
            > invoker(visitor_, value1);

        return boost::apply_visitor(invoker, visitable2_);
    }

private:
    // Declared but not defined: reference members make copy-assignment
    // meaningless, so it is intentionally suppressed.
    apply_visitor_binary_unwrap& operator=(const apply_visitor_binary_unwrap&);
};
}} // namespace detail::variant
//
// nonconst-visitor version:
//
#if !BOOST_WORKAROUND(__EDG__, BOOST_TESTED_AT(302))
# define BOOST_VARIANT_AUX_APPLY_VISITOR_NON_CONST_RESULT_TYPE(V) \
BOOST_VARIANT_AUX_GENERIC_RESULT_TYPE(typename V::result_type) \
/**/
#else // EDG-based compilers
# define BOOST_VARIANT_AUX_APPLY_VISITOR_NON_CONST_RESULT_TYPE(V) \
typename enable_if< \
mpl::not_< is_const< V > > \
, BOOST_VARIANT_AUX_GENERIC_RESULT_TYPE(typename V::result_type) \
>::type \
/**/
#endif // EDG-based compilers workaround
// Binary visitation, non-const visitor overload: visits visitable1 and
// then visitable2, ultimately calling visitor(x, y) with the values each
// currently holds.
template <typename Visitor, typename Visitable1, typename Visitable2>
inline
    BOOST_VARIANT_AUX_APPLY_VISITOR_NON_CONST_RESULT_TYPE(Visitor)
apply_visitor(
      Visitor& visitor
    , Visitable1& visitable1, Visitable2& visitable2
    )
{
    ::boost::detail::variant::apply_visitor_binary_unwrap<
          Visitor, Visitable2
        > unwrapper(visitor, visitable2);

    return boost::apply_visitor(unwrapper, visitable1);
}
#undef BOOST_VARIANT_AUX_APPLY_VISITOR_NON_CONST_RESULT_TYPE
//
// const-visitor version:
//
#if !BOOST_WORKAROUND(BOOST_MSVC, <= 1300)
// Binary visitation, const-visitor overload (excluded on MSVC7 and
// below); behaves exactly like the non-const overload above.
template <typename Visitor, typename Visitable1, typename Visitable2>
inline
    BOOST_VARIANT_AUX_GENERIC_RESULT_TYPE(
          typename Visitor::result_type
        )
apply_visitor(
      const Visitor& visitor
    , Visitable1& visitable1, Visitable2& visitable2
    )
{
    ::boost::detail::variant::apply_visitor_binary_unwrap<
          const Visitor, Visitable2
        > unwrapper(visitor, visitable2);

    return boost::apply_visitor(unwrapper, visitable1);
}
#endif // MSVC7 and below exclusion
} // namespace boost
#endif // BOOST_VARIANT_DETAIL_APPLY_VISITOR_BINARY_HPP
| mit |
taydakov/cdnjs | ajax/libs/qoopido.js/3.6.4/polyfill/array/indexof.js | 403 | /*!
* Qoopido.js library v3.6.4, 2015-4-29
* https://github.com/dlueth/qoopido.js
* (c) 2015 Dirk Lueth
* Dual licensed under MIT and GPL
*/
// Registers an Array.prototype.indexOf polyfill with the qoopido module
// system. The previous (minified) implementation scanned with the loop
// condition `o[t] !== undefined`, so it stopped at the first undefined /
// hole and missed any later match, and it ignored the optional ES5
// fromIndex argument. This version follows the ES5 15.4.4.14 algorithm.
!function(r){window.qoopido.register("polyfill/array/indexof",r)}(function(r,o,t,i,n,e,f){"use strict";
	if(!Array.prototype.indexOf) {
		Array.prototype.indexOf = function(searchElement, fromIndex) {
			var length = this.length >>> 0,
				index  = +fromIndex || 0;

			if(!isFinite(index)) {
				index = 0;
			}

			if(index < 0) {
				index += length;

				if(index < 0) {
					index = 0;
				}
			}

			for(; index < length; index++) {
				// `index in this` skips holes in sparse arrays, per spec.
				if(index in this && this[index] === searchElement) {
					return index;
				}
			}

			return -1;
		};
	}

	return Array.prototype.indexOf;
});
optimusThePrime/recrutementTest | vendor/symfony/symfony/src/Symfony/Component/Serializer/Normalizer/CustomNormalizer.php | 1878 | <?php
/*
* This file is part of the Symfony package.
*
* (c) Fabien Potencier <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Symfony\Component\Serializer\Normalizer;
/**
 * Delegates (de)normalization to the object itself.
 *
 * Objects implementing NormalizableInterface / DenormalizableInterface
 * carry their own conversion logic; this normalizer merely hands them
 * the serializer and the raw data.
 *
 * @author Jordi Boggiano <[email protected]>
 */
class CustomNormalizer extends SerializerAwareNormalizer implements NormalizerInterface, DenormalizerInterface
{
    /**
     * {@inheritdoc}
     */
    public function normalize($object, $format = null, array $context = array())
    {
        return $object->normalize($this->serializer, $format, $context);
    }

    /**
     * {@inheritdoc}
     *
     * Note: $class is instantiated with a no-argument constructor before
     * being asked to denormalize itself.
     */
    public function denormalize($data, $class, $format = null, array $context = array())
    {
        $object = new $class();
        $object->denormalize($this->serializer, $data, $format, $context);

        return $object;
    }

    /**
     * Checks if the given data object implements the NormalizableInterface.
     *
     * @param mixed  $data   Data to normalize.
     * @param string $format The format being (de-)serialized from or into.
     *
     * @return Boolean
     */
    public function supportsNormalization($data, $format = null)
    {
        return $data instanceof NormalizableInterface;
    }

    /**
     * Checks if the given class implements the DenormalizableInterface.
     *
     * @param mixed  $data   Data to denormalize from.
     * @param string $type   The class to which the data should be denormalized.
     * @param string $format The format being deserialized from.
     *
     * @return Boolean
     */
    public function supportsDenormalization($data, $type, $format = null)
    {
        $class = new \ReflectionClass($type);

        return $class->isSubclassOf('Symfony\Component\Serializer\Normalizer\DenormalizableInterface');
    }
}
| mit |
odooo/design | vendor/symfony/symfony/src/Symfony/Bundle/SecurityBundle/Tests/DependencyInjection/SecurityExtensionTest.php | 4363 | <?php
/*
* This file is part of the Symfony package.
*
* (c) Fabien Potencier <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Symfony\Bundle\SecurityBundle\Tests\DependencyInjection;
use Symfony\Bundle\SecurityBundle\DependencyInjection\SecurityExtension;
use Symfony\Bundle\SecurityBundle\SecurityBundle;
use Symfony\Bundle\SecurityBundle\Tests\DependencyInjection\Fixtures\UserProvider\DummyProvider;
use Symfony\Component\DependencyInjection\ContainerBuilder;
/**
 * Functional tests for the SecurityBundle container extension: invalid
 * configurations must be rejected at compile time, and optional services
 * must be removable through configuration.
 */
class SecurityExtensionTest extends \PHPUnit_Framework_TestCase
{
    /**
     * A form_login check_path that its firewall pattern cannot match is
     * a configuration error.
     *
     * @expectedException \Symfony\Component\Config\Definition\Exception\InvalidConfigurationException
     * @expectedExceptionMessage The check_path "/some_area/login_check" for login method "form_login" is not matched by the firewall pattern "/secured_area/.*".
     */
    public function testInvalidCheckPath()
    {
        $container = $this->getRawContainer();

        $container->loadFromExtension('security', array(
            'providers' => array(
                'default' => array('id' => 'foo'),
            ),

            'firewalls' => array(
                'some_firewall' => array(
                    'pattern' => '/secured_area/.*',
                    'form_login' => array(
                        'check_path' => '/some_area/login_check',
                    ),
                ),
            ),
        ));

        $container->compile();
    }

    /**
     * A firewall must declare at least one authentication mechanism.
     *
     * @expectedException \Symfony\Component\Config\Definition\Exception\InvalidConfigurationException
     * @expectedExceptionMessage No authentication listener registered for firewall "some_firewall"
     */
    public function testFirewallWithoutAuthenticationListener()
    {
        $container = $this->getRawContainer();

        $container->loadFromExtension('security', array(
            'providers' => array(
                'default' => array('id' => 'foo'),
            ),

            'firewalls' => array(
                'some_firewall' => array(
                    'pattern' => '/.*',
                ),
            ),
        ));

        $container->compile();
    }

    /**
     * A user-provider factory that yields no service definition must be
     * reported as a configuration error.
     *
     * @expectedException \Symfony\Component\Config\Definition\Exception\InvalidConfigurationException
     * @expectedExceptionMessage Unable to create definition for "security.user.provider.concrete.my_foo" user provider
     */
    public function testFirewallWithInvalidUserProvider()
    {
        $container = $this->getRawContainer();

        $extension = $container->getExtension('security');
        $extension->addUserProviderFactory(new DummyProvider());

        $container->loadFromExtension('security', array(
            'providers' => array(
                'my_foo' => array('foo' => array()),
            ),

            'firewalls' => array(
                'some_firewall' => array(
                    'pattern' => '/.*',
                    'http_basic' => array(),
                ),
            ),
        ));

        $container->compile();
    }

    // Setting role_hierarchy to null must remove the hierarchy voter.
    public function testDisableRoleHierarchyVoter()
    {
        $container = $this->getRawContainer();

        $container->loadFromExtension('security', array(
            'providers' => array(
                'default' => array('id' => 'foo'),
            ),

            'role_hierarchy' => null,

            'firewalls' => array(
                'some_firewall' => array(
                    'pattern' => '/.*',
                    'http_basic' => null,
                ),
            ),
        ));

        $container->compile();

        $this->assertFalse($container->hasDefinition('security.access.role_hierarchy_voter'));
    }

    /**
     * Builds a container with the security extension registered but with
     * optimization/removing passes disabled, so unprocessed definitions
     * remain inspectable after compile().
     */
    protected function getRawContainer()
    {
        $container = new ContainerBuilder();
        $security = new SecurityExtension();
        $container->registerExtension($security);

        $bundle = new SecurityBundle();
        $bundle->build($container);

        $container->getCompilerPassConfig()->setOptimizationPasses(array());
        $container->getCompilerPassConfig()->setRemovingPasses(array());

        return $container;
    }

    /**
     * Convenience wrapper: a raw container, already compiled.
     */
    protected function getContainer()
    {
        $container = $this->getRawContainer();
        $container->compile();

        return $container;
    }
}
| mit |
0111001101111010/it325_website | assignments/midterm/bower_components/jquery-ui/ui/minified/i18n/jquery.ui.datepicker-tr.min.js | 806 | /*! jQuery UI - v1.10.4 - 2014-02-16
* http://jqueryui.com
* Copyright 2014 jQuery Foundation and other contributors; Licensed MIT */
// jQuery UI Datepicker locale bundle: Turkish ("tr"). Minified build
// artifact — regenerate from the i18n source rather than editing here.
jQuery(function(e){e.datepicker.regional.tr={closeText:"kapat",prevText:"<geri",nextText:"ileri>",currentText:"bugün",monthNames:["Ocak","Şubat","Mart","Nisan","Mayıs","Haziran","Temmuz","Ağustos","Eylül","Ekim","Kasım","Aralık"],monthNamesShort:["Oca","Şub","Mar","Nis","May","Haz","Tem","Ağu","Eyl","Eki","Kas","Ara"],dayNames:["Pazar","Pazartesi","Salı","Çarşamba","Perşembe","Cuma","Cumartesi"],dayNamesShort:["Pz","Pt","Sa","Ça","Pe","Cu","Ct"],dayNamesMin:["Pz","Pt","Sa","Ça","Pe","Cu","Ct"],weekHeader:"Hf",dateFormat:"dd.mm.yy",firstDay:1,isRTL:!1,showMonthAfterYear:!1,yearSuffix:""},e.datepicker.setDefaults(e.datepicker.regional.tr)});
mycrazydog/ui-drupal | sites/all/modules/jquery_update/replace/ui/ui/jquery.ui.effect-fade.js | 558 | /*!
* jQuery UI Effects Fade 1.10.2
* http://jqueryui.com
*
* Copyright 2013 jQuery Foundation and other contributors
* Released under the MIT license.
* http://jquery.org/license
*
* http://api.jqueryui.com/fade-effect/
*
* Depends:
* jquery.ui.effect.js
*/
(function( $, undefined ) {

// Fade effect: animate the element's opacity toward the resolved mode
// ($.effects.setMode normalises "show"/"hide"/"toggle"), bypassing the
// fx queue and invoking `done` on completion.
$.effects.effect.fade = function( o, done ) {
	var element = $( this );
	var resolvedMode = $.effects.setMode( element, o.mode || "toggle" );
	var properties = { opacity: resolvedMode };
	var settings = {
		queue: false,
		duration: o.duration,
		easing: o.easing,
		complete: done
	};

	element.animate( properties, settings );
};

})( jQuery );
| gpl-2.0 |
kautzar/drpp4 | wp-content/plugins/easypay/includes/gateways/lib/Stripe/Balance.php | 291 | <?php
/**
 * The account balance is a singleton API resource: there is exactly one
 * per account, so it is fetched without an id.
 */
class Stripe_Balance extends Stripe_SingletonApiResource
{
  /**
   * Retrieves the current account balance.
   *
   * @param string|null $apiKey API key to use; falls back to the
   *                            globally configured key when null.
   *
   * @return Stripe_Balance
   */
  public static function retrieve($apiKey=null)
  {
    // get_class() without arguments resolves to the enclosing class
    // name ('Stripe_Balance'), like __CLASS__.
    $class = get_class();
    return self::_scopedSingletonRetrieve($class, $apiKey);
  }
}
| gpl-2.0 |
dsapala/websocket-proxy | Godeps/_workspace/src/github.com/gorilla/websocket/examples/chat/main.go | 878 | // Copyright 2013 The Gorilla WebSocket Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"flag"
"log"
"net/http"
"text/template"
)
// addr is the listen address, overridable via the -addr flag.
var addr = flag.String("addr", ":8080", "http service address")

// homeTempl is parsed once at startup; template.Must panics if
// home.html is missing or invalid.
var homeTempl = template.Must(template.ParseFiles("home.html"))
// serveHome renders the chat page template for GET requests to exactly
// "/"; any other path yields 404 and any other method yields 405.
func serveHome(w http.ResponseWriter, r *http.Request) {
	switch {
	case r.URL.Path != "/":
		http.Error(w, "Not found", http.StatusNotFound)
	case r.Method != "GET":
		http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
	default:
		w.Header().Set("Content-Type", "text/html; charset=utf-8")
		homeTempl.Execute(w, r.Host)
	}
}
// main starts the hub's broadcast loop, registers the HTTP handlers,
// and blocks serving on *addr until ListenAndServe fails.
func main() {
	flag.Parse()
	// h (the shared hub) and serveWs are defined in sibling files.
	go h.run()
	http.HandleFunc("/", serveHome)
	http.HandleFunc("/ws", serveWs)
	err := http.ListenAndServe(*addr, nil)
	if err != nil {
		log.Fatal("ListenAndServe: ", err)
	}
}
| apache-2.0 |
jonathan-fielding/cdnjs | ajax/libs/yui/3.7.3/loader-base/loader-base-debug.js | 94496 | YUI.add('loader-base', function (Y, NAME) {
/**
* The YUI loader core
* @module loader
* @submodule loader-base
*/
// One-time bootstrap of the loader metadata for this YUI version: CDN
// paths, the gallery and yui2 ("2in3") loader groups, and the update
// helpers that retarget those groups at other versions/CDNs.
if (!YUI.Env[Y.version]) {
    (function() {
        var VERSION = Y.version,
            BUILD = '/build/',
            ROOT = VERSION + BUILD,
            CDN_BASE = Y.Env.base,
            GALLERY_VERSION = 'gallery-2012.10.10-19-59',
            TNT = '2in3',
            TNT_VERSION = '4',
            YUI2_VERSION = '2.9.0',
            COMBO_BASE = CDN_BASE + 'combo?',
            META = { version: VERSION,
                     root: ROOT,
                     base: Y.Env.base,
                     comboBase: COMBO_BASE,
                     skin: { defaultSkin: 'sam',
                             base: 'assets/skins/',
                             path: 'skin.css',
                             after: ['cssreset',
                                     'cssfonts',
                                     'cssgrids',
                                     'cssbase',
                                     'cssreset-context',
                                     'cssfonts-context']},
                     groups: {},
                     patterns: {} },
            groups = META.groups,
            // Repoint the yui2 group at a given 2in3 wrapper / YUI 2
            // version, optionally overriding base and comboBase.
            yui2Update = function(tnt, yui2, config) {
                var root = TNT + '.' +
                        (tnt || TNT_VERSION) + '/' +
                        (yui2 || YUI2_VERSION) + BUILD,
                    base = (config && config.base) ? config.base : CDN_BASE,
                    combo = (config && config.comboBase) ? config.comboBase : COMBO_BASE;

                groups.yui2.base = base + root;
                groups.yui2.root = root;
                groups.yui2.comboBase = combo;
            },
            // Repoint the gallery group at a given gallery build tag.
            galleryUpdate = function(tag, config) {
                var root = (tag || GALLERY_VERSION) + BUILD,
                    base = (config && config.base) ? config.base : CDN_BASE,
                    combo = (config && config.comboBase) ? config.comboBase : COMBO_BASE;

                groups.gallery.base = base + root;
                groups.gallery.root = root;
                groups.gallery.comboBase = combo;
            };

        groups[VERSION] = {};

        groups.gallery = {
            ext: false,
            combine: true,
            comboBase: COMBO_BASE,
            update: galleryUpdate,
            patterns: { 'gallery-': { },
                        'lang/gallery-': {},
                        'gallerycss-': { type: 'css' } }
        };

        groups.yui2 = {
            combine: true,
            ext: false,
            comboBase: COMBO_BASE,
            update: yui2Update,
            patterns: {
                'yui2-': {
                    configFn: function(me) {
                        if (/-skin|reset|fonts|grids|base/.test(me.name)) {
                            me.type = 'css';
                            me.path = me.path.replace(/\.js/, '.css');
                            // this makes skins in builds earlier than
                            // 2.6.0 work as long as combine is false
                            me.path = me.path.replace(/\/yui2-skin/,
                                             '/assets/skins/sam/yui2-skin');
                        }
                    }
                }
            }
        };

        // Initialise both groups with the default versions, then publish
        // the metadata for this YUI version.
        galleryUpdate();
        yui2Update();

        YUI.Env[VERSION] = META;
    }());
}
/*jslint forin: true, maxlen: 350 */
/**
* Loader dynamically loads script and css files. It includes the dependency
* information for the version of the library in use, and will automatically pull in
* dependencies for the modules requested. It can also load the
* files from the Yahoo! CDN, and it can utilize the combo service provided on
* this network to reduce the number of http connections required to download
* YUI files.
*
* @module loader
* @main loader
* @submodule loader-base
*/
// Module-scope constants and helpers shared by all Loader instances.
var NOT_FOUND = {},          // sentinel cache entry for missing modules
    NO_REQUIREMENTS = [],    // shared empty requirements list
    MAX_URL_LENGTH = 1024,   // combo URLs longer than this are split
    GLOBAL_ENV = YUI.Env,
    GLOBAL_LOADED = GLOBAL_ENV._loaded,
    CSS = 'css',
    JS = 'js',
    INTL = 'intl',
    DEFAULT_SKIN = 'sam',
    VERSION = Y.version,
    ROOT_LANG = '',
    YObject = Y.Object,
    oeach = YObject.each,
    yArray = Y.Array,
    _queue = GLOBAL_ENV._loaderQueue,
    META = GLOBAL_ENV[VERSION],
    SKIN_PREFIX = 'skin-',
    L = Y.Lang,
    ON_PAGE = GLOBAL_ENV.mods,
    modulekey,
    // Build a "<dir>/<file>[-min].<type>" module path; minified unless
    // nomin is truthy, defaulting to a ".css" extension.
    _path = function(dir, file, type, nomin) {
        var path = dir + '/' + file;
        if (!nomin) {
            path += '-min';
        }
        path += '.' + (type || CSS);
        return path;
    };
// Shared cache of CSS modules already detected as loaded on the page.
if (!YUI.Env._cssLoaded) {
    YUI.Env._cssLoaded = {};
}

/**
 * The component metadata is stored in Y.Env.meta.
 * Part of the loader module.
 * @property meta
 * @for YUI
 */
Y.Env.meta = META;
/**
* Loader dynamically loads script and css files. It includes the dependency
* info for the version of the library in use, and will automatically pull in
* dependencies for the modules requested. It can load the
* files from the Yahoo! CDN, and it can utilize the combo service provided on
* this network to reduce the number of http connections required to download
* YUI files. You can also specify an external, custom combo service to host
* your modules as well.
var Y = YUI();
var loader = new Y.Loader({
filter: 'debug',
base: '../../',
root: 'build/',
combine: true,
require: ['node', 'dd', 'console']
});
var out = loader.resolve(true);
* @constructor
* @class Loader
* @param {Object} config an optional set of configuration options.
* @param {String} config.base The base dir which to fetch this module from
* @param {String} config.comboBase The Combo service base path. Ex: `http://yui.yahooapis.com/combo?`
* @param {String} config.root The root path to prepend to module names for the combo service. Ex: `2.5.2/build/`
* @param {String|Object} config.filter A filter to apply to result urls. <a href="#property_filter">See filter property</a>
* @param {Object} config.filters Per-component filter specification. If specified for a given component, this overrides the filter config.
* @param {Boolean} config.combine Use a combo service to reduce the number of http connections required to load your dependencies
* @param {Boolean} [config.async=true] Fetch files in async
* @param {Array} config.ignore: A list of modules that should never be dynamically loaded
* @param {Array} config.force A list of modules that should always be loaded when required, even if already present on the page
* @param {HTMLElement|String} config.insertBefore Node or id for a node that should be used as the insertion point for new nodes
* @param {Object} config.jsAttributes Object literal containing attributes to add to script nodes
* @param {Object} config.cssAttributes Object literal containing attributes to add to link nodes
* @param {Number} config.timeout The number of milliseconds before a timeout occurs when dynamically loading nodes. If not set, there is no timeout
* @param {Object} config.context Execution context for all callbacks
* @param {Function} config.onSuccess Callback for the 'success' event
* @param {Function} config.onFailure Callback for the 'failure' event
* @param {Function} config.onCSS Callback for the 'CSSComplete' event. When loading YUI components with CSS the CSS is loaded first, then the script. This provides a moment you can tie into to improve the presentation of the page while the script is loading.
* @param {Function} config.onTimeout Callback for the 'timeout' event
* @param {Function} config.onProgress Callback executed each time a script or css file is loaded
* @param {Object} config.modules A list of module definitions. See <a href="#method_addModule">Loader.addModule</a> for the supported module metadata
* @param {Object} config.groups A list of group definitions. Each group can contain specific definitions for `base`, `comboBase`, `combine`, and accepts a list of `modules`.
* @param {String} config.2in3 The version of the YUI 2 in 3 wrapper to use. The intrinsic support for YUI 2 modules in YUI 3 relies on versions of the YUI 2 components inside YUI 3 module wrappers. These wrappers change over time to accomodate the issues that arise from running YUI 2 in a YUI 3 sandbox.
* @param {String} config.yui2 When using the 2in3 project, you can select the version of YUI 2 to use. Valid values are `2.2.2`, `2.3.1`, `2.4.1`, `2.5.2`, `2.6.0`, `2.7.0`, `2.8.0`, `2.8.1` and `2.9.0` [default] -- plus all versions of YUI 2 going forward.
*/
Y.Loader = function(o) {
    var self = this;
    //Catch no config passed.
    o = o || {};
    // FIXME(review): `modulekey` is assigned without `var`, creating an
    // implicit global; it is never read again in this constructor. Confirm
    // nothing else relies on the global before scoping it locally.
    modulekey = META.md5;
    /**
     * Internal callback to handle multiple internal insert() calls
     * so that css is inserted prior to js
     * @property _internalCallback
     * @private
     */
    // self._internalCallback = null;
    /**
     * Callback that will be executed when the loader is finished
     * with an insert
     * @method onSuccess
     * @type function
     */
    // self.onSuccess = null;
    /**
     * Callback that will be executed if there is a failure
     * @method onFailure
     * @type function
     */
    // self.onFailure = null;
    /**
     * Callback for the 'CSSComplete' event. When loading YUI components
     * with CSS the CSS is loaded first, then the script. This provides
     * a moment you can tie into to improve the presentation of the page
     * while the script is loading.
     * @method onCSS
     * @type function
     */
    // self.onCSS = null;
    /**
     * Callback executed each time a script or css file is loaded
     * @method onProgress
     * @type function
     */
    // self.onProgress = null;
    /**
     * Callback that will be executed if a timeout occurs
     * @method onTimeout
     * @type function
     */
    // self.onTimeout = null;
    /**
     * The execution context for all callbacks
     * @property context
     * @default {YUI} the YUI instance
     */
    self.context = Y;
    /**
     * Data that is passed to all callbacks
     * @property data
     */
    // self.data = null;
    /**
     * Node reference or id where new nodes should be inserted before
     * @property insertBefore
     * @type string|HTMLElement
     */
    // self.insertBefore = null;
    /**
     * The charset attribute for inserted nodes
     * @property charset
     * @type string
     * @deprecated Use cssAttributes or jsAttributes instead.
     */
    // self.charset = null;
    /**
     * An object literal containing attributes to add to link nodes
     * @property cssAttributes
     * @type object
     */
    // self.cssAttributes = null;
    /**
     * An object literal containing attributes to add to script nodes
     * @property jsAttributes
     * @type object
     */
    // self.jsAttributes = null;
    /**
     * The base directory.
     * @property base
     * @type string
     * @default http://yui.yahooapis.com/[YUI VERSION]/build/
     */
    self.base = Y.Env.meta.base + Y.Env.meta.root;
    /**
     * Base path for the combo service
     * @property comboBase
     * @type string
     * @default http://yui.yahooapis.com/combo?
     */
    self.comboBase = Y.Env.meta.comboBase;
    /*
     * Base path for language packs.
     */
    // self.langBase = Y.Env.meta.langBase;
    // self.lang = "";
    /**
     * If configured, the loader will attempt to use the combo
     * service for YUI resources and configured external resources.
     * @property combine
     * @type boolean
     * @default true if a base dir isn't in the config
     */
    // Combine only when the configured base points at the combo host
    // (compared against the first 20 chars of the default comboBase).
    self.combine = o.base &&
        (o.base.indexOf(self.comboBase.substr(0, 20)) > -1);
    /**
     * The default separator to use between files in a combo URL
     * @property comboSep
     * @type {String}
     * @default Ampersand
     */
    self.comboSep = '&';
    /**
     * Max url length for combo urls. The default is 1024. This is the URL
     * limit for the Yahoo! hosted combo servers. If consuming
     * a different combo service that has a different URL limit
     * it is possible to override this default by supplying
     * the maxURLLength config option. The config option will
     * only take effect if lower than the default.
     *
     * @property maxURLLength
     * @type int
     */
    self.maxURLLength = MAX_URL_LENGTH;
    /**
     * Ignore modules registered on the YUI global
     * @property ignoreRegistered
     * @default false
     */
    self.ignoreRegistered = o.ignoreRegistered;
    /**
     * Root path to prepend to module path for the combo
     * service
     * @property root
     * @type string
     * @default [YUI VERSION]/build/
     */
    self.root = Y.Env.meta.root;
    /**
     * Timeout value in milliseconds. If set, this value will be used by
     * the get utility. the timeout event will fire if
     * a timeout occurs.
     * @property timeout
     * @type int
     */
    self.timeout = 0;
    /**
     * A list of modules that should not be loaded, even if
     * they turn up in the dependency tree
     * @property ignore
     * @type string[]
     */
    // self.ignore = null;
    /**
     * A list of modules that should always be loaded, even
     * if they have already been inserted into the page.
     * @property force
     * @type string[]
     */
    // self.force = null;
    self.forceMap = {};
    /**
     * Should we allow rollups
     * @property allowRollup
     * @type boolean
     * @default false
     */
    self.allowRollup = false;
    /**
     * A filter to apply to result urls. This filter will modify the default
     * path for all modules. The default path for the YUI library is the
     * minified version of the files (e.g., event-min.js). The filter property
     * can be a predefined filter or a custom filter. The valid predefined
     * filters are:
     * <dl>
     * <dt>DEBUG</dt>
     * <dd>Selects the debug versions of the library (e.g., event-debug.js).
     * This option will automatically include the Logger widget</dd>
     * <dt>RAW</dt>
     * <dd>Selects the non-minified version of the library (e.g., event.js).
     * </dd>
     * </dl>
     * You can also define a custom filter, which must be an object literal
     * containing a search expression and a replace string:
     *
     * myFilter: {
     * 'searchExp': "-min\\.js",
     * 'replaceStr': "-debug.js"
     * }
     *
     * @property filter
     * @type string| {searchExp: string, replaceStr: string}
     */
    // self.filter = null;
    /**
     * per-component filter specification. If specified for a given
     * component, this overrides the filter config.
     * @property filters
     * @type object
     */
    self.filters = {};
    /**
     * The list of requested modules
     * @property required
     * @type {string: boolean}
     */
    self.required = {};
    /**
     * If a module name is predefined when requested, it is checked against
     * the patterns provided in this property. If there is a match, the
     * module is added with the default configuration.
     *
     * At the moment only supporting module prefixes, but anticipate
     * supporting at least regular expressions.
     * @property patterns
     * @type Object
     */
    // self.patterns = Y.merge(Y.Env.meta.patterns);
    self.patterns = {};
    /**
     * The library metadata
     * @property moduleInfo
     */
    // self.moduleInfo = Y.merge(Y.Env.meta.moduleInfo);
    self.moduleInfo = {};
    self.groups = Y.merge(Y.Env.meta.groups);
    /**
     * Provides the information used to skin the skinnable components.
     * The following skin definition would result in 'skin1' and 'skin2'
     * being loaded for calendar (if calendar was requested), and
     * 'sam' for all other skinnable components:
     *
     * skin: {
     * // The default skin, which is automatically applied if not
     * // overridden by a component-specific skin definition.
     * // Change this in to apply a different skin globally
     * defaultSkin: 'sam',
     *
     * // This is combined with the loader base property to get
     * // the default root directory for a skin. ex:
     * // http://yui.yahooapis.com/2.3.0/build/assets/skins/sam/
     * base: 'assets/skins/',
     *
     * // Any component-specific overrides can be specified here,
     * // making it possible to load different skins for different
     * // components. It is possible to load more than one skin
     * // for a given component as well.
     * overrides: {
     * calendar: ['skin1', 'skin2']
     * }
     * }
     * @property skin
     * @type {Object}
     */
    self.skin = Y.merge(Y.Env.meta.skin);
    /*
     * Map of conditional modules
     * @since 3.2.0
     */
    self.conditions = {};
    // map of modules with a hash of modules that meet the requirement
    // self.provides = {};
    self.config = o;
    self._internal = true;
    // _internal is true while the default metadata is seeded so modules
    // added here are not flagged as external (ext).
    self._populateCache();
    /**
     * Set when beginning to compute the dependency tree.
     * Composed of what YUI reports to be loaded combined
     * with what has been loaded by any instance on the page
     * with the version number specified in the metadata.
     * @property loaded
     * @type {string: boolean}
     */
    self.loaded = GLOBAL_LOADED[VERSION];
    /**
     * Should Loader fetch scripts in `async`, defaults to `true`
     * @property async
     */
    self.async = true;
    self._inspectPage();
    self._internal = false;
    // Apply the user config last so it can override the seeded defaults.
    self._config(o);
    self.forceMap = (self.force) ? Y.Array.hash(self.force) : {};
    self.testresults = null;
    if (Y.config.tests) {
        self.testresults = Y.config.tests;
    }
    /**
     * List of rollup files found in the library metadata
     * @property rollups
     */
    // self.rollups = null;
    /**
     * Whether or not to load optional dependencies for
     * the requested modules
     * @property loadOptional
     * @type boolean
     * @default false
     */
    // self.loadOptional = false;
    /**
     * All of the derived dependencies in sorted order, which
     * will be populated when either calculate() or insert()
     * is called
     * @property sorted
     * @type string[]
     */
    self.sorted = [];
    /*
     * A list of modules to attach to the YUI instance when complete.
     * If not supplied, the sorted list of dependencies are applied.
     * @property attaching
     */
    // self.attaching = null;
    /**
     * Flag to indicate the dependency tree needs to be recomputed
     * if insert is called again.
     * @property dirty
     * @type boolean
     * @default true
     */
    self.dirty = true;
    /**
     * List of modules inserted by the utility
     * @property inserted
     * @type {string: boolean}
     */
    self.inserted = {};
    /**
     * List of skipped modules during insert() because the module
     * was not defined
     * @property skipped
     */
    self.skipped = {};
    // Y.on('yui:load', self.loadNext, self);
    self.tested = {};
    /*
     * Cached sorted calculate results
     * @property results
     * @since 3.2.0
     */
    //self.results = {};
    if (self.ignoreRegistered) {
        //Clear inpage already processed modules.
        self._resetModules();
    }
};
Y.Loader.prototype = {
/**
* Checks the cache for modules and conditions, if they do not exist
* process the default metadata and populate the local moduleInfo hash.
* @method _populateCache
* @private
*/
_populateCache: function() {
var self = this,
defaults = META.modules,
cache = GLOBAL_ENV._renderedMods,
i;
if (cache && !self.ignoreRegistered) {
for (i in cache) {
if (cache.hasOwnProperty(i)) {
self.moduleInfo[i] = Y.merge(cache[i]);
}
}
cache = GLOBAL_ENV._conditions;
for (i in cache) {
if (cache.hasOwnProperty(i)) {
self.conditions[i] = Y.merge(cache[i]);
}
}
} else {
for (i in defaults) {
if (defaults.hasOwnProperty(i)) {
self.addModule(defaults[i], i);
}
}
}
},
/**
* Reset modules in the module cache to a pre-processed state so additional
* computations with a different skin or language will work as expected.
* @method _resetModules
* @private
*/
_resetModules: function() {
var self = this, i, o,
mod, name, details;
for (i in self.moduleInfo) {
if (self.moduleInfo.hasOwnProperty(i)) {
mod = self.moduleInfo[i];
name = mod.name;
details = (YUI.Env.mods[name] ? YUI.Env.mods[name].details : null);
if (details) {
self.moduleInfo[name]._reset = true;
self.moduleInfo[name].requires = details.requires || [];
self.moduleInfo[name].optional = details.optional || [];
self.moduleInfo[name].supersedes = details.supercedes || [];
}
if (mod.defaults) {
for (o in mod.defaults) {
if (mod.defaults.hasOwnProperty(o)) {
if (mod[o]) {
mod[o] = mod.defaults[o];
}
}
}
}
delete mod.langCache;
delete mod.skinCache;
if (mod.skinnable) {
self._addSkin(self.skin.defaultSkin, mod.name);
}
}
}
},
    /**
    Regex that matches a CSS URL. Used to guess the file type when it's not
    specified.
    @property REGEX_CSS
    @type RegExp
    @final
    @protected
    @since 3.5.0
    **/
    REGEX_CSS: /\.css(?:[?;].*)?$/i,
    /**
     * Default filters for raw and debug. Each entry maps a filter name to a
     * search expression / replacement pair applied to module URLs.
     * @property FILTER_DEFS
     * @type Object
     * @final
     * @protected
     */
    FILTER_DEFS: {
        RAW: {
            'searchExp': '-min\\.js',
            'replaceStr': '.js'
        },
        DEBUG: {
            'searchExp': '-min\\.js',
            'replaceStr': '-debug.js'
        },
        COVERAGE: {
            'searchExp': '-min\\.js',
            'replaceStr': '-coverage.js'
        }
    },
/*
* Check the pages meta-data and cache the result.
* @method _inspectPage
* @private
*/
_inspectPage: function() {
var self = this, v, m, req, mr, i;
//Inspect the page for CSS only modules and mark them as loaded.
for (i in self.moduleInfo) {
if (self.moduleInfo.hasOwnProperty(i)) {
v = self.moduleInfo[i];
if (v.type && v.type === CSS) {
if (self.isCSSLoaded(v.name)) {
Y.log('Found CSS module on page: ' + v.name, 'info', 'loader');
self.loaded[i] = true;
}
}
}
}
for (i in ON_PAGE) {
if (ON_PAGE.hasOwnProperty(i)) {
v = ON_PAGE[i];
if (v.details) {
m = self.moduleInfo[v.name];
req = v.details.requires;
mr = m && m.requires;
if (m) {
if (!m._inspected && req && mr.length !== req.length) {
// console.log('deleting ' + m.name);
delete m.expanded;
}
} else {
m = self.addModule(v.details, i);
}
m._inspected = true;
}
}
}
},
/*
* returns true if b is not loaded, and is required directly or by means of modules it supersedes.
* @private
* @method _requires
* @param {String} mod1 The first module to compare
* @param {String} mod2 The second module to compare
*/
_requires: function(mod1, mod2) {
var i, rm, after_map, s,
info = this.moduleInfo,
m = info[mod1],
other = info[mod2];
if (!m || !other) {
return false;
}
rm = m.expanded_map;
after_map = m.after_map;
// check if this module should be sorted after the other
// do this first to short circut circular deps
if (after_map && (mod2 in after_map)) {
return true;
}
after_map = other.after_map;
// and vis-versa
if (after_map && (mod1 in after_map)) {
return false;
}
// check if this module requires one the other supersedes
s = info[mod2] && info[mod2].supersedes;
if (s) {
for (i = 0; i < s.length; i++) {
if (this._requires(mod1, s[i])) {
return true;
}
}
}
s = info[mod1] && info[mod1].supersedes;
if (s) {
for (i = 0; i < s.length; i++) {
if (this._requires(mod2, s[i])) {
return false;
}
}
}
// check if this module requires the other directly
// if (r && yArray.indexOf(r, mod2) > -1) {
if (rm && (mod2 in rm)) {
return true;
}
// external css files should be sorted below yui css
if (m.ext && m.type === CSS && !other.ext && other.type === CSS) {
return true;
}
return false;
},
/**
* Apply a new config to the Loader instance
* @method _config
* @private
* @param {Object} o The new configuration
*/
    _config: function(o) {
        var i, j, val, a, f, group, groupName, self = this,
            mods = [], mod;
        // apply config values
        if (o) {
            for (i in o) {
                if (o.hasOwnProperty(i)) {
                    val = o[i];
                    //TODO This should be a case
                    if (i === 'require') {
                        self.require(val);
                    } else if (i === 'skin') {
                        //If the config.skin is a string, format to the expected object
                        if (typeof val === 'string') {
                            self.skin.defaultSkin = o.skin;
                            val = {
                                defaultSkin: val
                            };
                        }
                        Y.mix(self.skin, val, true);
                    } else if (i === 'groups') {
                        for (j in val) {
                            if (val.hasOwnProperty(j)) {
                                // Y.log('group: ' + j);
                                groupName = j;
                                group = val[j];
                                self.addGroup(group, groupName);
                                // Register any aliases declared on the group.
                                if (group.aliases) {
                                    for (a in group.aliases) {
                                        if (group.aliases.hasOwnProperty(a)) {
                                            self.addAlias(group.aliases[a], a);
                                        }
                                    }
                                }
                            }
                        }
                    } else if (i === 'modules') {
                        // add a hash of module definitions
                        for (j in val) {
                            if (val.hasOwnProperty(j)) {
                                self.addModule(val[j], j);
                            }
                        }
                    } else if (i === 'aliases') {
                        for (j in val) {
                            if (val.hasOwnProperty(j)) {
                                self.addAlias(val[j], j);
                            }
                        }
                    } else if (i === 'gallery') {
                        this.groups.gallery.update(val, o);
                    } else if (i === 'yui2' || i === '2in3') {
                        // Either key updates the yui2 group with both values.
                        this.groups.yui2.update(o['2in3'], o.yui2, o);
                    } else {
                        // Any unrecognized key is copied straight onto the
                        // loader instance (e.g. base, combine, filter, lang).
                        self[i] = val;
                    }
                }
            }
        }
        // fix filter
        f = self.filter;
        if (L.isString(f)) {
            f = f.toUpperCase();
            self.filterName = f;
            self.filter = self.FILTER_DEFS[f];
            if (f === 'DEBUG') {
                self.require('yui-log', 'dump');
            }
        }
        // Coverage filter: apply COVERAGE per-module (expanding rollups via
        // their "use" lists) and fall back to RAW for everything else.
        if (self.filterName && self.coverage) {
            if (self.filterName === 'COVERAGE' && L.isArray(self.coverage) && self.coverage.length) {
                for (i = 0; i < self.coverage.length; i++) {
                    mod = self.coverage[i];
                    if (self.moduleInfo[mod] && self.moduleInfo[mod].use) {
                        mods = [].concat(mods, self.moduleInfo[mod].use);
                    } else {
                        mods.push(mod);
                    }
                }
                self.filters = self.filters || {};
                Y.Array.each(mods, function(mod) {
                    self.filters[mod] = self.FILTER_DEFS.COVERAGE;
                });
                self.filterName = 'RAW';
                self.filter = self.FILTER_DEFS[self.filterName];
            }
        }
    },
/**
* Returns the skin module name for the specified skin name. If a
* module name is supplied, the returned skin module name is
* specific to the module passed in.
* @method formatSkin
* @param {string} skin the name of the skin.
* @param {string} mod optional: the name of a module to skin.
* @return {string} the full skin module name.
*/
formatSkin: function(skin, mod) {
var s = SKIN_PREFIX + skin;
if (mod) {
s = s + '-' + mod;
}
return s;
},
/**
* Adds the skin def to the module info
* @method _addSkin
* @param {string} skin the name of the skin.
* @param {string} mod the name of the module.
* @param {string} parent parent module if this is a skin of a
* submodule or plugin.
* @return {string} the module name for the skin.
* @private
*/
_addSkin: function(skin, mod, parent) {
var mdef, pkg, name, nmod,
info = this.moduleInfo,
sinf = this.skin,
ext = info[mod] && info[mod].ext;
// Add a module definition for the module-specific skin css
if (mod) {
name = this.formatSkin(skin, mod);
if (!info[name]) {
mdef = info[mod];
pkg = mdef.pkg || mod;
nmod = {
skin: true,
name: name,
group: mdef.group,
type: 'css',
after: sinf.after,
path: (parent || pkg) + '/' + sinf.base + skin +
'/' + mod + '.css',
ext: ext
};
if (mdef.base) {
nmod.base = mdef.base;
}
if (mdef.configFn) {
nmod.configFn = mdef.configFn;
}
this.addModule(nmod, name);
Y.log('Adding skin (' + name + '), ' + parent + ', ' + pkg + ', ' + info[name].path, 'info', 'loader');
}
}
return name;
},
/**
* Adds an alias module to the system
* @method addAlias
* @param {Array} use An array of modules that makes up this alias
* @param {String} name The name of the alias
* @example
* var loader = new Y.Loader({});
* loader.addAlias([ 'node', 'yql' ], 'davglass');
* loader.require(['davglass']);
* var out = loader.resolve(true);
*
* //out.js will contain Node and YQL modules
*/
addAlias: function(use, name) {
YUI.Env.aliases[name] = use;
this.addModule({
name: name,
use: use
});
},
/**
* Add a new module group
* @method addGroup
* @param {Object} config An object containing the group configuration data
* @param {String} config.name required, the group name
* @param {String} config.base The base directory for this module group
* @param {String} config.root The root path to add to each combo resource path
* @param {Boolean} config.combine Should the request be combined
* @param {String} config.comboBase Combo service base path
* @param {Object} config.modules The group of modules
* @param {String} name the group name.
* @example
* var loader = new Y.Loader({});
* loader.addGroup({
* name: 'davglass',
* combine: true,
* comboBase: '/combo?',
* root: '',
* modules: {
* //Module List here
* }
* }, 'davglass');
*/
addGroup: function(o, name) {
var mods = o.modules,
self = this, i, v;
name = name || o.name;
o.name = name;
self.groups[name] = o;
if (o.patterns) {
for (i in o.patterns) {
if (o.patterns.hasOwnProperty(i)) {
o.patterns[i].group = name;
self.patterns[i] = o.patterns[i];
}
}
}
if (mods) {
for (i in mods) {
if (mods.hasOwnProperty(i)) {
v = mods[i];
if (typeof v === 'string') {
v = { name: i, fullpath: v };
}
v.group = name;
self.addModule(v, i);
}
}
}
},
/**
* Add a new module to the component metadata.
* @method addModule
* @param {Object} config An object containing the module data.
* @param {String} config.name Required, the component name
* @param {String} config.type Required, the component type (js or css)
* @param {String} config.path Required, the path to the script from `base`
* @param {Array} config.requires Array of modules required by this component
* @param {Array} [config.optional] Array of optional modules for this component
* @param {Array} [config.supersedes] Array of the modules this component replaces
* @param {Array} [config.after] Array of modules the components which, if present, should be sorted above this one
* @param {Object} [config.after_map] Faster alternative to 'after' -- supply a hash instead of an array
* @param {Number} [config.rollup] The number of superseded modules required for automatic rollup
* @param {String} [config.fullpath] If `fullpath` is specified, this is used instead of the configured `base + path`
* @param {Boolean} [config.skinnable] Flag to determine if skin assets should automatically be pulled in
* @param {Object} [config.submodules] Hash of submodules
* @param {String} [config.group] The group the module belongs to -- this is set automatically when it is added as part of a group configuration.
* @param {Array} [config.lang] Array of BCP 47 language tags of languages for which this module has localized resource bundles, e.g., `["en-GB", "zh-Hans-CN"]`
* @param {Object} [config.condition] Specifies that the module should be loaded automatically if a condition is met. This is an object with up to three fields:
* @param {String} [config.condition.trigger] The name of a module that can trigger the auto-load
* @param {Function} [config.condition.test] A function that returns true when the module is to be loaded.
* @param {String} [config.condition.when] Specifies the load order of the conditional module
* with regard to the position of the trigger module.
* This should be one of three values: `before`, `after`, or `instead`. The default is `after`.
* @param {Object} [config.testresults] A hash of test results from `Y.Features.all()`
     * @param {Function} [config.configFn] A function to execute when configuring this module
     * @param {Object} config.configFn.mod The module config, modifying this object will modify its config. Returning false will delete the module's config.
* @param {String} [name] The module name, required if not in the module data.
* @return {Object} the module definition or null if the object passed in did not provide all required attributes.
*/
    addModule: function(o, name) {
        name = name || o.name;
        if (typeof o === 'string') {
            o = { name: name, fullpath: o };
        }
        var subs, i, l, t, sup, s, smod, plugins, plug,
            j, langs, packName, supName, flatSup, flatLang, lang, ret,
            overrides, skinname, when, g, p,
            conditions = this.conditions, trigger;
        //Only merge this data if the temp flag is set
        //from an earlier pass from a pattern or else
        //an override module (YUI_config) can not be used to
        //replace a default module.
        if (this.moduleInfo[name] && this.moduleInfo[name].temp) {
            //This catches temp modules loaded via a pattern
            // The module will be added twice, once from the pattern and
            // Once from the actual add call, this ensures that properties
            // that were added to the module the first time around (group: gallery)
            // are also added the second time around too.
            o = Y.merge(this.moduleInfo[name], o);
        }
        o.name = name;
        // NOTE(review): this guard runs after `o.name = name` above, so a
        // null/undefined `o` would already have thrown -- confirm intent.
        if (!o || !o.name) {
            return null;
        }
        if (!o.type) {
            //Always assume it's javascript unless the CSS pattern is matched.
            o.type = JS;
            p = o.path || o.fullpath;
            if (p && this.REGEX_CSS.test(p)) {
                Y.log('Auto determined module type as CSS', 'warn', 'loader');
                o.type = CSS;
            }
        }
        if (!o.path && !o.fullpath) {
            o.path = _path(name, name, o.type);
        }
        o.supersedes = o.supersedes || o.use;
        // Modules added while _internal is set are YUI-internal, not external.
        o.ext = ('ext' in o) ? o.ext : (this._internal) ? false : true;
        // Handle submodule logic
        subs = o.submodules;
        this.moduleInfo[name] = o;
        o.requires = o.requires || [];
        /*
        Only allowing the cascade of requires information, since
        optional and supersedes are far more fine grained than
        a blanket requires is.
        */
        if (this.requires) {
            for (i = 0; i < this.requires.length; i++) {
                o.requires.push(this.requires[i]);
            }
        }
        // Cascade group-level requires onto the module.
        if (o.group && this.groups && this.groups[o.group]) {
            g = this.groups[o.group];
            if (g.requires) {
                for (i = 0; i < g.requires.length; i++) {
                    o.requires.push(g.requires[i]);
                }
            }
        }
        // Snapshot the original lists so _resetModules can restore them.
        if (!o.defaults) {
            o.defaults = {
                requires: o.requires ? [].concat(o.requires) : null,
                supersedes: o.supersedes ? [].concat(o.supersedes) : null,
                optional: o.optional ? [].concat(o.optional) : null
            };
        }
        if (o.skinnable && o.ext && o.temp) {
            skinname = this._addSkin(this.skin.defaultSkin, name);
            o.requires.unshift(skinname);
        }
        if (o.requires.length) {
            o.requires = this.filterRequires(o.requires) || [];
        }
        // Register language packs for each declared language.
        if (!o.langPack && o.lang) {
            langs = yArray(o.lang);
            for (j = 0; j < langs.length; j++) {
                lang = langs[j];
                packName = this.getLangPackName(lang, name);
                smod = this.moduleInfo[packName];
                if (!smod) {
                    smod = this._addLangPack(lang, o, packName);
                }
            }
        }
        if (subs) {
            sup = o.supersedes || [];
            l = 0;
            for (i in subs) {
                if (subs.hasOwnProperty(i)) {
                    s = subs[i];
                    s.path = s.path || _path(name, i, o.type);
                    s.pkg = name;
                    s.group = o.group;
                    if (s.supersedes) {
                        sup = sup.concat(s.supersedes);
                    }
                    smod = this.addModule(s, i);
                    sup.push(i);
                    if (smod.skinnable) {
                        o.skinnable = true;
                        overrides = this.skin.overrides;
                        if (overrides && overrides[i]) {
                            for (j = 0; j < overrides[i].length; j++) {
                                skinname = this._addSkin(overrides[i][j],
                                     i, name);
                                sup.push(skinname);
                            }
                        }
                        skinname = this._addSkin(this.skin.defaultSkin,
                                i, name);
                        sup.push(skinname);
                    }
                    // looks like we are expected to work out the metadata
                    // for the parent module language packs from what is
                    // specified in the child modules.
                    if (s.lang && s.lang.length) {
                        langs = yArray(s.lang);
                        for (j = 0; j < langs.length; j++) {
                            lang = langs[j];
                            packName = this.getLangPackName(lang, name);
                            supName = this.getLangPackName(lang, i);
                            smod = this.moduleInfo[packName];
                            if (!smod) {
                                smod = this._addLangPack(lang, o, packName);
                            }
                            flatSup = flatSup || yArray.hash(smod.supersedes);
                            if (!(supName in flatSup)) {
                                smod.supersedes.push(supName);
                            }
                            o.lang = o.lang || [];
                            flatLang = flatLang || yArray.hash(o.lang);
                            if (!(lang in flatLang)) {
                                o.lang.push(lang);
                            }
                            // Y.log('pack ' + packName + ' should supersede ' + supName);
                            // Add rollup file, need to add to supersedes list too
                            // default packages
                            packName = this.getLangPackName(ROOT_LANG, name);
                            supName = this.getLangPackName(ROOT_LANG, i);
                            smod = this.moduleInfo[packName];
                            if (!smod) {
                                smod = this._addLangPack(lang, o, packName);
                            }
                            if (!(supName in flatSup)) {
                                smod.supersedes.push(supName);
                            }
                            // Y.log('pack ' + packName + ' should supersede ' + supName);
                            // Add rollup file, need to add to supersedes list too
                        }
                    }
                    l++;
                }
            }
            //o.supersedes = YObject.keys(yArray.hash(sup));
            o.supersedes = yArray.dedupe(sup);
            if (this.allowRollup) {
                o.rollup = (l < 4) ? l : Math.min(l - 1, 4);
            }
        }
        plugins = o.plugins;
        if (plugins) {
            // Register each plugin as its own module, skinned if needed.
            for (i in plugins) {
                if (plugins.hasOwnProperty(i)) {
                    plug = plugins[i];
                    plug.pkg = name;
                    plug.path = plug.path || _path(name, i, o.type);
                    plug.requires = plug.requires || [];
                    plug.group = o.group;
                    this.addModule(plug, i);
                    if (o.skinnable) {
                        this._addSkin(this.skin.defaultSkin, i, name);
                    }
                }
            }
        }
        if (o.condition) {
            t = o.condition.trigger;
            if (YUI.Env.aliases[t]) {
                t = YUI.Env.aliases[t];
            }
            if (!Y.Lang.isArray(t)) {
                t = [t];
            }
            for (i = 0; i < t.length; i++) {
                trigger = t[i];
                when = o.condition.when;
                conditions[trigger] = conditions[trigger] || {};
                conditions[trigger][name] = o.condition;
                // the 'when' attribute can be 'before', 'after', or 'instead'
                // the default is after.
                if (when && when !== 'after') {
                    if (when === 'instead') { // replace the trigger
                        o.supersedes = o.supersedes || [];
                        o.supersedes.push(trigger);
                    }
                    // before the trigger
                    // the trigger requires the conditional mod,
                    // so it should appear before the conditional
                    // mod if we do not intercede.
                } else { // after the trigger
                    o.after = o.after || [];
                    o.after.push(trigger);
                }
            }
        }
        if (o.supersedes) {
            o.supersedes = this.filterRequires(o.supersedes);
        }
        if (o.after) {
            o.after = this.filterRequires(o.after);
            o.after_map = yArray.hash(o.after);
        }
        // this.dirty = true;
        if (o.configFn) {
            ret = o.configFn(o);
            if (ret === false) {
                Y.log('Config function returned false for ' + name + ', skipping.', 'info', 'loader');
                delete this.moduleInfo[name];
                delete GLOBAL_ENV._renderedMods[name];
                o = null;
            }
        }
        //Add to global cache
        if (o) {
            if (!GLOBAL_ENV._renderedMods) {
                GLOBAL_ENV._renderedMods = {};
            }
            GLOBAL_ENV._renderedMods[name] = Y.mix(GLOBAL_ENV._renderedMods[name] || {}, o);
            GLOBAL_ENV._conditions = conditions;
        }
        return o;
    },
/**
* Add a requirement for one or more module
* @method require
* @param {string[] | string*} what the modules to load.
*/
require: function(what) {
var a = (typeof what === 'string') ? yArray(arguments) : what;
this.dirty = true;
this.required = Y.merge(this.required, yArray.hash(this.filterRequires(a)));
this._explodeRollups();
},
/**
* Grab all the items that were asked for, check to see if the Loader
     * meta-data contains a "use" array. If it does, remove the asked item and replace it with
* the content of the "use".
* This will make asking for: "dd"
* Actually ask for: "dd-ddm-base,dd-ddm,dd-ddm-drop,dd-drag,dd-proxy,dd-constrain,dd-drop,dd-scroll,dd-drop-plugin"
* @private
* @method _explodeRollups
*/
_explodeRollups: function() {
var self = this, m, m2, i, a, v, len, len2,
r = self.required;
if (!self.allowRollup) {
for (i in r) {
if (r.hasOwnProperty(i)) {
m = self.getModule(i);
if (m && m.use) {
len = m.use.length;
for (a = 0; a < len; a++) {
m2 = self.getModule(m.use[a]);
if (m2 && m2.use) {
len2 = m2.use.length;
for (v = 0; v < len2; v++) {
r[m2.use[v]] = true;
}
} else {
r[m.use[a]] = true;
}
}
}
}
}
self.required = r;
}
},
/**
* Explodes the required array to remove aliases and replace them with real modules
* @method filterRequires
* @param {Array} r The original requires array
* @return {Array} The new array of exploded requirements
*/
filterRequires: function(r) {
if (r) {
if (!Y.Lang.isArray(r)) {
r = [r];
}
r = Y.Array(r);
var c = [], i, mod, o, m;
for (i = 0; i < r.length; i++) {
mod = this.getModule(r[i]);
if (mod && mod.use) {
for (o = 0; o < mod.use.length; o++) {
//Must walk the other modules in case a module is a rollup of rollups (datatype)
m = this.getModule(mod.use[o]);
if (m && m.use && (m.name !== mod.name)) {
c = Y.Array.dedupe([].concat(c, this.filterRequires(m.use)));
} else {
c.push(mod.use[o]);
}
}
} else {
c.push(r[i]);
}
}
r = c;
}
return r;
},
/**
* Returns an object containing properties for all modules required
* in order to load the requested module
* @method getRequires
* @param {object} mod The module definition from moduleInfo.
* @return {array} the expanded requirement list.
*/
    getRequires: function(mod) {
        if (!mod) {
            //console.log('returning no reqs for ' + mod.name);
            return NO_REQUIREMENTS;
        }
        // _parsed guards against infinite recursion on circular requires.
        if (mod._parsed) {
            //console.log('returning requires for ' + mod.name, mod.requires);
            return mod.expanded || NO_REQUIREMENTS;
        }
        //TODO add module cache here out of scope..
        var i, m, j, add, packName, lang, testresults = this.testresults,
            name = mod.name, cond,
            adddef = ON_PAGE[name] && ON_PAGE[name].details,
            d, go, def,
            r, old_mod,
            o, skinmod, skindef, skinpar, skinname,
            intl = mod.lang || mod.intl,
            info = this.moduleInfo,
            ftests = Y.Features && Y.Features.tests.load,
            hash, reparse;
        // console.log(name);
        // pattern match leaves module stub that needs to be filled out
        if (mod.temp && adddef) {
            old_mod = mod;
            mod = this.addModule(adddef, name);
            mod.group = old_mod.group;
            mod.pkg = old_mod.pkg;
            delete mod.expanded;
        }
        // console.log('cache: ' + mod.langCache + ' == ' + this.lang);
        //If a skin or a lang is different, reparse..
        reparse = !((!this.lang || mod.langCache === this.lang) && (mod.skinCache === this.skin.defaultSkin));
        if (mod.expanded && !reparse) {
            //Y.log('Already expanded ' + name + ', ' + mod.expanded);
            return mod.expanded;
        }
        // d accumulates the expanded list; hash dedupes entries.
        d = [];
        hash = {};
        r = this.filterRequires(mod.requires);
        if (mod.lang) {
            //If a module has a lang attribute, auto add the intl requirement.
            d.unshift('intl');
            r.unshift('intl');
            intl = true;
        }
        o = this.filterRequires(mod.optional);
        // Y.log("getRequires: " + name + " (dirty:" + this.dirty +
        // ", expanded:" + mod.expanded + ")");
        mod._parsed = true;
        mod.langCache = this.lang;
        mod.skinCache = this.skin.defaultSkin;
        // Expand direct requirements recursively.
        for (i = 0; i < r.length; i++) {
            //Y.log(name + ' requiring ' + r[i], 'info', 'loader');
            if (!hash[r[i]]) {
                d.push(r[i]);
                hash[r[i]] = true;
                m = this.getModule(r[i]);
                if (m) {
                    add = this.getRequires(m);
                    intl = intl || (m.expanded_map &&
                        (INTL in m.expanded_map));
                    for (j = 0; j < add.length; j++) {
                        d.push(add[j]);
                    }
                }
            }
        }
        // get the requirements from superseded modules, if any
        r = this.filterRequires(mod.supersedes);
        if (r) {
            for (i = 0; i < r.length; i++) {
                if (!hash[r[i]]) {
                    // if this module has submodules, the requirements list is
                    // expanded to include the submodules. This is so we can
                    // prevent dups when a submodule is already loaded and the
                    // parent is requested.
                    if (mod.submodules) {
                        d.push(r[i]);
                    }
                    hash[r[i]] = true;
                    m = this.getModule(r[i]);
                    if (m) {
                        add = this.getRequires(m);
                        intl = intl || (m.expanded_map &&
                            (INTL in m.expanded_map));
                        for (j = 0; j < add.length; j++) {
                            d.push(add[j]);
                        }
                    }
                }
            }
        }
        // Expand optional requirements only when loadOptional is enabled.
        if (o && this.loadOptional) {
            for (i = 0; i < o.length; i++) {
                if (!hash[o[i]]) {
                    d.push(o[i]);
                    hash[o[i]] = true;
                    m = info[o[i]];
                    if (m) {
                        add = this.getRequires(m);
                        intl = intl || (m.expanded_map &&
                            (INTL in m.expanded_map));
                        for (j = 0; j < add.length; j++) {
                            d.push(add[j]);
                        }
                    }
                }
            }
        }
        cond = this.conditions[name];
        if (cond) {
            //Set the module to not parsed since we have conditionals and this could change the dependency tree.
            mod._parsed = false;
            if (testresults && ftests) {
                // Precomputed feature-test results decide which conditional
                // modules to pull in.
                oeach(testresults, function(result, id) {
                    var condmod = ftests[id].name;
                    if (!hash[condmod] && ftests[id].trigger === name) {
                        if (result && ftests[id]) {
                            hash[condmod] = true;
                            d.push(condmod);
                        }
                    }
                });
            } else {
                for (i in cond) {
                    if (cond.hasOwnProperty(i)) {
                        if (!hash[i]) {
                            def = cond[i];
                            //first see if they've specfied a ua check
                            //then see if they've got a test fn & if it returns true
                            //otherwise just having a condition block is enough
                            go = def && ((!def.ua && !def.test) || (def.ua && Y.UA[def.ua]) ||
                                (def.test && def.test(Y, r)));
                            if (go) {
                                hash[i] = true;
                                d.push(i);
                                m = this.getModule(i);
                                if (m) {
                                    add = this.getRequires(m);
                                    for (j = 0; j < add.length; j++) {
                                        d.push(add[j]);
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
        // Create skin modules
        if (mod.skinnable) {
            skindef = this.skin.overrides;
            // If this module is a member of an alias/rollup, skin overrides
            // may be keyed by the parent alias name.
            for (i in YUI.Env.aliases) {
                if (YUI.Env.aliases.hasOwnProperty(i)) {
                    if (Y.Array.indexOf(YUI.Env.aliases[i], name) > -1) {
                        skinpar = i;
                    }
                }
            }
            if (skindef && (skindef[name] || (skinpar && skindef[skinpar]))) {
                skinname = name;
                if (skindef[skinpar]) {
                    skinname = skinpar;
                }
                for (i = 0; i < skindef[skinname].length; i++) {
                    skinmod = this._addSkin(skindef[skinname][i], name);
                    if (!this.isCSSLoaded(skinmod, this._boot)) {
                        d.push(skinmod);
                    }
                }
            } else {
                skinmod = this._addSkin(this.skin.defaultSkin, name);
                if (!this.isCSSLoaded(skinmod, this._boot)) {
                    d.push(skinmod);
                }
            }
        }
        mod._parsed = false;
        if (intl) {
            // Prepend the best-match language pack (if resolvable) and the
            // intl module itself.
            if (mod.lang && !mod.langPack && Y.Intl) {
                lang = Y.Intl.lookupBestLang(this.lang || ROOT_LANG, mod.lang);
                //Y.log('Best lang: ' + lang + ', this.lang: ' + this.lang + ', mod.lang: ' + mod.lang);
                packName = this.getLangPackName(lang, name);
                if (packName) {
                    d.unshift(packName);
                }
            }
            d.unshift(INTL);
        }
        mod.expanded_map = yArray.hash(d);
        mod.expanded = YObject.keys(mod.expanded_map);
        return mod.expanded;
    },
/**
 * Check to see if named css module is already loaded on the page
 * @method isCSSLoaded
 * @param {String} name The name of the css file
 * @param {Boolean} skip When true, bypass the ignoreRegistered short-circuit (used internally during boot)
 * @return Boolean
 */
isCSSLoaded: function(name, skip) {
    //TODO - Make this call a batching call with name being an array
    if (!name || !YUI.Env.cssStampEl || (!skip && this.ignoreRegistered)) {
        Y.log('isCSSLoaded was skipped for ' + name, 'warn', 'loader');
        return false;
    }
    var el = YUI.Env.cssStampEl,
        ret = false,
        mod = YUI.Env._cssLoaded[name],
        style = el.currentStyle; //IE
    // Return the memoized answer if this name was probed before.
    if (mod !== undefined) {
        //Y.log('isCSSLoaded was cached for ' + name, 'warn', 'loader');
        return mod;
    }
    //Add the classname to the element
    el.className = name;
    if (!style) {
        style = Y.config.doc.defaultView.getComputedStyle(el, null);
    }
    // The skin stylesheet is expected to set display:none on the stamp
    // element; if computed style shows that, the CSS is already on the page.
    if (style && style.display === 'none') {
        ret = true;
    }
    Y.log('Has Skin? ' + name + ' : ' + ret, 'info', 'loader');
    el.className = ''; //Reset the classname to ''
    YUI.Env._cssLoaded[name] = ret;
    return ret;
},
/**
 * Returns a hash of module names the supplied module satisfies.
 * @method getProvides
 * @param {string} name The name of the module.
 * @return {object} what this module provides.
 */
getProvides: function(name) {
    var m = this.getModule(name), o, s;
    // supmap = this.provides;
    if (!m) {
        return NOT_FOUND;
    }
    // Lazily compute and cache the provides map on the module metadata.
    if (m && !m.provides) {
        o = {};
        s = m.supersedes;
        if (s) {
            // A module also provides everything provided by each module
            // it supersedes (resolved recursively).
            yArray.each(s, function(v) {
                Y.mix(o, this.getProvides(v));
            }, this);
        }
        // Every module provides itself.
        o[name] = true;
        m.provides = o;
    }
    return m.provides;
},
/**
 * Calculates the dependency tree, the result is stored in the sorted
 * property.
 * @method calculate
 * @param {object} o optional options object.
 * @param {string} type optional argument to prune modules.
 */
calculate: function(o, type) {
    // Only recalculate when new config is supplied, a type filter is
    // requested, or the loader has been marked dirty.
    if (o || type || this.dirty) {
        if (o) {
            this._config(o);
        }
        if (!this._init) {
            this._setup();
        }
        this._explode();
        // Rollups replace groups of small modules with a combined module
        // when allowed; otherwise rollups are expanded back to members.
        if (this.allowRollup) {
            this._rollup();
        } else {
            this._explodeRollups();
        }
        this._reduce();
        this._sort();
    }
},
/**
 * Creates a "psuedo" package for languages provided in the lang array
 * @method _addLangPack
 * @private
 * @param {String} lang The language to create
 * @param {Object} m The module definition to create the language pack around
 * @param {String} packName The name of the package (e.g: lang/datatype-date-en-US)
 * @return {Object} The module definition
 */
_addLangPack: function(lang, m, packName) {
    var name = m.name,
        packPath, conf,
        existing = this.moduleInfo[packName];
    // Register the pack only once; later calls return the cached metadata.
    if (!existing) {
        packPath = _path((m.pkg || name), packName, JS, true);
        conf = {
            path: packPath,
            intl: true,
            langPack: true,
            ext: m.ext,
            group: m.group,
            supersedes: []
        };
        // Inherit root/base/configFn from the module the pack belongs to.
        if (m.root) {
            conf.root = m.root;
        }
        if (m.base) {
            conf.base = m.base;
        }
        if (m.configFn) {
            conf.configFn = m.configFn;
        }
        this.addModule(conf, packName);
        // Record the language as available in the YUI environment.
        if (lang) {
            Y.Env.lang = Y.Env.lang || {};
            Y.Env.lang[lang] = Y.Env.lang[lang] || {};
            Y.Env.lang[lang][name] = true;
        }
    }
    return this.moduleInfo[packName];
},
/**
 * Investigates the current YUI configuration on the page. By default,
 * modules already detected will not be loaded again unless a force
 * option is encountered. Called by calculate()
 * @method _setup
 * @private
 */
_setup: function() {
    var info = this.moduleInfo, name, i, j, m, l,
        packName;
    for (name in info) {
        if (info.hasOwnProperty(name)) {
            m = info[name];
            if (m) {
                // remove dups
                //m.requires = YObject.keys(yArray.hash(m.requires));
                m.requires = yArray.dedupe(m.requires);
                // Create lang pack modules
                //if (m.lang && m.lang.length) {
                if (m.lang) {
                    // Setup root package if the module has lang defined,
                    // it needs to provide a root language pack
                    packName = this.getLangPackName(ROOT_LANG, name);
                    this._addLangPack(null, m, packName);
                }
            }
        }
    }
    // l accumulates everything that should be treated as already loaded.
    //l = Y.merge(this.inserted);
    l = {};
    // available modules
    if (!this.ignoreRegistered) {
        Y.mix(l, GLOBAL_ENV.mods);
    }
    // add the ignore list to the list of loaded packages
    if (this.ignore) {
        Y.mix(l, yArray.hash(this.ignore));
    }
    // expand the list to include superseded modules
    for (j in l) {
        if (l.hasOwnProperty(j)) {
            Y.mix(l, this.getProvides(j));
        }
    }
    // remove modules on the force list from the loaded list
    if (this.force) {
        for (i = 0; i < this.force.length; i++) {
            if (this.force[i] in l) {
                delete l[this.force[i]];
            }
        }
    }
    Y.mix(this.loaded, l);
    this._init = true;
},
/**
* Builds a module name for a language pack
* @method getLangPackName
* @param {string} lang the language code.
* @param {string} mname the module to build it for.
* @return {string} the language pack module name.
*/
getLangPackName: function(lang, mname) {
return ('lang/' + mname + ((lang) ? '_' + lang : ''));
},
/**
 * Inspects the required modules list looking for additional
 * dependencies. Expands the required list to include all
 * required modules. Called by calculate()
 * @method _explode
 * @private
 */
_explode: function() {
    //TODO Move done out of scope
    var r = this.required, m, reqs, done = {},
        self = this, name, expound;
    // the setup phase is over, all modules have been created
    self.dirty = false;
    self._explodeRollups();
    r = self.required;
    for (name in r) {
        if (r.hasOwnProperty(name)) {
            if (!done[name]) {
                done[name] = true;
                m = self.getModule(name);
                if (m) {
                    expound = m.expound;
                    // "expound" forces an extra module (and its full
                    // dependency set) into the required list.
                    if (expound) {
                        r[expound] = self.getModule(expound);
                        reqs = self.getRequires(r[expound]);
                        Y.mix(r, yArray.hash(reqs));
                    }
                    reqs = self.getRequires(m);
                    Y.mix(r, yArray.hash(reqs));
                }
            }
        }
    }
    // Y.log('After explode: ' + YObject.keys(r));
},
/**
* The default method used to test a module against a pattern
* @method _patternTest
* @private
* @param {String} mname The module being tested
* @param {String} pname The pattern to match
*/
_patternTest: function(mname, pname) {
return (mname.indexOf(pname) > -1);
},
/**
 * Get's the loader meta data for the requested module
 * @method getModule
 * @param {String} mname The module name to get
 * @return {Object} The module metadata
 */
getModule: function(mname) {
    //TODO: Remove name check - it's a quick hack to fix pattern WIP
    if (!mname) {
        return null;
    }
    var p, found, pname,
        m = this.moduleInfo[mname],
        patterns = this.patterns;
    // check the patterns library to see if we should automatically add
    // the module with defaults
    if (!m || (m && m.ext)) {
        // Y.log('testing patterns ' + YObject.keys(patterns));
        for (pname in patterns) {
            if (patterns.hasOwnProperty(pname)) {
                // Y.log('testing pattern ' + i);
                p = patterns[pname];
                //There is no test method, create a default one that tests
                // the pattern against the mod name
                if (!p.test) {
                    p.test = this._patternTest;
                }
                if (p.test(mname, pname)) {
                    // use the metadata supplied for the pattern
                    // as the module definition.
                    found = p;
                    break;
                }
            }
        }
    }
    if (!m) {
        if (found) {
            if (p.action) {
                // Y.log('executing pattern action: ' + pname);
                // The action is responsible for registering the module;
                // m stays undefined here by design.
                p.action.call(this, mname, pname);
            } else {
                Y.log('Undefined module: ' + mname + ', matched a pattern: ' +
                    pname, 'info', 'loader');
                // ext true or false?
                m = this.addModule(Y.merge(found), mname);
                if (found.configFn) {
                    m.configFn = found.configFn;
                }
                // Mark as temporary so real metadata can replace it later.
                m.temp = true;
            }
        }
    } else {
        // Known module matched a pattern that carries a configFn the
        // module lacks: adopt the pattern's configFn and apply it.
        if (found && m && found.configFn && !m.configFn) {
            m.configFn = found.configFn;
            m.configFn(m);
        }
    }
    return m;
},
// impl in rollup submodule
/**
 * Optimizes the required list by substituting rollup modules. This is a
 * stub; the real implementation is supplied by the rollup submodule.
 * @method _rollup
 * @private
 */
_rollup: function() { },
/**
 * Remove superceded modules and loaded modules. Called by
 * calculate() after we have the mega list of all dependencies
 * @method _reduce
 * @param {object} r optional requirement hash to reduce; defaults to this.required.
 * @return {object} the reduced dependency hash.
 * @private
 */
_reduce: function(r) {
    r = r || this.required;
    var i, j, s, m, type = this.loadType,
        ignore = this.ignore ? yArray.hash(this.ignore) : false;
    for (i in r) {
        if (r.hasOwnProperty(i)) {
            m = this.getModule(i);
            // remove if already loaded
            if (((this.loaded[i] || ON_PAGE[i]) &&
                    !this.forceMap[i] && !this.ignoreRegistered) ||
                    (type && m && m.type !== type)) {
                delete r[i];
            }
            // Modules on the explicit ignore list are dropped as well.
            if (ignore && ignore[i]) {
                delete r[i];
            }
            // remove anything this module supersedes
            s = m && m.supersedes;
            if (s) {
                for (j = 0; j < s.length; j++) {
                    if (s[j] in r) {
                        delete r[s[j]];
                    }
                }
            }
        }
    }
    return r;
},
/**
 * Handles the queue when a module has been loaded for all cases
 * @method _finish
 * @private
 * @param {String} msg The message from Loader
 * @param {Boolean} success A boolean denoting success or failure
 */
_finish: function(msg, success) {
    Y.log('loader finishing: ' + msg + ', ' + Y.id + ', ' +
        this.data, 'info', 'loader');
    // Release the shared queue lock so the next queued insert can run.
    _queue.running = false;
    var onEnd = this.onEnd;
    if (onEnd) {
        onEnd.call(this.context, {
            msg: msg,
            data: this.data,
            success: success
        });
    }
    this._continue();
},
/**
 * The default Loader onSuccess handler, calls this.onSuccess with a payload
 * @method _onSuccess
 * @private
 */
_onSuccess: function() {
    var self = this, skipped = Y.merge(self.skipped), fn,
        failed = [], rreg = self.requireRegistration,
        success, msg, i, mod;
    // Skipped modules do not count as inserted.
    for (i in skipped) {
        if (skipped.hasOwnProperty(i)) {
            delete self.inserted[i];
        }
    }
    self.skipped = {};
    for (i in self.inserted) {
        if (self.inserted.hasOwnProperty(i)) {
            mod = self.getModule(i);
            // A JS module that fetched but never registered itself via
            // YUI.add() is reported as failed when registration is required.
            if (mod && rreg && mod.type === JS && !(i in YUI.Env.mods)) {
                failed.push(i);
            } else {
                Y.mix(self.loaded, self.getProvides(i));
            }
        }
    }
    fn = self.onSuccess;
    msg = (failed.length) ? 'notregistered' : 'success';
    success = !(failed.length);
    if (fn) {
        fn.call(self.context, {
            msg: msg,
            data: self.data,
            success: success,
            failed: failed,
            skipped: skipped
        });
    }
    self._finish(msg, success);
},
/**
* The default Loader onProgress handler, calls this.onProgress with a payload
* @method _onProgress
* @private
*/
_onProgress: function(e) {
var self = this, i;
//set the internal cache to what just came in.
if (e.data && e.data.length) {
for (i = 0; i < e.data.length; i++) {
e.data[i] = self.getModule(e.data[i].name);
}
}
if (self.onProgress) {
self.onProgress.call(self.context, {
name: e.url,
data: e.data
});
}
},
/**
* The default Loader onFailure handler, calls this.onFailure with a payload
* @method _onFailure
* @private
*/
_onFailure: function(o) {
var f = this.onFailure, msg = [], i = 0, len = o.errors.length;
for (i; i < len; i++) {
msg.push(o.errors[i].error);
}
msg = msg.join(',');
Y.log('load error: ' + msg + ', ' + Y.id, 'error', 'loader');
if (f) {
f.call(this.context, {
msg: msg,
data: this.data,
success: false
});
}
this._finish(msg, false);
},
/**
* The default Loader onTimeout handler, calls this.onTimeout with a payload
* @method _onTimeout
* @private
*/
_onTimeout: function() {
Y.log('loader timeout: ' + Y.id, 'error', 'loader');
var f = this.onTimeout;
if (f) {
f.call(this.context, {
msg: 'timeout',
data: this.data,
success: false
});
}
},
/**
 * Sorts the dependency tree. The last step of calculate()
 * Repeatedly hoists each module's dependencies above it until a full
 * pass makes no moves; the done map caps each pair at one swap so
 * circular dependencies cannot loop forever.
 * @method _sort
 * @private
 */
_sort: function() {
    // create an indexed list
    var s = YObject.keys(this.required),
        // loaded = this.loaded,
        //TODO Move this out of scope
        done = {},
        p = 0, l, a, b, j, k, moved, doneKey;
    // keep going until we make a pass without moving anything
    for (;;) {
        l = s.length;
        moved = false;
        // start the loop after items that are already sorted
        for (j = p; j < l; j++) {
            // check the next module on the list to see if its
            // dependencies have been met
            a = s[j];
            // check everything below current item and move if we
            // find a requirement for the current item
            for (k = j + 1; k < l; k++) {
                doneKey = a + s[k];
                if (!done[doneKey] && this._requires(a, s[k])) {
                    // extract the dependency so we can move it up
                    b = s.splice(k, 1);
                    // insert the dependency above the item that
                    // requires it
                    s.splice(j, 0, b[0]);
                    // only swap two dependencies once to short circut
                    // circular dependencies
                    done[doneKey] = true;
                    // keep working
                    moved = true;
                    break;
                }
            }
            // jump out of loop if we moved something
            if (moved) {
                break;
            // this item is sorted, move our pointer and keep going
            } else {
                p++;
            }
        }
        // when we make it here and moved is false, we are
        // finished sorting
        if (!moved) {
            break;
        }
    }
    this.sorted = s;
},
/**
 * Handles the actual insertion of script/link tags
 * @method _insert
 * @private
 * @param {Object} source The YUI instance the request came from
 * @param {Object} o The metadata to include
 * @param {String} type JS or CSS
 * @param {Boolean} [skipcalc=false] Do a Loader.calculate on the meta
 */
_insert: function(source, o, type, skipcalc) {
    Y.log('private _insert() ' + (type || '') + ', ' + Y.id, "info", "loader");
    // restore the state at the time of the request
    if (source) {
        this._config(source);
    }
    // build the dependency list
    // don't include type so we can process CSS and script in
    // one pass when the type is not specified.
    var modules = this.resolve(!skipcalc),
        self = this, comp = 0, actions = 0,
        mods = {}, deps, complete;
    self._refetch = [];
    if (type) {
        //Filter out the opposite type and reset the array so the checks later work
        modules[((type === JS) ? CSS : JS)] = [];
    }
    if (!self.fetchCSS) {
        modules.css = [];
    }
    // comp counts how many Y.Get transactions (js and/or css) must finish.
    if (modules.js.length) {
        comp++;
    }
    if (modules.css.length) {
        comp++;
    }
    //console.log('Resolved Modules: ', modules);
    // Shared completion callback for both the CSS and JS transactions;
    // only the final invocation (actions === comp) does the real work.
    complete = function(d) {
        actions++;
        var errs = {}, i = 0, o = 0, u = '', fn,
            modName, resMods;
        if (d && d.errors) {
            for (i = 0; i < d.errors.length; i++) {
                if (d.errors[i].request) {
                    u = d.errors[i].request.url;
                } else {
                    u = d.errors[i];
                }
                errs[u] = u;
            }
        }
        if (d && d.data && d.data.length && (d.type === 'success')) {
            for (i = 0; i < d.data.length; i++) {
                self.inserted[d.data[i].name] = true;
                //If the external module has a skin or a lang, reprocess it
                if (d.data[i].lang || d.data[i].skinnable) {
                    delete self.inserted[d.data[i].name];
                    self._refetch.push(d.data[i].name);
                }
            }
        }
        if (actions === comp) {
            self._loading = null;
            Y.log('Loader actions complete!', 'info', 'loader');
            if (self._refetch.length) {
                //Get the deps for the new meta-data and reprocess
                Y.log('Found potential modules to refetch', 'info', 'loader');
                for (i = 0; i < self._refetch.length; i++) {
                    deps = self.getRequires(self.getModule(self._refetch[i]));
                    for (o = 0; o < deps.length; o++) {
                        if (!self.inserted[deps[o]]) {
                            //We wouldn't be to this point without the module being here
                            mods[deps[o]] = deps[o];
                        }
                    }
                }
                mods = Y.Object.keys(mods);
                if (mods.length) {
                    Y.log('Refetching modules with new meta-data', 'info', 'loader');
                    self.require(mods);
                    resMods = self.resolve(true);
                    if (resMods.cssMods.length) {
                        // Skip refetching skins that are already on the page.
                        for (i=0; i < resMods.cssMods.length; i++) {
                            modName = resMods.cssMods[i].name;
                            delete YUI.Env._cssLoaded[modName];
                            if (self.isCSSLoaded(modName)) {
                                self.inserted[modName] = true;
                                delete self.required[modName];
                            }
                        }
                        self.sorted = [];
                        self._sort();
                    }
                    d = null; //bail
                    self._insert(); //insert the new deps
                }
            }
            if (d && d.fn) {
                Y.log('Firing final Loader callback!', 'info', 'loader');
                // Detach fn from the payload before invoking so a re-entrant
                // call cannot fire it twice.
                fn = d.fn;
                delete d.fn;
                fn.call(self, d);
            }
        }
    };
    this._loading = true;
    // Nothing to fetch: fire the success path immediately.
    if (!modules.js.length && !modules.css.length) {
        Y.log('No modules resolved..', 'warn', 'loader');
        actions = -1;
        complete({
            fn: self._onSuccess
        });
        return;
    }
    if (modules.css.length) { //Load CSS first
        Y.log('Loading CSS modules', 'info', 'loader');
        Y.Get.css(modules.css, {
            data: modules.cssMods,
            attributes: self.cssAttributes,
            insertBefore: self.insertBefore,
            charset: self.charset,
            timeout: self.timeout,
            context: self,
            onProgress: function(e) {
                self._onProgress.call(self, e);
            },
            onTimeout: function(d) {
                self._onTimeout.call(self, d);
            },
            onSuccess: function(d) {
                d.type = 'success';
                d.fn = self._onSuccess;
                complete.call(self, d);
            },
            onFailure: function(d) {
                d.type = 'failure';
                d.fn = self._onFailure;
                complete.call(self, d);
            }
        });
    }
    if (modules.js.length) {
        Y.log('Loading JS modules', 'info', 'loader');
        Y.Get.js(modules.js, {
            data: modules.jsMods,
            insertBefore: self.insertBefore,
            attributes: self.jsAttributes,
            charset: self.charset,
            timeout: self.timeout,
            autopurge: false,
            context: self,
            async: self.async,
            onProgress: function(e) {
                self._onProgress.call(self, e);
            },
            onTimeout: function(d) {
                self._onTimeout.call(self, d);
            },
            onSuccess: function(d) {
                d.type = 'success';
                d.fn = self._onSuccess;
                complete.call(self, d);
            },
            onFailure: function(d) {
                d.type = 'failure';
                d.fn = self._onFailure;
                complete.call(self, d);
            }
        });
    }
},
/**
* Once a loader operation is completely finished, process any additional queued items.
* @method _continue
* @private
*/
_continue: function() {
if (!(_queue.running) && _queue.size() > 0) {
_queue.running = true;
_queue.next()();
}
},
/**
 * inserts the requested modules and their dependencies.
 * <code>type</code> can be "js" or "css". Both script and
 * css are inserted if type is not provided.
 * @method insert
 * @param {object} o optional options object.
 * @param {string} type the type of dependency to insert.
 * @param {boolean} skipsort optional, passed through to _insert as its skipcalc flag.
 */
insert: function(o, type, skipsort) {
    Y.log('public insert() ' + (type || '') + ', ' +
        Y.Object.keys(this.required), "info", "loader");
    // Snapshot the loader state so the queued operation runs with the
    // configuration that was current at request time.
    var self = this, copy = Y.merge(this);
    delete copy.require;
    delete copy.dirty;
    _queue.add(function() {
        self._insert(copy, o, type, skipsort);
    });
    this._continue();
},
/**
* Executed every time a module is loaded, and if we are in a load
* cycle, we attempt to load the next script. Public so that it
* is possible to call this if using a method other than
* Y.register to determine when scripts are fully loaded
* @method loadNext
* @deprecated
* @param {string} mname optional the name of the module that has
* been loaded (which is usually why it is time to load the next
* one).
*/
loadNext: function() {
Y.log('loadNext was called..', 'error', 'loader');
return;
},
/**
 * Apply filter defined for this instance to a url/path
 * @method _filter
 * @param {string} u the string to filter.
 * @param {string} name the name of the module, if we are processing
 * a single module as opposed to a combined url.
 * @param {string} group optional group name used to look up a group-level filter.
 * @return {string} the filtered string.
 * @private
 */
_filter: function(u, name, group) {
    var f = this.filter,
        hasFilter = name && (name in this.filters),
        modFilter = hasFilter && this.filters[name],
        groupName = group || (this.moduleInfo[name] ? this.moduleInfo[name].group : null);
    // A group-level filter overrides a per-module filter.
    if (groupName && this.groups[groupName] && this.groups[groupName].filter) {
        modFilter = this.groups[groupName].filter;
        hasFilter = true;
    }
    if (u) {
        if (hasFilter) {
            // String filters name a FILTER_DEFS preset (e.g. 'DEBUG');
            // otherwise the filter is a {searchExp, replaceStr} object.
            f = (L.isString(modFilter)) ? this.FILTER_DEFS[modFilter.toUpperCase()] || null : modFilter;
        }
        if (f) {
            u = u.replace(new RegExp(f.searchExp, 'g'), f.replaceStr);
        }
    }
    return u;
},
/**
* Generates the full url for a module
* @method _url
* @param {string} path the path fragment.
* @param {String} name The name of the module
* @param {String} [base=self.base] The base url to use
* @return {string} the full url.
* @private
*/
_url: function(path, name, base) {
return this._filter((base || this.base || '') + path, name);
},
/**
 * Returns an Object hash of file arrays built from `loader.sorted` or from an arbitrary list of sorted modules.
 * @method resolve
 * @param {Boolean} [calc=false] Perform a loader.calculate() before anything else
 * @param {Array} [s=loader.sorted] An override for the loader.sorted array
 * @return {Object} Object hash (js and css) of two arrays of file lists
 * @example This method can be used as an off-line dep calculator
 *
 *      var Y = YUI();
 *      var loader = new Y.Loader({
 *          filter: 'debug',
 *          base: '../../',
 *          root: 'build/',
 *          combine: true,
 *          require: ['node', 'dd', 'console']
 *      });
 *      var out = loader.resolve(true);
 *
 */
resolve: function(calc, s) {
    var len, i, m, url, group, groupName, j, frag,
        comboSource, comboSources, mods, comboBase,
        base, urls, u = [], tmpBase, baseLen, resCombos = {},
        self = this, comboSep, maxURLLength,
        inserted = (self.ignoreRegistered) ? {} : self.inserted,
        resolved = { js: [], jsMods: [], css: [], cssMods: [] },
        type = self.loadType || 'js', addSingle;
    // Skin overrides or ignoreRegistered invalidate cached module state.
    if (self.skin.overrides || self.skin.defaultSkin !== DEFAULT_SKIN || self.ignoreRegistered) {
        self._resetModules();
    }
    if (calc) {
        self.calculate();
    }
    s = s || self.sorted;
    // Queues one module to be fetched by its own (non-combo) URL.
    addSingle = function(m) {
        if (m) {
            group = (m.group && self.groups[m.group]) || NOT_FOUND;
            //Always assume it's async
            if (group.async === false) {
                m.async = group.async;
            }
            url = (m.fullpath) ? self._filter(m.fullpath, s[i]) :
                  self._url(m.path, s[i], group.base || m.base);
            if (m.attributes || m.async === false) {
                url = {
                    url: url,
                    async: m.async
                };
                if (m.attributes) {
                    url.attributes = m.attributes;
                }
            }
            resolved[m.type].push(url);
            resolved[m.type + 'Mods'].push(m);
        } else {
            Y.log('Undefined Module', 'warn', 'loader');
        }
    };
    len = s.length;
    // the default combo base
    comboBase = self.comboBase;
    url = comboBase;
    comboSources = {};
    // Pass 1: bucket each sorted module by the combo service it should be
    // requested from; modules that cannot be combined are added singly.
    for (i = 0; i < len; i++) {
        comboSource = comboBase;
        m = self.getModule(s[i]);
        groupName = m && m.group;
        group = self.groups[groupName];
        if (groupName && group) {
            if (!group.combine || m.fullpath) {
                //This is not a combo module, skip it and load it singly later.
                addSingle(m);
                continue;
            }
            m.combine = true;
            if (group.comboBase) {
                comboSource = group.comboBase;
            }
            if ("root" in group && L.isValue(group.root)) {
                m.root = group.root;
            }
            m.comboSep = group.comboSep || self.comboSep;
            m.maxURLLength = group.maxURLLength || self.maxURLLength;
        } else {
            if (!self.combine) {
                //This is not a combo module, skip it and load it singly later.
                addSingle(m);
                continue;
            }
        }
        comboSources[comboSource] = comboSources[comboSource] || [];
        comboSources[comboSource].push(m);
    }
    // Pass 2: build per-combo-source path fragment lists, split by type.
    for (j in comboSources) {
        if (comboSources.hasOwnProperty(j)) {
            resCombos[j] = resCombos[j] || { js: [], jsMods: [], css: [], cssMods: [] };
            url = j;
            mods = comboSources[j];
            len = mods.length;
            if (len) {
                for (i = 0; i < len; i++) {
                    if (inserted[mods[i]]) {
                        continue;
                    }
                    m = mods[i];
                    // Do not try to combine non-yui JS unless combo def
                    // is found
                    if (m && (m.combine || !m.ext)) {
                        resCombos[j].comboSep = m.comboSep;
                        resCombos[j].group = m.group;
                        resCombos[j].maxURLLength = m.maxURLLength;
                        frag = ((L.isValue(m.root)) ? m.root : self.root) + (m.path || m.fullpath);
                        frag = self._filter(frag, m.name);
                        resCombos[j][m.type].push(frag);
                        resCombos[j][m.type + 'Mods'].push(m);
                    } else {
                        //Add them to the next process..
                        if (mods[i]) {
                            addSingle(mods[i]);
                        }
                    }
                }
            }
        }
    }
    // Pass 3: join the fragments into combo URLs, splitting into multiple
    // requests whenever a URL would exceed maxURLLength.
    for (j in resCombos) {
        base = j;
        comboSep = resCombos[base].comboSep || self.comboSep;
        maxURLLength = resCombos[base].maxURLLength || self.maxURLLength;
        Y.log('Using maxURLLength of ' + maxURLLength, 'info', 'loader');
        for (type in resCombos[base]) {
            if (type === JS || type === CSS) {
                urls = resCombos[base][type];
                mods = resCombos[base][type + 'Mods'];
                len = urls.length;
                tmpBase = base + urls.join(comboSep);
                baseLen = tmpBase.length;
                if (maxURLLength <= base.length) {
                    Y.log('maxURLLength (' + maxURLLength + ') is lower than the comboBase length (' + base.length + '), resetting to default (' + MAX_URL_LENGTH + ')', 'error', 'loader');
                    maxURLLength = MAX_URL_LENGTH;
                }
                if (len) {
                    if (baseLen > maxURLLength) {
                        Y.log('Exceeded maxURLLength (' + maxURLLength + ') for ' + type + ', splitting', 'info', 'loader');
                        u = [];
                        // NOTE: s is reused here as a numeric index.
                        for (s = 0; s < len; s++) {
                            u.push(urls[s]);
                            tmpBase = base + u.join(comboSep);
                            if (tmpBase.length > maxURLLength) {
                                m = u.pop();
                                tmpBase = base + u.join(comboSep);
                                resolved[type].push(self._filter(tmpBase, null, resCombos[base].group));
                                u = [];
                                if (m) {
                                    u.push(m);
                                }
                            }
                        }
                        if (u.length) {
                            tmpBase = base + u.join(comboSep);
                            resolved[type].push(self._filter(tmpBase, null, resCombos[base].group));
                        }
                    } else {
                        resolved[type].push(self._filter(tmpBase, null, resCombos[base].group));
                    }
                }
                resolved[type + 'Mods'] = resolved[type + 'Mods'].concat(mods);
            }
        }
    }
    resCombos = null;
    return resolved;
},
/**
Shortcut to calculate, resolve and load all modules.
var loader = new Y.Loader({
ignoreRegistered: true,
modules: {
mod: {
path: 'mod.js'
}
},
requires: [ 'mod' ]
});
loader.load(function() {
console.log('All modules have loaded..');
});
@method load
@param {Callback} cb Executed after all load operations are complete
*/
load: function(cb) {
if (!cb) {
Y.log('No callback supplied to load()', 'error', 'loader');
return;
}
var self = this,
out = self.resolve(true);
self.data = out;
self.onEnd = function() {
cb.apply(self.context || self, arguments);
};
self.insert();
}
};
}, '@VERSION@', {"requires": ["get", "features"]});
| mit |
rajdeepd/docker | volume/drivers/adapter.go | 1153 | package volumedrivers
import "github.com/docker/docker/volume"
// volumeDriverAdapter adapts a volumeDriverProxy to the volume driver
// interface used by the daemon.
type volumeDriverAdapter struct {
	name  string             // registered driver name
	proxy *volumeDriverProxy // proxy that forwards calls to the external plugin
}
// Name returns the registered name of the volume driver.
func (a *volumeDriverAdapter) Name() string {
	return a.name
}
// Create asks the driver (via the proxy) to create a volume with the
// given name and, on success, returns a volumeAdapter handle for it.
func (a *volumeDriverAdapter) Create(name string) (volume.Volume, error) {
	if err := a.proxy.Create(name); err != nil {
		return nil, err
	}
	return &volumeAdapter{
		proxy:      a.proxy,
		name:       name,
		driverName: a.name,
	}, nil
}
// Remove asks the driver to delete the given volume.
func (a *volumeDriverAdapter) Remove(v volume.Volume) error {
	return a.proxy.Remove(v.Name())
}
// volumeAdapter is the volume handle returned for plugin-backed volumes;
// all operations are forwarded to the driver through the proxy.
type volumeAdapter struct {
	proxy      *volumeDriverProxy
	name       string
	driverName string
	eMount     string // ephemeral host volume path
}
// Name returns the name of the volume.
func (a *volumeAdapter) Name() string {
	return a.name
}
// DriverName returns the name of the driver that owns this volume.
func (a *volumeAdapter) DriverName() string {
	return a.driverName
}
// Path returns the host path of the volume. A cached ephemeral mount
// path is preferred; otherwise the driver is queried (a driver error is
// discarded and yields an empty path, matching the original behavior).
func (a *volumeAdapter) Path() string {
	if a.eMount != "" {
		return a.eMount
	}
	p, _ := a.proxy.Path(a.name)
	return p
}
// Mount asks the driver to mount the volume and caches the returned
// host path in eMount for later Path() calls.
func (a *volumeAdapter) Mount() (string, error) {
	var err error
	a.eMount, err = a.proxy.Mount(a.name)
	return a.eMount, err
}
// Unmount asks the driver to unmount the volume.
// NOTE(review): eMount is not cleared here, so Path() keeps returning
// the stale mount path after unmounting — confirm this is intended.
func (a *volumeAdapter) Unmount() error {
	return a.proxy.Unmount(a.name)
}
| apache-2.0 |
krousey/kubernetes | vendor/github.com/docker/docker/client/image_inspect.go | 878 | package client
import (
"bytes"
"encoding/json"
"io/ioutil"
"net/http"
"github.com/docker/docker/api/types"
"golang.org/x/net/context"
)
// ImageInspectWithRaw returns the image information and its raw representation.
func (cli *Client) ImageInspectWithRaw(ctx context.Context, imageID string) (types.ImageInspect, []byte, error) {
	serverResp, err := cli.get(ctx, "/images/"+imageID+"/json", nil, nil)
	if err != nil {
		// Translate a 404 into a typed "image not found" error so callers
		// can distinguish a missing image from a transport failure.
		if serverResp.statusCode == http.StatusNotFound {
			return types.ImageInspect{}, nil, imageNotFoundError{imageID}
		}
		return types.ImageInspect{}, nil, err
	}
	defer ensureReaderClosed(serverResp)
	// Read the whole body up front so the raw bytes can be returned
	// alongside the decoded struct.
	body, err := ioutil.ReadAll(serverResp.body)
	if err != nil {
		return types.ImageInspect{}, nil, err
	}
	var response types.ImageInspect
	rdr := bytes.NewReader(body)
	err = json.NewDecoder(rdr).Decode(&response)
	return response, body, err
}
| apache-2.0 |
Slaffka/moodel | install/lang/ur/langconfig.php | 1302 | <?php
// This file is part of Moodle - http://moodle.org/
//
// Moodle is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Moodle is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Moodle. If not, see <http://www.gnu.org/licenses/>.
/**
* Automatically generated strings for Moodle installer
*
* Do not edit this file manually! It contains just a subset of strings
* needed during the very first steps of installation. This file was
* generated automatically by export-installer.php (which is part of AMOS
* {@link http://docs.moodle.org/dev/Languages/AMOS}) using the
* list of strings defined in /install/stringnames.txt.
*
* @package installer
* @license http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
*/
defined('MOODLE_INTERNAL') || die();
// Urdu (ur) is written right-to-left.
$string['thisdirection'] = 'rtl';
$string['thislanguage'] = 'اردو';
| gpl-3.0 |
AlexTMjugador/mtasa-blue | vendor/google-breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_helpers.cc | 13142 | // Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// http://code.google.com/p/protobuf/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Author: [email protected] (Kenton Varda)
// Based on original Protocol Buffers design by
// Sanjay Ghemawat, Jeff Dean, and others.
#include <limits>
#include <vector>
#include <google/protobuf/stubs/hash.h>
#include <google/protobuf/compiler/cpp/cpp_helpers.h>
#include <google/protobuf/stubs/common.h>
#include <google/protobuf/stubs/strutil.h>
#include <google/protobuf/stubs/substitute.h>
namespace google {
namespace protobuf {
namespace compiler {
namespace cpp {
namespace {
// Replaces every '.' in a fully-qualified proto name with '_'
// (e.g. "foo.bar" -> "foo_bar").
string DotsToUnderscores(const string& name) {
  return StringReplace(name, ".", "_", true);
}
// Replaces every '.' in a fully-qualified proto name with the C++
// scope operator (e.g. "foo.bar" -> "foo::bar").
string DotsToColons(const string& name) {
  return StringReplace(name, ".", "::", true);
}
// C++ keywords and alternative operator tokens.  Field names that collide
// with an entry in this list get an underscore appended (see FieldName()).
const char* const kKeywordList[] = {
  "and", "and_eq", "asm", "auto", "bitand", "bitor", "bool", "break", "case",
  "catch", "char", "class", "compl", "const", "const_cast", "continue",
  "default", "delete", "do", "double", "dynamic_cast", "else", "enum",
  "explicit", "extern", "false", "float", "for", "friend", "goto", "if",
  "inline", "int", "long", "mutable", "namespace", "new", "not", "not_eq",
  "operator", "or", "or_eq", "private", "protected", "public", "register",
  "reinterpret_cast", "return", "short", "signed", "sizeof", "static",
  "static_cast", "struct", "switch", "template", "this", "throw", "true", "try",
  "typedef", "typeid", "typename", "union", "unsigned", "using", "virtual",
  "void", "volatile", "wchar_t", "while", "xor", "xor_eq"
};
// Builds the keyword lookup set from kKeywordList.
hash_set<string> MakeKeywordsMap() {
  hash_set<string> result;
  for (int i = 0; i < GOOGLE_ARRAYSIZE(kKeywordList); i++) {
    result.insert(kKeywordList[i]);
  }
  return result;
}

// Set of reserved words, initialized once at static-initialization time.
hash_set<string> kKeywords = MakeKeywordsMap();
// Converts a lower_snake_case identifier to camelCase (or CamelCase when
// cap_next_letter is initially true).  Digits are copied through and force
// the following letter to be capitalized; any other non-alphanumeric
// character (e.g. '_') is dropped and likewise forces capitalization.
// Note: I distrust ctype.h due to locales.
string UnderscoresToCamelCase(const string& input, bool cap_next_letter) {
  string result;
  result.reserve(input.size());
  // Use an unsigned index to avoid a signed/unsigned comparison against
  // input.size() (the original used a signed int).
  for (string::size_type i = 0; i < input.size(); i++) {
    if ('a' <= input[i] && input[i] <= 'z') {
      if (cap_next_letter) {
        // Upper-case the letter via its ASCII offset.
        result += input[i] + ('A' - 'a');
      } else {
        result += input[i];
      }
      cap_next_letter = false;
    } else if ('A' <= input[i] && input[i] <= 'Z') {
      // Capital letters are left as-is.
      result += input[i];
      cap_next_letter = false;
    } else if ('0' <= input[i] && input[i] <= '9') {
      result += input[i];
      cap_next_letter = true;
    } else {
      // Separator (e.g. '_'): drop it and capitalize what follows.
      cap_next_letter = true;
    }
  }
  return result;
}
} // namespace
// Visual separator lines emitted between sections of generated code.
const char kThickSeparator[] =
  "// ===================================================================\n";
const char kThinSeparator[] =
  "// -------------------------------------------------------------------\n";
// Returns the C++ class name for the given message descriptor.  Nested
// messages are flattened with underscores (Outer_Inner); when |qualified|
// is true the result is prefixed with the fully qualified namespace.
string ClassName(const Descriptor* descriptor, bool qualified) {
  // Find "outer", the descriptor of the top-level message in which
  // "descriptor" is embedded.
  const Descriptor* outer = descriptor;
  while (outer->containing_type() != NULL) outer = outer->containing_type();
  // inner_name holds descriptor's full name relative to outer (starts with
  // '.' for nested messages, empty for top-level ones).
  const string& outer_name = outer->full_name();
  string inner_name = descriptor->full_name().substr(outer_name.size());
  if (qualified) {
    return "::" + DotsToColons(outer_name) + DotsToUnderscores(inner_name);
  } else {
    return outer->name() + DotsToUnderscores(inner_name);
  }
}
// Returns the C++ name for the given enum descriptor.  Enums nested inside a
// message are prefixed with the containing message's class name plus '_'.
string ClassName(const EnumDescriptor* enum_descriptor, bool qualified) {
  if (enum_descriptor->containing_type() == NULL) {
    // Top-level enum: qualify with the namespace only if requested.
    if (qualified) {
      return DotsToColons(enum_descriptor->full_name());
    } else {
      return enum_descriptor->name();
    }
  } else {
    // Nested enum: recurse on the containing message.
    string result = ClassName(enum_descriptor->containing_type(), qualified);
    result += '_';
    result += enum_descriptor->name();
    return result;
  }
}
// Returns the base class for a generated message: the full Message interface
// when the file has descriptor methods, otherwise the lite runtime class.
string SuperClassName(const Descriptor* descriptor) {
  return HasDescriptorMethods(descriptor->file()) ?
      "::google::protobuf::Message" : "::google::protobuf::MessageLite";
}
// Returns the accessor name for a field: the lower-cased field name, with an
// underscore appended if it would collide with a C++ keyword.
string FieldName(const FieldDescriptor* field) {
  string result = field->name();
  LowerString(&result);
  if (kKeywords.count(result) > 0) {
    result.append("_");
  }
  return result;
}
// Returns the name of the kFooFieldNumber constant generated for a field.
string FieldConstantName(const FieldDescriptor *field) {
  string field_name = UnderscoresToCamelCase(field->name(), true);
  string result = "k" + field_name + "FieldNumber";
  if (!field->is_extension() &&
      field->containing_type()->FindFieldByCamelcaseName(
        field->camelcase_name()) != field) {
    // This field's camelcase name is not unique.  As a hack, add the field
    // number to the constant name.  This makes the constant rather useless,
    // but what can we do?
    result += "_" + SimpleItoa(field->number());
  }
  return result;
}
// Returns the fully qualified C++ class name of a message-typed field.
string FieldMessageTypeName(const FieldDescriptor* field) {
  // Note: The Google-internal version of Protocol Buffers uses this function
  // as a hook point for hacks to support legacy code.
  return ClassName(field->message_type(), true);
}
// Strips the ".proto" (or legacy ".protodevel") extension from a file name.
string StripProto(const string& filename) {
  if (HasSuffixString(filename, ".protodevel")) {
    return StripSuffixString(filename, ".protodevel");
  } else {
    return StripSuffixString(filename, ".proto");
  }
}
// Maps a descriptor CppType to the C++ type name used in generated code.
// Returns NULL for CPPTYPE_MESSAGE (callers use FieldMessageTypeName there).
const char* PrimitiveTypeName(FieldDescriptor::CppType type) {
  switch (type) {
    case FieldDescriptor::CPPTYPE_INT32  : return "::google::protobuf::int32";
    case FieldDescriptor::CPPTYPE_INT64  : return "::google::protobuf::int64";
    case FieldDescriptor::CPPTYPE_UINT32 : return "::google::protobuf::uint32";
    case FieldDescriptor::CPPTYPE_UINT64 : return "::google::protobuf::uint64";
    case FieldDescriptor::CPPTYPE_DOUBLE : return "double";
    case FieldDescriptor::CPPTYPE_FLOAT  : return "float";
    case FieldDescriptor::CPPTYPE_BOOL   : return "bool";
    case FieldDescriptor::CPPTYPE_ENUM   : return "int";
    case FieldDescriptor::CPPTYPE_STRING : return "::std::string";
    case FieldDescriptor::CPPTYPE_MESSAGE: return NULL;
    // No default because we want the compiler to complain if any new
    // CppTypes are added.
  }
  GOOGLE_LOG(FATAL) << "Can't get here.";
  return NULL;
}
// Maps a declared wire type to the suffix used by WireFormat read/write
// method names (e.g. TYPE_SINT32 -> "SInt32" for ReadSInt32/WriteSInt32).
const char* DeclaredTypeMethodName(FieldDescriptor::Type type) {
  switch (type) {
    case FieldDescriptor::TYPE_INT32   : return "Int32";
    case FieldDescriptor::TYPE_INT64   : return "Int64";
    case FieldDescriptor::TYPE_UINT32  : return "UInt32";
    case FieldDescriptor::TYPE_UINT64  : return "UInt64";
    case FieldDescriptor::TYPE_SINT32  : return "SInt32";
    case FieldDescriptor::TYPE_SINT64  : return "SInt64";
    case FieldDescriptor::TYPE_FIXED32 : return "Fixed32";
    case FieldDescriptor::TYPE_FIXED64 : return "Fixed64";
    case FieldDescriptor::TYPE_SFIXED32: return "SFixed32";
    case FieldDescriptor::TYPE_SFIXED64: return "SFixed64";
    case FieldDescriptor::TYPE_FLOAT   : return "Float";
    case FieldDescriptor::TYPE_DOUBLE  : return "Double";
    case FieldDescriptor::TYPE_BOOL    : return "Bool";
    case FieldDescriptor::TYPE_ENUM    : return "Enum";
    case FieldDescriptor::TYPE_STRING  : return "String";
    case FieldDescriptor::TYPE_BYTES   : return "Bytes";
    case FieldDescriptor::TYPE_GROUP   : return "Group";
    case FieldDescriptor::TYPE_MESSAGE : return "Message";
    // No default because we want the compiler to complain if any new
    // types are added.
  }
  GOOGLE_LOG(FATAL) << "Can't get here.";
  return "";
}
// Returns the C++ literal used to initialize a field to its default value.
// Integer defaults get the proper suffix/macro for their width, special
// float/double values map to the runtime's Infinity()/NaN() helpers, enums
// use a static_cast, and strings are C-escaped with trigraphs disarmed.
string DefaultValue(const FieldDescriptor* field) {
  switch (field->cpp_type()) {
    case FieldDescriptor::CPPTYPE_INT32:
      return SimpleItoa(field->default_value_int32());
    case FieldDescriptor::CPPTYPE_UINT32:
      return SimpleItoa(field->default_value_uint32()) + "u";
    case FieldDescriptor::CPPTYPE_INT64:
      return "GOOGLE_LONGLONG(" + SimpleItoa(field->default_value_int64()) + ")";
    case FieldDescriptor::CPPTYPE_UINT64:
      return "GOOGLE_ULONGLONG(" + SimpleItoa(field->default_value_uint64())+ ")";
    case FieldDescriptor::CPPTYPE_DOUBLE: {
      double value = field->default_value_double();
      if (value == numeric_limits<double>::infinity()) {
        return "::google::protobuf::internal::Infinity()";
      } else if (value == -numeric_limits<double>::infinity()) {
        return "-::google::protobuf::internal::Infinity()";
      } else if (value != value) {
        // value != value is true only for NaN.
        return "::google::protobuf::internal::NaN()";
      } else {
        return SimpleDtoa(value);
      }
    }
    case FieldDescriptor::CPPTYPE_FLOAT:
      {
        float value = field->default_value_float();
        if (value == numeric_limits<float>::infinity()) {
          return "static_cast<float>(::google::protobuf::internal::Infinity())";
        } else if (value == -numeric_limits<float>::infinity()) {
          return "static_cast<float>(-::google::protobuf::internal::Infinity())";
        } else if (value != value) {
          // NaN check, as above.
          return "static_cast<float>(::google::protobuf::internal::NaN())";
        } else {
          string float_value = SimpleFtoa(value);
          // If floating point value contains a period (.) or an exponent
          // (either E or e), then append suffix 'f' to make it a float
          // literal.
          if (float_value.find_first_of(".eE") != string::npos) {
            float_value.push_back('f');
          }
          return float_value;
        }
      }
    case FieldDescriptor::CPPTYPE_BOOL:
      return field->default_value_bool() ? "true" : "false";
    case FieldDescriptor::CPPTYPE_ENUM:
      // Lazy: Generate a static_cast because we don't have a helper function
      // that constructs the full name of an enum value.
      return strings::Substitute(
          "static_cast< $0 >($1)",
          ClassName(field->enum_type(), true),
          field->default_value_enum()->number());
    case FieldDescriptor::CPPTYPE_STRING:
      return "\"" + EscapeTrigraphs(CEscape(field->default_value_string())) +
             "\"";
    case FieldDescriptor::CPPTYPE_MESSAGE:
      return FieldMessageTypeName(field) + "::default_instance()";
  }
  // Can't actually get here; make compiler happy. (We could add a default
  // case above but then we wouldn't get the nice compiler warning when a
  // new type is added.)
  GOOGLE_LOG(FATAL) << "Can't get here.";
  return "";
}
// Convert a file name into a valid identifier.  ASCII alphanumerics are kept
// verbatim; every other byte becomes '_' followed by its hex code.
string FilenameIdentifier(const string& filename) {
  string result;
  for (int i = 0; i < filename.size(); i++) {
    if (ascii_isalnum(filename[i])) {
      result.push_back(filename[i]);
    } else {
      // Not alphanumeric.  To avoid any possibility of name conflicts we
      // use the hex code for the character.
      result.push_back('_');
      char buffer[kFastToBufferSize];
      result.append(FastHexToBuffer(static_cast<uint8>(filename[i]), buffer));
    }
  }
  return result;
}
// Return the name of the AddDescriptors() function for a given file.
// These three names must stay in sync with the functions the file-level
// generator emits for the same file.
string GlobalAddDescriptorsName(const string& filename) {
  return "protobuf_AddDesc_" + FilenameIdentifier(filename);
}
// Return the name of the AssignDescriptors() function for a given file.
string GlobalAssignDescriptorsName(const string& filename) {
  return "protobuf_AssignDesc_" + FilenameIdentifier(filename);
}
// Return the name of the ShutdownFile() function for a given file.
string GlobalShutdownFileName(const string& filename) {
  return "protobuf_ShutdownFile_" + FilenameIdentifier(filename);
}
// Escapes C++ trigraphs by rewriting every '?' as "\?" so that sequences
// like "??=" in string literals are not interpreted as trigraphs.
string EscapeTrigraphs(const string& to_escape) {
  string escaped;
  escaped.reserve(to_escape.size());
  for (string::size_type i = 0; i < to_escape.size(); ++i) {
    if (to_escape[i] == '?') {
      escaped += "\\?";
    } else {
      escaped += to_escape[i];
    }
  }
  return escaped;
}
} // namespace cpp
} // namespace compiler
} // namespace protobuf
} // namespace google
| gpl-3.0 |
jojimt/kubernetes | pkg/util/config/config_test.go | 2844 | /*
Copyright 2014 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package config
import (
"reflect"
"testing"
)
// TestConfigurationChannels verifies that Channel returns the same channel
// when called twice with the same name and distinct channels for distinct
// names.
func TestConfigurationChannels(t *testing.T) {
	mux := NewMux(nil)
	channelOne := mux.Channel("one")
	if channelOne != mux.Channel("one") {
		t.Error("Didn't get the same muxuration channel back with the same name")
	}
	channelTwo := mux.Channel("two")
	if channelOne == channelTwo {
		t.Error("Got back the same muxuration channel for different names")
	}
}
// MergeMock is a Merger implementation that fails the test when Merge is
// invoked with anything other than the expected source name and update.
type MergeMock struct {
	source string      // expected source name
	update interface{} // expected update payload
	t      *testing.T
}

// Merge implements the Merger interface for MergeMock.
func (m MergeMock) Merge(source string, update interface{}) error {
	if m.source != source {
		m.t.Errorf("Expected %s, Got %s", m.source, source)
	}
	if !reflect.DeepEqual(m.update, update) {
		m.t.Errorf("Expected %s, Got %s", m.update, update)
	}
	return nil
}
// TestMergeInvoked verifies that sending on a channel invokes the Merger
// with the channel's name as the source.
func TestMergeInvoked(t *testing.T) {
	merger := MergeMock{"one", "test", t}
	mux := NewMux(&merger)
	mux.Channel("one") <- "test"
}
// TestMergeFuncInvoked verifies that a MergeFunc adapter receives the source
// name and update sent on the channel.  The ch receive at the end blocks
// until the merge function has actually run.
func TestMergeFuncInvoked(t *testing.T) {
	ch := make(chan bool)
	mux := NewMux(MergeFunc(func(source string, update interface{}) error {
		if source != "one" {
			t.Errorf("Expected %s, Got %s", "one", source)
		}
		if update.(string) != "test" {
			t.Errorf("Expected %s, Got %s", "test", update)
		}
		ch <- true
		return nil
	}))
	mux.Channel("one") <- "test"
	<-ch
}
// TestSimultaneousMerge verifies that updates sent on two different channels
// are both routed to the merge function with their respective source names.
// The buffered ch collects one token per expected merge invocation.
func TestSimultaneousMerge(t *testing.T) {
	ch := make(chan bool, 2)
	mux := NewMux(MergeFunc(func(source string, update interface{}) error {
		switch source {
		case "one":
			if update.(string) != "test" {
				t.Errorf("Expected %s, Got %s", "test", update)
			}
		case "two":
			if update.(string) != "test2" {
				t.Errorf("Expected %s, Got %s", "test2", update)
			}
		default:
			t.Errorf("Unexpected source, Got %s", update)
		}
		ch <- true
		return nil
	}))
	source := mux.Channel("one")
	source2 := mux.Channel("two")
	source <- "test"
	source2 <- "test2"
	<-ch
	<-ch
}
// TestBroadcaster verifies that Notify delivers the object to every
// registered listener.  The first Notify happens before any listener is
// registered and exercises that code path (nothing should be delivered).
func TestBroadcaster(t *testing.T) {
	b := NewBroadcaster()
	// No listeners registered yet.
	b.Notify(struct{}{})
	ch := make(chan bool, 2)
	b.Add(ListenerFunc(func(object interface{}) {
		if object != "test" {
			t.Errorf("Expected %s, Got %s", "test", object)
		}
		ch <- true
	}))
	b.Add(ListenerFunc(func(object interface{}) {
		if object != "test" {
			t.Errorf("Expected %s, Got %s", "test", object)
		}
		ch <- true
	}))
	b.Notify("test")
	// Wait until both listeners have observed the notification.
	<-ch
	<-ch
}
| apache-2.0 |
Dirbaio/btcd | cmd/showblock/showblock.go | 7596 | // Copyright (c) 2013 Conformal Systems LLC.
// Copyright (c) 2015 The Decred developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.
package main
import (
"encoding/binary"
"errors"
"fmt"
"io"
"os"
"path/filepath"
"strconv"
"github.com/btcsuite/btclog"
flags "github.com/btcsuite/go-flags"
"github.com/davecgh/go-spew/spew"
"github.com/decred/dcrd/chaincfg"
"github.com/decred/dcrd/chaincfg/chainhash"
"github.com/decred/dcrd/database"
_ "github.com/decred/dcrd/database/ldb"
"github.com/decred/dcrd/wire"
"github.com/decred/dcrutil"
)
// Hash aliases chainhash.Hash.  NOTE(review): it appears unused within this
// file — confirm before removing.
type Hash chainhash.Hash

// config defines the command line flags accepted by the showblock utility.
type config struct {
	DataDir    string `short:"b" long:"datadir" description:"Directory to store data"`
	DbType     string `long:"dbtype" description:"Database backend"`
	TestNet    bool   `long:"testnet" description:"Use the test network"`
	SimNet     bool   `long:"simnet" description:"Use the simulation test network"`
	OutFile    string `short:"o" description:"outfile"`
	Progress   bool   `short:"p" description:"show progress"`
	ShaString  string `short:"s" description:"Block SHA to process" required:"true"`
	EShaString string `short:"e" description:"End Block SHA to process"`
	RawBlock   bool   `short:"r" description:"Raw Block"`
	FmtBlock   bool   `short:"f" description:"Format Block"`
	ShowTx     bool   `short:"t" description:"Show transaction"`
}

var (
	// Default dcrd home and data directories used to locate the database.
	dcrdHomeDir     = dcrutil.AppDataDir("dcrd", false)
	defaultDataDir  = filepath.Join(dcrdHomeDir, "data")
	log             btclog.Logger
	activeNetParams = &chaincfg.MainNetParams
)

// Argument kinds returned by parsesha: a literal block sha or a height.
const (
	ArgSha = iota
	ArgHeight
)
// netName returns the name used when referring to a bitcoin network. At the
// time of writing, dcrd currently places blocks for testnet version 0 in the
// data and log directory "testnet", which does not match the Name field of the
// dcrnet parameters. This function can be used to override this directory name
// as "testnet" when the passed active network matches wire.TestNet.
//
// A proper upgrade to move the data and log directories for this network to
// "testnet" is planned for the future, at which point this function can be
// removed and the network parameter's name used instead.
func netName(netParams *chaincfg.Params) string {
	// The testnet directory name is special-cased; every other network uses
	// its parameter name directly.
	if netParams.Net == wire.TestNet {
		return "testnet"
	}
	return netParams.Name
}
// main parses command line flags, opens the block database for the selected
// network and dumps the requested range of blocks.
func main() {
	// -1 means "end height not determined yet".
	end := int64(-1)
	cfg := config{
		DbType:  "leveldb",
		DataDir: defaultDataDir,
	}
	parser := flags.NewParser(&cfg, flags.Default)
	_, err := parser.Parse()
	if err != nil {
		if e, ok := err.(*flags.Error); !ok || e.Type != flags.ErrHelp {
			parser.WriteHelp(os.Stderr)
		}
		return
	}
	backendLogger := btclog.NewDefaultBackendLogger()
	defer backendLogger.Flush()
	log = btclog.NewSubsystemLogger(backendLogger, "")
	database.UseLogger(log)
	// Multiple networks can't be selected simultaneously.
	funcName := "main"
	numNets := 0
	// Count number of network flags passed; assign active network params
	// while we're at it
	if cfg.TestNet {
		numNets++
		activeNetParams = &chaincfg.TestNetParams
	}
	if cfg.SimNet {
		numNets++
		activeNetParams = &chaincfg.SimNetParams
	}
	if numNets > 1 {
		str := "%s: The testnet, regtest, and simnet params can't be " +
			"used together -- choose one of the three"
		err := fmt.Errorf(str, funcName)
		fmt.Fprintln(os.Stderr, err)
		parser.WriteHelp(os.Stderr)
		return
	}
	// Each network keeps its blocks in its own subdirectory.
	cfg.DataDir = filepath.Join(cfg.DataDir, netName(activeNetParams))
	blockDbNamePrefix := "blocks"
	dbName := blockDbNamePrefix + "_" + cfg.DbType
	if cfg.DbType == "sqlite" {
		dbName = dbName + ".db"
	}
	dbPath := filepath.Join(cfg.DataDir, dbName)
	log.Infof("loading db %v", cfg.DbType)
	database, err := database.OpenDB(cfg.DbType, dbPath)
	if err != nil {
		log.Warnf("db open failed: %v", err)
		return
	}
	defer database.Close()
	log.Infof("db load complete")
	// Resolve the start (and optional end) block arguments to heights.
	height, err := getHeight(database, cfg.ShaString)
	if err != nil {
		log.Infof("Invalid block %v", cfg.ShaString)
		return
	}
	if cfg.EShaString != "" {
		end, err = getHeight(database, cfg.EShaString)
		if err != nil {
			log.Infof("Invalid end block %v", cfg.EShaString)
			return
		}
	} else {
		// No end block specified: dump only the start block.
		end = height + 1
	}
	log.Infof("height %v end %v", height, end)
	var fo io.WriteCloser
	if cfg.OutFile != "" {
		fo, err = os.Create(cfg.OutFile)
		if err != nil {
			log.Warnf("failed to open file %v, err %v", cfg.OutFile, err)
		}
		defer func() {
			if err := fo.Close(); err != nil {
				log.Warn("failed to close file %v %v", cfg.OutFile, err)
			}
		}()
	}
	for ; height < end; height++ {
		if cfg.Progress && height%int64(1) == 0 {
			log.Infof("Processing block %v", height)
		}
		err = DumpBlock(database, height, fo, cfg.RawBlock, cfg.FmtBlock, cfg.ShowTx)
		if err != nil {
			break
		}
	}
	if cfg.Progress {
		height--
		log.Infof("Processing block %v", height)
	}
}
// getHeight resolves a block argument (a hex sha or a decimal height, as
// classified by parsesha) to a block height, looking the sha up in the
// database when necessary.
func getHeight(database database.Db, str string) (int64, error) {
	argtype, idx, sha, err := parsesha(str)
	if err != nil {
		log.Warnf("unable to decode [%v] %v", str, err)
		return 0, err
	}
	switch argtype {
	case ArgSha:
		// Look the sha up to obtain its height.
		blk, err := database.FetchBlockBySha(sha)
		if err != nil {
			log.Warnf("unable to locate block sha %v err %v",
				sha, err)
			return 0, err
		}
		idx = blk.Height()
	case ArgHeight:
		// idx already holds the parsed height.
	}
	return idx, nil
}
// DumpBlock fetches the block at the given height and optionally logs its
// raw bytes (rflag), its deserialized form (fflag) and its transaction
// hashes (tflag).  When fo is non-nil the raw block is also written to it,
// prefixed with a network magic and a length header.
func DumpBlock(database database.Db, height int64, fo io.Writer, rflag bool, fflag bool, tflag bool) error {
	sha, err := database.FetchBlockShaByHeight(height)
	if err != nil {
		return err
	}
	blk, err := database.FetchBlockBySha(sha)
	if err != nil {
		log.Warnf("Failed to fetch block %v, err %v", sha, err)
		return err
	}
	rblk, err := blk.Bytes()
	if err != nil {
		// Previously this error was silently ignored, which could result in
		// logging/writing an empty serialized block.
		log.Warnf("Failed to serialize block %v, err %v", sha, err)
		return err
	}
	blkid := blk.Height()
	if rflag {
		log.Infof("Block %v depth %v %v", sha, blkid, spew.Sdump(rblk))
	}
	mblk := blk.MsgBlock()
	if fflag {
		log.Infof("Block %v depth %v %v", sha, blkid, spew.Sdump(mblk))
	}
	if tflag {
		log.Infof("Num transactions %v", len(mblk.Transactions))
		for i, tx := range mblk.Transactions {
			txsha := tx.TxSha()
			log.Infof("tx %v: %v", i, &txsha)
		}
	}
	if fo != nil {
		// generate and write header values
		binary.Write(fo, binary.LittleEndian, uint32(wire.SimNet))
		binary.Write(fo, binary.LittleEndian, uint32(len(rblk)))
		// write block
		fo.Write(rblk)
	}
	return nil
}
// NOTE(review): these counters are never updated in this file — confirm they
// are still needed before removing.
var ntxcnt int64
var txspendcnt int64
var txgivecnt int64

// Errors returned by parsesha for malformed block arguments.
var ErrBadShaPrefix = errors.New("invalid prefix")
var ErrBadShaLen = errors.New("invalid len")
var ErrBadShaChar = errors.New("invalid character")
// parsesha classifies and decodes a block argument.  A 64-character (or
// "0x"-prefixed 66-character) hex string is decoded into a chainhash.Hash
// and returned with ArgSha; a short numeric string is parsed as a height and
// returned with ArgHeight.  Anything else yields an error.
func parsesha(argstr string) (argtype int, height int64, psha *chainhash.Hash, err error) {
	var sha chainhash.Hash
	var hashbuf string
	switch len(argstr) {
	case 64:
		hashbuf = argstr
	case 66:
		if argstr[0:2] != "0x" {
			log.Infof("prefix is %v", argstr[0:2])
			err = ErrBadShaPrefix
			return
		}
		hashbuf = argstr[2:]
	default:
		if len(argstr) <= 16 {
			// assume value is height
			argtype = ArgHeight
			var h int
			h, err = strconv.Atoi(argstr)
			if err == nil {
				height = int64(h)
				return
			}
			log.Infof("Unable to parse height %v, err %v", height, err)
		}
		err = ErrBadShaLen
		return
	}
	var buf [32]byte
	for idx, ch := range hashbuf {
		// Decode one hex nibble.
		var val rune
		switch {
		case ch >= '0' && ch <= '9':
			val = ch - '0'
		case ch >= 'a' && ch <= 'f':
			val = ch - 'a' + rune(10)
		case ch >= 'A' && ch <= 'F':
			val = ch - 'A' + rune(10)
		default:
			err = ErrBadShaChar
			return
		}
		// Bytes are packed into buf in reverse order of the hex string;
		// even-indexed characters are the high nibble of each byte.
		b := buf[31-idx/2]
		if idx&1 == 1 {
			b |= byte(val)
		} else {
			b |= (byte(val) << 4)
		}
		buf[31-idx/2] = b
	}
	sha.SetBytes(buf[0:32])
	psha = &sha
	return
}
| isc |
Roasbeef/btcd | blockchain/example_test.go | 3891 | // Copyright (c) 2014-2016 The btcsuite developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.
package blockchain_test
import (
"fmt"
"math/big"
"os"
"path/filepath"
"github.com/roasbeef/btcd/blockchain"
"github.com/roasbeef/btcd/chaincfg"
"github.com/roasbeef/btcd/database"
_ "github.com/roasbeef/btcd/database/ffldb"
"github.com/roasbeef/btcutil"
)
// This example demonstrates how to create a new chain instance and use
// ProcessBlock to attempt to attempt add a block to the chain. As the package
// overview documentation describes, this includes all of the Bitcoin consensus
// rules. This example intentionally attempts to insert a duplicate genesis
// block to illustrate how an invalid block is handled.
func ExampleBlockChain_ProcessBlock() {
	// Create a new database to store the accepted blocks into. Typically
	// this would be opening an existing database and would not be deleting
	// and creating a new database like this, but it is done here so this is
	// a complete working example and does not leave temporary files laying
	// around.
	dbPath := filepath.Join(os.TempDir(), "exampleprocessblock")
	_ = os.RemoveAll(dbPath)
	db, err := database.Create("ffldb", dbPath, chaincfg.MainNetParams.Net)
	if err != nil {
		fmt.Printf("Failed to create database: %v\n", err)
		return
	}
	defer os.RemoveAll(dbPath)
	defer db.Close()
	// Create a new BlockChain instance using the underlying database for
	// the main bitcoin network. This example does not demonstrate some
	// of the other available configuration options such as specifying a
	// notification callback and signature cache. Also, the caller would
	// ordinarily keep a reference to the median time source and add time
	// values obtained from other peers on the network so the local time is
	// adjusted to be in agreement with other peers.
	chain, err := blockchain.New(&blockchain.Config{
		DB:          db,
		ChainParams: &chaincfg.MainNetParams,
		TimeSource:  blockchain.NewMedianTime(),
	})
	if err != nil {
		fmt.Printf("Failed to create chain instance: %v\n", err)
		return
	}
	// Process a block. For this example, we are going to intentionally
	// cause an error by trying to process the genesis block which already
	// exists.
	genesisBlock := btcutil.NewBlock(chaincfg.MainNetParams.GenesisBlock)
	isMainChain, isOrphan, err := chain.ProcessBlock(genesisBlock,
		blockchain.BFNone)
	if err != nil {
		// The expected path: duplicate genesis block is rejected.
		fmt.Printf("Failed to process block: %v\n", err)
		return
	}
	fmt.Printf("Block accepted. Is it on the main chain?: %v", isMainChain)
	fmt.Printf("Block accepted. Is it an orphan?: %v", isOrphan)
	// Output:
	// Failed to process block: already have block 000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f
}
// This example demonstrates how to convert the compact "bits" in a block header
// which represent the target difficulty to a big integer and display it using
// the typical hex notation.
func ExampleCompactToBig() {
	// Convert the bits from block 300000 in the main block chain.
	bits := uint32(419465580)
	targetDifficulty := blockchain.CompactToBig(bits)
	// Display it in hex, zero-padded to 32 bytes (64 hex digits).
	fmt.Printf("%064x\n", targetDifficulty.Bytes())
	// Output:
	// 0000000000000000896c00000000000000000000000000000000000000000000
}
// This example demonstrates how to convert a target difficulty into the compact
// "bits" in a block header which represent that target difficulty .
func ExampleBigToCompact() {
	// Convert the target difficulty from block 300000 in the main block
	// chain to compact form.
	t := "0000000000000000896c00000000000000000000000000000000000000000000"
	targetDifficulty, success := new(big.Int).SetString(t, 16)
	if !success {
		fmt.Println("invalid target difficulty")
		return
	}
	bits := blockchain.BigToCompact(targetDifficulty)
	fmt.Println(bits)
	// Output:
	// 419465580
}
| isc |
dwalldorf/owTracker | src/AppBundle/Exception/NotFoundException.php | 263 | <?php
namespace AppBundle\Exception;
use Symfony\Component\HttpFoundation\Response;
class NotFoundException extends ApiBaseException{
    /**
     * HTTP status code sent to the client when this exception is raised.
     *
     * @return int
     */
    public function getHttpStatusCode() {
        return Response::HTTP_NOT_FOUND;
    }
}
openstreetmap/iD | test/spec/validations/missing_tag.js | 4376 | describe('iD.validations.missing_tag', function () {
var context;
beforeEach(function() {
context = iD.coreContext().assetPath('../dist/').init();
});
function createWay(tags) {
var n1 = iD.osmNode({id: 'n-1', loc: [4,4]});
var n2 = iD.osmNode({id: 'n-2', loc: [4,5]});
var w = iD.osmWay({id: 'w-1', nodes: ['n-1', 'n-2'], tags: tags});
context.perform(
iD.actionAddEntity(n1),
iD.actionAddEntity(n2),
iD.actionAddEntity(w)
);
}
function createRelation(tags) {
var n1 = iD.osmNode({id: 'n-1', loc: [4,4]});
var n2 = iD.osmNode({id: 'n-2', loc: [4,5]});
var n3 = iD.osmNode({id: 'n-3', loc: [5,5]});
var w = iD.osmWay({id: 'w-1', nodes: ['n-1', 'n-2', 'n-3', 'n-1']});
var r = iD.osmRelation({id: 'r-1', members: [{id: 'w-1'}], tags: tags});
context.perform(
iD.actionAddEntity(n1),
iD.actionAddEntity(n2),
iD.actionAddEntity(n3),
iD.actionAddEntity(w),
iD.actionAddEntity(r)
);
}
function validate() {
var validator = iD.validationMissingTag(context);
var changes = context.history().changes();
var entities = changes.modified.concat(changes.created);
var issues = [];
entities.forEach(function(entity) {
issues = issues.concat(validator(entity, context.graph()));
});
return issues;
}
it('has no errors on init', function() {
var issues = validate();
expect(issues).to.have.lengthOf(0);
});
it('ignores way with descriptive tags', function() {
createWay({ leisure: 'park' });
var issues = validate();
expect(issues).to.have.lengthOf(0);
});
it('ignores multipolygon with descriptive tags', function() {
createRelation({ leisure: 'park', type: 'multipolygon' });
var issues = validate();
expect(issues).to.have.lengthOf(0);
});
it('flags no tags', function() {
createWay({});
var issues = validate();
expect(issues).to.have.lengthOf(1);
var issue = issues[0];
expect(issue.type).to.eql('missing_tag');
expect(issue.subtype).to.eql('any');
expect(issue.entityIds).to.have.lengthOf(1);
expect(issue.entityIds[0]).to.eql('w-1');
});
it('flags no descriptive tags', function() {
createWay({ name: 'Main Street', source: 'Bing' });
var issues = validate();
expect(issues).to.have.lengthOf(1);
var issue = issues[0];
expect(issue.type).to.eql('missing_tag');
expect(issue.subtype).to.eql('descriptive');
expect(issue.entityIds).to.have.lengthOf(1);
expect(issue.entityIds[0]).to.eql('w-1');
});
it('flags no descriptive tags on multipolygon', function() {
createRelation({ name: 'City Park', source: 'Bing', type: 'multipolygon' });
var issues = validate();
expect(issues).to.have.lengthOf(1);
var issue = issues[0];
expect(issue.type).to.eql('missing_tag');
expect(issue.subtype).to.eql('descriptive');
expect(issue.entityIds).to.have.lengthOf(1);
expect(issue.entityIds[0]).to.eql('r-1');
});
    // Relations additionally require a type tag.
    it('flags no type tag on relation', function() {
        createRelation({ name: 'City Park', source: 'Bing', leisure: 'park' });
        var issues = validate();
        expect(issues).to.have.lengthOf(1);
        var issue = issues[0];
        expect(issue.type).to.eql('missing_tag');
        expect(issue.subtype).to.eql('relation_type');
        expect(issue.entityIds).to.have.lengthOf(1);
        expect(issue.entityIds[0]).to.eql('w-1');
    });

    it('ignores highway with classification', function() {
        createWay({ highway: 'primary' });
        var issues = validate();
        expect(issues).to.have.lengthOf(0);
    });

    // highway=road is a placeholder value, not a real classification.
    it('flags highway=road', function() {
        createWay({ highway: 'road' });
        var issues = validate();
        expect(issues).to.have.lengthOf(1);
        var issue = issues[0];
        expect(issue.type).to.eql('missing_tag');
        expect(issue.subtype).to.eql('highway_classification');
        expect(issue.entityIds).to.have.lengthOf(1);
        expect(issue.entityIds[0]).to.eql('w-1');
    });
});
heroku/heroku-pg | test/commands/killall.js | 1023 | 'use strict'
/* global describe it beforeEach afterEach */
const cli = require('heroku-cli-util')
const expect = require('unexpected')
const nock = require('nock')
const proxyquire = require('proxyquire')
const db = {id: 1, name: 'postgres-1', plan: {name: 'heroku-postgresql:hobby-dev'}}
// Stub fetcher whose addon() always resolves to the canned database above.
const fetcher = () => ({
  addon: () => db
})
const cmd = proxyquire('../../commands/killall', {
'../lib/fetcher': fetcher
})
describe('pg:killall', () => {
  let pg

  // Mock the CLI console/exit and set up an HTTP interceptor for the
  // starter-tier postgres API.
  beforeEach(() => {
    cli.mockConsole()
    cli.exit.mock()
    pg = nock('https://postgres-starter-api.heroku.com:443')
  })

  afterEach(() => {
    pg.done()
    nock.cleanAll()
  })

  // NOTE(review): the title looks copy-pasted from another test; what is
  // actually verified is that the command POSTs a connection reset and
  // reports "Terminating connections".
  it('waits for all databases to be available', () => {
    pg
      .post('/client/v11/databases/1/connection_reset').reply(200)
    return cmd.run({app: 'myapp', args: {}, flags: {}})
      .then(() => expect(cli.stdout, 'to equal', ''))
      .then(() => expect(cli.stderr, 'to equal', 'Terminating connections for all credentials... done\n'))
  })
})
| isc |
ibc/MediaSoup | worker/deps/libwebrtc/libwebrtc/modules/congestion_controller/rtp/transport_feedback_adapter.cc | 9748 | /*
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#define MS_CLASS "webrtc::TransportFeedbackAdapter"
// #define MS_LOG_DEV_LEVEL 3
#include "modules/congestion_controller/rtp/transport_feedback_adapter.h"
#include "api/units/timestamp.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "system_wrappers/source/field_trial.h"
#include "mediasoup_helpers.h"
#include "Logger.hpp"
#include "RTC/RTCP/FeedbackRtpTransport.hpp"
#include <stdlib.h>
#include <algorithm>
#include <cmath>
#include <utility>
namespace webrtc {
namespace {
// Converts a legacy webrtc::PacketFeedback entry into the PacketResult
// structure used by the network controller interfaces.  A packet that was
// never received maps to an infinite receive time.
PacketResult NetworkPacketFeedbackFromRtpPacketFeedback(
    const webrtc::PacketFeedback& pf) {
  PacketResult feedback;
  if (pf.arrival_time_ms == webrtc::PacketFeedback::kNotReceived) {
    feedback.receive_time = Timestamp::PlusInfinity();
  } else {
    feedback.receive_time = Timestamp::ms(pf.arrival_time_ms);
  }
  feedback.sent_packet.sequence_number = pf.long_sequence_number;
  feedback.sent_packet.send_time = Timestamp::ms(pf.send_time_ms);
  feedback.sent_packet.size = DataSize::bytes(pf.payload_size);
  feedback.sent_packet.pacing_info = pf.pacing_info;
  feedback.sent_packet.prior_unacked_data =
      DataSize::bytes(pf.unacknowledged_data);
  return feedback;
}
} // namespace
// Sentinel meaning "no timestamp recorded yet".
const int64_t kNoTimestamp = -1;
// How long the sent-packet history is retained before pruning (one minute).
const int64_t kSendTimeHistoryWindowMs = 60000;

TransportFeedbackAdapter::TransportFeedbackAdapter()
    : allow_duplicates_(field_trial::IsEnabled(
          "WebRTC-TransportFeedbackAdapter-AllowDuplicates")),
      send_time_history_(kSendTimeHistoryWindowMs),
      current_offset_ms_(kNoTimestamp),
      last_timestamp_us_(kNoTimestamp),
      local_net_id_(0),
      remote_net_id_(0) {}

TransportFeedbackAdapter::~TransportFeedbackAdapter() {
}
// Records an outgoing packet (payload plus |overhead_bytes|) in the send
// time history so that later transport feedback can be matched against it,
// then notifies registered observers.
void TransportFeedbackAdapter::AddPacket(const RtpPacketSendInfo& packet_info,
                                         size_t overhead_bytes,
                                         Timestamp creation_time) {
  {
    PacketFeedback packet_feedback(
        creation_time.ms(), packet_info.transport_sequence_number,
        packet_info.length + overhead_bytes, local_net_id_, remote_net_id_,
        packet_info.pacing_info);
    if (packet_info.has_rtp_sequence_number) {
      packet_feedback.ssrc = packet_info.ssrc;
      packet_feedback.rtp_sequence_number = packet_info.rtp_sequence_number;
    }
    // MS_NOTE: TODO remove.
    // MS_DUMP("packet_feedback.arrival_time_ms: %" PRIi64, packet_feedback.arrival_time_ms);
    // MS_DUMP("packet_feedback.send_time_ms: %" PRIi64, packet_feedback.send_time_ms);
    // MS_DUMP("packet_feedback.sequence_number: %" PRIu16, packet_feedback.sequence_number);
    // MS_DUMP("packet_feedback.long_sequence_number: %" PRIi64, packet_feedback.long_sequence_number);
    // MS_DUMP("packet_feedback.payload_size: %zu", packet_feedback.payload_size);
    // MS_DUMP("packet_feedback.unacknowledged_data: %zu", packet_feedback.unacknowledged_data);
    // MS_DUMP("packet_feedback.local_net_id: %" PRIu16, packet_feedback.local_net_id);
    // MS_DUMP("packet_feedback.remote_net_id: %" PRIu16, packet_feedback.remote_net_id);
    // MS_DUMP("packet_feedback.ssrc: %" PRIu32, packet_feedback.ssrc.value());
    // MS_DUMP("packet_feedback.rtp_sequence_number: %" PRIu16, packet_feedback.rtp_sequence_number);
    // Prune stale entries before inserting the new one.
    send_time_history_.RemoveOld(creation_time.ms());
    send_time_history_.AddNewPacket(std::move(packet_feedback));
  }
  {
    for (auto* observer : observers_) {
      observer->OnPacketAdded(packet_info.ssrc,
                              packet_info.transport_sequence_number);
    }
  }
}
// Marks a tracked packet as sent (stamping its actual send time) and, when it
// is found in history, returns a SentPacket event for the congestion
// controller. Untracked packets that still count toward allocation are
// recorded as untracked bytes. Returns absl::nullopt when no event applies.
absl::optional<SentPacket> TransportFeedbackAdapter::ProcessSentPacket(
    const rtc::SentPacket& sent_packet) {
  // TODO(srte): Only use one way to indicate that packet feedback is used.
  if (sent_packet.info.included_in_feedback || sent_packet.packet_id != -1) {
    SendTimeHistory::Status send_status = send_time_history_.OnSentPacket(
        sent_packet.packet_id, sent_packet.send_time_ms);
    absl::optional<PacketFeedback> packet;
    // Duplicate sends are ignored unless the field trial allows them.
    if (allow_duplicates_ ||
        send_status != SendTimeHistory::Status::kDuplicate) {
      packet = send_time_history_.GetPacket(sent_packet.packet_id);
    }
    if (packet) {
      SentPacket msg;
      msg.size = DataSize::bytes(packet->payload_size);
      msg.send_time = Timestamp::ms(packet->send_time_ms);
      msg.sequence_number = packet->long_sequence_number;
      msg.prior_unacked_data = DataSize::bytes(packet->unacknowledged_data);
      msg.data_in_flight =
          send_time_history_.GetOutstandingData(local_net_id_, remote_net_id_);
      return msg;
    }
  } else if (sent_packet.info.included_in_allocation) {
    send_time_history_.AddUntracked(sent_packet.info.packet_size_bytes,
                                    sent_packet.send_time_ms);
  }
  return absl::nullopt;
}
// Converts an incoming RTCP transport feedback packet into a
// TransportPacketsFeedback event: per-packet feedback is translated, observers
// are notified, and in-flight data before/after processing is captured.
// Returns absl::nullopt for feedback that maps to no packets.
absl::optional<TransportPacketsFeedback>
TransportFeedbackAdapter::ProcessTransportFeedback(
    const RTC::RTCP::FeedbackRtpTransportPacket& feedback,
    Timestamp feedback_receive_time) {
  // Snapshot outstanding data before the feedback is applied.
  DataSize prior_in_flight = GetOutstandingData();
  last_packet_feedback_vector_ =
      GetPacketFeedbackVector(feedback, feedback_receive_time);
  {
    for (auto* observer : observers_) {
      observer->OnPacketFeedbackVector(last_packet_feedback_vector_);
    }
  }
  std::vector<PacketFeedback> feedback_vector = last_packet_feedback_vector_;
  if (feedback_vector.empty())
    return absl::nullopt;
  TransportPacketsFeedback msg;
  for (const PacketFeedback& rtp_feedback : feedback_vector) {
    if (rtp_feedback.send_time_ms != PacketFeedback::kNoSendTime) {
      // NOTE: renamed from `feedback` to avoid shadowing the function
      // parameter of the same name.
      auto packet_result = NetworkPacketFeedbackFromRtpPacketFeedback(rtp_feedback);
      msg.packet_feedbacks.push_back(packet_result);
    } else if (rtp_feedback.arrival_time_ms == PacketFeedback::kNotReceived) {
      // No send time and not received: represent as "never arrives".
      msg.sendless_arrival_times.push_back(Timestamp::PlusInfinity());
    } else {
      // Received, but we never saw it being sent (e.g. history expired).
      msg.sendless_arrival_times.push_back(
          Timestamp::ms(rtp_feedback.arrival_time_ms));
    }
  }
  {
    absl::optional<int64_t> first_unacked_send_time_ms =
        send_time_history_.GetFirstUnackedSendTime();
    if (first_unacked_send_time_ms)
      msg.first_unacked_send_time = Timestamp::ms(*first_unacked_send_time_ms);
  }
  msg.feedback_time = feedback_receive_time;
  msg.prior_in_flight = prior_in_flight;
  msg.data_in_flight = GetOutstandingData();
  return msg;
}
// Returns the amount of sent-but-unacknowledged data on the current
// (local, remote) network-id pair.
DataSize TransportFeedbackAdapter::GetOutstandingData() const {
  return send_time_history_.GetOutstandingData(local_net_id_, remote_net_id_);
}
// Expands a transport feedback packet into one PacketFeedback per covered
// sequence number (both received and lost), converting the feedback's
// relative receive-time deltas into the adapter's local time base. Entries
// from other network-id pairs are filtered out.
std::vector<PacketFeedback> TransportFeedbackAdapter::GetPacketFeedbackVector(
    const RTC::RTCP::FeedbackRtpTransportPacket& feedback,
    Timestamp feedback_time) {
  // Add timestamp deltas to a local time base selected on first packet arrival.
  // This won't be the true time base, but makes it easier to manually inspect
  // time stamps.
  if (last_timestamp_us_ == kNoTimestamp) {
    current_offset_ms_ = feedback_time.ms();
  } else {
    current_offset_ms_ +=
        mediasoup_helpers::FeedbackRtpTransport::GetBaseDeltaUs(&feedback, last_timestamp_us_) / 1000;
  }
  last_timestamp_us_ =
      mediasoup_helpers::FeedbackRtpTransport::GetBaseTimeUs(&feedback);
  std::vector<PacketFeedback> packet_feedback_vector;
  if (feedback.GetPacketStatusCount() == 0) {
    MS_WARN_DEV("empty transport feedback packet received");
    return packet_feedback_vector;
  }
  packet_feedback_vector.reserve(feedback.GetPacketStatusCount());
  {
    size_t failed_lookups = 0;
    int64_t offset_us = 0;
    int64_t timestamp_ms = 0;
    // NOTE: uint16_t arithmetic intentionally wraps with the RTP sequence
    // number space.
    uint16_t seq_num = feedback.GetBaseSequenceNumber();
    for (const auto& packet : mediasoup_helpers::FeedbackRtpTransport::GetReceivedPackets(&feedback)) {
      // Insert into the vector those unreceived packets which precede this
      // iteration's received packet.
      for (; seq_num != packet.sequence_number(); ++seq_num) {
        PacketFeedback packet_feedback(PacketFeedback::kNotReceived, seq_num);
        // Note: Element not removed from history because it might be reported
        // as received by another feedback.
        if (!send_time_history_.GetFeedback(&packet_feedback, false))
          ++failed_lookups;
        if (packet_feedback.local_net_id == local_net_id_ &&
            packet_feedback.remote_net_id == remote_net_id_) {
          packet_feedback_vector.push_back(packet_feedback);
        }
      }
      // Handle this iteration's received packet.
      offset_us += packet.delta_us();
      timestamp_ms = current_offset_ms_ + (offset_us / 1000);
      PacketFeedback packet_feedback(timestamp_ms, packet.sequence_number());
      // Received packets are removed from history (second argument true).
      if (!send_time_history_.GetFeedback(&packet_feedback, true))
        ++failed_lookups;
      if (packet_feedback.local_net_id == local_net_id_ &&
          packet_feedback.remote_net_id == remote_net_id_) {
        packet_feedback_vector.push_back(packet_feedback);
      }
      ++seq_num;
    }
    if (failed_lookups > 0) {
      MS_WARN_DEV("failed to lookup send time for %zu"
                  " packet%s, send time history too small?",
                  failed_lookups,
                  (failed_lookups > 1 ? "s" : ""));
    }
  }
  return packet_feedback_vector;
}
// Returns the per-packet feedback produced by the most recent call to
// ProcessTransportFeedback().
std::vector<PacketFeedback>
TransportFeedbackAdapter::GetTransportFeedbackVector() const {
  return last_packet_feedback_vector_;
}
} // namespace webrtc
| isc |
sjsyrek/maryamyriameliamurphies.js | source/tuple/func.js | 6471 | /**
* maryamyriameliamurphies.js
* A library of Haskell-style morphisms ported to ES2015 JavaScript.
*
* tuple/func.js
*
* @file Tuple functions.
* @license ISC
*/
/** @module tuple/func */
import {Tuple} from '../tuple';
import {error} from '../error';
/**
 * The `unit` object, an empty tuple. Note that `isTuple(unit) === false`, as in Haskell.
 * <br>`Haskell> () :: ()`
 * @constant {Tuple}
 */
export const unit = new Tuple();
/**
 * Create a new `Tuple` from any number of values. A single value is returned
 * unaltered, and `unit`, the empty tuple, is returned when no arguments are
 * passed (or when the first argument is `undefined`).
 * <br>`Haskell> (,) :: a -> b -> (a, b)`
 * @param {...*} as - The values to put into a `Tuple`
 * @returns {Tuple} A new `Tuple`
 * @kind function
 * @example
 * tuple(10,20); // => (10,20)
 */
export const tuple = (...as) => {
  const [first, second] = as;
  if (first === undefined) { return unit; }
  return second === undefined ? first : new Tuple(...as);
};
/**
 * Extract the first value of a tuple.
 * <br>`Haskell> fst :: (a, b) -> a`
 * @param {Tuple} p - A `Tuple`
 * @returns {*} The first value of the `Tuple`.
 * @kind function
 * @example
 * const tup = tuple(10,20);
 * fst(tup); // => 10
 */
export const fst = p => {
  if (!isTuple(p)) { return error.tupleError(p, fst); }
  return p[1];
};
/**
 * Extract the second value of a tuple.
 * <br>`Haskell> snd :: (a, b) -> b`
 * @param {Tuple} p - A `Tuple`
 * @returns {*} The second value of the `Tuple`.
 * @kind function
 * @example
 * const tup = tuple(10,20);
 * snd(tup); // => 20
 */
export const snd = p => {
  if (!isTuple(p)) { return error.tupleError(p, snd); }
  return p[2];
};
/**
 * Convert an uncurried function to a curried function. A function that expects
 * a tuple argument becomes a function that binds one value and returns another
 * function binding the second value. It may be called with zero, one, or both
 * arguments bound. Currying and uncurrying are transitive.
 * <br>`Haskell> curry :: ((a, b) -> c) -> a -> b -> c`
 * @param {Function} f - The function to curry
 * @param {*} x - Any value, the first value of the new tuple argument
 * @param {*} y - Any value, the second value of the new tuple argument
 * @returns {Function} The curried function
 * @kind function
 * @example
 * const f = p => fst(p) - snd(p);
 * const a = curry(f);       // a === f()()
 * const b = a(100);         // b === f(100)()
 * const c = b(15);          // c === f(100)(15) === 85
 */
export const curry = (f, x, y) => {
  // Fully curried form: feed both values into f as a tuple.
  const curried = a => b => f.call(f, tuple(a, b));
  if (x === undefined) { return curried; }
  return y === undefined ? curried(x) : curried(x)(y);
};
/**
 * Convert a curried function to a single function that takes a tuple as an
 * argument—mostly useful for uncurrying functions previously curried with
 * `curry`. The curried function must have no arguments bound. Currying and
 * uncurrying are transitive.
 * <br>`Haskell> uncurry :: (a -> b -> c) -> (a, b) -> c`
 * @param {Function} f - The function to uncurry
 * @param {Tuple} p - The tuple from which to extract argument values for the function
 * @returns {Function} The uncurried function
 * @kind function
 * @example
 * const f = p => fst(p) - snd(p);
 * const p = tuple(100, 15);
 * const a = curry(f);       // a === f()()
 * const b = uncurry(a);     // b === f()
 * const c = b(p);           // c === 85
 * const d = uncurry(a, p)   // d === 85
 */
export const uncurry = (f, p) => {
  // Apply the curried f to both components of a tuple, or fail with a
  // tuple error for non-tuple input.
  const applyTuple = q =>
    isTuple(q) ? f.call(f, fst(q)).call(f, snd(q)) : error.tupleError(q, uncurry);
  return p === undefined ? applyTuple : applyTuple(p);
};
/**
 * Swap the values of a tuple. This function does not modify the original tuple.
 * <br>`Haskell> swap :: (a, b) -> (b, a)`
 * @param {Tuple} p - A `Tuple`
 * @returns {Tuple} A new `Tuple`, with the values of the first tuple swapped
 * @kind function
 * @example
 * const tup = tuple(10,20);
 * swap(tup); // => (20,10)
 */
export const swap = p => {
  if (!isTuple(p)) { return error.tupleError(p, swap); }
  return tuple(snd(p), fst(p));
};
/**
 * Determine whether an object is a `Tuple`. The empty tuple, `unit`, returns `false`.
 * @param {*} a - Any object
 * @returns {boolean} `true` if the object is a `Tuple` or `false` otherwise
 * @kind function
 */
// The `&&` expression is already a boolean, so the former `? true : false`
// ternary was redundant and has been removed.
export const isTuple = a => a instanceof Tuple && a !== unit;
/**
 * Check whether a `Tuple` is an empty tuple, or `unit`. Returns `true` if the
 * `Tuple` is `unit`, `false` if it is a non-empty `Tuple`, and throws a type
 * error otherwise.
 * @param {Tuple} p - A `Tuple`
 * @returns {boolean} `true` if the object is `unit`, `false` if it is a non-empty `Tuple`
 * @kind function
 * @example
 * isUnit(tuple(1,2));        // => false
 * isUnit(unit);              // => true
 * isUnit(tuple(unit, unit)); // => false
 */
export const isUnit = p => {
  if (isTuple(p)) { return false; }
  return p === unit ? true : error.typeError(p, isUnit);
};
/**
 * Convert an array into a `Tuple`. Returns the value at index 0 for single
 * element arrays and `unit`, the empty tuple, if the array is empty. The input
 * array is never modified. Note that this function will not work on
 * array-like objects.
 * @param {Array.<*>} arr - The array to convert
 * @returns {Tuple} A new `Tuple`, the converted array
 * @kind function
 * @example
 * const arr = [10,20];
 * fromArrayToTuple(arr); // => (10,20)
 */
export const fromArrayToTuple = arr => {
  if (Array.isArray(arr) === false) { return error.typeError(arr, fromArrayToTuple); }
  if (arr.length === 0) { return unit; }
  // Bug fix: the previous `arr.shift()` mutated the caller's array; reading
  // index 0 returns the same value without the side effect.
  if (arr.length === 1) { return arr[0]; }
  return Reflect.construct(Tuple, Array.from(arr));
}
/**
 * Convert a `Tuple` into an array.
 * @param {Tuple} p - The `Tuple` to convert.
 * @returns {Array.<*>} A new array, the converted `Tuple`.
 * @kind function
 * @example
 * const tup = tuple(10,20);
 * fromTupleToArray(tup); // => [10,20]
 */
export const fromTupleToArray = p => {
  if (!isTuple(p)) { return error.tupleError(p, fromTupleToArray); }
  return Object.getOwnPropertyNames(p).map(key => p[key]);
};
| isc |
heroku/heroku-apps | test/commands/ps/stop.js | 795 | 'use strict'
/* globals commands describe beforeEach it */
const cli = require('heroku-cli-util')
const nock = require('nock')
const cmd = commands.find((c) => c.topic === 'ps' && c.command === 'stop')
// Tests for the `ps:stop` CLI command. Heroku API calls are intercepted with
// nock; each test asserts (via api.done()) that the expected stop endpoint
// was actually hit.
describe('ps:stop', function () {
  beforeEach(() => {
    // Silence CLI output and drop any interceptors left by earlier tests.
    cli.mockConsole()
    nock.cleanAll()
  })

  it('stops all web dynos', function () {
    let api = nock('https://api.heroku.com')
      .post('/apps/myapp/dynos/web/actions/stop').reply(200)
    return cmd.run({app: 'myapp', args: {dyno: 'web'}})
      .then(() => api.done())
  })

  it('stops run.10 dyno', function () {
    let api = nock('https://api.heroku.com')
      .post('/apps/myapp/dynos/run.10/actions/stop').reply(200)
    return cmd.run({app: 'myapp', args: {dyno: 'run.10'}})
      .then(() => api.done())
  })
})
| isc |
evilsoft/crocks | src/core/isMap.spec.js | 988 | const test = require('tape')
const isFunction = require('./isFunction')
const unit = require('./_unit')
const isMap = require('./isMap')
// Exhaustive predicate test: isMap must accept only Map instances and reject
// every other common JS value category.
test('isMap predicate function', t => {
  t.ok(isFunction(isMap), 'is a function')

  t.ok(isMap(new Map()), 'returns true when passed a Map')

  t.notOk(isMap([]), 'returns false when passed an array')
  t.notOk(isMap(unit), 'returns false when passed a function')
  t.notOk(isMap(undefined), 'returns false when passed undefined')
  t.notOk(isMap(null), 'returns false when passed null')
  t.notOk(isMap(0), 'returns false when passed a falsey number')
  t.notOk(isMap(1), 'returns false when passed a truthy number')
  t.notOk(isMap(''), 'returns false when passed a falsey string')
  t.notOk(isMap('string'), 'returns false when passed a truthy string')
  t.notOk(isMap(false), 'returns false when passed false')
  t.notOk(isMap(true), 'returns false when passed true')
  t.notOk(isMap({}), 'returns false when passed an object')

  t.end()
})
| isc |
xdv/ripple-lib-java | ripple-bouncycastle/src/main/java/org/ripple/bouncycastle/crypto/tls/DTLSReliableHandshake.java | 14103 | package org.ripple.bouncycastle.crypto.tls;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.Vector;
import org.ripple.bouncycastle.util.Integers;
/**
 * Reliability layer for the DTLS handshake (RFC 6347, section 4.2.4):
 * outgoing handshake messages are fragmented to fit the record size and
 * retained for retransmission, while incoming fragments are reassembled,
 * de-duplicated, and delivered in message-sequence order.
 */
class DTLSReliableHandshake
{
    // How far ahead of next_receive_seq a fragment may be before it is dropped.
    private final static int MAX_RECEIVE_AHEAD = 10;

    private final DTLSRecordLayer recordLayer;

    // Running transcript hash over all handshake messages.
    private TlsHandshakeHash hash = new DeferredHash();

    // Reassemblers for the flight currently being received, keyed by message_seq.
    private Hashtable currentInboundFlight = new Hashtable();
    // The completed previous inbound flight; receiving it again in full
    // triggers a retransmit of our own last outbound flight.
    private Hashtable previousInboundFlight = null;
    // Messages of our current outbound flight, kept for retransmission.
    private Vector outboundFlight = new Vector();
    // True while we are building/sending an outbound flight.
    private boolean sending = true;

    private int message_seq = 0, next_receive_seq = 0;

    DTLSReliableHandshake(TlsContext context, DTLSRecordLayer transport)
    {
        this.recordLayer = transport;
        this.hash.init(context);
    }

    // Switch to the committed hash once the hello exchange has fixed the
    // digest algorithm(s).
    void notifyHelloComplete()
    {
        this.hash = this.hash.commit();
    }

    // Returns the current transcript hash without disturbing the running
    // digest (operates on a fork).
    byte[] getCurrentHash()
    {
        TlsHandshakeHash copyOfHash = hash.fork();
        byte[] result = new byte[copyOfHash.getDigestSize()];
        copyOfHash.doFinal(result, 0);
        return result;
    }

    // Queues and transmits one handshake message as part of the current
    // outbound flight, updating the transcript hash.
    void sendMessage(short msg_type, byte[] body)
        throws IOException
    {
        if (!sending)
        {
            // Switching direction: verify the inbound flight and start a new
            // outbound one.
            checkInboundFlight();
            sending = true;
            outboundFlight.removeAllElements();
        }

        Message message = new Message(message_seq++, msg_type, body);

        outboundFlight.addElement(message);

        writeMessage(message);
        updateHandshakeMessagesDigest(message);
    }

    // Blocks until the next in-order handshake message has been fully
    // reassembled, retransmitting our flight on timeouts with exponential
    // backoff (capped at 60 s).
    Message receiveMessage()
        throws IOException
    {
        if (sending)
        {
            sending = false;
            prepareInboundFlight();
        }

        // Check if we already have the next message waiting
        {
            DTLSReassembler next = (DTLSReassembler)currentInboundFlight.get(Integers.valueOf(next_receive_seq));
            if (next != null)
            {
                byte[] body = next.getBodyIfComplete();
                if (body != null)
                {
                    previousInboundFlight = null;
                    return updateHandshakeMessagesDigest(new Message(next_receive_seq++, next.getType(), body));
                }
            }
        }

        byte[] buf = null;

        // TODO Check the conditions under which we should reset this
        int readTimeoutMillis = 1000;

        for (; ; )
        {
            int receiveLimit = recordLayer.getReceiveLimit();
            if (buf == null || buf.length < receiveLimit)
            {
                buf = new byte[receiveLimit];
            }

            // TODO Handle records containing multiple handshake messages

            try
            {
                for (; ; )
                {
                    int received = recordLayer.receive(buf, 0, receiveLimit, readTimeoutMillis);
                    if (received < 0)
                    {
                        break;
                    }
                    // A handshake fragment header is 12 bytes; drop malformed
                    // or truncated records.
                    if (received < 12)
                    {
                        continue;
                    }
                    int fragment_length = TlsUtils.readUint24(buf, 9);
                    if (received != (fragment_length + 12))
                    {
                        continue;
                    }
                    int seq = TlsUtils.readUint16(buf, 4);
                    if (seq > (next_receive_seq + MAX_RECEIVE_AHEAD))
                    {
                        continue;
                    }
                    short msg_type = TlsUtils.readUint8(buf, 0);
                    int length = TlsUtils.readUint24(buf, 1);
                    int fragment_offset = TlsUtils.readUint24(buf, 6);
                    if (fragment_offset + fragment_length > length)
                    {
                        continue;
                    }

                    if (seq < next_receive_seq)
                    {
                        /*
                         * NOTE: If we receive the previous flight of incoming messages in full
                         * again, retransmit our last flight
                         */
                        if (previousInboundFlight != null)
                        {
                            DTLSReassembler reassembler = (DTLSReassembler)previousInboundFlight.get(Integers
                                .valueOf(seq));
                            if (reassembler != null)
                            {
                                reassembler.contributeFragment(msg_type, length, buf, 12, fragment_offset,
                                    fragment_length);
                                if (checkAll(previousInboundFlight))
                                {
                                    resendOutboundFlight();

                                    /*
                                     * TODO[DTLS] implementations SHOULD back off handshake packet
                                     * size during the retransmit backoff.
                                     */
                                    readTimeoutMillis = Math.min(readTimeoutMillis * 2, 60000);

                                    resetAll(previousInboundFlight);
                                }
                            }
                        }
                    }
                    else
                    {
                        DTLSReassembler reassembler = (DTLSReassembler)currentInboundFlight.get(Integers.valueOf(seq));
                        if (reassembler == null)
                        {
                            reassembler = new DTLSReassembler(msg_type, length);
                            currentInboundFlight.put(Integers.valueOf(seq), reassembler);
                        }

                        reassembler.contributeFragment(msg_type, length, buf, 12, fragment_offset, fragment_length);

                        if (seq == next_receive_seq)
                        {
                            byte[] body = reassembler.getBodyIfComplete();
                            if (body != null)
                            {
                                previousInboundFlight = null;
                                return updateHandshakeMessagesDigest(new Message(next_receive_seq++,
                                    reassembler.getType(), body));
                            }
                        }
                    }
                }
            }
            catch (IOException e)
            {
                // NOTE: Assume this is a timeout for the moment
            }

            // Timed out without completing the next message: retransmit our
            // flight and back off.
            resendOutboundFlight();

            /*
             * TODO[DTLS] implementations SHOULD back off handshake packet size during the
             * retransmit backoff.
             */
            readTimeoutMillis = Math.min(readTimeoutMillis * 2, 60000);
        }
    }

    // Completes the handshake. If we transmitted the final flight, installs a
    // retransmit handler so a repeat of the peer's last flight triggers a
    // retransmit of ours (RFC 6347, 4.2.4).
    void finish()
    {
        DTLSHandshakeRetransmit retransmit = null;
        if (!sending)
        {
            checkInboundFlight();
        }
        else if (currentInboundFlight != null)
        {
            /*
             * RFC 6347 4.2.4. In addition, for at least twice the default MSL defined for [TCP],
             * when in the FINISHED state, the node that transmits the last flight (the server in an
             * ordinary handshake or the client in a resumed handshake) MUST respond to a retransmit
             * of the peer's last flight with a retransmit of the last flight.
             */
            retransmit = new DTLSHandshakeRetransmit()
            {
                public void receivedHandshakeRecord(int epoch, byte[] buf, int off, int len)
                    throws IOException
                {
                    /*
                     * TODO Need to handle the case where the previous inbound flight contains
                     * messages from two epochs.
                     */
                    if (len < 12)
                    {
                        return;
                    }
                    int fragment_length = TlsUtils.readUint24(buf, off + 9);
                    if (len != (fragment_length + 12))
                    {
                        return;
                    }
                    int seq = TlsUtils.readUint16(buf, off + 4);
                    if (seq >= next_receive_seq)
                    {
                        return;
                    }

                    short msg_type = TlsUtils.readUint8(buf, off);

                    // TODO This is a hack that only works until we try to support renegotiation
                    int expectedEpoch = msg_type == HandshakeType.finished ? 1 : 0;
                    if (epoch != expectedEpoch)
                    {
                        return;
                    }

                    int length = TlsUtils.readUint24(buf, off + 1);
                    int fragment_offset = TlsUtils.readUint24(buf, off + 6);
                    if (fragment_offset + fragment_length > length)
                    {
                        return;
                    }

                    DTLSReassembler reassembler = (DTLSReassembler)currentInboundFlight.get(Integers.valueOf(seq));
                    if (reassembler != null)
                    {
                        reassembler.contributeFragment(msg_type, length, buf, off + 12, fragment_offset,
                            fragment_length);
                        if (checkAll(currentInboundFlight))
                        {
                            resendOutboundFlight();
                            resetAll(currentInboundFlight);
                        }
                    }
                }
            };
        }

        recordLayer.handshakeSuccessful(retransmit);
    }

    // Clears the transcript hash (e.g. after a HelloVerifyRequest).
    void resetHandshakeMessagesDigest()
    {
        hash.reset();
    }

    /**
     * Check that there are no "extra" messages left in the current inbound flight
     */
    private void checkInboundFlight()
    {
        Enumeration e = currentInboundFlight.keys();
        while (e.hasMoreElements())
        {
            Integer key = (Integer)e.nextElement();
            if (key.intValue() >= next_receive_seq)
            {
                // TODO Should this be considered an error?
            }
        }
    }

    // Rotates the inbound flight: the current one becomes "previous" (after a
    // reset) and a fresh table is started for the next flight.
    private void prepareInboundFlight()
    {
        resetAll(currentInboundFlight);
        previousInboundFlight = currentInboundFlight;
        currentInboundFlight = new Hashtable();
    }

    // Re-sends every message of the current outbound flight in a new epoch
    // write sequence.
    private void resendOutboundFlight()
        throws IOException
    {
        recordLayer.resetWriteEpoch();
        for (int i = 0; i < outboundFlight.size(); ++i)
        {
            writeMessage((Message)outboundFlight.elementAt(i));
        }
    }

    // Feeds a message into the transcript hash as a single unfragmented
    // handshake message (hello_request is excluded per TLS rules).
    private Message updateHandshakeMessagesDigest(Message message)
        throws IOException
    {
        if (message.getType() != HandshakeType.hello_request)
        {
            byte[] body = message.getBody();
            byte[] buf = new byte[12];
            TlsUtils.writeUint8(message.getType(), buf, 0);
            TlsUtils.writeUint24(body.length, buf, 1);
            TlsUtils.writeUint16(message.getSeq(), buf, 4);
            TlsUtils.writeUint24(0, buf, 6);
            TlsUtils.writeUint24(body.length, buf, 9);
            hash.update(buf, 0, buf.length);
            hash.update(body, 0, body.length);
        }
        return message;
    }

    // Splits a message into fragments that fit the record layer's send limit
    // and transmits them.
    private void writeMessage(Message message)
        throws IOException
    {
        int sendLimit = recordLayer.getSendLimit();
        int fragmentLimit = sendLimit - 12;

        // TODO Support a higher minimum fragment size?
        if (fragmentLimit < 1)
        {
            // TODO Should we be throwing an exception here?
            throw new TlsFatalAlert(AlertDescription.internal_error);
        }

        int length = message.getBody().length;

        // NOTE: Must still send a fragment if body is empty
        int fragment_offset = 0;
        do
        {
            int fragment_length = Math.min(length - fragment_offset, fragmentLimit);
            writeHandshakeFragment(message, fragment_offset, fragment_length);
            fragment_offset += fragment_length;
        }
        while (fragment_offset < length);
    }

    // Serializes one fragment (12-byte header + body slice) and sends it.
    private void writeHandshakeFragment(Message message, int fragment_offset, int fragment_length)
        throws IOException
    {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        TlsUtils.writeUint8(message.getType(), buf);
        TlsUtils.writeUint24(message.getBody().length, buf);
        TlsUtils.writeUint16(message.getSeq(), buf);
        TlsUtils.writeUint24(fragment_offset, buf);
        TlsUtils.writeUint24(fragment_length, buf);
        buf.write(message.getBody(), fragment_offset, fragment_length);

        byte[] fragment = buf.toByteArray();

        recordLayer.send(fragment, 0, fragment.length);
    }

    // True when every reassembler in the flight holds a complete body.
    private static boolean checkAll(Hashtable inboundFlight)
    {
        Enumeration e = inboundFlight.elements();
        while (e.hasMoreElements())
        {
            if (((DTLSReassembler)e.nextElement()).getBodyIfComplete() == null)
            {
                return false;
            }
        }
        return true;
    }

    // Clears the "seen fragments" state of every reassembler in the flight.
    private static void resetAll(Hashtable inboundFlight)
    {
        Enumeration e = inboundFlight.elements();
        while (e.hasMoreElements())
        {
            ((DTLSReassembler)e.nextElement()).reset();
        }
    }

    // Immutable record of one handshake message: sequence number, type, body.
    static class Message
    {
        private final int message_seq;
        private final short msg_type;
        private final byte[] body;

        private Message(int message_seq, short msg_type, byte[] body)
        {
            this.message_seq = message_seq;
            this.msg_type = msg_type;
            this.body = body;
        }

        public int getSeq()
        {
            return message_seq;
        }

        public short getType()
        {
            return msg_type;
        }

        public byte[] getBody()
        {
            return body;
        }
    }
}
| isc |
leighmcculloch/vagrant-docker-compose | lib/vagrant-docker-compose/cap/linux/docker_compose_get_info.rb | 793 | module VagrantPlugins
module DockerComposeProvisioner
module Cap
module Linux
module DockerComposeGetInfo
def self.docker_compose_get_info(machine, config)
comm = machine.communicate
kernel_name = ""
comm.execute("uname -s") do |type, data|
if type == :stdout
kernel_name << data
end
end
kernel_name.strip!
machine_hardware_name = ""
comm.execute("uname -m") do |type, data|
if type == :stdout
machine_hardware_name << data
end
end
machine_hardware_name.strip!
[machine_hardware_name, kernel_name]
end
end
end
end
end
end
| isc |
generalhenry/nodeschool-interactive | lib/proxy.js | 227 | var httpProxy = require('http-proxy');
// Logs proxy failures instead of letting them crash the process.
const handleProxyError = (err) => {
  console.log('proxy error');
  console.error(err);
};

// A single shared proxy server instance, exported for use by the app.
var proxy = module.exports = httpProxy.createProxyServer({});
proxy.on('error', handleProxyError);
isaacs/st | test/dot-common.js | 846 | const path = require('path')
const http = require('http')
const request = require('request')
const { test, teardown } = require('tap')
const st = require('../st.js')
let address
let server
// Mount the dotted-dir fixture with the dot-file policy under test
// (global.dot is set by the including test file).
const opts = {
  dot: global.dot,
  path: path.join(__dirname, 'fixtures', '.dotted-dir')
}
const mount = st(opts)
// Issue an HTTP request against the test server for the given path,
// bracketing the host with [] when the server bound an IPv6 address.
const req = (url, cb) => {
  const { address: addr, family, port } = address
  const host = family === 'IPv6' ? `[${addr}]` : addr
  request({ url: `http://${host}:${port}${url}` }, cb)
}
// Start an HTTP server that serves the mount and 404s everything else; the
// bound address is captured for req() once listening.
test('setup', (t) => {
  server = http.createServer((req, res) => {
    if (!mount(req, res)) {
      res.statusCode = 404
      return res.end(`Not a match: ${req.url}`)
    }
  }).listen(0, '127.0.0.1', () => {
    t.pass('listening')
    address = server.address()
    t.end()
  })
})

// Ensure the server is closed so the test process can exit.
teardown(() => {
  server.close()
})

// Shared request helper for the files that include this common module.
module.exports.req = req
| isc |
beloblotskiy/avb-scite | SciTEStartup.lua | 137 | dofile (props["SciteDefaultHome"].."\\tools\\COMMON.lua")
dofile (props["SciteDefaultHome"].."\\tools\\highlighting_identical_text.lua") | isc |
vfil/rps | test/LogStore-specs.js | 554 | 'use strict';
var expect = require('chai').expect;
var LogStore = require('../src/js/domain/logStore.js');
// LogStore keeps an ordered history of gestures per player; verify records
// are appended in order and kept separate per player id.
describe('LogStore specs:', function () {
  it('#record should record gesture for player', function () {
    var logStore = LogStore();
    logStore.record('p1', 'rock');
    logStore.record('p1', 'paper');
    logStore.record('p2', 'paper');
    logStore.record('p2', 'rock');
    expect(logStore.getLogs('p1')).to.eql(['rock', 'paper']);
    expect(logStore.getLogs('p2')).to.eql(['paper', 'rock']);
  });
});
| isc |
mapbox/minjur | minjur-mp.cpp | 8010 |
#include <cstdlib>
#include <cstring>
#include <getopt.h>
#include <iostream>
#include <memory>
#include <string>
#include <osmium/area/assembler.hpp>
#include <osmium/area/multipolygon_collector.hpp>
#include <osmium/handler/check_order.hpp>
#include <osmium/io/any_input.hpp>
#include <osmium/memory/buffer.hpp>
#include <osmium/osm.hpp>
#include <osmium/osm/tag.hpp>
#include <osmium/visitor.hpp>
// these must be include in this order
#include <osmium/index/map/all.hpp>
#include <osmium/handler/node_locations_for_ways.hpp>
#include "minjur_version.hpp"
#include "json_feature.hpp"
#include "json_handler.hpp"
using index_type = osmium::index::map::Map<osmium::unsigned_object_id_type, osmium::Location>;
using location_handler_type = osmium::handler::NodeLocationsForWays<index_type>;
// Converts OSM objects to GeoJSON features: tagged nodes become Points, ways
// become LineStrings, and assembled areas become MultiPolygons. Geometry
// failures are reported (not fatal) via the base class error file.
class JSONAreaHandler : public JSONHandler {

public:

    JSONAreaHandler(const std::string& error_file, const std::string& attr_prefix, bool with_id) :
        JSONHandler(error_file, attr_prefix, with_id) {
    }

    // Emit a Point feature for each tagged node; untagged nodes are skipped.
    void node(const osmium::Node& node) {
        if (node.tags().empty()) {
            return;
        }
        try {
            JSONFeature feature{attr_names()};
            if (with_id()) {
                // "n" prefix distinguishes node ids from way/area ids.
                feature.add_id("n", node.id());
            }
            feature.add_point(node);
            feature.add_properties(node);
            feature.append_to(buffer());
        } catch (const osmium::geometry_error&) {
            report_geometry_problem(node, "geometry_error");
        } catch (const osmium::invalid_location&) {
            report_geometry_problem(node, "invalid_location");
        }
        maybe_flush();
    }

    // Emit a LineString feature for each way with at least two nodes.
    void way(const osmium::Way& way) {
        if (way.nodes().size() <= 1) {
            return;
        }
        try {
            JSONFeature feature{attr_names()};
            if (with_id()) {
                feature.add_id("w", way.id());
            }
            feature.add_linestring(way);
            feature.add_properties(way);
            feature.append_to(buffer());
        } catch (const osmium::geometry_error&) {
            report_geometry_problem(way, "geometry_error");
        } catch (const osmium::invalid_location&) {
            report_geometry_problem(way, "invalid_location");
        }
        maybe_flush();
    }

    // Emit a MultiPolygon feature for each assembled (multi)polygon area.
    void area(const osmium::Area& area) {
        try {
            JSONFeature feature{attr_names()};
            if (with_id()) {
                feature.add_id("a", area.id());
            }
            feature.add_multipolygon(area);
            feature.add_properties(area);
            feature.append_to(buffer());
        } catch (const osmium::geometry_error&) {
            report_geometry_problem(area, "geometry_error");
        } catch (const osmium::invalid_location&) {
            report_geometry_problem(area, "invalid_location");
        }
        maybe_flush();
    }

}; // class JSONAreaHandler
/* ================================================== */
// Print the command-line usage summary to stdout.
void print_help() {
    static const char* const help_text =
        "minjur-mp [OPTIONS] INFILE\n\n"
        "Output is always to stdout.\n"
        "\nOptions:\n"
        "  -e, --error-file=FILE      Write errors to file\n"
        "  -h, --help                 This help message\n"
        "  -v, --version              Display version\n"
        "  -i, --with-id              Add unique id to each feature\n"
        "  -l, --location-store=TYPE  Set location store\n"
        "  -L, --list-location-stores Show available location stores\n"
        "  -n, --nodes=sparse|dense   Are node IDs sparse or dense?\n"
        "  -a, --attr-prefix=PREFIX   Optional prefix for attributes, defaults to '@'\n";
    std::cout << help_text;
}
// Print the program version (from minjur_version.hpp) to stdout.
void print_version() {
    std::cout << MINJUR_VERSION_STRING << "\n";
}
// Entry point: parse options, then run two passes over the input OSM file —
// pass 1 collects relations for multipolygon assembly, pass 2 resolves node
// locations and streams GeoJSON features (including assembled areas) to
// stdout via JSONAreaHandler.
int main(int argc, char* argv[]) {
    const auto& map_factory = osmium::index::MapFactory<osmium::unsigned_object_id_type, osmium::Location>::instance();

    static struct option long_options[] = {
        {"error-file", required_argument, 0, 'e'},
        {"help", no_argument, 0, 'h'},
        {"version", no_argument, 0, 'v'},
        {"with-id", no_argument, 0, 'i'},
        {"location-store", required_argument, 0, 'l'},
        {"list-location-stores", no_argument, 0, 'L'},
        {"nodes", required_argument, 0, 'n'},
        {"attr-prefix", required_argument, 0, 'a'},
        {0, 0, 0, 0}
    };

    std::string location_store;
    std::string error_file;
    std::string attr_prefix = "@";
    bool nodes_dense = false;
    bool with_id = false;

    while (true) {
        int c = getopt_long(argc, argv, "e:hivl:Ln:a:", long_options, 0);
        if (c == -1) {
            break;
        }

        switch (c) {
            case 'e':
                error_file = optarg;
                break;
            case 'h':
                print_help();
                std::exit(0);
            case 'v':
                print_version();
                std::exit(0);
            case 'i':
                with_id = true;
                break;
            case 'l':
                location_store = optarg;
                break;
            case 'L':
                std::cout << "Available map types:\n";
                for (const auto& map_type : map_factory.map_types()) {
                    std::cout << "  " << map_type << "\n";
                }
                std::exit(0);
            case 'n':
                if (!std::strcmp(optarg, "sparse")) {
                    nodes_dense = false;
                } else if (!std::strcmp(optarg, "dense")) {
                    nodes_dense = true;
                } else {
                    std::cerr << "Set --nodes, -n to 'sparse' or 'dense'\n";
                    std::exit(1);
                }
                break;
            case 'a':
                attr_prefix = optarg;
                break;
            default:
                std::exit(1);
        }
    }

    // Pick a default location store: mmap-backed if available, else in-memory.
    if (location_store.empty()) {
        location_store = nodes_dense ? "dense" : "sparse";
        if (map_factory.has_map_type(location_store + "_mmap_array")) {
            location_store.append("_mmap_array");
        } else {
            location_store.append("_mem_array");
        }
    }
    std::cerr << "Using the '" << location_store << "' location store. Use -l or -n to change this.\n";

    std::string input_filename;
    const int remaining_args = argc - optind;
    if (remaining_args == 1) {
        input_filename = argv[optind];
        std::cerr << "Reading from '" << input_filename << "'...\n";
    } else {
        std::cerr << "Usage: " << argv[0] << " [OPTIONS] INFILE\n";
        std::exit(1);
    }

    osmium::area::Assembler::config_type assembler_config;
    osmium::area::MultipolygonCollector<osmium::area::Assembler> collector{assembler_config};

    // Pass 1: relations only, to learn which ways belong to multipolygons.
    std::cerr << "Pass 1...\n";
    osmium::io::Reader reader1{input_filename, osmium::osm_entity_bits::relation};
    collector.read_relations(reader1);
    reader1.close();
    std::cerr << "Pass 1 done\n";

    std::unique_ptr<index_type> index = map_factory.create_map(location_store);
    location_handler_type location_handler{*index};
    location_handler.ignore_errors();

    JSONAreaHandler json_handler{error_file, attr_prefix, with_id};
    osmium::handler::CheckOrder check_order_handler;

    // Pass 2: full read; assembled areas are fed back into the JSON handler.
    std::cerr << "Pass 2...\n";
    osmium::io::Reader reader2{input_filename};
    osmium::apply(reader2, check_order_handler, location_handler, json_handler, collector.handler([&json_handler](osmium::memory::Buffer&& buffer) {
        osmium::apply(buffer, json_handler);
    }));
    reader2.close();
    std::cerr << "Pass 2 done\n";

    if (json_handler.geometry_error_count()) {
        std::cerr << "Number of geometry errors (not written to output): " << json_handler.geometry_error_count() << "\n";
    }

    std::cerr << "Done.\n";
}
| isc |
dennis95/dennix | kernel/src/keyboard.cpp | 5868 | /* Copyright (c) 2016, 2017, 2019, 2020 Dennis Wölfing
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
/* kernel/src/keyboard.cpp
* Keyboard.
*/
#include <wchar.h>
#include <dennix/kbkeys.h>
#include <dennix/kernel/keyboard.h>
#define KBLAYOUT KBLAYOUT_US
// US keyboard layout.
// US keyboard layout table, indexed by (scancode * 4 + column); see
// Keyboard::getWideCharFromKey() below. The four columns per key are:
//   +0: no modifiers, +1: Shift held, +2: Caps Lock active, +3: unused
//   (per the header comment below; keys unaffected by modifiers repeat
//   their character in all four columns).
// A 0 entry means the combination produces no printable character.
static const wchar_t KBLAYOUT_US[] = {
    // no modifiers, shift, caps, unused
    0, 0, 0, 0,
    L'\e', L'\e', L'\e', L'\e', // Escape
    L'1', L'!', L'1', 0,
    L'2', L'@', L'2', 0,
    L'3', L'#', L'3', 0,
    L'4', L'$', L'4', 0,
    L'5', L'%', L'5', 0,
    L'6', L'^', L'6', 0,
    L'7', L'&', L'7', 0,
    L'8', L'*', L'8', 0,
    L'9', L'(', L'9', 0,
    L'0', L')', L'0', 0,
    L'-', L'_', L'-', 0,
    L'=', L'+', L'=', 0,
    L'\b', L'\b', L'\b', L'\b', // Backspace
    L'\t', L'\t', L'\t', L'\t', // Tab
    L'q', L'Q', L'Q', 0,
    L'w', L'W', L'W', 0,
    L'e', L'E', L'E', 0,
    L'r', L'R', L'R', 0,
    L't', L'T', L'T', 0,
    L'y', L'Y', L'Y', 0,
    L'u', L'U', L'U', 0,
    L'i', L'I', L'I', 0,
    L'o', L'O', L'O', 0,
    L'p', L'P', L'P', 0,
    L'[', L'{', L'[', 0,
    L']', L'}', L']', 0,
    L'\n', L'\n', L'\n', L'\n', // Enter
    0, 0, 0, 0, // left Control
    L'a', L'A', L'A', 0,
    L's', L'S', L'S', 0,
    L'd', L'D', L'D', 0,
    L'f', L'F', L'F', 0,
    L'g', L'G', L'G', 0,
    L'h', L'H', L'H', 0,
    L'j', L'J', L'J', 0,
    L'k', L'K', L'K', 0,
    L'l', L'L', L'L', 0,
    L';', L':', L';', 0,
    L'\'', L'"', L'\'', 0,
    L'`', L'~', L'`', 0,
    0, 0, 0, 0, // left Shift
    L'\\', L'|', L'\\', 0,
    L'z', L'Z', L'Z', 0,
    L'x', L'X', L'X', 0,
    L'c', L'C', L'C', 0,
    L'v', L'V', L'V', 0,
    L'b', L'B', L'B', 0,
    L'n', L'N', L'N', 0,
    L'm', L'M', L'M', 0,
    L',', L'<', L',', 0,
    L'.', L'>', L'.', 0,
    L'/', L'?', L'/', 0,
    0, 0, 0, 0, // right Shift
    L'*', L'*', L'*', L'*', // numpad *
    0, 0, 0, 0, // left Alt
    L' ', L' ', L' ', L' ', // Space
    0, 0, 0, 0, // Caps Lock
    0, 0, 0, 0, // F1
    0, 0, 0, 0,
    0, 0, 0, 0,
    0, 0, 0, 0,
    0, 0, 0, 0,
    0, 0, 0, 0,
    0, 0, 0, 0,
    0, 0, 0, 0,
    0, 0, 0, 0,
    0, 0, 0, 0, // F10
    0, 0, 0, 0, // Num Lock
    0, 0, 0, 0, // Scroll Lock
    // Numpad digits: Shift column is 0 (cursor/edit function instead of digit).
    L'7', 0, L'7', L'7',
    L'8', 0, L'8', L'8',
    L'9', 0, L'9', L'9',
    L'-', L'-', L'-', L'-',
    L'4', 0, L'4', L'4',
    L'5', 0, L'5', L'5',
    L'6', 0, L'6', L'6',
    L'+', L'+', L'+', L'+',
    L'1', 0, L'1', L'1',
    L'2', 0, L'2', L'2',
    L'3', 0, L'3', L'3',
    L'0', 0, L'0', L'0',
    L'.', 0, L'.', L'.',
    0, 0, 0, 0,
    0, 0, 0, 0,
    0, 0, 0, 0,
    0, 0, 0, 0, // F11
    0, 0, 0, 0, // F12
    // Most things below are not printable
};
// Escape sequences reported for keys that have no single-character
// representation (cursor keys, editing keys, function keys). The table is
// terminated by a { 0, 0 } sentinel entry; see getSequenceFromKey().
static const struct {
    int key; // KB_* key code from <dennix/kbkeys.h>
    const char* sequence; // escape sequence sent for that key
} sequences[] = {
    { KB_UP, "\e[A" },
    { KB_DOWN, "\e[B" },
    { KB_RIGHT, "\e[C" },
    { KB_LEFT, "\e[D" },
    { KB_END, "\e[F" },
    { KB_HOME, "\e[H" },
    { KB_INSERT, "\e[2~" },
    { KB_DELETE, "\e[3~" },
    { KB_PAGEUP, "\e[5~" },
    { KB_PAGEDOWN, "\e[6~" },
    // NOTE(review): xterm-compatible terminals send F1-F4 as SS3 sequences
    // "\eOP".."\eOS" (no '[' after ESC). The "\e[OP" form below may be
    // intentional for dennix's own terminal -- confirm against the terminal
    // implementation before changing.
    { KB_F1, "\e[OP" },
    { KB_F2, "\e[OQ" },
    { KB_F3, "\e[OR" },
    { KB_F4, "\e[OS" },
    { KB_F5, "\e[15~" },
    { KB_F6, "\e[17~" },
    { KB_F7, "\e[18~" },
    { KB_F8, "\e[19~" },
    { KB_F9, "\e[20~" },
    { KB_F10, "\e[21~" },
    { KB_F11, "\e[23~" },
    { KB_F12, "\e[24~" },
    { 0, 0 } // sentinel
};
/* Translates a key event into the wide character it produces, or L'\0'
 * if there is none. A positive key is a press, a negative key a release
 * of key code -key. Modifier state (Shift, Control, Caps Lock) is kept
 * in function-local statics, so it persists across calls; this function
 * must therefore see every key event in order.
 */
wchar_t Keyboard::getWideCharFromKey(int key) {
    static bool leftShift = false;
    static bool rightShift = false;
    static bool capsLock = false;
    static bool leftControl = false;
    static bool rightControl = false;
    // Update modifier state. Caps Lock toggles on each press; the other
    // modifiers track press (positive) and release (negative) events.
    if (key == KB_LSHIFT) {
        leftShift = true;
    } else if (key == KB_RSHIFT) {
        rightShift = true;
    } else if (key == KB_CAPSLOCK) {
        capsLock = !capsLock;
    } else if (key == KB_LCONTROL) {
        leftControl = true;
    } else if (key == KB_RCONTROL) {
        rightControl = true;
    } else if (key == -KB_LSHIFT) {
        leftShift = false;
    } else if (key == -KB_RSHIFT) {
        rightShift = false;
    } else if (key == -KB_LCONTROL) {
        leftControl = false;
    } else if (key == -KB_RCONTROL) {
        rightControl = false;
    }
    // Key releases never produce characters.
    if (key < 0) return L'\0';
    wchar_t wc = L'\0';
    // Keys within the layout table have four entries each; see KBLAYOUT_US.
    if ((size_t) key < sizeof(KBLAYOUT) / sizeof(wchar_t) / 4) {
        size_t index = key << 2; // key * 4: start of this key's entries
        bool shift = leftShift || rightShift;
        if (shift && capsLock) {
            // When shift and caps have the same effect they cancel each other.
            if (KBLAYOUT[index + 1] != KBLAYOUT[index + 2]) {
                index++;
            }
        } else if (shift) {
            index++; // Shift column
        } else if (capsLock) {
            index += 2; // Caps Lock column
        }
        wc = KBLAYOUT[index];
    } else if (key == KB_NUMPAD_ENTER) {
        wc = L'\n';
    } else if (key == KB_NUMPAD_DIV) {
        wc = L'/';
    }
    // Control maps '@'..'~' onto the C0 control range (e.g. Ctrl+A -> 0x01)
    // and Ctrl+? onto DEL (0x7F).
    bool control = leftControl || rightControl;
    if (control && wc >= L'@' && wc <= L'~') {
        wc = wc & 0x1F;
    } else if (control && wc == L'?') {
        wc = L'\x7F';
    }
    return wc;
}
/* Looks up the escape sequence for the given key code in the sequences
 * table, or returns NULL if the key has no associated sequence.
 */
const char* Keyboard::getSequenceFromKey(int key) {
    size_t i = 0;
    // The table ends with a { 0, 0 } sentinel entry.
    while (sequences[i].key != 0) {
        if (sequences[i].key == key) {
            return sequences[i].sequence;
        }
        i++;
    }
    return NULL;
}
| isc |
nolo-metrics/nolo-librato | lib/nolo-librato/version.rb | 81 | module Nolo # :nodoc:
module Librato # :nodoc:
VERSION = '0.1.5'
end
end
| isc |
shoegazer/shuffle-guru | src/actions/loginGenius.js | 37 | export const loginGenius = () => ({}) | isc |
a-hansen/aon | src/test/java/com/comfortanalytics/aon/SerializationTest.java | 4178 | package com.comfortanalytics.aon;
import java.math.BigDecimal;
import java.math.BigInteger;
import org.junit.Test;
import org.junit.jupiter.api.Assertions;
/**
 * Round-trip serialization tests: encodes an Aobj tree with each codec
 * (AON, JSON, MsgPack) and asserts that decoding yields an equal tree.
 *
 * NOTE(review): mixes the JUnit 4 annotation {@code org.junit.Test} with
 * JUnit 5 {@code org.junit.jupiter.api.Assertions}; whether these tests run
 * depends on which engines are on the classpath -- confirm.
 */
public class SerializationTest {

    // Shared fixture, built once per test instance.
    final Aobj largeObj = makeLargeObj();

    /** AON round trip of the large fixture object. */
    @Test
    public void testAon() {
        byte[] buf = Aon.aonBytes(largeObj);
        Aobj neu = Aon.readAon(buf);
        Assertions.assertEquals(largeObj, neu);
    }

    /** JSON round trip; uses the JSON-safe fixture (no byte[]/BigInteger values). */
    @Test
    public void testJson() {
        Aobj orig = makeJsonObj();
        byte[] buf = Aon.jsonBytes(orig);
        Aobj neu = Aon.readJson(buf);
        Assertions.assertEquals(orig, neu);
    }

    /**
     * MsgPack round trip.
     * NOTE(review): builds its fixture via Profiling.makeLargeObj() rather
     * than the local makeLargeObj()/largeObj used by the other tests --
     * confirm whether this is intentional.
     */
    @Test
    public void testMsgPack() {
        Aobj orig = Profiling.makeLargeObj();
        byte[] buf = Aon.msgPackBytes(orig);
        Aobj neu = Aon.readMsgPack(buf);
        Assertions.assertEquals(orig, neu);
    }

    /** copy() must produce an equal object; mutating the copy must break equality. */
    @Test
    public void testObjCompare() {
        Aobj neu = largeObj.copy();
        Assertions.assertEquals(largeObj, neu);
        neu.put("foo", "bar");
        Assertions.assertNotEquals(largeObj, neu);
    }

    /**
     * Builds a nested fixture restricted to JSON-representable value types
     * (boolean, double, long, string) plus nested lists and objects.
     */
    static Aobj makeJsonObj() {
        Aobj primitiveObj = new Aobj()
                .put("boolean", true)
                .put("double", 100.001d)
                .put("long", 100001L)
                .put("string", "abcdefghij\r\njklmnopqrs\u0000\u0001\u0002tuvwxyz\r\n");
        Alist primitiveList = new Alist()
                .add(true)
                .add(100.001d)
                .add(100001L)
                .add("abcdefghij\r\njklmnopqrs\u0000\u0001\u0002tuvwxyz\r\n");
        Aobj complexObj = primitiveObj.copy();
        complexObj.put("list", primitiveList.copy())
                .put("object", primitiveObj.copy());
        Alist complexList = (Alist) primitiveList.copy();
        complexList.add(primitiveList.copy());
        complexList.add(primitiveObj.copy());
        Aobj testObj = new Aobj();
        for (int i = 0; i < 100; i++) {
            // Only true for i == 0 with the current loop bound of 100 --
            // presumably the bound was larger once. Wraps the accumulated
            // object one nesting level deeper.
            if ((i % 100) == 0) {
                Aobj tmp = new Aobj();
                tmp.put("object", testObj);
                testObj = tmp;
            }
            testObj.put("object" + i, complexObj.copy());
            testObj.put("list" + i, complexList.copy());
        }
        return testObj;
    }

    /**
     * Builds a nested fixture exercising every value type the codecs
     * support, including BigDecimal, BigInteger, float, int and byte[].
     */
    static Aobj makeLargeObj() {
        Aobj primitiveObj = new Aobj()
                .put("decimal", new BigDecimal(Double.MAX_VALUE))
                .put("bigint", new BigInteger("" + Long.MAX_VALUE))
                .put("boolean", true)
                .put("double", 100.001d)
                .put("float", 100.001f)
                .put("smallInt", 1)
                .put("int", 100001)
                .put("long", 100001L)
                .put("bytes", "abcdefghij\r\njklmnopqrs\u0000\u0001\u0002tuvwxyz\r\n".getBytes())
                .put("string", "abcdefghij\r\njklmnopqrs\u0000\u0001\u0002tuvwxyz\r\n");
        Alist primitiveList = new Alist()
                .add(new BigDecimal(Double.MIN_VALUE))
                .add(new BigInteger("" + Long.MIN_VALUE))
                .add(true)
                .add(100.001d)
                .add(100.001f)
                .add(1)
                .add(100001)
                .add(100001L)
                .add("abcdefghij\r\njklmnopqrs\u0000\u0001\u0002tuvwxyz\r\n".getBytes())
                .add("abcdefghij\r\njklmnopqrs\u0000\u0001\u0002tuvwxyz\r\n");
        Aobj complexObj = primitiveObj.copy();
        complexObj.put("list", primitiveList.copy())
                .put("object", primitiveObj.copy());
        Alist complexList = (Alist) primitiveList.copy();
        complexList.add(primitiveList.copy());
        complexList.add(primitiveObj.copy());
        Aobj testObj = new Aobj();
        for (int i = 0; i < 100; i++) {
            // See the matching note in makeJsonObj(): only true for i == 0.
            if ((i % 100) == 0) {
                Aobj tmp = new Aobj();
                tmp.put("object", testObj);
                testObj = tmp;
            }
            testObj.put("object" + i, complexObj.copy());
            testObj.put("list" + i, complexList.copy());
        }
        return testObj;
    }
}
| isc |
larvit/larvitproduct | test/00test.js | 62270 | /* eslint-disable require-jsdoc */
'use strict';
const elasticsearch = require('elasticsearch');
const uuidValidate = require('uuid-validate');
const Intercom = require('larvitamintercom');
const LUtils = require('larvitutils');
const log = new (new LUtils()).Log();
const {ProductLib, Product} = require(__dirname + '/../index.js');
const request = require('request');
const assert = require('assert');
const async = require('async');
const fs = require('fs');
const os = require('os');
const testIndexName = 'something';
let esUrl;
let prodLib;
let es;
// Global test setup: verify the test indices do not already exist (so we
// never clobber real data), construct the ProductLib under test, wait for
// its dataWriter to become ready and install the field mappings that the
// tests below rely on.
before(function (done) {
	const tasks = [];

	this.timeout(10000);

	// Check for empty ES
	tasks.push(function (cb) {
		let confFile;

		if (fs.existsSync(__dirname + '/../config/es_test.json')) {
			confFile = __dirname + '/../config/es_test.json';
		} else if (process.env.ESCONFFILE) {
			confFile = process.env.ESCONFFILE;
		} else {
			throw new Error('No es config file found');
		}

		log.verbose('ES config file: "' + confFile + '"');

		const esConf = require(confFile);

		log.verbose('ES config: ' + JSON.stringify(esConf));
		esUrl = 'http://' + esConf.clientOptions.host;
		es = new elasticsearch.Client(esConf.clientOptions);
		request({'url': esUrl + '/_cat/indices?v', 'json': true}, function (err, response, body) {
			if (err) throw err;

			for (let i = 0; body[i] !== undefined; i ++) {
				const index = body[i];

				if (index.index === testIndexName || index.index === testIndexName + '_db_version') {
					// Bugfix: prodLib is not constructed until the next task, so
					// referencing prodLib.dataWriter.esIndexName here crashed with
					// a TypeError instead of reporting the problem. Use the local
					// testIndexName instead.
					throw new Error('Elasticsearch "' + testIndexName + '" index already exists!');
				}
			}

			cb(err);
		});
	});

	// Create ProductLib
	tasks.push(function (cb) {
		const libOptions = {};

		libOptions.log = log;
		libOptions.esIndexName = testIndexName;
		libOptions.mode = 'noSync';
		libOptions.intercom = new Intercom('loopback interface');
		libOptions.amsync = {};
		libOptions.amsync.host = null;
		libOptions.amsync.minPort = null;
		libOptions.amsync.maxPort = null;
		libOptions.elasticsearch = es;
		prodLib = new ProductLib(libOptions, cb);
	});

	// Wait for dataWriter to be ready
	tasks.push(function (cb) {
		prodLib.dataWriter.ready(cb);
	});

	// Put mappings to ES to match our tests
	tasks.push(function (cb) {
		prodLib.dataWriter.elasticsearch.indices.putMapping({
			'index': prodLib.dataWriter.esIndexName,
			'type': 'product',
			'body': {
				'product': {
					'properties': {
						'trams': { 'type': 'text', 'fields': { 'keyword': { 'type': 'keyword' } } },
						'foo': { 'type': 'text', 'fields': { 'keyword': { 'type': 'keyword' } } },
						'artNo': { 'type': 'keyword'},
						'supplier': { 'type': 'keyword'},
						'boolTest': { 'type': 'boolean'},
						'ragg': { 'type': 'boolean'}
					}
				}
			}
		}, cb);
	});

	async.series(tasks, done);
});
describe('Lib', function () {
	it('should create a log instance if no one is provided', function (done) {
		const LUtils = require('larvitutils');
		const lUtils = new LUtils();
		const libOptions = {};

		// Deliberately no libOptions.log here.
		libOptions.esIndexName = testIndexName;
		libOptions.mode = 'noSync';
		libOptions.intercom = new Intercom('loopback interface');
		libOptions.elasticsearch = es;

		const lib = new ProductLib(libOptions);

		// When no "log" option is given, the library must fall back to
		// creating its own larvitutils Log instance.
		assert(lib.log instanceof lUtils.Log);
		done();
	});
});
describe('Product', function () {
	// Shared between the tests below -- they are order dependent: the
	// product saved in "should save a product" is loaded, altered and
	// finally removed by the later tests.
	let productUuid;

	it('should not instantiate a new plain product object if productLib is missing from options', function (done) {
		try {
			new Product({});
		} catch (error) {
			assert.equal(error.message, 'Required option "productLib" is missing');
			done();
		}
	});

	it('should instantiate a new plain product object', function (done) {
		const product = new Product({'productLib': prodLib});

		assert.deepStrictEqual(toString.call(product), '[object Object]');
		assert.deepStrictEqual(toString.call(product.attributes), '[object Object]');
		// New products get a v1 (time based) uuid by default.
		assert.deepStrictEqual(uuidValidate(product.uuid, 1), true);
		assert.deepStrictEqual(toString.call(product.created), '[object Date]');
		done();
	});

	it('should instantiate a new plain product object with productLib factory function', function (done) {
		const product = prodLib.createProduct();

		assert.deepStrictEqual(toString.call(product), '[object Object]');
		assert.deepStrictEqual(toString.call(product.attributes), '[object Object]');
		assert.deepStrictEqual(uuidValidate(product.uuid, 1), true);
		assert.deepStrictEqual(toString.call(product.created), '[object Date]');
		// The factory must wire the product back to its library instance.
		assert.strictEqual(product.productLib, prodLib);
		done();
	});

	it('should instantiate a new plain product object, with empty object as option', function (done) {
		const product = prodLib.createProduct({});

		assert.deepStrictEqual(toString.call(product), '[object Object]');
		assert.deepStrictEqual(toString.call(product.attributes), '[object Object]');
		assert.deepStrictEqual(uuidValidate(product.uuid, 1), true);
		assert.deepStrictEqual(toString.call(product.created), '[object Date]');
		assert.strictEqual(product.productLib, prodLib);
		done();
	});

	it('should instantiate a new plain product object, with custom uuid', function (done) {
		// Passing a string to createProduct() is shorthand for {'uuid': ...}.
		const product = prodLib.createProduct('6a7c9adc-9b73-11e6-9f33-a24fc0d9649c');

		product.loadFromDb(function (err) {
			if (err) throw err;
			assert.deepStrictEqual(toString.call(product), '[object Object]');
			assert.deepStrictEqual(toString.call(product.attributes), '[object Object]');
			assert.deepStrictEqual(uuidValidate(product.uuid, 1), true);
			assert.deepStrictEqual(product.uuid, '6a7c9adc-9b73-11e6-9f33-a24fc0d9649c');
			assert.deepStrictEqual(toString.call(product.created), '[object Date]');
			done();
		});
	});

	it('should instantiate a new plain product object, with custom uuid as explicit option', function (done) {
		const product = prodLib.createProduct({'uuid': '6a7c9adc-9b73-11e6-9f33-a24fc0d9649c'});

		product.loadFromDb(function (err) {
			if (err) throw err;
			assert.deepStrictEqual(toString.call(product), '[object Object]');
			assert.deepStrictEqual(toString.call(product.attributes), '[object Object]');
			assert.deepStrictEqual(uuidValidate(product.uuid, 1), true);
			assert.deepStrictEqual(product.uuid, '6a7c9adc-9b73-11e6-9f33-a24fc0d9649c');
			assert.deepStrictEqual(toString.call(product.created), '[object Date]');
			done();
		});
	});

	it('should instantiate a new plain product object, with custom created', function (done) {
		const manCreated = new Date();
		const product = prodLib.createProduct({'created': manCreated});

		product.loadFromDb(function (err) {
			if (err) throw err;
			assert.deepStrictEqual(toString.call(product), '[object Object]');
			assert.deepStrictEqual(toString.call(product.attributes), '[object Object]');
			assert.deepStrictEqual(uuidValidate(product.uuid, 1), true);
			// The supplied created date must survive save/load untouched.
			assert.deepStrictEqual(product.created, manCreated);
			done();
		});
	});

	it('should save a product', function (done) {
		// Create and save a product with some attributes; remember its uuid
		// for the following tests.
		function createProduct(cb) {
			const product = prodLib.createProduct();

			productUuid = product.uuid;

			product.attributes = {
				'name': 'Test product #69',
				'price': 99,
				'weight': 14,
				'color': ['blue', 'green']
			};
			product.save(cb);
		}

		// Verify the document directly in Elasticsearch. Note that scalar
		// attributes are stored as single-element arrays.
		function checkProduct(cb) {
			prodLib.dataWriter.elasticsearch.get({
				'index': prodLib.dataWriter.esIndexName,
				'type': 'product',
				'id': productUuid
			}, function (err, result) {
				if (err) throw err;

				assert.strictEqual(result._id, productUuid);
				assert.strictEqual(result.found, true);
				assert.strictEqual(result._source.name[0], 'Test product #69');
				assert.strictEqual(result._source.price[0], 99);
				assert.strictEqual(result._source.weight[0], 14);
				assert.strictEqual(result._source.color[0], 'blue');
				assert.strictEqual(result._source.color[1], 'green');

				cb();
			});
		}

		async.series([createProduct, checkProduct], function (err) {
			if (err) throw err;
			done();
		});
	});

	it('should load saved product from db', function (done) {
		const product = prodLib.createProduct(productUuid);

		product.loadFromDb(function (err) {
			if (err) throw err;
			assert.deepStrictEqual(product.uuid, productUuid);
			assert.deepStrictEqual(product.attributes.name[0], 'Test product #69');
			assert.deepStrictEqual(product.attributes.price[0], 99);
			assert.deepStrictEqual(product.attributes.weight[0], 14);
			// Attribute value order is not guaranteed; sort before comparing.
			product.attributes.color.sort();
			assert.deepStrictEqual(product.attributes.color[0], 'blue');
			assert.deepStrictEqual(product.attributes.color[1], 'green');
			done();
		});
	});

	it('should alter an product already saved to db', function (done) {
		const tasks = [];

		// Load, modify (add "boll", remove "weight") and re-save the product.
		tasks.push(function (cb) {
			const product = prodLib.createProduct(productUuid);

			product.loadFromDb(function (err) {
				if (err) throw err;

				product.attributes.boll = ['foo'];
				delete product.attributes.weight;

				product.save(function (err) {
					if (err) throw err;
					assert.deepStrictEqual(product.uuid, productUuid);
					assert.deepStrictEqual(product.attributes.name, ['Test product #69']);
					assert.deepStrictEqual(product.attributes.price, [99]);
					assert.deepStrictEqual(product.attributes.weight, undefined);
					assert.deepStrictEqual(product.attributes.boll, ['foo']);
					product.attributes.color.sort();
					assert.deepStrictEqual(product.attributes.color, ['blue', 'green']);
					cb();
				});
			});
		});

		// Reload from db and verify the changes were persisted.
		tasks.push(function (cb) {
			const product = prodLib.createProduct(productUuid);

			product.loadFromDb(function (err) {
				if (err) throw err;
				assert.deepStrictEqual(product.uuid, productUuid);
				assert.deepStrictEqual(product.attributes.name, ['Test product #69']);
				assert.deepStrictEqual(product.attributes.price, [99]);
				assert.deepStrictEqual(product.attributes.weight, undefined);
				assert.deepStrictEqual(product.attributes.boll, ['foo']);
				product.attributes.color.sort();
				assert.deepStrictEqual(product.attributes.color, ['blue', 'green']);
				cb();
			});
		});

		async.series(tasks, done);
	});

	it('should remove a product', function (done) {
		const tasks = [];

		// Add some more products
		tasks.push(function (cb) {
			const product = prodLib.createProduct();

			product.attributes.foo = 'bar';
			product.attributes.nisse = 'mm';
			product.attributes.active = 'true';
			product.attributes.bacon = 'yes';
			product.save(cb);
		});

		tasks.push(function (cb) {
			const product = prodLib.createProduct();

			product.attributes.foo = 'baz';
			product.attributes.nisse = 'nej';
			product.attributes.active = 'true';
			product.attributes.bacon = 'no';
			product.save(cb);
		});

		tasks.push(function (cb) {
			const product = prodLib.createProduct();

			product.attributes.foo = 'bar';
			product.attributes.active = 'true';
			product.attributes.bacon = 'narwhal';
			product.save(cb);
		});

		// Get all products before
		tasks.push(function (cb) {
			prodLib.dataWriter.elasticsearch.search({
				'index': prodLib.dataWriter.esIndexName,
				'type': 'product'
			}, function (err, result) {
				if (err) throw err;
				// The product from "should save a product" plus the three above.
				assert.strictEqual(result.hits.total, 4);
				cb();
			});
		});

		// Remove a product
		tasks.push(function (cb) {
			const product = prodLib.createProduct(productUuid);

			product.rm(cb);
		});

		// Refresh index
		tasks.push(function (cb) {
			request.post(esUrl + '/' + prodLib.dataWriter.esIndexName + '/_refresh', cb);
		});

		// Get all products after
		tasks.push(function (cb) {
			prodLib.dataWriter.elasticsearch.search({
				'index': prodLib.dataWriter.esIndexName,
				'type': 'product'
			}, function (err, result) {
				if (err) throw err;
				assert.strictEqual(result.hits.total, 3);

				// The removed uuid must not be among the remaining hits.
				for (let i = 0; result.hits.hits[i] !== undefined; i ++) {
					assert.notStrictEqual(result.hits.hits[i]._id, productUuid);
				}

				cb();
			});
		});

		async.series(tasks, function (err) {
			if (err) throw err;
			done();
		});
	});
});
describe('Helpers', function () {
	// Seeds a handful of products with overlapping attribute values that
	// the helper tests below query against.
	it('should save some more products to play with', function (done) {
		const tasks = [];

		tasks.push(function (cb) {
			const product = prodLib.createProduct();

			product.attributes.enabled2 = 'true';
			product.attributes.enabled = 'true';
			product.attributes.country = 'all';
			product.attributes.country2 = 'all';
			product.save(cb);
		});

		tasks.push(function (cb) {
			const product = prodLib.createProduct();

			product.attributes.enabled2 = ['true', 'maybe'];
			product.attributes.enabled = ['true', 'maybe'];
			product.attributes.country = 'se';
			product.attributes.country2 = 'se';
			product.save(cb);
		});

		tasks.push(function (cb) {
			const product = prodLib.createProduct();

			product.attributes.enabled2 = 'false';
			product.attributes.enabled = 'false';
			product.attributes.country = 'se';
			product.attributes.country2 = 'se';
			product.save(cb);
		});

		tasks.push(function (cb) {
			const product = prodLib.createProduct();

			product.attributes.enabled2 = ['maybe', 'true'];
			product.attributes.enabled = ['true', 'maybe'];
			product.attributes.country = 'dk';
			product.attributes.country2 = 'dk';
			product.save(cb);
		});

		tasks.push(function (cb) {
			const product = prodLib.createProduct();

			product.attributes.enabled2 = ['maybe', 'true'];
			product.attributes.enabled = ['true', 'maybe'];
			product.attributes.country = 'all';
			product.attributes.country2 = 'se';
			product.save(cb);
		});

		async.parallel(tasks, function (err) {
			if (err) throw err;

			// Refresh index so the new documents become searchable.
			request.post(esUrl + '/' + prodLib.dataWriter.esIndexName + '/_refresh', function (err) {
				if (err) throw err;
				done();
			});
		});
	});

	it('should get attribute values', function (done) {
		// 'bar' and 'baz' were written by the "should remove a product" test.
		prodLib.helpers.getAttributeValues('foo.keyword', function (err, result) {
			if (err) throw err;
			assert.deepStrictEqual(result, ['bar', 'baz']);
			done();
		});
	});

	it('should get empty array on non existing attribute name', function (done) {
		prodLib.helpers.getAttributeValues('trams.keyword', function (err, result) {
			if (err) throw err;
			assert.deepStrictEqual(result, []);
			done();
		});
	});

	it('should ignore BOMs in strings', function (done) {
		const product = prodLib.createProduct();

		// 0xEF 0xBB 0xBF is the UTF-8 BOM; the attribute name below decodes
		// to BOM + "p" and should be stored as plain "p".
		product.attributes[Buffer.from('efbbbf70', 'hex').toString()] = 'bulle';
		product.save(function (err) {
			if (err) throw err;

			prodLib.dataWriter.elasticsearch.get({
				'index': prodLib.dataWriter.esIndexName,
				'type': 'product',
				'id': product.uuid
			}, function (err, result) {
				if (err) throw err;
				assert.deepStrictEqual(Object.keys(result._source), ['created', 'p']);
				done();
			});
		});
	});

	it('should get all keywords', function (done) {
		// Every keyword field expected in the mapping at this point in the
		// suite (explicitly mapped fields plus dynamically added .keyword
		// sub-fields).
		const expectedKeywords = [];

		expectedKeywords.push('active.keyword');
		expectedKeywords.push('artNo');
		expectedKeywords.push('bacon.keyword');
		expectedKeywords.push('boll.keyword');
		expectedKeywords.push('color.keyword');
		expectedKeywords.push('country.keyword');
		expectedKeywords.push('country2.keyword');
		expectedKeywords.push('enabled.keyword');
		expectedKeywords.push('enabled2.keyword');
		expectedKeywords.push('foo.keyword');
		expectedKeywords.push('name.keyword');
		expectedKeywords.push('nisse.keyword');
		expectedKeywords.push('p.keyword');
		expectedKeywords.push('supplier');
		expectedKeywords.push('trams.keyword');

		prodLib.helpers.getKeywords(function (err, keywords) {
			if (err) throw err;
			expectedKeywords.sort();
			keywords.sort();
			assert.deepStrictEqual(expectedKeywords, keywords);
			done();
		});
	});

	it('should get all booleans', function (done) {
		// Both fields were mapped as boolean in the global before() hook.
		const expectedBools = ['ragg', 'boolTest'];

		prodLib.helpers.getBooleans(function (err, booleans) {
			expectedBools.sort();
			booleans.sort();
			// NOTE(review): err is ignored and loose deepEqual is used here,
			// unlike deepStrictEqual elsewhere in the suite -- confirm whether
			// this is intentional.
			assert.deepEqual(booleans, expectedBools);
			done();
		});
	});

	it('update by query', function (done) {
		const tasks = [];

		// Set enabled = ['true'] on every product matching active === 'true'.
		tasks.push(function (cb) {
			const queryBody = {};
			const updates = {};

			queryBody.query = {'bool': {'filter': {'term': {'active': 'true'}}}};
			updates.enabled = ['true'];
			prodLib.helpers.updateByQuery(queryBody, updates, cb);
		});

		// Refresh index
		tasks.push(function (cb) {
			request.post(esUrl + '/' + prodLib.dataWriter.esIndexName + '/_refresh', cb);
		});

		// Verify the update on every matching document.
		tasks.push(function (cb) {
			request({'url': esUrl + '/' + prodLib.dataWriter.esIndexName + '/product/_search', 'json': true}, function (err, response, body) {
				if (err) throw err;

				for (let i = 0; body.hits.hits[i] !== undefined; i ++) {
					const source = body.hits.hits[i]._source;

					if (Array.isArray(source.active) && source.active[0] === 'true') {
						assert.strictEqual(source.enabled[0], 'true');
					}
				}

				cb();
			});
		});

		async.series(tasks, function (err) {
			if (err) throw err;
			done();
		});
	});

	it('delete by query', function (done) {
		const tasks = [];

		let prodBeforeDelete;

		// Refresh index; the extra setTimeout gives ES time to settle.
		tasks.push(function (cb) {
			request.post(esUrl + '/' + prodLib.dataWriter.esIndexName + '/_refresh', function (err) {
				if (err) throw err;
				setTimeout(cb, 200);
			});
		});

		// Pre-calc products
		tasks.push(function (cb) {
			const reqOptions = {};

			reqOptions.url = esUrl + '/' + prodLib.dataWriter.esIndexName + '/product/_search';
			reqOptions.body = {'size': 1000, 'query': {'match_all': {}}};
			reqOptions.json = true;

			request(reqOptions, function (err, response, body) {
				if (err) throw err;
				prodBeforeDelete = body.hits.hits.length;
				cb();
			});
		});

		// Delete everything matching foo === 'bar'.
		tasks.push(function (cb) {
			const queryBody = {};

			queryBody.query = {'bool': {'filter': {'term': {'foo': 'bar'}}}};
			prodLib.helpers.deleteByQuery(queryBody, cb);
		});

		// Refresh index
		tasks.push(function (cb) {
			request.post(esUrl + '/' + prodLib.dataWriter.esIndexName + '/_refresh', function (err) {
				if (err) throw err;
				setTimeout(cb, 200);
			});
		});

		tasks.push(function (cb) {
			const reqOptions = {};

			reqOptions.url = esUrl + '/' + prodLib.dataWriter.esIndexName + '/product/_search';
			reqOptions.body = {'size': 1000, 'query': {'match_all': {}}};
			reqOptions.json = true;

			request(reqOptions, function (err, response, body) {
				if (err) throw err;
				// Exactly two products with foo === 'bar' were saved earlier.
				assert.strictEqual(body.hits.hits.length, prodBeforeDelete - 2);
				cb();
			});
		});

		async.series(tasks, function (err) {
			if (err) throw err;
			done();
		});
	});

	it('should get all mapped field names', function (done) {
		prodLib.helpers.getMappedFieldNames(function (err, names) {
			if (err) throw err;
			assert.strictEqual(names.length, 20);
			assert.notStrictEqual(names.indexOf('price'), - 1);
			assert.notStrictEqual(names.indexOf('enabled'), - 1);
			done();
		});
	});
});
describe('Import', function () {
// Make sure the index is refreshed between each test, so documents written
// by the previous test are searchable in the next one.
beforeEach(function (done) {
	request.post(esUrl + '/' + prodLib.dataWriter.esIndexName + '/_refresh', function (err) {
		if (err) throw err;
		done();
	});
});
/**
 * Writes str to a temporary CSV file, runs prodLib.importer.fromFile() on
 * it with the given options, removes the file and finally refreshes the ES
 * index so the imported documents are searchable.
 *
 * @param {string} str - CSV contents to import
 * @param {object} options - passed straight through to importer.fromFile()
 * @param {function} cb - cb(err, uuids, errors)
 */
function importFromStr(str, options, cb) {
	const tmpFile = os.tmpdir() + '/tmp_products.csv';
	const tasks = [];

	let uuids = [];
	let resultErrors = [];

	// First create our test file
	tasks.push(function (cb) {
		fs.writeFile(tmpFile, str, cb);
	});

	// Import file
	tasks.push(function (cb) {
		prodLib.importer.fromFile(tmpFile, options, function (err, result, errors) {
			uuids = result;
			if (err) throw err;
			resultErrors = errors;
			cb();
		});
	});

	// Remove tmp file
	tasks.push(function (cb) {
		fs.unlink(tmpFile, cb);
	});

	// Refresh index
	tasks.push(function (cb) {
		request.post(esUrl + '/' + prodLib.dataWriter.esIndexName + '/_refresh', cb);
	});

	async.series(tasks, function (err) {
		cb(err, uuids, resultErrors);
	});
}
/**
 * Fetches the raw ES documents for the given product uuids.
 *
 * @param {string[]} uuids - product uuids to fetch
 * @param {function} cb - cb(err, hits) where hits is the raw ES hits array
 */
function getProductData(uuids, cb) {
	const options = {};

	options.method = 'GET';
	options.json = true;
	options.url = esUrl + '/' + prodLib.dataWriter.esIndexName + '/product/_search';
	options.body = {'query': {'ids': {'values': uuids}}};

	request(options, function (err, response, result) {
		if (err) throw err;

		return cb(null, result.hits.hits);
	});
}
// Counts all product documents currently in the test index; cb(err, count).
function countProducts(cb) {
	request({'url': esUrl + '/' + prodLib.dataWriter.esIndexName + '/product/_count', 'json': true}, function (err, response, body) {
		if (err) throw err;
		cb(err, body.count);
	});
}
/**
 * Removes duplicate values from an array in place, keeping the first
 * occurrence of each value, and returns the same (mutated) array.
 *
 * Rewritten from a nested-loop implementation (O(n^2) comparisons, with
 * splice() shifting making it O(n^3) worst case) to a single O(n) pass
 * using a Set.
 *
 * @param {Array} array - array to deduplicate (mutated!)
 * @returns {Array} the same array instance
 */
function uniqueConcat(array) {
	const seen = new Set();

	let writeIndex = 0;

	for (let readIndex = 0; readIndex < array.length; readIndex ++) {
		if (! seen.has(array[readIndex])) {
			seen.add(array[readIndex]);
			array[writeIndex ++] = array[readIndex];
		}
	}

	array.length = writeIndex;

	return array;
}
// Forces an ES index refresh so recent writes become searchable; cb(err).
function refreshIndex(cb) {
	request.post(esUrl + '/' + prodLib.dataWriter.esIndexName + '/_refresh', cb);
}
// Removes every product document from the test index; the "?refresh" flag
// makes the deletion immediately visible to subsequent searches.
function deleteAllProducts(cb) {
	const options = {};

	options.method = 'POST';
	options.json = true;
	options.url = esUrl + '/' + prodLib.dataWriter.esIndexName + '/product/_delete_by_query?refresh';
	options.body = {'query': {'match_all': {}}};

	request(options, cb);
}
it('very simple test case', function (done) {
	// Two rows; the second exercises CSV quoting ("" inside a quoted field).
	const productStr = 'name,price,description\nball,100,it is round\ntv,55,"About 32"" in size"';
	const tasks = [];

	let uuids;

	// Remove all previous products
	tasks.push(function (cb) {
		deleteAllProducts(cb);
	});

	// Run importer
	tasks.push(function (cb) {
		importFromStr(productStr, {}, function (err, result) {
			if (err) throw err;
			uuids = result;
			assert.strictEqual(uuids.length, 2);
			cb();
		});
	});

	// Get product data and check it
	tasks.push(function (cb) {
		getProductData(uuids, function (err, testProducts) {
			if (err) throw err;
			assert.strictEqual(testProducts.length, 2);

			for (let i = 0; testProducts[i] !== undefined; i ++) {
				const product = testProducts[i];

				// 4 fields: "created" plus the three CSV columns.
				assert.strictEqual(Object.keys(product._source).length, 4);

				if (product._source.name[0] === 'ball') {
					// CSV values import as strings, hence '100' and not 100.
					assert.strictEqual(product._source.price[0], '100');
					assert.strictEqual(product._source.description[0], 'it is round');
				} else if (product._source.name[0] === 'tv') {
					assert.strictEqual(product._source.price[0], '55');
					assert.strictEqual(product._source.description[0], 'About 32" in size');
				} else {
					throw new Error('Unexpected product: ' + JSON.stringify(product));
				}
			}

			cb();
		});
	});

	// Count total number of products in database
	tasks.push(function (cb) {
		countProducts(function (err, count) {
			assert.strictEqual(count, 2);
			cb(err);
		});
	});

	async.series(tasks, done);
});
it('Override static column data', function (done) {
	const productStr = 'name,artNo,size,enabled\nball,abc01,3,true\ntv,abc02,14,false\nspoon,abc03,2,true';
	// staticCols should add "foul" to every product, but must NOT override
	// the "enabled" values coming from the CSV itself.
	const options = {'staticCols': { 'foul': 'nope', 'enabled': 'false'} };
	const tasks = [];

	let uuids;

	// Remove all previous products
	tasks.push(function (cb) {
		deleteAllProducts(cb);
	});

	// Import
	tasks.push(function (cb) {
		importFromStr(productStr, options, function (err, result) {
			if (err) throw err;
			uuids = result;
			assert.strictEqual(uuids.length, 3);
			cb();
		});
	});

	// Get and check product data
	tasks.push(function (cb) {
		getProductData(uuids, function (err, testProducts) {
			if (err) throw err;
			assert.strictEqual(testProducts.length, 3);

			for (let i = 0; testProducts[i] !== undefined; i ++) {
				const product = testProducts[i];

				// 6 fields: "created", four CSV columns and the static "foul".
				assert.strictEqual(Object.keys(product._source).length, 6);

				if (product._source.name[0] === 'ball') {
					assert.strictEqual(product._source.artNo[0], 'abc01');
					assert.strictEqual(product._source.size[0], '3');
					assert.strictEqual(product._source.enabled[0], 'true');
					assert.strictEqual(product._source.foul[0], 'nope');
				} else if (product._source.name[0] === 'tv') {
					assert.strictEqual(product._source.artNo[0], 'abc02');
					assert.strictEqual(product._source.size[0], '14');
					assert.strictEqual(product._source.enabled[0], 'false');
					assert.strictEqual(product._source.foul[0], 'nope');
				} else if (product._source.name[0] === 'spoon') {
					assert.strictEqual(product._source.artNo[0], 'abc03');
					assert.strictEqual(product._source.size[0], '2');
					assert.strictEqual(product._source.enabled[0], 'true');
					assert.strictEqual(product._source.foul[0], 'nope');
				} else {
					throw new Error('Unexpected product: ' + JSON.stringify(product));
				}
			}

			cb();
		});
	});

	// Count products
	tasks.push(function (cb) {
		countProducts(function (err, count) {
			assert.strictEqual(count, 3);
			cb(err);
		});
	});

	async.series(tasks, done);
});
it('Create a product, and then update the same product using the import function fromFile. Check that visible is not changed on existing products, but set to the entered value in defaultAttributes when creating new ones', function (done) {
const productStr = 'name,artNo,size,enabled\nballa,abc123,3,true\nballb,abc124,14,false\nballc,abc125,2,true\nballd,abc126,2,true';
const options = {'defaultAttributes': { 'visible': 'true'}, 'findByCols': ['name'] };
const tasks = [];
let productUuid;
let uuids;
// Remove all previous products
tasks.push(function (cb) {
deleteAllProducts(cb);
});
// Import
tasks.push(function (cb) {
importFromStr(productStr, options, function (err, result) {
if (err) throw err;
uuids = result;
assert.strictEqual(uuids.length, 4);
cb();
});
});
// Get and check product data
tasks.push(function (cb) {
getProductData(uuids, function (err, testProducts) {
if (err) throw err;
assert.strictEqual(testProducts.length, 4);
for (let i = 0; testProducts[i] !== undefined; i ++) {
const product = testProducts[i];
assert.strictEqual(Object.keys(product._source).length, 6);
if (product._source.name[0] === 'balla') {
productUuid = product._id;
assert.strictEqual(product._source.artNo[0], 'abc123');
assert.strictEqual(product._source.size[0], '3');
assert.strictEqual(product._source.enabled[0], 'true');
assert.strictEqual(product._source.visible[0], 'true');
} else if (product._source.name[0] === 'ballb') {
assert.strictEqual(product._source.artNo[0], 'abc124');
assert.strictEqual(product._source.size[0], '14');
assert.strictEqual(product._source.enabled[0], 'false');
assert.strictEqual(product._source.visible[0], 'true');
} else if (product._source.name[0] === 'ballc') {
assert.strictEqual(product._source.artNo[0], 'abc125');
assert.strictEqual(product._source.size[0], '2');
assert.strictEqual(product._source.enabled[0], 'true');
assert.strictEqual(product._source.visible[0], 'true');
} else if (product._source.name[0] === 'balld') {
assert.strictEqual(product._source.artNo[0], 'abc126');
assert.strictEqual(product._source.size[0], '2');
assert.strictEqual(product._source.enabled[0], 'true');
assert.strictEqual(product._source.visible[0], 'true');
} else {
throw new Error('Unexpected product: ' + JSON.stringify(product));
}
}
cb();
});
});
// Change attribute visible on one of the products
tasks.push(function (cb) {
const product = prodLib.createProduct(productUuid);
product.loadFromDb(function (err) {
if (err) throw err;
product.attributes.visible = ['false'];
product.save(function (err) {
if (err) throw err;
cb();
});
});
});
// Import the same products again
tasks.push(function (cb) {
importFromStr(productStr, options, function (err, result) {
if (err) throw err;
uuids = result;
assert.strictEqual(uuids.length, 4);
cb();
});
});
// Get and check product data, product 'balla' should still have visible false.
tasks.push(function (cb) {
getProductData(uuids, function (err, testProducts) {
if (err) throw err;
assert.strictEqual(testProducts.length, 4);
for (let i = 0; testProducts[i] !== undefined; i ++) {
const product = testProducts[i];
assert.strictEqual(Object.keys(product._source).length, 6);
if (product._source.name[0] === 'balla') {
assert.strictEqual(product._source.artNo[0], 'abc123');
assert.strictEqual(product._source.size[0], '3');
assert.strictEqual(product._source.enabled[0], 'true');
assert.strictEqual(product._source.visible[0], 'false');
} else if (product._source.name[0] === 'ballb') {
assert.strictEqual(product._source.artNo[0], 'abc124');
assert.strictEqual(product._source.size[0], '14');
assert.strictEqual(product._source.enabled[0], 'false');
assert.strictEqual(product._source.visible[0], 'true');
} else if (product._source.name[0] === 'ballc') {
assert.strictEqual(product._source.artNo[0], 'abc125');
assert.strictEqual(product._source.size[0], '2');
assert.strictEqual(product._source.enabled[0], 'true');
assert.strictEqual(product._source.visible[0], 'true');
} else if (product._source.name[0] === 'balld') {
assert.strictEqual(product._source.artNo[0], 'abc126');
assert.strictEqual(product._source.size[0], '2');
assert.strictEqual(product._source.enabled[0], 'true');
assert.strictEqual(product._source.visible[0], 'true');
} else {
throw new Error('Unexpected product: ' + JSON.stringify(product));
}
}
cb();
});
});
// Count products
tasks.push(function (cb) {
countProducts(function (err, count) {
assert.strictEqual(count, 4);
cb(err);
});
});
async.series(tasks, done);
});
// Verify "replace" semantics: re-importing with 'removeOldAttributes: true'
// and matching on a single column (artNo) should drop attributes that are
// missing from the new file on matched products, while leaving unmatched
// products completely untouched.
it('Replace by one column', function (done) {
	const initProductStr = 'name,artNo,size,description\n' +
		'house,abc01,20,huge\n' +
		'napkin,food3k,9,small\n' +
		'car,abc13,7,vehicle\n' +
		'plutt,ieidl3,10,no';
	const replProductStr = 'name,artNo,size\n' +
		'ball,abc01,15\n' +
		'tv,abc02,14\n' +
		'car," abc13",2'; // Deliberate space - the stored artNo is asserted trimmed below

	const tasks = [];
	let uuids;

	// Remove all previous products
	tasks.push(function (cb) {
		deleteAllProducts(cb);
	});

	// Run initial report
	tasks.push(function (cb) {
		importFromStr(initProductStr, {}, function (err, result) {
			if (err) throw err;
			uuids = result;
			cb();
		});
	});

	// Refresh index so the replacement import can find the first batch
	tasks.push(refreshIndex);

	// Run replacement import; rows matching an existing artNo replace that
	// product's attributes, non-matching rows create new products
	tasks.push(function (cb) {
		importFromStr(replProductStr, {'findByCols': ['artNo'], 'removeOldAttributes': true}, function (err, result) {
			if (err) throw err;
			uuids = uuids.concat(result);
			cb();
		});
	});

	// Refresh index
	tasks.push(refreshIndex);

	// Count hits: 4 initial products, 2 matched/replaced (abc01, abc13) and
	// 1 brand new (abc02) => 5 total
	tasks.push(function (cb) {
		countProducts(function (err, count) {
			if (err) throw err;
			assert.strictEqual(count, 5);
			cb();
		});
	});

	// Check product data. Replaced products carry only the 3 imported columns
	// (plus one extra attribute, per the key-count asserts; presumably added
	// by the library - confirm against the importer), untouched products keep
	// their original "description" as well.
	tasks.push(function (cb) {
		getProductData(uuids, function (err, testProducts) {
			if (err) throw err;
			assert.strictEqual(testProducts.length, 5);
			for (let i = 0; testProducts[i] !== undefined; i ++) {
				const product = testProducts[i];

				if (product._source.name[0] === 'ball') {
					assert.strictEqual(product._source.artNo[0], 'abc01');
					assert.strictEqual(product._source.size[0], '15');
					assert.strictEqual(Object.keys(product._source).length, 4);
				} else if (product._source.name[0] === 'tv') {
					assert.strictEqual(product._source.artNo[0], 'abc02');
					assert.strictEqual(product._source.size[0], '14');
					assert.strictEqual(Object.keys(product._source).length, 4);
				} else if (product._source.name[0] === 'car') {
					// '" abc13"' from the CSV was matched and stored without the space
					assert.strictEqual(product._source.artNo[0], 'abc13');
					assert.strictEqual(product._source.size[0], '2');
					assert.strictEqual(Object.keys(product._source).length, 4);
				} else if (product._source.name[0] === 'napkin') {
					assert.strictEqual(product._source.artNo[0], 'food3k');
					assert.strictEqual(product._source.size[0], '9');
					assert.strictEqual(product._source.description[0], 'small');
					assert.strictEqual(Object.keys(product._source).length, 5);
				} else if (product._source.name[0] === 'plutt') {
					assert.strictEqual(product._source.artNo[0], 'ieidl3');
					assert.strictEqual(product._source.size[0], '10');
					assert.strictEqual(product._source.description[0], 'no');
					assert.strictEqual(Object.keys(product._source).length, 5);
				} else {
					throw new Error('Unexpected product: ' + JSON.stringify(product));
				}
			}
			cb();
		});
	});

	async.series(tasks, done);
});
// Import three products, then import a second batch matched on the
// artNo + supplier combination: two rows hit existing products (which get
// updated) and one row ('blimp 18'/bb2) is new, so the total product count
// must grow by exactly one.
it('Replace by two columns', function (done) {
	const productStr1 = 'supplier,artNo,name\nurkus ab,bb1,foo\nurkus ab,bb2,bar\nbleff ab,bb1,elk';
	const productStr2 = 'supplier,artNo,name\nurkus ab,bb1,MUU\nblimp 18,bb2,tefflon\nbleff ab,bb1,bolk';
	const options = {'findByCols': ['artNo', 'supplier']};
	const tasks = [];
	let preNoProducts;
	let uuids1;
	let uuids2;

	// Remove all previous products
	tasks.push(function (cb) {
		deleteAllProducts(cb);
	});

	// Run the import of productStr1
	tasks.push(function (cb) {
		importFromStr(productStr1, options, function (err, result) {
			if (err) throw err;
			uuids1 = result;
			assert.strictEqual(uuids1.length, 3);
			cb();
		});
	});

	// Refresh index so the first batch is searchable
	tasks.push(refreshIndex);

	// Pre-count products, so the growth can be asserted after the second import
	tasks.push(function (cb) {
		countProducts(function (err, count) {
			if (err) throw err;
			preNoProducts = count;
			cb();
		});
	});

	// Run the import of productStr2
	tasks.push(function (cb) {
		importFromStr(productStr2, options, function (err, result) {
			if (err) throw err;
			uuids2 = result;
			assert.strictEqual(uuids2.length, 3);
			cb();
		});
	});

	// Refresh index again before counting
	tasks.push(refreshIndex);

	// NOTE(review): fixed sleep; presumably the refresh alone was not always
	// enough before counting here - confirm before removing.
	tasks.push(function (cb) {
		setTimeout(cb, 1100);
	});

	// Count hits after index; exactly one product should have been added
	tasks.push(function (cb) {
		countProducts(function (err, count) {
			if (err) throw err;
			assert.strictEqual(preNoProducts, (count - 1));
			cb();
		});
	});

	// Check product data: matched products carry the updated names
	tasks.push(function (cb) {
		getProductData(uuids2, function (err, testProducts) {
			if (err) throw err;
			assert.strictEqual(testProducts.length, 3);
			for (let i = 0; testProducts[i] !== undefined; i ++) {
				const product = testProducts[i];

				assert.strictEqual(Object.keys(product._source).length, 4);
				if (product._source.supplier[0] === 'urkus ab' && product._source.artNo[0] === 'bb1') {
					assert.strictEqual(product._source.name[0], 'MUU');
				} else if (product._source.supplier[0] === 'blimp 18' && product._source.artNo[0] === 'bb2') {
					assert.strictEqual(product._source.name[0], 'tefflon');
				} else if (product._source.supplier[0] === 'bleff ab' && product._source.artNo[0] === 'bb1') {
					assert.strictEqual(product._source.name[0], 'bolk');
				} else {
					throw new Error('Unexpected product: ' + JSON.stringify(product));
				}
			}
			cb();
		});
	});

	async.series(tasks, done);
});
// Verify "update" semantics (no removeOldAttributes): re-importing with
// matching on artNo + supplier updates the matched products' imported
// columns but keeps attributes absent from the new file (e.g. "size").
it('Update by two columns', function (done) {
	const productStr1 = 'supplier,artNo,name,size\nslam ab,rd1,foo,100\nslam ab,rd2,bar,200\nbang ab,hhv4,elk,300';
	const productStr2 = 'supplier,artNo,name\nslam ab,rd1,MUU\npaow,bb2,tefflon\nbang ab,hhv4,bolk';
	const options = {'findByCols': ['artNo', 'supplier']};
	const tasks = [];
	let preNoProducts;
	let uuids1;
	let uuids2;

	// Run the import of productStr1
	tasks.push(function (cb) {
		importFromStr(productStr1, options, function (err, result) {
			if (err) throw err;
			uuids1 = result;
			assert.strictEqual(uuids1.length, 3);
			cb();
		});
	});

	// Refresh index so the first batch is searchable
	tasks.push(refreshIndex);

	// Pre-count products, so the growth can be asserted after the second import
	tasks.push(function (cb) {
		countProducts(function (err, count) {
			if (err) throw err;
			preNoProducts = count;
			cb();
		});
	});

	// Run the import of productStr2 (two rows match existing products, 'paow'/bb2 is new)
	tasks.push(function (cb) {
		importFromStr(productStr2, options, function (err, result) {
			if (err) throw err;
			uuids2 = result;
			assert.strictEqual(uuids2.length, 3);
			cb();
		});
	});

	// Refresh index again before counting
	tasks.push(refreshIndex);

	// Count hits after index; exactly one product should have been added
	tasks.push(function (cb) {
		countProducts(function (err, count) {
			if (err) throw err;
			assert.strictEqual(preNoProducts, (count - 1));
			cb();
		});
	});

	// Check product data
	tasks.push(function (cb) {
		const uuids = uniqueConcat(uuids1.concat(uuids2));

		getProductData(uuids, function (err, testProducts) {
			if (err) throw err;
			assert.strictEqual(testProducts.length, 4);
			for (let i = 0; testProducts[i] !== undefined; i ++) {
				const product = testProducts[i];

				if (product._source.supplier[0] === 'slam ab' && product._source.artNo[0] === 'rd1') {
					// Name was updated, but the old "size" attribute is kept
					assert.strictEqual(product._source.name[0], 'MUU');
					assert.strictEqual(parseInt(product._source.size[0], 10), 100);
				} else if (product._source.supplier[0] === 'paow' && product._source.artNo[0] === 'bb2') {
					// Newly created product; never had a "size"
					assert.strictEqual(product._source.name[0], 'tefflon');
					assert.strictEqual(product._source.size, undefined);
				} else if (product._source.supplier[0] === 'bang ab' && product._source.artNo[0] === 'hhv4') {
					assert.strictEqual(product._source.name[0], 'bolk');
					assert.strictEqual(parseInt(product._source.size[0], 10), 300);
				} else if (product._source.supplier[0] === 'slam ab' && product._source.artNo[0] === 'rd2') {
					// Not present in the second file; completely untouched
					assert.strictEqual(product._source.name[0], 'bar');
					assert.strictEqual(parseInt(product._source.size[0], 10), 200);
				} else {
					throw new Error('Unexpected product: ' + JSON.stringify(product));
				}
			}
			cb();
		});
	});

	async.series(tasks, done);
});
// Verify 'removeColValsContaining': CSV values listed there ('N/A' and the
// empty string here) are dropped from the row before import, so the product
// never gets that attribute. Matching is asserted to be exact and
// case-sensitive below: lowercase 'n/a' survives.
it('Ignore column values', function (done) {
	const productStr = 'name,price,description,foo\nball,100,it is round,N/A\ntv,55,Large sized,bar\nsoffa,1200,n/a,N/A\nbord,20,,n/a';
	const tasks = [];
	let uuids;

	// Remove all previous products
	tasks.push(function (cb) {
		deleteAllProducts(cb);
	});

	// Run importer
	tasks.push(function (cb) {
		importFromStr(productStr, {'removeColValsContaining': ['N/A', '']}, function (err, result) {
			if (err) throw err;
			uuids = result;
			assert.strictEqual(uuids.length, 4);
			cb();
		});
	});

	// Get product data and check it
	tasks.push(function (cb) {
		getProductData(uuids, function (err, testProducts) {
			if (err) throw err;
			assert.strictEqual(testProducts.length, 4);
			for (let i = 0; testProducts[i] !== undefined; i ++) {
				const product = testProducts[i];

				if (product._source.name[0] === 'ball') {
					assert.strictEqual(product._source.price[0], '100');
					assert.strictEqual(product._source.description[0], 'it is round');
					assert.strictEqual(product._source.foo, undefined); // 'N/A' was ignored
				} else if (product._source.name[0] === 'tv') {
					assert.strictEqual(product._source.price[0], '55');
					assert.strictEqual(product._source.description[0], 'Large sized');
					assert.strictEqual(product._source.foo[0], 'bar');
				} else if (product._source.name[0] === 'soffa') {
					assert.strictEqual(product._source.price[0], '1200');
					assert.strictEqual(product._source.description[0], 'n/a'); // lowercase: kept
					assert.strictEqual(product._source.foo, undefined);
				} else if (product._source.name[0] === 'bord') {
					assert.strictEqual(product._source.price[0], '20');
					assert.strictEqual(product._source.description, undefined); // empty string was ignored
					assert.strictEqual(product._source.foo[0], 'n/a');
				} else {
					throw new Error('Unexpected product: ' + JSON.stringify(product));
				}
			}
			cb();
		});
	});

	// Count total number of products in database
	tasks.push(function (cb) {
		countProducts(function (err, count) {
			if (err) throw err;
			assert.strictEqual(count, 4);
			cb();
		});
	});

	async.series(tasks, done);
});
// First import stores every CSV value verbatim (no removal options). A second
// import with 'removeValWhereEmpty' then actively deletes an attribute when
// the incoming CSV value is empty, while values matching
// 'removeColValsContaining' are merely dropped from the row - so a previously
// stored value (e.g. soffa's 'n/a' description) survives.
it('Remove values where empty', function (done) {
	const productStr = 'name,price,description,foo\n' +
		'ball,100,it is round,N/A\n' +
		'tv,55,Large sized,bar\n' +
		'soffa,1200,n/a,N/A\n' +
		'bord,20,untz,n/a';
	const tasks = [];
	let uuids;

	// Remove all previous products
	tasks.push(function (cb) {
		deleteAllProducts(cb);
	});

	// Run importer without any removal options
	tasks.push(function (cb) {
		importFromStr(productStr, {}, function (err, result) {
			if (err) throw err;
			uuids = result;
			assert.strictEqual(uuids.length, 4);
			cb();
		});
	});

	// Get product data and check it; every value, including 'N/A', is stored
	tasks.push(function (cb) {
		getProductData(uuids, function (err, testProducts) {
			if (err) throw err;
			assert.strictEqual(testProducts.length, 4);
			for (let i = 0; testProducts[i] !== undefined; i ++) {
				const product = testProducts[i];

				if (product._source.name[0] === 'ball') {
					assert.strictEqual(product._source.price[0], '100');
					assert.strictEqual(product._source.description[0], 'it is round');
					assert.strictEqual(product._source.foo[0], 'N/A');
				} else if (product._source.name[0] === 'tv') {
					assert.strictEqual(product._source.price[0], '55');
					assert.strictEqual(product._source.description[0], 'Large sized');
					assert.strictEqual(product._source.foo[0], 'bar');
				} else if (product._source.name[0] === 'soffa') {
					assert.strictEqual(product._source.price[0], '1200');
					assert.strictEqual(product._source.description[0], 'n/a');
					assert.strictEqual(product._source.foo[0], 'N/A');
				} else if (product._source.name[0] === 'bord') {
					assert.strictEqual(product._source.price[0], '20');
					assert.strictEqual(product._source.description[0], 'untz');
					assert.strictEqual(product._source.foo[0], 'n/a');
				} else {
					throw new Error('Unexpected product: ' + JSON.stringify(product));
				}
			}
			cb();
		});
	});

	// Count total number of products in database
	tasks.push(function (cb) {
		countProducts(function (err, count) {
			if (err) throw err;
			assert.strictEqual(count, 4);
			cb();
		});
	});

	// Run importer again, matching by name, with the removal options
	tasks.push(function (cb) {
		const prodStr2 = 'name,price,description,foo\n' +
			'ball,100,it is round,\n' +
			'tv,55,Large sized,bar\n' +
			'soffa,1200,n/a,\n' +
			'bord,20,,n/a';

		importFromStr(prodStr2, {'removeValWhereEmpty': true, 'findByCols': ['name'], 'removeColValsContaining': ['N/A', 'n/a']}, function (err, result) {
			if (err) throw err;
			uuids = result;
			assert.strictEqual(uuids.length, 4);
			cb();
		});
	});

	// Get product data and check it
	tasks.push(function (cb) {
		getProductData(uuids, function (err, testProducts) {
			if (err) throw err;
			assert.strictEqual(testProducts.length, 4);
			for (let i = 0; testProducts[i] !== undefined; i ++) {
				const product = testProducts[i];

				if (product._source.name[0] === 'ball') {
					assert.strictEqual(product._source.price[0], '100');
					assert.strictEqual(product._source.description[0], 'it is round');
					assert.strictEqual(product._source.foo, undefined); // empty value removed the attribute
				} else if (product._source.name[0] === 'tv') {
					assert.strictEqual(product._source.price[0], '55');
					assert.strictEqual(product._source.description[0], 'Large sized');
					assert.strictEqual(product._source.foo[0], 'bar');
				} else if (product._source.name[0] === 'soffa') {
					assert.strictEqual(product._source.price[0], '1200');
					assert.strictEqual(product._source.description[0], 'n/a'); // incoming 'n/a' dropped, old value kept
					assert.strictEqual(product._source.foo, undefined);
				} else if (product._source.name[0] === 'bord') {
					assert.strictEqual(product._source.price[0], '20');
					assert.strictEqual(product._source.description, undefined); // empty value removed the attribute
					assert.strictEqual(product._source.foo[0], 'n/a');
				} else {
					throw new Error('Unexpected product: ' + JSON.stringify(product));
				}
			}
			cb();
		});
	});

	async.series(tasks, done);
});
// Verify that the 'afterEachCsvRow' hook is invoked once per CSV row with the
// row's product available as stuff.product.
it('Hook: afterEachCsvRow', function (done) {
	const productStr = 'name,price,description,foo\nball,100,it is round,N/A\ntv,55,Large sized,bar\nsoffa,1200,n/a,N/A\nbord,20,,n/a';
	const prodNames = [];
	const tasks = [];
	let uuids;

	// Remove all previous products
	tasks.push(function (cb) {
		deleteAllProducts(cb);
	});

	// Run importer with a hook that records every imported product name
	tasks.push(function (cb) {
		const options = {};

		options.hooks = {
			'afterEachCsvRow': function (stuff, cb) {
				prodNames.push(stuff.product.attributes.name[0]);
				cb();
			}
		};

		importFromStr(productStr, options, function (err, result) {
			if (err) throw err;
			uuids = result;
			assert.strictEqual(uuids.length, 4);
			cb();
		});
	});

	// Check prodNames: the hook must have seen all four rows (order not asserted)
	tasks.push(function (cb) {
		assert.strictEqual(prodNames.length, 4);
		assert.ok(prodNames.includes('tv'));
		assert.ok(prodNames.includes('bord'));
		assert.ok(prodNames.includes('soffa'));
		assert.ok(prodNames.includes('ball'));
		cb();
	});

	// Count total number of products in database
	tasks.push(function (cb) {
		countProducts(function (err, count) {
			if (err) throw err;
			assert.strictEqual(count, 4);
			cb();
		});
	});

	async.series(tasks, done);
});
// Two products share the same sArtNo, so any 'findByCols: sArtNo' lookup
// yields multiple hits. Each task below asserts that the import is rejected
// when one of the row's columns matches a 'forbiddenUpdateFieldsMultipleHits'
// pattern: exact, prefix ('x*'), suffix ('*x') and contains ('*x*').
it('Create two products, then try to update the products with fields passed in the option: forbiddenUpdateFieldsMultipleHits. Try different combinations to confirm that the wildcards work', function (done) {
	const tasks = [];
	const sArtNo = '123456789';

	// Shared CSV used by all the duplicate-hit tasks below
	const importStr = `sArtNo,name,sPrice_SEK,size,sizeType\n${sArtNo},First product name,123,7000,Burkar\n${sArtNo},Third product name,123,10000,Burkar`;

	// Remove all previous products
	tasks.push(function (cb) {
		deleteAllProducts(cb);
	});

	// Initial import: no findByCols, so both rows become new products
	tasks.push(function importProduct(cb) {
		const importOptions = {'forbiddenUpdateFieldsMultipleHits': ['artno', 'size', 'sizetype', 'sizestr', 'supplierdiscount', 'supplierlistprice_*', 'sPrice_*', 'manualprice_*'] };

		importFromStr(importStr, importOptions, function (err, result) {
			if (err) throw err;
			assert.strictEqual(result.length, 2);
			cb();
		});
	});

	// Fail on size (equals): size
	// NB: 'attriblutes' appears to be a typo in the library's error message;
	// the assertions must match it verbatim.
	tasks.push(function importProduct(cb) {
		const importOptions = {'forbiddenUpdateFieldsMultipleHits': ['size'], 'findByCols': ['sArtNo'] };

		importFromStr(importStr, importOptions, function (err, result, errors) {
			if (err) throw err;
			assert.strictEqual(errors.filter(x => x.message === 'Update not possible; multiple products found and "size" is one of the attriblutes.').length, 2);
			assert.strictEqual(errors.length, 2);
			assert.strictEqual(result.length, 0);
			cb();
		});
	});

	// Fail on sPrice_* (startsWith): sPrice_SEK
	tasks.push(function importProduct(cb) {
		const importOptions = {'forbiddenUpdateFieldsMultipleHits': ['artno', 'sPrice_*'], 'findByCols': ['sArtNo'] };

		importFromStr(importStr, importOptions, function (err, result, errors) {
			if (err) throw err;
			assert.strictEqual(errors.filter(x => x.message === 'Update not possible; multiple products found and "sPrice_SEK" is one of the attriblutes.').length, 2);
			assert.strictEqual(errors.length, 2);
			assert.strictEqual(result.length, 0);
			cb();
		});
	});

	// Fail on *Type (endsWith): sizeType
	tasks.push(function importProduct(cb) {
		const importOptions = {'forbiddenUpdateFieldsMultipleHits': ['artno', '*Type'], 'findByCols': ['sArtNo'] };

		importFromStr(importStr, importOptions, function (err, result, errors) {
			if (err) throw err;
			assert.strictEqual(errors.filter(x => x.message === 'Update not possible; multiple products found and "sizeType" is one of the attriblutes.').length, 2);
			assert.strictEqual(errors.length, 2);
			assert.strictEqual(result.length, 0);
			cb();
		});
	});

	// Fail on *tprice* (contains): testPrice_SEK
	tasks.push(function importProduct(cb) {
		const importStrTestPrice = `sArtNo,name,testPrice_SEK,size,sizeType\n${sArtNo},First product name,123,7000,Burkar\n${sArtNo},Third product name,123,10000,Burkar`;
		const importOptions = {'forbiddenUpdateFieldsMultipleHits': ['artno', '*tprice*'], 'findByCols': ['sArtNo'] };

		importFromStr(importStrTestPrice, importOptions, function (err, result, errors) {
			if (err) throw err;
			assert.strictEqual(errors.filter(x => x.message === 'Update not possible; multiple products found and "testPrice_SEK" is one of the attriblutes.').length, 2);
			assert.strictEqual(errors.length, 2);
			assert.strictEqual(result.length, 0);
			cb();
		});
	});

	// Fail on size (equals): size, and show the hit products' sArtNo in the error message
	tasks.push(function importProduct(cb) {
		const importOptions = {'forbiddenUpdateFieldsMultipleHits': ['size'], 'findByCols': ['sArtNo'], 'multipleHitsErrorProductDisplayAttributes': ['sArtNo'] };

		importFromStr(importStr, importOptions, function (err, result, errors) {
			if (err) throw err;
			assert.strictEqual(errors[0].message, 'Update not possible; multiple products found and "size" is one of the attriblutes. (sArtNo: 123456789, 123456789)');
			assert.strictEqual(errors.filter(x => x.message === 'Update not possible; multiple products found and "size" is one of the attriblutes. (sArtNo: 123456789, 123456789)').length, 2);
			assert.strictEqual(errors.length, 2);
			assert.strictEqual(result.length, 0);
			cb();
		});
	});

	// Count products; all update attempts were rejected, so still only 2
	tasks.push(function (cb) {
		countProducts(function (err, count) {
			if (err) throw err;
			assert.strictEqual(count, 2);
			cb();
		});
	});

	async.series(tasks, done);
});
// Verify that 'findByCols' and 'findByAdditionalCols' are queried separately
// and that a custom 'filterMatchedProducts' can reject rows based on the
// additional matches (here: rows whose name collides with another product).
it('Check that the import can find products by "findByCols" and "findByAdditionalCols" - Should be handled as two different queries, but in one request. "beforeProductLoadFunction" can then be used to handle results', function (done) {
	const tasks = [];
	const sArtNo1 = '123456781';
	const sArtNo2 = '123456782';
	const sArtNo3 = '123456783';
	const sArtNo4 = '123456784';

	// Shared filter for both import tasks below: reject the row when its name
	// matched an existing product that is not among the primary-query matches.
	function filterMatchedProducts(options) {
		const returnObject = {'products': [], 'err': undefined, 'errors': []};

		if (! options) returnObject.err = new Error('filterMatchedProducts got no options!');
		else if (! options.products) returnObject.err = new Error('filterMatchedProducts got no options.products!');

		if (returnObject.err) {
			return returnObject;
		}

		if (options.attributes.name) {
			// NOTE(review): returnObject.products is always empty at this point,
			// so the first clause reduces to "the name matched some product".
			if ((returnObject.products.length === 0 && options.additionalProductIds.length !== 0)
				|| (options.additionalProductIds.length && ! returnObject.products.every(x => options.additionalProductIds.includes(String(x.uuid))))) {
				returnObject.err = new Error('Import would create a product with duplicated name, stopping. name: ' + options.attributes.name);
				returnObject.errors.push(returnObject.err.message);
				returnObject.products = [];

				return returnObject;
			}
		}

		return returnObject;
	}

	// Remove all previous products
	tasks.push(function (cb) {
		deleteAllProducts(cb);
	});

	// Import two products
	tasks.push(function importProduct(cb) {
		const importStr = `sArtNo,name,sPrice_SEK,size,sizeType\n${sArtNo1},First product name,123,7000,Burkar\n${sArtNo2},Third product name,123,10000,Burkar`;
		const importOptions = {};

		importFromStr(importStr, importOptions, function (err, result) {
			if (err) throw err;
			assert.strictEqual(result.length, 2);
			cb();
		});
	});

	// Fail to import two new products with taken names
	tasks.push(function importProduct(cb) {
		const importStr = `sArtNo,name,sPrice_SEK,size,sizeType\n${sArtNo3},First product name,123,7000,Burkar\n${sArtNo4},Third product name,123,10000,Burkar`;
		const importOptions = {'findByAdditionalCols': ['name'], 'findByCols': ['sArtNo']};

		importOptions.filterMatchedProducts = filterMatchedProducts;

		importFromStr(importStr, importOptions, function (err, result, errors) {
			if (err) throw err;
			assert.strictEqual(result.length, 0);
			assert.strictEqual(errors.length, 2);
			cb();
		});
	});

	// Try to import two products, one with a taken name and one with a free
	// name. Only the row with the taken name is stopped from being imported.
	tasks.push(function importProduct(cb) {
		const importStr = `sArtNo,name,sPrice_SEK,size,sizeType\n${sArtNo1},name one,123,7000,Burkar\n${sArtNo2},Third product name,123,10000,Burkar`;
		const importOptions = {'findByAdditionalCols': ['name'], 'findByCols': ['sArtNo']};

		importOptions.filterMatchedProducts = filterMatchedProducts;

		importFromStr(importStr, importOptions, function (err, result, errors) {
			if (err) throw err;
			assert.strictEqual(result.length, 1);
			assert.strictEqual(errors.length, 1);
			assert.strictEqual(errors[0].message, 'Import would create a product with duplicated name, stopping. name: Third product name');
			assert.strictEqual(errors[0].rowNr, 2);
			cb();
		});
	});

	// Count products: 2 originals + the one row that got through above
	tasks.push(function (cb) {
		countProducts(function (err, count) {
			if (err) throw err;
			assert.strictEqual(count, 3);
			cb();
		});
	});

	async.series(tasks, done);
});
// Verify the 'beforeAssigningAttributes' option: a non-function value must be
// silently ignored, while a function gets to mutate the matched product's
// attributes before they are saved.
it('Check that beforeAssigningAttributes is called and does not crash if something else than a function is passed to options.beforeAssigningAttributes', function (done) {
	const tasks = [];
	const productStr = 'name,artNo,size,enabled\nballa,abc123,3,true\nballb,abc124,14,false';
	const options = {'defaultAttributes': { 'visible': 'true'}, 'findByCols': ['name'] };
	const testAttrValue = 'test';
	let uuids;

	// Set beforeAssigningAttributes to a string - Should not fail
	tasks.push(function (cb) {
		options.beforeAssigningAttributes = 'hello';
		importFromStr(productStr, options, function (err, result) {
			if (err) throw err;
			uuids = result;
			assert.strictEqual(uuids.length, 2);
			cb();
		});
	});

	// Get and check product data; no 'test' attribute may exist since the
	// (invalid) hook was never run
	tasks.push(function (cb) {
		getProductData(uuids, function (err, testProducts) {
			if (err) throw err;
			assert.strictEqual(testProducts.length, 2);
			for (let i = 0; testProducts[i] !== undefined; i ++) {
				const product = testProducts[i];

				// 6 keys: the 4 CSV columns, the 'visible' default and one more
				// attribute per this assert (presumably library-added - confirm)
				assert.strictEqual(Object.keys(product._source).length, 6);
				assert.strictEqual(product._source.test, undefined);
				if (product._source.name[0] === 'balla') {
					assert.strictEqual(product._source.artNo[0], 'abc123');
					assert.strictEqual(product._source.size[0], '3');
					assert.strictEqual(product._source.enabled[0], 'true');
				} else if (product._source.name[0] === 'ballb') {
					assert.strictEqual(product._source.artNo[0], 'abc124');
					assert.strictEqual(product._source.size[0], '14');
					assert.strictEqual(product._source.enabled[0], 'false');
				} else {
					throw new Error('Unexpected product: ' + JSON.stringify(product));
				}
			}
			cb();
		});
	});

	// Import with beforeAssigningAttributes as a function that adds a 'test'
	// attribute to the first matched product
	tasks.push(function (cb) {
		options.beforeAssigningAttributes = function beforeAssigningAttributes(options, cb) {
			// options.attributes.name is an array; concatenation coerces it to
			// its joined string, which equals name[0] for a single value
			options.products[0].attributes['test'] = testAttrValue + options.attributes.name;
			cb();
		};
		importFromStr(productStr, options, function (err, result) {
			if (err) throw err;
			uuids = result;
			assert.strictEqual(uuids.length, 2);
			cb();
		});
	});

	// Get and check product data; now 7 keys, including the hook-added 'test'
	tasks.push(function (cb) {
		getProductData(uuids, function (err, testProducts) {
			if (err) throw err;
			assert.strictEqual(testProducts.length, 2);
			for (let i = 0; testProducts[i] !== undefined; i ++) {
				const product = testProducts[i];

				assert.strictEqual(Object.keys(product._source).length, 7);
				if (product._source.name[0] === 'balla') {
					assert.strictEqual(product._source.artNo[0], 'abc123');
					assert.strictEqual(product._source.size[0], '3');
					assert.strictEqual(product._source.enabled[0], 'true');
					assert.strictEqual(product._source.test[0], testAttrValue + product._source.name[0]);
				} else if (product._source.name[0] === 'ballb') {
					assert.strictEqual(product._source.artNo[0], 'abc124');
					assert.strictEqual(product._source.size[0], '14');
					assert.strictEqual(product._source.enabled[0], 'false');
					assert.strictEqual(product._source.test[0], testAttrValue + product._source.name[0]);
				} else {
					throw new Error('Unexpected product: ' + JSON.stringify(product));
				}
			}
			cb();
		});
	});

	async.series(tasks, done);
});
// A row that cannot be imported must produce a report error carrying the
// message, the 1-based data row number and the row's full attribute set, so
// the caller can retry just the failed rows.
it('Check that the import report errors contains the needed information to retry the failed import rows', function (done) {
	const sArtNo1 = '123456781';
	const sArtNo2 = '123456782';
	const steps = [];

	// Start from a clean database
	steps.push(cb => deleteAllProducts(cb));

	// Import two rows; the second one lacks a value for the "sizeType"
	// find-column and must be rejected with enough context to retry it.
	steps.push(cb => {
		const csv = `sArtNo,name,sPrice_SEK,size,sizeType\n${sArtNo1},name one,123,7000,Burkar\n${sArtNo2},Third product name,123,10000,`;

		importFromStr(csv, {'findByAdditionalCols': ['name'], 'findByCols': ['sArtNo', 'sizeType']}, (err, result, errors) => {
			if (err) throw err;
			assert.strictEqual(result.length, 1);
			assert.strictEqual(errors.length, 1);
			assert.strictEqual(errors[0].message, 'Missing attribute value for "sizeType"');
			assert.strictEqual(errors[0].rowNr, 2);
			assert.strictEqual(JSON.stringify(errors[0].importAttributes), '{"sArtNo":"' + sArtNo2 + '","name":"Third product name","sPrice_SEK":"123","size":"10000","sizeType":""}');
			cb();
		});
	});

	// Only the valid row should have produced a product
	steps.push(cb => {
		countProducts((err, count) => {
			assert.strictEqual(count, 1);
			cb(err);
		});
	});

	async.series(steps, done);
});
// Verify the 'keepAttributes' option: attributes listed there ('artNo') keep
// the value from the first import and must not be overwritten when a later
// import supplies different values for them.
it('Check that keepAttributes is working', function (done) {
	const tasks = [];
	const options = {'defaultAttributes': { 'visible': 'true'}, 'findByCols': ['name'], 'keepAttributes': ['artNo'] };
	let productStr = 'name,artNo,size,enabled\nballa,abc123,3,true\nballb,abc124,14,false';
	let uuids;

	// Create/import products
	tasks.push(function (cb) {
		importFromStr(productStr, options, function (err, result) {
			if (err) throw err;
			uuids = result;
			assert.strictEqual(uuids.length, 2);
			cb();
		});
	});

	// Get and check product data
	tasks.push(function (cb) {
		getProductData(uuids, function (err, testProducts) {
			if (err) throw err;
			assert.strictEqual(testProducts.length, 2);
			for (let i = 0; testProducts[i] !== undefined; i ++) {
				const product = testProducts[i];

				if (product._source.name[0] === 'balla') {
					assert.strictEqual(product._source.artNo[0], 'abc123');
				} else if (product._source.name[0] === 'ballb') {
					assert.strictEqual(product._source.artNo[0], 'abc124');
				} else {
					throw new Error('Unexpected product: ' + JSON.stringify(product));
				}
			}
			cb();
		});
	});

	// Update products; the CSV now carries different artNo values
	tasks.push(function (cb) {
		productStr = 'name,artNo,size,enabled\nballa,123abc,3,true\nballb,124abc,14,false';
		importFromStr(productStr, options, function (err, result) {
			if (err) throw err;
			uuids = result;
			assert.strictEqual(uuids.length, 2);
			cb();
		});
	});

	// Get and check product data; artNo is in 'keepAttributes', so the stored
	// values must still be the ones from the first import
	tasks.push(function (cb) {
		getProductData(uuids, function (err, testProducts) {
			if (err) throw err;
			assert.strictEqual(testProducts.length, 2);
			for (let i = 0; testProducts[i] !== undefined; i ++) {
				const product = testProducts[i];

				if (product._source.name[0] === 'balla') {
					assert.strictEqual(product._source.artNo[0], 'abc123');
				} else if (product._source.name[0] === 'ballb') {
					assert.strictEqual(product._source.artNo[0], 'abc124');
				} else {
					throw new Error('Unexpected product: ' + JSON.stringify(product));
				}
			}
			cb();
		});
	});

	async.series(tasks, done);
});
});
after(function (done) {
	// Drop all test data from Elasticsearch: the product index and its
	// companion version index. The deletions are independent, so they run in
	// parallel; both are no-ops when no ES url is configured.
	const removeIndex = cb => {
		if (! esUrl) return cb();
		request.delete(esUrl + '/' + prodLib.dataWriter.esIndexName, cb);
	};
	const removeVersionIndex = cb => {
		if (! esUrl) return cb();
		request.delete(esUrl + '/' + prodLib.dataWriter.esIndexName + '_db_version', cb);
	};

	async.parallel([removeIndex, removeVersionIndex], done);
});
| isc |
pcarrier/joker | joker-api0/src/main/java/help/joker/api0/Joker.java | 2639 | package help.joker.api0;
import java.util.function.Function;
import java.util.function.Supplier;
/**
 * Static helpers for calling code that throws checked {@link Throwable}s from
 * contexts (such as the {@link java.util.function} interfaces) that do not
 * allow them.  Checked throwables are wrapped into the unchecked
 * {@link SilencedThrowable}; {@link RuntimeException}s always propagate
 * untouched.
 */
public class Joker {
    /**
     * Invoke a {@link ThrowingSupplier}; if it throws a checked {@link Throwable}, wrap it into
     * a {@link SilencedThrowable}.
     *
     * @param supplier the computation to run
     * @param <T>      the type of the supplied value
     * @return the value produced by {@code supplier}
     */
    public static <T> T rte(ThrowingSupplier<T> supplier) {
        try {
            return supplier.get();
        } catch (RuntimeException rte) {
            throw rte;
        } catch (Throwable throwable) {
            throw new SilencedThrowable(throwable);
        }
    }

    /**
     * Invoke a {@link ThrowingRunnable}; if it throws a checked {@link Throwable}, wrap it into
     * a {@link SilencedThrowable}.
     *
     * @param runnable the action to run
     */
    public static void rte(ThrowingRunnable runnable) {
        try {
            runnable.run();
        } catch (RuntimeException rte) {
            throw rte;
        } catch (Throwable throwable) {
            throw new SilencedThrowable(throwable);
        }
    }

    /**
     * Transforms a {@link ThrowingSupplier} into a {@link Supplier} that invokes it, and turns
     * any thrown checked {@link Throwable} into a {@link SilencedThrowable}.
     *
     * @param supplier the computation to adapt
     * @param <T>      the type of the supplied value
     * @return a {@link Supplier} delegating to {@code supplier}
     */
    public static <T> Supplier<T> silencedSupplier(ThrowingSupplier<T> supplier) {
        // Delegate to rte(ThrowingSupplier) so the wrap-and-rethrow policy
        // lives in exactly one place.
        return () -> rte(supplier);
    }

    /**
     * Transforms a {@link ThrowingFunction} into a {@link Function} that invokes it, and turns
     * any thrown checked {@link Throwable} into a {@link SilencedThrowable}.
     *
     * @param function the function to adapt
     * @param <T>      the argument type
     * @param <R>      the result type
     * @return a {@link Function} delegating to {@code function}
     */
    public static <T, R> Function<T, R> silencedFunction(ThrowingFunction<T, R> function) {
        // The cast picks the supplier overload of rte(...) explicitly, so the
        // compiler never has to disambiguate between the two lambda targets.
        return (t) -> rte((ThrowingSupplier<R>) () -> function.apply(t));
    }

    /**
     * Transforms a {@link ThrowingRunnable} into a {@link Runnable} that invokes it, and turns
     * any thrown checked {@link Throwable} into a {@link SilencedThrowable}.
     *
     * @param runnable the action to adapt
     * @return a {@link Runnable} delegating to {@code runnable}
     */
    public static Runnable silencedRunnable(ThrowingRunnable runnable) {
        return () -> rte(runnable);
    }
}
| isc |
transientlunatic/otter | otter/otter.py | 4100 | # -*- coding: utf-8 -*-
#import uuid
import os
from .html import *
from configparser import ConfigParser
from jinja2 import Template, Environment, FileSystemLoader
from pkg_resources import resource_string, resource_stream, resource_filename
# Raw bytes of the configuration bundled with the package (otter.conf); used
# as the baseline before any user-supplied config file is layered on top.
default_config = resource_string(__name__, 'otter.conf')
class Otter():
    """
    Otter is a pythonic report writing system designed to produce HTML
    reports for long-running or complex jobs where an iPython
    notebook would be an impractical way of presenting information.
    """
    def __init__(self, filename, config_file=None, **kwargs):
        """
        An Otter report is created by this class.

        Parameters
        ----------
        filename : str
            The path to the location of the report, for example `/home/me/www/report.html`.
        config_file: str
            The location of the config file which should be used to generate the report.
        **kwargs
            Additional metadata entries; merged into the report's meta mapping
            and taking precedence over the config file's ``[meta]`` section.
        """
        # Attempt to load in default meta data from a config file
        # At the moment just the current directory, but should
        # extend to look in home directory and environment variable location too
        config = ConfigParser()
        #if not config_file:
        # NOTE(review): ConfigParser.read() expects *file names*, but
        # default_config holds the raw bytes of otter.conf, and readfp()
        # expects a file-like object rather than a str -- confirm whether the
        # bundled defaults are actually parsed here (read_string() on the
        # decoded bytes may be what was intended).
        try:
            config.read(default_config)
        except TypeError: # Looks like Python 3
            config.readfp(default_config.decode("utf-8"))
        # Layer any user-supplied configuration on top of the defaults.
        if config_file:
            with open(config_file) as cf:
                config.read_string(cf.read())
        # Report metadata: [meta] section from the config first, then any
        # keyword arguments override matching keys.
        self.meta = {}
        if config.has_section("meta"):
            for option in config['meta']:
                self.meta[option] = config.get('meta', option)
        for option in kwargs.items():
            self.meta[option[0]] = option[1]
        try:
            theme = config.get("theme", "location")
        except:
            print("Cannot find theme in the config file. Using the default theme.")
        # NOTE(review): this second try unconditionally overwrites any theme
        # location obtained from the config with the bundled default theme --
        # confirm whether it should only run as a fallback when the config
        # lookup above fails.
        try:
            theme = resource_filename(__name__, "themes/default/")
        except:
            print("No theme files found.")
        # Jinja2 environment used to render the theme's templates.
        self.env = Environment(loader=FileSystemLoader(theme))
        # "<report>_files" directory that holds assets generated alongside
        # the report itself.
        self.reportfolder = filename+"_files"
        self.foldername = os.path.basename(filename)+"_files/"
        if not os.path.exists(self.reportfolder):
            os.makedirs(self.reportfolder)
        #self.reportfile= open(filename,"w")
        # Path the rendered HTML is written to by show().
        self.reportfile = filename
        self.meta.update(kwargs)
        # Top-level HTML elements added to the report, in insertion order.
        self.items = []
    # Make an otter report work as a context manager
    def __enter__(self):
        """
        Execute this code when the context manager is created.
        Right now, Otter doesn't actually need anything to be done at the
        creation of a context, but that should really change at some point in
        the future.
        """
        pass
    def __exit__(self, type, value, traceback):
        """
        When the context ends, the report needs to be rendered.
        """
        self.show()
    def __add__(self, item):
        # `report + item` is sugar for report.add(item).
        return self.add(item)
    def add(self, item):
        # Append an item to the report; values that are not already
        # HTMLElements are wrapped in one so self.items is homogeneous.
        if HTMLElement in type(item).mro():
            self.items.append(item)
        else:
            item_ = HTMLElement()
            # NOTE(review): relies on HTMLElement.__add__ mutating item_ in
            # place (the expression's result is discarded) -- confirm.
            item_ + item
            self.items.append(item_)
        # Returning self allows chained additions: report + a + b
        return self
    def show(self):
        # Render every item and wrap the concatenation in the theme's body
        # template, then write the result out.
        html = ''
        for item in self.items:
            html += repr(item)
        output_html = self.env.get_template('body.html').render(meta=self.meta, body=html)
        self._write(output_html)
    def _write(self, text):
        # Overwrite the report file with the rendered HTML.
        with open(self.reportfile, "w") as f:
            f.write(text)
    def _mkdir_recursive(self, path):
        """
        Recursively create the directories required for the report.
        Based off code from http://stackoverflow.com/questions/6004073/how-can-i-create-directories-recursively
        by `Mars'.
        """
        sub_path = os.path.dirname(path)
        if not os.path.exists(sub_path):
            self._mkdir_recursive(sub_path)
        if not os.path.exists(path):
            os.mkdir(path)
| isc |
HyVar/hyvar-rec | SpecificationGrammar/SpecificationGrammarLexer.py | 8072 | # Generated from SpecificationGrammar.g4 by ANTLR 4.7.2
from antlr4 import *
from io import StringIO
from typing.io import TextIO
import sys
def serializedATN():
    """Return the serialized ATN (augmented transition network) for the lexer.

    The byte stream below is machine-generated by ANTLR 4.7.2 from
    SpecificationGrammar.g4 -- do not edit it by hand; regenerate from the
    grammar instead.
    """
    with StringIO() as buf:
        buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\"")
        buf.write("\u00cc\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7")
        buf.write("\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r")
        buf.write("\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23")
        buf.write("\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30")
        buf.write("\4\31\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36")
        buf.write("\t\36\4\37\t\37\4 \t \4!\t!\3\2\3\2\3\3\3\3\3\3\3\3\3")
        buf.write("\3\3\3\3\3\3\3\3\3\3\3\3\3\4\3\4\3\5\3\5\3\6\3\6\3\7")
        buf.write("\3\7\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\t\3\t\3\t\3")
        buf.write("\t\3\t\3\t\3\t\3\t\3\t\3\n\3\n\3\n\3\n\3\13\3\13\3\13")
        buf.write("\3\f\3\f\3\f\3\f\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\16")
        buf.write("\3\16\3\16\3\16\3\17\3\17\3\17\3\17\3\17\3\20\3\20\3\20")
        buf.write("\3\20\3\20\3\20\3\21\3\21\3\21\3\21\3\21\3\22\3\22\3\22")
        buf.write("\3\22\3\23\3\23\3\23\3\23\3\24\3\24\3\24\3\24\3\25\3\25")
        buf.write("\3\25\3\25\3\26\3\26\3\26\3\27\3\27\3\30\3\30\3\30\3\31")
        buf.write("\3\31\3\32\3\32\3\33\3\33\3\33\3\34\3\34\3\35\3\35\3\36")
        buf.write("\3\36\3\37\3\37\7\37\u00b9\n\37\f\37\16\37\u00bc\13\37")
        buf.write("\3 \5 \u00bf\n \3 \6 \u00c2\n \r \16 \u00c3\3!\6!\u00c7")
        buf.write("\n!\r!\16!\u00c8\3!\3!\2\2\"\3\3\5\4\7\5\t\6\13\7\r\b")
        buf.write("\17\t\21\n\23\13\25\f\27\r\31\16\33\17\35\20\37\21!\22")
        buf.write("#\23%\24\'\25)\26+\27-\30/\31\61\32\63\33\65\34\67\35")
        buf.write("9\36;\37= ?!A\"\3\2\7\5\2C\\aac|\7\2//\62;C\\aac|\3\2")
        buf.write("//\3\2\62;\5\2\13\f\17\17\"\"\2\u00cf\2\3\3\2\2\2\2\5")
        buf.write("\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2")
        buf.write("\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2")
        buf.write("\2\2\27\3\2\2\2\2\31\3\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2")
        buf.write("\2\37\3\2\2\2\2!\3\2\2\2\2#\3\2\2\2\2%\3\2\2\2\2\'\3\2")
        buf.write("\2\2\2)\3\2\2\2\2+\3\2\2\2\2-\3\2\2\2\2/\3\2\2\2\2\61")
        buf.write("\3\2\2\2\2\63\3\2\2\2\2\65\3\2\2\2\2\67\3\2\2\2\29\3\2")
        buf.write("\2\2\2;\3\2\2\2\2=\3\2\2\2\2?\3\2\2\2\2A\3\2\2\2\3C\3")
        buf.write("\2\2\2\5E\3\2\2\2\7P\3\2\2\2\tR\3\2\2\2\13T\3\2\2\2\r")
        buf.write("V\3\2\2\2\17X\3\2\2\2\21a\3\2\2\2\23j\3\2\2\2\25n\3\2")
        buf.write("\2\2\27q\3\2\2\2\31u\3\2\2\2\33}\3\2\2\2\35\u0081\3\2")
        buf.write("\2\2\37\u0086\3\2\2\2!\u008c\3\2\2\2#\u0091\3\2\2\2%\u0095")
        buf.write("\3\2\2\2\'\u0099\3\2\2\2)\u009d\3\2\2\2+\u00a1\3\2\2\2")
        buf.write("-\u00a4\3\2\2\2/\u00a6\3\2\2\2\61\u00a9\3\2\2\2\63\u00ab")
        buf.write("\3\2\2\2\65\u00ad\3\2\2\2\67\u00b0\3\2\2\29\u00b2\3\2")
        buf.write("\2\2;\u00b4\3\2\2\2=\u00b6\3\2\2\2?\u00be\3\2\2\2A\u00c6")
        buf.write("\3\2\2\2CD\7*\2\2D\4\3\2\2\2EF\7c\2\2FG\7v\2\2GH\7v\2")
        buf.write("\2HI\7t\2\2IJ\7k\2\2JK\7d\2\2KL\7w\2\2LM\7v\2\2MN\7g\2")
        buf.write("\2NO\7]\2\2O\6\3\2\2\2PQ\7_\2\2Q\b\3\2\2\2RS\7+\2\2S\n")
        buf.write("\3\2\2\2TU\7]\2\2U\f\3\2\2\2VW\7.\2\2W\16\3\2\2\2XY\7")
        buf.write("e\2\2YZ\7q\2\2Z[\7p\2\2[\\\7v\2\2\\]\7g\2\2]^\7z\2\2^")
        buf.write("_\7v\2\2_`\7]\2\2`\20\3\2\2\2ab\7h\2\2bc\7g\2\2cd\7c\2")
        buf.write("\2de\7v\2\2ef\7w\2\2fg\7t\2\2gh\7g\2\2hi\7]\2\2i\22\3")
        buf.write("\2\2\2jk\7c\2\2kl\7p\2\2lm\7f\2\2m\24\3\2\2\2no\7q\2\2")
        buf.write("op\7t\2\2p\26\3\2\2\2qr\7z\2\2rs\7q\2\2st\7t\2\2t\30\3")
        buf.write("\2\2\2uv\7q\2\2vw\7p\2\2wx\7g\2\2xy\7q\2\2yz\7p\2\2z{")
        buf.write("\7n\2\2{|\7{\2\2|\32\3\2\2\2}~\7p\2\2~\177\7q\2\2\177")
        buf.write("\u0080\7v\2\2\u0080\34\3\2\2\2\u0081\u0082\7v\2\2\u0082")
        buf.write("\u0083\7t\2\2\u0083\u0084\7w\2\2\u0084\u0085\7g\2\2\u0085")
        buf.write("\36\3\2\2\2\u0086\u0087\7h\2\2\u0087\u0088\7c\2\2\u0088")
        buf.write("\u0089\7n\2\2\u0089\u008a\7u\2\2\u008a\u008b\7g\2\2\u008b")
        buf.write(" \3\2\2\2\u008c\u008d\7k\2\2\u008d\u008e\7o\2\2\u008e")
        buf.write("\u008f\7r\2\2\u008f\u0090\7n\2\2\u0090\"\3\2\2\2\u0091")
        buf.write("\u0092\7k\2\2\u0092\u0093\7h\2\2\u0093\u0094\7h\2\2\u0094")
        buf.write("$\3\2\2\2\u0095\u0096\7o\2\2\u0096\u0097\7k\2\2\u0097")
        buf.write("\u0098\7p\2\2\u0098&\3\2\2\2\u0099\u009a\7o\2\2\u009a")
        buf.write("\u009b\7c\2\2\u009b\u009c\7z\2\2\u009c(\3\2\2\2\u009d")
        buf.write("\u009e\7c\2\2\u009e\u009f\7d\2\2\u009f\u00a0\7u\2\2\u00a0")
        buf.write("*\3\2\2\2\u00a1\u00a2\7>\2\2\u00a2\u00a3\7?\2\2\u00a3")
        buf.write(",\3\2\2\2\u00a4\u00a5\7?\2\2\u00a5.\3\2\2\2\u00a6\u00a7")
        buf.write("\7@\2\2\u00a7\u00a8\7?\2\2\u00a8\60\3\2\2\2\u00a9\u00aa")
        buf.write("\7>\2\2\u00aa\62\3\2\2\2\u00ab\u00ac\7@\2\2\u00ac\64\3")
        buf.write("\2\2\2\u00ad\u00ae\7#\2\2\u00ae\u00af\7?\2\2\u00af\66")
        buf.write("\3\2\2\2\u00b0\u00b1\7-\2\2\u00b18\3\2\2\2\u00b2\u00b3")
        buf.write("\7/\2\2\u00b3:\3\2\2\2\u00b4\u00b5\7,\2\2\u00b5<\3\2\2")
        buf.write("\2\u00b6\u00ba\t\2\2\2\u00b7\u00b9\t\3\2\2\u00b8\u00b7")
        buf.write("\3\2\2\2\u00b9\u00bc\3\2\2\2\u00ba\u00b8\3\2\2\2\u00ba")
        buf.write("\u00bb\3\2\2\2\u00bb>\3\2\2\2\u00bc\u00ba\3\2\2\2\u00bd")
        buf.write("\u00bf\t\4\2\2\u00be\u00bd\3\2\2\2\u00be\u00bf\3\2\2\2")
        buf.write("\u00bf\u00c1\3\2\2\2\u00c0\u00c2\t\5\2\2\u00c1\u00c0\3")
        buf.write("\2\2\2\u00c2\u00c3\3\2\2\2\u00c3\u00c1\3\2\2\2\u00c3\u00c4")
        buf.write("\3\2\2\2\u00c4@\3\2\2\2\u00c5\u00c7\t\6\2\2\u00c6\u00c5")
        buf.write("\3\2\2\2\u00c7\u00c8\3\2\2\2\u00c8\u00c6\3\2\2\2\u00c8")
        buf.write("\u00c9\3\2\2\2\u00c9\u00ca\3\2\2\2\u00ca\u00cb\b!\2\2")
        buf.write("\u00cbB\3\2\2\2\7\2\u00ba\u00be\u00c3\u00c8\3\b\2\2")
        return buf.getvalue()
class SpecificationGrammarLexer(Lexer):
    """Lexer for the attribute/constraint specification grammar.

    Machine-generated by ANTLR 4.7.2 from SpecificationGrammar.g4 -- do not
    edit by hand; regenerate from the grammar instead.
    """
    atn = ATNDeserializer().deserialize(serializedATN())
    decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
    # Token type constants; values must stay in sync with the serialized ATN.
    T__0 = 1
    T__1 = 2
    T__2 = 3
    T__3 = 4
    T__4 = 5
    T__5 = 6
    T__6 = 7
    T__7 = 8
    AND = 9
    OR = 10
    XOR = 11
    ONEONLY = 12
    NOT = 13
    TRUE = 14
    FALSE = 15
    IMPL = 16
    IFF = 17
    MIN = 18
    MAX = 19
    ABS = 20
    LEQ = 21
    EQ = 22
    GEQ = 23
    LT = 24
    GT = 25
    NEQ = 26
    PLUS = 27
    MINUS = 28
    TIMES = 29
    ID = 30
    INT = 31
    WS = 32
    channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ]
    modeNames = [ "DEFAULT_MODE" ]
    # Literal spellings for fixed tokens, indexed by token type.
    literalNames = [ "<INVALID>",
            "'('", "'attribute['", "']'", "')'", "'['", "','", "'context['",
            "'feature['", "'and'", "'or'", "'xor'", "'oneonly'", "'not'",
            "'true'", "'false'", "'impl'", "'iff'", "'min'", "'max'", "'abs'",
            "'<='", "'='", "'>='", "'<'", "'>'", "'!='", "'+'", "'-'", "'*'" ]
    symbolicNames = [ "<INVALID>",
            "AND", "OR", "XOR", "ONEONLY", "NOT", "TRUE", "FALSE", "IMPL",
            "IFF", "MIN", "MAX", "ABS", "LEQ", "EQ", "GEQ", "LT", "GT",
            "NEQ", "PLUS", "MINUS", "TIMES", "ID", "INT", "WS" ]
    ruleNames = [ "T__0", "T__1", "T__2", "T__3", "T__4", "T__5", "T__6",
                  "T__7", "AND", "OR", "XOR", "ONEONLY", "NOT", "TRUE",
                  "FALSE", "IMPL", "IFF", "MIN", "MAX", "ABS", "LEQ", "EQ",
                  "GEQ", "LT", "GT", "NEQ", "PLUS", "MINUS", "TIMES", "ID",
                  "INT", "WS" ]
    grammarFileName = "SpecificationGrammar.g4"
    def __init__(self, input=None, output:TextIO = sys.stdout):
        super().__init__(input, output)
        self.checkVersion("4.7.2")
        self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache())
        self._actions = None
        self._predicates = None
| isc |
hayeswise/ingress-intel-total-conversion | dist/plugins/score-cycle-times.user.js | 4602 | // ==UserScript==
// @id iitc-plugin-score-cycle-times@jonatkins
// @name IITC plugin: Show scoreboard cycle/checkpoint times
// @category Info
// @version 0.1.0.20170225.54902
// @namespace https://github.com/jonatkins/ingress-intel-total-conversion
// @updateURL https://github.com/hayeswise/iitc-shadowops/raw/master/dist/plugins/score-cycle-times.meta.js
// @downloadURL https://github.com/hayeswise/iitc-shadowops/raw/master/dist/plugins/score-cycle-times.user.js
// @description [wise-2017-02-25-054902] Show the times used for the septicycle and checkpoints for regional scoreboards.
// @include https://*.ingress.com/intel*
// @include http://*.ingress.com/intel*
// @match https://*.ingress.com/intel*
// @match http://*.ingress.com/intel*
// @include https://*.ingress.com/mission/*
// @include http://*.ingress.com/mission/*
// @match https://*.ingress.com/mission/*
// @match http://*.ingress.com/mission/*
// @grant none
// ==/UserScript==
// Everything inside wrapper() is serialized and re-evaluated in the page's
// own context (see the injection code at the bottom of the file), so it must
// be self-contained.
function wrapper(plugin_info) {
// ensure plugin framework is there, even if iitc is not yet loaded
if(typeof window.plugin !== 'function') window.plugin = function() {};
//PLUGIN AUTHORS: writing a plugin outside of the IITC build environment? if so, delete these lines!!
//(leaving them in place might break the 'About IITC' page or break update checks)
plugin_info.buildName = 'wise';
plugin_info.dateTimeVersion = '20170225.54902';
plugin_info.pluginId = 'score-cycle-times';
//END PLUGIN AUTHORS NOTE
// PLUGIN START ////////////////////////////////////////////////////////
// use own namespace for plugin
window.plugin.scoreCycleTimes = function() {};
window.plugin.scoreCycleTimes.CHECKPOINT = 5*60*60; //5 hours per checkpoint
window.plugin.scoreCycleTimes.CYCLE = 7*25*60*60; //7 25 hour 'days' per cycle
window.plugin.scoreCycleTimes.setup = function() {
  // add a div to the sidebar, and basic style
  $('#sidebar').append('<div id="score_cycle_times_display"></div>');
  $('#score_cycle_times_display').css({'color':'#ffce00'});
  window.plugin.scoreCycleTimes.update();
};
window.plugin.scoreCycleTimes.update = function() {
  // checkpoint and cycle start times are based on a simple modulus of the timestamp
  // no special epoch (other than the unix timestamp/javascript's 1970-01-01 00:00 UTC) is required
  // when regional scoreboards were introduced, the first cycle would have started at 2014-01-15 10:00 UTC - but it was
  // a few checkpoints in when scores were first added
  var now = new Date().getTime();
  var cycleStart = Math.floor(now / (window.plugin.scoreCycleTimes.CYCLE*1000)) * (window.plugin.scoreCycleTimes.CYCLE*1000);
  var cycleEnd = cycleStart + window.plugin.scoreCycleTimes.CYCLE*1000;
  var checkpointStart = Math.floor(now / (window.plugin.scoreCycleTimes.CHECKPOINT*1000)) * (window.plugin.scoreCycleTimes.CHECKPOINT*1000);
  var checkpointEnd = checkpointStart + window.plugin.scoreCycleTimes.CHECKPOINT*1000;
  // Render one label/timestamp row of the sidebar table.
  var formatRow = function(label,time) {
    var timeStr = unixTimeToString(time,true);
    timeStr = timeStr.replace(/:00$/,''); //FIXME: doesn't remove seconds from AM/PM formatted dates
    return '<tr><td>'+label+'</td><td>'+timeStr+'</td></tr>';
  };
  var html = '<table>'
    + formatRow('Cycle start', cycleStart)
    + formatRow('Previous checkpoint', checkpointStart)
    + formatRow('Next checkpoint', checkpointEnd)
    + formatRow('Cycle end', cycleEnd)
    + '</table>';
  $('#score_cycle_times_display').html(html);
  // Schedule a re-render for the moment the next checkpoint passes.
  setTimeout ( window.plugin.scoreCycleTimes.update, checkpointEnd-now);
};
var setup = window.plugin.scoreCycleTimes.setup;
// PLUGIN END //////////////////////////////////////////////////////////
setup.info = plugin_info; //add the script info data to the function as a property
if(!window.bootPlugins) window.bootPlugins = [];
window.bootPlugins.push(setup);
// if IITC has already booted, immediately run the 'setup' function
if(window.iitcLoaded && typeof setup === 'function') setup();
} // wrapper end
// inject code into site context
var script = document.createElement('script');
var info = {};
if (typeof GM_info !== 'undefined' && GM_info && GM_info.script) info.script = {version: GM_info.script.version, name: GM_info.script.name, description: GM_info.script.description };
script.appendChild(document.createTextNode('('+ wrapper +')('+JSON.stringify(info)+');'));
(document.body || document.head || document.documentElement).appendChild(script);
| isc |
piranna/usrbinenv | server.js | 1943 | #!/bin/node
var ignoreEnvironment = false
var endLinesWithNull = false
var env = process.env
// Command name that triggers in-process execution instead of exec()
const NODEJS_BIN = 'node'
/**
 * Remove an environment variable (implements `-u` / `--unset`).
 * Exits the process with status 2 when the variable is not defined.
 * @param {string} key - Name of the environment variable to remove.
 */
function unset(key)
{
  if(!(key in env))
  {
    console.error('Unknown environment key:',key)
    process.exit(2)
  }
  // Use `delete` so the key really disappears: assigning `undefined` would
  // keep the key present and, for process.env, coerce the value to the
  // string "undefined".
  delete env[key]
}
var argv = process.argv.slice(2)

// Options
for(; argv.length; argv.shift())
{
  var arg = argv[0]
  if(arg[0] != '-') break;

  // Compare the whole argument, not just its first character: every option
  // starts with '-', so the previous `switch(arg[0])` always matched the
  // first case and silently treated -0/-u/--unset as --ignore-environment.
  switch(arg)
  {
    case '-':
    case '-i':
    case '--ignore-environment':
      ignoreEnvironment = true;
    break;

    case '-0':
    case '--null':
      endLinesWithNull = true;
    break;

    case '-u':
    case '--unset':
    {
      // The variable name is the next argument; consume it here and let the
      // loop's shift() drop the option itself.
      argv.shift()
      unset(argv[0])
    }
    break;

    default:
      if(arg.substr(0,8) == '--unset=')
      {
        unset(arg.substr(8))
        break
      }

      console.error('Unknown argument:',arg)
      process.exit(1)
  }
}
// Environment variables
if(ignoreEnvironment)
  process.env = env = {}

// Consume leading KEY=VALUE arguments, stopping at the first one without '='
while(argv.length)
{
  var separatorIndex = argv[0].indexOf('=')
  if(separatorIndex < 0) break;

  env[argv[0].slice(0, separatorIndex)] = argv[0].slice(separatorIndex + 1);

  argv.shift()
}
// Exec command or show environment variables
var command = argv.shift()
if(command)
{
  // Trailing whitespace would otherwise end up in the executed path
  command = command.replace(/\s+$/, '')
  if(command === NODEJS_BIN)
  {
    // We are trying to execute a Node.js script, re-use the current instance.
    // This requires that the Node.js script doesn't use any execution trick like
    // checking "!module.parent" or "require.main === module". If you want your
    // package to work both as a library and an executable, define it in two
    // different scripts and use package.json "main" and "bin" entries.
    process.argv = [NODEJS_BIN].concat(argv)
    // Top-level `return` is legal here because CommonJS wraps modules in a
    // function.
    return require(argv[0])
  }
  // Replace the current process image with the requested command (execvp-like)
  require('kexec')(command, argv)
}
else
{
  // No command given: print the resulting environment, one KEY=value entry
  // per line (NUL-terminated instead when -0/--null was given).
  var endLine = endLinesWithNull ? '\0' : '\n'
  for(var key in env)
    process.stdout.write(key+'='+env[key] + endLine);
}
| isc |
martinezp/marionette-project | assets/js/app/about/show/show_view.js | 225 | ContactManager.module("AboutApp.Show", function(Show, ContactManager, Backbone, Marionette, $, _){
Show.AboutMessage = Marionette.ItemView.extend({
template: "ContactManager.AboutApp.Show.Templates.AboutMessage"
});
}); | isc |
akileez/toolz | src/date/quarterOfTheYear.js | 331 | function quarter (date) {
var month = date.getMonth() + 1
return (Math.ceil(month / 3))
}
module.exports = quarter
// Original Code - mout.js
// function quarter(date){
// var month = date.getMonth();
// if (month < 3) return 1;
// if (month < 6) return 2;
// if (month < 9) return 3;
// return 4;
// }
| isc |
jrick/btcd | blockchain/indexers/txindex.go | 18136 | // Copyright (c) 2016 The btcsuite developers
// Copyright (c) 2016-2017 The Decred developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.
package indexers
import (
"errors"
"fmt"
"github.com/decred/dcrd/blockchain"
"github.com/decred/dcrd/chaincfg/chainhash"
"github.com/decred/dcrd/database"
"github.com/decred/dcrd/dcrutil"
"github.com/decred/dcrd/wire"
)
const (
	// txIndexName is the human-readable name for the index.
	txIndexName = "transaction index"
)
var (
	// txIndexKey is the key of the transaction index and the db bucket used
	// to house it.
	txIndexKey = []byte("txbyhashidx")
	// idByHashIndexBucketName is the name of the db bucket used to house
	// the block hash -> block id index.
	idByHashIndexBucketName = []byte("idbyhashidx")
	// hashByIDIndexBucketName is the name of the db bucket used to house
	// the block id -> block hash index.
	hashByIDIndexBucketName = []byte("hashbyididx")
	// errNoBlockIDEntry is an error that indicates a requested entry does
	// not exist in the block ID index.
	errNoBlockIDEntry = errors.New("no entry in the block ID index")
)
// -----------------------------------------------------------------------------
// The transaction index consists of an entry for every transaction in the main
// chain. In order to significanly optimize the space requirements a separate
// index which provides an internal mapping between each block that has been
// indexed and a unique ID for use within the hash to location mappings. The ID
// is simply a sequentially incremented uint32. This is useful because it is
// only 4 bytes versus 32 bytes hashes and thus saves a ton of space in the
// index.
//
// There are three buckets used in total. The first bucket maps the hash of
// each transaction to the specific block location. The second bucket maps the
// hash of each block to the unique ID and the third maps that ID back to the
// block hash.
//
// NOTE: Although it is technically possible for multiple transactions to have
// the same hash as long as the previous transaction with the same hash is fully
// spent, this code only stores the most recent one because doing otherwise
// would add a non-trivial amount of space and overhead for something that will
// realistically never happen per the probability and even if it did, the old
// one must be fully spent and so the most likely transaction a caller would
// want for a given hash is the most recent one anyways.
//
// The serialized format for keys and values in the block hash to ID bucket is:
// <hash> = <ID>
//
// Field Type Size
// hash chainhash.Hash 32 bytes
// ID uint32 4 bytes
// -----
// Total: 36 bytes
//
// The serialized format for keys and values in the ID to block hash bucket is:
// <ID> = <hash>
//
// Field Type Size
// ID uint32 4 bytes
// hash chainhash.Hash 32 bytes
// -----
// Total: 36 bytes
//
// The serialized format for the keys and values in the tx index bucket is:
//
// <txhash> = <block id><start offset><tx length>
//
// Field Type Size
// txhash chainhash.Hash 32 bytes
// block id uint32 4 bytes
// start offset uint32 4 bytes
// tx length uint32 4 bytes
// -----
// Total: 44 bytes
// -----------------------------------------------------------------------------
// dbPutBlockIDIndexEntry uses an existing database transaction to update or
// add the index entries for the hash to id and id to hash mappings for the
// provided values.
func dbPutBlockIDIndexEntry(dbTx database.Tx, hash *chainhash.Hash, id uint32) error {
	// The block ID is serialized to a fixed 4-byte value (per byteOrder) so
	// it can serve as both a bucket key and a bucket value.
	var idBytes [4]byte
	byteOrder.PutUint32(idBytes[:], id)

	meta := dbTx.Metadata()

	// Store the forward mapping (block hash -> ID).
	if err := meta.Bucket(idByHashIndexBucketName).Put(hash[:], idBytes[:]); err != nil {
		return err
	}

	// Store the reverse mapping (ID -> block hash).
	return meta.Bucket(hashByIDIndexBucketName).Put(idBytes[:], hash[:])
}
// dbRemoveBlockIDIndexEntry uses an existing database transaction to remove
// the index entries from the hash to id and id to hash mappings for the
// provided hash.
func dbRemoveBlockIDIndexEntry(dbTx database.Tx, hash *chainhash.Hash) error {
	meta := dbTx.Metadata()

	// Look up the serialized ID for the hash; there is nothing to do when
	// the hash was never indexed.
	hashBucket := meta.Bucket(idByHashIndexBucketName)
	idBytes := hashBucket.Get(hash[:])
	if idBytes == nil {
		return nil
	}

	// Drop the hash -> ID mapping first, then the ID -> hash mapping.
	if err := hashBucket.Delete(hash[:]); err != nil {
		return err
	}
	return meta.Bucket(hashByIDIndexBucketName).Delete(idBytes)
}
// dbFetchBlockIDByHash uses an existing database transaction to retrieve the
// block id for the provided hash from the index.
func dbFetchBlockIDByHash(dbTx database.Tx, hash *chainhash.Hash) (uint32, error) {
	idBytes := dbTx.Metadata().Bucket(idByHashIndexBucketName).Get(hash[:])
	if idBytes == nil {
		// The hash has never been assigned an internal block ID.
		return 0, errNoBlockIDEntry
	}
	return byteOrder.Uint32(idBytes), nil
}
// dbFetchBlockHashBySerializedID uses an existing database transaction to
// retrieve the hash for the provided serialized block id from the index.
func dbFetchBlockHashBySerializedID(dbTx database.Tx, serializedID []byte) (*chainhash.Hash, error) {
	hashBytes := dbTx.Metadata().Bucket(hashByIDIndexBucketName).Get(serializedID)
	if hashBytes == nil {
		// No block has been assigned this internal ID.
		return nil, errNoBlockIDEntry
	}

	hash := new(chainhash.Hash)
	copy(hash[:], hashBytes)
	return hash, nil
}
// dbFetchBlockHashByID uses an existing database transaction to retrieve the
// hash for the provided block id from the index.
func dbFetchBlockHashByID(dbTx database.Tx, id uint32) (*chainhash.Hash, error) {
	// Serialize the ID the same way the index stores it, then reuse the
	// serialized-ID lookup.
	var idBytes [4]byte
	byteOrder.PutUint32(idBytes[:], id)
	return dbFetchBlockHashBySerializedID(dbTx, idBytes[:])
}
// putTxIndexEntry serializes the provided values according to the format
// described above for a transaction index entry.  The target byte slice must
// be at least large enough to handle the number of bytes defined by the
// txEntrySize constant or it will panic.
func putTxIndexEntry(target []byte, blockID uint32, txLoc wire.TxLoc) {
	// Entry layout: <block id (4)><start offset (4)><tx length (4)>.
	byteOrder.PutUint32(target[0:4], blockID)
	byteOrder.PutUint32(target[4:8], uint32(txLoc.TxStart))
	byteOrder.PutUint32(target[8:12], uint32(txLoc.TxLen))
}
// dbPutTxIndexEntry uses an existing database transaction to update the
// transaction index given the provided serialized data that is expected to
// have been serialized by putTxIndexEntry.
func dbPutTxIndexEntry(dbTx database.Tx, txHash *chainhash.Hash, serializedData []byte) error {
	return dbTx.Metadata().Bucket(txIndexKey).Put(txHash[:], serializedData)
}
// dbFetchTxIndexEntry uses an existing database transaction to fetch the block
// region for the provided transaction hash from the transaction index. When
// there is no entry for the provided hash, nil will be returned for the both
// the region and the error.
func dbFetchTxIndexEntry(dbTx database.Tx, txHash *chainhash.Hash) (*database.BlockRegion, error) {
// Load the record from the database and return now if it doesn't exist.
txIndex := dbTx.Metadata().Bucket(txIndexKey)
serializedData := txIndex.Get(txHash[:])
if len(serializedData) == 0 {
return nil, nil
}
// Ensure the serialized data has enough bytes to properly deserialize.
if len(serializedData) < 12 {
return nil, database.Error{
ErrorCode: database.ErrCorruption,
Description: fmt.Sprintf("corrupt transaction index "+
"entry for %s", txHash),
}
}
// Load the block hash associated with the block ID.
hash, err := dbFetchBlockHashBySerializedID(dbTx, serializedData[0:4])
if err != nil {
return nil, database.Error{
ErrorCode: database.ErrCorruption,
Description: fmt.Sprintf("corrupt transaction index "+
"entry for %s: %v", txHash, err),
}
}
// Deserialize the final entry.
region := database.BlockRegion{Hash: &chainhash.Hash{}}
copy(region.Hash[:], hash[:])
region.Offset = byteOrder.Uint32(serializedData[4:8])
region.Len = byteOrder.Uint32(serializedData[8:12])
return ®ion, nil
}
// dbAddTxIndexEntries uses an existing database transaction to add a
// transaction index entry for every transaction in the parent of the passed
// block (if they were valid) and every stake transaction in the passed block.
func dbAddTxIndexEntries(dbTx database.Tx, block, parent *dcrutil.Block, blockID uint32) error {
	// As an optimization, allocate a single slice big enough to hold all
	// of the serialized transaction index entries for the block and
	// serialize them directly into the slice.  Then, pass the appropriate
	// subslice to the database to be written.  This approach significantly
	// cuts down on the number of required allocations.
	addEntries := func(txns []*dcrutil.Tx, txLocs []wire.TxLoc, blockID uint32) error {
		offset := 0
		serializedValues := make([]byte, len(txns)*txEntrySize)
		for i, tx := range txns {
			putTxIndexEntry(serializedValues[offset:], blockID,
				txLocs[i])
			endOffset := offset + txEntrySize
			// The full slice expression caps the capacity so the
			// database layer cannot grow into the next entry's
			// bytes.
			err := dbPutTxIndexEntry(dbTx, tx.Hash(),
				serializedValues[offset:endOffset:endOffset])
			if err != nil {
				return err
			}
			offset += txEntrySize
		}
		return nil
	}
	// Add the regular transactions of the parent if voted valid.
	if approvesParent(block) && block.Height() > 1 {
		// The offset and length of the transactions within the
		// serialized parent block.
		txLocs, _, err := parent.TxLoc()
		if err != nil {
			return err
		}
		// The parent's entries point at the parent block, so they use
		// the parent's (already assigned) internal block ID.
		parentBlockID, err := dbFetchBlockIDByHash(dbTx, parent.Hash())
		if err != nil {
			return err
		}
		err = addEntries(parent.Transactions(), txLocs, parentBlockID)
		if err != nil {
			return err
		}
	}
	// Add the stake transactions of the current block.
	//
	// The offset and length of the stake transactions within the serialized
	// block.
	_, stakeTxLocs, err := block.TxLoc()
	if err != nil {
		return err
	}
	return addEntries(block.STransactions(), stakeTxLocs, blockID)
}
// dbRemoveTxIndexEntry uses an existing database transaction to remove the
// most recent transaction index entry for the given hash.
func dbRemoveTxIndexEntry(dbTx database.Tx, txHash *chainhash.Hash) error {
	bucket := dbTx.Metadata().Bucket(txIndexKey)
	// Removing an entry that was never indexed indicates a logic error, so
	// surface it rather than silently succeeding.
	if len(bucket.Get(txHash[:])) == 0 {
		return fmt.Errorf("can't remove non-existent transaction %s "+
			"from the transaction index", txHash)
	}
	return bucket.Delete(txHash[:])
}
// dbRemoveTxIndexEntries uses an existing database transaction to remove the
// latest transaction entry for every transaction in the parent of the passed
// block (if they were valid) and every stake transaction in the passed block.
func dbRemoveTxIndexEntries(dbTx database.Tx, block, parent *dcrutil.Block) error {
	removeAll := func(txns []*dcrutil.Tx) error {
		for _, tx := range txns {
			if err := dbRemoveTxIndexEntry(dbTx, tx.Hash()); err != nil {
				return err
			}
		}
		return nil
	}

	// The parent's regular transactions were only indexed when this block
	// voted the parent valid, so only remove them in that case.
	if approvesParent(block) && block.Height() > 1 {
		if err := removeAll(parent.Transactions()); err != nil {
			return err
		}
	}

	// Remove the stake transactions of the block being disconnected.
	return removeAll(block.STransactions())
}
// TxIndex implements a transaction by hash index.  That is to say, it supports
// querying all transactions by their hash.
type TxIndex struct {
	// db is the database backing the index.
	db database.DB
	// curBlockID is the highest internal block ID allocated so far; it is
	// discovered by Init and advanced as blocks are connected.
	curBlockID uint32
}
// Ensure the TxIndex type implements the Indexer interface.
var _ Indexer = (*TxIndex)(nil)
// Init initializes the hash-based transaction index.  In particular, it finds
// the highest used block ID and stores it for later use when connecting or
// disconnecting blocks.
//
// This is part of the Indexer interface.
func (idx *TxIndex) Init() error {
	// Find the latest known block id field for the internal block id
	// index and initialize it.  This is done because it's a lot more
	// efficient to do a single search at initialize time than it is to
	// write another value to the database on every update.
	err := idx.db.View(func(dbTx database.Tx) error {
		// Scan forward in large gaps to find a block id that doesn't
		// exist yet to serve as an upper bound for the binary search
		// below.  An error from dbFetchBlockHashByID means the ID is
		// unused (it only fails with errNoBlockIDEntry).
		var highestKnown, nextUnknown uint32
		testBlockID := uint32(1)
		increment := uint32(100000)
		for {
			_, err := dbFetchBlockHashByID(dbTx, testBlockID)
			if err != nil {
				nextUnknown = testBlockID
				break
			}
			highestKnown = testBlockID
			testBlockID += increment
		}
		log.Tracef("Forward scan (highest known %d, next unknown %d)",
			highestKnown, nextUnknown)
		// No used block IDs due to new database.
		if nextUnknown == 1 {
			return nil
		}
		// Use a binary search to find the final highest used block id.
		// This will take at most ceil(log_2(increment)) attempts.
		for {
			testBlockID = (highestKnown + nextUnknown) / 2
			_, err := dbFetchBlockHashByID(dbTx, testBlockID)
			if err != nil {
				nextUnknown = testBlockID
			} else {
				highestKnown = testBlockID
			}
			log.Tracef("Binary scan (highest known %d, next "+
				"unknown %d)", highestKnown, nextUnknown)
			if highestKnown+1 == nextUnknown {
				break
			}
		}
		idx.curBlockID = highestKnown
		return nil
	})
	if err != nil {
		return err
	}
	log.Debugf("Current internal block ID: %d", idx.curBlockID)
	return nil
}
// Key returns the database key to use for the index as a byte slice.
//
// This is part of the Indexer interface.
func (idx *TxIndex) Key() []byte {
return txIndexKey
}
// Name returns the human-readable name of the index.
//
// This is part of the Indexer interface.
func (idx *TxIndex) Name() string {
return txIndexName
}
// Create is invoked when the indexer manager determines the index needs
// to be created for the first time. It creates the buckets for the hash-based
// transaction index and the internal block ID indexes.
//
// This is part of the Indexer interface.
func (idx *TxIndex) Create(dbTx database.Tx) error {
meta := dbTx.Metadata()
if _, err := meta.CreateBucket(idByHashIndexBucketName); err != nil {
return err
}
if _, err := meta.CreateBucket(hashByIDIndexBucketName); err != nil {
return err
}
_, err := meta.CreateBucket(txIndexKey)
return err
}
// ConnectBlock is invoked by the index manager when a new block has been
// connected to the main chain. This indexer adds a hash-to-transaction mapping
// for every transaction in the passed block.
//
// This is part of the Indexer interface.
func (idx *TxIndex) ConnectBlock(dbTx database.Tx, block, parent *dcrutil.Block, view *blockchain.UtxoViewpoint) error {
// Increment the internal block ID to use for the block being connected
// and add all of the transactions in the block to the index.
newBlockID := idx.curBlockID + 1
if err := dbAddTxIndexEntries(dbTx, block, parent, newBlockID); err != nil {
return err
}
// Add the new block ID index entry for the block being connected and
// update the current internal block ID accordingly.
err := dbPutBlockIDIndexEntry(dbTx, block.Hash(), newBlockID)
if err != nil {
return err
}
idx.curBlockID = newBlockID
return nil
}
// DisconnectBlock is invoked by the index manager when a block has been
// disconnected from the main chain. This indexer removes the
// hash-to-transaction mapping for every transaction in the block.
//
// This is part of the Indexer interface.
func (idx *TxIndex) DisconnectBlock(dbTx database.Tx, block, parent *dcrutil.Block, view *blockchain.UtxoViewpoint) error {
// Remove all of the transactions in the block from the index.
if err := dbRemoveTxIndexEntries(dbTx, block, parent); err != nil {
return err
}
// Remove the block ID index entry for the block being disconnected and
// decrement the current internal block ID to account for it.
if err := dbRemoveBlockIDIndexEntry(dbTx, block.Hash()); err != nil {
return err
}
idx.curBlockID--
return nil
}
// TxBlockRegion returns the block region for the provided transaction hash
// from the transaction index. The block region can in turn be used to load the
// raw transaction bytes. When there is no entry for the provided hash, nil
// will be returned for the both the entry and the error.
//
// This function is safe for concurrent access.
func (idx *TxIndex) TxBlockRegion(hash chainhash.Hash) (*database.BlockRegion, error) {
var region *database.BlockRegion
err := idx.db.View(func(dbTx database.Tx) error {
var err error
region, err = dbFetchTxIndexEntry(dbTx, &hash)
return err
})
return region, err
}
// NewTxIndex returns a new instance of an indexer that is used to create a
// mapping of the hashes of all transactions in the blockchain to the respective
// block, location within the block, and size of the transaction.
//
// It implements the Indexer interface which plugs into the IndexManager that in
// turn is used by the blockchain package. This allows the index to be
// seamlessly maintained along with the chain.
func NewTxIndex(db database.DB) *TxIndex {
return &TxIndex{db: db}
}
// dropBlockIDIndex drops the internal block id index.
func dropBlockIDIndex(db database.DB) error {
return db.Update(func(dbTx database.Tx) error {
meta := dbTx.Metadata()
err := meta.DeleteBucket(idByHashIndexBucketName)
if err != nil {
return err
}
return meta.DeleteBucket(hashByIDIndexBucketName)
})
}
// DropTxIndex drops the transaction index from the provided database if it
// exists. Since the address index relies on it, the address index will also be
// dropped when it exists.
func DropTxIndex(db database.DB) error {
if err := dropIndex(db, addrIndexKey, addrIndexName); err != nil {
return err
}
return dropIndex(db, txIndexKey, txIndexName)
}
| isc |
hitsl/bouser_db | bouser_db/__init__.py | 182 | # -*- coding: utf-8 -*-
from bouser_db import interfaces, service
from bouser_db import service
__author__ = 'mmalkov'
def make(config):
return service.DataBaseService(config) | isc |
kisom/goutils | cmd/stealchain-server/main.go | 2365 | package main
import (
"crypto/rand"
"crypto/tls"
"crypto/x509"
"encoding/hex"
"encoding/pem"
"flag"
"fmt"
"io/ioutil"
"net"
"os"
"github.com/kisom/goutils/die"
)
func main() {
cfg := &tls.Config{}
var sysRoot, listenAddr, certFile, keyFile string
var verify bool
flag.StringVar(&sysRoot, "ca", "", "provide an alternate CA bundle")
flag.StringVar(&listenAddr, "listen", ":443", "address to listen on")
flag.StringVar(&certFile, "cert", "", "server certificate to present to clients")
flag.StringVar(&keyFile, "key", "", "key for server certificate")
flag.BoolVar(&verify, "verify", false, "verify client certificates")
flag.Parse()
if verify {
cfg.ClientAuth = tls.RequireAndVerifyClientCert
} else {
cfg.ClientAuth = tls.RequestClientCert
}
if certFile == "" {
fmt.Println("[!] missing required flag -cert")
os.Exit(1)
}
if keyFile == "" {
fmt.Println("[!] missing required flag -key")
os.Exit(1)
}
cert, err := tls.LoadX509KeyPair(certFile, keyFile)
if err != nil {
fmt.Printf("[!] could not load server key pair: %v", err)
os.Exit(1)
}
cfg.Certificates = append(cfg.Certificates, cert)
if sysRoot != "" {
pemList, err := ioutil.ReadFile(sysRoot)
die.If(err)
roots := x509.NewCertPool()
if !roots.AppendCertsFromPEM(pemList) {
fmt.Printf("[!] no valid roots found")
roots = nil
}
cfg.RootCAs = roots
}
l, err := net.Listen("tcp", listenAddr)
if err != nil {
fmt.Println(err.Error())
os.Exit(1)
}
for {
conn, err := l.Accept()
if err != nil {
fmt.Println(err.Error())
}
raddr := conn.RemoteAddr()
tconn := tls.Server(conn, cfg)
err = tconn.Handshake()
if err != nil {
fmt.Printf("[+] %v: failed to complete handshake: %v\n", raddr, err)
continue
}
cs := tconn.ConnectionState()
if len(cs.PeerCertificates) == 0 {
fmt.Printf("[+] %v: no chain presented\n", raddr)
continue
}
var chain []byte
for _, cert := range cs.PeerCertificates {
p := &pem.Block{
Type: "CERTIFICATE",
Bytes: cert.Raw,
}
chain = append(chain, pem.EncodeToMemory(p)...)
}
var nonce [16]byte
_, err = rand.Read(nonce[:])
if err != nil {
panic(err)
}
fname := fmt.Sprintf("%v-%v.pem", raddr, hex.EncodeToString(nonce[:]))
err = ioutil.WriteFile(fname, chain, 0644)
die.If(err)
fmt.Printf("%v: [+] wrote %v.\n", raddr, fname)
}
}
| isc |
elisee/space-crew | typings/main.d.ts | 380 | /// <reference path="main\ambient\express\express.d.ts" />
/// <reference path="main\ambient\mime\mime.d.ts" />
/// <reference path="main\ambient\node\node.d.ts" />
/// <reference path="main\ambient\serve-static\serve-static.d.ts" />
/// <reference path="main\ambient\socket.io-client\socket.io-client.d.ts" />
/// <reference path="main\ambient\socket.io\socket.io.d.ts" />
| isc |
io7m/jcanephora | com.io7m.jcanephora.fake/src/main/java/com/io7m/jcanephora/fake/FakeTimerQuery.java | 1699 | /*
* Copyright © 2016 <[email protected]> http://io7m.com
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
* SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
* IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
package com.io7m.jcanephora.fake;
import com.io7m.jcanephora.core.JCGLTimerQueryType;
final class FakeTimerQuery extends FakeObjectUnshared
implements JCGLTimerQueryType
{
private long time_start;
private long time_end;
private boolean started;
FakeTimerQuery(
final FakeContext ctx,
final int id)
{
super(ctx, id);
}
long getTimeStart()
{
return this.time_start;
}
void setTimeStart(final long t)
{
this.time_start = t;
}
long getTimeEnd()
{
return this.time_end;
}
void setTimeEnd(final long t)
{
this.time_end = t;
}
@Override
public String toString()
{
final StringBuilder sb = new StringBuilder("[TimerQuery ");
sb.append(super.glName());
sb.append(']');
return sb.toString();
}
boolean isStarted()
{
return this.started;
}
void setStarted(final boolean s)
{
this.started = s;
}
}
| isc |
danielschmitz/angular2-codigos | 03/AngularPanel/app/app.component.ts | 191 | import {Component} from '@angular/core';
import {Panel} from './container'
@Component({
selector: 'my-app',
templateUrl: 'app/app.component.html'
})
export class AppComponent { } | isc |
Hdom/cfm | classes/apts.php | 1693 | <?php
/**
* Created by PhpStorm.
* User: DennysO
* Date: 3/30/2015
* Time: 8:49 PM
*/
class apts {
private $con;
private $email;
public function __construct($email){
$this->con = new SQL();
$this->email = $email;
}
public function get_d_apts($d, $m, $y, $did)
{
$u = new user();
$uq = $u->get_user($this->email);
$user = $uq[0];
$sql = "SELECT * FROM apts WHERE uid = '$user->uid' and did = '$did' and day = '$d' and month = '$m' and year = '$y'";
$res = $this->con->query($sql);
if($res)
{
$output = "<td>";
foreach($res as $apt)
{
$output .= "Appointment at ".$apt->time."<br />";
}
return $output."</td>";
}
return "<td></td>";
}
public function get_t_apts($d, $m, $y, $t)
{
$u = new user();
$uq = $u->get_user($this->email);
$user = $uq[0];
$time = $t;
$sql = "SELECT * FROM apts WHERE uid = '$user->uid' and day = '$d' and month = '$m' and year = '$y' and time = '$time'";
$res = $this->con->query($sql);
if($res)
{
foreach($res as $apt)
{
return $apt;
}
}
return false;
}
public function get_apts($d, $m, $y)
{
$u = new user();
$uq = $u->get_user($this->email);
$user = $uq[0];
$sql = "SELECT * FROM apts WHERE day = '$d' and month = '$m' and year = '$y'";
$res = $this->con->query($sql);
if($res)
{
return $res;
}
return false;
}
} | mit |
hyperwallet/java-sdk | src/test/java/com/hyperwallet/clientsdk/model/HyperwalletReceiptPaginationOptionsTest.java | 807 | package com.hyperwallet.clientsdk.model;
import java.util.Date;
/**
* @author fkrauthan
*/
public class HyperwalletReceiptPaginationOptionsTest extends BaseModelTest<HyperwalletReceiptPaginationOptions> {
@Override
protected HyperwalletReceiptPaginationOptions createBaseModel() {
HyperwalletReceiptPaginationOptions options = new HyperwalletReceiptPaginationOptions();
options
.type(HyperwalletReceipt.Type.ADJUSTMENT)
.createdAfter(new Date())
.createdBefore(new Date())
.limit(10)
.sortBy("test-sort-by");
return options;
}
@Override
protected Class<HyperwalletReceiptPaginationOptions> createModelClass() {
return HyperwalletReceiptPaginationOptions.class;
}
}
| mit |
cscorley/swindle | setup.py | 538 | # -*- coding: utf-8 -*-
from setuptools import setup, find_packages
with open('README.md') as f:
readme = f.read()
with open('LICENSE') as f:
license = f.read()
setup(
name='swindle',
version='0.0.1',
description='A LISP-like that throws away parentheses for Python-style indentation.',
long_description=readme,
author='Christopher S. Corley',
author_email='[email protected]',
url='https://github.com/cscorley/swindle',
license=license,
packages=find_packages(exclude=('tests', 'docs'))
)
| mit |
MarinMarinov/Databases | HW11EntityFramework/HW11EntityFramework/T1CreateDbContextForNorthwind/Category.cs | 1188 | //------------------------------------------------------------------------------
// <auto-generated>
// This code was generated from a template.
//
// Manual changes to this file may cause unexpected behavior in your application.
// Manual changes to this file will be overwritten if the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace T1CreateDbContextForNorthwind
{
using System;
using System.Collections.Generic;
public partial class Category
{
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2214:DoNotCallOverridableMethodsInConstructors")]
public Category()
{
this.Products = new HashSet<Product>();
}
public int CategoryID { get; set; }
public string CategoryName { get; set; }
public string Description { get; set; }
public byte[] Picture { get; set; }
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
public virtual ICollection<Product> Products { get; set; }
}
}
| mit |
bdosorio/forest | src/common/forest-engine.spec.js | 3705 | describe('ForestEngine', function () {
var ForestEngine;
beforeEach(module('forest.engine'));
beforeEach(inject(function (_ForestEngine_) {
ForestEngine = _ForestEngine_;
}));
it('should have a list of trees', function () {
expect(ForestEngine.trees).toBeDefined();
});
it('should have a list of lumberjacks', function () {
expect(ForestEngine.lumberjacks).toBeDefined();
});
it('should have a list of bears', function () {
expect(ForestEngine.bears).toBeDefined();
});
it('should have a map of unqiue active cells in the board', function () {
expect(ForestEngine.board).toBeDefined();
});
describe('.init', function () {
var size = 10;
beforeEach(function () {
ForestEngine.tress = [];
ForestEngine.lumberjacks = [];
ForestEngine.bears = [];
});
it('should exist', function () {
expect(ForestEngine.init).toBeDefined();
});
it('should have an age of 0', function () {
expect(ForestEngine.age).toEqual(0);
});
it('should create 10% lumberjacks', function () {
ForestEngine.init(size);
expect(ForestEngine.lumberjacks.length).toEqual(Math.ceil((size * size) / 10));
});
it('should create 50% trees', function () {
ForestEngine.init(size);
expect(ForestEngine.trees.length).toEqual(Math.ceil((size * size) / 2));
});
it('should create 2% bears', function () {
ForestEngine.init(size);
expect(ForestEngine.bears.length).toEqual(Math.ceil((size * size) * 0.02));
});
it('should fill the board with the data in the type lists', function () {
ForestEngine.init(size);
expect(_.size(ForestEngine.board)).toEqual(ForestEngine.bears.length + ForestEngine.lumberjacks.length + ForestEngine.trees.length);
});
});
describe('.growTree', function () {
beforeEach(function () {
ForestEngine.trees = [];
});
it('should exist', function () {
expect(ForestEngine.growTree).toBeDefined();
});
it('should initialize and return an objec with the parameters set', function () {
ForestEngine.growTree(1, 1, 1);
expect(ForestEngine.trees[0]).toEqual({x: 1, y: 1, age: 1, hash: '1,1', type: 'T'});
expect(ForestEngine.board['1,1']).toEqual({x: 1, y: 1, age: 1, hash: '1,1', type: 'T'});
});
});
describe('.hireLumberjack', function () {
beforeEach(function () {
ForestEngine.lumberjacks = [];
});
it('should exist', function () {
expect(ForestEngine.hireLumberjack).toBeDefined();
});
it('should initialize a random lumberjack cell', function () {
var boardCount = _.size(ForestEngine.board);
ForestEngine.hireLumberjack();
expect(ForestEngine.lumberjacks[0]).toBeDefined();
expect(_.size(ForestEngine.board)).toBeGreaterThan(boardCount);
});
});
describe('.discoverBear', function () {
beforeEach(function () {
ForestEngine.bears = [];
});
it('should exist', function () {
expect(ForestEngine.discoverBear).toBeDefined();
});
it('should initialize a random bear cell', function () {
var boardCount = _.size(ForestEngine.board);
ForestEngine.discoverBear();
expect(ForestEngine.bears[0]).toBeDefined();
expect(_.size(ForestEngine.board)).toBeGreaterThan(boardCount);
});
});
});
| mit |
AxioDL/PathShagged | Runtime/CGameHintInfo.hpp | 1335 | #pragma once
#include <string_view>
#include <vector>
#include "Runtime/IFactory.hpp"
#include "Runtime/RetroTypes.hpp"
namespace metaforce {
class CGameHintInfo {
public:
struct SHintLocation {
CAssetId x0_mlvlId;
CAssetId x4_mreaId;
TAreaId x8_areaId = kInvalidAreaId;
CAssetId xc_stringId;
SHintLocation(CInputStream&, s32);
};
class CGameHint {
std::string x0_name;
float x10_immediateTime;
float x14_normalTime;
CAssetId x18_stringId;
float x1c_textTime;
std::vector<SHintLocation> x20_locations;
public:
CGameHint(CInputStream&, s32);
float GetNormalTime() const { return x14_normalTime; }
float GetImmediateTime() const { return x10_immediateTime; }
float GetTextTime() const { return x1c_textTime; }
std::string_view GetName() const { return x0_name; }
CAssetId GetStringID() const { return x18_stringId; }
const std::vector<SHintLocation>& GetLocations() const { return x20_locations; }
};
private:
std::vector<CGameHint> x0_hints;
public:
CGameHintInfo(CInputStream&, s32);
const std::vector<CGameHint>& GetHints() const { return x0_hints; }
static int FindHintIndex(std::string_view str);
};
CFactoryFnReturn FHintFactory(const SObjectTag&, CInputStream&, const CVParamTransfer&, CObjectReference*);
} // namespace metaforce
| mit |
JohnWhite80/list_grid_view | library/src/com/handmark/pulltorefresh/library/PullToRefreshBase.java | 46410 | /*******************************************************************************
* Copyright 2011, 2012 Chris Banes.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package com.handmark.pulltorefresh.library;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.drawable.Drawable;
import android.os.Build.VERSION;
import android.os.Build.VERSION_CODES;
import android.os.Bundle;
import android.os.Parcelable;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Gravity;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewConfiguration;
import android.view.ViewGroup;
import android.view.animation.DecelerateInterpolator;
import android.view.animation.Interpolator;
import android.widget.FrameLayout;
import android.widget.LinearLayout;
import com.handmark.pulltorefresh.library.internal.FlipLoadingLayout;
import com.handmark.pulltorefresh.library.internal.LoadingLayout;
import com.handmark.pulltorefresh.library.internal.RotateLoadingLayout;
import com.handmark.pulltorefresh.library.internal.Utils;
import com.handmark.pulltorefresh.library.internal.ViewCompat;
public abstract class PullToRefreshBase<T extends View> extends LinearLayout implements IPullToRefresh<T> {
// ===========================================================
// Constants
// ===========================================================
static final boolean DEBUG = true;
static final boolean USE_HW_LAYERS = false;
static final String LOG_TAG = "PullToRefresh";
static final float FRICTION = 2.0f;
public static final int SMOOTH_SCROLL_DURATION_MS = 200;
public static final int SMOOTH_SCROLL_LONG_DURATION_MS = 325;
static final int DEMO_SCROLL_INTERVAL = 225;
static final String STATE_STATE = "ptr_state";
static final String STATE_MODE = "ptr_mode";
static final String STATE_CURRENT_MODE = "ptr_current_mode";
static final String STATE_SCROLLING_REFRESHING_ENABLED = "ptr_disable_scrolling";
static final String STATE_SHOW_REFRESHING_VIEW = "ptr_show_refreshing_view";
static final String STATE_SUPER = "ptr_super";
// ===========================================================
// Fields
// ===========================================================
private int mTouchSlop;
private float mLastMotionX, mLastMotionY;
private float mInitialMotionX, mInitialMotionY;
private boolean mIsBeingDragged = false;
private State mState = State.RESET;
private Mode mMode = Mode.BOTH;
private Mode mCurrentMode;
T mRefreshableView;
private FrameLayout mRefreshableViewWrapper;
private boolean mShowViewWhileRefreshing = true;
private boolean mScrollingWhileRefreshingEnabled = false;
private boolean mFilterTouchEvents = true;
private boolean mOverScrollEnabled = true;
private boolean mLayoutVisibilityChangesEnabled = true;
private Interpolator mScrollAnimationInterpolator;
private AnimationStyle mLoadingAnimationStyle = AnimationStyle.getDefault();
private LoadingLayout mHeaderLayout;
private LoadingLayout mFooterLayout;
private OnRefreshListener<T> mOnRefreshListener;
private OnRefreshListener2<T> mOnRefreshListener2;
private OnPullEventListener<T> mOnPullEventListener;
private SmoothScrollRunnable mCurrentSmoothScrollRunnable;
// ===========================================================
// Constructors
// ===========================================================
public PullToRefreshBase(Context context) {
super(context);
init(context, null);
}
public PullToRefreshBase(Context context, AttributeSet attrs) {
super(context, attrs);
init(context, attrs);
}
public PullToRefreshBase(Context context, Mode mode) {
super(context);
mMode = mode;
init(context, null);
}
public PullToRefreshBase(Context context, Mode mode, AnimationStyle animStyle) {
super(context);
mMode = mode;
mLoadingAnimationStyle = animStyle;
init(context, null);
}
@Override
public void addView(View child, int index, ViewGroup.LayoutParams params) {
if (DEBUG) {
Log.d(LOG_TAG, "addView: " + child.getClass().getSimpleName());
}
final T refreshableView = getRefreshableView();
if (refreshableView instanceof ViewGroup) {
((ViewGroup) refreshableView).addView(child, index, params);
} else {
throw new UnsupportedOperationException("Refreshable View is not a ViewGroup so can't addView");
}
}
@Override
public final boolean demo() {
if (mMode.showHeaderLoadingLayout() && isReadyForPullStart()) {
smoothScrollToAndBack(-getHeaderSize() * 2);
return true;
} else if (mMode.showFooterLoadingLayout() && isReadyForPullEnd()) {
smoothScrollToAndBack(getFooterSize() * 2);
return true;
}
return false;
}
@Override
public final Mode getCurrentMode() {
return mCurrentMode;
}
@Override
public final boolean getFilterTouchEvents() {
return mFilterTouchEvents;
}
@Override
public final ILoadingLayout getLoadingLayoutProxy() {
return getLoadingLayoutProxy(true, true);
}
@Override
public final ILoadingLayout getLoadingLayoutProxy(boolean includeStart, boolean includeEnd) {
return createLoadingLayoutProxy(includeStart, includeEnd);
}
@Override
public final Mode getMode() {
return mMode;
}
@Override
public final T getRefreshableView() {
return mRefreshableView;
}
@Override
public final boolean getShowViewWhileRefreshing() {
return mShowViewWhileRefreshing;
}
@Override
public final State getState() {
return mState;
}
/**
* @deprecated See {@link #isScrollingWhileRefreshingEnabled()}.
*/
public final boolean isDisableScrollingWhileRefreshing() {
return !isScrollingWhileRefreshingEnabled();
}
@Override
public final boolean isPullToRefreshEnabled() {
return mMode.permitsPullToRefresh();
}
@Override
public final boolean isPullToRefreshOverScrollEnabled() {
return VERSION.SDK_INT >= VERSION_CODES.GINGERBREAD && mOverScrollEnabled
&& OverscrollHelper.isAndroidOverScrollEnabled(mRefreshableView);
}
@Override
public final boolean isRefreshing() {
return mState == State.REFRESHING || mState == State.MANUAL_REFRESHING;
}
@Override
public final boolean isScrollingWhileRefreshingEnabled() {
return mScrollingWhileRefreshingEnabled;
}
@Override
public final boolean onInterceptTouchEvent(MotionEvent event) {
if (!isPullToRefreshEnabled()) {
return false;
}
final int action = event.getAction();
if (action == MotionEvent.ACTION_CANCEL || action == MotionEvent.ACTION_UP) {
mIsBeingDragged = false;
return false;
}
if (action != MotionEvent.ACTION_DOWN && mIsBeingDragged) {
return true;
}
switch (action) {
case MotionEvent.ACTION_MOVE: {
// If we're refreshing, and the flag is set. Eat all MOVE events
if (!mScrollingWhileRefreshingEnabled && isRefreshing()) {
return true;
}
if (isReadyForPull()) {
final float y = event.getY(), x = event.getX();
final float diff, oppositeDiff, absDiff;
// We need to use the correct values, based on scroll
// direction
switch (getPullToRefreshScrollDirection()) {
case HORIZONTAL:
diff = x - mLastMotionX;
oppositeDiff = y - mLastMotionY;
break;
case VERTICAL:
default:
diff = y - mLastMotionY;
oppositeDiff = x - mLastMotionX;
break;
}
absDiff = Math.abs(diff);
if (absDiff > mTouchSlop && (!mFilterTouchEvents || absDiff > Math.abs(oppositeDiff))) {
if (mMode.showHeaderLoadingLayout() && diff >= 1f && isReadyForPullStart()) {
mLastMotionY = y;
mLastMotionX = x;
mIsBeingDragged = true;
if (mMode == Mode.BOTH) {
mCurrentMode = Mode.PULL_FROM_START;
}
} else if (mMode.showFooterLoadingLayout() && diff <= -1f && isReadyForPullEnd()) {
mLastMotionY = y;
mLastMotionX = x;
mIsBeingDragged = true;
if (mMode == Mode.BOTH) {
mCurrentMode = Mode.PULL_FROM_END;
}
}
}
}
break;
}
case MotionEvent.ACTION_DOWN: {
if (isReadyForPull()) {
mLastMotionY = mInitialMotionY = event.getY();
mLastMotionX = mInitialMotionX = event.getX();
mIsBeingDragged = false;
}
break;
}
}
return mIsBeingDragged;
}
@Override
public final void onRefreshComplete() {
if (isRefreshing()) {
setState(State.RESET);
}
}
@Override
public final boolean onTouchEvent(MotionEvent event) {
if (!isPullToRefreshEnabled()) {
return false;
}
// If we're refreshing, and the flag is set. Eat the event
if (!mScrollingWhileRefreshingEnabled && isRefreshing()) {
return true;
}
if (event.getAction() == MotionEvent.ACTION_DOWN && event.getEdgeFlags() != 0) {
return false;
}
switch (event.getAction()) {
case MotionEvent.ACTION_MOVE: {
if (mIsBeingDragged) {
mLastMotionY = event.getY();
mLastMotionX = event.getX();
pullEvent();
return true;
}
break;
}
case MotionEvent.ACTION_DOWN: {
if (isReadyForPull()) {
mLastMotionY = mInitialMotionY = event.getY();
mLastMotionX = mInitialMotionX = event.getX();
return true;
}
break;
}
case MotionEvent.ACTION_CANCEL:
case MotionEvent.ACTION_UP: {
if (mIsBeingDragged) {
mIsBeingDragged = false;
if (mState == State.RELEASE_TO_REFRESH
&& (null != mOnRefreshListener || null != mOnRefreshListener2)) {
setState(State.REFRESHING, true);
return true;
}
// If we're already refreshing, just scroll back to the top
if (isRefreshing()) {
smoothScrollTo(0);
return true;
}
// If we haven't returned by here, then we're not in a state
// to pull, so just reset
setState(State.RESET);
return true;
}
break;
}
}
return false;
}
public final void setScrollingWhileRefreshingEnabled(boolean allowScrollingWhileRefreshing) {
mScrollingWhileRefreshingEnabled = allowScrollingWhileRefreshing;
}
/**
* @deprecated See {@link #setScrollingWhileRefreshingEnabled(boolean)}
*/
public void setDisableScrollingWhileRefreshing(boolean disableScrollingWhileRefreshing) {
setScrollingWhileRefreshingEnabled(!disableScrollingWhileRefreshing);
}
@Override
public final void setFilterTouchEvents(boolean filterEvents) {
mFilterTouchEvents = filterEvents;
}
/**
* @deprecated You should now call this method on the result of
* {@link #getLoadingLayoutProxy()}.
*/
public void setLastUpdatedLabel(CharSequence label) {
getLoadingLayoutProxy().setLastUpdatedLabel(label);
}
/**
* @deprecated You should now call this method on the result of
* {@link #getLoadingLayoutProxy()}.
*/
public void setLoadingDrawable(Drawable drawable) {
getLoadingLayoutProxy().setLoadingDrawable(drawable);
}
/**
* @deprecated You should now call this method on the result of
* {@link #getLoadingLayoutProxy(boolean, boolean)}.
*/
public void setLoadingDrawable(Drawable drawable, Mode mode) {
getLoadingLayoutProxy(mode.showHeaderLoadingLayout(), mode.showFooterLoadingLayout()).setLoadingDrawable(
drawable);
}
@Override
public void setLongClickable(boolean longClickable) {
getRefreshableView().setLongClickable(longClickable);
}
@Override
public final void setMode(Mode mode) {
if (mode != mMode) {
if (DEBUG) {
Log.d(LOG_TAG, "Setting mode to: " + mode);
}
mMode = mode;
updateUIForMode();
}
}
public void setOnPullEventListener(OnPullEventListener<T> listener) {
mOnPullEventListener = listener;
}
@Override
public final void setOnRefreshListener(OnRefreshListener<T> listener) {
mOnRefreshListener = listener;
mOnRefreshListener2 = null;
}
@Override
public final void setOnRefreshListener(OnRefreshListener2<T> listener) {
mOnRefreshListener2 = listener;
mOnRefreshListener = null;
}
/**
* @deprecated You should now call this method on the result of
* {@link #getLoadingLayoutProxy()}.
*/
public void setPullLabel(CharSequence pullLabel) {
getLoadingLayoutProxy().setPullLabel(pullLabel);
}
/**
* @deprecated You should now call this method on the result of
* {@link #getLoadingLayoutProxy(boolean, boolean)}.
*/
public void setPullLabel(CharSequence pullLabel, Mode mode) {
getLoadingLayoutProxy(mode.showHeaderLoadingLayout(), mode.showFooterLoadingLayout()).setPullLabel(pullLabel);
}
/**
* @param enable Whether Pull-To-Refresh should be used
* @deprecated This simple calls setMode with an appropriate mode based on
* the passed value.
*/
public final void setPullToRefreshEnabled(boolean enable) {
setMode(enable ? Mode.getDefault() : Mode.DISABLED);
}
@Override
public final void setPullToRefreshOverScrollEnabled(boolean enabled) {
mOverScrollEnabled = enabled;
}
@Override
public final void setRefreshing() {
setRefreshing(true);
}
@Override
public final void setRefreshing(boolean doScroll) {
if (!isRefreshing()) {
setState(State.MANUAL_REFRESHING, doScroll);
}
}
/**
* @deprecated You should now call this method on the result of
* {@link #getLoadingLayoutProxy()}.
*/
public void setRefreshingLabel(CharSequence refreshingLabel) {
getLoadingLayoutProxy().setRefreshingLabel(refreshingLabel);
}
/**
* @deprecated You should now call this method on the result of
* {@link #getLoadingLayoutProxy(boolean, boolean)}.
*/
public void setRefreshingLabel(CharSequence refreshingLabel, Mode mode) {
getLoadingLayoutProxy(mode.showHeaderLoadingLayout(), mode.showFooterLoadingLayout()).setRefreshingLabel(
refreshingLabel);
}
/**
* @deprecated You should now call this method on the result of
* {@link #getLoadingLayoutProxy()}.
*/
public void setReleaseLabel(CharSequence releaseLabel) {
setReleaseLabel(releaseLabel, Mode.BOTH);
}
/**
* @deprecated You should now call this method on the result of
* {@link #getLoadingLayoutProxy(boolean, boolean)}.
*/
public void setReleaseLabel(CharSequence releaseLabel, Mode mode) {
getLoadingLayoutProxy(mode.showHeaderLoadingLayout(), mode.showFooterLoadingLayout()).setReleaseLabel(
releaseLabel);
}
public void setScrollAnimationInterpolator(Interpolator interpolator) {
mScrollAnimationInterpolator = interpolator;
}
@Override
public final void setShowViewWhileRefreshing(boolean showView) {
mShowViewWhileRefreshing = showView;
}
/**
* @return Either {@link Orientation#VERTICAL} or
* {@link Orientation#HORIZONTAL} depending on the scroll direction.
*/
public abstract Orientation getPullToRefreshScrollDirection();
/**
 * Moves the view into the given State, runs the matching UI transition,
 * and finally notifies any registered OnPullEventListener.
 *
 * @param state - new state to apply.
 * @param params - optional extras. params[0] ("should the UI scroll") is
 *        read unconditionally for REFRESHING/MANUAL_REFRESHING, so any
 *        caller entering those states must supply it or this method
 *        throws ArrayIndexOutOfBoundsException.
 */
final void setState(State state, final boolean... params) {
mState = state;
if (DEBUG) {
Log.d(LOG_TAG, "State: " + mState.name());
}
switch (mState) {
case RESET:
onReset();
break;
case PULL_TO_REFRESH:
onPullToRefresh();
break;
case RELEASE_TO_REFRESH:
onReleaseToRefresh();
break;
case REFRESHING:
case MANUAL_REFRESHING:
// Requires params[0]; see javadoc above.
onRefreshing(params[0]);
break;
case OVERSCROLLING:
// NO-OP
break;
}
// Call OnPullEventListener
if (null != mOnPullEventListener) {
mOnPullEventListener.onPullEvent(this, mState, mCurrentMode);
}
}
/**
* Used internally for adding view. Need because we override addView to
* pass-through to the Refreshable View
*/
protected final void addViewInternal(View child, int index, ViewGroup.LayoutParams params) {
super.addView(child, index, params);
}
/**
* Used internally for adding view. Need because we override addView to
* pass-through to the Refreshable View
*/
protected final void addViewInternal(View child, ViewGroup.LayoutParams params) {
super.addView(child, -1, params);
}
protected LoadingLayout createLoadingLayout(Context context, Mode mode, TypedArray attrs) {
LoadingLayout layout = mLoadingAnimationStyle.createLoadingLayout(context, mode,
getPullToRefreshScrollDirection(), attrs);
layout.setVisibility(View.INVISIBLE);
return layout;
}
/**
* Used internally for {@link #getLoadingLayoutProxy(boolean, boolean)}.
* Allows derivative classes to include any extra LoadingLayouts.
*/
protected LoadingLayoutProxy createLoadingLayoutProxy(final boolean includeStart, final boolean includeEnd) {
LoadingLayoutProxy proxy = new LoadingLayoutProxy();
if (includeStart && mMode.showHeaderLoadingLayout()) {
proxy.addLayout(mHeaderLayout);
}
if (includeEnd && mMode.showFooterLoadingLayout()) {
proxy.addLayout(mFooterLayout);
}
return proxy;
}
/**
* This is implemented by derived classes to return the created View. If you
* need to use a custom View (such as a custom ListView), override this
* method and return an instance of your custom class.
* <p/>
* Be sure to set the ID of the view in this method, especially if you're
* using a ListActivity or ListFragment.
*
* @param context Context to create view with
* @param attrs AttributeSet from wrapped class. Means that anything you
* include in the XML layout declaration will be routed to the
* created View
* @return New instance of the Refreshable View
*/
protected abstract T createRefreshableView(Context context, AttributeSet attrs);
protected final void disableLoadingLayoutVisibilityChanges() {
mLayoutVisibilityChangesEnabled = false;
}
protected final LoadingLayout getFooterLayout() {
return mFooterLayout;
}
protected final int getFooterSize() {
return mFooterLayout.getContentSize();
}
protected final LoadingLayout getHeaderLayout() {
return mHeaderLayout;
}
protected final int getHeaderSize() {
return mHeaderLayout.getContentSize();
}
protected int getPullToRefreshScrollDuration() {
return SMOOTH_SCROLL_DURATION_MS;
}
protected int getPullToRefreshScrollDurationLonger() {
return SMOOTH_SCROLL_LONG_DURATION_MS;
}
protected FrameLayout getRefreshableViewWrapper() {
return mRefreshableViewWrapper;
}
/**
* Allows Derivative classes to handle the XML Attrs without creating a
* TypedArray themsevles
*
* @param a - TypedArray of PullToRefresh Attributes
*/
protected void handleStyledAttributes(TypedArray a) {
}
/**
* Implemented by derived class to return whether the View is in a state
* where the user can Pull to Refresh by scrolling from the end.
*
* @return true if the View is currently in the correct state (for example,
* bottom of a ListView)
*/
protected abstract boolean isReadyForPullEnd();
/**
* Implemented by derived class to return whether the View is in a state
* where the user can Pull to Refresh by scrolling from the start.
*
* @return true if the View is currently the correct state (for example, top
* of a ListView)
*/
protected abstract boolean isReadyForPullStart();
/**
* Called by {@link #onRestoreInstanceState(Parcelable)} so that derivative
* classes can handle their saved instance state.
*
* @param savedInstanceState - Bundle which contains saved instance state.
*/
protected void onPtrRestoreInstanceState(Bundle savedInstanceState) {
}
/**
* Called by {@link #onSaveInstanceState()} so that derivative classes can
* save their instance state.
*
* @param saveState - Bundle to be updated with saved state.
*/
protected void onPtrSaveInstanceState(Bundle saveState) {
}
/**
* Called when the UI has been to be updated to be in the
* {@link State#PULL_TO_REFRESH} state.
*/
protected void onPullToRefresh() {
switch (mCurrentMode) {
case PULL_FROM_END:
mFooterLayout.pullToRefresh();
break;
case PULL_FROM_START:
mHeaderLayout.pullToRefresh();
break;
default:
// NO-OP
break;
}
}
/**
 * Called when the UI has been to be updated to be in the
 * {@link State#REFRESHING} or {@link State#MANUAL_REFRESHING} state.
 * Puts both loading layouts (where shown by the current Mode) into their
 * refreshing animation, then either scrolls to reveal the relevant layout
 * or fires the refresh listener immediately.
 *
 * @param doScroll - Whether the UI should scroll for this event.
 */
protected void onRefreshing(final boolean doScroll) {
if (mMode.showHeaderLoadingLayout()) {
mHeaderLayout.refreshing();
}
if (mMode.showFooterLoadingLayout()) {
mFooterLayout.refreshing();
}
if (doScroll) {
if (mShowViewWhileRefreshing) {
// Call Refresh Listener when the Scroll has finished
OnSmoothScrollFinishedListener listener = new OnSmoothScrollFinishedListener() {
@Override
public void onSmoothScrollFinished() {
callRefreshListener();
}
};
switch (mCurrentMode) {
case MANUAL_REFRESH_ONLY:
case PULL_FROM_END:
smoothScrollTo(getFooterSize(), listener);
break;
default:
case PULL_FROM_START:
// Negative offset scrolls the header into view.
smoothScrollTo(-getHeaderSize(), listener);
break;
}
} else {
// Loading view hidden: snap back to rest. NOTE(review): the
// refresh listener is NOT called on this path -- confirm intended.
smoothScrollTo(0);
}
} else {
// We're not scrolling, so just call Refresh Listener now
callRefreshListener();
}
}
/**
* Called when the UI has been to be updated to be in the
* {@link State#RELEASE_TO_REFRESH} state.
*/
protected void onReleaseToRefresh() {
switch (mCurrentMode) {
case PULL_FROM_END:
mFooterLayout.releaseToRefresh();
break;
case PULL_FROM_START:
mHeaderLayout.releaseToRefresh();
break;
default:
// NO-OP
break;
}
}
/**
* Called when the UI has been to be updated to be in the
* {@link State#RESET} state.
*/
protected void onReset() {
mIsBeingDragged = false;
mLayoutVisibilityChangesEnabled = true;
// Always reset both layouts, just in case...
mHeaderLayout.reset();
mFooterLayout.reset();
smoothScrollTo(0);
}
@Override
protected final void onRestoreInstanceState(Parcelable state) {
if (state instanceof Bundle) {
Bundle bundle = (Bundle) state;
setMode(Mode.mapIntToValue(bundle.getInt(STATE_MODE, 0)));
mCurrentMode = Mode.mapIntToValue(bundle.getInt(STATE_CURRENT_MODE, 0));
mScrollingWhileRefreshingEnabled = bundle.getBoolean(STATE_SCROLLING_REFRESHING_ENABLED, false);
mShowViewWhileRefreshing = bundle.getBoolean(STATE_SHOW_REFRESHING_VIEW, true);
// Let super Restore Itself
super.onRestoreInstanceState(bundle.getParcelable(STATE_SUPER));
State viewState = State.mapIntToValue(bundle.getInt(STATE_STATE, 0));
if (viewState == State.REFRESHING || viewState == State.MANUAL_REFRESHING) {
setState(viewState, true);
}
// Now let derivative classes restore their state
onPtrRestoreInstanceState(bundle);
return;
}
super.onRestoreInstanceState(state);
}
@Override
protected final Parcelable onSaveInstanceState() {
Bundle bundle = new Bundle();
// Let derivative classes get a chance to save state first, that way we
// can make sure they don't overrite any of our values
onPtrSaveInstanceState(bundle);
bundle.putInt(STATE_STATE, mState.getIntValue());
bundle.putInt(STATE_MODE, mMode.getIntValue());
bundle.putInt(STATE_CURRENT_MODE, mCurrentMode.getIntValue());
bundle.putBoolean(STATE_SCROLLING_REFRESHING_ENABLED, mScrollingWhileRefreshingEnabled);
bundle.putBoolean(STATE_SHOW_REFRESHING_VIEW, mShowViewWhileRefreshing);
bundle.putParcelable(STATE_SUPER, super.onSaveInstanceState());
return bundle;
}
@Override
protected final void onSizeChanged(int w, int h, int oldw, int oldh) {
if (DEBUG) {
Log.d(LOG_TAG, String.format("onSizeChanged. W: %d, H: %d", w, h));
}
super.onSizeChanged(w, h, oldw, oldh);
// We need to update the header/footer when our size changes
refreshLoadingViewsSize();
// Update the Refreshable View layout
refreshRefreshableViewSize(w, h);
/**
* As we're currently in a Layout Pass, we need to schedule another one
* to layout any changes we've made here
*/
post(new Runnable() {
@Override
public void run() {
requestLayout();
}
});
}
/**
 * Re-measure the Loading Views height, and adjust internal padding as
 * necessary.
 *
 * Trick used here: each visible loading layout is sized to the maximum
 * pull distance (plus 20% slack) and then hidden off-screen by applying
 * an equal NEGATIVE padding on that edge, so pulling simply reveals it.
 */
protected final void refreshLoadingViewsSize() {
final int maximumPullScroll = (int) (getMaximumPullScroll() * 1.2f);
int pLeft = getPaddingLeft();
int pTop = getPaddingTop();
int pRight = getPaddingRight();
int pBottom = getPaddingBottom();
switch (getPullToRefreshScrollDirection()) {
case HORIZONTAL:
if (mMode.showHeaderLoadingLayout()) {
mHeaderLayout.setWidth(maximumPullScroll);
pLeft = -maximumPullScroll;
} else {
pLeft = 0;
}
if (mMode.showFooterLoadingLayout()) {
mFooterLayout.setWidth(maximumPullScroll);
pRight = -maximumPullScroll;
} else {
pRight = 0;
}
break;
case VERTICAL:
if (mMode.showHeaderLoadingLayout()) {
mHeaderLayout.setHeight(maximumPullScroll);
pTop = -maximumPullScroll;
} else {
pTop = 0;
}
if (mMode.showFooterLoadingLayout()) {
mFooterLayout.setHeight(maximumPullScroll);
pBottom = -maximumPullScroll;
} else {
pBottom = 0;
}
break;
}
if (DEBUG) {
Log.d(LOG_TAG, String.format("Setting Padding. L: %d, T: %d, R: %d, B: %d", pLeft, pTop, pRight, pBottom));
}
setPadding(pLeft, pTop, pRight, pBottom);
}
protected final void refreshRefreshableViewSize(int width, int height) {
// We need to set the Height of the Refreshable View to the same as
// this layout
LinearLayout.LayoutParams lp = (LinearLayout.LayoutParams) mRefreshableViewWrapper.getLayoutParams();
switch (getPullToRefreshScrollDirection()) {
case HORIZONTAL:
if (lp.width != width) {
lp.width = width;
mRefreshableViewWrapper.requestLayout();
}
break;
case VERTICAL:
if (lp.height != height) {
lp.height = height;
mRefreshableViewWrapper.requestLayout();
}
break;
}
}
/**
 * Helper method which just calls scrollTo() in the correct scrolling
 * direction, after clamping and updating loading-layout visibility.
 *
 * @param value - New Scroll value. Negative values reveal the header,
 *        positive values reveal the footer.
 */
protected final void setHeaderScroll(int value) {
if (DEBUG) {
Log.d(LOG_TAG, "setHeaderScroll: " + value);
}
// Clamp value to within the [-max, +max] pull scroll range
final int maximumPullScroll = getMaximumPullScroll();
value = Math.min(maximumPullScroll, Math.max(-maximumPullScroll, value));
if (mLayoutVisibilityChangesEnabled) {
if (value < 0) {
mHeaderLayout.setVisibility(View.VISIBLE);
} else if (value > 0) {
mFooterLayout.setVisibility(View.VISIBLE);
} else {
// At rest: hide both loading layouts.
mHeaderLayout.setVisibility(View.INVISIBLE);
mFooterLayout.setVisibility(View.INVISIBLE);
}
}
if (USE_HW_LAYERS) {
/**
 * Use a Hardware Layer on the Refreshable View if we've scrolled at
 * all. We don't use them on the Header/Footer Views as they change
 * often, which would negate any HW layer performance boost.
 */
ViewCompat.setLayerType(mRefreshableViewWrapper, value != 0 ? View.LAYER_TYPE_HARDWARE
: View.LAYER_TYPE_NONE);
}
switch (getPullToRefreshScrollDirection()) {
case VERTICAL:
scrollTo(0, value);
break;
case HORIZONTAL:
scrollTo(value, 0);
break;
}
}
/**
* Smooth Scroll to position using the default duration of
* {@value #SMOOTH_SCROLL_DURATION_MS} ms.
*
* @param scrollValue - Position to scroll to
*/
protected final void smoothScrollTo(int scrollValue) {
smoothScrollTo(scrollValue, getPullToRefreshScrollDuration());
}
/**
* Smooth Scroll to position using the default duration of
* {@value #SMOOTH_SCROLL_DURATION_MS} ms.
*
* @param scrollValue - Position to scroll to
* @param listener - Listener for scroll
*/
protected final void smoothScrollTo(int scrollValue, OnSmoothScrollFinishedListener listener) {
smoothScrollTo(scrollValue, getPullToRefreshScrollDuration(), 0, listener);
}
/**
* Smooth Scroll to position using the longer default duration of
* {@value #SMOOTH_SCROLL_LONG_DURATION_MS} ms.
*
* @param scrollValue - Position to scroll to
*/
protected final void smoothScrollToLonger(int scrollValue) {
smoothScrollTo(scrollValue, getPullToRefreshScrollDurationLonger());
}
/**
* Updates the View State when the mode has been set. This does not do any
* checking that the mode is different to current state so always updates.
*/
protected void updateUIForMode() {
// We need to use the correct LayoutParam values, based on scroll
// direction
final LinearLayout.LayoutParams lp = getLoadingLayoutLayoutParams();
// Remove Header, and then add Header Loading View again if needed
if (this == mHeaderLayout.getParent()) {
removeView(mHeaderLayout);
}
if (mMode.showHeaderLoadingLayout()) {
addViewInternal(mHeaderLayout, 0, lp);
}
// Remove Footer, and then add Footer Loading View again if needed
if (this == mFooterLayout.getParent()) {
removeView(mFooterLayout);
}
if (mMode.showFooterLoadingLayout()) {
addViewInternal(mFooterLayout, lp);
}
// Hide Loading Views
refreshLoadingViewsSize();
// If we're not using Mode.BOTH, set mCurrentMode to mMode, otherwise
// set it to pull down
mCurrentMode = (mMode != Mode.BOTH) ? mMode : Mode.PULL_FROM_START;
}
private void addRefreshableView(Context context, T refreshableView) {
mRefreshableViewWrapper = new FrameLayout(context);
mRefreshableViewWrapper.addView(refreshableView, ViewGroup.LayoutParams.MATCH_PARENT,
ViewGroup.LayoutParams.MATCH_PARENT);
addViewInternal(mRefreshableViewWrapper, new LinearLayout.LayoutParams(LayoutParams.MATCH_PARENT,
LayoutParams.MATCH_PARENT));
}
/**
 * Notifies whichever refresh listener is registered. The single-callback
 * listener takes precedence; otherwise the two-callback listener is told
 * which pull direction triggered the refresh.
 */
private void callRefreshListener() {
    if (mOnRefreshListener != null) {
        mOnRefreshListener.onRefresh(this);
        return;
    }
    if (mOnRefreshListener2 == null) {
        return;
    }
    switch (mCurrentMode) {
        case PULL_FROM_START:
            mOnRefreshListener2.onPullDownToRefresh(this);
            break;
        case PULL_FROM_END:
            mOnRefreshListener2.onPullUpToRefresh(this);
            break;
        default:
            // Other modes fire no direction-specific callback.
            break;
    }
}
@SuppressWarnings("deprecation")
private void init(Context context, AttributeSet attrs) {
switch (getPullToRefreshScrollDirection()) {
case HORIZONTAL:
setOrientation(LinearLayout.HORIZONTAL);
break;
case VERTICAL:
default:
setOrientation(LinearLayout.VERTICAL);
break;
}
setGravity(Gravity.CENTER);
ViewConfiguration config = ViewConfiguration.get(context);
mTouchSlop = config.getScaledTouchSlop();
// Styleables from XML
TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.PullToRefresh);
if (a.hasValue(R.styleable.PullToRefresh_ptrMode)) {
mMode = Mode.mapIntToValue(a.getInteger(R.styleable.PullToRefresh_ptrMode, 0));
}
if (a.hasValue(R.styleable.PullToRefresh_ptrAnimationStyle)) {
mLoadingAnimationStyle = AnimationStyle.mapIntToValue(a.getInteger(
R.styleable.PullToRefresh_ptrAnimationStyle, 0));
}
// Refreshable View
// By passing the attrs, we can add ListView/GridView params via XML
mRefreshableView = createRefreshableView(context, attrs);
addRefreshableView(context, mRefreshableView);
// We need to create now layouts now
mHeaderLayout = createLoadingLayout(context, Mode.PULL_FROM_START, a);
mFooterLayout = createLoadingLayout(context, Mode.PULL_FROM_END, a);
/**
* Styleables from XML
*/
if (a.hasValue(R.styleable.PullToRefresh_ptrRefreshableViewBackground)) {
Drawable background = a.getDrawable(R.styleable.PullToRefresh_ptrRefreshableViewBackground);
if (null != background) {
mRefreshableView.setBackgroundDrawable(background);
}
} else if (a.hasValue(R.styleable.PullToRefresh_ptrAdapterViewBackground)) {
Utils.warnDeprecation("ptrAdapterViewBackground", "ptrRefreshableViewBackground");
Drawable background = a.getDrawable(R.styleable.PullToRefresh_ptrAdapterViewBackground);
if (null != background) {
mRefreshableView.setBackgroundDrawable(background);
}
}
if (a.hasValue(R.styleable.PullToRefresh_ptrOverScroll)) {
mOverScrollEnabled = a.getBoolean(R.styleable.PullToRefresh_ptrOverScroll, true);
}
if (a.hasValue(R.styleable.PullToRefresh_ptrScrollingWhileRefreshingEnabled)) {
mScrollingWhileRefreshingEnabled = a.getBoolean(
R.styleable.PullToRefresh_ptrScrollingWhileRefreshingEnabled, false);
}
// Let the derivative classes have a go at handling attributes, then
// recycle them...
handleStyledAttributes(a);
a.recycle();
// Finally update the UI for the modes
updateUIForMode();
}
/**
 * Checks whether the configured Mode allows a pull to start from the
 * refreshable view's current scroll position.
 *
 * @return true if a pull may begin, false otherwise (including DISABLED
 *         and MANUAL_REFRESH_ONLY modes).
 */
private boolean isReadyForPull() {
    if (mMode == Mode.PULL_FROM_START) {
        return isReadyForPullStart();
    }
    if (mMode == Mode.PULL_FROM_END) {
        return isReadyForPullEnd();
    }
    if (mMode == Mode.BOTH) {
        return isReadyForPullEnd() || isReadyForPullStart();
    }
    return false;
}
/**
 * Actions a Pull Event: converts the current drag distance into a
 * friction-damped scroll offset, applies it, and moves the state machine
 * between PULL_TO_REFRESH and RELEASE_TO_REFRESH as the pull crosses the
 * size of the relevant loading layout.
 */
private void pullEvent() {
final int newScrollValue;
final int itemDimension;
final float initialMotionValue, lastMotionValue;
switch (getPullToRefreshScrollDirection()) {
case HORIZONTAL:
initialMotionValue = mInitialMotionX;
lastMotionValue = mLastMotionX;
break;
case VERTICAL:
default:
initialMotionValue = mInitialMotionY;
lastMotionValue = mLastMotionY;
break;
}
switch (mCurrentMode) {
case PULL_FROM_END:
// Pulling from the end yields a non-negative (footer) offset.
newScrollValue = Math.round(Math.max(initialMotionValue - lastMotionValue, 0) / FRICTION);
itemDimension = getFooterSize();
break;
case PULL_FROM_START:
default:
// Pulling from the start yields a non-positive (header) offset.
newScrollValue = Math.round(Math.min(initialMotionValue - lastMotionValue, 0) / FRICTION);
itemDimension = getHeaderSize();
break;
}
setHeaderScroll(newScrollValue);
if (newScrollValue != 0 && !isRefreshing()) {
// scale is the fraction of the loading layout currently revealed.
float scale = Math.abs(newScrollValue) / (float) itemDimension;
switch (mCurrentMode) {
case PULL_FROM_END:
mFooterLayout.onPull(scale);
break;
case PULL_FROM_START:
default:
mHeaderLayout.onPull(scale);
break;
}
if (mState != State.PULL_TO_REFRESH && itemDimension >= Math.abs(newScrollValue)) {
setState(State.PULL_TO_REFRESH);
} else if (mState == State.PULL_TO_REFRESH && itemDimension < Math.abs(newScrollValue)) {
setState(State.RELEASE_TO_REFRESH);
}
}
}
private LinearLayout.LayoutParams getLoadingLayoutLayoutParams() {
switch (getPullToRefreshScrollDirection()) {
case HORIZONTAL:
return new LinearLayout.LayoutParams(LinearLayout.LayoutParams.WRAP_CONTENT,
LinearLayout.LayoutParams.MATCH_PARENT);
case VERTICAL:
default:
return new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT,
LinearLayout.LayoutParams.WRAP_CONTENT);
}
}
/**
 * @return the maximum distance (in pixels) the view may be pulled,
 *         derived from the view's dimension along the scroll direction,
 *         damped by FRICTION.
 */
private int getMaximumPullScroll() {
    final boolean horizontal =
            getPullToRefreshScrollDirection() == Orientation.HORIZONTAL;
    final int dimension = horizontal ? getWidth() : getHeight();
    return Math.round(dimension / FRICTION);
}
/**
* Smooth Scroll to position using the specific duration
*
* @param scrollValue - Position to scroll to
* @param duration - Duration of animation in milliseconds
*/
private final void smoothScrollTo(int scrollValue, long duration) {
smoothScrollTo(scrollValue, duration, 0, null);
}
private final void smoothScrollTo(int newScrollValue, long duration, long delayMillis,
OnSmoothScrollFinishedListener listener) {
if (null != mCurrentSmoothScrollRunnable) {
mCurrentSmoothScrollRunnable.stop();
}
final int oldScrollValue;
switch (getPullToRefreshScrollDirection()) {
case HORIZONTAL:
oldScrollValue = getScrollX();
break;
case VERTICAL:
default:
oldScrollValue = getScrollY();
break;
}
if (oldScrollValue != newScrollValue) {
if (null == mScrollAnimationInterpolator) {
// Default interpolator is a Decelerate Interpolator
mScrollAnimationInterpolator = new DecelerateInterpolator();
}
mCurrentSmoothScrollRunnable = new SmoothScrollRunnable(oldScrollValue, newScrollValue, duration, listener);
if (delayMillis > 0) {
postDelayed(mCurrentSmoothScrollRunnable, delayMillis);
} else {
post(mCurrentSmoothScrollRunnable);
}
}
}
private final void smoothScrollToAndBack(int y) {
smoothScrollTo(y, SMOOTH_SCROLL_DURATION_MS, 0, new OnSmoothScrollFinishedListener() {
@Override
public void onSmoothScrollFinished() {
smoothScrollTo(0, SMOOTH_SCROLL_DURATION_MS, DEMO_SCROLL_INTERVAL, null);
}
});
}
public static enum AnimationStyle {
/**
* This is the default for Android-PullToRefresh. Allows you to use any
* drawable, which is automatically rotated and used as a Progress Bar.
*/
ROTATE,
/**
* This is the old default, and what is commonly used on iOS. Uses an
* arrow image which flips depending on where the user has scrolled.
*/
FLIP;
static AnimationStyle getDefault() {
return ROTATE;
}
/**
* Maps an int to a specific mode. This is needed when saving state, or
* inflating the view from XML where the mode is given through a attr
* int.
*
* @param modeInt - int to map a Mode to
* @return Mode that modeInt maps to, or ROTATE by default.
*/
static AnimationStyle mapIntToValue(int modeInt) {
switch (modeInt) {
case 0x0:
default:
return ROTATE;
case 0x1:
return FLIP;
}
}
LoadingLayout createLoadingLayout(Context context, Mode mode, Orientation scrollDirection, TypedArray attrs) {
switch (this) {
case ROTATE:
default:
return new RotateLoadingLayout(context, mode, scrollDirection, attrs);
case FLIP:
return new FlipLoadingLayout(context, mode, scrollDirection, attrs);
}
}
}
/**
 * The pulling modes supported by PullToRefreshBase. The int values are
 * persisted in instance state and must match those declared in attrs.xml.
 */
public static enum Mode {

    /**
     * Disable all Pull-to-Refresh gesture and Refreshing handling
     */
    DISABLED(0x0),

    /**
     * Only allow the user to Pull from the start of the Refreshable View to
     * refresh. The start is either the Top or Left, depending on the
     * scrolling direction.
     */
    PULL_FROM_START(0x1),

    /**
     * Only allow the user to Pull from the end of the Refreshable View to
     * refresh. The start is either the Bottom or Right, depending on the
     * scrolling direction.
     */
    PULL_FROM_END(0x2),

    /**
     * Allow the user to both Pull from the start, from the end to refresh.
     */
    BOTH(0x3),

    /**
     * Disables Pull-to-Refresh gesture handling, but allows manually
     * setting the Refresh state via
     * {@link PullToRefreshBase#setRefreshing() setRefreshing()}.
     */
    MANUAL_REFRESH_ONLY(0x4);

    /**
     * @deprecated Use {@link #PULL_FROM_START} from now on.
     */
    @Deprecated
    public static final Mode PULL_DOWN_TO_REFRESH = Mode.PULL_FROM_START;

    /**
     * @deprecated Use {@link #PULL_FROM_END} from now on.
     */
    @Deprecated
    public static final Mode PULL_UP_TO_REFRESH = Mode.PULL_FROM_END;

    /**
     * Maps an int to a specific mode. This is needed when saving state, or
     * inflating the view from XML where the mode is given through a attr
     * int.
     *
     * @param modeInt - int to map a Mode to
     * @return Mode that modeInt maps to, or PULL_FROM_START by default.
     */
    static Mode mapIntToValue(final int modeInt) {
        for (Mode value : Mode.values()) {
            if (modeInt == value.getIntValue()) {
                return value;
            }
        }
        // If not, return default
        return getDefault();
    }

    /** @return the default mode, PULL_FROM_START. */
    static Mode getDefault() {
        return PULL_FROM_START;
    }

    // The modeInt values need to match those from attrs.xml.
    // final: a Mode's persisted value must never change after construction.
    private final int mIntValue;

    Mode(int modeInt) {
        mIntValue = modeInt;
    }

    /**
     * @return true if the mode permits Pull-to-Refresh
     */
    boolean permitsPullToRefresh() {
        return !(this == DISABLED || this == MANUAL_REFRESH_ONLY);
    }

    /**
     * @return true if this mode wants the Loading Layout Header to be shown
     */
    public boolean showHeaderLoadingLayout() {
        return this == PULL_FROM_START || this == BOTH;
    }

    /**
     * @return true if this mode wants the Loading Layout Footer to be shown
     */
    public boolean showFooterLoadingLayout() {
        return this == PULL_FROM_END || this == BOTH || this == MANUAL_REFRESH_ONLY;
    }

    /** @return the attrs.xml / saved-state int value for this mode. */
    int getIntValue() {
        return mIntValue;
    }
}
// ===========================================================
// Inner, Anonymous Classes, and Enumerations
// ===========================================================
/**
* Simple Listener that allows you to be notified when the user has scrolled
* to the end of the AdapterView. See (
* {@link PullToRefreshAdapterViewBase#setOnLastItemVisibleListener}.
*
* @author Chris Banes
*/
public static interface OnLastItemVisibleListener {
/**
* Called when the user has scrolled to the end of the list
*/
public void onLastItemVisible();
}
/**
* Listener that allows you to be notified when the user has started or
* finished a touch event. Useful when you want to append extra UI events
* (such as sounds). See (
* {@link PullToRefreshAdapterViewBase#setOnPullEventListener}.
*
* @author Chris Banes
*/
public static interface OnPullEventListener<V extends View> {
/**
* Called when the internal state has been changed, usually by the user
* pulling.
*
* @param refreshView - View which has had it's state change.
* @param state - The new state of View.
* @param direction - One of {@link Mode#PULL_FROM_START} or
* {@link Mode#PULL_FROM_END} depending on which direction
* the user is pulling. Only useful when <var>state</var> is
* {@link State#PULL_TO_REFRESH} or
* {@link State#RELEASE_TO_REFRESH}.
*/
public void onPullEvent(final PullToRefreshBase<V> refreshView, State state, Mode direction);
}
/**
* Simple Listener to listen for any callbacks to Refresh.
*
* @author Chris Banes
*/
public static interface OnRefreshListener<V extends View> {
/**
* onRefresh will be called for both a Pull from start, and Pull from
* end
*/
public void onRefresh(final PullToRefreshBase<V> refreshView);
}
/**
* An advanced version of the Listener to listen for callbacks to Refresh.
* This listener is different as it allows you to differentiate between Pull
* Ups, and Pull Downs.
*
* @author Chris Banes
*/
public static interface OnRefreshListener2<V extends View> {
// TODO These methods need renaming to START/END rather than DOWN/UP
/**
* onPullDownToRefresh will be called only when the user has Pulled from
* the start, and released.
*/
public void onPullDownToRefresh(final PullToRefreshBase<V> refreshView);
/**
* onPullUpToRefresh will be called only when the user has Pulled from
* the end, and released.
*/
public void onPullUpToRefresh(final PullToRefreshBase<V> refreshView);
}
public static enum Orientation {
VERTICAL, HORIZONTAL;
}
/**
 * The states of the Pull-to-Refresh state machine. The int values are
 * persisted in instance state; they are stable identifiers, not ordinals.
 */
public static enum State {

    /**
     * When the UI is in a state which means that user is not interacting
     * with the Pull-to-Refresh function.
     */
    RESET(0x0),

    /**
     * When the UI is being pulled by the user, but has not been pulled far
     * enough so that it refreshes when released.
     */
    PULL_TO_REFRESH(0x1),

    /**
     * When the UI is being pulled by the user, and <strong>has</strong>
     * been pulled far enough so that it will refresh when released.
     */
    RELEASE_TO_REFRESH(0x2),

    /**
     * When the UI is currently refreshing, caused by a pull gesture.
     */
    REFRESHING(0x8),

    /**
     * When the UI is currently refreshing, caused by a call to
     * {@link PullToRefreshBase#setRefreshing() setRefreshing()}.
     */
    MANUAL_REFRESHING(0x9),

    /**
     * When the UI is currently overscrolling, caused by a fling on the
     * Refreshable View.
     */
    OVERSCROLLING(0x10);

    /**
     * Maps an int to a specific state. This is needed when saving state.
     *
     * @param stateInt - int to map a State to
     * @return State that stateInt maps to, or RESET if unrecognised.
     */
    static State mapIntToValue(final int stateInt) {
        for (State value : State.values()) {
            if (stateInt == value.getIntValue()) {
                return value;
            }
        }
        // If not, return default
        return RESET;
    }

    // final: a State's persisted value must never change after construction.
    private final int mIntValue;

    State(int intValue) {
        mIntValue = intValue;
    }

    /** @return the saved-state int value for this state. */
    int getIntValue() {
        return mIntValue;
    }
}
/**
 * Runnable that animates the scroll offset from one value to another over
 * a fixed duration, re-posting itself once per animation frame. The
 * interpolator is snapshotted from mScrollAnimationInterpolator at
 * construction time, so later changes to it do not affect a running scroll.
 * NOTE(review): mDuration is used as a divisor, so callers are expected to
 * pass a positive duration -- confirm no caller passes 0.
 */
final class SmoothScrollRunnable implements Runnable {
private final Interpolator mInterpolator;
private final int mScrollToY;
private final int mScrollFromY;
private final long mDuration;
private OnSmoothScrollFinishedListener mListener;
// Cleared by stop() to cancel the animation.
private boolean mContinueRunning = true;
// -1 means "not started yet"; see run().
private long mStartTime = -1;
private int mCurrentY = -1;
public SmoothScrollRunnable(int fromY, int toY, long duration, OnSmoothScrollFinishedListener listener) {
mScrollFromY = fromY;
mScrollToY = toY;
mInterpolator = mScrollAnimationInterpolator;
mDuration = duration;
mListener = listener;
}
@Override
public void run() {
/**
 * Only set mStartTime if this is the first time we're starting,
 * else actually calculate the Y delta
 */
if (mStartTime == -1) {
mStartTime = System.currentTimeMillis();
} else {
/**
 * We do all calculations in long to reduce software float
 * calculations. We use 1000 as it gives us good accuracy and
 * small rounding errors
 */
long normalizedTime = (1000 * (System.currentTimeMillis() - mStartTime)) / mDuration;
normalizedTime = Math.max(Math.min(normalizedTime, 1000), 0);
final int deltaY = Math.round((mScrollFromY - mScrollToY)
* mInterpolator.getInterpolation(normalizedTime / 1000f));
mCurrentY = mScrollFromY - deltaY;
setHeaderScroll(mCurrentY);
}
// If we're not at the target Y, keep going...
if (mContinueRunning && mScrollToY != mCurrentY) {
ViewCompat.postOnAnimation(PullToRefreshBase.this, this);
} else {
if (null != mListener) {
mListener.onSmoothScrollFinished();
}
}
}
// Cancels the animation; the finished listener is NOT invoked.
public void stop() {
mContinueRunning = false;
removeCallbacks(this);
}
}
static interface OnSmoothScrollFinishedListener {
void onSmoothScrollFinished();
}
}
| mit |
kerrizor/chatty_exceptions | lib/chatty_exceptions.rb | 265 | require "chatty_exceptions/version"
require 'mactts'
class Exception
attr_reader :initial_message
def initialize(message)
@initial_message = message
super
end
def to_s
Mac::TTS.say(@initial_message, :victoria)
@initial_message
end
end
| mit |
FlorianGrimm/Hefezopf | HefezopfWebHost/Properties/AssemblyInfo.cs | 266 | using System.Reflection;
using System.Runtime.InteropServices;
[assembly: AssemblyTitle("HefezopfWebHost")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyCulture("")]
[assembly: ComVisible(false)]
[assembly: Guid("75451911-112e-4840-b847-dcd4a3328fd5")]
| mit |
32leaves/SDSL | execution/web/app/DatGUI.rb | 1039 | module DatGUI
class Folder
def initialize(parent)
@parent = parent
end
def add_folder(label)
folder = `self.parent.addFolder(label)`
Folder.new folder
end
def open
`self.parent.open()`
end
def close
`self.parent.close()`
end
def add(ref, id, constraint = nil)
if constraint.nil?
Field.new `self.parent.add(ref, id)`
else
Field.new `self.parent.add(ref, id, constraint)`
end
end
end
class Field
attr_accessor :controller
def initialize(controller)
@controller = controller
end
def on_finish(&block)
`self.controller.onFinishChange(function() { #{block.call} })`
end
def name=(value)
`self.controller.name(value)`
end
end
class GUI < Folder
def initialize(auto_place = true)
super `new dat.GUI({ autoPlace: auto_place })`
end
def dom_element
`self.parent.domElement`
end
def destroy
`self.parent.destroy()`
end
end
end | mit |
Hades1996/Proyectos-U | NetBeansProjects/sist_distr/common/src/es/uned/common/Mensaje.java | 426 | package es.uned.common;
import java.io.Serializable;
/**
 * Immutable, serializable chat message carrying a body and the name of
 * its sender.
 */
public class Mensaje implements Serializable {

    private static final long serialVersionUID = 6473037307367070437L;

    // final: a message never changes after construction.
    private final String cuerpo;
    private final String remitente;

    /**
     * @param cuerpo message body
     * @param remitente sender's name
     */
    public Mensaje(String cuerpo, String remitente) {
        this.cuerpo = cuerpo;
        this.remitente = remitente;
    }

    /** @return the message body */
    public String getCuerpo() {
        return cuerpo;
    }

    /** @return the sender's name */
    public String getRemitente() {
        return remitente;
    }
}
| mit |
carsonfarmer/mappie | sources.py | 28916 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Map manager classes for mappie
Various classes for downloading, managing, and storing webmap images from
various webmap servers. Currently, mappie contains classes for downloading
and playing with OpenStreetMap map tiles, 3 types of Stamen map tiles, and
4 types of Google map tiles.
"""
# Basic imports
import math
import urllib
import hashlib
import os.path as path
import os
# Local import
from mappie.adjust import XYToLL
class ImageManager(object):
    """Abstract interface for creating and manipulating images.

    Concrete subclasses (e.g. ``PILImageManager``) implement the three
    primitives ``create_image``, ``load_image_file`` and ``paste_image``;
    the remaining methods build on those to maintain a single internal
    composite image used by a MapManager.
    """

    def __init__(self):
        # The composite image being built; None until prepare_image().
        self.image = None

    def paste_image(self, img, xy):
        """Paste image into internal image at specified top-left coordinate.

        To be overridden.
        """
        raise NotImplementedError("Subclasses should implement this!")

    def load_image_file(self, imagef):
        """Loads specified image file into image object and returns it.

        To be overridden.
        """
        raise NotImplementedError("Subclasses should implement this!")

    def create_image(self, width, height):
        """Create and return image with specified dimensions.

        To be overridden.
        """
        raise NotImplementedError("Subclasses should implement this!")

    def prepare_image(self, width, height, overwrite=True):
        """Create and internally store an image.

        Parameters
        ----------
        width : int
            Width of image to create in pixels.
        height : int
            Height of image to create in pixels.
        overwrite : bool
            Whether the image should be re-created if it has already been
            created in a previous call.
        """
        if self.image and not overwrite:
            raise Exception("Image already prepared. Set 'overwrite' to True "
                            "if you want to create a new image.")
        self.image = self.create_image(width, height)

    def destroy_image(self):
        """Destroys internal representation of the image."""
        if self.image:
            del self.image
        self.image = None

    def paste_image_file(self, imagef, xy):
        """Pastes input image file into internal image at specified location.

        Given the filename of an image, and the x,y coordinates of the
        location at which to place the top left corner of the contents
        of that image, pastes the image into this object's internal image.

        Parameters
        ----------
        imagef : str
            Filename of the input image to be pasted into the internal image.
        xy : tuple
            x,y coordinates of the location at which to place the top
            left corner of the input image.
        """
        if not self.image:
            raise Exception("Image not prepared!")
        try:
            img = self.load_image_file(imagef)
        # 'except ... as' replaces the Python-2-only comma form, which is a
        # syntax error on Python 3; 'as' works on 2.6+ and 3.x alike.
        except Exception as e:
            raise Exception("Could not load image "+str(imagef)+"\n"+str(e))
        self.paste_image(img, xy)
        del img

    def get_image(self):
        """Return the internal image.

        Returns some representation of the internal image. The returned
        value is not for use by the MapManager.
        """
        return self.image
class PILImageManager(ImageManager):
    """ImageManager implementation backed by PIL images.

    This is a direct copy of an 'PILImageManager' from 'osmviz'.
    """

    def __init__(self, mode="RGBA"):
        """Construct a PIL-backed image manager.

        Parameters
        ----------
        mode : str
            The PIL mode in which new images are created (e.g. "RGBA", "L").
        """
        ImageManager.__init__(self)
        self.mode = mode
        # Import lazily so the module can be loaded without PIL installed;
        # the bare except mirrors the original behaviour of converting any
        # import-time failure into a plain Exception.
        try:
            import PIL.Image
        except:
            raise Exception("PIL could not be imported!")
        self.pil_image = PIL.Image

    def create_image(self, width, height):
        """Return a new blank PIL image of the given pixel dimensions."""
        return self.pil_image.new(self.mode, (width, height))

    def load_image_file(self, imagef):
        """Open the image file at ``imagef`` and return it as a PIL image."""
        return self.pil_image.open(imagef)

    def paste_image(self, img, xy):
        """Paste ``img`` into the internal image with top-left corner at ``xy``."""
        self.get_image().paste(img, xy)
class MapManager(object):
    """A MapManager manages the retrieval and storage of webmap images.

    The basic utility is the `create_map()` method which
    automatically gets all the images needed, and tiles them together
    into one big image.
    """
    def __init__(self, **kwargs):
        """Creates an MapManager.

        Parameters
        ----------
        cache : str
            Path (relative or absolute) to directory where tiles downloaded
            from map server should be saved. Default "/tmp".
        server : str
            URL of map server from which to retrieve map tiles. This
            should be fully qualified, including the protocol.
            This may be ignored for some manager subclasses.
        image_manager : ImageManager
            Instance of an ImageManager which will be used to do all
            image manipulation. This is currently ignored if provided.
        """
        cache = kwargs.get('cache')
        #mgr = kwargs.get('image_manager')
        server = kwargs.get('server')
        self.cache = None
        if cache:
            # Try to create (or validate read/write access to) the
            # caller-supplied cache directory.
            if not os.path.isdir(cache):
                try:
                    os.makedirs(cache, 0766)  # Python 2 octal literal (0o766)
                    self.cache = cache
                    print "Created cache dir",cache
                except:
                    print "Could not make cache dir",cache
            elif not os.access(cache, os.R_OK | os.W_OK):
                print "Insufficient privileges on cache dir",cache
            else:
                self.cache = cache
        if not self.cache:
            # Fall back to the first usable system temp directory.
            self.cache = ( os.getenv("TMPDIR")
                           or os.getenv("TMP")
                           or os.getenv("TEMP")
                           or "/tmp" )
            print "Using %s to cache maptiles." % self.cache
            if not os.access(self.cache, os.R_OK | os.W_OK):
                print "Insufficient access to %s." % self.cache
                raise Exception, "Unable to find/create/use maptile cache directory."
        if server:
            self.server = server
        else:
            self.server = "http://tile.openstreetmap.org"
        # Make a hash of the server URL to use in cached tile filenames.
        md5 = hashlib.md5()
        md5.update(self.server)
        self.cache_prefix = 'mappie-%s-' % md5.hexdigest()[:5]
        self.manager = PILImageManager("RGBA") # use RGBA by default

    def get_tile_coord(self, lon_deg, lat_deg, zoom):
        """Get x, y coordinates of map tile based on lat, lon coordinates.

        Given lon,lat coords in DEGREES, and a zoom level,
        returns the (x,y) coordinate of the corresponding tile #.
        (http://wiki.openstreetmap.org/wiki/Slippy_map_tilenames#Python)

        Parameters
        ----------
        lon_deg : float
            Longitude in degrees.
        lat_deg : float
            Latitude in degress.
        zoom : int
            Zoom level at which to download map.

        Returns
        ----------
        (xtile, ytile) : tuple
            Tuple containing the x, y coordinates of the required map tile.
        """
        # Standard Web-Mercator tile math from the OSM slippy-map wiki page.
        lat_rad = lat_deg * math.pi / 180.0
        n = 2.0 ** zoom
        xtile = int((lon_deg + 180.0) / 360.0 * n)
        ytile = int((1.0 - math.log(math.tan(lat_rad) +
                    (1 / math.cos(lat_rad))) / math.pi) / 2.0 * n)
        return(xtile, ytile)

    def get_tile_url(self, tile_coord, zoom):
        """Get appropriately formatted url for retrieving a map from the server.

        Given x,y coord of the tile to download, and the zoom level,
        returns the URL from which to download the image.
        Note: subclasses may override this with different argument
        semantics (GoogleManager passes a bounding box instead).

        Parameters
        ----------
        tile_coord : tuple
            Tuple containing the x, y coordinates of the required map tile
        zoom : int
            Zoom level at which to download map.

        Returns
        ----------
        tile_url : str
            URL string which specifies which map tile to download from server.
        """
        params = (self.maptype,zoom,tile_coord[0],tile_coord[1])
        return self.server+"/%s/%d/%d/%d.png" % params

    def get_local_filename(self, tile_coord, zoom):
        """Get appropriately named local filename for downloaded map.

        Given x,y coord of the tile, and the zoom level,
        returns the filename to which the file would be saved
        if it was downloaded. That way we don't have to kill
        the map server every time the thing runs.

        Parameters
        ----------
        tile_coord : tuple
            Tuple containing the x, y coords of the tile to be downloaded.
        zoom : int
            Zoom level at which to download map.

        Returns
        ----------
        filename : str
            Local file path of file that would be saved if it were downloaded.
        """
        # cache_prefix embeds a hash of the server URL so tiles from
        # different servers never collide in the cache directory.
        params = (self.cache_prefix,self.maptype,zoom,tile_coord[0],tile_coord[1])
        return path.join(self.cache, "%s%s_%d_%d_%d.png" % params)

    def retrieve_tile_image(self, tile_coord, zoom):
        """Get the actual tile image from the map server.

        Given x,y coord of the tile, and the zoom level,
        retrieves the file to disk if necessary and
        returns the local filename.
        """
        filename = self.get_local_filename(tile_coord,zoom)
        # Only hit the network when the tile is not already cached.
        if not path.isfile(filename):
            url = self.get_tile_url(tile_coord,zoom)
            try:
                urllib.urlretrieve(url, filename=filename);
            except Exception, e:
                raise Exception, "Unable to retrieve URL: "+url+"\n"+str(e)
        return filename

    def tile_nw_latlon(self, tile_coord, zoom):
        """
        Given x,y coord of the tile, and the zoom level,
        returns the (lat,lon) coordinates of the upper
        left corner of the tile.
        """
        xtile, ytile = tile_coord
        n = 2.0 ** zoom
        lon_deg = xtile / n * 360.0 - 180.0
        lat_rad = math.atan(math.sinh(math.pi * (1 - 2 * ytile / n)))
        lat_deg = lat_rad * 180.0 / math.pi
        return(lat_deg, lon_deg)

    def create_map(self, (minlat, maxlat, minlon, maxlon), zoom, **kwargs):
        """Main map creation function.

        Given bounding latlons (in degrees), and a zoom level,
        creates an image constructed from map tiles.

        Parameters
        ----------
        bbox : tuple (minlat, maxlat, minlon, maxlon) [required]
            Bounding box lat/lons (in degrees) of the requested map area.
        zoom : int [required]
            Zoom level at which to download map.
        color : str [optional]
            Whether to return a color ('color') or black-and-white ('bw')
            version of the requested map. This parameter controls the 'mode'
            of the internal image manager.
        overwrite : bool [optional]
            Whether previously downloaded/created map images should
            be overwritten.

        Returns
        ----------
        (img, bounds) : (PILImage, tuple)
            The constructed image (as returned by the image manager's
            "get_image()" method), and bounding box (latmin,latmax,lonmin,lonmax)
            which the tiles covers. The returned bounding box may not be the
            same as the requested bounding box.
        """
        if not self.manager:
            raise Exception("No ImageManager was specified, cannot create image.")
        color = kwargs.get('color')
        if color:
            # Invalid specifications are tolerated with a warning rather
            # than an exception.
            if not isinstance(color, str) or not color in ('color', 'bw'):
                print "Wrong color specification, must be 'color' or 'bw'."
                print "Defaulting to 'color'"
                color = 'color'
            if color == 'bw': mode = "L"
            else: mode = "RGBA"
            self.manager.mode = mode
        overwrite = kwargs.get('overwrite')
        if overwrite is None:
            overwrite = True
        if not isinstance(overwrite, bool):
            raise Exception("Invalid 'overwrite' argument, must be True/False.")
        # Tile grid covering the requested bounding box; the snapped grid
        # generally covers MORE area than requested, hence the recomputed
        # bounds returned below.
        topleft = minX, minY = self.get_tile_coord(minlon, maxlat, zoom);
        bottomright = maxX, maxY = self.get_tile_coord(maxlon, minlat, zoom);
        new_maxlat, new_minlon = self.tile_nw_latlon( topleft, zoom )
        new_minlat, new_maxlon = self.tile_nw_latlon( (maxX+1,maxY+1), zoom )
        # tiles are 256x256
        pix_width = (maxX-minX+1)*256
        pix_height = (maxY-minY+1)*256
        self.manager.prepare_image(pix_width, pix_height, overwrite)
        #print "Retrieving %d tiles..." % ( (1+maxX-minX)*(1+maxY-minY) ,)
        for x in range(minX,maxX+1):
            for y in range(minY,maxY+1):
                fname = self.retrieve_tile_image((x,y),zoom)
                x_off = 256*(x-minX)
                y_off = 256*(y-minY)
                self.manager.paste_image_file( fname, (x_off,y_off) )
        #print "... done."
        return (self.manager.get_image(),
                (new_minlat, new_maxlat, new_minlon, new_maxlon))
class StamenManager(MapManager):
"""A StamenManager manages the retrieval and storage of Stamen Map images.
"""
def __init__(self, **kwargs):
"""Creates a StamenManager.
Parameters
----------
cache : str
Path (relative or absolute) to directory where tiles downloaded
from Stamen server should be saved. Default "/tmp".
image_manager : ImageManager
Instance of an ImageManager which will be used to do all
image manipulation. This is currently ignored if provided.
"""
MapManager.__init__(self, **kwargs)
self.server = "http://tile.stamen.com"
def create_map(self, (minlat, maxlat, minlon, maxlon), zoom, **kwargs):
"""Stamen specific map creation function.
Given bounding latlons (in degrees), and an OSM zoom level,
creates an image constructed from OSM tiles.
Parameters
----------
bbox : tuple (minlat, maxlat, minlon, maxlon) [required]
Bounding box lat/lons (in degrees) of the requested map area.
zoom : int [required]
Zoom level at which to download map.
3 (continent) to 18 (building) with default value of 10 (city).
Actual min and max values vary by maptype.
color : str [optional]
Whether to return a color ('color') or black-and-white ('bw')
version of the requested map. This parameter controls the 'mode'
of the internal image manager.
maptype : str [optional]
Type of map to return. This can be one of 'terrain' (default),
'watercolor', or 'toner'.
overwrite : bool [optional]
Whether previously downloaded/created map images should
be overwritten.
Returns
----------
(img, bounds) : (PILImage, tuple)
The constructed image (as returned by the image manager's
"get_image()" method), and bounding box (latmin,latmax,lonmin,lonmax)
which the tiles covers. The returned bounding box may not be the
same as the requested bounding box.
"""
maptype = kwargs.get('maptype')
maptypes = ("terrain", "watercolor", "toner")
if maptype:
if not isinstance(maptype, str) or \
not maptype in maptypes:
raise ValueError, "Invalid maptype specified, must be '%s', '%s', or '%s'." % maptypes
else:
maptype = 'terrain'
self.maptype = maptype
return MapManager.create_map(self, (minlat, maxlat, minlon, maxlon),
zoom, **kwargs)
class OSMManager(MapManager):
"""An OSMManager manages the retrieval and storage of OpenStreetMap images.
"""
def create_map(self, (minlat, maxlat, minlon, maxlon), zoom, **kwargs):
""" Create a webmap image using OpenStreetMap tiles.
Given bounding latlons (in degrees), and an OSM zoom level,
creates an image constructed from OSM tiles.
Parameters
----------
bbox : tuple (minlat, maxlat, minlon, maxlon) [required]
Bounding box lat/lons (in degrees) of the requested map area.
zoom : int [required]
Zoom level at which to download map.
3 (continent) to 18 (building) with default value of 10 (city).
color : str [optional]
Whether to return a color ('color') or black-and-white ('bw')
version of the requested map. This parameter controls the 'mode'
of the internal image manager.
overwrite : bool [optional]
Whether previously downloaded/created map images should
be overwritten.
Returns
----------
(img, bounds) : (PILImage, tuple)
The constructed image (as returned by the image manager's
"get_image()" method), and bounding box (latmin,latmax,lonmin,lonmax)
which the tiles covers. The returned bounding box may not be the
same as the requested bounding box.
"""
self.maptype = "" # leave this blank for OSM maps
return MapManager.create_map(self, (minlat, maxlat, minlon, maxlon),
zoom, **kwargs)
class GoogleManager(MapManager):
"""A GoogleManager manages the retrieval and storage of GoogleMap images.
"""
def __init__(self, **kwargs):
"""Creates a GoogleManager.
Parameters
----------
cache : str
Path (relative or absolute) to directory where tiles downloaded
from Google server should be saved. Default "/tmp".
language : str
String providing language of map labels (for themes with
them) in the format 'en-EN'. Not all languages are supported;
for those which aren't the default language is used.
sensor : bool
Specifies whether the application requesting the static map is
using a sensor to determine the user's location.
region : str
Region localization as a region code specified as a two-character
ccTLD ('top-level domain') value. For more info, see:
https://developers.google.com/maps/documentation/javascript/basics#Region
image_manager : ImageManager
Instance of an ImageManager which will be used to do all
image manipulation. This is currently ignored if provided.
"""
MapManager.__init__(self, **kwargs)
language = kwargs.get('language')
if language:
if not isinstance(language, str):
raise Exception, "Invalid language specification, must be str."
language = "language=%s" % language
else:
language = ""
region = kwargs.get("region")
if region:
if not isinstance(region, str):
raise Exception, "Invalid region specification, must be str."
region = "region=%s" % region
else:
region = ""
sensor = kwargs.get("sensor")
if sensor:
if not isinstance(sensor, bool):
raise Exception, "Invalid sensor specification, must be True or False."
sensor = "sensor=%s" % str(sensor).lower()
else:
sensor = "sensor=%s" % str(False).lower()
params = (language, region, sensor)
self.server = "http://maps.googleapis.com/maps/api/staticmap?%s&%s&%s" % params
def get_tile_url(self, bbox, zoom):
"""Get appropriately formatted and parametrized url for GoogleMaps API.
Given bounding box of the area to return, and the zoom level,
returns the URL from which to download the image.
This version differs from other subclasses in that it uses the
Google V3 API, rather than fetching the map tiles directly.
Parameters
----------
bbox : tuple
Tuple containing the bounding box of the required map area
zoom : int
Zoom level at which to download map.
Returns
----------
api_url : str
URL string/query to send to the GoogleMaps V3 API.
"""
center_url = "center=%s,%s" % (sum(bbox[0:2])/2., sum(bbox[2:4])/2.)
zoom_url = 'zoom=%s' % zoom
size_url = 'size=%sx%s' % self.size
scale_url = 'scale=%s' % self.scale
format_url = 'format=png8' # onle png is supported at the moment
maptype_url = 'maptype=%s' % self.maptype
# Optional stuff
style_url = "style=%s" % self.style if self.style else ""
if self.markers:
markers = ["%s,%s" % (round(z[0], 6), round(z[1], 6)) for z in self.markers]
markers_url = "markers=" + "|".join(markers)
else:
markers_url = ""
if self.paths:
rnd = lambda x: round(x, 6) # save space and time
paths = ["|".join(["%s,%s" % (rnd(z[0]), rnd(z[1])) for z in path])
for path in paths]
paths_url = "path=" + "&path=".join(paths)
else:
paths_url = ""
params_url = "&".join((center_url, zoom_url, size_url, scale_url,
format_url, maptype_url, style_url, markers_url, paths_url))
full_url = self.server + "&" + params_url
import re
full_url = re.sub('[&]+','&', full_url) # Removes 'missing' arguments
if full_url.endswith("&"): full_url = full_url[:-1]
return full_url
def create_map(self, (minlat, maxlat, minlon, maxlon), zoom, **kwargs):
"""Google specific map creation function.
Parameters
----------
bbox : tuple (minlat, maxlat, minlon, maxlon) [required]
Bounding box lat/lons (in degrees) of the requested map area.
zoom : int [required]
Zoom level at which to download map.
3 (continent) to 21 (building) with default value of 10 (city).
color : str [optional]
Whether to return a color ('color') or black-and-white ('bw')
version of the requested map. This paramter controls the 'mode'
of the internal image manager.
maptype : str [optional]
Type of map to return. This can be one of 'terrain' (default),
'satellite', 'roadmap', or 'hybrid'.
overwrite : bool [optional]
Whether previously downloaded/created map images should
be overwritten.
size : tuple [optional]
Rectangular dimensions of map in pixels (horizontal, vertical).
Max size is (640, 640). This parameter is affected in a
multiplicative way by scale.
scale : int [optional]
Multiplicative factor for the number of pixels returned.
Possible values are 1, 2, or 4 (e.g. size=(640,640) and
scale=2 returns an image with 1280x1280 pixels). 4 is
reserved for Google business users only. scale also affects
the size of labels.
style : str [optional]
Character string to be supplied directly to the api for the
style argument. This is a powerful complex specification,see:
https://developers.google.com/maps/documentation/staticmaps/
markers : list [optional]
List of tuples with (latitude, longitude) coordinates for which
google markers should be embedded in the map image.
paths :
List of lists of tuples with (latitude, longitude) coordinates for
which individual paths should be embedded in the map image.
Returns
----------
(img, bounds) : (PILImage, tuple)
The constructed image (as returned by the image manager's
"get_image()" method), and bounding box (latmin,latmax,lonmin,lonmax)
which the tiles covers. The returned bounding box may not be the
same as the requested bounding box.
"""
self.maptype = kwargs.get('maptype')
maptypes = ('terrain', 'satellite', 'roadmap', 'hybrid')
if self.maptype:
if not isinstance(self.maptype, str) or not self.maptype in maptypes:
raise ValueError, "Invalid maptype specified, must be '%s', '%s', %s, or '%s'." % maptypes
else:
self.maptype = 'terrain'
self.size = kwargs.get('size')
if self.size:
if not isinstance(self.size, tuple) or \
not all([s>0 and s <= 640 for s in self.size]):
raise ValueError('Invalid size parameter, must be < 640x640')
else:
self.size = (256, 256)
self.scale = kwargs.get('scale')
if self.scale:
if not self.scale in (1,2,4):
raise ValueError('Invalid scale parameter, must be 1, 2, or 4')
else:
self.scale = 2
self.style = kwargs.get('style')
if self.style: # if its None, leave it None
if not isinstance(self.style, str):
raise ValueError, "Invalid style specified, must be str."
self.markers = kwargs.get('markers')
if self.markers: # if its None, leave it None
if not isinstance(markers, list):
raise ValueError, "Invalid markers specified, must be list of tuples."
self.paths = kwargs.get('paths')
if self.paths: # if its None, leave it None
if not isinstance(paths, list):
raise ValueError, "Invalid paths specified, must be list of lists of tuples."
if not self.manager:
raise Exception, "No ImageManager was specified, cannot create image."
color = kwargs.get('color')
if color:
if not isinstance(color, str) or not color in ('color', 'bw'):
print "Wrong color specification, must be 'color' or 'bw'."
print "Defaulting to 'color'"
color = 'color'
if color == 'bw': mode = "L"
else: mode = "RGBA"
self.manager.mode = mode
overwrite = kwargs.get('overwrite')
if overwrite is None:
overwrite = True
if not isinstance(overwrite, bool):
raise Exception, "Invalid overwrite argument, must be True or False."
pix_width = self.scale*self.size[1]
pix_height = self.scale*self.size[0]
self.manager.prepare_image(pix_width, pix_height, overwrite)
#print "Retrieving tiles..."
fname = self.retrieve_tile_image((minlat, maxlat, minlon, maxlon), zoom)
self.manager.paste_image_file(fname, (0,0))
#print "... done."
# Compute actual bounding box (which is not the same as requested)
centX, centY = sum([minlon, maxlon])/2., sum([minlat, maxlat])/2.
new_minlon, new_maxlat = XYToLL(-self.size[0]/2, -self.size[1]/2,
centX, centY, int(zoom))
new_maxlon, new_minlat = XYToLL(self.size[0]/2, self.size[1]/2,
centX, centY, int(zoom))
return (self.manager.get_image(),
(new_minlat, new_maxlat, new_minlon, new_maxlon))
class _useragenthack(urllib.FancyURLopener):
    # URL opener that replaces urllib's stock User-Agent header with a
    # mappie-specific one, so tile servers can identify this client.
    def __init__(self, *args):
        urllib.FancyURLopener.__init__(self, *args)
        # Remove the first (stock) User-Agent entry before adding our own.
        for idx, (name, value) in enumerate(self.addheaders):
            if name == "User-Agent":
                del self.addheaders[idx]
                break
        self.addheader('User-Agent', 'mappie/1.0 +https://github.com/cfarmer/mappie')
#import httplib
#httplib.HTTPConnection.debuglevel = 1
# Install the custom opener module-wide so every urllib.urlretrieve() call
# made by the managers above sends the mappie User-Agent header.
urllib._urlopener = _useragenthack()
| mit |
TinEye/tineyeservices_python | tineyeservices/mobileengine_request.py | 1115 | # -*- coding: utf-8 -*-
# Copyright (c) 2018 TinEye. All rights reserved worldwide.
from .matchengine_request import MatchEngineRequest
class MobileEngineRequest(MatchEngineRequest):
    """
    Client class for sending requests to a MobileEngine API.

    Adding an image using data:

        >>> from tineyeservices import MobileEngineRequest, Image
        >>> api = MobileEngineRequest(api_url='http://localhost/rest/')
        >>> image = Image(filepath='/path/to/image.jpg')
        >>> api.add_image(images=[image])
        {u'error': [], u'method': u'add', u'result': [], u'status': u'ok'}

    Searching for an image using an image URL:

        >>> api.search_url(url='https://tineye.com/images/meloncat.jpg')
        {'error': [],
         'method': 'search',
         'result': [{'filepath': 'match1.png',
                     'score': '97.2',
                     'overlay': 'overlay/query.png/match1.png[...]'}],
         'status': 'ok'}
    """

    def __repr__(self):
        # Mirrors the constructor signature so the repr is copy-pasteable.
        return ("MobileEngineRequest(api_url=%r, username=%r, password=%r)"
                % (self.api_url, self.username, self.password))
| mit |
RisenEsports/RisenEsports.github.io | Console/app/node_modules/webpack-config/src/ConfigFactory.js | 1373 | import {
isFunction,
isObject
} from 'lodash';
import Config from './Config';
import ConfigList from './ConfigList';
import { env } from './ConfigArgv';
/**
 * Backing store mapping each factory instance to its container.
 * @private
 * @type {WeakMap}
 */
const CONTAINER = new WeakMap();

/**
 * Factory that turns plain objects, arrays, or environment functions into
 * {@link Config} / {@link ConfigList} instances resolved through a
 * `ConfigContainer`.
 * @class
 */
class ConfigFactory {
    /**
     * @constructor
     * @param {ConfigContainer} container - Container used to resolve `Config` instances.
     */
    constructor(container) {
        CONTAINER.set(this, container);
    }

    /**
     * The container backing this factory.
     * @readonly
     * @type {ConfigContainer}
     */
    get container() {
        return CONTAINER.get(this);
    }

    /**
     * Turns a single value into a `Config`: existing configs are cloned,
     * anything else is merged into a freshly resolved `Config`.
     * @private
     * @param {Object|Config} value
     * @returns {Config}
     */
    initWith(value) {
        if (value instanceof Config) {
            return value.clone();
        }
        return this.container.resolve(Config).merge(value);
    }

    /**
     * Creates a `Config` (from an object) or a `ConfigList` (from an array).
     * Functions are first evaluated with the current `env`.
     * @param {Function|Object|Object[]} value
     * @returns {Config|ConfigList} `undefined` when the resolved value is
     *     neither an array nor an object.
     */
    createConfig(value) {
        const resolved = isFunction(value) ? value(env) : value;
        if (Array.isArray(resolved)) {
            return ConfigList.from(resolved, (item) => this.initWith(item));
        }
        return isObject(resolved) ? this.initWith(resolved) : undefined;
    }
}

export default ConfigFactory;
| mit |
Azure/azure-sdk-for-java | sdk/resourcemanagerhybrid/azure-resourcemanager-authorization/src/main/java/com/azure/resourcemanager/authorization/implementation/SignedInUsersClientImpl.java | 20189 | // Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.authorization.implementation;
import com.azure.core.annotation.ExpectedResponses;
import com.azure.core.annotation.Get;
import com.azure.core.annotation.HeaderParam;
import com.azure.core.annotation.Headers;
import com.azure.core.annotation.Host;
import com.azure.core.annotation.HostParam;
import com.azure.core.annotation.PathParam;
import com.azure.core.annotation.QueryParam;
import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceInterface;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.annotation.UnexpectedResponseExceptionType;
import com.azure.core.http.rest.PagedFlux;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.PagedResponse;
import com.azure.core.http.rest.PagedResponseBase;
import com.azure.core.http.rest.Response;
import com.azure.core.http.rest.RestProxy;
import com.azure.core.util.Context;
import com.azure.core.util.FluxUtil;
import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.authorization.fluent.SignedInUsersClient;
import com.azure.resourcemanager.authorization.fluent.models.DirectoryObjectInner;
import com.azure.resourcemanager.authorization.fluent.models.UserInner;
import com.azure.resourcemanager.authorization.models.DirectoryObjectListResult;
import com.azure.resourcemanager.authorization.models.GraphErrorException;
import reactor.core.publisher.Mono;
/** An instance of this class provides access to all the operations defined in SignedInUsersClient. */
public final class SignedInUsersClientImpl implements SignedInUsersClient {
private final ClientLogger logger = new ClientLogger(SignedInUsersClientImpl.class);
/** The proxy service used to perform REST calls. */
private final SignedInUsersService service;
/** The service client containing this operation class. */
private final GraphRbacManagementClientImpl client;
    /**
     * Initializes an instance of SignedInUsersClientImpl.
     *
     * @param client the instance of the service client containing this operation class.
     */
    SignedInUsersClientImpl(GraphRbacManagementClientImpl client) {
        // Build the RestProxy implementation of the service interface, sharing
        // the HTTP pipeline and serializer of the owning management client.
        this.service =
            RestProxy.create(SignedInUsersService.class, client.getHttpPipeline(), client.getSerializerAdapter());
        this.client = client;
    }
    /**
     * The interface defining all the services for GraphRbacManagementClientSignedInUsers to be used by the proxy
     * service to perform REST calls.
     */
    @Host("{$host}")
    @ServiceInterface(name = "GraphRbacManagementC")
    private interface SignedInUsersService {
        // GET /{tenantID}/me : details of the signed-in user.
        @Headers({"Content-Type: application/json"})
        @Get("/{tenantID}/me")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(GraphErrorException.class)
        Mono<Response<UserInner>> get(
            @HostParam("$host") String endpoint,
            @QueryParam("api-version") String apiVersion,
            @PathParam("tenantID") String tenantId,
            @HeaderParam("Accept") String accept,
            Context context);

        // GET /{tenantID}/me/ownedObjects : first page of owned directory objects.
        @Headers({"Content-Type: application/json"})
        @Get("/{tenantID}/me/ownedObjects")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(GraphErrorException.class)
        Mono<Response<DirectoryObjectListResult>> listOwnedObjects(
            @HostParam("$host") String endpoint,
            @QueryParam("api-version") String apiVersion,
            @PathParam("tenantID") String tenantId,
            @HeaderParam("Accept") String accept,
            Context context);

        // GET /{tenantID}/{nextLink} : subsequent pages; nextLink arrives
        // pre-encoded from the service, hence encoded = true.
        @Headers({"Content-Type: application/json"})
        @Get("/{tenantID}/{nextLink}")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(GraphErrorException.class)
        Mono<Response<DirectoryObjectListResult>> listOwnedObjectsNext(
            @HostParam("$host") String endpoint,
            @PathParam(value = "nextLink", encoded = true) String nextLink,
            @QueryParam("api-version") String apiVersion,
            @PathParam("tenantID") String tenantId,
            @HeaderParam("Accept") String accept,
            Context context);
    }
    /**
     * Gets the details for the currently logged-in user.
     *
     * @param tenantId The tenant ID.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws GraphErrorException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the details for the currently logged-in user.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<UserInner>> getWithResponseAsync(String tenantId) {
        // Validation failures surface as error signals rather than thrown
        // exceptions, keeping the reactive contract.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (tenantId == null) {
            return Mono.error(new IllegalArgumentException("Parameter tenantId is required and cannot be null."));
        }
        final String accept = "application/json, text/json";
        // Merge the client's reactor Context into the subscriber context of
        // the outgoing request.
        return FluxUtil
            .withContext(
                context ->
                    service.get(this.client.getEndpoint(), this.client.getApiVersion(), tenantId, accept, context))
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }
    /**
     * Gets the details for the currently logged-in user.
     *
     * @param tenantId The tenant ID.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws GraphErrorException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the details for the currently logged-in user.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<Response<UserInner>> getWithResponseAsync(String tenantId, Context context) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (tenantId == null) {
            return Mono.error(new IllegalArgumentException("Parameter tenantId is required and cannot be null."));
        }
        final String accept = "application/json, text/json";
        // Caller-supplied context takes precedence; merge it with the
        // client's default context before issuing the call.
        context = this.client.mergeContext(context);
        return service.get(this.client.getEndpoint(), this.client.getApiVersion(), tenantId, accept, context);
    }
/**
* Gets the details for the currently logged-in user.
*
* @param tenantId The tenant ID.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws GraphErrorException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the details for the currently logged-in user.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<UserInner> getAsync(String tenantId) {
return getWithResponseAsync(tenantId)
.flatMap(
(Response<UserInner> res) -> {
if (res.getValue() != null) {
return Mono.just(res.getValue());
} else {
return Mono.empty();
}
});
}
    /**
     * Gets the details for the currently logged-in user.
     *
     * @param tenantId The tenant ID.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws GraphErrorException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the details for the currently logged-in user.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public UserInner get(String tenantId) {
        // Synchronous variant: blocks on the async call.
        return getAsync(tenantId).block();
    }
    /**
     * Gets the details for the currently logged-in user.
     *
     * @param tenantId The tenant ID.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws GraphErrorException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the details for the currently logged-in user.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<UserInner> getWithResponse(String tenantId, Context context) {
        // Synchronous variant that preserves the full REST response.
        return getWithResponseAsync(tenantId, context).block();
    }
    /**
     * Get the list of directory objects that are owned by the user.
     *
     * @param tenantId The tenant ID.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws GraphErrorException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the list of directory objects that are owned by the user.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<DirectoryObjectInner>> listOwnedObjectsSinglePageAsync(String tenantId) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (tenantId == null) {
            return Mono.error(new IllegalArgumentException("Parameter tenantId is required and cannot be null."));
        }
        final String accept = "application/json, text/json";
        return FluxUtil
            .withContext(
                context ->
                    service
                        .listOwnedObjects(
                            this.client.getEndpoint(), this.client.getApiVersion(), tenantId, accept, context))
            // Adapt the raw list result into a PagedResponse, carrying the
            // OData next link forward as the continuation token.
            .<PagedResponse<DirectoryObjectInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().odataNextLink(),
                        null))
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }
/**
* Get the list of directory objects that are owned by the user.
*
* @param tenantId The tenant ID.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws GraphErrorException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the list of directory objects that are owned by the user.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<PagedResponse<DirectoryObjectInner>> listOwnedObjectsSinglePageAsync(
String tenantId, Context context) {
if (this.client.getEndpoint() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getEndpoint() is required and cannot be null."));
}
if (tenantId == null) {
return Mono.error(new IllegalArgumentException("Parameter tenantId is required and cannot be null."));
}
final String accept = "application/json, text/json";
context = this.client.mergeContext(context);
return service
.listOwnedObjects(this.client.getEndpoint(), this.client.getApiVersion(), tenantId, accept, context)
.map(
res ->
new PagedResponseBase<>(
res.getRequest(),
res.getStatusCode(),
res.getHeaders(),
res.getValue().value(),
res.getValue().odataNextLink(),
null));
}
/**
* Get the list of directory objects that are owned by the user.
*
* @param tenantId The tenant ID.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws GraphErrorException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the list of directory objects that are owned by the user.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<DirectoryObjectInner> listOwnedObjectsAsync(String tenantId) {
return new PagedFlux<>(
() -> listOwnedObjectsSinglePageAsync(tenantId),
nextLink -> listOwnedObjectsNextSinglePageAsync(nextLink, tenantId));
}
/**
* Get the list of directory objects that are owned by the user.
*
* @param tenantId The tenant ID.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws GraphErrorException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the list of directory objects that are owned by the user.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
private PagedFlux<DirectoryObjectInner> listOwnedObjectsAsync(String tenantId, Context context) {
return new PagedFlux<>(
() -> listOwnedObjectsSinglePageAsync(tenantId, context),
nextLink -> listOwnedObjectsNextSinglePageAsync(nextLink, tenantId, context));
}
/**
* Get the list of directory objects that are owned by the user.
*
* @param tenantId The tenant ID.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws GraphErrorException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the list of directory objects that are owned by the user.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedIterable<DirectoryObjectInner> listOwnedObjects(String tenantId) {
return new PagedIterable<>(listOwnedObjectsAsync(tenantId));
}
/**
* Get the list of directory objects that are owned by the user.
*
* @param tenantId The tenant ID.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws GraphErrorException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the list of directory objects that are owned by the user.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedIterable<DirectoryObjectInner> listOwnedObjects(String tenantId, Context context) {
return new PagedIterable<>(listOwnedObjectsAsync(tenantId, context));
}
    /**
     * Get the list of directory objects that are owned by the user.
     *
     * <p>Fetches the page addressed by {@code nextLink}, which was returned by a previous
     * page request; the paging machinery invokes this until no further link is returned.</p>
     *
     * @param nextLink Next link for the list operation.
     * @param tenantId The tenant ID.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws GraphErrorException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the list of directory objects that are owned by the user.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<DirectoryObjectInner>> listOwnedObjectsNextSinglePageAsync(
        String nextLink, String tenantId) {
        // Validate required client state and parameters as error signals.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (nextLink == null) {
            return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
        }
        if (tenantId == null) {
            return Mono.error(new IllegalArgumentException("Parameter tenantId is required and cannot be null."));
        }
        final String accept = "application/json, text/json";
        // Issue the next-page request inside a Reactor-provided context, then adapt the
        // raw response into a PagedResponse exposing the items and any further next link.
        return FluxUtil
            .withContext(
                context ->
                    service
                        .listOwnedObjectsNext(
                            this.client.getEndpoint(),
                            nextLink,
                            this.client.getApiVersion(),
                            tenantId,
                            accept,
                            context))
            .<PagedResponse<DirectoryObjectInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().odataNextLink(),
                        null))
            // Merge the client's stored context into the subscriber context for this call.
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }
    /**
     * Get the list of directory objects that are owned by the user.
     *
     * <p>Context-aware variant of the next-page fetch: follows {@code nextLink} using the
     * caller-supplied {@code Context} merged with the client's own.</p>
     *
     * @param nextLink Next link for the list operation.
     * @param tenantId The tenant ID.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws GraphErrorException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the list of directory objects that are owned by the user.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<DirectoryObjectInner>> listOwnedObjectsNextSinglePageAsync(
        String nextLink, String tenantId, Context context) {
        // Validate required client state and parameters as error signals.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (nextLink == null) {
            return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
        }
        if (tenantId == null) {
            return Mono.error(new IllegalArgumentException("Parameter tenantId is required and cannot be null."));
        }
        final String accept = "application/json, text/json";
        // Combine the caller's context with the client's before issuing the request.
        context = this.client.mergeContext(context);
        // Adapt the raw response into a PagedResponse exposing the items and next link.
        return service
            .listOwnedObjectsNext(
                this.client.getEndpoint(), nextLink, this.client.getApiVersion(), tenantId, accept, context)
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().odataNextLink(),
                        null));
    }
}
| mit |
ndlib/hesburgh_errors | server/app/controllers/application_controller.rb | 347 | class ApplicationController < ActionController::Base
# Prevent CSRF attacks by raising an exception.
# For APIs, you may want to use :null_session instead.
protect_from_forgery with: :exception
# Mix in shared error-trapping behavior for all controllers.
include HesburghErrors::ControllerErrorTrapping
# Install the custom error handlers only when Rails is NOT configured to
# show full local error pages (i.e. in production-like environments).
unless Rails.configuration.consider_all_requests_local
setup_controller_errors
end
end
| mit |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.