repo_name (stringlengths 5-100) | path (stringlengths 4-299) | copies (stringclasses, 990 values) | size (stringlengths 4-7) | content (stringlengths 666-1.03M) | license (stringclasses, 15 values) | hash (int64, -9,223,351,895,964,839,000 to 9,223,297,778B) | line_mean (float64, 3.17-100) | line_max (int64, 7-1k) | alpha_frac (float64, 0.25-0.98) | autogenerated (bool, 1 class) |
---|---|---|---|---|---|---|---|---|---|---|
anudr01d/anudr01d.github.io | node_modules/grunt-docker/node_modules/docker/node_modules/pygmentize-bundled/vendor/pygments/pygments/lexers/_asybuiltins.py | 369 | 27319 | # -*- coding: utf-8 -*-
"""
pygments.lexers._asybuiltins
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This file contains the asy-function names and asy-variable names of
Asymptote.
Do not edit the ASYFUNCNAME and ASYVARNAME sets by hand.
TODO: perl/python script in Asymptote SVN similar to asy-list.pl but only
for function and variable names.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
ASYFUNCNAME = set([
'AND',
'Arc',
'ArcArrow',
'ArcArrows',
'Arrow',
'Arrows',
'Automatic',
'AvantGarde',
'BBox',
'BWRainbow',
'BWRainbow2',
'Bar',
'Bars',
'BeginArcArrow',
'BeginArrow',
'BeginBar',
'BeginDotMargin',
'BeginMargin',
'BeginPenMargin',
'Blank',
'Bookman',
'Bottom',
'BottomTop',
'Bounds',
'Break',
'Broken',
'BrokenLog',
'Ceil',
'Circle',
'CircleBarIntervalMarker',
'Cos',
'Courier',
'CrossIntervalMarker',
'DefaultFormat',
'DefaultLogFormat',
'Degrees',
'Dir',
'DotMargin',
'DotMargins',
'Dotted',
'Draw',
'Drawline',
'Embed',
'EndArcArrow',
'EndArrow',
'EndBar',
'EndDotMargin',
'EndMargin',
'EndPenMargin',
'Fill',
'FillDraw',
'Floor',
'Format',
'Full',
'Gaussian',
'Gaussrand',
'Gaussrandpair',
'Gradient',
'Grayscale',
'Helvetica',
'Hermite',
'HookHead',
'InOutTicks',
'InTicks',
'J',
'Label',
'Landscape',
'Left',
'LeftRight',
'LeftTicks',
'Legend',
'Linear',
'Link',
'Log',
'LogFormat',
'Margin',
'Margins',
'Mark',
'MidArcArrow',
'MidArrow',
'NOT',
'NewCenturySchoolBook',
'NoBox',
'NoMargin',
'NoModifier',
'NoTicks',
'NoTicks3',
'NoZero',
'NoZeroFormat',
'None',
'OR',
'OmitFormat',
'OmitTick',
'OutTicks',
'Ox',
'Oy',
'Palatino',
'PaletteTicks',
'Pen',
'PenMargin',
'PenMargins',
'Pentype',
'Portrait',
'RadialShade',
'Rainbow',
'Range',
'Relative',
'Right',
'RightTicks',
'Rotate',
'Round',
'SQR',
'Scale',
'ScaleX',
'ScaleY',
'ScaleZ',
'Seascape',
'Shift',
'Sin',
'Slant',
'Spline',
'StickIntervalMarker',
'Straight',
'Symbol',
'Tan',
'TeXify',
'Ticks',
'Ticks3',
'TildeIntervalMarker',
'TimesRoman',
'Top',
'TrueMargin',
'UnFill',
'UpsideDown',
'Wheel',
'X',
'XEquals',
'XOR',
'XY',
'XYEquals',
'XYZero',
'XYgrid',
'XZEquals',
'XZZero',
'XZero',
'XZgrid',
'Y',
'YEquals',
'YXgrid',
'YZ',
'YZEquals',
'YZZero',
'YZero',
'YZgrid',
'Z',
'ZX',
'ZXgrid',
'ZYgrid',
'ZapfChancery',
'ZapfDingbats',
'_cputime',
'_draw',
'_eval',
'_image',
'_labelpath',
'_projection',
'_strokepath',
'_texpath',
'aCos',
'aSin',
'aTan',
'abort',
'abs',
'accel',
'acos',
'acosh',
'acot',
'acsc',
'add',
'addArrow',
'addMargins',
'addSaveFunction',
'addnode',
'addnodes',
'addpenarc',
'addpenline',
'addseg',
'adjust',
'alias',
'align',
'all',
'altitude',
'angabscissa',
'angle',
'angpoint',
'animate',
'annotate',
'anticomplementary',
'antipedal',
'apply',
'approximate',
'arc',
'arcarrowsize',
'arccircle',
'arcdir',
'arcfromcenter',
'arcfromfocus',
'arclength',
'arcnodesnumber',
'arcpoint',
'arcsubtended',
'arcsubtendedcenter',
'arctime',
'arctopath',
'array',
'arrow',
'arrow2',
'arrowbase',
'arrowbasepoints',
'arrowsize',
'asec',
'asin',
'asinh',
'ask',
'assert',
'asy',
'asycode',
'asydir',
'asyfigure',
'asyfilecode',
'asyinclude',
'asywrite',
'atan',
'atan2',
'atanh',
'atbreakpoint',
'atexit',
'atime',
'attach',
'attract',
'atupdate',
'autoformat',
'autoscale',
'autoscale3',
'axes',
'axes3',
'axialshade',
'axis',
'axiscoverage',
'azimuth',
'babel',
'background',
'bangles',
'bar',
'barmarksize',
'barsize',
'basealign',
'baseline',
'bbox',
'beep',
'begin',
'beginclip',
'begingroup',
'beginpoint',
'between',
'bevel',
'bezier',
'bezierP',
'bezierPP',
'bezierPPP',
'bezulate',
'bibliography',
'bibliographystyle',
'binarytree',
'binarytreeNode',
'binomial',
'binput',
'bins',
'bisector',
'bisectorpoint',
'blend',
'boutput',
'box',
'bqe',
'breakpoint',
'breakpoints',
'brick',
'buildRestoreDefaults',
'buildRestoreThunk',
'buildcycle',
'bulletcolor',
'canonical',
'canonicalcartesiansystem',
'cartesiansystem',
'case1',
'case2',
'case3',
'cbrt',
'cd',
'ceil',
'center',
'centerToFocus',
'centroid',
'cevian',
'change2',
'changecoordsys',
'checkSegment',
'checkconditionlength',
'checker',
'checklengths',
'checkposition',
'checktriangle',
'choose',
'circle',
'circlebarframe',
'circlemarkradius',
'circlenodesnumber',
'circumcenter',
'circumcircle',
'clamped',
'clear',
'clip',
'clipdraw',
'close',
'cmyk',
'code',
'colatitude',
'collect',
'collinear',
'color',
'colorless',
'colors',
'colorspace',
'comma',
'compassmark',
'complement',
'complementary',
'concat',
'concurrent',
'cone',
'conic',
'conicnodesnumber',
'conictype',
'conj',
'connect',
'containmentTree',
'contains',
'contour',
'contour3',
'controlSpecifier',
'convert',
'coordinates',
'coordsys',
'copy',
'cos',
'cosh',
'cot',
'countIntersections',
'cputime',
'crop',
'cropcode',
'cross',
'crossframe',
'crosshatch',
'crossmarksize',
'csc',
'cubicroots',
'curabscissa',
'curlSpecifier',
'curpoint',
'currentarrow',
'currentexitfunction',
'currentmomarrow',
'currentpolarconicroutine',
'curve',
'cut',
'cutafter',
'cutbefore',
'cyclic',
'cylinder',
'debugger',
'deconstruct',
'defaultdir',
'defaultformat',
'defaultpen',
'defined',
'degenerate',
'degrees',
'delete',
'deletepreamble',
'determinant',
'diagonal',
'diamond',
'diffdiv',
'dir',
'dirSpecifier',
'dirtime',
'display',
'distance',
'divisors',
'do_overpaint',
'dot',
'dotframe',
'dotsize',
'downcase',
'draw',
'drawAll',
'drawDoubleLine',
'drawFermion',
'drawGhost',
'drawGluon',
'drawMomArrow',
'drawPhoton',
'drawScalar',
'drawVertex',
'drawVertexBox',
'drawVertexBoxO',
'drawVertexBoxX',
'drawVertexO',
'drawVertexOX',
'drawVertexTriangle',
'drawVertexTriangleO',
'drawVertexX',
'drawarrow',
'drawarrow2',
'drawline',
'drawtick',
'duplicate',
'elle',
'ellipse',
'ellipsenodesnumber',
'embed',
'embed3',
'empty',
'enclose',
'end',
'endScript',
'endclip',
'endgroup',
'endl',
'endpoint',
'endpoints',
'eof',
'eol',
'equation',
'equations',
'erase',
'erasestep',
'erf',
'erfc',
'error',
'errorbar',
'errorbars',
'eval',
'excenter',
'excircle',
'exit',
'exitXasyMode',
'exitfunction',
'exp',
'expfactors',
'expi',
'expm1',
'exradius',
'extend',
'extension',
'extouch',
'fabs',
'factorial',
'fermat',
'fft',
'fhorner',
'figure',
'file',
'filecode',
'fill',
'filldraw',
'filloutside',
'fillrule',
'filltype',
'find',
'finite',
'finiteDifferenceJacobian',
'firstcut',
'firstframe',
'fit',
'fit2',
'fixedscaling',
'floor',
'flush',
'fmdefaults',
'fmod',
'focusToCenter',
'font',
'fontcommand',
'fontsize',
'foot',
'format',
'frac',
'frequency',
'fromCenter',
'fromFocus',
'fspline',
'functionshade',
'gamma',
'generate_random_backtrace',
'generateticks',
'gergonne',
'getc',
'getint',
'getpair',
'getreal',
'getstring',
'gettriple',
'gluon',
'gouraudshade',
'graph',
'graphic',
'gray',
'grestore',
'grid',
'grid3',
'gsave',
'halfbox',
'hatch',
'hdiffdiv',
'hermite',
'hex',
'histogram',
'history',
'hline',
'hprojection',
'hsv',
'hyperbola',
'hyperbolanodesnumber',
'hyperlink',
'hypot',
'identity',
'image',
'incenter',
'incentral',
'incircle',
'increasing',
'incrementposition',
'indexedTransform',
'indexedfigure',
'initXasyMode',
'initdefaults',
'input',
'inradius',
'insert',
'inside',
'integrate',
'interactive',
'interior',
'interp',
'interpolate',
'intersect',
'intersection',
'intersectionpoint',
'intersectionpoints',
'intersections',
'intouch',
'inverse',
'inversion',
'invisible',
'is3D',
'isDuplicate',
'isogonal',
'isogonalconjugate',
'isotomic',
'isotomicconjugate',
'isparabola',
'italic',
'item',
'key',
'kurtosis',
'kurtosisexcess',
'label',
'labelaxis',
'labelmargin',
'labelpath',
'labels',
'labeltick',
'labelx',
'labelx3',
'labely',
'labely3',
'labelz',
'labelz3',
'lastcut',
'latex',
'latitude',
'latticeshade',
'layer',
'layout',
'ldexp',
'leastsquares',
'legend',
'legenditem',
'length',
'lift',
'light',
'limits',
'line',
'linear',
'linecap',
'lineinversion',
'linejoin',
'linemargin',
'lineskip',
'linetype',
'linewidth',
'link',
'list',
'lm_enorm',
'lm_evaluate_default',
'lm_lmdif',
'lm_lmpar',
'lm_minimize',
'lm_print_default',
'lm_print_quiet',
'lm_qrfac',
'lm_qrsolv',
'locale',
'locate',
'locatefile',
'location',
'log',
'log10',
'log1p',
'logaxiscoverage',
'longitude',
'lookup',
'magnetize',
'makeNode',
'makedraw',
'makepen',
'map',
'margin',
'markangle',
'markangleradius',
'markanglespace',
'markarc',
'marker',
'markinterval',
'marknodes',
'markrightangle',
'markuniform',
'mass',
'masscenter',
'massformat',
'math',
'max',
'max3',
'maxbezier',
'maxbound',
'maxcoords',
'maxlength',
'maxratio',
'maxtimes',
'mean',
'medial',
'median',
'midpoint',
'min',
'min3',
'minbezier',
'minbound',
'minipage',
'minratio',
'mintimes',
'miterlimit',
'momArrowPath',
'momarrowsize',
'monotonic',
'multifigure',
'nativeformat',
'natural',
'needshipout',
'newl',
'newpage',
'newslide',
'newton',
'newtree',
'nextframe',
'nextnormal',
'nextpage',
'nib',
'nodabscissa',
'none',
'norm',
'normalvideo',
'notaknot',
'nowarn',
'numberpage',
'nurb',
'object',
'offset',
'onpath',
'opacity',
'opposite',
'orientation',
'orig_circlenodesnumber',
'orig_circlenodesnumber1',
'orig_draw',
'orig_ellipsenodesnumber',
'orig_ellipsenodesnumber1',
'orig_hyperbolanodesnumber',
'orig_parabolanodesnumber',
'origin',
'orthic',
'orthocentercenter',
'outformat',
'outline',
'outprefix',
'output',
'overloadedMessage',
'overwrite',
'pack',
'pad',
'pairs',
'palette',
'parabola',
'parabolanodesnumber',
'parallel',
'partialsum',
'path',
'path3',
'pattern',
'pause',
'pdf',
'pedal',
'periodic',
'perp',
'perpendicular',
'perpendicularmark',
'phantom',
'phi1',
'phi2',
'phi3',
'photon',
'piecewisestraight',
'point',
'polar',
'polarconicroutine',
'polargraph',
'polygon',
'postcontrol',
'postscript',
'pow10',
'ppoint',
'prc',
'prc0',
'precision',
'precontrol',
'prepend',
'print_random_addresses',
'project',
'projection',
'purge',
'pwhermite',
'quadrant',
'quadraticroots',
'quantize',
'quarticroots',
'quotient',
'radialshade',
'radians',
'radicalcenter',
'radicalline',
'radius',
'rand',
'randompath',
'rd',
'readline',
'realmult',
'realquarticroots',
'rectangle',
'rectangular',
'rectify',
'reflect',
'relabscissa',
'relative',
'relativedistance',
'reldir',
'relpoint',
'reltime',
'remainder',
'remark',
'removeDuplicates',
'rename',
'replace',
'report',
'resetdefaultpen',
'restore',
'restoredefaults',
'reverse',
'reversevideo',
'rf',
'rfind',
'rgb',
'rgba',
'rgbint',
'rms',
'rotate',
'rotateO',
'rotation',
'round',
'roundbox',
'roundedpath',
'roundrectangle',
'samecoordsys',
'sameside',
'sample',
'save',
'savedefaults',
'saveline',
'scale',
'scale3',
'scaleO',
'scaleT',
'scaleless',
'scientific',
'search',
'searchtree',
'sec',
'secondaryX',
'secondaryY',
'seconds',
'section',
'sector',
'seek',
'seekeof',
'segment',
'sequence',
'setpens',
'sgn',
'sgnd',
'sharpangle',
'sharpdegrees',
'shift',
'shiftless',
'shipout',
'shipout3',
'show',
'side',
'simeq',
'simpson',
'sin',
'single',
'sinh',
'size',
'size3',
'skewness',
'skip',
'slant',
'sleep',
'slope',
'slopefield',
'solve',
'solveBVP',
'sort',
'sourceline',
'sphere',
'split',
'sqrt',
'square',
'srand',
'standardizecoordsys',
'startScript',
'startTrembling',
'stdev',
'step',
'stickframe',
'stickmarksize',
'stickmarkspace',
'stop',
'straight',
'straightness',
'string',
'stripdirectory',
'stripextension',
'stripfile',
'strokepath',
'subdivide',
'subitem',
'subpath',
'substr',
'sum',
'surface',
'symmedial',
'symmedian',
'system',
'tab',
'tableau',
'tan',
'tangent',
'tangential',
'tangents',
'tanh',
'tell',
'tensionSpecifier',
'tensorshade',
'tex',
'texcolor',
'texify',
'texpath',
'texpreamble',
'texreset',
'texshipout',
'texsize',
'textpath',
'thick',
'thin',
'tick',
'tickMax',
'tickMax3',
'tickMin',
'tickMin3',
'ticklabelshift',
'ticklocate',
'tildeframe',
'tildemarksize',
'tile',
'tiling',
'time',
'times',
'title',
'titlepage',
'topbox',
'transform',
'transformation',
'transpose',
'tremble',
'trembleFuzz',
'tremble_circlenodesnumber',
'tremble_circlenodesnumber1',
'tremble_draw',
'tremble_ellipsenodesnumber',
'tremble_ellipsenodesnumber1',
'tremble_hyperbolanodesnumber',
'tremble_marknodes',
'tremble_markuniform',
'tremble_parabolanodesnumber',
'triangle',
'triangleAbc',
'triangleabc',
'triangulate',
'tricoef',
'tridiagonal',
'trilinear',
'trim',
'trueMagnetize',
'truepoint',
'tube',
'uncycle',
'unfill',
'uniform',
'unit',
'unitrand',
'unitsize',
'unityroot',
'unstraighten',
'upcase',
'updatefunction',
'uperiodic',
'upscale',
'uptodate',
'usepackage',
'usersetting',
'usetypescript',
'usleep',
'value',
'variance',
'variancebiased',
'vbox',
'vector',
'vectorfield',
'verbatim',
'view',
'vline',
'vperiodic',
'vprojection',
'warn',
'warning',
'windingnumber',
'write',
'xaxis',
'xaxis3',
'xaxis3At',
'xaxisAt',
'xequals',
'xinput',
'xlimits',
'xoutput',
'xpart',
'xscale',
'xscaleO',
'xtick',
'xtick3',
'xtrans',
'yaxis',
'yaxis3',
'yaxis3At',
'yaxisAt',
'yequals',
'ylimits',
'ypart',
'yscale',
'yscaleO',
'ytick',
'ytick3',
'ytrans',
'zaxis3',
'zaxis3At',
'zero',
'zero3',
'zlimits',
'zpart',
'ztick',
'ztick3',
'ztrans'
])
ASYVARNAME = set([
'AliceBlue',
'Align',
'Allow',
'AntiqueWhite',
'Apricot',
'Aqua',
'Aquamarine',
'Aspect',
'Azure',
'BeginPoint',
'Beige',
'Bisque',
'Bittersweet',
'Black',
'BlanchedAlmond',
'Blue',
'BlueGreen',
'BlueViolet',
'Both',
'Break',
'BrickRed',
'Brown',
'BurlyWood',
'BurntOrange',
'CCW',
'CW',
'CadetBlue',
'CarnationPink',
'Center',
'Centered',
'Cerulean',
'Chartreuse',
'Chocolate',
'Coeff',
'Coral',
'CornflowerBlue',
'Cornsilk',
'Crimson',
'Crop',
'Cyan',
'Dandelion',
'DarkBlue',
'DarkCyan',
'DarkGoldenrod',
'DarkGray',
'DarkGreen',
'DarkKhaki',
'DarkMagenta',
'DarkOliveGreen',
'DarkOrange',
'DarkOrchid',
'DarkRed',
'DarkSalmon',
'DarkSeaGreen',
'DarkSlateBlue',
'DarkSlateGray',
'DarkTurquoise',
'DarkViolet',
'DeepPink',
'DeepSkyBlue',
'DefaultHead',
'DimGray',
'DodgerBlue',
'Dotted',
'Draw',
'E',
'ENE',
'EPS',
'ESE',
'E_Euler',
'E_PC',
'E_RK2',
'E_RK3BS',
'Emerald',
'EndPoint',
'Euler',
'Fill',
'FillDraw',
'FireBrick',
'FloralWhite',
'ForestGreen',
'Fuchsia',
'Gainsboro',
'GhostWhite',
'Gold',
'Goldenrod',
'Gray',
'Green',
'GreenYellow',
'Honeydew',
'HookHead',
'Horizontal',
'HotPink',
'I',
'IgnoreAspect',
'IndianRed',
'Indigo',
'Ivory',
'JOIN_IN',
'JOIN_OUT',
'JungleGreen',
'Khaki',
'LM_DWARF',
'LM_MACHEP',
'LM_SQRT_DWARF',
'LM_SQRT_GIANT',
'LM_USERTOL',
'Label',
'Lavender',
'LavenderBlush',
'LawnGreen',
'LeftJustified',
'LeftSide',
'LemonChiffon',
'LightBlue',
'LightCoral',
'LightCyan',
'LightGoldenrodYellow',
'LightGreen',
'LightGrey',
'LightPink',
'LightSalmon',
'LightSeaGreen',
'LightSkyBlue',
'LightSlateGray',
'LightSteelBlue',
'LightYellow',
'Lime',
'LimeGreen',
'Linear',
'Linen',
'Log',
'Logarithmic',
'Magenta',
'Mahogany',
'Mark',
'MarkFill',
'Maroon',
'Max',
'MediumAquamarine',
'MediumBlue',
'MediumOrchid',
'MediumPurple',
'MediumSeaGreen',
'MediumSlateBlue',
'MediumSpringGreen',
'MediumTurquoise',
'MediumVioletRed',
'Melon',
'MidPoint',
'MidnightBlue',
'Min',
'MintCream',
'MistyRose',
'Moccasin',
'Move',
'MoveQuiet',
'Mulberry',
'N',
'NE',
'NNE',
'NNW',
'NW',
'NavajoWhite',
'Navy',
'NavyBlue',
'NoAlign',
'NoCrop',
'NoFill',
'NoSide',
'OldLace',
'Olive',
'OliveDrab',
'OliveGreen',
'Orange',
'OrangeRed',
'Orchid',
'Ox',
'Oy',
'PC',
'PaleGoldenrod',
'PaleGreen',
'PaleTurquoise',
'PaleVioletRed',
'PapayaWhip',
'Peach',
'PeachPuff',
'Periwinkle',
'Peru',
'PineGreen',
'Pink',
'Plum',
'PowderBlue',
'ProcessBlue',
'Purple',
'RK2',
'RK3',
'RK3BS',
'RK4',
'RK5',
'RK5DP',
'RK5F',
'RawSienna',
'Red',
'RedOrange',
'RedViolet',
'Rhodamine',
'RightJustified',
'RightSide',
'RosyBrown',
'RoyalBlue',
'RoyalPurple',
'RubineRed',
'S',
'SE',
'SSE',
'SSW',
'SW',
'SaddleBrown',
'Salmon',
'SandyBrown',
'SeaGreen',
'Seashell',
'Sepia',
'Sienna',
'Silver',
'SimpleHead',
'SkyBlue',
'SlateBlue',
'SlateGray',
'Snow',
'SpringGreen',
'SteelBlue',
'Suppress',
'SuppressQuiet',
'Tan',
'TeXHead',
'Teal',
'TealBlue',
'Thistle',
'Ticksize',
'Tomato',
'Turquoise',
'UnFill',
'VERSION',
'Value',
'Vertical',
'Violet',
'VioletRed',
'W',
'WNW',
'WSW',
'Wheat',
'White',
'WhiteSmoke',
'WildStrawberry',
'XYAlign',
'YAlign',
'Yellow',
'YellowGreen',
'YellowOrange',
'addpenarc',
'addpenline',
'align',
'allowstepping',
'angularsystem',
'animationdelay',
'appendsuffix',
'arcarrowangle',
'arcarrowfactor',
'arrow2sizelimit',
'arrowangle',
'arrowbarb',
'arrowdir',
'arrowfactor',
'arrowhookfactor',
'arrowlength',
'arrowsizelimit',
'arrowtexfactor',
'authorpen',
'axis',
'axiscoverage',
'axislabelfactor',
'background',
'backgroundcolor',
'backgroundpen',
'barfactor',
'barmarksizefactor',
'basealign',
'baselinetemplate',
'beveljoin',
'bigvertexpen',
'bigvertexsize',
'black',
'blue',
'bm',
'bottom',
'bp',
'brown',
'bullet',
'byfoci',
'byvertices',
'camerafactor',
'chartreuse',
'circlemarkradiusfactor',
'circlenodesnumberfactor',
'circleprecision',
'circlescale',
'cm',
'codefile',
'codepen',
'codeskip',
'colorPen',
'coloredNodes',
'coloredSegments',
'conditionlength',
'conicnodesfactor',
'count',
'cputimeformat',
'crossmarksizefactor',
'currentcoordsys',
'currentlight',
'currentpatterns',
'currentpen',
'currentpicture',
'currentposition',
'currentprojection',
'curvilinearsystem',
'cuttings',
'cyan',
'darkblue',
'darkbrown',
'darkcyan',
'darkgray',
'darkgreen',
'darkgrey',
'darkmagenta',
'darkolive',
'darkred',
'dashdotted',
'dashed',
'datepen',
'dateskip',
'debuggerlines',
'debugging',
'deepblue',
'deepcyan',
'deepgray',
'deepgreen',
'deepgrey',
'deepmagenta',
'deepred',
'default',
'defaultControl',
'defaultS',
'defaultbackpen',
'defaultcoordsys',
'defaultfilename',
'defaultformat',
'defaultmassformat',
'defaultpen',
'diagnostics',
'differentlengths',
'dot',
'dotfactor',
'dotframe',
'dotted',
'doublelinepen',
'doublelinespacing',
'down',
'duplicateFuzz',
'ellipsenodesnumberfactor',
'eps',
'epsgeo',
'epsilon',
'evenodd',
'extendcap',
'fermionpen',
'figureborder',
'figuremattpen',
'firstnode',
'firststep',
'foregroundcolor',
'fuchsia',
'fuzz',
'gapfactor',
'ghostpen',
'gluonamplitude',
'gluonpen',
'gluonratio',
'gray',
'green',
'grey',
'hatchepsilon',
'havepagenumber',
'heavyblue',
'heavycyan',
'heavygray',
'heavygreen',
'heavygrey',
'heavymagenta',
'heavyred',
'hline',
'hwratio',
'hyperbolanodesnumberfactor',
'identity4',
'ignore',
'inXasyMode',
'inch',
'inches',
'includegraphicscommand',
'inf',
'infinity',
'institutionpen',
'intMax',
'intMin',
'invert',
'invisible',
'itempen',
'itemskip',
'itemstep',
'labelmargin',
'landscape',
'lastnode',
'left',
'legendhskip',
'legendlinelength',
'legendmargin',
'legendmarkersize',
'legendmaxrelativewidth',
'legendvskip',
'lightblue',
'lightcyan',
'lightgray',
'lightgreen',
'lightgrey',
'lightmagenta',
'lightolive',
'lightred',
'lightyellow',
'linemargin',
'lm_infmsg',
'lm_shortmsg',
'longdashdotted',
'longdashed',
'magenta',
'magneticPoints',
'magneticRadius',
'mantissaBits',
'markangleradius',
'markangleradiusfactor',
'markanglespace',
'markanglespacefactor',
'mediumblue',
'mediumcyan',
'mediumgray',
'mediumgreen',
'mediumgrey',
'mediummagenta',
'mediumred',
'mediumyellow',
'middle',
'minDistDefault',
'minblockheight',
'minblockwidth',
'mincirclediameter',
'minipagemargin',
'minipagewidth',
'minvertexangle',
'miterjoin',
'mm',
'momarrowfactor',
'momarrowlength',
'momarrowmargin',
'momarrowoffset',
'momarrowpen',
'monoPen',
'morepoints',
'nCircle',
'newbulletcolor',
'ngraph',
'nil',
'nmesh',
'nobasealign',
'nodeMarginDefault',
'nodesystem',
'nomarker',
'nopoint',
'noprimary',
'nullpath',
'nullpen',
'numarray',
'ocgindex',
'oldbulletcolor',
'olive',
'orange',
'origin',
'overpaint',
'page',
'pageheight',
'pagemargin',
'pagenumberalign',
'pagenumberpen',
'pagenumberposition',
'pagewidth',
'paleblue',
'palecyan',
'palegray',
'palegreen',
'palegrey',
'palemagenta',
'palered',
'paleyellow',
'parabolanodesnumberfactor',
'perpfactor',
'phi',
'photonamplitude',
'photonpen',
'photonratio',
'pi',
'pink',
'plain',
'plus',
'preamblenodes',
'pt',
'purple',
'r3',
'r4a',
'r4b',
'randMax',
'realDigits',
'realEpsilon',
'realMax',
'realMin',
'red',
'relativesystem',
'reverse',
'right',
'roundcap',
'roundjoin',
'royalblue',
'salmon',
'saveFunctions',
'scalarpen',
'sequencereal',
'settings',
'shipped',
'signedtrailingzero',
'solid',
'springgreen',
'sqrtEpsilon',
'squarecap',
'squarepen',
'startposition',
'stdin',
'stdout',
'stepfactor',
'stepfraction',
'steppagenumberpen',
'stepping',
'stickframe',
'stickmarksizefactor',
'stickmarkspacefactor',
'textpen',
'ticksize',
'tildeframe',
'tildemarksizefactor',
'tinv',
'titlealign',
'titlepagepen',
'titlepageposition',
'titlepen',
'titleskip',
'top',
'trailingzero',
'treeLevelStep',
'treeMinNodeWidth',
'treeNodeStep',
'trembleAngle',
'trembleFrequency',
'trembleRandom',
'tremblingMode',
'undefined',
'unitcircle',
'unitsquare',
'up',
'urlpen',
'urlskip',
'version',
'vertexpen',
'vertexsize',
'viewportmargin',
'viewportsize',
'vline',
'white',
'wye',
'xformStack',
'yellow',
'ylabelwidth',
'zerotickfuzz',
'zerowinding'
])
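# A minimal sketch of how a lexer pass might consult these sets to retag
# plain names (illustrative only; the hook the real Asymptote lexer uses may
# differ, and ``retag_name`` is a hypothetical helper name):
#
#     from pygments.token import Name
#
#     def retag_name(token, value):
#         if token is Name and value in ASYFUNCNAME:
#             return Name.Function
#         if token is Name and value in ASYVARNAME:
#             return Name.Variable
#         return token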
| mit | -7,109,296,885,919,176,000 | 15.607295 | 77 | 0.515722 | false |
jumpstarter-io/horizon | openstack_dashboard/dashboards/project/data_processing/jobs/tests.py | 11 | 1751 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django import http
from mox import IsA # noqa
from openstack_dashboard import api
from openstack_dashboard.test import helpers as test
INDEX_URL = reverse('horizon:project:data_processing.jobs:index')
DETAILS_URL = reverse(
'horizon:project:data_processing.jobs:details', args=['id'])
class DataProcessingJobTests(test.TestCase):
@test.create_stubs({api.sahara: ('job_list',)})
def test_index(self):
api.sahara.job_list(IsA(http.HttpRequest)) \
.AndReturn(self.jobs.list())
self.mox.ReplayAll()
res = self.client.get(INDEX_URL)
self.assertTemplateUsed(res,
'project/data_processing.jobs/jobs.html')
self.assertContains(res, 'Jobs')
self.assertContains(res, 'Name')
@test.create_stubs({api.sahara: ('job_get',)})
def test_details(self):
api.sahara.job_get(IsA(http.HttpRequest), IsA(unicode)) \
.AndReturn(self.jobs.list()[0])
self.mox.ReplayAll()
res = self.client.get(DETAILS_URL)
self.assertTemplateUsed(res,
'project/data_processing.jobs/details.html')
self.assertContains(res, 'pigjob')
| apache-2.0 | -7,760,852,926,997,335,000 | 36.255319 | 75 | 0.695031 | false |
Weicong-Lin/pymo-global | android/pgs4a-0.9.6/python-install/lib/python2.7/encodings/cp858.py | 416 | 34271 | """ Python Character Mapping Codec for CP858, modified from cp850.
"""
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_map)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_map)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='cp858',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
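# A usage sketch (assuming Python 2, to match this module's u''/str syntax).
# Once the codec is registered under the name 'cp858', the tables below drive
# the conversion; e.g. the euro sign round-trips through byte 0xd5:
#
#     >>> u'\u20ac'.encode('cp858')
#     '\xd5'
#     >>> '\xd5'.decode('cp858')
#     u'\u20ac'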
### Decoding Map
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
0x0080: 0x00c7, # LATIN CAPITAL LETTER C WITH CEDILLA
0x0081: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
0x0082: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
0x0083: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX
0x0084: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
0x0085: 0x00e0, # LATIN SMALL LETTER A WITH GRAVE
0x0086: 0x00e5, # LATIN SMALL LETTER A WITH RING ABOVE
0x0087: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA
0x0088: 0x00ea, # LATIN SMALL LETTER E WITH CIRCUMFLEX
0x0089: 0x00eb, # LATIN SMALL LETTER E WITH DIAERESIS
0x008a: 0x00e8, # LATIN SMALL LETTER E WITH GRAVE
0x008b: 0x00ef, # LATIN SMALL LETTER I WITH DIAERESIS
0x008c: 0x00ee, # LATIN SMALL LETTER I WITH CIRCUMFLEX
0x008d: 0x00ec, # LATIN SMALL LETTER I WITH GRAVE
0x008e: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
0x008f: 0x00c5, # LATIN CAPITAL LETTER A WITH RING ABOVE
0x0090: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
0x0091: 0x00e6, # LATIN SMALL LIGATURE AE
0x0092: 0x00c6, # LATIN CAPITAL LIGATURE AE
0x0093: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
0x0094: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
0x0095: 0x00f2, # LATIN SMALL LETTER O WITH GRAVE
0x0096: 0x00fb, # LATIN SMALL LETTER U WITH CIRCUMFLEX
0x0097: 0x00f9, # LATIN SMALL LETTER U WITH GRAVE
0x0098: 0x00ff, # LATIN SMALL LETTER Y WITH DIAERESIS
0x0099: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
0x009a: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
0x009b: 0x00f8, # LATIN SMALL LETTER O WITH STROKE
0x009c: 0x00a3, # POUND SIGN
0x009d: 0x00d8, # LATIN CAPITAL LETTER O WITH STROKE
0x009e: 0x00d7, # MULTIPLICATION SIGN
0x009f: 0x0192, # LATIN SMALL LETTER F WITH HOOK
0x00a0: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE
0x00a1: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE
0x00a2: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
0x00a3: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE
0x00a4: 0x00f1, # LATIN SMALL LETTER N WITH TILDE
0x00a5: 0x00d1, # LATIN CAPITAL LETTER N WITH TILDE
0x00a6: 0x00aa, # FEMININE ORDINAL INDICATOR
0x00a7: 0x00ba, # MASCULINE ORDINAL INDICATOR
0x00a8: 0x00bf, # INVERTED QUESTION MARK
0x00a9: 0x00ae, # REGISTERED SIGN
0x00aa: 0x00ac, # NOT SIGN
0x00ab: 0x00bd, # VULGAR FRACTION ONE HALF
0x00ac: 0x00bc, # VULGAR FRACTION ONE QUARTER
0x00ad: 0x00a1, # INVERTED EXCLAMATION MARK
0x00ae: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00af: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00b0: 0x2591, # LIGHT SHADE
0x00b1: 0x2592, # MEDIUM SHADE
0x00b2: 0x2593, # DARK SHADE
0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
0x00b5: 0x00c1, # LATIN CAPITAL LETTER A WITH ACUTE
0x00b6: 0x00c2, # LATIN CAPITAL LETTER A WITH CIRCUMFLEX
0x00b7: 0x00c0, # LATIN CAPITAL LETTER A WITH GRAVE
0x00b8: 0x00a9, # COPYRIGHT SIGN
0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
0x00bd: 0x00a2, # CENT SIGN
0x00be: 0x00a5, # YEN SIGN
0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
0x00c6: 0x00e3, # LATIN SMALL LETTER A WITH TILDE
0x00c7: 0x00c3, # LATIN CAPITAL LETTER A WITH TILDE
0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
0x00cf: 0x00a4, # CURRENCY SIGN
0x00d0: 0x00f0, # LATIN SMALL LETTER ETH
0x00d1: 0x00d0, # LATIN CAPITAL LETTER ETH
0x00d2: 0x00ca, # LATIN CAPITAL LETTER E WITH CIRCUMFLEX
0x00d3: 0x00cb, # LATIN CAPITAL LETTER E WITH DIAERESIS
0x00d4: 0x00c8, # LATIN CAPITAL LETTER E WITH GRAVE
0x00d5: 0x20ac, # EURO SIGN
0x00d6: 0x00cd, # LATIN CAPITAL LETTER I WITH ACUTE
0x00d7: 0x00ce, # LATIN CAPITAL LETTER I WITH CIRCUMFLEX
0x00d8: 0x00cf, # LATIN CAPITAL LETTER I WITH DIAERESIS
0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
0x00db: 0x2588, # FULL BLOCK
0x00dc: 0x2584, # LOWER HALF BLOCK
0x00dd: 0x00a6, # BROKEN BAR
0x00de: 0x00cc, # LATIN CAPITAL LETTER I WITH GRAVE
0x00df: 0x2580, # UPPER HALF BLOCK
0x00e0: 0x00d3, # LATIN CAPITAL LETTER O WITH ACUTE
0x00e1: 0x00df, # LATIN SMALL LETTER SHARP S
0x00e2: 0x00d4, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX
0x00e3: 0x00d2, # LATIN CAPITAL LETTER O WITH GRAVE
0x00e4: 0x00f5, # LATIN SMALL LETTER O WITH TILDE
0x00e5: 0x00d5, # LATIN CAPITAL LETTER O WITH TILDE
0x00e6: 0x00b5, # MICRO SIGN
0x00e7: 0x00fe, # LATIN SMALL LETTER THORN
0x00e8: 0x00de, # LATIN CAPITAL LETTER THORN
0x00e9: 0x00da, # LATIN CAPITAL LETTER U WITH ACUTE
0x00ea: 0x00db, # LATIN CAPITAL LETTER U WITH CIRCUMFLEX
0x00eb: 0x00d9, # LATIN CAPITAL LETTER U WITH GRAVE
0x00ec: 0x00fd, # LATIN SMALL LETTER Y WITH ACUTE
0x00ed: 0x00dd, # LATIN CAPITAL LETTER Y WITH ACUTE
0x00ee: 0x00af, # MACRON
0x00ef: 0x00b4, # ACUTE ACCENT
0x00f0: 0x00ad, # SOFT HYPHEN
0x00f1: 0x00b1, # PLUS-MINUS SIGN
0x00f2: 0x2017, # DOUBLE LOW LINE
0x00f3: 0x00be, # VULGAR FRACTION THREE QUARTERS
0x00f4: 0x00b6, # PILCROW SIGN
0x00f5: 0x00a7, # SECTION SIGN
0x00f6: 0x00f7, # DIVISION SIGN
0x00f7: 0x00b8, # CEDILLA
0x00f8: 0x00b0, # DEGREE SIGN
0x00f9: 0x00a8, # DIAERESIS
0x00fa: 0x00b7, # MIDDLE DOT
0x00fb: 0x00b9, # SUPERSCRIPT ONE
0x00fc: 0x00b3, # SUPERSCRIPT THREE
0x00fd: 0x00b2, # SUPERSCRIPT TWO
0x00fe: 0x25a0, # BLACK SQUARE
0x00ff: 0x00a0, # NO-BREAK SPACE
})
### Decoding Table
decoding_table = (
u'\x00' # 0x0000 -> NULL
u'\x01' # 0x0001 -> START OF HEADING
u'\x02' # 0x0002 -> START OF TEXT
u'\x03' # 0x0003 -> END OF TEXT
u'\x04' # 0x0004 -> END OF TRANSMISSION
u'\x05' # 0x0005 -> ENQUIRY
u'\x06' # 0x0006 -> ACKNOWLEDGE
u'\x07' # 0x0007 -> BELL
u'\x08' # 0x0008 -> BACKSPACE
u'\t' # 0x0009 -> HORIZONTAL TABULATION
u'\n' # 0x000a -> LINE FEED
u'\x0b' # 0x000b -> VERTICAL TABULATION
u'\x0c' # 0x000c -> FORM FEED
u'\r' # 0x000d -> CARRIAGE RETURN
u'\x0e' # 0x000e -> SHIFT OUT
u'\x0f' # 0x000f -> SHIFT IN
u'\x10' # 0x0010 -> DATA LINK ESCAPE
u'\x11' # 0x0011 -> DEVICE CONTROL ONE
u'\x12' # 0x0012 -> DEVICE CONTROL TWO
u'\x13' # 0x0013 -> DEVICE CONTROL THREE
u'\x14' # 0x0014 -> DEVICE CONTROL FOUR
u'\x15' # 0x0015 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x0016 -> SYNCHRONOUS IDLE
u'\x17' # 0x0017 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x0018 -> CANCEL
u'\x19' # 0x0019 -> END OF MEDIUM
u'\x1a' # 0x001a -> SUBSTITUTE
u'\x1b' # 0x001b -> ESCAPE
u'\x1c' # 0x001c -> FILE SEPARATOR
u'\x1d' # 0x001d -> GROUP SEPARATOR
u'\x1e' # 0x001e -> RECORD SEPARATOR
u'\x1f' # 0x001f -> UNIT SEPARATOR
u' ' # 0x0020 -> SPACE
u'!' # 0x0021 -> EXCLAMATION MARK
u'"' # 0x0022 -> QUOTATION MARK
u'#' # 0x0023 -> NUMBER SIGN
u'$' # 0x0024 -> DOLLAR SIGN
u'%' # 0x0025 -> PERCENT SIGN
u'&' # 0x0026 -> AMPERSAND
u"'" # 0x0027 -> APOSTROPHE
u'(' # 0x0028 -> LEFT PARENTHESIS
u')' # 0x0029 -> RIGHT PARENTHESIS
u'*' # 0x002a -> ASTERISK
u'+' # 0x002b -> PLUS SIGN
u',' # 0x002c -> COMMA
u'-' # 0x002d -> HYPHEN-MINUS
u'.' # 0x002e -> FULL STOP
u'/' # 0x002f -> SOLIDUS
u'0' # 0x0030 -> DIGIT ZERO
u'1' # 0x0031 -> DIGIT ONE
u'2' # 0x0032 -> DIGIT TWO
u'3' # 0x0033 -> DIGIT THREE
u'4' # 0x0034 -> DIGIT FOUR
u'5' # 0x0035 -> DIGIT FIVE
u'6' # 0x0036 -> DIGIT SIX
u'7' # 0x0037 -> DIGIT SEVEN
u'8' # 0x0038 -> DIGIT EIGHT
u'9' # 0x0039 -> DIGIT NINE
u':' # 0x003a -> COLON
u';' # 0x003b -> SEMICOLON
u'<' # 0x003c -> LESS-THAN SIGN
u'=' # 0x003d -> EQUALS SIGN
u'>' # 0x003e -> GREATER-THAN SIGN
u'?' # 0x003f -> QUESTION MARK
u'@' # 0x0040 -> COMMERCIAL AT
u'A' # 0x0041 -> LATIN CAPITAL LETTER A
u'B' # 0x0042 -> LATIN CAPITAL LETTER B
u'C' # 0x0043 -> LATIN CAPITAL LETTER C
u'D' # 0x0044 -> LATIN CAPITAL LETTER D
u'E' # 0x0045 -> LATIN CAPITAL LETTER E
u'F' # 0x0046 -> LATIN CAPITAL LETTER F
u'G' # 0x0047 -> LATIN CAPITAL LETTER G
u'H' # 0x0048 -> LATIN CAPITAL LETTER H
u'I' # 0x0049 -> LATIN CAPITAL LETTER I
u'J' # 0x004a -> LATIN CAPITAL LETTER J
u'K' # 0x004b -> LATIN CAPITAL LETTER K
u'L' # 0x004c -> LATIN CAPITAL LETTER L
u'M' # 0x004d -> LATIN CAPITAL LETTER M
u'N' # 0x004e -> LATIN CAPITAL LETTER N
u'O' # 0x004f -> LATIN CAPITAL LETTER O
u'P' # 0x0050 -> LATIN CAPITAL LETTER P
u'Q' # 0x0051 -> LATIN CAPITAL LETTER Q
u'R' # 0x0052 -> LATIN CAPITAL LETTER R
u'S' # 0x0053 -> LATIN CAPITAL LETTER S
u'T' # 0x0054 -> LATIN CAPITAL LETTER T
u'U' # 0x0055 -> LATIN CAPITAL LETTER U
u'V' # 0x0056 -> LATIN CAPITAL LETTER V
u'W' # 0x0057 -> LATIN CAPITAL LETTER W
u'X' # 0x0058 -> LATIN CAPITAL LETTER X
u'Y' # 0x0059 -> LATIN CAPITAL LETTER Y
u'Z' # 0x005a -> LATIN CAPITAL LETTER Z
u'[' # 0x005b -> LEFT SQUARE BRACKET
u'\\' # 0x005c -> REVERSE SOLIDUS
u']' # 0x005d -> RIGHT SQUARE BRACKET
u'^' # 0x005e -> CIRCUMFLEX ACCENT
u'_' # 0x005f -> LOW LINE
u'`' # 0x0060 -> GRAVE ACCENT
u'a' # 0x0061 -> LATIN SMALL LETTER A
u'b' # 0x0062 -> LATIN SMALL LETTER B
u'c' # 0x0063 -> LATIN SMALL LETTER C
u'd' # 0x0064 -> LATIN SMALL LETTER D
u'e' # 0x0065 -> LATIN SMALL LETTER E
u'f' # 0x0066 -> LATIN SMALL LETTER F
u'g' # 0x0067 -> LATIN SMALL LETTER G
u'h' # 0x0068 -> LATIN SMALL LETTER H
u'i' # 0x0069 -> LATIN SMALL LETTER I
u'j' # 0x006a -> LATIN SMALL LETTER J
u'k' # 0x006b -> LATIN SMALL LETTER K
u'l' # 0x006c -> LATIN SMALL LETTER L
u'm' # 0x006d -> LATIN SMALL LETTER M
u'n' # 0x006e -> LATIN SMALL LETTER N
u'o' # 0x006f -> LATIN SMALL LETTER O
u'p' # 0x0070 -> LATIN SMALL LETTER P
u'q' # 0x0071 -> LATIN SMALL LETTER Q
u'r' # 0x0072 -> LATIN SMALL LETTER R
u's' # 0x0073 -> LATIN SMALL LETTER S
u't' # 0x0074 -> LATIN SMALL LETTER T
u'u' # 0x0075 -> LATIN SMALL LETTER U
u'v' # 0x0076 -> LATIN SMALL LETTER V
u'w' # 0x0077 -> LATIN SMALL LETTER W
u'x' # 0x0078 -> LATIN SMALL LETTER X
u'y' # 0x0079 -> LATIN SMALL LETTER Y
u'z' # 0x007a -> LATIN SMALL LETTER Z
u'{' # 0x007b -> LEFT CURLY BRACKET
u'|' # 0x007c -> VERTICAL LINE
u'}' # 0x007d -> RIGHT CURLY BRACKET
u'~' # 0x007e -> TILDE
u'\x7f' # 0x007f -> DELETE
u'\xc7' # 0x0080 -> LATIN CAPITAL LETTER C WITH CEDILLA
u'\xfc' # 0x0081 -> LATIN SMALL LETTER U WITH DIAERESIS
u'\xe9' # 0x0082 -> LATIN SMALL LETTER E WITH ACUTE
u'\xe2' # 0x0083 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\xe4' # 0x0084 -> LATIN SMALL LETTER A WITH DIAERESIS
u'\xe0' # 0x0085 -> LATIN SMALL LETTER A WITH GRAVE
u'\xe5' # 0x0086 -> LATIN SMALL LETTER A WITH RING ABOVE
u'\xe7' # 0x0087 -> LATIN SMALL LETTER C WITH CEDILLA
u'\xea' # 0x0088 -> LATIN SMALL LETTER E WITH CIRCUMFLEX
u'\xeb' # 0x0089 -> LATIN SMALL LETTER E WITH DIAERESIS
u'\xe8' # 0x008a -> LATIN SMALL LETTER E WITH GRAVE
u'\xef' # 0x008b -> LATIN SMALL LETTER I WITH DIAERESIS
u'\xee' # 0x008c -> LATIN SMALL LETTER I WITH CIRCUMFLEX
u'\xec' # 0x008d -> LATIN SMALL LETTER I WITH GRAVE
u'\xc4' # 0x008e -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xc5' # 0x008f -> LATIN CAPITAL LETTER A WITH RING ABOVE
u'\xc9' # 0x0090 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\xe6' # 0x0091 -> LATIN SMALL LIGATURE AE
u'\xc6' # 0x0092 -> LATIN CAPITAL LIGATURE AE
u'\xf4' # 0x0093 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\xf6' # 0x0094 -> LATIN SMALL LETTER O WITH DIAERESIS
u'\xf2' # 0x0095 -> LATIN SMALL LETTER O WITH GRAVE
u'\xfb' # 0x0096 -> LATIN SMALL LETTER U WITH CIRCUMFLEX
u'\xf9' # 0x0097 -> LATIN SMALL LETTER U WITH GRAVE
u'\xff' # 0x0098 -> LATIN SMALL LETTER Y WITH DIAERESIS
u'\xd6' # 0x0099 -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xdc' # 0x009a -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\xf8' # 0x009b -> LATIN SMALL LETTER O WITH STROKE
u'\xa3' # 0x009c -> POUND SIGN
u'\xd8' # 0x009d -> LATIN CAPITAL LETTER O WITH STROKE
u'\xd7' # 0x009e -> MULTIPLICATION SIGN
u'\u0192' # 0x009f -> LATIN SMALL LETTER F WITH HOOK
u'\xe1' # 0x00a0 -> LATIN SMALL LETTER A WITH ACUTE
u'\xed' # 0x00a1 -> LATIN SMALL LETTER I WITH ACUTE
u'\xf3' # 0x00a2 -> LATIN SMALL LETTER O WITH ACUTE
u'\xfa' # 0x00a3 -> LATIN SMALL LETTER U WITH ACUTE
u'\xf1' # 0x00a4 -> LATIN SMALL LETTER N WITH TILDE
u'\xd1' # 0x00a5 -> LATIN CAPITAL LETTER N WITH TILDE
u'\xaa' # 0x00a6 -> FEMININE ORDINAL INDICATOR
u'\xba' # 0x00a7 -> MASCULINE ORDINAL INDICATOR
u'\xbf' # 0x00a8 -> INVERTED QUESTION MARK
u'\xae' # 0x00a9 -> REGISTERED SIGN
u'\xac' # 0x00aa -> NOT SIGN
u'\xbd' # 0x00ab -> VULGAR FRACTION ONE HALF
u'\xbc' # 0x00ac -> VULGAR FRACTION ONE QUARTER
u'\xa1' # 0x00ad -> INVERTED EXCLAMATION MARK
u'\xab' # 0x00ae -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbb' # 0x00af -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\u2591' # 0x00b0 -> LIGHT SHADE
u'\u2592' # 0x00b1 -> MEDIUM SHADE
u'\u2593' # 0x00b2 -> DARK SHADE
u'\u2502' # 0x00b3 -> BOX DRAWINGS LIGHT VERTICAL
u'\u2524' # 0x00b4 -> BOX DRAWINGS LIGHT VERTICAL AND LEFT
u'\xc1' # 0x00b5 -> LATIN CAPITAL LETTER A WITH ACUTE
u'\xc2' # 0x00b6 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
u'\xc0' # 0x00b7 -> LATIN CAPITAL LETTER A WITH GRAVE
u'\xa9' # 0x00b8 -> COPYRIGHT SIGN
u'\u2563' # 0x00b9 -> BOX DRAWINGS DOUBLE VERTICAL AND LEFT
u'\u2551' # 0x00ba -> BOX DRAWINGS DOUBLE VERTICAL
u'\u2557' # 0x00bb -> BOX DRAWINGS DOUBLE DOWN AND LEFT
u'\u255d' # 0x00bc -> BOX DRAWINGS DOUBLE UP AND LEFT
u'\xa2' # 0x00bd -> CENT SIGN
u'\xa5' # 0x00be -> YEN SIGN
u'\u2510' # 0x00bf -> BOX DRAWINGS LIGHT DOWN AND LEFT
u'\u2514' # 0x00c0 -> BOX DRAWINGS LIGHT UP AND RIGHT
u'\u2534' # 0x00c1 -> BOX DRAWINGS LIGHT UP AND HORIZONTAL
u'\u252c' # 0x00c2 -> BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
u'\u251c' # 0x00c3 -> BOX DRAWINGS LIGHT VERTICAL AND RIGHT
u'\u2500' # 0x00c4 -> BOX DRAWINGS LIGHT HORIZONTAL
u'\u253c' # 0x00c5 -> BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
u'\xe3' # 0x00c6 -> LATIN SMALL LETTER A WITH TILDE
u'\xc3' # 0x00c7 -> LATIN CAPITAL LETTER A WITH TILDE
u'\u255a' # 0x00c8 -> BOX DRAWINGS DOUBLE UP AND RIGHT
u'\u2554' # 0x00c9 -> BOX DRAWINGS DOUBLE DOWN AND RIGHT
u'\u2569' # 0x00ca -> BOX DRAWINGS DOUBLE UP AND HORIZONTAL
u'\u2566' # 0x00cb -> BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
u'\u2560' # 0x00cc -> BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
u'\u2550' # 0x00cd -> BOX DRAWINGS DOUBLE HORIZONTAL
u'\u256c' # 0x00ce -> BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
u'\xa4' # 0x00cf -> CURRENCY SIGN
u'\xf0' # 0x00d0 -> LATIN SMALL LETTER ETH
u'\xd0' # 0x00d1 -> LATIN CAPITAL LETTER ETH
u'\xca' # 0x00d2 -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
u'\xcb' # 0x00d3 -> LATIN CAPITAL LETTER E WITH DIAERESIS
u'\xc8' # 0x00d4 -> LATIN CAPITAL LETTER E WITH GRAVE
u'\u20ac' # 0x00d5 -> EURO SIGN
u'\xcd' # 0x00d6 -> LATIN CAPITAL LETTER I WITH ACUTE
u'\xce' # 0x00d7 -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
u'\xcf' # 0x00d8 -> LATIN CAPITAL LETTER I WITH DIAERESIS
u'\u2518' # 0x00d9 -> BOX DRAWINGS LIGHT UP AND LEFT
u'\u250c' # 0x00da -> BOX DRAWINGS LIGHT DOWN AND RIGHT
u'\u2588' # 0x00db -> FULL BLOCK
u'\u2584' # 0x00dc -> LOWER HALF BLOCK
u'\xa6' # 0x00dd -> BROKEN BAR
u'\xcc' # 0x00de -> LATIN CAPITAL LETTER I WITH GRAVE
u'\u2580' # 0x00df -> UPPER HALF BLOCK
u'\xd3' # 0x00e0 -> LATIN CAPITAL LETTER O WITH ACUTE
u'\xdf' # 0x00e1 -> LATIN SMALL LETTER SHARP S
u'\xd4' # 0x00e2 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
u'\xd2' # 0x00e3 -> LATIN CAPITAL LETTER O WITH GRAVE
u'\xf5' # 0x00e4 -> LATIN SMALL LETTER O WITH TILDE
u'\xd5' # 0x00e5 -> LATIN CAPITAL LETTER O WITH TILDE
u'\xb5' # 0x00e6 -> MICRO SIGN
u'\xfe' # 0x00e7 -> LATIN SMALL LETTER THORN
u'\xde' # 0x00e8 -> LATIN CAPITAL LETTER THORN
u'\xda' # 0x00e9 -> LATIN CAPITAL LETTER U WITH ACUTE
u'\xdb' # 0x00ea -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
u'\xd9' # 0x00eb -> LATIN CAPITAL LETTER U WITH GRAVE
u'\xfd' # 0x00ec -> LATIN SMALL LETTER Y WITH ACUTE
u'\xdd' # 0x00ed -> LATIN CAPITAL LETTER Y WITH ACUTE
u'\xaf' # 0x00ee -> MACRON
u'\xb4' # 0x00ef -> ACUTE ACCENT
u'\xad' # 0x00f0 -> SOFT HYPHEN
u'\xb1' # 0x00f1 -> PLUS-MINUS SIGN
u'\u2017' # 0x00f2 -> DOUBLE LOW LINE
u'\xbe' # 0x00f3 -> VULGAR FRACTION THREE QUARTERS
u'\xb6' # 0x00f4 -> PILCROW SIGN
u'\xa7' # 0x00f5 -> SECTION SIGN
u'\xf7' # 0x00f6 -> DIVISION SIGN
u'\xb8' # 0x00f7 -> CEDILLA
u'\xb0' # 0x00f8 -> DEGREE SIGN
u'\xa8' # 0x00f9 -> DIAERESIS
u'\xb7' # 0x00fa -> MIDDLE DOT
u'\xb9' # 0x00fb -> SUPERSCRIPT ONE
u'\xb3' # 0x00fc -> SUPERSCRIPT THREE
u'\xb2' # 0x00fd -> SUPERSCRIPT TWO
u'\u25a0' # 0x00fe -> BLACK SQUARE
u'\xa0' # 0x00ff -> NO-BREAK SPACE
)
### Encoding Map
encoding_map = {
0x0000: 0x0000, # NULL
0x0001: 0x0001, # START OF HEADING
0x0002: 0x0002, # START OF TEXT
0x0003: 0x0003, # END OF TEXT
0x0004: 0x0004, # END OF TRANSMISSION
0x0005: 0x0005, # ENQUIRY
0x0006: 0x0006, # ACKNOWLEDGE
0x0007: 0x0007, # BELL
0x0008: 0x0008, # BACKSPACE
0x0009: 0x0009, # HORIZONTAL TABULATION
0x000a: 0x000a, # LINE FEED
0x000b: 0x000b, # VERTICAL TABULATION
0x000c: 0x000c, # FORM FEED
0x000d: 0x000d, # CARRIAGE RETURN
0x000e: 0x000e, # SHIFT OUT
0x000f: 0x000f, # SHIFT IN
0x0010: 0x0010, # DATA LINK ESCAPE
0x0011: 0x0011, # DEVICE CONTROL ONE
0x0012: 0x0012, # DEVICE CONTROL TWO
0x0013: 0x0013, # DEVICE CONTROL THREE
0x0014: 0x0014, # DEVICE CONTROL FOUR
0x0015: 0x0015, # NEGATIVE ACKNOWLEDGE
0x0016: 0x0016, # SYNCHRONOUS IDLE
0x0017: 0x0017, # END OF TRANSMISSION BLOCK
0x0018: 0x0018, # CANCEL
0x0019: 0x0019, # END OF MEDIUM
0x001a: 0x001a, # SUBSTITUTE
0x001b: 0x001b, # ESCAPE
0x001c: 0x001c, # FILE SEPARATOR
0x001d: 0x001d, # GROUP SEPARATOR
0x001e: 0x001e, # RECORD SEPARATOR
0x001f: 0x001f, # UNIT SEPARATOR
0x0020: 0x0020, # SPACE
0x0021: 0x0021, # EXCLAMATION MARK
0x0022: 0x0022, # QUOTATION MARK
0x0023: 0x0023, # NUMBER SIGN
0x0024: 0x0024, # DOLLAR SIGN
0x0025: 0x0025, # PERCENT SIGN
0x0026: 0x0026, # AMPERSAND
0x0027: 0x0027, # APOSTROPHE
0x0028: 0x0028, # LEFT PARENTHESIS
0x0029: 0x0029, # RIGHT PARENTHESIS
0x002a: 0x002a, # ASTERISK
0x002b: 0x002b, # PLUS SIGN
0x002c: 0x002c, # COMMA
0x002d: 0x002d, # HYPHEN-MINUS
0x002e: 0x002e, # FULL STOP
0x002f: 0x002f, # SOLIDUS
0x0030: 0x0030, # DIGIT ZERO
0x0031: 0x0031, # DIGIT ONE
0x0032: 0x0032, # DIGIT TWO
0x0033: 0x0033, # DIGIT THREE
0x0034: 0x0034, # DIGIT FOUR
0x0035: 0x0035, # DIGIT FIVE
0x0036: 0x0036, # DIGIT SIX
0x0037: 0x0037, # DIGIT SEVEN
0x0038: 0x0038, # DIGIT EIGHT
0x0039: 0x0039, # DIGIT NINE
0x003a: 0x003a, # COLON
0x003b: 0x003b, # SEMICOLON
0x003c: 0x003c, # LESS-THAN SIGN
0x003d: 0x003d, # EQUALS SIGN
0x003e: 0x003e, # GREATER-THAN SIGN
0x003f: 0x003f, # QUESTION MARK
0x0040: 0x0040, # COMMERCIAL AT
0x0041: 0x0041, # LATIN CAPITAL LETTER A
0x0042: 0x0042, # LATIN CAPITAL LETTER B
0x0043: 0x0043, # LATIN CAPITAL LETTER C
0x0044: 0x0044, # LATIN CAPITAL LETTER D
0x0045: 0x0045, # LATIN CAPITAL LETTER E
0x0046: 0x0046, # LATIN CAPITAL LETTER F
0x0047: 0x0047, # LATIN CAPITAL LETTER G
0x0048: 0x0048, # LATIN CAPITAL LETTER H
0x0049: 0x0049, # LATIN CAPITAL LETTER I
0x004a: 0x004a, # LATIN CAPITAL LETTER J
0x004b: 0x004b, # LATIN CAPITAL LETTER K
0x004c: 0x004c, # LATIN CAPITAL LETTER L
0x004d: 0x004d, # LATIN CAPITAL LETTER M
0x004e: 0x004e, # LATIN CAPITAL LETTER N
0x004f: 0x004f, # LATIN CAPITAL LETTER O
0x0050: 0x0050, # LATIN CAPITAL LETTER P
0x0051: 0x0051, # LATIN CAPITAL LETTER Q
0x0052: 0x0052, # LATIN CAPITAL LETTER R
0x0053: 0x0053, # LATIN CAPITAL LETTER S
0x0054: 0x0054, # LATIN CAPITAL LETTER T
0x0055: 0x0055, # LATIN CAPITAL LETTER U
0x0056: 0x0056, # LATIN CAPITAL LETTER V
0x0057: 0x0057, # LATIN CAPITAL LETTER W
0x0058: 0x0058, # LATIN CAPITAL LETTER X
0x0059: 0x0059, # LATIN CAPITAL LETTER Y
0x005a: 0x005a, # LATIN CAPITAL LETTER Z
0x005b: 0x005b, # LEFT SQUARE BRACKET
0x005c: 0x005c, # REVERSE SOLIDUS
0x005d: 0x005d, # RIGHT SQUARE BRACKET
0x005e: 0x005e, # CIRCUMFLEX ACCENT
0x005f: 0x005f, # LOW LINE
0x0060: 0x0060, # GRAVE ACCENT
0x0061: 0x0061, # LATIN SMALL LETTER A
0x0062: 0x0062, # LATIN SMALL LETTER B
0x0063: 0x0063, # LATIN SMALL LETTER C
0x0064: 0x0064, # LATIN SMALL LETTER D
0x0065: 0x0065, # LATIN SMALL LETTER E
0x0066: 0x0066, # LATIN SMALL LETTER F
0x0067: 0x0067, # LATIN SMALL LETTER G
0x0068: 0x0068, # LATIN SMALL LETTER H
0x0069: 0x0069, # LATIN SMALL LETTER I
0x006a: 0x006a, # LATIN SMALL LETTER J
0x006b: 0x006b, # LATIN SMALL LETTER K
0x006c: 0x006c, # LATIN SMALL LETTER L
0x006d: 0x006d, # LATIN SMALL LETTER M
0x006e: 0x006e, # LATIN SMALL LETTER N
0x006f: 0x006f, # LATIN SMALL LETTER O
0x0070: 0x0070, # LATIN SMALL LETTER P
0x0071: 0x0071, # LATIN SMALL LETTER Q
0x0072: 0x0072, # LATIN SMALL LETTER R
0x0073: 0x0073, # LATIN SMALL LETTER S
0x0074: 0x0074, # LATIN SMALL LETTER T
0x0075: 0x0075, # LATIN SMALL LETTER U
0x0076: 0x0076, # LATIN SMALL LETTER V
0x0077: 0x0077, # LATIN SMALL LETTER W
0x0078: 0x0078, # LATIN SMALL LETTER X
0x0079: 0x0079, # LATIN SMALL LETTER Y
0x007a: 0x007a, # LATIN SMALL LETTER Z
0x007b: 0x007b, # LEFT CURLY BRACKET
0x007c: 0x007c, # VERTICAL LINE
0x007d: 0x007d, # RIGHT CURLY BRACKET
0x007e: 0x007e, # TILDE
0x007f: 0x007f, # DELETE
0x00a0: 0x00ff, # NO-BREAK SPACE
0x00a1: 0x00ad, # INVERTED EXCLAMATION MARK
0x00a2: 0x00bd, # CENT SIGN
0x00a3: 0x009c, # POUND SIGN
0x00a4: 0x00cf, # CURRENCY SIGN
0x00a5: 0x00be, # YEN SIGN
0x00a6: 0x00dd, # BROKEN BAR
0x00a7: 0x00f5, # SECTION SIGN
0x00a8: 0x00f9, # DIAERESIS
0x00a9: 0x00b8, # COPYRIGHT SIGN
0x00aa: 0x00a6, # FEMININE ORDINAL INDICATOR
0x00ab: 0x00ae, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00ac: 0x00aa, # NOT SIGN
0x00ad: 0x00f0, # SOFT HYPHEN
0x00ae: 0x00a9, # REGISTERED SIGN
0x00af: 0x00ee, # MACRON
0x00b0: 0x00f8, # DEGREE SIGN
0x00b1: 0x00f1, # PLUS-MINUS SIGN
0x00b2: 0x00fd, # SUPERSCRIPT TWO
0x00b3: 0x00fc, # SUPERSCRIPT THREE
0x00b4: 0x00ef, # ACUTE ACCENT
0x00b5: 0x00e6, # MICRO SIGN
0x00b6: 0x00f4, # PILCROW SIGN
0x00b7: 0x00fa, # MIDDLE DOT
0x00b8: 0x00f7, # CEDILLA
0x00b9: 0x00fb, # SUPERSCRIPT ONE
0x00ba: 0x00a7, # MASCULINE ORDINAL INDICATOR
0x00bb: 0x00af, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00bc: 0x00ac, # VULGAR FRACTION ONE QUARTER
0x00bd: 0x00ab, # VULGAR FRACTION ONE HALF
0x00be: 0x00f3, # VULGAR FRACTION THREE QUARTERS
0x00bf: 0x00a8, # INVERTED QUESTION MARK
0x00c0: 0x00b7, # LATIN CAPITAL LETTER A WITH GRAVE
0x00c1: 0x00b5, # LATIN CAPITAL LETTER A WITH ACUTE
0x00c2: 0x00b6, # LATIN CAPITAL LETTER A WITH CIRCUMFLEX
0x00c3: 0x00c7, # LATIN CAPITAL LETTER A WITH TILDE
0x00c4: 0x008e, # LATIN CAPITAL LETTER A WITH DIAERESIS
0x00c5: 0x008f, # LATIN CAPITAL LETTER A WITH RING ABOVE
0x00c6: 0x0092, # LATIN CAPITAL LIGATURE AE
0x00c7: 0x0080, # LATIN CAPITAL LETTER C WITH CEDILLA
0x00c8: 0x00d4, # LATIN CAPITAL LETTER E WITH GRAVE
0x00c9: 0x0090, # LATIN CAPITAL LETTER E WITH ACUTE
0x00ca: 0x00d2, # LATIN CAPITAL LETTER E WITH CIRCUMFLEX
0x00cb: 0x00d3, # LATIN CAPITAL LETTER E WITH DIAERESIS
0x00cc: 0x00de, # LATIN CAPITAL LETTER I WITH GRAVE
0x00cd: 0x00d6, # LATIN CAPITAL LETTER I WITH ACUTE
0x00ce: 0x00d7, # LATIN CAPITAL LETTER I WITH CIRCUMFLEX
0x00cf: 0x00d8, # LATIN CAPITAL LETTER I WITH DIAERESIS
0x00d0: 0x00d1, # LATIN CAPITAL LETTER ETH
0x00d1: 0x00a5, # LATIN CAPITAL LETTER N WITH TILDE
0x00d2: 0x00e3, # LATIN CAPITAL LETTER O WITH GRAVE
0x00d3: 0x00e0, # LATIN CAPITAL LETTER O WITH ACUTE
0x00d4: 0x00e2, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX
0x00d5: 0x00e5, # LATIN CAPITAL LETTER O WITH TILDE
0x00d6: 0x0099, # LATIN CAPITAL LETTER O WITH DIAERESIS
0x00d7: 0x009e, # MULTIPLICATION SIGN
0x00d8: 0x009d, # LATIN CAPITAL LETTER O WITH STROKE
0x00d9: 0x00eb, # LATIN CAPITAL LETTER U WITH GRAVE
0x00da: 0x00e9, # LATIN CAPITAL LETTER U WITH ACUTE
0x00db: 0x00ea, # LATIN CAPITAL LETTER U WITH CIRCUMFLEX
0x00dc: 0x009a, # LATIN CAPITAL LETTER U WITH DIAERESIS
0x00dd: 0x00ed, # LATIN CAPITAL LETTER Y WITH ACUTE
0x00de: 0x00e8, # LATIN CAPITAL LETTER THORN
0x00df: 0x00e1, # LATIN SMALL LETTER SHARP S
0x00e0: 0x0085, # LATIN SMALL LETTER A WITH GRAVE
0x00e1: 0x00a0, # LATIN SMALL LETTER A WITH ACUTE
0x00e2: 0x0083, # LATIN SMALL LETTER A WITH CIRCUMFLEX
0x00e3: 0x00c6, # LATIN SMALL LETTER A WITH TILDE
0x00e4: 0x0084, # LATIN SMALL LETTER A WITH DIAERESIS
0x00e5: 0x0086, # LATIN SMALL LETTER A WITH RING ABOVE
0x00e6: 0x0091, # LATIN SMALL LIGATURE AE
0x00e7: 0x0087, # LATIN SMALL LETTER C WITH CEDILLA
0x00e8: 0x008a, # LATIN SMALL LETTER E WITH GRAVE
0x00e9: 0x0082, # LATIN SMALL LETTER E WITH ACUTE
0x00ea: 0x0088, # LATIN SMALL LETTER E WITH CIRCUMFLEX
0x00eb: 0x0089, # LATIN SMALL LETTER E WITH DIAERESIS
0x00ec: 0x008d, # LATIN SMALL LETTER I WITH GRAVE
0x00ed: 0x00a1, # LATIN SMALL LETTER I WITH ACUTE
0x00ee: 0x008c, # LATIN SMALL LETTER I WITH CIRCUMFLEX
0x00ef: 0x008b, # LATIN SMALL LETTER I WITH DIAERESIS
0x00f0: 0x00d0, # LATIN SMALL LETTER ETH
0x00f1: 0x00a4, # LATIN SMALL LETTER N WITH TILDE
0x00f2: 0x0095, # LATIN SMALL LETTER O WITH GRAVE
0x00f3: 0x00a2, # LATIN SMALL LETTER O WITH ACUTE
0x00f4: 0x0093, # LATIN SMALL LETTER O WITH CIRCUMFLEX
0x00f5: 0x00e4, # LATIN SMALL LETTER O WITH TILDE
0x00f6: 0x0094, # LATIN SMALL LETTER O WITH DIAERESIS
0x00f7: 0x00f6, # DIVISION SIGN
0x00f8: 0x009b, # LATIN SMALL LETTER O WITH STROKE
0x00f9: 0x0097, # LATIN SMALL LETTER U WITH GRAVE
0x00fa: 0x00a3, # LATIN SMALL LETTER U WITH ACUTE
0x00fb: 0x0096, # LATIN SMALL LETTER U WITH CIRCUMFLEX
0x00fc: 0x0081, # LATIN SMALL LETTER U WITH DIAERESIS
0x00fd: 0x00ec, # LATIN SMALL LETTER Y WITH ACUTE
0x00fe: 0x00e7, # LATIN SMALL LETTER THORN
0x00ff: 0x0098, # LATIN SMALL LETTER Y WITH DIAERESIS
0x20ac: 0x00d5, # EURO SIGN
0x0192: 0x009f, # LATIN SMALL LETTER F WITH HOOK
0x2017: 0x00f2, # DOUBLE LOW LINE
0x2500: 0x00c4, # BOX DRAWINGS LIGHT HORIZONTAL
0x2502: 0x00b3, # BOX DRAWINGS LIGHT VERTICAL
0x250c: 0x00da, # BOX DRAWINGS LIGHT DOWN AND RIGHT
0x2510: 0x00bf, # BOX DRAWINGS LIGHT DOWN AND LEFT
0x2514: 0x00c0, # BOX DRAWINGS LIGHT UP AND RIGHT
0x2518: 0x00d9, # BOX DRAWINGS LIGHT UP AND LEFT
0x251c: 0x00c3, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
0x2524: 0x00b4, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
0x252c: 0x00c2, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
0x2534: 0x00c1, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
0x253c: 0x00c5, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
0x2550: 0x00cd, # BOX DRAWINGS DOUBLE HORIZONTAL
0x2551: 0x00ba, # BOX DRAWINGS DOUBLE VERTICAL
0x2554: 0x00c9, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
0x2557: 0x00bb, # BOX DRAWINGS DOUBLE DOWN AND LEFT
0x255a: 0x00c8, # BOX DRAWINGS DOUBLE UP AND RIGHT
0x255d: 0x00bc, # BOX DRAWINGS DOUBLE UP AND LEFT
0x2560: 0x00cc, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
0x2563: 0x00b9, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
0x2566: 0x00cb, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
0x2569: 0x00ca, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
0x256c: 0x00ce, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
0x2580: 0x00df, # UPPER HALF BLOCK
0x2584: 0x00dc, # LOWER HALF BLOCK
0x2588: 0x00db, # FULL BLOCK
0x2591: 0x00b0, # LIGHT SHADE
0x2592: 0x00b1, # MEDIUM SHADE
0x2593: 0x00b2, # DARK SHADE
0x25a0: 0x00fe, # BLACK SQUARE
}
| mit | -644,729,594,935,440,800 | 48.098854 | 73 | 0.60077 | false |
rombie/contrail-controller | src/dns/scripts/del_virtual_dns.py | 22 | 2658 | #!/usr/bin/python
#
# Copyright (c) 2013 Juniper Networks, Inc. All rights reserved.
#
import sys
import argparse
import ConfigParser
from provision_dns import DnsProvisioner
from requests.exceptions import ConnectionError
class DelVirtualDns(object):
def __init__(self, args_str = None):
self._args = None
if not args_str:
args_str = ' '.join(sys.argv[1:])
self._parse_args(args_str)
try:
dp_obj = DnsProvisioner(self._args.admin_user, self._args.admin_password,
self._args.admin_tenant_name,
self._args.api_server_ip, self._args.api_server_port)
except ConnectionError:
            print 'Connection to API server failed'
return
dp_obj.del_virtual_dns(self._args.fq_name)
#end __init__
def _parse_args(self, args_str):
'''
Eg. python del_virtual_dns.py --fq_name default-domain:vdns1
'''
# Source any specified config/ini file
# Turn off help, so we print all options in response to -h
conf_parser = argparse.ArgumentParser(add_help = False)
args, remaining_argv = conf_parser.parse_known_args(args_str.split())
defaults = {
'api_server_ip' : '127.0.0.1',
'api_server_port' : '8082',
'admin_user': None,
'admin_password': None,
'admin_tenant_name': None
}
        # Don't suppress add_help here so it will handle -h
parser = argparse.ArgumentParser(
# Inherit options from config_parser
parents=[conf_parser],
# print script description with -h/--help
description=__doc__,
# Don't mess with format of description
formatter_class=argparse.RawDescriptionHelpFormatter,
)
parser.set_defaults(**defaults)
parser.add_argument("--fq_name", help = "Fully qualified Virtual DNS Name")
parser.add_argument("--api_server_ip", help = "IP address of api server")
parser.add_argument("--api_server_port", help = "Port of api server")
parser.add_argument("--admin_user", help = "Name of keystone admin user")
parser.add_argument("--admin_password", help = "Password of keystone admin user")
parser.add_argument("--admin_tenant_name", help = "Tenamt name for keystone admin user")
self._args = parser.parse_args(remaining_argv)
#end _parse_args
# end class DelVirtualDns
def main(args_str = None):
DelVirtualDns(args_str)
#end main
if __name__ == "__main__":
main()
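# Example invocation (the flag values below are hypothetical):
# python del_virtual_dns.py --fq_name default-domain:vdns1 \
#     --api_server_ip 127.0.0.1 --api_server_port 8082 \
#     --admin_user admin --admin_password secret --admin_tenant_name admin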
| apache-2.0 | -2,760,698,951,506,925,600 | 32.64557 | 96 | 0.59067 | false |
daenamkim/ansible | test/units/module_utils/network/nso/test_nso.py | 2 | 6821 | # Copyright (c) 2017 Cisco and/or its affiliates.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
import json
from ansible.compat.tests.mock import patch
from ansible.compat.tests import unittest
from ansible.module_utils.network.nso import nso
MODULE_PREFIX_MAP = '''
{
"ansible-nso": "an",
"tailf-ncs": "ncs"
}
'''
SCHEMA_DATA = {
'/an:id-name-leaf': '''
{
"meta": {
"prefix": "an",
"namespace": "http://github.com/ansible/nso",
"types": {
"http://github.com/ansible/nso:id-name-t": [
{
"name": "http://github.com/ansible/nso:id-name-t",
"enumeration": [
{
"label": "id-one"
},
{
"label": "id-two"
}
]
},
{
"name": "identityref"
}
]
},
"keypath": "/an:id-name-leaf"
},
"data": {
"kind": "leaf",
"type": {
"namespace": "http://github.com/ansible/nso",
"name": "id-name-t"
},
"name": "id-name-leaf",
"qname": "an:id-name-leaf"
}
}''',
'/an:id-name-values': '''
{
"meta": {
"prefix": "an",
"namespace": "http://github.com/ansible/nso",
"types": {},
"keypath": "/an:id-name-values"
},
"data": {
"kind": "container",
"name": "id-name-values",
"qname": "an:id-name-values",
"children": [
{
"kind": "list",
"name": "id-name-value",
"qname": "an:id-name-value",
"key": [
"name"
]
}
]
}
}
''',
'/an:id-name-values/id-name-value': '''
{
"meta": {
"prefix": "an",
"namespace": "http://github.com/ansible/nso",
"types": {
"http://github.com/ansible/nso:id-name-t": [
{
"name": "http://github.com/ansible/nso:id-name-t",
"enumeration": [
{
"label": "id-one"
},
{
"label": "id-two"
}
]
},
{
"name": "identityref"
}
]
},
"keypath": "/an:id-name-values/id-name-value"
},
"data": {
"kind": "list",
"name": "id-name-value",
"qname": "an:id-name-value",
"key": [
"name"
],
"children": [
{
"kind": "key",
"name": "name",
"qname": "an:name",
"type": {
"namespace": "http://github.com/ansible/nso",
"name": "id-name-t"
}
},
{
"kind": "leaf",
"type": {
"primitive": true,
"name": "string"
},
"name": "value",
"qname": "an:value"
}
]
}
}
'''
}
class MockResponse(object):
def __init__(self, method, params, code, body, headers=None):
if headers is None:
headers = {}
self.method = method
self.params = params
self.code = code
self.body = body
self.headers = dict(headers)
def read(self):
return self.body
def mock_call(calls, url, data=None, headers=None, method=None):
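    # Pop the next expected response off the queue and verify that the
    # outgoing JSON-RPC request matches its recorded method and params.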
result = calls[0]
del calls[0]
request = json.loads(data)
if result.method != request['method']:
raise ValueError('expected method {0}({1}), got {2}({3})'.format(
result.method, result.params,
request['method'], request['params']))
for key, value in result.params.items():
if key not in request['params']:
raise ValueError('{0} not in parameters'.format(key))
if value != request['params'][key]:
raise ValueError('expected {0} to be {1}, got {2}'.format(
key, value, request['params'][key]))
return result
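# Editorial note: each test primes `calls` with the MockResponse objects it
# expects, in order; mock_call pops and validates them as the code under test
# issues matching JSON-RPC requests, so asserting len(calls) == 0 at the end
# proves every expected call actually happened.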
def get_schema_response(path):
return MockResponse(
'get_schema', {'path': path}, 200, '{{"result": {0}}}'.format(
SCHEMA_DATA[path]))
class TestValueBuilder(unittest.TestCase):
@patch('ansible.module_utils.network.nso.nso.open_url')
def test_identityref_leaf(self, open_url_mock):
calls = [
MockResponse('new_trans', {}, 200, '{"result": {"th": 1}}'),
get_schema_response('/an:id-name-leaf'),
MockResponse('get_module_prefix_map', {}, 200, '{{"result": {0}}}'.format(MODULE_PREFIX_MAP))
]
open_url_mock.side_effect = lambda *args, **kwargs: mock_call(calls, *args, **kwargs)
parent = "/an:id-name-leaf"
schema_data = json.loads(
SCHEMA_DATA['/an:id-name-leaf'])
schema = schema_data['data']
vb = nso.ValueBuilder(nso.JsonRpc('http://localhost:8080/jsonrpc'))
vb.build(parent, None, 'ansible-nso:id-two', schema)
self.assertEquals(1, len(vb.values))
value = vb.values[0]
self.assertEquals(parent, value.path)
self.assertEquals('set', value.state)
self.assertEquals('an:id-two', value.value)
self.assertEqual(0, len(calls))
@patch('ansible.module_utils.network.nso.nso.open_url')
def test_identityref_key(self, open_url_mock):
calls = [
MockResponse('new_trans', {}, 200, '{"result": {"th": 1}}'),
get_schema_response('/an:id-name-values/id-name-value'),
MockResponse('get_module_prefix_map', {}, 200, '{{"result": {0}}}'.format(MODULE_PREFIX_MAP)),
MockResponse('exists', {'path': '/an:id-name-values/id-name-value{an:id-one}'}, 200, '{"result": {"exists": true}}'),
]
open_url_mock.side_effect = lambda *args, **kwargs: mock_call(calls, *args, **kwargs)
parent = "/an:id-name-values"
schema_data = json.loads(
SCHEMA_DATA['/an:id-name-values/id-name-value'])
schema = schema_data['data']
vb = nso.ValueBuilder(nso.JsonRpc('http://localhost:8080/jsonrpc'))
vb.build(parent, 'id-name-value', [{'name': 'ansible-nso:id-one', 'value': '1'}], schema)
self.assertEquals(1, len(vb.values))
value = vb.values[0]
self.assertEquals('{0}/id-name-value{{an:id-one}}/value'.format(parent), value.path)
self.assertEquals('set', value.state)
self.assertEquals('1', value.value)
self.assertEqual(0, len(calls))
| gpl-3.0 | -6,294,200,098,730,728,000 | 26.840816 | 129 | 0.539217 | false |
darkleons/BE | addons/mrp/stock.py | 24 | 18483 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import fields
from openerp.osv import osv
from openerp.tools.translate import _
from openerp import SUPERUSER_ID
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT, float_compare
class StockMove(osv.osv):
_inherit = 'stock.move'
_columns = {
'production_id': fields.many2one('mrp.production', 'Production Order for Produced Products', select=True, copy=False),
'raw_material_production_id': fields.many2one('mrp.production', 'Production Order for Raw Materials', select=True),
'consumed_for': fields.many2one('stock.move', 'Consumed for', help='Technical field used to make the traceability of produced products'),
}
def check_tracking(self, cr, uid, move, lot_id, context=None):
super(StockMove, self).check_tracking(cr, uid, move, lot_id, context=context)
if move.product_id.track_production and (move.location_id.usage == 'production' or move.location_dest_id.usage == 'production') and not lot_id:
raise osv.except_osv(_('Warning!'), _('You must assign a serial number for the product %s') % (move.product_id.name))
if move.raw_material_production_id and move.location_dest_id.usage == 'production' and move.raw_material_production_id.product_id.track_production and not move.consumed_for:
raise osv.except_osv(_('Warning!'), _("Because the product %s requires it, you must assign a serial number to your raw material %s to proceed further in your production. Please use the 'Produce' button to do so.") % (move.raw_material_production_id.product_id.name, move.product_id.name))
def _check_phantom_bom(self, cr, uid, move, context=None):
"""check if product associated to move has a phantom bom
return list of ids of mrp.bom for that product """
user_company = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.id
#doing the search as SUPERUSER because a user with the permission to write on a stock move should be able to explode it
#without giving him the right to read the boms.
domain = [
'|', ('product_id', '=', move.product_id.id),
'&', ('product_id', '=', False), ('product_tmpl_id.product_variant_ids', '=', move.product_id.id),
('type', '=', 'phantom'),
'|', ('date_start', '=', False), ('date_start', '<=', time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)),
'|', ('date_stop', '=', False), ('date_stop', '>=', time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)),
('company_id', '=', user_company)]
return self.pool.get('mrp.bom').search(cr, SUPERUSER_ID, domain, context=context)
def _action_explode(self, cr, uid, move, context=None):
""" Explodes pickings.
@param move: Stock moves
@return: True
"""
bom_obj = self.pool.get('mrp.bom')
move_obj = self.pool.get('stock.move')
prod_obj = self.pool.get("product.product")
proc_obj = self.pool.get("procurement.order")
uom_obj = self.pool.get("product.uom")
to_explode_again_ids = []
processed_ids = []
bis = self._check_phantom_bom(cr, uid, move, context=context)
if bis:
bom_point = bom_obj.browse(cr, SUPERUSER_ID, bis[0], context=context)
factor = uom_obj._compute_qty(cr, SUPERUSER_ID, move.product_uom.id, move.product_uom_qty, bom_point.product_uom.id) / bom_point.product_qty
res = bom_obj._bom_explode(cr, SUPERUSER_ID, bom_point, move.product_id, factor, [], context=context)
state = 'confirmed'
if move.state == 'assigned':
state = 'assigned'
for line in res[0]:
product = prod_obj.browse(cr, uid, line['product_id'], context=context)
if product.type != 'service':
valdef = {
'picking_id': move.picking_id.id if move.picking_id else False,
'product_id': line['product_id'],
'product_uom': line['product_uom'],
'product_uom_qty': line['product_qty'],
'product_uos': line['product_uos'],
'product_uos_qty': line['product_uos_qty'],
'state': state,
'name': line['name'],
'procurement_id': move.procurement_id.id,
'split_from': move.id, #Needed in order to keep sale connection, but will be removed by unlink
}
mid = move_obj.copy(cr, uid, move.id, default=valdef, context=context)
to_explode_again_ids.append(mid)
else:
if prod_obj.need_procurement(cr, uid, [product.id], context=context):
valdef = {
'name': move.rule_id and move.rule_id.name or "/",
'origin': move.origin,
'company_id': move.company_id and move.company_id.id or False,
'date_planned': move.date,
'product_id': line['product_id'],
'product_qty': line['product_qty'],
'product_uom': line['product_uom'],
'product_uos_qty': line['product_uos_qty'],
'product_uos': line['product_uos'],
'group_id': move.group_id.id,
'priority': move.priority,
'partner_dest_id': move.partner_id.id,
}
if move.procurement_id:
proc = proc_obj.copy(cr, uid, move.procurement_id.id, default=valdef, context=context)
else:
proc = proc_obj.create(cr, uid, valdef, context=context)
proc_obj.run(cr, uid, [proc], context=context) #could be omitted
#check if new moves needs to be exploded
if to_explode_again_ids:
for new_move in self.browse(cr, uid, to_explode_again_ids, context=context):
processed_ids.extend(self._action_explode(cr, uid, new_move, context=context))
if not move.split_from and move.procurement_id:
# Check if procurements have been made to wait for
moves = move.procurement_id.move_ids
if len(moves) == 1:
proc_obj.write(cr, uid, [move.procurement_id.id], {'state': 'done'}, context=context)
#delete the move with original product which is not relevant anymore
move_obj.unlink(cr, SUPERUSER_ID, [move.id], context=context)
#return list of newly created move or the move id otherwise, unless there is no move anymore
return processed_ids or (not bis and [move.id]) or []
def action_confirm(self, cr, uid, ids, context=None):
move_ids = []
for move in self.browse(cr, uid, ids, context=context):
#in order to explode a move, we must have a picking_type_id on that move because otherwise the move
#won't be assigned to a picking and it would be weird to explode a move into several if they aren't
#all grouped in the same picking.
if move.picking_type_id:
move_ids.extend(self._action_explode(cr, uid, move, context=context))
else:
move_ids.append(move.id)
#we go further with the list of ids potentially changed by action_explode
return super(StockMove, self).action_confirm(cr, uid, move_ids, context=context)
def action_consume(self, cr, uid, ids, product_qty, location_id=False, restrict_lot_id=False, restrict_partner_id=False,
consumed_for=False, context=None):
""" Consumed product with specific quantity from specific source location.
@param product_qty: Consumed/produced product quantity (= in quantity of UoM of product)
@param location_id: Source location
@param restrict_lot_id: optionnal parameter that allows to restrict the choice of quants on this specific lot
@param restrict_partner_id: optionnal parameter that allows to restrict the choice of quants to this specific partner
@param consumed_for: optionnal parameter given to this function to make the link between raw material consumed and produced product, for a better traceability
@return: New lines created if not everything was consumed for this line
"""
if context is None:
context = {}
res = []
production_obj = self.pool.get('mrp.production')
if product_qty <= 0:
raise osv.except_osv(_('Warning!'), _('Please provide proper quantity.'))
#because of the action_confirm that can create extra moves in case of phantom bom, we need to make 2 loops
ids2 = []
for move in self.browse(cr, uid, ids, context=context):
if move.state == 'draft':
ids2.extend(self.action_confirm(cr, uid, [move.id], context=context))
else:
ids2.append(move.id)
prod_orders = set()
for move in self.browse(cr, uid, ids2, context=context):
prod_orders.add(move.raw_material_production_id.id or move.production_id.id)
move_qty = move.product_qty
if move_qty <= 0:
raise osv.except_osv(_('Error!'), _('Cannot consume a move with negative or zero quantity.'))
quantity_rest = move_qty - product_qty
# Compare with numbers of move uom as we want to avoid a split with 0 qty
quantity_rest_uom = move.product_uom_qty - self.pool.get("product.uom")._compute_qty_obj(cr, uid, move.product_id.uom_id, product_qty, move.product_uom)
if float_compare(quantity_rest_uom, 0, precision_rounding=move.product_uom.rounding) != 0:
new_mov = self.split(cr, uid, move, quantity_rest, context=context)
res.append(new_mov)
vals = {'restrict_lot_id': restrict_lot_id,
'restrict_partner_id': restrict_partner_id,
'consumed_for': consumed_for}
if location_id:
vals.update({'location_id': location_id})
self.write(cr, uid, [move.id], vals, context=context)
# Original moves will be the quantities consumed, so they need to be done
self.action_done(cr, uid, ids2, context=context)
if res:
self.action_assign(cr, uid, res, context=context)
if prod_orders:
production_obj.signal_workflow(cr, uid, list(prod_orders), 'button_produce')
return res
def action_scrap(self, cr, uid, ids, product_qty, location_id, restrict_lot_id=False, restrict_partner_id=False, context=None):
""" Move the scrap/damaged product into scrap location
@param product_qty: Scraped product quantity
@param location_id: Scrap location
@return: Scraped lines
"""
res = []
production_obj = self.pool.get('mrp.production')
for move in self.browse(cr, uid, ids, context=context):
new_moves = super(StockMove, self).action_scrap(cr, uid, [move.id], product_qty, location_id,
restrict_lot_id=restrict_lot_id,
restrict_partner_id=restrict_partner_id, context=context)
#If we are not scrapping our whole move, tracking and lot references must not be removed
production_ids = production_obj.search(cr, uid, [('move_lines', 'in', [move.id])])
for prod_id in production_ids:
production_obj.signal_workflow(cr, uid, [prod_id], 'button_produce')
for new_move in new_moves:
production_obj.write(cr, uid, production_ids, {'move_lines': [(4, new_move)]})
res.append(new_move)
return res
def write(self, cr, uid, ids, vals, context=None):
if isinstance(ids, (int, long)):
ids = [ids]
res = super(StockMove, self).write(cr, uid, ids, vals, context=context)
from openerp import workflow
if vals.get('state') == 'assigned':
moves = self.browse(cr, uid, ids, context=context)
orders = list(set([x.raw_material_production_id.id for x in moves if x.raw_material_production_id and x.raw_material_production_id.state == 'confirmed']))
for order_id in orders:
if self.pool.get('mrp.production').test_ready(cr, uid, [order_id]):
workflow.trg_validate(uid, 'mrp.production', order_id, 'moves_ready', cr)
return res
class stock_warehouse(osv.osv):
_inherit = 'stock.warehouse'
_columns = {
'manufacture_to_resupply': fields.boolean('Manufacture in this Warehouse',
help="When products are manufactured, they can be manufactured in this warehouse."),
'manufacture_pull_id': fields.many2one('procurement.rule', 'Manufacture Rule'),
}
def _get_manufacture_pull_rule(self, cr, uid, warehouse, context=None):
route_obj = self.pool.get('stock.location.route')
data_obj = self.pool.get('ir.model.data')
try:
manufacture_route_id = data_obj.get_object_reference(cr, uid, 'stock', 'route_warehouse0_manufacture')[1]
except:
manufacture_route_id = route_obj.search(cr, uid, [('name', 'like', _('Manufacture'))], context=context)
manufacture_route_id = manufacture_route_id and manufacture_route_id[0] or False
if not manufacture_route_id:
raise osv.except_osv(_('Error!'), _('Can\'t find any generic Manufacture route.'))
return {
'name': self._format_routename(cr, uid, warehouse, _(' Manufacture'), context=context),
'location_id': warehouse.lot_stock_id.id,
'route_id': manufacture_route_id,
'action': 'manufacture',
'picking_type_id': warehouse.int_type_id.id,
'propagate': False,
'warehouse_id': warehouse.id,
}
def create_routes(self, cr, uid, ids, warehouse, context=None):
pull_obj = self.pool.get('procurement.rule')
res = super(stock_warehouse, self).create_routes(cr, uid, ids, warehouse, context=context)
if warehouse.manufacture_to_resupply:
manufacture_pull_vals = self._get_manufacture_pull_rule(cr, uid, warehouse, context=context)
manufacture_pull_id = pull_obj.create(cr, uid, manufacture_pull_vals, context=context)
res['manufacture_pull_id'] = manufacture_pull_id
return res
def write(self, cr, uid, ids, vals, context=None):
pull_obj = self.pool.get('procurement.rule')
if isinstance(ids, (int, long)):
ids = [ids]
if 'manufacture_to_resupply' in vals:
if vals.get("manufacture_to_resupply"):
for warehouse in self.browse(cr, uid, ids, context=context):
if not warehouse.manufacture_pull_id:
manufacture_pull_vals = self._get_manufacture_pull_rule(cr, uid, warehouse, context=context)
manufacture_pull_id = pull_obj.create(cr, uid, manufacture_pull_vals, context=context)
vals['manufacture_pull_id'] = manufacture_pull_id
else:
for warehouse in self.browse(cr, uid, ids, context=context):
if warehouse.manufacture_pull_id:
pull_obj.unlink(cr, uid, warehouse.manufacture_pull_id.id, context=context)
return super(stock_warehouse, self).write(cr, uid, ids, vals, context=context)
def get_all_routes_for_wh(self, cr, uid, warehouse, context=None):
all_routes = super(stock_warehouse, self).get_all_routes_for_wh(cr, uid, warehouse, context=context)
if warehouse.manufacture_to_resupply and warehouse.manufacture_pull_id and warehouse.manufacture_pull_id.route_id:
all_routes += [warehouse.manufacture_pull_id.route_id.id]
return all_routes
def _handle_renaming(self, cr, uid, warehouse, name, code, context=None):
res = super(stock_warehouse, self)._handle_renaming(cr, uid, warehouse, name, code, context=context)
pull_obj = self.pool.get('procurement.rule')
#change the manufacture pull rule name
if warehouse.manufacture_pull_id:
pull_obj.write(cr, uid, warehouse.manufacture_pull_id.id, {'name': warehouse.manufacture_pull_id.name.replace(warehouse.name, name, 1)}, context=context)
return res
def _get_all_products_to_resupply(self, cr, uid, warehouse, context=None):
res = super(stock_warehouse, self)._get_all_products_to_resupply(cr, uid, warehouse, context=context)
if warehouse.manufacture_pull_id and warehouse.manufacture_pull_id.route_id:
for product_id in res:
for route in self.pool.get('product.product').browse(cr, uid, product_id, context=context).route_ids:
if route.id == warehouse.manufacture_pull_id.route_id.id:
res.remove(product_id)
break
return res
| agpl-3.0 | 1,518,941,751,383,555,300 | 57.67619 | 300 | 0.594492 | false |
hn8841182/20150623-test02 | static/Brython3.1.0-20150301-090019/Lib/_strptime.py | 518 | 21683 | """Strptime-related classes and functions.
CLASSES:
LocaleTime -- Discovers and stores locale-specific time information
TimeRE -- Creates regexes for pattern matching a string of text containing
time information
FUNCTIONS:
_getlang -- Figure out what language is being used for the locale
strptime -- Calculates the time struct represented by the passed-in string
"""
import time
import locale
import calendar
from re import compile as re_compile
from re import IGNORECASE
from re import escape as re_escape
from datetime import (date as datetime_date,
timedelta as datetime_timedelta,
timezone as datetime_timezone)
try:
from _thread import allocate_lock as _thread_allocate_lock
except ImportError:
from _dummy_thread import allocate_lock as _thread_allocate_lock
__all__ = []
def _getlang():
# Figure out what the current language is set to.
return locale.getlocale(locale.LC_TIME)
class LocaleTime(object):
"""Stores and handles locale-specific information related to time.
ATTRIBUTES:
f_weekday -- full weekday names (7-item list)
a_weekday -- abbreviated weekday names (7-item list)
f_month -- full month names (13-item list; dummy value in [0], which
is added by code)
a_month -- abbreviated month names (13-item list, dummy value in
[0], which is added by code)
am_pm -- AM/PM representation (2-item list)
LC_date_time -- format string for date/time representation (string)
LC_date -- format string for date representation (string)
LC_time -- format string for time representation (string)
timezone -- daylight- and non-daylight-savings timezone representation
(2-item list of sets)
lang -- Language used by instance (2-item tuple)
"""
def __init__(self):
"""Set all attributes.
Order of methods called matters for dependency reasons.
The locale language is set at the offset and then checked again before
exiting. This is to make sure that the attributes were not set with a
mix of information from more than one locale. This would most likely
happen when using threads where one thread calls a locale-dependent
function while another thread changes the locale while the function in
the other thread is still running. Proper coding would call for
locks to prevent changing the locale while locale-dependent code is
running. The check here is done in case someone does not think about
doing this.
Only other possible issue is if someone changed the timezone and did
not call tz.tzset . That is an issue for the programmer, though,
since changing the timezone is worthless without that call.
"""
self.lang = _getlang()
self.__calc_weekday()
self.__calc_month()
self.__calc_am_pm()
self.__calc_timezone()
self.__calc_date_time()
if _getlang() != self.lang:
raise ValueError("locale changed during initialization")
def __pad(self, seq, front):
# Add '' to seq to either the front (is True), else the back.
seq = list(seq)
if front:
seq.insert(0, '')
else:
seq.append('')
return seq
def __calc_weekday(self):
# Set self.a_weekday and self.f_weekday using the calendar
# module.
a_weekday = [calendar.day_abbr[i].lower() for i in range(7)]
f_weekday = [calendar.day_name[i].lower() for i in range(7)]
self.a_weekday = a_weekday
self.f_weekday = f_weekday
def __calc_month(self):
# Set self.f_month and self.a_month using the calendar module.
a_month = [calendar.month_abbr[i].lower() for i in range(13)]
f_month = [calendar.month_name[i].lower() for i in range(13)]
self.a_month = a_month
self.f_month = f_month
def __calc_am_pm(self):
# Set self.am_pm by using time.strftime().
# The magic date (1999,3,17,hour,44,55,2,76,0) is not really that
# magical; just happened to have used it everywhere else where a
# static date was needed.
am_pm = []
for hour in (1, 22):
time_tuple = time.struct_time((1999,3,17,hour,44,55,2,76,0))
am_pm.append(time.strftime("%p", time_tuple).lower())
self.am_pm = am_pm
def __calc_date_time(self):
# Set self.date_time, self.date, & self.time by using
# time.strftime().
# Use (1999,3,17,22,44,55,2,76,0) for magic date because the amount of
# overloaded numbers is minimized. The order in which searches for
# values within the format string is very important; it eliminates
# possible ambiguity for what something represents.
time_tuple = time.struct_time((1999,3,17,22,44,55,2,76,0))
date_time = [None, None, None]
date_time[0] = time.strftime("%c", time_tuple).lower()
date_time[1] = time.strftime("%x", time_tuple).lower()
date_time[2] = time.strftime("%X", time_tuple).lower()
replacement_pairs = [('%', '%%'), (self.f_weekday[2], '%A'),
(self.f_month[3], '%B'), (self.a_weekday[2], '%a'),
(self.a_month[3], '%b'), (self.am_pm[1], '%p'),
('1999', '%Y'), ('99', '%y'), ('22', '%H'),
('44', '%M'), ('55', '%S'), ('76', '%j'),
('17', '%d'), ('03', '%m'), ('3', '%m'),
# '3' needed for when no leading zero.
('2', '%w'), ('10', '%I')]
replacement_pairs.extend([(tz, "%Z") for tz_values in self.timezone
for tz in tz_values])
for offset,directive in ((0,'%c'), (1,'%x'), (2,'%X')):
current_format = date_time[offset]
for old, new in replacement_pairs:
# Must deal with possible lack of locale info
# manifesting itself as the empty string (e.g., Swedish's
# lack of AM/PM info) or a platform returning a tuple of empty
# strings (e.g., MacOS 9 having timezone as ('','')).
if old:
current_format = current_format.replace(old, new)
# If %W is used, then Sunday, 2005-01-03 will fall on week 0 since
# 2005-01-03 occurs before the first Monday of the year. Otherwise
# %U is used.
time_tuple = time.struct_time((1999,1,3,1,1,1,6,3,0))
if '00' in time.strftime(directive, time_tuple):
U_W = '%W'
else:
U_W = '%U'
date_time[offset] = current_format.replace('11', U_W)
self.LC_date_time = date_time[0]
self.LC_date = date_time[1]
self.LC_time = date_time[2]
def __calc_timezone(self):
# Set self.timezone by using time.tzname.
# Do not worry about possibility of time.tzname[0] == timetzname[1]
# and time.daylight; handle that in strptime .
#try:
#time.tzset()
#except AttributeError:
#pass
no_saving = frozenset(["utc", "gmt", time.tzname[0].lower()])
if time.daylight:
has_saving = frozenset([time.tzname[1].lower()])
else:
has_saving = frozenset()
self.timezone = (no_saving, has_saving)
class TimeRE(dict):
"""Handle conversion from format directives to regexes."""
def __init__(self, locale_time=None):
"""Create keys/values.
Order of execution is important for dependency reasons.
"""
if locale_time:
self.locale_time = locale_time
else:
self.locale_time = LocaleTime()
base = super()
base.__init__({
# The " \d" part of the regex is to make %c from ANSI C work
'd': r"(?P<d>3[0-1]|[1-2]\d|0[1-9]|[1-9]| [1-9])",
'f': r"(?P<f>[0-9]{1,6})",
'H': r"(?P<H>2[0-3]|[0-1]\d|\d)",
'I': r"(?P<I>1[0-2]|0[1-9]|[1-9])",
'j': r"(?P<j>36[0-6]|3[0-5]\d|[1-2]\d\d|0[1-9]\d|00[1-9]|[1-9]\d|0[1-9]|[1-9])",
'm': r"(?P<m>1[0-2]|0[1-9]|[1-9])",
'M': r"(?P<M>[0-5]\d|\d)",
'S': r"(?P<S>6[0-1]|[0-5]\d|\d)",
'U': r"(?P<U>5[0-3]|[0-4]\d|\d)",
'w': r"(?P<w>[0-6])",
# W is set below by using 'U'
'y': r"(?P<y>\d\d)",
#XXX: Does 'Y' need to worry about having less or more than
# 4 digits?
'Y': r"(?P<Y>\d\d\d\d)",
'z': r"(?P<z>[+-]\d\d[0-5]\d)",
'A': self.__seqToRE(self.locale_time.f_weekday, 'A'),
'a': self.__seqToRE(self.locale_time.a_weekday, 'a'),
'B': self.__seqToRE(self.locale_time.f_month[1:], 'B'),
'b': self.__seqToRE(self.locale_time.a_month[1:], 'b'),
'p': self.__seqToRE(self.locale_time.am_pm, 'p'),
'Z': self.__seqToRE((tz for tz_names in self.locale_time.timezone
for tz in tz_names),
'Z'),
'%': '%'})
base.__setitem__('W', base.__getitem__('U').replace('U', 'W'))
base.__setitem__('c', self.pattern(self.locale_time.LC_date_time))
base.__setitem__('x', self.pattern(self.locale_time.LC_date))
base.__setitem__('X', self.pattern(self.locale_time.LC_time))
def __seqToRE(self, to_convert, directive):
"""Convert a list to a regex string for matching a directive.
Want possible matching values to be from longest to shortest. This
prevents the possibility of a match occurring for a value that also
a substring of a larger value that should have matched (e.g., 'abc'
matching when 'abcdef' should have been the match).
"""
to_convert = sorted(to_convert, key=len, reverse=True)
for value in to_convert:
if value != '':
break
else:
return ''
regex = '|'.join(re_escape(stuff) for stuff in to_convert)
regex = '(?P<%s>%s' % (directive, regex)
return '%s)' % regex
def pattern(self, format):
"""Return regex pattern for the format string.
Need to make sure that any characters that might be interpreted as
regex syntax are escaped.
"""
processed_format = ''
# The sub() call escapes all characters that might be misconstrued
# as regex syntax. Cannot use re.escape since we have to deal with
# format directives (%m, etc.).
regex_chars = re_compile(r"([\\.^$*+?\(\){}\[\]|])")
format = regex_chars.sub(r"\\\1", format)
whitespace_replacement = re_compile(r'\s+')
format = whitespace_replacement.sub(r'\\s+', format)
while '%' in format:
directive_index = format.index('%')+1
processed_format = "%s%s%s" % (processed_format,
format[:directive_index-1],
self[format[directive_index]])
format = format[directive_index+1:]
return "%s%s" % (processed_format, format)
def compile(self, format):
"""Return a compiled re object for the format string."""
return re_compile(self.pattern(format), IGNORECASE)
_cache_lock = _thread_allocate_lock()
# DO NOT modify _TimeRE_cache or _regex_cache without acquiring the cache lock
# first!
_TimeRE_cache = TimeRE()
_CACHE_MAX_SIZE = 5 # Max number of regexes stored in _regex_cache
_regex_cache = {}
def _calc_julian_from_U_or_W(year, week_of_year, day_of_week, week_starts_Mon):
"""Calculate the Julian day based on the year, week of the year, and day of
the week, with week_start_day representing whether the week of the year
assumes the week starts on Sunday or Monday (6 or 0)."""
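# Editorial worked example: for 1999 (Jan 1st fell on a Friday), the %U-style
# inputs week_of_year=1, day_of_week=6 (Sunday) with week_starts_Mon=False
# yield julian day 3, i.e. January 3rd.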
first_weekday = datetime_date(year, 1, 1).weekday()
# If we are dealing with the %U directive (week starts on Sunday), it's
# easier to just shift the view to Sunday being the first day of the
# week.
if not week_starts_Mon:
first_weekday = (first_weekday + 1) % 7
day_of_week = (day_of_week + 1) % 7
# Need to watch out for a week 0 (when the first day of the year is not
# the same as that specified by %U or %W).
week_0_length = (7 - first_weekday) % 7
if week_of_year == 0:
return 1 + day_of_week - first_weekday
else:
days_to_week = week_0_length + (7 * (week_of_year - 1))
return 1 + days_to_week + day_of_week
def _strptime(data_string, format="%a %b %d %H:%M:%S %Y"):
"""Return a 2-tuple consisting of a time struct and an int containing
the number of microseconds based on the input string and the
format string."""
for index, arg in enumerate([data_string, format]):
if not isinstance(arg, str):
msg = "strptime() argument {} must be str, not {}"
raise TypeError(msg.format(index, type(arg)))
global _TimeRE_cache, _regex_cache
with _cache_lock:
if _getlang() != _TimeRE_cache.locale_time.lang:
_TimeRE_cache = TimeRE()
_regex_cache.clear()
if len(_regex_cache) > _CACHE_MAX_SIZE:
_regex_cache.clear()
locale_time = _TimeRE_cache.locale_time
format_regex = _regex_cache.get(format)
if not format_regex:
try:
format_regex = _TimeRE_cache.compile(format)
# KeyError raised when a bad format is found; can be specified as
# \\, in which case it was a stray % but with a space after it
except KeyError as err:
bad_directive = err.args[0]
if bad_directive == "\\":
bad_directive = "%"
del err
raise ValueError("'%s' is a bad directive in format '%s'" %
(bad_directive, format)) from None
# IndexError only occurs when the format string is "%"
except IndexError:
raise ValueError("stray %% in format '%s'" % format) from None
_regex_cache[format] = format_regex
found = format_regex.match(data_string)
if not found:
raise ValueError("time data %r does not match format %r" %
(data_string, format))
if len(data_string) != found.end():
raise ValueError("unconverted data remains: %s" %
data_string[found.end():])
year = None
month = day = 1
hour = minute = second = fraction = 0
tz = -1
tzoffset = None
# Default to -1 to signify that values not known; not critical to have,
# though
week_of_year = -1
week_of_year_start = -1
# weekday and julian defaulted to -1 so as to signal need to calculate
# values
weekday = julian = -1
found_dict = found.groupdict()
for group_key in found_dict.keys():
# Directives not explicitly handled below:
# c, x, X
# handled by making out of other directives
# U, W
# worthless without day of the week
if group_key == 'y':
year = int(found_dict['y'])
# Open Group specification for strptime() states that a %y
#value in the range of [00, 68] is in the century 2000, while
#[69,99] is in the century 1900
if year <= 68:
year += 2000
else:
year += 1900
elif group_key == 'Y':
year = int(found_dict['Y'])
elif group_key == 'm':
month = int(found_dict['m'])
elif group_key == 'B':
month = locale_time.f_month.index(found_dict['B'].lower())
elif group_key == 'b':
month = locale_time.a_month.index(found_dict['b'].lower())
elif group_key == 'd':
day = int(found_dict['d'])
elif group_key == 'H':
hour = int(found_dict['H'])
elif group_key == 'I':
hour = int(found_dict['I'])
ampm = found_dict.get('p', '').lower()
# If there was no AM/PM indicator, we'll treat this like AM
if ampm in ('', locale_time.am_pm[0]):
# We're in AM so the hour is correct unless we're
# looking at 12 midnight.
# 12 midnight == 12 AM == hour 0
if hour == 12:
hour = 0
elif ampm == locale_time.am_pm[1]:
# We're in PM so we need to add 12 to the hour unless
# we're looking at 12 noon.
# 12 noon == 12 PM == hour 12
if hour != 12:
hour += 12
elif group_key == 'M':
minute = int(found_dict['M'])
elif group_key == 'S':
second = int(found_dict['S'])
elif group_key == 'f':
s = found_dict['f']
# Pad to always return microseconds.
s += "0" * (6 - len(s))
fraction = int(s)
elif group_key == 'A':
weekday = locale_time.f_weekday.index(found_dict['A'].lower())
elif group_key == 'a':
weekday = locale_time.a_weekday.index(found_dict['a'].lower())
elif group_key == 'w':
weekday = int(found_dict['w'])
if weekday == 0:
weekday = 6
else:
weekday -= 1
elif group_key == 'j':
julian = int(found_dict['j'])
elif group_key in ('U', 'W'):
week_of_year = int(found_dict[group_key])
if group_key == 'U':
# U starts week on Sunday.
week_of_year_start = 6
else:
# W starts week on Monday.
week_of_year_start = 0
elif group_key == 'z':
z = found_dict['z']
tzoffset = int(z[1:3]) * 60 + int(z[3:5])
if z.startswith("-"):
tzoffset = -tzoffset
elif group_key == 'Z':
# Since -1 is default value only need to worry about setting tz if
# it can be something other than -1.
found_zone = found_dict['Z'].lower()
for value, tz_values in enumerate(locale_time.timezone):
if found_zone in tz_values:
# Deal with bad locale setup where timezone names are the
# same and yet time.daylight is true; too ambiguous to
# be able to tell what timezone has daylight savings
if (time.tzname[0] == time.tzname[1] and
time.daylight and found_zone not in ("utc", "gmt")):
break
else:
tz = value
break
leap_year_fix = False
if year is None and month == 2 and day == 29:
year = 1904 # 1904 is first leap year of 20th century
leap_year_fix = True
elif year is None:
year = 1900
# If we know the week of the year and what day of that week, we can figure
# out the Julian day of the year.
if julian == -1 and week_of_year != -1 and weekday != -1:
week_starts_Mon = True if week_of_year_start == 0 else False
julian = _calc_julian_from_U_or_W(year, week_of_year, weekday,
week_starts_Mon)
# Cannot pre-calculate datetime_date() since can change in Julian
# calculation and thus could have different value for the day of the week
# calculation.
if julian == -1:
# Need to add 1 to result since first day of the year is 1, not 0.
julian = datetime_date(year, month, day).toordinal() - \
datetime_date(year, 1, 1).toordinal() + 1
else: # Assume that if they bothered to include Julian day it will
# be accurate.
datetime_result = datetime_date.fromordinal((julian - 1) + datetime_date(year, 1, 1).toordinal())
year = datetime_result.year
month = datetime_result.month
day = datetime_result.day
if weekday == -1:
weekday = datetime_date(year, month, day).weekday()
# Add timezone info
tzname = found_dict.get("Z")
if tzoffset is not None:
gmtoff = tzoffset * 60
else:
gmtoff = None
if leap_year_fix:
# the caller didn't supply a year but asked for Feb 29th. We couldn't
# use the default of 1900 for computations. We set it back to ensure
# that February 29th is smaller than March 1st.
year = 1900
return (year, month, day,
hour, minute, second,
weekday, julian, tz, tzname, gmtoff), fraction
def _strptime_time(data_string, format="%a %b %d %H:%M:%S %Y"):
"""Return a time struct based on the input string and the
format string."""
tt = _strptime(data_string, format)[0]
return time.struct_time(tt[:time._STRUCT_TM_ITEMS])
def _strptime_datetime(cls, data_string, format="%a %b %d %H:%M:%S %Y"):
"""Return a class cls instance based on the input string and the
format string."""
tt, fraction = _strptime(data_string, format)
tzname, gmtoff = tt[-2:]
args = tt[:6] + (fraction,)
if gmtoff is not None:
tzdelta = datetime_timedelta(seconds=gmtoff)
if tzname:
tz = datetime_timezone(tzdelta, tzname)
else:
tz = datetime_timezone(tzdelta)
args += (tz,)
return cls(*args)
| gpl-3.0 | -7,604,141,210,089,282,000 | 41.432485 | 105 | 0.54831 | false |
BambooHR/rapid | rapid/master/controllers/api/upgrade_controller.py | 1 | 1295 | """
Copyright (c) 2015 Michael Bright and Bamboo HR LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from flask import Response
from rapid.lib.version import Version
from rapid.lib import api_key_required
from rapid.lib.utils import UpgradeUtil
class UpgradeController(object):
def __init__(self, flask_app):
self.flask_app = flask_app
def configure_routing(self):
self.flask_app.add_url_rule('/api/upgrade/<path:version>', 'upgrade_master', api_key_required(self.upgrade_master), methods=['POST'])
def upgrade_master(self, version):
worked = UpgradeUtil.upgrade_version(version, self.flask_app.rapid_config)
return Response("It worked!" if worked else "It didn't work, version {} restored!".format(Version.get_version()), status=200 if worked else 505)
| apache-2.0 | 6,371,753,827,477,354,000 | 37.088235 | 152 | 0.742085 | false |
leilihh/novaha | nova/keymgr/single_key_mgr.py | 10 | 2555 | # Copyright (c) 2013 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
An implementation of a key manager that returns a single key in response to
all invocations of get_key.
"""
from nova import exception
from nova.keymgr import mock_key_mgr
from nova.openstack.common.gettextutils import _
from nova.openstack.common import log as logging
LOG = logging.getLogger(__name__)
class SingleKeyManager(mock_key_mgr.MockKeyManager):
"""This key manager implementation supports all the methods specified by
the key manager interface. This implementation creates a single key in
response to all invocations of create_key. Side effects
(e.g., raising exceptions) for each method are handled as specified by
the key manager interface.
"""
def __init__(self):
LOG.warn(_('This key manager is insecure and is not recommended for '
'production deployments'))
super(SingleKeyManager, self).__init__()
self.key_id = '00000000-0000-0000-0000-000000000000'
self.key = self._generate_key(key_length=256)
# key should exist by default
self.keys[self.key_id] = self.key
def _generate_hex_key(self, **kwargs):
key_length = kwargs.get('key_length', 256)
return '0' * (key_length / 4) # hex digit => 4 bits
def _generate_key_id(self):
return self.key_id
def store_key(self, ctxt, key, **kwargs):
if key != self.key:
raise exception.KeyManagerError(
reason="cannot store arbitrary keys")
return super(SingleKeyManager, self).store_key(ctxt, key, **kwargs)
def delete_key(self, ctxt, key_id, **kwargs):
if ctxt is None:
raise exception.NotAuthorized()
if key_id != self.key_id:
raise exception.KeyManagerError(
reason="cannot delete non-existent key")
LOG.warn(_("Not deleting key %s"), key_id)
| apache-2.0 | 3,408,993,021,928,366,000 | 34.486111 | 78 | 0.668493 | false |
DietPawel/playhistory | app/api.py | 1 | 5417 | from flask import Flask, jsonify, make_response, abort,request, send_from_directory, redirect, render_template, Response
import db, datetime, csv
from time import time
from io import StringIO
DEBUG = False
#path to file to be displayed on index page
INDEX = '/opt/index.html'
app = Flask(__name__, static_url_path='')
@app.route('/', methods=['GET'])
def root():
contents="<p>Set home page in %s !</p>" % (str(INDEX) )
try:
with open(INDEX, 'r') as indexfile:
contents = indexfile.readlines()
except:
pass
return render_template('index.html', index=contents)
@app.route('/hist/', methods=['GET'])
def history_data():
return render_template('hist.html')
@app.route('/day/', methods=['GET'])
def show_day_template():
t = request.args.get('t')
day_name="Dzisiaj"
if t is None:
t = time()
else:
try:
t = int(t)
day_name = datetime.datetime.fromtimestamp(t).strftime("%d.%m.%Y r.")
except:
t = time()
return render_template('day.html', songs = db.get_day(t), debug=DEBUG, t=t, day_name=day_name)
@app.route('/edit/<int:playid>', methods=['GET'])
def edit_play_object(playid):
play = db.get_play_id(playid)
if play is None:
abort(404)
ret = request.args.get('ret')
if(ret is None):
ret = '/day/'
return render_template('edit.html', play = play, ret=ret, debug=DEBUG)
@app.route('/stats/', methods=['GET'])
def stats():
start = request.args.get('startts')
stop = request.args.get('stopts')
if(start is None or stop is None):
start=stop=0
else:
try:
start = int(start)
stop = int(stop) + 86399
except:
start = 0
stop = 0
return render_template('stats.html', data=db.get_stats(start,stop), date_start=start, date_stop=stop)
## db export
@app.route('/download/', methods=['GET'])
def download():
si = StringIO()
cw = csv.writer(si)
cw.writerows(db.generate_all())
output = make_response(si.getvalue())
output.headers["Content-Disposition"] = "attachment; filename=db_%s.csv" % datetime.datetime.fromtimestamp(time()).strftime("%d_%m_%Y")
output.headers["Content-type"] = "text/csv"
return output
## raport generator
@app.route('/report/', methods=['GET'])
def get_day_report():
t = request.args.get('t')
if t is None:
t = time()
print("t is None");
else:
try:
t=int(t)
print("t is orig");
except:
t = time()
print("t is Str");
content = render_template('report.txt', songs = db.get_day(t), date=t)
return Response(content, mimetype="text/plain", headers={"Content-disposition":"attachment;filename=report_%s.txt"%datetime.datetime.fromtimestamp(t).strftime("%d_%m_%Y")})
## api methods
"""@app.route('/api/v1/day/', methods=['GET'])
def current_day():
return jsonify({'plays':db.get_day(), })
@app.route('/api/v1/day/<int:timestamp>', methods=['GET'])
def day_from_timestamp(timestamp):
return jsonify({'plays':db.get_day(timestamp)})
@app.route('/api/v1/play/<int:play_id>', methods=['GET'])
def get_play_by_id(play_id):
play = db.get_play_id(play_id)
if play is None:
abort(404)
return jsonify({'play':db.get_play_id(play_id).__dict__})
"""
@app.route('/api/v1/play/', methods=['POST'])
def add_new_play():
if not request.json or not 'DJ' in request.json or not 'song' in request.json:
abort(400)
play = db.Play(DJ=request.json['DJ'], song=request.json['song'])
play.save()
return jsonify({'status':play.check()})
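# Editorial example request (hypothetical host and values):
#   curl -X POST -H 'Content-Type: application/json' \
#        -d '{"DJ": "dj1", "song": "title"}' http://localhost/api/v1/play/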
@app.route('/api/v1/play/<int:play_id>', methods=['DELETE'])
def del_play_id(play_id):
play = db.get_play_id(play_id)
print(1)
if play is None:
abort(404)
print(2)
play.delete()
return jsonify({'status':play.check()})
@app.route('/api/v1/play/<int:play_id>', methods=['PUT'])
def put_play_id(play_id):
play = db.get_play_id(play_id)
if play is None:
abort(404)
if 'DJ' in request.json and type(request.json['DJ']) != str:
abort(400)
if 'song' in request.json and type(request.json['song']) != str:
abort(400)
if 'date' in request.json and type(request.json['date']) != int:
abort(400)
play.DJ = request.json.get('DJ', play.DJ)
play.song = request.json.get('song', play.song)
play.date = request.json.get('date', play.date)
play.save()
return jsonify({'status':play.check(), 'play':play.__dict__})
### static files
@app.route('/static/<path:path>')
def send_static_www(path):
return send_from_directory('static', path)
### template_tags
@app.template_filter('display')
def display_date_from_timestamp(ts):
return datetime.datetime.fromtimestamp(ts).strftime("%d.%m.%Y r.")
### other
@app.errorhandler(404)
def not_found(error):
#return make_response(jsonify({'error': 'Not found'}), 404)
return make_response('<center style="font-size:6vh;margin-top:20vh;"> 404 </center>', 404)
if __name__ == '__main__':
print("""PlayHistory Copyright (C) 2017 Paweł Dietrich
This program comes with ABSOLUTELY NO WARRANTY; for details check LICENSE file.
This is free software, and you are welcome to redistribute it
under certain conditions.""")
app.run(debug=DEBUG,host="localhost", port=int("80"))
| gpl-3.0 | -2,461,677,984,856,531,500 | 31.431138 | 176 | 0.613368 | false |
dannyperry571/theapprentice | plugin.video.youtube/resources/lib/kodion/utils/search_history.py | 26 | 1063 | import hashlib
from storage import Storage
from .methods import to_utf8
class SearchHistory(Storage):
def __init__(self, filename, max_items=10):
Storage.__init__(self, filename, max_item_count=max_items)
pass
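# Editorial sketch (filename is hypothetical):
#   history = SearchHistory('search.sqlite', max_items=10)
# keeps only the ten most recent queries; list() returns them newest first.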
def is_empty(self):
return self._is_empty()
def list(self):
result = []
keys = self._get_ids(oldest_first=False)
for key in keys:
item = self._get(key)
result.append(item[0])
pass
return result
def clear(self):
self._clear()
pass
def _make_id(self, search_text):
m = hashlib.md5()
m.update(to_utf8(search_text))
return m.hexdigest()
def rename(self, old_search_text, new_search_text):
self.remove(old_search_text)
self.update(new_search_text)
pass
def remove(self, search_text):
self._remove(self._make_id(search_text))
pass
def update(self, search_text):
self._set(self._make_id(search_text), search_text)
pass
pass | gpl-2.0 | 7,318,096,972,539,202,000 | 21.166667 | 66 | 0.573848 | false |
SaturdayNeighborhoodHealthClinic/osler | appointment/migrations/0001_initial.py | 2 | 4147 | # -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2018-08-17 03:20
from __future__ import unicode_literals
import datetime
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
from django.utils.timezone import utc
class Migration(migrations.Migration):
initial = True
dependencies = [
('pttrack', '0005_simplehistory_add_change_reason'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Appointment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('written_datetime', models.DateTimeField(auto_now_add=True)),
('last_modified', models.DateTimeField(auto_now=True)),
('clindate', models.DateField(verbose_name=b'Appointment Date')),
('clintime', models.TimeField(default=datetime.datetime(2018, 8, 17, 9, 0, tzinfo=utc), verbose_name=b'Time of Appointment')),
('appointment_type', models.CharField(choices=[(b'PSYCH_NIGHT', b'Psych Night'), (b'ACUTE_FOLLOWUP', b'Acute Followup'), (b'CHRONIC_CARE', b'Chronic Care')], default=b'CHRONIC_CARE', max_length=15, verbose_name=b'Appointment Type')),
('comment', models.TextField(help_text=b'What should happen at this appointment?')),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='pttrack.Provider')),
('author_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='pttrack.ProviderType')),
('patient', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='pttrack.Patient')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='HistoricalAppointment',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('written_datetime', models.DateTimeField(blank=True, editable=False)),
('last_modified', models.DateTimeField(blank=True, editable=False)),
('clindate', models.DateField(verbose_name=b'Appointment Date')),
('clintime', models.TimeField(default=datetime.datetime(2018, 8, 17, 9, 0, tzinfo=utc), verbose_name=b'Time of Appointment')),
('appointment_type', models.CharField(choices=[(b'PSYCH_NIGHT', b'Psych Night'), (b'ACUTE_FOLLOWUP', b'Acute Followup'), (b'CHRONIC_CARE', b'Chronic Care')], default=b'CHRONIC_CARE', max_length=15, verbose_name=b'Appointment Type')),
('comment', models.TextField(help_text=b'What should happen at this appointment?')),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('author', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='pttrack.Provider')),
('author_type', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='pttrack.ProviderType')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
('patient', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='pttrack.Patient')),
],
options={
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
'verbose_name': 'historical appointment',
},
),
]
| gpl-3.0 | 2,223,590,742,212,018,200 | 62.8 | 249 | 0.621413 | false |
IntelLabs/numba | numba/cuda/tests/cudapy/test_nondet.py | 5 | 1378 | import numpy as np
from numba import cuda, float32, void
from numba.cuda.testing import unittest, CUDATestCase
def generate_input(n):
A = np.array(np.arange(n * n).reshape(n, n), dtype=np.float32)
B = np.array(np.arange(n) + 0, dtype=A.dtype)
return A, B
class TestCudaNonDet(CUDATestCase):
def test_for_pre(self):
"""Test issue with loop not running due to bad sign-extension at the for loop
precondition.
"""
@cuda.jit(void(float32[:, :], float32[:, :], float32[:]))
def diagproduct(c, a, b):
startX, startY = cuda.grid(2)
gridX = cuda.gridDim.x * cuda.blockDim.x
gridY = cuda.gridDim.y * cuda.blockDim.y
height = c.shape[0]
width = c.shape[1]
for x in range(startX, width, (gridX)):
for y in range(startY, height, (gridY)):
c[y, x] = a[y, x] * b[x]
N = 8
A, B = generate_input(N)
F = np.empty(A.shape, dtype=A.dtype)
blockdim = (32, 8)
griddim = (1, 1)
dA = cuda.to_device(A)
dB = cuda.to_device(B)
dF = cuda.to_device(F, copy=False)
diagproduct[griddim, blockdim](dF, dA, dB)
E = np.dot(A, np.diag(B))
np.testing.assert_array_almost_equal(dF.copy_to_host(), E)
if __name__ == '__main__':
unittest.main()
| bsd-2-clause | 1,490,630,722,710,402,300 | 27.122449 | 85 | 0.546444 | false |
lmtierney/selenium | py/test/unit/selenium/webdriver/edge/edge_options_tests.py | 7 | 1807 | # Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import pytest
from selenium.webdriver.edge.options import Options
@pytest.fixture
def options():
return Options()
def test_raises_exception_with_invalid_page_load_strategy(options):
with pytest.raises(ValueError):
options.page_load_strategy = 'never'
def test_set_page_load_strategy(options):
options.page_load_strategy = 'normal'
assert options._page_load_strategy == 'normal'
def test_get_page_load_strategy(options):
options._page_load_strategy = 'normal'
assert options.page_load_strategy == 'normal'
def test_creates_capabilities(options):
options.page_load_strategy = 'eager'
caps = options.to_capabilities()
assert caps['pageLoadStrategy'] == 'eager'
def test_starts_with_default_capabilities(options):
from selenium.webdriver import DesiredCapabilities
assert options._caps == DesiredCapabilities.EDGE
def test_is_a_baseoptions(options):
from selenium.webdriver.common.options import BaseOptions
assert isinstance(options, BaseOptions)
| apache-2.0 | -4,814,514,105,816,652,000 | 31.267857 | 67 | 0.755396 | false |
gmr/helper | helper/setupext.py | 2 | 2299 | """Add a setuptools command that runs a helper-based application."""
try:
from setuptools import Command
except ImportError:
from distutils.core import Command
try:
from functools import reduce
except ImportError:
pass # use the builtin for py 2.x
from . import parser
from . import platform
class RunCommand(Command):
"""Run a helper-based application.
This extension is installed as a ``distutils.commands``
entry point that provides the *run_helper* command. When
run, it imports a :class:`helper.Controller` subclass by
name, creates a new instance, and runs it in the foreground
until interrupted. The dotted-name of the controller class
and an optional configuration file are provided as command
line parameters.
:param str configuration: the name of a configuration file
to pass to the application *(optional)*
:param str controller: the dotted-name of the Python class
to load and run
"""
description = 'run a helper.Controller'
user_options = [
('configuration=', 'c', 'path to application configuration file'),
('controller=', 'C', 'controller to run'),
]
def initialize_options(self):
"""Initialize parameters."""
self.configuration = None
self.controller = None
def finalize_options(self):
"""Required override that does nothing."""
pass
def run(self):
"""Import the controller and run it.
This mimics the processing done by :func:`helper.start`
when a controller is run in the foreground. A new instance
of ``self.controller`` is created and run until a keyboard
interrupt occurs or the controller stops on its own accord.
"""
segments = self.controller.split('.')
controller_class = reduce(getattr, segments[1:],
__import__('.'.join(segments[:-1])))
cmd_line = ['-f']
if self.configuration is not None:
cmd_line.extend(['-c', self.configuration])
args = parser.get().parse_args(cmd_line)
controller_instance = controller_class(args, platform)
try:
controller_instance.start()
except KeyboardInterrupt:
controller_instance.stop()
| bsd-3-clause | -8,858,025,610,849,784,000 | 31.842857 | 74 | 0.645063 | false |
divya-csekar/flask-microblog-server | flask/Lib/site-packages/flask/config.py | 781 | 6234 | # -*- coding: utf-8 -*-
"""
flask.config
~~~~~~~~~~~~
Implements the configuration related objects.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import imp
import os
import errno
from werkzeug.utils import import_string
from ._compat import string_types
class ConfigAttribute(object):
"""Makes an attribute forward to the config"""
def __init__(self, name, get_converter=None):
self.__name__ = name
self.get_converter = get_converter
def __get__(self, obj, type=None):
if obj is None:
return self
rv = obj.config[self.__name__]
if self.get_converter is not None:
rv = self.get_converter(rv)
return rv
def __set__(self, obj, value):
obj.config[self.__name__] = value
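# Editorial example: Flask's application class exposes config keys through
# this descriptor, e.g. ``debug = ConfigAttribute('DEBUG')``, so ``app.debug``
# reads ``app.config['DEBUG']``.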
class Config(dict):
"""Works exactly like a dict but provides ways to fill it from files
or special dictionaries. There are two common patterns to populate the
config.
Either you can fill the config from a config file::
app.config.from_pyfile('yourconfig.cfg')
Or alternatively you can define the configuration options in the
module that calls :meth:`from_object` or provide an import path to
a module that should be loaded. It is also possible to tell it to
use the same module and with that provide the configuration values
just before the call::
DEBUG = True
SECRET_KEY = 'development key'
app.config.from_object(__name__)
In both cases (loading from any Python file or loading from modules),
only uppercase keys are added to the config. This makes it possible to use
lowercase values in the config file for temporary values that are not added
to the config or to define the config keys in the same file that implements
the application.
Probably the most interesting way to load configurations is from an
environment variable pointing to a file::
app.config.from_envvar('YOURAPPLICATION_SETTINGS')
In this case before launching the application you have to set this
environment variable to the file you want to use. On Linux and OS X
use the export statement::
export YOURAPPLICATION_SETTINGS='/path/to/config/file'
On windows use `set` instead.
:param root_path: path to which files are read relative from. When the
config object is created by the application, this is
the application's :attr:`~flask.Flask.root_path`.
:param defaults: an optional dictionary of default values
"""
def __init__(self, root_path, defaults=None):
dict.__init__(self, defaults or {})
self.root_path = root_path
def from_envvar(self, variable_name, silent=False):
"""Loads a configuration from an environment variable pointing to
a configuration file. This is basically just a shortcut with nicer
error messages for this line of code::
app.config.from_pyfile(os.environ['YOURAPPLICATION_SETTINGS'])
:param variable_name: name of the environment variable
:param silent: set to `True` if you want silent failure for missing
files.
:return: bool. `True` if able to load config, `False` otherwise.
"""
rv = os.environ.get(variable_name)
if not rv:
if silent:
return False
raise RuntimeError('The environment variable %r is not set '
'and as such configuration could not be '
'loaded. Set this variable and make it '
'point to a configuration file' %
variable_name)
return self.from_pyfile(rv, silent=silent)
def from_pyfile(self, filename, silent=False):
"""Updates the values in the config from a Python file. This function
    behaves as if the file was imported as a module with the
:meth:`from_object` function.
:param filename: the filename of the config. This can either be an
absolute filename or a filename relative to the
root path.
:param silent: set to `True` if you want silent failure for missing
files.
.. versionadded:: 0.7
`silent` parameter.
"""
filename = os.path.join(self.root_path, filename)
d = imp.new_module('config')
d.__file__ = filename
try:
with open(filename) as config_file:
exec(compile(config_file.read(), filename, 'exec'), d.__dict__)
except IOError as e:
if silent and e.errno in (errno.ENOENT, errno.EISDIR):
return False
e.strerror = 'Unable to load configuration file (%s)' % e.strerror
raise
self.from_object(d)
return True
def from_object(self, obj):
"""Updates the values from the given object. An object can be of one
of the following two types:
- a string: in this case the object with that name will be imported
- an actual object reference: that object is used directly
Objects are usually either modules or classes.
Just the uppercase variables in that object are stored in the config.
Example usage::
app.config.from_object('yourapplication.default_config')
from yourapplication import default_config
app.config.from_object(default_config)
You should not use this function to load the actual configuration but
rather configuration defaults. The actual config should be loaded
with :meth:`from_pyfile` and ideally from a location not within the
package because the package might be installed system wide.
:param obj: an import name or object
"""
if isinstance(obj, string_types):
obj = import_string(obj)
for key in dir(obj):
if key.isupper():
self[key] = getattr(obj, key)
def __repr__(self):
return '<%s %s>' % (self.__class__.__name__, dict.__repr__(self))
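# A minimal end-to-end sketch of the loading patterns documented above
# (hypothetical module and file names; not part of Flask itself):
#
#     cfg = Config(root_path='/path/to/app', defaults={'DEBUG': False})
#     cfg.from_object('yourapplication.default_config')      # code defaults
#     cfg.from_pyfile('production.cfg', silent=True)         # optional file
#     cfg.from_envvar('YOURAPPLICATION_SETTINGS', silent=True)
#     assert all(key.isupper() for key in cfg)               # only upper-case keys load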
| bsd-3-clause | 6,417,184,727,938,708,000 | 36.107143 | 79 | 0.620629 | false |
smallyear/linuxLearn | salt/salt/states/keyboard.py | 3 | 2081 | # -*- coding: utf-8 -*-
'''
Management of keyboard layouts
==============================
The keyboard layout can be managed for the system:
.. code-block:: yaml
us:
keyboard.system
Or it can be managed for XOrg:
.. code-block:: yaml
us:
keyboard.xorg
'''
def __virtual__():
'''
Only load if the keyboard module is available in __salt__
'''
return 'keyboard.get_sys' in __salt__
def system(name):
'''
Set the keyboard layout for the system
name
The keyboard layout to use
'''
ret = {'name': name,
'changes': {},
'result': None,
'comment': ''}
if __salt__['keyboard.get_sys']() == name:
ret['result'] = True
ret['comment'] = 'System layout {0} already set'.format(name)
return ret
if __opts__['test']:
ret['comment'] = 'System layout {0} needs to be set'.format(name)
return ret
if __salt__['keyboard.set_sys'](name):
ret['changes'] = {'layout': name}
ret['result'] = True
ret['comment'] = 'Set system keyboard layout {0}'.format(name)
return ret
else:
ret['result'] = False
ret['comment'] = 'Failed to set system keyboard layout'
return ret
def xorg(name):
'''
Set the keyboard layout for XOrg
layout
The keyboard layout to use
'''
ret = {'name': name,
'changes': {},
'result': None,
'comment': ''}
if __salt__['keyboard.get_x']() == name:
ret['result'] = True
ret['comment'] = 'XOrg layout {0} already set'.format(name)
return ret
if __opts__['test']:
ret['comment'] = 'XOrg layout {0} needs to be set'.format(name)
return ret
if __salt__['keyboard.set_x'](name):
ret['changes'] = {'layout': name}
ret['result'] = True
ret['comment'] = 'Set XOrg keyboard layout {0}'.format(name)
return ret
else:
ret['result'] = False
ret['comment'] = 'Failed to set XOrg keyboard layout'
return ret
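# A minimal sketch of exercising these state functions outside of Salt
# (assumed, simplified harness -- real runs go through the state compiler,
# which injects __salt__ and __opts__ for us):
#
#     import keyboard as keyboard_states
#     keyboard_states.__salt__ = {'keyboard.get_sys': lambda: 'de',
#                                 'keyboard.set_sys': lambda name: True}
#     keyboard_states.__opts__ = {'test': False}
#     ret = keyboard_states.system('us')
#     assert ret['result'] and ret['changes'] == {'layout': 'us'}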
| apache-2.0 | 7,694,156,515,442,815,000 | 23.77381 | 73 | 0.525709 | false |
newerthcom/savagerebirth | libs/python-2.72/Lib/plat-mac/Carbon/CarbonEvents.py | 81 | 17904 | # Generated from 'CarbonEvents.h'
def FOUR_CHAR_CODE(x): return x
false = 0
true = 1
keyAEEventClass = FOUR_CHAR_CODE('evcl')
keyAEEventID = FOUR_CHAR_CODE('evti')
eventAlreadyPostedErr = -9860
eventTargetBusyErr = -9861
eventClassInvalidErr = -9862
eventClassIncorrectErr = -9864
eventHandlerAlreadyInstalledErr = -9866
eventInternalErr = -9868
eventKindIncorrectErr = -9869
eventParameterNotFoundErr = -9870
eventNotHandledErr = -9874
eventLoopTimedOutErr = -9875
eventLoopQuitErr = -9876
eventNotInQueueErr = -9877
eventHotKeyExistsErr = -9878
eventHotKeyInvalidErr = -9879
kEventPriorityLow = 0
kEventPriorityStandard = 1
kEventPriorityHigh = 2
kEventLeaveInQueue = false
kEventRemoveFromQueue = true
kTrackMouseLocationOptionDontConsumeMouseUp = (1 << 0)
kMouseTrackingMouseDown = 1
kMouseTrackingMouseUp = 2
kMouseTrackingMouseExited = 3
kMouseTrackingMouseEntered = 4
kMouseTrackingMouseDragged = 5
kMouseTrackingKeyModifiersChanged = 6
kMouseTrackingUserCancelled = 7
kMouseTrackingTimedOut = 8
kMouseTrackingMouseMoved = 9
kEventAttributeNone = 0
kEventAttributeUserEvent = (1 << 0)
kEventClassMouse = FOUR_CHAR_CODE('mous')
kEventClassKeyboard = FOUR_CHAR_CODE('keyb')
kEventClassTextInput = FOUR_CHAR_CODE('text')
kEventClassApplication = FOUR_CHAR_CODE('appl')
kEventClassAppleEvent = FOUR_CHAR_CODE('eppc')
kEventClassMenu = FOUR_CHAR_CODE('menu')
kEventClassWindow = FOUR_CHAR_CODE('wind')
kEventClassControl = FOUR_CHAR_CODE('cntl')
kEventClassCommand = FOUR_CHAR_CODE('cmds')
kEventClassTablet = FOUR_CHAR_CODE('tblt')
kEventClassVolume = FOUR_CHAR_CODE('vol ')
kEventClassAppearance = FOUR_CHAR_CODE('appm')
kEventClassService = FOUR_CHAR_CODE('serv')
kEventMouseDown = 1
kEventMouseUp = 2
kEventMouseMoved = 5
kEventMouseDragged = 6
kEventMouseWheelMoved = 10
kEventMouseButtonPrimary = 1
kEventMouseButtonSecondary = 2
kEventMouseButtonTertiary = 3
kEventMouseWheelAxisX = 0
kEventMouseWheelAxisY = 1
kEventTextInputUpdateActiveInputArea = 1
kEventTextInputUnicodeForKeyEvent = 2
kEventTextInputOffsetToPos = 3
kEventTextInputPosToOffset = 4
kEventTextInputShowHideBottomWindow = 5
kEventTextInputGetSelectedText = 6
kEventRawKeyDown = 1
kEventRawKeyRepeat = 2
kEventRawKeyUp = 3
kEventRawKeyModifiersChanged = 4
kEventHotKeyPressed = 5
kEventHotKeyReleased = 6
kEventKeyModifierNumLockBit = 16
kEventKeyModifierFnBit = 17
kEventKeyModifierNumLockMask = 1L << kEventKeyModifierNumLockBit
kEventKeyModifierFnMask = 1L << kEventKeyModifierFnBit
kEventAppActivated = 1
kEventAppDeactivated = 2
kEventAppQuit = 3
kEventAppLaunchNotification = 4
kEventAppLaunched = 5
kEventAppTerminated = 6
kEventAppFrontSwitched = 7
kEventAppGetDockTileMenu = 20
kEventAppleEvent = 1
kEventWindowUpdate = 1
kEventWindowDrawContent = 2
kEventWindowActivated = 5
kEventWindowDeactivated = 6
kEventWindowGetClickActivation = 7
kEventWindowShowing = 22
kEventWindowHiding = 23
kEventWindowShown = 24
kEventWindowHidden = 25
kEventWindowCollapsing = 86
kEventWindowCollapsed = 67
kEventWindowExpanding = 87
kEventWindowExpanded = 70
kEventWindowZoomed = 76
kEventWindowBoundsChanging = 26
kEventWindowBoundsChanged = 27
kEventWindowResizeStarted = 28
kEventWindowResizeCompleted = 29
kEventWindowDragStarted = 30
kEventWindowDragCompleted = 31
kEventWindowClosed = 73
kWindowBoundsChangeUserDrag = (1 << 0)
kWindowBoundsChangeUserResize = (1 << 1)
kWindowBoundsChangeSizeChanged = (1 << 2)
kWindowBoundsChangeOriginChanged = (1 << 3)
kWindowBoundsChangeZoom = (1 << 4)
kEventWindowClickDragRgn = 32
kEventWindowClickResizeRgn = 33
kEventWindowClickCollapseRgn = 34
kEventWindowClickCloseRgn = 35
kEventWindowClickZoomRgn = 36
kEventWindowClickContentRgn = 37
kEventWindowClickProxyIconRgn = 38
kEventWindowClickToolbarButtonRgn = 41
kEventWindowClickStructureRgn = 42
kEventWindowCursorChange = 40
kEventWindowCollapse = 66
kEventWindowCollapseAll = 68
kEventWindowExpand = 69
kEventWindowExpandAll = 71
kEventWindowClose = 72
kEventWindowCloseAll = 74
kEventWindowZoom = 75
kEventWindowZoomAll = 77
kEventWindowContextualMenuSelect = 78
kEventWindowPathSelect = 79
kEventWindowGetIdealSize = 80
kEventWindowGetMinimumSize = 81
kEventWindowGetMaximumSize = 82
kEventWindowConstrain = 83
kEventWindowHandleContentClick = 85
kEventWindowProxyBeginDrag = 128
kEventWindowProxyEndDrag = 129
kEventWindowToolbarSwitchMode = 150
kDockChangedUser = 1
kDockChangedOrientation = 2
kDockChangedAutohide = 3
kDockChangedDisplay = 4
kDockChangedItems = 5
kDockChangedUnknown = 6
kEventWindowFocusAcquired = 200
kEventWindowFocusRelinquish = 201
kEventWindowDrawFrame = 1000
kEventWindowDrawPart = 1001
kEventWindowGetRegion = 1002
kEventWindowHitTest = 1003
kEventWindowInit = 1004
kEventWindowDispose = 1005
kEventWindowDragHilite = 1006
kEventWindowModified = 1007
kEventWindowSetupProxyDragImage = 1008
kEventWindowStateChanged = 1009
kEventWindowMeasureTitle = 1010
kEventWindowDrawGrowBox = 1011
kEventWindowGetGrowImageRegion = 1012
kEventWindowPaint = 1013
kEventMenuBeginTracking = 1
kEventMenuEndTracking = 2
kEventMenuChangeTrackingMode = 3
kEventMenuOpening = 4
kEventMenuClosed = 5
kEventMenuTargetItem = 6
kEventMenuMatchKey = 7
kEventMenuEnableItems = 8
kEventMenuPopulate = 9
kEventMenuMeasureItemWidth = 100
kEventMenuMeasureItemHeight = 101
kEventMenuDrawItem = 102
kEventMenuDrawItemContent = 103
kEventMenuDispose = 1001
kMenuContextMenuBar = 1 << 0
kMenuContextPullDown = 1 << 8
kMenuContextPopUp = 1 << 9
kMenuContextSubmenu = 1 << 10
kMenuContextMenuBarTracking = 1 << 16
kMenuContextPopUpTracking = 1 << 17
kMenuContextKeyMatching = 1 << 18
kMenuContextMenuEnabling = 1 << 19
kMenuContextCommandIDSearch = 1 << 20
kEventProcessCommand = 1
kEventCommandProcess = 1
kEventCommandUpdateStatus = 2
kHICommandOK = FOUR_CHAR_CODE('ok ')
kHICommandCancel = FOUR_CHAR_CODE('not!')
kHICommandQuit = FOUR_CHAR_CODE('quit')
kHICommandUndo = FOUR_CHAR_CODE('undo')
kHICommandRedo = FOUR_CHAR_CODE('redo')
kHICommandCut = FOUR_CHAR_CODE('cut ')
kHICommandCopy = FOUR_CHAR_CODE('copy')
kHICommandPaste = FOUR_CHAR_CODE('past')
kHICommandClear = FOUR_CHAR_CODE('clea')
kHICommandSelectAll = FOUR_CHAR_CODE('sall')
kHICommandHide = FOUR_CHAR_CODE('hide')
kHICommandHideOthers = FOUR_CHAR_CODE('hido')
kHICommandShowAll = FOUR_CHAR_CODE('shal')
kHICommandPreferences = FOUR_CHAR_CODE('pref')
kHICommandZoomWindow = FOUR_CHAR_CODE('zoom')
kHICommandMinimizeWindow = FOUR_CHAR_CODE('mini')
kHICommandMinimizeAll = FOUR_CHAR_CODE('mina')
kHICommandMaximizeWindow = FOUR_CHAR_CODE('maxi')
kHICommandMaximizeAll = FOUR_CHAR_CODE('maxa')
kHICommandArrangeInFront = FOUR_CHAR_CODE('frnt')
kHICommandBringAllToFront = FOUR_CHAR_CODE('bfrt')
kHICommandWindowListSeparator = FOUR_CHAR_CODE('wldv')
kHICommandWindowListTerminator = FOUR_CHAR_CODE('wlst')
kHICommandSelectWindow = FOUR_CHAR_CODE('swin')
kHICommandAbout = FOUR_CHAR_CODE('abou')
kHICommandNew = FOUR_CHAR_CODE('new ')
kHICommandOpen = FOUR_CHAR_CODE('open')
kHICommandClose = FOUR_CHAR_CODE('clos')
kHICommandSave = FOUR_CHAR_CODE('save')
kHICommandSaveAs = FOUR_CHAR_CODE('svas')
kHICommandRevert = FOUR_CHAR_CODE('rvrt')
kHICommandPrint = FOUR_CHAR_CODE('prnt')
kHICommandPageSetup = FOUR_CHAR_CODE('page')
kHICommandAppHelp = FOUR_CHAR_CODE('ahlp')
kHICommandFromMenu = (1L << 0)
kHICommandFromControl = (1L << 1)
kHICommandFromWindow = (1L << 2)
kEventControlInitialize = 1000
kEventControlDispose = 1001
kEventControlGetOptimalBounds = 1003
kEventControlDefInitialize = kEventControlInitialize
kEventControlDefDispose = kEventControlDispose
kEventControlHit = 1
kEventControlSimulateHit = 2
kEventControlHitTest = 3
kEventControlDraw = 4
kEventControlApplyBackground = 5
kEventControlApplyTextColor = 6
kEventControlSetFocusPart = 7
kEventControlGetFocusPart = 8
kEventControlActivate = 9
kEventControlDeactivate = 10
kEventControlSetCursor = 11
kEventControlContextualMenuClick = 12
kEventControlClick = 13
kEventControlTrack = 51
kEventControlGetScrollToHereStartPoint = 52
kEventControlGetIndicatorDragConstraint = 53
kEventControlIndicatorMoved = 54
kEventControlGhostingFinished = 55
kEventControlGetActionProcPart = 56
kEventControlGetPartRegion = 101
kEventControlGetPartBounds = 102
kEventControlSetData = 103
kEventControlGetData = 104
kEventControlValueFieldChanged = 151
kEventControlAddedSubControl = 152
kEventControlRemovingSubControl = 153
kEventControlBoundsChanged = 154
kEventControlOwningWindowChanged = 159
kEventControlArbitraryMessage = 201
kControlBoundsChangeSizeChanged = (1 << 2)
kControlBoundsChangePositionChanged = (1 << 3)
kEventTabletPoint = 1
kEventTabletProximity = 2
kEventTabletPointer = 1
kEventVolumeMounted = 1
kEventVolumeUnmounted = 2
typeFSVolumeRefNum = FOUR_CHAR_CODE('voln')
kEventAppearanceScrollBarVariantChanged = 1
kEventServiceCopy = 1
kEventServicePaste = 2
kEventServiceGetTypes = 3
kEventServicePerform = 4
kEventParamDirectObject = FOUR_CHAR_CODE('----')
kEventParamPostTarget = FOUR_CHAR_CODE('ptrg')
typeEventTargetRef = FOUR_CHAR_CODE('etrg')
kEventParamWindowRef = FOUR_CHAR_CODE('wind')
kEventParamGrafPort = FOUR_CHAR_CODE('graf')
kEventParamDragRef = FOUR_CHAR_CODE('drag')
kEventParamMenuRef = FOUR_CHAR_CODE('menu')
kEventParamEventRef = FOUR_CHAR_CODE('evnt')
kEventParamControlRef = FOUR_CHAR_CODE('ctrl')
kEventParamRgnHandle = FOUR_CHAR_CODE('rgnh')
kEventParamEnabled = FOUR_CHAR_CODE('enab')
kEventParamDimensions = FOUR_CHAR_CODE('dims')
kEventParamAvailableBounds = FOUR_CHAR_CODE('avlb')
kEventParamAEEventID = keyAEEventID
kEventParamAEEventClass = keyAEEventClass
kEventParamCGContextRef = FOUR_CHAR_CODE('cntx')
kEventParamDeviceDepth = FOUR_CHAR_CODE('devd')
kEventParamDeviceColor = FOUR_CHAR_CODE('devc')
typeWindowRef = FOUR_CHAR_CODE('wind')
typeGrafPtr = FOUR_CHAR_CODE('graf')
typeGWorldPtr = FOUR_CHAR_CODE('gwld')
typeDragRef = FOUR_CHAR_CODE('drag')
typeMenuRef = FOUR_CHAR_CODE('menu')
typeControlRef = FOUR_CHAR_CODE('ctrl')
typeCollection = FOUR_CHAR_CODE('cltn')
typeQDRgnHandle = FOUR_CHAR_CODE('rgnh')
typeOSStatus = FOUR_CHAR_CODE('osst')
typeCFStringRef = FOUR_CHAR_CODE('cfst')
typeCFIndex = FOUR_CHAR_CODE('cfix')
typeCFTypeRef = FOUR_CHAR_CODE('cfty')
typeCGContextRef = FOUR_CHAR_CODE('cntx')
typeHIPoint = FOUR_CHAR_CODE('hipt')
typeHISize = FOUR_CHAR_CODE('hisz')
typeHIRect = FOUR_CHAR_CODE('hirc')
kEventParamMouseLocation = FOUR_CHAR_CODE('mloc')
kEventParamMouseButton = FOUR_CHAR_CODE('mbtn')
kEventParamClickCount = FOUR_CHAR_CODE('ccnt')
kEventParamMouseWheelAxis = FOUR_CHAR_CODE('mwax')
kEventParamMouseWheelDelta = FOUR_CHAR_CODE('mwdl')
kEventParamMouseDelta = FOUR_CHAR_CODE('mdta')
kEventParamMouseChord = FOUR_CHAR_CODE('chor')
kEventParamTabletEventType = FOUR_CHAR_CODE('tblt')
typeMouseButton = FOUR_CHAR_CODE('mbtn')
typeMouseWheelAxis = FOUR_CHAR_CODE('mwax')
kEventParamKeyCode = FOUR_CHAR_CODE('kcod')
kEventParamKeyMacCharCodes = FOUR_CHAR_CODE('kchr')
kEventParamKeyModifiers = FOUR_CHAR_CODE('kmod')
kEventParamKeyUnicodes = FOUR_CHAR_CODE('kuni')
kEventParamKeyboardType = FOUR_CHAR_CODE('kbdt')
typeEventHotKeyID = FOUR_CHAR_CODE('hkid')
kEventParamTextInputSendRefCon = FOUR_CHAR_CODE('tsrc')
kEventParamTextInputSendComponentInstance = FOUR_CHAR_CODE('tsci')
kEventParamTextInputSendSLRec = FOUR_CHAR_CODE('tssl')
kEventParamTextInputReplySLRec = FOUR_CHAR_CODE('trsl')
kEventParamTextInputSendText = FOUR_CHAR_CODE('tstx')
kEventParamTextInputReplyText = FOUR_CHAR_CODE('trtx')
kEventParamTextInputSendUpdateRng = FOUR_CHAR_CODE('tsup')
kEventParamTextInputSendHiliteRng = FOUR_CHAR_CODE('tshi')
kEventParamTextInputSendClauseRng = FOUR_CHAR_CODE('tscl')
kEventParamTextInputSendPinRng = FOUR_CHAR_CODE('tspn')
kEventParamTextInputSendFixLen = FOUR_CHAR_CODE('tsfx')
kEventParamTextInputSendLeadingEdge = FOUR_CHAR_CODE('tsle')
kEventParamTextInputReplyLeadingEdge = FOUR_CHAR_CODE('trle')
kEventParamTextInputSendTextOffset = FOUR_CHAR_CODE('tsto')
kEventParamTextInputReplyTextOffset = FOUR_CHAR_CODE('trto')
kEventParamTextInputReplyRegionClass = FOUR_CHAR_CODE('trrg')
kEventParamTextInputSendCurrentPoint = FOUR_CHAR_CODE('tscp')
kEventParamTextInputSendDraggingMode = FOUR_CHAR_CODE('tsdm')
kEventParamTextInputReplyPoint = FOUR_CHAR_CODE('trpt')
kEventParamTextInputReplyFont = FOUR_CHAR_CODE('trft')
kEventParamTextInputReplyFMFont = FOUR_CHAR_CODE('trfm')
kEventParamTextInputReplyPointSize = FOUR_CHAR_CODE('trpz')
kEventParamTextInputReplyLineHeight = FOUR_CHAR_CODE('trlh')
kEventParamTextInputReplyLineAscent = FOUR_CHAR_CODE('trla')
kEventParamTextInputReplyTextAngle = FOUR_CHAR_CODE('trta')
kEventParamTextInputSendShowHide = FOUR_CHAR_CODE('tssh')
kEventParamTextInputReplyShowHide = FOUR_CHAR_CODE('trsh')
kEventParamTextInputSendKeyboardEvent = FOUR_CHAR_CODE('tske')
kEventParamTextInputSendTextServiceEncoding = FOUR_CHAR_CODE('tsse')
kEventParamTextInputSendTextServiceMacEncoding = FOUR_CHAR_CODE('tssm')
kEventParamHICommand = FOUR_CHAR_CODE('hcmd')
typeHICommand = FOUR_CHAR_CODE('hcmd')
kEventParamWindowFeatures = FOUR_CHAR_CODE('wftr')
kEventParamWindowDefPart = FOUR_CHAR_CODE('wdpc')
kEventParamCurrentBounds = FOUR_CHAR_CODE('crct')
kEventParamOriginalBounds = FOUR_CHAR_CODE('orct')
kEventParamPreviousBounds = FOUR_CHAR_CODE('prct')
kEventParamClickActivation = FOUR_CHAR_CODE('clac')
kEventParamWindowRegionCode = FOUR_CHAR_CODE('wshp')
kEventParamWindowDragHiliteFlag = FOUR_CHAR_CODE('wdhf')
kEventParamWindowModifiedFlag = FOUR_CHAR_CODE('wmff')
kEventParamWindowProxyGWorldPtr = FOUR_CHAR_CODE('wpgw')
kEventParamWindowProxyImageRgn = FOUR_CHAR_CODE('wpir')
kEventParamWindowProxyOutlineRgn = FOUR_CHAR_CODE('wpor')
kEventParamWindowStateChangedFlags = FOUR_CHAR_CODE('wscf')
kEventParamWindowTitleFullWidth = FOUR_CHAR_CODE('wtfw')
kEventParamWindowTitleTextWidth = FOUR_CHAR_CODE('wttw')
kEventParamWindowGrowRect = FOUR_CHAR_CODE('grct')
kEventParamAttributes = FOUR_CHAR_CODE('attr')
kEventParamDockChangedReason = FOUR_CHAR_CODE('dcrs')
kEventParamPreviousDockRect = FOUR_CHAR_CODE('pdrc')
kEventParamCurrentDockRect = FOUR_CHAR_CODE('cdrc')
typeWindowRegionCode = FOUR_CHAR_CODE('wshp')
typeWindowDefPartCode = FOUR_CHAR_CODE('wdpt')
typeClickActivationResult = FOUR_CHAR_CODE('clac')
kEventParamControlPart = FOUR_CHAR_CODE('cprt')
kEventParamInitCollection = FOUR_CHAR_CODE('icol')
kEventParamControlMessage = FOUR_CHAR_CODE('cmsg')
kEventParamControlParam = FOUR_CHAR_CODE('cprm')
kEventParamControlResult = FOUR_CHAR_CODE('crsl')
kEventParamControlRegion = FOUR_CHAR_CODE('crgn')
kEventParamControlAction = FOUR_CHAR_CODE('caup')
kEventParamControlIndicatorDragConstraint = FOUR_CHAR_CODE('cidc')
kEventParamControlIndicatorRegion = FOUR_CHAR_CODE('cirn')
kEventParamControlIsGhosting = FOUR_CHAR_CODE('cgst')
kEventParamControlIndicatorOffset = FOUR_CHAR_CODE('ciof')
kEventParamControlClickActivationResult = FOUR_CHAR_CODE('ccar')
kEventParamControlSubControl = FOUR_CHAR_CODE('csub')
kEventParamControlOptimalBounds = FOUR_CHAR_CODE('cobn')
kEventParamControlOptimalBaselineOffset = FOUR_CHAR_CODE('cobo')
kEventParamControlDataTag = FOUR_CHAR_CODE('cdtg')
kEventParamControlDataBuffer = FOUR_CHAR_CODE('cdbf')
kEventParamControlDataBufferSize = FOUR_CHAR_CODE('cdbs')
kEventParamControlDrawDepth = FOUR_CHAR_CODE('cddp')
kEventParamControlDrawInColor = FOUR_CHAR_CODE('cdic')
kEventParamControlFeatures = FOUR_CHAR_CODE('cftr')
kEventParamControlPartBounds = FOUR_CHAR_CODE('cpbd')
kEventParamControlOriginalOwningWindow = FOUR_CHAR_CODE('coow')
kEventParamControlCurrentOwningWindow = FOUR_CHAR_CODE('ccow')
typeControlActionUPP = FOUR_CHAR_CODE('caup')
typeIndicatorDragConstraint = FOUR_CHAR_CODE('cidc')
typeControlPartCode = FOUR_CHAR_CODE('cprt')
kEventParamCurrentMenuTrackingMode = FOUR_CHAR_CODE('cmtm')
kEventParamNewMenuTrackingMode = FOUR_CHAR_CODE('nmtm')
kEventParamMenuFirstOpen = FOUR_CHAR_CODE('1sto')
kEventParamMenuItemIndex = FOUR_CHAR_CODE('item')
kEventParamMenuCommand = FOUR_CHAR_CODE('mcmd')
kEventParamEnableMenuForKeyEvent = FOUR_CHAR_CODE('fork')
kEventParamMenuEventOptions = FOUR_CHAR_CODE('meop')
kEventParamMenuContext = FOUR_CHAR_CODE('mctx')
kEventParamMenuItemBounds = FOUR_CHAR_CODE('mitb')
kEventParamMenuMarkBounds = FOUR_CHAR_CODE('mmkb')
kEventParamMenuIconBounds = FOUR_CHAR_CODE('micb')
kEventParamMenuTextBounds = FOUR_CHAR_CODE('mtxb')
kEventParamMenuTextBaseline = FOUR_CHAR_CODE('mtbl')
kEventParamMenuCommandKeyBounds = FOUR_CHAR_CODE('mcmb')
kEventParamMenuVirtualTop = FOUR_CHAR_CODE('mvrt')
kEventParamMenuVirtualBottom = FOUR_CHAR_CODE('mvrb')
kEventParamMenuDrawState = FOUR_CHAR_CODE('mdrs')
kEventParamMenuItemType = FOUR_CHAR_CODE('mitp')
kEventParamMenuItemWidth = FOUR_CHAR_CODE('mitw')
kEventParamMenuItemHeight = FOUR_CHAR_CODE('mith')
typeMenuItemIndex = FOUR_CHAR_CODE('midx')
typeMenuCommand = FOUR_CHAR_CODE('mcmd')
typeMenuTrackingMode = FOUR_CHAR_CODE('mtmd')
typeMenuEventOptions = FOUR_CHAR_CODE('meop')
typeThemeMenuState = FOUR_CHAR_CODE('tmns')
typeThemeMenuItemType = FOUR_CHAR_CODE('tmit')
kEventParamProcessID = FOUR_CHAR_CODE('psn ')
kEventParamLaunchRefCon = FOUR_CHAR_CODE('lref')
kEventParamLaunchErr = FOUR_CHAR_CODE('err ')
kEventParamTabletPointRec = FOUR_CHAR_CODE('tbrc')
kEventParamTabletProximityRec = FOUR_CHAR_CODE('tbpx')
typeTabletPointRec = FOUR_CHAR_CODE('tbrc')
typeTabletProximityRec = FOUR_CHAR_CODE('tbpx')
kEventParamTabletPointerRec = FOUR_CHAR_CODE('tbrc')
typeTabletPointerRec = FOUR_CHAR_CODE('tbrc')
kEventParamNewScrollBarVariant = FOUR_CHAR_CODE('nsbv')
kEventParamScrapRef = FOUR_CHAR_CODE('scrp')
kEventParamServiceCopyTypes = FOUR_CHAR_CODE('svsd')
kEventParamServicePasteTypes = FOUR_CHAR_CODE('svpt')
kEventParamServiceMessageName = FOUR_CHAR_CODE('svmg')
kEventParamServiceUserData = FOUR_CHAR_CODE('svud')
typeScrapRef = FOUR_CHAR_CODE('scrp')
typeCFMutableArrayRef = FOUR_CHAR_CODE('cfma')
# sHandler = NewEventHandlerUPP( x )
kMouseTrackingMousePressed = kMouseTrackingMouseDown
kMouseTrackingMouseReleased = kMouseTrackingMouseUp
| gpl-2.0 | -8,961,275,274,261,275,000 | 38.698448 | 71 | 0.815628 | false |
angelapper/edx-platform | lms/djangoapps/survey/tests/test_utils.py | 8 | 4057 | """
Python tests for the Survey models
"""
from collections import OrderedDict
from django.contrib.auth.models import User
from django.test.client import Client
from survey.models import SurveyForm
from survey.utils import is_survey_required_for_course, is_survey_required_and_unanswered
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
class SurveyModelsTests(ModuleStoreTestCase):
"""
All tests for the utils.py file
"""
def setUp(self):
"""
Set up the test data used in the specific tests
"""
super(SurveyModelsTests, self).setUp()
self.client = Client()
# Create two accounts
self.password = 'abc'
self.student = User.objects.create_user('student', '[email protected]', self.password)
self.student2 = User.objects.create_user('student2', '[email protected]', self.password)
self.staff = User.objects.create_user('staff', '[email protected]', self.password)
self.staff.is_staff = True
self.staff.save()
self.test_survey_name = 'TestSurvey'
self.test_form = '<input name="foo"></input>'
self.student_answers = OrderedDict({
'field1': 'value1',
'field2': 'value2',
})
self.student2_answers = OrderedDict({
'field1': 'value3'
})
self.course = CourseFactory.create(
course_survey_required=True,
course_survey_name=self.test_survey_name
)
self.survey = SurveyForm.create(self.test_survey_name, self.test_form)
def test_is_survey_required_for_course(self):
"""
        Assert that a required course survey is detected when both the flag is set
        and a survey name is set on the course descriptor
"""
self.assertTrue(is_survey_required_for_course(self.course))
def test_is_survey_not_required_for_course(self):
"""
Assert that if various data is not available or if the survey is not found
then the survey is not considered required
"""
course = CourseFactory.create()
self.assertFalse(is_survey_required_for_course(course))
course = CourseFactory.create(
course_survey_required=False
)
self.assertFalse(is_survey_required_for_course(course))
course = CourseFactory.create(
course_survey_required=True,
course_survey_name="NonExisting"
)
self.assertFalse(is_survey_required_for_course(course))
course = CourseFactory.create(
course_survey_required=False,
course_survey_name=self.test_survey_name
)
self.assertFalse(is_survey_required_for_course(course))
def test_user_not_yet_answered_required_survey(self):
"""
        Assert that a course with a required survey is reported as unanswered when the user has not yet taken it
"""
self.assertTrue(is_survey_required_and_unanswered(self.student, self.course))
temp_course = CourseFactory.create(
course_survey_required=False
)
self.assertFalse(is_survey_required_and_unanswered(self.student, temp_course))
temp_course = CourseFactory.create(
course_survey_required=True,
course_survey_name="NonExisting"
)
self.assertFalse(is_survey_required_and_unanswered(self.student, temp_course))
def test_user_has_answered_required_survey(self):
"""
        Assert that a course's required survey is no longer flagged once the user has answered it
"""
self.survey.save_user_answers(self.student, self.student_answers, None)
self.assertFalse(is_survey_required_and_unanswered(self.student, self.course))
def test_staff_must_answer_survey(self):
"""
Assert that someone with staff level permissions does not have to answer the survey
"""
self.assertFalse(is_survey_required_and_unanswered(self.staff, self.course))
| agpl-3.0 | 5,763,065,887,999,355,000 | 33.974138 | 96 | 0.649495 | false |
glovebx/zulip | zilencer/management/commands/populate_db.py | 113 | 34944 | from __future__ import absolute_import
from django.core.management.base import BaseCommand
from django.utils.timezone import now
from django.contrib.sites.models import Site
from zerver.models import Message, UserProfile, Stream, Recipient, Client, \
Subscription, Huddle, get_huddle, Realm, UserMessage, \
get_huddle_hash, clear_database, get_client, get_user_profile_by_id, \
split_email_to_domain, email_to_username
from zerver.lib.actions import do_send_message, set_default_streams, \
do_activate_user, do_deactivate_user, do_change_password, do_change_is_admin
from zerver.lib.parallel import run_parallel
from django.db.models import Count
from django.conf import settings
from zerver.lib.bulk_create import bulk_create_realms, \
bulk_create_streams, bulk_create_users, bulk_create_huddles, \
bulk_create_clients
from zerver.lib.timestamp import timestamp_to_datetime
from zerver.models import MAX_MESSAGE_LENGTH
from zerver.models import DefaultStream, get_stream
from zilencer.models import Deployment
import ujson
import datetime
import random
import glob
import os
from optparse import make_option
settings.TORNADO_SERVER = None
def create_users(realms, name_list, bot=False):
user_set = set()
for full_name, email in name_list:
short_name = email_to_username(email)
user_set.add((email, full_name, short_name, True))
bulk_create_users(realms, user_set, bot)
def create_streams(realms, realm, stream_list):
stream_set = set()
for stream_name in stream_list:
stream_set.add((realm.domain, stream_name))
bulk_create_streams(realms, stream_set)
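# A minimal sketch of the helper contracts above (hypothetical data; the real
# call sites appear in Command.handle() below):
#
#     realms = {'zulip.com': Realm.objects.get(domain='zulip.com')}
#     create_users(realms, [('Ada Lovelace', 'ada@zulip.com')])
#     create_streams(realms, realms['zulip.com'], ['devel', 'social'])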
class Command(BaseCommand):
help = "Populate a test database"
option_list = BaseCommand.option_list + (
make_option('-n', '--num-messages',
dest='num_messages',
type='int',
default=600,
help='The number of messages to create.'),
make_option('--extra-users',
dest='extra_users',
type='int',
default=0,
help='The number of extra users to create'),
make_option('--huddles',
dest='num_huddles',
type='int',
default=3,
help='The number of huddles to create.'),
make_option('--personals',
dest='num_personals',
type='int',
default=6,
help='The number of personal pairs to create.'),
make_option('--threads',
dest='threads',
type='int',
default=10,
help='The number of threads to use.'),
make_option('--percent-huddles',
dest='percent_huddles',
type='float',
default=15,
help='The percent of messages to be huddles.'),
make_option('--percent-personals',
dest='percent_personals',
type='float',
default=15,
help='The percent of messages to be personals.'),
make_option('--stickyness',
dest='stickyness',
type='float',
default=20,
help='The percent of messages to repeat recent folks.'),
make_option('--nodelete',
action="store_false",
default=True,
dest='delete',
help='Whether to delete all the existing messages.'),
make_option('--test-suite',
default=False,
action="store_true",
                    help='Configure the data set for the backend test suite.'),
make_option('--replay-old-messages',
action="store_true",
default=False,
dest='replay_old_messages',
help='Whether to replace the log of old messages.'),
)
def handle(self, **options):
if options["percent_huddles"] + options["percent_personals"] > 100:
self.stderr.write("Error! More than 100% of messages allocated.\n")
return
if options["delete"]:
# Start by clearing all the data in our database
clear_database()
# Create our two default realms
zulip_realm = Realm.objects.create(domain="zulip.com", name="Zulip Dev")
if options["test_suite"]:
Realm.objects.create(domain="mit.edu")
realms = {}
for realm in Realm.objects.all():
realms[realm.domain] = realm
# Create test Users (UserProfiles are automatically created,
# as are subscriptions to the ability to receive personals).
names = [("Othello, the Moor of Venice", "[email protected]"), ("Iago", "[email protected]"),
("Prospero from The Tempest", "[email protected]"),
("Cordelia Lear", "[email protected]"), ("King Hamlet", "[email protected]")]
for i in xrange(options["extra_users"]):
names.append(('Extra User %d' % (i,), 'extrauser%[email protected]' % (i,)))
create_users(realms, names)
iago = UserProfile.objects.get(email="[email protected]")
do_change_is_admin(iago, True)
# Create public streams.
stream_list = ["Verona", "Denmark", "Scotland", "Venice", "Rome"]
create_streams(realms, zulip_realm, stream_list)
recipient_streams = [Stream.objects.get(name=name, realm=zulip_realm).id for name in stream_list]
# Create subscriptions to streams
subscriptions_to_add = []
profiles = UserProfile.objects.select_related().all()
for i, profile in enumerate(profiles):
# Subscribe to some streams.
for type_id in recipient_streams[:int(len(recipient_streams) *
float(i)/len(profiles)) + 1]:
r = Recipient.objects.get(type=Recipient.STREAM, type_id=type_id)
s = Subscription(recipient=r, user_profile=profile)
subscriptions_to_add.append(s)
Subscription.objects.bulk_create(subscriptions_to_add)
else:
zulip_realm = Realm.objects.get(domain="zulip.com")
recipient_streams = [klass.type_id for klass in
Recipient.objects.filter(type=Recipient.STREAM)]
# Extract a list of all users
user_profiles = [user_profile.id for user_profile in UserProfile.objects.all()]
# Create several initial huddles
for i in xrange(options["num_huddles"]):
get_huddle(random.sample(user_profiles, random.randint(3, 4)))
# Create several initial pairs for personals
personals_pairs = [random.sample(user_profiles, 2)
for i in xrange(options["num_personals"])]
threads = options["threads"]
jobs = []
for i in xrange(threads):
count = options["num_messages"] / threads
if i < options["num_messages"] % threads:
count += 1
jobs.append((count, personals_pairs, options, self.stdout.write))
for job in jobs:
send_messages(job)
if options["delete"]:
# Create the "website" and "API" clients; if we don't, the
# default values in zerver/decorators.py will not work
# with the Django test suite.
get_client("website")
get_client("API")
if options["test_suite"]:
# Create test users; the MIT ones are needed to test
# the Zephyr mirroring codepaths.
testsuite_mit_users = [
("Fred Sipb (MIT)", "[email protected]"),
("Athena Consulting Exchange User (MIT)", "[email protected]"),
("Esp Classroom (MIT)", "[email protected]"),
]
create_users(realms, testsuite_mit_users)
# These bots are directly referenced from code and thus
# are needed for the test suite.
all_realm_bots = [(bot['name'], bot['email_template'] % (settings.INTERNAL_BOT_DOMAIN,))
for bot in settings.INTERNAL_BOTS]
zulip_realm_bots = [
("Zulip New User Bot", "[email protected]"),
("Zulip Error Bot", "[email protected]"),
]
zulip_realm_bots.extend(all_realm_bots)
create_users(realms, zulip_realm_bots, bot=True)
if not options["test_suite"]:
# To keep the messages.json fixtures file for the test
# suite fast, don't add these users and subscriptions
# when running populate_db for the test suite
zulip_stream_list = ["devel", "all", "zulip", "design", "support", "social", "test",
"errors", "sales"]
create_streams(realms, zulip_realm, zulip_stream_list)
# Add a few default streams
for stream_name in ["design", "devel", "social", "support"]:
DefaultStream.objects.create(realm=zulip_realm, stream=get_stream(stream_name, zulip_realm))
# Now subscribe everyone to these streams
subscriptions_to_add = []
profiles = UserProfile.objects.select_related().filter(realm=zulip_realm)
for cls in zulip_stream_list:
stream = Stream.objects.get(name=cls, realm=zulip_realm)
recipient = Recipient.objects.get(type=Recipient.STREAM, type_id=stream.id)
for profile in profiles:
# Subscribe to some streams.
s = Subscription(recipient=recipient, user_profile=profile)
subscriptions_to_add.append(s)
Subscription.objects.bulk_create(subscriptions_to_add)
# These bots are not needed by the test suite
internal_zulip_users_nosubs = [
("Zulip Commit Bot", "[email protected]"),
("Zulip Trac Bot", "[email protected]"),
("Zulip Nagios Bot", "[email protected]"),
("Zulip Feedback Bot", "[email protected]"),
]
create_users(realms, internal_zulip_users_nosubs, bot=True)
# Mark all messages as read
UserMessage.objects.all().update(flags=UserMessage.flags.read)
self.stdout.write("Successfully populated test database.\n")
if options["replay_old_messages"]:
restore_saved_messages()
recipient_hash = {}
def get_recipient_by_id(rid):
if rid in recipient_hash:
return recipient_hash[rid]
    recipient = Recipient.objects.get(id=rid)
    recipient_hash[rid] = recipient
    return recipient
def restore_saved_messages():
old_messages = []
duplicate_suppression_hash = {}
stream_dict = {}
user_set = set()
email_set = set([u.email for u in UserProfile.objects.all()])
realm_set = set()
# Initial client_set is nonempty temporarily because we don't have
# clients in logs at all right now -- later we can start with nothing.
client_set = set(["populate_db", "website", "zephyr_mirror"])
huddle_user_set = set()
# First, determine all the objects our messages will need.
print datetime.datetime.now(), "Creating realms/streams/etc..."
def process_line(line):
old_message_json = line.strip()
# Due to populate_db's shakespeare mode, we have a lot of
# duplicate messages in our log that only differ in their
# logged ID numbers (same timestamp, content, etc.). With
# sqlite, bulk creating those messages won't work properly: in
# particular, the first 100 messages will actually only result
# in 20 rows ending up in the target table, which screws up
# the below accounting where for handling changing
# subscriptions, we assume that the Nth row populate_db
# created goes with the Nth non-subscription row of the input
# So suppress the duplicates when using sqlite.
if "sqlite" in settings.DATABASES["default"]["ENGINE"]:
tmp_message = ujson.loads(old_message_json)
tmp_message['id'] = '1'
duplicate_suppression_key = ujson.dumps(tmp_message)
if duplicate_suppression_key in duplicate_suppression_hash:
return
duplicate_suppression_hash[duplicate_suppression_key] = True
old_message = ujson.loads(old_message_json)
message_type = old_message["type"]
# Lower case emails and domains; it will screw up
# deduplication if we don't
def fix_email(email):
return email.strip().lower()
if message_type in ["stream", "huddle", "personal"]:
old_message["sender_email"] = fix_email(old_message["sender_email"])
# Fix the length on too-long messages before we start processing them
if len(old_message["content"]) > MAX_MESSAGE_LENGTH:
old_message["content"] = "[ This message was deleted because it was too long ]"
if message_type in ["subscription_added", "subscription_removed"]:
old_message["domain"] = old_message["domain"].lower()
old_message["user"] = fix_email(old_message["user"])
elif message_type == "subscription_property":
old_message["user"] = fix_email(old_message["user"])
elif message_type == "user_email_changed":
old_message["old_email"] = fix_email(old_message["old_email"])
old_message["new_email"] = fix_email(old_message["new_email"])
elif message_type.startswith("user_"):
old_message["user"] = fix_email(old_message["user"])
elif message_type.startswith("enable_"):
old_message["user"] = fix_email(old_message["user"])
if message_type == 'personal':
old_message["recipient"][0]["email"] = fix_email(old_message["recipient"][0]["email"])
elif message_type == "huddle":
for i in xrange(len(old_message["recipient"])):
old_message["recipient"][i]["email"] = fix_email(old_message["recipient"][i]["email"])
old_messages.append(old_message)
if message_type in ["subscription_added", "subscription_removed"]:
stream_name = old_message["name"].strip()
canon_stream_name = stream_name.lower()
if canon_stream_name not in stream_dict:
stream_dict[(old_message["domain"], canon_stream_name)] = \
(old_message["domain"], stream_name)
elif message_type == "user_created":
user_set.add((old_message["user"], old_message["full_name"], old_message["short_name"], False))
elif message_type == "realm_created":
realm_set.add(old_message["domain"])
if message_type not in ["stream", "huddle", "personal"]:
return
sender_email = old_message["sender_email"]
domain = split_email_to_domain(sender_email)
realm_set.add(domain)
if old_message["sender_email"] not in email_set:
user_set.add((old_message["sender_email"],
old_message["sender_full_name"],
old_message["sender_short_name"],
False))
if 'sending_client' in old_message:
client_set.add(old_message['sending_client'])
if message_type == 'stream':
stream_name = old_message["recipient"].strip()
canon_stream_name = stream_name.lower()
if canon_stream_name not in stream_dict:
stream_dict[(domain, canon_stream_name)] = (domain, stream_name)
elif message_type == 'personal':
u = old_message["recipient"][0]
if u["email"] not in email_set:
user_set.add((u["email"], u["full_name"], u["short_name"], False))
email_set.add(u["email"])
elif message_type == 'huddle':
for u in old_message["recipient"]:
user_set.add((u["email"], u["full_name"], u["short_name"], False))
if u["email"] not in email_set:
user_set.add((u["email"], u["full_name"], u["short_name"], False))
email_set.add(u["email"])
huddle_user_set.add(tuple(sorted(set(u["email"] for u in old_message["recipient"]))))
else:
raise ValueError('Bad message type')
event_glob = os.path.join(settings.EVENT_LOG_DIR, 'events.*')
for filename in sorted(glob.glob(event_glob)):
with file(filename, "r") as message_log:
for line in message_log.readlines():
process_line(line)
stream_recipients = {}
user_recipients = {}
huddle_recipients = {}
# Then, create the objects our messages need.
print datetime.datetime.now(), "Creating realms..."
bulk_create_realms(realm_set)
realms = {}
for realm in Realm.objects.all():
realms[realm.domain] = realm
print datetime.datetime.now(), "Creating clients..."
bulk_create_clients(client_set)
clients = {}
for client in Client.objects.all():
clients[client.name] = client
print datetime.datetime.now(), "Creating streams..."
bulk_create_streams(realms, stream_dict.values())
streams = {}
for stream in Stream.objects.all():
streams[stream.id] = stream
for recipient in Recipient.objects.filter(type=Recipient.STREAM):
stream_recipients[(streams[recipient.type_id].realm_id,
streams[recipient.type_id].name.lower())] = recipient
print datetime.datetime.now(), "Creating users..."
bulk_create_users(realms, user_set)
users = {}
users_by_id = {}
for user_profile in UserProfile.objects.select_related().all():
users[user_profile.email] = user_profile
users_by_id[user_profile.id] = user_profile
for recipient in Recipient.objects.filter(type=Recipient.PERSONAL):
user_recipients[users_by_id[recipient.type_id].email] = recipient
print datetime.datetime.now(), "Creating huddles..."
bulk_create_huddles(users, huddle_user_set)
huddles_by_id = {}
for huddle in Huddle.objects.all():
huddles_by_id[huddle.id] = huddle
for recipient in Recipient.objects.filter(type=Recipient.HUDDLE):
huddle_recipients[huddles_by_id[recipient.type_id].huddle_hash] = recipient
# TODO: Add a special entry type in the log that is a subscription
# change and import those as we go to make subscription changes
# take effect!
print datetime.datetime.now(), "Importing subscriptions..."
subscribers = {}
for s in Subscription.objects.select_related().all():
if s.active:
subscribers.setdefault(s.recipient.id, set()).add(s.user_profile.id)
# Then create all the messages, without talking to the DB!
print datetime.datetime.now(), "Importing messages, part 1..."
first_message_id = None
if Message.objects.exists():
first_message_id = Message.objects.all().order_by("-id")[0].id + 1
messages_to_create = []
for idx, old_message in enumerate(old_messages):
message_type = old_message["type"]
if message_type not in ["stream", "huddle", "personal"]:
continue
message = Message()
sender_email = old_message["sender_email"]
domain = split_email_to_domain(sender_email)
realm = realms[domain]
message.sender = users[sender_email]
type_hash = {"stream": Recipient.STREAM,
"huddle": Recipient.HUDDLE,
"personal": Recipient.PERSONAL}
if 'sending_client' in old_message:
message.sending_client = clients[old_message['sending_client']]
elif sender_email in ["[email protected]", "[email protected]", "[email protected]",
"[email protected]", "[email protected]"]:
message.sending_client = clients['populate_db']
elif realm.domain == "zulip.com":
message.sending_client = clients["website"]
elif realm.domain == "mit.edu":
message.sending_client = clients['zephyr_mirror']
else:
message.sending_client = clients['populate_db']
message.type = type_hash[message_type]
message.content = old_message["content"]
message.subject = old_message["subject"]
message.pub_date = timestamp_to_datetime(old_message["timestamp"])
if message.type == Recipient.PERSONAL:
message.recipient = user_recipients[old_message["recipient"][0]["email"]]
elif message.type == Recipient.STREAM:
message.recipient = stream_recipients[(realm.id,
old_message["recipient"].lower())]
elif message.type == Recipient.HUDDLE:
huddle_hash = get_huddle_hash([users[u["email"]].id
for u in old_message["recipient"]])
message.recipient = huddle_recipients[huddle_hash]
else:
raise ValueError('Bad message type')
messages_to_create.append(message)
print datetime.datetime.now(), "Importing messages, part 2..."
Message.objects.bulk_create(messages_to_create)
messages_to_create = []
# Finally, create all the UserMessage objects
print datetime.datetime.now(), "Importing usermessages, part 1..."
personal_recipients = {}
for r in Recipient.objects.filter(type = Recipient.PERSONAL):
personal_recipients[r.id] = True
all_messages = Message.objects.all()
user_messages_to_create = []
messages_by_id = {}
for message in all_messages:
messages_by_id[message.id] = message
if len(messages_by_id) == 0:
print datetime.datetime.now(), "No old messages to replay"
return
if first_message_id is None:
first_message_id = min(messages_by_id.keys())
tot_user_messages = 0
pending_subs = {}
current_message_id = first_message_id
pending_colors = {}
for old_message in old_messages:
message_type = old_message["type"]
if message_type == 'subscription_added':
stream_key = (realms[old_message["domain"]].id, old_message["name"].strip().lower())
subscribers.setdefault(stream_recipients[stream_key].id,
set()).add(users[old_message["user"]].id)
pending_subs[(stream_recipients[stream_key].id,
users[old_message["user"]].id)] = True
continue
elif message_type == "subscription_removed":
stream_key = (realms[old_message["domain"]].id, old_message["name"].strip().lower())
user_id = users[old_message["user"]].id
subscribers.setdefault(stream_recipients[stream_key].id, set())
try:
subscribers[stream_recipients[stream_key].id].remove(user_id)
except KeyError:
print "Error unsubscribing %s from %s: not subscribed" % (
old_message["user"], old_message["name"])
pending_subs[(stream_recipients[stream_key].id,
users[old_message["user"]].id)] = False
continue
elif message_type == "user_activated" or message_type == "user_created":
# These are rare, so just handle them the slow way
user_profile = users[old_message["user"]]
join_date = timestamp_to_datetime(old_message['timestamp'])
do_activate_user(user_profile, log=False, join_date=join_date)
# Update the cache of users to show this user as activated
users_by_id[user_profile.id] = user_profile
users[old_message["user"]] = user_profile
continue
elif message_type == "user_deactivated":
user_profile = users[old_message["user"]]
do_deactivate_user(user_profile, log=False)
continue
elif message_type == "user_change_password":
# Just handle these the slow way
user_profile = users[old_message["user"]]
do_change_password(user_profile, old_message["pwhash"], log=False,
hashed_password=True)
continue
elif message_type == "user_change_full_name":
# Just handle these the slow way
user_profile = users[old_message["user"]]
user_profile.full_name = old_message["full_name"]
user_profile.save(update_fields=["full_name"])
continue
elif message_type == "enable_desktop_notifications_changed":
# Just handle these the slow way
user_profile = users[old_message["user"]]
user_profile.enable_desktop_notifications = (old_message["enable_desktop_notifications"] != "false")
user_profile.save(update_fields=["enable_desktop_notifications"])
continue
elif message_type == "enable_sounds_changed":
user_profile = users[old_message["user"]]
user_profile.enable_sounds = (old_message["enable_sounds"] != "false")
user_profile.save(update_fields=["enable_sounds"])
elif message_type == "enable_offline_email_notifications_changed":
user_profile = users[old_message["user"]]
user_profile.enable_offline_email_notifications = (old_message["enable_offline_email_notifications"] != "false")
user_profile.save(update_fields=["enable_offline_email_notifications"])
continue
elif message_type == "enable_offline_push_notifications_changed":
user_profile = users[old_message["user"]]
user_profile.enable_offline_push_notifications = (old_message["enable_offline_push_notifications"] != "false")
user_profile.save(update_fields=["enable_offline_push_notifications"])
continue
elif message_type == "default_streams":
set_default_streams(Realm.objects.get(domain=old_message["domain"]),
old_message["streams"])
continue
elif message_type == "subscription_property":
property_name = old_message.get("property")
if property_name == "stream_color" or property_name == "color":
color = old_message.get("color", old_message.get("value"))
pending_colors[(old_message["user"],
old_message["stream_name"].lower())] = color
elif property_name in ["in_home_view", "notifications"]:
# TODO: Handle this
continue
else:
raise RuntimeError("Unknown property %s" % (property_name,))
continue
elif message_type == "realm_created":
# No action required
continue
elif message_type in ["user_email_changed", "update_onboarding", "update_message"]:
# TODO: Handle these
continue
if message_type not in ["stream", "huddle", "personal"]:
raise RuntimeError("Unexpected message type %s" % (message_type,))
message = messages_by_id[current_message_id]
current_message_id += 1
if message.recipient_id not in subscribers:
# Nobody received this message -- probably due to our
# subscriptions being out-of-date.
continue
recipient_user_ids = set()
for user_profile_id in subscribers[message.recipient_id]:
recipient_user_ids.add(user_profile_id)
if message.recipient_id in personal_recipients:
# Include the sender in huddle recipients
recipient_user_ids.add(message.sender_id)
for user_profile_id in recipient_user_ids:
if users_by_id[user_profile_id].is_active:
um = UserMessage(user_profile_id=user_profile_id,
message=message)
user_messages_to_create.append(um)
if len(user_messages_to_create) > 100000:
tot_user_messages += len(user_messages_to_create)
UserMessage.objects.bulk_create(user_messages_to_create)
user_messages_to_create = []
print datetime.datetime.now(), "Importing usermessages, part 2..."
tot_user_messages += len(user_messages_to_create)
UserMessage.objects.bulk_create(user_messages_to_create)
print datetime.datetime.now(), "Finalizing subscriptions..."
current_subs = {}
current_subs_obj = {}
for s in Subscription.objects.select_related().all():
current_subs[(s.recipient_id, s.user_profile_id)] = s.active
current_subs_obj[(s.recipient_id, s.user_profile_id)] = s
subscriptions_to_add = []
subscriptions_to_change = []
for pending_sub in pending_subs.keys():
(recipient_id, user_profile_id) = pending_sub
current_state = current_subs.get(pending_sub)
if pending_subs[pending_sub] == current_state:
# Already correct in the database
continue
elif current_state is not None:
subscriptions_to_change.append((pending_sub, pending_subs[pending_sub]))
continue
s = Subscription(recipient_id=recipient_id,
user_profile_id=user_profile_id,
active=pending_subs[pending_sub])
subscriptions_to_add.append(s)
Subscription.objects.bulk_create(subscriptions_to_add)
for (sub, active) in subscriptions_to_change:
current_subs_obj[sub].active = active
current_subs_obj[sub].save(update_fields=["active"])
subs = {}
for sub in Subscription.objects.all():
subs[(sub.user_profile_id, sub.recipient_id)] = sub
# TODO: do restore of subscription colors -- we're currently not
# logging changes so there's little point in having the code :(
print datetime.datetime.now(), "Finished importing %s messages (%s usermessages)" % \
(len(all_messages), tot_user_messages)
site = Site.objects.get_current()
site.domain = 'zulip.com'
site.save()
print datetime.datetime.now(), "Filling in user pointers..."
# Set restored pointers to the very latest messages
for user_profile in UserProfile.objects.all():
try:
top = UserMessage.objects.filter(
user_profile_id=user_profile.id).order_by("-message")[0]
user_profile.pointer = top.message_id
except IndexError:
user_profile.pointer = -1
user_profile.save(update_fields=["pointer"])
print datetime.datetime.now(), "Done replaying old messages"
# Create some test messages, including:
# - multiple streams
# - multiple subjects per stream
# - multiple huddles
# - multiple personals conversations
# - multiple messages per subject
# - both single and multi-line content
def send_messages(data):
(tot_messages, personals_pairs, options, output) = data
random.seed(os.getpid())
texts = file("zilencer/management/commands/test_messages.txt", "r").readlines()
offset = random.randint(0, len(texts))
recipient_streams = [klass.id for klass in
Recipient.objects.filter(type=Recipient.STREAM)]
recipient_huddles = [h.id for h in Recipient.objects.filter(type=Recipient.HUDDLE)]
huddle_members = {}
for h in recipient_huddles:
huddle_members[h] = [s.user_profile.id for s in
Subscription.objects.filter(recipient_id=h)]
num_messages = 0
random_max = 1000000
recipients = {}
while num_messages < tot_messages:
saved_data = ''
message = Message()
message.sending_client = get_client('populate_db')
length = random.randint(1, 5)
lines = (t.strip() for t in texts[offset: offset + length])
message.content = '\n'.join(lines)
offset += length
offset = offset % len(texts)
randkey = random.randint(1, random_max)
if (num_messages > 0 and
random.randint(1, random_max) * 100. / random_max < options["stickyness"]):
# Use an old recipient
message_type, recipient_id, saved_data = recipients[num_messages - 1]
if message_type == Recipient.PERSONAL:
personals_pair = saved_data
random.shuffle(personals_pair)
elif message_type == Recipient.STREAM:
message.subject = saved_data
message.recipient = get_recipient_by_id(recipient_id)
elif message_type == Recipient.HUDDLE:
message.recipient = get_recipient_by_id(recipient_id)
elif (randkey <= random_max * options["percent_huddles"] / 100.):
message_type = Recipient.HUDDLE
message.recipient = get_recipient_by_id(random.choice(recipient_huddles))
elif (randkey <= random_max * (options["percent_huddles"] + options["percent_personals"]) / 100.):
message_type = Recipient.PERSONAL
personals_pair = random.choice(personals_pairs)
random.shuffle(personals_pair)
elif (randkey <= random_max * 1.0):
message_type = Recipient.STREAM
message.recipient = get_recipient_by_id(random.choice(recipient_streams))
if message_type == Recipient.HUDDLE:
sender_id = random.choice(huddle_members[message.recipient.id])
message.sender = get_user_profile_by_id(sender_id)
elif message_type == Recipient.PERSONAL:
message.recipient = Recipient.objects.get(type=Recipient.PERSONAL,
type_id=personals_pair[0])
message.sender = get_user_profile_by_id(personals_pair[1])
saved_data = personals_pair
elif message_type == Recipient.STREAM:
stream = Stream.objects.get(id=message.recipient.type_id)
# Pick a random subscriber to the stream
message.sender = random.choice(Subscription.objects.filter(
recipient=message.recipient)).user_profile
message.subject = stream.name + str(random.randint(1, 3))
saved_data = message.subject
message.pub_date = now()
do_send_message(message)
recipients[num_messages] = [message_type, message.recipient.id, saved_data]
num_messages += 1
return tot_messages
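# A minimal invocation sketch for this management command (the options are
# defined in Command.option_list above; exact flags depend on the deployment):
#
#     python manage.py populate_db -n 1000 --threads=4 --percent-huddles=20
#     python manage.py populate_db --test-suite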
| apache-2.0 | 6,141,489,528,990,930,000 | 44.381818 | 124 | 0.589572 | false |
pratikmallya/hue | desktop/core/ext-py/Django-1.6.10/tests/test_client_regress/views.py | 53 | 4346 | import json
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render_to_response
from django.core.serializers.json import DjangoJSONEncoder
from django.test.client import CONTENT_TYPE_RE
from django.template import RequestContext
class CustomTestException(Exception):
pass
def no_template_view(request):
"A simple view that expects a GET request, and returns a rendered template"
return HttpResponse("No template used. Sample content: twice once twice. Content ends.")
def staff_only_view(request):
"A view that can only be visited by staff. Non staff members get an exception"
if request.user.is_staff:
return HttpResponse('')
else:
raise CustomTestException()
def get_view(request):
"A simple login protected view"
return HttpResponse("Hello world")
get_view = login_required(get_view)
def request_data(request, template='base.html', data='sausage'):
"A simple view that returns the request data in the context"
return render_to_response(template, {
'get-foo':request.GET.get('foo',None),
'get-bar':request.GET.get('bar',None),
'post-foo':request.POST.get('foo',None),
'post-bar':request.POST.get('bar',None),
'request-foo':request.REQUEST.get('foo',None),
'request-bar':request.REQUEST.get('bar',None),
'data': data,
})
def view_with_argument(request, name):
"""A view that takes a string argument
The purpose of this view is to check that if a space is provided in
the argument, the test framework unescapes the %20 before passing
the value to the view.
"""
if name == 'Arthur Dent':
return HttpResponse('Hi, Arthur')
else:
return HttpResponse('Howdy, %s' % name)
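# A minimal sketch of how the test client exercises this view (the URL below
# is assumed; the real pattern lives in the test project's urls.py):
#
#     response = self.client.get('/test_client_regress/arg_view/Arthur%20Dent/')
#     # the %20 is unescaped before the view runs, so this returns 'Hi, Arthur'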
def login_protected_redirect_view(request):
"A view that redirects all requests to the GET view"
return HttpResponseRedirect('/test_client_regress/get_view/')
login_protected_redirect_view = login_required(login_protected_redirect_view)
def set_session_view(request):
"A view that sets a session variable"
request.session['session_var'] = 'YES'
return HttpResponse('set_session')
def check_session_view(request):
"A view that reads a session variable"
return HttpResponse(request.session.get('session_var', 'NO'))
def request_methods_view(request):
"A view that responds with the request method"
return HttpResponse('request method: %s' % request.method)
def return_unicode(request):
return render_to_response('unicode.html')
def return_undecodable_binary(request):
return HttpResponse(
b'%PDF-1.4\r\n%\x93\x8c\x8b\x9e ReportLab Generated PDF document http://www.reportlab.com'
)
def return_json_file(request):
"A view that parses and returns a JSON string as a file."
match = CONTENT_TYPE_RE.match(request.META['CONTENT_TYPE'])
if match:
charset = match.group(1)
else:
charset = settings.DEFAULT_CHARSET
# This just checks that the uploaded data is JSON
obj_dict = json.loads(request.body.decode(charset))
obj_json = json.dumps(obj_dict, cls=DjangoJSONEncoder, ensure_ascii=False)
response = HttpResponse(obj_json.encode(charset), status=200,
content_type='application/json; charset=%s' % charset)
response['Content-Disposition'] = 'attachment; filename=testfile.json'
return response
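# A minimal sketch of driving return_json_file from the test client
# (hypothetical URL; the charset is parsed back out of CONTENT_TYPE above):
#
#     payload = json.dumps({'a': 1})
#     response = client.post('/return_json_file/', data=payload,
#                            content_type='application/json; charset=utf-8')
#     assert response['Content-Disposition'] == 'attachment; filename=testfile.json'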
def check_headers(request):
"A view that responds with value of the X-ARG-CHECK header"
return HttpResponse('HTTP_X_ARG_CHECK: %s' % request.META.get('HTTP_X_ARG_CHECK', 'Undefined'))
def body(request):
"A view that is requested with GET and accesses request.body. Refs #14753."
return HttpResponse(request.body)
def read_all(request):
"A view that is requested with accesses request.read()."
return HttpResponse(request.read())
def read_buffer(request):
"A view that is requested with accesses request.read(LARGE_BUFFER)."
return HttpResponse(request.read(99999))
def request_context_view(request):
# Special attribute that won't be present on a plain HttpRequest
request.special_path = request.path
return render_to_response('request_context.html', context_instance=RequestContext(request, {}))
| apache-2.0 | 5,072,012,168,673,717,000 | 36.465517 | 99 | 0.710999 | false |
slozier/ironpython2 | Src/StdLib/Lib/site-packages/win32/test/test_pywintypes.py | 2 | 3632 | import sys
import unittest
import pywintypes
import time
from pywin32_testutil import str2bytes, ob2memory
import datetime
import operator
class TestCase(unittest.TestCase):
def testPyTimeFormat(self):
struct_current = time.localtime()
pytime_current = pywintypes.Time(struct_current)
# try and test all the standard parts of the format
# Note we used to include '%Z' testing, but that was pretty useless as
# it always returned the local timezone.
format_strings = "%a %A %b %B %c %d %H %I %j %m %M %p %S %U %w %W %x %X %y %Y"
for fmt in format_strings.split():
v1 = pytime_current.Format(fmt)
v2 = time.strftime(fmt, struct_current)
self.assertEquals(v1, v2, "format %s failed - %r != %r" % (fmt, v1, v2))
def testPyTimePrint(self):
# This used to crash with an invalid, or too early time.
# We don't really want to check that it does cause a ValueError
        # (as hopefully this won't be true forever). So either working, or
# ValueError is OK.
try:
t = pywintypes.Time(-2)
t.Format()
except ValueError:
return
def testTimeInDict(self):
d = {}
d['t1'] = pywintypes.Time(1)
self.failUnlessEqual(d['t1'], pywintypes.Time(1))
def testPyTimeCompare(self):
t1 = pywintypes.Time(100)
t1_2 = pywintypes.Time(100)
t2 = pywintypes.Time(101)
self.failUnlessEqual(t1, t1_2)
self.failUnless(t1 <= t1_2)
self.failUnless(t1_2 >= t1)
self.failIfEqual(t1, t2)
self.failUnless(t1 < t2)
        self.failUnless(t2 > t1)
def testTimeTuple(self):
now = datetime.datetime.now() # has usec...
# timetuple() lost usec - pt must be <=...
pt = pywintypes.Time(now.timetuple())
# *sob* - only if we have a datetime object can we compare like this.
if isinstance(pt, datetime.datetime):
self.failUnless(pt <= now)
def testTimeTuplems(self):
now = datetime.datetime.now() # has usec...
tt = now.timetuple() + (now.microsecond // 1000,)
pt = pywintypes.Time(tt)
# we can't compare if using the old type, as it loses all sub-second res.
if isinstance(pt, datetime.datetime):
self.failUnlessEqual(now, pt)
def testPyTimeFromTime(self):
t1 = pywintypes.Time(time.time())
self.failUnless(pywintypes.Time(t1) is t1)
def testGUID(self):
s = "{00020400-0000-0000-C000-000000000046}"
iid = pywintypes.IID(s)
iid2 = pywintypes.IID(ob2memory(iid), True)
self.assertEquals(iid, iid2)
self.assertRaises(ValueError, pywintypes.IID, str2bytes('00'), True) # too short
self.assertRaises(TypeError, pywintypes.IID, 0, True) # no buffer
def testGUIDRichCmp(self):
s = "{00020400-0000-0000-C000-000000000046}"
iid = pywintypes.IID(s)
self.failIf(s==None)
self.failIf(None==s)
self.failUnless(s!=None)
self.failUnless(None!=s)
if sys.version_info > (3,0):
self.assertRaises(TypeError, operator.gt, None, s)
self.assertRaises(TypeError, operator.gt, s, None)
self.assertRaises(TypeError, operator.lt, None, s)
self.assertRaises(TypeError, operator.lt, s, None)
def testGUIDInDict(self):
s = "{00020400-0000-0000-C000-000000000046}"
iid = pywintypes.IID(s)
d = dict(item=iid)
self.failUnlessEqual(d['item'], iid)
if __name__ == '__main__':
unittest.main()
| apache-2.0 | -4,986,215,822,620,276,000 | 35.32 | 88 | 0.604075 | false |
JioCloud/nova | nova/api/openstack/compute/schemas/v3/admin_password.py | 111 | 1078 | # Copyright 2013 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api.validation import parameter_types
change_password = {
'type': 'object',
'properties': {
'changePassword': {
'type': 'object',
'properties': {
'adminPass': parameter_types.admin_password,
},
'required': ['adminPass'],
'additionalProperties': False,
},
},
'required': ['changePassword'],
'additionalProperties': False,
}
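# Minimal standalone validation sketch -- illustrative only. Nova applies this
# schema through its own @validation.schema decorator rather than by calling
# jsonschema directly; the direct calls below are an assumption for demo
# purposes and require the `jsonschema` package.
if __name__ == '__main__':
    import jsonschema
    # A well-formed request body passes silently.
    jsonschema.validate({'changePassword': {'adminPass': 'new-secret'}},
                        change_password)
    # An unknown key is rejected because additionalProperties is False.
    try:
        jsonschema.validate(
            {'changePassword': {'adminPass': 'x', 'bogus': 1}},
            change_password)
    except jsonschema.ValidationError as exc:
        print('rejected as expected: %s' % exc.message)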
| apache-2.0 | 1,453,528,010,883,358,000 | 32.6875 | 78 | 0.643785 | false |
ktnyt/chainer | chainer/distributions/gamma.py | 2 | 2638 | import chainer
from chainer.backends import cuda
from chainer import distribution
from chainer.functions.array import broadcast
from chainer.functions.array import where
from chainer.functions.math import digamma
from chainer.functions.math import exponential
from chainer.functions.math import lgamma
class Gamma(distribution.Distribution):
"""Gamma Distribution.
Args:
k(:class:`~chainer.Variable` or :ref:`ndarray`): Parameter of
distribution.
theta(:class:`~chainer.Variable` or :ref:`ndarray`): Parameter of
distribution.
"""
def __init__(self, k, theta):
super(Gamma, self).__init__()
self.__k = chainer.as_variable(k)
self.__theta = chainer.as_variable(theta)
@property
def k(self):
return self.__k
@property
def theta(self):
return self.__theta
@property
def batch_shape(self):
return self.k.shape
@property
def entropy(self):
return self.k + exponential.log(self.theta) + lgamma.lgamma(self.k) \
+ (1 - self.k) * digamma.digamma(self.k)
@property
def event_shape(self):
return ()
@property
def _is_gpu(self):
return isinstance(self.k.data, cuda.ndarray)
def log_prob(self, x):
logp = - lgamma.lgamma(self.k) - self.k * exponential.log(self.theta) \
+ (self.k - 1) * exponential.log(x) - x / self.theta
xp = logp.xp
inf = xp.full_like(logp.array, xp.inf)
if isinstance(x, chainer.Variable):
x = x.array
return where.where(xp.asarray(x >= 0), logp, xp.asarray(-inf))
@property
def mean(self):
return self.k * self.theta
def sample_n(self, n):
xp = cuda.get_array_module(self.k)
if xp is cuda.cupy:
eps = xp.random.gamma(
self.k.data, size=(n,) + self.batch_shape, dtype=self.k.dtype)
else:
eps = xp.random.gamma(
self.k.data, size=(n,) + self.batch_shape).astype(self.k.dtype)
noise = broadcast.broadcast_to(self.theta, eps.shape) * eps
return noise
@property
def support(self):
return 'positive'
@property
def variance(self):
return self.k * self.theta * self.theta
@distribution.register_kl(Gamma, Gamma)
def _kl_gamma_gamma(dist1, dist2):
return (dist1.k - dist2.k) * digamma.digamma(dist1.k) \
- (lgamma.lgamma(dist1.k) - lgamma.lgamma(dist2.k)) \
+ dist2.k\
* (exponential.log(dist2.theta) - exponential.log(dist1.theta)) \
+ dist1.k * (dist1.theta / dist2.theta - 1)
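# Usage sketch (illustrative; assumes chainer and numpy are installed).
# Shows construction, moments, CPU sampling, and log-density evaluation.
if __name__ == '__main__':
    import numpy
    k = numpy.array([2.0], dtype=numpy.float32)
    theta = numpy.array([1.5], dtype=numpy.float32)
    dist = Gamma(k, theta)
    print('mean:', dist.mean.array)          # k * theta -> [3.]
    print('variance:', dist.variance.array)  # k * theta^2 -> [4.5]
    samples = dist.sample_n(4)               # Variable of shape (4, 1)
    print('log p:', dist.log_prob(samples).array)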
| mit | -76,927,761,244,264,350 | 27.989011 | 79 | 0.60235 | false |
diegocortassa/TACTIC | src/context/client/tactic-api-python-4.0.api04/Tools/Scripts/ftpmirror.py | 10 | 13254 | #! /usr/bin/env python
"""Mirror a remote ftp subtree into a local directory tree.
usage: ftpmirror [-v] [-q] [-i] [-m] [-n] [-r] [-s pat]
[-l username [-p passwd [-a account]]]
hostname[:port] [remotedir [localdir]]
-v: verbose
-q: quiet
-i: interactive mode
-m: macintosh server (NCSA telnet 2.4) (implies -n -s '*.o')
-n: don't log in
-r: remove local files/directories no longer pertinent
-l username [-p passwd [-a account]]: login info (default .netrc or anonymous)
-s pat: skip files matching pattern
hostname: remote host w/ optional port separated by ':'
remotedir: remote directory (default initial)
localdir: local directory (default current)
"""
import os
import sys
import time
import getopt
import ftplib
import netrc
from fnmatch import fnmatch
# Print usage message and exit
def usage(*args):
sys.stdout = sys.stderr
for msg in args: print msg
print __doc__
sys.exit(2)
verbose = 1 # 0 for -q, 2 for -v
interactive = 0
mac = 0
rmok = 0
nologin = 0
skippats = ['.', '..', '.mirrorinfo']
# Main program: parse command line and start processing
def main():
global verbose, interactive, mac, rmok, nologin
try:
opts, args = getopt.getopt(sys.argv[1:], 'a:bil:mnp:qrs:v')
except getopt.error, msg:
usage(msg)
login = ''
passwd = ''
account = ''
if not args: usage('hostname missing')
host = args[0]
port = 0
if ':' in host:
host, port = host.split(':', 1)
port = int(port)
try:
auth = netrc.netrc().authenticators(host)
if auth is not None:
login, account, passwd = auth
except (netrc.NetrcParseError, IOError):
pass
for o, a in opts:
if o == '-l': login = a
if o == '-p': passwd = a
if o == '-a': account = a
if o == '-v': verbose = verbose + 1
if o == '-q': verbose = 0
if o == '-i': interactive = 1
if o == '-m': mac = 1; nologin = 1; skippats.append('*.o')
if o == '-n': nologin = 1
if o == '-r': rmok = 1
if o == '-s': skippats.append(a)
remotedir = ''
localdir = ''
if args[1:]:
remotedir = args[1]
if args[2:]:
localdir = args[2]
if args[3:]: usage('too many arguments')
#
f = ftplib.FTP()
if verbose: print "Connecting to '%s%s'..." % (host,
(port and ":%d"%port or ""))
f.connect(host,port)
if not nologin:
if verbose:
print 'Logging in as %r...' % (login or 'anonymous')
f.login(login, passwd, account)
if verbose: print 'OK.'
pwd = f.pwd()
if verbose > 1: print 'PWD =', repr(pwd)
if remotedir:
if verbose > 1: print 'cwd(%s)' % repr(remotedir)
f.cwd(remotedir)
if verbose > 1: print 'OK.'
pwd = f.pwd()
if verbose > 1: print 'PWD =', repr(pwd)
#
mirrorsubdir(f, localdir)
# Core logic: mirror one subdirectory (recursively)
def mirrorsubdir(f, localdir):
pwd = f.pwd()
if localdir and not os.path.isdir(localdir):
if verbose: print 'Creating local directory', repr(localdir)
try:
makedir(localdir)
except os.error, msg:
print "Failed to establish local directory", repr(localdir)
return
infofilename = os.path.join(localdir, '.mirrorinfo')
try:
text = open(infofilename, 'r').read()
except IOError, msg:
text = '{}'
try:
info = eval(text)
except (SyntaxError, NameError):
print 'Bad mirror info in', repr(infofilename)
info = {}
subdirs = []
listing = []
if verbose: print 'Listing remote directory %r...' % (pwd,)
f.retrlines('LIST', listing.append)
filesfound = []
for line in listing:
if verbose > 1: print '-->', repr(line)
if mac:
# Mac listing has just filenames;
# trailing / means subdirectory
filename = line.strip()
mode = '-'
if filename[-1:] == '/':
filename = filename[:-1]
mode = 'd'
infostuff = ''
else:
# Parse, assuming a UNIX listing
words = line.split(None, 8)
if len(words) < 6:
if verbose > 1: print 'Skipping short line'
continue
filename = words[-1].lstrip()
i = filename.find(" -> ")
if i >= 0:
# words[0] had better start with 'l'...
if verbose > 1:
print 'Found symbolic link %r' % (filename,)
linkto = filename[i+4:]
filename = filename[:i]
infostuff = words[-5:-1]
mode = words[0]
skip = 0
for pat in skippats:
if fnmatch(filename, pat):
if verbose > 1:
print 'Skip pattern', repr(pat),
print 'matches', repr(filename)
skip = 1
break
if skip:
continue
if mode[0] == 'd':
if verbose > 1:
print 'Remembering subdirectory', repr(filename)
subdirs.append(filename)
continue
filesfound.append(filename)
if info.has_key(filename) and info[filename] == infostuff:
if verbose > 1:
print 'Already have this version of',repr(filename)
continue
fullname = os.path.join(localdir, filename)
tempname = os.path.join(localdir, '@'+filename)
if interactive:
doit = askabout('file', filename, pwd)
if not doit:
if not info.has_key(filename):
info[filename] = 'Not retrieved'
continue
try:
os.unlink(tempname)
except os.error:
pass
if mode[0] == 'l':
if verbose:
print "Creating symlink %r -> %r" % (filename, linkto)
try:
os.symlink(linkto, tempname)
except IOError, msg:
print "Can't create %r: %s" % (tempname, msg)
continue
else:
try:
fp = open(tempname, 'wb')
except IOError, msg:
print "Can't create %r: %s" % (tempname, msg)
continue
if verbose:
print 'Retrieving %r from %r as %r...' % (filename, pwd, fullname)
if verbose:
fp1 = LoggingFile(fp, 1024, sys.stdout)
else:
fp1 = fp
t0 = time.time()
try:
f.retrbinary('RETR ' + filename,
fp1.write, 8*1024)
except ftplib.error_perm, msg:
print msg
t1 = time.time()
bytes = fp.tell()
fp.close()
if fp1 != fp:
fp1.close()
try:
os.unlink(fullname)
except os.error:
pass # Ignore the error
try:
os.rename(tempname, fullname)
except os.error, msg:
print "Can't rename %r to %r: %s" % (tempname, fullname, msg)
continue
info[filename] = infostuff
writedict(info, infofilename)
if verbose and mode[0] != 'l':
dt = t1 - t0
kbytes = bytes / 1024.0
print int(round(kbytes)),
print 'Kbytes in',
print int(round(dt)),
print 'seconds',
if t1 > t0:
print '(~%d Kbytes/sec)' % \
                      int(round(kbytes/dt))
print
#
# Remove files from info that are no longer remote
deletions = 0
for filename in info.keys():
if filename not in filesfound:
if verbose:
print "Removing obsolete info entry for",
print repr(filename), "in", repr(localdir or ".")
del info[filename]
deletions = deletions + 1
if deletions:
writedict(info, infofilename)
#
# Remove local files that are no longer in the remote directory
try:
if not localdir: names = os.listdir(os.curdir)
else: names = os.listdir(localdir)
except os.error:
names = []
for name in names:
if name[0] == '.' or info.has_key(name) or name in subdirs:
continue
skip = 0
for pat in skippats:
if fnmatch(name, pat):
if verbose > 1:
print 'Skip pattern', repr(pat),
print 'matches', repr(name)
skip = 1
break
if skip:
continue
fullname = os.path.join(localdir, name)
if not rmok:
if verbose:
print 'Local file', repr(fullname),
print 'is no longer pertinent'
continue
if verbose: print 'Removing local file/dir', repr(fullname)
remove(fullname)
#
# Recursively mirror subdirectories
for subdir in subdirs:
if interactive:
doit = askabout('subdirectory', subdir, pwd)
if not doit: continue
if verbose: print 'Processing subdirectory', repr(subdir)
localsubdir = os.path.join(localdir, subdir)
pwd = f.pwd()
if verbose > 1:
print 'Remote directory now:', repr(pwd)
print 'Remote cwd', repr(subdir)
try:
f.cwd(subdir)
except ftplib.error_perm, msg:
print "Can't chdir to", repr(subdir), ":", repr(msg)
else:
if verbose: print 'Mirroring as', repr(localsubdir)
mirrorsubdir(f, localsubdir)
if verbose > 1: print 'Remote cwd ..'
f.cwd('..')
newpwd = f.pwd()
if newpwd != pwd:
print 'Ended up in wrong directory after cd + cd ..'
print 'Giving up now.'
break
else:
if verbose > 1: print 'OK.'
# Helper to remove a file or directory tree
def remove(fullname):
if os.path.isdir(fullname) and not os.path.islink(fullname):
try:
names = os.listdir(fullname)
except os.error:
names = []
ok = 1
for name in names:
if not remove(os.path.join(fullname, name)):
ok = 0
if not ok:
return 0
try:
os.rmdir(fullname)
except os.error, msg:
print "Can't remove local directory %r: %s" % (fullname, msg)
return 0
else:
try:
os.unlink(fullname)
except os.error, msg:
print "Can't remove local file %r: %s" % (fullname, msg)
return 0
return 1
# Wrapper around a file for writing to write a hash sign every block.
class LoggingFile:
def __init__(self, fp, blocksize, outfp):
self.fp = fp
self.bytes = 0
self.hashes = 0
self.blocksize = blocksize
self.outfp = outfp
def write(self, data):
self.bytes = self.bytes + len(data)
hashes = int(self.bytes) / self.blocksize
while hashes > self.hashes:
self.outfp.write('#')
self.outfp.flush()
self.hashes = self.hashes + 1
self.fp.write(data)
def close(self):
self.outfp.write('\n')
# Ask permission to download a file.
def askabout(filetype, filename, pwd):
prompt = 'Retrieve %s %s from %s ? [ny] ' % (filetype, filename, pwd)
while 1:
reply = raw_input(prompt).strip().lower()
if reply in ['y', 'ye', 'yes']:
return 1
if reply in ['', 'n', 'no', 'nop', 'nope']:
return 0
print 'Please answer yes or no.'
# Create a directory if it doesn't exist. Recursively create the
# parent directory as well if needed.
def makedir(pathname):
if os.path.isdir(pathname):
return
dirname = os.path.dirname(pathname)
if dirname: makedir(dirname)
os.mkdir(pathname, 0777)
# Write a dictionary to a file in a way that can be read back using
# rval() but is still somewhat readable (i.e. not a single long line).
# Also creates a backup file.
def writedict(dict, filename):
dir, fname = os.path.split(filename)
tempname = os.path.join(dir, '@' + fname)
backup = os.path.join(dir, fname + '~')
try:
os.unlink(backup)
except os.error:
pass
fp = open(tempname, 'w')
fp.write('{\n')
for key, value in dict.items():
fp.write('%r: %r,\n' % (key, value))
fp.write('}\n')
fp.close()
try:
os.rename(filename, backup)
except os.error:
pass
os.rename(tempname, filename)
if __name__ == '__main__':
main()
| epl-1.0 | -6,241,551,697,822,772,000 | 31.135 | 82 | 0.501886 | false |
jupyter/jupyterlab | conftest.py | 4 | 1210 | # -*- coding: utf-8 -*-
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import pytest
pytest_plugins = [
"jupyter_server.pytest_plugin",
"jupyterlab_server.pytest_plugin",
"jupyterlab.pytest_plugin"
]
def pytest_addoption(parser):
"""
Adds flags for py.test.
    This is called by the pytest API.
"""
group = parser.getgroup("general")
group.addoption('--quick', action='store_true',
help="Skip slow tests")
group.addoption('--slow', action='store_true',
help="Run only slow tests")
def pytest_configure(config):
config.addinivalue_line("markers", "slow: mark test as slow to run")
def pytest_collection_modifyitems(config, items):
if config.getoption("--quick"):
skip_slow = pytest.mark.skip(reason="skipping slow test")
for item in items:
if "slow" in item.keywords:
item.add_marker(skip_slow)
elif config.getoption("--slow"):
skip_quick = pytest.mark.skip(reason="skipping non-slow test")
for item in items:
if "slow" not in item.keywords:
item.add_marker(skip_quick)
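# Example invocations (illustrative):
#   pytest --quick   # skip tests marked with @pytest.mark.slow
#   pytest --slow    # run only tests marked with @pytest.mark.slow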
| bsd-3-clause | 3,665,209,046,421,171,000 | 27.139535 | 72 | 0.62314 | false |
TEAM-Gummy/platform_external_chromium_org | third_party/closure_linter/closure_linter/common/matcher.py | 284 | 2158 | #!/usr/bin/env python
#
# Copyright 2007 The Closure Linter Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Regular expression based JavaScript matcher classes."""
__author__ = ('[email protected] (Robert Walker)',
'[email protected] (Andy Perelson)')
from closure_linter.common import position
from closure_linter.common import tokens
# Shorthand
Token = tokens.Token
Position = position.Position
class Matcher(object):
"""A token matcher.
Specifies a pattern to match, the type of token it represents, what mode the
token changes to, and what mode the token applies to.
Modes allow more advanced grammars to be incorporated, and are also necessary
to tokenize line by line. We can have different patterns apply to different
modes - i.e. looking for documentation while in comment mode.
Attributes:
regex: The regular expression representing this matcher.
type: The type of token indicated by a successful match.
result_mode: The mode to move to after a successful match.
"""
def __init__(self, regex, token_type, result_mode=None, line_start=False):
"""Create a new matcher template.
Args:
regex: The regular expression to match.
token_type: The type of token a successful match indicates.
result_mode: What mode to change to after a successful match. Defaults to
None, which means to not change the current mode.
line_start: Whether this matcher should only match string at the start
of a line.
"""
self.regex = regex
self.type = token_type
self.result_mode = result_mode
self.line_start = line_start
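# Minimal usage sketch (illustrative). 'LINE_COMMENT' is a placeholder token
# type; closure_linter's tokenizers pass their own token-type constants.
if __name__ == '__main__':
  import re
  comment_matcher = Matcher(re.compile(r'//.*'), 'LINE_COMMENT')
  match = comment_matcher.regex.search('var x = 1; // trailing comment')
  if match:
    print('%s: %r' % (comment_matcher.type, match.group()))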
| bsd-3-clause | 2,526,134,208,970,661,000 | 34.966667 | 80 | 0.726599 | false |
PrasannaVenkadesh/portia | slyd/tests/test_repoman.py | 14 | 9855 | import unittest
from tempfile import mkdtemp
from os.path import join
from shutil import rmtree
from json import dumps, loads
import copy
from .settings import SPEC_DATA_DIR
from slyd.gitstorage.repoman import Repoman
def j(json):
return dumps(json, sort_keys=True, indent=4)
class RepomanTest(unittest.TestCase):
def setUp(self):
self.temp_repos_dir = mkdtemp(dir=SPEC_DATA_DIR,
prefix='test-run-')
Repoman.setup(
storage_backend='dulwich.fsrepo.FsRepo',
location=self.temp_repos_dir
)
def tearDown(self):
rmtree(self.temp_repos_dir)
def get_full_name(self, repo_name):
return join(self.temp_repos_dir, repo_name)
def test_create(self):
Repoman.create_repo(self.get_full_name('my_repo'))
self.assertTrue(Repoman.repo_exists(self.get_full_name('my_repo')))
def test_save_file(self):
repoman = Repoman.create_repo(self.get_full_name('my_repo'))
contents = j({'a': 1})
repoman.save_file('f1', contents, 'testbranch')
self.assertEqual(['f1'], repoman.list_files_for_branch('testbranch'))
self.assertEqual(
contents, repoman.file_contents_for_branch('f1', 'testbranch'))
def test_delete_file(self):
repoman = Repoman.create_repo(self.get_full_name('my_repo'))
contents = j({'a': 1})
repoman.save_file('f1', contents, 'testbranch')
repoman.delete_file('f1', 'testbranch')
self.assertEqual([], repoman.list_files_for_branch('testbranch'))
def test_branch_ops(self):
repoman = Repoman.create_repo(self.get_full_name('my_repo'))
repoman.create_branch('b1')
self.assertTrue(repoman.has_branch('b1'))
self.assertEqual(len(repoman.get_branch('b1')), 40)
repoman.delete_branch('b1')
self.assertFalse(repoman.has_branch('b1'))
def test_simple_publish(self):
repoman = Repoman.create_repo(self.get_full_name('my_repo'))
f1, f2, f3 = j({'a': 1}), j({'b': 2}), j({'c': 3})
repoman.save_file('f1', f1, 'b1')
repoman.save_file('f2', f2, 'b1')
repoman.save_file('x/f3', f3, 'b1')
repoman.save_file('f4', '{}', 'b1')
repoman.delete_file('f4', 'b1')
self.assertTrue(repoman.has_branch('b1'))
self.assertTrue(repoman.has_branch('master'))
self.assertEqual([], repoman.list_files_for_branch('master'))
self.assertTrue(repoman.publish_branch('b1'))
self.assertItemsEqual(['f1', 'f2', 'x/f3'],
repoman.list_files_for_branch('master'))
self.assertEqual([f1, f2, f3],
[repoman.file_contents_for_branch(x, 'b1')
for x in ('f1', 'f2', 'x/f3')])
self.assertEqual([f1, f2, f3],
[repoman.file_contents_for_branch(x, 'master')
for x in ('f1', 'f2', 'x/f3')])
# Only one published revision
self.assertEqual(len(repoman.get_published_revisions()), 1)
# 6 checkpoints, 1 per operation (5) + 1 for the original state.
self.assertEqual(len(repoman.get_branch_checkpoints('b1')), 6)
def test_sequential_publishes(self):
repoman = Repoman.create_repo(self.get_full_name('my_repo'))
f1, f2 = j({'a': 1}), j({'b': 2})
repoman.save_file('f1', f1, 'b1')
repoman.save_file('x/f2', f2, 'b1')
repoman.publish_branch('b1')
repoman.delete_branch('b1')
# f1 is modified in branch b2
f1 = j({'a': 3})
repoman.save_file('f1', f1, 'b2')
self.assertTrue(repoman.publish_branch('b2'))
self.assertEqual([f1, f2],
[repoman.file_contents_for_branch(x, 'master')
for x in ('f1', 'x/f2')])
self.assertEqual(len(repoman.get_published_revisions()), 2)
def test_two_interleaved_publishes_1(self):
repoman = Repoman.create_repo(self.get_full_name('my_repo'))
f1, f2 = j({'a': 1}), j({'b': 2})
repoman.save_file('f1', f1, 'b1')
repoman.save_file('x/f2', f2, 'b1')
# branch b2 modifies the same files concurrently
f1, f2 = j({'c': 3}), j({'d': 4})
repoman.save_file('f1', f1, 'b2')
repoman.save_file('x/f2', f2, 'b2')
# both publish their changes, but the automerge should solve conflicts
self.assertTrue(repoman.publish_branch('b1'))
self.assertTrue(repoman.publish_branch('b2'))
self.assertEqual(j({'a': 1, 'c': 3}),
repoman.file_contents_for_branch('f1', 'master'))
self.assertEqual(j({'b': 2, 'd': 4}),
repoman.file_contents_for_branch('x/f2', 'master'))
self.assertEqual(len(repoman.get_published_revisions()), 2)
def test_two_interleaved_publishes_2(self):
repoman = Repoman.create_repo(self.get_full_name('my_repo'))
f1 = j({'a': 1, 'c': 3})
repoman.save_file('f1', f1, 'b1')
self.assertTrue(repoman.publish_branch('b1'))
repoman.delete_branch('b1')
# b1 adds x/f2.
f2 = j({'b': 2})
repoman.save_file('x/f2', f2, 'b1')
# branch b2 adds a file with the same name but different content
f2 = j({'a': 2, 'c': {'d': 1}})
repoman.save_file('x/f2', f2, 'b2')
repoman.delete_file('f1', 'b2')
# both publish their changes, but the automerge should solve conflicts
self.assertTrue(repoman.publish_branch('b1'))
self.assertTrue(repoman.publish_branch('b2'))
self.assertEqual(j({'a': 2, 'b': 2, 'c': {'d': 1}}),
repoman.file_contents_for_branch('x/f2', 'master'))
self.assertEqual(len(repoman.get_published_revisions()), 3)
@unittest.skip('Broken, TODO check') # TODO
def test_two_interleaved_publishes_3(self):
repoman = Repoman.create_repo(self.get_full_name('my_repo'))
f1 = j({'a': 1, 'c': 3, 'd': 4, 'e': 5})
repoman.save_file('f1', f1, 'b1')
self.assertTrue(repoman.publish_branch('b1'))
repoman.delete_branch('b1')
# b1 heavily edits f1
repoman.save_file('f1', j({'b': 2, 'e': 5}), 'b1')
# this case is VERY tricky. branch 2 renames f1 to f2 and changes
# it a bit. The merge algorithm detects the rename and the merged
# output ends up containing all b1 changes + all b2 changes, and the
# file is stored under the name given by branch2
repoman.delete_file('f1', 'b2')
repoman.save_file('f2', j({'a': 1, 'c': 3, 'd': 4, 'e': 6}), 'b2')
# both publish their changes, but the automerge should solve conflicts
self.assertTrue(repoman.publish_branch('b1'))
self.assertTrue(repoman.publish_branch('b2'))
self.assertEqual(j({'b': 2, 'e': 6}),
repoman.file_contents_for_branch('f2', 'master'))
self.assertEqual(len(repoman.get_published_revisions()), 3)
def test_modify_delete(self):
# Although this is usually treated as a conflict, here we just keep the
# modified version and ignore the delete.
repoman = Repoman.create_repo(self.get_full_name('my_repo'))
repoman.save_file('f1', j({'a': 1}), 'b1')
self.assertTrue(repoman.publish_branch('b1'))
repoman.delete_branch('b1')
# b1 deletes f1 and b2 modifies it.
repoman.delete_file('f1', 'b1')
repoman.save_file('f1', j({'a': 2, 'c': 3}), 'b2')
self.assertTrue(repoman.publish_branch('b1'))
self.assertTrue(repoman.publish_branch('b2'))
# master has f1.
self.assertEqual(['f1'], repoman.list_files_for_branch('master'))
self.assertEqual(j({'a': 2, 'c': 3}),
repoman.file_contents_for_branch('f1', 'master'))
def test_unresolved_conflicts_both_modify(self):
repoman = Repoman.create_repo(self.get_full_name('my_repo'))
repoman.save_file('f1', j({'a': 1}), 'b1')
self.assertTrue(repoman.publish_branch('b1'))
repoman.delete_branch('b1')
# both branches update the same key of the same file with different
# values. This conflict must be manually resolved
repoman.save_file('f1', j({'a': 2}), 'b1')
repoman.save_file('f1', j({'a': 3}), 'b2')
self.assertTrue(repoman.publish_branch('b1'))
self.assertFalse(repoman.publish_branch('b2'))
# the file appears as published by b1 in the master branch
self.assertEqual(j({'a': 2}),
repoman.file_contents_for_branch('f1', 'master'))
# the file in b2 has an unresolved conflict
self.assertIn('__CONFLICT',
j(repoman.file_contents_for_branch('f1', 'b2')))
# b2 solves the conflict, saves again and forces the publish
repoman.save_file('f1', j({'a': 3}), 'b2')
self.assertTrue(repoman.publish_branch('b2', force=True))
self.assertEqual(j({'a': 3}),
repoman.file_contents_for_branch('f1', 'master'))
def test_unresolved_conflicts_both_add(self):
repoman = Repoman.create_repo(self.get_full_name('my_repo'))
# both add the same file with a conflicting key
repoman.save_file('f1', j({'a': 1}), 'b1')
repoman.save_file('f1', j({'a': 2}), 'b2')
self.assertTrue(repoman.publish_branch('b1'))
self.assertFalse(repoman.publish_branch('b2'))
# the file appears as published by b1 in the master branch
self.assertEqual(j({'a': 1}),
repoman.file_contents_for_branch('f1', 'master'))
# the file in b2 has an unresolved conflict
self.assertIn('__CONFLICT',
j(repoman.file_contents_for_branch('f1', 'b2')))
| bsd-3-clause | 2,509,798,241,879,947,000 | 44.206422 | 79 | 0.577981 | false |
StepicOrg/Stepic-API | examples/get_courses_by_params.py | 1 | 2154 | import json
import requests
def get_token():
client_id = "..."
client_secret = "..."
auth = requests.auth.HTTPBasicAuth(client_id, client_secret)
resp = requests.post('https://stepik.org/oauth2/token/',
data={'grant_type': 'client_credentials'},
auth=auth)
token = json.loads(resp.text)['access_token']
return token
def get_data(pageNum):
    api_url = 'https://stepik.org/api/courses?page={}'.format(pageNum)
    # NOTE: this requests a fresh OAuth token for every page; acceptable for a
    # short demo script, but a real client should reuse the token.
    course = json.loads(requests.get(api_url, headers={'Authorization': 'Bearer ' + get_token()}).text)
return course
def get_chosen_courses(amountOfUnits, courseLang, amountOfDiscuss):
pageNum = 0
hasNextPage = True
listOfChoices = []
while hasNextPage:
try:
pageNum += 1
pageContent = get_data(pageNum)
hasNextPage = pageContent['meta']['has_next']
courses = pageContent['courses']
for course in courses: # Select only active courses (courses with active session)
if ((course['total_units']) > amountOfUnits and (course['language'] == courseLang)
and (course['is_active'] == True) and (course['discussions_count'] > amountOfDiscuss)):
listOfChoices.append({
'course_name': course['slug'],
'amount_of_units': course['total_units'],
'language': course['language'],
'create_date': course['create_date'],
'discussions_count': course['discussions_count']
})
        except Exception as exc:
            print("Error while fetching page {}: {}".format(pageNum, exc))
            break  # stop paging instead of retrying the same page forever
print(listOfChoices)
def main():
# Choose values of parameters for a course choice
# Example:
amountOfUnits = 5 # amount of units in a course
courseLang = 'ru' # language of the chosen course
    amountOfDiscuss = 30  # number of discussions in a course (as an indicator of popularity)
get_chosen_courses(amountOfUnits, courseLang, amountOfDiscuss)
if __name__ == '__main__':
    main()
| mit | -4,621,145,685,091,097,000 | 35.789474 | 107 | 0.572423 | false |
ajaali/django | django/core/handlers/base.py | 234 | 13346 | from __future__ import unicode_literals
import logging
import sys
import types
import warnings
from django import http
from django.conf import settings
from django.core import signals, urlresolvers
from django.core.exceptions import (
MiddlewareNotUsed, PermissionDenied, SuspiciousOperation,
)
from django.db import connections, transaction
from django.http.multipartparser import MultiPartParserError
from django.utils import six
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.encoding import force_text
from django.utils.module_loading import import_string
from django.views import debug
logger = logging.getLogger('django.request')
class BaseHandler(object):
# Changes that are always applied to a response (in this order).
response_fixes = [
http.conditional_content_removal,
]
def __init__(self):
self._request_middleware = None
self._view_middleware = None
self._template_response_middleware = None
self._response_middleware = None
self._exception_middleware = None
def load_middleware(self):
"""
Populate middleware lists from settings.MIDDLEWARE_CLASSES.
Must be called after the environment is fixed (see __call__ in subclasses).
"""
self._view_middleware = []
self._template_response_middleware = []
self._response_middleware = []
self._exception_middleware = []
request_middleware = []
for middleware_path in settings.MIDDLEWARE_CLASSES:
mw_class = import_string(middleware_path)
try:
mw_instance = mw_class()
except MiddlewareNotUsed as exc:
if settings.DEBUG:
if six.text_type(exc):
logger.debug('MiddlewareNotUsed(%r): %s', middleware_path, exc)
else:
logger.debug('MiddlewareNotUsed: %r', middleware_path)
continue
if hasattr(mw_instance, 'process_request'):
request_middleware.append(mw_instance.process_request)
if hasattr(mw_instance, 'process_view'):
self._view_middleware.append(mw_instance.process_view)
if hasattr(mw_instance, 'process_template_response'):
self._template_response_middleware.insert(0, mw_instance.process_template_response)
if hasattr(mw_instance, 'process_response'):
self._response_middleware.insert(0, mw_instance.process_response)
if hasattr(mw_instance, 'process_exception'):
self._exception_middleware.insert(0, mw_instance.process_exception)
# We only assign to this when initialization is complete as it is used
# as a flag for initialization being complete.
self._request_middleware = request_middleware
def make_view_atomic(self, view):
non_atomic_requests = getattr(view, '_non_atomic_requests', set())
for db in connections.all():
if (db.settings_dict['ATOMIC_REQUESTS']
and db.alias not in non_atomic_requests):
view = transaction.atomic(using=db.alias)(view)
return view
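    # Example: a view opts out of the per-request transaction behaviour with
    # django.db.transaction.non_atomic_requests (illustrative):
    #
    #   from django.db import transaction
    #
    #   @transaction.non_atomic_requests
    #   def my_view(request):
    #       ...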
def get_exception_response(self, request, resolver, status_code, exception):
try:
callback, param_dict = resolver.resolve_error_handler(status_code)
# Unfortunately, inspect.getargspec result is not trustable enough
# depending on the callback wrapping in decorators (frequent for handlers).
# Falling back on try/except:
try:
response = callback(request, **dict(param_dict, exception=exception))
except TypeError:
warnings.warn(
"Error handlers should accept an exception parameter. Update "
"your code as this parameter will be required in Django 2.0",
RemovedInDjango20Warning, stacklevel=2
)
response = callback(request, **param_dict)
except:
signals.got_request_exception.send(sender=self.__class__, request=request)
response = self.handle_uncaught_exception(request, resolver, sys.exc_info())
return response
def get_response(self, request):
"Returns an HttpResponse object for the given HttpRequest"
# Setup default url resolver for this thread, this code is outside
# the try/except so we don't get a spurious "unbound local
# variable" exception in the event an exception is raised before
# resolver is set
urlconf = settings.ROOT_URLCONF
urlresolvers.set_urlconf(urlconf)
resolver = urlresolvers.get_resolver(urlconf)
# Use a flag to check if the response was rendered to prevent
# multiple renderings or to force rendering if necessary.
response_is_rendered = False
try:
response = None
# Apply request middleware
for middleware_method in self._request_middleware:
response = middleware_method(request)
if response:
break
if response is None:
if hasattr(request, 'urlconf'):
# Reset url resolver with a custom urlconf.
urlconf = request.urlconf
urlresolvers.set_urlconf(urlconf)
resolver = urlresolvers.get_resolver(urlconf)
resolver_match = resolver.resolve(request.path_info)
callback, callback_args, callback_kwargs = resolver_match
request.resolver_match = resolver_match
# Apply view middleware
for middleware_method in self._view_middleware:
response = middleware_method(request, callback, callback_args, callback_kwargs)
if response:
break
if response is None:
wrapped_callback = self.make_view_atomic(callback)
try:
response = wrapped_callback(request, *callback_args, **callback_kwargs)
except Exception as e:
response = self.process_exception_by_middleware(e, request)
# Complain if the view returned None (a common error).
if response is None:
if isinstance(callback, types.FunctionType): # FBV
view_name = callback.__name__
else: # CBV
view_name = callback.__class__.__name__ + '.__call__'
raise ValueError("The view %s.%s didn't return an HttpResponse object. It returned None instead."
% (callback.__module__, view_name))
# If the response supports deferred rendering, apply template
# response middleware and then render the response
if hasattr(response, 'render') and callable(response.render):
for middleware_method in self._template_response_middleware:
response = middleware_method(request, response)
# Complain if the template response middleware returned None (a common error).
if response is None:
raise ValueError(
"%s.process_template_response didn't return an "
"HttpResponse object. It returned None instead."
% (middleware_method.__self__.__class__.__name__))
try:
response = response.render()
except Exception as e:
response = self.process_exception_by_middleware(e, request)
response_is_rendered = True
except http.Http404 as exc:
logger.warning('Not Found: %s', request.path,
extra={
'status_code': 404,
'request': request
})
if settings.DEBUG:
response = debug.technical_404_response(request, exc)
else:
response = self.get_exception_response(request, resolver, 404, exc)
except PermissionDenied as exc:
logger.warning(
'Forbidden (Permission denied): %s', request.path,
extra={
'status_code': 403,
'request': request
})
response = self.get_exception_response(request, resolver, 403, exc)
except MultiPartParserError as exc:
logger.warning(
'Bad request (Unable to parse request body): %s', request.path,
extra={
'status_code': 400,
'request': request
})
response = self.get_exception_response(request, resolver, 400, exc)
except SuspiciousOperation as exc:
# The request logger receives events for any problematic request
# The security logger receives events for all SuspiciousOperations
security_logger = logging.getLogger('django.security.%s' %
exc.__class__.__name__)
security_logger.error(
force_text(exc),
extra={
'status_code': 400,
'request': request
})
if settings.DEBUG:
return debug.technical_500_response(request, *sys.exc_info(), status_code=400)
response = self.get_exception_response(request, resolver, 400, exc)
except SystemExit:
# Allow sys.exit() to actually exit. See tickets #1023 and #4701
raise
except: # Handle everything else.
# Get the exception info now, in case another exception is thrown later.
signals.got_request_exception.send(sender=self.__class__, request=request)
response = self.handle_uncaught_exception(request, resolver, sys.exc_info())
try:
# Apply response middleware, regardless of the response
for middleware_method in self._response_middleware:
response = middleware_method(request, response)
# Complain if the response middleware returned None (a common error).
if response is None:
raise ValueError(
"%s.process_response didn't return an "
"HttpResponse object. It returned None instead."
% (middleware_method.__self__.__class__.__name__))
response = self.apply_response_fixes(request, response)
except: # Any exception should be gathered and handled
signals.got_request_exception.send(sender=self.__class__, request=request)
response = self.handle_uncaught_exception(request, resolver, sys.exc_info())
response._closable_objects.append(request)
# If the exception handler returns a TemplateResponse that has not
# been rendered, force it to be rendered.
if not response_is_rendered and callable(getattr(response, 'render', None)):
response = response.render()
return response
def process_exception_by_middleware(self, exception, request):
"""
Pass the exception to the exception middleware. If no middleware
return a response for this exception, raise it.
"""
for middleware_method in self._exception_middleware:
response = middleware_method(request, exception)
if response:
return response
raise
def handle_uncaught_exception(self, request, resolver, exc_info):
"""
Processing for any otherwise uncaught exceptions (those that will
generate HTTP 500 responses). Can be overridden by subclasses who want
customised 500 handling.
Be *very* careful when overriding this because the error could be
caused by anything, so assuming something like the database is always
available would be an error.
"""
if settings.DEBUG_PROPAGATE_EXCEPTIONS:
raise
logger.error('Internal Server Error: %s', request.path,
exc_info=exc_info,
extra={
'status_code': 500,
'request': request
}
)
if settings.DEBUG:
return debug.technical_500_response(request, *exc_info)
# If Http500 handler is not installed, re-raise last exception
if resolver.urlconf_module is None:
six.reraise(*exc_info)
# Return an HttpResponse that displays a friendly error message.
callback, param_dict = resolver.resolve_error_handler(500)
return callback(request, **param_dict)
def apply_response_fixes(self, request, response):
"""
Applies each of the functions in self.response_fixes to the request and
response, modifying the response in the process. Returns the new
response.
"""
for func in self.response_fixes:
response = func(request, response)
return response
| bsd-3-clause | 6,913,302,826,365,807,000 | 42.614379 | 113 | 0.589915 | false |
hackendless/heekscnc | roughing_funcs.py | 24 | 13251 | import kurve
import area
from nc.nc import *
import math
# roughing_funcs.py - intended to be used for lathe roughing
# adapted from area_funcs.py and turning.py
# and possibly for roughing a profile, approaching the part from the side
# some globals, to save passing variables as parameters too much
area_for_feed_possible = None
tool_radius_for_pocket = None
def make_area_for_roughing(k):
num_spans = kurve.num_spans(k)
if num_spans == 0:
raise "sketch has no spans!"
d, startx, starty, ex, ey, cx, cy = kurve.get_span(k, 0)
d, sx, sy, endx, endy, cx, cy = kurve.get_span(k, num_spans - 1)
a = area.Area()
c = area.Curve()
    largey = 7  # arbitrary Y value used to close the open profile into an area
for span in range(0, num_spans):
d, sx, sy, ex, ey, cx, cy = kurve.get_span(k, span)
if span == 0:# first span
c.append(area.Vertex(0, area.Point(startx, largey), area.Point(0, 0)))
c.append(area.Vertex(d, area.Point(ex, ey), area.Point(cx, cy)))
# close the area
c.append(area.Vertex(0, area.Point(endx, largey), area.Point(0, 0)))
c.append(area.Vertex(0, area.Point(startx, largey), area.Point(0, 0)))
a.append(c)
return a
def cut_curve(curve, need_rapid, p, rapid_down_to_height, final_depth):
prev_p = p
first = True
for vertex in curve.getVertices():
if need_rapid and first:
# rapid across
rapid(vertex.p.x, vertex.p.y)
##rapid down
rapid(z = rapid_down_to_height)
#feed down
feed(z = final_depth)
#x_first=vertex.p.x;y_first=vertex.p.y
first = False
else:
dc = vertex.c - prev_p
if vertex.type == 1:
arc_ccw(vertex.p.x, vertex.p.y, i = dc.x, j = dc.y)
elif vertex.type == -1:
arc_cw(vertex.p.x, vertex.p.y, i = dc.x, j = dc.y)
else:
feed(vertex.p.x, vertex.p.y)
#rapid(x_first,y_first)
#rapid(x_first)
#rapid(vertex.p.y)
#x_first=vertex.p.x;y_first=vertex.p.y
#rapid(x=(vertex.p.x+1))
prev_p = vertex.p
return prev_p
def cut_curve_lathe(curve, need_rapid, p, rapid_down_to_height, final_depth):
prev_p = p
first = True
l = []
feed(z=0)
for vertex in curve.getVertices():
if need_rapid and first:
# rapid across
rapid(vertex.p.x, vertex.p.y)
first = False
l.append((vertex.p.x,vertex.p.y))
feed(x=l[0][0])
feed(y=l[0][1])
feed(x=l[1][0])
    # pull the tool away from the profile at a 45-degree angle, back towards Y+ and the X start point
rapid(x=(l[1][0]+(l[2][1]-l[0][1])),y=l[2][1])
rapid(x=l[3][0])
rapid(y=l[0][1])
prev_p = vertex.p
return prev_p
def area_distance(a, old_area):
best_dist = None
for curve in a.getCurves():
for vertex in curve.getVertices():
c = old_area.NearestPoint(vertex.p)
d = c.dist(vertex.p)
if best_dist == None or d < best_dist:
best_dist = d
for curve in old_area.getCurves():
for vertex in curve.getVertices():
c = a.NearestPoint(vertex.p)
d = c.dist(vertex.p)
if best_dist == None or d < best_dist:
best_dist = d
return best_dist
def make_obround(p0, p1, radius):
    # Build a stadium-shaped (obround) area: the region swept by a circle of
    # the given radius moving along the segment from p0 to p1. Used by
    # feed_possible() to check whether the tool can feed between two points.
dir = p1 - p0
d = dir.length()
dir.normalize()
right = area.Point(dir.y, -dir.x)
obround = area.Area()
c = area.Curve()
vt0 = p0 + right * radius
vt1 = p1 + right * radius
vt2 = p1 - right * radius
vt3 = p0 - right * radius
c.append(area.Vertex(0, vt0, area.Point(0, 0)))
c.append(area.Vertex(0, vt1, area.Point(0, 0)))
c.append(area.Vertex(1, vt2, p1))
c.append(area.Vertex(0, vt3, area.Point(0, 0)))
c.append(area.Vertex(1, vt0, p0))
obround.append(c)
return obround
def feed_possible(p0, p1):
obround = make_obround(p0, p1, tool_radius_for_pocket)
a = area.Area(area_for_feed_possible)
obround.Subtract(a)
if obround.num_curves() > 0:
return False
return True
def cut_curvelist(curve_list, rapid_down_to_height, depth, clearance_height, keep_tool_down_if_poss):
p = area.Point(0, 0)
first = True
for curve in curve_list:
need_rapid = True
if first == False:
s = curve.FirstVertex().p
if keep_tool_down_if_poss == True:
# see if we can feed across
if feed_possible(p, s):
need_rapid = False
elif s.x == p.x and s.y == p.y:
need_rapid = False
#rapid(p.x,p.y)
if need_rapid:
rapid(z = clearance_height)
p = cut_curve_lathe(curve, need_rapid, p, rapid_down_to_height, depth)
first = False
rapid(z = clearance_height)
def get_curve_list(arealist):
curve_list = list()
for a in arealist:
for curve in a.getCurves():
curve_list.append(curve)
return curve_list
curve_list_for_zigs = []
rightward_for_zigs = True
sin_angle_for_zigs = 0.0
cos_angle_for_zigs = 1.0
sin_minus_angle_for_zigs = 0.0
cos_minus_angle_for_zigs = 1.0
test_count = 0
def make_zig_curve(curve, y0, y):
global test_count
if rightward_for_zigs:
curve.Reverse()
zig = area.Curve()
zig_started = False
zag_found = False
prev_p = None
for vertex in curve.getVertices():
if prev_p != None:
if math.fabs(vertex.p.y - y0) < 0.002:
if zig_started:
zig.append(unrotated_vertex(vertex))
elif math.fabs(prev_p.y - y0) < 0.002 and vertex.type == 0:
zig.append(area.Vertex(0, unrotated_point(prev_p), area.Point(0, 0)))
zig.append(unrotated_vertex(vertex))
zig_started = True
elif zig_started:
zig.append(unrotated_vertex(vertex))
if math.fabs(vertex.p.y - y) < 0.002:
zag_found = True
break
prev_p = vertex.p
if zig_started:
curve_list_for_zigs.append(zig)
def make_zig(a, y0, y):
for curve in a.getCurves():
make_zig_curve(curve, y0, y)
reorder_zig_list_list = []
def add_reorder_zig(curve):
global reorder_zig_list_list
# look in existing lists
s = curve.FirstVertex().p
for curve_list in reorder_zig_list_list:
last_curve = curve_list[len(curve_list) - 1]
e = last_curve.LastVertex().p
if math.fabs(s.x - e.x) < 0.002 and math.fabs(s.y - e.y) < 0.002:
curve_list.append(curve)
return
# else add a new list
curve_list = []
curve_list.append(curve)
reorder_zig_list_list.append(curve_list)
def reorder_zigs():
global curve_list_for_zigs
global reorder_zig_list_list
reorder_zig_list_list = []
for curve in curve_list_for_zigs:
add_reorder_zig(curve)
curve_list_for_zigs = []
for curve_list in reorder_zig_list_list:
for curve in curve_list:
curve_list_for_zigs.append(curve)
def rotated_point(p):
return area.Point(p.x * cos_angle_for_zigs - p.y * sin_angle_for_zigs, p.x * sin_angle_for_zigs + p.y * cos_angle_for_zigs)
def unrotated_point(p):
return area.Point(p.x * cos_minus_angle_for_zigs - p.y * sin_minus_angle_for_zigs, p.x * sin_minus_angle_for_zigs + p.y * cos_minus_angle_for_zigs)
def rotated_vertex(v):
if v.type:
return area.Vertex(v.type, rotated_point(v.p), rotated_point(v.c))
return area.Vertex(v.type, rotated_point(v.p), area.Point(0, 0))
def unrotated_vertex(v):
if v.type:
return area.Vertex(v.type, unrotated_point(v.p), unrotated_point(v.c))
return area.Vertex(v.type, unrotated_point(v.p), area.Point(0, 0))
def rotated_area(a):
an = area.Area()
for curve in a.getCurves():
curve_new = area.Curve()
for v in curve.getVertices():
curve_new.append(rotated_vertex(v))
an.append(curve_new)
return an
def zigzag(a, a_firstoffset, stepover):
if a.num_curves() == 0:
return
global rightward_for_zigs
global curve_list_for_zigs
global test_count
global sin_angle_for_zigs
global cos_angle_for_zigs
global sin_minus_angle_for_zigs
global cos_minus_angle_for_zigs
a = rotated_area(a)
b = area.Box()
a.GetBox(b)
#x0 = b.MinX() - 1.0
#x1 = b.MaxX() + 1.0
x1 = b.MinX() - 1.0
x0 = b.MaxX() + 1.0
height = b.MaxY() - b.MinY()
num_steps = int(height / stepover + 1)
#y = b.MinY() + 0.1
y = b.MaxY() - 0.1
null_point = area.Point(0, 0)
rightward_for_zigs = True
curve_list_for_zigs = []
test_count = 0
for i in range(0, num_steps):
        # collect vertices for a box shape from X+,Y+ toward the curve,
        # then move the tool Y+ and then back toward the X start position:
# ------->
# |
# -------<
test_count = test_count + 1
y0 = y
#y = y + stepover
y = y - stepover
p0 = area.Point(x0, y0)
p1 = area.Point(x0, y)
p2 = area.Point(x1, y)
p3 = area.Point(x1, y0)
c = area.Curve()
c.append(area.Vertex(0, p0, null_point, 0))
c.append(area.Vertex(0, p1, null_point, 0))
c.append(area.Vertex(0, p2, null_point, 1))
c.append(area.Vertex(0, p3, null_point, 0))
c.append(area.Vertex(0, p0, null_point, 1))
a2 = area.Area()
a2.append(c)
a2.Intersect(a)
rightward_for_zigs = (rightward_for_zigs == False)
y10 = y + stepover
#y = y + stepover
y2 = y + stepover*2
p10 = area.Point(x0, y10)
p11 = area.Point(x0, y2)
p12 = area.Point(x1, y2)
p13 = area.Point(x1, y10)
c2 = area.Curve()
c2.append(area.Vertex(0, p10, null_point, 0))
c2.append(area.Vertex(0, p11, null_point, 0))
c2.append(area.Vertex(0, p12, null_point, 1))
c2.append(area.Vertex(0, p13, null_point, 0))
c2.append(area.Vertex(0, p10, null_point, 1))
a3 = area.Area()
a3.append(c2)
a3.Intersect(a)
make_zig(a3, y0, y)
rightward_for_zigs = (rightward_for_zigs == False)
reorder_zigs()
def pocket(a, tool_radius, extra_offset, rapid_down_to_height, start_depth, final_depth, stepover, stepdown, round_corner_factor, clearance_height, from_center, keep_tool_down_if_poss, use_zig_zag, zig_angle):
global area_for_feed_possible
global tool_radius_for_pocket
global sin_angle_for_zigs
global cos_angle_for_zigs
global sin_minus_angle_for_zigs
global cos_minus_angle_for_zigs
tool_radius_for_pocket = tool_radius
radians_angle = zig_angle * math.pi / 180
sin_angle_for_zigs = math.sin(-radians_angle)
cos_angle_for_zigs = math.cos(-radians_angle)
sin_minus_angle_for_zigs = math.sin(radians_angle)
cos_minus_angle_for_zigs = math.cos(radians_angle)
if rapid_down_to_height > clearance_height:
rapid_down_to_height = clearance_height
area.set_round_corner_factor(round_corner_factor)
arealist = list()
area_for_feed_possible = area.Area(a)
area_for_feed_possible.Offset(extra_offset - 0.01)
a_firstoffset = area.Area(a)
a_firstoffset.Offset(tool_radius + extra_offset)
if use_zig_zag:
zigzag(a_firstoffset, a_firstoffset, stepover)
curve_list = curve_list_for_zigs
else:
pass #we're just using zig_zag for roughing
layer_count = int((start_depth - final_depth) / stepdown)
if layer_count * stepdown + 0.00001 < start_depth - final_depth:
layer_count += 1
for i in range(1, layer_count+1):
if i == layer_count:
depth = final_depth
else:
depth = start_depth - i * stepdown
cut_curvelist(curve_list, rapid_down_to_height, depth, clearance_height, keep_tool_down_if_poss)
def rough_open_prof(k, tool_diameter, extra_offset, rapid_down_to_height, start_depth, final_depth, stepover, stepdown, round_corner_factor, clearance_height):
a = make_area_for_roughing(k)
pocket(a, tool_diameter/2, extra_offset, rapid_down_to_height, start_depth, final_depth, stepover, stepdown, round_corner_factor, clearance_height, 1, True, True, 0)
#pocket(a7, tool_diameter/2, 0.05, rapid_down_to_height, start_depth, final_depth, 0.075, step_down, 1, clearance, 1, True, True, 0)
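# Usage sketch (illustrative; requires the HeeksCNC runtime, where `k` is the
# id of an open-profile kurve and an nc post-processor has been set up):
#   rough_open_prof(k, tool_diameter=3.0, extra_offset=0.1,
#                   rapid_down_to_height=2.0, start_depth=0.0,
#                   final_depth=-5.0, stepover=1.5, stepdown=2.0,
#                   round_corner_factor=1.0, clearance_height=5.0)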
| bsd-3-clause | 1,588,618,612,903,993,000 | 29.326241 | 209 | 0.551958 | false |
alexforencich/python-ivi | ivi/agilent/agilentMSOX92004A.py | 2 | 1692 | """
Python Interchangeable Virtual Instrument Library
Copyright (c) 2012-2017 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from .agilent90000 import *
class agilentMSOX92004A(agilent90000):
"Agilent Infiniium MSOX92004A IVI oscilloscope driver"
def __init__(self, *args, **kwargs):
self.__dict__.setdefault('_instrument_id', 'MSOX92004A')
super(agilentMSOX92004A, self).__init__(*args, **kwargs)
self._analog_channel_count = 4
self._digital_channel_count = 16
self._channel_count = self._analog_channel_count + self._digital_channel_count
self._bandwidth = 20e9
self._init_channels()
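# Usage sketch (illustrative; the VISA resource string is hypothetical and a
# VISA backend must be available):
#   import ivi
#   scope = ivi.agilent.agilentMSOX92004A("TCPIP0::192.0.2.10::INSTR")
#   print(scope.identity.instrument_model)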
| mit | 6,651,070,387,465,703,000 | 37.454545 | 86 | 0.737589 | false |
DemocracyClub/yournextrepresentative | ynr/apps/cached_counts/views.py | 1 | 3171 | import json
from django.db.models import Count
from django.http import HttpResponse
from django.views.generic import TemplateView
from candidates.models import Ballot
from elections.mixins import ElectionMixin
from elections.models import Election
from parties.models import Party
from popolo.models import Membership
from .models import get_attention_needed_posts
def get_counts(for_json=True):
election_id_to_candidates = {}
qs = (
Membership.objects.all()
.values("ballot__election")
.annotate(count=Count("ballot__election"))
.order_by()
)
for d in qs:
election_id_to_candidates[d["ballot__election"]] = d["count"]
grouped_elections = Election.group_and_order_elections(for_json=for_json)
for era_data in grouped_elections:
for date, elections in era_data["dates"].items():
for role_data in elections:
for election_data in role_data["elections"]:
e = election_data["election"]
total = election_id_to_candidates.get(e.id, 0)
election_counts = {
"id": e.slug,
"html_id": e.slug.replace(".", "-"),
"name": e.name,
"total": total,
}
election_data.update(election_counts)
del election_data["election"]
return grouped_elections
class ReportsHomeView(TemplateView):
template_name = "reports.html"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context["all_elections"] = get_counts()
return context
def get(self, *args, **kwargs):
if self.request.GET.get("format") == "json":
return HttpResponse(
json.dumps(get_counts(for_json=True)),
content_type="application/json",
)
return super().get(*args, **kwargs)
class PartyCountsView(ElectionMixin, TemplateView):
template_name = "party_counts.html"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
qs = Party.objects.filter(
membership__ballot__election=self.election_data
)
qs = qs.annotate(count=Count("membership"))
qs = qs.order_by("-count", "name")
context["party_counts"] = qs
return context
class ConstituencyCountsView(ElectionMixin, TemplateView):
template_name = "constituency_counts.html"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
qs = Ballot.objects.filter(election=self.election_data).annotate(
count=Count("membership")
)
qs = qs.select_related("post", "election")
qs = qs.order_by("-count")
context["post_counts"] = qs
return context
class AttentionNeededView(TemplateView):
template_name = "attention_needed.html"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context["post_counts"] = get_attention_needed_posts()
return context
| agpl-3.0 | 5,946,603,774,880,731,000 | 30.71 | 77 | 0.602334 | false |
olt/mapproxy | mapproxy/service/ows.py | 13 | 1357 | # This file is part of the MapProxy project.
# Copyright (C) 2011 Omniscale <http://omniscale.de>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Wrapper service handler for all OWS services (/service?).
"""
class OWSServer(object):
"""
Wraps all OWS services (/service?, /ows?, /wms?, /wmts?) and dispatches requests
    based on the ``service`` query argument.
"""
def __init__(self, services):
self.names = ['service', 'ows']
self.services = {}
for service in services:
if service.service == 'wms' and 'wms' not in self.names:
self.names.append('wms')
self.services[service.service] = service
def handle(self, req):
service = req.args.get('service', 'wms').lower()
assert service in self.services
return self.services[service].handle(req)
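# Dispatch sketch with a stand-in service object (illustrative; real service
# instances come from MapProxy's configuration and service factories).
if __name__ == '__main__':
    class _DummyWMS(object):
        service = 'wms'
        def handle(self, req):
            return 'wms handled: %s' % req.args.get('request')
    class _DummyRequest(object):
        args = {'service': 'WMS', 'request': 'GetCapabilities'}
    server = OWSServer([_DummyWMS()])
    print(server.handle(_DummyRequest()))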
| apache-2.0 | -2,442,527,379,156,336,600 | 34.710526 | 84 | 0.670597 | false |
rfleschenberg/djangocms-cascade | cmsplugin_cascade/migrations/0009_cascadepage.py | 1 | 1447 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-04-07 22:03
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import jsonfield.fields
class Migration(migrations.Migration):
dependencies = [
('cms', '0013_urlconfrevision'),
('cmsplugin_cascade', '0008_sortableinlinecascadeelement'),
]
operations = [
migrations.CreateModel(
name='CascadePage',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('settings', jsonfield.fields.JSONField(blank=True, default={}, help_text='User editable settings for this page.')),
('glossary', jsonfield.fields.JSONField(blank=True, default={}, help_text='Store for arbitrary page data.')),
('extended_object', models.OneToOneField(editable=False, on_delete=django.db.models.deletion.CASCADE, to='cms.Page')),
('public_extension', models.OneToOneField(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='draft_extension', to='cmsplugin_cascade.CascadePage')),
],
options={
'db_table': 'cmsplugin_cascade_page',
'verbose_name': 'Cascade Page Settings',
'verbose_name_plural': 'Cascade Page Settings',
},
),
]
| mit | -4,303,575,751,125,042,700 | 42.848485 | 199 | 0.624741 | false |
mszewczy/odoo | addons/account/wizard/account_open_closed_fiscalyear.py | 237 | 2537 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
class account_open_closed_fiscalyear(osv.osv_memory):
_name = "account.open.closed.fiscalyear"
_description = "Choose Fiscal Year"
_columns = {
'fyear_id': fields.many2one('account.fiscalyear', \
            'Fiscal Year', required=True, help="Select the fiscal year whose 'End of Year Entries Journal' entries you want to remove"),
}
def remove_entries(self, cr, uid, ids, context=None):
move_obj = self.pool.get('account.move')
data = self.browse(cr, uid, ids, context=context)[0]
period_journal = data.fyear_id.end_journal_period_id or False
if not period_journal:
raise osv.except_osv(_('Error!'), _("You have to set the 'End of Year Entries Journal' for this Fiscal Year which is set after generating opening entries from 'Generate Opening Entries'."))
if period_journal.period_id.state == 'done':
raise osv.except_osv(_('Error!'), _("You can not cancel closing entries if the 'End of Year Entries Journal' period is closed."))
ids_move = move_obj.search(cr, uid, [('journal_id','=',period_journal.journal_id.id),('period_id','=',period_journal.period_id.id)])
if ids_move:
cr.execute('delete from account_move where id IN %s', (tuple(ids_move),))
self.invalidate_cache(cr, uid, context=context)
return {'type': 'ir.actions.act_window_close'}
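# --- Illustrative usage (not part of the original module) ---
# Calling the wizard from server-side code with the old (pre-v8) ORM API;
# ``fyear_id`` below is a hypothetical fiscal-year record id.
#   wiz_obj = self.pool.get('account.open.closed.fiscalyear')
#   wiz_id = wiz_obj.create(cr, uid, {'fyear_id': fyear_id}, context=context)
#   wiz_obj.remove_entries(cr, uid, [wiz_id], context=context)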
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 5,670,354,933,521,232,000 | 48.745098 | 202 | 0.632243 | false |
jendap/tensorflow | tensorflow/contrib/distributions/python/ops/moving_stats.py | 42 | 10125 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functions for computing moving statistics."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variable_scope
__all__ = [
"assign_moving_mean_variance",
"assign_log_moving_mean_exp",
"moving_mean_variance",
]
def assign_moving_mean_variance(
mean_var, variance_var, value, decay, name=None):
"""Compute exponentially weighted moving {mean,variance} of a streaming value.
The `value` updated exponentially weighted moving `mean_var` and
`variance_var` are given by the following recurrence relations:
```python
variance_var = decay * (variance_var + (1-decay) * (value - mean_var)**2)
mean_var = decay * mean_var + (1 - decay) * value
```
Note: `mean_var` is updated *after* `variance_var`, i.e., `variance_var` uses
the lag-1 mean.
For derivation justification, see [Finch (2009; Eq. 143)][1].
Args:
mean_var: `float`-like `Variable` representing the exponentially weighted
moving mean. Same shape as `variance_var` and `value`.
variance_var: `float`-like `Variable` representing the
exponentially weighted moving variance. Same shape as `mean_var` and
`value`.
value: `float`-like `Tensor`. Same shape as `mean_var` and `variance_var`.
decay: A `float`-like `Tensor`. The moving mean decay. Typically close to
`1.`, e.g., `0.999`.
name: Optional name of the returned operation.
Returns:
mean_var: `Variable` representing the `value`-updated exponentially weighted
moving mean.
variance_var: `Variable` representing the `value`-updated
exponentially weighted moving variance.
Raises:
TypeError: if `mean_var` does not have float type `dtype`.
TypeError: if `mean_var`, `variance_var`, `value`, `decay` have different
`base_dtype`.
#### References
[1]: Tony Finch. Incremental calculation of weighted mean and variance.
_Technical Report_, 2009.
http://people.ds.cam.ac.uk/fanf2/hermes/doc/antiforgery/stats.pdf
"""
with ops.name_scope(name, "assign_moving_mean_variance",
[variance_var, mean_var, value, decay]):
with ops.colocate_with(variance_var):
with ops.colocate_with(mean_var):
base_dtype = mean_var.dtype.base_dtype
if not base_dtype.is_floating:
raise TypeError(
"mean_var.base_dtype({}) does not have float type "
"`dtype`.".format(base_dtype.name))
if base_dtype != variance_var.dtype.base_dtype:
raise TypeError(
"mean_var.base_dtype({}) != variance_var.base_dtype({})".format(
base_dtype.name,
variance_var.dtype.base_dtype.name))
value = ops.convert_to_tensor(value, dtype=base_dtype, name="value")
decay = ops.convert_to_tensor(decay, dtype=base_dtype, name="decay")
delta = value - mean_var
with ops.control_dependencies([delta]):
mean_var = state_ops.assign_add(
mean_var,
(1. - decay) * delta)
variance_var = state_ops.assign_sub(
variance_var,
(1. - decay) * (variance_var - decay * math_ops.square(delta)))
return mean_var, variance_var
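# --- Illustrative sketch (not part of the original module) ---
# A pure-NumPy mirror of the recurrence documented above, useful for
# checking the math outside a TensorFlow graph. The helper below is
# hypothetical and is not used by the library code; ``numpy`` ships as a
# TensorFlow dependency.
def _numpy_moving_mean_variance(values, decay):
  """Pure-NumPy reference for the exponentially weighted moving stats."""
  import numpy as np
  mean = variance = 0.
  for value in np.asarray(values, dtype=np.float64):
    # variance is updated first, so it sees the lag-1 mean (cf. Finch 2009).
    variance = decay * (variance + (1. - decay) * (value - mean) ** 2)
    mean = decay * mean + (1. - decay) * value
  return mean, variance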
def assign_log_moving_mean_exp(
log_mean_exp_var, log_value, decay, name=None):
"""Compute the log of the exponentially weighted moving mean of the exp.
If `log_value` is a draw from a stationary random variable, this function
approximates `log(E[exp(log_value)])`, i.e., a weighted log-sum-exp. More
precisely, a `tf.Variable`, `log_mean_exp_var`, is updated by `log_value`
using the following identity:
```none
log_mean_exp_var =
= log(decay exp(log_mean_exp_var) + (1 - decay) exp(log_value))
= log(exp(log_mean_exp_var + log(decay)) + exp(log_value + log1p(-decay)))
= log_mean_exp_var
+ log( exp(log_mean_exp_var - log_mean_exp_var + log(decay))
+ exp(log_value - log_mean_exp_var + log1p(-decay)))
= log_mean_exp_var
+ log_sum_exp([log(decay), log_value - log_mean_exp_var + log1p(-decay)]).
```
In addition to numerical stability, this formulation is advantageous because
`log_mean_exp_var` can be updated in a lock-free manner, i.e., using
`assign_add`. (Note: the updates are not thread-safe; it's just that the
update to the tf.Variable is presumed efficient due to being lock-free.)
Args:
log_mean_exp_var: `float`-like `Variable` representing the log of the
exponentially weighted moving mean of the exp. Same shape as `log_value`.
log_value: `float`-like `Tensor` representing a new (streaming) observation.
Same shape as `log_mean_exp_var`.
decay: A `float`-like `Tensor`. The moving mean decay. Typically close to
`1.`, e.g., `0.999`.
name: Optional name of the returned operation.
Returns:
log_mean_exp_var: A reference to the input 'Variable' tensor with the
`log_value`-updated log of the exponentially weighted moving mean of exp.
Raises:
TypeError: if `log_mean_exp_var` does not have float type `dtype`.
TypeError: if `log_mean_exp_var`, `log_value`, `decay` have different
`base_dtype`.
"""
with ops.name_scope(name, "assign_log_moving_mean_exp",
[log_mean_exp_var, log_value, decay]):
# We want to update the variable in a numerically stable and lock-free way.
# To do this, observe that variable `x` updated by `v` is:
# x = log(w exp(x) + (1-w) exp(v))
# = log(exp(x + log(w)) + exp(v + log1p(-w)))
# = x + log(exp(x - x + log(w)) + exp(v - x + log1p(-w)))
# = x + lse([log(w), v - x + log1p(-w)])
with ops.colocate_with(log_mean_exp_var):
base_dtype = log_mean_exp_var.dtype.base_dtype
if not base_dtype.is_floating:
raise TypeError(
"log_mean_exp_var.base_dtype({}) does not have float type "
"`dtype`.".format(base_dtype.name))
log_value = ops.convert_to_tensor(log_value, dtype=base_dtype,
name="log_value")
decay = ops.convert_to_tensor(decay, dtype=base_dtype, name="decay")
delta = (log_value - log_mean_exp_var)[array_ops.newaxis, ...]
x = array_ops.concat([
math_ops.log(decay) * array_ops.ones_like(delta),
delta + math_ops.log1p(-decay)
], axis=0)
x = math_ops.reduce_logsumexp(x, axis=0)
return log_mean_exp_var.assign_add(x)
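# --- Illustrative sketch (not part of the original module) ---
# The same update written with NumPy, mirroring the identity above:
#   x <- x + logsumexp([log(decay), log_value - x + log1p(-decay)]).
# The helper is hypothetical and unused by the library code.
def _numpy_log_moving_mean_exp(log_mean_exp, log_value, decay):
  """Pure-NumPy reference for a single log-moving-mean-exp update."""
  import numpy as np
  return log_mean_exp + np.logaddexp(
      np.log(decay), log_value - log_mean_exp + np.log1p(-decay))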
def moving_mean_variance(value, decay, collections=None, name=None):
"""Compute exponentially weighted moving {mean,variance} of a streaming value.
The exponentially-weighting moving `mean_var` and `variance_var` are updated
by `value` according to the following recurrence:
```python
variance_var = decay * (variance_var + (1-decay) * (value - mean_var)**2)
mean_var = decay * mean_var + (1 - decay) * value
```
Note: `mean_var` is updated *after* `variance_var`, i.e., `variance_var` uses
the lag-`1` mean.
For derivation justification, see [Finch (2009; Eq. 143)][1].
Unlike `assign_moving_mean_variance`, this function handles
variable creation.
Args:
value: `float`-like `Tensor`. Same shape as `mean_var` and `variance_var`.
decay: A `float`-like `Tensor`. The moving mean decay. Typically close to
`1.`, e.g., `0.999`.
collections: Python list of graph-collections keys to which the internal
variables `mean_var` and `variance_var` are added.
Default value is `[GraphKeys.GLOBAL_VARIABLES]`.
name: Optional name of the returned operation.
Returns:
mean_var: `Variable` representing the `value`-updated exponentially weighted
moving mean.
variance_var: `Variable` representing the `value`-updated
exponentially weighted moving variance.
Raises:
TypeError: if `value_var` does not have float type `dtype`.
TypeError: if `value`, `decay` have different `base_dtype`.
#### References
[1]: Tony Finch. Incremental calculation of weighted mean and variance.
_Technical Report_, 2009.
http://people.ds.cam.ac.uk/fanf2/hermes/doc/antiforgery/stats.pdf
"""
if collections is None:
collections = [ops.GraphKeys.GLOBAL_VARIABLES]
with variable_scope.variable_scope(
name, "moving_mean_variance", [value, decay]):
value = ops.convert_to_tensor(value, name="value")
base_dtype = value.dtype.base_dtype
if not base_dtype.is_floating:
raise TypeError(
"value.base_dtype({}) does not have float type `dtype`.".format(
base_dtype.name))
decay = ops.convert_to_tensor(decay, dtype=base_dtype, name="decay")
variance_var = variable_scope.get_variable(
"moving_variance",
shape=value.shape,
dtype=value.dtype,
initializer=init_ops.zeros_initializer(),
trainable=False,
collections=collections)
mean_var = variable_scope.get_variable(
"moving_mean",
shape=value.shape,
dtype=value.dtype,
initializer=init_ops.zeros_initializer(),
trainable=False,
collections=collections)
return assign_moving_mean_variance(
mean_var, variance_var, value, decay)
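if __name__ == "__main__":
  # Illustrative smoke test (not part of the original module): streams
  # random draws through `moving_mean_variance` in TF1 graph mode. The
  # extra imports and names below are local to this demo and assume a
  # TF1-era runtime.
  import numpy as np
  from tensorflow.python.client import session
  from tensorflow.python.framework import dtypes
  from tensorflow.python.ops import variables
  value_ph = array_ops.placeholder(dtypes.float32, shape=[])
  mean, variance = moving_mean_variance(value_ph, decay=0.99)
  with session.Session() as sess:
    sess.run(variables.global_variables_initializer())
    for v in np.random.randn(1000):
      m, s2 = sess.run([mean, variance], feed_dict={value_ph: v})
  print("moving mean ~ %.3f, moving variance ~ %.3f" % (m, s2))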
| apache-2.0 | -3,260,370,201,121,433,600 | 40.158537 | 80 | 0.652642 | false |
apache/incubator-airflow | tests/www/test_security.py | 3 | 19165 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
import unittest
from unittest import mock
from flask_appbuilder import SQLA, Model, expose, has_access
from flask_appbuilder.security.sqla import models as sqla_models
from flask_appbuilder.views import BaseView, ModelView
from sqlalchemy import Column, Date, Float, Integer, String
from airflow import settings
from airflow.exceptions import AirflowException
from airflow.models import DagModel
from airflow.security import permissions
from airflow.www import app as application
from airflow.www.utils import CustomSQLAInterface
from tests.test_utils import fab_utils
from tests.test_utils.db import clear_db_dags, clear_db_runs
from tests.test_utils.mock_security_manager import MockSecurityManager
READ_WRITE = {permissions.ACTION_CAN_READ, permissions.ACTION_CAN_EDIT}
READ_ONLY = {permissions.ACTION_CAN_READ}
logging.basicConfig(format='%(asctime)s:%(levelname)s:%(name)s:%(message)s')
logging.getLogger().setLevel(logging.DEBUG)
log = logging.getLogger(__name__)
class SomeModel(Model):
id = Column(Integer, primary_key=True)
field_string = Column(String(50), unique=True, nullable=False)
field_integer = Column(Integer())
field_float = Column(Float())
field_date = Column(Date())
def __repr__(self):
return str(self.field_string)
class SomeModelView(ModelView):
datamodel = CustomSQLAInterface(SomeModel)
base_permissions = [
'can_list',
'can_show',
'can_add',
permissions.ACTION_CAN_EDIT,
permissions.ACTION_CAN_DELETE,
]
list_columns = ['field_string', 'field_integer', 'field_float', 'field_date']
class SomeBaseView(BaseView):
route_base = ''
@expose('/some_action')
@has_access
def some_action(self):
return "action!"
class TestSecurity(unittest.TestCase):
@classmethod
def setUpClass(cls):
settings.configure_orm()
cls.session = settings.Session
cls.app = application.create_app(testing=True)
cls.appbuilder = cls.app.appbuilder # pylint: disable=no-member
cls.app.config['WTF_CSRF_ENABLED'] = False
cls.security_manager = cls.appbuilder.sm
cls.delete_roles()
def setUp(self):
clear_db_runs()
clear_db_dags()
self.db = SQLA(self.app)
self.appbuilder.add_view(SomeBaseView, "SomeBaseView", category="BaseViews")
self.appbuilder.add_view(SomeModelView, "SomeModelView", category="ModelViews")
log.debug("Complete setup!")
@classmethod
def delete_roles(cls):
for role_name in ['team-a', 'MyRole1', 'MyRole5', 'Test_Role', 'MyRole3', 'MyRole2']:
fab_utils.delete_role(cls.app, role_name)
def expect_user_is_in_role(self, user, rolename):
self.security_manager.init_role(rolename, [])
role = self.security_manager.find_role(rolename)
if not role:
self.security_manager.add_role(rolename)
role = self.security_manager.find_role(rolename)
user.roles = [role]
self.security_manager.update_user(user)
def assert_user_has_dag_perms(self, perms, dag_id, user=None):
for perm in perms:
self.assertTrue(
self._has_dag_perm(perm, dag_id, user),
f"User should have '{perm}' on DAG '{dag_id}'",
)
def assert_user_does_not_have_dag_perms(self, dag_id, perms, user=None):
for perm in perms:
self.assertFalse(
self._has_dag_perm(perm, dag_id, user),
f"User should not have '{perm}' on DAG '{dag_id}'",
)
def _has_dag_perm(self, perm, dag_id, user):
# if not user:
# user = self.user
return self.security_manager.has_access(perm, self.security_manager.prefixed_dag_id(dag_id), user)
def tearDown(self):
clear_db_runs()
clear_db_dags()
self.appbuilder = None
self.app = None
self.db = None
log.debug("Complete teardown!")
def test_init_role_baseview(self):
role_name = 'MyRole3'
role_perms = [('can_some_action', 'SomeBaseView')]
self.security_manager.init_role(role_name, perms=role_perms)
role = self.appbuilder.sm.find_role(role_name)
self.assertIsNotNone(role)
self.assertEqual(len(role_perms), len(role.permissions))
def test_init_role_modelview(self):
role_name = 'MyRole2'
role_perms = [
('can_list', 'SomeModelView'),
('can_show', 'SomeModelView'),
('can_add', 'SomeModelView'),
(permissions.ACTION_CAN_EDIT, 'SomeModelView'),
(permissions.ACTION_CAN_DELETE, 'SomeModelView'),
]
self.security_manager.init_role(role_name, role_perms)
role = self.appbuilder.sm.find_role(role_name)
self.assertIsNotNone(role)
self.assertEqual(len(role_perms), len(role.permissions))
def test_update_and_verify_permission_role(self):
role_name = 'Test_Role'
self.security_manager.init_role(role_name, [])
role = self.security_manager.find_role(role_name)
perm = self.security_manager.find_permission_view_menu(permissions.ACTION_CAN_EDIT, 'RoleModelView')
self.security_manager.add_permission_role(role, perm)
role_perms_len = len(role.permissions)
self.security_manager.init_role(role_name, [])
new_role_perms_len = len(role.permissions)
self.assertEqual(role_perms_len, new_role_perms_len)
def test_get_user_roles(self):
user = mock.MagicMock()
user.is_anonymous = False
roles = self.appbuilder.sm.find_role('Admin')
user.roles = roles
self.assertEqual(self.security_manager.get_user_roles(user), roles)
def test_get_user_roles_for_anonymous_user(self):
viewer_role_perms = {
(permissions.ACTION_CAN_READ, permissions.RESOURCE_CONFIG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_CODE),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_RUN),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_IMPORT_ERROR),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_AUDIT_LOG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_JOB),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_PLUGIN),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_SLA_MISS),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_LOG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_XCOM),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_WEBSITE),
(permissions.ACTION_CAN_ACCESS_MENU, permissions.RESOURCE_BROWSE_MENU),
(permissions.ACTION_CAN_ACCESS_MENU, permissions.RESOURCE_DAG_RUN),
(permissions.ACTION_CAN_ACCESS_MENU, permissions.RESOURCE_DOCS_LINK),
(permissions.ACTION_CAN_ACCESS_MENU, permissions.RESOURCE_DOCS_MENU),
(permissions.ACTION_CAN_ACCESS_MENU, permissions.RESOURCE_JOB),
(permissions.ACTION_CAN_ACCESS_MENU, permissions.RESOURCE_AUDIT_LOG),
(permissions.ACTION_CAN_ACCESS_MENU, permissions.RESOURCE_PLUGIN),
(permissions.ACTION_CAN_ACCESS_MENU, permissions.RESOURCE_SLA_MISS),
(permissions.ACTION_CAN_ACCESS_MENU, permissions.RESOURCE_TASK_INSTANCE),
(permissions.ACTION_CAN_THIS_FORM_GET, permissions.RESOURCE_RESET_MY_PASSWORD_VIEW),
(permissions.ACTION_CAN_THIS_FORM_POST, permissions.RESOURCE_RESET_MY_PASSWORD_VIEW),
(permissions.ACTION_RESETMYPASSWORD, permissions.RESOURCE_USER_DB_MODELVIEW),
(permissions.ACTION_CAN_THIS_FORM_GET, permissions.RESOURCE_USERINFO_EDIT_VIEW),
(permissions.ACTION_CAN_THIS_FORM_POST, permissions.RESOURCE_USERINFO_EDIT_VIEW),
(permissions.ACTION_USERINFOEDIT, permissions.RESOURCE_USER_DB_MODELVIEW),
(permissions.ACTION_CAN_USERINFO, permissions.RESOURCE_USER_DB_MODELVIEW),
(permissions.ACTION_CAN_USERINFO, permissions.RESOURCE_USER_OID_MODELVIEW),
(permissions.ACTION_CAN_USERINFO, permissions.RESOURCE_USER_LDAP_MODELVIEW),
(permissions.ACTION_CAN_USERINFO, permissions.RESOURCE_USER_OAUTH_MODELVIEW),
(permissions.ACTION_CAN_USERINFO, permissions.RESOURCE_USER_REMOTEUSER_MODELVIEW),
}
self.app.config['AUTH_ROLE_PUBLIC'] = 'Viewer'
with self.app.app_context():
user = mock.MagicMock()
user.is_anonymous = True
perms_views = set()
for role in self.security_manager.get_user_roles(user):
perms_views.update(
{(perm_view.permission.name, perm_view.view_menu.name) for perm_view in role.permissions}
)
self.assertEqual(perms_views, viewer_role_perms)
@mock.patch('airflow.www.security.AirflowSecurityManager.get_user_roles')
def test_get_all_permissions_views(self, mock_get_user_roles):
role_name = 'MyRole5'
role_perm = 'can_some_action'
role_vm = 'SomeBaseView'
username = 'get_all_permissions_views'
with self.app.app_context():
user = fab_utils.create_user(
self.app,
username,
role_name,
permissions=[
(role_perm, role_vm),
],
)
role = user.roles[0]
mock_get_user_roles.return_value = [role]
self.assertEqual(self.security_manager.get_all_permissions_views(), {(role_perm, role_vm)})
mock_get_user_roles.return_value = []
self.assertEqual(len(self.security_manager.get_all_permissions_views()), 0)
def test_get_accessible_dag_ids(self):
role_name = 'MyRole1'
permission_action = [permissions.ACTION_CAN_READ]
dag_id = 'dag_id'
username = "ElUser"
user = fab_utils.create_user(
self.app,
username,
role_name,
permissions=[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
                (permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG),
],
)
dag_model = DagModel(dag_id=dag_id, fileloc="/tmp/dag_.py", schedule_interval="2 2 * * *")
self.session.add(dag_model)
self.session.commit()
self.security_manager.sync_perm_for_dag( # type: ignore # pylint: disable=no-member
dag_id, access_control={role_name: permission_action}
)
self.assertEqual(self.security_manager.get_accessible_dag_ids(user), {'dag_id'})
def test_dont_get_inaccessible_dag_ids_for_dag_resource_permission(self):
# In this test case,
        # get_readable_dag_ids() doesn't return DAGs to which the user only has CAN_EDIT permission
username = "Monsieur User"
role_name = "MyRole1"
permission_action = [permissions.ACTION_CAN_EDIT]
dag_id = "dag_id"
user = fab_utils.create_user(
self.app,
username,
role_name,
permissions=[
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG),
],
)
dag_model = DagModel(dag_id=dag_id, fileloc="/tmp/dag_.py", schedule_interval="2 2 * * *")
self.session.add(dag_model)
self.session.commit()
self.security_manager.sync_perm_for_dag( # type: ignore # pylint: disable=no-member
dag_id, access_control={role_name: permission_action}
)
self.assertEqual(self.security_manager.get_readable_dag_ids(user), set())
@mock.patch('airflow.www.security.AirflowSecurityManager._has_view_access')
def test_has_access(self, mock_has_view_access):
user = mock.MagicMock()
user.is_anonymous = False
mock_has_view_access.return_value = True
self.assertTrue(self.security_manager.has_access('perm', 'view', user))
def test_sync_perm_for_dag_creates_permissions_on_view_menus(self):
test_dag_id = 'TEST_DAG'
prefixed_test_dag_id = f'DAG:{test_dag_id}'
self.security_manager.sync_perm_for_dag(test_dag_id, access_control=None)
self.assertIsNotNone(
self.security_manager.find_permission_view_menu(permissions.ACTION_CAN_READ, prefixed_test_dag_id)
)
self.assertIsNotNone(
self.security_manager.find_permission_view_menu(permissions.ACTION_CAN_EDIT, prefixed_test_dag_id)
)
@mock.patch('airflow.www.security.AirflowSecurityManager._has_perm')
@mock.patch('airflow.www.security.AirflowSecurityManager._has_role')
def test_has_all_dag_access(self, mock_has_role, mock_has_perm):
mock_has_role.return_value = True
self.assertTrue(self.security_manager.has_all_dags_access())
mock_has_role.return_value = False
mock_has_perm.return_value = False
self.assertFalse(self.security_manager.has_all_dags_access())
mock_has_perm.return_value = True
self.assertTrue(self.security_manager.has_all_dags_access())
def test_access_control_with_non_existent_role(self):
with self.assertRaises(AirflowException) as context:
self.security_manager.sync_perm_for_dag(
dag_id='access-control-test',
access_control={
'this-role-does-not-exist': [permissions.ACTION_CAN_EDIT, permissions.ACTION_CAN_READ]
},
)
self.assertIn("role does not exist", str(context.exception))
def test_all_dag_access_doesnt_give_non_dag_access(self):
username = 'dag_access_user'
role_name = 'dag_access_role'
with self.app.app_context():
user = fab_utils.create_user(
self.app,
username,
role_name,
permissions=[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
                    (permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG),
],
)
self.assertTrue(
self.security_manager.has_access(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG, user)
)
self.assertFalse(
self.security_manager.has_access(
permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE, user
)
)
def test_access_control_with_invalid_permission(self):
invalid_permissions = [
'can_varimport', # a real permission, but not a member of DAG_PERMS
'can_eat_pudding', # clearly not a real permission
]
username = "LaUser"
user = fab_utils.create_user(
self.app,
username=username,
role_name='team-a',
)
for permission in invalid_permissions:
self.expect_user_is_in_role(user, rolename='team-a')
with self.assertRaises(AirflowException) as context:
self.security_manager.sync_perm_for_dag(
'access_control_test', access_control={'team-a': {permission}}
)
self.assertIn("invalid permissions", str(context.exception))
def test_access_control_is_set_on_init(self):
username = 'access_control_is_set_on_init'
role_name = 'team-a'
with self.app.app_context():
user = fab_utils.create_user(
self.app,
username,
role_name,
permissions=[],
)
self.expect_user_is_in_role(user, rolename='team-a')
self.security_manager.sync_perm_for_dag(
'access_control_test',
access_control={'team-a': [permissions.ACTION_CAN_EDIT, permissions.ACTION_CAN_READ]},
)
self.assert_user_has_dag_perms(
perms=[permissions.ACTION_CAN_EDIT, permissions.ACTION_CAN_READ],
dag_id='access_control_test',
user=user,
)
self.expect_user_is_in_role(user, rolename='NOT-team-a')
self.assert_user_does_not_have_dag_perms(
perms=[permissions.ACTION_CAN_EDIT, permissions.ACTION_CAN_READ],
dag_id='access_control_test',
user=user,
)
def test_access_control_stale_perms_are_revoked(self):
username = 'access_control_stale_perms_are_revoked'
role_name = 'team-a'
with self.app.app_context():
user = fab_utils.create_user(
self.app,
username,
role_name,
permissions=[],
)
self.expect_user_is_in_role(user, rolename='team-a')
self.security_manager.sync_perm_for_dag(
'access_control_test', access_control={'team-a': READ_WRITE}
)
self.assert_user_has_dag_perms(perms=READ_WRITE, dag_id='access_control_test', user=user)
self.security_manager.sync_perm_for_dag(
'access_control_test', access_control={'team-a': READ_ONLY}
)
self.assert_user_has_dag_perms(
perms=[permissions.ACTION_CAN_READ], dag_id='access_control_test', user=user
)
self.assert_user_does_not_have_dag_perms(
perms=[permissions.ACTION_CAN_EDIT], dag_id='access_control_test', user=user
)
def test_no_additional_dag_permission_views_created(self):
ab_perm_view_role = sqla_models.assoc_permissionview_role
self.security_manager.sync_roles()
num_pv_before = self.db.session().query(ab_perm_view_role).count()
self.security_manager.sync_roles()
num_pv_after = self.db.session().query(ab_perm_view_role).count()
self.assertEqual(num_pv_before, num_pv_after)
def test_override_role_vm(self):
test_security_manager = MockSecurityManager(appbuilder=self.appbuilder)
self.assertEqual(len(test_security_manager.VIEWER_VMS), 1)
self.assertEqual(test_security_manager.VIEWER_VMS, {'Airflow'})
| apache-2.0 | 3,754,390,603,396,629,000 | 41.028509 | 110 | 0.625828 | false |
marratj/ansible | lib/ansible/plugins/connection/lxd.py | 44 | 4475 | # (c) 2016 Matt Clay <[email protected]>
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
author: Matt Clay <[email protected]>
connection: lxd
short_description: Run tasks in lxc containers via lxc CLI
description:
- Run commands or put/fetch files to an existing lxc container using lxc CLI
version_added: "2.0"
options:
remote_addr:
description:
- Container identifier
        default: inventory_hostname
vars:
- name: ansible_host
- name: ansible_lxd_host
executable:
description:
- shell to use for execution inside container
default: /bin/sh
vars:
- name: ansible_executable
- name: ansible_lxd_executable
"""
import os
from distutils.spawn import find_executable
from subprocess import call, Popen, PIPE
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
from ansible.module_utils._text import to_bytes, to_text
from ansible.plugins.connection import ConnectionBase
class Connection(ConnectionBase):
""" lxd based connections """
transport = "lxd"
has_pipelining = True
def __init__(self, play_context, new_stdin, *args, **kwargs):
super(Connection, self).__init__(play_context, new_stdin, *args, **kwargs)
self._host = self._play_context.remote_addr
self._lxc_cmd = find_executable("lxc")
if not self._lxc_cmd:
raise AnsibleError("lxc command not found in PATH")
if self._play_context.remote_user is not None and self._play_context.remote_user != 'root':
self._display.warning('lxd does not support remote_user, using container default: root')
def _connect(self):
"""connect to lxd (nothing to do here) """
super(Connection, self)._connect()
if not self._connected:
self._display.vvv(u"ESTABLISH LXD CONNECTION FOR USER: root", host=self._host)
self._connected = True
def exec_command(self, cmd, in_data=None, sudoable=True):
""" execute a command on the lxd host """
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
self._display.vvv(u"EXEC {0}".format(cmd), host=self._host)
local_cmd = [self._lxc_cmd, "exec", self._host, "--", self._play_context.executable, "-c", cmd]
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
in_data = to_bytes(in_data, errors='surrogate_or_strict', nonstring='passthru')
process = Popen(local_cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
stdout, stderr = process.communicate(in_data)
stdout = to_text(stdout)
stderr = to_text(stderr)
if stderr == "error: Container is not running.\n":
raise AnsibleConnectionFailure("container not running: %s" % self._host)
if stderr == "error: not found\n":
raise AnsibleConnectionFailure("container not found: %s" % self._host)
return process.returncode, stdout, stderr
def put_file(self, in_path, out_path):
""" put a file from local to lxd """
super(Connection, self).put_file(in_path, out_path)
self._display.vvv(u"PUT {0} TO {1}".format(in_path, out_path), host=self._host)
if not os.path.isfile(to_bytes(in_path, errors='surrogate_or_strict')):
raise AnsibleFileNotFound("input path is not a file: %s" % in_path)
local_cmd = [self._lxc_cmd, "file", "push", in_path, self._host + "/" + out_path]
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
call(local_cmd)
def fetch_file(self, in_path, out_path):
""" fetch a file from lxd to local """
super(Connection, self).fetch_file(in_path, out_path)
self._display.vvv(u"FETCH {0} TO {1}".format(in_path, out_path), host=self._host)
local_cmd = [self._lxc_cmd, "file", "pull", self._host + "/" + in_path, out_path]
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
call(local_cmd)
def close(self):
""" close the connection (nothing to do here) """
super(Connection, self).close()
self._connected = False
| gpl-3.0 | -223,904,128,685,930,720 | 35.382114 | 103 | 0.629721 | false |
golharam/StarCluster | starcluster/sshutils.py | 14 | 32130 | # Copyright 2009-2014 Justin Riley
#
# This file is part of StarCluster.
#
# StarCluster is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# StarCluster is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with StarCluster. If not, see <http://www.gnu.org/licenses/>.
import os
import re
import sys
import stat
import glob
import atexit
import string
import socket
import fnmatch
import hashlib
import warnings
import posixpath
import scp
import paramiko
from Crypto.PublicKey import RSA
from Crypto.PublicKey import DSA
# windows does not have termios...
try:
import termios
import tty
HAS_TERMIOS = True
except ImportError:
HAS_TERMIOS = False
from starcluster import exception
from starcluster import progressbar
from starcluster.logger import log
class SSHClient(object):
"""
Establishes an SSH connection to a remote host using either password or
private key authentication. Once established, this object allows executing
commands, copying files to/from the remote host, various file querying
similar to os.path.*, and much more.
"""
def __init__(self,
host,
username=None,
password=None,
private_key=None,
private_key_pass=None,
compress=False,
port=22,
timeout=30):
self._host = host
self._port = port
self._pkey = None
self._username = username or os.environ['LOGNAME']
self._password = password
self._timeout = timeout
self._sftp = None
self._scp = None
self._transport = None
self._progress_bar = None
self._compress = compress
if private_key:
self._pkey = self.load_private_key(private_key, private_key_pass)
elif not password:
raise exception.SSHNoCredentialsError()
self._glob = SSHGlob(self)
self.__last_status = None
atexit.register(self.close)
def load_private_key(self, private_key, private_key_pass=None):
# Use Private Key.
log.debug('loading private key %s' % private_key)
if private_key.endswith('rsa') or private_key.count('rsa'):
pkey = self._load_rsa_key(private_key, private_key_pass)
elif private_key.endswith('dsa') or private_key.count('dsa'):
pkey = self._load_dsa_key(private_key, private_key_pass)
else:
log.debug(
"specified key does not end in either rsa or dsa, trying both")
pkey = self._load_rsa_key(private_key, private_key_pass)
if pkey is None:
pkey = self._load_dsa_key(private_key, private_key_pass)
return pkey
def connect(self, host=None, username=None, password=None,
private_key=None, private_key_pass=None, port=None, timeout=30,
compress=None):
host = host or self._host
username = username or self._username
password = password or self._password
compress = compress or self._compress
port = port if port is not None else self._port
pkey = self._pkey
if private_key:
pkey = self.load_private_key(private_key, private_key_pass)
log.debug("connecting to host %s on port %d as user %s" % (host, port,
username))
try:
sock = self._get_socket(host, port)
transport = paramiko.Transport(sock)
transport.banner_timeout = timeout
except socket.error:
raise exception.SSHConnectionError(host, port)
# Enable/disable compression
transport.use_compression(compress)
# Authenticate the transport.
try:
transport.connect(username=username, pkey=pkey, password=password)
except paramiko.AuthenticationException:
raise exception.SSHAuthException(username, host)
except paramiko.SSHException, e:
msg = e.args[0]
raise exception.SSHError(msg)
except socket.error:
raise exception.SSHConnectionError(host, port)
except EOFError:
raise exception.SSHConnectionError(host, port)
except Exception, e:
raise exception.SSHError(str(e))
self.close()
self._transport = transport
try:
assert self.sftp is not None
except paramiko.SFTPError, e:
if 'Garbage packet received' in e:
log.debug("Garbage packet received", exc_info=True)
raise exception.SSHAccessDeniedViaAuthKeys(username)
raise
return self
@property
def transport(self):
"""
This property attempts to return an active SSH transport
"""
if not self._transport or not self._transport.is_active():
self.connect(self._host, self._username, self._password,
port=self._port, timeout=self._timeout,
compress=self._compress)
return self._transport
def get_server_public_key(self):
return self.transport.get_remote_server_key()
def is_active(self):
if self._transport:
return self._transport.is_active()
return False
def _get_socket(self, hostname, port):
addrinfo = socket.getaddrinfo(hostname, port, socket.AF_UNSPEC,
socket.SOCK_STREAM)
for (family, socktype, proto, canonname, sockaddr) in addrinfo:
if socktype == socket.SOCK_STREAM:
af = family
break
else:
raise exception.SSHError(
'No suitable address family for %s' % hostname)
sock = socket.socket(af, socket.SOCK_STREAM)
sock.settimeout(self._timeout)
sock.connect((hostname, port))
return sock
def _load_rsa_key(self, private_key, private_key_pass=None):
private_key_file = os.path.expanduser(private_key)
try:
rsa_key = get_rsa_key(key_location=private_key_file,
passphrase=private_key_pass)
log.debug("Using private key %s (RSA)" % private_key)
return rsa_key
except (paramiko.SSHException, exception.SSHError):
log.error('invalid rsa key or passphrase specified')
def _load_dsa_key(self, private_key, private_key_pass=None):
private_key_file = os.path.expanduser(private_key)
try:
dsa_key = get_dsa_key(key_location=private_key_file,
passphrase=private_key_pass)
log.info("Using private key %s (DSA)" % private_key)
return dsa_key
except (paramiko.SSHException, exception.SSHError):
log.error('invalid dsa key or passphrase specified')
@property
def sftp(self):
"""Establish the SFTP connection."""
if not self._sftp or self._sftp.sock.closed:
log.debug("creating sftp connection")
self._sftp = paramiko.SFTPClient.from_transport(self.transport)
return self._sftp
@property
def scp(self):
"""Initialize the SCP client."""
if not self._scp or not self._scp.transport.is_active():
log.debug("creating scp connection")
self._scp = scp.SCPClient(self.transport,
progress=self._file_transfer_progress,
socket_timeout=self._timeout)
return self._scp
def generate_rsa_key(self):
warnings.warn("This method is deprecated: please use "
"starcluster.sshutils.generate_rsa_key instead")
return generate_rsa_key()
def get_public_key(self, key):
warnings.warn("This method is deprecated: please use "
"starcluster.sshutils.get_public_key instead")
return get_public_key(key)
def load_remote_rsa_key(self, remote_filename):
"""
Returns paramiko.RSAKey object for an RSA key located on the remote
machine
"""
rfile = self.remote_file(remote_filename, 'r')
key = get_rsa_key(key_file_obj=rfile)
rfile.close()
return key
def makedirs(self, path, mode=0755):
"""
Same as os.makedirs - makes a new directory and automatically creates
all parent directories if they do not exist.
mode specifies unix permissions to apply to the new dir
"""
head, tail = posixpath.split(path)
if not tail:
head, tail = posixpath.split(head)
if head and tail and not self.path_exists(head):
try:
self.makedirs(head, mode)
except OSError, e:
# be happy if someone already created the path
if e.errno != os.errno.EEXIST:
raise
# xxx/newdir/. exists if xxx/newdir exists
if tail == posixpath.curdir:
return
self.mkdir(path, mode)
def mkdir(self, path, mode=0755, ignore_failure=False):
"""
Make a new directory on the remote machine
        If ignore_failure is True, swallow errors from the mkdir call
        (e.g. if the directory already exists)
mode specifies unix permissions to apply to the new dir
"""
try:
return self.sftp.mkdir(path, mode)
except IOError:
if not ignore_failure:
raise
def get_remote_file_lines(self, remote_file, regex=None, matching=True):
"""
Returns list of lines in a remote_file
If regex is passed only lines that contain a pattern that matches
regex will be returned
If matching is set to False then only lines *not* containing a pattern
that matches regex will be returned
"""
f = self.remote_file(remote_file, 'r')
flines = f.readlines()
f.close()
if regex is None:
return flines
r = re.compile(regex)
lines = []
for line in flines:
match = r.search(line)
if matching and match:
lines.append(line)
elif not matching and not match:
lines.append(line)
return lines
def remove_lines_from_file(self, remote_file, regex):
"""
Removes lines matching regex from remote_file
"""
if regex in [None, '']:
log.debug('no regex supplied...returning')
return
lines = self.get_remote_file_lines(remote_file, regex, matching=False)
log.debug("new %s after removing regex (%s) matches:\n%s" %
(remote_file, regex, ''.join(lines)))
f = self.remote_file(remote_file)
f.writelines(lines)
f.close()
def unlink(self, remote_file):
return self.sftp.unlink(remote_file)
def remote_file(self, file, mode='w'):
"""
Returns a remote file descriptor
"""
rfile = self.sftp.open(file, mode)
rfile.name = file
return rfile
def path_exists(self, path):
"""
Test whether a remote path exists.
Returns False for broken symbolic links
"""
try:
self.stat(path)
return True
except IOError:
return False
def lpath_exists(self, path):
"""
Test whether a remote path exists.
Returns True for broken symbolic links
"""
try:
self.lstat(path)
return True
except IOError:
return False
def chown(self, uid, gid, remote_path):
"""
Set user (uid) and group (gid) owner for remote_path
"""
return self.sftp.chown(remote_path, uid, gid)
def chmod(self, mode, remote_path):
"""
Apply permissions (mode) to remote_path
"""
return self.sftp.chmod(remote_path, mode)
def ls(self, path):
"""
Return a list containing the names of the entries in the remote path.
"""
return [posixpath.join(path, f) for f in self.sftp.listdir(path)]
def glob(self, pattern):
return self._glob.glob(pattern)
def isdir(self, path):
"""
Return true if the remote path refers to an existing directory.
"""
try:
s = self.stat(path)
except IOError:
return False
return stat.S_ISDIR(s.st_mode)
def isfile(self, path):
"""
Return true if the remote path refers to an existing file.
"""
try:
s = self.stat(path)
except IOError:
return False
return stat.S_ISREG(s.st_mode)
def stat(self, path):
"""
Perform a stat system call on the given remote path.
"""
return self.sftp.stat(path)
def lstat(self, path):
"""
Same as stat but doesn't follow symlinks
"""
return self.sftp.lstat(path)
@property
def progress_bar(self):
if not self._progress_bar:
widgets = ['FileTransfer: ', ' ', progressbar.Percentage(), ' ',
progressbar.Bar(marker=progressbar.RotatingMarker()),
' ', progressbar.ETA(), ' ',
progressbar.FileTransferSpeed()]
pbar = progressbar.ProgressBar(widgets=widgets,
maxval=1,
force_update=True)
self._progress_bar = pbar
return self._progress_bar
def _file_transfer_progress(self, filename, size, sent):
pbar = self.progress_bar
pbar.widgets[0] = filename
pbar.maxval = size
pbar.update(sent)
if pbar.finished:
pbar.reset()
def _make_list(self, obj):
if not isinstance(obj, (list, tuple)):
return [obj]
return obj
def get(self, remotepaths, localpath=''):
"""
Copies one or more files from the remote host to the local host.
"""
remotepaths = self._make_list(remotepaths)
localpath = localpath or os.getcwd()
globs = []
noglobs = []
for rpath in remotepaths:
if glob.has_magic(rpath):
globs.append(rpath)
else:
noglobs.append(rpath)
globresults = [self.glob(g) for g in globs]
remotepaths = noglobs
for globresult in globresults:
remotepaths.extend(globresult)
recursive = False
for rpath in remotepaths:
if not self.path_exists(rpath):
raise exception.BaseException(
"Remote file or directory does not exist: %s" % rpath)
for rpath in remotepaths:
if self.isdir(rpath):
recursive = True
break
try:
self.scp.get(remotepaths, local_path=localpath,
recursive=recursive)
except Exception, e:
log.debug("get failed: remotepaths=%s, localpath=%s",
str(remotepaths), localpath)
raise exception.SCPException(str(e))
def put(self, localpaths, remotepath='.'):
"""
Copies one or more files from the local host to the remote host.
"""
localpaths = self._make_list(localpaths)
recursive = False
for lpath in localpaths:
if os.path.isdir(lpath):
recursive = True
break
try:
self.scp.put(localpaths, remote_path=remotepath,
recursive=recursive)
except Exception, e:
log.debug("put failed: localpaths=%s, remotepath=%s",
str(localpaths), remotepath)
raise exception.SCPException(str(e))
def execute_async(self, command, source_profile=True):
"""
Executes a remote command so that it continues running even after this
SSH connection closes. The remote process will be put into the
background via nohup. Does not return output or check for non-zero exit
status.
"""
return self.execute(command, detach=True,
source_profile=source_profile)
def get_last_status(self):
return self.__last_status
def get_status(self, command, source_profile=True):
"""
Execute a remote command and return the exit status
"""
channel = self.transport.open_session()
if source_profile:
command = "source /etc/profile && %s" % command
channel.exec_command(command)
self.__last_status = channel.recv_exit_status()
return self.__last_status
def _get_output(self, channel, silent=True, only_printable=False):
"""
Returns the stdout/stderr output from a ssh channel as a list of
strings (non-interactive only)
"""
# stdin = channel.makefile('wb', -1)
stdout = channel.makefile('rb', -1)
stderr = channel.makefile_stderr('rb', -1)
if silent:
output = stdout.readlines() + stderr.readlines()
else:
output = []
line = None
while line != '':
line = stdout.readline()
if only_printable:
line = ''.join(c for c in line if c in string.printable)
if line != '':
output.append(line)
print line,
for line in stderr.readlines():
output.append(line)
print line,
if only_printable:
output = map(lambda line: ''.join(c for c in line if c in
string.printable), output)
output = map(lambda line: line.strip(), output)
return output
def execute(self, command, silent=True, only_printable=False,
ignore_exit_status=False, log_output=True, detach=False,
source_profile=True, raise_on_failure=True):
"""
Execute a remote command and return stdout/stderr
NOTE: this function blocks until the process finishes
kwargs:
silent - don't print the command's output to the console
only_printable - filter the command's output to allow only printable
characters
ignore_exit_status - don't warn about non-zero exit status
log_output - log all remote output to the debug file
detach - detach the remote process so that it continues to run even
after the SSH connection closes (does NOT return output or
check for non-zero exit status if detach=True)
source_profile - if True prefix the command with "source /etc/profile"
raise_on_failure - raise exception.SSHError if command fails
returns List of output lines
"""
channel = self.transport.open_session()
if detach:
command = "nohup %s &" % command
if source_profile:
command = "source /etc/profile && %s" % command
channel.exec_command(command)
channel.close()
self.__last_status = None
return
if source_profile:
command = "source /etc/profile && %s" % command
log.debug("executing remote command: %s" % command)
channel.exec_command(command)
output = self._get_output(channel, silent=silent,
only_printable=only_printable)
exit_status = channel.recv_exit_status()
self.__last_status = exit_status
out_str = '\n'.join(output)
if exit_status != 0:
msg = "remote command '%s' failed with status %d"
msg %= (command, exit_status)
if log_output:
msg += ":\n%s" % out_str
else:
msg += " (no output log requested)"
if not ignore_exit_status:
if raise_on_failure:
raise exception.RemoteCommandFailed(
msg, command, exit_status, out_str)
else:
log.error(msg)
else:
log.debug("(ignored) " + msg)
else:
if log_output:
log.debug("output of '%s':\n%s" % (command, out_str))
else:
log.debug("output of '%s' has been hidden" % command)
return output
def has_required(self, progs):
"""
Same as check_required but returns False if not all commands exist
"""
try:
return self.check_required(progs)
except exception.RemoteCommandNotFound:
return False
def check_required(self, progs):
"""
Checks that all commands in the progs list exist on the remote system.
Returns True if all commands exist and raises exception.CommandNotFound
if not.
"""
for prog in progs:
if not self.which(prog):
raise exception.RemoteCommandNotFound(prog)
return True
def which(self, prog):
return self.execute('which %s' % prog, ignore_exit_status=True)
def get_path(self):
"""Returns the PATH environment variable on the remote machine"""
return self.get_env()['PATH']
def get_env(self):
"""Returns the remote machine's environment as a dictionary"""
env = {}
for line in self.execute('env'):
key, val = line.split('=', 1)
env[key] = val
return env
def close(self):
"""Closes the connection and cleans up."""
if self._sftp:
self._sftp.close()
if self._transport:
self._transport.close()
def _invoke_shell(self, term='screen', cols=80, lines=24):
chan = self.transport.open_session()
chan.get_pty(term, cols, lines)
chan.invoke_shell()
return chan
def get_current_user(self):
if not self.is_active():
return
return self.transport.get_username()
def switch_user(self, user):
"""
Reconnect, if necessary, to host as user
"""
if not self.is_active() or user and self.get_current_user() != user:
self.connect(username=user)
else:
user = user or self._username
log.debug("already connected as user %s" % user)
def interactive_shell(self, user='root'):
orig_user = self.get_current_user()
self.switch_user(user)
chan = self._invoke_shell()
log.info('Starting Pure-Python SSH shell...')
if HAS_TERMIOS:
self._posix_shell(chan)
else:
self._windows_shell(chan)
chan.close()
self.switch_user(orig_user)
def _posix_shell(self, chan):
import select
oldtty = termios.tcgetattr(sys.stdin)
try:
tty.setraw(sys.stdin.fileno())
tty.setcbreak(sys.stdin.fileno())
chan.settimeout(0.0)
# needs to be sent to give vim correct size FIX
chan.send('eval $(resize)\n')
while True:
r, w, e = select.select([chan, sys.stdin], [], [])
if chan in r:
try:
x = chan.recv(1024)
if len(x) == 0:
print '\r\n*** EOF\r\n',
break
sys.stdout.write(x)
sys.stdout.flush()
except socket.timeout:
pass
if sys.stdin in r:
# fixes up arrow problem
x = os.read(sys.stdin.fileno(), 1)
if len(x) == 0:
break
chan.send(x)
finally:
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, oldtty)
# thanks to Mike Looijmans for this code
def _windows_shell(self, chan):
import threading
sys.stdout.write("Line-buffered terminal emulation. "
"Press F6 or ^Z to send EOF.\r\n\r\n")
def writeall(sock):
while True:
data = sock.recv(256)
if not data:
sys.stdout.write('\r\n*** EOF ***\r\n\r\n')
sys.stdout.flush()
break
sys.stdout.write(data)
sys.stdout.flush()
writer = threading.Thread(target=writeall, args=(chan,))
writer.start()
# needs to be sent to give vim correct size FIX
chan.send('eval $(resize)\n')
try:
while True:
d = sys.stdin.read(1)
if not d:
break
chan.send(d)
except EOFError:
# user hit ^Z or F6
pass
def __del__(self):
"""Attempt to clean up if not explicitly closed."""
log.debug('__del__ called')
self.close()
# for backwards compatibility
Connection = SSHClient
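# --- Illustrative usage (not part of the original module) ---
# Connect with a private key, run a command, and copy a file up. The host
# name and key path are hypothetical; this helper is never called by the
# library itself.
def _example_usage():
    ssh = SSHClient('master.example.com', username='root',
                    private_key='~/.ssh/mykey.rsa')
    try:
        # execute() blocks until completion and returns the output lines
        uptime = ssh.execute('uptime')
        log.info("remote uptime: %s" % '\n'.join(uptime))
        ssh.put('/tmp/local_file', '/tmp/remote_file')
    finally:
        ssh.close()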
class SSHGlob(object):
def __init__(self, ssh_client):
self.ssh = ssh_client
def glob(self, pathname):
return list(self.iglob(pathname))
def iglob(self, pathname):
"""
Return an iterator which yields the paths matching a pathname pattern.
The pattern may contain simple shell-style wildcards a la fnmatch.
"""
if not glob.has_magic(pathname):
if self.ssh.lpath_exists(pathname):
yield pathname
return
dirname, basename = posixpath.split(pathname)
if not dirname:
for name in self.glob1(posixpath.curdir, basename):
yield name
return
if glob.has_magic(dirname):
dirs = self.iglob(dirname)
else:
dirs = [dirname]
if glob.has_magic(basename):
glob_in_dir = self.glob1
else:
glob_in_dir = self.glob0
for dirname in dirs:
for name in glob_in_dir(dirname, basename):
yield posixpath.join(dirname, name)
def glob0(self, dirname, basename):
if basename == '':
# `os.path.split()` returns an empty basename for paths ending with
# a directory separator. 'q*x/' should match only directories.
if self.ssh.isdir(dirname):
return [basename]
else:
            if self.ssh.lpath_exists(posixpath.join(dirname, basename)):
return [basename]
return []
def glob1(self, dirname, pattern):
if not dirname:
dirname = posixpath.curdir
if isinstance(pattern, unicode) and not isinstance(dirname, unicode):
# enc = sys.getfilesystemencoding() or sys.getdefaultencoding()
# dirname = unicode(dirname, enc)
dirname = unicode(dirname, 'UTF-8')
try:
names = [posixpath.basename(n) for n in self.ssh.ls(dirname)]
except os.error:
return []
if pattern[0] != '.':
names = filter(lambda x: x[0] != '.', names)
return fnmatch.filter(names, pattern)
def insert_char_every_n_chars(string, char='\n', every=64):
return char.join(
string[i:i + every] for i in xrange(0, len(string), every))
def get_rsa_key(key_location=None, key_file_obj=None, passphrase=None,
use_pycrypto=False):
key_fobj = key_file_obj or open(key_location)
try:
if use_pycrypto:
key = RSA.importKey(key_fobj, passphrase=passphrase)
else:
key = paramiko.RSAKey.from_private_key(key_fobj,
password=passphrase)
return key
except (paramiko.SSHException, ValueError):
raise exception.SSHError(
"Invalid RSA private key file or missing passphrase: %s" %
key_location)
def get_dsa_key(key_location=None, key_file_obj=None, passphrase=None,
use_pycrypto=False):
key_fobj = key_file_obj or open(key_location)
try:
key = paramiko.DSSKey.from_private_key(key_fobj,
password=passphrase)
if use_pycrypto:
key = DSA.construct((key.y, key.g, key.p, key.q, key.x))
return key
except (paramiko.SSHException, ValueError):
raise exception.SSHError(
"Invalid DSA private key file or missing passphrase: %s" %
key_location)
def get_public_key(key):
return ' '.join([key.get_name(), key.get_base64()])
def generate_rsa_key():
return paramiko.RSAKey.generate(2048)
def get_private_rsa_fingerprint(key_location=None, key_file_obj=None,
passphrase=None):
"""
Returns the fingerprint of a private RSA key as a 59-character string (40
characters separated every 2 characters by a ':'). The fingerprint is
computed using the SHA1 (hex) digest of the DER-encoded (pkcs8) RSA private
key.
"""
k = get_rsa_key(key_location=key_location, key_file_obj=key_file_obj,
passphrase=passphrase, use_pycrypto=True)
sha1digest = hashlib.sha1(k.exportKey('DER', pkcs=8)).hexdigest()
fingerprint = insert_char_every_n_chars(sha1digest, ':', 2)
key = key_location or key_file_obj
log.debug("rsa private key fingerprint (%s): %s" % (key, fingerprint))
return fingerprint
def get_public_rsa_fingerprint(key_location=None, key_file_obj=None,
passphrase=None):
"""
Returns the fingerprint of the public portion of an RSA key as a
47-character string (32 characters separated every 2 characters by a ':').
The fingerprint is computed using the MD5 (hex) digest of the DER-encoded
RSA public key.
"""
privkey = get_rsa_key(key_location=key_location, key_file_obj=key_file_obj,
passphrase=passphrase, use_pycrypto=True)
pubkey = privkey.publickey()
md5digest = hashlib.md5(pubkey.exportKey('DER')).hexdigest()
fingerprint = insert_char_every_n_chars(md5digest, ':', 2)
key = key_location or key_file_obj
log.debug("rsa public key fingerprint (%s): %s" % (key, fingerprint))
return fingerprint
def test_create_keypair_fingerprint(keypair=None):
"""
TODO: move this to 'live' tests
"""
from starcluster import config
cfg = config.StarClusterConfig().load()
ec2 = cfg.get_easy_ec2()
if keypair is None:
keypair = cfg.keys.keys()[0]
key_location = cfg.get_key(keypair).key_location
localfprint = get_private_rsa_fingerprint(key_location)
ec2fprint = ec2.get_keypair(keypair).fingerprint
print 'local fingerprint: %s' % localfprint
print ' ec2 fingerprint: %s' % ec2fprint
assert localfprint == ec2fprint
def test_import_keypair_fingerprint(keypair):
"""
TODO: move this to 'live' tests
"""
from starcluster import config
cfg = config.StarClusterConfig().load()
ec2 = cfg.get_easy_ec2()
key_location = cfg.get_key(keypair).key_location
localfprint = get_public_rsa_fingerprint(key_location)
ec2fprint = ec2.get_keypair(keypair).fingerprint
print 'local fingerprint: %s' % localfprint
print ' ec2 fingerprint: %s' % ec2fprint
assert localfprint == ec2fprint
| gpl-3.0 | 3,477,695,768,460,747,000 | 34.268935 | 79 | 0.566698 | false |
DARKPOP/external_chromium_org | build/android/avd.py | 46 | 3735 | #!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Launches Android Virtual Devices with a set configuration for testing Chrome.
The script will launch a specified number of Android Virtual Devices (AVDs).
"""
import install_emulator_deps
import logging
import optparse
import os
import re
import sys
from pylib import cmd_helper
from pylib import constants
from pylib.utils import emulator
def main(argv):
# ANDROID_SDK_ROOT needs to be set to the location of the SDK used to launch
# the emulator to find the system images upon launch.
emulator_sdk = os.path.join(constants.EMULATOR_SDK_ROOT, 'sdk')
os.environ['ANDROID_SDK_ROOT'] = emulator_sdk
opt_parser = optparse.OptionParser(description='AVD script.')
  opt_parser.add_option('--name', help='Optionally, name of existing AVD to '
                        'launch. If not specified, new AVDs will be created')
opt_parser.add_option('-n', '--num', dest='emulator_count',
help='Number of emulators to launch (default is 1).',
type='int', default='1')
opt_parser.add_option('--abi', default='x86',
help='Platform of emulators to launch (x86 default).')
opt_parser.add_option('--api-level', dest='api_level',
help='API level for the image, e.g. 19 for Android 4.4',
type='int', default=constants.ANDROID_SDK_VERSION)
options, _ = opt_parser.parse_args(argv[1:])
logging.basicConfig(level=logging.INFO,
format='# %(asctime)-15s: %(message)s')
logging.root.setLevel(logging.INFO)
  # Check if KVM is enabled for x86 AVDs and check for x86 system images.
# TODO(andrewhayden) Since we can fix all of these with install_emulator_deps
# why don't we just run it?
if options.abi == 'x86':
if not install_emulator_deps.CheckKVM():
logging.critical('ERROR: KVM must be enabled in BIOS, and installed. '
'Enable KVM in BIOS and run install_emulator_deps.py')
return 1
elif not install_emulator_deps.CheckX86Image(options.api_level):
logging.critical('ERROR: System image for x86 AVD not installed. Run '
'install_emulator_deps.py')
return 1
if not install_emulator_deps.CheckSDK():
logging.critical('ERROR: Emulator SDK not installed. Run '
'install_emulator_deps.py.')
return 1
# If AVD is specified, check that the SDK has the required target. If not,
  # check that the SDK has the desired target for the temporary AVDs.
api_level = options.api_level
if options.name:
android = os.path.join(constants.EMULATOR_SDK_ROOT, 'sdk', 'tools',
'android')
avds_output = cmd_helper.GetCmdOutput([android, 'list', 'avd'])
names = re.findall('Name: (\w+)', avds_output)
api_levels = re.findall('API level (\d+)', avds_output)
try:
avd_index = names.index(options.name)
except ValueError:
logging.critical('ERROR: Specified AVD %s does not exist.' % options.name)
return 1
api_level = int(api_levels[avd_index])
if not install_emulator_deps.CheckSDKPlatform(api_level):
logging.critical('ERROR: Emulator SDK missing required target for API %d. '
                     'Run install_emulator_deps.py.', api_level)
return 1
if options.name:
emulator.LaunchEmulator(options.name, options.abi)
else:
emulator.LaunchTempEmulators(options.emulator_count, options.abi,
options.api_level, True)
if __name__ == '__main__':
sys.exit(main(sys.argv))
| bsd-3-clause | 9,026,403,345,575,977,000 | 37.90625 | 80 | 0.653012 | false |
jakdept/pythonbook | ch4/picture_grid_test.py | 1 | 1147 | #!/usr/bin/env python3.5
'''
Automate the Boring Stuff with Python
generic testing for chapter 4 projects
Jack Hayhurst
'''
from io import StringIO
import unittest
from unittest.mock import patch
import picture_grid
class TestPictureGrid(unittest.TestCase):
'''tests the picture_grid.py script'''
def test_picture_grid(self):
'''single test to verify picture transfer'''
grid = [['.', '.', '.', '.', '.', '.'],
['.', 'O', 'O', '.', '.', '.'],
['O', 'O', 'O', 'O', '.', '.'],
['O', 'O', 'O', 'O', 'O', '.'],
['.', 'O', 'O', 'O', 'O', 'O'],
['O', 'O', 'O', 'O', 'O', '.'],
['O', 'O', 'O', 'O', '.', '.'],
['.', 'O', 'O', '.', '.', '.'],
['.', '.', '.', '.', '.', '.']]
output = """
..OO.OO..
.OOOOOOO.
.OOOOOOO.
..OOOOO..
...OOO...
....O....
""".strip()
with patch('sys.stdout', new=StringIO()) as fakeOutput:
picture_grid.picture_grid(grid)
self.assertEqual(fakeOutput.getvalue().strip(), output)
if __name__ == "__main__":
unittest.main()
| mit | 4,971,555,962,770,355,000 | 24.511111 | 67 | 0.428945 | false |
myerpengine/odoo | addons/calendar/controllers/main.py | 36 | 3503 | import simplejson
import openerp
import openerp.addons.web.http as http
from openerp.addons.web.http import request
import openerp.addons.web.controllers.main as webmain
import json
class meeting_invitation(http.Controller):
@http.route('/calendar/meeting/accept', type='http', auth="calendar")
def accept(self, db, token, action, id, **kwargs):
registry = openerp.modules.registry.RegistryManager.get(db)
attendee_pool = registry.get('calendar.attendee')
with registry.cursor() as cr:
attendee_id = attendee_pool.search(cr, openerp.SUPERUSER_ID, [('access_token', '=', token), ('state', '!=', 'accepted')])
if attendee_id:
attendee_pool.do_accept(cr, openerp.SUPERUSER_ID, attendee_id)
return self.view(db, token, action, id, view='form')
@http.route('/calendar/meeting/decline', type='http', auth="calendar")
def declined(self, db, token, action, id):
registry = openerp.modules.registry.RegistryManager.get(db)
attendee_pool = registry.get('calendar.attendee')
with registry.cursor() as cr:
attendee_id = attendee_pool.search(cr, openerp.SUPERUSER_ID, [('access_token', '=', token), ('state', '!=', 'declined')])
if attendee_id:
attendee_pool.do_decline(cr, openerp.SUPERUSER_ID, attendee_id)
return self.view(db, token, action, id, view='form')
@http.route('/calendar/meeting/view', type='http', auth="calendar")
def view(self, db, token, action, id, view='calendar'):
registry = openerp.modules.registry.RegistryManager.get(db)
meeting_pool = registry.get('calendar.event')
attendee_pool = registry.get('calendar.attendee')
partner_pool = registry.get('res.partner')
with registry.cursor() as cr:
attendee = attendee_pool.search_read(cr, openerp.SUPERUSER_ID, [('access_token', '=', token)], [])
if attendee and attendee[0] and attendee[0].get('partner_id'):
partner_id = int(attendee[0].get('partner_id')[0])
tz = partner_pool.read(cr, openerp.SUPERUSER_ID, partner_id, ['tz'])['tz']
else:
tz = False
attendee_data = meeting_pool.get_attendee(cr, openerp.SUPERUSER_ID, id, dict(tz=tz))
if attendee:
attendee_data['current_attendee'] = attendee[0]
values = dict(init="s.calendar.event('%s', '%s', '%s', '%s' , '%s');" % (db, action, id, 'form', json.dumps(attendee_data)))
return request.render('web.webclient_bootstrap', values)
    # Called over RPC every 5 minutes to check whether any notifications are due for an event
@http.route('/calendar/notify', type='json', auth="none")
def notify(self):
registry = openerp.modules.registry.RegistryManager.get(request.session.db)
uid = request.session.uid
context = request.session.context
with registry.cursor() as cr:
res = registry.get("calendar.alarm_manager").get_next_notif(cr, uid, context=context)
return res
@http.route('/calendar/notify_ack', type='json', auth="none")
def notify_ack(self, type=''):
registry = openerp.modules.registry.RegistryManager.get(request.session.db)
uid = request.session.uid
context = request.session.context
with registry.cursor() as cr:
res = registry.get("res.partner").calendar_last_notif_ack(cr, uid, context=context)
return res
| agpl-3.0 | -2,846,991,591,677,144,000 | 48.338028 | 133 | 0.634599 | false |
s20121035/rk3288_android5.1_repo | external/mesa3d/scons/crossmingw.py | 13 | 8201 | """SCons.Tool.gcc
Tool-specific initialization for MinGW (http://www.mingw.org/)
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
See also http://www.scons.org/wiki/CrossCompilingMingw
"""
#
# Copyright (c) 2001, 2002, 2003, 2004 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import os
import os.path
import string
import SCons.Action
import SCons.Builder
import SCons.Tool
import SCons.Util
# This is what we search for to find mingw:
prefixes32 = SCons.Util.Split("""
mingw32-
mingw32msvc-
i386-mingw32-
i486-mingw32-
i586-mingw32-
i686-mingw32-
i386-mingw32msvc-
i486-mingw32msvc-
i586-mingw32msvc-
i686-mingw32msvc-
i686-pc-mingw32-
i686-w64-mingw32-
""")
prefixes64 = SCons.Util.Split("""
x86_64-w64-mingw32-
amd64-mingw32-
amd64-mingw32msvc-
amd64-pc-mingw32-
""")
def find(env):
if env['machine'] == 'x86_64':
prefixes = prefixes64
else:
prefixes = prefixes32
for prefix in prefixes:
# First search in the SCons path and then the OS path:
if env.WhereIs(prefix + 'gcc') or SCons.Util.WhereIs(prefix + 'gcc'):
return prefix
return ''
def shlib_generator(target, source, env, for_signature):
cmd = SCons.Util.CLVar(['$SHLINK', '$SHLINKFLAGS'])
dll = env.FindIxes(target, 'SHLIBPREFIX', 'SHLIBSUFFIX')
if dll: cmd.extend(['-o', dll])
cmd.extend(['$SOURCES', '$_LIBDIRFLAGS', '$_LIBFLAGS'])
implib = env.FindIxes(target, 'LIBPREFIX', 'LIBSUFFIX')
if implib: cmd.append('-Wl,--out-implib,'+implib.get_string(for_signature))
def_target = env.FindIxes(target, 'WIN32DEFPREFIX', 'WIN32DEFSUFFIX')
if def_target: cmd.append('-Wl,--output-def,'+def_target.get_string(for_signature))
return [cmd]
def shlib_emitter(target, source, env):
dll = env.FindIxes(target, 'SHLIBPREFIX', 'SHLIBSUFFIX')
no_import_lib = env.get('no_import_lib', 0)
if not dll:
raise SCons.Errors.UserError, "A shared library should have exactly one target with the suffix: %s" % env.subst("$SHLIBSUFFIX")
if not no_import_lib and \
not env.FindIxes(target, 'LIBPREFIX', 'LIBSUFFIX'):
# Append an import library to the list of targets.
target.append(env.ReplaceIxes(dll,
'SHLIBPREFIX', 'SHLIBSUFFIX',
'LIBPREFIX', 'LIBSUFFIX'))
# Append a def file target if there isn't already a def file target
# or a def file source. There is no option to disable def file
# target emitting, because I can't figure out why someone would ever
# want to turn it off.
def_source = env.FindIxes(source, 'WIN32DEFPREFIX', 'WIN32DEFSUFFIX')
def_target = env.FindIxes(target, 'WIN32DEFPREFIX', 'WIN32DEFSUFFIX')
if not def_source and not def_target:
target.append(env.ReplaceIxes(dll,
'SHLIBPREFIX', 'SHLIBSUFFIX',
'WIN32DEFPREFIX', 'WIN32DEFSUFFIX'))
return (target, source)
shlib_action = SCons.Action.Action(shlib_generator, '$SHLINKCOMSTR', generator=1)
res_action = SCons.Action.Action('$RCCOM', '$RCCOMSTR')
res_builder = SCons.Builder.Builder(action=res_action, suffix='.o',
source_scanner=SCons.Tool.SourceFileScanner)
SCons.Tool.SourceFileScanner.add_scanner('.rc', SCons.Defaults.CScan)
def compile_without_gstabs(env, sources, c_file):
'''This is a hack used to compile some source files without the
-gstabs option.
It seems that some versions of mingw32's gcc (4.4.2 at least) die
when compiling large files with the -gstabs option. -gstabs is
    related to debug symbols and can be omitted from the affected
files.
This function compiles the given c_file without -gstabs, removes
the c_file from the sources list, then appends the new .o file to
sources. Then return the new sources list.
'''
# Modify CCFLAGS to not have -gstabs option:
env2 = env.Clone()
flags = str(env2['CCFLAGS'])
flags = flags.replace("-gstabs", "")
env2['CCFLAGS'] = SCons.Util.CLVar(flags)
# Build the special-case files:
obj_file = env2.SharedObject(c_file)
# Replace ".cpp" or ".c" with ".o"
o_file = c_file.replace(".cpp", ".o")
o_file = o_file.replace(".c", ".o")
# Replace the .c files with the specially-compiled .o file
sources.remove(c_file)
sources.append(o_file)
return sources
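# Hypothetical SConscript usage (file name assumed), going through the
# AddMethod registration performed in generate() below:
#   sources = env.compile_without_gstabs(sources, 'huge_generated.c')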
def generate(env):
mingw_prefix = find(env)
if mingw_prefix:
dir = os.path.dirname(env.WhereIs(mingw_prefix + 'gcc') or SCons.Util.WhereIs(mingw_prefix + 'gcc'))
# The mingw bin directory must be added to the path:
path = env['ENV'].get('PATH', [])
if not path:
path = []
if SCons.Util.is_String(path):
path = string.split(path, os.pathsep)
env['ENV']['PATH'] = string.join([dir] + path, os.pathsep)
# Most of mingw is the same as gcc and friends...
gnu_tools = ['gcc', 'g++', 'gnulink', 'ar', 'gas']
for tool in gnu_tools:
SCons.Tool.Tool(tool)(env)
#... but a few things differ:
env['CC'] = mingw_prefix + 'gcc'
env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS')
env['CXX'] = mingw_prefix + 'g++'
env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS')
env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -shared')
env['SHLINKCOM'] = shlib_action
env.Append(SHLIBEMITTER = [shlib_emitter])
env['LINK'] = mingw_prefix + 'g++'
env['AR'] = mingw_prefix + 'ar'
env['RANLIB'] = mingw_prefix + 'ranlib'
env['LINK'] = mingw_prefix + 'g++'
env['AS'] = mingw_prefix + 'as'
env['WIN32DEFPREFIX'] = ''
env['WIN32DEFSUFFIX'] = '.def'
env['SHOBJSUFFIX'] = '.o'
env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1
env['RC'] = mingw_prefix + 'windres'
env['RCFLAGS'] = SCons.Util.CLVar('')
env['RCCOM'] = '$RC $_CPPDEFFLAGS $_CPPINCFLAGS ${INCPREFIX}${SOURCE.dir} $RCFLAGS -i $SOURCE -o $TARGET'
env['BUILDERS']['RES'] = res_builder
# Some setting from the platform also have to be overridden:
env['OBJPREFIX'] = ''
env['OBJSUFFIX'] = '.o'
env['SHOBJPREFIX'] = '$OBJPREFIX'
env['SHOBJSUFFIX'] = '$OBJSUFFIX'
env['PROGPREFIX'] = ''
env['PROGSUFFIX'] = '.exe'
env['LIBPREFIX'] = 'lib'
env['LIBSUFFIX'] = '.a'
env['SHLIBPREFIX'] = ''
env['SHLIBSUFFIX'] = '.dll'
env['LIBPREFIXES'] = [ 'lib', '' ]
env['LIBSUFFIXES'] = [ '.a', '.lib' ]
# MinGW x86 port of gdb does not handle well dwarf debug info which is the
# default in recent gcc versions. The x64 port gdb from mingw-w64 seems to
# handle it fine though, so stick with the default there.
if env['machine'] != 'x86_64':
env.AppendUnique(CCFLAGS = ['-gstabs'])
env.AddMethod(compile_without_gstabs, 'compile_without_gstabs')
def exists(env):
return find(env)
| gpl-3.0 | 3,247,736,349,826,968,000 | 34.197425 | 135 | 0.64041 | false |
eleonrk/SickRage | lib/pbr/tests/test_version.py | 13 | 14100 | # Copyright 2012 Red Hat, Inc.
# Copyright 2012-2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import itertools
from testtools import matchers
from pbr.tests import base
from pbr import version
from_pip_string = version.SemanticVersion.from_pip_string
class TestSemanticVersion(base.BaseTestCase):
def test_ordering(self):
ordered_versions = [
"1.2.3.dev6",
"1.2.3.dev7",
"1.2.3.a4.dev12",
"1.2.3.a4.dev13",
"1.2.3.a4",
"1.2.3.a5.dev1",
"1.2.3.a5",
"1.2.3.b3.dev1",
"1.2.3.b3",
"1.2.3.rc2.dev1",
"1.2.3.rc2",
"1.2.3.rc3.dev1",
"1.2.3",
"1.2.4",
"1.3.3",
"2.2.3",
]
for v in ordered_versions:
sv = version.SemanticVersion.from_pip_string(v)
self.expectThat(sv, matchers.Equals(sv))
for left, right in itertools.combinations(ordered_versions, 2):
l_pos = ordered_versions.index(left)
r_pos = ordered_versions.index(right)
if l_pos < r_pos:
m1 = matchers.LessThan
m2 = matchers.GreaterThan
else:
m1 = matchers.GreaterThan
m2 = matchers.LessThan
left_sv = version.SemanticVersion.from_pip_string(left)
right_sv = version.SemanticVersion.from_pip_string(right)
self.expectThat(left_sv, m1(right_sv))
self.expectThat(right_sv, m2(left_sv))
def test_from_pip_string_legacy_alpha(self):
expected = version.SemanticVersion(
1, 2, 0, prerelease_type='rc', prerelease=1)
parsed = from_pip_string('1.2.0rc1')
self.assertEqual(expected, parsed)
def test_from_pip_string_legacy_postN(self):
# When pbr trunk was incompatible with PEP-440, a stable release was
# made that used postN versions to represent developer builds. As
# we expect only to be parsing versions of our own, we map those
# into dev builds of the next version.
expected = version.SemanticVersion(1, 2, 4, dev_count=5)
parsed = from_pip_string('1.2.3.post5')
self.expectThat(expected, matchers.Equals(parsed))
expected = version.SemanticVersion(1, 2, 3, 'a', 5, dev_count=6)
parsed = from_pip_string('1.2.3.0a4.post6')
self.expectThat(expected, matchers.Equals(parsed))
# We can't define a mapping for .postN.devM, so it should raise.
self.expectThat(
lambda: from_pip_string('1.2.3.post5.dev6'),
matchers.raises(ValueError))
def test_from_pip_string_v_version(self):
parsed = from_pip_string('v1.2.3')
expected = version.SemanticVersion(1, 2, 3)
self.expectThat(expected, matchers.Equals(parsed))
expected = version.SemanticVersion(1, 2, 3, 'a', 5, dev_count=6)
parsed = from_pip_string('V1.2.3.0a4.post6')
self.expectThat(expected, matchers.Equals(parsed))
self.expectThat(
lambda: from_pip_string('x1.2.3'),
matchers.raises(ValueError))
def test_from_pip_string_legacy_nonzero_lead_in(self):
# reported in bug 1361251
expected = version.SemanticVersion(
0, 0, 1, prerelease_type='a', prerelease=2)
parsed = from_pip_string('0.0.1a2')
self.assertEqual(expected, parsed)
def test_from_pip_string_legacy_short_nonzero_lead_in(self):
expected = version.SemanticVersion(
0, 1, 0, prerelease_type='a', prerelease=2)
parsed = from_pip_string('0.1a2')
self.assertEqual(expected, parsed)
def test_from_pip_string_legacy_no_0_prerelease(self):
expected = version.SemanticVersion(
2, 1, 0, prerelease_type='rc', prerelease=1)
parsed = from_pip_string('2.1.0.rc1')
self.assertEqual(expected, parsed)
def test_from_pip_string_legacy_no_0_prerelease_2(self):
expected = version.SemanticVersion(
2, 0, 0, prerelease_type='rc', prerelease=1)
parsed = from_pip_string('2.0.0.rc1')
self.assertEqual(expected, parsed)
def test_from_pip_string_legacy_non_440_beta(self):
expected = version.SemanticVersion(
2014, 2, prerelease_type='b', prerelease=2)
parsed = from_pip_string('2014.2.b2')
self.assertEqual(expected, parsed)
def test_from_pip_string_pure_git_hash(self):
self.assertRaises(ValueError, from_pip_string, '6eed5ae')
def test_from_pip_string_non_digit_start(self):
self.assertRaises(ValueError, from_pip_string,
'non-release-tag/2014.12.16-1')
def test_final_version(self):
semver = version.SemanticVersion(1, 2, 3)
self.assertEqual((1, 2, 3, 'final', 0), semver.version_tuple())
self.assertEqual("1.2.3", semver.brief_string())
self.assertEqual("1.2.3", semver.debian_string())
self.assertEqual("1.2.3", semver.release_string())
self.assertEqual("1.2.3", semver.rpm_string())
self.assertEqual(semver, from_pip_string("1.2.3"))
def test_parsing_short_forms(self):
semver = version.SemanticVersion(1, 0, 0)
self.assertEqual(semver, from_pip_string("1"))
self.assertEqual(semver, from_pip_string("1.0"))
self.assertEqual(semver, from_pip_string("1.0.0"))
def test_dev_version(self):
semver = version.SemanticVersion(1, 2, 4, dev_count=5)
self.assertEqual((1, 2, 4, 'dev', 4), semver.version_tuple())
self.assertEqual("1.2.4", semver.brief_string())
self.assertEqual("1.2.4~dev5", semver.debian_string())
self.assertEqual("1.2.4.dev5", semver.release_string())
self.assertEqual("1.2.3.dev5", semver.rpm_string())
self.assertEqual(semver, from_pip_string("1.2.4.dev5"))
def test_dev_no_git_version(self):
semver = version.SemanticVersion(1, 2, 4, dev_count=5)
self.assertEqual((1, 2, 4, 'dev', 4), semver.version_tuple())
self.assertEqual("1.2.4", semver.brief_string())
self.assertEqual("1.2.4~dev5", semver.debian_string())
self.assertEqual("1.2.4.dev5", semver.release_string())
self.assertEqual("1.2.3.dev5", semver.rpm_string())
self.assertEqual(semver, from_pip_string("1.2.4.dev5"))
def test_dev_zero_version(self):
semver = version.SemanticVersion(1, 2, 0, dev_count=5)
self.assertEqual((1, 2, 0, 'dev', 4), semver.version_tuple())
self.assertEqual("1.2.0", semver.brief_string())
self.assertEqual("1.2.0~dev5", semver.debian_string())
self.assertEqual("1.2.0.dev5", semver.release_string())
self.assertEqual("1.1.9999.dev5", semver.rpm_string())
self.assertEqual(semver, from_pip_string("1.2.0.dev5"))
def test_alpha_dev_version(self):
semver = version.SemanticVersion(1, 2, 4, 'a', 1, 12)
self.assertEqual((1, 2, 4, 'alphadev', 12), semver.version_tuple())
self.assertEqual("1.2.4", semver.brief_string())
self.assertEqual("1.2.4~a1.dev12", semver.debian_string())
self.assertEqual("1.2.4.0a1.dev12", semver.release_string())
self.assertEqual("1.2.3.a1.dev12", semver.rpm_string())
self.assertEqual(semver, from_pip_string("1.2.4.0a1.dev12"))
def test_alpha_version(self):
semver = version.SemanticVersion(1, 2, 4, 'a', 1)
self.assertEqual((1, 2, 4, 'alpha', 1), semver.version_tuple())
self.assertEqual("1.2.4", semver.brief_string())
self.assertEqual("1.2.4~a1", semver.debian_string())
self.assertEqual("1.2.4.0a1", semver.release_string())
self.assertEqual("1.2.3.a1", semver.rpm_string())
self.assertEqual(semver, from_pip_string("1.2.4.0a1"))
def test_alpha_zero_version(self):
semver = version.SemanticVersion(1, 2, 0, 'a', 1)
self.assertEqual((1, 2, 0, 'alpha', 1), semver.version_tuple())
self.assertEqual("1.2.0", semver.brief_string())
self.assertEqual("1.2.0~a1", semver.debian_string())
self.assertEqual("1.2.0.0a1", semver.release_string())
self.assertEqual("1.1.9999.a1", semver.rpm_string())
self.assertEqual(semver, from_pip_string("1.2.0.0a1"))
def test_alpha_major_zero_version(self):
semver = version.SemanticVersion(1, 0, 0, 'a', 1)
self.assertEqual((1, 0, 0, 'alpha', 1), semver.version_tuple())
self.assertEqual("1.0.0", semver.brief_string())
self.assertEqual("1.0.0~a1", semver.debian_string())
self.assertEqual("1.0.0.0a1", semver.release_string())
self.assertEqual("0.9999.9999.a1", semver.rpm_string())
self.assertEqual(semver, from_pip_string("1.0.0.0a1"))
def test_alpha_default_version(self):
semver = version.SemanticVersion(1, 2, 4, 'a')
self.assertEqual((1, 2, 4, 'alpha', 0), semver.version_tuple())
self.assertEqual("1.2.4", semver.brief_string())
self.assertEqual("1.2.4~a0", semver.debian_string())
self.assertEqual("1.2.4.0a0", semver.release_string())
self.assertEqual("1.2.3.a0", semver.rpm_string())
self.assertEqual(semver, from_pip_string("1.2.4.0a0"))
def test_beta_dev_version(self):
semver = version.SemanticVersion(1, 2, 4, 'b', 1, 12)
self.assertEqual((1, 2, 4, 'betadev', 12), semver.version_tuple())
self.assertEqual("1.2.4", semver.brief_string())
self.assertEqual("1.2.4~b1.dev12", semver.debian_string())
self.assertEqual("1.2.4.0b1.dev12", semver.release_string())
self.assertEqual("1.2.3.b1.dev12", semver.rpm_string())
self.assertEqual(semver, from_pip_string("1.2.4.0b1.dev12"))
def test_beta_version(self):
semver = version.SemanticVersion(1, 2, 4, 'b', 1)
self.assertEqual((1, 2, 4, 'beta', 1), semver.version_tuple())
self.assertEqual("1.2.4", semver.brief_string())
self.assertEqual("1.2.4~b1", semver.debian_string())
self.assertEqual("1.2.4.0b1", semver.release_string())
self.assertEqual("1.2.3.b1", semver.rpm_string())
self.assertEqual(semver, from_pip_string("1.2.4.0b1"))
def test_decrement_nonrelease(self):
# The prior version of any non-release is a release
semver = version.SemanticVersion(1, 2, 4, 'b', 1)
self.assertEqual(
version.SemanticVersion(1, 2, 3), semver.decrement())
def test_decrement_nonrelease_zero(self):
# We set an arbitrary max version of 9999 when decrementing versions
# - this is part of handling rpm support.
semver = version.SemanticVersion(1, 0, 0)
self.assertEqual(
version.SemanticVersion(0, 9999, 9999), semver.decrement())
def test_decrement_release(self):
# The next patch version of a release version requires a change to the
# patch level.
semver = version.SemanticVersion(2, 2, 5)
self.assertEqual(
version.SemanticVersion(2, 2, 4), semver.decrement())
def test_increment_nonrelease(self):
# The next patch version of a non-release version is another
# non-release version as the next release doesn't need to be
# incremented.
semver = version.SemanticVersion(1, 2, 4, 'b', 1)
self.assertEqual(
version.SemanticVersion(1, 2, 4, 'b', 2), semver.increment())
# Major and minor increments however need to bump things.
self.assertEqual(
version.SemanticVersion(1, 3, 0), semver.increment(minor=True))
self.assertEqual(
version.SemanticVersion(2, 0, 0), semver.increment(major=True))
def test_increment_release(self):
# The next patch version of a release version requires a change to the
# patch level.
semver = version.SemanticVersion(1, 2, 5)
self.assertEqual(
version.SemanticVersion(1, 2, 6), semver.increment())
self.assertEqual(
version.SemanticVersion(1, 3, 0), semver.increment(minor=True))
self.assertEqual(
version.SemanticVersion(2, 0, 0), semver.increment(major=True))
def test_rc_dev_version(self):
semver = version.SemanticVersion(1, 2, 4, 'rc', 1, 12)
self.assertEqual((1, 2, 4, 'candidatedev', 12), semver.version_tuple())
self.assertEqual("1.2.4", semver.brief_string())
self.assertEqual("1.2.4~rc1.dev12", semver.debian_string())
self.assertEqual("1.2.4.0rc1.dev12", semver.release_string())
self.assertEqual("1.2.3.rc1.dev12", semver.rpm_string())
self.assertEqual(semver, from_pip_string("1.2.4.0rc1.dev12"))
def test_rc_version(self):
semver = version.SemanticVersion(1, 2, 4, 'rc', 1)
self.assertEqual((1, 2, 4, 'candidate', 1), semver.version_tuple())
self.assertEqual("1.2.4", semver.brief_string())
self.assertEqual("1.2.4~rc1", semver.debian_string())
self.assertEqual("1.2.4.0rc1", semver.release_string())
self.assertEqual("1.2.3.rc1", semver.rpm_string())
self.assertEqual(semver, from_pip_string("1.2.4.0rc1"))
def test_to_dev(self):
self.assertEqual(
version.SemanticVersion(1, 2, 3, dev_count=1),
version.SemanticVersion(1, 2, 3).to_dev(1))
self.assertEqual(
version.SemanticVersion(1, 2, 3, 'rc', 1, dev_count=1),
version.SemanticVersion(1, 2, 3, 'rc', 1).to_dev(1))
| gpl-3.0 | -7,143,823,228,833,034,000 | 44.337621 | 79 | 0.618298 | false |
hynnet/hiwifi-openwrt-HC5661-HC5761 | staging_dir/host/lib/python2.7/idlelib/WidgetRedirector.py | 143 | 4476 | from Tkinter import *
class WidgetRedirector:
"""Support for redirecting arbitrary widget subcommands.
Some Tk operations don't normally pass through Tkinter. For example, if a
character is inserted into a Text widget by pressing a key, a default Tk
binding to the widget's 'insert' operation is activated, and the Tk library
processes the insert without calling back into Tkinter.
Although a binding to <Key> could be made via Tkinter, what we really want
to do is to hook the Tk 'insert' operation itself.
When a widget is instantiated, a Tcl command is created whose name is the
same as the pathname widget._w. This command is used to invoke the various
widget operations, e.g. insert (for a Text widget). We are going to hook
this command and provide a facility ('register') to intercept the widget
operation.
In IDLE, the function being registered provides access to the top of a
Percolator chain. At the bottom of the chain is a call to the original
Tk widget operation.
"""
def __init__(self, widget):
self._operations = {}
self.widget = widget # widget instance
self.tk = tk = widget.tk # widget's root
w = widget._w # widget's (full) Tk pathname
self.orig = w + "_orig"
# Rename the Tcl command within Tcl:
tk.call("rename", w, self.orig)
# Create a new Tcl command whose name is the widget's pathname, and
# whose action is to dispatch on the operation passed to the widget:
tk.createcommand(w, self.dispatch)
def __repr__(self):
return "WidgetRedirector(%s<%s>)" % (self.widget.__class__.__name__,
self.widget._w)
def close(self):
for operation in list(self._operations):
self.unregister(operation)
widget = self.widget; del self.widget
orig = self.orig; del self.orig
tk = widget.tk
w = widget._w
tk.deletecommand(w)
# restore the original widget Tcl command:
tk.call("rename", orig, w)
def register(self, operation, function):
self._operations[operation] = function
setattr(self.widget, operation, function)
return OriginalCommand(self, operation)
def unregister(self, operation):
if operation in self._operations:
function = self._operations[operation]
del self._operations[operation]
if hasattr(self.widget, operation):
delattr(self.widget, operation)
return function
else:
return None
def dispatch(self, operation, *args):
'''Callback from Tcl which runs when the widget is referenced.
If an operation has been registered in self._operations, apply the
associated function to the args passed into Tcl. Otherwise, pass the
operation through to Tk via the original Tcl function.
Note that if a registered function is called, the operation is not
passed through to Tk. Apply the function returned by self.register()
to *args to accomplish that. For an example, see ColorDelegator.py.
'''
m = self._operations.get(operation)
try:
if m:
return m(*args)
else:
return self.tk.call((self.orig, operation) + args)
except TclError:
return ""
class OriginalCommand:
def __init__(self, redir, operation):
self.redir = redir
self.operation = operation
self.tk = redir.tk
self.orig = redir.orig
self.tk_call = self.tk.call
self.orig_and_operation = (self.orig, self.operation)
def __repr__(self):
return "OriginalCommand(%r, %r)" % (self.redir, self.operation)
def __call__(self, *args):
return self.tk_call(self.orig_and_operation + args)
def main():
root = Tk()
root.wm_protocol("WM_DELETE_WINDOW", root.quit)
text = Text()
text.pack()
text.focus_set()
redir = WidgetRedirector(text)
global previous_tcl_fcn
def my_insert(*args):
print "insert", args
previous_tcl_fcn(*args)
previous_tcl_fcn = redir.register("insert", my_insert)
root.mainloop()
redir.unregister("insert") # runs after first 'close window'
redir.close()
root.mainloop()
root.destroy()
if __name__ == "__main__":
main()
| gpl-2.0 | -7,079,171,522,849,079,000 | 34.52381 | 79 | 0.62042 | false |
eusi/MissionPlanerHM | Lib/site-packages/scipy/constants/__init__.py | 55 | 1121 | """
Various useful constants and conversion formulae
Modules
-------
.. autosummary::
:toctree: generated/
codata - CODATA Recommended Values of Fundamental Physical Const (2006)
constants - Collection of physical constants and conversion factors
Functions
---------
.. autosummary::
:toctree: generated/
C2F - Convert Celsius to Fahrenheit
C2K - Convert Celsius to Kelvin
F2C - Convert Fahrenheit to Celsius
F2K - Convert Fahrenheit to Kelvin
K2C - Convert Kelvin to Celsius
K2F - Convert Kelvin to Fahrenheit
find - Find the codata.physical_constant keys containing a given string
lambda2nu - Convert wavelength to optical frequency
nu2lambda - Convert optical frequency to wavelength
precision - Relative precision in physical_constants indexed by key
unit - Unit in physical_constants indexed by key
value - Value in physical_constants indexed by key
"""
# Modules contributed by BasSw ([email protected])
from codata import *
from constants import *
__all__ = filter(lambda s: not s.startswith('_'), dir())
from numpy.testing import Tester
test = Tester().test
| gpl-3.0 | -7,496,944,512,938,822,000 | 27.025 | 74 | 0.738626 | false |
indico/indico | indico/modules/rb/operations/rooms.py | 4 | 9012 | # This file is part of Indico.
# Copyright (C) 2002 - 2021 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from datetime import date, datetime, time
from dateutil.relativedelta import relativedelta
from flask import session
from sqlalchemy.orm import joinedload, load_only
from indico.core.db import db
from indico.core.db.sqlalchemy.principals import PrincipalType
from indico.core.db.sqlalchemy.util.queries import db_dates_overlap, escape_like
from indico.modules.rb import rb_settings
from indico.modules.rb.models.equipment import EquipmentType, RoomEquipmentAssociation
from indico.modules.rb.models.favorites import favorite_room_table
from indico.modules.rb.models.principals import RoomPrincipal
from indico.modules.rb.models.reservation_occurrences import ReservationOccurrence
from indico.modules.rb.models.reservations import Reservation
from indico.modules.rb.models.room_features import RoomFeature
from indico.modules.rb.models.rooms import Room
from indico.modules.rb.statistics import calculate_rooms_occupancy
from indico.modules.rb.util import rb_is_admin
from indico.util.caching import memoize_redis
def _filter_coordinates(query, filters):
try:
sw_lat = filters['sw_lat']
sw_lng = filters['sw_lng']
ne_lat = filters['ne_lat']
ne_lng = filters['ne_lng']
except KeyError:
return query
return query.filter(Room.latitude >= sw_lat,
Room.latitude <= ne_lat,
Room.longitude >= sw_lng,
Room.longitude <= ne_lng)
def _make_room_text_filter(text):
text = f'%{escape_like(text)}%'
columns = ('site', 'division', 'building', 'floor', 'number', 'comments', 'full_name')
return db.or_(getattr(Room, col).ilike(text) for col in columns)
def _query_managed_rooms(user):
criteria = [db.and_(RoomPrincipal.type == PrincipalType.user,
RoomPrincipal.user_id == user.id,
RoomPrincipal.has_management_permission())]
for group in user.local_groups:
criteria.append(db.and_(RoomPrincipal.type == PrincipalType.local_group,
RoomPrincipal.local_group_id == group.id,
RoomPrincipal.has_management_permission()))
for group in user.iter_all_multipass_groups():
criteria.append(db.and_(RoomPrincipal.type == PrincipalType.multipass_group,
RoomPrincipal.multipass_group_provider == group.provider.name,
db.func.lower(RoomPrincipal.multipass_group_name) == group.name.lower(),
RoomPrincipal.has_management_permission()))
return Room.query.filter(~Room.is_deleted, Room.acl_entries.any(db.or_(*criteria)) | (Room.owner == user))
def _query_all_rooms_for_acl_check():
return (Room.query
.filter(~Room.is_deleted)
.options(load_only('id', 'protection_mode', 'reservations_need_confirmation'),
joinedload('owner').load_only('id'),
joinedload('acl_entries')))
@memoize_redis(900)
def has_managed_rooms(user):
if user.can_get_all_multipass_groups:
return _query_managed_rooms(user).has_rows()
else:
query = _query_all_rooms_for_acl_check()
return any(r.can_manage(user, allow_admin=False) for r in query)
@memoize_redis(900)
def get_managed_room_ids(user):
if user.can_get_all_multipass_groups:
return {id_ for id_, in _query_managed_rooms(user).with_entities(Room.id)}
else:
query = _query_all_rooms_for_acl_check()
return {r.id for r in query if r.can_manage(user, allow_admin=False)}
@memoize_redis(3600)
def get_room_statistics(room):
data = {
'count': {
'id': 'times_booked',
'values': [],
'note': False
},
'percentage': {
'id': 'occupancy',
'values': [],
'note': True
}
}
ranges = [7, 30, 365]
end_date = date.today()
for days in ranges:
start_date = date.today() - relativedelta(days=days)
count = (ReservationOccurrence.query
.join(ReservationOccurrence.reservation)
.join(Reservation.room)
.filter(Room.id == room.id,
ReservationOccurrence.is_valid,
db_dates_overlap(ReservationOccurrence,
'start_dt', datetime.combine(start_date, time()),
'end_dt', datetime.combine(end_date, time.max)))
.count())
percentage = calculate_rooms_occupancy([room], start_date, end_date) * 100
if count > 0 or percentage > 0:
data['count']['values'].append({'days': days, 'value': count})
data['percentage']['values'].append({'days': days, 'value': percentage})
return data
def search_for_rooms(filters, allow_admin=False, availability=None):
"""Search for a room, using the provided filters.
:param filters: The filters, provided as a dictionary
:param allow_admin: A boolean specifying whether admins have override privileges
:param availability: A boolean specifying whether (un)available rooms should be provided,
or `None` in case all rooms should be returned.
"""
query = (Room.query
.outerjoin(favorite_room_table, db.and_(favorite_room_table.c.user_id == session.user.id,
favorite_room_table.c.room_id == Room.id))
.reset_joinpoint() # otherwise filter_by() would apply to the favorite table
.filter(~Room.is_deleted)
.order_by(favorite_room_table.c.user_id.is_(None), db.func.indico.natsort(Room.full_name)))
criteria = {}
if 'capacity' in filters:
query = query.filter(Room.capacity >= filters['capacity'])
if 'building' in filters:
criteria['building'] = filters['building']
if 'division' in filters:
criteria['division'] = filters['division']
query = query.filter_by(**criteria)
if 'text' in filters:
text = ' '.join(filters['text'].strip().split())
if text.startswith('#') and text[1:].isdigit():
query = query.filter(Room.id == int(text[1:]))
else:
query = query.filter(_make_room_text_filter(text))
if filters.get('equipment'):
subquery = (db.session.query(RoomEquipmentAssociation)
.with_entities(db.func.count(RoomEquipmentAssociation.c.room_id))
.filter(RoomEquipmentAssociation.c.room_id == Room.id,
EquipmentType.name.in_(filters['equipment']))
.join(EquipmentType, RoomEquipmentAssociation.c.equipment_id == EquipmentType.id)
.correlate(Room)
.scalar_subquery())
query = query.filter(subquery == len(filters['equipment']))
if filters.get('features'):
for feature in filters['features']:
query = query.filter(Room.available_equipment.any(EquipmentType.features.any(RoomFeature.name == feature)))
if filters.get('favorite'):
query = query.filter(favorite_room_table.c.user_id.isnot(None))
if filters.get('mine'):
ids = get_managed_room_ids(session.user)
query = query.filter(Room.id.in_(ids))
query = _filter_coordinates(query, filters)
if availability is None:
return query
start_dt, end_dt = filters['start_dt'], filters['end_dt']
repeatability = (filters['repeat_frequency'], filters['repeat_interval'])
availability_filters = [Room.filter_available(start_dt, end_dt, repeatability, include_blockings=False,
include_pre_bookings=False)]
if not (allow_admin and rb_is_admin(session.user)):
selected_period_days = (filters['end_dt'] - filters['start_dt']).days
booking_limit_days = db.func.coalesce(Room.booking_limit_days, rb_settings.get('booking_limit'))
criterion = db.and_(Room.filter_bookable_hours(start_dt.time(), end_dt.time()),
Room.filter_nonbookable_periods(start_dt, end_dt),
db.or_(booking_limit_days.is_(None),
selected_period_days <= booking_limit_days))
unbookable_ids = [room.id
for room in query.filter(db.and_(*availability_filters), ~criterion)
if not room.can_override(session.user, allow_admin=False)]
availability_filters.append(~Room.id.in_(unbookable_ids))
availability_criterion = db.and_(*availability_filters)
if availability is False:
availability_criterion = ~availability_criterion
return query.filter(availability_criterion)
| mit | 4,153,696,097,069,037,600 | 44.746193 | 119 | 0.618509 | false |
pschmitt/home-assistant | homeassistant/components/radarr/sensor.py | 16 | 7642 | """Support for Radarr."""
from datetime import datetime, timedelta
import logging
import time
from pytz import timezone
import requests
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_API_KEY,
CONF_HOST,
CONF_MONITORED_CONDITIONS,
CONF_PORT,
CONF_SSL,
DATA_BYTES,
DATA_EXABYTES,
DATA_GIGABYTES,
DATA_KILOBYTES,
DATA_MEGABYTES,
DATA_PETABYTES,
DATA_TERABYTES,
DATA_YOTTABYTES,
DATA_ZETTABYTES,
HTTP_OK,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
CONF_DAYS = "days"
CONF_INCLUDED = "include_paths"
CONF_UNIT = "unit"
CONF_URLBASE = "urlbase"
DEFAULT_HOST = "localhost"
DEFAULT_PORT = 7878
DEFAULT_URLBASE = ""
DEFAULT_DAYS = "1"
DEFAULT_UNIT = DATA_GIGABYTES
SCAN_INTERVAL = timedelta(minutes=10)
SENSOR_TYPES = {
"diskspace": ["Disk Space", DATA_GIGABYTES, "mdi:harddisk"],
"upcoming": ["Upcoming", "Movies", "mdi:television"],
"wanted": ["Wanted", "Movies", "mdi:television"],
"movies": ["Movies", "Movies", "mdi:television"],
"commands": ["Commands", "Commands", "mdi:code-braces"],
"status": ["Status", "Status", "mdi:information"],
}
ENDPOINTS = {
"diskspace": "{0}://{1}:{2}/{3}api/diskspace",
"upcoming": "{0}://{1}:{2}/{3}api/calendar?start={4}&end={5}",
"movies": "{0}://{1}:{2}/{3}api/movie",
"commands": "{0}://{1}:{2}/{3}api/command",
"status": "{0}://{1}:{2}/{3}api/system/status",
}
# Support to Yottabytes for the future, why not
BYTE_SIZES = [
DATA_BYTES,
DATA_KILOBYTES,
DATA_MEGABYTES,
DATA_GIGABYTES,
DATA_TERABYTES,
DATA_PETABYTES,
DATA_EXABYTES,
DATA_ZETTABYTES,
DATA_YOTTABYTES,
]
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Optional(CONF_DAYS, default=DEFAULT_DAYS): cv.string,
vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Optional(CONF_INCLUDED, default=[]): cv.ensure_list,
vol.Optional(CONF_MONITORED_CONDITIONS, default=["movies"]): vol.All(
cv.ensure_list, [vol.In(list(SENSOR_TYPES))]
),
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_SSL, default=False): cv.boolean,
vol.Optional(CONF_UNIT, default=DEFAULT_UNIT): vol.In(BYTE_SIZES),
vol.Optional(CONF_URLBASE, default=DEFAULT_URLBASE): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Radarr platform."""
conditions = config.get(CONF_MONITORED_CONDITIONS)
add_entities([RadarrSensor(hass, config, sensor) for sensor in conditions], True)
class RadarrSensor(Entity):
"""Implementation of the Radarr sensor."""
def __init__(self, hass, conf, sensor_type):
"""Create Radarr entity."""
self.conf = conf
self.host = conf.get(CONF_HOST)
self.port = conf.get(CONF_PORT)
self.urlbase = conf.get(CONF_URLBASE)
if self.urlbase:
self.urlbase = f"{self.urlbase.strip('/')}/"
self.apikey = conf.get(CONF_API_KEY)
self.included = conf.get(CONF_INCLUDED)
self.days = int(conf.get(CONF_DAYS))
self.ssl = "https" if conf.get(CONF_SSL) else "http"
self._state = None
self.data = []
self._tz = timezone(str(hass.config.time_zone))
self.type = sensor_type
self._name = SENSOR_TYPES[self.type][0]
if self.type == "diskspace":
self._unit = conf.get(CONF_UNIT)
else:
self._unit = SENSOR_TYPES[self.type][1]
self._icon = SENSOR_TYPES[self.type][2]
self._available = False
@property
def name(self):
"""Return the name of the sensor."""
return "{} {}".format("Radarr", self._name)
@property
def state(self):
"""Return sensor state."""
return self._state
@property
def available(self):
"""Return sensor availability."""
return self._available
@property
def unit_of_measurement(self):
"""Return the unit of the sensor."""
return self._unit
@property
def device_state_attributes(self):
"""Return the state attributes of the sensor."""
attributes = {}
if self.type == "upcoming":
for movie in self.data:
attributes[to_key(movie)] = get_release_date(movie)
elif self.type == "commands":
for command in self.data:
attributes[command["name"]] = command["state"]
elif self.type == "diskspace":
for data in self.data:
free_space = to_unit(data["freeSpace"], self._unit)
total_space = to_unit(data["totalSpace"], self._unit)
percentage_used = (
0 if total_space == 0 else free_space / total_space * 100
)
attributes[data["path"]] = "{:.2f}/{:.2f}{} ({:.2f}%)".format(
free_space, total_space, self._unit, percentage_used
)
elif self.type == "movies":
for movie in self.data:
attributes[to_key(movie)] = movie["downloaded"]
elif self.type == "status":
attributes = self.data
return attributes
@property
def icon(self):
"""Return the icon of the sensor."""
return self._icon
def update(self):
"""Update the data for the sensor."""
start = get_date(self._tz)
end = get_date(self._tz, self.days)
try:
res = requests.get(
ENDPOINTS[self.type].format(
self.ssl, self.host, self.port, self.urlbase, start, end
),
headers={"X-Api-Key": self.apikey},
timeout=10,
)
except OSError:
_LOGGER.warning("Host %s is not available", self.host)
self._available = False
self._state = None
return
if res.status_code == HTTP_OK:
if self.type in ["upcoming", "movies", "commands"]:
self.data = res.json()
self._state = len(self.data)
elif self.type == "diskspace":
# If included paths are not provided, use all data
if self.included == []:
self.data = res.json()
else:
# Filter to only show lists that are included
self.data = list(
filter(lambda x: x["path"] in self.included, res.json())
)
self._state = "{:.2f}".format(
to_unit(sum([data["freeSpace"] for data in self.data]), self._unit)
)
elif self.type == "status":
self.data = res.json()
self._state = self.data["version"]
self._available = True
def get_date(zone, offset=0):
"""Get date based on timezone and offset of days."""
day = 60 * 60 * 24
return datetime.date(datetime.fromtimestamp(time.time() + day * offset, tz=zone))
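# e.g. get_date(tz, offset=7) returns the date one week from now in that zone.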
def get_release_date(data):
"""Get release date."""
date = data.get("physicalRelease")
if not date:
date = data.get("inCinemas")
return date
def to_key(data):
"""Get key."""
return "{} ({})".format(data["title"], data["year"])
def to_unit(value, unit):
"""Convert bytes to give unit."""
return value / 1024 ** BYTE_SIZES.index(unit)
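# Sanity check of the conversion (sample values for this note only):
#   to_unit(1073741824, DATA_GIGABYTES) == 1.0, since DATA_GIGABYTES is
#   index 3 in BYTE_SIZES and 1024 ** 3 == 1073741824.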
| apache-2.0 | -6,186,804,518,946,395,000 | 30.841667 | 87 | 0.570139 | false |
andyrooger/OAT | src/interactive/commandui.py | 1 | 6086 | """
Command based UI for the obfuscator.
"""
# OAT - Obfuscation and Analysis Tool
# Copyright (C) 2011 Andy Gurden
#
# This file is part of OAT.
#
# OAT is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OAT is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with OAT. If not, see <http://www.gnu.org/licenses/>.
import cmd
import os
try:
import argparse
except ImportError:
from thirdparty import argparse
class CommandUI(cmd.Cmd):
"""Command UI base class, use self.cmdloop() to run."""
def __init__(self):
cmd.Cmd.__init__(self)
self.prompt = "--) "
self.intro = ("Welcome to OAT - Obfuscation and Analysis Tool!\n"
"If you are confused, type help.")
self._commands = {}
def cmdloop(self, intro = None):
"""Un-KeyboardInterrup-able cmdloop."""
try:
super().cmdloop(intro)
except KeyboardInterrupt:
print()
self.cmdloop("")
def postcmd(self, stop, line):
print()
return stop
def emptyline(self):
pass
def _split_line(self, line):
command, ignore, params = line.partition(" ")
params = params.lstrip()
return (command, params)
def default(self, line):
# Should look through added commands and call the correct one
command, params = self._split_line(line)
try:
todo = self._commands[command]
except KeyError:
return cmd.Cmd.default(self, line)
else:
return todo.do(params)
def do_quit(self, line):
"""Exit the program."""
return True
def do_EOF(self, line):
"""Exit the program. Use CTRL^D."""
print()
return True
def completedefault(self, text, line, begidx, endidx):
# Should look through added commands and call the correct one
command, params = self._split_line(line)
try:
todo = self._commands[command]
except KeyError:
return cmd.Cmd.completedefault(self, text, line, begidx, endidx)
else:
return todo.complete(text, params, begidx, endidx)
def do_help(self, line):
"""Get help on a given subject."""
if not line:
return self.help_topics()
# Should check for help in our added commands or fall back
try:
todo = self._commands[line]
except KeyError:
return cmd.Cmd.do_help(self, line)
else:
return todo.help()
def help_topics(self):
"""Print topics for help. This uses the code from Cmd's implementation."""
cmds_doc = ["help", "quit", "status"] + list(self._commands.keys())
self.stdout.write("%s\n"%str(self.doc_leader))
self.print_topics(self.doc_header, cmds_doc, 15,80)
def completenames(self, text, *ignored):
return cmd.Cmd.completenames(self, text, ignored) + [name for name in self._commands.keys() if name.startswith(text)]
def add_command(self, command : "interactive.Command to add to the console."):
"""Add a command to the console."""
self._commands[command.id] = command
def do_status(self, line):
"""Show status for the current session."""
for command in self._commands:
self._commands[command].status()
class Command:
"""Base class for any commands to add to the console."""
def __init__(self, id : "Name of the command"):
self._opts = Command.CommandArgs(description = self.run.__doc__,
add_help = False,
prog = id)
self.id = id
def do(self, line):
try:
args = self._opts.parse_args(line.split())
except ValueError as exc:
print("Problem: " + str(exc))
print()
self.help()
return False
except IOError as exc:
print(exc.strerror + ": " + exc.filename)
else:
return self.run(args)
def complete(self, text, line, begidx, endidx):
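        # begidx/endidx index into the full input line as given by cmd;
        # shift them by len("<id> ") so they index into the params string.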
beg = begidx - len(self.id) - 1
end = endidx - len(self.id) - 1
begarg = line.rfind(" ", None, end) + 1
endarg = end #line.rfind(" ", beg, None)
if begarg == -1:
begarg = 0
if endarg == -1:
endarg = len(line)
arg = line[begarg:endarg]
before = line[:begarg].split()
after = line[endarg:].split()
completions = self.autocomplete(before, arg, after)
return [completion[len(arg)-len(text):] for completion in completions]
def run(self, args): raise NotImplementedError
def autocomplete(self, before, arg, after): return []
def status(self): pass
def help(self):
self._opts.print_help()
class CommandArgs(argparse.ArgumentParser):
"""Child of OptionParser tailored to be used in the command interface."""
def __init__(self, *args, **kwargs):
argparse.ArgumentParser.__init__(self, *args, **kwargs)
def error(self, msg):
raise ValueError(msg)
def path_completer(path : "Path to complete"):
"""Completer for file paths."""
directory, base = os.path.split(path)
entries = []
try:
if directory:
entries = os.listdir(directory)
else:
entries = os.listdir(os.getcwd())
except OSError:
entries = []
suggestions = [os.path.join(directory, file) for file in entries if file.startswith(base)]
return suggestions
| gpl-3.0 | 1,715,927,257,871,010,300 | 29.43 | 125 | 0.581663 | false |
jeffrey4l/nova | nova/api/openstack/compute/contrib/used_limits.py | 62 | 3281 | # Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova import quota
QUOTAS = quota.QUOTAS
XMLNS = "http://docs.openstack.org/compute/ext/used_limits/api/v1.1"
ALIAS = "os-used-limits"
authorize = extensions.soft_extension_authorizer('compute', 'used_limits')
authorize_for_admin = extensions.extension_authorizer('compute',
'used_limits_for_admin')
class UsedLimitsController(wsgi.Controller):
def __init__(self, ext_mgr):
self.ext_mgr = ext_mgr
@staticmethod
def _reserved(req):
try:
return int(req.GET['reserved'])
except (ValueError, KeyError):
return False
@wsgi.extends
def index(self, req, resp_obj):
context = req.environ['nova.context']
project_id = self._project_id(context, req)
quotas = QUOTAS.get_project_quotas(context, project_id, usages=True)
quota_map = {
'totalRAMUsed': 'ram',
'totalCoresUsed': 'cores',
'totalInstancesUsed': 'instances',
'totalFloatingIpsUsed': 'floating_ips',
'totalSecurityGroupsUsed': 'security_groups',
}
if self.ext_mgr.is_loaded('os-server-group-quotas'):
quota_map['totalServerGroupsUsed'] = 'server_groups'
used_limits = {}
for display_name, key in six.iteritems(quota_map):
if key in quotas:
reserved = (quotas[key]['reserved']
if self._reserved(req) else 0)
used_limits[display_name] = quotas[key]['in_use'] + reserved
resp_obj.obj['limits']['absolute'].update(used_limits)
def _project_id(self, context, req):
if self.ext_mgr.is_loaded('os-used-limits-for-admin'):
if 'tenant_id' in req.GET:
tenant_id = req.GET.get('tenant_id')
target = {
'project_id': tenant_id,
'user_id': context.user_id
}
authorize_for_admin(context, target=target)
return tenant_id
return context.project_id
class Used_limits(extensions.ExtensionDescriptor):
"""Provide data on limited resources that are being used."""
name = "UsedLimits"
alias = ALIAS
namespace = XMLNS
updated = "2012-07-13T00:00:00Z"
def get_controller_extensions(self):
controller = UsedLimitsController(self.ext_mgr)
limits_ext = extensions.ControllerExtension(self, 'limits',
controller=controller)
return [limits_ext]
| apache-2.0 | -1,310,063,815,344,694,300 | 34.27957 | 78 | 0.608961 | false |
jeffery9/mixprint_addons | project_long_term/wizard/project_compute_tasks.py | 63 | 2545 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class project_compute_tasks(osv.osv_memory):
_name = 'project.compute.tasks'
_description = 'Project Compute Tasks'
_columns = {
'project_id': fields.many2one('project.project', 'Project', required=True)
}
def compute_date(self, cr, uid, ids, context=None):
"""
Schedule the tasks according to users and priority.
"""
project_pool = self.pool.get('project.project')
task_pool = self.pool.get('project.task')
if context is None:
context = {}
context['compute_by'] = 'project'
data = self.read(cr, uid, ids, [])[0]
project_id = data['project_id'][0]
project_pool.schedule_tasks(cr, uid, [project_id], context=context)
return self._open_task_list(cr, uid, data, context=context)
def _open_task_list(self, cr, uid, data, context=None):
"""
Return the scheduled task list.
"""
if context is None:
context = {}
mod_obj = self.pool.get('ir.model.data')
act_obj = self.pool.get('ir.actions.act_window')
result = mod_obj._get_id(cr, uid, 'project_long_term', 'act_resouce_allocation')
id = mod_obj.read(cr, uid, [result], ['res_id'])[0]['res_id']
result = {}
if not id:
return result
result = act_obj.read(cr, uid, [id], context=context)[0]
result['target'] = 'current'
return result
project_compute_tasks()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 7,048,233,546,083,763,000 | 38.765625 | 88 | 0.591749 | false |
kleientertainment/ds_mod_tools | pkg/win32/Python27/Lib/htmlentitydefs.py | 65 | 18327 | """HTML character entity references."""
# maps the HTML entity name to the Unicode codepoint
name2codepoint = {
'AElig': 0x00c6, # latin capital letter AE = latin capital ligature AE, U+00C6 ISOlat1
'Aacute': 0x00c1, # latin capital letter A with acute, U+00C1 ISOlat1
'Acirc': 0x00c2, # latin capital letter A with circumflex, U+00C2 ISOlat1
'Agrave': 0x00c0, # latin capital letter A with grave = latin capital letter A grave, U+00C0 ISOlat1
'Alpha': 0x0391, # greek capital letter alpha, U+0391
'Aring': 0x00c5, # latin capital letter A with ring above = latin capital letter A ring, U+00C5 ISOlat1
'Atilde': 0x00c3, # latin capital letter A with tilde, U+00C3 ISOlat1
'Auml': 0x00c4, # latin capital letter A with diaeresis, U+00C4 ISOlat1
'Beta': 0x0392, # greek capital letter beta, U+0392
'Ccedil': 0x00c7, # latin capital letter C with cedilla, U+00C7 ISOlat1
'Chi': 0x03a7, # greek capital letter chi, U+03A7
'Dagger': 0x2021, # double dagger, U+2021 ISOpub
'Delta': 0x0394, # greek capital letter delta, U+0394 ISOgrk3
'ETH': 0x00d0, # latin capital letter ETH, U+00D0 ISOlat1
'Eacute': 0x00c9, # latin capital letter E with acute, U+00C9 ISOlat1
'Ecirc': 0x00ca, # latin capital letter E with circumflex, U+00CA ISOlat1
'Egrave': 0x00c8, # latin capital letter E with grave, U+00C8 ISOlat1
'Epsilon': 0x0395, # greek capital letter epsilon, U+0395
'Eta': 0x0397, # greek capital letter eta, U+0397
'Euml': 0x00cb, # latin capital letter E with diaeresis, U+00CB ISOlat1
'Gamma': 0x0393, # greek capital letter gamma, U+0393 ISOgrk3
'Iacute': 0x00cd, # latin capital letter I with acute, U+00CD ISOlat1
'Icirc': 0x00ce, # latin capital letter I with circumflex, U+00CE ISOlat1
'Igrave': 0x00cc, # latin capital letter I with grave, U+00CC ISOlat1
'Iota': 0x0399, # greek capital letter iota, U+0399
'Iuml': 0x00cf, # latin capital letter I with diaeresis, U+00CF ISOlat1
'Kappa': 0x039a, # greek capital letter kappa, U+039A
'Lambda': 0x039b, # greek capital letter lambda, U+039B ISOgrk3
'Mu': 0x039c, # greek capital letter mu, U+039C
'Ntilde': 0x00d1, # latin capital letter N with tilde, U+00D1 ISOlat1
'Nu': 0x039d, # greek capital letter nu, U+039D
'OElig': 0x0152, # latin capital ligature OE, U+0152 ISOlat2
'Oacute': 0x00d3, # latin capital letter O with acute, U+00D3 ISOlat1
'Ocirc': 0x00d4, # latin capital letter O with circumflex, U+00D4 ISOlat1
'Ograve': 0x00d2, # latin capital letter O with grave, U+00D2 ISOlat1
'Omega': 0x03a9, # greek capital letter omega, U+03A9 ISOgrk3
'Omicron': 0x039f, # greek capital letter omicron, U+039F
'Oslash': 0x00d8, # latin capital letter O with stroke = latin capital letter O slash, U+00D8 ISOlat1
'Otilde': 0x00d5, # latin capital letter O with tilde, U+00D5 ISOlat1
'Ouml': 0x00d6, # latin capital letter O with diaeresis, U+00D6 ISOlat1
'Phi': 0x03a6, # greek capital letter phi, U+03A6 ISOgrk3
'Pi': 0x03a0, # greek capital letter pi, U+03A0 ISOgrk3
'Prime': 0x2033, # double prime = seconds = inches, U+2033 ISOtech
'Psi': 0x03a8, # greek capital letter psi, U+03A8 ISOgrk3
'Rho': 0x03a1, # greek capital letter rho, U+03A1
'Scaron': 0x0160, # latin capital letter S with caron, U+0160 ISOlat2
'Sigma': 0x03a3, # greek capital letter sigma, U+03A3 ISOgrk3
'THORN': 0x00de, # latin capital letter THORN, U+00DE ISOlat1
'Tau': 0x03a4, # greek capital letter tau, U+03A4
'Theta': 0x0398, # greek capital letter theta, U+0398 ISOgrk3
'Uacute': 0x00da, # latin capital letter U with acute, U+00DA ISOlat1
'Ucirc': 0x00db, # latin capital letter U with circumflex, U+00DB ISOlat1
'Ugrave': 0x00d9, # latin capital letter U with grave, U+00D9 ISOlat1
'Upsilon': 0x03a5, # greek capital letter upsilon, U+03A5 ISOgrk3
'Uuml': 0x00dc, # latin capital letter U with diaeresis, U+00DC ISOlat1
'Xi': 0x039e, # greek capital letter xi, U+039E ISOgrk3
'Yacute': 0x00dd, # latin capital letter Y with acute, U+00DD ISOlat1
'Yuml': 0x0178, # latin capital letter Y with diaeresis, U+0178 ISOlat2
'Zeta': 0x0396, # greek capital letter zeta, U+0396
'aacute': 0x00e1, # latin small letter a with acute, U+00E1 ISOlat1
'acirc': 0x00e2, # latin small letter a with circumflex, U+00E2 ISOlat1
'acute': 0x00b4, # acute accent = spacing acute, U+00B4 ISOdia
'aelig': 0x00e6, # latin small letter ae = latin small ligature ae, U+00E6 ISOlat1
'agrave': 0x00e0, # latin small letter a with grave = latin small letter a grave, U+00E0 ISOlat1
'alefsym': 0x2135, # alef symbol = first transfinite cardinal, U+2135 NEW
'alpha': 0x03b1, # greek small letter alpha, U+03B1 ISOgrk3
'amp': 0x0026, # ampersand, U+0026 ISOnum
'and': 0x2227, # logical and = wedge, U+2227 ISOtech
'ang': 0x2220, # angle, U+2220 ISOamso
'aring': 0x00e5, # latin small letter a with ring above = latin small letter a ring, U+00E5 ISOlat1
'asymp': 0x2248, # almost equal to = asymptotic to, U+2248 ISOamsr
'atilde': 0x00e3, # latin small letter a with tilde, U+00E3 ISOlat1
'auml': 0x00e4, # latin small letter a with diaeresis, U+00E4 ISOlat1
'bdquo': 0x201e, # double low-9 quotation mark, U+201E NEW
'beta': 0x03b2, # greek small letter beta, U+03B2 ISOgrk3
'brvbar': 0x00a6, # broken bar = broken vertical bar, U+00A6 ISOnum
'bull': 0x2022, # bullet = black small circle, U+2022 ISOpub
'cap': 0x2229, # intersection = cap, U+2229 ISOtech
'ccedil': 0x00e7, # latin small letter c with cedilla, U+00E7 ISOlat1
'cedil': 0x00b8, # cedilla = spacing cedilla, U+00B8 ISOdia
'cent': 0x00a2, # cent sign, U+00A2 ISOnum
'chi': 0x03c7, # greek small letter chi, U+03C7 ISOgrk3
'circ': 0x02c6, # modifier letter circumflex accent, U+02C6 ISOpub
'clubs': 0x2663, # black club suit = shamrock, U+2663 ISOpub
'cong': 0x2245, # approximately equal to, U+2245 ISOtech
'copy': 0x00a9, # copyright sign, U+00A9 ISOnum
'crarr': 0x21b5, # downwards arrow with corner leftwards = carriage return, U+21B5 NEW
'cup': 0x222a, # union = cup, U+222A ISOtech
'curren': 0x00a4, # currency sign, U+00A4 ISOnum
'dArr': 0x21d3, # downwards double arrow, U+21D3 ISOamsa
'dagger': 0x2020, # dagger, U+2020 ISOpub
'darr': 0x2193, # downwards arrow, U+2193 ISOnum
'deg': 0x00b0, # degree sign, U+00B0 ISOnum
'delta': 0x03b4, # greek small letter delta, U+03B4 ISOgrk3
'diams': 0x2666, # black diamond suit, U+2666 ISOpub
'divide': 0x00f7, # division sign, U+00F7 ISOnum
'eacute': 0x00e9, # latin small letter e with acute, U+00E9 ISOlat1
'ecirc': 0x00ea, # latin small letter e with circumflex, U+00EA ISOlat1
'egrave': 0x00e8, # latin small letter e with grave, U+00E8 ISOlat1
'empty': 0x2205, # empty set = null set = diameter, U+2205 ISOamso
'emsp': 0x2003, # em space, U+2003 ISOpub
'ensp': 0x2002, # en space, U+2002 ISOpub
'epsilon': 0x03b5, # greek small letter epsilon, U+03B5 ISOgrk3
'equiv': 0x2261, # identical to, U+2261 ISOtech
'eta': 0x03b7, # greek small letter eta, U+03B7 ISOgrk3
'eth': 0x00f0, # latin small letter eth, U+00F0 ISOlat1
'euml': 0x00eb, # latin small letter e with diaeresis, U+00EB ISOlat1
'euro': 0x20ac, # euro sign, U+20AC NEW
'exist': 0x2203, # there exists, U+2203 ISOtech
'fnof': 0x0192, # latin small f with hook = function = florin, U+0192 ISOtech
'forall': 0x2200, # for all, U+2200 ISOtech
'frac12': 0x00bd, # vulgar fraction one half = fraction one half, U+00BD ISOnum
'frac14': 0x00bc, # vulgar fraction one quarter = fraction one quarter, U+00BC ISOnum
'frac34': 0x00be, # vulgar fraction three quarters = fraction three quarters, U+00BE ISOnum
'frasl': 0x2044, # fraction slash, U+2044 NEW
'gamma': 0x03b3, # greek small letter gamma, U+03B3 ISOgrk3
'ge': 0x2265, # greater-than or equal to, U+2265 ISOtech
'gt': 0x003e, # greater-than sign, U+003E ISOnum
'hArr': 0x21d4, # left right double arrow, U+21D4 ISOamsa
'harr': 0x2194, # left right arrow, U+2194 ISOamsa
'hearts': 0x2665, # black heart suit = valentine, U+2665 ISOpub
'hellip': 0x2026, # horizontal ellipsis = three dot leader, U+2026 ISOpub
'iacute': 0x00ed, # latin small letter i with acute, U+00ED ISOlat1
'icirc': 0x00ee, # latin small letter i with circumflex, U+00EE ISOlat1
'iexcl': 0x00a1, # inverted exclamation mark, U+00A1 ISOnum
'igrave': 0x00ec, # latin small letter i with grave, U+00EC ISOlat1
'image': 0x2111, # blackletter capital I = imaginary part, U+2111 ISOamso
'infin': 0x221e, # infinity, U+221E ISOtech
'int': 0x222b, # integral, U+222B ISOtech
'iota': 0x03b9, # greek small letter iota, U+03B9 ISOgrk3
'iquest': 0x00bf, # inverted question mark = turned question mark, U+00BF ISOnum
'isin': 0x2208, # element of, U+2208 ISOtech
'iuml': 0x00ef, # latin small letter i with diaeresis, U+00EF ISOlat1
'kappa': 0x03ba, # greek small letter kappa, U+03BA ISOgrk3
'lArr': 0x21d0, # leftwards double arrow, U+21D0 ISOtech
'lambda': 0x03bb, # greek small letter lambda, U+03BB ISOgrk3
'lang': 0x2329, # left-pointing angle bracket = bra, U+2329 ISOtech
'laquo': 0x00ab, # left-pointing double angle quotation mark = left pointing guillemet, U+00AB ISOnum
'larr': 0x2190, # leftwards arrow, U+2190 ISOnum
'lceil': 0x2308, # left ceiling = apl upstile, U+2308 ISOamsc
'ldquo': 0x201c, # left double quotation mark, U+201C ISOnum
'le': 0x2264, # less-than or equal to, U+2264 ISOtech
'lfloor': 0x230a, # left floor = apl downstile, U+230A ISOamsc
'lowast': 0x2217, # asterisk operator, U+2217 ISOtech
'loz': 0x25ca, # lozenge, U+25CA ISOpub
'lrm': 0x200e, # left-to-right mark, U+200E NEW RFC 2070
'lsaquo': 0x2039, # single left-pointing angle quotation mark, U+2039 ISO proposed
'lsquo': 0x2018, # left single quotation mark, U+2018 ISOnum
'lt': 0x003c, # less-than sign, U+003C ISOnum
'macr': 0x00af, # macron = spacing macron = overline = APL overbar, U+00AF ISOdia
'mdash': 0x2014, # em dash, U+2014 ISOpub
'micro': 0x00b5, # micro sign, U+00B5 ISOnum
'middot': 0x00b7, # middle dot = Georgian comma = Greek middle dot, U+00B7 ISOnum
'minus': 0x2212, # minus sign, U+2212 ISOtech
'mu': 0x03bc, # greek small letter mu, U+03BC ISOgrk3
'nabla': 0x2207, # nabla = backward difference, U+2207 ISOtech
'nbsp': 0x00a0, # no-break space = non-breaking space, U+00A0 ISOnum
'ndash': 0x2013, # en dash, U+2013 ISOpub
'ne': 0x2260, # not equal to, U+2260 ISOtech
'ni': 0x220b, # contains as member, U+220B ISOtech
'not': 0x00ac, # not sign, U+00AC ISOnum
'notin': 0x2209, # not an element of, U+2209 ISOtech
'nsub': 0x2284, # not a subset of, U+2284 ISOamsn
'ntilde': 0x00f1, # latin small letter n with tilde, U+00F1 ISOlat1
'nu': 0x03bd, # greek small letter nu, U+03BD ISOgrk3
'oacute': 0x00f3, # latin small letter o with acute, U+00F3 ISOlat1
'ocirc': 0x00f4, # latin small letter o with circumflex, U+00F4 ISOlat1
'oelig': 0x0153, # latin small ligature oe, U+0153 ISOlat2
'ograve': 0x00f2, # latin small letter o with grave, U+00F2 ISOlat1
'oline': 0x203e, # overline = spacing overscore, U+203E NEW
'omega': 0x03c9, # greek small letter omega, U+03C9 ISOgrk3
'omicron': 0x03bf, # greek small letter omicron, U+03BF NEW
'oplus': 0x2295, # circled plus = direct sum, U+2295 ISOamsb
'or': 0x2228, # logical or = vee, U+2228 ISOtech
'ordf': 0x00aa, # feminine ordinal indicator, U+00AA ISOnum
'ordm': 0x00ba, # masculine ordinal indicator, U+00BA ISOnum
'oslash': 0x00f8, # latin small letter o with stroke, = latin small letter o slash, U+00F8 ISOlat1
'otilde': 0x00f5, # latin small letter o with tilde, U+00F5 ISOlat1
'otimes': 0x2297, # circled times = vector product, U+2297 ISOamsb
'ouml': 0x00f6, # latin small letter o with diaeresis, U+00F6 ISOlat1
'para': 0x00b6, # pilcrow sign = paragraph sign, U+00B6 ISOnum
'part': 0x2202, # partial differential, U+2202 ISOtech
'permil': 0x2030, # per mille sign, U+2030 ISOtech
'perp': 0x22a5, # up tack = orthogonal to = perpendicular, U+22A5 ISOtech
'phi': 0x03c6, # greek small letter phi, U+03C6 ISOgrk3
'pi': 0x03c0, # greek small letter pi, U+03C0 ISOgrk3
'piv': 0x03d6, # greek pi symbol, U+03D6 ISOgrk3
'plusmn': 0x00b1, # plus-minus sign = plus-or-minus sign, U+00B1 ISOnum
'pound': 0x00a3, # pound sign, U+00A3 ISOnum
'prime': 0x2032, # prime = minutes = feet, U+2032 ISOtech
'prod': 0x220f, # n-ary product = product sign, U+220F ISOamsb
'prop': 0x221d, # proportional to, U+221D ISOtech
'psi': 0x03c8, # greek small letter psi, U+03C8 ISOgrk3
'quot': 0x0022, # quotation mark = APL quote, U+0022 ISOnum
'rArr': 0x21d2, # rightwards double arrow, U+21D2 ISOtech
'radic': 0x221a, # square root = radical sign, U+221A ISOtech
'rang': 0x232a, # right-pointing angle bracket = ket, U+232A ISOtech
'raquo': 0x00bb, # right-pointing double angle quotation mark = right pointing guillemet, U+00BB ISOnum
'rarr': 0x2192, # rightwards arrow, U+2192 ISOnum
'rceil': 0x2309, # right ceiling, U+2309 ISOamsc
'rdquo': 0x201d, # right double quotation mark, U+201D ISOnum
'real': 0x211c, # blackletter capital R = real part symbol, U+211C ISOamso
'reg': 0x00ae, # registered sign = registered trade mark sign, U+00AE ISOnum
'rfloor': 0x230b, # right floor, U+230B ISOamsc
'rho': 0x03c1, # greek small letter rho, U+03C1 ISOgrk3
'rlm': 0x200f, # right-to-left mark, U+200F NEW RFC 2070
'rsaquo': 0x203a, # single right-pointing angle quotation mark, U+203A ISO proposed
'rsquo': 0x2019, # right single quotation mark, U+2019 ISOnum
'sbquo': 0x201a, # single low-9 quotation mark, U+201A NEW
'scaron': 0x0161, # latin small letter s with caron, U+0161 ISOlat2
'sdot': 0x22c5, # dot operator, U+22C5 ISOamsb
'sect': 0x00a7, # section sign, U+00A7 ISOnum
'shy': 0x00ad, # soft hyphen = discretionary hyphen, U+00AD ISOnum
'sigma': 0x03c3, # greek small letter sigma, U+03C3 ISOgrk3
'sigmaf': 0x03c2, # greek small letter final sigma, U+03C2 ISOgrk3
'sim': 0x223c, # tilde operator = varies with = similar to, U+223C ISOtech
'spades': 0x2660, # black spade suit, U+2660 ISOpub
'sub': 0x2282, # subset of, U+2282 ISOtech
'sube': 0x2286, # subset of or equal to, U+2286 ISOtech
    'sum': 0x2211, # n-ary summation, U+2211 ISOamsb
'sup': 0x2283, # superset of, U+2283 ISOtech
'sup1': 0x00b9, # superscript one = superscript digit one, U+00B9 ISOnum
'sup2': 0x00b2, # superscript two = superscript digit two = squared, U+00B2 ISOnum
'sup3': 0x00b3, # superscript three = superscript digit three = cubed, U+00B3 ISOnum
'supe': 0x2287, # superset of or equal to, U+2287 ISOtech
'szlig': 0x00df, # latin small letter sharp s = ess-zed, U+00DF ISOlat1
'tau': 0x03c4, # greek small letter tau, U+03C4 ISOgrk3
'there4': 0x2234, # therefore, U+2234 ISOtech
'theta': 0x03b8, # greek small letter theta, U+03B8 ISOgrk3
'thetasym': 0x03d1, # greek small letter theta symbol, U+03D1 NEW
'thinsp': 0x2009, # thin space, U+2009 ISOpub
    'thorn': 0x00fe, # latin small letter thorn, U+00FE ISOlat1
'tilde': 0x02dc, # small tilde, U+02DC ISOdia
'times': 0x00d7, # multiplication sign, U+00D7 ISOnum
'trade': 0x2122, # trade mark sign, U+2122 ISOnum
'uArr': 0x21d1, # upwards double arrow, U+21D1 ISOamsa
'uacute': 0x00fa, # latin small letter u with acute, U+00FA ISOlat1
'uarr': 0x2191, # upwards arrow, U+2191 ISOnum
'ucirc': 0x00fb, # latin small letter u with circumflex, U+00FB ISOlat1
'ugrave': 0x00f9, # latin small letter u with grave, U+00F9 ISOlat1
'uml': 0x00a8, # diaeresis = spacing diaeresis, U+00A8 ISOdia
'upsih': 0x03d2, # greek upsilon with hook symbol, U+03D2 NEW
'upsilon': 0x03c5, # greek small letter upsilon, U+03C5 ISOgrk3
'uuml': 0x00fc, # latin small letter u with diaeresis, U+00FC ISOlat1
'weierp': 0x2118, # script capital P = power set = Weierstrass p, U+2118 ISOamso
'xi': 0x03be, # greek small letter xi, U+03BE ISOgrk3
'yacute': 0x00fd, # latin small letter y with acute, U+00FD ISOlat1
'yen': 0x00a5, # yen sign = yuan sign, U+00A5 ISOnum
'yuml': 0x00ff, # latin small letter y with diaeresis, U+00FF ISOlat1
'zeta': 0x03b6, # greek small letter zeta, U+03B6 ISOgrk3
'zwj': 0x200d, # zero width joiner, U+200D NEW RFC 2070
'zwnj': 0x200c, # zero width non-joiner, U+200C NEW RFC 2070
}
# maps the Unicode codepoint to the HTML entity name
codepoint2name = {}
# maps the HTML entity name to the character
# (or a character reference if the character is outside the Latin-1 range)
entitydefs = {}
for (name, codepoint) in name2codepoint.iteritems():
codepoint2name[codepoint] = name
if codepoint <= 0xff:
entitydefs[name] = chr(codepoint)
else:
entitydefs[name] = '&#%d;' % codepoint
del name, codepoint
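# Illustrative sanity checks (not part of the original module) showing how the
# three mappings relate:
#
#   >>> name2codepoint['amp']
#   38
#   >>> codepoint2name[0x00e9]
#   'eacute'
#   >>> entitydefs['lt']        # Latin-1 range: the character itself
#   '<'
#   >>> entitydefs['alpha']     # outside Latin-1: a numeric character reference
#   '&#945;'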
| mit | 160,853,746,287,709,600 | 65.131868 | 110 | 0.640694 | false |
skevy/django | tests/regressiontests/utils/text.py | 51 | 2200 | import unittest
from django.utils import text
class TestUtilsText(unittest.TestCase):
def test_truncate_words(self):
self.assertEqual(u'The quick brown fox jumped over the lazy dog.',
text.truncate_words(u'The quick brown fox jumped over the lazy dog.', 10))
self.assertEqual(u'The quick brown fox ...',
text.truncate_words('The quick brown fox jumped over the lazy dog.', 4))
self.assertEqual(u'The quick brown fox ....',
text.truncate_words('The quick brown fox jumped over the lazy dog.', 4, '....'))
def test_truncate_html_words(self):
self.assertEqual(u'<p><strong><em>The quick brown fox jumped over the lazy dog.</em></strong></p>',
text.truncate_html_words('<p><strong><em>The quick brown fox jumped over the lazy dog.</em></strong></p>', 10))
self.assertEqual(u'<p><strong><em>The quick brown fox ...</em></strong></p>',
text.truncate_html_words('<p><strong><em>The quick brown fox jumped over the lazy dog.</em></strong></p>', 4))
self.assertEqual(u'<p><strong><em>The quick brown fox ....</em></strong></p>',
text.truncate_html_words('<p><strong><em>The quick brown fox jumped over the lazy dog.</em></strong></p>', 4, '....'))
self.assertEqual(u'<p><strong><em>The quick brown fox</em></strong></p>',
text.truncate_html_words('<p><strong><em>The quick brown fox jumped over the lazy dog.</em></strong></p>', 4, None))
def test_wrap(self):
digits = '1234 67 9'
self.assertEqual(text.wrap(digits, 100), u'1234 67 9')
self.assertEqual(text.wrap(digits, 9), u'1234 67 9')
self.assertEqual(text.wrap(digits, 8), u'1234 67\n9')
self.assertEqual(text.wrap('short\na long line', 7),
u'short\na long\nline')
self.assertEqual(text.wrap('do-not-break-long-words please? ok', 8),
u'do-not-break-long-words\nplease?\nok')
long_word = 'l%sng' % ('o' * 20)
self.assertEqual(text.wrap(long_word, 20), long_word)
self.assertEqual(text.wrap('a %s word' % long_word, 10),
u'a\n%s\nword' % long_word)
| bsd-3-clause | -4,441,511,200,753,817,600 | 54 | 130 | 0.603636 | false |
batermj/algorithm-challenger | code-analysis/programming_anguage/python/source_codes/Python3.8.0/Python-3.8.0/Tools/pynche/TypeinViewer.py | 116 | 6102 | """TypeinViewer class.
The TypeinViewer is what you see at the lower right of the main Pynche
widget. It contains three text entry fields, one each for red, green, blue.
Input into these windows is highly constrained; it only allows you to enter
values that are legal for a color axis. This usually means 0-255 for decimal
input and 0x0 - 0xff for hex input.
You can toggle whether you want to view and input the values in either decimal
or hex by clicking on Hexadecimal. By clicking on Update while typing, the
color selection will be made on every change to the text field. Otherwise,
you must hit Return or Tab to select the color.
"""
from tkinter import *
class TypeinViewer:
def __init__(self, switchboard, master=None):
# non-gui ivars
self.__sb = switchboard
optiondb = switchboard.optiondb()
self.__hexp = BooleanVar()
self.__hexp.set(optiondb.get('HEXTYPE', 0))
self.__uwtyping = BooleanVar()
self.__uwtyping.set(optiondb.get('UPWHILETYPE', 0))
# create the gui
self.__frame = Frame(master, relief=RAISED, borderwidth=1)
self.__frame.grid(row=3, column=1, sticky='NSEW')
# Red
self.__xl = Label(self.__frame, text='Red:')
self.__xl.grid(row=0, column=0, sticky=E)
subframe = Frame(self.__frame)
subframe.grid(row=0, column=1)
self.__xox = Label(subframe, text='0x')
self.__xox.grid(row=0, column=0, sticky=E)
self.__xox['font'] = 'courier'
self.__x = Entry(subframe, width=3)
self.__x.grid(row=0, column=1)
self.__x.bindtags(self.__x.bindtags() + ('Normalize', 'Update'))
self.__x.bind_class('Normalize', '<Key>', self.__normalize)
self.__x.bind_class('Update' , '<Key>', self.__maybeupdate)
# Green
self.__yl = Label(self.__frame, text='Green:')
self.__yl.grid(row=1, column=0, sticky=E)
subframe = Frame(self.__frame)
subframe.grid(row=1, column=1)
self.__yox = Label(subframe, text='0x')
self.__yox.grid(row=0, column=0, sticky=E)
self.__yox['font'] = 'courier'
self.__y = Entry(subframe, width=3)
self.__y.grid(row=0, column=1)
self.__y.bindtags(self.__y.bindtags() + ('Normalize', 'Update'))
# Blue
self.__zl = Label(self.__frame, text='Blue:')
self.__zl.grid(row=2, column=0, sticky=E)
subframe = Frame(self.__frame)
subframe.grid(row=2, column=1)
self.__zox = Label(subframe, text='0x')
self.__zox.grid(row=0, column=0, sticky=E)
self.__zox['font'] = 'courier'
self.__z = Entry(subframe, width=3)
self.__z.grid(row=0, column=1)
self.__z.bindtags(self.__z.bindtags() + ('Normalize', 'Update'))
# Update while typing?
self.__uwt = Checkbutton(self.__frame,
text='Update while typing',
variable=self.__uwtyping)
self.__uwt.grid(row=3, column=0, columnspan=2, sticky=W)
# Hex/Dec
self.__hex = Checkbutton(self.__frame,
text='Hexadecimal',
variable=self.__hexp,
command=self.__togglehex)
self.__hex.grid(row=4, column=0, columnspan=2, sticky=W)
def __togglehex(self, event=None):
red, green, blue = self.__sb.current_rgb()
if self.__hexp.get():
label = '0x'
else:
label = ' '
self.__xox['text'] = label
self.__yox['text'] = label
self.__zox['text'] = label
self.update_yourself(red, green, blue)
def __normalize(self, event=None):
ew = event.widget
contents = ew.get()
icursor = ew.index(INSERT)
if contents and contents[0] in 'xX' and self.__hexp.get():
contents = '0' + contents
# Figure out the contents in the current base.
try:
if self.__hexp.get():
v = int(contents, 16)
else:
v = int(contents)
except ValueError:
v = None
# If value is not legal, or empty, delete the last character inserted
# and ring the bell. Don't ring the bell if the field is empty (it'll
        # just equal zero).
if v is None:
pass
elif v < 0 or v > 255:
i = ew.index(INSERT)
if event.char:
contents = contents[:i-1] + contents[i:]
icursor -= 1
ew.bell()
elif self.__hexp.get():
contents = hex(v)[2:]
else:
contents = int(v)
ew.delete(0, END)
ew.insert(0, contents)
ew.icursor(icursor)
def __maybeupdate(self, event=None):
if self.__uwtyping.get() or event.keysym in ('Return', 'Tab'):
self.__update(event)
def __update(self, event=None):
redstr = self.__x.get() or '0'
greenstr = self.__y.get() or '0'
bluestr = self.__z.get() or '0'
if self.__hexp.get():
base = 16
else:
base = 10
red, green, blue = [int(x, base) for x in (redstr, greenstr, bluestr)]
self.__sb.update_views(red, green, blue)
def update_yourself(self, red, green, blue):
if self.__hexp.get():
sred, sgreen, sblue = [hex(x)[2:] for x in (red, green, blue)]
else:
sred, sgreen, sblue = red, green, blue
x, y, z = self.__x, self.__y, self.__z
xicursor = x.index(INSERT)
yicursor = y.index(INSERT)
zicursor = z.index(INSERT)
x.delete(0, END)
y.delete(0, END)
z.delete(0, END)
x.insert(0, sred)
y.insert(0, sgreen)
z.insert(0, sblue)
x.icursor(xicursor)
y.icursor(yicursor)
z.icursor(zicursor)
def hexp_var(self):
return self.__hexp
def save_options(self, optiondb):
optiondb['HEXTYPE'] = self.__hexp.get()
optiondb['UPWHILETYPE'] = self.__uwtyping.get()
| apache-2.0 | -2,463,996,484,024,964,600 | 36.666667 | 78 | 0.543265 | false |
ThePletch/ansible | lib/ansible/modules/network/ipinfoio_facts.py | 39 | 4067 | #!/usr/bin/python
#
# (c) 2016, Aleksei Kostiuk <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: ipinfoio_facts
short_description: "Retrieve IP geolocation facts of a host's IP address"
description:
- "Gather IP geolocation facts of a host's IP address using ipinfo.io API"
version_added: "2.3"
author: "Aleksei Kostiuk (@akostyuk)"
options:
timeout:
description:
- HTTP connection timeout in seconds
required: false
default: 10
http_agent:
description:
- Set http user agent
required: false
default: "ansible-ipinfoio-module/0.0.1"
notes:
- "Check http://ipinfo.io/ for more information"
'''
EXAMPLES = '''
# Retrieve geolocation data of a host's IP address
- name: get IP geolocation data
ipinfoio_facts:
'''
RETURN = '''
ansible_facts:
description: "Dictionary of ip geolocation facts for a host's IP address"
returned: changed
type: dictionary
contains:
ip:
description: "Public IP address of a host"
type: string
sample: "8.8.8.8"
hostname:
description: Domain name
type: string
sample: "google-public-dns-a.google.com"
country:
description: ISO 3166-1 alpha-2 country code
type: string
sample: "US"
region:
description: State or province name
type: string
sample: "California"
city:
description: City name
type: string
sample: "Mountain View"
loc:
description: Latitude and Longitude of the location
type: string
sample: "37.3860,-122.0838"
org:
description: "organization's name"
type: string
sample: "AS3356 Level 3 Communications, Inc."
postal:
description: Postal code
type: string
sample: "94035"
'''
USER_AGENT = 'ansible-ipinfoio-module/0.0.1'
class IpinfoioFacts(object):
def __init__(self, module):
self.url = 'https://ipinfo.io/json'
self.timeout = module.params.get('timeout')
self.module = module
def get_geo_data(self):
response, info = fetch_url(self.module, self.url, force=True, # NOQA
timeout=self.timeout)
        if info['status'] != 200:
            self.module.fail_json(msg='Could not get {} page, '
                                  'check for connectivity!'.format(self.url))
        else:
try:
content = response.read()
result = self.module.from_json(content.decode('utf8'))
except ValueError:
self.module.fail_json(
msg='Failed to parse the ipinfo.io response: '
'{0} {1}'.format(self.url, content))
else:
return result
def main():
module = AnsibleModule( # NOQA
argument_spec=dict(
http_agent=dict(default=USER_AGENT),
timeout=dict(type='int', default=10),
),
supports_check_mode=True,
)
ipinfoio = IpinfoioFacts(module)
ipinfoio_result = dict(
changed=False, ansible_facts=ipinfoio.get_geo_data())
module.exit_json(**ipinfoio_result)
from ansible.module_utils.basic import * # NOQA
from ansible.module_utils.urls import * # NOQA
if __name__ == '__main__':
main()
| gpl-3.0 | 600,759,931,336,202,600 | 27.843972 | 77 | 0.622572 | false |
hiepthai/django-activity-stream | actstream/migrations/0001_initial.py | 8 | 7969 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from actstream.compat import user_model_label
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Follow'
db.create_table('actstream_follow', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm[user_model_label])),
('content_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['contenttypes.ContentType'])),
('object_id', self.gf('django.db.models.fields.PositiveIntegerField')()),
))
db.send_create_signal('actstream', ['Follow'])
# Adding unique constraint on 'Follow', fields ['user', 'content_type', 'object_id']
db.create_unique('actstream_follow', ['user_id', 'content_type_id', 'object_id'])
# Adding model 'Action'
db.create_table('actstream_action', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('actor_content_type', self.gf('django.db.models.fields.related.ForeignKey')(related_name='actor', to=orm['contenttypes.ContentType'])),
('actor_object_id', self.gf('django.db.models.fields.PositiveIntegerField')()),
('verb', self.gf('django.db.models.fields.CharField')(max_length=255)),
('description', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('target_content_type', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='target', null=True, to=orm['contenttypes.ContentType'])),
('target_object_id', self.gf('django.db.models.fields.PositiveIntegerField')(null=True, blank=True)),
('action_object_content_type', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='action_object', null=True, to=orm['contenttypes.ContentType'])),
('action_object_object_id', self.gf('django.db.models.fields.PositiveIntegerField')(null=True, blank=True)),
('timestamp', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('public', self.gf('django.db.models.fields.BooleanField')(default=True)),
))
db.send_create_signal('actstream', ['Action'])
def backwards(self, orm):
# Removing unique constraint on 'Follow', fields ['user', 'content_type', 'object_id']
db.delete_unique('actstream_follow', ['user_id', 'content_type_id', 'object_id'])
# Deleting model 'Follow'
db.delete_table('actstream_follow')
# Deleting model 'Action'
db.delete_table('actstream_action')
models = {
'actstream.action': {
'Meta': {'object_name': 'Action'},
'action_object_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'action_object'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'action_object_object_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'actor_content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'actor'", 'to': "orm['contenttypes.ContentType']"}),
'actor_object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'target_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'target'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'target_object_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'verb': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'actstream.follow': {
'Meta': {'unique_together': "(('user', 'content_type', 'object_id'),)", 'object_name': 'Follow'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['%s']" % user_model_label})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
user_model_label: {
'Meta': {'object_name': user_model_label.split('.')[-1]},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['actstream']
| bsd-3-clause | -14,829,090,282,516,132 | 68.295652 | 204 | 0.587903 | false |
jbaiter/spreads | spreadsplug/intervaltrigger.py | 2 | 2100 | # -*- coding: utf-8 -*-
# Copyright (C) 2014 Johannes Baiter <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import threading
import time
from spreads.config import OptionTemplate
from spreads.plugin import HookPlugin, TriggerHooksMixin
logger = logging.getLogger('spreadsplug.intervaltrigger')
class IntervalTrigger(HookPlugin, TriggerHooksMixin):
__name__ = 'intervaltrigger'
_loop_thread = None
_exit_event = None
@classmethod
def configuration_template(cls):
return {'interval': OptionTemplate(5.0, "Interval between captures"
" (in seconds)")}
def start_trigger_loop(self, capture_callback):
logger.debug("Starting event loop")
self._exit_event = threading.Event()
self._loop_thread = threading.Thread(target=self._trigger_loop,
args=(capture_callback, ))
self._loop_thread.start()
def stop_trigger_loop(self):
logger.debug("Stopping event loop")
self._exit_event.set()
self._loop_thread.join()
def _trigger_loop(self, capture_func):
interval = self.config['interval'].get(float)
while True:
sleep_time = 0
while sleep_time < interval:
if self._exit_event.is_set():
return
time.sleep(0.01)
sleep_time += 0.01
capture_func()
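# Rough usage sketch (hypothetical driver code, not part of the plugin): the
# workflow hands the plugin a capture callback, lets it fire every `interval`
# seconds, and tears the loop down when the session ends.
#
#   trigger = IntervalTrigger(config)          # construction details assumed
#   trigger.start_trigger_loop(workflow.capture)
#   ...                                        # capturing happens here
#   trigger.stop_trigger_loop()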
| agpl-3.0 | -4,346,167,278,326,031,400 | 34 | 75 | 0.647619 | false |
kumar303/rockit | vendor-local/boto/ec2/instance.py | 2 | 17445 | # Copyright (c) 2006-2010 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2010, Eucalyptus Systems, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Represents an EC2 Instance
"""
import boto
from boto.ec2.ec2object import EC2Object, TaggedEC2Object
from boto.resultset import ResultSet
from boto.ec2.address import Address
from boto.ec2.blockdevicemapping import BlockDeviceMapping
from boto.ec2.image import ProductCodes
from boto.ec2.networkinterface import NetworkInterface
from boto.ec2.group import Group
import base64
class Reservation(EC2Object):
"""
Represents a Reservation response object.
:ivar id: The unique ID of the Reservation.
:ivar owner_id: The unique ID of the owner of the Reservation.
:ivar groups: A list of Group objects representing the security
groups associated with launched instances.
:ivar instances: A list of Instance objects launched in this
Reservation.
"""
def __init__(self, connection=None):
EC2Object.__init__(self, connection)
self.id = None
self.owner_id = None
self.groups = []
self.instances = []
def __repr__(self):
return 'Reservation:%s' % self.id
def startElement(self, name, attrs, connection):
if name == 'instancesSet':
self.instances = ResultSet([('item', Instance)])
return self.instances
elif name == 'groupSet':
self.groups = ResultSet([('item', Group)])
return self.groups
else:
return None
def endElement(self, name, value, connection):
if name == 'reservationId':
self.id = value
elif name == 'ownerId':
self.owner_id = value
else:
setattr(self, name, value)
def stop_all(self):
for instance in self.instances:
instance.stop()
class Instance(TaggedEC2Object):
"""
Represents an instance.
:ivar id: The unique ID of the Instance.
:ivar groups: A list of Group objects representing the security
groups associated with the instance.
:ivar public_dns_name: The public dns name of the instance.
:ivar private_dns_name: The private dns name of the instance.
    :ivar state: The string representation of the instance's current state.
    :ivar state_code: An integer representation of the instance's current state.
:ivar key_name: The name of the SSH key associated with the instance.
:ivar instance_type: The type of instance (e.g. m1.small).
:ivar launch_time: The time the instance was launched.
:ivar image_id: The ID of the AMI used to launch this instance.
:ivar placement: The availability zone in which the instance is running.
:ivar kernel: The kernel associated with the instance.
:ivar ramdisk: The ramdisk associated with the instance.
:ivar architecture: The architecture of the image (i386|x86_64).
:ivar hypervisor: The hypervisor used.
:ivar virtualization_type: The type of virtualization used.
:ivar product_codes: A list of product codes associated with this instance.
    :ivar ami_launch_index: This instance's position within its launch group.
:ivar monitored: A boolean indicating whether monitoring is enabled or not.
:ivar spot_instance_request_id: The ID of the spot instance request
if this is a spot instance.
:ivar subnet_id: The VPC Subnet ID, if running in VPC.
:ivar vpc_id: The VPC ID, if running in VPC.
:ivar private_ip_address: The private IP address of the instance.
:ivar ip_address: The public IP address of the instance.
:ivar platform: Platform of the instance (e.g. Windows)
:ivar root_device_name: The name of the root device.
:ivar root_device_type: The root device type (ebs|instance-store).
:ivar block_device_mapping: The Block Device Mapping for the instance.
:ivar state_reason: The reason for the most recent state transition.
:ivar interfaces: List of Elastic Network Interfaces associated with
this instance.
"""
def __init__(self, connection=None):
TaggedEC2Object.__init__(self, connection)
self.id = None
self.dns_name = None
self.public_dns_name = None
self.private_dns_name = None
self.state = None
self.state_code = None
self.key_name = None
self.shutdown_state = None
self.previous_state = None
self.instance_type = None
self.launch_time = None
self.image_id = None
self.placement = None
self.kernel = None
self.ramdisk = None
self.product_codes = ProductCodes()
self.ami_launch_index = None
self.monitored = False
self.spot_instance_request_id = None
self.subnet_id = None
self.vpc_id = None
self.private_ip_address = None
self.ip_address = None
self.requester_id = None
self._in_monitoring_element = False
self.persistent = False
self.root_device_name = None
self.root_device_type = None
self.block_device_mapping = None
self.state_reason = None
self.group_name = None
self.client_token = None
self.eventsSet = None
self.groups = []
self.platform = None
self.interfaces = []
self.hypervisor = None
self.virtualization_type = None
self.architecture = None
def __repr__(self):
return 'Instance:%s' % self.id
def startElement(self, name, attrs, connection):
retval = TaggedEC2Object.startElement(self, name, attrs, connection)
if retval is not None:
return retval
if name == 'monitoring':
self._in_monitoring_element = True
elif name == 'blockDeviceMapping':
self.block_device_mapping = BlockDeviceMapping()
return self.block_device_mapping
elif name == 'productCodes':
return self.product_codes
elif name == 'stateReason':
self.state_reason = SubParse('stateReason')
return self.state_reason
elif name == 'groupSet':
self.groups = ResultSet([('item', Group)])
return self.groups
elif name == "eventsSet":
self.eventsSet = SubParse('eventsSet')
return self.eventsSet
elif name == 'networkInterfaceSet':
self.interfaces = ResultSet([('item', NetworkInterface)])
return None
def endElement(self, name, value, connection):
if name == 'instanceId':
self.id = value
elif name == 'imageId':
self.image_id = value
elif name == 'dnsName' or name == 'publicDnsName':
self.dns_name = value # backwards compatibility
self.public_dns_name = value
elif name == 'privateDnsName':
self.private_dns_name = value
elif name == 'keyName':
self.key_name = value
elif name == 'amiLaunchIndex':
self.ami_launch_index = value
elif name == 'shutdownState':
self.shutdown_state = value
elif name == 'previousState':
self.previous_state = value
elif name == 'name':
self.state = value
elif name == 'code':
try:
self.state_code = int(value)
except ValueError:
boto.log.warning('Error converting code (%s) to int' % value)
self.state_code = value
elif name == 'instanceType':
self.instance_type = value
elif name == 'rootDeviceName':
self.root_device_name = value
elif name == 'rootDeviceType':
self.root_device_type = value
elif name == 'launchTime':
self.launch_time = value
elif name == 'availabilityZone':
self.placement = value
elif name == 'platform':
self.platform = value
elif name == 'placement':
pass
elif name == 'kernelId':
self.kernel = value
elif name == 'ramdiskId':
self.ramdisk = value
elif name == 'state':
if self._in_monitoring_element:
if value == 'enabled':
self.monitored = True
self._in_monitoring_element = False
elif name == 'spotInstanceRequestId':
self.spot_instance_request_id = value
elif name == 'subnetId':
self.subnet_id = value
elif name == 'vpcId':
self.vpc_id = value
elif name == 'privateIpAddress':
self.private_ip_address = value
elif name == 'ipAddress':
self.ip_address = value
elif name == 'requesterId':
self.requester_id = value
elif name == 'persistent':
if value == 'true':
self.persistent = True
else:
self.persistent = False
elif name == 'groupName':
if self._in_monitoring_element:
self.group_name = value
elif name == 'clientToken':
self.client_token = value
elif name == "eventsSet":
self.events = value
elif name == 'hypervisor':
self.hypervisor = value
elif name == 'virtualizationType':
self.virtualization_type = value
elif name == 'architecture':
self.architecture = value
else:
setattr(self, name, value)
def _update(self, updated):
self.__dict__.update(updated.__dict__)
def update(self, validate=False):
"""
Update the instance's state information by making a call to fetch
the current instance attributes from the service.
:type validate: bool
:param validate: By default, if EC2 returns no data about the
instance the update method returns quietly. If
the validate param is True, however, it will
raise a ValueError exception if no data is
returned from EC2.
"""
rs = self.connection.get_all_instances([self.id])
if len(rs) > 0:
r = rs[0]
for i in r.instances:
if i.id == self.id:
self._update(i)
elif validate:
raise ValueError('%s is not a valid Instance ID' % self.id)
return self.state
def terminate(self):
"""
Terminate the instance
"""
rs = self.connection.terminate_instances([self.id])
if len(rs) > 0:
self._update(rs[0])
def stop(self, force=False):
"""
Stop the instance
:type force: bool
:param force: Forces the instance to stop
:rtype: list
:return: A list of the instances stopped
"""
rs = self.connection.stop_instances([self.id], force)
if len(rs) > 0:
self._update(rs[0])
def start(self):
"""
Start the instance.
"""
rs = self.connection.start_instances([self.id])
if len(rs) > 0:
self._update(rs[0])
def reboot(self):
return self.connection.reboot_instances([self.id])
def get_console_output(self):
"""
Retrieves the console output for the instance.
:rtype: :class:`boto.ec2.instance.ConsoleOutput`
:return: The console output as a ConsoleOutput object
"""
return self.connection.get_console_output(self.id)
def confirm_product(self, product_code):
return self.connection.confirm_product_instance(self.id, product_code)
def use_ip(self, ip_address):
if isinstance(ip_address, Address):
ip_address = ip_address.public_ip
return self.connection.associate_address(self.id, ip_address)
def monitor(self):
return self.connection.monitor_instance(self.id)
def unmonitor(self):
return self.connection.unmonitor_instance(self.id)
def get_attribute(self, attribute):
"""
Gets an attribute from this instance.
:type attribute: string
:param attribute: The attribute you need information about
Valid choices are:
instanceType|kernel|ramdisk|userData|
disableApiTermination|
instanceInitiatedShutdownBehavior|
rootDeviceName|blockDeviceMapping
:rtype: :class:`boto.ec2.image.InstanceAttribute`
:return: An InstanceAttribute object representing the value of the
attribute requested
"""
return self.connection.get_instance_attribute(self.id, attribute)
def modify_attribute(self, attribute, value):
"""
Changes an attribute of this instance
:type attribute: string
:param attribute: The attribute you wish to change.
AttributeName - Expected value (default)
instanceType - A valid instance type (m1.small)
kernel - Kernel ID (None)
ramdisk - Ramdisk ID (None)
userData - Base64 encoded String (None)
disableApiTermination - Boolean (true)
instanceInitiatedShutdownBehavior - stop|terminate
rootDeviceName - device name (None)
:type value: string
:param value: The new value for the attribute
:rtype: bool
:return: Whether the operation succeeded or not
"""
return self.connection.modify_instance_attribute(self.id, attribute,
value)
def reset_attribute(self, attribute):
"""
Resets an attribute of this instance to its default value.
:type attribute: string
:param attribute: The attribute to reset. Valid values are:
kernel|ramdisk
:rtype: bool
:return: Whether the operation succeeded or not
"""
return self.connection.reset_instance_attribute(self.id, attribute)
class ConsoleOutput:
def __init__(self, parent=None):
self.parent = parent
self.instance_id = None
self.timestamp = None
self.output = None
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
if name == 'instanceId':
self.instance_id = value
elif name == 'timestamp':
self.timestamp = value
elif name == 'output':
self.output = base64.b64decode(value)
else:
setattr(self, name, value)
class InstanceAttribute(dict):
ValidValues = ['instanceType', 'kernel', 'ramdisk', 'userData',
'disableApiTermination', 'instanceInitiatedShutdownBehavior',
'rootDeviceName', 'blockDeviceMapping', 'sourceDestCheck',
'groupSet']
def __init__(self, parent=None):
dict.__init__(self)
self.instance_id = None
self.request_id = None
self._current_value = None
def startElement(self, name, attrs, connection):
if name == 'blockDeviceMapping':
self[name] = BlockDeviceMapping()
return self[name]
elif name == 'groupSet':
self[name] = ResultSet([('item', Group)])
return self[name]
else:
return None
def endElement(self, name, value, connection):
if name == 'instanceId':
self.instance_id = value
elif name == 'requestId':
self.request_id = value
elif name == 'value':
self._current_value = value
elif name in self.ValidValues:
self[name] = self._current_value
class SubParse(dict):
def __init__(self, section, parent=None):
dict.__init__(self)
self.section = section
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
if name != self.section:
self[name] = value
| bsd-3-clause | -833,427,655,962,232,000 | 35.881607 | 80 | 0.596389 | false |
jsquare/hikeplanner | hikes/views.py | 1 | 4162 | from django.http import HttpResponse, HttpResponseRedirect, HttpResponseNotFound
from hikes.models import Hike
from django.shortcuts import render_to_response, render
from django.contrib.gis import forms
from django.contrib import auth
from django.contrib.auth.forms import UserCreationForm
from django.core.urlresolvers import reverse
from urllib import urlencode
from django.contrib.gis.measure import D
from django.contrib.gis.geos import fromstr, Point
from django.template import RequestContext
from django.core.context_processors import csrf
# Create your views here.
class SearchForm(forms.Form):
start_location = forms.CharField()
start_latitude = forms.FloatField(widget=forms.HiddenInput())
start_longitude = forms.FloatField(widget=forms.HiddenInput())
min_radius = forms.IntegerField(widget=forms.HiddenInput(),initial=0)
max_radius = forms.IntegerField(widget=forms.HiddenInput(),initial=1)
min_length = forms.IntegerField(widget=forms.HiddenInput(),initial=1)
max_length = forms.IntegerField(widget=forms.HiddenInput(),initial=2)
min_terrain = forms.IntegerField(widget=forms.HiddenInput(),initial=0)
max_terrain = forms.IntegerField(widget=forms.HiddenInput(), initial=1)
def home(request):
if request.GET:
form = SearchForm(request.GET)
else:
form = SearchForm()
context = {
'form' : form
}
    return render_to_response('search.html', context, context_instance=RequestContext(request))
def results(request):
form = SearchForm(request.GET)
if not form.is_valid():
url = reverse('home')
params = urlencode(request.GET)
return HttpResponseRedirect('%s?%s' % (url,params))
# Request hikes from db within min and max day limits
min_days = form.cleaned_data['min_length'] # TODO: change db fields to be min/max length instead of days. TODO: convert 'lengths' ==> days
max_days = form.cleaned_data['max_length']
radius = form.cleaned_data['max_radius'] # TODO: support min radius
start_latitude = form.cleaned_data['start_latitude']
start_longitude = form.cleaned_data['start_longitude']
start_location = Point(start_longitude,start_latitude)
hike_list = Hike.objects.filter(days__gte=min_days, days__lte=max_days,location__distance_lt=(start_location, D(km=radius)))
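    # Note on the geo lookup above: GeoDjango's Point takes (x, y) ==
    # (longitude, latitude), which is why start_location is built as
    # Point(start_longitude, start_latitude); `distance_lt` with D(km=radius)
    # keeps only hikes whose stored location lies within `radius` kilometres
    # of the start point.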
context = {
'hike_list' : hike_list,
'page_title' : 'Hike Results'
}
hike_str = "Here are all the hikes within your limits: {}".format([hike.__unicode__() for hike in hike_list])
return render_to_response('results.html', context)
def hike_detail(request, hike_id, slug=''):
'''
The general information page about a hike.
@param slug: optional, ignored (allows StackOverflow-style URL)
'''
try:
hike = Hike.objects.get(id=hike_id)
except Hike.DoesNotExist:
return HttpResponseNotFound() # TODO
context = {
'hike': hike,
'page_title': hike.name,
}
return render_to_response('hike_detail.html', context)
#Gets called when a user submits login info. Authenticates and redirects user.
def login(request):
username = request.POST.get('username', '')
password = request.POST.get('password', '')
user = auth.authenticate(username=username, password=password)
if user is not None and user.is_active:
        #Verified correct password, user is marked as active, so log them in
auth.login(request, user)
#Redirect to success page
return HttpResponseRedirect("/account/loggedin")
else:
#Show error page
return HttpResponseRedirect("/account/invalid")
#Gets called when user clicks on logout
def logout(request):
auth.logout(request)
    #Redirect to success page
return HttpResponseRedirect("/")
def register(request):
if request.method == 'POST':
form = UserCreationForm(request.POST)
if form.is_valid():
form.save()
return HttpResponseRedirect("/")
else:
args = {}
args.update(csrf(request))
args['form'] = UserCreationForm()
return render_to_response('register.html', args)
| gpl-2.0 | -8,148,998,843,363,708,000 | 35.508772 | 143 | 0.689092 | false |
da-nrw/DNSCore | 3rdParty/fido/fido/argparselocal.py | 6 | 87524 | # -*- coding: utf-8 -*-
# Copyright © 2006-2009 Steven J. Bethard <[email protected]>.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Command-line parsing library
This module is an optparse-inspired command-line parsing library that:
- handles both optional and positional arguments
- produces highly informative usage messages
- supports parsers that dispatch to sub-parsers
The following is a simple usage example that sums integers from the
command-line and writes the result to a file::
parser = argparse.ArgumentParser(
description='sum the integers at the command line')
parser.add_argument(
'integers', metavar='int', nargs='+', type=int,
help='an integer to be summed')
parser.add_argument(
'--log', default=sys.stdout, type=argparse.FileType('w'),
help='the file where the sum should be written')
args = parser.parse_args()
args.log.write('%s' % sum(args.integers))
args.log.close()
The module contains the following public classes:
- ArgumentParser -- The main entry point for command-line parsing. As the
example above shows, the add_argument() method is used to populate
the parser with actions for optional and positional arguments. Then
the parse_args() method is invoked to convert the args at the
command-line into an object with attributes.
- ArgumentError -- The exception raised by ArgumentParser objects when
there are errors with the parser's actions. Errors raised while
parsing the command-line are caught by ArgumentParser and emitted
as command-line messages.
- FileType -- A factory for defining types of files to be created. As the
example above shows, instances of FileType are typically passed as
the type= argument of add_argument() calls.
- Action -- The base class for parser actions. Typically actions are
selected by passing strings like 'store_true' or 'append_const' to
the action= argument of add_argument(). However, for greater
customization of ArgumentParser actions, subclasses of Action may
be defined and passed as the action= argument.
- HelpFormatter, RawDescriptionHelpFormatter, RawTextHelpFormatter,
ArgumentDefaultsHelpFormatter -- Formatter classes which
may be passed as the formatter_class= argument to the
ArgumentParser constructor. HelpFormatter is the default,
RawDescriptionHelpFormatter and RawTextHelpFormatter tell the parser
not to change the formatting for help text, and
ArgumentDefaultsHelpFormatter adds information about argument defaults
to the help.
All other classes in this module are considered implementation details.
(Also note that HelpFormatter and RawDescriptionHelpFormatter are only
considered public as object names -- the API of the formatter objects is
still considered an implementation detail.)
"""
__version__ = '1.1'
__all__ = [
'ArgumentParser',
'ArgumentError',
'Namespace',
'Action',
'FileType',
'HelpFormatter',
'RawDescriptionHelpFormatter',
'RawTextHelpFormatter',
'ArgumentDefaultsHelpFormatter',
]
import copy as _copy
import os as _os
import re as _re
import sys as _sys
import textwrap as _textwrap
from gettext import gettext as _
try:
_set = set
except NameError:
from sets import Set as _set
try:
_basestring = basestring
except NameError:
_basestring = str
try:
_sorted = sorted
except NameError:
def _sorted(iterable, reverse=False):
result = list(iterable)
result.sort()
if reverse:
result.reverse()
return result
def _callable(obj):
return hasattr(obj, '__call__') or hasattr(obj, '__bases__')
# silence Python 2.6 buggy warnings about Exception.message
if _sys.version_info[:2] == (2, 6):
import warnings
warnings.filterwarnings(
action='ignore',
message='BaseException.message has been deprecated as of Python 2.6',
category=DeprecationWarning,
module='argparse')
SUPPRESS = '==SUPPRESS=='
OPTIONAL = '?'
ZERO_OR_MORE = '*'
ONE_OR_MORE = '+'
PARSER = 'A...'
REMAINDER = '...'
# =============================
# Utility functions and classes
# =============================
class _AttributeHolder(object):
"""Abstract base class that provides __repr__.
The __repr__ method returns a string in the format::
ClassName(attr=name, attr=name, ...)
The attributes are determined either by a class-level attribute,
'_kwarg_names', or by inspecting the instance __dict__.
"""
def __repr__(self):
type_name = type(self).__name__
arg_strings = []
for arg in self._get_args():
arg_strings.append(repr(arg))
for name, value in self._get_kwargs():
arg_strings.append('%s=%r' % (name, value))
return '%s(%s)' % (type_name, ', '.join(arg_strings))
def _get_kwargs(self):
return _sorted(self.__dict__.items())
def _get_args(self):
return []
def _ensure_value(namespace, name, value):
if getattr(namespace, name, None) is None:
setattr(namespace, name, value)
return getattr(namespace, name)
# ===============
# Formatting Help
# ===============
class HelpFormatter(object):
"""Formatter for generating usage messages and argument help strings.
Only the name of this class is considered a public API. All the methods
provided by the class are considered an implementation detail.
"""
def __init__(self,
prog,
indent_increment=2,
max_help_position=24,
width=None):
# default setting for width
if width is None:
try:
width = int(_os.environ['COLUMNS'])
except (KeyError, ValueError):
width = 80
width -= 2
self._prog = prog
self._indent_increment = indent_increment
self._max_help_position = max_help_position
self._width = width
self._current_indent = 0
self._level = 0
self._action_max_length = 0
self._root_section = self._Section(self, None)
self._current_section = self._root_section
self._whitespace_matcher = _re.compile(r'\s+')
self._long_break_matcher = _re.compile(r'\n\n\n+')
# ===============================
# Section and indentation methods
# ===============================
def _indent(self):
self._current_indent += self._indent_increment
self._level += 1
def _dedent(self):
self._current_indent -= self._indent_increment
assert self._current_indent >= 0, 'Indent decreased below 0.'
self._level -= 1
class _Section(object):
def __init__(self, formatter, parent, heading=None):
self.formatter = formatter
self.parent = parent
self.heading = heading
self.items = []
def format_help(self):
# format the indented section
if self.parent is not None:
self.formatter._indent()
join = self.formatter._join_parts
            item_help = join([func(*args) for func, args in self.items])
if self.parent is not None:
self.formatter._dedent()
# return nothing if the section was empty
if not item_help:
return ''
# add the heading if the section was non-empty
if self.heading is not SUPPRESS and self.heading is not None:
current_indent = self.formatter._current_indent
heading = '%*s%s:\n' % (current_indent, '', self.heading)
else:
heading = ''
# join the section-initial newline, the heading and the help
return join(['\n', heading, item_help, '\n'])
def _add_item(self, func, args):
self._current_section.items.append((func, args))
# ========================
# Message building methods
# ========================
def start_section(self, heading):
self._indent()
section = self._Section(self, self._current_section, heading)
self._add_item(section.format_help, [])
self._current_section = section
def end_section(self):
self._current_section = self._current_section.parent
self._dedent()
def add_text(self, text):
if text is not SUPPRESS and text is not None:
self._add_item(self._format_text, [text])
def add_usage(self, usage, actions, groups, prefix=None):
if usage is not SUPPRESS:
args = usage, actions, groups, prefix
self._add_item(self._format_usage, args)
def add_argument(self, action):
if action.help is not SUPPRESS:
# find all invocations
get_invocation = self._format_action_invocation
invocations = [get_invocation(action)]
for subaction in self._iter_indented_subactions(action):
invocations.append(get_invocation(subaction))
# update the maximum item length
invocation_length = max([len(s) for s in invocations])
action_length = invocation_length + self._current_indent
self._action_max_length = max(self._action_max_length,
action_length)
# add the item to the list
self._add_item(self._format_action, [action])
def add_arguments(self, actions):
for action in actions:
self.add_argument(action)
# =======================
# Help-formatting methods
# =======================
def format_help(self):
help = self._root_section.format_help()
if help:
help = self._long_break_matcher.sub('\n\n', help)
help = help.strip('\n') + '\n'
return help
def _join_parts(self, part_strings):
return ''.join([part
for part in part_strings
if part and part is not SUPPRESS])
def _format_usage(self, usage, actions, groups, prefix):
if prefix is None:
prefix = _('usage: ')
# if usage is specified, use that
if usage is not None:
usage = usage % dict(prog=self._prog)
# if no optionals or positionals are available, usage is just prog
elif usage is None and not actions:
usage = '%(prog)s' % dict(prog=self._prog)
# if optionals and positionals are available, calculate usage
elif usage is None:
prog = '%(prog)s' % dict(prog=self._prog)
# split optionals from positionals
optionals = []
positionals = []
for action in actions:
if action.option_strings:
optionals.append(action)
else:
positionals.append(action)
# build full usage string
format = self._format_actions_usage
action_usage = format(optionals + positionals, groups)
usage = ' '.join([s for s in [prog, action_usage] if s])
# wrap the usage parts if it's too long
text_width = self._width - self._current_indent
if len(prefix) + len(usage) > text_width:
# break usage into wrappable parts
part_regexp = r'\(.*?\)+|\[.*?\]+|\S+'
opt_usage = format(optionals, groups)
pos_usage = format(positionals, groups)
opt_parts = _re.findall(part_regexp, opt_usage)
pos_parts = _re.findall(part_regexp, pos_usage)
assert ' '.join(opt_parts) == opt_usage
assert ' '.join(pos_parts) == pos_usage
# helper for wrapping lines
def get_lines(parts, indent, prefix=None):
lines = []
line = []
if prefix is not None:
line_len = len(prefix) - 1
else:
line_len = len(indent) - 1
for part in parts:
if line_len + 1 + len(part) > text_width:
lines.append(indent + ' '.join(line))
line = []
line_len = len(indent) - 1
line.append(part)
line_len += len(part) + 1
if line:
lines.append(indent + ' '.join(line))
if prefix is not None:
lines[0] = lines[0][len(indent):]
return lines
# if prog is short, follow it with optionals or positionals
if len(prefix) + len(prog) <= 0.75 * text_width:
indent = ' ' * (len(prefix) + len(prog) + 1)
if opt_parts:
lines = get_lines([prog] + opt_parts, indent, prefix)
lines.extend(get_lines(pos_parts, indent))
elif pos_parts:
lines = get_lines([prog] + pos_parts, indent, prefix)
else:
lines = [prog]
# if prog is long, put it on its own line
else:
indent = ' ' * len(prefix)
parts = opt_parts + pos_parts
lines = get_lines(parts, indent)
if len(lines) > 1:
lines = []
lines.extend(get_lines(opt_parts, indent))
lines.extend(get_lines(pos_parts, indent))
lines = [prog] + lines
# join lines into usage
usage = '\n'.join(lines)
# prefix with 'usage:'
return '%s%s\n\n' % (prefix, usage)
def _format_actions_usage(self, actions, groups):
# find group indices and identify actions in groups
group_actions = _set()
inserts = {}
for group in groups:
try:
start = actions.index(group._group_actions[0])
except ValueError:
continue
else:
end = start + len(group._group_actions)
if actions[start:end] == group._group_actions:
for action in group._group_actions:
group_actions.add(action)
if not group.required:
inserts[start] = '['
inserts[end] = ']'
else:
inserts[start] = '('
inserts[end] = ')'
for i in range(start + 1, end):
inserts[i] = '|'
# collect all actions format strings
parts = []
for i, action in enumerate(actions):
# suppressed arguments are marked with None
# remove | separators for suppressed arguments
if action.help is SUPPRESS:
parts.append(None)
if inserts.get(i) == '|':
inserts.pop(i)
elif inserts.get(i + 1) == '|':
inserts.pop(i + 1)
# produce all arg strings
elif not action.option_strings:
part = self._format_args(action, action.dest)
# if it's in a group, strip the outer []
if action in group_actions:
if part[0] == '[' and part[-1] == ']':
part = part[1:-1]
# add the action string to the list
parts.append(part)
# produce the first way to invoke the option in brackets
else:
option_string = action.option_strings[0]
# if the Optional doesn't take a value, format is:
# -s or --long
if action.nargs == 0:
part = '%s' % option_string
# if the Optional takes a value, format is:
# -s ARGS or --long ARGS
else:
default = action.dest.upper()
args_string = self._format_args(action, default)
part = '%s %s' % (option_string, args_string)
# make it look optional if it's not required or in a group
if not action.required and action not in group_actions:
part = '[%s]' % part
# add the action string to the list
parts.append(part)
# insert things at the necessary indices
for i in _sorted(inserts, reverse=True):
parts[i:i] = [inserts[i]]
# join all the action items with spaces
text = ' '.join([item for item in parts if item is not None])
# clean up separators for mutually exclusive groups
open = r'[\[(]'
close = r'[\])]'
text = _re.sub(r'(%s) ' % open, r'\1', text)
text = _re.sub(r' (%s)' % close, r'\1', text)
text = _re.sub(r'%s *%s' % (open, close), r'', text)
text = _re.sub(r'\(([^|]*)\)', r'\1', text)
text = text.strip()
# return the text
return text
def _format_text(self, text):
if '%(prog)' in text:
text = text % dict(prog=self._prog)
text_width = self._width - self._current_indent
indent = ' ' * self._current_indent
return self._fill_text(text, text_width, indent) + '\n\n'
def _format_action(self, action):
# determine the required width and the entry label
help_position = min(self._action_max_length + 2,
self._max_help_position)
help_width = self._width - help_position
action_width = help_position - self._current_indent - 2
action_header = self._format_action_invocation(action)
        # no help; start on the same line and add a final newline
if not action.help:
tup = self._current_indent, '', action_header
action_header = '%*s%s\n' % tup
# short action name; start on the same line and pad two spaces
elif len(action_header) <= action_width:
tup = self._current_indent, '', action_width, action_header
action_header = '%*s%-*s ' % tup
indent_first = 0
# long action name; start on the next line
else:
tup = self._current_indent, '', action_header
action_header = '%*s%s\n' % tup
indent_first = help_position
# collect the pieces of the action help
parts = [action_header]
# if there was help for the action, add lines of help text
if action.help:
help_text = self._expand_help(action)
help_lines = self._split_lines(help_text, help_width)
parts.append('%*s%s\n' % (indent_first, '', help_lines[0]))
for line in help_lines[1:]:
parts.append('%*s%s\n' % (help_position, '', line))
# or add a newline if the description doesn't end with one
elif not action_header.endswith('\n'):
parts.append('\n')
# if there are any sub-actions, add their help as well
for subaction in self._iter_indented_subactions(action):
parts.append(self._format_action(subaction))
# return a single string
return self._join_parts(parts)
def _format_action_invocation(self, action):
if not action.option_strings:
metavar, = self._metavar_formatter(action, action.dest)(1)
return metavar
else:
parts = []
# if the Optional doesn't take a value, format is:
# -s, --long
if action.nargs == 0:
parts.extend(action.option_strings)
# if the Optional takes a value, format is:
# -s ARGS, --long ARGS
else:
default = action.dest.upper()
args_string = self._format_args(action, default)
for option_string in action.option_strings:
parts.append('%s %s' % (option_string, args_string))
return ', '.join(parts)
def _metavar_formatter(self, action, default_metavar):
if action.metavar is not None:
result = action.metavar
elif action.choices is not None:
choice_strs = [str(choice) for choice in action.choices]
result = '{%s}' % ','.join(choice_strs)
else:
result = default_metavar
def format(tuple_size):
if isinstance(result, tuple):
return result
else:
return (result, ) * tuple_size
return format
def _format_args(self, action, default_metavar):
get_metavar = self._metavar_formatter(action, default_metavar)
if action.nargs is None:
result = '%s' % get_metavar(1)
elif action.nargs == OPTIONAL:
result = '[%s]' % get_metavar(1)
elif action.nargs == ZERO_OR_MORE:
result = '[%s [%s ...]]' % get_metavar(2)
elif action.nargs == ONE_OR_MORE:
result = '%s [%s ...]' % get_metavar(2)
elif action.nargs == REMAINDER:
result = '...'
elif action.nargs == PARSER:
result = '%s ...' % get_metavar(1)
else:
formats = ['%s' for _ in range(action.nargs)]
result = ' '.join(formats) % get_metavar(action.nargs)
return result
def _expand_help(self, action):
params = dict(vars(action), prog=self._prog)
for name in list(params):
if params[name] is SUPPRESS:
del params[name]
for name in list(params):
if hasattr(params[name], '__name__'):
params[name] = params[name].__name__
if params.get('choices') is not None:
choices_str = ', '.join([str(c) for c in params['choices']])
params['choices'] = choices_str
return self._get_help_string(action) % params
def _iter_indented_subactions(self, action):
try:
get_subactions = action._get_subactions
except AttributeError:
pass
else:
self._indent()
for subaction in get_subactions():
yield subaction
self._dedent()
def _split_lines(self, text, width):
text = self._whitespace_matcher.sub(' ', text).strip()
return _textwrap.wrap(text, width)
def _fill_text(self, text, width, indent):
text = self._whitespace_matcher.sub(' ', text).strip()
return _textwrap.fill(text, width, initial_indent=indent,
subsequent_indent=indent)
def _get_help_string(self, action):
return action.help
class RawDescriptionHelpFormatter(HelpFormatter):
"""Help message formatter which retains any formatting in descriptions.
Only the name of this class is considered a public API. All the methods
provided by the class are considered an implementation detail.
"""
def _fill_text(self, text, width, indent):
return ''.join([indent + line for line in text.splitlines(True)])
class RawTextHelpFormatter(RawDescriptionHelpFormatter):
"""Help message formatter which retains formatting of all help text.
Only the name of this class is considered a public API. All the methods
provided by the class are considered an implementation detail.
"""
def _split_lines(self, text, width):
return text.splitlines()
class ArgumentDefaultsHelpFormatter(HelpFormatter):
"""Help message formatter which adds default values to argument help.
Only the name of this class is considered a public API. All the methods
provided by the class are considered an implementation detail.
"""
def _get_help_string(self, action):
help = action.help
if '%(default)' not in action.help:
if action.default is not SUPPRESS:
defaulting_nargs = [OPTIONAL, ZERO_OR_MORE]
if action.option_strings or action.nargs in defaulting_nargs:
help += ' (default: %(default)s)'
return help
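# A minimal sketch of choosing a formatter class in client code (assumes the
# module is imported as "argparse"; the parser and option names here are
# hypothetical):
#
#     parser = argparse.ArgumentParser(
#         description='frobnicate the widgets',
#         formatter_class=argparse.ArgumentDefaultsHelpFormatter)
#     parser.add_argument('--retries', type=int, default=3,
#                         help='number of attempts')
#     # each help line in --help output now ends with "(default: 3)"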
# =====================
# Options and Arguments
# =====================
def _get_action_name(argument):
if argument is None:
return None
elif argument.option_strings:
return '/'.join(argument.option_strings)
elif argument.metavar not in (None, SUPPRESS):
return argument.metavar
elif argument.dest not in (None, SUPPRESS):
return argument.dest
else:
return None
class ArgumentError(Exception):
"""An error from creating or using an argument (optional or positional).
The string value of this exception is the message, augmented with
information about the argument that caused it.
"""
def __init__(self, argument, message):
self.argument_name = _get_action_name(argument)
self.message = message
def __str__(self):
if self.argument_name is None:
format = '%(message)s'
else:
format = 'argument %(argument_name)s: %(message)s'
return format % dict(message=self.message,
argument_name=self.argument_name)
class ArgumentTypeError(Exception):
"""An error from trying to convert a command line string to a type."""
pass
# ==============
# Action classes
# ==============
class Action(_AttributeHolder):
"""Information about how to convert command line strings to Python objects.
Action objects are used by an ArgumentParser to represent the information
needed to parse a single argument from one or more strings from the
command line. The keyword arguments to the Action constructor are also
all attributes of Action instances.
Keyword Arguments:
- option_strings -- A list of command-line option strings which
should be associated with this action.
- dest -- The name of the attribute to hold the created object(s)
- nargs -- The number of command-line arguments that should be
consumed. By default, one argument will be consumed and a single
value will be produced. Other values include:
- N (an integer) consumes N arguments (and produces a list)
- '?' consumes zero or one arguments
- '*' consumes zero or more arguments (and produces a list)
- '+' consumes one or more arguments (and produces a list)
Note that the difference between the default and nargs=1 is that
with the default, a single value will be produced, while with
nargs=1, a list containing a single value will be produced.
- const -- The value to be produced if the option is specified and the
option uses an action that takes no values.
- default -- The value to be produced if the option is not specified.
- type -- The type which the command-line arguments should be converted
to, should be one of 'string', 'int', 'float', 'complex' or a
callable object that accepts a single string argument. If None,
'string' is assumed.
- choices -- A container of values that should be allowed. If not None,
after a command-line argument has been converted to the appropriate
type, an exception will be raised if it is not a member of this
collection.
- required -- True if the action must always be specified at the
command line. This is only meaningful for optional command-line
arguments.
- help -- The help string describing the argument.
- metavar -- The name to be used for the option's argument with the
help string. If None, the 'dest' value will be used as the name.
"""
def __init__(self,
option_strings,
dest,
nargs=None,
const=None,
default=None,
type=None,
choices=None,
required=False,
help=None,
metavar=None):
self.option_strings = option_strings
self.dest = dest
self.nargs = nargs
self.const = const
self.default = default
self.type = type
self.choices = choices
self.required = required
self.help = help
self.metavar = metavar
def _get_kwargs(self):
names = [
'option_strings',
'dest',
'nargs',
'const',
'default',
'type',
'choices',
'help',
'metavar',
]
return [(name, getattr(self, name)) for name in names]
def __call__(self, parser, namespace, values, option_string=None):
raise NotImplementedError(_('.__call__() not defined'))
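# A minimal sketch of a custom Action subclass, as described in the Action
# docstring above (the class and option names are hypothetical):
#
#     class UpperAction(argparse.Action):
#         def __call__(self, parser, namespace, values, option_string=None):
#             # store the converted value, upper-cased
#             setattr(namespace, self.dest, values.upper())
#
#     parser.add_argument('--name', action=UpperAction)
#     parser.parse_args(['--name', 'guido'])  # -> Namespace(name='GUIDO')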
class _StoreAction(Action):
def __init__(self,
option_strings,
dest,
nargs=None,
const=None,
default=None,
type=None,
choices=None,
required=False,
help=None,
metavar=None):
if nargs == 0:
raise ValueError('nargs for store actions must be > 0; if you '
'have nothing to store, actions such as store '
'true or store const may be more appropriate')
if const is not None and nargs != OPTIONAL:
raise ValueError('nargs must be %r to supply const' % OPTIONAL)
super(_StoreAction, self).__init__(
option_strings=option_strings,
dest=dest,
nargs=nargs,
const=const,
default=default,
type=type,
choices=choices,
required=required,
help=help,
metavar=metavar)
def __call__(self, parser, namespace, values, option_string=None):
setattr(namespace, self.dest, values)
class _StoreConstAction(Action):
def __init__(self,
option_strings,
dest,
const,
default=None,
required=False,
help=None,
metavar=None):
super(_StoreConstAction, self).__init__(
option_strings=option_strings,
dest=dest,
nargs=0,
const=const,
default=default,
required=required,
help=help)
def __call__(self, parser, namespace, values, option_string=None):
setattr(namespace, self.dest, self.const)
class _StoreTrueAction(_StoreConstAction):
def __init__(self,
option_strings,
dest,
default=False,
required=False,
help=None):
super(_StoreTrueAction, self).__init__(
option_strings=option_strings,
dest=dest,
const=True,
default=default,
required=required,
help=help)
class _StoreFalseAction(_StoreConstAction):
def __init__(self,
option_strings,
dest,
default=True,
required=False,
help=None):
super(_StoreFalseAction, self).__init__(
option_strings=option_strings,
dest=dest,
const=False,
default=default,
required=required,
help=help)
class _AppendAction(Action):
def __init__(self,
option_strings,
dest,
nargs=None,
const=None,
default=None,
type=None,
choices=None,
required=False,
help=None,
metavar=None):
if nargs == 0:
raise ValueError('nargs for append actions must be > 0; if arg '
'strings are not supplying the value to append, '
'the append const action may be more appropriate')
if const is not None and nargs != OPTIONAL:
raise ValueError('nargs must be %r to supply const' % OPTIONAL)
super(_AppendAction, self).__init__(
option_strings=option_strings,
dest=dest,
nargs=nargs,
const=const,
default=default,
type=type,
choices=choices,
required=required,
help=help,
metavar=metavar)
def __call__(self, parser, namespace, values, option_string=None):
items = _copy.copy(_ensure_value(namespace, self.dest, []))
items.append(values)
setattr(namespace, self.dest, items)
class _AppendConstAction(Action):
def __init__(self,
option_strings,
dest,
const,
default=None,
required=False,
help=None,
metavar=None):
super(_AppendConstAction, self).__init__(
option_strings=option_strings,
dest=dest,
nargs=0,
const=const,
default=default,
required=required,
help=help,
metavar=metavar)
def __call__(self, parser, namespace, values, option_string=None):
items = _copy.copy(_ensure_value(namespace, self.dest, []))
items.append(self.const)
setattr(namespace, self.dest, items)
class _CountAction(Action):
def __init__(self,
option_strings,
dest,
default=None,
required=False,
help=None):
super(_CountAction, self).__init__(
option_strings=option_strings,
dest=dest,
nargs=0,
default=default,
required=required,
help=help)
def __call__(self, parser, namespace, values, option_string=None):
new_count = _ensure_value(namespace, self.dest, 0) + 1
setattr(namespace, self.dest, new_count)
class _HelpAction(Action):
def __init__(self,
option_strings,
dest=SUPPRESS,
default=SUPPRESS,
help=None):
super(_HelpAction, self).__init__(
option_strings=option_strings,
dest=dest,
default=default,
nargs=0,
help=help)
def __call__(self, parser, namespace, values, option_string=None):
parser.print_help()
parser.exit()
class _VersionAction(Action):
def __init__(self,
option_strings,
version=None,
dest=SUPPRESS,
default=SUPPRESS,
help=None):
super(_VersionAction, self).__init__(
option_strings=option_strings,
dest=dest,
default=default,
nargs=0,
help=help)
self.version = version
def __call__(self, parser, namespace, values, option_string=None):
version = self.version
if version is None:
version = parser.version
formatter = parser._get_formatter()
formatter.add_text(version)
parser.exit(message=formatter.format_help())
class _SubParsersAction(Action):
class _ChoicesPseudoAction(Action):
def __init__(self, name, help):
sup = super(_SubParsersAction._ChoicesPseudoAction, self)
sup.__init__(option_strings=[], dest=name, help=help)
def __init__(self,
option_strings,
prog,
parser_class,
dest=SUPPRESS,
help=None,
metavar=None):
self._prog_prefix = prog
self._parser_class = parser_class
self._name_parser_map = {}
self._choices_actions = []
super(_SubParsersAction, self).__init__(
option_strings=option_strings,
dest=dest,
nargs=PARSER,
choices=self._name_parser_map,
help=help,
metavar=metavar)
def add_parser(self, name, **kwargs):
# set prog from the existing prefix
if kwargs.get('prog') is None:
kwargs['prog'] = '%s %s' % (self._prog_prefix, name)
# create a pseudo-action to hold the choice help
if 'help' in kwargs:
help = kwargs.pop('help')
choice_action = self._ChoicesPseudoAction(name, help)
self._choices_actions.append(choice_action)
# create the parser and add it to the map
parser = self._parser_class(**kwargs)
self._name_parser_map[name] = parser
return parser
def _get_subactions(self):
return self._choices_actions
def __call__(self, parser, namespace, values, option_string=None):
parser_name = values[0]
arg_strings = values[1:]
# set the parser name if requested
if self.dest is not SUPPRESS:
setattr(namespace, self.dest, parser_name)
# select the parser
try:
parser = self._name_parser_map[parser_name]
except KeyError:
tup = parser_name, ', '.join(self._name_parser_map)
            msg = _('unknown parser %r (choices: %s)') % tup
raise ArgumentError(self, msg)
# parse all the remaining options into the namespace
parser.parse_args(arg_strings, namespace)
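# A minimal sketch of the sub-parser dispatch this action implements (the
# command and argument names are hypothetical):
#
#     parser = argparse.ArgumentParser(prog='tool')
#     subparsers = parser.add_subparsers(dest='command')
#     checkout = subparsers.add_parser('checkout', help='check out a branch')
#     checkout.add_argument('branch')
#     args = parser.parse_args(['checkout', 'main'])
#     # -> Namespace(command='checkout', branch='main')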
# ==============
# Type classes
# ==============
class FileType(object):
"""Factory for creating file object types
Instances of FileType are typically passed as type= arguments to the
ArgumentParser add_argument() method.
Keyword Arguments:
- mode -- A string indicating how the file is to be opened. Accepts the
same values as the builtin open() function.
- bufsize -- The file's desired buffer size. Accepts the same values as
the builtin open() function.
"""
def __init__(self, mode='r', bufsize=None):
self._mode = mode
self._bufsize = bufsize
def __call__(self, string):
# the special argument "-" means sys.std{in,out}
if string == '-':
if 'r' in self._mode:
return _sys.stdin
elif 'w' in self._mode:
return _sys.stdout
else:
                msg = _('argument "-" with mode %r') % self._mode
raise ValueError(msg)
# all other arguments are used as file names
if self._bufsize:
return open(string, self._mode, self._bufsize)
else:
return open(string, self._mode)
def __repr__(self):
args = [self._mode, self._bufsize]
args_str = ', '.join([repr(arg) for arg in args if arg is not None])
return '%s(%s)' % (type(self).__name__, args_str)
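# A minimal sketch of FileType in client code (the file name is hypothetical):
#
#     parser.add_argument('--out', type=argparse.FileType('w'))
#     args = parser.parse_args(['--out', 'results.txt'])  # opens for writing
#     args = parser.parse_args(['--out', '-'])            # yields sys.stdout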
# ===============================
# Optional and Positional Parsing
# ===============================
class Namespace(_AttributeHolder):
"""Simple object for storing attributes.
Implements equality by attribute names and values, and provides a simple
string representation.
"""
def __init__(self, **kwargs):
for name in kwargs:
setattr(self, name, kwargs[name])
def __eq__(self, other):
return vars(self) == vars(other)
def __ne__(self, other):
return not (self == other)
def __contains__(self, key):
return key in self.__dict__
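# A minimal sketch of Namespace equality and attribute access:
#
#     ns = argparse.Namespace(x=1, y='a')
#     ns.x                                   # -> 1
#     'y' in ns                              # -> True
#     ns == argparse.Namespace(x=1, y='a')   # -> True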
class _ActionsContainer(object):
def __init__(self,
description,
prefix_chars,
argument_default,
conflict_handler):
super(_ActionsContainer, self).__init__()
self.description = description
self.argument_default = argument_default
self.prefix_chars = prefix_chars
self.conflict_handler = conflict_handler
# set up registries
self._registries = {}
# register actions
self.register('action', None, _StoreAction)
self.register('action', 'store', _StoreAction)
self.register('action', 'store_const', _StoreConstAction)
self.register('action', 'store_true', _StoreTrueAction)
self.register('action', 'store_false', _StoreFalseAction)
self.register('action', 'append', _AppendAction)
self.register('action', 'append_const', _AppendConstAction)
self.register('action', 'count', _CountAction)
self.register('action', 'help', _HelpAction)
self.register('action', 'version', _VersionAction)
self.register('action', 'parsers', _SubParsersAction)
# raise an exception if the conflict handler is invalid
self._get_handler()
# action storage
self._actions = []
self._option_string_actions = {}
# groups
self._action_groups = []
self._mutually_exclusive_groups = []
# defaults storage
self._defaults = {}
# determines whether an "option" looks like a negative number
self._negative_number_matcher = _re.compile(r'^-\d+$|^-\d*\.\d+$')
# whether or not there are any optionals that look like negative
# numbers -- uses a list so it can be shared and edited
self._has_negative_number_optionals = []
# ====================
# Registration methods
# ====================
def register(self, registry_name, value, object):
registry = self._registries.setdefault(registry_name, {})
registry[value] = object
def _registry_get(self, registry_name, value, default=None):
return self._registries[registry_name].get(value, default)
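    # A minimal sketch of the registry mechanism: values registered under the
    # 'type' registry let a plain string be passed as type= later (the 'hex'
    # key here is hypothetical, and register() is a semi-private API):
    #
    #     parser.register('type', 'hex', lambda s: int(s, 16))
    #     parser.add_argument('--mask', type='hex')
    #     parser.parse_args(['--mask', 'ff'])  # -> Namespace(mask=255)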
# ==================================
# Namespace default accessor methods
# ==================================
def set_defaults(self, **kwargs):
self._defaults.update(kwargs)
# if these defaults match any existing arguments, replace
# the previous default on the object with the new one
for action in self._actions:
if action.dest in kwargs:
action.default = kwargs[action.dest]
def get_default(self, dest):
for action in self._actions:
if action.dest == dest and action.default is not None:
return action.default
return self._defaults.get(dest, None)
# =======================
# Adding argument actions
# =======================
def add_argument(self, *args, **kwargs):
"""
add_argument(dest, ..., name=value, ...)
add_argument(option_string, option_string, ..., name=value, ...)
"""
# if no positional args are supplied or only one is supplied and
# it doesn't look like an option string, parse a positional
# argument
chars = self.prefix_chars
if not args or len(args) == 1 and args[0][0] not in chars:
if args and 'dest' in kwargs:
raise ValueError('dest supplied twice for positional argument')
kwargs = self._get_positional_kwargs(*args, **kwargs)
# otherwise, we're adding an optional argument
else:
kwargs = self._get_optional_kwargs(*args, **kwargs)
# if no default was supplied, use the parser-level default
if 'default' not in kwargs:
dest = kwargs['dest']
if dest in self._defaults:
kwargs['default'] = self._defaults[dest]
elif self.argument_default is not None:
kwargs['default'] = self.argument_default
# create the action object, and add it to the parser
action_class = self._pop_action_class(kwargs)
if not _callable(action_class):
raise ValueError('unknown action "%s"' % action_class)
action = action_class(**kwargs)
# raise an error if the action type is not callable
type_func = self._registry_get('type', action.type, action.type)
if not _callable(type_func):
raise ValueError('%r is not callable' % type_func)
return self._add_action(action)
def add_argument_group(self, *args, **kwargs):
group = _ArgumentGroup(self, *args, **kwargs)
self._action_groups.append(group)
return group
def add_mutually_exclusive_group(self, **kwargs):
group = _MutuallyExclusiveGroup(self, **kwargs)
self._mutually_exclusive_groups.append(group)
return group
def _add_action(self, action):
# resolve any conflicts
self._check_conflict(action)
# add to actions list
self._actions.append(action)
action.container = self
# index the action by any option strings it has
for option_string in action.option_strings:
self._option_string_actions[option_string] = action
# set the flag if any option strings look like negative numbers
for option_string in action.option_strings:
if self._negative_number_matcher.match(option_string):
if not self._has_negative_number_optionals:
self._has_negative_number_optionals.append(True)
# return the created action
return action
def _remove_action(self, action):
self._actions.remove(action)
def _add_container_actions(self, container):
# collect groups by titles
title_group_map = {}
for group in self._action_groups:
if group.title in title_group_map:
msg = _('cannot merge actions - two groups are named %r')
raise ValueError(msg % (group.title))
title_group_map[group.title] = group
# map each action to its group
group_map = {}
for group in container._action_groups:
# if a group with the title exists, use that, otherwise
# create a new group matching the container's group
if group.title not in title_group_map:
title_group_map[group.title] = self.add_argument_group(
title=group.title,
description=group.description,
conflict_handler=group.conflict_handler)
# map the actions to their new group
for action in group._group_actions:
group_map[action] = title_group_map[group.title]
# add container's mutually exclusive groups
# NOTE: if add_mutually_exclusive_group ever gains title= and
# description= then this code will need to be expanded as above
for group in container._mutually_exclusive_groups:
mutex_group = self.add_mutually_exclusive_group(
required=group.required)
# map the actions to their new mutex group
for action in group._group_actions:
group_map[action] = mutex_group
# add all actions to this container or their group
for action in container._actions:
group_map.get(action, self)._add_action(action)
def _get_positional_kwargs(self, dest, **kwargs):
# make sure required is not specified
if 'required' in kwargs:
msg = _("'required' is an invalid argument for positionals")
raise TypeError(msg)
# mark positional arguments as required if at least one is
# always required
if kwargs.get('nargs') not in [OPTIONAL, ZERO_OR_MORE]:
kwargs['required'] = True
if kwargs.get('nargs') == ZERO_OR_MORE and 'default' not in kwargs:
kwargs['required'] = True
# return the keyword arguments with no option strings
return dict(kwargs, dest=dest, option_strings=[])
def _get_optional_kwargs(self, *args, **kwargs):
# determine short and long option strings
option_strings = []
long_option_strings = []
for option_string in args:
# error on strings that don't start with an appropriate prefix
if not option_string[0] in self.prefix_chars:
msg = _('invalid option string %r: '
'must start with a character %r')
tup = option_string, self.prefix_chars
raise ValueError(msg % tup)
# strings starting with two prefix characters are long options
option_strings.append(option_string)
if option_string[0] in self.prefix_chars:
if len(option_string) > 1:
if option_string[1] in self.prefix_chars:
long_option_strings.append(option_string)
# infer destination, '--foo-bar' -> 'foo_bar' and '-x' -> 'x'
dest = kwargs.pop('dest', None)
if dest is None:
if long_option_strings:
dest_option_string = long_option_strings[0]
else:
dest_option_string = option_strings[0]
dest = dest_option_string.lstrip(self.prefix_chars)
if not dest:
msg = _('dest= is required for options like %r')
raise ValueError(msg % option_string)
dest = dest.replace('-', '_')
# return the updated keyword arguments
return dict(kwargs, dest=dest, option_strings=option_strings)
def _pop_action_class(self, kwargs, default=None):
action = kwargs.pop('action', default)
return self._registry_get('action', action, action)
def _get_handler(self):
# determine function from conflict handler string
handler_func_name = '_handle_conflict_%s' % self.conflict_handler
try:
return getattr(self, handler_func_name)
except AttributeError:
msg = _('invalid conflict_resolution value: %r')
raise ValueError(msg % self.conflict_handler)
def _check_conflict(self, action):
# find all options that conflict with this option
confl_optionals = []
for option_string in action.option_strings:
if option_string in self._option_string_actions:
confl_optional = self._option_string_actions[option_string]
confl_optionals.append((option_string, confl_optional))
# resolve any conflicts
if confl_optionals:
conflict_handler = self._get_handler()
conflict_handler(action, confl_optionals)
def _handle_conflict_error(self, action, conflicting_actions):
message = _('conflicting option string(s): %s')
conflict_string = ', '.join([option_string
for option_string, action
in conflicting_actions])
raise ArgumentError(action, message % conflict_string)
def _handle_conflict_resolve(self, action, conflicting_actions):
# remove all conflicting options
for option_string, action in conflicting_actions:
# remove the conflicting option
action.option_strings.remove(option_string)
self._option_string_actions.pop(option_string, None)
# if the option now has no option string, remove it from the
# container holding it
if not action.option_strings:
action.container._remove_action(action)
class _ArgumentGroup(_ActionsContainer):
def __init__(self, container, title=None, description=None, **kwargs):
# add any missing keyword arguments by checking the container
update = kwargs.setdefault
update('conflict_handler', container.conflict_handler)
update('prefix_chars', container.prefix_chars)
update('argument_default', container.argument_default)
super_init = super(_ArgumentGroup, self).__init__
super_init(description=description, **kwargs)
# group attributes
self.title = title
self._group_actions = []
# share most attributes with the container
self._registries = container._registries
self._actions = container._actions
self._option_string_actions = container._option_string_actions
self._defaults = container._defaults
self._has_negative_number_optionals = \
container._has_negative_number_optionals
def _add_action(self, action):
action = super(_ArgumentGroup, self)._add_action(action)
self._group_actions.append(action)
return action
def _remove_action(self, action):
super(_ArgumentGroup, self)._remove_action(action)
self._group_actions.remove(action)
class _MutuallyExclusiveGroup(_ArgumentGroup):
def __init__(self, container, required=False):
super(_MutuallyExclusiveGroup, self).__init__(container)
self.required = required
self._container = container
def _add_action(self, action):
if action.required:
msg = _('mutually exclusive arguments must be optional')
raise ValueError(msg)
action = self._container._add_action(action)
self._group_actions.append(action)
return action
def _remove_action(self, action):
self._container._remove_action(action)
self._group_actions.remove(action)
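# A minimal sketch of a mutually exclusive group (the option names are
# hypothetical):
#
#     group = parser.add_mutually_exclusive_group(required=True)
#     group.add_argument('--verbose', action='store_true')
#     group.add_argument('--quiet', action='store_true')
#     # passing both --verbose and --quiet now raises a usage error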
class ArgumentParser(_AttributeHolder, _ActionsContainer):
"""Object for parsing command line strings into Python objects.
Keyword Arguments:
- prog -- The name of the program (default: sys.argv[0])
- usage -- A usage message (default: auto-generated from arguments)
- description -- A description of what the program does
- epilog -- Text following the argument descriptions
- parents -- Parsers whose arguments should be copied into this one
- formatter_class -- HelpFormatter class for printing help messages
- prefix_chars -- Characters that prefix optional arguments
- fromfile_prefix_chars -- Characters that prefix files containing
additional arguments
- argument_default -- The default value for all arguments
- conflict_handler -- String indicating how to handle conflicts
    - add_help -- Add a -h/--help option
"""
def __init__(self,
prog=None,
usage=None,
description=None,
epilog=None,
version=None,
parents=[],
formatter_class=HelpFormatter,
prefix_chars='-',
fromfile_prefix_chars=None,
argument_default=None,
conflict_handler='error',
add_help=True):
if version is not None:
import warnings
warnings.warn(
"""The "version" argument to ArgumentParser is deprecated. """
"""Please use """
""""add_argument(..., action='version', version="N", ...)" """
"""instead""", DeprecationWarning)
superinit = super(ArgumentParser, self).__init__
superinit(description=description,
prefix_chars=prefix_chars,
argument_default=argument_default,
conflict_handler=conflict_handler)
# default setting for prog
if prog is None:
prog = _os.path.basename(_sys.argv[0])
self.prog = prog
self.usage = usage
self.epilog = epilog
self.version = version
self.formatter_class = formatter_class
self.fromfile_prefix_chars = fromfile_prefix_chars
self.add_help = add_help
add_group = self.add_argument_group
self._positionals = add_group(_('positional arguments'))
self._optionals = add_group(_('optional arguments'))
self._subparsers = None
# register types
def identity(string):
return string
self.register('type', None, identity)
# add help and version arguments if necessary
# (using explicit default to override global argument_default)
if self.add_help:
self.add_argument(
'-h', '--help', action='help', default=SUPPRESS,
help=_('show this help message and exit'))
if self.version:
self.add_argument(
'-v', '--version', action='version', default=SUPPRESS,
version=self.version,
help=_("show program's version number and exit"))
# add parent arguments and defaults
for parent in parents:
self._add_container_actions(parent)
try:
defaults = parent._defaults
except AttributeError:
pass
else:
self._defaults.update(defaults)
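    # A minimal sketch of the parents= mechanism handled above: the actions
    # and defaults of each parent are copied into this parser (the names are
    # hypothetical; note add_help=False on the parent avoids a -h conflict):
    #
    #     base = argparse.ArgumentParser(add_help=False)
    #     base.add_argument('--verbose', action='store_true')
    #     child = argparse.ArgumentParser(parents=[base])
    #     child.parse_args(['--verbose'])  # -> Namespace(verbose=True)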
# =======================
# Pretty __repr__ methods
# =======================
def _get_kwargs(self):
names = [
'prog',
'usage',
'description',
'version',
'formatter_class',
'conflict_handler',
'add_help',
]
return [(name, getattr(self, name)) for name in names]
# ==================================
# Optional/Positional adding methods
# ==================================
def add_subparsers(self, **kwargs):
if self._subparsers is not None:
self.error(_('cannot have multiple subparser arguments'))
# add the parser class to the arguments if it's not present
kwargs.setdefault('parser_class', type(self))
if 'title' in kwargs or 'description' in kwargs:
title = _(kwargs.pop('title', 'subcommands'))
description = _(kwargs.pop('description', None))
self._subparsers = self.add_argument_group(title, description)
else:
self._subparsers = self._positionals
# prog defaults to the usage message of this parser, skipping
# optional arguments and with no "usage:" prefix
if kwargs.get('prog') is None:
formatter = self._get_formatter()
positionals = self._get_positional_actions()
groups = self._mutually_exclusive_groups
formatter.add_usage(self.usage, positionals, groups, '')
kwargs['prog'] = formatter.format_help().strip()
# create the parsers action and add it to the positionals list
parsers_class = self._pop_action_class(kwargs, 'parsers')
action = parsers_class(option_strings=[], **kwargs)
self._subparsers._add_action(action)
# return the created parsers action
return action
def _add_action(self, action):
if action.option_strings:
self._optionals._add_action(action)
else:
self._positionals._add_action(action)
return action
def _get_optional_actions(self):
return [action
for action in self._actions
if action.option_strings]
def _get_positional_actions(self):
return [action
for action in self._actions
if not action.option_strings]
# =====================================
# Command line argument parsing methods
# =====================================
def parse_args(self, args=None, namespace=None):
args, argv = self.parse_known_args(args, namespace)
if argv:
msg = _('unrecognized arguments: %s')
self.error(msg % ' '.join(argv))
return args
def parse_known_args(self, args=None, namespace=None):
# args default to the system args
if args is None:
args = _sys.argv[1:]
# default Namespace built from parser defaults
if namespace is None:
namespace = Namespace()
# add any action defaults that aren't present
for action in self._actions:
if action.dest is not SUPPRESS:
if not hasattr(namespace, action.dest):
if action.default is not SUPPRESS:
default = action.default
if isinstance(action.default, _basestring):
default = self._get_value(action, default)
setattr(namespace, action.dest, default)
# add any parser defaults that aren't present
for dest in self._defaults:
if not hasattr(namespace, dest):
setattr(namespace, dest, self._defaults[dest])
# parse the arguments and exit if there are any errors
try:
return self._parse_known_args(args, namespace)
except ArgumentError:
err = _sys.exc_info()[1]
self.error(str(err))
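    # A minimal sketch of the parse_args/parse_known_args contrast (the
    # option names are hypothetical):
    #
    #     parser.add_argument('--flag', action='store_true')
    #     parser.parse_known_args(['--flag', '--unknown'])
    #     # -> (Namespace(flag=True), ['--unknown'])
    #     parser.parse_args(['--flag', '--unknown'])
    #     # -> exits with 'unrecognized arguments: --unknown'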
def _parse_known_args(self, arg_strings, namespace):
# replace arg strings that are file references
if self.fromfile_prefix_chars is not None:
arg_strings = self._read_args_from_files(arg_strings)
# map all mutually exclusive arguments to the other arguments
# they can't occur with
action_conflicts = {}
for mutex_group in self._mutually_exclusive_groups:
group_actions = mutex_group._group_actions
for i, mutex_action in enumerate(mutex_group._group_actions):
conflicts = action_conflicts.setdefault(mutex_action, [])
conflicts.extend(group_actions[:i])
conflicts.extend(group_actions[i + 1:])
# find all option indices, and determine the arg_string_pattern
# which has an 'O' if there is an option at an index,
# an 'A' if there is an argument, or a '-' if there is a '--'
option_string_indices = {}
arg_string_pattern_parts = []
arg_strings_iter = iter(arg_strings)
for i, arg_string in enumerate(arg_strings_iter):
# all args after -- are non-options
if arg_string == '--':
arg_string_pattern_parts.append('-')
for arg_string in arg_strings_iter:
arg_string_pattern_parts.append('A')
# otherwise, add the arg to the arg strings
# and note the index if it was an option
else:
option_tuple = self._parse_optional(arg_string)
if option_tuple is None:
pattern = 'A'
else:
option_string_indices[i] = option_tuple
pattern = 'O'
arg_string_pattern_parts.append(pattern)
# join the pieces together to form the pattern
arg_strings_pattern = ''.join(arg_string_pattern_parts)
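        # e.g. ['-x', '1', 'foo', '--', 'bar'] yields the pattern 'OAA-A'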
        # convert arg strings to the appropriate type and then take the action
seen_actions = _set()
seen_non_default_actions = _set()
def take_action(action, argument_strings, option_string=None):
seen_actions.add(action)
argument_values = self._get_values(action, argument_strings)
# error if this argument is not allowed with other previously
# seen arguments, assuming that actions that use the default
# value don't really count as "present"
if argument_values is not action.default:
seen_non_default_actions.add(action)
for conflict_action in action_conflicts.get(action, []):
if conflict_action in seen_non_default_actions:
msg = _('not allowed with argument %s')
action_name = _get_action_name(conflict_action)
raise ArgumentError(action, msg % action_name)
# take the action if we didn't receive a SUPPRESS value
# (e.g. from a default)
if argument_values is not SUPPRESS:
action(self, namespace, argument_values, option_string)
# function to convert arg_strings into an optional action
def consume_optional(start_index):
# get the optional identified at this index
option_tuple = option_string_indices[start_index]
action, option_string, explicit_arg = option_tuple
# identify additional optionals in the same arg string
# (e.g. -xyz is the same as -x -y -z if no args are required)
match_argument = self._match_argument
action_tuples = []
while True:
# if we found no optional action, skip it
if action is None:
extras.append(arg_strings[start_index])
return start_index + 1
# if there is an explicit argument, try to match the
# optional's string arguments to only this
if explicit_arg is not None:
arg_count = match_argument(action, 'A')
# if the action is a single-dash option and takes no
# arguments, try to parse more single-dash options out
# of the tail of the option string
chars = self.prefix_chars
if arg_count == 0 and option_string[1] not in chars:
action_tuples.append((action, [], option_string))
for char in self.prefix_chars:
option_string = char + explicit_arg[0]
explicit_arg = explicit_arg[1:] or None
optionals_map = self._option_string_actions
if option_string in optionals_map:
action = optionals_map[option_string]
break
else:
msg = _('ignored explicit argument %r')
raise ArgumentError(action, msg % explicit_arg)
                    # if the action expects exactly one argument, we've
                    # successfully matched the option; exit the loop
elif arg_count == 1:
stop = start_index + 1
args = [explicit_arg]
action_tuples.append((action, args, option_string))
break
# error if a double-dash option did not use the
# explicit argument
else:
msg = _('ignored explicit argument %r')
raise ArgumentError(action, msg % explicit_arg)
# if there is no explicit argument, try to match the
# optional's string arguments with the following strings
# if successful, exit the loop
else:
start = start_index + 1
selected_patterns = arg_strings_pattern[start:]
arg_count = match_argument(action, selected_patterns)
stop = start + arg_count
args = arg_strings[start:stop]
action_tuples.append((action, args, option_string))
break
# add the Optional to the list and return the index at which
# the Optional's string args stopped
assert action_tuples
for action, args, option_string in action_tuples:
take_action(action, args, option_string)
return stop
# the list of Positionals left to be parsed; this is modified
# by consume_positionals()
positionals = self._get_positional_actions()
# function to convert arg_strings into positional actions
def consume_positionals(start_index):
# match as many Positionals as possible
match_partial = self._match_arguments_partial
selected_pattern = arg_strings_pattern[start_index:]
arg_counts = match_partial(positionals, selected_pattern)
# slice off the appropriate arg strings for each Positional
# and add the Positional and its args to the list
for action, arg_count in zip(positionals, arg_counts):
args = arg_strings[start_index: start_index + arg_count]
start_index += arg_count
take_action(action, args)
# slice off the Positionals that we just parsed and return the
# index at which the Positionals' string args stopped
positionals[:] = positionals[len(arg_counts):]
return start_index
# consume Positionals and Optionals alternately, until we have
# passed the last option string
extras = []
start_index = 0
if option_string_indices:
max_option_string_index = max(option_string_indices)
else:
max_option_string_index = -1
while start_index <= max_option_string_index:
# consume any Positionals preceding the next option
next_option_string_index = min([
index
for index in option_string_indices
if index >= start_index])
if start_index != next_option_string_index:
positionals_end_index = consume_positionals(start_index)
# only try to parse the next optional if we didn't consume
# the option string during the positionals parsing
if positionals_end_index > start_index:
start_index = positionals_end_index
continue
else:
start_index = positionals_end_index
# if we consumed all the positionals we could and we're not
# at the index of an option string, there were extra arguments
if start_index not in option_string_indices:
strings = arg_strings[start_index:next_option_string_index]
extras.extend(strings)
start_index = next_option_string_index
# consume the next optional and any arguments for it
start_index = consume_optional(start_index)
# consume any positionals following the last Optional
stop_index = consume_positionals(start_index)
# if we didn't consume all the argument strings, there were extras
extras.extend(arg_strings[stop_index:])
# if we didn't use all the Positional objects, there were too few
# arg strings supplied.
if positionals:
self.error(_('too few arguments'))
# make sure all required actions were present
for action in self._actions:
if action.required:
if action not in seen_actions:
name = _get_action_name(action)
self.error(_('argument %s is required') % name)
# make sure all required groups had one option present
for group in self._mutually_exclusive_groups:
if group.required:
for action in group._group_actions:
if action in seen_non_default_actions:
break
# if no actions were used, report the error
else:
names = [_get_action_name(action)
for action in group._group_actions
if action.help is not SUPPRESS]
msg = _('one of the arguments %s is required')
self.error(msg % ' '.join(names))
# return the updated namespace and the extra arguments
return namespace, extras
def _read_args_from_files(self, arg_strings):
# expand arguments referencing files
new_arg_strings = []
for arg_string in arg_strings:
# for regular arguments, just add them back into the list
            # AF PATCH: arg_string[0] raised an IndexError for empty string
            # AF PATCH: values such as '', so guard against an empty string
            # AF PATCH: before indexing
            if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
new_arg_strings.append(arg_string)
# replace arguments referencing files with the file content
else:
try:
args_file = open(arg_string[1:])
try:
arg_strings = []
for arg_line in args_file.read().splitlines():
for arg in self.convert_arg_line_to_args(arg_line):
arg_strings.append(arg)
arg_strings = self._read_args_from_files(arg_strings)
new_arg_strings.extend(arg_strings)
finally:
args_file.close()
except IOError:
err = _sys.exc_info()[1]
self.error(str(err))
# return the modified argument list
return new_arg_strings
def convert_arg_line_to_args(self, arg_line):
return [arg_line]
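    # A minimal sketch of overriding convert_arg_line_to_args so that
    # arguments read via fromfile_prefix_chars may appear several per line
    # instead of one (splitting on whitespace is the assumption here):
    #
    #     class MyParser(argparse.ArgumentParser):
    #         def convert_arg_line_to_args(self, arg_line):
    #             return arg_line.split()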
def _match_argument(self, action, arg_strings_pattern):
# match the pattern for this action to the arg strings
nargs_pattern = self._get_nargs_pattern(action)
match = _re.match(nargs_pattern, arg_strings_pattern)
# raise an exception if we weren't able to find a match
if match is None:
nargs_errors = {
None: _('expected one argument'),
OPTIONAL: _('expected at most one argument'),
ONE_OR_MORE: _('expected at least one argument'),
}
default = _('expected %s argument(s)') % action.nargs
msg = nargs_errors.get(action.nargs, default)
raise ArgumentError(action, msg)
# return the number of arguments matched
return len(match.group(1))
def _match_arguments_partial(self, actions, arg_strings_pattern):
# progressively shorten the actions list by slicing off the
# final actions until we find a match
result = []
for i in range(len(actions), 0, -1):
actions_slice = actions[:i]
pattern = ''.join([self._get_nargs_pattern(action)
for action in actions_slice])
match = _re.match(pattern, arg_strings_pattern)
if match is not None:
result.extend([len(string) for string in match.groups()])
break
# return the list of arg string counts
return result
def _parse_optional(self, arg_string):
# if it's an empty string, it was meant to be a positional
if not arg_string:
return None
# if it doesn't start with a prefix, it was meant to be positional
if not arg_string[0] in self.prefix_chars:
return None
# if the option string is present in the parser, return the action
if arg_string in self._option_string_actions:
action = self._option_string_actions[arg_string]
return action, arg_string, None
# if it's just a single character, it was meant to be positional
if len(arg_string) == 1:
return None
# if the option string before the "=" is present, return the action
if '=' in arg_string:
option_string, explicit_arg = arg_string.split('=', 1)
if option_string in self._option_string_actions:
action = self._option_string_actions[option_string]
return action, option_string, explicit_arg
# search through all possible prefixes of the option string
# and all actions in the parser for possible interpretations
option_tuples = self._get_option_tuples(arg_string)
# if multiple actions match, the option string was ambiguous
if len(option_tuples) > 1:
options = ', '.join([option_string
for action, option_string, explicit_arg in option_tuples])
tup = arg_string, options
self.error(_('ambiguous option: %s could match %s') % tup)
# if exactly one action matched, this segmentation is good,
# so return the parsed action
elif len(option_tuples) == 1:
option_tuple, = option_tuples
return option_tuple
# if it was not found as an option, but it looks like a negative
# number, it was meant to be positional
# unless there are negative-number-like options
if self._negative_number_matcher.match(arg_string):
if not self._has_negative_number_optionals:
return None
# if it contains a space, it was meant to be a positional
if ' ' in arg_string:
return None
# it was meant to be an optional but there is no such option
# in this parser (though it might be a valid option in a subparser)
return None, arg_string, None
def _get_option_tuples(self, option_string):
result = []
# option strings starting with two prefix characters are only
# split at the '='
chars = self.prefix_chars
if option_string[0] in chars and option_string[1] in chars:
if '=' in option_string:
option_prefix, explicit_arg = option_string.split('=', 1)
else:
option_prefix = option_string
explicit_arg = None
for option_string in self._option_string_actions:
if option_string.startswith(option_prefix):
action = self._option_string_actions[option_string]
tup = action, option_string, explicit_arg
result.append(tup)
# single character options can be concatenated with their arguments
# but multiple character options always have to have their argument
# separate
elif option_string[0] in chars and option_string[1] not in chars:
option_prefix = option_string
explicit_arg = None
short_option_prefix = option_string[:2]
short_explicit_arg = option_string[2:]
for option_string in self._option_string_actions:
if option_string == short_option_prefix:
action = self._option_string_actions[option_string]
tup = action, option_string, short_explicit_arg
result.append(tup)
elif option_string.startswith(option_prefix):
action = self._option_string_actions[option_string]
tup = action, option_string, explicit_arg
result.append(tup)
# shouldn't ever get here
else:
self.error(_('unexpected option string: %s') % option_string)
# return the collected option tuples
return result
def _get_nargs_pattern(self, action):
# in all examples below, we have to allow for '--' args
# which are represented as '-' in the pattern
nargs = action.nargs
# the default (None) is assumed to be a single argument
if nargs is None:
nargs_pattern = '(-*A-*)'
# allow zero or one arguments
elif nargs == OPTIONAL:
nargs_pattern = '(-*A?-*)'
# allow zero or more arguments
elif nargs == ZERO_OR_MORE:
nargs_pattern = '(-*[A-]*)'
# allow one or more arguments
elif nargs == ONE_OR_MORE:
nargs_pattern = '(-*A[A-]*)'
# allow any number of options or arguments
elif nargs == REMAINDER:
nargs_pattern = '([-AO]*)'
# allow one argument followed by any number of options or arguments
elif nargs == PARSER:
nargs_pattern = '(-*A[-AO]*)'
# all others should be integers
else:
nargs_pattern = '(-*%s-*)' % '-*'.join('A' * nargs)
# if this is an optional action, -- is not allowed
if action.option_strings:
nargs_pattern = nargs_pattern.replace('-*', '')
nargs_pattern = nargs_pattern.replace('-', '')
# return the pattern
return nargs_pattern
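    # Worked examples, derived from the branches above:
    #   nargs=None          -> '(-*A-*)'
    #   nargs=OPTIONAL      -> '(-*A?-*)'
    #   nargs=ZERO_OR_MORE  -> '(-*[A-]*)'
    #   nargs=ONE_OR_MORE   -> '(-*A[A-]*)'
    #   nargs=2             -> '(-*A-*A-*)'
    # where 'A' marks an argument and '-' a '--' pseudo-argument.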
# ========================
# Value conversion methods
# ========================
def _get_values(self, action, arg_strings):
# for everything but PARSER args, strip out '--'
if action.nargs not in [PARSER, REMAINDER]:
arg_strings = [s for s in arg_strings if s != '--']
# optional argument produces a default when not present
if not arg_strings and action.nargs == OPTIONAL:
if action.option_strings:
value = action.const
else:
value = action.default
if isinstance(value, _basestring):
value = self._get_value(action, value)
self._check_value(action, value)
# when nargs='*' on a positional, if there were no command-line
# args, use the default if it is anything other than None
elif (not arg_strings and action.nargs == ZERO_OR_MORE and
not action.option_strings):
if action.default is not None:
value = action.default
else:
value = arg_strings
self._check_value(action, value)
# single argument or optional argument produces a single value
elif len(arg_strings) == 1 and action.nargs in [None, OPTIONAL]:
arg_string, = arg_strings
value = self._get_value(action, arg_string)
self._check_value(action, value)
# REMAINDER arguments convert all values, checking none
elif action.nargs == REMAINDER:
value = [self._get_value(action, v) for v in arg_strings]
# PARSER arguments convert all values, but check only the first
elif action.nargs == PARSER:
value = [self._get_value(action, v) for v in arg_strings]
self._check_value(action, value[0])
# all other types of nargs produce a list
else:
value = [self._get_value(action, v) for v in arg_strings]
for v in value:
self._check_value(action, v)
# return the converted value
return value
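    # Illustrative sketch (assumes an int-typed action):
    #   nargs=None with ['5']      -> 5        (single converted value)
    #   nargs='*'  with []         -> the default, if one is set (positional)
    #   nargs=2    with ['1', '2'] -> [1, 2]   (list, each value checked)
    # REMAINDER and PARSER keep any '--' tokens; REMAINDER skips choice
    # checking entirely and PARSER checks only the first value.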
def _get_value(self, action, arg_string):
type_func = self._registry_get('type', action.type, action.type)
if not _callable(type_func):
msg = _('%r is not callable')
raise ArgumentError(action, msg % type_func)
# convert the value to the appropriate type
try:
result = type_func(arg_string)
# ArgumentTypeErrors indicate errors
except ArgumentTypeError:
name = getattr(action.type, '__name__', repr(action.type))
msg = str(_sys.exc_info()[1])
raise ArgumentError(action, msg)
# TypeErrors or ValueErrors also indicate errors
except (TypeError, ValueError):
name = getattr(action.type, '__name__', repr(action.type))
msg = _('invalid %s value: %r')
raise ArgumentError(action, msg % (name, arg_string))
# return the converted value
return result
def _check_value(self, action, value):
# converted value must be one of the choices (if specified)
if action.choices is not None and value not in action.choices:
tup = value, ', '.join(map(repr, action.choices))
msg = _('invalid choice: %r (choose from %s)') % tup
raise ArgumentError(action, msg)
# =======================
# Help-formatting methods
# =======================
def format_usage(self):
formatter = self._get_formatter()
formatter.add_usage(self.usage, self._actions,
self._mutually_exclusive_groups)
return formatter.format_help()
def format_help(self):
formatter = self._get_formatter()
# usage
formatter.add_usage(self.usage, self._actions,
self._mutually_exclusive_groups)
# description
formatter.add_text(self.description)
# positionals, optionals and user-defined groups
for action_group in self._action_groups:
formatter.start_section(action_group.title)
formatter.add_text(action_group.description)
formatter.add_arguments(action_group._group_actions)
formatter.end_section()
# epilog
formatter.add_text(self.epilog)
# determine help from format above
return formatter.format_help()
def format_version(self):
import warnings
warnings.warn(
'The format_version method is deprecated -- the "version" '
'argument to ArgumentParser is no longer supported.',
DeprecationWarning)
formatter = self._get_formatter()
formatter.add_text(self.version)
return formatter.format_help()
def _get_formatter(self):
return self.formatter_class(prog=self.prog)
# =====================
# Help-printing methods
# =====================
def print_usage(self, file=None):
if file is None:
file = _sys.stdout
self._print_message(self.format_usage(), file)
def print_help(self, file=None):
if file is None:
file = _sys.stdout
self._print_message(self.format_help(), file)
def print_version(self, file=None):
import warnings
warnings.warn(
'The print_version method is deprecated -- the "version" '
'argument to ArgumentParser is no longer supported.',
DeprecationWarning)
self._print_message(self.format_version(), file)
def _print_message(self, message, file=None):
if message:
if file is None:
file = _sys.stderr
file.write(message)
# ===============
# Exiting methods
# ===============
def exit(self, status=0, message=None):
if message:
self._print_message(message, _sys.stderr)
_sys.exit(status)
def error(self, message):
"""error(message: string)
Prints a usage message incorporating the message to stderr and
exits.
If you override this in a subclass, it should not return -- it
should either exit or raise an exception.
"""
self.print_usage(_sys.stderr)
self.exit(2, _('%s: error: %s\n') % (self.prog, message))
| gpl-3.0 | -2,687,283,327,449,251,300 | 36.164756 | 96 | 0.560744 | false |
bdyetton/prettychart | website/addons/forward/tests/test_models.py | 44 | 1805 | # -*- coding: utf-8 -*-
from nose.tools import * # PEP8 asserts
from modularodm.exceptions import ValidationError
from tests.base import OsfTestCase
from website.addons.forward.tests.factories import ForwardSettingsFactory
class TestSettingsValidation(OsfTestCase):
def setUp(self):
super(TestSettingsValidation, self).setUp()
self.settings = ForwardSettingsFactory()
def test_validate_url_bad(self):
self.settings.url = 'badurl'
with assert_raises(ValidationError):
self.settings.save()
def test_validate_url_good(self):
self.settings.url = 'http://frozen.pizza.reviews/'
try:
self.settings.save()
except ValidationError:
assert 0
def test_validate_redirect_bool_bad(self):
self.settings.redirect_bool = 'notabool'
with assert_raises(ValidationError):
self.settings.save()
def test_validate_redirect_bool_good(self):
self.settings.redirect_bool = False
try:
self.settings.save()
except ValidationError:
assert 0
def test_validate_redirect_secs_bad(self):
self.settings.redirect_secs = -2
with assert_raises(ValidationError):
self.settings.save()
def test_validate_redirect_secs_good(self):
self.settings.redirect_secs = 20
try:
self.settings.save()
except ValidationError:
assert 0
def test_label_sanitary(self):
self.settings.label = 'safe'
try:
self.settings.save()
except ValidationError:
assert False
def test_label_unsanitary(self):
self.settings.label = 'un<br />safe'
with assert_raises(ValidationError):
self.settings.save()
| apache-2.0 | 5,456,204,765,736,353,000 | 27.650794 | 73 | 0.629363 | false |
weitengchu/rt-thread | components/external/freetype/src/tools/docmaker/formatter.py | 132 | 6019 | #
# formatter.py
#
# Convert parsed content blocks to a structured document (library file).
#
# Copyright 2002, 2004, 2007, 2008, 2014 by
# David Turner.
#
# This file is part of the FreeType project, and may only be used,
# modified, and distributed under the terms of the FreeType project
# license, LICENSE.TXT. By continuing to use, modify, or distribute
# this file you indicate that you have read the license and
# understand and accept it fully.
#
# This is the base Formatter class. Its purpose is to convert a content
# processor's data into specific documents (i.e., table of contents, global
# index, and individual API reference indices).
#
# You need to sub-class it to output anything sensible. For example, the
# file `tohtml.py' contains the definition of the `HtmlFormatter' sub-class
# to output HTML.
#
from sources import *
from content import *
from utils import *
################################################################
##
## FORMATTER CLASS
##
class Formatter:
def __init__( self, processor ):
self.processor = processor
self.identifiers = {}
self.chapters = processor.chapters
self.sections = processor.sections.values()
self.block_index = []
# store all blocks in a dictionary
self.blocks = []
for section in self.sections:
for block in section.blocks.values():
self.add_identifier( block.name, block )
# add enumeration values to the index, since this is useful
for markup in block.markups:
if markup.tag == 'values':
for field in markup.fields:
self.add_identifier( field.name, block )
self.block_index = self.identifiers.keys()
self.block_index.sort( key = index_key )
def add_identifier( self, name, block ):
if name in self.identifiers:
# duplicate name!
sys.stderr.write( "WARNING: duplicate definition for"
+ " '" + name + "' "
+ "in " + block.location() + ", "
+ "previous definition in "
+ self.identifiers[name].location()
+ "\n" )
else:
self.identifiers[name] = block
#
# formatting the table of contents
#
def toc_enter( self ):
pass
def toc_chapter_enter( self, chapter ):
pass
def toc_section_enter( self, section ):
pass
def toc_section_exit( self, section ):
pass
def toc_chapter_exit( self, chapter ):
pass
def toc_index( self, index_filename ):
pass
def toc_exit( self ):
pass
def toc_dump( self, toc_filename = None, index_filename = None ):
output = None
if toc_filename:
output = open_output( toc_filename )
self.toc_enter()
for chap in self.processor.chapters:
self.toc_chapter_enter( chap )
for section in chap.sections:
self.toc_section_enter( section )
self.toc_section_exit( section )
self.toc_chapter_exit( chap )
self.toc_index( index_filename )
self.toc_exit()
if output:
close_output( output )
#
# formatting the index
#
def index_enter( self ):
pass
def index_name_enter( self, name ):
pass
def index_name_exit( self, name ):
pass
def index_exit( self ):
pass
def index_dump( self, index_filename = None ):
output = None
if index_filename:
output = open_output( index_filename )
self.index_enter()
for name in self.block_index:
self.index_name_enter( name )
self.index_name_exit( name )
self.index_exit()
if output:
close_output( output )
#
# formatting a section
#
def section_enter( self, section ):
pass
def block_enter( self, block ):
pass
def markup_enter( self, markup, block = None ):
pass
def field_enter( self, field, markup = None, block = None ):
pass
def field_exit( self, field, markup = None, block = None ):
pass
def markup_exit( self, markup, block = None ):
pass
def block_exit( self, block ):
pass
def section_exit( self, section ):
pass
def section_dump( self, section, section_filename = None ):
output = None
if section_filename:
output = open_output( section_filename )
self.section_enter( section )
for name in section.block_names:
skip_entry = 0
try:
block = self.identifiers[name]
# `block_names' can contain field names also,
# which we filter out
for markup in block.markups:
if markup.tag == 'values':
for field in markup.fields:
if field.name == name:
skip_entry = 1
except:
skip_entry = 1 # this happens e.g. for `/empty/' entries
if skip_entry:
continue
self.block_enter( block )
for markup in block.markups[1:]: # always ignore first markup!
self.markup_enter( markup, block )
for field in markup.fields:
self.field_enter( field, markup, block )
self.field_exit( field, markup, block )
self.markup_exit( markup, block )
self.block_exit( block )
self.section_exit( section )
if output:
close_output( output )
def section_dump_all( self ):
for section in self.sections:
self.section_dump( section )
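# Minimal illustrative sub-class -- an assumption for demonstration only,
# not part of the FreeType sources (see `tohtml.py' for the real
# `HtmlFormatter').  It shows the kind of hooks a formatter overrides.
class PlainTextFormatter( Formatter ):
    def toc_chapter_enter( self, chapter ):
        print "chapter:", chapter
    def section_enter( self, section ):
        print "section:", section
    def block_enter( self, block ):
        print "  block:", block.name
# such a formatter would then be driven with, e.g.
#   PlainTextFormatter( processor ).toc_dump()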
# eof
| gpl-2.0 | -926,340,232,307,868,300 | 25.991031 | 76 | 0.537465 | false |
nccgroup/typofinder | howoldisdomain/whois.py | 2 | 10182 | #
# Typofinder for domain typo discovery
#
# Released as open source by NCC Group Plc - http://www.nccgroup.com/
#
# Simple whois query function
#
# Based on RFC3912
#
# Developed by Matt Summers, matt dot summers at nccgroup dot com
# and Stephen Tomkinson
#
# http://www.github.com/nccgroup/typofinder
#
# Released under AGPL see LICENSE for more information
#
import socket
import codecs
import re
from publicsuffix import PublicSuffixList
import datetime
import pprint
import sys
#Populate the whois server map from our whois-servers.txt list; TLDs missing
#from it fall back to a referral lookup against whois.iana.org at query time
#Based on http://www.nirsoft.net/whois-servers.txt
FIELD_SEPERATOR = ', '
RATE_LIMITTED_RESPONSES = ("WHOIS LIMIT EXCEEDED",
"Too many simulataneous connections from your host",
"Please try again later.",
"You have been banned for abuse.",
"has exceeded the established limit",
"WHOIS LIMI",
"Still in grace period, wait",
"Permission denied.")
_tld_to_whois = dict()
with open("datasources/whois-servers.txt", "r") as whois_servers:
for line in whois_servers:
if line.startswith(';'):
continue
parts = line.split(' ')
_tld_to_whois['.' + parts[0].strip()] = parts[1].strip()
_psl = PublicSuffixList(input_file=codecs.open("datasources/effective_tld_names.dat", "r", "utf8"))
def _whois_lookup(sServer, sDomain):
"""
Perform the network connection to the Whois Server and query for the given domain.
@param sServer: The hostname of the whois server to query.
@param sDomain: The domain to query for.
@return: The whois result string.
"""
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.settimeout(5)
try:
s.connect((sServer, 43))
except socket.timeout:
return "Timeout connecting to " + sServer
except socket.error:
return "Unable to connect to " + sServer
try:
query = str(codecs.encode(sDomain, "idna"), "ascii") + '\r\n'
except:
#Assumes an encoding error, just send the raw string instead.
query = sDomain + '\r\n'
response = ''
try:
s.send(query.encode())
while len(response) < 10000:
            data = s.recv(1000)
            try:
                block = data.decode("utf-8")
            except UnicodeDecodeError:
                #If it's not UTF-8, the second most popular encoding appears to be iso-8859-1
                block = data.decode("iso-8859-1")
if block == '':
break
response = response + block
except socket.error:
pass
finally:
try:
s.shutdown(socket.SHUT_RDWR)
except socket.error:
#Not much more we can do here
pass
finally:
s.close()
return response
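# Illustrative RFC 3912 exchange (for clarity; not executed anywhere):
#   >>> _whois_lookup('whois.iana.org', 'example.com')   # doctest: +SKIP
# simply sends 'example.com\r\n' to TCP port 43 and returns whatever text
# the server writes back before closing the connection.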
def whois(sDomain):
"""
Entry point for this package, which fetches whois data from the appropriate server.
@param sDomain: The domain to query whois for.
@return: The whois result.
"""
sDomain = _psl.get_public_suffix(sDomain)
sLDot = sDomain.find(".")
tld = sDomain[sLDot:]
if tld in _tld_to_whois:
sServer = _tld_to_whois[tld]
else:
sServer = "whois.iana.org"
try:
for sLine in _whois_lookup(sServer, tld).split('\n'):
if "refer:" in sLine or "whois:" in sLine:
sServer = sLine[6:].lstrip()
_tld_to_whois[tld] = sServer
break
except:
pass
result = _recursive_whois(sServer, sDomain)
#Special case to handle the fuzzy matching at the ICANN whois server
if 'To single out one record, look it up with "xxx", where xxx is one of the' in result:
all_domain_records = _whois_lookup(sServer, '=' + sDomain)
all_whois_servers = _extract_field(all_domain_records, "Whois Server")
if all_whois_servers != None:
next_whois_server = all_whois_servers.split(', ')[-1]
return _recursive_whois(next_whois_server, sDomain)
else:
return result
else:
return result
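# Hypothetical usage sketch (needs network access, so shown as a comment):
#   >>> text = whois('www.example.com')   # doctest: +SKIP
# The public suffix list first reduces the name to 'example.com'; the
# '.com' server comes from whois-servers.txt or an IANA referral, and the
# raw whois text (or a rate-limit/empty-response message) is returned.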
def _recursive_whois(sServer, sDomain):
"""
A recursive whois function which will follow the "Whois Server:" referals.
@param sServer: The hostname of the whois server to query.
@param sDomain: The domain to query for.
@return: The whois result string.
"""
result = _whois_lookup(sServer, sDomain)
next_whois_server = _extract_field(result, "Whois Server")
if next_whois_server and next_whois_server != sServer and not next_whois_server.startswith("http"):
return _recursive_whois(next_whois_server, sDomain)
for error_message in RATE_LIMITTED_RESPONSES:
if error_message in result:
return "Rate limited by " + sServer
if result.strip() == '':
return "Empty response from " + sServer
return result.lstrip()
def _extract_field(whois_blob, *args):
"""
Extract from the given WHOIS result blob the value that is associated with the given field name.
@param whois_blob The whois data to search for the value
@param *args One or more field names (interpreted as regexes) that the requested value may be referred to as.
"""
result = list()
if len(args) == 1:
field_name = args[0]
else:
field_name = "(?:"
field_list = list()
for arg in args:
field_list.append("(?:" + arg + ")")
field_name += "|".join(field_list)
field_name += ")"
regex = field_name + r"\.*:(?: |\t)*(.+)\n"
match_list = re.finditer(regex, whois_blob, flags=re.IGNORECASE)
for match in match_list:
if match.group(1):
value = match.group(1).strip()
if value and value != "null":
result.append(value)
if not result:
return None
else:
return FIELD_SEPERATOR.join(result)
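# Worked example (illustrative): given the blob
#   'Registrant Name: J. Doe\nRegistrant Email: j@example.com\n'
# _extract_field(blob, 'Registrant Name') builds the case-insensitive
# regex "Registrant Name\.*:(?: |\t)*(.+)\n" and returns 'J. Doe';
# multiple hits are joined with FIELD_SEPERATOR (', ').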
def _date_parse(date_string):
"""
Date parser which attempts to work with a range of date formats.
@param date_string The string representing a date or date/time.
@return A datetime object if one could be parsed, or None
"""
if not date_string:
return None
date_string = date_string.rstrip('.')
    date_string = re.sub(r'(\d)T(\d)', r'\g<1>\g<2>', date_string)
date_string = date_string.replace(' ', '')
date_string = date_string.replace('.', '-')
date_string = date_string.rstrip('Z')
#Handle timezones ourselves on python 2.X because the native datetime won't parse them
tz_match = None
if sys.version_info < (3,0):
tz_match = re.match(r"(.*)(\+|-)(\d{2}):?(\d{2})$", date_string)
if tz_match:
date_string = tz_match.group(1)
result = None
    for fmt in ("%Y-%m-%d%H:%M:%S", "%Y-%m-%d%H:%M:%S%z", "%Y-%m-%d", "%d-%b-%Y", "%a%b%d%H:%M:%S%Z%Y", "%Y-%d-%m", "%Y-%m-%d%H:%M:%S-%f", "%d-%b-%Y%H:%M:%S%Z"):
        try:
            result = datetime.datetime.strptime(date_string, fmt)
break
except ValueError:
#Attempt the next format
continue
if result and tz_match:
#Manipulate the datetime into UTC if we don't have timezone support
delta = datetime.timedelta(hours=int(tz_match.group(3)), minutes=int(tz_match.group(4)))
if tz_match.group(2) == '-':
result += delta
else:
result -= delta
return result
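# Worked examples (illustrative), after the normalisation above:
#   '2014-01-02T03:04:05Z' -> datetime(2014, 1, 2, 3, 4, 5)
#   '02-Jan-2014'          -> datetime(2014, 1, 2)
#   '2014.01.02 03:04:05+01:00' -> datetime(2014, 1, 2, 2, 4, 5) on
#   Python 2, where the '+01:00' offset is folded into UTC by hand.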
contact_types = {"registrant": "(?:Registrant|Owner)(?: Contact)?",
"tech": "Tech(?:nical)?(?: Contact)?",
"admin": "Admin(?:istrative)?(?: Contact)?"}
contact_fields = {"name": "(?:Name)?",
"organization": "Organi[sz]ation",
"street": "(?:(?:Street)|(?:Add?ress ?)1?)",
"city": "City",
"state": "State(?:/Province)?",
"country": "Country(?:/Economy)?",
"post_code": "Postal ?Code|zip",
"email": "E-?mail",
"phone": "(?:tele)?phone(?: Number)?",
"phone_ext": "Phone Ext",
"fax": "(?:Fax|Facsimile)[ -]?(?:Number|No)?",
"fax_ext": "Fax Ext"}
registrar_fields = {"name": "Registrar(?: Name)?",
"url": "Registrar (?:(?:URL)|(?:Homepage))",
"abuse_email": "Abuse Contact Email",
"abuse_phone": "Abuse Contact Phone",
"iana_id": "Registrar IANA ID"}
date_fields = {"created": ("Creation Date", "(?:Date )?created(?: on)?", "Registered(?: on)?", "Registration Date"),
"updated": ("(?:Last )?Modified", "Updated Date", "(?:last )?updated?(?: on)?"),
"expires": ("Expiration Date", "Expiry Date", "renewal date", "Expires(?: on)?", "Expire Date")}
def parse(whois_str):
"""
Parses the given whois result string in an attempt to extract common fields.
@param whois_str The raw WHOIS result
@return A dictionary of dictionaries containing the parsed data.
"""
result_dict = {}
for type in contact_types.keys():
person_dict = dict()
for field in contact_fields.keys():
person_dict[field] = _extract_field(whois_str, contact_types[type] + "(?: |-)" + contact_fields[field])
result_dict[type] = person_dict
registrar_dict = dict()
for field in registrar_fields.keys():
registrar_dict[field] = _extract_field(whois_str, registrar_fields[field])
result_dict['registrar'] = registrar_dict
result_dict['reseller'] = {'name': _extract_field(whois_str, "Reseller")}
dates_dict = {}
for field in date_fields.keys():
date_str = _extract_field(whois_str, *date_fields[field])
if date_str:
date_str = date_str.split(FIELD_SEPERATOR)[0]
dates_dict[field] = _date_parse(date_str)
else:
dates_dict[field] = None
result_dict['date'] = dates_dict
return result_dict | agpl-3.0 | -4,323,212,683,029,507,000 | 31.429936 | 164 | 0.570909 | false |
r03ert0/ldsc | test/test_sumstats.py | 3 | 16976 | from __future__ import division
import ldscore.sumstats as s
import ldscore.parse as ps
import unittest
import numpy as np
import pandas as pd
from pandas.util.testing import assert_series_equal, assert_frame_equal
from nose.tools import *
from numpy.testing import assert_array_equal, assert_array_almost_equal, assert_allclose
from nose.plugins.attrib import attr
import os
from ldsc import parser
DIR = os.path.dirname(__file__)
N_REP = 200
s._N_CHR = 2 # having to mock 22 files is annoying
class Mock(object):
'''
Dumb object for mocking args and log
'''
def __init__(self):
pass
    def log(self, x):
        print x
log = Mock()
args = Mock()
# helper: t(attr) builds an accessor, so t('rg_ratio')(obj) gives obj.rg_ratio, or NaN if missing
t = lambda attr: lambda obj: getattr(obj, attr, float('nan'))
def test_check_condnum():
x = np.ones((2, 2))
x[1, 1] += 1e-5
args.invert_anyway = False
assert_raises(ValueError, s._check_ld_condnum, args, log, x)
args.invert_anyway = True
s._check_ld_condnum(args, log, x) # no error
def test_check_variance():
ld = pd.DataFrame({'SNP': ['a', 'b', 'c'],
'LD1': np.ones(3).astype(float),
'LD2': np.arange(3).astype(float)})
ld = ld[['SNP', 'LD1', 'LD2']]
M_annot = np.array([[1, 2]])
M_annot, ld, novar_col = s._check_variance(log, M_annot, ld)
assert_array_equal(M_annot.shape, (1, 1))
assert_array_equal(M_annot, [[2]])
assert_allclose(ld.iloc[:, 1], [0, 1, 2])
assert_array_equal(novar_col, [True, False])
def test_align_alleles():
beta = pd.Series(np.ones(6))
alleles = pd.Series(['ACAC', 'TGTG', 'GTGT', 'AGCT', 'AGTC', 'TCTC'])
beta = s._align_alleles(beta, alleles)
assert_series_equal(beta, pd.Series([1.0, 1, 1, -1, 1, 1]))
def test_filter_bad_alleles():
alleles = pd.Series(['ATAT', 'ATAG', 'DIID', 'ACAC'])
bad_alleles = s._filter_alleles(alleles)
print bad_alleles
assert_series_equal(bad_alleles, pd.Series([False, False, False, True]))
def test_read_annot():
ref_ld_chr = None
ref_ld = os.path.join(DIR, 'annot_test/test')
overlap_matrix, M_tot = s._read_chr_split_files(ref_ld_chr, ref_ld, log, 'annot matrix',
ps.annot, frqfile=None)
assert_array_equal(overlap_matrix, [[1, 0, 0], [0, 2, 2], [0, 2, 2]])
assert_array_equal(M_tot, 3)
frqfile = os.path.join(DIR, 'annot_test/test1')
overlap_matrix, M_tot = s._read_chr_split_files(ref_ld_chr, ref_ld, log, 'annot matrix',
ps.annot, frqfile=frqfile)
assert_array_equal(overlap_matrix, [[1, 0, 0], [0, 1, 1], [0, 1, 1]])
assert_array_equal(M_tot, 2)
def test_valid_snps():
x = {'AC', 'AG', 'CA', 'CT', 'GA', 'GT', 'TC', 'TG'}
assert_equal(x, s.VALID_SNPS)
def test_bases():
x = set(['A', 'T', 'G', 'C'])
assert_equal(x, set(s.BASES))
def test_complement():
assert_equal(s.COMPLEMENT, {'A': 'T', 'T': 'A', 'C': 'G', 'G': 'C'})
def test_warn_len():
# nothing to test except that it doesn't throw an error at runtime
s._warn_length(log, [1])
def test_match_alleles():
m = {'ACAC',
'ACCA',
'ACGT',
'ACTG',
'AGAG',
'AGCT',
'AGGA',
'AGTC',
'CAAC',
'CACA',
'CAGT',
'CATG',
'CTAG',
'CTCT',
'CTGA',
'CTTC',
'GAAG',
'GACT',
'GAGA',
'GATC',
'GTAC',
'GTCA',
'GTGT',
'GTTG',
'TCAG',
'TCCT',
'TCGA',
'TCTC',
'TGAC',
'TGCA',
'TGGT',
'TGTG'}
assert_equal(m, s.MATCH_ALLELES)
def test_flip_alleles():
m = {'ACAC': False,
'ACCA': True,
'ACGT': True,
'ACTG': False,
'AGAG': False,
'AGCT': True,
'AGGA': True,
'AGTC': False,
'CAAC': True,
'CACA': False,
'CAGT': False,
'CATG': True,
'CTAG': True,
'CTCT': False,
'CTGA': False,
'CTTC': True,
'GAAG': True,
'GACT': False,
'GAGA': False,
'GATC': True,
'GTAC': True,
'GTCA': False,
'GTGT': False,
'GTTG': True,
'TCAG': False,
'TCCT': True,
'TCGA': True,
'TCTC': False,
'TGAC': False,
'TGCA': True,
'TGGT': True,
'TGTG': False}
assert_equal(m, s.FLIP_ALLELES)
def test_strand_ambiguous():
m = {'AC': False,
'AG': False,
'AT': True,
'CA': False,
'CG': True,
'CT': False,
'GA': False,
'GC': True,
'GT': False,
'TA': True,
'TC': False,
'TG': False}
assert_equal(m, s.STRAND_AMBIGUOUS)
@attr('rg')
@attr('slow')
class Test_RG_Statistical():
@classmethod
def setUpClass(cls):
args = parser.parse_args('')
args.ref_ld = DIR + '/simulate_test/ldscore/twold_onefile'
args.w_ld = DIR + '/simulate_test/ldscore/w'
args.rg = ','.join(
(DIR + '/simulate_test/sumstats/' + str(i) for i in xrange(N_REP)))
args.out = DIR + '/simulate_test/1'
x = s.estimate_rg(args, log)
args.intercept_gencov = ','.join(('0' for _ in xrange(N_REP)))
args.intercept_h2 = ','.join(('1' for _ in xrange(N_REP)))
y = s.estimate_rg(args, log)
cls.rg = x
cls.rg_noint = y
def test_rg_ratio(self):
assert_allclose(np.nanmean(map(t('rg_ratio'), self.rg)), 0, atol=0.02)
def test_rg_ratio_noint(self):
assert_allclose(
np.nanmean(map(t('rg_ratio'), self.rg_noint)), 0, atol=0.02)
def test_rg_se(self):
assert_allclose(np.nanmean(map(t('rg_se'), self.rg)), np.nanstd(
map(t('rg_ratio'), self.rg)), atol=0.02)
def test_rg_se_noint(self):
assert_allclose(np.nanmean(map(t('rg_se'), self.rg_noint)), np.nanstd(
map(t('rg_ratio'), self.rg_noint)), atol=0.02)
def test_gencov_tot(self):
assert_allclose(
np.nanmean(map(t('tot'), map(t('gencov'), self.rg))), 0, atol=0.02)
def test_gencov_tot_noint(self):
assert_allclose(
np.nanmean(map(t('tot'), map(t('gencov'), self.rg_noint))), 0, atol=0.02)
def test_gencov_tot_se(self):
assert_allclose(np.nanstd(map(t('tot'), map(t('gencov'), self.rg))), np.nanmean(
map(t('tot_se'), map(t('gencov'), self.rg))), atol=0.02)
def test_gencov_tot_se_noint(self):
assert_allclose(np.nanstd(map(t('tot'), map(t('gencov'), self.rg_noint))), np.nanmean(
map(t('tot_se'), map(t('gencov'), self.rg_noint))), atol=0.02)
def test_gencov_cat(self):
assert_allclose(
np.nanmean(map(t('cat'), map(t('gencov'), self.rg))), [0, 0], atol=0.02)
def test_gencov_cat_noint(self):
assert_allclose(
np.nanmean(map(t('cat'), map(t('gencov'), self.rg_noint))), [0, 0], atol=0.02)
def test_gencov_cat_se(self):
assert_allclose(np.nanstd(map(t('cat'), map(t('gencov'), self.rg))), np.nanmean(
map(t('cat_se'), map(t('gencov'), self.rg))), atol=0.02)
def test_gencov_cat_se_noint(self):
assert_allclose(np.nanstd(map(t('cat'), map(t('gencov'), self.rg_noint))), np.nanmean(
map(t('cat_se'), map(t('gencov'), self.rg_noint))), atol=0.02)
def test_gencov_int(self):
assert_allclose(
np.nanmean(map(t('intercept'), map(t('gencov'), self.rg))), 0, atol=0.1)
def test_gencov_int_se(self):
assert_allclose(np.nanmean(map(t('intercept_se'), map(t('gencov'), self.rg))), np.nanstd(
map(t('intercept'), map(t('gencov'), self.rg))), atol=0.1)
def test_hsq_int(self):
assert_allclose(
np.nanmean(map(t('intercept'), map(t('hsq2'), self.rg))), 1, atol=0.1)
def test_hsq_int_se(self):
assert_allclose(np.nanmean(map(t('intercept_se'), map(t('hsq2'), self.rg))), np.nanstd(
map(t('intercept'), map(t('hsq2'), self.rg))), atol=0.1)
@attr('h2')
@attr('slow')
class Test_H2_Statistical(unittest.TestCase):
@classmethod
def setUpClass(cls):
args = parser.parse_args('')
args.ref_ld = DIR + '/simulate_test/ldscore/twold_onefile'
args.w_ld = DIR + '/simulate_test/ldscore/w'
args.chisq_max = 99999
h2 = []
h2_noint = []
for i in xrange(N_REP):
args.intercept_h2 = None
args.h2 = DIR + '/simulate_test/sumstats/' + str(i)
args.out = DIR + '/simulate_test/1'
h2.append(s.estimate_h2(args, log))
args.intercept_h2 = 1
h2_noint.append(s.estimate_h2(args, log))
cls.h2 = h2
cls.h2_noint = h2_noint
def test_tot(self):
assert_allclose(np.nanmean(map(t('tot'), self.h2)), 0.9, atol=0.05)
def test_tot_noint(self):
assert_allclose(
np.nanmean(map(t('tot'), self.h2_noint)), 0.9, atol=0.05)
def test_tot_se(self):
assert_allclose(np.nanmean(map(t('tot_se'), self.h2)), np.nanstd(
map(t('tot'), self.h2)), atol=0.05)
def test_tot_se_noint(self):
assert_allclose(np.nanmean(map(t('tot_se'), self.h2_noint)), np.nanstd(
map(t('tot'), self.h2_noint)), atol=0.05)
def test_cat(self):
x = np.nanmean(map(t('cat'), self.h2_noint), axis=0)
y = np.array((0.3, 0.6)).reshape(x.shape)
assert_allclose(x, y, atol=0.05)
def test_cat_noint(self):
x = np.nanmean(map(t('cat'), self.h2_noint), axis=0)
y = np.array((0.3, 0.6)).reshape(x.shape)
assert_allclose(x, y, atol=0.05)
def test_cat_se(self):
x = np.nanmean(map(t('cat_se'), self.h2), axis=0)
y = np.nanstd(map(t('cat'), self.h2), axis=0).reshape(x.shape)
assert_allclose(x, y, atol=0.05)
def test_cat_se_noint(self):
x = np.nanmean(map(t('cat_se'), self.h2_noint), axis=0)
y = np.nanstd(map(t('cat'), self.h2_noint), axis=0).reshape(x.shape)
assert_allclose(x, y, atol=0.05)
def test_coef(self):
# should be h^2/M = [[0.3, 0.9]] / M
coef = np.array(((0.3, 0.9))) / self.h2[0].M
for h in [self.h2, self.h2_noint]:
assert np.all(np.abs(np.nanmean(map(t('coef'), h), axis=0) - coef) < 1e6)
def test_coef_se(self):
for h in [self.h2, self.h2_noint]:
assert_array_almost_equal(np.nanmean(map(t('coef_se'), h), axis=0),
np.nanstd(map(t('coef'), h), axis=0))
def test_prop(self):
for h in [self.h2, self.h2_noint]:
assert np.all(np.nanmean(map(t('prop'), h), axis=0) - [1/3, 2/3] < 0.02)
def test_prop_se(self):
for h in [self.h2, self.h2_noint]:
assert np.all(np.nanmean(map(t('prop_se'), h), axis=0) - np.nanstd(map(t('prop'), h), axis=0) < 0.02)
def test_int(self):
assert_allclose(np.nanmean(map(t('intercept'), self.h2)), 1, atol=0.1)
def test_int_se(self):
assert_allclose(np.nanstd(map(t('intercept'), self.h2)), np.nanmean(
map(t('intercept_se'), self.h2)), atol=0.1)
class Test_Estimate(unittest.TestCase):
def test_h2_M(self): # check --M works
args = parser.parse_args('')
args.ref_ld = DIR + '/simulate_test/ldscore/oneld_onefile'
args.w_ld = DIR + '/simulate_test/ldscore/w'
args.h2 = DIR + '/simulate_test/sumstats/1'
args.out = DIR + '/simulate_test/1'
args.print_cov = True # right now just check no runtime errors
args.print_delete_vals = True
x = s.estimate_h2(args, log)
args.M = str(
float(open(DIR + '/simulate_test/ldscore/oneld_onefile.l2.M_5_50').read()))
y = s.estimate_h2(args, log)
assert_array_almost_equal(x.tot, y.tot)
assert_array_almost_equal(x.tot_se, y.tot_se)
args.M = '1,2'
assert_raises(ValueError, s.estimate_h2, args, log)
args.M = 'foo_bar'
assert_raises(ValueError, s.estimate_h2, args, log)
def test_h2_ref_ld(self): # test different ways of reading ref ld
args = parser.parse_args('')
args.ref_ld_chr = DIR + '/simulate_test/ldscore/twold_onefile'
args.w_ld = DIR + '/simulate_test/ldscore/w'
args.h2 = DIR + '/simulate_test/sumstats/555'
args.out = DIR + '/simulate_test/'
x = s.estimate_h2(args, log)
args.ref_ld = DIR + '/simulate_test/ldscore/twold_firstfile,' + \
DIR + '/simulate_test/ldscore/twold_secondfile'
y = s.estimate_h2(args, log)
args.ref_ld_chr = DIR + '/simulate_test/ldscore/twold_firstfile,' + \
DIR + '/simulate_test/ldscore/twold_secondfile'
z = s.estimate_h2(args, log)
assert_almost_equal(x.tot, y.tot)
assert_array_almost_equal(y.cat, z.cat)
assert_array_almost_equal(x.prop, y.prop)
assert_array_almost_equal(y.coef, z.coef)
assert_array_almost_equal(x.tot_se, y.tot_se)
assert_array_almost_equal(y.cat_se, z.cat_se)
assert_array_almost_equal(x.prop_se, y.prop_se)
assert_array_almost_equal(y.coef_se, z.coef_se)
# test statistical properties (constrain intercept here)
def test_rg_M(self):
args = parser.parse_args('')
args.ref_ld = DIR + '/simulate_test/ldscore/oneld_onefile'
args.w_ld = DIR + '/simulate_test/ldscore/w'
args.rg = ','.join(
[DIR + '/simulate_test/sumstats/1' for _ in xrange(2)])
args.out = DIR + '/simulate_test/1'
x = s.estimate_rg(args, log)[0]
args.M = open(
DIR + '/simulate_test/ldscore/oneld_onefile.l2.M_5_50', 'rb').read().rstrip('\n')
y = s.estimate_rg(args, log)[0]
assert_array_almost_equal(x.rg_ratio, y.rg_ratio)
assert_array_almost_equal(x.rg_se, y.rg_se)
args.M = '1,2'
assert_raises(ValueError, s.estimate_rg, args, log)
args.M = 'foo_bar'
assert_raises(ValueError, s.estimate_rg, args, log)
def test_rg_ref_ld(self):
args = parser.parse_args('')
args.ref_ld_chr = DIR + '/simulate_test/ldscore/twold_onefile'
args.w_ld = DIR + '/simulate_test/ldscore/w'
args.rg = ','.join(
[DIR + '/simulate_test/sumstats/1' for _ in xrange(2)])
args.out = DIR + '/simulate_test/1'
args.print_cov = True # right now just check no runtime errors
args.print_delete_vals = True
x = s.estimate_rg(args, log)[0]
args.ref_ld = DIR + '/simulate_test/ldscore/twold_firstfile,' + \
DIR + '/simulate_test/ldscore/twold_secondfile'
y = s.estimate_rg(args, log)[0]
args.ref_ld_chr = DIR + '/simulate_test/ldscore/twold_firstfile,' + \
DIR + '/simulate_test/ldscore/twold_secondfile'
z = s.estimate_rg(args, log)[0]
assert_almost_equal(x.rg_ratio, y.rg_ratio)
assert_almost_equal(y.rg_jknife, z.rg_jknife)
assert_almost_equal(x.rg_se, y.rg_se)
def test_no_check_alleles(self):
args = parser.parse_args('')
args.ref_ld = DIR + '/simulate_test/ldscore/oneld_onefile'
args.w_ld = DIR + '/simulate_test/ldscore/w'
args.rg = ','.join(
[DIR + '/simulate_test/sumstats/1' for _ in xrange(2)])
args.out = DIR + '/simulate_test/1'
x = s.estimate_rg(args, log)[0]
args.no_check_alleles = True
y = s.estimate_rg(args, log)[0]
assert_equal(x.rg_ratio, y.rg_ratio)
assert_almost_equal(x.rg_jknife, y.rg_jknife)
assert_equal(x.rg_se, y.rg_se)
def test_twostep_h2(self):
# make sure two step isn't going crazy
args = parser.parse_args('')
args.ref_ld = DIR + '/simulate_test/ldscore/oneld_onefile'
args.w_ld = DIR + '/simulate_test/ldscore/w'
args.h2 = DIR + '/simulate_test/sumstats/1'
args.out = DIR + '/simulate_test/1'
args.chisq_max = 9999999
args.two_step = 999
x = s.estimate_h2(args, log)
args.chisq_max = 9999
args.two_step = 99999
y = s.estimate_h2(args, log)
assert_allclose(x.tot, y.tot, atol=1e-5)
def test_twostep_rg(self):
# make sure two step isn't going crazy
args = parser.parse_args('')
args.ref_ld_chr = DIR + '/simulate_test/ldscore/oneld_onefile'
args.w_ld = DIR + '/simulate_test/ldscore/w'
args.rg = ','.join(
[DIR + '/simulate_test/sumstats/1' for _ in xrange(2)])
args.out = DIR + '/simulate_test/rg'
args.two_step = 999
x = s.estimate_rg(args, log)[0]
args.two_step = 99999
y = s.estimate_rg(args, log)[0]
assert_allclose(x.rg_ratio, y.rg_ratio, atol=1e-5)
assert_allclose(x.gencov.tot, y.gencov.tot, atol=1e-5)
| gpl-3.0 | -525,288,307,735,595,800 | 33.858316 | 113 | 0.547184 | false |
PaulVanSchayck/irods | tests/pydevtest/test_resource_types.py | 2 | 222736 | import commands
import getpass
import os
import re
import shutil
import subprocess
import sys
if sys.version_info < (2, 7):
import unittest2 as unittest
else:
import unittest
import configuration
import lib
from resource_suite import ResourceSuite, ResourceBase
from test_chunkydevtest import ChunkyDevTest
class Test_Resource_RandomWithinReplication(ResourceSuite, ChunkyDevTest, unittest.TestCase):
def setUp(self):
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin modresc demoResc name origResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
admin_session.assert_icommand("iadmin mkresc demoResc replication", 'STDOUT_SINGLELINE', 'replication')
admin_session.assert_icommand("iadmin mkresc rrResc random", 'STDOUT_SINGLELINE', 'random')
admin_session.assert_icommand("iadmin mkresc unixA 'unixfilesystem' " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/unixAVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc unixB1 'unixfilesystem' " + configuration.HOSTNAME_2 + ":" +
lib.get_irods_top_level_dir() + "/unixB1Vault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc unixB2 'unixfilesystem' " + configuration.HOSTNAME_3 + ":" +
lib.get_irods_top_level_dir() + "/unixB2Vault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin addchildtoresc demoResc rrResc")
admin_session.assert_icommand("iadmin addchildtoresc demoResc unixA")
admin_session.assert_icommand("iadmin addchildtoresc rrResc unixB1")
admin_session.assert_icommand("iadmin addchildtoresc rrResc unixB2")
super(Test_Resource_RandomWithinReplication, self).setUp()
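    # The setUp above composes the following resource hierarchy (sketch):
    #   demoResc (replication)
    #   |-- rrResc (random)
    #   |   |-- unixB1
    #   |   `-- unixB2
    #   `-- unixA
    # so each put is replicated to unixA and to one of unixB1/unixB2.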
def tearDown(self):
super(Test_Resource_RandomWithinReplication, self).tearDown()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin rmchildfromresc rrResc unixB2")
admin_session.assert_icommand("iadmin rmchildfromresc rrResc unixB1")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc unixA")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc rrResc")
admin_session.assert_icommand("iadmin rmresc unixB1")
admin_session.assert_icommand("iadmin rmresc unixB2")
admin_session.assert_icommand("iadmin rmresc unixA")
admin_session.assert_icommand("iadmin rmresc rrResc")
admin_session.assert_icommand("iadmin rmresc demoResc")
admin_session.assert_icommand("iadmin modresc origResc name demoResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
shutil.rmtree(lib.get_irods_top_level_dir() + "/unixB2Vault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/unixB1Vault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/unixAVault", ignore_errors=True)
@unittest.skip("EMPTY_RESC_PATH - no vault path for coordinating resources")
def test_ireg_as_rodsuser_in_vault(self):
pass
def test_iput_with_purgec(self):
# local setup
filename = "purgecfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput --purgec " + filename) # put file
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
# should be listed once - replica 1
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename])
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_iget_with_purgec(self):
# local setup
filename = "purgecgetfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("iget -f --purgec " + filename) # get file and purge 'cached' replica
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # should be listed once
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_irepl_with_purgec(self):
# local setup
filename = "purgecreplfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " --purgec " + filename) # replicate to test resource
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # should be listed twice - 2 of 3
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_irepl_over_existing_bad_replica__ticket_1705(self):
# local setup
filename = "reploverwritebad.txt"
filepath = lib.create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# overwrite default repl with different data
self.admin.assert_icommand("iput -f %s %s" % (doublefile, filename))
# default resource repl 0 should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# default resource repl 0 should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + doublesize + " ", " & " + filename])
# default resource repl 1 should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# default resource 1 should have double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + doublesize + " ", " & " + filename])
# test resource should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE',
[" 2 " + self.testresc, " " + doublesize + " ", " " + filename])
# replicate back onto test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# test resource should have new clean doublesize file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE',
[" 2 " + self.testresc, " " + doublesize + " ", " & " + filename])
# should not have a replica 3
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_irepl_over_existing_third_replica__ticket_1705(self):
# local setup
filename = "thirdreplicatest.txt"
filepath = lib.create_local_testfile(filename)
hostname = lib.get_hostname()
hostuser = getpass.getuser()
# assertions
self.admin.assert_icommand("iadmin mkresc thirdresc unixfilesystem %s:/tmp/%s/thirdrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create third resource
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("irepl -R thirdresc " + filename) # replicate to third resource
self.admin.assert_icommand("irepl " + filename) # replicate overtop default resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate overtop test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R thirdresc " + filename) # replicate overtop third resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# should not have a replica 4
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# should not have a replica 5
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
self.admin.assert_icommand("irm -f " + filename) # cleanup file
self.admin.assert_icommand("iadmin rmresc thirdresc") # remove third resource
# local cleanup
os.remove(filepath)
def test_irepl_over_existing_second_replica__ticket_1705(self):
# local setup
filename = "secondreplicatest.txt"
filepath = lib.create_local_testfile(filename)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput -R " + self.testresc + " " + filename) # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# replicate to default resource
self.admin.assert_icommand("irepl " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# replicate overtop default resource
self.admin.assert_icommand("irepl " + filename)
# should not have a replica 3
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# replicate overtop test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# should not have a replica 3
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# local cleanup
os.remove(filepath)
def test_irepl_update_replicas(self):
# local setup
filename = "updatereplicasfile.txt"
filepath = lib.create_local_testfile(filename)
hostname = lib.get_hostname()
hostuser = getpass.getuser()
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("iadmin mkresc thirdresc unixfilesystem %s:/tmp/%s/thirdrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create third resource
self.admin.assert_icommand("iadmin mkresc fourthresc unixfilesystem %s:/tmp/%s/fourthrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create fourth resource
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
# replicate to test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# replicate to third resource
self.admin.assert_icommand("irepl -R thirdresc " + filename)
# replicate to fourth resource
self.admin.assert_icommand("irepl -R fourthresc " + filename)
# repave overtop test resource
self.admin.assert_icommand("iput -f -R " + self.testresc + " " + doublefile + " " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
self.admin.assert_icommand("irepl -U " + filename) # update last replica
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
self.admin.assert_icommand("irepl -aU " + filename) # update all replicas
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
self.admin.assert_icommand("irm -f " + filename) # cleanup file
self.admin.assert_icommand("iadmin rmresc thirdresc") # remove third resource
self.admin.assert_icommand("iadmin rmresc fourthresc") # remove third resource
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_irm_specific_replica(self):
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', self.testfile) # should be listed
self.admin.assert_icommand("irepl -R " + self.testresc + " " + self.testfile) # creates replica
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', self.testfile) # should be listed twice
self.admin.assert_icommand("irm -n 0 " + self.testfile) # remove original from cacheResc only
# replica 2 should still be there
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', ["2 " + self.testresc, self.testfile])
self.admin.assert_icommand_fail("ils -L " + self.testfile, 'STDOUT_SINGLELINE',
["0 " + self.admin.default_resource, self.testfile]) # replica 0 should be gone
trashpath = "/" + self.admin.zone_name + "/trash/home/" + self.admin.username + \
"/" + self.admin._session_id
self.admin.assert_icommand_fail("ils -L " + trashpath + "/" + self.testfile, 'STDOUT_SINGLELINE',
["0 " + self.admin.default_resource, self.testfile]) # replica should not be in trash
def test_local_iput_with_force_and_destination_resource__ticket_1706(self):
# local setup
filename = "iputwithforceanddestination.txt"
filepath = lib.create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
# should not be listed
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist")
self.admin.assert_icommand("iput " + filename) # put file
# replicate to test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) #
# overwrite test repl with different data
self.admin.assert_icommand("iput -f -R %s %s %s" % (self.testresc, doublefile, filename))
# default resource cache should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + filename])
# default resource archive should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + filename])
# default resource cache should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + doublesize + " ", " " + filename])
# default resource archive should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + doublesize + " ", " " + filename])
# targeted resource should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " " + doublesize + " ", "& " + filename])
# local cleanup
os.remove(filepath)
os.remove(doublefile)
class Test_Resource_RoundRobinWithinReplication(ChunkyDevTest, ResourceSuite, unittest.TestCase):
def setUp(self):
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin modresc demoResc name origResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
admin_session.assert_icommand("iadmin mkresc demoResc replication", 'STDOUT_SINGLELINE', 'replication')
admin_session.assert_icommand("iadmin mkresc rrResc roundrobin", 'STDOUT_SINGLELINE', 'roundrobin')
admin_session.assert_icommand("iadmin mkresc unixA 'unixfilesystem' " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/unixAVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc unixB1 'unixfilesystem' " + configuration.HOSTNAME_2 + ":" +
lib.get_irods_top_level_dir() + "/unixB1Vault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc unixB2 'unixfilesystem' " + configuration.HOSTNAME_3 + ":" +
lib.get_irods_top_level_dir() + "/unixB2Vault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin addchildtoresc demoResc rrResc")
admin_session.assert_icommand("iadmin addchildtoresc demoResc unixA")
admin_session.assert_icommand("iadmin addchildtoresc rrResc unixB1")
admin_session.assert_icommand("iadmin addchildtoresc rrResc unixB2")
super(Test_Resource_RoundRobinWithinReplication, self).setUp()
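    # Resulting hierarchy (sketch): demoResc (replication) replicates to
    # unixA and to rrResc (roundrobin), which alternates between unixB1
    # and unixB2 on successive puts -- the behaviour exercised by
    # test_next_child_iteration__2884 below.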
def tearDown(self):
super(Test_Resource_RoundRobinWithinReplication, self).tearDown()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin rmchildfromresc rrResc unixB2")
admin_session.assert_icommand("iadmin rmchildfromresc rrResc unixB1")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc unixA")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc rrResc")
admin_session.assert_icommand("iadmin rmresc unixB1")
admin_session.assert_icommand("iadmin rmresc unixB2")
admin_session.assert_icommand("iadmin rmresc unixA")
admin_session.assert_icommand("iadmin rmresc rrResc")
admin_session.assert_icommand("iadmin rmresc demoResc")
admin_session.assert_icommand("iadmin modresc origResc name demoResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
shutil.rmtree(lib.get_irods_top_level_dir() + "/unixB2Vault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/unixB1Vault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/unixAVault", ignore_errors=True)
def test_next_child_iteration__2884(self):
filename="foobar"
lib.make_file( filename, 100 )
# extract the next resource in the rr from the context string
_, out, _ =self.admin.assert_icommand('ilsresc -l rrResc', 'STDOUT_SINGLELINE', 'demoResc')
for line in out.split('\n'):
if 'context:' in line:
_, _, next_resc = line.partition('context:')
next_resc = next_resc.strip()
# determine the 'other' resource
resc_set = set(['unixB1', 'unixB2'])
remaining_set = resc_set - set([next_resc])
resc_remaining = remaining_set.pop()
        # first put should land on 'next_resc'
        self.admin.assert_icommand('iput ' + filename + ' file0') # put file
        self.admin.assert_icommand('ils -L file0', 'STDOUT_SINGLELINE', next_resc) # check replica location
        # second put should land on 'resc_remaining'
        self.admin.assert_icommand('iput ' + filename + ' file1') # put file
        self.admin.assert_icommand('ils -L file1', 'STDOUT_SINGLELINE', resc_remaining) # check replica location
        # third put should land on 'next_resc' once again
        self.admin.assert_icommand('iput ' + filename + ' file2') # put file
        self.admin.assert_icommand('ils -L file2', 'STDOUT_SINGLELINE', next_resc) # check replica location
os.remove(filename)
@unittest.skip("EMPTY_RESC_PATH - no vault path for coordinating resources")
def test_ireg_as_rodsuser_in_vault(self):
pass
def test_iput_with_purgec(self):
# local setup
filename = "purgecfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput --purgec " + filename) # put file
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
# should be listed once - replica 1
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename])
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_iget_with_purgec(self):
# local setup
filename = "purgecgetfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("iget -f --purgec " + filename) # get file and purge 'cached' replica
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # should be listed once
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_irepl_with_purgec(self):
# local setup
filename = "purgecreplfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " --purgec " + filename) # replicate to test resource
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # should be listed twice - 2 of 3
# local cleanup
os.remove(filepath)
def test_irepl_over_existing_bad_replica__ticket_1705(self):
# local setup
filename = "reploverwritebad.txt"
filepath = lib.create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# overwrite default repl with different data
self.admin.assert_icommand("iput -f %s %s" % (doublefile, filename))
# default resource repl 0 should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# default resource repl 0 should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + doublesize + " ", " & " + filename])
# default resource repl 1 should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# default resource repl 1 should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + doublesize + " ", " & " + filename])
# test resource should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE',
[" 2 " + self.testresc, " " + doublesize + " ", " " + filename])
# replicate back onto test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# test resource should have new clean doublesize file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE',
[" 2 " + self.testresc, " " + doublesize + " ", " & " + filename])
# should not have a replica 3
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_irepl_over_existing_third_replica__ticket_1705(self):
# local setup
filename = "thirdreplicatest.txt"
filepath = lib.create_local_testfile(filename)
hostname = lib.get_hostname()
hostuser = getpass.getuser()
# assertions
self.admin.assert_icommand("iadmin mkresc thirdresc unixfilesystem %s:/tmp/%s/thirdrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create third resource
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("irepl -R thirdresc " + filename) # replicate to third resource
self.admin.assert_icommand("irepl " + filename) # replicate overtop default resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate overtop test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R thirdresc " + filename) # replicate overtop third resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# should not have a replica 4
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# should not have a replica 5
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
self.admin.assert_icommand("irm -f " + filename) # cleanup file
self.admin.assert_icommand("iadmin rmresc thirdresc") # remove third resource
# local cleanup
os.remove(filepath)
def test_irepl_over_existing_second_replica__ticket_1705(self):
# local setup
filename = "secondreplicatest.txt"
filepath = lib.create_local_testfile(filename)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput -R " + self.testresc + " " + filename) # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# replicate to default resource
self.admin.assert_icommand("irepl " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# replicate overtop default resource
self.admin.assert_icommand("irepl " + filename)
# should not have a replica 3
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# replicate overtop test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# should not have a replica 3
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# local cleanup
os.remove(filepath)
def test_irepl_update_replicas(self):
# local setup
filename = "updatereplicasfile.txt"
filepath = lib.create_local_testfile(filename)
hostname = lib.get_hostname()
hostuser = getpass.getuser()
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("iadmin mkresc thirdresc unixfilesystem %s:/tmp/%s/thirdrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create third resource
self.admin.assert_icommand("iadmin mkresc fourthresc unixfilesystem %s:/tmp/%s/fourthrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create fourth resource
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
# replicate to test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# replicate to third resource
self.admin.assert_icommand("irepl -R thirdresc " + filename)
# replicate to fourth resource
self.admin.assert_icommand("irepl -R fourthresc " + filename)
# repave overtop test resource
self.admin.assert_icommand("iput -f -R " + self.testresc + " " + doublefile + " " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
self.admin.assert_icommand("irepl -U " + filename) # update last replica
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
self.admin.assert_icommand("irepl -aU " + filename) # update all replicas
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
self.admin.assert_icommand("irm -f " + filename) # cleanup file
self.admin.assert_icommand("iadmin rmresc thirdresc") # remove third resource
self.admin.assert_icommand("iadmin rmresc fourthresc") # remove third resource
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_irm_specific_replica(self):
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', self.testfile) # should be listed
self.admin.assert_icommand("irepl -R " + self.testresc + " " + self.testfile) # creates replica
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', self.testfile) # should be listed twice
self.admin.assert_icommand("irm -n 0 " + self.testfile) # remove original from cacheResc only
# replica 2 should still be there
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', ["2 " + self.testresc, self.testfile])
self.admin.assert_icommand_fail("ils -L " + self.testfile, 'STDOUT_SINGLELINE',
["0 " + self.admin.default_resource, self.testfile]) # replica 0 should be gone
trashpath = "/" + self.admin.zone_name + "/trash/home/" + self.admin.username + \
"/" + self.admin._session_id
self.admin.assert_icommand_fail("ils -L " + trashpath + "/" + self.testfile, 'STDOUT_SINGLELINE',
["0 " + self.admin.default_resource, self.testfile]) # replica should not be in trash
def test_local_iput_with_force_and_destination_resource__ticket_1706(self):
# local setup
filename = "iputwithforceanddestination.txt"
filepath = lib.create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
# should not be listed
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist")
self.admin.assert_icommand("iput " + filename) # put file
# replicate to test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) #
# overwrite test repl with different data
self.admin.assert_icommand("iput -f -R %s %s %s" % (self.testresc, doublefile, filename))
# default resource repl 0 should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + filename])
# default resource repl 1 should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + filename])
# default resource repl 0 should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + doublesize + " ", " " + filename])
# default resource repl 1 should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + doublesize + " ", " " + filename])
# targeted resource should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " " + doublesize + " ", "& " + filename])
# local cleanup
os.remove(filepath)
os.remove(doublefile)
class Test_Resource_Unixfilesystem(ResourceSuite, ChunkyDevTest, unittest.TestCase):
def setUp(self):
hostname = lib.get_hostname()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin modresc demoResc name origResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
admin_session.assert_icommand("iadmin mkresc demoResc 'unixfilesystem' " + hostname + ":" +
lib.get_irods_top_level_dir() + "/demoRescVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
super(Test_Resource_Unixfilesystem, self).setUp()
def tearDown(self):
super(Test_Resource_Unixfilesystem, self).tearDown()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin rmresc demoResc")
admin_session.assert_icommand("iadmin modresc origResc name demoResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
shutil.rmtree(lib.get_irods_top_level_dir() + "/demoRescVault", ignore_errors=True)
def test_key_value_passthru(self):
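# at spLogLevel 11 the server logs each key/value pair passed through
# --kv_pass, so the log can be grepped to confirm the values reached the
# resource plugin stack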
env = os.environ.copy()
env['spLogLevel'] = '11'
lib.restart_irods_server(env=env)
lib.make_file('file.txt', 15)
initial_log_size = lib.get_log_size('server')
self.user0.assert_icommand('iput --kv_pass="put_key=val1" file.txt')
assert lib.count_occurrences_of_string_in_log('server', 'key [put_key] - value [val1]', start_index=initial_log_size) in [1, 2] # double print if collection missing
initial_log_size = lib.get_log_size('server')
self.user0.assert_icommand('iget -f --kv_pass="get_key=val3" file.txt other.txt')
assert lib.count_occurrences_of_string_in_log('server', 'key [get_key] - value [val3]', start_index=initial_log_size) in [1, 2] # double print if collection missing
lib.restart_irods_server()
lib.assert_command('rm -f file.txt other.txt')
@unittest.skipIf(configuration.RUN_IN_TOPOLOGY, "Skip for Topology Testing: Checks local file")
def test_ifsck__2650(self):
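# ifsck audits vault files against the catalog: the plain form compares
# sizes, while -K also verifies the registered checksum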
# local setup
filename = 'fsckfile.txt'
filepath = lib.create_local_testfile(filename)
full_logical_path = '/' + self.admin.zone_name + '/home/' + self.admin.username + '/' + self.admin._session_id + '/' + filename
# assertions
self.admin.assert_icommand('ils -L ' + filename, 'STDERR_SINGLELINE', 'does not exist') # should not be listed
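# -K computes and verifies a checksum during the transfer and registers it in the catalog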
self.admin.assert_icommand('iput -K ' + self.testfile + ' ' + full_logical_path) # iput
self.admin.assert_icommand('ils -L ' + filename, 'STDOUT_SINGLELINE', filename) # should be listed
file_vault_full_path = os.path.join(lib.get_vault_session_path(self.admin), filename)
# method 1
self.admin.assert_icommand('ichksum -K ' + full_logical_path, 'STDOUT_MULTILINE',
['Total checksum performed = 1, Failed checksum = 0',
'sha2:0MczF/+UQ4lYmtu417LDmMb4mEarpxPShHfg1PhLtQw=']) # ichksum
# method 2
self.admin.assert_icommand("iquest \"select DATA_CHECKSUM where DATA_NAME = '%s'\"" % filename,
'STDOUT_SINGLELINE', ['DATA_CHECKSUM = sha2:0MczF/+UQ4lYmtu417LDmMb4mEarpxPShHfg1PhLtQw=']) # iquest
# method 3
self.admin.assert_icommand('ils -L', 'STDOUT_SINGLELINE', filename) # ils
self.admin.assert_icommand('ifsck -K ' + file_vault_full_path) # ifsck
# change content in vault
with open(file_vault_full_path, 'r+') as f:
f.seek(0)
f.write("x")
self.admin.assert_icommand('ifsck -K ' + file_vault_full_path, 'STDOUT_SINGLELINE', ['CORRUPTION', 'checksum not consistent with iRODS object']) # ifsck
# change size in vault
lib.cat(file_vault_full_path, 'extra letters')
self.admin.assert_icommand('ifsck ' + file_vault_full_path, 'STDOUT_SINGLELINE', ['CORRUPTION', 'size not consistent with iRODS object']) # ifsck
# unregister, reregister (to update filesize in iCAT), recalculate checksum, and confirm
self.admin.assert_icommand('irm -U ' + full_logical_path)
self.admin.assert_icommand('ireg ' + file_vault_full_path + ' ' + full_logical_path)
self.admin.assert_icommand('ifsck -K ' + file_vault_full_path, 'STDOUT_SINGLELINE', ['WARNING: checksum not available']) # ifsck
self.admin.assert_icommand('ichksum -f ' + full_logical_path, 'STDOUT_MULTILINE',
['Total checksum performed = 1, Failed checksum = 0',
'sha2:zJhArM/en4wfI9lVq+AIFAZa6RTqqdC6LVXf6tPbqxI='])
self.admin.assert_icommand('ifsck -K ' + file_vault_full_path) # ifsck
# local cleanup
os.remove(filepath)
class Test_Resource_Passthru(ChunkyDevTest, ResourceSuite, unittest.TestCase):
def setUp(self):
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin modresc demoResc name origResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
admin_session.assert_icommand("iadmin mkresc demoResc passthru", 'STDOUT_SINGLELINE', 'passthru')
admin_session.assert_icommand("iadmin mkresc unix1Resc 'unixfilesystem' " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/unix1RescVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin addchildtoresc demoResc unix1Resc")
super(Test_Resource_Passthru, self).setUp()
def tearDown(self):
super(Test_Resource_Passthru, self).tearDown()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin rmchildfromresc demoResc unix1Resc")
admin_session.assert_icommand("iadmin rmresc unix1Resc")
admin_session.assert_icommand("iadmin rmresc demoResc")
admin_session.assert_icommand("iadmin modresc origResc name demoResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
shutil.rmtree(lib.get_irods_top_level_dir() + "/unix1RescVault", ignore_errors=True)
@unittest.skip("EMPTY_RESC_PATH - no vault path for coordinating resources")
def test_ireg_as_rodsuser_in_vault(self):
pass
class Test_Resource_WeightedPassthru(ResourceBase, unittest.TestCase):
def setUp(self):
hostname = lib.get_hostname()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin modresc demoResc name origResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
admin_session.assert_icommand("iadmin mkresc demoResc replication", 'STDOUT_SINGLELINE', 'replication')
admin_session.assert_icommand("iadmin mkresc unixA 'unixfilesystem' " + hostname + ":" +
lib.get_irods_top_level_dir() + "/unixAVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc unixB 'unixfilesystem' " + hostname + ":" +
lib.get_irods_top_level_dir() + "/unixBVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc w_pt passthru '' 'write=1.0;read=1.0'", 'STDOUT_SINGLELINE', 'passthru')
admin_session.assert_icommand("iadmin addchildtoresc demoResc unixA")
admin_session.assert_icommand("iadmin addchildtoresc demoResc w_pt")
admin_session.assert_icommand("iadmin addchildtoresc w_pt unixB")
super(Test_Resource_WeightedPassthru, self).setUp()
def tearDown(self):
super(Test_Resource_WeightedPassthru, self).tearDown()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin rmchildfromresc w_pt unixB")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc w_pt")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc unixA")
admin_session.assert_icommand("iadmin rmresc unixB")
admin_session.assert_icommand("iadmin rmresc unixA")
admin_session.assert_icommand("iadmin rmresc demoResc")
admin_session.assert_icommand("iadmin rmresc w_pt")
admin_session.assert_icommand("iadmin modresc origResc name demoResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
shutil.rmtree(lib.get_irods_top_level_dir() + "/unixBVault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/unixAVault", ignore_errors=True)
def test_weighted_passthrough(self):
filename = "some_local_file.txt"
filepath = lib.create_local_testfile(filename)
self.admin.assert_icommand("iput " + filepath)
self.admin.assert_icommand("ils -L", 'STDOUT_SINGLELINE', "local")
# repave a copy in the vault to differentiate
vaultpath = os.path.join(lib.get_irods_top_level_dir(), "unixBVault/home/" + self.admin.username, os.path.basename(self.admin._session_id), filename)
subprocess.check_call("echo 'THISISBROEKN' | cat > %s" % (vaultpath), shell=True)
self.admin.assert_icommand("iadmin modresc w_pt context 'write=1.0;read=2.0'")
self.admin.assert_icommand("iget " + filename + " - ", 'STDOUT_SINGLELINE', "THISISBROEKN")
self.admin.assert_icommand("iadmin modresc w_pt context 'write=1.0;read=0.01'")
self.admin.assert_icommand("iget " + filename + " - ", 'STDOUT_SINGLELINE', "TESTFILE")
def test_weighted_passthrough__2789(self):
### write=1.0;read=1.0
self.admin.assert_icommand("iadmin modresc w_pt context 'write=1.0;read=1.0'")
filename = "some_local_file_A.txt"
filepath = lib.create_local_testfile(filename)
self.admin.assert_icommand('icp {0} {1}'.format(self.testfile, filename))
self.admin.assert_icommand("ils -L", 'STDOUT_SINGLELINE', ['unixA', filename])
self.admin.assert_icommand("ils -L", 'STDOUT_SINGLELINE', ['unixB', filename])
self.admin.assert_icommand("irm -f " + filename)
self.admin.assert_icommand("iput " + filepath)
self.admin.assert_icommand("ils -L", 'STDOUT_SINGLELINE', ['unixA', filename])
self.admin.assert_icommand("ils -L", 'STDOUT_SINGLELINE', ['unixB', filename])
# repave a copy in the vault to differentiate
vaultpath = os.path.join(lib.get_irods_top_level_dir(), "unixBVault/home/" + self.admin.username, os.path.basename(self.admin._session_id), filename)
subprocess.check_call("echo 'THISISBROEKN' | cat > %s" % (vaultpath), shell=True)
self.admin.assert_icommand("iadmin modresc w_pt context 'write=1.0;read=2.0'")
self.admin.assert_icommand("iget " + filename + " - ", 'STDOUT_SINGLELINE', "THISISBROEKN")
self.admin.assert_icommand("iadmin modresc w_pt context 'write=1.0;read=0.01'")
self.admin.assert_icommand("iget " + filename + " - ", 'STDOUT_SINGLELINE', "TESTFILE")
self.admin.assert_icommand("irm -f " + filename)
### write=0.9;read=0.0
self.admin.assert_icommand("iadmin modresc w_pt context 'write=0.9;read=0.0'")
filename = "some_local_file_B.txt"
filepath = lib.create_local_testfile(filename)
self.admin.assert_icommand('icp {0} {1}'.format(self.testfile, filename))
self.admin.assert_icommand("ils -L", 'STDOUT_SINGLELINE', ['unixA', filename])
self.admin.assert_icommand("ils -L", 'STDOUT_SINGLELINE', ['unixB', filename])
self.admin.assert_icommand("irm -f " + filename)
self.admin.assert_icommand("iput " + filepath)
self.admin.assert_icommand("ils -L", 'STDOUT_SINGLELINE', ['unixA', filename])
self.admin.assert_icommand("ils -L", 'STDOUT_SINGLELINE', ['unixB', filename])
# repave a copy in the vault to differentiate
vaultpath = os.path.join(lib.get_irods_top_level_dir(), "unixBVault/home/" + self.admin.username, os.path.basename(self.admin._session_id), filename)
subprocess.check_call("echo 'THISISBROEKN' | cat > %s" % (vaultpath), shell=True)
self.admin.assert_icommand("iadmin modresc w_pt context 'write=1.0;read=2.0'")
self.admin.assert_icommand("iget " + filename + " - ", 'STDOUT_SINGLELINE', "THISISBROEKN")
self.admin.assert_icommand("iadmin modresc w_pt context 'write=1.0;read=0.01'")
self.admin.assert_icommand("iget " + filename + " - ", 'STDOUT_SINGLELINE', "TESTFILE")
self.admin.assert_icommand("irm -f " + filename)
class Test_Resource_Deferred(ChunkyDevTest, ResourceSuite, unittest.TestCase):
def setUp(self):
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin modresc demoResc name origResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
admin_session.assert_icommand("iadmin mkresc demoResc deferred", 'STDOUT_SINGLELINE', 'deferred')
admin_session.assert_icommand("iadmin mkresc unix1Resc 'unixfilesystem' " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/unix1RescVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin addchildtoresc demoResc unix1Resc")
super(Test_Resource_Deferred, self).setUp()
def tearDown(self):
super(Test_Resource_Deferred, self).tearDown()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin rmchildfromresc demoResc unix1Resc")
admin_session.assert_icommand("iadmin rmresc unix1Resc")
admin_session.assert_icommand("iadmin rmresc demoResc")
admin_session.assert_icommand("iadmin modresc origResc name demoResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
shutil.rmtree(lib.get_irods_top_level_dir() + "/unix1RescVault", ignore_errors=True)
@unittest.skip("EMPTY_RESC_PATH - no vault path for coordinating resources")
def test_ireg_as_rodsuser_in_vault(self):
pass
class Test_Resource_Random(ChunkyDevTest, ResourceSuite, unittest.TestCase):
def setUp(self):
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin modresc demoResc name origResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
admin_session.assert_icommand("iadmin mkresc demoResc random", 'STDOUT_SINGLELINE', 'random')
admin_session.assert_icommand("iadmin mkresc unix1Resc 'unixfilesystem' " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/unix1RescVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc unix2Resc 'unixfilesystem' " + configuration.HOSTNAME_2 + ":" +
lib.get_irods_top_level_dir() + "/unix2RescVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc unix3Resc 'unixfilesystem' " + configuration.HOSTNAME_3 + ":" +
lib.get_irods_top_level_dir() + "/unix3RescVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin addchildtoresc demoResc unix1Resc")
admin_session.assert_icommand("iadmin addchildtoresc demoResc unix2Resc")
admin_session.assert_icommand("iadmin addchildtoresc demoResc unix3Resc")
super(Test_Resource_Random, self).setUp()
def tearDown(self):
super(Test_Resource_Random, self).tearDown()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin rmchildfromresc demoResc unix3Resc")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc unix2Resc")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc unix1Resc")
admin_session.assert_icommand("iadmin rmresc unix3Resc")
admin_session.assert_icommand("iadmin rmresc unix2Resc")
admin_session.assert_icommand("iadmin rmresc unix1Resc")
admin_session.assert_icommand("iadmin rmresc demoResc")
admin_session.assert_icommand("iadmin modresc origResc name demoResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
shutil.rmtree(lib.get_irods_top_level_dir() + "/unix1RescVault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/unix2RescVault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/unix3RescVault", ignore_errors=True)
@unittest.skip("EMPTY_RESC_PATH - no vault path for coordinating resources")
def test_ireg_as_rodsuser_in_vault(self):
pass
class Test_Resource_NonBlocking(ChunkyDevTest, ResourceSuite, unittest.TestCase):
def setUp(self):
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin modresc demoResc name origResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
admin_session.assert_icommand("iadmin mkresc demoResc nonblocking " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/nbVault", 'STDOUT_SINGLELINE', 'nonblocking')
super(Test_Resource_NonBlocking, self).setUp()
def tearDown(self):
super(Test_Resource_NonBlocking, self).tearDown()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin rmresc demoResc")
admin_session.assert_icommand("iadmin modresc origResc name demoResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
class Test_Resource_CompoundWithMockarchive(ChunkyDevTest, ResourceSuite, unittest.TestCase):
def setUp(self):
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin modresc demoResc name origResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
admin_session.assert_icommand("iadmin mkresc demoResc compound", 'STDOUT_SINGLELINE', 'compound')
admin_session.assert_icommand("iadmin mkresc cacheResc 'unixfilesystem' " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/cacheRescVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc archiveResc mockarchive " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/archiveRescVault univMSSInterface.sh", 'STDOUT_SINGLELINE', 'mockarchive')
admin_session.assert_icommand("iadmin addchildtoresc demoResc cacheResc cache")
admin_session.assert_icommand("iadmin addchildtoresc demoResc archiveResc archive")
super(Test_Resource_CompoundWithMockarchive, self).setUp()
def tearDown(self):
super(Test_Resource_CompoundWithMockarchive, self).tearDown()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin rmchildfromresc demoResc archiveResc")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc cacheResc")
admin_session.assert_icommand("iadmin rmresc archiveResc")
admin_session.assert_icommand("iadmin rmresc cacheResc")
admin_session.assert_icommand("iadmin rmresc demoResc")
admin_session.assert_icommand("iadmin modresc origResc name demoResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
shutil.rmtree(lib.get_irods_top_level_dir() + "/archiveRescVault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/cacheRescVault", ignore_errors=True)
def test_irm_specific_replica(self):
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', self.testfile) # should be listed
self.admin.assert_icommand("irepl -R " + self.testresc + " " + self.testfile) # creates replica
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', self.testfile) # should be listed twice
self.admin.assert_icommand("irm -n 0 " + self.testfile) # remove original from cacheResc only
# replica 2 should still be there
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', ["2 " + self.testresc, self.testfile])
self.admin.assert_icommand_fail("ils -L " + self.testfile, 'STDOUT_SINGLELINE',
["0 " + self.admin.default_resource, self.testfile]) # replica 0 should be gone
trashpath = "/" + self.admin.zone_name + "/trash/home/" + self.admin.username + \
"/" + self.admin._session_id
self.admin.assert_icommand_fail("ils -L " + trashpath + "/" + self.testfile, 'STDOUT_SINGLELINE',
["0 " + self.admin.default_resource, self.testfile]) # replica should not be in trash
@unittest.skip("--wlock has possible race condition due to Compound/Replication PDMO")
def test_local_iput_collision_with_wlock(self):
pass
@unittest.skip("NOTSURE / FIXME ... -K not supported, perhaps")
def test_local_iput_checksum(self):
pass
@unittest.skip("EMPTY_RESC_PATH - no vault path for coordinating resources")
def test_ireg_as_rodsuser_in_vault(self):
pass
def test_local_iput_with_force_and_destination_resource__ticket_1706(self):
# local setup
filename = "iputwithforceanddestination.txt"
filepath = lib.create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
# should not be listed
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist")
self.admin.assert_icommand("iput " + filename) # put file
# replicate to test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) #
# overwrite test repl with different data
self.admin.assert_icommand("iput -f -R %s %s %s" % (self.testresc, doublefile, filename))
# default resource cache should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + filename])
# default resource archive should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + filename])
# default resource cache should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + doublesize + " ", " " + filename])
# default resource archive should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + doublesize + " ", " " + filename])
# targeted resource should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " " + doublesize + " ", "& " + filename])
# local cleanup
os.remove(filepath)
os.remove(doublefile)
###################
# irepl
###################
def test_irepl_update_replicas(self):
# local setup
filename = "updatereplicasfile.txt"
filepath = lib.create_local_testfile(filename)
hostname = lib.get_hostname()
hostuser = getpass.getuser()
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("iadmin mkresc thirdresc unixfilesystem %s:/tmp/%s/thirdrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create third resource
self.admin.assert_icommand("iadmin mkresc fourthresc unixfilesystem %s:/tmp/%s/fourthrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create fourth resource
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
# replicate to test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# replicate to third resource
self.admin.assert_icommand("irepl -R thirdresc " + filename)
# replicate to fourth resource
self.admin.assert_icommand("irepl -R fourthresc " + filename)
# repave overtop test resource
self.admin.assert_icommand("iput -f -R " + self.testresc + " " + doublefile + " " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
self.admin.assert_icommand("irepl -U " + filename) # update last replica
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
self.admin.assert_icommand("irepl -aU " + filename) # update all replicas
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
self.admin.assert_icommand("irm -f " + filename) # cleanup file
self.admin.assert_icommand("iadmin rmresc thirdresc") # remove third resource
self.admin.assert_icommand("iadmin rmresc fourthresc") # remove third resource
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_irepl_over_existing_second_replica__ticket_1705(self):
# local setup
filename = "secondreplicatest.txt"
filepath = lib.create_local_testfile(filename)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput -R " + self.testresc + " " + filename) # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# replicate to default resource
self.admin.assert_icommand("irepl " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# replicate overtop default resource
self.admin.assert_icommand("irepl " + filename)
# should not have a replica 3
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# replicate overtop test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# should not have a replica 3
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# local cleanup
os.remove(filepath)
def test_irepl_over_existing_third_replica__ticket_1705(self):
# local setup
filename = "thirdreplicatest.txt"
filepath = lib.create_local_testfile(filename)
hostname = lib.get_hostname()
hostuser = getpass.getuser()
# assertions
self.admin.assert_icommand("iadmin mkresc thirdresc unixfilesystem %s:/tmp/%s/thirdrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create third resource
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("irepl -R thirdresc " + filename) # replicate to third resource
self.admin.assert_icommand("irepl " + filename) # replicate overtop default resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate overtop test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R thirdresc " + filename) # replicate overtop third resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# should not have a replica 4
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# should not have a replica 5
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
self.admin.assert_icommand("irm -f " + filename) # cleanup file
self.admin.assert_icommand("iadmin rmresc thirdresc") # remove third resource
# local cleanup
os.remove(filepath)
def test_irepl_over_existing_bad_replica__ticket_1705(self):
# local setup
filename = "reploverwritebad.txt"
filepath = lib.create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# overwrite default repl with different data
self.admin.assert_icommand("iput -f %s %s" % (doublefile, filename))
# default resource cache should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# default resource cache should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + doublesize + " ", " & " + filename])
# default resource archive should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# default resource archive should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + doublesize + " ", " & " + filename])
# test resource should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE',
[" 2 " + self.testresc, " " + doublesize + " ", " " + filename])
# replicate back onto test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# test resource should have new clean doublesize file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE',
[" 2 " + self.testresc, " " + doublesize + " ", " & " + filename])
# should not have a replica 3
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_iput_with_purgec(self):
# local setup
filename = "purgecfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput --purgec " + filename) # put file
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
# should be listed once - replica 1
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename])
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # should be listed only once
# local cleanup
os.remove(filepath)
def test_iget_with_purgec(self):
# local setup
filename = "purgecgetfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("iget -f --purgec " + filename) # get file and purge 'cached' replica
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # should be listed once
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # should not be listed
# local cleanup
os.remove(filepath)
def test_irepl_with_purgec(self):
# local setup
filename = "purgecreplfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " --purgec " + filename) # replicate to test resource
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # should be listed twice - 2 of 3
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # should be listed twice - 1 of 3
# local cleanup
os.remove(filepath)
class Test_Resource_CompoundWithUnivmss(ChunkyDevTest, ResourceSuite, unittest.TestCase):
def setUp(self):
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin modresc demoResc name origResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
admin_session.assert_icommand("iadmin mkresc demoResc compound", 'STDOUT_SINGLELINE', 'compound')
admin_session.assert_icommand("iadmin mkresc cacheResc 'unixfilesystem' " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/cacheRescVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc archiveResc univmss " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/archiveRescVault univMSSInterface.sh", 'STDOUT_SINGLELINE', 'univmss')
admin_session.assert_icommand("iadmin addchildtoresc demoResc cacheResc cache")
admin_session.assert_icommand("iadmin addchildtoresc demoResc archiveResc archive")
super(Test_Resource_CompoundWithUnivmss, self).setUp()
def tearDown(self):
super(Test_Resource_CompoundWithUnivmss, self).tearDown()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin rmchildfromresc demoResc archiveResc")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc cacheResc")
admin_session.assert_icommand("iadmin rmresc archiveResc")
admin_session.assert_icommand("iadmin rmresc cacheResc")
admin_session.assert_icommand("iadmin rmresc demoResc")
admin_session.assert_icommand("iadmin modresc origResc name demoResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
shutil.rmtree(lib.get_irods_top_level_dir() + "/archiveRescVault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/cacheRescVault", ignore_errors=True)
def test_irm_specific_replica(self):
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', self.testfile) # should be listed
self.admin.assert_icommand("irepl -R " + self.testresc + " " + self.testfile) # creates replica
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', self.testfile) # should be listed twice
self.admin.assert_icommand("irm -n 0 " + self.testfile) # remove original from cacheResc only
# replica 2 should still be there
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', ["2 " + self.testresc, self.testfile])
self.admin.assert_icommand_fail("ils -L " + self.testfile, 'STDOUT_SINGLELINE',
["0 " + self.admin.default_resource, self.testfile]) # replica 0 should be gone
trashpath = "/" + self.admin.zone_name + "/trash/home/" + self.admin.username + \
"/" + self.admin._session_id
self.admin.assert_icommand_fail("ils -L " + trashpath + "/" + self.testfile, 'STDOUT_SINGLELINE',
["0 " + self.admin.default_resource, self.testfile]) # replica should not be in trash
@unittest.skip("--wlock has possible race condition due to Compound/Replication PDMO")
def test_local_iput_collision_with_wlock(self):
pass
@unittest.skip("EMPTY_RESC_PATH - no vault path for coordinating resources")
def test_ireg_as_rodsuser_in_vault(self):
pass
def test_local_iput_with_force_and_destination_resource__ticket_1706(self):
# local setup
filename = "iputwithforceanddestination.txt"
filepath = lib.create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
# should not be listed
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist")
self.admin.assert_icommand("iput " + filename) # put file
# replicate to test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) #
# overwrite test repl with different data
self.admin.assert_icommand("iput -f -R %s %s %s" % (self.testresc, doublefile, filename))
# default resource cache should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + filename])
# default resource archive should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + filename])
# default resource cache should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + doublesize + " ", " " + filename])
# default resource archive should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + doublesize + " ", " " + filename])
# targeted resource should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " " + doublesize + " ", "& " + filename])
# local cleanup
os.remove(filepath)
os.remove(doublefile)
###################
# irepl
###################
def test_irepl_update_replicas(self):
# local setup
filename = "updatereplicasfile.txt"
filepath = lib.create_local_testfile(filename)
hostname = lib.get_hostname()
hostuser = getpass.getuser()
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("iadmin mkresc thirdresc unixfilesystem %s:/tmp/%s/thirdrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create third resource
self.admin.assert_icommand("iadmin mkresc fourthresc unixfilesystem %s:/tmp/%s/fourthrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create fourth resource
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
# replicate to test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# replicate to third resource
self.admin.assert_icommand("irepl -R thirdresc " + filename)
# replicate to fourth resource
self.admin.assert_icommand("irepl -R fourthresc " + filename)
# repave overtop test resource
self.admin.assert_icommand("iput -f -R " + self.testresc + " " + doublefile + " " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
self.admin.assert_icommand("irepl -U " + filename) # update last replica
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
self.admin.assert_icommand("irepl -aU " + filename) # update all replicas
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
self.admin.assert_icommand("irm -f " + filename) # cleanup file
self.admin.assert_icommand("iadmin rmresc thirdresc") # remove third resource
self.admin.assert_icommand("iadmin rmresc fourthresc") # remove third resource
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_irepl_over_existing_second_replica__ticket_1705(self):
# local setup
filename = "secondreplicatest.txt"
filepath = lib.create_local_testfile(filename)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput -R " + self.testresc + " " + filename) # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# replicate to default resource
self.admin.assert_icommand("irepl " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# replicate overtop default resource
self.admin.assert_icommand("irepl " + filename)
# should not have a replica 3
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# replicate overtop test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# should not have a replica 3
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# local cleanup
os.remove(filepath)
def test_irepl_over_existing_third_replica__ticket_1705(self):
# local setup
filename = "thirdreplicatest.txt"
filepath = lib.create_local_testfile(filename)
hostname = lib.get_hostname()
hostuser = getpass.getuser()
# assertions
self.admin.assert_icommand("iadmin mkresc thirdresc unixfilesystem %s:/tmp/%s/thirdrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create third resource
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("irepl -R thirdresc " + filename) # replicate to third resource
self.admin.assert_icommand("irepl " + filename) # replicate overtop default resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate overtop test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R thirdresc " + filename) # replicate overtop third resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# should not have a replica 4
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# should not have a replica 5
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
self.admin.assert_icommand("irm -f " + filename) # cleanup file
self.admin.assert_icommand("iadmin rmresc thirdresc") # remove third resource
# local cleanup
os.remove(filepath)
def test_irepl_over_existing_bad_replica__ticket_1705(self):
# local setup
filename = "reploverwritebad.txt"
filepath = lib.create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# overwrite default repl with different data
self.admin.assert_icommand("iput -f %s %s" % (doublefile, filename))
# default resource cache should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# default resource cache should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + doublesize + " ", " & " + filename])
# default resource archive should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# default resource archive should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + doublesize + " ", " & " + filename])
# test resource should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE',
[" 2 " + self.testresc, " " + doublesize + " ", " " + filename])
# replicate back onto test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# test resource should have new clean doublesize file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE',
[" 2 " + self.testresc, " " + doublesize + " ", " & " + filename])
# should not have a replica 3
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# local cleanup
os.remove(filepath)
os.remove(doublefile)
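# The --purgec tests below rely on purge-on-completion behavior: replica 0 is
# trimmed once the operation finishes, so only the remaining replicas should
# show up in `ils -L`.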
def test_iput_with_purgec(self):
# local setup
filename = "purgecfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput --purgec " + filename) # put file
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
# should be listed once - replica 1
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename])
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # should be listed only once
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_iget_with_purgec(self):
# local setup
filename = "purgecgetfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("iget -f --purgec " + filename) # get file and purge 'cached' replica
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # should be listed once
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # should not be listed
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_irepl_with_purgec(self):
# local setup
filename = "purgecreplfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " --purgec " + filename) # replicate to test resource
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # should be listed twice - 2 of 3
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # should be listed twice - 1 of 3
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
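# Test suite for a compound resource: demoResc (compound) has cacheResc as its
# cache child and archiveResc as its archive child, so a fresh iput normally
# yields replica 0 on the cache and replica 1 on the archive.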
class Test_Resource_Compound(ChunkyDevTest, ResourceSuite, unittest.TestCase):
def setUp(self):
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin modresc demoResc name origResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
admin_session.assert_icommand("iadmin mkresc demoResc compound", 'STDOUT_SINGLELINE', 'compound')
admin_session.assert_icommand("iadmin mkresc cacheResc 'unixfilesystem' " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/cacheRescVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc archiveResc 'unixfilesystem' " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/archiveRescVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin addchildtoresc demoResc cacheResc cache")
admin_session.assert_icommand("iadmin addchildtoresc demoResc archiveResc archive")
super(Test_Resource_Compound, self).setUp()
def tearDown(self):
super(Test_Resource_Compound, self).tearDown()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin rmchildfromresc demoResc archiveResc")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc cacheResc")
admin_session.assert_icommand("iadmin rmresc archiveResc")
admin_session.assert_icommand("iadmin rmresc cacheResc")
admin_session.assert_icommand("iadmin rmresc demoResc")
admin_session.assert_icommand("iadmin modresc origResc name demoResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
shutil.rmtree(lib.get_irods_top_level_dir() + "/archiveRescVault", ignore_errors=True)
shutil.rmtree("rm -rf " + lib.get_irods_top_level_dir() + "/cacheRescVault", ignore_errors=True)
def test_irm_specific_replica(self):
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', self.testfile) # should be listed
self.admin.assert_icommand("irepl -R " + self.testresc + " " + self.testfile) # creates replica
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', self.testfile) # should be listed twice
self.admin.assert_icommand("irm -n 0 " + self.testfile) # remove original from cacheResc only
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', ["2 " + self.testresc, self.testfile])
self.admin.assert_icommand_fail("ils -L " + self.testfile, 'STDOUT_SINGLELINE',
["0 " + self.admin.default_resource, self.testfile]) # replica 0 should be gone
trashpath = "/" + self.admin.zone_name + "/trash/home/" + self.admin.username + \
"/" + self.admin._session_id
self.admin.assert_icommand_fail("ils -L " + trashpath + "/" + self.testfile, 'STDOUT_SINGLELINE',
["0 " + self.admin.default_resource, self.testfile]) # replica should not be in trash
@unittest.skip("--wlock has possible race condition due to Compound/Replication PDMO")
def test_local_iput_collision_with_wlock(self):
pass
@unittest.skip("EMPTY_RESC_PATH - no vault path for coordinating resources")
def test_ireg_as_rodsuser_in_vault(self):
pass
@unittest.skip("TEMPORARY")
def test_iget_prefer_from_archive__ticket_1660(self):
# define core.re filepath
corefile = lib.get_core_re_dir() + "/core.re"
backupcorefile = corefile + "--" + self._testMethodName
# new file to put and get
filename = "archivepolicyfile.txt"
filepath = lib.create_local_testfile(filename)
# manipulate core.re (leave as 'when_necessary' - default)
# put the file
self.admin.assert_icommand("iput " + filename) # put file
# manually update the replica in archive vault
output = self.admin.run_icommand('ils -L ' + filename)
archivereplicaphypath = output[1].split()[-1] # split into tokens, get the last one
with open(archivereplicaphypath, 'w') as f:
f.write('MANUALLY UPDATED ON ARCHIVE\n')
# get file
retrievedfile = "retrieved.txt"
os.system("rm -f %s" % retrievedfile)
self.admin.assert_icommand("iget -f %s %s" % (filename, retrievedfile)) # get file from cache
# confirm retrieved file is same as original
assert 0 == os.system("diff %s %s" % (filepath, retrievedfile))
# manipulate the core.re to add the new policy
shutil.copy(corefile, backupcorefile)
with open(corefile, 'a') as f:
f.write('pep_resource_resolve_hierarchy_pre(*OUT){*OUT="compound_resource_cache_refresh_policy=always";}\n')
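# The rule above sets compound_resource_cache_refresh_policy=always, which
# tells the compound resource to re-stage the archive copy into the cache on
# every open, so the next iget should see the manually modified archive data.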
# restart the server to reread the new core.re
os.system(lib.get_irods_top_level_dir() + "/iRODS/irodsctl stop")
os.system(lib.get_irods_top_level_dir() + "/tests/zombiereaper.sh")
os.system(lib.get_irods_top_level_dir() + "/iRODS/irodsctl start")
# manually update the replica in archive vault
output = self.admin.run_icommand('ils -L ' + filename)
archivereplicaphypath = output[1].split()[-1] # split into tokens, get the last one
with open(archivereplicaphypath, 'w') as f:
f.write('MANUALLY UPDATED ON ARCHIVE **AGAIN**\n')
# get the file
self.admin.assert_icommand("iget -f %s %s" % (filename, retrievedfile)) # get file from archive
# confirm this is the new archive file
matchfound = False
with open(retrievedfile) as f:
for line in f:
if "**AGAIN**" in line:
matchfound = True
assert matchfound
# restore the original core.re
shutil.copy(backupcorefile, corefile)
os.remove(backupcorefile)
# local cleanup
os.remove(filepath)
os.remove(retrievedfile)
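# iput -f -R overwrites only the replica on the targeted resource; the other
# replicas keep the old (smaller) content and lose their clean ("&") status,
# which the size and "&" assertions below verify.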
def test_local_iput_with_force_and_destination_resource__ticket_1706(self):
# local setup
filename = "iputwithforceanddestination.txt"
filepath = lib.create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
# should not be listed
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist")
self.admin.assert_icommand("iput " + filename) # put file
# replicate to test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# debugging
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename)
# overwrite test repl with different data
self.admin.assert_icommand("iput -f -R %s %s %s" % (self.testresc, doublefile, filename))
# default resource cache should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + filename])
# default resource archive should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + filename])
# default resource cache should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + doublesize + " ", " " + filename])
# default resource archive should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + doublesize + " ", " " + filename])
# targeted resource should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " " + doublesize + " ", "& " + filename])
# local cleanup
os.remove(filepath)
os.remove(doublefile)
###################
# irepl
###################
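# irepl -U brings one stale replica up to date from the current clean copy,
# and irepl -aU updates all stale replicas at once; the assertions below walk
# through both cases replica by replica.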
def test_irepl_update_replicas(self):
# local setup
filename = "updatereplicasfile.txt"
filepath = lib.create_local_testfile(filename)
hostname = lib.get_hostname()
hostuser = getpass.getuser()
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("iadmin mkresc thirdresc unixfilesystem %s:/tmp/%s/thirdrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create third resource
self.admin.assert_icommand("iadmin mkresc fourthresc unixfilesystem %s:/tmp/%s/fourthrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create fourth resource
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
# replicate to test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# replicate to third resource
self.admin.assert_icommand("irepl -R thirdresc " + filename)
# replicate to fourth resource
self.admin.assert_icommand("irepl -R fourthresc " + filename)
# repave overtop test resource
self.admin.assert_icommand("iput -f -R " + self.testresc + " " + doublefile + " " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
self.admin.assert_icommand("irepl -U " + filename) # update last replica
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
self.admin.assert_icommand("irepl -aU " + filename) # update all replicas
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
self.admin.assert_icommand("irm -f " + filename) # cleanup file
self.admin.assert_icommand("iadmin rmresc thirdresc") # remove third resource
self.admin.assert_icommand("iadmin rmresc fourthresc") # remove third resource
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_irepl_over_existing_second_replica__ticket_1705(self):
# local setup
filename = "secondreplicatest.txt"
filepath = lib.create_local_testfile(filename)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput -R " + self.testresc + " " + filename) # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# replicate to default resource
self.admin.assert_icommand("irepl " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# replicate overtop default resource
self.admin.assert_icommand("irepl " + filename)
# should not have a replica 3
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# replicate overtop test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# should not have a replica 3
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# local cleanup
os.remove(filepath)
def test_irepl_over_existing_third_replica__ticket_1705(self):
# local setup
filename = "thirdreplicatest.txt"
filepath = lib.create_local_testfile(filename)
hostname = lib.get_hostname()
hostuser = getpass.getuser()
# assertions
self.admin.assert_icommand("iadmin mkresc thirdresc unixfilesystem %s:/tmp/%s/thirdrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create third resource
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("irepl -R thirdresc " + filename) # replicate to third resource
self.admin.assert_icommand("irepl " + filename) # replicate overtop default resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate overtop test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R thirdresc " + filename) # replicate overtop third resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# should not have a replica 4
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# should not have a replica 5
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
self.admin.assert_icommand("irm -f " + filename) # cleanup file
self.admin.assert_icommand("iadmin rmresc thirdresc") # remove third resource
# local cleanup
os.remove(filepath)
def test_irepl_over_existing_bad_replica__ticket_1705(self):
# local setup
filename = "reploverwritebad.txt"
filepath = lib.create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# overwrite default repl with different data
self.admin.assert_icommand("iput -f %s %s" % (doublefile, filename))
# default resource cache should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# default resource cache should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + doublesize + " ", " & " + filename])
# default resource archive should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# default resource archive should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + doublesize + " ", " & " + filename])
# test resource should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE',
[" 2 " + self.testresc, " " + doublesize + " ", " " + filename])
# replicate back onto test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# test resource should have new clean doublesize file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE',
[" 2 " + self.testresc, " " + doublesize + " ", " & " + filename])
# should not have a replica 3
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_iput_with_purgec(self):
# local setup
filename = "purgecfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput --purgec " + filename) # put file
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
# should be listed once - replica 1
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename])
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # should be listed only once
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_iget_with_purgec(self):
# local setup
filename = "purgecgetfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should be listed
self.admin.assert_icommand("iget -f --purgec " + filename) # get file and purge 'cached' replica
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # should be listed once
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # should not be listed
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_irepl_with_purgec(self):
# local setup
filename = "purgecreplfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " --purgec " + filename) # replicate to test resource
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # should be listed twice - 2 of 3
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # should be listed twice - 1 of 3
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
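# Test suite for nested replication: demoResc (replication) has children unixA
# and replResc, and replResc (replication) has children unixB1 and unixB2, so
# a single iput fans out to three replicas (0, 1, 2).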
class Test_Resource_ReplicationWithinReplication(ChunkyDevTest, ResourceSuite, unittest.TestCase):
def setUp(self):
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin modresc demoResc name origResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
admin_session.assert_icommand("iadmin mkresc demoResc replication", 'STDOUT_SINGLELINE', 'replication')
admin_session.assert_icommand("iadmin mkresc replResc replication", 'STDOUT_SINGLELINE', 'replication')
admin_session.assert_icommand("iadmin mkresc unixA 'unixfilesystem' " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/unixAVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc unixB1 'unixfilesystem' " + configuration.HOSTNAME_2 + ":" +
lib.get_irods_top_level_dir() + "/unixB1Vault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc unixB2 'unixfilesystem' " + configuration.HOSTNAME_3 + ":" +
lib.get_irods_top_level_dir() + "/unixB2Vault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin addchildtoresc demoResc replResc")
admin_session.assert_icommand("iadmin addchildtoresc demoResc unixA")
admin_session.assert_icommand("iadmin addchildtoresc replResc unixB1")
admin_session.assert_icommand("iadmin addchildtoresc replResc unixB2")
super(Test_Resource_ReplicationWithinReplication, self).setUp()
def tearDown(self):
super(Test_Resource_ReplicationWithinReplication, self).tearDown()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin rmchildfromresc replResc unixB2")
admin_session.assert_icommand("iadmin rmchildfromresc replResc unixB1")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc unixA")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc replResc")
admin_session.assert_icommand("iadmin rmresc unixB1")
admin_session.assert_icommand("iadmin rmresc unixB2")
admin_session.assert_icommand("iadmin rmresc unixA")
admin_session.assert_icommand("iadmin rmresc replResc")
admin_session.assert_icommand("iadmin rmresc demoResc")
admin_session.assert_icommand("iadmin modresc origResc name demoResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
shutil.rmtree(lib.get_irods_top_level_dir() + "/unixB2Vault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/unixB1Vault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/unixAVault", ignore_errors=True)
def test_iget_with_purgec(self):
# local setup
filename = "purgecgetfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("iget -f --purgec " + filename) # get file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename]) # replica 0 should be trimmed
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # replica 1 should be listed
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # replica 2 should be listed
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_iput_with_purgec(self):
# local setup
filename = "purgecfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput --purgec " + filename) # put file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename]) # replica 0 should be trimmed
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # replica 1 should be listed
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # replica 2 should be listed
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_irepl_with_purgec(self):
# local setup
filename = "purgecreplfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " --purgec " + filename) # replicate to test resource
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename]) # replica 0 should be trimmed
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # replica 1 should be listed
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # replica 2 should be listed
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", filename]) # replica 2 should be listed
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
@unittest.skip("--wlock has possible race condition due to Compound/Replication PDMO")
def test_local_iput_collision_with_wlock(self):
pass
@unittest.skip("EMPTY_RESC_PATH - no vault path for coordinating resources")
def test_ireg_as_rodsuser_in_vault(self):
pass
def test_irepl_over_existing_bad_replica__ticket_1705(self):
# local setup
filename = "reploverwritebad.txt"
filepath = lib.create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# overwrite default repl with different data
self.admin.assert_icommand("iput -f %s %s" % (doublefile, filename))
# default resource should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# default resource should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + doublesize + " ", " & " + filename])
# default resource should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# default resource should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + doublesize + " ", " & " + filename])
# default resource should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# default resource should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " " + doublesize + " ", " & " + filename])
# test resource should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE',
[" 3 " + self.testresc, " " + doublesize + " ", " " + filename])
# replicate back onto test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# test resource should have new clean doublesize file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE',
[" 3 " + self.testresc, " " + doublesize + " ", " & " + filename])
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", filename]) # should not have a replica 4
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_irepl_over_existing_second_replica__ticket_1705(self):
# local setup
filename = "secondreplicatest.txt"
filepath = lib.create_local_testfile(filename)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput -R " + self.testresc + " " + filename) # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl " + filename) # replicate to default resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl " + filename) # replicate overtop default resource
# should not have a replica 4
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate overtop test resource
# should not have a replica 4
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# local cleanup
os.remove(filepath)
def test_irepl_over_existing_third_replica__ticket_1705(self):
# local setup
filename = "thirdreplicatest.txt"
filepath = lib.create_local_testfile(filename)
hostname = lib.get_hostname()
hostuser = getpass.getuser()
# assertions
self.admin.assert_icommand("iadmin mkresc thirdresc unixfilesystem %s:/tmp/%s/thirdrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create third resource
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("irepl -R thirdresc " + filename) # replicate to third resource
self.admin.assert_icommand("irepl " + filename) # replicate overtop default resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate overtop test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R thirdresc " + filename) # replicate overtop third resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# should not have a replica 5
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
self.admin.assert_icommand("irm -f " + filename) # cleanup file
self.admin.assert_icommand("iadmin rmresc thirdresc") # remove third resource
# local cleanup
os.remove(filepath)
def test_irepl_update_replicas(self):
# local setup
filename = "updatereplicasfile.txt"
filepath = lib.create_local_testfile(filename)
hostname = lib.get_hostname()
hostuser = getpass.getuser()
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("iadmin mkresc thirdresc unixfilesystem %s:/tmp/%s/thirdrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create third resource
self.admin.assert_icommand("iadmin mkresc fourthresc unixfilesystem %s:/tmp/%s/fourthrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create fourth resource
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
# replicate to test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# replicate to third resource
self.admin.assert_icommand("irepl -R thirdresc " + filename)
# replicate to fourth resource
self.admin.assert_icommand("irepl -R fourthresc " + filename)
# repave overtop test resource
self.admin.assert_icommand("iput -f -R " + self.testresc + " " + doublefile + " " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
self.admin.assert_icommand("irepl -U " + filename) # update last replica
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
self.admin.assert_icommand("irepl -aU " + filename) # update all replicas
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
self.admin.assert_icommand("irm -f " + filename) # cleanup file
self.admin.assert_icommand("iadmin rmresc thirdresc") # remove third resource
self.admin.assert_icommand("iadmin rmresc fourthresc") # remove third resource
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_irm_specific_replica(self):
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', self.testfile) # should be listed
self.admin.assert_icommand("irepl -R " + self.testresc + " " + self.testfile) # creates replica
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', self.testfile) # should be listed twice
self.admin.assert_icommand("irm -n 0 " + self.testfile) # remove original from grid
# replica 1 should be there
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', ["1 ", self.testfile])
# replica 2 should be there
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', ["2 ", self.testfile])
# replica 3 should be there
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', ["3 " + self.testresc, self.testfile])
self.admin.assert_icommand_fail("ils -L " + self.testfile, 'STDOUT_SINGLELINE',
["0 " + self.admin.default_resource, self.testfile]) # replica 0 should be gone
trashpath = "/" + self.admin.zone_name + "/trash/home/" + self.admin.username + \
"/" + self.admin._session_id
self.admin.assert_icommand_fail("ils -L " + trashpath + "/" + self.testfile, 'STDOUT_SINGLELINE',
["0 " + self.admin.default_resource, self.testfile]) # replica should not be in trash
def test_local_iput_with_force_and_destination_resource__ticket_1706(self):
# local setup
filename = "iputwithforceanddestination.txt"
filepath = lib.create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename)
# overwrite test repl with different data
self.admin.assert_icommand("iput -f -R %s %s %s" % (self.testresc, doublefile, filename))
# default resource should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + filename])
# default resource should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + doublesize + " ", " " + filename])
# default resource should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + filename])
# default resource should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + doublesize + " ", " " + filename])
# default resource should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " " + filename])
# default resource should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " " + doublesize + " ", " " + filename])
# targeted resource should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " " + doublesize + " ", "& " + filename])
# local cleanup
os.remove(filepath)
os.remove(doublefile)
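# Test suite for replication over two compound resources: demoResc
# (replication) has compResc1 and compResc2 as children, each a compound with
# its own cache and archive, so a single iput yields four replicas (two cache
# copies and two archive copies).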
class Test_Resource_ReplicationToTwoCompound(ChunkyDevTest, ResourceSuite, unittest.TestCase):
def setUp(self):
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin modresc demoResc name origResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
admin_session.assert_icommand("iadmin mkresc demoResc replication", 'STDOUT_SINGLELINE', 'replication')
admin_session.assert_icommand("iadmin mkresc compResc1 compound", 'STDOUT_SINGLELINE', 'compound')
admin_session.assert_icommand("iadmin mkresc compResc2 compound", 'STDOUT_SINGLELINE', 'compound')
admin_session.assert_icommand("iadmin mkresc cacheResc1 'unixfilesystem' " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/cacheResc1Vault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc archiveResc1 'unixfilesystem' " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/archiveResc1Vault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc cacheResc2 'unixfilesystem' " + configuration.HOSTNAME_2 + ":" +
lib.get_irods_top_level_dir() + "/cacheResc2Vault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc archiveResc2 'unixfilesystem' " + configuration.HOSTNAME_2 + ":" +
lib.get_irods_top_level_dir() + "/archiveResc2Vault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin addchildtoresc demoResc compResc1")
admin_session.assert_icommand("iadmin addchildtoresc demoResc compResc2")
admin_session.assert_icommand("iadmin addchildtoresc compResc1 cacheResc1 cache")
admin_session.assert_icommand("iadmin addchildtoresc compResc1 archiveResc1 archive")
admin_session.assert_icommand("iadmin addchildtoresc compResc2 cacheResc2 cache")
admin_session.assert_icommand("iadmin addchildtoresc compResc2 archiveResc2 archive")
super(Test_Resource_ReplicationToTwoCompound, self).setUp()
def tearDown(self):
super(Test_Resource_ReplicationToTwoCompound, self).tearDown()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin rmchildfromresc compResc2 archiveResc2")
admin_session.assert_icommand("iadmin rmchildfromresc compResc2 cacheResc2")
admin_session.assert_icommand("iadmin rmchildfromresc compResc1 archiveResc1")
admin_session.assert_icommand("iadmin rmchildfromresc compResc1 cacheResc1")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc compResc2")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc compResc1")
admin_session.assert_icommand("iadmin rmresc archiveResc2")
admin_session.assert_icommand("iadmin rmresc cacheResc2")
admin_session.assert_icommand("iadmin rmresc archiveResc1")
admin_session.assert_icommand("iadmin rmresc cacheResc1")
admin_session.assert_icommand("iadmin rmresc compResc2")
admin_session.assert_icommand("iadmin rmresc compResc1")
admin_session.assert_icommand("iadmin rmresc demoResc")
admin_session.assert_icommand("iadmin modresc origResc name demoResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
shutil.rmtree(lib.get_irods_top_level_dir() + "/archiveResc1Vault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/cacheResc1Vault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/archiveResc2Vault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/cacheResc2Vault", ignore_errors=True)
def test_irm_specific_replica(self):
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', self.testfile) # should be listed
self.admin.assert_icommand("irepl -R " + self.testresc + " " + self.testfile) # creates replica
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', self.testfile) # should be listed twice
self.admin.assert_icommand("irm -n 0 " + self.testfile) # remove original from cacheResc only
# replica 2 should still be there
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', ["4 " + self.testresc, self.testfile])
self.admin.assert_icommand_fail("ils -L " + self.testfile, 'STDOUT_SINGLELINE',
["0 " + self.admin.default_resource, self.testfile]) # replica 0 should be gone
trashpath = "/" + self.admin.zone_name + "/trash/home/" + self.admin.username + \
"/" + self.admin._session_id
self.admin.assert_icommand_fail("ils -L " + trashpath + "/" + self.testfile, 'STDOUT_SINGLELINE',
["0 " + self.admin.default_resource, self.testfile]) # replica should not be in trash
@unittest.skip("--wlock has possible race condition due to Compound/Replication PDMO")
def test_local_iput_collision_with_wlock(self):
pass
@unittest.skip("EMPTY_RESC_PATH - no vault path for coordinating resources")
def test_ireg_as_rodsuser_in_vault(self):
pass
@unittest.skipIf(configuration.RUN_IN_TOPOLOGY, "Skip for Topology Testing")
def test_iget_prefer_from_archive__ticket_1660(self):
# define core.re filepath
corefile = lib.get_core_re_dir() + "/core.re"
backupcorefile = corefile + "--" + self._testMethodName
# new file to put and get
filename = "archivepolicyfile.txt"
filepath = lib.create_local_testfile(filename)
# manipulate core.re (leave as 'when_necessary' - default)
# put the file
self.admin.assert_icommand("iput " + filename) # put file
# manually update the replicas in archive vaults
stdout = self.admin.run_icommand('ils -L ' + filename)[1]
print stdout
archive1replicaphypath = stdout.split()[-19] # split into tokens, get the 19th from the end
archive2replicaphypath = stdout.split()[-1] # split into tokens, get the last one
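# (These -19/-1 token offsets assume the fixed whitespace-separated layout of
# `ils -L` output for exactly four replicas; they would break if the listing
# format or the replica count changed.)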
print archive1replicaphypath
print archive2replicaphypath
with open(archive1replicaphypath, 'w') as f:
f.write('MANUALLY UPDATED ON ARCHIVE 1\n')
with open(archive2replicaphypath, 'w') as f:
f.write('MANUALLY UPDATED ON ARCHIVE 2\n')
# get file
retrievedfile = "retrieved.txt"
os.system("rm -f %s" % retrievedfile)
self.admin.assert_icommand("iget -f %s %s" % (filename, retrievedfile)) # get file from cache
# confirm retrieved file is same as original
assert 0 == os.system("diff %s %s" % (filepath, retrievedfile))
print "original file diff confirmed"
# manipulate the core.re to add the new policy
shutil.copy(corefile, backupcorefile)
with open(corefile, 'a') as f:
f.write('pep_resource_resolve_hierarchy_pre(*OUT){*OUT="compound_resource_cache_refresh_policy=always";}\n')
# restart the server to reread the new core.re
lib.restart_irods_server()
# manually update the replicas in archive vaults
stdout = self.admin.run_icommand('ils -L ' + filename)[1]
archivereplica1phypath = stdout.split()[-19] # split into tokens, get the 19th from the end
archivereplica2phypath = stdout.split()[-1] # split into tokens, get the last one
print archivereplica1phypath
print archivereplica2phypath
with open(archivereplica1phypath, 'w') as f:
f.write('MANUALLY UPDATED ON ARCHIVE 1 **AGAIN**\n')
with open(archivereplica2phypath, 'w') as f:
f.write('MANUALLY UPDATED ON ARCHIVE 2 **AGAIN**\n')
# confirm the new content is on disk
with open(archivereplica1phypath) as f:
for line in f:
print line
with open(archivereplica2phypath) as f:
for line in f:
print line
# confirm the core file has new policy
print "----- confirm core has new policy ----"
with open(corefile) as f:
for line in f:
if 'pep_' in line:
print line
else:
print '.',
print "----- confirmation done ----"
self.admin.assert_icommand(['iget', '-f', filename, retrievedfile])
# confirm this is the new archive file
with open(retrievedfile) as f:
for line in f:
print line
if 'AGAIN' in line:
break
else:
assert False
# restore the original core.re
shutil.copy(backupcorefile, corefile)
os.remove(backupcorefile)
# local cleanup
os.remove(filepath)
def test_local_iput_with_force_and_destination_resource__ticket_1706(self):
# local setup
filename = "iputwithforceanddestination.txt"
filepath = lib.create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
# should not be listed
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist")
self.admin.assert_icommand("iput " + filename) # put file
# replicate to test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# debugging
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename)
# overwrite test repl with different data
self.admin.assert_icommand("iput -f -R %s %s %s" % (self.testresc, doublefile, filename))
# default resource cache 1 should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + filename])
# default resource archive 1 should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + filename])
# default resource cache 2 should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " " + filename])
# default resource archive 2 should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " " + filename])
# default resource cache 1 should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + doublesize + " ", " " + filename])
# default resource archive 1 should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + doublesize + " ", " " + filename])
# default resource cache 2 should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " " + doublesize + " ", " " + filename])
# default resource archive 2 should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " " + doublesize + " ", " " + filename])
# targeted resource should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " " + doublesize + " ", "& " + filename])
# local cleanup
os.remove(filepath)
os.remove(doublefile)
###################
# irepl
###################
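# Same update scenario as the replication tests above, but this hierarchy
# holds four local replicas (two caches, two archives) plus the test, third,
# and fourth resources, so the clean/dirty pattern spans replicas 0-6.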
def test_irepl_update_replicas(self):
# local setup
filename = "updatereplicasfile.txt"
filepath = lib.create_local_testfile(filename)
hostname = lib.get_hostname()
hostuser = getpass.getuser()
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("iadmin mkresc thirdresc unixfilesystem %s:/tmp/%s/thirdrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create third resource
self.admin.assert_icommand("iadmin mkresc fourthresc unixfilesystem %s:/tmp/%s/fourthrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create fourth resource
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
# replicate to test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# replicate to third resource
self.admin.assert_icommand("irepl -R thirdresc " + filename)
# replicate to fourth resource
self.admin.assert_icommand("irepl -R fourthresc " + filename)
# repave overtop test resource
self.admin.assert_icommand("iput -f -R " + self.testresc + " " + doublefile + " " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 6 ", " & " + filename])
self.admin.assert_icommand("irepl -U " + filename) # update last replica
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 6 ", " & " + filename])
self.admin.assert_icommand("irepl -aU " + filename) # update all replicas
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 6 ", " & " + filename])
self.admin.assert_icommand("irm -f " + filename) # cleanup file
self.admin.assert_icommand("iadmin rmresc thirdresc") # remove third resource
self.admin.assert_icommand("iadmin rmresc fourthresc") # remove third resource
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_irepl_over_existing_second_replica__ticket_1705(self):
# local setup
filename = "secondreplicatest.txt"
filepath = lib.create_local_testfile(filename)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput -R " + self.testresc + " " + filename) # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# replicate to default resource
self.admin.assert_icommand("irepl " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# replicate overtop default resource
self.admin.assert_icommand("irepl " + filename)
# should not have a replica 5
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
# replicate overtop test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# should not have a replica 5
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
# local cleanup
os.remove(filepath)
def test_irepl_over_existing_third_replica__ticket_1705(self):
# local setup
filename = "thirdreplicatest.txt"
filepath = lib.create_local_testfile(filename)
hostname = lib.get_hostname()
hostuser = getpass.getuser()
# assertions
self.admin.assert_icommand("iadmin mkresc thirdresc unixfilesystem %s:/tmp/%s/thirdrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create third resource
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("irepl -R thirdresc " + filename) # replicate to third resource
self.admin.assert_icommand("irepl " + filename) # replicate overtop default resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate overtop test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R thirdresc " + filename) # replicate overtop third resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# should not have a replica 6
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 6 ", " & " + filename])
# should not have a replica 7
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 7 ", " & " + filename])
self.admin.assert_icommand("irm -f " + filename) # cleanup file
self.admin.assert_icommand("iadmin rmresc thirdresc") # remove third resource
# local cleanup
os.remove(filepath)
def test_irepl_over_existing_bad_replica__ticket_1705(self):
# local setup
filename = "reploverwritebad.txt"
filepath = lib.create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# overwrite default repl with different data
self.admin.assert_icommand("iput -f %s %s" % (doublefile, filename))
# default resource cache 1 should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# default resource cache 1 should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + doublesize + " ", " & " + filename])
# default resource archive 1 should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# default resource archive 1 should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + doublesize + " ", " & " + filename])
# default resource cache 2 should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# default resource cache 2 should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " " + doublesize + " ", " & " + filename])
# default resource archive 2 should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# default resource archive 2 should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " " + doublesize + " ", " & " + filename])
# test resource should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE',
[" 4 " + self.testresc, " " + doublesize + " ", " " + filename])
# replicate back onto test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# test resource should have new clean doublesize file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE',
[" 4 " + self.testresc, " " + doublesize + " ", " & " + filename])
# should not have a replica 5
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_iput_with_purgec(self):
# local setup
filename = "purgecfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput --purgec " + filename) # put file
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # should be listed 3x - replica 1
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # should be listed 3x - replica 2
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", filename]) # should be listed 3x - replica 3
# should not have any extra replicas
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", filename])
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_iget_with_purgec(self):
# local setup
filename = "purgecgetfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("iget -f --purgec " + filename) # get file and purge 'cached' replica
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # should be listed 3x - replica 1
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # should be listed 3x - replica 2
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", filename]) # should be listed 3x - replica 3
# should not have any extra replicas
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", filename])
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_irepl_with_purgec(self):
# local setup
filename = "purgecreplfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " --purgec " + filename) # replicate to test resource
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # should be listed 4x - replica 1
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # should be listed 4x - replica 2
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", filename]) # should be listed 4x - replica 3
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", filename]) # should be listed 4x - replica 4
# should not have any extra replicas
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", filename])
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
class Test_Resource_ReplicationToTwoCompoundResourcesWithPreferArchive(ChunkyDevTest, ResourceSuite, unittest.TestCase):
def setUp(self):
# back up core file
corefile = lib.get_core_re_dir() + "/core.re"
backupcorefile = corefile + "--" + self._testMethodName
shutil.copy(corefile, backupcorefile)
# manipulate the core.re to add the new policy
with open(corefile, 'a') as f:
f.write('pep_resource_resolve_hierarchy_pre(*OUT){*OUT="compound_resource_cache_refresh_policy=always";}\n')
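# note: 'compound_resource_cache_refresh_policy=always' makes each compound
# resource re-stage data from its archive on every open, so reads prefer the
# archive copy (interpretation inferred from the policy string and the
# "PreferArchive" suite name)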
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin modresc demoResc name origResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
admin_session.assert_icommand("iadmin mkresc demoResc replication", 'STDOUT_SINGLELINE', 'replication')
admin_session.assert_icommand("iadmin mkresc compResc1 compound", 'STDOUT_SINGLELINE', 'compound')
admin_session.assert_icommand("iadmin mkresc compResc2 compound", 'STDOUT_SINGLELINE', 'compound')
admin_session.assert_icommand("iadmin mkresc cacheResc1 'unixfilesystem' " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/cacheResc1Vault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc archiveResc1 'unixfilesystem' " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/archiveResc1Vault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc cacheResc2 'unixfilesystem' " + configuration.HOSTNAME_2 + ":" +
lib.get_irods_top_level_dir() + "/cacheResc2Vault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc archiveResc2 'unixfilesystem' " + configuration.HOSTNAME_2 + ":" +
lib.get_irods_top_level_dir() + "/archiveResc2Vault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin addchildtoresc demoResc compResc1")
admin_session.assert_icommand("iadmin addchildtoresc demoResc compResc2")
admin_session.assert_icommand("iadmin addchildtoresc compResc1 cacheResc1 cache")
admin_session.assert_icommand("iadmin addchildtoresc compResc1 archiveResc1 archive")
admin_session.assert_icommand("iadmin addchildtoresc compResc2 cacheResc2 cache")
admin_session.assert_icommand("iadmin addchildtoresc compResc2 archiveResc2 archive")
super(Test_Resource_ReplicationToTwoCompoundResourcesWithPreferArchive, self).setUp()
def tearDown(self):
super(Test_Resource_ReplicationToTwoCompoundResourcesWithPreferArchive, self).tearDown()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin rmchildfromresc compResc2 archiveResc2")
admin_session.assert_icommand("iadmin rmchildfromresc compResc2 cacheResc2")
admin_session.assert_icommand("iadmin rmchildfromresc compResc1 archiveResc1")
admin_session.assert_icommand("iadmin rmchildfromresc compResc1 cacheResc1")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc compResc2")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc compResc1")
admin_session.assert_icommand("iadmin rmresc archiveResc2")
admin_session.assert_icommand("iadmin rmresc cacheResc2")
admin_session.assert_icommand("iadmin rmresc archiveResc1")
admin_session.assert_icommand("iadmin rmresc cacheResc1")
admin_session.assert_icommand("iadmin rmresc compResc2")
admin_session.assert_icommand("iadmin rmresc compResc1")
admin_session.assert_icommand("iadmin rmresc demoResc")
admin_session.assert_icommand("iadmin modresc origResc name demoResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
shutil.rmtree(lib.get_irods_top_level_dir() + "/archiveResc1Vault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/cacheResc1Vault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/archiveResc2Vault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/cacheResc2Vault", ignore_errors=True)
# restore the original core.re
corefile = lib.get_core_re_dir() + "/core.re"
backupcorefile = corefile + "--" + self._testMethodName
shutil.copy(backupcorefile, corefile)
os.remove(backupcorefile)
def test_irm_specific_replica(self):
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', self.testfile) # should be listed
self.admin.assert_icommand("irepl -R " + self.testresc + " " + self.testfile) # creates replica
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', self.testfile) # should be listed twice
self.admin.assert_icommand("irm -n 0 " + self.testfile) # remove original from cacheResc only
# the second replica (number 4, on the test resource) should still be there
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', ["4 " + self.testresc, self.testfile])
self.admin.assert_icommand_fail("ils -L " + self.testfile, 'STDOUT_SINGLELINE',
["0 " + self.admin.default_resource, self.testfile]) # replica 0 should be gone
trashpath = "/" + self.admin.zone_name + "/trash/home/" + self.admin.username + \
"/" + self.admin._session_id
self.admin.assert_icommand_fail("ils -L " + trashpath + "/" + self.testfile, 'STDOUT_SINGLELINE',
["0 " + self.admin.default_resource, self.testfile]) # replica should not be in trash
@unittest.skip("--wlock has possible race condition due to Compound/Replication PDMO")
def test_local_iput_collision_with_wlock(self):
pass
@unittest.skip("EMPTY_RESC_PATH - no vault path for coordinating resources")
def test_ireg_as_rodsuser_in_vault(self):
pass
@unittest.skip("this is tested elsewhere")
def test_iget_prefer_from_archive__ticket_1660(self):
pass
def test_local_iput_with_force_and_destination_resource__ticket_1706(self):
# local setup
filename = "iputwithforceanddestination.txt"
filepath = lib.create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
# should not be listed
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist")
self.admin.assert_icommand("iput " + filename) # put file
# replicate to test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# debugging
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename)
# overwrite test repl with different data
self.admin.assert_icommand("iput -f -R %s %s %s" % (self.testresc, doublefile, filename))
# default resource cache 1 should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + filename])
# default resource archive 1 should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + filename])
# default resource cache 2 should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " " + filename])
# default resource archive 2 should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " " + filename])
# default resource cache 1 should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + doublesize + " ", " " + filename])
# default resource archive 1 should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + doublesize + " ", " " + filename])
# default resource cache 2 should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " " + doublesize + " ", " " + filename])
# default resource archive 2 should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " " + doublesize + " ", " " + filename])
# targeted resource should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " " + doublesize + " ", "& " + filename])
# local cleanup
os.remove(filepath)
os.remove(doublefile)
###################
# irepl
###################
def test_irepl_update_replicas(self):
# local setup
filename = "updatereplicasfile.txt"
filepath = lib.create_local_testfile(filename)
hostname = lib.get_hostname()
hostuser = getpass.getuser()
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("iadmin mkresc thirdresc unixfilesystem %s:/tmp/%s/thirdrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create third resource
self.admin.assert_icommand("iadmin mkresc fourthresc unixfilesystem %s:/tmp/%s/fourthrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create fourth resource
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
# replicate to test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# replicate to third resource
self.admin.assert_icommand("irepl -R thirdresc " + filename)
# replicate to fourth resource
self.admin.assert_icommand("irepl -R fourthresc " + filename)
# repave overtop test resource
self.admin.assert_icommand("iput -f -R " + self.testresc + " " + doublefile + " " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 6 ", " & " + filename])
self.admin.assert_icommand("irepl -U " + filename) # update last replica
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 6 ", " & " + filename])
self.admin.assert_icommand("irepl -aU " + filename) # update all replicas
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 6 ", " & " + filename])
self.admin.assert_icommand("irm -f " + filename) # cleanup file
self.admin.assert_icommand("iadmin rmresc thirdresc") # remove third resource
self.admin.assert_icommand("iadmin rmresc fourthresc") # remove third resource
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_irepl_over_existing_second_replica__ticket_1705(self):
# local setup
filename = "secondreplicatest.txt"
filepath = lib.create_local_testfile(filename)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput -R " + self.testresc + " " + filename) # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# replicate to default resource
self.admin.assert_icommand("irepl " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# replicate overtop default resource
self.admin.assert_icommand("irepl " + filename)
# should not have a replica 5
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
# replicate overtop test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# should not have a replica 5
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
# local cleanup
os.remove(filepath)
def test_irepl_over_existing_third_replica__ticket_1705(self):
# local setup
filename = "thirdreplicatest.txt"
filepath = lib.create_local_testfile(filename)
hostname = lib.get_hostname()
hostuser = getpass.getuser()
# assertions
self.admin.assert_icommand("iadmin mkresc thirdresc unixfilesystem %s:/tmp/%s/thirdrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create third resource
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("irepl -R thirdresc " + filename) # replicate to third resource
self.admin.assert_icommand("irepl " + filename) # replicate overtop default resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate overtop test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R thirdresc " + filename) # replicate overtop third resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# should not have a replica 6
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 6 ", " & " + filename])
# should not have a replica 7
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 7 ", " & " + filename])
self.admin.assert_icommand("irm -f " + filename) # cleanup file
self.admin.assert_icommand("iadmin rmresc thirdresc") # remove third resource
# local cleanup
os.remove(filepath)
def test_irepl_over_existing_bad_replica__ticket_1705(self):
# local setup
filename = "reploverwritebad.txt"
filepath = lib.create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# overwrite default repl with different data
self.admin.assert_icommand("iput -f %s %s" % (doublefile, filename))
# default resource cache 1 should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# default resource cache 1 should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + doublesize + " ", " & " + filename])
# default resource archive 1 should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# default resource archive 1 should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + doublesize + " ", " & " + filename])
# default resource cache 2 should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# default resource cache 2 should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " " + doublesize + " ", " & " + filename])
# default resource archive 2 should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# default resource archive 2 should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " " + doublesize + " ", " & " + filename])
# test resource should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE',
[" 4 " + self.testresc, " " + doublesize + " ", " " + filename])
# replicate back onto test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# test resource should have new clean doublesize file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE',
[" 4 " + self.testresc, " " + doublesize + " ", " & " + filename])
# should not have a replica 5
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_iput_with_purgec(self):
# local setup
filename = "purgecfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput --purgec " + filename) # put file
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # should be listed 3x - replica 1
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # should be listed 3x - replica 2
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", filename]) # should be listed 3x - replica 3
# should not have any extra replicas
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", filename])
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_iget_with_purgec(self):
# local setup
filename = "purgecgetfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("iget -f --purgec " + filename) # get file and purge 'cached' replica
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # should be listed 3x - replica 1
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # should be listed 3x - replica 2
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", filename]) # should be listed 3x - replica 3
# should not have any extra replicas
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", filename])
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_irepl_with_purgec(self):
# local setup
filename = "purgecreplfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " --purgec " + filename) # replicate to test resource
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # should be listed 4x - replica 1
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # should be listed 4x - replica 2
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", filename]) # should be listed 4x - replica 3
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", filename]) # should be listed 4x - replica 4
# should not have any extra replicas
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", filename])
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
class Test_Resource_RoundRobin(ChunkyDevTest, ResourceSuite, unittest.TestCase):
def setUp(self):
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin modresc demoResc name origResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
admin_session.assert_icommand("iadmin mkresc demoResc roundrobin", 'STDOUT_SINGLELINE', 'roundrobin')
admin_session.assert_icommand("iadmin mkresc unix1Resc 'unixfilesystem' " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/unix1RescVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc unix2Resc 'unixfilesystem' " + configuration.HOSTNAME_2 + ":" +
lib.get_irods_top_level_dir() + "/unix2RescVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin addchildtoresc demoResc unix1Resc")
admin_session.assert_icommand("iadmin addchildtoresc demoResc unix2Resc")
super(Test_Resource_RoundRobin, self).setUp()
def tearDown(self):
super(Test_Resource_RoundRobin, self).tearDown()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin rmchildfromresc demoResc unix2Resc")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc unix1Resc")
admin_session.assert_icommand("iadmin rmresc unix2Resc")
admin_session.assert_icommand("iadmin rmresc unix1Resc")
admin_session.assert_icommand("iadmin rmresc demoResc")
admin_session.assert_icommand("iadmin modresc origResc name demoResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
shutil.rmtree(lib.get_irods_top_level_dir() + "/unix1RescVault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/unix2RescVault", ignore_errors=True)
@unittest.skip("EMPTY_RESC_PATH - no vault path for coordinating resources")
def test_ireg_as_rodsuser_in_vault(self):
pass
def test_round_robin_mechanism(self):
# local setup
filename = "rrfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
self.user1.assert_icommand("iput " + filename + " file0.txt")
self.user1.assert_icommand("iput " + filename + " file1.txt")
self.user1.assert_icommand("ils -l", 'STDOUT_SINGLELINE', "unix1Resc")
self.user1.assert_icommand("ils -l", 'STDOUT_SINGLELINE', "unix2Resc")
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
class Test_Resource_Replication(ChunkyDevTest, ResourceSuite, unittest.TestCase):
def setUp(self):
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin modresc demoResc name origResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
admin_session.assert_icommand("iadmin mkresc demoResc replication", 'STDOUT_SINGLELINE', 'replication')
admin_session.assert_icommand("iadmin mkresc unix1Resc 'unixfilesystem' " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/unix1RescVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc unix2Resc 'unixfilesystem' " + configuration.HOSTNAME_2 + ":" +
lib.get_irods_top_level_dir() + "/unix2RescVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc unix3Resc 'unixfilesystem' " + configuration.HOSTNAME_3 + ":" +
lib.get_irods_top_level_dir() + "/unix3RescVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin addchildtoresc demoResc unix1Resc")
admin_session.assert_icommand("iadmin addchildtoresc demoResc unix2Resc")
admin_session.assert_icommand("iadmin addchildtoresc demoResc unix3Resc")
super(Test_Resource_Replication, self).setUp()
def tearDown(self):
super(Test_Resource_Replication, self).tearDown()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin rmchildfromresc demoResc unix3Resc")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc unix2Resc")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc unix1Resc")
admin_session.assert_icommand("iadmin rmresc unix3Resc")
admin_session.assert_icommand("iadmin rmresc unix2Resc")
admin_session.assert_icommand("iadmin rmresc unix1Resc")
admin_session.assert_icommand("iadmin rmresc demoResc")
admin_session.assert_icommand("iadmin modresc origResc name demoResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
shutil.rmtree(lib.get_irods_top_level_dir() + "/unix1RescVault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/unix2RescVault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/unix3RescVault", ignore_errors=True)
def test_irm_specific_replica(self):
# not allowed here - this is a managed replication resource
# should be listed 3x
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', [" 0 ", " & " + self.testfile])
# should be listed 3x
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', [" 1 ", " & " + self.testfile])
# should be listed 3x
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', [" 2 ", " & " + self.testfile])
self.admin.assert_icommand("irm -n 1 " + self.testfile) # try to remove one of the managed replicas
# should be listed 2x
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', [" 0 ", " & " + self.testfile])
# should not be listed
self.admin.assert_icommand_fail("ils -L " + self.testfile, 'STDOUT_SINGLELINE', [" 1 ", " & " + self.testfile])
# should be listed 2x
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', [" 2 ", " & " + self.testfile])
@unittest.skip("--wlock has possible race condition due to Compound/Replication PDMO")
def test_local_iput_collision_with_wlock(self):
pass
@unittest.skip("EMPTY_RESC_PATH - no vault path for coordinating resources")
def test_ireg_as_rodsuser_in_vault(self):
pass
def test_reliable_iput__ticket_2557(self):
# local setup
# break the second child resource
self.admin.assert_icommand("iadmin modresc unix2Resc path /nopes", 'STDOUT_SINGLELINE', "unix2RescVault")
filename = "reliableputfile.txt"
filepath = lib.create_local_testfile(filename)
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand_fail("iput " + filename, 'STDOUT_SINGLELINE', "put error") # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', "unix1Resc") # should be listed
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', "unix3Resc") # should be listed
# cleanup
oldvault = lib.get_irods_top_level_dir() + "/unix2RescVault"
self.admin.assert_icommand("iadmin modresc unix2Resc path " + oldvault, 'STDOUT_SINGLELINE', "/nopes")
def test_local_iput_with_force_and_destination_resource__ticket_1706(self):
# local setup
filename = "iputwithforceanddestination.txt"
filepath = lib.create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename)
# overwrite test repl with different data
self.admin.assert_icommand("iput -f -R %s %s %s" % (self.testresc, doublefile, filename))
# default resource should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + filename])
# default resource should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + filename])
# default resource should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " " + filename])
# default resource should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + doublesize + " ", " " + filename])
# default resource should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + doublesize + " ", " " + filename])
# default resource should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " " + doublesize + " ", " " + filename])
# targeted resource should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " " + doublesize + " ", "& " + filename])
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_irepl_update_replicas(self):
# local setup
filename = "updatereplicasfile.txt"
filepath = lib.create_local_testfile(filename)
hostname = lib.get_hostname()
hostuser = getpass.getuser()
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("iadmin mkresc thirdresc unixfilesystem %s:/tmp/%s/thirdrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create third resource
self.admin.assert_icommand("iadmin mkresc fourthresc unixfilesystem %s:/tmp/%s/fourthrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create fourth resource
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
# replicate to test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# replicate to third resource
self.admin.assert_icommand("irepl -R thirdresc " + filename)
# replicate to fourth resource
self.admin.assert_icommand("irepl -R fourthresc " + filename)
# repave overtop test resource
self.admin.assert_icommand("iput -f -R " + self.testresc + " " + doublefile + " " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
self.admin.assert_icommand("irepl -U " + filename) # update last replica
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
self.admin.assert_icommand("irepl -aU " + filename) # update all replicas
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
self.admin.assert_icommand("irm -f " + filename) # cleanup file
self.admin.assert_icommand("iadmin rmresc thirdresc") # remove third resource
self.admin.assert_icommand("iadmin rmresc fourthresc") # remove third resource
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_irepl_over_existing_second_replica__ticket_1705(self):
# local setup
filename = "secondreplicatest.txt"
filepath = lib.create_local_testfile(filename)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput -R " + self.testresc + " " + filename) # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# replicate to default resource
self.admin.assert_icommand("irepl " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# replicate overtop default resource
self.admin.assert_icommand("irepl " + filename)
# should not have a replica 4
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# replicate overtop test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# should not have a replica 4
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# local cleanup
os.remove(filepath)
def test_irepl_over_existing_third_replica__ticket_1705(self):
# local setup
filename = "thirdreplicatest.txt"
filepath = lib.create_local_testfile(filename)
hostname = lib.get_hostname()
hostuser = getpass.getuser()
# assertions
self.admin.assert_icommand("iadmin mkresc thirdresc unixfilesystem %s:/tmp/%s/thirdrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create third resource
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("irepl -R thirdresc " + filename) # replicate to third resource
self.admin.assert_icommand("irepl " + filename) # replicate overtop default resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate overtop test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R thirdresc " + filename) # replicate overtop third resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# should not have a replica 5
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
# should not have a replica 6
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 6 ", " & " + filename])
self.admin.assert_icommand("irm -f " + filename) # cleanup file
self.admin.assert_icommand("iadmin rmresc thirdresc") # remove third resource
# local cleanup
os.remove(filepath)
def test_irepl_over_existing_bad_replica__ticket_1705(self):
# local setup
filename = "reploverwritebad.txt"
filepath = lib.create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
# should not be listed
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist")
# put file
self.admin.assert_icommand("iput " + filename)
# for debugging
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename)
# replicate to test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# for debugging
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename)
# overwrite default repl with different data
self.admin.assert_icommand("iput -f %s %s" % (doublefile, filename))
# default resource 1 should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# default resource 1 should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + doublesize + " ", " & " + filename])
# default resource 2 should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# default resource 2 should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + doublesize + " ", " & " + filename])
# default resource 3 should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# default resource 3 should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " " + doublesize + " ", " & " + filename])
# test resource should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE',
[" 3 " + self.testresc, " " + doublesize + " ", " " + filename])
# replicate back onto test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# test resource should have new clean doublesize file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE',
[" 3 " + self.testresc, " " + doublesize + " ", " & " + filename])
# should not have a replica 4
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_iput_with_purgec(self):
# local setup
filename = "purgecfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
# put file, but trim 'cache' copy (purgec) (backwards compatibility)
self.admin.assert_icommand("iput --purgec " + filename)
# should not be listed (trimmed first replica)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
# should be listed twice - replica 2 of 3
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename])
# should be listed twice - replica 3 of 3
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename])
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_iget_with_purgec(self):
# local setup
filename = "purgecgetfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'wb') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("iget -f --purgec " + filename) # get file and purge 'cached' replica
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # should be listed twice - 2 of 3
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # should be listed twice - 2 of 3
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_irepl_with_purgec(self):
# local setup
filename = "purgecreplfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'wb') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " --purgec " + filename) # replicate to test resource
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # should be listed 3x - 1 of 3
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # should be listed 3x - 2 of 3
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", filename]) # should be listed 3x - 3 of 3
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
class Test_Resource_MultiLayered(ChunkyDevTest, ResourceSuite, unittest.TestCase):
def setUp(self):
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin modresc demoResc name origResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
admin_session.assert_icommand("iadmin mkresc demoResc passthru", 'STDOUT_SINGLELINE', 'passthru')
admin_session.assert_icommand("iadmin mkresc pass2Resc passthru", 'STDOUT_SINGLELINE', 'passthru')
admin_session.assert_icommand("iadmin mkresc rrResc roundrobin", 'STDOUT_SINGLELINE', 'roundrobin')
admin_session.assert_icommand("iadmin mkresc unix1Resc 'unixfilesystem' " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/unix1RescVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc unix2Resc 'unixfilesystem' " + configuration.HOSTNAME_2 + ":" +
lib.get_irods_top_level_dir() + "/unix2RescVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc unix3Resc 'unixfilesystem' " + configuration.HOSTNAME_3 + ":" +
lib.get_irods_top_level_dir() + "/unix3RescVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin addchildtoresc demoResc pass2Resc")
admin_session.assert_icommand("iadmin addchildtoresc pass2Resc rrResc")
admin_session.assert_icommand("iadmin addchildtoresc rrResc unix1Resc")
admin_session.assert_icommand("iadmin addchildtoresc rrResc unix2Resc")
admin_session.assert_icommand("iadmin addchildtoresc rrResc unix3Resc")
super(Test_Resource_MultiLayered, self).setUp()
def tearDown(self):
super(Test_Resource_MultiLayered, self).tearDown()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin rmchildfromresc rrResc unix3Resc")
admin_session.assert_icommand("iadmin rmchildfromresc rrResc unix2Resc")
admin_session.assert_icommand("iadmin rmchildfromresc rrResc unix1Resc")
admin_session.assert_icommand("iadmin rmchildfromresc pass2Resc rrResc")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc pass2Resc")
admin_session.assert_icommand("iadmin rmresc unix3Resc")
admin_session.assert_icommand("iadmin rmresc unix2Resc")
admin_session.assert_icommand("iadmin rmresc unix1Resc")
admin_session.assert_icommand("iadmin rmresc rrResc")
admin_session.assert_icommand("iadmin rmresc pass2Resc")
admin_session.assert_icommand("iadmin rmresc demoResc")
admin_session.assert_icommand("iadmin modresc origResc name demoResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
shutil.rmtree(lib.get_irods_top_level_dir() + "/unix1RescVault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/unix2RescVault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/unix3RescVault", ignore_errors=True)
@unittest.skip("EMPTY_RESC_PATH - no vault path for coordinating resources")
def test_ireg_as_rodsuser_in_vault(self):
pass
| bsd-3-clause | 1,122,051,511,010,169,500 | 63.523754 | 173 | 0.615347 | false |
dreal/dreal | benchmarks/network/thermostat/thermostat-double-i-p.py | 11 | 3580 |
from gen import *
##########
# shared #
##########
flow_var[0] = """
(declare-fun tau () Real)
"""
flow_dec[0] = """
(define-ode flow_1 ((= d/dt[tau] 1)))
"""
state_dec[0] = """
(declare-fun time_{0} () Real)
(declare-fun tau_{0}_0 () Real)
(declare-fun tau_{0}_t () Real)
"""
state_val[0] = """
(assert (<= 0 time_{0})) (assert (<= time_{0} 1))
(assert (<= 0 tau_{0}_0)) (assert (<= tau_{0}_0 1))
(assert (<= 0 tau_{0}_t)) (assert (<= tau_{0}_t 1))
(assert (and (not (and (= mode_1_{0} 1) (= mode_1_{0} 2)))
(not (and (= mode_2_{0} 1) (= mode_2_{0} 2)))))
"""
cont_cond[0] = ["""
(assert (and (>= tau_{0}_0 0) (<= tau_{0}_0 1)
(>= tau_{0}_t 0) (<= tau_{0}_t 1)
(forall_t 1 [0 time_{0}] (>= tau_{0}_t 0))
(forall_t 2 [0 time_{0}] (<= tau_{0}_t 1))))
(assert (and (= [x1_{0}_t x2_{0}_t tau_{0}_t]
(pintegral 0. time_{0}
[x1_{0}_0 x2_{0}_0 tau_{0}_0]
[holder_{1} holder_{2} holder_{3}]))
(connect holder_{3} flow_1)))"""]
jump_cond[0] = ["""
(assert (and (= tau_{0}_t 1) (= tau_{1}_0 0)))"""]
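# tau is a shared clock: every mode drives it with d/dt[tau] = 1, the
# invariant keeps it in [0, 1], and the jump fires exactly when tau reaches 1,
# resetting it to 0. Because every component shares this clock, all discrete
# transitions happen in lockstep -- a synchronous composition of the network.
# In the continuous condition, pintegral integrates [x1 x2 tau] over time_{0},
# and each 'connect holder_i flow_j' selects which ODE governs a variable in
# the current mode.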
################
# thermostat 1 #
################
flow_var[1] = """
(declare-fun x1 () Real)
"""
flow_dec[1] = """
(define-ode flow_2 ((= d/dt[x1] (* 0.015 (- 100 (+ (* (- 1 0.01) x1) (* 0.01 x2)))))))
(define-ode flow_3 ((= d/dt[x1] (* -0.015 (+ (* (- 1 0.01) x1) (* 0.01 x2))))))
"""
state_dec[1] = """
(declare-fun mode_1_{0} () Int)
(declare-fun x1_{0}_0 () Real)
(declare-fun x1_{0}_t () Real)
"""
state_val[1] = """
(assert (<= -20 x1_{0}_0)) (assert (<= x1_{0}_0 100))
(assert (<= -20 x1_{0}_t)) (assert (<= x1_{0}_t 100))
"""
cont_cond[1] = ["""
(assert (or (and (= mode_1_{0} 2) (connect holder_{1} flow_2))
(and (= mode_1_{0} 1) (connect holder_{1} flow_3))))
(assert (not (and (connect holder_{1} flow_2) (connect holder_{1} flow_3))))"""]
jump_cond[1] = ["""
(assert (and (= x1_{1}_0 x1_{0}_t)))
(assert (or (and (<= x1_{0}_t 20) (= mode_1_{1} 2))
(and (> x1_{0}_t 20) (= mode_1_{1} 1))))"""]
################
# thermostat 2 #
################
flow_var[2] = """
(declare-fun x2 () Real)
"""
flow_dec[2] = """
(define-ode flow_4 ((= d/dt[x2] (* 0.045 (- 200 (+ (* (- 1 0.01) x2) (* 0.01 x1)))))))
(define-ode flow_5 ((= d/dt[x2] (* -0.045 (+ (* (- 1 0.01) x2) (* 0.01 x1))))))
"""
state_dec[2] = """
(declare-fun mode_2_{0} () Int)
(declare-fun x2_{0}_0 () Real)
(declare-fun x2_{0}_t () Real)
"""
state_val[2] = """
(assert (<= -20 x2_{0}_0)) (assert (<= x2_{0}_0 100))
(assert (<= -20 x2_{0}_t)) (assert (<= x2_{0}_t 100))
"""
cont_cond[2] = ["""
(assert (or (and (= mode_2_{0} 2) (connect holder_{2} flow_4))
(and (= mode_2_{0} 1) (connect holder_{2} flow_5))))
(assert (not (and (connect holder_{2} flow_4) (connect holder_{2} flow_5))))"""]
jump_cond[2] = ["""
(assert (and (= x2_{1}_0 x2_{0}_t)))
(assert (or (and (<= x2_{0}_t 20) (= mode_2_{1} 2))
(and (> x2_{0}_t 20) (= mode_2_{1} 1))))"""]
#############
# Init/Goal #
#############
init_cond = """
(assert (= tau_{0}_0 0))
(assert (= mode_1_{0} 2))
(assert (and (>= x1_{0}_0 (- 20 1)) (<= x1_{0}_0 (+ 20 1))))
(assert (= mode_2_{0} 2))
(assert (and (>= x2_{0}_0 (- 20 1)) (<= x2_{0}_0 (+ 20 1))))
"""
goal_cond = """
(assert (or (< x1_{0}_t (- 20 5)) (> x1_{0}_t (+ 20 5))))
(assert (or (< x2_{0}_t (- 20 5)) (> x2_{0}_t (+ 20 5))))
"""
import sys
try:
bound = int(sys.argv[1])
except (IndexError, ValueError):
print("Usage:", sys.argv[0], "<Bound>")
else:
generate(bound, 1, [0,1,2], 3, init_cond, goal_cond)
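# Example invocation (assumes gen.py alongside this script defines the
# flow_var/flow_dec/state_dec/state_val/cont_cond/jump_cond containers and
# generate(); where the output goes is gen.py's choice):
#   python thermostat-double-i-p.py 3
# emits the SMT2 encoding of the two-thermostat network unrolled to bound 3.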
| gpl-2.0 | -9,032,462,611,318,067,000 | 25.131387 | 86 | 0.439665 | false |
fingeronthebutton/robotframework | src/robot/variables/variables.py | 20 | 2701 | # Copyright 2008-2015 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from robot.utils import is_list_like
from .filesetter import VariableFileSetter
from .finders import VariableFinder
from .replacer import VariableReplacer
from .store import VariableStore
from .tablesetter import VariableTableSetter
class Variables(object):
"""Represents a set of variables.
Contains methods for replacing variables from list, scalars, and strings.
On top of ${scalar}, @{list} and &{dict} variables, these methods handle
also %{environment} variables.
"""
def __init__(self):
self.store = VariableStore(self)
self._replacer = VariableReplacer(self)
self._finder = VariableFinder(self.store)
def __setitem__(self, name, value):
self.store.add(name, value)
def __getitem__(self, name):
return self._finder.find(name)
def __contains__(self, name):
return name in self.store
def resolve_delayed(self):
self.store.resolve_delayed()
def replace_list(self, items, replace_until=None):
if not is_list_like(items):
raise ValueError("'replace_list' requires list-like input.")
return self._replacer.replace_list(items, replace_until)
def replace_scalar(self, item):
return self._replacer.replace_scalar(item)
def replace_string(self, item, ignore_errors=False):
return self._replacer.replace_string(item, ignore_errors)
def set_from_file(self, path_or_variables, args=None, overwrite=False):
setter = VariableFileSetter(self.store)
return setter.set(path_or_variables, args, overwrite)
def set_from_variable_table(self, variables, overwrite=False):
setter = VariableTableSetter(self.store)
setter.set(variables, overwrite)
def clear(self):
self.store.clear()
def copy(self):
variables = Variables()
variables.store.data = self.store.data.copy()
return variables
def update(self, variables):
self.store.update(variables.store)
def as_dict(self, decoration=True):
return self.store.as_dict(decoration=decoration)
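# --- Usage sketch (illustrative, not part of Robot Framework) ---
# Assumes the store accepts decorated names such as '${greeting}'; the exact
# behaviour depends on the surrounding Robot Framework version.
if __name__ == '__main__':
    variables = Variables()
    variables['${greeting}'] = 'Hello'
    # Expected to print 'Hello, world!' after variable substitution.
    print(variables.replace_string('${greeting}, world!'))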
| apache-2.0 | 5,812,669,188,175,279,000 | 32.7625 | 77 | 0.694928 | false |
ashleyh/zoo | callgraph/clang/cindex.py | 1 | 42680 | #===- cindex.py - Python Indexing Library Bindings -----------*- python -*--===#
#
# The LLVM Compiler Infrastructure
#
# This file is distributed under the University of Illinois Open Source
# License. See LICENSE.TXT for details.
#
#===------------------------------------------------------------------------===#
r"""
Clang Indexing Library Bindings
===============================
This module provides an interface to the Clang indexing library. It is a
low-level interface to the indexing library which attempts to match the Clang
API directly while also being "pythonic". Notable differences from the C API
are:
* string results are returned as Python strings, not CXString objects.
* null cursors are translated to None.
* access to child cursors is done via iteration, not visitation.
The major indexing objects are:
Index
The top-level object which manages some global library state.
TranslationUnit
High-level object encapsulating the AST for a single translation unit. These
can be loaded from .ast files or parsed on the fly.
Cursor
Generic object for representing a node in the AST.
SourceRange, SourceLocation, and File
Objects representing information about the input source.
Most object information is exposed using properties, when the underlying API
call is efficient.
"""
# TODO
# ====
#
# o API support for invalid translation units. Currently we can't even get the
# diagnostics on failure because they refer to locations in an object that
# will have been invalidated.
#
# o fix memory management issues (currently client must hold on to index and
# translation unit, or risk crashes).
#
# o expose code completion APIs.
#
# o cleanup ctypes wrapping, would be nice to separate the ctypes details more
# clearly, and hide from the external interface (i.e., help(cindex)).
#
# o implement additional SourceLocation, SourceRange, and File methods.
from ctypes import *
def get_cindex_library():
# FIXME: It's probably not the case that the library is actually found in
# this location. We need a better system of identifying and loading the
# CIndex library. It could be on path or elsewhere, or versioned, etc.
import platform
name = platform.system()
if name == 'Darwin':
return cdll.LoadLibrary('libclang.dylib')
elif name == 'Windows':
return cdll.LoadLibrary('libclang.dll')
else:
return cdll.LoadLibrary('libclang.so')
# ctypes doesn't implicitly convert c_void_p to the appropriate wrapper
# object. This is a problem, because it means that from_param will see an
# integer and pass the wrong value on platforms where int != void*. Work around
# this by marshalling object arguments as void**.
c_object_p = POINTER(c_void_p)
lib = get_cindex_library()
### Structures and Utility Classes ###
class _CXString(Structure):
"""Helper for transforming CXString results."""
_fields_ = [("spelling", c_char_p), ("free", c_int)]
def __del__(self):
_CXString_dispose(self)
@staticmethod
def from_result(res, fn, args):
assert isinstance(res, _CXString)
return _CXString_getCString(res)
class SourceLocation(Structure):
"""
A SourceLocation represents a particular location within a source file.
"""
_fields_ = [("ptr_data", c_void_p * 2), ("int_data", c_uint)]
_data = None
def _get_instantiation(self):
if self._data is None:
f, l, c, o = c_object_p(), c_uint(), c_uint(), c_uint()
SourceLocation_loc(self, byref(f), byref(l), byref(c), byref(o))
f = File(f) if f else None
            self._data = (f, int(l.value), int(c.value), int(o.value))
return self._data
@property
def file(self):
"""Get the file represented by this source location."""
return self._get_instantiation()[0]
@property
def line(self):
"""Get the line represented by this source location."""
return self._get_instantiation()[1]
@property
def column(self):
"""Get the column represented by this source location."""
return self._get_instantiation()[2]
@property
def offset(self):
"""Get the file offset represented by this source location."""
return self._get_instantiation()[3]
def __repr__(self):
return "<SourceLocation file %r, line %r, column %r>" % (
self.file.name if self.file else None, self.line, self.column)
class SourceRange(Structure):
"""
A SourceRange describes a range of source locations within the source
code.
"""
_fields_ = [
("ptr_data", c_void_p * 2),
("begin_int_data", c_uint),
("end_int_data", c_uint)]
# FIXME: Eliminate this and make normal constructor? Requires hiding ctypes
# object.
@staticmethod
def from_locations(start, end):
return SourceRange_getRange(start, end)
@property
def start(self):
"""
Return a SourceLocation representing the first character within a
source range.
"""
return SourceRange_start(self)
@property
def end(self):
"""
Return a SourceLocation representing the last character within a
source range.
"""
return SourceRange_end(self)
def __repr__(self):
return "<SourceRange start %r, end %r>" % (self.start, self.end)
class Diagnostic(object):
"""
A Diagnostic is a single instance of a Clang diagnostic. It includes the
diagnostic severity, the message, the location the diagnostic occurred, as
well as additional source ranges and associated fix-it hints.
"""
Ignored = 0
Note = 1
Warning = 2
Error = 3
Fatal = 4
def __init__(self, ptr):
self.ptr = ptr
def __del__(self):
_clang_disposeDiagnostic(self)
@property
def severity(self):
return _clang_getDiagnosticSeverity(self)
@property
def location(self):
return _clang_getDiagnosticLocation(self)
@property
def spelling(self):
return _clang_getDiagnosticSpelling(self)
@property
def ranges(self):
class RangeIterator:
def __init__(self, diag):
self.diag = diag
def __len__(self):
return int(_clang_getDiagnosticNumRanges(self.diag))
def __getitem__(self, key):
if (key >= len(self)):
raise IndexError
return _clang_getDiagnosticRange(self.diag, key)
return RangeIterator(self)
@property
def fixits(self):
class FixItIterator:
def __init__(self, diag):
self.diag = diag
def __len__(self):
return int(_clang_getDiagnosticNumFixIts(self.diag))
def __getitem__(self, key):
range = SourceRange()
value = _clang_getDiagnosticFixIt(self.diag, key, byref(range))
if len(value) == 0:
raise IndexError
return FixIt(range, value)
return FixItIterator(self)
def __repr__(self):
return "<Diagnostic severity %r, location %r, spelling %r>" % (
self.severity, self.location, self.spelling)
def from_param(self):
return self.ptr
class FixIt(object):
"""
    A FixIt represents a transformation to be applied to the source to
    "fix-it". The fix-it should be applied by replacing the given source range
    with the given value.
"""
def __init__(self, range, value):
self.range = range
self.value = value
def __repr__(self):
return "<FixIt range %r, value %r>" % (self.range, self.value)
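# Illustrative sketch (not part of the original bindings): a fix-it can be
# applied by splicing its value over the character offsets of its range,
# assuming 'source' holds the file contents as a string (offsets shift after
# each splice, so real code would apply the fix-its back to front):
#   for fix in diagnostic.fixits:
#       start, end = fix.range.start.offset, fix.range.end.offset
#       source = source[:start] + fix.value + source[end:]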
### Cursor Kinds ###
class CursorKind(object):
"""
A CursorKind describes the kind of entity that a cursor points to.
"""
# The unique kind objects, indexed by id.
_kinds = []
_name_map = None
def __init__(self, value):
if value >= len(CursorKind._kinds):
CursorKind._kinds += [None] * (value - len(CursorKind._kinds) + 1)
if CursorKind._kinds[value] is not None:
            raise ValueError('CursorKind already loaded')
self.value = value
CursorKind._kinds[value] = self
CursorKind._name_map = None
def from_param(self):
return self.value
@property
def name(self):
"""Get the enumeration name of this cursor kind."""
if self._name_map is None:
self._name_map = {}
for key,value in CursorKind.__dict__.items():
if isinstance(value,CursorKind):
self._name_map[value] = key
return self._name_map[self]
@staticmethod
def from_id(id):
if id >= len(CursorKind._kinds) or CursorKind._kinds[id] is None:
            raise ValueError('Unknown cursor kind')
return CursorKind._kinds[id]
@staticmethod
def get_all_kinds():
"""Return all CursorKind enumeration instances."""
return filter(None, CursorKind._kinds)
def is_declaration(self):
"""Test if this is a declaration kind."""
return CursorKind_is_decl(self)
def is_reference(self):
"""Test if this is a reference kind."""
return CursorKind_is_ref(self)
def is_expression(self):
"""Test if this is an expression kind."""
return CursorKind_is_expr(self)
def is_statement(self):
"""Test if this is a statement kind."""
return CursorKind_is_stmt(self)
def is_invalid(self):
"""Test if this is an invalid kind."""
return CursorKind_is_inv(self)
def __repr__(self):
return 'CursorKind.%s' % (self.name,)
# FIXME: Is there a nicer way to expose this enumeration? We could potentially
# represent the nested structure, or even build a class hierarchy. The main
# things we want for sure are (a) simple external access to kinds, (b) a place
# to hang a description and name, (c) easy to keep in sync with Index.h.
###
# Declaration Kinds
# A declaration whose specific kind is not exposed via this interface.
#
# Unexposed declarations have the same operations as any other kind of
# declaration; one can extract their location information, spelling, find their
# definitions, etc. However, the specific kind of the declaration is not
# reported.
CursorKind.UNEXPOSED_DECL = CursorKind(1)
# A C or C++ struct.
CursorKind.STRUCT_DECL = CursorKind(2)
# A C or C++ union.
CursorKind.UNION_DECL = CursorKind(3)
# A C++ class.
CursorKind.CLASS_DECL = CursorKind(4)
# An enumeration.
CursorKind.ENUM_DECL = CursorKind(5)
# A field (in C) or non-static data member (in C++) in a struct, union, or C++
# class.
CursorKind.FIELD_DECL = CursorKind(6)
# An enumerator constant.
CursorKind.ENUM_CONSTANT_DECL = CursorKind(7)
# A function.
CursorKind.FUNCTION_DECL = CursorKind(8)
# A variable.
CursorKind.VAR_DECL = CursorKind(9)
# A function or method parameter.
CursorKind.PARM_DECL = CursorKind(10)
# An Objective-C @interface.
CursorKind.OBJC_INTERFACE_DECL = CursorKind(11)
# An Objective-C @interface for a category.
CursorKind.OBJC_CATEGORY_DECL = CursorKind(12)
# An Objective-C @protocol declaration.
CursorKind.OBJC_PROTOCOL_DECL = CursorKind(13)
# An Objective-C @property declaration.
CursorKind.OBJC_PROPERTY_DECL = CursorKind(14)
# An Objective-C instance variable.
CursorKind.OBJC_IVAR_DECL = CursorKind(15)
# An Objective-C instance method.
CursorKind.OBJC_INSTANCE_METHOD_DECL = CursorKind(16)
# An Objective-C class method.
CursorKind.OBJC_CLASS_METHOD_DECL = CursorKind(17)
# An Objective-C @implementation.
CursorKind.OBJC_IMPLEMENTATION_DECL = CursorKind(18)
# An Objective-C @implementation for a category.
CursorKind.OBJC_CATEGORY_IMPL_DECL = CursorKind(19)
# A typedef.
CursorKind.TYPEDEF_DECL = CursorKind(20)
# A C++ class method.
CursorKind.CXX_METHOD = CursorKind(21)
# A C++ namespace.
CursorKind.NAMESPACE = CursorKind(22)
# A linkage specification, e.g. 'extern "C"'.
CursorKind.LINKAGE_SPEC = CursorKind(23)
# A C++ constructor.
CursorKind.CONSTRUCTOR = CursorKind(24)
# A C++ destructor.
CursorKind.DESTRUCTOR = CursorKind(25)
# A C++ conversion function.
CursorKind.CONVERSION_FUNCTION = CursorKind(26)
# A C++ template type parameter
CursorKind.TEMPLATE_TYPE_PARAMETER = CursorKind(27)
# A C++ non-type template parameter.
CursorKind.TEMPLATE_NON_TYPE_PARAMETER = CursorKind(28)
# A C++ template template parameter.
CursorKind.TEMPLATE_TEMPLATE_PARAMTER = CursorKind(29)
# A C++ function template.
CursorKind.FUNCTION_TEMPLATE = CursorKind(30)
# A C++ class template.
CursorKind.CLASS_TEMPLATE = CursorKind(31)
# A C++ class template partial specialization.
CursorKind.CLASS_TEMPLATE_PARTIAL_SPECIALIZATION = CursorKind(32)
# A C++ namespace alias declaration.
CursorKind.NAMESPACE_ALIAS = CursorKind(33)
# A C++ using directive
CursorKind.USING_DIRECTIVE = CursorKind(34)
# A C++ using declaration
CursorKind.USING_DECLARATION = CursorKind(35)
###
# Reference Kinds
CursorKind.OBJC_SUPER_CLASS_REF = CursorKind(40)
CursorKind.OBJC_PROTOCOL_REF = CursorKind(41)
CursorKind.OBJC_CLASS_REF = CursorKind(42)
# A reference to a type declaration.
#
# A type reference occurs anywhere where a type is named but not
# declared. For example, given:
# typedef unsigned size_type;
# size_type size;
#
# The typedef is a declaration of size_type (CXCursor_TypedefDecl),
# while the type of the variable "size" is referenced. The cursor
# referenced by the type of size is the typedef for size_type.
CursorKind.TYPE_REF = CursorKind(43)
CursorKind.CXX_BASE_SPECIFIER = CursorKind(44)
# A reference to a class template, function template, template
# template parameter, or class template partial specialization.
CursorKind.TEMPLATE_REF = CursorKind(45)
# A reference to a namespace or namespace alias.
CursorKind.NAMESPACE_REF = CursorKind(46)
# A reference to a member of a struct, union, or class that occurs in
# some non-expression context, e.g., a designated initializer.
CursorKind.MEMBER_REF = CursorKind(47)
# A reference to a labeled statement.
CursorKind.LABEL_REF = CursorKind(48)
# A reference to a set of overloaded functions or function templates
# that has not yet been resolved to a specific function or function template.
CursorKind.OVERLOADED_DECL_REF = CursorKind(49)
###
# Invalid/Error Kinds
CursorKind.INVALID_FILE = CursorKind(70)
CursorKind.NO_DECL_FOUND = CursorKind(71)
CursorKind.NOT_IMPLEMENTED = CursorKind(72)
CursorKind.INVALID_CODE = CursorKind(73)
###
# Expression Kinds
# An expression whose specific kind is not exposed via this interface.
#
# Unexposed expressions have the same operations as any other kind of
# expression; one can extract their location information, spelling, children,
# etc. However, the specific kind of the expression is not reported.
CursorKind.UNEXPOSED_EXPR = CursorKind(100)
# An expression that refers to some value declaration, such as a function,
# variable, or enumerator.
CursorKind.DECL_REF_EXPR = CursorKind(101)
# An expression that refers to a member of a struct, union, class, Objective-C
# class, etc.
CursorKind.MEMBER_REF_EXPR = CursorKind(102)
# An expression that calls a function.
CursorKind.CALL_EXPR = CursorKind(103)
# An expression that sends a message to an Objective-C object or class.
CursorKind.OBJC_MESSAGE_EXPR = CursorKind(104)
# An expression that represents a block literal.
CursorKind.BLOCK_EXPR = CursorKind(105)
# A statement whose specific kind is not exposed via this interface.
#
# Unexposed statements have the same operations as any other kind of statement;
# one can extract their location information, spelling, children, etc. However,
# the specific kind of the statement is not reported.
CursorKind.UNEXPOSED_STMT = CursorKind(200)
# A labelled statement in a function.
CursorKind.LABEL_STMT = CursorKind(201)
###
# Other Kinds
# Cursor that represents the translation unit itself.
#
# The translation unit cursor exists primarily to act as the root cursor for
# traversing the contents of a translation unit.
CursorKind.TRANSLATION_UNIT = CursorKind(300)
###
# Attributes
# An attribute whose specific kind is not exposed via this interface.
CursorKind.UNEXPOSED_ATTR = CursorKind(400)
CursorKind.IB_ACTION_ATTR = CursorKind(401)
CursorKind.IB_OUTLET_ATTR = CursorKind(402)
CursorKind.IB_OUTLET_COLLECTION_ATTR = CursorKind(403)
###
# Preprocessing
CursorKind.PREPROCESSING_DIRECTIVE = CursorKind(500)
CursorKind.MACRO_DEFINITION = CursorKind(501)
CursorKind.MACRO_INSTANTIATION = CursorKind(502)
CursorKind.INCLUSION_DIRECTIVE = CursorKind(503)
### Cursors ###
class Cursor(Structure):
"""
The Cursor class represents a reference to an element within the AST. It
acts as a kind of iterator.
"""
_fields_ = [("_kind_id", c_int), ("data", c_void_p * 3)]
def __eq__(self, other):
return Cursor_eq(self, other)
def __ne__(self, other):
return not Cursor_eq(self, other)
def is_definition(self):
"""
Returns true if the declaration pointed at by the cursor is also a
definition of that entity.
"""
return Cursor_is_def(self)
def get_definition(self):
"""
If the cursor is a reference to a declaration or a declaration of
some entity, return a cursor that points to the definition of that
entity.
"""
# TODO: Should probably check that this is either a reference or
# declaration prior to issuing the lookup.
return Cursor_def(self)
    def get_usr(self):
        """Return the Unified Symbol Resolution (USR) for the entity referenced
        by the given cursor (or None).
        A Unified Symbol Resolution (USR) is a string that identifies a
        particular entity (function, class, variable, etc.) within a
        program. USRs can be compared across translation units to determine,
        e.g., when references in one translation unit refer to an entity
        defined in another translation unit."""
return Cursor_usr(self)
@property
def kind(self):
"""Return the kind of this cursor."""
return CursorKind.from_id(self._kind_id)
@property
def spelling(self):
"""Return the spelling of the entity pointed at by the cursor."""
if not self.kind.is_declaration():
# FIXME: clang_getCursorSpelling should be fixed to not assert on
# this, for consistency with clang_getCursorUSR.
return None
return Cursor_spelling(self)
@property
def location(self):
"""
Return the source location (the starting character) of the entity
pointed at by the cursor.
"""
return Cursor_loc(self)
@property
def extent(self):
"""
Return the source range (the range of text) occupied by the entity
pointed at by the cursor.
"""
return Cursor_extent(self)
def get_children(self):
"""Return an iterator for accessing the children of this cursor."""
# FIXME: Expose iteration from CIndex, PR6125.
def visitor(child, parent, children):
# FIXME: Document this assertion in API.
# FIXME: There should just be an isNull method.
assert child != Cursor_null()
children.append(child)
return 1 # continue
children = []
Cursor_visit(self, Cursor_visit_callback(visitor), children)
return iter(children)
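    # Illustrative sketch (not part of the original bindings): a depth-first
    # traversal built on get_children(), e.g.
    #   def walk(cursor):
    #       yield cursor
    #       for child in cursor.get_children():
    #           for node in walk(child):
    #               yield node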
@staticmethod
def from_result(res, fn, args):
assert isinstance(res, Cursor)
# FIXME: There should just be an isNull method.
if res == Cursor_null():
return None
return res
## CIndex Objects ##
# CIndex objects (derived from ClangObject) are essentially lightweight
# wrappers attached to some underlying object, which is exposed via CIndex as
# a void*.
class ClangObject(object):
"""
A helper for Clang objects. This class helps act as an intermediary for
the ctypes library and the Clang CIndex library.
"""
def __init__(self, obj):
assert isinstance(obj, c_object_p) and obj
self.obj = self._as_parameter_ = obj
def from_param(self):
return self._as_parameter_
class _CXUnsavedFile(Structure):
"""Helper for passing unsaved file arguments."""
_fields_ = [("name", c_char_p), ("contents", c_char_p), ('length', c_ulong)]
## Diagnostic Conversion ##
_clang_getNumDiagnostics = lib.clang_getNumDiagnostics
_clang_getNumDiagnostics.argtypes = [c_object_p]
_clang_getNumDiagnostics.restype = c_uint
_clang_getDiagnostic = lib.clang_getDiagnostic
_clang_getDiagnostic.argtypes = [c_object_p, c_uint]
_clang_getDiagnostic.restype = c_object_p
_clang_disposeDiagnostic = lib.clang_disposeDiagnostic
_clang_disposeDiagnostic.argtypes = [Diagnostic]
_clang_getDiagnosticSeverity = lib.clang_getDiagnosticSeverity
_clang_getDiagnosticSeverity.argtypes = [Diagnostic]
_clang_getDiagnosticSeverity.restype = c_int
_clang_getDiagnosticLocation = lib.clang_getDiagnosticLocation
_clang_getDiagnosticLocation.argtypes = [Diagnostic]
_clang_getDiagnosticLocation.restype = SourceLocation
_clang_getDiagnosticSpelling = lib.clang_getDiagnosticSpelling
_clang_getDiagnosticSpelling.argtypes = [Diagnostic]
_clang_getDiagnosticSpelling.restype = _CXString
_clang_getDiagnosticSpelling.errcheck = _CXString.from_result
_clang_getDiagnosticNumRanges = lib.clang_getDiagnosticNumRanges
_clang_getDiagnosticNumRanges.argtypes = [Diagnostic]
_clang_getDiagnosticNumRanges.restype = c_uint
_clang_getDiagnosticRange = lib.clang_getDiagnosticRange
_clang_getDiagnosticRange.argtypes = [Diagnostic, c_uint]
_clang_getDiagnosticRange.restype = SourceRange
_clang_getDiagnosticNumFixIts = lib.clang_getDiagnosticNumFixIts
_clang_getDiagnosticNumFixIts.argtypes = [Diagnostic]
_clang_getDiagnosticNumFixIts.restype = c_uint
_clang_getDiagnosticFixIt = lib.clang_getDiagnosticFixIt
_clang_getDiagnosticFixIt.argtypes = [Diagnostic, c_uint, POINTER(SourceRange)]
_clang_getDiagnosticFixIt.restype = _CXString
_clang_getDiagnosticFixIt.errcheck = _CXString.from_result
###
class CompletionChunk:
class Kind:
def __init__(self, name):
self.name = name
def __str__(self):
return self.name
def __repr__(self):
return "<ChunkKind: %s>" % self
def __init__(self, completionString, key):
self.cs = completionString
self.key = key
def __repr__(self):
return "{'" + self.spelling + "', " + str(self.kind) + "}"
@property
def spelling(self):
return _clang_getCompletionChunkText(self.cs, self.key).spelling
@property
def kind(self):
res = _clang_getCompletionChunkKind(self.cs, self.key)
return completionChunkKindMap[res]
@property
def string(self):
res = _clang_getCompletionChunkCompletionString(self.cs, self.key)
        if res:
            return CompletionString(res)
        else:
            return None
def isKindOptional(self):
return self.kind == completionChunkKindMap[0]
def isKindTypedText(self):
return self.kind == completionChunkKindMap[1]
def isKindPlaceHolder(self):
return self.kind == completionChunkKindMap[3]
def isKindInformative(self):
return self.kind == completionChunkKindMap[4]
def isKindResultType(self):
return self.kind == completionChunkKindMap[15]
completionChunkKindMap = {
0: CompletionChunk.Kind("Optional"),
1: CompletionChunk.Kind("TypedText"),
2: CompletionChunk.Kind("Text"),
3: CompletionChunk.Kind("Placeholder"),
4: CompletionChunk.Kind("Informative"),
5: CompletionChunk.Kind("CurrentParameter"),
6: CompletionChunk.Kind("LeftParen"),
7: CompletionChunk.Kind("RightParen"),
8: CompletionChunk.Kind("LeftBracket"),
9: CompletionChunk.Kind("RightBracket"),
10: CompletionChunk.Kind("LeftBrace"),
11: CompletionChunk.Kind("RightBrace"),
12: CompletionChunk.Kind("LeftAngle"),
13: CompletionChunk.Kind("RightAngle"),
14: CompletionChunk.Kind("Comma"),
15: CompletionChunk.Kind("ResultType"),
16: CompletionChunk.Kind("Colon"),
17: CompletionChunk.Kind("SemiColon"),
18: CompletionChunk.Kind("Equal"),
19: CompletionChunk.Kind("HorizontalSpace"),
20: CompletionChunk.Kind("VerticalSpace")}
class CompletionString(ClangObject):
class Availability:
def __init__(self, name):
self.name = name
def __str__(self):
return self.name
def __repr__(self):
return "<Availability: %s>" % self
def __len__(self):
return _clang_getNumCompletionChunks(self.obj)
def __getitem__(self, key):
if len(self) <= key:
raise IndexError
return CompletionChunk(self.obj, key)
@property
def priority(self):
return _clang_getCompletionPriority(self.obj)
@property
def availability(self):
res = _clang_getCompletionAvailability(self.obj)
return availabilityKinds[res]
def __repr__(self):
return " | ".join([str(a) for a in self]) \
+ " || Priority: " + str(self.priority) \
+ " || Availability: " + str(self.availability)
availabilityKinds = {
0: CompletionChunk.Kind("Available"),
1: CompletionChunk.Kind("Deprecated"),
2: CompletionChunk.Kind("NotAvailable")}
class CodeCompletionResult(Structure):
_fields_ = [('cursorKind', c_int), ('completionString', c_object_p)]
def __repr__(self):
return str(CompletionString(self.completionString))
@property
def kind(self):
return CursorKind.from_id(self.cursorKind)
@property
def string(self):
return CompletionString(self.completionString)
class CCRStructure(Structure):
_fields_ = [('results', POINTER(CodeCompletionResult)),
('numResults', c_int)]
def __len__(self):
return self.numResults
def __getitem__(self, key):
if len(self) <= key:
raise IndexError
return self.results[key]
class CodeCompletionResults(ClangObject):
def __init__(self, ptr):
assert isinstance(ptr, POINTER(CCRStructure)) and ptr
self.ptr = self._as_parameter_ = ptr
def from_param(self):
return self._as_parameter_
def __del__(self):
CodeCompletionResults_dispose(self)
@property
def results(self):
return self.ptr.contents
@property
def diagnostics(self):
class DiagnosticsItr:
def __init__(self, ccr):
self.ccr= ccr
def __len__(self):
return int(_clang_codeCompleteGetNumDiagnostics(self.ccr))
def __getitem__(self, key):
return _clang_codeCompleteGetDiagnostic(self.ccr, key)
return DiagnosticsItr(self)
class Index(ClangObject):
"""
The Index type provides the primary interface to the Clang CIndex library,
primarily by providing an interface for reading and parsing translation
units.
"""
@staticmethod
def create(excludeDecls=False):
"""
Create a new Index.
Parameters:
excludeDecls -- Exclude local declarations from translation units.
"""
return Index(Index_create(excludeDecls, 0))
def __del__(self):
Index_dispose(self)
def read(self, path):
"""Load the translation unit from the given AST file."""
ptr = TranslationUnit_read(self, path)
return TranslationUnit(ptr) if ptr else None
def parse(self, path, args = [], unsaved_files = [], options = 0):
"""
Load the translation unit from the given source code file by running
clang and generating the AST before loading. Additional command line
parameters can be passed to clang via the args parameter.
        In-memory contents for files can be provided by passing a list of
        pairs as unsaved_files; the first item of each pair should be the
        filename to be mapped and the second should be the contents to be
        substituted for the file. The contents may be passed as strings or
        file objects.
"""
arg_array = 0
if len(args):
arg_array = (c_char_p * len(args))(* args)
unsaved_files_array = 0
if len(unsaved_files):
unsaved_files_array = (_CXUnsavedFile * len(unsaved_files))()
for i,(name,value) in enumerate(unsaved_files):
if not isinstance(value, str):
# FIXME: It would be great to support an efficient version
# of this, one day.
value = value.read()
                if not isinstance(value, str):
                    raise TypeError('Unexpected unsaved file contents.')
unsaved_files_array[i].name = name
unsaved_files_array[i].contents = value
unsaved_files_array[i].length = len(value)
ptr = TranslationUnit_parse(self, path, arg_array, len(args),
unsaved_files_array, len(unsaved_files),
options)
return TranslationUnit(ptr) if ptr else None
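# Illustrative sketch (not part of the original bindings): parsing an
# in-memory buffer through unsaved_files; 'virtual.c' is a hypothetical name
# and does not need to exist on disk:
#   index = Index.create()
#   tu = index.parse('virtual.c',
#                    unsaved_files=[('virtual.c', 'int main() { return 0; }')])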
class TranslationUnit(ClangObject):
"""
The TranslationUnit class represents a source code translation unit and
provides read-only access to its top-level declarations.
"""
def __init__(self, ptr):
ClangObject.__init__(self, ptr)
def __del__(self):
TranslationUnit_dispose(self)
@property
def cursor(self):
"""Retrieve the cursor that represents the given translation unit."""
return TranslationUnit_cursor(self)
@property
def spelling(self):
"""Get the original translation unit source file name."""
return TranslationUnit_spelling(self)
def get_includes(self):
"""
Return an iterable sequence of FileInclusion objects that describe the
sequence of inclusions in a translation unit. The first object in
this sequence is always the input file. Note that this method will not
recursively iterate over header files included through precompiled
headers.
"""
def visitor(fobj, lptr, depth, includes):
loc = lptr.contents
includes.append(FileInclusion(loc.file, File(fobj), loc, depth))
# Automatically adapt CIndex/ctype pointers to python objects
includes = []
TranslationUnit_includes(self,
TranslationUnit_includes_callback(visitor),
includes)
return iter(includes)
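    # Illustrative sketch: printing the inclusion tree of a parsed unit,
    # indented by depth (the input file sits at depth 0):
    #   for inc in tu.get_includes():
    #       print('  ' * inc.depth + inc.include.name)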
@property
def diagnostics(self):
"""
Return an iterable (and indexable) object containing the diagnostics.
"""
class DiagIterator:
def __init__(self, tu):
self.tu = tu
def __len__(self):
return int(_clang_getNumDiagnostics(self.tu))
def __getitem__(self, key):
diag = _clang_getDiagnostic(self.tu, key)
if not diag:
raise IndexError
return Diagnostic(diag)
return DiagIterator(self)
def reparse(self, unsaved_files = [], options = 0):
"""
Reparse an already parsed translation unit.
        In-memory contents for files can be provided by passing a list of
        pairs as unsaved_files; the first item of each pair should be the
        filename to be mapped and the second should be the contents to be
        substituted for the file. The contents may be passed as strings or
        file objects.
"""
unsaved_files_array = 0
if len(unsaved_files):
unsaved_files_array = (_CXUnsavedFile * len(unsaved_files))()
for i,(name,value) in enumerate(unsaved_files):
if not isinstance(value, str):
# FIXME: It would be great to support an efficient version
# of this, one day.
value = value.read()
                if not isinstance(value, str):
                    raise TypeError('Unexpected unsaved file contents.')
unsaved_files_array[i].name = name
unsaved_files_array[i].contents = value
unsaved_files_array[i].length = len(value)
ptr = TranslationUnit_reparse(self, len(unsaved_files),
unsaved_files_array,
options)
def codeComplete(self, path, line, column, unsaved_files = [], options = 0):
"""
Code complete in this translation unit.
        In-memory contents for files can be provided by passing a list of
        pairs as unsaved_files; the first item of each pair should be the
        filename to be mapped and the second should be the contents to be
        substituted for the file. The contents may be passed as strings or
        file objects.
"""
unsaved_files_array = 0
if len(unsaved_files):
unsaved_files_array = (_CXUnsavedFile * len(unsaved_files))()
for i,(name,value) in enumerate(unsaved_files):
if not isinstance(value, str):
# FIXME: It would be great to support an efficient version
# of this, one day.
value = value.read()
                if not isinstance(value, str):
                    raise TypeError('Unexpected unsaved file contents.')
unsaved_files_array[i].name = name
unsaved_files_array[i].contents = value
unsaved_files_array[i].length = len(value)
ptr = TranslationUnit_codeComplete(self, path,
line, column,
unsaved_files_array,
len(unsaved_files),
options)
return CodeCompletionResults(ptr) if ptr else None
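# Illustrative sketch (not part of the original bindings): requesting
# completions at a position in an already-parsed file:
#   results = tu.codeComplete('f.c', 3, 5)
#   if results is not None:
#       for r in results.results:
#           print(r)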
class File(ClangObject):
"""
The File class represents a particular source file that is part of a
translation unit.
"""
@property
def name(self):
"""Return the complete file and path name of the file."""
return File_name(self)
@property
def time(self):
"""Return the last modification time of the file."""
return File_time(self)
class FileInclusion(object):
"""
The FileInclusion class represents the inclusion of one source file by
another via a '#include' directive or as the input file for the translation
unit. This class provides information about the included file, the including
file, the location of the '#include' directive and the depth of the included
file in the stack. Note that the input file has depth 0.
"""
def __init__(self, src, tgt, loc, depth):
self.source = src
self.include = tgt
self.location = loc
self.depth = depth
@property
def is_input_file(self):
"""True if the included file is the input file."""
return self.depth == 0
# Additional Functions and Types
# String Functions
_CXString_dispose = lib.clang_disposeString
_CXString_dispose.argtypes = [_CXString]
_CXString_getCString = lib.clang_getCString
_CXString_getCString.argtypes = [_CXString]
_CXString_getCString.restype = c_char_p
# Source Location Functions
SourceLocation_loc = lib.clang_getInstantiationLocation
SourceLocation_loc.argtypes = [SourceLocation, POINTER(c_object_p),
POINTER(c_uint), POINTER(c_uint),
POINTER(c_uint)]
# Source Range Functions
SourceRange_getRange = lib.clang_getRange
SourceRange_getRange.argtypes = [SourceLocation, SourceLocation]
SourceRange_getRange.restype = SourceRange
SourceRange_start = lib.clang_getRangeStart
SourceRange_start.argtypes = [SourceRange]
SourceRange_start.restype = SourceLocation
SourceRange_end = lib.clang_getRangeEnd
SourceRange_end.argtypes = [SourceRange]
SourceRange_end.restype = SourceLocation
# CursorKind Functions
CursorKind_is_decl = lib.clang_isDeclaration
CursorKind_is_decl.argtypes = [CursorKind]
CursorKind_is_decl.restype = bool
CursorKind_is_ref = lib.clang_isReference
CursorKind_is_ref.argtypes = [CursorKind]
CursorKind_is_ref.restype = bool
CursorKind_is_expr = lib.clang_isExpression
CursorKind_is_expr.argtypes = [CursorKind]
CursorKind_is_expr.restype = bool
CursorKind_is_stmt = lib.clang_isStatement
CursorKind_is_stmt.argtypes = [CursorKind]
CursorKind_is_stmt.restype = bool
CursorKind_is_inv = lib.clang_isInvalid
CursorKind_is_inv.argtypes = [CursorKind]
CursorKind_is_inv.restype = bool
# Cursor Functions
# TODO: Implement this function
Cursor_get = lib.clang_getCursor
Cursor_get.argtypes = [TranslationUnit, SourceLocation]
Cursor_get.restype = Cursor
Cursor_null = lib.clang_getNullCursor
Cursor_null.restype = Cursor
Cursor_usr = lib.clang_getCursorUSR
Cursor_usr.argtypes = [Cursor]
Cursor_usr.restype = _CXString
Cursor_usr.errcheck = _CXString.from_result
Cursor_is_def = lib.clang_isCursorDefinition
Cursor_is_def.argtypes = [Cursor]
Cursor_is_def.restype = bool
Cursor_def = lib.clang_getCursorDefinition
Cursor_def.argtypes = [Cursor]
Cursor_def.restype = Cursor
Cursor_def.errcheck = Cursor.from_result
Cursor_eq = lib.clang_equalCursors
Cursor_eq.argtypes = [Cursor, Cursor]
Cursor_eq.restype = c_uint
Cursor_spelling = lib.clang_getCursorSpelling
Cursor_spelling.argtypes = [Cursor]
Cursor_spelling.restype = _CXString
Cursor_spelling.errcheck = _CXString.from_result
Cursor_loc = lib.clang_getCursorLocation
Cursor_loc.argtypes = [Cursor]
Cursor_loc.restype = SourceLocation
Cursor_extent = lib.clang_getCursorExtent
Cursor_extent.argtypes = [Cursor]
Cursor_extent.restype = SourceRange
Cursor_ref = lib.clang_getCursorReferenced
Cursor_ref.argtypes = [Cursor]
Cursor_ref.restype = Cursor
Cursor_ref.errcheck = Cursor.from_result
Cursor_visit_callback = CFUNCTYPE(c_int, Cursor, Cursor, py_object)
Cursor_visit = lib.clang_visitChildren
Cursor_visit.argtypes = [Cursor, Cursor_visit_callback, py_object]
Cursor_visit.restype = c_uint
# Index Functions
Index_create = lib.clang_createIndex
Index_create.argtypes = [c_int, c_int]
Index_create.restype = c_object_p
Index_dispose = lib.clang_disposeIndex
Index_dispose.argtypes = [Index]
# Translation Unit Functions
TranslationUnit_read = lib.clang_createTranslationUnit
TranslationUnit_read.argtypes = [Index, c_char_p]
TranslationUnit_read.restype = c_object_p
TranslationUnit_parse = lib.clang_parseTranslationUnit
TranslationUnit_parse.argtypes = [Index, c_char_p, c_void_p,
c_int, c_void_p, c_int, c_int]
TranslationUnit_parse.restype = c_object_p
TranslationUnit_reparse = lib.clang_reparseTranslationUnit
TranslationUnit_reparse.argtypes = [TranslationUnit, c_int, c_void_p, c_int]
TranslationUnit_reparse.restype = c_int
TranslationUnit_codeComplete = lib.clang_codeCompleteAt
TranslationUnit_codeComplete.argtypes = [TranslationUnit, c_char_p, c_int,
c_int, c_void_p, c_int, c_int]
TranslationUnit_codeComplete.restype = POINTER(CCRStructure)
TranslationUnit_cursor = lib.clang_getTranslationUnitCursor
TranslationUnit_cursor.argtypes = [TranslationUnit]
TranslationUnit_cursor.restype = Cursor
TranslationUnit_cursor.errcheck = Cursor.from_result
TranslationUnit_spelling = lib.clang_getTranslationUnitSpelling
TranslationUnit_spelling.argtypes = [TranslationUnit]
TranslationUnit_spelling.restype = _CXString
TranslationUnit_spelling.errcheck = _CXString.from_result
TranslationUnit_dispose = lib.clang_disposeTranslationUnit
TranslationUnit_dispose.argtypes = [TranslationUnit]
TranslationUnit_includes_callback = CFUNCTYPE(None,
c_object_p,
POINTER(SourceLocation),
c_uint, py_object)
TranslationUnit_includes = lib.clang_getInclusions
TranslationUnit_includes.argtypes = [TranslationUnit,
TranslationUnit_includes_callback,
py_object]
# File Functions
File_name = lib.clang_getFileName
File_name.argtypes = [File]
File_name.restype = c_char_p
File_time = lib.clang_getFileTime
File_time.argtypes = [File]
File_time.restype = c_uint
# Code completion
CodeCompletionResults_dispose = lib.clang_disposeCodeCompleteResults
CodeCompletionResults_dispose.argtypes = [CodeCompletionResults]
_clang_codeCompleteGetNumDiagnostics = lib.clang_codeCompleteGetNumDiagnostics
_clang_codeCompleteGetNumDiagnostics.argtypes = [CodeCompletionResults]
_clang_codeCompleteGetNumDiagnostics.restype = c_int
_clang_codeCompleteGetDiagnostic = lib.clang_codeCompleteGetDiagnostic
_clang_codeCompleteGetDiagnostic.argtypes = [CodeCompletionResults, c_int]
_clang_codeCompleteGetDiagnostic.restype = Diagnostic
_clang_getCompletionChunkText = lib.clang_getCompletionChunkText
_clang_getCompletionChunkText.argtypes = [c_void_p, c_int]
_clang_getCompletionChunkText.restype = _CXString
_clang_getCompletionChunkKind = lib.clang_getCompletionChunkKind
_clang_getCompletionChunkKind.argtypes = [c_void_p, c_int]
_clang_getCompletionChunkKind.restype = c_int
_clang_getCompletionChunkCompletionString = lib.clang_getCompletionChunkCompletionString
_clang_getCompletionChunkCompletionString.argtypes = [c_void_p, c_int]
_clang_getCompletionChunkCompletionString.restype = c_object_p
_clang_getNumCompletionChunks = lib.clang_getNumCompletionChunks
_clang_getNumCompletionChunks.argtypes = [c_void_p]
_clang_getNumCompletionChunks.restype = c_int
_clang_getCompletionAvailability = lib.clang_getCompletionAvailability
_clang_getCompletionAvailability.argtypes = [c_void_p]
_clang_getCompletionAvailability.restype = c_int
_clang_getCompletionPriority = lib.clang_getCompletionPriority
_clang_getCompletionPriority.argtypes = [c_void_p]
_clang_getCompletionPriority.restype = c_int
###
__all__ = ['Index', 'TranslationUnit', 'Cursor', 'CursorKind',
'Diagnostic', 'FixIt', 'CodeCompletionResults', 'SourceRange',
'SourceLocation', 'File']
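# --- Usage sketch (illustrative, not part of the original bindings) ---
# Assumes libclang can be located by get_cindex_library() and that a C source
# file is supplied on the command line.
if __name__ == '__main__':
    import sys
    if len(sys.argv) != 2:
        sys.exit('usage: %s <source-file>' % sys.argv[0])
    index = Index.create()
    tu = index.parse(sys.argv[1])
    if tu is None:
        sys.exit('unable to parse %s' % sys.argv[1])
    for diag in tu.diagnostics:
        print(diag)
    def dump(cursor, depth=0):
        # Print each cursor's kind and spelling, indented by tree depth.
        print('%s%s %s' % ('  ' * depth, cursor.kind, cursor.spelling))
        for child in cursor.get_children():
            dump(child, depth + 1)
    dump(tu.cursor)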
| gpl-3.0 | 3,310,479,464,577,405,400 | 31.730061 | 88 | 0.665745 | false |
blakfeld/ansible | lib/ansible/plugins/action/script.py | 15 | 4083 | # (c) 2012, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible import constants as C
from ansible.plugins.action import ActionBase
class ActionModule(ActionBase):
TRANSFERS_FILES = True
def run(self, tmp=None, task_vars=None):
''' handler for file transfer operations '''
if self._connection_info.check_mode:
return dict(skipped=True, msg='check mode not supported for this module')
if not tmp:
tmp = self._make_tmp_path()
creates = self._task.args.get('creates')
if creates:
# do not run the command if the line contains creates=filename
# and the filename already exists. This allows idempotence
# of command executions.
result = self._execute_module(module_name='stat', module_args=dict(path=creates), task_vars=task_vars, tmp=tmp, persist_files=True)
stat = result.get('stat', None)
if stat and stat.get('exists', False):
return dict(skipped=True, msg=("skipped, since %s exists" % creates))
removes = self._task.args.get('removes')
if removes:
# do not run the command if the line contains removes=filename
# and the filename does not exist. This allows idempotence
# of command executions.
result = self._execute_module(module_name='stat', module_args=dict(path=removes), task_vars=task_vars, tmp=tmp, persist_files=True)
stat = result.get('stat', None)
if stat and not stat.get('exists', False):
return dict(skipped=True, msg=("skipped, since %s does not exist" % removes))
# the script name is the first item in the raw params, so we split it
# out now so we know the file name we need to transfer to the remote,
# and everything else is an argument to the script which we need later
# to append to the remote command
parts = self._task.args.get('_raw_params', '').strip().split()
source = parts[0]
args = ' '.join(parts[1:])
if self._task._role is not None:
source = self._loader.path_dwim_relative(self._task._role._role_path, 'files', source)
else:
source = self._loader.path_dwim(source)
# transfer the file to a remote tmp location
tmp_src = self._connection._shell.join_path(tmp, os.path.basename(source))
self._connection.put_file(source, tmp_src)
sudoable = True
# set file permissions, more permissive when the copy is done as a different user
if self._connection_info.become and self._connection_info.become_user != 'root':
chmod_mode = 'a+rx'
sudoable = False
else:
chmod_mode = '+rx'
self._remote_chmod(tmp, chmod_mode, tmp_src, sudoable=sudoable)
# add preparation steps to one ssh roundtrip executing the script
env_string = self._compute_environment_string()
script_cmd = ' '.join([env_string, tmp_src, args])
result = self._low_level_execute_command(cmd=script_cmd, tmp=None, sudoable=sudoable)
# clean up after
if tmp and "tmp" in tmp and not C.DEFAULT_KEEP_REMOTE_FILES:
self._remove_tmp_path(tmp)
result['changed'] = True
return result
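# --- Usage sketch (illustrative): a playbook task exercising this action
# plugin, including the 'creates' idempotence guard handled above ---
#   - name: run setup script once
#     script: /local/path/setup.sh --fast creates=/etc/setup.done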
| gpl-3.0 | -7,500,735,715,795,243,000 | 41.978947 | 143 | 0.643644 | false |
Y3K/django | tests/prefetch_related/models.py | 255 | 7972 | import uuid
from django.contrib.contenttypes.fields import (
GenericForeignKey, GenericRelation,
)
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
# Basic tests
@python_2_unicode_compatible
class Author(models.Model):
name = models.CharField(max_length=50, unique=True)
first_book = models.ForeignKey('Book', models.CASCADE, related_name='first_time_authors')
favorite_authors = models.ManyToManyField(
'self', through='FavoriteAuthors', symmetrical=False, related_name='favors_me')
def __str__(self):
return self.name
class Meta:
ordering = ['id']
class AuthorWithAge(Author):
author = models.OneToOneField(Author, models.CASCADE, parent_link=True)
age = models.IntegerField()
class FavoriteAuthors(models.Model):
author = models.ForeignKey(Author, models.CASCADE, to_field='name', related_name='i_like')
likes_author = models.ForeignKey(Author, models.CASCADE, to_field='name', related_name='likes_me')
class Meta:
ordering = ['id']
@python_2_unicode_compatible
class AuthorAddress(models.Model):
author = models.ForeignKey(Author, models.CASCADE, to_field='name', related_name='addresses')
address = models.TextField()
class Meta:
ordering = ['id']
def __str__(self):
return self.address
@python_2_unicode_compatible
class Book(models.Model):
title = models.CharField(max_length=255)
authors = models.ManyToManyField(Author, related_name='books')
def __str__(self):
return self.title
class Meta:
ordering = ['id']
class BookWithYear(Book):
book = models.OneToOneField(Book, models.CASCADE, parent_link=True)
published_year = models.IntegerField()
aged_authors = models.ManyToManyField(
AuthorWithAge, related_name='books_with_year')
class Bio(models.Model):
author = models.OneToOneField(Author, models.CASCADE)
books = models.ManyToManyField(Book, blank=True)
@python_2_unicode_compatible
class Reader(models.Model):
name = models.CharField(max_length=50)
books_read = models.ManyToManyField(Book, related_name='read_by')
def __str__(self):
return self.name
class Meta:
ordering = ['id']
class BookReview(models.Model):
book = models.ForeignKey(BookWithYear, models.CASCADE)
notes = models.TextField(null=True, blank=True)
# Models for default manager tests
class Qualification(models.Model):
name = models.CharField(max_length=10)
class Meta:
ordering = ['id']
class TeacherManager(models.Manager):
def get_queryset(self):
return super(TeacherManager, self).get_queryset().prefetch_related('qualifications')
@python_2_unicode_compatible
class Teacher(models.Model):
name = models.CharField(max_length=50)
qualifications = models.ManyToManyField(Qualification)
objects = TeacherManager()
def __str__(self):
return "%s (%s)" % (self.name, ", ".join(q.name for q in self.qualifications.all()))
class Meta:
ordering = ['id']
class Department(models.Model):
name = models.CharField(max_length=50)
teachers = models.ManyToManyField(Teacher)
class Meta:
ordering = ['id']
# GenericRelation/GenericForeignKey tests
@python_2_unicode_compatible
class TaggedItem(models.Model):
tag = models.SlugField()
content_type = models.ForeignKey(
ContentType,
models.CASCADE,
related_name="taggeditem_set2",
)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey('content_type', 'object_id')
created_by_ct = models.ForeignKey(
ContentType,
models.SET_NULL,
null=True,
related_name='taggeditem_set3',
)
created_by_fkey = models.PositiveIntegerField(null=True)
created_by = GenericForeignKey('created_by_ct', 'created_by_fkey',)
favorite_ct = models.ForeignKey(
ContentType,
models.SET_NULL,
null=True,
related_name='taggeditem_set4',
)
favorite_fkey = models.CharField(max_length=64, null=True)
favorite = GenericForeignKey('favorite_ct', 'favorite_fkey')
def __str__(self):
return self.tag
class Meta:
ordering = ['id']
class Bookmark(models.Model):
url = models.URLField()
tags = GenericRelation(TaggedItem, related_query_name='bookmarks')
favorite_tags = GenericRelation(TaggedItem,
content_type_field='favorite_ct',
object_id_field='favorite_fkey',
related_query_name='favorite_bookmarks')
class Meta:
ordering = ['id']
class Comment(models.Model):
comment = models.TextField()
# Content-object field
content_type = models.ForeignKey(ContentType, models.CASCADE)
object_pk = models.TextField()
content_object = GenericForeignKey(ct_field="content_type", fk_field="object_pk")
class Meta:
ordering = ['id']
# Models for lookup ordering tests
class House(models.Model):
name = models.CharField(max_length=50)
address = models.CharField(max_length=255)
owner = models.ForeignKey('Person', models.SET_NULL, null=True)
main_room = models.OneToOneField('Room', models.SET_NULL, related_name='main_room_of', null=True)
class Meta:
ordering = ['id']
class Room(models.Model):
name = models.CharField(max_length=50)
house = models.ForeignKey(House, models.CASCADE, related_name='rooms')
class Meta:
ordering = ['id']
class Person(models.Model):
name = models.CharField(max_length=50)
houses = models.ManyToManyField(House, related_name='occupants')
@property
def primary_house(self):
# Assume business logic forces every person to have at least one house.
return sorted(self.houses.all(), key=lambda house: -house.rooms.count())[0]
@property
def all_houses(self):
return list(self.houses.all())
class Meta:
ordering = ['id']
# Models for nullable FK tests
@python_2_unicode_compatible
class Employee(models.Model):
name = models.CharField(max_length=50)
boss = models.ForeignKey('self', models.SET_NULL, null=True, related_name='serfs')
def __str__(self):
return self.name
class Meta:
ordering = ['id']
# Ticket #19607
@python_2_unicode_compatible
class LessonEntry(models.Model):
name1 = models.CharField(max_length=200)
name2 = models.CharField(max_length=200)
def __str__(self):
return "%s %s" % (self.name1, self.name2)
@python_2_unicode_compatible
class WordEntry(models.Model):
lesson_entry = models.ForeignKey(LessonEntry, models.CASCADE)
name = models.CharField(max_length=200)
def __str__(self):
return "%s (%s)" % (self.name, self.id)
# Ticket #21410: Regression when related_name="+"
@python_2_unicode_compatible
class Author2(models.Model):
name = models.CharField(max_length=50, unique=True)
first_book = models.ForeignKey('Book', models.CASCADE, related_name='first_time_authors+')
favorite_books = models.ManyToManyField('Book', related_name='+')
def __str__(self):
return self.name
class Meta:
ordering = ['id']
# Models for many-to-many with UUID pk test:
class Pet(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
name = models.CharField(max_length=20)
people = models.ManyToManyField(Person, related_name='pets')
class Flea(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
current_room = models.ForeignKey(Room, models.SET_NULL, related_name='fleas', null=True)
pets_visited = models.ManyToManyField(Pet, related_name='fleas_hosted')
people_visited = models.ManyToManyField(Person, related_name='fleas_hosted')
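# Illustrative note (not part of the original test models): these models are
# exercised with queries such as
#   Author.objects.prefetch_related('books')
#   Person.objects.prefetch_related('houses__rooms')
# each of which batches the related lookup into one extra query per relation.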
| bsd-3-clause | 2,923,532,245,882,389,000 | 26.777003 | 102 | 0.673482 | false |
BryanCutler/spark | python/pyspark/tests/test_readwrite.py | 23 | 14386 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import shutil
import tempfile
import unittest
from pyspark.testing.utils import ReusedPySparkTestCase, SPARK_HOME
class InputFormatTests(ReusedPySparkTestCase):
@classmethod
def setUpClass(cls):
ReusedPySparkTestCase.setUpClass()
cls.tempdir = tempfile.NamedTemporaryFile(delete=False)
os.unlink(cls.tempdir.name)
cls.sc._jvm.WriteInputFormatTestDataGenerator.generateData(cls.tempdir.name, cls.sc._jsc)
@classmethod
def tearDownClass(cls):
ReusedPySparkTestCase.tearDownClass()
shutil.rmtree(cls.tempdir.name)
def test_oldhadoop(self):
basepath = self.tempdir.name
ints = sorted(self.sc.hadoopFile(basepath + "/sftestdata/sfint/",
"org.apache.hadoop.mapred.SequenceFileInputFormat",
"org.apache.hadoop.io.IntWritable",
"org.apache.hadoop.io.Text").collect())
ei = [(1, u'aa'), (1, u'aa'), (2, u'aa'), (2, u'bb'), (2, u'bb'), (3, u'cc')]
self.assertEqual(ints, ei)
hellopath = os.path.join(SPARK_HOME, "python/test_support/hello/hello.txt")
oldconf = {"mapreduce.input.fileinputformat.inputdir": hellopath}
hello = self.sc.hadoopRDD("org.apache.hadoop.mapred.TextInputFormat",
"org.apache.hadoop.io.LongWritable",
"org.apache.hadoop.io.Text",
conf=oldconf).collect()
result = [(0, u'Hello World!')]
self.assertEqual(hello, result)
def test_newhadoop(self):
basepath = self.tempdir.name
ints = sorted(self.sc.newAPIHadoopFile(
basepath + "/sftestdata/sfint/",
"org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat",
"org.apache.hadoop.io.IntWritable",
"org.apache.hadoop.io.Text").collect())
ei = [(1, u'aa'), (1, u'aa'), (2, u'aa'), (2, u'bb'), (2, u'bb'), (3, u'cc')]
self.assertEqual(ints, ei)
hellopath = os.path.join(SPARK_HOME, "python/test_support/hello/hello.txt")
newconf = {"mapreduce.input.fileinputformat.inputdir": hellopath}
hello = self.sc.newAPIHadoopRDD("org.apache.hadoop.mapreduce.lib.input.TextInputFormat",
"org.apache.hadoop.io.LongWritable",
"org.apache.hadoop.io.Text",
conf=newconf).collect()
result = [(0, u'Hello World!')]
self.assertEqual(hello, result)
def test_newolderror(self):
basepath = self.tempdir.name
self.assertRaises(Exception, lambda: self.sc.hadoopFile(
basepath + "/sftestdata/sfint/",
"org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat",
"org.apache.hadoop.io.IntWritable",
"org.apache.hadoop.io.Text"))
self.assertRaises(Exception, lambda: self.sc.newAPIHadoopFile(
basepath + "/sftestdata/sfint/",
"org.apache.hadoop.mapred.SequenceFileInputFormat",
"org.apache.hadoop.io.IntWritable",
"org.apache.hadoop.io.Text"))
def test_bad_inputs(self):
basepath = self.tempdir.name
self.assertRaises(Exception, lambda: self.sc.sequenceFile(
basepath + "/sftestdata/sfint/",
"org.apache.hadoop.io.NotValidWritable",
"org.apache.hadoop.io.Text"))
self.assertRaises(Exception, lambda: self.sc.hadoopFile(
basepath + "/sftestdata/sfint/",
"org.apache.hadoop.mapred.NotValidInputFormat",
"org.apache.hadoop.io.IntWritable",
"org.apache.hadoop.io.Text"))
self.assertRaises(Exception, lambda: self.sc.newAPIHadoopFile(
basepath + "/sftestdata/sfint/",
"org.apache.hadoop.mapreduce.lib.input.NotValidInputFormat",
"org.apache.hadoop.io.IntWritable",
"org.apache.hadoop.io.Text"))
def test_converters(self):
# use of custom converters
basepath = self.tempdir.name
maps = sorted(self.sc.sequenceFile(
basepath + "/sftestdata/sfmap/",
"org.apache.hadoop.io.IntWritable",
"org.apache.hadoop.io.MapWritable",
keyConverter="org.apache.spark.api.python.TestInputKeyConverter",
valueConverter="org.apache.spark.api.python.TestInputValueConverter").collect())
em = [(u'\x01', []),
(u'\x01', [3.0]),
(u'\x02', [1.0]),
(u'\x02', [1.0]),
(u'\x03', [2.0])]
self.assertEqual(maps, em)
def test_binary_files(self):
path = os.path.join(self.tempdir.name, "binaryfiles")
os.mkdir(path)
data = b"short binary data"
with open(os.path.join(path, "part-0000"), 'wb') as f:
f.write(data)
[(p, d)] = self.sc.binaryFiles(path).collect()
self.assertTrue(p.endswith("part-0000"))
self.assertEqual(d, data)
def test_binary_records(self):
path = os.path.join(self.tempdir.name, "binaryrecords")
os.mkdir(path)
with open(os.path.join(path, "part-0000"), 'w') as f:
for i in range(100):
f.write('%04d' % i)
result = self.sc.binaryRecords(path, 4).map(int).collect()
self.assertEqual(list(range(100)), result)
class OutputFormatTests(ReusedPySparkTestCase):
def setUp(self):
self.tempdir = tempfile.NamedTemporaryFile(delete=False)
os.unlink(self.tempdir.name)
def tearDown(self):
shutil.rmtree(self.tempdir.name, ignore_errors=True)
def test_oldhadoop(self):
basepath = self.tempdir.name
dict_data = [(1, {}),
(1, {"row1": 1.0}),
(2, {"row2": 2.0})]
self.sc.parallelize(dict_data).saveAsHadoopFile(
basepath + "/oldhadoop/",
"org.apache.hadoop.mapred.SequenceFileOutputFormat",
"org.apache.hadoop.io.IntWritable",
"org.apache.hadoop.io.MapWritable")
result = self.sc.hadoopFile(
basepath + "/oldhadoop/",
"org.apache.hadoop.mapred.SequenceFileInputFormat",
"org.apache.hadoop.io.IntWritable",
"org.apache.hadoop.io.MapWritable").collect()
for v in result:
self.assertTrue(v, dict_data)
conf = {
"mapred.output.format.class": "org.apache.hadoop.mapred.SequenceFileOutputFormat",
"mapreduce.job.output.key.class": "org.apache.hadoop.io.IntWritable",
"mapreduce.job.output.value.class": "org.apache.hadoop.io.MapWritable",
"mapreduce.output.fileoutputformat.outputdir": basepath + "/olddataset/"
}
self.sc.parallelize(dict_data).saveAsHadoopDataset(conf)
input_conf = {"mapreduce.input.fileinputformat.inputdir": basepath + "/olddataset/"}
result = self.sc.hadoopRDD(
"org.apache.hadoop.mapred.SequenceFileInputFormat",
"org.apache.hadoop.io.IntWritable",
"org.apache.hadoop.io.MapWritable",
conf=input_conf).collect()
for v in result:
self.assertTrue(v, dict_data)
def test_newhadoop(self):
basepath = self.tempdir.name
data = [(1, ""),
(1, "a"),
(2, "bcdf")]
self.sc.parallelize(data).saveAsNewAPIHadoopFile(
basepath + "/newhadoop/",
"org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat",
"org.apache.hadoop.io.IntWritable",
"org.apache.hadoop.io.Text")
result = sorted(self.sc.newAPIHadoopFile(
basepath + "/newhadoop/",
"org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat",
"org.apache.hadoop.io.IntWritable",
"org.apache.hadoop.io.Text").collect())
self.assertEqual(result, data)
conf = {
"mapreduce.job.outputformat.class":
"org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat",
"mapreduce.job.output.key.class": "org.apache.hadoop.io.IntWritable",
"mapreduce.job.output.value.class": "org.apache.hadoop.io.Text",
"mapreduce.output.fileoutputformat.outputdir": basepath + "/newdataset/"
}
self.sc.parallelize(data).saveAsNewAPIHadoopDataset(conf)
input_conf = {"mapreduce.input.fileinputformat.inputdir": basepath + "/newdataset/"}
new_dataset = sorted(self.sc.newAPIHadoopRDD(
"org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat",
"org.apache.hadoop.io.IntWritable",
"org.apache.hadoop.io.Text",
conf=input_conf).collect())
self.assertEqual(new_dataset, data)
def test_newolderror(self):
basepath = self.tempdir.name
rdd = self.sc.parallelize(range(1, 4)).map(lambda x: (x, "a" * x))
self.assertRaises(Exception, lambda: rdd.saveAsHadoopFile(
basepath + "/newolderror/saveAsHadoopFile/",
"org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat"))
self.assertRaises(Exception, lambda: rdd.saveAsNewAPIHadoopFile(
basepath + "/newolderror/saveAsNewAPIHadoopFile/",
"org.apache.hadoop.mapred.SequenceFileOutputFormat"))
def test_bad_inputs(self):
basepath = self.tempdir.name
rdd = self.sc.parallelize(range(1, 4)).map(lambda x: (x, "a" * x))
self.assertRaises(Exception, lambda: rdd.saveAsHadoopFile(
basepath + "/badinputs/saveAsHadoopFile/",
"org.apache.hadoop.mapred.NotValidOutputFormat"))
self.assertRaises(Exception, lambda: rdd.saveAsNewAPIHadoopFile(
basepath + "/badinputs/saveAsNewAPIHadoopFile/",
"org.apache.hadoop.mapreduce.lib.output.NotValidOutputFormat"))
def test_converters(self):
# use of custom converters
basepath = self.tempdir.name
data = [(1, {3.0: u'bb'}),
(2, {1.0: u'aa'}),
(3, {2.0: u'dd'})]
self.sc.parallelize(data).saveAsNewAPIHadoopFile(
basepath + "/converters/",
"org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat",
keyConverter="org.apache.spark.api.python.TestOutputKeyConverter",
valueConverter="org.apache.spark.api.python.TestOutputValueConverter")
converted = sorted(self.sc.sequenceFile(basepath + "/converters/").collect())
expected = [(u'1', 3.0),
(u'2', 1.0),
(u'3', 2.0)]
self.assertEqual(converted, expected)
def test_reserialization(self):
basepath = self.tempdir.name
x = range(1, 5)
y = range(1001, 1005)
data = list(zip(x, y))
rdd = self.sc.parallelize(x).zip(self.sc.parallelize(y))
rdd.saveAsSequenceFile(basepath + "/reserialize/sequence")
result1 = sorted(self.sc.sequenceFile(basepath + "/reserialize/sequence").collect())
self.assertEqual(result1, data)
rdd.saveAsHadoopFile(
basepath + "/reserialize/hadoop",
"org.apache.hadoop.mapred.SequenceFileOutputFormat")
result2 = sorted(self.sc.sequenceFile(basepath + "/reserialize/hadoop").collect())
self.assertEqual(result2, data)
rdd.saveAsNewAPIHadoopFile(
basepath + "/reserialize/newhadoop",
"org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat")
result3 = sorted(self.sc.sequenceFile(basepath + "/reserialize/newhadoop").collect())
self.assertEqual(result3, data)
conf4 = {
"mapred.output.format.class": "org.apache.hadoop.mapred.SequenceFileOutputFormat",
"mapreduce.job.output.key.class": "org.apache.hadoop.io.IntWritable",
"mapreduce.job.output.value.class": "org.apache.hadoop.io.IntWritable",
"mapreduce.output.fileoutputformat.outputdir": basepath + "/reserialize/dataset"}
rdd.saveAsHadoopDataset(conf4)
result4 = sorted(self.sc.sequenceFile(basepath + "/reserialize/dataset").collect())
self.assertEqual(result4, data)
conf5 = {"mapreduce.job.outputformat.class":
"org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat",
"mapreduce.job.output.key.class": "org.apache.hadoop.io.IntWritable",
"mapreduce.job.output.value.class": "org.apache.hadoop.io.IntWritable",
"mapreduce.output.fileoutputformat.outputdir": basepath + "/reserialize/newdataset"
}
rdd.saveAsNewAPIHadoopDataset(conf5)
result5 = sorted(self.sc.sequenceFile(basepath + "/reserialize/newdataset").collect())
self.assertEqual(result5, data)
def test_malformed_RDD(self):
basepath = self.tempdir.name
# non-batch-serialized RDD[[(K, V)]] should be rejected
data = [[(1, "a")], [(2, "aa")], [(3, "aaa")]]
rdd = self.sc.parallelize(data, len(data))
self.assertRaises(Exception, lambda: rdd.saveAsSequenceFile(
basepath + "/malformed/sequence"))
if __name__ == "__main__":
from pyspark.tests.test_readwrite import * # noqa: F401
try:
import xmlrunner # type: ignore[import]
testRunner = xmlrunner.XMLTestRunner(output='target/test-reports', verbosity=2)
except ImportError:
testRunner = None
unittest.main(testRunner=testRunner, verbosity=2)
| apache-2.0 | 8,499,323,396,713,133,000 | 44.815287 | 100 | 0.616016 | false |
michaelconnor00/gbdxtools | gbdxtools/catalog_search_aoi.py | 1 | 3959 | """
GBDX Catalog Search Helper Functions.
This set of functions is used for breaking up a large AOI into smaller AOIs to search, because the catalog API
can only handle 2 square degrees at a time.
"""
from builtins import zip
from builtins import range
from pygeoif import geometry
import json
def point_in_poly(x,y,poly):
n = len(poly)
inside = False
p1x,p1y = poly[0]
for i in range(n+1):
p2x,p2y = poly[i % n]
if y > min(p1y,p2y):
if y <= max(p1y,p2y):
if x <= max(p1x,p2x):
if p1y != p2y:
xints = (y-p1y)*(p2x-p1x)/(p2y-p1y)+p1x
if p1x == p2x or x <= xints:
inside = not inside
p1x,p1y = p2x,p2y
return inside
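# Quick sanity check of the ray-casting test above (the square and the
# probe points are illustrative, not part of the library API):
#
#   square = [(0, 0), (1, 0), (1, 1), (0, 1)]
#   point_in_poly(0.5, 0.5, square)   # -> True  (inside)
#   point_in_poly(2.0, 2.0, square)   # -> False (outside)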
# range() but for float steps; unlike range(), the stop value is yielded too
def xfrange(start, stop, step):
while start < stop:
yield start
start += step
else:
yield stop
def dedup_records(records):
# 0.5 seconds for 5k records
#print "Records: %s" % len(records)
ids = set( [r['identifier'] for r in records] )
#print "Ids: %s" % len(ids)
deduped = []
for r in records:
if r['identifier'] in ids:
deduped.append(r)
ids = ids - set( [ r['identifier'] ] )
#print "Deduped: %s" % len(deduped)
return deduped
def bbox_in_poly(bbox, poly):
    W, S, E, N = bbox.bounds
    points = [(W, N), (E, N), (E, S), (W, S)]
    for p in points:
        if point_in_poly(p[0], p[1], poly.exterior.coords):
            return True
    return False
def records_in_polygon(records,polygon):
# Filter out the records that are not inside the polygon
output_records = []
for record in records:
recordwkt = record['properties']['footprintWkt']
record_polygon = geometry.from_wkt(recordwkt)
if bbox_in_poly(record_polygon,polygon):
output_records.append(record)
#print "Filtered in polygon: %s" % len(output_records)
return output_records
def polygon_from_bounds( bounds ):
W, S, E, N = bounds
return geometry.Polygon( ( (W,N),(E,N),(E,S),(W,S),(W,N) ) )
def search_materials_in_multiple_small_searches(search_request, gbdx_connection, base_url):
D = 1.4 # the size in degrees of the side of a square that we will search
searchAreaWkt = search_request['searchAreaWkt']
searchAreaPolygon = geometry.from_wkt(searchAreaWkt)
W, S, E, N = searchAreaPolygon.bounds
Ys = [i for i in xfrange(S,N,D)]
Xs = [i for i in xfrange(W,E,D)]
# Handle point searches:
if W == E and N == S:
Ys = [S, N]
Xs = [W, E]
# print Xs
# print Ys
# print searchAreaWkt
records = []
# Loop pairwise
row = 0
col = 0
for y, y1 in zip(Ys, Ys[1:]):
row = row + 1
for x, x1 in zip(Xs, Xs[1:]):
col = col + 1
bbox = (x, y, x1, y1)
subsearchpoly = polygon_from_bounds(bbox)
# # verify that the subsearchpoly is inside the searchAreaPolygon. If not break.
if not bbox_in_poly(subsearchpoly,searchAreaPolygon) and not bbox_in_poly(searchAreaPolygon, subsearchpoly) and not (y == y1 and x == x1):
pass
else:
search_request['searchAreaWkt'] = subsearchpoly.wkt
url = '%(base_url)s/search?includeRelationships=false' % {
'base_url': base_url
}
headers = {'Content-Type':'application/json'}
r = gbdx_connection.post(url, headers=headers, data=json.dumps(search_request))
r.raise_for_status()
records = records + r.json()['results']
records = dedup_records(records)
# this next line works, but filters too much stuff. It removes some items intersecting the polygon.
#records = records_in_polygon(records, searchAreaPolygon) # this takes quite a while to run, so leave it commented
    return records
| mit | 8,279,207,759,648,245,000 | 29.697674 | 150 | 0.579439 | false |
jjscarafia/odoo | addons/base_import/tests/test_cases.py | 84 | 13383 | # -*- encoding: utf-8 -*-
import unittest2
from openerp.tests.common import TransactionCase
from .. import models
ID_FIELD = {
'id': 'id',
'name': 'id',
'string': "External ID",
'required': False,
'fields': [],
}
def make_field(name='value', string='unknown', required=False, fields=[]):
return [
ID_FIELD,
{'id': name, 'name': name, 'string': string, 'required': required, 'fields': fields},
]
def sorted_fields(fields):
""" recursively sort field lists to ease comparison """
recursed = [dict(field, fields=sorted_fields(field['fields'])) for field in fields]
return sorted(recursed, key=lambda field: field['id'])
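# For example (hand-made field dicts, purely illustrative):
#
#   sorted_fields([{'id': 'b', 'fields': []},
#                  {'id': 'a', 'fields': [{'id': 'z', 'fields': []}]}])
#
# returns the 'a' entry first, with each nested 'fields' list also sorted
# by 'id', which is what makes assertEqualFields below order-insensitive.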
class BaseImportCase(TransactionCase):
def assertEqualFields(self, fields1, fields2):
self.assertEqual(sorted_fields(fields1), sorted_fields(fields2))
class test_basic_fields(BaseImportCase):
def get_fields(self, field):
return self.registry('base_import.import')\
.get_fields(self.cr, self.uid, 'base_import.tests.models.' + field)
def test_base(self):
""" A basic field is not required """
self.assertEqualFields(self.get_fields('char'), make_field())
def test_required(self):
""" Required fields should be flagged (so they can be fill-required) """
self.assertEqualFields(self.get_fields('char.required'), make_field(required=True))
def test_readonly(self):
""" Readonly fields should be filtered out"""
self.assertEqualFields(self.get_fields('char.readonly'), [ID_FIELD])
def test_readonly_states(self):
""" Readonly fields with states should not be filtered out"""
self.assertEqualFields(self.get_fields('char.states'), make_field())
def test_readonly_states_noreadonly(self):
""" Readonly fields with states having nothing to do with
readonly should still be filtered out"""
self.assertEqualFields(self.get_fields('char.noreadonly'), [ID_FIELD])
def test_readonly_states_stillreadonly(self):
""" Readonly fields with readonly states leaving them readonly
always... filtered out"""
self.assertEqualFields(self.get_fields('char.stillreadonly'), [ID_FIELD])
def test_m2o(self):
""" M2O fields should allow import of themselves (name_get),
their id and their xid"""
self.assertEqualFields(self.get_fields('m2o'), make_field(fields=[
{'id': 'value', 'name': 'id', 'string': 'External ID', 'required': False, 'fields': []},
{'id': 'value', 'name': '.id', 'string': 'Database ID', 'required': False, 'fields': []},
]))
def test_m2o_required(self):
""" If an m2o field is required, its three sub-fields are
required as well (the client has to handle that: requiredness
is id-based)
"""
self.assertEqualFields(self.get_fields('m2o.required'), make_field(required=True, fields=[
{'id': 'value', 'name': 'id', 'string': 'External ID', 'required': True, 'fields': []},
{'id': 'value', 'name': '.id', 'string': 'Database ID', 'required': True, 'fields': []},
]))
class test_o2m(BaseImportCase):
def get_fields(self, field):
return self.registry('base_import.import')\
.get_fields(self.cr, self.uid, 'base_import.tests.models.' + field)
def test_shallow(self):
self.assertEqualFields(self.get_fields('o2m'), make_field(fields=[
ID_FIELD,
# FIXME: should reverse field be ignored?
{'id': 'parent_id', 'name': 'parent_id', 'string': 'unknown', 'required': False, 'fields': [
{'id': 'parent_id', 'name': 'id', 'string': 'External ID', 'required': False, 'fields': []},
{'id': 'parent_id', 'name': '.id', 'string': 'Database ID', 'required': False, 'fields': []},
]},
{'id': 'value', 'name': 'value', 'string': 'unknown', 'required': False, 'fields': []},
]))
class test_match_headers_single(TransactionCase):
def test_match_by_name(self):
match = self.registry('base_import.import')._match_header(
'f0', [{'name': 'f0'}], {})
self.assertEqual(match, [{'name': 'f0'}])
def test_match_by_string(self):
match = self.registry('base_import.import')._match_header(
'some field', [{'name': 'bob', 'string': "Some Field"}], {})
self.assertEqual(match, [{'name': 'bob', 'string': "Some Field"}])
def test_nomatch(self):
match = self.registry('base_import.import')._match_header(
'should not be', [{'name': 'bob', 'string': "wheee"}], {})
self.assertEqual(match, [])
def test_recursive_match(self):
f = {
'name': 'f0',
'string': "My Field",
'fields': [
{'name': 'f0', 'string': "Sub field 0", 'fields': []},
{'name': 'f1', 'string': "Sub field 2", 'fields': []},
]
}
match = self.registry('base_import.import')._match_header(
'f0/f1', [f], {})
self.assertEqual(match, [f, f['fields'][1]])
def test_recursive_nomatch(self):
""" Match first level, fail to match second level
"""
f = {
'name': 'f0',
'string': "My Field",
'fields': [
{'name': 'f0', 'string': "Sub field 0", 'fields': []},
{'name': 'f1', 'string': "Sub field 2", 'fields': []},
]
}
match = self.registry('base_import.import')._match_header(
'f0/f2', [f], {})
self.assertEqual(match, [])
class test_match_headers_multiple(TransactionCase):
def test_noheaders(self):
self.assertEqual(
self.registry('base_import.import')._match_headers(
[], [], {}),
(None, None)
)
def test_nomatch(self):
self.assertEqual(
self.registry('base_import.import')._match_headers(
iter([
['foo', 'bar', 'baz', 'qux'],
['v1', 'v2', 'v3', 'v4'],
]),
[],
{'headers': True}),
(
['foo', 'bar', 'baz', 'qux'],
dict.fromkeys(range(4))
)
)
def test_mixed(self):
self.assertEqual(
self.registry('base_import.import')._match_headers(
iter(['foo bar baz qux/corge'.split()]),
[
{'name': 'bar', 'string': 'Bar'},
{'name': 'bob', 'string': 'Baz'},
{'name': 'qux', 'string': 'Qux', 'fields': [
{'name': 'corge', 'fields': []},
]}
],
{'headers': True}),
(['foo', 'bar', 'baz', 'qux/corge'], {
0: None,
1: ['bar'],
2: ['bob'],
3: ['qux', 'corge'],
})
)
class test_preview(TransactionCase):
def make_import(self):
Import = self.registry('base_import.import')
id = Import.create(self.cr, self.uid, {
'res_model': 'res.users',
'file': u"로그인,언어\nbob,1\n".encode('euc_kr'),
})
return Import, id
def test_encoding(self):
Import, id = self.make_import()
result = Import.parse_preview(self.cr, self.uid, id, {
'quoting': '"',
'separator': ',',
})
self.assertTrue('error' in result)
    def test_csv_errors_quoting(self):
Import, id = self.make_import()
result = Import.parse_preview(self.cr, self.uid, id, {
'quoting': 'foo',
'separator': ',',
'encoding': 'euc_kr',
})
self.assertTrue('error' in result)
    def test_csv_errors_separator(self):
Import, id = self.make_import()
result = Import.parse_preview(self.cr, self.uid, id, {
'quoting': '"',
'separator': 'bob',
'encoding': 'euc_kr',
})
self.assertTrue('error' in result)
def test_success(self):
Import = self.registry('base_import.import')
id = Import.create(self.cr, self.uid, {
'res_model': 'base_import.tests.models.preview',
'file': 'name,Some Value,Counter\n'
'foo,1,2\n'
'bar,3,4\n'
'qux,5,6\n'
})
result = Import.parse_preview(self.cr, self.uid, id, {
'quoting': '"',
'separator': ',',
'headers': True,
})
self.assertEqual(result['matches'], {0: ['name'], 1: ['somevalue'], 2: None})
self.assertEqual(result['headers'], ['name', 'Some Value', 'Counter'])
# Order depends on iteration order of fields_get
self.assertItemsEqual(result['fields'], [
ID_FIELD,
{'id': 'name', 'name': 'name', 'string': 'Name', 'required':False, 'fields': []},
{'id': 'somevalue', 'name': 'somevalue', 'string': 'Some Value', 'required':True, 'fields': []},
{'id': 'othervalue', 'name': 'othervalue', 'string': 'Other Variable', 'required':False, 'fields': []},
])
self.assertEqual(result['preview'], [
['foo', '1', '2'],
['bar', '3', '4'],
['qux', '5', '6'],
])
# Ensure we only have the response fields we expect
self.assertItemsEqual(result.keys(), ['matches', 'headers', 'fields', 'preview'])
class test_convert_import_data(TransactionCase):
""" Tests conversion of base_import.import input into data which
can be fed to Model.import_data
"""
def test_all(self):
Import = self.registry('base_import.import')
id = Import.create(self.cr, self.uid, {
'res_model': 'base_import.tests.models.preview',
'file': 'name,Some Value,Counter\n'
'foo,1,2\n'
'bar,3,4\n'
'qux,5,6\n'
})
record = Import.browse(self.cr, self.uid, id)
data, fields = Import._convert_import_data(
record, ['name', 'somevalue', 'othervalue'],
{'quoting': '"', 'separator': ',', 'headers': True,})
self.assertItemsEqual(fields, ['name', 'somevalue', 'othervalue'])
self.assertItemsEqual(data, [
('foo', '1', '2'),
('bar', '3', '4'),
('qux', '5', '6'),
])
def test_filtered(self):
""" If ``False`` is provided as field mapping for a column,
that column should be removed from importable data
"""
Import = self.registry('base_import.import')
id = Import.create(self.cr, self.uid, {
'res_model': 'base_import.tests.models.preview',
'file': 'name,Some Value,Counter\n'
'foo,1,2\n'
'bar,3,4\n'
'qux,5,6\n'
})
record = Import.browse(self.cr, self.uid, id)
data, fields = Import._convert_import_data(
record, ['name', False, 'othervalue'],
{'quoting': '"', 'separator': ',', 'headers': True,})
self.assertItemsEqual(fields, ['name', 'othervalue'])
self.assertItemsEqual(data, [
('foo', '2'),
('bar', '4'),
('qux', '6'),
])
def test_norow(self):
""" If a row is composed only of empty values (due to having
filtered out non-empty values from it), it should be removed
"""
Import = self.registry('base_import.import')
id = Import.create(self.cr, self.uid, {
'res_model': 'base_import.tests.models.preview',
'file': 'name,Some Value,Counter\n'
'foo,1,2\n'
',3,\n'
',5,6\n'
})
record = Import.browse(self.cr, self.uid, id)
data, fields = Import._convert_import_data(
record, ['name', False, 'othervalue'],
{'quoting': '"', 'separator': ',', 'headers': True,})
self.assertItemsEqual(fields, ['name', 'othervalue'])
self.assertItemsEqual(data, [
('foo', '2'),
('', '6'),
])
def test_nofield(self):
Import = self.registry('base_import.import')
id = Import.create(self.cr, self.uid, {
'res_model': 'base_import.tests.models.preview',
'file': 'name,Some Value,Counter\n'
'foo,1,2\n'
})
record = Import.browse(self.cr, self.uid, id)
self.assertRaises(
ValueError,
Import._convert_import_data,
record, [],
{'quoting': '"', 'separator': ',', 'headers': True,})
def test_falsefields(self):
Import = self.registry('base_import.import')
id = Import.create(self.cr, self.uid, {
'res_model': 'base_import.tests.models.preview',
'file': 'name,Some Value,Counter\n'
'foo,1,2\n'
})
record = Import.browse(self.cr, self.uid, id)
self.assertRaises(
ValueError,
Import._convert_import_data,
record, [False, False, False],
{'quoting': '"', 'separator': ',', 'headers': True,})
| agpl-3.0 | 6,780,955,096,681,731,000 | 36.354749 | 115 | 0.5126 | false |
shimpe/pyvectortween | vectortween/SequentialAnimation.py | 1 | 2872 | from copy import deepcopy
import numpy as np
from vectortween.Animation import Animation
from vectortween.Mapping import Mapping
from vectortween.Tween import Tween
def normalize(x):
return x / sum(x)
class SequentialAnimation(Animation):
def __init__(self, list_of_animations=None, timeweight=None, repeats=None, tween=None):
super().__init__(None, None)
if tween is None:
tween = ['linear']
if timeweight is None:
timeweight = []
if list_of_animations is None:
list_of_animations = []
if repeats is None:
repeats = 1
self.ListOfAnimations = []
self.ListOfAnimationTimeWeight = np.array([])
self.CumulativeNormalizedTimeWeights = np.array([])
self.T = Tween(*tween)
if list_of_animations:
for r in range(repeats):
if not timeweight:
for a in list_of_animations:
self.add(a, 1)
else:
for a, t in zip(list_of_animations, timeweight):
self.add(a, t)
def add(self, anim, timeweight=1):
self.ListOfAnimations.append(deepcopy(anim))
self.ListOfAnimationTimeWeight = np.append(self.ListOfAnimationTimeWeight, [timeweight])
self.CumulativeNormalizedTimeWeights = np.cumsum(normalize(self.ListOfAnimationTimeWeight))
def make_frame(self, frame, birthframe, startframe, stopframe, deathframe, noiseframe=None):
if birthframe is None:
birthframe = startframe
if deathframe is None:
deathframe = stopframe
if frame < birthframe:
return None
if frame > deathframe:
return None
if frame < startframe:
return self.ListOfAnimations[0].make_frame(frame, birthframe, startframe, stopframe, deathframe, noiseframe)
if frame > stopframe:
return self.ListOfAnimations[-1].make_frame(frame, birthframe, startframe, stopframe, deathframe, noiseframe)
t = self.T.tween2(frame, startframe, stopframe)
if t is None:
return None
for i, w in enumerate(self.CumulativeNormalizedTimeWeights):
if t <= w:
                if i == 0:  # first segment: no previous cumulative weight
relativestartframe = 0
else:
relativestartframe = self.CumulativeNormalizedTimeWeights[i - 1]
relativestopframe = self.CumulativeNormalizedTimeWeights[i]
absstartframe = Mapping.linlin(relativestartframe, 0, 1, startframe, stopframe)
absstopframe = Mapping.linlin(relativestopframe, 0, 1, startframe, stopframe)
return self.ListOfAnimations[i].make_frame(frame, birthframe, absstartframe, absstopframe, deathframe, noiseframe)
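# Minimal usage sketch (anim_a/anim_b stand for any Animation subclasses;
# the frame numbers below are illustrative):
#
#   seq = SequentialAnimation([anim_a, anim_b], timeweight=[1, 3])
#   seq.make_frame(frame=10, birthframe=0, startframe=0,
#                  stopframe=100, deathframe=100)
#
# With weights [1, 3] the cumulative normalized weights are [0.25, 1.0],
# so roughly the first quarter of the frames is delegated to anim_a and
# the remainder to anim_b.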
| mit | 6,247,802,363,189,065,000 | 40.028571 | 130 | 0.619777 | false |
spaceof7/QGIS | python/plugins/MetaSearch/pavement.py | 67 | 7402 | # -*- coding: utf-8 -*-
###############################################################################
#
# Copyright (C) 2014 Tom Kralidis ([email protected])
#
# This source is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# This code is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
###############################################################################
from configparser import ConfigParser
import getpass
import os
import shutil
import xml.etree.ElementTree as etree
import xmlrpc.client
import zipfile
from paver.easy import (call_task, cmdopts, error, info, options, path,
sh, task, Bunch)
from owslib.csw import CatalogueServiceWeb # spellok
PLUGIN_NAME = 'MetaSearch'
BASEDIR = os.path.abspath(os.path.dirname(__file__))
USERDIR = os.path.expanduser('~')
with open('metadata.txt') as mf:
cp = ConfigParser()
    cp.read_file(mf)
VERSION = cp.get('general', 'version')
options(
base=Bunch(
home=BASEDIR,
plugin=path(BASEDIR),
ui=path(BASEDIR) / 'plugin' / PLUGIN_NAME / 'ui',
install=path('%s/.qgis3/python/plugins/MetaSearch' % USERDIR),
ext_libs=path('plugin/MetaSearch/ext-libs'),
tmp=path(path('%s/MetaSearch-dist' % USERDIR)),
version=VERSION
),
upload=Bunch(
host='plugins.qgis.org',
port=80,
endpoint='plugins/RPC2/'
)
)
@task
def clean():
"""clean environment"""
if os.path.exists(options.base.install):
if os.path.islink(options.base.install):
os.unlink(options.base.install)
else:
shutil.rmtree(options.base.install)
if os.path.exists(options.base.tmp):
shutil.rmtree(options.base.tmp)
if os.path.exists(options.base.ext_libs):
shutil.rmtree(options.base.ext_libs)
for ui_file in os.listdir(options.base.ui):
if ui_file.endswith('.py') and ui_file != '__init__.py':
os.remove(options.base.plugin / 'ui' / ui_file)
os.remove(path(options.base.home) / '%s.pro' % PLUGIN_NAME)
sh('git clean -dxf')
@task
def install():
"""install plugin into user QGIS environment"""
plugins_dir = path(USERDIR) / '.qgis3/python/plugins'
if os.path.exists(options.base.install):
if os.path.islink(options.base.install):
os.unlink(options.base.install)
else:
shutil.rmtree(options.base.install)
if not os.path.exists(plugins_dir):
raise OSError('The directory %s does not exist.' % plugins_dir)
if not hasattr(os, 'symlink'):
shutil.copytree(options.base.plugin, options.base.install)
elif not os.path.exists(options.base.install):
os.symlink(options.base.plugin, options.base.install)
@task
def package():
"""create zip file of plugin"""
skip_files = [
'AUTHORS.txt',
'CMakeLists.txt',
'requirements.txt',
'requirements-dev.txt',
        'pavement.py'
]
package_file = get_package_filename()
if not os.path.exists(options.base.tmp):
options.base.tmp.mkdir()
if os.path.exists(package_file):
os.unlink(package_file)
with zipfile.ZipFile(package_file, 'w', zipfile.ZIP_DEFLATED) as zipf:
for root, dirs, files in os.walk(options.base.plugin):
for file_add in files:
if file_add.endswith('.pyc') or file_add in skip_files:
continue
filepath = os.path.join(root, file_add)
relpath = os.path.join(PLUGIN_NAME, os.path.relpath(filepath))
zipf.write(filepath, relpath)
return package_file # return name of created zipfile
@task
@cmdopts([
('user=', 'u', 'OSGeo userid'),
])
def upload():
"""upload package zipfile to server"""
user = options.get('user', False)
if not user:
raise ValueError('OSGeo userid required')
password = getpass.getpass('Enter your password: ')
if password.strip() == '':
raise ValueError('password required')
call_task('package')
zipf = get_package_filename()
url = 'http://%s:%s@%s:%d/%s' % (user, password, options.upload.host,
options.upload.port,
options.upload.endpoint)
info('Uploading to http://%s/%s' % (options.upload.host,
options.upload.endpoint))
server = xmlrpc.client.ServerProxy(url, verbose=False)
try:
        with open(zipf, 'rb') as zfile:
plugin_id, version_id = \
server.plugin.upload(xmlrpc.client.Binary(zfile.read()))
info('Plugin ID: %s', plugin_id)
info('Version ID: %s', version_id)
except xmlrpc.client.Fault as err:
error('ERROR: fault error')
error('Fault code: %d', err.faultCode)
error('Fault string: %s', err.faultString)
except xmlrpc.client.ProtocolError as err:
error('Error: Protocol error')
error("%s : %s", err.errcode, err.errmsg)
if err.errcode == 403:
error('Invalid name and password')
@task
def test_default_csw_connections():
"""test that the default CSW connections work"""
relpath = 'resources%sconnections-default.xml' % os.sep
csw_connections_xml = options.base.plugin / relpath
conns = etree.parse(csw_connections_xml)
for conn in conns.findall('csw'):
try:
csw = CatalogueServiceWeb(conn.attrib.get('url')) # spellok
info('Success: %s', csw.identification.title)
csw.getrecords2()
except Exception as err:
            raise ValueError('ERROR: %s' % err)
@task
@cmdopts([
('filename=', 'f', 'Path to file of CSW URLs'),
])
def generate_csw_connections_file():
"""generate a CSW connections file from a flat file of CSW URLs"""
filename = options.get('filename', False)
if not filename:
raise ValueError('path to file of CSW URLs required')
conns = etree.Element('qgsCSWConnections')
conns.attrib['version'] = '1.0'
with open(filename) as connsfh:
for line in connsfh:
url = line.strip()
if not url: # blank line
continue
try:
csw = CatalogueServiceWeb(url) # spellok
title = str(csw.identification.title)
etree.SubElement(conns, 'csw', name=title, url=url)
except Exception as err:
error('ERROR on CSW %s: %s', url, err)
    with open('%s.xml' % filename, 'wb') as connsxmlfh:
        connsxmlfh.write(etree.tostring(conns, encoding='utf-8'))
def get_package_filename():
"""return filepath of plugin zipfile"""
filename = '%s-%s.zip' % (PLUGIN_NAME, options.base.version)
package_file = '%s/%s' % (options.base.tmp, filename)
return package_file
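# Typical invocations of the tasks above, assuming paver is installed
# (the user id and URL file name are placeholders):
#
#   paver package                                # build MetaSearch-<version>.zip in ~/MetaSearch-dist
#   paver upload -u my_osgeo_id                  # package, then upload to plugins.qgis.org
#   paver generate_csw_connections_file -f csw-urls.txt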
| gpl-2.0 | 5,971,240,272,225,238,000 | 31.182609 | 79 | 0.605917 | false |
trabucayre/periphondemand | periphondemand/toolchains/synthesis/vivado/vivado.py | 2 | 26838 | #! /usr/bin/python
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Name: ise.py
# Purpose:
# Author: Gwenhael Goavec-Merou <[email protected]>
# Created: 21/07/2015
# -----------------------------------------------------------------------------
# Copyright (2008) Armadeus Systems
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# -----------------------------------------------------------------------------
# Revision list :
#
# Date By Changes
#
# -----------------------------------------------------------------------------
""" Manage Vivado toolchain """
import os
from periphondemand.bin.define import BINARYPROJECTPATH
from periphondemand.bin.define import BINARY_PREFIX
from periphondemand.bin.define import SYNTHESISPATH
from periphondemand.bin.define import OBJSPATH
from periphondemand.bin.define import VHDLEXT
from periphondemand.bin.define import TCLEXT
from periphondemand.bin.define import XILINX_BITSTREAM_SUFFIX
from periphondemand.bin.define import XILINX_BINARY_SUFFIX
from periphondemand.bin.define import COLOR_END
from periphondemand.bin.define import COLOR_SHELL
from periphondemand.bin.utils.settings import Settings
from periphondemand.bin.utils.poderror import PodError
from periphondemand.bin.utils.display import Display
from periphondemand.bin.utils import wrappersystem as sy
from periphondemand.bin.toolchain.synthesis import Synthesis
SETTINGS = Settings()
DISPLAY = Display()
class Vivado(Synthesis):
""" Manage specific synthesis part for
vivado toolchain
"""
SYNTH_CMD = "vivado"
name = "vivado"
def __init__(self, parent):
""" constructor
"""
Synthesis.__init__(self, parent)
tool = self.synthesis_toolcommandname
command = "-version"
cont = []
cont = list(os.popen(tool + " " + command))
self.version = cont[0].split(" ")[1][1:]
self.base_version = self.version.split(".")[0]
@classmethod
def constraints_file_extension(cls):
return ("xdc")
def need_block_design(self):
""" Check if design need to generate a block
design file
"""
list_bd_comp = []
for component in self.project.instances:
bd_node = component.get_nodes("vivado")
            if len(bd_node) != 0:
list_bd_comp.append(component)
return list_bd_comp
def generate_block_design(self, component):
""" Generate the block design file for xilinx fpga """
out = "set design_name " + component.name + "_bd\n\n"
out += "# CHECKING IF PROJECT EXISTS\n"
out += 'if { [get_projects -quiet] eq "" } {\n'
out += ' puts "ERROR: Please open or create a project!"\n'
out += " return 1\n"
out += "}\n\n\n"
out += "# Creating design if needed\n\n"
out += " # USE CASES:\n"
out += " # 8) No opened design, design_name not in project.\n"
out += " # 9) Current opened design, has components, but " + \
"diff names, design_name not in project.\n\n"
out += ' puts "INFO: Currently there is no design ' + \
'<$design_name> in project, so creating one..."\n\n'
out += " create_bd_design $design_name\n\n"
out += ' puts "INFO: Making design <$design_name> as ' + \
'current_bd_design."\n'
out += " current_bd_design $design_name\n\n"
out += 'puts "INFO: Currently the variable <design_name> ' + \
'is equal to \\"$design_name\\"."\n\n'
out += "\n\n\n"
out += \
"# Procedure to create entire design; Provide " + \
"argument to make\n" + \
'# procedure reusable. If parentCell is "", will ' + \
'use root.\n' + \
"proc create_root_design { parentCell } {\n\n" + \
' if { $parentCell eq "" } {\n' + \
" set parentCell [get_bd_cells /]\n" + \
" }\n\n" + \
" # Get object for parentCell\n" + \
" set parentObj [get_bd_cells $parentCell]\n" + \
' if { $parentObj == "" } {\n' + \
' puts "ERROR: Unable to find parent cell ' + \
' <$parentCell>!"\n' + \
" return\n" + \
" }\n\n"
out += \
" # Make sure parentObj is hier blk\n" + \
" set parentType [get_property TYPE $parentObj]\n" + \
' if { $parentType ne "hier" } {\n' + \
' puts "ERROR: Parent <$parentObj> has TYPE = ' + \
' <$parentType>. ' + \
'Expected to be <hier>."\n' + \
" return\n" + \
" }\n\n" + \
" # Save current instance; Restore later\n" + \
" set oldCurInst [current_bd_instance .]\n\n" + \
" # Set parent object as current\n" + \
" current_bd_instance $parentObj\n"
out += "\n\n"
out += " # Create interface ports\n"
vivado_node = component.get_nodes("vivado")
for vivado_if in vivado_node[0].get_subnodes("vivado_interfaces",
"vivado_interface"):
if_params = vivado_if.get_nodes("parameter")
out += " set " + vivado_if.get_attr_value("instance_name") + \
" [ create_bd_intf_port -mode " + \
vivado_if.get_attr_value("mode") + " " + \
vivado_if.get_attr_value("options") + " " + \
vivado_if.get_attr_value("name") + " " + \
vivado_if.get_attr_value("instance_name") + " ]\n"
if if_params != []:
out += " set_property -dict [ list "
for param in if_params:
out += " CONFIG." + param.get_attr_value("name") + \
" {" + param.text + "} "
out += " ] $" + \
vivado_if.get_attr_value("instance_name") + "\n"
out += "\n"
out += " # Create ports\n"
for vivado_if in vivado_node[0].get_subnodes("vivado_ports",
"vivado_port"):
if_params = vivado_if.get_nodes("parameter")
out += " set " + vivado_if.get_attr_value("instance_name") + \
" [ create_bd_port -dir " + \
vivado_if.get_attr_value("direction")
# if if_from != None:
# out += " -from " + if_from)
# if if_to != None:
# out += " -to " + if_to)
out += " -type " + vivado_if.get_attr_value("type") + \
" " + vivado_if.get_attr_value("instance_name") + " ]\n"
if if_params != []:
out += " set_property -dict [ list "
for param in if_params:
out += "CONFIG." + param.get_attr_value("name") + \
" {" + param.text + "} "
out += " ] $" + vivado_if.get_attr_value("instance_name") + \
"\n"
out += "\n"
vivado_comps = vivado_node[0].get_subnodes("vivado_components",
"vivado_component")
for comp in vivado_comps:
cp_params = comp.get_nodes("parameter")
out += " # Create instance: " + \
comp.get_attr_value("instance_name") + \
", and set properties\n"
out += " set " + comp.get_attr_value("instance_name") + \
" [ create_bd_cell " + \
"-type " + comp.get_attr_value("type") + " " + \
comp.get_attr_value("options") + " " + \
comp.get_attr_value("name") + \
" " + comp.get_attr_value("instance_name") + " ]\n"
if cp_params != []:
out += " set_property -dict [ list "
for param in cp_params:
out += "CONFIG." + param.get_attr_value("name") + \
" {" + param.text + "} \\\n"
out += " ] $" + comp.get_attr_value("instance_name") + "\n"
out += "\n"
out += " # Create interface connections\n"
vivado_conns = vivado_node[0].get_subnodes("ifs_connections",
"connection")
for conn in vivado_conns:
out += " connect_bd_intf_net -intf_net " + \
conn.get_attr_value("src")
for dest in conn.get_nodes("dest"):
dest_type = dest.get_attr_value("type")
out += " [get_bd_intf_"
if dest_type == "port":
out += "ports"
else:
out += "pins"
out += " " + dest.get_attr_value("name") + "]"
out += "\n"
out += "\n"
out += " # Create port connections\n"
vivado_conns = vivado_node[0].get_subnodes("ports_connections",
"connection")
for conn in vivado_conns:
out += " connect_bd_net -net " + conn.get_attr_value("src")
for dest in conn.get_nodes("dest"):
dest_type = dest.get_attr_value("type")
out += " [get_bd_"
if dest_type == "port":
out += "ports"
else:
out += "pins"
out += " " + dest.get_attr_value("name") + "]"
out += "\n"
out += "\n"
out += " # Create address segments\n" + \
" create_bd_addr_seg -range 0x10000 -offset 0x43C00000 " + \
"[get_bd_addr_spaces processing_system7_0/Data] " + \
"[get_bd_addr_segs M00_AXI/Reg] SEG_M00_AXI_Reg\n \n\n"
out += " # Restore current instance\n" + \
" current_bd_instance $oldCurInst\n\n" + \
" save_bd_design\n"
out += "}\n"
out += "# End of create_root_design()\n\n\n"
out += "#####################################################\n"
out += "# MAIN FLOW\n"
out += "#####################################################\n"
out += "\n"
out += 'create_root_design ""\n\n\n'
tclfile = open(self.project.projectpath + SYNTHESISPATH + "/" +
component.name + "_bd.tcl", "w")
tclfile.write(out)
def add_constraints_file(self, filename):
""" return line for constraints file insertion
"""
out = "# Set 'constrs_1' fileset object\n"
out += "set obj [get_filesets constrs_1]\n"
out += "\n"
out += "# Add/Import constrs file and set constrs file properties\n"
out += 'set file "[file normalize "..' + SYNTHESISPATH + "/" + \
self.project.name + '.xdc"]"\n'
out += "set file_added [add_files -norecurse -fileset $obj $file]\n"
out += 'set file "..' + SYNTHESISPATH + "/" + \
self.project.name + '.xdc"\n'
out += "set file [file normalize $file]\n"
out += 'set file_obj [get_files -of_objects ' + \
'[get_filesets constrs_1] [list "*$file"]]\n'
out += 'set_property "file_type" "XDC" $file_obj\n'
out += "\n"
out += "\n"
return out
def generatelibraryconstraints(self):
# TODO
""" Adds constraints specified by a component, such as placement
for a PLL, multiplier, etc. or clock informations about PLL
output signals
"""
out = "# components constraints \n"
for instance in self.project.instances:
if instance.constraints != []:
for constraint in instance.constraints:
inst_name = instance.instancename
attr_name = str(constraint.get_attr_value("name"))
constr_type = constraint.get_attr_value("type")
sig_type = constraint.get_attr_value("sig_type")
if sig_type is None:
sig_type = "ports"
if constr_type == "clk":
frequency = constraint.get_attr_value("frequency")
freq = " %g" % ((1000 / float(frequency)))
out += "create_clock -period " + freq + \
" -name " + inst_name + "_" + attr_name + \
" [get_" + sig_type + " " + inst_name
if sig_type == "ports":
out += "_"
else:
out += "/"
out += attr_name + "]\n"
elif constr_type == "placement":
out += 'INST "' + inst_name + "/" + \
attr_name + '" LOC=' + \
constraint.get_attr_value("loc") + ";\n"
elif constr_type == "false_path":
# GGM : add verification : this attributes are
# mandatory for false_path
src_type = constraint.get_attr_value("src_type")
dest_type = constraint.get_attr_value("dest_type")
out += "set_false_path -from [get_"
if src_type == "clocks" or src_type == "inst_clocks":
out += "clocks "
else:
out += src_type
if src_type == "inst_clocks":
out += inst_name + "_"
elif src_type == "pins":
out += inst_name + "/"
out += constraint.get_attr_value("src") + \
"] -to [get_"
if dest_type == "clocks" or dest_type == "inst_clocks":
out += "clocks "
else:
out += src_type
if dest_type == "inst_clocks":
out += inst_name + "_"
elif dest_type == "pins":
out += inst_name + "/"
out += constraint.get_attr_value("dest") + "]\n"
elif constr_type == "input_delay":
out += "set_input_delay -clock " + inst_name + "_" + \
constraint.get_attr_value("src") + " " + \
constraint.get_attr_value("value") + " " + \
"[get_" + sig_type + " " + inst_name
if sig_type == "ports":
out += "_"
else:
out += "/"
out += constraint.get_attr_value("dest") + "]\n"
else:
raise PodError("component " + instance.name +
" has an unknown type " +
constr_type, 0)
return out
@classmethod
def addforcepinout(cls, port):
""" Generate line for pin
"""
constr = port.get_attr_value("constr_hidden")
if constr is not None and constr == "1":
return ""
out = 'NET "force_' + str(port.name)
out += '" LOC="' + str(port.position) + \
'" | IOSTANDARD=' + str(port.standard)
        if port.drive is not None:
            out += " | DRIVE=" + str(port.drive)
out += r'; # ' + str(port.name) + '\n'
return out
@classmethod
def addclockconstraints(cls, connect, frequency):
""" Generate clock constraints
"""
out = "NET \"" + connect["instance_dest"] + \
"_" + connect["port_dest"] + '" TNM_NET = "' + \
connect["instance_dest"] + "_" + connect["port_dest"] + \
"\";\n"
out += "TIMESPEC \"TS_" + connect["instance_dest"] + \
"_" + connect["port_dest"] + '" = PERIOD "' + \
connect["instance_dest"] + "_" + connect["port_dest"] + \
"\" " + "%g" % ((1000 / float(frequency))) + \
" ns HIGH 50 %;\n"
return out
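    # Example of the UCF-style text the method above produces for a
    # hypothetical connection {"instance_dest": "cpu0", "port_dest": "clk"}
    # at 100 MHz (the instance and port names are illustrative):
    #
    #   NET "cpu0_clk" TNM_NET = "cpu0_clk";
    #   TIMESPEC "TS_cpu0_clk" = PERIOD "cpu0_clk" 10 ns HIGH 50 %;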
def addpinconstraints(self, connect, port):
""" Generate constraints for a pin
"""
constr = port.get_attr_value("constr_hidden")
if constr is not None and constr == "1":
return ""
instancedest =\
self.project.get_instance(connect["instance_dest"])
interfacedest = \
instancedest.get_interface(connect["interface_dest"])
portdest = interfacedest.get_port(connect["port_dest"])
get_ports = "[get_ports "
if portdest.size != 1:
get_ports += '{'
get_ports += connect["instance_dest"] + \
"_" + connect["port_dest"]
if portdest.size != 1:
if portdest.is_fully_connected():
get_ports += "[" + connect["pin_dest"] + "]"
else:
get_ports += "_pin" + connect["pin_dest"]
get_ports += '}'
get_ports += ']'
out = 'set_property PACKAGE_PIN ' + str(port.position)
out += " " + get_ports + "\n"
# TODO
# if portdest.getPortOption() != None:
# out = out + ' | '+str(portdest.getPortOption())
# elif port.getPortOption() != None:
# out = out + ' | '+str(port.getPortOption())
out += 'set_property IOSTANDARD '
if portdest.standard is not None:
out += str(portdest.standard) + " "
else:
out += str(port.standard)
out += " " + get_ports + "\n"
# if portdest.getDrive() != None:
# out = out + " | DRIVE="+str(portdest.getDrive())
# elif port.getDrive() != None:
# out = out + " | DRIVE="+str(port.getDrive())
# out = out+r'; # '+str(port.name)+'\n'
return out
def project_base_creation(self):
""" return string
for project creation
"""
platform = self.project.platform
proj_name = self.project.name
out = "# Set the reference directory for source file relative " + \
"paths (by default the value is script directory path)\n"
out += 'set origin_dir "..' + OBJSPATH + '/"'
out += "\n"
out += "\n"
out += "# Create project\n"
out += "create_project -part " + platform.device + \
" " + self.project.name + "\n"
out += "\n"
out += "# Set the directory path for the new project\n"
out += "set proj_dir [get_property directory [current_project]]\n"
out += "\n"
out += "# Set project properties\n"
out += "set obj [get_projects " + proj_name + "]\n"
if platform.board_part is not None:
out += 'set_property "board_part" "' + \
platform.board_part + '" $obj\n'
out += 'set_property "default_lib" "xil_defaultlib" $obj\n'
out += 'set_property "simulator_language" "Mixed" $obj\n'
out += 'set_property "target_language" "VHDL" $obj\n'
out += "\n"
return out
def project_base_configuration(self):
""" return basic project
configuration
"""
out = "# Create 'sources_1' fileset (if not found)\n"
out += "if {[string equal [get_filesets -quiet sources_1] \"\"]} {\n"
out += " create_fileset -srcset sources_1\n"
out += "}\n"
out += "\n"
out += "# Set 'sources_1' fileset object\n"
out += "set obj [get_filesets sources_1]\n"
out += "set files [list \\\n"
out += ' "[file normalize "..' + SYNTHESISPATH + "/top_" + \
self.project.name + VHDLEXT + '"]"\\\n'
out += "]\n"
out += "add_files -norecurse -fileset $obj $files\n"
out += "\n"
out += "# Set 'sources_1' fileset file properties for remote files\n"
out += "\n"
out += "# Set 'sources_1' fileset file properties for local files\n"
out += "# None\n"
out += "\n"
out += "# Set 'sources_1' fileset properties\n"
out += "set obj [get_filesets sources_1]\n"
out += 'set_property "top" "top_' + self.project.name + '" $obj\n'
out += "\n"
out += "# Create 'constrs_1' fileset (if not found)\n"
out += "if {[string equal [get_filesets -quiet constrs_1] \"\"]} {\n"
out += " create_fileset -constrset constrs_1\n"
out += "}\n"
out += "\n"
return out
@classmethod
def add_file_to_tcl(cls, filename):
out = "set obj [get_filesets sources_1]\n"
out += 'set file "[file normalize "' + filename + '"]"\n'
out += "set file_added [add_files -norecurse -fileset $obj $file]\n"
return out
def insert_tools_specific_commands(self):
""" return lines for misc stuff
specific to a tool
"""
platform = self.project.platform
proj_name = self.project.name
out = "# Create 'sim_1' fileset (if not found)\n"
out += 'if {[string equal [get_filesets -quiet sim_1] ""]} {\n'
out += " create_fileset -simset sim_1\n"
out += "}\n\n"
out += "# Set 'sim_1' fileset object\n"
out += "set obj [get_filesets sim_1]\n"
out += "# Empty (no sources present)\n\n"
out += "# Set 'sim_1' fileset properties\n"
out += "set obj [get_filesets sim_1]\n"
out += 'set_property "top" "top_' + self.project.name + '" $obj\n'
out += "\n"
out += "# Create 'synth_1' run (if not found)\n"
out += "if {[string equal [get_runs -quiet synth_1] \"\"]} {\n"
out += " create_run -name synth_1 -part " + platform.device + \
' -flow {Vivado Synthesis ' + self.base_version + '} ' + \
'-strategy "Vivado Synthesis Defaults" -constrset constrs_1\n'
out += "} else {\n"
out += ' set_property strategy "Vivado Synthesis Defaults" ' + \
"[get_runs synth_1]\n"
out += ' set_property flow "Vivado Synthesis ' + \
self.base_version + '" [get_runs synth_1]\n'
out += "}\n"
out += "set obj [get_runs synth_1]\n\n"
out += "# set the current synth run\n"
out += "current_run -synthesis [get_runs synth_1]\n\n"
out += "# Create 'impl_1' run (if not found)\n"
out += "if {[string equal [get_runs -quiet impl_1] \"\"]} {\n"
out += " create_run -name impl_1 -part " + platform.device + \
" -flow {Vivado Implementation " + self.base_version + "} " + \
'-strategy "Vivado Implementation Defaults" ' + \
'-constrset constrs_1 -parent_run synth_1\n'
out += "} else {\n"
out += ' set_property strategy "Vivado Implementation Defaults" ' + \
"[get_runs impl_1]\n"
out += ' set_property flow "Vivado Implementation ' + \
self.base_version + '" [get_runs impl_1]\n'
out += "}\n"
out += "set obj [get_runs impl_1]\n\n"
out += 'set_property "needs_refresh" "1" $obj\n'
out += 'set_property "steps.write_bitstream.args.bin_file" "1" $obj\n'
list_bd_comp = self.need_block_design()
if len(list_bd_comp):
out += "load_features ipintegrator\n"
for component in list_bd_comp:
bd_name = component.name + "_bd" + TCLEXT
self.generate_block_design(component)
out += "source .." + SYNTHESISPATH + "/" + bd_name + "\n"
out += "\n\n"
out += "# set the current impl run\n"
out += "current_run -implementation [get_runs impl_1]\n\n"
out += 'puts "INFO: Project created: ' + proj_name + '"\n'
out += "# set the current impl run\n"
out += "current_run -implementation [get_runs impl_1]\n"
if len(list_bd_comp):
for component in list_bd_comp:
out += "generate_target all [get_files " + \
"./" + proj_name + ".srcs/sources_1/bd/" + \
component.name + "_bd/" + component.name + "_bd.bd]\n"
return out
@classmethod
def insert_tools_gen_cmds(cls):
""" return lines for bitstream generation
"""
out = "launch_runs synth_1\n"
out += "wait_on_run synth_1\n"
out += "## do implementation\n"
out += "launch_runs impl_1\n"
out += "wait_on_run impl_1\n"
out += "## make bit file\n"
out += "launch_runs impl_1 -to_step write_bitstream\n"
out += "wait_on_run impl_1\n"
out += "exit\n"
return out
@property
def ext_files(self):
""" return list of bitstream files extension
"""
return [XILINX_BITSTREAM_SUFFIX, XILINX_BINARY_SUFFIX]
def generate_bitstream(self):
""" generate the bitstream """
commandname = self.synthesis_toolcommandname
scriptpath = os.path.join(self.parent.projectpath + SYNTHESISPATH,
self.tcl_scriptname)
pwd = sy.pwd()
sy.del_all(self.project.projectpath + OBJSPATH)
sy.chdir(self.project.projectpath + SYNTHESISPATH)
commandname += " -mode tcl"
scriptname = "-source " + scriptpath + " -tclargs build"
binpath = self.project.projectpath + OBJSPATH + "/" + \
self.project.name + ".runs/impl_1/"
for line in sy.launch_as_shell(commandname, scriptname):
if SETTINGS.color() == 1:
print(COLOR_SHELL + line + COLOR_END),
else:
print("SHELL>" + line),
for ext_file in self.ext_files:
try:
sy.cp_file(binpath + BINARY_PREFIX + self.project.name +
ext_file,
self.project.projectpath + BINARYPROJECTPATH + "/")
except IOError:
raise PodError("Can't copy bitstream")
sy.chdir(pwd)
| lgpl-2.1 | -1,396,601,286,510,807,600 | 41.46519 | 79 | 0.47906 | false |
omaciel/pylyglot | translations/views.py | 1 | 2607 | # -*- encoding: utf-8 -*-
# vim: ts=4 sw=4 expandtab ai
#
# This file is part of Pylyglot.
#
# Pylyglot is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pylyglot is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Pylyglot. If not, see <http://www.gnu.org/licenses/>.
from django.utils.http import urlencode
from core.forms import SearchForm
from core.models import Translation
from django.views.generic import ListView
class SearchableTranslationListView(ListView):
model = Translation
paginate_by = 20
template_name = 'translations/translation_list.html'
def get_queryset(self):
queryset = super(SearchableTranslationListView, self).get_queryset()
self.query = self.request.GET.get('query', '')
self.short_name = self.request.GET.get('languages', '')
if self.query and self.short_name:
return queryset.filter(
sentence__msgid__icontains=self.query,
language__short_name=self.short_name,
obsolete=False,
).values(
'sentence__msgid',
'msgstr',
'sentence__length',
'package__name',
).order_by(
'sentence__length',
'sentence__msgid',
'msgstr'
).distinct()
else:
return queryset.none()
def get_context_data(self, **kwargs):
kwargs.update({
'query': self.query,
'short_name': self.short_name,
'form': SearchForm(self.request.GET or None),
'is_searching': ('query' in self.request.GET and
'languages' in self.request.GET),
'pagination_extra': urlencode({
'languages': self.short_name,
'query': self.query,
}),
})
return super(SearchableTranslationListView,
self).get_context_data(**kwargs)
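# Example request this view would serve (the URL routing and the pt_BR
# language code are assumptions for illustration):
#
#   GET /translations/?query=file&languages=pt_BR
#   -> up to 20 distinct (msgid, msgstr, length, package) rows for
#      non-obsolete Brazilian Portuguese translations containing "file",
#      ordered by sentence length.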
| gpl-3.0 | 9,005,606,434,864,418,000 | 35.71831 | 76 | 0.613349 | false |
jim-thisplace/exercises-in-programming-style | 33-restful/tf-33.py | 17 | 3653 | #!/usr/bin/env python
import re, string, sys
with open("../stop_words.txt") as f:
stops = set(f.read().split(",")+list(string.ascii_lowercase))
# The "database"
data = {}
# Internal functions of the "server"-side application
def error_state():
return "Something wrong", ["get", "default", None]
# The "server"-side application handlers
def default_get_handler(args):
rep = "What would you like to do?"
rep += "\n1 - Quit" + "\n2 - Upload file"
links = {"1" : ["post", "execution", None], "2" : ["get", "file_form", None]}
return rep, links
def quit_handler(args):
sys.exit("Goodbye cruel world...")
def upload_get_handler(args):
return "Name of file to upload?", ["post", "file"]
def upload_post_handler(args):
def create_data(filename):
if filename in data:
return
word_freqs = {}
with open(filename) as f:
for w in [x.lower() for x in re.split("[^a-zA-Z]+", f.read()) if len(x) > 0 and x.lower() not in stops]:
word_freqs[w] = word_freqs.get(w, 0) + 1
word_freqsl = word_freqs.items()
word_freqsl.sort(lambda x, y: cmp(y[1], x[1]))
data[filename] = word_freqsl
if args == None:
return error_state()
filename = args[0]
try:
create_data(filename)
except:
return error_state()
return word_get_handler([filename, 0])
def word_get_handler(args):
def get_word(filename, word_index):
if word_index < len(data[filename]):
return data[filename][word_index]
else:
return ("no more words", 0)
filename = args[0]; word_index = args[1]
word_info = get_word(filename, word_index)
rep = '\n#{0}: {1} - {2}'.format(word_index+1, word_info[0], word_info[1])
rep += "\n\nWhat would you like to do next?"
rep += "\n1 - Quit" + "\n2 - Upload file"
rep += "\n3 - See next most-frequently occurring word"
links = {"1" : ["post", "execution", None],
"2" : ["get", "file_form", None],
"3" : ["get", "word", [filename, word_index+1]]}
return rep, links
# Handler registration
handlers = {"post_execution" : quit_handler,
"get_default" : default_get_handler,
"get_file_form" : upload_get_handler,
"post_file" : upload_post_handler,
"get_word" : word_get_handler }
# The "server" core
def handle_request(verb, uri, args):
def handler_key(verb, uri):
return verb + "_" + uri
if handler_key(verb, uri) in handlers:
return handlers[handler_key(verb, uri)](args)
else:
return handlers[handler_key("get", "default")](args)
# A very simple client "browser"
def render_and_get_input(state_representation, links):
print state_representation
sys.stdout.flush()
if type(links) is dict: # many possible next states
input = sys.stdin.readline().strip()
if input in links:
return links[input]
else:
return ["get", "default", None]
elif type(links) is list: # only one possible next state
if links[0] == "post": # get "form" data
input = sys.stdin.readline().strip()
links.append([input]) # add the data at the end
return links
else: # get action, don't get user input
return links
else:
return ["get", "default", None]
request = ["get", "default", None]
while True:
# "server"-side computation
state_representation, links = handle_request(*request)
# "client"-side computation
request = render_and_get_input(state_representation, links)
| mit | -7,315,360,128,486,364,000 | 33.140187 | 116 | 0.582809 | false |
ivanmarcin/kubernetes | cluster/saltbase/salt/_states/container_bridge.py | 96 | 5593 | # Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import salt.exceptions
import salt.utils.ipaddr as ipaddr
def ensure(name, cidr, mtu=1460):
'''
    Ensure that a bridge (named <name>) is configured for containers.
Under the covers we will make sure that
- The bridge exists
- The MTU is set
- The correct network is added to the bridge
    - iptables is set up for MASQUERADE for egress
cidr:
The cidr range in the form of 10.244.x.0/24
mtu:
The MTU to set on the interface
'''
ret = {'name': name, 'changes': {}, 'result': False, 'comment': ''}
iptables_rule = {
'table': 'nat',
'chain': 'POSTROUTING',
'rule': '-o eth0 -j MASQUERADE \! -d 10.0.0.0/8'
}
def bridge_exists(name):
'Determine if a bridge exists already.'
out = __salt__['cmd.run_stdout']('brctl show {0}'.format(name))
for line in out.splitlines():
# get rid of first line
if line.startswith('bridge name'):
continue
# get rid of ^\n's
vals = line.split()
if not vals:
continue
if len(vals) > 1:
return True
return False
def get_ip_addr_details(name):
'For the given interface, get address details.'
out = __salt__['cmd.run']('ip addr show dev {0}'.format(name))
ret = { 'networks': [] }
for line in out.splitlines():
match = re.match(
r'^\d*:\s+([\w.\-]+)(?:@)?([\w.\-]+)?:\s+<(.+)>.*mtu (\d+)',
line)
if match:
iface, parent, attrs, mtu = match.groups()
if 'UP' in attrs.split(','):
ret['up'] = True
else:
ret['up'] = False
if parent:
ret['parent'] = parent
ret['mtu'] = int(mtu)
continue
cols = line.split()
if len(cols) > 2 and cols[0] == 'inet':
ret['networks'].append(cols[1])
return ret
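    # For a bridge that is already configured, the dict above comes back
    # roughly as (values illustrative):
    #   {'up': True, 'mtu': 1460, 'networks': ['10.244.1.1/24']}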
def get_current_state():
'Helper that returns a dict of current bridge state.'
ret = {}
ret['name'] = name
ret['exists'] = bridge_exists(name)
if ret['exists']:
ret['details'] = get_ip_addr_details(name)
else:
ret['details'] = {}
# This module function is strange and returns True if the rule exists.
# If not, it returns a string with the error from the call to iptables.
ret['iptables_rule_exists'] = \
__salt__['iptables.check'](**iptables_rule) == True
return ret
# This is a little hacky. I should probably import a real library for this
# but this'll work for now.
try:
cidr_network = ipaddr.IPv4Network(cidr, strict=True)
except Exception:
raise salt.exceptions.SaltInvocationError(
'Invalid CIDR \'{0}\''.format(cidr))
desired_network = '{0}/{1}'.format(
str(ipaddr.IPv4Address(cidr_network._ip + 1)),
str(cidr_network.prefixlen))
current_state = get_current_state()
if (current_state['exists']
and current_state['details']['mtu'] == mtu
and desired_network in current_state['details']['networks']
and current_state['details']['up']
and current_state['iptables_rule_exists']):
ret['result'] = True
ret['comment'] = 'System already in the correct state'
return ret
# The state of the system does need to be changed. Check if we're running
# in ``test=true`` mode.
if __opts__['test'] == True:
ret['comment'] = 'The state of "{0}" will be changed.'.format(name)
ret['changes'] = {
'old': current_state,
'new': 'Create and configure bridge'
}
# Return ``None`` when running with ``test=true``.
ret['result'] = None
return ret
# Finally, make the actual change and return the result.
if not current_state['exists']:
__salt__['cmd.run']('brctl addbr {0}'.format(name))
new_state = get_current_state()
if new_state['details']['mtu'] != mtu:
__salt__['cmd.run'](
'ip link set dev {0} mtu {1}'.format(name, str(mtu)))
new_state = get_current_state()
if desired_network not in new_state['details']['networks']:
__salt__['cmd.run'](
'ip addr add {0} dev {1}'.format(desired_network, name))
new_state = get_current_state()
if not new_state['details']['up']:
__salt__['cmd.run'](
'ip link set dev {0} up'.format(name))
new_state = get_current_state()
if not new_state['iptables_rule_exists']:
__salt__['iptables.append'](**iptables_rule)
new_state = get_current_state()
ret['comment'] = 'The state of "{0}" was changed!'.format(name)
ret['changes'] = {
'old': current_state,
'new': new_state,
}
ret['result'] = True
return ret
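# Illustrative usage sketch (not part of the original state module). Assuming
# this file is synced to minions as ``container_bridge``, an SLS entry might
# look like the following; the bridge name and CIDR are examples only:
#
#   cbr0:
#     container_bridge.ensure:
#       - cidr: 10.244.1.0/24
#       - mtu: 1460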
| apache-2.0 | 6,241,363,958,323,254,000 | 33.312883 | 79 | 0.55784 | false |
morreene/tradenews | venv/Lib/site-packages/flask/testsuite/reqctx.py | 557 | 5960 | # -*- coding: utf-8 -*-
"""
flask.testsuite.reqctx
~~~~~~~~~~~~~~~~~~~~~~
Tests the request context.
:copyright: (c) 2012 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import flask
import unittest
try:
from greenlet import greenlet
except ImportError:
greenlet = None
from flask.testsuite import FlaskTestCase
class RequestContextTestCase(FlaskTestCase):
def test_teardown_on_pop(self):
buffer = []
app = flask.Flask(__name__)
@app.teardown_request
def end_of_request(exception):
buffer.append(exception)
ctx = app.test_request_context()
ctx.push()
self.assert_equal(buffer, [])
ctx.pop()
self.assert_equal(buffer, [None])
def test_proper_test_request_context(self):
app = flask.Flask(__name__)
app.config.update(
SERVER_NAME='localhost.localdomain:5000'
)
@app.route('/')
def index():
return None
@app.route('/', subdomain='foo')
def sub():
return None
with app.test_request_context('/'):
self.assert_equal(flask.url_for('index', _external=True), 'http://localhost.localdomain:5000/')
with app.test_request_context('/'):
self.assert_equal(flask.url_for('sub', _external=True), 'http://foo.localhost.localdomain:5000/')
try:
with app.test_request_context('/', environ_overrides={'HTTP_HOST': 'localhost'}):
pass
except Exception as e:
self.assert_true(isinstance(e, ValueError))
self.assert_equal(str(e), "the server name provided " +
"('localhost.localdomain:5000') does not match the " + \
"server name from the WSGI environment ('localhost')")
try:
app.config.update(SERVER_NAME='localhost')
with app.test_request_context('/', environ_overrides={'SERVER_NAME': 'localhost'}):
pass
except ValueError as e:
raise ValueError(
"No ValueError exception should have been raised \"%s\"" % e
)
try:
app.config.update(SERVER_NAME='localhost:80')
with app.test_request_context('/', environ_overrides={'SERVER_NAME': 'localhost:80'}):
pass
except ValueError as e:
raise ValueError(
"No ValueError exception should have been raised \"%s\"" % e
)
def test_context_binding(self):
app = flask.Flask(__name__)
@app.route('/')
def index():
return 'Hello %s!' % flask.request.args['name']
@app.route('/meh')
def meh():
return flask.request.url
with app.test_request_context('/?name=World'):
self.assert_equal(index(), 'Hello World!')
with app.test_request_context('/meh'):
self.assert_equal(meh(), 'http://localhost/meh')
self.assert_true(flask._request_ctx_stack.top is None)
def test_context_test(self):
app = flask.Flask(__name__)
self.assert_false(flask.request)
self.assert_false(flask.has_request_context())
ctx = app.test_request_context()
ctx.push()
try:
self.assert_true(flask.request)
self.assert_true(flask.has_request_context())
finally:
ctx.pop()
def test_manual_context_binding(self):
app = flask.Flask(__name__)
@app.route('/')
def index():
return 'Hello %s!' % flask.request.args['name']
ctx = app.test_request_context('/?name=World')
ctx.push()
self.assert_equal(index(), 'Hello World!')
ctx.pop()
try:
index()
except RuntimeError:
pass
else:
self.assert_true(0, 'expected runtime error')
def test_greenlet_context_copying(self):
app = flask.Flask(__name__)
greenlets = []
@app.route('/')
def index():
reqctx = flask._request_ctx_stack.top.copy()
def g():
self.assert_false(flask.request)
self.assert_false(flask.current_app)
with reqctx:
self.assert_true(flask.request)
self.assert_equal(flask.current_app, app)
self.assert_equal(flask.request.path, '/')
self.assert_equal(flask.request.args['foo'], 'bar')
self.assert_false(flask.request)
return 42
greenlets.append(greenlet(g))
return 'Hello World!'
rv = app.test_client().get('/?foo=bar')
self.assert_equal(rv.data, b'Hello World!')
result = greenlets[0].run()
self.assert_equal(result, 42)
def test_greenlet_context_copying_api(self):
app = flask.Flask(__name__)
greenlets = []
@app.route('/')
def index():
reqctx = flask._request_ctx_stack.top.copy()
@flask.copy_current_request_context
def g():
self.assert_true(flask.request)
self.assert_equal(flask.current_app, app)
self.assert_equal(flask.request.path, '/')
self.assert_equal(flask.request.args['foo'], 'bar')
return 42
greenlets.append(greenlet(g))
return 'Hello World!'
rv = app.test_client().get('/?foo=bar')
self.assert_equal(rv.data, b'Hello World!')
result = greenlets[0].run()
self.assert_equal(result, 42)
# Disable test if we don't have greenlets available
if greenlet is None:
test_greenlet_context_copying = None
test_greenlet_context_copying_api = None
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(RequestContextTestCase))
return suite
| bsd-3-clause | -2,546,403,071,072,841,700 | 31.216216 | 109 | 0.550503 | false |
lyoshenka/PyPagekite | pagekite/ui/basic.py | 2 | 9580 | """
This is the "basic" text-mode user interface class.
"""
#############################################################################
LICENSE = """\
This file is part of pagekite.py.
Copyright 2010-2013, the Beanstalks Project ehf. and Bjarni Runar Einarsson
This program is free software: you can redistribute it and/or modify it under
the terms of the GNU Affero General Public License as published by the Free
Software Foundation, either version 3 of the License, or (at your option) any
later version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see: <http://www.gnu.org/licenses/>
"""
#############################################################################
import datetime
import re
import sys
import time
from nullui import NullUi
from pagekite.common import *
HTML_BR_RE = re.compile(r'<(br|/p|/li|/tr|/h\d)>\s*')
HTML_LI_RE = re.compile(r'<li>\s*')
HTML_NBSP_RE = re.compile(r' ')
HTML_TAGS_RE = re.compile(r'<[^>\s][^>]*>')
def clean_html(text):
return HTML_LI_RE.sub(' * ',
HTML_NBSP_RE.sub('_',
HTML_BR_RE.sub('\n', text)))
def Q(text):
return HTML_TAGS_RE.sub('', clean_html(text))
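# Illustrative examples (not part of the original file) of the HTML helpers
# above, shown doctest-style:
#
#   >>> clean_html('one<br>two')
#   'one\ntwo'
#   >>> Q('<p>Hello <b>world</b>')
#   'Hello world'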
class BasicUi(NullUi):
"""Stdio based user interface."""
DAEMON_FRIENDLY = False
WANTS_STDERR = True
EMAIL_RE = re.compile(r'^[a-z0-9!#$%&\'\*\+\/=?^_`{|}~-]+'
'(?:\.[a-z0-9!#$%&\'*+/=?^_`{|}~-]+)*@'
'(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)*'
'(?:[a-zA-Z]{2,4}|museum)$')
def Notify(self, message, prefix=' ',
popup=False, color=None, now=None, alignright=''):
now = int(now or time.time())
color = color or self.NORM
# We suppress duplicates that are either new or still on the screen.
keys = self.notify_history.keys()
if len(keys) > 20:
for key in keys:
if self.notify_history[key] < now-300:
del self.notify_history[key]
message = '%s' % message
if message not in self.notify_history:
# Display the time now and then.
if (not alignright and
(now >= (self.last_tick + 60)) and
(len(message) < 68)):
try:
self.last_tick = now
d = datetime.datetime.fromtimestamp(now)
alignright = '[%2.2d:%2.2d]' % (d.hour, d.minute)
except:
pass # Fails on Python 2.2
if not now or now > 0:
self.notify_history[message] = now
msg = '\r%s %s%s%s%s%s\n' % ((prefix * 3)[0:3], color, message, self.NORM,
' ' * (75-len(message)-len(alignright)),
alignright)
self.wfile.write(msg)
self.Status(self.status_tag, self.status_msg)
def NotifyMOTD(self, frontend, motd_message):
lc = 1
self.Notify(' ')
for line in Q(motd_message).splitlines():
self.Notify((line.strip() or ' ' * (lc+2)),
prefix=' ++', color=self.WHITE)
lc += 1
self.Notify(' ' * (lc+2), alignright='[MOTD from %s]' % frontend)
self.Notify(' ')
def Status(self, tag, message=None, color=None):
self.status_tag = tag
self.status_col = color or self.status_col or self.NORM
self.status_msg = '%s' % (message or self.status_msg)
if not self.in_wizard:
message = self.status_msg
msg = ('\r << pagekite.py [%s]%s %s%s%s\r%s'
) % (tag, ' ' * (8-len(tag)),
self.status_col, message[:52],
' ' * (52-len(message)), self.NORM)
self.wfile.write(msg)
if tag == 'exiting':
self.wfile.write('\n')
def Welcome(self, pre=None):
if self.in_wizard:
self.wfile.write('%s%s%s' % (self.CLEAR, self.WHITE, self.in_wizard))
if self.welcome:
self.wfile.write('%s\r%s\n' % (self.NORM, Q(self.welcome)))
self.welcome = None
if self.in_wizard and self.wizard_tell:
self.wfile.write('\n%s\r' % self.NORM)
for line in self.wizard_tell: self.wfile.write('*** %s\n' % Q(line))
self.wizard_tell = None
if pre:
self.wfile.write('\n%s\r' % self.NORM)
for line in pre: self.wfile.write(' %s\n' % Q(line))
self.wfile.write('\n%s\r' % self.NORM)
def StartWizard(self, title):
self.Welcome()
banner = '>>> %s' % title
banner = ('%s%s[CTRL+C = Cancel]\n') % (banner, ' ' * (62-len(banner)))
self.in_wizard = banner
self.tries = 200
def Retry(self):
self.tries -= 1
return self.tries
def EndWizard(self, quietly=False):
if self.wizard_tell:
self.Welcome()
self.in_wizard = None
if sys.platform in ('win32', 'os2', 'os2emx') and not quietly:
self.wfile.write('\n<<< press ENTER to continue >>>\n')
self.rfile.readline()
def Spacer(self):
self.wfile.write('\n')
def Readline(self):
line = self.rfile.readline()
if line:
return line.strip()
else:
raise IOError('EOF')
def AskEmail(self, question, default=None, pre=[],
wizard_hint=False, image=None, back=None, welcome=True):
if welcome: self.Welcome(pre)
while self.Retry():
self.wfile.write(' => %s ' % (Q(question), ))
answer = self.Readline()
if default and answer == '': return default
if self.EMAIL_RE.match(answer.lower()): return answer
if back is not None and answer == 'back': return back
raise Exception('Too many tries')
def AskLogin(self, question, default=None, email=None, pre=None,
wizard_hint=False, image=None, back=None):
self.Welcome(pre)
def_email, def_pass = default or (email, None)
self.wfile.write(' %s\n' % (Q(question), ))
if not email:
email = self.AskEmail('Your e-mail:',
default=def_email, back=back, welcome=False)
if email == back: return back
import getpass
self.wfile.write(' => ')
return (email, getpass.getpass() or def_pass)
def AskYesNo(self, question, default=None, pre=[], yes='yes', no='no',
wizard_hint=False, image=None, back=None):
self.Welcome(pre)
yn = ((default is True) and '[Y/n]'
) or ((default is False) and '[y/N]'
) or ('[y/n]')
while self.Retry():
self.wfile.write(' => %s %s ' % (Q(question), yn))
answer = self.Readline().lower()
if default is not None and answer == '': answer = default and 'y' or 'n'
if back is not None and answer.startswith('b'): return back
if answer in ('y', 'n'): return (answer == 'y')
raise Exception('Too many tries')
def AskQuestion(self, question, pre=[], default=None, prompt=' =>',
wizard_hint=False, image=None, back=None):
self.Welcome(pre)
self.wfile.write('%s %s ' % (prompt, Q(question)))
return self.Readline()
def AskKiteName(self, domains, question, pre=[], default=None,
wizard_hint=False, image=None, back=None):
self.Welcome(pre)
if len(domains) == 1:
self.wfile.write(('\n (Note: the ending %s will be added for you.)'
) % domains[0])
else:
self.wfile.write('\n Please use one of the following domains:\n')
for domain in domains:
self.wfile.write('\n *%s' % domain)
self.wfile.write('\n')
while self.Retry():
self.wfile.write('\n => %s ' % Q(question))
answer = self.Readline().lower()
if back is not None and answer == 'back':
return back
elif len(domains) == 1:
answer = answer.replace(domains[0], '')
if answer and SERVICE_SUBDOMAIN_RE.match(answer):
return answer+domains[0]
else:
for domain in domains:
if answer.endswith(domain):
answer = answer.replace(domain, '')
if answer and SERVICE_SUBDOMAIN_RE.match(answer):
return answer+domain
self.wfile.write(' (Please only use characters A-Z, 0-9, - and _.)')
raise Exception('Too many tries')
def AskMultipleChoice(self, choices, question, pre=[], default=None,
wizard_hint=False, image=None, back=None):
self.Welcome(pre)
for i in range(0, len(choices)):
self.wfile.write((' %s %d) %s\n'
) % ((default==i+1) and '*' or ' ', i+1, choices[i]))
self.wfile.write('\n')
while self.Retry():
d = default and (', default=%d' % default) or ''
self.wfile.write(' => %s [1-%d%s] ' % (Q(question), len(choices), d))
try:
answer = self.Readline().strip()
if back is not None and answer.startswith('b'): return back
choice = int(answer or default)
if choice > 0 and choice <= len(choices): return choice
except (ValueError, IndexError):
pass
raise Exception('Too many tries')
def Tell(self, lines, error=False, back=None):
if self.in_wizard:
self.wizard_tell = lines
else:
self.Welcome()
for line in lines: self.wfile.write(' %s\n' % line)
if error: self.wfile.write('\n')
return True
def Working(self, message):
if self.in_wizard:
pending_messages = self.wizard_tell or []
self.wizard_tell = pending_messages + [message+' ...']
self.Welcome()
self.wizard_tell = pending_messages + [message+' ... done.']
else:
self.Tell([message])
return True
| agpl-3.0 | -3,555,205,676,955,723,300 | 34.746269 | 80 | 0.568894 | false |
jhawkesworth/ansible | lib/ansible/modules/network/dellos10/dellos10_command.py | 39 | 7311 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2015, Peter Sprygada <[email protected]>
# Copyright: (c) 2017, Dell Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: dellos10_command
version_added: "2.2"
author: "Senthil Kumar Ganesan (@skg-net)"
short_description: Run commands on remote devices running Dell OS10
description:
- Sends arbitrary commands to a Dell EMC OS10 node and returns the results
read from the device. This module includes an
argument that will cause the module to wait for a specific condition
before returning or timing out if the condition is not met.
- This module does not support running commands in configuration mode.
Please use M(dellos10_config) to configure Dell EMC OS10 devices.
extends_documentation_fragment: dellos10
options:
commands:
description:
- List of commands to send to the remote dellos10 device over the
configured provider. The resulting output from the command
is returned. If the I(wait_for) argument is provided, the
module is not returned until the condition is satisfied or
the number of retries has expired.
type: list
required: true
wait_for:
description:
- List of conditions to evaluate against the output of the
command. The task will wait for each condition to be true
before moving forward. If the conditional is not true
within the configured number of I(retries), the task fails.
See examples.
type: list
version_added: "2.2"
match:
description:
- The I(match) argument is used in conjunction with the
I(wait_for) argument to specify the match policy. Valid
values are C(all) or C(any). If the value is set to C(all)
then all conditionals in the wait_for must be satisfied. If
the value is set to C(any) then only one of the values must be
satisfied.
type: str
default: all
choices: [ all, any ]
version_added: "2.5"
retries:
description:
      - Specifies the number of times a command should be tried
        before it is considered failed. The command is run on the
target device every retry and evaluated against the
I(wait_for) conditions.
type: int
default: 10
interval:
description:
- Configures the interval in seconds to wait between retries
of the command. If the command does not pass the specified
conditions, the interval indicates how long to wait before
trying the command again.
type: int
default: 1
"""
EXAMPLES = """
tasks:
- name: run show version on remote devices
dellos10_command:
commands: show version
- name: run show version and check to see if output contains OS10
dellos10_command:
commands: show version
wait_for: result[0] contains OS10
- name: run multiple commands on remote nodes
dellos10_command:
commands:
- show version
- show interface
- name: run multiple commands and evaluate the output
dellos10_command:
commands:
- show version
- show interface
wait_for:
- result[0] contains OS10
- result[1] contains Ethernet
"""
RETURN = """
stdout:
description: The set of responses from the commands
returned: always apart from low level errors (such as action plugin)
type: list
sample: ['...', '...']
stdout_lines:
description: The value of stdout split into a list
returned: always apart from low level errors (such as action plugin)
type: list
sample: [['...', '...'], ['...'], ['...']]
failed_conditions:
description: The list of conditionals that have failed
returned: failed
type: list
sample: ['...', '...']
warnings:
description: The list of warnings (if any) generated by module based on arguments
returned: always
type: list
sample: ['...', '...']
"""
import time
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.dellos10.dellos10 import run_commands
from ansible.module_utils.network.dellos10.dellos10 import dellos10_argument_spec, check_args
from ansible.module_utils.network.common.utils import ComplexList
from ansible.module_utils.network.common.parsing import Conditional
from ansible.module_utils.six import string_types
def to_lines(stdout):
for item in stdout:
if isinstance(item, string_types):
item = str(item).split('\n')
yield item
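# Illustrative example (not part of the original module): to_lines() turns raw
# string responses into per-line lists, e.g.
#   list(to_lines(['line1\nline2'])) == [['line1', 'line2']]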
def parse_commands(module, warnings):
command = ComplexList(dict(
command=dict(key=True),
prompt=dict(),
answer=dict()
), module)
commands = command(module.params['commands'])
for index, item in enumerate(commands):
if module.check_mode and not item['command'].startswith('show'):
warnings.append(
'only show commands are supported when using check mode, not '
'executing `%s`' % item['command']
)
elif item['command'].startswith('conf'):
module.fail_json(
msg='dellos10_command does not support running config mode '
'commands. Please use dellos10_config instead'
)
return commands
def main():
"""main entry point for module execution
"""
argument_spec = dict(
# { command: <str>, prompt: <str>, response: <str> }
commands=dict(type='list', required=True),
wait_for=dict(type='list'),
match=dict(default='all', choices=['all', 'any']),
retries=dict(default=10, type='int'),
interval=dict(default=1, type='int')
)
argument_spec.update(dellos10_argument_spec)
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True)
result = {'changed': False}
warnings = list()
check_args(module, warnings)
commands = parse_commands(module, warnings)
result['warnings'] = warnings
wait_for = module.params['wait_for'] or list()
conditionals = [Conditional(c) for c in wait_for]
retries = module.params['retries']
interval = module.params['interval']
match = module.params['match']
while retries > 0:
responses = run_commands(module, commands)
for item in list(conditionals):
if item(responses):
if match == 'any':
conditionals = list()
break
conditionals.remove(item)
if not conditionals:
break
time.sleep(interval)
retries -= 1
if conditionals:
failed_conditions = [item.raw for item in conditionals]
msg = 'One or more conditional statements have not been satisfied'
module.fail_json(msg=msg, failed_conditions=failed_conditions)
result.update({
'changed': False,
'stdout': responses,
'stdout_lines': list(to_lines(responses))
})
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 | -7,767,602,693,540,514,000 | 30.649351 | 93 | 0.646834 | false |
2ndQuadrant/ansible | lib/ansible/modules/cloud/vmware/vmware_host_package_facts.py | 47 | 4254 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Abhijeet Kasurde <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = r'''
---
module: vmware_host_package_facts
short_description: Gathers facts about available packages on an ESXi host
description:
- This module can be used to gather facts about available packages and their status on an ESXi host.
version_added: '2.5'
author:
- Abhijeet Kasurde (@Akasurde)
notes:
- Tested on vSphere 6.5
requirements:
- python >= 2.6
- PyVmomi
options:
cluster_name:
description:
- Name of the cluster.
- Package facts about each ESXi server will be returned for given cluster.
- If C(esxi_hostname) is not given, this parameter is required.
esxi_hostname:
description:
- ESXi hostname.
- Package facts about this ESXi server will be returned.
- If C(cluster_name) is not given, this parameter is required.
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = r'''
- name: Gather facts about all ESXi Host in given Cluster
vmware_host_package_facts:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
cluster_name: cluster_name
delegate_to: localhost
register: cluster_host_packages
- name: Gather facts about ESXi Host
vmware_host_package_facts:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
esxi_hostname: '{{ esxi_hostname }}'
delegate_to: localhost
register: host_packages
'''
RETURN = r'''
hosts_package_facts:
description:
- dict with hostname as key and dict with package facts as value
returned: hosts_package_facts
type: dict
sample: { "hosts_package_facts": { "localhost.localdomain": []}}
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.vmware import vmware_argument_spec, PyVmomi
class VmwarePackageManager(PyVmomi):
def __init__(self, module):
super(VmwarePackageManager, self).__init__(module)
cluster_name = self.params.get('cluster_name', None)
esxi_host_name = self.params.get('esxi_hostname', None)
self.hosts = self.get_all_host_objs(cluster_name=cluster_name, esxi_host_name=esxi_host_name)
def gather_package_facts(self):
hosts_facts = {}
for host in self.hosts:
host_package_facts = []
host_pkg_mgr = host.configManager.imageConfigManager
if host_pkg_mgr:
pkgs = host_pkg_mgr.FetchSoftwarePackages()
for pkg in pkgs:
host_package_facts.append(dict(name=pkg.name,
version=pkg.version,
vendor=pkg.vendor,
summary=pkg.summary,
description=pkg.description,
acceptance_level=pkg.acceptanceLevel,
maintenance_mode_required=pkg.maintenanceModeRequired,
creation_date=pkg.creationDate,
)
)
hosts_facts[host.name] = host_package_facts
return hosts_facts
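# Illustrative sketch of the structure gather_package_facts() returns (not
# part of the original module); the hostname, package name, and version below
# are invented examples:
#
#   {'esxi01.example.com': [{'name': 'esx-base',
#                            'version': '6.5.0-0.0.4564106',
#                            'vendor': 'VMware', ...}]}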
def main():
argument_spec = vmware_argument_spec()
argument_spec.update(
cluster_name=dict(type='str', required=False),
esxi_hostname=dict(type='str', required=False),
)
module = AnsibleModule(
argument_spec=argument_spec,
required_one_of=[
['cluster_name', 'esxi_hostname'],
]
)
vmware_host_package_config = VmwarePackageManager(module)
module.exit_json(changed=False, hosts_package_facts=vmware_host_package_config.gather_package_facts())
if __name__ == "__main__":
main()
| gpl-3.0 | -1,821,010,012,927,690,200 | 33.032 | 106 | 0.605783 | false |
anilmuthineni/tensorflow | tensorflow/python/saved_model/main_op.py | 5 | 2262 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""SavedModel main op.
Builds a main op that defines the sequence of ops to be run as part of the
SavedModel load/restore operations.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import data_flow_ops as tf_data_flow_ops
from tensorflow.python.ops import variables
def main_op():
"""Returns a main op to init variables and tables.
  Returns the main op, including the group of ops that initializes all
  variables, initializes local variables, and initializes all tables.
Returns:
The set of ops to be run as part of the main op upon the load operation.
"""
init = variables.global_variables_initializer()
init_local = variables.local_variables_initializer()
init_tables = tf_data_flow_ops.tables_initializer()
return control_flow_ops.group(init, init_local, init_tables)
def main_op_with_restore(restore_op_name):
"""Returns a main op to init variables, tables and restore the graph.
  Returns the main op, including the group of ops that initializes all
  variables, initializes local variables, initializes all tables, and runs
  the restore op given by `restore_op_name`.
Args:
restore_op_name: Name of the op to use to restore the graph.
Returns:
The set of ops to be run as part of the main op upon the load operation.
"""
with ops.control_dependencies([main_op()]):
main_op_with_restore = control_flow_ops.group(restore_op_name)
return main_op_with_restore
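# Illustrative usage sketch (not part of the original file). With the TF 1.x
# SavedModel builder API, a main op such as the one above is typically passed
# via the builder's `main_op` argument; the `sess` and `export_dir` names
# below are assumptions:
#
#   builder = tf.saved_model.builder.SavedModelBuilder(export_dir)
#   builder.add_meta_graph_and_variables(
#       sess, [tf.saved_model.tag_constants.SERVING], main_op=main_op())
#   builder.save()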
| apache-2.0 | -411,151,667,317,544,640 | 36.081967 | 80 | 0.728559 | false |
npuichigo/ttsflow | third_party/tensorflow/tensorflow/python/kernel_tests/variables_test.py | 18 | 23601 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tf.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import operator
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors_impl
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import gen_state_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.training import gradient_descent
from tensorflow.python.util import compat
class VariablesTestCase(test.TestCase):
def testInitialization(self):
with self.test_session():
var0 = variables.Variable(0.0)
self.assertEqual("Variable:0", var0.name)
self.assertEqual([], var0.get_shape())
self.assertEqual([], var0.get_shape())
self.assertEqual([], var0.shape)
var1 = variables.Variable(1.1)
self.assertEqual("Variable_1:0", var1.name)
self.assertEqual([], var1.get_shape())
self.assertEqual([], var1.get_shape())
self.assertEqual([], var1.shape)
with self.assertRaisesOpError("Attempting to use uninitialized value"):
var0.eval()
with self.assertRaisesOpError("Attempting to use uninitialized value"):
var1.eval()
variables.global_variables_initializer().run()
self.assertAllClose(0.0, var0.eval())
self.assertAllClose(1.1, var1.eval())
def testInitializationOrder(self):
with self.test_session():
rnd = variables.Variable(random_ops.random_uniform([3, 6]), name="rnd")
self.assertEqual("rnd:0", rnd.name)
self.assertEqual([3, 6], rnd.get_shape())
self.assertEqual([3, 6], rnd.get_shape())
self.assertEqual([3, 6], rnd.shape)
dep = variables.Variable(rnd.initialized_value(), name="dep")
self.assertEqual("dep:0", dep.name)
self.assertEqual([3, 6], dep.get_shape())
self.assertEqual([3, 6], dep.get_shape())
self.assertEqual([3, 6], dep.shape)
# Currently have to set the shape manually for Add.
added_val = rnd.initialized_value() + dep.initialized_value() + 2.0
added_val.set_shape(rnd.get_shape())
depdep = variables.Variable(added_val, name="depdep")
self.assertEqual("depdep:0", depdep.name)
self.assertEqual([3, 6], depdep.get_shape())
self.assertEqual([3, 6], depdep.get_shape())
self.assertEqual([3, 6], depdep.shape)
variables.global_variables_initializer().run()
self.assertAllClose(rnd.eval(), dep.eval())
self.assertAllClose(rnd.eval() + dep.eval() + 2.0, depdep.eval())
def testIterable(self):
with self.assertRaisesRegexp(TypeError, "not iterable"):
for _ in variables.Variable(0.0):
pass
with self.assertRaisesRegexp(TypeError, "not iterable"):
for _ in variables.Variable([0.0, 1.0]):
pass
def testAssignments(self):
with self.test_session():
var = variables.Variable(0.0)
plus_one = var.assign_add(1.0)
minus_one = var.assign_sub(2.0)
four = var.assign(4.0)
variables.global_variables_initializer().run()
self.assertAllClose(0.0, var.eval())
self.assertAllClose(1.0, plus_one.eval())
self.assertAllClose(1.0, var.eval())
self.assertAllClose(-1.0, minus_one.eval())
self.assertAllClose(-1.0, var.eval())
self.assertAllClose(4.0, four.eval())
self.assertAllClose(4.0, var.eval())
def testResourceAssignments(self):
with self.test_session(use_gpu=True):
var = resource_variable_ops.ResourceVariable(0.0)
plus_one = var.assign_add(1.0)
minus_one = var.assign_sub(2.0)
four = var.assign(4.0)
variables.global_variables_initializer().run()
self.assertAllClose(0.0, var.eval())
plus_one.eval()
self.assertAllClose(1.0, var.eval())
minus_one.eval()
self.assertAllClose(-1.0, var.eval())
four.eval()
self.assertAllClose(4.0, var.eval())
def testZeroSizeStringAssign(self):
with self.test_session() as sess:
array = variables.Variable(
initial_value=array_ops.zeros((0,), dtype=dtypes.string),
name="foo",
trainable=False,
collections=[ops.GraphKeys.LOCAL_VARIABLES])
sess.run(variables.local_variables_initializer())
old_value = array.value()
copy_op = array.assign(old_value)
self.assertEqual([], list(sess.run(copy_op)))
def _countUpToTest(self, dtype):
with self.test_session():
zero = constant_op.constant(0, dtype=dtype)
var = variables.Variable(zero)
count_up_to = var.count_up_to(3)
variables.global_variables_initializer().run()
self.assertEqual(0, var.eval())
self.assertEqual(0, count_up_to.eval())
self.assertEqual(1, var.eval())
self.assertEqual(1, count_up_to.eval())
self.assertEqual(2, var.eval())
self.assertEqual(2, count_up_to.eval())
self.assertEqual(3, var.eval())
with self.assertRaisesOpError("Reached limit of 3"):
count_up_to.eval()
self.assertEqual(3, var.eval())
with self.assertRaisesOpError("Reached limit of 3"):
count_up_to.eval()
self.assertEqual(3, var.eval())
def testCountUpToInt32(self):
self._countUpToTest(dtypes.int32)
def testCountUpToInt64(self):
self._countUpToTest(dtypes.int64)
def testControlDepsNone(self):
with self.test_session():
c = constant_op.constant(1.0)
with ops.control_dependencies([c]):
# d get the control dep.
d = constant_op.constant(2.0)
# variables do not.
var_x = variables.Variable(2.0)
self.assertEqual([c.op], d.op.control_inputs)
self.assertEqual([], var_x.initializer.control_inputs)
self.assertEqual([], var_x.value().op.control_inputs)
self.assertEqual([], var_x._ref().op.control_inputs) # pylint: disable=protected-access
def testControlFlow(self):
with self.test_session() as sess:
v0 = variables.Variable(0, name="v0")
var_dict = {}
# Call get_variable in each of the cond clauses.
def var_in_then_clause():
v1 = variables.Variable(1, name="v1")
var_dict["v1"] = v1
return v1 + v0
def var_in_else_clause():
v2 = variables.Variable(2, name="v2")
var_dict["v2"] = v2
return v2 + v0
add = control_flow_ops.cond(
math_ops.less(v0, 10), var_in_then_clause, var_in_else_clause)
v1 = var_dict["v1"]
v2 = var_dict["v2"]
# We should be able to initialize and run v1 and v2 without initializing
# v0, even if the variable was created with a control dep on v0.
sess.run(v1.initializer)
self.assertEqual([1], sess.run(v1))
sess.run(v2.initializer)
self.assertEqual([2], sess.run(v2))
# v0 should still be uninitialized.
with self.assertRaisesRegexp(errors_impl.OpError, "uninitialized"):
sess.run(v0)
# We should not be able to run 'add' yet.
with self.assertRaisesRegexp(errors_impl.OpError, "uninitialized"):
sess.run(add)
# If we initialize v0 we should be able to run 'add'.
sess.run(v0.initializer)
sess.run(add)
def testControlFlowInitialization(self):
"""Expects an error if an initializer is in a control-flow scope."""
def cond(i, _):
return i < 10
def body(i, _):
zero = array_ops.zeros([], dtype=dtypes.int32)
v = variables.Variable(initial_value=zero)
return (i + 1, v.read_value())
with self.assertRaisesRegexp(ValueError, "inside a control-flow"):
control_flow_ops.while_loop(cond, body, [0, 0])
def testUseVariableAsTensor(self):
with self.test_session():
var_x = variables.Variable(2.0)
var_y = variables.Variable(3.0)
variables.global_variables_initializer().run()
self.assertAllClose(2.0, var_x.eval())
self.assertAllClose(3.0, var_y.eval())
self.assertAllClose(5.0, math_ops.add(var_x, var_y).eval())
def testZeroSizeVarSameAsConst(self):
with self.test_session():
zero_size_var = variables.Variable(array_ops.zeros([0, 2]))
zero_size_const = array_ops.ones([2, 0])
variable_mul = math_ops.matmul(zero_size_const, zero_size_var)
const_mul = math_ops.matmul(
zero_size_const, zero_size_const, transpose_b=True)
variables.global_variables_initializer().run()
variable_output = variable_mul.eval()
self.assertAllClose(const_mul.eval(), variable_output)
self.assertAllClose([[0., 0.], [0., 0.]], variable_output)
def testCachingDevice(self):
with self.test_session():
var = variables.Variable(2.0)
self.assertEqual(var.device, var.value().device)
self.assertEqual(var.device, var.initialized_value().device)
var_cached = variables.Variable(2.0, caching_device="/job:foo")
self.assertFalse(var_cached.device.startswith("/job:foo"))
self.assertTrue(var_cached.value().device.startswith("/job:foo"))
def testCollections(self):
with self.test_session():
var_x = variables.Variable(2.0)
var_y = variables.Variable(2.0, trainable=False)
var_z = variables.Variable(2.0, trainable=True)
var_t = variables.Variable(
2.0,
trainable=True,
collections=[
ops.GraphKeys.TRAINABLE_VARIABLES, ops.GraphKeys.GLOBAL_VARIABLES
])
self.assertEqual([var_x, var_y, var_z, var_t],
variables.global_variables())
self.assertEqual([var_x, var_z, var_t], variables.trainable_variables())
def testOperators(self):
with self.test_session():
var_f = variables.Variable([2.0])
add = var_f + 0.0
radd = 1.0 + var_f
sub = var_f - 1.0
rsub = 1.0 - var_f
mul = var_f * 10.0
rmul = 10.0 * var_f
div = var_f / 10.0
rdiv = 10.0 / var_f
lt = var_f < 3.0
rlt = 3.0 < var_f
le = var_f <= 2.0
rle = 2.0 <= var_f
gt = var_f > 3.0
rgt = 3.0 > var_f
ge = var_f >= 2.0
rge = 2.0 >= var_f
neg = -var_f
abs_v = abs(var_f)
var_i = variables.Variable([20])
mod = var_i % 7
rmod = 103 % var_i
var_b = variables.Variable([True, False])
and_v = operator.and_(var_b, [True, True])
or_v = operator.or_(var_b, [False, True])
xor_v = operator.xor(var_b, [False, False])
invert_v = ~var_b
rnd = np.random.rand(4, 4).astype("f")
var_t = variables.Variable(rnd)
slice_v = var_t[2, 0:0]
var_m = variables.Variable([[2.0, 3.0]])
matmul = var_m.__matmul__([[10.0], [20.0]])
rmatmul = var_m.__rmatmul__([[10.0], [20.0]])
variables.global_variables_initializer().run()
self.assertAllClose([2.0], add.eval())
self.assertAllClose([3.0], radd.eval())
self.assertAllClose([1.0], sub.eval())
self.assertAllClose([-1.0], rsub.eval())
self.assertAllClose([20.0], mul.eval())
self.assertAllClose([20.0], rmul.eval())
self.assertAllClose([0.2], div.eval())
self.assertAllClose([5.0], rdiv.eval())
self.assertAllClose([-2.0], neg.eval())
self.assertAllClose([2.0], abs_v.eval())
self.assertAllClose([True], lt.eval())
self.assertAllClose([False], rlt.eval())
self.assertAllClose([True], le.eval())
self.assertAllClose([True], rle.eval())
self.assertAllClose([False], gt.eval())
self.assertAllClose([True], rgt.eval())
self.assertAllClose([True], ge.eval())
self.assertAllClose([True], rge.eval())
self.assertAllClose([6], mod.eval())
self.assertAllClose([3], rmod.eval())
self.assertAllClose([True, False], and_v.eval())
self.assertAllClose([True, True], or_v.eval())
self.assertAllClose([True, False], xor_v.eval())
self.assertAllClose([False, True], invert_v.eval())
self.assertAllClose(rnd[2, 0:0], slice_v.eval())
self.assertAllClose([[80.0]], matmul.eval())
self.assertAllClose([[20.0, 30.0], [40.0, 60.0]], rmatmul.eval())
def testSession(self):
with self.test_session() as sess:
var = variables.Variable([1, 12])
variables.global_variables_initializer().run()
self.assertAllClose([1, 12], sess.run(var))
def testDevicePlacement(self):
with self.test_session() as sess:
with ops.device("/cpu:0"):
var = variables.Variable([1, 12])
init_value = var.initialized_value()
init_op = variables.global_variables_initializer()
self.assertEqual(var.op.device, init_value.device)
self.assertEqual(var.op.device, init_op.device)
sess.run(init_op)
def testColocation(self):
with ops.device("/job:ps"):
var = variables.Variable(0, name="v")
with ops.device("/job:worker/task:7"):
assign_op = var.assign(1)
self.assertDeviceEqual("/job:ps", assign_op.device)
self.assertEqual([b"loc:@v"], assign_op.op.colocation_groups())
def testInitializerFunction(self):
value = [[-42], [133.7]]
shape = [2, 1]
with self.test_session():
initializer = lambda: constant_op.constant(value)
v1 = variables.Variable(initializer, dtype=dtypes.float32)
self.assertEqual(shape, v1.get_shape())
self.assertEqual(shape, v1.shape)
self.assertAllClose(value, v1.initial_value.eval())
with self.assertRaises(errors_impl.FailedPreconditionError):
v1.eval()
v2 = variables.Variable(
math_ops.negative(v1.initialized_value()), dtype=dtypes.float32)
self.assertEqual(v1.get_shape(), v2.get_shape())
self.assertEqual(v1.shape, v2.shape)
self.assertAllClose(np.negative(value), v2.initial_value.eval())
with self.assertRaises(errors_impl.FailedPreconditionError):
v2.eval()
variables.global_variables_initializer().run()
self.assertAllClose(np.negative(value), v2.eval())
def testNoRefDataRace(self):
with self.test_session():
a = variables.Variable([1, 2, 3], dtype=dtypes.float32)
b = variables.Variable(a.initialized_value() + 2)
c = variables.Variable(b.initialized_value() + 2)
variables.global_variables_initializer().run()
self.assertAllEqual(a.eval(), [1, 2, 3])
self.assertAllEqual(b.eval(), [3, 4, 5])
self.assertAllEqual(c.eval(), [5, 6, 7])
def testInitializerFunctionDevicePlacement(self):
with self.test_session():
initializer = lambda: constant_op.constant(42.0)
with ops.device("/cpu:100"):
v1 = variables.Variable(initializer, dtype=dtypes.float32, name="v1")
expected_device = "/device:CPU:100"
expected_group_v1 = [b"loc:@v1"]
self.assertEqual(expected_device, v1.op.device)
self.assertEqual(expected_group_v1, v1.op.colocation_groups())
for i in v1.initializer.inputs:
self.assertEqual(expected_group_v1, i.op.colocation_groups())
v2 = variables.Variable(initializer, dtype=dtypes.float32, name="v2")
expected_group_v2 = [b"loc:@v2"]
self.assertEqual(expected_group_v2, v2.op.colocation_groups())
for i in v2.initializer.inputs:
self.assertEqual(expected_group_v2, i.op.colocation_groups())
def testLoad(self):
with self.test_session():
var = variables.Variable(np.zeros((5, 5), np.float32))
variables.global_variables_initializer().run()
var.load(np.ones((5, 5), np.float32))
self.assertAllClose(np.ones((5, 5), np.float32), var.eval())
def testRepr(self):
var = variables.Variable(np.zeros((5, 5), np.float32), name='noop')
self.assertEqual(
"<tf.Variable 'noop:0' shape=(5, 5) dtype=float32_ref>",
repr(var))
class IsInitializedTest(test.TestCase):
def testNoVars(self):
with ops.Graph().as_default(), self.test_session() as sess:
uninited = variables.report_uninitialized_variables()
self.assertEqual(0, sess.run(uninited).size)
def testAssertVariablesInitialized(self):
with ops.Graph().as_default(), self.test_session() as sess:
v = variables.Variable([1, 2], name="v")
w = variables.Variable([3, 4], name="w")
_ = v, w
uninited = variables.report_uninitialized_variables()
self.assertAllEqual(np.array([b"v", b"w"]), sess.run(uninited))
variables.global_variables_initializer().run()
self.assertEqual(0, sess.run(uninited).size)
def testVariableList(self):
with ops.Graph().as_default(), self.test_session() as sess:
v = variables.Variable([1, 2], name="v")
w = variables.Variable([3, 4], name="w")
uninited = variables.report_uninitialized_variables()
self.assertAllEqual(np.array([b"v", b"w"]), sess.run(uninited))
sess.run(w.initializer)
self.assertAllEqual(np.array([b"v"]), sess.run(uninited))
v.initializer.run()
self.assertEqual(0, sess.run(uninited).size)
def testZeroSizeVarInitialized(self):
with ops.Graph().as_default(), self.test_session() as sess:
v = variables.Variable(array_ops.zeros([0, 2]), name="v")
uninited = variables.report_uninitialized_variables()
v.initializer.run() # not strictly necessary
self.assertEqual(0, sess.run(uninited).size)
def testTrainingWithZeroSizeVar(self):
with ops.Graph().as_default(), self.test_session() as sess:
a = variables.Variable(array_ops.zeros([0, 2]))
b = variables.Variable(array_ops.ones([2, 2]))
objective = math_ops.reduce_sum(b + math_ops.matmul(
a, a, transpose_a=True))
variables.global_variables_initializer().run()
do_opt = gradient_descent.GradientDescentOptimizer(0.1).minimize(
objective)
sess.run([do_opt])
self.assertAllClose([[0.9, 0.9], [0.9, 0.9]], b.eval())
class ObsoleteIsInitializedTest(test.TestCase):
def testNoVars(self):
with ops.Graph().as_default():
self.assertEqual(None, variables.assert_variables_initialized())
def testVariables(self):
with ops.Graph().as_default(), self.test_session() as sess:
v = variables.Variable([1, 2])
w = variables.Variable([3, 4])
_ = v, w
inited = variables.assert_variables_initialized()
with self.assertRaisesOpError("Attempting to use uninitialized value"):
sess.run(inited)
variables.global_variables_initializer().run()
sess.run(inited)
def testVariableList(self):
with ops.Graph().as_default(), self.test_session() as sess:
v = variables.Variable([1, 2])
w = variables.Variable([3, 4])
inited = variables.assert_variables_initialized([v])
with self.assertRaisesOpError("Attempting to use uninitialized value"):
inited.op.run()
sess.run(w.initializer)
with self.assertRaisesOpError("Attempting to use uninitialized value"):
inited.op.run()
v.initializer.run()
inited.op.run()
class PartitionedVariableTest(test.TestCase):
def testPartitionedVariable(self):
with ops.Graph().as_default():
v0 = variables.Variable([0])
v1 = variables.Variable([1])
v0._set_save_slice_info(
variables.Variable.SaveSliceInfo(v0.name, [2], [0], [1]))
v1._set_save_slice_info(
variables.Variable.SaveSliceInfo(v0.name, [2], [1], [1]))
partitions = [2]
# Pass variable_list as [v1, v0] to ensure they are properly
# re-sorted to [v0, v1] based on their slice info offsets.
partitioned_variable = variables.PartitionedVariable(
name="two_vars",
shape=[2],
dtype=v0.dtype,
variable_list=[v1, v0],
partitions=partitions)
concatenated = ops.convert_to_tensor(partitioned_variable)
num_partitions = len(partitioned_variable)
iterated_partitions = list(partitioned_variable)
self.assertEqual(2, num_partitions)
self.assertEqual([v0, v1], iterated_partitions)
self.assertEqual([2], concatenated.get_shape())
self.assertEqual([2], concatenated.shape)
def testPartitionedVariableFailures(self):
with ops.Graph().as_default():
with self.assertRaisesRegexp(ValueError, "empty"):
variables.PartitionedVariable(
name="fail",
shape=2,
dtype=dtypes.int32,
variable_list=[],
partitions=[])
with self.assertRaisesRegexp(ValueError, "must have a save_slice_info"):
v0 = variables.Variable([0])
partitions = [1]
variables.PartitionedVariable(
name="two_vars",
shape=[1],
dtype=v0.dtype,
variable_list=[v0],
partitions=partitions)
with self.assertRaisesRegexp(ValueError, "full shapes must match"):
v0 = variables.Variable([0])
v1 = variables.Variable([1])
v0._set_save_slice_info(
variables.Variable.SaveSliceInfo(v0.name, [2], [0], [1]))
v1._set_save_slice_info(
variables.Variable.SaveSliceInfo(v0.name, [2], [1], [1]))
partitions = [2]
variables.PartitionedVariable(
name="two_vars",
shape=[3],
dtype=v0.dtype,
variable_list=[v1, v0],
partitions=partitions)
with self.assertRaisesRegexp(ValueError, "must be positive"):
v0 = variables.Variable([0])
v0._set_save_slice_info(
variables.Variable.SaveSliceInfo(v0.name, [2], [0], [1]))
partitions = [0]
variables.PartitionedVariable(
name="two_vars",
shape=[2],
dtype=v0.dtype,
variable_list=[v0],
partitions=partitions)
class VariableContainerTest(test.TestCase):
def testContainer(self):
with ops.Graph().as_default():
v0 = variables.Variable([0])
with ops.container("l1"):
v1 = variables.Variable([1])
with ops.container("l2"):
v2 = variables.Variable([2])
special_v = gen_state_ops._variable(
shape=[1],
dtype=dtypes.float32,
name="VariableInL3",
container="l3",
shared_name="")
v3 = variables.Variable([3])
v4 = variables.Variable([4])
self.assertEqual(compat.as_bytes(""), v0.op.get_attr("container"))
self.assertEqual(compat.as_bytes("l1"), v1.op.get_attr("container"))
self.assertEqual(compat.as_bytes("l2"), v2.op.get_attr("container"))
self.assertEqual(compat.as_bytes("l3"), special_v.op.get_attr("container"))
self.assertEqual(compat.as_bytes("l1"), v3.op.get_attr("container"))
self.assertEqual(compat.as_bytes(""), v4.op.get_attr("container"))
if __name__ == "__main__":
test.main()
| apache-2.0 | 3,985,162,514,220,124,700 | 35.819033 | 94 | 0.634761 | false |
sivatha/video-player-sample | source/core/js/libs/closure-library/closure/bin/build/depswriter.py | 17 | 6206 | #!/usr/bin/env python
#
# Copyright 2009 The Closure Library Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generates a Closure deps.js file given a list of JavaScript sources.
Paths can be specified as arguments or (more commonly) specifying trees
with the flags (call with --help for descriptions).
Usage: depswriter.py [path/to/js1.js [path/to/js2.js] ...]
"""
import logging
import optparse
import os
import posixpath
import shlex
import sys
import source
import treescan
def MakeDepsFile(source_map):
"""Make a generated deps file.
Args:
source_map: A dict map of the source path to source.Source object.
Returns:
str, A generated deps file source.
"""
# Write in path alphabetical order
paths = source_map.keys()
paths.sort()
lines = []
for path in paths:
js_source = source_map[path]
# We don't need to add entries that don't provide anything.
if js_source.provides:
lines.append(_GetDepsLine(path, js_source))
return ''.join(lines)
def _GetDepsLine(path, js_source):
"""Get a deps.js file string for a source."""
provides = list(js_source.provides)
provides.sort()
requires = list(js_source.requires)
requires.sort()
return 'goog.addDependency(\'%s\', %s, %s);\n' % (path, provides, requires)
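# Illustrative example (not part of the original script): for a source at
# 'a/b.js' that provides goog.foo and requires goog.bar, _GetDepsLine()
# produces:
#   goog.addDependency('a/b.js', ['goog.foo'], ['goog.bar']);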
def _GetOptionsParser():
"""Get the options parser."""
parser = optparse.OptionParser(__doc__)
parser.add_option('--output_file',
dest='output_file',
action='store',
help=('If specified, write output to this path instead of '
'writing to standard output.'))
parser.add_option('--root',
dest='roots',
default=[],
action='append',
help='A root directory to scan for JS source files. '
'Paths of JS files in generated deps file will be '
'relative to this path. This flag may be specified '
'multiple times.')
parser.add_option('--root_with_prefix',
dest='roots_with_prefix',
default=[],
action='append',
help='A root directory to scan for JS source files, plus '
'a prefix (if either contains a space, surround with '
'quotes). Paths in generated deps file will be relative '
                         'to the root, but preceded by the prefix. This flag '
'may be specified multiple times.')
parser.add_option('--path_with_depspath',
dest='paths_with_depspath',
default=[],
action='append',
help='A path to a source file and an alternate path to '
'the file in the generated deps file (if either contains '
                         'a space, surround with quotes). This flag may be '
                         'specified multiple times.')
return parser
def _NormalizePathSeparators(path):
"""Replaces OS-specific path separators with POSIX-style slashes.
Args:
path: str, A file path.
Returns:
str, The path with any OS-specific path separators (such as backslash on
Windows) replaced with URL-compatible forward slashes. A no-op on systems
that use POSIX paths.
"""
return path.replace(os.sep, posixpath.sep)
def _GetRelativePathToSourceDict(root, prefix=''):
"""Scans a top root directory for .js sources.
Args:
root: str, Root directory.
prefix: str, Prefix for returned paths.
Returns:
dict, A map of relative paths (with prefix, if given), to source.Source
objects.
"""
# Remember and restore the cwd when we're done. We work from the root so
# that paths are relative from the root.
start_wd = os.getcwd()
os.chdir(root)
path_to_source = {}
for path in treescan.ScanTreeForJsFiles('.'):
prefixed_path = _NormalizePathSeparators(os.path.join(prefix, path))
path_to_source[prefixed_path] = source.Source(source.GetFileContents(path))
os.chdir(start_wd)
return path_to_source
def _GetPair(s):
"""Return a string as a shell-parsed tuple. Two values expected."""
try:
# shlex uses '\' as an escape character, so they must be escaped.
s = s.replace('\\', '\\\\')
first, second = shlex.split(s)
return (first, second)
except:
raise Exception('Unable to parse input line as a pair: %s' % s)
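# Illustrative examples (not part of the original script), doctest-style:
#
#   >>> _GetPair('root prefix')
#   ('root', 'prefix')
#   >>> _GetPair('"my root" prefix')
#   ('my root', 'prefix')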
def main():
"""CLI frontend to MakeDepsFile."""
logging.basicConfig(format=(sys.argv[0] + ': %(message)s'),
level=logging.INFO)
options, args = _GetOptionsParser().parse_args()
path_to_source = {}
# Roots without prefixes
for root in options.roots:
path_to_source.update(_GetRelativePathToSourceDict(root))
# Roots with prefixes
for root_and_prefix in options.roots_with_prefix:
root, prefix = _GetPair(root_and_prefix)
path_to_source.update(_GetRelativePathToSourceDict(root, prefix=prefix))
# Source paths
for path in args:
path_to_source[path] = source.Source(source.GetFileContents(path))
# Source paths with alternate deps paths
for path_with_depspath in options.paths_with_depspath:
srcpath, depspath = _GetPair(path_with_depspath)
path_to_source[depspath] = source.Source(source.GetFileContents(srcpath))
# Make our output pipe.
if options.output_file:
out = open(options.output_file, 'w')
else:
out = sys.stdout
out.write('// This file was autogenerated by %s.\n' % sys.argv[0])
out.write('// Please do not edit.\n')
out.write(MakeDepsFile(path_to_source))
if __name__ == '__main__':
main()
| apache-2.0 | 4,009,221,084,929,332,000 | 29.126214 | 79 | 0.640509 | false |
WimpyAnalytics/django-andablog | demo/common/migrations/0002_auto_20150507_1708.py | 1 | 1391 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django import VERSION as DJANGO_VERSION
def get_operations():
"""
This will break things if you upgrade Django to 1.8 having already applied this migration in 1.7.
Since this is for a demo site it doesn't really matter (simply blow away the DB if you want to go to 1.8)
    Our demo site is unusual in that we want to run its tests (for integration testing) in multiple Django versions.
Typical sites don't have to worry about that sort of thing.
"""
compatible = (1, 8) <= DJANGO_VERSION < (1, 10)
if not compatible:
return []
return [
migrations.AlterField(
model_name='user',
name='groups',
field=models.ManyToManyField(related_query_name='user', related_name='user_set', to='auth.Group', blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', verbose_name='groups'),
),
migrations.AlterField(
model_name='user',
name='last_login',
field=models.DateTimeField(null=True, verbose_name='last login', blank=True),
),
]
class Migration(migrations.Migration):
dependencies = [
('common', '0001_initial'),
]
operations = get_operations() | bsd-2-clause | 3,429,777,446,212,096,000 | 33.8 | 256 | 0.647735 | false |
dfdx2/django | tests/httpwrappers/tests.py | 14 | 30531 | import copy
import json
import os
import pickle
import unittest
import uuid
from django.core.exceptions import DisallowedRedirect, SuspiciousOperation
from django.core.serializers.json import DjangoJSONEncoder
from django.core.signals import request_finished
from django.db import close_old_connections
from django.http import (
BadHeaderError, HttpResponse, HttpResponseNotAllowed,
HttpResponseNotModified, HttpResponsePermanentRedirect,
HttpResponseRedirect, JsonResponse, QueryDict, SimpleCookie,
StreamingHttpResponse, parse_cookie,
)
from django.test import SimpleTestCase
from django.utils.functional import lazystr
class QueryDictTests(SimpleTestCase):
def test_create_with_no_args(self):
self.assertEqual(QueryDict(), QueryDict(''))
def test_missing_key(self):
q = QueryDict()
with self.assertRaises(KeyError):
q.__getitem__('foo')
def test_immutability(self):
q = QueryDict()
with self.assertRaises(AttributeError):
q.__setitem__('something', 'bar')
with self.assertRaises(AttributeError):
q.setlist('foo', ['bar'])
with self.assertRaises(AttributeError):
q.appendlist('foo', ['bar'])
with self.assertRaises(AttributeError):
q.update({'foo': 'bar'})
with self.assertRaises(AttributeError):
q.pop('foo')
with self.assertRaises(AttributeError):
q.popitem()
with self.assertRaises(AttributeError):
q.clear()
def test_immutable_get_with_default(self):
q = QueryDict()
self.assertEqual(q.get('foo', 'default'), 'default')
def test_immutable_basic_operations(self):
q = QueryDict()
self.assertEqual(q.getlist('foo'), [])
self.assertNotIn('foo', q)
self.assertEqual(list(q.items()), [])
self.assertEqual(list(q.lists()), [])
self.assertEqual(list(q.keys()), [])
self.assertEqual(list(q.values()), [])
self.assertEqual(len(q), 0)
self.assertEqual(q.urlencode(), '')
def test_single_key_value(self):
"""Test QueryDict with one key/value pair"""
q = QueryDict('foo=bar')
self.assertEqual(q['foo'], 'bar')
with self.assertRaises(KeyError):
q.__getitem__('bar')
with self.assertRaises(AttributeError):
q.__setitem__('something', 'bar')
self.assertEqual(q.get('foo', 'default'), 'bar')
self.assertEqual(q.get('bar', 'default'), 'default')
self.assertEqual(q.getlist('foo'), ['bar'])
self.assertEqual(q.getlist('bar'), [])
with self.assertRaises(AttributeError):
q.setlist('foo', ['bar'])
with self.assertRaises(AttributeError):
q.appendlist('foo', ['bar'])
self.assertIn('foo', q)
self.assertNotIn('bar', q)
self.assertEqual(list(q.items()), [('foo', 'bar')])
self.assertEqual(list(q.lists()), [('foo', ['bar'])])
self.assertEqual(list(q.keys()), ['foo'])
self.assertEqual(list(q.values()), ['bar'])
self.assertEqual(len(q), 1)
with self.assertRaises(AttributeError):
q.update({'foo': 'bar'})
with self.assertRaises(AttributeError):
q.pop('foo')
with self.assertRaises(AttributeError):
q.popitem()
with self.assertRaises(AttributeError):
q.clear()
with self.assertRaises(AttributeError):
q.setdefault('foo', 'bar')
self.assertEqual(q.urlencode(), 'foo=bar')
def test_urlencode(self):
q = QueryDict(mutable=True)
q['next'] = '/a&b/'
self.assertEqual(q.urlencode(), 'next=%2Fa%26b%2F')
self.assertEqual(q.urlencode(safe='/'), 'next=/a%26b/')
q = QueryDict(mutable=True)
q['next'] = '/t\xebst&key/'
self.assertEqual(q.urlencode(), 'next=%2Ft%C3%ABst%26key%2F')
self.assertEqual(q.urlencode(safe='/'), 'next=/t%C3%ABst%26key/')
def test_mutable_copy(self):
"""A copy of a QueryDict is mutable."""
q = QueryDict().copy()
with self.assertRaises(KeyError):
q.__getitem__("foo")
q['name'] = 'john'
self.assertEqual(q['name'], 'john')
def test_mutable_delete(self):
q = QueryDict(mutable=True)
q['name'] = 'john'
del q['name']
self.assertNotIn('name', q)
def test_basic_mutable_operations(self):
q = QueryDict(mutable=True)
q['name'] = 'john'
self.assertEqual(q.get('foo', 'default'), 'default')
self.assertEqual(q.get('name', 'default'), 'john')
self.assertEqual(q.getlist('name'), ['john'])
self.assertEqual(q.getlist('foo'), [])
q.setlist('foo', ['bar', 'baz'])
self.assertEqual(q.get('foo', 'default'), 'baz')
self.assertEqual(q.getlist('foo'), ['bar', 'baz'])
q.appendlist('foo', 'another')
self.assertEqual(q.getlist('foo'), ['bar', 'baz', 'another'])
self.assertEqual(q['foo'], 'another')
self.assertIn('foo', q)
self.assertCountEqual(q.items(), [('foo', 'another'), ('name', 'john')])
self.assertCountEqual(q.lists(), [('foo', ['bar', 'baz', 'another']), ('name', ['john'])])
self.assertCountEqual(q.keys(), ['foo', 'name'])
self.assertCountEqual(q.values(), ['another', 'john'])
q.update({'foo': 'hello'})
self.assertEqual(q['foo'], 'hello')
self.assertEqual(q.get('foo', 'not available'), 'hello')
self.assertEqual(q.getlist('foo'), ['bar', 'baz', 'another', 'hello'])
self.assertEqual(q.pop('foo'), ['bar', 'baz', 'another', 'hello'])
self.assertEqual(q.pop('foo', 'not there'), 'not there')
self.assertEqual(q.get('foo', 'not there'), 'not there')
self.assertEqual(q.setdefault('foo', 'bar'), 'bar')
self.assertEqual(q['foo'], 'bar')
self.assertEqual(q.getlist('foo'), ['bar'])
self.assertIn(q.urlencode(), ['foo=bar&name=john', 'name=john&foo=bar'])
q.clear()
self.assertEqual(len(q), 0)
def test_multiple_keys(self):
"""Test QueryDict with two key/value pairs with same keys."""
q = QueryDict('vote=yes&vote=no')
self.assertEqual(q['vote'], 'no')
with self.assertRaises(AttributeError):
q.__setitem__('something', 'bar')
self.assertEqual(q.get('vote', 'default'), 'no')
self.assertEqual(q.get('foo', 'default'), 'default')
self.assertEqual(q.getlist('vote'), ['yes', 'no'])
self.assertEqual(q.getlist('foo'), [])
with self.assertRaises(AttributeError):
q.setlist('foo', ['bar', 'baz'])
with self.assertRaises(AttributeError):
q.setlist('foo', ['bar', 'baz'])
with self.assertRaises(AttributeError):
q.appendlist('foo', ['bar'])
self.assertIn('vote', q)
self.assertNotIn('foo', q)
self.assertEqual(list(q.items()), [('vote', 'no')])
self.assertEqual(list(q.lists()), [('vote', ['yes', 'no'])])
self.assertEqual(list(q.keys()), ['vote'])
self.assertEqual(list(q.values()), ['no'])
self.assertEqual(len(q), 1)
with self.assertRaises(AttributeError):
q.update({'foo': 'bar'})
with self.assertRaises(AttributeError):
q.pop('foo')
with self.assertRaises(AttributeError):
q.popitem()
with self.assertRaises(AttributeError):
q.clear()
with self.assertRaises(AttributeError):
q.setdefault('foo', 'bar')
with self.assertRaises(AttributeError):
q.__delitem__('vote')
def test_pickle(self):
q = QueryDict()
q1 = pickle.loads(pickle.dumps(q, 2))
self.assertEqual(q, q1)
q = QueryDict('a=b&c=d')
q1 = pickle.loads(pickle.dumps(q, 2))
self.assertEqual(q, q1)
q = QueryDict('a=b&c=d&a=1')
q1 = pickle.loads(pickle.dumps(q, 2))
self.assertEqual(q, q1)
def test_update_from_querydict(self):
"""Regression test for #8278: QueryDict.update(QueryDict)"""
x = QueryDict("a=1&a=2", mutable=True)
y = QueryDict("a=3&a=4")
x.update(y)
self.assertEqual(x.getlist('a'), ['1', '2', '3', '4'])
def test_non_default_encoding(self):
"""#13572 - QueryDict with a non-default encoding"""
q = QueryDict('cur=%A4', encoding='iso-8859-15')
self.assertEqual(q.encoding, 'iso-8859-15')
self.assertEqual(list(q.items()), [('cur', '€')])
self.assertEqual(q.urlencode(), 'cur=%A4')
q = q.copy()
self.assertEqual(q.encoding, 'iso-8859-15')
self.assertEqual(list(q.items()), [('cur', '€')])
self.assertEqual(q.urlencode(), 'cur=%A4')
self.assertEqual(copy.copy(q).encoding, 'iso-8859-15')
self.assertEqual(copy.deepcopy(q).encoding, 'iso-8859-15')
def test_querydict_fromkeys(self):
self.assertEqual(QueryDict.fromkeys(['key1', 'key2', 'key3']), QueryDict('key1&key2&key3'))
def test_fromkeys_with_nonempty_value(self):
self.assertEqual(
QueryDict.fromkeys(['key1', 'key2', 'key3'], value='val'),
QueryDict('key1=val&key2=val&key3=val')
)
def test_fromkeys_is_immutable_by_default(self):
# Match behavior of __init__() which is also immutable by default.
q = QueryDict.fromkeys(['key1', 'key2', 'key3'])
with self.assertRaisesMessage(AttributeError, 'This QueryDict instance is immutable'):
q['key4'] = 'nope'
def test_fromkeys_mutable_override(self):
q = QueryDict.fromkeys(['key1', 'key2', 'key3'], mutable=True)
q['key4'] = 'yep'
self.assertEqual(q, QueryDict('key1&key2&key3&key4=yep'))
def test_duplicates_in_fromkeys_iterable(self):
self.assertEqual(QueryDict.fromkeys('xyzzy'), QueryDict('x&y&z&z&y'))
def test_fromkeys_with_nondefault_encoding(self):
key_utf16 = b'\xff\xfe\x8e\x02\xdd\x01\x9e\x02'
value_utf16 = b'\xff\xfe\xdd\x01n\x00l\x00P\x02\x8c\x02'
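        # The UTF-16 byte strings decode to 'ʎǝʞ' and 'ǝnlɐʌ' ("key"/"value"
        # upside down), matching the expected QueryDict built below.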
q = QueryDict.fromkeys([key_utf16], value=value_utf16, encoding='utf-16')
expected = QueryDict('', mutable=True)
expected['ʎǝʞ'] = 'ǝnlɐʌ'
self.assertEqual(q, expected)
def test_fromkeys_empty_iterable(self):
self.assertEqual(QueryDict.fromkeys([]), QueryDict(''))
def test_fromkeys_noniterable(self):
with self.assertRaises(TypeError):
QueryDict.fromkeys(0)
class HttpResponseTests(unittest.TestCase):
def test_headers_type(self):
r = HttpResponse()
# ASCII strings or bytes values are converted to strings.
r['key'] = 'test'
self.assertEqual(r['key'], 'test')
r['key'] = 'test'.encode('ascii')
self.assertEqual(r['key'], 'test')
self.assertIn(b'test', r.serialize_headers())
# Non-ASCII values are serialized to Latin-1.
r['key'] = 'café'
self.assertIn('café'.encode('latin-1'), r.serialize_headers())
# Other unicode values are MIME-encoded (there's no way to pass them as bytes).
r['key'] = '†'
self.assertEqual(r['key'], '=?utf-8?b?4oCg?=')
self.assertIn(b'=?utf-8?b?4oCg?=', r.serialize_headers())
# The response also converts string or bytes keys to strings, but requires
# them to contain ASCII
r = HttpResponse()
del r['Content-Type']
r['foo'] = 'bar'
headers = list(r.items())
self.assertEqual(len(headers), 1)
self.assertEqual(headers[0], ('foo', 'bar'))
r = HttpResponse()
del r['Content-Type']
r[b'foo'] = 'bar'
headers = list(r.items())
self.assertEqual(len(headers), 1)
self.assertEqual(headers[0], ('foo', 'bar'))
self.assertIsInstance(headers[0][0], str)
r = HttpResponse()
with self.assertRaises(UnicodeError):
r.__setitem__('føø', 'bar')
with self.assertRaises(UnicodeError):
r.__setitem__('føø'.encode(), 'bar')
def test_long_line(self):
# Bug #20889: long lines trigger newlines to be added to headers
# (which is not allowed due to bug #10188)
h = HttpResponse()
f = 'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz a\xcc\x88'.encode('latin-1')
f = f.decode('utf-8')
h['Content-Disposition'] = 'attachment; filename="%s"' % f
        # This one is triggering http://bugs.python.org/issue20747, that is,
        # Python will itself insert a newline in the header
h['Content-Disposition'] = 'attachment; filename="EdelRot_Blu\u0308te (3)-0.JPG"'
def test_newlines_in_headers(self):
# Bug #10188: Do not allow newlines in headers (CR or LF)
r = HttpResponse()
with self.assertRaises(BadHeaderError):
r.__setitem__('test\rstr', 'test')
with self.assertRaises(BadHeaderError):
r.__setitem__('test\nstr', 'test')
def test_dict_behavior(self):
"""
Test for bug #14020: Make HttpResponse.get work like dict.get
"""
r = HttpResponse()
self.assertIsNone(r.get('test'))
def test_non_string_content(self):
# Bug 16494: HttpResponse should behave consistently with non-strings
r = HttpResponse(12345)
self.assertEqual(r.content, b'12345')
# test content via property
r = HttpResponse()
r.content = 12345
self.assertEqual(r.content, b'12345')
def test_iter_content(self):
r = HttpResponse(['abc', 'def', 'ghi'])
self.assertEqual(r.content, b'abcdefghi')
# test iter content via property
r = HttpResponse()
r.content = ['idan', 'alex', 'jacob']
self.assertEqual(r.content, b'idanalexjacob')
r = HttpResponse()
r.content = [1, 2, 3]
self.assertEqual(r.content, b'123')
# test odd inputs
r = HttpResponse()
r.content = ['1', '2', 3, '\u079e']
# '\xde\x9e' == unichr(1950).encode()
self.assertEqual(r.content, b'123\xde\x9e')
# .content can safely be accessed multiple times.
r = HttpResponse(iter(['hello', 'world']))
self.assertEqual(r.content, r.content)
self.assertEqual(r.content, b'helloworld')
# __iter__ can safely be called multiple times (#20187).
self.assertEqual(b''.join(r), b'helloworld')
self.assertEqual(b''.join(r), b'helloworld')
# Accessing .content still works.
self.assertEqual(r.content, b'helloworld')
# Accessing .content also works if the response was iterated first.
r = HttpResponse(iter(['hello', 'world']))
self.assertEqual(b''.join(r), b'helloworld')
self.assertEqual(r.content, b'helloworld')
# Additional content can be written to the response.
r = HttpResponse(iter(['hello', 'world']))
self.assertEqual(r.content, b'helloworld')
r.write('!')
self.assertEqual(r.content, b'helloworld!')
def test_iterator_isnt_rewound(self):
# Regression test for #13222
r = HttpResponse('abc')
i = iter(r)
self.assertEqual(list(i), [b'abc'])
self.assertEqual(list(i), [])
def test_lazy_content(self):
r = HttpResponse(lazystr('helloworld'))
self.assertEqual(r.content, b'helloworld')
def test_file_interface(self):
r = HttpResponse()
r.write(b"hello")
self.assertEqual(r.tell(), 5)
r.write("привет")
self.assertEqual(r.tell(), 17)
r = HttpResponse(['abc'])
r.write('def')
self.assertEqual(r.tell(), 6)
self.assertEqual(r.content, b'abcdef')
# with Content-Encoding header
r = HttpResponse()
r['Content-Encoding'] = 'winning'
r.write(b'abc')
r.write(b'def')
self.assertEqual(r.content, b'abcdef')
def test_stream_interface(self):
r = HttpResponse('asdf')
self.assertEqual(r.getvalue(), b'asdf')
r = HttpResponse()
self.assertIs(r.writable(), True)
r.writelines(['foo\n', 'bar\n', 'baz\n'])
self.assertEqual(r.content, b'foo\nbar\nbaz\n')
def test_unsafe_redirect(self):
bad_urls = [
'data:text/html,<script>window.alert("xss")</script>',
'mailto:[email protected]',
'file:///etc/passwd',
]
for url in bad_urls:
with self.assertRaises(SuspiciousOperation):
HttpResponseRedirect(url)
with self.assertRaises(SuspiciousOperation):
HttpResponsePermanentRedirect(url)
class HttpResponseSubclassesTests(SimpleTestCase):
def test_redirect(self):
response = HttpResponseRedirect('/redirected/')
self.assertEqual(response.status_code, 302)
# Standard HttpResponse init args can be used
response = HttpResponseRedirect(
'/redirected/',
content='The resource has temporarily moved',
content_type='text/html',
)
self.assertContains(response, 'The resource has temporarily moved', status_code=302)
self.assertEqual(response.url, response['Location'])
def test_redirect_lazy(self):
"""Make sure HttpResponseRedirect works with lazy strings."""
r = HttpResponseRedirect(lazystr('/redirected/'))
self.assertEqual(r.url, '/redirected/')
def test_redirect_repr(self):
response = HttpResponseRedirect('/redirected/')
expected = '<HttpResponseRedirect status_code=302, "text/html; charset=utf-8", url="/redirected/">'
self.assertEqual(repr(response), expected)
def test_invalid_redirect_repr(self):
"""
If HttpResponseRedirect raises DisallowedRedirect, its __repr__()
should work (in the debug view, for example).
"""
response = HttpResponseRedirect.__new__(HttpResponseRedirect)
with self.assertRaisesMessage(DisallowedRedirect, "Unsafe redirect to URL with protocol 'ssh'"):
HttpResponseRedirect.__init__(response, 'ssh://foo')
expected = '<HttpResponseRedirect status_code=302, "text/html; charset=utf-8", url="ssh://foo">'
self.assertEqual(repr(response), expected)
def test_not_modified(self):
response = HttpResponseNotModified()
self.assertEqual(response.status_code, 304)
# 304 responses should not have content/content-type
with self.assertRaises(AttributeError):
response.content = "Hello dear"
self.assertNotIn('content-type', response)
def test_not_modified_repr(self):
response = HttpResponseNotModified()
self.assertEqual(repr(response), '<HttpResponseNotModified status_code=304>')
def test_not_allowed(self):
response = HttpResponseNotAllowed(['GET'])
self.assertEqual(response.status_code, 405)
# Standard HttpResponse init args can be used
response = HttpResponseNotAllowed(['GET'], content='Only the GET method is allowed', content_type='text/html')
self.assertContains(response, 'Only the GET method is allowed', status_code=405)
def test_not_allowed_repr(self):
response = HttpResponseNotAllowed(['GET', 'OPTIONS'], content_type='text/plain')
expected = '<HttpResponseNotAllowed [GET, OPTIONS] status_code=405, "text/plain">'
self.assertEqual(repr(response), expected)
def test_not_allowed_repr_no_content_type(self):
response = HttpResponseNotAllowed(('GET', 'POST'))
del response['Content-Type']
self.assertEqual(repr(response), '<HttpResponseNotAllowed [GET, POST] status_code=405>')
class JsonResponseTests(SimpleTestCase):
def test_json_response_non_ascii(self):
data = {'key': 'łóżko'}
response = JsonResponse(data)
self.assertEqual(json.loads(response.content.decode()), data)
def test_json_response_raises_type_error_with_default_setting(self):
with self.assertRaisesMessage(
TypeError,
'In order to allow non-dict objects to be serialized set the '
'safe parameter to False'
):
JsonResponse([1, 2, 3])
def test_json_response_text(self):
response = JsonResponse('foobar', safe=False)
self.assertEqual(json.loads(response.content.decode()), 'foobar')
def test_json_response_list(self):
response = JsonResponse(['foo', 'bar'], safe=False)
self.assertEqual(json.loads(response.content.decode()), ['foo', 'bar'])
def test_json_response_uuid(self):
u = uuid.uuid4()
response = JsonResponse(u, safe=False)
self.assertEqual(json.loads(response.content.decode()), str(u))
def test_json_response_custom_encoder(self):
class CustomDjangoJSONEncoder(DjangoJSONEncoder):
def encode(self, o):
return json.dumps({'foo': 'bar'})
response = JsonResponse({}, encoder=CustomDjangoJSONEncoder)
self.assertEqual(json.loads(response.content.decode()), {'foo': 'bar'})
def test_json_response_passing_arguments_to_json_dumps(self):
response = JsonResponse({'foo': 'bar'}, json_dumps_params={'indent': 2})
self.assertEqual(response.content.decode(), '{\n "foo": "bar"\n}')
class StreamingHttpResponseTests(SimpleTestCase):
def test_streaming_response(self):
r = StreamingHttpResponse(iter(['hello', 'world']))
# iterating over the response itself yields bytestring chunks.
chunks = list(r)
self.assertEqual(chunks, [b'hello', b'world'])
for chunk in chunks:
self.assertIsInstance(chunk, bytes)
# and the response can only be iterated once.
self.assertEqual(list(r), [])
# even when a sequence that can be iterated many times, like a list,
# is given as content.
r = StreamingHttpResponse(['abc', 'def'])
self.assertEqual(list(r), [b'abc', b'def'])
self.assertEqual(list(r), [])
# iterating over strings still yields bytestring chunks.
r.streaming_content = iter(['hello', 'café'])
chunks = list(r)
# '\xc3\xa9' == unichr(233).encode()
self.assertEqual(chunks, [b'hello', b'caf\xc3\xa9'])
for chunk in chunks:
self.assertIsInstance(chunk, bytes)
# streaming responses don't have a `content` attribute.
self.assertFalse(hasattr(r, 'content'))
# and you can't accidentally assign to a `content` attribute.
with self.assertRaises(AttributeError):
r.content = 'xyz'
# but they do have a `streaming_content` attribute.
self.assertTrue(hasattr(r, 'streaming_content'))
# that exists so we can check if a response is streaming, and wrap or
# replace the content iterator.
r.streaming_content = iter(['abc', 'def'])
r.streaming_content = (chunk.upper() for chunk in r.streaming_content)
self.assertEqual(list(r), [b'ABC', b'DEF'])
        # coercing a streaming response to bytes doesn't return a complete HTTP
        # message like a regular response does; it only gives us the headers.
r = StreamingHttpResponse(iter(['hello', 'world']))
self.assertEqual(bytes(r), b'Content-Type: text/html; charset=utf-8')
# and this won't consume its content.
self.assertEqual(list(r), [b'hello', b'world'])
# additional content cannot be written to the response.
r = StreamingHttpResponse(iter(['hello', 'world']))
with self.assertRaises(Exception):
r.write('!')
# and we can't tell the current position.
with self.assertRaises(Exception):
r.tell()
r = StreamingHttpResponse(iter(['hello', 'world']))
self.assertEqual(r.getvalue(), b'helloworld')
class FileCloseTests(SimpleTestCase):
def setUp(self):
# Disable the request_finished signal during this test
# to avoid interfering with the database connection.
request_finished.disconnect(close_old_connections)
def tearDown(self):
request_finished.connect(close_old_connections)
def test_response(self):
filename = os.path.join(os.path.dirname(__file__), 'abc.txt')
        # HttpResponse consumes the file content immediately, so the file is
        # closed as soon as the response is created.
file1 = open(filename)
r = HttpResponse(file1)
self.assertTrue(file1.closed)
r.close()
        # when multiple files are assigned as content, make sure they are all
        # closed with the response.
file1 = open(filename)
file2 = open(filename)
r = HttpResponse(file1)
r.content = file2
self.assertTrue(file1.closed)
self.assertTrue(file2.closed)
def test_streaming_response(self):
filename = os.path.join(os.path.dirname(__file__), 'abc.txt')
# file isn't closed until we close the response.
file1 = open(filename)
r = StreamingHttpResponse(file1)
self.assertFalse(file1.closed)
r.close()
self.assertTrue(file1.closed)
        # when multiple files are assigned as content, make sure they are all
        # closed with the response.
file1 = open(filename)
file2 = open(filename)
r = StreamingHttpResponse(file1)
r.streaming_content = file2
self.assertFalse(file1.closed)
self.assertFalse(file2.closed)
r.close()
self.assertTrue(file1.closed)
self.assertTrue(file2.closed)
class CookieTests(unittest.TestCase):
def test_encode(self):
"""Semicolons and commas are encoded."""
c = SimpleCookie()
c['test'] = "An,awkward;value"
self.assertNotIn(";", c.output().rstrip(';')) # IE compat
self.assertNotIn(",", c.output().rstrip(';')) # Safari compat
def test_decode(self):
"""Semicolons and commas are decoded."""
c = SimpleCookie()
c['test'] = "An,awkward;value"
c2 = SimpleCookie()
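        # c.output() begins with 'Set-Cookie: '; the [12:] slice strips that prefix.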
c2.load(c.output()[12:])
self.assertEqual(c['test'].value, c2['test'].value)
c3 = parse_cookie(c.output()[12:])
self.assertEqual(c['test'].value, c3['test'])
def test_decode_2(self):
c = SimpleCookie()
c['test'] = b"\xf0"
c2 = SimpleCookie()
c2.load(c.output()[12:])
self.assertEqual(c['test'].value, c2['test'].value)
c3 = parse_cookie(c.output()[12:])
self.assertEqual(c['test'].value, c3['test'])
def test_nonstandard_keys(self):
"""
A single non-standard cookie name doesn't affect all cookies (#13007).
"""
self.assertIn('good_cookie', parse_cookie('good_cookie=yes;bad:cookie=yes').keys())
def test_repeated_nonstandard_keys(self):
"""
A repeated non-standard name doesn't affect all cookies (#15852).
"""
self.assertIn('good_cookie', parse_cookie('a:=b; a:=c; good_cookie=yes').keys())
def test_python_cookies(self):
"""
Test cases copied from Python's Lib/test/test_http_cookies.py
"""
self.assertEqual(parse_cookie('chips=ahoy; vienna=finger'), {'chips': 'ahoy', 'vienna': 'finger'})
# Here parse_cookie() differs from Python's cookie parsing in that it
# treats all semicolons as delimiters, even within quotes.
self.assertEqual(
parse_cookie('keebler="E=mc2; L=\\"Loves\\"; fudge=\\012;"'),
{'keebler': '"E=mc2', 'L': '\\"Loves\\"', 'fudge': '\\012', '': '"'}
)
# Illegal cookies that have an '=' char in an unquoted value.
self.assertEqual(parse_cookie('keebler=E=mc2'), {'keebler': 'E=mc2'})
# Cookies with ':' character in their name.
self.assertEqual(parse_cookie('key:term=value:term'), {'key:term': 'value:term'})
# Cookies with '[' and ']'.
self.assertEqual(parse_cookie('a=b; c=[; d=r; f=h'), {'a': 'b', 'c': '[', 'd': 'r', 'f': 'h'})
def test_cookie_edgecases(self):
# Cookies that RFC6265 allows.
self.assertEqual(parse_cookie('a=b; Domain=example.com'), {'a': 'b', 'Domain': 'example.com'})
# parse_cookie() has historically kept only the last cookie with the
# same name.
self.assertEqual(parse_cookie('a=b; h=i; a=c'), {'a': 'c', 'h': 'i'})
def test_invalid_cookies(self):
"""
Cookie strings that go against RFC6265 but browsers will send if set
via document.cookie.
"""
# Chunks without an equals sign appear as unnamed values per
# https://bugzilla.mozilla.org/show_bug.cgi?id=169091
self.assertIn('django_language', parse_cookie('abc=def; unnamed; django_language=en').keys())
        # Even a double quote may be an unnamed value.
self.assertEqual(parse_cookie('a=b; "; c=d'), {'a': 'b', '': '"', 'c': 'd'})
# Spaces in names and values, and an equals sign in values.
self.assertEqual(parse_cookie('a b c=d e = f; gh=i'), {'a b c': 'd e = f', 'gh': 'i'})
# More characters the spec forbids.
self.assertEqual(parse_cookie('a b,c<>@:/[]?{}=d " =e,f g'), {'a b,c<>@:/[]?{}': 'd " =e,f g'})
# Unicode characters. The spec only allows ASCII.
self.assertEqual(parse_cookie('saint=André Bessette'), {'saint': 'André Bessette'})
# Browsers don't send extra whitespace or semicolons in Cookie headers,
# but parse_cookie() should parse whitespace the same way
# document.cookie parses whitespace.
self.assertEqual(parse_cookie(' = b ; ; = ; c = ; '), {'': 'b', 'c': ''})
def test_httponly_after_load(self):
c = SimpleCookie()
c.load("name=val")
c['name']['httponly'] = True
self.assertTrue(c['name']['httponly'])
def test_load_dict(self):
c = SimpleCookie()
c.load({'name': 'val'})
self.assertEqual(c['name'].value, 'val')
def test_pickle(self):
rawdata = 'Customer="WILE_E_COYOTE"; Path=/acme; Version=1'
expected_output = 'Set-Cookie: %s' % rawdata
C = SimpleCookie()
C.load(rawdata)
self.assertEqual(C.output(), expected_output)
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
C1 = pickle.loads(pickle.dumps(C, protocol=proto))
self.assertEqual(C1.output(), expected_output)
| bsd-3-clause | -8,008,414,834,805,939,000 | 38.356129 | 118 | 0.59939 | false |
Yuliang-Zou/Automatic_Group_Photography_Enhancement | lib/roi_pooling_layer/roi_pooling_op_grad.py | 1 | 1375 | import tensorflow as tf
from tensorflow.python.framework import ops
import roi_pooling_op
import pdb
@tf.RegisterShape("RoiPool")
def _roi_pool_shape(op):
"""Shape function for the RoiPool op.
"""
dims_data = op.inputs[0].get_shape().as_list()
channels = dims_data[3]
dims_rois = op.inputs[1].get_shape().as_list()
num_rois = dims_rois[0]
pooled_height = op.get_attr('pooled_height')
pooled_width = op.get_attr('pooled_width')
output_shape = tf.TensorShape([num_rois, pooled_height, pooled_width, channels])
return [output_shape, output_shape]
@ops.RegisterGradient("RoiPool")
def _roi_pool_grad(op, grad, _):
"""The gradients for `roi_pool`.
Args:
op: The `roi_pool` `Operation` that we are differentiating, which we can use
to find the inputs and outputs of the original op.
grad: Gradient with respect to the output of the `roi_pool` op.
Returns:
    Gradients with respect to the inputs of `roi_pool`.
"""
data = op.inputs[0]
rois = op.inputs[1]
argmax = op.outputs[1]
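  # argmax, the second forward-pass output, records which input element fed
  # each pooled value so the gradient can be routed back to it.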
pooled_height = op.get_attr('pooled_height')
pooled_width = op.get_attr('pooled_width')
spatial_scale = op.get_attr('spatial_scale')
# compute gradient
data_grad = roi_pooling_op.roi_pool_grad(data, rois, argmax, grad, pooled_height, pooled_width, spatial_scale)
  return [data_grad, None]  # gradient for data; rois receives no gradient
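# Rough usage sketch (assuming roi_pooling_op exposes a roi_pool() wrapper with
# these attrs -- signature inferred from the gradient call above, not verified):
#   pooled, argmax = roi_pooling_op.roi_pool(data, rois, pooled_height=7,
#                                            pooled_width=7,
#                                            spatial_scale=1.0 / 16)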
| mit | 8,385,428,540,939,118,000 | 30.976744 | 112 | 0.694545 | false |
msdubov/AST-text-analysis | east/main.py | 2 | 6145 | # -*- coding: utf-8 -*
import getopt
import os
import sys
from east import applications
from east import consts
from east import formatting
from east.synonyms import synonyms
from east import relevance
from east import utils
def main():
args = sys.argv[1:]
opts, args = getopt.getopt(args, "s:a:w:v:l:f:c:r:p:dy")
opts = dict(opts)
# Default values for non-boolean options
# Language of the text collection / keyphrases ("english" / "german" / "french" /...)
opts.setdefault("-l", consts.Language.ENGLISH)
# Relevance measures
# Similarity measure to use ("ast" / "cosine")
opts.setdefault("-s", consts.RelevanceMeasure.AST)
# Algorithm to use for computing ASTs ("easa" / "ast_linear" / "ast_naive")
opts.setdefault("-a", consts.ASTAlgorithm.EASA)
# Term weighting scheme used for computing the cosine similarity ("tf-idf" / "tf")
opts.setdefault("-w", consts.TermWeighting.TF_IDF)
# Elements of the vector space for the cosine similarity ("stems" / "lemmata" / "words")
opts.setdefault("-v", consts.VectorSpace.STEMS)
# Graph construction
opts.setdefault("-c", "0.6") # Referral confidence for graph construction
opts.setdefault("-r", "0.25") # Relevance threshold of the matching score
opts.setdefault("-p", "1") # Support threshold for graph nodes
# NOTE(mikhaildubov): Default value of '-f' (output format) depends on the subcommand.
if len(args) < 2:
print("Invalid syntax: EAST should be called as:\n\n"
" east [options] <command> <subcommand> args\n\n"
"Commands available: keyphrases.\n"
"Subcommands available: table/graph.")
return 1
command = args[0]
subcommand = args[1]
if command == "keyphrases":
if len(args) < 4:
print('Invalid syntax. For keyphrases analysis, EAST should be called as:\n\n'
' east [options] keyphrases <subcommand> "path/to/keyphrases.txt" '
'"path/to/texts/dir"')
return 1
# Keywords
keyphrases_file = os.path.abspath(args[2])
with open(keyphrases_file) as f:
# NOTE(mikhaildubov): utils.prepare_text() should not be called in clients like this
# one; it is already called in the applications module. Note that
# the double-calling of this method results in errors.
keyphrases = f.read().splitlines()
# Text collection (either a directory or a single file)
text_collection_path = os.path.abspath(args[3])
if os.path.isdir(text_collection_path):
text_files = [os.path.abspath(text_collection_path) + "/" + filename
for filename in os.listdir(text_collection_path)
if filename.endswith(".txt")]
else:
# TODO(mikhaildubov): Check that this single file ends with ".txt".
text_files = [os.path.abspath(text_collection_path)]
texts = {}
# NOTE(mikhaildubov): If we have only one text file, we should split the lines.
if len(text_files) == 1:
with open(text_files[0]) as f:
lines = f.read().splitlines()
for i in xrange(len(lines)):
texts[str(i)] = lines[i]
# NOTE(mikhaildubov): If there are multiple text files, read them one-by-one.
else:
for filename in text_files:
with open(filename) as f:
text_name = os.path.basename(filename).decode("utf-8")[:-4]
texts[text_name] = f.read()
language = opts["-l"]
# Similarity measure
similarity_measure = opts["-s"]
if similarity_measure == "ast":
ast_algorithm = opts["-a"]
normalized_scores = "-d" not in opts
similarity_measure = relevance.ASTRelevanceMeasure(ast_algorithm, normalized_scores)
elif similarity_measure == "cosine":
vector_space = opts["-v"]
term_weighting = opts["-w"]
similarity_measure = relevance.CosineRelevanceMeasure(vector_space, term_weighting)
# Synomimizer
use_synonyms = "-y" in opts
synonimizer = synonyms.SynonymExtractor(text_collection_path) if use_synonyms else None
if subcommand == "table":
keyphrases_table = applications.keyphrases_table(
                keyphrases, texts, similarity_measure,
synonimizer, language)
opts.setdefault("-f", "xml") # Table output format ("csv" is the other option)
table_format = opts["-f"].lower()
try:
res = formatting.format_table(keyphrases_table, table_format)
print res
except Exception as e:
print e
return 1
elif subcommand == "graph":
# Graph construction parameters: Referral confidence, relevance and support thresholds
referral_confidence = float(opts["-c"])
relevance_threshold = float(opts["-r"])
support_threshold = float(opts["-p"])
graph = applications.keyphrases_graph(keyphrases, texts, referral_confidence,
relevance_threshold, support_threshold,
similarity_measure, synonimizer, language)
opts.setdefault("-f", "edges") # Graph output format (also "gml" possible)
graph_format = opts["-f"].lower()
try:
res = formatting.format_graph(graph, graph_format)
print res
except Exception as e:
print e
return 1
else:
print "Invalid subcommand: '%s'. Please use one of: 'table', 'graph'." % subcommand
return 1
else:
print "Invalid command: '%s'. Please use one of: 'keyphrases'." % command
return 1
if __name__ == "__main__":
main()
| mit | -563,512,355,871,391,040 | 38.391026 | 110 | 0.573149 | false |
reasonerjt/harbor | make/photon/prepare/commands/gencerts.py | 3 | 1188 | import os
import sys
import click
import pathlib
import logging
from subprocess import Popen, PIPE, STDOUT, CalledProcessError
from utils.cert import openssl_installed
from utils.misc import get_realpath
gen_tls_script = pathlib.Path(__file__).parent.parent.joinpath('scripts/gencert.sh').absolute()
@click.command()
@click.option('-p', '--path', required=True, type=str, help='the path to store generated cert files')
@click.option('-d', '--days', default='365', type=str, help='the expired time for cert')
def gencert(path, days):
"""
gencert command will generate cert files for internal TLS
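    Example (hypothetical invocation; the actual entry point depends on how
    the prepare CLI wires up this command):
        gencert --path /cert --days 730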
"""
path = get_realpath(path)
click.echo('Check openssl ...')
if not openssl_installed():
        raise Exception('openssl not installed')
click.echo("start generate internal tls certs")
if not os.path.exists(path):
click.echo('path {} not exist, create it...'.format(path))
os.makedirs(path, exist_ok=True)
with Popen([gen_tls_script, days], stdout=PIPE, stderr=STDOUT, cwd=path) as p:
for line in p.stdout:
click.echo(line, nl=False)
if p.returncode != 0:
raise CalledProcessError(p.returncode, p.args)
| apache-2.0 | -8,884,601,590,905,630,000 | 35 | 100 | 0.685185 | false |
kennedyshead/home-assistant | tests/components/mikrotik/test_init.py | 8 | 3080 | """Test Mikrotik setup process."""
from unittest.mock import AsyncMock, Mock, patch
from homeassistant.components import mikrotik
from homeassistant.setup import async_setup_component
from . import MOCK_DATA
from tests.common import MockConfigEntry
async def test_setup_with_no_config(hass):
"""Test that we do not discover anything or try to set up a hub."""
assert await async_setup_component(hass, mikrotik.DOMAIN, {}) is True
assert mikrotik.DOMAIN not in hass.data
async def test_successful_config_entry(hass):
"""Test config entry successful setup."""
entry = MockConfigEntry(
domain=mikrotik.DOMAIN,
data=MOCK_DATA,
)
entry.add_to_hass(hass)
mock_registry = Mock()
with patch.object(mikrotik, "MikrotikHub") as mock_hub, patch(
"homeassistant.helpers.device_registry.async_get_registry",
return_value=mock_registry,
):
mock_hub.return_value.async_setup = AsyncMock(return_value=True)
mock_hub.return_value.serial_num = "12345678"
mock_hub.return_value.model = "RB750"
mock_hub.return_value.hostname = "mikrotik"
mock_hub.return_value.firmware = "3.65"
assert await mikrotik.async_setup_entry(hass, entry) is True
assert len(mock_hub.mock_calls) == 2
p_hass, p_entry = mock_hub.mock_calls[0][1]
assert p_hass is hass
assert p_entry is entry
assert len(mock_registry.mock_calls) == 1
assert mock_registry.mock_calls[0][2] == {
"config_entry_id": entry.entry_id,
"connections": {("mikrotik", "12345678")},
"manufacturer": mikrotik.ATTR_MANUFACTURER,
"model": "RB750",
"name": "mikrotik",
"sw_version": "3.65",
}
async def test_hub_fail_setup(hass):
"""Test that a failed setup will not store the hub."""
entry = MockConfigEntry(
domain=mikrotik.DOMAIN,
data=MOCK_DATA,
)
entry.add_to_hass(hass)
with patch.object(mikrotik, "MikrotikHub") as mock_hub:
mock_hub.return_value.async_setup = AsyncMock(return_value=False)
assert await mikrotik.async_setup_entry(hass, entry) is False
assert mikrotik.DOMAIN not in hass.data
async def test_unload_entry(hass):
"""Test being able to unload an entry."""
entry = MockConfigEntry(
domain=mikrotik.DOMAIN,
data=MOCK_DATA,
)
entry.add_to_hass(hass)
with patch.object(mikrotik, "MikrotikHub") as mock_hub, patch(
"homeassistant.helpers.device_registry.async_get_registry",
return_value=Mock(),
):
mock_hub.return_value.async_setup = AsyncMock(return_value=True)
mock_hub.return_value.serial_num = "12345678"
mock_hub.return_value.model = "RB750"
mock_hub.return_value.hostname = "mikrotik"
mock_hub.return_value.firmware = "3.65"
assert await mikrotik.async_setup_entry(hass, entry) is True
assert len(mock_hub.return_value.mock_calls) == 1
assert await mikrotik.async_unload_entry(hass, entry)
assert entry.entry_id not in hass.data[mikrotik.DOMAIN]
| apache-2.0 | -6,798,248,406,447,578,000 | 32.478261 | 73 | 0.666558 | false |
julien-hadleyjack/genrss-py | src/genrss/podcast.py | 1 | 5482 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from future.standard_library import install_aliases
install_aliases()
import os
import re
from operator import attrgetter
from urllib.request import urlretrieve
from io import open
from PIL import Image
from jinja2 import Environment, FileSystemLoader, StrictUndefined
from . import CONFIG, get_logger, PATH
class PodcastManager(object):
def __init__(self):
self.podcasts = {}
self.fallback = Podcast(CONFIG["fallback"]["title"], is_collection=True)
def get_all_podcasts(self):
return list(self.podcasts.values()) + [self.fallback]
def add_episode(self, episode):
"""
:param episode:
:type episode: episode.Episode
"""
if episode.show not in CONFIG["shows"]:
podcast = self.fallback
elif episode.show not in self.podcasts:
podcast = Podcast(episode.show)
self.podcasts[episode.show] = podcast
else:
podcast = self.podcasts[episode.show]
if not CONFIG["technical"]["check-episode"] or episode:
podcast.episodes.append(episode)
def generate_html(self):
env = Environment(loader=FileSystemLoader(os.path.join(PATH, 'template')),
autoescape=True, trim_blocks=True, lstrip_blocks=True,
undefined=StrictUndefined)
template = env.get_template("index.html")
output = template.render(config=CONFIG, manager=self)
file_path = os.path.join(CONFIG["file-base"], CONFIG["technical"]["overview-path"])
with open(file_path, "w", encoding="utf8") as file:
get_logger().info("Writing HTML overview at %s", file_path)
file.write(output)
def generate_rss(self):
for podcast in self.get_all_podcasts():
podcast.save()
class Podcast():
def __init__(self, title, short_description=None, html_description=None, is_collection=False):
self.title = title
self.episodes = []
self.is_collection = is_collection
self.short_description = short_description or CONFIG["fallback"]["short-description"]
self.html_description = html_description or CONFIG["fallback"]["html-description"]
get_logger().debug("Creating podcast:\n\t%s", repr(self))
@staticmethod
def format_date(dt):
return dt.strftime("%a, %d %b %Y %H:%M:%S +0100")
def image_url(self):
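        """Return the podcast image URL: prefer an image already cached in an
        episode directory, otherwise download the first episode's thumbnail,
        and fall back to the configured default when neither exists."""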
image_url = CONFIG["fallback"]["image-url"]
if not self.is_collection and len(self.episodes) > 0:
image_location = None
image_name = CONFIG["technical"]["image-name"]
for episode in self.episodes:
location = os.path.join(episode.directory_path, image_name)
if os.path.exists(location):
image_location = location
image_url = episode.sub_directory + image_name
break
if not image_location:
for episode in self.episodes:
if os.path.exists(episode.directory_path):
image_location = os.path.join(episode.directory_path, image_name)
urlretrieve(episode.thumbnail, image_location)
image_url = episode.sub_directory + image_name
break
if image_location:
self.crop_image(image_location)
return CONFIG["url-base"] + image_url
@staticmethod
def crop_image(image_location):
# http://www.carlbednorz.de/python-create-square-thumbnails-from-images-with-pil/
img = Image.open(image_location)
width, height = img.size
if width != height:
upper_x = int((width / 2) - (height / 2))
upper_y = 0
lower_x = int((width / 2) + (height / 2))
lower_y = height
img = img.crop((upper_x, upper_y, lower_x, lower_y))
assert img.size[0] == img.size[1]
get_logger().debug("Saving a new thumbnail at %s", image_location)
img.save(image_location, "JPEG")
def get_rss_filename(self):
if not self.episodes:
get_logger().info("No episodes found for %s. No rss file name.", self.title)
elif self.is_collection:
return CONFIG["fallback"]["rss-file"]
else:
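            # Sanitize the show title so it is safe to use as a file name.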
return re.sub("[^a-zA-Z0-9_\-\./]+", "_", self.title) + ".rss"
def save(self):
if not self.episodes:
get_logger().info("No episodes found for %s. Can't save rss feed", self.title)
return
sorted_episodes = sorted(self.episodes, key=attrgetter('time_added'), reverse=True)
env = Environment(loader=FileSystemLoader(os.path.join(PATH, 'template')),
autoescape=True, trim_blocks=True, lstrip_blocks=True,
undefined=StrictUndefined)
template = env.get_template("feed.rss")
output = template.render(config=CONFIG, sorted_episodes=sorted_episodes, podcast=self)
file_path = os.path.join(CONFIG["file-base"], self.get_rss_filename())
with open(file_path, "w", encoding="utf8") as file:
get_logger().info("Saving %d episodes at %s.", len(self.episodes), file_path)
file.write(output)
def __repr__(self):
return "Podcast[title={self.title}, episodes={amount}]".format(amount=len(self.episodes), **locals())
| bsd-2-clause | -5,099,532,245,293,287,000 | 36.292517 | 109 | 0.592667 | false |
Spleen64/Sick-Beard | lib/subliminal/services/subswiki.py | 35 | 5235 | # -*- coding: utf-8 -*-
# Copyright 2011-2012 Antoine Bertin <[email protected]>
#
# This file is part of subliminal.
#
# subliminal is free software; you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# subliminal is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with subliminal. If not, see <http://www.gnu.org/licenses/>.
from . import ServiceBase
from ..exceptions import ServiceError
from ..language import language_set, Language
from ..subtitles import get_subtitle_path, ResultSubtitle
from ..utils import get_keywords, split_keyword
from ..videos import Episode, Movie
from bs4 import BeautifulSoup
import logging
import urllib
logger = logging.getLogger("subliminal")
class SubsWiki(ServiceBase):
server_url = 'http://www.subswiki.com'
site_url = 'http://www.subswiki.com'
api_based = False
languages = language_set(['eng-US', 'eng-GB', 'eng', 'fre', 'pob', 'por', 'spa-ES', u'spa', u'ita', u'cat'])
language_map = {u'Español': Language('spa'), u'Español (España)': Language('spa'), u'Español (Latinoamérica)': Language('spa'),
u'Català': Language('cat'), u'Brazilian': Language('pob'), u'English (US)': Language('eng-US'),
u'English (UK)': Language('eng-GB')}
language_code = 'name'
videos = [Episode, Movie]
require_video = False
#required_features = ['permissive']
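    # required_features (used when parsing below) presumably falls back to the
    # ServiceBase default now that the override above is commented out.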
def list_checked(self, video, languages):
results = []
if isinstance(video, Episode):
results = self.query(video.path or video.release, languages, get_keywords(video.guess), series=video.series, season=video.season, episode=video.episode)
elif isinstance(video, Movie) and video.year:
results = self.query(video.path or video.release, languages, get_keywords(video.guess), movie=video.title, year=video.year)
return results
def query(self, filepath, languages, keywords=None, series=None, season=None, episode=None, movie=None, year=None):
if series and season and episode:
request_series = series.lower().replace(' ', '_')
if isinstance(request_series, unicode):
request_series = request_series.encode('utf-8')
logger.debug(u'Getting subtitles for %s season %d episode %d with languages %r' % (series, season, episode, languages))
r = self.session.get('%s/serie/%s/%s/%s/' % (self.server_url, urllib.quote(request_series), season, episode))
if r.status_code == 404:
logger.debug(u'Could not find subtitles for %s season %d episode %d with languages %r' % (series, season, episode, languages))
return []
elif movie and year:
request_movie = movie.title().replace(' ', '_')
if isinstance(request_movie, unicode):
request_movie = request_movie.encode('utf-8')
logger.debug(u'Getting subtitles for %s (%d) with languages %r' % (movie, year, languages))
r = self.session.get('%s/film/%s_(%d)' % (self.server_url, urllib.quote(request_movie), year))
if r.status_code == 404:
logger.debug(u'Could not find subtitles for %s (%d) with languages %r' % (movie, year, languages))
return []
else:
raise ServiceError('One or more parameter missing')
if r.status_code != 200:
logger.error(u'Request %s returned status code %d' % (r.url, r.status_code))
return []
soup = BeautifulSoup(r.content, self.required_features)
subtitles = []
for sub in soup('td', {'class': 'NewsTitle'}):
sub_keywords = split_keyword(sub.b.string.lower())
if keywords and not keywords & sub_keywords:
logger.debug(u'None of subtitle keywords %r in %r' % (sub_keywords, keywords))
continue
for html_language in sub.parent.parent.find_all('td', {'class': 'language'}):
language = self.get_language(html_language.string.strip())
if language not in languages:
logger.debug(u'Language %r not in wanted languages %r' % (language, languages))
continue
html_status = html_language.find_next_sibling('td')
status = html_status.strong.string.strip()
if status != 'Completado':
logger.debug(u'Wrong subtitle status %s' % status)
continue
path = get_subtitle_path(filepath, language, self.config.multi)
subtitle = ResultSubtitle(path, language, self.__class__.__name__.lower(), '%s%s' % (self.server_url, html_status.find_next('td').find('a')['href']))
subtitles.append(subtitle)
return subtitles
Service = SubsWiki
| gpl-3.0 | -6,734,466,089,612,795,000 | 51.29 | 165 | 0.629183 | false |
ClearCorp-dev/account-financial-reporting | account_move_line_report_xls/report/move_line_list_xls.py | 25 | 17494 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
#
# Copyright (c) 2014 Noviat nv/sa (www.noviat.com). All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import xlwt
from datetime import datetime
from openerp.osv import orm
from openerp.report import report_sxw
from openerp.addons.report_xls.report_xls import report_xls
from openerp.addons.report_xls.utils import rowcol_to_cell, _render
from openerp.tools.translate import translate, _
import logging
_logger = logging.getLogger(__name__)
_ir_translation_name = 'move.line.list.xls'
class move_line_xls_parser(report_sxw.rml_parse):
def __init__(self, cr, uid, name, context):
super(move_line_xls_parser, self).__init__(
cr, uid, name, context=context)
move_obj = self.pool.get('account.move.line')
self.context = context
wanted_list = move_obj._report_xls_fields(cr, uid, context)
template_changes = move_obj._report_xls_template(cr, uid, context)
self.localcontext.update({
'datetime': datetime,
'wanted_list': wanted_list,
'template_changes': template_changes,
'_': self._,
})
def _(self, src):
lang = self.context.get('lang', 'en_US')
return translate(self.cr, _ir_translation_name, 'report', lang, src) \
or src
class move_line_xls(report_xls):
def __init__(self, name, table, rml=False, parser=False, header=True,
store=False):
super(move_line_xls, self).__init__(
name, table, rml, parser, header, store)
# Cell Styles
_xs = self.xls_styles
# header
rh_cell_format = _xs['bold'] + _xs['fill'] + _xs['borders_all']
self.rh_cell_style = xlwt.easyxf(rh_cell_format)
self.rh_cell_style_center = xlwt.easyxf(rh_cell_format + _xs['center'])
self.rh_cell_style_right = xlwt.easyxf(rh_cell_format + _xs['right'])
# lines
aml_cell_format = _xs['borders_all']
self.aml_cell_style = xlwt.easyxf(aml_cell_format)
self.aml_cell_style_center = xlwt.easyxf(
aml_cell_format + _xs['center'])
self.aml_cell_style_date = xlwt.easyxf(
aml_cell_format + _xs['left'],
num_format_str=report_xls.date_format)
self.aml_cell_style_decimal = xlwt.easyxf(
aml_cell_format + _xs['right'],
num_format_str=report_xls.decimal_format)
# totals
rt_cell_format = _xs['bold'] + _xs['fill'] + _xs['borders_all']
self.rt_cell_style = xlwt.easyxf(rt_cell_format)
self.rt_cell_style_right = xlwt.easyxf(rt_cell_format + _xs['right'])
self.rt_cell_style_decimal = xlwt.easyxf(
rt_cell_format + _xs['right'],
num_format_str=report_xls.decimal_format)
# XLS Template
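        # Each column spec appears to follow the report_xls layout:
        # [colspan, column width, cell type, cell data, optional formula,
        # optional cell style], with one entry per 'header'/'lines'/'totals' row.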
self.col_specs_template = {
'move': {
'header': [1, 20, 'text', _render("_('Entry')")],
'lines': [1, 0, 'text', _render("line.move_id.name or ''")],
'totals': [1, 0, 'text', None]},
'name': {
'header': [1, 42, 'text', _render("_('Name')")],
'lines': [1, 0, 'text', _render("line.name or ''")],
'totals': [1, 0, 'text', None]},
'ref': {
'header': [1, 42, 'text', _render("_('Reference')")],
'lines': [1, 0, 'text', _render("line.ref or ''")],
'totals': [1, 0, 'text', None]},
'date': {
'header': [1, 13, 'text', _render("_('Effective Date')")],
'lines': [1, 0, 'date',
_render("datetime.strptime(line.date,'%Y-%m-%d')"),
None, self.aml_cell_style_date],
'totals': [1, 0, 'text', None]},
'period': {
'header': [1, 12, 'text', _render("_('Period')")],
'lines':
[1, 0, 'text',
_render("line.period_id.code or line.period_id.name")],
'totals': [1, 0, 'text', None]},
'partner': {
'header': [1, 36, 'text', _render("_('Partner')")],
'lines':
[1, 0, 'text',
_render("line.partner_id and line.partner_id.name or ''")],
'totals': [1, 0, 'text', None]},
'partner_ref': {
'header': [1, 36, 'text', _render("_('Partner Reference')")],
'lines':
[1, 0, 'text',
_render("line.partner_id and line.partner_id.ref or ''")],
'totals': [1, 0, 'text', None]},
'account': {
'header': [1, 12, 'text', _render("_('Account')")],
'lines': [1, 0, 'text', _render("line.account_id.code")],
'totals': [1, 0, 'text', None]},
'date_maturity': {
'header': [1, 13, 'text', _render("_('Maturity Date')")],
'lines':
[1, 0,
_render("line.date_maturity and 'date' or 'text'"),
_render(
"line.date_maturity"
" and datetime.strptime(line.date_maturity,'%Y-%m-%d')"
" or None"),
None, self.aml_cell_style_date],
'totals': [1, 0, 'text', None]},
'debit': {
'header': [1, 18, 'text', _render("_('Debit')"), None,
self.rh_cell_style_right],
'lines': [1, 0, 'number', _render("line.debit"), None,
self.aml_cell_style_decimal],
'totals': [1, 0, 'number', None, _render("debit_formula"),
self.rt_cell_style_decimal]},
'credit': {
'header': [1, 18, 'text', _render("_('Credit')"), None,
self.rh_cell_style_right],
'lines': [1, 0, 'number', _render("line.credit"), None,
self.aml_cell_style_decimal],
'totals': [1, 0, 'number', None, _render("credit_formula"),
self.rt_cell_style_decimal]},
'balance': {
'header': [1, 18, 'text', _render("_('Balance')"), None,
self.rh_cell_style_right],
'lines': [1, 0, 'number', None, _render("bal_formula"),
self.aml_cell_style_decimal],
'totals': [1, 0, 'number', None, _render("bal_formula"),
self.rt_cell_style_decimal]},
'reconcile': {
'header': [1, 12, 'text', _render("_('Rec.')"), None,
self.rh_cell_style_center],
'lines': [1, 0, 'text',
_render("line.reconcile_id.name or ''"), None,
self.aml_cell_style_center],
'totals': [1, 0, 'text', None]},
'reconcile_partial': {
'header': [1, 12, 'text', _render("_('Part. Rec.')"), None,
self.rh_cell_style_center],
'lines': [1, 0, 'text',
_render("line.reconcile_partial_id.name or ''"),
None, self.aml_cell_style_center],
'totals': [1, 0, 'text', None]},
'tax_code': {
'header': [1, 12, 'text', _render("_('Tax Code')"), None,
self.rh_cell_style_center],
'lines': [1, 0, 'text', _render("line.tax_code_id.code or ''"),
None, self.aml_cell_style_center],
'totals': [1, 0, 'text', None]},
'tax_amount': {
'header': [1, 18, 'text', _render("_('Tax/Base Amount')"),
None, self.rh_cell_style_right],
'lines': [1, 0, 'number', _render("line.tax_amount"), None,
self.aml_cell_style_decimal],
'totals': [1, 0, 'text', None]},
'amount_currency': {
'header': [1, 18, 'text', _render("_('Am. Currency')"), None,
self.rh_cell_style_right],
'lines':
[1, 0,
_render("line.amount_currency and 'number' or 'text'"),
_render("line.amount_currency or None"),
None, self.aml_cell_style_decimal],
'totals': [1, 0, 'text', None]},
'currency_name': {
'header': [1, 6, 'text', _render("_('Curr.')"), None,
self.rh_cell_style_center],
'lines':
[1, 0, 'text',
_render("line.currency_id and line.currency_id.name or ''"),
None, self.aml_cell_style_center],
'totals': [1, 0, 'text', None]},
'journal': {
'header': [1, 12, 'text', _render("_('Journal')")],
'lines': [1, 0, 'text', _render("line.journal_id.code or ''")],
'totals': [1, 0, 'text', None]},
'company_currency': {
'header': [1, 10, 'text', _render("_('Comp. Curr.')")],
'lines': [1, 0, 'text',
_render("line.company_id.currency_id.name or ''"),
None, self.aml_cell_style_center],
'totals': [1, 0, 'text', None]},
'analytic_account': {
'header': [1, 36, 'text', _render("_('Analytic Account')")],
'lines': [1, 0, 'text',
_render("line.analytic_account_id.code or ''")],
'totals': [1, 0, 'text', None]},
'product': {
'header': [1, 36, 'text', _render("_('Product')")],
'lines': [1, 0, 'text', _render("line.product_id.name or ''")],
'totals': [1, 0, 'text', None]},
'product_ref': {
'header': [1, 36, 'text', _render("_('Product Reference')")],
'lines': [1, 0, 'text',
_render("line.product_id.default_code or ''")],
'totals': [1, 0, 'text', None]},
'product_uom': {
'header': [1, 20, 'text', _render("_('Unit of Measure')")],
'lines': [1, 0, 'text',
_render("line.product_uom_id.name or ''")],
'totals': [1, 0, 'text', None]},
'quantity': {
'header': [1, 8, 'text', _render("_('Qty')"), None,
self.rh_cell_style_right],
'lines': [1, 0,
_render("line.quantity and 'number' or 'text'"),
_render("line.quantity or None"), None,
self.aml_cell_style_decimal],
'totals': [1, 0, 'text', None]},
'statement': {
'header': [1, 20, 'text', _render("_('Statement')")],
'lines':
[1, 0, 'text',
_render("line.statement_id and line.statement_id.name or ''")
],
'totals': [1, 0, 'text', None]},
'invoice': {
'header': [1, 20, 'text', _render("_('Invoice')")],
'lines':
[1, 0, 'text',
_render("line.invoice and line.invoice.number or ''")],
'totals': [1, 0, 'text', None]},
'amount_residual': {
'header': [1, 18, 'text', _render("_('Residual Amount')"),
None, self.rh_cell_style_right],
'lines':
[1, 0,
_render("line.amount_residual and 'number' or 'text'"),
_render("line.amount_residual or None"),
None, self.aml_cell_style_decimal],
'totals': [1, 0, 'text', None]},
'amount_residual_currency': {
'header': [1, 18, 'text', _render("_('Res. Am. in Curr.')"),
None, self.rh_cell_style_right],
'lines':
[1, 0,
_render(
"line.amount_residual_currency and 'number' or 'text'"),
_render("line.amount_residual_currency or None"),
None, self.aml_cell_style_decimal],
'totals': [1, 0, 'text', None]},
'narration': {
'header': [1, 42, 'text', _render("_('Notes')")],
'lines': [1, 0, 'text',
_render("line.move_id.narration or ''")],
'totals': [1, 0, 'text', None]},
'blocked': {
'header': [1, 4, 'text', _('Lit.'),
None, self.rh_cell_style_right],
'lines': [1, 0, 'text', _render("line.blocked and 'x' or ''"),
None, self.aml_cell_style_center],
'totals': [1, 0, 'text', None]},
}
def generate_xls_report(self, _p, _xs, data, objects, wb):
wanted_list = _p.wanted_list
self.col_specs_template.update(_p.template_changes)
_ = _p._
debit_pos = 'debit' in wanted_list and wanted_list.index('debit')
credit_pos = 'credit' in wanted_list and wanted_list.index('credit')
if not (credit_pos and debit_pos) and 'balance' in wanted_list:
raise orm.except_orm(
_('Customisation Error!'),
_("The 'Balance' field is a calculated XLS field requiring \
the presence of the 'Debit' and 'Credit' fields !"))
# report_name = objects[0]._description or objects[0]._name
report_name = _("Journal Items")
ws = wb.add_sheet(report_name[:31])
ws.panes_frozen = True
ws.remove_splits = True
ws.portrait = 0 # Landscape
ws.fit_width_to_pages = 1
row_pos = 0
# set print header/footer
ws.header_str = self.xls_headers['standard']
ws.footer_str = self.xls_footers['standard']
# Title
cell_style = xlwt.easyxf(_xs['xls_title'])
c_specs = [
('report_name', 1, 0, 'text', report_name),
]
row_data = self.xls_row_template(c_specs, ['report_name'])
row_pos = self.xls_write_row(
ws, row_pos, row_data, row_style=cell_style)
row_pos += 1
# Column headers
c_specs = map(lambda x: self.render(
x, self.col_specs_template, 'header', render_space={'_': _p._}),
wanted_list)
row_data = self.xls_row_template(c_specs, [x[0] for x in c_specs])
row_pos = self.xls_write_row(
ws, row_pos, row_data, row_style=self.rh_cell_style,
set_column_size=True)
ws.set_horz_split_pos(row_pos)
# account move lines
for line in objects:
debit_cell = rowcol_to_cell(row_pos, debit_pos)
credit_cell = rowcol_to_cell(row_pos, credit_pos)
bal_formula = debit_cell + '-' + credit_cell
_logger.debug('dummy call - %s', bal_formula)
c_specs = map(
lambda x: self.render(x, self.col_specs_template, 'lines'),
wanted_list)
row_data = self.xls_row_template(c_specs, [x[0] for x in c_specs])
row_pos = self.xls_write_row(
ws, row_pos, row_data, row_style=self.aml_cell_style)
# Totals
aml_cnt = len(objects)
debit_start = rowcol_to_cell(row_pos - aml_cnt, debit_pos)
debit_stop = rowcol_to_cell(row_pos - 1, debit_pos)
debit_formula = 'SUM(%s:%s)' % (debit_start, debit_stop)
_logger.debug('dummy call - %s', debit_formula)
credit_start = rowcol_to_cell(row_pos - aml_cnt, credit_pos)
credit_stop = rowcol_to_cell(row_pos - 1, credit_pos)
credit_formula = 'SUM(%s:%s)' % (credit_start, credit_stop)
_logger.debug('dummy call - %s', credit_formula)
debit_cell = rowcol_to_cell(row_pos, debit_pos)
credit_cell = rowcol_to_cell(row_pos, credit_pos)
bal_formula = debit_cell + '-' + credit_cell
_logger.debug('dummy call - %s', bal_formula)
c_specs = map(
lambda x: self.render(x, self.col_specs_template, 'totals'),
wanted_list)
row_data = self.xls_row_template(c_specs, [x[0] for x in c_specs])
row_pos = self.xls_write_row(
ws, row_pos, row_data, row_style=self.rt_cell_style_right)
move_line_xls('report.move.line.list.xls',
'account.move.line',
parser=move_line_xls_parser)
| agpl-3.0 | -8,339,705,667,250,862,000 | 45.900804 | 79 | 0.469189 | false |
t27/ol3 | bin/check-whitespace.py | 5 | 1582 | import logging
import re
import sys
logging.basicConfig(format='%(asctime)s %(name)s: %(message)s',
level=logging.INFO)
logger = logging.getLogger('check-whitespace')
CR_RE = re.compile(r'\r')
LEADING_WHITESPACE_RE = re.compile(r'\s+')
TRAILING_WHITESPACE_RE = re.compile(r'\s+\n\Z')
NO_NEWLINE_RE = re.compile(r'[^\n]\Z')
ALL_WHITESPACE_RE = re.compile(r'\s+\Z')
def check_whitespace(*filenames):
errors = 0
for filename in sorted(filenames):
whitespace = False
        # Note: mode 'rU' would translate away the carriage returns that the
        # CR check below looks for, so plain 'r' is used here.
        for lineno, line in enumerate(open(filename, 'r')):
if lineno == 0 and LEADING_WHITESPACE_RE.match(line):
logger.info('%s:%d: leading whitespace', filename, lineno + 1)
errors += 1
if CR_RE.search(line):
logger.info('%s:%d: carriage return character in line',
filename, lineno + 1)
errors += 1
if TRAILING_WHITESPACE_RE.search(line):
logger.info('%s:%d: trailing whitespace', filename, lineno + 1)
errors += 1
if NO_NEWLINE_RE.search(line):
logger.info('%s:%d: no newline at end of file', filename,
lineno + 1)
errors += 1
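            # Remember whether the most recent line was all whitespace; the
            # flag is checked after the loop to flag blank lines at EOF.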
whitespace = ALL_WHITESPACE_RE.match(line)
if whitespace:
logger.info('%s: trailing whitespace at end of file', filename)
errors += 1
if errors:
logger.error('%d whitespace errors' % (errors,))
if __name__ == "__main__":
check_whitespace(*sys.argv[1:])
| bsd-2-clause | 1,302,888,564,204,308,500 | 34.954545 | 79 | 0.554994 | false |
JayvicWen/Crawler | kaoyan/crawl_post.py | 2 | 2301 | #!/usr/bin/env python
# encoding:utf-8
import os
import sys
import requests
import MySQLdb
from bs4 import BeautifulSoup
from bs4 import SoupStrainer
from config import *
base_url = 'http://download.kaoyan.com'
status = []
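# status acts as a manual stack of report fields: crawl_list pushes the list
# type and list URL, crawl_post pushes the post URL, and the three entries
# feed the INSERT statement below in that order.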
def get_soup(url, parse_only=None):
content = requests.get(url).content
return BeautifulSoup(content, 'lxml', parse_only=parse_only)
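# Illustrative use of the parse_only hook (an assumption, not exercised in
# this script): restrict parsing to the subtree that is actually needed, e.g.
#   get_soup(url, parse_only=SoupStrainer('div', attrs={'class': 'threadlist'}))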
def mysql_connect():
global connection
connection = MySQLdb.connect(host=DB_HOST, user=DB_USER, passwd=DB_PASSWORD,
db=DB_DATABASE, port=3306, charset='utf8')
def mysql_disconnect():
global connection
connection.commit()
connection.close()
def crawl_post(url):
status.append(url)
global connection
cursor = connection.cursor()
cursor.execute(
'INSERT INTO `kaoyan_post`(`type`, `list_url`, `post_url`) VALUES (%s, %s, %s)',
status
)
cursor.close()
status.pop()
def crawl_list(list_id):
soup = get_soup(base_url + '/list-%d' % list_id)
thread_list = soup.find('div', attrs={'class': 'threadlist'})
if thread_list is None:
print 'List not exists:', base_url + '/list-%d' % list_id
return
user_info_list = soup.find('div', attrs={'class': 'userinfolist'})
status.append('-'.join(user_info_list.span.get_text().split(u' » ')[2:]))
url = base_url + '/list-%d' % list_id
while url is not None:
print 'Crawling list:', url
status.append(url)
soup = get_soup(url)
table_dom = soup.find('div', attrs={'class': 'threadlist'}).table
post_list_dom = table_dom.find_all('a')
mysql_connect()
for post_dom in post_list_dom:
crawl_post(base_url + post_dom['href'])
mysql_disconnect()
status.pop()
pages_dom = soup.find('div', {'class': 'pages'})
if pages_dom is None:
break
next_dom = pages_dom.find('a', {'class': 'next'})
if next_dom is None:
break
url = base_url + next_dom['href']
status.pop()
if __name__ == '__main__':
if len(sys.argv) != 3:
print 'Invalid parameters!'
exit(1)
print '=' * 60
print 'start:', sys.argv
for i in xrange(int(sys.argv[1]), int(sys.argv[2]) + 1):
crawl_list(i)
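# Example invocation (the list-id range is illustrative):
#   python crawl_post.py 100 120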
| mit | 7,428,678,633,358,786,000 | 23.731183 | 88 | 0.584783 | false |
vnc-biz/pyzimbra | pyzimbra/z/admin.py | 3 | 2605 | # -*- coding: utf-8 -*-
"""
################################################################################
# Copyright (c) 2010, Ilgar Mashayev
#
# E-mail: [email protected]
# Website: http://github.com/ilgarm/pyzimbra
################################################################################
# This file is part of pyzimbra.
#
# Pyzimbra is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pyzimbra is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Pyzimbra. If not, see <http://www.gnu.org/licenses/>.
################################################################################
Zimbra privileged client.
@author: ilgar
"""
from pyzimbra import sconstant, zconstant
from pyzimbra.zclient import ZimbraSoapClient
class ZimbraAdmin(ZimbraSoapClient):
"""
Zimbra privileged client.
"""
# ------------------------------------------------------------------ unbound
def authenticate(self, account_name, password):
"""
Authenticates zimbra account.
@param account_name: account email address
@param password: account password
@raise AuthException: if authentication fails
@raise SoapException: if soap communication fails
"""
self.auth_token = self.authenticator.authenticate_admin(self.transport,
account_name,
password)
def get_account(self):
"""
Gets account.
@return: Account
"""
def change_password(self, account, password):
"""
Changes account password.
@param account: account to change password for
@param password: new password
"""
def get_info(self, account, params={}):
"""
Gets account info.
@param account: account to get info for
@param params: parameters to retrieve
@return: AccountInfo
"""
res = self.invoke(zconstant.NS_ZIMBRA_ADMIN_URL,
sconstant.GetInfoRequest,
params)
return res
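# A minimal usage sketch (illustrative; the constructor arguments and the
# account object are assumptions, not defined in this module):
#
#   admin = ZimbraAdmin(...)   # set up like any ZimbraSoapClient
#   admin.authenticate('[email protected]', 'secret')
#   info = admin.get_info(account, params={})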
| lgpl-3.0 | 6,102,514,193,900,940,000 | 32.831169 | 80 | 0.547793 | false |
arummler/eudaq | legacy/producers/palpidess/scripts/slow_control/config_pALPIDE_driver.py | 11 | 3952 | #! /usr/bin/env python
##
## standard configuration of the FEC HLVDS for use
## with the pALPIDE and the Padua Proximity board V1
##
import sys
import os
import SlowControl # slow control code
import biasDAC # special code to setup up the voltage biases
m = SlowControl.SlowControl(0) # HLVDS FEC (master)
# was the integration time given as a commandline argument?
if len(sys.argv) >= 2:
integration_time = int(float(sys.argv[1])/0.00625)
# Acquisition time is expected to be specified in microseconds (us)
else:
integration_time = 0xf600 # default value
if len(sys.argv) >= 3:
trigger_delay = int(float(sys.argv[2])/0.00625)
# Trigger delay is expected to be specified in microseconds (us)
else:
trigger_delay = 0 # default value
# 0x16 (22) readout control
#
# The bits (2:0) are not set during configuration, as they are set during the
# (start-up of a) measurement.
#
# 3: driver loop mode (activates the complete sequence although no data is sent)
# 2: single-event readout enable
# 1: single-event readout request (rising edge sensitive, need to toggle!)
# 0: continuous readout enable
rdo_settings = 0x0
# 0x18 (24) trigger control
#
# 10- 4: tlu wait cycles (default = 0x0c << 4)
# 18-12: tlu clock div (default = 0x00 << 12)
# 3: driver busy enable ( 0x8 )
# 2: tlu reset enable ( 0x4 )
# 1- 0: trig mode (0b00 = auto/continuously, 0b01 = NIM in,
# 0b10 = TLU triggering, 0b11 valid based)
trg_settings = 0x1 | (0x0c << 4) | (0x00 << 12)
### FEC HLVDS configuration registers
# all times/delays are specified in multiples of 6.25 ns
values_HLVDS = [
# explorer driver configuration
0b1100, # 0x00 ( 0) enable digital pulsing (0), clock init state (1),
# repeat global reset (2), activate mem_wr_en (3)
0x10, # 0x01 ( 1) length of GRSTb signal [init_dly_t]
0xa0, # 0x02 ( 2) length of the analog pulser reference pulse [dly_t]
trigger_delay, # 0x03 ( 3) pre-acquisition delay (between trigger and acquisition start)
# [pre_acq_dly_t]
integration_time, # 0x04 ( 4) integration time [acq_time_t]
0x0, # 0x05 ( 5) delay in between acquisition and readout [rdo_dly_t]
0xb, # 0x06 ( 6) transport delay of the signals from SRS -> pALPIDE -> SRS
# [transport_dly_t]
0x1, # 0x07 ( 7) clock divider for the output clock [clk_div_t]
0x3, # 0x08 ( 8) readout frequency divider [rdo_div_t]
0x0, # 0x09 ( 9) post-event delay [post_evt_dly_t]
0b0111111100000, # 0x0a (10) global reset settings by state (0 = active, 1 = inactive)
# stReset (0), stInit (1), stDly (2), stTrigWait (3), stPreAcq (4),
# stAcq (5), stAcqWait (6), stRdoWait (7), stRdoStart (8),
# stRdoFirst (9), stRdoPause (10), stRdo (11), stPostEvtDly (12)
0x2, # 0x0b (11) nim_out signal assignment (0 = off, 1 = on)
# a_pulse_ref (0), combined_busy (1)
0x0, # 0x0c (12)
0x0, # 0x0d (13)
0x0, # 0x0e (14)
0x0, # 0x0f (15)
0x0, # 0x10 (16)
0x0, # 0x11 (17)
0x0, # 0x12 (18)
0x0, # 0x13 (19)
0x0, # 0x14 (20)
0x0, # 0x15 (21)
# general configuration
rdo_settings, # 0x16 (22) readout control
8832, # 0x17 (23) maximum frame size
trg_settings # 0x18 (24) trigger control
]
# all higher addresses are read-only
SlowControl.write_burst(m, 6039, 0x0, values_HLVDS, False)
biasDAC.set_bias_voltage(12, 1.6, m) # Vreset
biasDAC.set_bias_voltage(8, 0.4, m) # VCASN
biasDAC.set_bias_voltage(10, 0.6, m) # VCASP
quit()
| lgpl-3.0 | -1,160,590,148,767,314,200 | 41.494624 | 99 | 0.577176 | false |