prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k)
---|---|
<|file_name|>IntArrayWorker.java<|end_file_name|><|fim▁begin|>package lab;
public class IntArrayWorker
{
/** two dimensional matrix */
private int[][] matrix = null;
/** set the matrix to the passed one
* @param theMatrix the one to use
*/
public void setMatrix(int[][] theMatrix)
{
matrix = theMatrix;
}
/**
* Method to return the total
* @return the total of the values in the array
*/
public int getTotal()
{
int total = 0;
for (int row = 0; row < matrix.length; row++)
{
for (int col = 0; col < matrix[0].length; col++)
{
total = total + matrix[row][col];
}
}
return total;
}
/**
* Method to return the total using a nested for-each loop
* @return the total of the values in the array
*/
public int getTotalNested()
{
int total = 0;
for (int[] rowArray : matrix)
{
for (int item : rowArray)
{
total = total + item;
}
}
return total;
}
/**
* Method to fill with an increasing count
*/
public void fillCount()
{
int numCols = matrix[0].length;
int count = 1;
for (int row = 0; row < matrix.length; row++)
{
for (int col = 0; col < numCols; col++)
{
matrix[row][col] = count;
count++;
}
}
}
/**
* print the values in the array in rows and columns
*/
public void print()
<|fim▁hole|> for (int row = 0; row < matrix.length; row++)
{
for (int col = 0; col < matrix[0].length; col++)
{
System.out.print( matrix[row][col] + " " );
}
System.out.println();
}
System.out.println();
}
/**
 * Method to count how many times a value occurs in the matrix
 * @param num the value to look for
 * @return the number of matrix elements equal to num
 */
public int getCount(int num){
int count = 0;
for(int row = 0; row < matrix.length; row++)
for(int col = 0; col < matrix[0].length; col++)
if(matrix[row][col] == num)
count++;
return count;
}
/**
 * Method to total the values in one column of the matrix
 * @param col the index of the column to total
 * @return the sum of the values in column col
 */
public int getColTotal(int col){
int total = 0;
for(int row = 0; row < matrix.length; row++)
total += matrix[row][col];
return total;
}
/**
 * Method to find the largest value in the matrix
 * @return the largest value stored in the matrix
 */
public int getLargest(){
int largest = matrix[0][0];
for(int row = 0; row < matrix.length; row++)
for(int col = 0; col < matrix[0].length; col++)
if(matrix[row][col] > largest)
largest = matrix[row][col];
return largest;
}
/**
* fill the array with a pattern
*/
public void fillPattern1()
{
for (int row = 0; row < matrix.length; row++)
{
for (int col = 0; col < matrix[0].length; col++)
{
if (row < col)
matrix[row][col] = 1;
else if (row == col)
matrix[row][col] = 2;
else
matrix[row][col] = 3;
}
}
}
}<|fim▁end|> | {
|
<|file_name|>gdiplus.cpp<|end_file_name|><|fim▁begin|>///////////////////////////////////////////////////////////////////////////////
// Name: src/msw/gdiplus.cpp
// Purpose: implements wrappers for GDI+ flat API
// Author: Vadim Zeitlin
// Created: 2007-03-14
// Copyright: (c) 2007 Vadim Zeitlin <[email protected]>
// Licence: wxWindows licence
///////////////////////////////////////////////////////////////////////////////
// ============================================================================
// declarations
// ============================================================================
// ----------------------------------------------------------------------------
// headers
// ----------------------------------------------------------------------------
// for compilers that support precompilation, includes "wx.h".
#include "wx/wxprec.h"
#ifdef __BORLANDC__
#pragma hdrstop
#endif
#if wxUSE_GRAPHICS_CONTEXT
#ifndef WX_PRECOMP
#include "wx/cpp.h"
#include "wx/log.h"
#include "wx/module.h"
#include "wx/string.h"
#endif // WX_PRECOMP
#include "wx/dynload.h"
#include "wx/msw/wrapgdip.h"
// The w32api headers used by both MinGW and Cygwin wrongly define UINT16
// inside the Gdiplus namespace in gdiplus.h. This results in ambiguity errors
// when using the type, as UINT16 is also defined in the global scope by
// windows.h (or rather by basetsd.h, which it includes), so we redefine it to
// work around the problem.
#if defined(__CYGWIN__) || defined(__MINGW32__)
#define UINT16 unsigned short
#endif
// ----------------------------------------------------------------------------
// helper macros
// ----------------------------------------------------------------------------
// return the name we use for the type of the function with the given name
// (without "Gdip" prefix)
#define wxGDIPLUS_FUNC_T(name) Gdip##name##_t
// to avoid repeating all (several hundred) GDI+ function names several
// times in this file, we define a macro which allows us to apply another macro
// to all of these functions at once (or almost all, as we sometimes have to
// handle functions not returning GpStatus separately)
//
// this macro expands into an invocation of the given macro m for all GDI+
// functions returning the standard GpStatus
//
// m is called with the name of the function without the "Gdip" prefix as the
// first argument, the list of function parameters with their names as the
// second one, and the list of just the parameter names as the third one
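// A minimal illustration of the idea (wxEXAMPLE_DECLARE_FUNC is a hypothetical
// name used only for this sketch, and the standard WINGDIPAPI calling-convention
// macro from the GDI+ flat API headers is assumed): given an argument macro
// such as
//
//     #define wxEXAMPLE_DECLARE_FUNC(name, params, args) \
//         typedef GpStatus (WINGDIPAPI *wxGDIPLUS_FUNC_T(name)) params;
//
// invoking wxFOR_ALL_GDIPLUS_STATUS_FUNCS(wxEXAMPLE_DECLARE_FUNC) would expand,
// for the first entry below, to
//
//     typedef GpStatus (WINGDIPAPI *GdipCreatePath_t)
//                      (GpFillMode brushMode, GpPath **path);
//
// i.e. one function-pointer typedef per wrapped GDI+ flat API function, without
// having to spell each declaration out by hand.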
#define wxFOR_ALL_GDIPLUS_STATUS_FUNCS(m) \
m(CreatePath, (GpFillMode brushMode, GpPath **path), (brushMode, path)) \
m(CreatePath2, (GDIPCONST GpPointF* a1, GDIPCONST BYTE* a2, INT a3, GpFillMode a4, GpPath **path), (a1, a2, a3, a4, path)) \
m(CreatePath2I, (GDIPCONST GpPoint* a1, GDIPCONST BYTE* a2, INT a3, GpFillMode a4, GpPath **path), (a1, a2, a3, a4, path)) \
m(ClonePath, (GpPath* path, GpPath **clonePath), (path, clonePath)) \
m(DeletePath, (GpPath* path), (path)) \
m(ResetPath, (GpPath* path), (path)) \
m(GetPointCount, (GpPath* path, INT* count), (path, count)) \
m(GetPathTypes, (GpPath* path, BYTE* types, INT count), (path, types, count)) \
m(GetPathPoints, (GpPath* a1, GpPointF* points, INT count), (a1, points, count)) \
m(GetPathPointsI, (GpPath* a1, GpPoint* points, INT count), (a1, points, count)) \
m(GetPathFillMode, (GpPath *path, GpFillMode *fillmode), (path, fillmode)) \
m(SetPathFillMode, (GpPath *path, GpFillMode fillmode), (path, fillmode)) \
m(GetPathData, (GpPath *path, GpPathData* pathData), (path, pathData)) \
m(StartPathFigure, (GpPath *path), (path)) \
m(ClosePathFigure, (GpPath *path), (path)) \
m(ClosePathFigures, (GpPath *path), (path)) \
m(SetPathMarker, (GpPath* path), (path)) \
m(ClearPathMarkers, (GpPath* path), (path)) \
m(ReversePath, (GpPath* path), (path)) \
m(GetPathLastPoint, (GpPath* path, GpPointF* lastPoint), (path, lastPoint)) \
m(AddPathLine, (GpPath *path, REAL x1, REAL y1, REAL x2, REAL y2), (path, x1, y1, x2, y2)) \
m(AddPathLine2, (GpPath *path, GDIPCONST GpPointF *points, INT count), (path, points, count)) \
m(AddPathArc, (GpPath *path, REAL x, REAL y, REAL width, REAL height, REAL startAngle, REAL sweepAngle), (path, x, y, width, height, startAngle, sweepAngle)) \
m(AddPathBezier, (GpPath *path, REAL x1, REAL y1, REAL x2, REAL y2, REAL x3, REAL y3, REAL x4, REAL y4), (path, x1, y1, x2, y2, x3, y3, x4, y4)) \
m(AddPathBeziers, (GpPath *path, GDIPCONST GpPointF *points, INT count), (path, points, count)) \
m(AddPathCurve, (GpPath *path, GDIPCONST GpPointF *points, INT count), (path, points, count)) \
m(AddPathCurve2, (GpPath *path, GDIPCONST GpPointF *points, INT count, REAL tension), (path, points, count, tension)) \
m(AddPathCurve3, (GpPath *path, GDIPCONST GpPointF *points, INT count, INT offset, INT numberOfSegments, REAL tension), (path, points, count, offset, numberOfSegments, tension)) \
m(AddPathClosedCurve, (GpPath *path, GDIPCONST GpPointF *points, INT count), (path, points, count)) \
m(AddPathClosedCurve2, (GpPath *path, GDIPCONST GpPointF *points, INT count, REAL tension), (path, points, count, tension)) \
m(AddPathRectangle, (GpPath *path, REAL x, REAL y, REAL width, REAL height), (path, x, y, width, height)) \
m(AddPathRectangles, (GpPath *path, GDIPCONST GpRectF *rects, INT count), (path, rects, count)) \
m(AddPathEllipse, (GpPath *path, REAL x, REAL y, REAL width, REAL height), (path, x, y, width, height)) \
m(AddPathPie, (GpPath *path, REAL x, REAL y, REAL width, REAL height, REAL startAngle, REAL sweepAngle), (path, x, y, width, height, startAngle, sweepAngle)) \
m(AddPathPolygon, (GpPath *path, GDIPCONST GpPointF *points, INT count), (path, points, count)) \
m(AddPathPath, (GpPath *path, GDIPCONST GpPath* addingPath, BOOL connect), (path, addingPath, connect)) \
m(AddPathString, (GpPath *path, GDIPCONST WCHAR *string, INT length, GDIPCONST GpFontFamily *family, INT style, REAL emSize, GDIPCONST RectF *layoutRect, GDIPCONST GpStringFormat *format), (path, string, length, family, style, emSize, layoutRect, format)) \
m(AddPathStringI, (GpPath *path, GDIPCONST WCHAR *string, INT length, GDIPCONST GpFontFamily *family, INT style, REAL emSize, GDIPCONST Rect *layoutRect, GDIPCONST GpStringFormat *format), (path, string, length, family, style, emSize, layoutRect, format)) \
m(AddPathLineI, (GpPath *path, INT x1, INT y1, INT x2, INT y2), (path, x1, y1, x2, y2)) \
m(AddPathLine2I, (GpPath *path, GDIPCONST GpPoint *points, INT count), (path, points, count)) \
m(AddPathArcI, (GpPath *path, INT x, INT y, INT width, INT height, REAL startAngle, REAL sweepAngle), (path, x, y, width, height, startAngle, sweepAngle)) \
m(AddPathBezierI, (GpPath *path, INT x1, INT y1, INT x2, INT y2, INT x3, INT y3, INT x4, INT y4), (path, x1, y1, x2, y2, x3, y3, x4, y4)) \
m(AddPathBeziersI, (GpPath *path, GDIPCONST GpPoint *points, INT count), (path, points, count)) \
m(AddPathCurveI, (GpPath *path, GDIPCONST GpPoint *points, INT count), (path, points, count)) \
m(AddPathCurve2I, (GpPath *path, GDIPCONST GpPoint *points, INT count, REAL tension), (path, points, count, tension)) \
m(AddPathCurve3I, (GpPath *path, GDIPCONST GpPoint *points, INT count, INT offset, INT numberOfSegments, REAL tension), (path, points, count, offset, numberOfSegments, tension)) \
m(AddPathClosedCurveI, (GpPath *path, GDIPCONST GpPoint *points, INT count), (path, points, count)) \
m(AddPathClosedCurve2I, (GpPath *path, GDIPCONST GpPoint *points, INT count, REAL tension), (path, points, count, tension)) \
m(AddPathRectangleI, (GpPath *path, INT x, INT y, INT width, INT height), (path, x, y, width, height)) \
m(AddPathRectanglesI, (GpPath *path, GDIPCONST GpRect *rects, INT count), (path, rects, count)) \
m(AddPathEllipseI, (GpPath *path, INT x, INT y, INT width, INT height), (path, x, y, width, height)) \
m(AddPathPieI, (GpPath *path, INT x, INT y, INT width, INT height, REAL startAngle, REAL sweepAngle), (path, x, y, width, height, startAngle, sweepAngle)) \
m(AddPathPolygonI, (GpPath *path, GDIPCONST GpPoint *points, INT count), (path, points, count)) \
m(FlattenPath, (GpPath *path, GpMatrix* matrix, REAL flatness), (path, matrix, flatness)) \
m(WindingModeOutline, (GpPath *path, GpMatrix *matrix, REAL flatness), (path, matrix, flatness)) \
m(WidenPath, (GpPath *nativePath, GpPen *pen, GpMatrix *matrix, REAL flatness), (nativePath, pen, matrix, flatness)) \
m(WarpPath, (GpPath *path, GpMatrix* matrix, GDIPCONST GpPointF *points, INT count, REAL srcx, REAL srcy, REAL srcwidth, REAL srcheight, WarpMode warpMode, REAL flatness), (path, matrix, points, count, srcx, srcy, srcwidth, srcheight, warpMode, flatness)) \
m(TransformPath, (GpPath* path, GpMatrix* matrix), (path, matrix)) \
m(GetPathWorldBounds, (GpPath* path, GpRectF* bounds, GDIPCONST GpMatrix *matrix, GDIPCONST GpPen *pen), (path, bounds, matrix, pen)) \
m(GetPathWorldBoundsI, (GpPath* path, GpRect* bounds, GDIPCONST GpMatrix *matrix, GDIPCONST GpPen *pen), (path, bounds, matrix, pen)) \
m(IsVisiblePathPoint, (GpPath* path, REAL x, REAL y, GpGraphics *graphics, BOOL *result), (path, x, y, graphics, result)) \
m(IsVisiblePathPointI, (GpPath* path, INT x, INT y, GpGraphics *graphics, BOOL *result), (path, x, y, graphics, result)) \
m(IsOutlineVisiblePathPoint, (GpPath* path, REAL x, REAL y, GpPen *pen, GpGraphics *graphics, BOOL *result), (path, x, y, pen, graphics, result)) \
m(IsOutlineVisiblePathPointI, (GpPath* path, INT x, INT y, GpPen *pen, GpGraphics *graphics, BOOL *result), (path, x, y, pen, graphics, result)) \
m(CreatePathIter, (GpPathIterator **iterator, GpPath* path), (iterator, path)) \
m(DeletePathIter, (GpPathIterator *iterator), (iterator)) \
m(PathIterNextSubpath, (GpPathIterator* iterator, INT *resultCount, INT* startIndex, INT* endIndex, BOOL* isClosed), (iterator, resultCount, startIndex, endIndex, isClosed)) \
m(PathIterNextSubpathPath, (GpPathIterator* iterator, INT* resultCount, GpPath* path, BOOL* isClosed), (iterator, resultCount, path, isClosed)) \
m(PathIterNextPathType, (GpPathIterator* iterator, INT* resultCount, BYTE* pathType, INT* startIndex, INT* endIndex), (iterator, resultCount, pathType, startIndex, endIndex)) \
m(PathIterNextMarker, (GpPathIterator* iterator, INT *resultCount, INT* startIndex, INT* endIndex), (iterator, resultCount, startIndex, endIndex)) \
m(PathIterNextMarkerPath, (GpPathIterator* iterator, INT* resultCount, GpPath* path), (iterator, resultCount, path)) \
m(PathIterGetCount, (GpPathIterator* iterator, INT* count), (iterator, count)) \
m(PathIterGetSubpathCount, (GpPathIterator* iterator, INT* count), (iterator, count)) \
m(PathIterIsValid, (GpPathIterator* iterator, BOOL* valid), (iterator, valid)) \
m(PathIterHasCurve, (GpPathIterator* iterator, BOOL* hasCurve), (iterator, hasCurve)) \
m(PathIterRewind, (GpPathIterator* iterator), (iterator)) \
m(PathIterEnumerate, (GpPathIterator* iterator, INT* resultCount, GpPointF *points, BYTE *types, INT count), (iterator, resultCount, points, types, count)) \
m(PathIterCopyData, (GpPathIterator* iterator, INT* resultCount, GpPointF* points, BYTE* types, INT startIndex, INT endIndex), (iterator, resultCount, points, types, startIndex, endIndex)) \
m(CreateMatrix, (GpMatrix **matrix), (matrix)) \
m(CreateMatrix2, (REAL m11, REAL m12, REAL m21, REAL m22, REAL dx, REAL dy, GpMatrix **matrix), (m11, m12, m21, m22, dx, dy, matrix)) \
m(CreateMatrix3, (GDIPCONST GpRectF *rect, GDIPCONST GpPointF *dstplg, GpMatrix **matrix), (rect, dstplg, matrix)) \
m(CreateMatrix3I, (GDIPCONST GpRect *rect, GDIPCONST GpPoint *dstplg, GpMatrix **matrix), (rect, dstplg, matrix)) \
m(CloneMatrix, (GpMatrix *matrix, GpMatrix **cloneMatrix), (matrix, cloneMatrix)) \
m(DeleteMatrix, (GpMatrix *matrix), (matrix)) \
m(SetMatrixElements, (GpMatrix *matrix, REAL m11, REAL m12, REAL m21, REAL m22, REAL dx, REAL dy), (matrix, m11, m12, m21, m22, dx, dy)) \
m(MultiplyMatrix, (GpMatrix *matrix, GpMatrix* matrix2, GpMatrixOrder order), (matrix, matrix2, order)) \
m(TranslateMatrix, (GpMatrix *matrix, REAL offsetX, REAL offsetY, GpMatrixOrder order), (matrix, offsetX, offsetY, order)) \
m(ScaleMatrix, (GpMatrix *matrix, REAL scaleX, REAL scaleY, GpMatrixOrder order), (matrix, scaleX, scaleY, order)) \
m(RotateMatrix, (GpMatrix *matrix, REAL angle, GpMatrixOrder order), (matrix, angle, order)) \
m(ShearMatrix, (GpMatrix *matrix, REAL shearX, REAL shearY, GpMatrixOrder order), (matrix, shearX, shearY, order)) \
m(InvertMatrix, (GpMatrix *matrix), (matrix)) \
m(TransformMatrixPoints, (GpMatrix *matrix, GpPointF *pts, INT count), (matrix, pts, count)) \
m(TransformMatrixPointsI, (GpMatrix *matrix, GpPoint *pts, INT count), (matrix, pts, count)) \
m(VectorTransformMatrixPoints, (GpMatrix *matrix, GpPointF *pts, INT count), (matrix, pts, count)) \
m(VectorTransformMatrixPointsI, (GpMatrix *matrix, GpPoint *pts, INT count), (matrix, pts, count)) \
m(GetMatrixElements, (GDIPCONST GpMatrix *matrix, REAL *matrixOut), (matrix, matrixOut)) \
m(IsMatrixInvertible, (GDIPCONST GpMatrix *matrix, BOOL *result), (matrix, result)) \
m(IsMatrixIdentity, (GDIPCONST GpMatrix *matrix, BOOL *result), (matrix, result)) \
m(IsMatrixEqual, (GDIPCONST GpMatrix *matrix, GDIPCONST GpMatrix *matrix2, BOOL *result), (matrix, matrix2, result)) \
m(CreateRegion, (GpRegion **region), (region)) \
m(CreateRegionRect, (GDIPCONST GpRectF *rect, GpRegion **region), (rect, region)) \
m(CreateRegionRectI, (GDIPCONST GpRect *rect, GpRegion **region), (rect, region)) \
m(CreateRegionPath, (GpPath *path, GpRegion **region), (path, region)) \
m(CreateRegionRgnData, (GDIPCONST BYTE *regionData, INT size, GpRegion **region), (regionData, size, region)) \
m(CreateRegionHrgn, (HRGN hRgn, GpRegion **region), (hRgn, region)) \
m(CloneRegion, (GpRegion *region, GpRegion **cloneRegion), (region, cloneRegion)) \
m(DeleteRegion, (GpRegion *region), (region)) \
m(SetInfinite, (GpRegion *region), (region)) \
m(SetEmpty, (GpRegion *region), (region)) \
m(CombineRegionRect, (GpRegion *region, GDIPCONST GpRectF *rect, CombineMode combineMode), (region, rect, combineMode)) \
m(CombineRegionRectI, (GpRegion *region, GDIPCONST GpRect *rect, CombineMode combineMode), (region, rect, combineMode)) \
m(CombineRegionPath, (GpRegion *region, GpPath *path, CombineMode combineMode), (region, path, combineMode)) \
m(CombineRegionRegion, (GpRegion *region, GpRegion *region2, CombineMode combineMode), (region, region2, combineMode)) \
m(TranslateRegion, (GpRegion *region, REAL dx, REAL dy), (region, dx, dy)) \
m(TranslateRegionI, (GpRegion *region, INT dx, INT dy), (region, dx, dy)) \
m(TransformRegion, (GpRegion *region, GpMatrix *matrix), (region, matrix)) \
m(GetRegionBounds, (GpRegion *region, GpGraphics *graphics, GpRectF *rect), (region, graphics, rect)) \
m(GetRegionBoundsI, (GpRegion *region, GpGraphics *graphics, GpRect *rect), (region, graphics, rect)) \
m(GetRegionHRgn, (GpRegion *region, GpGraphics *graphics, HRGN *hRgn), (region, graphics, hRgn)) \
m(IsEmptyRegion, (GpRegion *region, GpGraphics *graphics, BOOL *result), (region, graphics, result)) \
m(IsInfiniteRegion, (GpRegion *region, GpGraphics *graphics, BOOL *result), (region, graphics, result)) \
m(IsEqualRegion, (GpRegion *region, GpRegion *region2, GpGraphics *graphics, BOOL *result), (region, region2, graphics, result)) \
m(GetRegionDataSize, (GpRegion *region, UINT *bufferSize), (region, bufferSize)) \
m(GetRegionData, (GpRegion *region, BYTE *buffer, UINT bufferSize, UINT *sizeFilled), (region, buffer, bufferSize, sizeFilled)) \
m(IsVisibleRegionPoint, (GpRegion *region, REAL x, REAL y, GpGraphics *graphics, BOOL *result), (region, x, y, graphics, result)) \
m(IsVisibleRegionPointI, (GpRegion *region, INT x, INT y, GpGraphics *graphics, BOOL *result), (region, x, y, graphics, result)) \
m(IsVisibleRegionRect, (GpRegion *region, REAL x, REAL y, REAL width, REAL height, GpGraphics *graphics, BOOL *result), (region, x, y, width, height, graphics, result)) \
m(IsVisibleRegionRectI, (GpRegion *region, INT x, INT y, INT width, INT height, GpGraphics *graphics, BOOL *result), (region, x, y, width, height, graphics, result)) \
m(GetRegionScansCount, (GpRegion *region, UINT* count, GpMatrix* matrix), (region, count, matrix)) \
m(GetRegionScans, (GpRegion *region, GpRectF* rects, INT* count, GpMatrix* matrix), (region, rects, count, matrix)) \
m(GetRegionScansI, (GpRegion *region, GpRect* rects, INT* count, GpMatrix* matrix), (region, rects, count, matrix)) \
m(CloneBrush, (GpBrush *brush, GpBrush **cloneBrush), (brush, cloneBrush)) \
m(DeleteBrush, (GpBrush *brush), (brush)) \
m(GetBrushType, (GpBrush *brush, GpBrushType *type), (brush, type)) \
m(CreateHatchBrush, (GpHatchStyle hatchstyle, ARGB forecol, ARGB backcol, GpHatch **brush), (hatchstyle, forecol, backcol, brush)) \
m(GetHatchStyle, (GpHatch *brush, GpHatchStyle *hatchstyle), (brush, hatchstyle)) \
m(GetHatchForegroundColor, (GpHatch *brush, ARGB* forecol), (brush, forecol)) \
m(GetHatchBackgroundColor, (GpHatch *brush, ARGB* backcol), (brush, backcol)) \
m(CreateTexture, (GpImage *image, GpWrapMode wrapmode, GpTexture **texture), (image, wrapmode, texture)) \
m(CreateTexture2, (GpImage *image, GpWrapMode wrapmode, REAL x, REAL y, REAL width, REAL height, GpTexture **texture), (image, wrapmode, x, y, width, height, texture)) \
m(CreateTextureIA, (GpImage *image, GDIPCONST GpImageAttributes *imageAttributes, REAL x, REAL y, REAL width, REAL height, GpTexture **texture), (image, imageAttributes, x, y, width, height, texture)) \
m(CreateTexture2I, (GpImage *image, GpWrapMode wrapmode, INT x, INT y, INT width, INT height, GpTexture **texture), (image, wrapmode, x, y, width, height, texture)) \
m(CreateTextureIAI, (GpImage *image, GDIPCONST GpImageAttributes *imageAttributes, INT x, INT y, INT width, INT height, GpTexture **texture), (image, imageAttributes, x, y, width, height, texture)) \
m(GetTextureTransform, (GpTexture *brush, GpMatrix *matrix), (brush, matrix)) \
m(SetTextureTransform, (GpTexture *brush, GDIPCONST GpMatrix *matrix), (brush, matrix)) \
m(ResetTextureTransform, (GpTexture* brush), (brush)) \
m(MultiplyTextureTransform, (GpTexture* brush, GDIPCONST GpMatrix *matrix, GpMatrixOrder order), (brush, matrix, order)) \
m(TranslateTextureTransform, (GpTexture* brush, REAL dx, REAL dy, GpMatrixOrder order), (brush, dx, dy, order)) \
m(ScaleTextureTransform, (GpTexture* brush, REAL sx, REAL sy, GpMatrixOrder order), (brush, sx, sy, order)) \
m(RotateTextureTransform, (GpTexture* brush, REAL angle, GpMatrixOrder order), (brush, angle, order)) \
m(SetTextureWrapMode, (GpTexture *brush, GpWrapMode wrapmode), (brush, wrapmode)) \
m(GetTextureWrapMode, (GpTexture *brush, GpWrapMode *wrapmode), (brush, wrapmode)) \
m(GetTextureImage, (GpTexture *brush, GpImage **image), (brush, image)) \
m(CreateSolidFill, (ARGB color, GpSolidFill **brush), (color, brush)) \
m(SetSolidFillColor, (GpSolidFill *brush, ARGB color), (brush, color)) \
m(GetSolidFillColor, (GpSolidFill *brush, ARGB *color), (brush, color)) \
m(CreateLineBrush, (GDIPCONST GpPointF* point1, GDIPCONST GpPointF* point2, ARGB color1, ARGB color2, GpWrapMode wrapMode, GpLineGradient **lineGradient), (point1, point2, color1, color2, wrapMode, lineGradient)) \
m(CreateLineBrushI, (GDIPCONST GpPoint* point1, GDIPCONST GpPoint* point2, ARGB color1, ARGB color2, GpWrapMode wrapMode, GpLineGradient **lineGradient), (point1, point2, color1, color2, wrapMode, lineGradient)) \
m(CreateLineBrushFromRect, (GDIPCONST GpRectF* rect, ARGB color1, ARGB color2, LinearGradientMode mode, GpWrapMode wrapMode, GpLineGradient **lineGradient), (rect, color1, color2, mode, wrapMode, lineGradient)) \
m(CreateLineBrushFromRectI, (GDIPCONST GpRect* rect, ARGB color1, ARGB color2, LinearGradientMode mode, GpWrapMode wrapMode, GpLineGradient **lineGradient), (rect, color1, color2, mode, wrapMode, lineGradient)) \
m(CreateLineBrushFromRectWithAngle, (GDIPCONST GpRectF* rect, ARGB color1, ARGB color2, REAL angle, BOOL isAngleScalable, GpWrapMode wrapMode, GpLineGradient **lineGradient), (rect, color1, color2, angle, isAngleScalable, wrapMode, lineGradient)) \
m(CreateLineBrushFromRectWithAngleI, (GDIPCONST GpRect* rect, ARGB color1, ARGB color2, REAL angle, BOOL isAngleScalable, GpWrapMode wrapMode, GpLineGradient **lineGradient), (rect, color1, color2, angle, isAngleScalable, wrapMode, lineGradient)) \
m(SetLineColors, (GpLineGradient *brush, ARGB color1, ARGB color2), (brush, color1, color2)) \
m(GetLineColors, (GpLineGradient *brush, ARGB* colors), (brush, colors)) \
m(GetLineRect, (GpLineGradient *brush, GpRectF *rect), (brush, rect)) \
m(GetLineRectI, (GpLineGradient *brush, GpRect *rect), (brush, rect)) \
m(SetLineGammaCorrection, (GpLineGradient *brush, BOOL useGammaCorrection), (brush, useGammaCorrection)) \
m(GetLineGammaCorrection, (GpLineGradient *brush, BOOL *useGammaCorrection), (brush, useGammaCorrection)) \
m(GetLineBlendCount, (GpLineGradient *brush, INT *count), (brush, count)) \
m(GetLineBlend, (GpLineGradient *brush, REAL *blend, REAL* positions, INT count), (brush, blend, positions, count)) \
m(SetLineBlend, (GpLineGradient *brush, GDIPCONST REAL *blend, GDIPCONST REAL* positions, INT count), (brush, blend, positions, count)) \
m(GetLinePresetBlendCount, (GpLineGradient *brush, INT *count), (brush, count)) \
m(GetLinePresetBlend, (GpLineGradient *brush, ARGB *blend, REAL* positions, INT count), (brush, blend, positions, count)) \
m(SetLinePresetBlend, (GpLineGradient *brush, GDIPCONST ARGB *blend, GDIPCONST REAL* positions, INT count), (brush, blend, positions, count)) \
m(SetLineSigmaBlend, (GpLineGradient *brush, REAL focus, REAL scale), (brush, focus, scale)) \
m(SetLineLinearBlend, (GpLineGradient *brush, REAL focus, REAL scale), (brush, focus, scale)) \
m(SetLineWrapMode, (GpLineGradient *brush, GpWrapMode wrapmode), (brush, wrapmode)) \
m(GetLineWrapMode, (GpLineGradient *brush, GpWrapMode *wrapmode), (brush, wrapmode)) \
m(GetLineTransform, (GpLineGradient *brush, GpMatrix *matrix), (brush, matrix)) \
m(SetLineTransform, (GpLineGradient *brush, GDIPCONST GpMatrix *matrix), (brush, matrix)) \
m(ResetLineTransform, (GpLineGradient* brush), (brush)) \
m(MultiplyLineTransform, (GpLineGradient* brush, GDIPCONST GpMatrix *matrix, GpMatrixOrder order), (brush, matrix, order)) \
m(TranslateLineTransform, (GpLineGradient* brush, REAL dx, REAL dy, GpMatrixOrder order), (brush, dx, dy, order)) \
m(ScaleLineTransform, (GpLineGradient* brush, REAL sx, REAL sy, GpMatrixOrder order), (brush, sx, sy, order)) \
m(RotateLineTransform, (GpLineGradient* brush, REAL angle, GpMatrixOrder order), (brush, angle, order)) \
m(CreatePathGradient, (GDIPCONST GpPointF* points, INT count, GpWrapMode wrapMode, GpPathGradient **polyGradient), (points, count, wrapMode, polyGradient)) \
m(CreatePathGradientI, (GDIPCONST GpPoint* points, INT count, GpWrapMode wrapMode, GpPathGradient **polyGradient), (points, count, wrapMode, polyGradient)) \
m(CreatePathGradientFromPath, (GDIPCONST GpPath* path, GpPathGradient **polyGradient), (path, polyGradient)) \
m(GetPathGradientCenterColor, (GpPathGradient *brush, ARGB* colors), (brush, colors)) \
m(SetPathGradientCenterColor, (GpPathGradient *brush, ARGB colors), (brush, colors)) \
m(GetPathGradientSurroundColorsWithCount, (GpPathGradient *brush, ARGB* color, INT* count), (brush, color, count)) \
m(SetPathGradientSurroundColorsWithCount, (GpPathGradient *brush, GDIPCONST ARGB* color, INT* count), (brush, color, count)) \
m(GetPathGradientPath, (GpPathGradient *brush, GpPath *path), (brush, path)) \
m(SetPathGradientPath, (GpPathGradient *brush, GDIPCONST GpPath *path), (brush, path)) \
m(GetPathGradientCenterPoint, (GpPathGradient *brush, GpPointF* points), (brush, points)) \
m(GetPathGradientCenterPointI, (GpPathGradient *brush, GpPoint* points), (brush, points)) \
m(SetPathGradientCenterPoint, (GpPathGradient *brush, GDIPCONST GpPointF* points), (brush, points)) \
m(SetPathGradientCenterPointI, (GpPathGradient *brush, GDIPCONST GpPoint* points), (brush, points)) \
m(GetPathGradientRect, (GpPathGradient *brush, GpRectF *rect), (brush, rect)) \
m(GetPathGradientRectI, (GpPathGradient *brush, GpRect *rect), (brush, rect)) \
m(GetPathGradientPointCount, (GpPathGradient *brush, INT* count), (brush, count)) \
m(GetPathGradientSurroundColorCount, (GpPathGradient *brush, INT* count), (brush, count)) \
m(SetPathGradientGammaCorrection, (GpPathGradient *brush, BOOL useGammaCorrection), (brush, useGammaCorrection)) \
m(GetPathGradientGammaCorrection, (GpPathGradient *brush, BOOL *useGammaCorrection), (brush, useGammaCorrection)) \
m(GetPathGradientBlendCount, (GpPathGradient *brush, INT *count), (brush, count)) \
m(GetPathGradientBlend, (GpPathGradient *brush, REAL *blend, REAL *positions, INT count), (brush, blend, positions, count)) \
m(SetPathGradientBlend, (GpPathGradient *brush, GDIPCONST REAL *blend, GDIPCONST REAL *positions, INT count), (brush, blend, positions, count)) \
m(GetPathGradientPresetBlendCount, (GpPathGradient *brush, INT *count), (brush, count)) \
m(GetPathGradientPresetBlend, (GpPathGradient *brush, ARGB *blend, REAL* positions, INT count), (brush, blend, positions, count)) \
m(SetPathGradientPresetBlend, (GpPathGradient *brush, GDIPCONST ARGB *blend, GDIPCONST REAL* positions, INT count), (brush, blend, positions, count)) \
m(SetPathGradientSigmaBlend, (GpPathGradient *brush, REAL focus, REAL scale), (brush, focus, scale)) \
m(SetPathGradientLinearBlend, (GpPathGradient *brush, REAL focus, REAL scale), (brush, focus, scale)) \
m(GetPathGradientWrapMode, (GpPathGradient *brush, GpWrapMode *wrapmode), (brush, wrapmode)) \
m(SetPathGradientWrapMode, (GpPathGradient *brush, GpWrapMode wrapmode), (brush, wrapmode)) \
m(GetPathGradientTransform, (GpPathGradient *brush, GpMatrix *matrix), (brush, matrix)) \
m(SetPathGradientTransform, (GpPathGradient *brush, GpMatrix *matrix), (brush, matrix)) \
m(ResetPathGradientTransform, (GpPathGradient* brush), (brush)) \
m(MultiplyPathGradientTransform, (GpPathGradient* brush, GDIPCONST GpMatrix *matrix, GpMatrixOrder order), (brush, matrix, order)) \
m(TranslatePathGradientTransform, (GpPathGradient* brush, REAL dx, REAL dy, GpMatrixOrder order), (brush, dx, dy, order)) \
m(ScalePathGradientTransform, (GpPathGradient* brush, REAL sx, REAL sy, GpMatrixOrder order), (brush, sx, sy, order)) \
m(RotatePathGradientTransform, (GpPathGradient* brush, REAL angle, GpMatrixOrder order), (brush, angle, order)) \
m(GetPathGradientFocusScales, (GpPathGradient *brush, REAL* xScale, REAL* yScale), (brush, xScale, yScale)) \
m(SetPathGradientFocusScales, (GpPathGradient *brush, REAL xScale, REAL yScale), (brush, xScale, yScale)) \
m(CreatePen1, (ARGB color, REAL width, GpUnit unit, GpPen **pen), (color, width, unit, pen)) \
m(CreatePen2, (GpBrush *brush, REAL width, GpUnit unit, GpPen **pen), (brush, width, unit, pen)) \
m(ClonePen, (GpPen *pen, GpPen **clonepen), (pen, clonepen)) \
m(DeletePen, (GpPen *pen), (pen)) \
m(SetPenWidth, (GpPen *pen, REAL width), (pen, width)) \
m(GetPenWidth, (GpPen *pen, REAL *width), (pen, width)) \
m(SetPenUnit, (GpPen *pen, GpUnit unit), (pen, unit)) \
m(GetPenUnit, (GpPen *pen, GpUnit *unit), (pen, unit)) \
m(SetPenLineCap197819, (GpPen *pen, GpLineCap startCap, GpLineCap endCap, GpDashCap dashCap), (pen, startCap, endCap, dashCap)) \
m(SetPenStartCap, (GpPen *pen, GpLineCap startCap), (pen, startCap)) \
m(SetPenEndCap, (GpPen *pen, GpLineCap endCap), (pen, endCap)) \
m(SetPenDashCap197819, (GpPen *pen, GpDashCap dashCap), (pen, dashCap)) \
m(GetPenStartCap, (GpPen *pen, GpLineCap *startCap), (pen, startCap)) \
m(GetPenEndCap, (GpPen *pen, GpLineCap *endCap), (pen, endCap)) \
m(GetPenDashCap197819, (GpPen *pen, GpDashCap *dashCap), (pen, dashCap)) \
m(SetPenLineJoin, (GpPen *pen, GpLineJoin lineJoin), (pen, lineJoin)) \
m(GetPenLineJoin, (GpPen *pen, GpLineJoin *lineJoin), (pen, lineJoin)) \
m(SetPenCustomStartCap, (GpPen *pen, GpCustomLineCap* customCap), (pen, customCap)) \
m(GetPenCustomStartCap, (GpPen *pen, GpCustomLineCap** customCap), (pen, customCap)) \
m(SetPenCustomEndCap, (GpPen *pen, GpCustomLineCap* customCap), (pen, customCap)) \
m(GetPenCustomEndCap, (GpPen *pen, GpCustomLineCap** customCap), (pen, customCap)) \
m(SetPenMiterLimit, (GpPen *pen, REAL miterLimit), (pen, miterLimit)) \
m(GetPenMiterLimit, (GpPen *pen, REAL *miterLimit), (pen, miterLimit)) \
m(SetPenMode, (GpPen *pen, GpPenAlignment penMode), (pen, penMode)) \
m(GetPenMode, (GpPen *pen, GpPenAlignment *penMode), (pen, penMode)) \
m(SetPenTransform, (GpPen *pen, GpMatrix *matrix), (pen, matrix)) \
m(GetPenTransform, (GpPen *pen, GpMatrix *matrix), (pen, matrix)) \
m(ResetPenTransform, (GpPen *pen), (pen)) \
m(MultiplyPenTransform, (GpPen *pen, GDIPCONST GpMatrix *matrix, GpMatrixOrder order), (pen, matrix, order)) \
m(TranslatePenTransform, (GpPen *pen, REAL dx, REAL dy, GpMatrixOrder order), (pen, dx, dy, order)) \
m(ScalePenTransform, (GpPen *pen, REAL sx, REAL sy, GpMatrixOrder order), (pen, sx, sy, order)) \
m(RotatePenTransform, (GpPen *pen, REAL angle, GpMatrixOrder order), (pen, angle, order)) \
m(SetPenColor, (GpPen *pen, ARGB argb), (pen, argb)) \
m(GetPenColor, (GpPen *pen, ARGB *argb), (pen, argb)) \
m(SetPenBrushFill, (GpPen *pen, GpBrush *brush), (pen, brush)) \
m(GetPenBrushFill, (GpPen *pen, GpBrush **brush), (pen, brush)) \
m(GetPenFillType, (GpPen *pen, GpPenType* type), (pen, type)) \
m(GetPenDashStyle, (GpPen *pen, GpDashStyle *dashstyle), (pen, dashstyle)) \
m(SetPenDashStyle, (GpPen *pen, GpDashStyle dashstyle), (pen, dashstyle)) \
m(GetPenDashOffset, (GpPen *pen, REAL *offset), (pen, offset)) \
m(SetPenDashOffset, (GpPen *pen, REAL offset), (pen, offset)) \
m(GetPenDashCount, (GpPen *pen, INT *count), (pen, count)) \
m(SetPenDashArray, (GpPen *pen, GDIPCONST REAL *dash, INT count), (pen, dash, count)) \
m(GetPenDashArray, (GpPen *pen, REAL *dash, INT count), (pen, dash, count)) \
m(GetPenCompoundCount, (GpPen *pen, INT *count), (pen, count)) \
m(SetPenCompoundArray, (GpPen *pen, GDIPCONST REAL *dash, INT count), (pen, dash, count)) \
m(GetPenCompoundArray, (GpPen *pen, REAL *dash, INT count), (pen, dash, count)) \
m(CreateCustomLineCap, (GpPath* fillPath, GpPath* strokePath, GpLineCap baseCap, REAL baseInset, GpCustomLineCap **customCap), (fillPath, strokePath, baseCap, baseInset, customCap)) \
m(DeleteCustomLineCap, (GpCustomLineCap* customCap), (customCap)) \
m(CloneCustomLineCap, (GpCustomLineCap* customCap, GpCustomLineCap** clonedCap), (customCap, clonedCap)) \
m(GetCustomLineCapType, (GpCustomLineCap* customCap, CustomLineCapType* capType), (customCap, capType)) \
m(SetCustomLineCapStrokeCaps, (GpCustomLineCap* customCap, GpLineCap startCap, GpLineCap endCap), (customCap, startCap, endCap)) \
m(GetCustomLineCapStrokeCaps, (GpCustomLineCap* customCap, GpLineCap* startCap, GpLineCap* endCap), (customCap, startCap, endCap)) \
m(SetCustomLineCapStrokeJoin, (GpCustomLineCap* customCap, GpLineJoin lineJoin), (customCap, lineJoin)) \
m(GetCustomLineCapStrokeJoin, (GpCustomLineCap* customCap, GpLineJoin* lineJoin), (customCap, lineJoin)) \
m(SetCustomLineCapBaseCap, (GpCustomLineCap* customCap, GpLineCap baseCap), (customCap, baseCap)) \
m(GetCustomLineCapBaseCap, (GpCustomLineCap* customCap, GpLineCap* baseCap), (customCap, baseCap)) \
m(SetCustomLineCapBaseInset, (GpCustomLineCap* customCap, REAL inset), (customCap, inset)) \
m(GetCustomLineCapBaseInset, (GpCustomLineCap* customCap, REAL* inset), (customCap, inset)) \
m(SetCustomLineCapWidthScale, (GpCustomLineCap* customCap, REAL widthScale), (customCap, widthScale)) \
m(GetCustomLineCapWidthScale, (GpCustomLineCap* customCap, REAL* widthScale), (customCap, widthScale)) \
m(CreateAdjustableArrowCap, (REAL height, REAL width, BOOL isFilled, GpAdjustableArrowCap **cap), (height, width, isFilled, cap)) \
m(SetAdjustableArrowCapHeight, (GpAdjustableArrowCap* cap, REAL height), (cap, height)) \
m(GetAdjustableArrowCapHeight, (GpAdjustableArrowCap* cap, REAL* height), (cap, height)) \
m(SetAdjustableArrowCapWidth, (GpAdjustableArrowCap* cap, REAL width), (cap, width)) \
m(GetAdjustableArrowCapWidth, (GpAdjustableArrowCap* cap, REAL* width), (cap, width)) \
m(SetAdjustableArrowCapMiddleInset, (GpAdjustableArrowCap* cap, REAL middleInset), (cap, middleInset)) \
m(GetAdjustableArrowCapMiddleInset, (GpAdjustableArrowCap* cap, REAL* middleInset), (cap, middleInset)) \
m(SetAdjustableArrowCapFillState, (GpAdjustableArrowCap* cap, BOOL fillState), (cap, fillState)) \
m(GetAdjustableArrowCapFillState, (GpAdjustableArrowCap* cap, BOOL* fillState), (cap, fillState)) \
m(LoadImageFromStream, (IStream* stream, GpImage **image), (stream, image)) \
m(LoadImageFromFile, (GDIPCONST WCHAR* filename, GpImage **image), (filename, image)) \
m(LoadImageFromStreamICM, (IStream* stream, GpImage **image), (stream, image)) \
m(LoadImageFromFileICM, (GDIPCONST WCHAR* filename, GpImage **image), (filename, image)) \
m(CloneImage, (GpImage *image, GpImage **cloneImage), (image, cloneImage)) \
m(DisposeImage, (GpImage *image), (image)) \
m(SaveImageToFile, (GpImage *image, GDIPCONST WCHAR* filename, GDIPCONST CLSID* clsidEncoder, GDIPCONST EncoderParameters* encoderParams), (image, filename, clsidEncoder, encoderParams)) \
m(SaveImageToStream, (GpImage *image, IStream* stream, GDIPCONST CLSID* clsidEncoder, GDIPCONST EncoderParameters* encoderParams), (image, stream, clsidEncoder, encoderParams)) \
m(SaveAdd, (GpImage *image, GDIPCONST EncoderParameters* encoderParams), (image, encoderParams)) \
m(SaveAddImage, (GpImage *image, GpImage* newImage, GDIPCONST EncoderParameters* encoderParams), (image, newImage, encoderParams)) \
m(GetImageGraphicsContext, (GpImage *image, GpGraphics **graphics), (image, graphics)) \
m(GetImageBounds, (GpImage *image, GpRectF *srcRect, GpUnit *srcUnit), (image, srcRect, srcUnit)) \
m(GetImageDimension, (GpImage *image, REAL *width, REAL *height), (image, width, height)) \
m(GetImageType, (GpImage *image, ImageType *type), (image, type)) \
m(GetImageWidth, (GpImage *image, UINT *width), (image, width)) \
m(GetImageHeight, (GpImage *image, UINT *height), (image, height)) \
m(GetImageHorizontalResolution, (GpImage *image, REAL *resolution), (image, resolution)) \
m(GetImageVerticalResolution, (GpImage *image, REAL *resolution), (image, resolution)) \
m(GetImageFlags, (GpImage *image, UINT *flags), (image, flags)) \
m(GetImageRawFormat, (GpImage *image, GUID *format), (image, format)) \
m(GetImagePixelFormat, (GpImage *image, PixelFormat *format), (image, format)) \
m(GetImageThumbnail, (GpImage *image, UINT thumbWidth, UINT thumbHeight, GpImage **thumbImage, GetThumbnailImageAbort callback, VOID *callbackData), (image, thumbWidth, thumbHeight, thumbImage, callback, callbackData)) \
m(GetEncoderParameterListSize, (GpImage *image, GDIPCONST CLSID* clsidEncoder, UINT* size), (image, clsidEncoder, size)) \
m(GetEncoderParameterList, (GpImage *image, GDIPCONST CLSID* clsidEncoder, UINT size, EncoderParameters* buffer), (image, clsidEncoder, size, buffer)) \
m(ImageGetFrameDimensionsCount, (GpImage* image, UINT* count), (image, count)) \
m(ImageGetFrameDimensionsList, (GpImage* image, GUID* dimensionIDs, UINT count), (image, dimensionIDs, count)) \
m(ImageGetFrameCount, (GpImage *image, GDIPCONST GUID* dimensionID, UINT* count), (image, dimensionID, count)) \
m(ImageSelectActiveFrame, (GpImage *image, GDIPCONST GUID* dimensionID, UINT frameIndex), (image, dimensionID, frameIndex)) \
m(ImageRotateFlip, (GpImage *image, RotateFlipType rfType), (image, rfType)) \
m(GetImagePalette, (GpImage *image, ColorPalette *palette, INT size), (image, palette, size)) \
m(SetImagePalette, (GpImage *image, GDIPCONST ColorPalette *palette), (image, palette)) \
m(GetImagePaletteSize, (GpImage *image, INT *size), (image, size)) \
m(GetPropertyCount, (GpImage *image, UINT* numOfProperty), (image, numOfProperty)) \
m(GetPropertyIdList, (GpImage *image, UINT numOfProperty, PROPID* list), (image, numOfProperty, list)) \
m(GetPropertyItemSize, (GpImage *image, PROPID propId, UINT* size), (image, propId, size)) \
m(GetPropertyItem, (GpImage *image, PROPID propId,UINT propSize, PropertyItem* buffer), (image, propId, propSize, buffer)) \
m(GetPropertySize, (GpImage *image, UINT* totalBufferSize, UINT* numProperties), (image, totalBufferSize, numProperties)) \
m(GetAllPropertyItems, (GpImage *image, UINT totalBufferSize, UINT numProperties, PropertyItem* allItems), (image, totalBufferSize, numProperties, allItems)) \
m(RemovePropertyItem, (GpImage *image, PROPID propId), (image, propId)) \
m(SetPropertyItem, (GpImage *image, GDIPCONST PropertyItem* item), (image, item)) \
m(ImageForceValidation, (GpImage *image), (image)) \
m(CreateBitmapFromStream, (IStream* stream, GpBitmap **bitmap), (stream, bitmap)) \
m(CreateBitmapFromFile, (GDIPCONST WCHAR* filename, GpBitmap **bitmap), (filename, bitmap)) \
m(CreateBitmapFromStreamICM, (IStream* stream, GpBitmap **bitmap), (stream, bitmap)) \
m(CreateBitmapFromFileICM, (GDIPCONST WCHAR* filename, GpBitmap **bitmap), (filename, bitmap)) \
m(CreateBitmapFromScan0, (INT width, INT height, INT stride, PixelFormat format, BYTE* scan0, GpBitmap** bitmap), (width, height, stride, format, scan0, bitmap)) \
m(CreateBitmapFromGraphics, (INT width, INT height, GpGraphics* target, GpBitmap** bitmap), (width, height, target, bitmap)) \
m(CreateBitmapFromDirectDrawSurface, (IDirectDrawSurface7* surface, GpBitmap** bitmap), (surface, bitmap)) \
m(CreateBitmapFromGdiDib, (GDIPCONST BITMAPINFO* gdiBitmapInfo, VOID* gdiBitmapData, GpBitmap** bitmap), (gdiBitmapInfo, gdiBitmapData, bitmap)) \
m(CreateBitmapFromHBITMAP, (HBITMAP hbm, HPALETTE hpal, GpBitmap** bitmap), (hbm, hpal, bitmap)) \
m(CreateHBITMAPFromBitmap, (GpBitmap* bitmap, HBITMAP* hbmReturn, ARGB background), (bitmap, hbmReturn, background)) \
m(CreateBitmapFromHICON, (HICON hicon, GpBitmap** bitmap), (hicon, bitmap)) \
m(CreateHICONFromBitmap, (GpBitmap* bitmap, HICON* hbmReturn), (bitmap, hbmReturn)) \
m(CreateBitmapFromResource, (HINSTANCE hInstance, GDIPCONST WCHAR* lpBitmapName, GpBitmap** bitmap), (hInstance, lpBitmapName, bitmap)) \
m(CloneBitmapArea, (REAL x, REAL y, REAL width, REAL height, PixelFormat format, GpBitmap *srcBitmap, GpBitmap **dstBitmap), (x, y, width, height, format, srcBitmap, dstBitmap)) \
m(CloneBitmapAreaI, (INT x, INT y, INT width, INT height, PixelFormat format, GpBitmap *srcBitmap, GpBitmap **dstBitmap), (x, y, width, height, format, srcBitmap, dstBitmap)) \
m(BitmapLockBits, (GpBitmap* bitmap, GDIPCONST GpRect* rect, UINT flags, PixelFormat format, BitmapData* lockedBitmapData), (bitmap, rect, flags, format, lockedBitmapData)) \
m(BitmapUnlockBits, (GpBitmap* bitmap, BitmapData* lockedBitmapData), (bitmap, lockedBitmapData)) \
m(BitmapGetPixel, (GpBitmap* bitmap, INT x, INT y, ARGB *color), (bitmap, x, y, color)) \
m(BitmapSetPixel, (GpBitmap* bitmap, INT x, INT y, ARGB color), (bitmap, x, y, color)) \
m(BitmapSetResolution, (GpBitmap* bitmap, REAL xdpi, REAL ydpi), (bitmap, xdpi, ydpi)) \
m(CreateImageAttributes, (GpImageAttributes **imageattr), (imageattr)) \
m(CloneImageAttributes, (GDIPCONST GpImageAttributes *imageattr, GpImageAttributes **cloneImageattr), (imageattr, cloneImageattr)) \
m(DisposeImageAttributes, (GpImageAttributes *imageattr), (imageattr)) \
m(SetImageAttributesToIdentity, (GpImageAttributes *imageattr, ColorAdjustType type), (imageattr, type)) \
m(ResetImageAttributes, (GpImageAttributes *imageattr, ColorAdjustType type), (imageattr, type)) \
m(SetImageAttributesColorMatrix, (GpImageAttributes *imageattr, ColorAdjustType type, BOOL enableFlag, GDIPCONST ColorMatrix* colorMatrix, GDIPCONST ColorMatrix* grayMatrix, ColorMatrixFlags flags), (imageattr, type, enableFlag, colorMatrix, grayMatrix, flags)) \
m(SetImageAttributesThreshold, (GpImageAttributes *imageattr, ColorAdjustType type, BOOL enableFlag, REAL threshold), (imageattr, type, enableFlag, threshold)) \
m(SetImageAttributesGamma, (GpImageAttributes *imageattr, ColorAdjustType type, BOOL enableFlag, REAL gamma), (imageattr, type, enableFlag, gamma)) \
m(SetImageAttributesNoOp, (GpImageAttributes *imageattr, ColorAdjustType type, BOOL enableFlag), (imageattr, type, enableFlag)) \
m(SetImageAttributesColorKeys, (GpImageAttributes *imageattr, ColorAdjustType type, BOOL enableFlag, ARGB colorLow, ARGB colorHigh), (imageattr, type, enableFlag, colorLow, colorHigh)) \
m(SetImageAttributesOutputChannel, (GpImageAttributes *imageattr, ColorAdjustType type, BOOL enableFlag, ColorChannelFlags channelFlags), (imageattr, type, enableFlag, channelFlags)) \
m(SetImageAttributesOutputChannelColorProfile, (GpImageAttributes *imageattr, ColorAdjustType type, BOOL enableFlag, GDIPCONST WCHAR *colorProfileFilename), (imageattr, type, enableFlag, colorProfileFilename)) \
m(SetImageAttributesRemapTable, (GpImageAttributes *imageattr, ColorAdjustType type, BOOL enableFlag, UINT mapSize, GDIPCONST ColorMap *map), (imageattr, type, enableFlag, mapSize, map)) \
m(SetImageAttributesWrapMode, (GpImageAttributes *imageAttr, WrapMode wrap, ARGB argb, BOOL clamp), (imageAttr, wrap, argb, clamp)) \
m(GetImageAttributesAdjustedPalette, (GpImageAttributes *imageAttr, ColorPalette *colorPalette, ColorAdjustType colorAdjustType), (imageAttr, colorPalette, colorAdjustType)) \
m(Flush, (GpGraphics *graphics, GpFlushIntention intention), (graphics, intention)) \
m(CreateFromHDC, (HDC hdc, GpGraphics **graphics), (hdc, graphics)) \
m(CreateFromHDC2, (HDC hdc, HANDLE hDevice, GpGraphics **graphics), (hdc, hDevice, graphics)) \
m(CreateFromHWND, (HWND hwnd, GpGraphics **graphics), (hwnd, graphics)) \
m(CreateFromHWNDICM, (HWND hwnd, GpGraphics **graphics), (hwnd, graphics)) \
m(DeleteGraphics, (GpGraphics *graphics), (graphics)) \
m(GetDC, (GpGraphics* graphics, HDC *hdc), (graphics, hdc)) \
m(ReleaseDC, (GpGraphics* graphics, HDC hdc), (graphics, hdc)) \
m(SetCompositingMode, (GpGraphics *graphics, CompositingMode compositingMode), (graphics, compositingMode)) \
m(GetCompositingMode, (GpGraphics *graphics, CompositingMode *compositingMode), (graphics, compositingMode)) \
m(SetRenderingOrigin, (GpGraphics *graphics, INT x, INT y), (graphics, x, y)) \
m(GetRenderingOrigin, (GpGraphics *graphics, INT *x, INT *y), (graphics, x, y)) \
m(SetCompositingQuality, (GpGraphics *graphics, CompositingQuality compositingQuality), (graphics, compositingQuality)) \
m(GetCompositingQuality, (GpGraphics *graphics, CompositingQuality *compositingQuality), (graphics, compositingQuality)) \
m(SetSmoothingMode, (GpGraphics *graphics, SmoothingMode smoothingMode), (graphics, smoothingMode)) \
m(GetSmoothingMode, (GpGraphics *graphics, SmoothingMode *smoothingMode), (graphics, smoothingMode)) \
m(SetPixelOffsetMode, (GpGraphics* graphics, PixelOffsetMode pixelOffsetMode), (graphics, pixelOffsetMode)) \
m(GetPixelOffsetMode, (GpGraphics *graphics, PixelOffsetMode *pixelOffsetMode), (graphics, pixelOffsetMode)) \
m(SetTextRenderingHint, (GpGraphics *graphics, TextRenderingHint mode), (graphics, mode)) \
m(GetTextRenderingHint, (GpGraphics *graphics, TextRenderingHint *mode), (graphics, mode)) \
m(SetTextContrast, (GpGraphics *graphics, UINT contrast), (graphics, contrast)) \
m(GetTextContrast, (GpGraphics *graphics, UINT *contrast), (graphics, contrast)) \
m(SetInterpolationMode, (GpGraphics *graphics, InterpolationMode interpolationMode), (graphics, interpolationMode)) \
m(GetInterpolationMode, (GpGraphics *graphics, InterpolationMode *interpolationMode), (graphics, interpolationMode)) \
m(SetWorldTransform, (GpGraphics *graphics, GpMatrix *matrix), (graphics, matrix)) \
m(ResetWorldTransform, (GpGraphics *graphics), (graphics)) \
m(MultiplyWorldTransform, (GpGraphics *graphics, GDIPCONST GpMatrix *matrix, GpMatrixOrder order), (graphics, matrix, order)) \
m(TranslateWorldTransform, (GpGraphics *graphics, REAL dx, REAL dy, GpMatrixOrder order), (graphics, dx, dy, order)) \
m(ScaleWorldTransform, (GpGraphics *graphics, REAL sx, REAL sy, GpMatrixOrder order), (graphics, sx, sy, order)) \
m(RotateWorldTransform, (GpGraphics *graphics, REAL angle, GpMatrixOrder order), (graphics, angle, order)) \
m(GetWorldTransform, (GpGraphics *graphics, GpMatrix *matrix), (graphics, matrix)) \
m(ResetPageTransform, (GpGraphics *graphics), (graphics)) \
m(GetPageUnit, (GpGraphics *graphics, GpUnit *unit), (graphics, unit)) \
m(GetPageScale, (GpGraphics *graphics, REAL *scale), (graphics, scale)) \
m(SetPageUnit, (GpGraphics *graphics, GpUnit unit), (graphics, unit)) \
m(SetPageScale, (GpGraphics *graphics, REAL scale), (graphics, scale)) \
m(GetDpiX, (GpGraphics *graphics, REAL* dpi), (graphics, dpi)) \
m(GetDpiY, (GpGraphics *graphics, REAL* dpi), (graphics, dpi)) \
<|fim▁hole|> m(GetNearestColor, (GpGraphics *graphics, ARGB* argb), (graphics, argb)) \
m(DrawLine, (GpGraphics *graphics, GpPen *pen, REAL x1, REAL y1, REAL x2, REAL y2), (graphics, pen, x1, y1, x2, y2)) \
m(DrawLineI, (GpGraphics *graphics, GpPen *pen, INT x1, INT y1, INT x2, INT y2), (graphics, pen, x1, y1, x2, y2)) \
m(DrawLines, (GpGraphics *graphics, GpPen *pen, GDIPCONST GpPointF *points, INT count), (graphics, pen, points, count)) \
m(DrawLinesI, (GpGraphics *graphics, GpPen *pen, GDIPCONST GpPoint *points, INT count), (graphics, pen, points, count)) \
m(DrawArc, (GpGraphics *graphics, GpPen *pen, REAL x, REAL y, REAL width, REAL height, REAL startAngle, REAL sweepAngle), (graphics, pen, x, y, width, height, startAngle, sweepAngle)) \
m(DrawArcI, (GpGraphics *graphics, GpPen *pen, INT x, INT y, INT width, INT height, REAL startAngle, REAL sweepAngle), (graphics, pen, x, y, width, height, startAngle, sweepAngle)) \
m(DrawBezier, (GpGraphics *graphics, GpPen *pen, REAL x1, REAL y1, REAL x2, REAL y2, REAL x3, REAL y3, REAL x4, REAL y4), (graphics, pen, x1, y1, x2, y2, x3, y3, x4, y4)) \
m(DrawBezierI, (GpGraphics *graphics, GpPen *pen, INT x1, INT y1, INT x2, INT y2, INT x3, INT y3, INT x4, INT y4), (graphics, pen, x1, y1, x2, y2, x3, y3, x4, y4)) \
m(DrawBeziers, (GpGraphics *graphics, GpPen *pen, GDIPCONST GpPointF *points, INT count), (graphics, pen, points, count)) \
m(DrawBeziersI, (GpGraphics *graphics, GpPen *pen, GDIPCONST GpPoint *points, INT count), (graphics, pen, points, count)) \
m(DrawRectangle, (GpGraphics *graphics, GpPen *pen, REAL x, REAL y, REAL width, REAL height), (graphics, pen, x, y, width, height)) \
m(DrawRectangleI, (GpGraphics *graphics, GpPen *pen, INT x, INT y, INT width, INT height), (graphics, pen, x, y, width, height)) \
m(DrawRectangles, (GpGraphics *graphics, GpPen *pen, GDIPCONST GpRectF *rects, INT count), (graphics, pen, rects, count)) \
m(DrawRectanglesI, (GpGraphics *graphics, GpPen *pen, GDIPCONST GpRect *rects, INT count), (graphics, pen, rects, count)) \
m(DrawEllipse, (GpGraphics *graphics, GpPen *pen, REAL x, REAL y, REAL width, REAL height), (graphics, pen, x, y, width, height)) \
m(DrawEllipseI, (GpGraphics *graphics, GpPen *pen, INT x, INT y, INT width, INT height), (graphics, pen, x, y, width, height)) \
m(DrawPie, (GpGraphics *graphics, GpPen *pen, REAL x, REAL y, REAL width, REAL height, REAL startAngle, REAL sweepAngle), (graphics, pen, x, y, width, height, startAngle, sweepAngle)) \
m(DrawPieI, (GpGraphics *graphics, GpPen *pen, INT x, INT y, INT width, INT height, REAL startAngle, REAL sweepAngle), (graphics, pen, x, y, width, height, startAngle, sweepAngle)) \
m(DrawPolygon, (GpGraphics *graphics, GpPen *pen, GDIPCONST GpPointF *points, INT count), (graphics, pen, points, count)) \
m(DrawPolygonI, (GpGraphics *graphics, GpPen *pen, GDIPCONST GpPoint *points, INT count), (graphics, pen, points, count)) \
m(DrawPath, (GpGraphics *graphics, GpPen *pen, GpPath *path), (graphics, pen, path)) \
m(DrawCurve, (GpGraphics *graphics, GpPen *pen, GDIPCONST GpPointF *points, INT count), (graphics, pen, points, count)) \
m(DrawCurveI, (GpGraphics *graphics, GpPen *pen, GDIPCONST GpPoint *points, INT count), (graphics, pen, points, count)) \
m(DrawCurve2, (GpGraphics *graphics, GpPen *pen, GDIPCONST GpPointF *points, INT count, REAL tension), (graphics, pen, points, count, tension)) \
m(DrawCurve2I, (GpGraphics *graphics, GpPen *pen, GDIPCONST GpPoint *points, INT count, REAL tension), (graphics, pen, points, count, tension)) \
m(DrawCurve3, (GpGraphics *graphics, GpPen *pen, GDIPCONST GpPointF *points, INT count, INT offset, INT numberOfSegments, REAL tension), (graphics, pen, points, count, offset, numberOfSegments, tension)) \
m(DrawCurve3I, (GpGraphics *graphics, GpPen *pen, GDIPCONST GpPoint *points, INT count, INT offset, INT numberOfSegments, REAL tension), (graphics, pen, points, count, offset, numberOfSegments, tension)) \
m(DrawClosedCurve, (GpGraphics *graphics, GpPen *pen, GDIPCONST GpPointF *points, INT count), (graphics, pen, points, count)) \
m(DrawClosedCurveI, (GpGraphics *graphics, GpPen *pen, GDIPCONST GpPoint *points, INT count), (graphics, pen, points, count)) \
m(DrawClosedCurve2, (GpGraphics *graphics, GpPen *pen, GDIPCONST GpPointF *points, INT count, REAL tension), (graphics, pen, points, count, tension)) \
m(DrawClosedCurve2I, (GpGraphics *graphics, GpPen *pen, GDIPCONST GpPoint *points, INT count, REAL tension), (graphics, pen, points, count, tension)) \
m(GraphicsClear, (GpGraphics *graphics, ARGB color), (graphics, color)) \
m(FillRectangle, (GpGraphics *graphics, GpBrush *brush, REAL x, REAL y, REAL width, REAL height), (graphics, brush, x, y, width, height)) \
m(FillRectangleI, (GpGraphics *graphics, GpBrush *brush, INT x, INT y, INT width, INT height), (graphics, brush, x, y, width, height)) \
m(FillRectangles, (GpGraphics *graphics, GpBrush *brush, GDIPCONST GpRectF *rects, INT count), (graphics, brush, rects, count)) \
m(FillRectanglesI, (GpGraphics *graphics, GpBrush *brush, GDIPCONST GpRect *rects, INT count), (graphics, brush, rects, count)) \
m(FillPolygon, (GpGraphics *graphics, GpBrush *brush, GDIPCONST GpPointF *points, INT count, GpFillMode fillMode), (graphics, brush, points, count, fillMode)) \
m(FillPolygonI, (GpGraphics *graphics, GpBrush *brush, GDIPCONST GpPoint *points, INT count, GpFillMode fillMode), (graphics, brush, points, count, fillMode)) \
m(FillPolygon2, (GpGraphics *graphics, GpBrush *brush, GDIPCONST GpPointF *points, INT count), (graphics, brush, points, count)) \
m(FillPolygon2I, (GpGraphics *graphics, GpBrush *brush, GDIPCONST GpPoint *points, INT count), (graphics, brush, points, count)) \
m(FillEllipse, (GpGraphics *graphics, GpBrush *brush, REAL x, REAL y, REAL width, REAL height), (graphics, brush, x, y, width, height)) \
m(FillEllipseI, (GpGraphics *graphics, GpBrush *brush, INT x, INT y, INT width, INT height), (graphics, brush, x, y, width, height)) \
m(FillPie, (GpGraphics *graphics, GpBrush *brush, REAL x, REAL y, REAL width, REAL height, REAL startAngle, REAL sweepAngle), (graphics, brush, x, y, width, height, startAngle, sweepAngle)) \
m(FillPieI, (GpGraphics *graphics, GpBrush *brush, INT x, INT y, INT width, INT height, REAL startAngle, REAL sweepAngle), (graphics, brush, x, y, width, height, startAngle, sweepAngle)) \
m(FillPath, (GpGraphics *graphics, GpBrush *brush, GpPath *path), (graphics, brush, path)) \
m(FillClosedCurve, (GpGraphics *graphics, GpBrush *brush, GDIPCONST GpPointF *points, INT count), (graphics, brush, points, count)) \
m(FillClosedCurveI, (GpGraphics *graphics, GpBrush *brush, GDIPCONST GpPoint *points, INT count), (graphics, brush, points, count)) \
m(FillClosedCurve2, (GpGraphics *graphics, GpBrush *brush, GDIPCONST GpPointF *points, INT count, REAL tension, GpFillMode fillMode), (graphics, brush, points, count, tension, fillMode)) \
m(FillClosedCurve2I, (GpGraphics *graphics, GpBrush *brush, GDIPCONST GpPoint *points, INT count, REAL tension, GpFillMode fillMode), (graphics, brush, points, count, tension, fillMode)) \
m(FillRegion, (GpGraphics *graphics, GpBrush *brush, GpRegion *region), (graphics, brush, region)) \
m(DrawImage, (GpGraphics *graphics, GpImage *image, REAL x, REAL y), (graphics, image, x, y)) \
m(DrawImageI, (GpGraphics *graphics, GpImage *image, INT x, INT y), (graphics, image, x, y)) \
m(DrawImageRect, (GpGraphics *graphics, GpImage *image, REAL x, REAL y, REAL width, REAL height), (graphics, image, x, y, width, height)) \
m(DrawImageRectI, (GpGraphics *graphics, GpImage *image, INT x, INT y, INT width, INT height), (graphics, image, x, y, width, height)) \
m(DrawImagePoints, (GpGraphics *graphics, GpImage *image, GDIPCONST GpPointF *dstpoints, INT count), (graphics, image, dstpoints, count)) \
m(DrawImagePointsI, (GpGraphics *graphics, GpImage *image, GDIPCONST GpPoint *dstpoints, INT count), (graphics, image, dstpoints, count)) \
m(DrawImagePointRect, (GpGraphics *graphics, GpImage *image, REAL x, REAL y, REAL srcx, REAL srcy, REAL srcwidth, REAL srcheight, GpUnit srcUnit), (graphics, image, x, y, srcx, srcy, srcwidth, srcheight, srcUnit)) \
m(DrawImagePointRectI, (GpGraphics *graphics, GpImage *image, INT x, INT y, INT srcx, INT srcy, INT srcwidth, INT srcheight, GpUnit srcUnit), (graphics, image, x, y, srcx, srcy, srcwidth, srcheight, srcUnit)) \
m(DrawImageRectRect, (GpGraphics *graphics, GpImage *image, REAL dstx, REAL dsty, REAL dstwidth, REAL dstheight, REAL srcx, REAL srcy, REAL srcwidth, REAL srcheight, GpUnit srcUnit, GDIPCONST GpImageAttributes* imageAttributes, DrawImageAbort callback, VOID *callbackData), (graphics, image, dstx, dsty, dstwidth, dstheight, srcx, srcy, srcwidth, srcheight, srcUnit, imageAttributes, callback, callbackData)) \
m(DrawImageRectRectI, (GpGraphics *graphics, GpImage *image, INT dstx, INT dsty, INT dstwidth, INT dstheight, INT srcx, INT srcy, INT srcwidth, INT srcheight, GpUnit srcUnit, GDIPCONST GpImageAttributes* imageAttributes, DrawImageAbort callback, VOID *callbackData), (graphics, image, dstx, dsty, dstwidth, dstheight, srcx, srcy, srcwidth, srcheight, srcUnit, imageAttributes, callback, callbackData)) \
m(DrawImagePointsRect, (GpGraphics *graphics, GpImage *image, GDIPCONST GpPointF *points, INT count, REAL srcx, REAL srcy, REAL srcwidth, REAL srcheight, GpUnit srcUnit, GDIPCONST GpImageAttributes* imageAttributes, DrawImageAbort callback, VOID *callbackData), (graphics, image, points, count, srcx, srcy, srcwidth, srcheight, srcUnit, imageAttributes, callback, callbackData)) \
m(DrawImagePointsRectI, (GpGraphics *graphics, GpImage *image, GDIPCONST GpPoint *points, INT count, INT srcx, INT srcy, INT srcwidth, INT srcheight, GpUnit srcUnit, GDIPCONST GpImageAttributes* imageAttributes, DrawImageAbort callback, VOID *callbackData), (graphics, image, points, count, srcx, srcy, srcwidth, srcheight, srcUnit, imageAttributes, callback, callbackData)) \
m(EnumerateMetafileDestPoint, (GpGraphics *graphics, GDIPCONST GpMetafile *metafile, GDIPCONST PointF & destPoint, EnumerateMetafileProc callback, VOID *callbackData, GDIPCONST GpImageAttributes *imageAttributes), (graphics, metafile, destPoint, callback, callbackData, imageAttributes)) \
m(EnumerateMetafileDestPointI, (GpGraphics *graphics, GDIPCONST GpMetafile *metafile, GDIPCONST Point & destPoint, EnumerateMetafileProc callback, VOID *callbackData, GDIPCONST GpImageAttributes *imageAttributes), (graphics, metafile, destPoint, callback, callbackData, imageAttributes)) \
m(EnumerateMetafileDestRect, (GpGraphics *graphics, GDIPCONST GpMetafile *metafile, GDIPCONST RectF & destRect, EnumerateMetafileProc callback, VOID *callbackData, GDIPCONST GpImageAttributes *imageAttributes), (graphics, metafile, destRect, callback, callbackData, imageAttributes)) \
m(EnumerateMetafileDestRectI, (GpGraphics *graphics, GDIPCONST GpMetafile *metafile, GDIPCONST Rect & destRect, EnumerateMetafileProc callback, VOID *callbackData, GDIPCONST GpImageAttributes *imageAttributes), (graphics, metafile, destRect, callback, callbackData, imageAttributes)) \
m(EnumerateMetafileDestPoints, (GpGraphics *graphics, GDIPCONST GpMetafile *metafile, GDIPCONST PointF *destPoints, INT count, EnumerateMetafileProc callback, VOID *callbackData, GDIPCONST GpImageAttributes *imageAttributes), (graphics, metafile, destPoints, count, callback, callbackData, imageAttributes)) \
m(EnumerateMetafileDestPointsI, (GpGraphics *graphics, GDIPCONST GpMetafile *metafile, GDIPCONST Point *destPoints, INT count, EnumerateMetafileProc callback, VOID *callbackData, GDIPCONST GpImageAttributes *imageAttributes), (graphics, metafile, destPoints, count, callback, callbackData, imageAttributes)) \
m(EnumerateMetafileSrcRectDestPoint, (GpGraphics *graphics, GDIPCONST GpMetafile *metafile, GDIPCONST PointF & destPoint, GDIPCONST RectF & srcRect, Unit srcUnit, EnumerateMetafileProc callback, VOID *callbackData, GDIPCONST GpImageAttributes *imageAttributes), (graphics, metafile, destPoint, srcRect, srcUnit, callback, callbackData, imageAttributes)) \
m(EnumerateMetafileSrcRectDestPointI, (GpGraphics *graphics, GDIPCONST GpMetafile *metafile, GDIPCONST Point & destPoint, GDIPCONST Rect & srcRect, Unit srcUnit, EnumerateMetafileProc callback, VOID *callbackData, GDIPCONST GpImageAttributes *imageAttributes), (graphics, metafile, destPoint, srcRect, srcUnit, callback, callbackData, imageAttributes)) \
m(EnumerateMetafileSrcRectDestRect, (GpGraphics *graphics, GDIPCONST GpMetafile *metafile, GDIPCONST RectF & destRect, GDIPCONST RectF & srcRect, Unit srcUnit, EnumerateMetafileProc callback, VOID *callbackData, GDIPCONST GpImageAttributes *imageAttributes), (graphics, metafile, destRect, srcRect, srcUnit, callback, callbackData, imageAttributes)) \
m(EnumerateMetafileSrcRectDestRectI, (GpGraphics *graphics, GDIPCONST GpMetafile *metafile, GDIPCONST Rect & destRect, GDIPCONST Rect & srcRect, Unit srcUnit, EnumerateMetafileProc callback, VOID *callbackData, GDIPCONST GpImageAttributes *imageAttributes), (graphics, metafile, destRect, srcRect, srcUnit, callback, callbackData, imageAttributes)) \
m(EnumerateMetafileSrcRectDestPoints, (GpGraphics *graphics, GDIPCONST GpMetafile *metafile, GDIPCONST PointF *destPoints, INT count, GDIPCONST RectF & srcRect, Unit srcUnit, EnumerateMetafileProc callback, VOID *callbackData, GDIPCONST GpImageAttributes *imageAttributes), (graphics, metafile, destPoints, count, srcRect, srcUnit, callback, callbackData, imageAttributes)) \
m(EnumerateMetafileSrcRectDestPointsI, (GpGraphics *graphics, GDIPCONST GpMetafile *metafile, GDIPCONST Point *destPoints, INT count, GDIPCONST Rect & srcRect, Unit srcUnit, EnumerateMetafileProc callback, VOID *callbackData, GDIPCONST GpImageAttributes *imageAttributes), (graphics, metafile, destPoints, count, srcRect, srcUnit, callback, callbackData, imageAttributes)) \
m(PlayMetafileRecord, (GDIPCONST GpMetafile *metafile, EmfPlusRecordType recordType, UINT flags, UINT dataSize, GDIPCONST BYTE *data), (metafile, recordType, flags, dataSize, data)) \
m(SetClipGraphics, (GpGraphics *graphics, GpGraphics *srcgraphics, CombineMode combineMode), (graphics, srcgraphics, combineMode)) \
m(SetClipRect, (GpGraphics *graphics, REAL x, REAL y, REAL width, REAL height, CombineMode combineMode), (graphics, x, y, width, height, combineMode)) \
m(SetClipRectI, (GpGraphics *graphics, INT x, INT y, INT width, INT height, CombineMode combineMode), (graphics, x, y, width, height, combineMode)) \
m(SetClipPath, (GpGraphics *graphics, GpPath *path, CombineMode combineMode), (graphics, path, combineMode)) \
m(SetClipRegion, (GpGraphics *graphics, GpRegion *region, CombineMode combineMode), (graphics, region, combineMode)) \
m(SetClipHrgn, (GpGraphics *graphics, HRGN hRgn, CombineMode combineMode), (graphics, hRgn, combineMode)) \
m(ResetClip, (GpGraphics *graphics), (graphics)) \
m(TranslateClip, (GpGraphics *graphics, REAL dx, REAL dy), (graphics, dx, dy)) \
m(TranslateClipI, (GpGraphics *graphics, INT dx, INT dy), (graphics, dx, dy)) \
m(GetClip, (GpGraphics *graphics, GpRegion *region), (graphics, region)) \
m(GetClipBounds, (GpGraphics *graphics, GpRectF *rect), (graphics, rect)) \
m(GetClipBoundsI, (GpGraphics *graphics, GpRect *rect), (graphics, rect)) \
m(IsClipEmpty, (GpGraphics *graphics, BOOL *result), (graphics, result)) \
m(GetVisibleClipBounds, (GpGraphics *graphics, GpRectF *rect), (graphics, rect)) \
m(GetVisibleClipBoundsI, (GpGraphics *graphics, GpRect *rect), (graphics, rect)) \
m(IsVisibleClipEmpty, (GpGraphics *graphics, BOOL *result), (graphics, result)) \
m(IsVisiblePoint, (GpGraphics *graphics, REAL x, REAL y, BOOL *result), (graphics, x, y, result)) \
m(IsVisiblePointI, (GpGraphics *graphics, INT x, INT y, BOOL *result), (graphics, x, y, result)) \
m(IsVisibleRect, (GpGraphics *graphics, REAL x, REAL y, REAL width, REAL height, BOOL *result), (graphics, x, y, width, height, result)) \
m(IsVisibleRectI, (GpGraphics *graphics, INT x, INT y, INT width, INT height, BOOL *result), (graphics, x, y, width, height, result)) \
m(SaveGraphics, (GpGraphics *graphics, GraphicsState *state), (graphics, state)) \
m(RestoreGraphics, (GpGraphics *graphics, GraphicsState state), (graphics, state)) \
m(BeginContainer, (GpGraphics *graphics, GDIPCONST GpRectF* dstrect, GDIPCONST GpRectF *srcrect, GpUnit unit, GraphicsContainer *state), (graphics, dstrect, srcrect, unit, state)) \
m(BeginContainerI, (GpGraphics *graphics, GDIPCONST GpRect* dstrect, GDIPCONST GpRect *srcrect, GpUnit unit, GraphicsContainer *state), (graphics, dstrect, srcrect, unit, state)) \
m(BeginContainer2, (GpGraphics *graphics, GraphicsContainer* state), (graphics, state)) \
m(EndContainer, (GpGraphics *graphics, GraphicsContainer state), (graphics, state)) \
m(GetMetafileHeaderFromEmf, (HENHMETAFILE hEmf, MetafileHeader *header), (hEmf, header)) \
m(GetMetafileHeaderFromFile, (GDIPCONST WCHAR* filename, MetafileHeader *header), (filename, header)) \
m(GetMetafileHeaderFromStream, (IStream *stream, MetafileHeader *header), (stream, header)) \
m(GetMetafileHeaderFromMetafile, (GpMetafile *metafile, MetafileHeader *header), (metafile, header)) \
m(GetHemfFromMetafile, (GpMetafile *metafile, HENHMETAFILE *hEmf), (metafile, hEmf)) \
m(CreateStreamOnFile, (GDIPCONST WCHAR *filename, UINT access, IStream **stream), (filename, access, stream)) \
m(CreateMetafileFromWmf, (HMETAFILE hWmf, BOOL deleteWmf, GDIPCONST WmfPlaceableFileHeader *wmfPlaceableFileHeader, GpMetafile **metafile), (hWmf, deleteWmf, wmfPlaceableFileHeader, metafile)) \
m(CreateMetafileFromEmf, (HENHMETAFILE hEmf, BOOL deleteEmf, GpMetafile **metafile), (hEmf, deleteEmf, metafile)) \
m(CreateMetafileFromFile, (GDIPCONST WCHAR* file, GpMetafile **metafile), (file, metafile)) \
m(CreateMetafileFromWmfFile, (GDIPCONST WCHAR* file, GDIPCONST WmfPlaceableFileHeader *wmfPlaceableFileHeader, GpMetafile **metafile), (file, wmfPlaceableFileHeader, metafile)) \
m(CreateMetafileFromStream, (IStream *stream, GpMetafile **metafile), (stream, metafile)) \
m(RecordMetafile, (HDC referenceHdc, EmfType type, GDIPCONST GpRectF *frameRect, MetafileFrameUnit frameUnit, GDIPCONST WCHAR *description, GpMetafile ** metafile), (referenceHdc, type, frameRect, frameUnit, description, metafile)) \
m(RecordMetafileI, (HDC referenceHdc, EmfType type, GDIPCONST GpRect *frameRect, MetafileFrameUnit frameUnit, GDIPCONST WCHAR *description, GpMetafile ** metafile), (referenceHdc, type, frameRect, frameUnit, description, metafile)) \
m(RecordMetafileFileName, (GDIPCONST WCHAR* fileName, HDC referenceHdc, EmfType type, GDIPCONST GpRectF *frameRect, MetafileFrameUnit frameUnit, GDIPCONST WCHAR *description, GpMetafile ** metafile), (fileName, referenceHdc, type, frameRect, frameUnit, description, metafile)) \
m(RecordMetafileFileNameI, (GDIPCONST WCHAR* fileName, HDC referenceHdc, EmfType type, GDIPCONST GpRect *frameRect, MetafileFrameUnit frameUnit, GDIPCONST WCHAR *description, GpMetafile ** metafile), (fileName, referenceHdc, type, frameRect, frameUnit, description, metafile)) \
m(RecordMetafileStream, (IStream *stream, HDC referenceHdc, EmfType type, GDIPCONST GpRectF *frameRect, MetafileFrameUnit frameUnit, GDIPCONST WCHAR *description, GpMetafile ** metafile), (stream, referenceHdc, type, frameRect, frameUnit, description, metafile)) \
m(RecordMetafileStreamI, (IStream *stream, HDC referenceHdc, EmfType type, GDIPCONST GpRect *frameRect, MetafileFrameUnit frameUnit, GDIPCONST WCHAR *description, GpMetafile ** metafile), (stream, referenceHdc, type, frameRect, frameUnit, description, metafile)) \
m(SetMetafileDownLevelRasterizationLimit, (GpMetafile *metafile, UINT metafileRasterizationLimitDpi), (metafile, metafileRasterizationLimitDpi)) \
m(GetMetafileDownLevelRasterizationLimit, (GDIPCONST GpMetafile *metafile, UINT *metafileRasterizationLimitDpi), (metafile, metafileRasterizationLimitDpi)) \
m(GetImageDecodersSize, (UINT *numDecoders, UINT *size), (numDecoders, size)) \
m(GetImageDecoders, (UINT numDecoders, UINT size, ImageCodecInfo *decoders), (numDecoders, size, decoders)) \
m(GetImageEncodersSize, (UINT *numEncoders, UINT *size), (numEncoders, size)) \
m(GetImageEncoders, (UINT numEncoders, UINT size, ImageCodecInfo *encoders), (numEncoders, size, encoders)) \
m(Comment, (GpGraphics* graphics, UINT sizeData, GDIPCONST BYTE *data), (graphics, sizeData, data)) \
m(CreateFontFamilyFromName, (GDIPCONST WCHAR *name, GpFontCollection *fontCollection, GpFontFamily **FontFamily), (name, fontCollection, FontFamily)) \
m(DeleteFontFamily, (GpFontFamily *FontFamily), (FontFamily)) \
m(CloneFontFamily, (GpFontFamily *FontFamily, GpFontFamily **clonedFontFamily), (FontFamily, clonedFontFamily)) \
m(GetGenericFontFamilySansSerif, (GpFontFamily **nativeFamily), (nativeFamily)) \
m(GetGenericFontFamilySerif, (GpFontFamily **nativeFamily), (nativeFamily)) \
m(GetGenericFontFamilyMonospace, (GpFontFamily **nativeFamily), (nativeFamily)) \
m(GetFamilyName, (GDIPCONST GpFontFamily *family, WCHAR name[LF_FACESIZE], LANGID language), (family, name, language)) \
m(IsStyleAvailable, (GDIPCONST GpFontFamily *family, INT style, BOOL *IsStyleAvailable), (family, style, IsStyleAvailable)) \
m(GetEmHeight, (GDIPCONST GpFontFamily *family, INT style, UINT16 *EmHeight), (family, style, EmHeight)) \
m(GetCellAscent, (GDIPCONST GpFontFamily *family, INT style, UINT16 *CellAscent), (family, style, CellAscent)) \
m(GetCellDescent, (GDIPCONST GpFontFamily *family, INT style, UINT16 *CellDescent), (family, style, CellDescent)) \
m(GetLineSpacing, (GDIPCONST GpFontFamily *family, INT style, UINT16 *LineSpacing), (family, style, LineSpacing)) \
m(CreateFontFromDC, (HDC hdc, GpFont **font), (hdc, font)) \
m(CreateFontFromLogfontA, (HDC hdc, GDIPCONST LOGFONTA *logfont, GpFont **font), (hdc, logfont, font)) \
m(CreateFontFromLogfontW, (HDC hdc, GDIPCONST LOGFONTW *logfont, GpFont **font), (hdc, logfont, font)) \
m(CreateFont, (GDIPCONST GpFontFamily *fontFamily, REAL emSize, INT style, Unit unit, GpFont **font), (fontFamily, emSize, style, unit, font)) \
m(CloneFont, (GpFont* font, GpFont** cloneFont), (font, cloneFont)) \
m(DeleteFont, (GpFont* font), (font)) \
m(GetFamily, (GpFont *font, GpFontFamily **family), (font, family)) \
m(GetFontStyle, (GpFont *font, INT *style), (font, style)) \
m(GetFontSize, (GpFont *font, REAL *size), (font, size)) \
m(GetFontUnit, (GpFont *font, Unit *unit), (font, unit)) \
m(GetFontHeight, (GDIPCONST GpFont *font, GDIPCONST GpGraphics *graphics, REAL *height), (font, graphics, height)) \
m(GetFontHeightGivenDPI, (GDIPCONST GpFont *font, REAL dpi, REAL *height), (font, dpi, height)) \
m(GetLogFontA, (GpFont *font, GpGraphics *graphics, LOGFONTA *logfontA), (font, graphics, logfontA)) \
m(GetLogFontW, (GpFont *font, GpGraphics *graphics, LOGFONTW *logfontW), (font, graphics, logfontW)) \
m(NewInstalledFontCollection, (GpFontCollection** fontCollection), (fontCollection)) \
m(NewPrivateFontCollection, (GpFontCollection** fontCollection), (fontCollection)) \
m(DeletePrivateFontCollection, (GpFontCollection** fontCollection), (fontCollection)) \
m(GetFontCollectionFamilyCount, (GpFontCollection* fontCollection, INT *numFound), (fontCollection, numFound)) \
m(GetFontCollectionFamilyList, (GpFontCollection* fontCollection, INT numSought, GpFontFamily* gpfamilies[], INT* numFound), (fontCollection, numSought, gpfamilies, numFound)) \
m(PrivateAddFontFile, (GpFontCollection* fontCollection, GDIPCONST WCHAR* filename), (fontCollection, filename)) \
m(PrivateAddMemoryFont, (GpFontCollection* fontCollection, GDIPCONST void* memory, INT length), (fontCollection, memory, length)) \
m(DrawString, (GpGraphics *graphics, GDIPCONST WCHAR *string, INT length, GDIPCONST GpFont *font, GDIPCONST RectF *layoutRect, GDIPCONST GpStringFormat *stringFormat, GDIPCONST GpBrush *brush), (graphics, string, length, font, layoutRect, stringFormat, brush)) \
m(MeasureString, (GpGraphics *graphics, GDIPCONST WCHAR *string, INT length, GDIPCONST GpFont *font, GDIPCONST RectF *layoutRect, GDIPCONST GpStringFormat *stringFormat, RectF *boundingBox, INT *codepointsFitted, INT *linesFilled), (graphics, string, length, font, layoutRect, stringFormat, boundingBox, codepointsFitted, linesFilled)) \
m(MeasureCharacterRanges, (GpGraphics *graphics, GDIPCONST WCHAR *string, INT length, GDIPCONST GpFont *font, GDIPCONST RectF &layoutRect, GDIPCONST GpStringFormat *stringFormat, INT regionCount, GpRegion **regions), (graphics, string, length, font, layoutRect, stringFormat, regionCount, regions)) \
m(DrawDriverString, (GpGraphics *graphics, GDIPCONST UINT16 *text, INT length, GDIPCONST GpFont *font, GDIPCONST GpBrush *brush, GDIPCONST PointF *positions, INT flags, GDIPCONST GpMatrix *matrix), (graphics, text, length, font, brush, positions, flags, matrix)) \
m(MeasureDriverString, (GpGraphics *graphics, GDIPCONST UINT16 *text, INT length, GDIPCONST GpFont *font, GDIPCONST PointF *positions, INT flags, GDIPCONST GpMatrix *matrix, RectF *boundingBox), (graphics, text, length, font, positions, flags, matrix, boundingBox)) \
m(CreateStringFormat, (INT formatAttributes, LANGID language, GpStringFormat **format), (formatAttributes, language, format)) \
m(StringFormatGetGenericDefault, (GpStringFormat **format), (format)) \
m(StringFormatGetGenericTypographic, (GpStringFormat **format), (format)) \
m(DeleteStringFormat, (GpStringFormat *format), (format)) \
m(CloneStringFormat, (GDIPCONST GpStringFormat *format, GpStringFormat **newFormat), (format, newFormat)) \
m(SetStringFormatFlags, (GpStringFormat *format, INT flags), (format, flags)) \
m(GetStringFormatFlags, (GDIPCONST GpStringFormat *format, INT *flags), (format, flags)) \
m(SetStringFormatAlign, (GpStringFormat *format, StringAlignment align), (format, align)) \
m(GetStringFormatAlign, (GDIPCONST GpStringFormat *format, StringAlignment *align), (format, align)) \
m(SetStringFormatLineAlign, (GpStringFormat *format, StringAlignment align), (format, align)) \
m(GetStringFormatLineAlign, (GDIPCONST GpStringFormat *format, StringAlignment *align), (format, align)) \
m(SetStringFormatTrimming, (GpStringFormat *format, StringTrimming trimming), (format, trimming)) \
m(GetStringFormatTrimming, (GDIPCONST GpStringFormat *format, StringTrimming *trimming), (format, trimming)) \
m(SetStringFormatHotkeyPrefix, (GpStringFormat *format, INT hotkeyPrefix), (format, hotkeyPrefix)) \
m(GetStringFormatHotkeyPrefix, (GDIPCONST GpStringFormat *format, INT *hotkeyPrefix), (format, hotkeyPrefix)) \
m(SetStringFormatTabStops, (GpStringFormat *format, REAL firstTabOffset, INT count, GDIPCONST REAL *tabStops), (format, firstTabOffset, count, tabStops)) \
m(GetStringFormatTabStops, (GDIPCONST GpStringFormat *format, INT count, REAL *firstTabOffset, REAL *tabStops), (format, count, firstTabOffset, tabStops)) \
m(GetStringFormatTabStopCount, (GDIPCONST GpStringFormat *format, INT *count), (format, count)) \
m(SetStringFormatDigitSubstitution, (GpStringFormat *format, LANGID language, StringDigitSubstitute substitute), (format, language, substitute)) \
m(GetStringFormatDigitSubstitution, (GDIPCONST GpStringFormat *format, LANGID *language, StringDigitSubstitute *substitute), (format, language, substitute)) \
m(GetStringFormatMeasurableCharacterRangeCount, (GDIPCONST GpStringFormat *format, INT *count), (format, count)) \
m(SetStringFormatMeasurableCharacterRanges, (GpStringFormat *format, INT rangeCount, GDIPCONST CharacterRange *ranges), (format, rangeCount, ranges)) \
m(CreateCachedBitmap, (GpBitmap *bitmap, GpGraphics *graphics, GpCachedBitmap **cachedBitmap), (bitmap, graphics, cachedBitmap)) \
m(DeleteCachedBitmap, (GpCachedBitmap *cachedBitmap), (cachedBitmap)) \
m(DrawCachedBitmap, (GpGraphics *graphics, GpCachedBitmap *cachedBitmap, INT x, INT y), (graphics, cachedBitmap, x, y)) \
m(SetImageAttributesCachedBackground, (GpImageAttributes *imageattr, BOOL enableFlag), (imageattr, enableFlag)) \
m(TestControl, (GpTestControlEnum control, void *param), (control, param)) \
// non-standard/problematic functions, to review later if needed
#if 0
// these functions don't seem to exist in the DLL even though they are
// declared in the header
m(SetImageAttributesICMMode, (GpImageAttributes *imageAttr, BOOL on), (imageAttr, on)) \
m(FontCollectionEnumerable, (GpFontCollection* fontCollection, GpGraphics* graphics, INT *numFound), (fontCollection, graphics, numFound)) \
m(FontCollectionEnumerate, (GpFontCollection* fontCollection, INT numSought, GpFontFamily* gpfamilies[], INT* numFound, GpGraphics* graphics), (fontCollection, numSought, gpfamilies, numFound, graphics)) \
GpStatus
GdipGetMetafileHeaderFromWmf(
HMETAFILE hWmf,
GDIPCONST WmfPlaceableFileHeader * wmfPlaceableFileHeader,
MetafileHeader * header
);
HPALETTE WINGDIPAPI
GdipCreateHalftonePalette();
UINT WINGDIPAPI
GdipEmfToWmfBits(
HENHMETAFILE hemf,
UINT cbData16,
LPBYTE pData16,
INT iMapMode,
INT eFlags
);
#endif // 0
// this macro expands into an invocation of the given macro m for all GDI+
// functions: m is called with the name of each function without its "Gdip" prefix
#define wxFOR_ALL_GDIP_FUNCNAMES(m) \
m(Alloc, (size_t size), (size)) \
m(Free, (void *ptr), (ptr)) \
wxFOR_ALL_GDIPLUS_STATUS_FUNCS(m)
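// Illustration (not part of the original source, names are made up): any
// macro with the signature m(name, params, args) can be plugged into the
// table above. For example, a hypothetical
//
//      #define wxDUMP_GDIP_FUNC(name, params, args) wxLogDebug(wxT(#name));
//
// would make wxFOR_ALL_GDIP_FUNCNAMES(wxDUMP_GDIP_FUNC) expand to one
// wxLogDebug() call per wrapped function ("Alloc", "Free", "FillPath", ...);
// the typedef, member and loader lists below are generated from this single
// table in exactly the same way.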
// unfortunately we need a separate macro for these functions as they use the
// "Gdiplus" prefix instead of the "Gdip" one used by (almost) all the others
// (and also the WINAPI calling convention instead of WINGDIPAPI, although
// both happen to be stdcall in fact)
#define wxFOR_ALL_GDIPLUS_FUNCNAMES(m) \
m(Startup, (ULONG_PTR *token, \
const GdiplusStartupInput *input, \
GdiplusStartupOutput *output), \
(token, input, output)) \
m(Shutdown, (ULONG_PTR token), (token)) \
m(NotificationHook, (ULONG_PTR *token), (token)) \
m(NotificationUnhook, (ULONG_PTR token), (token))
#define wxFOR_ALL_FUNCNAMES(m) \
wxFOR_ALL_GDIP_FUNCNAMES(m) \
wxFOR_ALL_GDIPLUS_FUNCNAMES(m)
// ----------------------------------------------------------------------------
// declare typedefs for types of all GDI+ functions
// ----------------------------------------------------------------------------
extern "C"
{
typedef void* (WINGDIPAPI *wxGDIPLUS_FUNC_T(Alloc))(size_t size);
typedef void (WINGDIPAPI *wxGDIPLUS_FUNC_T(Free))(void* ptr);
typedef Status
(WINAPI *wxGDIPLUS_FUNC_T(Startup))(ULONG_PTR *token,
const GdiplusStartupInput *input,
GdiplusStartupOutput *output);
typedef void (WINAPI *wxGDIPLUS_FUNC_T(Shutdown))(ULONG_PTR token);
typedef GpStatus (WINAPI *wxGDIPLUS_FUNC_T(NotificationHook))(ULONG_PTR *token);
typedef void (WINAPI *wxGDIPLUS_FUNC_T(NotificationUnhook))(ULONG_PTR token);
#define wxDECL_GDIPLUS_FUNC_TYPE(name, params, args) \
typedef GpStatus (WINGDIPAPI *wxGDIPLUS_FUNC_T(name)) params ;
wxFOR_ALL_GDIPLUS_STATUS_FUNCS(wxDECL_GDIPLUS_FUNC_TYPE)
#undef wxDECL_GDIPLUS_FUNC_TYPE
// Special hack for w32api headers that reference this variable, which is
// normally defined in the w32api-specific gdiplus.lib. As we don't link with
// that library but load gdiplus.dll dynamically instead, the variable is not
// defined in our case, resulting in linking errors -- so just provide it
// ourselves. It doesn't matter where it lives, and if the Cygwin headers are
// modified to not use it in the future, it's not a big deal either: we'll
// just have an unused pointer.
#if defined(__CYGWIN__) || defined(__MINGW32__)
void *_GdipStringFormatCachedGenericTypographic = NULL;
#endif // __CYGWIN__ || __MINGW32__
} // extern "C"
// ============================================================================
// wxGdiPlus helper class
// ============================================================================
class wxGdiPlus
{
public:
// load GDI+ DLL when we're called for the first time, return true on
// success or false on failure
static bool Initialize()
{
if ( m_initialized == -1 )
m_initialized = DoInit();
return m_initialized == 1;
}
// check if we're initialized without loading the GDI+ DLL
static bool IsInitialized()
{
return m_initialized == 1;
}
// shutdown: should be called on termination to unload the GDI+ DLL; safe
// to call even if we never loaded it
static void Terminate()
{
if ( m_hdll )
{
wxDynamicLibrary::Unload(m_hdll);
m_hdll = 0;
}
m_initialized = -1;
}
// define function pointers as members
#define wxDECL_GDIPLUS_FUNC_MEMBER(name, params, args) \
static wxGDIPLUS_FUNC_T(name) name;
wxFOR_ALL_FUNCNAMES(wxDECL_GDIPLUS_FUNC_MEMBER)
#undef wxDECL_GDIPLUS_FUNC_MEMBER
private:
// do load the GDI+ DLL and bind all the functions
static bool DoInit();
// initially -1 meaning unknown, set to 0 (failure) or 1 (success) by Initialize()
static int m_initialized;
// handle of the GDI+ DLL if we loaded it successfully
static wxDllType m_hdll;
};
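// Rough lifecycle sketch (illustrative only, the variable names are made up):
//
//      if ( wxGdiPlus::Initialize() )              // loads gdiplus.dll on demand
//          wxGdiPlus::FillPath(gr, brush, path);   // call through the bound pointer
//      ...
//      wxGdiPlus::Terminate();                     // done by wxGdiPlusModule on exit
//
// The exported stubs at the end of this file follow exactly this pattern.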
#define wxINIT_GDIPLUS_FUNC(name, params, args) \
wxGDIPLUS_FUNC_T(name) wxGdiPlus::name = NULL;
wxFOR_ALL_FUNCNAMES(wxINIT_GDIPLUS_FUNC)
#undef wxINIT_GDIPLUS_FUNC
int wxGdiPlus::m_initialized = -1;
wxDllType wxGdiPlus::m_hdll = 0;
/* static */
bool wxGdiPlus::DoInit()
{
// we're prepared to handle errors ourselves, so suppress log messages about them
wxLogNull noLog;
wxDynamicLibrary dllGdip(wxT("gdiplus.dll"), wxDL_VERBATIM);
if ( !dllGdip.IsLoaded() )
return false;
// use RawGetSymbol() for efficiency, we have ~600 functions to load...
#define wxDO_LOAD_FUNC(name, namedll) \
name = (wxGDIPLUS_FUNC_T(name))dllGdip.RawGetSymbol(namedll); \
if ( !name ) \
return false;
#define wxLOAD_GDIPLUS_FUNC(name, params, args) \
wxDO_LOAD_FUNC(name, wxT("Gdiplus") wxSTRINGIZE_T(name))
wxFOR_ALL_GDIPLUS_FUNCNAMES(wxLOAD_GDIPLUS_FUNC)
#undef wxLOAD_GDIPLUS_FUNC
#define wxLOAD_GDIP_FUNC(name, params, args) \
wxDO_LOAD_FUNC(name, wxT("Gdip") wxSTRINGIZE_T(name))
wxFOR_ALL_GDIP_FUNCNAMES(wxLOAD_GDIP_FUNC)
#undef wxLOAD_GDIP_FUNC
// ok, prevent the DLL from being unloaded right now, we'll do it later
m_hdll = dllGdip.Detach();
return true;
}
// ============================================================================
// module to unload GDI+ DLL on program termination
// ============================================================================
class wxGdiPlusModule : public wxModule
{
public:
virtual bool OnInit() { return true; }
virtual void OnExit() { wxGdiPlus::Terminate(); }
DECLARE_DYNAMIC_CLASS(wxGdiPlusModule)
};
IMPLEMENT_DYNAMIC_CLASS(wxGdiPlusModule, wxModule)
// ============================================================================
// implementation of the functions themselves
// ============================================================================
extern "C"
{
void* WINGDIPAPI
GdipAlloc(size_t size)
{
return wxGdiPlus::Initialize() ? wxGdiPlus::Alloc(size) : NULL;
}
void WINGDIPAPI
GdipFree(void* ptr)
{
if ( wxGdiPlus::Initialize() )
wxGdiPlus::Free(ptr);
}
Status WINAPI
GdiplusStartup(ULONG_PTR *token,
const GdiplusStartupInput *input,
GdiplusStartupOutput *output)
{
return wxGdiPlus::Initialize() ? wxGdiPlus::Startup(token, input, output)
: GdiplusNotInitialized;
}
void WINAPI
GdiplusShutdown(ULONG_PTR token)
{
if ( wxGdiPlus::IsInitialized() )
wxGdiPlus::Shutdown(token);
}
#define wxIMPL_GDIPLUS_FUNC(name, params, args) \
GpStatus WINGDIPAPI \
Gdip##name params \
{ \
return wxGdiPlus::Initialize() ? wxGdiPlus::name args \
: GdiplusNotInitialized; \
}
wxFOR_ALL_GDIPLUS_STATUS_FUNCS(wxIMPL_GDIPLUS_FUNC)
#undef wxIMPL_GDIPLUS_FUNC
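// For reference, the table entry for FillPath expands through
// wxIMPL_GDIPLUS_FUNC into a stub equivalent to (reformatted for clarity):
//
//      GpStatus WINGDIPAPI
//      GdipFillPath(GpGraphics *graphics, GpBrush *brush, GpPath *path)
//      {
//          return wxGdiPlus::Initialize()
//                    ? wxGdiPlus::FillPath(graphics, brush, path)
//                    : GdiplusNotInitialized;
//      }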
} // extern "C"
#endif // wxUSE_GRAPHICS_CONTEXT<|fim▁end|> | m(TransformPoints, (GpGraphics *graphics, GpCoordinateSpace destSpace, GpCoordinateSpace srcSpace, GpPointF *points, INT count), (graphics, destSpace, srcSpace, points, count)) \
m(TransformPointsI, (GpGraphics *graphics, GpCoordinateSpace destSpace, GpCoordinateSpace srcSpace, GpPoint *points, INT count), (graphics, destSpace, srcSpace, points, count)) \
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use cpython::*;
use cpython_ext::{PyPath, ResultPyErrExt, Str};
pub fn init_module(py: Python, package: &str) -> PyResult<PyModule> {
let name = [package, "fs"].join(".");<|fim▁hole|> m.add(py, "fstype", py_fn!(py, fstype(path: &PyPath)))?;
Ok(m)
}
fn fstype(py: Python, path: &PyPath) -> PyResult<Str> {
let fstype = fsinfo::fstype(path).map_pyerr(py)?;
Ok(fstype.to_string().into())
}<|fim▁end|> | let m = PyModule::new(py, &name)?; |
<|file_name|>test-success.py<|end_file_name|><|fim▁begin|>"""
Test connecting to a server.
"""
from gabbletest import exec_test
import constants as cs
def test(q, bus, conn, stream):
conn.Connect()
q.expect('dbus-signal', signal='StatusChanged', args=[cs.CONN_STATUS_CONNECTING, cs.CSR_REQUESTED])<|fim▁hole|> q.expect('dbus-signal', signal='StatusChanged', args=[cs.CONN_STATUS_CONNECTED, cs.CSR_REQUESTED])
if __name__ == '__main__':
exec_test(test, do_connect=False)<|fim▁end|> | q.expect('stream-authenticated')
q.expect('dbus-signal', signal='PresenceUpdate',
args=[{1L: (0L, {u'available': {}})}]) |
<|file_name|>file.dot.pb.go<|end_file_name|><|fim▁begin|>// Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: file.dot.proto
/*
Package filedotname is a generated protocol buffer package.
It is generated from these files:
file.dot.proto
It has these top-level messages:
M
*/
package filedotname
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
import _ "github.com/gogo/protobuf/gogoproto"
import github_com_gogo_protobuf_protoc_gen_gogo_descriptor "github.com/gogo/protobuf/protoc-gen-gogo/descriptor"
import github_com_gogo_protobuf_proto "github.com/gogo/protobuf/proto"
import compress_gzip "compress/gzip"
import bytes "bytes"
import io_ioutil "io/ioutil"
import strings "strings"
import reflect "reflect"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion2 // please upgrade the proto package
type M struct {
A *string `protobuf:"bytes,1,opt,name=a" json:"a,omitempty"`
XXX_unrecognized []byte `json:"-"`
}
func (m *M) Reset() { *m = M{} }
func (*M) ProtoMessage() {}
func (*M) Descriptor() ([]byte, []int) { return fileDescriptorFileDot, []int{0} }
func init() {
proto.RegisterType((*M)(nil), "filedotname.M")
}
func (this *M) Description() (desc *github_com_gogo_protobuf_protoc_gen_gogo_descriptor.FileDescriptorSet) {
return FileDotDescription()
}
func FileDotDescription() (desc *github_com_gogo_protobuf_protoc_gen_gogo_descriptor.FileDescriptorSet) {
d := &github_com_gogo_protobuf_protoc_gen_gogo_descriptor.FileDescriptorSet{}
var gzipped = []byte{
// 3657 bytes of a gzipped FileDescriptorSet
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x5a, 0x5d, 0x70, 0x1b, 0xd7,
0x75, 0xe6, 0xe2, 0x87, 0x04, 0x0e, 0x40, 0x70, 0xb9, 0xa4, 0x29, 0x88, 0x8e, 0x21, 0x8a, 0xb1,
0x63, 0xda, 0x6e, 0xa8, 0x8c, 0x6c, 0xc9, 0xf2, 0xaa, 0x89, 0x06, 0x04, 0x21, 0x06, 0x2a, 0x49,
0x20, 0x0b, 0x32, 0x96, 0xd2, 0x87, 0x9d, 0xe5, 0xee, 0x05, 0xb8, 0xd2, 0x62, 0x17, 0xd9, 0x5d,
0x48, 0xa2, 0x9e, 0xd4, 0x71, 0x7f, 0x26, 0xd3, 0xe9, 0x7f, 0x67, 0x9a, 0xb8, 0x8e, 0xdb, 0x66,
0xa6, 0x75, 0x9a, 0x34, 0x6d, 0xd2, 0x9f, 0x34, 0xd3, 0xa7, 0xbc, 0xa4, 0xf5, 0x53, 0x27, 0x79,
0xeb, 0x43, 0x1f, 0x2c, 0xc6, 0x33, 0x75, 0x5b, 0xb5, 0x75, 0x1b, 0xcd, 0x34, 0x33, 0x7a, 0xc9,
0xdc, 0xbf, 0xc5, 0x2e, 0x00, 0x72, 0xc1, 0xcc, 0x38, 0x7e, 0x22, 0xef, 0xb9, 0xe7, 0xfb, 0xf6,
0xec, 0xb9, 0xe7, 0x9e, 0x73, 0xee, 0x5d, 0xc0, 0x0f, 0x2f, 0xc0, 0x52, 0xdb, 0x71, 0xda, 0x16,
0x3a, 0xd7, 0x75, 0x1d, 0xdf, 0xd9, 0xeb, 0xb5, 0xce, 0x19, 0xc8, 0xd3, 0x5d, 0xb3, 0xeb, 0x3b,
0xee, 0x2a, 0x91, 0x49, 0x33, 0x54, 0x63, 0x95, 0x6b, 0x2c, 0x6f, 0xc1, 0xec, 0x55, 0xd3, 0x42,
0xeb, 0x81, 0x62, 0x13, 0xf9, 0xd2, 0x25, 0x48, 0xb5, 0x4c, 0x0b, 0x15, 0x85, 0xa5, 0xe4, 0x4a,
0xee, 0xfc, 0xd3, 0xab, 0x03, 0xa0, 0xd5, 0x28, 0xa2, 0x81, 0xc5, 0x0a, 0x41, 0x2c, 0xbf, 0x9b,
0x82, 0xb9, 0x11, 0xb3, 0x92, 0x04, 0x29, 0x5b, 0xeb, 0x60, 0x46, 0x61, 0x25, 0xab, 0x90, 0xff,
0xa5, 0x22, 0x4c, 0x75, 0x35, 0xfd, 0x96, 0xd6, 0x46, 0xc5, 0x04, 0x11, 0xf3, 0xa1, 0x54, 0x02,
0x30, 0x50, 0x17, 0xd9, 0x06, 0xb2, 0xf5, 0x83, 0x62, 0x72, 0x29, 0xb9, 0x92, 0x55, 0x42, 0x12,
0xe9, 0x05, 0x98, 0xed, 0xf6, 0xf6, 0x2c, 0x53, 0x57, 0x43, 0x6a, 0xb0, 0x94, 0x5c, 0x49, 0x2b,
0x22, 0x9d, 0x58, 0xef, 0x2b, 0x3f, 0x0b, 0x33, 0x77, 0x90, 0x76, 0x2b, 0xac, 0x9a, 0x23, 0xaa,
0x05, 0x2c, 0x0e, 0x29, 0x56, 0x20, 0xdf, 0x41, 0x9e, 0xa7, 0xb5, 0x91, 0xea, 0x1f, 0x74, 0x51,
0x31, 0x45, 0xde, 0x7e, 0x69, 0xe8, 0xed, 0x07, 0xdf, 0x3c, 0xc7, 0x50, 0x3b, 0x07, 0x5d, 0x24,
0x95, 0x21, 0x8b, 0xec, 0x5e, 0x87, 0x32, 0xa4, 0x8f, 0xf0, 0x5f, 0xd5, 0xee, 0x75, 0x06, 0x59,
0x32, 0x18, 0xc6, 0x28, 0xa6, 0x3c, 0xe4, 0xde, 0x36, 0x75, 0x54, 0x9c, 0x24, 0x04, 0xcf, 0x0e,
0x11, 0x34, 0xe9, 0xfc, 0x20, 0x07, 0xc7, 0x49, 0x15, 0xc8, 0xa2, 0xbb, 0x3e, 0xb2, 0x3d, 0xd3,
0xb1, 0x8b, 0x53, 0x84, 0xe4, 0x99, 0x11, 0xab, 0x88, 0x2c, 0x63, 0x90, 0xa2, 0x8f, 0x93, 0x2e,
0xc2, 0x94, 0xd3, 0xf5, 0x4d, 0xc7, 0xf6, 0x8a, 0x99, 0x25, 0x61, 0x25, 0x77, 0xfe, 0x23, 0x23,
0x03, 0xa1, 0x4e, 0x75, 0x14, 0xae, 0x2c, 0xd5, 0x40, 0xf4, 0x9c, 0x9e, 0xab, 0x23, 0x55, 0x77,
0x0c, 0xa4, 0x9a, 0x76, 0xcb, 0x29, 0x66, 0x09, 0xc1, 0x99, 0xe1, 0x17, 0x21, 0x8a, 0x15, 0xc7,
0x40, 0x35, 0xbb, 0xe5, 0x28, 0x05, 0x2f, 0x32, 0x96, 0x16, 0x60, 0xd2, 0x3b, 0xb0, 0x7d, 0xed,
0x6e, 0x31, 0x4f, 0x22, 0x84, 0x8d, 0x96, 0xff, 0x3f, 0x0d, 0x33, 0xe3, 0x84, 0xd8, 0x65, 0x48,
0xb7, 0xf0, 0x5b, 0x16, 0x13, 0x27, 0xf1, 0x01, 0xc5, 0x44, 0x9d, 0x38, 0xf9, 0x53, 0x3a, 0xb1,
0x0c, 0x39, 0x1b, 0x79, 0x3e, 0x32, 0x68, 0x44, 0x24, 0xc7, 0x8c, 0x29, 0xa0, 0xa0, 0xe1, 0x90,
0x4a, 0xfd, 0x54, 0x21, 0x75, 0x1d, 0x66, 0x02, 0x93, 0x54, 0x57, 0xb3, 0xdb, 0x3c, 0x36, 0xcf,
0xc5, 0x59, 0xb2, 0x5a, 0xe5, 0x38, 0x05, 0xc3, 0x94, 0x02, 0x8a, 0x8c, 0xa5, 0x75, 0x00, 0xc7,
0x46, 0x4e, 0x4b, 0x35, 0x90, 0x6e, 0x15, 0x33, 0x47, 0x78, 0xa9, 0x8e, 0x55, 0x86, 0xbc, 0xe4,
0x50, 0xa9, 0x6e, 0x49, 0xaf, 0xf4, 0x43, 0x6d, 0xea, 0x88, 0x48, 0xd9, 0xa2, 0x9b, 0x6c, 0x28,
0xda, 0x76, 0xa1, 0xe0, 0x22, 0x1c, 0xf7, 0xc8, 0x60, 0x6f, 0x96, 0x25, 0x46, 0xac, 0xc6, 0xbe,
0x99, 0xc2, 0x60, 0xf4, 0xc5, 0xa6, 0xdd, 0xf0, 0x50, 0xfa, 0x28, 0x04, 0x02, 0x95, 0x84, 0x15,
0x90, 0x2c, 0x94, 0xe7, 0xc2, 0x6d, 0xad, 0x83, 0x16, 0x2f, 0x41, 0x21, 0xea, 0x1e, 0x69, 0x1e,
0xd2, 0x9e, 0xaf, 0xb9, 0x3e, 0x89, 0xc2, 0xb4, 0x42, 0x07, 0x92, 0x08, 0x49, 0x64, 0x1b, 0x24,
0xcb, 0xa5, 0x15, 0xfc, 0xef, 0xe2, 0xcb, 0x30, 0x1d, 0x79, 0xfc, 0xb8, 0xc0, 0xe5, 0x2f, 0x4e,
0xc2, 0xfc, 0xa8, 0x98, 0x1b, 0x19, 0xfe, 0x0b, 0x30, 0x69, 0xf7, 0x3a, 0x7b, 0xc8, 0x2d, 0x26,
0x09, 0x03, 0x1b, 0x49, 0x65, 0x48, 0x5b, 0xda, 0x1e, 0xb2, 0x8a, 0xa9, 0x25, 0x61, 0xa5, 0x70,
0xfe, 0x85, 0xb1, 0xa2, 0x7a, 0x75, 0x13, 0x43, 0x14, 0x8a, 0x94, 0x3e, 0x05, 0x29, 0x96, 0xe2,
0x30, 0xc3, 0xf3, 0xe3, 0x31, 0xe0, 0x58, 0x54, 0x08, 0x4e, 0x7a, 0x12, 0xb2, 0xf8, 0x2f, 0xf5,
0xed, 0x24, 0xb1, 0x39, 0x83, 0x05, 0xd8, 0xaf, 0xd2, 0x22, 0x64, 0x48, 0x98, 0x19, 0x88, 0x97,
0x86, 0x60, 0x8c, 0x17, 0xc6, 0x40, 0x2d, 0xad, 0x67, 0xf9, 0xea, 0x6d, 0xcd, 0xea, 0x21, 0x12,
0x30, 0x59, 0x25, 0xcf, 0x84, 0x9f, 0xc5, 0x32, 0xe9, 0x0c, 0xe4, 0x68, 0x54, 0x9a, 0xb6, 0x81,
0xee, 0x92, 0xec, 0x93, 0x56, 0x68, 0xa0, 0xd6, 0xb0, 0x04, 0x3f, 0xfe, 0xa6, 0xe7, 0xd8, 0x7c,
0x69, 0xc9, 0x23, 0xb0, 0x80, 0x3c, 0xfe, 0xe5, 0xc1, 0xc4, 0xf7, 0xd4, 0xe8, 0xd7, 0x1b, 0x8c,
0xc5, 0xe5, 0x6f, 0x27, 0x20, 0x45, 0xf6, 0xdb, 0x0c, 0xe4, 0x76, 0x6e, 0x34, 0xaa, 0xea, 0x7a,
0x7d, 0x77, 0x6d, 0xb3, 0x2a, 0x0a, 0x52, 0x01, 0x80, 0x08, 0xae, 0x6e, 0xd6, 0xcb, 0x3b, 0x62,
0x22, 0x18, 0xd7, 0xb6, 0x77, 0x2e, 0xbe, 0x24, 0x26, 0x03, 0xc0, 0x2e, 0x15, 0xa4, 0xc2, 0x0a,
0x2f, 0x9e, 0x17, 0xd3, 0x92, 0x08, 0x79, 0x4a, 0x50, 0xbb, 0x5e, 0x5d, 0xbf, 0xf8, 0x92, 0x38,
0x19, 0x95, 0xbc, 0x78, 0x5e, 0x9c, 0x92, 0xa6, 0x21, 0x4b, 0x24, 0x6b, 0xf5, 0xfa, 0xa6, 0x98,
0x09, 0x38, 0x9b, 0x3b, 0x4a, 0x6d, 0x7b, 0x43, 0xcc, 0x06, 0x9c, 0x1b, 0x4a, 0x7d, 0xb7, 0x21,
0x42, 0xc0, 0xb0, 0x55, 0x6d, 0x36, 0xcb, 0x1b, 0x55, 0x31, 0x17, 0x68, 0xac, 0xdd, 0xd8, 0xa9,
0x36, 0xc5, 0x7c, 0xc4, 0xac, 0x17, 0xcf, 0x8b, 0xd3, 0xc1, 0x23, 0xaa, 0xdb, 0xbb, 0x5b, 0x62,
0x41, 0x9a, 0x85, 0x69, 0xfa, 0x08, 0x6e, 0xc4, 0xcc, 0x80, 0xe8, 0xe2, 0x4b, 0xa2, 0xd8, 0x37,
0x84, 0xb2, 0xcc, 0x46, 0x04, 0x17, 0x5f, 0x12, 0xa5, 0xe5, 0x0a, 0xa4, 0x49, 0x74, 0x49, 0x12,
0x14, 0x36, 0xcb, 0x6b, 0xd5, 0x4d, 0xb5, 0xde, 0xd8, 0xa9, 0xd5, 0xb7, 0xcb, 0x9b, 0xa2, 0xd0,
0x97, 0x29, 0xd5, 0xcf, 0xec, 0xd6, 0x94, 0xea, 0xba, 0x98, 0x08, 0xcb, 0x1a, 0xd5, 0xf2, 0x4e,
0x75, 0x5d, 0x4c, 0x2e, 0xeb, 0x30, 0x3f, 0x2a, 0xcf, 0x8c, 0xdc, 0x19, 0xa1, 0x25, 0x4e, 0x1c,
0xb1, 0xc4, 0x84, 0x6b, 0x68, 0x89, 0xbf, 0x22, 0xc0, 0xdc, 0x88, 0x5c, 0x3b, 0xf2, 0x21, 0x57,
0x20, 0x4d, 0x43, 0x94, 0x56, 0x9f, 0xe7, 0x46, 0x26, 0x6d, 0x12, 0xb0, 0x43, 0x15, 0x88, 0xe0,
0xc2, 0x15, 0x38, 0x79, 0x44, 0x05, 0xc6, 0x14, 0x43, 0x46, 0xbe, 0x26, 0x40, 0xf1, 0x28, 0xee,
0x98, 0x44, 0x91, 0x88, 0x24, 0x8a, 0xcb, 0x83, 0x06, 0x9c, 0x3d, 0xfa, 0x1d, 0x86, 0xac, 0x78,
0x4b, 0x80, 0x85, 0xd1, 0x8d, 0xca, 0x48, 0x1b, 0x3e, 0x05, 0x93, 0x1d, 0xe4, 0xef, 0x3b, 0xbc,
0x58, 0x7f, 0x6c, 0x44, 0x09, 0xc0, 0xd3, 0x83, 0xbe, 0x62, 0xa8, 0x70, 0x0d, 0x49, 0x1e, 0xd5,
0x6d, 0x50, 0x6b, 0x86, 0x2c, 0xfd, 0x42, 0x02, 0x9e, 0x18, 0x49, 0x3e, 0xd2, 0xd0, 0xa7, 0x00,
0x4c, 0xbb, 0xdb, 0xf3, 0x69, 0x41, 0xa6, 0xf9, 0x29, 0x4b, 0x24, 0x64, 0xef, 0xe3, 0xdc, 0xd3,
0xf3, 0x83, 0xf9, 0x24, 0x99, 0x07, 0x2a, 0x22, 0x0a, 0x97, 0xfa, 0x86, 0xa6, 0x88, 0xa1, 0xa5,
0x23, 0xde, 0x74, 0xa8, 0xd6, 0x7d, 0x02, 0x44, 0xdd, 0x32, 0x91, 0xed, 0xab, 0x9e, 0xef, 0x22,
0xad, 0x63, 0xda, 0x6d, 0x92, 0x80, 0x33, 0x72, 0xba, 0xa5, 0x59, 0x1e, 0x52, 0x66, 0xe8, 0x74,
0x93, 0xcf, 0x62, 0x04, 0xa9, 0x32, 0x6e, 0x08, 0x31, 0x19, 0x41, 0xd0, 0xe9, 0x00, 0xb1, 0xfc,
0xf5, 0x29, 0xc8, 0x85, 0xda, 0x3a, 0xe9, 0x2c, 0xe4, 0x6f, 0x6a, 0xb7, 0x35, 0x95, 0xb7, 0xea,
0xd4, 0x13, 0x39, 0x2c, 0x6b, 0xb0, 0x76, 0xfd, 0x13, 0x30, 0x4f, 0x54, 0x9c, 0x9e, 0x8f, 0x5c,
0x55, 0xb7, 0x34, 0xcf, 0x23, 0x4e, 0xcb, 0x10, 0x55, 0x09, 0xcf, 0xd5, 0xf1, 0x54, 0x85, 0xcf,
0x48, 0x17, 0x60, 0x8e, 0x20, 0x3a, 0x3d, 0xcb, 0x37, 0xbb, 0x16, 0x52, 0xf1, 0xe1, 0xc1, 0x23,
0x89, 0x38, 0xb0, 0x6c, 0x16, 0x6b, 0x6c, 0x31, 0x05, 0x6c, 0x91, 0x27, 0xad, 0xc3, 0x53, 0x04,
0xd6, 0x46, 0x36, 0x72, 0x35, 0x1f, 0xa9, 0xe8, 0xf3, 0x3d, 0xcd, 0xf2, 0x54, 0xcd, 0x36, 0xd4,
0x7d, 0xcd, 0xdb, 0x2f, 0xce, 0x63, 0x82, 0xb5, 0x44, 0x51, 0x50, 0x4e, 0x63, 0xc5, 0x0d, 0xa6,
0x57, 0x25, 0x6a, 0x65, 0xdb, 0xf8, 0xb4, 0xe6, 0xed, 0x4b, 0x32, 0x2c, 0x10, 0x16, 0xcf, 0x77,
0x4d, 0xbb, 0xad, 0xea, 0xfb, 0x48, 0xbf, 0xa5, 0xf6, 0xfc, 0xd6, 0xa5, 0xe2, 0x93, 0xe1, 0xe7,
0x13, 0x0b, 0x9b, 0x44, 0xa7, 0x82, 0x55, 0x76, 0xfd, 0xd6, 0x25, 0xa9, 0x09, 0x79, 0xbc, 0x18,
0x1d, 0xf3, 0x1e, 0x52, 0x5b, 0x8e, 0x4b, 0x2a, 0x4b, 0x61, 0xc4, 0xce, 0x0e, 0x79, 0x70, 0xb5,
0xce, 0x00, 0x5b, 0x8e, 0x81, 0xe4, 0x74, 0xb3, 0x51, 0xad, 0xae, 0x2b, 0x39, 0xce, 0x72, 0xd5,
0x71, 0x71, 0x40, 0xb5, 0x9d, 0xc0, 0xc1, 0x39, 0x1a, 0x50, 0x6d, 0x87, 0xbb, 0xf7, 0x02, 0xcc,
0xe9, 0x3a, 0x7d, 0x67, 0x53, 0x57, 0x59, 0x8b, 0xef, 0x15, 0xc5, 0x88, 0xb3, 0x74, 0x7d, 0x83,
0x2a, 0xb0, 0x18, 0xf7, 0xa4, 0x57, 0xe0, 0x89, 0xbe, 0xb3, 0xc2, 0xc0, 0xd9, 0xa1, 0xb7, 0x1c,
0x84, 0x5e, 0x80, 0xb9, 0xee, 0xc1, 0x30, 0x50, 0x8a, 0x3c, 0xb1, 0x7b, 0x30, 0x08, 0x7b, 0x86,
0x1c, 0xdb, 0x5c, 0xa4, 0x6b, 0x3e, 0x32, 0x8a, 0xa7, 0xc2, 0xda, 0xa1, 0x09, 0xe9, 0x1c, 0x88,
0xba, 0xae, 0x22, 0x5b, 0xdb, 0xb3, 0x90, 0xaa, 0xb9, 0xc8, 0xd6, 0xbc, 0xe2, 0x99, 0xb0, 0x72,
0x41, 0xd7, 0xab, 0x64, 0xb6, 0x4c, 0x26, 0xa5, 0xe7, 0x61, 0xd6, 0xd9, 0xbb, 0xa9, 0xd3, 0xc8,
0x52, 0xbb, 0x2e, 0x6a, 0x99, 0x77, 0x8b, 0x4f, 0x13, 0x37, 0xcd, 0xe0, 0x09, 0x12, 0x57, 0x0d,
0x22, 0x96, 0x9e, 0x03, 0x51, 0xf7, 0xf6, 0x35, 0xb7, 0x4b, 0x4a, 0xbb, 0xd7, 0xd5, 0x74, 0x54,
0x7c, 0x86, 0xaa, 0x52, 0xf9, 0x36, 0x17, 0xe3, 0xc8, 0xf6, 0xee, 0x98, 0x2d, 0x9f, 0x33, 0x3e,
0x4b, 0x23, 0x9b, 0xc8, 0x18, 0xdb, 0x75, 0x98, 0xef, 0xd9, 0xa6, 0xed, 0x23, 0xb7, 0xeb, 0x22,
0xdc, 0xc4, 0xd3, 0x9d, 0x58, 0xfc, 0xb7, 0xa9, 0x23, 0xda, 0xf0, 0xdd, 0xb0, 0x36, 0x0d, 0x00,
0x65, 0xae, 0x37, 0x2c, 0x5c, 0x96, 0x21, 0x1f, 0x8e, 0x0b, 0x29, 0x0b, 0x34, 0x32, 0x44, 0x01,
0xd7, 0xd8, 0x4a, 0x7d, 0x1d, 0x57, 0xc7, 0xcf, 0x55, 0xc5, 0x04, 0xae, 0xd2, 0x9b, 0xb5, 0x9d,
0xaa, 0xaa, 0xec, 0x6e, 0xef, 0xd4, 0xb6, 0xaa, 0x62, 0xf2, 0xf9, 0x6c, 0xe6, 0xbd, 0x29, 0xf1,
0xfe, 0xfd, 0xfb, 0xf7, 0x13, 0xcb, 0xdf, 0x4b, 0x40, 0x21, 0xda, 0x19, 0x4b, 0x3f, 0x0f, 0xa7,
0xf8, 0x31, 0xd6, 0x43, 0xbe, 0x7a, 0xc7, 0x74, 0x49, 0xa8, 0x76, 0x34, 0xda, 0x5b, 0x06, 0x5e,
0x9e, 0x67, 0x5a, 0x4d, 0xe4, 0xbf, 0x6a, 0xba, 0x38, 0x10, 0x3b, 0x9a, 0x2f, 0x6d, 0xc2, 0x19,
0xdb, 0x51, 0x3d, 0x5f, 0xb3, 0x0d, 0xcd, 0x35, 0xd4, 0xfe, 0x05, 0x82, 0xaa, 0xe9, 0x3a, 0xf2,
0x3c, 0x87, 0x96, 0x88, 0x80, 0xe5, 0x23, 0xb6, 0xd3, 0x64, 0xca, 0xfd, 0xdc, 0x59, 0x66, 0xaa,
0x03, 0x11, 0x91, 0x3c, 0x2a, 0x22, 0x9e, 0x84, 0x6c, 0x47, 0xeb, 0xaa, 0xc8, 0xf6, 0xdd, 0x03,
0xd2, 0xcf, 0x65, 0x94, 0x4c, 0x47, 0xeb, 0x56, 0xf1, 0xf8, 0x83, 0x5b, 0x83, 0xb0, 0x1f, 0xff,
0x35, 0x09, 0xf9, 0x70, 0x4f, 0x87, 0x5b, 0x64, 0x9d, 0xe4, 0x6f, 0x81, 0xec, 0xf0, 0x8f, 0x1e,
0xdb, 0x01, 0xae, 0x56, 0x70, 0x62, 0x97, 0x27, 0x69, 0xa7, 0xa5, 0x50, 0x24, 0x2e, 0xaa, 0x78,
0x4f, 0x23, 0xda, 0xbf, 0x67, 0x14, 0x36, 0x92, 0x36, 0x60, 0xf2, 0xa6, 0x47, 0xb8, 0x27, 0x09,
0xf7, 0xd3, 0xc7, 0x73, 0x5f, 0x6b, 0x12, 0xf2, 0xec, 0xb5, 0xa6, 0xba, 0x5d, 0x57, 0xb6, 0xca,
0x9b, 0x0a, 0x83, 0x4b, 0xa7, 0x21, 0x65, 0x69, 0xf7, 0x0e, 0xa2, 0x25, 0x80, 0x88, 0xc6, 0x75,
0xfc, 0x69, 0x48, 0xdd, 0x41, 0xda, 0xad, 0x68, 0xe2, 0x25, 0xa2, 0x0f, 0x30, 0xf4, 0xcf, 0x41,
0x9a, 0xf8, 0x4b, 0x02, 0x60, 0x1e, 0x13, 0x27, 0xa4, 0x0c, 0xa4, 0x2a, 0x75, 0x05, 0x87, 0xbf,
0x08, 0x79, 0x2a, 0x55, 0x1b, 0xb5, 0x6a, 0xa5, 0x2a, 0x26, 0x96, 0x2f, 0xc0, 0x24, 0x75, 0x02,
0xde, 0x1a, 0x81, 0x1b, 0xc4, 0x09, 0x36, 0x64, 0x1c, 0x02, 0x9f, 0xdd, 0xdd, 0x5a, 0xab, 0x2a,
0x62, 0x22, 0xbc, 0xbc, 0x1e, 0xe4, 0xc3, 0xed, 0xdc, 0xcf, 0x26, 0xa6, 0xfe, 0x41, 0x80, 0x5c,
0xa8, 0x3d, 0xc3, 0x8d, 0x81, 0x66, 0x59, 0xce, 0x1d, 0x55, 0xb3, 0x4c, 0xcd, 0x63, 0x41, 0x01,
0x44, 0x54, 0xc6, 0x92, 0x71, 0x17, 0xed, 0x67, 0x62, 0xfc, 0x9b, 0x02, 0x88, 0x83, 0xad, 0xdd,
0x80, 0x81, 0xc2, 0x87, 0x6a, 0xe0, 0x1b, 0x02, 0x14, 0xa2, 0xfd, 0xdc, 0x80, 0x79, 0x67, 0x3f,
0x54, 0xf3, 0xde, 0x49, 0xc0, 0x74, 0xa4, 0x8b, 0x1b, 0xd7, 0xba, 0xcf, 0xc3, 0xac, 0x69, 0xa0,
0x4e, 0xd7, 0xf1, 0x91, 0xad, 0x1f, 0xa8, 0x16, 0xba, 0x8d, 0xac, 0xe2, 0x32, 0x49, 0x14, 0xe7,
0x8e, 0xef, 0x13, 0x57, 0x6b, 0x7d, 0xdc, 0x26, 0x86, 0xc9, 0x73, 0xb5, 0xf5, 0xea, 0x56, 0xa3,
0xbe, 0x53, 0xdd, 0xae, 0xdc, 0x50, 0x77, 0xb7, 0x7f, 0x61, 0xbb, 0xfe, 0xea, 0xb6, 0x22, 0x9a,
0x03, 0x6a, 0x1f, 0xe0, 0x56, 0x6f, 0x80, 0x38, 0x68, 0x94, 0x74, 0x0a, 0x46, 0x99, 0x25, 0x4e,
0x48, 0x73, 0x30, 0xb3, 0x5d, 0x57, 0x9b, 0xb5, 0xf5, 0xaa, 0x5a, 0xbd, 0x7a, 0xb5, 0x5a, 0xd9,
0x69, 0xd2, 0x83, 0x73, 0xa0, 0xbd, 0x13, 0xdd, 0xd4, 0xaf, 0x27, 0x61, 0x6e, 0x84, 0x25, 0x52,
0x99, 0xf5, 0xec, 0xf4, 0x18, 0xf1, 0xf1, 0x71, 0xac, 0x5f, 0xc5, 0x5d, 0x41, 0x43, 0x73, 0x7d,
0xd6, 0xe2, 0x3f, 0x07, 0xd8, 0x4b, 0xb6, 0x6f, 0xb6, 0x4c, 0xe4, 0xb2, 0x7b, 0x06, 0xda, 0xc8,
0xcf, 0xf4, 0xe5, 0xf4, 0xaa, 0xe1, 0xe7, 0x40, 0xea, 0x3a, 0x9e, 0xe9, 0x9b, 0xb7, 0x91, 0x6a,
0xda, 0xfc, 0x52, 0x02, 0x37, 0xf6, 0x29, 0x45, 0xe4, 0x33, 0x35, 0xdb, 0x0f, 0xb4, 0x6d, 0xd4,
0xd6, 0x06, 0xb4, 0x71, 0x02, 0x4f, 0x2a, 0x22, 0x9f, 0x09, 0xb4, 0xcf, 0x42, 0xde, 0x70, 0x7a,
0xb8, 0x4d, 0xa2, 0x7a, 0xb8, 0x5e, 0x08, 0x4a, 0x8e, 0xca, 0x02, 0x15, 0xd6, 0xc7, 0xf6, 0x6f,
0x43, 0xf2, 0x4a, 0x8e, 0xca, 0xa8, 0xca, 0xb3, 0x30, 0xa3, 0xb5, 0xdb, 0x2e, 0x26, 0xe7, 0x44,
0xb4, 0x33, 0x2f, 0x04, 0x62, 0xa2, 0xb8, 0x78, 0x0d, 0x32, 0xdc, 0x0f, 0xb8, 0x24, 0x63, 0x4f,
0xa8, 0x5d, 0x7a, 0x27, 0x95, 0x58, 0xc9, 0x2a, 0x19, 0x9b, 0x4f, 0x9e, 0x85, 0xbc, 0xe9, 0xa9,
0xfd, 0xcb, 0xd1, 0xc4, 0x52, 0x62, 0x25, 0xa3, 0xe4, 0x4c, 0x2f, 0xb8, 0x0d, 0x5b, 0x7e, 0x2b,
0x01, 0x85, 0xe8, 0xe5, 0xae, 0xb4, 0x0e, 0x19, 0xcb, 0xd1, 0x35, 0x12, 0x5a, 0xf4, 0xcb, 0xc2,
0x4a, 0xcc, 0x7d, 0xf0, 0xea, 0x26, 0xd3, 0x57, 0x02, 0xe4, 0xe2, 0x3f, 0x0b, 0x90, 0xe1, 0x62,
0x69, 0x01, 0x52, 0x5d, 0xcd, 0xdf, 0x27, 0x74, 0xe9, 0xb5, 0x84, 0x28, 0x28, 0x64, 0x8c, 0xe5,
0x5e, 0x57, 0xb3, 0x49, 0x08, 0x30, 0x39, 0x1e, 0xe3, 0x75, 0xb5, 0x90, 0x66, 0x90, 0xb6, 0xdf,
0xe9, 0x74, 0x90, 0xed, 0x7b, 0x7c, 0x5d, 0x99, 0xbc, 0xc2, 0xc4, 0xd2, 0x0b, 0x30, 0xeb, 0xbb,
0x9a, 0x69, 0x45, 0x74, 0x53, 0x44, 0x57, 0xe4, 0x13, 0x81, 0xb2, 0x0c, 0xa7, 0x39, 0xaf, 0x81,
0x7c, 0x4d, 0xdf, 0x47, 0x46, 0x1f, 0x34, 0x49, 0x6e, 0x0e, 0x4f, 0x31, 0x85, 0x75, 0x36, 0xcf,
0xb1, 0xcb, 0x3f, 0x10, 0x60, 0x96, 0x1f, 0x54, 0x8c, 0xc0, 0x59, 0x5b, 0x00, 0x9a, 0x6d, 0x3b,
0x7e, 0xd8, 0x5d, 0xc3, 0xa1, 0x3c, 0x84, 0x5b, 0x2d, 0x07, 0x20, 0x25, 0x44, 0xb0, 0xd8, 0x01,
0xe8, 0xcf, 0x1c, 0xe9, 0xb6, 0x33, 0x90, 0x63, 0x37, 0xf7, 0xe4, 0xf3, 0x0f, 0x3d, 0xda, 0x02,
0x15, 0xe1, 0x13, 0x8d, 0x34, 0x0f, 0xe9, 0x3d, 0xd4, 0x36, 0x6d, 0x76, 0x9f, 0x48, 0x07, 0xfc,
0x96, 0x32, 0x15, 0xdc, 0x52, 0xae, 0x5d, 0x87, 0x39, 0xdd, 0xe9, 0x0c, 0x9a, 0xbb, 0x26, 0x0e,
0x1c, 0xaf, 0xbd, 0x4f, 0x0b, 0x9f, 0x83, 0x7e, 0x8b, 0xf9, 0x95, 0x44, 0x72, 0xa3, 0xb1, 0xf6,
0xb5, 0xc4, 0xe2, 0x06, 0xc5, 0x35, 0xf8, 0x6b, 0x2a, 0xa8, 0x65, 0x21, 0x1d, 0x9b, 0x0e, 0x3f,
0xfa, 0x18, 0x7c, 0xbc, 0x6d, 0xfa, 0xfb, 0xbd, 0xbd, 0x55, 0xdd, 0xe9, 0x9c, 0x6b, 0x3b, 0x6d,
0xa7, 0xff, 0xb9, 0x0b, 0x8f, 0xc8, 0x80, 0xfc, 0xc7, 0x3e, 0x79, 0x65, 0x03, 0xe9, 0x62, 0xec,
0xf7, 0x31, 0x79, 0x1b, 0xe6, 0x98, 0xb2, 0x4a, 0xee, 0xdc, 0xe9, 0xd1, 0x40, 0x3a, 0xf6, 0xde,
0xa5, 0xf8, 0xad, 0x77, 0x49, 0xad, 0x56, 0x66, 0x19, 0x14, 0xcf, 0xd1, 0x03, 0x84, 0xac, 0xc0,
0x13, 0x11, 0x3e, 0xba, 0x2f, 0x91, 0x1b, 0xc3, 0xf8, 0x3d, 0xc6, 0x38, 0x17, 0x62, 0x6c, 0x32,
0xa8, 0x5c, 0x81, 0xe9, 0x93, 0x70, 0xfd, 0x23, 0xe3, 0xca, 0xa3, 0x30, 0xc9, 0x06, 0xcc, 0x10,
0x12, 0xbd, 0xe7, 0xf9, 0x4e, 0x87, 0x24, 0xbd, 0xe3, 0x69, 0xfe, 0xe9, 0x5d, 0xba, 0x51, 0x0a,
0x18, 0x56, 0x09, 0x50, 0xb2, 0x0c, 0xe4, 0x33, 0x83, 0x81, 0x74, 0x2b, 0x86, 0xe1, 0x6d, 0x66,
0x48, 0xa0, 0x2f, 0x7f, 0x16, 0xe6, 0xf1, 0xff, 0x24, 0x27, 0x85, 0x2d, 0x89, 0xbf, 0x65, 0x2a,
0xfe, 0xe0, 0x35, 0xba, 0x17, 0xe7, 0x02, 0x82, 0x90, 0x4d, 0xa1, 0x55, 0x6c, 0x23, 0xdf, 0x47,
0xae, 0xa7, 0x6a, 0xd6, 0x28, 0xf3, 0x42, 0xc7, 0xf4, 0xe2, 0x97, 0x1e, 0x46, 0x57, 0x71, 0x83,
0x22, 0xcb, 0x96, 0x25, 0xef, 0xc2, 0xa9, 0x11, 0x51, 0x31, 0x06, 0xe7, 0xeb, 0x8c, 0x73, 0x7e,
0x28, 0x32, 0x30, 0x6d, 0x03, 0xb8, 0x3c, 0x58, 0xcb, 0x31, 0x38, 0xff, 0x90, 0x71, 0x4a, 0x0c,
0xcb, 0x97, 0x14, 0x33, 0x5e, 0x83, 0xd9, 0xdb, 0xc8, 0xdd, 0x73, 0x3c, 0x76, 0x35, 0x32, 0x06,
0xdd, 0x1b, 0x8c, 0x6e, 0x86, 0x01, 0xc9, 0x5d, 0x09, 0xe6, 0x7a, 0x05, 0x32, 0x2d, 0x4d, 0x47,
0x63, 0x50, 0x7c, 0x99, 0x51, 0x4c, 0x61, 0x7d, 0x0c, 0x2d, 0x43, 0xbe, 0xed, 0xb0, 0xb2, 0x14,
0x0f, 0x7f, 0x93, 0xc1, 0x73, 0x1c, 0xc3, 0x28, 0xba, 0x4e, 0xb7, 0x67, 0xe1, 0x9a, 0x15, 0x4f,
0xf1, 0x47, 0x9c, 0x82, 0x63, 0x18, 0xc5, 0x09, 0xdc, 0xfa, 0xc7, 0x9c, 0xc2, 0x0b, 0xf9, 0xf3,
0x0a, 0xe4, 0x1c, 0xdb, 0x3a, 0x70, 0xec, 0x71, 0x8c, 0xf8, 0x13, 0xc6, 0x00, 0x0c, 0x82, 0x09,
0x2e, 0x43, 0x76, 0xdc, 0x85, 0xf8, 0xd3, 0x87, 0x7c, 0x7b, 0xf0, 0x15, 0xd8, 0x80, 0x19, 0x9e,
0xa0, 0x4c, 0xc7, 0x1e, 0x83, 0xe2, 0xcf, 0x18, 0x45, 0x21, 0x04, 0x63, 0xaf, 0xe1, 0x23, 0xcf,
0x6f, 0xa3, 0x71, 0x48, 0xde, 0xe2, 0xaf, 0xc1, 0x20, 0xcc, 0x95, 0x7b, 0xc8, 0xd6, 0xf7, 0xc7,
0x63, 0xf8, 0x2a, 0x77, 0x25, 0xc7, 0x60, 0x8a, 0x0a, 0x4c, 0x77, 0x34, 0xd7, 0xdb, 0xd7, 0xac,
0xb1, 0x96, 0xe3, 0xcf, 0x19, 0x47, 0x3e, 0x00, 0x31, 0x8f, 0xf4, 0xec, 0x93, 0xd0, 0x7c, 0x8d,
0x7b, 0x24, 0x04, 0x63, 0x5b, 0xcf, 0xf3, 0xc9, 0x05, 0xd4, 0x49, 0xd8, 0xbe, 0xce, 0xb7, 0x1e,
0xc5, 0x6e, 0x85, 0x19, 0x2f, 0x43, 0xd6, 0x33, 0xef, 0x8d, 0x45, 0xf3, 0x17, 0x7c, 0xa5, 0x09,
0x00, 0x83, 0x6f, 0xc0, 0xe9, 0x91, 0x65, 0x62, 0x0c, 0xb2, 0x6f, 0x30, 0xb2, 0x85, 0x11, 0xa5,
0x82, 0xa5, 0x84, 0x93, 0x52, 0xfe, 0x25, 0x4f, 0x09, 0x68, 0x80, 0xab, 0x81, 0x0f, 0x0a, 0x9e,
0xd6, 0x3a, 0x99, 0xd7, 0xfe, 0x8a, 0x7b, 0x8d, 0x62, 0x23, 0x5e, 0xdb, 0x81, 0x05, 0xc6, 0x78,
0xb2, 0x75, 0xfd, 0x26, 0x4f, 0xac, 0x14, 0xbd, 0x1b, 0x5d, 0xdd, 0x5f, 0x84, 0xc5, 0xc0, 0x9d,
0xbc, 0x23, 0xf5, 0xd4, 0x8e, 0xd6, 0x1d, 0x83, 0xf9, 0x5b, 0x8c, 0x99, 0x67, 0xfc, 0xa0, 0xa5,
0xf5, 0xb6, 0xb4, 0x2e, 0x26, 0xbf, 0x0e, 0x45, 0x4e, 0xde, 0xb3, 0x5d, 0xa4, 0x3b, 0x6d, 0xdb,
0xbc, 0x87, 0x8c, 0x31, 0xa8, 0xff, 0x7a, 0x60, 0xa9, 0x76, 0x43, 0x70, 0xcc, 0x5c, 0x03, 0x31,
0xe8, 0x55, 0x54, 0xb3, 0xd3, 0x75, 0x5c, 0x3f, 0x86, 0xf1, 0x6f, 0xf8, 0x4a, 0x05, 0xb8, 0x1a,
0x81, 0xc9, 0x55, 0x28, 0x90, 0xe1, 0xb8, 0x21, 0xf9, 0xb7, 0x8c, 0x68, 0xba, 0x8f, 0x62, 0x89,
0x43, 0x77, 0x3a, 0x5d, 0xcd, 0x1d, 0x27, 0xff, 0xfd, 0x1d, 0x4f, 0x1c, 0x0c, 0xc2, 0x12, 0x87,
0x7f, 0xd0, 0x45, 0xb8, 0xda, 0x8f, 0xc1, 0xf0, 0x6d, 0x9e, 0x38, 0x38, 0x86, 0x51, 0xf0, 0x86,
0x61, 0x0c, 0x8a, 0xbf, 0xe7, 0x14, 0x1c, 0x83, 0x29, 0x3e, 0xd3, 0x2f, 0xb4, 0x2e, 0x6a, 0x9b,
0x9e, 0xef, 0xd2, 0x3e, 0xf8, 0x78, 0xaa, 0xef, 0x3c, 0x8c, 0x36, 0x61, 0x4a, 0x08, 0x2a, 0x5f,
0x83, 0x99, 0x81, 0x16, 0x43, 0x8a, 0xfb, 0xcd, 0x42, 0xf1, 0x97, 0x1e, 0xb1, 0x64, 0x14, 0xed,
0x30, 0xe4, 0x4d, 0xbc, 0xee, 0xd1, 0x3e, 0x20, 0x9e, 0xec, 0xb5, 0x47, 0xc1, 0xd2, 0x47, 0xda,
0x00, 0xf9, 0x2a, 0x4c, 0x47, 0x7a, 0x80, 0x78, 0xaa, 0x5f, 0x66, 0x54, 0xf9, 0x70, 0x0b, 0x20,
0x5f, 0x80, 0x14, 0xae, 0xe7, 0xf1, 0xf0, 0x5f, 0x61, 0x70, 0xa2, 0x2e, 0x7f, 0x12, 0x32, 0xbc,
0x8e, 0xc7, 0x43, 0x7f, 0x95, 0x41, 0x03, 0x08, 0x86, 0xf3, 0x1a, 0x1e, 0x0f, 0xff, 0x35, 0x0e,
0xe7, 0x10, 0x0c, 0x1f, 0xdf, 0x85, 0xdf, 0xfd, 0xf5, 0x14, 0xcb, 0xc3, 0xdc, 0x77, 0x97, 0x61,
0x8a, 0x15, 0xef, 0x78, 0xf4, 0x17, 0xd8, 0xc3, 0x39, 0x42, 0x7e, 0x19, 0xd2, 0x63, 0x3a, 0xfc,
0x37, 0x18, 0x94, 0xea, 0xcb, 0x15, 0xc8, 0x85, 0x0a, 0x76, 0x3c, 0xfc, 0x37, 0x19, 0x3c, 0x8c,
0xc2, 0xa6, 0xb3, 0x82, 0x1d, 0x4f, 0xf0, 0x5b, 0xdc, 0x74, 0x86, 0xc0, 0x6e, 0xe3, 0xb5, 0x3a,
0x1e, 0xfd, 0xdb, 0xdc, 0xeb, 0x1c, 0x22, 0x5f, 0x81, 0x6c, 0x90, 0x7f, 0xe3, 0xf1, 0xbf, 0xc3,
0xf0, 0x7d, 0x0c, 0xf6, 0x40, 0x28, 0xff, 0xc7, 0x53, 0xfc, 0x2e, 0xf7, 0x40, 0x08, 0x85, 0xb7,
0xd1, 0x60, 0x4d, 0x8f, 0x67, 0xfa, 0x3d, 0xbe, 0x8d, 0x06, 0x4a, 0x3a, 0x5e, 0x4d, 0x92, 0x06,
0xe3, 0x29, 0x7e, 0x9f, 0xaf, 0x26, 0xd1, 0xc7, 0x66, 0x0c, 0x16, 0xc9, 0x78, 0x8e, 0x3f, 0xe0,
0x66, 0x0c, 0xd4, 0x48, 0xb9, 0x01, 0xd2, 0x70, 0x81, 0x8c, 0xe7, 0xfb, 0x22, 0xe3, 0x9b, 0x1d,
0xaa, 0x8f, 0xf2, 0xab, 0xb0, 0x30, 0xba, 0x38, 0xc6, 0xb3, 0x7e, 0xe9, 0xd1, 0xc0, 0x71, 0x26,
0x5c, 0x1b, 0xe5, 0x9d, 0x7e, 0x96, 0x0d, 0x17, 0xc6, 0x78, 0xda, 0xd7, 0x1f, 0x45, 0x13, 0x6d,
0xb8, 0x2e, 0xca, 0x65, 0x80, 0x7e, 0x4d, 0x8a, 0xe7, 0x7a, 0x83, 0x71, 0x85, 0x40, 0x78, 0x6b,
0xb0, 0x92, 0x14, 0x8f, 0xff, 0x32, 0xdf, 0x1a, 0x0c, 0x81, 0xb7, 0x06, 0xaf, 0x46, 0xf1, 0xe8,
0x37, 0xf9, 0xd6, 0xe0, 0x10, 0xf9, 0x32, 0x64, 0xec, 0x9e, 0x65, 0xe1, 0xd8, 0x92, 0x8e, 0xff,
0x19, 0x51, 0xf1, 0xdf, 0x1f, 0x33, 0x30, 0x07, 0xc8, 0x17, 0x20, 0x8d, 0x3a, 0x7b, 0xc8, 0x88,
0x43, 0xfe, 0xc7, 0x63, 0x9e, 0x4f, 0xb0, 0xb6, 0x7c, 0x05, 0x80, 0x1e, 0xa6, 0xc9, 0x57, 0xa2,
0x18, 0xec, 0x7f, 0x3e, 0x66, 0xbf, 0x50, 0xe8, 0x43, 0xfa, 0x04, 0xf4, 0xf7, 0x0e, 0xc7, 0x13,
0x3c, 0x8c, 0x12, 0x90, 0x03, 0xf8, 0x2b, 0x30, 0x75, 0xd3, 0x73, 0x6c, 0x5f, 0x6b, 0xc7, 0xa1,
0xff, 0x8b, 0xa1, 0xb9, 0x3e, 0x76, 0x58, 0xc7, 0x71, 0x91, 0xaf, 0xb5, 0xbd, 0x38, 0xec, 0x7f,
0x33, 0x6c, 0x00, 0xc0, 0x60, 0x5d, 0xf3, 0xfc, 0x71, 0xde, 0xfb, 0x7f, 0x38, 0x98, 0x03, 0xb0,
0xd1, 0xf8, 0xff, 0x5b, 0xe8, 0x20, 0x0e, 0xfb, 0x3e, 0x37, 0x9a, 0xe9, 0xcb, 0x9f, 0x84, 0x2c,
0xfe, 0x97, 0xfe, 0x6a, 0x27, 0x06, 0xfc, 0xbf, 0x0c, 0xdc, 0x47, 0xe0, 0x27, 0x7b, 0xbe, 0xe1,
0x9b, 0xf1, 0xce, 0xfe, 0x3f, 0xb6, 0xd2, 0x5c, 0x5f, 0x2e, 0x43, 0xce, 0xf3, 0x0d, 0xa3, 0xc7,
0x3a, 0x9a, 0x18, 0xf8, 0x8f, 0x1e, 0x07, 0x87, 0xdc, 0x00, 0xb3, 0x76, 0x76, 0xf4, 0x65, 0x1d,
0x6c, 0x38, 0x1b, 0x0e, 0xbd, 0xa6, 0x83, 0x6f, 0x08, 0x50, 0x68, 0x99, 0x16, 0x5a, 0x35, 0x1c,
0x9f, 0x5d, 0xab, 0xe5, 0xf0, 0xd8, 0x70, 0x7c, 0xbc, 0xde, 0x8b, 0x27, 0xbb, 0x92, 0x5b, 0x9e,
0x05, 0x61, 0x4b, 0xca, 0x83, 0xa0, 0xb1, 0x5f, 0x93, 0x08, 0xda, 0xda, 0xe6, 0xdb, 0x0f, 0x4a,
0x13, 0xdf, 0x7f, 0x50, 0x9a, 0xf8, 0x97, 0x07, 0xa5, 0x89, 0x77, 0x1e, 0x94, 0x84, 0xf7, 0x1e,
0x94, 0x84, 0xf7, 0x1f, 0x94, 0x84, 0x1f, 0x3f, 0x28, 0x09, 0xf7, 0x0f, 0x4b, 0xc2, 0x57, 0x0f,
0x4b, 0xc2, 0x37, 0x0f, 0x4b, 0xc2, 0x77, 0x0e, 0x4b, 0xc2, 0x77, 0x0f, 0x4b, 0xc2, 0xdb, 0x87,
0xa5, 0x89, 0xef, 0x1f, 0x96, 0x26, 0xde, 0x39, 0x2c, 0x09, 0xef, 0x1d, 0x96, 0x26, 0xde, 0x3f,
0x2c, 0x09, 0x3f, 0x3e, 0x2c, 0x4d, 0xdc, 0xff, 0x61, 0x69, 0xe2, 0x27, 0x01, 0x00, 0x00, 0xff,
0xff, 0x7c, 0xbe, 0x1b, 0xf8, 0x0c, 0x2f, 0x00, 0x00,
}
r := bytes.NewReader(gzipped)
gzipr, err := compress_gzip.NewReader(r)
if err != nil {
panic(err)
}
ungzipped, err := io_ioutil.ReadAll(gzipr)
if err != nil {
panic(err)
}
if err := github_com_gogo_protobuf_proto.Unmarshal(ungzipped, d); err != nil {
panic(err)
}
return d
}
func (this *M) VerboseEqual(that interface{}) error {
if that == nil {
if this == nil {
return nil
}
return fmt.Errorf("that == nil && this != nil")
}
that1, ok := that.(*M)
if !ok {
that2, ok := that.(M)
if ok {
that1 = &that2
} else {
return fmt.Errorf("that is not of type *M")
}
}
if that1 == nil {
if this == nil {
return nil
}
return fmt.Errorf("that is type *M but is nil && this != nil")
} else if this == nil {
return fmt.Errorf("that is type *M but is not nil && this == nil")
}
if this.A != nil && that1.A != nil {
if *this.A != *that1.A {
return fmt.Errorf("A this(%v) Not Equal that(%v)", *this.A, *that1.A)
}
} else if this.A != nil {
return fmt.Errorf("this.A == nil && that.A != nil")
} else if that1.A != nil {
return fmt.Errorf("A this(%v) Not Equal that(%v)", this.A, that1.A)
}
if !bytes.Equal(this.XXX_unrecognized, that1.XXX_unrecognized) {
return fmt.Errorf("XXX_unrecognized this(%v) Not Equal that(%v)", this.XXX_unrecognized, that1.XXX_unrecognized)
}
return nil
}
func (this *M) Equal(that interface{}) bool {
if that == nil {
if this == nil {
return true
}
return false
}
that1, ok := that.(*M)
if !ok {
that2, ok := that.(M)
if ok {
that1 = &that2
} else {
return false
}
}
if that1 == nil {
if this == nil {
return true
}
return false
} else if this == nil {
return false
}
if this.A != nil && that1.A != nil {
if *this.A != *that1.A {
return false
}
} else if this.A != nil {
return false
} else if that1.A != nil {
return false
}
if !bytes.Equal(this.XXX_unrecognized, that1.XXX_unrecognized) {
return false
}
return true
}
type MFace interface {
Proto() github_com_gogo_protobuf_proto.Message
GetA() *string
}
func (this *M) Proto() github_com_gogo_protobuf_proto.Message {
return this
}
func (this *M) TestProto() github_com_gogo_protobuf_proto.Message {
return NewMFromFace(this)
}
func (this *M) GetA() *string {
return this.A
}
func NewMFromFace(that MFace) *M {
this := &M{}
this.A = that.GetA()
return this
}
func (this *M) GoString() string {
if this == nil {
return "nil"
}
s := make([]string, 0, 5)
s = append(s, "&filedotname.M{")
if this.A != nil {
s = append(s, "A: "+valueToGoStringFileDot(this.A, "string")+",\n")
}
if this.XXX_unrecognized != nil {
s = append(s, "XXX_unrecognized:"+fmt.Sprintf("%#v", this.XXX_unrecognized)+",\n")
}
s = append(s, "}")
return strings.Join(s, "")
}
func valueToGoStringFileDot(v interface{}, typ string) string {
rv := reflect.ValueOf(v)
if rv.IsNil() {
return "nil"
}
pv := reflect.Indirect(rv).Interface()
return fmt.Sprintf("func(v %v) *%v { return &v } ( %#v )", typ, typ, pv)
}
func NewPopulatedM(r randyFileDot, easy bool) *M {
this := &M{}
if r.Intn(10) != 0 {
v1 := string(randStringFileDot(r))
this.A = &v1
}
if !easy && r.Intn(10) != 0 {
this.XXX_unrecognized = randUnrecognizedFileDot(r, 2)
}
return this
}
type randyFileDot interface {
Float32() float32
Float64() float64
Int63() int64
Int31() int32
Uint32() uint32
Intn(n int) int
}
func randUTF8RuneFileDot(r randyFileDot) rune {
ru := r.Intn(62)
if ru < 10 {
return rune(ru + 48)
} else if ru < 36 {
return rune(ru + 55)
}
return rune(ru + 61)
}
func randStringFileDot(r randyFileDot) string {
v2 := r.Intn(100)
tmps := make([]rune, v2)
for i := 0; i < v2; i++ {
tmps[i] = randUTF8RuneFileDot(r)
}
return string(tmps)
}
func randUnrecognizedFileDot(r randyFileDot, maxFieldNumber int) (dAtA []byte) {
l := r.Intn(5)
for i := 0; i < l; i++ {
wire := r.Intn(4)
if wire == 3 {
wire = 5
}
fieldNumber := maxFieldNumber + r.Intn(100)
dAtA = randFieldFileDot(dAtA, r, fieldNumber, wire)
}
return dAtA
}
func randFieldFileDot(dAtA []byte, r randyFileDot, fieldNumber int, wire int) []byte {
key := uint32(fieldNumber)<<3 | uint32(wire)
switch wire {
case 0:
dAtA = encodeVarintPopulateFileDot(dAtA, uint64(key))
v3 := r.Int63()
if r.Intn(2) == 0 {
v3 *= -1
}
dAtA = encodeVarintPopulateFileDot(dAtA, uint64(v3))
case 1:
dAtA = encodeVarintPopulateFileDot(dAtA, uint64(key))
dAtA = append(dAtA, byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)))
case 2:
dAtA = encodeVarintPopulateFileDot(dAtA, uint64(key))
ll := r.Intn(100)
dAtA = encodeVarintPopulateFileDot(dAtA, uint64(ll))
for j := 0; j < ll; j++ {
dAtA = append(dAtA, byte(r.Intn(256)))
}
default:
dAtA = encodeVarintPopulateFileDot(dAtA, uint64(key))<|fim▁hole|>}
func encodeVarintPopulateFileDot(dAtA []byte, v uint64) []byte {
for v >= 1<<7 {
dAtA = append(dAtA, uint8(uint64(v)&0x7f|0x80))
v >>= 7
}
dAtA = append(dAtA, uint8(v))
return dAtA
}
func (m *M) Size() (n int) {
var l int
_ = l
if m.A != nil {
l = len(*m.A)
n += 1 + l + sovFileDot(uint64(l))
}
if m.XXX_unrecognized != nil {
n += len(m.XXX_unrecognized)
}
return n
}
func sovFileDot(x uint64) (n int) {
for {
n++
x >>= 7
if x == 0 {
break
}
}
return n
}
func sozFileDot(x uint64) (n int) {
return sovFileDot(uint64((x << 1) ^ uint64((int64(x) >> 63))))
}
func (this *M) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&M{`,
`A:` + valueToStringFileDot(this.A) + `,`,
`XXX_unrecognized:` + fmt.Sprintf("%v", this.XXX_unrecognized) + `,`,
`}`,
}, "")
return s
}
func valueToStringFileDot(v interface{}) string {
rv := reflect.ValueOf(v)
if rv.IsNil() {
return "nil"
}
pv := reflect.Indirect(rv).Interface()
return fmt.Sprintf("*%v", pv)
}
func init() { proto.RegisterFile("file.dot.proto", fileDescriptorFileDot) }
var fileDescriptorFileDot = []byte{
// 179 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x24, 0xcb, 0xaf, 0x6e, 0xc2, 0x50,
0x1c, 0xc5, 0xf1, 0xdf, 0x91, 0xeb, 0x96, 0x25, 0xab, 0x5a, 0x26, 0x4e, 0x96, 0xa9, 0x99, 0xb5,
0xef, 0x30, 0x0d, 0x86, 0x37, 0x68, 0xe9, 0x1f, 0x9a, 0x50, 0x2e, 0x21, 0xb7, 0xbe, 0x8f, 0x83,
0x44, 0x22, 0x91, 0x95, 0x95, 0xc8, 0xde, 0x1f, 0xa6, 0xb2, 0xb2, 0x92, 0x70, 0x71, 0xe7, 0x93,
0x9c, 0x6f, 0xf0, 0x5e, 0x54, 0xdb, 0x3c, 0xca, 0x8c, 0x8d, 0xf6, 0x07, 0x63, 0x4d, 0xf8, 0xfa,
0x70, 0x66, 0xec, 0x2e, 0xa9, 0xf3, 0xaf, 0xbf, 0xb2, 0xb2, 0x9b, 0x26, 0x8d, 0xd6, 0xa6, 0x8e,
0x4b, 0x53, 0x9a, 0xd8, 0x7f, 0xd2, 0xa6, 0xf0, 0xf2, 0xf0, 0xeb, 0xd9, 0xfe, 0x7c, 0x04, 0x58,
0x86, 0x6f, 0x01, 0x92, 0x4f, 0x7c, 0xe3, 0xf7, 0x65, 0x85, 0xe4, 0x7f, 0xd1, 0x39, 0x4a, 0xef,
0x28, 0x57, 0x47, 0x19, 0x1c, 0x31, 0x3a, 0x62, 0x72, 0xc4, 0xec, 0x88, 0x56, 0x89, 0xa3, 0x12,
0x27, 0x25, 0xce, 0x4a, 0x5c, 0x94, 0xe8, 0x94, 0xd2, 0x2b, 0x65, 0x50, 0x62, 0x54, 0xca, 0xa4,
0xc4, 0xac, 0x94, 0xf6, 0x46, 0xb9, 0x07, 0x00, 0x00, 0xff, 0xff, 0x3f, 0x59, 0x32, 0x8a, 0xad,
0x00, 0x00, 0x00,
}<|fim▁end|> | dAtA = append(dAtA, byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)))
}
return dAtA |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>fn pe009() -> usize {
let stop:usize = 1000;
for a in 0 .. stop {
for b in a+1 .. stop {
for c in b+1 .. stop {
if a+b+c == stop {
if a*a + b*b == c*c {
return a*b*c<|fim▁hole|> }
0
}
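// For stop = 1000 the brute-force search above should hit the classic
// Project Euler #9 triplet (a, b, c) = (200, 375, 425), so main prints 31875000.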
fn main() {
println!("{}", pe009())
}<|fim▁end|> | }
}
}
} |
<|file_name|>DcgEval.py<|end_file_name|><|fim▁begin|># This file is part of Lerot.
#
# Lerot is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Lerot is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Lerot. If not, see <http://www.gnu.org/licenses/>.
import numpy as np
<|fim▁hole|> """Compute DCG (with gain = 2**rel-1 and log2 discount)."""
def get_dcg(self, ranked_labels, cutoff=-1):
"""
Get the dcg value of a list ranking.
        Does not check whether the number of ranked labels is smaller than the cutoff.
"""
if (cutoff == -1):
cutoff = len(ranked_labels)
rank = np.arange(cutoff)
return ((np.power(2, np.asarray(ranked_labels[:cutoff])) - 1) /
np.log2(2 + rank)).sum()
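    # Quick sanity check of the formula above: for ranked_labels = [3, 2, 0],
    # DCG = (2**3 - 1)/log2(2) + (2**2 - 1)/log2(3) + (2**0 - 1)/log2(4)
    #     = 7.0 + 3/1.585 + 0 ~= 8.89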
def evaluate_ranking(self, ranking, query, cutoff=-1):
"""Compute NDCG for the provided ranking. The ranking is expected
to contain document ids in rank order."""
if cutoff == -1 or cutoff > len(ranking):
cutoff = len(ranking)
if query.has_ideal():
ideal_dcg = query.get_ideal()
else:
ideal_labels = list(reversed(sorted(query.get_labels())))[:cutoff]
ideal_dcg = self.get_dcg(ideal_labels, cutoff)
query.set_ideal(ideal_dcg)
if ideal_dcg == .0:
# return 0 when there are no relevant documents. This is consistent
# with letor evaluation tools; an alternative would be to return
# 0.5 (e.g., used by the yahoo learning to rank challenge tools)
return 0.0
# get labels for the sorted docids
sorted_labels = [0] * cutoff
for i in range(cutoff):
sorted_labels[i] = query.get_label(ranking[i])
dcg = self.get_dcg(sorted_labels, cutoff)
return dcg / ideal_dcg
def get_value(self, ranking, labels, orientations, cutoff=-1):
"""
Compute the value of the metric
- ranking contains the list of documents to evaluate
- labels are the relevance labels for all the documents, even those
that are not in the ranking; labels[doc.get_id()] is the relevance of
doc
- orientations contains orientation values for the verticals;
orientations[doc.get_type()] is the orientation value for the
doc (from 0 to 1).
"""
return self.get_dcg([labels[doc.get_id()] for doc in ranking], cutoff)<|fim▁end|> | from .AbstractEval import AbstractEval
class DcgEval(AbstractEval): |
<|file_name|>AnnisGraphToolsTest.java<|end_file_name|><|fim▁begin|>package org.corpus_tools.annis.gui.visualizers.component.tree;
<|fim▁hole|>import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
import org.corpus_tools.annis.gui.visualizers.VisualizerInput;
import org.corpus_tools.annis.gui.visualizers.component.tree.AnnisGraphTools;
import org.corpus_tools.salt.SaltFactory;
import org.corpus_tools.salt.common.SDominanceRelation;
import org.corpus_tools.salt.core.SAnnotation;
import org.junit.jupiter.api.Test;
class AnnisGraphToolsTest {
@Test
void extractAnnotation() {
assertNull(AnnisGraphTools.extractAnnotation(null, "some_ns", "func"));
Set<SAnnotation> annos = new LinkedHashSet<>();
SAnnotation annoFunc = SaltFactory.createSAnnotation();
annoFunc.setNamespace("some_ns");
annoFunc.setName("func");
annoFunc.setValue("value");
annos.add(annoFunc);
assertEquals("value", AnnisGraphTools.extractAnnotation(annos, null, "func"));
assertEquals("value", AnnisGraphTools.extractAnnotation(annos, "some_ns", "func"));
assertNull(AnnisGraphTools.extractAnnotation(annos, "another_ns", "func"));
assertNull(AnnisGraphTools.extractAnnotation(annos, "some_ns", "anno"));
assertNull(AnnisGraphTools.extractAnnotation(annos, "another_ns", "anno"));
assertNull(AnnisGraphTools.extractAnnotation(annos, null, "anno"));
}
@Test
void isTerminalNullCheck() {
assertFalse(AnnisGraphTools.isTerminal(null, null));
VisualizerInput mockedVisInput = mock(VisualizerInput.class);
assertFalse(AnnisGraphTools.isTerminal(null, mockedVisInput));
}
@Test
void hasEdgeSubtypeForEmptyType() {
SDominanceRelation rel1 = mock(SDominanceRelation.class);
VisualizerInput input = mock(VisualizerInput.class);
// When the type is empty, this should be treated like having no type (null) at all
when(rel1.getType()).thenReturn("");
Map<String, String> mappings = new LinkedHashMap<>();
when(input.getMappings()).thenReturn(mappings);
mappings.put("edge_type", "null");
AnnisGraphTools tools = new AnnisGraphTools(input);
assertTrue(tools.hasEdgeSubtype(rel1, "null"));
SDominanceRelation rel2 = mock(SDominanceRelation.class);
when(rel1.getType()).thenReturn(null);
assertTrue(tools.hasEdgeSubtype(rel2, "null"));
}
}<|fim▁end|> | |
<|file_name|>expr-alt-box.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// -*- rust -*-
// Tests for match as expressions resulting in boxed types<|fim▁hole|> let res = match true { true => { @100 } _ => fail!("wat") };
assert!((*res == 100));
}
fn test_str() {
let res = match true { true => { ~"happy" },
_ => fail!("not happy at all") };
assert!((res == ~"happy"));
}
pub fn main() { test_box(); test_str(); }<|fim▁end|> | fn test_box() { |
<|file_name|>xSlider.js<|end_file_name|><|fim▁begin|>/*
* @fileOverview xSlider
* @version 1.5.1
* @date 2016-2-25
* @author Xinbo Shang
*
*/
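/*
 * A minimal usage sketch (markup and option values are illustrative): the plugin
 * expects a container holding a ul/li list of slides.
 *
 *   <div class="banner"><ul><li>slide 1</li><li>slide 2</li></ul></div>
 *
 *   $('.banner').xSlider({
 *       interval: 5000,
 *       foot: { className: 'banner-foot', nav: true, page: true },
 *       hoverStop: true
 *   });
 */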
"use strict";
(function(factory) {
if (typeof define === 'function' && define.amd) {
// AMD. Register as anonymous module.
define(['jquery'], factory);
} else if (typeof exports === 'object') {
// CommonJS Module
module.exports = factory(require('jquery'));
} else {
// Browser globals.
factory(jQuery || Zepto);
}
}(function($) {
if (!$) {
return console.warn('xSlider needs jQuery'); //jQuery must be required
}
<|fim▁hole|> this.UL = this.CONTAIN.find('ul'),
this.LI = this.CONTAIN.find('li'),
this.CURRENT_DISPLAY = 0, //it's in order to record the current display
this.IS_WORKING = false, //the switching status, to ensure one unfinished switch at most
this.TIMER = ''; //a timer to interval autoplay
//this.GROUP_NUM, //The number of rolling piece
//this.OPTIONS;
this.setOptions(options);
this.init();
};
xSlider.prototype = {
setOptions: function(options) {
var defaults = {
autoPlay: true, // it's auto play when this param is true
interval: 3000, // the time between switch
            speed: 400, // the duration of the switch animation
foot: false, // config of the showing footer
/*{
className: '', // the className of focus footer
nav: false, // show the nav
page: false, // show the page of the slider
}*/
            scrollNum: 1, // number of items to advance on each switch
direction: 'ltr', // ltr from left to right and rtl from right to left
controls: true, // show the left and right arrow buttons
hoverStop: false // stop autoplay when mouse over the slider
};
this.OPTIONS = $.extend(defaults, options);
},
init: function() {
var items = this.LI,
scroll = this.OPTIONS.scrollNum;
this.GROUP_NUM = Math.ceil(items.length / scroll);
if (items.length > 1) {
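            // Infinite-loop layout: `scroll` cloned items are padded onto each end of the
            // list, so the ul is (items.length + 2 * scroll) slides wide and starts pulled
            // left by one group; animate() snaps back onto the real slides after a
            // transition runs past either end.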
this.UL.css({
'width': (items.length + scroll * 2) * 100 + '%',
'margin-left': -scroll * 100 + '%',
'left': '0'
});
this.LI.css({
'float': 'left',
'listStyle': 'none',
'width': 100 / (items.length + scroll * 2) + '%'
});
this.UL.html(items.slice(0).clone()).append(items.slice(0, scroll).clone()).prepend(items.slice(-scroll).clone());
this.setControls();
this.setFoot();
this.setHover();
this.autoPlay();
}
},
// set left and right buttons
setControls: function() {
if (this.OPTIONS.controls) {
var _this = this;
this.CONTAIN.append('<div class="xslider-arrow"><span class="prev"></span><span class="next"></span></div>').find('.xslider-arrow')
.on('click', '.next', function(event) {
_this.toNext();
})
.on('click', '.prev', function(event) {
_this.toPrev();
});
}
},
// set nav and pages
setFoot: function() {
if (this.OPTIONS.foot) {
var _this = this;
var dom = '<div class="'+ this.OPTIONS.foot.className +'">';
if (this.OPTIONS.foot.nav) {
for (var i = 0; i < this.GROUP_NUM; i++) {
if (i == 0) {
dom += '<span class="active"></span>';
} else {
dom += '<span></span>';
}
}
}
if (this.OPTIONS.foot.page) {
dom += '<p><font>1</font>/' + this.GROUP_NUM + '</p>';
}
dom += '</div>';
this.CONTAIN.append(dom).find('.' + this.OPTIONS.foot.className).on('click', 'span', function() {
if (!_this.IS_WORKING) {
var pos = $('.'+ _this.OPTIONS.foot.className +' span', _this.CONTAIN).index($(this));
_this.animate(pos);
}
});
}
},
        // stop autoplay while the mouse is over the widget (hoverStop option)
        setHover: function() {
            var _this = this;
            if (this.OPTIONS.hoverStop) {
this.CONTAIN.hover(function() {
_this.stopAuto();
}, function() {
_this.autoPlay();
});
}
},
autoPlay: function() {
if (this.OPTIONS.autoPlay) {
var _this = this;
this.TIMER = setInterval(function() {
if (_this.OPTIONS.direction == 'rtl') {
_this.toPrev();
} else {
_this.toNext();
}
}, _this.OPTIONS.interval);
}
},
stopAuto: function() {
if (this.OPTIONS.autoPlay) {
var _this = this;
clearInterval(_this.TIMER);
}
},
// when autoplay,switch to the next
toNext: function() {
if (!this.IS_WORKING) {
var next = this.CURRENT_DISPLAY + 1;
this.animate(next);
}
},
// when autoplay and the options.direction is rtl, switch to the prev
toPrev: function() {
if (!this.IS_WORKING) {
var next = this.CURRENT_DISPLAY - 1;
this.animate(next);
}
},
animate: function(num) {
var _this = this;
this.IS_WORKING = true;
this.stopAuto();
this.UL.myAnimate({
left: -num * 100 + '%'
}, _this.OPTIONS.speed, function() {
if (num < 0) {
_this.CURRENT_DISPLAY = _this.GROUP_NUM - 1;
_this.UL.css('left', -(_this.GROUP_NUM - 1) * 100 + '%');
} else if (num >= _this.GROUP_NUM) {
_this.CURRENT_DISPLAY = 0;
_this.UL.css('left', 0);
} else {
_this.CURRENT_DISPLAY = num;
}
_this.setPosition(_this.CURRENT_DISPLAY);
_this.IS_WORKING = false;
if(_this.OPTIONS.autoPlay){
_this.autoPlay();
}
});
},
setPosition: function(num) {
if(this.OPTIONS.foot){
var footer = this.CONTAIN.find('.' + this.OPTIONS.foot.className);
if (this.OPTIONS.foot.nav) {
footer.find('span').removeClass('active').eq(num).addClass('active');
}
if (this.OPTIONS.foot.page) {
footer.find('font').html(num + 1);
}
}
}/*,
destroy: function() {
if (item.length > 1) {
if (options.controls) {
$('.xslider-arrow', cont).remove();
}
if (options.nav||options.page) {
cont.find('.xslider-nav').remove();
}
this.stopAuto();
working = false;
}
}*/
};
$.fn.myAnimate = $.Velocity ? $.fn.velocity : $.fn.animate;
$.fn.xSlider = function(options) {
//Multi element support
return this.each(function() {
if(!$(this).data('xSlider')){ // Prevent multiple instantiation
$(this).data('xSlider',1);
var s = new xSlider(this, options);
}
});
}
}));<|fim▁end|> | var xSlider = function(element, options) {
this.CONTAIN = $(element), //the warp of the widget |
<|file_name|>Semaphore.hh<|end_file_name|><|fim▁begin|>#pragma once
/* This file is part of Imagine.
Imagine is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Imagine is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Imagine. If not, see <http://www.gnu.org/licenses/> */
#include <imagine/config/defs.hh>
#ifdef __APPLE__
#include <imagine/thread/MachSemaphore.hh>
#else
#include <imagine/thread/PosixSemaphore.hh>
#endif
namespace IG
{<|fim▁hole|> Semaphore(unsigned int startValue);
void wait();
void notify();
};
}<|fim▁end|> |
class Semaphore : public SemaphoreImpl
{
public: |
<|file_name|>_stochastic_gradient.py<|end_file_name|><|fim▁begin|># Authors: Peter Prettenhofer <[email protected]> (main author)
# Mathieu Blondel (partial_fit support)
#
# License: BSD 3 clause
"""Classification, regression and One-Class SVM using Stochastic Gradient
Descent (SGD).
"""
import numpy as np
import warnings
from abc import ABCMeta, abstractmethod
from joblib import Parallel
from ..base import clone, is_classifier
from ._base import LinearClassifierMixin, SparseCoefMixin
from ._base import make_dataset
from ..base import BaseEstimator, RegressorMixin, OutlierMixin
from ..utils import check_random_state
from ..utils.metaestimators import available_if
from ..utils.extmath import safe_sparse_dot
from ..utils.multiclass import _check_partial_fit_first_call
from ..utils.validation import check_is_fitted, _check_sample_weight
from ..utils.fixes import delayed
from ..exceptions import ConvergenceWarning
from ..model_selection import StratifiedShuffleSplit, ShuffleSplit
from ._sgd_fast import _plain_sgd
from ..utils import compute_class_weight
from ._sgd_fast import Hinge
from ._sgd_fast import SquaredHinge
from ._sgd_fast import Log
from ._sgd_fast import ModifiedHuber
from ._sgd_fast import SquaredLoss
from ._sgd_fast import Huber
from ._sgd_fast import EpsilonInsensitive
from ._sgd_fast import SquaredEpsilonInsensitive
LEARNING_RATE_TYPES = {
"constant": 1,
"optimal": 2,
"invscaling": 3,
"adaptive": 4,
"pa1": 5,
"pa2": 6,
}
PENALTY_TYPES = {"none": 0, "l2": 2, "l1": 1, "elasticnet": 3}
DEFAULT_EPSILON = 0.1
# Default value of ``epsilon`` parameter.
MAX_INT = np.iinfo(np.int32).max
class _ValidationScoreCallback:
"""Callback for early stopping based on validation score"""
def __init__(self, estimator, X_val, y_val, sample_weight_val, classes=None):
self.estimator = clone(estimator)
self.estimator.t_ = 1 # to pass check_is_fitted
if classes is not None:
self.estimator.classes_ = classes
self.X_val = X_val
self.y_val = y_val
self.sample_weight_val = sample_weight_val
def __call__(self, coef, intercept):
est = self.estimator
est.coef_ = coef.reshape(1, -1)
est.intercept_ = np.atleast_1d(intercept)
return est.score(self.X_val, self.y_val, self.sample_weight_val)
class BaseSGD(SparseCoefMixin, BaseEstimator, metaclass=ABCMeta):
"""Base class for SGD classification and regression."""
def __init__(
self,
loss,
*,
penalty="l2",
alpha=0.0001,
C=1.0,
l1_ratio=0.15,
fit_intercept=True,
max_iter=1000,
tol=1e-3,
shuffle=True,
verbose=0,
epsilon=0.1,
random_state=None,
learning_rate="optimal",
eta0=0.0,
power_t=0.5,
early_stopping=False,
validation_fraction=0.1,
n_iter_no_change=5,
warm_start=False,
average=False,
):
self.loss = loss
self.penalty = penalty
self.learning_rate = learning_rate
self.epsilon = epsilon
self.alpha = alpha
self.C = C
self.l1_ratio = l1_ratio
self.fit_intercept = fit_intercept
self.shuffle = shuffle
self.random_state = random_state
self.verbose = verbose
self.eta0 = eta0
self.power_t = power_t
self.early_stopping = early_stopping
self.validation_fraction = validation_fraction
self.n_iter_no_change = n_iter_no_change
self.warm_start = warm_start
self.average = average
self.max_iter = max_iter
self.tol = tol
@abstractmethod
def fit(self, X, y):
"""Fit model."""
def _validate_params(self, for_partial_fit=False):
"""Validate input params."""
if not isinstance(self.shuffle, bool):
raise ValueError("shuffle must be either True or False")
if not isinstance(self.early_stopping, bool):
raise ValueError("early_stopping must be either True or False")
if self.early_stopping and for_partial_fit:
raise ValueError("early_stopping should be False with partial_fit")
if self.max_iter is not None and self.max_iter <= 0:
raise ValueError("max_iter must be > zero. Got %f" % self.max_iter)
if not (0.0 <= self.l1_ratio <= 1.0):
raise ValueError("l1_ratio must be in [0, 1]")
if not isinstance(self, SGDOneClassSVM) and self.alpha < 0.0:
raise ValueError("alpha must be >= 0")
if self.n_iter_no_change < 1:
raise ValueError("n_iter_no_change must be >= 1")
if not (0.0 < self.validation_fraction < 1.0):
raise ValueError("validation_fraction must be in range (0, 1)")
if self.learning_rate in ("constant", "invscaling", "adaptive"):
if self.eta0 <= 0.0:
raise ValueError("eta0 must be > 0")
if self.learning_rate == "optimal" and self.alpha == 0:
raise ValueError(
"alpha must be > 0 since "
"learning_rate is 'optimal'. alpha is used "
"to compute the optimal learning rate."
)
# raises ValueError if not registered
self._get_penalty_type(self.penalty)
self._get_learning_rate_type(self.learning_rate)
if self.loss not in self.loss_functions:
raise ValueError("The loss %s is not supported. " % self.loss)
if self.loss == "squared_loss":
warnings.warn(
"The loss 'squared_loss' was deprecated in v1.0 and will be "
"removed in version 1.2. Use `loss='squared_error'` which is "
"equivalent.",
FutureWarning,
)
def _get_loss_function(self, loss):
"""Get concrete ``LossFunction`` object for str ``loss``."""
try:
loss_ = self.loss_functions[loss]
loss_class, args = loss_[0], loss_[1:]
if loss in ("huber", "epsilon_insensitive", "squared_epsilon_insensitive"):
args = (self.epsilon,)
return loss_class(*args)
except KeyError as e:
raise ValueError("The loss %s is not supported. " % loss) from e
def _get_learning_rate_type(self, learning_rate):
try:
return LEARNING_RATE_TYPES[learning_rate]
except KeyError as e:
raise ValueError(
"learning rate %s is not supported. " % learning_rate
) from e
def _get_penalty_type(self, penalty):
penalty = str(penalty).lower()
try:
return PENALTY_TYPES[penalty]
except KeyError as e:
raise ValueError("Penalty %s is not supported. " % penalty) from e
def _allocate_parameter_mem(
self, n_classes, n_features, coef_init=None, intercept_init=None, one_class=0
):
"""Allocate mem for parameters; initialize if provided."""
if n_classes > 2:
# allocate coef_ for multi-class
if coef_init is not None:
coef_init = np.asarray(coef_init, order="C")
if coef_init.shape != (n_classes, n_features):
raise ValueError("Provided ``coef_`` does not match dataset. ")
self.coef_ = coef_init
else:
self.coef_ = np.zeros(
(n_classes, n_features), dtype=np.float64, order="C"
)
# allocate intercept_ for multi-class
if intercept_init is not None:
intercept_init = np.asarray(intercept_init, order="C")
if intercept_init.shape != (n_classes,):
raise ValueError("Provided intercept_init does not match dataset.")
self.intercept_ = intercept_init
else:
self.intercept_ = np.zeros(n_classes, dtype=np.float64, order="C")
else:
# allocate coef_
if coef_init is not None:
coef_init = np.asarray(coef_init, dtype=np.float64, order="C")
coef_init = coef_init.ravel()
if coef_init.shape != (n_features,):
raise ValueError("Provided coef_init does not match dataset.")
self.coef_ = coef_init
else:
self.coef_ = np.zeros(n_features, dtype=np.float64, order="C")
# allocate intercept_
if intercept_init is not None:
intercept_init = np.asarray(intercept_init, dtype=np.float64)
if intercept_init.shape != (1,) and intercept_init.shape != ():
raise ValueError("Provided intercept_init does not match dataset.")
if one_class:
self.offset_ = intercept_init.reshape(
1,
)
else:
self.intercept_ = intercept_init.reshape(
1,
)
else:
if one_class:
self.offset_ = np.zeros(1, dtype=np.float64, order="C")
else:
self.intercept_ = np.zeros(1, dtype=np.float64, order="C")
# initialize average parameters
if self.average > 0:
self._standard_coef = self.coef_
self._average_coef = np.zeros(self.coef_.shape, dtype=np.float64, order="C")
if one_class:
self._standard_intercept = 1 - self.offset_
else:
self._standard_intercept = self.intercept_
self._average_intercept = np.zeros(
self._standard_intercept.shape, dtype=np.float64, order="C"
)
def _make_validation_split(self, y):
"""Split the dataset between training set and validation set.
Parameters
----------
y : ndarray of shape (n_samples, )
Target values.
Returns
-------
validation_mask : ndarray of shape (n_samples, )
Equal to 1 on the validation set, 0 on the training set.
"""
n_samples = y.shape[0]
validation_mask = np.zeros(n_samples, dtype=np.uint8)
if not self.early_stopping:
# use the full set for training, with an empty validation set
return validation_mask
if is_classifier(self):
splitter_type = StratifiedShuffleSplit
else:
splitter_type = ShuffleSplit
cv = splitter_type(
test_size=self.validation_fraction, random_state=self.random_state
)
idx_train, idx_val = next(cv.split(np.zeros(shape=(y.shape[0], 1)), y))
if idx_train.shape[0] == 0 or idx_val.shape[0] == 0:
raise ValueError(
"Splitting %d samples into a train set and a validation set "
"with validation_fraction=%r led to an empty set (%d and %d "
"samples). Please either change validation_fraction, increase "
"number of samples, or disable early_stopping."
% (
n_samples,
self.validation_fraction,
idx_train.shape[0],
idx_val.shape[0],
)
)
validation_mask[idx_val] = 1
return validation_mask
def _make_validation_score_cb(
self, validation_mask, X, y, sample_weight, classes=None
):
if not self.early_stopping:
return None
return _ValidationScoreCallback(
self,
X[validation_mask],
y[validation_mask],
sample_weight[validation_mask],
classes=classes,
)
def _prepare_fit_binary(est, y, i):
"""Initialization for fit_binary.
Returns y, coef, intercept, average_coef, average_intercept.
"""
y_i = np.ones(y.shape, dtype=np.float64, order="C")
y_i[y != est.classes_[i]] = -1.0
average_intercept = 0
average_coef = None
if len(est.classes_) == 2:
if not est.average:
coef = est.coef_.ravel()
intercept = est.intercept_[0]
else:
coef = est._standard_coef.ravel()
intercept = est._standard_intercept[0]
average_coef = est._average_coef.ravel()
average_intercept = est._average_intercept[0]
else:
if not est.average:
coef = est.coef_[i]
intercept = est.intercept_[i]
else:
coef = est._standard_coef[i]
intercept = est._standard_intercept[i]
average_coef = est._average_coef[i]
average_intercept = est._average_intercept[i]
return y_i, coef, intercept, average_coef, average_intercept
def fit_binary(
est,
i,
X,
y,
alpha,
C,
learning_rate,
max_iter,
pos_weight,
neg_weight,
sample_weight,
validation_mask=None,
random_state=None,
):
"""Fit a single binary classifier.
The i'th class is considered the "positive" class.
Parameters
----------
est : Estimator object
The estimator to fit
i : int
Index of the positive class
X : numpy array or sparse matrix of shape [n_samples,n_features]
Training data
y : numpy array of shape [n_samples, ]
Target values
alpha : float
The regularization parameter
C : float
Maximum step size for passive aggressive
learning_rate : str
The learning rate. Accepted values are 'constant', 'optimal',
'invscaling', 'pa1' and 'pa2'.
max_iter : int
The maximum number of iterations (epochs)
pos_weight : float
The weight of the positive class
neg_weight : float
The weight of the negative class
sample_weight : numpy array of shape [n_samples, ]
The weight of each sample
validation_mask : numpy array of shape [n_samples, ], default=None
Precomputed validation mask in case _fit_binary is called in the
context of a one-vs-rest reduction.
random_state : int, RandomState instance, default=None
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
"""
# if average is not true, average_coef, and average_intercept will be
# unused
y_i, coef, intercept, average_coef, average_intercept = _prepare_fit_binary(
est, y, i
)
assert y_i.shape[0] == y.shape[0] == sample_weight.shape[0]
random_state = check_random_state(random_state)
dataset, intercept_decay = make_dataset(
X, y_i, sample_weight, random_state=random_state
)
penalty_type = est._get_penalty_type(est.penalty)
learning_rate_type = est._get_learning_rate_type(learning_rate)
if validation_mask is None:
validation_mask = est._make_validation_split(y_i)
classes = np.array([-1, 1], dtype=y_i.dtype)
validation_score_cb = est._make_validation_score_cb(
validation_mask, X, y_i, sample_weight, classes=classes
)
# numpy mtrand expects a C long which is a signed 32 bit integer under
# Windows
seed = random_state.randint(MAX_INT)
tol = est.tol if est.tol is not None else -np.inf
coef, intercept, average_coef, average_intercept, n_iter_ = _plain_sgd(
coef,
intercept,
average_coef,
average_intercept,
est.loss_function_,
penalty_type,
alpha,
C,
est.l1_ratio,
dataset,
validation_mask,
est.early_stopping,
validation_score_cb,
int(est.n_iter_no_change),
max_iter,
tol,
int(est.fit_intercept),
int(est.verbose),
int(est.shuffle),
seed,
pos_weight,
neg_weight,
learning_rate_type,
est.eta0,
est.power_t,
0,
est.t_,
intercept_decay,
est.average,
)
if est.average:
if len(est.classes_) == 2:
est._average_intercept[0] = average_intercept
else:
est._average_intercept[i] = average_intercept
return coef, intercept, n_iter_
class BaseSGDClassifier(LinearClassifierMixin, BaseSGD, metaclass=ABCMeta):
# TODO: Remove squared_loss in v1.2
loss_functions = {
"hinge": (Hinge, 1.0),
"squared_hinge": (SquaredHinge, 1.0),
"perceptron": (Hinge, 0.0),
"log": (Log,),
"modified_huber": (ModifiedHuber,),
"squared_error": (SquaredLoss,),
"squared_loss": (SquaredLoss,),
"huber": (Huber, DEFAULT_EPSILON),
"epsilon_insensitive": (EpsilonInsensitive, DEFAULT_EPSILON),
"squared_epsilon_insensitive": (SquaredEpsilonInsensitive, DEFAULT_EPSILON),
}
@abstractmethod
def __init__(
self,
loss="hinge",
*,
penalty="l2",
alpha=0.0001,
l1_ratio=0.15,
fit_intercept=True,
max_iter=1000,
tol=1e-3,
shuffle=True,
verbose=0,
epsilon=DEFAULT_EPSILON,
n_jobs=None,
random_state=None,
learning_rate="optimal",
eta0=0.0,
power_t=0.5,
early_stopping=False,
validation_fraction=0.1,
n_iter_no_change=5,
class_weight=None,
warm_start=False,
average=False,
):
super().__init__(
loss=loss,
penalty=penalty,
alpha=alpha,
l1_ratio=l1_ratio,
fit_intercept=fit_intercept,
max_iter=max_iter,
tol=tol,
shuffle=shuffle,
verbose=verbose,
epsilon=epsilon,
random_state=random_state,
learning_rate=learning_rate,
eta0=eta0,
power_t=power_t,
early_stopping=early_stopping,
validation_fraction=validation_fraction,
n_iter_no_change=n_iter_no_change,
warm_start=warm_start,
average=average,
)
self.class_weight = class_weight
self.n_jobs = n_jobs
def _partial_fit(
self,
X,
y,
alpha,
C,
loss,
learning_rate,
max_iter,
classes,
sample_weight,
coef_init,
intercept_init,
):
first_call = not hasattr(self, "classes_")
X, y = self._validate_data(
X,
y,
accept_sparse="csr",
dtype=np.float64,
order="C",
accept_large_sparse=False,
reset=first_call,
)
n_samples, n_features = X.shape
_check_partial_fit_first_call(self, classes)
n_classes = self.classes_.shape[0]
# Allocate datastructures from input arguments
self._expanded_class_weight = compute_class_weight(
self.class_weight, classes=self.classes_, y=y
)
sample_weight = _check_sample_weight(sample_weight, X)
if getattr(self, "coef_", None) is None or coef_init is not None:
self._allocate_parameter_mem(
n_classes, n_features, coef_init, intercept_init
)
elif n_features != self.coef_.shape[-1]:
raise ValueError(
"Number of features %d does not match previous data %d."
% (n_features, self.coef_.shape[-1])
)
self.loss_function_ = self._get_loss_function(loss)
if not hasattr(self, "t_"):
self.t_ = 1.0
# delegate to concrete training procedure
if n_classes > 2:
self._fit_multiclass(
X,
y,
alpha=alpha,
C=C,
learning_rate=learning_rate,
sample_weight=sample_weight,
max_iter=max_iter,
)
elif n_classes == 2:
self._fit_binary(
X,
y,
alpha=alpha,
C=C,
learning_rate=learning_rate,
sample_weight=sample_weight,
max_iter=max_iter,
)
else:
raise ValueError(
"The number of classes has to be greater than one; got %d class"
% n_classes
)
return self
def _fit(
self,
X,
y,
alpha,
C,
loss,
learning_rate,
coef_init=None,
intercept_init=None,
sample_weight=None,
):
self._validate_params()
if hasattr(self, "classes_"):
# delete the attribute otherwise _partial_fit thinks it's not the first call
delattr(self, "classes_")
# labels can be encoded as float, int, or string literals
# np.unique sorts in asc order; largest class id is positive class
y = self._validate_data(y=y)
classes = np.unique(y)
if self.warm_start and hasattr(self, "coef_"):
if coef_init is None:
coef_init = self.coef_
if intercept_init is None:
intercept_init = self.intercept_
else:
self.coef_ = None
self.intercept_ = None
if self.average > 0:
self._standard_coef = self.coef_
self._standard_intercept = self.intercept_
self._average_coef = None
self._average_intercept = None
# Clear iteration count for multiple call to fit.
self.t_ = 1.0
self._partial_fit(
X,
y,
alpha,
C,
loss,
learning_rate,
self.max_iter,
classes,
sample_weight,
coef_init,
intercept_init,
)
if (
self.tol is not None
and self.tol > -np.inf
and self.n_iter_ == self.max_iter
):
warnings.warn(
"Maximum number of iteration reached before "
"convergence. Consider increasing max_iter to "
"improve the fit.",
ConvergenceWarning,
)
return self
def _fit_binary(self, X, y, alpha, C, sample_weight, learning_rate, max_iter):
"""Fit a binary classifier on X and y."""
coef, intercept, n_iter_ = fit_binary(
self,
1,
X,
y,
alpha,
C,
learning_rate,
max_iter,
self._expanded_class_weight[1],
self._expanded_class_weight[0],
sample_weight,
random_state=self.random_state,
)
self.t_ += n_iter_ * X.shape[0]
self.n_iter_ = n_iter_
# need to be 2d
if self.average > 0:
if self.average <= self.t_ - 1:
self.coef_ = self._average_coef.reshape(1, -1)
self.intercept_ = self._average_intercept
else:
self.coef_ = self._standard_coef.reshape(1, -1)
self._standard_intercept = np.atleast_1d(intercept)
self.intercept_ = self._standard_intercept
else:
self.coef_ = coef.reshape(1, -1)
# intercept is a float, need to convert it to an array of length 1
self.intercept_ = np.atleast_1d(intercept)
def _fit_multiclass(self, X, y, alpha, C, learning_rate, sample_weight, max_iter):
"""Fit a multi-class classifier by combining binary classifiers
Each binary classifier predicts one class versus all others. This
strategy is called OvA (One versus All) or OvR (One versus Rest).
"""
# Precompute the validation split using the multiclass labels
# to ensure proper balancing of the classes.
validation_mask = self._make_validation_split(y)
# Use joblib to fit OvA in parallel.
# Pick the random seed for each job outside of fit_binary to avoid
# sharing the estimator random state between threads which could lead
# to non-deterministic behavior
random_state = check_random_state(self.random_state)
seeds = random_state.randint(MAX_INT, size=len(self.classes_))
result = Parallel(
n_jobs=self.n_jobs, verbose=self.verbose, require="sharedmem"
)(
delayed(fit_binary)(
self,
i,
X,
y,
alpha,
C,
learning_rate,
max_iter,
self._expanded_class_weight[i],
1.0,
sample_weight,
validation_mask=validation_mask,
random_state=seed,
)
for i, seed in enumerate(seeds)
)
# take the maximum of n_iter_ over every binary fit
n_iter_ = 0.0
for i, (_, intercept, n_iter_i) in enumerate(result):
self.intercept_[i] = intercept
n_iter_ = max(n_iter_, n_iter_i)
self.t_ += n_iter_ * X.shape[0]
self.n_iter_ = n_iter_
if self.average > 0:
if self.average <= self.t_ - 1.0:
self.coef_ = self._average_coef
self.intercept_ = self._average_intercept
else:
self.coef_ = self._standard_coef
self._standard_intercept = np.atleast_1d(self.intercept_)
self.intercept_ = self._standard_intercept
def partial_fit(self, X, y, classes=None, sample_weight=None):
"""Perform one epoch of stochastic gradient descent on given samples.
Internally, this method uses ``max_iter = 1``. Therefore, it is not
guaranteed that a minimum of the cost function is reached after calling
it once. Matters such as objective convergence, early stopping, and
learning rate adjustments should be handled by the user.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Subset of the training data.
y : ndarray of shape (n_samples,)
Subset of the target values.
classes : ndarray of shape (n_classes,), default=None
Classes across all calls to partial_fit.
Can be obtained by via `np.unique(y_all)`, where y_all is the
target vector of the entire dataset.
This argument is required for the first call to partial_fit
and can be omitted in the subsequent calls.
Note that y doesn't need to contain all labels in `classes`.
sample_weight : array-like, shape (n_samples,), default=None
Weights applied to individual samples.
If not provided, uniform weights are assumed.
Returns
-------
self : object
Returns an instance of self.
"""
self._validate_params(for_partial_fit=True)
if self.class_weight in ["balanced"]:
raise ValueError(
"class_weight '{0}' is not supported for "
"partial_fit. In order to use 'balanced' weights,"
" use compute_class_weight('{0}', "
"classes=classes, y=y). "
"In place of y you can us a large enough sample "
"of the full training set target to properly "
"estimate the class frequency distributions. "
"Pass the resulting weights as the class_weight "
"parameter.".format(self.class_weight)
)
return self._partial_fit(
X,
y,
alpha=self.alpha,
C=1.0,
loss=self.loss,
learning_rate=self.learning_rate,
max_iter=1,<|fim▁hole|> sample_weight=sample_weight,
coef_init=None,
intercept_init=None,
)
def fit(self, X, y, coef_init=None, intercept_init=None, sample_weight=None):
"""Fit linear model with Stochastic Gradient Descent.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Training data.
y : ndarray of shape (n_samples,)
Target values.
coef_init : ndarray of shape (n_classes, n_features), default=None
The initial coefficients to warm-start the optimization.
intercept_init : ndarray of shape (n_classes,), default=None
The initial intercept to warm-start the optimization.
sample_weight : array-like, shape (n_samples,), default=None
Weights applied to individual samples.
If not provided, uniform weights are assumed. These weights will
be multiplied with class_weight (passed through the
constructor) if class_weight is specified.
Returns
-------
self : object
Returns an instance of self.
"""
return self._fit(
X,
y,
alpha=self.alpha,
C=1.0,
loss=self.loss,
learning_rate=self.learning_rate,
coef_init=coef_init,
intercept_init=intercept_init,
sample_weight=sample_weight,
)
class SGDClassifier(BaseSGDClassifier):
"""Linear classifiers (SVM, logistic regression, etc.) with SGD training.
This estimator implements regularized linear models with stochastic
gradient descent (SGD) learning: the gradient of the loss is estimated
each sample at a time and the model is updated along the way with a
decreasing strength schedule (aka learning rate). SGD allows minibatch
(online/out-of-core) learning via the `partial_fit` method.
For best results using the default learning rate schedule, the data should
have zero mean and unit variance.
This implementation works with data represented as dense or sparse arrays
of floating point values for the features. The model it fits can be
controlled with the loss parameter; by default, it fits a linear support
vector machine (SVM).
The regularizer is a penalty added to the loss function that shrinks model
parameters towards the zero vector using either the squared euclidean norm
L2 or the absolute norm L1 or a combination of both (Elastic Net). If the
parameter update crosses the 0.0 value because of the regularizer, the
update is truncated to 0.0 to allow for learning sparse models and achieve
online feature selection.
Read more in the :ref:`User Guide <sgd>`.
Parameters
----------
loss : str, default='hinge'
The loss function to be used. Defaults to 'hinge', which gives a
linear SVM.
The possible options are 'hinge', 'log', 'modified_huber',
'squared_hinge', 'perceptron', or a regression loss: 'squared_error',
'huber', 'epsilon_insensitive', or 'squared_epsilon_insensitive'.
The 'log' loss gives logistic regression, a probabilistic classifier.
'modified_huber' is another smooth loss that brings tolerance to
outliers as well as probability estimates.
'squared_hinge' is like hinge but is quadratically penalized.
'perceptron' is the linear loss used by the perceptron algorithm.
The other losses are designed for regression but can be useful in
classification as well; see
:class:`~sklearn.linear_model.SGDRegressor` for a description.
More details about the losses formulas can be found in the
:ref:`User Guide <sgd_mathematical_formulation>`.
.. deprecated:: 1.0
The loss 'squared_loss' was deprecated in v1.0 and will be removed
in version 1.2. Use `loss='squared_error'` which is equivalent.
penalty : {'l2', 'l1', 'elasticnet'}, default='l2'
The penalty (aka regularization term) to be used. Defaults to 'l2'
which is the standard regularizer for linear SVM models. 'l1' and
'elasticnet' might bring sparsity to the model (feature selection)
not achievable with 'l2'.
alpha : float, default=0.0001
Constant that multiplies the regularization term. The higher the
value, the stronger the regularization.
        Also used to compute the learning rate when `learning_rate` is
        set to 'optimal'.
Values must be in the range `[0.0, inf)`.
l1_ratio : float, default=0.15
The Elastic Net mixing parameter, with 0 <= l1_ratio <= 1.
l1_ratio=0 corresponds to L2 penalty, l1_ratio=1 to L1.
Only used if `penalty` is 'elasticnet'.
Values must be in the range `[0.0, 1.0]`.
fit_intercept : bool, default=True
Whether the intercept should be estimated or not. If False, the
data is assumed to be already centered.
max_iter : int, default=1000
The maximum number of passes over the training data (aka epochs).
It only impacts the behavior in the ``fit`` method, and not the
:meth:`partial_fit` method.
Values must be in the range `[1, inf)`.
.. versionadded:: 0.19
tol : float, default=1e-3
The stopping criterion. If it is not None, training will stop
when (loss > best_loss - tol) for ``n_iter_no_change`` consecutive
epochs.
Convergence is checked against the training loss or the
validation loss depending on the `early_stopping` parameter.
Values must be in the range `[0.0, inf)`.
.. versionadded:: 0.19
shuffle : bool, default=True
Whether or not the training data should be shuffled after each epoch.
verbose : int, default=0
The verbosity level.
Values must be in the range `[0, inf)`.
epsilon : float, default=0.1
Epsilon in the epsilon-insensitive loss functions; only if `loss` is
'huber', 'epsilon_insensitive', or 'squared_epsilon_insensitive'.
For 'huber', determines the threshold at which it becomes less
important to get the prediction exactly right.
For epsilon-insensitive, any differences between the current prediction
and the correct label are ignored if they are less than this threshold.
Values must be in the range `[0.0, inf)`.
n_jobs : int, default=None
The number of CPUs to use to do the OVA (One Versus All, for
multi-class problems) computation.
``None`` means 1 unless in a :obj:`joblib.parallel_backend` context.
``-1`` means using all processors. See :term:`Glossary <n_jobs>`
for more details.
random_state : int, RandomState instance, default=None
Used for shuffling the data, when ``shuffle`` is set to ``True``.
Pass an int for reproducible output across multiple function calls.
See :term:`Glossary <random_state>`.
Integer values must be in the range `[0, 2**32 - 1]`.
learning_rate : str, default='optimal'
The learning rate schedule:
- 'constant': `eta = eta0`
- 'optimal': `eta = 1.0 / (alpha * (t + t0))`
where `t0` is chosen by a heuristic proposed by Leon Bottou.
- 'invscaling': `eta = eta0 / pow(t, power_t)`
- 'adaptive': `eta = eta0`, as long as the training keeps decreasing.
Each time n_iter_no_change consecutive epochs fail to decrease the
training loss by tol or fail to increase validation score by tol if
`early_stopping` is `True`, the current learning rate is divided by 5.
.. versionadded:: 0.20
Added 'adaptive' option
eta0 : float, default=0.0
The initial learning rate for the 'constant', 'invscaling' or
'adaptive' schedules. The default value is 0.0 as eta0 is not used by
the default schedule 'optimal'.
Values must be in the range `(0.0, inf)`.
power_t : float, default=0.5
The exponent for inverse scaling learning rate [default 0.5].
Values must be in the range `(-inf, inf)`.
early_stopping : bool, default=False
Whether to use early stopping to terminate training when validation
score is not improving. If set to `True`, it will automatically set aside
a stratified fraction of training data as validation and terminate
training when validation score returned by the `score` method is not
improving by at least tol for n_iter_no_change consecutive epochs.
.. versionadded:: 0.20
Added 'early_stopping' option
validation_fraction : float, default=0.1
The proportion of training data to set aside as validation set for
early stopping. Must be between 0 and 1.
Only used if `early_stopping` is True.
Values must be in the range `(0.0, 1.0)`.
.. versionadded:: 0.20
Added 'validation_fraction' option
n_iter_no_change : int, default=5
Number of iterations with no improvement to wait before stopping
fitting.
Convergence is checked against the training loss or the
validation loss depending on the `early_stopping` parameter.
Integer values must be in the range `[1, max_iter)`.
.. versionadded:: 0.20
Added 'n_iter_no_change' option
class_weight : dict, {class_label: weight} or "balanced", default=None
Preset for the class_weight fit parameter.
Weights associated with classes. If not given, all classes
are supposed to have weight one.
The "balanced" mode uses the values of y to automatically adjust
weights inversely proportional to class frequencies in the input data
as ``n_samples / (n_classes * np.bincount(y))``.
warm_start : bool, default=False
When set to True, reuse the solution of the previous call to fit as
initialization, otherwise, just erase the previous solution.
See :term:`the Glossary <warm_start>`.
Repeatedly calling fit or partial_fit when warm_start is True can
result in a different solution than when calling fit a single time
because of the way the data is shuffled.
If a dynamic learning rate is used, the learning rate is adapted
depending on the number of samples already seen. Calling ``fit`` resets
this counter, while ``partial_fit`` will result in increasing the
existing counter.
average : bool or int, default=False
When set to `True`, computes the averaged SGD weights across all
updates and stores the result in the ``coef_`` attribute. If set to
an int greater than 1, averaging will begin once the total number of
samples seen reaches `average`. So ``average=10`` will begin
averaging after seeing 10 samples.
Integer values must be in the range `[1, n_samples]`.
Attributes
----------
coef_ : ndarray of shape (1, n_features) if n_classes == 2 else \
(n_classes, n_features)
Weights assigned to the features.
intercept_ : ndarray of shape (1,) if n_classes == 2 else (n_classes,)
Constants in decision function.
n_iter_ : int
The actual number of iterations before reaching the stopping criterion.
For multiclass fits, it is the maximum over every binary fit.
loss_function_ : concrete ``LossFunction``
classes_ : array of shape (n_classes,)
t_ : int
Number of weight updates performed during training.
Same as ``(n_iter_ * n_samples)``.
n_features_in_ : int
Number of features seen during :term:`fit`.
.. versionadded:: 0.24
feature_names_in_ : ndarray of shape (`n_features_in_`,)
Names of features seen during :term:`fit`. Defined only when `X`
has feature names that are all strings.
.. versionadded:: 1.0
See Also
--------
sklearn.svm.LinearSVC : Linear support vector classification.
LogisticRegression : Logistic regression.
Perceptron : Inherits from SGDClassifier. ``Perceptron()`` is equivalent to
``SGDClassifier(loss="perceptron", eta0=1, learning_rate="constant",
penalty=None)``.
Examples
--------
>>> import numpy as np
>>> from sklearn.linear_model import SGDClassifier
>>> from sklearn.preprocessing import StandardScaler
>>> from sklearn.pipeline import make_pipeline
>>> X = np.array([[-1, -1], [-2, -1], [1, 1], [2, 1]])
>>> Y = np.array([1, 1, 2, 2])
>>> # Always scale the input. The most convenient way is to use a pipeline.
>>> clf = make_pipeline(StandardScaler(),
... SGDClassifier(max_iter=1000, tol=1e-3))
>>> clf.fit(X, Y)
Pipeline(steps=[('standardscaler', StandardScaler()),
('sgdclassifier', SGDClassifier())])
>>> print(clf.predict([[-0.8, -1]]))
[1]
"""
def __init__(
self,
loss="hinge",
*,
penalty="l2",
alpha=0.0001,
l1_ratio=0.15,
fit_intercept=True,
max_iter=1000,
tol=1e-3,
shuffle=True,
verbose=0,
epsilon=DEFAULT_EPSILON,
n_jobs=None,
random_state=None,
learning_rate="optimal",
eta0=0.0,
power_t=0.5,
early_stopping=False,
validation_fraction=0.1,
n_iter_no_change=5,
class_weight=None,
warm_start=False,
average=False,
):
super().__init__(
loss=loss,
penalty=penalty,
alpha=alpha,
l1_ratio=l1_ratio,
fit_intercept=fit_intercept,
max_iter=max_iter,
tol=tol,
shuffle=shuffle,
verbose=verbose,
epsilon=epsilon,
n_jobs=n_jobs,
random_state=random_state,
learning_rate=learning_rate,
eta0=eta0,
power_t=power_t,
early_stopping=early_stopping,
validation_fraction=validation_fraction,
n_iter_no_change=n_iter_no_change,
class_weight=class_weight,
warm_start=warm_start,
average=average,
)
def _check_proba(self):
if self.loss not in ("log", "modified_huber"):
raise AttributeError(
"probability estimates are not available for loss=%r" % self.loss
)
return True
@available_if(_check_proba)
def predict_proba(self, X):
"""Probability estimates.
This method is only available for log loss and modified Huber loss.
Multiclass probability estimates are derived from binary (one-vs.-rest)
estimates by simple normalization, as recommended by Zadrozny and
Elkan.
Binary probability estimates for loss="modified_huber" are given by
(clip(decision_function(X), -1, 1) + 1) / 2. For other loss functions
it is necessary to perform proper probability calibration by wrapping
the classifier with
:class:`~sklearn.calibration.CalibratedClassifierCV` instead.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Input data for prediction.
Returns
-------
ndarray of shape (n_samples, n_classes)
Returns the probability of the sample for each class in the model,
where classes are ordered as they are in `self.classes_`.
References
----------
Zadrozny and Elkan, "Transforming classifier scores into multiclass
probability estimates", SIGKDD'02,
https://dl.acm.org/doi/pdf/10.1145/775047.775151
The justification for the formula in the loss="modified_huber"
case is in the appendix B in:
http://jmlr.csail.mit.edu/papers/volume2/zhang02c/zhang02c.pdf
"""
check_is_fitted(self)
if self.loss == "log":
return self._predict_proba_lr(X)
elif self.loss == "modified_huber":
binary = len(self.classes_) == 2
scores = self.decision_function(X)
if binary:
prob2 = np.ones((scores.shape[0], 2))
prob = prob2[:, 1]
else:
prob = scores
np.clip(scores, -1, 1, prob)
prob += 1.0
prob /= 2.0
if binary:
prob2[:, 0] -= prob
prob = prob2
else:
# the above might assign zero to all classes, which doesn't
# normalize neatly; work around this to produce uniform
# probabilities
prob_sum = prob.sum(axis=1)
all_zero = prob_sum == 0
if np.any(all_zero):
prob[all_zero, :] = 1
prob_sum[all_zero] = len(self.classes_)
# normalize
prob /= prob_sum.reshape((prob.shape[0], -1))
return prob
else:
raise NotImplementedError(
"predict_(log_)proba only supported when"
" loss='log' or loss='modified_huber' "
"(%r given)"
% self.loss
)
@available_if(_check_proba)
def predict_log_proba(self, X):
"""Log of probability estimates.
This method is only available for log loss and modified Huber loss.
When loss="modified_huber", probability estimates may be hard zeros
and ones, so taking the logarithm is not possible.
See ``predict_proba`` for details.
Parameters
----------
X : {array-like, sparse matrix} of shape (n_samples, n_features)
Input data for prediction.
Returns
-------
T : array-like, shape (n_samples, n_classes)
Returns the log-probability of the sample for each class in the
model, where classes are ordered as they are in
`self.classes_`.
"""
return np.log(self.predict_proba(X))
def _more_tags(self):
return {
"_xfail_checks": {
"check_sample_weights_invariance": (
"zero sample_weight is not equivalent to removing samples"
),
}
}
class BaseSGDRegressor(RegressorMixin, BaseSGD):
# TODO: Remove squared_loss in v1.2
loss_functions = {
"squared_error": (SquaredLoss,),
"squared_loss": (SquaredLoss,),
"huber": (Huber, DEFAULT_EPSILON),
"epsilon_insensitive": (EpsilonInsensitive, DEFAULT_EPSILON),
"squared_epsilon_insensitive": (SquaredEpsilonInsensitive, DEFAULT_EPSILON),
}
@abstractmethod
def __init__(
self,
loss="squared_error",
*,
penalty="l2",
alpha=0.0001,
l1_ratio=0.15,
fit_intercept=True,
max_iter=1000,
tol=1e-3,
shuffle=True,
verbose=0,
epsilon=DEFAULT_EPSILON,
random_state=None,
learning_rate="invscaling",
eta0=0.01,
power_t=0.25,
early_stopping=False,
validation_fraction=0.1,
n_iter_no_change=5,
warm_start=False,
average=False,
):
super().__init__(
loss=loss,
penalty=penalty,
alpha=alpha,
l1_ratio=l1_ratio,
fit_intercept=fit_intercept,
max_iter=max_iter,
tol=tol,
shuffle=shuffle,
verbose=verbose,
epsilon=epsilon,
random_state=random_state,
learning_rate=learning_rate,
eta0=eta0,
power_t=power_t,
early_stopping=early_stopping,
validation_fraction=validation_fraction,
n_iter_no_change=n_iter_no_change,
warm_start=warm_start,
average=average,
)
def _partial_fit(
self,
X,
y,
alpha,
C,
loss,
learning_rate,
max_iter,
sample_weight,
coef_init,
intercept_init,
):
first_call = getattr(self, "coef_", None) is None
X, y = self._validate_data(
X,
y,
accept_sparse="csr",
copy=False,
order="C",
dtype=np.float64,
accept_large_sparse=False,
reset=first_call,
)
y = y.astype(np.float64, copy=False)
n_samples, n_features = X.shape
sample_weight = _check_sample_weight(sample_weight, X)
# Allocate datastructures from input arguments
if first_call:
self._allocate_parameter_mem(1, n_features, coef_init, intercept_init)
if self.average > 0 and getattr(self, "_average_coef", None) is None:
self._average_coef = np.zeros(n_features, dtype=np.float64, order="C")
self._average_intercept = np.zeros(1, dtype=np.float64, order="C")
self._fit_regressor(
X, y, alpha, C, loss, learning_rate, sample_weight, max_iter
)
return self
def partial_fit(self, X, y, sample_weight=None):
"""Perform one epoch of stochastic gradient descent on given samples.
Internally, this method uses ``max_iter = 1``. Therefore, it is not
guaranteed that a minimum of the cost function is reached after calling
it once. Matters such as objective convergence and early stopping
should be handled by the user.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Subset of training data.
y : numpy array of shape (n_samples,)
Subset of target values.
sample_weight : array-like, shape (n_samples,), default=None
Weights applied to individual samples.
If not provided, uniform weights are assumed.
Returns
-------
self : object
Returns an instance of self.
"""
self._validate_params(for_partial_fit=True)
return self._partial_fit(
X,
y,
self.alpha,
C=1.0,
loss=self.loss,
learning_rate=self.learning_rate,
max_iter=1,
sample_weight=sample_weight,
coef_init=None,
intercept_init=None,
)
def _fit(
self,
X,
y,
alpha,
C,
loss,
learning_rate,
coef_init=None,
intercept_init=None,
sample_weight=None,
):
self._validate_params()
if self.warm_start and getattr(self, "coef_", None) is not None:
if coef_init is None:
coef_init = self.coef_
if intercept_init is None:
intercept_init = self.intercept_
else:
self.coef_ = None
self.intercept_ = None
# Clear iteration count for multiple call to fit.
self.t_ = 1.0
self._partial_fit(
X,
y,
alpha,
C,
loss,
learning_rate,
self.max_iter,
sample_weight,
coef_init,
intercept_init,
)
if (
self.tol is not None
and self.tol > -np.inf
and self.n_iter_ == self.max_iter
):
warnings.warn(
"Maximum number of iteration reached before "
"convergence. Consider increasing max_iter to "
"improve the fit.",
ConvergenceWarning,
)
return self
def fit(self, X, y, coef_init=None, intercept_init=None, sample_weight=None):
"""Fit linear model with Stochastic Gradient Descent.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Training data.
y : ndarray of shape (n_samples,)
Target values.
coef_init : ndarray of shape (n_features,), default=None
The initial coefficients to warm-start the optimization.
intercept_init : ndarray of shape (1,), default=None
The initial intercept to warm-start the optimization.
sample_weight : array-like, shape (n_samples,), default=None
Weights applied to individual samples (1. for unweighted).
Returns
-------
self : object
Fitted `SGDRegressor` estimator.
"""
return self._fit(
X,
y,
alpha=self.alpha,
C=1.0,
loss=self.loss,
learning_rate=self.learning_rate,
coef_init=coef_init,
intercept_init=intercept_init,
sample_weight=sample_weight,
)
def _decision_function(self, X):
"""Predict using the linear model
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Returns
-------
ndarray of shape (n_samples,)
Predicted target values per element in X.
"""
check_is_fitted(self)
X = self._validate_data(X, accept_sparse="csr", reset=False)
scores = safe_sparse_dot(X, self.coef_.T, dense_output=True) + self.intercept_
return scores.ravel()
def predict(self, X):
"""Predict using the linear model.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Input data.
Returns
-------
ndarray of shape (n_samples,)
Predicted target values per element in X.
"""
return self._decision_function(X)
def _fit_regressor(
self, X, y, alpha, C, loss, learning_rate, sample_weight, max_iter
):
dataset, intercept_decay = make_dataset(X, y, sample_weight)
loss_function = self._get_loss_function(loss)
penalty_type = self._get_penalty_type(self.penalty)
learning_rate_type = self._get_learning_rate_type(learning_rate)
if not hasattr(self, "t_"):
self.t_ = 1.0
validation_mask = self._make_validation_split(y)
validation_score_cb = self._make_validation_score_cb(
validation_mask, X, y, sample_weight
)
random_state = check_random_state(self.random_state)
# numpy mtrand expects a C long which is a signed 32 bit integer under
# Windows
seed = random_state.randint(0, np.iinfo(np.int32).max)
tol = self.tol if self.tol is not None else -np.inf
if self.average:
coef = self._standard_coef
intercept = self._standard_intercept
average_coef = self._average_coef
average_intercept = self._average_intercept
else:
coef = self.coef_
intercept = self.intercept_
average_coef = None # Not used
average_intercept = [0] # Not used
coef, intercept, average_coef, average_intercept, self.n_iter_ = _plain_sgd(
coef,
intercept[0],
average_coef,
average_intercept[0],
loss_function,
penalty_type,
alpha,
C,
self.l1_ratio,
dataset,
validation_mask,
self.early_stopping,
validation_score_cb,
int(self.n_iter_no_change),
max_iter,
tol,
int(self.fit_intercept),
int(self.verbose),
int(self.shuffle),
seed,
1.0,
1.0,
learning_rate_type,
self.eta0,
self.power_t,
0,
self.t_,
intercept_decay,
self.average,
)
self.t_ += self.n_iter_ * X.shape[0]
if self.average > 0:
self._average_intercept = np.atleast_1d(average_intercept)
self._standard_intercept = np.atleast_1d(intercept)
if self.average <= self.t_ - 1.0:
# made enough updates for averaging to be taken into account
self.coef_ = average_coef
self.intercept_ = np.atleast_1d(average_intercept)
else:
self.coef_ = coef
self.intercept_ = np.atleast_1d(intercept)
else:
self.intercept_ = np.atleast_1d(intercept)
class SGDRegressor(BaseSGDRegressor):
"""Linear model fitted by minimizing a regularized empirical loss with SGD.
SGD stands for Stochastic Gradient Descent: the gradient of the loss is
estimated one sample at a time and the model is updated along the way with
a decreasing strength schedule (aka learning rate).
The regularizer is a penalty added to the loss function that shrinks model
parameters towards the zero vector using either the squared euclidean norm
L2 or the absolute norm L1 or a combination of both (Elastic Net). If the
parameter update crosses the 0.0 value because of the regularizer, the
update is truncated to 0.0 to allow for learning sparse models and achieve
online feature selection.
This implementation works with data represented as dense numpy arrays of
floating point values for the features.
Read more in the :ref:`User Guide <sgd>`.
Parameters
----------
loss : str, default='squared_error'
The loss function to be used. The possible values are 'squared_error',
'huber', 'epsilon_insensitive', or 'squared_epsilon_insensitive'
The 'squared_error' refers to the ordinary least squares fit.
'huber' modifies 'squared_error' to focus less on getting outliers
correct by switching from squared to linear loss past a distance of
epsilon. 'epsilon_insensitive' ignores errors less than epsilon and is
linear past that; this is the loss function used in SVR.
'squared_epsilon_insensitive' is the same but becomes squared loss past
a tolerance of epsilon.
More details about the losses formulas can be found in the
:ref:`User Guide <sgd_mathematical_formulation>`.
.. deprecated:: 1.0
The loss 'squared_loss' was deprecated in v1.0 and will be removed
in version 1.2. Use `loss='squared_error'` which is equivalent.
penalty : {'l2', 'l1', 'elasticnet'}, default='l2'
The penalty (aka regularization term) to be used. Defaults to 'l2'
which is the standard regularizer for linear SVM models. 'l1' and
'elasticnet' might bring sparsity to the model (feature selection)
not achievable with 'l2'.
alpha : float, default=0.0001
Constant that multiplies the regularization term. The higher the
value, the stronger the regularization.
Also used to compute the learning rate when `learning_rate` is set to
'optimal'.
l1_ratio : float, default=0.15
The Elastic Net mixing parameter, with 0 <= l1_ratio <= 1.
l1_ratio=0 corresponds to L2 penalty, l1_ratio=1 to L1.
Only used if `penalty` is 'elasticnet'.
fit_intercept : bool, default=True
Whether the intercept should be estimated or not. If False, the
data is assumed to be already centered.
max_iter : int, default=1000
The maximum number of passes over the training data (aka epochs).
It only impacts the behavior in the ``fit`` method, and not the
:meth:`partial_fit` method.
.. versionadded:: 0.19
tol : float, default=1e-3
The stopping criterion. If it is not None, training will stop
when (loss > best_loss - tol) for ``n_iter_no_change`` consecutive
epochs.
Convergence is checked against the training loss or the
validation loss depending on the `early_stopping` parameter.
.. versionadded:: 0.19
shuffle : bool, default=True
Whether or not the training data should be shuffled after each epoch.
verbose : int, default=0
The verbosity level.
epsilon : float, default=0.1
Epsilon in the epsilon-insensitive loss functions; only if `loss` is
'huber', 'epsilon_insensitive', or 'squared_epsilon_insensitive'.
For 'huber', determines the threshold at which it becomes less
important to get the prediction exactly right.
For epsilon-insensitive, any differences between the current prediction
and the correct label are ignored if they are less than this threshold.
random_state : int, RandomState instance, default=None
Used for shuffling the data, when ``shuffle`` is set to ``True``.
Pass an int for reproducible output across multiple function calls.
See :term:`Glossary <random_state>`.
learning_rate : str, default='invscaling'
The learning rate schedule:
- 'constant': `eta = eta0`
- 'optimal': `eta = 1.0 / (alpha * (t + t0))`
where t0 is chosen by a heuristic proposed by Leon Bottou.
- 'invscaling': `eta = eta0 / pow(t, power_t)`
- 'adaptive': `eta = eta0`, as long as the training loss keeps decreasing.
Each time n_iter_no_change consecutive epochs fail to decrease the
training loss by tol or fail to increase validation score by tol if
early_stopping is True, the current learning rate is divided by 5.
.. versionadded:: 0.20
Added 'adaptive' option
eta0 : float, default=0.01
The initial learning rate for the 'constant', 'invscaling' or
'adaptive' schedules. The default value is 0.01.
power_t : float, default=0.25
The exponent for inverse scaling learning rate.
early_stopping : bool, default=False
Whether to use early stopping to terminate training when validation
score is not improving. If set to True, it will automatically set aside
a fraction of training data as validation and terminate
training when validation score returned by the `score` method is not
improving by at least `tol` for `n_iter_no_change` consecutive
epochs.
.. versionadded:: 0.20
Added 'early_stopping' option
validation_fraction : float, default=0.1
The proportion of training data to set aside as validation set for
early stopping. Must be between 0 and 1.
Only used if `early_stopping` is True.
.. versionadded:: 0.20
Added 'validation_fraction' option
n_iter_no_change : int, default=5
Number of iterations with no improvement to wait before stopping
fitting.
Convergence is checked against the training loss or the
validation loss depending on the `early_stopping` parameter.
.. versionadded:: 0.20
Added 'n_iter_no_change' option
warm_start : bool, default=False
When set to True, reuse the solution of the previous call to fit as
initialization, otherwise, just erase the previous solution.
See :term:`the Glossary <warm_start>`.
Repeatedly calling fit or partial_fit when warm_start is True can
result in a different solution than when calling fit a single time
because of the way the data is shuffled.
If a dynamic learning rate is used, the learning rate is adapted
depending on the number of samples already seen. Calling ``fit`` resets
this counter, while ``partial_fit`` will result in increasing the
existing counter.
average : bool or int, default=False
When set to True, computes the averaged SGD weights across all
updates and stores the result in the ``coef_`` attribute. If set to
an int greater than 1, averaging will begin once the total number of
samples seen reaches `average`. So ``average=10`` will begin
averaging after seeing 10 samples.
Attributes
----------
coef_ : ndarray of shape (n_features,)
Weights assigned to the features.
intercept_ : ndarray of shape (1,)
The intercept term.
n_iter_ : int
The actual number of iterations before reaching the stopping criterion.
t_ : int
Number of weight updates performed during training.
Same as ``(n_iter_ * n_samples)``.
n_features_in_ : int
Number of features seen during :term:`fit`.
.. versionadded:: 0.24
feature_names_in_ : ndarray of shape (`n_features_in_`,)
Names of features seen during :term:`fit`. Defined only when `X`
has feature names that are all strings.
.. versionadded:: 1.0
See Also
--------
HuberRegressor : Linear regression model that is robust to outliers.
Lars : Least Angle Regression model.
Lasso : Linear Model trained with L1 prior as regularizer.
RANSACRegressor : RANSAC (RANdom SAmple Consensus) algorithm.
Ridge : Linear least squares with l2 regularization.
sklearn.svm.SVR : Epsilon-Support Vector Regression.
TheilSenRegressor : Theil-Sen Estimator robust multivariate regression model.
Examples
--------
>>> import numpy as np
>>> from sklearn.linear_model import SGDRegressor
>>> from sklearn.pipeline import make_pipeline
>>> from sklearn.preprocessing import StandardScaler
>>> n_samples, n_features = 10, 5
>>> rng = np.random.RandomState(0)
>>> y = rng.randn(n_samples)
>>> X = rng.randn(n_samples, n_features)
>>> # Always scale the input. The most convenient way is to use a pipeline.
>>> reg = make_pipeline(StandardScaler(),
... SGDRegressor(max_iter=1000, tol=1e-3))
>>> reg.fit(X, y)
Pipeline(steps=[('standardscaler', StandardScaler()),
('sgdregressor', SGDRegressor())])
"""
def __init__(
self,
loss="squared_error",
*,
penalty="l2",
alpha=0.0001,
l1_ratio=0.15,
fit_intercept=True,
max_iter=1000,
tol=1e-3,
shuffle=True,
verbose=0,
epsilon=DEFAULT_EPSILON,
random_state=None,
learning_rate="invscaling",
eta0=0.01,
power_t=0.25,
early_stopping=False,
validation_fraction=0.1,
n_iter_no_change=5,
warm_start=False,
average=False,
):
super().__init__(
loss=loss,
penalty=penalty,
alpha=alpha,
l1_ratio=l1_ratio,
fit_intercept=fit_intercept,
max_iter=max_iter,
tol=tol,
shuffle=shuffle,
verbose=verbose,
epsilon=epsilon,
random_state=random_state,
learning_rate=learning_rate,
eta0=eta0,
power_t=power_t,
early_stopping=early_stopping,
validation_fraction=validation_fraction,
n_iter_no_change=n_iter_no_change,
warm_start=warm_start,
average=average,
)
def _more_tags(self):
return {
"_xfail_checks": {
"check_sample_weights_invariance": (
"zero sample_weight is not equivalent to removing samples"
),
}
}
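# Illustrative sketch (not part of the original module): how the SGDRegressor
# class defined above is typically driven with `partial_fit` for out-of-core
# regression. Only the public scikit-learn API is assumed;
# `stream_of_batches()` is a hypothetical data source supplied by the caller.
#
#     from sklearn.linear_model import SGDRegressor
#     from sklearn.preprocessing import StandardScaler
#
#     scaler = StandardScaler()
#     reg = SGDRegressor(learning_rate="invscaling", eta0=0.01, power_t=0.25)
#     for X_batch, y_batch in stream_of_batches():  # hypothetical generator
#         X_batch = scaler.partial_fit(X_batch).transform(X_batch)
#         reg.partial_fit(X_batch, y_batch)
#     # Unlike `fit`, `partial_fit` keeps increasing the internal counter `t_`,
#     # so the 'invscaling' schedule eta = eta0 / pow(t, power_t) keeps
#     # decaying across batches instead of being reset.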
class SGDOneClassSVM(BaseSGD, OutlierMixin):
"""Solves linear One-Class SVM using Stochastic Gradient Descent.
This implementation is meant to be used with a kernel approximation
technique (e.g. `sklearn.kernel_approximation.Nystroem`) to obtain results
similar to `sklearn.svm.OneClassSVM` which uses a Gaussian kernel by
default.
Read more in the :ref:`User Guide <sgd_online_one_class_svm>`.
.. versionadded:: 1.0
Parameters
----------
nu : float, default=0.5
The nu parameter of the One Class SVM: an upper bound on the
fraction of training errors and a lower bound of the fraction of
support vectors. Should be in the interval (0, 1]. By default 0.5
will be taken.
fit_intercept : bool, default=True
Whether the intercept should be estimated or not. Defaults to True.
max_iter : int, default=1000
The maximum number of passes over the training data (aka epochs).
It only impacts the behavior in the ``fit`` method, and not the
`partial_fit`. Defaults to 1000.
tol : float or None, default=1e-3
The stopping criterion. If it is not None, the iterations will stop
when (loss > previous_loss - tol). Defaults to 1e-3.
shuffle : bool, default=True
Whether or not the training data should be shuffled after each epoch.
Defaults to True.
verbose : int, default=0
The verbosity level.
random_state : int, RandomState instance or None, default=None
The seed of the pseudo random number generator to use when shuffling
the data. If int, random_state is the seed used by the random number
generator; If RandomState instance, random_state is the random number
generator; If None, the random number generator is the RandomState
instance used by `np.random`.
learning_rate : {'constant', 'optimal', 'invscaling', 'adaptive'}, default='optimal'
The learning rate schedule to use with `fit`. (If using `partial_fit`,
learning rate must be controlled directly).
- 'constant': `eta = eta0`
- 'optimal': `eta = 1.0 / (alpha * (t + t0))`
where t0 is chosen by a heuristic proposed by Leon Bottou.
- 'invscaling': `eta = eta0 / pow(t, power_t)`
- 'adaptive': `eta = eta0`, as long as the training loss keeps decreasing.
Each time n_iter_no_change consecutive epochs fail to decrease the
training loss by tol or fail to increase validation score by tol if
early_stopping is True, the current learning rate is divided by 5.
eta0 : float, default=0.0
The initial learning rate for the 'constant', 'invscaling' or
'adaptive' schedules. The default value is 0.0 as eta0 is not used by
the default schedule 'optimal'.
power_t : float, default=0.5
The exponent for inverse scaling learning rate [default 0.5].
warm_start : bool, default=False
When set to True, reuse the solution of the previous call to fit as
initialization, otherwise, just erase the previous solution.
See :term:`the Glossary <warm_start>`.
Repeatedly calling fit or partial_fit when warm_start is True can
result in a different solution than when calling fit a single time
because of the way the data is shuffled.
If a dynamic learning rate is used, the learning rate is adapted
depending on the number of samples already seen. Calling ``fit`` resets
this counter, while ``partial_fit`` will result in increasing the
existing counter.
average : bool or int, default=False
When set to True, computes the averaged SGD weights and stores the
result in the ``coef_`` attribute. If set to an int greater than 1,
averaging will begin once the total number of samples seen reaches
average. So ``average=10`` will begin averaging after seeing 10
samples.
Attributes
----------
coef_ : ndarray of shape (1, n_features)
Weights assigned to the features.
offset_ : ndarray of shape (1,)
Offset used to define the decision function from the raw scores.
We have the relation: decision_function = score_samples - offset.
n_iter_ : int
The actual number of iterations to reach the stopping criterion.
t_ : int
Number of weight updates performed during training.
Same as ``(n_iter_ * n_samples)``.
loss_function_ : concrete ``LossFunction``
n_features_in_ : int
Number of features seen during :term:`fit`.
.. versionadded:: 0.24
feature_names_in_ : ndarray of shape (`n_features_in_`,)
Names of features seen during :term:`fit`. Defined only when `X`
has feature names that are all strings.
.. versionadded:: 1.0
See Also
--------
sklearn.svm.OneClassSVM : Unsupervised Outlier Detection.
Notes
-----
This estimator has a linear complexity in the number of training samples
and is thus better suited than the `sklearn.svm.OneClassSVM`
implementation for datasets with a large number of training samples (say
> 10,000).
Examples
--------
>>> import numpy as np
>>> from sklearn import linear_model
>>> X = np.array([[-1, -1], [-2, -1], [1, 1], [2, 1]])
>>> clf = linear_model.SGDOneClassSVM(random_state=42)
>>> clf.fit(X)
SGDOneClassSVM(random_state=42)
>>> print(clf.predict([[4, 4]]))
[1]
"""
loss_functions = {"hinge": (Hinge, 1.0)}
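# Illustrative sketch (not part of the original class): the kernel
# approximation workflow mentioned in the docstring above. Only public
# scikit-learn APIs (Nystroem, make_pipeline) are assumed and the data is
# random toy data.
#
#     import numpy as np
#     from sklearn.kernel_approximation import Nystroem
#     from sklearn.linear_model import SGDOneClassSVM
#     from sklearn.pipeline import make_pipeline
#
#     rng = np.random.RandomState(0)
#     X = rng.randn(500, 2)
#     # approximates sklearn.svm.OneClassSVM with an RBF kernel while keeping
#     # a training cost linear in the number of samples
#     clf = make_pipeline(Nystroem(gamma=0.1, random_state=0),
#                         SGDOneClassSVM(nu=0.1, random_state=0))
#     clf.fit(X)
#     labels = clf.predict(X)  # +1 for inliers, -1 for outliers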
def __init__(
self,
nu=0.5,
fit_intercept=True,
max_iter=1000,
tol=1e-3,
shuffle=True,
verbose=0,
random_state=None,
learning_rate="optimal",
eta0=0.0,
power_t=0.5,
warm_start=False,
average=False,
):
alpha = nu / 2
self.nu = nu
super(SGDOneClassSVM, self).__init__(
loss="hinge",
penalty="l2",
alpha=alpha,
C=1.0,
l1_ratio=0,
fit_intercept=fit_intercept,
max_iter=max_iter,
tol=tol,
shuffle=shuffle,
verbose=verbose,
epsilon=DEFAULT_EPSILON,
random_state=random_state,
learning_rate=learning_rate,
eta0=eta0,
power_t=power_t,
early_stopping=False,
validation_fraction=0.1,
n_iter_no_change=5,
warm_start=warm_start,
average=average,
)
def _validate_params(self, for_partial_fit=False):
"""Validate input params."""
if not (0 < self.nu <= 1):
raise ValueError("nu must be in (0, 1], got nu=%f" % self.nu)
super(SGDOneClassSVM, self)._validate_params(for_partial_fit=for_partial_fit)
def _fit_one_class(self, X, alpha, C, sample_weight, learning_rate, max_iter):
"""Uses SGD implementation with X and y=np.ones(n_samples)."""
# The One-Class SVM uses the SGD implementation with
# y=np.ones(n_samples).
n_samples = X.shape[0]
y = np.ones(n_samples, dtype=np.float64, order="C")
dataset, offset_decay = make_dataset(X, y, sample_weight)
penalty_type = self._get_penalty_type(self.penalty)
learning_rate_type = self._get_learning_rate_type(learning_rate)
# early stopping is set to False for the One-Class SVM, thus
# validation_mask and validation_score_cb will be set to the values
# associated with early_stopping=False in _make_validation_split and
# _make_validation_score_cb respectively.
validation_mask = self._make_validation_split(y)
validation_score_cb = self._make_validation_score_cb(
validation_mask, X, y, sample_weight
)
random_state = check_random_state(self.random_state)
# numpy mtrand expects a C long which is a signed 32 bit integer under
# Windows
seed = random_state.randint(0, np.iinfo(np.int32).max)
tol = self.tol if self.tol is not None else -np.inf
one_class = 1
# There are no class weights for the One-Class SVM and they are
# therefore set to 1.
pos_weight = 1
neg_weight = 1
if self.average:
coef = self._standard_coef
intercept = self._standard_intercept
average_coef = self._average_coef
average_intercept = self._average_intercept
else:
coef = self.coef_
intercept = 1 - self.offset_
average_coef = None # Not used
average_intercept = [0] # Not used
coef, intercept, average_coef, average_intercept, self.n_iter_ = _plain_sgd(
coef,
intercept[0],
average_coef,
average_intercept[0],
self.loss_function_,
penalty_type,
alpha,
C,
self.l1_ratio,
dataset,
validation_mask,
self.early_stopping,
validation_score_cb,
int(self.n_iter_no_change),
max_iter,
tol,
int(self.fit_intercept),
int(self.verbose),
int(self.shuffle),
seed,
neg_weight,
pos_weight,
learning_rate_type,
self.eta0,
self.power_t,
one_class,
self.t_,
offset_decay,
self.average,
)
self.t_ += self.n_iter_ * n_samples
if self.average > 0:
self._average_intercept = np.atleast_1d(average_intercept)
self._standard_intercept = np.atleast_1d(intercept)
if self.average <= self.t_ - 1.0:
# made enough updates for averaging to be taken into account
self.coef_ = average_coef
self.offset_ = 1 - np.atleast_1d(average_intercept)
else:
self.coef_ = coef
self.offset_ = 1 - np.atleast_1d(intercept)
else:
self.offset_ = 1 - np.atleast_1d(intercept)
def _partial_fit(
self,
X,
alpha,
C,
loss,
learning_rate,
max_iter,
sample_weight,
coef_init,
offset_init,
):
first_call = getattr(self, "coef_", None) is None
X = self._validate_data(
X,
None,
accept_sparse="csr",
dtype=np.float64,
order="C",
accept_large_sparse=False,
reset=first_call,
)
n_features = X.shape[1]
# Allocate datastructures from input arguments
sample_weight = _check_sample_weight(sample_weight, X)
# We use intercept = 1 - offset where intercept is the intercept of
# the SGD implementation and offset is the offset of the One-Class SVM
# optimization problem.
if getattr(self, "coef_", None) is None or coef_init is not None:
self._allocate_parameter_mem(1, n_features, coef_init, offset_init, 1)
elif n_features != self.coef_.shape[-1]:
raise ValueError(
"Number of features %d does not match previous data %d."
% (n_features, self.coef_.shape[-1])
)
if self.average and getattr(self, "_average_coef", None) is None:
self._average_coef = np.zeros(n_features, dtype=np.float64, order="C")
self._average_intercept = np.zeros(1, dtype=np.float64, order="C")
self.loss_function_ = self._get_loss_function(loss)
if not hasattr(self, "t_"):
self.t_ = 1.0
# delegate to concrete training procedure
self._fit_one_class(
X,
alpha=alpha,
C=C,
learning_rate=learning_rate,
sample_weight=sample_weight,
max_iter=max_iter,
)
return self
def partial_fit(self, X, y=None, sample_weight=None):
"""Fit linear One-Class SVM with Stochastic Gradient Descent.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Subset of the training data.
y : Ignored
Not used, present for API consistency by convention.
sample_weight : array-like, shape (n_samples,), optional
Weights applied to individual samples.
If not provided, uniform weights are assumed.
Returns
-------
self : object
Returns a fitted instance of self.
"""
alpha = self.nu / 2
self._validate_params(for_partial_fit=True)
return self._partial_fit(
X,
alpha,
C=1.0,
loss=self.loss,
learning_rate=self.learning_rate,
max_iter=1,
sample_weight=sample_weight,
coef_init=None,
offset_init=None,
)
def _fit(
self,
X,
alpha,
C,
loss,
learning_rate,
coef_init=None,
offset_init=None,
sample_weight=None,
):
self._validate_params()
if self.warm_start and hasattr(self, "coef_"):
if coef_init is None:
coef_init = self.coef_
if offset_init is None:
offset_init = self.offset_
else:
self.coef_ = None
self.offset_ = None
# Clear iteration count for multiple call to fit.
self.t_ = 1.0
self._partial_fit(
X,
alpha,
C,
loss,
learning_rate,
self.max_iter,
sample_weight,
coef_init,
offset_init,
)
if (
self.tol is not None
and self.tol > -np.inf
and self.n_iter_ == self.max_iter
):
warnings.warn(
"Maximum number of iteration reached before "
"convergence. Consider increasing max_iter to "
"improve the fit.",
ConvergenceWarning,
)
return self
def fit(self, X, y=None, coef_init=None, offset_init=None, sample_weight=None):
"""Fit linear One-Class SVM with Stochastic Gradient Descent.
This solves an equivalent optimization problem of the
One-Class SVM primal optimization problem and returns a weight vector
w and an offset rho such that the decision function is given by
<w, x> - rho.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Training data.
y : Ignored
Not used, present for API consistency by convention.
coef_init : array, shape (n_classes, n_features)
The initial coefficients to warm-start the optimization.
offset_init : array, shape (n_classes,)
The initial offset to warm-start the optimization.
sample_weight : array-like, shape (n_samples,), optional
Weights applied to individual samples.
If not provided, uniform weights are assumed. These weights will
be multiplied with class_weight (passed through the
constructor) if class_weight is specified.
Returns
-------
self : object
Returns a fitted instance of self.
"""
alpha = self.nu / 2
self._fit(
X,
alpha=alpha,
C=1.0,
loss=self.loss,
learning_rate=self.learning_rate,
coef_init=coef_init,
offset_init=offset_init,
sample_weight=sample_weight,
)
return self
def decision_function(self, X):
"""Signed distance to the separating hyperplane.
Signed distance is positive for an inlier and negative for an
outlier.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Testing data.
Returns
-------
dec : array-like, shape (n_samples,)
Decision function values of the samples.
"""
check_is_fitted(self, "coef_")
X = self._validate_data(X, accept_sparse="csr", reset=False)
decisions = safe_sparse_dot(X, self.coef_.T, dense_output=True) - self.offset_
return decisions.ravel()
def score_samples(self, X):
"""Raw scoring function of the samples.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Testing data.
Returns
-------
score_samples : array-like, shape (n_samples,)
Unshifted scoring function values of the samples.
"""
score_samples = self.decision_function(X) + self.offset_
return score_samples
def predict(self, X):
"""Return labels (1 inlier, -1 outlier) of the samples.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Testing data.
Returns
-------
y : array, shape (n_samples,)
Labels of the samples.
"""
y = (self.decision_function(X) >= 0).astype(np.int32)
y[y == 0] = -1 # for consistency with outlier detectors
return y
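# Worked relation between the three methods above (illustrative; `clf` is
# assumed to be a fitted SGDOneClassSVM and `np` is numpy):
#
#     raw = clf.score_samples(X)        # raw scoring function
#     dec = raw - clf.offset_           # equals clf.decision_function(X)
#     pred = np.where(dec >= 0, 1, -1)  # equals clf.predict(X)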
def _more_tags(self):
return {
"_xfail_checks": {
"check_sample_weights_invariance": (
"zero sample_weight is not equivalent to removing samples"
)
}
}<|fim▁end|> | classes=classes, |
<|file_name|>MInOutLineMA.java<|end_file_name|><|fim▁begin|>/******************************************************************************
* Product: Adempiere ERP & CRM Smart Business Solution *
* Copyright (C) 1999-2006 ComPiere, Inc. All Rights Reserved. *
* This program is free software; you can redistribute it and/or modify it *
* under the terms version 2 of the GNU General Public License as published *
* by the Free Software Foundation. This program is distributed in the hope *
* that it will be useful, but WITHOUT ANY WARRANTY; without even the implied *
* warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. *
* See the GNU General Public License for more details. *
* You should have received a copy of the GNU General Public License along *
* with this program; if not, write to the Free Software Foundation, Inc., *
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA. *
* For the text or an alternative of this public license, you may reach us *
* ComPiere, Inc., 2620 Augustine Dr. #245, Santa Clara, CA 95054, USA *
* or via [email protected] or http://www.compiere.org/license.html *
*****************************************************************************/
package org.compiere.model;
import java.math.BigDecimal;
import java.sql.ResultSet;
import java.util.List;
import java.util.Properties;
import org.compiere.util.DB;
/**
* Shipment Material Allocation
*
* @author Jorg Janke
* @version $Id: MInOutLineMA.java,v 1.3 2006/07/30 00:51:02 jjanke Exp $
*/
public class MInOutLineMA extends X_M_InOutLineMA
{
/**
*
*/
private static final long serialVersionUID = -3229418883339488380L;
/**
* Get Material Allocations for Line
* @param ctx context
* @param M_InOutLine_ID line
* @param trxName trx
* @return allocations
*/
public static MInOutLineMA[] get (Properties ctx, int M_InOutLine_ID, String trxName)
{
Query query = MTable.get(ctx, MInOutLineMA.Table_Name)
.createQuery(I_M_InOutLineMA.COLUMNNAME_M_InOutLine_ID+"=?", trxName);
query.setParameters(M_InOutLine_ID);
List<MInOutLineMA> list = query.list();
MInOutLineMA[] retValue = new MInOutLineMA[list.size ()];
list.toArray (retValue);
return retValue;
} // get
/**
* Delete all Material Allocation for InOut
* @param M_InOut_ID shipment
* @param trxName transaction
* @return number of rows deleted or -1 for error
*/
public static int deleteInOutMA (int M_InOut_ID, String trxName)
{
String sql = "DELETE FROM M_InOutLineMA ma WHERE EXISTS "
+ "(SELECT * FROM M_InOutLine l WHERE l.M_InOutLine_ID=ma.M_InOutLine_ID"
+ " AND M_InOut_ID=" + M_InOut_ID + ")";
return DB.executeUpdate(sql, trxName);
} // deleteInOutMA
/**
* Delete all Material Allocation for InOutLine
* @param M_InOutLine_ID Shipment Line
* @param trxName transaction
* @return number of rows deleted or -1 for error
*/
public static int deleteInOutLineMA (int M_InOutLine_ID, String trxName)
{
String sql = "DELETE FROM M_InOutLineMA ma WHERE ma.M_InOutLine_ID=?";
return DB.executeUpdate(sql, M_InOutLine_ID, trxName);
} // deleteInOutLineMA
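/*
 * Illustrative usage sketch (not part of the original class). The variables
 * "ctx", "trxName", "sLine" (an MInOutLine) and the attribute set instance id
 * are assumed to be provided by the caller:
 *
 *   // allocate 5 units of a given attribute set instance to a shipment line
 *   MInOutLineMA ma = new MInOutLineMA(sLine, M_AttributeSetInstance_ID, new BigDecimal(5));
 *   ma.save();
 *
 *   // read back every allocation recorded for that line
 *   MInOutLineMA[] allocations = MInOutLineMA.get(ctx, sLine.getM_InOutLine_ID(), trxName);
 */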
// /** Logger */
// private static CLogger s_log = CLogger.getCLogger (MInOutLineMA.class);
/**************************************************************************
* Standard Constructor
* @param ctx context
* @param M_InOutLineMA_ID ignored
* @param trxName trx
*/
public MInOutLineMA (Properties ctx, int M_InOutLineMA_ID, String trxName)
{
super (ctx, M_InOutLineMA_ID, trxName);
if (M_InOutLineMA_ID != 0)
throw new IllegalArgumentException("Multi-Key");
} // MInOutLineMA
/**
* Load Constructor
* @param ctx context
* @param rs result set
* @param trxName trx
*/
public MInOutLineMA (Properties ctx, ResultSet rs, String trxName)
{
super (ctx, rs, trxName);
} // MInOutLineMA
<|fim▁hole|> * @param M_AttributeSetInstance_ID asi
* @param MovementQty qty
*/
public MInOutLineMA (MInOutLine parent, int M_AttributeSetInstance_ID, BigDecimal MovementQty)
{
this (parent.getCtx(), 0, parent.get_TrxName());
setClientOrg(parent);
setM_InOutLine_ID(parent.getM_InOutLine_ID());
//
setM_AttributeSetInstance_ID(M_AttributeSetInstance_ID);
setMovementQty(MovementQty);
} // MInOutLineMA
/**
* String Representation
* @return info
*/
public String toString ()
{
StringBuffer sb = new StringBuffer ("MInOutLineMA[");
sb.append("M_InOutLine_ID=").append(getM_InOutLine_ID())
.append(",M_AttributeSetInstance_ID=").append(getM_AttributeSetInstance_ID())
.append(", Qty=").append(getMovementQty())
.append ("]");
return sb.toString ();
} // toString
} // MInOutLineMA<|fim▁end|> |
/**
* Parent Constructor
* @param parent parent
|
<|file_name|>minimap-bookmarks.js<|end_file_name|><|fim▁begin|>/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS201: Simplify complex destructure assignments
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const { CompositeDisposable } = require('atom')
let MinimapBookmarksBinding
module.exports = {
isActive () {
return this.active
},
activate () {
this.active = false
this.subscriptions = new CompositeDisposable()
this.bindings = new Map()
require('atom-package-deps').install('minimap-bookmarks')
},
consumeMinimapServiceV1 (minimap) {
this.minimap = minimap
this.minimap.registerPlugin('bookmarks', this)
},
deactivate () {
if (this.minimap) {
this.minimap.unregisterPlugin('bookmarks')
}
this.minimap = null
},
activatePlugin () {
if (this.active) {
return
}
const bookmarksPkg = atom.packages.getLoadedPackage('bookmarks')
if (!bookmarksPkg) {
return
}
const bookmarks = bookmarksPkg.mainModule
this.active = true
this.minimapsSubscription = this.minimap.observeMinimaps(minimap => {
if (!MinimapBookmarksBinding) {
MinimapBookmarksBinding = require('./minimap-bookmarks-binding')
}
<|fim▁hole|>
const subscription = minimap.onDidDestroy(() => {
binding.destroy()
this.subscriptions.remove(subscription)
subscription.dispose()
this.bindings.delete(minimap.id)
})
this.subscriptions.add(subscription)
})
},
deactivatePlugin () {
if (!this.active) { return }
const bindings = this.bindings.values()
for (const binding of bindings) { binding.destroy() }
this.bindings.clear()
this.active = false
this.minimapsSubscription.dispose()
this.subscriptions.dispose()
},
}<|fim▁end|> | const binding = new MinimapBookmarksBinding(minimap, bookmarks)
this.bindings.set(minimap.id, binding) |
<|file_name|>mootools.more.js<|end_file_name|><|fim▁begin|>// MooTools: the javascript framework.
// Load this file's selection again by visiting: http://mootools.net/more/f0c28d76aff2f0ba12270c81dc5e8d18
// Or build this file again with packager using: packager build More/Assets More/Hash.Cookie
/*
---
script: More.js
name: More
description: MooTools More
license: MIT-style license
authors:
- Guillermo Rauch
- Thomas Aylott
- Scott Kyle
- Arian Stolwijk
- Tim Wienk
- Christoph Pojer
- Aaron Newton
- Jacob Thornton
requires:
- Core/MooTools
provides: [MooTools.More]
...
*/
MooTools.More = {
'version': '1.4.0.1',
'build': 'a4244edf2aa97ac8a196fc96082dd35af1abab87'
};
/*
---
script: Assets.js
name: Assets
description: Provides methods to dynamically load JavaScript, CSS, and Image files into the document.
license: MIT-style license
authors:
- Valerio Proietti
requires:
- Core/Element.Event
- /MooTools.More
provides: [Assets]
...
*/
var Asset = {
javascript: function(source, properties){
if (!properties) properties = {};
var script = new Element('script', {src: source, type: 'text/javascript'}),
doc = properties.document || document,
load = properties.onload || properties.onLoad;
delete properties.onload;
delete properties.onLoad;
delete properties.document;
if (load){
if (typeof script.onreadystatechange != 'undefined'){
script.addEvent('readystatechange', function(){
if (['loaded', 'complete'].contains(this.readyState)) load.call(this);
});
} else {
script.addEvent('load', load);
}
}
return script.set(properties).inject(doc.head);
},
css: function(source, properties){
if (!properties) properties = {};
var link = new Element('link', {
rel: 'stylesheet',
media: 'screen',
type: 'text/css',
href: source
});
var load = properties.onload || properties.onLoad,
doc = properties.document || document;
delete properties.onload;
delete properties.onLoad;
delete properties.document;
if (load) link.addEvent('load', load);
return link.set(properties).inject(doc.head);
},
image: function(source, properties){
if (!properties) properties = {};
var image = new Image(),
element = document.id(image) || new Element('img');
['load', 'abort', 'error'].each(function(name){
var type = 'on' + name,
cap = 'on' + name.capitalize(),
event = properties[type] || properties[cap] || function(){};
delete properties[cap];
delete properties[type];
image[type] = function(){
if (!image) return;
if (!element.parentNode){
element.width = image.width;
element.height = image.height;
}
image = image.onload = image.onabort = image.onerror = null;
event.delay(1, element, element);
element.fireEvent(name, element, 1);
};
});
image.src = element.src = source;
if (image && image.complete) image.onload.delay(1);
return element.set(properties);
},
images: function(sources, options){
sources = Array.from(sources);
var fn = function(){},
counter = 0;
options = Object.merge({
onComplete: fn,
onProgress: fn,
onError: fn,
properties: {}
}, options);
return new Elements(sources.map(function(source, index){
return Asset.image(source, Object.append(options.properties, {
onload: function(){
counter++;
options.onProgress.call(this, counter, index, source);
if (counter == sources.length) options.onComplete();<|fim▁hole|> options.onError.call(this, counter, index, source);
if (counter == sources.length) options.onComplete();
}
}));
}));
}
};
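/*
 * Illustrative usage sketch (not part of the original file). The URLs, ids and
 * callback bodies below are placeholders:
 *
 *   Asset.javascript('/js/extra.js', {
 *     id: 'extra-script',
 *     onLoad: function(){ console.log('script ready'); }
 *   });
 *
 *   Asset.css('/css/print.css', {media: 'print'});
 *
 *   Asset.images(['/img/a.png', '/img/b.png'], {
 *     onProgress: function(counter, index, source){ console.log('loaded', source); },
 *     onComplete: function(){ console.log('all images loaded'); }
 *   });
 */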
/*
---
name: Hash
description: Contains Hash Prototypes. Provides a means for overcoming the JavaScript practical impossibility of extending native Objects.
license: MIT-style license.
requires:
- Core/Object
- /MooTools.More
provides: [Hash]
...
*/
(function(){
if (this.Hash) return;
var Hash = this.Hash = new Type('Hash', function(object){
if (typeOf(object) == 'hash') object = Object.clone(object.getClean());
for (var key in object) this[key] = object[key];
return this;
});
this.$H = function(object){
return new Hash(object);
};
Hash.implement({
forEach: function(fn, bind){
Object.forEach(this, fn, bind);
},
getClean: function(){
var clean = {};
for (var key in this){
if (this.hasOwnProperty(key)) clean[key] = this[key];
}
return clean;
},
getLength: function(){
var length = 0;
for (var key in this){
if (this.hasOwnProperty(key)) length++;
}
return length;
}
});
Hash.alias('each', 'forEach');
Hash.implement({
has: Object.prototype.hasOwnProperty,
keyOf: function(value){
return Object.keyOf(this, value);
},
hasValue: function(value){
return Object.contains(this, value);
},
extend: function(properties){
Hash.each(properties || {}, function(value, key){
Hash.set(this, key, value);
}, this);
return this;
},
combine: function(properties){
Hash.each(properties || {}, function(value, key){
Hash.include(this, key, value);
}, this);
return this;
},
erase: function(key){
if (this.hasOwnProperty(key)) delete this[key];
return this;
},
get: function(key){
return (this.hasOwnProperty(key)) ? this[key] : null;
},
set: function(key, value){
if (!this[key] || this.hasOwnProperty(key)) this[key] = value;
return this;
},
empty: function(){
Hash.each(this, function(value, key){
delete this[key];
}, this);
return this;
},
include: function(key, value){
if (this[key] == undefined) this[key] = value;
return this;
},
map: function(fn, bind){
return new Hash(Object.map(this, fn, bind));
},
filter: function(fn, bind){
return new Hash(Object.filter(this, fn, bind));
},
every: function(fn, bind){
return Object.every(this, fn, bind);
},
some: function(fn, bind){
return Object.some(this, fn, bind);
},
getKeys: function(){
return Object.keys(this);
},
getValues: function(){
return Object.values(this);
},
toQueryString: function(base){
return Object.toQueryString(this, base);
}
});
Hash.alias({indexOf: 'keyOf', contains: 'hasValue'});
})();
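/*
 * Illustrative usage sketch for the Hash type defined above (not part of the
 * original file); the data is made up:
 *
 *   var scores = new Hash({john: 12, jane: 20});
 *   scores.set('joe', 7);          // add a key
 *   scores.getLength();            // 3
 *   var high = scores.filter(function(value){ return value > 10; });
 *   high.getKeys();                // ['john', 'jane']
 */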
/*
---
script: Hash.Cookie.js
name: Hash.Cookie
description: Class for creating, reading, and deleting Cookies in JSON format.
license: MIT-style license
authors:
- Valerio Proietti
- Aaron Newton
requires:
- Core/Cookie
- Core/JSON
- /MooTools.More
- /Hash
provides: [Hash.Cookie]
...
*/
Hash.Cookie = new Class({
Extends: Cookie,
options: {
autoSave: true
},
initialize: function(name, options){
this.parent(name, options);
this.load();
},
save: function(){
var value = JSON.encode(this.hash);
if (!value || value.length > 4096) return false; //cookie would be truncated!
if (value == '{}') this.dispose();
else this.write(value);
return true;
},
load: function(){
this.hash = new Hash(JSON.decode(this.read(), true));
return this;
}
});
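/*
 * Illustrative usage sketch (not part of the original file); the cookie name,
 * keys and duration are placeholders:
 *
 *   var prefs = new Hash.Cookie('site-prefs', {duration: 30});
 *   prefs.set('theme', 'dark');  // autoSave is true, so the cookie is written
 *   prefs.get('theme');          // 'dark', also on a later page load
 *   prefs.erase('theme');        // erasing the last key disposes of the cookie
 */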
Hash.each(Hash.prototype, function(method, name){
if (typeof method == 'function') Hash.Cookie.implement(name, function(){
var value = method.apply(this.hash, arguments);
if (this.options.autoSave) this.save();
return value;
});
});<|fim▁end|> | },
onerror: function(){
counter++; |
<|file_name|>opusabout.py<|end_file_name|><|fim▁begin|># Opus/UrbanSim urban simulation software.
# Copyright (C) 2005-2009 University of Washington
# See opus_core/LICENSE
# PyQt4 includes for python bindings to QT
from PyQt4.QtCore import QString, QUrl
from PyQt4.QtGui import QDialog, QDesktopServices
# UI specific includes
from opus_gui.main.views.ui_opusabout import Ui_UrbansimAbout
class UrbansimAboutGui(QDialog, Ui_UrbansimAbout):
def __init__(self, mainwindow, fl):
QDialog.__init__(self, mainwindow, fl)
self.setupUi(self)
self.mainwindow = mainwindow
def on_webPushButton_released(self):
#print "webPushButton pressed"
QDesktopServices.openUrl(QUrl(QString("http://www.urbansim.org/")))<|fim▁hole|> #print "docPushButton pressed"
QDesktopServices.openUrl(QUrl(QString("http://www.urbansim.org/docs/opus-userguide/")))
def on_licensePushButton_released(self):
#print "licensePushButton pressed"
QDesktopServices.openUrl(QUrl(QString("http://www.gnu.org/copyleft/gpl.html")))
def on_buttonCancel_released(self):
#print "cancelPushButton pressed"
self.close()<|fim▁end|> |
def on_docPushButton_released(self): |
<|file_name|>typings.d.ts<|end_file_name|><|fim▁begin|>/* SystemJS module definition */
declare var module: NodeModule;
interface NodeModule {
id: string;
<|fim▁hole|>declare module '*.json' {
const value: any;
export default value;
}<|fim▁end|> | [ key: string ]: any;
}
|
<|file_name|>browser.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright(C) 2009-2016 Romain Bignon
#
# This file is part of a weboob module.
#
# This weboob module is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This weboob module is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the<|fim▁hole|>#
# You should have received a copy of the GNU Lesser General Public License
# along with this weboob module. If not, see <http://www.gnu.org/licenses/>.
# yapf-compatible
from __future__ import unicode_literals
from datetime import datetime
from dateutil.relativedelta import relativedelta
import time
from requests.exceptions import ConnectionError, SSLError
from weboob.browser.browsers import LoginBrowser, URL, need_login, StatesMixin
from weboob.capabilities.base import find_object
from weboob.capabilities.bank import (
AccountNotFound, Account, AddRecipientStep, AddRecipientTimeout,
TransferInvalidRecipient, Loan,
)
from weboob.capabilities.bill import Subscription, Document, DocumentTypes
from weboob.capabilities.profile import ProfileMissing
from weboob.tools.decorators import retry
from weboob.tools.capabilities.bank.transactions import sorted_transactions
from weboob.browser.exceptions import ServerError
from weboob.browser.elements import DataError
from weboob.exceptions import BrowserIncorrectPassword, BrowserUnavailable
from weboob.tools.value import Value, ValueBool
from weboob.tools.capabilities.bank.investments import create_french_liquidity
from .pages import (
LoginPage, AccountsPage, AccountsIBANPage, HistoryPage, TransferInitPage,
ConnectionThresholdPage, LifeInsurancesPage, LifeInsurancesHistoryPage,
LifeInsurancesDetailPage, NatioVieProPage, CapitalisationPage,
MarketListPage, MarketPage, MarketHistoryPage, MarketSynPage, BNPKeyboard,
RecipientsPage, ValidateTransferPage, RegisterTransferPage, AdvisorPage,
AddRecipPage, ActivateRecipPage, ProfilePage, ListDetailCardPage, ListErrorPage,
UselessPage, TransferAssertionError, LoanDetailsPage,
)
from .document_pages import DocumentsPage, DocumentsResearchPage, TitulairePage, RIBPage
__all__ = ['BNPPartPro', 'HelloBank']
class BNPParibasBrowser(LoginBrowser, StatesMixin):
TIMEOUT = 30.0
login = URL(
r'identification-wspl-pres/identification\?acceptRedirection=true×tamp=(?P<timestamp>\d+)',
r'SEEA-pa01/devServer/seeaserver',
r'https://mabanqueprivee.bnpparibas.net/fr/espace-prive/comptes-et-contrats\?u=%2FSEEA-pa01%2FdevServer%2Fseeaserver',
LoginPage
)
list_error_page = URL(
r'https://mabanque.bnpparibas/rsc/contrib/document/properties/identification-fr-part-V1.json', ListErrorPage
)
useless_page = URL(r'/fr/connexion/comptes-et-contrats', UselessPage)
con_threshold = URL(
r'/fr/connexion/100-connexions',
r'/fr/connexion/mot-de-passe-expire',
r'/fr/espace-prive/100-connexions.*',
r'/fr/espace-pro/100-connexions-pro.*',
r'/fr/espace-pro/changer-son-mot-de-passe',
r'/fr/espace-client/100-connexions',
r'/fr/espace-prive/mot-de-passe-expire',
r'/fr/client/mdp-expire',
r'/fr/client/100-connexion',
r'/fr/systeme/page-indisponible',
ConnectionThresholdPage
)
accounts = URL(r'udc-wspl/rest/getlstcpt', AccountsPage)
loan_details = URL(r'caraccomptes-wspl/rpc/(?P<loan_type>.*)', LoanDetailsPage)
ibans = URL(r'rib-wspl/rpc/comptes', AccountsIBANPage)
history = URL(r'rop2-wspl/rest/releveOp', HistoryPage)
history_old = URL(r'rop-wspl/rest/releveOp', HistoryPage)
transfer_init = URL(r'virement-wspl/rest/initialisationVirement', TransferInitPage)
lifeinsurances = URL(r'mefav-wspl/rest/infosContrat', LifeInsurancesPage)
lifeinsurances_history = URL(r'mefav-wspl/rest/listMouvements', LifeInsurancesHistoryPage)
lifeinsurances_detail = URL(r'mefav-wspl/rest/detailMouvement', LifeInsurancesDetailPage)
natio_vie_pro = URL(r'/mefav-wspl/rest/natioViePro', NatioVieProPage)
capitalisation_page = URL(
r'https://www.clients.assurance-vie.fr/servlets/helios.cinrj.htmlnav.runtime.FrontServlet', CapitalisationPage
)
market_list = URL(r'pe-war/rpc/SAVaccountDetails/get', MarketListPage)
market_syn = URL(r'pe-war/rpc/synthesis/get', MarketSynPage)
market = URL(r'pe-war/rpc/portfolioDetails/get', MarketPage)
market_history = URL(r'/pe-war/rpc/turnOverHistory/get', MarketHistoryPage)
recipients = URL(r'/virement-wspl/rest/listerBeneficiaire', RecipientsPage)
add_recip = URL(r'/virement-wspl/rest/ajouterBeneficiaire', AddRecipPage)
activate_recip_sms = URL(r'/virement-wspl/rest/activerBeneficiaire', ActivateRecipPage)
activate_recip_digital_key = URL(r'/virement-wspl/rest/verifierAuthentForte', ActivateRecipPage)
validate_transfer = URL(r'/virement-wspl/rest/validationVirement', ValidateTransferPage)
register_transfer = URL(r'/virement-wspl/rest/enregistrerVirement', RegisterTransferPage)
advisor = URL(r'/conseiller-wspl/rest/monConseiller', AdvisorPage)
titulaire = URL(r'/demat-wspl/rest/listerTitulairesDemat', TitulairePage)
document = URL(r'/demat-wspl/rest/listerDocuments', DocumentsPage)
document_research = URL(r'/demat-wspl/rest/rechercheCriteresDemat', DocumentsResearchPage)
rib_page = URL(r'/rib-wspl/rpc/restituerRIB', RIBPage)
profile = URL(r'/kyc-wspl/rest/informationsClient', ProfilePage)
list_detail_card = URL(r'/udcarte-wspl/rest/listeDetailCartes', ListDetailCardPage)
STATE_DURATION = 10
need_reload_state = False
__states__ = ('need_reload_state', 'rcpt_transfer_id')
def __init__(self, config, *args, **kwargs):
super(BNPParibasBrowser, self).__init__(config['login'].get(), config['password'].get(), *args, **kwargs)
self.accounts_list = None
self.card_to_transaction_type = {}
self.rotating_password = config['rotating_password'].get()
self.digital_key = config['digital_key'].get()
self.rcpt_transfer_id = None
@retry(ConnectionError, tries=3)
def open(self, *args, **kwargs):
return super(BNPParibasBrowser, self).open(*args, **kwargs)
def do_login(self):
if not (self.username.isdigit() and self.password.isdigit()):
raise BrowserIncorrectPassword()
timestamp = lambda: int(time.time() * 1e3)
self.login.go(timestamp=timestamp())
if self.login.is_here():
self.page.login(self.username, self.password)
def load_state(self, state):
# reload state only for new recipient feature
if state.get('need_reload_state'):
state.pop('url', None)
self.need_reload_state = False
super(BNPParibasBrowser, self).load_state(state)
def change_pass(self, oldpass, newpass):
res = self.open('/identification-wspl-pres/grille?accessible=false')
url = '/identification-wspl-pres/grille/%s' % res.json()['data']['idGrille']
keyboard = self.open(url)
vk = BNPKeyboard(self, keyboard)
data = {}
data['codeAppli'] = 'PORTAIL'
data['idGrille'] = res.json()['data']['idGrille']
data['typeGrille'] = res.json()['data']['typeGrille']
data['confirmNouveauPassword'] = vk.get_string_code(newpass)
data['nouveauPassword'] = vk.get_string_code(newpass)
data['passwordActuel'] = vk.get_string_code(oldpass)
response = self.location('/mcs-wspl/rpc/modifiercodesecret', data=data)
if response.json().get('messageIden').lower() == 'nouveau mot de passe invalide':
return False
return True
@need_login
def get_profile(self):
self.profile.go(json={}, method='POST')
profile = self.page.get_profile()
if profile:
return profile
raise ProfileMissing(self.page.get_error_message())
def is_loan(self, account):
return account.type in (
Account.TYPE_LOAN, Account.TYPE_MORTGAGE, Account.TYPE_CONSUMER_CREDIT, Account.TYPE_REVOLVING_CREDIT
)
@need_login
def iter_accounts(self):
if self.accounts_list is None:
self.accounts_list = []
# In case of password renewal, we need to go on ibans twice.
self.ibans.go()
ibans = self.page.get_ibans_dict() if self.ibans.is_here() else self.ibans.go().get_ibans_dict()
# This page might be unavailable.
try:
ibans.update(self.transfer_init.go(json={'modeBeneficiaire': '0'}).get_ibans_dict('Crediteur'))
except (TransferAssertionError, AttributeError):
pass
accounts = list(self.accounts.go().iter_accounts(ibans=ibans))
self.market_syn.go(json={}, method='POST') # do a post on the given URL
market_accounts = self.page.get_list() # get the list of 'Comptes Titres'
checked_accounts = set()
for account in accounts:
if self.is_loan(account):
account = Loan.from_dict(account.to_dict())
if account.type in (Account.TYPE_MORTGAGE, Account.TYPE_CONSUMER_CREDIT):
self.loan_details.go(data={'iban': account.id}, loan_type='creditPret')
self.page.fill_loan_details(obj=account)
elif account.type == Account.TYPE_REVOLVING_CREDIT:
self.loan_details.go(data={'iban': account.id}, loan_type='creditConsoProvisio')
self.page.fill_revolving_details(obj=account)
elif account.type == Account.TYPE_LOAN:
self.loan_details.go(data={'iban': account.id}, loan_type='creditPretPersoPro')
self.page.fill_loan_details(obj=account)
for market_acc in market_accounts:
if all((
market_acc['securityAccountNumber'].endswith(account.number[-4:]),
account.type in (Account.TYPE_MARKET, Account.TYPE_PEA),
account.label == market_acc['securityAccountName'],
not account.iban,
)):
if account.id in checked_accounts:
# in this case, we have identified two accounts for the same CompteTitre
raise DataError('we have two market accounts mapped to a same "CompteTitre" dictionary')
checked_accounts.add(account.id)
account.balance = market_acc.get('valorisation', account.balance)
account.valuation_diff = market_acc['profitLoss']
break
self.accounts_list.append(account)
# Fetching capitalisation contracts from the "Assurances Vie" space (some are not in the BNP API):
params = self.natio_vie_pro.go().get_params()
try:
self.capitalisation_page.go(params=params)
except ServerError:
self.logger.warning("An Internal Server Error occurred")
except SSLError as e:
self.logger.warning("SSL Error occurred : %s", e)
certificate_errors = (
'SEC_ERROR_EXPIRED_CERTIFICATE', # nss
'certificate verify failed', # openssl
)
if all(error not in str(e) for error in certificate_errors):
raise e
finally:
if self.capitalisation_page.is_here() and self.page.has_contracts():
for account in self.page.iter_capitalisation():
# Life Insurance accounts may appear BOTH in the API and the "Assurances Vie" domain,
# It is better to keep the API version since it contains the unitvalue:
if account.number not in [a.number for a in self.accounts_list]:
self.logger.warning("We found an account that only appears on the old BNP website.")
self.accounts_list.append(account)
else:
self.logger.warning("This account was skipped because it already appears in the API.")
return iter(self.accounts_list)
@need_login
def get_account(self, _id):
return find_object(self.iter_accounts(), id=_id, error=AccountNotFound)
@need_login
def iter_history(self, account, coming=False):
# The accounts from the "Assurances Vie" space have no available history:
if hasattr(account, '_details'):
return []
if account.type == Account.TYPE_PEA and account.label.endswith('Espèces'):
return []
if account.type == Account.TYPE_LIFE_INSURANCE:
return self.iter_lifeinsurance_history(account, coming)
elif account.type in (Account.TYPE_MARKET, Account.TYPE_PEA):
if coming:
return []
try:
self.market_list.go(json={}, method='POST')
except ServerError:
self.logger.warning("An Internal Server Error occurred")
return []
for market_acc in self.page.get_list():
if account.number[-4:] == market_acc['securityAccountNumber'][-4:]:
self.page = self.market_history.go(
json={
"securityAccountNumber": market_acc['securityAccountNumber'],
}
)
return self.page.iter_history()
return []
else:
if not self.card_to_transaction_type:
self.list_detail_card.go()
self.card_to_transaction_type = self.page.get_card_to_transaction_type()
data = {
"ibanCrypte": account.id,
"pastOrPending": 1,
"triAV": 0,
"startDate": (datetime.now() - relativedelta(years=1)).strftime('%d%m%Y'),
"endDate": datetime.now().strftime('%d%m%Y')
}
try:
self.history.go(json=data)
except BrowserUnavailable:
# the old url is still used for certain connections but we don't know which ones,
# so the same HistoryPage is attained by the old url in another URL object
data['startDate'] = (datetime.now() - relativedelta(years=3)).strftime('%d%m%Y')
# old url authorizes up to 3 years of history
self.history_old.go(data=data)
if coming:
return sorted_transactions(self.page.iter_coming())
else:
return sorted_transactions(self.page.iter_history())
@need_login
def iter_lifeinsurance_history(self, account, coming=False):
self.lifeinsurances_history.go(json={
"ibanCrypte": account.id,
})
for tr in self.page.iter_history(coming):
page = self.lifeinsurances_detail.go(
json={
"ibanCrypte": account.id,
"idMouvement": tr._op.get('idMouvement'),
"ordreMouvement": tr._op.get('ordreMouvement'),
"codeTypeMouvement": tr._op.get('codeTypeMouvement'),
}
)
tr.investments = list(page.iter_investments())
yield tr
@need_login
def iter_coming_operations(self, account):
return self.iter_history(account, coming=True)
@need_login
def iter_investment(self, account):
if account.type == Account.TYPE_PEA and 'espèces' in account.label.lower():
return [create_french_liquidity(account.balance)]
# Life insurances and PERP may be scraped from the API or from the "Assurance Vie" space,
# so we need to discriminate between both using account._details:
if account.type in (account.TYPE_LIFE_INSURANCE, account.TYPE_PERP, account.TYPE_CAPITALISATION):
if hasattr(account, '_details'):
# Going to the "Assurances Vie" page
natiovie_params = self.natio_vie_pro.go().get_params()
self.capitalisation_page.go(params=natiovie_params)
# Fetching the form to get the contract investments:
capitalisation_params = self.page.get_params(account)
self.capitalisation_page.go(params=capitalisation_params)
return self.page.iter_investments()
else:
# No capitalisation contract has yet been found in the API:
assert account.type != account.TYPE_CAPITALISATION
self.lifeinsurances.go(json={
"ibanCrypte": account.id,
})
return self.page.iter_investments()
elif account.type in (account.TYPE_MARKET, account.TYPE_PEA):
try:
self.market_list.go(json={}, method='POST')
except ServerError:
self.logger.warning("An Internal Server Error occurred")
return iter([])
for market_acc in self.page.get_list():
if account.number[-4:] == market_acc['securityAccountNumber'][-4:] and not account.iban:
# Sometimes generate an Internal Server Error ...
try:
self.market.go(json={
"securityAccountNumber": market_acc['securityAccountNumber'],
})
except ServerError:
self.logger.warning("An Internal Server Error occurred")
break
return self.page.iter_investments()
return iter([])
@need_login
def iter_recipients(self, origin_account_id):
try:
if (
not origin_account_id in self.transfer_init.go(json={
'modeBeneficiaire': '0'
}).get_ibans_dict('Debiteur')
):
raise NotImplementedError()
except TransferAssertionError:
return
# avoid recipient with same iban
seen = set()
for recipient in self.page.transferable_on(origin_account_ibancrypte=origin_account_id):
if recipient.iban not in seen:
seen.add(recipient.iban)
yield recipient
if self.page.can_transfer_to_recipients(origin_account_id):
for recipient in self.recipients.go(json={'type': 'TOUS'}).iter_recipients():
if recipient.iban not in seen:
seen.add(recipient.iban)
yield recipient
@need_login
def new_recipient(self, recipient, **params):
if 'code' in params:
# for sms authentication
return self.send_code(recipient, **params)
# prepare common data for all authentication methods
data = {}
data['adresseBeneficiaire'] = ''
data['iban'] = recipient.iban
data['libelleBeneficiaire'] = recipient.label
data['notification'] = True
data['typeBeneficiaire'] = ''
# provisional
if self.digital_key:
if 'digital_key' in params:
return self.new_recipient_digital_key(recipient, data)
# need to be on the recipient page to send sms or mobile notification
# needed to get the phone number, enabling the possibility to send sms.
# all users with validated phone number can receive sms code
self.recipients.go(json={'type': 'TOUS'})
# check type of recipient activation
type_activation = 'sms'
# provisional
if self.digital_key:
if self.page.has_digital_key():
# force users with digital key activated to use digital key authentication
type_activation = 'digital_key'
if type_activation == 'sms':
# post recipient data sending sms with same request
data['typeEnvoi'] = 'SMS'
recipient = self.add_recip.go(json=data).get_recipient(recipient)
self.rcpt_transfer_id = recipient._transfer_id
self.need_reload_state = True
raise AddRecipientStep(recipient, Value('code', label='Saisissez le code reçu par SMS.'))
elif type_activation == 'digital_key':
# recipients validated with the digital key are immediately available
recipient.enabled_date = datetime.today()
raise AddRecipientStep(
recipient,
ValueBool(
'digital_key',
label=
'Validez pour recevoir une demande sur votre application bancaire. La validation de votre bénéficiaire peut prendre plusieurs minutes.'
)
)
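# Illustrative sketch (not part of the original browser): how a caller
# typically resumes the two-step flow implemented by `new_recipient` above.
# `browser`, `recipient` and the SMS code are placeholders supplied by the
# caller.
#
#     try:
#         browser.new_recipient(recipient)
#     except AddRecipientStep:
#         # the first call sent an SMS (or an app notification); call again
#         # with the expected parameter to finish the registration
#         browser.new_recipient(recipient, code='123456')           # SMS flow
#         # or: browser.new_recipient(recipient, digital_key=True)  # app flow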
@need_login
def send_code(self, recipient, **params):
"""
add recipient with sms otp authentication
"""
data = {}
data['idBeneficiaire'] = self.rcpt_transfer_id
data['typeActivation'] = 1
data['codeActivation'] = params['code']
self.rcpt_transfer_id = None
return self.activate_recip_sms.go(json=data).get_recipient(recipient)
@need_login
def new_recipient_digital_key(self, recipient, data):
"""
add recipient with 'clé digitale' authentication
"""
# post recipient data, sending app notification with same request
data['typeEnvoi'] = 'AF'
self.add_recip.go(json=data)
recipient = self.page.get_recipient(recipient)
# prepare data for polling
assert recipient._id_transaction
polling_data = {}
polling_data['idBeneficiaire'] = recipient._transfer_id
polling_data['idTransaction'] = recipient._id_transaction
polling_data['typeActivation'] = 2
timeout = time.time() + 300.00 # float(second), like bnp website
# polling
while time.time() < timeout:
time.sleep(5) # like website
self.activate_recip_digital_key.go(json=polling_data)
if self.page.is_recipient_validated():
break
else:
raise AddRecipientTimeout()
return recipient
@need_login
def prepare_transfer(self, account, recipient, amount, reason, exec_date):
data = {}
data['devise'] = account.currency
data['motif'] = reason
data['dateExecution'] = exec_date.strftime('%d-%m-%Y')
data['compteDebiteur'] = account.id
data['montant'] = str(amount)
data['typeVirement'] = 'SEPA'
if recipient.category == u'Externe':
data['idBeneficiaire'] = recipient._transfer_id
else:
data['compteCrediteur'] = recipient.id
return data
@need_login
def init_transfer(self, account, recipient, amount, reason, exec_date):
if recipient._web_state == 'En attente':
raise TransferInvalidRecipient(message="Le bénéficiaire sélectionné n'est pas activé")
data = self.prepare_transfer(account, recipient, amount, reason, exec_date)
return self.validate_transfer.go(json=data).handle_response(account, recipient, amount, reason)
@need_login
def execute_transfer(self, transfer):
self.register_transfer.go(json={'referenceVirement': transfer.id})
return self.page.handle_response(transfer)
@need_login
def get_advisor(self):
self.advisor.stay_or_go()
if self.page.has_error():
return None
return self.page.get_advisor()
@need_login
def iter_threads(self):
raise NotImplementedError()
@need_login
def get_thread(self, thread):
raise NotImplementedError()
def _fetch_rib_document(self, subscription):
self.rib_page.go(
params={
'contractId': subscription.id,
'i18nSiteType': 'part', # site type value doesn't seem to matter as long as it's present
'i18nLang': 'fr',
'i18nVersion': 'V1',
},
)
if self.rib_page.is_here() and self.page.is_rib_available():
d = Document()
d.id = subscription.id + '_RIB'
d.url = self.page.url
d.type = DocumentTypes.RIB
d.format = 'pdf'
d.label = 'RIB'
return d
@need_login
def iter_documents(self, subscription):
rib = self._fetch_rib_document(subscription)
if rib:
yield rib
titulaires = self.titulaire.go().get_titulaires()
# Calling '/demat-wspl/rest/listerDocuments' before the request on 'document'
# is necessary when you specify an ikpi, otherwise no documents are returned
self.document.go()
docs = []
id_docs = []
iter_documents_functions = [self.page.iter_documents, self.page.iter_documents_pro]
for iter_documents in iter_documents_functions:
for doc in iter_documents(sub_id=subscription.id, sub_number=subscription._number, baseurl=self.BASEURL):
docs.append(doc)
id_docs.append(doc.id)
# documents are sorted by type then date, sort them directly by date
docs = sorted(docs, key=lambda doc: doc.date, reverse=True)
for doc in docs:
yield doc
# When we only have one titulaire, no need to use the ikpi parameter in the request,
# all documents are provided with this simple request
data = {
'dateDebut': (datetime.now() - relativedelta(years=3)).strftime('%d/%m/%Y'),
'dateFin': datetime.now().strftime('%d/%m/%Y'),
}
len_titulaires = len(titulaires)
self.logger.info('The total number of titulaires on this connection is %s.', len_titulaires)
# Ikpi is necessary for multi titulaires accounts to get each document of each titulaires
if len_titulaires > 1:
data['ikpiPersonne'] = subscription._iduser
self.document_research.go(json=data)
for doc in self.page.iter_documents(
sub_id=subscription.id, sub_number=subscription._number, baseurl=self.BASEURL
):
if doc.id not in id_docs:
yield doc
@need_login
def iter_subscription(self):
acc_list = self.iter_accounts()
for acc in acc_list:
sub = Subscription()
sub.label = acc.label
sub.subscriber = acc._subscriber
sub.id = acc.id
# number is the hidden number of an account like "****1234"
# and it's used in the parsing of the docs in iter_documents
sub._number = acc.number
# iduser is the ikpi affiliated with the account,
# useful for multi-titulaires connections
sub._iduser = acc._iduser
yield sub
class BNPPartPro(BNPParibasBrowser):
BASEURL_TEMPLATE = r'https://%s.bnpparibas/'
BASEURL = BASEURL_TEMPLATE % 'mabanque'
def __init__(self, config=None, *args, **kwargs):
self.config = config
super(BNPPartPro, self).__init__(self.config, *args, **kwargs)
def switch(self, subdomain):
self.BASEURL = self.BASEURL_TEMPLATE % subdomain
class HelloBank(BNPParibasBrowser):
BASEURL = 'https://www.hellobank.fr/'<|fim▁end|> | # GNU Lesser General Public License for more details. |
<|file_name|>os.rs<|end_file_name|><|fim▁begin|>// Zinc, the bare metal stack for rust.<|fim▁hole|>//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::HashSet;
use std::rc::Rc;
use syntax::ast;
use syntax::codemap::{respan, DUMMY_SP};
use syntax::ext::base::ExtCtxt;
use syntax::ext::build::AstBuilder;
use syntax::ext::quote::rt::ToTokens;
use syntax::parse::token::intern;
use syntax::ptr::P;
use builder::meta_args::{ToTyHash, set_ty_params_for_task};
use node;
use super::{Builder, TokenString, add_node_dependency};
pub fn attach(builder: &mut Builder, _: &mut ExtCtxt, node: Rc<node::Node>) {
node.materializer.set(Some(verify as fn(&mut Builder, &mut ExtCtxt, Rc<node::Node>)));
let mcu_node = builder.pt.get_by_path("mcu").unwrap();
let maybe_task_node = node.get_by_path("single_task");
if maybe_task_node.is_some() {
let task_node = maybe_task_node.unwrap();
task_node.materializer.set(Some(build_single_task as fn(&mut Builder, &mut ExtCtxt, Rc<node::Node>)));
add_node_dependency(&node, &task_node);
add_node_dependency(&task_node, &mcu_node);
let maybe_args_node = task_node.get_by_path("args");
if maybe_args_node.is_some() {
let args_node = maybe_args_node.unwrap();
for (_, ref attr) in args_node.attributes.borrow().iter() {
match attr.value {
node::RefValue(ref refname) => {
let refnode = builder.pt.get_by_name(refname.as_str()).unwrap();
add_node_dependency(&task_node, &refnode);
},
_ => (),
}
}
}
}
}
pub fn verify(_: &mut Builder, cx: &mut ExtCtxt, node: Rc<node::Node>) {
node.expect_no_attributes(cx);
node.expect_subnodes(cx, &["single_task"]);
if node.get_by_path("single_task").is_none() {
cx.parse_sess().span_diagnostic.span_err(node.name_span,
"subnode `single_task` must be present");
}
}
fn build_single_task(builder: &mut Builder, cx: &mut ExtCtxt,
node: Rc<node::Node>) {
let some_loop_fn = node.get_required_string_attr(cx, "loop");
match some_loop_fn {
Some(loop_fn) => {
let args_node = node.get_by_path("args");
let args = match args_node.and_then(|args| {
Some(build_args(builder, cx, &loop_fn, args))
}) {
None => vec!(),
Some(arg) => vec!(arg),
};
let call_expr = cx.expr_call_ident(
node.get_attr("loop").value_span,
cx.ident_of(loop_fn.as_str()),
args);
let loop_stmt = quote_stmt!(&*cx, loop { $call_expr; } ).unwrap();
builder.add_main_statement(loop_stmt);
},
None => (),
}
}
fn build_args(builder: &mut Builder, cx: &mut ExtCtxt,
struct_name: &String, node: Rc<node::Node>) -> P<ast::Expr> {
let mut fields = vec!();
let mut expr_fields = vec!();
let node_attr = node.attributes.borrow();
let mut ty_params = HashSet::new();
// this is a bit slower than for (k, v) in node.attributes.iter(), but we need
// to preserve sort order to make reasonably simple test code
let mut all_keys = Vec::new();
for k in node_attr.keys() { all_keys.push(k.clone()) };
all_keys.sort();
for k in all_keys.iter() {
let v = &(*node_attr)[k];
let (ty, val) = match v.value {
node::IntValue(i) =>
(cx.ty_ident(DUMMY_SP, cx.ident_of("u32")),
quote_expr!(&*cx, $i)),
node::BoolValue(b) =>
(cx.ty_ident(DUMMY_SP, cx.ident_of("bool")),
quote_expr!(&*cx, $b)),
node::StrValue(ref string) => {
let static_lifetime = cx.lifetime(DUMMY_SP, intern("'static"));
let val_slice = string.as_str();
(cx.ty_rptr(
DUMMY_SP,
cx.ty_ident(DUMMY_SP, cx.ident_of("str")),
Some(static_lifetime),
ast::MutImmutable), quote_expr!(&*cx, $val_slice))
},
node::RefValue(ref rname) => {
let refnode = builder.pt.get_by_name(rname.as_str()).unwrap();
let reftype = refnode.type_name().unwrap();
let refparams = refnode.type_params();
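// collect only type parameters; lifetime parameters (those starting with a quote) are skipped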
for param in refparams.iter() {
if !param.as_str().starts_with("'") {
ty_params.insert(param.clone());
}
}
let val_slice = TokenString(rname.clone());
let a_lifetime = cx.lifetime(DUMMY_SP, intern("'a"));
(cx.ty_rptr(
DUMMY_SP,
cx.ty_path(type_name_as_path(cx, reftype.as_str(), refparams)),
Some(a_lifetime),
ast::MutImmutable), quote_expr!(&*cx, &$val_slice))
},
};
let name_ident = cx.ident_of(k.as_str());
let sf = ast::StructField_ {
kind: ast::NamedField(name_ident, ast::Public),
id: ast::DUMMY_NODE_ID,
ty: ty,
attrs: vec!(),
};
fields.push(respan(DUMMY_SP, sf));
expr_fields.push(cx.field_imm(DUMMY_SP, name_ident, val));
}
let name_ident = cx.ident_of(format!("{}_args", struct_name).as_str());
let mut collected_params = vec!();
let mut ty_params_vec = vec!();
for ty in ty_params.iter() {
let typaram = cx.typaram(
DUMMY_SP,
cx.ident_of(ty.to_tyhash().as_str()),
P::from_vec(vec!(
ast::RegionTyParamBound(cx.lifetime(DUMMY_SP, intern("'a")))
)),
None);
collected_params.push(typaram);
ty_params_vec.push(ty.clone());
}
set_ty_params_for_task(cx, struct_name.as_str(), ty_params_vec);
let struct_item = P(ast::Item {
ident: name_ident,
attrs: vec!(),
id: ast::DUMMY_NODE_ID,
node: ast::ItemStruct(
ast::VariantData::Struct(fields, ast::DUMMY_NODE_ID),
ast::Generics {
lifetimes: vec!(cx.lifetime_def(DUMMY_SP, intern("'a"), vec!())),
ty_params: P::from_vec(collected_params),
where_clause: ast::WhereClause {
id: ast::DUMMY_NODE_ID,
predicates: vec!(),
}
}),
vis: ast::Public,
span: DUMMY_SP,
});
builder.add_type_item(struct_item);
cx.expr_addr_of(DUMMY_SP,
cx.expr_struct(
DUMMY_SP,
cx.path(DUMMY_SP, vec!(cx.ident_of("pt"), name_ident)),
expr_fields))
}
fn type_name_as_path(cx: &ExtCtxt, ty: &str, params: Vec<String>) -> ast::Path {
let mut lifetimes = vec!();
let mut types = vec!();
for p in params.iter() {
let slice = p.as_str();
if slice.starts_with("'") {
let lifetime = cx.lifetime(DUMMY_SP, intern(slice));
lifetimes.push(lifetime);
} else {
let path = cx.ty_path(type_name_as_path(cx, p.to_tyhash().as_str(), vec!()));
types.push(path);
}
}
cx.path_all(DUMMY_SP, false,
ty.split("::").map(|t| cx.ident_of(t)).collect(),
lifetimes,
types,
vec!())
}
#[cfg(test)]
mod test {
use std::ops::Deref;
use syntax::codemap::DUMMY_SP;
use syntax::ext::build::AstBuilder;
use builder::Builder;
use super::build_single_task;
use test_helpers::{assert_equal_source, with_parsed};
#[test]
fn builds_single_task_os_loop() {
with_parsed("
single_task {
loop = \"run\";
}", |cx, failed, pt| {
let mut builder = Builder::new(pt.clone(), cx);
build_single_task(&mut builder, cx, pt.get_by_path("single_task").unwrap().clone());
assert!(unsafe{*failed} == false);
assert!(builder.main_stmts.len() == 1);
assert_equal_source(builder.main_stmts[0].deref(),
"loop {
run();
}");
});
}
#[test]
fn builds_single_task_with_args() {
with_parsed("
single_task {
loop = \"run\";
args {
a = 1;
b = \"a\";
c = &named;
}
}
named@ref;
", |cx, failed, pt| {
let mut builder = Builder::new(pt.clone(), cx);
pt.get_by_path("ref").unwrap().set_type_name("hello::world::Struct".to_string());
build_single_task(&mut builder, cx, pt.get_by_path("single_task").unwrap().clone());
assert!(unsafe{*failed} == false);
assert!(builder.main_stmts.len() == 1);
assert!(builder.type_items.len() == 2);
// XXX: builder.type_items[0] is `use zinc;` now
assert_equal_source(cx.stmt_item(DUMMY_SP, builder.type_items[1].clone()).deref(),
"pub struct run_args<'a> {
pub a: u32,
pub b: &'static str,
pub c: &'a hello::world::Struct,
}");
assert_equal_source(builder.main_stmts[0].deref(),
"loop {
run(&pt::run_args {
a: 1usize,
b: \"a\",
c: &named,
});
}");
});
}
}<|fim▁end|> | // Copyright 2014 Vladimir "farcaller" Pouzanov <[email protected]> |
<|file_name|>BitcoinLikeKeychain.hpp<|end_file_name|><|fim▁begin|>/*
*
* BitcoinLikeKeychain
* ledger-core
*
* Created by Pierre Pollastri on 17/01/2017.
*
* The MIT License (MIT)
*
* Copyright (c) 2016 Ledger
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*/
#ifndef LEDGER_CORE_BITCOINLIKEKEYCHAIN_HPP
#define LEDGER_CORE_BITCOINLIKEKEYCHAIN_HPP
#include "../../../bitcoin/BitcoinLikeExtendedPublicKey.hpp"
#include <string>
#include <vector>
#include <utils/DerivationScheme.hpp>
#include "../../../utils/Option.hpp"
#include "../../../preferences/Preferences.hpp"
#include "../../../api/Configuration.hpp"
#include "../../../api/DynamicObject.hpp"
#include <api/Currency.hpp>
#include <api/AccountCreationInfo.hpp>
#include <api/ExtendedKeyAccountCreationInfo.hpp>
#include <api/Keychain.hpp>
#include <bitcoin/BitcoinLikeAddress.hpp>
namespace ledger {
namespace core {
class BitcoinLikeKeychain: public api::Keychain {
public:
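// purpose of a derived address: RECEIVE for externally shared addresses, CHANGE for internal change outputs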
enum KeyPurpose {
RECEIVE, CHANGE
};
public:
using Address = std::shared_ptr<BitcoinLikeAddress>;
BitcoinLikeKeychain(
const std::shared_ptr<api::DynamicObject>& configuration,
const api::Currency& params,
int account,
const std::shared_ptr<Preferences>& preferences);
virtual bool markAsUsed(const std::vector<std::string>& addresses);
virtual bool markAsUsed(const std::string& address, bool needExtendKeychain = true);
virtual bool markPathAsUsed(const DerivationPath& path, bool needExtendKeychain = true) = 0;
virtual std::vector<Address> getAllObservableAddresses(uint32_t from, uint32_t to) = 0;
virtual std::vector<std::string> getAllObservableAddressString(uint32_t from, uint32_t to) = 0;
virtual std::vector<Address> getAllObservableAddresses(KeyPurpose purpose, uint32_t from, uint32_t to) = 0;
virtual Address getFreshAddress(KeyPurpose purpose) = 0;
virtual std::vector<Address> getFreshAddresses(KeyPurpose purpose, size_t n) = 0;
virtual Option<KeyPurpose> getAddressPurpose(const std::string& address) const = 0;
virtual Option<std::string> getAddressDerivationPath(const std::string& address) const = 0;
virtual bool isEmpty() const = 0;
int getAccountIndex() const;
const api::BitcoinLikeNetworkParameters& getNetworkParameters() const;
const api::Currency& getCurrency() const;
virtual Option<std::vector<uint8_t>> getPublicKey(const std::string& address) const = 0;
std::shared_ptr<api::DynamicObject> getConfiguration() const;
const DerivationScheme& getDerivationScheme() const;
const DerivationScheme& getFullDerivationScheme() const;
std::string getKeychainEngine() const;<|fim▁hole|>
virtual std::string getRestoreKey() const = 0;
virtual int32_t getObservableRangeSize() const = 0;
virtual bool contains(const std::string& address) const = 0;
virtual std::vector<Address> getAllAddresses() = 0;
virtual int32_t getOutputSizeAsSignedTxInput() const = 0;
static bool isSegwit(const std::string &keychainEngine);
static bool isNativeSegwit(const std::string &keychainEngine);
std::shared_ptr<Preferences> getPreferences() const;
protected:
DerivationScheme& getDerivationScheme();
private:
const api::Currency _currency;
DerivationScheme _scheme;
DerivationScheme _fullScheme;
int _account;
std::shared_ptr<Preferences> _preferences;
std::shared_ptr<api::DynamicObject> _configuration;
};
}
}
#endif //LEDGER_CORE_BITCOINLIKEKEYCHAIN_HPP<|fim▁end|> | bool isSegwit() const;
bool isNativeSegwit() const; |
<|file_name|>Year.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* This file is available under and governed by the GNU General Public
* License version 2 only, as published by the Free Software Foundation.
* However, the following notice accompanied the original version of this
* file:
*
* Copyright (c) 2007-2012, Stephen Colebourne & Michael Nascimento Santos
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of JSR-310 nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package java.time;
import static java.time.temporal.ChronoField.ERA;
import static java.time.temporal.ChronoField.YEAR;
import static java.time.temporal.ChronoField.YEAR_OF_ERA;
import static java.time.temporal.ChronoUnit.CENTURIES;
import static java.time.temporal.ChronoUnit.DECADES;
import static java.time.temporal.ChronoUnit.ERAS;
import static java.time.temporal.ChronoUnit.MILLENNIA;
import static java.time.temporal.ChronoUnit.YEARS;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.io.InvalidObjectException;
import java.io.ObjectInputStream;
import java.io.Serializable;
import java.time.chrono.Chronology;
import java.time.chrono.IsoChronology;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeFormatterBuilder;
import java.time.format.DateTimeParseException;
import java.time.format.SignStyle;
import java.time.temporal.ChronoField;
import java.time.temporal.ChronoUnit;
import java.time.temporal.Temporal;
import java.time.temporal.TemporalAccessor;
import java.time.temporal.TemporalAdjuster;
import java.time.temporal.TemporalAmount;
import java.time.temporal.TemporalField;
import java.time.temporal.TemporalQueries;
import java.time.temporal.TemporalQuery;
import java.time.temporal.TemporalUnit;
import java.time.temporal.UnsupportedTemporalTypeException;
import java.time.temporal.ValueRange;
import java.util.Objects;
/**
* A year in the ISO-8601 calendar system, such as {@code 2007}.
* <p>
* {@code Year} is an immutable date-time object that represents a year.
* Any field that can be derived from a year can be obtained.
* <p>
* <b>Note that years in the ISO chronology only align with years in the
* Gregorian-Julian system for modern years. Parts of Russia did not switch to the
* modern Gregorian/ISO rules until 1920.
* As such, historical years must be treated with caution.</b>
* <p>
* This class does not store or represent a month, day, time or time-zone.
* For example, the value "2007" can be stored in a {@code Year}.
* <p>
* Years represented by this class follow the ISO-8601 standard and use
* the proleptic numbering system. Year 1 is preceded by year 0, then by year -1.
* <p>
* The ISO-8601 calendar system is the modern civil calendar system used today
* in most of the world. It is equivalent to the proleptic Gregorian calendar
* system, in which today's rules for leap years are applied for all time.
* For most applications written today, the ISO-8601 rules are entirely suitable.
* However, any application that makes use of historical dates, and requires them
* to be accurate will find the ISO-8601 approach unsuitable.
*
* <p>
* This is a <a href="{@docRoot}/java/lang/doc-files/ValueBased.html">value-based</a>
* class; use of identity-sensitive operations (including reference equality
* ({@code ==}), identity hash code, or synchronization) on instances of
* {@code Year} may have unpredictable results and should be avoided.
* The {@code equals} method should be used for comparisons.
*
* @implSpec
* This class is immutable and thread-safe.
*
* @since 1.8
*/
public final class Year
implements Temporal, TemporalAdjuster, Comparable<Year>, Serializable {
/**
* The minimum supported year, '-999,999,999'.
*/
public static final int MIN_VALUE = -999_999_999;
/**
* The maximum supported year, '+999,999,999'.
*/
public static final int MAX_VALUE = 999_999_999;
/**
* Serialization version.
*/
private static final long serialVersionUID = -23038383694477807L;
/**
* Parser.
*/
private static final DateTimeFormatter PARSER = new DateTimeFormatterBuilder()
.appendValue(YEAR, 4, 10, SignStyle.EXCEEDS_PAD)
.toFormatter();
/**
* The year being represented.
*/
private final int year;
//-----------------------------------------------------------------------
/**
* Obtains the current year from the system clock in the default time-zone.
* <p>
* This will query the {@link java.time.Clock#systemDefaultZone() system clock} in the default
* time-zone to obtain the current year.
* <p>
* Using this method will prevent the ability to use an alternate clock for testing
* because the clock is hard-coded.
*
* @return the current year using the system clock and default time-zone, not null
*/
public static Year now() {
return now(Clock.systemDefaultZone());
}
/**
* Obtains the current year from the system clock in the specified time-zone.
* <p>
* This will query the {@link Clock#system(java.time.ZoneId) system clock} to obtain the current year.
* Specifying the time-zone avoids dependence on the default time-zone.
* <p>
* Using this method will prevent the ability to use an alternate clock for testing
* because the clock is hard-coded.
*
* @param zone the zone ID to use, not null
* @return the current year using the system clock, not null
*/
public static Year now(ZoneId zone) {
return now(Clock.system(zone));
}
/**
* Obtains the current year from the specified clock.
* <p>
* This will query the specified clock to obtain the current year.
* Using this method allows the use of an alternate clock for testing.
* The alternate clock may be introduced using {@link Clock dependency injection}.
*
* @param clock the clock to use, not null
* @return the current year, not null
*/
public static Year now(Clock clock) {
final LocalDate now = LocalDate.now(clock); // called once
return Year.of(now.getYear());
}
//-----------------------------------------------------------------------
/**
* Obtains an instance of {@code Year}.
* <p>
* This method accepts a year value from the proleptic ISO calendar system.
* <p>
* The year 2AD/CE is represented by 2.<br>
* The year 1AD/CE is represented by 1.<br>
* The year 1BC/BCE is represented by 0.<br>
* The year 2BC/BCE is represented by -1.<br>
*
* @param isoYear the ISO proleptic year to represent, from {@code MIN_VALUE} to {@code MAX_VALUE}
* @return the year, not null
* @throws DateTimeException if the field is invalid
*/
public static Year of(int isoYear) {
YEAR.checkValidValue(isoYear);
return new Year(isoYear);
}
//-----------------------------------------------------------------------
/**
* Obtains an instance of {@code Year} from a temporal object.
* <p>
* This obtains a year based on the specified temporal.
* A {@code TemporalAccessor} represents an arbitrary set of date and time information,
* which this factory converts to an instance of {@code Year}.
* <p>
* The conversion extracts the {@link ChronoField#YEAR year} field.
* The extraction is only permitted if the temporal object has an ISO
* chronology, or can be converted to a {@code LocalDate}.
* <p>
* This method matches the signature of the functional interface {@link TemporalQuery}
* allowing it to be used in queries via method reference, {@code Year::from}.
*
* @param temporal the temporal object to convert, not null
* @return the year, not null
* @throws DateTimeException if unable to convert to a {@code Year}
*/
public static Year from(TemporalAccessor temporal) {
if (temporal instanceof Year) {
return (Year) temporal;
}
Objects.requireNonNull(temporal, "temporal");
try {
if (IsoChronology.INSTANCE.equals(Chronology.from(temporal)) == false) {
temporal = LocalDate.from(temporal);
}
return of(temporal.get(YEAR));
} catch (DateTimeException ex) {
throw new DateTimeException("Unable to obtain Year from TemporalAccessor: " +
temporal + " of type " + temporal.getClass().getName(), ex);
}
}
//-----------------------------------------------------------------------
/**
* Obtains an instance of {@code Year} from a text string such as {@code 2007}.
* <p>
* The string must represent a valid year.
* Years outside the range 0000 to 9999 must be prefixed by the plus or minus symbol.
*
* @param text the text to parse such as "2007", not null
* @return the parsed year, not null
* @throws DateTimeParseException if the text cannot be parsed
*/
public static Year parse(CharSequence text) {
return parse(text, PARSER);
}
/**
* Obtains an instance of {@code Year} from a text string using a specific formatter.
* <p>
* The text is parsed using the formatter, returning a year.
*
* @param text the text to parse, not null
* @param formatter the formatter to use, not null
* @return the parsed year, not null
* @throws DateTimeParseException if the text cannot be parsed
*/
public static Year parse(CharSequence text, DateTimeFormatter formatter) {
Objects.requireNonNull(formatter, "formatter");
return formatter.parse(text, Year::from);
}
//-------------------------------------------------------------------------
/**
* Checks if the year is a leap year, according to the ISO proleptic
* calendar system rules.
* <p>
* This method applies the current rules for leap years across the whole time-line.
* In general, a year is a leap year if it is divisible by four without
* remainder. However, years divisible by 100 are not leap years, with
* the exception of years divisible by 400 which are.
* <p>
* For example, 1904 is a leap year as it is divisible by 4.
* 1900 was not a leap year as it is divisible by 100, however 2000 was a
* leap year as it is divisible by 400.
* <p>
* The calculation is proleptic - applying the same rules into the far future and far past.
* This is historically inaccurate, but is correct for the ISO-8601 standard.
*
* @param year the year to check
* @return true if the year is leap, false otherwise
*/
public static boolean isLeap(long year) {
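// (year & 3) == 0 is a branch-free equivalent of year % 4 == 0; the remaining checks apply the century exception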
return ((year & 3) == 0) && ((year % 100) != 0 || (year % 400) == 0);
}
//-----------------------------------------------------------------------
/**
* Constructor.
*
* @param year the year to represent
*/
private Year(int year) {
this.year = year;
}
//-----------------------------------------------------------------------
/**
* Gets the year value.
* <p>
* The year returned by this method is proleptic as per {@code get(YEAR)}.
*
* @return the year, {@code MIN_VALUE} to {@code MAX_VALUE}
*/
public int getValue() {
return year;
}
//-----------------------------------------------------------------------
/**
* Checks if the specified field is supported.
* <p>
* This checks if this year can be queried for the specified field.
* If false, then calling the {@link #range(TemporalField) range},
* {@link #get(TemporalField) get} and {@link #with(TemporalField, long)}
* methods will throw an exception.
* <p>
* If the field is a {@link ChronoField} then the query is implemented here.
* The supported fields are:
* <ul>
* <li>{@code YEAR_OF_ERA}
* <li>{@code YEAR}
* <li>{@code ERA}
* </ul>
* All other {@code ChronoField} instances will return false.
* <p>
* If the field is not a {@code ChronoField}, then the result of this method
* is obtained by invoking {@code TemporalField.isSupportedBy(TemporalAccessor)}
* passing {@code this} as the argument.
* Whether the field is supported is determined by the field.
*
* @param field the field to check, null returns false
* @return true if the field is supported on this year, false if not
*/
@Override
public boolean isSupported(TemporalField field) {
if (field instanceof ChronoField) {
return field == YEAR || field == YEAR_OF_ERA || field == ERA;
}
return field != null && field.isSupportedBy(this);
}
/**
* Checks if the specified unit is supported.
* <p>
* This checks if the specified unit can be added to, or subtracted from, this date-time.
* If false, then calling the {@link #plus(long, TemporalUnit)} and
* {@link #minus(long, TemporalUnit) minus} methods will throw an exception.
* <p>
* If the unit is a {@link ChronoUnit} then the query is implemented here.
* The supported units are:
* <ul>
* <li>{@code YEARS}
* <li>{@code DECADES}
* <li>{@code CENTURIES}
* <li>{@code MILLENNIA}
* <li>{@code ERAS}
* </ul>
* All other {@code ChronoUnit} instances will return false.
* <p>
* If the unit is not a {@code ChronoUnit}, then the result of this method
* is obtained by invoking {@code TemporalUnit.isSupportedBy(Temporal)}
* passing {@code this} as the argument.
* Whether the unit is supported is determined by the unit.
*
* @param unit the unit to check, null returns false
* @return true if the unit can be added/subtracted, false if not
*/
@Override
public boolean isSupported(TemporalUnit unit) {
if (unit instanceof ChronoUnit) {
return unit == YEARS || unit == DECADES || unit == CENTURIES || unit == MILLENNIA || unit == ERAS;
}
return unit != null && unit.isSupportedBy(this);
}
//-----------------------------------------------------------------------
/**
* Gets the range of valid values for the specified field.
* <p>
* The range object expresses the minimum and maximum valid values for a field.
* This year is used to enhance the accuracy of the returned range.
* If it is not possible to return the range, because the field is not supported
* or for some other reason, an exception is thrown.
* <p>
* If the field is a {@link ChronoField} then the query is implemented here.
* The {@link #isSupported(TemporalField) supported fields} will return
* appropriate range instances.
* All other {@code ChronoField} instances will throw an {@code UnsupportedTemporalTypeException}.
* <p>
* If the field is not a {@code ChronoField}, then the result of this method
* is obtained by invoking {@code TemporalField.rangeRefinedBy(TemporalAccessor)}
* passing {@code this} as the argument.
* Whether the range can be obtained is determined by the field.
*
* @param field the field to query the range for, not null
* @return the range of valid values for the field, not null
* @throws DateTimeException if the range for the field cannot be obtained
* @throws UnsupportedTemporalTypeException if the field is not supported
*/
@Override
public ValueRange range(TemporalField field) {
if (field == YEAR_OF_ERA) {
return (year <= 0 ? ValueRange.of(1, MAX_VALUE + 1) : ValueRange.of(1, MAX_VALUE));
}
return Temporal.super.range(field);
}
/**
* Gets the value of the specified field from this year as an {@code int}.
* <p>
* This queries this year for the value for the specified field.
* The returned value will always be within the valid range of values for the field.
* If it is not possible to return the value, because the field is not supported
* or for some other reason, an exception is thrown.
* <p>
* If the field is a {@link ChronoField} then the query is implemented here.
* The {@link #isSupported(TemporalField) supported fields} will return valid
* values based on this year.
* All other {@code ChronoField} instances will throw an {@code UnsupportedTemporalTypeException}.
* <p>
* If the field is not a {@code ChronoField}, then the result of this method
* is obtained by invoking {@code TemporalField.getFrom(TemporalAccessor)}
* passing {@code this} as the argument. Whether the value can be obtained,
* and what the value represents, is determined by the field.
*
* @param field the field to get, not null
* @return the value for the field
* @throws DateTimeException if a value for the field cannot be obtained or
* the value is outside the range of valid values for the field
* @throws UnsupportedTemporalTypeException if the field is not supported or
* the range of values exceeds an {@code int}
* @throws ArithmeticException if numeric overflow occurs
*/
@Override // override for Javadoc
public int get(TemporalField field) {
return range(field).checkValidIntValue(getLong(field), field);
}
/**
* Gets the value of the specified field from this year as a {@code long}.
* <p>
* This queries this year for the value for the specified field.
* If it is not possible to return the value, because the field is not supported
* or for some other reason, an exception is thrown.
* <p>
* If the field is a {@link ChronoField} then the query is implemented here.
* The {@link #isSupported(TemporalField) supported fields} will return valid
* values based on this year.
* All other {@code ChronoField} instances will throw an {@code UnsupportedTemporalTypeException}.
* <p>
* If the field is not a {@code ChronoField}, then the result of this method
* is obtained by invoking {@code TemporalField.getFrom(TemporalAccessor)}
* passing {@code this} as the argument. Whether the value can be obtained,
* and what the value represents, is determined by the field.
*
* @param field the field to get, not null
* @return the value for the field
* @throws DateTimeException if a value for the field cannot be obtained
* @throws UnsupportedTemporalTypeException if the field is not supported
* @throws ArithmeticException if numeric overflow occurs
*/
@Override
public long getLong(TemporalField field) {
if (field instanceof ChronoField) {
switch ((ChronoField) field) {
case YEAR_OF_ERA: return (year < 1 ? 1 - year : year);
case YEAR: return year;
case ERA: return (year < 1 ? 0 : 1);
}
throw new UnsupportedTemporalTypeException("Unsupported field: " + field);
}
return field.getFrom(this);
}
//-----------------------------------------------------------------------
/**
* Checks if the year is a leap year, according to the ISO proleptic
* calendar system rules.
* <p>
* This method applies the current rules for leap years across the whole time-line.
* In general, a year is a leap year if it is divisible by four without
* remainder. However, years divisible by 100 are not leap years, with
* the exception of years divisible by 400 which are.
* <p>
* For example, 1904 is a leap year as it is divisible by 4.
* 1900 was not a leap year as it is divisible by 100, however 2000 was a
* leap year as it is divisible by 400.
* <p>
* The calculation is proleptic - applying the same rules into the far future and far past.
* This is historically inaccurate, but is correct for the ISO-8601 standard.
*
* @return true if the year is leap, false otherwise
*/
public boolean isLeap() {
return Year.isLeap(year);
}
/**
* Checks if the month-day is valid for this year.
* <p>
* This method checks whether this year and the input month and day form
* a valid date.
*
* @param monthDay the month-day to validate, null returns false
* @return true if the month and day are valid for this year
*/
public boolean isValidMonthDay(MonthDay monthDay) {
return monthDay != null && monthDay.isValidYear(year);
}
/**
* Gets the length of this year in days.
*
* @return the length of this year in days, 365 or 366
*/
public int length() {
return isLeap() ? 366 : 365;
}
//-----------------------------------------------------------------------
/**
* Returns an adjusted copy of this year.
* <p>
* This returns a {@code Year}, based on this one, with the year adjusted.
* The adjustment takes place using the specified adjuster strategy object.
* Read the documentation of the adjuster to understand what adjustment will be made.
* <p>
* The result of this method is obtained by invoking the
* {@link TemporalAdjuster#adjustInto(Temporal)} method on the
* specified adjuster passing {@code this} as the argument.
* <p>
* This instance is immutable and unaffected by this method call.
*
* @param adjuster the adjuster to use, not null
* @return a {@code Year} based on {@code this} with the adjustment made, not null
* @throws DateTimeException if the adjustment cannot be made
* @throws ArithmeticException if numeric overflow occurs
*/
@Override
public Year with(TemporalAdjuster adjuster) {
return (Year) adjuster.adjustInto(this);
}
/**
* Returns a copy of this year with the specified field set to a new value.
* <p>
* This returns a {@code Year}, based on this one, with the value
* for the specified field changed.
* If it is not possible to set the value, because the field is not supported or for
* some other reason, an exception is thrown.
* <p>
* If the field is a {@link ChronoField} then the adjustment is implemented here.
* The supported fields behave as follows:
* <ul>
* <li>{@code YEAR_OF_ERA} -
* Returns a {@code Year} with the specified year-of-era.
* The era will be unchanged.
* <li>{@code YEAR} -
* Returns a {@code Year} with the specified year.
* This completely replaces the date and is equivalent to {@link #of(int)}.
* <li>{@code ERA} -
* Returns a {@code Year} with the specified era.
* The year-of-era will be unchanged.
* </ul>
* <p>
* In all cases, if the new value is outside the valid range of values for the field
* then a {@code DateTimeException} will be thrown.
* <p>
* All other {@code ChronoField} instances will throw an {@code UnsupportedTemporalTypeException}.
* <p>
* If the field is not a {@code ChronoField}, then the result of this method
* is obtained by invoking {@code TemporalField.adjustInto(Temporal, long)}
* passing {@code this} as the argument. In this case, the field determines
* whether and how to adjust the instant.
* <p>
* This instance is immutable and unaffected by this method call.
*
* @param field the field to set in the result, not null
* @param newValue the new value of the field in the result
* @return a {@code Year} based on {@code this} with the specified field set, not null
* @throws DateTimeException if the field cannot be set
* @throws UnsupportedTemporalTypeException if the field is not supported
* @throws ArithmeticException if numeric overflow occurs
*/
@Override
public Year with(TemporalField field, long newValue) {
if (field instanceof ChronoField) {
ChronoField f = (ChronoField) field;
f.checkValidValue(newValue);
switch (f) {
case YEAR_OF_ERA: return Year.of((int) (year < 1 ? 1 - newValue : newValue));
case YEAR: return Year.of((int) newValue);
case ERA: return (getLong(ERA) == newValue ? this : Year.of(1 - year));
}
throw new UnsupportedTemporalTypeException("Unsupported field: " + field);
}
return field.adjustInto(this, newValue);
}
//-----------------------------------------------------------------------
/**
* Returns a copy of this year with the specified amount added.
* <p>
* This returns a {@code Year}, based on this one, with the specified amount added.
* The amount is typically {@link Period} but may be any other type implementing
* the {@link TemporalAmount} interface.
* <p>
* The calculation is delegated to the amount object by calling
* {@link TemporalAmount#addTo(Temporal)}. The amount implementation is free
* to implement the addition in any way it wishes, however it typically
* calls back to {@link #plus(long, TemporalUnit)}. Consult the documentation
* of the amount implementation to determine if it can be successfully added.
* <p>
* This instance is immutable and unaffected by this method call.
*
* @param amountToAdd the amount to add, not null
* @return a {@code Year} based on this year with the addition made, not null
* @throws DateTimeException if the addition cannot be made
* @throws ArithmeticException if numeric overflow occurs
*/
@Override
public Year plus(TemporalAmount amountToAdd) {
return (Year) amountToAdd.addTo(this);
}
/**
* Returns a copy of this year with the specified amount added.
* <p>
* This returns a {@code Year}, based on this one, with the amount
* in terms of the unit added. If it is not possible to add the amount, because the
* unit is not supported or for some other reason, an exception is thrown.
* <p>
* If the field is a {@link ChronoUnit} then the addition is implemented here.
* The supported fields behave as follows:
* <ul>
* <li>{@code YEARS} -
* Returns a {@code Year} with the specified number of years added.
* This is equivalent to {@link #plusYears(long)}.
* <li>{@code DECADES} -
* Returns a {@code Year} with the specified number of decades added.
* This is equivalent to calling {@link #plusYears(long)} with the amount
* multiplied by 10.
* <li>{@code CENTURIES} -
* Returns a {@code Year} with the specified number of centuries added.
* This is equivalent to calling {@link #plusYears(long)} with the amount
* multiplied by 100.
* <li>{@code MILLENNIA} -
* Returns a {@code Year} with the specified number of millennia added.
* This is equivalent to calling {@link #plusYears(long)} with the amount
* multiplied by 1,000.
* <li>{@code ERAS} -
* Returns a {@code Year} with the specified number of eras added.
* Only two eras are supported so the amount must be one, zero or minus one.
* If the amount is non-zero then the year is changed such that the year-of-era
* is unchanged.
* </ul>
* <p>
* All other {@code ChronoUnit} instances will throw an {@code UnsupportedTemporalTypeException}.
* <p>
* If the field is not a {@code ChronoUnit}, then the result of this method
* is obtained by invoking {@code TemporalUnit.addTo(Temporal, long)}
* passing {@code this} as the argument. In this case, the unit determines
* whether and how to perform the addition.
* <p>
* This instance is immutable and unaffected by this method call.
*
* @param amountToAdd the amount of the unit to add to the result, may be negative
* @param unit the unit of the amount to add, not null
* @return a {@code Year} based on this year with the specified amount added, not null
* @throws DateTimeException if the addition cannot be made
* @throws UnsupportedTemporalTypeException if the unit is not supported
* @throws ArithmeticException if numeric overflow occurs
*/
@Override
public Year plus(long amountToAdd, TemporalUnit unit) {
if (unit instanceof ChronoUnit) {
switch ((ChronoUnit) unit) {
case YEARS: return plusYears(amountToAdd);
case DECADES: return plusYears(Math.multiplyExact(amountToAdd, 10));
case CENTURIES: return plusYears(Math.multiplyExact(amountToAdd, 100));
case MILLENNIA: return plusYears(Math.multiplyExact(amountToAdd, 1000));
case ERAS: return with(ERA, Math.addExact(getLong(ERA), amountToAdd));
}
throw new UnsupportedTemporalTypeException("Unsupported unit: " + unit);
}
return unit.addTo(this, amountToAdd);
}
/**
* Returns a copy of this year with the specified number of years added.
* <p>
* This instance is immutable and unaffected by this method call.
*
* @param yearsToAdd the years to add, may be negative
* @return a {@code Year} based on this year with the period added, not null
* @throws DateTimeException if the result exceeds the supported year range
*/
public Year plusYears(long yearsToAdd) {
if (yearsToAdd == 0) {
return this;
}
return of(YEAR.checkValidIntValue(year + yearsToAdd)); // overflow safe
}
//-----------------------------------------------------------------------
/**
* Returns a copy of this year with the specified amount subtracted.
* <p>
* This returns a {@code Year}, based on this one, with the specified amount subtracted.
* The amount is typically {@link Period} but may be any other type implementing
* the {@link TemporalAmount} interface.
* <p>
* The calculation is delegated to the amount object by calling
* {@link TemporalAmount#subtractFrom(Temporal)}. The amount implementation is free
* to implement the subtraction in any way it wishes, however it typically
* calls back to {@link #minus(long, TemporalUnit)}. Consult the documentation
* of the amount implementation to determine if it can be successfully subtracted.
* <p>
* This instance is immutable and unaffected by this method call.
*
* @param amountToSubtract the amount to subtract, not null
* @return a {@code Year} based on this year with the subtraction made, not null
* @throws DateTimeException if the subtraction cannot be made
* @throws ArithmeticException if numeric overflow occurs
*/
@Override
public Year minus(TemporalAmount amountToSubtract) {
return (Year) amountToSubtract.subtractFrom(this);
}
/**
* Returns a copy of this year with the specified amount subtracted.
* <p>
* This returns a {@code Year}, based on this one, with the amount
* in terms of the unit subtracted. If it is not possible to subtract the amount,
* because the unit is not supported or for some other reason, an exception is thrown.
* <p>
* This method is equivalent to {@link #plus(long, TemporalUnit)} with the amount negated.
* See that method for a full description of how addition, and thus subtraction, works.
* <p>
* This instance is immutable and unaffected by this method call.
*
* @param amountToSubtract the amount of the unit to subtract from the result, may be negative
* @param unit the unit of the amount to subtract, not null
* @return a {@code Year} based on this year with the specified amount subtracted, not null
* @throws DateTimeException if the subtraction cannot be made
* @throws UnsupportedTemporalTypeException if the unit is not supported
* @throws ArithmeticException if numeric overflow occurs
*/
@Override
public Year minus(long amountToSubtract, TemporalUnit unit) {
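// Long.MIN_VALUE cannot be negated without overflow, so subtract it in two steps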
return (amountToSubtract == Long.MIN_VALUE ? plus(Long.MAX_VALUE, unit).plus(1, unit) : plus(-amountToSubtract, unit));
}
/**
* Returns a copy of this year with the specified number of years subtracted.
* <p>
* This instance is immutable and unaffected by this method call.
*
* @param yearsToSubtract the years to subtract, may be negative
* @return a {@code Year} based on this year with the period subtracted, not null
* @throws DateTimeException if the result exceeds the supported year range
*/
public Year minusYears(long yearsToSubtract) {
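// same two-step trick: negating Long.MIN_VALUE directly would overflow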
return (yearsToSubtract == Long.MIN_VALUE ? plusYears(Long.MAX_VALUE).plusYears(1) : plusYears(-yearsToSubtract));
}
//-----------------------------------------------------------------------
/**
* Queries this year using the specified query.
* <p>
* This queries this year using the specified query strategy object.
* The {@code TemporalQuery} object defines the logic to be used to
* obtain the result. Read the documentation of the query to understand
* what the result of this method will be.
* <p>
* The result of this method is obtained by invoking the
* {@link TemporalQuery#queryFrom(TemporalAccessor)} method on the
* specified query passing {@code this} as the argument.
*
* @param <R> the type of the result
* @param query the query to invoke, not null
* @return the query result, null may be returned (defined by the query)
* @throws DateTimeException if unable to query (defined by the query)
* @throws ArithmeticException if numeric overflow occurs (defined by the query)
*/
@SuppressWarnings("unchecked")
@Override
public <R> R query(TemporalQuery<R> query) {
if (query == TemporalQueries.chronology()) {
return (R) IsoChronology.INSTANCE;
} else if (query == TemporalQueries.precision()) {
return (R) YEARS;
}
return Temporal.super.query(query);
}
/**
* Adjusts the specified temporal object to have this year.
* <p>
* This returns a temporal object of the same observable type as the input
* with the year changed to be the same as this.
* <p>
* The adjustment is equivalent to using {@link Temporal#with(TemporalField, long)}
* passing {@link ChronoField#YEAR} as the field.
* If the specified temporal object does not use the ISO calendar system then
* a {@code DateTimeException} is thrown.
* <p>
* In most cases, it is clearer to reverse the calling pattern by using
* {@link Temporal#with(TemporalAdjuster)}:
* <pre>
* // these two lines are equivalent, but the second approach is recommended
* temporal = thisYear.adjustInto(temporal);
* temporal = temporal.with(thisYear);
* </pre>
* <p>
* This instance is immutable and unaffected by this method call.
*
* @param temporal the target object to be adjusted, not null
* @return the adjusted object, not null
* @throws DateTimeException if unable to make the adjustment
* @throws ArithmeticException if numeric overflow occurs
*/
@Override
public Temporal adjustInto(Temporal temporal) {
if (Chronology.from(temporal).equals(IsoChronology.INSTANCE) == false) {
throw new DateTimeException("Adjustment only supported on ISO date-time");
}
return temporal.with(YEAR, year);
}
/**
* Calculates the amount of time until another year in terms of the specified unit.
* <p>
* This calculates the amount of time between two {@code Year}
* objects in terms of a single {@code TemporalUnit}.
* The start and end points are {@code this} and the specified year.
* The result will be negative if the end is before the start.
* The {@code Temporal} passed to this method is converted to a
* {@code Year} using {@link #from(TemporalAccessor)}.
* For example, the period in decades between two years can be calculated
* using {@code startYear.until(endYear, DECADES)}.
* <p>
* The calculation returns a whole number, representing the number of
* complete units between the two years.
* For example, the period in decades between 2012 and 2031
* will only be one decade as it is one year short of two decades.
* <p>
* There are two equivalent ways of using this method.
* The first is to invoke this method.
* The second is to use {@link TemporalUnit#between(Temporal, Temporal)}:
* <pre>
* // these two lines are equivalent
* amount = start.until(end, YEARS);
* amount = YEARS.between(start, end);
* </pre>
* The choice should be made based on which makes the code more readable.
* <p>
* The calculation is implemented in this method for {@link ChronoUnit}.
* The units {@code YEARS}, {@code DECADES}, {@code CENTURIES},
* {@code MILLENNIA} and {@code ERAS} are supported.
* Other {@code ChronoUnit} values will throw an exception.
* <p>
* If the unit is not a {@code ChronoUnit}, then the result of this method
* is obtained by invoking {@code TemporalUnit.between(Temporal, Temporal)}
* passing {@code this} as the first argument and the converted input temporal
* as the second argument.
* <p>
* This instance is immutable and unaffected by this method call.
*
* @param endExclusive the end date, exclusive, which is converted to a {@code Year}, not null
* @param unit the unit to measure the amount in, not null
* @return the amount of time between this year and the end year
* @throws DateTimeException if the amount cannot be calculated, or the end
* temporal cannot be converted to a {@code Year}
* @throws UnsupportedTemporalTypeException if the unit is not supported
* @throws ArithmeticException if numeric overflow occurs
*/
@Override
public long until(Temporal endExclusive, TemporalUnit unit) {
Year end = Year.from(endExclusive);
if (unit instanceof ChronoUnit) {
long yearsUntil = ((long) end.year) - year; // no overflow
switch ((ChronoUnit) unit) {
case YEARS: return yearsUntil;
case DECADES: return yearsUntil / 10;
case CENTURIES: return yearsUntil / 100;
case MILLENNIA: return yearsUntil / 1000;
case ERAS: return end.getLong(ERA) - getLong(ERA);
}
throw new UnsupportedTemporalTypeException("Unsupported unit: " + unit);
}
return unit.between(this, end);
}
/**
* Formats this year using the specified formatter.
* <p>
* This year will be passed to the formatter to produce a string.
*
* @param formatter the formatter to use, not null
* @return the formatted year string, not null
* @throws DateTimeException if an error occurs during printing
*/
public String format(DateTimeFormatter formatter) {
Objects.requireNonNull(formatter, "formatter");
return formatter.format(this);
}
//-----------------------------------------------------------------------
/**
* Combines this year with a day-of-year to create a {@code LocalDate}.
* <p>
* This returns a {@code LocalDate} formed from this year and the specified day-of-year.
* <p>
* The day-of-year value 366 is only valid in a leap year.
*
* @param dayOfYear the day-of-year to use, not null
* @return the local date formed from this year and the specified date of year, not null
* @throws DateTimeException if the day of year is zero or less, greater than 366,
* or equal to 366 and this is not a leap year
*/
public LocalDate atDay(int dayOfYear) {
return LocalDate.ofYearDay(year, dayOfYear);
}
/**
* Combines this year with a month to create a {@code YearMonth}.
* <p>
* This returns a {@code YearMonth} formed from this year and the specified month.
* All possible combinations of year and month are valid.
* <p>
* This method can be used as part of a chain to produce a date:
* <pre>
* LocalDate date = year.atMonth(month).atDay(day);
* </pre>
*
* @param month the month-of-year to use, not null
* @return the year-month formed from this year and the specified month, not null
*/
public YearMonth atMonth(Month month) {
return YearMonth.of(year, month);
}
/**
* Combines this year with a month to create a {@code YearMonth}.
* <p>
* This returns a {@code YearMonth} formed from this year and the specified month.
* All possible combinations of year and month are valid.
* <p>
* This method can be used as part of a chain to produce a date:
* <pre>
* LocalDate date = year.atMonth(month).atDay(day);
* </pre>
*
* @param month the month-of-year to use, from 1 (January) to 12 (December)
* @return the year-month formed from this year and the specified month, not null
* @throws DateTimeException if the month is invalid
*/
public YearMonth atMonth(int month) {
return YearMonth.of(year, month);
}
/**
* Combines this year with a month-day to create a {@code LocalDate}.
* <p>
* This returns a {@code LocalDate} formed from this year and the specified month-day.
* <p>
* A month-day of February 29th will be adjusted to February 28th in the resulting
* date if the year is not a leap year.
*
* @param monthDay the month-day to use, not null
* @return the local date formed from this year and the specified month-day, not null
*/
public LocalDate atMonthDay(MonthDay monthDay) {
return monthDay.atYear(year);
}
//-----------------------------------------------------------------------
/**
* Compares this year to another year.
* <p>
* The comparison is based on the value of the year.
* It is "consistent with equals", as defined by {@link Comparable}.
*
* @param other the other year to compare to, not null
* @return the comparator value, negative if less, positive if greater
*/
@Override
public int compareTo(Year other) {
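// safe: years are constrained to +/-999,999,999, so the difference cannot overflow an int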
return year - other.year;
}
/**
* Is this year after the specified year.
*
* @param other the other year to compare to, not null
* @return true if this is after the specified year
*/
public boolean isAfter(Year other) {
return year > other.year;
}
/**
* Is this year before the specified year.
*
* @param other the other year to compare to, not null
* @return true if this is before the specified year
*/
public boolean isBefore(Year other) {
return year < other.year;
}
//-----------------------------------------------------------------------
/**
* Checks if this year is equal to another year.
* <p>
* The comparison is based on the time-line position of the years.
*
* @param obj the object to check, null returns false
* @return true if this is equal to the other year
*/
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj instanceof Year) {
return year == ((Year) obj).year;
}
return false;
}
/**
* A hash code for this year.
*
* @return a suitable hash code
*/
@Override
public int hashCode() {
return year;
}
//-----------------------------------------------------------------------
/**
* Outputs this year as a {@code String}.
*
* @return a string representation of this year, not null
*/
@Override
public String toString() {
return Integer.toString(year);
}
//-----------------------------------------------------------------------
/**
* Writes the object using a
* <a href="../../serialized-form.html#java.time.Ser">dedicated serialized form</a>.
* @serialData
* <pre>
* out.writeByte(11); // identifies a Year
* out.writeInt(year);
* </pre>
*
* @return the instance of {@code Ser}, not null
*/
private Object writeReplace() {
return new Ser(Ser.YEAR_TYPE, this);
}
/**
* Defend against malicious streams.
*
* @throws InvalidObjectException always
*/
private void readObject(ObjectInputStream s) throws InvalidObjectException {
throw new InvalidObjectException("Deserialization via serialization delegate");
}
void writeExternal(DataOutput out) throws IOException {
out.writeInt(year);
}
static Year readExternal(DataInput in) throws IOException {
return Year.of(in.readInt());<|fim▁hole|>}<|fim▁end|> | }
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
This package contains different `unittests <https://docs.python.org/3/library/unittest.html>`_ for the project.
Those tests help to validate difficult pieces of the software.
"""<|fim▁hole|>__license__ = "GPLv3"<|fim▁end|> |
__author__ = 'Wuersch Marcel' |
<|file_name|>averaging_regressor_test.py<|end_file_name|><|fim▁begin|>import pprint as pp
import pandas as pd
from sklearn.linear_model import Ridge, Lasso<|fim▁hole|>from ionyx.contrib import AveragingRegressor
from ionyx.datasets import DataSetLoader
print('Beginning averaging regressor test...')
data, X, y = DataSetLoader.load_property_inspection()
data = data.iloc[:1000, :]
X = X[:1000, :]
y = y[:1000]
estimators = [('ridge', Ridge()), ('lasso', Lasso()), ('svm', LinearSVR())]
ensemble = AveragingRegressor(estimators, weights=[1.0, 1.5, 2.0])
ensemble.fit(X, y)
print('Estimators list:')
pp.pprint(ensemble.estimators_)
print('Named estimators dict:')
pp.pprint(ensemble.named_estimators_)
print('Model 1 score = {0}'.format(mean_absolute_error(y, ensemble.estimators_[0].predict(X))))
print('Model 2 score = {0}'.format(mean_absolute_error(y, ensemble.estimators_[1].predict(X))))
print('Model 3 score = {0}'.format(mean_absolute_error(y, ensemble.estimators_[2].predict(X))))
print('Ensemble score = {0}'.format(mean_absolute_error(y, ensemble.predict(X))))
cv = KFold()
print('Cross-validation score = {0}'.format(cross_val_score(ensemble, X, y, cv=cv)))
param_grid = [
{
'ridge__alpha': [0.01, 0.1]
}
]
grid = GridSearchCV(ensemble, param_grid=param_grid, cv=cv, return_train_score=True)
grid.fit(X, y)
results = pd.DataFrame(grid.cv_results_)
results = results.sort_values(by='mean_test_score', ascending=False)
print('Grid search results:')
print(results)
print('Done.')<|fim▁end|> | from sklearn.metrics import mean_absolute_error
from sklearn.model_selection import GridSearchCV, KFold, cross_val_score
from sklearn.svm import LinearSVR |
<|file_name|>test_asarandom.py<|end_file_name|><|fim▁begin|>import os
import unittest
from urlparse import urlparse
from paegan.utils.asarandom import AsaRandom
class AsaRandomTest(unittest.TestCase):
def test_create_random_filename(self):
temp_filename = AsaRandom.filename(prefix="superduper", suffix=".nc")
path = urlparse(temp_filename).path
name, ext = os.path.splitext(path)
<|fim▁hole|><|fim▁end|> | assert name.index("superduper") == 0
assert ext == ".nc" |
<|file_name|>filter.ts<|end_file_name|><|fim▁begin|>import {Operator} from '../Operator';
import {Subscriber} from '../Subscriber';
import {Observable} from '../Observable';
/**
* Filter items emitted by the source Observable by only emitting those that
* satisfy a specified predicate.
*
* <span class="informal">Like
* [Array.prototype.filter()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/filter),
* it only emits a value from the source if it passes a criterion function.</span>
*
* <img src="./img/filter.png" width="100%">
*
* Similar to the well-known `Array.prototype.filter` method, this operator
* takes values from the source Observable, passes them through a `predicate`
* function and only emits those values that yielded `true`.
*
* @example <caption>Emit only click events whose target was a DIV element</caption>
* var clicks = Rx.Observable.fromEvent(document, 'click');
* var clicksOnDivs = clicks.filter(ev => ev.target.tagName === 'DIV');
* clicksOnDivs.subscribe(x => console.log(x));
*
* @see {@link distinct}
* @see {@link distinctKey}
* @see {@link distinctUntilChanged}
* @see {@link distinctUntilKeyChanged}
* @see {@link ignoreElements}
* @see {@link partition}
* @see {@link skip}
*
* @param {function(value: T, index: number): boolean} predicate A function that
* evaluates each value emitted by the source Observable. If it returns `true`,
* the value is emitted, if `false` the value is not passed to the output
* Observable. The `index` parameter is the number `i` for the i-th source
* emission that has happened since the subscription, starting from the number
* `0`.
* @param {any} [thisArg] An optional argument to determine the value of `this`
* in the `predicate` function.
* @return {Observable} An Observable of values from the source that were
* allowed by the `predicate` function.<|fim▁hole|>export function filter<T>(predicate: (value: T, index: number) => boolean,
thisArg?: any): Observable<T> {
return this.lift(new FilterOperator(predicate, thisArg));
}
export interface FilterSignature<T> {
(predicate: (value: T, index: number) => boolean, thisArg?: any): Observable<T>;
}
class FilterOperator<T> implements Operator<T, T> {
constructor(private predicate: (value: T, index: number) => boolean,
private thisArg?: any) {
}
call(subscriber: Subscriber<T>, source: any): any {
return source._subscribe(new FilterSubscriber(subscriber, this.predicate, this.thisArg));
}
}
/**
* We need this JSDoc comment for affecting ESDoc.
* @ignore
* @extends {Ignored}
*/
class FilterSubscriber<T> extends Subscriber<T> {
count: number = 0;
constructor(destination: Subscriber<T>,
private predicate: (value: T, index: number) => boolean,
private thisArg: any) {
super(destination);
this.predicate = predicate;
}
// the try catch block below is left specifically for
// optimization and perf reasons. a tryCatcher is not necessary here.
protected _next(value: T) {
let result: any;
try {
result = this.predicate.call(this.thisArg, value, this.count++);
} catch (err) {
this.destination.error(err);
return;
}
if (result) {
this.destination.next(value);
}
}
}<|fim▁end|> | * @method filter
* @owner Observable
*/ |
<|file_name|>coverart_listview.py<|end_file_name|><|fim▁begin|># -*- Mode: python; coding: utf-8; tab-width: 4; indent-tabs-mode: nil; -*-
#
# Copyright (C) 2012 - fossfreedom
# Copyright (C) 2012 - Agustin Carrasco
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
from gi.repository import GObject
from gi.repository import GLib
from coverart_widgets import AbstractView
class ListShowingPolicy(GObject.Object):
'''
    Policy that mostly takes care of how and when things should be shown on
the view that makes use of the `AlbumsModel`.
'''
def __init__(self, list_view):
super(ListShowingPolicy, self).__init__()
self.counter = 0
self._has_initialised = False
<|fim▁hole|> return
self._has_initialised = True
class ListView(AbstractView):
__gtype_name__ = "ListView"
name = 'listview'
use_plugin_window = False
def __init__(self):
super(ListView, self).__init__()
self.view = self
self._has_initialised = False
self.show_policy = ListShowingPolicy(self)
def initialise(self, source):
if self._has_initialised:
return
self._has_initialised = True
self.view_name = "list_view"
super(ListView, self).initialise(source)
# self.album_manager = source.album_manager
self.shell = source.shell
def switch_to_view(self, source, album):
self.initialise(source)
GLib.idle_add(self.shell.props.display_page_tree.select,
self.shell.props.library_source)
def get_selected_objects(self):
'''
finds what has been selected
returns an array of `Album`
'''
return []<|fim▁end|> | def initialise(self, album_manager):
if self._has_initialised: |
<|file_name|>XYZ.js<|end_file_name|><|fim▁begin|>var should = require("should");
var Mat3x3 = require("./Mat3x3");
var Barycentric3 = require("./Barycentric3");
var Logger = require("./Logger");
(function(exports) {
var verboseLogger = new Logger({
logLevel: "debug"
});
////////////////// constructor
function XYZ(x, y, z, options) {
var that = this;
if (typeof x === "number") {
that.x = x;
that.y = y;
that.z = z;
should &&
y.should.Number &&
z.should.Number &&
isNaN(x).should.False &&
isNaN(y).should.False &&
isNaN(z).should.False;
} else {
var xyz = x;
should &&
xyz.x.should.Number &&
xyz.y.should.Number &&
xyz.z.should.Number;
that.x = xyz.x;
that.y = xyz.y;
that.z = xyz.z;
if (options == null) {
options = y;
}
}
options = options || {};
if (options.verbose) {
that.verbose = options.verbose;
}
return that;
}
XYZ.prototype.nearest = function(a, b) {
var that = this;
var adx = a.x - that.x;
var ady = a.y - that.y;
var adz = a.z - that.z;
var bdx = b.x - that.x;
var bdy = b.y - that.y;
var bdz = b.z - that.z;
var ad2 = adx * adx + ady * ady + adz * adz;
var bd2 = bdx * bdx + bdy * bdy + bdz * bdz;
return ad2 <= bd2 ? a : b;
}
XYZ.prototype.dot = function(xyz) {
var that = this;
return that.x * xyz.x + that.y * xyz.y + that.z * xyz.z;
}
XYZ.prototype.cross = function(xyz) {
var that = this;
return new XYZ(
that.y * xyz.z - that.z * xyz.y, -(that.x * xyz.z - that.z * xyz.x),
that.x * xyz.y - that.y * xyz.x,
that);
}
XYZ.prototype.interpolate = function(xyz, p) {
var that = this;
p = p == null ? 0.5 : p;
var p1 = 1 - p;
should &&
xyz.should.exist &&
xyz.x.should.Number &&
xyz.y.should.Number &&
xyz.z.should.Number;
return new XYZ(
p * xyz.x + p1 * that.x,
p * xyz.y + p1 * that.y,
p * xyz.z + p1 * that.z,
that);
}
XYZ.prototype.invalidate = function() {
var that = this;
delete that._norm;
}
XYZ.prototype.normSquared = function() {
var that = this;
return that.x * that.x + that.y * that.y + that.z * that.z;
}
XYZ.prototype.norm = function() {
var that = this;
if (that._norm == null) {
that._norm = Math.sqrt(that.normSquared());
}
return that._norm;
}
XYZ.prototype.minus = function(value) {
var that = this;
should &&
value.x.should.Number &&
value.y.should.Number &&
value.z.should.Number;
return new XYZ(that.x - value.x, that.y - value.y, that.z - value.z, that);
}
XYZ.prototype.plus = function(value) {
var that = this;
should &&
value.x.should.Number &&
value.y.should.Number &&
value.z.should.Number;
return new XYZ(that.x + value.x, that.y + value.y, that.z + value.z, that);
}
XYZ.prototype.equal = function(value, tolerance) {
var that = this;
if (value == null) {
that.verbose && console.log("XYZ.equal(null) => false");
return false;
}
if (value.x == null) {
that.verbose && console.log("XYZ.equal(value.x is null) => false");
return false;
}
if (value.y == null) {
that.verbose && console.log("XYZ.equal(value.y is null) => false");
return false;
}
if (value.z == null) {
that.verbose && console.log("XYZ.equal(value.z is null) => false");
return false;
}
tolerance = tolerance || 0;
var result = value.x - tolerance <= that.x && that.x <= value.x + tolerance &&
value.y - tolerance <= that.y && that.y <= value.y + tolerance &&
value.z - tolerance <= that.z && that.z <= value.z + tolerance;
that.verbose && !result && verboseLogger.debug("XYZ", that, ".equal(", value, ") => false");
return result;
}
XYZ.prototype.toString = function() {
var that = this;
var scale = 1000;
return "[" + Math.round(that.x * scale) / scale +
"," + Math.round(that.y * scale) / scale +
"," + Math.round(that.z * scale) / scale +
"]";
}
XYZ.prototype.multiply = function(m) {
var that = this;
if (m instanceof Mat3x3) {
return new XYZ(
m.get(0, 0) * that.x + m.get(0, 1) * that.y + m.get(0, 2) * that.z,
m.get(1, 0) * that.x + m.get(1, 1) * that.y + m.get(1, 2) * that.z,
m.get(2, 0) * that.x + m.get(2, 1) * that.y + m.get(2, 2) * that.z,
that);
}
should && m.should.Number;
return new XYZ(
m * that.x,
m * that.y,
m * that.z,
that);
}
/////////// class
XYZ.of = function(xyz, options) {
options = options || {};
if (xyz instanceof XYZ) {
return xyz;
}
if (options.strict) {
should &&
xyz.x.should.Number &&
xyz.y.should.Number &&
xyz.z.should.Number;
} else {
            // Reject coordinates that are not plain numbers.
            if (typeof xyz.x !== "number") {
                return null;
            }
            if (typeof xyz.y !== "number") {
                return null;
            }
            if (typeof xyz.z !== "number") {
                return null;
            }
}
return new XYZ(xyz.x, xyz.y, xyz.z, options);
}
XYZ.precisionDriftComparator = function(v1, v2) {
// comparator order will reveal
// any long-term precision drift
// as a horizontal visual break along x-axis
var s1 = v1.y < 0 ? -1 : 1;
var s2 = v2.y < 0 ? -1 : 1;
var cmp = s1 - s2;
cmp === 0 && (cmp = Math.round(v2.y) - Math.round(v1.y));
if (cmp === 0) {
var v1x = Math.round(v1.x);
var v2x = Math.round(v2.x);
cmp = v1.y < 0 ? v1x - v2x : v2x - v1x;
}
cmp === 0 && (cmp = v1.z - v2.z);
return cmp;
}
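    // Rough illustration (added, not from the original source) of the ordering:
    // vertices with negative y sort before those with non-negative y, rows are then
    // grouped by rounded y, x alternates direction between the two halves, and z
    // breaks the remaining ties.
    // e.g. XYZ.precisionDriftComparator(new XYZ(1, -2, 3), new XYZ(1, 2, 3)) < 0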
module.exports = exports.XYZ = XYZ;
})(typeof exports === "object" ? exports : (exports = {}));
// mocha -R min --inline-diffs *.js
(typeof describe === 'function') && describe("XYZ", function() {
var XYZ = require("./XYZ");
var options = {
verbose: true
};
it("XYZ(1,2,3) should create an XYZ coordinate", function() {
var xyz = new XYZ(1, 2, 3);
xyz.should.instanceOf(XYZ);
xyz.x.should.equal(1);
xyz.y.should.equal(2);
xyz.z.should.equal(3);
})
it("XYZ({x:1,y:2,z:3) should create an XYZ coordinate", function() {
var xyz = new XYZ(1, 2, 3);
var xyz2 = new XYZ(xyz);
xyz2.should.instanceOf(XYZ);
xyz2.x.should.equal(1);
xyz2.y.should.equal(2);
xyz2.z.should.equal(3);
var xyz3 = new XYZ({
x: 1,
y: 2,
z: 3
});
xyz2.should.instanceOf(XYZ);
xyz2.x.should.equal(1);
xyz2.y.should.equal(2);
xyz2.z.should.equal(3);
})
it("equal(value, tolerance) should return true if coordinates are same within tolerance", function() {
var xyz = new XYZ(1, 2, 3);
var xyz2 = new XYZ(xyz);
xyz.equal(xyz2).should.True;
xyz2.x = xyz.x - 0.00001;
xyz.equal(xyz2).should.False;
xyz.equal(xyz2, 0.00001).should.True;
xyz.equal(xyz2, 0.000001).should.False;
xyz2.x = xyz.x + 0.00001;
xyz.equal(xyz2).should.False;
xyz.equal(xyz2, 0.00001).should.True;
xyz.equal(xyz2, 0.000001).should.False;
})
it("norm() should return true the vector length", function() {
var e = 0.000001;
new XYZ(1, 2, 3).norm().should.within(3.741657 - e, 3.741657 + e);
new XYZ(-1, 2, 3).norm().should.within(3.741657 - e, 3.741657 + e);
new XYZ(1, -2, 3).norm().should.within(3.741657 - e, 3.741657 + e);
new XYZ(1, -2, -3).norm().should.within(3.741657 - e, 3.741657 + e);
new XYZ(1, 0, 1).norm().should.within(1.414213 - e, 1.414213 + e);
new XYZ(0, 1, 1).norm().should.within(1.414213 - e, 1.414213 + e);
new XYZ(1, 1, 0).norm().should.within(1.414213 - e, 1.414213 + e);
})
it("normSquared() should return norm squared", function() {
var xyz = new XYZ(1, 2, 3);
xyz.norm().should.equal(Math.sqrt(xyz.normSquared()));
})
it("minus(value) should return vector difference", function() {
var xyz1 = new XYZ(1, 2, 3);
var xyz2 = new XYZ(10, 20, 30);
var xyz3 = xyz1.minus(xyz2);
xyz3.equal({
x: -9,
y: -18,
z: -27
}).should.True;
})
it("plus(value) should return vector sum", function() {
var xyz1 = new XYZ(1, 2, 3);
var xyz2 = new XYZ(10, 20, 30);
var xyz3 = xyz1.plus(xyz2);
xyz3.equal({
x: 11,
y: 22,
z: 33
}).should.True;
})
it("interpolate(xyz,p) should interpolate to given point for p[0,1]", function() {
var pt1 = new XYZ(1, 1, 1, {
verbose: true
});
var pt2 = new XYZ(10, 20, 30, {
verbose: true
});
pt1.interpolate(pt2, 0).equal(pt1).should.True;
pt1.interpolate(pt2, 1).equal(pt2).should.True;
pt1.interpolate(pt2, 0.1).equal({
x: 1.9,
y: 2.9,<|fim▁hole|> var xyz = XYZ.of({
x: 1,
y: 2,
z: 3
});
xyz.should.instanceOf.XYZ;
var xyz2 = XYZ.of(xyz);
xyz2.should.equal(xyz);
});
it("cross(xyz) returns cross product with xyz", function() {
var v1 = new XYZ(1, 2, 3, options);
var v2 = new XYZ(4, 5, 6, options);
var cross = v1.cross(v2);
var e = 0;
cross.equal({
x: -3,
y: 6,
z: -3,
e
}).should.True;
});
it("teString() returns concise string representation", function() {
new XYZ(1, 2, 3).toString().should.equal("[1,2,3]");
new XYZ(1.001, 2.0001, -3.001).toString().should.equal("[1.001,2,-3.001]");
new XYZ(1.001, 2.0001, -3.001).toString().should.equal("[1.001,2,-3.001]");
})
it("dot(xyz) returns dot product with xyz", function() {
var v1 = new XYZ(1, 2, 3, options);
var v2 = new XYZ(4, 5, 6, options);
var dot = v1.dot(v2);
dot.should.equal(32);
});
it("nearest(a,b) returns nearest point", function() {
var vx = new XYZ(1, 0, 0);
var vy = new XYZ(0, 1, 0);
var vz = new XYZ(0, 0, 1);
new XYZ(2, 0, 0).nearest(vx, vy).should.equal(vx);
new XYZ(0, 0, 2).nearest(vx, vy).should.equal(vx);
new XYZ(0, 2, 0).nearest(vx, vy).should.equal(vy);
new XYZ(0, 2, 0).nearest(vz, vy).should.equal(vy);
new XYZ(0, 0, 2).nearest(vy, vz).should.equal(vz);
new XYZ(0, 0, 2).nearest(vx, vz).should.equal(vz);
});
it("precisionDriftComparator(v1,v2) sorts scanned vertices to reveal long-term precision drift", function() {
XYZ.precisionDriftComparator(new XYZ(1, 2, 3), new XYZ(1, 2, 3)).should.equal(0);
XYZ.precisionDriftComparator(new XYZ(1, -2, 3), new XYZ(1, -2, 3)).should.equal(0);
XYZ.precisionDriftComparator(new XYZ(1, -2, 3), new XYZ(1, 2, 3)).should.below(0);
XYZ.precisionDriftComparator(new XYZ(1, 2, 3), new XYZ(1, -2, 3)).should.above(0);
XYZ.precisionDriftComparator(new XYZ(1, -2, 3), new XYZ(1, -3, 3)).should.below(0);
XYZ.precisionDriftComparator(new XYZ(1, -2, 3), new XYZ(2, -2, 3)).should.below(0);
XYZ.precisionDriftComparator(new XYZ(1, 2, 3), new XYZ(2, 2, 3)).should.above(0);
XYZ.precisionDriftComparator(new XYZ(1, 2, 3), new XYZ(1, 3, 3)).should.above(0);
});
})<|fim▁end|> | z: 3.9
}).should.True;
});
it("XYZ.of(pt) should return an XYZ object for given point", function() { |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>var PgQuery = require('bindings')('pg-query');
module.exports = {
parse: function(query) {
var result = PgQuery.parse(query);
if (result.query) {
result.query = JSON.parse(result.query);
}
if (result.error) {
var err = new Error(result.error.message);
err.fileName = result.error.fileName;
err.lineNumber = result.error.lineNumber;
err.cursorPosition = result.error.cursorPosition;
err.functionName = result.error.functionName;<|fim▁hole|> result.error = err;
}
return result;
}
};<|fim▁end|> | err.context = result.error.context;
|
<|file_name|>CloudFrontDistributionConfiguration.java<|end_file_name|><|fim▁begin|>package ch.cyberduck.core.cloudfront;
/*
* Copyright (c) 2002-2013 David Kocher. All rights reserved.
* http://cyberduck.ch/
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* Bug fixes, suggestions and comments should be sent to:
* [email protected]
*/
import ch.cyberduck.core.AlphanumericRandomStringService;
import ch.cyberduck.core.DisabledListProgressListener;
import ch.cyberduck.core.Host;
import ch.cyberduck.core.HostUrlProvider;
import ch.cyberduck.core.LocaleFactory;
import ch.cyberduck.core.LoginCallback;
import ch.cyberduck.core.Path;
import ch.cyberduck.core.PathContainerService;
import ch.cyberduck.core.Scheme;
import ch.cyberduck.core.auth.AWSCredentialsConfigurator;
import ch.cyberduck.core.aws.AmazonServiceExceptionMappingService;
import ch.cyberduck.core.aws.CustomClientConfiguration;
import ch.cyberduck.core.cdn.Distribution;
import ch.cyberduck.core.cdn.DistributionConfiguration;
import ch.cyberduck.core.cdn.features.Cname;
import ch.cyberduck.core.cdn.features.DistributionLogging;
import ch.cyberduck.core.cdn.features.Index;
import ch.cyberduck.core.cdn.features.Purge;
import ch.cyberduck.core.exception.AccessDeniedException;
import ch.cyberduck.core.exception.BackgroundException;
import ch.cyberduck.core.exception.InteroperabilityException;
import ch.cyberduck.core.features.Location;
import ch.cyberduck.core.preferences.HostPreferences;
import ch.cyberduck.core.preferences.Preferences;
import ch.cyberduck.core.preferences.PreferencesFactory;
import ch.cyberduck.core.s3.S3BucketListService;
import ch.cyberduck.core.s3.S3LocationFeature;
import ch.cyberduck.core.s3.S3Protocol;
import ch.cyberduck.core.s3.S3Session;
import ch.cyberduck.core.ssl.ThreadLocalHostnameDelegatingTrustManager;
import ch.cyberduck.core.ssl.X509KeyManager;
import ch.cyberduck.core.ssl.X509TrustManager;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.jets3t.service.utils.ServiceUtils;
import java.net.URI;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.List;
import com.amazonaws.AmazonClientException;
import com.amazonaws.ClientConfiguration;
import com.amazonaws.client.builder.AwsClientBuilder;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.cloudfront.AmazonCloudFront;
import com.amazonaws.services.cloudfront.AmazonCloudFrontClientBuilder;
import com.amazonaws.services.cloudfront.model.*;
/**
* Amazon CloudFront CDN configuration.
*/
public class CloudFrontDistributionConfiguration implements DistributionConfiguration, Purge, Index, DistributionLogging, Cname {
private static final Logger log = LogManager.getLogger(CloudFrontDistributionConfiguration.class);
private final Preferences preferences = PreferencesFactory.get();
protected final S3Session session;
private final Host bookmark;
private final ClientConfiguration configuration;
private final Location locationFeature;
public CloudFrontDistributionConfiguration(final S3Session session, final X509TrustManager trust, final X509KeyManager key) {
this.session = session;
this.bookmark = session.getHost();
this.configuration = new CustomClientConfiguration(bookmark,
new ThreadLocalHostnameDelegatingTrustManager(trust, bookmark.getHostname()), key);
this.locationFeature = session.getFeature(Location.class);
}
@Override
public String getName() {
return LocaleFactory.localizedString("Amazon CloudFront", "S3");
}
/**
     * @param container Bucket
     * @param method    Distribution method
     * @return Origin server hostname, i.e. {@code <bucketname>.s3.amazonaws.com}. This is not the same as the
     *         container for custom origin configurations and website endpoints.
*/
protected URI getOrigin(final Path container, final Distribution.Method method) throws BackgroundException {
return URI.create(String.format("http://%s.%s", container.getName(), bookmark.getProtocol().getDefaultHostname()));
}
@Override
public List<Distribution.Method> getMethods(final Path container) {
return Arrays.asList(Distribution.DOWNLOAD, Distribution.STREAMING);
}
@Override
public Distribution read(final Path file, final Distribution.Method method, final LoginCallback prompt) throws BackgroundException {
final Path container = session.getFeature(PathContainerService.class).getContainer(file);
try {
if(log.isDebugEnabled()) {
log.debug(String.format("List %s distributions", method));
}
final AmazonCloudFront client = this.client(container);
if(method.equals(Distribution.STREAMING)) {
for(StreamingDistributionSummary d : client.listStreamingDistributions(
new ListStreamingDistributionsRequest()).getStreamingDistributionList().getItems()) {
final S3Origin config = d.getS3Origin();
if(config != null) {
final URI origin = this.getOrigin(container, method);
if(config.getDomainName().equals(origin.getHost())) {
// We currently only support one distribution per bucket
return this.readStreamingDistribution(client, d, container, method);
}
}
}
}
else if(method.equals(Distribution.DOWNLOAD)) {
// List distributions restricting to bucket name origin
for(DistributionSummary d : client.listDistributions(
new ListDistributionsRequest()).getDistributionList().getItems()) {
for(Origin o : d.getOrigins().getItems()) {
final S3OriginConfig config = o.getS3OriginConfig();
if(config != null) {
if(o.getDomainName().equals(this.getOrigin(container, method).getHost())) {
// We currently only support one distribution per bucket
return this.readDownloadDistribution(client, d, container, method);
}
}
}
}
}
else if(method.equals(Distribution.CUSTOM) || method.equals(Distribution.WEBSITE_CDN)) {
for(DistributionSummary d : client.listDistributions(new ListDistributionsRequest()).getDistributionList().getItems()) {
final URI origin = this.getOrigin(container, method);
for(Origin o : d.getOrigins().getItems()) {
// Listing all distributions and look for custom origin
final CustomOriginConfig config = o.getCustomOriginConfig();
if(config != null) {
if(o.getDomainName().equals(origin.getHost())) {
// We currently only support one distribution per bucket
return this.readDownloadDistribution(client, d, container, method);
}
}
}
}
}
final URI origin = this.getOrigin(container, method);
// Return disabled configuration
return new Distribution(method, this.getName(), origin, false);
}
catch(AmazonClientException e) {
throw new AmazonServiceExceptionMappingService().map("Cannot read CDN configuration", e);
}
}
@Override
public void write(final Path file, final Distribution distribution, final LoginCallback prompt) throws BackgroundException {
final Path container = session.getFeature(PathContainerService.class).getContainer(file);
try {
if(null == distribution.getId()) {
// No existing configuration
if(log.isDebugEnabled()) {
log.debug(String.format("No existing distribution found for method %s", distribution.getMethod()));
}
if(distribution.getMethod().equals(Distribution.STREAMING)) {
distribution.setId(this.createStreamingDistribution(container, distribution).getId());
}
else if(distribution.getMethod().equals(Distribution.DOWNLOAD)) {
distribution.setId(this.createDownloadDistribution(container, distribution).getId());
}
else if(distribution.getMethod().equals(Distribution.CUSTOM)
|| distribution.getMethod().equals(Distribution.WEBSITE_CDN)) {
distribution.setId(this.createCustomDistribution(container, distribution).getId());
}
}
else {
if(distribution.getMethod().equals(Distribution.DOWNLOAD)) {
distribution.setEtag(this.updateDownloadDistribution(container, distribution).getETag());
}
else if(distribution.getMethod().equals(Distribution.STREAMING)) {
distribution.setEtag(this.updateStreamingDistribution(container, distribution).getETag());
}
else if(distribution.getMethod().equals(Distribution.CUSTOM)
|| distribution.getMethod().equals(Distribution.WEBSITE_CDN)) {
distribution.setEtag(this.updateCustomDistribution(container, distribution).getETag());
}
}
}
catch(AmazonClientException e) {
throw new AmazonServiceExceptionMappingService().map("Cannot write CDN configuration", e);
}
}
@Override<|fim▁hole|> public <T> T getFeature(final Class<T> type, final Distribution.Method method) {
if(type == Purge.class || type == Index.class) {
if(method.equals(Distribution.DOWNLOAD)
|| method.equals(Distribution.WEBSITE_CDN)
|| method.equals(Distribution.CUSTOM)) {
return (T) this;
}
}
if(type == DistributionLogging.class) {
if(method.equals(Distribution.DOWNLOAD)
|| method.equals(Distribution.STREAMING)
|| method.equals(Distribution.CUSTOM)) {
return (T) this;
}
}
if(type == Cname.class) {
return (T) this;
}
return null;
}
/**
* You can make any number of invalidation requests, but you can have only three invalidation requests in progress
* at one time. Each request can contain up to 1,000 objects to invalidate. If you exceed these limits, you get an
* error message.
* <p>
* It usually takes 10 to 15 minutes to complete your invalidation request, depending on the size of your request.
*/
@Override
public void invalidate(final Path container, final Distribution.Method method, final List<Path> files, final LoginCallback prompt) throws BackgroundException {
try {
final Distribution d = this.read(container, method, prompt);
if(d.isEnabled()) {
final List<String> keys = new ArrayList<>();
for(Path file : files) {
if(session.getFeature(PathContainerService.class).isContainer(file)) {
// To invalidate all of the objects in a distribution
keys.add(String.format("%s*", Path.DELIMITER));
}
else {
if(file.isDirectory()) {
// The *, which replaces 0 or more characters, must be the last character in the invalidation path
keys.add(String.format("/%s*", session.getFeature(PathContainerService.class).getKey(file)));
}
else {
keys.add(String.format("/%s", session.getFeature(PathContainerService.class).getKey(file)));
}
}
}
if(keys.isEmpty()) {
log.warn("No keys selected for invalidation");
}
else {
final AmazonCloudFront client = this.client(container);
client.createInvalidation(new CreateInvalidationRequest(d.getId(),
new InvalidationBatch(new Paths().withItems(keys).withQuantity(keys.size()), new AlphanumericRandomStringService().random())
));
}
}
}
catch(AmazonClientException e) {
throw new AmazonServiceExceptionMappingService().map("Cannot write CDN configuration", e);
}
}
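    // Hedged illustration (not part of the original source) of the invalidation paths
    // built above: the whole container maps to "/*", a directory maps to "/<key>*"
    // and a single file maps to "/<key>", where <key> is the object key relative to
    // the bucket.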
/**
* @param distribution Configuration
* @return Status message from service
*/
private String readInvalidationStatus(final AmazonCloudFront client,
final Distribution distribution) throws BackgroundException {
try {
int pending = 0;
int completed = 0;
String marker = null;
do {
final ListInvalidationsResult response = client.listInvalidations(new ListInvalidationsRequest(distribution.getId())
.withMaxItems(String.valueOf(1000))
.withMarker(marker));
for(InvalidationSummary s : response.getInvalidationList().getItems()) {
// When the invalidation batch is finished, the status is Completed.
if("Completed".equals(s.getStatus())) {
// No schema for status enumeration. Fail.
completed++;
}
else {
// InProgress
pending++;
}
}
marker = response.getInvalidationList().getNextMarker();
}
while(marker != null);
if(pending > 0) {
return MessageFormat.format(LocaleFactory.localizedString("{0} invalidations in progress", "S3"), pending);
}
if(completed > 0) {
return MessageFormat.format(LocaleFactory.localizedString("{0} invalidations completed", "S3"), completed);
}
return LocaleFactory.localizedString("None");
}
catch(AmazonClientException e) {
throw new AmazonServiceExceptionMappingService().map("Cannot read CDN configuration", e);
}
}
/**
* Amazon CloudFront Extension to create a new distribution configuration
*
* @return Distribution configuration
*/
protected StreamingDistribution createStreamingDistribution(final Path container, final Distribution distribution)
throws BackgroundException {
if(log.isDebugEnabled()) {
log.debug(String.format("Create new %s distribution", distribution));
}
final AmazonCloudFront client = this.client(container);
final URI origin = this.getOrigin(container, distribution.getMethod());
final String originId = String.format("%s-%s", preferences.getProperty("application.name"), new AlphanumericRandomStringService().random());
final StreamingDistributionConfig config = new StreamingDistributionConfig(new AlphanumericRandomStringService().random(),
new S3Origin(origin.getHost(), StringUtils.EMPTY), distribution.isEnabled())
.withComment(originId)
.withTrustedSigners(new TrustedSigners().withEnabled(false).withQuantity(0))
.withAliases(new Aliases().withItems(distribution.getCNAMEs()).withQuantity(distribution.getCNAMEs().length));
// Make bucket name fully qualified
final String loggingTarget = ServiceUtils.generateS3HostnameForBucket(distribution.getLoggingContainer(),
false, new S3Protocol().getDefaultHostname());
if(log.isDebugEnabled()) {
log.debug(String.format("Set logging target for %s to %s", distribution, loggingTarget));
}
config.setLogging(new StreamingLoggingConfig()
.withEnabled(distribution.isLogging())
.withBucket(loggingTarget)
.withPrefix(new HostPreferences(session.getHost()).getProperty("cloudfront.logging.prefix"))
);
return client.createStreamingDistribution(new CreateStreamingDistributionRequest(config)).getStreamingDistribution();
}
protected com.amazonaws.services.cloudfront.model.Distribution createDownloadDistribution(final Path container, final Distribution distribution)
throws BackgroundException {
if(log.isDebugEnabled()) {
log.debug(String.format("Create new %s distribution", distribution));
}
final AmazonCloudFront client = this.client(container);
final URI origin = this.getOrigin(container, distribution.getMethod());
final String originId = String.format("%s-%s", preferences.getProperty("application.name"), new AlphanumericRandomStringService().random());
final DistributionConfig config = new DistributionConfig(new AlphanumericRandomStringService().random(), distribution.isEnabled())
.withComment(originId)
.withOrigins(new Origins()
.withQuantity(1)
.withItems(new Origin()
.withId(originId)
.withCustomHeaders(new CustomHeaders().withQuantity(0))
.withOriginPath(StringUtils.EMPTY)
.withDomainName(origin.getHost())
.withS3OriginConfig(new S3OriginConfig().withOriginAccessIdentity(StringUtils.EMPTY))
)
)
.withPriceClass(PriceClass.PriceClass_All)
.withDefaultCacheBehavior(new DefaultCacheBehavior()
.withTargetOriginId(originId)
.withForwardedValues(new ForwardedValues().withQueryString(true).withCookies(new CookiePreference().withForward(ItemSelection.All)))
.withViewerProtocolPolicy(ViewerProtocolPolicy.AllowAll)
.withMinTTL(0L)
.withTrustedSigners(new TrustedSigners().withEnabled(false).withQuantity(0)))
.withDefaultRootObject(distribution.getIndexDocument())
.withAliases(new Aliases().withItems(distribution.getCNAMEs()).withQuantity(distribution.getCNAMEs().length));
// Make bucket name fully qualified
final String loggingTarget = ServiceUtils.generateS3HostnameForBucket(distribution.getLoggingContainer(),
false, new S3Protocol().getDefaultHostname());
if(log.isDebugEnabled()) {
log.debug(String.format("Set logging target for %s to %s", distribution, loggingTarget));
}
config.setLogging(new LoggingConfig()
.withEnabled(distribution.isLogging())
.withIncludeCookies(true)
.withBucket(loggingTarget)
.withPrefix(new HostPreferences(session.getHost()).getProperty("cloudfront.logging.prefix")
));
return client.createDistribution(new CreateDistributionRequest(config)).getDistribution();
}
protected com.amazonaws.services.cloudfront.model.Distribution createCustomDistribution(final Path container, final Distribution distribution)
throws BackgroundException {
final AmazonCloudFront client = this.client(container);
int httpPort = 80;
int httpsPort = 443;
final URI origin = this.getOrigin(container, distribution.getMethod());
if(origin.getPort() != -1) {
if(origin.getScheme().equals(Scheme.http.name())) {
httpPort = origin.getPort();
}
if(origin.getScheme().equals(Scheme.https.name())) {
httpsPort = origin.getPort();
}
}
final String originId = String.format("%s-%s", preferences.getProperty("application.name"), new AlphanumericRandomStringService().random());
final DistributionConfig config = new DistributionConfig(new AlphanumericRandomStringService().random(), distribution.isEnabled())
.withComment(originId)
.withOrigins(new Origins()
.withQuantity(1)
.withItems(new Origin()
.withId(originId)
.withDomainName(origin.getHost())
.withCustomOriginConfig(new CustomOriginConfig()
.withHTTPPort(httpPort)
.withHTTPSPort(httpsPort)
.withOriginSslProtocols(new OriginSslProtocols().withQuantity(2).withItems("TLSv1.1", "TLSv1.2"))
.withOriginProtocolPolicy(this.getPolicy(distribution.getMethod()))
)
)
)
.withPriceClass(PriceClass.PriceClass_All)
.withDefaultCacheBehavior(new DefaultCacheBehavior()
.withTargetOriginId(originId)
.withForwardedValues(new ForwardedValues().withQueryString(true).withCookies(new CookiePreference().withForward(ItemSelection.All)))
.withViewerProtocolPolicy(ViewerProtocolPolicy.AllowAll)
.withMinTTL(0L)
.withTrustedSigners(new TrustedSigners().withEnabled(false).withQuantity(0)))
.withDefaultRootObject(distribution.getIndexDocument())
.withAliases(new Aliases().withItems(distribution.getCNAMEs()).withQuantity(distribution.getCNAMEs().length));
if(distribution.isLogging()) {
// Make bucket name fully qualified
final String loggingTarget = ServiceUtils.generateS3HostnameForBucket(distribution.getLoggingContainer(),
false, new S3Protocol().getDefaultHostname());
if(log.isDebugEnabled()) {
log.debug(String.format("Set logging target for %s to %s", distribution, loggingTarget));
}
config.setLogging(new LoggingConfig()
.withEnabled(distribution.isLogging())
.withIncludeCookies(true)
.withBucket(loggingTarget)
.withPrefix(new HostPreferences(session.getHost()).getProperty("cloudfront.logging.prefix"))
);
}
return client.createDistribution(new CreateDistributionRequest(config)).getDistribution();
}
/**
* Amazon CloudFront Extension used to enable or disable a distribution configuration and its CNAMESs
*/
protected UpdateDistributionResult updateDownloadDistribution(final Path container, final Distribution distribution)
throws BackgroundException {
final URI origin = this.getOrigin(container, distribution.getMethod());
if(log.isDebugEnabled()) {
log.debug(String.format("Update %s distribution with origin %s", distribution, origin));
}
final AmazonCloudFront client = this.client(container);
final GetDistributionConfigResult response = client.getDistributionConfig(new GetDistributionConfigRequest(distribution.getId()));
final DistributionConfig config = response.getDistributionConfig()
.withEnabled(distribution.isEnabled())
.withDefaultRootObject(distribution.getIndexDocument())
.withAliases(new Aliases().withItems(distribution.getCNAMEs()).withQuantity(distribution.getCNAMEs().length));
if(distribution.isLogging()) {
// Make bucket name fully qualified
final String loggingTarget = ServiceUtils.generateS3HostnameForBucket(distribution.getLoggingContainer(),
false, new S3Protocol().getDefaultHostname());
if(log.isDebugEnabled()) {
log.debug(String.format("Set logging target for %s to %s", distribution, loggingTarget));
}
config.setLogging(new LoggingConfig()
.withEnabled(distribution.isLogging())
.withIncludeCookies(true)
.withBucket(loggingTarget)
.withPrefix(new HostPreferences(session.getHost()).getProperty("cloudfront.logging.prefix"))
);
}
return client.updateDistribution(new UpdateDistributionRequest(config, distribution.getId(), response.getETag()));
}
protected UpdateStreamingDistributionResult updateStreamingDistribution(final Path container, final Distribution distribution)
throws BackgroundException {
final URI origin = this.getOrigin(container, distribution.getMethod());
if(log.isDebugEnabled()) {
log.debug(String.format("Update %s distribution with origin %s", distribution, origin));
}
final AmazonCloudFront client = this.client(container);
final GetStreamingDistributionConfigResult response = client.getStreamingDistributionConfig(new GetStreamingDistributionConfigRequest(distribution.getId()));
final StreamingDistributionConfig config = response.getStreamingDistributionConfig()
.withEnabled(distribution.isEnabled())
.withS3Origin(new S3Origin(origin.getHost(), StringUtils.EMPTY))
.withAliases(new Aliases().withItems(distribution.getCNAMEs()).withQuantity(distribution.getCNAMEs().length));
if(distribution.isLogging()) {
// Make bucket name fully qualified
final String loggingTarget = ServiceUtils.generateS3HostnameForBucket(distribution.getLoggingContainer(),
false, new S3Protocol().getDefaultHostname());
if(log.isDebugEnabled()) {
log.debug(String.format("Set logging target for %s to %s", distribution, loggingTarget));
}
config.setLogging(new StreamingLoggingConfig()
.withEnabled(distribution.isLogging())
.withBucket(loggingTarget)
.withPrefix(new HostPreferences(session.getHost()).getProperty("cloudfront.logging.prefix"))
);
}
return client.updateStreamingDistribution(new UpdateStreamingDistributionRequest(config, distribution.getId(), response.getETag()));
}
protected UpdateDistributionResult updateCustomDistribution(final Path container, final Distribution distribution)
throws BackgroundException {
final URI origin = this.getOrigin(container, distribution.getMethod());
if(log.isDebugEnabled()) {
log.debug(String.format("Update %s distribution with origin %s", distribution, origin));
}
final AmazonCloudFront client = this.client(container);
final GetDistributionConfigResult response = client.getDistributionConfig(new GetDistributionConfigRequest(distribution.getId()));
final DistributionConfig config = response.getDistributionConfig()
.withEnabled(distribution.isEnabled())
.withDefaultRootObject(distribution.getIndexDocument() != null ? distribution.getIndexDocument() : StringUtils.EMPTY)
.withAliases(new Aliases().withItems(distribution.getCNAMEs()).withQuantity(distribution.getCNAMEs().length));
// Make bucket name fully qualified
final String loggingTarget = ServiceUtils.generateS3HostnameForBucket(distribution.getLoggingContainer(),
false, new S3Protocol().getDefaultHostname());
if(log.isDebugEnabled()) {
log.debug(String.format("Set logging target for %s to %s", distribution, loggingTarget));
}
config.setLogging(new LoggingConfig()
.withEnabled(distribution.isLogging())
.withIncludeCookies(true)
.withBucket(loggingTarget)
.withPrefix(new HostPreferences(session.getHost()).getProperty("cloudfront.logging.prefix"))
);
return client.updateDistribution(new UpdateDistributionRequest(config, distribution.getId(), response.getETag()));
}
protected void deleteDownloadDistribution(final Path container, final Distribution distribution)
throws BackgroundException {
final URI origin = this.getOrigin(container, distribution.getMethod());
if(log.isDebugEnabled()) {
log.debug(String.format("Update %s distribution with origin %s", distribution, origin));
}
final AmazonCloudFront client = this.client(container);
client.deleteDistribution(new DeleteDistributionRequest(distribution.getId(), distribution.getEtag()));
}
protected void deleteStreamingDistribution(final Path container, final Distribution distribution)
throws BackgroundException {
final URI origin = this.getOrigin(container, distribution.getMethod());
if(log.isDebugEnabled()) {
log.debug(String.format("Update %s distribution with origin %s", distribution, origin));
}
final AmazonCloudFront client = this.client(container);
client.deleteStreamingDistribution(new DeleteStreamingDistributionRequest(distribution.getId(), distribution.getEtag()));
}
/**
* @param method Distribution method
* @return Match viewer policy
*/
protected OriginProtocolPolicy getPolicy(final Distribution.Method method) {
return OriginProtocolPolicy.MatchViewer;
}
private Distribution readStreamingDistribution(final AmazonCloudFront client,
final StreamingDistributionSummary summary,
final Path container,
final Distribution.Method method) throws BackgroundException {
// Retrieve distributions configuration to access current logging status settings.
try {
final GetStreamingDistributionConfigResult response = client.getStreamingDistributionConfig(new GetStreamingDistributionConfigRequest(summary.getId()));
final StreamingDistributionConfig configuration = response.getStreamingDistributionConfig();
final Distribution distribution = new Distribution(method, this.getName(), this.getOrigin(container, method), summary.isEnabled());
distribution.setId(summary.getId());
distribution.setDeployed("Deployed".equals(summary.getStatus()));
distribution.setUrl(URI.create(String.format("%s://%s%s", method.getScheme(), summary.getDomainName(), method.getContext())));
distribution.setSslUrl(method.equals(Distribution.DOWNLOAD) || method.equals(Distribution.CUSTOM) ? URI.create(String.format("https://%s%s", summary.getDomainName(), method.getContext())) : null);
distribution.setReference(configuration.getCallerReference());
distribution.setEtag(response.getETag());
distribution.setStatus(LocaleFactory.localizedString(summary.getStatus(), "S3"));
distribution.setCNAMEs(configuration.getAliases().getItems().toArray(new String[configuration.getAliases().getItems().size()]));
distribution.setLogging(configuration.getLogging().isEnabled());
distribution.setLoggingContainer(StringUtils.isNotBlank(configuration.getLogging().getBucket()) ?
ServiceUtils.findBucketNameInHostname(configuration.getLogging().getBucket(), new S3Protocol().getDefaultHostname()) : null);
if(this.getFeature(Purge.class, method) != null) {
distribution.setInvalidationStatus(this.readInvalidationStatus(client, distribution));
}
if(this.getFeature(DistributionLogging.class, method) != null) {
try {
distribution.setContainers(new S3BucketListService(session, new S3LocationFeature.S3Region(bookmark.getRegion())).list(
new Path(String.valueOf(Path.DELIMITER), EnumSet.of(Path.Type.volume, Path.Type.directory)),
new DisabledListProgressListener()).toList());
}
catch(AccessDeniedException | InteroperabilityException e) {
log.warn(String.format("Failure listing buckets. %s", e.getMessage()));
}
}
return distribution;
}
catch(AmazonClientException e) {
throw new AmazonServiceExceptionMappingService().map("Cannot read CDN configuration", e);
}
}
private Distribution readDownloadDistribution(final AmazonCloudFront client,
final DistributionSummary summary,
final Path container,
final Distribution.Method method) throws BackgroundException {
// Retrieve distributions configuration to access current logging status settings.
try {
final GetDistributionConfigResult response = client.getDistributionConfig(new GetDistributionConfigRequest(summary.getId()));
final DistributionConfig configuration = response.getDistributionConfig();
final Distribution distribution = new Distribution(method, this.getName(), this.getOrigin(container, method), summary.isEnabled());
distribution.setId(summary.getId());
distribution.setDeployed("Deployed".equals(summary.getStatus()));
distribution.setUrl(URI.create(String.format("%s://%s%s", method.getScheme(), summary.getDomainName(), method.getContext())));
distribution.setSslUrl(method.equals(Distribution.DOWNLOAD) || method.equals(Distribution.CUSTOM) ? URI.create(String.format("https://%s%s", summary.getDomainName(), method.getContext())) : null);
distribution.setReference(configuration.getCallerReference());
distribution.setEtag(response.getETag());
distribution.setStatus(LocaleFactory.localizedString(summary.getStatus(), "S3"));
distribution.setCNAMEs(configuration.getAliases().getItems().toArray(new String[configuration.getAliases().getItems().size()]));
distribution.setLogging(configuration.getLogging().isEnabled());
distribution.setLoggingContainer(StringUtils.isNotBlank(configuration.getLogging().getBucket()) ?
ServiceUtils.findBucketNameInHostname(configuration.getLogging().getBucket(), new S3Protocol().getDefaultHostname()) : null);
if(StringUtils.isNotBlank(configuration.getDefaultRootObject())) {
distribution.setIndexDocument(configuration.getDefaultRootObject());
}
if(this.getFeature(Purge.class, method) != null) {
distribution.setInvalidationStatus(this.readInvalidationStatus(client, distribution));
}
if(this.getFeature(DistributionLogging.class, method) != null) {
distribution.setContainers(new S3BucketListService(session, new S3LocationFeature.S3Region(bookmark.getRegion())).list(
new Path(String.valueOf(Path.DELIMITER), EnumSet.of(Path.Type.volume, Path.Type.directory)),
new DisabledListProgressListener()).toList());
}
return distribution;
}
catch(AmazonClientException e) {
throw new AmazonServiceExceptionMappingService().map("Cannot read CDN configuration", e);
}
}
private AmazonCloudFront client(final Path container) throws BackgroundException {
final AmazonCloudFrontClientBuilder builder = AmazonCloudFrontClientBuilder.standard()
.withCredentials(AWSCredentialsConfigurator.toAWSCredentialsProvider(session.getClient().getProviderCredentials()))
.withClientConfiguration(configuration);
final Location.Name region = this.getRegion(container);
if(S3Session.isAwsHostname(session.getHost().getHostname(), false)) {
if(Location.unknown.equals(region)) {
builder.withRegion(Regions.DEFAULT_REGION);
}
else {
builder.withRegion(region.getIdentifier());
}
}
else {
builder.withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(
new HostUrlProvider(false).get(session.getHost()), region.getIdentifier()));
}
return builder.build();
}
protected Location.Name getRegion(final Path container) throws BackgroundException {
return locationFeature.getLocation(container);
}
}<|fim▁end|> | @SuppressWarnings("unchecked") |
<|file_name|>compile_rules.py<|end_file_name|><|fim▁begin|>import re
import sys
import traceback
import copy
import json
from distutils import file_util
from grammalecte.echo import echo
DEF = {}
FUNCTIONS = []
JSREGEXES = {}
WORDLIMITLEFT = r"(?<![\w.,–-])" # r"(?<![-.,—])\b" seems slower
WORDLIMITRIGHT = r"(?![\w–-])" # r"\b(?!-—)" seems slower
def prepare_for_eval (s):
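    # Added note (not in the original source): rewrites the rule-condition DSL into
    # Python code that is evaluated at match time. For example, word(1), before(...),
    # morph(\2, ...) and spell(...) become calls to the checker helpers, and \N
    # back-references become m.group(N).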
s = re.sub(r"(select|exclude)[(][\\](\d+)", '\\1(dDA, m.start(\\2), m.group(\\2)', s)
s = re.sub(r"define[(][\\](\d+)", 'define(dDA, m.start(\\1)', s)
s = re.sub(r"(morph|morphex|displayInfo)[(][\\](\d+)", '\\1((m.start(\\2), m.group(\\2))', s)
s = re.sub(r"(morph|morphex|displayInfo)[(]", '\\1(dDA, ', s)
s = re.sub(r"(sugg\w+|switch\w+)\(@", '\\1(m.group(i[4])', s)
s = re.sub(r"word\(\s*1\b", 'nextword1(s, m.end()', s) # word(1)
s = re.sub(r"word\(\s*-1\b", 'prevword1(s, m.start()', s) # word(-1)
s = re.sub(r"word\(\s*(\d)", 'nextword(s, m.end(), \\1', s) # word(n)
s = re.sub(r"word\(\s*-(\d)", 'prevword(s, m.start(), \\1', s) # word(-n)
s = re.sub(r"before\(\s*", 'look(s[:m.start()], ', s) # before(s)
s = re.sub(r"after\(\s*", 'look(s[m.end():], ', s) # after(s)
s = re.sub(r"textarea\(\s*", 'look(s, ', s) # textarea(s)
s = re.sub(r"before_chk1\(\s*", 'look_chk1(dDA, s[:m.start()], 0, ', s) # before_chk1(s)
s = re.sub(r"after_chk1\(\s*", 'look_chk1(dDA, s[m.end():], m.end(), ', s) # after_chk1(s)
s = re.sub(r"textarea_chk1\(\s*", 'look_chk1(dDA, s, 0, ', s) # textarea_chk1(s)
s = re.sub(r"before0\(\s*", 'look(sx[:m.start()], ', s) # before0(s)
s = re.sub(r"after0\(\s*", 'look(sx[m.end():], ', s) # after0(s)
s = re.sub(r"textarea0\(\s*", 'look(sx, ', s) # textarea0(s)
s = re.sub(r"before0_chk1\(\s*", 'look_chk1(dDA, sx[:m.start()], 0, ', s) # before0_chk1(s)
s = re.sub(r"after0_chk1\(\s*", 'look_chk1(dDA, sx[m.end():], m.end(), ', s) # after0_chk1(s)
s = re.sub(r"textarea0_chk1\(\s*", 'look_chk1(dDA, sx, 0, ', s) # textarea0_chk1(s)
s = re.sub(r"isEndOfNG\(\s*\)", 'isEndOfNG(dDA, s[m.end():], m.end())', s) # isEndOfNG(s)
s = re.sub(r"\bspell *[(]", '_oDict.isValid(', s)
s = re.sub(r"[\\](\d+)", 'm.group(\\1)', s)
return s
def py2js (sCode):
"convert Python code to JavaScript code"
# Python 2.x unicode strings
sCode = re.sub('\\b[ur]"', '"', sCode)
sCode = re.sub("\\b[ur]'", "'", sCode)
# operators
sCode = sCode.replace(" and ", " && ")
sCode = sCode.replace(" or ", " || ")
sCode = re.sub("\\bnot\\b", "!", sCode)
sCode = re.sub("(.+) if (.+) else (.+)", "\\2 ? \\1 : \\3", sCode)
# boolean
sCode = sCode.replace("False", "false")
sCode = sCode.replace("True", "true")
sCode = sCode.replace("bool", "Boolean")
# methods
sCode = sCode.replace(".endswith", ".endsWith")
sCode = sCode.replace(".find", ".indexOf")
sCode = sCode.replace(".startswith", ".startsWith")
sCode = sCode.replace(".lower", ".toLowerCase")
sCode = sCode.replace(".upper", ".toUpperCase")
sCode = sCode.replace(".isdigit", "._isDigit")
sCode = sCode.replace(".isupper", "._isUpperCase")
sCode = sCode.replace(".islower", "._isLowerCase")
sCode = sCode.replace(".istitle", "._isTitle")
sCode = sCode.replace(".capitalize", "._toCapitalize")
sCode = sCode.replace(".strip", "._trim")
sCode = sCode.replace(".lstrip", "._trimLeft")
sCode = sCode.replace(".rstrip", "._trimRight")
sCode = sCode.replace('.replace("."', ".replace(/\./g")
sCode = sCode.replace('.replace("..."', ".replace(/\.\.\./g")
sCode = re.sub('.replace\("([^"]+)" ?,', ".replace(/\\1/g,", sCode)
# regex
sCode = re.sub('re.search\("([^"]+)", *(m.group\(\\d\))\)', "(\\2.search(/\\1/) >= 0)", sCode)
sCode = re.sub(".search\\(/\\(\\?i\\)([^/]+)/\\) >= 0\\)", ".search(/\\1/i) >= 0)", sCode)
sCode = re.sub('(look\\(sx?[][.a-z:()]*), "\\(\\?i\\)([^"]+)"', "\\1, /\\2/i", sCode)
sCode = re.sub('(look\\(sx?[][.a-z:()]*), "([^"]+)"', "\\1, /\\2/", sCode)
sCode = re.sub('(look_chk1\\(dDA, sx?[][.a-z:()]*, [0-9a-z.()]+), "\\(\\?i\\)([^"]+)"', "\\1, /\\2/i", sCode)
sCode = re.sub('(look_chk1\\(dDA, sx?[][.a-z:()]*, [0-9a-z.()]+), "([^"]+)"', "\\1, /\\2/i", sCode)
sCode = sCode.replace("(?<!-)", "") # todo
# slices
sCode = sCode.replace("[:m.start()]", ".slice(0,m.index)")
sCode = sCode.replace("[m.end():]", ".slice(m.end[0])")
sCode = re.sub("\\[(-?\\d+):(-?\\d+)\\]", ".slice(\\1,\\2)", sCode)
sCode = re.sub("\\[(-?\\d+):\\]", ".slice(\\1)", sCode)
sCode = re.sub("\\[:(-?\\d+)\\]", ".slice(0,\\1)", sCode)
# regex matches<|fim▁hole|> sCode = re.sub("m\\.group\\((\\d+)\\)", "m[\\1]", sCode)
# tuples -> lists
sCode = re.sub("\((m\.start\[\\d+\], m\[\\d+\])\)", "[\\1]", sCode)
# regex
sCode = sCode.replace("\w[\w-]+", "[a-zA-Zà-öÀ-Ö0-9ø-ÿØ-ßĀ-ʯ][a-zA-Zà-öÀ-Ö0-9ø-ÿØ-ßĀ-ʯ-]+")
sCode = sCode.replace(r"/\w/", "/[a-zA-Zà-öÀ-Ö0-9ø-ÿØ-ßĀ-ʯ]/")
sCode = sCode.replace(r"[\w-]", "[a-zA-Zà-öÀ-Ö0-9ø-ÿØ-ßĀ-ʯ-]")
sCode = sCode.replace(r"[\w,]", "[a-zA-Zà-öÀ-Ö0-9ø-ÿØ-ßĀ-ʯ,]")
return sCode
def uppercase (s, sLang):
"convert regex to uppercase regex: 'foo' becomes '[Ff][Oo][Oo]', but 'Bar' becomes 'B[Aa][Rr]'."
sUp = ""
state = 0
for i in range(0, len(s)):
c = s[i]
if c == "[":
state = 1
if state == 1 and c == "]":
state = 0
if c == "<" and i > 3 and s[i-3:i] == "(?P":
state = 2
if state == 2 and c == ">":
state = 0
if c == "?" and i > 0 and s[i-1:i] == "(" and s[i+1:i+2] != ":":
state = 5
if state == 5 and c == ")":
state = 0
if c.isalpha() and c.islower() and state == 0:
if c == "i" and (sLang == "tr" or sLang == "az"):
sUp += "[İ" + c + "]"
else:
sUp += "[" + c.upper() + c + "]"
elif c.isalpha() and c.islower() and state == 1 and s[i+1:i+2] != "-":
if s[i-1:i] == "-" and s[i-2:i-1].islower(): # [a-z] -> [a-zA-Z]
sUp += c + s[i-2:i-1].upper() + "-" + c.upper()
elif c == "i" and (sLang == "tr" or sLang == "az"):
sUp += "İ" + c
else:
sUp += c.upper() + c
else:
sUp += c
if c == "\\":
state = 4
elif state == 4:
state = 0
return sUp
def countGroupInRegex (sRegex):
try:
return re.compile(sRegex).groups
except:
traceback.print_exc()
echo(sRegex)
return 0
def createRule (s, nIdLine, sLang, bParagraph):
"returns rule as list [option name, regex, bCaseInsensitive, identifier, list of actions]"
global JSREGEXES
#### OPTIONS
sRuleId = str(nIdLine) + ("p" if bParagraph else "s")
sOption = False # False or [a-z0-9]+ name
tGroups = None # code for groups positioning (only useful for JavaScript)
cCaseMode = 'i' # i: case insensitive, s: case sensitive, u: uppercasing allowed
cWordLimitLeft = '[' # [: word limit, <: no specific limit
cWordLimitRight = ']' # ]: word limit, >: no specific limit
m = re.match("^__([[<]\\w[]>])(/[a-zA-Z0-9]+|)__ *", s)
if m:
if m.group(1):
cWordLimitLeft = m.group(1)[0]
cCaseMode = m.group(1)[1]
cWordLimitRight = m.group(1)[2]
sOption = m.group(2)[1:] if m.group(2) else False
s = s[m.end(0):]
#### REGEX TRIGGER
i = s.find(" <<-")
if i == -1:
print("# Error: no condition at line " + sRuleId)
return None
sRegex = s[:i].strip()
s = s[i+4:]
# JS groups positioning codes
m = re.search("@@\\S+", sRegex)
if m:
tGroups = groupsPositioningCodeToList(sRegex[m.start()+2:])
sRegex = sRegex[:m.start()].strip()
# JS regex
m = re.search("<js>.+</js>i?", sRegex)
if m:
JSREGEXES[sRuleId] = m.group(0)
sRegex = sRegex[:m.start()].strip()
if "<js>" in sRegex or "</js>" in sRegex:
print("# Error: JavaScript regex not delimited at line " + sRuleId)
return None
# quotes ?
if sRegex.startswith('"') and sRegex.endswith('"'):
sRegex = sRegex[1:-1]
## definitions
for sDef, sRepl in DEF.items():
sRegex = sRegex.replace(sDef, sRepl)
## count number of groups (must be done before modifying the regex)
nGroup = countGroupInRegex(sRegex)
if nGroup > 0:
if not tGroups:
print("# warning: groups positioning code for JavaScript should be defined at line " + sRuleId)
else:
if nGroup != len(tGroups):
print("# error: groups positioning code irrelevant at line " + sRuleId)
## word limit
if cWordLimitLeft == '[' and not sRegex.startswith(("^", '’', "'", ",")):
sRegex = WORDLIMITLEFT + sRegex
if cWordLimitRight == ']' and not sRegex.endswith(("$", '’', "'", ",")):
sRegex = sRegex + WORDLIMITRIGHT
## casing mode
if cCaseMode == "i":
bCaseInsensitive = True
if not sRegex.startswith("(?i)"):
sRegex = "(?i)" + sRegex
elif cCaseMode == "s":
bCaseInsensitive = False
sRegex = sRegex.replace("(?i)", "")
elif cCaseMode == "u":
bCaseInsensitive = False
sRegex = sRegex.replace("(?i)", "")
sRegex = uppercase(sRegex, sLang)
else:
print("# Unknown case mode [" + cCaseMode + "] at line " + sRuleId)
## check regex
try:
z = re.compile(sRegex)
except:
print("# Regex error at line ", nIdLine)
echo(sRegex)
traceback.print_exc()
return None
    ## groups in non-grouping parentheses
for x in re.finditer("\(\?:[^)]*\([[\w -]", sRegex):
print("# Warning: groups inside non grouping parenthesis in regex at line " + sRuleId)
#### PARSE ACTIONS
lActions = []
nAction = 1
for sAction in s.split(" <<- "):
t = createAction(sRuleId + "_" + str(nAction), sAction, nGroup)
nAction += 1
if t:
lActions.append(t)
if not lActions:
return None
return [sOption, sRegex, bCaseInsensitive, sRuleId, lActions, tGroups]
def createAction (sIdAction, sAction, nGroup):
"returns an action to perform as a tuple (condition, action type, action[, iGroup [, message, URL ]])"
global FUNCTIONS
m = re.search(r"([-~=])(\d*|)>> ", sAction)
if not m:
echo("# No action at line " + sIdAction)
return None
#### CONDITION
sCondition = sAction[:m.start()].strip()
if sCondition:
sCondition = prepare_for_eval(sCondition)
FUNCTIONS.append(("c"+sIdAction, sCondition))
for x in re.finditer("[.](?:group|start|end)[(](\d+)[)]", sCondition):
if int(x.group(1)) > nGroup:
print("# Error in groups in condition at line " + sIdAction + " ("+str(nGroup)+" groups only)")
if ".match" in sCondition:
echo("# Error. JS compatibility. Don't use .match() in condition, use .search()")
sCondition = "c"+sIdAction
else:
sCondition = None
#### iGroup / positioning
iGroup = int(m.group(2)) if m.group(2) else 0
if iGroup > nGroup:
echo("# Selected group > group number in regex at line " + sIdAction)
#### ACTION
sAction = sAction[m.end():].strip()
cAction = m.group(1)
if cAction == "-":
## error
iMsg = sAction.find(" # ")
sMsg = sAction[iMsg+3:].strip()
sAction = sAction[:iMsg].strip()
sURL = ""
mURL = re.search("[|] *(https?://.*)", sMsg)
if mURL:
sURL = mURL.group(1).strip()
sMsg = sMsg[:mURL.start(0)].strip()
if sMsg[0:1] == "=":
sMsg = prepare_for_eval(sMsg[1:])
FUNCTIONS.append(("m"+sIdAction, sMsg))
for x in re.finditer("group[(](\d+)[)]", sMsg):
if int(x.group(1)) > nGroup:
print("# error in groups in message at line " + sIdAction + " ("+str(nGroup)+" groups only)")
sMsg = "=m"+sIdAction
else:
for x in re.finditer(r"\\(\d+)", sMsg):
if int(x.group(1)) > nGroup:
print("# error in groups in message at line " + sIdAction + " ("+str(nGroup)+" groups only)")
if re.search("[.]\\w+[(]", sMsg):
print("# error in message at line " + sIdAction + ": This message looks like code. Line should begin with =")
if sAction[0:1] == "=" or cAction == "=":
if "define" in sAction and not re.search(r"define\(\\\d+ *, *\[.*\] *\)", sAction):
print("# error in action at line " + sIdAction + ": second argument for define must be a list of strings")
sAction = prepare_for_eval(sAction)
sAction = sAction.replace("m.group(i[4])", "m.group("+str(iGroup)+")")
for x in re.finditer("group[(](\d+)[)]", sAction):
if int(x.group(1)) > nGroup:
print("# error in groups in replacement at line " + sIdAction + " ("+str(nGroup)+" groups only)")
else:
for x in re.finditer(r"\\(\d+)", sAction):
if int(x.group(1)) > nGroup:
print("# error in groups in replacement at line " + sIdAction + " ("+str(nGroup)+" groups only)")
if re.search("[.]\\w+[(]", sAction):
print("# error in action at line " + sIdAction + ": This action looks like code. Line should begin with =")
if cAction == "-":
## error detected
if sAction[0:1] == "=":
FUNCTIONS.append(("s"+sIdAction, sAction[1:]))
sAction = "=s"+sIdAction
elif sAction.startswith('"') and sAction.endswith('"'):
sAction = sAction[1:-1]
return [sCondition, cAction, sAction, iGroup, sMsg, sURL]
elif cAction == "~":
## text preprocessor
if sAction[0:1] == "=":
if sAction[1:2] == "@":
FUNCTIONS.append(("p"+sIdAction, sAction[2:]))
sAction = "=@p"+sIdAction
else:
FUNCTIONS.append(("p"+sIdAction, sAction[1:]))
sAction = "=p"+sIdAction
return [sCondition, cAction, sAction, iGroup]
elif cAction == "=":
## disambiguator
if sAction[0:1] == "=":
sAction = sAction[1:]
FUNCTIONS.append(("d"+sIdAction, sAction))
sAction = "d"+sIdAction
return [sCondition, cAction, sAction]
else:
echo("# Unknown action at line " + sIdAction)
return None
def regex2js (sRegex):
"converts Python regex to JS regex and returns JS regex and list of negative lookbefore assertions"
# Latin letters: http://unicode-table.com/fr/
# 0-9
# A-Z
# a-z
# À-Ö 00C0-00D6 (upper case)
# Ø-ß 00D8-00DF (upper case)
# à-ö 00E0-00F6 (lower case)
# ø-ÿ 00F8-00FF (lower case)
# Ā-ʯ 0100-02AF (mixed)
# -> a-zA-Zà-ö0-9À-Öø-ÿØ-ßĀ-ʯ
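    # Rough illustration (hypothetical input, not from the original comments): a Python rule regex such as
    #     (?i)\bfoo[\w-]+
    # leaves this function as something like
    #     /\bfoo[a-zA-Zà-öÀ-Ö0-9ø-ÿØ-ßĀ-ʯ-]+/gi
    # i.e. the (?i) flag becomes the JS "i" flag and \w is expanded to the explicit Latin ranges above.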
bCaseInsensitive = False
if "(?i)" in sRegex:
sRegex = sRegex.replace("(?i)", "")
bCaseInsensitive = True
lNegLookBeforeRegex = []
if WORDLIMITLEFT in sRegex:
sRegex = sRegex.replace(WORDLIMITLEFT, "")
lNegLookBeforeRegex = ["[a-zA-Zà-öÀ-Ö0-9ø-ÿØ-ßĀ-ʯ.,–-]$"]
sRegex = sRegex.replace("[\\w", "[a-zA-Zà-öÀ-Ö0-9ø-ÿØ-ßĀ-ʯ")
sRegex = sRegex.replace("\\w", "[a-zA-Zà-öÀ-Ö0-9ø-ÿØ-ßĀ-ʯ]")
sRegex = sRegex.replace("[.]", r"\.")
if not sRegex.startswith("<js>"):
sRegex = sRegex.replace("/", r"\/")
m = re.search(r"\(\?<!([^()]+)\)", sRegex) # Negative lookbefore assertion should always be at the beginning of regex
if m:
lNegLookBeforeRegex.append(m.group(1)+"$")
sRegex = sRegex.replace(m.group(0), "")
if "(?<" in sRegex:
echo("# Warning. Lookbefore assertion not changed in:\n ")
echo(sRegex)
if sRegex.startswith("<js>"):
sRegex = sRegex.replace('<js>', '/').replace('</js>i', '/ig').replace('</js>', '/g')
else:
sRegex = "/" + sRegex + "/g"
if bCaseInsensitive and not sRegex.endswith("/ig"):
sRegex = sRegex + "i"
if not lNegLookBeforeRegex:
lNegLookBeforeRegex = None
return (sRegex, lNegLookBeforeRegex)
def pyRuleToJS (lRule):
lRuleJS = copy.deepcopy(lRule)
del lRule[-1] # tGroups positioning codes are useless for Python
# error messages
for aAction in lRuleJS[4]:
if aAction[1] == "-":
            aAction[4] = aAction[4].replace("« ", "«\u00a0").replace(" »", "\u00a0»")  # no-break spaces inside French quotes
# js regexes
lRuleJS[1], lNegLookBehindRegex = regex2js( JSREGEXES.get(lRuleJS[3], lRuleJS[1]) )
lRuleJS.append(lNegLookBehindRegex)
return lRuleJS
def writeRulesToJSArray_old (lRules):
s = "[\n"
for lRule in lRules:
# [sOption, sRegex, bCaseInsensitive, sRuleId, lActions, aGroups, aNegLookBehindRegex]
s += ' ["' + lRule[0] + '", ' if lRule[0] else " [false, "
s += lRule[1] + ", "
s += "true, " if lRule[2] else "false, "
s += '"' + lRule[3] + '", '
s += json.dumps(lRule[4], ensure_ascii=False) + ", "
s += json.dumps(lRule[5], ensure_ascii=False) + ", "
s += json.dumps(lRule[6], ensure_ascii=False) + "],\n"
s += "]"
return s
def writeRulesToJSArray (lRules):
s = "[\n"
for sOption, aRuleGroup in lRules:
s += ' ["' + sOption + '", [\n' if sOption else " [false, [\n"
for lRule in aRuleGroup:
# [sRegex, bCaseInsensitive, sRuleId, lActions, aGroups, aNegLookBehindRegex]
s += ' [' + lRule[0] + ", "
s += "true, " if lRule[1] else "false, "
s += '"' + lRule[2] + '", '
s += json.dumps(lRule[3], ensure_ascii=False) + ", "
s += json.dumps(lRule[4], ensure_ascii=False) + ", "
s += json.dumps(lRule[5], ensure_ascii=False) + "],\n"
s += " ]],\n"
s += "]"
return s
def groupsPositioningCodeToList (sGroupsPositioningCode):
if not sGroupsPositioningCode:
return None
return [ int(sCode) if sCode.isdigit() or (sCode[0:1] == "-" and sCode[1:].isdigit()) else sCode \
for sCode in sGroupsPositioningCode.split(",") ]
def displayStats (lRules):
print("{:>2} {:>18} {:>18} {:>18} {:>18}".format("#", "DISAMBIGUATOR", "TEXT PROCESSOR", "GRAMMAR CHECKING", "RULES"))
for i in range(len(lRules)):
d = {'=':0, '~': 0, '-': 0}
for aRule in lRules[i]:
for aAction in aRule[4]:
d[aAction[1]] = d[aAction[1]] + 1
print("{:>2} {:>18} {:>18} {:>18} {:>18}".format(i, d['='], d['~'], d['-'], len(lRules[i])))
def mergeRulesByOption (lRules):
"returns a list of tuples [option, list of rules] keeping the rules order"
lFinal = []
lTemp = []
sOption = None
for aRule in lRules:
if aRule[0] != sOption:
if sOption != None:
lFinal.append([sOption, lTemp])
# new tuple
sOption = aRule[0]
lTemp = []
lTemp.append(aRule[1:])
lFinal.append([sOption, lTemp])
return lFinal
def make (lRules, sLang, bJavaScript):
"compile rules"
# removing comments, zeroing empty lines, creating definitions
global DEF
lLine = []
lTest = []
for i, sLine in enumerate(lRules):
if sLine.startswith('#END'):
break
elif sLine.startswith("#"):
lLine.append("")
elif sLine.startswith("DEF:"):
m = re.match("DEF: +([a-zA-Z_][a-zA-Z_0-9]*) +(.+)$", sLine.strip())
if m:
DEF["{"+m.group(1)+"}"] = m.group(2)
else:
print("Error in definition: ", end="")
echo(sLine.strip())
lLine.append("")
elif sLine.startswith("TEST:"):
lTest.append("{:<8}".format(i+1) + " " + sLine[5:].lstrip())
lLine.append("")
elif sLine.startswith("TODO:"):
lLine.append("")
elif re.match("[ \t]+$", sLine):
lLine.append("")
else:
lLine.append(sLine)
# generating test files
with open("tests/"+sLang+"/gc_test.txt", "w", encoding="utf-8") as hDstPy, \
open("gc_lang/"+sLang+"/modules-js/tests_data.js", "w", encoding="utf-8") as hDstJS:
hDstPy.write("# TESTS FOR LANG ["+sLang+"]\n\n")
hDstJS.write("// JavaScript\n//Tests (generated code, do not edit)\n\nconst aData = [\n")
for sLine in lTest:
hDstPy.write(sLine)
hDstJS.write(' ' + json.dumps(sLine.strip(), ensure_ascii=False) +',\n')
hDstJS.write("];\nexports.aData = aData;\n")
# concatenate multiline commands
dLineIndex = {}
sStack = ""
for i in range(len(lLine)-1, -1, -1):
if lLine[i].startswith((" ", "\t")):
sStack = lLine[i].strip() + " " + sStack
del lLine[i]
elif lLine[i] == "":
del lLine[i]
else:
lLine[i] = lLine[i].strip() + " " + sStack
lLine[i] = lLine[i].strip()
            dLineIndex[lLine[i]] = i + 1 # +1 because in a text editor, line numbers begin at 1.
sStack = ""
# processing
bParagraph = True
lParagraphRules = []
lSentenceRules = []
lParagraphRulesJS = []
lSentenceRulesJS = []
for sLine in lLine:
if sLine:
if sLine == "[++]":
bParagraph = False
else:
lRule = createRule(sLine, dLineIndex[sLine], sLang, bParagraph)
if lRule:
if bParagraph:
lParagraphRules.append(lRule)
lParagraphRulesJS.append(pyRuleToJS(lRule))
else:
lSentenceRules.append(lRule)
lSentenceRulesJS.append(pyRuleToJS(lRule))
# creating file with all functions callable by rules
with open("gc_lang/"+sLang+"/modules/gc_tmp_eval.py", "w", encoding="utf-8") as hDstPy, \
open("gc_lang/"+sLang+"/modules-js/gc_tmp_eval.js", "w", encoding="utf-8") as hDstJS:
hDstPy.write("# generated code, do not edit\n")
hDstJS.write("// generated code, do not edit\nvar oEvalFunc = {\n")
for sFuncName, sReturn in FUNCTIONS:
cType = sFuncName[0:1]
if cType == "c": # condition
sParams = "s, sx, m, dDA, sCountry"
elif cType == "m": # message
sParams = "s, m"
elif cType == "s": # suggestion
sParams = "s, m"
elif cType == "p": # preprocessor
sParams = "s, m"
elif cType == "d": # disambiguator
sParams = "s, m, dDA"
else:
print("# Unknown function type in [" + sFuncName + "]")
continue
hDstPy.write("def {} ({}):\n".format(sFuncName, sParams))
hDstPy.write(" return " + sReturn + "\n")
hDstJS.write(" {}: function ({})".format(sFuncName, sParams) + " {\n")
hDstJS.write(" return " + py2js(sReturn) + ";\n")
hDstJS.write(" },\n")
hDstJS.write("}\n")
displayStats([lParagraphRules, lSentenceRules])
return { "paragraph_rules": mergeRulesByOption(lParagraphRules),
"sentence_rules": mergeRulesByOption(lSentenceRules),
"paragraph_rules_JS": writeRulesToJSArray(mergeRulesByOption(lParagraphRulesJS)),
"sentence_rules_JS": writeRulesToJSArray(mergeRulesByOption(lSentenceRulesJS)) }<|fim▁end|> | sCode = sCode.replace(".end()", ".end[0]")
sCode = sCode.replace(".start()", ".index")
sCode = sCode.replace("m.group()", "m[0]")
sCode = re.sub("\\.start\\((\\d+)\\)", ".start[\\1]", sCode) |
<|file_name|>main.js<|end_file_name|><|fim▁begin|>/**
* @file ui/core/styleguide/index//html/01-body/40-main/main.js
* @description Listeners on the body, iframe, and rightpull bar.
*/
/* istanbul ignore if */
if (typeof window === 'object') {
document.addEventListener('DOMContentLoaded', () => {
const $orgs = FEPPER_UI.requerio.$orgs;
const {
uiFns,
uiProps
} = FEPPER_UI;
$orgs['#sg-rightpull'].on('mouseenter', function () {
$orgs['#sg-cover'].dispatchAction('addClass', 'shown-by-rightpull-hover');
});
$orgs['#sg-rightpull'].on('mouseleave', function () {
$orgs['#sg-cover'].dispatchAction('removeClass', 'shown-by-rightpull-hover');
});<|fim▁hole|> // 3. On "mousemove" calculate the math, save the results to a cookie, and update the viewport.
$orgs['#sg-rightpull'].on('mousedown', function (e) {
uiProps.sgRightpull.posX = e.pageX;
uiProps.sgRightpull.vpWidth = uiProps.vpWidth;
// Show the cover.
$orgs['#sg-cover'].dispatchAction('addClass', 'shown-by-rightpull-drag');
});
// Add the mouse move event and capture data. Also update the viewport width.
$orgs['#patternlab-body'].on('mousemove', function (e) {
if ($orgs['#sg-cover'].getState().classArray.includes('shown-by-rightpull-drag')) {
let vpWidthNew = uiProps.sgRightpull.vpWidth;
if (uiProps.dockPosition === 'bottom') {
vpWidthNew += 2 * (e.pageX - uiProps.sgRightpull.posX);
}
else {
vpWidthNew += e.pageX - uiProps.sgRightpull.posX;
}
if (vpWidthNew > uiProps.minViewportWidth) {
uiFns.sizeIframe(vpWidthNew, false);
}
}
});
// Handle letting go of rightpull bar after dragging to resize.
$orgs['#patternlab-body'].on('mouseup', function () {
uiProps.sgRightpull.posX = null;
uiProps.sgRightpull.vpWidth = null;
$orgs['#sg-cover'].dispatchAction('removeClass', 'shown-by-rightpull-hover');
$orgs['#sg-cover'].dispatchAction('removeClass', 'shown-by-rightpull-drag');
});
});
}<|fim▁end|> |
// Handle manually resizing the viewport.
// 1. On "mousedown" store the click location.
// 2. Make a hidden div visible so that the cursor doesn't get lost in the iframe. |
<|file_name|>toggle-trait-fn.rs<|end_file_name|><|fim▁begin|>#![crate_name = "foo"]
// Trait methods with documentation should be wrapped in a <details> toggle with an appropriate
// summary. Trait methods with no documentation should not be wrapped.
//
// @has foo/trait.Foo.html
// @has - '//details[@class="rustdoc-toggle"]//summary//h4[@class="code-header"]' 'is_documented()'
// @!has - '//details[@class="rustdoc-toggle"]//summary//h4[@class="code-header"]' 'not_documented()'
// @has - '//details[@class="rustdoc-toggle"]//*[@class="docblock"]' 'is_documented is documented'
// @has - '//details[@class="rustdoc-toggle"]//summary//h4[@class="code-header"]' 'is_documented_optional()'
// @!has - '//details[@class="rustdoc-toggle"]//summary//h4[@class="code-header"]' 'not_documented_optional()'
// @has - '//details[@class="rustdoc-toggle"]//*[@class="docblock"]' 'is_documented_optional is documented'
pub trait Foo {
fn not_documented();
/// is_documented is documented
fn is_documented();
fn not_documented_optional() {}<|fim▁hole|> fn is_documented_optional() {}
}<|fim▁end|> |
/// is_documented_optional is documented |
<|file_name|>getGeneDescLocal_Illumina.py<|end_file_name|><|fim▁begin|>import pandas as pd
def closeFunc():
print('''Type 'quit' and press enter to exit program''')
answer = input(': ')
if answer == 'quit':<|fim▁hole|>def oligosList():
oligosPath = input('Path to the file containing the list of probes: ')
oligos = open(oligosPath)
oligos = oligos.readlines()
oligosList = []
for oligo in oligos:
item = oligo.strip()
oligosList.append(item)
return oligosList
def main(oligosList, fullData = False):
db = pd.read_csv('probes_illumina.txt', sep = '\t', header = 0, low_memory = False, index_col = 11)
output = db.ix[oligosList]
if fullData == False:
output = output['Definition']
print(output)
else:
output = output[['Accession', 'Symbol', 'Definition']]
output.to_csv('getGeneDescLocal_results.txt', sep = '\t')
closeFunc()
if __name__ == "__main__":
oligosList = oligosList()
answer = input('Do you want full data? (yes/no) ')
if answer == 'no':
main(oligosList)
elif answer == 'yes':
main(oligosList, True)
else:
print('Wrong answer')
closeFunc()<|fim▁end|> | quit()
else:
closeFunc()
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! Parsing and interpretation for unoptimized Brainfuck abstract syntax trees.
//!
//! In `bfi` by default, this pass runs before run-length encoding and peephole optimization. To
//! run the unoptimized AST directly and skip all optimization, pass the `--ast` flag.
//!
//! In this module, BF programs are represented by the [`Program`](type.Program.html)
//! type, which is a slice of [`Statement`](enum.Statement.html)s. `Statement`s
//! correspond directly to Brainfuck commands, except that loops are represented as subtrees
//! rather than with begin and end markers.
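//!
//! As a rough illustration (not part of the original documentation), a program such as `+[-]`
//! parses into a tree shaped like `[Cmd(increment), Loop([Cmd(decrement)])]`, where the exact
//! `Command` variant names live in the `common` module and are not shown in this file.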
mod parser;
mod interpreter;
pub use self::parser::parse_program;
use common::Command;
/// A BF program is represented as a slice of statements. The slice will
/// typically be boxed.
pub type Program = [Statement];
/// An unoptimized BF statement.
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum Statement {
/// A non-loop command.<|fim▁hole|> ///
/// The `Command` cannot be `Begin` or `End`.
Cmd(Command),
/// A loop surrounding a sequence of instructions.
Loop(Box<[Statement]>),
}<|fim▁end|> | ///
/// # Invariants |
<|file_name|>simplifications_common.py<|end_file_name|><|fim▁begin|># ----------------------------- #
# Common simplifications passes #
# ----------------------------- #
from miasm2.expression.modint import mod_size2int, mod_size2uint
from miasm2.expression.expression import *
from miasm2.expression.expression_helper import *
def simp_cst_propagation(e_s, e):
"""This passe includes:
- Constant folding
- Common logical identities
- Common binary identities
"""
    # merge associative ops
args = list(e.args)
op = e.op
    # simplify integer manipulations
    # int OP int => int
    # TODO: <<< >>> << >> are architecture dependent
if op in op_propag_cst:
while (len(args) >= 2 and
args[-1].is_int() and
args[-2].is_int()):
i2 = args.pop()
i1 = args.pop()
if op == '+':
o = i1.arg + i2.arg
elif op == '*':
o = i1.arg * i2.arg
elif op == '**':
                o = i1.arg ** i2.arg
elif op == '^':
o = i1.arg ^ i2.arg
elif op == '&':
o = i1.arg & i2.arg
elif op == '|':
o = i1.arg | i2.arg
elif op == '>>':
o = i1.arg >> i2.arg
elif op == '<<':
o = i1.arg << i2.arg
elif op == 'a>>':
x1 = mod_size2int[i1.arg.size](i1.arg)
x2 = mod_size2uint[i2.arg.size](i2.arg)
o = mod_size2uint[i1.arg.size](x1 >> x2)
elif op == '>>>':
o = (i1.arg >> (i2.arg % i2.size) |
i1.arg << ((i1.size - i2.arg) % i2.size))
elif op == '<<<':
o = (i1.arg << (i2.arg % i2.size) |
i1.arg >> ((i1.size - i2.arg) % i2.size))
elif op == '/':
o = i1.arg / i2.arg
elif op == '%':
o = i1.arg % i2.arg
elif op == 'idiv':
assert(i2.arg.arg)
x1 = mod_size2int[i1.arg.size](i1.arg)
x2 = mod_size2int[i2.arg.size](i2.arg)
o = mod_size2uint[i1.arg.size](x1 / x2)
elif op == 'imod':
assert(i2.arg.arg)
x1 = mod_size2int[i1.arg.size](i1.arg)
x2 = mod_size2int[i2.arg.size](i2.arg)
o = mod_size2uint[i1.arg.size](x1 % x2)
elif op == 'umod':
assert(i2.arg.arg)
x1 = mod_size2uint[i1.arg.size](i1.arg)
x2 = mod_size2uint[i2.arg.size](i2.arg)
o = mod_size2uint[i1.arg.size](x1 % x2)
elif op == 'udiv':
assert(i2.arg.arg)
x1 = mod_size2uint[i1.arg.size](i1.arg)
x2 = mod_size2uint[i2.arg.size](i2.arg)
o = mod_size2uint[i1.arg.size](x1 / x2)
o = ExprInt(o, i1.size)
args.append(o)
# bsf(int) => int
if op == "bsf" and args[0].is_int() and args[0].arg != 0:
i = 0
while args[0].arg & (1 << i) == 0:
i += 1
return ExprInt(i, args[0].size)
# bsr(int) => int
if op == "bsr" and args[0].is_int() and args[0].arg != 0:
i = args[0].size - 1
while args[0].arg & (1 << i) == 0:
i -= 1
return ExprInt(i, args[0].size)
# -(-(A)) => A
if (op == '-' and len(args) == 1 and args[0].is_op('-') and
len(args[0].args) == 1):
return args[0].args[0]
# -(int) => -int
if op == '-' and len(args) == 1 and args[0].is_int():
return ExprInt(-int(args[0]), e.size)
# A op 0 =>A
if op in ['+', '|', "^", "<<", ">>", "<<<", ">>>"] and len(args) > 1:
if args[-1].is_int(0):
args.pop()
# A - 0 =>A
if op == '-' and len(args) > 1 and args[-1].is_int(0):
        assert(len(args) == 2) # Op '-' with more than 2 args: SanityCheckError
return args[0]
# A * 1 =>A
if op == "*" and len(args) > 1 and args[-1].is_int(1):
args.pop()
    # for canonical form
# A * -1 => - A
if op == "*" and len(args) > 1 and args[-1].is_int((1 << args[-1].size) - 1):
args.pop()
args[-1] = - args[-1]
# op A => A
if op in ['+', '*', '^', '&', '|', '>>', '<<',
'a>>', '<<<', '>>>', 'idiv', 'imod', 'umod', 'udiv'] and len(args) == 1:
return args[0]
# A-B => A + (-B)
if op == '-' and len(args) > 1:
if len(args) > 2:
raise ValueError(
'sanity check fail on expr -: should have one or 2 args ' +
'%r %s' % (e, e))
return ExprOp('+', args[0], -args[1])
# A op 0 => 0
if op in ['&', "*"] and args[1].is_int(0):
return ExprInt(0, e.size)
# - (A + B +...) => -A + -B + -C
if op == '-' and len(args) == 1 and args[0].is_op('+'):
args = [-a for a in args[0].args]<|fim▁hole|> return e
# -(a?int1:int2) => (a?-int1:-int2)
if (op == '-' and len(args) == 1 and
args[0].is_cond() and
args[0].src1.is_int() and args[0].src2.is_int()):
i1 = args[0].src1
i2 = args[0].src2
i1 = ExprInt(-i1.arg, i1.size)
i2 = ExprInt(-i2.arg, i2.size)
return ExprCond(args[0].cond, i1, i2)
i = 0
while i < len(args) - 1:
j = i + 1
while j < len(args):
# A ^ A => 0
if op == '^' and args[i] == args[j]:
args[i] = ExprInt(0, args[i].size)
del(args[j])
continue
# A + (- A) => 0
if op == '+' and args[j].is_op("-"):
if len(args[j].args) == 1 and args[i] == args[j].args[0]:
args[i] = ExprInt(0, args[i].size)
del(args[j])
continue
# (- A) + A => 0
if op == '+' and args[i].is_op("-"):
if len(args[i].args) == 1 and args[j] == args[i].args[0]:
args[i] = ExprInt(0, args[i].size)
del(args[j])
continue
# A | A => A
if op == '|' and args[i] == args[j]:
del(args[j])
continue
# A & A => A
if op == '&' and args[i] == args[j]:
del(args[j])
continue
j += 1
i += 1
if op in ['|', '&', '%', '/', '**'] and len(args) == 1:
return args[0]
# A <<< A.size => A
if (op in ['<<<', '>>>'] and
args[1].is_int() and
args[1].arg == args[0].size):
return args[0]
# A <<< X <<< Y => A <<< (X+Y) (ou <<< >>>)
if (op in ['<<<', '>>>'] and
args[0].is_op() and
args[0].op in ['<<<', '>>>']):
op1 = op
op2 = args[0].op
if op1 == op2:
op = op1
args1 = args[0].args[1] + args[1]
else:
op = op2
args1 = args[0].args[1] - args[1]
args0 = args[0].args[0]
args = [args0, args1]
# A >> X >> Y => A >> (X+Y)
if (op in ['<<', '>>'] and
args[0].is_op(op)):
args = [args[0].args[0], args[0].args[1] + args[1]]
# ((A & A.mask)
if op == "&" and args[-1] == e.mask:
return ExprOp('&', *args[:-1])
# ((A | A.mask)
if op == "|" and args[-1] == e.mask:
return args[-1]
# ! (!X + int) => X - int
# TODO
    # ((A & mask) >> shift) with mask < 2**shift => 0
if op == ">>" and args[1].is_int() and args[0].is_op("&"):
if (args[0].args[1].is_int() and
2 ** args[1].arg > args[0].args[1].arg):
return ExprInt(0, args[0].size)
# parity(int) => int
if op == 'parity' and args[0].is_int():
return ExprInt(parity(int(args[0])), 1)
# (-a) * b * (-c) * (-d) => (-a) * b * c * d
if op == "*" and len(args) > 1:
new_args = []
counter = 0
for a in args:
if a.is_op('-') and len(a.args) == 1:
new_args.append(a.args[0])
counter += 1
else:
new_args.append(a)
if counter % 2:
return -ExprOp(op, *new_args)
args = new_args
# A << int with A ExprCompose => move index
if (op == "<<" and args[0].is_compose() and
args[1].is_int() and int(args[1]) != 0):
final_size = args[0].size
shift = int(args[1])
new_args = []
# shift indexes
for index, arg in args[0].iter_args():
new_args.append((arg, index+shift, index+shift+arg.size))
# filter out expression
filter_args = []
min_index = final_size
for expr, start, stop in new_args:
if start >= final_size:
continue
if stop > final_size:
expr = expr[:expr.size - (stop - final_size)]
stop = final_size
filter_args.append(expr)
min_index = min(start, min_index)
# create entry 0
assert min_index != 0
expr = ExprInt(0, min_index)
args = [expr] + filter_args
return ExprCompose(*args)
# A >> int with A ExprCompose => move index
if op == ">>" and args[0].is_compose() and args[1].is_int():
final_size = args[0].size
shift = int(args[1])
new_args = []
# shift indexes
for index, arg in args[0].iter_args():
new_args.append((arg, index-shift, index+arg.size-shift))
# filter out expression
filter_args = []
max_index = 0
for expr, start, stop in new_args:
if stop <= 0:
continue
if start < 0:
expr = expr[-start:]
start = 0
filter_args.append(expr)
max_index = max(stop, max_index)
# create entry 0
expr = ExprInt(0, final_size - max_index)
args = filter_args + [expr]
return ExprCompose(*args)
# Compose(a) OP Compose(b) with a/b same bounds => Compose(a OP b)
if op in ['|', '&', '^'] and all([arg.is_compose() for arg in args]):
bounds = set()
for arg in args:
bound = tuple([expr.size for expr in arg.args])
bounds.add(bound)
if len(bounds) == 1:
bound = list(bounds)[0]
new_args = [[expr] for expr in args[0].args]
for sub_arg in args[1:]:
for i, expr in enumerate(sub_arg.args):
new_args[i].append(expr)
args = []
for i, arg in enumerate(new_args):
args.append(ExprOp(op, *arg))
return ExprCompose(*args)
# <<<c_rez, >>>c_rez
if op in [">>>c_rez", "<<<c_rez"]:
assert len(args) == 3
dest, rounds, cf = args
# Skipped if rounds is 0
if rounds.is_int(0):
return dest
elif all(map(lambda x: x.is_int(), args)):
# The expression can be resolved
tmp = int(dest)
cf = int(cf)
size = dest.size
tmp_count = (int(rounds) &
(0x3f if size == 64 else 0x1f)) % (size + 1)
if op == ">>>c_rez":
while (tmp_count != 0):
tmp_cf = tmp & 1;
tmp = (tmp >> 1) + (cf << (size - 1))
cf = tmp_cf
tmp_count -= 1
tmp &= int(dest.mask)
elif op == "<<<c_rez":
while (tmp_count != 0):
tmp_cf = (tmp >> (size - 1)) & 1
tmp = (tmp << 1) + cf
cf = tmp_cf
tmp_count -= 1
tmp &= int(dest.mask)
else:
raise RuntimeError("Unknown operation: %s" % op)
return ExprInt(tmp, size=dest.size)
return ExprOp(op, *args)
def simp_cond_op_int(e_s, e):
"Extract conditions from operations"
if not e.op in ["+", "|", "^", "&", "*", '<<', '>>', 'a>>']:
return e
if len(e.args) < 2:
return e
if not e.args[-1].is_int():
return e
a_int = e.args[-1]
conds = []
for a in e.args[:-1]:
if not a.is_cond():
return e
conds.append(a)
if not conds:
return e
c = conds.pop()
c = ExprCond(c.cond,
ExprOp(e.op, c.src1, a_int),
ExprOp(e.op, c.src2, a_int))
conds.append(c)
new_e = ExprOp(e.op, *conds)
return new_e
def simp_cond_factor(e_s, e):
"Merge similar conditions"
if not e.op in ["+", "|", "^", "&", "*", '<<', '>>', 'a>>']:
return e
if len(e.args) < 2:
return e
conds = {}
not_conds = []
multi_cond = False
for a in e.args:
if not a.is_cond():
not_conds.append(a)
continue
c = a.cond
if not c in conds:
conds[c] = []
else:
multi_cond = True
conds[c].append(a)
if not multi_cond:
return e
c_out = not_conds[:]
for c, vals in conds.items():
new_src1 = [x.src1 for x in vals]
new_src2 = [x.src2 for x in vals]
src1 = e_s.expr_simp_wrapper(ExprOp(e.op, *new_src1))
src2 = e_s.expr_simp_wrapper(ExprOp(e.op, *new_src2))
c_out.append(ExprCond(c, src1, src2))
if len(c_out) == 1:
new_e = c_out[0]
else:
new_e = ExprOp(e.op, *c_out)
return new_e
def simp_slice(e_s, e):
"Slice optimization"
# slice(A, 0, a.size) => A
if e.start == 0 and e.stop == e.arg.size:
return e.arg
# Slice(int) => int
elif e.arg.is_int():
total_bit = e.stop - e.start
mask = (1 << (e.stop - e.start)) - 1
return ExprInt(int((e.arg.arg >> e.start) & mask), total_bit)
# Slice(Slice(A, x), y) => Slice(A, z)
elif e.arg.is_slice():
if e.stop - e.start > e.arg.stop - e.arg.start:
raise ValueError('slice in slice: getting more val', str(e))
new_e = ExprSlice(e.arg.arg, e.start + e.arg.start,
e.start + e.arg.start + (e.stop - e.start))
return new_e
elif e.arg.is_compose():
# Slice(Compose(A), x) => Slice(A, y)
for index, arg in e.arg.iter_args():
if index <= e.start and index+arg.size >= e.stop:
new_e = arg[e.start - index:e.stop - index]
return new_e
# Slice(Compose(A, B, C), x) => Compose(A, B, C) with truncated A/B/C
out = []
for index, arg in e.arg.iter_args():
# arg is before slice start
if e.start >= index + arg.size:
continue
# arg is after slice stop
elif e.stop <= index:
continue
# arg is fully included in slice
elif e.start <= index and index + arg.size <= e.stop:
out.append(arg)
continue
# arg is truncated at start
if e.start > index:
slice_start = e.start - index
a_start = 0
else:
# arg is not truncated at start
slice_start = 0
a_start = index - e.start
# a is truncated at stop
if e.stop < index + arg.size:
slice_stop = arg.size + e.stop - (index + arg.size) - slice_start
a_stop = e.stop - e.start
else:
slice_stop = arg.size
a_stop = index + arg.size - e.start
out.append(arg[slice_start:slice_stop])
return ExprCompose(*out)
# ExprMem(x, size)[:A] => ExprMem(x, a)
# XXXX todo hum, is it safe?
elif (e.arg.is_mem() and
e.start == 0 and
e.arg.size > e.stop and e.stop % 8 == 0):
e = ExprMem(e.arg.arg, size=e.stop)
return e
# distributivity of slice and &
# (a & int)[x:y] => 0 if int[x:y] == 0
elif e.arg.is_op("&") and e.arg.args[-1].is_int():
tmp = e_s.expr_simp_wrapper(e.arg.args[-1][e.start:e.stop])
if tmp.is_int(0):
return tmp
# distributivity of slice and exprcond
# (a?int1:int2)[x:y] => (a?int1[x:y]:int2[x:y])
elif e.arg.is_cond() and e.arg.src1.is_int() and e.arg.src2.is_int():
src1 = e.arg.src1[e.start:e.stop]
src2 = e.arg.src2[e.start:e.stop]
e = ExprCond(e.arg.cond, src1, src2)
# (a * int)[0:y] => (a[0:y] * int[0:y])
elif e.start == 0 and e.arg.is_op("*") and e.arg.args[-1].is_int():
args = [e_s.expr_simp_wrapper(a[e.start:e.stop]) for a in e.arg.args]
e = ExprOp(e.arg.op, *args)
# (a >> int)[x:y] => a[x+int:y+int] with int+y <= a.size
# (a << int)[x:y] => a[x-int:y-int] with x-int >= 0
elif (e.arg.is_op() and e.arg.op in [">>", "<<"] and
e.arg.args[1].is_int()):
arg, shift = e.arg.args
shift = int(shift)
if e.arg.op == ">>":
if shift + e.stop <= arg.size:
return arg[e.start + shift:e.stop + shift]
elif e.arg.op == "<<":
if e.start - shift >= 0:
return arg[e.start - shift:e.stop - shift]
else:
raise ValueError('Bad case')
return e
def simp_compose(e_s, e):
"Commons simplification on ExprCompose"
args = merge_sliceto_slice(e)
out = []
# compose of compose
for arg in args:
if arg.is_compose():
out += arg.args
else:
out.append(arg)
args = out
# Compose(a) with a.size = compose.size => a
if len(args) == 1 and args[0].size == e.size:
return args[0]
# {(X[z:], 0, X.size-z), (0, X.size-z, X.size)} => (X >> z)
if len(args) == 2 and args[1].is_int(0):
if (args[0].is_slice() and
args[0].stop == args[0].arg.size and
args[0].size + args[1].size == args[0].arg.size):
new_e = args[0].arg >> ExprInt(args[0].start, args[0].arg.size)
return new_e
# {@X[base + i] 0 X, @Y[base + i + X] X (X + Y)} => @(X+Y)[base + i]
for i, arg in enumerate(args[:-1]):
nxt = args[i + 1]
if arg.is_mem() and nxt.is_mem():
gap = e_s(nxt.arg - arg.arg)
if gap.is_int() and int(gap) == arg.size / 8:
args = args[:i] + [ExprMem(arg.arg,
arg.size + nxt.size)] + args[i + 2:]
return ExprCompose(*args)
    # Compose of an ExprCond with integer src1/src2 and of plain integers =>
    # propagate the integers
# {XXX?(0x0,0x1)?(0x0,0x1),0,8, 0x0,8,32} => XXX?(int1, int2)
ok = True
expr_cond_index = None
expr_ints_or_conds = []
for i, arg in enumerate(args):
if not is_int_or_cond_src_int(arg):
ok = False
break
expr_ints_or_conds.append(arg)
if arg.is_cond():
if expr_cond_index is not None:
ok = False
expr_cond_index = i
cond = arg
if ok and expr_cond_index is not None:
src1 = []
src2 = []
for i, arg in enumerate(expr_ints_or_conds):
if i == expr_cond_index:
src1.append(arg.src1)
src2.append(arg.src2)
else:
src1.append(arg)
src2.append(arg)
src1 = e_s.apply_simp(ExprCompose(*src1))
src2 = e_s.apply_simp(ExprCompose(*src2))
if src1.is_int() and src2.is_int():
return ExprCond(cond.cond, src1, src2)
return ExprCompose(*args)
def simp_cond(e_s, e):
"Common simplifications on ExprCond"
    # eval exprcond src1/src2 with satisfiable/unsatisfiable condition
# propagation
if (not e.cond.is_int()) and e.cond.size == 1:
src1 = e.src1.replace_expr({e.cond: ExprInt(1, 1)})
src2 = e.src2.replace_expr({e.cond: ExprInt(0, 1)})
if src1 != e.src1 or src2 != e.src2:
return ExprCond(e.cond, src1, src2)
# -A ? B:C => A ? B:C
if e.cond.is_op('-') and len(e.cond.args) == 1:
e = ExprCond(e.cond.args[0], e.src1, e.src2)
# a?x:x
elif e.src1 == e.src2:
e = e.src1
# int ? A:B => A or B
elif e.cond.is_int():
if e.cond.arg == 0:
e = e.src2
else:
e = e.src1
# a?(a?b:c):x => a?b:x
elif e.src1.is_cond() and e.cond == e.src1.cond:
e = ExprCond(e.cond, e.src1.src1, e.src2)
# a?x:(a?b:c) => a?x:c
elif e.src2.is_cond() and e.cond == e.src2.cond:
e = ExprCond(e.cond, e.src1, e.src2.src2)
# a|int ? b:c => b with int != 0
elif (e.cond.is_op('|') and
e.cond.args[1].is_int() and
e.cond.args[1].arg != 0):
return e.src1
    # (C?int1:int2) ? A:B => A, B or C ? A:B depending on int1/int2
elif (e.cond.is_cond() and
e.cond.src1.is_int() and
e.cond.src2.is_int()):
int1 = e.cond.src1.arg.arg
int2 = e.cond.src2.arg.arg
if int1 and int2:
e = e.src1
elif int1 == 0 and int2 == 0:
e = e.src2
elif int1 == 0 and int2:
e = ExprCond(e.cond.cond, e.src2, e.src1)
elif int1 and int2 == 0:
e = ExprCond(e.cond.cond, e.src1, e.src2)
return e<|fim▁end|> | e = ExprOp('+', *args) |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.http import HttpResponseRedirect
from django.shortcuts import redirect
from django.urls import reverse_lazy, reverse
from django.views.generic import ListView, DetailView, FormView, CreateView, \
UpdateView
from django.views.generic.base import TemplateView, View
from django.contrib.auth.mixins import LoginRequiredMixin
from . import xml_hero
from .models import Group, Hero
from .forms import GroupForm, HeroAddForm, CharsheetUploadForm
# Create your views here.
class HomeView(TemplateView):
template_name = "home.html"
class GroupView(LoginRequiredMixin, ListView):
model = Group
template_name = 'groups_all.html'
def get_context_data(self, **kwargs):
context = super(GroupView, self).get_context_data(**kwargs)
user = self.request.user
context['gm_groups'] = user.gaming_group_master.all()
context['player_groups'] = user.gaming_group.all()
return context
class GroupDetailView(LoginRequiredMixin, DetailView):
model = Group
template_name = 'groups_detail.html'
def get_object(self, queryset=None):
if queryset is None:
queryset = self.get_queryset()
return queryset.get(name=self.kwargs['group'])
def render_to_response(self, context, **response_kwargs):
user = self.request.user
print(self.object.players.all())
if user in self.object.players.all() and not user.hero_set.filter(
group=self.object).exists():
return redirect(
reverse_lazy('hero:group_add_hero', kwargs=self.kwargs))
else:
return super(GroupDetailView, self).render_to_response(context)
class GroupAdminView(LoginRequiredMixin, DetailView):
model = Group
template_name = 'groups_all.html'
def get_context_data(self, **kwargs):
        context = super(GroupAdminView, self).get_context_data(**kwargs)
user = self.request.user
context['gm_groups'] = user.gaming_group_master.all()
context['player_groups'] = user.gaming_group.all()
return context
class GroupAddView(LoginRequiredMixin, CreateView):
template_name = 'groups_add.html'
form_class = GroupForm
def form_valid(self, form):
group = Group(
name=form.cleaned_data['name'],
rule_version=form.cleaned_data['rule_version'],
players=form.cleaned_data['players'],
description=form.cleaned_data['description'],
game_master=self.request.user
)
group.save()
return HttpResponseRedirect(self.get_success_url())
def get_success_url(self):
return reverse_lazy('hero:group_detail', args=(self.object.name,))
class PlayerDetailView(LoginRequiredMixin, DetailView):
pass
class GroupAddHeroView(LoginRequiredMixin, CreateView):
template_name = "groups_add_hero.html"
form_class = HeroAddForm
def get_context_data(self, **kwargs):
context = super(CreateView, self).get_context_data(**kwargs)
context['group_name'] = self.kwargs['group']
return context
def get_form_kwargs(self):
# pass "user" keyword argument with the current user to your form
kwargs = super(CreateView, self).get_form_kwargs()
kwargs['player'] = self.request.user
kwargs['group'] = Group.objects.get(name=self.kwargs['group'])
return kwargs
def form_valid(self, form):
return HttpResponseRedirect(self.get_success_url())
def get_success_url(self):
return reverse('hero:group_detail', args=(self.kwargs['group'],))
class ProfileView(LoginRequiredMixin, DetailView):
pass
class HeroCharsheetView(DetailView):
model = Hero
template_name = "hero_charsheet.html"
def get_object(self, queryset=None):
if queryset is None:
queryset = self.get_queryset()
return queryset.get(name=self.kwargs['hero'],
group=self.kwargs['group'])
def get_context_data(self, **kwargs):
context = super(DetailView, self).get_context_data(**kwargs)
try:
context['charsheet'] = xml_hero.get_hero(
self.object.group.rule_version,
self.object.char_sheet.read())
finally:
return context
def get(self, request, *args, **kwargs):
if not self.get_object().char_sheet:
self.template_name = "no_charsheet.html"
return super(HeroCharsheetView, self).get(self, request, *args,
**kwargs)
class GroupHeroView(LoginRequiredMixin, DetailView):
pass
class HeroAddCharsheetView(LoginRequiredMixin, UpdateView):
model = Hero
fields = ['char_sheet']
template_name = 'charsheet_upload.html'
def get_object(self, queryset=None):
if queryset is None:
queryset = self.get_queryset()
return queryset.get(name=self.kwargs['hero'],
group=self.kwargs['group'])
def get_success_url(self):
print(self.kwargs['group'])
return reverse('hero:group_detail', args=(self.kwargs['group'],))
from django.contrib.auth.mixins import LoginRequiredMixin
from django.http import HttpResponseRedirect
from django.urls import reverse
from django.views.generic import DetailView, ListView, CreateView
from .models import DiaryEntry, Adventure
from hero.models import Group, Hero
class AdventureView(LoginRequiredMixin, DetailView):
model = Adventure
template_name = "adventure_overview.html"
def get_context_data(self, **kwargs):
context = super(DetailView, self).get_context_data(**kwargs)
group = Group.objects.get(name=self.kwargs['group'])
context['group'] = group
return context
def get_object(self, queryset=None):
if queryset is None:
queryset = self.get_queryset()
group = Group.objects.get(name=self.kwargs['group'])
return queryset.get(group=group, name=self.kwargs['adventure'])
class DiaryView(LoginRequiredMixin, ListView):
model = DiaryEntry
template_name = 'diary_overview.html'
def get_context_data(self, **kwargs):
context = super(ListView, self).get_context_data(**kwargs)
hero = Hero.objects.get(name=self.kwargs['hero'])
group = hero.group
context['group'] = group
return context
class DiaryEntryView(LoginRequiredMixin, DetailView):
model = DiaryEntry
template_name = "diary_entry.html"
def get_object(self, queryset=None):
if queryset is None:
queryset = self.get_queryset()
return queryset
class AddDiaryEntryView(LoginRequiredMixin, CreateView):
model = DiaryEntry
fields = ('name', 'date', 'entry', 'hero')
template_name = 'add_diary_entry.html'
def get_context_data(self, **kwargs):
context = super(CreateView, self).get_context_data(**kwargs)
context['adventure'] = Adventure.objects.get(
name=self.kwargs['adventure'])
return context
def form_valid(self, form):
adventure = Adventure.objects.get(name=self.kwargs['adventure'])
diary_entry = DiaryEntry(
name=form.cleaned_data['name'],
date=form.cleaned_data['date'],
entry=form.cleaned_data['entry'],
hero=form.cleaned_data['hero'],
adventure=adventure,
)
diary_entry.save()
return HttpResponseRedirect(self.get_success_url())
def form_invalid(self, form):
print(form.errors)
return HttpResponseRedirect(self.get_success_url())
<|fim▁hole|> def get_success_url(self):
return reverse('adventure:hero_adventure_diary',
args=(self.kwargs['hero'], self.kwargs['adventure']))<|fim▁end|> | |
<|file_name|>p2p-nfc.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#
# Example nfcpy to wpa_supplicant wrapper for P2P NFC operations
# Copyright (c) 2012-2013, Jouni Malinen <[email protected]>
#
# This software may be distributed under the terms of the BSD license.
# See README for more details.
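#
# Illustrative invocation only (the flags are defined in main() below):
#   ./p2p-nfc.py -i wlan0 --summary /tmp/p2p-nfc-summary.txt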
import os
import sys
import time
import random
import threading
import argparse
import nfc
import nfc.ndef
import nfc.llcp
import nfc.handover
import logging
import wpaspy
wpas_ctrl = '/var/run/wpa_supplicant'
ifname = None
init_on_touch = False
in_raw_mode = False
prev_tcgetattr = 0
include_wps_req = True
include_p2p_req = True
no_input = False
srv = None
continue_loop = True
terminate_now = False
summary_file = None
success_file = None
def summary(txt):
print txt
if summary_file:
with open(summary_file, 'a') as f:
f.write(txt + "\n")
def success_report(txt):
summary(txt)
if success_file:
with open(success_file, 'a') as f:
f.write(txt + "\n")
def wpas_connect():
ifaces = []
if os.path.isdir(wpas_ctrl):
try:
ifaces = [os.path.join(wpas_ctrl, i) for i in os.listdir(wpas_ctrl)]
except OSError, error:
print "Could not find wpa_supplicant: ", error
return None
if len(ifaces) < 1:
print "No wpa_supplicant control interface found"
return None
for ctrl in ifaces:
if ifname:
if ifname not in ctrl:
continue
try:
print "Trying to use control interface " + ctrl
wpas = wpaspy.Ctrl(ctrl)
return wpas
except Exception, e:
pass
return None
def wpas_tag_read(message):
wpas = wpas_connect()
if (wpas == None):
return False
cmd = "WPS_NFC_TAG_READ " + str(message).encode("hex")
global force_freq
if force_freq:
cmd = cmd + " freq=" + force_freq
if "FAIL" in wpas.request(cmd):
return False
return True
def wpas_get_handover_req():
wpas = wpas_connect()
if (wpas == None):
return None
res = wpas.request("NFC_GET_HANDOVER_REQ NDEF P2P-CR").rstrip()
if "FAIL" in res:
return None
return res.decode("hex")
def wpas_get_handover_req_wps():
wpas = wpas_connect()
if (wpas == None):
return None
res = wpas.request("NFC_GET_HANDOVER_REQ NDEF WPS-CR").rstrip()
if "FAIL" in res:
return None
return res.decode("hex")
def wpas_get_handover_sel(tag=False):
wpas = wpas_connect()
if (wpas == None):
return None
if tag:
res = wpas.request("NFC_GET_HANDOVER_SEL NDEF P2P-CR-TAG").rstrip()
else:
res = wpas.request("NFC_GET_HANDOVER_SEL NDEF P2P-CR").rstrip()
if "FAIL" in res:
return None
return res.decode("hex")
def wpas_get_handover_sel_wps():
wpas = wpas_connect()
if (wpas == None):
return None
res = wpas.request("NFC_GET_HANDOVER_SEL NDEF WPS-CR");
if "FAIL" in res:
return None
return res.rstrip().decode("hex")
def wpas_report_handover(req, sel, type):
wpas = wpas_connect()
if (wpas == None):
return None
cmd = "NFC_REPORT_HANDOVER " + type + " P2P " + str(req).encode("hex") + " " + str(sel).encode("hex")
global force_freq
if force_freq:
cmd = cmd + " freq=" + force_freq
return wpas.request(cmd)
def wpas_report_handover_wsc(req, sel, type):
wpas = wpas_connect()
if (wpas == None):
return None
cmd = "NFC_REPORT_HANDOVER " + type + " WPS " + str(req).encode("hex") + " " + str(sel).encode("hex")
if force_freq:
cmd = cmd + " freq=" + force_freq
return wpas.request(cmd)
def p2p_handover_client(llc):
message = nfc.ndef.HandoverRequestMessage(version="1.2")
message.nonce = random.randint(0, 0xffff)
global include_p2p_req
if include_p2p_req:
data = wpas_get_handover_req()
if (data == None):
summary("Could not get handover request carrier record from wpa_supplicant")
return
print "Handover request carrier record from wpa_supplicant: " + data.encode("hex")
datamsg = nfc.ndef.Message(data)
message.add_carrier(datamsg[0], "active", datamsg[1:])
global include_wps_req
if include_wps_req:
print "Handover request (pre-WPS):"
try:
print message.pretty()
except Exception, e:
print e
data = wpas_get_handover_req_wps()
if data:
print "Add WPS request in addition to P2P"
datamsg = nfc.ndef.Message(data)
message.add_carrier(datamsg[0], "active", datamsg[1:])
print "Handover request:"
try:
print message.pretty()
except Exception, e:
print e
print str(message).encode("hex")
client = nfc.handover.HandoverClient(llc)
try:
summary("Trying to initiate NFC connection handover")
client.connect()
summary("Connected for handover")
except nfc.llcp.ConnectRefused:
summary("Handover connection refused")
client.close()
return
except Exception, e:
summary("Other exception: " + str(e))
client.close()
return
summary("Sending handover request")
if not client.send(message):
summary("Failed to send handover request")
client.close()
return
summary("Receiving handover response")
message = client._recv()
if message is None:
summary("No response received")
client.close()
return
if message.type != "urn:nfc:wkt:Hs":
summary("Response was not Hs - received: " + message.type)
client.close()
return
print "Received message"
try:
print message.pretty()
except Exception, e:
print e
print str(message).encode("hex")
message = nfc.ndef.HandoverSelectMessage(message)
summary("Handover select received")
try:
print message.pretty()
except Exception, e:
print e
for carrier in message.carriers:
print "Remote carrier type: " + carrier.type
if carrier.type == "application/vnd.wfa.p2p":
print "P2P carrier type match - send to wpa_supplicant"
if "OK" in wpas_report_handover(data, carrier.record, "INIT"):
success_report("P2P handover reported successfully (initiator)")
else:
summary("P2P handover report rejected")
break
print "Remove peer"
client.close()
print "Done with handover"
global only_one
if only_one:
print "only_one -> stop loop"
global continue_loop
continue_loop = False
global no_wait
if no_wait:
print "Trying to exit.."
global terminate_now
terminate_now = True
class HandoverServer(nfc.handover.HandoverServer):
def __init__(self, llc):
super(HandoverServer, self).__init__(llc)
self.sent_carrier = None
self.ho_server_processing = False
self.success = False
# override to avoid parser error in request/response.pretty() in nfcpy
# due to new WSC handover format
def _process_request(self, request):
summary("received handover request {}".format(request.type))
response = nfc.ndef.Message("\xd1\x02\x01Hs\x12")
if not request.type == 'urn:nfc:wkt:Hr':
summary("not a handover request")
else:
try:
request = nfc.ndef.HandoverRequestMessage(request)
except nfc.ndef.DecodeError as e:
summary("error decoding 'Hr' message: {}".format(e))
else:
response = self.process_request(request)
summary("send handover response {}".format(response.type))
return response
def process_request(self, request):
self.ho_server_processing = True
clear_raw_mode()
print "HandoverServer - request received"
try:
print "Parsed handover request: " + request.pretty()
except Exception, e:
print e
sel = nfc.ndef.HandoverSelectMessage(version="1.2")
found = False
for carrier in request.carriers:
print "Remote carrier type: " + carrier.type
if carrier.type == "application/vnd.wfa.p2p":
print "P2P carrier type match - add P2P carrier record"
found = True
self.received_carrier = carrier.record
print "Carrier record:"
try:
print carrier.record.pretty()
except Exception, e:
print e
data = wpas_get_handover_sel()
if data is None:
print "Could not get handover select carrier record from wpa_supplicant"
continue
print "Handover select carrier record from wpa_supplicant:"
print data.encode("hex")
self.sent_carrier = data
if "OK" in wpas_report_handover(self.received_carrier, self.sent_carrier, "RESP"):
success_report("P2P handover reported successfully (responder)")
else:
summary("P2P handover report rejected")
break
message = nfc.ndef.Message(data);
sel.add_carrier(message[0], "active", message[1:])
break
for carrier in request.carriers:
if found:
break
print "Remote carrier type: " + carrier.type
if carrier.type == "application/vnd.wfa.wsc":
print "WSC carrier type match - add WSC carrier record"
found = True
self.received_carrier = carrier.record
print "Carrier record:"
try:
print carrier.record.pretty()
except Exception, e:
print e
data = wpas_get_handover_sel_wps()
if data is None:
print "Could not get handover select carrier record from wpa_supplicant"
continue
print "Handover select carrier record from wpa_supplicant:"
print data.encode("hex")
self.sent_carrier = data
if "OK" in wpas_report_handover_wsc(self.received_carrier, self.sent_carrier, "RESP"):
success_report("WSC handover reported successfully")
else:
summary("WSC handover report rejected")
break
message = nfc.ndef.Message(data);<|fim▁hole|> print "Handover select:"
try:
print sel.pretty()
except Exception, e:
print e
print str(sel).encode("hex")
summary("Sending handover select")
self.success = True
return sel
def clear_raw_mode():
import sys, tty, termios
global prev_tcgetattr, in_raw_mode
if not in_raw_mode:
return
fd = sys.stdin.fileno()
termios.tcsetattr(fd, termios.TCSADRAIN, prev_tcgetattr)
in_raw_mode = False
def getch():
import sys, tty, termios, select
global prev_tcgetattr, in_raw_mode
fd = sys.stdin.fileno()
prev_tcgetattr = termios.tcgetattr(fd)
ch = None
try:
tty.setraw(fd)
in_raw_mode = True
[i, o, e] = select.select([fd], [], [], 0.05)
if i:
ch = sys.stdin.read(1)
finally:
termios.tcsetattr(fd, termios.TCSADRAIN, prev_tcgetattr)
in_raw_mode = False
return ch
def p2p_tag_read(tag):
success = False
if len(tag.ndef.message):
for record in tag.ndef.message:
print "record type " + record.type
if record.type == "application/vnd.wfa.wsc":
summary("WPS tag - send to wpa_supplicant")
success = wpas_tag_read(tag.ndef.message)
break
if record.type == "application/vnd.wfa.p2p":
summary("P2P tag - send to wpa_supplicant")
success = wpas_tag_read(tag.ndef.message)
break
else:
summary("Empty tag")
if success:
success_report("Tag read succeeded")
return success
def rdwr_connected_p2p_write(tag):
summary("Tag found - writing - " + str(tag))
global p2p_sel_data
tag.ndef.message = str(p2p_sel_data)
success_report("Tag write succeeded")
print "Done - remove tag"
global only_one
if only_one:
global continue_loop
continue_loop = False
global p2p_sel_wait_remove
return p2p_sel_wait_remove
def wps_write_p2p_handover_sel(clf, wait_remove=True):
print "Write P2P handover select"
data = wpas_get_handover_sel(tag=True)
if (data == None):
summary("Could not get P2P handover select from wpa_supplicant")
return
global p2p_sel_wait_remove
p2p_sel_wait_remove = wait_remove
global p2p_sel_data
p2p_sel_data = nfc.ndef.HandoverSelectMessage(version="1.2")
message = nfc.ndef.Message(data);
p2p_sel_data.add_carrier(message[0], "active", message[1:])
print "Handover select:"
try:
print p2p_sel_data.pretty()
except Exception, e:
print e
print str(p2p_sel_data).encode("hex")
print "Touch an NFC tag"
clf.connect(rdwr={'on-connect': rdwr_connected_p2p_write})
def rdwr_connected(tag):
global only_one, no_wait
summary("Tag connected: " + str(tag))
if tag.ndef:
print "NDEF tag: " + tag.type
try:
print tag.ndef.message.pretty()
except Exception, e:
print e
success = p2p_tag_read(tag)
if only_one and success:
global continue_loop
continue_loop = False
else:
summary("Not an NDEF tag - remove tag")
return True
return not no_wait
def llcp_worker(llc):
global init_on_touch
if init_on_touch:
print "Starting handover client"
p2p_handover_client(llc)
return
global no_input
if no_input:
print "Wait for handover to complete"
else:
print "Wait for handover to complete - press 'i' to initiate ('w' for WPS only, 'p' for P2P only)"
global srv
global wait_connection
while not wait_connection and srv.sent_carrier is None:
if srv.ho_server_processing:
time.sleep(0.025)
elif no_input:
time.sleep(0.5)
else:
global include_wps_req, include_p2p_req
res = getch()
if res == 'i':
include_wps_req = True
include_p2p_req = True
elif res == 'p':
include_wps_req = False
include_p2p_req = True
elif res == 'w':
include_wps_req = True
include_p2p_req = False
else:
continue
clear_raw_mode()
print "Starting handover client"
p2p_handover_client(llc)
return
clear_raw_mode()
print "Exiting llcp_worker thread"
def llcp_startup(clf, llc):
print "Start LLCP server"
global srv
srv = HandoverServer(llc)
return llc
def llcp_connected(llc):
print "P2P LLCP connected"
global wait_connection
wait_connection = False
global init_on_touch
if not init_on_touch:
global srv
srv.start()
if init_on_touch or not no_input:
threading.Thread(target=llcp_worker, args=(llc,)).start()
return True
def terminate_loop():
global terminate_now
return terminate_now
def main():
clf = nfc.ContactlessFrontend()
parser = argparse.ArgumentParser(description='nfcpy to wpa_supplicant integration for P2P and WPS NFC operations')
parser.add_argument('-d', const=logging.DEBUG, default=logging.INFO,
action='store_const', dest='loglevel',
help='verbose debug output')
parser.add_argument('-q', const=logging.WARNING, action='store_const',
dest='loglevel', help='be quiet')
parser.add_argument('--only-one', '-1', action='store_true',
help='run only one operation and exit')
parser.add_argument('--init-on-touch', '-I', action='store_true',
help='initiate handover on touch')
parser.add_argument('--no-wait', action='store_true',
help='do not wait for tag to be removed before exiting')
parser.add_argument('--ifname', '-i',
help='network interface name')
parser.add_argument('--no-wps-req', '-N', action='store_true',
help='do not include WPS carrier record in request')
parser.add_argument('--no-input', '-a', action='store_true',
help='do not use stdout input to initiate handover')
parser.add_argument('--tag-read-only', '-t', action='store_true',
help='tag read only (do not allow connection handover)')
parser.add_argument('--handover-only', action='store_true',
help='connection handover only (do not allow tag read)')
parser.add_argument('--freq', '-f',
help='forced frequency of operating channel in MHz')
parser.add_argument('--summary',
help='summary file for writing status updates')
parser.add_argument('--success',
help='success file for writing success update')
parser.add_argument('command', choices=['write-p2p-sel'],
nargs='?')
args = parser.parse_args()
global only_one
only_one = args.only_one
global no_wait
no_wait = args.no_wait
global force_freq
force_freq = args.freq
logging.basicConfig(level=args.loglevel)
global init_on_touch
init_on_touch = args.init_on_touch
if args.ifname:
global ifname
ifname = args.ifname
print "Selected ifname " + ifname
if args.no_wps_req:
global include_wps_req
include_wps_req = False
if args.summary:
global summary_file
summary_file = args.summary
if args.success:
global success_file
success_file = args.success
if args.no_input:
global no_input
no_input = True
clf = nfc.ContactlessFrontend()
global wait_connection
try:
if not clf.open("usb"):
print "Could not open connection with an NFC device"
raise SystemExit
if args.command == "write-p2p-sel":
wps_write_p2p_handover_sel(clf, wait_remove=not args.no_wait)
raise SystemExit
global continue_loop
while continue_loop:
print "Waiting for a tag or peer to be touched"
wait_connection = True
try:
if args.tag_read_only:
if not clf.connect(rdwr={'on-connect': rdwr_connected}):
break
elif args.handover_only:
if not clf.connect(llcp={'on-startup': llcp_startup,
'on-connect': llcp_connected},
terminate=terminate_loop):
break
else:
if not clf.connect(rdwr={'on-connect': rdwr_connected},
llcp={'on-startup': llcp_startup,
'on-connect': llcp_connected},
terminate=terminate_loop):
break
except Exception, e:
print "clf.connect failed"
global srv
if only_one and srv and srv.success:
raise SystemExit
except KeyboardInterrupt:
raise SystemExit
finally:
clf.close()
raise SystemExit
if __name__ == '__main__':
main()<|fim▁end|> | sel.add_carrier(message[0], "active", message[1:])
found = True
break
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright © 2015, Peter Atashian
// Licensed under the MIT License <LICENSE.md><|fim▁hole|>extern crate winapi;
use winapi::*;
extern "system" {
}<|fim▁end|> | //! FFI bindings to glmf32.
#![no_std]
#![experimental] |
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for react-router-bootstrap
// Project: https://github.com/react-bootstrap/react-router-bootstrap
// Definitions by: Vincent Lesierse <https://github.com/vlesierse>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
/// <reference types="react"/>
/// <reference types="react-router"/>
<|fim▁hole|>declare namespace ReactRouterBootstrap {
interface LinkContainerProps extends ReactRouter.LinkProps {
disabled?: boolean
}
interface LinkContainer extends React.ComponentClass<LinkContainerProps> {}
interface LinkContainerElement extends React.ReactElement<LinkContainerProps> {}
const LinkContainer: LinkContainer
const IndexLinkContainer: LinkContainer
}
declare module "react-router-bootstrap/lib/LinkContainer" {
export default ReactRouterBootstrap.LinkContainer
}
declare module "react-router-bootstrap/lib/IndexLinkContainer" {
export default ReactRouterBootstrap.IndexLinkContainer
}
declare module "react-router-bootstrap" {
import LinkContainer from "react-router-bootstrap/lib/LinkContainer"
import IndexLinkContainer from "react-router-bootstrap/lib/IndexLinkContainer"
export {
LinkContainer,
IndexLinkContainer
}
}<|fim▁end|> | |
<|file_name|>roman_to_decimal.cpp<|end_file_name|><|fim▁begin|>#include <iostream>
#include <cstdlib>
#include <map>
using std::map;
using std::cout;
using std::endl;
using std::cerr;
using std::string;
#define MAXROMAN 7
typedef map<char,int> roman_mapvals;
typedef roman_mapvals::const_iterator roman_mapiter;
int RTABLE_VALUES[]={ 1, 5, 10, 50, 100, 500, 1000 };
char RTABLE_CHARS[] ={ 'I','V','X','L','C','D','M' };
int usage(const char *name){
cout << "usage: "<< name << " <roman number> " << endl;
exit(EXIT_SUCCESS);
}
int roman_to_decimal(const string &input,
const roman_mapvals &romanreftable) {
if( input.size() == 1 ){
roman_mapiter itr_roman_num = romanreftable.find(input[0]);
if( itr_roman_num == romanreftable.end())
return EXIT_FAILURE;
return itr_roman_num->second;
}
int sum=0, prefix=0, suffix =0, previous=0;
string::const_iterator itr_input = input.begin();
for(int i=0,j=1; i != input.size() && j != input.size(); ++i,++j){
roman_mapiter
itr_firstroman_num = romanreftable.find(itr_input[i]);
roman_mapiter
itr_secondroman_num = romanreftable.find(itr_input[j]);
if( itr_firstroman_num == romanreftable.end()
|| itr_secondroman_num == romanreftable.end() )
// only valid roman characters in RTABLE_CHARS
return EXIT_FAILURE;
prefix = itr_firstroman_num->second;
suffix = itr_secondroman_num->second;
if( prefix < suffix )
sum = ( sum - prefix ) + ( suffix - prefix );
else{
if( i != 0 )
// not first time around
sum += suffix;
else
// first time around
sum = prefix + suffix;
}
previous = prefix;
}
return sum;
}
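// Illustrative walk-through of the pairwise scan above (not part of the original
// program): for the input "XIV" the loop compares (X,I) and then (I,V).
//   (X=10, I=1) -> prefix >= suffix and i == 0, so sum = 10 + 1 = 11
//   (I=1,  V=5) -> prefix <  suffix,             so sum = (11 - 1) + (5 - 1) = 14
// Note that failures are reported as EXIT_FAILURE (1 on typical implementations),
// so a lone "I" (value 1) cannot be distinguished from a parse error by callers.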
int main( int argc, char *argv[])
{
if( argc < 2 ) usage(argv[0]);
    std::string roman_number(argv[1]);
    roman_mapvals reference_table;
for(int i=0; i != MAXROMAN; ++i)
// build lookup table
reference_table[ RTABLE_CHARS[i] ] = RTABLE_VALUES[i];
int result = roman_to_decimal(roman_number, reference_table);
if( result == EXIT_FAILURE ){<|fim▁hole|> cout << "Arabic number " << result << endl;
return EXIT_SUCCESS;
}<|fim▁end|> | cerr << "FAILED: input contains non-Roman character "<< endl;
exit(EXIT_FAILURE);
}
|
<|file_name|>wegas-helper.js<|end_file_name|><|fim▁begin|>/*
* Wegas
* http://wegas.albasim.ch
*
* Copyright (c) 2013, 2014, 2015 School of Business and Engineering Vaud, Comem
* Licensed under the MIT License
*/
/**
* @fileoverview
* @author Francois-Xavier Aeberhard <[email protected]>
* @author Cyril Junod <cyril.junod at gmail.com>
*/
YUI.add('wegas-helper', function(Y) {
"use strict";
var Wegas = Y.namespace("Wegas"),
Helper;
/**
* @name Y.Wegas.Helper
* @class
* @constructor
*/
Helper = {
/**
         * Generate a unique id based on the current time.
* @function
* @static
* @return {Number} time
* @description
*/
genId: function() {
var now = new Date();
return now.getHours() + now.getMinutes() + now.getSeconds();
},
/**
* Escape a html string by replacing <, > and " by their html entities.
*
* @function
* @static
* @param {String} str
* @return {String} Escaped string
*/
htmlEntities: function(str) {
return String(str).replace(/&/g, '&')
.replace(/</g, '<')
.replace(/>/g, '>')
.replace(/"/g, '"');
},
/**
* Replace any text line return
* @function
* @static
* @param {String} str the string to escape
* @param {String} replaceBy The value to replace with, default is \<br \/\>
* @return {String} Escaped string
*/
nl2br: function(str, replaceBy) {
replaceBy = replaceBy || '<br />';
return (String(str)).replace(/([^>\r\n]?)(\r\n|\n\r|\r|\n)/g, '$1' + replaceBy + '$2');
},
escapeJSString: function(str) {
return str.replace(/"/g, "\\\"").replace(/(\r\n|\n\r|\r|\n)/g, "");
//.replace(/(\r\n|\n\r|\r|\n)/g, "\\n");
},
unesacapeJSString: function(str) {
return str.replace(/\\"/g, '"');
},
escapeCSSClass: function(str) {
return str.replace(/ /g, "-").toLowerCase();
},
stripHtml: function(html) {
var div = document.createElement("div");
div.innerHTML = html;
return div.textContent || div.innerText || "";
},
trimLength: function(string, length, after) {
after = after || "...";
return string.length > length ? string.substring(0, length - after.length) + after :
string.substring(0, length);
},
/**
* Format a date, using provided format string.
*
* @function
* @static
* @argument {Number} timestamp
* @argument {String} fmt the format to apply, ex. '%d.%M.%Y at %H:%i:%s' <br />
* d Day of the month, 2 digits with leading zeros <br />
* m Numeric representation of a month, with leading zeros <br />
* M A short textual representation of a month, three letters <br />
* Y A full numeric representation of a year, 4 digits <br />
* H 24-hour format of an hour with leading zeros <br />
* i Minutes with leading zeros <br />
* s Seconds, with leading zeros <br />
* @returns {String} formated date
*/
formatDate: function(timestamp, fmt) {
var date = new Date(timestamp),
months = ["Jan", "Feb", "Mar", "Apr", "Mai", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"];
function pad(value) {
return (value.toString().length < 2) ? '0' + value : value;
}
return fmt.replace(/%([a-zA-Z])/g, function(_, fmtCode) {
switch (fmtCode) {
case 'Y':
return date.getFullYear();
case 'M':
return months[date.getMonth()];
case 'm':
return pad(date.getMonth() + 1);
case 'd':
return pad(date.getDate());
case 'H':
return pad(date.getHours());
case 'i':
return pad(date.getMinutes());
case 's':
return pad(date.getSeconds());
default:
throw new Error('Unsupported format code: ' + fmtCode);
}
});
},
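        // Illustrative call (arbitrary timestamp; exact output depends on the local timezone):
        //   Helper.formatDate(1420110000000, "%d.%M.%Y %H:%i") // -> e.g. "01.Jan.2015 11:20"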
/**
         * Returns the elapsed time between the provided timestamp and now, e.g. "a month ago",
* "2 hours ago", "10 minutes ago"
* @function
* @static
* @argument {Number} timestamp
* @return {String} The formatted time
*/
smartDate: function(timestamp, prefix) {
var date = new Date(timestamp),
now = new Date(),
diffN = now.getTime() - timestamp,
oneMinute = 60 * 1000,
oneHour = 60 * oneMinute,
oneDay = 24 * oneHour;
// oneMonth = 30 * oneDay,
// oneYear = 365 * oneDay;
if (!date.getTime()) {
return "undefined";
}
if (diffN < oneMinute) { // last minute
return Math.round(diffN / 1000) + " seconds ago";
} else if (diffN < oneHour) { // last hour
return Math.round(diffN / oneMinute) + " minutes ago";
} else if (diffN < oneDay && now.getDay() === date.getDay()) { // Today
return (prefix ? "at " : "") + Helper.formatDate(timestamp, "%H:%i");
} else if (date.getYear() === now.getYear()) { // This year
return (prefix ? "the " : "") + Helper.formatDate(timestamp, "%d %M");
} else { // Older
return (prefix ? "the " : "") + Helper.formatDate(timestamp, "%d %M %Y");
}
},
/**<|fim▁hole|> hashCode: function(value) {
return Y.Array.reduce(value.split(""), 0, function(prev, curr) {
prev = ((prev << 5) - prev) + curr.charCodeAt(0);
return (prev |= 0); //Force 32 bits
});
},
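        // The step above relies on (prev << 5) - prev === 31 * prev, i.e. the classic Java
        // formula hash = 31 * hash + charCode, and "|= 0" truncates the accumulator to a
        // signed 32-bit integer. Illustrative call: Helper.hashCode("ab") === 31 * 97 + 98 === 3105.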
/**
* Return an object with functions (first level only, not objects in object...)
* that will execute the supplied function in the supplied object's context,
* optionally adding any additional supplied parameters to the beginning of
* the arguments collection the supplied to the function.
* @param {Object} o the object with in functions to execute on the context object.
* @param {Object} c the execution context.
* @param {any} 0..n arguments to include before the arguments the function is executed with.
* @returns An object with the wrapped functions.
*/
superbind: function(o, c) {
var i, args = arguments.length > 0 ? Y.Array(arguments, 0, true) : null;
for (i in o) {
if (o.hasOwnProperty(i)) {
args[0] = o[i];
o[i] = Y.bind.apply(c, args);
}
}
return o;
},
getURLParameter: function(name) {
var param = ((new RegExp(name + '=' + '(.+?)(&|$)')).exec(location.search) || [, null])[1];
return param ? decodeURIComponent(param) : param;
},
getURLParameters: function() {
var match,
search = /([^&=]+)=?([^&]*)/g,
query = window.location.search.substring(1),
params = {};
while (match = search.exec(query)) {
params[decodeURIComponent(match[1])] = decodeURIComponent(match[2]);
}
return params;
},
setURLParameters: function(params) {
var par, str = [], tmp;
for (par in params) {
if (params.hasOwnProperty(par)) {
tmp = encodeURIComponent(par);
if (params[par]) {
tmp += "=" + encodeURIComponent(params[par]);
}
str.push(tmp);
}
}
window.location.search = "?" + str.join("&");
},
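        // Illustrative round trip (parameter names are made up for the example):
        //   Helper.setURLParameters({page: 2, q: "héllo", debug: null});
        //   // -> navigates to "?page=2&q=h%C3%A9llo&debug"
        //   Helper.getURLParameters(); // -> {page: "2", q: "héllo", debug: ""}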
getFilename: function(path) {
return path.replace(/^.*[\\\/]/, '');
},
/**
* @function
* source: http://stackoverflow.com/a/15203639
* @param {type} el
* @returns {Boolean}
*/
isElementVisible: function(el) {
if (el.getDOMNode) {
el = el.getDOMNode();
}
var eap,
rect = el.getBoundingClientRect(),
docEl = document.documentElement,
vWidth = window.innerWidth || docEl.clientWidth,
vHeight = window.innerHeight || docEl.clientHeight,
efp = function(x, y) {
return document.elementFromPoint(x, y);
},
contains = "contains" in el ? "contains" : "compareDocumentPosition",
has = contains === "contains" ? 1 : 0x14;
// Return false if it's not in the viewport
if (rect.right < 0 || rect.bottom < 0 || rect.left > vWidth || rect.top > vHeight) {
return false;
}
// Return true if any of its four corners are visible
return ((eap = efp(rect.left, rect.top)) === el || el[contains](eap) === has ||
(eap = efp(rect.right, rect.top)) === el || el[contains](eap) === has ||
(eap = efp(rect.right, rect.bottom)) === el || el[contains](eap) === has ||
(eap = efp(rect.left, rect.bottom)) === el || el[contains](eap) === has);
},
/**
*
*/
scrollIntoViewIfNot: function(node, alignTop) {
if (!Helper.isElementVisible(node)) {
node.scrollIntoView(alignTop);
}
},
/**
* Quote a given string to be passed in a regular expression
*
* @param str String the string to quote
* @returns String the quoted string
*/
RegExpQuote: function(str) {
return (String(str)).replace(/([.*?+\^$\[\]\\(){}|\-])/g, "\\$1");
}
};
Wegas.Helper = Helper;
Wegas.superbind = Helper.superbind;
/**
*
*/
Wegas.Timer = Y.Base.create("wegas-timer", Y.Base, [], {
start: function() {
if (!this.handler) {
this.handler = Y.later(this.get("duration"), this, this.timeOut);
}
return this;
},
reset: function() {
this.cancel();
return this.start();
},
cancel: function() {
if (this.handler) {
this.handler.cancel();
this.handler = null;
}
return this;
},
timeOut: function() {
this.cancel();
this.fire("timeOut");
return this;
},
destructor: function() {
this.cancel();
}
}, {
ATTRS: {
duration: {
value: 400
}
}
});
Y.Object.filter = function(o, fn) {
var r = {};
Y.Object.each(o, function(i, k) {
if (fn(i, k)) {
r[k] = i;
}
});
return r;
};
/**
     * asynchronous function queuing, chain asynchronous operations
*
* @class Y.Wegas.Helper.Queue
* @constructor
*/
Helper.Queue = (function() {
/**
*
* @constructor Q
         * @returns {Q}
*/
var Q = function() {
this._f = []; // function queue
this._a = []; // arguments queue
this._lock = false;
},
doNext = function(queue) {
var cb;
if (queue._f.length && !queue._lock) {
queue._lock = true;
cb = queue._f.shift();
cb.apply(cb, [queue].concat(queue._a.shift()));
}
};
Q.prototype = {
/*@lends Y.Wegas.Helper.Queue#*/
/**
* Add a function to queue and runs it if lock is released.
* Chainable.
* @param {Function} cb callback function
* @param {Any*} args additional arguments passed to callback function.
             * @returns {Q}
*/
add: function(cb, args) {
if (typeof cb !== "function") {
return this;
}
this._f.push(cb);
this._a.push(Array.prototype.splice.call(arguments, 0, 1));
doNext(this);
return this;
},
/**
* release lock and run next function if any.
* @returns {undefined}
*/
next: function() {
this._lock = false;
doNext(this);
},
/**
* remove further callbacks.
* Chainable.
             * @returns {Q}
*/
empty: function() {
this._f.length = 0;
this._a.length = 0;
return this;
}
};
return Q;
}());
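    /*
     * Illustrative use of Helper.Queue (callback bodies are made up): each queued
     * function receives the queue as its first argument and must call next() to
     * release the lock before the following entry runs.
     *
     *   var queue = new Y.Wegas.Helper.Queue();
     *   queue.add(function(q) {
     *       Y.later(100, null, function() {          // some asynchronous work
     *           q.next();                            // release the lock
     *       });
     *   });
     *   queue.add(function(q) {
     *       q.next();
     *   });
     */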
Helper.Diacritics = (function() {
/**
* Map originally at
* http://web.archive.org/web/20120918093154/http://lehelk.com/2011/05/06/script-to-remove-diacritics/
*/
var DIACRITICS = {
'a': /[\u0061\u24D0\uFF41\u1E9A\u00E0\u00E1\u00E2\u1EA7\u1EA5\u1EAB\u1EA9\u00E3\u0101\u0103\u1EB1\u1EAF\u1EB5\u1EB3\u0227\u01E1\u00E4\u01DF\u1EA3\u00E5\u01FB\u01CE\u0201\u0203\u1EA1\u1EAD\u1EB7\u1E01\u0105\u2C65\u0250]/g,
'aa': /[\uA733]/g,
'ae': /[\u00E6\u01FD\u01E3]/g,
'ao': /[\uA735]/g,
'au': /[\uA737]/g,
'av': /[\uA739\uA73B]/g,
'ay': /[\uA73D]/g,
'b': /[\u0062\u24D1\uFF42\u1E03\u1E05\u1E07\u0180\u0183\u0253]/g,
'c': /[\u0063\u24D2\uFF43\u0107\u0109\u010B\u010D\u00E7\u1E09\u0188\u023C\uA73F\u2184]/g,
'd': /[\u0064\u24D3\uFF44\u1E0B\u010F\u1E0D\u1E11\u1E13\u1E0F\u0111\u018C\u0256\u0257\uA77A]/g,
'dz': /[\u01F3\u01C6]/g,
'e': /[\u0065\u24D4\uFF45\u00E8\u00E9\u00EA\u1EC1\u1EBF\u1EC5\u1EC3\u1EBD\u0113\u1E15\u1E17\u0115\u0117\u00EB\u1EBB\u011B\u0205\u0207\u1EB9\u1EC7\u0229\u1E1D\u0119\u1E19\u1E1B\u0247\u025B\u01DD]/g,
'f': /[\u0066\u24D5\uFF46\u1E1F\u0192\uA77C]/g,
'g': /[\u0067\u24D6\uFF47\u01F5\u011D\u1E21\u011F\u0121\u01E7\u0123\u01E5\u0260\uA7A1\u1D79\uA77F]/g,
'h': /[\u0068\u24D7\uFF48\u0125\u1E23\u1E27\u021F\u1E25\u1E29\u1E2B\u1E96\u0127\u2C68\u2C76\u0265]/g,
'hv': /[\u0195]/g,
'i': /[\u0069\u24D8\uFF49\u00EC\u00ED\u00EE\u0129\u012B\u012D\u00EF\u1E2F\u1EC9\u01D0\u0209\u020B\u1ECB\u012F\u1E2D\u0268\u0131]/g,
'j': /[\u006A\u24D9\uFF4A\u0135\u01F0\u0249]/g,
'k': /[\u006B\u24DA\uFF4B\u1E31\u01E9\u1E33\u0137\u1E35\u0199\u2C6A\uA741\uA743\uA745\uA7A3]/g,
'l': /[\u006C\u24DB\uFF4C\u0140\u013A\u013E\u1E37\u1E39\u013C\u1E3D\u1E3B\u017F\u0142\u019A\u026B\u2C61\uA749\uA781\uA747]/g,
'lj': /[\u01C9]/g,
'm': /[\u006D\u24DC\uFF4D\u1E3F\u1E41\u1E43\u0271\u026F]/g,
'n': /[\u006E\u24DD\uFF4E\u01F9\u0144\u00F1\u1E45\u0148\u1E47\u0146\u1E4B\u1E49\u019E\u0272\u0149\uA791\uA7A5]/g,
'nj': /[\u01CC]/g,
'o': /[\u006F\u24DE\uFF4F\u00F2\u00F3\u00F4\u1ED3\u1ED1\u1ED7\u1ED5\u00F5\u1E4D\u022D\u1E4F\u014D\u1E51\u1E53\u014F\u022F\u0231\u00F6\u022B\u1ECF\u0151\u01D2\u020D\u020F\u01A1\u1EDD\u1EDB\u1EE1\u1EDF\u1EE3\u1ECD\u1ED9\u01EB\u01ED\u00F8\u01FF\u0254\uA74B\uA74D\u0275]/g,
'oi': /[\u01A3]/g,
'ou': /[\u0223]/g,
'oo': /[\uA74F]/g,
'p': /[\u0070\u24DF\uFF50\u1E55\u1E57\u01A5\u1D7D\uA751\uA753\uA755]/g,
'q': /[\u0071\u24E0\uFF51\u024B\uA757\uA759]/g,
'r': /[\u0072\u24E1\uFF52\u0155\u1E59\u0159\u0211\u0213\u1E5B\u1E5D\u0157\u1E5F\u024D\u027D\uA75B\uA7A7\uA783]/g,
's': /[\u0073\u24E2\uFF53\u00DF\u015B\u1E65\u015D\u1E61\u0161\u1E67\u1E63\u1E69\u0219\u015F\u023F\uA7A9\uA785\u1E9B]/g,
'ss': /[\u1E9E]/g,
't': /[\u0074\u24E3\uFF54\u1E6B\u1E97\u0165\u1E6D\u021B\u0163\u1E71\u1E6F\u0167\u01AD\u0288\u2C66\uA787]/g,
'tz': /[\uA729]/g,
'u': /[\u0075\u24E4\uFF55\u00F9\u00FA\u00FB\u0169\u1E79\u016B\u1E7B\u016D\u00FC\u01DC\u01D8\u01D6\u01DA\u1EE7\u016F\u0171\u01D4\u0215\u0217\u01B0\u1EEB\u1EE9\u1EEF\u1EED\u1EF1\u1EE5\u1E73\u0173\u1E77\u1E75\u0289]/g,
'v': /[\u0076\u24E5\uFF56\u1E7D\u1E7F\u028B\uA75F\u028C]/g,
'vy': /[\uA761]/g,
'w': /[\u0077\u24E6\uFF57\u1E81\u1E83\u0175\u1E87\u1E85\u1E98\u1E89\u2C73]/g,
'x': /[\u0078\u24E7\uFF58\u1E8B\u1E8D]/g,
'y': /[\u0079\u24E8\uFF59\u1EF3\u00FD\u0177\u1EF9\u0233\u1E8F\u00FF\u1EF7\u1E99\u1EF5\u01B4\u024F\u1EFF]/g,
'z': /[\u007A\u24E9\uFF5A\u017A\u1E91\u017C\u017E\u1E93\u1E95\u01B6\u0225\u0240\u2C6C\uA763]/g,
'A': /[\u0041\u24B6\uFF21\u00C0\u00C1\u00C2\u1EA6\u1EA4\u1EAA\u1EA8\u00C3\u0100\u0102\u1EB0\u1EAE\u1EB4\u1EB2\u0226\u01E0\u00C4\u01DE\u1EA2\u00C5\u01FA\u01CD\u0200\u0202\u1EA0\u1EAC\u1EB6\u1E00\u0104\u023A\u2C6F]/g,
'AA': /[\uA732]/g,
'AE': /[\u00C6\u01FC\u01E2]/g,
'AO': /[\uA734]/g,
'AU': /[\uA736]/g,
'AV': /[\uA738\uA73A]/g,
'AY': /[\uA73C]/g,
'B': /[\u0042\u24B7\uFF22\u1E02\u1E04\u1E06\u0243\u0182\u0181]/g,
'C': /[\u0043\u24B8\uFF23\u0106\u0108\u010A\u010C\u00C7\u1E08\u0187\u023B\uA73E]/g,
'D': /[\u0044\u24B9\uFF24\u1E0A\u010E\u1E0C\u1E10\u1E12\u1E0E\u0110\u018B\u018A\u0189\uA779]/g,
'DZ': /[\u01F1\u01C4]/g,
'Dz': /[\u01F2\u01C5]/g,
'E': /[\u0045\u24BA\uFF25\u00C8\u00C9\u00CA\u1EC0\u1EBE\u1EC4\u1EC2\u1EBC\u0112\u1E14\u1E16\u0114\u0116\u00CB\u1EBA\u011A\u0204\u0206\u1EB8\u1EC6\u0228\u1E1C\u0118\u1E18\u1E1A\u0190\u018E]/g,
'F': /[\u0046\u24BB\uFF26\u1E1E\u0191\uA77B]/g,
'G': /[\u0047\u24BC\uFF27\u01F4\u011C\u1E20\u011E\u0120\u01E6\u0122\u01E4\u0193\uA7A0\uA77D\uA77E]/g,
'H': /[\u0048\u24BD\uFF28\u0124\u1E22\u1E26\u021E\u1E24\u1E28\u1E2A\u0126\u2C67\u2C75\uA78D]/g,
'I': /[\u0049\u24BE\uFF29\u00CC\u00CD\u00CE\u0128\u012A\u012C\u0130\u00CF\u1E2E\u1EC8\u01CF\u0208\u020A\u1ECA\u012E\u1E2C\u0197]/g,
'J': /[\u004A\u24BF\uFF2A\u0134\u0248]/g,
'K': /[\u004B\u24C0\uFF2B\u1E30\u01E8\u1E32\u0136\u1E34\u0198\u2C69\uA740\uA742\uA744\uA7A2]/g,
'L': /[\u004C\u24C1\uFF2C\u013F\u0139\u013D\u1E36\u1E38\u013B\u1E3C\u1E3A\u0141\u023D\u2C62\u2C60\uA748\uA746\uA780]/g,
'LJ': /[\u01C7]/g,
'Lj': /[\u01C8]/g,
'M': /[\u004D\u24C2\uFF2D\u1E3E\u1E40\u1E42\u2C6E\u019C]/g,
'N': /[\u004E\u24C3\uFF2E\u01F8\u0143\u00D1\u1E44\u0147\u1E46\u0145\u1E4A\u1E48\u0220\u019D\uA790\uA7A4]/g,
'NJ': /[\u01CA]/g,
'Nj': /[\u01CB]/g,
'O': /[\u004F\u24C4\uFF2F\u00D2\u00D3\u00D4\u1ED2\u1ED0\u1ED6\u1ED4\u00D5\u1E4C\u022C\u1E4E\u014C\u1E50\u1E52\u014E\u022E\u0230\u00D6\u022A\u1ECE\u0150\u01D1\u020C\u020E\u01A0\u1EDC\u1EDA\u1EE0\u1EDE\u1EE2\u1ECC\u1ED8\u01EA\u01EC\u00D8\u01FE\u0186\u019F\uA74A\uA74C]/g,
'OI': /[\u01A2]/g,
'OO': /[\uA74E]/g,
'OU': /[\u0222]/g,
'P': /[\u0050\u24C5\uFF30\u1E54\u1E56\u01A4\u2C63\uA750\uA752\uA754]/g,
'Q': /[\u0051\u24C6\uFF31\uA756\uA758\u024A]/g,
'R': /[\u0052\u24C7\uFF32\u0154\u1E58\u0158\u0210\u0212\u1E5A\u1E5C\u0156\u1E5E\u024C\u2C64\uA75A\uA7A6\uA782]/g,
'S': /[\u0053\u24C8\uFF33\u015A\u1E64\u015C\u1E60\u0160\u1E66\u1E62\u1E68\u0218\u015E\u2C7E\uA7A8\uA784]/g,
'T': /[\u0054\u24C9\uFF34\u1E6A\u0164\u1E6C\u021A\u0162\u1E70\u1E6E\u0166\u01AC\u01AE\u023E\uA786]/g,
'TZ': /[\uA728]/g,
'U': /[\u0055\u24CA\uFF35\u00D9\u00DA\u00DB\u0168\u1E78\u016A\u1E7A\u016C\u00DC\u01DB\u01D7\u01D5\u01D9\u1EE6\u016E\u0170\u01D3\u0214\u0216\u01AF\u1EEA\u1EE8\u1EEE\u1EEC\u1EF0\u1EE4\u1E72\u0172\u1E76\u1E74\u0244]/g,
'V': /[\u0056\u24CB\uFF36\u1E7C\u1E7E\u01B2\uA75E\u0245]/g,
'VY': /[\uA760]/g,
'W': /[\u0057\u24CC\uFF37\u1E80\u1E82\u0174\u1E86\u1E84\u1E88\u2C72]/g,
'X': /[\u0058\u24CD\uFF38\u1E8A\u1E8C]/g,
'Y': /[\u0059\u24CE\uFF39\u1EF2\u00DD\u0176\u1EF8\u0232\u1E8E\u0178\u1EF6\u1EF4\u01B3\u024E\u1EFE]/g,
'Z': /[\u005A\u24CF\uFF3A\u0179\u1E90\u017B\u017D\u1E92\u1E94\u01B5\u0224\u2C7F\u2C6B\uA762]/g
},
isDiacriticsRE = (function() {
var re = [];
Y.Object.each(DIACRITICS, function(v) {
re.push(v.source);
});
return re.join("|");
}()),
removeDiacritics = function(str) {
Y.Object.each(DIACRITICS, function(v, k) {
str = str.replace(v, k);
});
return str;
};
return {
removeDiacritics: removeDiacritics,
isDiacritics: isDiacriticsRE
};
}());
});<|fim▁end|> | * Java hashCode implementation
* @param {String} value to hash
* @returns {Number}
*/ |
<|file_name|>doc.go<|end_file_name|><|fim▁begin|>package text
import (<|fim▁hole|><|fim▁end|> | _ "qiniupkg.com/text/tpl.v1"
) |
<|file_name|>reverse-string.rs<|end_file_name|><|fim▁begin|>//! Tests for reverse-string
//!
//! Generated by [script][script] using [canonical data][canonical-data]
//!
//! [script]: https://github.com/exercism/rust/blob/master/bin/init_exercise.py
//! [canonical-data]: https://raw.githubusercontent.com/exercism/problem-specifications/master/exercises/reverse-string/canonical_data.json
extern crate reverse_string;
use reverse_string::*;
/// Process a single test case for the property `reverse`
fn process_reverse_case(input: &str, expected: &str) {
assert_eq!(&reverse(input), expected)
}
#[test]
/// empty string<|fim▁hole|>}
#[test]
/// a word
fn test_a_word() {
process_reverse_case("robot", "tobor");
}
#[test]
/// a capitalized word
fn test_a_capitalized_word() {
process_reverse_case("Ramen", "nemaR");
}
#[test]
/// a sentence with punctuation
fn test_a_sentence_with_punctuation() {
process_reverse_case("I'm hungry!", "!yrgnuh m'I");
}
#[test]
/// a palindrome
fn test_a_palindrome() {
process_reverse_case("racecar", "racecar");
}<|fim▁end|> | fn test_empty_string() {
process_reverse_case("", ""); |
<|file_name|>osm_field.js<|end_file_name|><|fim▁begin|>/* jQuery OSM field
2014 by Thomas netAction Schmidt for Sinnwerkstatt
https://www.sinnwerkstatt.com/<|fim▁hole|>(function ($) {
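    // Illustrative markup and initialisation (ids below are assumptions for the example):
    //
    //   <input id="id_location" type="text"
    //          data-lat-field="lat" data-lon-field="lon" data-data-field="osm">
    //   <input id="id_lat" type="hidden"><input id="id_lon" type="hidden">
    //   <input id="id_osm" type="hidden">
    //
    //   $('#id_location').osmfield();
    //
    // Without the data-* attributes the plugin falls back to "<input id>_lat",
    // "<input id>_lon" and "<input id>_data" for the companion fields.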
$.fn.osmfield = function () {
return this.each(function () {
// Create HTML elements for osmfield
var lang, map, marker, tile_url,
idAttribute = $(this).attr('id'),
idLatElement = $(this).data('lat-field'),
idLonElement = $(this).data('lon-field'),
idDataElement = $(this).data('data-field'),
osmfieldElement = $(this);
if (idLatElement === undefined) {
idLatElement = '#' + idAttribute + '_lat';
} else {
idLatElement = '#id_' + idLatElement;
}
if (idLonElement === undefined) {
idLonElement = '#' + idAttribute + '_lon';
} else {
idLonElement = '#id_' + idLonElement;
}
if (idDataElement === undefined) {
idDataElement = '#' + idAttribute + '_data';
} else {
idDataElement = '#id_' + idDataElement;
}
$(this).addClass('osmfield-input');
// Create map container when not existent.
// Wrapper is only for CSS.
if (!$('#' + idAttribute + '-map').length) {
$(this).before('<div class="osmfield-wrapper"><div id="' + idAttribute + '-map"></div></div>');
}
$(this).data('lat-element', $(idLatElement));
$(this).data('lng-element', $(idLonElement));
$(this).data('data-element', $(idDataElement));
$(this).data('map-element', $('#' + idAttribute + '-map'));
$(this).data('map-element').addClass('osmfield-map');
// initialize Leaflet map, tile layer and marker
map = L.map(osmfieldElement.data('map-element')[0]).setView([0, 0], 15);
tile_url = (window.location.protocol === 'http:') ?
'http://{s}.basemaps.cartocdn.com/light_all/{z}/{x}/{y}@2x.png' :
'https://cartodb-basemaps-{s}.global.ssl.fastly.net/light_all/{z}/{x}/{y}.png';
L.tileLayer(tile_url, {
attribution: 'CartoDB | Open Streetmap',
maxZoom: 18
}).addTo(map);
marker = L.marker([0, 0], {draggable: true}).addTo(map);
// bubble up the dom to find out language or use 'en' as default
lang = osmfieldElement.closest('[lang]').attr('lang');
if (lang) {
lang = lang.split('-');
lang = lang[0];
} else {
lang = 'en';
}
osmfieldElement.data('language', lang);
// magic that happens when marker in map is dragged
(function (osmfieldElement) {
marker.on('dragend', function (event) {
var language = osmfieldElement.data('language'),
position = event.target.getLatLng(),
url = 'https://nominatim.openstreetmap.org/reverse?json_callback=?',
zoom = map.getZoom();
map.panTo(position);
osmfieldElement.data('lat-element').val(position.lat);
osmfieldElement.data('lng-element').val(position.lng);
$.getJSON(
url,
{
format: 'json',
lat: position.lat,
lon: position.lng,
zoom: zoom,
addressdetails: 1,
'accept-language': language
},
function (data) {
osmfieldElement.val(data.display_name);
osmfieldElement.data('data-element').val(JSON.stringify(data));
}
);
});
})(osmfieldElement);
// User enters something in INPUT field
osmfieldElement.on('propertychange keyup input paste change', function () {
if ($(this).data('oldvalue') === $(this).val()) {
return;
}
$(this).data('oldvalue', $(this).val());
function search(nameInputElement) {
var language = nameInputElement.data('language'),
url = 'https://nominatim.openstreetmap.org/search?json_callback=?';
(function (osmfieldElement) {
// We could kill previous ajax requests here.
$.getJSON(
url,
{
format: 'json',
q: nameInputElement.val(),
addressdetails: 1,
'accept-language': language
},
function (data) {
// coordinates found for this address?
if (data.length) {
var lat = data[0].lat,
lng = data[0].lon,
// name = data[0].display_name,
newLatLng = new L.LatLng(lat, lng),
eldata = JSON.stringify(data[0]);
osmfieldElement.data('lat-element').val(lat);
osmfieldElement.data('lng-element').val(lng);
osmfieldElement.data('data-element').val(eldata);
marker.setLatLng(newLatLng);
map.panTo(newLatLng);
} else {
osmfieldElement.data('map-element').slideUp();
osmfieldElement.data('lat-element').val('');
osmfieldElement.data('lng-element').val('');
osmfieldElement.data('data-element').val('');
}
// Show map when INPUT has focus and coordinates are known
if (
osmfieldElement.is(":focus") &&
osmfieldElement.data('lat-element').val() &&
osmfieldElement.data('lng-element').val()
) {
osmfieldElement.data('map-element').slideDown(function () {
window.dispatchEvent(new Event('resize'));
});
} else {
osmfieldElement.data('map-element').slideUp();
}
}
);
})(nameInputElement);
}
// Wait 500ms for INPUT updates until Ajax request
clearTimeout($.data(this, 'timer'));
var nameInputElement = $(this),
wait = setTimeout(function () { search(nameInputElement); }, 500);
$(this).data('timer', wait);
});
// Initialize INPUT, map and data attributes
osmfieldElement.data('map-element').hide();
// Use start values if given
if (osmfieldElement.data('lat-element').val() &&
osmfieldElement.data('lng-element').val()) {
var newLatLng = new L.LatLng(
osmfieldElement.data('lat-element').val(),
osmfieldElement.data('lng-element').val()
);
marker.setLatLng(newLatLng);
map.panTo(newLatLng);
} else {
// Maybe OpenStreetMap has coordinates or hide the map
osmfieldElement.trigger('change');
}
// Hide map when clicking outside
$(document).click(function (event) {
// A child of INPUT or map was clicked
var thisosmfield = $(event.target).closest('.osmfield-input, .osmfield-map');
if (thisosmfield.length) {
// hide all maps except of this
if (thisosmfield.hasClass('osmfield-input')) {
thisosmfield = thisosmfield.data('map-element');
}
$('.osmfield-map').not(thisosmfield).slideUp();
} else {
// hide all
$('.osmfield-map').slideUp();
}
});
// Show map when INPUT gets focus and position is known
(function (osmfieldElement) {
osmfieldElement.focus(function () {
if (osmfieldElement.data('lat-element').val() &&
osmfieldElement.data('lng-element').val()) {
osmfieldElement.data('map-element').slideDown(function () {
window.dispatchEvent(new Event('resize'));
});
}
});
})(osmfieldElement);
}); // each osmfield element
}; // jQuery plugin end
}(jQuery));<|fim▁end|> | MIT License */ |
<|file_name|>ioapic.rs<|end_file_name|><|fim▁begin|>use core::ptr::write_volatile;
use core::ptr::read_volatile;
use kernel::mm::*;
const REG_ID: u32 = 0x00;
const REG_VER: u32 = 0x01;
const REG_ARB: u32 = 0x02;
const fn reg_redtbl_low(num: u32) -> u32 {
0x10 + (2*num)
}
const fn reg_redtbl_high(num: u32) -> u32 {
0x11 + (2*num)
}
struct RegId(u32);
struct RegVer(u32);
struct RegRedTblL(u32);
struct RegRedTblH(u32);
impl RegId {
pub const fn id(&self) -> u32 {
(self.0 >> 24) & 0b1111
}
}
impl RegVer {
pub const fn apic_version(&self) -> u32 {
self.0 & 0xFF
}
pub const fn max_red_entry(&self) -> u32 {
(self.0 >> 16) & 0xFF
}
}
impl RegRedTblL {
pub const fn masked(&self) -> bool {
self.0 & (1 << 16) != 0
}
pub fn set_masked(&mut self, masked: bool) {
if masked {
self.0 |= 1 << 16;
} else {
self.0 &= !(1 << 16);
}
}
pub fn set_vector(&mut self, idx: u32) {
self.0 = (self.0 & !(0xFFu32)) | (idx & 0xFF);
}
pub const fn vector(&self) -> u32 {
self.0 & 0xFF
}
}
impl RegRedTblH {
pub fn set_destination(&mut self, dest: u32) {
        self.0 = (self.0 & !(0xFFu32 << 24)) | ((dest & 0xFF) << 24);
}
    pub const fn destination(&self) -> u32 {
self.0 >> 24
}
}
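// Redirection-table entries occupy two 32-bit registers per input pin
// (0x10 + 2*n and 0x11 + 2*n, see reg_redtbl_low/high above). In the low dword,
// bits 0-7 hold the IDT vector and bit 16 is the mask flag; in the high dword,
// bits 24-31 hold the destination APIC id. Illustrative sketch (not called
// anywhere in this module):
//
//     let mut low = RegRedTblL(0);
//     low.set_vector(0x21);  // deliver the IRQ through IDT entry 0x21
//     low.set_masked(false); // leave it unmasked
//     assert_eq!(low.vector(), 0x21);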
pub struct IOApic {
ioapic_base: Option<MappedAddr>,
}
impl IOApic {
fn read(&self, reg: u32) -> u32 {
if let Some(base) = self.ioapic_base {
unsafe {
write_volatile::<u32>(
base.0 as *mut u32,
reg
);
return read_volatile::<u32>(
(base.0 + 0x10) as *const u32
);
}
} else {
panic!("IOApic module not initialised");
}<|fim▁hole|> unsafe {
write_volatile::<u32>(
base.0 as *mut u32,
reg
);
write_volatile::<u32>(
(base.0 + 0x10) as *mut u32,
value
);
}
} else {
panic!("IOApic module not initialised");
}
}
pub fn id(&self) -> u32 {
RegId(self.read(REG_ID)).id()
}
pub fn identification(&self) -> u32 {
RegId(self.read(REG_ARB)).id()
}
pub fn max_red_entry(&self) -> u32 {
RegVer(self.read(REG_VER)).max_red_entry()
}
pub fn version(&self) -> u32 {
RegVer(self.read(REG_VER)).apic_version()
}
pub fn mask_interrupt(&mut self, i: u32, masked: bool) {
let mut l = RegRedTblL(self.read(reg_redtbl_low(i)));
let h = RegRedTblH(self.read(reg_redtbl_high(i)));
l.set_masked(masked);
self.write(reg_redtbl_low(i), l.0);
self.write(reg_redtbl_high(i), h.0);
}
pub fn set_int(&mut self, i: u32, idt_idx: u32) {
let mut l = RegRedTblL(self.read(reg_redtbl_low(i)));
let mut h = RegRedTblH(self.read(reg_redtbl_high(i)));
l.set_vector(idt_idx);
l.set_masked(false);
h.set_destination(0);
self.write(reg_redtbl_low(i), l.0);
self.write(reg_redtbl_high(i), h.0);
}
pub const fn new() -> IOApic {
IOApic {
ioapic_base: None
}
}
pub fn init(&mut self, base: MappedAddr) {
self.ioapic_base = Some(base);
for i in 0..self.max_red_entry() + 1 {
self.mask_interrupt(i, true);
}
}
}<|fim▁end|> | }
fn write(&self, reg: u32, value: u32) {
if let Some(base) = self.ioapic_base { |
<|file_name|>settings_api_bubble_helpers.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/ui/extensions/settings_api_bubble_helpers.h"
#include <utility>
#include "build/build_config.h"
#include "chrome/browser/extensions/ntp_overridden_bubble_delegate.h"
#include "chrome/browser/extensions/settings_api_bubble_delegate.h"
#include "chrome/browser/extensions/settings_api_helpers.h"
#include "chrome/browser/ui/browser_dialogs.h"
#include "chrome/browser/ui/browser_finder.h"
#include "chrome/browser/ui/browser_window.h"
#include "chrome/browser/ui/extensions/extension_message_bubble_bridge.h"
#include "chrome/browser/ui/extensions/extension_settings_overridden_dialog.h"
#include "chrome/browser/ui/extensions/settings_overridden_params_providers.h"
#include "chrome/browser/ui/tabs/tab_strip_model.h"
#include "chrome/browser/ui/toolbar/toolbar_actions_bar.h"
#include "chrome/browser/ui/ui_features.h"
#include "chrome/common/extensions/manifest_handlers/settings_overrides_handler.h"
#include "chrome/common/url_constants.h"
#include "content/public/browser/browser_url_handler.h"
#include "content/public/browser/navigation_entry.h"
#include "extensions/common/constants.h"
namespace extensions {
namespace {
// Whether the NTP post-install UI is enabled. By default, this is limited to
// Windows, Mac, and ChromeOS, but can be overridden for testing.
#if defined(OS_WIN) || defined(OS_MACOSX) || defined(OS_CHROMEOS)
bool g_ntp_post_install_ui_enabled = true;
#else
bool g_ntp_post_install_ui_enabled = false;
#endif
#if defined(OS_WIN) || defined(OS_MACOSX)
void ShowSettingsApiBubble(SettingsApiOverrideType type,
Browser* browser) {
ToolbarActionsModel* model = ToolbarActionsModel::Get(browser->profile());
if (model->has_active_bubble())
return;
std::unique_ptr<ExtensionMessageBubbleController> settings_api_bubble(
new ExtensionMessageBubbleController(
new SettingsApiBubbleDelegate(browser->profile(), type), browser));
if (!settings_api_bubble->ShouldShow())
return;
settings_api_bubble->SetIsActiveBubble();
std::unique_ptr<ToolbarActionsBarBubbleDelegate> bridge(
new ExtensionMessageBubbleBridge(std::move(settings_api_bubble)));
browser->window()->GetExtensionsContainer()->ShowToolbarActionBubbleAsync(
std::move(bridge));
}
#endif
} // namespace
void SetNtpPostInstallUiEnabledForTesting(bool enabled) {
g_ntp_post_install_ui_enabled = enabled;
}<|fim▁hole|>#endif
}
void MaybeShowExtensionControlledSearchNotification(
content::WebContents* web_contents,
AutocompleteMatch::Type match_type) {
#if defined(OS_WIN) || defined(OS_MACOSX)
if (!AutocompleteMatch::IsSearchType(match_type) ||
match_type == AutocompleteMatchType::SEARCH_OTHER_ENGINE) {
return;
}
Browser* browser = chrome::FindBrowserWithWebContents(web_contents);
if (!browser)
return;
if (base::FeatureList::IsEnabled(
features::kExtensionSettingsOverriddenDialogs)) {
base::Optional<ExtensionSettingsOverriddenDialog::Params> params =
settings_overridden_params::GetSearchOverriddenParams(
browser->profile());
if (!params)
return;
auto dialog = std::make_unique<ExtensionSettingsOverriddenDialog>(
std::move(*params), browser->profile());
if (!dialog->ShouldShow())
return;
chrome::ShowExtensionSettingsOverriddenDialog(std::move(dialog), browser);
} else {
ShowSettingsApiBubble(BUBBLE_TYPE_SEARCH_ENGINE, browser);
}
#endif
}
void MaybeShowExtensionControlledNewTabPage(
Browser* browser, content::WebContents* web_contents) {
if (!g_ntp_post_install_ui_enabled)
return;
// Acknowledge existing extensions if necessary.
NtpOverriddenBubbleDelegate::MaybeAcknowledgeExistingNtpExtensions(
browser->profile());
// Jump through a series of hoops to see if the web contents is pointing to
// an extension-controlled NTP.
// TODO(devlin): Some of this is redundant with the checks in the bubble/
// dialog. We should consolidate, but that'll be simpler once we only have
// one UI option. In the meantime, extra checks don't hurt.
content::NavigationEntry* entry =
web_contents->GetController().GetVisibleEntry();
if (!entry)
return;
GURL active_url = entry->GetURL();
if (!active_url.SchemeIs(extensions::kExtensionScheme))
return; // Not a URL that we care about.
// See if the current active URL matches a transformed NewTab URL.
GURL ntp_url(chrome::kChromeUINewTabURL);
content::BrowserURLHandler::GetInstance()->RewriteURLIfNecessary(
&ntp_url, web_contents->GetBrowserContext());
if (ntp_url != active_url)
return; // Not being overridden by an extension.
Profile* const profile = browser->profile();
ToolbarActionsModel* model = ToolbarActionsModel::Get(profile);
if (model->has_active_bubble())
return;
if (base::FeatureList::IsEnabled(
features::kExtensionSettingsOverriddenDialogs)) {
base::Optional<ExtensionSettingsOverriddenDialog::Params> params =
settings_overridden_params::GetNtpOverriddenParams(profile);
if (!params)
return;
auto dialog = std::make_unique<ExtensionSettingsOverriddenDialog>(
std::move(*params), profile);
if (!dialog->ShouldShow())
return;
chrome::ShowExtensionSettingsOverriddenDialog(std::move(dialog), browser);
return;
}
std::unique_ptr<ExtensionMessageBubbleController> ntp_overridden_bubble(
new ExtensionMessageBubbleController(
new NtpOverriddenBubbleDelegate(profile), browser));
if (!ntp_overridden_bubble->ShouldShow())
return;
ntp_overridden_bubble->SetIsActiveBubble();
std::unique_ptr<ToolbarActionsBarBubbleDelegate> bridge(
new ExtensionMessageBubbleBridge(std::move(ntp_overridden_bubble)));
browser->window()->GetExtensionsContainer()->ShowToolbarActionBubbleAsync(
std::move(bridge));
}
} // namespace extensions<|fim▁end|> |
void MaybeShowExtensionControlledHomeNotification(Browser* browser) {
#if defined(OS_WIN) || defined(OS_MACOSX)
ShowSettingsApiBubble(BUBBLE_TYPE_HOME_PAGE, browser); |
<|file_name|>inferno.js<|end_file_name|><|fim▁begin|>(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
typeof define === 'function' && define.amd ? define(['exports'], factory) :
(factory((global.Inferno = global.Inferno || {})));
}(this, (function (exports) { 'use strict';
var NO_OP = '$NO_OP';
var ERROR_MSG = 'a runtime error occured! Use Inferno in development environment to find the error.';
// This should be boolean and not reference to window.document
var isBrowser = !!(typeof window !== 'undefined' && window.document);
// this is MUCH faster than .constructor === Array and instanceof Array
// in Node 7 and the later versions of V8, slower in older versions though
var isArray = Array.isArray;
function isStringOrNumber(o) {
var type = typeof o;
return type === 'string' || type === 'number';
}
function isNullOrUndef(o) {
return isUndefined(o) || isNull(o);
}
function isInvalid(o) {
return isNull(o) || o === false || isTrue(o) || isUndefined(o);
}
function isFunction(o) {
return typeof o === 'function';
}
function isString(o) {
return typeof o === 'string';
}
function isNumber(o) {
return typeof o === 'number';
}
function isNull(o) {
return o === null;
}
function isTrue(o) {
return o === true;
}
function isUndefined(o) {
return o === void 0;
}
function isDefined(o) {
return o !== void 0;
}
function isObject(o) {
return typeof o === 'object';
}
function throwError(message) {
if (!message) {
message = ERROR_MSG;
}
throw new Error(("Inferno Error: " + message));
}
function warning(message) {
// tslint:disable-next-line:no-console
console.error(message);
}
function combineFrom(first, second) {
var out = {};
if (first) {
for (var key in first) {
out[key] = first[key];
}
}
if (second) {
for (var key$1 in second) {
out[key$1] = second[key$1];
}
}
return out;
}
function getTagName(input) {
var tagName;
if (isArray(input)) {
var arrayText = input.length > 3 ? input.slice(0, 3).toString() + ',...' : input.toString();
tagName = 'Array(' + arrayText + ')';
}
else if (isStringOrNumber(input)) {
tagName = 'Text(' + input + ')';
}
else if (isInvalid(input)) {
tagName = 'InvalidVNode(' + input + ')';
}
else {
var flags = input.flags;
if (flags & 481 /* Element */) {
tagName = "<" + (input.type) + (input.className ? ' class="' + input.className + '"' : '') + ">";
}
else if (flags & 16 /* Text */) {
tagName = "Text(" + (input.children) + ")";
}
else if (flags & 1024 /* Portal */) {
tagName = "Portal*";
}
else {
var type = input.type;
// Fallback for IE
var componentName = type.name || type.displayName || type.constructor.name || (type.toString().match(/^function\s*([^\s(]+)/) || [])[1];
tagName = "<" + componentName + " />";
}
}
return '>> ' + tagName + '\n';
}
function DEV_ValidateKeys(vNodeTree, vNode, forceKeyed) {
var foundKeys = [];
for (var i = 0, len = vNodeTree.length; i < len; i++) {
var childNode = vNodeTree[i];
if (isArray(childNode)) {
return 'Encountered ARRAY in mount, array must be flattened, or normalize used. Location: \n' + getTagName(childNode);
}
if (isInvalid(childNode)) {
if (forceKeyed) {
return 'Encountered invalid node when preparing to keyed algorithm. Location: \n' + getTagName(childNode);
}
else if (foundKeys.length !== 0) {
return 'Encountered invalid node with mixed keys. Location: \n' + getTagName(childNode);
}
continue;
}
if (typeof childNode === 'object') {
childNode.isValidated = true;
}
var key = childNode.key;
if (!isNullOrUndef(key) && !isStringOrNumber(key)) {
return 'Encountered child vNode where key property is not string or number. Location: \n' + getTagName(childNode);
}
var children = childNode.children;
var childFlags = childNode.childFlags;
if (!isInvalid(children)) {
var val = (void 0);
if (childFlags & 12 /* MultipleChildren */) {
val = DEV_ValidateKeys(children, childNode, childNode.childFlags & 8 /* HasKeyedChildren */);
}
else if (childFlags === 2 /* HasVNodeChildren */) {
val = DEV_ValidateKeys([children], childNode, childNode.childFlags & 8 /* HasKeyedChildren */);
}
if (val) {
val += getTagName(childNode);
return val;
}
}
if (forceKeyed && isNullOrUndef(key)) {
return ('Encountered child without key during keyed algorithm. If this error points to Array make sure children is flat list. Location: \n' +
getTagName(childNode));
}
else if (!forceKeyed && isNullOrUndef(key)) {
if (foundKeys.length !== 0) {
return 'Encountered children with key missing. Location: \n' + getTagName(childNode);
}
continue;
}
if (foundKeys.indexOf(key) > -1) {
return 'Encountered two children with same key: {' + key + '}. Location: \n' + getTagName(childNode);
}
foundKeys.push(key);
}
}
function validateVNodeElementChildren(vNode) {
{
if (vNode.childFlags & 1 /* HasInvalidChildren */) {
return;
}
if (vNode.flags & 64 /* InputElement */) {
throwError("input elements can't have children.");
}
if (vNode.flags & 128 /* TextareaElement */) {
throwError("textarea elements can't have children.");
}
if (vNode.flags & 481 /* Element */) {
var voidTypes = ['area', 'base', 'br', 'col', 'command', 'embed', 'hr', 'img', 'input', 'keygen', 'link', 'meta', 'param', 'source', 'track', 'wbr'];
var tag = vNode.type.toLowerCase();
if (tag === 'media') {
throwError("media elements can't have children.");
}
var idx = voidTypes.indexOf(tag);
if (idx !== -1) {
throwError(((voidTypes[idx]) + " elements can't have children."));
}
}
}
}
function validateKeys(vNode) {
{
// Checks if there is any key missing or duplicate keys
if (vNode.isValidated === false && vNode.children && vNode.flags & 481 /* Element */) {
var error = DEV_ValidateKeys(Array.isArray(vNode.children) ? vNode.children : [vNode.children], vNode, (vNode.childFlags & 8 /* HasKeyedChildren */) > 0);
if (error) {
throwError(error + getTagName(vNode));
}
}
vNode.isValidated = true;
}
}
var keyPrefix = '$';
function getVNode(childFlags, children, className, flags, key, props, ref, type) {
{
return {
childFlags: childFlags,
children: children,
className: className,
dom: null,
flags: flags,
isValidated: false,
key: key === void 0 ? null : key,
parentVNode: null,
props: props === void 0 ? null : props,
ref: ref === void 0 ? null : ref,
type: type
};
}
return {
childFlags: childFlags,
children: children,
className: className,
dom: null,
flags: flags,
key: key === void 0 ? null : key,
parentVNode: null,
props: props === void 0 ? null : props,
ref: ref === void 0 ? null : ref,
type: type
};
}
function createVNode(flags, type, className, children, childFlags, props, key, ref) {
{
if (flags & 14 /* Component */) {
throwError('Creating Component vNodes using createVNode is not allowed. Use Inferno.createComponentVNode method.');
}
}
var childFlag = childFlags === void 0 ? 1 /* HasInvalidChildren */ : childFlags;
var vNode = getVNode(childFlag, children, className, flags, key, props, ref, type);
var optsVNode = options.createVNode;
if (typeof optsVNode === 'function') {
optsVNode(vNode);
}
if (childFlag === 0 /* UnknownChildren */) {
normalizeChildren(vNode, vNode.children);
}
{
validateVNodeElementChildren(vNode);
}
return vNode;
}
function createComponentVNode(flags, type, props, key, ref) {
{
if (flags & 1 /* HtmlElement */) {
throwError('Creating element vNodes using createComponentVNode is not allowed. Use Inferno.createVNode method.');
}
}
if ((flags & 2 /* ComponentUnknown */) > 0) {
flags = isDefined(type.prototype) && isFunction(type.prototype.render) ? 4 /* ComponentClass */ : 8 /* ComponentFunction */;
}
// set default props
var defaultProps = type.defaultProps;
if (!isNullOrUndef(defaultProps)) {
if (!props) {
props = {}; // Props can be referenced and modified at application level so always create new object
}
for (var prop in defaultProps) {
if (isUndefined(props[prop])) {
props[prop] = defaultProps[prop];
}
}
}
if ((flags & 8 /* ComponentFunction */) > 0) {
var defaultHooks = type.defaultHooks;
if (!isNullOrUndef(defaultHooks)) {
if (!ref) {
// As ref cannot be referenced from application level, we can use the same refs object
ref = defaultHooks;
}
else {
for (var prop$1 in defaultHooks) {
if (isUndefined(ref[prop$1])) {
ref[prop$1] = defaultHooks[prop$1];
}
}
}
}
}
var vNode = getVNode(1 /* HasInvalidChildren */, null, null, flags, key, props, ref, type);
var optsVNode = options.createVNode;
if (isFunction(optsVNode)) {
optsVNode(vNode);
}
return vNode;
}
function createTextVNode(text, key) {
return getVNode(1 /* HasInvalidChildren */, isNullOrUndef(text) ? '' : text, null, 16 /* Text */, key, null, null, null);
}
function normalizeProps(vNode) {
var props = vNode.props;
if (props) {
var flags = vNode.flags;
if (flags & 481 /* Element */) {
if (isDefined(props.children) && isNullOrUndef(vNode.children)) {
normalizeChildren(vNode, props.children);
}
if (isDefined(props.className)) {
vNode.className = props.className || null;
props.className = undefined;
}
}
if (isDefined(props.key)) {
vNode.key = props.key;
props.key = undefined;
}
if (isDefined(props.ref)) {
if (flags & 8 /* ComponentFunction */) {
vNode.ref = combineFrom(vNode.ref, props.ref);
}
else {
vNode.ref = props.ref;
}
props.ref = undefined;
}
}
return vNode;
}
function directClone(vNodeToClone) {
var newVNode;
var flags = vNodeToClone.flags;
if (flags & 14 /* Component */) {
var props;
var propsToClone = vNodeToClone.props;
if (!isNull(propsToClone)) {
props = {};
for (var key in propsToClone) {
props[key] = propsToClone[key];
}
}
newVNode = createComponentVNode(flags, vNodeToClone.type, props, vNodeToClone.key, vNodeToClone.ref);
}
else if (flags & 481 /* Element */) {
var children = vNodeToClone.children;
newVNode = createVNode(flags, vNodeToClone.type, vNodeToClone.className, children, 0 /* UnknownChildren */, vNodeToClone.props, vNodeToClone.key, vNodeToClone.ref);
}
else if (flags & 16 /* Text */) {
newVNode = createTextVNode(vNodeToClone.children, vNodeToClone.key);
}
else if (flags & 1024 /* Portal */) {
newVNode = vNodeToClone;
}
return newVNode;
}
function createVoidVNode() {
return createTextVNode('', null);
}
function _normalizeVNodes(nodes, result, index, currentKey) {
for (var len = nodes.length; index < len; index++) {
var n = nodes[index];
if (!isInvalid(n)) {
var newKey = currentKey + keyPrefix + index;
if (isArray(n)) {
_normalizeVNodes(n, result, 0, newKey);
}
else {
if (isStringOrNumber(n)) {
n = createTextVNode(n, newKey);
}
else {
var oldKey = n.key;
var isPrefixedKey = isString(oldKey) && oldKey[0] === keyPrefix;
if (!isNull(n.dom) || isPrefixedKey) {
n = directClone(n);
}
if (isNull(oldKey) || isPrefixedKey) {
n.key = newKey;
}
else {
n.key = currentKey + oldKey;
}
}
result.push(n);
}
}
}
}
function getFlagsForElementVnode(type) {
if (type === 'svg') {
return 32 /* SvgElement */;
}
if (type === 'input') {
return 64 /* InputElement */;
}
if (type === 'select') {
return 256 /* SelectElement */;
}
if (type === 'textarea') {
return 128 /* TextareaElement */;
}
return 1 /* HtmlElement */;
}
function normalizeChildren(vNode, children) {
var newChildren;
var newChildFlags = 1 /* HasInvalidChildren */;
// Don't change children to match strict equal (===) true in patching
if (isInvalid(children)) {
newChildren = children;
}
else if (isString(children)) {
newChildFlags = 2 /* HasVNodeChildren */;
newChildren = createTextVNode(children);
}
else if (isNumber(children)) {
newChildFlags = 2 /* HasVNodeChildren */;
newChildren = createTextVNode(children + '');
}
else if (isArray(children)) {
var len = children.length;
if (len === 0) {
newChildren = null;
newChildFlags = 1 /* HasInvalidChildren */;
}
else {
// we assign $ which basically means we've flagged this array for future note
// if it comes back again, we need to clone it, as people are using it
// in an immutable way
// tslint:disable-next-line
if (Object.isFrozen(children) || children['$'] === true) {
children = children.slice();
}
newChildFlags = 8 /* HasKeyedChildren */;
for (var i = 0; i < len; i++) {
var n = children[i];
if (isInvalid(n) || isArray(n)) {
newChildren = newChildren || children.slice(0, i);
_normalizeVNodes(children, newChildren, i, '');
break;
}
else if (isStringOrNumber(n)) {
newChildren = newChildren || children.slice(0, i);
newChildren.push(createTextVNode(n, keyPrefix + i));
}
else {
var key = n.key;
var isNullDom = isNull(n.dom);
var isNullKey = isNull(key);
var isPrefixed = !isNullKey && key[0] === keyPrefix;
if (!isNullDom || isNullKey || isPrefixed) {
newChildren = newChildren || children.slice(0, i);
if (!isNullDom || isPrefixed) {
n = directClone(n);
}
if (isNullKey || isPrefixed) {
n.key = keyPrefix + i;
}
newChildren.push(n);
}
else if (newChildren) {
newChildren.push(n);
}
}
}
newChildren = newChildren || children;
newChildren.$ = true;
}
}
else {
newChildren = children;
if (!isNull(children.dom)) {
newChildren = directClone(children);
}
newChildFlags = 2 /* HasVNodeChildren */;
}
vNode.children = newChildren;
vNode.childFlags = newChildFlags;
{
validateVNodeElementChildren(vNode);
}
return vNode;
}
var options = {
afterMount: null,
afterRender: null,
afterUpdate: null,
beforeRender: null,
beforeUnmount: null,
createVNode: null,
roots: []
};
/**
* Links given data to event as first parameter
* @param {*} data data to be linked, it will be available in function as first parameter
* @param {Function} event Function to be called when event occurs
* @returns {{data: *, event: Function}}
*/
function linkEvent(data, event) {
if (isFunction(event)) {
return { data: data, event: event };
}
return null; // Return null when event is invalid, to avoid creating unnecessary event handlers
}
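    // Illustrative use of linkEvent (handler and data names are made up):
    //   function onRowClick(row, event) { console.log(row.id, event.type); }
    //   // in a vNode's props: { onClick: linkEvent(row, onRowClick) }
    // At dispatch time the linked data is passed first and the native event second
    // (see dispatchEvents below).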
var xlinkNS = 'http://www.w3.org/1999/xlink';
var xmlNS = 'http://www.w3.org/XML/1998/namespace';
var svgNS = 'http://www.w3.org/2000/svg';
var namespaces = {
'xlink:actuate': xlinkNS,
'xlink:arcrole': xlinkNS,
'xlink:href': xlinkNS,
'xlink:role': xlinkNS,
'xlink:show': xlinkNS,
'xlink:title': xlinkNS,
'xlink:type': xlinkNS,
'xml:base': xmlNS,
'xml:lang': xmlNS,
'xml:space': xmlNS
};
// We need EMPTY_OBJ defined in one place.
// Its used for comparison so we cant inline it into shared
var EMPTY_OBJ = {};
var LIFECYCLE = [];
{
Object.freeze(EMPTY_OBJ);
}
function appendChild(parentDom, dom) {
parentDom.appendChild(dom);
}
function insertOrAppend(parentDom, newNode, nextNode) {
if (isNullOrUndef(nextNode)) {
appendChild(parentDom, newNode);
}
else {
parentDom.insertBefore(newNode, nextNode);
}
}
function documentCreateElement(tag, isSVG) {
if (isSVG === true) {
return document.createElementNS(svgNS, tag);
}
return document.createElement(tag);
}
function replaceChild(parentDom, newDom, lastDom) {
parentDom.replaceChild(newDom, lastDom);
}
function removeChild(parentDom, dom) {
parentDom.removeChild(dom);
}
function callAll(arrayFn) {
var listener;
while ((listener = arrayFn.shift()) !== undefined) {
listener();
}
}
var attachedEventCounts = {};
var attachedEvents = {};
function handleEvent(name, nextEvent, dom) {
var eventsLeft = attachedEventCounts[name];
var eventsObject = dom.$EV;
if (nextEvent) {
if (!eventsLeft) {
attachedEvents[name] = attachEventToDocument(name);
attachedEventCounts[name] = 0;
}
if (!eventsObject) {
eventsObject = dom.$EV = {};
}
if (!eventsObject[name]) {
attachedEventCounts[name]++;
}
eventsObject[name] = nextEvent;
}
else if (eventsObject && eventsObject[name]) {
attachedEventCounts[name]--;
if (eventsLeft === 1) {
document.removeEventListener(normalizeEventName(name), attachedEvents[name]);
attachedEvents[name] = null;
}
eventsObject[name] = nextEvent;
}
}
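    // Delegation bookkeeping: the first handler registered for a given name (e.g.
    // 'onClick') attaches a single listener on document via attachEventToDocument,
    // each element keeps its own handler on dom.$EV, and attachedEventCounts tracks
    // how many elements still use that name so the document listener can be removed
    // once the count drops back to zero.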
function dispatchEvents(event, target, isClick, name, eventData) {
var dom = target;
while (!isNull(dom)) {
            // HTML nodes can be nested (e.g. a span inside a button); in that scenario the browser does not handle the disabled attribute on the parent,
// because the event listener is on document.body
// Don't process clicks on disabled elements
if (isClick && dom.disabled) {
return;
}
var eventsObject = dom.$EV;
if (eventsObject) {
var currentEvent = eventsObject[name];
if (currentEvent) {
// linkEvent object
eventData.dom = dom;
if (currentEvent.event) {
currentEvent.event(currentEvent.data, event);
}
else {
currentEvent(event);
}
if (event.cancelBubble) {
return;
}
}
}
dom = dom.parentNode;
}
}
function normalizeEventName(name) {
return name.substr(2).toLowerCase();
}
function stopPropagation() {
this.cancelBubble = true;
this.stopImmediatePropagation();
}
function attachEventToDocument(name) {
var docEvent = function (event) {
var type = event.type;
var isClick = type === 'click' || type === 'dblclick';
if (isClick && event.button !== 0) {
// Firefox incorrectly triggers click event for mid/right mouse buttons.
// This bug has been active for 12 years.
// https://bugzilla.mozilla.org/show_bug.cgi?id=184051
event.preventDefault();
event.stopPropagation();
return false;
}
event.stopPropagation = stopPropagation;
// Event data needs to be object to save reference to currentTarget getter
var eventData = {
dom: document
};
try {
Object.defineProperty(event, 'currentTarget', {
configurable: true,
get: function get() {
return eventData.dom;
}
});
}
catch (e) {
/* safari7 and phantomJS will crash */
}
dispatchEvents(event, event.target, isClick, name, eventData);
};
document.addEventListener(normalizeEventName(name), docEvent);
return docEvent;
}
function isSameInnerHTML(dom, innerHTML) {
var tempdom = document.createElement('i');
tempdom.innerHTML = innerHTML;
return tempdom.innerHTML === dom.innerHTML;
}
function isSamePropsInnerHTML(dom, props) {
return Boolean(props && props.dangerouslySetInnerHTML && props.dangerouslySetInnerHTML.__html && isSameInnerHTML(dom, props.dangerouslySetInnerHTML.__html));
}
function triggerEventListener(props, methodName, e) {
if (props[methodName]) {
var listener = props[methodName];
if (listener.event) {
listener.event(listener.data, e);
}
else {
listener(e);
}
}
else {
var nativeListenerName = methodName.toLowerCase();
if (props[nativeListenerName]) {
props[nativeListenerName](e);
}
}
}
function createWrappedFunction(methodName, applyValue) {
var fnMethod = function (e) {
e.stopPropagation();
var vNode = this.$V;
// If vNode is gone by the time event fires, no-op
if (!vNode) {
return;
}
var props = vNode.props || EMPTY_OBJ;
var dom = vNode.dom;
if (isString(methodName)) {
triggerEventListener(props, methodName, e);
}
else {
for (var i = 0; i < methodName.length; i++) {
triggerEventListener(props, methodName[i], e);
}
}
if (isFunction(applyValue)) {
var newVNode = this.$V;
var newProps = newVNode.props || EMPTY_OBJ;
applyValue(newProps, dom, false, newVNode);
}
};
Object.defineProperty(fnMethod, 'wrapped', {
configurable: false,
enumerable: false,
value: true,
writable: false
});
return fnMethod;
}
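    // The non-writable 'wrapped' marker set above lets patchEvent() tell Inferno's
    // controlled-input wrappers apart from plain user handlers, so prop patching never
    // overwrites the oninput/onchange functions that keep value/checked in sync.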
function isCheckedType(type) {
return type === 'checkbox' || type === 'radio';
}
var onTextInputChange = createWrappedFunction('onInput', applyValueInput);
var wrappedOnChange = createWrappedFunction(['onClick', 'onChange'], applyValueInput);
/* tslint:disable-next-line:no-empty */
function emptywrapper(event) {
event.stopPropagation();
}
emptywrapper.wrapped = true;
function inputEvents(dom, nextPropsOrEmpty) {
if (isCheckedType(nextPropsOrEmpty.type)) {
dom.onchange = wrappedOnChange;
dom.onclick = emptywrapper;
}
else {
dom.oninput = onTextInputChange;
}
}
function applyValueInput(nextPropsOrEmpty, dom) {
var type = nextPropsOrEmpty.type;
var value = nextPropsOrEmpty.value;
var checked = nextPropsOrEmpty.checked;
var multiple = nextPropsOrEmpty.multiple;
var defaultValue = nextPropsOrEmpty.defaultValue;
var hasValue = !isNullOrUndef(value);
if (type && type !== dom.type) {
dom.setAttribute('type', type);
}
if (!isNullOrUndef(multiple) && multiple !== dom.multiple) {
dom.multiple = multiple;
}
if (!isNullOrUndef(defaultValue) && !hasValue) {
dom.defaultValue = defaultValue + '';
}
if (isCheckedType(type)) {
if (hasValue) {
dom.value = value;
}
if (!isNullOrUndef(checked)) {
dom.checked = checked;
}
}
else {
if (hasValue && dom.value !== value) {
dom.defaultValue = value;
dom.value = value;
}
else if (!isNullOrUndef(checked)) {
dom.checked = checked;
}
}
}
function updateChildOptionGroup(vNode, value) {
var type = vNode.type;
if (type === 'optgroup') {
var children = vNode.children;
var childFlags = vNode.childFlags;
if (childFlags & 12 /* MultipleChildren */) {
for (var i = 0, len = children.length; i < len; i++) {
updateChildOption(children[i], value);
}
}
else if (childFlags === 2 /* HasVNodeChildren */) {
updateChildOption(children, value);
}
}
else {
updateChildOption(vNode, value);
}
}
function updateChildOption(vNode, value) {
var props = vNode.props || EMPTY_OBJ;
var dom = vNode.dom;
// we do this as multiple may have changed
dom.value = props.value;
if ((isArray(value) && value.indexOf(props.value) !== -1) || props.value === value) {
dom.selected = true;
}
else if (!isNullOrUndef(value) || !isNullOrUndef(props.selected)) {
dom.selected = props.selected || false;
}
}
var onSelectChange = createWrappedFunction('onChange', applyValueSelect);
function selectEvents(dom) {
dom.onchange = onSelectChange;
}
function applyValueSelect(nextPropsOrEmpty, dom, mounting, vNode) {
var multiplePropInBoolean = Boolean(nextPropsOrEmpty.multiple);
if (!isNullOrUndef(nextPropsOrEmpty.multiple) && multiplePropInBoolean !== dom.multiple) {
dom.multiple = multiplePropInBoolean;
}
var childFlags = vNode.childFlags;
if ((childFlags & 1 /* HasInvalidChildren */) === 0) {
var children = vNode.children;
var value = nextPropsOrEmpty.value;
if (mounting && isNullOrUndef(value)) {
value = nextPropsOrEmpty.defaultValue;
}
if (childFlags & 12 /* MultipleChildren */) {
for (var i = 0, len = children.length; i < len; i++) {
updateChildOptionGroup(children[i], value);
}
}
else if (childFlags === 2 /* HasVNodeChildren */) {
updateChildOptionGroup(children, value);
}
}
}
var onTextareaInputChange = createWrappedFunction('onInput', applyValueTextArea);
var wrappedOnChange$1 = createWrappedFunction('onChange');
function textAreaEvents(dom, nextPropsOrEmpty) {
dom.oninput = onTextareaInputChange;
if (nextPropsOrEmpty.onChange) {
dom.onchange = wrappedOnChange$1;
}
}
function applyValueTextArea(nextPropsOrEmpty, dom, mounting) {
var value = nextPropsOrEmpty.value;
var domValue = dom.value;
if (isNullOrUndef(value)) {
if (mounting) {
var defaultValue = nextPropsOrEmpty.defaultValue;
if (!isNullOrUndef(defaultValue) && defaultValue !== domValue) {
dom.defaultValue = defaultValue;
dom.value = defaultValue;
}
}
}
else if (domValue !== value) {
/* There is value so keep it controlled */
dom.defaultValue = value;
dom.value = value;
}
}
/**
 * There is currently no support for switching the same input between controlled and non-controlled.
 * If that ever becomes a real issue, the controlled-element handling will need to be redesigned.
 * Currently the user must choose either controlled or non-controlled and stick with that.
 */
function processElement(flags, vNode, dom, nextPropsOrEmpty, mounting, isControlled) {
if (flags & 64 /* InputElement */) {
applyValueInput(nextPropsOrEmpty, dom);
}
else if (flags & 256 /* SelectElement */) {
applyValueSelect(nextPropsOrEmpty, dom, mounting, vNode);
}
else if (flags & 128 /* TextareaElement */) {
applyValueTextArea(nextPropsOrEmpty, dom, mounting);
}
if (isControlled) {
dom.$V = vNode;
}
}
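/*
 * Illustrative sketch (not part of the library source): what "controlled" versus
 * "non-controlled" means for the note above. The createVNode argument shape and the
 * prop names are assumptions based on the API exported at the bottom of this bundle;
 * flag 64 mirrors the InputElement checks used in processElement.
 *
 *   // Controlled: `value` is driven by application state on every render.
 *   // createVNode(64, 'input', null, null, 1, { type: 'text', value: state.text, onInput: update });
 *
 *   // Non-controlled: only `defaultValue` is given, the DOM node keeps its own state.
 *   // createVNode(64, 'input', null, null, 1, { type: 'text', defaultValue: 'hello' });
 */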
function addFormElementEventHandlers(flags, dom, nextPropsOrEmpty) {
if (flags & 64 /* InputElement */) {
inputEvents(dom, nextPropsOrEmpty);
}
else if (flags & 256 /* SelectElement */) {
selectEvents(dom);
}
else if (flags & 128 /* TextareaElement */) {
textAreaEvents(dom, nextPropsOrEmpty);
}
}
function isControlledFormElement(nextPropsOrEmpty) {
return nextPropsOrEmpty.type && isCheckedType(nextPropsOrEmpty.type) ? !isNullOrUndef(nextPropsOrEmpty.checked) : !isNullOrUndef(nextPropsOrEmpty.value);
}
function remove(vNode, parentDom) {
unmount(vNode);
if (!isNull(parentDom)) {
removeChild(parentDom, vNode.dom);
// Let garbage collector free memory
vNode.dom = null;
}
}
function unmount(vNode) {
var flags = vNode.flags;
if (flags & 481 /* Element */) {
var ref = vNode.ref;
var props = vNode.props;
if (isFunction(ref)) {
ref(null);
}
var children = vNode.children;
var childFlags = vNode.childFlags;
if (childFlags & 12 /* MultipleChildren */) {
unmountAllChildren(children);
}
else if (childFlags === 2 /* HasVNodeChildren */) {
unmount(children);
}
if (!isNull(props)) {
for (var name in props) {
switch (name) {
case 'onClick':
case 'onDblClick':
case 'onFocusIn':
case 'onFocusOut':
case 'onKeyDown':
case 'onKeyPress':
case 'onKeyUp':
case 'onMouseDown':
case 'onMouseMove':
case 'onMouseUp':
case 'onSubmit':
case 'onTouchEnd':
case 'onTouchMove':
case 'onTouchStart':
handleEvent(name, null, vNode.dom);
break;
default:
break;
}
}
}
}
else if (flags & 14 /* Component */) {
var instance = vNode.children;
var ref$1 = vNode.ref;
if (flags & 4 /* ComponentClass */) {
if (isFunction(options.beforeUnmount)) {
options.beforeUnmount(vNode);
}
if (isFunction(instance.componentWillUnmount)) {
instance.componentWillUnmount();
}
if (isFunction(ref$1)) {
ref$1(null);
}
instance.$UN = true;
unmount(instance.$LI);
}
else {
if (!isNullOrUndef(ref$1) && isFunction(ref$1.onComponentWillUnmount)) {
ref$1.onComponentWillUnmount(vNode.dom, vNode.props || EMPTY_OBJ);
}
unmount(instance);
}
}
else if (flags & 1024 /* Portal */) {
var children$1 = vNode.children;
if (!isNull(children$1) && isObject(children$1)) {
remove(children$1, vNode.type);
}
}
}
function unmountAllChildren(children) {
for (var i = 0, len = children.length; i < len; i++) {
unmount(children[i]);
}
}
function removeAllChildren(dom, children) {
unmountAllChildren(children);
dom.textContent = '';
}
function createLinkEvent(linkEvent, nextValue) {
return function (e) {
linkEvent(nextValue.data, e);
};
}
function patchEvent(name, lastValue, nextValue, dom) {
var nameLowerCase = name.toLowerCase();
if (!isFunction(nextValue) && !isNullOrUndef(nextValue)) {
var linkEvent = nextValue.event;
if (linkEvent && isFunction(linkEvent)) {
dom[nameLowerCase] = createLinkEvent(linkEvent, nextValue);
}
else {
// Development warning
{
throwError(("an event on a VNode \"" + name + "\". was not a function or a valid linkEvent."));
}
}
}
else {
var domEvent = dom[nameLowerCase];
// if the function is wrapped, that means it's been controlled by a wrapper
if (!domEvent || !domEvent.wrapped) {
dom[nameLowerCase] = nextValue;
}
}
}
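/*
 * Illustrative sketch (not part of the library source): patchEvent accepts either a plain
 * handler function or a linkEvent object of the shape { data, event }, which is what the
 * exported linkEvent(data, handler) helper is assumed to produce. All names below are examples.
 *
 *   // createVNode(1, 'button', null, null, 1, { onClick: linkEvent({ id: 1 }, handleClick) });
 *   // createLinkEvent above then calls handleClick({ id: 1 }, nativeEvent) when the event fires.
 */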
function getNumberStyleValue(style, value) {
switch (style) {
case 'animationIterationCount':
case 'borderImageOutset':
case 'borderImageSlice':
case 'borderImageWidth':
case 'boxFlex':
case 'boxFlexGroup':
case 'boxOrdinalGroup':
case 'columnCount':
case 'fillOpacity':
case 'flex':
case 'flexGrow':
case 'flexNegative':
case 'flexOrder':
case 'flexPositive':
case 'flexShrink':
case 'floodOpacity':
case 'fontWeight':
case 'gridColumn':
case 'gridRow':
case 'lineClamp':
case 'lineHeight':
case 'opacity':
case 'order':
case 'orphans':
case 'stopOpacity':
case 'strokeDasharray':
case 'strokeDashoffset':
case 'strokeMiterlimit':
case 'strokeOpacity':
case 'strokeWidth':
case 'tabSize':
case 'widows':
case 'zIndex':
case 'zoom':
return value;
default:
return value + 'px';
}
}
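/*
 * Illustrative sketch (not part of the library source): getNumberStyleValue leaves the
 * unitless CSS properties listed above untouched and suffixes every other numeric value
 * with "px".
 *
 *   // getNumberStyleValue('opacity', 0.5) -> 0.5
 *   // getNumberStyleValue('width', 100)   -> '100px'
 */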
// We are assuming here that we come from patchProp routine
// -nextAttrValue cannot be null or undefined
function patchStyle(lastAttrValue, nextAttrValue, dom) {
var domStyle = dom.style;
var style;
var value;
if (isString(nextAttrValue)) {
domStyle.cssText = nextAttrValue;
return;
}
if (!isNullOrUndef(lastAttrValue) && !isString(lastAttrValue)) {
for (style in nextAttrValue) {
// do not add a hasOwnProperty check here, it affects performance
value = nextAttrValue[style];
if (value !== lastAttrValue[style]) {
domStyle[style] = isNumber(value) ? getNumberStyleValue(style, value) : value;
}
}
for (style in lastAttrValue) {
if (isNullOrUndef(nextAttrValue[style])) {
domStyle[style] = '';
}
}
}
else {
for (style in nextAttrValue) {
value = nextAttrValue[style];
domStyle[style] = isNumber(value) ? getNumberStyleValue(style, value) : value;
}
}
}
function patchProp(prop, lastValue, nextValue, dom, isSVG, hasControlledValue, lastVNode) {
switch (prop) {
case 'onClick':
case 'onDblClick':
case 'onFocusIn':
case 'onFocusOut':
case 'onKeyDown':
case 'onKeyPress':
case 'onKeyUp':
case 'onMouseDown':
case 'onMouseMove':
case 'onMouseUp':
case 'onSubmit':
case 'onTouchEnd':
case 'onTouchMove':
case 'onTouchStart':
handleEvent(prop, nextValue, dom);
break;
case 'children':
case 'childrenType':
case 'className':
case 'defaultValue':
case 'key':
case 'multiple':
case 'ref':
return;
case 'allowfullscreen':
case 'autoFocus':
case 'autoplay':
case 'capture':
case 'checked':
case 'controls':
case 'default':
case 'disabled':
case 'hidden':
case 'indeterminate':
case 'loop':
case 'muted':
case 'novalidate':
case 'open':
case 'readOnly':
case 'required':
case 'reversed':
case 'scoped':
case 'seamless':
case 'selected':
prop = prop === 'autoFocus' ? prop.toLowerCase() : prop;
dom[prop] = !!nextValue;
break;
case 'defaultChecked':
case 'value':
case 'volume':
if (hasControlledValue && prop === 'value') {
return;
}
var value = isNullOrUndef(nextValue) ? '' : nextValue;
if (dom[prop] !== value) {
dom[prop] = value;
}
break;
case 'dangerouslySetInnerHTML':
var lastHtml = (lastValue && lastValue.__html) || '';
var nextHtml = (nextValue && nextValue.__html) || '';
if (lastHtml !== nextHtml) {
if (!isNullOrUndef(nextHtml) && !isSameInnerHTML(dom, nextHtml)) {
if (!isNull(lastVNode)) {
if (lastVNode.childFlags & 12 /* MultipleChildren */) {
unmountAllChildren(lastVNode.children);
}
else if (lastVNode.childFlags === 2 /* HasVNodeChildren */) {
unmount(lastVNode.children);
}
lastVNode.children = null;
lastVNode.childFlags = 1 /* HasInvalidChildren */;
}
dom.innerHTML = nextHtml;
}
}
break;
default:
if (prop[0] === 'o' && prop[1] === 'n') {
patchEvent(prop, lastValue, nextValue, dom);
}
else if (isNullOrUndef(nextValue)) {
dom.removeAttribute(prop);
}
else if (prop === 'style') {
patchStyle(lastValue, nextValue, dom);
}
else if (isSVG && namespaces[prop]) {
// We optimize for NS being boolean. It's false 99.9% of the time
// If we end up in this path we can read property again
dom.setAttributeNS(namespaces[prop], prop, nextValue);
}
else {
dom.setAttribute(prop, nextValue);
}
break;
}
}
function mountProps(vNode, flags, props, dom, isSVG) {
var hasControlledValue = false;
var isFormElement = (flags & 448 /* FormElement */) > 0;
if (isFormElement) {
hasControlledValue = isControlledFormElement(props);
if (hasControlledValue) {
addFormElementEventHandlers(flags, dom, props);
}
}
for (var prop in props) {
// do not add a hasOwnProperty check here, it affects performance
patchProp(prop, null, props[prop], dom, isSVG, hasControlledValue, null);
}
if (isFormElement) {
processElement(flags, vNode, dom, props, true, hasControlledValue);
}
}
function createClassComponentInstance(vNode, Component, props, context) {
var instance = new Component(props, context);
vNode.children = instance;
instance.$V = vNode;
instance.$BS = false;
instance.context = context;
if (instance.props === EMPTY_OBJ) {
instance.props = props;
}
instance.$UN = false;
if (isFunction(instance.componentWillMount)) {
instance.$BR = true;
instance.componentWillMount();
if (instance.$PSS) {
var state = instance.state;
var pending = instance.$PS;
if (isNull(state)) {
instance.state = pending;
}
else {
for (var key in pending) {
state[key] = pending[key];
}
}
instance.$PSS = false;
instance.$PS = null;
}
instance.$BR = false;
}
if (isFunction(options.beforeRender)) {
options.beforeRender(instance);
}
var input = handleComponentInput(instance.render(props, instance.state, context), vNode);
var childContext;
if (isFunction(instance.getChildContext)) {
childContext = instance.getChildContext();
}
if (isNullOrUndef(childContext)) {
instance.$CX = context;
}
else {
instance.$CX = combineFrom(context, childContext);
}
if (isFunction(options.afterRender)) {
options.afterRender(instance);
}
instance.$LI = input;
return instance;
}
function handleComponentInput(input, componentVNode) {
// Development validation
{
if (isArray(input)) {
throwError('a valid Inferno VNode (or null) must be returned from a component render. You may have returned an array or an invalid object.');
}
}
if (isInvalid(input)) {
input = createVoidVNode();
}
else if (isStringOrNumber(input)) {
input = createTextVNode(input, null);
}
else {
if (input.dom) {
input = directClone(input);
}
if (input.flags & 14 /* Component */) {
// if we have an input that is also a component, we run into a tricky situation
// where the root vNode needs to always have the correct DOM entry
// we can optimise this in the future, but this gets us out of a lot of issues
input.parentVNode = componentVNode;
}
}
return input;
}
function mount(vNode, parentDom, lifecycle, context, isSVG) {
var flags = vNode.flags;
if (flags & 481 /* Element */) {
return mountElement(vNode, parentDom, lifecycle, context, isSVG);
}
if (flags & 14 /* Component */) {
return mountComponent(vNode, parentDom, lifecycle, context, isSVG, (flags & 4 /* ComponentClass */) > 0);
}
if (flags & 512 /* Void */ || flags & 16 /* Text */) {
return mountText(vNode, parentDom);
}
if (flags & 1024 /* Portal */) {
mount(vNode.children, vNode.type, lifecycle, context, false);
return (vNode.dom = mountText(createVoidVNode(), parentDom));
}
// Development validation, in production we don't need to throw because it crashes anyway
{
if (typeof vNode === 'object') {
throwError(("mount() received an object that's not a valid VNode, you should stringify it first, fix createVNode flags or call normalizeChildren. Object: \"" + (JSON.stringify(vNode)) + "\"."));
}
else {
throwError(("mount() expects a valid VNode, instead it received an object with the type \"" + (typeof vNode) + "\"."));
}
}
}
function mountText(vNode, parentDom) {
var dom = (vNode.dom = document.createTextNode(vNode.children));
if (!isNull(parentDom)) {
appendChild(parentDom, dom);
}
return dom;
}
function mountElement(vNode, parentDom, lifecycle, context, isSVG) {
var flags = vNode.flags;
var children = vNode.children;
var props = vNode.props;
var className = vNode.className;
var ref = vNode.ref;
var childFlags = vNode.childFlags;
isSVG = isSVG || (flags & 32 /* SvgElement */) > 0;
var dom = documentCreateElement(vNode.type, isSVG);
vNode.dom = dom;
if (!isNullOrUndef(className) && className !== '') {
if (isSVG) {
dom.setAttribute('class', className);
}
else {
dom.className = className;
}
}
{
validateKeys(vNode);
}
if (!isNull(parentDom)) {
appendChild(parentDom, dom);
}
if ((childFlags & 1 /* HasInvalidChildren */) === 0) {
var childrenIsSVG = isSVG === true && vNode.type !== 'foreignObject';
if (childFlags === 2 /* HasVNodeChildren */) {
mount(children, dom, lifecycle, context, childrenIsSVG);
}
else if (childFlags & 12 /* MultipleChildren */) {
mountArrayChildren(children, dom, lifecycle, context, childrenIsSVG);
}
}
if (!isNull(props)) {
mountProps(vNode, flags, props, dom, isSVG);
}
{
if (isString(ref)) {
throwError('string "refs" are not supported in Inferno 1.0. Use callback "refs" instead.');
}
}
if (isFunction(ref)) {
mountRef(dom, ref, lifecycle);
}
return dom;
}
function mountArrayChildren(children, dom, lifecycle, context, isSVG) {
for (var i = 0, len = children.length; i < len; i++) {
var child = children[i];
if (!isNull(child.dom)) {
children[i] = child = directClone(child);
}
mount(child, dom, lifecycle, context, isSVG);
}
}
function mountComponent(vNode, parentDom, lifecycle, context, isSVG, isClass) {
var dom;
var type = vNode.type;
var props = vNode.props || EMPTY_OBJ;
var ref = vNode.ref;
if (isClass) {
var instance = createClassComponentInstance(vNode, type, props, context);
vNode.dom = dom = mount(instance.$LI, null, lifecycle, instance.$CX, isSVG);
mountClassComponentCallbacks(vNode, ref, instance, lifecycle);
instance.$UPD = false;
}
else {
var input = handleComponentInput(type(props, context), vNode);
vNode.children = input;
vNode.dom = dom = mount(input, null, lifecycle, context, isSVG);
mountFunctionalComponentCallbacks(props, ref, dom, lifecycle);
}
if (!isNull(parentDom)) {
appendChild(parentDom, dom);
}
return dom;
}
function createClassMountCallback(instance, hasAfterMount, afterMount, vNode, hasDidMount) {
return function () {
instance.$UPD = true;
if (hasAfterMount) {
afterMount(vNode);
}
if (hasDidMount) {
instance.componentDidMount();
}
instance.$UPD = false;
};
}
function mountClassComponentCallbacks(vNode, ref, instance, lifecycle) {
if (isFunction(ref)) {
ref(instance);
}
else {
{
if (isStringOrNumber(ref)) {
throwError('string "refs" are not supported in Inferno 1.0. Use callback "refs" instead.');
}
else if (!isNullOrUndef(ref) && isObject(ref) && vNode.flags & 4 /* ComponentClass */) {
throwError('functional component lifecycle events are not supported on ES2015 class components.');
}
}
}
var hasDidMount = isFunction(instance.componentDidMount);
var afterMount = options.afterMount;
var hasAfterMount = isFunction(afterMount);
if (hasDidMount || hasAfterMount) {
lifecycle.push(createClassMountCallback(instance, hasAfterMount, afterMount, vNode, hasDidMount));
}
}
// Create did mount callback lazily to avoid creating function context if not needed
function createOnMountCallback(ref, dom, props) {
return function () { return ref.onComponentDidMount(dom, props); };
}
function mountFunctionalComponentCallbacks(props, ref, dom, lifecycle) {
if (!isNullOrUndef(ref)) {
if (isFunction(ref.onComponentWillMount)) {
ref.onComponentWillMount(props);
}
if (isFunction(ref.onComponentDidMount)) {
lifecycle.push(createOnMountCallback(ref, dom, props));
}
}
}
function mountRef(dom, value, lifecycle) {
lifecycle.push(function () { return value(dom); });
}
function hydrateComponent(vNode, dom, lifecycle, context, isSVG, isClass) {
var type = vNode.type;
var ref = vNode.ref;
var props = vNode.props || EMPTY_OBJ;
if (isClass) {
var instance = createClassComponentInstance(vNode, type, props, context);
var input = instance.$LI;
hydrateVNode(input, dom, lifecycle, instance.$CX, isSVG);
vNode.dom = input.dom;
mountClassComponentCallbacks(vNode, ref, instance, lifecycle);
instance.$UPD = false; // Mount finished, allow going sync
}
else {
var input$1 = handleComponentInput(type(props, context), vNode);
hydrateVNode(input$1, dom, lifecycle, context, isSVG);
vNode.children = input$1;
vNode.dom = input$1.dom;
mountFunctionalComponentCallbacks(props, ref, dom, lifecycle);
}
}
function hydrateElement(vNode, dom, lifecycle, context, isSVG) {
var children = vNode.children;
var props = vNode.props;
var className = vNode.className;
var flags = vNode.flags;
var ref = vNode.ref;
isSVG = isSVG || (flags & 32 /* SvgElement */) > 0;
if (dom.nodeType !== 1 || dom.tagName.toLowerCase() !== vNode.type) {
{
warning("Inferno hydration: Server-side markup doesn't match client-side markup or Initial render target is not empty");
}
var newDom = mountElement(vNode, null, lifecycle, context, isSVG);
vNode.dom = newDom;
replaceChild(dom.parentNode, newDom, dom);
}
else {
vNode.dom = dom;
var childNode = dom.firstChild;
var childFlags = vNode.childFlags;
if ((childFlags & 1 /* HasInvalidChildren */) === 0) {
var nextSibling = null;
while (childNode) {
nextSibling = childNode.nextSibling;
if (childNode.nodeType === 8) {
if (childNode.data === '!') {
dom.replaceChild(document.createTextNode(''), childNode);
}
else {
dom.removeChild(childNode);
}
}
childNode = nextSibling;
}
childNode = dom.firstChild;
if (childFlags === 2 /* HasVNodeChildren */) {
if (isNull(childNode)) {
mount(children, dom, lifecycle, context, isSVG);
}
else {
nextSibling = childNode.nextSibling;
hydrateVNode(children, childNode, lifecycle, context, isSVG);
childNode = nextSibling;
}
}
else if (childFlags & 12 /* MultipleChildren */) {
for (var i = 0, len = children.length; i < len; i++) {
var child = children[i];
if (isNull(childNode)) {
mount(child, dom, lifecycle, context, isSVG);
}
else {
nextSibling = childNode.nextSibling;
hydrateVNode(child, childNode, lifecycle, context, isSVG);
childNode = nextSibling;
}
}
}
// clear any other DOM nodes, there should be only a single entry for the root
while (childNode) {
nextSibling = childNode.nextSibling;
dom.removeChild(childNode);
childNode = nextSibling;
}
}
else if (!isNull(dom.firstChild) && !isSamePropsInnerHTML(dom, props)) {
dom.textContent = ''; // dom has content, but VNode has no children, so remove everything from the DOM
if (flags & 448 /* FormElement */) {
// If the element is a form element, we need to clear its defaultValue as well
dom.defaultValue = '';
}
}
if (!isNull(props)) {
mountProps(vNode, flags, props, dom, isSVG);
}
if (isNullOrUndef(className)) {
if (dom.className !== '') {
dom.removeAttribute('class');
}
}
else if (isSVG) {
dom.setAttribute('class', className);
}
else {
dom.className = className;
}
if (isFunction(ref)) {
mountRef(dom, ref, lifecycle);
}
else {
{
if (isString(ref)) {
throwError('string "refs" are not supported in Inferno 1.0. Use callback "refs" instead.');
}
}
}
}
}
function hydrateText(vNode, dom) {
if (dom.nodeType !== 3) {
var newDom = mountText(vNode, null);
vNode.dom = newDom;
replaceChild(dom.parentNode, newDom, dom);
}
else {
var text = vNode.children;
if (dom.nodeValue !== text) {
dom.nodeValue = text;
}
vNode.dom = dom;
}
}
function hydrateVNode(vNode, dom, lifecycle, context, isSVG) {
var flags = vNode.flags;
if (flags & 14 /* Component */) {
hydrateComponent(vNode, dom, lifecycle, context, isSVG, (flags & 4 /* ComponentClass */) > 0);
}
else if (flags & 481 /* Element */) {
hydrateElement(vNode, dom, lifecycle, context, isSVG);
}
else if (flags & 16 /* Text */) {
hydrateText(vNode, dom);
}
else if (flags & 512 /* Void */) {
vNode.dom = dom;
}
else {
{
throwError(("hydrate() expects a valid VNode, instead it received an object with the type \"" + (typeof vNode) + "\"."));
}
throwError();
}
}
function hydrate(input, parentDom, callback) {
var dom = parentDom.firstChild;
if (!isNull(dom)) {
if (!isInvalid(input)) {
hydrateVNode(input, dom, LIFECYCLE, EMPTY_OBJ, false);
}
dom = parentDom.firstChild;
// clear any other DOM nodes, there should be only a single entry for the root
while ((dom = dom.nextSibling)) {
parentDom.removeChild(dom);
}
}
if (LIFECYCLE.length > 0) {
callAll(LIFECYCLE);
}
if (!parentDom.$V) {
options.roots.push(parentDom);
}
parentDom.$V = input;
if (isFunction(callback)) {
callback();
}
}
function replaceWithNewNode(lastNode, nextNode, parentDom, lifecycle, context, isSVG) {
unmount(lastNode);
replaceChild(parentDom, mount(nextNode, null, lifecycle, context, isSVG), lastNode.dom);
}
function patch(lastVNode, nextVNode, parentDom, lifecycle, context, isSVG) {
if (lastVNode !== nextVNode) {
var nextFlags = nextVNode.flags | 0;
if (lastVNode.flags !== nextFlags || nextFlags & 2048 /* ReCreate */) {
replaceWithNewNode(lastVNode, nextVNode, parentDom, lifecycle, context, isSVG);
}
else if (nextFlags & 481 /* Element */) {
patchElement(lastVNode, nextVNode, parentDom, lifecycle, context, isSVG);
}
else if (nextFlags & 14 /* Component */) {
patchComponent(lastVNode, nextVNode, parentDom, lifecycle, context, isSVG, (nextFlags & 4 /* ComponentClass */) > 0);
}
else if (nextFlags & 16 /* Text */) {
patchText(lastVNode, nextVNode, parentDom);
}
else if (nextFlags & 512 /* Void */) {
nextVNode.dom = lastVNode.dom;
}
else {
// Portal
patchPortal(lastVNode, nextVNode, lifecycle, context);
}
}
}
function patchPortal(lastVNode, nextVNode, lifecycle, context) {
var lastContainer = lastVNode.type;
var nextContainer = nextVNode.type;
var nextChildren = nextVNode.children;
patchChildren(lastVNode.childFlags, nextVNode.childFlags, lastVNode.children, nextChildren, lastContainer, lifecycle, context, false);
nextVNode.dom = lastVNode.dom;
if (lastContainer !== nextContainer && !isInvalid(nextChildren)) {
var node = nextChildren.dom;
lastContainer.removeChild(node);
nextContainer.appendChild(node);
}
}
function patchElement(lastVNode, nextVNode, parentDom, lifecycle, context, isSVG) {
var nextTag = nextVNode.type;
if (lastVNode.type !== nextTag) {
replaceWithNewNode(lastVNode, nextVNode, parentDom, lifecycle, context, isSVG);
}
else {
var dom = lastVNode.dom;
var nextFlags = nextVNode.flags;
var lastProps = lastVNode.props;
var nextProps = nextVNode.props;
var isFormElement = false;
var hasControlledValue = false;
var nextPropsOrEmpty;
nextVNode.dom = dom;
isSVG = isSVG || (nextFlags & 32 /* SvgElement */) > 0;
// inlined patchProps -- starts --
if (lastProps !== nextProps) {
var lastPropsOrEmpty = lastProps || EMPTY_OBJ;
nextPropsOrEmpty = nextProps || EMPTY_OBJ;
if (nextPropsOrEmpty !== EMPTY_OBJ) {
isFormElement = (nextFlags & 448 /* FormElement */) > 0;
if (isFormElement) {
hasControlledValue = isControlledFormElement(nextPropsOrEmpty);
}
for (var prop in nextPropsOrEmpty) {
var lastValue = lastPropsOrEmpty[prop];
var nextValue = nextPropsOrEmpty[prop];
if (lastValue !== nextValue) {
patchProp(prop, lastValue, nextValue, dom, isSVG, hasControlledValue, lastVNode);
}
}
}
if (lastPropsOrEmpty !== EMPTY_OBJ) {
for (var prop$1 in lastPropsOrEmpty) {
// do not add a hasOwnProperty check here, it affects performance
if (!nextPropsOrEmpty.hasOwnProperty(prop$1) && !isNullOrUndef(lastPropsOrEmpty[prop$1])) {
patchProp(prop$1, lastPropsOrEmpty[prop$1], null, dom, isSVG, hasControlledValue, lastVNode);
}
}
}
}
var lastChildren = lastVNode.children;
var nextChildren = nextVNode.children;
var nextRef = nextVNode.ref;
var lastClassName = lastVNode.className;
var nextClassName = nextVNode.className;
if (lastChildren !== nextChildren) {
{
validateKeys(nextVNode);
}
patchChildren(lastVNode.childFlags, nextVNode.childFlags, lastChildren, nextChildren, dom, lifecycle, context, isSVG && nextTag !== 'foreignObject');
}
if (isFormElement) {
processElement(nextFlags, nextVNode, dom, nextPropsOrEmpty, false, hasControlledValue);
}
// inlined patchProps -- ends --
if (lastClassName !== nextClassName) {
if (isNullOrUndef(nextClassName)) {
dom.removeAttribute('class');
}
else if (isSVG) {
dom.setAttribute('class', nextClassName);
}
else {
dom.className = nextClassName;
}
}
if (isFunction(nextRef) && lastVNode.ref !== nextRef) {
mountRef(dom, nextRef, lifecycle);
}
else {
{
if (isString(nextRef)) {
throwError('string "refs" are not supported in Inferno 1.0. Use callback "refs" instead.');
}
}
}
}
}
function patchChildren(lastChildFlags, nextChildFlags, lastChildren, nextChildren, parentDOM, lifecycle, context, isSVG) {
switch (lastChildFlags) {
case 2 /* HasVNodeChildren */:
switch (nextChildFlags) {
case 2 /* HasVNodeChildren */:
patch(lastChildren, nextChildren, parentDOM, lifecycle, context, isSVG);
break;
case 1 /* HasInvalidChildren */:
remove(lastChildren, parentDOM);
break;
default:
remove(lastChildren, parentDOM);
mountArrayChildren(nextChildren, parentDOM, lifecycle, context, isSVG);
break;
}
break;
case 1 /* HasInvalidChildren */:
switch (nextChildFlags) {
case 2 /* HasVNodeChildren */:
mount(nextChildren, parentDOM, lifecycle, context, isSVG);
break;
case 1 /* HasInvalidChildren */:
break;
default:
mountArrayChildren(nextChildren, parentDOM, lifecycle, context, isSVG);
break;
}
break;
default:
if (nextChildFlags & 12 /* MultipleChildren */) {
var lastLength = lastChildren.length;
var nextLength = nextChildren.length;
// Fast paths for both algorithms
if (lastLength === 0) {
if (nextLength > 0) {
mountArrayChildren(nextChildren, parentDOM, lifecycle, context, isSVG);
}
}
else if (nextLength === 0) {
removeAllChildren(parentDOM, lastChildren);
}
else if (nextChildFlags === 8 /* HasKeyedChildren */ && lastChildFlags === 8 /* HasKeyedChildren */) {
patchKeyedChildren(lastChildren, nextChildren, parentDOM, lifecycle, context, isSVG, lastLength, nextLength);
}
else {
patchNonKeyedChildren(lastChildren, nextChildren, parentDOM, lifecycle, context, isSVG, lastLength, nextLength);
}
}
else if (nextChildFlags === 1 /* HasInvalidChildren */) {
removeAllChildren(parentDOM, lastChildren);
}
else {
removeAllChildren(parentDOM, lastChildren);
mount(nextChildren, parentDOM, lifecycle, context, isSVG);
}
break;
}
}
function updateClassComponent(instance, nextState, nextVNode, nextProps, parentDom, lifecycle, context, isSVG, force, fromSetState) {
var lastState = instance.state;
var lastProps = instance.props;
nextVNode.children = instance;
var lastInput = instance.$LI;
var renderOutput;
if (instance.$UN) {
{
throwError('Inferno Error: Can only update a mounted or mounting component. This usually means you called setState() or forceUpdate() on an unmounted component. This is a no-op.');
}
return;
}
if (lastProps !== nextProps || nextProps === EMPTY_OBJ) {
if (!fromSetState && isFunction(instance.componentWillReceiveProps)) {
instance.$BR = true;
instance.componentWillReceiveProps(nextProps, context);
// If the component was removed during its own update, do nothing...
if (instance.$UN) {
return;
}
instance.$BR = false;
}
if (instance.$PSS) {
nextState = combineFrom(nextState, instance.$PS);
instance.$PSS = false;
instance.$PS = null;
}
}
/* Update if shouldComponentUpdate is not defined, or it returns a truthy value, or the update is forced */
var hasSCU = isFunction(instance.shouldComponentUpdate);
if (force || !hasSCU || (hasSCU && instance.shouldComponentUpdate(nextProps, nextState, context))) {
if (isFunction(instance.componentWillUpdate)) {
instance.$BS = true;
instance.componentWillUpdate(nextProps, nextState, context);
instance.$BS = false;
}
instance.props = nextProps;
instance.state = nextState;
instance.context = context;
if (isFunction(options.beforeRender)) {
options.beforeRender(instance);
}
renderOutput = instance.render(nextProps, nextState, context);
if (isFunction(options.afterRender)) {
options.afterRender(instance);
}
var didUpdate = renderOutput !== NO_OP;
var childContext;
if (isFunction(instance.getChildContext)) {
childContext = instance.getChildContext();
}
if (isNullOrUndef(childContext)) {
childContext = context;
}
else {
childContext = combineFrom(context, childContext);
}
instance.$CX = childContext;
if (didUpdate) {
var nextInput = (instance.$LI = handleComponentInput(renderOutput, nextVNode));
patch(lastInput, nextInput, parentDom, lifecycle, childContext, isSVG);
if (isFunction(instance.componentDidUpdate)) {
instance.componentDidUpdate(lastProps, lastState);
}
if (isFunction(options.afterUpdate)) {
options.afterUpdate(nextVNode);
}
}
}
else {
instance.props = nextProps;
instance.state = nextState;
instance.context = context;
}
nextVNode.dom = instance.$LI.dom;
}
function patchComponent(lastVNode, nextVNode, parentDom, lifecycle, context, isSVG, isClass) {
var nextType = nextVNode.type;
var lastKey = lastVNode.key;
var nextKey = nextVNode.key;
if (lastVNode.type !== nextType || lastKey !== nextKey) {
replaceWithNewNode(lastVNode, nextVNode, parentDom, lifecycle, context, isSVG);
}
else {
var nextProps = nextVNode.props || EMPTY_OBJ;
if (isClass) {
var instance = lastVNode.children;
instance.$UPD = true;
updateClassComponent(instance, instance.state, nextVNode, nextProps, parentDom, lifecycle, context, isSVG, false, false);
instance.$V = nextVNode;
instance.$UPD = false;
}
else {
var shouldUpdate = true;
var lastProps = lastVNode.props;
var nextHooks = nextVNode.ref;
var nextHooksDefined = !isNullOrUndef(nextHooks);
var lastInput = lastVNode.children;
nextVNode.dom = lastVNode.dom;
nextVNode.children = lastInput;
if (nextHooksDefined && isFunction(nextHooks.onComponentShouldUpdate)) {
shouldUpdate = nextHooks.onComponentShouldUpdate(lastProps, nextProps);
}
if (shouldUpdate !== false) {
if (nextHooksDefined && isFunction(nextHooks.onComponentWillUpdate)) {
nextHooks.onComponentWillUpdate(lastProps, nextProps);
}
var nextInput = nextType(nextProps, context);
if (nextInput !== NO_OP) {
nextInput = handleComponentInput(nextInput, nextVNode);
patch(lastInput, nextInput, parentDom, lifecycle, context, isSVG);
nextVNode.children = nextInput;
nextVNode.dom = nextInput.dom;
if (nextHooksDefined && isFunction(nextHooks.onComponentDidUpdate)) {
nextHooks.onComponentDidUpdate(lastProps, nextProps);
}
}
}
else if (lastInput.flags & 14 /* Component */) {
lastInput.parentVNode = nextVNode;
}
}
}
}
function patchText(lastVNode, nextVNode, parentDom) {
var nextText = nextVNode.children;
var textNode = parentDom.firstChild;
var dom;
// Guard against external change on DOM node.
if (isNull(textNode)) {
parentDom.textContent = nextText;
dom = parentDom.firstChild;
}
else {
dom = lastVNode.dom;
if (nextText !== lastVNode.children) {
dom.nodeValue = nextText;
}
}
nextVNode.dom = dom;
}
function patchNonKeyedChildren(lastChildren, nextChildren, dom, lifecycle, context, isSVG, lastChildrenLength, nextChildrenLength) {
var commonLength = lastChildrenLength > nextChildrenLength ? nextChildrenLength : lastChildrenLength;
var i = 0;
for (; i < commonLength; i++) {
var nextChild = nextChildren[i];
if (nextChild.dom) {
nextChild = nextChildren[i] = directClone(nextChild);
}
patch(lastChildren[i], nextChild, dom, lifecycle, context, isSVG);
}
if (lastChildrenLength < nextChildrenLength) {
for (i = commonLength; i < nextChildrenLength; i++) {
var nextChild$1 = nextChildren[i];
if (nextChild$1.dom) {
nextChild$1 = nextChildren[i] = directClone(nextChild$1);
}
mount(nextChild$1, dom, lifecycle, context, isSVG);
}
}
else if (lastChildrenLength > nextChildrenLength) {
for (i = commonLength; i < lastChildrenLength; i++) {
remove(lastChildren[i], dom);
}
}
}
function patchKeyedChildren(a, b, dom, lifecycle, context, isSVG, aLength, bLength) {
var aEnd = aLength - 1;
var bEnd = bLength - 1;
var aStart = 0;
var bStart = 0;
var i;
var j;
var aNode;
var bNode;
var nextNode;
var nextPos;
var node;
var aStartNode = a[aStart];
var bStartNode = b[bStart];
var aEndNode = a[aEnd];
var bEndNode = b[bEnd];
if (bStartNode.dom) {
b[bStart] = bStartNode = directClone(bStartNode);
}
if (bEndNode.dom) {
b[bEnd] = bEndNode = directClone(bEndNode);
}
// Step 1
// tslint:disable-next-line
outer: {
// Sync nodes with the same key at the beginning.
while (aStartNode.key === bStartNode.key) {
patch(aStartNode, bStartNode, dom, lifecycle, context, isSVG);
aStart++;
bStart++;
if (aStart > aEnd || bStart > bEnd) {
break outer;
}
aStartNode = a[aStart];
bStartNode = b[bStart];
if (bStartNode.dom) {
b[bStart] = bStartNode = directClone(bStartNode);
}
}
// Sync nodes with the same key at the end.
while (aEndNode.key === bEndNode.key) {
patch(aEndNode, bEndNode, dom, lifecycle, context, isSVG);
aEnd--;
bEnd--;
if (aStart > aEnd || bStart > bEnd) {
break outer;
}
aEndNode = a[aEnd];
bEndNode = b[bEnd];
if (bEndNode.dom) {
b[bEnd] = bEndNode = directClone(bEndNode);
}
}
}
if (aStart > aEnd) {
if (bStart <= bEnd) {
nextPos = bEnd + 1;
nextNode = nextPos < bLength ? b[nextPos].dom : null;
while (bStart <= bEnd) {
<|fim▁hole|> if (node.dom) {
b[bStart] = node = directClone(node);
}
bStart++;
insertOrAppend(dom, mount(node, null, lifecycle, context, isSVG), nextNode);
}
}
}
else if (bStart > bEnd) {
while (aStart <= aEnd) {
remove(a[aStart++], dom);
}
}
else {
var aLeft = aEnd - aStart + 1;
var bLeft = bEnd - bStart + 1;
var sources = new Array(bLeft);
for (i = 0; i < bLeft; i++) {
sources[i] = -1;
}
var moved = false;
var pos = 0;
var patched = 0;
// When sizes are small, just loop through them
if (bLeft <= 4 || aLeft * bLeft <= 16) {
for (i = aStart; i <= aEnd; i++) {
aNode = a[i];
if (patched < bLeft) {
for (j = bStart; j <= bEnd; j++) {
bNode = b[j];
if (aNode.key === bNode.key) {
sources[j - bStart] = i;
if (pos > j) {
moved = true;
}
else {
pos = j;
}
if (bNode.dom) {
b[j] = bNode = directClone(bNode);
}
patch(aNode, bNode, dom, lifecycle, context, isSVG);
patched++;
a[i] = null;
break;
}
}
}
}
}
else {
var keyIndex = {};
// Map keys by their index in array
for (i = bStart; i <= bEnd; i++) {
keyIndex[b[i].key] = i;
}
// Try to patch same keys
for (i = aStart; i <= aEnd; i++) {
aNode = a[i];
if (patched < bLeft) {
j = keyIndex[aNode.key];
if (isDefined(j)) {
bNode = b[j];
sources[j - bStart] = i;
if (pos > j) {
moved = true;
}
else {
pos = j;
}
if (bNode.dom) {
b[j] = bNode = directClone(bNode);
}
patch(aNode, bNode, dom, lifecycle, context, isSVG);
patched++;
a[i] = null;
}
}
}
}
// fast path: if nothing was patched, remove all old children and mount all new ones
if (aLeft === aLength && patched === 0) {
removeAllChildren(dom, a);
mountArrayChildren(b, dom, lifecycle, context, isSVG);
}
else {
i = aLeft - patched;
while (i > 0) {
aNode = a[aStart++];
if (!isNull(aNode)) {
remove(aNode, dom);
i--;
}
}
if (moved) {
var seq = lis_algorithm(sources);
j = seq.length - 1;
for (i = bLeft - 1; i >= 0; i--) {
if (sources[i] === -1) {
pos = i + bStart;
node = b[pos];
if (node.dom) {
b[pos] = node = directClone(node);
}
nextPos = pos + 1;
insertOrAppend(dom, mount(node, null, lifecycle, context, isSVG), nextPos < bLength ? b[nextPos].dom : null);
}
else if (j < 0 || i !== seq[j]) {
pos = i + bStart;
node = b[pos];
nextPos = pos + 1;
insertOrAppend(dom, node.dom, nextPos < bLength ? b[nextPos].dom : null);
}
else {
j--;
}
}
}
else if (patched !== bLeft) {
// when patched count doesn't match b length we need to insert those new ones
// loop backwards so we can use insertBefore
for (i = bLeft - 1; i >= 0; i--) {
if (sources[i] === -1) {
pos = i + bStart;
node = b[pos];
if (node.dom) {
b[pos] = node = directClone(node);
}
nextPos = pos + 1;
insertOrAppend(dom, mount(node, null, lifecycle, context, isSVG), nextPos < bLength ? b[nextPos].dom : null);
}
}
}
}
}
}
// https://en.wikipedia.org/wiki/Longest_increasing_subsequence
function lis_algorithm(arr) {
var p = arr.slice();
var result = [0];
var i;
var j;
var u;
var v;
var c;
var len = arr.length;
for (i = 0; i < len; i++) {
var arrI = arr[i];
if (arrI !== -1) {
j = result[result.length - 1];
if (arr[j] < arrI) {
p[i] = j;
result.push(i);
continue;
}
u = 0;
v = result.length - 1;
while (u < v) {
c = ((u + v) / 2) | 0;
if (arr[result[c]] < arrI) {
u = c + 1;
}
else {
v = c;
}
}
if (arrI < arr[result[u]]) {
if (u > 0) {
p[i] = result[u - 1];
}
result[u] = i;
}
}
}
u = result.length;
v = result[u - 1];
while (u-- > 0) {
result[u] = v;
v = p[v];
}
return result;
}
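/*
 * Illustrative sketch (not part of the library source): lis_algorithm returns the indices
 * of a longest run of increasing values in the `sources` array, ignoring -1 entries (new
 * nodes). patchKeyedChildren uses that index list to decide which existing DOM nodes can
 * stay in place instead of being moved.
 *
 *   // lis_algorithm([1, 0, 2])     -> [1, 2]
 *   // lis_algorithm([2, -1, 0, 1]) -> [2, 3]   (the -1 entry is skipped)
 */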
var roots = options.roots;
{
if (isBrowser && document.body === null) {
warning('Inferno warning: you cannot initialize inferno without "document.body". Wait on "DOMContentLoaded" event, add script to bottom of body, or use async/defer attributes on script tag.');
}
}
var documentBody = isBrowser ? document.body : null;
function render(input, parentDom, callback) {
// Development warning
{
if (documentBody === parentDom) {
throwError('you cannot render() to the "document.body". Use an empty element as a container instead.');
}
}
if (input === NO_OP) {
return;
}
var rootLen = roots.length;
var rootInput;
var index;
for (index = 0; index < rootLen; index++) {
if (roots[index] === parentDom) {
rootInput = parentDom.$V;
break;
}
}
if (isUndefined(rootInput)) {
if (!isInvalid(input)) {
if (input.dom) {
input = directClone(input);
}
if (isNull(parentDom.firstChild)) {
mount(input, parentDom, LIFECYCLE, EMPTY_OBJ, false);
parentDom.$V = input;
roots.push(parentDom);
}
else {
hydrate(input, parentDom);
}
rootInput = input;
}
}
else {
if (isNullOrUndef(input)) {
remove(rootInput, parentDom);
roots.splice(index, 1);
}
else {
if (input.dom) {
input = directClone(input);
}
patch(rootInput, input, parentDom, LIFECYCLE, EMPTY_OBJ, false);
rootInput = parentDom.$V = input;
}
}
if (LIFECYCLE.length > 0) {
callAll(LIFECYCLE);
}
if (isFunction(callback)) {
callback();
}
if (rootInput && rootInput.flags & 14 /* Component */) {
return rootInput.children;
}
}
function createRenderer(parentDom) {
return function renderer(lastInput, nextInput) {
if (!parentDom) {
parentDom = lastInput;
}
render(nextInput, parentDom);
};
}
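/*
 * Illustrative sketch (not part of the library source): createRenderer returns a curried
 * render function, handy for stream/scan style state handling. The container id is only
 * an example.
 *
 *   // var update = createRenderer(document.getElementById('app'));
 *   // update(null, firstVNode);        // initial render
 *   // update(firstVNode, secondVNode); // later renders; the first argument is ignored
 *   //                                  // once a parent DOM node has been captured
 */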
function createPortal(children, container) {
return createVNode(1024 /* Portal */, container, null, children, 0 /* UnknownChildren */, null, isInvalid(children) ? null : children.key, null);
}
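/*
 * Illustrative sketch (not part of the library source): a portal keeps its place in the
 * component tree but renders its children into another container. The flag value and the
 * ids below are assumptions used only for illustration.
 *
 *   // createPortal(createVNode(1, 'div', 'modal'), document.getElementById('modal-root'))
 */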
var resolvedPromise = typeof Promise === 'undefined' ? null : Promise.resolve();
var fallbackMethod = typeof requestAnimationFrame === 'undefined' ? setTimeout : requestAnimationFrame;
function nextTick(fn) {
if (resolvedPromise) {
return resolvedPromise.then(fn);
}
return fallbackMethod(fn);
}
function queueStateChanges(component, newState, callback) {
if (isFunction(newState)) {
newState = newState(component.state, component.props, component.context);
}
var pending = component.$PS;
if (isNullOrUndef(pending)) {
component.$PS = newState;
}
else {
for (var stateKey in newState) {
pending[stateKey] = newState[stateKey];
}
}
if (!component.$PSS && !component.$BR) {
if (!component.$UPD) {
component.$PSS = true;
component.$UPD = true;
applyState(component, false, callback);
component.$UPD = false;
}
else {
// Async
var queue = component.$QU;
if (isNull(queue)) {
queue = component.$QU = [];
nextTick(promiseCallback(component, queue));
}
if (isFunction(callback)) {
queue.push(callback);
}
}
}
else {
component.$PSS = true;
if (component.$BR && isFunction(callback)) {
LIFECYCLE.push(callback.bind(component));
}
}
}
function promiseCallback(component, queue) {
return function () {
component.$QU = null;
component.$UPD = true;
applyState(component, false, function () {
for (var i = 0, len = queue.length; i < len; i++) {
queue[i].call(component);
}
});
component.$UPD = false;
};
}
function applyState(component, force, callback) {
if (component.$UN) {
return;
}
if (force || !component.$BR) {
component.$PSS = false;
var pendingState = component.$PS;
var prevState = component.state;
var nextState = combineFrom(prevState, pendingState);
var props = component.props;
var context = component.context;
component.$PS = null;
var vNode = component.$V;
var lastInput = component.$LI;
var parentDom = lastInput.dom && lastInput.dom.parentNode;
updateClassComponent(component, nextState, vNode, props, parentDom, LIFECYCLE, context, (vNode.flags & 32 /* SvgElement */) > 0, force, true);
if (component.$UN) {
return;
}
if ((component.$LI.flags & 1024 /* Portal */) === 0) {
var dom = component.$LI.dom;
while (!isNull((vNode = vNode.parentVNode))) {
if ((vNode.flags & 14 /* Component */) > 0) {
vNode.dom = dom;
}
}
}
if (LIFECYCLE.length > 0) {
callAll(LIFECYCLE);
}
}
else {
component.state = component.$PS;
component.$PS = null;
}
if (isFunction(callback)) {
callback.call(component);
}
}
var Component = function Component(props, context) {
this.state = null;
// Internal properties
this.$BR = false; // BLOCK RENDER
this.$BS = true; // BLOCK STATE
this.$PSS = false; // PENDING SET STATE
this.$PS = null; // PENDING STATE (PARTIAL or FULL)
this.$LI = null; // LAST INPUT
this.$V = null; // VNODE
this.$UN = false; // UNMOUNTED
this.$CX = null; // CHILDCONTEXT
this.$UPD = true; // UPDATING
this.$QU = null; // QUEUE
/** @type {object} */
this.props = props || EMPTY_OBJ;
/** @type {object} */
this.context = context || EMPTY_OBJ; // context should not be mutable
};
Component.prototype.forceUpdate = function forceUpdate (callback) {
if (this.$UN) {
return;
}
applyState(this, true, callback);
};
Component.prototype.setState = function setState (newState, callback) {
if (this.$UN) {
return;
}
if (!this.$BS) {
queueStateChanges(this, newState, callback);
}
else {
// Development warning
{
throwError('cannot update state via setState() in componentWillUpdate() or constructor.');
}
return;
}
};
// tslint:disable-next-line:no-empty
Component.prototype.render = function render (nextProps, nextState, nextContext) { };
// Public
Component.defaultProps = null;
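/*
 * Illustrative sketch (not part of the library source): a minimal class component wired
 * through the setState/applyState machinery above. The createComponentVNode argument
 * shape is an assumption based on the exported API; all names are examples.
 *
 *   // class Counter extends Component {
 *   //     constructor(props) { super(props); this.state = { n: 0 }; }
 *   //     tick() { this.setState({ n: this.state.n + 1 }); }
 *   //     render(props, state) { return createVNode(1, 'span', null, createTextVNode('' + state.n), 2); }
 *   // }
 *   // render(createComponentVNode(4, Counter), document.getElementById('root'));
 */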
{
/* tslint:disable-next-line:no-empty */
var testFunc = function testFn() { };
if ((testFunc.name || testFunc.toString()).indexOf('testFn') === -1) {
warning("It looks like you're using a minified copy of the development build " +
'of Inferno. When deploying Inferno apps to production, make sure to use ' +
'the production build which skips development warnings and is faster. ' +
'See http://infernojs.org for more details.');
}
}
var version = "4.0.8";
exports.Component = Component;
exports.EMPTY_OBJ = EMPTY_OBJ;
exports.NO_OP = NO_OP;
exports.createComponentVNode = createComponentVNode;
exports.createPortal = createPortal;
exports.createRenderer = createRenderer;
exports.createTextVNode = createTextVNode;
exports.createVNode = createVNode;
exports.directClone = directClone;
exports.getFlagsForElementVnode = getFlagsForElementVnode;
exports.getNumberStyleValue = getNumberStyleValue;
exports.hydrate = hydrate;
exports.linkEvent = linkEvent;
exports.normalizeProps = normalizeProps;
exports.options = options;
exports.render = render;
exports.version = version;
Object.defineProperty(exports, '__esModule', { value: true });
})));<|fim▁end|> | node = b[bStart];
|
<|file_name|>CoAPTxRequestPacketTest.java<|end_file_name|><|fim▁begin|>/**
* Copyright 2017, Digi International Inc.
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, you can obtain one at http://mozilla.org/MPL/2.0/.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
package com.digi.xbee.api.packet.thread;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.IsEqual.equalTo;
import static org.hamcrest.core.IsNot.not;
import static org.hamcrest.core.IsNull.nullValue;
import static org.junit.Assert.*;
import java.net.Inet6Address;
import java.net.UnknownHostException;
import java.util.Arrays;
import java.util.LinkedHashMap;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import com.digi.xbee.api.models.CoAPURI;
import com.digi.xbee.api.models.HTTPMethodEnum;
import com.digi.xbee.api.models.RemoteATCommandOptions;
import com.digi.xbee.api.packet.APIFrameType;
import com.digi.xbee.api.packet.thread.CoAPTxRequestPacket;
import com.digi.xbee.api.utils.HexUtils;
@PrepareForTest({Inet6Address.class, CoAPTxRequestPacket.class})
@RunWith(PowerMockRunner.class)
public class CoAPTxRequestPacketTest {
// Constants.
private static final String IPV6_ADDRESS = "FDB3:0001:0002:0000:0004:0005:0006:0007";
// Variables.
private int frameType = APIFrameType.COAP_TX_REQUEST.getValue();
private int frameID = 0x01;
private int options = RemoteATCommandOptions.OPTION_NONE;
private Inet6Address destAddress;
private HTTPMethodEnum method = HTTPMethodEnum.GET;
private String uriData = CoAPURI.URI_DATA_TRANSMISSION;
private byte[] data = "Test".getBytes();
@Rule
public ExpectedException exception = ExpectedException.none();
public CoAPTxRequestPacketTest() throws Exception {
destAddress = (Inet6Address) Inet6Address.getByName(IPV6_ADDRESS);
}
/**
* @throws java.lang.Exception
*/
@BeforeClass
public static void setUpBeforeClass() throws Exception {
}
/**
* @throws java.lang.Exception
*/
@AfterClass
public static void tearDownAfterClass() throws Exception {
}
/**
* @throws java.lang.Exception
*/
@Before
public void setUp() throws Exception {
}
/**
* @throws java.lang.Exception
*/
@After
public void tearDown() throws Exception {
}
/**
* Test method for {@link com.digi.xbee.api.packet.thread.CoAPTxRequestPacket#createPacket(byte[])}.
*
* <p>A {@code NullPointerException} exception must be thrown when parsing
* a {@code null} byte array.</p>
*/
@Test
public final void testCreatePacketNullPayload() {
// Set up the resources for the test.
byte[] payload = null;
exception.expect(NullPointerException.class);
exception.expectMessage(is(equalTo("CoAP Tx Request packet payload cannot be null.")));
// Call the method under test that should throw a NullPointerException.
CoAPTxRequestPacket.createPacket(payload);
}
/**
* Test method for {@link com.digi.xbee.api.packet.thread.CoAPTxRequestPacket#createPacket(byte[])}.
*
* <p>An {@code IllegalArgumentException} exception must be thrown when
* parsing an empty byte array.</p>
*/
@Test
public final void testCreatePacketEmptyPayload() {
// Set up the resources for the test.
byte[] payload = new byte[0];
exception.expect(IllegalArgumentException.class);
exception.expectMessage(is(equalTo("Incomplete CoAP Tx Request packet.")));
// Call the method under test that should throw an IllegalArgumentException.
CoAPTxRequestPacket.createPacket(payload);
}
/**
* Test method for {@link com.digi.xbee.api.packet.thread.CoAPTxRequestPacket#createPacket(byte[])}.
*
* <p>An {@code IllegalArgumentException} exception must be thrown when
* parsing a byte array shorter than the needed one is provided.</p>
*/
@Test
public final void testCreatePacketPayloadShorterThanNeeded() {
// Set up the resources for the test.
byte[] payload = new byte[25];
payload[0] = (byte)frameType;
payload[1] = (byte)frameID;
payload[2] = (byte)options;
payload[3] = (byte)method.getValue();
System.arraycopy(destAddress.getAddress(), 0, payload, 4, destAddress.getAddress().length);
payload[20] = (byte)(uriData.length());
System.arraycopy(uriData.getBytes(), 0, payload, 21, uriData.getBytes().length - 1);
exception.expect(IllegalArgumentException.class);
exception.expectMessage(is(equalTo("Incomplete CoAP Tx Request packet.")));
// Call the method under test that should throw an IllegalArgumentException.
CoAPTxRequestPacket.createPacket(payload);
}
/**
* Test method for {@link com.digi.xbee.api.packet.thread.CoAPTxRequestPacket#createPacket(byte[])}.
*
* <p>An {@code IllegalArgumentException} exception must be thrown when
* parsing a byte array not including the Frame type.</p>
*/
@Test
public final void testCreatePacketPayloadNotIncludingFrameType() {
// Set up the resources for the test.
byte[] payload = new byte[25 + data.length];
payload[0] = (byte)frameID;
payload[1] = (byte)options;
payload[2] = (byte)method.getValue();
System.arraycopy(destAddress.getAddress(), 0, payload, 3, destAddress.getAddress().length);
payload[20] = (byte)(uriData.length());
System.arraycopy(uriData.getBytes(), 0, payload, 20, uriData.getBytes().length);
System.arraycopy(data, 0, payload, 20 + uriData.getBytes().length, data.length);
exception.expect(IllegalArgumentException.class);
exception.expectMessage(is(equalTo("Payload is not a CoAP Tx Request packet.")));
// Call the method under test that should throw an IllegalArgumentException.
CoAPTxRequestPacket.createPacket(payload);
}
/**
* Test method for {@link com.digi.xbee.api.packet.thread.CoAPTxRequestPacket#createPacket(byte[])}.
*
* <p>An {@code IllegalArgumentException} exception must be thrown when
* parsing a byte array with an invalid IPv6 address.</p>
*/
@Test
public final void testCreatePacketPayloadInvalidIP() throws Exception {
// Set up the resources for the test.
byte[] payload = new byte[26];
payload[0] = (byte)frameType;
payload[1] = (byte)frameID;
payload[2] = (byte)options;
payload[3] = (byte)method.getValue();
System.arraycopy(destAddress.getAddress(), 0, payload, 4, destAddress.getAddress().length);
payload[20] = (byte)(uriData.length());
System.arraycopy(uriData.getBytes(), 0, payload, 21, uriData.getBytes().length);
PowerMockito.mockStatic(Inet6Address.class);
PowerMockito.when(Inet6Address.getByAddress(Mockito.any(byte[].class))).thenThrow(new UnknownHostException());
exception.expect(IllegalArgumentException.class);
// Call the method under test that should throw an IllegalArgumentException.
CoAPTxRequestPacket.createPacket(payload);
}
/**
* Test method for {@link com.digi.xbee.api.packet.thread.CoAPTxRequestPacket#createPacket(byte[])}.
*
* <p>A valid CoAP TX Request packet with the provided options and without
* data is created.</p>
*/
@Test
public final void testCreatePacketValidPayloadWithoutData() {
// Set up the resources for the test.
byte[] payload = new byte[26];
payload[0] = (byte)frameType;
payload[1] = (byte)frameID;
payload[2] = (byte)options;
payload[3] = (byte)method.getValue();
System.arraycopy(destAddress.getAddress(), 0, payload, 4, destAddress.getAddress().length);
payload[20] = (byte)(uriData.length());
System.arraycopy(uriData.getBytes(), 0, payload, 21, uriData.getBytes().length);
// Call the method under test.
CoAPTxRequestPacket packet = CoAPTxRequestPacket.createPacket(payload);<|fim▁hole|> assertThat("Returned length is not the expected one", packet.getPacketLength(), is(equalTo(payload.length)));
assertThat("Returned frame ID is not the expected one", packet.getFrameID(), is(equalTo(frameID)));
assertThat("Returned options are not the expected ones", packet.getTransmitOptions(), is(equalTo(options)));
assertThat("Returned RESTul method is not the expected one", packet.getMethod(), is(equalTo(method)));
assertThat("Returned dest address is not the expected one", packet.getDestAddress(), is(equalTo(destAddress)));
assertThat("Returned URI is not the expected one", packet.getURI(), is(equalTo(uriData)));
assertThat("Returned data is not the expected one", packet.getPayload(), is(nullValue()));
assertThat("Returned payload array is not the expected one", packet.getPacketData(), is(equalTo(payload)));
}
/**
* Test method for {@link com.digi.xbee.api.packet.thread.CoAPTxRequestPacket#createPacket(byte[])}.
*
* <p>A valid CoAP TX Request packet with the provided options and data is
* created.</p>
*/
@Test
public final void testCreatePacketValidPayloadWithData() {
// Set up the resources for the test.
byte[] payload = new byte[26 + data.length];
payload[0] = (byte)frameType;
payload[1] = (byte)frameID;
payload[2] = (byte)options;
payload[3] = (byte)method.getValue();
System.arraycopy(destAddress.getAddress(), 0, payload, 4, destAddress.getAddress().length);
payload[20] = (byte)(uriData.length());
System.arraycopy(uriData.getBytes(), 0, payload, 21, uriData.getBytes().length);
System.arraycopy(data, 0, payload, 21 + uriData.getBytes().length, data.length);
// Call the method under test.
CoAPTxRequestPacket packet = CoAPTxRequestPacket.createPacket(payload);
// Verify the result.
assertThat("Returned length is not the expected one", packet.getPacketLength(), is(equalTo(payload.length)));
assertThat("Returned frame ID is not the expected one", packet.getFrameID(), is(equalTo(frameID)));
assertThat("Returned options are not the expected ones", packet.getTransmitOptions(), is(equalTo(options)));
assertThat("Returned RESTul method is not the expected one", packet.getMethod(), is(equalTo(method)));
assertThat("Returned dest address is not the expected one", packet.getDestAddress(), is(equalTo(destAddress)));
assertThat("Returned URI is not the expected one", packet.getURI(), is(equalTo(uriData)));
assertThat("Returned data is not the expected one", packet.getPayload(), is(equalTo(data)));
assertThat("Returned payload array is not the expected one", packet.getPacketData(), is(equalTo(payload)));
}
/**
* Test method for {@link com.digi.xbee.api.packet.thread.CoAPTxRequestPacket#CoAPTxRequestPacket(int, int, HTTPMethodEnum, Inet6Address, String, byte[])}.
*
* <p>Construct a new CoAP TX Request packet with a frame ID bigger than
* 255. This must throw an {@code IllegalArgumentException}.</p>
*/
@Test
public final void testCreateCoAPTxRequestPacketFrameIDBiggerThan255() {
// Set up the resources for the test.
int frameID = 524;
exception.expect(IllegalArgumentException.class);
exception.expectMessage(is(equalTo("Frame ID must be between 0 and 255.")));
// Call the method under test that should throw an IllegalArgumentException.
new CoAPTxRequestPacket(frameID, options, method, destAddress, uriData, data);
}
/**
* Test method for {@link com.digi.xbee.api.packet.thread.CoAPTxRequestPacket#CoAPTxRequestPacket(int, int, HTTPMethodEnum, Inet6Address, String, byte[])}.
*
* <p>Construct a new CoAP TX Request packet with a negative frame ID. This
* must throw an {@code IllegalArgumentException}.</p>
*/
@Test
public final void testCreateCoAPTxRequestPacketFrameIDNegative() {
// Set up the resources for the test.
int frameID = -6;
exception.expect(IllegalArgumentException.class);
exception.expectMessage(is(equalTo("Frame ID must be between 0 and 255.")));
// Call the method under test that should throw an IllegalArgumentException.
new CoAPTxRequestPacket(frameID, options, method, destAddress, uriData, data);
}
/**
* Test method for {@link com.digi.xbee.api.packet.thread.CoAPTxRequestPacket#CoAPTxRequestPacket(int, int, HTTPMethodEnum, Inet6Address, String, byte[])}.
*
* <p>Construct a new CoAP TX Request packet with invalid transmit options.
* This must throw an {@code IllegalArgumentException}.</p>
*/
@Test
public final void testCreateCoAPTxRequestPacketTransmitOptionsInvalid() {
// Set up the resources for the test.
int options = -1;
exception.expect(IllegalArgumentException.class);
exception.expectMessage(is(equalTo("Transmit options can only be " +
RemoteATCommandOptions.OPTION_NONE +
" or " + RemoteATCommandOptions.OPTION_APPLY_CHANGES + ".")));
// Call the method under test that should throw an IllegalArgumentException.
new CoAPTxRequestPacket(frameID, options, method, destAddress, uriData, data);
}
/**
* Test method for {@link com.digi.xbee.api.packet.thread.CoAPTxRequestPacket#CoAPTxRequestPacket(int, int, HTTPMethodEnum, Inet6Address, String, byte[])}.
*
* <p>Construct a new CoAP TX Request packet with a null RESTful method.
* This must throw a {@code NullPointerException}.</p>
*/
@Test
public final void testCreateCoAPTxRequestPacketMethodNull() {
// Set up the resources for the test.
HTTPMethodEnum method = null;
exception.expect(NullPointerException.class);
exception.expectMessage(is(equalTo("HTTP Method cannot be null.")));
// Call the method under test that should throw a NullPointerException.
new CoAPTxRequestPacket(frameID, options, method, destAddress, uriData, data);
}
/**
* Test method for {@link com.digi.xbee.api.packet.thread.CoAPTxRequestPacket#CoAPTxRequestPacket(int, int, HTTPMethodEnum, Inet6Address, String, byte[])}.
*
* <p>Construct a new CoAP TX Request packet with a null destination
* address. This must throw a {@code NullPointerException}.</p>
*/
@Test
public final void testCreateCoAPTxRequestPacketDestAddressNull() {
// Set up the resources for the test.
Inet6Address destAddress = null;
exception.expect(NullPointerException.class);
exception.expectMessage(is(equalTo("Destination address cannot be null.")));
// Call the method under test that should throw a NullPointerException.
new CoAPTxRequestPacket(frameID, options, method, destAddress, uriData, data);
}
/**
* Test method for {@link com.digi.xbee.api.packet.thread.CoAPTxRequestPacket#CoAPTxRequestPacket(int, int, HTTPMethodEnum, Inet6Address, String, byte[])}.
*
* <p>Construct a new CoAP TX Request packet with a null URI. This must
* throw a {@code NullPointerException}.</p>
*/
@Test
public final void testCreateCoAPTxRequestPacketURINull() {
// Set up the resources for the test.
String uriData = null;
exception.expect(NullPointerException.class);
exception.expectMessage(is(equalTo("URI cannot be null.")));
// Call the method under test that should throw a NullPointerException.
new CoAPTxRequestPacket(frameID, options, method, destAddress, uriData, data);
}
/**
* Test method for {@link com.digi.xbee.api.packet.thread.CoAPTxRequestPacket#CoAPTxRequestPacket(int, int, HTTPMethodEnum, Inet6Address, String, byte[])}.
*
* <p>Construct a new CoAP TX Request packet without data ({@code null}).</p>
*/
@Test
public final void testCreateCoAPTxRequestPacketValidDataNull() {
// Set up the resources for the test.
data = null;
int expectedLength = 26;
// Call the method under test.
CoAPTxRequestPacket packet = new CoAPTxRequestPacket(frameID, options, method, destAddress, uriData, data);
// Verify the result.
assertThat("Returned length is not the expected one", packet.getPacketLength(), is(equalTo(expectedLength)));
assertThat("Returned frame ID is not the expected one", packet.getFrameID(), is(equalTo(frameID)));
assertThat("Returned options are not the expected ones", packet.getTransmitOptions(), is(equalTo(options)));
assertThat("Returned RESTul method is not the expected one", packet.getMethod(), is(equalTo(method)));
assertThat("Returned dest address is not the expected one", packet.getDestAddress(), is(equalTo(destAddress)));
assertThat("Returned URI is not the expected one", packet.getURI(), is(equalTo(uriData)));
assertThat("Returned data is not the expected one", packet.getPayload(), is(nullValue()));
assertThat("CoAP TX Request packet needs API Frame ID", packet.needsAPIFrameID(), is(equalTo(true)));
}
/**
* Test method for {@link com.digi.xbee.api.packet.thread.CoAPTxRequestPacket#CoAPTxRequestPacket(int, int, HTTPMethodEnum, Inet6Address, String, byte[])}.
*
* <p>Construct a new CoAP TX Request packet with data.</p>
*/
@Test
public final void testCreateCoAPTxRequestPacketValidDataNotNull() {
// Set up the resources for the test.
int expectedLength = 26 + data.length;
// Call the method under test.
CoAPTxRequestPacket packet = new CoAPTxRequestPacket(frameID, options, method, destAddress, uriData, data);
// Verify the result.
assertThat("Returned length is not the expected one", packet.getPacketLength(), is(equalTo(expectedLength)));
assertThat("Returned frame ID is not the expected one", packet.getFrameID(), is(equalTo(frameID)));
assertThat("Returned options are not the expected ones", packet.getTransmitOptions(), is(equalTo(options)));
assertThat("Returned RESTul method is not the expected one", packet.getMethod(), is(equalTo(method)));
assertThat("Returned dest address is not the expected one", packet.getDestAddress(), is(equalTo(destAddress)));
assertThat("Returned URI is not the expected one", packet.getURI(), is(equalTo(uriData)));
assertThat("Returned data is not the expected one", packet.getPayload(), is(data));
assertThat("CoAP TX Request packet needs API Frame ID", packet.needsAPIFrameID(), is(equalTo(true)));
}
/**
* Test method for {@link com.digi.xbee.api.packet.thread.CoAPTxRequestPacket#getAPIData()}.
*
* <p>Test the get API parameters with a {@code null} received data.</p>
*/
@Test
public final void testGetAPIDataReceivedDataNull() {
// Set up the resources for the test.
byte[] data = null;
CoAPTxRequestPacket packet = new CoAPTxRequestPacket(frameID, options, method, destAddress, uriData, data);
int expectedLength = 25;
byte[] expectedData = new byte[expectedLength];
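		// Expected API data layout (no payload case): frame ID (1 byte) + transmit
		// options (1 byte) + RESTful method (1 byte) + 128-bit IPv6 destination
		// address (16 bytes) + URI length (1 byte) + URI bytes. Payload bytes, when
		// present, are appended after the URI.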
expectedData[0] = (byte)frameID;
expectedData[1] = (byte)options;
expectedData[2] = (byte)method.getValue();
System.arraycopy(destAddress.getAddress(), 0, expectedData, 3, destAddress.getAddress().length);
expectedData[19] = (byte)(uriData.length());
System.arraycopy(uriData.getBytes(), 0, expectedData, 20, uriData.getBytes().length);
// Call the method under test.
byte[] apiData = packet.getAPIData();
// Verify the result.
assertThat("API data is not the expected", apiData, is(equalTo(expectedData)));
}
/**
* Test method for {@link com.digi.xbee.api.packet.thread.CoAPTxRequestPacket#getAPIData()}.
*
* <p>Test the get API parameters with a not-{@code null} received data.</p>
*/
@Test
public final void testGetAPIDataReceivedDataNotNull() {
// Set up the resources for the test.
CoAPTxRequestPacket packet = new CoAPTxRequestPacket(frameID, options, method, destAddress, uriData, data);
int expectedLength = 25 + data.length;
byte[] expectedData = new byte[expectedLength];
expectedData[0] = (byte)frameID;
expectedData[1] = (byte)options;
expectedData[2] = (byte)method.getValue();
System.arraycopy(destAddress.getAddress(), 0, expectedData, 3, destAddress.getAddress().length);
expectedData[19] = (byte)(uriData.length());
System.arraycopy(uriData.getBytes(), 0, expectedData, 20, uriData.getBytes().length);
System.arraycopy(data, 0, expectedData, 20 + uriData.getBytes().length, data.length);
// Call the method under test.
byte[] apiData = packet.getAPIData();
// Verify the result.
assertThat("API data is not the expected", apiData, is(equalTo(expectedData)));
}
/**
* Test method for {@link com.digi.xbee.api.packet.thread.CoAPTxRequestPacket#getAPIPacketParameters()}.
*
* <p>Test the get API parameters with a {@code null} received data.</p>
*/
@Test
public final void testGetAPIPacketParametersReceivedDataNull() {
// Set up the resources for the test.
byte[] data = null;
CoAPTxRequestPacket packet = new CoAPTxRequestPacket(frameID, options, method, destAddress, uriData, data);
// Call the method under test.
LinkedHashMap<String, String> packetParams = packet.getAPIPacketParameters();
// Verify the result.
assertThat("Packet parameters map size is not the expected one", packetParams.size(), is(equalTo(6)));
assertThat("Returned transmit options are not the expected ones", packetParams.get("Options"),
is(equalTo(HexUtils.prettyHexString(HexUtils.integerToHexString(options, 1)))));
assertThat("Returned HTTP method is not the expected one", packetParams.get("Method"),
is(equalTo(HexUtils.prettyHexString(HexUtils.integerToHexString(method.getValue(), 1)) + " (" + method.getName() + ")")));
assertThat("Returned dest address is not the expected one", packetParams.get("Destination address"),
is(equalTo(HexUtils.prettyHexString(destAddress.getAddress()) + " (" + destAddress.getHostAddress() + ")")));
assertThat("Returned URI length is not the expected one", packetParams.get("URI length"),
is(equalTo(HexUtils.prettyHexString(HexUtils.integerToHexString(uriData.length(), 1)) + " (" + uriData.length() + ")")));
assertThat("Returned URI is not the expected one", packetParams.get("URI"),
is(equalTo(HexUtils.prettyHexString(HexUtils.byteArrayToHexString(uriData.getBytes())) + " (" + uriData + ")")));
assertThat("RF data is not the expected", packetParams.get("RF data"), is(nullValue(String.class)));
}
/**
* Test method for {@link com.digi.xbee.api.packet.thread.CoAPTxRequestPacket#getAPIPacketParameters()}.
*
* <p>Test the get API parameters with a not-{@code null} received data.</p>
*/
@Test
public final void testGetAPIPacketParametersReceivedDataNotNull() {
// Set up the resources for the test.
CoAPTxRequestPacket packet = new CoAPTxRequestPacket(frameID, options, method, destAddress, uriData, data);
// Call the method under test.
LinkedHashMap<String, String> packetParams = packet.getAPIPacketParameters();
// Verify the result.
assertThat("Packet parameters map size is not the expected one", packetParams.size(), is(equalTo(7)));
assertThat("Returned transmit options are not the expected ones", packetParams.get("Options"),
is(equalTo(HexUtils.prettyHexString(HexUtils.integerToHexString(options, 1)))));
assertThat("Returned HTTP method is not the expected one", packetParams.get("Method"),
is(equalTo(HexUtils.prettyHexString(HexUtils.integerToHexString(method.getValue(), 1)) + " (" + method.getName() + ")")));
assertThat("Returned dest address is not the expected one", packetParams.get("Destination address"),
is(equalTo(HexUtils.prettyHexString(destAddress.getAddress()) + " (" + destAddress.getHostAddress() + ")")));
assertThat("Returned URI length is not the expected one", packetParams.get("URI length"),
is(equalTo(HexUtils.prettyHexString(HexUtils.integerToHexString(uriData.length(), 1)) + " (" + uriData.length() + ")")));
assertThat("Returned URI is not the expected one", packetParams.get("URI"),
is(equalTo(HexUtils.prettyHexString(HexUtils.byteArrayToHexString(uriData.getBytes())) + " (" + uriData + ")")));
assertThat("RF data is not the expected", packetParams.get("Payload"),
is(equalTo(HexUtils.prettyHexString(HexUtils.byteArrayToHexString(data)))));
}
/**
* Test method for {@link com.digi.xbee.api.packet.thread.CoAPTxRequestPacket#setDestAddress(Inet6Address)}.
*/
@Test
public final void testSetDestAddressNull() {
// Set up the resources for the test.
CoAPTxRequestPacket packet = new CoAPTxRequestPacket(frameID, options, method, destAddress, uriData, data);
exception.expect(NullPointerException.class);
exception.expectMessage(is(equalTo("Destination address cannot be null.")));
// Call the method under test that should throw a NullPointerException.
packet.setDestAddress(null);
}
/**
* Test method for {@link com.digi.xbee.api.packet.thread.CoAPTxRequestPacket#setDestAddress(Inet6Address)}.
*
* @throws Exception
*/
@Test
public final void testSetDestAddressNotNull() throws Exception {
// Set up the resources for the test.
CoAPTxRequestPacket packet = new CoAPTxRequestPacket(frameID, options, method, destAddress, uriData, data);
Inet6Address newAddress = (Inet6Address) Inet6Address.getByName("fd8a:cb11:ad71:0000:7662:c401:5efe:dc41");
// Call the method under test.
packet.setDestAddress(newAddress);
// Verify the result.
assertThat("Dest address is not the expected one", packet.getDestAddress(), is(equalTo(newAddress)));
}
/**
* Test method for {@link com.digi.xbee.api.packet.thread.CoAPTxRequestPacket#setTransmitOptions(int)}.
*/
@Test
public final void testSetTransmitOptionsATURIOptionsIllegal() {
// Set up the resources for the test.
CoAPTxRequestPacket packet = new CoAPTxRequestPacket(frameID, options, method, destAddress, CoAPURI.URI_AT_COMMAND, data);
exception.expect(IllegalArgumentException.class);
exception.expectMessage(is(equalTo("Transmit options can only be " +
RemoteATCommandOptions.OPTION_NONE +
" or " + RemoteATCommandOptions.OPTION_APPLY_CHANGES + ".")));
// Call the method under test that should throw an IllegalArgumentException.
packet.setTransmitOptions(0x03);
}
/**
* Test method for {@link com.digi.xbee.api.packet.thread.CoAPTxRequestPacket#setTransmitOptions(int)}.
*/
@Test
public final void testSetTransmitOptionsTXURIOptionsIllegal() {
// Set up the resources for the test.
CoAPTxRequestPacket packet = new CoAPTxRequestPacket(frameID, options, method, destAddress, CoAPURI.URI_DATA_TRANSMISSION, data);
exception.expect(IllegalArgumentException.class);
exception.expectMessage(is(equalTo("Transmit options can only be " +
RemoteATCommandOptions.OPTION_NONE +
" or " + RemoteATCommandOptions.OPTION_APPLY_CHANGES + ".")));
// Call the method under test that should throw an IllegalArgumentException.
packet.setTransmitOptions(0x02);
}
/**
* Test method for {@link com.digi.xbee.api.packet.thread.CoAPTxRequestPacket#setTransmitOptions(int)}.
*/
@Test
public final void testTransmitOptionsValid() {
// Set up the resources for the test.
CoAPTxRequestPacket packet = new CoAPTxRequestPacket(frameID, options, method, destAddress, uriData, data);
int newOptions = 0x00;
// Call the method under test.
packet.setTransmitOptions(newOptions);
// Verify the result.
assertThat("Transmit options are not the expected ones", packet.getTransmitOptions(), is(equalTo(newOptions)));
}
/**
* Test method for {@link com.digi.xbee.api.packet.thread.CoAPTxRequestPacket#setMethod(HTTPMethodEnum)}.
*/
@Test
	public final void testSetMethodNull() {
// Set up the resources for the test.
CoAPTxRequestPacket packet = new CoAPTxRequestPacket(frameID, options, method, destAddress, uriData, data);
exception.expect(NullPointerException.class);
exception.expectMessage(is(equalTo("HTTP Method cannot be null.")));
// Call the method under test that should throw a NullPointerException.
packet.setMethod(null);
}
/**
* Test method for {@link com.digi.xbee.api.packet.thread.CoAPTxRequestPacket#setMethod(HTTPMethodEnum)}.
*/
@Test
public final void testSetMethodNotNull() {
// Set up the resources for the test.
CoAPTxRequestPacket packet = new CoAPTxRequestPacket(frameID, options, method, destAddress, uriData, data);
HTTPMethodEnum newMethod = HTTPMethodEnum.PUT;
// Call the method under test.
packet.setMethod(newMethod);
// Verify the result.
assertThat("HTTP method is not the expected one", packet.getMethod(), is(equalTo(newMethod)));
}
/**
* Test method for {@link com.digi.xbee.api.packet.thread.CoAPTxRequestPacket#getPayload()}.
*/
@Test
public final void testGetDataNullData() {
// Set up the resources for the test.
byte[] data = null;
CoAPTxRequestPacket packet = new CoAPTxRequestPacket(frameID, options, method, destAddress, uriData, data);
// Call the method under test.
byte[] result = packet.getPayload();
// Verify the result.
assertThat("RF data must be the same", result, is(equalTo(data)));
assertThat("RF data must be null", result, is(nullValue(byte[].class)));
}
/**
* Test method for {@link com.digi.xbee.api.packet.thread.CoAPTxRequestPacket#getPayload()}.
*/
@Test
public final void testGetDataValidData() {
// Set up the resources for the test.
CoAPTxRequestPacket packet = new CoAPTxRequestPacket(frameID, options, method, destAddress, uriData, data);
// Call the method under test.
byte[] result = packet.getPayload();
// Verify the result.
assertThat("Data must be the same", result, is(equalTo(data)));
assertThat("Data must not be the same object", result.hashCode(), is(not(equalTo(data.hashCode()))));
}
/**
* Test method for {@link com.digi.xbee.api.packet.thread.CoAPTxRequestPacket#setPayload(byte[])}.
*/
@Test
public final void testSetDataNullData() {
// Set up the resources for the test.
byte[] newData = null;
CoAPTxRequestPacket packet = new CoAPTxRequestPacket(frameID, options, method, destAddress, uriData, data);
// Call the method under test.
packet.setPayload(newData);
byte[] result = packet.getPayload();
// Verify the result.
assertThat("Data must be the same", result, is(equalTo(newData)));
assertThat("Data must be null", result, is(nullValue(byte[].class)));
}
/**
* Test method for {@link com.digi.xbee.api.packet.thread.CoAPTxRequestPacket#setPayload(byte[])}.
*/
@Test
public final void testSetDataValidData() {
// Set up the resources for the test.
byte[] newData = "New data".getBytes();
CoAPTxRequestPacket packet = new CoAPTxRequestPacket(frameID, options, method, destAddress, uriData, data);
// Call the method under test.
packet.setPayload(newData);
byte[] result = packet.getPayload();
// Verify the result.
assertThat("Data must be the same", result, is(equalTo(newData)));
}
/**
* Test method for {@link com.digi.xbee.api.packet.thread.CoAPTxRequestPacket#setPayload(byte[])}.
*/
@Test
public final void testSetDataAndModifyOriginal() {
// Set up the resources for the test.
byte[] newData = "New data".getBytes();
CoAPTxRequestPacket packet = new CoAPTxRequestPacket(frameID, options, method, destAddress, uriData, data);
// Call the method under test.
packet.setPayload(newData);
byte[] backup = Arrays.copyOf(newData, newData.length);
newData[0] = 0x00;
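		// Mutate the caller's array after it has been handed to the packet; the packet
		// is expected to keep its own defensive copy, so the stored payload must still
		// match the backup taken before the mutation.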
byte[] result = packet.getPayload();
// Verify the result.
assertThat("Data must be the same as the setted data", result, is(equalTo(backup)));
assertThat("Data must not be the current value of received data", result, is(not(equalTo(data))));
assertThat("Data must not be the same object", result.hashCode(), is(not(equalTo(backup.hashCode()))));
assertThat("Data must not be the same object", result.hashCode(), is(not(equalTo(data.hashCode()))));
}
}<|fim▁end|> |
// Verify the result. |
<|file_name|>login.module.ts<|end_file_name|><|fim▁begin|>import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
<|fim▁hole|>import { LoginComponent } from './login.component';
@NgModule({
declarations: [
LoginComponent
],
imports: [
CommonModule,
LoginRoutingModule
]
})
export class LoginModule { }<|fim▁end|> | import { LoginRoutingModule } from './login-routing.module'; |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
from django.contrib.postgres.fields.jsonb import JSONField
class Supplier(models.Model):
name = models.CharField(max_length=50)
tax_id = models.CharField(max_length=10)
def __str__(self):
return self.name
class Bargain(models.Model):
sku = models.CharField(max_length=20)
price = models.DecimalField(max_digits=7, decimal_places=2)
supplier = models.ForeignKey(Supplier, on_delete=models.CASCADE,
db_index=True)
info = JSONField(db_index=True) # This will create a btree index, not GIN
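    # Illustrative note (not part of the original model): to index the JSON
    # contents with GIN instead, one could drop db_index=True and declare the
    # index explicitly, e.g. (assuming Django >= 1.11 on PostgreSQL):
    #
    #     from django.contrib.postgres.indexes import GinIndex
    #
    #     class Meta:
    #         indexes = [GinIndex(fields=['info'])]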
def __str__(self):
return self.sku
@property
def description(self):
return self.info.get('description', '')
@property<|fim▁hole|> def acquire_cost(self):
return self.info.get('acquire_cost', '')
@property
def color(self):
return self.info.get('color', '')<|fim▁end|> | def sale_price(self):
return self.info.get('sale_price', '')
@property |
<|file_name|>gsm0338.py<|end_file_name|><|fim▁begin|># vim: ai ts=4 sts=4 et sw=4 encoding=utf-8
""" Python Character Mapping Codec based on gsm0338 generated from './GSM0338.TXT' with gencodec.py.
With extra sauce to deal with the 'multibyte' extensions!
"""#"
import codecs
import re
### Codec APIs
#
# Shared funcs
#
def _encode(input,errors='strict'):
# split to see if we have any 'extended' characters
runs=unicode_splitter.split(input)
# now iterate through handling any 'multibyte' ourselves
out_str=list()
consumed=0
extended=extended_encode_map.keys()
for run in runs:
if len(run)==1 and run[0] in extended:
out_str.append(extended_indicator+extended_encode_map[run])
consumed+=1
else:
# pass it to the standard encoder
out,cons=codecs.charmap_encode(run,errors,encoding_table)
out_str.append(out)
consumed+=cons
return (''.join(out_str),consumed)
def _decode(input,errors='strict'):
    # opposite of above, look for multibyte 'marker'
# and handle it ourselves, pass the rest to the
# standard decoder
# split to see if we have any 'extended' characters
runs = str_splitter.split(input)
# now iterate through handling any 'multibyte' ourselves
out_uni = []
consumed = 0
for run in runs:
if len(run)==0:
# first char was a marker, but we don't care
# the marker itself will come up in the next run
continue
if len(run)==2 and run[0]==extended_indicator:
try:
out_uni.append(extended_decode_map[run[1]])
consumed += 2
continue
except KeyError:
# second char was not an extended, so
# let this pass through and the marker
# will be interpreted by the table as a NBSP
pass
# pass it to the standard encoder
out,cons=codecs.charmap_decode(run,errors,decoding_table)
out_uni.append(out)
consumed+=cons
return (u''.join(out_uni),consumed)
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return _encode(input,errors)
def decode(self,input,errors='strict'):
# strip any trailing '\x00's as the standard
# says trailing ones are _not_ @'s and
# are in fact blanks
if input[-1]=='\x00':
input=input[:-1]
return _decode(input,errors)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
# just use the standard encoding as there is no need
# to hold state
return _encode(input,self.errors)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
# a little trickier 'cause input _might_ come in
# split right on the extended char marker boundary
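    # (illustrative example: one decode() call may end with '\x1b' and the next
    #  begin with '\x65'; together they must yield u'\u20ac' (euro), not the
    #  NBSP + 'e' that decoding the two bytes separately would produce)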
def __init__(self,errors='strict'):
codecs.IncrementalDecoder.__init__(self,errors)
self.last_saw_mark=False
def decode(self, input, final=False):
if final:
# check for final '\x00' which should not
# be interpreted as a '@'
if input[-1]=='\x00':
input=input[:-1]
        # keep track of how many chars we've added to or
        # removed from the run to adjust the response from
# _decode
consumed_delta=0
# see if last char was a 2-byte mark
if self.last_saw_mark:
# add it back to the current run
input=extended_indicator+input
consumed_delta-=1 # 'cause we added a char
self.last_saw_mark=False # reset
if input[-1:]==extended_indicator and not final:
# chop it off
input=input[:-1]
consumed_delta+=1 # because we just consumed one char
self.last_saw_mark=True
# NOTE: if we are final and last mark is
            # an extended indicator, it will be interpreted
# as NBSP
return _decode(input,self.errors)[0]
def reset(self):
self.last_saw_mark=False
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='gsm0338',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
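# Illustrative usage (an assumption, not part of this module): register a codec
# search function so the codec can be looked up by name, e.g.
#
#   import codecs
#   import gsm0338
#   codecs.register(lambda name: gsm0338.getregentry() if name == 'gsm0338' else None)
#   packed = u'Hello \u20ac'.encode('gsm0338')
#   assert packed.decode('gsm0338') == u'Hello \u20ac'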
### Decoding Tables
# gsm 'extended' character.
# gsm, annoyingly, is MOSTLY 7-bit chars
#
# BUT has 10 'extended' chars represented
# by 2-chars, an indicator, and then one of
# the 10
# first of the 2-chars is indicator
extended_indicator='\x1b'
# second char is the 'extended' character
extended_encode_map = { # Unicode->GSM string
u'\x0c':'\x0a', # FORM FEED
u'^':'\x14', # CIRCUMFLEX ACCENT
u'{':'\x28', # LEFT CURLY BRACKET
u'}':'\x29', # RIGHT CURLY BRACKET
u'\\':'\x2f', # REVERSE SOLIDUS
u'[':'\x3c', # LEFT SQUARE BRACKET
u'~':'\x3d', # TILDE
u']':'\x3e', # RIGHT SQUARE BRACKET
u'|':'\x40', # VERTICAL LINE
u'\u20ac':'\x65' # EURO SIGN
}
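# Illustrative example: u'5\u20ac' ("5" + euro sign) encodes to '\x35\x1b\x65';
# the euro sign becomes the escape byte 0x1B followed by 0x65 from this table.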
# reverse the map above for decoding
# GSM String->Unicode
uni,gsm=zip(*extended_encode_map.items())
extended_decode_map=dict(zip(gsm,uni))
# splitter
str_splitter=re.compile('(%(ind)s[^%(ind)s])' % { 'ind':extended_indicator })
unicode_splitter=re.compile(u'([%s])' % re.escape(''.join(extended_encode_map.keys())), re.UNICODE)
# the normal 1-char table
decoding_table = (
u'@' # 0x00 -> COMMERCIAL AT
u'\xa3' # 0x01 -> POUND SIGN
u'$' # 0x02 -> DOLLAR SIGN
u'\xa5' # 0x03 -> YEN SIGN
u'\xe8' # 0x04 -> LATIN SMALL LETTER E WITH GRAVE
u'\xe9' # 0x05 -> LATIN SMALL LETTER E WITH ACUTE
u'\xf9' # 0x06 -> LATIN SMALL LETTER U WITH GRAVE
u'\xec' # 0x07 -> LATIN SMALL LETTER I WITH GRAVE
u'\xf2' # 0x08 -> LATIN SMALL LETTER O WITH GRAVE
u'\xe7' # 0x09 -> LATIN SMALL LETTER C WITH CEDILLA
u'\n' # 0x0A -> LINE FEED
u'\xd8' # 0x0B -> LATIN CAPITAL LETTER O WITH STROKE
u'\xf8' # 0x0C -> LATIN SMALL LETTER O WITH STROKE
u'\r' # 0x0D -> CARRIAGE RETURN
u'\xc5' # 0x0E -> LATIN CAPITAL LETTER A WITH RING ABOVE
u'\xe5' # 0x0F -> LATIN SMALL LETTER A WITH RING ABOVE
u'\u0394' # 0x10 -> GREEK CAPITAL LETTER DELTA
u'_' # 0x11 -> LOW LINE
u'\u03a6' # 0x12 -> GREEK CAPITAL LETTER PHI
u'\u0393' # 0x13 -> GREEK CAPITAL LETTER GAMMA
u'\u039b' # 0x14 -> GREEK CAPITAL LETTER LAMDA
u'\u03a9' # 0x15 -> GREEK CAPITAL LETTER OMEGA
u'\u03a0' # 0x16 -> GREEK CAPITAL LETTER PI
u'\u03a8' # 0x17 -> GREEK CAPITAL LETTER PSI
u'\u03a3' # 0x18 -> GREEK CAPITAL LETTER SIGMA
u'\u0398' # 0x19 -> GREEK CAPITAL LETTER THETA
u'\u039e' # 0x1A -> GREEK CAPITAL LETTER XI
u'\xa0' # 0x1B -> ESCAPE TO EXTENSION TABLE (or displayed as NBSP, see note above)
u'\xc6' # 0x1C -> LATIN CAPITAL LETTER AE<|fim▁hole|> u'\xc9' # 0x1F -> LATIN CAPITAL LETTER E WITH ACUTE
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'\xa4' # 0x24 -> CURRENCY SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'\xa1' # 0x40 -> INVERTED EXCLAMATION MARK
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'\xc4' # 0x5B -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xd6' # 0x5C -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xd1' # 0x5D -> LATIN CAPITAL LETTER N WITH TILDE
u'\xdc' # 0x5E -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\xa7' # 0x5F -> SECTION SIGN
u'\xbf' # 0x60 -> INVERTED QUESTION MARK
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'\xe4' # 0x7B -> LATIN SMALL LETTER A WITH DIAERESIS
u'\xf6' # 0x7C -> LATIN SMALL LETTER O WITH DIAERESIS
u'\xf1' # 0x7D -> LATIN SMALL LETTER N WITH TILDE
u'\xfc' # 0x7E -> LATIN SMALL LETTER U WITH DIAERESIS
u'\xe0' # 0x7F -> LATIN SMALL LETTER A WITH GRAVE
u'\ufffe' # 0x80 -> UNDEFINED
u'\ufffe' # 0x81 -> UNDEFINED
u'\ufffe' # 0x82 -> UNDEFINED
u'\ufffe' # 0x83 -> UNDEFINED
u'\ufffe' # 0x84 -> UNDEFINED
u'\ufffe' # 0x85 -> UNDEFINED
u'\ufffe' # 0x86 -> UNDEFINED
u'\ufffe' # 0x87 -> UNDEFINED
u'\ufffe' # 0x88 -> UNDEFINED
u'\ufffe' # 0x89 -> UNDEFINED
u'\ufffe' # 0x8A -> UNDEFINED
u'\ufffe' # 0x8B -> UNDEFINED
u'\ufffe' # 0x8C -> UNDEFINED
u'\ufffe' # 0x8D -> UNDEFINED
u'\ufffe' # 0x8E -> UNDEFINED
u'\ufffe' # 0x8F -> UNDEFINED
u'\ufffe' # 0x90 -> UNDEFINED
u'\ufffe' # 0x91 -> UNDEFINED
u'\ufffe' # 0x92 -> UNDEFINED
u'\ufffe' # 0x93 -> UNDEFINED
u'\ufffe' # 0x94 -> UNDEFINED
u'\ufffe' # 0x95 -> UNDEFINED
u'\ufffe' # 0x96 -> UNDEFINED
u'\ufffe' # 0x97 -> UNDEFINED
u'\ufffe' # 0x98 -> UNDEFINED
u'\ufffe' # 0x99 -> UNDEFINED
u'\ufffe' # 0x9A -> UNDEFINED
u'\ufffe' # 0x9B -> UNDEFINED
u'\ufffe' # 0x9C -> UNDEFINED
u'\ufffe' # 0x9D -> UNDEFINED
u'\ufffe' # 0x9E -> UNDEFINED
u'\ufffe' # 0x9F -> UNDEFINED
u'\ufffe' # 0xA0 -> UNDEFINED
u'\ufffe' # 0xA1 -> UNDEFINED
u'\ufffe' # 0xA2 -> UNDEFINED
u'\ufffe' # 0xA3 -> UNDEFINED
u'\ufffe' # 0xA4 -> UNDEFINED
u'\ufffe' # 0xA5 -> UNDEFINED
u'\ufffe' # 0xA6 -> UNDEFINED
u'\ufffe' # 0xA7 -> UNDEFINED
u'\ufffe' # 0xA8 -> UNDEFINED
u'\ufffe' # 0xA9 -> UNDEFINED
u'\ufffe' # 0xAA -> UNDEFINED
u'\ufffe' # 0xAB -> UNDEFINED
u'\ufffe' # 0xAC -> UNDEFINED
u'\ufffe' # 0xAD -> UNDEFINED
u'\ufffe' # 0xAE -> UNDEFINED
u'\ufffe' # 0xAF -> UNDEFINED
u'\ufffe' # 0xB0 -> UNDEFINED
u'\ufffe' # 0xB1 -> UNDEFINED
u'\ufffe' # 0xB2 -> UNDEFINED
u'\ufffe' # 0xB3 -> UNDEFINED
u'\ufffe' # 0xB4 -> UNDEFINED
u'\ufffe' # 0xB5 -> UNDEFINED
u'\ufffe' # 0xB6 -> UNDEFINED
u'\ufffe' # 0xB7 -> UNDEFINED
u'\ufffe' # 0xB8 -> UNDEFINED
u'\ufffe' # 0xB9 -> UNDEFINED
u'\ufffe' # 0xBA -> UNDEFINED
u'\ufffe' # 0xBB -> UNDEFINED
u'\ufffe' # 0xBC -> UNDEFINED
u'\ufffe' # 0xBD -> UNDEFINED
u'\ufffe' # 0xBE -> UNDEFINED
u'\ufffe' # 0xBF -> UNDEFINED
u'\ufffe' # 0xC0 -> UNDEFINED
u'\ufffe' # 0xC1 -> UNDEFINED
u'\ufffe' # 0xC2 -> UNDEFINED
u'\ufffe' # 0xC3 -> UNDEFINED
u'\ufffe' # 0xC4 -> UNDEFINED
u'\ufffe' # 0xC5 -> UNDEFINED
u'\ufffe' # 0xC6 -> UNDEFINED
u'\ufffe' # 0xC7 -> UNDEFINED
u'\ufffe' # 0xC8 -> UNDEFINED
u'\ufffe' # 0xC9 -> UNDEFINED
u'\ufffe' # 0xCA -> UNDEFINED
u'\ufffe' # 0xCB -> UNDEFINED
u'\ufffe' # 0xCC -> UNDEFINED
u'\ufffe' # 0xCD -> UNDEFINED
u'\ufffe' # 0xCE -> UNDEFINED
u'\ufffe' # 0xCF -> UNDEFINED
u'\ufffe' # 0xD0 -> UNDEFINED
u'\ufffe' # 0xD1 -> UNDEFINED
u'\ufffe' # 0xD2 -> UNDEFINED
u'\ufffe' # 0xD3 -> UNDEFINED
u'\ufffe' # 0xD4 -> UNDEFINED
u'\ufffe' # 0xD5 -> UNDEFINED
u'\ufffe' # 0xD6 -> UNDEFINED
u'\ufffe' # 0xD7 -> UNDEFINED
u'\ufffe' # 0xD8 -> UNDEFINED
u'\ufffe' # 0xD9 -> UNDEFINED
u'\ufffe' # 0xDA -> UNDEFINED
u'\ufffe' # 0xDB -> UNDEFINED
u'\ufffe' # 0xDC -> UNDEFINED
u'\ufffe' # 0xDD -> UNDEFINED
u'\ufffe' # 0xDE -> UNDEFINED
u'\ufffe' # 0xDF -> UNDEFINED
u'\ufffe' # 0xE0 -> UNDEFINED
u'\ufffe' # 0xE1 -> UNDEFINED
u'\ufffe' # 0xE2 -> UNDEFINED
u'\ufffe' # 0xE3 -> UNDEFINED
u'\ufffe' # 0xE4 -> UNDEFINED
u'\ufffe' # 0xE5 -> UNDEFINED
u'\ufffe' # 0xE6 -> UNDEFINED
u'\ufffe' # 0xE7 -> UNDEFINED
u'\ufffe' # 0xE8 -> UNDEFINED
u'\ufffe' # 0xE9 -> UNDEFINED
u'\ufffe' # 0xEA -> UNDEFINED
u'\ufffe' # 0xEB -> UNDEFINED
u'\ufffe' # 0xEC -> UNDEFINED
u'\ufffe' # 0xED -> UNDEFINED
u'\ufffe' # 0xEE -> UNDEFINED
u'\ufffe' # 0xEF -> UNDEFINED
u'\ufffe' # 0xF0 -> UNDEFINED
u'\ufffe' # 0xF1 -> UNDEFINED
u'\ufffe' # 0xF2 -> UNDEFINED
u'\ufffe' # 0xF3 -> UNDEFINED
u'\ufffe' # 0xF4 -> UNDEFINED
u'\ufffe' # 0xF5 -> UNDEFINED
u'\ufffe' # 0xF6 -> UNDEFINED
u'\ufffe' # 0xF7 -> UNDEFINED
u'\ufffe' # 0xF8 -> UNDEFINED
u'\ufffe' # 0xF9 -> UNDEFINED
u'\ufffe' # 0xFA -> UNDEFINED
u'\ufffe' # 0xFB -> UNDEFINED
u'\ufffe' # 0xFC -> UNDEFINED
u'\ufffe' # 0xFD -> UNDEFINED
u'\ufffe' # 0xFE -> UNDEFINED
u'\ufffe' # 0xFF -> UNDEFINED
)
encoding_table=codecs.charmap_build(decoding_table)
if __name__ == "__main__":
"""
Run this as a script for poor-man's unit tests
"""
isoLatin15_alpha=u" !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJLKMNOPQRSTUVWXYZ[\\]^-`abcdefghijklmnopqrstuvwxyz{|}~¡¢£€¥Š§š©ª«¬®¯°±²³Žµ¶·ž¹º»ŒœŸ¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖרÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ"
gsm_alpha=u"\u00A0@£$¥èéùìòçØøÅåΔ_ΦΓΛΩΠΨΣΘΞ^{}\\[~]|\u00A0\u00A0€ÆæßÉ !\"#¤%&'()*+,-./0123456789:;<=>?¡ABCDEFGHIJKLMNOPQRSTUVWXYZÄÖÑܧ¿abcdefghijklmnopqrstuvwxyzäöñüà\u00A0"
gsm_alpha_encoded='1b000102030405060708090b0c0e0f101112131415161718191a1b141b281b291b2f1b3c1b3d1b3e1b401b1b1b651c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f1b'
gsm_alpha_gsm=gsm_alpha_encoded.decode('hex')
# some simple tests
print "Assert GSM alphabet, encoded in GSM is correct (unicode->gsm_str)..."
encoded=_encode(gsm_alpha)[0].encode('hex')
print encoded
assert(encoded==gsm_alpha_encoded)
print "Good"
print
print "Assert GSM encoded string converts to correct Unicode (gsm_str->unicode)..."
assert(_decode(gsm_alpha_gsm)[0]==gsm_alpha)
print "Good"
print
# test Codec objects
print "Try the codec objects unicode_test_str->encode->decode==unicode_test_str..."
c=Codec()
gsm_str,out=c.encode(gsm_alpha)
assert(c.decode(gsm_str)[0]==gsm_alpha)
print "Good"
print
print "Try the incremental codecs, same test, but loop it..."
def _inc_encode(ie):
encoded=list()
hop=17 # make it something odd
final=False
for i in range(0,len(gsm_alpha),hop):
end=i+hop
if end>=len(gsm_alpha): final=True
encoded.append(ie.encode(gsm_alpha[i:end],final))
return ''.join(encoded)
enc=IncrementalEncoder()
assert(_inc_encode(enc)==gsm_alpha_gsm)
print "Good"
print
print "Now do that again with the same encoder to make sure state is reset..."
enc.reset()
assert(_inc_encode(enc)==gsm_alpha_gsm)
print "Good"
print
print "Now decode the encoded string back to unicode..."
def _inc_decode(idec):
decoded=list()
# define so we KNOW we hit a mark as last char
hop=gsm_alpha_gsm.index('\x1b')+1
final=False
for i in range(0,len(gsm_alpha_gsm),hop):
end=i+hop
if end>=len(gsm_alpha_gsm): final=True
decoded.append(idec.decode(gsm_alpha_gsm[i:end],final))
return ''.join(decoded)
dec=IncrementalDecoder()
assert(_inc_decode(dec)==gsm_alpha)
print "Good"
print
print "Do it again with some decoder to make sure state is cleared..."
dec.reset()
assert(_inc_decode(dec)==gsm_alpha)
print "Good"
print<|fim▁end|> | u'\xe6' # 0x1D -> LATIN SMALL LETTER AE
u'\xdf' # 0x1E -> LATIN SMALL LETTER SHARP S (German) |
<|file_name|>bitcoin_pt_BR.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="pt_BR" version="2.1">
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About EvilCoin</source>
<translation>Sobre o EvilCoin</translation>
</message>
<message>
<location line="+39"/>
<source><b>EvilCoin</b> version</source>
        <translation><b>EvilCoin</b> versão</translation>
</message>
<message>
<location line="+41"/>
<source>Copyright © 2009-2014 The Bitcoin developers
Copyright © 2012-2014 The NovaCoin developers
Copyright © 2014 The EvilCoin developers</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
        <translation>
Este é um software experimental.
Distribuído sob a licença de software MIT/X11, veja o arquivo anexo COPYING ou http://www.opensource.org/licenses/mit-license.php.
Este produto inclui software desenvolvido pelo Projeto OpenSSL para uso no OpenSSL Toolkit (http://www.openssl.org/), software de criptografia escrito por Eric Young ([email protected]) e software UPnP escrito por Thomas Bernard.</translation>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation>Livro de Endereços</translation>
</message>
<message>
<location line="+22"/>
<source>Double-click to edit address or label</source>
<translation>Clique duas vezes para editar o endereço ou a etiqueta</translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>Criar um novo endereço</translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Copie o endereço selecionado para a área de transferência do sistema</translation>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation>&Novo Endereço</translation>
</message>
<message>
<location line="-46"/>
<source>These are your EvilCoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation>Estes são os seus endereços EvilCoin para receber pagamentos. Você pode dar um diferente a cada remetente para que você possa acompanhar quem está pagando.</translation>
</message>
<message>
<location line="+60"/>
<source>&Copy Address</source>
<translation>&Copiar Endereço</translation>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation>Mostrar &QR Code</translation>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a EvilCoin address</source>
<translation>Assine a mensagem para provar que você possui um endereço EvilCoin</translation>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation>Assinar &Mensagem</translation>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation>Excluir os endereços selecionados da lista</translation>
</message>
<message>
<location line="-14"/>
<source>Verify a message to ensure it was signed with a specified EvilCoin address</source>
<translation>Verifique a mensagem para garantir que ela foi assinada com um endereço EvilCoin específico</translation>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation>&Verificar Mensagem</translation>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation>&Excluir</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+65"/>
<source>Copy &Label</source>
<translation>Copiar &Etiqueta</translation>
</message>
<message>
<location line="+2"/>
<source>&Edit</source>
<translation>&Editar</translation>
</message>
<message>
<location line="+250"/>
<source>Export Address Book Data</source>
<translation>Exportar Dados do Livro de Endereços</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Arquivo separado por vírgulas (*. csv)</translation>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation>Erro ao exportar</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>Não foi possível escrever no arquivo %1.</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation>Rótulo</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Endereço</translation>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation>(Sem rótulo)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation>Janela da Frase de Segurança</translation>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation>Digite a frase de segurança</translation>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation>Nova frase de segurança</translation>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation>Repita a nova frase de segurança</translation>
</message>
<message>
<location line="+33"/>
<source>Serves to disable the trivial sendmoney when OS account compromised. Provides no real security.</source>
<translation>Serve para desativar o envio de dinheiro trivial quando conta do SO for comprometida. Não oferece segurança real.</translation>
</message>
<message>
<location line="+3"/>
<source>For staking only</source>
<translation>Apenas para participação</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+35"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Digite a nova frase de seguraça da sua carteira. <br/> Por favor, use uma frase de <b>10 ou mais caracteres aleatórios,</b> ou <b>oito ou mais palavras.</b></translation>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation>Criptografar carteira</translation>
</message>
<message>
<location line="+7"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Esta operação precisa de sua frase de segurança para desbloquear a carteira.</translation>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation>Desbloquear carteira</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Esta operação precisa de sua frase de segurança para descriptografar a carteira.</translation>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation>Descriptografar carteira</translation>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation>Alterar frase de segurança</translation>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>Digite a frase de segurança antiga e nova para a carteira.</translation>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation>Confirmar criptografia da carteira</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR COINS</b>!</source>
        <translation>Aviso: Se você criptografar sua carteira e perder sua senha, você vai <b>PERDER TODAS AS SUAS MOEDAS</b>!</translation>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>Tem certeza de que deseja criptografar sua carteira?</translation>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation>IMPORTANTE: Qualquer backup prévio que você tenha feito do seu arquivo wallet deve ser substituído pelo novo e encriptado arquivo wallet gerado. Por razões de segurança, qualquer backup do arquivo wallet não criptografado se tornará inútil assim que você começar a usar uma nova carteira criptografada.</translation>
</message>
<message>
<location line="+103"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation>Cuidado: A tecla Caps Lock está ligada!</translation>
</message>
<message>
<location line="-133"/>
<location line="+60"/>
<source>Wallet encrypted</source>
<translation>Carteira criptografada</translation>
</message>
<message>
<location line="-58"/>
<source>EvilCoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your coins from being stolen by malware infecting your computer.</source>
        <translation>EvilCoin vai fechar agora para concluir o processo de criptografia. Lembre-se que a criptografia de sua carteira não pode proteger totalmente suas moedas de serem roubadas por malwares que infectem seu computador.</translation>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+44"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation>A criptografia da carteira falhou</translation>
</message>
<message>
<location line="-56"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>A criptografia da carteira falhou devido a um erro interno. Sua carteira não estava criptografada.</translation>
</message>
<message>
<location line="+7"/>
<location line="+50"/>
<source>The supplied passphrases do not match.</source>
<translation>A frase de segurança fornecida não confere.</translation>
</message>
<message>
<location line="-38"/>
<source>Wallet unlock failed</source>
<translation>A abertura da carteira falhou</translation>
</message>
<message>
<location line="+1"/>
<location line="+12"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>A frase de segurança digitada para a descriptografia da carteira estava incorreta.</translation>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation>A descriptografia da carteira falhou</translation>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation>A frase de segurança da carteira foi alterada com êxito.</translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+282"/>
<source>Sign &message...</source>
<translation>&Assinar Mensagem...</translation>
</message>
<message>
<location line="+251"/>
<source>Synchronizing with network...</source>
<translation>Sincronizando com a rede...</translation>
</message>
<message>
<location line="-319"/>
<source>&Overview</source>
<translation>&Visão geral</translation>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation>Mostrar visão geral da carteira</translation>
</message>
<message>
<location line="+17"/>
<source>&Transactions</source>
<translation>&Transações</translation>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation>Navegar pelo histórico de transações</translation>
</message>
<message>
<location line="+5"/>
<source>&Address Book</source>
<translation>&Livro de Endereços</translation>
</message>
<message>
<location line="+1"/>
<source>Edit the list of stored addresses and labels</source>
<translation>Edite a lista de endereços armazenados e rótulos</translation>
</message>
<message>
<location line="-13"/>
<source>&Receive coins</source>
<translation>&Receber moedas</translation>
</message>
<message>
<location line="+1"/>
<source>Show the list of addresses for receiving payments</source>
<translation>Mostrar a lista de endereços para o recebimento de pagamentos</translation>
</message>
<message>
<location line="-7"/>
<source>&Send coins</source>
<translation>&Enviar moedas</translation>
</message>
<message>
<location line="+35"/>
<source>E&xit</source>
<translation>S&air</translation>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation>Sair da aplicação</translation>
</message>
<message>
<location line="+6"/>
<source>Show information about EvilCoin</source>
<translation>Mostrar informações sobre o EvilCoin</translation>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation>Sobre &Qt</translation>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation>Mostrar informações sobre o Qt</translation>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation>&Opções...</translation>
</message>
<message>
<location line="+4"/>
<source>&Encrypt Wallet...</source>
<translation>&Criptografar Carteira...</translation>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation>&Backup Carteira...</translation>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation>&Mudar frase de segurança...</translation>
</message>
<message numerus="yes">
<location line="+259"/>
<source>~%n block(s) remaining</source>
<translation><numerusform>~%n bloco faltando</numerusform><numerusform>~%n blocos faltando</numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Downloaded %1 of %2 blocks of transaction history (%3% done).</source>
<translation>Baixados %1 de %2 blocos de histórico de transações (%3% completo).</translation>
</message>
<message>
<location line="-256"/>
<source>&Export...</source>
<translation>&Exportar...</translation>
</message>
<message>
<location line="-64"/>
<source>Send coins to a EvilCoin address</source>
<translation>Enviar moedas para um endereço EvilCoin</translation>
</message>
<message>
<location line="+47"/>
<source>Modify configuration options for EvilCoin</source>
<translation>Modificar opções de configuração para EvilCoin</translation>
</message>
<message>
<location line="+18"/>
<source>Export the data in the current tab to a file</source>
<translation>Exportar os dados da guia atual para um arquivo</translation>
</message>
<message>
<location line="-14"/>
<source>Encrypt or decrypt wallet</source>
<translation>Cryptografar ou Decryptografar carteira</translation>
</message>
<message>
<location line="+3"/>
<source>Backup wallet to another location</source>
<translation>Fazer cópia de segurança da carteira para uma outra localização</translation>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation>Mudar a frase de segurança utilizada na criptografia da carteira</translation>
</message>
<message>
<location line="+10"/>
<source>&Debug window</source>
<translation>Janela de &Depuração</translation>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation>Abrir console de depuração e diagnóstico</translation>
</message>
<message>
<location line="-5"/>
<source>&Verify message...</source>
<translation>&Verificar mensagem...</translation>
</message>
<message>
<location line="-202"/>
<source>EvilCoin</source>
<translation>EvilCoin</translation>
</message>
<message>
<location line="+0"/>
<source>Wallet</source>
<translation>Carteira</translation>
</message>
<message>
<location line="+180"/>
<source>&About EvilCoin</source>
<translation>Sobre o EvilCoin</translation>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation>&Exibir/Ocultar</translation>
</message>
<message>
<location line="+9"/>
<source>Unlock wallet</source>
<translation>Desbloquear carteira</translation>
</message>
<message>
<location line="+1"/>
<source>&Lock Wallet</source>
<translation>&Bloquear Carteira</translation>
</message>
<message>
<location line="+1"/>
<source>Lock wallet</source>
<translation>Bloquear Carteira</translation>
</message>
<message>
<location line="+35"/>
<source>&File</source>
<translation>&Arquivo</translation>
</message>
<message>
<location line="+8"/>
<source>&Settings</source>
<translation>&Configurações</translation>
</message>
<message>
<location line="+8"/>
<source>&Help</source>
<translation>&Ajuda</translation>
</message>
<message>
<location line="+12"/>
<source>Tabs toolbar</source>
<translation>Barra de ferramentas</translation>
</message>
<message>
<location line="+8"/>
<source>Actions toolbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+9"/>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
<message>
<location line="+0"/>
<location line="+60"/>
<source>EvilCoin client</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+75"/>
<source>%n active connection(s) to EvilCoin network</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+40"/>
<source>Downloaded %1 blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+413"/>
<source>Staking.<br>Your weight is %1<br>Network weight is %2<br>Expected time to earn reward is %3</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Not staking because wallet is locked</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is syncing</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because you don't have mature coins</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-403"/>
<source>%n second(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="-312"/>
<source>About EvilCoin card</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show information about EvilCoin card</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>&Unlock Wallet...</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+297"/>
<source>%n minute(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Up to date</source>
<translation>Atualizado</translation>
</message>
<message>
<location line="+7"/>
<source>Catching up...</source>
<translation>Recuperando o atraso ...</translation>
</message>
<message>
<location line="+10"/>
<source>Last received block was generated %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Sent transaction</source>
<translation>Transação enviada</translation>
</message>
<message>
<location line="+1"/>
<source>Incoming transaction</source>
<translation>Transação recebida</translation>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation>Data: %1
Quantidade: %2
Tipo: %3
Endereço: %4</translation>
</message>
<message>
<location line="+100"/>
<location line="+15"/>
<source>URI handling</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<location line="+15"/>
<source>URI can not be parsed! This can be caused by an invalid EvilCoin address or malformed URI parameters.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Carteira está <b>criptografada</b> e atualmente <b>desbloqueada</b></translation>
</message>
<message>
<location line="+10"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Carteira está <b>criptografada</b> e atualmente <b>bloqueada</b></translation>
</message>
<message>
<location line="+25"/>
<source>Backup Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+76"/>
<source>%n second(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n minute(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s)</source>
<translation><numerusform>%n hora</numerusform><numerusform>%n horas</numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation><numerusform>%n dia</numerusform><numerusform>%n dias</numerusform></translation>
</message>
<message>
<location line="+18"/>
<source>Not staking</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoin.cpp" line="+109"/>
<source>A fatal error occurred. EvilCoin can no longer continue safely and will quit.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+90"/>
<source>Network Alert</source>
<translation>Alerta da Rede</translation>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<location filename="../forms/coincontroldialog.ui" line="+14"/>
<source>Coin Control</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Quantity:</source>
<translation>Quantidade:</translation>
</message>
<message>
<location line="+32"/>
<source>Bytes:</source>
<translation>Bytes:</translation>
</message>
<message>
<location line="+48"/>
<source>Amount:</source>
<translation>Quantia:</translation>
</message>
<message>
<location line="+32"/>
<source>Priority:</source>
<translation>Prioridade:</translation>
</message>
<message>
<location line="+48"/>
<source>Fee:</source>
<translation>Taxa:</translation>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
        <translation>Saída de baixo valor:</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="+551"/>
<source>no</source>
<translation>não</translation>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="+51"/>
<source>After Fee:</source>
<translation>Depois da taxa:</translation>
</message>
<message>
<location line="+35"/>
<source>Change:</source>
        <translation>Troco:</translation>
</message>
<message>
<location line="+69"/>
<source>(un)select all</source>
<translation>(de)selecionar tudo</translation>
</message>
<message>
<location line="+13"/>
<source>Tree mode</source>
<translation>Modo árvore</translation>
</message>
<message>
<location line="+16"/>
<source>List mode</source>
<translation>Modo lista</translation>
</message>
<message>
<location line="+45"/>
<source>Amount</source>
<translation>Quantidade</translation>
</message>
<message>
<location line="+5"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Address</source>
<translation>Endereço</translation>
</message>
<message>
<location line="+5"/>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<location line="+5"/>
<source>Confirmations</source>
<translation>Confirmações</translation>
</message>
<message>
<location line="+3"/>
<source>Confirmed</source>
<translation>Confirmado</translation>
</message>
<message>
<location line="+5"/>
<source>Priority</source>
<translation>Prioridade</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="-515"/>
<source>Copy address</source>
<translation>Copiar endereço</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>Copiar etiqueta</translation>
</message>
<message>
<location line="+1"/>
<location line="+26"/>
<source>Copy amount</source>
<translation>Copiar quantia</translation>
</message>
<message>
<location line="-25"/>
<source>Copy transaction ID</source>
<translation>Copiar ID da transação</translation>
</message>
<message>
<location line="+24"/>
<source>Copy quantity</source>
<translation>Copiar quantidade</translation>
</message>
<message>
<location line="+2"/>
<source>Copy fee</source>
<translation>Copiar taxa</translation>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
        <translation>Copiar pós-taxa</translation>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation>Copiar bytes</translation>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
        <translation>Copiar prioridade</translation>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
        <translation>Copiar saída de pouco valor</translation>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
        <translation>Copiar troco</translation>
</message>
<message>
<location line="+317"/>
<source>highest</source>
<translation>mais alta possível</translation>
</message>
<message>
<location line="+1"/>
<source>high</source>
<translation>alta</translation>
</message>
<message>
<location line="+1"/>
<source>medium-high</source>
<translation>média-alta</translation>
</message>
<message>
<location line="+1"/>
<source>medium</source>
<translation>média</translation>
</message>
<message>
<location line="+4"/>
<source>low-medium</source>
<translation>média-baixa</translation>
</message>
<message>
<location line="+1"/>
<source>low</source>
<translation>baixa</translation>
</message>
<message>
<location line="+1"/>
<source>lowest</source>
<translation>a mais baixa possível</translation>
</message>
<message>
<location line="+155"/>
<source>DUST</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>yes</source>
<translation>sim</translation>
</message>
<message>
<location line="+10"/>
<source>This label turns red, if the transaction size is bigger than 10000 bytes.
This means a fee of at least %1 per kb is required.
Can vary +/- 1 Byte per input.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transactions with higher priority get more likely into a block.
This label turns red, if the priority is smaller than "medium".
This means a fee of at least %1 per kb is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if any recipient receives an amount smaller than %1.
This means a fee of at least %2 is required.
Amounts below 0.546 times the minimum relay fee are shown as DUST.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if the change is smaller than %1.
This means a fee of at least %2 is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<location line="+66"/>
<source>(no label)</source>
        <translation>(sem etiqueta)</translation>
</message>
<message>
<location line="-9"/>
<source>change from %1 (%2)</source>
<translation>troco de %1 (%2)</translation>
</message>
<message>
<location line="+1"/>
<source>(change)</source>
<translation>(troco)</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation>Editar Endereço</translation>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation>&Etiqueta</translation>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation>&Endereço</translation>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+20"/>
<source>New receiving address</source>
<translation>Novo endereço de recebimento</translation>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation>Novo endereço de envio</translation>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation>Editar endereço de recebimento</translation>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation>Editar endereço de envio</translation>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation>O endereço digitado "%1" já se encontra no catálogo de endereços.</translation>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid EvilCoin address.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation>Não foi possível destravar a carteira.</translation>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation>A geração de nova chave falhou.</translation>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+420"/>
<location line="+12"/>
<source>EvilCoin-Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation>Opções</translation>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation>Principal</translation>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB. Fee 0.01 recommended.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation>Pagar taxa de &transação</translation>
</message>
<message>
<location line="+31"/>
<source>Reserved amount does not participate in staking and is therefore spendable at any time.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Reserve</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Automatically start EvilCoin after logging in to the system.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Start EvilCoin on system login</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Detach block and address databases at shutdown. This means they can be moved to another data directory, but it slows down shutdown. The wallet is always detached.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Detach databases at shutdown</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Network</source>
<translation>Rede</translation>
</message>
<message>
<location line="+6"/>
<source>Automatically open the EvilCoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation>Mapear porta usando &UPnP</translation>
</message>
<message>
<location line="+7"/>
<source>Connect to the EvilCoin network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation>&IP do proxy:</translation>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation>&Porta:</translation>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation>Porta do serviço de proxy (ex. 9050)</translation>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation>&Versão do SOCKS:</translation>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation>Versão do proxy SOCKS (ex. 5)</translation>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation>&Janela</translation>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation>Mostrar apenas um ícone na bandeja ao minimizar a janela.</translation>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>&Minimizar para a bandeja em vez da barra de tarefas.</translation>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
        <translation>Minimizar em vez de sair do aplicativo quando a janela for fechada. Quando esta opção é escolhida, o aplicativo só será fechado selecionando Sair no menu.</translation>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation>M&inimizar ao sair</translation>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation>&Mostrar</translation>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
        <translation>&Idioma da interface de usuário:</translation>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting EvilCoin.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation>&Unidade usada para mostrar quantidades:</translation>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
        <translation>Escolha a unidade padrão de subdivisão a ser mostrada na interface e ao enviar moedas.</translation>
</message>
<message>
<location line="+9"/>
<source>Whether to show EvilCoin addresses in the transaction list or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation>Mostrar en&dereços na lista de transações</translation>
</message>
<message>
<location line="+7"/>
<source>Whether to show coin control features or not.</source>
<translation>Mostrar ou não opções de controle da moeda.</translation>
</message>
<message>
<location line="+3"/>
<source>Display coin &control features (experts only!)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation>&OK</translation>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation>&Cancelar</translation>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+55"/>
<source>default</source>
<translation>padrão</translation>
</message>
<message>
<location line="+149"/>
<location line="+9"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting EvilCoin.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation>O endereço proxy fornecido é inválido.</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation>Formulário</translation>
</message>
<message>
<location line="+33"/>
<location line="+231"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the EvilCoin network after a connection is established, but this process has not completed yet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-160"/>
<source>Stake:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-107"/>
<source>Wallet</source>
<translation>Carteira</translation>
</message>
<message>
<location line="+49"/>
<source>Spendable:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Your current spendable balance</source>
        <translation>Seu saldo atual disponível para gasto</translation>
</message>
<message>
<location line="+71"/>
<source>Immature:</source>
<translation>Imaturo:</translation>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation>Saldo minerado que ainda não maturou</translation>
</message>
<message>
<location line="+20"/>
<source>Total:</source>
<translation>Total:</translation>
</message>
<message>
<location line="+16"/>
<source>Your current total balance</source>
<translation>Seu saldo total atual</translation>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation><b>Transações recentes</b></translation>
</message>
<message>
<location line="-108"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Total of coins that was staked, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="+113"/>
<location line="+1"/>
<source>out of sync</source>
<translation>fora de sincronia</translation>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation>Nome do cliente</translation>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+348"/>
<source>N/A</source>
<translation>N/A</translation>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation>Versão do cliente</translation>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation>&Informação</translation>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation>Usando OpenSSL versão</translation>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation>Horário de inicialização</translation>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation>Rede</translation>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation>Número de conexões</translation>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation>Corrente de blocos</translation>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation>Quantidade atual de blocos</translation>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation>Total estimado de blocos</translation>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation>Horário do último bloco</translation>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation>&Abrir</translation>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Show the EvilCoin-Qt help message to get a list with possible EvilCoin command-line options.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation>&Console</translation>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
        <translation>Data de compilação</translation>
</message>
<message>
<location line="-104"/>
<source>EvilCoin - Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>EvilCoin Core</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation>Arquivo de log de Depuração</translation>
</message>
<message>
<location line="+7"/>
<source>Open the EvilCoin debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation>Limpar console</translation>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-33"/>
<source>Welcome to the EvilCoin RPC console.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation>Use as setas para cima e para baixo para navegar pelo histórico, e <b>Ctrl-L</b> para limpar a tela.</translation>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation>Digite <b>help</b> para uma visão geral dos comandos disponíveis.</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+182"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation>Enviar dinheiro</translation>
</message>
<message>
<location line="+76"/>
<source>Coin Control Features</source>
<translation>Opções de Controle da Moeda</translation>
</message>
<message>
<location line="+20"/>
<source>Inputs...</source>
<translation>Entradas...</translation>
</message>
<message>
<location line="+7"/>
<source>automatically selected</source>
<translation>automaticamente selecionado</translation>
</message>
<message>
<location line="+19"/>
<source>Insufficient funds!</source>
<translation>Saldo insuficiente!</translation>
</message>
<message>
<location line="+77"/>
<source>Quantity:</source>
<translation>Quantidade:</translation>
</message>
<message>
<location line="+22"/>
<location line="+35"/>
<source>0</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-19"/>
<source>Bytes:</source>
<translation>Bytes:</translation>
</message>
<message>
<location line="+51"/>
<source>Amount:</source>
<translation>Quantia:</translation>
</message>
<message>
<location line="+22"/>
<location line="+86"/>
<location line="+86"/>
<location line="+32"/>
<source>0.00 hack</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-191"/>
<source>Priority:</source>
<translation>Prioridade:</translation>
</message>
<message>
<location line="+19"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Fee:</source>
<translation>Taxa:</translation>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
        <translation>Saída de baixo valor:</translation>
</message>
<message>
<location line="+19"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>After Fee:</source>
<translation>Depois da taxa:</translation>
</message>
<message>
<location line="+35"/>
<source>Change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+50"/>
<source>custom change address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+106"/>
<source>Send to multiple recipients at once</source>
<translation>Enviar para vários destinatários de uma só vez</translation>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation>Adicionar destinatário</translation>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation>Limpar Tudo</translation>
</message>
<message>
<location line="+28"/>
<source>Balance:</source>
<translation>Saldo:</translation>
</message>
<message>
<location line="+16"/>
<source>123.456 hack</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation>Confirmar o envio</translation>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation>Enviar</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-173"/>
<source>Enter a EvilCoin address (e.g. EvilCoinfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Copy quantity</source>
<translation>Copiar quantidade</translation>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation>Copiar quantia</translation>
</message>
<message>
<location line="+1"/>
<source>Copy fee</source>
<translation>Copiar taxa</translation>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
        <translation>Copiar pós-taxa</translation>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation>Copiar bytes</translation>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
        <translation>Copiar prioridade</translation>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
        <translation>Copiar saída de pouco valor</translation>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
        <translation>Copiar troco</translation>
</message>
<message>
<location line="+86"/>
<source><b>%1</b> to %2 (%3)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation>Confirmar envio de dinheiro</translation>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The recipient address is not valid, please recheck.</source>
<translation>O endereço do destinatário não é válido, favor verificar.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation>A quantidade a ser paga precisa ser maior que 0.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation>A quantidade excede seu saldo.</translation>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation>O total excede seu saldo quando uma taxa de transação de %1 é incluída.</translation>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation>Endereço duplicado: pode-se enviar para cada endereço apenas uma vez por transação.</translation>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+251"/>
<source>WARNING: Invalid EvilCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>(no label)</source>
        <translation>(sem etiqueta)</translation>
</message>
<message>
<location line="+4"/>
<source>WARNING: unknown change address</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation>Q&uantidade:</translation>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation>Pagar &Para:</translation>
</message>
<message>
<location line="+24"/>
<location filename="../sendcoinsentry.cpp" line="+25"/>
<source>Enter a label for this address to add it to your address book</source>
<translation>Digite uma etiqueta para este endereço para adicioná-lo ao catálogo de endereços</translation>
</message>
<message>
<location line="+9"/>
<source>&Label:</source>
<translation>&Etiqueta:</translation>
</message>
<message>
<location line="+18"/>
<source>The address to send the payment to (e.g. EvilCoinfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Choose address from address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation>Colar o endereço da área de transferência</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a EvilCoin address (e.g. EvilCoinfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation>Assinaturas - Assinar / Verificar uma mensagem</translation>
</message>
<message>
<location line="+13"/>
<location line="+124"/>
<source>&Sign Message</source>
<translation>&Assinar Mensagem</translation>
</message>
<message>
<location line="-118"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
        <translation>Você pode assinar mensagens com seus endereços para provar que você é o dono deles. Seja cuidadoso para não assinar algo vago, pois ataques de phishing podem tentar te enganar para dar sua assinatura de identidade para eles. Apenas assine afirmações completamente detalhadas com as quais você concorda.</translation>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. EvilCoinfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+203"/>
<source>Choose an address from the address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-193"/>
<location line="+203"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="-193"/>
<source>Paste address from clipboard</source>
<translation>Colar o endereço da área de transferência</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation>Entre a mensagem que você quer assinar aqui</translation>
</message>
<message>
<location line="+24"/>
<source>Copy the current signature to the system clipboard</source>
<translation>Copiar a assinatura para a área de transferência do sistema</translation>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this EvilCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all sign message fields</source>
<translation>Limpar todos os campos de assinatura da mensagem</translation>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation>Limpar Tudo</translation>
</message>
<message>
<location line="-87"/>
<location line="+70"/>
<source>&Verify Message</source>
<translation>&Verificar Mensagem</translation>
</message>
<message>
<location line="-64"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
        <translation>Forneça o endereço da assinatura, a mensagem (se assegure que você copiou quebras de linha, espaços, tabs, etc. exatamente) e a assinatura abaixo para verificar a mensagem. Cuidado para não ler mais na assinatura do que está escrito na mensagem propriamente, para evitar ser vítima de um ataque do tipo "man-in-the-middle".</translation>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. EvilCoinfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified EvilCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all verify message fields</source>
        <translation>Limpar todos os campos de verificação da mensagem</translation>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a EvilCoin address (e.g. EvilCoinfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation>Clique em "Assinar Mensagem" para gerar a assinatura</translation>
</message>
<message>
<location line="+3"/>
<source>Enter EvilCoin signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation>O endereço fornecido é inválido.</translation>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation>Por favor, verifique o endereço e tente novamente.</translation>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation>O endereço fornecido não se refere a uma chave.</translation>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation>Destravamento da Carteira foi cancelado.</translation>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation>A chave privada para o endereço fornecido não está disponível.</translation>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation>Assinatura da mensagem falhou.</translation>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
        <translation>Mensagem assinada.</translation>
    </message>
    <message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation>A assinatura não pode ser decodificada.</translation>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation>Por favor, verifique a assinatura e tente novamente.</translation>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation>A assinatura não corresponde ao "resumo da mensagem".</translation>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation>Verificação da mensagem falhou.</translation>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation>Mensagem verificada.</translation>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+19"/>
<source>Open until %1</source>
<translation>Aberto até %1</translation>
</message>
<message numerus="yes">
<location line="-2"/>
<source>Open for %n block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+8"/>
<source>conflicted</source>
<translation>em conflito</translation>
</message>
<message>
<location line="+2"/>
<source>%1/offline</source>
<translation>%1/offline</translation>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation>%1/não confirmadas</translation>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation>%1 confirmações</translation>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation>Status</translation>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
        <translation><numerusform>, difundido através de %n nó</numerusform><numerusform>, difundido através de %n nós</numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation>Fonte</translation>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation>Gerados</translation>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation>De</translation>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation>Para</translation>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation>seu próprio endereço</translation>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation>etiqueta</translation>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation>Crédito</translation>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation><numerusform>matura em mais %n bloco</numerusform><numerusform>matura em mais %n blocos</numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation>não aceito</translation>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation>Débito</translation>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation>Taxa de transação</translation>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation>Valor líquido</translation>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation>Mensagem</translation>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation>Comentário</translation>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation>ID da transação</translation>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 510 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation>Informação de depuração</translation>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation>Transação</translation>
</message>
<message>
<location line="+5"/>
<source>Inputs</source>
<translation>Entradas</translation>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation>Quantidade</translation>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation>verdadeiro</translation>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation>falso</translation>
</message>
<message>
<location line="-211"/>
<source>, has not been successfully broadcast yet</source>
<translation>, ainda não foi propagada na rede com sucesso.</translation>
</message>
<message>
<location line="+35"/>
<source>unknown</source>
<translation>desconhecido</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation>Detalhes da transação</translation>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation>Este painel mostra uma descrição detalhada da transação</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+226"/>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation>Tipo</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Endereço</translation>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation>Quantidade</translation>
</message>
<message>
<location line="+60"/>
<source>Open until %1</source>
<translation>Aberto até %1</translation>
</message>
<message>
<location line="+12"/>
<source>Confirmed (%1 confirmations)</source>
<translation>Confirmado (%1 confirmações)</translation>
</message>
<message numerus="yes">
<location line="-15"/>
<source>Open for %n more block(s)</source>
        <translation><numerusform>Aberto por mais %n bloco</numerusform><numerusform>Aberto por mais %n blocos</numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Offline</source>
<translation>Offline</translation>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed</source>
<translation>Não confirmado</translation>
</message>
<message>
<location line="+3"/>
<source>Confirming (%1 of %2 recommended confirmations)</source>
<translation>Confirmando (%1 de %2 confirmações recomendadas)</translation>
</message>
<message>
<location line="+6"/>
<source>Conflicted</source>
        <translation>Em conflito</translation>
</message>
<message>
<location line="+3"/>
<source>Immature (%1 confirmations, will be available after %2)</source>
        <translation>Imaturo (%1 confirmações, disponível somente após %2)</translation>
</message>
<message>
<location line="+3"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>Este bloco não foi recebido por nenhum outro participante da rede e provavelmente não será aceito!</translation>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation>Gerado mas não aceito</translation>
</message>
<message>
<location line="+42"/>
<source>Received with</source>
<translation>Recebido por</translation>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation>Recebido de</translation>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation>Enviado para</translation>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation>Pagamento para você mesmo</translation>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation>Minerado</translation>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation>(n/a)</translation>
</message>
<message>
<location line="+190"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>Status da transação. Passe o mouse sobre este campo para mostrar o número de confirmações.</translation>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation>Data e hora em que a transação foi recebida.</translation>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation>Tipo de transação.</translation>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation>Endereço de destino da transação.</translation>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation>Quantidade debitada ou creditada ao saldo.</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+55"/>
<location line="+16"/>
<source>All</source>
<translation>Todos</translation>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation>Hoje</translation>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation>Esta semana</translation>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation>Este mês</translation>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation>Mês passado</translation>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation>Este ano</translation>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation>Intervalo...</translation>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation>Recebido por</translation>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation>Enviado para</translation>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation>Para você mesmo</translation>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation>Minerado</translation>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation>Outro</translation>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation>Procure um endereço ou etiqueta</translation>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation>Quantidade mínima</translation>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation>Copiar endereço</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>Copiar etiqueta</translation>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation>Copiar quantia</translation>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation>Copiar ID da transação</translation>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation>Editar etiqueta</translation>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation>Mostrar detalhes da transação</translation>
</message>
<message>
<location line="+144"/>
<source>Export Transaction Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
        <translation>Arquivo separado por vírgulas (*.csv)</translation>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation>Confirmado</translation>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation>Tipo</translation>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation>Etiqueta</translation>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation>Endereço</translation>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation>Quantidade</translation>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation>ID</translation>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation>Intervalo: </translation>
</message>
<message>
<location line="+8"/>
<source>to</source>
        <translation>até</translation>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+206"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+33"/>
<source>EvilCoin version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Usage:</source>
<translation>Uso:</translation>
</message>
<message>
<location line="+1"/>
<source>Send command to -server or EvilCoind</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>List commands</source>
<translation>Lista de comandos</translation>
</message>
<message>
<location line="+1"/>
<source>Get help for a command</source>
<translation>Obtenha ajuda sobre um comando</translation>
</message>
<message>
<location line="+2"/>
<source>Options:</source>
<translation>Opções:</translation>
</message>
<message>
<location line="+2"/>
<source>Specify configuration file (default: EvilCoin.conf)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Specify pid file (default: EvilCoind.pid)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify wallet file (within data directory)</source>
<translation>Especifique o arquivo da carteira (dentro do diretório de dados)</translation>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation>Especificar diretório de dados</translation>
</message>
<message>
<location line="+2"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation>Definir o tamanho do cache do banco de dados em megabytes (padrão: 25)</translation>
</message>
<message>
<location line="+1"/>
<source>Set database disk log size in megabytes (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Listen for connections on <port> (default: 15714 or testnet: 25714)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation>Manter no máximo <n> conexões aos peers (padrão: 125)</translation>
</message>
<message>
<location line="+3"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation>Conectar a um nó para receber endereços de participantes, e desconectar.</translation>
</message>
<message>
<location line="+1"/>
<source>Specify your own public address</source>
<translation>Especificar seu próprio endereço público</translation>
</message>
<message>
<location line="+5"/>
<source>Bind to given address. Use [host]:port notation for IPv6</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Stake your coins to support network and gain reward (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation>Limite para desconectar peers mal comportados (padrão: 100)</translation>
</message>
<message>
<location line="+1"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation>Número de segundos para impedir que peers mal comportados reconectem (padrão: 86400)</translation>
</message>
<message>
<location line="-44"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation>Um erro ocorreu ao configurar a porta RPC %u para escuta em IPv4: %s</translation>
</message>
<message>
<location line="+51"/>
<source>Detach block and address databases. Increases shutdown time (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+109"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<source>Listen for JSON-RPC connections on <port> (default: 15715 or testnet: 25715)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-11"/>
<source>Accept command line and JSON-RPC commands</source>
<translation>Aceitar linha de comando e comandos JSON-RPC</translation>
</message>
<message>
<location line="+101"/>
<source>Error: Transaction creation failed </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>Error: Wallet locked, unable to create transaction </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-8"/>
<source>Importing blockchain data file.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Importing bootstrap blockchain data file.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-88"/>
<source>Run in the background as a daemon and accept commands</source>
<translation>Rodar em segundo plano como serviço e aceitar comandos</translation>
</message>
<message>
<location line="+1"/>
<source>Use the test network</source>
<translation>Usar rede de teste</translation>
</message>
<message>
<location line="-24"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation>Aceitar conexões externas (padrão: 1 se opções -proxy ou -connect não estiverem presentes)</translation>
</message>
<message>
<location line="-38"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation>Um erro ocorreu ao configurar a porta RPC %u para escuta em IPv6, voltando ao IPv4: %s</translation>
</message>
<message>
<location line="+117"/>
<source>Error initializing database environment %s! To recover, BACKUP THAT DIRECTORY, then remove everything from it except for wallet.dat.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-20"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation>Cuidado: valor de -paytxfee escolhido é muito alto! Este é o valor da taxa de transação que você irá pagar se enviar a transação.</translation>
</message>
<message>
<location line="+61"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong EvilCoin will not work properly.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-31"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation>Cuidado: erro ao ler arquivo wallet.dat! Todas as chaves foram lidas corretamente, mas dados de transações e do catálogo de endereços podem estar faltando ou incorretos.</translation>
</message>
<message>
<location line="-18"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation>Aviso: wallet.dat corrompido, dados recuperados! Arquivo wallet.dat original salvo como wallet.{timestamp}.bak em %s; se seu saldo ou transações estiverem incorretos, você deve restaurar o backup.</translation>
</message>
<message>
<location line="-30"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation>Tentar recuperar chaves privadas de um arquivo wallet.dat corrompido</translation>
</message>
<message>
<location line="+4"/>
<source>Block creation options:</source>
<translation>Opções de criação de blocos:</translation>
</message>
<message>
<location line="-62"/>
<source>Connect only to the specified node(s)</source>
<translation>Conectar apenas a nó(s) específico(s)</translation>
</message>
<message>
<location line="+4"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation>Descobrir os próprios endereços IP (padrão: 1 quando no modo listening e opção -externalip não estiver presente)</translation>
</message>
<message>
<location line="+94"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation>Falha ao escutar em qualquer porta. Use -listen=0 se você quiser isso.</translation>
</message>
<message>
<location line="-90"/>
<source>Find peers using DNS lookup (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Sync checkpoints policy (default: strict)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+83"/>
<source>Invalid -tor address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Invalid amount for -reservebalance=<amount></source>
<translation type="unfinished"/>
</message>
<message>
<location line="-82"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation>Buffer máximo de recebimento por conexão, <n>*1000 bytes (padrão: 5000)</translation>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation>Buffer máximo de envio por conexão, <n>*1000 bytes (padrão: 1000)</translation>
</message>
<message>
<location line="-16"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation>Apenas conectar em nós na rede <net> (IPv4, IPv6, ou Tor)</translation>
</message>
<message>
<location line="+28"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Prepend debug output with timestamp</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source>
<translation>Opções SSL: (veja a Wiki do Bitcoin para instruções de configuração SSL)</translation>
</message>
<message>
<location line="-74"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+41"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation>Mandar informação de trace/debug para o console em vez de para o arquivo debug.log</translation>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation>Determinar tamanho mínimo de bloco em bytes (padrão: 0)</translation>
</message>
<message>
<location line="-29"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation>Encolher arquivo debug.log ao iniciar o cliente (padrão 1 se opção -debug não estiver presente)</translation>
</message>
<message>
<location line="-42"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation>Especifique o tempo limite (timeout) da conexão em milissegundos (padrão: 5000) </translation>
</message>
<message>
<location line="+109"/>
<source>Unable to sign checkpoint, wrong checkpointkey?
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-80"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation>Usar UPnP para mapear porta de escuta (padrão: 0)</translation>
</message>
<message>
<location line="-1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation>Usar UPnP para mapear porta de escuta (padrão: 1 quando estiver escutando)</translation>
</message>
<message>
<location line="-25"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<source>Username for JSON-RPC connections</source>
<translation>Nome de usuário para conexões JSON-RPC</translation>
</message>
<message>
<location line="+47"/>
<source>Verifying database integrity...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+57"/>
<source>WARNING: syncronized checkpoint violation detected, but skipped!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: Disk space is low!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation>Cuidado: Esta versão está obsoleta, atualização exigida!</translation>
</message>
<message>
<location line="-48"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation>wallet.dat corrompido, recuperação falhou</translation>
</message>
<message>
<location line="-54"/>
<source>Password for JSON-RPC connections</source>
<translation>Senha para conexões JSON-RPC</translation>
</message>
<message>
<location line="-84"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=EvilCoinrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "EvilCoin Alert" [email protected]
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Find peers using internet relay chat (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Sync time with other nodes. Disable if time on your system is precise e.g. syncing with NTP (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>When creating transactions, ignore inputs with value less than this (default: 0.01)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation>Permitir conexões JSON-RPC de endereços IP específicos</translation>
</message>
<message>
<location line="+1"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation>Enviar comando para nó rodando em <ip> (padrão: 127.0.0.1)</translation>
</message>
<message>
<location line="+1"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation>Executar comando quando o melhor bloco mudar (%s no comando será substituído pelo hash do bloco)</translation>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation>Executar comando quando uma transação da carteira mudar (%s no comando será substituído por TxID)</translation>
</message>
<message>
<location line="+3"/>
<source>Require a confirmations for change (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Enforce transaction scripts to use canonical PUSH operators (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Upgrade wallet to latest format</source>
<translation>Atualizar carteira para o formato mais recente</translation>
</message>
<message>
<location line="+1"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation>Determinar tamanho do pool de endereços para <n> (padrão: 100)</translation>
</message>
<message>
<location line="+1"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation>Re-escanear blocos procurando por transações perdidas da carteira</translation>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 2500, 0 = all)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-6, default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Imports blocks from external blk000?.dat file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation>Usar OpenSSL (https) para conexões JSON-RPC</translation>
</message>
<message>
<location line="+1"/>
<source>Server certificate file (default: server.cert)</source>
<translation>Arquivo de certificado do servidor (padrão: server.cert)</translation>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation>Chave privada do servidor (padrão: server.pem)</translation>
</message>
<message>
<location line="+1"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+53"/>
<source>Error: Wallet unlocked for staking only, unable to create transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>WARNING: Invalid checkpoint found! Displayed transactions may not be correct! You may need to upgrade, or notify developers.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-158"/>
<source>This help message</source>
<translation>Esta mensagem de ajuda</translation>
</message>
<message>
<location line="+95"/>
<source>Wallet %s resides outside data directory %s.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot obtain a lock on data directory %s. EvilCoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-98"/>
<source>EvilCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+140"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation>Impossível vincular a %s neste computador (bind retornou erro %d, %s)</translation>
</message>
<message>
<location line="-130"/>
<source>Connect through socks proxy</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation>Permitir consultas DNS para -addnode, -seednode e -connect</translation>
</message>
<message>
<location line="+122"/>
<source>Loading addresses...</source>
<translation>Carregando endereços...</translation>
</message>
<message>
<location line="-15"/>
<source>Error loading blkindex.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation>Erro ao carregar wallet.dat: Carteira corrompida</translation>
</message>
<message>
<location line="+4"/>
<source>Error loading wallet.dat: Wallet requires newer version of EvilCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Wallet needed to be rewritten: restart EvilCoin to complete</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat</source>
<translation>Erro ao carregar wallet.dat</translation>
</message>
<message>
<location line="-16"/>
<source>Invalid -proxy address: '%s'</source>
<translation>Endereço -proxy inválido: '%s'</translation>
</message>
<message>
<location line="-1"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation>Rede desconhecida especificada em -onlynet: '%s'</translation>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation>Versão desconhecida do proxy -socks requisitada: %i</translation>
</message>
<message>
<location line="+4"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation>Impossível encontrar o endereço -bind: '%s'</translation>
</message>
<message>
<location line="+2"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation>Impossível encontrar endereço -externalip: '%s'</translation>
</message>
<message>
<location line="-24"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation>Quantidade inválida para -paytxfee=<quantidade>: '%s'</translation>
</message>
<message>
<location line="+44"/>
<source>Error: could not start node</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Invalid amount</source>
<translation>Quantidade inválida</translation>
</message>
<message>
<location line="+1"/>
<source>Insufficient funds</source>
<translation>Saldo insuficiente</translation>
</message>
<message>
<location line="-34"/>
<source>Loading block index...</source>
<translation>Carregando índice de blocos...</translation>
</message>
<message>
<location line="-103"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation>Adicionar um nó com o qual se conectar e tentar manter a conexão ativa</translation>
</message>
<message>
<location line="+122"/>
<source>Unable to bind to %s on this computer. EvilCoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-97"/>
<source>Fee per KB to add to transactions you send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+55"/>
<source>Invalid amount for -mininput=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Loading wallet...</source>
<translation>Carregando carteira...</translation>
</message>
<message>
<location line="+8"/>
<source>Cannot downgrade wallet</source>
<translation>Não é possível fazer downgrade da carteira</translation>
</message>
<message>
<location line="+1"/>
<source>Cannot initialize keypool</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot write default address</source>
<translation>Não foi possível escrever no endereço padrão</translation>
</message>
<message>
<location line="+1"/>
<source>Rescanning...</source>
<translation>Re-escaneando...</translation>
</message>
<message>
<location line="+5"/>
<source>Done loading</source>
<translation>Carregamento terminado</translation>
</message>
<message>
<location line="-167"/>
<source>To use the %s option</source>
<translation>Para usar a opção %s</translation>
</message>
<message>
<location line="+14"/>
<source>Error</source>
<translation>Erro</translation>
</message>
<message>
<location line="+6"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation>Você precisa especificar rpcpassword=<senha> no arquivo de configurações:⏎
%s⏎
Se o arquivo não existir, crie um com permissão de leitura apenas pelo dono</translation>
</message>
</context>
</TS><|fim▁end|> | </message> |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>'use strict';
exports.ContainerBuilder = require('./CqrsContainerBuilder');
exports.EventStream = require('./EventStream');
exports.CommandBus = require('./CommandBus');
exports.EventStore = require('./EventStore');
exports.AbstractAggregate = require('./AbstractAggregate');
exports.AggregateCommandHandler = require('./AggregateCommandHandler');
exports.AbstractSaga = require('./AbstractSaga');
exports.SagaEventHandler = require('./SagaEventHandler');
exports.AbstractProjection = require('./AbstractProjection');
<|fim▁hole|>exports.InMemoryMessageBus = require('./infrastructure/InMemoryMessageBus');
exports.InMemoryEventStorage = require('./infrastructure/InMemoryEventStorage');
exports.InMemorySnapshotStorage = require('./infrastructure/InMemorySnapshotStorage');
exports.InMemoryView = require('./infrastructure/InMemoryView');
exports.getMessageHandlerNames = require('./utils/getMessageHandlerNames');
exports.subscribe = require('./subscribe');<|fim▁end|> | |
<|file_name|>goagent.go<|end_file_name|><|fim▁begin|>package goagent
import (
"bytes"
"crypto/tls"
"encoding/json"
"errors"
"io/ioutil"
"log"
"net/http"
"net/url"
"os/exec"
)
const updateSecretFromAgentPath = "/mitro-core/api/internal/UpdateSecretFromAgent"
const phantomCommand = "phantomjs"
// Top level request object sent to MitroServlet.
type SignedRequest struct {
ImplicitEndTransaction *bool `json:"omitempty"`
ImplicitBeginTransaction *bool `json:"omitempty"`
OperationName string `json:"operationName,omitempty"`
// Optional transaction id for this request.
TransactionId *string `json:"omitempty"`
// Serialized JSON object containing the actual request. A string is easy to sign/verify.
Request string `json:"request"`
Signature string `json:"signature"`
// User making the request.
Identity string `json:"identity"`
ClientIdentifier string `json:"omitempty"`
Platform string `json:"omitempty"`
}
type UpdateSecretFromAgentRequest struct {
DataFromUser string `json:"dataFromUser"`
DataFromUserSignature string `json:"dataFromUserSignature"`
Username string `json:"username"`
URL string `json:"url"`
ClientIdentifier string `json:"clientIdentifier"`
Platform string `json:"platform"`
}
type CriticalData struct {
Password string `json:"password"`
Note string `json:"note"`
OldPassword string `json:"oldPassword"`
}
type UserData struct {
SecretId int `json:"secretId"`
UserId string `json:"userId"`
CriticalData CriticalData `json:"criticalData"`
}
type UpdateSecretFromAgentResponse struct {
dataFromUser string
dataFromUserSignature string
}
// MitroCoreClient contains settings for communicating with the Mitro core server.
type MitroCoreClient struct {
client *http.Client
internalURL string
}
func NewClientInsecure(internalURL string) (*MitroCoreClient, error) {
t := &http.Transport{
TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
}
return newClient(internalURL, &http.Client{Transport: t})
}
func NewClient(internalURL string) (*MitroCoreClient, error) {
return newClient(internalURL, &http.Client{})
}
func newClient(internalURL string, client *http.Client) (*MitroCoreClient, error) {
u, err := url.Parse(internalURL)
if err != nil {
return nil, err
}
if u.Scheme != "http" && u.Scheme != "https" {
return nil, errors.New("unsupported scheme: " + u.Scheme)
}
if u.Host == "" {
return nil, errors.New("internalURL missing host")
}
if u.Path != "" || u.Fragment != "" {
return nil, errors.New("internalURL must not have path or fragment")
}
return &MitroCoreClient{client, internalURL}, nil
}
func (c *MitroCoreClient) post(path string, request interface{}, response interface{}) error {
topLevel := SignedRequest{}
topLevel.OperationName = "changepw"
serialized, err := json.Marshal(request)
if err != nil {
return err
}
topLevel.Request = string(serialized)
// TODO: sign?
topLevel.Signature = ""
topLevel.Identity = "[email protected]"
serialized, err = json.Marshal(topLevel)
if err != nil {
return err<|fim▁hole|> return err
}
responseBytes, err := ioutil.ReadAll(postResponse.Body)
postResponse.Body.Close()
if err != nil {
return err
}
if postResponse.StatusCode != http.StatusOK {
return errors.New("post failed: " + postResponse.Status)
}
return json.Unmarshal(responseBytes, response)
}
func (c *MitroCoreClient) UpdateSecretFromAgent(request *UpdateSecretFromAgentRequest) error {
response := UpdateSecretFromAgentResponse{}
return c.post(updateSecretFromAgentPath, request, &response)
}
var supportedSites = map[string]struct{}{
"github.com": {},
"instagram.com": {},
"skype.com": {},
"twitter.com": {},
}
func ChangePassword(loginURL string, username string, oldPassword string, newPassword string) error {
u, err := url.Parse(loginURL)
if err != nil {
return err
}
log.Print("change password for host ", u.Host)
_, exists := supportedSites[u.Host]
if !exists {
return errors.New("Unsupported site: " + u.Host)
}
log.Printf("attempting change password with phantomjs for host=%s username=%s", u.Host, username)
arguments := []string{"changepw.js", u.Host, username, oldPassword, newPassword}
phantomjs := exec.Command(phantomCommand, arguments...)
output, err := phantomjs.CombinedOutput()
if err != nil {
message := err.Error() + ": " + string(output)
lastIndex := len(message)
for message[lastIndex-1] == '\n' {
lastIndex -= 1
}
return errors.New(message[:lastIndex])
}
return nil
}<|fim▁end|> | }
postResponse, err := c.client.Post(c.internalURL+path, "application/json", bytes.NewReader(serialized))
if err != nil { |
<|file_name|>canteraTest.py<|end_file_name|><|fim▁begin|>import unittest
import os
import numpy
from rmgpy.tools.canteraModel import findIgnitionDelay, CanteraCondition, Cantera
from rmgpy.quantity import Quantity
import rmgpy
class CanteraTest(unittest.TestCase):
def testIgnitionDelay(self):
"""
Test that findIgnitionDelay() works.
"""
t = numpy.arange(0,5,0.5)
P = numpy.array([0,0.33,0.5,0.9,2,4,15,16,16.1,16.2])
OH = numpy.array([0,0.33,0.5,0.9,2,4,15,16,7,2])
CO = OH*0.9
t_ign = findIgnitionDelay(t,P)
self.assertEqual(t_ign,2.75)
t_ign = findIgnitionDelay(t,OH,'maxHalfConcentration')
self.assertEqual(t_ign,3)
t_ign = findIgnitionDelay(t,[OH,CO], 'maxSpeciesConcentrations')
self.assertEqual(t_ign,3.5)
def testRepr(self):
"""
Test that the repr function for a CanteraCondition object can reconstitute
the same object
"""<|fim▁hole|> molFrac={'CC': 0.05, '[Ar]': 0.95}
P=(3,'atm')
T=(1500,'K')
terminationTime=(5e-5,'s')
condition = CanteraCondition(reactorType,
terminationTime,
molFrac,
T0=T,
P0=P)
reprCondition=eval(condition.__repr__())
self.assertEqual(reprCondition.T0.value_si,Quantity(T).value_si)
self.assertEqual(reprCondition.P0.value_si,Quantity(P).value_si)
self.assertEqual(reprCondition.V0,None)
self.assertEqual(reprCondition.molFrac,molFrac)
class RMGToCanteraTest(unittest.TestCase):
"""
Contains unit tests for the conversion of RMG species and reaction objects to Cantera objects.
"""
def setUp(self):
"""
A function run before each unit test in this class.
"""
from rmgpy.chemkin import loadChemkinFile
folder = os.path.join(os.path.dirname(rmgpy.__file__),'tools/data/various_kinetics')
chemkinPath = os.path.join(folder, 'chem_annotated.inp')
dictionaryPath = os.path.join(folder, 'species_dictionary.txt')
transportPath = os.path.join(folder, 'tran.dat')
species, reactions = loadChemkinFile(chemkinPath, dictionaryPath,transportPath)
self.rmg_ctSpecies = [spec.toCantera() for spec in species]
self.rmg_ctReactions = []
for rxn in reactions:
convertedReactions = rxn.toCantera(species)
if isinstance(convertedReactions,list):
self.rmg_ctReactions.extend(convertedReactions)
else:
self.rmg_ctReactions.append(convertedReactions)
job = Cantera()
job.loadChemkinModel(chemkinPath, transportFile=transportPath,quiet=True)
self.ctSpecies = job.model.species()
self.ctReactions = job.model.reactions()
def testSpeciesConversion(self):
"""
Test that species objects convert properly
"""
from rmgpy.tools.canteraModel import checkEquivalentCanteraSpecies
for i in range(len(self.ctSpecies)):
self.assertTrue(checkEquivalentCanteraSpecies(self.ctSpecies[i],self.rmg_ctSpecies[i]))
def testReactionConversion(self):
"""
Test that species objects convert properly
"""
from rmgpy.tools.canteraModel import checkEquivalentCanteraReaction
for i in range(len(self.ctReactions)):
self.assertTrue(checkEquivalentCanteraReaction(self.ctReactions[i],self.rmg_ctReactions[i]))<|fim▁end|> | reactorType='IdealGasReactor' |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>###################################################################################
#
# Copyright (c) 2017-2019 MuK IT GmbH.
#
# This file is part of MuK Web Editor Utils
# (see https://mukit.at).
#<|fim▁hole|>#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###################################################################################<|fim▁end|> | # This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version. |
<|file_name|>UtestPlatform.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2007, Michael Feathers, James Grenning, Bas Vodde and Timo Puronen
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the <organization> nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE EARLIER MENTIONED AUTHORS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL <copyright holder> BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "CppUTest/TestHarness.h"
#include <e32def.h>
#include <e32std.h>
#include <sys/time.h>
#include <stdio.h>
#include <stdarg.h>
#include <string.h>
#include <math.h>
#include <stdlib.h>
#include "CppUTest/PlatformSpecificFunctions.h"
static jmp_buf test_exit_jmp_buf[10];
static int jmp_buf_index = 0;
int PlatformSpecificSetJmp(void (*function) (void* data), void* data)
{
if (0 == setjmp(test_exit_jmp_buf[jmp_buf_index])) {
jmp_buf_index++;
function(data);
jmp_buf_index--;
return 1;
}
return 0;
}
void PlatformSpecificLongJmp()
{
jmp_buf_index--;
longjmp(test_exit_jmp_buf[jmp_buf_index], 1);
}
void PlatformSpecificRestoreJumpBuffer()
{
jmp_buf_index--;
}
void PlatformSpecificRunTestInASeperateProcess(UtestShell* shell, TestPlugin* plugin, TestResult* result)
{
printf("-p doesn't work on this platform as it is not implemented. Running inside the process\b");
shell->runOneTest(plugin, *result);
}
static long TimeInMillisImplementation() {
struct timeval tv;
struct timezone tz;
::gettimeofday(&tv, &tz);
return (tv.tv_sec * 1000) + (long)(tv.tv_usec * 0.001);
}
long (*GetPlatformSpecificTimeInMillis)() = TimeInMillisImplementation;
TestOutput::WorkingEnvironment PlatformSpecificGetWorkingEnvironment()
{
return TestOutput::eclipse;
}
static SimpleString TimeStringImplementation() {
time_t tm = time(NULL);
return ctime(&tm);
}
SimpleString (*GetPlatformSpecificTimeString)() = TimeStringImplementation;
int PlatformSpecificVSNprintf(char* str, size_t size, const char* format, va_list args) {
return vsnprintf(str, size, format, args);
}
void PlatformSpecificFlush() {
fflush(stdout);
}
int PlatformSpecificPutchar(int c) {
return putchar(c);
}
double PlatformSpecificFabs(double d) {
return fabs(d);
}
void* PlatformSpecificMalloc(size_t size) {
return malloc(size);
}
void* PlatformSpecificRealloc (void* memory, size_t size) {
return realloc(memory, size);
}
void PlatformSpecificFree(void* memory) {
free(memory);
}
void* PlatformSpecificMemCpy(void* s1, const void* s2, size_t size) {
return memcpy(s1, s2, size);
}
<|fim▁hole|>void* PlatformSpecificMemset(void* mem, int c, size_t size)
{
return memset(mem, c, size);
}
PlatformSpecificFile PlatformSpecificFOpen(const char* filename, const char* flag) {
return fopen(filename, flag);
}
void PlatformSpecificFPuts(const char* str, PlatformSpecificFile file) {
fputs(str, (FILE*)file);
}
void PlatformSpecificFClose(PlatformSpecificFile file) {
fclose((FILE*)file);
}
extern "C" {
static int IsNanImplementation(double d)
{
return isnan(d);
}
static int IsInfImplementation(double d)
{
return isinf(d);
}
int (*PlatformSpecificIsNan)(double) = IsNanImplementation;
int (*PlatformSpecificIsInf)(double) = IsInfImplementation;
}
static PlatformSpecificMutex DummyMutexCreate(void)
{
FAIL("PlatformSpecificMutexCreate is not implemented");
return 0;
}
static void DummyMutexLock(PlatformSpecificMutex mtx)
{
FAIL("PlatformSpecificMutexLock is not implemented");
}
static void DummyMutexUnlock(PlatformSpecificMutex mtx)
{
FAIL("PlatformSpecificMutexUnlock is not implemented");
}
static void DummyMutexDestroy(PlatformSpecificMutex mtx)
{
FAIL("PlatformSpecificMutexDestroy is not implemented");
}
PlatformSpecificMutex (*PlatformSpecificMutexCreate)(void) = DummyMutexCreate;
void (*PlatformSpecificMutexLock)(PlatformSpecificMutex) = DummyMutexLock;
void (*PlatformSpecificMutexUnlock)(PlatformSpecificMutex) = DummyMutexUnlock;
void (*PlatformSpecificMutexDestroy)(PlatformSpecificMutex) = DummyMutexDestroy;<|fim▁end|> | |
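A minimal usage sketch of the PlatformSpecificSetJmp/PlatformSpecificLongJmp pair defined in the UtestPlatform.cpp record above, assuming only the declarations shown there; the helper names guardedBody and runGuarded are illustrative assumptions and are not part of CppUTest.

#include <cstdio>

int PlatformSpecificSetJmp(void (*function)(void* data), void* data); // defined in the record above
void PlatformSpecificLongJmp();                                       // defined in the record above

static void guardedBody(void* data)
{
    int* shouldFail = static_cast<int*>(data);
    if (*shouldFail)
        PlatformSpecificLongJmp(); // bail out early; the matching SetJmp call then returns 0
    std::printf("body ran to completion\n");
}

static int runGuarded(int shouldFail)
{
    // Returns 1 when guardedBody finished normally, 0 when it escaped via longjmp.
    return PlatformSpecificSetJmp(guardedBody, &shouldFail);
}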
<|file_name|>shell.py<|end_file_name|><|fim▁begin|># The MIT License (MIT)
#
# Copyright (c) 2016 Frederic Guillot
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from cliff import app
from cliff import commandmanager
from pbr import version as app_version
import sys
from kanboard_cli.commands import application
from kanboard_cli.commands import project
from kanboard_cli.commands import task
from kanboard_cli import client
class KanboardShell(app.App):
def __init__(self):
super(KanboardShell, self).__init__(
description='Kanboard Command Line Client',
version=app_version.VersionInfo('kanboard_cli').version_string(),
command_manager=commandmanager.CommandManager('kanboard.cli'),
deferred_help=True)
self.client = None
self.is_super_user = True
def build_option_parser(self, description, version, argparse_kwargs=None):
parser = super(KanboardShell, self).build_option_parser(
description, version, argparse_kwargs=argparse_kwargs)
parser.add_argument(
'--url',
metavar='<api url>',
help='Kanboard API URL',
)
parser.add_argument(
'--username',
metavar='<api username>',
help='API username',
)
parser.add_argument(
'--password',
metavar='<api password>',
help='API password/token',
)
parser.add_argument(
'--auth-header',
metavar='<authentication header>',
help='API authentication header',
)<|fim▁hole|> def initialize_app(self, argv):
client_manager = client.ClientManager(self.options)
self.client = client_manager.get_client()
self.is_super_user = client_manager.is_super_user()
self.command_manager.add_command('app version', application.ShowVersion)
self.command_manager.add_command('app timezone', application.ShowTimezone)
self.command_manager.add_command('project show', project.ShowProject)
self.command_manager.add_command('project list', project.ListProjects)
self.command_manager.add_command('task create', task.CreateTask)
self.command_manager.add_command('task list', task.ListTasks)
def main(argv=sys.argv[1:]):
return KanboardShell().run(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))<|fim▁end|> |
return parser
|
<|file_name|>file_operations.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import os
import grp
import tempfile
from django.conf import settings
from utilities import encoding
import shutil
import zipfile
gid = None
if (settings.USEPRAKTOMATTESTER):
gid = grp.getgrnam('praktomat').gr_gid
def makedirs(path):
if os.path.exists(path):
return
else:
(head, tail) = os.path.split(path)
makedirs(head)
os.mkdir(path)
if (gid):
os.chown(path, -1, gid)
os.chmod(path, 0o770)
<|fim▁hole|> if not os.path.exists(dirname):
makedirs(dirname)
else:
if os.path.exists(path):
if override: # delete file
os.remove(path)
else: # throw exception
raise Exception('File already exists')
with open(path, 'wb') as fd:
if binary:
fd.write(content)
else:
fd.write(encoding.get_utf8(encoding.get_unicode(content)))
if (gid):
# chown :praktomat <path>
os.chown(path, -1, gid)
# rwxrwx--- access for praktomattester:praktomat
os.chmod(path, 0o770)
def copy_file(from_path, to_path, to_is_directory=False, override=True):
""" """
if to_is_directory:
to_path = os.path.join(to_path, os.path.basename(from_path))
with open(from_path, "rb") as fd:
create_file(to_path, fd.read(), override=override, binary=True)
def create_tempfolder(path):
makedirs(path)
tempfile.tempdir = path
new_tmpdir = tempfile.mkdtemp()
if (gid):
os.chown(new_tmpdir, -1, gid)
os.chmod(new_tmpdir, 0o770)
return new_tmpdir
class InvalidZipFile(Exception):
pass
def unpack_zipfile_to(zipfilename, to_path, override_cb=None, file_cb=None):
"""
Extracts a zipfile to the given location, trying to safeguard against wrong paths
The override_cb is called for every file that overwrites an existing file,
with the name of the file in the archive as the parameter.
The file_cb is called for every file, after extracting it.
"""
if not zipfile.is_zipfile(zipfilename):
raise InvalidZipFile("File %s is not a zipfile." % zipfilename)
zip = zipfile.ZipFile(zipfilename, 'r')
if zip.testzip():
raise InvalidZipFile("File %s is invalid." % zipfilename)
# zip.extractall would not protect against ..-paths,
# it would do so from python 2.7.4 on.
for finfo in zip.infolist():
dest = os.path.join(to_path, finfo.filename)
# This check is from http://stackoverflow.com/a/10077309/946226
if not os.path.realpath(os.path.abspath(dest)).startswith(to_path):
raise InvalidZipFile("File %s contains illegal path %s." % (zipfilename, finfo.filename))
if override_cb is not None and os.path.exists(dest):
override_cb(finfo.filename)
zip.extract(finfo, to_path)
if file_cb is not None and os.path.isfile(os.path.join(to_path, finfo.filename)):
file_cb(finfo.filename)<|fim▁end|> | def create_file(path, content, override=True, binary=False):
""" """
dirname = os.path.dirname(path) |
<|file_name|>app.js<|end_file_name|><|fim▁begin|>/* global window */
import React from 'react'
import NProgress from 'nprogress'
import PropTypes from 'prop-types'
import { connect } from 'dva'
import { Helmet } from 'react-helmet'
import { withRouter } from 'dva/router'
let lastHref
const App = ({ children, dispatch, app, loading, location }) => {
const href = window.location.href
if (lastHref !== href) {
NProgress.start()
if (!loading.global) {
NProgress.done()
lastHref = href
}
}
return (
<div>
<Helmet>
<title>DVA DEMO</title>
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
</Helmet>
<div>
{ children }
</div>
</div>
)
}
App.propTypes = {
children: PropTypes.element.isRequired,<|fim▁hole|> app: PropTypes.object,
loading: PropTypes.object,
}
export default withRouter(connect(({ app, loading }) => ({ app, loading }))(App))<|fim▁end|> | location: PropTypes.object,
dispatch: PropTypes.func, |
<|file_name|>issue-11612.rs<|end_file_name|><|fim▁begin|>// check-pass
#![allow(dead_code)]
// #11612
// We weren't updating the auto adjustments with all the resolved
// type information after type check.
// pretty-expanded FIXME #23616
trait A { fn dummy(&self) { } }
struct B<'a, T:'a> {
f: &'a T
}<|fim▁hole|>
impl<'a, T> A for B<'a, T> {}
fn foo(_: &dyn A) {}
fn bar<G>(b: &B<G>) {
foo(b); // Coercion should work
foo(b as &dyn A); // Explicit cast should work as well
}
fn main() {}<|fim▁end|> | |
<|file_name|>utils.rs<|end_file_name|><|fim▁begin|>use wasm_bindgen::prelude::*;
#[wasm_bindgen]
pub fn set_panic_hook() {
// When the `console_error_panic_hook` feature is enabled, we can call the
// `set_panic_hook` function at least once during initialization, and then
// we will get better error messages if our code ever panics.
//
// For more details see
// https://github.com/rustwasm/console_error_panic_hook#readme<|fim▁hole|>}<|fim▁end|> | #[cfg(feature = "console_error_panic_hook")]
console_error_panic_hook::set_once(); |
<|file_name|>jquery.waituntilexists.js<|end_file_name|><|fim▁begin|>;(function ($, window) {<|fim▁hole|>
var intervals = {};
var removeListener = function(selector) {
if (intervals[selector]) {
window.clearInterval(intervals[selector]);
intervals[selector] = null;
}
};
var found = 'waitUntilExists.found';
/**
* @function
* @property {object} jQuery plugin which runs handler function once specified
* element is inserted into the DOM
* @param {function|string} handler
* A function to execute at the time when the element is inserted or
* string "remove" to remove the listener from the given selector
* @param {bool} shouldRunHandlerOnce
* Optional: if true, handler is unbound after its first invocation
* @example jQuery(selector).waitUntilExists(function);
*/
$.fn.waitUntilExists = function(handler, shouldRunHandlerOnce, isChild) {
var selector = this.selector;
var $this = $(selector);
var $elements = $this.not(function() { return $(this).data(found); });
if (handler === 'remove') {
// Hijack and remove interval immediately if the code requests
removeListener(selector);
}
else {
// Run the handler on all found elements and mark as found
$elements.each(handler).data(found, true);
if (shouldRunHandlerOnce && $this.length) {
// Element was found, implying the handler already ran for all
// matched elements
removeListener(selector);
}
else if (!isChild) {
// If this is a recurring search or if the target has not yet been
// found, create an interval to continue searching for the target
intervals[selector] = window.setInterval(function () {
$this.waitUntilExists(handler, shouldRunHandlerOnce, true);
}, 500);
}
}
return $this;
};
}(jQuery, window));<|fim▁end|> | |
<|file_name|>doc.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2012, Martin Angers & Contributors
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation and/or
// other materials provided with the distribution.
// * Neither the name of the author nor the names of its contributors may be used to
// endorse or promote products derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS
// OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
// AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY
// WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
/*
Package goquery implements features similar to jQuery, including the chainable
syntax, to manipulate and query an HTML document (the modification functions of jQuery are not included).
It depends on Go's experimental html package, which must be installed so that it
can be imported as "code.google.com/p/go.net/html". See this tutorial on how to install it
accordingly: http://code.google.com/p/go-wiki/wiki/InstallingExp
It uses Cascadia as CSS selector (similar to Sizzle for jQuery). This dependency
is automatically installed when using "go get ..." to install GoQuery.
To provide a chainable interface, error management is strict, and goquery panics
if an invalid Cascadia selector is used (this is consistent with the behavior of
jQuery/Sizzle/document.querySelectorAll, where an error is thrown). This is
necessary since multiple return values cannot be used to allow a chainable
interface.
It is hosted on GitHub, along with additional documentation in the README.md
file: https://github.com/puerkitobio/goquery
The various methods are split into files based on the category of behavior:
* array.go : array-like positional manipulation of the selection.
- Eq()
- First()
- Get()
- Index...()
- Last()
- Slice()
* expand.go : methods that expand or augment the selection's set.
- Add...()
- AndSelf()
- Union(), which is an alias for AddSelection()
* filter.go : filtering methods, that reduce the selection's set.
- End()
- Filter...()
- Has...()
- Intersection(), which is an alias of FilterSelection()
- Not...()
* iteration.go : methods to loop over the selection's nodes.
- Each()
- Map()<|fim▁hole|> - Length()
- Size(), which is an alias for Length()
- Text()
* query.go : methods that query, or reflect, a node's identity.
- Contains()
- HasClass()
- Is...()
* traversal.go : methods to traverse the HTML document tree.
- Children...()
- Contents()
- Find...()
- Next...()
- Parent[s]...()
- Prev...()
- Siblings...()
* type.go : definition of the types exposed by GoQuery.
- Document
- Selection
*/
package goquery<|fim▁end|> |
* property.go : methods that inspect and get the node's properties values.
- Attr()
- Html() |
<|file_name|>build_test.go<|end_file_name|><|fim▁begin|>package gexec_test
import (
"os"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
"github.com/onsi/gomega/gexec"
)
var packagePath = "./_fixture/firefly"
var _ = Describe(".Build", func() {
Context("when there have been previous calls to Build", func() {
BeforeEach(func() {
_, err := gexec.Build(packagePath)
Expect(err).ShouldNot(HaveOccurred())
})
It("compiles the specified package", func() {
compiledPath, err := gexec.Build(packagePath)
Expect(err).ShouldNot(HaveOccurred())
Expect(compiledPath).Should(BeAnExistingFile())
})
Context("and CleanupBuildArtifacts has been called", func() {
BeforeEach(func() {
gexec.CleanupBuildArtifacts()
})
It("compiles the specified package", func() {
var err error
fireflyPath, err = gexec.Build(packagePath)
Expect(err).ShouldNot(HaveOccurred())
Expect(fireflyPath).Should(BeAnExistingFile())
})
})
})
})
var _ = Describe(".BuildWithEnvironment", func() {
var err error
env := []string{
"GOOS=linux",
"GOARCH=amd64",
}
It("compiles the specified package with the specified env vars", func() {
compiledPath, err := gexec.BuildWithEnvironment(packagePath, env)
Expect(err).ShouldNot(HaveOccurred())
Expect(compiledPath).Should(BeAnExistingFile())
})
It("returns the environment to a good state", func() {
_, err = gexec.BuildWithEnvironment(packagePath, env)
Expect(err).ShouldNot(HaveOccurred())
Expect(os.Environ()).ShouldNot(ContainElement("GOOS=linux"))
})
})
var _ = Describe(".BuildIn", func() {
var (
gopath string<|fim▁hole|> BeforeEach(func() {
gopath = os.Getenv("GOPATH")
Expect(gopath).NotTo(BeEmpty())
Expect(os.Setenv("GOPATH", "/tmp")).To(Succeed())
Expect(os.Environ()).To(ContainElement("GOPATH=/tmp"))
})
AfterEach(func() {
Expect(os.Setenv("GOPATH", gopath)).To(Succeed())
})
It("appends the gopath env var", func() {
_, err := gexec.BuildIn(gopath, "github.com/onsi/gomega/gexec/_fixture/firefly/")
Expect(err).NotTo(HaveOccurred())
})
It("resets GOPATH to its original value", func() {
_, err := gexec.BuildIn(gopath, "github.com/onsi/gomega/gexec/_fixture/firefly/")
Expect(err).NotTo(HaveOccurred())
Expect(os.Getenv("GOPATH")).To(Equal("/tmp"))
})
})<|fim▁end|> | )
|
<|file_name|>test_path_groups.py<|end_file_name|><|fim▁begin|>import nose
import angr
import logging
l = logging.getLogger("angr_tests.path_groups")
import os
location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries/tests'))
addresses_fauxware = {
'armel': 0x8524,
'armhf': 0x104c9, # addr+1 to force thumb
#'i386': 0x8048524, # commenting out because of the freaking stack check
'mips': 0x400710,
'mipsel': 0x4006d0,
'ppc': 0x1000054c,
'ppc64': 0x10000698,
'x86_64': 0x400664
}
<|fim▁hole|> p = angr.Project(location + '/' + arch + '/fauxware', load_options={'auto_load_libs': False})
pg = p.factory.path_group(threads=threads)
nose.tools.assert_equal(len(pg.active), 1)
nose.tools.assert_equal(pg.active[0].length, 0)
# step until the backdoor split occurs
pg2 = pg.step(until=lambda lpg: len(lpg.active) > 1, step_func=lambda lpg: lpg.prune())
nose.tools.assert_equal(len(pg2.active), 2)
nose.tools.assert_true(any("SOSNEAKY" in s for s in pg2.mp_active.state.posix.dumps(0).mp_items))
nose.tools.assert_false(all("SOSNEAKY" in s for s in pg2.mp_active.state.posix.dumps(0).mp_items))
# separate out the backdoor and normal paths
pg3 = pg2.stash(lambda path: "SOSNEAKY" in path.state.posix.dumps(0), to_stash="backdoor").stash_all(to_stash="auth")
nose.tools.assert_equal(len(pg3.active), 0)
nose.tools.assert_equal(len(pg3.backdoor), 1)
nose.tools.assert_equal(len(pg3.auth), 1)
# step the backdoor path until it returns to main
pg4 = pg3.step(until=lambda lpg: lpg.backdoor[0].jumpkinds[-1] == 'Ijk_Ret', stash='backdoor')
main_addr = pg4.backdoor[0].addr
nose.tools.assert_equal(len(pg4.active), 0)
nose.tools.assert_equal(len(pg4.backdoor), 1)
nose.tools.assert_equal(len(pg4.auth), 1)
# now step the real path until the real authentication paths return to the same place
pg5 = pg4.explore(find=main_addr, num_find=2, stash='auth').unstash_all(from_stash='found', to_stash='auth')
nose.tools.assert_equal(len(pg5.active), 0)
nose.tools.assert_equal(len(pg5.backdoor), 1)
nose.tools.assert_equal(len(pg5.auth), 2)
# now unstash everything
pg6 = pg5.unstash_all(from_stash='backdoor').unstash_all(from_stash='auth')
nose.tools.assert_equal(len(pg6.active), 3)
nose.tools.assert_equal(len(pg6.backdoor), 0)
nose.tools.assert_equal(len(pg6.auth), 0)
nose.tools.assert_equal(len(set(pg6.mp_active.addr.mp_items)), 1)
# now merge them!
pg7 = pg6.merge()
nose.tools.assert_equal(len(pg7.active), 1)
nose.tools.assert_equal(len(pg7.backdoor), 0)
nose.tools.assert_equal(len(pg7.auth), 0)
#import ipdb; ipdb.set_trace()
#print pg2.mp_active.addr.mp_map(hex).mp_items
# test selecting paths to step
pg_a = p.factory.path_group(immutable=True)
pg_b = pg_a.step(until=lambda lpg: len(lpg.active) > 1, step_func=lambda lpg: lpg.prune().drop(stash='pruned'))
pg_c = pg_b.step(selector_func=lambda p: p is pg_b.active[0], step_func=lambda lpg: lpg.prune().drop(stash='pruned'))
nose.tools.assert_is(pg_b.active[1], pg_c.active[0])
nose.tools.assert_is_not(pg_b.active[0], pg_c.active[1])
total_active = len(pg_c.active)
# test special stashes
nose.tools.assert_equals(len(pg_c.stashed), 0)
pg_d = pg_c.stash(filter_func=lambda p: p is pg_c.active[1], to_stash='asdf')
nose.tools.assert_equals(len(pg_d.stashed), 0)
nose.tools.assert_equals(len(pg_d.asdf), 1)
nose.tools.assert_equals(len(pg_d.active), total_active-1)
pg_e = pg_d.stash(from_stash=pg_d.ALL, to_stash='fdsa')
nose.tools.assert_equals(len(pg_e.asdf), 0)
nose.tools.assert_equals(len(pg_e.active), 0)
nose.tools.assert_equals(len(pg_e.fdsa), total_active)
pg_f = pg_e.stash(from_stash=pg_e.ALL, to_stash=pg_e.DROP)
nose.tools.assert_true(all(len(s) == 0 for s in pg_f.stashes.values()))
def test_fauxware():
for arch in addresses_fauxware:
yield run_fauxware, arch, None
yield run_fauxware, arch, 2
if __name__ == "__main__":
for func, march, threads in test_fauxware():
print 'testing ' + march
func(march, threads)<|fim▁end|> | def run_fauxware(arch, threads): |
<|file_name|>deleteTranslations.js<|end_file_name|><|fim▁begin|>'use strict';
//Translations service used for translations REST endpoint<|fim▁hole|> }, {
update: {
method: 'PUT'
}
});
}
]);<|fim▁end|> | angular.module('mean.translations').factory('deleteTranslations', ['$resource',
function($resource) {
return $resource('deleteTranslations/:translationId', {
translationId: '@_id' |
<|file_name|>execute.spec.js<|end_file_name|><|fim▁begin|>var chai = require('chai');
chai.use(require('chai-fs'));
var expect = chai.expect
var execute = require('../');
describe('execute', function() {
it('should exist', function() {
expect(execute).to.exist;
});
it('should return a promise', function() {
expect(execute().then).to.exist;
});
it('should return an result object handed through it', function(done) {
var result = {
key: 'val'
}
execute(result).then(function(res) {
if (res) {
expect(res).to.equal(result);
done();
} else {
done(new Error('Expected result to be resolved'));
}
}, function() {
done(new Error('Expected function to resolve, not reject.'));
});
});
describe('shell commands', function() {
it('should execute a string as a shell script', function(done) {
//Test by creating file and asserting that it exists
execute(null, {
shell: 'echo "new file content" >> ./test/file.txt'
}).then(function(){
expect("./test/file.txt").to.be.a.file("file.txt not found")
done()
}, function() {
done(new Error('expected function to resolve, not reject'));
})
//Remove file and asserting that it does not exist
execute(null, {
shell: 'rm ./test/file.txt'
}).then(function(){
expect("./test/file.txt").not.to.be.a.file("file.txt not found")
done()
}, function() {
done(new Error('expected function to resolve, not reject'));
})
});
});
describe('bash scripts', function() {
it('should execute a file as a bash script', function(done) {
//Test by creating file and asserting that it exists
execute(null, {
bashScript: './test/test-script'
}).then(function(){
expect("./test/file.txt").to.be.a.file("file.txt not found")
done()
}, function() {
done(new Error('expected function to resolve, not reject'));
})
//Remove file and asserting that it does not exist
execute(null, {
shell: 'rm ./test/file.txt'
}).then(function(){
expect("./test/file.txt").not.to.be.a.file("file.txt not found")
done()
}, function() {
done(new Error('expected function to resolve, not reject'));
});
});
it('should hand parameters to bash scripts', function(done) {
//Test by creating file and asserting that it exists
execute(null, {
bashScript: './test/test-script-params',
bashParams: ['./test/file.txt']
}).then(function(){
expect("./test/file.txt").to.be.a.file("file.txt not found")
done()<|fim▁hole|> done(new Error('expected function to resolve, not reject'));
})
//Remove file and asserting that it does not exist
execute(null, {
shell: 'rm ./test/file.txt'
}).then(function(){
expect("./test/file.txt").not.to.be.a.file("file.txt not found")
done()
}, function() {
done(new Error('expected function to resolve, not reject'));
})
});
});
xdescribe('logging', function() {
//TODO: enforce this: these two log tests could be enforced with an abstracted log func and a spy....
it('should default logging to false', function() {
execute(null, {
shell: 'echo "i should not log"'
}).then(function(){
done()
}, function() {
done(new Error('expected function to resolve, not reject'));
})
});
//TODO: enforce this
it('should allow toggling logging', function() {
execute(null, {
logOutput: true,
shell: 'echo "i should log"'
}).then(function(){
done()
}, function() {
done(new Error('expected function to resolve, not reject'));
})
});
});
});<|fim▁end|> | }, function() { |
<|file_name|>BreakfastDiningRounded.js<|end_file_name|><|fim▁begin|>import createSvgIcon from './utils/createSvgIcon';
import { jsx as _jsx } from "react/jsx-runtime";<|fim▁hole|>export default createSvgIcon( /*#__PURE__*/_jsx("path", {
d: "M18 3H6C3.79 3 2 4.79 2 7c0 1.48.81 2.75 2 3.45V19c0 1.1.9 2 2 2h12c1.1 0 2-.9 2-2v-8.55c1.19-.69 2-1.97 2-3.45 0-2.21-1.79-4-4-4zm-2.29 10.7-3 3c-.39.39-1.02.39-1.42 0l-3-3a.9959.9959 0 0 1 0-1.41l3-3c.39-.39 1.02-.39 1.41 0l3 3c.4.39.4 1.02.01 1.41z"
}), 'BreakfastDiningRounded');<|fim▁end|> | |
<|file_name|>bitcoin_cs.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="cs" version="2.1">
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About CrackCoin</source>
<translation>O CrackCoinu</translation>
</message>
<message>
<location line="+39"/>
<source><b>CrackCoin</b> version</source>
<translation>Verze <b>CrackCoinu</b> </translation>
</message>
<message>
<location line="+41"/>
<source>Copyright © 2009-2014 The Bitcoin developers
Copyright © 2012-2014 The NovaCoin developers
Copyright © 2014 The CrackCoin developers</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation>
Tohle je experimentální program.
Šířen pod licencí MIT/X11, viz přiložený soubor COPYING nebo http://www.opensource.org/licenses/mit-license.php.
Tento produkt zahrnuje programy vyvinuté OpenSSL Projektem pro použití v OpenSSL Toolkitu (http://www.openssl.org/) a kryptografický program od Erika Younga ([email protected]) a program UPnP od Thomase Bernarda.</translation>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation>Kniha adres</translation>
</message>
<message>
<location line="+22"/>
<source>Double-click to edit address or label</source>
<translation>Dvojklikem myši začneš upravovat označení adresy</translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>Vytvoř novou adresu</translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Zkopíruj aktuálně vybranou adresu do systémové schránky</translation>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation>&Nová adresa</translation>
</message>
<message>
<location line="-46"/>
<source>These are your CrackCoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+60"/>
<source>&Copy Address</source>
<translation>&Kopíruj adresu</translation>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a CrackCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation>Smaž zvolenou adresu ze seznamu</translation>
</message>
<message>
<location line="-14"/>
<source>Verify a message to ensure it was signed with a specified CrackCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation>&Ověřit správu</translation>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation>&Smaž</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+65"/>
<source>Copy &Label</source>
<translation>Kopíruj &označení</translation>
</message>
<message>
<location line="+2"/>
<source>&Edit</source>
<translation>&Uprav</translation>
</message>
<message>
<location line="+250"/>
<source>Export Address Book Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>CSV formát (*.csv)</translation>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation>Označení</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Adresa</translation>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation>(bez označení)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation>Změna hesla</translation>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation>Zadej platné heslo</translation>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation>Zadej nové heslo</translation>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation>Totéž heslo ještě jednou</translation>
</message>
<message>
<location line="+33"/>
<source>Serves to disable the trivial sendmoney when OS account compromised. Provides no real security.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>For staking only</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+35"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Zadej nové heslo k peněžence.<br/>Použij <b>alespoň 10 náhodných znaků</b> nebo <b>alespoň osm slov</b>.</translation>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation>Zašifruj peněženku</translation>
</message>
<message>
<location line="+7"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>K provedení této operace musíš zadat heslo k peněžence, aby se mohla odemknout.</translation>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation>Odemkni peněženku</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>K provedení této operace musíš zadat heslo k peněžence, aby se mohla dešifrovat.</translation>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation>Dešifruj peněženku</translation>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation>Změň heslo</translation>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>Zadej staré a nové heslo k peněžence.</translation>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation>Potvrď zašifrování peněženky</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR COINS</b>!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>Jsi si jistý, že chceš peněženku zašifrovat?</translation>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation>DŮLEŽITÉ: Všechny předchozí zálohy peněženky by měly být nahrazeny nově vygenerovanou, zašifrovanou peněženkou. Z bezpečnostních důvodů budou předchozí zálohy nešifrované peněženky nepoužitelné, jakmile začneš používat novou zašifrovanou peněženku.</translation>
</message>
<message>
<location line="+103"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation>Upozornění: Caps Lock je zapnutý!</translation>
</message>
<message>
<location line="-133"/>
<location line="+60"/>
<source>Wallet encrypted</source>
<translation>Peněženka je zašifrována</translation>
</message>
<message>
<location line="-58"/>
<source>CrackCoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your coins from being stolen by malware infecting your computer.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+44"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation>Zašifrování peněženky selhalo</translation>
</message>
<message>
<location line="-56"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>Zašifrování peněženky selhalo kvůli vnitřní chybě. Tvá peněženka tedy nebyla zašifrována.</translation>
</message>
<message>
<location line="+7"/>
<location line="+50"/>
<source>The supplied passphrases do not match.</source>
<translation>Zadaná hesla nejsou shodná.</translation>
</message>
<message>
<location line="-38"/>
<source>Wallet unlock failed</source>
<translation>Odemčení peněženky selhalo</translation>
</message>
<message>
<location line="+1"/>
<location line="+12"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>Nezadal jsi správné heslo pro dešifrování peněženky.</translation>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation>Dešifrování peněženky selhalo</translation>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation>Heslo k peněžence bylo v pořádku změněno.</translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+280"/>
<source>Sign &message...</source>
<translation>Po&depiš zprávu...</translation>
</message>
<message>
<location line="+242"/>
<source>Synchronizing with network...</source>
<translation>Synchronizuji se se sítí...</translation>
</message>
<message>
<location line="-308"/>
<source>&Overview</source>
<translation>&Přehled</translation>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation>Zobraz celkový přehled peněženky</translation>
</message>
<message>
<location line="+17"/>
<source>&Transactions</source>
<translation>&Transakce</translation>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation>Procházej historii transakcí</translation>
</message>
<message>
<location line="+5"/>
<source>&Address Book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit the list of stored addresses and labels</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-13"/>
<source>&Receive coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show the list of addresses for receiving payments</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-7"/>
<source>&Send coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>E&xit</source>
<translation>&Konec</translation>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation>Ukonči aplikaci</translation>
</message>
<message>
<location line="+4"/>
<source>Show information about CrackCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation>O &Qt</translation>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation>Zobraz informace o Qt</translation>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation>&Možnosti...</translation>
</message>
<message>
<location line="+4"/>
<source>&Encrypt Wallet...</source>
<translation>Zaši&fruj peněženku...</translation>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation>&Zazálohuj peněženku...</translation>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation>Změň &heslo...</translation>
</message>
<message numerus="yes">
<location line="+250"/>
<source>~%n block(s) remaining</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Downloaded %1 of %2 blocks of transaction history (%3% done).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-247"/>
<source>&Export...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-62"/>
<source>Send coins to a CrackCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+45"/>
<source>Modify configuration options for CrackCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Export the data in the current tab to a file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-14"/>
<source>Encrypt or decrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup wallet to another location</source>
<translation>Zazálohuj peněženku na jiné místo</translation>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation>Změň heslo k šifrování peněženky</translation>
</message>
<message>
<location line="+10"/>
<source>&Debug window</source>
<translation>&Ladicí okno</translation>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation>Otevři ladicí a diagnostickou konzoli</translation>
</message>
<message>
<location line="-5"/>
<source>&Verify message...</source>
<translation>&Ověř zprávu...</translation>
</message>
<message>
<location line="-200"/>
<source>CrackCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet</source>
<translation>Peněženka</translation>
</message>
<message>
<location line="+178"/>
<source>&About CrackCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation>&Zobraz/Skryj</translation>
</message>
<message>
<location line="+9"/>
<source>Unlock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>&Lock Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Lock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+34"/>
<source>&File</source>
<translation>&Soubor</translation>
</message>
<message>
<location line="+8"/>
<source>&Settings</source>
<translation>&Nastavení</translation>
</message>
<message>
<location line="+8"/>
<source>&Help</source>
<translation>Ná&pověda</translation>
</message>
<message>
<location line="+9"/>
<source>Tabs toolbar</source>
<translation>Panel s listy</translation>
</message>
<message>
<location line="+8"/>
<source>Actions toolbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+9"/>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
<message>
<location line="+0"/>
<location line="+60"/>
<source>CrackCoin client</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+70"/>
<source>%n active connection(s) to CrackCoin network</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+40"/>
<source>Downloaded %1 blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+413"/>
<source>Staking.<br>Your weight is %1<br>Network weight is %2<br>Expected time to earn reward is %3</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Not staking because wallet is locked</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is syncing</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because you don't have mature coins</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-403"/>
<source>%n second(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="-284"/>
<source>&Unlock Wallet...</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+288"/>
<source>%n minute(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Up to date</source>
<translation>Aktuální</translation>
</message>
<message>
<location line="+7"/>
<source>Catching up...</source>
<translation>Stahuji...</translation>
</message>
<message>
<location line="+10"/>
<source>Last received block was generated %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Sent transaction</source>
<translation>Odeslané transakce</translation>
</message>
<message>
<location line="+1"/>
<source>Incoming transaction</source>
<translation>Příchozí transakce</translation>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation>Datum: %1
Částka: %2
Typ: %3
Adresa: %4
</translation>
</message>
<message>
<location line="+100"/>
<location line="+15"/>
<source>URI handling</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<location line="+15"/>
<source>URI can not be parsed! This can be caused by an invalid CrackCoin address or malformed URI parameters.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Peněženka je <b>zašifrovaná</b> a momentálně <b>odemčená</b></translation>
</message>
<message>
<location line="+10"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Peněženka je <b>zašifrovaná</b> a momentálně <b>zamčená</b></translation>
</message>
<message>
<location line="+25"/>
<source>Backup Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+76"/>
<source>%n second(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n minute(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s)</source>
<translation><numerusform>hodinu</numerusform><numerusform>%n hodiny</numerusform><numerusform>%n hodin</numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation><numerusform>den</numerusform><numerusform>%n dny</numerusform><numerusform>%n dnů</numerusform></translation>
</message>
<message>
<location line="+18"/>
<source>Not staking</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoin.cpp" line="+109"/>
<source>A fatal error occurred. CrackCoin can no longer continue safely and will quit.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+90"/>
<source>Network Alert</source>
<translation>Upozornění sítě</translation>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<location filename="../forms/coincontroldialog.ui" line="+14"/>
<source>Coin Control</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+48"/>
<source>Amount:</source>
<translation>Částka:</translation>
</message>
<message>
<location line="+32"/>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+48"/>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="+551"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="+51"/>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Change:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+69"/>
<source>(un)select all</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Tree mode</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>List mode</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+45"/>
<source>Amount</source>
<translation>Částka</translation>
</message>
<message>
<location line="+5"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Address</source>
<translation>Adresa</translation>
</message>
<message>
<location line="+5"/>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<location line="+5"/>
<source>Confirmations</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirmed</source>
<translation>Potvrzeno</translation>
</message>
<message>
<location line="+5"/>
<source>Priority</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="-515"/>
<source>Copy address</source>
<translation>Kopírovat adresu</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>Kopírovat popis</translation>
</message>
<message>
<location line="+1"/>
<location line="+26"/>
<source>Copy amount</source>
<translation>Kopíruj částku</translation>
</message>
<message>
<location line="-25"/>
<source>Copy transaction ID</source>
<translation>Kopíruj ID transakce</translation>
</message>
<message>
<location line="+24"/>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+317"/>
<source>highest</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>high</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>medium-high</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>low-medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>low</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>lowest</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+155"/>
<source>DUST</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>yes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>This label turns red, if the transaction size is bigger than 10000 bytes.
This means a fee of at least %1 per kb is required.
Can vary +/- 1 Byte per input.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transactions with higher priority get more likely into a block.
This label turns red, if the priority is smaller than "medium".
This means a fee of at least %1 per kb is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if any recipient receives an amount smaller than %1.
This means a fee of at least %2 is required.
Amounts below 0.546 times the minimum relay fee are shown as DUST.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if the change is smaller than %1.
This means a fee of at least %2 is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<location line="+66"/>
<source>(no label)</source>
<translation>(bez popisu)</translation>
</message>
<message>
<location line="-9"/>
<source>change from %1 (%2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>(change)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation>Uprav adresu</translation>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation>&Označení</translation>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation>&Adresa</translation>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+20"/>
<source>New receiving address</source>
<translation>Nová přijímací adresa</translation>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation>Nová odesílací adresa</translation>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation>Uprav přijímací adresu</translation>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation>Uprav odesílací adresu</translation>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation>Zadaná adresa "%1" už v adresáři je.</translation>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid CrackCoin address.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation>Nemohu odemknout peněženku.</translation>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation>Nepodařilo se mi vygenerovat nový klíč.</translation>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+420"/>
<location line="+12"/>
<source>CrackCoin-Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation>Možnosti</translation>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation>&Hlavní</translation>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB. Fee 0.01 recommended.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation>Platit &transakční poplatek</translation>
</message>
<message>
<location line="+31"/>
<source>Reserved amount does not participate in staking and is therefore spendable at any time.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Reserve</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Automatically start CrackCoin after logging in to the system.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Start CrackCoin on system login</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Detach block and address databases at shutdown. This means they can be moved to another data directory, but it slows down shutdown. The wallet is always detached.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Detach databases at shutdown</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Network</source>
<translation>&Síť</translation>
</message>
<message>
<location line="+6"/>
<source>Automatically open the CrackCoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation>Namapovat port přes &UPnP</translation>
</message>
<message>
<location line="+7"/>
<source>Connect to the CrackCoin network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation>&IP adresa proxy:</translation>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation>Por&t:</translation>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation>Port proxy (např. 9050)</translation>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation>&Verze SOCKS:</translation>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation>Verze SOCKS proxy (např. 5)</translation>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation>O&kno</translation>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation>Po minimalizaci okna zobrazí pouze ikonu v panelu.</translation>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>&Minimalizovávat do ikony v panelu</translation>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation>Zavřením se aplikace minimalizuje. Pokud je tato volba zaškrtnuta, tak se aplikace ukončí pouze zvolením Konec v menu.</translation>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation>Za&vřením minimalizovat</translation>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation>Zobr&azení</translation>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation>&Jazyk uživatelského rozhraní:</translation>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting CrackCoin.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation>J&ednotka pro částky: </translation>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation>Zvol výchozí podjednotku, která se bude zobrazovat v programu a při posílání mincí.</translation>
</message>
<message>
<location line="+9"/>
<source>Whether to show CrackCoin addresses in the transaction list or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation>Ukazo&vat adresy ve výpisu transakcí</translation>
</message>
<message>
<location line="+7"/>
<source>Whether to show coin control features or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Display coin &control features (experts only!)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation>&Budiž</translation>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation>&Zrušit</translation>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+55"/>
<source>default</source>
<translation>výchozí</translation>
</message>
<message>
<location line="+149"/>
<location line="+9"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting CrackCoin.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation>Zadaná adresa proxy je neplatná.</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation>Formulář</translation>
</message>
<message>
<location line="+33"/>
<location line="+231"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the CrackCoin network after a connection is established, but this process has not completed yet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-160"/>
<source>Stake:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-107"/>
<source>Wallet</source>
<translation>Peněženka</translation>
</message>
<message>
<location line="+49"/>
<source>Spendable:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Your current spendable balance</source>
<translation>Aktuální disponibilní stav tvého účtu</translation>
</message>
<message>
<location line="+71"/>
<source>Immature:</source>
<translation>Nedozráno:</translation>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation>Vytěžené mince, které ještě nejsou zralé</translation>
</message>
<message>
<location line="+20"/>
<source>Total:</source>
<translation>Celkem:</translation>
</message>
<message>
<location line="+16"/>
<source>Your current total balance</source>
<translation>Celkový stav tvého účtu</translation>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation><b>Poslední transakce</b></translation>
</message>
<message>
<location line="-108"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Total of coins that was staked, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="+113"/>
<location line="+1"/>
<source>out of sync</source>
<translation>nesynchronizováno</translation>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation>Název klienta</translation>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+348"/>
<source>N/A</source>
<translation>N/A</translation>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation>Verze klienta</translation>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation>&Informace</translation>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation>Používaná verze OpenSSL</translation>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation>Čas spuštění</translation>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation>Síť</translation>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation>Počet spojení</translation>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation>Řetězec bloků</translation>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation>Aktuální počet bloků</translation>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation>Odhad celkového počtu bloků</translation>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation>Čas posledního bloku</translation>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation>&Otevřít</translation>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Show the CrackCoin-Qt help message to get a list with possible CrackCoin command-line options.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation>&Konzole</translation>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation>Datum kompilace</translation>
</message>
<message>
<location line="-104"/>
<source>CrackCoin - Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>CrackCoin Core</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation>Soubor s ladicími záznamy</translation>
</message>
<message>
<location line="+7"/>
<source>Open the CrackCoin debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation>Vyčistit konzoli</translation>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-33"/>
<source>Welcome to the CrackCoin RPC console.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation>V historii se pohybuješ šipkami nahoru a dolů a pomocí <b>Ctrl-L</b> čistíš obrazovku.</translation>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation>Napsáním <b>help</b> si vypíšeš přehled dostupných příkazů.</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+182"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation>Pošli mince</translation>
</message>
<message>
<location line="+76"/>
<source>Coin Control Features</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Inputs...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>automatically selected</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Insufficient funds!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+77"/>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<location line="+35"/>
<source>0</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-19"/>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Amount:</source>
<translation>Částka:</translation>
</message>
<message>
<location line="+22"/>
<location line="+86"/>
<location line="+86"/>
<location line="+32"/>
<source>0.00 CRACK</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-191"/>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+50"/>
<source>custom change address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+106"/>
<source>Send to multiple recipients at once</source>
<translation>Pošli více příjemcům naráz</translation>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation>Při&dej příjemce</translation>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation>Všechno s&maž</translation>
</message>
<message>
<location line="+28"/>
<source>Balance:</source>
<translation>Stav účtu:</translation>
</message>
<message>
<location line="+16"/>
<source>123.456 CRACK</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation>Potvrď odeslání</translation>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation>P&ošli</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-173"/>
<source>Enter a CrackCoin address (e.g. CphU6HbZCuDsx1nGWncuwxYdHcF74zLv8U)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation>Kopíruj částku</translation>
</message>
<message>
<location line="+1"/>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+86"/>
<source><b>%1</b> to %2 (%3)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation>Potvrď odeslání mincí</translation>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The recipient address is not valid, please recheck.</source>
<translation>Adresa příjemce je neplatná, překontroluj ji prosím.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation>Odesílaná částka musí být větší než 0.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation>Částka překračuje stav účtu.</translation>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation>Celková částka při připočítání poplatku %1 překročí stav účtu.</translation>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation>Zaznamenána duplikovaná adresa; každá adresa může být v odesílané platbě pouze jednou.</translation>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+251"/>
<source>WARNING: Invalid CrackCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>(no label)</source>
<translation>(bez popisu)</translation>
</message>
<message>
<location line="+4"/>
<source>WARNING: unknown change address</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation>Čás&tka:</translation>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation>&Komu:</translation>
</message>
<message>
<location line="+24"/>
<location filename="../sendcoinsentry.cpp" line="+25"/>
<source>Enter a label for this address to add it to your address book</source>
<translation>Zadej označení této adresy; obojí se ti pak uloží do adresáře</translation>
</message>
<message>
<location line="+9"/>
<source>&Label:</source>
<translation>O&značení:</translation>
</message>
<message>
<location line="+18"/>
<source>The address to send the payment to (e.g. CphU6HbZCuDsx1nGWncuwxYdHcF74zLv8U)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Choose address from address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation>Vlož adresu ze schránky</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a CrackCoin address (e.g. CphU6HbZCuDsx1nGWncuwxYdHcF74zLv8U)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation>Podpisy - podepsat/ověřit zprávu</translation>
</message>
<message>
<location line="+13"/>
<location line="+124"/>
<source>&Sign Message</source>
<translation>&Podepiš zprávu</translation>
</message>
<message>
<location line="-118"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation>Podepsáním zprávy svými adresami můžeš prokázat, že je skutečně vlastníš. Buď opatrný a nepodepisuj nic vágního; například při phishingových útocích můžeš být lákán, abys něco takového podepsal. Podepisuj pouze zcela úplná a detailní prohlášení, se kterými souhlasíš.</translation>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. CphU6HbZCuDsx1nGWncuwxYdHcF74zLv8U)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+203"/>
<source>Choose an address from the address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-193"/>
<location line="+203"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="-193"/>
<source>Paste address from clipboard</source>
<translation>Vlož adresu ze schránky</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation>Sem vepiš zprávu, kterou chceš podepsat</translation>
</message>
<message>
<location line="+24"/>
<source>Copy the current signature to the system clipboard</source>
<translation>Zkopíruj aktuálně vybraný podpis do systémové schránky</translation>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this CrackCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all sign message fields</source>
<translation>Vymaž všechna pole formuláře pro podepsání zprávy</translation>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation>Všechno &smaž</translation>
</message>
<message>
<location line="-87"/>
<location line="+70"/>
<source>&Verify Message</source>
<translation>&Ověř zprávu</translation>
</message>
<message>
<location line="-64"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation>K ověření podpisu zprávy zadej podepisující adresu, zprávu (ověř si, že správně kopíruješ zalomení řádků, mezery, tabulátory apod.) a podpis. Dávej pozor na to, abys nezkopíroval do podpisu víc, než co je v samotné podepsané zprávě, abys nebyl napálen man-in-the-middle útokem.</translation>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. CphU6HbZCuDsx1nGWncuwxYdHcF74zLv8U)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified CrackCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all verify message fields</source>
<translation>Vymaž všechna pole formuláře pro ověření zprávy</translation>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a CrackCoin address (e.g. CphU6HbZCuDsx1nGWncuwxYdHcF74zLv8U)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation>Kliknutím na "Podepiš zprávu" vygeneruješ podpis</translation>
</message>
<message>
<location line="+3"/>
<source>Enter CrackCoin signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation>Zadaná adresa je neplatná.</translation>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation>Zkontroluj ji prosím a zkus to pak znovu.</translation>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation>Zadaná adresa nepasuje ke klíči.</translation>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation>Odemčení peněženky bylo zrušeno.</translation>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation>Soukromý klíč pro zadanou adresu není dostupný.</translation>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation>Podepisování zprávy selhalo.</translation>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
        <translation>Zpráva podepsána.</translation>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation>Podpis nejde dekódovat.</translation>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation>Zkontroluj ho prosím a zkus to pak znovu.</translation>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation>Podpis se neshoduje s hašem zprávy.</translation>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation>Ověřování zprávy selhalo.</translation>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation>Zpráva ověřena.</translation>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+19"/>
<source>Open until %1</source>
        <translation>Otevřeno dokud %1</translation>
</message>
<message numerus="yes">
<location line="-2"/>
<source>Open for %n block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+8"/>
<source>conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/offline</source>
<translation>%1/offline</translation>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation>%1/nepotvrzeno</translation>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation>%1 potvrzení</translation>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation>Stav</translation>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation><numerusform>, rozesláno přes 1 uzel</numerusform><numerusform>, rozesláno přes %n uzly</numerusform><numerusform>, rozesláno přes %n uzlů</numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation>Zdroj</translation>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation>Vygenerováno</translation>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation>Od</translation>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation>Pro</translation>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation>vlastní adresa</translation>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation>označení</translation>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation>Příjem</translation>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation><numerusform>dozraje po jednom bloku</numerusform><numerusform>dozraje po %n blocích</numerusform><numerusform>dozraje po %n blocích</numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation>neakceptováno</translation>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation>Výdaj</translation>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation>Transakční poplatek</translation>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation>Čistá částka</translation>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation>Zpráva</translation>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation>Komentář</translation>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation>ID transakce</translation>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 110 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation>Ladicí informace</translation>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation>Transakce</translation>
</message>
<message>
<location line="+5"/>
<source>Inputs</source>
<translation>Vstupy</translation>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation>Částka</translation>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation>true</translation>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation>false</translation>
</message>
<message>
<location line="-211"/>
<source>, has not been successfully broadcast yet</source>
<translation>, ještě nebylo rozesláno</translation>
</message>
<message>
<location line="+35"/>
<source>unknown</source>
<translation>neznámo</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation>Detaily transakce</translation>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation>Toto okno zobrazuje detailní popis transakce</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+226"/>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation>Typ</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Adresa</translation>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation>Částka</translation>
</message>
<message>
<location line="+60"/>
<source>Open until %1</source>
        <translation>Otevřeno dokud %1</translation>
</message>
<message>
<location line="+12"/>
<source>Confirmed (%1 confirmations)</source>
<translation>Potvrzeno (%1 potvrzení)</translation>
</message>
<message numerus="yes">
<location line="-15"/>
<source>Open for %n more block(s)</source>
<translation><numerusform>Otevřeno pro 1 další blok</numerusform><numerusform>Otevřeno pro %n další bloky</numerusform><numerusform>Otevřeno pro %n dalších bloků</numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirming (%1 of %2 recommended confirmations)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Immature (%1 confirmations, will be available after %2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>Tento blok nedostal žádný jiný uzel a pravděpodobně nebude akceptován!</translation>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation>Vygenerováno, ale neakceptováno</translation>
</message>
<message>
<location line="+42"/>
<source>Received with</source>
<translation>Přijato do</translation>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation>Přijato od</translation>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation>Posláno na</translation>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation>Platba sama sobě</translation>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation>Vytěženo</translation>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation>(n/a)</translation>
</message>
<message>
<location line="+190"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>Stav transakce. Najetím myši na toto políčko si zobrazíš počet potvrzení.</translation>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation>Datum a čas přijetí transakce.</translation>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation>Druh transakce.</translation>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation>Cílová adresa transakce.</translation>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation>Částka odečtená z nebo přičtená k účtu.</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+55"/>
<location line="+16"/>
<source>All</source>
<translation>Vše</translation>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation>Dnes</translation>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation>Tento týden</translation>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation>Tento měsíc</translation>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation>Minulý měsíc</translation>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation>Letos</translation>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation>Rozsah...</translation>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation>Přijato</translation>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation>Posláno</translation>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation>Sám sobě</translation>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation>Vytěženo</translation>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation>Ostatní</translation>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation>Zadej adresu nebo označení pro její vyhledání</translation>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation>Minimální částka</translation>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation>Kopíruj adresu</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>Kopíruj její označení</translation>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation>Kopíruj částku</translation>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation>Kopíruj ID transakce</translation>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation>Uprav označení</translation>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation>Zobraz detaily transakce</translation>
</message>
<message>
<location line="+144"/>
<source>Export Transaction Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>CSV formát (*.csv)</translation>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation>Potvrzeno</translation>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation>Typ</translation>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation>Označení</translation>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation>Adresa</translation>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation>Částka</translation>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation>ID</translation>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation>Rozsah:</translation>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation>až</translation>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+206"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+33"/>
<source>CrackCoin version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Usage:</source>
<translation>Užití:</translation>
</message>
<message>
<location line="+1"/>
<source>Send command to -server or crackcoind</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>List commands</source>
<translation>Výpis příkazů</translation>
</message>
<message>
<location line="+1"/>
<source>Get help for a command</source>
<translation>Získat nápovědu pro příkaz</translation>
</message>
<message>
<location line="+2"/>
<source>Options:</source>
<translation>Možnosti:</translation>
</message>
<message>
<location line="+2"/>
<source>Specify configuration file (default: crackcoin.conf)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Specify pid file (default: crackcoind.pid)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify wallet file (within data directory)</source>
<translation>Udej název souboru s peněženkou (v rámci datového adresáře)</translation>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation>Adresář pro data</translation>
</message>
<message>
<location line="+2"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation>Nastavit velikost databázové vyrovnávací paměti v megabajtech (výchozí: 25)</translation>
</message>
<message>
<location line="+1"/>
<source>Set database disk log size in megabytes (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Listen for connections on <port> (default: 15714 or testnet: 25714)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation>Povolit nejvýše <n> připojení k uzlům (výchozí: 125)</translation>
</message>
<message>
<location line="+3"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation>Připojit se k uzlu, získat adresy jeho protějšků a odpojit se</translation>
</message>
<message>
<location line="+1"/>
<source>Specify your own public address</source>
<translation>Specifikuj svou veřejnou adresu</translation>
</message>
<message>
<location line="+5"/>
<source>Bind to given address. Use [host]:port notation for IPv6</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Stake your coins to support network and gain reward (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation>Práh pro odpojování zlobivých uzlů (výchozí: 100)</translation>
</message>
<message>
<location line="+1"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation>Doba ve vteřinách, po kterou se nebudou moci zlobivé uzly znovu připojit (výchozí: 86400)</translation>
</message>
<message>
<location line="-44"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
        <translation>Při nastavování naslouchacího RPC portu %u pro IPv4 nastala chyba: %s</translation>
</message>
<message>
<location line="+51"/>
<source>Detach block and address databases. Increases shutdown time (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+109"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<source>Listen for JSON-RPC connections on <port> (default: 15715 or testnet: 25715)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-11"/>
<source>Accept command line and JSON-RPC commands</source>
<translation>Akceptovat příkazy z příkazové řádky a přes JSON-RPC</translation>
</message>
<message>
<location line="+101"/>
<source>Error: Transaction creation failed </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>Error: Wallet locked, unable to create transaction </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-8"/>
<source>Importing blockchain data file.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Importing bootstrap blockchain data file.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-88"/>
<source>Run in the background as a daemon and accept commands</source>
<translation>Běžet na pozadí jako démon a akceptovat příkazy</translation>
</message>
<message>
<location line="+1"/>
<source>Use the test network</source>
<translation>Použít testovací síť (testnet)</translation>
</message>
<message>
<location line="-24"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation>Přijímat spojení zvenčí (výchozí: 1, pokud není zadáno -proxy nebo -connect)</translation>
</message>
<message>
<location line="-38"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation>Při nastavování naslouchacího RPC portu %u pro IPv6 nastala chyba, vracím se k IPv4: %s</translation>
</message>
<message>
<location line="+117"/>
<source>Error initializing database environment %s! To recover, BACKUP THAT DIRECTORY, then remove everything from it except for wallet.dat.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-20"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation>Upozornění: -paytxfee je nastaveno velmi vysoko! Toto je transakční poplatek, který zaplatíš za každou poslanou transakci.</translation>
</message>
<message>
<location line="+61"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong CrackCoin will not work properly.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-31"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation>Upozornění: nastala chyba při čtení souboru wallet.dat! Všechny klíče se přečetly správně, ale data o transakcích nebo záznamy v adresáři mohou chybět či být nesprávné.</translation>
</message>
<message>
<location line="-18"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation>Upozornění: soubor wallet.dat je poškozený, data jsou však zachráněna! Původní soubor wallet.dat je uložený jako wallet.{timestamp}.bak v %s. Pokud je stav tvého účtu nebo transakce nesprávné, zřejmě bys měl obnovit zálohu.</translation>
</message>
<message>
<location line="-30"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation>Pokusit se zachránit soukromé klíče z poškozeného souboru wallet.dat</translation>
</message>
<message>
<location line="+4"/>
<source>Block creation options:</source>
<translation>Možnosti vytvoření bloku:</translation>
</message>
<message>
<location line="-62"/>
<source>Connect only to the specified node(s)</source>
<translation>Připojit se pouze k zadanému uzlu (příp. zadaným uzlům)</translation>
</message>
<message>
<location line="+4"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation>Zjistit vlastní IP adresu (výchozí: 1, pokud naslouchá a není zadáno -externalip)</translation>
</message>
<message>
<location line="+94"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation>Nepodařilo se naslouchat na žádném portu. Použij -listen=0, pokud to byl tvůj záměr.</translation>
</message>
<message>
<location line="-90"/>
<source>Find peers using DNS lookup (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Sync checkpoints policy (default: strict)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+83"/>
<source>Invalid -tor address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Invalid amount for -reservebalance=<amount></source>
<translation type="unfinished"/>
</message>
<message>
<location line="-82"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation>Maximální velikost přijímacího bufferu pro každé spojení, <n>*1000 bajtů (výchozí: 5000)</translation>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation>Maximální velikost odesílacího bufferu pro každé spojení, <n>*1000 bajtů (výchozí: 1000)</translation>
</message>
<message>
<location line="-16"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation>Připojit se pouze k uzlům v <net> síti (IPv4, IPv6 nebo Tor)</translation>
</message>
<message>
<location line="+28"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Prepend debug output with timestamp</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source>
<translation>Možnosti SSL: (viz instrukce nastavení SSL v Bitcoin Wiki)</translation>
</message>
<message>
<location line="-74"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+41"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation>Posílat stopovací/ladicí informace do konzole místo do souboru debug.log</translation>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation>Nastavit minimální velikost bloku v bajtech (výchozí: 0)</translation>
</message>
<message>
<location line="-29"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation>Při spuštění klienta zmenšit soubor debug.log (výchozí: 1, pokud není zadáno -debug)</translation>
</message>
<message>
<location line="-42"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation>Zadej časový limit spojení v milisekundách (výchozí: 5000)</translation>
</message>
<message>
<location line="+109"/>
<source>Unable to sign checkpoint, wrong checkpointkey?
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-80"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation>Použít UPnP k namapování naslouchacího portu (výchozí: 0)</translation>
</message>
<message>
<location line="-1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation>Použít UPnP k namapování naslouchacího portu (výchozí: 1, pokud naslouchá)</translation>
</message>
<message>
<location line="-25"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<source>Username for JSON-RPC connections</source>
<translation>Uživatelské jméno pro JSON-RPC spojení</translation>
</message>
<message>
<location line="+47"/>
<source>Verifying database integrity...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+57"/>
<source>WARNING: syncronized checkpoint violation detected, but skipped!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: Disk space is low!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation>Upozornění: tahle verze je zastaralá, měl bys ji aktualizovat!</translation>
</message>
<message>
<location line="-48"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation>Soubor wallet.dat je poškozen, jeho záchrana se nezdařila</translation>
</message>
<message>
<location line="-54"/>
<source>Password for JSON-RPC connections</source>
<translation>Heslo pro JSON-RPC spojení</translation>
</message>
<message>
<location line="-84"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=crackcoinrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "CrackCoin Alert" [email protected]
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Find peers using internet relay chat (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Sync time with other nodes. Disable if time on your system is precise e.g. syncing with NTP (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>When creating transactions, ignore inputs with value less than this (default: 0.01)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation>Povolit JSON-RPC spojení ze specifikované IP adresy</translation>
</message>
<message>
<location line="+1"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation>Posílat příkazy uzlu běžícím na <ip> (výchozí: 127.0.0.1)</translation>
</message>
<message>
<location line="+1"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation>Spustit příkaz, když se změní nejlepší blok (%s se v příkazu nahradí hashem bloku)</translation>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation>Spustit příkaz, když se objeví transakce týkající se peněženky (%s se v příkazu nahradí za TxID)</translation>
</message>
<message>
<location line="+3"/>
<source>Require a confirmations for change (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Enforce transaction scripts to use canonical PUSH operators (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Upgrade wallet to latest format</source>
<translation>Převést peněženku na nejnovější formát</translation>
</message>
<message>
<location line="+1"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation>Nastavit zásobník klíčů na velikost <n> (výchozí: 100)</translation>
</message>
<message>
<location line="+1"/>
<source>Rescan the block chain for missing wallet transactions</source>
        <translation>Přeskenovat řetězec bloků na chybějící transakce tvé peněženky</translation>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 2500, 0 = all)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-6, default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Imports blocks from external blk000?.dat file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation>Použít OpenSSL (https) pro JSON-RPC spojení</translation>
</message>
<message>
<location line="+1"/>
<source>Server certificate file (default: server.cert)</source>
<translation>Soubor se serverovým certifikátem (výchozí: server.cert)</translation>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation>Soubor se serverovým soukromým klíčem (výchozí: server.pem)</translation>
</message>
<message><|fim▁hole|> <location line="+1"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+53"/>
<source>Error: Wallet unlocked for staking only, unable to create transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>WARNING: Invalid checkpoint found! Displayed transactions may not be correct! You may need to upgrade, or notify developers.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-158"/>
<source>This help message</source>
<translation>Tato nápověda</translation>
</message>
<message>
<location line="+95"/>
<source>Wallet %s resides outside data directory %s.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot obtain a lock on data directory %s. CrackCoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-98"/>
<source>CrackCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+140"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation>Nedaří se mi připojit na %s na tomhle počítači (operace bind vrátila chybu %d, %s)</translation>
</message>
<message>
<location line="-130"/>
<source>Connect through socks proxy</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation>Povolit DNS dotazy pro -addnode (přidání uzlu), -seednode a -connect (připojení)</translation>
</message>
<message>
<location line="+122"/>
<source>Loading addresses...</source>
<translation>Načítám adresy...</translation>
</message>
<message>
<location line="-15"/>
<source>Error loading blkindex.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation>Chyba při načítání wallet.dat: peněženka je poškozená</translation>
</message>
<message>
<location line="+4"/>
<source>Error loading wallet.dat: Wallet requires newer version of CrackCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Wallet needed to be rewritten: restart CrackCoin to complete</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat</source>
<translation>Chyba při načítání wallet.dat</translation>
</message>
<message>
<location line="-16"/>
<source>Invalid -proxy address: '%s'</source>
<translation>Neplatná -proxy adresa: '%s'</translation>
</message>
<message>
<location line="-1"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation>V -onlynet byla uvedena neznámá síť: '%s'</translation>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation>V -socks byla požadována neznámá verze proxy: %i</translation>
</message>
<message>
<location line="+4"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation>Nemohu přeložit -bind adresu: '%s'</translation>
</message>
<message>
<location line="+2"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation>Nemohu přeložit -externalip adresu: '%s'</translation>
</message>
<message>
<location line="-24"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation>Neplatná částka pro -paytxfee=<částka>: '%s'</translation>
</message>
<message>
<location line="+44"/>
<source>Error: could not start node</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Invalid amount</source>
<translation>Neplatná částka</translation>
</message>
<message>
<location line="+1"/>
<source>Insufficient funds</source>
<translation>Nedostatek prostředků</translation>
</message>
<message>
<location line="-34"/>
<source>Loading block index...</source>
<translation>Načítám index bloků...</translation>
</message>
<message>
<location line="-103"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation>Přidat uzel, ke kterému se připojit a snažit se spojení udržet</translation>
</message>
<message>
<location line="+122"/>
<source>Unable to bind to %s on this computer. CrackCoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-97"/>
<source>Fee per KB to add to transactions you send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+55"/>
<source>Invalid amount for -mininput=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Loading wallet...</source>
<translation>Načítám peněženku...</translation>
</message>
<message>
<location line="+8"/>
<source>Cannot downgrade wallet</source>
<translation>Nemohu převést peněženku do staršího formátu</translation>
</message>
<message>
<location line="+1"/>
<source>Cannot initialize keypool</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot write default address</source>
        <translation>Nemohu zapsat výchozí adresu</translation>
</message>
<message>
<location line="+1"/>
<source>Rescanning...</source>
<translation>Přeskenovávám...</translation>
</message>
<message>
<location line="+5"/>
<source>Done loading</source>
<translation>Načítání dokončeno</translation>
</message>
<message>
<location line="-167"/>
<source>To use the %s option</source>
<translation>K použití volby %s</translation>
</message>
<message>
<location line="+14"/>
<source>Error</source>
<translation>Chyba</translation>
</message>
<message>
<location line="+6"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation>Musíš nastavit rpcpassword=<heslo> v konfiguračním souboru:
%s
Pokud konfigurační soubor ještě neexistuje, vytvoř ho tak, aby ho mohl číst pouze vlastník.</translation>
</message>
</context>
</TS><|fim▁end|> | |
<|file_name|>0021_auto_20151008_1526.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def from_election_to_elections(apps, schema_editor):
    # We can't import the Election or Candidate models directly as they may be
    # newer versions than this migration expects. We use the historical versions.
Election = apps.get_model("elections", "Election")
Candidate = apps.get_model("elections", "Candidate")
for candidate in Candidate.objects.all():
candidate.elections.add(candidate.election)
class Migration(migrations.Migration):<|fim▁hole|> ('elections', '0020_auto_20150821_2101'),
]
operations = [
migrations.AddField(
model_name='candidate',
name='elections',
field=models.ManyToManyField(related_name='candidates', null=True, to='elections.Election'),
),
migrations.RunPython(from_election_to_elections),
]<|fim▁end|> |
dependencies = [ |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// This file is part of Rubik.
// Copyright Peter Beard, licensed under the GPLv3. See LICENSE for details.
//
//! Algorithms for solving Rubik's cubes
use super::cube::{Cube, Move};
/// Trait for things that can solve Rubik's cubes
pub trait Solver {
/// Calculate a sequence of moves that puts the cube in the solved state
fn find_solution(&mut self, cube: &Cube) -> Vec<Move>;
}
/// Solver that doesn't do anything
///
/// # Example
/// ```
/// use rubik::cube::Cube;
/// use rubik::solver::{Solver, NullSolver};
///
/// let mut c = Cube::new();
/// let mut ns = NullSolver::new();
///
/// assert_eq!(c.solve(&mut ns), vec![]);
/// ```
pub struct NullSolver;
impl NullSolver {
pub fn new() -> NullSolver {
NullSolver
}<|fim▁hole|>impl Solver for NullSolver {
fn find_solution(&mut self, _: &Cube) -> Vec<Move> {
vec![]
}
}
/// Solver that uses a simple iterative deepening algorithm
///
/// This algorithm is very slow and probably won't halt in a reasonable time for
/// most cubes
///
/// # Example
/// ```
/// use rubik::cube::Cube;
/// use rubik::solver::IDSolver;
///
/// let mut c = Cube::new();
/// let mut ids = IDSolver::new();
///
/// c.apply_moves("F'U'D'");
/// println!("{:?}", c.solve(&mut ids));
///
/// assert!(c.is_solved());
/// ```
pub struct IDSolver {
max_depth: u8,
}
impl IDSolver {
/// Create a new solver with the default maximum depth of 26
/// (all cubes are solveable in at most 26 moves)
pub fn new() -> IDSolver {
IDSolver {
max_depth: 26u8,
}
}
/// Create a solver with the given maximum depth (max number of moves)
pub fn with_max_depth(d: u8) -> IDSolver {
IDSolver {
max_depth: d,
}
}
}
impl Solver for IDSolver {
fn find_solution(&mut self, cube: &Cube) -> Vec<Move> {
let mut current_solution: Option<Vec<Move>> = None;
let mut current_depth = 1;
// A solved cube requires zero moves to solve
if !cube.is_solved() {
// Look until we find a solution or run out of moves
while current_depth <= self.max_depth && current_solution.is_none() {
current_solution = dbsearch(cube, current_depth);
current_depth += 1;
}
}
// Return no moves if there's no solution within the max depth
if let Some(s) = current_solution {
s
} else {
vec![]
}
}
}
/// Depth-bounded search for a solution
fn dbsearch(start: &Cube, maxdepth: u8) -> Option<Vec<Move>> {
// Zero means we're at the max depth
if maxdepth == 0 {
return None;
}
let possible_moves = [
Move::F,
Move::R,
Move::U,
Move::B,
Move::L,
Move::D,
Move::FPrime,
Move::RPrime,
Move::UPrime,
Move::BPrime,
Move::LPrime,
Move::DPrime,
];
let mut moves = Vec::new();
// Try every possible move and see where we get
for &m in &possible_moves {
let mut s = start.clone();
s.apply_move(m);
moves.push(m);
if s.is_solved() {
break;
}
if let Some(ms) = dbsearch(&s, maxdepth - 1) {
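            // The recursive search found a solution from this state; keep m followed by those moves.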
moves.append(&mut ms.clone());
break;
} else {
moves.pop();
}
}
if moves.len() > 0 {
Some(moves)
} else {
None
}
}<|fim▁end|> | }
|
<|file_name|>progressbar.py<|end_file_name|><|fim▁begin|>""" progressbar2 related utils"""
from codekit.codetools import warn
from public import public
from time import sleep
import progressbar
import functools
@public
def setup_logging(verbosity=0):
"""Configure progressbar sys.stderr wrapper which is required to play nice
with logging and not have strange formatting artifacts.
"""
progressbar.streams.wrap_stderr()
@public
def countdown_timer(seconds=10):
"""Show a simple countdown progress bar
Parameters
----------
seconds
Period of time the progress bar takes to reach zero.
"""
tick = 0.1 # seconds
n_ticks = int(seconds / tick)
widgets = ['Pause for panic: ', progressbar.ETA(), ' ', progressbar.Bar()]
pbar = progressbar.ProgressBar(
widgets=widgets, max_value=n_ticks
).start()
for i in range(n_ticks):
pbar.update(i)
sleep(tick)
pbar.finish()
@public
def wait_for_user_panic(**kwargs):
"""Display a scary message and count down progresss bar so an interative
user a chance to panic and kill the program.
Parameters
----------
kwargs
Passed verbatim to countdown_timer()
"""
warn('Now is the time to panic and Ctrl-C')
countdown_timer(**kwargs)
@public
@functools.lru_cache()
def wait_for_user_panic_once(**kwargs):
"""Same functionality as wait_for_user_panic() but will only display a
    countdown once, regardless of how many times it is called.
Parameters
----------
kwargs
Passed verbatim to wait_for_user_panic()
"""
wait_for_user_panic(**kwargs)
@public
def eta_bar(msg, max_value):
"""Display an adaptive ETA / countdown bar with a message.
Parameters
----------
msg: str
Message to prefix countdown bar line with
    max_value: int
The max number of progress bar steps/updates
"""
widgets = [
"{msg}:".format(msg=msg),<|fim▁hole|><|fim▁end|> | progressbar.Bar(), ' ', progressbar.AdaptiveETA(),
]
return progressbar.ProgressBar(widgets=widgets, max_value=max_value) |
<|file_name|>CtrlActionWorkspaceSaveAsOracle.java<|end_file_name|><|fim▁begin|>package com.supermap.desktop.framemenus;
import com.supermap.desktop.Application;
import com.supermap.desktop.Interface.IBaseItem;
import com.supermap.desktop.Interface.IForm;
import com.supermap.desktop.implement.CtrlAction;
import com.supermap.desktop.ui.controls.JDialogWorkspaceSaveAs;
import javax.swing.*;
public class CtrlActionWorkspaceSaveAsOracle extends CtrlAction {
public CtrlActionWorkspaceSaveAsOracle(IBaseItem caller, IForm formClass) {
super(caller, formClass);
}
public void run(){
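        // Open the workspace save-as dialog in its Oracle mode, as selected by JDialogWorkspaceSaveAs.saveAsOracle.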
JFrame parent = (JFrame) Application.getActiveApplication().getMainFrame();
JDialogWorkspaceSaveAs dialog = new JDialogWorkspaceSaveAs(parent, true,JDialogWorkspaceSaveAs.saveAsOracle);
dialog.showDialog();
}
@Override
public boolean enable() {
return true;<|fim▁hole|>}<|fim▁end|> | } |
<|file_name|>mocks.rs<|end_file_name|><|fim▁begin|>use crate::{
cart::Cart,
rom::{CHR_BANK_SIZE, PRG_BANK_SIZE},
};
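/// In-memory cartridge stub with a single PRG bank and a single CHR bank, backed by plain arrays for tests.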
pub struct CartMock {
pub prg: [u8; PRG_BANK_SIZE],
pub chr: [u8; CHR_BANK_SIZE],
}
impl Default for CartMock {
fn default() -> Self {
CartMock {
prg: [0; PRG_BANK_SIZE],<|fim▁hole|> }
}
impl Cart for CartMock {
fn read_prg(&self, addr: u16) -> u8 {
self.prg[addr as usize]
}
fn write_prg(&mut self, addr: u16, value: u8) {
self.prg[addr as usize] = value
}
fn read_chr(&self, addr: u16) -> u8 {
self.chr[addr as usize]
}
fn write_chr(&mut self, addr: u16, value: u8) {
self.chr[addr as usize] = value
}
}<|fim▁end|> | chr: [0; CHR_BANK_SIZE],
} |
<|file_name|>unop-move-semantics.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that move restrictions are enforced on overloaded unary operations
use std::ops::Not;
fn move_then_borrow<T: Not<Output=T> + Clone>(x: T) {
!x;
x.clone(); //~ ERROR: use of moved value
}
fn move_borrowed<T: Not<Output=T>>(x: T, mut y: T) {
let m = &x;
let n = &mut y;
!x; //~ ERROR: cannot move out of `x` because it is borrowed
!y; //~ ERROR: cannot move out of `y` because it is borrowed
use_mut(n); use_imm(m);
}
fn illegal_dereference<T: Not<Output=T>>(mut x: T, y: T) {
let m = &mut x;
let n = &y;
!*m; //~ ERROR: cannot move out of borrowed content<|fim▁hole|>}
fn main() {}
fn use_mut<T>(_: &mut T) { }
fn use_imm<T>(_: &T) { }<|fim▁end|> |
!*n; //~ ERROR: cannot move out of borrowed content
use_imm(n); use_mut(m); |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import React from "react";
import styled from "styled-components";
import Img from "gatsby-image";
import { graphql } from "gatsby";
import Center from "../components/center";
import ResponsiveContainer from "../components/responsive-container";
import Inset from "../components/inset";
import Text from "../components/text";
import colors from "../constants/colors";
import Layout from "../layouts";
import VersandhausWalzLogo from "../assets/compressed/versandhaus-walz.png";
import OttoLogo from "../assets/compressed/otto.png";
import LudwigMediaLogo from "../assets/compressed/ludwigmedia.png";
import BadischerWeinLogo from "../assets/compressed/badischer-wein.png";
import DrawkitContentManColour from "../assets/drawkit-content-man-colour.svg";
import Stack from "../components/stack";
const customers = [
{
src: OttoLogo,
alt: "Otto (GmbH & CoKG)"
},
{
src: VersandhausWalzLogo,
alt: "Versandhaus Walz"
},
{
src: LudwigMediaLogo,
alt: "LUDWIG:media gmbh"
},
{
src: BadischerWeinLogo,
alt: "Badischer Wein eKfr"
}
];
const HeroImage = styled.img`
width: 80%;
height: auto;
@media (min-width: 48em) {
width: auto;
height: 80vh;
max-height: 460px;
margin-left: -32px;
}
`;
const HeroLayout = styled(Stack)`
margin: 64px 0 32px;
@media (min-width: 48em) {
margin-top: 0;
flex-direction: row;
}
`;
const HeroArea = styled.div`
position: relative;
background-color: ${colors.black};
display: flex;
align-items: center;
@media (min-width: 48em) {
height: 80vh;
}
`;
const HeroTextArea = styled.div`
position: relative;
`;
const HeroText = styled.h1`
line-height: 1.1;<|fim▁hole|> color: ${colors.white};
text-align: center;
@media (min-width: 48em) {
font-size: 3rem;
text-align: left;
}
`;
const HeroTextHighlicht = styled.span`
color: ${colors.orange};
`;
const HeroTextSubheadline = styled.p`
line-height: 1.1;
font-size: 1.6rem;
max-width: 22ch;
color: #ddb992;
margin: 0;
text-align: center;
@media (min-width: 48em) {
text-align: left;
font-size: 2rem;
}
`;
const CustomerContainer = ResponsiveContainer.extend`
display: flex;
align-items: center;
flex-direction: column;
@media (min-width: 700px) {
flex-direction: row;
}
`;
const CustomerList = Center.extend`
padding: 32px 0;
background-color: hsla(31, 17%, 93%, 1);
`;
const CustomerLogo = styled.div`
flex: 1 1 auto;
max-height: 60px;
margin: 10px;
transition: all 0.5s ease-in-out;
> img {
max-width: 100%;
max-height: 100%;
}
`;
const H2 = styled.h2`
color: ${colors.textPrimary};
margin: 0 0 24px;
`;
const H3 = styled.h3`
color: ${colors.textPrimary};
margin: 0 0 16px;
`;
const SkillGrid = styled.div`
display: grid;
grid-template-columns: 1fr;
grid-gap: 16px;
@media (min-width: 700px) {
grid-template-columns: 1fr 1fr;
}
`;
const ServiceSection = styled.div`
background-color: hsla(13, 10%, 97%, 1);
padding: 32px 0;
`;
const Skill = styled.div`
background-color: white;
border-radius: 4px;
display: flex;
flex-direction: column;
text-align: left;
color: ${colors.text};
animation: fadein 2s;
`;
const SkillImage = styled.div`
max-height: 175px;
background-color: #ffe6df;
border-radius: 4px 4px 0 0;
overflow: hidden;
`;
const IndexPage = ({ data }) => (
<Layout>
<HeroArea>
<ResponsiveContainer>
<HeroLayout alignItems="center" scale="xl">
<HeroTextArea>
<HeroText>
Wir liefern digitale Lösungen,<br />
<HeroTextHighlicht>die unsere Kunden lieben.</HeroTextHighlicht>
</HeroText>
<HeroTextSubheadline>Und das seit 15 Jahren!</HeroTextSubheadline>
</HeroTextArea>
<HeroImage src={DrawkitContentManColour} alt="" />
</HeroLayout>
</ResponsiveContainer>
</HeroArea>
<CustomerList>
<CustomerContainer>
{customers.map(logo => (
<CustomerLogo key={logo.alt}>
<img src={logo.src} alt={logo.alt} />
</CustomerLogo>
))}
</CustomerContainer>
</CustomerList>
<ServiceSection>
<ResponsiveContainer>
<Center>
<H2>Unsere Leistungen</H2>
</Center>
<SkillGrid>
{data.allContentfulServices.edges.map(({ node }) => (
<Skill key={node.id}>
<SkillImage>
{node.image && <Img fluid={node.image.fluid} />}
</SkillImage>
<Inset scale="xl">
<H3>{node.title}</H3>
<Text.Detail>
<div
dangerouslySetInnerHTML={{
__html: node.description.childMarkdownRemark.html
}}
/>
</Text.Detail>
</Inset>
</Skill>
))}
</SkillGrid>
</ResponsiveContainer>
</ServiceSection>
</Layout>
);
export default IndexPage;
export const query = graphql`
query AllServices {
allContentfulServices(sort: { fields: [order], order: ASC }) {
edges {
node {
id
title
description {
childMarkdownRemark {
html
}
}
image {
fluid(maxWidth: 426) {
...GatsbyContentfulFluid
}
}
}
}
}
contentfulAsset(title: { eq: "Hero" }) {
fluid(maxHeight: 1000) {
...GatsbyContentfulFluid
}
}
}
`;<|fim▁end|> | font-size: 2.5rem;
max-width: 22ch;
margin-top: 0; |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#[derive(Debug)]
pub enum HammingError {
FirstLonger,
SecondLonger,
}
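/// Count the positions at which two equal-length strings differ; returns an error when the lengths don't match.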
pub fn hamming_distance(a: &str, b: &str) -> Result<u64, HammingError> {
if a.len() > b.len() {
Err(HammingError::FirstLonger)
} else if a.len() < b.len() {<|fim▁hole|> Err(HammingError::SecondLonger)
} else {
Ok(a.chars()
.zip(b.chars())
.fold(0, |z, (x, y)| z + (if x == y { 0 } else { 1 })))
}
}<|fim▁end|> | |
<|file_name|>geth.rs<|end_file_name|><|fim▁begin|>// Copyright 2015, 2016 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,<|fim▁hole|>// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::env;
use std::path::PathBuf;
use ethkey::Address;
use {SafeAccount, Error};
use super::{KeyDirectory, DiskDirectory, DirectoryType};
#[cfg(target_os = "macos")]
fn geth_dir_path() -> PathBuf {
let mut home = env::home_dir().expect("Failed to get home dir");
home.push("Library");
home.push("Ethereum");
home
}
#[cfg(windows)]
/// Default path for ethereum installation on Windows
pub fn geth_dir_path() -> PathBuf {
let mut home = env::home_dir().expect("Failed to get home dir");
home.push("AppData");
home.push("Roaming");
home.push("Ethereum");
home
}
#[cfg(not(any(target_os = "macos", windows)))]
/// Default path for ethereum installation on a POSIX system which is not Mac OS
pub fn geth_dir_path() -> PathBuf {
let mut home = env::home_dir().expect("Failed to get home dir");
home.push(".ethereum");
home
}
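/// Location of the Geth keystore, using the testnet subdirectory for `DirectoryType::Testnet`.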
fn geth_keystore(t: DirectoryType) -> PathBuf {
let mut dir = geth_dir_path();
match t {
DirectoryType::Testnet => {
dir.push("testnet");
dir.push("keystore");
},
DirectoryType::Main => {
dir.push("keystore");
}
}
dir
}
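/// Key directory backed by the on-disk Geth keystore layout.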
pub struct GethDirectory {
dir: DiskDirectory,
}
impl GethDirectory {
pub fn create(t: DirectoryType) -> Result<Self, Error> {
let result = GethDirectory {
dir: try!(DiskDirectory::create(geth_keystore(t))),
};
Ok(result)
}
pub fn open(t: DirectoryType) -> Self {
GethDirectory {
dir: DiskDirectory::at(geth_keystore(t)),
}
}
}
impl KeyDirectory for GethDirectory {
fn load(&self) -> Result<Vec<SafeAccount>, Error> {
self.dir.load()
}
fn insert(&self, account: SafeAccount) -> Result<SafeAccount, Error> {
self.dir.insert(account)
}
fn remove(&self, address: &Address) -> Result<(), Error> {
self.dir.remove(address)
}
}<|fim▁end|> | |
<|file_name|>metaDataProvider.js<|end_file_name|><|fim▁begin|>import external from '../../../externalModules.js';
import getNumberValues from './getNumberValues.js';
import parseImageId from '../parseImageId.js';
import dataSetCacheManager from '../dataSetCacheManager.js';
import getImagePixelModule from './getImagePixelModule.js';
import getOverlayPlaneModule from './getOverlayPlaneModule.js';
import getLUTs from './getLUTs.js';
import getModalityLUTOutputPixelRepresentation from './getModalityLUTOutputPixelRepresentation.js';
function metaDataProvider(type, imageId) {
const { dicomParser } = external;
const parsedImageId = parseImageId(imageId);
const dataSet = dataSetCacheManager.get(parsedImageId.url);
if (!dataSet) {
return;
}
if (type === 'generalSeriesModule') {
return {
modality: dataSet.string('x00080060'),
seriesInstanceUID: dataSet.string('x0020000e'),
seriesNumber: dataSet.intString('x00200011'),
studyInstanceUID: dataSet.string('x0020000d'),
seriesDate: dicomParser.parseDA(dataSet.string('x00080021')),
seriesTime: dicomParser.parseTM(dataSet.string('x00080031') || ''),
};
}
if (type === 'patientStudyModule') {
return {
patientAge: dataSet.intString('x00101010'),
patientSize: dataSet.floatString('x00101020'),
patientWeight: dataSet.floatString('x00101030'),
};
}
if (type === 'imagePlaneModule') {
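    // DICOM tags: x00200037 = Image Orientation (Patient), x00200032 = Image Position (Patient), x00280030 = Pixel Spacing.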
const imageOrientationPatient = getNumberValues(dataSet, 'x00200037', 6);
const imagePositionPatient = getNumberValues(dataSet, 'x00200032', 3);
const pixelSpacing = getNumberValues(dataSet, 'x00280030', 2);
let columnPixelSpacing = null;
let rowPixelSpacing = null;
if (pixelSpacing) {
rowPixelSpacing = pixelSpacing[0];
columnPixelSpacing = pixelSpacing[1];
}
let rowCosines = null;
<|fim▁hole|> parseFloat(imageOrientationPatient[0]),
parseFloat(imageOrientationPatient[1]),
parseFloat(imageOrientationPatient[2]),
];
columnCosines = [
parseFloat(imageOrientationPatient[3]),
parseFloat(imageOrientationPatient[4]),
parseFloat(imageOrientationPatient[5]),
];
}
return {
frameOfReferenceUID: dataSet.string('x00200052'),
rows: dataSet.uint16('x00280010'),
columns: dataSet.uint16('x00280011'),
imageOrientationPatient,
rowCosines,
columnCosines,
imagePositionPatient,
sliceThickness: dataSet.floatString('x00180050'),
sliceLocation: dataSet.floatString('x00201041'),
pixelSpacing,
rowPixelSpacing,
columnPixelSpacing,
};
}
if (type === 'imagePixelModule') {
return getImagePixelModule(dataSet);
}
if (type === 'modalityLutModule') {
return {
rescaleIntercept: dataSet.floatString('x00281052'),
rescaleSlope: dataSet.floatString('x00281053'),
rescaleType: dataSet.string('x00281054'),
modalityLUTSequence: getLUTs(
dataSet.uint16('x00280103'),
dataSet.elements.x00283000
),
};
}
if (type === 'voiLutModule') {
const modalityLUTOutputPixelRepresentation = getModalityLUTOutputPixelRepresentation(
dataSet
);
return {
windowCenter: getNumberValues(dataSet, 'x00281050', 1),
windowWidth: getNumberValues(dataSet, 'x00281051', 1),
voiLUTSequence: getLUTs(
modalityLUTOutputPixelRepresentation,
dataSet.elements.x00283010
),
};
}
if (type === 'sopCommonModule') {
return {
sopClassUID: dataSet.string('x00080016'),
sopInstanceUID: dataSet.string('x00080018'),
};
}
if (type === 'petIsotopeModule') {
const radiopharmaceuticalInfo = dataSet.elements.x00540016;
if (radiopharmaceuticalInfo === undefined) {
return;
}
const firstRadiopharmaceuticalInfoDataSet =
radiopharmaceuticalInfo.items[0].dataSet;
return {
radiopharmaceuticalInfo: {
radiopharmaceuticalStartTime: dicomParser.parseTM(
firstRadiopharmaceuticalInfoDataSet.string('x00181072') || ''
),
radionuclideTotalDose: firstRadiopharmaceuticalInfoDataSet.floatString(
'x00181074'
),
radionuclideHalfLife: firstRadiopharmaceuticalInfoDataSet.floatString(
'x00181075'
),
},
};
}
if (type === 'overlayPlaneModule') {
return getOverlayPlaneModule(dataSet);
}
}
export default metaDataProvider;<|fim▁end|> | let columnCosines = null;
if (imageOrientationPatient) {
rowCosines = [ |
<|file_name|>OpTEST.hpp<|end_file_name|><|fim▁begin|>/*
*
* Copyright (c) 2015-2017 Stanislav Zhukov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#ifndef YAPE_OPTEST_HPP
#define YAPE_OPTEST_HPP
#include "OpAND.hpp"
class OpTEST : public OpAND
{
public:
void Execute() override;<|fim▁hole|>
#endif //YAPE_OPTEST_HPP<|fim▁end|> | int GetOpcode() const override;
}; |
<|file_name|>vic_memory.rs<|end_file_name|><|fim▁begin|>// This file is part of zinc64.
// Copyright (c) 2016-2019 Sebastian Jastrzebski. All rights reserved.
// Licensed under the GPLv3. See LICENSE file in the project root for full license text.
use zinc64_core::{Ram, Rom, Shared, SharedCell};
pub struct VicMemory {
base_address: SharedCell<u16>,
charset: Shared<Rom>,
ram: Shared<Ram>,
}
impl VicMemory {
pub fn new(base_address: SharedCell<u16>, charset: Shared<Rom>, ram: Shared<Ram>) -> VicMemory {
VicMemory {
base_address,
charset,
ram,
}
}
pub fn read(&self, address: u16) -> u8 {<|fim▁hole|> 0x01 => self.charset.borrow().read(full_address - 0x1000),
0x09 => self.charset.borrow().read(full_address - 0x9000),
_ => self.ram.borrow().read(full_address),
}
}
}<|fim▁end|> | let full_address = self.base_address.get() | address;
let zone = full_address >> 12;
match zone { |
<|file_name|>featurestore_service.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.cloud.aiplatform_v1.types import entity_type as gca_entity_type
from google.cloud.aiplatform_v1.types import feature as gca_feature
from google.cloud.aiplatform_v1.types import feature_selector as gca_feature_selector
from google.cloud.aiplatform_v1.types import featurestore as gca_featurestore
from google.cloud.aiplatform_v1.types import io
from google.cloud.aiplatform_v1.types import operation
from google.protobuf import field_mask_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
__protobuf__ = proto.module(
package="google.cloud.aiplatform.v1",
manifest={
"CreateFeaturestoreRequest",
"GetFeaturestoreRequest",
"ListFeaturestoresRequest",
"ListFeaturestoresResponse",
"UpdateFeaturestoreRequest",
"DeleteFeaturestoreRequest",
"ImportFeatureValuesRequest",
"ImportFeatureValuesResponse",
"BatchReadFeatureValuesRequest",
"ExportFeatureValuesRequest",
"DestinationFeatureSetting",
"FeatureValueDestination",
"ExportFeatureValuesResponse",
"BatchReadFeatureValuesResponse",
"CreateEntityTypeRequest",
"GetEntityTypeRequest",
"ListEntityTypesRequest",
"ListEntityTypesResponse",
"UpdateEntityTypeRequest",
"DeleteEntityTypeRequest",
"CreateFeatureRequest",
"BatchCreateFeaturesRequest",
"BatchCreateFeaturesResponse",
"GetFeatureRequest",
"ListFeaturesRequest",
"ListFeaturesResponse",
"SearchFeaturesRequest",
"SearchFeaturesResponse",
"UpdateFeatureRequest",
"DeleteFeatureRequest",
"CreateFeaturestoreOperationMetadata",
"UpdateFeaturestoreOperationMetadata",
"ImportFeatureValuesOperationMetadata",
"ExportFeatureValuesOperationMetadata",
"BatchReadFeatureValuesOperationMetadata",
"CreateEntityTypeOperationMetadata",
"CreateFeatureOperationMetadata",
"BatchCreateFeaturesOperationMetadata",
},
)
class CreateFeaturestoreRequest(proto.Message):
r"""Request message for
[FeaturestoreService.CreateFeaturestore][google.cloud.aiplatform.v1.FeaturestoreService.CreateFeaturestore].
Attributes:
parent (str):
Required. The resource name of the Location to create
Featurestores. Format:
``projects/{project}/locations/{location}``
featurestore (google.cloud.aiplatform_v1.types.Featurestore):
Required. The Featurestore to create.
featurestore_id (str):
Required. The ID to use for this Featurestore, which will
become the final component of the Featurestore's resource
name.
This value may be up to 60 characters, and valid characters
are ``[a-z0-9_]``. The first character cannot be a number.
The value must be unique within the project and location.
"""
parent = proto.Field(proto.STRING, number=1,)
featurestore = proto.Field(
proto.MESSAGE, number=2, message=gca_featurestore.Featurestore,
)
featurestore_id = proto.Field(proto.STRING, number=3,)
class GetFeaturestoreRequest(proto.Message):
r"""Request message for
[FeaturestoreService.GetFeaturestore][google.cloud.aiplatform.v1.FeaturestoreService.GetFeaturestore].
Attributes:
name (str):
Required. The name of the Featurestore
resource.
"""
name = proto.Field(proto.STRING, number=1,)
class ListFeaturestoresRequest(proto.Message):
r"""Request message for
[FeaturestoreService.ListFeaturestores][google.cloud.aiplatform.v1.FeaturestoreService.ListFeaturestores].
Attributes:
parent (str):
Required. The resource name of the Location to list
Featurestores. Format:
``projects/{project}/locations/{location}``
filter (str):
Lists the featurestores that match the filter expression.
The following fields are supported:
- ``create_time``: Supports ``=``, ``!=``, ``<``, ``>``,
``<=``, and ``>=`` comparisons. Values must be in RFC
3339 format.
- ``update_time``: Supports ``=``, ``!=``, ``<``, ``>``,
``<=``, and ``>=`` comparisons. Values must be in RFC
3339 format.<|fim▁hole|> - ``labels``: Supports key-value equality and key presence.
Examples:
- ``create_time > "2020-01-01" OR update_time > "2020-01-01"``
Featurestores created or updated after 2020-01-01.
- ``labels.env = "prod"`` Featurestores with label "env"
set to "prod".
page_size (int):
The maximum number of Featurestores to
return. The service may return fewer than this
value. If unspecified, at most 100 Featurestores
will be returned. The maximum value is 100; any
value greater than 100 will be coerced to 100.
page_token (str):
A page token, received from a previous
[FeaturestoreService.ListFeaturestores][google.cloud.aiplatform.v1.FeaturestoreService.ListFeaturestores]
call. Provide this to retrieve the subsequent page.
When paginating, all other parameters provided to
[FeaturestoreService.ListFeaturestores][google.cloud.aiplatform.v1.FeaturestoreService.ListFeaturestores]
must match the call that provided the page token.
order_by (str):
A comma-separated list of fields to order by, sorted in
ascending order. Use "desc" after a field name for
descending. Supported Fields:
- ``create_time``
- ``update_time``
- ``online_serving_config.fixed_node_count``
read_mask (google.protobuf.field_mask_pb2.FieldMask):
Mask specifying which fields to read.
"""
parent = proto.Field(proto.STRING, number=1,)
filter = proto.Field(proto.STRING, number=2,)
page_size = proto.Field(proto.INT32, number=3,)
page_token = proto.Field(proto.STRING, number=4,)
order_by = proto.Field(proto.STRING, number=5,)
read_mask = proto.Field(proto.MESSAGE, number=6, message=field_mask_pb2.FieldMask,)
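# Illustrative sketch (not part of the generated API): one plausible way to build this
# request with proto-plus keyword arguments. The project, location, and label values
# below are made-up placeholders; the filter and order_by strings follow the syntax
# documented in the class docstring above.
#
#   request = ListFeaturestoresRequest(
#       parent="projects/my-project/locations/us-central1",
#       filter='labels.env = "prod"',
#       order_by="create_time desc",
#       page_size=50,
#   )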
class ListFeaturestoresResponse(proto.Message):
r"""Response message for
[FeaturestoreService.ListFeaturestores][google.cloud.aiplatform.v1.FeaturestoreService.ListFeaturestores].
Attributes:
featurestores (Sequence[google.cloud.aiplatform_v1.types.Featurestore]):
The Featurestores matching the request.
next_page_token (str):
A token, which can be sent as
[ListFeaturestoresRequest.page_token][google.cloud.aiplatform.v1.ListFeaturestoresRequest.page_token]
to retrieve the next page. If this field is omitted, there
are no subsequent pages.
"""
@property
def raw_page(self):
return self
featurestores = proto.RepeatedField(
proto.MESSAGE, number=1, message=gca_featurestore.Featurestore,
)
next_page_token = proto.Field(proto.STRING, number=2,)
class UpdateFeaturestoreRequest(proto.Message):
r"""Request message for
[FeaturestoreService.UpdateFeaturestore][google.cloud.aiplatform.v1.FeaturestoreService.UpdateFeaturestore].
Attributes:
featurestore (google.cloud.aiplatform_v1.types.Featurestore):
Required. The Featurestore's ``name`` field is used to
identify the Featurestore to be updated. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}``
update_mask (google.protobuf.field_mask_pb2.FieldMask):
Field mask is used to specify the fields to be overwritten
in the Featurestore resource by the update. The fields
specified in the update_mask are relative to the resource,
not the full request. A field will be overwritten if it is
in the mask. If the user does not provide a mask then only
the non-empty fields present in the request will be
overwritten. Set the update_mask to ``*`` to override all
fields.
Updatable fields:
- ``labels``
- ``online_serving_config.fixed_node_count``
"""
featurestore = proto.Field(
proto.MESSAGE, number=1, message=gca_featurestore.Featurestore,
)
update_mask = proto.Field(
proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask,
)
class DeleteFeaturestoreRequest(proto.Message):
r"""Request message for
[FeaturestoreService.DeleteFeaturestore][google.cloud.aiplatform.v1.FeaturestoreService.DeleteFeaturestore].
Attributes:
name (str):
Required. The name of the Featurestore to be deleted.
Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}``
force (bool):
If set to true, any EntityTypes and Features
for this Featurestore will also be deleted.
(Otherwise, the request will only work if the
Featurestore has no EntityTypes.)
"""
name = proto.Field(proto.STRING, number=1,)
force = proto.Field(proto.BOOL, number=2,)
class ImportFeatureValuesRequest(proto.Message):
r"""Request message for
[FeaturestoreService.ImportFeatureValues][google.cloud.aiplatform.v1.FeaturestoreService.ImportFeatureValues].
This message has `oneof`_ fields (mutually exclusive fields).
For each oneof, at most one member field can be set at the same time.
Setting any member of the oneof automatically clears all other
members.
.. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
Attributes:
avro_source (google.cloud.aiplatform_v1.types.AvroSource):
This field is a member of `oneof`_ ``source``.
bigquery_source (google.cloud.aiplatform_v1.types.BigQuerySource):
This field is a member of `oneof`_ ``source``.
csv_source (google.cloud.aiplatform_v1.types.CsvSource):
This field is a member of `oneof`_ ``source``.
feature_time_field (str):
Source column that holds the Feature
timestamp for all Feature values in each entity.
This field is a member of `oneof`_ ``feature_time_source``.
feature_time (google.protobuf.timestamp_pb2.Timestamp):
Single Feature timestamp for all entities
being imported. The timestamp must not have
higher than millisecond precision.
This field is a member of `oneof`_ ``feature_time_source``.
entity_type (str):
Required. The resource name of the EntityType grouping the
Features for which values are being imported. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entityType}``
entity_id_field (str):
Source column that holds entity IDs. If not provided, entity
IDs are extracted from the column named ``entity_id``.
feature_specs (Sequence[google.cloud.aiplatform_v1.types.ImportFeatureValuesRequest.FeatureSpec]):
Required. Specifications defining which Feature values to
import from the entity. The request fails if no
feature_specs are provided, and having multiple
feature_specs for one Feature is not allowed.
disable_online_serving (bool):
If set, data will not be imported for online
serving. This is typically used for backfilling,
where Feature generation timestamps are not in
the timestamp range needed for online serving.
worker_count (int):
Specifies the number of workers that are used
to write data to the Featurestore. Consider the
online serving capacity that you require to
achieve the desired import throughput without
interfering with online serving. The value must
be positive, and less than or equal to 100. If
not set, defaults to using 1 worker. The low
count ensures minimal impact on online serving
performance.
"""
class FeatureSpec(proto.Message):
r"""Defines the Feature value(s) to import.
Attributes:
id (str):
Required. ID of the Feature to import values
of. This Feature must exist in the target
EntityType, or the request will fail.
source_field (str):
Source column to get the Feature values from.
If not set, uses the column with the same name
as the Feature ID.
"""
id = proto.Field(proto.STRING, number=1,)
source_field = proto.Field(proto.STRING, number=2,)
avro_source = proto.Field(
proto.MESSAGE, number=2, oneof="source", message=io.AvroSource,
)
bigquery_source = proto.Field(
proto.MESSAGE, number=3, oneof="source", message=io.BigQuerySource,
)
csv_source = proto.Field(
proto.MESSAGE, number=4, oneof="source", message=io.CsvSource,
)
feature_time_field = proto.Field(
proto.STRING, number=6, oneof="feature_time_source",
)
feature_time = proto.Field(
proto.MESSAGE,
number=7,
oneof="feature_time_source",
message=timestamp_pb2.Timestamp,
)
entity_type = proto.Field(proto.STRING, number=1,)
entity_id_field = proto.Field(proto.STRING, number=5,)
feature_specs = proto.RepeatedField(proto.MESSAGE, number=8, message=FeatureSpec,)
disable_online_serving = proto.Field(proto.BOOL, number=9,)
worker_count = proto.Field(proto.INT32, number=11,)
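# Illustrative sketch (not part of the generated API): assembling an import request from a
# hypothetical CSV file in Cloud Storage. The resource names, bucket path, and column names
# are made-up placeholders; CsvSource and GcsSource come from the `io` module imported above.
#
#   request = ImportFeatureValuesRequest(
#       entity_type=(
#           "projects/my-project/locations/us-central1/"
#           "featurestores/my-store/entityTypes/users"
#       ),
#       csv_source=io.CsvSource(gcs_source=io.GcsSource(uris=["gs://my-bucket/users.csv"])),
#       feature_time_field="event_time",
#       entity_id_field="user_id",
#       feature_specs=[ImportFeatureValuesRequest.FeatureSpec(id="age", source_field="age")],
#       worker_count=1,
#   )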
class ImportFeatureValuesResponse(proto.Message):
r"""Response message for
[FeaturestoreService.ImportFeatureValues][google.cloud.aiplatform.v1.FeaturestoreService.ImportFeatureValues].
Attributes:
imported_entity_count (int):
Number of entities that have been imported by
the operation.
imported_feature_value_count (int):
Number of Feature values that have been
imported by the operation.
invalid_row_count (int):
The number of rows in the input source that weren't imported due
to one of the following:
- Not having any featureValues.
- Having a null entityId.
- Having a null timestamp.
- Not being parsable (applicable for CSV sources).
"""
imported_entity_count = proto.Field(proto.INT64, number=1,)
imported_feature_value_count = proto.Field(proto.INT64, number=2,)
invalid_row_count = proto.Field(proto.INT64, number=6,)
class BatchReadFeatureValuesRequest(proto.Message):
r"""Request message for
[FeaturestoreService.BatchReadFeatureValues][google.cloud.aiplatform.v1.FeaturestoreService.BatchReadFeatureValues].
This message has `oneof`_ fields (mutually exclusive fields).
For each oneof, at most one member field can be set at the same time.
Setting any member of the oneof automatically clears all other
members.
.. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
Attributes:
csv_read_instances (google.cloud.aiplatform_v1.types.CsvSource):
Each read instance consists of exactly one read timestamp
and one or more entity IDs identifying entities of the
corresponding EntityTypes whose Features are requested.
Each output instance contains Feature values of requested
entities concatenated together as of the read time.
An example read instance may be
``foo_entity_id, bar_entity_id, 2020-01-01T10:00:00.123Z``.
An example output instance may be
``foo_entity_id, bar_entity_id, 2020-01-01T10:00:00.123Z, foo_entity_feature1_value, bar_entity_feature2_value``.
Timestamp in each read instance must be millisecond-aligned.
``csv_read_instances`` are read instances stored in a
plain-text CSV file. The header should be:
[ENTITY_TYPE_ID1], [ENTITY_TYPE_ID2], ..., timestamp
The columns can be in any order.
Values in the timestamp column must use the RFC 3339 format,
e.g. ``2012-07-30T10:43:17.123Z``.
This field is a member of `oneof`_ ``read_option``.
bigquery_read_instances (google.cloud.aiplatform_v1.types.BigQuerySource):
Similar to csv_read_instances, but from BigQuery source.
This field is a member of `oneof`_ ``read_option``.
featurestore (str):
Required. The resource name of the Featurestore from which
to query Feature values. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}``
destination (google.cloud.aiplatform_v1.types.FeatureValueDestination):
Required. Specifies output location and
format.
pass_through_fields (Sequence[google.cloud.aiplatform_v1.types.BatchReadFeatureValuesRequest.PassThroughField]):
When not empty, the specified fields in the
\*_read_instances source will be joined as-is in the output,
in addition to those fields from the Featurestore Entity.
For BigQuery source, the type of the pass-through values
will be automatically inferred. For CSV source, the
pass-through values will be passed as opaque bytes.
entity_type_specs (Sequence[google.cloud.aiplatform_v1.types.BatchReadFeatureValuesRequest.EntityTypeSpec]):
Required. Specifies EntityType grouping Features to read
values of and settings. Each EntityType referenced in
[BatchReadFeatureValuesRequest.entity_type_specs] must have
a column specifying entity IDs in the EntityType in
[BatchReadFeatureValuesRequest.request][] .
"""
class PassThroughField(proto.Message):
r"""Describe pass-through fields in read_instance source.
Attributes:
field_name (str):
Required. The name of the field in the CSV header or the
name of the column in BigQuery table. The naming restriction
is the same as
[Feature.name][google.cloud.aiplatform.v1.Feature.name].
"""
field_name = proto.Field(proto.STRING, number=1,)
class EntityTypeSpec(proto.Message):
r"""Selects Features of an EntityType to read values of and
specifies read settings.
Attributes:
entity_type_id (str):
Required. ID of the EntityType to select Features. The
EntityType id is the
[entity_type_id][google.cloud.aiplatform.v1.CreateEntityTypeRequest.entity_type_id]
specified during EntityType creation.
feature_selector (google.cloud.aiplatform_v1.types.FeatureSelector):
Required. Selectors choosing which Feature
values to read from the EntityType.
settings (Sequence[google.cloud.aiplatform_v1.types.DestinationFeatureSetting]):
Per-Feature settings for the batch read.
"""
entity_type_id = proto.Field(proto.STRING, number=1,)
feature_selector = proto.Field(
proto.MESSAGE, number=2, message=gca_feature_selector.FeatureSelector,
)
settings = proto.RepeatedField(
proto.MESSAGE, number=3, message="DestinationFeatureSetting",
)
csv_read_instances = proto.Field(
proto.MESSAGE, number=3, oneof="read_option", message=io.CsvSource,
)
bigquery_read_instances = proto.Field(
proto.MESSAGE, number=5, oneof="read_option", message=io.BigQuerySource,
)
featurestore = proto.Field(proto.STRING, number=1,)
destination = proto.Field(
proto.MESSAGE, number=4, message="FeatureValueDestination",
)
pass_through_fields = proto.RepeatedField(
proto.MESSAGE, number=8, message=PassThroughField,
)
entity_type_specs = proto.RepeatedField(
proto.MESSAGE, number=7, message=EntityTypeSpec,
)
class ExportFeatureValuesRequest(proto.Message):
r"""Request message for
[FeaturestoreService.ExportFeatureValues][google.cloud.aiplatform.v1.FeaturestoreService.ExportFeatureValues].
This message has `oneof`_ fields (mutually exclusive fields).
For each oneof, at most one member field can be set at the same time.
Setting any member of the oneof automatically clears all other
members.
.. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
Attributes:
snapshot_export (google.cloud.aiplatform_v1.types.ExportFeatureValuesRequest.SnapshotExport):
Exports the latest Feature values of all
entities of the EntityType within a time range.
This field is a member of `oneof`_ ``mode``.
full_export (google.cloud.aiplatform_v1.types.ExportFeatureValuesRequest.FullExport):
Exports all historical values of all entities
of the EntityType within a time range
This field is a member of `oneof`_ ``mode``.
entity_type (str):
Required. The resource name of the EntityType from which to
export Feature values. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}``
destination (google.cloud.aiplatform_v1.types.FeatureValueDestination):
Required. Specifies destination location and
format.
feature_selector (google.cloud.aiplatform_v1.types.FeatureSelector):
Required. Selects Features to export values
of.
settings (Sequence[google.cloud.aiplatform_v1.types.DestinationFeatureSetting]):
Per-Feature export settings.
"""
class SnapshotExport(proto.Message):
r"""Describes exporting the latest Feature values of all entities of the
EntityType between [start_time, snapshot_time].
Attributes:
snapshot_time (google.protobuf.timestamp_pb2.Timestamp):
Exports Feature values as of this timestamp.
If not set, retrieve values as of now.
Timestamp, if present, must not have higher than
millisecond precision.
start_time (google.protobuf.timestamp_pb2.Timestamp):
Excludes Feature values with feature
generation timestamp before this timestamp. If
not set, retrieve oldest values kept in Feature
Store. Timestamp, if present, must not have
higher than millisecond precision.
"""
snapshot_time = proto.Field(
proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,
)
start_time = proto.Field(
proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,
)
class FullExport(proto.Message):
r"""Describes exporting all historical Feature values of all entities of
the EntityType between [start_time, end_time].
Attributes:
start_time (google.protobuf.timestamp_pb2.Timestamp):
Excludes Feature values with feature
generation timestamp before this timestamp. If
not set, retrieve oldest values kept in Feature
Store. Timestamp, if present, must not have
higher than millisecond precision.
end_time (google.protobuf.timestamp_pb2.Timestamp):
Exports Feature values as of this timestamp.
If not set, retrieve values as of now.
Timestamp, if present, must not have higher than
millisecond precision.
"""
start_time = proto.Field(
proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,
)
end_time = proto.Field(
proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,
)
snapshot_export = proto.Field(
proto.MESSAGE, number=3, oneof="mode", message=SnapshotExport,
)
full_export = proto.Field(
proto.MESSAGE, number=7, oneof="mode", message=FullExport,
)
entity_type = proto.Field(proto.STRING, number=1,)
destination = proto.Field(
proto.MESSAGE, number=4, message="FeatureValueDestination",
)
feature_selector = proto.Field(
proto.MESSAGE, number=5, message=gca_feature_selector.FeatureSelector,
)
settings = proto.RepeatedField(
proto.MESSAGE, number=6, message="DestinationFeatureSetting",
)
class DestinationFeatureSetting(proto.Message):
r"""
Attributes:
feature_id (str):
Required. The ID of the Feature to apply the
setting to.
destination_field (str):
Specify the field name in the export
destination. If not specified, Feature ID is
used.
"""
feature_id = proto.Field(proto.STRING, number=1,)
destination_field = proto.Field(proto.STRING, number=2,)
class FeatureValueDestination(proto.Message):
r"""A destination location for Feature values and format.
This message has `oneof`_ fields (mutually exclusive fields).
For each oneof, at most one member field can be set at the same time.
Setting any member of the oneof automatically clears all other
members.
.. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
Attributes:
bigquery_destination (google.cloud.aiplatform_v1.types.BigQueryDestination):
Output in BigQuery format.
[BigQueryDestination.output_uri][google.cloud.aiplatform.v1.BigQueryDestination.output_uri]
in
[FeatureValueDestination.bigquery_destination][google.cloud.aiplatform.v1.FeatureValueDestination.bigquery_destination]
must refer to a table.
This field is a member of `oneof`_ ``destination``.
tfrecord_destination (google.cloud.aiplatform_v1.types.TFRecordDestination):
Output in TFRecord format.
Below are the mapping from Feature value type in
Featurestore to Feature value type in TFRecord:
::
Value type in Featurestore | Value type in TFRecord
DOUBLE, DOUBLE_ARRAY | FLOAT_LIST
INT64, INT64_ARRAY | INT64_LIST
STRING, STRING_ARRAY, BYTES | BYTES_LIST
BOOL, BOOL_ARRAY (true, false) | BYTES_LIST
    true -> byte_string("true"), false -> byte_string("false")
This field is a member of `oneof`_ ``destination``.
csv_destination (google.cloud.aiplatform_v1.types.CsvDestination):
Output in CSV format. Array Feature value
types are not allowed in CSV format.
This field is a member of `oneof`_ ``destination``.
"""
bigquery_destination = proto.Field(
proto.MESSAGE, number=1, oneof="destination", message=io.BigQueryDestination,
)
tfrecord_destination = proto.Field(
proto.MESSAGE, number=2, oneof="destination", message=io.TFRecordDestination,
)
csv_destination = proto.Field(
proto.MESSAGE, number=3, oneof="destination", message=io.CsvDestination,
)
class ExportFeatureValuesResponse(proto.Message):
r"""Response message for
[FeaturestoreService.ExportFeatureValues][google.cloud.aiplatform.v1.FeaturestoreService.ExportFeatureValues].
"""
class BatchReadFeatureValuesResponse(proto.Message):
r"""Response message for
[FeaturestoreService.BatchReadFeatureValues][google.cloud.aiplatform.v1.FeaturestoreService.BatchReadFeatureValues].
"""
class CreateEntityTypeRequest(proto.Message):
r"""Request message for
[FeaturestoreService.CreateEntityType][google.cloud.aiplatform.v1.FeaturestoreService.CreateEntityType].
Attributes:
parent (str):
Required. The resource name of the Featurestore to create
EntityTypes. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}``
entity_type (google.cloud.aiplatform_v1.types.EntityType):
The EntityType to create.
entity_type_id (str):
Required. The ID to use for the EntityType, which will
become the final component of the EntityType's resource
name.
This value may be up to 60 characters, and valid characters
are ``[a-z0-9_]``. The first character cannot be a number.
The value must be unique within a featurestore.
"""
parent = proto.Field(proto.STRING, number=1,)
entity_type = proto.Field(
proto.MESSAGE, number=2, message=gca_entity_type.EntityType,
)
entity_type_id = proto.Field(proto.STRING, number=3,)
class GetEntityTypeRequest(proto.Message):
r"""Request message for
[FeaturestoreService.GetEntityType][google.cloud.aiplatform.v1.FeaturestoreService.GetEntityType].
Attributes:
name (str):
Required. The name of the EntityType resource. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}``
"""
name = proto.Field(proto.STRING, number=1,)
class ListEntityTypesRequest(proto.Message):
r"""Request message for
[FeaturestoreService.ListEntityTypes][google.cloud.aiplatform.v1.FeaturestoreService.ListEntityTypes].
Attributes:
parent (str):
Required. The resource name of the Featurestore to list
EntityTypes. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}``
filter (str):
Lists the EntityTypes that match the filter expression. The
following filters are supported:
- ``create_time``: Supports ``=``, ``!=``, ``<``, ``>``,
``>=``, and ``<=`` comparisons. Values must be in RFC
3339 format.
- ``update_time``: Supports ``=``, ``!=``, ``<``, ``>``,
``>=``, and ``<=`` comparisons. Values must be in RFC
3339 format.
- ``labels``: Supports key-value equality as well as key
presence.
Examples:
- ``create_time > \"2020-01-31T15:30:00.000000Z\" OR update_time > \"2020-01-31T15:30:00.000000Z\"``
--> EntityTypes created or updated after
2020-01-31T15:30:00.000000Z.
- ``labels.active = yes AND labels.env = prod`` -->
EntityTypes having both (active: yes) and (env: prod)
labels.
- ``labels.env: *`` --> Any EntityType which has a label
with 'env' as the key.
page_size (int):
The maximum number of EntityTypes to return.
The service may return fewer than this value. If
unspecified, at most 1000 EntityTypes will be
returned. The maximum value is 1000; any value
greater than 1000 will be coerced to 1000.
page_token (str):
A page token, received from a previous
[FeaturestoreService.ListEntityTypes][google.cloud.aiplatform.v1.FeaturestoreService.ListEntityTypes]
call. Provide this to retrieve the subsequent page.
When paginating, all other parameters provided to
[FeaturestoreService.ListEntityTypes][google.cloud.aiplatform.v1.FeaturestoreService.ListEntityTypes]
must match the call that provided the page token.
order_by (str):
A comma-separated list of fields to order by, sorted in
ascending order. Use "desc" after a field name for
descending.
Supported fields:
- ``entity_type_id``
- ``create_time``
- ``update_time``
read_mask (google.protobuf.field_mask_pb2.FieldMask):
Mask specifying which fields to read.
"""
parent = proto.Field(proto.STRING, number=1,)
filter = proto.Field(proto.STRING, number=2,)
page_size = proto.Field(proto.INT32, number=3,)
page_token = proto.Field(proto.STRING, number=4,)
order_by = proto.Field(proto.STRING, number=5,)
read_mask = proto.Field(proto.MESSAGE, number=6, message=field_mask_pb2.FieldMask,)
class ListEntityTypesResponse(proto.Message):
r"""Response message for
[FeaturestoreService.ListEntityTypes][google.cloud.aiplatform.v1.FeaturestoreService.ListEntityTypes].
Attributes:
entity_types (Sequence[google.cloud.aiplatform_v1.types.EntityType]):
The EntityTypes matching the request.
next_page_token (str):
A token, which can be sent as
[ListEntityTypesRequest.page_token][google.cloud.aiplatform.v1.ListEntityTypesRequest.page_token]
to retrieve the next page. If this field is omitted, there
are no subsequent pages.
"""
@property
def raw_page(self):
return self
entity_types = proto.RepeatedField(
proto.MESSAGE, number=1, message=gca_entity_type.EntityType,
)
next_page_token = proto.Field(proto.STRING, number=2,)
class UpdateEntityTypeRequest(proto.Message):
r"""Request message for
[FeaturestoreService.UpdateEntityType][google.cloud.aiplatform.v1.FeaturestoreService.UpdateEntityType].
Attributes:
entity_type (google.cloud.aiplatform_v1.types.EntityType):
Required. The EntityType's ``name`` field is used to
identify the EntityType to be updated. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}``
update_mask (google.protobuf.field_mask_pb2.FieldMask):
Field mask is used to specify the fields to be overwritten
in the EntityType resource by the update. The fields
specified in the update_mask are relative to the resource,
not the full request. A field will be overwritten if it is
in the mask. If the user does not provide a mask then only
the non-empty fields present in the request will be
overwritten. Set the update_mask to ``*`` to override all
fields.
Updatable fields:
- ``description``
- ``labels``
- ``monitoring_config.snapshot_analysis.disabled``
- ``monitoring_config.snapshot_analysis.monitoring_interval``
"""
entity_type = proto.Field(
proto.MESSAGE, number=1, message=gca_entity_type.EntityType,
)
update_mask = proto.Field(
proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask,
)
class DeleteEntityTypeRequest(proto.Message):
r"""Request message for [FeaturestoreService.DeleteEntityTypes][].
Attributes:
name (str):
Required. The name of the EntityType to be deleted. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}``
force (bool):
If set to true, any Features for this
EntityType will also be deleted. (Otherwise, the
request will only work if the EntityType has no
Features.)
"""
name = proto.Field(proto.STRING, number=1,)
force = proto.Field(proto.BOOL, number=2,)
class CreateFeatureRequest(proto.Message):
r"""Request message for
[FeaturestoreService.CreateFeature][google.cloud.aiplatform.v1.FeaturestoreService.CreateFeature].
Attributes:
parent (str):
Required. The resource name of the EntityType to create a
Feature. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}``
feature (google.cloud.aiplatform_v1.types.Feature):
Required. The Feature to create.
feature_id (str):
Required. The ID to use for the Feature, which will become
the final component of the Feature's resource name.
This value may be up to 60 characters, and valid characters
are ``[a-z0-9_]``. The first character cannot be a number.
The value must be unique within an EntityType.
"""
parent = proto.Field(proto.STRING, number=1,)
feature = proto.Field(proto.MESSAGE, number=2, message=gca_feature.Feature,)
feature_id = proto.Field(proto.STRING, number=3,)
class BatchCreateFeaturesRequest(proto.Message):
r"""Request message for
[FeaturestoreService.BatchCreateFeatures][google.cloud.aiplatform.v1.FeaturestoreService.BatchCreateFeatures].
Attributes:
parent (str):
Required. The resource name of the EntityType to create the
batch of Features under. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}``
requests (Sequence[google.cloud.aiplatform_v1.types.CreateFeatureRequest]):
Required. The request message specifying the Features to
create. All Features must be created under the same parent
EntityType. The ``parent`` field in each child request
message can be omitted. If ``parent`` is set in a child
request, then the value must match the ``parent`` value in
this request message.
"""
parent = proto.Field(proto.STRING, number=1,)
requests = proto.RepeatedField(
proto.MESSAGE, number=2, message="CreateFeatureRequest",
)
class BatchCreateFeaturesResponse(proto.Message):
r"""Response message for
[FeaturestoreService.BatchCreateFeatures][google.cloud.aiplatform.v1.FeaturestoreService.BatchCreateFeatures].
Attributes:
features (Sequence[google.cloud.aiplatform_v1.types.Feature]):
The Features created.
"""
features = proto.RepeatedField(
proto.MESSAGE, number=1, message=gca_feature.Feature,
)
class GetFeatureRequest(proto.Message):
r"""Request message for
[FeaturestoreService.GetFeature][google.cloud.aiplatform.v1.FeaturestoreService.GetFeature].
Attributes:
name (str):
Required. The name of the Feature resource. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}/features/{feature}``
"""
name = proto.Field(proto.STRING, number=1,)
class ListFeaturesRequest(proto.Message):
r"""Request message for
[FeaturestoreService.ListFeatures][google.cloud.aiplatform.v1.FeaturestoreService.ListFeatures].
Attributes:
parent (str):
Required. The resource name of the EntityType to list
Features. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}``
filter (str):
Lists the Features that match the filter expression. The
following filters are supported:
- ``value_type``: Supports = and != comparisons.
- ``create_time``: Supports =, !=, <, >, >=, and <=
comparisons. Values must be in RFC 3339 format.
- ``update_time``: Supports =, !=, <, >, >=, and <=
comparisons. Values must be in RFC 3339 format.
- ``labels``: Supports key-value equality as well as key
presence.
Examples:
- ``value_type = DOUBLE`` --> Features whose type is
DOUBLE.
- ``create_time > \"2020-01-31T15:30:00.000000Z\" OR update_time > \"2020-01-31T15:30:00.000000Z\"``
--> Features created or updated after
2020-01-31T15:30:00.000000Z.
- ``labels.active = yes AND labels.env = prod`` -->
Features having both (active: yes) and (env: prod)
labels.
- ``labels.env: *`` --> Any Feature which has a label with
'env' as the key.
page_size (int):
The maximum number of Features to return. The
service may return fewer than this value. If
unspecified, at most 1000 Features will be
returned. The maximum value is 1000; any value
greater than 1000 will be coerced to 1000.
page_token (str):
A page token, received from a previous
[FeaturestoreService.ListFeatures][google.cloud.aiplatform.v1.FeaturestoreService.ListFeatures]
call. Provide this to retrieve the subsequent page.
When paginating, all other parameters provided to
[FeaturestoreService.ListFeatures][google.cloud.aiplatform.v1.FeaturestoreService.ListFeatures]
must match the call that provided the page token.
order_by (str):
A comma-separated list of fields to order by, sorted in
ascending order. Use "desc" after a field name for
descending. Supported fields:
- ``feature_id``
- ``value_type``
- ``create_time``
- ``update_time``
read_mask (google.protobuf.field_mask_pb2.FieldMask):
Mask specifying which fields to read.
latest_stats_count (int):
If set, return the most recent
[ListFeaturesRequest.latest_stats_count][google.cloud.aiplatform.v1.ListFeaturesRequest.latest_stats_count]
of stats for each Feature in response. Valid value is [0,
10]. If number of stats exists <
[ListFeaturesRequest.latest_stats_count][google.cloud.aiplatform.v1.ListFeaturesRequest.latest_stats_count],
return all existing stats.
"""
parent = proto.Field(proto.STRING, number=1,)
filter = proto.Field(proto.STRING, number=2,)
page_size = proto.Field(proto.INT32, number=3,)
page_token = proto.Field(proto.STRING, number=4,)
order_by = proto.Field(proto.STRING, number=5,)
read_mask = proto.Field(proto.MESSAGE, number=6, message=field_mask_pb2.FieldMask,)
latest_stats_count = proto.Field(proto.INT32, number=7,)
class ListFeaturesResponse(proto.Message):
r"""Response message for
[FeaturestoreService.ListFeatures][google.cloud.aiplatform.v1.FeaturestoreService.ListFeatures].
Attributes:
features (Sequence[google.cloud.aiplatform_v1.types.Feature]):
The Features matching the request.
next_page_token (str):
A token, which can be sent as
[ListFeaturesRequest.page_token][google.cloud.aiplatform.v1.ListFeaturesRequest.page_token]
to retrieve the next page. If this field is omitted, there
are no subsequent pages.
"""
@property
def raw_page(self):
return self
features = proto.RepeatedField(
proto.MESSAGE, number=1, message=gca_feature.Feature,
)
next_page_token = proto.Field(proto.STRING, number=2,)
class SearchFeaturesRequest(proto.Message):
r"""Request message for
[FeaturestoreService.SearchFeatures][google.cloud.aiplatform.v1.FeaturestoreService.SearchFeatures].
Attributes:
location (str):
Required. The resource name of the Location to search
Features. Format:
``projects/{project}/locations/{location}``
query (str):
Query string that is a conjunction of field-restricted
queries and/or field-restricted filters. Field-restricted
queries and filters can be combined using ``AND`` to form a
conjunction.
A field query is in the form FIELD:QUERY. This implicitly
checks if QUERY exists as a substring within Feature's
FIELD. The QUERY and the FIELD are converted to a sequence
of words (i.e. tokens) for comparison. This is done by:
- Removing leading/trailing whitespace and tokenizing the
search value. Characters that are not one of alphanumeric
``[a-zA-Z0-9]``, underscore ``_``, or asterisk ``*`` are
treated as delimiters for tokens. ``*`` is treated as a
wildcard that matches characters within a token.
- Ignoring case.
- Prepending an asterisk to the first and appending an
asterisk to the last token in QUERY.
A QUERY must be either a singular token or a phrase. A
phrase is one or multiple words enclosed in double quotation
marks ("). With phrases, the order of the words is
important. Words in the phrase must be matching in order and
consecutively.
Supported FIELDs for field-restricted queries:
- ``feature_id``
- ``description``
- ``entity_type_id``
Examples:
- ``feature_id: foo`` --> Matches a Feature with ID
containing the substring ``foo`` (eg. ``foo``,
``foofeature``, ``barfoo``).
- ``feature_id: foo*feature`` --> Matches a Feature with ID
containing the substring ``foo*feature`` (eg.
``foobarfeature``).
- ``feature_id: foo AND description: bar`` --> Matches a
Feature with ID containing the substring ``foo`` and
description containing the substring ``bar``.
Besides field queries, the following exact-match filters are
supported. The exact-match filters do not support wildcards.
Unlike field-restricted queries, exact-match filters are
case-sensitive.
- ``feature_id``: Supports = comparisons.
- ``description``: Supports = comparisons. Multi-token
filters should be enclosed in quotes.
- ``entity_type_id``: Supports = comparisons.
- ``value_type``: Supports = and != comparisons.
- ``labels``: Supports key-value equality as well as key
presence.
- ``featurestore_id``: Supports = comparisons.
Examples:
- ``description = "foo bar"`` --> Any Feature with
description exactly equal to ``foo bar``
- ``value_type = DOUBLE`` --> Features whose type is
DOUBLE.
- ``labels.active = yes AND labels.env = prod`` -->
Features having both (active: yes) and (env: prod)
labels.
- ``labels.env: *`` --> Any Feature which has a label with
``env`` as the key.
page_size (int):
The maximum number of Features to return. The
service may return fewer than this value. If
unspecified, at most 100 Features will be
returned. The maximum value is 100; any value
greater than 100 will be coerced to 100.
page_token (str):
A page token, received from a previous
[FeaturestoreService.SearchFeatures][google.cloud.aiplatform.v1.FeaturestoreService.SearchFeatures]
call. Provide this to retrieve the subsequent page.
When paginating, all other parameters provided to
[FeaturestoreService.SearchFeatures][google.cloud.aiplatform.v1.FeaturestoreService.SearchFeatures],
except ``page_size``, must match the call that provided the
page token.
"""
location = proto.Field(proto.STRING, number=1,)
query = proto.Field(proto.STRING, number=3,)
page_size = proto.Field(proto.INT32, number=4,)
page_token = proto.Field(proto.STRING, number=5,)
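# Illustrative query sketches for SearchFeaturesRequest.query, following the syntax
# documented in the class docstring above (the feature IDs, descriptions, labels, and
# featurestore IDs here are made-up placeholders):
#
#   'feature_id: temp'                                        # ID contains the token "temp"
#   'description: "daily average" AND value_type = DOUBLE'    # field query plus exact-match filter
#   'labels.env: * AND featurestore_id = my_store'            # any "env" label, in one featurestore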
class SearchFeaturesResponse(proto.Message):
r"""Response message for
[FeaturestoreService.SearchFeatures][google.cloud.aiplatform.v1.FeaturestoreService.SearchFeatures].
Attributes:
features (Sequence[google.cloud.aiplatform_v1.types.Feature]):
The Features matching the request.
Fields returned:
- ``name``
- ``description``
- ``labels``
- ``create_time``
- ``update_time``
next_page_token (str):
A token, which can be sent as
[SearchFeaturesRequest.page_token][google.cloud.aiplatform.v1.SearchFeaturesRequest.page_token]
to retrieve the next page. If this field is omitted, there
are no subsequent pages.
"""
@property
def raw_page(self):
return self
features = proto.RepeatedField(
proto.MESSAGE, number=1, message=gca_feature.Feature,
)
next_page_token = proto.Field(proto.STRING, number=2,)
class UpdateFeatureRequest(proto.Message):
r"""Request message for
[FeaturestoreService.UpdateFeature][google.cloud.aiplatform.v1.FeaturestoreService.UpdateFeature].
Attributes:
feature (google.cloud.aiplatform_v1.types.Feature):
Required. The Feature's ``name`` field is used to identify
the Feature to be updated. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}/features/{feature}``
update_mask (google.protobuf.field_mask_pb2.FieldMask):
Field mask is used to specify the fields to be overwritten
in the Features resource by the update. The fields specified
in the update_mask are relative to the resource, not the
full request. A field will be overwritten if it is in the
mask. If the user does not provide a mask then only the
non-empty fields present in the request will be overwritten.
Set the update_mask to ``*`` to override all fields.
Updatable fields:
- ``description``
- ``labels``
- ``monitoring_config.snapshot_analysis.disabled``
- ``monitoring_config.snapshot_analysis.monitoring_interval``
"""
feature = proto.Field(proto.MESSAGE, number=1, message=gca_feature.Feature,)
update_mask = proto.Field(
proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask,
)
class DeleteFeatureRequest(proto.Message):
r"""Request message for
[FeaturestoreService.DeleteFeature][google.cloud.aiplatform.v1.FeaturestoreService.DeleteFeature].
Attributes:
name (str):
Required. The name of the Feature to be deleted. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}/features/{feature}``
"""
name = proto.Field(proto.STRING, number=1,)
class CreateFeaturestoreOperationMetadata(proto.Message):
r"""Details of operations that perform create Featurestore.
Attributes:
generic_metadata (google.cloud.aiplatform_v1.types.GenericOperationMetadata):
Operation metadata for Featurestore.
"""
generic_metadata = proto.Field(
proto.MESSAGE, number=1, message=operation.GenericOperationMetadata,
)
class UpdateFeaturestoreOperationMetadata(proto.Message):
r"""Details of operations that perform update Featurestore.
Attributes:
generic_metadata (google.cloud.aiplatform_v1.types.GenericOperationMetadata):
Operation metadata for Featurestore.
"""
generic_metadata = proto.Field(
proto.MESSAGE, number=1, message=operation.GenericOperationMetadata,
)
class ImportFeatureValuesOperationMetadata(proto.Message):
r"""Details of operations that perform import Feature values.
Attributes:
generic_metadata (google.cloud.aiplatform_v1.types.GenericOperationMetadata):
Operation metadata for Featurestore import
Feature values.
imported_entity_count (int):
Number of entities that have been imported by
the operation.
imported_feature_value_count (int):
Number of Feature values that have been
imported by the operation.
invalid_row_count (int):
The number of rows in the input source that weren't imported due
to one of the following:
- Not having any featureValues.
- Having a null entityId.
- Having a null timestamp.
- Not being parsable (applicable for CSV sources).
"""
generic_metadata = proto.Field(
proto.MESSAGE, number=1, message=operation.GenericOperationMetadata,
)
imported_entity_count = proto.Field(proto.INT64, number=2,)
imported_feature_value_count = proto.Field(proto.INT64, number=3,)
invalid_row_count = proto.Field(proto.INT64, number=6,)
class ExportFeatureValuesOperationMetadata(proto.Message):
r"""Details of operations that exports Features values.
Attributes:
generic_metadata (google.cloud.aiplatform_v1.types.GenericOperationMetadata):
Operation metadata for Featurestore export
Feature values.
"""
generic_metadata = proto.Field(
proto.MESSAGE, number=1, message=operation.GenericOperationMetadata,
)
class BatchReadFeatureValuesOperationMetadata(proto.Message):
r"""Details of operations that batch reads Feature values.
Attributes:
generic_metadata (google.cloud.aiplatform_v1.types.GenericOperationMetadata):
Operation metadata for Featurestore batch
read Features values.
"""
generic_metadata = proto.Field(
proto.MESSAGE, number=1, message=operation.GenericOperationMetadata,
)
class CreateEntityTypeOperationMetadata(proto.Message):
r"""Details of operations that perform create EntityType.
Attributes:
generic_metadata (google.cloud.aiplatform_v1.types.GenericOperationMetadata):
Operation metadata for EntityType.
"""
generic_metadata = proto.Field(
proto.MESSAGE, number=1, message=operation.GenericOperationMetadata,
)
class CreateFeatureOperationMetadata(proto.Message):
r"""Details of operations that perform create Feature.
Attributes:
generic_metadata (google.cloud.aiplatform_v1.types.GenericOperationMetadata):
Operation metadata for Feature.
"""
generic_metadata = proto.Field(
proto.MESSAGE, number=1, message=operation.GenericOperationMetadata,
)
class BatchCreateFeaturesOperationMetadata(proto.Message):
r"""Details of operations that perform batch create Features.
Attributes:
generic_metadata (google.cloud.aiplatform_v1.types.GenericOperationMetadata):
Operation metadata for Feature.
"""
generic_metadata = proto.Field(
proto.MESSAGE, number=1, message=operation.GenericOperationMetadata,
)
__all__ = tuple(sorted(__protobuf__.manifest))<|fim▁end|> | - ``online_serving_config.fixed_node_count``: Supports
``=``, ``!=``, ``<``, ``>``, ``<=``, and ``>=``
comparisons. |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import React from 'react';
import { storiesOf } from '@storybook/react';
import moment from 'moment';
import {
withKnobs,
number,
object,
boolean,
text,
select,
date,
array,
color,
files,
} from '../../src';
const stories = storiesOf('Example of Knobs', module);
stories.addDecorator(withKnobs);
stories.add('simple example', () => <button>{text('Label', 'Hello Button')}</button>);
stories.add('with all knobs', () => {
const name = text('Name', 'Tom Cary');
const dob = date('DOB', new Date('January 20 1887'));
const bold = boolean('Bold', false);
const selectedColor = color('Color', 'black');<|fim▁hole|> const passions = array('Passions', ['Fishing', 'Skiing']);
const images = files('Happy Picture', 'image/*', [
'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAQAAAC1+jfqAAAABGdBTUEAALGPC/xhBQAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAAAmJLR0QA/4ePzL8AAAAHdElNRQfiARwMCyEWcOFPAAAAP0lEQVQoz8WQMQoAIAwDL/7/z3GwghSp4KDZyiUpBMCYUgd8rehtH16/l3XewgU2KAzapjXBbNFaPS6lDMlKB6OiDv3iAH1OAAAAJXRFWHRkYXRlOmNyZWF0ZQAyMDE4LTAxLTI4VDEyOjExOjMzLTA3OjAwlAHQBgAAACV0RVh0ZGF0ZTptb2RpZnkAMjAxOC0wMS0yOFQxMjoxMTozMy0wNzowMOVcaLoAAAAASUVORK5CYII=',
]);
const customStyle = object('Style', {
fontFamily: 'Arial',
padding: 20,
});
const style = {
...customStyle,
fontWeight: bold ? 800 : 400,
favoriteNumber,
color: selectedColor,
};
return (
<div style={style}>
I'm {name} and I was born on "{moment(dob).format('DD MMM YYYY')}" I like:{' '}
<ul>{passions.map(p => <li key={p}>{p}</li>)}</ul>
<p>My favorite number is {favoriteNumber}.</p>
<p>My most comfortable room temperature is {comfortTemp} degrees Fahrenheit.</p>
<p>
When I am happy I look like this: <img src={images[0]} alt="happy" />
</p>
</div>
);
});
stories.add('dates Knob', () => {
const today = date('today');
const dob = date('DOB', null);
const myDob = date('My DOB', new Date('July 07 1993'));
return (
<ul style={{ listStyleType: 'none', listStyle: 'none', paddingLeft: '15px' }}>
<li>
<p>
<b>Javascript Date</b> default value, passes date value
</p>
<blockquote>
<code>const myDob = date('My DOB', new Date('July 07 1993'));</code>
<pre>{`// I was born in: "${moment(myDob).format('DD MMM YYYY')}"`}</pre>
</blockquote>
</li>
<li>
<p>
<b>undefined</b> default value passes today's date
</p>
<blockquote>
<code>const today = date('today');</code>
<pre>{`// Today's date is: "${moment(today).format('DD MMM YYYY')}"`}</pre>
</blockquote>
</li>
<li>
<p>
<b>null</b> default value passes null value
</p>
<blockquote>
<code>const dob = date('DOB', null);</code>
<pre>
{`// You were born in: "${
dob ? moment(dob).format('DD MMM YYYY') : 'Please select date.'
}"`}
</pre>
</blockquote>
</li>
</ul>
);
});
stories.add('dynamic knobs', () => {
const showOptional = select('Show optional', ['yes', 'no'], 'yes');
return (
<div>
<div>{text('compulsory', 'I must be here')}</div>
{showOptional === 'yes' ? <div>{text('optional', 'I can disappear')}</div> : null}
</div>
);
});
stories.add('without any knob', () => <button>This is a button</button>);<|fim▁end|> | const favoriteNumber = number('Favorite Number', 42);
const comfortTemp = number('Comfort Temp', 72, { range: true, min: 60, max: 90, step: 1 });
|
<|file_name|>middleware.py<|end_file_name|><|fim▁begin|># Copyright (c) 2008 Mikeal Rogers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from dealer.git import git
from django.template import RequestContext
requestcontext = None
class MakoMiddleware(object):
def process_request(self, request):
global requestcontext
requestcontext = RequestContext(request)
requestcontext['is_secure'] = request.is_secure()
requestcontext['site'] = request.get_host()<|fim▁hole|><|fim▁end|> | requestcontext['REVISION'] = git.revision |
<|file_name|>system.py<|end_file_name|><|fim▁begin|>import math
import inspect
import numpy as np
import numpy.linalg as linalg
import scipy as sp
import scipy.optimize
import scipy.io
from itertools import product
import trep
import _trep
from _trep import _System
from frame import Frame
from finput import Input
from config import Config
from force import Force
from constraint import Constraint
from potential import Potential
from util import dynamics_indexing_decorator
class System(_System):
"""
The System class represents a complete mechanical system
comprising coordinate frames, configuration variables, potential
energies, constraints, and forces.
"""
def __init__(self):
"""
Create a new mechanical system.
"""
_System.__init__(self)
# _System variables need to be initialized (cleaner here than in C w/ ref counting)
self._frames = tuple()
self._configs = tuple()
self._dyn_configs = tuple()
self._kin_configs = tuple()
self._potentials = tuple()
self._forces = tuple()
self._inputs = tuple()
self._constraints = tuple()
self._masses = tuple()
self._hold_structure_changes = 0
self._structure_changed_funcs = []
# Hold off the initial structure update until we have a world
# frame.
self._hold_structure_changes = 1
self._world_frame = Frame(self, trep.WORLD, None, name="World")
self._hold_structure_changes = 0
self._structure_changed()
def __repr__(self):
return '<System %d configs, %d frames, %d potentials, %d constraints, %d forces, %d inputs>' % (
len(self.configs),
len(self.frames),
len(self.potentials),
len(self.constraints),
len(self.forces),
len(self.inputs))
@property
def nQ(self):
"""Number of configuration variables in the system."""
return len(self.configs)
@property
def nQd(self):
"""Number of dynamic configuration variables in the system."""
return len(self.dyn_configs)
@property
def nQk(self):
"""Number of kinematic configuration variables in the system."""
return len(self.kin_configs)
@property
def nu(self):
"""Number of inputs in the system."""
return len(self.inputs)
@property
def nc(self):
"""Number of constraints in the system."""
return len(self.constraints)
@property
def t(self):
"""Current time of the system."""
return self._time
@t.setter
def t(self, value):
self._clear_cache()
self._time = value
def get_frame(self, identifier):
"""
get_frame(identifier) -> Frame,None
Return the first frame with the matching identifier. The
identifier can be the frame name, index, or the frame itself.
Raise an exception if no match is found.
"""
return self._get_object(identifier, Frame, self.frames)
def get_config(self, identifier):
"""
get_config(identifier) -> Config,None
Return the first configuration variable with the matching
identifier. The identifier can be the config name, index, or
the config itself. Raise an exception if no match is found.
"""
return self._get_object(identifier, Config, self.configs)
def get_potential(self, identifier):
"""
get_potential(identifier) -> Potential,None
Return the first potential with the matching identifier. The
identifier can be the constraint name, index, or the
constraint itself. Raise an exception if no match is found.
"""
return self._get_object(identifier, Potential, self.potentials)
def get_constraint(self, identifier):
"""
get_constraint(identifier) -> Constraint,None
Return the first constraint with the matching identifier. The
identifier can be the constraint name, index, or the
constraint itself. Raise an exception if no match is found.
"""
return self._get_object(identifier, Constraint, self.constraints)
def get_force(self, identifier):
"""
get_force(identifier) -> Force,None
Return the first force with the matching identifier. The
identifier can be the force name, index, or the
force itself. Raise an exception if no match is found.
"""
return self._get_object(identifier, Force, self.forces)
def get_input(self, identifier):
"""
get_input(identifier) -> Input,None
Return the first input with the matching identifier. The<|fim▁hole|> """
return self._get_object(identifier, Input, self.inputs)
def satisfy_constraints(self, tolerance=1e-10, verbose=False,
keep_kinematic=False, constant_q_list=None):
"""
Modify the current configuration to satisfy the system
constraints.
The configuration velocity (i.e., config.dq) is simply set to
zero. This should be fixed in the future.
Passing True for keep_kinematic prevents the method from
modifying kinematic configuration variables.
Passing a list (or tuple) of configurations to constant_q_list
keeps every element of the list constant. The method uses
trep.System.get_config, so the list may contain configuration
objects, indices in Q, or names. Passing anything for
constant_q_list overrides the value of keep_kinematic.
"""
self.dq = 0
if keep_kinematic:
names = [q.name for q in self.dyn_configs]
q0 = self.qd
else:
names = [q.name for q in self.configs]
q0 = self.q
if constant_q_list:
connames = [self.get_config(q).name for q in constant_q_list]
names = []
for q in self.configs:
if q.name not in connames:
names.append(q.name)
q0 = np.array([self.q[self.get_config(name).index] for name in names])
def func(q):
v = (q - q0)
return np.dot(v,v)
def fprime(q):
return 2*(q-q0)
def f_eqcons(q):
self.q = dict(zip(names,q))
return np.array([c.h() for c in self.constraints])
def fprime_eqcons(q):
self.q = dict(zip(names,q))
return np.array([[c.h_dq(self.get_config(q)) for q in names] for c in self.constraints])
(q_opt, fx, its, imode, smode) = sp.optimize.fmin_slsqp(func, q0, f_eqcons=f_eqcons,
fprime=fprime, fprime_eqcons=fprime_eqcons,
acc=tolerance, iter=100*self.nQ,
iprint=0, full_output=True)
if imode != 0:
raise StandardError("Minimization failed: %s" % smode)
self.q = dict(zip(names,q_opt))
return self.q
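# Illustrative usage sketch (not part of the original source): assuming a
# system `sys` has been built with at least one constraint, this call
# projects the current configuration back onto the constraint manifold,
# after which the constraint residuals should be near zero:
#
#     sys.q = perturbed_q                      # hypothetical perturbed configuration
#     sys.satisfy_constraints(tolerance=1e-10)
#     residuals = [c.h() for c in sys.constraints]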
def minimize_potential_energy(self, tolerance=1e-10, verbose=False,
keep_kinematic=False, constant_q_list=None):
"""
Find a nearby configuration where the potential energy is
minimized. Useful for finding nearby equilibrium points.
If a minimum is found, all constraints will be satisfied as well.
The configuration velocity (i.e., config.dq) is set to
zero, which ensures the kinetic energy is zero.
Passing True for keep_kinematic prevents the method from
modifying kinematic configuration variables.
Passing a list (or tuple) of configurations to constant_q_list
keeps every element of the list constant. The method uses
trep.System.get_config, so the list may contain configuration
objects, indices in Q, or names. Passing anything for
constant_q_list overrides the value of keep_kinematic.
"""
self.dq = 0
if keep_kinematic:
names = [q.name for q in self.dyn_configs]
q0 = self.qd
else:
names = [q.name for q in self.configs]
q0 = self.q
if constant_q_list:
connames = [self.get_config(q).name for q in constant_q_list]
names = []
for q in self.configs:
if q.name not in connames:
names.append(q.name)
q0 = np.array([self.q[self.get_config(name).index] for name in names])
def func(q):
self.q = dict(zip(names,q))
return -self.L()
def fprime(q):
return [-self.L_dq(self.get_config(name)) for name in names]
def f_eqcons(q):
self.q = dict(zip(names,q))
return np.array([c.h() for c in self.constraints])
def fprime_eqcons(q):
self.q = dict(zip(names,q))
return np.array([[c.h_dq(self.get_config(q)) for q in names] for c in self.constraints])
(q_opt, fx, its, imode, smode) = sp.optimize.fmin_slsqp(func, q0, f_eqcons=f_eqcons,
fprime=fprime, fprime_eqcons=fprime_eqcons,
acc=tolerance, iter=100*self.nQ,
iprint=0, full_output=True)
if imode != 0:
raise StandardError("Minimization failed: %s" % smode)
self.q = dict(zip(names,q_opt))
return self.q
def set_state(self, q=None, dq=None, u=None, ddqk=None, t=None):
"""
Set the current state of the system, not including the "output" ddqd.
"""
if q is not None: self.q = q
if dq is not None: self.dq = dq
if u is not None: self.u = u
if ddqk is not None: self.ddqk = ddqk
if t is not None: self.t = t
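# Illustrative sketch (not part of the original source): the keyword
# arguments map directly onto the q/dq/u/ddqk/t properties of this class,
# so a hypothetical call might look like:
#
#     sys.set_state(q=q0, dq=dq0, u={'torque': 0.5}, t=0.0)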
def import_frames(self, children):
"""
Adds children to this system's world frame using a special
frame definition. See Frame.import_frames() for details.
"""
self.world_frame.import_frames(children)
def export_frames(self, system_name='system', frames_name='frames', tab_size=4):
"""
Create python source code to define this system's frames.
"""
txt = ''
txt += '#'*80 + '\n'
txt += '# Frame tree definition generated by System.%s()\n\n' % inspect.stack()[0][3]
txt += 'from trep import %s\n' % ', '.join(sorted(trep.frame.frame_def_mapping.values()))
txt += '%s = [\n' % frames_name
txt += ',\n'.join([child.export_frames(1, tab_size) for child in self.world_frame.children]) + '\n'
txt += ' '*tab_size + ']\n'
txt += '%s.import_frames(%s)\n' % (system_name, frames_name)
txt += '#'*80 + '\n'
return txt
@property
def q(self):
"""Current configuration of the system."""
return np.array([q.q for q in self.configs])
@q.setter
def q(self, value):
# Writing c.q will clear system cache
if isinstance(value, (int, float)):
for q in self.configs:
q.q = value
elif isinstance(value, dict):
for name, v in value.iteritems():
self.get_config(name).q = v
else:
for q,v in zip(self.configs, value):
q.q = v
@property
def dq(self):
""" Current configuration velocity of the system """
return np.array([q.dq for q in self.configs])
@dq.setter
def dq(self, value):
# Writing c.dq will clear system cache
if isinstance(value, (int, float)):
for q in self.configs:
q.dq = value
elif isinstance(value, dict):
for name, v in value.iteritems():
self.get_config(name).dq = v
else:
for q,v in zip(self.configs, value):
q.dq = v
@property
def ddq(self):
""" Current configuration acceleration of the system """
return np.array([q.ddq for q in self.configs])
@ddq.setter
def ddq(self, value):
# Writing c.ddq will clear system cache
if isinstance(value, (int, float)):
for q in self.configs:
q.ddq = value
elif isinstance(value, dict):
for name, v in value.iteritems():
self.get_config(name).ddq = v
else:
for q,v in zip(self.configs, value):
q.ddq = v
@property
def qd(self):
"""Dynamic part of the system's current configuration."""
return np.array([q.q for q in self.dyn_configs])
@qd.setter
def qd(self, value):
# Writing c.q will clear system cache
if isinstance(value, (int, float)):
for q in self.dyn_configs:
q.q = value
elif isinstance(value, dict):
for name, v in value.iteritems():
self.get_config(name).q = v
else:
for q,v in zip(self.dyn_configs, value):
q.q = v
@property
def dqd(self):
"""Dynamic part of the system's current configuration velocity."""
return np.array([q.dq for q in self.dyn_configs])
@dqd.setter
def dqd(self, value):
# Writing c.dq will clear system cache
if isinstance(value, (int, float)):
for q in self.dyn_configs:
q.dq = value
elif isinstance(value, dict):
for name, v in value.iteritems():
self.get_config(name).dq = v
else:
for q,v in zip(self.dyn_configs, value):
q.dq = v
@property
def ddqd(self):
"""Dynamic part of the system's current configuration acceleration."""
return np.array([q.ddq for q in self.dyn_configs])
@ddqd.setter
def ddqd(self, value):
# Writing c.ddq will clear system cache
if isinstance(value, (int, float)):
for q in self.dyn_configs:
q.ddq = value
elif isinstance(value, dict):
for name, v in value.iteritems():
self.get_config(name).ddq = v
else:
for q,v in zip(self.dyn_configs, value):
q.ddq = v
@property
def qk(self):
"""Kinematic part of the system's current configuration."""
return np.array([q.q for q in self.kin_configs])
@qk.setter
def qk(self, value):
# Writing c.q will clear system cache
if isinstance(value, (int, float)):
for q in self.kin_configs:
q.q = value
elif isinstance(value, dict):
for name, v in value.iteritems():
self.get_config(name).q = v
else:
for q,v in zip(self.kin_configs, value):
q.q = v
@property
def dqk(self):
"""Kinematic part of the system's current configuration velocity."""
return np.array([q.dq for q in self.kin_configs])
@dqk.setter
def dqk(self, value):
# Writing c.dq will clear system cache
if isinstance(value, (int, float)):
for q in self.kin_configs:
q.dq = value
elif isinstance(value, dict):
for name, v in value.iteritems():
self.get_config(name).dq = v
else:
for q,v in zip(self.kin_configs, value):
q.dq = v
@property
def ddqk(self):
"""Kinematic part of the system's current configuration acceleration."""
return np.array([q.ddq for q in self.kin_configs])
@ddqk.setter
def ddqk(self, value):
# Writing c.ddq will clear system cache
if isinstance(value, (int, float)):
for q in self.kin_configs:
q.ddq = value
elif isinstance(value, dict):
for name, v in value.iteritems():
self.get_config(name).ddq = v
else:
for q,v in zip(self.kin_configs, value):
q.ddq = v
@property
def u(self):
"""Current input vector of the system."""
return np.array([u.u for u in self.inputs])
@u.setter
def u(self, value):
# Writing u.u will clear system cache
if isinstance(value, (int, float)):
for u in self.inputs:
u.u = value
elif isinstance(value, dict):
for name, v in value.iteritems():
self.get_input(name).u = v
else:
for u,v in zip(self.inputs, value):
u.u = v
@property
def world_frame(self):
"The root spatial frame of the system."
return self._world_frame
@property
def frames(self):
"Tuple of all the frames in the system."
return self._frames
@property
def configs(self):
"""
Tuple of all the configuration variables in the system.
This is always equal to self.dyn_configs + self.kin_configs
"""
return self._configs
@property
def dyn_configs(self):
"""
Tuple of all the dynamic configuration variables in the system.
"""
return self._dyn_configs
@property
def kin_configs(self):
"""
Tuple of all the kinematic configuration variables in the
system.
"""
return self._kin_configs
@property
def potentials(self):
"Tuple of all the potentials in the system."
return self._potentials
@property
def forces(self):
"Tuple of all the forces in the system."
return self._forces
@property
def inputs(self):
"Tuple of all the input variables in the system."
return self._inputs
@property
def constraints(self):
"Tuple of all the constraints in the system."
return self._constraints
@property
def masses(self):
"Tuple of all the frames with non-zero inertias."
return self._masses
def _clear_cache(self):
"""Clear the system cache."""
self._cache = 0
self._state_counter += 1
def _get_object(self, identifier, objtype, array):
"""
_get_object(identifier, objtype, array) -> object,None
Return the first item in array with a matching identifier.
The type of 'identifier' defines how the object is identified.
type(identifier) -> how identifier is used
None -> return None
int -> return array[identifier]
name -> return item in array such that item.name == identifier
objtype -> return identifier
Raise an exception if 'identifier' is a different type or
there is an error/no match.
"""
if identifier == None:
return None
elif isinstance(identifier, objtype):
return identifier
elif isinstance(identifier, int):
return array[identifier]
elif isinstance(identifier, str):
for item in array:
if item.name == identifier:
return item
raise KeyError("%s with name '%s' not found" % (objtype, identifier))
else:
raise TypeError()
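# Illustrative sketch (not part of the original source) of the identifier
# forms accepted by the public get_* methods, which all delegate here:
#
#     sys.get_config(0)           # by index into sys.configs
#     sys.get_config('theta')     # by name (hypothetical config name)
#     sys.get_config(config_obj)  # by passing the Config object itself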
def _add_kin_config(self, config):
"""
_add_kin_config(config) -> Append config to the kin_configs
tuple.
"""
assert isinstance(config, trep.Config)
self._kin_configs += (config,)
def _add_dyn_config(self, config):
"""
_add_dyn_config(config) -> Append config to the dyn_configs
tuple.
"""
assert isinstance(config, trep.Config)
self._dyn_configs += (config,)
def _add_constraint(self, constraint):
"""
_add_constraint(constraint) -> Append constraint to the
constraint tuple.
"""
assert isinstance(constraint, trep.Constraint)
self._constraints += (constraint,)
def _add_potential(self, potential):
"""
_add_potential(potential) -> Append potential to the
potentials tuple.
"""
assert isinstance(potential, trep.Potential)
self._potentials += (potential,)
def _add_input(self, finput):
"""
_add_input(finput) -> Append input to the inputs tuple.
"""
assert isinstance(finput, trep.Input)
self._inputs += (finput,)
def _add_force(self, force):
"""
_add_force(force) -> Append force to the forces tuple.
"""
assert isinstance(force, trep.Force)
self._forces += (force,)
def add_structure_changed_func(self, function):
"""
Register a function to call whenever the system structure
changes. This includes adding and removing frames,
configuration variables, constraints, potentials, and forces.
"""
self._structure_changed_funcs.append(function)
def hold_structure_changes(self):
"""
Prevent the system from calling System._update_structure()
(mostly). Useful when building a large system to avoid
needlessly allocating and deallocating memory.
"""
self._hold_structure_changes += 1
def resume_structure_changes(self):
"""
Stop preventing the system from calling
System._update_structure(). The structure will only be
updated once every hold has been removed, so calling this does
not guarantee that the structure will be immediately updated.
"""
if self._hold_structure_changes == 0:
raise StandardError("System.resume_structure_changes() called" \
" when _hold_structure_changes is 0")
self._hold_structure_changes -= 1
if self._hold_structure_changes == 0:
self._structure_changed()
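# Illustrative usage sketch (not part of the original source): wrap a large
# batch of additions so the structure cache is only rebuilt once.
#
#     sys.hold_structure_changes()
#     sys.import_frames(frame_definitions)   # hypothetical frame definitions
#     sys.resume_structure_changes()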
def _structure_changed(self):
"""
Updates variables so that System is internally consistent.
There is a lot of duplicate information throughout a System,
for either convenience or performance reasons. For duplicate
information, one place is considered the 'master'. These are
places that other functions manipulate. The other duplicates
are created from the 'master'.
The variables controlled by this function include:
system.frames - This tuple is built by descending the frames
tree and collecting each frame.
system.configs - This tuple is built by concatenating
system.dyn_configs and system.kin_configs.
config.config_gen - config_gen is set by descending down the
tree while keeping track of how many configuration variables
have been seen.
config.index - 'index' is set using the config's index in
system.configs
config.k_index - 'k_index' is set using the config's index in
system.kin_configs or to -1 for dynamic configuration
variables.
system.masses - This tuple is set by running through
system.frames and collecting any frame that has non-zero
inertia properties.
frame.cache_index - Built for each frame by walking up the
tree and collecting every configuration variable that is
encountered. This is set in Frame._structure_changed()
config.masses - Built for each config by looking at each frame
in self.masses and collecting those that depend on the config.
Finally, we call all the registered structure update functions
for any external objects that need to update their own
structures.
"""
# When we build big systems, we waste a lot of time building
# the cache over and over again. Instead, we can turn off the
# updating for a bit, and then do it once when we're
# done.
if self._hold_structure_changes != 0:
return
# Cache value dependencies:
# system.frames :depends on: none
# system.configs :depends on: none
# config.config_gen :depends on: none
# config.index :depend on: system.configs
# system.masses :depends on: none
# frame.cache_index :depends on: config.config_gen
# config.masses :depends on: frame.cache_index, system.masses
self._frames = tuple(self.world_frame.flatten_tree())
self._configs = self.dyn_configs + self.kin_configs
# Initialize config_gens to the number of configuration variables. Configs that do not drive
# frame transformations will retain this value
for config in self.configs:
config._config_gen = len(self._configs)
def update_config_gen(frame, index):
if frame.config != None:
frame.config._config_gen = index;
index += 1
for child in frame.children:
update_config_gen(child, index)
update_config_gen(self.world_frame, 0)
for (i, config) in enumerate(self.configs):
config._index = i
config._k_index = -1
for (i, config) in enumerate(self.kin_configs):
config._k_index = i
for (i, constraint) in enumerate(self.constraints):
constraint._index = i
for (i, finput) in enumerate(self.inputs):
finput._index = i
# Find all frames with non-zero masses
self._masses = tuple([f for f in self.frames
if f.mass != 0.0
or f.Ixx != 0.0
or f.Iyy != 0.0
or f.Izz != 0.0])
self.world_frame._structure_changed()
for config in self.configs:
config._masses = tuple([f for f in self._masses
if config in f._cache_index])
# Create numpy arrays used for calculation and storage
self._f = np.zeros( (self.nQd,), np.double, 'C')
self._lambda = np.zeros( (self.nc,), np.double, 'C')
self._D = np.zeros( (self.nQd,), np.double, 'C')
self._Ad = np.zeros((self.nc, self.nQd), np.double, 'C')
self._AdT = np.zeros((self.nQd, self.nc), np.double, 'C')
self._M_lu = np.zeros((self.nQd, self.nQd), np.double, 'C')
self._M_lu_index = np.zeros((self.nQd,), np.int, 'C')
self._A_proj_lu = np.zeros((self.nc, self.nc), np.double, 'C')
self._A_proj_lu_index = np.zeros((self.nc, ), np.int, 'C')
self._Ak = np.zeros( (self.nc, self.nQk), np.double, 'C')
self._Adt = np.zeros( (self.nc, self.nQ), np.double, 'C')
self._Ad_dq = np.zeros( (self.nQ, self.nc, self.nQd), np.double, 'C')
self._Ak_dq = np.zeros( (self.nQ, self.nc, self.nQk), np.double, 'C')
self._Adt_dq = np.zeros( (self.nQ, self.nc, self.nQ), np.double, 'C')
self._D_dq = np.zeros( (self.nQ, self.nQd), np.double, 'C')
self._D_ddq = np.zeros( (self.nQ, self.nQd), np.double, 'C')
self._D_du = np.zeros( (self.nu, self.nQd), np.double, 'C')
self._D_dk = np.zeros( (self.nQk, self.nQd), np.double, 'C')
self._f_dq = np.zeros( (self.nQ, self.nQd), np.double, 'C')
self._f_ddq = np.zeros( (self.nQ, self.nQd), np.double, 'C')
self._f_du = np.zeros( (self.nu, self.nQd), np.double, 'C')
self._f_dk = np.zeros( (self.nQk, self.nQd), np.double, 'C')
self._lambda_dq = np.zeros( (self.nQ, self.nc), np.double, 'C')
self._lambda_ddq = np.zeros( (self.nQ, self.nc), np.double, 'C')
self._lambda_du = np.zeros( (self.nu, self.nc), np.double, 'C')
self._lambda_dk = np.zeros( (self.nQk, self.nc), np.double, 'C')
self._Ad_dqdq = np.zeros( (self.nQ, self.nQ, self.nc, self.nQd), np.double, 'C')
self._Ak_dqdq = np.zeros( (self.nQ, self.nQ, self.nc, self.nQk), np.double, 'C')
self._Adt_dqdq = np.zeros( (self.nQ, self.nQ, self.nc, self.nQ), np.double, 'C')
self._D_dqdq = np.zeros( (self.nQ, self.nQ, self.nQd), np.double, 'C')
self._D_ddqdq = np.zeros( (self.nQ, self.nQ, self.nQd), np.double, 'C')
self._D_ddqddq = np.zeros( (self.nQ, self.nQ, self.nQd), np.double, 'C')
self._D_dkdq = np.zeros( (self.nQk, self.nQ, self.nQd), np.double, 'C')
self._D_dudq = np.zeros( (self.nu, self.nQ, self.nQd), np.double, 'C')
self._D_duddq = np.zeros( (self.nu, self.nQ, self.nQd), np.double, 'C')
self._D_dudu = np.zeros( (self.nu, self.nu, self.nQd), np.double, 'C')
self._f_dqdq = np.zeros( (self.nQ, self.nQ, self.nQd), np.double, 'C')
self._f_ddqdq = np.zeros( (self.nQ, self.nQ, self.nQd), np.double, 'C')
self._f_ddqddq = np.zeros( (self.nQ, self.nQ, self.nQd), np.double, 'C')
self._f_dkdq = np.zeros( (self.nQk, self.nQ, self.nQd), np.double, 'C')
self._f_dudq = np.zeros( (self.nu, self.nQ, self.nQd), np.double, 'C')
self._f_duddq = np.zeros( (self.nu, self.nQ, self.nQd), np.double, 'C')
self._f_dudu = np.zeros( (self.nu, self.nu, self.nQd), np.double, 'C')
self._lambda_dqdq = np.zeros( (self.nQ, self.nQ, self.nc), np.double, 'C')
self._lambda_ddqdq = np.zeros( (self.nQ, self.nQ, self.nc), np.double, 'C')
self._lambda_ddqddq = np.zeros( (self.nQ, self.nQ, self.nc), np.double, 'C')
self._lambda_dkdq = np.zeros( (self.nQk, self.nQ, self.nc), np.double, 'C')
self._lambda_dudq = np.zeros( (self.nu, self.nQ, self.nc), np.double, 'C')
self._lambda_duddq = np.zeros( (self.nu, self.nQ, self.nc), np.double, 'C')
self._lambda_dudu = np.zeros( (self.nu, self.nu, self.nc), np.double, 'C')
self._temp_nd = np.zeros( (self.nQd,), np.double, 'C')
self._temp_ndnc = np.zeros( (self.nQd, self.nc), np.double, 'C')
self._M_dq = np.zeros( (self.nQ, self.nQ, self.nQ), np.double, 'C')
self._M_dqdq = np.zeros( (self.nQ, self.nQ, self.nQ, self.nQ), np.double, 'C')
for func in self._structure_changed_funcs:
func()
def total_energy(self):
"""Calculate the total energy in the current state."""
return self._total_energy()
def L(self):
"""Calculate the Lagrangian at the current state."""
return self._L()
def L_dq(self, q1):
"""
Calculate the derivative of the Lagrangian with respect to the
value of q1.
"""
assert isinstance(q1, _trep._Config)
return self._L_dq(q1)
def L_dqdq(self, q1, q2):
"""
Calculate the second derivative of the Lagrangian with respect
to the value of q1 and the value of q2.
"""
assert isinstance(q1, _trep._Config)
assert isinstance(q2, _trep._Config)
return self._L_dqdq(q1, q2)
def L_dqdqdq(self, q1, q2, q3):
"""
Calculate the third derivative of the Lagrangian with respect
to the value of q1, the value of q2, and the value of q3.
"""
assert isinstance(q1, _trep._Config)
assert isinstance(q2, _trep._Config)
assert isinstance(q3, _trep._Config)
return self._L_dqdqdq(q1, q2, q3)
def L_ddq(self, dq1):
"""
Calculate the derivative of the Lagrangian with respect
to the velocity of dq1.
"""
assert isinstance(dq1, _trep._Config)
return self._L_ddq(dq1)
def L_ddqdq(self, dq1, q2):
"""
Calculate the second derivative of the Lagrangian with respect
to the velocity of dq1 and the value of q2.
"""
assert isinstance(dq1, _trep._Config)
assert isinstance(q2, _trep._Config)
return self._L_ddqdq(dq1, q2)
def L_ddqdqdq(self, dq1, q2, q3):
"""
Calculate the third derivative of the Lagrangian with respect
to the velocity of dq1, the value of q2, and the value of q3.
"""
assert isinstance(dq1, _trep._Config)
assert isinstance(q2, _trep._Config)
assert isinstance(q3, _trep._Config)
return self._L_ddqdqdq(dq1, q2, q3)
def L_ddqdqdqdq(self, dq1, q2, q3, q4):
"""
Calculate the fourth derivative of the Lagrangian with respect
to the velocity of dq1, the value of q2, the value of q3, and
the value of q4.
"""
assert isinstance(dq1, _trep._Config)
assert isinstance(q2, _trep._Config)
assert isinstance(q3, _trep._Config)
assert isinstance(q4, _trep._Config)
return self._L_ddqdqdqdq(dq1, q2, q3, q4)
def L_ddqddq(self, dq1, dq2):
"""
Calculate the second derivative of the Lagrangian with respect
to the velocity of dq1 and the velocity of dq2.
"""
assert isinstance(dq1, _trep._Config)
assert isinstance(dq2, _trep._Config)
return self._L_ddqddq(dq1, dq2)
def L_ddqddqdq(self, dq1, dq2, q3):
"""
Calculate the third derivative of the Lagrangian with respect
to the velocity of dq1, the velocity of dq2, and the value of
q3.
"""
assert isinstance(dq1, _trep._Config)
assert isinstance(dq2, _trep._Config)
assert isinstance( q3, _trep._Config)
return self._L_ddqddqdq(dq1, dq2, q3)
def L_ddqddqdqdq(self, dq1, dq2, q3, q4):
"""
Calculate the fourth derivative of the Lagrangian with respect
to the velocity of dq1, the velocity of dq2, the value of q3,
and the value of q4.
"""
assert isinstance(dq1, _trep._Config)
assert isinstance(dq2, _trep._Config)
assert isinstance( q3, _trep._Config)
assert isinstance( q4, _trep._Config)
return self._L_ddqddqdqdq(dq1, dq2, q3, q4)
@dynamics_indexing_decorator('d')
def f(self, q=None):
"""
Calculate the dynamics at the current state.
See documentation for details.
"""
self._update_cache(_trep.SYSTEM_CACHE_DYNAMICS)
return self._f[q].copy()
@dynamics_indexing_decorator('dq')
def f_dq(self, q=None, q1=None):
self._update_cache(_trep.SYSTEM_CACHE_DYNAMICS_DERIV1)
return self._f_dq[q1, q].T.copy()
@dynamics_indexing_decorator('dq')
def f_ddq(self, q=None, dq1=None):
self._update_cache(_trep.SYSTEM_CACHE_DYNAMICS_DERIV1)
return self._f_ddq[dq1, q].T.copy()
@dynamics_indexing_decorator('dk')
def f_dddk(self, q=None, k1=None):
self._update_cache(_trep.SYSTEM_CACHE_DYNAMICS_DERIV1)
return self._f_dk[k1, q].T.copy()
@dynamics_indexing_decorator('du')
def f_du(self, q=None, u1=None):
self._update_cache(_trep.SYSTEM_CACHE_DYNAMICS_DERIV1)
return self._f_du[u1, q].T.copy()
@dynamics_indexing_decorator('dqq')
def f_dqdq(self, q=None, q1=None, q2=None):
self._update_cache(_trep.SYSTEM_CACHE_DYNAMICS_DERIV2)
return self._f_dqdq[q1, q2, q].copy()
@dynamics_indexing_decorator('dqq')
def f_ddqdq(self, q=None, dq1=None, q2=None):
self._update_cache(_trep.SYSTEM_CACHE_DYNAMICS_DERIV2)
return self._f_ddqdq[dq1, q2, q].copy()
@dynamics_indexing_decorator('dqq')
def f_ddqddq(self, q=None, dq1=None, dq2=None):
self._update_cache(_trep.SYSTEM_CACHE_DYNAMICS_DERIV2)
return self._f_ddqddq[dq1, dq2, q].copy()
@dynamics_indexing_decorator('dkq')
def f_dddkdq(self, q=None, k1=None, q2=None):
self._update_cache(_trep.SYSTEM_CACHE_DYNAMICS_DERIV2)
return self._f_dkdq[k1, q2, q].copy()
@dynamics_indexing_decorator('duq')
def f_dudq(self, q=None, u1=None, q2=None):
self._update_cache(_trep.SYSTEM_CACHE_DYNAMICS_DERIV2)
return self._f_dudq[u1, q2, q].copy()
@dynamics_indexing_decorator('duq')
def f_duddq(self, q=None, u1=None, dq2=None):
self._update_cache(_trep.SYSTEM_CACHE_DYNAMICS_DERIV2)
return self._f_duddq[u1, dq2, q].copy()
@dynamics_indexing_decorator('duu')
def f_dudu(self, q=None, u1=None, u2=None):
self._update_cache(_trep.SYSTEM_CACHE_DYNAMICS_DERIV2)
return self._f_dudu[u1, u2, q].copy()
@dynamics_indexing_decorator('c')
def lambda_(self, constraint=None):
"""
Calculate the constraint forces at the current state.
"""
self._update_cache(_trep.SYSTEM_CACHE_DYNAMICS)
return self._lambda[constraint].copy()
@dynamics_indexing_decorator('cq')
def lambda_dq(self, constraint=None, q1=None):
self._update_cache(_trep.SYSTEM_CACHE_DYNAMICS_DERIV1)
return self._lambda_dq[q1, constraint].T.copy()
@dynamics_indexing_decorator('cq')
def lambda_ddq(self, constraint=None, dq1=None):
self._update_cache(_trep.SYSTEM_CACHE_DYNAMICS_DERIV1)
return self._lambda_ddq[dq1, constraint].T.copy()
@dynamics_indexing_decorator('ck')
def lambda_dddk(self, constraint=None, k1=None):
self._update_cache(_trep.SYSTEM_CACHE_DYNAMICS_DERIV1)
return self._lambda_dk[k1, constraint].T.copy()
@dynamics_indexing_decorator('cu')
def lambda_du(self, constraint=None, u1=None):
self._update_cache(_trep.SYSTEM_CACHE_DYNAMICS_DERIV1)
return self._lambda_du[u1, constraint].T.copy()
@dynamics_indexing_decorator('cqq')
def lambda_dqdq(self, constraint=None, q1=None, q2=None):
self._update_cache(_trep.SYSTEM_CACHE_DYNAMICS_DERIV2)
return self._lambda_dqdq[q1, q2, constraint].copy()
@dynamics_indexing_decorator('cqq')
def lambda_ddqdq(self, constraint=None, dq1=None, q2=None):
self._update_cache(_trep.SYSTEM_CACHE_DYNAMICS_DERIV2)
return self._lambda_ddqdq[dq1, q2, constraint].copy()
@dynamics_indexing_decorator('cqq')
def lambda_ddqddq(self, constraint=None, dq1=None, dq2=None):
self._update_cache(_trep.SYSTEM_CACHE_DYNAMICS_DERIV2)
return self._lambda_ddqddq[dq1, dq2, constraint].copy()
@dynamics_indexing_decorator('ckq')
def lambda_dddkdq(self, constraint=None, k1=None, q2=None):
self._update_cache(_trep.SYSTEM_CACHE_DYNAMICS_DERIV2)
return self._lambda_dkdq[k1, q2, constraint].copy()
@dynamics_indexing_decorator('cuq')
def lambda_dudq(self, constraint=None, u1=None, q2=None):
self._update_cache(_trep.SYSTEM_CACHE_DYNAMICS_DERIV2)
return self._lambda_dudq[u1, q2, constraint].copy()
@dynamics_indexing_decorator('cuq')
def lambda_duddq(self, constraint=None, u1=None, dq2=None):
self._update_cache(_trep.SYSTEM_CACHE_DYNAMICS_DERIV2)
return self._lambda_duddq[u1, dq2, constraint].copy()
@dynamics_indexing_decorator('cuu')
def lambda_dudu(self, constraint=None, u1=None, u2=None):
self._update_cache(_trep.SYSTEM_CACHE_DYNAMICS_DERIV2)
return self._lambda_dudu[u1, u2, constraint].copy()
def test_derivative_dq(self, func, func_dq, delta=1e-6, tolerance=1e-7,
verbose=False, test_name='<unnamed>'):
"""
Test the derivative of a function with respect to a
configuration variable value against its numerical
approximation.
func -> Callable taking no arguments and returning float or np.array
func_dq -> Callable taking one configuration variable argument
and returning a float or np.array.
delta -> perturbation to the current configuration used to
calculate the numeric approximation.
tolerance -> acceptable difference between the approximation
and exact value. (|exact - approx| <= tolerance)
verbose -> Boolean indicating if a message should be printed for failures.
test_name -> String identifier to print out when reporting messages
when verbose is true.
Returns False if any tests fail and True otherwise.
"""
q0 = self.q
tests_total = 0
tests_failed = 0
for q in self.configs:
self.q = q0
dy_exact = func_dq(q)
delta_q = q0.copy()
delta_q[q.index] -= delta
self.q = delta_q
y0 = func()
delta_q = q0.copy()
delta_q[q.index] += delta
self.q = delta_q
y1 = func()
dy_approx = (y1 - y0)/(2*delta)
error = np.linalg.norm(dy_exact - dy_approx)
tests_total += 1
if math.isnan(error) or error > tolerance:
tests_failed += 1
if verbose:
print "Test '%s' failed for dq derivative of '%s'." % (test_name, q)
print " Error: %f > %f" % (error, tolerance)
print " Approx dy: %s" % dy_approx
print " Exact dy: %s" % dy_exact
if verbose:
if tests_failed == 0:
print "%d tests passing." % tests_total
else:
print "%d/%d tests FAILED. <#######" % (tests_failed, tests_total)
# Reset configuration
self.q = q0
return not tests_failed
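# Illustrative usage sketch (not part of the original source): check the
# first derivative of the Lagrangian against its numeric approximation.
#
#     ok = sys.test_derivative_dq(sys.L, sys.L_dq,
#                                 delta=1e-6, tolerance=1e-7,
#                                 verbose=True, test_name='L_dq')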
def test_derivative_ddq(self, func, func_ddq, delta=1e-6, tolerance=1e-7,
verbose=False, test_name='<unnamed>'):
"""
Test the derivative of a function with respect to a
configuration variable's time derivative and its numerical
approximation.
func -> Callable taking no arguments and returning float or np.array
func_ddq -> Callable taking one configuration variable argument
and returning a float or np.array.
delta -> perturbation to the current configuration to
calculate the numeric approximation.
tolerance -> acceptable difference between the approximation
and exact value. (|exact - approx| <= tolerance)
verbose -> Boolean indicating if a message should be printed for failures.
test_name -> String identifier to print out when reporting messages
when verbose is true.
Returns False if any tests fail and True otherwise.
"""
dq0 = self.dq
tests_total = 0
tests_failed = 0
for q in self.configs:
self.dq = dq0
dy_exact = func_ddq(q)
delta_dq = dq0.copy()
delta_dq[q.index] -= delta
self.dq = delta_dq
y0 = func()
delta_dq = dq0.copy()
delta_dq[q.index] += delta
self.dq = delta_dq
y1 = func()
dy_approx = (y1 - y0)/(2*delta)
error = np.linalg.norm(dy_exact - dy_approx)
tests_total += 1
if math.isnan(error) or error > tolerance:
tests_failed += 1
if verbose:
print "Test '%s' failed for dq derivative of '%s'." % (test_name, q)
print " Error: %f > %f" % (error, tolerance)
print " Approx dy: %f" % dy_approx
print " Exact dy: %f" % dy_exact
if verbose:
if tests_failed == 0:
print "%d tests passing." % tests_total
else:
print "%d/%d tests FAILED. <#######" % (tests_failed, tests_total)
# Reset velocity
self.dq = dq0
return not tests_failed
# Suppressing a scipy.io.savemat warning.
import warnings
warnings.simplefilter("ignore", FutureWarning)
def save_trajectory(filename, system, t, Q=None, p=None, v=None, u=None, rho=None):
# Save trajectory to a matlab file. t is a 1D numpy array.
# Q, p, v, u, and rho are expected to be numpy arrays of the appropriate
# dimensions or None
t = np.array(t)
data = { 'time' : np.array(t) }
if Q is not None: data['Q'] = np.array(Q)
if p is not None: data['p'] = np.array(p)
if v is not None: data['v'] = np.array(v)
if u is not None: data['u'] = np.array(u)
if rho is not None: data['rho'] = np.array(rho)
# Build indices - Convert to cells so they are well behaved in matlab
data['Q_index'] = np.array([q.name for q in system.configs], dtype=np.object)
data['p_index'] = np.array([q.name for q in system.dyn_configs], dtype=np.object)
data['v_index'] = np.array([q.name for q in system.kin_configs], dtype=np.object)
data['u_index'] = np.array([u.name for u in system.inputs], dtype=np.object)
data['rho_index'] = np.array([q.name for q in system.kin_configs], dtype=np.object)
sp.io.savemat(filename, data)
def load_trajectory(filename, system=None):
data = sp.io.loadmat(filename)
# Load time as a 1D array
t = data['time'].squeeze()
Q_in = data.get('Q', None)
p_in = data.get('p', None)
v_in = data.get('v', None)
u_in = data.get('u', None)
rho_in = data.get('rho', None)
Q_index = [str(s[0]).strip() for s in data['Q_index'].ravel()]
p_index = [str(s[0]).strip() for s in data['p_index'].ravel()]
v_index = [str(s[0]).strip() for s in data['v_index'].ravel()]
u_index = [str(s[0]).strip() for s in data['u_index'].ravel()]
rho_index = [str(s[0]).strip() for s in data['rho_index'].ravel()]
# If no system was given, just return the data as it was along
# with the indices.
if system is None:
return (t,
(Q_index, Q_in),
(p_index, p_in),
(v_index, v_in),
(u_index, u_in),
(rho_index, rho_in))
else:
# If a system was specified, reorganize the data to match the
# system's layout.
if Q_in is not None:
Q = np.zeros((len(t), system.nQ))
for config in system.configs:
if config.name in Q_index:
Q[:,config.index] = Q_in[:, Q_index.index(config.name)]
else:
Q = None
if p_in is not None:
p = np.zeros((len(t), system.nQd))
for config in system.dyn_configs:
if config.name in p_index:
p[:,config.index] = p_in[:, p_index.index(config.name)]
else:
p = None
if v_in is not None:
v = np.zeros((len(t), system.nQk))
for config in system.kin_configs:
if config.name in v_index:
v[:,config.k_index] = v_in[:, v_index.index(config.name)]
else:
v = None
if u_in is not None:
u = np.zeros((len(t)-1, system.nu))
for finput in system.inputs:
if finput.name in u_index:
u[:,finput.index] = u_in[:, u_index.index(finput.name)]
else:
u = None
if rho_in is not None:
rho = np.zeros((len(t)-1, system.nQk))
for config in system.kin_configs:
if config.name in rho_index:
rho[:,config.k_index] = rho_in[:, rho_index.index(config.name)]
else:
rho = None
return t,Q,p,v,u,rho<|fim▁end|> | identifier can be the input name, index, or the
input itself. Raise an exception if no match is found. |
<|file_name|>package-info.java<|end_file_name|><|fim▁begin|>/**
* @author Oleksandr Prunyak ([email protected])
* @version $Id$
* @since 0.1<|fim▁hole|><|fim▁end|> | */
package ru.job4j.loop; |
<|file_name|>changes_listeners.js<|end_file_name|><|fim▁begin|>// This code (and its parent process in changes.js) is a Node.JS listener
// listening to CouchDB's _changes feed, and is derived from
// https://github.com/mikeal/node.couch.js and
// http://dominicbarnes.us/node-couchdb-api/api/database/changes.html
// It monitors for requests to:
// (when configuring a directory's settings) fetch the contents of a URL in general
// (when a directory is already configured) download all congregation ("cong") data for a directory
// TODO: Could we use backbone-couch.js here instead of cradle, in order to use our
// Backbone model here?
var buffer = '',
http = require('http'),
https = require('https'),
ncl_dir = '/_attachments/node_changes_listeners/',
config = require('./config'),
db = config.db,
log = require('./lib').log;
//$ = require('jquery');
//var model = require('model.js').model
//stdin = process.openStdin();
// if (config.debug)
// var longjohn = require('./node_modules/longjohn')
//stdin.setEncoding('utf8');
console.log('Starting changes listener...')
// -------- Declare utility functions --------
function get_url(doc, from_url, to_html, status_flag, options){
var http_lib = http
if (doc[from_url].indexOf('https') === 0){
// Switch to using https if necessary
var http_lib = https
}
http_lib.get(doc[from_url], function(res){
var pageData = ''
res.on('data', function(chunk){
pageData += chunk
})
res.on('end', function(){
// Check to see if we got a 404 response
if (res.statusCode == '404'){
console.log('Got a 404!')
// TODO: If we got a 404, then notify the user this page doesn't exist
doc[status_flag] = '404'
db.save(doc._id, doc._rev, doc)
}else{
// Write the contents of the html variable back to the database
doc[to_html] = pageData
doc[status_flag] = 'gotten'
// console.log(new Date().getTime() + '\t n: ' + status_flag + ': ' + doc[status_flag] + ' ' + doc[from_url])
// TODO: Use Backbone here instead of cradle
db.save(doc._id, doc._rev, doc, function(err, res){
// TODO: Do anything more that needs to be done here
if (to_html == 'url_html'){
console.log('Getting url_html...handling response end')
console.log(doc)
}
if (options && options.success){
options.success()
}
});
}
})
});
}
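// Illustrative usage sketch (not part of the original source): fetch the HTML
// behind doc.url into doc.url_html and flip its status flag, mirroring the
// calls made in the change handler below.
//
// get_url(doc, 'url', 'url_html', 'get_url_html', {success: function(){
//     console.log('fetched ' + doc.url)
// }})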
function save(options){
db.get(options.doc._id, function(err, doc){
options.doc = doc
if (!err && options.doc && options.doc._id && typeof options.doc._id !== 'undefined'){
// Save to the db all the HTML we've gotten
// TODO: This is running several times in series
options.doc[options.to_html] = options.output_array
options.doc[options.status_flag] = 'gotten';
// Deletes number downloaded since it's not needed anymore
delete options.doc[options.number_downloaded]
db.save(options.doc._id, options.doc._rev, options.doc, function(err, response){
if (err !== null){
console.error(err)
// Recurse to try saving again
// Only recurse a certain number of times, then fail, to avoid a memory leak
if (options.save_attempts <= 5){
options.save_attempts++;
// console.log('options.save_attempts: ' + options.save_attempts)
save(options)
}else{
// TODO: This is where we get an error. For some reason sometimes,
// but not always, we have the wrong revision here, and this causes get_state_url_html
// to never == 'gotten', (so the state details page doesn't display?)
// console.error('Failed to save doc: ' + options.doc._id, options.doc._rev)
}
}else{
// console.log('Succeeded at saving all the states\' HTML pages')
options.output_array_saved = true
// Remove this options.status_flag from the list of tasks
currently_getting.splice(currently_getting.indexOf(options.status_flag),1)
// Clean up some memory<|fim▁hole|> }
})
}
function recurse_then_save(i, options){
// If we've downloaded all the HTML, and haven't saved to the db yet
if (options.output_array.length == options.doc[options.from_urls].length && options.output_array_saved !== true){
options.save_attempts = 0
if (options.output_array_saved !== true){
save(options)
// console.log ("after saving all the states")
}
}
// Call the parent function recursively to enable throttling the rate of web-scraping requests
// Handle next URL
recurse_urls(i+1, options)
}
function recurse_urls(i, options){
if (typeof options.doc[options.from_urls] == 'undefined'){
// console.log(options.doc[options.from_urls])
}
// Stop running if we have reached the end of the list of URLs,
if (options.doc[options.from_urls][i] !== '' && typeof options.doc[options.from_urls][i] !== 'undefined' &&
// and don't run if we've already downloaded the HTML for this URL
typeof options.doc[i] == 'undefined'){
// TODO: Make this handle options.doc[options.method] == 'post'
http.get(options.doc[options.from_urls][i], function(res){
var pageData = ''
res.on('data', function(chunk){
pageData += chunk
})
res.on('end', function(){
// TODO: Check to see if we got a 404 response
// Append result to options.output_array
options.output_array[i] = pageData
if (options.doc[options.status_flag] !== 'getting'){
options.doc[options.status_flag] = 'getting'
// Set flag to indicate that we just reset the status_flag
options.flag_set = true
// report to the db the fact we are getting the HTML
// console.log ("before saving all the states")
db.save(options.doc._id, options.doc._rev, options.doc, function(err, response){
recurse_then_save(i, options)
})
}
// Record the number downloaded
// Don't run until the status_flag has been set
if (typeof options.flag_set !== 'undefined' && options.flag_set === true){
recurse_then_save(i, options)
}
})
})
}else{
currently_getting.splice(currently_getting.indexOf(options.status_flag),1)
}
}
currently_getting = []
function get_url_set(options){
// Don't run more than one copy of this task at a time
if (currently_getting.indexOf(options.status_flag) == -1){
// Add this options.status_flag to the list of tasks
currently_getting.push(options.status_flag)
var i = 0
options.output_array = []
options.output_array_saved = false
// Use a recursive function to allow throttling the rate of web-scraping requests
// per second to avoid getting banned by some servers.
recurse_urls(i, options)
}
}
// -------- Main routine that handles all db changes --------
// Only get changes after "update_seq"
db.get('', function(err,doc){
// TODO: This throws: TypeError: Cannot read property 'update_seq' of undefined
db.changes({since:doc.update_seq}).on('change', function (change) {
db.get(change.id, change.changes[0].rev, function(err, doc){
if (change.id && change.id.slice(0, '_design/'.length) !== '_design/') {
// This is a change to a data document
// Feed the new doc into the changes listeners
if (doc) { // Don't handle docs that have been deleted
// Watch for requests to get the contents of a URL for a church directory
// TODO: Check to see if the URL is valid
if (doc.collection == 'directory' && doc.get_url_html=='requested' && doc.url){
// E.g., when a user enters "opc.org/locator.html" into the church directory configuration page,
// then go get the contents of that URL.
get_url(doc, 'url', 'url_html', 'get_url_html')
}
if (doc.collection == 'directory' && doc.get_cong_url_html=='requested' && doc.cong_url){
get_url(doc, 'cong_url_raw', 'cong_url_html', 'get_cong_url_html', {success:function(){
// Iterate state pages' HTML
for (var i=0; i<doc.state_url_html.length; i++){
// TODO: Get each cong's URL
var state_html = doc.state_url_html[i]
// TODO: Get each cong page's HTML & write to database
}
}})
}
// Watch for requests to get the contents of a state page URL
if (doc.collection == 'directory' && doc.get_state_url_html=='requested' && doc.state_url){
// Interpolate state names into URLs
var state_page_urls = []
// console.log('before interpolating state names into URLs')
for (var i=0; i<doc.state_page_values.length; i++){
if (doc.state_page_values[i] !== ''){
state_page_urls.push(doc.state_url.replace('{state_name}', doc.state_page_values[i]))
}
}
// console.log('about to get_url_set')
doc.state_page_urls = state_page_urls
get_url_set({
doc: doc,
from_urls: 'state_page_urls',
method: 'state_url_method',
to_html: 'state_url_html',
status_flag: 'get_state_url_html',
number_downloaded: 'state_urls_gotten',
success:function(){
// TODO: Cleanup unnecessary doc attributes here? Probably that should be done in
// ImportDirectoryView.js instead.
}})
}
// Watch for requests to get the contents of a batchgeo map URL
if (doc.collection == 'directory' && doc.get_batchgeo_map_html=='requested' && doc.batchgeo_map_url){
get_url(doc, 'batchgeo_map_url', 'batchgeo_map_html', 'get_batchgeo_map_html')
}
// Watch for requests to get the contents of a JSON feed
if (doc.collection == 'directory' && doc.get_json=='requested' && doc.json_url){
get_url(doc, 'json_url', 'json', 'get_json')
}
}
}
});
});
})<|fim▁end|> | options.output_array = []
}
}) |
<|file_name|>partner.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# Author: OpenDrive Ltda
# Copyright (c) 2013 Opendrive Ltda
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#<|fim▁hole|>##############################################################################
from openerp.osv import osv, fields
from openerp.tools.translate import _
class Partner(osv.osv):
_inherit = 'res.partner'
_columns = {
'legal_representative': fields.char(
'Legal Representative',
),
}<|fim▁end|> | # You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# |
<|file_name|>DOMActions.ts<|end_file_name|><|fim▁begin|>
module tsp.DOMActions {
try {<|fim▁hole|> global.refs.moduleTarget = tsp;
} finally { }
const fsa = FileSystemActions;
const ca = CommonActions;
const pa = ParserActions;
//#region DOM Actions
export interface IUglify {
uglify(pathOfReferencingFile: string, relativeURL: string): string;
}
interface IDOMState {
htmlFile: FileSystemActions.IHTMLFile;
}
export interface IHTMLFileBuildAction extends FileSystemActions.ISelectAndProcessFileAction {
domTransformActions: IDOMTransformAction[];
}
//#endregion
//#region Element Build Actions
interface IDOMElementBuildActionState extends IDOMState {
element: JQuery;
DOMTransform?: IDOMTransformAction;
}
export interface IDOMElementBuildAction extends FileSystemActions.IWebAction {
state?: IDOMElementBuildActionState;
//isDOMElementAction?: (action: IBuildAction) => boolean;
}
export function remove(action: IDOMElementBuildAction, context: FileSystemActions.IWebContext, callback: CommonActions.ICallback) {
action.state.element.remove();
ca.endAction(action, callback);
}
export function addToJSClob(action: IDOMElementBuildAction, context: FileSystemActions.IWebContext, callback: CommonActions.ICallback) {
const state = action.state;
const src = action.state.element.attr('src');
const referringDir = context.fileManager.resolve(state.htmlFile.filePath, '..', src);
if (!context.JSOutputs) context.JSOutputs = {};
const jsOutputs = context.JSOutputs;
if (!jsOutputs[referringDir]) jsOutputs[state.htmlFile.filePath] = [];
const minifiedVersionFilePath = pa.replaceEndWith(referringDir, '.js', '.min.js');
if (!context.fileManager.doesFilePathExist(minifiedVersionFilePath)) {
console.log('minified filepath ' + minifiedVersionFilePath + ' does not exist.');
ca.endAction(action, callback);
return;
}
const minifiedContent = context.fileManager.readTextFileSync(minifiedVersionFilePath);
jsOutputs[state.htmlFile.filePath].push(minifiedContent);
action.state.element.remove();
ca.endAction(action, callback);
}
//#endregion
//#region DOM Element Css Selector
export interface IDOMElementCSSSelectorState extends IDOMState {
relativeTo?: JQuery;
elements?: JQuery;
treeNode?: IDOMTransformAction;
}
export interface IDOMElementSelector extends FileSystemActions.IWebAction {
}
export interface IDOMElementCSSSelector extends IDOMElementSelector {
cssSelector: string;
state?: IDOMElementCSSSelectorState;
}
export function selectElements(action: IDOMElementCSSSelector, context: FileSystemActions.IWebContext, callback: CommonActions.ICallback) {
if (action.debug) debugger;
const aS = action.state;
if (aS.relativeTo) {
aS.elements = aS.relativeTo.find(action.cssSelector);
} else {
//aS.elements = aS.$(action.cssSelector);
aS.elements = aS.htmlFile.$(action.cssSelector);
}
ca.endAction(action, callback);
}
//#endregion
//#region DOM Transform
export interface IDOMTransformActionState extends IDOMState {
parent?: IDOMTransformAction;
}
export interface IDOMTransformAction extends FileSystemActions.IWebAction {
selector: IDOMElementCSSSelector;
elementAction?: IDOMElementBuildAction;
state?: IDOMTransformActionState;
}
export function DOMTransform(action: IDOMTransformAction, context: FileSystemActions.IWebContext, callback: CommonActions.ICallback) {
let elements: JQuery;
let p: IDOMTransformAction;
if (action.state) {
p = action.state.parent;
}
const aSel = action.selector;
if (!aSel.state) {
aSel.state = {
htmlFile: action.state.htmlFile,
};
}
const aSelSt = aSel.state;
aSelSt.treeNode = action;
if (p && p.elementAction) {
aSelSt.relativeTo = p.elementAction.state.element;
}
aSel.do(aSel, context);
const eA = action.elementAction;
if (eA) {
//#region element Action
eA.state = {
element: null,
DOMTransform: action,
htmlFile: aSelSt.htmlFile,
};
if (eA.async) {
let i = 0;
const n = aSelSt.elements.length;
const eACallback = (err) => {
if (i < n) {
const $elem = aSelSt.htmlFile.$(aSelSt.elements[i]);
i++;
eA.state.element = $elem;
eA.do(eA, context, eACallback);
} else {
ca.endAction(action, callback);
}
};
eACallback(null);
} else {
const n = aSelSt.elements.length
for (let i = 0; i < n; i++) {
const $elem = aSelSt.htmlFile.$(aSelSt.elements[i]);
eA.state.element = $elem;
eA.do(eA, context);
}
ca.endAction(action, callback);
}
//#endregion
} else {
ca.endAction(action, callback);
}
}
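// Illustrative wiring sketch (not part of the original source): a transform
// that removes every <script> element from an HTML file. It assumes the
// remaining IWebAction members (e.g. `do`) are bound to the action functions
// defined above; treat it as a shape example rather than compiling code.
//
// const removeScripts: IDOMTransformAction = {
//     do: DOMTransform,
//     selector: { cssSelector: 'script', do: selectElements },
//     elementAction: { do: remove },
// };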
type ISubMergeHTMLFileIntoDomTransform = CommonActions.ISubMergeAction<IDOMTransformAction, FileSystemActions.IHTMLFile, IDOMTransformActionState>;
//export interface IPutHTMLFileIntoDomTransformAction extends CommonActions.IAction {
// htmlFiles: FileSystemActions.IHTMLFile[];
// domTransforms: IDOMTransformAction[];
//}
export interface IDOMTransformForEachHTMLFileAction<TContainer, TListItem> {
//htmlFilesGenerator?: (container: TContainer) => FileSystemActions.IHTMLFile[];
//domTransformsGenerator?: (container: TContainer) => IDOMTransformAction[];
//putHTMLFileIntoDomTransformGenerator?: (container: TContainer) => IPutHTMLFileIntoDomTransformAction;
htmlFiles?: FileSystemActions.IHTMLFile[];
domTransforms?: IDOMTransformAction[];
}
export function ApplyDOMTransformsOnHTMLFiles<TContainer, TListItem>(action: IDOMTransformForEachHTMLFileAction<TContainer, TListItem>, context: FileSystemActions.IWebContext, callback: CommonActions.ICallback) {
const htmlFiles = action.htmlFiles;
const domTransforms = action.domTransforms;
for (let i = 0, n = htmlFiles.length; i < n; i++) {
const htmlFile = htmlFiles[i];
for (let j = 0, m = domTransforms.length; j < m; j++) {
const domTransform = domTransforms[j];
domTransform.state = {
htmlFile: htmlFile,
};
domTransform.do(domTransform, context, null);
}
}
}
//#endregion
}
try {
global.refs.ref = ['DOMActions', tsp.DOMActions];
} finally { }<|fim▁end|> | require('./Refs'); |
<|file_name|>semisemver.go<|end_file_name|><|fim▁begin|>package utility
import (
"fmt"
"regexp"
"strconv"
"strings"
)
var semisemverLetter = regexp.MustCompile(`^([\d]+)\.([\d]+)\.([\d]+)([a-z]+)$`)<|fim▁hole|>func RewriteSemiSemVer(version string) string {
match := semisemverLetter.FindStringSubmatch(version)
if match != nil {
idx := strings.IndexByte("abcdefghijklmnopqrstuvwxyz", match[4][0])
atoi, _ := strconv.Atoi(match[3])
return fmt.Sprintf("%s.%s.%d", match[1], match[2], atoi*10000+idx)
}
return version
}<|fim▁end|> |
// for limited purpose of internal filtering/sorting non-standard semvers |
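// Illustrative example (not part of the original source): "1.2.3b" matches the
// regexp above, so RewriteSemiSemVer("1.2.3b") returns "1.2.30001"
// (patch 3*10000 plus letter index 1 for 'b'), while a standard version such
// as "1.2.3" does not match and is returned unchanged.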
<|file_name|>test_track_view.py<|end_file_name|><|fim▁begin|>from rest_framework import status
from rest_framework.test import APITestCase, APIClient
from django.core.urlresolvers import reverse
<|fim▁hole|>
class TestTrackView(APITestCase):
fixtures = ['directory', 'file', 'playlist', 'track', 'user']
def setUp(self):
self.user = User.objects.get(pk=1)
self.client = APIClient(enforce_csrf_checks=True)
self.client.force_authenticate(user=self.user)
self.serializer = TrackSerializer()
def test_unauthenticated_track_query(self):
url = reverse('api:track-list')
client = APIClient()
response = client.get(url)
self.assertEqual(response.data, UNAUTHENTICATED_RESPONSE)
def test_track_query(self):
url = reverse('api:track-list')
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
tracks = Track.objects.all()
tracks_json = [self.serializer.to_representation(track) for track in tracks]
self.assertEqual(response.data, tracks_json)
def test_track_detailed(self):
pk = 1
url = reverse('api:track-detail', args=[pk])
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
track = Track.objects.get(pk=pk)
track_json = self.serializer.to_representation(track)
self.assertEqual(response.data, track_json)<|fim▁end|> | from cherrymusic.apps.core.models import User, Track
from cherrymusic.apps.api.v1.serializers import TrackSerializer
from cherrymusic.apps.api.v1.tests.views import UNAUTHENTICATED_RESPONSE |
<|file_name|>class_peta_poco_1_1_factory.js<|end_file_name|><|fim▁begin|>var class_peta_poco_1_1_factory =
[<|fim▁hole|><|fim▁end|> | [ "ProviderName", "class_peta_poco_1_1_factory.html#ab3fd0d733879b1a810f8d5e3731c721b", null ]
]; |
<|file_name|>rpa_ul_128.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
RPA Unlikelihood (128 model)
"""
from parlai.zoo.light_whoami.whoami_download import download_with_model_type
<|fim▁hole|><|fim▁end|> |
def download(datapath):
download_with_model_type(datapath, 'rpa_ul_128', 'v1.0') |
<|file_name|>rust_project.rs<|end_file_name|><|fim▁begin|>//! Library for generating rust_project.json files from a `Vec<CrateSpec>`
//! See official documentation of file format at https://rust-analyzer.github.io/manual.html
use std::collections::{BTreeMap, BTreeSet, HashMap};
use std::io::ErrorKind;
use std::path::Path;
use anyhow::anyhow;
use serde::Serialize;
<|fim▁hole|>use crate::aquery::CrateSpec;
/// A `rust-project.json` workspace representation. See
/// [rust-analyzer documentation][rd] for a thorough description of this interface.
/// [rd]: https://rust-analyzer.github.io/manual.html#non-cargo-based-projects
#[derive(Debug, Serialize)]
pub struct RustProject {
/// Path to the directory with *source code* of
/// sysroot crates.
sysroot_src: Option<String>,
/// The set of crates comprising the current
/// project. Must include all transitive
/// dependencies as well as sysroot crate (libstd,
/// libcore and such).
crates: Vec<Crate>,
}
/// A `rust-project.json` crate representation. See
/// [rust-analyzer documentation][rd] for a thorough description of this interface.
/// [rd]: https://rust-analyzer.github.io/manual.html#non-cargo-based-projects
#[derive(Debug, Serialize)]
pub struct Crate {
/// A name used in the package's project declaration
#[serde(skip_serializing_if = "Option::is_none")]
display_name: Option<String>,
/// Path to the root module of the crate.
root_module: String,
/// Edition of the crate.
edition: String,
/// Dependencies
deps: Vec<Dependency>,
/// Should this crate be treated as a member of current "workspace".
#[serde(skip_serializing_if = "Option::is_none")]
is_workspace_member: Option<bool>,
/// Optionally specify the (super)set of `.rs` files comprising this crate.
#[serde(skip_serializing_if = "Option::is_none")]
source: Option<Source>,
/// The set of cfgs activated for a given crate, like
/// `["unix", "feature=\"foo\"", "feature=\"bar\""]`.
cfg: Vec<String>,
/// Target triple for this Crate.
#[serde(skip_serializing_if = "Option::is_none")]
target: Option<String>,
/// Environment variables, used for the `env!` macro
#[serde(skip_serializing_if = "Option::is_none")]
env: Option<BTreeMap<String, String>>,
/// Whether the crate is a proc-macro crate.
is_proc_macro: bool,
/// For proc-macro crates, path to compiled proc-macro (.so file).
#[serde(skip_serializing_if = "Option::is_none")]
proc_macro_dylib_path: Option<String>,
}
#[derive(Debug, Serialize)]
pub struct Source {
include_dirs: Vec<String>,
exclude_dirs: Vec<String>,
}
#[derive(Debug, Serialize)]
pub struct Dependency {
/// Index of a crate in the `crates` array.
#[serde(rename = "crate")]
crate_index: usize,
/// The display name of the crate.
name: String,
}
pub fn generate_rust_project(
sysroot_src: &str,
crates: &BTreeSet<CrateSpec>,
) -> anyhow::Result<RustProject> {
let mut project = RustProject {
sysroot_src: Some(sysroot_src.into()),
crates: Vec::new(),
};
let mut unmerged_crates: Vec<&CrateSpec> = crates.iter().collect();
let mut skipped_crates: Vec<&CrateSpec> = Vec::new();
let mut merged_crates_index: HashMap<String, usize> = HashMap::new();
while !unmerged_crates.is_empty() {
for c in unmerged_crates.iter() {
if c.deps
.iter()
.any(|dep| !merged_crates_index.contains_key(dep))
{
log::trace!(
"Skipped crate {} because missing deps: {:?}",
&c.crate_id,
c.deps
.iter()
.filter(|dep| !merged_crates_index.contains_key(*dep))
.cloned()
.collect::<Vec<_>>()
);
skipped_crates.push(c);
} else {
log::trace!("Merging crate {}", &c.crate_id);
merged_crates_index.insert(c.crate_id.clone(), project.crates.len());
project.crates.push(Crate {
display_name: Some(c.display_name.clone()),
root_module: c.root_module.clone(),
edition: c.edition.clone(),
deps: c
.deps
.iter()
.map(|dep| {
let crate_index = *merged_crates_index
.get(dep)
.expect("failed to find dependency on second lookup");
let dep_crate = &project.crates[crate_index as usize];
Dependency {
crate_index,
name: dep_crate
.display_name
.as_ref()
.expect("all crates should have display_name")
.clone(),
}
})
.collect(),
is_workspace_member: Some(c.is_workspace_member),
source: c.source.as_ref().map(|s| Source {
exclude_dirs: s.exclude_dirs.clone(),
include_dirs: s.include_dirs.clone(),
}),
cfg: c.cfg.clone(),
target: Some(c.target.clone()),
env: Some(c.env.clone()),
is_proc_macro: c.proc_macro_dylib_path.is_some(),
proc_macro_dylib_path: c.proc_macro_dylib_path.clone(),
});
}
}
// This should not happen, but if it does exit to prevent infinite loop.
if unmerged_crates.len() == skipped_crates.len() {
log::debug!(
"Did not make progress on {} unmerged crates. Crates: {:?}",
skipped_crates.len(),
skipped_crates
);
return Err(anyhow!(
"Failed to make progress on building crate dependency graph"
));
}
std::mem::swap(&mut unmerged_crates, &mut skipped_crates);
skipped_crates.clear();
}
Ok(project)
}
pub fn write_rust_project(
rust_project_path: &Path,
execution_root: &Path,
rust_project: &RustProject,
) -> anyhow::Result<()> {
let execution_root = execution_root
.to_str()
.ok_or_else(|| anyhow!("execution_root is not valid UTF-8"))?;
// Try to remove the existing rust-project.json. It's OK if the file doesn't exist.
match std::fs::remove_file(rust_project_path) {
Ok(_) => {}
Err(err) if err.kind() == ErrorKind::NotFound => {}
Err(err) => {
return Err(anyhow!(
"Unexpected error removing old rust-project.json: {}",
err
))
}
}
// Render the `rust-project.json` file and replace the exec root
// placeholders with the path to the local exec root.
let rust_project_content =
serde_json::to_string(rust_project)?.replace("__EXEC_ROOT__", execution_root);
// Write the new rust-project.json file.
std::fs::write(rust_project_path, rust_project_content)?;
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
use std::collections::BTreeSet;
use crate::aquery::CrateSpec;
/// A simple example with a single crate and no dependencies.
#[test]
fn generate_rust_project_single() {
let project = generate_rust_project(
"sysroot",
&BTreeSet::from([CrateSpec {
crate_id: "ID-example".into(),
display_name: "example".into(),
edition: "2018".into(),
root_module: "example/lib.rs".into(),
is_workspace_member: true,
deps: BTreeSet::new(),
proc_macro_dylib_path: None,
source: None,
cfg: vec!["test".into(), "debug_assertions".into()],
env: BTreeMap::new(),
target: "x86_64-unknown-linux-gnu".into(),
crate_type: "rlib".into(),
}]),
)
.expect("expect success");
assert_eq!(project.crates.len(), 1);
let c = &project.crates[0];
assert_eq!(c.display_name, Some("example".into()));
assert_eq!(c.root_module, "example/lib.rs");
assert_eq!(c.deps.len(), 0);
}
/// An example with a one crate having two dependencies.
#[test]
fn generate_rust_project_with_deps() {
let project = generate_rust_project(
"sysroot",
&BTreeSet::from([
CrateSpec {
crate_id: "ID-example".into(),
display_name: "example".into(),
edition: "2018".into(),
root_module: "example/lib.rs".into(),
is_workspace_member: true,
deps: BTreeSet::from(["ID-dep_a".into(), "ID-dep_b".into()]),
proc_macro_dylib_path: None,
source: None,
cfg: vec!["test".into(), "debug_assertions".into()],
env: BTreeMap::new(),
target: "x86_64-unknown-linux-gnu".into(),
crate_type: "rlib".into(),
},
CrateSpec {
crate_id: "ID-dep_a".into(),
display_name: "dep_a".into(),
edition: "2018".into(),
root_module: "dep_a/lib.rs".into(),
is_workspace_member: false,
deps: BTreeSet::new(),
proc_macro_dylib_path: None,
source: None,
cfg: vec!["test".into(), "debug_assertions".into()],
env: BTreeMap::new(),
target: "x86_64-unknown-linux-gnu".into(),
crate_type: "rlib".into(),
},
CrateSpec {
crate_id: "ID-dep_b".into(),
display_name: "dep_b".into(),
edition: "2018".into(),
root_module: "dep_b/lib.rs".into(),
is_workspace_member: false,
deps: BTreeSet::new(),
proc_macro_dylib_path: None,
source: None,
cfg: vec!["test".into(), "debug_assertions".into()],
env: BTreeMap::new(),
target: "x86_64-unknown-linux-gnu".into(),
crate_type: "rlib".into(),
},
]),
)
.expect("expect success");
assert_eq!(project.crates.len(), 3);
// Both dep_a and dep_b should be one of the first two crates.
assert!(
Some("dep_a".into()) == project.crates[0].display_name
|| Some("dep_a".into()) == project.crates[1].display_name
);
assert!(
Some("dep_b".into()) == project.crates[0].display_name
|| Some("dep_b".into()) == project.crates[1].display_name
);
let c = &project.crates[2];
assert_eq!(c.display_name, Some("example".into()));
}
}<|fim▁end|> | |
<|file_name|>asteroid-timer.en_GB.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.1" language="fr">
<context>
<name></name>
<message id="id-app-launcher-name">
<location filename="asteroid-timer.desktop.h" line="6"/><|fim▁hole|></context>
</TS><|fim▁end|> | <source>Timer</source>
<translation>Timer</translation>
</message> |
<|file_name|>0035_auto__del_field_trial_max_participants.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'Trial.max_participants'
db.delete_column(u'trials_trial', 'max_participants')
def backwards(self, orm):
# User chose to not deal with backwards NULL issues for 'Trial.max_participants'
raise RuntimeError("Cannot reverse this migration. 'Trial.max_participants' and its values cannot be restored.")
models = {
u'trials.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'trial': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Trial']"})
},
u'trials.invitation': {
'Meta': {'object_name': 'Invitation'},
'email': ('django.db.models.fields.EmailField', [], {'max_length': '254'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'sent': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'trial': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Trial']"})
},
u'trials.participant': {
'Meta': {'object_name': 'Participant'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Group']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'trial': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Trial']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['userprofiles.RMUser']", 'null': 'True', 'blank': 'True'})
},
u'trials.report': {
'Meta': {'object_name': 'Report'},
'binary': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'count': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'date': ('django.db.models.fields.DateField', [], {}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Group']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'participant': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Participant']", 'null': 'True', 'blank': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'trial': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Trial']"}),
'variable': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Variable']"})
},
u'trials.trial': {
'Meta': {'object_name': 'Trial'},
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'finish_date': ('django.db.models.fields.DateField', [], {}),
'group_a': ('django.db.models.fields.TextField', [], {}),
'group_a_desc': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),<|fim▁hole|> u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instruction_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'instruction_delivery': ('django.db.models.fields.TextField', [], {'default': "'im'", 'max_length': '2'}),
'instruction_hours_after': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'is_edited': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'min_participants': ('django.db.models.fields.IntegerField', [], {}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['userprofiles.RMUser']"}),
'participants': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'private': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'recruiting': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'recruitment': ('django.db.models.fields.CharField', [], {'default': "'an'", 'max_length': '2'}),
'reporting_freq': ('django.db.models.fields.CharField', [], {'default': "'da'", 'max_length': '200'}),
'start_date': ('django.db.models.fields.DateField', [], {}),
'stopped': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
u'trials.variable': {
'Meta': {'object_name': 'Variable'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'question': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'style': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'trial': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Trial']"})
},
u'userprofiles.rmuser': {
'Meta': {'object_name': 'RMUser'},
'account': ('django.db.models.fields.CharField', [], {'default': "'st'", 'max_length': '2'}),
'dob': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '254'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'postcode': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'receive_questions': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40', 'db_index': 'True'})
}
}
complete_apps = ['trials']<|fim▁end|> | 'group_a_expected': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'group_b': ('django.db.models.fields.TextField', [], {}),
'group_b_desc': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'group_b_impressed': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}), |
<|file_name|>path.rs<|end_file_name|><|fim▁begin|>use std::cmp;
use std::fmt::{self, Debug, Formatter};
use std::fs;
use std::io::prelude::*;
use std::path::{Path, PathBuf};
use filetime::FileTime;
use git2;
use glob::Pattern;
use core::{Package, PackageId, Summary, SourceId, Source, Dependency, Registry};
use ops;
use util::{self, CargoResult, internal, internal_error, human, ChainError};
use util::Config;
pub struct PathSource<'cfg> {
id: SourceId,
path: PathBuf,
updated: bool,
packages: Vec<Package>,
config: &'cfg Config,
}
// TODO: Figure out if packages should be discovered in new or self should be
// mut and packages are discovered in update
impl<'cfg> PathSource<'cfg> {
pub fn for_path(path: &Path, config: &'cfg Config)
-> CargoResult<PathSource<'cfg>> {
trace!("PathSource::for_path; path={}", path.display());
Ok(PathSource::new(path, &try!(SourceId::for_path(path)), config))
}
/// Invoked with an absolute path to a directory that contains a Cargo.toml.
/// The source will read the manifest and find any other packages contained
/// in the directory structure reachable by the root manifest.
pub fn new(path: &Path, id: &SourceId, config: &'cfg Config)
-> PathSource<'cfg> {
trace!("new; id={}", id);
PathSource {
id: id.clone(),
path: path.to_path_buf(),
updated: false,
packages: Vec::new(),
config: config,
}
}
pub fn root_package(&self) -> CargoResult<Package> {
trace!("root_package; source={:?}", self);
if !self.updated {
return Err(internal("source has not been updated"))
}
match self.packages.iter().find(|p| p.root() == &*self.path) {
Some(pkg) => Ok(pkg.clone()),
None => Err(internal("no package found in source"))
}
}
fn read_packages(&self) -> CargoResult<Vec<Package>> {
if self.updated {
Ok(self.packages.clone())
} else if self.id.is_path() && self.id.precise().is_some() {
// If our source id is a path and it's listed with a precise
// version, then it means that we're not allowed to have nested
// dependencies (they've been rewritten to crates.io dependencies)
// In this case we specifically read just one package, not a list of
// packages.
let path = self.path.join("Cargo.toml");
let (pkg, _) = try!(ops::read_package(&path, &self.id,
self.config));
Ok(vec![pkg])
} else {
ops::read_packages(&self.path, &self.id, self.config)
}
}
/// List all files relevant to building this package inside this source.
///
/// This function will use the appropriate methods to determine what is the
/// set of files underneath this source's directory which are relevant for
/// building `pkg`.
///
/// The basic assumption of this method is that all files in the directory
/// are relevant for building this package, but it also contains logic to
/// use other methods like .gitignore to filter the list of files.
pub fn list_files(&self, pkg: &Package) -> CargoResult<Vec<PathBuf>> {
let root = pkg.root();
let parse = |p: &String| {
Pattern::new(p).map_err(|e| {
human(format!("could not parse pattern `{}`: {}", p, e))
})
};
let exclude = try!(pkg.manifest().exclude().iter()
.map(|p| parse(p)).collect::<Result<Vec<_>, _>>());
let include = try!(pkg.manifest().include().iter()
.map(|p| parse(p)).collect::<Result<Vec<_>, _>>());
let mut filter = |p: &Path| {
let relative_path = util::without_prefix(p, &root).unwrap();
include.iter().any(|p| p.matches_path(&relative_path)) || {
include.len() == 0 &&
!exclude.iter().any(|p| p.matches_path(&relative_path))
}
};
// If this package is a git repository, then we really do want to query
// the git repository as it takes into account items such as .gitignore.
// We're not quite sure where the git repository is, however, so we do a
// bit of a probe.
//
// We check all packages in this source that are ancestors of the
// specified package (including the same package) to see if they're at
// the root of the git repository. This isn't always true, but it'll get
// us there most of the time!.
let repo = self.packages.iter()
.map(|pkg| pkg.root())
.filter(|path| root.starts_with(path))
.filter_map(|path| git2::Repository::open(&path).ok())
.next();
match repo {
Some(repo) => self.list_files_git(pkg, repo, &mut filter),
None => self.list_files_walk(pkg, &mut filter),
}
}
fn list_files_git(&self, pkg: &Package, repo: git2::Repository,
filter: &mut FnMut(&Path) -> bool)
-> CargoResult<Vec<PathBuf>> {
warn!("list_files_git {}", pkg.package_id());
let index = try!(repo.index());
let root = try!(repo.workdir().chain_error(|| {
internal_error("Can't list files on a bare repository.", "")
}));
let pkg_path = pkg.root();
let mut ret = Vec::new();
// We use information from the git repository to guide use in traversing
// its tree. The primary purpose of this is to take advantage of the
// .gitignore and auto-ignore files that don't matter.
//
// Here we're also careful to look at both tracked an untracked files as
// the untracked files are often part of a build and may become relevant
// as part of a future commit.
let index_files = index.iter().map(|entry| {
use libgit2_sys::git_filemode_t::GIT_FILEMODE_COMMIT;
let is_dir = entry.mode == GIT_FILEMODE_COMMIT as u32;
(join(&root, &entry.path), Some(is_dir))
});
let mut opts = git2::StatusOptions::new();
opts.include_untracked(true);
if let Some(suffix) = util::without_prefix(pkg_path, &root) {
opts.pathspec(suffix);
}
let statuses = try!(repo.statuses(Some(&mut opts)));
let untracked = statuses.iter().map(|entry| {
(join(&root, entry.path_bytes()), None)
});
'outer: for (file_path, is_dir) in index_files.chain(untracked) {
let file_path = try!(file_path);
// Filter out files outside this package.
if !file_path.starts_with(pkg_path) { continue }
// Filter out Cargo.lock and target always
{
let fname = file_path.file_name().and_then(|s| s.to_str());
if fname == Some("Cargo.lock") { continue }
if fname == Some("target") { continue }
}
// Filter out sub-packages of this package
for other_pkg in self.packages.iter().filter(|p| *p != pkg) {
let other_path = other_pkg.root();
if other_path.starts_with(pkg_path) &&
file_path.starts_with(other_path) {
continue 'outer;
}
}
let is_dir = is_dir.or_else(|| {
fs::metadata(&file_path).ok().map(|m| m.is_dir())
}).unwrap_or(false);
if is_dir {
warn!(" found submodule {}", file_path.display());
let rel = util::without_prefix(&file_path, &root).unwrap();
let rel = try!(rel.to_str().chain_error(|| {
human(format!("invalid utf-8 filename: {}", rel.display()))
}));
// Git submodules are currently only named through `/` path
// separators, explicitly not `\` which windows uses. Who knew?
let rel = rel.replace(r"\", "/");
match repo.find_submodule(&rel).and_then(|s| s.open()) {<|fim▁hole|> let files = try!(self.list_files_git(pkg, repo, filter));
ret.extend(files.into_iter());
}
Err(..) => {
try!(PathSource::walk(&file_path, &mut ret, false,
filter));
}
}
} else if (*filter)(&file_path) {
// We found a file!
warn!(" found {}", file_path.display());
ret.push(file_path);
}
}
return Ok(ret);
#[cfg(unix)]
fn join(path: &Path, data: &[u8]) -> CargoResult<PathBuf> {
use std::os::unix::prelude::*;
use std::ffi::OsStr;
Ok(path.join(<OsStr as OsStrExt>::from_bytes(data)))
}
#[cfg(windows)]
fn join(path: &Path, data: &[u8]) -> CargoResult<PathBuf> {
use std::str;
match str::from_utf8(data) {
Ok(s) => Ok(path.join(s)),
Err(..) => Err(internal("cannot process path in git with a non \
unicode filename")),
}
}
}
fn list_files_walk(&self, pkg: &Package, filter: &mut FnMut(&Path) -> bool)
-> CargoResult<Vec<PathBuf>> {
let mut ret = Vec::new();
for pkg in self.packages.iter().filter(|p| *p == pkg) {
let loc = pkg.root();
try!(PathSource::walk(loc, &mut ret, true, filter));
}
return Ok(ret);
}
fn walk(path: &Path, ret: &mut Vec<PathBuf>,
is_root: bool, filter: &mut FnMut(&Path) -> bool) -> CargoResult<()>
{
if !fs::metadata(&path).map(|m| m.is_dir()).unwrap_or(false) {
if (*filter)(path) {
ret.push(path.to_path_buf());
}
return Ok(())
}
// Don't recurse into any sub-packages that we have
if !is_root && fs::metadata(&path.join("Cargo.toml")).is_ok() {
return Ok(())
}
for dir in try!(fs::read_dir(path)) {
let dir = try!(dir).path();
let name = dir.file_name().and_then(|s| s.to_str());
// Skip dotfile directories
if name.map(|s| s.starts_with(".")) == Some(true) {
continue
} else if is_root {
// Skip cargo artifacts
match name {
Some("target") | Some("Cargo.lock") => continue,
_ => {}
}
}
try!(PathSource::walk(&dir, ret, false, filter));
}
return Ok(())
}
}
impl<'cfg> Debug for PathSource<'cfg> {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "the paths source")
}
}
impl<'cfg> Registry for PathSource<'cfg> {
fn query(&mut self, dep: &Dependency) -> CargoResult<Vec<Summary>> {
self.packages.query(dep)
}
}
impl<'cfg> Source for PathSource<'cfg> {
fn update(&mut self) -> CargoResult<()> {
if !self.updated {
let packages = try!(self.read_packages());
self.packages.extend(packages.into_iter());
self.updated = true;
}
Ok(())
}
fn download(&mut self, _: &[PackageId]) -> CargoResult<()>{
// TODO: assert! that the PackageId is contained by the source
Ok(())
}
fn get(&self, ids: &[PackageId]) -> CargoResult<Vec<Package>> {
trace!("getting packages; ids={:?}", ids);
Ok(self.packages.iter()
.filter(|pkg| ids.iter().any(|id| pkg.package_id() == id))
.map(|pkg| pkg.clone())
.collect())
}
fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
if !self.updated {
return Err(internal_error("BUG: source was not updated", ""));
}
let mut max = FileTime::zero();
for file in try!(self.list_files(pkg)).iter() {
// An fs::stat error here is either because path is a
// broken symlink, a permissions error, or a race
// condition where this path was rm'ed - either way,
// we can ignore the error and treat the path's mtime
// as 0.
let mtime = fs::metadata(file).map(|meta| {
FileTime::from_last_modification_time(&meta)
}).unwrap_or(FileTime::zero());
warn!("{} {}", mtime, file.display());
max = cmp::max(max, mtime);
}
trace!("fingerprint {}: {}", self.path.display(), max);
Ok(max.to_string())
}
}<|fim▁end|> | Ok(repo) => { |
<|file_name|>02-index-power.py<|end_file_name|><|fim▁begin|><|fim▁hole|>index_power=lambda a,n:a[n]**n if n<len(a)else-1<|fim▁end|> | |
<|file_name|>entry.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use driver::config;
use driver::session::Session;
use syntax::ast::{Crate, Name, NodeId, Item, ItemFn};
use syntax::ast_map;
use syntax::attr;
use syntax::codemap::Span;
use syntax::parse::token;
use syntax::visit;
use syntax::visit::Visitor;
struct EntryContext<'a> {
session: &'a Session,
ast_map: &'a ast_map::Map,
// The interned Name for "main".
main_name: Name,
// The top-level function called 'main'
main_fn: Option<(NodeId, Span)>,
// The function that has attribute named 'main'
attr_main_fn: Option<(NodeId, Span)>,
// The function that has the attribute 'start' on it
start_fn: Option<(NodeId, Span)>,
// The functions that one might think are 'main' but aren't, e.g.
// main functions not defined at the top level. For diagnostics.
non_main_fns: Vec<(NodeId, Span)> ,
}
impl<'a> Visitor<()> for EntryContext<'a> {
fn visit_item(&mut self, item: &Item, _:()) {
find_item(item, self);
}
}
pub fn find_entry_point(session: &Session, krate: &Crate, ast_map: &ast_map::Map) {
let any_exe = session.crate_types.borrow().iter().any(|ty| {
*ty == config::CrateTypeExecutable
});
if !any_exe {
// No need to find a main function
return
}
// If the user wants no main function at all, then stop here.
if attr::contains_name(krate.attrs.as_slice(), "no_main") {
session.entry_type.set(Some(config::EntryNone));
return
}
let mut ctxt = EntryContext {
session: session,
main_name: token::intern("main"),
ast_map: ast_map,
main_fn: None,
attr_main_fn: None,
start_fn: None,
non_main_fns: Vec::new(),
};
visit::walk_crate(&mut ctxt, krate, ());
configure_main(&mut ctxt);
}
fn find_item(item: &Item, ctxt: &mut EntryContext) {
match item.node {
ItemFn(..) => {
if item.ident.name == ctxt.main_name {
ctxt.ast_map.with_path(item.id, |mut path| {
if path.count() == 1 {
// This is a top-level function so can be 'main'
if ctxt.main_fn.is_none() {
ctxt.main_fn = Some((item.id, item.span));
} else {
ctxt.session.span_err(
item.span,
"multiple 'main' functions");
}
} else {
// This isn't main
ctxt.non_main_fns.push((item.id, item.span));
}
});
}
if attr::contains_name(item.attrs.as_slice(), "main") {
if ctxt.attr_main_fn.is_none() {
ctxt.attr_main_fn = Some((item.id, item.span));
} else {
ctxt.session.span_err(
item.span,
"multiple 'main' functions");
}
}
if attr::contains_name(item.attrs.as_slice(), "start") {
if ctxt.start_fn.is_none() {
ctxt.start_fn = Some((item.id, item.span));
} else {
ctxt.session.span_err(<|fim▁hole|> item.span,
"multiple 'start' functions");
}
}
}
_ => ()
}
visit::walk_item(ctxt, item, ());
}
fn configure_main(this: &mut EntryContext) {
if this.start_fn.is_some() {
*this.session.entry_fn.borrow_mut() = this.start_fn;
this.session.entry_type.set(Some(config::EntryStart));
} else if this.attr_main_fn.is_some() {
*this.session.entry_fn.borrow_mut() = this.attr_main_fn;
this.session.entry_type.set(Some(config::EntryMain));
} else if this.main_fn.is_some() {
*this.session.entry_fn.borrow_mut() = this.main_fn;
this.session.entry_type.set(Some(config::EntryMain));
} else {
// No main function
this.session.err("main function not found");
if !this.non_main_fns.is_empty() {
// There were some functions named 'main' though. Try to give the user a hint.
this.session.note("the main function must be defined at the crate level \
but you have one or more functions named 'main' that are not \
defined at the crate level. Either move the definition or \
attach the `#[main]` attribute to override this behavior.");
for &(_, span) in this.non_main_fns.iter() {
this.session.span_note(span, "here is a function named 'main'");
}
this.session.abort_if_errors();
}
}
}<|fim▁end|> |