prompt
large_stringlengths
70
991k
completion
large_stringlengths
0
1.02k
<|file_name|>ASTMatch0.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2008 Roberto Raggi <[email protected]> // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. // // W A R N I N G // ------------- // // This file is automatically generated. // Changes will be lost. 
// #include "AST.h" #include "ASTMatcher.h" <|fim▁hole|>{ if (ObjCSelectorArgumentAST *_other = pattern->asObjCSelectorArgument()) return matcher->match(this, _other); return false; } bool ObjCSelectorAST::match0(AST *pattern, ASTMatcher *matcher) { if (ObjCSelectorAST *_other = pattern->asObjCSelector()) return matcher->match(this, _other); return false; } bool SimpleSpecifierAST::match0(AST *pattern, ASTMatcher *matcher) { if (SimpleSpecifierAST *_other = pattern->asSimpleSpecifier()) return matcher->match(this, _other); return false; } bool AttributeSpecifierAST::match0(AST *pattern, ASTMatcher *matcher) { if (AttributeSpecifierAST *_other = pattern->asAttributeSpecifier()) return matcher->match(this, _other); return false; } bool AttributeAST::match0(AST *pattern, ASTMatcher *matcher) { if (AttributeAST *_other = pattern->asAttribute()) return matcher->match(this, _other); return false; } bool TypeofSpecifierAST::match0(AST *pattern, ASTMatcher *matcher) { if (TypeofSpecifierAST *_other = pattern->asTypeofSpecifier()) return matcher->match(this, _other); return false; } bool DeclaratorAST::match0(AST *pattern, ASTMatcher *matcher) { if (DeclaratorAST *_other = pattern->asDeclarator()) return matcher->match(this, _other); return false; } bool SimpleDeclarationAST::match0(AST *pattern, ASTMatcher *matcher) { if (SimpleDeclarationAST *_other = pattern->asSimpleDeclaration()) return matcher->match(this, _other); return false; } bool EmptyDeclarationAST::match0(AST *pattern, ASTMatcher *matcher) { if (EmptyDeclarationAST *_other = pattern->asEmptyDeclaration()) return matcher->match(this, _other); return false; } bool AccessDeclarationAST::match0(AST *pattern, ASTMatcher *matcher) { if (AccessDeclarationAST *_other = pattern->asAccessDeclaration()) return matcher->match(this, _other); return false; } bool QtObjectTagAST::match0(AST *pattern, ASTMatcher *matcher) { if (QtObjectTagAST *_other = pattern->asQtObjectTag()) return matcher->match(this, _other); return 
false; } bool QtPrivateSlotAST::match0(AST *pattern, ASTMatcher *matcher) { if (QtPrivateSlotAST *_other = pattern->asQtPrivateSlot()) return matcher->match(this, _other); return false; } bool QtPropertyDeclarationItemAST::match0(AST *pattern, ASTMatcher *matcher) { if (QtPropertyDeclarationItemAST *_other = pattern->asQtPropertyDeclarationItem()) return matcher->match(this, _other); return false; } bool QtPropertyDeclarationAST::match0(AST *pattern, ASTMatcher *matcher) { if (QtPropertyDeclarationAST *_other = pattern->asQtPropertyDeclaration()) return matcher->match(this, _other); return false; } bool QtEnumDeclarationAST::match0(AST *pattern, ASTMatcher *matcher) { if (QtEnumDeclarationAST *_other = pattern->asQtEnumDeclaration()) return matcher->match(this, _other); return false; } bool QtFlagsDeclarationAST::match0(AST *pattern, ASTMatcher *matcher) { if (QtFlagsDeclarationAST *_other = pattern->asQtFlagsDeclaration()) return matcher->match(this, _other); return false; } bool QtInterfaceNameAST::match0(AST *pattern, ASTMatcher *matcher) { if (QtInterfaceNameAST *_other = pattern->asQtInterfaceName()) return matcher->match(this, _other); return false; } bool QtInterfacesDeclarationAST::match0(AST *pattern, ASTMatcher *matcher) { if (QtInterfacesDeclarationAST *_other = pattern->asQtInterfacesDeclaration()) return matcher->match(this, _other); return false; } bool AsmDefinitionAST::match0(AST *pattern, ASTMatcher *matcher) { if (AsmDefinitionAST *_other = pattern->asAsmDefinition()) return matcher->match(this, _other); return false; } bool BaseSpecifierAST::match0(AST *pattern, ASTMatcher *matcher) { if (BaseSpecifierAST *_other = pattern->asBaseSpecifier()) return matcher->match(this, _other); return false; } bool IdExpressionAST::match0(AST *pattern, ASTMatcher *matcher) { if (IdExpressionAST *_other = pattern->asIdExpression()) return matcher->match(this, _other); return false; } bool CompoundExpressionAST::match0(AST *pattern, ASTMatcher *matcher) { if 
(CompoundExpressionAST *_other = pattern->asCompoundExpression()) return matcher->match(this, _other); return false; } bool CompoundLiteralAST::match0(AST *pattern, ASTMatcher *matcher) { if (CompoundLiteralAST *_other = pattern->asCompoundLiteral()) return matcher->match(this, _other); return false; } bool QtMethodAST::match0(AST *pattern, ASTMatcher *matcher) { if (QtMethodAST *_other = pattern->asQtMethod()) return matcher->match(this, _other); return false; } bool QtMemberDeclarationAST::match0(AST *pattern, ASTMatcher *matcher) { if (QtMemberDeclarationAST *_other = pattern->asQtMemberDeclaration()) return matcher->match(this, _other); return false; } bool BinaryExpressionAST::match0(AST *pattern, ASTMatcher *matcher) { if (BinaryExpressionAST *_other = pattern->asBinaryExpression()) return matcher->match(this, _other); return false; } bool CastExpressionAST::match0(AST *pattern, ASTMatcher *matcher) { if (CastExpressionAST *_other = pattern->asCastExpression()) return matcher->match(this, _other); return false; } bool ClassSpecifierAST::match0(AST *pattern, ASTMatcher *matcher) { if (ClassSpecifierAST *_other = pattern->asClassSpecifier()) return matcher->match(this, _other); return false; } bool CaseStatementAST::match0(AST *pattern, ASTMatcher *matcher) { if (CaseStatementAST *_other = pattern->asCaseStatement()) return matcher->match(this, _other); return false; } bool CompoundStatementAST::match0(AST *pattern, ASTMatcher *matcher) { if (CompoundStatementAST *_other = pattern->asCompoundStatement()) return matcher->match(this, _other); return false; } bool ConditionAST::match0(AST *pattern, ASTMatcher *matcher) { if (ConditionAST *_other = pattern->asCondition()) return matcher->match(this, _other); return false; } bool ConditionalExpressionAST::match0(AST *pattern, ASTMatcher *matcher) { if (ConditionalExpressionAST *_other = pattern->asConditionalExpression()) return matcher->match(this, _other); return false; } bool CppCastExpressionAST::match0(AST 
*pattern, ASTMatcher *matcher) { if (CppCastExpressionAST *_other = pattern->asCppCastExpression()) return matcher->match(this, _other); return false; } bool CtorInitializerAST::match0(AST *pattern, ASTMatcher *matcher) { if (CtorInitializerAST *_other = pattern->asCtorInitializer()) return matcher->match(this, _other); return false; } bool DeclarationStatementAST::match0(AST *pattern, ASTMatcher *matcher) { if (DeclarationStatementAST *_other = pattern->asDeclarationStatement()) return matcher->match(this, _other); return false; } bool DeclaratorIdAST::match0(AST *pattern, ASTMatcher *matcher) { if (DeclaratorIdAST *_other = pattern->asDeclaratorId()) return matcher->match(this, _other); return false; } bool NestedDeclaratorAST::match0(AST *pattern, ASTMatcher *matcher) { if (NestedDeclaratorAST *_other = pattern->asNestedDeclarator()) return matcher->match(this, _other); return false; } bool FunctionDeclaratorAST::match0(AST *pattern, ASTMatcher *matcher) { if (FunctionDeclaratorAST *_other = pattern->asFunctionDeclarator()) return matcher->match(this, _other); return false; } bool ArrayDeclaratorAST::match0(AST *pattern, ASTMatcher *matcher) { if (ArrayDeclaratorAST *_other = pattern->asArrayDeclarator()) return matcher->match(this, _other); return false; } bool DeleteExpressionAST::match0(AST *pattern, ASTMatcher *matcher) { if (DeleteExpressionAST *_other = pattern->asDeleteExpression()) return matcher->match(this, _other); return false; } bool DoStatementAST::match0(AST *pattern, ASTMatcher *matcher) { if (DoStatementAST *_other = pattern->asDoStatement()) return matcher->match(this, _other); return false; } bool NamedTypeSpecifierAST::match0(AST *pattern, ASTMatcher *matcher) { if (NamedTypeSpecifierAST *_other = pattern->asNamedTypeSpecifier()) return matcher->match(this, _other); return false; } bool ElaboratedTypeSpecifierAST::match0(AST *pattern, ASTMatcher *matcher) { if (ElaboratedTypeSpecifierAST *_other = pattern->asElaboratedTypeSpecifier()) return 
matcher->match(this, _other); return false; } bool EnumSpecifierAST::match0(AST *pattern, ASTMatcher *matcher) { if (EnumSpecifierAST *_other = pattern->asEnumSpecifier()) return matcher->match(this, _other); return false; } bool EnumeratorAST::match0(AST *pattern, ASTMatcher *matcher) { if (EnumeratorAST *_other = pattern->asEnumerator()) return matcher->match(this, _other); return false; } bool ExceptionDeclarationAST::match0(AST *pattern, ASTMatcher *matcher) { if (ExceptionDeclarationAST *_other = pattern->asExceptionDeclaration()) return matcher->match(this, _other); return false; } bool ExceptionSpecificationAST::match0(AST *pattern, ASTMatcher *matcher) { if (ExceptionSpecificationAST *_other = pattern->asExceptionSpecification()) return matcher->match(this, _other); return false; } bool ExpressionOrDeclarationStatementAST::match0(AST *pattern, ASTMatcher *matcher) { if (ExpressionOrDeclarationStatementAST *_other = pattern->asExpressionOrDeclarationStatement()) return matcher->match(this, _other); return false; } bool ExpressionStatementAST::match0(AST *pattern, ASTMatcher *matcher) { if (ExpressionStatementAST *_other = pattern->asExpressionStatement()) return matcher->match(this, _other); return false; } bool FunctionDefinitionAST::match0(AST *pattern, ASTMatcher *matcher) { if (FunctionDefinitionAST *_other = pattern->asFunctionDefinition()) return matcher->match(this, _other); return false; } bool ForeachStatementAST::match0(AST *pattern, ASTMatcher *matcher) { if (ForeachStatementAST *_other = pattern->asForeachStatement()) return matcher->match(this, _other); return false; } bool ForStatementAST::match0(AST *pattern, ASTMatcher *matcher) { if (ForStatementAST *_other = pattern->asForStatement()) return matcher->match(this, _other); return false; } bool IfStatementAST::match0(AST *pattern, ASTMatcher *matcher) { if (IfStatementAST *_other = pattern->asIfStatement()) return matcher->match(this, _other); return false; } bool 
ArrayInitializerAST::match0(AST *pattern, ASTMatcher *matcher) { if (ArrayInitializerAST *_other = pattern->asArrayInitializer()) return matcher->match(this, _other); return false; } bool LabeledStatementAST::match0(AST *pattern, ASTMatcher *matcher) { if (LabeledStatementAST *_other = pattern->asLabeledStatement()) return matcher->match(this, _other); return false; } bool LinkageBodyAST::match0(AST *pattern, ASTMatcher *matcher) { if (LinkageBodyAST *_other = pattern->asLinkageBody()) return matcher->match(this, _other); return false; } bool LinkageSpecificationAST::match0(AST *pattern, ASTMatcher *matcher) { if (LinkageSpecificationAST *_other = pattern->asLinkageSpecification()) return matcher->match(this, _other); return false; } bool MemInitializerAST::match0(AST *pattern, ASTMatcher *matcher) { if (MemInitializerAST *_other = pattern->asMemInitializer()) return matcher->match(this, _other); return false; } bool NestedNameSpecifierAST::match0(AST *pattern, ASTMatcher *matcher) { if (NestedNameSpecifierAST *_other = pattern->asNestedNameSpecifier()) return matcher->match(this, _other); return false; } bool QualifiedNameAST::match0(AST *pattern, ASTMatcher *matcher) { if (QualifiedNameAST *_other = pattern->asQualifiedName()) return matcher->match(this, _other); return false; } bool OperatorFunctionIdAST::match0(AST *pattern, ASTMatcher *matcher) { if (OperatorFunctionIdAST *_other = pattern->asOperatorFunctionId()) return matcher->match(this, _other); return false; } bool ConversionFunctionIdAST::match0(AST *pattern, ASTMatcher *matcher) { if (ConversionFunctionIdAST *_other = pattern->asConversionFunctionId()) return matcher->match(this, _other); return false; } bool SimpleNameAST::match0(AST *pattern, ASTMatcher *matcher) { if (SimpleNameAST *_other = pattern->asSimpleName()) return matcher->match(this, _other); return false; } bool DestructorNameAST::match0(AST *pattern, ASTMatcher *matcher) { if (DestructorNameAST *_other = pattern->asDestructorName()) 
return matcher->match(this, _other); return false; } bool TemplateIdAST::match0(AST *pattern, ASTMatcher *matcher) { if (TemplateIdAST *_other = pattern->asTemplateId()) return matcher->match(this, _other); return false; } bool NamespaceAST::match0(AST *pattern, ASTMatcher *matcher) { if (NamespaceAST *_other = pattern->asNamespace()) return matcher->match(this, _other); return false; } bool NamespaceAliasDefinitionAST::match0(AST *pattern, ASTMatcher *matcher) { if (NamespaceAliasDefinitionAST *_other = pattern->asNamespaceAliasDefinition()) return matcher->match(this, _other); return false; } bool NewPlacementAST::match0(AST *pattern, ASTMatcher *matcher) { if (NewPlacementAST *_other = pattern->asNewPlacement()) return matcher->match(this, _other); return false; } bool NewArrayDeclaratorAST::match0(AST *pattern, ASTMatcher *matcher) { if (NewArrayDeclaratorAST *_other = pattern->asNewArrayDeclarator()) return matcher->match(this, _other); return false; } bool NewExpressionAST::match0(AST *pattern, ASTMatcher *matcher) { if (NewExpressionAST *_other = pattern->asNewExpression()) return matcher->match(this, _other); return false; } bool NewInitializerAST::match0(AST *pattern, ASTMatcher *matcher) { if (NewInitializerAST *_other = pattern->asNewInitializer()) return matcher->match(this, _other); return false; } bool NewTypeIdAST::match0(AST *pattern, ASTMatcher *matcher) { if (NewTypeIdAST *_other = pattern->asNewTypeId()) return matcher->match(this, _other); return false; } bool OperatorAST::match0(AST *pattern, ASTMatcher *matcher) { if (OperatorAST *_other = pattern->asOperator()) return matcher->match(this, _other); return false; } bool ParameterDeclarationAST::match0(AST *pattern, ASTMatcher *matcher) { if (ParameterDeclarationAST *_other = pattern->asParameterDeclaration()) return matcher->match(this, _other); return false; } bool ParameterDeclarationClauseAST::match0(AST *pattern, ASTMatcher *matcher) { if (ParameterDeclarationClauseAST *_other = 
pattern->asParameterDeclarationClause()) return matcher->match(this, _other); return false; } bool CallAST::match0(AST *pattern, ASTMatcher *matcher) { if (CallAST *_other = pattern->asCall()) return matcher->match(this, _other); return false; } bool ArrayAccessAST::match0(AST *pattern, ASTMatcher *matcher) { if (ArrayAccessAST *_other = pattern->asArrayAccess()) return matcher->match(this, _other); return false; } bool PostIncrDecrAST::match0(AST *pattern, ASTMatcher *matcher) { if (PostIncrDecrAST *_other = pattern->asPostIncrDecr()) return matcher->match(this, _other); return false; } bool MemberAccessAST::match0(AST *pattern, ASTMatcher *matcher) { if (MemberAccessAST *_other = pattern->asMemberAccess()) return matcher->match(this, _other); return false; } bool TypeidExpressionAST::match0(AST *pattern, ASTMatcher *matcher) { if (TypeidExpressionAST *_other = pattern->asTypeidExpression()) return matcher->match(this, _other); return false; } bool TypenameCallExpressionAST::match0(AST *pattern, ASTMatcher *matcher) { if (TypenameCallExpressionAST *_other = pattern->asTypenameCallExpression()) return matcher->match(this, _other); return false; } bool TypeConstructorCallAST::match0(AST *pattern, ASTMatcher *matcher) { if (TypeConstructorCallAST *_other = pattern->asTypeConstructorCall()) return matcher->match(this, _other); return false; } bool PointerToMemberAST::match0(AST *pattern, ASTMatcher *matcher) { if (PointerToMemberAST *_other = pattern->asPointerToMember()) return matcher->match(this, _other); return false; } bool PointerAST::match0(AST *pattern, ASTMatcher *matcher) { if (PointerAST *_other = pattern->asPointer()) return matcher->match(this, _other); return false; } bool ReferenceAST::match0(AST *pattern, ASTMatcher *matcher) { if (ReferenceAST *_other = pattern->asReference()) return matcher->match(this, _other); return false; } bool BreakStatementAST::match0(AST *pattern, ASTMatcher *matcher) { if (BreakStatementAST *_other = 
pattern->asBreakStatement()) return matcher->match(this, _other); return false; } bool ContinueStatementAST::match0(AST *pattern, ASTMatcher *matcher) { if (ContinueStatementAST *_other = pattern->asContinueStatement()) return matcher->match(this, _other); return false; } bool GotoStatementAST::match0(AST *pattern, ASTMatcher *matcher) { if (GotoStatementAST *_other = pattern->asGotoStatement()) return matcher->match(this, _other); return false; } bool ReturnStatementAST::match0(AST *pattern, ASTMatcher *matcher) { if (ReturnStatementAST *_other = pattern->asReturnStatement()) return matcher->match(this, _other); return false; } bool SizeofExpressionAST::match0(AST *pattern, ASTMatcher *matcher) { if (SizeofExpressionAST *_other = pattern->asSizeofExpression()) return matcher->match(this, _other); return false; } bool PointerLiteralAST::match0(AST *pattern, ASTMatcher *matcher) { if (PointerLiteralAST *_other = pattern->asPointerLiteral()) return matcher->match(this, _other); return false; } bool NumericLiteralAST::match0(AST *pattern, ASTMatcher *matcher) { if (NumericLiteralAST *_other = pattern->asNumericLiteral()) return matcher->match(this, _other); return false; } bool BoolLiteralAST::match0(AST *pattern, ASTMatcher *matcher) { if (BoolLiteralAST *_other = pattern->asBoolLiteral()) return matcher->match(this, _other); return false; } bool ThisExpressionAST::match0(AST *pattern, ASTMatcher *matcher) { if (ThisExpressionAST *_other = pattern->asThisExpression()) return matcher->match(this, _other); return false; } bool NestedExpressionAST::match0(AST *pattern, ASTMatcher *matcher) { if (NestedExpressionAST *_other = pattern->asNestedExpression()) return matcher->match(this, _other); return false; } bool StringLiteralAST::match0(AST *pattern, ASTMatcher *matcher) { if (StringLiteralAST *_other = pattern->asStringLiteral()) return matcher->match(this, _other); return false; } bool SwitchStatementAST::match0(AST *pattern, ASTMatcher *matcher) { if 
(SwitchStatementAST *_other = pattern->asSwitchStatement()) return matcher->match(this, _other); return false; } bool TemplateDeclarationAST::match0(AST *pattern, ASTMatcher *matcher) { if (TemplateDeclarationAST *_other = pattern->asTemplateDeclaration()) return matcher->match(this, _other); return false; } bool ThrowExpressionAST::match0(AST *pattern, ASTMatcher *matcher) { if (ThrowExpressionAST *_other = pattern->asThrowExpression()) return matcher->match(this, _other); return false; } bool TranslationUnitAST::match0(AST *pattern, ASTMatcher *matcher) { if (TranslationUnitAST *_other = pattern->asTranslationUnit()) return matcher->match(this, _other); return false; } bool TryBlockStatementAST::match0(AST *pattern, ASTMatcher *matcher) { if (TryBlockStatementAST *_other = pattern->asTryBlockStatement()) return matcher->match(this, _other); return false; } bool CatchClauseAST::match0(AST *pattern, ASTMatcher *matcher) { if (CatchClauseAST *_other = pattern->asCatchClause()) return matcher->match(this, _other); return false; } bool TypeIdAST::match0(AST *pattern, ASTMatcher *matcher) { if (TypeIdAST *_other = pattern->asTypeId()) return matcher->match(this, _other); return false; } bool TypenameTypeParameterAST::match0(AST *pattern, ASTMatcher *matcher) { if (TypenameTypeParameterAST *_other = pattern->asTypenameTypeParameter()) return matcher->match(this, _other); return false; } bool TemplateTypeParameterAST::match0(AST *pattern, ASTMatcher *matcher) { if (TemplateTypeParameterAST *_other = pattern->asTemplateTypeParameter()) return matcher->match(this, _other); return false; } bool UnaryExpressionAST::match0(AST *pattern, ASTMatcher *matcher) { if (UnaryExpressionAST *_other = pattern->asUnaryExpression()) return matcher->match(this, _other); return false; } bool UsingAST::match0(AST *pattern, ASTMatcher *matcher) { if (UsingAST *_other = pattern->asUsing()) return matcher->match(this, _other); return false; } bool UsingDirectiveAST::match0(AST *pattern, 
ASTMatcher *matcher) { if (UsingDirectiveAST *_other = pattern->asUsingDirective()) return matcher->match(this, _other); return false; } bool WhileStatementAST::match0(AST *pattern, ASTMatcher *matcher) { if (WhileStatementAST *_other = pattern->asWhileStatement()) return matcher->match(this, _other); return false; } bool ObjCClassForwardDeclarationAST::match0(AST *pattern, ASTMatcher *matcher) { if (ObjCClassForwardDeclarationAST *_other = pattern->asObjCClassForwardDeclaration()) return matcher->match(this, _other); return false; } bool ObjCClassDeclarationAST::match0(AST *pattern, ASTMatcher *matcher) { if (ObjCClassDeclarationAST *_other = pattern->asObjCClassDeclaration()) return matcher->match(this, _other); return false; } bool ObjCProtocolForwardDeclarationAST::match0(AST *pattern, ASTMatcher *matcher) { if (ObjCProtocolForwardDeclarationAST *_other = pattern->asObjCProtocolForwardDeclaration()) return matcher->match(this, _other); return false; } bool ObjCProtocolDeclarationAST::match0(AST *pattern, ASTMatcher *matcher) { if (ObjCProtocolDeclarationAST *_other = pattern->asObjCProtocolDeclaration()) return matcher->match(this, _other); return false; } bool ObjCProtocolRefsAST::match0(AST *pattern, ASTMatcher *matcher) { if (ObjCProtocolRefsAST *_other = pattern->asObjCProtocolRefs()) return matcher->match(this, _other); return false; } bool ObjCMessageArgumentAST::match0(AST *pattern, ASTMatcher *matcher) { if (ObjCMessageArgumentAST *_other = pattern->asObjCMessageArgument()) return matcher->match(this, _other); return false; } bool ObjCMessageExpressionAST::match0(AST *pattern, ASTMatcher *matcher) { if (ObjCMessageExpressionAST *_other = pattern->asObjCMessageExpression()) return matcher->match(this, _other); return false; } bool ObjCProtocolExpressionAST::match0(AST *pattern, ASTMatcher *matcher) { if (ObjCProtocolExpressionAST *_other = pattern->asObjCProtocolExpression()) return matcher->match(this, _other); return false; } bool 
ObjCTypeNameAST::match0(AST *pattern, ASTMatcher *matcher) { if (ObjCTypeNameAST *_other = pattern->asObjCTypeName()) return matcher->match(this, _other); return false; } bool ObjCEncodeExpressionAST::match0(AST *pattern, ASTMatcher *matcher) { if (ObjCEncodeExpressionAST *_other = pattern->asObjCEncodeExpression()) return matcher->match(this, _other); return false; } bool ObjCSelectorExpressionAST::match0(AST *pattern, ASTMatcher *matcher) { if (ObjCSelectorExpressionAST *_other = pattern->asObjCSelectorExpression()) return matcher->match(this, _other); return false; } bool ObjCInstanceVariablesDeclarationAST::match0(AST *pattern, ASTMatcher *matcher) { if (ObjCInstanceVariablesDeclarationAST *_other = pattern->asObjCInstanceVariablesDeclaration()) return matcher->match(this, _other); return false; } bool ObjCVisibilityDeclarationAST::match0(AST *pattern, ASTMatcher *matcher) { if (ObjCVisibilityDeclarationAST *_other = pattern->asObjCVisibilityDeclaration()) return matcher->match(this, _other); return false; } bool ObjCPropertyAttributeAST::match0(AST *pattern, ASTMatcher *matcher) { if (ObjCPropertyAttributeAST *_other = pattern->asObjCPropertyAttribute()) return matcher->match(this, _other); return false; } bool ObjCPropertyDeclarationAST::match0(AST *pattern, ASTMatcher *matcher) { if (ObjCPropertyDeclarationAST *_other = pattern->asObjCPropertyDeclaration()) return matcher->match(this, _other); return false; } bool ObjCMessageArgumentDeclarationAST::match0(AST *pattern, ASTMatcher *matcher) { if (ObjCMessageArgumentDeclarationAST *_other = pattern->asObjCMessageArgumentDeclaration()) return matcher->match(this, _other); return false; } bool ObjCMethodPrototypeAST::match0(AST *pattern, ASTMatcher *matcher) { if (ObjCMethodPrototypeAST *_other = pattern->asObjCMethodPrototype()) return matcher->match(this, _other); return false; } bool ObjCMethodDeclarationAST::match0(AST *pattern, ASTMatcher *matcher) { if (ObjCMethodDeclarationAST *_other = 
pattern->asObjCMethodDeclaration()) return matcher->match(this, _other); return false; } bool ObjCSynthesizedPropertyAST::match0(AST *pattern, ASTMatcher *matcher) { if (ObjCSynthesizedPropertyAST *_other = pattern->asObjCSynthesizedProperty()) return matcher->match(this, _other); return false; } bool ObjCSynthesizedPropertiesDeclarationAST::match0(AST *pattern, ASTMatcher *matcher) { if (ObjCSynthesizedPropertiesDeclarationAST *_other = pattern->asObjCSynthesizedPropertiesDeclaration()) return matcher->match(this, _other); return false; } bool ObjCDynamicPropertiesDeclarationAST::match0(AST *pattern, ASTMatcher *matcher) { if (ObjCDynamicPropertiesDeclarationAST *_other = pattern->asObjCDynamicPropertiesDeclaration()) return matcher->match(this, _other); return false; } bool ObjCFastEnumerationAST::match0(AST *pattern, ASTMatcher *matcher) { if (ObjCFastEnumerationAST *_other = pattern->asObjCFastEnumeration()) return matcher->match(this, _other); return false; } bool ObjCSynchronizedStatementAST::match0(AST *pattern, ASTMatcher *matcher) { if (ObjCSynchronizedStatementAST *_other = pattern->asObjCSynchronizedStatement()) return matcher->match(this, _other); return false; } bool LambdaExpressionAST::match0(AST *pattern, ASTMatcher *matcher) { if (LambdaExpressionAST *_other = pattern->asLambdaExpression()) return matcher->match(this, _other); return false; } bool LambdaIntroducerAST::match0(AST *pattern, ASTMatcher *matcher) { if (LambdaIntroducerAST *_other = pattern->asLambdaIntroducer()) return matcher->match(this, _other); return false; } bool LambdaCaptureAST::match0(AST *pattern, ASTMatcher *matcher) { if (LambdaCaptureAST *_other = pattern->asLambdaCapture()) return matcher->match(this, _other); return false; } bool CaptureAST::match0(AST *pattern, ASTMatcher *matcher) { if (CaptureAST *_other = pattern->asCapture()) return matcher->match(this, _other); return false; } bool LambdaDeclaratorAST::match0(AST *pattern, ASTMatcher *matcher) { if 
(LambdaDeclaratorAST *_other = pattern->asLambdaDeclarator()) return matcher->match(this, _other); return false; } bool TrailingReturnTypeAST::match0(AST *pattern, ASTMatcher *matcher) { if (TrailingReturnTypeAST *_other = pattern->asTrailingReturnType()) return matcher->match(this, _other); return false; } bool BracedInitializerAST::match0(AST *pattern, ASTMatcher *matcher) { if (BracedInitializerAST *_other = pattern->asBracedInitializer()) return matcher->match(this, _other); return false; }<|fim▁end|>
using namespace CPlusPlus; bool ObjCSelectorArgumentAST::match0(AST *pattern, ASTMatcher *matcher)
<|file_name|>users.js<|end_file_name|><|fim▁begin|>/** * Module dependencies */ var httpStatus = require('../helpers/http-status') , User = require('mongoose').model('User') , logger = require('../../logger'); exports.login = function (req, res) { res.render('users/login', {title: 'login'}); }; exports.checkAuth = function (req, res, next) { var errors = []; ['username', 'password'].forEach(function (prop) { if (!req.body[prop] || req.body[prop].trim() === '') { errors.push({ error: 'empty', expected: 'not empty', value: req.body[prop] || '', field: prop, msg: prop + ' field is empty' }); } }); if (errors.length > 0) {<|fim▁hole|> } next(); }; function checkAccessTokenAuth(req, res, next) { var accessToken = req.get('Authorization') || req.query.Authorization; if (accessToken && accessToken !== '') { logger.log('info', 'Trying authentication with accessToken=[%s]', accessToken); User.findByAccessToken(accessToken, function (err, user) { if (user && user.hasAccessToken(accessToken)) { logger.log('info', 'Access token authentication successful for user=[%s]', user.username); req.accessToken = accessToken.replace('Justbrew ', ''); req.user = user; return authenticate(req, res, next); //valid access token } logger.log('warn', 'Access token authentication invalid for token=[%s]', accessToken); return res.send(httpStatus.UNAUTHORIZED); //invalid access token }); return; } next(); } function authenticate(req, res) { var accessToken = req.accessToken || req.user.newAccessToken(); var user = { id: req.user.id, name: req.user.name, email: req.user.email, username: req.user.username }; var ret = { user: user, accessToken: accessToken }; res.format({ html: function () { return res.redirect('/?Authorization=' + accessToken); }, json: function () { return res.json(httpStatus.OK, ret); } }); } exports.authenticate = authenticate;<|fim▁end|>
return checkAccessTokenAuth(req, res, function () { res.send(httpStatus.BAD_REQUEST, { errors: errors }); });
<|file_name|>net.js<|end_file_name|><|fim▁begin|>"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); const tslib_1 = require("tslib"); const diagnostics = tslib_1.__importStar(require("./diagnostics")); const log = diagnostics.log; // new diagnostics.Logger( __filename ); //import compression = require("./compression"); //export import axios = require("axios"); const axios = tslib_1.__importStar(require("axios")); exports.axios = axios; const remote_http_endpoint_1 = require("./_net/remote-http-endpoint"); exports.RemoteHttpEndpoint = remote_http_endpoint_1.RemoteHttpEndpoint; //module _test { // describe(__filename, () => { // describe("EzEndpoint", () => { // describe("success cases", () => { // let test: any = it("basic ezEndpoint, roundtrip phantomjscloud", () => { // const testEzEndpoint = new EzEndpoint<any, any>({ origin: "http://phantomjscloud.com", path: "/api/browser/v2/a-demo-key-with-low-quota-per-ip-address/" }, { timeout: 3000, interval: 100, backoff: 3 }, {}, ); // const targetUrl = "https://example.com"; // const requestPayload = { // pages: [ // { // url: targetUrl, // renderType: "html", // outputAsJson: true, // } // ], // }; // return testEzEndpoint.post(requestPayload) // .then((response) => { // log.assert(response.status === 200, "should get success response", { response }); // log.assert(targetUrl === response.data.pageResponses[0].pageRequest.url, "response contents should contain a value of response.data.pageResponses[0].pageRequest.url that matchest targetUrl", { targetUrl, gotTargetUrl: response.data.pageResponses[0].pageRequest.url, response }); // }, (err) => { // const axiosErr = err as _axiosDTs.AxiosErrorResponse<void>; // throw log.error("did not expect an axiosErr", { err }); // }); // }); // // set timeout increase (default=2000ms) https://mochajs.org/#timeouts // test.timeout(5000); // }); // describe("fail cases", () => { // let test: any = it("basic retry, 429 error", () => { // const testEzEndpoint = 
new EzEndpoint<void, void>({ origin: "http://phantomjscloud.com", path: "/examples/helpers/statusCode/429" }, { timeout: 1000, interval: 100, backoff: 3 }, {}, ); // return testEzEndpoint.post() // .then((response) => { // throw log.errorAndThrowIfFalse(response.status === 429, "should have failed with 429 response", { response }); // }, (err) => { // const axiosErr = err as _axiosDTs.AxiosErrorResponse<void>; // if (axiosErr.response != null) { // log.assert(axiosErr.response.status === 429, "should have failed with 429 response", { axiosErr }); // } else { // throw log.error("expected a axiosErr but didn't get one", { err }) // } // }); // }) // // set timeout increase (default=2000ms) https://mochajs.org/#timeouts // test.timeout(5000); // test = it("invalid domain", () => { // const testEzEndpoint = new EzEndpoint<void, void>({ origin: "http://asdfasdfasdfasetasgoud.com", path: "/examples/helpers/statusCode/429" }, { timeout: 1000, interval: 100, backoff: 3 }, {}, ); // return testEzEndpoint.post() // .then((response) => { // throw log.errorAndThrowIfFalse(response.status === 429, "should have failed with 429 response", { response }); // }, (err) => { // //TODO: describe EzEndpoint fail error type, and add error definitions to bluebird // // // // // export interface AxiosErrorResponse<T> extends Error { // // // // // /** inherited from the Error object*/ // // // // // name: "Error"; // // // // // /**human readable error message, such as ```getaddrinfo ENOTFOUND moo moo:443``` or ```Request failed with status code 401``` */ // // // // // message: string; // // // // // /** // // // // // * config that was provided to `axios` for the request // // // // // */ // // // // // config: AxiosXHRConfig<T>; // // // // // /** The server response. ```undefined``` if no response from server (such as invalid url or network timeout */ // // // // // response?: AxiosXHR<T>; // // // // // /** example ```ETIMEDOUT```, but only set if unable to get response from server. 
otherwise does not exist (not even undefined!). */ // // // // // code?: string; // // // // // /** only set if unable to get response from server. otherwise does not exist (not even undefined!). */ // // // // // failure?:{ // // // // // name:string; // // // // // /**human readable error message, such as ```getaddrinfo ENOTFOUND moo moo:443``` or ```Request failed with status code 401``` */ // // // // // message: string; // // // // // /** example ```ENOTFOUND```, but only set if unable to get response from server. otherwise does not exist (not even undefined!). */ // // // // // code: string; // // // // // /** example ```ENOTFOUND```, but only set if unable to get response from server. otherwise does not exist (not even undefined!). */ // // // // // errno: string; // // // // // /** example ```getaddrinfo```, but only set if unable to get response from server. otherwise does not exist (not even undefined!). */ // // // // // syscall: string; // // // // // /** only set if unable to get response from server. otherwise does not exist (not even undefined!). */ // // // // // hostname: string; // // // // // /** only set if unable to get response from server. otherwise does not exist (not even undefined!). */ // // // // // host: string; // // // // // /** only set if unable to get response from server. otherwise does not exist (not even undefined!). 
*/ // // // // // port: number; // // // // // }; // }); // }) // // set timeout increase (default=2000ms) https://mochajs.org/#timeouts // test.timeout(5000); // }); // }); // describe("axios", () => { // const targetUrl = "http://phantomjscloud.com/examples/helpers/requestdata"; // const samplePostPayload1 = { hi: 1, bye: "two", inner: { three: 4 } }; // const sampleHeader1 = { head1: "val1" }; // describe("success cases", () => { // it("basic e2e", () => { // return axios.post(targetUrl, samplePostPayload1, { headers: sampleHeader1, responseType: "json" }) // .then((axiosResponse) => { // log.assert(axiosResponse.config != null, "missing property", { axiosResponse }); // log.assert(axiosResponse.data != null, "missing property", { axiosResponse }); // log.assert(axiosResponse.headers != null, "missing property", { axiosResponse }); // log.assert(axiosResponse.status != null, "missing property", { axiosResponse }); // log.assert(axiosResponse.status != null, "missing property", { axiosResponse }); // log.assert(axiosResponse.statusText != null, "missing property", { axiosResponse }); // log.assert(axiosResponse.status === 200, "status code wrong", { axiosResponse }); // return Promise.resolve(); // }); // }); // }); // describe("fail cases", () => { // it("basic fail e2e", () => { // return axios.post("http://phantomjscloud.com/examples/helpers/statusCode/400", samplePostPayload1, { headers: sampleHeader1, responseType: "json" }) // .then((axiosResponse) => { // throw log.error("should have failed with 400 error", { badUrl, axiosResponse }); // }) // .catch((err: _axiosDTs.AxiosErrorResponse<any>) => { // if (err.response == null) { // throw log.error("response should be defined", { err }); // } // log.assert(err.config != null, "missing property config", { err }); // log.assert(err.message != null, "missing property message", { err }); // log.assert(err.name != null, "missing property name", { err }); // log.assert(err.response != null, "missing property 
response", { err }); // log.assert(err.stack != null, "missing property stack", { err }); // log.assert(err.response.config != null, "missing property response.config", { err }); // log.assert(err.response.data != null, "missing property response.data", { err }); // log.assert(err.response.headers != null, "missing property response.headers", { err }); // log.assert(err.response.status != null, "missing property response.status ", { err }); // log.assert(err.response.statusText != null, "missing property response.statusText", { err }); // log.assert(err.response.status === 400, "wrong status code.", { err }); // return Promise.resolve(); // }); // }); // const badUrl = "http://moo"; // let test: any = it("invlid url", () => { // return axios.post(badUrl, samplePostPayload1, { headers: sampleHeader1, responseType: "json" }) // .then((axiosResponse) => { // throw log.error("should have failed with invalid url", { badUrl, axiosResponse }); // }) // .catch((err: _axiosDTs.AxiosErrorResponse<any>) => { // //log.info("got error as expected", { err }); // return Promise.resolve(); // }); // }); // // set timeout increase (default=2000ms) https://mochajs.org/#timeouts // test.timeout(5000);<|fim▁hole|>// return axios.post("http://phantomjscloud.com/examples/helpers/statusCode/401", samplePostPayload1, { headers: sampleHeader1, responseType: "json" }) // .then((axiosResponse) => { // throw log.error("should have failed with 401 error", { badUrl, axiosResponse }); // }) // .catch((err: _axiosDTs.AxiosErrorResponse<any>) => { // if (err.response == null) { // throw log.error("response should be defined", { err }); // } // log.assert(err.response.status === 401, "wrong status code.", { err }); // return Promise.resolve(); // }); // }); // it("status 429 response", () => { // return axios.post("http://phantomjscloud.com/examples/helpers/statusCode/429", samplePostPayload1, { headers: sampleHeader1, responseType: "json" }) // .then((axiosResponse) => { // throw log.error("should 
have failed with 429 error", { badUrl, axiosResponse }); // }) // .catch((err: _axiosDTs.AxiosErrorResponse<any>) => { // if (err.response == null) { // throw log.error("response should be defined", { err }); // } // log.assert(err.response.status === 429, "wrong status code.", { err }); // return Promise.resolve(); // }); // }); // it("status 500 response", () => { // return axios.post("http://phantomjscloud.com/examples/helpers/statusCode/500", samplePostPayload1, { headers: sampleHeader1, responseType: "json" }) // .then((axiosResponse) => { // throw log.error("should have failed with 500 error", { badUrl, axiosResponse }); // }) // .catch((err: _axiosDTs.AxiosErrorResponse<any>) => { // if (err.response == null) { // throw log.error("response should be defined", { err }); // } // log.assert(err.response.status === 500, "wrong status code.", { err }); // return Promise.resolve(); // }); // }); // it("status 503 response", () => { // return axios.post("http://phantomjscloud.com/examples/helpers/statusCode/503", samplePostPayload1, { headers: sampleHeader1, responseType: "json" }) // .then((axiosResponse) => { // throw log.error("should have failed with 503 error", { badUrl, axiosResponse }); // }) // .catch((err: _axiosDTs.AxiosErrorResponse<any>) => { // if (err.response == null) { // throw log.error("response should be defined", { err }); // } // log.assert(err.response.status === 503, "wrong status code.", { err }); // return Promise.resolve(); // }); // }); // //it("network timeout", () => { // // return axios.post("https://localhost:827", samplePostPayload1, { headers: sampleHeader1, responseType: "json" }) // // .then((axiosResponse) => { // // throw log.error("should have failed with 500 error", { badUrl, axiosResponse }); // // }) // // .catch((err: _axiosDTs.AxiosErrorResponse<any>) => { // // if (err.response == null) { // // throw log.error("response should be defined", { err }); // // } // // log.assert(err.response.status === 500, "wrong status 
code.", { err }); // // return Promise.resolve(); // // }); // //}); // }); // }); // }); //} //# sourceMappingURL=net.js.map<|fim▁end|>
// it("status 401 response", () => {
<|file_name|>this-3.js<|end_file_name|><|fim▁begin|>// node/this-3.js<|fim▁hole|>var object = { id: "xyz", printId: function() { console.log('The id is '+ this.id + ' ' + this.toString()); } }; // setTimeout(object.printId, 100); var callback = object.printId; callback();<|fim▁end|>
<|file_name|>populate_visualization_cache.py<|end_file_name|><|fim▁begin|>import logging <|fim▁hole|>from catalog.core.visualization.data_access import visualization_cache logger = logging.getLogger(__name__) class Command(BaseCommand): help = '''Build pandas dataframe cache of primary data''' def handle(self, *args, **options): visualization_cache.get_or_create_many()<|fim▁end|>
from django.core.management.base import BaseCommand
<|file_name|>reconnecting.js<|end_file_name|><|fim▁begin|>module.exports = client => { //eslint-disable-line no-unused-vars console.log(`Reconnecting... [at ${new Date()}]`);<|fim▁hole|><|fim▁end|>
};
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>""" Django settings for school project. For more information on this file, see https://docs.djangoproject.com/en/1.7/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.7/ref/settings/ """ # Build paths inside the project like this: os.path.join(BASE_DIR, ...) import os BASE_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__))) TEMPLATE_DIRS = ( os.path.join(BASE_DIR, '../templates'), ) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'koeorn$p_9&6!%1!84=erv*)#40-f$&z+_hq1^a1+2#93_ev%y' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True TEMPLATE_DEBUG = True<|fim▁hole|>ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = ( # 'django.contrib.admin', # 'django.contrib.auth', # 'django.contrib.contenttypes', # 'django.contrib.sessions', # 'django.contrib.messages', # 'django.contrib.staticfiles', ) MIDDLEWARE_CLASSES = ( 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ) ROOT_URLCONF = 'school.urls' WSGI_APPLICATION = 'school.wsgi.application' # Database # https://docs.djangoproject.com/en/1.7/ref/settings/#databases # DATABASES = { # 'default': { # 'ENGINE': 'django.db.backends.sqlite3', # 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), # } # } # Internationalization # https://docs.djangoproject.com/en/1.7/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'asia/chongqing' USE_I18N = True USE_L10N = True USE_TZ = True # Static 
files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.7/howto/static-files/ STATIC_URL = '/static/'<|fim▁end|>
<|file_name|>models.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2013-2014 OpenERP (<http://www.openerp.com>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import datetime from openerp.exceptions import AccessError ############################################################################## # # OLD API # ############################################################################## from openerp.osv import osv, fields class res_partner(osv.Model): _inherit = 'res.partner' # # add related fields to test them # _columns = { # a regular one 'related_company_partner_id': fields.related( 'company_id', 'partner_id', type='many2one', obj='res.partner'), # a related field with a single field 'single_related_company_id': fields.related( 'company_id', type='many2one', obj='res.company'), # a related field with a single field that is also a related field! 
'related_related_company_id': fields.related( 'single_related_company_id', type='many2one', obj='res.company'), } class TestFunctionCounter(osv.Model): _name = 'test_old_api.function_counter' def _compute_cnt(self, cr, uid, ids, fname, arg, context=None): res = {} for cnt in self.browse(cr, uid, ids, context=context): res[cnt.id] = cnt.access and cnt.cnt + 1 or 0 return res _columns = { 'access': fields.datetime('Datetime Field'), 'cnt': fields.function( _compute_cnt, type='integer', string='Function Field', store=True), } class TestFunctionNoInfiniteRecursion(osv.Model): _name = 'test_old_api.function_noinfiniterecursion' def _compute_f1(self, cr, uid, ids, fname, arg, context=None): res = {} for tf in self.browse(cr, uid, ids, context=context): res[tf.id] = 'create' in tf.f0 and 'create' or 'write' cntobj = self.pool['test_old_api.function_counter'] cnt_id = self.pool['ir.model.data'].xmlid_to_res_id( cr, uid, 'test_new_api.c1') cntobj.write( cr, uid, cnt_id, {'access': datetime.datetime.now()}, context=context) return res _columns = { 'f0': fields.char('Char Field'), 'f1': fields.function( _compute_f1, type='char', string='Function Field', store=True), } ############################################################################## # # NEW API # ############################################################################## from openerp import models, fields, api, _ class Category(models.Model): _name = 'test_new_api.category' name = fields.Char(required=True) parent = fields.Many2one('test_new_api.category') display_name = fields.Char(compute='_compute_display_name', inverse='_inverse_display_name') discussions = fields.Many2many('test_new_api.discussion', 'test_new_api_discussion_category', 'category', 'discussion') @api.one @api.depends('name', 'parent.display_name') # this definition is recursive def _compute_display_name(self): if self.parent: self.display_name = self.parent.display_name + ' / ' + self.name else: self.display_name = self.name @api.one def 
_inverse_display_name(self): names = self.display_name.split('/') # determine sequence of categories categories = [] for name in names[:-1]: category = self.search([('name', 'ilike', name.strip())]) categories.append(category[0]) categories.append(self) # assign parents following sequence for parent, child in zip(categories, categories[1:]): if parent and child: child.parent = parent # assign name of last category, and reassign display_name (to normalize it) self.name = names[-1].strip() def read(self, fields=None, load='_classic_read'): if self.search_count([('id', 'in', self._ids), ('name', '=', 'NOACCESS')]): raise AccessError('Sorry') return super(Category, self).read(fields, load) class Discussion(models.Model): _name = 'test_new_api.discussion' name = fields.Char(string='Title', required=True, help="General description of what this discussion is about.") moderator = fields.Many2one('res.users') categories = fields.Many2many('test_new_api.category', 'test_new_api_discussion_category', 'discussion', 'category') participants = fields.Many2many('res.users') messages = fields.One2many('test_new_api.message', 'discussion') message_changes = fields.Integer(string='Message changes') important_messages = fields.One2many('test_new_api.message', 'discussion', domain=[('important', '=', True)]) @api.onchange('moderator') def _onchange_moderator(self): self.participants |= self.moderator @api.onchange('messages') def _onchange_messages(self): self.message_changes = len(self.messages) class Message(models.Model): _name = 'test_new_api.message' discussion = fields.Many2one('test_new_api.discussion', ondelete='cascade') body = fields.Text() author = fields.Many2one('res.users', default=lambda self: self.env.user) name = fields.Char(string='Title', compute='_compute_name', store=True) display_name = fields.Char(string='Abstract', compute='_compute_display_name') size = fields.Integer(compute='_compute_size', search='_search_size') double_size = 
fields.Integer(compute='_compute_double_size') discussion_name = fields.Char(related='discussion.name') author_partner = fields.Many2one( 'res.partner', compute='_compute_author_partner', search='_search_author_partner') important = fields.Boolean() @api.one @api.constrains('author', 'discussion') def _check_author(self): if self.discussion and self.author not in self.discussion.participants: raise ValueError(_("Author must be among the discussion participants.")) @api.one @api.depends('author.name', 'discussion.name') def _compute_name(self): self.name = "[%s] %s" % (self.discussion.name or '', self.author.name or '') @api.one @api.depends('author.name', 'discussion.name', 'body') def _compute_display_name(self): stuff = "[%s] %s: %s" % (self.author.name, self.discussion.name or '', self.body or '') self.display_name = stuff[:80] @api.one @api.depends('body') def _compute_size(self): self.size = len(self.body or '') def _search_size(self, operator, value): if operator not in ('=', '!=', '<', '<=', '>', '>=', 'in', 'not in'): return [] # retrieve all the messages that match with a specific SQL query query = """SELECT id FROM "%s" WHERE char_length("body") %s %%s""" % \ (self._table, operator) self.env.cr.execute(query, (value,)) ids = [t[0] for t in self.env.cr.fetchall()] return [('id', 'in', ids)] @api.one @api.depends('size') def _compute_double_size(self): # This illustrates a subtle situation: self.double_size depends on # self.size. When size is computed, self.size is assigned, which should # normally invalidate self.double_size. However, this may not happen # while self.double_size is being computed: the last statement below # would fail, because self.double_size would be undefined. 
self.double_size = 0 size = self.size self.double_size = self.double_size + size @api.one @api.depends('author', 'author.partner_id') def _compute_author_partner(self): self.author_partner = author.partner_id @api.model def _search_author_partner(self, operator, value): return [('author.partner_id', operator, value)] class Multi(models.Model): """ Model for testing multiple onchange methods in cascade that modify a one2many field several times. """ _name = 'test_new_api.multi' name = fields.Char(related='partner.name', readonly=True) partner = fields.Many2one('res.partner') lines = fields.One2many('test_new_api.multi.line', 'multi') @api.onchange('name') def _onchange_name(self): for line in self.lines: line.name = self.name @api.onchange('partner') def _onchange_partner(self): for line in self.lines: line.partner = self.partner class MultiLine(models.Model): _name = 'test_new_api.multi.line' multi = fields.Many2one('test_new_api.multi', ondelete='cascade') name = fields.Char() partner = fields.Many2one('res.partner') class MixedModel(models.Model): _name = 'test_new_api.mixed' number = fields.Float(digits=(10, 2), default=3.14) date = fields.Date() now = fields.Datetime(compute='_compute_now') lang = fields.Selection(string='Language', selection='_get_lang') reference = fields.Reference(string='Related Document', selection='_reference_models') @api.one def _compute_now(self): # this is a non-stored computed field without dependencies self.now = fields.Datetime.now() @api.model def _get_lang(self): langs = self.env['res.lang'].search([]) return [(lang.code, lang.name) for lang in langs] @api.model def _reference_models(self): models = self.env['ir.model'].search([('state', '!=', 'manual')]) return [(model.model, model.name)<|fim▁hole|> class BoolModel(models.Model): _name = 'domain.bool' bool_true = fields.Boolean('b1', default=True) bool_false = fields.Boolean('b2', default=False) bool_undefined = fields.Boolean('b3')<|fim▁end|>
for model in models if not model.model.startswith('ir.')]
<|file_name|>odom_reader.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python import rospy from pprint import pformat from tf_conversions import transformations from math import pi from nav_msgs.msg import Odometry class odom_reader: def __init__(self):<|fim▁hole|> """ convert an orientation given in quaternions to an actual angle in degrees for a 2D robot """ def odom_orientation(self, q): y, p, r = transformations.euler_from_quaternion([q.w, q.x, q.y, q.z]) return y * 180 / pi def callback(self, data): print "odom pose: \n" + pformat(data.pose.pose) angle = self.odom_orientation(data.pose.pose.orientation) print "angle = %f" % angle ic = odom_reader() rospy.init_node('odom_reader') rospy.spin()<|fim▁end|>
self.image_sub = rospy.Subscriber("/odom", Odometry, self.callback)
<|file_name|>path_to_enlightenment.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- '''<|fim▁hole|>Path to Enlightenment. ''' import io import unittest # The path to enlightenment starts with the following: KOANS_FILENAME = 'koans.txt' def filter_koan_names(lines): ''' Strips leading and trailing whitespace, then filters out blank lines and comment lines. ''' for line in lines: line = line.strip() if line.startswith('#'): continue if line: yield line return def names_from_file(filename): ''' Opens the given ``filename`` and yields the fully-qualified names of TestCases found inside (one per line). ''' with io.open(filename, 'rt', encoding='utf8') as names_file: for name in filter_koan_names(names_file): yield name return def koans_suite(names): ''' Returns a ``TestSuite`` loaded with all tests found in the given ``names``, preserving the order in which they are found. ''' suite = unittest.TestSuite() loader = unittest.TestLoader() loader.sortTestMethodsUsing = None for name in names: tests = loader.loadTestsFromName(name) suite.addTests(tests) return suite def koans(filename=KOANS_FILENAME): ''' Returns a ``TestSuite`` loaded with all the koans (``TestCase``s) listed in ``filename``. ''' names = names_from_file(filename) return koans_suite(names)<|fim▁end|>
Functions to load the test cases ("koans") that make up the
<|file_name|>iterators.rs<|end_file_name|><|fim▁begin|>#[macro_use(s)] extern crate ndarray; extern crate itertools; use ndarray::{Array0, Array2}; use ndarray::RcArray; use ndarray::{Ix, Si, S}; use ndarray::{ ArrayBase, Data, Dimension, aview1, arr2, arr3, Axis, indices, }; use itertools::assert_equal; use itertools::{rev, enumerate}; #[test] fn double_ended() { let a = RcArray::linspace(0., 7., 8); let mut it = a.iter().map(|x| *x); assert_eq!(it.next(), Some(0.)); assert_eq!(it.next_back(), Some(7.)); assert_eq!(it.next(), Some(1.)); assert_eq!(it.rev().last(), Some(2.)); assert_equal(aview1(&[1, 2, 3]), &[1, 2, 3]); assert_equal(rev(aview1(&[1, 2, 3])), rev(&[1, 2, 3])); } #[test] fn iter_size_hint() { // Check that the size hint is correctly computed let a = RcArray::from_iter(0..24).reshape((2, 3, 4)); let mut data = [0; 24]; for (i, elt) in enumerate(&mut data) { *elt = i as i32; } assert_equal(&a, &data); let mut it = a.iter(); let mut ans = data.iter(); assert_eq!(it.len(), ans.len()); while ans.len() > 0 { assert_eq!(it.next(), ans.next()); assert_eq!(it.len(), ans.len()); } } #[test] fn indexed() { let a = RcArray::linspace(0., 7., 8); for (i, elt) in a.indexed_iter() { assert_eq!(i, *elt as Ix); } let a = a.reshape((2, 4, 1)); let (mut i, mut j, k) = (0, 0, 0); for (idx, elt) in a.indexed_iter() { assert_eq!(idx, (i, j, k)); j += 1; if j == 4 { j = 0; i += 1; } println!("{:?}", (idx, elt)); } } fn assert_slice_correct<A, S, D>(v: &ArrayBase<S, D>) where S: Data<Elem=A>, D: Dimension, A: PartialEq + std::fmt::Debug, { let slc = v.as_slice(); assert!(slc.is_some()); let slc = slc.unwrap(); assert_eq!(v.len(), slc.len()); assert_equal(v.iter(), slc); } #[test] fn as_slice() { let a = RcArray::linspace(0., 7., 8); let a = a.reshape((2, 4, 1)); assert_slice_correct(&a); let a = a.reshape((2, 4)); assert_slice_correct(&a); assert!(a.view().subview(Axis(1), 0).as_slice().is_none()); let v = a.view(); assert_slice_correct(&v); 
assert_slice_correct(&v.subview(Axis(0), 0));<|fim▁hole|> assert!(v.slice(&[Si(0, Some(1), 2), S]).as_slice().is_some()); // `u` is contiguous, because the column stride of `2` doesn't matter // when the result is just one row anyway -- length of that dimension is 1 let u = v.slice(&[Si(0, Some(1), 2), S]); println!("{:?}", u.shape()); println!("{:?}", u.strides()); println!("{:?}", v.slice(&[Si(0, Some(1), 2), S])); assert!(u.as_slice().is_some()); assert_slice_correct(&u); let a = a.reshape((8, 1)); assert_slice_correct(&a); let u = a.slice(&[Si(0, None, 2), S]); println!("u={:?}, shape={:?}, strides={:?}", u, u.shape(), u.strides()); assert!(u.as_slice().is_none()); } #[test] fn inner_iter() { let a = RcArray::from_iter(0..12); let a = a.reshape((2, 3, 2)); // [[[0, 1], // [2, 3], // [4, 5]], // [[6, 7], // [8, 9], // ... assert_equal(a.inner_iter(), vec![aview1(&[0, 1]), aview1(&[2, 3]), aview1(&[4, 5]), aview1(&[6, 7]), aview1(&[8, 9]), aview1(&[10, 11])]); let mut b = RcArray::zeros((2, 3, 2)); b.swap_axes(0, 2); b.assign(&a); assert_equal(b.inner_iter(), vec![aview1(&[0, 1]), aview1(&[2, 3]), aview1(&[4, 5]), aview1(&[6, 7]), aview1(&[8, 9]), aview1(&[10, 11])]); } #[test] fn inner_iter_corner_cases() { let a0 = RcArray::zeros(()); assert_equal(a0.inner_iter(), vec![aview1(&[0])]); let a2 = RcArray::<i32, _>::zeros((0, 3)); assert_equal(a2.inner_iter(), vec![aview1(&[]); 0]); let a2 = RcArray::<i32, _>::zeros((3, 0)); assert_equal(a2.inner_iter(), vec![aview1(&[]); 3]); } #[test] fn inner_iter_size_hint() { // Check that the size hint is correctly computed let a = RcArray::from_iter(0..24).reshape((2, 3, 4)); let mut len = 6; let mut it = a.inner_iter(); assert_eq!(it.len(), len); while len > 0 { it.next(); len -= 1; assert_eq!(it.len(), len); } } #[allow(deprecated)] // into_outer_iter #[test] fn outer_iter() { let a = RcArray::from_iter(0..12); let a = a.reshape((2, 3, 2)); // [[[0, 1], // [2, 3], // [4, 5]], // [[6, 7], // [8, 9], // ... 
assert_equal(a.outer_iter(), vec![a.subview(Axis(0), 0), a.subview(Axis(0), 1)]); let mut b = RcArray::zeros((2, 3, 2)); b.swap_axes(0, 2); b.assign(&a); assert_equal(b.outer_iter(), vec![a.subview(Axis(0), 0), a.subview(Axis(0), 1)]); let mut found_rows = Vec::new(); for sub in b.outer_iter() { for row in sub.into_outer_iter() { found_rows.push(row); } } assert_equal(a.inner_iter(), found_rows.clone()); let mut found_rows_rev = Vec::new(); for sub in b.outer_iter().rev() { for row in sub.into_outer_iter().rev() { found_rows_rev.push(row); } } found_rows_rev.reverse(); assert_eq!(&found_rows, &found_rows_rev); // Test a case where strides are negative instead let mut c = RcArray::zeros((2, 3, 2)); let mut cv = c.slice_mut(s![..;-1, ..;-1, ..;-1]); cv.assign(&a); assert_eq!(&a, &cv); assert_equal(cv.outer_iter(), vec![a.subview(Axis(0), 0), a.subview(Axis(0), 1)]); let mut found_rows = Vec::new(); for sub in cv.outer_iter() { for row in sub.into_outer_iter() { found_rows.push(row); } } println!("{:#?}", found_rows); assert_equal(a.inner_iter(), found_rows); } #[test] fn axis_iter() { let a = RcArray::from_iter(0..12); let a = a.reshape((2, 3, 2)); // [[[0, 1], // [2, 3], // [4, 5]], // [[6, 7], // [8, 9], // ... assert_equal(a.axis_iter(Axis(1)), vec![a.subview(Axis(1), 0), a.subview(Axis(1), 1), a.subview(Axis(1), 2)]); } #[test] fn outer_iter_corner_cases() { let a2 = RcArray::<i32, _>::zeros((0, 3)); assert_equal(a2.outer_iter(), vec![aview1(&[]); 0]); let a2 = RcArray::<i32, _>::zeros((3, 0)); assert_equal(a2.outer_iter(), vec![aview1(&[]); 3]); } #[allow(deprecated)] #[test] fn outer_iter_mut() { let a = RcArray::from_iter(0..12); let a = a.reshape((2, 3, 2)); // [[[0, 1], // [2, 3], // [4, 5]], // [[6, 7], // [8, 9], // ... 
let mut b = RcArray::zeros((2, 3, 2)); b.swap_axes(0, 2); b.assign(&a); assert_equal(b.outer_iter_mut(), vec![a.subview(Axis(0), 0), a.subview(Axis(0), 1)]); let mut found_rows = Vec::new(); for sub in b.outer_iter_mut() { for row in sub.into_outer_iter() { found_rows.push(row); } } assert_equal(a.inner_iter(), found_rows); } #[test] fn axis_iter_mut() { let a = RcArray::from_iter(0..12); let a = a.reshape((2, 3, 2)); // [[[0, 1], // [2, 3], // [4, 5]], // [[6, 7], // [8, 9], // ... let mut a = a.to_owned(); for mut subview in a.axis_iter_mut(Axis(1)) { subview[[0, 0]] = 42; } let b = arr3(&[[[42, 1], [42, 3], [42, 5]], [[6, 7], [8, 9], [10, 11]]]); assert_eq!(a, b); } #[test] fn axis_chunks_iter() { let a = RcArray::from_iter(0..24); let a = a.reshape((2, 6, 2)); let it = a.axis_chunks_iter(Axis(1), 2); assert_equal(it, vec![arr3(&[[[0, 1], [2, 3]], [[12, 13], [14, 15]]]), arr3(&[[[4, 5], [6, 7]], [[16, 17], [18, 19]]]), arr3(&[[[8, 9], [10, 11]], [[20, 21], [22, 23]]])]); let a = RcArray::from_iter(0..28); let a = a.reshape((2, 7, 2)); let it = a.axis_chunks_iter(Axis(1), 2); assert_equal(it, vec![arr3(&[[[0, 1], [2, 3]], [[14, 15], [16, 17]]]), arr3(&[[[4, 5], [6, 7]], [[18, 19], [20, 21]]]), arr3(&[[[8, 9], [10, 11]], [[22, 23], [24, 25]]]), arr3(&[[[12, 13]], [[26, 27]]])]); let it = a.axis_chunks_iter(Axis(1), 2).rev(); assert_equal(it, vec![arr3(&[[[12, 13]], [[26, 27]]]), arr3(&[[[8, 9], [10, 11]], [[22, 23], [24, 25]]]), arr3(&[[[4, 5], [6, 7]], [[18, 19], [20, 21]]]), arr3(&[[[0, 1], [2, 3]], [[14, 15], [16, 17]]])]); let it = a.axis_chunks_iter(Axis(1), 7); assert_equal(it, vec![a.view()]); let it = a.axis_chunks_iter(Axis(1), 9); assert_equal(it, vec![a.view()]); } #[test] fn axis_chunks_iter_corner_cases() { // examples provided by @bluss in PR #65 // these tests highlight corner cases of the axis_chunks_iter implementation // and enable checking if no pointer offseting is out of bounds. 
However // checking the absence of of out of bounds offseting cannot (?) be // done automatically, so one has to launch this test in a debugger. let a = RcArray::<f32, _>::linspace(0., 7., 8).reshape((8, 1)); let it = a.axis_chunks_iter(Axis(0), 4); assert_equal(it, vec![a.slice(s![..4, ..]), a.slice(s![4.., ..])]); let a = a.slice(s![..;-1,..]); let it = a.axis_chunks_iter(Axis(0), 8); assert_equal(it, vec![a.view()]); let it = a.axis_chunks_iter(Axis(0), 3); assert_equal(it, vec![arr2(&[[7.], [6.], [5.]]), arr2(&[[4.], [3.], [2.]]), arr2(&[[1.], [0.]])]); let b = RcArray::<f32, _>::zeros((8, 2)); let a = b.slice(s![1..;2,..]); let it = a.axis_chunks_iter(Axis(0), 8); assert_equal(it, vec![a.view()]); let it = a.axis_chunks_iter(Axis(0), 1); assert_equal(it, vec![RcArray::zeros((1, 2)); 4]); } #[test] fn axis_chunks_iter_mut() { let a = RcArray::from_iter(0..24); let mut a = a.reshape((2, 6, 2)); let mut it = a.axis_chunks_iter_mut(Axis(1), 2); let mut col0 = it.next().unwrap(); col0[[0, 0, 0]] = 42; assert_eq!(col0, arr3(&[[[42, 1], [2, 3]], [[12, 13], [14, 15]]])); } #[test] fn outer_iter_size_hint() { // Check that the size hint is correctly computed let a = RcArray::from_iter(0..24).reshape((4, 3, 2)); let mut len = 4; let mut it = a.outer_iter(); assert_eq!(it.len(), len); while len > 0 { it.next(); len -= 1; assert_eq!(it.len(), len); } // now try the double ended case let mut it = a.outer_iter(); it.next_back(); let mut len = 3; while len > 0 { it.next(); len -= 1; assert_eq!(it.len(), len); } let mut it = a.outer_iter(); it.next(); let mut len = 3; while len > 0 { it.next_back(); len -= 1; assert_eq!(it.len(), len); } } #[test] fn outer_iter_split_at() { let a = RcArray::from_iter(0..30).reshape((5, 3, 2)); let it = a.outer_iter(); let (mut itl, mut itr) = it.clone().split_at(2); assert_eq!(itl.next().unwrap()[[2, 1]], 5); assert_eq!(itl.next().unwrap()[[2, 1]], 11); assert_eq!(itl.next(), None); assert_eq!(itr.next().unwrap()[[2, 1]], 17); 
assert_eq!(itr.next().unwrap()[[2, 1]], 23); assert_eq!(itr.next().unwrap()[[2, 1]], 29); assert_eq!(itr.next(), None); // split_at on length should yield an empty iterator // on the right part let (_, mut itr) = it.split_at(5); assert_eq!(itr.next(), None); } #[test] #[should_panic] fn outer_iter_split_at_panics() { let a = RcArray::from_iter(0..30).reshape((5, 3, 2)); let it = a.outer_iter(); it.split_at(6); } #[test] fn outer_iter_mut_split_at() { let mut a = RcArray::from_iter(0..30).reshape((5, 3, 2)); { let it = a.outer_iter_mut(); let (mut itl, mut itr) = it.split_at(2); itl.next(); itl.next().unwrap()[[2, 1]] += 1; // now this value is 12 assert_eq!(itl.next(), None); itr.next(); itr.next(); itr.next().unwrap()[[2, 1]] -= 1; // now this value is 28 assert_eq!(itr.next(), None); } assert_eq!(a[[1, 2, 1]], 12); assert_eq!(a[[4, 2, 1]], 28); } #[test] fn iterators_are_send_sync() { // When the element type is Send + Sync, then the iterators and views // are too. fn _send_sync<T: Send + Sync>(_: &T) { } let mut a = RcArray::from_iter(0..30).into_shape((5, 3, 2)).unwrap(); _send_sync(&a.view()); _send_sync(&a.view_mut()); _send_sync(&a.iter()); _send_sync(&a.iter_mut()); _send_sync(&a.indexed_iter()); _send_sync(&a.indexed_iter_mut()); _send_sync(&a.inner_iter()); _send_sync(&a.inner_iter_mut()); _send_sync(&a.outer_iter()); _send_sync(&a.outer_iter_mut()); _send_sync(&a.axis_iter(Axis(1))); _send_sync(&a.axis_iter_mut(Axis(1))); _send_sync(&a.axis_chunks_iter(Axis(1), 1)); _send_sync(&a.axis_chunks_iter_mut(Axis(1), 1)); _send_sync(&indices(a.dim())); } #[test] fn test_fold() { let mut a = Array2::<i32>::default((20, 20)); a += 1; let mut iter = a.iter(); iter.next(); assert_eq!(iter.fold(0, |acc, &x| acc + x), a.scalar_sum() - 1); let mut a = Array0::<i32>::default(()); a += 1; assert_eq!(a.iter().fold(0, |acc, &x| acc + x), 1); }<|fim▁end|>
assert_slice_correct(&v.subview(Axis(0), 1)); assert!(v.slice(&[S, Si(0, Some(1), 1)]).as_slice().is_none()); println!("{:?}", v.slice(&[Si(0, Some(1), 2), S]));
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python #------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. #-------------------------------------------------------------------------- import re import os.path from io import open from setuptools import find_packages, setup # Change the PACKAGE_NAME only to change folder and different name PACKAGE_NAME = "azure-mgmt-servicefabric" PACKAGE_PPRINT_NAME = "Service Fabric Management" # a-b-c => a/b/c package_folder_path = PACKAGE_NAME.replace('-', '/') # a-b-c => a.b.c namespace_name = PACKAGE_NAME.replace('-', '.') # Version extraction inspired from 'requests' with open(os.path.join(package_folder_path, 'version.py') if os.path.exists(os.path.join(package_folder_path, 'version.py')) else os.path.join(package_folder_path, '_version.py'), 'r') as fd: version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]', fd.read(), re.MULTILINE).group(1) if not version: raise RuntimeError('Cannot find version information') with open('README.md', encoding='utf-8') as f: readme = f.read() with open('CHANGELOG.md', encoding='utf-8') as f: changelog = f.read() setup( name=PACKAGE_NAME,<|fim▁hole|> version=version, description='Microsoft Azure {} Client Library for Python'.format(PACKAGE_PPRINT_NAME), long_description=readme + '\n\n' + changelog, long_description_content_type='text/markdown', license='MIT License', author='Microsoft Corporation', author_email='[email protected]', url='https://github.com/Azure/azure-sdk-for-python', keywords="azure, azure sdk", # update with search keywords relevant to the azure service / product classifiers=[ 'Development Status :: 4 - Beta', 'Programming Language :: Python', 'Programming Language :: Python :: 3 :: Only', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.6', 'Programming 
Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', 'License :: OSI Approved :: MIT License', ], zip_safe=False, packages=find_packages(exclude=[ 'tests', # Exclude packages that will be covered by PEP420 or nspkg 'azure', 'azure.mgmt', ]), install_requires=[ 'msrest>=0.6.21', 'azure-common~=1.1', 'azure-mgmt-core>=1.3.0,<2.0.0', ], python_requires=">=3.6" )<|fim▁end|>
<|file_name|>projection-body.rs<|end_file_name|><|fim▁begin|>// Test that when we infer the lifetime to a subset of the fn body, it // works out. // // compile-pass trait MyTrait<'a> { type Output; } fn foo1<T>() where for<'x> T: MyTrait<'x>,<|fim▁hole|> let x = bar::<T::Output>(); drop(x); } fn bar<'a, T>() -> &'a () where T: 'a, { &() } fn main() {}<|fim▁end|>
{ // Here the region `'c` in `<T as MyTrait<'c>>::Output` will be // inferred to a subset of the fn body.
<|file_name|>element_wrapper.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! A wrapper over an element and a snapshot, that allows us to selector-match //! against a past state of the element. use {Atom, CaseSensitivityExt, LocalName, Namespace, WeakAtom}; use dom::TElement; use element_state::ElementState; use selector_parser::{NonTSPseudoClass, PseudoElement, SelectorImpl, Snapshot, SnapshotMap, AttrValue}; use selectors::{Element, OpaqueElement}; use selectors::attr::{AttrSelectorOperation, CaseSensitivity, NamespaceConstraint}; use selectors::matching::{ElementSelectorFlags, MatchingContext}; use std::cell::Cell; use std::fmt; /// In order to compute restyle hints, we perform a selector match against a /// list of partial selectors whose rightmost simple selector may be sensitive /// to the thing being changed. We do this matching twice, once for the element /// as it exists now and once for the element as it existed at the time of the /// last restyle. If the results of the selector match differ, that means that /// the given partial selector is sensitive to the change, and we compute a /// restyle hint based on its combinator. /// /// In order to run selector matching against the old element state, we generate /// a wrapper for the element which claims to have the old state. This is the /// ElementWrapper logic below. /// /// Gecko does this differently for element states, and passes a mask called /// mStateMask, which indicates the states that need to be ignored during /// selector matching. This saves an ElementWrapper allocation and an additional /// selector match call at the expense of additional complexity inside the /// selector matching logic. 
This only works for boolean states though, so we /// still need to take the ElementWrapper approach for attribute-dependent /// style. So we do it the same both ways for now to reduce complexity, but it's /// worth measuring the performance impact (if any) of the mStateMask approach. pub trait ElementSnapshot : Sized { /// The state of the snapshot, if any. fn state(&self) -> Option<ElementState>; /// If this snapshot contains attribute information. fn has_attrs(&self) -> bool; /// The ID attribute per this snapshot. Should only be called if /// `has_attrs()` returns true. fn id_attr(&self) -> Option<&WeakAtom>; /// Whether this snapshot contains the class `name`. Should only be called /// if `has_attrs()` returns true. fn has_class(&self, name: &Atom, case_sensitivity: CaseSensitivity) -> bool; /// A callback that should be called for each class of the snapshot. Should /// only be called if `has_attrs()` returns true. fn each_class<F>(&self, F) where F: FnMut(&Atom); /// The `xml:lang=""` or `lang=""` attribute value per this snapshot. fn lang_attr(&self) -> Option<AttrValue>; } /// A simple wrapper over an element and a snapshot, that allows us to /// selector-match against a past state of the element. #[derive(Clone)] pub struct ElementWrapper<'a, E> where E: TElement, { element: E, cached_snapshot: Cell<Option<&'a Snapshot>>, snapshot_map: &'a SnapshotMap, } impl<'a, E> ElementWrapper<'a, E> where E: TElement, { /// Trivially constructs an `ElementWrapper`. pub fn new(el: E, snapshot_map: &'a SnapshotMap) -> Self { ElementWrapper { element: el, cached_snapshot: Cell::new(None), snapshot_map: snapshot_map, } } /// Gets the snapshot associated with this element, if any. 
pub fn snapshot(&self) -> Option<&'a Snapshot> { if !self.element.has_snapshot() { return None; } if let Some(s) = self.cached_snapshot.get() { return Some(s); } let snapshot = self.snapshot_map.get(&self.element); debug_assert!(snapshot.is_some(), "has_snapshot lied!"); self.cached_snapshot.set(snapshot); snapshot } /// Returns the states that have changed since the element was snapshotted. pub fn state_changes(&self) -> ElementState { let snapshot = match self.snapshot() { Some(s) => s, None => return ElementState::empty(), }; match snapshot.state() { Some(state) => state ^ self.element.state(), None => ElementState::empty(), } } /// Returns the value of the `xml:lang=""` (or, if appropriate, `lang=""`) /// attribute from this element's snapshot or the closest ancestor /// element snapshot with the attribute specified. fn get_lang(&self) -> Option<AttrValue> { let mut current = self.clone(); loop { let lang = match self.snapshot() { Some(snapshot) if snapshot.has_attrs() => snapshot.lang_attr(), _ => current.element.lang_attr(), }; if lang.is_some() { return lang; } current = current.parent_element()?; } } } impl<'a, E> fmt::Debug for ElementWrapper<'a, E> where E: TElement, { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { // Ignore other fields for now, can change later if needed. self.element.fmt(f) } } impl<'a, E> Element for ElementWrapper<'a, E> where E: TElement, { type Impl = SelectorImpl; fn match_non_ts_pseudo_class<F>( &self, pseudo_class: &NonTSPseudoClass, context: &mut MatchingContext<Self::Impl>, _setter: &mut F, ) -> bool where F: FnMut(&Self, ElementSelectorFlags), { // Some pseudo-classes need special handling to evaluate them against // the snapshot. 
match *pseudo_class { #[cfg(feature = "gecko")] NonTSPseudoClass::MozAny(ref selectors) => { use selectors::matching::matches_complex_selector; return context.nest(|context| { selectors.iter().any(|s| { matches_complex_selector(s.iter(), self, context, _setter) }) }); } // :dir is implemented in terms of state flags, but which state flag // it maps to depends on the argument to :dir. That means we can't // just add its state flags to the NonTSPseudoClass, because if we // added all of them there, and tested via intersects() here, we'd // get incorrect behavior for :not(:dir()) cases. // // FIXME(bz): How can I set this up so once Servo adds :dir() // support we don't forget to update this code? #[cfg(feature = "gecko")] NonTSPseudoClass::Dir(ref dir) => { use invalidation::element::invalidation_map::dir_selector_to_state; let selector_flag = dir_selector_to_state(dir); if selector_flag.is_empty() { // :dir() with some random argument; does not match. return false; } let state = match self.snapshot().and_then(|s| s.state()) { Some(snapshot_state) => snapshot_state, None => self.element.state(), }; return state.contains(selector_flag); } // For :link and :visited, we don't actually want to test the // element state directly. // // Instead, we use the `visited_handling` to determine if they // match. 
NonTSPseudoClass::Link => { return self.is_link() && context.visited_handling().matches_unvisited() } NonTSPseudoClass::Visited => { return self.is_link() && context.visited_handling().matches_visited() } #[cfg(feature = "gecko")] NonTSPseudoClass::MozTableBorderNonzero => { if let Some(snapshot) = self.snapshot() { if snapshot.has_other_pseudo_class_state() { return snapshot.mIsTableBorderNonzero(); } } } #[cfg(feature = "gecko")] NonTSPseudoClass::MozBrowserFrame => { if let Some(snapshot) = self.snapshot() { if snapshot.has_other_pseudo_class_state() { return snapshot.mIsMozBrowserFrame(); } } } // :lang() needs to match using the closest ancestor xml:lang="" or // lang="" attribtue from snapshots. NonTSPseudoClass::Lang(ref lang_arg) => { return self.element.match_element_lang(Some(self.get_lang()), lang_arg); } _ => {} } let flag = pseudo_class.state_flag(); if flag.is_empty() { return self.element.match_non_ts_pseudo_class( pseudo_class, context, &mut |_, _| {}, ) } match self.snapshot().and_then(|s| s.state()) { Some(snapshot_state) => snapshot_state.intersects(flag), None => { self.element.match_non_ts_pseudo_class( pseudo_class, context, &mut |_, _| {}, ) } } } fn match_pseudo_element( &self, pseudo_element: &PseudoElement, context: &mut MatchingContext<Self::Impl>, ) -> bool { self.element.match_pseudo_element(pseudo_element, context) } fn is_link(&self) -> bool { self.element.is_link() } fn opaque(&self) -> OpaqueElement { self.element.opaque() } fn parent_element(&self) -> Option<Self> { self.element.parent_element() .map(|e| ElementWrapper::new(e, self.snapshot_map)) } fn first_child_element(&self) -> Option<Self> { self.element.first_child_element() .map(|e| ElementWrapper::new(e, self.snapshot_map)) } fn last_child_element(&self) -> Option<Self> { self.element.last_child_element() .map(|e| ElementWrapper::new(e, self.snapshot_map)) } fn prev_sibling_element(&self) -> Option<Self> { self.element.prev_sibling_element() .map(|e| ElementWrapper::new(e, 
self.snapshot_map)) } fn next_sibling_element(&self) -> Option<Self> { self.element.next_sibling_element() .map(|e| ElementWrapper::new(e, self.snapshot_map)) } #[inline] fn is_html_element_in_html_document(&self) -> bool { self.element.is_html_element_in_html_document() } #[inline] fn is_html_slot_element(&self) -> bool { self.element.is_html_slot_element() } #[inline] fn local_name(&self) -> &<Self::Impl as ::selectors::SelectorImpl>::BorrowedLocalName { self.element.local_name() } #[inline] fn namespace(&self) -> &<Self::Impl as ::selectors::SelectorImpl>::BorrowedNamespaceUrl { self.element.namespace() } fn attr_matches( &self, ns: &NamespaceConstraint<&Namespace>, local_name: &LocalName, operation: &AttrSelectorOperation<&AttrValue>, ) -> bool { match self.snapshot() { Some(snapshot) if snapshot.has_attrs() => { snapshot.attr_matches(ns, local_name, operation) } _ => self.element.attr_matches(ns, local_name, operation) } } fn has_id(&self, id: &Atom, case_sensitivity: CaseSensitivity) -> bool { match self.snapshot() { Some(snapshot) if snapshot.has_attrs() => { snapshot.id_attr().map_or(false, |atom| case_sensitivity.eq_atom(&atom, id)) } _ => self.element.has_id(id, case_sensitivity) } } fn has_class(&self, name: &Atom, case_sensitivity: CaseSensitivity) -> bool { match self.snapshot() { Some(snapshot) if snapshot.has_attrs() => { snapshot.has_class(name, case_sensitivity) } _ => self.element.has_class(name, case_sensitivity) } } fn is_empty(&self) -> bool {<|fim▁hole|> fn is_root(&self) -> bool { self.element.is_root() } fn pseudo_element_originating_element(&self) -> Option<Self> { self.element.pseudo_element_originating_element() .map(|e| ElementWrapper::new(e, self.snapshot_map)) } fn assigned_slot(&self) -> Option<Self> { self.element.assigned_slot() .map(|e| ElementWrapper::new(e, self.snapshot_map)) } fn blocks_ancestor_combinators(&self) -> bool { self.element.blocks_ancestor_combinators() } }<|fim▁end|>
self.element.is_empty() }
<|file_name|>vselb.py<|end_file_name|><|fim▁begin|># vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2013 OpenStack LLC. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. <|fim▁hole|>from quantum.plugins.services.agent_loadbalancer.drivers.vedge.lbapi import LoadBalancerAPI from quantum.plugins.services.agent_loadbalancer.drivers.vedge import ( cfg as hacfg ) from oslo.config import cfg LOG = logging.getLogger(__name__) edgeUri = 'https://10.117.5.245' edgeId = 'edge-7' edgeUser = 'admin' edgePasswd = 'default' OPTS = [ cfg.StrOpt('pool_vseid', help='this is a vseid of pool'), cfg.StrOpt('vip_vseid', help='this is a vseid of vip') ] class VShieldEdgeLB(): supported_extension_aliases = ["lbaas"] def __init__(self): # Hard coded for now vseapi = VseAPI(edgeUri, edgeUser, edgePasswd, edgeId) self.vselbapi = LoadBalancerAPI(vseapi) self.conf = cfg.CONF self._max_monitors = 255 count = 0 while count < self._max_monitors: monitorMap = "monitorMap_%d" % count OPTS.append(cfg.ListOpt(monitorMap)) count = count + 1 self.conf.register_opts(OPTS) def ini_update(self, ini_path): argv = ["--config-file", ini_path] self.conf(argv) def ini2vseid(self, ini_path): pool_vseid = self.conf.pool_vseid vip_vseid = self.conf.vip_vseid return (pool_vseid, vip_vseid) def extract_monitorids(self, monitors): monitor_ids = [] for monitor in monitors: monitor_ids.append(monitor['id']) return monitor_ids def extract_vsemonitor_maps(self): monitor_maps = {} count = 0 while count < 
self._max_monitors: monitorMap = "monitorMap_%d" % count opt = "self.conf.{}".format(monitorMap) monitorMap = eval(opt) if monitorMap is not None: monitor_id = monitorMap[0] monitor_vseid = monitorMap[1] monitor_maps[monitor_id] = monitor_vseid else: return monitor_maps count = count + 1 return monitor_maps def ini2monitorvseids(self, monitor_ids, monitor_maps): monitor_vseids = {} monitor_vseids_delete = {} for k,v in monitor_maps.items(): if k in monitor_ids: monitor_vseids[k] = v else: monitor_vseids_delete[k] = v return (monitor_vseids,monitor_vseids_delete) # def ini2monitorvseids2(self, ini_path): # monitor_vseids = {} # except_opts = ("config_file", "config_dir", "pool_vseid", "vip_vseid") # opts = self.conf._opts() # print "opts: %s" % opts # for index in opts.keys(): # if index not in except_opts: # opt = "self.conf.{}".format(index) # index = eval(opt) # if index is not None: # monitor_id = index[0] # monitor_vseid = index[1] # monitor_vseids[monitor_id] = monitor_vseid # return monitor_vseids def create(self, logical_config, ini_path, conf_path): monitors = logical_config['healthmonitors'] members = logical_config['members'] pool = logical_config['pool'] vip = logical_config['vip'] if monitors is not None: #try: monitor_vseids,monitors_request = self.vselbapi.create_monitors(monitors) #except Exception: # LOG.error(_("monitors create error %s") % monitors) # exit(1) #try: pool_vseid,pool_request = self.vselbapi.create_pool(pool, members, monitor_vseids) if vip is not None: vip_vseid,vip_request = self.vselbapi.create_vip(vip, pool_vseid) #except Exception: # hacfg.save_ini(ini_path, pool_vseid, None, monitor_vseids) # self.vselbapi.delete_monitors(ini_path) # self.vselbapi.delete_pool(ini_path) # print "pool or vip create error!" 
# exit(1) hacfg.save_ini(ini_path, pool_vseid, vip_vseid, monitor_vseids) hacfg.save_conf(conf_path, pool_request, vip_request) def update(self, logical_config, ini_path, conf_path): self.ini_update(ini_path) monitors = logical_config['healthmonitors'] members = logical_config['members'] pool = logical_config['pool'] vip = logical_config['vip'] pool_vseid,vip_vseid = self.ini2vseid(ini_path) monitor_ids = self.extract_monitorids(monitors) old_vsemonitor_maps = self.extract_vsemonitor_maps() monitor_vseids_update,monitor_vseids_delete = self.ini2monitorvseids(monitor_ids, old_vsemonitor_maps) #try: if monitors is not None: monitor_vseids,monitors_request = self.vselbapi.update_monitors(monitors, old_vsemonitor_maps, monitor_ids, monitor_vseids_update, monitor_vseids_delete, pool_vseid) pool_vseid,pool_request = self.vselbapi.update_pool(pool, pool_vseid, members, monitor_vseids) if vip is not None: vip_vseid,vip_request = self.vselbapi.update_vip(vip, pool_vseid, vip_vseid) #except Exception: # print "pool or vip update error!" # exit(1) hacfg.save_ini(ini_path, pool_vseid, vip_vseid, monitor_vseids) hacfg.save_conf(conf_path, pool_request, vip_request) def destroy(self, pool_id, ini_path, conf_path): self.ini_update(ini_path) pool_vseid,vip_vseid = self.ini2vseid(ini_path) monitor_vseids = self.extract_vsemonitor_maps() # monitor_vseids = self.ini2monitorvseids2(ini_path) if vip_vseid is not None: self.vselbapi.delete_vip(vip_vseid) self.vselbapi.delete_pool(pool_vseid, monitor_vseids) if monitor_vseids is not None: self.vselbapi.delete_monitors(monitor_vseids, pool_vseid) def get_stats(pool_id, ini_path, conf_path): # self.vselbapi.get_stats() self.vselbapi.get_config()<|fim▁end|>
from quantum.openstack.common import log as logging from quantum.plugins.services.agent_loadbalancer.drivers.vedge.vmware.vshield.vseapi import VseAPI
<|file_name|>ABHelloWorld.java<|end_file_name|><|fim▁begin|>package boun.swe573.accessbadger; import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.RequestMapping; <|fim▁hole|>public class ABHelloWorld { @RequestMapping("/welcome") public String helloWorld() { String message = "<br><div style='text-align:center;'>" + "<h3>********** Hello World **********</div><br><br>"; return message; } }<|fim▁end|>
@Controller
<|file_name|>throwing-console-patch.js<|end_file_name|><|fim▁begin|>const colors = require('colors/safe'); const shouldSilenceWarnings = (...messages) => [].some((msgRegex) => messages.some((msg) => msgRegex.test(msg))); const shouldNotThrowWarnings = (...messages) => [].some((msgRegex) => messages.some((msg) => msgRegex.test(msg))); const logOrThrow = (log, method, messages) => { const warning = `console.${method} calls not allowed in tests`; if (process.env.CI) { if (shouldSilenceWarnings(messages)) return; log(warning, '\n', ...messages); // NOTE: That some warnings should be logged allowing us to refactor graceully // without having to introduce a breaking change. if (shouldNotThrowWarnings(messages)) return; throw new Error(...messages); } else { log(colors.bgYellow.black(' WARN '), warning, '\n', ...messages); } }; <|fim▁hole|>// eslint-disable-next-line no-console const logMessage = console.log; global.console.log = (...messages) => { logOrThrow(logMessage, 'log', messages); }; // eslint-disable-next-line no-console const logInfo = console.info; global.console.info = (...messages) => { logOrThrow(logInfo, 'info', messages); }; // eslint-disable-next-line no-console const logWarning = console.warn; global.console.warn = (...messages) => { logOrThrow(logWarning, 'warn', messages); }; // eslint-disable-next-line no-console const logError = console.error; global.console.error = (...messages) => { logOrThrow(logError, 'error', messages); };<|fim▁end|>
<|file_name|>script.ts<|end_file_name|><|fim▁begin|>class HobbiesBehavior extends Sup.Behavior { awake() { let vertical:boolean = false; let horizontal:boolean = true; // Configure platforms Sup.getActor("MovingPlatform1").getBehavior(MovingPlatformBehavior).min = -2; Sup.getActor("MovingPlatform1").getBehavior(MovingPlatformBehavior).max = 8; Sup.getActor("MovingPlatform1").getBehavior(MovingPlatformBehavior).speed = 0.06; Sup.getActor("MovingPlatform1").getBehavior(MovingPlatformBehavior).direction = horizontal; Sup.getActor("SlackMovingPlatform1").getBehavior(MovingPlatformBehavior).min = -14,5; Sup.getActor("SlackMovingPlatform1").getBehavior(MovingPlatformBehavior).max = -9;<|fim▁hole|> Sup.getActor("SlackMovingPlatform2").getBehavior(MovingPlatformBehavior).max = -8; Sup.getActor("SlackMovingPlatform2").getBehavior(MovingPlatformBehavior).speed = 0.06; Sup.getActor("SlackMovingPlatform2").getBehavior(MovingPlatformBehavior).direction = vertical; } update() { } } Sup.registerBehavior(HobbiesBehavior);<|fim▁end|>
Sup.getActor("SlackMovingPlatform1").getBehavior(MovingPlatformBehavior).speed = 0.06; Sup.getActor("SlackMovingPlatform1").getBehavior(MovingPlatformBehavior).direction = horizontal; Sup.getActor("SlackMovingPlatform2").getBehavior(MovingPlatformBehavior).min = -11;
<|file_name|>main.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>import { AppModule } from './heroFinder/heroFinder.module'; import { environment } from './environments/environment'; if (environment.production) { enableProdMode(); } platformBrowserDynamic().bootstrapModule(AppModule) .catch(err => console.log(err));<|fim▁end|>
import { enableProdMode } from '@angular/core'; import { platformBrowserDynamic } from '@angular/platform-browser-dynamic';
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
from MSMSeeder.attic import core
<|file_name|>remove_instance_database.py<|end_file_name|><|fim▁begin|>from maintenance.async_jobs import BaseJob from maintenance.models import RemoveInstanceDatabase __all__ = ('RemoveInstanceDatabase',) class RemoveInstanceDatabaseJob(BaseJob): step_manger_class = RemoveInstanceDatabase get_steps_method = 'remove_readonly_instance_steps' success_msg = 'Instance removed with success' error_msg = 'Could not remove instance' def __init__(self, request, database, task, instance, since_step=None, step_manager=None, scheduled_task=None, auto_rollback=False, auto_cleanup=False): super(RemoveInstanceDatabaseJob, self).__init__( request, database, task, since_step, step_manager, scheduled_task, auto_rollback, auto_cleanup<|fim▁hole|> def instances(self): return [self._instance]<|fim▁end|>
) self._instance = instance @property
<|file_name|>whoami.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ Display logged-in username. Configuration parameters: format: display format for whoami (default '{username}')<|fim▁hole|>Format placeholders: {username} display current username Inspired by i3 FAQ: https://faq.i3wm.org/question/1618/add-user-name-to-status-bar.1.html @author ultrabug SAMPLE OUTPUT {'full_text': u'ultrabug'} """ from getpass import getuser class Py3status: """ """ # available configuration parameters format = '{username}' class Meta: deprecated = { 'remove': [ { 'param': 'cache_timeout', 'msg': 'obsolete parameter', }, ], } def whoami(self): """ We use the getpass module to get the current user. """ username = '{}'.format(getuser()) return { 'cached_until': self.py3.CACHE_FOREVER, 'full_text': self.py3.safe_format(self.format, {'username': username}) } if __name__ == "__main__": """ Run module in test mode. """ from py3status.module_test import module_test module_test(Py3status)<|fim▁end|>
<|file_name|>websocket.py<|end_file_name|><|fim▁begin|>############################################################################### # # The MIT License (MIT) # # Copyright (c) Tavendo GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
# ############################################################################### from __future__ import absolute_import from base64 import b64encode, b64decode from zope.interface import implementer import twisted.internet.protocol from twisted.internet.defer import maybeDeferred from twisted.internet.interfaces import ITransport from twisted.internet.error import ConnectionDone, ConnectionAborted, \ ConnectionLost from autobahn.wamp import websocket from autobahn.websocket import protocol from autobahn.websocket import http from autobahn.twisted.util import peer2str from autobahn.logger import make_logger from autobahn.websocket.compress import PerMessageDeflateOffer, \ PerMessageDeflateOfferAccept, \ PerMessageDeflateResponse, \ PerMessageDeflateResponseAccept __all__ = ( 'WebSocketAdapterProtocol', 'WebSocketServerProtocol', 'WebSocketClientProtocol', 'WebSocketAdapterFactory', 'WebSocketServerFactory', 'WebSocketClientFactory', 'WrappingWebSocketAdapter', 'WrappingWebSocketServerProtocol', 'WrappingWebSocketClientProtocol', 'WrappingWebSocketServerFactory', 'WrappingWebSocketClientFactory', 'listenWS', 'connectWS', 'WampWebSocketServerProtocol', 'WampWebSocketServerFactory', 'WampWebSocketClientProtocol', 'WampWebSocketClientFactory', ) class WebSocketAdapterProtocol(twisted.internet.protocol.Protocol): """ Adapter class for Twisted WebSocket client and server protocols. """ peer = '<never connected>' def connectionMade(self): # the peer we are connected to try: peer = self.transport.getPeer() except AttributeError: # ProcessProtocols lack getPeer() self.peer = "process {}".format(self.transport.pid) else: self.peer = peer2str(peer) self._connectionMade() # Set "Nagle" try: self.transport.setTcpNoDelay(self.tcpNoDelay) except: # don't touch this! 
does not work: AttributeError, OSError # eg Unix Domain sockets throw Errno 22 on this pass def connectionLost(self, reason): if isinstance(reason.value, ConnectionDone): self.factory.log.debug("Connection to/from {peer} was closed cleanly", peer=self.peer) elif isinstance(reason.value, ConnectionAborted): self.factory.log.debug("Connection to/from {peer} was aborted locally", peer=self.peer) elif isinstance(reason.value, ConnectionLost): # The following is ridiculous, but the treatment of reason.value.args # across py2/3 and tx and over various corner cases is deeply fucked up. if hasattr(reason.value, 'message'): message = reason.value.message elif hasattr(reason.value, 'args') and type(reason.value.args) == tuple and len(reason.value.args) > 0: message = reason.value.args[0] else: message = None if message: self.factory.log.debug("Connection to/from {peer} was lost in a non-clean fashion: {message}", peer=self.peer, message=message) else: self.factory.log.debug("Connection to/from {peer} was lost in a non-clean fashion", peer=self.peer) # at least: FileDescriptorOverrun, ConnectionFdescWentAway - but maybe others as well? else: self.factory.log.info("Connection to/from {peer} lost ({error_type}): {error})", peer=self.peer, error_type=type(reason.value), error=reason.value) self._connectionLost(reason) def dataReceived(self, data): self._dataReceived(data) def _closeConnection(self, abort=False): if abort and hasattr(self.transport, 'abortConnection'): self.transport.abortConnection() else: # e.g. 
ProcessProtocol lacks abortConnection() self.transport.loseConnection() def _onOpen(self): self.onOpen() def _onMessageBegin(self, isBinary): self.onMessageBegin(isBinary) def _onMessageFrameBegin(self, length): self.onMessageFrameBegin(length) def _onMessageFrameData(self, payload): self.onMessageFrameData(payload) def _onMessageFrameEnd(self): self.onMessageFrameEnd() def _onMessageFrame(self, payload): self.onMessageFrame(payload) def _onMessageEnd(self): self.onMessageEnd() def _onMessage(self, payload, isBinary): self.onMessage(payload, isBinary) def _onPing(self, payload): self.onPing(payload) def _onPong(self, payload): self.onPong(payload) def _onClose(self, wasClean, code, reason): self.onClose(wasClean, code, reason) def registerProducer(self, producer, streaming): """ Register a Twisted producer with this protocol. Modes: Hybi, Hixie :param producer: A Twisted push or pull producer. :type producer: object :param streaming: Producer type. :type streaming: bool """ self.transport.registerProducer(producer, streaming) class WebSocketServerProtocol(WebSocketAdapterProtocol, protocol.WebSocketServerProtocol): """ Base class for Twisted-based WebSocket server protocols. """ def _onConnect(self, request): # onConnect() will return the selected subprotocol or None # or a pair (protocol, headers) or raise an HttpException res = maybeDeferred(self.onConnect, request) res.addCallback(self.succeedHandshake) def forwardError(failure): if failure.check(http.HttpException): return self.failHandshake(failure.value.reason, failure.value.code) else: if self.debug: self.factory._log("Unexpected exception in onConnect ['%s']" % failure.value) return self.failHandshake(http.INTERNAL_SERVER_ERROR[1], http.INTERNAL_SERVER_ERROR[0]) res.addErrback(forwardError) class WebSocketClientProtocol(WebSocketAdapterProtocol, protocol.WebSocketClientProtocol): """ Base class for Twisted-based WebSocket client protocols. 
""" def _onConnect(self, response): self.onConnect(response) class WebSocketAdapterFactory(object): """ Adapter class for Twisted-based WebSocket client and server factories. """ log = make_logger("twisted") # we deliberately subclass t.i.p.Factory, not t.i.p.ServerFactory. See https://github.com/tavendo/AutobahnPython/issues/389 class WebSocketServerFactory(WebSocketAdapterFactory, protocol.WebSocketServerFactory, twisted.internet.protocol.Factory): """ Base class for Twisted-based WebSocket server factories. """ def __init__(self, *args, **kwargs): """ In addition to all arguments to the constructor of :class:`autobahn.websocket.protocol.WebSocketServerFactory`, you can supply a `reactor` keyword argument to specify the Twisted reactor to be used. """ # lazy import to avoid reactor install upon module import reactor = kwargs.pop('reactor', None) if reactor is None: from twisted.internet import reactor self.reactor = reactor protocol.WebSocketServerFactory.__init__(self, *args, **kwargs) # we deliberately subclass t.i.p.Factory, not t.i.p.ClientFactory. See https://github.com/tavendo/AutobahnPython/issues/389 class WebSocketClientFactory(WebSocketAdapterFactory, protocol.WebSocketClientFactory, twisted.internet.protocol.Factory): """ Base class for Twisted-based WebSocket client factories. """ def __init__(self, *args, **kwargs): """ In addition to all arguments to the constructor of :class:`autobahn.websocket.protocol.WebSocketClientFactory`, you can supply a `reactor` keyword argument to specify the Twisted reactor to be used. """ # lazy import to avoid reactor install upon module import reactor = kwargs.pop('reactor', None) if reactor is None: from twisted.internet import reactor self.reactor = reactor protocol.WebSocketClientFactory.__init__(self, *args, **kwargs) @implementer(ITransport) class WrappingWebSocketAdapter(object): """ An adapter for stream-based transport over WebSocket. 
This follows `websockify <https://github.com/kanaka/websockify>`_ and should be compatible with that. It uses WebSocket subprotocol negotiation and supports the following WebSocket subprotocols: - ``binary`` (or a compatible subprotocol) - ``base64`` Octets are either transmitted as the payload of WebSocket binary messages when using the ``binary`` subprotocol (or an alternative binary compatible subprotocol), or encoded with Base64 and then transmitted as the payload of WebSocket text messages when using the ``base64`` subprotocol. """ def onConnect(self, requestOrResponse): # Negotiate either the 'binary' or the 'base64' WebSocket subprotocol if isinstance(requestOrResponse, protocol.ConnectionRequest): request = requestOrResponse for p in request.protocols: if p in self.factory._subprotocols: self._binaryMode = (p != 'base64') return p raise http.HttpException(http.NOT_ACCEPTABLE[0], "this server only speaks %s WebSocket subprotocols" % self.factory._subprotocols) elif isinstance(requestOrResponse, protocol.ConnectionResponse): response = requestOrResponse if response.protocol not in self.factory._subprotocols: self.failConnection(protocol.WebSocketProtocol.CLOSE_STATUS_CODE_PROTOCOL_ERROR, "this client only speaks %s WebSocket subprotocols" % self.factory._subprotocols) self._binaryMode = (response.protocol != 'base64') else: # should not arrive here raise Exception("logic error") def onOpen(self): self._proto.connectionMade() def onMessage(self, payload, isBinary): if isBinary != self._binaryMode: self.failConnection(protocol.WebSocketProtocol.CLOSE_STATUS_CODE_UNSUPPORTED_DATA, "message payload type does not match the negotiated subprotocol") else: if not isBinary: try: payload = b64decode(payload) except Exception as e: self.failConnection(protocol.WebSocketProtocol.CLOSE_STATUS_CODE_INVALID_PAYLOAD, "message payload base64 decoding error: {0}".format(e)) # print("forwarding payload: {0}".format(binascii.hexlify(payload))) self._proto.dataReceived(payload) # 
noinspection PyUnusedLocal def onClose(self, wasClean, code, reason): self._proto.connectionLost(None) def write(self, data): # print("sending payload: {0}".format(binascii.hexlify(data))) # part of ITransport assert(type(data) == bytes) if self._binaryMode: self.sendMessage(data, isBinary=True) else: data = b64encode(data) self.sendMessage(data, isBinary=False) def writeSequence(self, data): # part of ITransport for d in data: self.write(d) def loseConnection(self): # part of ITransport self.sendClose() def getPeer(self): # part of ITransport return self.transport.getPeer() def getHost(self): # part of ITransport return self.transport.getHost() class WrappingWebSocketServerProtocol(WrappingWebSocketAdapter, WebSocketServerProtocol): """ Server protocol for stream-based transport over WebSocket. """ class WrappingWebSocketClientProtocol(WrappingWebSocketAdapter, WebSocketClientProtocol): """ Client protocol for stream-based transport over WebSocket. """ class WrappingWebSocketServerFactory(WebSocketServerFactory): """ Wrapping server factory for stream-based transport over WebSocket. """ def __init__(self, factory, url, reactor=None, enableCompression=True, autoFragmentSize=0, subprotocol=None, debug=False): """ :param factory: Stream-based factory to be wrapped. :type factory: A subclass of ``twisted.internet.protocol.Factory`` :param url: WebSocket URL of the server this server factory will work for. :type url: unicode """ self._factory = factory self._subprotocols = ['binary', 'base64'] if subprotocol: self._subprotocols.append(subprotocol) WebSocketServerFactory.__init__(self, url=url, reactor=reactor, protocols=self._subprotocols,<|fim▁hole|> self.setProtocolOptions(autoFragmentSize=autoFragmentSize) # play nice and perform WS closing handshake self.setProtocolOptions(failByDrop=False) if enableCompression: # Enable WebSocket extension "permessage-deflate". # Function to accept offers from the client .. 
def accept(offers): for offer in offers: if isinstance(offer, PerMessageDeflateOffer): return PerMessageDeflateOfferAccept(offer) self.setProtocolOptions(perMessageCompressionAccept=accept) def buildProtocol(self, addr): proto = WrappingWebSocketServerProtocol() proto.factory = self proto._proto = self._factory.buildProtocol(addr) proto._proto.transport = proto return proto def startFactory(self): self._factory.startFactory() WebSocketServerFactory.startFactory(self) def stopFactory(self): self._factory.stopFactory() WebSocketServerFactory.stopFactory(self) class WrappingWebSocketClientFactory(WebSocketClientFactory): """ Wrapping client factory for stream-based transport over WebSocket. """ def __init__(self, factory, url, reactor=None, enableCompression=True, autoFragmentSize=0, subprotocol=None, debug=False): """ :param factory: Stream-based factory to be wrapped. :type factory: A subclass of ``twisted.internet.protocol.Factory`` :param url: WebSocket URL of the server this client factory will connect to. :type url: unicode """ self._factory = factory self._subprotocols = ['binary', 'base64'] if subprotocol: self._subprotocols.append(subprotocol) WebSocketClientFactory.__init__(self, url=url, reactor=reactor, protocols=self._subprotocols, debug=debug) # automatically fragment outgoing traffic into WebSocket frames # of this size self.setProtocolOptions(autoFragmentSize=autoFragmentSize) # play nice and perform WS closing handshake self.setProtocolOptions(failByDrop=False) if enableCompression: # Enable WebSocket extension "permessage-deflate". # The extensions offered to the server .. offers = [PerMessageDeflateOffer()] self.setProtocolOptions(perMessageCompressionOffers=offers) # Function to accept responses from the server .. 
def accept(response): if isinstance(response, PerMessageDeflateResponse): return PerMessageDeflateResponseAccept(response) self.setProtocolOptions(perMessageCompressionAccept=accept) def buildProtocol(self, addr): proto = WrappingWebSocketClientProtocol() proto.factory = self proto._proto = self._factory.buildProtocol(addr) proto._proto.transport = proto return proto def connectWS(factory, contextFactory=None, timeout=30, bindAddress=None): """ Establish WebSocket connection to a server. The connection parameters like target host, port, resource and others are provided via the factory. :param factory: The WebSocket protocol factory to be used for creating client protocol instances. :type factory: An :class:`autobahn.websocket.WebSocketClientFactory` instance. :param contextFactory: SSL context factory, required for secure WebSocket connections ("wss"). :type contextFactory: A `twisted.internet.ssl.ClientContextFactory <http://twistedmatrix.com/documents/current/api/twisted.internet.ssl.ClientContextFactory.html>`_ instance. :param timeout: Number of seconds to wait before assuming the connection has failed. :type timeout: int :param bindAddress: A (host, port) tuple of local address to bind to, or None. :type bindAddress: tuple :returns: The connector. :rtype: An object which implements `twisted.interface.IConnector <http://twistedmatrix.com/documents/current/api/twisted.internet.interfaces.IConnector.html>`_. 
""" # lazy import to avoid reactor install upon module import if hasattr(factory, 'reactor'): reactor = factory.reactor else: from twisted.internet import reactor if factory.proxy is not None: if factory.isSecure: raise Exception("WSS over explicit proxies not implemented") else: conn = reactor.connectTCP(factory.proxy['host'], factory.proxy['port'], factory, timeout, bindAddress) else: if factory.isSecure: if contextFactory is None: # create default client SSL context factory when none given from twisted.internet import ssl contextFactory = ssl.ClientContextFactory() conn = reactor.connectSSL(factory.host, factory.port, factory, contextFactory, timeout, bindAddress) else: conn = reactor.connectTCP(factory.host, factory.port, factory, timeout, bindAddress) return conn def listenWS(factory, contextFactory=None, backlog=50, interface=''): """ Listen for incoming WebSocket connections from clients. The connection parameters like listening port and others are provided via the factory. :param factory: The WebSocket protocol factory to be used for creating server protocol instances. :type factory: An :class:`autobahn.websocket.WebSocketServerFactory` instance. :param contextFactory: SSL context factory, required for secure WebSocket connections ("wss"). :type contextFactory: A twisted.internet.ssl.ContextFactory. :param backlog: Size of the listen queue. :type backlog: int :param interface: The interface (derived from hostname given) to bind to, defaults to '' (all). :type interface: str :returns: The listening port. :rtype: An object that implements `twisted.interface.IListeningPort <http://twistedmatrix.com/documents/current/api/twisted.internet.interfaces.IListeningPort.html>`_. 
""" # lazy import to avoid reactor install upon module import if hasattr(factory, 'reactor'): reactor = factory.reactor else: from twisted.internet import reactor if factory.isSecure: if contextFactory is None: raise Exception("Secure WebSocket listen requested, but no SSL context factory given") listener = reactor.listenSSL(factory.port, factory, contextFactory, backlog, interface) else: listener = reactor.listenTCP(factory.port, factory, backlog, interface) return listener class WampWebSocketServerProtocol(websocket.WampWebSocketServerProtocol, WebSocketServerProtocol): """ Base class for Twisted-based WAMP-over-WebSocket server protocols. """ class WampWebSocketServerFactory(websocket.WampWebSocketServerFactory, WebSocketServerFactory): """ Base class for Twisted-based WAMP-over-WebSocket server factories. """ protocol = WampWebSocketServerProtocol def __init__(self, factory, *args, **kwargs): serializers = kwargs.pop('serializers', None) debug_wamp = kwargs.pop('debug_wamp', False) websocket.WampWebSocketServerFactory.__init__(self, factory, serializers, debug_wamp=debug_wamp) kwargs['protocols'] = self._protocols # noinspection PyCallByClass WebSocketServerFactory.__init__(self, *args, **kwargs) class WampWebSocketClientProtocol(websocket.WampWebSocketClientProtocol, WebSocketClientProtocol): """ Base class for Twisted-based WAMP-over-WebSocket client protocols. """ class WampWebSocketClientFactory(websocket.WampWebSocketClientFactory, WebSocketClientFactory): """ Base class for Twisted-based WAMP-over-WebSocket client factories. """ protocol = WampWebSocketClientProtocol def __init__(self, factory, *args, **kwargs): serializers = kwargs.pop('serializers', None) debug_wamp = kwargs.pop('debug_wamp', False) websocket.WampWebSocketClientFactory.__init__(self, factory, serializers, debug_wamp=debug_wamp) kwargs['protocols'] = self._protocols WebSocketClientFactory.__init__(self, *args, **kwargs)<|fim▁end|>
debug=debug) # automatically fragment outgoing traffic into WebSocket frames # of this size
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>extern crate rustbox; use std::env; use std::io::Result; use std::path::Path; mod filedata; use filedata::FileData; mod display; use display::Display; fn main() { let fd = match init_data() { Ok(v) => v, Err(e) => { println!("could not open specified file: {}", e); return; } }; let display = Display::new(fd); display.run(); } <|fim▁hole|>fn init_data() -> Result<FileData> { let mut args = env::args().skip(1); if args.len() > 0 { let path_str = args.next().unwrap_or(String::new()); let path_str_copy = path_str.clone(); let path = Path::new(&path_str_copy); if path.exists() == false { Ok(FileData::new_with_name(path_str)) } else { FileData::from(path_str) } } else { Ok(FileData::new()) } }<|fim▁end|>
//create a new data object from cmd args
<|file_name|>DomSource.ts<|end_file_name|><|fim▁begin|>import { StandardEvents } from './Events' import { Stream } from '@motorcycle/types' /** * A DOM source interface for objects to declaratively query the DOM. * * @name DomSource * * @example * interface DomSource { * query(cssSelector: CssSelector): DomSource * elements<El extends Element = Element>(): Stream<ReadonlyArray<El>> * events<Ev extends Event = Event>(eventType: StandardEvents, options?: EventListenerOptions): Stream<Ev> * cssSelectors(): ReadonlyArray<CssSelector> * } * * @type */ export interface DomSource<A = Element, B = Event> { /** * Queries for elements and events for a specified CSS selector. * * @name DomSource.query(cssSelector: CssSelector): DomSource * * @example * const queriedDomSource = domSource.query(`.myCssSelector`)<|fim▁hole|> * Retrieves a stream of a list of elements matching previous queries. * * NOTE: Elements will emit every single time the DOM is updated. * * @name DomSource.elements<El extends Element = Element>(): Stream<ReadonlyArray<El>> * * @example * const queriedDomSource = domSource.query(`.myCssSelector`) * const elements$ = queriedDomSource.elements() */ elements(): Stream<ReadonlyArray<A>> /** * Retrieves a stream of events from elements matching previous queries. * * `DomSource.events` optionally takes a second parameter of `EventListernerOptions`, * which specifies whether event listeners will listen to events during the * capturing phase. If not provided, all event listeners will use bubbling phase. * * @name DomSource.events<Ev extends Event = Event>(eventType: StandardEvents, options?: EventListenerOptions): Stream<Ev> * @example * const queriedDomSource = domSource.query(`.myCssSelector`) * const clickEvent$: Stream<MouseEvent> = queriedDomSource.events<MouseEvent>('click') */ events<Ev extends B = B>(eventType: StandardEvents, options?: EventListenerOptions): Stream<Ev> /** * Retrieves a list of all previously queried CSS selectors. 
* * @name DomSource.cssSelectors(): ReadonlyArray<CssSelector> * @example * const queriedDomSource = domSource.query(`.myCssSelector`) * const cssSelectors = queriedDomSource.cssSelectors() * * console.log(cssSelectors[0]) // .myCssSelector */ cssSelectors(): ReadonlyArray<CssSelector> } export type CssSelector = string<|fim▁end|>
*/ query<C extends A = A>(cssSelector: CssSelector): DomSource<C, B> /**
<|file_name|>webgl.js<|end_file_name|><|fim▁begin|>/** * Class: Webgl * Description: Her goes description */ import {m, utils} from '../../js/main'; import * as THREE from './three.min.js' import dat from './dat.gui.min.js' import Detector from './Detector.js' // GLOBAL var EightBitMode = false; export default class Webgl { /** * @param {number} param this is param. * @return {number} this is return. */ constructor(config) { // put in defaults here //defaults this.config = $.extend({ el:'#snow' },config); this.$el = $(this.config.el); this.renderer, this.scene, this.camera, this.cameraRadius = 50.0, this.cameraTarget, this.cameraX = 0, this.cameraY = 0, this.cameraZ = this.cameraRadius, this.particleSystem, this.particleSystemHeight = 100.0, this.wind = 2.5, this.clock, this.controls, this.parameters, this.onParametersUpdate, this.texture, this.loader; this.init(); } init() { var self = this; this.renderer = new THREE.WebGLRenderer( { alpha: true, antialias: true } ); this.renderer.setSize( window.innerWidth, window.innerHeight ); this.renderer.setClearColor( 0x000000, 0 ); this.renderer.sortObjects = false; this.scene = new THREE.Scene(); this.camera = new THREE.PerspectiveCamera( 45, window.innerWidth / window.innerHeight, 1, 10000 ); this.cameraTarget = new THREE.Vector3( 0, 0, 0 ); this.loader = new THREE.TextureLoader(); this.loader.crossOrigin = ''; this.texture = this.loader.load( '../assets/textures/snowflake.png', // Resource is loaded function ( texture ) { // create the particles with the texture self.createParticles( texture ); }, // Download progress function ( xhr ) { console.log( (xhr.loaded / xhr.total * 100) + '% loaded' ); }, // Download errors function ( xhr ) { console.log( 'An error happened during the loading of the texture ' ); } ); } createParticles( tex ) { var numParticles = 2000, width = 70, height = this.particleSystemHeight, depth = 80, parameters = { // color: 0xffffff, color: 0xffffff, height: this.particleSystemHeight, radiusX: 
this.wind, radiusZ: 2.5, size: 100, scale: 4.0, opacity: 1, speedH: 1.0, speedV: 1.0 }, systemGeometry = new THREE.Geometry(), systemMaterial = new THREE.ShaderMaterial({ uniforms: { color: { type: 'c', value: new THREE.Color( parameters.color ) }, height: { type: 'f', value: parameters.height }, elapsedTime: { type: 'f', value: 0 }, radiusX: { type: 'f', value: parameters.radiusX }, radiusZ: { type: 'f', value: parameters.radiusZ }, size: { type: 'f', value: parameters.size }, scale: { type: 'f', value: parameters.scale }, opacity: { type: 'f', value: parameters.opacity }, texture: { type: 't', value: tex }, speedH: { type: 'f', value: parameters.speedH }, speedV: { type: 'f', value: parameters.speedV } }, vertexShader: document.getElementById( 'snow_vs' ).textContent, fragmentShader: document.getElementById( 'snow_fs' ).textContent, blending: THREE.AdditiveBlending, transparent: true, depthTest: false }); // Less particules for mobile if( this.isMobileDevice ) { numParticles = 200; } for( var i = 0; i < numParticles; i++ ) { var vertex = new THREE.Vector3( this.rand( width ), Math.random() * height, this.rand( depth ) ); systemGeometry.vertices.push( vertex ); } this.particleSystem = new THREE.Points( systemGeometry, systemMaterial ); this.particleSystem.position.y = -height/2; <|fim▁hole|> this.clock = new THREE.Clock(); document.getElementById("snow").appendChild( this.renderer.domElement ); this.bindEvents(); } // Events -------------------------------------------------------------------------- bindEvents() { // bind your events here. 
var self = this; document.addEventListener( 'mousemove', function( e ) { var mouseX = e.clientX, mouseY = e.clientY, width = window.innerWidth, halfWidth = width >> 1, height = window.innerHeight, halfHeight = height >> 1; var targetX = self.cameraRadius * ( mouseX - halfWidth ) / halfWidth; self.cameraX = targetX / 10; //self.cameraY = self.cameraRadius * ( mouseY - halfHeight ) / halfHeight; }, false ); // Activat e8 bit mode button document.getElementById("eightbitmodebutton").addEventListener( 'click', this.activateEightBitMode, false ); // handle resize this.handleWindowResize(); // Animate snow this.animate(); } activateEightBitMode() { var self = this; if( !EightBitMode ){ EightBitMode = true; $('#activate-text').html("DEACTIVATE</br>X-MAS MODE"); emitter = new EightBit_Emitter().init(); } else { EightBitMode = false; $('#activate-text').html("ACTIVATE</br>X-MAS MODE"); $('#eightbits').empty(); } } handleWindowResize() { this.renderer.setSize(window.innerWidth, window.innerHeight); this.camera.aspect = window.innerWidth / window.innerHeight; this.camera.updateProjectionMatrix(); } animate() { requestAnimationFrame( this.animate.bind(this) ); if( EightBitMode ) { var delta = this.clock.getDelta(), elapsedTime = this.clock.getElapsedTime(); this.particleSystem.material.uniforms.elapsedTime.value = elapsedTime * 10; this.camera.position.set( this.cameraX, this.cameraY, this.cameraZ ); this.camera.lookAt( this.cameraTarget ); this.renderer.clear(); this.renderer.render( this.scene, this.camera ); } else { this.renderer.clear(); } } // Getters / Setters -------------------------------------------------------------------------- addWind() { this.particleSystem.material.uniforms.radiusX.value = this.wind; m.TweenMax.from( this.particleSystem.material.uniforms.radiusX, 3, { value:30, ease:Quad.easeOut } ); } // Utils -------------------------------------------------------------------------- rand( v ) { return (v * (Math.random() - 0.5)); } // easy mobile device 
detection isMobileDevice() { if ( navigator === undefined || navigator.userAgent === undefined ) { return true; } var s = navigator.userAgent; if ( s.match( /iPhone/i ) || s.match( /iPod/i ) || s.match( /webOS/i ) || s.match( /BlackBerry/i ) || ( s.match( /Windows/i ) && s.match( /Phone/i ) ) || ( s.match( /Android/i ) && s.match( /Mobile/i ) ) ) { return true; } return false; } } /////////////////////////////////////////////////////////////////////////////// // 8 Bit Xmas Mode /////////////////////////////////////////////////////////////////////////////// var indexes = 2500; var prefix = '/assets/textures/'; var bitmaps = [ { type:'moose', img:'moose.gif', w:64, h:64 }, { type:'santa', img:'santa.gif', w:100, h:100 }, { type:'mistletoe', img:'mistletoe1.png', w:109, h:99 }, { type:'mistletoe', img:'mistletoe2.png', w:123, h:114 }, { type:'gift', img:'kdo1.png', w:64, h:67 }, { type:'gift', img:'kdo2.png', w:64, h:67 }, { type:'gift', img:'kdo3.png', w:64, h:67 }, { type:'mistletoe', img:'mistletoe1.png', w:109, h:99 }, { type:'mistletoe', img:'mistletoe2.png', w:123, h:114 }, ]; var emitter; class EightBit_Emitter { constructor( $selector = '#eightbits', maxParticles=10 ) { this.$emitter = document.querySelector($selector); this.maxParticles = maxParticles; } init() { var self = this; this.newParticle(); } newParticle() { var self = this; var timing = Math.floor( Math.random() * 5000 + 1000 ); // Element caracteristics var pRand = Math.floor( Math.random() * bitmaps.length ); var life = Math.floor( Math.random() * 10 + 15 ); var particle = new EightBit_Particle( pRand, life ); // check if we are still on EightBit mode if( EightBitMode ) { // Wait if too many guys on the floor if( $('#eightbits .particle').length <= this.maxParticles ) { setTimeout( self.newParticle.bind( this ), timing ); } } } handleWindowResize(){ var wWidth = window.innerWidth; var wHeight = window.innerHeight; } } class EightBit_Particle { constructor( idx, life ) { this.idx = idx; this.life = 
life; this._create(); } _create() { var self = this; var wWidth = window.innerWidth; var wHeight = window.innerHeight; var pic = '<div class="particle" id="p-' + indexes + '"><img src="' + prefix + bitmaps[this.idx].img + '"></div>'; $('#eightbits').append( pic ); var part = $('#p-' + indexes); indexes++; var tweenObject; if( this.idx <= 1 ) { // Moose & Santa part.css({ top: ( wHeight - bitmaps[this.idx].h ) + "px", left: ( wWidth + 100 ) + "px", 'z-index':indexes }); tweenObject = { left:-100, // easing:Linear.easeNone, onComplete:function(){ $(part).remove(); }, onCompleteParams:[part] } } else if ( this.idx > 1 ) { // falling stuff part.css({ top: "-100px", left: Math.floor( Math.random() * ( wWidth - 100 ) + 100 ) + "px", 'z-index':indexes }); tweenObject = { top:wHeight + 100, // easing:Linear.easeNone, // rotation:Math.floor( Math.random() * 90 ), onComplete:function(){ $(part).remove(); }, onCompleteParams:[part] } } m.TweenMax.to( part, self.life, tweenObject ); } }<|fim▁end|>
this.scene.add( this.particleSystem );
<|file_name|>axt_to_lav_code.py<|end_file_name|><|fim▁begin|>def exec_after_process(app, inp_data, out_data, param_dict, tool, stdout, stderr): for name,data in out_data.items(): if name == "seq_file2": <|fim▁hole|> break<|fim▁end|>
data.dbkey = param_dict['dbkey_2'] app.model.context.add( data ) app.model.context.flush()
<|file_name|>test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # # Copyright 2008 the V8 project authors. All rights reserved. # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided # with the distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived # from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
import imp import optparse import os import platform import re import signal import subprocess import sys import tempfile import time import threading import utils from os.path import join, dirname, abspath, basename, isdir, exists from datetime import datetime from Queue import Queue, Empty VERBOSE = False # --------------------------------------------- # --- P r o g r e s s I n d i c a t o r s --- # --------------------------------------------- class ProgressIndicator(object): def __init__(self, cases): self.cases = cases self.queue = Queue(len(cases)) for case in cases: self.queue.put_nowait(case) self.succeeded = 0 self.remaining = len(cases) self.total = len(cases) self.failed = [ ] self.crashed = 0 self.terminate = False self.lock = threading.Lock() def PrintFailureHeader(self, test): if test.IsNegative(): negative_marker = '[negative] ' else: negative_marker = '' print "=== %(label)s %(negative)s===" % { 'label': test.GetLabel(), 'negative': negative_marker } print "Path: %s" % "/".join(test.path) def Run(self, tasks): self.Starting() threads = [] # Spawn N-1 threads and then use this thread as the last one. # That way -j1 avoids threading altogether which is a nice fallback # in case of threading problems. for i in xrange(tasks - 1): thread = threading.Thread(target=self.RunSingle, args=[]) threads.append(thread) thread.start() try: self.RunSingle() # Wait for the remaining threads for thread in threads: # Use a timeout so that signals (ctrl-c) will be processed. thread.join(timeout=10000000) except Exception, e: # If there's an exception we schedule an interruption for any # remaining threads. 
self.terminate = True # ...and then reraise the exception to bail out raise self.Done() return not self.failed def RunSingle(self): while not self.terminate: try: test = self.queue.get_nowait() except Empty: return case = test.case self.lock.acquire() self.AboutToRun(case) self.lock.release() try:<|fim▁hole|> assert self.terminate return if self.terminate: return self.lock.acquire() if output.UnexpectedOutput(): self.failed.append(output) if output.HasCrashed(): self.crashed += 1 else: self.succeeded += 1 self.remaining -= 1 self.HasRun(output) self.lock.release() def EscapeCommand(command): parts = [] for part in command: if ' ' in part: # Escape spaces. We may need to escape more characters for this # to work properly. parts.append('"%s"' % part) else: parts.append(part) return " ".join(parts) class SimpleProgressIndicator(ProgressIndicator): def Starting(self): print 'Running %i tests' % len(self.cases) def Done(self): print for failed in self.failed: self.PrintFailureHeader(failed.test) if failed.output.stderr: print "--- stderr ---" print failed.output.stderr.strip() if failed.output.stdout: print "--- stdout ---" print failed.output.stdout.strip() print "Command: %s" % EscapeCommand(failed.command) if failed.HasCrashed(): print "--- CRASHED ---" if failed.HasTimedOut(): print "--- TIMEOUT ---" if len(self.failed) == 0: print "===" print "=== All tests succeeded" print "===" else: print print "===" print "=== %i tests failed" % len(self.failed) if self.crashed > 0: print "=== %i tests CRASHED" % self.crashed print "===" class VerboseProgressIndicator(SimpleProgressIndicator): def AboutToRun(self, case): print 'Starting %s...' 
% case.GetLabel() sys.stdout.flush() def HasRun(self, output): if output.UnexpectedOutput(): if output.HasCrashed(): outcome = 'CRASH' else: outcome = 'FAIL' else: outcome = 'pass' print 'Done running %s: %s' % (output.test.GetLabel(), outcome) class DotsProgressIndicator(SimpleProgressIndicator): def AboutToRun(self, case): pass def HasRun(self, output): total = self.succeeded + len(self.failed) if (total > 1) and (total % 50 == 1): sys.stdout.write('\n') if output.UnexpectedOutput(): if output.HasCrashed(): sys.stdout.write('C') sys.stdout.flush() elif output.HasTimedOut(): sys.stdout.write('T') sys.stdout.flush() else: sys.stdout.write('F') sys.stdout.flush() else: sys.stdout.write('.') sys.stdout.flush() class TapProgressIndicator(SimpleProgressIndicator): def Starting(self): print '1..%i' % len(self.cases) self._done = 0 def AboutToRun(self, case): pass def HasRun(self, output): self._done += 1 command = basename(output.command[-1]) if output.UnexpectedOutput(): print 'not ok %i - %s' % (self._done, command) for l in output.output.stderr.splitlines(): print '#' + l for l in output.output.stdout.splitlines(): print '#' + l else: print 'ok %i - %s' % (self._done, command) duration = output.test.duration # total_seconds() was added in 2.7 total_seconds = (duration.microseconds + (duration.seconds + duration.days * 24 * 3600) * 10**6) / 10**6 print ' ---' print ' duration_ms: %d.%d' % (total_seconds, duration.microseconds / 1000) print ' ...' 
def Done(self): pass class CompactProgressIndicator(ProgressIndicator): def __init__(self, cases, templates): super(CompactProgressIndicator, self).__init__(cases) self.templates = templates self.last_status_length = 0 self.start_time = time.time() def Starting(self): pass def Done(self): self.PrintProgress('Done') def AboutToRun(self, case): self.PrintProgress(case.GetLabel()) def HasRun(self, output): if output.UnexpectedOutput(): self.ClearLine(self.last_status_length) self.PrintFailureHeader(output.test) stdout = output.output.stdout.strip() if len(stdout): print self.templates['stdout'] % stdout stderr = output.output.stderr.strip() if len(stderr): print self.templates['stderr'] % stderr print "Command: %s" % EscapeCommand(output.command) if output.HasCrashed(): print "--- CRASHED ---" if output.HasTimedOut(): print "--- TIMEOUT ---" def Truncate(self, str, length): if length and (len(str) > (length - 3)): return str[:(length-3)] + "..." else: return str def PrintProgress(self, name): self.ClearLine(self.last_status_length) elapsed = time.time() - self.start_time status = self.templates['status_line'] % { 'passed': self.succeeded, 'remaining': (((self.total - self.remaining) * 100) // self.total), 'failed': len(self.failed), 'test': name, 'mins': int(elapsed) / 60, 'secs': int(elapsed) % 60 } status = self.Truncate(status, 78) self.last_status_length = len(status) print status, sys.stdout.flush() class ColorProgressIndicator(CompactProgressIndicator): def __init__(self, cases): templates = { 'status_line': "[%(mins)02i:%(secs)02i|\033[34m%%%(remaining) 4d\033[0m|\033[32m+%(passed) 4d\033[0m|\033[31m-%(failed) 4d\033[0m]: %(test)s", 'stdout': "\033[1m%s\033[0m", 'stderr': "\033[31m%s\033[0m", } super(ColorProgressIndicator, self).__init__(cases, templates) def ClearLine(self, last_line_length): print "\033[1K\r", class MonochromeProgressIndicator(CompactProgressIndicator): def __init__(self, cases): templates = { 'status_line': 
"[%(mins)02i:%(secs)02i|%%%(remaining) 4d|+%(passed) 4d|-%(failed) 4d]: %(test)s", 'stdout': '%s', 'stderr': '%s', 'clear': lambda last_line_length: ("\r" + (" " * last_line_length) + "\r"), 'max_length': 78 } super(MonochromeProgressIndicator, self).__init__(cases, templates) def ClearLine(self, last_line_length): print ("\r" + (" " * last_line_length) + "\r"), PROGRESS_INDICATORS = { 'verbose': VerboseProgressIndicator, 'dots': DotsProgressIndicator, 'color': ColorProgressIndicator, 'tap': TapProgressIndicator, 'mono': MonochromeProgressIndicator } # ------------------------- # --- F r a m e w o r k --- # ------------------------- class CommandOutput(object): def __init__(self, exit_code, timed_out, stdout, stderr): self.exit_code = exit_code self.timed_out = timed_out self.stdout = stdout self.stderr = stderr self.failed = None class TestCase(object): def __init__(self, context, path, mode): self.path = path self.context = context self.duration = None self.mode = mode def IsNegative(self): return False def CompareTime(self, other): return cmp(other.duration, self.duration) def DidFail(self, output): if output.failed is None: output.failed = self.IsFailureOutput(output) return output.failed def IsFailureOutput(self, output): return output.exit_code != 0 def GetSource(self): return "(no source available)" def RunCommand(self, command): full_command = self.context.processor(command) output = Execute(full_command, self.context, self.context.GetTimeout(self.mode)) self.Cleanup() return TestOutput(self, full_command, output, self.context.store_unexpected_output) def BeforeRun(self): pass def AfterRun(self, result): pass def Run(self): self.BeforeRun() try: result = self.RunCommand(self.GetCommand()) finally: # Tests can leave the tty in non-blocking mode. If the test runner # tries to print to stdout/stderr after that and the tty buffer is # full, it'll die with a EAGAIN OSError. Ergo, put the tty back in # blocking mode before proceeding. 
if sys.platform != 'win32': from fcntl import fcntl, F_GETFL, F_SETFL from os import O_NONBLOCK for fd in 0,1,2: fcntl(fd, F_SETFL, ~O_NONBLOCK & fcntl(fd, F_GETFL)) self.AfterRun(result) return result def Cleanup(self): return class TestOutput(object): def __init__(self, test, command, output, store_unexpected_output): self.test = test self.command = command self.output = output self.store_unexpected_output = store_unexpected_output def UnexpectedOutput(self): if self.HasCrashed(): outcome = CRASH elif self.HasTimedOut(): outcome = TIMEOUT elif self.HasFailed(): outcome = FAIL else: outcome = PASS return not outcome in self.test.outcomes def HasPreciousOutput(self): return self.UnexpectedOutput() and self.store_unexpected_output def HasCrashed(self): if utils.IsWindows(): return 0x80000000 & self.output.exit_code and not (0x3FFFFF00 & self.output.exit_code) else: # Timed out tests will have exit_code -signal.SIGTERM. if self.output.timed_out: return False return self.output.exit_code < 0 and \ self.output.exit_code != -signal.SIGABRT def HasTimedOut(self): return self.output.timed_out; def HasFailed(self): execution_failed = self.test.DidFail(self.output) if self.test.IsNegative(): return not execution_failed else: return execution_failed def KillProcessWithID(pid): if utils.IsWindows(): os.popen('taskkill /T /F /PID %d' % pid) else: os.kill(pid, signal.SIGTERM) MAX_SLEEP_TIME = 0.1 INITIAL_SLEEP_TIME = 0.0001 SLEEP_TIME_FACTOR = 1.25 SEM_INVALID_VALUE = -1 SEM_NOGPFAULTERRORBOX = 0x0002 # Microsoft Platform SDK WinBase.h def Win32SetErrorMode(mode): prev_error_mode = SEM_INVALID_VALUE try: import ctypes prev_error_mode = ctypes.windll.kernel32.SetErrorMode(mode); except ImportError: pass return prev_error_mode def RunProcess(context, timeout, args, **rest): if context.verbose: print "#", " ".join(args) popen_args = args prev_error_mode = SEM_INVALID_VALUE; if utils.IsWindows(): if context.suppress_dialogs: # Try to change the error mode to avoid dialogs on fatal 
errors. Don't # touch any existing error mode flags by merging the existing error mode. # See http://blogs.msdn.com/oldnewthing/archive/2004/07/27/198410.aspx. error_mode = SEM_NOGPFAULTERRORBOX; prev_error_mode = Win32SetErrorMode(error_mode); Win32SetErrorMode(error_mode | prev_error_mode); process = subprocess.Popen( shell = utils.IsWindows(), args = popen_args, **rest ) if utils.IsWindows() and context.suppress_dialogs and prev_error_mode != SEM_INVALID_VALUE: Win32SetErrorMode(prev_error_mode) # Compute the end time - if the process crosses this limit we # consider it timed out. if timeout is None: end_time = None else: end_time = time.time() + timeout timed_out = False # Repeatedly check the exit code from the process in a # loop and keep track of whether or not it times out. exit_code = None sleep_time = INITIAL_SLEEP_TIME while exit_code is None: if (not end_time is None) and (time.time() >= end_time): # Kill the process and wait for it to exit. KillProcessWithID(process.pid) exit_code = process.wait() timed_out = True else: exit_code = process.poll() time.sleep(sleep_time) sleep_time = sleep_time * SLEEP_TIME_FACTOR if sleep_time > MAX_SLEEP_TIME: sleep_time = MAX_SLEEP_TIME return (process, exit_code, timed_out) def PrintError(str): sys.stderr.write(str) sys.stderr.write('\n') def CheckedUnlink(name): try: os.unlink(name) except OSError, e: PrintError("os.unlink() " + str(e)) def Execute(args, context, timeout=None): (fd_out, outname) = tempfile.mkstemp() (fd_err, errname) = tempfile.mkstemp() (process, exit_code, timed_out) = RunProcess( context, timeout, args = args, stdout = fd_out, stderr = fd_err, ) os.close(fd_out) os.close(fd_err) output = file(outname).read() errors = file(errname).read() CheckedUnlink(outname) CheckedUnlink(errname) return CommandOutput(exit_code, timed_out, output, errors) def ExecuteNoCapture(args, context, timeout=None): (process, exit_code, timed_out) = RunProcess( context, timeout, args = args, ) return 
CommandOutput(exit_code, False, "", "") def CarCdr(path): if len(path) == 0: return (None, [ ]) else: return (path[0], path[1:]) class TestConfiguration(object): def __init__(self, context, root): self.context = context self.root = root def Contains(self, path, file): if len(path) > len(file): return False for i in xrange(len(path)): if not path[i].match(file[i]): return False return True def GetTestStatus(self, sections, defs): pass class TestSuite(object): def __init__(self, name): self.name = name def GetName(self): return self.name # Use this to run several variants of the tests, e.g.: # VARIANT_FLAGS = [[], ['--always_compact', '--noflush_code']] VARIANT_FLAGS = [[]] class TestRepository(TestSuite): def __init__(self, path): normalized_path = abspath(path) super(TestRepository, self).__init__(basename(normalized_path)) self.path = normalized_path self.is_loaded = False self.config = None def GetConfiguration(self, context): if self.is_loaded: return self.config self.is_loaded = True file = None try: (file, pathname, description) = imp.find_module('testcfg', [ self.path ]) module = imp.load_module('testcfg', file, pathname, description) self.config = module.GetConfiguration(context, self.path) finally: if file: file.close() return self.config def GetBuildRequirements(self, path, context): return self.GetConfiguration(context).GetBuildRequirements() def AddTestsToList(self, result, current_path, path, context, mode): for v in VARIANT_FLAGS: tests = self.GetConfiguration(context).ListTests(current_path, path, mode) for t in tests: t.variant_flags = v result += tests def GetTestStatus(self, context, sections, defs): self.GetConfiguration(context).GetTestStatus(sections, defs) class LiteralTestSuite(TestSuite): def __init__(self, tests): super(LiteralTestSuite, self).__init__('root') self.tests = tests def GetBuildRequirements(self, path, context): (name, rest) = CarCdr(path) result = [ ] for test in self.tests: if not name or name.match(test.GetName()): result += 
test.GetBuildRequirements(rest, context) return result def ListTests(self, current_path, path, context, mode): (name, rest) = CarCdr(path) result = [ ] for test in self.tests: test_name = test.GetName() if not name or name.match(test_name): full_path = current_path + [test_name] test.AddTestsToList(result, full_path, path, context, mode) result.sort(cmp=lambda a, b: cmp(a.GetName(), b.GetName())) return result def GetTestStatus(self, context, sections, defs): for test in self.tests: test.GetTestStatus(context, sections, defs) SUFFIX = { 'debug' : '_g', 'release' : '' } FLAGS = { 'debug' : ['--enable-slow-asserts', '--debug-code', '--verify-heap'], 'release' : []} TIMEOUT_SCALEFACTOR = { 'debug' : 4, 'release' : 1 } class Context(object): def __init__(self, workspace, buildspace, verbose, vm, timeout, processor, suppress_dialogs, store_unexpected_output): self.workspace = workspace self.buildspace = buildspace self.verbose = verbose self.vm_root = vm self.timeout = timeout self.processor = processor self.suppress_dialogs = suppress_dialogs self.store_unexpected_output = store_unexpected_output def GetVm(self, mode): if mode == 'debug': name = 'out/Debug/node' else: name = 'out/Release/node' # Currently GYP does not support output_dir for MSVS. 
# http://code.google.com/p/gyp/issues/detail?id=40 # It will put the builds into Release/node.exe or Debug/node.exe if utils.IsWindows(): out_dir = os.path.join(dirname(__file__), "..", "out") if not exists(out_dir): if mode == 'debug': name = os.path.abspath('Debug/node.exe') else: name = os.path.abspath('Release/node.exe') else: name = os.path.abspath(name + '.exe') return name def GetVmCommand(self, testcase, mode): return [self.GetVm(mode)] + self.GetVmFlags(testcase, mode) def GetVmFlags(self, testcase, mode): return testcase.variant_flags + FLAGS[mode] def GetTimeout(self, mode): return self.timeout * TIMEOUT_SCALEFACTOR[mode] def RunTestCases(cases_to_run, progress, tasks): progress = PROGRESS_INDICATORS[progress](cases_to_run) return progress.Run(tasks) def BuildRequirements(context, requirements, mode, scons_flags): command_line = (['scons', '-Y', context.workspace, 'mode=' + ",".join(mode)] + requirements + scons_flags) output = ExecuteNoCapture(command_line, context) return output.exit_code == 0 # ------------------------------------------- # --- T e s t C o n f i g u r a t i o n --- # ------------------------------------------- SKIP = 'skip' FAIL = 'fail' PASS = 'pass' OKAY = 'okay' TIMEOUT = 'timeout' CRASH = 'crash' SLOW = 'slow' class Expression(object): pass class Constant(Expression): def __init__(self, value): self.value = value def Evaluate(self, env, defs): return self.value class Variable(Expression): def __init__(self, name): self.name = name def GetOutcomes(self, env, defs): if self.name in env: return ListSet([env[self.name]]) else: return Nothing() class Outcome(Expression): def __init__(self, name): self.name = name def GetOutcomes(self, env, defs): if self.name in defs: return defs[self.name].GetOutcomes(env, defs) else: return ListSet([self.name]) class Set(object): pass class ListSet(Set): def __init__(self, elms): self.elms = elms def __str__(self): return "ListSet%s" % str(self.elms) def Intersect(self, that): if not isinstance(that, 
ListSet): return that.Intersect(self) return ListSet([ x for x in self.elms if x in that.elms ]) def Union(self, that): if not isinstance(that, ListSet): return that.Union(self) return ListSet(self.elms + [ x for x in that.elms if x not in self.elms ]) def IsEmpty(self): return len(self.elms) == 0 class Everything(Set): def Intersect(self, that): return that def Union(self, that): return self def IsEmpty(self): return False class Nothing(Set): def Intersect(self, that): return self def Union(self, that): return that def IsEmpty(self): return True class Operation(Expression): def __init__(self, left, op, right): self.left = left self.op = op self.right = right def Evaluate(self, env, defs): if self.op == '||' or self.op == ',': return self.left.Evaluate(env, defs) or self.right.Evaluate(env, defs) elif self.op == 'if': return False elif self.op == '==': inter = self.left.GetOutcomes(env, defs).Intersect(self.right.GetOutcomes(env, defs)) return not inter.IsEmpty() else: assert self.op == '&&' return self.left.Evaluate(env, defs) and self.right.Evaluate(env, defs) def GetOutcomes(self, env, defs): if self.op == '||' or self.op == ',': return self.left.GetOutcomes(env, defs).Union(self.right.GetOutcomes(env, defs)) elif self.op == 'if': if self.right.Evaluate(env, defs): return self.left.GetOutcomes(env, defs) else: return Nothing() else: assert self.op == '&&' return self.left.GetOutcomes(env, defs).Intersect(self.right.GetOutcomes(env, defs)) def IsAlpha(str): for char in str: if not (char.isalpha() or char.isdigit() or char == '_'): return False return True class Tokenizer(object): """A simple string tokenizer that chops expressions into variables, parens and operators""" def __init__(self, expr): self.index = 0 self.expr = expr self.length = len(expr) self.tokens = None def Current(self, length = 1): if not self.HasMore(length): return "" return self.expr[self.index:self.index+length] def HasMore(self, length = 1): return self.index < self.length + (length - 1) 
def Advance(self, count = 1): self.index = self.index + count def AddToken(self, token): self.tokens.append(token) def SkipSpaces(self): while self.HasMore() and self.Current().isspace(): self.Advance() def Tokenize(self): self.tokens = [ ] while self.HasMore(): self.SkipSpaces() if not self.HasMore(): return None if self.Current() == '(': self.AddToken('(') self.Advance() elif self.Current() == ')': self.AddToken(')') self.Advance() elif self.Current() == '$': self.AddToken('$') self.Advance() elif self.Current() == ',': self.AddToken(',') self.Advance() elif IsAlpha(self.Current()): buf = "" while self.HasMore() and IsAlpha(self.Current()): buf += self.Current() self.Advance() self.AddToken(buf) elif self.Current(2) == '&&': self.AddToken('&&') self.Advance(2) elif self.Current(2) == '||': self.AddToken('||') self.Advance(2) elif self.Current(2) == '==': self.AddToken('==') self.Advance(2) else: return None return self.tokens class Scanner(object): """A simple scanner that can serve out tokens from a given list""" def __init__(self, tokens): self.tokens = tokens self.length = len(tokens) self.index = 0 def HasMore(self): return self.index < self.length def Current(self): return self.tokens[self.index] def Advance(self): self.index = self.index + 1 def ParseAtomicExpression(scan): if scan.Current() == "true": scan.Advance() return Constant(True) elif scan.Current() == "false": scan.Advance() return Constant(False) elif IsAlpha(scan.Current()): name = scan.Current() scan.Advance() return Outcome(name.lower()) elif scan.Current() == '$': scan.Advance() if not IsAlpha(scan.Current()): return None name = scan.Current() scan.Advance() return Variable(name.lower()) elif scan.Current() == '(': scan.Advance() result = ParseLogicalExpression(scan) if (not result) or (scan.Current() != ')'): return None scan.Advance() return result else: return None BINARIES = ['=='] def ParseOperatorExpression(scan): left = ParseAtomicExpression(scan) if not left: return None while 
scan.HasMore() and (scan.Current() in BINARIES): op = scan.Current() scan.Advance() right = ParseOperatorExpression(scan) if not right: return None left = Operation(left, op, right) return left def ParseConditionalExpression(scan): left = ParseOperatorExpression(scan) if not left: return None while scan.HasMore() and (scan.Current() == 'if'): scan.Advance() right = ParseOperatorExpression(scan) if not right: return None left= Operation(left, 'if', right) return left LOGICALS = ["&&", "||", ","] def ParseLogicalExpression(scan): left = ParseConditionalExpression(scan) if not left: return None while scan.HasMore() and (scan.Current() in LOGICALS): op = scan.Current() scan.Advance() right = ParseConditionalExpression(scan) if not right: return None left = Operation(left, op, right) return left def ParseCondition(expr): """Parses a logical expression into an Expression object""" tokens = Tokenizer(expr).Tokenize() if not tokens: print "Malformed expression: '%s'" % expr return None scan = Scanner(tokens) ast = ParseLogicalExpression(scan) if not ast: print "Malformed expression: '%s'" % expr return None if scan.HasMore(): print "Malformed expression: '%s'" % expr return None return ast class ClassifiedTest(object): def __init__(self, case, outcomes): self.case = case self.outcomes = outcomes class Configuration(object): """The parsed contents of a configuration file""" def __init__(self, sections, defs): self.sections = sections self.defs = defs def ClassifyTests(self, cases, env): sections = [s for s in self.sections if s.condition.Evaluate(env, self.defs)] all_rules = reduce(list.__add__, [s.rules for s in sections], []) unused_rules = set(all_rules) result = [ ] all_outcomes = set([]) for case in cases: matches = [ r for r in all_rules if r.Contains(case.path) ] outcomes = set([]) for rule in matches: outcomes = outcomes.union(rule.GetOutcomes(env, self.defs)) unused_rules.discard(rule) if not outcomes: outcomes = [PASS] case.outcomes = outcomes all_outcomes = 
all_outcomes.union(outcomes) result.append(ClassifiedTest(case, outcomes)) return (result, list(unused_rules), all_outcomes) class Section(object): """A section of the configuration file. Sections are enabled or disabled prior to running the tests, based on their conditions""" def __init__(self, condition): self.condition = condition self.rules = [ ] def AddRule(self, rule): self.rules.append(rule) class Rule(object): """A single rule that specifies the expected outcome for a single test.""" def __init__(self, raw_path, path, value): self.raw_path = raw_path self.path = path self.value = value def GetOutcomes(self, env, defs): set = self.value.GetOutcomes(env, defs) assert isinstance(set, ListSet) return set.elms def Contains(self, path): if len(self.path) > len(path): return False for i in xrange(len(self.path)): if not self.path[i].match(path[i]): return False return True HEADER_PATTERN = re.compile(r'\[([^]]+)\]') RULE_PATTERN = re.compile(r'\s*([^: ]*)\s*:(.*)') DEF_PATTERN = re.compile(r'^def\s*(\w+)\s*=(.*)$') PREFIX_PATTERN = re.compile(r'^\s*prefix\s+([\w\_\.\-\/]+)$') def ReadConfigurationInto(path, sections, defs): current_section = Section(Constant(True)) sections.append(current_section) prefix = [] for line in utils.ReadLinesFrom(path): header_match = HEADER_PATTERN.match(line) if header_match: condition_str = header_match.group(1).strip() condition = ParseCondition(condition_str) new_section = Section(condition) sections.append(new_section) current_section = new_section continue rule_match = RULE_PATTERN.match(line) if rule_match: path = prefix + SplitPath(rule_match.group(1).strip()) value_str = rule_match.group(2).strip() value = ParseCondition(value_str) if not value: return False current_section.AddRule(Rule(rule_match.group(1), path, value)) continue def_match = DEF_PATTERN.match(line) if def_match: name = def_match.group(1).lower() value = ParseCondition(def_match.group(2).strip()) if not value: return False defs[name] = value continue 
prefix_match = PREFIX_PATTERN.match(line) if prefix_match: prefix = SplitPath(prefix_match.group(1).strip()) continue print "Malformed line: '%s'." % line return False return True # --------------- # --- M a i n --- # --------------- ARCH_GUESS = utils.GuessArchitecture() def BuildOptions(): result = optparse.OptionParser() result.add_option("-m", "--mode", help="The test modes in which to run (comma-separated)", default='release') result.add_option("-v", "--verbose", help="Verbose output", default=False, action="store_true") result.add_option("-S", dest="scons_flags", help="Flag to pass through to scons", default=[], action="append") result.add_option("-p", "--progress", help="The style of progress indicator (verbose, dots, color, mono, tap)", choices=PROGRESS_INDICATORS.keys(), default="mono") result.add_option("--no-build", help="Don't build requirements", default=True, action="store_true") result.add_option("--build-only", help="Only build requirements, don't run the tests", default=False, action="store_true") result.add_option("--report", help="Print a summary of the tests to be run", default=False, action="store_true") result.add_option("-s", "--suite", help="A test suite", default=[], action="append") result.add_option("-t", "--timeout", help="Timeout in seconds", default=60, type="int") result.add_option("--arch", help='The architecture to run tests for', default='none') result.add_option("--snapshot", help="Run the tests with snapshot turned on", default=False, action="store_true") result.add_option("--simulator", help="Run tests with architecture simulator", default='none') result.add_option("--special-command", default=None) result.add_option("--use-http1", help="Pass --use-http1 switch to node", default=False, action="store_true") result.add_option("--valgrind", help="Run tests through valgrind", default=False, action="store_true") result.add_option("--cat", help="Print the source of the tests", default=False, action="store_true") 
result.add_option("--warn-unused", help="Report unused rules", default=False, action="store_true") result.add_option("-j", help="The number of parallel tasks to run", default=1, type="int") result.add_option("--time", help="Print timing information after running", default=False, action="store_true") result.add_option("--suppress-dialogs", help="Suppress Windows dialogs for crashing tests", dest="suppress_dialogs", default=True, action="store_true") result.add_option("--no-suppress-dialogs", help="Display Windows dialogs for crashing tests", dest="suppress_dialogs", action="store_false") result.add_option("--shell", help="Path to V8 shell", default="shell") result.add_option("--store-unexpected-output", help="Store the temporary JS files from tests that fails", dest="store_unexpected_output", default=True, action="store_true") result.add_option("--no-store-unexpected-output", help="Deletes the temporary JS files from tests that fails", dest="store_unexpected_output", action="store_false") return result def ProcessOptions(options): global VERBOSE VERBOSE = options.verbose options.mode = options.mode.split(',') for mode in options.mode: if not mode in ['debug', 'release']: print "Unknown mode %s" % mode return False if options.simulator != 'none': # Simulator argument was set. Make sure arch and simulator agree. if options.simulator != options.arch: if options.arch == 'none': options.arch = options.simulator else: print "Architecture %s does not match sim %s" %(options.arch, options.simulator) return False # Ensure that the simulator argument is handed down to scons. options.scons_flags.append("simulator=" + options.simulator) else: # If options.arch is not set by the command line and no simulator setting # was found, set the arch to the guess. 
if options.arch == 'none': options.arch = ARCH_GUESS options.scons_flags.append("arch=" + options.arch) if options.snapshot: options.scons_flags.append("snapshot=on") return True REPORT_TEMPLATE = """\ Total: %(total)i tests * %(skipped)4d tests will be skipped * %(nocrash)4d tests are expected to be flaky but not crash * %(pass)4d tests are expected to pass * %(fail_ok)4d tests are expected to fail that we won't fix * %(fail)4d tests are expected to fail that we should fix\ """ def PrintReport(cases): def IsFlaky(o): return (PASS in o) and (FAIL in o) and (not CRASH in o) and (not OKAY in o) def IsFailOk(o): return (len(o) == 2) and (FAIL in o) and (OKAY in o) unskipped = [c for c in cases if not SKIP in c.outcomes] print REPORT_TEMPLATE % { 'total': len(cases), 'skipped': len(cases) - len(unskipped), 'nocrash': len([t for t in unskipped if IsFlaky(t.outcomes)]), 'pass': len([t for t in unskipped if list(t.outcomes) == [PASS]]), 'fail_ok': len([t for t in unskipped if IsFailOk(t.outcomes)]), 'fail': len([t for t in unskipped if list(t.outcomes) == [FAIL]]) } class Pattern(object): def __init__(self, pattern): self.pattern = pattern self.compiled = None def match(self, str): if not self.compiled: pattern = "^" + self.pattern.replace('*', '.*') + "$" self.compiled = re.compile(pattern) return self.compiled.match(str) def __str__(self): return self.pattern def SplitPath(s): stripped = [ c.strip() for c in s.split('/') ] return [ Pattern(s) for s in stripped if len(s) > 0 ] def GetSpecialCommandProcessor(value): if (not value) or (value.find('@') == -1): def ExpandCommand(args): return args return ExpandCommand else: pos = value.find('@') import urllib prefix = urllib.unquote(value[:pos]).split() suffix = urllib.unquote(value[pos+1:]).split() def ExpandCommand(args): return prefix + args + suffix return ExpandCommand BUILT_IN_TESTS = ['simple', 'pummel', 'message', 'internet', 'gc'] def GetSuites(test_root): def IsSuite(path): return isdir(path) and exists(join(path, 
'testcfg.py')) return [ f for f in os.listdir(test_root) if IsSuite(join(test_root, f)) ] def FormatTime(d): millis = round(d * 1000) % 1000 return time.strftime("%M:%S.", time.gmtime(d)) + ("%03i" % millis) def Main(): parser = BuildOptions() (options, args) = parser.parse_args() if not ProcessOptions(options): parser.print_help() return 1 workspace = abspath(join(dirname(sys.argv[0]), '..')) suites = GetSuites(join(workspace, 'test')) repositories = [TestRepository(join(workspace, 'test', name)) for name in suites] repositories += [TestRepository(a) for a in options.suite] root = LiteralTestSuite(repositories) if len(args) == 0: paths = [SplitPath(t) for t in BUILT_IN_TESTS] else: paths = [ ] for arg in args: path = SplitPath(arg) paths.append(path) # Check for --valgrind option. If enabled, we overwrite the special # command flag with a command that uses the run-valgrind.py script. if options.valgrind: run_valgrind = join(workspace, "tools", "run-valgrind.py") options.special_command = "python -u " + run_valgrind + " @" shell = abspath(options.shell) buildspace = dirname(shell) processor = GetSpecialCommandProcessor(options.special_command) if options.use_http1: def wrap(processor): return lambda args: processor(args[:1] + ['--use-http1'] + args[1:]) processor = wrap(processor) context = Context(workspace, buildspace, VERBOSE, shell, options.timeout, processor, options.suppress_dialogs, options.store_unexpected_output) # First build the required targets if not options.no_build: reqs = [ ] for path in paths: reqs += root.GetBuildRequirements(path, context) reqs = list(set(reqs)) if len(reqs) > 0: if options.j != 1: options.scons_flags += ['-j', str(options.j)] if not BuildRequirements(context, reqs, options.mode, options.scons_flags): return 1 # Just return if we are only building the targets for running the tests. 
if options.build_only: return 0 # Get status for tests sections = [ ] defs = { } root.GetTestStatus(context, sections, defs) config = Configuration(sections, defs) # List the tests all_cases = [ ] all_unused = [ ] unclassified_tests = [ ] globally_unused_rules = None for path in paths: for mode in options.mode: if not exists(context.GetVm(mode)): print "Can't find shell executable: '%s'" % context.GetVm(mode) continue env = { 'mode': mode, 'system': utils.GuessOS(), 'arch': options.arch, 'simulator': options.simulator } test_list = root.ListTests([], path, context, mode) unclassified_tests += test_list (cases, unused_rules, all_outcomes) = config.ClassifyTests(test_list, env) if globally_unused_rules is None: globally_unused_rules = set(unused_rules) else: globally_unused_rules = globally_unused_rules.intersection(unused_rules) all_cases += cases all_unused.append(unused_rules) if options.cat: visited = set() for test in unclassified_tests: key = tuple(test.path) if key in visited: continue visited.add(key) print "--- begin source: %s ---" % test.GetLabel() source = test.GetSource().strip() print source print "--- end source: %s ---" % test.GetLabel() return 0 if options.warn_unused: for rule in globally_unused_rules: print "Rule for '%s' was not used." % '/'.join([str(s) for s in rule.path]) if options.report: PrintReport(all_cases) result = None def DoSkip(case): return SKIP in case.outcomes or SLOW in case.outcomes cases_to_run = [ c for c in all_cases if not DoSkip(c) ] if len(cases_to_run) == 0: print "No tests to run." return 0 else: try: start = time.time() if RunTestCases(cases_to_run, options.progress, options.j): result = 0 else: result = 1 duration = time.time() - start except KeyboardInterrupt: print "Interrupted" return 1 if options.time: # Write the times to stderr to make it easy to separate from the # test output. 
print sys.stderr.write("--- Total time: %s ---\n" % FormatTime(duration)) timed_tests = [ t.case for t in cases_to_run if not t.case.duration is None ] timed_tests.sort(lambda a, b: a.CompareTime(b)) index = 1 for entry in timed_tests[:20]: t = FormatTime(entry.duration) sys.stderr.write("%4i (%s) %s\n" % (index, t, entry.GetLabel())) index += 1 return result if __name__ == '__main__': sys.exit(Main())<|fim▁end|>
start = datetime.now() output = case.Run() case.duration = (datetime.now() - start) except IOError, e:
<|file_name|>interface_with_str.rs<|end_file_name|><|fim▁begin|>foreign_callback!(callback SomeObserver { self_type SomeTrait; onStateChanged = SomeTrait::on_state_changed(&self, _: &str);<|fim▁hole|>foreigner_class!(class ClassWithCallbacks { self_type Foo; constructor Foo::default() -> Foo; method f1(&mut self, cb: Box<dyn SomeTrait>); });<|fim▁end|>
});
<|file_name|>IPostcssCompilerForPreProcessor.ts<|end_file_name|><|fim▁begin|>import ICompiler = require("../postcss/ICompiler"); interface IPostcssCompilerForPreProcessor extends ICompiler { } <|fim▁hole|><|fim▁end|>
export = IPostcssCompilerForPreProcessor;
<|file_name|>Backstab.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
class Backstab: pass
<|file_name|>logger.js<|end_file_name|><|fim▁begin|>'use strict' const config = { local: { pino: { prettyPrint: false } }, ci: { pino: { prettyPrint: false } }, staging: { pino: { prettyPrint: false } }, prod: {<|fim▁hole|> prettyPrint: false } } } module.exports = function resolveConfig (mode) { return config[mode || process.env.NODE_ENV || 'local'] }<|fim▁end|>
pino: {
<|file_name|>regions-early-bound-trait-param.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Tests that you can use an early-bound lifetime parameter as // on of the generic parameters in a trait. #![allow(unknown_features)] #![feature(box_syntax)] trait Trait<'a> { fn long(&'a self) -> isize; fn short<'b>(&'b self) -> isize; } fn poly_invoke<'c, T: Trait<'c>>(x: &'c T) -> (isize, isize) { let l = x.long(); let s = x.short(); (l,s) } fn object_invoke1<'d>(x: &'d Trait<'d>) -> (isize, isize) { let l = x.long(); let s = x.short(); (l,s) } struct Struct1<'e> { f: &'e (Trait<'e>+'e) } fn field_invoke1<'f, 'g>(x: &'g Struct1<'f>) -> (isize,isize) { let l = x.f.long(); let s = x.f.short(); (l,s) } struct Struct2<'h, 'i> { f: &'h (Trait<'i>+'h) } fn object_invoke2<'j, 'k>(x: &'k Trait<'j>) -> isize { x.short() } fn field_invoke2<'l, 'm, 'n>(x: &'n Struct2<'l,'m>) -> isize { x.f.short() } trait MakerTrait { fn mk() -> Self; } fn make_val<T:MakerTrait>() -> T { MakerTrait::mk() } trait RefMakerTrait<'q> { fn mk(Self) -> &'q Self; } fn make_ref<'r, T:RefMakerTrait<'r>>(t:T) -> &'r T { RefMakerTrait::mk(t) } impl<'s> Trait<'s> for (isize,isize) { fn long(&'s self) -> isize { let &(x,_) = self; x } fn short<'b>(&'b self) -> isize { let &(_,y) = self; y } } impl<'t> MakerTrait for Box<Trait<'t>+'static> { fn mk() -> Box<Trait<'t>+'static> { let tup: Box<(isize, isize)> = box() (4,5); tup as Box<Trait> } } enum List<'l> { Cons(isize, &'l List<'l>), Null } impl<'l> List<'l> { fn car<'m>(&'m self) -> isize { match self { 
&List::Cons(car, _) => car, &List::Null => panic!(), } } fn cdr<'n>(&'n self) -> &'l List<'l> { match self { &List::Cons(_, cdr) => cdr, &List::Null => panic!(), } } } impl<'t> RefMakerTrait<'t> for List<'t> { fn mk(l:List<'t>) -> &'t List<'t> { l.cdr() } } pub fn main() { let t = (2,3); let o = &t as &Trait; let s1 = Struct1 { f: o }; let s2 = Struct2 { f: o }; assert_eq!(poly_invoke(&t), (2,3)); assert_eq!(object_invoke1(&t), (2,3)); assert_eq!(field_invoke1(&s1), (2,3)); assert_eq!(object_invoke2(&t), 3); assert_eq!(field_invoke2(&s2), 3); let m : Box<Trait> = make_val();<|fim▁hole|> // The RefMakerTrait above is pretty strange (i.e. it is strange // to consume a value of type T and return a &T). Easiest thing // that came to my mind: consume a cell of a linked list and // return a reference to the list it points to. let l0 = List::Null; let l1 = List::Cons(1, &l0); let l2 = List::Cons(2, &l1); let rl1 = &l1; let r = make_ref(l2); assert_eq!(rl1.car(), r.car()); }<|fim▁end|>
assert_eq!(object_invoke1(&*m), (4,5)); assert_eq!(object_invoke2(&*m), 5);
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>#[crate_id = "reddit#0.1"]; #[comment = "Rust binding to Reddit API"]; #[license = "LGPLv3"]; #[crate_type = "lib"]; extern mod extra; extern mod http; // Import macros mod macros; pub mod session; pub mod redditor; pub mod subreddit; pub mod post; pub mod comment; pub mod objects; mod util;<|fim▁end|>
#[feature(macro_rules)]; #[feature(managed_boxes)];
<|file_name|>worker.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use devtools_traits::{DevtoolsPageInfo, ScriptToDevtoolsControlMsg}; use dom::bindings::codegen::Bindings::EventHandlerBinding::EventHandlerNonNull; use dom::bindings::codegen::Bindings::WorkerBinding; use dom::bindings::codegen::Bindings::WorkerBinding::WorkerMethods; use dom::bindings::error::{Error, ErrorResult, Fallible}; use dom::bindings::global::GlobalRef; use dom::bindings::inheritance::Castable; use dom::bindings::js::Root; use dom::bindings::refcounted::Trusted; use dom::bindings::reflector::{Reflectable, reflect_dom_object}; use dom::bindings::structuredclone::StructuredCloneData; use dom::bindings::trace::JSTraceable; use dom::dedicatedworkerglobalscope::{DedicatedWorkerGlobalScope, WorkerScriptMsg}; use dom::errorevent::ErrorEvent; use dom::event::{Event, EventBubbles, EventCancelable}; use dom::eventtarget::EventTarget; use dom::messageevent::MessageEvent; use dom::workerglobalscope::WorkerGlobalScopeInit; use ipc_channel::ipc; use js::jsapi::{HandleValue, JSContext, JSRuntime, RootedValue}; use js::jsapi::{JSAutoCompartment, JSAutoRequest, JS_RequestInterruptCallback}; use js::jsval::UndefinedValue; use js::rust::Runtime; use script_runtime::ScriptChan; use script_thread::Runnable; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::mpsc::{Sender, channel}; use std::sync::{Arc, Mutex}; use util::str::DOMString; pub type TrustedWorkerAddress = Trusted<Worker>; // https://html.spec.whatwg.org/multipage/#worker #[dom_struct] pub struct Worker { eventtarget: EventTarget, #[ignore_heap_size_of = "Defined in std"] /// Sender to the Receiver associated with the DedicatedWorkerGlobalScope /// this Worker created. 
sender: Sender<(TrustedWorkerAddress, WorkerScriptMsg)>, closing: Arc<AtomicBool>, #[ignore_heap_size_of = "Defined in rust-mozjs"] runtime: Arc<Mutex<Option<SharedRt>>> } impl Worker { fn new_inherited(sender: Sender<(TrustedWorkerAddress, WorkerScriptMsg)>, closing: Arc<AtomicBool>) -> Worker { Worker { eventtarget: EventTarget::new_inherited(), sender: sender, closing: closing, runtime: Arc::new(Mutex::new(None)) } } pub fn new(global: GlobalRef, sender: Sender<(TrustedWorkerAddress, WorkerScriptMsg)>, closing: Arc<AtomicBool>) -> Root<Worker> { reflect_dom_object(box Worker::new_inherited(sender, closing), global, WorkerBinding::Wrap) } // https://html.spec.whatwg.org/multipage/#dom-worker pub fn Constructor(global: GlobalRef, script_url: DOMString) -> Fallible<Root<Worker>> { // Step 2-4. let worker_url = match global.api_base_url().join(&script_url) { Ok(url) => url, Err(_) => return Err(Error::Syntax), }; let resource_thread = global.resource_thread(); let constellation_chan = global.constellation_chan(); let scheduler_chan = global.scheduler_chan(); let (sender, receiver) = channel(); let closing = Arc::new(AtomicBool::new(false)); let worker = Worker::new(global, sender.clone(), closing.clone()); let worker_ref = Trusted::new(worker.r()); let worker_id = global.get_next_worker_id(); let (devtools_sender, devtools_receiver) = ipc::channel().unwrap(); let optional_sender = match global.devtools_chan() { Some(ref chan) => { let pipeline_id = global.pipeline(); let title = format!("Worker for {}", worker_url); let page_info = DevtoolsPageInfo { title: title, url: worker_url.clone(), }; chan.send(ScriptToDevtoolsControlMsg::NewGlobal((pipeline_id, Some(worker_id)), devtools_sender.clone(), page_info)).unwrap(); Some(devtools_sender) }, None => None, }; let init = WorkerGlobalScopeInit { resource_thread: resource_thread, mem_profiler_chan: global.mem_profiler_chan(), to_devtools_sender: global.devtools_chan(), from_devtools_sender: optional_sender, 
constellation_chan: constellation_chan, scheduler_chan: scheduler_chan, worker_id: worker_id, closing: closing, };<|fim▁hole|> DedicatedWorkerGlobalScope::run_worker_scope( init, worker_url, global.pipeline(), devtools_receiver, worker.runtime.clone(), worker_ref, global.script_chan(), sender, receiver); Ok(worker) } pub fn is_closing(&self) -> bool { self.closing.load(Ordering::SeqCst) } pub fn handle_message(address: TrustedWorkerAddress, data: StructuredCloneData) { let worker = address.root(); if worker.is_closing() { return; } let global = worker.r().global(); let target = worker.upcast(); let _ar = JSAutoRequest::new(global.r().get_cx()); let _ac = JSAutoCompartment::new(global.r().get_cx(), target.reflector().get_jsobject().get()); let mut message = RootedValue::new(global.r().get_cx(), UndefinedValue()); data.read(global.r(), message.handle_mut()); MessageEvent::dispatch_jsval(target, global.r(), message.handle()); } pub fn dispatch_simple_error(address: TrustedWorkerAddress) { let worker = address.root(); worker.upcast().fire_simple_event("error"); } pub fn handle_error_message(address: TrustedWorkerAddress, message: DOMString, filename: DOMString, lineno: u32, colno: u32) { let worker = address.root(); if worker.is_closing() { return; } let global = worker.r().global(); let error = RootedValue::new(global.r().get_cx(), UndefinedValue()); let errorevent = ErrorEvent::new(global.r(), atom!("error"), EventBubbles::Bubbles, EventCancelable::Cancelable, message, filename, lineno, colno, error.handle()); errorevent.upcast::<Event>().fire(worker.upcast()); } } impl WorkerMethods for Worker { // https://html.spec.whatwg.org/multipage/#dom-worker-postmessage fn PostMessage(&self, cx: *mut JSContext, message: HandleValue) -> ErrorResult { let data = try!(StructuredCloneData::write(cx, message)); let address = Trusted::new(self); self.sender.send((address, WorkerScriptMsg::DOMMessage(data))).unwrap(); Ok(()) } // 
https://html.spec.whatwg.org/multipage/#terminate-a-worker fn Terminate(&self) { // Step 1 if self.closing.swap(true, Ordering::SeqCst) { return; } // Step 4 if let Some(runtime) = *self.runtime.lock().unwrap() { runtime.request_interrupt(); } } // https://html.spec.whatwg.org/multipage/#handler-worker-onmessage event_handler!(message, GetOnmessage, SetOnmessage); // https://html.spec.whatwg.org/multipage/#handler-workerglobalscope-onerror event_handler!(error, GetOnerror, SetOnerror); } pub struct WorkerMessageHandler { addr: TrustedWorkerAddress, data: StructuredCloneData, } impl WorkerMessageHandler { pub fn new(addr: TrustedWorkerAddress, data: StructuredCloneData) -> WorkerMessageHandler { WorkerMessageHandler { addr: addr, data: data, } } } impl Runnable for WorkerMessageHandler { fn handler(self: Box<WorkerMessageHandler>) { let this = *self; Worker::handle_message(this.addr, this.data); } } pub struct SimpleWorkerErrorHandler { addr: TrustedWorkerAddress, } impl SimpleWorkerErrorHandler { pub fn new(addr: TrustedWorkerAddress) -> SimpleWorkerErrorHandler { SimpleWorkerErrorHandler { addr: addr } } } impl Runnable for SimpleWorkerErrorHandler { fn handler(self: Box<SimpleWorkerErrorHandler>) { let this = *self; Worker::dispatch_simple_error(this.addr); } } pub struct WorkerErrorHandler { addr: TrustedWorkerAddress, msg: DOMString, file_name: DOMString, line_num: u32, col_num: u32, } impl WorkerErrorHandler { pub fn new(addr: TrustedWorkerAddress, msg: DOMString, file_name: DOMString, line_num: u32, col_num: u32) -> WorkerErrorHandler { WorkerErrorHandler { addr: addr, msg: msg, file_name: file_name, line_num: line_num, col_num: col_num, } } } impl Runnable for WorkerErrorHandler { fn handler(self: Box<WorkerErrorHandler>) { let this = *self; Worker::handle_error_message(this.addr, this.msg, this.file_name, this.line_num, this.col_num); } } #[derive(Copy, Clone)] pub struct SharedRt { rt: *mut JSRuntime } impl SharedRt { pub fn new(rt: &Runtime) -> SharedRt { 
SharedRt { rt: rt.rt() } } #[allow(unsafe_code)] pub fn request_interrupt(&self) { unsafe { JS_RequestInterruptCallback(self.rt); } } } #[allow(unsafe_code)] unsafe impl Send for SharedRt {}<|fim▁end|>
<|file_name|>MyWebMvcConfigure.java<|end_file_name|><|fim▁begin|>package com.imooc.config; import com.imooc.controller.interceptor.handlers.OneInterceptor; import com.imooc.controller.interceptor.handlers.TwoInterceptor; import org.springframework.context.annotation.Configuration; import org.springframework.web.servlet.config.annotation.InterceptorRegistry; import org.springframework.web.servlet.config.annotation.WebMvcConfigurer; @Configuration public class MyWebMvcConfigure implements WebMvcConfigurer { <|fim▁hole|> @Override public void addInterceptors(InterceptorRegistry registry) { /** * 单一hook */ registry.addInterceptor(new OneInterceptor()).addPathPatterns("/hook/one/**"); registry.addInterceptor(new TwoInterceptor()).addPathPatterns("/hook/two/**"); /** * 拦截器,按顺序hook */ registry.addInterceptor(new OneInterceptor()).addPathPatterns("/hook/one-two/**"); registry.addInterceptor(new TwoInterceptor()).addPathPatterns("/hook/one-two/**"); registry.addInterceptor(new TwoInterceptor()).addPathPatterns("/hook/two-one/**"); registry.addInterceptor(new OneInterceptor()).addPathPatterns("/hook/two-one/**"); } }<|fim▁end|>
<|file_name|>test_df_hf.py<|end_file_name|><|fim▁begin|>import pytest from forte.solvers import solver_factory, HF def test_df_rhf(): """Test DF-RHF on HF.""" ref_energy = -100.04775218911111 # define a molecule xyz = """ H 0.0 0.0 0.0 F 0.0 0.0 1.0 """ # create a molecular model input = solver_factory(molecule=xyz, basis='cc-pVTZ', int_type='df') # specify the electronic state state = input.state(charge=0, multiplicity=1, sym='a1') # create a HF object and run hf = HF(input, state=state)<|fim▁hole|> assert hf.value('hf energy') == pytest.approx(ref_energy, 1.0e-10) def test_df_rhf_select_aux(): """Test DF-RHF on HF.""" ref_energy = -100.04775602524956 # define a molecule xyz = """ H 0.0 0.0 0.0 F 0.0 0.0 1.0 """ # create a molecular model input = solver_factory(molecule=xyz, int_type='df', basis='cc-pVTZ', scf_aux_basis='cc-pVQZ-JKFIT') # specify the electronic state state = input.state(charge=0, multiplicity=1, sym='a1') # create a HF object and run hf = HF(input, state=state) hf.run() assert hf.value('hf energy') == pytest.approx(ref_energy, 1.0e-10) if __name__ == "__main__": test_df_rhf() test_df_rhf_select_aux()<|fim▁end|>
hf.run()
<|file_name|>diff.py<|end_file_name|><|fim▁begin|>"""Clowder command line diff controller .. codeauthor:: Joe DeCapo <[email protected]> """ import argparse import clowder.util.formatting as fmt from clowder.clowder_controller import CLOWDER_CONTROLLER, print_clowder_name, valid_clowder_yaml_required from clowder.config import Config from clowder.git.clowder_repo import print_clowder_repo_status from clowder.util.console import CONSOLE from .util import add_parser_arguments def add_diff_parser(subparsers: argparse._SubParsersAction) -> None: # noqa """Add clowder diff parser :param argparse._SubParsersAction subparsers: Subparsers action to add parser to """ parser = subparsers.add_parser('diff', help='Show git diff for projects') parser.formatter_class = argparse.RawTextHelpFormatter parser.set_defaults(func=diff) add_parser_arguments(parser, [ (['projects'], dict(metavar='<project|group>', default='default', nargs='*', choices=CLOWDER_CONTROLLER.project_choices_with_default, help=fmt.project_options_help_message('projects and groups to show diff for'))),<|fim▁hole|>@print_clowder_name @print_clowder_repo_status def diff(args) -> None: """Clowder diff command private implementation""" projects = Config().process_projects_arg(args.projects) projects = CLOWDER_CONTROLLER.filter_projects(CLOWDER_CONTROLLER.projects, projects) for project in projects: CONSOLE.stdout(project.status()) project.diff()<|fim▁end|>
]) @valid_clowder_yaml_required
<|file_name|>EnumTyp.java<|end_file_name|><|fim▁begin|>/* * Copyright 2004-2007 Sun Microsystems, Inc. All Rights Reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara, * CA 95054 USA or visit www.sun.com if you need additional information or * have any questions. 
*/ /* * @test * @bug 4853450 * @summary EnumType tests * @library ../../lib * @compile -source 1.5 EnumTyp.java * @run main EnumTyp */ import java.util.*; import com.sun.mirror.declaration.*; import com.sun.mirror.type.*; import com.sun.mirror.util.*; public class EnumTyp extends Tester { public static void main(String[] args) { (new EnumTyp()).run(); } // Declarations used by tests enum Suit { CIVIL, CRIMINAL } private Suit s; private EnumType e; // an enum type protected void init() { e = (EnumType) getField("s").getType(); } // TypeMirror methods @Test(result="enum") Collection<String> accept() { final Collection<String> res = new ArrayList<String>(); e.accept(new SimpleTypeVisitor() { public void visitTypeMirror(TypeMirror t) { res.add("type"); } public void visitReferenceType(ReferenceType t) { res.add("ref type"); } public void visitClassType(ClassType t) { res.add("class"); } public void visitEnumType(EnumType t) { res.add("enum"); } public void visitInterfaceType(InterfaceType t) { res.add("interface"); } }); return res; } // EnumType method @Test(result="EnumTyp.Suit")<|fim▁hole|> EnumDeclaration getDeclaration() { return e.getDeclaration(); } }<|fim▁end|>
<|file_name|>api_op_DescribeNetworkInsightsPaths.go<|end_file_name|><|fim▁begin|>// Code generated by smithy-go-codegen DO NOT EDIT. package ec2 import ( "context" "fmt" awsmiddleware "github.com/aws/aws-sdk-go-v2/aws/middleware" "github.com/aws/aws-sdk-go-v2/aws/signer/v4" "github.com/aws/aws-sdk-go-v2/service/ec2/types" "github.com/aws/smithy-go/middleware" smithyhttp "github.com/aws/smithy-go/transport/http" ) // Describes one or more of your paths. func (c *Client) DescribeNetworkInsightsPaths(ctx context.Context, params *DescribeNetworkInsightsPathsInput, optFns ...func(*Options)) (*DescribeNetworkInsightsPathsOutput, error) { if params == nil { params = &DescribeNetworkInsightsPathsInput{} } result, metadata, err := c.invokeOperation(ctx, "DescribeNetworkInsightsPaths", params, optFns, c.addOperationDescribeNetworkInsightsPathsMiddlewares) if err != nil { return nil, err } out := result.(*DescribeNetworkInsightsPathsOutput) out.ResultMetadata = metadata return out, nil } type DescribeNetworkInsightsPathsInput struct { // Checks whether you have the required permissions for the action, without // actually making the request, and provides an error response. If you have the // required permissions, the error response is DryRunOperation. Otherwise, it is // UnauthorizedOperation. DryRun *bool // The filters. The following are possible values: // // * Destination - The ID of the // resource. // // * DestinationPort - The destination port. // // * Name - The path name. // // * // Protocol - The protocol. // // * Source - The ID of the resource. Filters []types.Filter // The maximum number of results to return with a single call. To retrieve the // remaining results, make another call with the returned nextToken value. MaxResults *int32 // The IDs of the paths. NetworkInsightsPathIds []string // The token for the next page of results. NextToken *string noSmithyDocumentSerde } type DescribeNetworkInsightsPathsOutput struct { // Information about the paths. 
NetworkInsightsPaths []types.NetworkInsightsPath // The token to use to retrieve the next page of results. This value is null when // there are no more results to return. NextToken *string // Metadata pertaining to the operation's result. ResultMetadata middleware.Metadata noSmithyDocumentSerde } func (c *Client) addOperationDescribeNetworkInsightsPathsMiddlewares(stack *middleware.Stack, options Options) (err error) { err = stack.Serialize.Add(&awsEc2query_serializeOpDescribeNetworkInsightsPaths{}, middleware.After) if err != nil { return err } err = stack.Deserialize.Add(&awsEc2query_deserializeOpDescribeNetworkInsightsPaths{}, middleware.After) if err != nil {<|fim▁hole|> return err } if err = awsmiddleware.AddClientRequestIDMiddleware(stack); err != nil { return err } if err = smithyhttp.AddComputeContentLengthMiddleware(stack); err != nil { return err } if err = addResolveEndpointMiddleware(stack, options); err != nil { return err } if err = v4.AddComputePayloadSHA256Middleware(stack); err != nil { return err } if err = addRetryMiddlewares(stack, options); err != nil { return err } if err = addHTTPSignerV4Middleware(stack, options); err != nil { return err } if err = awsmiddleware.AddRawResponseToMetadata(stack); err != nil { return err } if err = awsmiddleware.AddRecordResponseTiming(stack); err != nil { return err } if err = addClientUserAgent(stack); err != nil { return err } if err = smithyhttp.AddErrorCloseResponseBodyMiddleware(stack); err != nil { return err } if err = smithyhttp.AddCloseResponseBodyMiddleware(stack); err != nil { return err } if err = stack.Initialize.Add(newServiceMetadataMiddleware_opDescribeNetworkInsightsPaths(options.Region), middleware.Before); err != nil { return err } if err = addRequestIDRetrieverMiddleware(stack); err != nil { return err } if err = addResponseErrorMiddleware(stack); err != nil { return err } if err = addRequestResponseLogging(stack, options); err != nil { return err } return nil } // 
DescribeNetworkInsightsPathsAPIClient is a client that implements the // DescribeNetworkInsightsPaths operation. type DescribeNetworkInsightsPathsAPIClient interface { DescribeNetworkInsightsPaths(context.Context, *DescribeNetworkInsightsPathsInput, ...func(*Options)) (*DescribeNetworkInsightsPathsOutput, error) } var _ DescribeNetworkInsightsPathsAPIClient = (*Client)(nil) // DescribeNetworkInsightsPathsPaginatorOptions is the paginator options for // DescribeNetworkInsightsPaths type DescribeNetworkInsightsPathsPaginatorOptions struct { // The maximum number of results to return with a single call. To retrieve the // remaining results, make another call with the returned nextToken value. Limit int32 // Set to true if pagination should stop if the service returns a pagination token // that matches the most recent token provided to the service. StopOnDuplicateToken bool } // DescribeNetworkInsightsPathsPaginator is a paginator for // DescribeNetworkInsightsPaths type DescribeNetworkInsightsPathsPaginator struct { options DescribeNetworkInsightsPathsPaginatorOptions client DescribeNetworkInsightsPathsAPIClient params *DescribeNetworkInsightsPathsInput nextToken *string firstPage bool } // NewDescribeNetworkInsightsPathsPaginator returns a new // DescribeNetworkInsightsPathsPaginator func NewDescribeNetworkInsightsPathsPaginator(client DescribeNetworkInsightsPathsAPIClient, params *DescribeNetworkInsightsPathsInput, optFns ...func(*DescribeNetworkInsightsPathsPaginatorOptions)) *DescribeNetworkInsightsPathsPaginator { if params == nil { params = &DescribeNetworkInsightsPathsInput{} } options := DescribeNetworkInsightsPathsPaginatorOptions{} if params.MaxResults != nil { options.Limit = *params.MaxResults } for _, fn := range optFns { fn(&options) } return &DescribeNetworkInsightsPathsPaginator{ options: options, client: client, params: params, firstPage: true, nextToken: params.NextToken, } } // HasMorePages returns a boolean indicating whether more pages are 
available func (p *DescribeNetworkInsightsPathsPaginator) HasMorePages() bool { return p.firstPage || (p.nextToken != nil && len(*p.nextToken) != 0) } // NextPage retrieves the next DescribeNetworkInsightsPaths page. func (p *DescribeNetworkInsightsPathsPaginator) NextPage(ctx context.Context, optFns ...func(*Options)) (*DescribeNetworkInsightsPathsOutput, error) { if !p.HasMorePages() { return nil, fmt.Errorf("no more pages available") } params := *p.params params.NextToken = p.nextToken var limit *int32 if p.options.Limit > 0 { limit = &p.options.Limit } params.MaxResults = limit result, err := p.client.DescribeNetworkInsightsPaths(ctx, &params, optFns...) if err != nil { return nil, err } p.firstPage = false prevToken := p.nextToken p.nextToken = result.NextToken if p.options.StopOnDuplicateToken && prevToken != nil && p.nextToken != nil && *prevToken == *p.nextToken { p.nextToken = nil } return result, nil } func newServiceMetadataMiddleware_opDescribeNetworkInsightsPaths(region string) *awsmiddleware.RegisterServiceMetadata { return &awsmiddleware.RegisterServiceMetadata{ Region: region, ServiceID: ServiceID, SigningName: "ec2", OperationName: "DescribeNetworkInsightsPaths", } }<|fim▁end|>
return err } if err = addSetLoggerMiddleware(stack, options); err != nil {
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>extern crate byteorder; use byteorder::{ByteOrder, BigEndian}; use std::io::{Read, Write, Result}; /// A byte buffer object specifically turned to easily read and write binary values pub struct ByteBuffer { data: Vec<u8>, wpos: usize, rpos: usize, rbit: usize, wbit: usize, } impl ByteBuffer { /// Construct a new, empty, ByteBuffer pub fn new() -> ByteBuffer { ByteBuffer { data: vec![], wpos: 0, rpos: 0, rbit: 0, wbit: 0, } } /// Construct a new ByteBuffer filled with the data array. pub fn from_bytes(bytes: &[u8]) -> ByteBuffer { let mut buffer = ByteBuffer::new(); buffer.write_bytes(bytes); buffer } /// Return the buffer size pub fn len(&self) -> usize { self.data.len() } /// Clear the buffer and reinitialize the reading and writing cursor pub fn clear(&mut self) { self.data.clear(); self.wpos = 0; self.rpos = 0; } /// Change the buffer size to size. /// /// _Note_: You cannot shrink a buffer with this method pub fn resize(&mut self, size: usize) { let diff = size - self.data.len(); if diff > 0 { self.data.extend(std::iter::repeat(0).take(diff)) } } // Write operations /// Append a byte array to the buffer. 
The buffer is automatically extended if needed /// /// #Example /// /// ``` /// # use bytebuffer::*; /// let mut buffer = ByteBuffer::new(); /// buffer.write_bytes(&vec![0x1, 0xFF, 0x45]); // buffer contains [0x1, 0xFF, 0x45] /// ``` pub fn write_bytes(&mut self, bytes: &[u8]) { self.flush_bit(); let size = bytes.len() + self.wpos; if size > self.data.len() { self.resize(size); } for v in bytes { self.data[self.wpos] = *v; self.wpos += 1; } } /// Append a byte (8 bits value) to the buffer /// /// #Example /// /// ``` /// # use bytebuffer::*; /// let mut buffer = ByteBuffer::new(); /// buffer.write_u8(1) // buffer contains [0x1] /// ``` pub fn write_u8(&mut self, val: u8) { self.write_bytes(&[val]); } /// Same as `write_u8()` but for signed values pub fn write_i8(&mut self, val: i8) { self.write_u8(val as u8); } /// Append a word (16 bits value) to the buffer /// /// #Example /// /// ``` /// # use bytebuffer::*; /// let mut buffer = ByteBuffer::new(); /// buffer.write_u16(1) // buffer contains [0x00, 0x1] if little endian /// ``` pub fn write_u16(&mut self, val: u16) { let mut buf = [0; 2]; BigEndian::write_u16(&mut buf, val); self.write_bytes(&buf); } /// Same as `write_u16()` but for signed values pub fn write_i16(&mut self, val: i16) { self.write_u16(val as u16); } /// Append a double word (32 bits value) to the buffer /// /// #Example /// /// ``` /// # use bytebuffer::*; /// let mut buffer = ByteBuffer::new(); /// buffer.write_u32(1) // buffer contains [0x00, 0x00, 0x00, 0x1] if little endian /// ``` pub fn write_u32(&mut self, val: u32) { let mut buf = [0; 4]; BigEndian::write_u32(&mut buf, val); self.write_bytes(&buf); } /// Same as `write_u32()` but for signed values pub fn write_i32(&mut self, val: i32) { self.write_u32(val as u32); } /// Append a quaddruple word (64 bits value) to the buffer /// /// #Example /// /// ``` /// # use bytebuffer::*; /// let mut buffer = ByteBuffer::new(); /// buffer.write_u64(1) // buffer contains [0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x1] if little endian /// ``` pub fn write_u64(&mut self, val: u64) { let mut buf = [0; 8]; BigEndian::write_u64(&mut buf, val); self.write_bytes(&buf); } /// Same as `write_u64()` but for signed values pub fn write_i64(&mut self, val: i64) { self.write_u64(val as u64); } /// Append a 32 bits floating point number to the buffer. /// /// #Example /// /// ``` /// # use bytebuffer::*; /// let mut buffer = ByteBuffer::new(); /// buffer.write_f32(0.1) /// ``` pub fn write_f32(&mut self, val: f32) { let mut buf = [0; 4]; BigEndian::write_f32(&mut buf, val); self.write_bytes(&buf); } /// Append a 64 bits floating point number to the buffer. /// /// #Example /// /// ``` /// # use bytebuffer::*; /// let mut buffer = ByteBuffer::new(); /// buffer.write_f64(0.1) /// ``` pub fn write_f64(&mut self, val: f64) { let mut buf = [0; 8]; BigEndian::write_f64(&mut buf, val); self.write_bytes(&buf); } /// Append a string to the buffer. /// /// *Format* The format is `(u32)size + size * (u8)characters` /// /// #Exapmle /// /// ``` /// # use bytebuffer::*; /// let mut buffer = ByteBuffer::new(); /// buffer.write_string("Hello") /// ``` pub fn write_string(&mut self, val: &str) { self.write_u32(val.len() as u32); self.write_bytes(val.as_bytes()); } // Read operations /// Read a defined amount of raw bytes. The program crash if not enough bytes are available pub fn read_bytes(&mut self, size: usize) -> Vec<u8> { self.flush_bit(); assert!(self.rpos + size <= self.data.len()); let range = self.rpos..self.rpos + size; let mut res = Vec::<u8>::new(); res.write(&self.data[range]).unwrap(); self.rpos += size; res } /// Read one byte. 
The program crash if not enough bytes are available /// /// #Example /// /// ``` /// # use bytebuffer::*; /// let mut buffer = ByteBuffer::from_bytes(&vec![0x1]); /// let value = buffer.read_u8(); //Value contains 1 /// ``` pub fn read_u8(&mut self) -> u8 { self.flush_bit(); assert!(self.rpos < self.data.len()); let pos = self.rpos; self.rpos += 1; self.data[pos] } /// Same as `read_u8()` but for signed values pub fn read_i8(&mut self) -> i8 { self.read_u8() as i8 } /// Read a 2-bytes long value. The program crash if not enough bytes are available /// /// #Example /// /// ``` /// # use bytebuffer::*; /// let mut buffer = ByteBuffer::from_bytes(&vec![0x0, 0x1]); /// let value = buffer.read_u16(); //Value contains 1 /// ``` pub fn read_u16(&mut self) -> u16 { self.flush_bit(); assert!(self.rpos + 2 <= self.data.len()); let range = self.rpos..self.rpos + 2; self.rpos += 2; BigEndian::read_u16(&self.data[range]) } /// Same as `read_u16()` but for signed values pub fn read_i16(&mut self) -> i16 { self.read_u16() as i16 } /// Read a four-bytes long value. The program crash if not enough bytes are available /// /// #Example /// /// ``` /// # use bytebuffer::*; /// let mut buffer = ByteBuffer::from_bytes(&vec![0x0, 0x0, 0x0, 0x1]); /// let value = buffer.read_u32(); // Value contains 1 /// ``` pub fn read_u32(&mut self) -> u32 { self.flush_bit(); assert!(self.rpos + 4 <= self.data.len()); let range = self.rpos..self.rpos + 4; self.rpos += 4; BigEndian::read_u32(&self.data[range]) } /// Same as `read_u32()` but for signed values pub fn read_i32(&mut self) -> i32 { self.read_u32() as i32 } /// Read an eight bytes long value. 
The program crash if not enough bytes are available /// /// #Example /// /// ``` /// # use bytebuffer::*; /// let mut buffer = ByteBuffer::from_bytes(&vec![0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1]); /// let value = buffer.read_u64(); //Value contains 1 /// ``` pub fn read_u64(&mut self) -> u64 { self.flush_bit(); assert!(self.rpos + 8 <= self.data.len()); let range = self.rpos..self.rpos + 8; self.rpos += 8; BigEndian::read_u64(&self.data[range]) } /// Same as `read_u64()` but for signed values pub fn read_i64(&mut self) -> i64 { self.read_u64() as i64 } /// Read a 32 bits floating point value. The program crash if not enough bytes are available pub fn read_f32(&mut self) -> f32 { self.flush_bit(); assert!(self.rpos + 4 <= self.data.len()); let range = self.rpos..self.rpos + 4; self.rpos += 4; BigEndian::read_f32(&self.data[range]) } /// Read a 64 bits floating point value. The program crash if not enough bytes are available pub fn read_f64(&mut self) -> f64 { self.flush_bit(); assert!(self.rpos + 8 <= self.data.len()); let range = self.rpos..self.rpos + 8; self.rpos += 8; BigEndian::read_f64(&self.data[range]) } /// Read a string. /// /// *Note* : First it reads a 32 bits value representing the size, the read 'size' raw bytes. pub fn read_string(&mut self) -> String { let size = self.read_u32(); String::from_utf8(self.read_bytes(size as usize)).unwrap() } // Other /// Dump the byte buffer to a string. pub fn to_string(&self) -> String { let mut str = String::new(); for b in &self.data { str = str + &format!("0x{:01$x} ", b, 2); } str.pop(); str } /// Return the position of the reading cursor pub fn get_rpos(&self) -> usize { self.rpos } /// Set the reading cursor position. 
/// *Note* : Set the reading cursor to `min(newPosition, self.len())` to prevent overflow pub fn set_rpos(&mut self, rpos: usize) { self.rpos = std::cmp::min(rpos, self.data.len()); } /// Return the writing cursor position pub fn get_wpos(&self) -> usize { self.wpos } /// Set the writing cursor position. /// *Note* : Set the writing cursor to `min(newPosition, self.len())` to prevent overflow pub fn set_wpos(&mut self, wpos: usize) { self.wpos = std::cmp::min(wpos, self.data.len()); } /// Return the raw byte buffer. pub fn to_bytes(&self) -> Vec<u8> { self.data.to_vec() } //Bit manipulation functions /// Read 1 bit. Return true if the bit is set to 1, otherwhise, return false. /// /// **Note** Bits are read from left to right /// /// #Example /// /// ``` /// # use bytebuffer::*; /// let mut buffer = ByteBuffer::from_bytes(&vec![128]); // 10000000b /// let value1 = buffer.read_bit(); //value1 contains true (eg: bit is 1) /// let value2 = buffer.read_bit(); //value2 contains false (eg: bit is 0) /// ``` pub fn read_bit(&mut self) -> bool { assert!(self.rpos <= self.data.len()); let bit = self.data[self.rpos] & (1 << 7 - self.rbit) != 0; self.rbit += 1; if self.rbit > 7 { self.rbit = 0; self.rpos += 1; } bit } /// Read n bits. an return the corresponding value an u64. /// /// **Note 1** : We cannot read more than 64 bits /// /// **Note 2** Bits are read from left to right /// /// #Example /// /// ``` /// # use bytebuffer::*; /// let mut buffer = ByteBuffer::from_bytes(&vec![128]); // 10000000b /// let value = buffer.read_bits(3); // value contains 4 (eg: 100b) /// ``` pub fn read_bits(&mut self, n: u8) -> u64 { // TODO : Assert that n <= 64 if n > 0 { ((if self.read_bit() { 1 } else { 0 }) << n - 1) | self.read_bits(n - 1) } else { 0 } } /// Discard all the pending bits available for reading or writing and place the the corresponding cursor to the next byte. /// /// **Note 1** : If no bits are currently read or written, this function does nothing. 
/// **Note 2** : This function is automatically called for each write or read operations. /// #Example /// /// ```text /// 10010010 | 00000001 /// ^ /// 10010010 | 00000001 // read_bit called /// ^ /// 10010010 | 00000001 // flush_bit() called /// ^ /// ``` pub fn flush_bit(&mut self) { if self.rbit > 0 { self.rpos += 1; self.rbit = 0 } if self.wbit > 0 { self.wpos += 1; self.wbit = 0 } }<|fim▁hole|> /// Append 1 bit value to the buffer. /// The bit is happened like this : /// /// ```text /// ...| XXXXXXXX | 10000000 |.... /// ``` pub fn write_bit(&mut self, bit: bool) { let size = self.wpos + 1; if size > self.data.len() { self.resize(size); } if bit { self.data[self.wpos] |= 1 << (7 - self.wbit); } self.wbit += 1; if self.wbit > 7 { self.wbit = 0; self.wpos += 1; } } /// Write the given value as a sequence of n bits /// /// #Example /// /// ``` /// # use bytebuffer::*; /// let mut buffer = ByteBuffer::new(); /// buffer.write_bits(4, 3); // append 100b /// ``` pub fn write_bits(&mut self, value: u64, n: u8) { if n > 0 { self.write_bit((value >> n - 1) & 1 != 0); self.write_bits(value, n - 1); } else { self.write_bit((value & 1) != 0); } } } impl Read for ByteBuffer { fn read(&mut self, buf: &mut [u8]) -> Result<usize> { self.flush_bit(); let read_len = std::cmp::min(self.data.len() - self.rpos, buf.len()); let range = self.rpos..self.rpos + read_len; for (i, val) in (&self.data[range]).iter().enumerate() { buf[i] = *val; } self.rpos += read_len; Ok(read_len) } } impl Write for ByteBuffer { fn write(&mut self, buf: &[u8]) -> Result<usize> { self.write_bytes(buf); Ok(buf.len()) } fn flush(&mut self) -> Result<()> { Ok(()) } } impl std::fmt::Debug for ByteBuffer { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let rpos = if self.rbit > 0 { self.rpos + 1 } else { self.rpos }; let read_len = self.data.len() - rpos; let mut remaining_data = vec![0; read_len]; let range = rpos..rpos + read_len; for (i, val) in (&self.data[range]).iter().enumerate() { 
remaining_data[i] = *val; } write!(f, "ByteBuffer {{ remaining_data: {:?}, total_data: {:?} }}", remaining_data, self.data) } }<|fim▁end|>
<|file_name|>server.js<|end_file_name|><|fim▁begin|>/** * Runs a webserver and socket server for visualizating interactions with TJBot */ var ip = require('ip'); var express = require("express") var config = require('./config.js') var path = require("path") var app = express(); var http = require('http'); var exports = module.exports = {}; // routes var routes = require('./routes/index'); var server = http.createServer(app).listen(config.webServerNumber, function() { var addr = server.address(); console.log('Dashboard running at : http://' + ip.address() + ':' + addr.port); }); var bodyParser = require('body-parser'); var bodyParser = require('body-parser'); // parse application/x-www-form-urlencoded app.use(bodyParser.urlencoded({ extended: false })) // parse application/json app.use(bodyParser.json()); app.set('view engine', 'pug'); app.use(express.static('public')); app.use('/', routes); var WebSocket = require('ws'); var wss = new WebSocket.Server({ server }); var clients = new Map(); wss.on('connection', function connection(ws) { ws.on('message', function incoming(message) { }); clients.set(ws._socket._handle.fd, ws); //clients.push({id: ws._socket._handle.fd , client: }); // ws.send('something ......'); var hold = ws; ws.on('close', function close() { console.log("closing "); // clients.delete(ws._socket._handle.fd); }); }); var sendEvent = function(data) { console.log("Number of connectd clients", clients.size) for (var [key, client] of clients) { //console.log(key + ' = sending'); if (client.readyState === WebSocket.OPEN) { client.send(JSON.stringify(data)) } else { clients.delete(key); } } } <|fim▁hole|><|fim▁end|>
exports.sendEvent = sendEvent; exports.wss = wss
<|file_name|>CLIExample.java<|end_file_name|><|fim▁begin|>/* * Copyright 2015-2016 DevCon5 GmbH, [email protected] * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.devcon5.cli; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.core.Is.is; import static org.hamcrest.core.IsNot.not; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; import org.junit.Test; /** */ public class CLIExample { @CliOption(value = "x", hasArg = true) private String example; <|fim▁hole|> private Structured credentials; private String postProcessed; @PostInject private void init(){ postProcessed = "an " + example; } @Test public void example() { //arrange String[] exampleArgs = {"-u", "hans", "-p", "wurst", "-x", "example"}; //act CLI.parse(exampleArgs).into(this); run(); //assert assertEquals("an example", postProcessed); } public void run() { assertThat(example, is(not(nullValue()))); assertThat(credentials.user, is(not(nullValue()))); assertThat(credentials.password, is(not(nullValue()))); } static class Structured { @CliOption(value = "u", hasArg = true) private String user; @CliOption(value = "p", hasArg = true) private String password; } }<|fim▁end|>
@CliOptionGroup
<|file_name|>SceneGenerator.py<|end_file_name|><|fim▁begin|>import json import math ###################################################### # add parameters ###################################################### def addParameters(scene, h=0.005, maxIter=5, maxIterVel=5, velocityUpdateMethod=0, contactTolerance=0.05, triangleModelSimulationMethod=2, triangleModelBendingMethod=2, contactStiffnessRigidBody=1.0, contactStiffnessParticleRigidBody=100.0, cloth_stiffness=1.0, cloth_bendingStiffness=0.005, cloth_xxStiffness=1.0, cloth_yyStiffness=1.0, cloth_xyStiffness=1.0, cloth_xyPoissonRatio=0.3, cloth_yxPoissonRatio=0.3, cloth_normalizeStretch=0, cloth_normalizeShear=0, gravity=[0,-9.81,0], numberOfStepsPerRenderUpdate=4): parameters = { 'timeStepSize': h, 'gravity': gravity, 'maxIter' : maxIter, 'maxIterVel' : maxIterVel, 'numberOfStepsPerRenderUpdate': numberOfStepsPerRenderUpdate, 'velocityUpdateMethod' : velocityUpdateMethod, 'contactTolerance': contactTolerance, 'triangleModelSimulationMethod': triangleModelSimulationMethod, 'triangleModelBendingMethod': triangleModelBendingMethod, 'contactStiffnessRigidBody' : contactStiffnessRigidBody, 'contactStiffnessParticleRigidBody': contactStiffnessParticleRigidBody, 'cloth_stiffness': cloth_stiffness, 'cloth_bendingStiffness': cloth_bendingStiffness, 'cloth_xxStiffness': cloth_xxStiffness, 'cloth_yyStiffness': cloth_yyStiffness, 'cloth_xyStiffness': cloth_xyStiffness, 'cloth_xyPoissonRatio': cloth_xyPoissonRatio, 'cloth_yxPoissonRatio': cloth_yxPoissonRatio, 'cloth_normalizeStretch': cloth_normalizeStretch, 'cloth_normalizeShear': cloth_normalizeShear } scene['Simulation'] = parameters return ###################################################### # add rigid bodies ###################################################### def addRigidBody(scene, geoFile, coType, coFile='', coScale=[1,1,1], translation=[0,0,0], axis=[1,0,0], angle=0.0, scale=[1,1,1], v=[0,0,0], omega=[0,0,0], dynamic=1, density=500, rest=0.6, friction=0.3, 
testMesh = 1): global current_id rb = { 'id': current_id, 'geometryFile': geoFile, 'isDynamic': dynamic, 'density': density, 'translation': translation, 'rotationAxis': axis, 'rotationAngle': angle, 'scale': scale, 'velocity': v, 'angularVelocity': omega, 'restitution' : rest, 'friction' : friction, 'collisionObjectType': coType, 'collisionObjectScale': coScale, 'collisionObjectFileName': coFile, 'testMesh': testMesh } current_id += 1 scene['RigidBodies'].append(rb) return current_id-1 ###################################################### # add triangle models ###################################################### def addTriangleModel(scene, geoFile, translation=[0,0,0], axis=[1,0,0], angle=0.0, scale=[1,1,1], rest=0.6, friction=0.3, staticParticles=[]): global current_id tri = { 'id': current_id, 'geometryFile': geoFile, 'translation': translation, 'rotationAxis': axis, 'rotationAngle': angle, 'scale': scale, 'staticParticles': staticParticles, 'restitution' : rest, 'friction' : friction } current_id += 1 scene['TriangleModels'].append(tri) return current_id-1 ###################################################### # add tet models ###################################################### def addTetModel(scene, nodeFile, eleFile, visFile='', translation=[0,0,0], axis=[1,0,0], angle=0.0, scale=[1,1,1], rest=0.6, friction=0.3, staticParticles=[]): global current_id tet = { 'id': current_id, 'nodeFile': nodeFile, 'eleFile': eleFile, 'translation': translation, 'rotationAxis': axis, 'rotationAngle': angle, 'scale': scale, 'staticParticles': staticParticles, 'restitution' : rest, 'friction' : friction } if visFile != '': tet['visFile'] = visFile current_id += 1 scene['TetModels'].append(tet) return current_id-1 ###################################################### # add ball joint ###################################################### def addBallJoint(scene, rbId1, rbId2, position): joint = { 'bodyID1': rbId1, 'bodyID2': rbId2, 'position': position } 
scene['BallJoints'].append(joint) return ###################################################### # add ball-on-line joint ###################################################### def addBallOnLineJoint(scene, rbId1, rbId2, position, axis): joint = { 'bodyID1': rbId1, 'bodyID2': rbId2, 'position': position, 'axis': axis } scene['BallOnLineJoints'].append(joint) return ###################################################### # add hinge joint ###################################################### def addHingeJoint(scene, rbId1, rbId2, position, axis): joint = { 'bodyID1': rbId1, 'bodyID2': rbId2, 'position': position, 'axis': axis } scene['HingeJoints'].append(joint) return ###################################################### # add universal joint ###################################################### def addUniversalJoint(scene, rbId1, rbId2, position, axis1, axis2): joint = { 'bodyID1': rbId1, 'bodyID2': rbId2, 'position': position, 'axis1': axis1, 'axis2': axis2 } scene['UniversalJoints'].append(joint) return ###################################################### # add slider joint ###################################################### def addSliderJoint(scene, rbId1, rbId2, axis): joint = { 'bodyID1': rbId1, 'bodyID2': rbId2, 'axis': axis } scene['SliderJoints'].append(joint) return ###################################################### # add damper joint ###################################################### def addDamperJoint(scene, rbId1, rbId2, axis, stiffness): joint = { 'bodyID1': rbId1, 'bodyID2': rbId2, 'axis': axis, 'stiffness': stiffness } scene['DamperJoints'].append(joint) return ###################################################### # add RigidBodyParticleBallJoint ###################################################### def addRigidBodyParticleBallJoint(scene, rbId, particleId): joint = { 'rbID': rbId, 'particleID': particleId } scene['RigidBodyParticleBallJoints'].append(joint) return ###################################################### # add 
TargetAngleMotorHingeJoint ###################################################### def addTargetAngleMotorHingeJoint(scene, rbId1, rbId2, position, axis, target, targetSequence=None, repeatSequence=0): joint = { 'bodyID1': rbId1, 'bodyID2': rbId2, 'position': position, 'axis': axis } if targetSequence != None: joint['targetSequence'] = targetSequence joint['repeatSequence'] = repeatSequence else: joint['target'] = target scene['TargetAngleMotorHingeJoints'].append(joint) return ###################################################### # add TargetVelocityMotorHingeJoint ###################################################### def addTargetVelocityMotorHingeJoint(scene, rbId1, rbId2, position, axis, target): joint = { 'bodyID1': rbId1, 'bodyID2': rbId2, 'position': position, 'axis': axis, 'target': target } scene['TargetVelocityMotorHingeJoints'].append(joint) return ###################################################### # add TargetPositionMotorSliderJoint ###################################################### def addTargetPositionMotorSliderJoint(scene, rbId1, rbId2, axis, target): joint = { 'bodyID1': rbId1, 'bodyID2': rbId2, 'axis': axis, 'target': target } scene['TargetPositionMotorSliderJoints'].append(joint) return ###################################################### # add TargetVelocityMotorSliderJoint ###################################################### def addTargetVelocityMotorSliderJoint(scene, rbId1, rbId2, axis, target): joint = { 'bodyID1': rbId1, 'bodyID2': rbId2, 'axis': axis, 'target': target } scene['TargetVelocityMotorSliderJoints'].append(joint) return ###################################################### # add spring ###################################################### def addRigidBodySpring(scene, rbId1, rbId2, position1, position2, stiffness): joint = { 'bodyID1': rbId1, 'bodyID2': rbId2, 'position1': position1, 'position2': position2, 'stiffness': stiffness } scene['RigidBodySprings'].append(joint) return 
###################################################### # add distance joint ###################################################### def addDistanceJoint(scene, rbId1, rbId2, position1, position2): joint = { 'bodyID1': rbId1, 'bodyID2': rbId2, 'position1': position1, 'position2': position2 } scene['DistanceJoints'].append(joint) return ###################################################### # generate scene ###################################################### def generateScene(name, camPosition=[0, 10, 30], camLookat=[0,0,0]): scene = {'Name' : name} scene['cameraPosition'] = camPosition scene['cameraLookat'] = camLookat scene['RigidBodies'] = [] scene['BallJoints'] = [] scene['BallOnLineJoints'] = [] scene['HingeJoints'] = [] scene['UniversalJoints'] = [] scene['SliderJoints'] = [] scene['DamperJoints'] = [] scene['RigidBodyParticleBallJoints'] = [] scene['TargetAngleMotorHingeJoints'] = [] scene['TargetVelocityMotorHingeJoints'] = [] scene['TargetPositionMotorSliderJoints'] = [] scene['TargetVelocityMotorSliderJoints'] = [] scene['RigidBodySprings'] = [] scene['DistanceJoints'] = [] scene['TriangleModels'] = [] scene['TetModels'] = [] return scene ###################################################### # write scene to file ###################################################### def writeScene(scene, fileName): f = open(fileName, 'w') json_str = json.dumps(scene, sort_keys=True,indent=4, separators=(',', ': ')) f.write(json_str) #print json_str f.close() ######################################################<|fim▁hole|> x = axis[0] y = axis[1] z = axis[2] d = math.sqrt(x*x + y*y + z*z) if d < 1.0e-6: print ("Vector of rotation matrix is zero!") return x = x/d; y = y/d; z = z/d; x2 = x*x; y2 = y*y; z2 = z*z; s = math.sin(angle); c = math.cos(angle); c1 = 1.0-c; xyc = x*y*c1; xzc = x*z*c1; yzc = y*z*c1; xs=x*s; ys=y*s; zs=z*s; return [[c + x2*c1, xyc-zs, xzc+ys], [xyc+zs, c+y2*c1, yzc-xs], [xzc-ys, yzc+xs, c+z2*c1]] 
###################################################### # compute matrix vector product ###################################################### def matrix_vec_product(A, v): res = [0,0,0] for i in range(0,3): for j in range(0,3): res[i] += A[i][j] * v[j]; return res ###################################################### # compute cross product ###################################################### def cross_product(a, b): res = [0,0,0] res[0] = a[1]*b[2] - a[2]*b[1]; res[1] = a[2]*b[0] - a[0]*b[2]; res[2] = a[0]*b[1] - a[1]*b[0]; return res ###################################################### # scale vector ###################################################### def scale_vector(v, s): res = [0,0,0] res[0] = s*v[0]; res[1] = s*v[1]; res[2] = s*v[2]; return res ###################################################### # add vector ###################################################### def add_vector(v1, v2): res = [0,0,0] res[0] = v1[0] + v2[0]; res[1] = v1[1] + v2[1]; res[2] = v1[2] + v2[2]; return res current_id=1<|fim▁end|>
# compute rotation matrix ###################################################### def rotation_matrix(axis, angle):
<|file_name|>process_json.py<|end_file_name|><|fim▁begin|># directory = 'Congressional_Bill_Corpus.v1.00/raw/' directory = '' text_file = directory + 'billtext_org.json' labels_file = directory + 'train.json' output_dir = '/Users/katya/datasets/congress_bills_2/' import sys #pcogennen noah's congress_bills_2 into useable format #to dict: def skip_ahead_n_quotes(line, char_counter, maximum): quote_counter = 0 while quote_counter < maximum: if line[char_counter:char_counter+1] == '\"': quote_counter += 1 char_counter += 1 # print 'to',line[char_counter:char_counter+10] return char_counter def parse_inside_char(line, char_counter, char): string = '' while line[char_counter] != char: string += line[char_counter] char_counter += 1 return string, char_counter <|fim▁hole|> # string.replace('\\\n', ' ') string = string.replace('\\' + 'n', ' ') for i in range(1,10): string = string.replace(' ', ' ') return string def tokenize(line): list_of_words = [] word = '' for char in line: if char == ' ': list_of_words.append(word) word = '' else: word += char list_of_words.append(word.strip()) return tuple(list_of_words) d = {} for line in open(text_file): if "\"\"" in line: d[name] = '' else: # d = json.load(json_data) # print d char_counter = 0 # print "success" name, char_counter = parse_inside_char(line, char_counter, '\t') # print 'parse' if '\"body\"' in line: char_counter = skip_ahead_n_quotes(line, char_counter, 2) # print 'skip ahead' char_counter += 3 body, char_counter = parse_inside_char(line, char_counter, '\"') # print 'parsed' else: body = '' char_counter = skip_ahead_n_quotes(line, char_counter, 3) char_counter += 3 # print 'skip 2' title, char_counter = parse_inside_char(line, char_counter, '\"') # print 'parsed2' d[name] = rm_newlines(title) + ' ' + rm_newlines(body) print 'quit' with open(labels_file, 'r') as labels, open(output_dir + 'train.data', 'w') as data_out, open(output_dir + 'train.labels', 'w') as labels_out: for line in labels: line = line.replace('\t', 
' ') example_name, label = tokenize(line) try: data_out.write(d[example_name] + '\n') except KeyError: print example_name else: labels_out.write(label + '\n') sys.stdout.flush() print 'done'<|fim▁end|>
def rm_newlines(string):
<|file_name|>test_sensor.py<|end_file_name|><|fim▁begin|>"""The tests for the Tasmota sensor platform.""" import copy import datetime from datetime import timedelta import json from unittest.mock import Mock, patch import hatasmota from hatasmota.utils import ( get_topic_stat_status, get_topic_tele_sensor, get_topic_tele_will, ) import pytest from homeassistant import config_entries from homeassistant.components import sensor from homeassistant.components.tasmota.const import DEFAULT_PREFIX from homeassistant.const import ATTR_ASSUMED_STATE, STATE_UNKNOWN from homeassistant.helpers import entity_registry as er from homeassistant.util import dt from .test_common import ( DEFAULT_CONFIG, help_test_availability, help_test_availability_discovery_update, help_test_availability_poll_state, help_test_availability_when_connection_lost, help_test_discovery_device_remove, help_test_discovery_removal, help_test_discovery_update_unchanged, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, ) from tests.common import async_fire_mqtt_message, async_fire_time_changed DEFAULT_SENSOR_CONFIG = { "sn": { "Time": "2020-09-25T12:47:15", "DHT11": {"Temperature": None}, "TempUnit": "C", } } BAD_INDEXED_SENSOR_CONFIG_3 = { "sn": { "Time": "2020-09-25T12:47:15", "ENERGY": { "ApparentPower": [7.84, 1.23, 2.34], }, } } INDEXED_SENSOR_CONFIG = { "sn": { "Time": "2020-09-25T12:47:15", "ENERGY": { "TotalStartTime": "2018-11-23T15:33:47", "Total": 0.017, "TotalTariff": [0.000, 0.017], "Yesterday": 0.000, "Today": 0.002, "ExportActive": 0.000, "ExportTariff": [0.000, 0.000], "Period": 0.00, "Power": 0.00, "ApparentPower": 7.84, "ReactivePower": -7.21, "Factor": 0.39, "Frequency": 50.0, "Voltage": 234.31, "Current": 0.039, "ImportActive": 12.580, "ImportReactive": 0.002, "ExportReactive": 39.131, "PhaseAngle": 290.45, }, } } INDEXED_SENSOR_CONFIG_2 = { "sn": { "Time": "2020-09-25T12:47:15", "ENERGY": { "TotalStartTime": "2018-11-23T15:33:47", "Total": [0.000, 
0.017], "TotalTariff": [0.000, 0.017], "Yesterday": 0.000, "Today": 0.002, "ExportActive": 0.000, "ExportTariff": [0.000, 0.000], "Period": 0.00, "Power": 0.00, "ApparentPower": 7.84, "ReactivePower": -7.21, "Factor": 0.39, "Frequency": 50.0, "Voltage": 234.31, "Current": 0.039, "ImportActive": 12.580, "ImportReactive": 0.002, "ExportReactive": 39.131, "PhaseAngle": 290.45, }, } } NESTED_SENSOR_CONFIG = { "sn": { "Time": "2020-03-03T00:00:00+00:00", "TX23": { "Speed": {"Act": 14.8, "Avg": 8.5, "Min": 12.2, "Max": 14.8}, "Dir": { "Card": "WSW", "Deg": 247.5, "Avg": 266.1, "AvgCard": "W", "Range": 0, }, }, "SpeedUnit": "km/h", } } async def test_controlling_state_via_mqtt(hass, mqtt_mock, setup_tasmota): """Test state update via MQTT.""" config = copy.deepcopy(DEFAULT_CONFIG) sensor_config = copy.deepcopy(DEFAULT_SENSOR_CONFIG) mac = config["mac"] async_fire_mqtt_message( hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config), ) await hass.async_block_till_done() async_fire_mqtt_message( hass, f"{DEFAULT_PREFIX}/{mac}/sensors", json.dumps(sensor_config), ) await hass.async_block_till_done() state = hass.states.get("sensor.tasmota_dht11_temperature") assert state.state == "unavailable" assert not state.attributes.get(ATTR_ASSUMED_STATE) entity_reg = er.async_get(hass) entry = entity_reg.async_get("sensor.tasmota_dht11_temperature") assert entry.disabled is False assert entry.disabled_by is None assert entry.entity_category is None async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online") state = hass.states.get("sensor.tasmota_dht11_temperature") assert state.state == STATE_UNKNOWN assert not state.attributes.get(ATTR_ASSUMED_STATE) # Test periodic state update async_fire_mqtt_message( hass, "tasmota_49A3BC/tele/SENSOR", '{"DHT11":{"Temperature":20.5}}' ) state = hass.states.get("sensor.tasmota_dht11_temperature") assert state.state == "20.5" # Test polled state update async_fire_mqtt_message( hass, "tasmota_49A3BC/stat/STATUS10", 
'{"StatusSNS":{"DHT11":{"Temperature":20.0}}}', ) state = hass.states.get("sensor.tasmota_dht11_temperature") assert state.state == "20.0" async def test_nested_sensor_state_via_mqtt(hass, mqtt_mock, setup_tasmota): """Test state update via MQTT.""" config = copy.deepcopy(DEFAULT_CONFIG) sensor_config = copy.deepcopy(NESTED_SENSOR_CONFIG) mac = config["mac"] async_fire_mqtt_message( hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config), ) await hass.async_block_till_done() async_fire_mqtt_message( hass, f"{DEFAULT_PREFIX}/{mac}/sensors", json.dumps(sensor_config), ) await hass.async_block_till_done() state = hass.states.get("sensor.tasmota_tx23_speed_act") assert state.state == "unavailable" assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online") state = hass.states.get("sensor.tasmota_tx23_speed_act") assert state.state == STATE_UNKNOWN assert not state.attributes.get(ATTR_ASSUMED_STATE) # Test periodic state update async_fire_mqtt_message( hass, "tasmota_49A3BC/tele/SENSOR", '{"TX23":{"Speed":{"Act":"12.3"}}}' ) state = hass.states.get("sensor.tasmota_tx23_speed_act") assert state.state == "12.3" # Test polled state update async_fire_mqtt_message( hass, "tasmota_49A3BC/stat/STATUS10", '{"StatusSNS":{"TX23":{"Speed":{"Act":"23.4"}}}}', ) state = hass.states.get("sensor.tasmota_tx23_speed_act") assert state.state == "23.4" async def test_indexed_sensor_state_via_mqtt(hass, mqtt_mock, setup_tasmota): """Test state update via MQTT.""" config = copy.deepcopy(DEFAULT_CONFIG) sensor_config = copy.deepcopy(INDEXED_SENSOR_CONFIG) mac = config["mac"] async_fire_mqtt_message( hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config), ) await hass.async_block_till_done() async_fire_mqtt_message( hass, f"{DEFAULT_PREFIX}/{mac}/sensors", json.dumps(sensor_config), ) await hass.async_block_till_done() state = hass.states.get("sensor.tasmota_energy_totaltariff_1") assert state.state == "unavailable" assert not 
state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online") state = hass.states.get("sensor.tasmota_energy_totaltariff_1") assert state.state == STATE_UNKNOWN assert not state.attributes.get(ATTR_ASSUMED_STATE) # Test periodic state update async_fire_mqtt_message( hass, "tasmota_49A3BC/tele/SENSOR", '{"ENERGY":{"TotalTariff":[1.2,3.4]}}' ) state = hass.states.get("sensor.tasmota_energy_totaltariff_1") assert state.state == "3.4" # Test polled state update async_fire_mqtt_message( hass, "tasmota_49A3BC/stat/STATUS10", '{"StatusSNS":{"ENERGY":{"TotalTariff":[5.6,7.8]}}}', ) state = hass.states.get("sensor.tasmota_energy_totaltariff_1") assert state.state == "7.8" async def test_indexed_sensor_state_via_mqtt2(hass, mqtt_mock, setup_tasmota): """Test state update via MQTT for sensor with last_reset property.""" config = copy.deepcopy(DEFAULT_CONFIG) sensor_config = copy.deepcopy(INDEXED_SENSOR_CONFIG) mac = config["mac"] async_fire_mqtt_message( hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config), ) await hass.async_block_till_done() async_fire_mqtt_message( hass, f"{DEFAULT_PREFIX}/{mac}/sensors", json.dumps(sensor_config), ) await hass.async_block_till_done() state = hass.states.get("sensor.tasmota_energy_total") assert state.state == "unavailable" assert not state.attributes.get(ATTR_ASSUMED_STATE) assert ( state.attributes[sensor.ATTR_STATE_CLASS] == sensor.STATE_CLASS_TOTAL_INCREASING ) async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online") state = hass.states.get("sensor.tasmota_energy_total") assert state.state == STATE_UNKNOWN assert not state.attributes.get(ATTR_ASSUMED_STATE) # Test periodic state update async_fire_mqtt_message( hass, "tasmota_49A3BC/tele/SENSOR", '{"ENERGY":{"Total":1.2,"TotalStartTime":"2018-11-23T15:33:47"}}', ) state = hass.states.get("sensor.tasmota_energy_total") assert state.state == "1.2" # Test polled state update async_fire_mqtt_message( hass, 
"tasmota_49A3BC/stat/STATUS10", '{"StatusSNS":{"ENERGY":{"Total":5.6,"TotalStartTime":"2018-11-23T16:33:47"}}}', ) state = hass.states.get("sensor.tasmota_energy_total") assert state.state == "5.6" async def test_indexed_sensor_state_via_mqtt3(hass, mqtt_mock, setup_tasmota): """Test state update via MQTT for indexed sensor with last_reset property.""" config = copy.deepcopy(DEFAULT_CONFIG) sensor_config = copy.deepcopy(INDEXED_SENSOR_CONFIG_2) mac = config["mac"] async_fire_mqtt_message( hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config), ) await hass.async_block_till_done() async_fire_mqtt_message( hass,<|fim▁hole|> ) await hass.async_block_till_done() state = hass.states.get("sensor.tasmota_energy_total_1") assert state.state == "unavailable" assert not state.attributes.get(ATTR_ASSUMED_STATE) assert ( state.attributes[sensor.ATTR_STATE_CLASS] == sensor.STATE_CLASS_TOTAL_INCREASING ) async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online") state = hass.states.get("sensor.tasmota_energy_total_1") assert state.state == STATE_UNKNOWN assert not state.attributes.get(ATTR_ASSUMED_STATE) # Test periodic state update async_fire_mqtt_message( hass, "tasmota_49A3BC/tele/SENSOR", '{"ENERGY":{"Total":[1.2, 3.4],"TotalStartTime":"2018-11-23T15:33:47"}}', ) state = hass.states.get("sensor.tasmota_energy_total_1") assert state.state == "3.4" # Test polled state update async_fire_mqtt_message( hass, "tasmota_49A3BC/stat/STATUS10", '{"StatusSNS":{"ENERGY":{"Total":[5.6,7.8],"TotalStartTime":"2018-11-23T16:33:47"}}}', ) state = hass.states.get("sensor.tasmota_energy_total_1") assert state.state == "7.8" async def test_bad_indexed_sensor_state_via_mqtt(hass, mqtt_mock, setup_tasmota): """Test state update via MQTT where sensor is not matching configuration.""" config = copy.deepcopy(DEFAULT_CONFIG) sensor_config = copy.deepcopy(BAD_INDEXED_SENSOR_CONFIG_3) mac = config["mac"] async_fire_mqtt_message( hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config), ) 
await hass.async_block_till_done() async_fire_mqtt_message( hass, f"{DEFAULT_PREFIX}/{mac}/sensors", json.dumps(sensor_config), ) await hass.async_block_till_done() state = hass.states.get("sensor.tasmota_energy_apparentpower_0") assert state.state == "unavailable" assert not state.attributes.get(ATTR_ASSUMED_STATE) state = hass.states.get("sensor.tasmota_energy_apparentpower_1") assert state.state == "unavailable" assert not state.attributes.get(ATTR_ASSUMED_STATE) state = hass.states.get("sensor.tasmota_energy_apparentpower_2") assert state.state == "unavailable" assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online") state = hass.states.get("sensor.tasmota_energy_apparentpower_0") assert state.state == STATE_UNKNOWN assert not state.attributes.get(ATTR_ASSUMED_STATE) state = hass.states.get("sensor.tasmota_energy_apparentpower_1") assert state.state == STATE_UNKNOWN assert not state.attributes.get(ATTR_ASSUMED_STATE) state = hass.states.get("sensor.tasmota_energy_apparentpower_2") assert state.state == STATE_UNKNOWN assert not state.attributes.get(ATTR_ASSUMED_STATE) # Test periodic state update async_fire_mqtt_message( hass, "tasmota_49A3BC/tele/SENSOR", '{"ENERGY":{"ApparentPower":[1.2,3.4,5.6]}}' ) state = hass.states.get("sensor.tasmota_energy_apparentpower_0") assert state.state == "1.2" state = hass.states.get("sensor.tasmota_energy_apparentpower_1") assert state.state == "3.4" state = hass.states.get("sensor.tasmota_energy_apparentpower_2") assert state.state == "5.6" # Test periodic state update with too few values async_fire_mqtt_message( hass, "tasmota_49A3BC/tele/SENSOR", '{"ENERGY":{"ApparentPower":[7.8,9.0]}}' ) state = hass.states.get("sensor.tasmota_energy_apparentpower_0") assert state.state == "7.8" state = hass.states.get("sensor.tasmota_energy_apparentpower_1") assert state.state == "9.0" state = hass.states.get("sensor.tasmota_energy_apparentpower_2") assert state.state == "5.6" 
async_fire_mqtt_message( hass, "tasmota_49A3BC/tele/SENSOR", '{"ENERGY":{"ApparentPower":2.3}}' ) state = hass.states.get("sensor.tasmota_energy_apparentpower_0") assert state.state == "2.3" state = hass.states.get("sensor.tasmota_energy_apparentpower_1") assert state.state == "9.0" state = hass.states.get("sensor.tasmota_energy_apparentpower_2") assert state.state == "5.6" # Test polled state update async_fire_mqtt_message( hass, "tasmota_49A3BC/stat/STATUS10", '{"StatusSNS":{"ENERGY":{"ApparentPower":[1.2,3.4,5.6]}}}', ) state = hass.states.get("sensor.tasmota_energy_apparentpower_0") assert state.state == "1.2" state = hass.states.get("sensor.tasmota_energy_apparentpower_1") assert state.state == "3.4" state = hass.states.get("sensor.tasmota_energy_apparentpower_2") assert state.state == "5.6" # Test polled state update with too few values async_fire_mqtt_message( hass, "tasmota_49A3BC/stat/STATUS10", '{"StatusSNS":{"ENERGY":{"ApparentPower":[7.8,9.0]}}}', ) state = hass.states.get("sensor.tasmota_energy_apparentpower_0") assert state.state == "7.8" state = hass.states.get("sensor.tasmota_energy_apparentpower_1") assert state.state == "9.0" state = hass.states.get("sensor.tasmota_energy_apparentpower_2") assert state.state == "5.6" async_fire_mqtt_message( hass, "tasmota_49A3BC/stat/STATUS10", '{"StatusSNS":{"ENERGY":{"ApparentPower":2.3}}}', ) state = hass.states.get("sensor.tasmota_energy_apparentpower_0") assert state.state == "2.3" state = hass.states.get("sensor.tasmota_energy_apparentpower_1") assert state.state == "9.0" state = hass.states.get("sensor.tasmota_energy_apparentpower_2") assert state.state == "5.6" @pytest.mark.parametrize("status_sensor_disabled", [False]) async def test_status_sensor_state_via_mqtt(hass, mqtt_mock, setup_tasmota): """Test state update via MQTT.""" entity_reg = er.async_get(hass) # Pre-enable the status sensor entity_reg.async_get_or_create( sensor.DOMAIN, "tasmota", "00000049A3BC_status_sensor_status_sensor_status_signal", 
suggested_object_id="tasmota_status", disabled_by=None, ) config = copy.deepcopy(DEFAULT_CONFIG) mac = config["mac"] async_fire_mqtt_message( hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config), ) await hass.async_block_till_done() await hass.async_block_till_done() state = hass.states.get("sensor.tasmota_status") assert state.state == "unavailable" assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online") state = hass.states.get("sensor.tasmota_status") assert state.state == STATE_UNKNOWN assert not state.attributes.get(ATTR_ASSUMED_STATE) # Test pushed state update async_fire_mqtt_message( hass, "tasmota_49A3BC/tele/STATE", '{"Wifi":{"Signal":20.5}}' ) await hass.async_block_till_done() state = hass.states.get("sensor.tasmota_status") assert state.state == "20.5" # Test polled state update async_fire_mqtt_message( hass, "tasmota_49A3BC/stat/STATUS11", '{"StatusSTS":{"Wifi":{"Signal":20.0}}}', ) await hass.async_block_till_done() state = hass.states.get("sensor.tasmota_status") assert state.state == "20.0" # Test force update flag entity = hass.data["entity_components"]["sensor"].get_entity( "sensor.tasmota_status" ) assert entity.force_update @pytest.mark.parametrize("status_sensor_disabled", [False]) async def test_single_shot_status_sensor_state_via_mqtt(hass, mqtt_mock, setup_tasmota): """Test state update via MQTT.""" entity_reg = er.async_get(hass) # Pre-enable the status sensor entity_reg.async_get_or_create( sensor.DOMAIN, "tasmota", "00000049A3BC_status_sensor_status_sensor_status_restart_reason", suggested_object_id="tasmota_status", disabled_by=None, ) config = copy.deepcopy(DEFAULT_CONFIG) mac = config["mac"] async_fire_mqtt_message( hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config), ) await hass.async_block_till_done() await hass.async_block_till_done() state = hass.states.get("sensor.tasmota_status") assert state.state == "unavailable" assert not 
state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online") state = hass.states.get("sensor.tasmota_status") assert state.state == STATE_UNKNOWN assert not state.attributes.get(ATTR_ASSUMED_STATE) # Test polled state update async_fire_mqtt_message( hass, "tasmota_49A3BC/stat/STATUS1", '{"StatusPRM":{"RestartReason":"Some reason"}}', ) await hass.async_block_till_done() state = hass.states.get("sensor.tasmota_status") assert state.state == "Some reason" # Test polled state update is ignored async_fire_mqtt_message( hass, "tasmota_49A3BC/stat/STATUS1", '{"StatusPRM":{"RestartReason":"Another reason"}}', ) await hass.async_block_till_done() state = hass.states.get("sensor.tasmota_status") assert state.state == "Some reason" # Device signals online again async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online") await hass.async_block_till_done() state = hass.states.get("sensor.tasmota_status") assert state.state == "Some reason" # Test polled state update async_fire_mqtt_message( hass, "tasmota_49A3BC/stat/STATUS1", '{"StatusPRM":{"RestartReason":"Another reason"}}', ) await hass.async_block_till_done() state = hass.states.get("sensor.tasmota_status") assert state.state == "Another reason" # Test polled state update is ignored async_fire_mqtt_message( hass, "tasmota_49A3BC/stat/STATUS1", '{"StatusPRM":{"RestartReason":"Third reason"}}', ) await hass.async_block_till_done() state = hass.states.get("sensor.tasmota_status") assert state.state == "Another reason" @pytest.mark.parametrize("status_sensor_disabled", [False]) @patch.object(hatasmota.status_sensor, "datetime", Mock(wraps=datetime.datetime)) async def test_restart_time_status_sensor_state_via_mqtt( hass, mqtt_mock, setup_tasmota ): """Test state update via MQTT.""" entity_reg = er.async_get(hass) # Pre-enable the status sensor entity_reg.async_get_or_create( sensor.DOMAIN, "tasmota", "00000049A3BC_status_sensor_status_sensor_last_restart_time", 
suggested_object_id="tasmota_status", disabled_by=None, ) config = copy.deepcopy(DEFAULT_CONFIG) mac = config["mac"] async_fire_mqtt_message( hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config), ) await hass.async_block_till_done() await hass.async_block_till_done() state = hass.states.get("sensor.tasmota_status") assert state.state == "unavailable" assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online") state = hass.states.get("sensor.tasmota_status") assert state.state == STATE_UNKNOWN assert not state.attributes.get(ATTR_ASSUMED_STATE) # Test polled state update utc_now = datetime.datetime(2020, 11, 11, 8, 0, 0, tzinfo=dt.UTC) hatasmota.status_sensor.datetime.now.return_value = utc_now async_fire_mqtt_message( hass, "tasmota_49A3BC/stat/STATUS11", '{"StatusSTS":{"UptimeSec":"3600"}}', ) await hass.async_block_till_done() state = hass.states.get("sensor.tasmota_status") assert state.state == "2020-11-11T07:00:00+00:00" async def test_attributes(hass, mqtt_mock, setup_tasmota): """Test correct attributes for sensors.""" config = copy.deepcopy(DEFAULT_CONFIG) sensor_config = { "sn": { "DHT11": {"Temperature": None}, "Beer": {"CarbonDioxide": None}, "TempUnit": "C", } } mac = config["mac"] async_fire_mqtt_message( hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config), ) await hass.async_block_till_done() async_fire_mqtt_message( hass, f"{DEFAULT_PREFIX}/{mac}/sensors", json.dumps(sensor_config), ) await hass.async_block_till_done() state = hass.states.get("sensor.tasmota_dht11_temperature") assert state.attributes.get("device_class") == "temperature" assert state.attributes.get("friendly_name") == "Tasmota DHT11 Temperature" assert state.attributes.get("icon") is None assert state.attributes.get("unit_of_measurement") == "°C" state = hass.states.get("sensor.tasmota_beer_CarbonDioxide") assert state.attributes.get("device_class") == "carbon_dioxide" assert state.attributes.get("friendly_name") 
== "Tasmota Beer CarbonDioxide" assert state.attributes.get("icon") is None assert state.attributes.get("unit_of_measurement") == "ppm" async def test_nested_sensor_attributes(hass, mqtt_mock, setup_tasmota): """Test correct attributes for sensors.""" config = copy.deepcopy(DEFAULT_CONFIG) sensor_config = copy.deepcopy(NESTED_SENSOR_CONFIG) mac = config["mac"] async_fire_mqtt_message( hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config), ) await hass.async_block_till_done() async_fire_mqtt_message( hass, f"{DEFAULT_PREFIX}/{mac}/sensors", json.dumps(sensor_config), ) await hass.async_block_till_done() state = hass.states.get("sensor.tasmota_tx23_speed_act") assert state.attributes.get("device_class") is None assert state.attributes.get("friendly_name") == "Tasmota TX23 Speed Act" assert state.attributes.get("icon") is None assert state.attributes.get("unit_of_measurement") == "km/h" state = hass.states.get("sensor.tasmota_tx23_dir_avg") assert state.attributes.get("device_class") is None assert state.attributes.get("friendly_name") == "Tasmota TX23 Dir Avg" assert state.attributes.get("icon") is None assert state.attributes.get("unit_of_measurement") == " " async def test_indexed_sensor_attributes(hass, mqtt_mock, setup_tasmota): """Test correct attributes for sensors.""" config = copy.deepcopy(DEFAULT_CONFIG) sensor_config = { "sn": { "Dummy1": {"Temperature": [None, None]}, "Dummy2": {"CarbonDioxide": [None, None]}, "TempUnit": "C", } } mac = config["mac"] async_fire_mqtt_message( hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config), ) await hass.async_block_till_done() async_fire_mqtt_message( hass, f"{DEFAULT_PREFIX}/{mac}/sensors", json.dumps(sensor_config), ) await hass.async_block_till_done() state = hass.states.get("sensor.tasmota_dummy1_temperature_0") assert state.attributes.get("device_class") == "temperature" assert state.attributes.get("friendly_name") == "Tasmota Dummy1 Temperature 0" assert state.attributes.get("icon") is None assert 
state.attributes.get("unit_of_measurement") == "°C" state = hass.states.get("sensor.tasmota_dummy2_carbondioxide_1") assert state.attributes.get("device_class") == "carbon_dioxide" assert state.attributes.get("friendly_name") == "Tasmota Dummy2 CarbonDioxide 1" assert state.attributes.get("icon") is None assert state.attributes.get("unit_of_measurement") == "ppm" @pytest.mark.parametrize("status_sensor_disabled", [False]) @pytest.mark.parametrize( "sensor_name, disabled, disabled_by", [ ("tasmota_firmware_version", True, er.DISABLED_INTEGRATION), ("tasmota_ip", True, er.DISABLED_INTEGRATION), ("tasmota_last_restart_time", False, None), ("tasmota_mqtt_connect_count", False, None), ("tasmota_rssi", True, er.DISABLED_INTEGRATION), ("tasmota_signal", True, er.DISABLED_INTEGRATION), ("tasmota_ssid", False, None), ("tasmota_wifi_connect_count", False, None), ], ) async def test_diagnostic_sensors( hass, mqtt_mock, setup_tasmota, sensor_name, disabled, disabled_by ): """Test properties of diagnostic sensors.""" entity_reg = er.async_get(hass) config = copy.deepcopy(DEFAULT_CONFIG) mac = config["mac"] async_fire_mqtt_message( hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config), ) await hass.async_block_till_done() await hass.async_block_till_done() state = hass.states.get(f"sensor.{sensor_name}") assert bool(state) != disabled entry = entity_reg.async_get(f"sensor.{sensor_name}") assert entry.disabled == disabled assert entry.disabled_by == disabled_by assert entry.entity_category == "diagnostic" @pytest.mark.parametrize("status_sensor_disabled", [False]) async def test_enable_status_sensor(hass, mqtt_mock, setup_tasmota): """Test enabling status sensor.""" entity_reg = er.async_get(hass) config = copy.deepcopy(DEFAULT_CONFIG) mac = config["mac"] async_fire_mqtt_message( hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config), ) await hass.async_block_till_done() await hass.async_block_till_done() state = hass.states.get("sensor.tasmota_signal") assert state is 
None entry = entity_reg.async_get("sensor.tasmota_signal") assert entry.disabled assert entry.disabled_by == er.DISABLED_INTEGRATION # Enable the signal level status sensor updated_entry = entity_reg.async_update_entity( "sensor.tasmota_signal", disabled_by=None ) assert updated_entry != entry assert updated_entry.disabled is False await hass.async_block_till_done() async_fire_time_changed( hass, dt.utcnow() + timedelta(seconds=config_entries.RELOAD_AFTER_UPDATE_DELAY + 1), ) await hass.async_block_till_done() # Fake re-send of retained discovery message async_fire_mqtt_message( hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config), ) await hass.async_block_till_done() state = hass.states.get("sensor.tasmota_signal") assert state.state == "unavailable" assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online") state = hass.states.get("sensor.tasmota_signal") assert state.state == STATE_UNKNOWN assert not state.attributes.get(ATTR_ASSUMED_STATE) async def test_availability_when_connection_lost( hass, mqtt_client_mock, mqtt_mock, setup_tasmota ): """Test availability after MQTT disconnection.""" config = copy.deepcopy(DEFAULT_CONFIG) sensor_config = copy.deepcopy(DEFAULT_SENSOR_CONFIG) await help_test_availability_when_connection_lost( hass, mqtt_client_mock, mqtt_mock, sensor.DOMAIN, config, sensor_config, "tasmota_dht11_temperature", ) async def test_availability(hass, mqtt_mock, setup_tasmota): """Test availability.""" config = copy.deepcopy(DEFAULT_CONFIG) sensor_config = copy.deepcopy(DEFAULT_SENSOR_CONFIG) await help_test_availability( hass, mqtt_mock, sensor.DOMAIN, config, sensor_config, "tasmota_dht11_temperature", ) async def test_availability_discovery_update(hass, mqtt_mock, setup_tasmota): """Test availability discovery update.""" config = copy.deepcopy(DEFAULT_CONFIG) sensor_config = copy.deepcopy(DEFAULT_SENSOR_CONFIG) await help_test_availability_discovery_update( hass, mqtt_mock, 
sensor.DOMAIN, config, sensor_config, "tasmota_dht11_temperature", ) async def test_availability_poll_state( hass, mqtt_client_mock, mqtt_mock, setup_tasmota ): """Test polling after MQTT connection (re)established.""" config = copy.deepcopy(DEFAULT_CONFIG) sensor_config = copy.deepcopy(DEFAULT_SENSOR_CONFIG) poll_topic = "tasmota_49A3BC/cmnd/STATUS" await help_test_availability_poll_state( hass, mqtt_client_mock, mqtt_mock, sensor.DOMAIN, config, poll_topic, "10", sensor_config, ) async def test_discovery_removal_sensor(hass, mqtt_mock, caplog, setup_tasmota): """Test removal of discovered sensor.""" config = copy.deepcopy(DEFAULT_CONFIG) sensor_config1 = copy.deepcopy(DEFAULT_SENSOR_CONFIG) await help_test_discovery_removal( hass, mqtt_mock, caplog, sensor.DOMAIN, config, config, sensor_config1, {}, "tasmota_dht11_temperature", "Tasmota DHT11 Temperature", ) async def test_discovery_update_unchanged_sensor( hass, mqtt_mock, caplog, setup_tasmota ): """Test update of discovered sensor.""" config = copy.deepcopy(DEFAULT_CONFIG) sensor_config = copy.deepcopy(DEFAULT_SENSOR_CONFIG) with patch( "homeassistant.components.tasmota.sensor.TasmotaSensor.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( hass, mqtt_mock, caplog, sensor.DOMAIN, config, discovery_update, sensor_config, "tasmota_dht11_temperature", "Tasmota DHT11 Temperature", ) async def test_discovery_device_remove(hass, mqtt_mock, setup_tasmota): """Test device registry remove.""" config = copy.deepcopy(DEFAULT_CONFIG) sensor_config = copy.deepcopy(DEFAULT_SENSOR_CONFIG) unique_id = f"{DEFAULT_CONFIG['mac']}_sensor_sensor_DHT11_Temperature" await help_test_discovery_device_remove( hass, mqtt_mock, sensor.DOMAIN, unique_id, config, sensor_config ) async def test_entity_id_update_subscriptions(hass, mqtt_mock, setup_tasmota): """Test MQTT subscriptions are managed when entity_id is updated.""" config = copy.deepcopy(DEFAULT_CONFIG) sensor_config = 
copy.deepcopy(DEFAULT_SENSOR_CONFIG) topics = [ get_topic_tele_sensor(config), get_topic_stat_status(config, 10), get_topic_tele_will(config), ] await help_test_entity_id_update_subscriptions( hass, mqtt_mock, sensor.DOMAIN, config, topics, sensor_config, "tasmota_dht11_temperature", ) async def test_entity_id_update_discovery_update(hass, mqtt_mock, setup_tasmota): """Test MQTT discovery update when entity_id is updated.""" config = copy.deepcopy(DEFAULT_CONFIG) sensor_config = copy.deepcopy(DEFAULT_SENSOR_CONFIG) await help_test_entity_id_update_discovery_update( hass, mqtt_mock, sensor.DOMAIN, config, sensor_config, "tasmota_dht11_temperature", )<|fim▁end|>
f"{DEFAULT_PREFIX}/{mac}/sensors", json.dumps(sensor_config),
<|file_name|>SystemUserAuthenticator.java<|end_file_name|><|fim▁begin|>/** * */ package com.sirma.itt.emf.authentication.sso.saml.authenticator; import java.nio.charset.StandardCharsets; import java.util.Map; import javax.crypto.SecretKey; import javax.enterprise.inject.Instance; import javax.inject.Inject; import org.apache.commons.codec.binary.Base64; import org.apache.commons.lang3.StringUtils; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import com.sirma.itt.emf.authentication.sso.saml.SAMLMessageProcessor; import com.sirma.itt.seip.configuration.SystemConfiguration; import com.sirma.itt.seip.idp.config.IDPConfiguration; import com.sirma.itt.seip.plugin.Extension; import com.sirma.itt.seip.security.User; import com.sirma.itt.seip.security.authentication.AuthenticationContext; import com.sirma.itt.seip.security.authentication.Authenticator; import com.sirma.itt.seip.security.configuration.SecurityConfiguration; import com.sirma.itt.seip.security.context.SecurityContext; import com.sirma.itt.seip.security.util.SecurityUtil; import com.sirma.itt.seip.util.EqualsHelper; /** * Th SystemUserAuthenticator is responsible to login system users only using a generated token. 
* * @author bbanchev */ @Extension(target = Authenticator.NAME, order = 20) public class SystemUserAuthenticator extends BaseSamlAuthenticator { @Inject private Instance<SecurityConfiguration> securityConfiguration; @Inject private IDPConfiguration idpConfiguration; @Inject private SAMLMessageProcessor samlMessageProcessor; @Inject private SystemConfiguration systemConfiguration; @Override public User authenticate(AuthenticationContext authenticationContext) { return null; } @Override public Object authenticate(User toAuthenticate) { return authenticateById(toAuthenticate, toAuthenticate.getIdentityId()); } private Object authenticateById(User toAuthenticate, final String username) { if (StringUtils.isBlank(username)) { return null; } String userSimpleName = SecurityUtil.getUserWithoutTenant(username); if (isSystemUser(userSimpleName) || isSystemAdmin(userSimpleName)) { return authenticateWithTokenAndGetTicket(toAuthenticate, createToken(username, securityConfiguration.get().getCryptoKey().get())); } return null; } @SuppressWarnings("static-method") protected boolean isSystemAdmin(String userSimpleName) { return EqualsHelper.nullSafeEquals(SecurityContext.getSystemAdminName(), userSimpleName, true); } protected boolean isSystemUser(String userSimpleName) { return EqualsHelper.nullSafeEquals(userSimpleName, SecurityUtil.getUserWithoutTenant(SecurityContext.SYSTEM_USER_NAME), true); } @Override protected void completeAuthentication(User authenticated, SAMLTokenInfo info, Map<String, String> processedToken) { authenticated.getProperties().putAll(processedToken); } /** * Creates a token for given user. 
* * @param user * the user to create for * @param secretKey * is the encrypt key for saml token * @return the saml token */ protected byte[] createToken(String user, SecretKey secretKey) { return Base64.encodeBase64(SecurityUtil.encrypt( createResponse(systemConfiguration.getSystemAccessUrl().getOrFail().toString(), samlMessageProcessor.getIssuerId().get(), idpConfiguration.getIdpServerURL().get(), user), secretKey)); } /** * Creates the response for authentication in DMS. The time should be synchronized * * @param assertionUrl<|fim▁hole|> * the alfresco url * @param audianceUrl * the audiance url * @param samlURL * the saml url * @param user * the user to authenticate * @return the resulted saml2 message */ @SuppressWarnings("static-method") protected byte[] createResponse(String assertionUrl, String audianceUrl, String samlURL, String user) { DateTime now = new DateTime(DateTimeZone.UTC); DateTime barrier = now.plusMinutes(10); StringBuilder saml = new StringBuilder(2048); saml.append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>") .append("<saml2p:Response ID=\"inppcpljfhhckioclinjenlcneknojmngnmgklab\" IssueInstant=\"").append(now) .append("\" Version=\"2.0\" xmlns:saml2p=\"urn:oasis:names:tc:SAML:2.0:protocol\">") .append("<saml2p:Status>") .append("<saml2p:StatusCode Value=\"urn:oasis:names:tc:SAML:2.0:status:Success\"/>") .append("</saml2p:Status>") .append("<saml2:Assertion ID=\"ehmifefpmmlichdcpeiogbgcmcbafafckfgnjfnk\" IssueInstant=\"").append(now) .append("\" Version=\"2.0\" xmlns:saml2=\"urn:oasis:names:tc:SAML:2.0:assertion\">") .append("<saml2:Issuer Format=\"urn:oasis:names:tc:SAML:2.0:nameid-format:entity\">").append(samlURL) .append("</saml2:Issuer>").append("<saml2:Subject>").append("<saml2:NameID>").append(user) .append("</saml2:NameID>") .append("<saml2:SubjectConfirmation Method=\"urn:oasis:names:tc:SAML:2.0:cm:bearer\">") .append("<saml2:SubjectConfirmationData InResponseTo=\"0\" NotOnOrAfter=\"").append(barrier) .append("\" 
Recipient=\"").append(assertionUrl).append("\"/>").append("</saml2:SubjectConfirmation>") .append("</saml2:Subject>").append("<saml2:Conditions NotBefore=\"").append(now) .append("\" NotOnOrAfter=\"").append(barrier).append("\">").append("<saml2:AudienceRestriction>") .append("<saml2:Audience>").append(audianceUrl).append("</saml2:Audience>") .append("</saml2:AudienceRestriction>").append("</saml2:Conditions>") .append("<saml2:AuthnStatement AuthnInstant=\"").append(now).append("\">") .append("<saml2:AuthnContext>") .append("<saml2:AuthnContextClassRef>urn:oasis:names:tc:SAML:2.0:ac:classes:Password</saml2:AuthnContextClassRef>") .append("</saml2:AuthnContext>").append("</saml2:AuthnStatement>").append("</saml2:Assertion>") .append("</saml2p:Response>"); return saml.toString().getBytes(StandardCharsets.UTF_8); } }<|fim▁end|>
<|file_name|>ipxact2hdlCommon.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 # This file is part of ipxact2systemverilog # Copyright (C) 2013 Andreas Lindh # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # # andreas.lindh (a) hiced.com import math import os import sys import xml.etree.ElementTree as ETree import tabulate from mdutils.mdutils import MdUtils DEFAULT_INI = {'global': {'unusedholes': 'yes', 'onebitenum': 'no'}} def sortRegisterAndFillHoles(regName,<|fim▁hole|> enumTypeList, unusedHoles=True): # sort the lists, highest offset first fieldNameList = fieldNameList bitOffsetList = [int(x) for x in bitOffsetList] bitWidthList = [int(x) for x in bitWidthList] fieldDescList = fieldDescList enumTypeList = enumTypeList matrix = list(zip(bitOffsetList, fieldNameList, bitWidthList, fieldDescList, enumTypeList)) matrix.sort(key=lambda x: x[0]) # , reverse=True) bitOffsetList, fieldNameList, bitWidthList, fieldDescList, enumTypeList = list(zip(*matrix)) # zip return tuples not lists fieldNameList = list(fieldNameList) bitOffsetList = list([int(x) for x in bitOffsetList]) bitWidthList = list([int(x) for x in bitWidthList]) fieldDescList = list(fieldDescList) enumTypeList = list(enumTypeList) if unusedHoles: unUsedCnt = 0 nextFieldStartingPos = 0 # fill up the holes index = 0 register_width = bitOffsetList[-1] + 
bitWidthList[-1] while register_width > nextFieldStartingPos: if nextFieldStartingPos != bitOffsetList[index]: newBitWidth = bitOffsetList[index] - nextFieldStartingPos bitOffsetList.insert(index, nextFieldStartingPos) fieldNameList.insert(index, 'unused' + str(unUsedCnt)) bitWidthList.insert(index, newBitWidth) fieldDescList.insert(index, 'unused') enumTypeList.insert(index, '') unUsedCnt += 1 nextFieldStartingPos = int(bitOffsetList[index]) + int(bitWidthList[index]) index += 1 return regName, fieldNameList, bitOffsetList, bitWidthList, fieldDescList, enumTypeList class documentClass(): def __init__(self, name): self.name = name self.memoryMapList = [] def addMemoryMap(self, memoryMap): self.memoryMapList.append(memoryMap) class memoryMapClass(): def __init__(self, name): self.name = name self.addressBlockList = [] def addAddressBlock(self, addressBlock): self.addressBlockList.append(addressBlock) class addressBlockClass(): def __init__(self, name, addrWidth, dataWidth): self.name = name self.addrWidth = addrWidth self.dataWidth = dataWidth self.registerList = [] self.suffix = "" def addRegister(self, reg): assert isinstance(reg, registerClass) self.registerList.append(reg) def setRegisterList(self, registerList): self.registerList = registerList def returnAsString(self): raise NotImplementedError("method returnAsString() is virutal and must be overridden.") class registerClass(): def __init__(self, name, address, resetValue, size, access, desc, fieldNameList, bitOffsetList, bitWidthList, fieldDescList, enumTypeList): assert isinstance(enumTypeList, list), 'enumTypeList is not a list' self.name = name self.address = address self.resetValue = resetValue self.size = size self.access = access self.desc = desc self.fieldNameList = fieldNameList self.bitOffsetList = bitOffsetList self.bitWidthList = bitWidthList self.fieldDescList = fieldDescList self.enumTypeList = enumTypeList class enumTypeClassRegistry(): """ should perhaps be a singleton instead """ def 
__init__(self): self.listOfEnums = [] def enumAllReadyExist(self, enum): for e in self.listOfEnums: if e.compare(enum): enum.allReadyExist = True enum.enumName = e.name break self.listOfEnums.append(enum) return enum class enumTypeClass(): def __init__(self, name, bitWidth, keyList, valueList, descrList): self.name = name self.bitWidth = bitWidth matrix = list(zip(valueList, keyList, descrList)) matrix.sort(key=lambda x: x[0]) valueList, keyList, descrList = list(zip(*matrix)) self.keyList = list(keyList) self.valueList = list(valueList) self.allReadyExist = False self.enumName = None self.descrList = descrList def compare(self, other): result = True result = self.bitWidth == other.bitWidth and result result = self.compareLists(self.keyList, other.keyList) and result return result def compareLists(self, list1, list2): for val in list1: if val in list2: return True return False class rstAddressBlock(addressBlockClass): """Generates a ReStructuredText file from a IP-XACT register description""" def __init__(self, name, addrWidth, dataWidth): self.name = name self.addrWidth = addrWidth self.dataWidth = dataWidth self.registerList = [] self.suffix = ".rst" def returnEnumValueString(self, enumTypeObj): if isinstance(enumTypeObj, enumTypeClass): l = [] for i in range(len(enumTypeObj.keyList)): l.append(enumTypeObj.keyList[i] + '=' + enumTypeObj.valueList[i]) s = ", ".join(l) else: s = '' return s def returnAsString(self): r = "" regNameList = [reg.name for reg in self.registerList] regAddressList = [reg.address for reg in self.registerList] regDescrList = [reg.desc for reg in self.registerList] r += self.returnRstTitle() r += self.returnRstSubTitle() summary_table = [] for i in range(len(regNameList)): summary_table.append(["%#04x" % regAddressList[i], str(regNameList[i]) + "_", str(regDescrList[i])]) r += tabulate.tabulate(summary_table, headers=['Address', 'Register Name', 'Description'], tablefmt="grid") r += "\n" r += "\n" for reg in self.registerList: r += 
self.returnRstRegDesc(reg.name, reg.address, reg.size, reg.resetValue, reg.desc, reg.access) reg_table = [] for fieldIndex in reversed(list(range(len(reg.fieldNameList)))): bits = "[" + str(reg.bitOffsetList[fieldIndex] + reg.bitWidthList[fieldIndex] - 1) + \ ":" + str(reg.bitOffsetList[fieldIndex]) + "]" _line = [bits, reg.fieldNameList[fieldIndex]] if reg.resetValue: temp = (int(reg.resetValue, 0) >> reg.bitOffsetList[fieldIndex]) mask = (2 ** reg.bitWidthList[fieldIndex]) - 1 temp &= mask temp = "{value:#0{width}x}".format(value=temp, width=math.ceil(reg.bitWidthList[fieldIndex] / 4) + 2) _line.append(temp) _line.append(reg.fieldDescList[fieldIndex]) reg_table.append(_line) _headers = ['Bits', 'Field name'] if reg.resetValue: _headers.append('Reset') _headers.append('Description') r += tabulate.tabulate(reg_table, headers=_headers, tablefmt="grid") r += "\n" r += "\n" # enumerations for enum in reg.enumTypeList: if enum: # header r += enum.name + "\n" r += ',' * len(enum.name) + "\n" r += "\n" # table enum_table = [] for i in range(len(enum.keyList)): _value = "{value:#0{width}x}".format(value=int(enum.valueList[i], 0), width=math.ceil(int(enum.bitWidth, 0) / 4) + 2) _line = [enum.keyList[i], _value, enum.descrList[i]] enum_table.append(_line) r += tabulate.tabulate(enum_table, headers=['Name', 'Value', 'Description'], tablefmt="grid") r += "\n\n" return r def returnRstTitle(self): r = '' r += "====================\n" r += "Register description\n" r += "====================\n\n" return r def returnRstSubTitle(self): r = '' r += "Registers\n" r += "---------\n\n" return r def returnRstRegDesc(self, name, address, size, resetValue, desc, access): r = "" r += name + "\n" r += len(name) * '-' + "\n" r += "\n" r += ":Name: " + str(name) + "\n" r += ":Address: " + hex(address) + "\n" if resetValue: # display the resetvalue in hex notation in the full length of the register r += ":Reset Value: {value:#0{size:d}x}\n".format(value=int(resetValue, 0), size=size // 4 + 2) 
r += ":Access: " + access + "\n" r += ":Description: " + desc + "\n" r += "\n" return r class mdAddressBlock(addressBlockClass): """Generates a Markdown file from a IP-XACT register description""" def __init__(self, name, addrWidth, dataWidth): self.name = name self.addrWidth = addrWidth self.dataWidth = dataWidth self.registerList = [] self.suffix = ".md" self.mdFile = MdUtils(file_name="none", title="") def returnEnumValueString(self, enumTypeObj): if isinstance(enumTypeObj, enumTypeClass): l = [] for i in range(len(enumTypeObj.keyList)): l.append(enumTypeObj.keyList[i] + '=' + enumTypeObj.valueList[i]) s = ", ".join(l) else: s = '' return s def returnAsString(self): regNameList = [reg.name for reg in self.registerList] regAddressList = [reg.address for reg in self.registerList] regDescrList = [reg.desc for reg in self.registerList] self.mdFile.new_header(level=1, title="Register description") self.mdFile.new_header(level=2, title="Registers") # summary header = ['Address', 'Register Name', 'Description'] rows = [] for i in range(len(regNameList)): rows.extend(["{:#04x}".format(regAddressList[i]), f"[{regNameList[i]}](#{regNameList[i]})", str(regDescrList[i])]) self.mdFile.new_table(columns=len(header), rows=len(regNameList) + 1, # header + data text=header + rows, text_align='left') # all registers for reg in self.registerList: headers = ['Bits', 'Field name'] if reg.resetValue: headers.append('Reset') headers.append('Description') self.returnMdRegDesc(reg.name, reg.address, reg.size, reg.resetValue, reg.desc, reg.access) reg_table = [] for fieldIndex in reversed(list(range(len(reg.fieldNameList)))): bits = "[" + str(reg.bitOffsetList[fieldIndex] + reg.bitWidthList[fieldIndex] - 1) + \ ":" + str(reg.bitOffsetList[fieldIndex]) + "]" reg_table.append(bits) reg_table.append(reg.fieldNameList[fieldIndex]) if reg.resetValue: temp = (int(reg.resetValue, 0) >> reg.bitOffsetList[fieldIndex]) mask = (2 ** reg.bitWidthList[fieldIndex]) - 1 temp &= mask temp = 
"{value:#0{width}x}".format(value=temp, width=math.ceil(reg.bitWidthList[fieldIndex] / 4) + 2) reg_table.append(temp) reg_table.append(reg.fieldDescList[fieldIndex]) self.mdFile.new_table(columns=len(headers), rows=len(reg.fieldNameList) + 1, text=headers + reg_table, text_align='left') # enumerations for enum in reg.enumTypeList: if enum: self.mdFile.new_header(level=4, title=enum.name) enum_table = [] for i in range(len(enum.keyList)): _value = "{value:#0{width}x}".format(value=int(enum.valueList[i], 0), width=math.ceil(int(enum.bitWidth, 0) / 4) + 2) enum_table.append(enum.keyList[i]) enum_table.append(_value) enum_table.append(enum.descrList[i]) headers = ['Name', 'Value', 'Description'] self.mdFile.new_table(columns=len(headers), rows=len(enum.keyList) + 1, text=headers + enum_table, text_align='left') return self.mdFile.file_data_text def returnMdRegDesc(self, name, address, size, resetValue, desc, access): self.mdFile.new_header(level=3, title=name) self.mdFile.new_line("**Name** " + str(name)) self.mdFile.new_line("**Address** " + hex(address)) if resetValue: # display the resetvalue in hex notation in the full length of the register self.mdFile.new_line( "**Reset Value** {value:#0{size:d}x}".format(value=int(resetValue, 0), size=size // 4 + 2)) self.mdFile.new_line("**Access** " + access) self.mdFile.new_line("**Description** " + desc) class vhdlAddressBlock(addressBlockClass): """Generates a vhdl file from a IP-XACT register description""" def __init__(self, name, addrWidth, dataWidth): self.name = name self.addrWidth = addrWidth self.dataWidth = dataWidth self.registerList = [] self.suffix = "_vhd_pkg.vhd" def returnAsString(self): r = '' r += self.returnPkgHeaderString() r += "\n\n" r += self.returnPkgBodyString() return r def returnPkgHeaderString(self): r = '' r += "--\n" r += "-- Automatically generated\n" r += "-- with the command '%s'\n" % (' '.join(sys.argv)) r += "--\n" r += "-- Do not manually edit!\n" r += "--\n" r += "-- VHDL 93\n" r += "--\n" 
r += "\n" r += "library ieee;\n" r += "use ieee.std_logic_1164.all;\n" r += "use ieee.numeric_std.all;\n" r += "\n" r += "package " + self.name + "_vhd_pkg is\n" r += "\n" r += " constant addr_width : natural := " + str(self.addrWidth) + ";\n" r += " constant data_width : natural := " + str(self.dataWidth) + ";\n" r += "\n\n" r += self.returnRegFieldEnumTypeStrings(True) for reg in self.registerList: r += " constant {name}_addr : natural := {address} ; -- {address:#0{width}x}\n".format(name=reg.name, address=reg.address, width=math.ceil( self.addrWidth / 4) + 2) # +2 for the '0x' r += "\n" for reg in self.registerList: if reg.resetValue: r += " constant {name}_reset_value : std_ulogic_vector(data_width-1 downto 0) := std_ulogic_vector(to_unsigned({value:d}, data_width)); -- {value:#0{width}x}\n".format( name=reg.name, value=int(reg.resetValue, 0), width=math.ceil((self.dataWidth / 4)) + 2) r += "\n\n" for reg in self.registerList: r += self.returnRegRecordTypeString(reg) r += self.returnRegistersInRecordTypeString() r += self.returnRegistersOutRecordTypeString() r += self.returnRegistersReadFunction() r += self.returnRegistersWriteFunction() r += self.returnRegistersResetFunction() r += "end;\n" return r def returnRegFieldEnumTypeStrings(self, prototype): r = '' for reg in self.registerList: for enum in reg.enumTypeList: if isinstance(enum, enumTypeClass) and not enum.allReadyExist: r += " -- {}\n".format(enum.name) # group the enums in the package if prototype: t = " type " + enum.name + "_enum is (" indent = t.find('(') + 1 r += t for ki in range(len(enum.keyList)): if ki != 0: # no indentation for the first element r += " " * indent r += enum.keyList[ki] if ki != len(enum.keyList) - 1: # no ',' for the last element r += "," else: # last element r += ");" if enum.descrList[ki]: r += " -- " + enum.descrList[ki] if ki != len(enum.keyList) - 1: # no new line for the last element r += "\n" r += "\n" r += " function " + enum.name + \ "_enum_to_sulv(v: " + enum.name + 
"_enum ) return std_ulogic_vector" if prototype: r += ";\n" else: r += " is\n" r += " variable r : std_ulogic_vector(" + str(enum.bitWidth) + "-1 downto 0);\n" r += " begin\n" r += " case v is\n" for i in range(len(enum.keyList)): r += ' when {key} => r:="{value_int:0{bitwidth}b}"; -- {value}\n'.format( key=enum.keyList[i], value=enum.valueList[i], value_int=int(enum.valueList[i]), bitwidth=int(enum.bitWidth)) r += " end case;\n" r += " return r;\n" r += " end function;\n\n" r += " function sulv_to_" + enum.name + \ "_enum(v: std_ulogic_vector(" + str(enum.bitWidth) + "-1 downto 0)) return " + \ enum.name + "_enum" if prototype: r += ";\n" else: r += " is\n" r += " variable r : " + enum.name + "_enum;\n" r += " begin\n" r += " case v is\n" for i in range(len(enum.keyList)): r += ' when "{value_int:0{bitwidth}b}" => r:={key};\n'.format(key=enum.keyList[i], value_int=int( enum.valueList[ i]), bitwidth=int( enum.bitWidth)) r += ' when others => r:=' + enum.keyList[0] + '; -- error\n' r += " end case;\n" r += " return r;\n" r += " end function;\n\n" if prototype: r += "\n" if prototype: r += "\n" return r def returnRegRecordTypeString(self, reg): r = '' r += " type " + reg.name + "_record_type is record\n" for i in reversed(list(range(len(reg.fieldNameList)))): bits = "[" + str(reg.bitOffsetList[i] + reg.bitWidthList[i] - 1) + ":" + str(reg.bitOffsetList[i]) + "]" bit = "[" + str(reg.bitOffsetList[i]) + "]" if isinstance(reg.enumTypeList[i], enumTypeClass): if not reg.enumTypeList[i].allReadyExist: r += " " + reg.fieldNameList[i] + " : " + \ reg.enumTypeList[i].name + "_enum; -- " + bits + "\n" else: r += " " + reg.fieldNameList[i] + " : " + \ reg.enumTypeList[i].enumName + "_enum; -- " + bits + "\n" else: if reg.bitWidthList[i] == 1: # single bit r += " " + reg.fieldNameList[i] + " : std_ulogic; -- " + bit + "\n" else: # vector r += " " + reg.fieldNameList[i] + " : std_ulogic_vector(" + str(reg.bitWidthList[i] - 1) + \ " downto 0); -- " + bits + "\n" r += " end 
record;\n\n" return r def returnRegistersInRecordTypeString(self): r = "" r += " type " + self.name + "_in_record_type is record\n" for reg in self.registerList: if reg.access == "read-only": r += " {name} : {name}_record_type; -- addr {addr:#0{width}x}\n".format(name=reg.name, addr=reg.address, width=math.ceil( self.addrWidth / 4) + 2) # +2 for the '0x' r += " end record;\n\n" return r def returnRegistersOutRecordTypeString(self): r = "" r += " type " + self.name + "_out_record_type is record\n" for reg in self.registerList: if reg.access != "read-only": r += " {name} : {name}_record_type; -- addr {addr:#0{width}x}\n".format(name=reg.name, addr=reg.address, width=math.ceil( self.addrWidth / 4) + 2) # +2 for the '0x' r += " end record;\n\n" return r def returnRegistersReadFunction(self): r = " function read_" + self.name + "(registers_i : " + self.name + "_in_record_type;\n" indent = r.find('(') + 1 r += " " * indent + "registers_o : " + self.name + "_out_record_type;\n" r += " " * indent + "address : std_ulogic_vector(addr_width-1 downto 0)\n" r += " " * indent + ") return std_ulogic_vector;\n\n" return r def returnRegistersWriteFunction(self): r = " function write_" + self.name + "(value : std_ulogic_vector(data_width-1 downto 0);\n" indent = r.find('(') + 1 r += " " * indent + "address : std_ulogic_vector(addr_width-1 downto 0);\n" r += " " * indent + "registers_o : " + self.name + "_out_record_type\n" r += " " * indent + ") return " + self.name + "_out_record_type;\n\n" return r def returnRegistersResetFunction(self): r = " function reset_" + self.name + " return " + self.name + "_out_record_type;\n" r += " function reset_" + self.name + "(address: std_ulogic_vector(addr_width-1 downto 0);\n" indent = r.splitlines()[-1].find('(') + 1 r += " " * indent + "registers_o : " + self.name + "_out_record_type\n" r += " " * indent + ") return " + self.name + "_out_record_type;\n\n" return r def returnRecToSulvFunctionString(self, reg): r = "" r += " function " + 
reg.name + \ "_record_type_to_sulv(v : " + reg.name + "_record_type) return std_ulogic_vector is\n" r += " variable r : std_ulogic_vector(data_width-1 downto 0);\n" r += " begin\n" r += " r := (others => '0');\n" for i in reversed(list(range(len(reg.fieldNameList)))): bits = str(reg.bitOffsetList[i] + reg.bitWidthList[i] - 1) + " downto " + str(reg.bitOffsetList[i]) bit = str(reg.bitOffsetList[i]) if isinstance(reg.enumTypeList[i], enumTypeClass): if not reg.enumTypeList[i].allReadyExist: r += " r(" + bits + ") := " + \ reg.enumTypeList[i].name + "_enum_to_sulv(v." + reg.fieldNameList[i] + ");\n" else: r += " r(" + bits + ") := " + \ reg.enumTypeList[i].enumName + "_enum_to_sulv(v." + reg.fieldNameList[i] + ");\n" else: if reg.bitWidthList[i] == 1: # single bit r += " r(" + bit + ") := v." + reg.fieldNameList[i] + ";\n" else: # vector r += " r(" + bits + ") := v." + reg.fieldNameList[i] + ";\n" r += " return r;\n" r += " end function;\n\n" return r def returnSulvToRecFunctionString(self, reg): r = "" r += " function sulv_to_" + reg.name + \ "_record_type(v : std_ulogic_vector) return " + reg.name + "_record_type is\n" r += " variable r : " + reg.name + "_record_type;\n" r += " begin\n" for i in reversed(list(range(len(reg.fieldNameList)))): bits = str(reg.bitOffsetList[i] + reg.bitWidthList[i] - 1) + " downto " + str(reg.bitOffsetList[i]) bit = str(reg.bitOffsetList[i]) if isinstance(reg.enumTypeList[i], enumTypeClass): if not reg.enumTypeList[i].allReadyExist: r += " r." + reg.fieldNameList[i] + " := sulv_to_" + \ reg.enumTypeList[i].name + "_enum(v(" + bits + "));\n" else: r += " r." + reg.fieldNameList[i] + " := sulv_to_" + \ reg.enumTypeList[i].enumName + "_enum(v(" + bits + "));\n" else: if reg.bitWidthList[i] == 1: # single bit r += " r." + reg.fieldNameList[i] + " := v(" + bit + ");\n" else: r += " r." 
+ reg.fieldNameList[i] + " := v(" + bits + ");\n" r += " return r;\n" r += " end function;\n\n" return r def returnReadFunctionString(self): r = "" t = " function read_" + self.name + "(registers_i : " + self.name + "_in_record_type;\n" indent = t.find('(') + 1 r += t r += " " * indent + "registers_o : " + self.name + "_out_record_type;\n" r += " " * indent + "address : std_ulogic_vector(addr_width-1 downto 0)\n" r += " " * indent + ") return std_ulogic_vector is\n" r += " variable r : std_ulogic_vector(data_width-1 downto 0);\n" r += " begin\n" r += " case to_integer(unsigned(address)) is\n" for reg in self.registerList: if reg.access == "read-only": r += " when " + reg.name + "_addr => r:= " + reg.name + \ "_record_type_to_sulv(registers_i." + reg.name + ");\n" else: r += " when " + reg.name + "_addr => r:= " + reg.name + \ "_record_type_to_sulv(registers_o." + reg.name + ");\n" r += " when others => r := (others => '0');\n" r += " end case;\n" r += " return r;\n" r += " end function;\n\n" return r def returnWriteFunctionString(self): r = "" t = " function write_" + self.name + "(value : std_ulogic_vector(data_width-1 downto 0);\n" r += t indent = t.find('(') + 1 r += " " * indent + "address : std_ulogic_vector(addr_width-1 downto 0);\n" r += " " * indent + "registers_o : " + self.name + "_out_record_type\n" r += " " * indent + ") return " + self.name + "_out_record_type is\n" r += " variable r : " + self.name + "_out_record_type;\n" r += " begin\n" r += " r := registers_o;\n" r += " case to_integer(unsigned(address)) is\n" for reg in self.registerList: if reg.access != "read-only": r += " when " + reg.name + "_addr => r." 
+ reg.name + \ " := sulv_to_" + reg.name + "_record_type(value);\n" r += " when others => null;\n" r += " end case;\n" r += " return r;\n" r += " end function;\n\n" return r def returnResetFunctionString(self): r = "" r += " function reset_" + self.name + " return " + self.name + "_out_record_type is\n" r += " variable r : " + self.name + "_out_record_type;\n" r += " begin\n" for reg in self.registerList: if reg.resetValue: if reg.access != "read-only": r += " r." + reg.name + " := sulv_to_" + \ reg.name + "_record_type(" + reg.name + "_reset_value);\n" r += " return r;\n" r += " end function;\n" r += "\n" r += " function reset_" + self.name + "(address: std_ulogic_vector(addr_width-1 downto 0);\n" indent = r.splitlines()[-1].find('(') + 1 r += " " * indent + "registers_o : " + self.name + "_out_record_type\n" r += " " * indent + ") return " + self.name + "_out_record_type is\n" r += " variable r : " + self.name + "_out_record_type;\n" r += " begin\n" r += " r := registers_o;\n" r += " case to_integer(unsigned(address)) is\n" for reg in self.registerList: if reg.resetValue: if reg.access != "read-only": r += " when " + reg.name + "_addr => r." 
+ reg.name + \ " := sulv_to_" + reg.name + "_record_type(" + reg.name + "_reset_value);\n" r += " when others => null;\n" r += " end case;\n" r += " return r;\n" r += " end function;\n\n" return r def returnPkgBodyString(self): r = "" r += "package body " + self.name + "_vhd_pkg is\n\n" r += self.returnRegFieldEnumTypeStrings(False) for reg in self.registerList: r += self.returnRecToSulvFunctionString(reg) r += self.returnSulvToRecFunctionString(reg) r += self.returnReadFunctionString() r += self.returnWriteFunctionString() r += self.returnResetFunctionString() r += "end package body;\n" return r class systemVerilogAddressBlock(addressBlockClass): def __init__(self, name, addrWidth, dataWidth): self.name = name self.addrWidth = addrWidth self.dataWidth = dataWidth self.registerList = [] self.suffix = "_sv_pkg.sv" def returnIncludeString(self): r = "\n" r += "`define " + self.name + "_addr_width " + str(self.addrWidth) + "\n" r += "`define " + self.name + "_data_width " + str(self.dataWidth) + "\n" return r def returnSizeString(self): r = "\n" r += "const int addr_width = " + str(self.addrWidth) + ";\n" r += "const int data_width = " + str(self.dataWidth) + ";\n" return r def returnAddressesString(self): r = "\n" for reg in self.registerList: r += "const int " + reg.name + "_addr = " + str(reg.address) + ";\n" r += "\n" return r def returnAddressListString(self): r = "\n" r = "//synopsys translate_off\n" r += "const int " + self.name + "_regAddresses [" + str(len(self.registerList)) + "] = '{" l = [] for reg in self.registerList: l.append("\n " + reg.name + "_addr") r += ",".join(l) r += "};\n" r += "\n" r += "const string " + self.name + "_regNames [" + str(len(self.registerList)) + "] = '{" l = [] for reg in self.registerList: l.append('\n "' + reg.name + '"') r += ",".join(l) r += "};\n" r += "const reg " + self.name + "_regUnResetedAddresses [" + str(len(self.registerList)) + "] = '{" l = [] for reg in self.registerList: if reg.resetValue: l.append("\n 1'b0") 
else: l.append("\n 1'b1") r += ",".join(l) r += "};\n" r += "\n" r += "//synopsys translate_on\n\n" return r def enumeratedType(self, prepend, fieldName, valueNames, values): r = "\n" members = [] # dont want to create to simple names in the global names space. # should preppend with name from ipxact file for index in range(len(valueNames)): name = valueNames[index] value = values[index] members.append(name + "=" + value) r += "typedef enum { " + ",".join(members) + "} enum_" + fieldName + ";\n" return r def returnResetValuesString(self): r = "" for reg in self.registerList: if reg.resetValue: r += "const " + reg.name + "_struct_type " + reg.name + \ "_reset_value = " + str(int(reg.resetValue, 0)) + ";\n" r += "\n" return r def returnStructString(self): r = "\n" for reg in self.registerList: r += "\ntypedef struct packed {\n" for i in reversed(list(range(len(reg.fieldNameList)))): bits = "bits [" + str(reg.bitOffsetList[i] + reg.bitWidthList[i] - 1) + \ ":" + str(reg.bitOffsetList[i]) + "]" r += " bit [" + str(reg.bitWidthList[i] - 1) + ":0] " + \ str(reg.fieldNameList[i]) + ";//" + bits + "\n" r += "} " + reg.name + "_struct_type;\n\n" return r def returnRegistersStructString(self): r = "typedef struct packed {\n" for reg in self.registerList: r += " " + reg.name + "_struct_type " + reg.name + ";\n" r += "} " + self.name + "_struct_type;\n\n" return r def returnReadFunctionString(self): r = "function bit [31:0] read_" + self.name + "(" + self.name + "_struct_type registers,int address);\n" r += " bit [31:0] r;\n" r += " case(address)\n" for reg in self.registerList: r += " " + reg.name + "_addr: r[$bits(registers." + reg.name + ")-1:0] = registers." 
+ reg.name + ";\n" r += " default: r =0;\n" r += " endcase\n" r += " return r;\n" r += "endfunction\n\n" return r def returnWriteFunctionString(self): t = "function " + self.name + "_struct_type write_" + self.name + "(bit [31:0] data, int address,\n" r = t indent = r.find('(') + 1 r += " " * indent + self.name + "_struct_type registers);\n" r += " " + self.name + "_struct_type r;\n" r += " r = registers;\n" r += " case(address)\n" for reg in self.registerList: r += " " + reg.name + "_addr: r." + reg.name + " = data[$bits(registers." + reg.name + ")-1:0];\n" r += " endcase // case address\n" r += " return r;\n" r += "endfunction\n\n" return r def returnResetFunctionString(self): r = "function " + self.name + "_struct_type reset_" + self.name + "();\n" r += " " + self.name + "_struct_type r;\n" for reg in self.registerList: if reg.resetValue: r += " r." + reg.name + "=" + reg.name + "_reset_value;\n" r += " return r;\n" r += "endfunction\n" r += "\n" return r def returnAsString(self): r = '' r += "// Automatically generated\n" r += "// with the command '%s'\n" % (' '.join(sys.argv)) r += "//\n" r += "// Do not manually edit!\n" r += "//\n" r += "package " + self.name + "_sv_pkg;\n\n" r += self.returnSizeString() r += self.returnAddressesString() r += self.returnAddressListString() r += self.returnStructString() r += self.returnResetValuesString() r += self.returnRegistersStructString() r += self.returnReadFunctionString() r += self.returnWriteFunctionString() r += self.returnResetFunctionString() r += "endpackage //" + self.name + "_sv_pkg\n" return r class ipxactParser(): def __init__(self, srcFile, config): self.srcFile = srcFile self.config = config self.enumTypeClassRegistry = enumTypeClassRegistry() def returnDocument(self): spirit_ns = 'http://www.spiritconsortium.org/XMLSchema/SPIRIT/1.5' tree = ETree.parse(self.srcFile) ETree.register_namespace('spirit', spirit_ns) namespace = tree.getroot().tag[1:].split("}")[0] spiritString = '{' + spirit_ns + '}' docName 
= tree.find(spiritString + "name").text d = documentClass(docName) memoryMaps = tree.find(spiritString + "memoryMaps") memoryMapList = memoryMaps.findall(spiritString + "memoryMap") if memoryMaps is not None else [] for memoryMap in memoryMapList: memoryMapName = memoryMap.find(spiritString + "name").text addressBlockList = memoryMap.findall(spiritString + "addressBlock") m = memoryMapClass(memoryMapName) for addressBlock in addressBlockList: addressBlockName = addressBlock.find(spiritString + "name").text registerList = addressBlock.findall(spiritString + "register") baseAddress = int(addressBlock.find(spiritString + "baseAddress").text, 0) nbrOfAddresses = int(addressBlock.find(spiritString + "range").text, 0) # TODO, this is wrong addrWidth = int(math.ceil((math.log(baseAddress + nbrOfAddresses, 2)))) dataWidth = int(addressBlock.find(spiritString + "width").text, 0) a = addressBlockClass(addressBlockName, addrWidth, dataWidth) for registerElem in registerList: regName = registerElem.find(spiritString + "name").text reset = registerElem.find(spiritString + "reset") if reset is not None: resetValue = reset.find(spiritString + "value").text else: resetValue = None size = int(registerElem.find(spiritString + "size").text, 0) access = registerElem.find(spiritString + "access").text if registerElem.find(spiritString + "description") != None: desc = registerElem.find(spiritString + "description").text else: desc = "" regAddress = baseAddress + int(registerElem.find(spiritString + "addressOffset").text, 0) r = self.returnRegister(spiritString, registerElem, regAddress, resetValue, size, access, desc, dataWidth) a.addRegister(r) m.addAddressBlock(a) d.addMemoryMap(m) return d def returnRegister(self, spiritString, registerElem, regAddress, resetValue, size, access, regDesc, dataWidth): regName = registerElem.find(spiritString + "name").text fieldList = registerElem.findall(spiritString + "field") fieldNameList = [item.find(spiritString + "name").text for item in 
fieldList] bitOffsetList = [item.find(spiritString + "bitOffset").text for item in fieldList] bitWidthList = [item.find(spiritString + "bitWidth").text for item in fieldList] fieldDescList = [item.find(spiritString + "description").text for item in fieldList] enumTypeList = [] for index in range(len(fieldList)): fieldElem = fieldList[index] bitWidth = bitWidthList[index] fieldName = fieldNameList[index] enumeratedValuesElem = fieldElem.find(spiritString + "enumeratedValues") if enumeratedValuesElem is not None: enumeratedValueList = enumeratedValuesElem.findall(spiritString + "enumeratedValue") valuesNameList = [item.find(spiritString + "name").text for item in enumeratedValueList] descrList = [item.find(spiritString + "description").text if item.find( spiritString + "description") is not None else "" for item in enumeratedValueList] valuesList = [item.find(spiritString + "value").text for item in enumeratedValueList] if len(valuesNameList) > 0: if int(bitWidth) > 1: # if the field of a enum is longer than 1 bit, always use enums enum = enumTypeClass(fieldName, bitWidth, valuesNameList, valuesList, descrList) enum = self.enumTypeClassRegistry.enumAllReadyExist(enum) enumTypeList.append(enum) else: # bit field of 1 bit if self.config['global'].getboolean('onebitenum'): # do create one bit enums enum = enumTypeClass(fieldName, bitWidth, valuesNameList, valuesList, descrList) enum = self.enumTypeClassRegistry.enumAllReadyExist(enum) enumTypeList.append(enum) else: # dont create enums of booleans because this only decreases readability enumTypeList.append(None) else: enumTypeList.append(None) else: enumTypeList.append(None) if len(fieldNameList) == 0: fieldNameList.append(regName) bitOffsetList.append(0) bitWidthList.append(dataWidth) fieldDescList.append('') enumTypeList.append(None) (regName, fieldNameList, bitOffsetList, bitWidthList, fieldDescList, enumTypeList) = sortRegisterAndFillHoles( regName, fieldNameList, bitOffsetList, bitWidthList, fieldDescList, 
enumTypeList, self.config['global'].getboolean('unusedholes')) reg = registerClass(regName, regAddress, resetValue, size, access, regDesc, fieldNameList, bitOffsetList, bitWidthList, fieldDescList, enumTypeList) return reg class ipxact2otherGenerator(): def __init__(self, destDir, namingScheme="addressBlockName"): self.destDir = destDir self.namingScheme = namingScheme def write(self, fileName, string): _dest = os.path.join(self.destDir, fileName) print("writing file " + _dest) if not os.path.exists(os.path.dirname(_dest)): os.makedirs(os.path.dirname(_dest)) with open(_dest, "w") as f: f.write(string) def generate(self, generatorClass, document): self.document = document docName = document.name for memoryMap in document.memoryMapList: mapName = memoryMap.name for addressBlock in memoryMap.addressBlockList: blockName = addressBlock.name block = generatorClass(addressBlock.name, addressBlock.addrWidth, addressBlock.dataWidth) block.setRegisterList(addressBlock.registerList) s = block.returnAsString() if self.namingScheme == "addressBlockName": fileName = blockName + block.suffix else: fileName = docName + '_' + mapName + '_' + blockName + block.suffix self.write(fileName, s) if generatorClass == systemVerilogAddressBlock: includeFileName = fileName + "h" includeString = block.returnIncludeString() self.write(includeFileName, includeString)<|fim▁end|>
fieldNameList, bitOffsetList, bitWidthList, fieldDescList,
<|file_name|>RectInternalRepresentation.js<|end_file_name|><|fim▁begin|>(function(window) { var RectInternalRepresentation = function(primitive) {<|fim▁hole|> this._height = 10; me.GetWidth = function() { return this._width; }; me.SetWidth = function(O) { gizmo.Filter(O, "Number"); this.Update({ width: O, height: this.GetHeight() }); }; me.GetHeight = function() { return this._height; }; me.SetHeight = function(O) { gizmo.Filter(O, "Number"); this.UpdatePoints({x: this.GetX(), y: this.GetY(), width: this.GetWidth(), height: O}); }; me.GetPointsOfMatrix = function() { return this._points; }; me.SetPointsOfMatrix = function(O) { gizmo.Filter(O, "Array"); return this._points; }; me.Set = function(O) { for(var name in O) { switch( name ) { case "width" : { this._width = O[name]; if(this._onChanged) { this._onChanged.call(primitive); }; }; break; case "height" : { this._height = O[name]; if(this._onChanged) { this._onChanged.call(primitive); }; }; break; }; }; this._points = new $M( [ [0 ,0 ,1], [this._width,0 ,1], [this._width,this._height,1], [0 ,this._height,1] ] ); }; return me; } ArmContext.RectInternalRepresentation = RectInternalRepresentation; })();<|fim▁end|>
var me = new ArmContext.InternalRepresentation(primitive); this._width = 10;
<|file_name|>request.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import from future.standard_library import suspend_hooks from future.utils import PY3 if PY3: from urllib.request import * # This aren't in __all__: from urllib.request import (getproxies, pathname2url, proxy_bypass, quote, request_host, thishost, unquote, url2pathname, urlcleanup, urljoin, urlopen, urlparse, urlretrieve, urlsplit, urlunparse) from urllib.parse import (splitattr, splithost, splitpasswd, splitport, splitquery, splittag, splittype, splituser, splitvalue, to_bytes,<|fim▁hole|> __future_module__ = True with suspend_hooks(): from urllib import * from urllib2 import * from urlparse import * # Rename: from urllib import toBytes # missing from __all__ on Py2.6 to_bytes = toBytes # from urllib import (pathname2url, # url2pathname, # getproxies, # urlretrieve, # urlcleanup, # URLopener, # FancyURLopener, # proxy_bypass) # from urllib2 import ( # AbstractBasicAuthHandler, # AbstractDigestAuthHandler, # BaseHandler, # CacheFTPHandler, # FileHandler, # FTPHandler, # HTTPBasicAuthHandler, # HTTPCookieProcessor, # HTTPDefaultErrorHandler, # HTTPDigestAuthHandler, # HTTPErrorProcessor, # HTTPHandler, # HTTPPasswordMgr, # HTTPPasswordMgrWithDefaultRealm, # HTTPRedirectHandler, # HTTPSHandler, # URLError, # build_opener, # install_opener, # OpenerDirector, # ProxyBasicAuthHandler, # ProxyDigestAuthHandler, # ProxyHandler, # Request, # UnknownHandler, # urlopen, # ) # from urlparse import ( # urldefrag # urljoin, # urlparse, # urlunparse, # urlsplit, # urlunsplit, # parse_qs, # parse_q" # )<|fim▁end|>
unwrap) else:
<|file_name|>pos.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors # License: GNU General Public License v3. See license.txt from __future__ import unicode_literals import json import frappe from erpnext.accounts.party import get_party_account_currency from erpnext.controllers.accounts_controller import get_taxes_and_charges from erpnext.setup.utils import get_exchange_rate from erpnext.stock.get_item_details import get_pos_profile from frappe import _ from frappe.core.doctype.communication.email import make from frappe.utils import nowdate, cint from six import string_types, iteritems @frappe.whitelist() def get_pos_data(): doc = frappe.new_doc('Sales Invoice') doc.is_pos = 1 pos_profile = get_pos_profile(doc.company) or {} if not pos_profile: frappe.throw(_("POS Profile is required to use Point-of-Sale")) if not doc.company: doc.company = pos_profile.get('company') doc.update_stock = pos_profile.get('update_stock') if pos_profile.get('name'): pos_profile = frappe.get_doc('POS Profile', pos_profile.get('name')) pos_profile.validate() company_data = get_company_data(doc.company) update_pos_profile_data(doc, pos_profile, company_data) update_multi_mode_option(doc, pos_profile) default_print_format = pos_profile.get('print_format') or "Point of Sale" print_template = frappe.db.get_value('Print Format', default_print_format, 'html') items_list = get_items_list(pos_profile, doc.company) customers = get_customers_list(pos_profile) doc.plc_conversion_rate = update_plc_conversion_rate(doc, pos_profile) return { 'doc': doc, 'default_customer': pos_profile.get('customer'), 'items': items_list, 'item_groups': get_item_groups(pos_profile), 'customers': customers, 'address': get_customers_address(customers), 'contacts': get_contacts(customers), 'serial_no_data': get_serial_no_data(pos_profile, doc.company), 'batch_no_data': get_batch_no_data(), 'barcode_data': get_barcode_data(items_list), 'tax_data': get_item_tax_data(), 
'price_list_data': get_price_list_data(doc.selling_price_list, doc.plc_conversion_rate), 'customer_wise_price_list': get_customer_wise_price_list(), 'bin_data': get_bin_data(pos_profile), 'pricing_rules': get_pricing_rule_data(doc), 'print_template': print_template, 'pos_profile': pos_profile, 'meta': get_meta() } def update_plc_conversion_rate(doc, pos_profile): conversion_rate = 1.0 price_list_currency = frappe.get_cached_value("Price List", doc.selling_price_list, "currency") if pos_profile.get("currency") != price_list_currency: conversion_rate = get_exchange_rate(price_list_currency, pos_profile.get("currency"), nowdate(), args="for_selling") or 1.0 return conversion_rate def get_meta(): doctype_meta = { 'customer': frappe.get_meta('Customer'), 'invoice': frappe.get_meta('Sales Invoice') } for row in frappe.get_all('DocField', fields=['fieldname', 'options'], filters={'parent': 'Sales Invoice', 'fieldtype': 'Table'}): doctype_meta[row.fieldname] = frappe.get_meta(row.options) return doctype_meta def get_company_data(company): return frappe.get_all('Company', fields=["*"], filters={'name': company})[0] def update_pos_profile_data(doc, pos_profile, company_data): doc.campaign = pos_profile.get('campaign') if pos_profile and not pos_profile.get('country'): pos_profile.country = company_data.country doc.write_off_account = pos_profile.get('write_off_account') or \ company_data.write_off_account doc.change_amount_account = pos_profile.get('change_amount_account') or \ company_data.default_cash_account doc.taxes_and_charges = pos_profile.get('taxes_and_charges') if doc.taxes_and_charges: update_tax_table(doc) doc.currency = pos_profile.get('currency') or company_data.default_currency doc.conversion_rate = 1.0 if doc.currency != company_data.default_currency: doc.conversion_rate = get_exchange_rate(doc.currency, company_data.default_currency, doc.posting_date, args="for_selling") doc.selling_price_list = pos_profile.get('selling_price_list') or \ 
frappe.db.get_value('Selling Settings', None, 'selling_price_list') doc.naming_series = pos_profile.get('naming_series') or 'SINV-' doc.letter_head = pos_profile.get('letter_head') or company_data.default_letter_head doc.ignore_pricing_rule = pos_profile.get('ignore_pricing_rule') or 0 doc.apply_discount_on = pos_profile.get('apply_discount_on') or 'Grand Total' doc.customer_group = pos_profile.get('customer_group') or get_root('Customer Group') doc.territory = pos_profile.get('territory') or get_root('Territory') doc.terms = frappe.db.get_value('Terms and Conditions', pos_profile.get('tc_name'), 'terms') or doc.terms or '' doc.offline_pos_name = '' def get_root(table): root = frappe.db.sql(""" select name from `tab%(table)s` having min(lft)""" % {'table': table}, as_dict=1) return root[0].name def update_multi_mode_option(doc, pos_profile): from frappe.model import default_fields if not pos_profile or not pos_profile.get('payments'): for payment in get_mode_of_payment(doc): payments = doc.append('payments', {}) payments.mode_of_payment = payment.parent payments.account = payment.default_account payments.type = payment.type return for payment_mode in pos_profile.payments: payment_mode = payment_mode.as_dict() for fieldname in default_fields: if fieldname in payment_mode: del payment_mode[fieldname] doc.append('payments', payment_mode) def get_mode_of_payment(doc): return frappe.db.sql(""" select mpa.default_account, mpa.parent, mp.type as type from `tabMode of Payment Account` mpa,`tabMode of Payment` mp where mpa.parent = mp.name and mpa.company = %(company)s and mp.enabled = 1""", {'company': doc.company}, as_dict=1) def update_tax_table(doc): taxes = get_taxes_and_charges('Sales Taxes and Charges Template', doc.taxes_and_charges) for tax in taxes: doc.append('taxes', tax) def get_items_list(pos_profile, company): cond = "" args_list = [] if pos_profile.get('item_groups'): # Get items based on the item groups defined in the POS profile for d in 
pos_profile.get('item_groups'): args_list.extend([d.name for d in get_child_nodes('Item Group', d.item_group)]) if args_list: cond = "and i.item_group in (%s)" % (', '.join(['%s'] * len(args_list))) return frappe.db.sql(""" select i.name, i.item_code, i.item_name, i.description, i.item_group, i.has_batch_no, i.has_serial_no, i.is_stock_item, i.brand, i.stock_uom, i.image, id.expense_account, id.selling_cost_center, id.default_warehouse, i.sales_uom, c.conversion_factor, it.item_tax_template, it.valid_from from `tabItem` i left join `tabItem Default` id on id.parent = i.name and id.company = %s left join `tabItem Tax` it on it.parent = i.name left join `tabUOM Conversion Detail` c on i.name = c.parent and i.sales_uom = c.uom where i.disabled = 0 and i.has_variants = 0 and i.is_sales_item = 1 {cond} group by i.item_code """.format(cond=cond), tuple([company] + args_list), as_dict=1) def get_item_groups(pos_profile): item_group_dict = {} item_groups = frappe.db.sql("""Select name, lft, rgt from `tabItem Group` order by lft""", as_dict=1) for data in item_groups: item_group_dict[data.name] = [data.lft, data.rgt] return item_group_dict def get_customers_list(pos_profile={}): cond = "1=1" customer_groups = [] if pos_profile.get('customer_groups'): # Get customers based on the customer groups defined in the POS profile for d in pos_profile.get('customer_groups'): customer_groups.extend([d.get('name') for d in get_child_nodes('Customer Group', d.get('customer_group'))]) cond = "customer_group in (%s)" % (', '.join(['%s'] * len(customer_groups))) return frappe.db.sql(""" select name, customer_name, customer_group, territory, customer_pos_id from tabCustomer where disabled = 0 and {cond}""".format(cond=cond), tuple(customer_groups), as_dict=1) or {} def get_customers_address(customers): customer_address = {} if isinstance(customers, string_types): customers = [frappe._dict({'name': customers})] for data in customers: address = frappe.db.sql(""" select name, address_line1, 
address_line2, city, state, email_id, phone, fax, pincode from `tabAddress` where is_primary_address =1 and name in (select parent from `tabDynamic Link` where link_doctype = 'Customer' and link_name = %s and parenttype = 'Address')""", data.name, as_dict=1) address_data = {} if address: address_data = address[0] address_data.update({'full_name': data.customer_name, 'customer_pos_id': data.customer_pos_id}) customer_address[data.name] = address_data return customer_address def get_contacts(customers): customer_contact = {} if isinstance(customers, string_types): customers = [frappe._dict({'name': customers})] for data in customers: contact = frappe.db.sql(""" select email_id, phone, mobile_no from `tabContact` where is_primary_contact=1 and name in (select parent from `tabDynamic Link` where link_doctype = 'Customer' and link_name = %s and parenttype = 'Contact')""", data.name, as_dict=1) if contact: customer_contact[data.name] = contact[0] return customer_contact def get_child_nodes(group_type, root): lft, rgt = frappe.db.get_value(group_type, root, ["lft", "rgt"]) return frappe.db.sql(""" Select name, lft, rgt from `tab{tab}` where lft >= {lft} and rgt <= {rgt} order by lft""".format(tab=group_type, lft=lft, rgt=rgt), as_dict=1) def get_serial_no_data(pos_profile, company): # get itemwise serial no data # example {'Nokia Lumia 1020': {'SN0001': 'Pune'}} # where Nokia Lumia 1020 is item code, SN0001 is serial no and Pune is warehouse cond = "1=1" if pos_profile.get('update_stock') and pos_profile.get('warehouse'): cond = "warehouse = %(warehouse)s" serial_nos = frappe.db.sql("""select name, warehouse, item_code from `tabSerial No` where {0} and company = %(company)s """.format(cond),{ 'company': company, 'warehouse': frappe.db.escape(pos_profile.get('warehouse')) }, as_dict=1) itemwise_serial_no = {} for sn in serial_nos: if sn.item_code not in itemwise_serial_no: itemwise_serial_no.setdefault(sn.item_code, {}) itemwise_serial_no[sn.item_code][sn.name] = 
sn.warehouse return itemwise_serial_no def get_batch_no_data(): # get itemwise batch no data # exmaple: {'LED-GRE': [Batch001, Batch002]} # where LED-GRE is item code, SN0001 is serial no and Pune is warehouse itemwise_batch = {} batches = frappe.db.sql("""select name, item from `tabBatch` where ifnull(expiry_date, '4000-10-10') >= curdate()""", as_dict=1) for batch in batches: if batch.item not in itemwise_batch: itemwise_batch.setdefault(batch.item, []) itemwise_batch[batch.item].append(batch.name) return itemwise_batch def get_barcode_data(items_list): # get itemwise batch no data # exmaple: {'LED-GRE': [Batch001, Batch002]} # where LED-GRE is item code, SN0001 is serial no and Pune is warehouse itemwise_barcode = {} for item in items_list: barcodes = frappe.db.sql(""" select barcode from `tabItem Barcode` where parent = %s """, item.item_code, as_dict=1) for barcode in barcodes: if item.item_code not in itemwise_barcode: itemwise_barcode.setdefault(item.item_code, []) itemwise_barcode[item.item_code].append(barcode.get("barcode")) return itemwise_barcode def get_item_tax_data(): # get default tax of an item # example: {'Consulting Services': {'Excise 12 - TS': '12.000'}} itemwise_tax = {} taxes = frappe.db.sql(""" select parent, tax_type, tax_rate from `tabItem Tax Template Detail`""", as_dict=1) for tax in taxes: if tax.parent not in itemwise_tax: itemwise_tax.setdefault(tax.parent, {}) itemwise_tax[tax.parent][tax.tax_type] = tax.tax_rate return itemwise_tax def get_price_list_data(selling_price_list, conversion_rate): itemwise_price_list = {} price_lists = frappe.db.sql("""Select ifnull(price_list_rate, 0) as price_list_rate, item_code from `tabItem Price` ip where price_list = %(price_list)s""", {'price_list': selling_price_list}, as_dict=1) for item in price_lists: itemwise_price_list[item.item_code] = item.price_list_rate * conversion_rate return itemwise_price_list def get_customer_wise_price_list(): customer_wise_price = {} customer_price_list_mapping = 
frappe._dict(frappe.get_all('Customer',fields = ['default_price_list', 'name'], as_list=1)) price_lists = frappe.db.sql(""" Select ifnull(price_list_rate, 0) as price_list_rate, item_code, price_list from `tabItem Price` """, as_dict=1) for item in price_lists: if item.price_list and customer_price_list_mapping.get(item.price_list): customer_wise_price.setdefault(customer_price_list_mapping.get(item.price_list),{}).setdefault( item.item_code, item.price_list_rate ) return customer_wise_price def get_bin_data(pos_profile): itemwise_bin_data = {} filters = { 'actual_qty': ['>', 0] } if pos_profile.get('warehouse'): filters.update({ 'warehouse': pos_profile.get('warehouse') }) bin_data = frappe.db.get_all('Bin', fields = ['item_code', 'warehouse', 'actual_qty'], filters=filters) for bins in bin_data: if bins.item_code not in itemwise_bin_data: itemwise_bin_data.setdefault(bins.item_code, {}) itemwise_bin_data[bins.item_code][bins.warehouse] = bins.actual_qty return itemwise_bin_data def get_pricing_rule_data(doc): pricing_rules = "" if doc.ignore_pricing_rule == 0: pricing_rules = frappe.db.sql(""" Select * from `tabPricing Rule` where docstatus < 2 and ifnull(for_price_list, '') in (%(price_list)s, '') and selling = 1 and ifnull(company, '') in (%(company)s, '') and disable = 0 and %(date)s between ifnull(valid_from, '2000-01-01') and ifnull(valid_upto, '2500-12-31') order by priority desc, name desc""", {'company': doc.company, 'price_list': doc.selling_price_list, 'date': nowdate()}, as_dict=1) return pricing_rules @frappe.whitelist() def make_invoice(pos_profile, doc_list={}, email_queue_list={}, customers_list={}): import json if isinstance(doc_list, string_types): doc_list = json.loads(doc_list) if isinstance(email_queue_list, string_types): email_queue_list = json.loads(email_queue_list) if isinstance(customers_list, string_types): customers_list = json.loads(customers_list) customers_list = make_customer_and_address(customers_list) name_list = [] for docs in 
doc_list: for name, doc in iteritems(docs): if not frappe.db.exists('Sales Invoice', {'offline_pos_name': name}): if isinstance(doc, dict): validate_records(doc) si_doc = frappe.new_doc('Sales Invoice') si_doc.offline_pos_name = name si_doc.update(doc) si_doc.set_posting_time = 1 si_doc.customer = get_customer_id(doc) si_doc.due_date = doc.get('posting_date') name_list = submit_invoice(si_doc, name, doc, name_list) else: doc.due_date = doc.get('posting_date') doc.customer = get_customer_id(doc) doc.set_posting_time = 1 doc.offline_pos_name = name name_list = submit_invoice(doc, name, doc, name_list) else: name_list.append(name) email_queue = make_email_queue(email_queue_list) if isinstance(pos_profile, string_types): pos_profile = json.loads(pos_profile) customers = get_customers_list(pos_profile) return { 'invoice': name_list, 'email_queue': email_queue, 'customers': customers_list, 'synced_customers_list': customers, 'synced_address': get_customers_address(customers), 'synced_contacts': get_contacts(customers) } def validate_records(doc): validate_item(doc) def get_customer_id(doc, customer=None): cust_id = None if doc.get('customer_pos_id'): cust_id = frappe.db.get_value('Customer',{'customer_pos_id': doc.get('customer_pos_id')}, 'name') if not cust_id: customer = customer or doc.get('customer') if frappe.db.exists('Customer', customer): cust_id = customer else: cust_id = add_customer(doc) return cust_id def make_customer_and_address(customers): customers_list = [] for customer, data in iteritems(customers): data = json.loads(data) cust_id = get_customer_id(data, customer) if not cust_id: cust_id = add_customer(data) else: frappe.db.set_value("Customer", cust_id, "customer_name", data.get('full_name')) make_contact(data, cust_id) make_address(data, cust_id) customers_list.append(customer) frappe.db.commit() return customers_list def add_customer(data): customer = data.get('full_name') or data.get('customer') if frappe.db.exists("Customer", customer.strip()): 
return customer.strip() customer_doc = frappe.new_doc('Customer') customer_doc.customer_name = data.get('full_name') or data.get('customer') customer_doc.customer_pos_id = data.get('customer_pos_id') customer_doc.customer_type = 'Company' customer_doc.customer_group = get_customer_group(data) customer_doc.territory = get_territory(data) customer_doc.flags.ignore_mandatory = True customer_doc.save(ignore_permissions=True) frappe.db.commit() return customer_doc.name def get_territory(data): if data.get('territory'): return data.get('territory') return frappe.db.get_single_value('Selling Settings','territory') or _('All Territories') def get_customer_group(data): if data.get('customer_group'): return data.get('customer_group') return frappe.db.get_single_value('Selling Settings', 'customer_group') or frappe.db.get_value('Customer Group', {'is_group': 0}, 'name') def make_contact(args, customer): if args.get('email_id') or args.get('phone'): name = frappe.db.get_value('Dynamic Link', {'link_doctype': 'Customer', 'link_name': customer, 'parenttype': 'Contact'}, 'parent') args = { 'first_name': args.get('full_name'), 'email_id': args.get('email_id'), 'phone': args.get('phone') } doc = frappe.new_doc('Contact') if name: doc = frappe.get_doc('Contact', name) doc.update(args) doc.is_primary_contact = 1 if not name: doc.append('links', { 'link_doctype': 'Customer', 'link_name': customer }) doc.flags.ignore_mandatory = True doc.save(ignore_permissions=True) def make_address(args, customer): if not args.get('address_line1'): return name = args.get('name') if not name: data = get_customers_address(customer) name = data[customer].get('name') if data else None if name: address = frappe.get_doc('Address', name) else: address = frappe.new_doc('Address') if args.get('company'): address.country = frappe.get_cached_value('Company', args.get('company'), 'country') address.append('links', { 'link_doctype': 'Customer', 'link_name': customer }) address.is_primary_address = 1 
address.is_shipping_address = 1 address.update(args) address.flags.ignore_mandatory = True address.save(ignore_permissions=True) def make_email_queue(email_queue): name_list = [] for key, data in iteritems(email_queue): name = frappe.db.get_value('Sales Invoice', {'offline_pos_name': key}, 'name') if not name: continue data = json.loads(data) sender = frappe.session.user print_format = "POS Invoice" if not cint(frappe.db.get_value('Print Format', 'POS Invoice', 'disabled')) else None attachments = [frappe.attach_print('Sales Invoice', name, print_format=print_format)] make(subject=data.get('subject'), content=data.get('content'), recipients=data.get('recipients'), sender=sender, attachments=attachments, send_email=True, doctype='Sales Invoice', name=name) name_list.append(key) return name_list def validate_item(doc): for item in doc.get('items'): if not frappe.db.exists('Item', item.get('item_code')): item_doc = frappe.new_doc('Item') item_doc.name = item.get('item_code') item_doc.item_code = item.get('item_code') item_doc.item_name = item.get('item_name') item_doc.description = item.get('description') item_doc.stock_uom = item.get('stock_uom') item_doc.uom = item.get('uom') item_doc.item_group = item.get('item_group')<|fim▁hole|> }) item_doc.save(ignore_permissions=True) frappe.db.commit() def submit_invoice(si_doc, name, doc, name_list): try: si_doc.insert() si_doc.submit() frappe.db.commit() name_list.append(name) except Exception as e: if frappe.message_log: frappe.message_log.pop() frappe.db.rollback() frappe.log_error(frappe.get_traceback()) name_list = save_invoice(doc, name, name_list) return name_list def save_invoice(doc, name, name_list): try: if not frappe.db.exists('Sales Invoice', {'offline_pos_name': name}): si = frappe.new_doc('Sales Invoice') si.update(doc) si.set_posting_time = 1 si.customer = get_customer_id(doc) si.due_date = doc.get('posting_date') si.flags.ignore_mandatory = True si.insert(ignore_permissions=True) frappe.db.commit() 
name_list.append(name) except Exception: frappe.db.rollback() frappe.log_error(frappe.get_traceback()) return name_list<|fim▁end|>
item_doc.append('item_defaults', { "company": doc.get("company"), "default_warehouse": item.get('warehouse')
<|file_name|>AttitudeChartPanel.java<|end_file_name|><|fim▁begin|>package drone_slam.apps.controlcenter.plugins.attitudechart; import drone_slam.apps.controlcenter.ICCPlugin; import drone_slam.base.IARDrone; import drone_slam.base.navdata.AttitudeListener; import org.jfree.chart.ChartPanel; import javax.swing.*; import java.awt.*; public class AttitudeChartPanel extends JPanel implements ICCPlugin { private IARDrone drone; private AttitudeChart chart; public AttitudeChartPanel() { super(new GridBagLayout()); <|fim▁hole|> JPanel chartPanel = new ChartPanel(chart.getChart(), true, true, true, true, true); add(chartPanel, new GridBagConstraints(0, 0, 1, 1, 1, 1, GridBagConstraints.FIRST_LINE_START, GridBagConstraints.BOTH, new Insets(0, 0, 5, 0), 0, 0)); } private AttitudeListener attitudeListener = new AttitudeListener() { public void windCompensation(float pitch, float roll) { } public void attitudeUpdated(float pitch, float roll) { } public void attitudeUpdated(float pitch, float roll, float yaw) { chart.setAttitude(pitch / 1000, roll / 1000, yaw / 1000); } }; public void activate(IARDrone drone) { this.drone = drone; drone.getNavDataManager().addAttitudeListener(attitudeListener); } public void deactivate() { drone.getNavDataManager().removeAttitudeListener(attitudeListener); } public String getTitle() { return "Attitude Chart"; } public String getDescription() { return "Displays a chart with the latest pitch, roll and yaw"; } public boolean isVisual() { return true; } public Dimension getScreenSize() { return new Dimension(330, 250); } public Point getScreenLocation() { return new Point(330, 390); } public JPanel getPanel() { return this; } }<|fim▁end|>
this.chart = new AttitudeChart();
<|file_name|>combinators.js<|end_file_name|><|fim▁begin|>'use strict'; function curry (fn) { var args = Array.prototype.slice.call(arguments, 1); return function () { var nextArgs = Array.prototype.slice.call(arguments), allArgs = args.concat(nextArgs); return fn.apply(this, allArgs); }; } function compose () { var args = Array.prototype.slice.call(arguments).reverse(); return function (obj) { return args.reduce(function (result, F) { return F(result); }, obj); }; } module.exports = {<|fim▁hole|> constant: function (k) { return function () { return k; }; }, noop: function () {}, compose: compose, curry: curry };<|fim▁end|>
identity: function (i) { return i; },
<|file_name|>angular-tags.module.js<|end_file_name|><|fim▁begin|>(function (angular) { 'use strict'; angular .module('ngTagsInput', []);<|fim▁hole|><|fim▁end|>
})(window.angular);
<|file_name|>mhtml_generation_manager.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "content/browser/download/mhtml_generation_manager.h" #include <map> #include <queue> #include <utility> #include "base/bind.h" #include "base/files/file.h" #include "base/guid.h" #include "base/macros.h" #include "base/scoped_observer.h" #include "base/stl_util.h" #include "base/strings/stringprintf.h" #include "content/browser/bad_message.h" #include "content/browser/frame_host/frame_tree_node.h" #include "content/browser/frame_host/render_frame_host_impl.h" #include "content/common/frame_messages.h" #include "content/public/browser/browser_thread.h" #include "content/public/browser/render_frame_host.h" #include "content/public/browser/render_process_host.h" #include "content/public/browser/render_process_host_observer.h" #include "content/public/browser/web_contents.h" #include "content/public/common/mhtml_generation_params.h" #include "net/base/mime_util.h" namespace content { // The class and all of its members live on the UI thread. Only static methods // are executed on other threads. class MHTMLGenerationManager::Job : public RenderProcessHostObserver { public: Job(int job_id, WebContents* web_contents, const MHTMLGenerationParams& params, const GenerateMHTMLCallback& callback); ~Job() override; int id() const { return job_id_; } void set_browser_file(base::File file) { browser_file_ = std::move(file); } const GenerateMHTMLCallback& callback() const { return callback_; } // Indicates whether we expect a message from the |sender| at this time. // We expect only one message per frame - therefore calling this method // will always clear |frame_tree_node_id_of_busy_frame_|. 
bool IsMessageFromFrameExpected(RenderFrameHostImpl* sender); // Handler for FrameHostMsg_SerializeAsMHTMLResponse (a notification from the // renderer that the MHTML generation for previous frame has finished). // Returns |true| upon success; |false| otherwise. bool OnSerializeAsMHTMLResponse( const std::set<std::string>& digests_of_uris_of_serialized_resources); // Sends IPC to the renderer, asking for MHTML generation of the next frame. // // Returns true if the message was sent successfully; false otherwise. bool SendToNextRenderFrame(); // Indicates if more calls to SendToNextRenderFrame are needed. bool IsDone() const { bool waiting_for_response_from_renderer = frame_tree_node_id_of_busy_frame_ != FrameTreeNode::kFrameTreeNodeInvalidId; bool no_more_requests_to_send = pending_frame_tree_node_ids_.empty(); return !waiting_for_response_from_renderer && no_more_requests_to_send; } // Close the file on the file thread and respond back on the UI thread with // file size. void CloseFile(base::Callback<void(int64_t file_size)> callback); // RenderProcessHostObserver: void RenderProcessExited(RenderProcessHost* host, base::TerminationStatus status, int exit_code) override; void RenderProcessHostDestroyed(RenderProcessHost* host) override; void MarkAsFinished(); private: static int64_t CloseFileOnFileThread(base::File file); void AddFrame(RenderFrameHost* render_frame_host); // Creates a new map with values (content ids) the same as in // |frame_tree_node_to_content_id_| map, but with the keys translated from // frame_tree_node_id into a |site_instance|-specific routing_id. std::map<int, std::string> CreateFrameRoutingIdToContentId( SiteInstance* site_instance); // Id used to map renderer responses to jobs. // See also MHTMLGenerationManager::id_to_job_ map. int job_id_; // User-configurable parameters. Includes the file location, binary encoding // choices, and whether to skip storing resources marked // Cache-Control: no-store. 
MHTMLGenerationParams params_; // The IDs of frames that still need to be processed. std::queue<int> pending_frame_tree_node_ids_; // Identifies a frame to which we've sent FrameMsg_SerializeAsMHTML but for // which we didn't yet process FrameHostMsg_SerializeAsMHTMLResponse via // OnSerializeAsMHTMLResponse. int frame_tree_node_id_of_busy_frame_; // The handle to the file the MHTML is saved to for the browser process. base::File browser_file_; // Map from frames to content ids (see WebFrameSerializer::generateMHTMLParts // for more details about what "content ids" are and how they are used). std::map<int, std::string> frame_tree_node_to_content_id_; // MIME multipart boundary to use in the MHTML doc. std::string mhtml_boundary_marker_; // Digests of URIs of already generated MHTML parts. std::set<std::string> digests_of_already_serialized_uris_; std::string salt_; // The callback to call once generation is complete. const GenerateMHTMLCallback callback_; // Whether the job is finished (set to true only for the short duration of // time between MHTMLGenerationManager::JobFinished is called and the job is // destroyed by MHTMLGenerationManager::OnFileClosed). bool is_finished_; // RAII helper for registering this Job as a RenderProcessHost observer. 
ScopedObserver<RenderProcessHost, MHTMLGenerationManager::Job> observed_renderer_process_host_; DISALLOW_COPY_AND_ASSIGN(Job); }; MHTMLGenerationManager::Job::Job(int job_id, WebContents* web_contents, const MHTMLGenerationParams& params, const GenerateMHTMLCallback& callback) : job_id_(job_id), params_(params), frame_tree_node_id_of_busy_frame_(FrameTreeNode::kFrameTreeNodeInvalidId), mhtml_boundary_marker_(net::GenerateMimeMultipartBoundary()), salt_(base::GenerateGUID()), callback_(callback), is_finished_(false), observed_renderer_process_host_(this) { DCHECK_CURRENTLY_ON(BrowserThread::UI); web_contents->ForEachFrame(base::Bind( &MHTMLGenerationManager::Job::AddFrame, base::Unretained(this))); // Safe because ForEachFrame is synchronous. // Main frame needs to be processed first. DCHECK(!pending_frame_tree_node_ids_.empty()); DCHECK(FrameTreeNode::GloballyFindByID(pending_frame_tree_node_ids_.front()) ->parent() == nullptr); } MHTMLGenerationManager::Job::~Job() { DCHECK_CURRENTLY_ON(BrowserThread::UI); } std::map<int, std::string> MHTMLGenerationManager::Job::CreateFrameRoutingIdToContentId( SiteInstance* site_instance) { std::map<int, std::string> result; for (const auto& it : frame_tree_node_to_content_id_) { int ftn_id = it.first; const std::string& content_id = it.second; FrameTreeNode* ftn = FrameTreeNode::GloballyFindByID(ftn_id); if (!ftn) continue; int routing_id = ftn->render_manager()->GetRoutingIdForSiteInstance(site_instance); if (routing_id == MSG_ROUTING_NONE) continue; result[routing_id] = content_id; } return result; } bool MHTMLGenerationManager::Job::SendToNextRenderFrame() { DCHECK(browser_file_.IsValid()); DCHECK(!pending_frame_tree_node_ids_.empty()); FrameMsg_SerializeAsMHTML_Params ipc_params; ipc_params.job_id = job_id_; ipc_params.mhtml_boundary_marker = mhtml_boundary_marker_; ipc_params.mhtml_binary_encoding = params_.use_binary_encoding; ipc_params.mhtml_cache_control_policy = params_.cache_control_policy; int frame_tree_node_id = 
pending_frame_tree_node_ids_.front(); pending_frame_tree_node_ids_.pop(); ipc_params.is_last_frame = pending_frame_tree_node_ids_.empty(); FrameTreeNode* ftn = FrameTreeNode::GloballyFindByID(frame_tree_node_id); if (!ftn) // The contents went away. return false; RenderFrameHost* rfh = ftn->current_frame_host(); // Get notified if the target of the IPC message dies between responding. observed_renderer_process_host_.RemoveAll(); observed_renderer_process_host_.Add(rfh->GetProcess()); // Tell the renderer to skip (= deduplicate) already covered MHTML parts. ipc_params.salt = salt_; ipc_params.digests_of_uris_to_skip = digests_of_already_serialized_uris_; ipc_params.destination_file = IPC::GetPlatformFileForTransit( browser_file_.GetPlatformFile(), false); // |close_source_handle|. ipc_params.frame_routing_id_to_content_id = CreateFrameRoutingIdToContentId(rfh->GetSiteInstance()); // Send the IPC asking the renderer to serialize the frame. DCHECK_EQ(FrameTreeNode::kFrameTreeNodeInvalidId, frame_tree_node_id_of_busy_frame_); frame_tree_node_id_of_busy_frame_ = frame_tree_node_id; rfh->Send(new FrameMsg_SerializeAsMHTML(rfh->GetRoutingID(), ipc_params)); return true; } void MHTMLGenerationManager::Job::RenderProcessExited( RenderProcessHost* host, base::TerminationStatus status, int exit_code) { DCHECK_CURRENTLY_ON(BrowserThread::UI); MHTMLGenerationManager::GetInstance()->RenderProcessExited(this); } void MHTMLGenerationManager::Job::MarkAsFinished() { DCHECK(!is_finished_); is_finished_ = true; // Stopping RenderProcessExited notifications is needed to avoid calling // JobFinished twice. See also https://crbug.com/612098. 
observed_renderer_process_host_.RemoveAll(); } void MHTMLGenerationManager::Job::AddFrame(RenderFrameHost* render_frame_host) { auto* rfhi = static_cast<RenderFrameHostImpl*>(render_frame_host); int frame_tree_node_id = rfhi->frame_tree_node()->frame_tree_node_id(); pending_frame_tree_node_ids_.push(frame_tree_node_id); std::string guid = base::GenerateGUID(); std::string content_id = base::StringPrintf("<frame-%d-%[email protected]>", frame_tree_node_id, guid.c_str()); frame_tree_node_to_content_id_[frame_tree_node_id] = content_id; } void MHTMLGenerationManager::Job::RenderProcessHostDestroyed( RenderProcessHost* host) { DCHECK_CURRENTLY_ON(BrowserThread::UI); observed_renderer_process_host_.Remove(host); } void MHTMLGenerationManager::Job::CloseFile( base::Callback<void(int64_t)> callback) { DCHECK_CURRENTLY_ON(BrowserThread::UI); if (!browser_file_.IsValid()) { callback.Run(-1); return; } BrowserThread::PostTaskAndReplyWithResult( BrowserThread::FILE, FROM_HERE, base::Bind(&MHTMLGenerationManager::Job::CloseFileOnFileThread, base::Passed(std::move(browser_file_))), callback); } bool MHTMLGenerationManager::Job::IsMessageFromFrameExpected( RenderFrameHostImpl* sender) { int sender_id = sender->frame_tree_node()->frame_tree_node_id(); if (sender_id != frame_tree_node_id_of_busy_frame_) return false; // We only expect one message per frame - let's make sure subsequent messages // from the same |sender| will be rejected. frame_tree_node_id_of_busy_frame_ = FrameTreeNode::kFrameTreeNodeInvalidId; return true; } bool MHTMLGenerationManager::Job::OnSerializeAsMHTMLResponse( const std::set<std::string>& digests_of_uris_of_serialized_resources) { // Renderer should be deduping resources with the same uris. 
DCHECK_EQ(0u, base::STLSetIntersection<std::set<std::string>>( digests_of_already_serialized_uris_, digests_of_uris_of_serialized_resources).size()); digests_of_already_serialized_uris_.insert( digests_of_uris_of_serialized_resources.begin(), digests_of_uris_of_serialized_resources.end()); if (pending_frame_tree_node_ids_.empty()) return true; // Report success - all frames have been processed. return SendToNextRenderFrame(); } // static int64_t MHTMLGenerationManager::Job::CloseFileOnFileThread(base::File file) { DCHECK_CURRENTLY_ON(BrowserThread::FILE); DCHECK(file.IsValid()); int64_t file_size = file.GetLength(); file.Close(); return file_size; } MHTMLGenerationManager* MHTMLGenerationManager::GetInstance() { return base::Singleton<MHTMLGenerationManager>::get(); } MHTMLGenerationManager::MHTMLGenerationManager() : next_job_id_(0) {} MHTMLGenerationManager::~MHTMLGenerationManager() { STLDeleteValues(&id_to_job_); } void MHTMLGenerationManager::SaveMHTML(WebContents* web_contents, const MHTMLGenerationParams& params, const GenerateMHTMLCallback& callback) { DCHECK_CURRENTLY_ON(BrowserThread::UI); <|fim▁hole|> BrowserThread::PostTaskAndReplyWithResult( BrowserThread::FILE, FROM_HERE, base::Bind(&MHTMLGenerationManager::CreateFile, params.file_path), base::Bind(&MHTMLGenerationManager::OnFileAvailable, base::Unretained(this), // Safe b/c |this| is a singleton. 
job_id)); } void MHTMLGenerationManager::OnSerializeAsMHTMLResponse( RenderFrameHostImpl* sender, int job_id, bool mhtml_generation_in_renderer_succeeded, const std::set<std::string>& digests_of_uris_of_serialized_resources) { DCHECK_CURRENTLY_ON(BrowserThread::UI); Job* job = FindJob(job_id); if (!job || !job->IsMessageFromFrameExpected(sender)) { NOTREACHED(); ReceivedBadMessage(sender->GetProcess(), bad_message::DWNLD_INVALID_SERIALIZE_AS_MHTML_RESPONSE); return; } if (!mhtml_generation_in_renderer_succeeded) { JobFinished(job, JobStatus::FAILURE); return; } if (!job->OnSerializeAsMHTMLResponse( digests_of_uris_of_serialized_resources)) { JobFinished(job, JobStatus::FAILURE); return; } if (job->IsDone()) JobFinished(job, JobStatus::SUCCESS); } // static base::File MHTMLGenerationManager::CreateFile(const base::FilePath& file_path) { DCHECK_CURRENTLY_ON(BrowserThread::FILE); // SECURITY NOTE: A file descriptor to the file created below will be passed // to multiple renderer processes which (in out-of-process iframes mode) can // act on behalf of separate web principals. Therefore it is important to // only allow writing to the file and forbid reading from the file (as this // would allow reading content generated by other renderers / other web // principals). 
uint32_t file_flags = base::File::FLAG_CREATE_ALWAYS | base::File::FLAG_WRITE; base::File browser_file(file_path, file_flags); if (!browser_file.IsValid()) { LOG(ERROR) << "Failed to create file to save MHTML at: " << file_path.value(); } return browser_file; } void MHTMLGenerationManager::OnFileAvailable(int job_id, base::File browser_file) { DCHECK_CURRENTLY_ON(BrowserThread::UI); Job* job = FindJob(job_id); DCHECK(job); if (!browser_file.IsValid()) { LOG(ERROR) << "Failed to create file"; JobFinished(job, JobStatus::FAILURE); return; } job->set_browser_file(std::move(browser_file)); if (!job->SendToNextRenderFrame()) { JobFinished(job, JobStatus::FAILURE); } } void MHTMLGenerationManager::JobFinished(Job* job, JobStatus job_status) { DCHECK_CURRENTLY_ON(BrowserThread::UI); DCHECK(job); job->MarkAsFinished(); job->CloseFile( base::Bind(&MHTMLGenerationManager::OnFileClosed, base::Unretained(this), // Safe b/c |this| is a singleton. job->id(), job_status)); } void MHTMLGenerationManager::OnFileClosed(int job_id, JobStatus job_status, int64_t file_size) { DCHECK_CURRENTLY_ON(BrowserThread::UI); Job* job = FindJob(job_id); job->callback().Run(job_status == JobStatus::SUCCESS ? file_size : -1); id_to_job_.erase(job_id); delete job; } int MHTMLGenerationManager::NewJob(WebContents* web_contents, const MHTMLGenerationParams& params, const GenerateMHTMLCallback& callback) { DCHECK_CURRENTLY_ON(BrowserThread::UI); int job_id = next_job_id_++; id_to_job_[job_id] = new Job(job_id, web_contents, params, callback); return job_id; } MHTMLGenerationManager::Job* MHTMLGenerationManager::FindJob(int job_id) { DCHECK_CURRENTLY_ON(BrowserThread::UI); IDToJobMap::iterator iter = id_to_job_.find(job_id); if (iter == id_to_job_.end()) { NOTREACHED(); return nullptr; } return iter->second; } void MHTMLGenerationManager::RenderProcessExited(Job* job) { DCHECK_CURRENTLY_ON(BrowserThread::UI); DCHECK(job); JobFinished(job, JobStatus::FAILURE); } } // namespace content<|fim▁end|>
int job_id = NewJob(web_contents, params, callback);
<|file_name|>size_hint.rs<|end_file_name|><|fim▁begin|>#![feature(core)] extern crate core; #[cfg(test)] mod tests { use core::iter::Iterator; use core::iter::Peekable; struct A<T> { begin: T, end: T } macro_rules! Iterator_impl { ($T:ty) => { impl Iterator for A<$T> { type Item = $T; fn next(&mut self) -> Option<Self::Item> { if self.begin < self.end { let result = self.begin; self.begin = self.begin.wrapping_add(1); Some::<Self::Item>(result) } else { None::<Self::Item> } } fn size_hint(&self) -> (usize, Option<usize>) { debug_assert!(self.begin <= self.end); let exact: usize = (self.end - self.begin) as usize; (exact, Some::<usize>(exact)) } // fn peekable(self) -> Peekable<Self> where Self: Sized { // Peekable{iter: self, peeked: None} // } } } } type T = i32; Iterator_impl!(T); // impl<I: Iterator> Iterator for Peekable<I> { // type Item = I::Item; // // #[inline] // fn next(&mut self) -> Option<I::Item> { // match self.peeked { // Some(_) => self.peeked.take(), // None => self.iter.next(), // } // } // // #[inline] // fn count(self) -> usize { // (if self.peeked.is_some() { 1 } else { 0 }) + self.iter.count() // } // // #[inline] // fn nth(&mut self, n: usize) -> Option<I::Item> { // match self.peeked { // Some(_) if n == 0 => self.peeked.take(), // Some(_) => { // self.peeked = None; // self.iter.nth(n-1) // }, // None => self.iter.nth(n) // } // } // // #[inline] // fn last(self) -> Option<I::Item> { // self.iter.last().or(self.peeked) // } // // #[inline] // fn size_hint(&self) -> (usize, Option<usize>) { // let (lo, hi) = self.iter.size_hint(); // if self.peeked.is_some() { // let lo = lo.saturating_add(1); // let hi = hi.and_then(|x| x.checked_add(1)); // (lo, hi) // } else { // (lo, hi) // } // } // } #[test] fn size_hint_test1() { let a: A<T> = A { begin: 0, end: 10 }; let peekable: Peekable<A<T>> = a.peekable(); let (lower, upper): (usize, Option<usize>) = peekable.size_hint(); <|fim▁hole|> assert_eq!(lower, 10); assert_eq!(upper, Some::<usize>(10)); 
} #[test] fn size_hint_test2() { let a: A<T> = A { begin: 0, end: 10 }; let mut peekable: Peekable<A<T>> = a.peekable(); peekable.next(); let (lower, upper): (usize, Option<usize>) = peekable.size_hint(); assert_eq!(lower, 9); assert_eq!(upper, Some::<usize>(9)); } }<|fim▁end|>
<|file_name|>VectorParameterUI.py<|end_file_name|><|fim▁begin|>########################################################################## # # Copyright (c) 2010, Image Engine Design Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # * Neither the name of Image Engine Design nor the names of any # other contributors to this software may be used to endorse or<|fim▁hole|># THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS # IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
# ########################################################################## import maya.cmds import IECore import IECoreMaya class VectorParameterUI( IECoreMaya.ParameterUI ) : def __init__( self, node, parameter, **kw ) : self.__dim = parameter.getTypedValue().dimensions() if self.__dim == 2: layout = maya.cmds.rowLayout( numberOfColumns = 3, columnWidth3 = [ IECoreMaya.ParameterUI.textColumnWidthIndex, IECoreMaya.ParameterUI.singleWidgetWidthIndex, IECoreMaya.ParameterUI.singleWidgetWidthIndex ] ) elif self.__dim == 3: layout = maya.cmds.rowLayout( numberOfColumns = 4, columnWidth4 = [ IECoreMaya.ParameterUI.textColumnWidthIndex, IECoreMaya.ParameterUI.singleWidgetWidthIndex, IECoreMaya.ParameterUI.singleWidgetWidthIndex, IECoreMaya.ParameterUI.singleWidgetWidthIndex ] ) else: raise RuntimeError("Unsupported vector dimension in VectorParameterUI") IECoreMaya.ParameterUI.__init__( self, node, parameter, layout, **kw ) self.__fields = [] maya.cmds.text( label = self.label(), font = "smallPlainLabelFont", align = "right", annotation = self.description() ) plug = self.plug() for i in range(0, self.__dim) : self.__fields.append( self.__fieldType()( value = parameter.getTypedValue()[i] ) ) maya.cmds.setParent("..") self.replace( self.node(), self.parameter ) def replace( self, node, parameter ) : IECoreMaya.ParameterUI.replace( self, node, parameter ) plug = self.plug() for i in range(0, self.__dim): childPlugName = self.nodeName() + "." 
+ plug.child(i).partialName() maya.cmds.connectControl( self.__fields[i], childPlugName ) self._addPopupMenu( parentUI = self.__fields[i], attributeName = childPlugName ) def __fieldType( self ): if self.parameter.isInstanceOf( IECore.TypeId.V2iParameter ) or self.parameter.isInstanceOf( IECore.TypeId.V3iParameter ): return maya.cmds.intField else: return maya.cmds.floatField IECoreMaya.ParameterUI.registerUI( IECore.TypeId.V2iParameter, VectorParameterUI ) IECoreMaya.ParameterUI.registerUI( IECore.TypeId.V3iParameter, VectorParameterUI ) IECoreMaya.ParameterUI.registerUI( IECore.TypeId.V2fParameter, VectorParameterUI ) IECoreMaya.ParameterUI.registerUI( IECore.TypeId.V2dParameter, VectorParameterUI ) IECoreMaya.ParameterUI.registerUI( IECore.TypeId.V3fParameter, VectorParameterUI ) IECoreMaya.ParameterUI.registerUI( IECore.TypeId.V3dParameter, VectorParameterUI ) IECoreMaya.ParameterUI.registerUI( IECore.TypeId.Color3fParameter, VectorParameterUI, "numeric" )<|fim▁end|>
# promote products derived from this software without specific prior # written permission. #
<|file_name|>basic-spec.js<|end_file_name|><|fim▁begin|>"use strict"; describe("This package", function(){ it("rubs the lotion on its skin, or else", function(){ 2..should.equal(2); // In this universe, it'd damn well better }); it("gets the hose again", function(){ this.should.be.extensible.and.ok; // Eventually }); it("should not fail", function(){ NaN.should.not.equal(NaN); // NaH global.foo = "Foo"; }); it("might be written later"); // Nah it("should fail", function(){ const A = { alpha: "A", beta: "B", gamma: "E", delta: "D", }; const B = { Alpha: "A", beta: "B", gamma: "E", delta: "d", }; A.should.equal(B); }); describe("Suite nesting", function(){ it("does something useful eventually", function(done){ setTimeout(() => done(), 40); }); it("cleans anonymous async functions", async function(){ if(true){ true.should.be.true; } }); it("cleans anonymous generators", function * (){ if(true){ true.should.be.true; } }); it("cleans named async functions", async function foo() { if(true){ true.should.be.true; } }); it("cleans named generators", function * foo (){ if(true){ true.should.be.true; } }); it("cleans async arrow functions", async () => { if(true){ true.should.be.true; } }); }); }); describe("Second suite at top-level", function(){ it("shows another block", function(){ Chai.expect(Date).to.be.an.instanceOf(Function); });<|fim▁hole|> something(); }); it("loads locally-required files", () => { expect(global.someGlobalThing).to.equal("fooXYZ"); }); unlessOnWindows.it("enjoys real symbolic links", () => { "Any Unix-like system".should.be.ok; }); }); describe("Aborted tests", () => { before(() => {throw new Error("Nah, not really")}); it("won't reach this", () => true.should.not.be.false); it.skip("won't reach this either", () => true.should.be.true); });<|fim▁end|>
it("breaks something", function(){
<|file_name|>person.js<|end_file_name|><|fim▁begin|>var Model = require('./model'); var schema = { name : String, stuff: { electronics: [{ type: String }], computing_dev: [{ type: String }] },<|fim▁hole|> biological: Number }, fruits: [ { name: String, fav: Boolean, about: [{ type: String }] } ] }; var person = function(data){ Model.call(this, schema, data); } person.prototype = Object.create(Model.prototype); module.exports = person;<|fim▁end|>
age:{
<|file_name|>partial.py<|end_file_name|><|fim▁begin|>from polybori import BooleSet, interpolate_smallest_lex class PartialFunction(object): """docstring for PartialFunction""" def __init__(self, zeros, ones): super(PartialFunction, self).__init__() self.zeros = zeros.set() self.ones = ones.set() def interpolate_smallest_lex(self): return interpolate_smallest_lex(self.zeros, self.ones) def __str__(self): return "PartialFunction(zeros=" + str(self.zeros) + ", ones=" + str( self.ones) + ")" def definedOn(self): return self.zeros.union(self.ones) def __add__(self, other):<|fim▁hole|> other.zeros)) assert zeros.diff(domain).empty() assert ones.diff(domain).empty() return PartialFunction(zeros, ones) def __repr__(self): return str(self) def __mul__(self, other): zeros = self.zeros.union(other.zeros) ones = self.ones.intersect(other.ones) return PartialFunction(zeros, ones) def __or__(self, other): zeros = self.zeros.intersect(other.zeros) ones = self.ones.union(other.ones) return PartialFunction(zeros, ones) def __xor__(self, other): return self + other def __and__(self, other): return self * other<|fim▁end|>
domain = self.definedOn().intersect(other.definedOn()) zeros = self.zeros.intersect(other.zeros).union(self.ones.intersect( other.ones)) ones = self.zeros.intersect(other.ones).union(self.ones.intersect(
<|file_name|>moleculetype.py<|end_file_name|><|fim▁begin|>""" This file is part of the TheLMA (THe Laboratory Management Application) project. See LICENSE.txt for licensing, CONTRIBUTORS.txt for contributor information. MoleculeType entity classes. """ from everest.entities.base import Entity from everest.entities.utils import slug_from_string __docformat__ = "reStructuredText en" __all__ = ['MoleculeType', 'MOLECULE_TYPE_IDS'] class MOLECULE_TYPE_IDS(object): """ Known molecule types. """ # FIXME: reconcile with `thelma.data.moleculetype` # pylint:disable=W0511 SSDNA = 'SSDNA' AMPLICON = 'AMPLICON' SIRNA = 'SIRNA' COMPOUND = 'COMPOUND' LONG_DSRNA = 'LONG_DSRNA' ANTI_MIR = 'ANTI_MIR' ESI_RNA = 'ESI_RNA' MIRNA_INHI = 'MIRNA_INHI' CLND_DSDNA = 'CLND_DSDNA' MIRNA_MIMI = 'MIRNA_MIMI' __ALL = [nm for nm in sorted(locals().keys()) if not nm.startswith('_')] @classmethod def is_known_type(cls, molecule_type_name): """ Checks whether the given molecule type name is a known one. """ return molecule_type_name in cls.__ALL class MoleculeType(Entity): """ Instances of this class describe molecule types, such as \'siRna\'. """ #: The name of the molecule type. name = None #: A more detailed description. description = None #: An number indicating the time it takes for molecules of this type to<|fim▁hole|> #: A list of modification chemical structures #: (:class:`thelma.entities.chemicalstructure.ChemicalStructure`) #: that are associated with this molecule type. modifications = None #: The default stock concentration for this molecule type. 
default_stock_concentration = None def __init__(self, name, default_stock_concentration, description='', thaw_time=0, modifications=None, **kw): if not 'id' in kw: kw['id'] = name.lower() Entity.__init__(self, **kw) self.name = name self.default_stock_concentration = default_stock_concentration self.description = description self.thaw_time = thaw_time if modifications == None: self.modifications = [] @property def slug(self): #: For instances of this class, the slug is derived from the #: :attr:`name`. return slug_from_string(self.name) def __str__(self): return self.id def __repr__(self): str_format = '<%s id: %s, name: %s, thaw_time: %s>' params = (self.__class__.__name__, self.id, self.name, self.thaw_time) return str_format % params<|fim▁end|>
#: thaw. thaw_time = None
<|file_name|>scene.go<|end_file_name|><|fim▁begin|>// Copyright 2016 Pikkpoiss // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package gamejam type SceneID int type Scene interface { AddComponent(c Component) Load(r Resources) (err error) Unload(r Resources) (err error) Render() Update(mgr SceneManager) SetSceneID(id SceneID) SceneID() SceneID } type BaseScene struct { components map[ComponentID]Component id SceneID } func NewBaseScene() *BaseScene { return &BaseScene{ components: map[ComponentID]Component{}, } } func (s *BaseScene) AddComponent(c Component) { c.SetScene(s) s.components[c.GetID()] = c }<|fim▁hole|>} func (s *BaseScene) Render() { } func (s *BaseScene) SetSceneID(id SceneID) { s.id = id } func (s *BaseScene) SceneID() SceneID { return s.id } func (s *BaseScene) Unload(r Resources) (err error) { var ( id ComponentID c Component ) for id, c = range s.components { s.components[id] = nil c.Delete() } //s.DeleteObservers() return } func (s *BaseScene) Update(mgr SceneManager) { }<|fim▁end|>
func (s *BaseScene) Load(r Resources) (err error) { return
<|file_name|>test_simple_replace.rs<|end_file_name|><|fim▁begin|>use std::process::{Command, Stdio}; use std::io::{Read, Write}; use common::*; #[test] fn simple_replace1() { let _fs = setup(&[tf("file3")]); set_file_content("file3", "baca"); let output = Command::new(BINARY_PATH) .args(&["-s", "a", "-x", "Z", "-Y", "-Q", "it/file3"]) .output() .expect("123"); assert!(output.status.success()); assert_eq!("", String::from_utf8_lossy(&output.stderr)); assert_eq!("", String::from_utf8_lossy(&output.stdout)); assert_eq!("bZcZ", get_file_content("file3")); } #[test] fn ask_user_user_accepts_all() { let _fs = setup(&[tf("file3")]); set_file_content("file3", "baca"); let process = Command::new(BINARY_PATH) .args(&["-s", "a", "-x", "Z", "-C", "-Q", "it/file3"]) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .spawn() .expect("123"); let mut output = String::new(); let mut stdin = process.stdin.unwrap(); stdin.write_all("yy".as_bytes()).unwrap(); let mut stdout = process.stdout.unwrap(); stdout.read_to_string(&mut output).unwrap(); assert_eq!( "Should replace:<|fim▁hole|>baca With: bZca Should replace: bZca With: bZcZ ", output ); assert_eq!("bZcZ", get_file_content("file3")) } #[test] fn ask_user_user_rejects_some() { let _fs = setup(&[tf("file3")]); set_file_content("file3", "babababa"); let process = Command::new(BINARY_PATH) .args(&["-s", "a", "-x", "Z", "-C", "-Q", "it/file3"]) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .spawn() .expect("123"); let mut output = String::new(); let mut stdin = process.stdin.unwrap(); //.write_all("alfa".as_bytes()); // process.stdin.unwrap().write_all("beta".as_bytes()); stdin.write_all("ynyn".as_bytes()).unwrap(); let mut stdout = process.stdout.unwrap(); stdout.read_to_string(&mut output).unwrap(); assert_eq!( "Should replace: babababa With: bZbababa Should replace: bZbababa With: bZbZbaba Should replace: bZbababa With: bZbabZba Should replace: bZbabZba With: bZbabZbZ ", output ); assert_eq!("bZbabZba", get_file_content("file3")); 
}<|fim▁end|>
<|file_name|>does-not-have-mapdata-internal-slot.js<|end_file_name|><|fim▁begin|>// Copyright (C) 2015 the V8 project authors. All rights reserved. // This code is governed by the BSD license found in the LICENSE file. /*--- es6id: 23.1.3.8 description: > Throws a TypeError if `this` object does not have a [[MapData]] internal slot. info: | Map.prototype.keys () 1. Let M be the this value. 2. Return CreateMapIterator(M, "key"). 23.1.5.1 CreateMapIterator Abstract Operation ... 2. If map does not have a [[MapData]] internal slot, throw a TypeError exception. ... ---*/ var m = new Map(); assert.throws(TypeError, function() { Map.prototype.keys.call([]); }); assert.throws(TypeError, function() { m.keys.call([]); }); assert.throws(TypeError, function() { Map.prototype.keys.call({}); }); assert.throws(TypeError, function() { m.keys.call({});<|fim▁hole|><|fim▁end|>
});
<|file_name|>convertToSparseMatrix.py<|end_file_name|><|fim▁begin|>""" Script used to convert data into sparse matrix format that can easily be imported into MATLAB. Use like this python convertToSparseMatrix.py ../../../../../data/train_triplets.txt 1000 ../../../../../data/eval/year1_test_triplets_visible.txt ../../../../../data/eval/year1_test_triplets_hidden.txt 100<|fim▁hole|>import time # Analysing command line arguments if len(sys.argv) < 5: print 'Usage:' print ' python %s <triplets training file> <number of triplets> <triplets visible history file> <triplets hidden history file> <number of triplets>' % sys.argv[0] exit() inputTrainingFile = sys.argv[1] numTriplets = int(sys.argv[2]) inputTestFile = sys.argv[3] inputHiddenTestFile = sys.argv[4] numTripletsTest = int(sys.argv[5]) start = time.time() userIdToIndex = {} # Key: userid, Value: Row in matrix songIdToIndex = {} # Key: songid, Value: Column in matrix userIndex = 0 songIndex = 0 rows = [] columns = [] entries = [] linesRead = 0 maxLines = numTriplets for inputFile in [inputTrainingFile, inputTestFile, inputHiddenTestFile]: linesRead = 0 f = open(inputFile) for line in f: userid, song, songCount = line.strip().split('\t') # Fill in indices if song not in songIdToIndex: songIdToIndex[song] = songIndex songIndex += 1 if userid not in userIdToIndex: userIdToIndex[userid] = userIndex userIndex += 1 # Fill in rows, columns and entries rows.append(userIdToIndex[userid]) columns.append(songIdToIndex[song]) entries.append(int(songCount)) linesRead += 1 if linesRead >= maxLines: break if inputFile == inputTrainingFile: numUsersInTraining = userIndex maxLines = numTripletsTest if inputFile == inputTestFile: numSongs = songIndex numUsers = userIndex numNonZeros = len(entries) rows = rows columns = columns entries = entries # Write to a sparse matrix file that can be read with MATLAB matrix_file = open('UserSongSparseMatrix' + str(numTriplets) + '_' + str(numTripletsTest) + '.txt', 'w') for i in 
range(len(entries)): matrix_file.write(str(rows[i]+1) + "\t" + str(columns[i]+1) + "\t" + str(entries[i]) + "\n") #matrix_file.write(str(numUsers-1) + "\t" + str(numSongs-1) + "\t" + str(0.000000) + "\n") matrix_file.close() # reset everything to zero to read in the hidden matrix rows = [] columns = [] entries = [] if inputFile == inputHiddenTestFile: # Write to a sparse matrix file that can be read with MATLAB matrix_file_test = open('UserSongSparseMatrixTest' + str(numTriplets) + '_' + str(numTripletsTest) + '.txt', 'w') for i in range(len(entries)): matrix_file_test.write(str(rows[i]+1) + "\t" + str(columns[i]+1) + "\t" + str(entries[i]) + "\n") #matrix_file_test.write(str(userIndex-1) + "\t" + str(songIndex-1) + "\t" + str(0.000000) + "\n") matrix_file_test.close() f.close() print "Done loading %d triplets!" % (numTriplets + numTripletsTest) end = time.time() print "Took %s seconds" % (end - start) print "Number of users", numUsers print "Number of songs", numSongs print "You need to predict for the last %s users" % (numUsers - numUsersInTraining)<|fim▁end|>
""" import sys
<|file_name|>views.py<|end_file_name|><|fim▁begin|>import gzip import json import os from django.conf import settings from django.contrib import messages from django.contrib.auth.decorators import login_required from django.core.paginator import EmptyPage from django.core.paginator import PageNotAnInteger from django.core.paginator import Paginator from django.db.models import Avg from django.db.models import Count from django.db.models import Max from django.db.models import Min from django.db.models import Q from django.db.models import Sum from django.http import HttpResponse from django.shortcuts import get_object_or_404 from django.shortcuts import redirect from django.shortcuts import render from django.template import RequestContext from django.utils.text import slugify from django.views.generic import DeleteView from individuals.forms import IndividualForm, ComparisonForm, GroupForm, BrowserForm from individuals.models import Individual, Group from individuals.tasks import VerifyVCF, AnnotateVariants, PopulateVariants from variants.models import Variant def response_mimetype(request): if "application/json" in request.META['HTTP_ACCEPT']: return "application/json" else: return "text/plain" class JSONResponse(HttpResponse): """JSON response class.""" def __init__(self,obj='',json_opts={},mimetype="application/json",*args,**kwargs): content = json.dumps(obj,**json_opts) super(JSONResponse,self).__init__(content,mimetype,*args,**kwargs) def create(request): if request.method == 'POST': form = IndividualForm(request.POST, request.FILES) if form.is_valid(): if request.user.is_authenticated: individual = Individual.objects.create(user=request.user, status='new') else: individual = Individual.objects.create(user=None, status='new') individual.vcf_file= request.FILES.get('file') print('file') print(request.FILES.get('file')) filename = individual.vcf_file.name.split('.') new_filename = [] for tag in filename: new_filename.append(slugify(tag)) 
individual.vcf_file.name = ".".join(new_filename) print('filename ', filename) #get name from inside vcf file individual.name= str(os.path.splitext(individual.vcf_file.name)[0]).replace('.vcf','').replace('.gz','').replace('.rar','').replace('.zip','').replace('._',' ').replace('.',' ') # individual.shared_with_groups = form.cleaned_data['shared_with_groups'] individual.shared_with_groups.set(form.cleaned_data['shared_with_groups']) individual.save() f = individual.vcf_file #fix permissions #os.chmod("%s/genomes/%s/" % (settings.BASE_DIR, individual.user), 0777) #if request.user.is_authenticated: # os.chmod("%s/genomes/%s/%s" % (settings.BASE_DIR, slugify(individual.user), individual.id), 0o777) #else: # os.chmod("%s/genomes/public/%s" % (settings.BASE_DIR, individual.id), 0o777) # AnnotateVariants.delay(individual.id) # VerifyVCF.delay(individual.id) data = {'files': [{'deleteType': 'DELETE', 'name': individual.name, 'url': '', 'thumbnailUrl': '', 'type': 'image/png', 'deleteUrl': '', 'size': f.size}]} response = JSONResponse(data, mimetype=response_mimetype(request)) response['Content-Disposition'] = 'inline; filename=files.json' return response else: print(form.errors) else: form = IndividualForm() return render(request, 'individuals/create.html', {'form':form}) # Create your views here. 
@login_required def edit(request, individual_id): individual = get_object_or_404(Individual, pk=individual_id) if request.method == 'POST': form = IndividualForm(request.POST, instance=individual) if form.is_valid(): form.save() return redirect('dashboard') # form = IndividualForm(request.POST, request.FILES) # if form.is_valid(): # individual = form.save(commit=False) # individual.user = request.user # individual.save() # return redirect('dashboard') else: form = IndividualForm(instance=individual) return render(request, 'individuals/individual_form.html', {'form':form}) class IndividualDeleteView(DeleteView): model = Individual def delete(self, request, *args, **kwargs): """ This does not actually delete the file, only the database record. But that is easy to implement. """ self.object = self.get_object() individual_id = self.object.id if self.object.user: username = self.object.user.username else: username = 'public' #delete files if self.object.vcf_file: self.object.vcf_file.delete() # if self.object.strs_file: # self.object.strs_file.delete() # if self.object.cnvs_file: # self.object.cnvs_file.delete() os.system('rm -rf %s/genomes/%s/%s' % (settings.BASE_DIR, username, individual_id)) self.object.delete() # response = JSONResponse(True, {}, response_mimetype(self.request)) # response['Content-Disposition'] = 'inline; filename=files.json' # return response messages.add_message(request, messages.INFO, "Individual deleted with success!") #return redirect('individuals_list') return redirect('individuals_list') def view(request, individual_id): individual = get_object_or_404(Individual, pk=individual_id) variant_list = Variant.objects.filter(individual=individual) # snpeff = SnpeffAnnotation.objects.filter(individual=individual) individual.n_variants = variant_list.count() individual.novel_variants = variant_list.filter(variant_id = '.').count() individual.summary = [] #get calculated values from database summary_item = { 'type': 'Total SNVs', 'total': 
variant_list.values('genotype').count(), 'discrete': variant_list.values('genotype').annotate(total=Count('genotype')) } individual.summary.append(summary_item) summary_item = { 'type': 'Total Gene-associated SNVs', 'total': variant_list.values('gene').exclude(gene="").count(), 'discrete': variant_list.exclude(gene="").values('genotype').annotate(total=Count('genotype')) } individual.summary.append(summary_item) individual.snp_eff = variant_list.values('snpeff_effect').annotate(Count('snpeff_effect')).order_by('snpeff_effect') # print 'individual.snp_eff', individual.snp_eff # variant_list.values('snpeff__effect').annotate(Count('snpeff__effect')).order_by('snpeff__effect') # individual.functional_class = variant_list.values('snpeff_func_class').annotate(Count('snpeff_func_class')).order_by('snpeff_func_class') individual.impact_variants = variant_list.values('snpeff_impact').annotate(Count('snpeff_impact')).order_by('snpeff_impact') individual.filter_variants = variant_list.values('filter').annotate(Count('filter')).order_by('filter') individual.quality = variant_list.aggregate(Avg('qual'), Max('qual'), Min('qual')) individual.read_depth = variant_list.aggregate(Avg('read_depth'), Max('read_depth'), Min('read_depth')) individual.clinvar_clnsig = variant_list.values('clinvar_clnsig').annotate(total=Count('clinvar_clnsig')) individual.chromossome = variant_list.values('chr').annotate(total=Count('chr')).order_by('chr') # variants_with_snpid = variant_list.values('variant_id').exclude(variant_id=".") #print variants_with_snpid # fields = Variant._meta.get_all_field_names() paginator = Paginator(variant_list, 25) # Show 25 contacts per page try: page = int(request.GET.get('page', '1')) except ValueError: page = 1 try: variants = paginator.page(page) except PageNotAnInteger: # If page is not an integer, deliver first page. variants = paginator.page(1) except EmptyPage: # If page is out of range (e.g. 9999), deliver last page of results. 
variants = paginator.page(paginator.num_pages) #'fields':fields return render(request, 'individuals/view.html', {'individual': individual, 'variants':variants}) @login_required def browse(request, individual_id): query_string = request.META['QUERY_STRING'] individual = get_object_or_404(Individual, pk=individual_id) query = {} # DEFAULT_SORT = 'pk' # sort_key = request.GET.get('sort', DEFAULT_SORT) # tags = ['genotype', 'snpeffannotation__effect']#, 'func_class', 'impact', 'cln_omim', 'chr' # for tag in tags: # criteria = request.GET.get(tag, '') # if criteria: # query[tag] = criteria if request.method == 'GET': form = BrowserForm(request.GET) if form.is_valid(): print('form is valid') #chr chr = request.GET.get('chr', '') if chr != '': query['chr'] = chr #pos pos = request.GET.get('pos', '') if pos != '': query['pos'] = pos effect = request.GET.get('effect', '') if effect != '': print('effect', effect) query['snpeff_effect'] = effect #snp_id # snp_id = request.GET.get('snp_id', '') # if snp_id != '': # query['variant_id'] = snp_id # snp_list = request.GET.get('snp_list', '') # snp_list = snp_list.split('\r\n') # if snp_list[0] != u'': # query['variant_id__in'] = snp_list # snp_eff = request.GET.getlist('effect') # if len(snp_eff) > 0: # query['snp_eff__in'] = snp_eff # func_class = request.GET.getlist('func_class') # if len(func_class) > 0: # query['snp_eff_functional_class__in'] = func_class # gene = request.GET.get('gene', '') # if gene != '': # query['gene_name'] = gene # gene_list = request.GET.get('gene_list', '') # gene_list = gene_list.split('\r\n') # if gene_list[0] != u'': # query['gene_name__in'] = gene_list # cln = request.GET.get('cln_omim', '') # print 'clnomim', cln # if cln == 'on': # query['cln_omim'] != '' variants = Variant.objects.filter(individual=individual, **query) # snpeff_annotations = SnpeffAnnotation.objects.filter(variant__in=variants) # #b.entry_set.filter(headline__contains='Lennon') # print 'snpeff_annotations', 
len(snpeff_annotations) # for variant in variants: # print variant.entry_set.all() # variant.snpeff= else: form = BrowserForm(request.GET) variants = Variant.objects.filter(individual=individual, **query) #Pagination paginator = Paginator(variants, 25) # Show 25 contacts per page try: page = int(request.GET.get('page', '1')) except ValueError: page = 1 try: variants = paginator.page(page) except PageNotAnInteger: # If page is not an integer, deliver first page. variants = paginator.page(1) except EmptyPage: # If page is out of range (e.g. 9999), deliver last page of results. variants = paginator.page(paginator.num_pages) return render(request, 'variants/variants.html', {'individual': individual, 'variants':variants, 'form':form, 'query_string':query_string}) @login_required def list(request): if request.method == 'POST': individuals = request.POST.getlist('individuals') print(individuals) individuals = [int(x) for x in individuals] print(individuals) if request.POST['selectionField'] == "Show": for individual_id in individuals: individual = get_object_or_404(Individual, pk=individual_id) individual.is_featured = True individual.save() if request.POST['selectionField'] == "Hide": for individual_id in individuals: individual = get_object_or_404(Individual, pk=individual_id) individual.is_featured = False individual.save() if request.POST['selectionField'] == "Delete": for individual_id in individuals: individual = get_object_or_404(Individual, pk=individual_id) individual_id = individual.id username = individual.user.username #delete files if individual.vcf_file: individual.vcf_file.delete() # if individual.strs_file: # individual.strs_file.delete() # if individual.cnvs_file: # individual.cnvs_file.delete() os.system('rm -rf %s/genomes/%s/%s' % (settings.BASE_DIR, username, individual_id)) individual.delete() #os.system('rm -rf mendelmd14/site_media/media/genomes/%s/%s' % (username, individual_id)) if request.POST['selectionField'] == "Populate": for individual_id in 
individuals: individual = get_object_or_404(Individual, pk=individual_id) PopulateVariants.delay(individual.id) if request.POST['selectionField'] == "Annotate": for individual_id in individuals: individual = get_object_or_404(Individual, pk=individual_id) AnnotateVariants.delay(individual.id) if request.POST['selectionField'] == "Find_Medical_Conditions_and_Medicines": for individual_id in individuals: individual = get_object_or_404(Individual, pk=individual_id) Find_Medical_Conditions_and_Medicines.delay(individual.id) args = [] # groups = Groups.objects.filter(user=request.user, shared_with_users=).order_by("-id") args.append(Q(user=request.user) | Q(shared_with_users=request.user) | Q(shared_with_groups__members=request.user)) if request.user.is_staff: individuals = Individual.objects.all() else: individuals = Individual.objects.filter(*args).order_by("-id") ind_featured = Individual.objects.filter(is_featured= True).order_by("id") # paginator = Paginator(individuals, 25) # Show 25 contacts per page # try: # page = int(request.GET.get('page', '1')) # except ValueError: # page = 1 # try: # individuals = paginator.page(page) # except PageNotAnInteger: # # If page is not an integer, deliver first page. # individuals = paginator.page(1) # except EmptyPage: # # If page is out of range (e.g. 9999), deliver last page of results. 
# individuals = paginator.page(paginator.num_pages) groups = Group.objects.all() # individuals = Individual.objects.annotate(number_of_variants=Count('variant')) return render(request, 'individuals/list.html', {'individuals': individuals, 'groups':groups, 'ind_featured':ind_featured}) @login_required def annotate(request, individual_id): individual = get_object_or_404(Individual, pk=individual_id) individual.status = 'new' individual.n_lines = 0 VerifyVCF.delay(individual.id) individual.save() messages.add_message(request, messages.INFO, "Your individual is being annotated.") return redirect('dashboard') @login_required def populate(request, individual_id): individual = get_object_or_404(Individual, pk=individual_id) PopulateVariants.delay(individual.id) messages.add_message(request, messages.INFO, "Your individual is being populated.") return redirect('dashboard') @login_required def populate_mongo(request, individual_id): individual = get_object_or_404(Individual, pk=individual_id) PopulateMongoVariants.delay(individual.id) messages.add_message(request, messages.INFO, "Your individual is being inserted at MongoDB.") return redirect('individuals_list') def download(request, individual_id): individual = get_object_or_404(Individual, pk=individual_id) filepath = os.path.dirname(str(individual.vcf_file.name)) filename = os.path.basename(str(individual.vcf_file.name)) path = '' # os.chmod("%s/genomes/%s/%s" % (settings.MEDIA_ROOT, individual.user, individual.id), 0777) # if filename.endswith('vcf.zip'): # basename = filename.split('.vcf.zip')[0] # elif filename.endswith('.zip'): # basename = filename.split('.zip')[0] # else: # basename = filename.split('.vcf')[0] #print basename #print path #print filepath fullpath = '%s/%s' % (filepath, filename) if filename.endswith('.gz'): vcffile = gzip.open(fullpath, 'r') else: vcffile = open(fullpath, 'r') content = vcffile.read() vcffile.close() response = HttpResponse(content, content_type='text/plain') 
response['Content-Disposition'] = 'attachment; filename=%s' % filename response['Content-Length'] = os.path.getsize(fullpath) return response def download_annotated(request, individual_id): individual = get_object_or_404(Individual, pk=individual_id) filepath = os.path.dirname(str(individual.vcf_file.name)) filename = os.path.basename(str(individual.vcf_file.name)) # path = settings.MEDIA_ROOT # if filename.endswith('vcf.zip'): # basename = filename.split('.vcf.zip')[0] # else: basename = filename.split('.vcf')[0] fullpath = '%s/annotation.final.vcf.zip' % (filepath) vcffile = open(fullpath, 'rb') response = HttpResponse(vcffile, content_type='application/x-zip-compressed') # # response['Content-Encoding'] = 'gzip' response['Content-Disposition'] = 'attachment; filename=%s.annotated.mendelmd.vcf.zip' % basename response['Content-Length'] = os.path.getsize(fullpath) return response @login_required def create_group(request): if request.method == 'POST': form = GroupForm(request.POST, request.FILES) if form.is_valid(): form.save() return redirect('individuals_list') else: form = GroupForm() return render(request, 'groups/create_group.html', {'form': form}) @login_required def view_group(request, group_id): group = get_object_or_404(Group, pk=group_id) return render(request, 'groups/view_group.html', {'group': group}) class GroupDeleteView(DeleteView): model = Group def delete(self, request, *args, **kwargs): """ This does not actually delete the file, only the database record. But that is easy to implement. 
""" self.object = self.get_object() #username = self.object.user.username self.object.delete() messages.add_message(request, messages.INFO, "Group deleted with success!") return redirect('individuals_list') def comparison(request): query = {} summary = {} variants = [] query_string = request.META['QUERY_STRING'] if request.method == 'GET': form = ComparisonForm(request.user, request.GET, request.FILES) if form.is_valid(): individual_one_id = request.GET.get('individual_one', '') individual_two_id = request.GET.get('individual_two', '')<|fim▁hole|> query['read_depth__gte'] = float(read_depth) if individual_one_id != '' and individual_two_id != '': variants_ind_one = Variant.objects.filter(individual__id=individual_one_id, **query).values('chr', 'pos', 'genotype') variants_ind_two = Variant.objects.filter(individual__id=individual_two_id, **query).values('chr', 'pos', 'genotype') print('Got Variants from Both!') genotypes_in_common = 0 genotypes_not_in_common = 0 ind_one = {} ind_two = {} summary['variants_ind_one'] = variants_ind_one.count() for variant in variants_ind_one: id = '%s-%s' % (variant['chr'], variant['pos']) if id in ind_one: ind_one[id].append(variant['genotype']) else: ind_one[id] = [] ind_one[id].append(variant['genotype']) summary['variants_ind_two'] = variants_ind_two.count() for variant in variants_ind_two: id = '%s-%s' % (variant['chr'], variant['pos']) if id in ind_two: ind_two[id].append(variant['genotype']) else: ind_two[id] = [] ind_two[id].append(variant['genotype']) print('Finished creating indexes') for pos in ind_one: if pos in ind_two: for genotype in ind_one[pos]: if genotype in ind_two[pos]: genotypes_in_common += 1 # variant ={} # variant['chr'] = item.split('-')[0] # variant['pos'] = item.split('-')[1] # variant['genotype'] = ind_two[item] # variants.append(variant) else: genotypes_not_in_common += 1 # print('genotypes in common: %s' % genotypes_in_common) summary['genotypes_in_common'] = genotypes_in_common 
summary['genotypes_not_in_common'] = genotypes_not_in_common summary['total_variants'] = genotypes_in_common + genotypes_not_in_common summary['percent_ind_one'] = round((float(genotypes_in_common)/summary['variants_ind_one'])*100, 2) summary['percent_ind_two'] = round((float(genotypes_in_common)/summary['variants_ind_two'])*100, 2) print(summary) else: form = ComparisonForm(request.user) return render(request, 'individuals/comparison.html', {'form':form, 'summary':summary, 'query_string':query_string})<|fim▁end|>
read_depth = request.GET.get('read_depth', '') if read_depth != '':
<|file_name|>random_sources.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # -*- mode: python -*- """Sources of random data Copyright (C) 2013 Dan Meliza <[email protected]> Created Wed May 29 14:50:02 2013 """ from mspikes import util from mspikes.types import DataBlock, Source, Node, tag_set from numpy.random import RandomState class rand_samples(Source): """Generates random values from N(0,1)""" seed = 1 nsamples = 4096 def __init__(self, **options): util.set_option_attributes(self, options, seed=1, nsamples=4096) self.chunk_size = 1024 self.channel = "random" self.sampling_rate = 1 self._randg = RandomState(self.seed) @classmethod def options(cls, addopt_f, **defaults): addopt_f("--seed", help="seed for random number generator", type=int, metavar='INT', default=defaults.get('seed',cls.seed)) addopt_f("--nsamples", help="number of samples to generate",<|fim▁hole|> type=int, metavar='INT', default=defaults.get('nsamples',cls.nsamples)) def data(self, t=0): """Generates a data chunk""" return DataBlock(id=self.channel, offset=t, ds=self.sampling_rate, data=self._randg.randn(self.chunk_size), tags=tag_set("samples")) def __iter__(self): t = 0 while t < self.nsamples: data = self.data(t) Node.send(self, data) yield data t += self.chunk_size ## TODO random_events # Variables: # End:<|fim▁end|>
<|file_name|>version.py<|end_file_name|><|fim▁begin|># Copyright (c) 2017, The MITRE Corporation. All rights reserved. # See LICENSE.txt for complete terms.<|fim▁hole|><|fim▁end|>
__version__ = "1.2.0.11"
<|file_name|>rektifye.js<|end_file_name|><|fim▁begin|>showWord(["v. ","korije, redrese."<|fim▁hole|><|fim▁end|>
])
<|file_name|>fabfile.py<|end_file_name|><|fim▁begin|>from __future__ import with_statement from fabric.api import local, abort, run, cd, env from fabric.context_managers import prefix env.directory = '/home/pestileaks/pestileaks' env.activate = 'source /home/pestileaks/env/bin/activate' env.user = 'pestileaks' env.hosts = ['pestileaks.nl'] env.restart = 'killall -HUP gunicorn' #Show current status versus current github master state def status(): with cd(env.directory): run('git status') def deploy(): with cd(env.directory): run("git pull") #run("rm -rf /home/pestileaks/run/static") run("mkdir -p /home/pestileaks/run/static") with prefix(env.activate): run("if [ doc/requirements.txt -nt doc/requirements.pyc ]; then pip install -r doc/requirements.txt; touch doc/requirements.pyc; fi") run('./manage.py syncdb') run('./manage.py migrate --noinput') run('./manage.py collectstatic --noinput') <|fim▁hole|> run(env.restart)<|fim▁end|>
<|file_name|>IVisitedSort.java<|end_file_name|><|fim▁begin|>package visitors; /**<|fim▁hole|> Object accept(ISortVisitor visitor); }<|fim▁end|>
* Created by stratosphr on 20/11/15. */ public interface IVisitedSort extends IVisited {
<|file_name|>disables_wer.py<|end_file_name|><|fim▁begin|># Copyright (C) 2015 Kevin Ross # This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org # See the file 'docs/LICENSE' for copying permission. from lib.cuckoo.common.abstracts import Signature class DisablesWER(Signature): name = "disables_wer" description = "Attempts to disable Windows Error Reporting" severity = 3 categories = ["stealth"] authors = ["Kevin Ross"] minimum = "1.2" def run(self): if self.check_write_key(pattern=".*\\\\SOFTWARE\\\\(Wow6432Node\\\\)?Microsoft\\\\Windows\\\\Windows\\ Error\\ Reporting\\\\Disabled$", regex=True):<|fim▁hole|><|fim▁end|>
return True return False
<|file_name|>type_enforcement.rs<|end_file_name|><|fim▁begin|>use crate::ast::AstNode; use secsp_parser::syntax::{SyntaxKind, SyntaxNode}; use secsp_syntax_derive::AstType; #[derive(AstType)] #[ast(kind = "NODE_TE_RULE")] pub struct TeRule(SyntaxNode); #[derive(Debug, PartialEq, Eq)] pub enum TeRuleKind { Allow, AuditAllow, DontAudit, NeverAllow, }<|fim▁hole|> impl TeRule { fn rule_kind(&self) -> TeRuleKind { self.syntax() .children_with_tokens() .find_map(|child| { if let Some(tok) = child.into_token() { let rule_kind = match tok.kind() { SyntaxKind::KW_ALLOW => TeRuleKind::Allow, SyntaxKind::KW_AUDIT_ALLOW => TeRuleKind::AuditAllow, SyntaxKind::KW_DONT_AUDIT => TeRuleKind::DontAudit, SyntaxKind::KW_NEVER_ALLOW => TeRuleKind::NeverAllow, _ => return None, }; Some(rule_kind) } else { None } }) .expect("TeRule nodes must have a TeRuleType token") } } #[cfg(test)] mod tests { use super::*; use crate::ast::testing::parse_and_find; fn test_rule_kind(kind: TeRuleKind, code: &str) { let rule: TeRule = parse_and_find(code); assert_eq!(kind, rule.rule_kind()); } #[test] fn test_allow() { test_rule_kind(TeRuleKind::Allow, "allow src dest : perms;"); } #[test] fn test_dont_audit() { test_rule_kind(TeRuleKind::DontAudit, "dont_audit src dest : perms;"); } #[test] fn test_audit_allow() { test_rule_kind(TeRuleKind::AuditAllow, "audit_allow src dest : perms;"); } #[test] fn test_never_allow() { test_rule_kind(TeRuleKind::NeverAllow, "never_allow src dest : perms;"); } }<|fim▁end|>
<|file_name|>profile.py<|end_file_name|><|fim▁begin|>from typing import Any, List, Tuple, Dict #cast from sphinx.application import Sphinx # from sphinx.ext.autodoc import Documenter from sphinx.ext.autodoc import ModuleLevelDocumenter from sphinx.pycode import ModuleAnalyzer, PycodeError #from sphinx.domains.python import PythonDomain from sphinx.locale import __ from sphinx.domains.python import PyObject from sphinx import addnodes from sphinx.util.inspect import signature as Signature from sphinx.util.inspect import stringify_signature import logging logger = logging.getLogger(__name__) # we can get source code first line numbers with this module for object import inspect from fontbakery.callable import ( FontbakeryCallable , FontBakeryCondition , FontBakeryCheck , Disabled , FontBakeryExpectedValue ) # mute the style checks for unused names # will be removed eventually if False: #pylint: disable=using-constant-test FontbakeryCallable FontBakeryCondition FontBakeryCheck Disabled FontBakeryExpectedValue __version__ = '0.0.1' # ModuleLevelDocumenter(Documenter): Specialized Documenter subclass for objects on module level (functions, # classes, data/constants). Implements: resolve_name # https://github.com/sphinx-doc/sphinx/blob/master/sphinx/ext/autodoc/__init__.py#L850 # Documenter class FontBakeryCallableDocumenter(ModuleLevelDocumenter): """ Specialized Documenter subclass for instances of FontBakeryCheck. """ objtype = 'fontbakerycallable' can_doc_cls = FontbakeryCallable member_order = 30 @classmethod def can_document_member(cls, member, membername, isattr, parent): # type: (Any, str, bool, Any) -> bool return isinstance(member, cls.can_doc_cls) def format_args(self): # pylint: disable=arguments-differ # I am really not sure what went wrong here... # type: () -> str # We use the original signature from the wrapped _function has_retval = isinstance(self.object, FontBakeryCondition) if not hasattr(self.object, '_func'): # FIXME! I don't know what's this. 
return None sig = Signature(self.object._func, bound_method=False, has_retval=has_retval) args = stringify_signature(sig) # escape backslashes for reST args = args.replace('\\', '\\\\') return args def format_name(self): # I'm using this to inject some new info into the check # search for the separator ":::" in this document to see where # the info is received. This is not a clean solution! # # in https://github.com/sphinx-doc/sphinx/blob/master/sphinx/ext/autodoc/__init__.py#L374 # it says: # > This normally should be something that can be parsed by the generated # > directive, but doesn't need to be (Sphinx will display it unparsed # > then). # See below in `handle_signature` # where that ipdb debugger is started, usually that eception would be # dropped and we drop out of signature building. (RAISED here in `_handle_signature` # The ValueError when the regex doesn't match...) # seems like the slash (/) Is killing most of the header! # Otherwise the ids display fine, the dots are fine. # Also, in any case of name change, the [source] view is killed (removed!) 
# the document and also genindex.html anchor works so far (with 7 instead of /) # res = super().format_name() if self.objtype == 'fontbakerycheck': # A bit hackish, splitting somwhere else by ::: to retrieve the checkid # we can get the source file first line number of self.object: lineno = inspect.getsourcelines(self.object)[1] res = self.object.id + ':::' + f'{lineno}' + ':::' + res#.replace('/', '7') # else: # res = super().format_name() # print('formatted name:', res) # > formatted name: com.google.fonts/check/xavgcharwidth:::59:::com_google_fonts_check_xavgcharwidth # > formatted name: bold_wght_coord return res # handle_signature: com_google_fonts_check_post_table_version(ttFont, is_ttf) <desc_signature first="False"/> # sig signature: com_google_fonts_check_post_table_version(ttFont, is_ttf)<|fim▁hole|> # type: (Any, str, bool, bool) -> None """Generate reST for the object given by *self.name*, and possibly for its members. If *more_content* is given, include that content. If *real_modname* is given, use that module name to find attribute docs. If *check_module* is True, only generate if the object is defined in the module name it is imported from. If *all_members* is True, document all members. 
""" # print('generate', more_content, real_modname, check_module, all_members) # print(self.name) # print('---------------------') # > generate None fontbakery.profiles.post True True # > fontbakery.profiles.post::com_google_fonts_check_post_table_version # > --------------------- # # > generate None fontbakery.profiles.shared_conditions True True # > fontbakery.profiles.shared_conditions::glyph_metrics_stats # > --------------------- if not self.parse_name(): # need a module to import logger.warning( __('don\'t know which module to import for autodocumenting ' '%r (try placing a "module" or "currentmodule" directive ' 'in the document, or giving an explicit module name)') % self.name, type='autodoc') return # now, import the module and get object to document if not self.import_object(): return # doesn't do anything! # if self.objtype == 'fontbakerycheck': # self.name = self.object.id # If there is no real module defined, figure out which to use. # The real module is used in the module analyzer to look up the module # where the attribute documentation would actually be found in. # This is used for situations where you have a module that collects the # functions and classes of internal submodules. self.real_modname = real_modname or self.get_real_modname() # type: str # try to also get a source code analyzer for attribute docs try: self.analyzer = ModuleAnalyzer.for_module(self.real_modname) # parse right now, to get PycodeErrors on parsing (results will # be cached anyway) self.analyzer.find_attr_docs() except PycodeError as err: logger.debug('[autodoc] module analyzer failed: %s', err) # no source file -- e.g. 
for builtin and C modules self.analyzer = None # at least add the module.__file__ as a dependency if hasattr(self.module, '__file__') and self.module.__file__: self.directive.filename_set.add(self.module.__file__) else: self.directive.filename_set.add(self.analyzer.srcname) # check __module__ of object (for members not given explicitly) if check_module: if not self.check_module(): return sourcename = self.get_sourcename() # make sure that the result starts with an empty line. This is # necessary for some situations where another directive preprocesses # reST and no starting newline is present self.add_line('', sourcename) # format the object's signature, if any sig = self.format_signature() # generate the directive header and options, if applicable self.add_directive_header(sig) self.add_line('', sourcename) # e.g. the module directive doesn't have content self.indent += self.content_indent # add all content (from docstrings, attribute docs etc.) self.add_content(more_content) # document members, if possible self.document_members(all_members) class FontBakeryCheckDocumenter(FontBakeryCallableDocumenter): objtype = 'fontbakerycheck' can_doc_cls = FontBakeryCheck class FontBakeryConditionDocumenter(FontBakeryCallableDocumenter): objtype = 'fontbakerycondition' can_doc_cls = FontBakeryCondition from sphinx.domains.python import _pseudo_parse_arglist import re # REs for Python signatures py_sig_re = re.compile( r'''^ ([\w.]*\.)? # class name(s) (\w+) \s* # thing name (?: \(\s*(.*)\s*\) # optional: arguments (?:\s* -> \s* (.*))? # return annotation )? $ # and nothing more ''', re.VERBOSE) # PyObject: https://github.com/sphinx-doc/sphinx/blob/master/sphinx/domains/python.py#L189 # PyObject is a subclass of sphinx.directives.ObjectDescription # ObjectDescription is a sphinx.util.docutils.SphinxDirective # SphinxDirective is a docutils.parsers.rst.Directive class PyFontBakeryObject(PyObject): """ Description of a class-like object (classes, interfaces, exceptions). 
""" allow_nesting = True @property def pretty_objtype(self): if self.objtype.startswith('fontbakery'): suffix = self.objtype[len('fontbakery'):] return 'FontBakery' + suffix[0].upper() + suffix[1:] return self.objtype def get_signature_prefix(self, sig): # type: (str) -> str # import ipdb # ipdb.set_trace() # print('sig signature:', sig) # > sig signature: com_google_fonts_check_all_glyphs_have_codepoints(ttFont) return self.pretty_objtype + ' ' # this is bullshit, returns two values but manipulates # signode massively, which is undocumented. # signode is an instance of <class 'sphinx.addnodes.desc_signature'> # from https://github.com/sphinx-doc/sphinx/blob/master/sphinx/domains/python.py#L237 def _handle_signature(self, cid, lineno, sig, signode): # type: (str, addnodes.desc_signature) -> Tuple[str, str] """Transform a Python signature into RST nodes. Return (fully qualified name of the thing, classname if any). If inside a class, the current class name is handled intelligently: * it is stripped from the displayed name if present * it is added to the full name (return value) if not present This is the xml string result of signode, whitespace is not equivalent for readability. <desc_signature class="" first="False" fullname="com.google.fonts/check/all_glyphs_have_codepoints" module="fontbakery.profiles.cmap" > <desc_annotation xml:space="preserve">FontBakeryCheck </desc_annotation> <desc_addname xml:space="preserve">fontbakery.profiles.cmap.</desc_addname> <desc_name xml:space="preserve">com_google_fonts_check_all_glyphs_have_codepoints</desc_name> <desc_parameterlist xml:space="preserve"> <desc_parameter xml:space="preserve">ttFont</desc_parameter> </desc_parameterlist> </desc_signature> """ m = py_sig_re.match(sig) if m is None: # this is the immediate fail!!! 
raise ValueError prefix, name, arglist, retann = m.groups() # print('prefix, name, arglist, retann =', prefix, name, arglist, retann) # > prefix, name, arglist, retann = None com_google_fonts_check_all_glyphs_have_codepoints ttFont None # determine module and class name (if applicable), as well as full name modname = self.options.get('module', self.env.ref_context.get('py:module')) classname = self.env.ref_context.get('py:class') if classname: add_module = False if prefix and (prefix == classname or prefix.startswith(classname + ".")): fullname = prefix + name # class name is given again in the signature prefix = prefix[len(classname):].lstrip('.') elif prefix: # class name is given in the signature, but different # (shouldn't happen) fullname = classname + '.' + prefix + name else: # class name is not given in the signature fullname = classname + '.' + name else: add_module = True if prefix: classname = prefix.rstrip('.') fullname = prefix + name else: classname = '' fullname = name signode['module'] = modname signode['class'] = classname signode['fullname'] = fullname signode.attributes['lineno'] = lineno #sig_prefix = self.get_signature_prefix(sig) #if sig_prefix: # signode += addnodes.desc_annotation(sig_prefix, sig_prefix) if prefix: signode += addnodes.desc_addname(prefix, prefix) elif add_module and self.env.config.add_module_names: if modname and modname != 'exceptions': # exceptions are a special case, since they are documented in the # 'exceptions' module. 
#nodetext = modname + ' ID: ' #signode += addnodes.desc_addname(nodetext, nodetext) pass signode += addnodes.desc_name(name, cid) if arglist: _pseudo_parse_arglist(signode, arglist) else: if self.needs_arglist(): # for callables, add an empty parameter list signode += addnodes.desc_parameterlist() if retann: signode += addnodes.desc_returns(retann, retann) anno = self.options.get('annotation') if anno: signode += addnodes.desc_annotation(' ' + anno, ' ' + anno) return cid, prefix def handle_signature(self, sig, signode): # print('>>>>>>>>>>>>>>>>>handle_signature:', sig, signode) # > >>>>>>>>>>>>>>>>>handle_signature: com.google.fonts/check/all_glyphs_have_codepoints:::36:::com_google_fonts_check_all_glyphs_have_codepoints(ttFont) <desc_signature first="False"/> cid = None if ':::' in sig: cid, lineno, sig = sig.split(':::') # print('GOT id:', cid, lineno, 'for:', sig) # > GOT id: com.google.fonts/check/all_glyphs_have_codepoints 36 for: com_google_fonts_check_all_glyphs_have_codepoints(ttFont) res = '(NONE!)' try: res = self._handle_signature(cid, lineno, sig, signode) if cid is not None\ else super().handle_signature(sig, signode) except Exception as e: print('!!!', e) raise e return res # This ends in: path-to-docs/html/genindex.html def get_index_text(self, modname, name): # type: (str, Tuple[str, str]) -> str return f'{name[0]} ({self.pretty_objtype} in {modname})' # fontbakerycheck # modname: fontbakery.profiles.cmap # name_cls:('com_google_fonts_check_all_glyphs_have_codepoints', None) # return f' {self.objtype} modname: {modname} name_cls:{name_cls}' def add_target_and_index(self, name_cls, sig, signode): # type: (Tuple[str, str], str, addnodes.desc_signature) -> None modname = self.options.get('module', self.env.ref_context.get('py:module')) # fullname = (modname and modname + '.' 
or '') + name_cls[0] fullname = name_cls[0] # note target if fullname not in self.state.document.ids: signode['names'].append(fullname) signode['ids'].append(fullname) signode['first'] = (not self.names) self.state.document.note_explicit_target(signode) # note, there will be a change to this in a future release # https://github.com/sphinx-doc/sphinx/commit/259be8716ad4b2332aa4d7693d73400eb06fa7d7 ## in the past (now) objects = self.env.domaindata['py']['objects'] if fullname in objects: self.state_machine.reporter.warning( 'duplicate object description of %s, ' % fullname + 'other instance in ' + self.env.doc2path(objects[fullname][0]) + ', use :noindex: for one of them', line=self.lineno) objects[fullname] = (self.env.docname, self.objtype) ## in the future # domain = cast(PythonDomain, self.env.get_domain('py')) # domain.note_object(fullname, self.objtype) indextext = self.get_index_text(modname, name_cls) if indextext: self.indexnode['entries'].append(('single', indextext, fullname, '', None)) # Copied a lot from napoleon extension: # https://github.com/sphinx-doc/sphinx/blob/master/sphinx/ext/napoleon/__init__.py # To get started, hooking into autodoc seems the way to go, hence that was # a good fit. def setup(app): # type: (Sphinx) -> Dict[str, Any] """Sphinx extension setup function. When the extension is loaded, Sphinx imports this module and executes the ``setup()`` function, which in turn notifies Sphinx of everything the extension offers. 
Parameters ---------- app : sphinx.application.Sphinx Application object representing the Sphinx process See Also -------- `The Sphinx documentation on Extensions <http://sphinx-doc.org/extensions.html>`_ `The Extension Tutorial <http://sphinx-doc.org/extdev/tutorial.html>`_ `The Extension API <http://sphinx-doc.org/extdev/appapi.html>`_ """ if not isinstance(app, Sphinx): # probably called by tests return {'version': __version__, 'parallel_read_safe': True} # _patch_python_domain() #=> this: app.add_autodocumenter(FontBakeryCallableDocumenter) app.add_autodocumenter(FontBakeryCheckDocumenter) app.add_autodocumenter(FontBakeryConditionDocumenter) # https://github.com/sphinx-doc/sphinx/blob/master/sphinx/domains/python.py app.add_directive_to_domain('py', 'fontbakerycallable', PyFontBakeryObject, override=False) app.add_directive_to_domain('py', 'fontbakerycheck', PyFontBakeryObject, override=False) app.add_directive_to_domain('py', 'fontbakerycondition', PyFontBakeryObject, override=False) # => see e.g.: https://github.com/sphinx-doc/sphinx/blob/master/sphinx/ext/autodoc/__init__.py#L984 app.setup_extension('sphinx.ext.autodoc') app.connect('autodoc-process-docstring', _process_docstring) app.connect('autodoc-skip-member', _skip_member) #for name, (default, rebuild) in Config._config_values.items(): # app.add_config_value(name, default, rebuild) return {'version': __version__, 'parallel_read_safe': True} def _skip_member(app, what, name, obj, skip, options): # type: (Sphinx, str, str, Any, bool, Any) -> bool """Determine if private and special class members are included in docs. 
The following settings in conf.py determine if private and special class members or init methods are included in the generated documentation: * ``napoleon_include_init_with_doc`` -- include init methods if they have docstrings * ``napoleon_include_private_with_doc`` -- include private members if they have docstrings * ``napoleon_include_special_with_doc`` -- include special members if they have docstrings Parameters ---------- app : sphinx.application.Sphinx Application object representing the Sphinx process what : str A string specifying the type of the object to which the member belongs. Valid values: "module", "class", "exception", "function", "method", "attribute". name : str The name of the member. obj : module, class, exception, function, method, or attribute. For example, if the member is the __init__ method of class A, then `obj` will be `A.__init__`. skip : bool A boolean indicating if autodoc will skip this member if `_skip_member` does not override the decision options : sphinx.ext.autodoc.Options The options given to the directive: an object with attributes inherited_members, undoc_members, show_inheritance and noindex that are True if the flag option of same name was given to the auto directive. Returns ------- bool True if the member should be skipped during creation of the docs, False if it should be included in the docs. """ if name in ['check_skip_filter', 'conditions', 'configs', 'description', 'documentation', 'force', 'id', 'is_librebarcode', 'name', 'proposal', 'rationale', 'severity']: return True else: return None def _process_docstring(app, what, name, obj, options, lines): # type: (Sphinx, str, str, Any, Any, List[str]) -> None """Process the docstring for a given python object. Called when autodoc has read and processed a docstring. `lines` is a list of docstring lines that `_process_docstring` modifies in place to change what Sphinx outputs. 
The following settings in conf.py control what styles of docstrings will be parsed: * ``napoleon_google_docstring`` -- parse Google style docstrings * ``napoleon_numpy_docstring`` -- parse NumPy style docstrings Parameters ---------- app : sphinx.application.Sphinx Application object representing the Sphinx process. what : str A string specifying the type of the object to which the docstring belongs. Valid values: "module", "class", "exception", "function", "method", "attribute". name : str The fully qualified name of the object. obj : module, class, exception, function, method, or attribute The object to which the docstring belongs. options : sphinx.ext.autodoc.Options The options given to the directive: an object with attributes inherited_members, undoc_members, show_inheritance and noindex that are True if the flag option of same name was given to the auto directive. lines : list of str The lines of the docstring, see above. .. note:: `lines` is modified *in place* """ if hasattr(obj, 'rationale') and obj.rationale: lines.append("**Rationale:**") for line in obj.rationale.split('\n'): lines.append(line) if hasattr(obj, 'proposal') and obj.proposal: proposal = obj.proposal if not isinstance(obj.proposal, list): proposal = [obj.proposal] proposals = [p for p in proposal if "legacy:" not in p] legacy_name = [p.split('legacy:')[1] for p in proposal if "legacy:" in p] if legacy_name: lines.append(f"**Legacy check** originally simply called '{legacy_name[0]}'." f" We used to lack richer metadata back in 2015. We're open to" f" further improvements to this description.") else: if proposals: lines.append(f"**Originally proposed at** {proposals.pop(0)}") if proposals: proposals = ' / '.join(proposals) lines.append(f"**Some additional changes** were proposed at {proposals}")<|fim▁end|>
# result: ('com_google_fonts_check_post_table_version', None) signode: <desc_signature class="" first="False" fullname="com_google_fonts_check_post_table_version" module="fontbakery.profiles.post"><desc_annotation xml:space="preserve">FontBakeryCheck </desc_annotation><desc_addname xml:space="preserve">fontbakery.profiles.post.</desc_addname><desc_name xml:space="preserve">com_google_fonts_check_post_table_version</desc_name><desc_parameterlist xml:space="preserve"><desc_parameter xml:space="preserve">ttFont</desc_parameter><desc_parameter xml:space="preserve">is_ttf</desc_parameter></desc_parameterlist></desc_signature> def generate(self, more_content=None, real_modname=None, check_module=False, all_members=False):
<|file_name|>error.rs<|end_file_name|><|fim▁begin|>use std::io; use std::result; use mqtt3; use tokio_timer::TimerError; pub type Result<T> = result::Result<T, Error>; quick_error! { #[derive(Debug)] pub enum Error { Io(err: io::Error) { from() description("io error") display("I/O error: {}", err) cause(err) } Mqtt3(err: mqtt3::Error) { from() display("mqtt3 error: {}", err) description("Mqtt3 error {}")<|fim▁hole|> description("Timer error") cause(err) display("timer error: {}", err) } NoClient { description("No client with this ID") } ClientIdExists { description("Client with that ID already exists") } InvalidMqttPacket { description("Invalid Mqtt Packet") } InvalidClientId { description("Invalid Client ID") } DisconnectRequest { description("Received Disconnect Request") } NotInQueue { description("Couldn't find requested message in the queue") } DisconnectPacket { description("Received disconnect packet from client") } Other } }<|fim▁end|>
cause(err) } Timer(err: TimerError) { from()
<|file_name|>k_medoids.py<|end_file_name|><|fim▁begin|>""" Special purpose k - medoids algorithm """ import numpy as np def fit(sim_mat, D_len, cidx): """ Algorithm maximizes energy between clusters, which is distinction in this algorithm. Distance matrix contains mostly 0, which are overlooked due to search of maximal distances. Algorithm does not try to retain k clusters. D: numpy array - Symmetric distance matrix k: int - number of clusters """ min_energy = np.inf for j in range(3): # select indices in each sample that maximizes its dimension inds = [np.argmin([sim_mat[idy].get(idx, 0) for idx in cidx]) for idy in range(D_len) if idy in sim_mat] cidx = [] energy = 0 # current enengy for i in np.unique(inds): indsi = np.where(inds == i)[0] # find indices for every cluster minind, min_value = 0, 0 for index, idy in enumerate(indsi): if idy in sim_mat: # value = sum([sim_mat[idy].get(idx,0) for idx in indsi]) value = 0<|fim▁hole|> if value < min_value: minind, min_value = index, value energy += min_value cidx.append(indsi[minind]) # new centers if energy < min_energy: min_energy, inds_min, cidx_min = energy, inds, cidx return inds_min, cidx_min # cluster for every instance, medoids indices<|fim▁end|>
for idx in indsi: value += sim_mat[idy].get(idx, 0)
<|file_name|>mpsc_queue.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2010-2011 Dmitry Vyukov. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are met: // // 1. Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // // 2. Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // // THIS SOFTWARE IS PROVIDED BY DMITRY VYUKOV "AS IS" AND ANY EXPRESS OR IMPLIED // WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF // MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT // SHALL DMITRY VYUKOV OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, // INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE // OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF // ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // // The views and conclusions contained in the software and documentation are // those of the authors and should not be interpreted as representing official // policies, either expressed or implied, of Dmitry Vyukov. // //! A mostly lock-free multi-producer, single consumer queue. //! //! This module contains an implementation of a concurrent MPSC queue. This //! queue can be used to share data between threads, and is also used as the //! building block of channels in rust. //! //! Note that the current implementation of this queue has a caveat of the `pop` //! 
method, and see the method for more information about it. Due to this //! caveat, this queue may not be appropriate for all use-cases. // http://www.1024cores.net/home/lock-free-algorithms // /queues/non-intrusive-mpsc-node-based-queue pub use self::PopResult::*; use std::boxed::Box; use core::ptr; use core::cell::UnsafeCell; use std::sync::atomic::{AtomicPtr, Ordering}; /// A result of the `pop` function. pub enum PopResult<T> { /// Some data has been popped Data(T), /// The queue is empty Empty, /// The queue is in an inconsistent state. Popping data should succeed, but /// some pushers have yet to make enough progress in order allow a pop to /// succeed. It is recommended that a pop() occur "in the near future" in<|fim▁hole|> struct Node<T> { next: AtomicPtr<Node<T>>, value: Option<T>, } /// The multi-producer single-consumer structure. This is not cloneable, but it /// may be safely shared so long as it is guaranteed that there is only one /// popper at a time (many pushers are allowed). pub struct Queue<T> { head: AtomicPtr<Node<T>>, tail: UnsafeCell<*mut Node<T>>, } unsafe impl<T: Send> Send for Queue<T> {} unsafe impl<T: Send> Sync for Queue<T> {} impl<T> Node<T> { unsafe fn new(v: Option<T>) -> *mut Node<T> { Box::into_raw(box Node { next: AtomicPtr::new(ptr::null_mut()), value: v, }) } } impl<T> Queue<T> { /// Creates a new queue that is safe to share among multiple producers and /// one consumer. pub fn new() -> Queue<T> { let stub = unsafe { Node::new(None) }; Queue { head: AtomicPtr::new(stub), tail: UnsafeCell::new(stub), } } /// Pushes a new value onto this queue. pub fn push(&self, t: T) { unsafe { let n = Node::new(Some(t)); let prev = self.head.swap(n, Ordering::AcqRel); (*prev).next.store(n, Ordering::Release); } } /// Pops some data from this queue. /// /// Note that the current implementation means that this function cannot /// return `Option<T>`. 
It is possible for this queue to be in an /// inconsistent state where many pushes have succeeded and completely /// finished, but pops cannot return `Some(t)`. This inconsistent state /// happens when a pusher is pre-empted at an inopportune moment. /// /// This inconsistent state means that this queue does indeed have data, but /// it does not currently have access to it at this time. pub fn pop(&self) -> PopResult<T> { unsafe { let tail = *self.tail.get(); let next = (*tail).next.load(Ordering::Acquire); if !next.is_null() { *self.tail.get() = next; assert!((*tail).value.is_none()); assert!((*next).value.is_some()); let ret = (*next).value.take().unwrap(); let _: Box<Node<T>> = Box::from_raw(tail); return Data(ret); } if self.head.load(Ordering::Acquire) == tail { Empty } else { Inconsistent } } } } impl<T> Drop for Queue<T> { fn drop(&mut self) { unsafe { let mut cur = *self.tail.get(); while !cur.is_null() { let next = (*cur).next.load(Ordering::Relaxed); let _: Box<Node<T>> = Box::from_raw(cur); cur = next; } } } } #[cfg(all(test, not(target_os = "emscripten")))] mod tests { use sync::mpsc::channel; use super::{Queue, Data, Empty, Inconsistent}; use sync::Arc; use thread; #[test] fn test_full() { let q: Queue<Box<_>> = Queue::new(); q.push(box 1); q.push(box 2); } #[test] fn test() { let nthreads = 8; let nmsgs = 1000; let q = Queue::new(); match q.pop() { Empty => {} Inconsistent | Data(..) => panic!(), } let (tx, rx) = channel(); let q = Arc::new(q); for _ in 0..nthreads { let tx = tx.clone(); let q = q.clone(); thread::spawn(move || { for i in 0..nmsgs { q.push(i); } tx.send(()).unwrap(); }); } let mut i = 0; while i < nthreads * nmsgs { match q.pop() { Empty | Inconsistent => {} Data(_) => i += 1, } } drop(tx); for _ in 0..nthreads { rx.recv().unwrap(); } } }<|fim▁end|>
/// order to see if the sender has made progress or not Inconsistent, }
<|file_name|>mtp_device_delegate_impl_linux.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "chrome/browser/media_galleries/linux/mtp_device_delegate_impl_linux.h" #include <fcntl.h> #include <algorithm> #include <vector> #include "base/bind.h" #include "base/files/file_util.h" #include "base/numerics/safe_conversions.h" #include "base/posix/eintr_wrapper.h" #include "base/strings/string_number_conversions.h" #include "base/strings/string_split.h" #include "base/strings/string_util.h" #include "chrome/browser/media_galleries/linux/mtp_device_task_helper.h" #include "chrome/browser/media_galleries/linux/mtp_device_task_helper_map_service.h" #include "chrome/browser/media_galleries/linux/snapshot_file_details.h" #include "net/base/io_buffer.h" #include "third_party/cros_system_api/dbus/service_constants.h" namespace { // File path separator constant. const char kRootPath[] = "/"; // Returns the device relative file path given |file_path|. // E.g.: If the |file_path| is "/usb:2,2:12345/DCIM" and |registered_dev_path| // is "/usb:2,2:12345", this function returns the device relative path which is // "DCIM". // In the special case when |registered_dev_path| and |file_path| are the same, // return |kRootPath|. std::string GetDeviceRelativePath(const base::FilePath& registered_dev_path, const base::FilePath& file_path) { DCHECK_CURRENTLY_ON(content::BrowserThread::IO); DCHECK(!registered_dev_path.empty()); DCHECK(!file_path.empty()); std::string result; if (registered_dev_path == file_path) { result = kRootPath; } else { base::FilePath relative_path; if (registered_dev_path.AppendRelativePath(file_path, &relative_path)) { DCHECK(!relative_path.empty()); result = relative_path.value(); } } return result; } // Returns the MTPDeviceTaskHelper object associated with the MTP device // storage. 
// // |storage_name| specifies the name of the storage device. // |read_only| specifies the mode of the storage device. // Returns NULL if the |storage_name| is no longer valid (e.g. because the // corresponding storage device is detached, etc). MTPDeviceTaskHelper* GetDeviceTaskHelperForStorage( const std::string& storage_name, const bool read_only) { DCHECK_CURRENTLY_ON(content::BrowserThread::UI); return MTPDeviceTaskHelperMapService::GetInstance()->GetDeviceTaskHelper( storage_name, read_only); } // Opens the storage device for communication. // // Called on the UI thread to dispatch the request to the // MediaTransferProtocolManager. // // |storage_name| specifies the name of the storage device. // |read_only| specifies the mode of the storage device. // |reply_callback| is called when the OpenStorage request completes. // |reply_callback| runs on the IO thread. void OpenStorageOnUIThread( const std::string& storage_name, const bool read_only, const MTPDeviceTaskHelper::OpenStorageCallback& reply_callback) { DCHECK_CURRENTLY_ON(content::BrowserThread::UI); MTPDeviceTaskHelper* task_helper = GetDeviceTaskHelperForStorage(storage_name, read_only); if (!task_helper) { task_helper = MTPDeviceTaskHelperMapService::GetInstance()->CreateDeviceTaskHelper( storage_name, read_only); } task_helper->OpenStorage(storage_name, read_only, reply_callback); } // Creates |directory_name| on |parent_id|. // // |storage_name| specifies the name of the storage device. // |read_only| specifies the mode of the storage device. // |parent_id| is an object id of the parent directory. // |directory_name| is name of the new directory. // |success_callback| is called when the directory is created successfully. // |error_callback| is called when it fails to create a directory. // |success_callback| and |error_callback| runs on the IO thread. 
void CreateDirectoryOnUIThread( const std::string& storage_name, const bool read_only, const uint32 parent_id, const std::string& directory_name, const MTPDeviceTaskHelper::CreateDirectorySuccessCallback& success_callback, const MTPDeviceTaskHelper::ErrorCallback& error_callback) { DCHECK_CURRENTLY_ON(content::BrowserThread::UI); MTPDeviceTaskHelper* task_helper = GetDeviceTaskHelperForStorage(storage_name, read_only); if (!task_helper) return; task_helper->CreateDirectory(parent_id, directory_name, success_callback, error_callback); } // Enumerates the |dir_id| directory file entries. // // Called on the UI thread to dispatch the request to the // MediaTransferProtocolManager. // // |storage_name| specifies the name of the storage device. // |read_only| specifies the mode of the storage device. // |directory_id| is an id of a directory to read. // |max_size| is a maximum size to read. Set 0 not to specify the maximum size. // |success_callback| is called when the ReadDirectory request succeeds. // |error_callback| is called when the ReadDirectory request fails. // |success_callback| and |error_callback| runs on the IO thread. void ReadDirectoryOnUIThread( const std::string& storage_name, const bool read_only, const uint32 directory_id, const size_t max_size, const MTPDeviceTaskHelper::ReadDirectorySuccessCallback& success_callback, const MTPDeviceTaskHelper::ErrorCallback& error_callback) { DCHECK_CURRENTLY_ON(content::BrowserThread::UI); MTPDeviceTaskHelper* task_helper = GetDeviceTaskHelperForStorage(storage_name, read_only); if (!task_helper) return; task_helper->ReadDirectory(directory_id, max_size, success_callback, error_callback); } // Gets the |file_path| details. // // Called on the UI thread to dispatch the request to the // MediaTransferProtocolManager. // // |storage_name| specifies the name of the storage device. // |read_only| specifies the mode of the storage device. // |success_callback| is called when the GetFileInfo request succeeds. 
// |error_callback| is called when the GetFileInfo request fails. // |success_callback| and |error_callback| runs on the IO thread. void GetFileInfoOnUIThread( const std::string& storage_name, const bool read_only, uint32 file_id, const MTPDeviceTaskHelper::GetFileInfoSuccessCallback& success_callback, const MTPDeviceTaskHelper::ErrorCallback& error_callback) { DCHECK_CURRENTLY_ON(content::BrowserThread::UI); MTPDeviceTaskHelper* task_helper = GetDeviceTaskHelperForStorage(storage_name, read_only); if (!task_helper) return; task_helper->GetFileInfo(file_id, success_callback, error_callback); } // Copies the contents of |device_file_path| to |snapshot_file_path|. // // Called on the UI thread to dispatch the request to the // MediaTransferProtocolManager. // // |storage_name| specifies the name of the storage device. // |read_only| specifies the mode of the storage device. // |device_file_path| specifies the media device file path. // |snapshot_file_path| specifies the platform path of the snapshot file. // |file_size| specifies the number of bytes that will be written to the // snapshot file. // |success_callback| is called when the copy operation succeeds. // |error_callback| is called when the copy operation fails. // |success_callback| and |error_callback| runs on the IO thread. void WriteDataIntoSnapshotFileOnUIThread( const std::string& storage_name, const bool read_only, const SnapshotRequestInfo& request_info, const base::File::Info& snapshot_file_info) { DCHECK_CURRENTLY_ON(content::BrowserThread::UI); MTPDeviceTaskHelper* task_helper = GetDeviceTaskHelperForStorage(storage_name, read_only); if (!task_helper) return; task_helper->WriteDataIntoSnapshotFile(request_info, snapshot_file_info); } // Copies the contents of |device_file_path| to |snapshot_file_path|. // // Called on the UI thread to dispatch the request to the // MediaTransferProtocolManager. // // |storage_name| specifies the name of the storage device. 
// |read_only| specifies the mode of the storage device. // |request| is a struct containing details about the byte read request. void ReadBytesOnUIThread( const std::string& storage_name, const bool read_only, const MTPDeviceAsyncDelegate::ReadBytesRequest& request) { DCHECK_CURRENTLY_ON(content::BrowserThread::UI); MTPDeviceTaskHelper* task_helper = GetDeviceTaskHelperForStorage(storage_name, read_only); if (!task_helper) return; task_helper->ReadBytes(request); } // Renames |object_id| to |new_name|. // // |storage_name| specifies the name of the storage device. // |read_only| specifies the mode of the storage device. // |object_id| is an id of object to be renamed. // |new_name| is new name of the object. // |success_callback| is called when the object is renamed successfully. // |error_callback| is called when it fails to rename the object. // |success_callback| and |error_callback| runs on the IO thread. void RenameObjectOnUIThread( const std::string& storage_name, const bool read_only, const uint32 object_id, const std::string& new_name, const MTPDeviceTaskHelper::RenameObjectSuccessCallback& success_callback, const MTPDeviceTaskHelper::ErrorCallback& error_callback) { DCHECK_CURRENTLY_ON(content::BrowserThread::UI); MTPDeviceTaskHelper* task_helper = GetDeviceTaskHelperForStorage(storage_name, read_only); if (!task_helper) return; task_helper->RenameObject(object_id, new_name, success_callback, error_callback); } // Copies the file |source_file_descriptor| to |file_name| in |parent_id|. // // |storage_name| specifies the name of the storage device. // |read_only| specifies the mode of the storage device. // |source_file_descriptor| file descriptor of source file. // |parent_id| object id of a target directory. // |file_name| file name of a target file. // |success_callback| is called when the file is copied successfully. // |error_callback| is called when it fails to copy file. 
// Since this method does not close the file descriptor, callbacks are // responsible for closing it. void CopyFileFromLocalOnUIThread( const std::string& storage_name, const bool read_only, const int source_file_descriptor, const uint32 parent_id, const std::string& file_name, const MTPDeviceTaskHelper::CopyFileFromLocalSuccessCallback& success_callback, const MTPDeviceTaskHelper::ErrorCallback& error_callback) { DCHECK_CURRENTLY_ON(content::BrowserThread::UI); MTPDeviceTaskHelper* task_helper = GetDeviceTaskHelperForStorage(storage_name, read_only); if (!task_helper) return; task_helper->CopyFileFromLocal(storage_name, source_file_descriptor, parent_id, file_name, success_callback, error_callback); } // Deletes |object_id|. // // Called on the UI thread to dispatch the request to the // MediaTransferProtocolManager. // // |storage_name| specifies the name of the storage device. // |read_only| specifies the mode of the storage device. // |object_id| is the object to be deleted. // |success_callback| is called when the object is deleted successfully. // |error_callback| is called when it fails to delete the object. // |success_callback| and |error_callback| runs on the IO thread. void DeleteObjectOnUIThread( const std::string storage_name, const bool read_only, const uint32 object_id, const MTPDeviceTaskHelper::DeleteObjectSuccessCallback success_callback, const MTPDeviceTaskHelper::ErrorCallback error_callback) { DCHECK_CURRENTLY_ON(content::BrowserThread::UI); MTPDeviceTaskHelper* task_helper = GetDeviceTaskHelperForStorage(storage_name, read_only); if (!task_helper) return; task_helper->DeleteObject(object_id, success_callback, error_callback); } // Closes the device storage specified by the |storage_name| and destroys the // MTPDeviceTaskHelper object associated with the device storage. // // Called on the UI thread to dispatch the request to the // MediaTransferProtocolManager. 
void CloseStorageAndDestroyTaskHelperOnUIThread( const std::string& storage_name, const bool read_only) { DCHECK_CURRENTLY_ON(content::BrowserThread::UI); MTPDeviceTaskHelper* task_helper = GetDeviceTaskHelperForStorage(storage_name, read_only); if (!task_helper) return; task_helper->CloseStorage(); MTPDeviceTaskHelperMapService::GetInstance()->DestroyDeviceTaskHelper( storage_name, read_only); } // Opens |file_path| with |flags|. Returns the result as a pair. // first is file descriptor. // second is base::File::Error. This value is set as following. // - When it succeeds to open a file descriptor, base::File::FILE_OK is set. // - When |file_path| is a directory, base::File::FILE_ERROR_NOT_A_FILE is set. // - When |file_path| does not exist, base::File::FILE_ERROR_NOT_FOUND is set. // - For other error cases, base::File::FILE_ERROR_FAILED is set. std::pair<int, base::File::Error> OpenFileDescriptor(const char* file_path, const int flags) { DCHECK_CURRENTLY_ON(content::BrowserThread::FILE); if (base::DirectoryExists(base::FilePath(file_path))) return std::make_pair(-1, base::File::FILE_ERROR_NOT_A_FILE); int file_descriptor = open(file_path, flags); if (file_descriptor >= 0) return std::make_pair(file_descriptor, base::File::FILE_OK); if (errno == ENOENT) return std::make_pair(file_descriptor, base::File::FILE_ERROR_NOT_FOUND); return std::make_pair(file_descriptor, base::File::FILE_ERROR_FAILED); } // Closes |file_descriptor| on file thread. void CloseFileDescriptor(const int file_descriptor) { DCHECK_CURRENTLY_ON(content::BrowserThread::FILE); IGNORE_EINTR(close(file_descriptor)); } // Deletes a temporary file |file_path|. void DeleteTemporaryFile(const base::FilePath& file_path) { content::BrowserThread::PostBlockingPoolTask( FROM_HERE, base::Bind(base::IgnoreResult(base::DeleteFile), file_path, false /* not recursive*/)); } // A fake callback to be passed as CopyFileProgressCallback. 
// No-op progress callback (used when the caller does not track copy progress).
void FakeCopyFileProgressCallback(int64 size) {
}

}  // namespace

// A queued task plus the metadata needed to run it once the delegate is
// initialized: the device path it touches, and the thread it must run on.
MTPDeviceDelegateImplLinux::PendingTaskInfo::PendingTaskInfo(
    const base::FilePath& path,
    content::BrowserThread::ID thread_id,
    const tracked_objects::Location& location,
    const base::Closure& task)
    : path(path),
      thread_id(thread_id),
      location(location),
      task(task) {
}

MTPDeviceDelegateImplLinux::PendingTaskInfo::~PendingTaskInfo() {
}

// Represents a file on the MTP device.
// Lives on the IO thread.
class MTPDeviceDelegateImplLinux::MTPFileNode {
 public:
  // Registers itself in |file_id_to_node_map| on construction and
  // unregisters on destruction.
  MTPFileNode(uint32 file_id,
              const std::string& file_name,
              MTPFileNode* parent,
              FileIdToMTPFileNodeMap* file_id_to_node_map);
  ~MTPFileNode();

  // Returns the child named |name|, or NULL if there is none.
  const MTPFileNode* GetChild(const std::string& name) const;

  // Ensures a child |name| with id |id| exists, replacing any stale child of
  // the same name whose id differs.
  void EnsureChildExists(const std::string& name, uint32 id);

  // Clears all the children, except those in |children_to_keep|.
  void ClearNonexistentChildren(
      const std::set<std::string>& children_to_keep);

  // Deletes the child with |file_id| (which must itself be childless).
  // Returns false if there is no such child.
  bool DeleteChild(uint32 file_id);

  bool HasChildren() const;

  uint32 file_id() const { return file_id_; }
  const std::string& file_name() const { return file_name_; }
  MTPFileNode* parent() { return parent_; }

 private:
  // Container for holding a node's children.
  typedef base::ScopedPtrHashMap<std::string, MTPFileNode> ChildNodes;

  const uint32 file_id_;
  const std::string file_name_;

  ChildNodes children_;
  MTPFileNode* const parent_;
  // Not owned; shared index from file ids to nodes, kept in sync by the
  // constructor/destructor.
  FileIdToMTPFileNodeMap* file_id_to_node_map_;

  DISALLOW_COPY_AND_ASSIGN(MTPFileNode);
};

MTPDeviceDelegateImplLinux::MTPFileNode::MTPFileNode(
    uint32 file_id,
    const std::string& file_name,
    MTPFileNode* parent,
    FileIdToMTPFileNodeMap* file_id_to_node_map)
    : file_id_(file_id),
      file_name_(file_name),
      parent_(parent),
      file_id_to_node_map_(file_id_to_node_map) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::IO);
  DCHECK(file_id_to_node_map_);
  // Each file id maps to exactly one node.
  DCHECK(!ContainsKey(*file_id_to_node_map_, file_id_));
  (*file_id_to_node_map_)[file_id_] = this;
}

MTPDeviceDelegateImplLinux::MTPFileNode::~MTPFileNode() {
  DCHECK_CURRENTLY_ON(content::BrowserThread::IO);
  size_t erased = file_id_to_node_map_->erase(file_id_);
  DCHECK_EQ(1U, erased);
}

const MTPDeviceDelegateImplLinux::MTPFileNode*
MTPDeviceDelegateImplLinux::MTPFileNode::GetChild(
    const std::string& name) const {
  DCHECK_CURRENTLY_ON(content::BrowserThread::IO);
  return children_.get(name);
}

void MTPDeviceDelegateImplLinux::MTPFileNode::EnsureChildExists(
    const std::string& name,
    uint32 id) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::IO);
  const MTPFileNode* child = GetChild(name);
  if (child && child->file_id() == id)
    return;

  // ScopedPtrHashMap::set() destroys any previous node stored under |name|.
  children_.set(
      name,
      make_scoped_ptr(new MTPFileNode(id, name, this, file_id_to_node_map_)));
}

void MTPDeviceDelegateImplLinux::MTPFileNode::ClearNonexistentChildren(
    const std::set<std::string>& children_to_keep) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::IO);
  // Collect names first, then erase, to avoid mutating |children_| while
  // iterating over it.
  std::set<std::string> children_to_erase;
  for (ChildNodes::const_iterator it = children_.begin();
       it != children_.end(); ++it) {
    if (ContainsKey(children_to_keep, it->first))
      continue;
    children_to_erase.insert(it->first);
  }
  for (std::set<std::string>::iterator it = children_to_erase.begin();
       it != children_to_erase.end(); ++it) {
    children_.take_and_erase(*it);
  }
}

bool MTPDeviceDelegateImplLinux::MTPFileNode::DeleteChild(uint32 file_id) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::IO);
  // Linear scan: children are keyed by name, not id.
  for (ChildNodes::iterator it = children_.begin();
       it != children_.end(); ++it) {
    if (it->second->file_id() == file_id) {
      DCHECK(!it->second->HasChildren());
      children_.erase(it);
      return true;
    }
  }
  return false;
}

bool MTPDeviceDelegateImplLinux::MTPFileNode::HasChildren() const {
  DCHECK_CURRENTLY_ON(content::BrowserThread::IO);
  return children_.size() > 0;
}

MTPDeviceDelegateImplLinux::MTPDeviceDelegateImplLinux(
    const std::string& device_location,
    const bool read_only)
    : init_state_(UNINITIALIZED),
      task_in_progress_(false),
      device_path_(device_location),
      read_only_(read_only),
      root_node_(new MTPFileNode(mtpd::kRootFileId,
                                 "",    // Root node has no name.
                                 NULL,  // And no parent node.
                                 &file_id_to_node_map_)),
      weak_ptr_factory_(this) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::IO);
  DCHECK(!device_path_.empty());
  // The storage name is |device_location| with the root path prefix removed.
  base::RemoveChars(device_location, kRootPath, &storage_name_);
  DCHECK(!storage_name_.empty());
}

MTPDeviceDelegateImplLinux::~MTPDeviceDelegateImplLinux() {
  DCHECK_CURRENTLY_ON(content::BrowserThread::IO);
}

void MTPDeviceDelegateImplLinux::CreateDirectory(
    const base::FilePath& directory_path,
    const bool exclusive,
    const bool recursive,
    const CreateDirectorySuccessCallback& success_callback,
    const ErrorCallback& error_callback) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::IO);
  DCHECK(!directory_path.empty());

  // If |directory_path| is not the path in this device, fails with error.
  if (!device_path_.IsParent(directory_path)) {
    error_callback.Run(base::File::FILE_ERROR_FAILED);
    return;
  }

  // Decomposes |directory_path| to components. CreateDirectoryInternal creates
  // directories by reading |components| from back.
  std::vector<base::FilePath> components;
  if (recursive) {
    // Deepest path first, so the back() of |components| is the shallowest
    // missing ancestor.
    for (base::FilePath path = directory_path; path != device_path_;
         path = path.DirName()) {
      components.push_back(path);
    }
  } else {
    components.push_back(directory_path);
  }

  const base::Closure closure =
      base::Bind(&MTPDeviceDelegateImplLinux::CreateDirectoryInternal,
                 weak_ptr_factory_.GetWeakPtr(), components, exclusive,
                 success_callback, error_callback);
  EnsureInitAndRunTask(PendingTaskInfo(
      directory_path, content::BrowserThread::IO, FROM_HERE, closure));
}

void MTPDeviceDelegateImplLinux::GetFileInfo(
    const base::FilePath& file_path,
    const GetFileInfoSuccessCallback& success_callback,
    const ErrorCallback& error_callback) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::IO);
  DCHECK(!file_path.empty());

  // If a ReadDirectory operation is in progress, the file info may already be
  // cached.
  FileInfoCache::const_iterator it = file_info_cache_.find(file_path);
  if (it != file_info_cache_.end()) {
    // TODO(thestig): This code is repeated in several places. Combine them.
    // e.g. c/b/media_galleries/win/mtp_device_operations_util.cc
    const storage::DirectoryEntry& cached_file_entry = it->second;
    base::File::Info info;
    info.size = cached_file_entry.size;
    info.is_directory = cached_file_entry.is_directory;
    info.is_symbolic_link = false;
    info.last_modified = cached_file_entry.last_modified_time;
    // MTP does not expose a creation time; leave it as the null Time.
    info.creation_time = base::Time();

    success_callback.Run(info);
    return;
  }
  base::Closure closure =
      base::Bind(&MTPDeviceDelegateImplLinux::GetFileInfoInternal,
                 weak_ptr_factory_.GetWeakPtr(), file_path, success_callback,
                 error_callback);
  EnsureInitAndRunTask(PendingTaskInfo(file_path, content::BrowserThread::IO,
                                       FROM_HERE, closure));
}

void MTPDeviceDelegateImplLinux::ReadDirectory(
    const base::FilePath& root,
    const ReadDirectorySuccessCallback& success_callback,
    const ErrorCallback& error_callback) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::IO);
  DCHECK(!root.empty());
  base::Closure closure =
      base::Bind(&MTPDeviceDelegateImplLinux::ReadDirectoryInternal,
                 weak_ptr_factory_.GetWeakPtr(), root, success_callback,
                 error_callback);
  EnsureInitAndRunTask(PendingTaskInfo(root, content::BrowserThread::IO,
                                       FROM_HERE, closure));
}

void MTPDeviceDelegateImplLinux::CreateSnapshotFile(
    const base::FilePath& device_file_path,
    const base::FilePath& local_path,
    const CreateSnapshotFileSuccessCallback& success_callback,
    const ErrorCallback& error_callback) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::IO);
  DCHECK(!device_file_path.empty());
  DCHECK(!local_path.empty());
  base::Closure closure =
      base::Bind(&MTPDeviceDelegateImplLinux::CreateSnapshotFileInternal,
                 weak_ptr_factory_.GetWeakPtr(), device_file_path, local_path,
                 success_callback, error_callback);
  EnsureInitAndRunTask(PendingTaskInfo(device_file_path,
                                       content::BrowserThread::IO, FROM_HERE,
                                       closure));
}

bool MTPDeviceDelegateImplLinux::IsStreaming() {
  // MTP devices are always read via streaming (ReadBytes), never mmap'd.
  return true;
}

void MTPDeviceDelegateImplLinux::ReadBytes(
    const base::FilePath& device_file_path,
    const scoped_refptr<net::IOBuffer>& buf,
    int64 offset,
    int buf_len,
    const ReadBytesSuccessCallback& success_callback,
    const ErrorCallback& error_callback) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::IO);
  DCHECK(!device_file_path.empty());
  base::Closure closure =
      base::Bind(&MTPDeviceDelegateImplLinux::ReadBytesInternal,
                 weak_ptr_factory_.GetWeakPtr(), device_file_path, buf, offset,
                 buf_len, success_callback, error_callback);
  EnsureInitAndRunTask(PendingTaskInfo(device_file_path,
                                       content::BrowserThread::IO, FROM_HERE,
                                       closure));
}

bool MTPDeviceDelegateImplLinux::IsReadOnly() const {
  return read_only_;
}

void MTPDeviceDelegateImplLinux::CopyFileLocal(
    const base::FilePath& source_file_path,
    const base::FilePath& device_file_path,
    const CreateTemporaryFileCallback& create_temporary_file_callback,
    const CopyFileProgressCallback& progress_callback,
    const CopyFileLocalSuccessCallback& success_callback,
    const ErrorCallback& error_callback) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::IO);
  DCHECK(!source_file_path.empty());
  DCHECK(!device_file_path.empty());

  // Create a temporary file for creating a copy of source file on local.
  content::BrowserThread::PostTaskAndReplyWithResult(
      content::BrowserThread::FILE, FROM_HERE, create_temporary_file_callback,
      base::Bind(
          &MTPDeviceDelegateImplLinux::OnDidCreateTemporaryFileToCopyFileLocal,
          weak_ptr_factory_.GetWeakPtr(), source_file_path, device_file_path,
          progress_callback, success_callback, error_callback));
}

void MTPDeviceDelegateImplLinux::MoveFileLocal(
    const base::FilePath& source_file_path,
    const base::FilePath& device_file_path,
    const CreateTemporaryFileCallback& create_temporary_file_callback,
    const MoveFileLocalSuccessCallback& success_callback,
    const ErrorCallback& error_callback) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::IO);
  DCHECK(!source_file_path.empty());
  DCHECK(!device_file_path.empty());

  // Get file info to move file on local.
  // MoveFileLocalInternal needs the source's File::Info to decide between a
  // rename (same directory) and a copy-then-delete (different directory).
  const GetFileInfoSuccessCallback success_callback_wrapper = base::Bind(
      &MTPDeviceDelegateImplLinux::MoveFileLocalInternal,
      weak_ptr_factory_.GetWeakPtr(), source_file_path, device_file_path,
      create_temporary_file_callback, success_callback, error_callback);
  const base::Closure closure =
      base::Bind(&MTPDeviceDelegateImplLinux::GetFileInfoInternal,
                 weak_ptr_factory_.GetWeakPtr(), source_file_path,
                 success_callback_wrapper, error_callback);
  EnsureInitAndRunTask(PendingTaskInfo(
      source_file_path, content::BrowserThread::IO, FROM_HERE, closure));
}

void MTPDeviceDelegateImplLinux::CopyFileFromLocal(
    const base::FilePath& source_file_path,
    const base::FilePath& device_file_path,
    const CopyFileFromLocalSuccessCallback& success_callback,
    const ErrorCallback& error_callback) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::IO);
  DCHECK(!source_file_path.empty());
  DCHECK(!device_file_path.empty());

  // Get file info of destination file path.
  // Success here means the destination already exists (handled by the
  // wrapper); the "error" path is the normal continue-with-copy path.
  const GetFileInfoSuccessCallback success_callback_wrapper = base::Bind(
      &MTPDeviceDelegateImplLinux::OnDidGetDestFileInfoToCopyFileFromLocal,
      weak_ptr_factory_.GetWeakPtr(), error_callback);
  const ErrorCallback error_callback_wrapper = base::Bind(
      &MTPDeviceDelegateImplLinux::OnGetDestFileInfoErrorToCopyFileFromLocal,
      weak_ptr_factory_.GetWeakPtr(), source_file_path, device_file_path,
      success_callback, error_callback);
  const base::Closure closure =
      base::Bind(&MTPDeviceDelegateImplLinux::GetFileInfoInternal,
                 weak_ptr_factory_.GetWeakPtr(), device_file_path,
                 success_callback_wrapper, error_callback_wrapper);
  EnsureInitAndRunTask(PendingTaskInfo(
      device_file_path, content::BrowserThread::IO, FROM_HERE, closure));
}

void MTPDeviceDelegateImplLinux::DeleteFile(
    const base::FilePath& file_path,
    const DeleteFileSuccessCallback& success_callback,
    const ErrorCallback& error_callback) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::IO);
  DCHECK(!file_path.empty());

  // Fetch the file info first; DeleteFileInternal rejects directories.
  const GetFileInfoSuccessCallback& success_callback_wrapper =
      base::Bind(&MTPDeviceDelegateImplLinux::DeleteFileInternal,
                 weak_ptr_factory_.GetWeakPtr(), file_path, success_callback,
                 error_callback);
  const base::Closure closure =
      base::Bind(&MTPDeviceDelegateImplLinux::GetFileInfoInternal,
                 weak_ptr_factory_.GetWeakPtr(), file_path,
                 success_callback_wrapper, error_callback);
  EnsureInitAndRunTask(PendingTaskInfo(file_path, content::BrowserThread::IO,
                                       FROM_HERE, closure));
}

void MTPDeviceDelegateImplLinux::DeleteDirectory(
    const base::FilePath& file_path,
    const DeleteDirectorySuccessCallback& success_callback,
    const ErrorCallback& error_callback) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::IO);
  DCHECK(!file_path.empty());

  // Fetch the file info first; DeleteDirectoryInternal rejects non-dirs and
  // non-empty directories.
  const GetFileInfoSuccessCallback& success_callback_wrapper =
      base::Bind(&MTPDeviceDelegateImplLinux::DeleteDirectoryInternal,
                 weak_ptr_factory_.GetWeakPtr(), file_path, success_callback,
                 error_callback);
  const base::Closure closure =
      base::Bind(&MTPDeviceDelegateImplLinux::GetFileInfoInternal,
                 weak_ptr_factory_.GetWeakPtr(), file_path,
                 success_callback_wrapper, error_callback);
  EnsureInitAndRunTask(PendingTaskInfo(file_path, content::BrowserThread::IO,
                                       FROM_HERE, closure));
}

void MTPDeviceDelegateImplLinux::AddWatcher(
    const GURL& origin,
    const base::FilePath& file_path,
    const bool recursive,
    const storage::WatcherManager::StatusCallback& callback,
    const storage::WatcherManager::NotificationCallback&
        notification_callback) {
  // Recursive watching is not supported.
  if (recursive) {
    callback.Run(base::File::FILE_ERROR_INVALID_OPERATION);
    return;
  }

  // TODO(yawano) Checks existence of |file_path|.

  const auto it = subscribers_.find(file_path);
  if (it != subscribers_.end()) {
    // Adds to existing origin callback map.
    // At most one watcher per (path, origin) pair.
    if (ContainsKey(it->second, origin)) {
      callback.Run(base::File::FILE_ERROR_EXISTS);
      return;
    }
    it->second.insert(std::make_pair(origin, notification_callback));
  } else {
    // Creates new origin callback map.
    OriginNotificationCallbackMap callback_map;
    callback_map.insert(std::make_pair(origin, notification_callback));
    subscribers_.insert(std::make_pair(file_path, callback_map));
  }

  callback.Run(base::File::FILE_OK);
}

void MTPDeviceDelegateImplLinux::RemoveWatcher(
    const GURL& origin,
    const base::FilePath& file_path,
    const bool recursive,
    const storage::WatcherManager::StatusCallback& callback) {
  if (recursive) {
    callback.Run(base::File::FILE_ERROR_INVALID_OPERATION);
    return;
  }

  const auto it = subscribers_.find(file_path);
  if (it == subscribers_.end()) {
    callback.Run(base::File::FILE_ERROR_NOT_FOUND);
    return;
  }

  if (it->second.erase(origin) == 0) {
    callback.Run(base::File::FILE_ERROR_NOT_FOUND);
    return;
  }

  // Drop the per-path entry once its last origin is gone.
  if (it->second.empty())
    subscribers_.erase(it);

  callback.Run(base::File::FILE_OK);
}

void MTPDeviceDelegateImplLinux::NotifyFileChange(
    const base::FilePath& file_path,
    const storage::WatcherManager::ChangeType change_type) {
  // Fan the change out to every origin watching |file_path|.
  const auto it = subscribers_.find(file_path);
  if (it != subscribers_.end()) {
    for (const auto& origin_callback : it->second) {
      origin_callback.second.Run(change_type);
    }
  }
}

void MTPDeviceDelegateImplLinux::CancelPendingTasksAndDeleteDelegate() {
  DCHECK_CURRENTLY_ON(content::BrowserThread::IO);

  // To cancel all the pending tasks, destroy the MTPDeviceTaskHelper object.
  content::BrowserThread::PostTask(
      content::BrowserThread::UI, FROM_HERE,
      base::Bind(&CloseStorageAndDestroyTaskHelperOnUIThread, storage_name_,
                 read_only_));
  // Self-deletion: no member access is valid after this point.
  delete this;
}

void MTPDeviceDelegateImplLinux::GetFileInfoInternal(
    const base::FilePath& file_path,
    const GetFileInfoSuccessCallback& success_callback,
    const ErrorCallback& error_callback) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::IO);

  // Resolve the path to a device file id via the cached node tree; if the
  // path is not cached, it does not exist as far as this delegate knows.
  uint32 file_id;
  if (CachedPathToId(file_path, &file_id)) {
    GetFileInfoSuccessCallback success_callback_wrapper =
        base::Bind(&MTPDeviceDelegateImplLinux::OnDidGetFileInfo,
                   weak_ptr_factory_.GetWeakPtr(), success_callback);
    ErrorCallback error_callback_wrapper =
        base::Bind(&MTPDeviceDelegateImplLinux::HandleDeviceFileError,
                   weak_ptr_factory_.GetWeakPtr(), error_callback, file_id);

    base::Closure closure = base::Bind(&GetFileInfoOnUIThread, storage_name_,
                                       read_only_, file_id,
                                       success_callback_wrapper,
                                       error_callback_wrapper);
    EnsureInitAndRunTask(PendingTaskInfo(base::FilePath(),
                                         content::BrowserThread::UI, FROM_HERE,
                                         closure));
  } else {
    error_callback.Run(base::File::FILE_ERROR_NOT_FOUND);
  }
  PendingRequestDone();
}

void MTPDeviceDelegateImplLinux::CreateDirectoryInternal(
    const std::vector<base::FilePath>& components,
    const bool exclusive,
    const CreateDirectorySuccessCallback& success_callback,
    const ErrorCallback& error_callback) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::IO);

  // |components| is ordered deepest-first, so back() is the next directory
  // to create (shallowest remaining ancestor).
  const base::FilePath current_component = components.back();
  std::vector<base::FilePath> other_components = components;
  other_components.pop_back();

  if (other_components.empty()) {
    // Either we reached the last component in the recursive case, or this is
    // the non-recursive case.
    uint32 parent_id;
    if (CachedPathToId(current_component.DirName(), &parent_id)) {
      const base::Closure closure =
          base::Bind(&MTPDeviceDelegateImplLinux::CreateSingleDirectory,
                     weak_ptr_factory_.GetWeakPtr(), current_component,
                     exclusive, success_callback, error_callback);
      EnsureInitAndRunTask(PendingTaskInfo(
          base::FilePath(), content::BrowserThread::IO, FROM_HERE, closure));
    } else {
      error_callback.Run(base::File::FILE_ERROR_NOT_FOUND);
    }
  } else {
    // Ensures that parent directories are created for recursive case.
    uint32 directory_id;
    if (CachedPathToId(current_component, &directory_id)) {
      // Parent directory |current_component| already exists, continue creating
      // directories.
      const base::Closure closure =
          base::Bind(&MTPDeviceDelegateImplLinux::CreateDirectoryInternal,
                     weak_ptr_factory_.GetWeakPtr(), other_components,
                     exclusive, success_callback, error_callback);
      EnsureInitAndRunTask(PendingTaskInfo(
          base::FilePath(), content::BrowserThread::IO, FROM_HERE, closure));
    } else {
      // If parent directory |current_component| does not exist, create it.
      const CreateDirectorySuccessCallback success_callback_wrapper =
          base::Bind(&MTPDeviceDelegateImplLinux::
                         OnDidCreateParentDirectoryToCreateDirectory,
                     weak_ptr_factory_.GetWeakPtr(), current_component,
                     other_components, exclusive, success_callback,
                     error_callback);
      // Wraps error callback to return all errors of creating parent
      // directories as FILE_ERROR_FAILED.
      const ErrorCallback error_callback_wrapper =
          base::Bind(&MTPDeviceDelegateImplLinux::
                         OnCreateParentDirectoryErrorToCreateDirectory,
                     weak_ptr_factory_.GetWeakPtr(), error_callback);
      const base::Closure closure =
          base::Bind(&MTPDeviceDelegateImplLinux::CreateSingleDirectory,
                     weak_ptr_factory_.GetWeakPtr(), current_component,
                     false /* not exclusive */, success_callback_wrapper,
                     error_callback_wrapper);
      EnsureInitAndRunTask(PendingTaskInfo(
          base::FilePath(), content::BrowserThread::IO, FROM_HERE, closure));
    }
  }

  PendingRequestDone();
}

void MTPDeviceDelegateImplLinux::ReadDirectoryInternal(
    const base::FilePath& root,
    const ReadDirectorySuccessCallback& success_callback,
    const ErrorCallback& error_callback) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::IO);

  uint32 dir_id;
  if (CachedPathToId(root, &dir_id)) {
    // Fetch the entry's info first to verify it actually is a directory;
    // OnDidGetFileInfoToReadDirectory continues with the real read.
    GetFileInfoSuccessCallback success_callback_wrapper =
        base::Bind(&MTPDeviceDelegateImplLinux::OnDidGetFileInfoToReadDirectory,
                   weak_ptr_factory_.GetWeakPtr(), dir_id, success_callback,
                   error_callback);
    ErrorCallback error_callback_wrapper =
        base::Bind(&MTPDeviceDelegateImplLinux::HandleDeviceFileError,
                   weak_ptr_factory_.GetWeakPtr(), error_callback, dir_id);
    base::Closure closure = base::Bind(&GetFileInfoOnUIThread, storage_name_,
                                       read_only_, dir_id,
                                       success_callback_wrapper,
                                       error_callback_wrapper);
    EnsureInitAndRunTask(PendingTaskInfo(base::FilePath(),
                                         content::BrowserThread::UI, FROM_HERE,
                                         closure));
  } else {
    error_callback.Run(base::File::FILE_ERROR_NOT_FOUND);
  }
  PendingRequestDone();
}

void MTPDeviceDelegateImplLinux::CreateSnapshotFileInternal(
    const base::FilePath& device_file_path,
    const base::FilePath& local_path,
    const CreateSnapshotFileSuccessCallback& success_callback,
    const ErrorCallback& error_callback) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::IO);

  uint32 file_id;
  if (CachedPathToId(device_file_path, &file_id)) {
    // |request_info| carries everything the snapshot continuation needs;
    // ownership is transferred into the callback via base::Passed.
    scoped_ptr<SnapshotRequestInfo> request_info(
        new SnapshotRequestInfo(file_id, local_path, success_callback,
                                error_callback));
    GetFileInfoSuccessCallback success_callback_wrapper = base::Bind(
        &MTPDeviceDelegateImplLinux::OnDidGetFileInfoToCreateSnapshotFile,
        weak_ptr_factory_.GetWeakPtr(), base::Passed(&request_info));
    ErrorCallback error_callback_wrapper =
        base::Bind(&MTPDeviceDelegateImplLinux::HandleDeviceFileError,
                   weak_ptr_factory_.GetWeakPtr(), error_callback, file_id);
    base::Closure closure = base::Bind(&GetFileInfoOnUIThread, storage_name_,
                                       read_only_, file_id,
                                       success_callback_wrapper,
                                       error_callback_wrapper);
    EnsureInitAndRunTask(PendingTaskInfo(base::FilePath(),
                                         content::BrowserThread::UI, FROM_HERE,
                                         closure));
  } else {
    error_callback.Run(base::File::FILE_ERROR_NOT_FOUND);
  }
  PendingRequestDone();
}

void MTPDeviceDelegateImplLinux::ReadBytesInternal(
    const base::FilePath& device_file_path,
    net::IOBuffer* buf,
    int64 offset,
    int buf_len,
    const ReadBytesSuccessCallback& success_callback,
    const ErrorCallback& error_callback) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::IO);

  uint32 file_id;
  if (CachedPathToId(device_file_path, &file_id)) {
    ReadBytesRequest request(
        file_id, buf, offset, buf_len,
        base::Bind(&MTPDeviceDelegateImplLinux::OnDidReadBytes,
                   weak_ptr_factory_.GetWeakPtr(), success_callback),
        base::Bind(&MTPDeviceDelegateImplLinux::HandleDeviceFileError,
                   weak_ptr_factory_.GetWeakPtr(), error_callback, file_id));

    base::Closure closure =
        base::Bind(&ReadBytesOnUIThread, storage_name_, read_only_, request);
    EnsureInitAndRunTask(PendingTaskInfo(base::FilePath(),
                                         content::BrowserThread::UI, FROM_HERE,
                                         closure));
  } else {
    error_callback.Run(base::File::FILE_ERROR_NOT_FOUND);
  }
  PendingRequestDone();
}

void MTPDeviceDelegateImplLinux::MoveFileLocalInternal(
    const base::FilePath& source_file_path,
    const base::FilePath& device_file_path,
    const CreateTemporaryFileCallback& create_temporary_file_callback,
    const MoveFileLocalSuccessCallback& success_callback,
    const ErrorCallback& error_callback,
    const base::File::Info& source_file_info) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::IO);

  // Only regular files can be moved.
  if (source_file_info.is_directory) {
    error_callback.Run(base::File::FILE_ERROR_NOT_A_FILE);
    return;
  }

  if (source_file_path.DirName() == device_file_path.DirName()) {
    // If a file is moved in a same directory, rename the file.
    uint32 file_id;
    if (CachedPathToId(source_file_path, &file_id)) {
      const MTPDeviceTaskHelper::RenameObjectSuccessCallback
          success_callback_wrapper = base::Bind(
              &MTPDeviceDelegateImplLinux::OnDidMoveFileLocalWithRename,
              weak_ptr_factory_.GetWeakPtr(), success_callback, file_id);
      const MTPDeviceTaskHelper::ErrorCallback error_callback_wrapper =
          base::Bind(&MTPDeviceDelegateImplLinux::HandleDeviceFileError,
                     weak_ptr_factory_.GetWeakPtr(), error_callback, file_id);
      const base::Closure closure =
          base::Bind(&RenameObjectOnUIThread, storage_name_, read_only_,
                     file_id, device_file_path.BaseName().value(),
                     success_callback_wrapper, error_callback_wrapper);
      EnsureInitAndRunTask(PendingTaskInfo(
          base::FilePath(), content::BrowserThread::UI, FROM_HERE, closure));
    } else {
      error_callback.Run(base::File::FILE_ERROR_NOT_FOUND);
    }
  } else {
    // If a file is moved to a different directory, create a copy to the
    // destination path, and remove source file.
    const CopyFileLocalSuccessCallback& success_callback_wrapper =
        base::Bind(&MTPDeviceDelegateImplLinux::DeleteFileInternal,
                   weak_ptr_factory_.GetWeakPtr(), source_file_path,
                   success_callback, error_callback, source_file_info);
    // TODO(yawano): Avoid to call external method from internal code.
    CopyFileLocal(source_file_path, device_file_path,
                  create_temporary_file_callback,
                  base::Bind(&FakeCopyFileProgressCallback),
                  success_callback_wrapper, error_callback);
  }
}

void MTPDeviceDelegateImplLinux::OnDidOpenFDToCopyFileFromLocal(
    const base::FilePath& device_file_path,
    const CopyFileFromLocalSuccessCallback& success_callback,
    const ErrorCallback& error_callback,
    const std::pair<int, base::File::Error>& open_fd_result) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::IO);

  if (open_fd_result.second != base::File::FILE_OK) {
    error_callback.Run(open_fd_result.second);
    return;
  }

  const int source_file_descriptor = open_fd_result.first;
  uint32 parent_id;
  if (CachedPathToId(device_file_path.DirName(), &parent_id)) {
    // Both continuations receive the fd so it gets closed on either outcome.
    CopyFileFromLocalSuccessCallback success_callback_wrapper =
        base::Bind(&MTPDeviceDelegateImplLinux::OnDidCopyFileFromLocal,
                   weak_ptr_factory_.GetWeakPtr(), success_callback,
                   source_file_descriptor);
    ErrorCallback error_callback_wrapper = base::Bind(
        &MTPDeviceDelegateImplLinux::HandleCopyFileFromLocalError,
        weak_ptr_factory_.GetWeakPtr(), error_callback,
        source_file_descriptor);
    base::Closure closure = base::Bind(&CopyFileFromLocalOnUIThread,
                                       storage_name_, read_only_,
                                       source_file_descriptor, parent_id,
                                       device_file_path.BaseName().value(),
                                       success_callback_wrapper,
                                       error_callback_wrapper);
    EnsureInitAndRunTask(PendingTaskInfo(
        base::FilePath(), content::BrowserThread::UI, FROM_HERE, closure));
  } else {
    HandleCopyFileFromLocalError(error_callback, source_file_descriptor,
                                 base::File::FILE_ERROR_NOT_FOUND);
  }
}

void MTPDeviceDelegateImplLinux::DeleteFileInternal(
    const base::FilePath& file_path,
    const DeleteFileSuccessCallback& success_callback,
    const ErrorCallback& error_callback,
    const base::File::Info& file_info) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::IO);

  if (file_info.is_directory) {
    error_callback.Run(base::File::FILE_ERROR_NOT_A_FILE);
  } else {
    uint32 file_id;
    if (CachedPathToId(file_path, &file_id))
      RunDeleteObjectOnUIThread(file_id, success_callback, error_callback);
    else
      error_callback.Run(base::File::FILE_ERROR_NOT_FOUND);
  }
}

void MTPDeviceDelegateImplLinux::DeleteDirectoryInternal(
    const base::FilePath& file_path,
    const DeleteDirectorySuccessCallback& success_callback,
    const ErrorCallback& error_callback,
    const base::File::Info& file_info) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::IO);

  if (!file_info.is_directory) {
    error_callback.Run(base::File::FILE_ERROR_NOT_A_DIRECTORY);
    return;
  }

  uint32 directory_id;
  if (!CachedPathToId(file_path, &directory_id)) {
    error_callback.Run(base::File::FILE_ERROR_NOT_FOUND);
    return;
  }

  // Checks the cache first. If it has children in cache, the directory cannot
  // be empty.
  FileIdToMTPFileNodeMap::const_iterator it =
      file_id_to_node_map_.find(directory_id);
  if (it != file_id_to_node_map_.end() && it->second->HasChildren()) {
    error_callback.Run(base::File::FILE_ERROR_NOT_EMPTY);
    return;
  }

  // Since the directory can contain a file even if the cache returns it as
  // empty, read the directory and confirm the directory is actually empty.
  // Reading at most one entry is sufficient to decide emptiness.
  const MTPDeviceTaskHelper::ReadDirectorySuccessCallback
      success_callback_wrapper = base::Bind(
          &MTPDeviceDelegateImplLinux::OnDidReadDirectoryToDeleteDirectory,
          weak_ptr_factory_.GetWeakPtr(), directory_id, success_callback,
          error_callback);
  const MTPDeviceTaskHelper::ErrorCallback error_callback_wrapper =
      base::Bind(&MTPDeviceDelegateImplLinux::HandleDeviceFileError,
                 weak_ptr_factory_.GetWeakPtr(), error_callback, directory_id);
  const base::Closure closure = base::Bind(
      &ReadDirectoryOnUIThread, storage_name_, read_only_, directory_id,
      1 /* max_size */, success_callback_wrapper, error_callback_wrapper);
  EnsureInitAndRunTask(PendingTaskInfo(
      base::FilePath(), content::BrowserThread::UI, FROM_HERE, closure));
}

void MTPDeviceDelegateImplLinux::CreateSingleDirectory(
    const base::FilePath& directory_path,
    const bool exclusive,
    const CreateDirectorySuccessCallback& success_callback,
    const ErrorCallback& error_callback) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::IO);

  // Probe for an existing entry first: the "success" path means the path
  // already exists (an error unless |exclusive| is false), and the "error"
  // path is the normal create-the-directory continuation.
  const GetFileInfoSuccessCallback success_callback_wrapper = base::Bind(
      &MTPDeviceDelegateImplLinux::OnPathAlreadyExistsForCreateSingleDirectory,
      weak_ptr_factory_.GetWeakPtr(), exclusive, success_callback,
      error_callback);
  const ErrorCallback error_callback_wrapper = base::Bind(
      &MTPDeviceDelegateImplLinux::OnPathDoesNotExistForCreateSingleDirectory,
      weak_ptr_factory_.GetWeakPtr(), directory_path, success_callback,
      error_callback);
  const base::Closure closure =
      base::Bind(&MTPDeviceDelegateImplLinux::GetFileInfoInternal,
                 weak_ptr_factory_.GetWeakPtr(), directory_path,
                 success_callback_wrapper, error_callback_wrapper);
  EnsureInitAndRunTask(PendingTaskInfo(
      base::FilePath(), content::BrowserThread::IO, FROM_HERE, closure));

  PendingRequestDone();
}

void MTPDeviceDelegateImplLinux::OnDidReadDirectoryToCreateDirectory(
    const std::vector<base::FilePath>& components,
    const bool exclusive,
    const CreateDirectorySuccessCallback& success_callback,
    const ErrorCallback& error_callback,
    const storage::AsyncFileUtil::EntryList& /* file_list */,
    const bool has_more) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::IO);

  if (has_more)
    return;  // Wait until all entries have been read.

  // The cache is now refreshed; resume the directory-creation chain.
  const base::Closure closure =
      base::Bind(&MTPDeviceDelegateImplLinux::CreateDirectoryInternal,
                 weak_ptr_factory_.GetWeakPtr(), components, exclusive,
                 success_callback, error_callback);
  EnsureInitAndRunTask(PendingTaskInfo(
      base::FilePath(), content::BrowserThread::IO, FROM_HERE, closure));
}

void MTPDeviceDelegateImplLinux::OnDidReadDirectoryToDeleteDirectory(
    const uint32 directory_id,
    const DeleteDirectorySuccessCallback& success_callback,
    const ErrorCallback& error_callback,
    const storage::AsyncFileUtil::EntryList& entries,
    const bool has_more) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::IO);
  // The read was issued with max_size = 1, so there is never a second batch.
  DCHECK(!has_more);

  // Any entry at all means the directory is not empty.
  if (entries.size() > 0)
    error_callback.Run(base::File::FILE_ERROR_NOT_EMPTY);
  else
    RunDeleteObjectOnUIThread(directory_id, success_callback, error_callback);

  PendingRequestDone();
}

void MTPDeviceDelegateImplLinux::RunDeleteObjectOnUIThread(
    const uint32 object_id,
    const DeleteObjectSuccessCallback& success_callback,
    const ErrorCallback& error_callback) {
  const MTPDeviceTaskHelper::DeleteObjectSuccessCallback
      success_callback_wrapper = base::Bind(
          &MTPDeviceDelegateImplLinux::OnDidDeleteObject,
          weak_ptr_factory_.GetWeakPtr(), object_id, success_callback);
  const MTPDeviceTaskHelper::ErrorCallback error_callback_wrapper =
      base::Bind(&MTPDeviceDelegateImplLinux::HandleDeleteFileOrDirectoryError,
                 weak_ptr_factory_.GetWeakPtr(), error_callback);

  const base::Closure closure = base::Bind(&DeleteObjectOnUIThread,
                                           storage_name_, read_only_,
                                           object_id,
  // NOTE(review): the remainder of this statement and function body is not
  // visible in this view of the file.
}

void MTPDeviceDelegateImplLinux::EnsureInitAndRunTask(
    const PendingTaskInfo& task_info) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::IO);

  // Fast path: already initialized and idle, run immediately.
  if ((init_state_ == INITIALIZED) && !task_in_progress_) {
    RunTask(task_info);
    return;
  }

  // Only *Internal functions have empty paths.
Since they are the continuation // of the current running task, they get to cut in line. if (task_info.path.empty()) pending_tasks_.push_front(task_info); else pending_tasks_.push_back(task_info); if (init_state_ == UNINITIALIZED) { init_state_ = PENDING_INIT; task_in_progress_ = true; content::BrowserThread::PostTask( content::BrowserThread::UI, FROM_HERE, base::Bind(&OpenStorageOnUIThread, storage_name_, read_only_, base::Bind(&MTPDeviceDelegateImplLinux::OnInitCompleted, weak_ptr_factory_.GetWeakPtr()))); } } void MTPDeviceDelegateImplLinux::RunTask(const PendingTaskInfo& task_info) { DCHECK_CURRENTLY_ON(content::BrowserThread::IO); DCHECK_EQ(INITIALIZED, init_state_); DCHECK(!task_in_progress_); task_in_progress_ = true; bool need_to_check_cache = !task_info.path.empty(); if (need_to_check_cache) { base::FilePath uncached_path = NextUncachedPathComponent(task_info.path, task_info.cached_path); if (!uncached_path.empty()) { // Save the current task and do a cache lookup first. pending_tasks_.push_front(task_info); FillFileCache(uncached_path); return; } } content::BrowserThread::PostTask(task_info.thread_id, task_info.location, task_info.task); } void MTPDeviceDelegateImplLinux::WriteDataIntoSnapshotFile( const base::File::Info& file_info) { DCHECK_CURRENTLY_ON(content::BrowserThread::IO); DCHECK(current_snapshot_request_info_.get()); DCHECK_GT(file_info.size, 0); DCHECK(task_in_progress_); SnapshotRequestInfo request_info( current_snapshot_request_info_->file_id, current_snapshot_request_info_->snapshot_file_path, base::Bind( &MTPDeviceDelegateImplLinux::OnDidWriteDataIntoSnapshotFile, weak_ptr_factory_.GetWeakPtr()), base::Bind( &MTPDeviceDelegateImplLinux::OnWriteDataIntoSnapshotFileError, weak_ptr_factory_.GetWeakPtr())); base::Closure task_closure = base::Bind(&WriteDataIntoSnapshotFileOnUIThread, storage_name_, read_only_, request_info, file_info); content::BrowserThread::PostTask(content::BrowserThread::UI, FROM_HERE, task_closure); } void 
MTPDeviceDelegateImplLinux::PendingRequestDone() { DCHECK_CURRENTLY_ON(content::BrowserThread::IO); DCHECK(task_in_progress_); task_in_progress_ = false; ProcessNextPendingRequest(); } void MTPDeviceDelegateImplLinux::ProcessNextPendingRequest() { DCHECK_CURRENTLY_ON(content::BrowserThread::IO); DCHECK(!task_in_progress_); if (pending_tasks_.empty()) return; PendingTaskInfo task_info = pending_tasks_.front(); pending_tasks_.pop_front(); RunTask(task_info); } void MTPDeviceDelegateImplLinux::OnInitCompleted(bool succeeded) { DCHECK_CURRENTLY_ON(content::BrowserThread::IO); init_state_ = succeeded ? INITIALIZED : UNINITIALIZED; PendingRequestDone(); } void MTPDeviceDelegateImplLinux::OnDidGetFileInfo( const GetFileInfoSuccessCallback& success_callback, const base::File::Info& file_info) { DCHECK_CURRENTLY_ON(content::BrowserThread::IO); success_callback.Run(file_info); PendingRequestDone(); } void MTPDeviceDelegateImplLinux::OnPathAlreadyExistsForCreateSingleDirectory( const bool exclusive, const CreateDirectorySuccessCallback& success_callback, const ErrorCallback& error_callback, const base::File::Info& file_info) { DCHECK_CURRENTLY_ON(content::BrowserThread::IO); if (!file_info.is_directory || exclusive) error_callback.Run(base::File::FILE_ERROR_EXISTS); else success_callback.Run(); } void MTPDeviceDelegateImplLinux::OnPathDoesNotExistForCreateSingleDirectory( const base::FilePath& directory_path, const CreateDirectorySuccessCallback& success_callback, const ErrorCallback& error_callback, const base::File::Error error) { DCHECK_CURRENTLY_ON(content::BrowserThread::IO); if (error != base::File::FILE_ERROR_NOT_FOUND) { error_callback.Run(base::File::FILE_ERROR_NOT_FOUND); return; } uint32 parent_id; if (!CachedPathToId(directory_path.DirName(), &parent_id)) { error_callback.Run(base::File::FILE_ERROR_NOT_FOUND); return; } const MTPDeviceTaskHelper::CreateDirectorySuccessCallback success_callback_wrapper = base::Bind( 
&MTPDeviceDelegateImplLinux::OnDidCreateSingleDirectory, weak_ptr_factory_.GetWeakPtr(), directory_path, success_callback); const MTPDeviceTaskHelper::ErrorCallback error_callback_wrapper = base::Bind(&MTPDeviceDelegateImplLinux::HandleDeviceFileError, weak_ptr_factory_.GetWeakPtr(), error_callback, parent_id); const base::Closure closure = base::Bind(&CreateDirectoryOnUIThread, storage_name_, read_only_, parent_id, directory_path.BaseName().value(), success_callback_wrapper, error_callback_wrapper); EnsureInitAndRunTask(PendingTaskInfo( base::FilePath(), content::BrowserThread::UI, FROM_HERE, closure)); } void MTPDeviceDelegateImplLinux::OnDidGetFileInfoToReadDirectory( uint32 dir_id, const ReadDirectorySuccessCallback& success_callback, const ErrorCallback& error_callback, const base::File::Info& file_info) { DCHECK_CURRENTLY_ON(content::BrowserThread::IO); DCHECK(task_in_progress_); if (!file_info.is_directory) { return HandleDeviceFileError(error_callback, dir_id, base::File::FILE_ERROR_NOT_A_DIRECTORY); } base::Closure task_closure = base::Bind( &ReadDirectoryOnUIThread, storage_name_, read_only_, dir_id, 0 /* max_size */, base::Bind(&MTPDeviceDelegateImplLinux::OnDidReadDirectory, weak_ptr_factory_.GetWeakPtr(), dir_id, success_callback), base::Bind(&MTPDeviceDelegateImplLinux::HandleDeviceFileError, weak_ptr_factory_.GetWeakPtr(), error_callback, dir_id)); content::BrowserThread::PostTask(content::BrowserThread::UI, FROM_HERE, task_closure); } void MTPDeviceDelegateImplLinux::OnDidGetFileInfoToCreateSnapshotFile( scoped_ptr<SnapshotRequestInfo> snapshot_request_info, const base::File::Info& file_info) { DCHECK_CURRENTLY_ON(content::BrowserThread::IO); DCHECK(!current_snapshot_request_info_.get()); DCHECK(snapshot_request_info.get()); DCHECK(task_in_progress_); base::File::Error error = base::File::FILE_OK; if (file_info.is_directory) error = base::File::FILE_ERROR_NOT_A_FILE; else if (file_info.size < 0 || file_info.size > kuint32max) error = 
base::File::FILE_ERROR_FAILED; if (error != base::File::FILE_OK) return HandleDeviceFileError(snapshot_request_info->error_callback, snapshot_request_info->file_id, error); base::File::Info snapshot_file_info(file_info); // Modify the last modified time to null. This prevents the time stamp // verfication in LocalFileStreamReader. snapshot_file_info.last_modified = base::Time(); current_snapshot_request_info_.reset(snapshot_request_info.release()); if (file_info.size == 0) { // Empty snapshot file. return OnDidWriteDataIntoSnapshotFile( snapshot_file_info, current_snapshot_request_info_->snapshot_file_path); } WriteDataIntoSnapshotFile(snapshot_file_info); } void MTPDeviceDelegateImplLinux::OnDidGetDestFileInfoToCopyFileFromLocal( const ErrorCallback& error_callback, const base::File::Info& file_info) { DCHECK_CURRENTLY_ON(content::BrowserThread::IO); if (file_info.is_directory) error_callback.Run(base::File::FILE_ERROR_INVALID_OPERATION); else error_callback.Run(base::File::FILE_ERROR_FAILED); } void MTPDeviceDelegateImplLinux::OnGetDestFileInfoErrorToCopyFileFromLocal( const base::FilePath& source_file_path, const base::FilePath& device_file_path, const CopyFileFromLocalSuccessCallback& success_callback, const ErrorCallback& error_callback, const base::File::Error error) { DCHECK_CURRENTLY_ON(content::BrowserThread::IO); if (error != base::File::FILE_ERROR_NOT_FOUND) { error_callback.Run(error); return; } content::BrowserThread::PostTaskAndReplyWithResult( content::BrowserThread::FILE, FROM_HERE, base::Bind(&OpenFileDescriptor, source_file_path.value().c_str(), O_RDONLY), base::Bind(&MTPDeviceDelegateImplLinux::OnDidOpenFDToCopyFileFromLocal, weak_ptr_factory_.GetWeakPtr(), device_file_path, success_callback, error_callback)); } void MTPDeviceDelegateImplLinux::OnDidCreateSingleDirectory( const base::FilePath& directory_path, const CreateDirectorySuccessCallback& success_callback) { DCHECK_CURRENTLY_ON(content::BrowserThread::IO); success_callback.Run(); 
NotifyFileChange(directory_path.DirName(), storage::WatcherManager::ChangeType::CHANGED); PendingRequestDone(); } void MTPDeviceDelegateImplLinux::OnDidCreateParentDirectoryToCreateDirectory( const base::FilePath& created_directory, const std::vector<base::FilePath>& components, const bool exclusive, const CreateDirectorySuccessCallback& success_callback, const ErrorCallback& error_callback) { DCHECK_CURRENTLY_ON(content::BrowserThread::IO); // Calls ReadDirectoryInternal to fill the cache for created directory. // Calls ReadDirectoryInternal in this method to call it via // EnsureInitAndRunTask. const ReadDirectorySuccessCallback& success_callback_wrapper = base::Bind( &MTPDeviceDelegateImplLinux::OnDidReadDirectoryToCreateDirectory, weak_ptr_factory_.GetWeakPtr(), components, exclusive, success_callback, error_callback); const base::Closure closure = base::Bind(&MTPDeviceDelegateImplLinux::ReadDirectoryInternal, weak_ptr_factory_.GetWeakPtr(), created_directory.DirName(), success_callback_wrapper, error_callback); EnsureInitAndRunTask(PendingTaskInfo( base::FilePath(), content::BrowserThread::IO, FROM_HERE, closure)); } void MTPDeviceDelegateImplLinux::OnCreateParentDirectoryErrorToCreateDirectory( const ErrorCallback& callback, const base::File::Error error) { DCHECK_CURRENTLY_ON(content::BrowserThread::IO); callback.Run(base::File::FILE_ERROR_FAILED); } void MTPDeviceDelegateImplLinux::OnDidReadDirectory( uint32 dir_id, const ReadDirectorySuccessCallback& success_callback, const storage::AsyncFileUtil::EntryList& file_list, bool has_more) { DCHECK_CURRENTLY_ON(content::BrowserThread::IO); FileIdToMTPFileNodeMap::iterator it = file_id_to_node_map_.find(dir_id); DCHECK(it != file_id_to_node_map_.end()); MTPFileNode* dir_node = it->second; // Traverse the MTPFileNode tree to reconstuct the full path for |dir_id|. 
std::deque<std::string> dir_path_parts; MTPFileNode* parent_node = dir_node; while (parent_node->parent()) { dir_path_parts.push_front(parent_node->file_name()); parent_node = parent_node->parent(); } base::FilePath dir_path = device_path_; for (size_t i = 0; i < dir_path_parts.size(); ++i) dir_path = dir_path.Append(dir_path_parts[i]); storage::AsyncFileUtil::EntryList normalized_file_list; for (size_t i = 0; i < file_list.size(); ++i) { normalized_file_list.push_back(file_list[i]); storage::DirectoryEntry& entry = normalized_file_list.back(); // |entry.name| has the file id encoded in it. Decode here. size_t separator_idx = entry.name.find_last_of(','); DCHECK_NE(std::string::npos, separator_idx); std::string file_id_str = entry.name.substr(separator_idx); file_id_str = file_id_str.substr(1); // Get rid of the comma. uint32 file_id = 0; bool ret = base::StringToUint(file_id_str, &file_id); DCHECK(ret); entry.name = entry.name.substr(0, separator_idx); // Refresh the in memory tree. dir_node->EnsureChildExists(entry.name, file_id); child_nodes_seen_.insert(entry.name); // Add to |file_info_cache_|. file_info_cache_[dir_path.Append(entry.name)] = entry; } success_callback.Run(normalized_file_list, has_more); if (has_more) return; // Wait to be called again. // Last call, finish book keeping and continue with the next request. 
dir_node->ClearNonexistentChildren(child_nodes_seen_); child_nodes_seen_.clear(); file_info_cache_.clear(); PendingRequestDone(); } void MTPDeviceDelegateImplLinux::OnDidWriteDataIntoSnapshotFile( const base::File::Info& file_info, const base::FilePath& snapshot_file_path) { DCHECK_CURRENTLY_ON(content::BrowserThread::IO); DCHECK(current_snapshot_request_info_.get()); current_snapshot_request_info_->success_callback.Run( file_info, snapshot_file_path); current_snapshot_request_info_.reset(); PendingRequestDone(); } void MTPDeviceDelegateImplLinux::OnWriteDataIntoSnapshotFileError( base::File::Error error) { DCHECK_CURRENTLY_ON(content::BrowserThread::IO); DCHECK(current_snapshot_request_info_.get()); current_snapshot_request_info_->error_callback.Run(error); current_snapshot_request_info_.reset(); PendingRequestDone(); } void MTPDeviceDelegateImplLinux::OnDidReadBytes( const ReadBytesSuccessCallback& success_callback, const base::File::Info& file_info, int bytes_read) { DCHECK_CURRENTLY_ON(content::BrowserThread::IO); success_callback.Run(file_info, bytes_read); PendingRequestDone(); } void MTPDeviceDelegateImplLinux::OnDidFillFileCache( const base::FilePath& path, const storage::AsyncFileUtil::EntryList& /* file_list */, bool has_more) { DCHECK_CURRENTLY_ON(content::BrowserThread::IO); DCHECK(path.IsParent(pending_tasks_.front().path)); if (has_more) return; // Wait until all entries have been read. pending_tasks_.front().cached_path = path; } void MTPDeviceDelegateImplLinux::OnFillFileCacheFailed( base::File::Error /* error */) { DCHECK_CURRENTLY_ON(content::BrowserThread::IO); // When filling the cache fails for the task at the front of the queue, clear // the path of the task so it will not try to do any more caching. Instead, // the task will just run and fail the CachedPathToId() lookup. 
pending_tasks_.front().path.clear(); } void MTPDeviceDelegateImplLinux::OnDidCreateTemporaryFileToCopyFileLocal( const base::FilePath& source_file_path, const base::FilePath& device_file_path, const CopyFileProgressCallback& progress_callback, const CopyFileLocalSuccessCallback& success_callback, const ErrorCallback& error_callback, const base::FilePath& temporary_file_path) { DCHECK_CURRENTLY_ON(content::BrowserThread::IO); if (temporary_file_path.empty()) { error_callback.Run(base::File::FILE_ERROR_FAILED); return; } CreateSnapshotFile( source_file_path, temporary_file_path, base::Bind( &MTPDeviceDelegateImplLinux::OnDidCreateSnapshotFileOfCopyFileLocal, weak_ptr_factory_.GetWeakPtr(), device_file_path, progress_callback, success_callback, error_callback), base::Bind(&MTPDeviceDelegateImplLinux::HandleCopyFileLocalError, weak_ptr_factory_.GetWeakPtr(), error_callback, temporary_file_path)); } void MTPDeviceDelegateImplLinux::OnDidCreateSnapshotFileOfCopyFileLocal( const base::FilePath& device_file_path, const CopyFileProgressCallback& progress_callback, const CopyFileLocalSuccessCallback& success_callback, const ErrorCallback& error_callback, const base::File::Info& file_info, const base::FilePath& temporary_file_path) { DCHECK_CURRENTLY_ON(content::BrowserThread::IO); // Consider that half of copy is completed by creating a temporary file. progress_callback.Run(file_info.size / 2); // TODO(yawano): Avoid to call external method from internal code. 
CopyFileFromLocal( temporary_file_path, device_file_path, base::Bind( &MTPDeviceDelegateImplLinux::OnDidCopyFileFromLocalOfCopyFileLocal, weak_ptr_factory_.GetWeakPtr(), success_callback, temporary_file_path), base::Bind(&MTPDeviceDelegateImplLinux::HandleCopyFileLocalError, weak_ptr_factory_.GetWeakPtr(), error_callback, temporary_file_path)); } void MTPDeviceDelegateImplLinux::OnDidCopyFileFromLocalOfCopyFileLocal( const CopyFileFromLocalSuccessCallback success_callback, const base::FilePath& temporary_file_path) { DCHECK_CURRENTLY_ON(content::BrowserThread::IO); DeleteTemporaryFile(temporary_file_path); success_callback.Run(); } void MTPDeviceDelegateImplLinux::OnDidMoveFileLocalWithRename( const MoveFileLocalSuccessCallback& success_callback, const uint32 file_id) { DCHECK_CURRENTLY_ON(content::BrowserThread::IO); EvictCachedPathToId(file_id); success_callback.Run(); PendingRequestDone(); } void MTPDeviceDelegateImplLinux::OnDidCopyFileFromLocal( const CopyFileFromLocalSuccessCallback& success_callback, const int source_file_descriptor) { DCHECK_CURRENTLY_ON(content::BrowserThread::IO); const base::Closure closure = base::Bind(&CloseFileDescriptor, source_file_descriptor); content::BrowserThread::PostTask(content::BrowserThread::FILE, FROM_HERE, closure); success_callback.Run(); PendingRequestDone(); } void MTPDeviceDelegateImplLinux::HandleCopyFileLocalError( const ErrorCallback& error_callback, const base::FilePath& temporary_file_path, const base::File::Error error) { DCHECK_CURRENTLY_ON(content::BrowserThread::IO); DeleteTemporaryFile(temporary_file_path); error_callback.Run(error); } void MTPDeviceDelegateImplLinux::HandleCopyFileFromLocalError( const ErrorCallback& error_callback, const int source_file_descriptor, base::File::Error error) { DCHECK_CURRENTLY_ON(content::BrowserThread::IO); const base::Closure closure = base::Bind(&CloseFileDescriptor, source_file_descriptor); content::BrowserThread::PostTask(content::BrowserThread::FILE, FROM_HERE, 
closure); error_callback.Run(error); PendingRequestDone(); } void MTPDeviceDelegateImplLinux::OnDidDeleteObject( const uint32 object_id, const DeleteObjectSuccessCallback success_callback) { DCHECK_CURRENTLY_ON(content::BrowserThread::IO); EvictCachedPathToId(object_id); success_callback.Run(); PendingRequestDone(); } void MTPDeviceDelegateImplLinux::HandleDeleteFileOrDirectoryError( const ErrorCallback& error_callback, base::File::Error error) { DCHECK_CURRENTLY_ON(content::BrowserThread::IO); error_callback.Run(error); PendingRequestDone(); } void MTPDeviceDelegateImplLinux::HandleDeviceFileError( const ErrorCallback& error_callback, uint32 file_id, base::File::Error error) { DCHECK_CURRENTLY_ON(content::BrowserThread::IO); EvictCachedPathToId(file_id); error_callback.Run(error); PendingRequestDone(); } base::FilePath MTPDeviceDelegateImplLinux::NextUncachedPathComponent( const base::FilePath& path, const base::FilePath& cached_path) const { DCHECK_CURRENTLY_ON(content::BrowserThread::IO); DCHECK(cached_path.empty() || cached_path.IsParent(path)); base::FilePath uncached_path; std::string device_relpath = GetDeviceRelativePath(device_path_, path); if (!device_relpath.empty() && device_relpath != kRootPath) { uncached_path = device_path_; std::vector<std::string> device_relpath_components; base::SplitString(device_relpath, '/', &device_relpath_components); DCHECK(!device_relpath_components.empty()); bool all_components_cached = true; const MTPFileNode* current_node = root_node_.get(); for (size_t i = 0; i < device_relpath_components.size(); ++i) { current_node = current_node->GetChild(device_relpath_components[i]); if (!current_node) { // With a cache miss, check if it is a genuine failure. If so, pretend // the entire |path| is cached, so there is no further attempt to do // more caching. The actual operation will then fail. 
all_components_cached = !cached_path.empty() && (uncached_path == cached_path); break; } uncached_path = uncached_path.Append(device_relpath_components[i]); } if (all_components_cached) uncached_path.clear(); } return uncached_path; } void MTPDeviceDelegateImplLinux::FillFileCache( const base::FilePath& uncached_path) { DCHECK_CURRENTLY_ON(content::BrowserThread::IO); DCHECK(task_in_progress_); ReadDirectorySuccessCallback success_callback = base::Bind(&MTPDeviceDelegateImplLinux::OnDidFillFileCache, weak_ptr_factory_.GetWeakPtr(), uncached_path); ErrorCallback error_callback = base::Bind(&MTPDeviceDelegateImplLinux::OnFillFileCacheFailed, weak_ptr_factory_.GetWeakPtr()); ReadDirectoryInternal(uncached_path, success_callback, error_callback); } bool MTPDeviceDelegateImplLinux::CachedPathToId(const base::FilePath& path, uint32* id) const { DCHECK(id); std::string device_relpath = GetDeviceRelativePath(device_path_, path); if (device_relpath.empty()) return false; std::vector<std::string> device_relpath_components; if (device_relpath != kRootPath) base::SplitString(device_relpath, '/', &device_relpath_components); const MTPFileNode* current_node = root_node_.get(); for (size_t i = 0; i < device_relpath_components.size(); ++i) { current_node = current_node->GetChild(device_relpath_components[i]); if (!current_node) return false; } *id = current_node->file_id(); return true; } void MTPDeviceDelegateImplLinux::EvictCachedPathToId(const uint32 id) { FileIdToMTPFileNodeMap::iterator it = file_id_to_node_map_.find(id); if (it != file_id_to_node_map_.end()) { DCHECK(!it->second->HasChildren()); MTPFileNode* parent = it->second->parent(); if (parent) { const bool ret = parent->DeleteChild(id); DCHECK(ret); } } } void CreateMTPDeviceAsyncDelegate( const std::string& device_location, const bool read_only, const CreateMTPDeviceAsyncDelegateCallback& callback) { DCHECK_CURRENTLY_ON(content::BrowserThread::IO); callback.Run(new MTPDeviceDelegateImplLinux(device_location, 
read_only)); }<|fim▁end|>
success_callback_wrapper, error_callback_wrapper); EnsureInitAndRunTask(PendingTaskInfo( base::FilePath(), content::BrowserThread::UI, FROM_HERE, closure));
<|file_name|>convert_test.go<|end_file_name|><|fim▁begin|>package service import ( "path/filepath" "reflect" "testing" "github.com/docker/docker/api/types/container" "github.com/docker/libcompose/config" "github.com/docker/libcompose/docker/ctx" "github.com/docker/libcompose/lookup" "github.com/docker/libcompose/yaml" shlex "github.com/flynn/go-shlex" "github.com/stretchr/testify/assert" ) func TestParseCommand(t *testing.T) { exp := []string{"sh", "-c", "exec /opt/bin/flanneld -logtostderr=true -iface=${NODE_IP}"} cmd, err := shlex.Split("sh -c 'exec /opt/bin/flanneld -logtostderr=true -iface=${NODE_IP}'") assert.Nil(t, err) assert.Equal(t, exp, cmd) } func TestParseBindsAndVolumes(t *testing.T) { ctx := &ctx.Context{} ctx.ComposeFiles = []string{"foo/docker-compose.yml"} ctx.ResourceLookup = &lookup.FileResourceLookup{} abs, err := filepath.Abs(".") assert.Nil(t, err) cfg, hostCfg, err := Convert(&config.ServiceConfig{ Volumes: &yaml.Volumes{ Volumes: []*yaml.Volume{ { Destination: "/foo", }, { Source: "/home", Destination: "/home", }, { Destination: "/bar/baz", }, { Source: ".", Destination: "/home", }, { Source: "/usr/lib", Destination: "/usr/lib", AccessMode: "ro", }, }, }, }, ctx.Context, nil) assert.Nil(t, err) assert.Equal(t, map[string]struct{}{"/foo": {}, "/bar/baz": {}}, cfg.Volumes) assert.Equal(t, []string{"/home:/home", abs + "/foo:/home", "/usr/lib:/usr/lib:ro"}, hostCfg.Binds) } func TestParseLabels(t *testing.T) { ctx := &ctx.Context{} ctx.ComposeFiles = []string{"foo/docker-compose.yml"} ctx.ResourceLookup = &lookup.FileResourceLookup{} bashCmd := "bash" fooLabel := "foo.label" fooLabelValue := "service.config.value" sc := &config.ServiceConfig{ Entrypoint: yaml.Command([]string{bashCmd}), Labels: yaml.SliceorMap{fooLabel: "service.config.value"}, } cfg, _, err := Convert(sc, ctx.Context, nil) assert.Nil(t, err) cfg.Labels[fooLabel] = "FUN" cfg.Entrypoint[0] = "less" assert.Equal(t, fooLabelValue, sc.Labels[fooLabel]) assert.Equal(t, "FUN", 
cfg.Labels[fooLabel]) assert.Equal(t, yaml.Command{bashCmd}, sc.Entrypoint) assert.Equal(t, []string{"less"}, []string(cfg.Entrypoint)) } func TestDNSOpt(t *testing.T) { ctx := &ctx.Context{} sc := &config.ServiceConfig{ DNSOpts: []string{ "use-vc", "no-tld-query", }, } _, hostCfg, err := Convert(sc, ctx.Context, nil) assert.Nil(t, err) assert.True(t, reflect.DeepEqual([]string{ "use-vc", "no-tld-query", }, hostCfg.DNSOptions)) } func TestGroupAdd(t *testing.T) { ctx := &ctx.Context{} sc := &config.ServiceConfig{ GroupAdd: []string{ "root", "1", }, } _, hostCfg, err := Convert(sc, ctx.Context, nil) assert.Nil(t, err) assert.True(t, reflect.DeepEqual([]string{ "root", "1", }, hostCfg.GroupAdd)) } func TestIsolation(t *testing.T) { ctx := &ctx.Context{} sc := &config.ServiceConfig{ Isolation: "default", } _, hostCfg, err := Convert(sc, ctx.Context, nil) assert.Nil(t, err) assert.Equal(t, container.Isolation("default"), hostCfg.Isolation) } func TestMemSwappiness(t *testing.T) { ctx := &ctx.Context{} sc := &config.ServiceConfig{ MemSwappiness: yaml.MemStringorInt(10), } _, hostCfg, err := Convert(sc, ctx.Context, nil) assert.Nil(t, err) assert.Equal(t, int64(10), *hostCfg.MemorySwappiness) } func TestMemReservation(t *testing.T) { ctx := &ctx.Context{} sc := &config.ServiceConfig{ MemReservation: 100000, } _, hostCfg, err := Convert(sc, ctx.Context, nil) assert.Nil(t, err) assert.Equal(t, int64(100000), hostCfg.MemoryReservation) } func TestOomKillDisable(t *testing.T) { ctx := &ctx.Context{} sc := &config.ServiceConfig{ OomKillDisable: true, } _, hostCfg, err := Convert(sc, ctx.Context, nil) assert.Nil(t, err) assert.Equal(t, true, *hostCfg.OomKillDisable) } func TestOomScoreAdj(t *testing.T) { ctx := &ctx.Context{} sc := &config.ServiceConfig{ OomScoreAdj: 500, } _, hostCfg, err := Convert(sc, ctx.Context, nil) assert.Nil(t, err) assert.Equal(t, 500, hostCfg.OomScoreAdj) } func TestStopSignal(t *testing.T) { ctx := &ctx.Context{} sc := &config.ServiceConfig{ 
StopSignal: "SIGTERM", } cfg, _, err := Convert(sc, ctx.Context, nil) assert.Nil(t, err) assert.Equal(t, "SIGTERM", cfg.StopSignal) } func TestTmpfs(t *testing.T) { ctx := &ctx.Context{} sc := &config.ServiceConfig{ Tmpfs: yaml.Stringorslice{"/run"}, } _, hostCfg, err := Convert(sc, ctx.Context, nil) assert.Nil(t, err) assert.True(t, reflect.DeepEqual(map[string]string{ "/run": "", }, hostCfg.Tmpfs)) sc = &config.ServiceConfig{ Tmpfs: yaml.Stringorslice{"/run:rw,noexec,nosuid,size=65536k"},<|fim▁hole|> assert.True(t, reflect.DeepEqual(map[string]string{ "/run": "rw,noexec,nosuid,size=65536k", }, hostCfg.Tmpfs)) }<|fim▁end|>
} _, hostCfg, err = Convert(sc, ctx.Context, nil) assert.Nil(t, err)
<|file_name|>rasterize.py<|end_file_name|><|fim▁begin|>import json import logging from math import ceil import os import click import cligj from .helpers import resolve_inout from . import options import rasterio from rasterio.errors import CRSError from rasterio.transform import Affine from rasterio.coords import disjoint_bounds logger = logging.getLogger('rio') # Common options used below # Unlike the version in cligj, this one doesn't require values. files_inout_arg = click.argument( 'files', nargs=-1, type=click.Path(resolve_path=True), metavar="INPUTS... OUTPUT") @click.command(short_help='Rasterize features.') @files_inout_arg @options.output_opt @cligj.format_opt @options.like_file_opt @options.bounds_opt @options.dimensions_opt @options.resolution_opt @click.option('--src-crs', '--src_crs', 'src_crs', default=None, help='Source coordinate reference system. Limited to EPSG ' 'codes for now. Used as output coordinate system if output ' 'does not exist or --like option is not used. ' 'Default: EPSG:4326') @options.all_touched_opt @click.option('--default-value', '--default_value', 'default_value', type=float, default=1, help='Default value for rasterized pixels') @click.option('--fill', type=float, default=0, help='Fill value for all pixels not overlapping features. Will ' 'be evaluated as NoData pixels for output. Default: 0') @click.option('--property', 'prop', type=str, default=None, help='Property in ' 'GeoJSON features to use for rasterized values. Any features ' 'that lack this property will be given --default_value instead.') @options.force_overwrite_opt @options.creation_options @click.pass_context def rasterize( ctx, files, output, driver, like, bounds, dimensions, res, src_crs, all_touched, default_value, fill, prop, force_overwrite, creation_options): """Rasterize GeoJSON into a new or existing raster. If the output raster exists, rio-rasterize will rasterize feature values into all bands of that raster. 
The GeoJSON is assumed to be in the same coordinate reference system as the output unless --src-crs is provided. --default_value or property values when using --property must be using a data type valid for the data type of that raster. If a template raster is provided using the --like option, the affine transform and data type from that raster will be used to create the output. Only a single band will be output. The GeoJSON is assumed to be in the same coordinate reference system unless --src-crs is provided. --default_value or property values when using --property must be using a data type valid for the data type of that raster. --driver, --bounds, --dimensions, and --res are ignored when output exists or --like raster is provided If the output does not exist and --like raster is not provided, the input GeoJSON will be used to determine the bounds of the output unless provided using --bounds. --dimensions or --res are required in this case. If --res is provided, the bottom and right coordinates of bounds are ignored. Note: The GeoJSON is not projected to match the coordinate reference system of the output or --like rasters at this time. This functionality may be added in the future. """ from rasterio.crs import CRS from rasterio.features import rasterize from rasterio.features import bounds as calculate_bounds verbosity = (ctx.obj and ctx.obj.get('verbosity')) or 1 output, files = resolve_inout( files=files, output=output, force_overwrite=force_overwrite) bad_param = click.BadParameter('invalid CRS. 
Must be an EPSG code.', ctx, param=src_crs, param_hint='--src_crs') has_src_crs = src_crs is not None try: src_crs = CRS.from_string(src_crs) if has_src_crs else CRS.from_string('EPSG:4326') except CRSError: raise bad_param # If values are actually meant to be integers, we need to cast them # as such or rasterize creates floating point outputs if default_value == int(default_value): default_value = int(default_value) if fill == int(fill): fill = int(fill) with rasterio.Env(CPL_DEBUG=verbosity > 2): def feature_value(feature): if prop and 'properties' in feature: return feature['properties'].get(prop, default_value) return default_value with click.open_file(files.pop(0) if files else '-') as gj_f: geojson = json.loads(gj_f.read()) if 'features' in geojson: geometries = [] for f in geojson['features']: geometries.append((f['geometry'], feature_value(f))) elif 'geometry' in geojson: geometries = ((geojson['geometry'], feature_value(geojson)), ) else: raise click.BadParameter('Invalid GeoJSON', param=input, param_hint='input') geojson_bounds = geojson.get('bbox', calculate_bounds(geojson)) if os.path.exists(output): with rasterio.open(output, 'r+') as out: if has_src_crs and src_crs != out.crs: raise click.BadParameter('GeoJSON does not match crs of ' 'existing output raster', param='input', param_hint='input') if disjoint_bounds(geojson_bounds, out.bounds): click.echo("GeoJSON outside bounds of existing output " "raster. 
Are they in different coordinate " "reference systems?", err=True) meta = out.meta.copy() result = rasterize( geometries, out_shape=(meta['height'], meta['width']), transform=meta.get('affine', meta['transform']), all_touched=all_touched, dtype=meta.get('dtype', None), default_value=default_value, fill=fill) for bidx in range(1, meta['count'] + 1): data = out.read(bidx, masked=True) # Burn in any non-fill pixels, and update mask accordingly ne = result != fill data[ne] = result[ne] data.mask[ne] = False out.write(data, indexes=bidx) else: if like is not None: template_ds = rasterio.open(like)<|fim▁hole|> raise click.BadParameter('GeoJSON does not match crs of ' '--like raster', param='input', param_hint='input') if disjoint_bounds(geojson_bounds, template_ds.bounds): click.echo("GeoJSON outside bounds of --like raster. " "Are they in different coordinate reference " "systems?", err=True) kwargs = template_ds.meta.copy() kwargs['count'] = 1 # DEPRECATED # upgrade transform to affine object or we may get an invalid # transform set on output kwargs['transform'] = template_ds.affine template_ds.close() else: bounds = bounds or geojson_bounds if src_crs.is_geographic: if (bounds[0] < -180 or bounds[2] > 180 or bounds[1] < -80 or bounds[3] > 80): raise click.BadParameter( "Bounds are beyond the valid extent for " "EPSG:4326.", ctx, param=bounds, param_hint='--bounds') if dimensions: width, height = dimensions res = ( (bounds[2] - bounds[0]) / float(width), (bounds[3] - bounds[1]) / float(height) ) else: if not res: raise click.BadParameter( 'pixel dimensions are required', ctx, param=res, param_hint='--res') elif len(res) == 1: res = (res[0], res[0]) width = max(int(ceil((bounds[2] - bounds[0]) / float(res[0]))), 1) height = max(int(ceil((bounds[3] - bounds[1]) / float(res[1]))), 1) kwargs = { 'count': 1, 'crs': src_crs, 'width': width, 'height': height, 'transform': Affine(res[0], 0, bounds[0], 0, -res[1], bounds[3]), 'driver': driver } kwargs.update(**creation_options) 
result = rasterize( geometries, out_shape=(kwargs['height'], kwargs['width']), transform=kwargs.get('affine', kwargs['transform']), all_touched=all_touched, dtype=kwargs.get('dtype', None), default_value=default_value, fill=fill) if 'dtype' not in kwargs: kwargs['dtype'] = result.dtype kwargs['nodata'] = fill with rasterio.open(output, 'w', **kwargs) as out: out.write(result, indexes=1)<|fim▁end|>
if has_src_crs and src_crs != template_ds.crs:
<|file_name|>app.js<|end_file_name|><|fim▁begin|><|fim▁hole|> SanctuaryApp.config(['$routeProvider', function($routeProvider) { $routeProvider .when('/youtubelist', { templateUrl: 'partials/youtubelist.html', controller: 'YoutubeListController' }) .otherwise({ redirectTo: '/youtubelist' }); }]);<|fim▁end|>
var SanctuaryApp = angular.module('SanctuaryApp', [ 'ngRoute', 'SanctuaryControllers' ]);
<|file_name|>admintools_bootstrap.py<|end_file_name|><|fim▁begin|>from django import template from django.utils.safestring import mark_safe from django.utils.html import escape from django.utils.translation import ugettext as _ from django.contrib.admin.views.main import PAGE_VAR, ALL_VAR from django.conf import settings from django.contrib.sites.models import Site from BeautifulSoup import BeautifulSoup register = template.Library() @register.simple_tag def atb_site_link(): if settings.ADMINTOOLS_BOOTSTRAP_SITE_LINK: return ''' <li><a href="%s" class="top-icon" title="%s" rel="popover" data-placement="below"><i class="icon-home icon-white"></i></a></li> <li class="divider-vertical"></li> ''' % (settings.ADMINTOOLS_BOOTSTRAP_SITE_LINK, _('Open site')) else: return '' @register.simple_tag def atb_site_name(): if 'django.contrib.sites' in settings.INSTALLED_APPS: return Site.objects.get_current().name else: return _('Django site') @register.simple_tag def bootstrap_page_url(cl, page_num): """ generates page URL for given page_num, uses for prev and next links django numerates pages from 0 """ return escape(cl.get_query_string({PAGE_VAR: page_num-1})) DOT = '.' def bootstrap_paginator_number(cl,i, li_class=None): """ Generates an individual page index link in a paginated list. """ if i == DOT: return u'<li><a href="#">...</a></li>' elif i == cl.page_num: return mark_safe(u'<li class="active"><a href="#">%d</a></li> ' % (i+1)) else: return mark_safe(u'<li><a href="%s">%d</a></li>' % (escape(cl.get_query_string({PAGE_VAR: i})), i+1)) paginator_number = register.simple_tag(bootstrap_paginator_number) def bootstrap_pagination(cl): """ Generates the series of links to the pages in a paginated list. """ paginator, page_num = cl.paginator, cl.page_num pagination_required = (not cl.show_all or not cl.can_show_all) and cl.multi_page if not pagination_required: page_range = [] else: ON_EACH_SIDE = 3 ON_ENDS = 2 # If there are 10 or fewer pages, display links to every page. 
# Otherwise, do some fancy if paginator.num_pages <= 10: page_range = range(paginator.num_pages) else: # Insert "smart" pagination links, so that there are always ON_ENDS # links at either end of the list of pages, and there are always # ON_EACH_SIDE links at either end of the "current page" link. page_range = [] if page_num > (ON_EACH_SIDE + ON_ENDS): page_range.extend(range(0, ON_EACH_SIDE - 1)) page_range.append(DOT) page_range.extend(range(page_num - ON_EACH_SIDE, page_num + 1)) else: page_range.extend(range(0, page_num + 1)) if page_num < (paginator.num_pages - ON_EACH_SIDE - ON_ENDS - 1): page_range.extend(range(page_num + 1, page_num + ON_EACH_SIDE + 1)) page_range.append(DOT) page_range.extend(range(paginator.num_pages - ON_ENDS, paginator.num_pages)) else: page_range.extend(range(page_num + 1, paginator.num_pages)) need_show_all_link = cl.can_show_all and not cl.show_all and cl.multi_page<|fim▁hole|> 'pagination_required': pagination_required, 'show_all_url': need_show_all_link and cl.get_query_string({ALL_VAR: ''}), 'page_range': page_range, 'ALL_VAR': ALL_VAR, '1': 1, 'curr_page': cl.paginator.page(cl.page_num+1), } bootstrap_pagination = register.inclusion_tag('admin/pagination.html')(bootstrap_pagination) # breadcrumbs tag class BreadcrumbsNode(template.Node): """ renders bootstrap breadcrumbs list. usage:: {% breadcrumbs %} url1|text1 url2|text2 text3 {% endbreadcrumbs %} | is delimiter by default, you can use {% breadcrumbs delimiter_char %} to change it. 
lines without delimiters are interpreted as active breadcrumbs """ def __init__(self, nodelist, delimiter): self.nodelist = nodelist self.delimiter = delimiter def render(self, context): data = self.nodelist.render(context).strip() if not data: return '' try: data.index('<div class="breadcrumbs">') except ValueError: lines = [ l.strip().split(self.delimiter) for l in data.split("\n") if l.strip() ] else: # data is django-style breadcrumbs, parsing try: soup = BeautifulSoup(data) lines = [ (a.get('href'), a.text) for a in soup.findAll('a')] lines.append([soup.find('div').text.split('&rsaquo;')[-1].strip()]) except Exception, e: lines = [["Cannot parse breadcrumbs: %s" % unicode(e)]] out = '<ul class="breadcrumb">' curr = 0 for d in lines: if d[0][0] == '*': active = ' class="active"' d[0] = d[0][1:] else: active = '' curr += 1 if (len(lines) == curr): # last divider = '' else: divider = '<span class="divider">/</span>' if len(d) == 2: out += '<li%s><a href="%s">%s</a>%s</li>' % (active, d[0], d[1], divider) elif len(d) == 1: out += '<li%s>%s%s</li>' % (active, d[0], divider) else: raise ValueError('Invalid breadcrumb line: %s' % self.delimiter.join(d)) out += '</ul>' return out @register.tag(name='breadcrumbs') def do_breadcrumbs(parser, token): try: tag_name, delimiter = token.contents.split(None, 1) except ValueError: delimiter = '|' nodelist = parser.parse(('endbreadcrumbs',)) parser.delete_first_token() return BreadcrumbsNode(nodelist, delimiter)<|fim▁end|>
return { 'cl': cl,
<|file_name|>rda_main.cc<|end_file_name|><|fim▁begin|>#include <iostream> #include <fstream> #include <vector> #include <string> #include <stdexcept> #include <boost/program_options.hpp> #include <boost/lexical_cast.hpp> #include <boost/tuple/tuple.hpp> #include <btl/fasta_writer.h> #include "ereal.h" #include "dna_sequence.h" #include "main_io.h" #include "io.h" #include "dna_alignment_sequence.h" #include "rda_functions.h" #include "krda_improve.h" #include "needleman.h" #include "nw_model_parameters.h" #include "alignment_functions.h" #include "utility.h" #include "annotation.h" #define PARAMETER_ERROR 1 // forward declare version variable extern const char* build_string; int main( int argc, char* argv[] ) { // load program options using namespace boost::program_options; using namespace std; <|fim▁hole|> desc.add_options() ( "help", "produce help message" ) ( "target,t", value<string>(), "first sequence" ) ( "query,q", value<string>(), "second sequence" ) ( "out,o", value<string>(), "output alignment file" ) ( "parameter-file,p", value<string>(), "set parameters from file" ) ( "align-method", value<unsigned int>()->default_value(0), "alignment method: 0 RDA, 1 needleman" ) ( "block-size,k", value<unsigned int>()->default_value(40), "maximum block size" ) ( "block-breaks", "add pure gap sites as block breaks" ) ( "output-maf", "output alignment in MAF format") ; variables_map vm; store( parse_command_line( argc, argv, desc ), vm ); notify(vm); if( (argc < 2) || vm.count("help")) { cout << program_name << endl << desc << endl; return 1; } require_option( vm, "target", PARAMETER_ERROR ); require_option( vm, "query", PARAMETER_ERROR ); unsigned int K = vm.count("block-size") ? 
vm["block-size"].as<unsigned int>() : 40; // precompute lookup tables ereal::init(); try { dna_sequence_ptr target,query; target = load_sequence( vm["target"].as<string>() ); query = load_sequence( vm["query"].as<string>() ); require_size_above( *target, 1 ); require_size_above( *query, 1 ); krda_improve<needleman> krda; krda.set_k(K); needleman nw; // load parameter file if needed if( vm.count("parameter-file") ) { nw_model_parameters_ptr mp = load_parameters_from_file( vm["parameter-file"].as<string>() ); krda.set_parameters( *mp ); nw.set_s( mp->pr_open, mp->p, mp->q ); nw.set_mean_gap_length( mp->mean_gap_length ); } if( vm.count("block-breaks") ) krda.set_block_marker( true ); // build final alignment by aligning each identified region with needleman wunch dna_alignment_sequence_ptr alignmenta = new_dna_alignment_sequence(); dna_alignment_sequence_ptr alignmentb = new_dna_alignment_sequence(); pairwise_dna_alignment final = pairwise_dna_alignment( alignmenta, alignmentb, 0 ); dna_sequence_region_ptr alla = new_dna_sequence_region(target), allb = new_dna_sequence_region(query); vector< pair<dna_sequence_region_ptr,dna_sequence_region_ptr> > training_set; training_set.push_back( make_pair(alla,allb) ); // open output file now so that if there is a problem we don't waste time aligning stuff ofstream out_file; if( vm.count("out") ) { out_file.open( vm["out"].as<string>().c_str(), ios::out|ios::trunc ); if( out_file.fail() ) { cerr << "unable to open " << vm["out"].as<string>() << " for writing" << endl; return 4; } } annotation_ptr myann; switch( vm["align-method"].as<unsigned int>() ) { case 1: final = nw.align(*alla,*allb); break; case 0: final = krda.align(*alla,*allb); break; default: throw runtime_error("unknown alignment method"); } // label alignments final.a->tags["accession"] = target->tags["accession"]; final.b->tags["accession"] = query->tags["accession"]; string info = program_name; switch( vm["align-method"].as<unsigned int>() ) { case 1: info += 
string(" nm score=") + boost::lexical_cast<string>((double)final.score); break; case 0: info += string(" rda k=") + boost::lexical_cast<string>(K); info += string(" score=") + boost::lexical_cast<string>((double)final.score); } final.a->tags["description"] = final.b->tags["description"] = info; ostream *out = vm.count("out") ? &out_file : &cout; if( vm.count("output-maf") ) { // Output MAF format. *out << "##maf version=1" << endl; *out << "a score=" << final.score << endl; *out << "s " << final.a->tags["accession"] << "\t0\t" << target->data.size() << "\t+\t" << target->data.size() << "\t"; for( dna_alignment_sequence_data::const_iterator j = final.a->data.begin(); j != final.a->data.end(); ++j ) *out << *j; *out << endl; *out << "s " << final.b->tags["accession"] << "\t0\t" << query->data.size() << "\t+\t" << query->data.size() << "\t"; for( dna_alignment_sequence_data::const_iterator j = final.b->data.begin(); j != final.b->data.end(); ++j ) *out << *j; *out << endl; } else { *out << btl::fasta_writer( *final.a ); *out << btl::fasta_writer( *final.b ); } if( vm.count("out") ) out_file.close(); } catch( exception &e ) { cerr << "FATAL: " << e.what() << endl; } return 0; }<|fim▁end|>
string program_name("rda build "); program_name += string(build_string); options_description desc("Allowed options");
<|file_name|>button.module.ts<|end_file_name|><|fim▁begin|>import {NgModule} from '@angular/core'; import {CommonModule} from '@angular/common'; import {FormlyModule} from '@ngx-formly/core';<|fim▁hole|>import {ReactiveFormsModule} from '@angular/forms'; import {JigsawButtonModule} from "@rdkmaster/jigsaw"; import {FormlyJigsawFormFieldModule} from "@ngx-formly/jigsaw/form-field"; import {FormlyFieldButton} from "./button.type"; @NgModule({ declarations: [FormlyFieldButton], imports: [ CommonModule, ReactiveFormsModule, JigsawButtonModule, FormlyJigsawFormFieldModule, FormlyModule.forChild({ types: [ { name: 'button', component: FormlyFieldButton, wrappers: ['form-field'], } ], }), ], }) export class FormlyJigsawButtonModule { }<|fim▁end|>
<|file_name|>_collections.py<|end_file_name|><|fim▁begin|># util/_collections.py # Copyright (C) 2005-2020 the SQLAlchemy authors and contributors # <see AUTHORS file> # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php """Collection classes and helpers.""" from __future__ import absolute_import import operator import types import weakref from .compat import binary_types from .compat import collections_abc from .compat import itertools_filterfalse from .compat import py2k from .compat import string_types from .compat import threading EMPTY_SET = frozenset() class AbstractKeyedTuple(tuple): __slots__ = () def keys(self): """Return a list of string key names for this :class:`.KeyedTuple`. .. seealso:: :attr:`.KeyedTuple._fields` """ return list(self._fields) class KeyedTuple(AbstractKeyedTuple): """``tuple`` subclass that adds labeled names. E.g.:: >>> k = KeyedTuple([1, 2, 3], labels=["one", "two", "three"]) >>> k.one 1 >>> k.two 2 Result rows returned by :class:`_query.Query` that contain multiple ORM entities and/or column expressions make use of this class to return rows. The :class:`.KeyedTuple` exhibits similar behavior to the ``collections.namedtuple()`` construct provided in the Python standard library, however is architected very differently. Unlike ``collections.namedtuple()``, :class:`.KeyedTuple` is does not rely on creation of custom subtypes in order to represent a new series of keys, instead each :class:`.KeyedTuple` instance receives its list of keys in place. The subtype approach of ``collections.namedtuple()`` introduces significant complexity and performance overhead, which is not necessary for the :class:`_query.Query` object's use case. .. 
seealso:: :ref:`ormtutorial_querying` """ def __new__(cls, vals, labels=None): t = tuple.__new__(cls, vals) if labels: t.__dict__.update(zip(labels, vals)) else: labels = [] t.__dict__["_labels"] = labels return t @property def _fields(self): """Return a tuple of string key names for this :class:`.KeyedTuple`. This method provides compatibility with ``collections.namedtuple()``. .. seealso:: :meth:`.KeyedTuple.keys` """ return tuple([l for l in self._labels if l is not None]) def __setattr__(self, key, value): raise AttributeError("Can't set attribute: %s" % key) def _asdict(self): """Return the contents of this :class:`.KeyedTuple` as a dictionary. This method provides compatibility with ``collections.namedtuple()``, with the exception that the dictionary returned is **not** ordered. """ return {key: self.__dict__[key] for key in self.keys()} class _LW(AbstractKeyedTuple): __slots__ = () def __new__(cls, vals): return tuple.__new__(cls, vals) def __reduce__(self): # for pickling, degrade down to the regular # KeyedTuple, thus avoiding anonymous class pickling # difficulties return KeyedTuple, (list(self), self._real_fields) def _asdict(self): """Return the contents of this :class:`.KeyedTuple` as a dictionary.""" d = dict(zip(self._real_fields, self)) d.pop(None, None) return d class ImmutableContainer(object): def _immutable(self, *arg, **kw): raise TypeError("%s object is immutable" % self.__class__.__name__) __delitem__ = __setitem__ = __setattr__ = _immutable class immutabledict(ImmutableContainer, dict): clear = pop = popitem = setdefault = update = ImmutableContainer._immutable def __new__(cls, *args): new = dict.__new__(cls) dict.__init__(new, *args) return new def __init__(self, *args): pass def __reduce__(self): return immutabledict, (dict(self),) def union(self, d): if not d: return self elif not self: if isinstance(d, immutabledict): return d else: return immutabledict(d) else: d2 = immutabledict(self) dict.update(d2, d) return d2 def __repr__(self): 
return "immutabledict(%s)" % dict.__repr__(self) class Properties(object): """Provide a __getattr__/__setattr__ interface over a dict.""" __slots__ = ("_data",) def __init__(self, data): object.__setattr__(self, "_data", data) def __len__(self): return len(self._data) def __iter__(self): return iter(list(self._data.values())) def __dir__(self): return dir(super(Properties, self)) + [ str(k) for k in self._data.keys() ] def __add__(self, other): return list(self) + list(other) def __setitem__(self, key, obj): self._data[key] = obj def __getitem__(self, key): return self._data[key] def __delitem__(self, key): del self._data[key] def __setattr__(self, key, obj): self._data[key] = obj def __getstate__(self): return {"_data": self._data} def __setstate__(self, state): object.__setattr__(self, "_data", state["_data"]) def __getattr__(self, key): try: return self._data[key] except KeyError: raise AttributeError(key) def __contains__(self, key): return key in self._data def as_immutable(self): """Return an immutable proxy for this :class:`.Properties`.""" return ImmutableProperties(self._data) def update(self, value): self._data.update(value) def get(self, key, default=None): if key in self: return self[key] else: return default def keys(self): return list(self._data) def values(self): return list(self._data.values()) def items(self): return list(self._data.items()) def has_key(self, key): return key in self._data def clear(self): self._data.clear() class OrderedProperties(Properties): """Provide a __getattr__/__setattr__ interface with an OrderedDict as backing store.""" __slots__ = () def __init__(self): Properties.__init__(self, OrderedDict()) class ImmutableProperties(ImmutableContainer, Properties): """Provide immutable dict/object attribute to an underlying dictionary.""" __slots__ = () class OrderedDict(dict): """A dict that returns keys/values/items in the order they were added.""" __slots__ = ("_list",) def __reduce__(self): return OrderedDict, (self.items(),) def 
__init__(self, ____sequence=None, **kwargs): self._list = [] if ____sequence is None: if kwargs: self.update(**kwargs) else: self.update(____sequence, **kwargs) def clear(self): self._list = [] dict.clear(self) def copy(self): return self.__copy__() def __copy__(self): return OrderedDict(self) def sort(self, *arg, **kw): self._list.sort(*arg, **kw) def update(self, ____sequence=None, **kwargs): if ____sequence is not None: if hasattr(____sequence, "keys"): for key in ____sequence.keys(): self.__setitem__(key, ____sequence[key]) else: for key, value in ____sequence: self[key] = value if kwargs: self.update(kwargs) def setdefault(self, key, value): if key not in self: self.__setitem__(key, value) return value else: return self.__getitem__(key) def __iter__(self): return iter(self._list) def keys(self): return list(self) def values(self): return [self[key] for key in self._list] def items(self): return [(key, self[key]) for key in self._list] if py2k: def itervalues(self): return iter(self.values()) def iterkeys(self): return iter(self) def iteritems(self): return iter(self.items()) def __setitem__(self, key, obj): if key not in self: try: self._list.append(key) except AttributeError: # work around Python pickle loads() with # dict subclass (seems to ignore __setstate__?) 
self._list = [key] dict.__setitem__(self, key, obj) def __delitem__(self, key): dict.__delitem__(self, key) self._list.remove(key) def pop(self, key, *default): present = key in self value = dict.pop(self, key, *default) if present: self._list.remove(key) return value def popitem(self): item = dict.popitem(self) self._list.remove(item[0]) return item class OrderedSet(set): def __init__(self, d=None): set.__init__(self) self._list = [] if d is not None: self._list = unique_list(d) set.update(self, self._list) else: self._list = [] def add(self, element): if element not in self: self._list.append(element) set.add(self, element) def remove(self, element): set.remove(self, element) self._list.remove(element) def insert(self, pos, element): if element not in self: self._list.insert(pos, element) set.add(self, element) def discard(self, element): if element in self: self._list.remove(element) set.remove(self, element) def clear(self): set.clear(self) self._list = [] def __getitem__(self, key): return self._list[key] def __iter__(self): return iter(self._list) def __add__(self, other): return self.union(other) def __repr__(self): return "%s(%r)" % (self.__class__.__name__, self._list) __str__ = __repr__ def update(self, iterable): for e in iterable: if e not in self: self._list.append(e) set.add(self, e) return self __ior__ = update def union(self, other): result = self.__class__(self) result.update(other) return result __or__ = union def intersection(self, other): other = set(other) return self.__class__(a for a in self if a in other) __and__ = intersection def symmetric_difference(self, other): other = set(other) result = self.__class__(a for a in self if a not in other) result.update(a for a in other if a not in self) return result __xor__ = symmetric_difference def difference(self, other): other = set(other) return self.__class__(a for a in self if a not in other) __sub__ = difference def intersection_update(self, other): other = set(other) 
set.intersection_update(self, other) self._list = [a for a in self._list if a in other] return self __iand__ = intersection_update def symmetric_difference_update(self, other): set.symmetric_difference_update(self, other) self._list = [a for a in self._list if a in self] self._list += [a for a in other._list if a in self] return self __ixor__ = symmetric_difference_update def difference_update(self, other): set.difference_update(self, other) self._list = [a for a in self._list if a in self] return self __isub__ = difference_update class IdentitySet(object): """A set that considers only object id() for uniqueness. This strategy has edge cases for builtin types- it's possible to have two 'foo' strings in one of these sets, for example. Use sparingly. """ def __init__(self, iterable=None): self._members = dict() if iterable: self.update(iterable) def add(self, value): self._members[id(value)] = value def __contains__(self, value): return id(value) in self._members def remove(self, value): del self._members[id(value)] def discard(self, value): try: self.remove(value) except KeyError: pass def pop(self): try: pair = self._members.popitem() return pair[1] except KeyError: raise KeyError("pop from an empty set") def clear(self): self._members.clear() def __cmp__(self, other): raise TypeError("cannot compare sets using cmp()") def __eq__(self, other): if isinstance(other, IdentitySet): return self._members == other._members else: return False def __ne__(self, other): if isinstance(other, IdentitySet): return self._members != other._members else: return True def issubset(self, iterable): other = self.__class__(iterable) if len(self) > len(other): return False for m in itertools_filterfalse( other._members.__contains__, iter(self._members.keys()) ): return False return True def __le__(self, other): if not isinstance(other, IdentitySet): return NotImplemented return self.issubset(other) def __lt__(self, other): if not isinstance(other, IdentitySet): return NotImplemented 
return len(self) < len(other) and self.issubset(other) def issuperset(self, iterable): other = self.__class__(iterable) if len(self) < len(other): return False for m in itertools_filterfalse( self._members.__contains__, iter(other._members.keys()) ): return False return True def __ge__(self, other): if not isinstance(other, IdentitySet): return NotImplemented return self.issuperset(other) def __gt__(self, other): if not isinstance(other, IdentitySet): return NotImplemented return len(self) > len(other) and self.issuperset(other) def union(self, iterable): result = self.__class__() members = self._members result._members.update(members) result._members.update((id(obj), obj) for obj in iterable) return result def __or__(self, other): if not isinstance(other, IdentitySet): return NotImplemented return self.union(other) def update(self, iterable): self._members.update((id(obj), obj) for obj in iterable) def __ior__(self, other): if not isinstance(other, IdentitySet): return NotImplemented self.update(other) return self def difference(self, iterable): result = self.__class__() members = self._members other = {id(obj) for obj in iterable} result._members.update( ((k, v) for k, v in members.items() if k not in other) ) return result def __sub__(self, other): if not isinstance(other, IdentitySet): return NotImplemented return self.difference(other) def difference_update(self, iterable): self._members = self.difference(iterable)._members def __isub__(self, other): if not isinstance(other, IdentitySet): return NotImplemented self.difference_update(other) return self def intersection(self, iterable): result = self.__class__() members = self._members other = {id(obj) for obj in iterable} result._members.update( (k, v) for k, v in members.items() if k in other ) return result def __and__(self, other): if not isinstance(other, IdentitySet): return NotImplemented return self.intersection(other) def intersection_update(self, iterable): self._members = 
self.intersection(iterable)._members def __iand__(self, other): if not isinstance(other, IdentitySet): return NotImplemented self.intersection_update(other) return self def symmetric_difference(self, iterable): result = self.__class__() members = self._members other = {id(obj): obj for obj in iterable} result._members.update( ((k, v) for k, v in members.items() if k not in other) ) result._members.update( ((k, v) for k, v in other.items() if k not in members) ) return result def __xor__(self, other): if not isinstance(other, IdentitySet): return NotImplemented return self.symmetric_difference(other) def symmetric_difference_update(self, iterable): self._members = self.symmetric_difference(iterable)._members def __ixor__(self, other): if not isinstance(other, IdentitySet): return NotImplemented self.symmetric_difference(other) return self def copy(self): return type(self)(iter(self._members.values())) __copy__ = copy def __len__(self): return len(self._members) def __iter__(self): return iter(self._members.values()) def __hash__(self): raise TypeError("set objects are unhashable") def __repr__(self): return "%s(%r)" % (type(self).__name__, list(self._members.values())) class WeakSequence(object): def __init__(self, __elements=()): # adapted from weakref.WeakKeyDictionary, prevent reference # cycles in the collection itself def _remove(item, selfref=weakref.ref(self)): self = selfref() if self is not None: self._storage.remove(item) self._remove = _remove self._storage = [ weakref.ref(element, _remove) for element in __elements ] def append(self, item): self._storage.append(weakref.ref(item, self._remove)) def __len__(self): return len(self._storage) def __iter__(self): return ( obj for obj in (ref() for ref in self._storage) if obj is not None ) def __getitem__(self, index): try: obj = self._storage[index] except KeyError: raise IndexError("Index %s out of range" % index) else: return obj() class OrderedIdentitySet(IdentitySet): def __init__(self, iterable=None): 
IdentitySet.__init__(self) self._members = OrderedDict() if iterable: for o in iterable: self.add(o) class PopulateDict(dict): """A dict which populates missing values via a creation function. Note the creation function takes a key, unlike collections.defaultdict. """ def __init__(self, creator): self.creator = creator def __missing__(self, key): self[key] = val = self.creator(key) return val class WeakPopulateDict(dict): """Like PopulateDict, but assumes a self + a method and does not create a reference cycle. """ def __init__(self, creator_method): self.creator = creator_method.__func__ weakself = creator_method.__self__ self.weakself = weakref.ref(weakself) def __missing__(self, key): self[key] = val = self.creator(self.weakself(), key) return val # Define collections that are capable of storing # ColumnElement objects as hashable keys/elements. # At this point, these are mostly historical, things # used to be more complicated. column_set = set column_dict = dict ordered_column_set = OrderedSet _getters = PopulateDict(operator.itemgetter) _property_getters = PopulateDict( lambda idx: property(operator.itemgetter(idx))<|fim▁hole|>def unique_list(seq, hashfunc=None): seen = set() seen_add = seen.add if not hashfunc: return [x for x in seq if x not in seen and not seen_add(x)] else: return [ x for x in seq if hashfunc(x) not in seen and not seen_add(hashfunc(x)) ] class UniqueAppender(object): """Appends items to a collection ensuring uniqueness. Additional appends() of the same object are ignored. Membership is determined by identity (``is a``) not equality (``==``). 
""" def __init__(self, data, via=None): self.data = data self._unique = {} if via: self._data_appender = getattr(data, via) elif hasattr(data, "append"): self._data_appender = data.append elif hasattr(data, "add"): self._data_appender = data.add def append(self, item): id_ = id(item) if id_ not in self._unique: self._data_appender(item) self._unique[id_] = True def __iter__(self): return iter(self.data) def coerce_generator_arg(arg): if len(arg) == 1 and isinstance(arg[0], types.GeneratorType): return list(arg[0]) else: return arg def to_list(x, default=None): if x is None: return default if not isinstance(x, collections_abc.Iterable) or isinstance( x, string_types + binary_types ): return [x] elif isinstance(x, list): return x else: return list(x) def has_intersection(set_, iterable): r"""return True if any items of set\_ are present in iterable. Goes through special effort to ensure __hash__ is not called on items in iterable that don't support it. """ # TODO: optimize, write in C, etc. return bool(set_.intersection([i for i in iterable if i.__hash__])) def to_set(x): if x is None: return set() if not isinstance(x, set): return set(to_list(x)) else: return x def to_column_set(x): if x is None: return column_set() if not isinstance(x, column_set): return column_set(to_list(x)) else: return x def update_copy(d, _new=None, **kw): """Copy the given dict and update with the given values.""" d = d.copy() if _new: d.update(_new) d.update(**kw) return d def flatten_iterator(x): """Given an iterator of which further sub-elements may also be iterators, flatten the sub-elements into a single iterator. """ for elem in x: if not isinstance(elem, str) and hasattr(elem, "__iter__"): for y in flatten_iterator(elem): yield y else: yield elem class LRUCache(dict): """Dictionary with 'squishy' removal of least recently used items. 
Note that either get() or [] should be used here, but generally its not safe to do an "in" check first as the dictionary can change subsequent to that call. """ __slots__ = "capacity", "threshold", "size_alert", "_counter", "_mutex" def __init__(self, capacity=100, threshold=0.5, size_alert=None): self.capacity = capacity self.threshold = threshold self.size_alert = size_alert self._counter = 0 self._mutex = threading.Lock() def _inc_counter(self): self._counter += 1 return self._counter def get(self, key, default=None): item = dict.get(self, key, default) if item is not default: item[2] = self._inc_counter() return item[1] else: return default def __getitem__(self, key): item = dict.__getitem__(self, key) item[2] = self._inc_counter() return item[1] def values(self): return [i[1] for i in dict.values(self)] def setdefault(self, key, value): if key in self: return self[key] else: self[key] = value return value def __setitem__(self, key, value): item = dict.get(self, key) if item is None: item = [key, value, self._inc_counter()] dict.__setitem__(self, key, item) else: item[1] = value self._manage_size() @property def size_threshold(self): return self.capacity + self.capacity * self.threshold def _manage_size(self): if not self._mutex.acquire(False): return try: size_alert = bool(self.size_alert) while len(self) > self.capacity + self.capacity * self.threshold: if size_alert: size_alert = False self.size_alert(self) by_counter = sorted( dict.values(self), key=operator.itemgetter(2), reverse=True ) for item in by_counter[self.capacity :]: try: del self[item[0]] except KeyError: # deleted elsewhere; skip continue finally: self._mutex.release() _lw_tuples = LRUCache(100) def lightweight_named_tuple(name, fields): hash_ = (name,) + tuple(fields) tp_cls = _lw_tuples.get(hash_) if tp_cls: return tp_cls tp_cls = type( name, (_LW,), dict( [ (field, _property_getters[idx]) for idx, field in enumerate(fields) if field is not None ] + [("__slots__", ())] ), ) 
tp_cls._real_fields = fields tp_cls._fields = tuple([f for f in fields if f is not None]) _lw_tuples[hash_] = tp_cls return tp_cls class ScopedRegistry(object): """A Registry that can store one or multiple instances of a single class on the basis of a "scope" function. The object implements ``__call__`` as the "getter", so by calling ``myregistry()`` the contained object is returned for the current scope. :param createfunc: a callable that returns a new object to be placed in the registry :param scopefunc: a callable that will return a key to store/retrieve an object. """ def __init__(self, createfunc, scopefunc): """Construct a new :class:`.ScopedRegistry`. :param createfunc: A creation function that will generate a new value for the current scope, if none is present. :param scopefunc: A function that returns a hashable token representing the current scope (such as, current thread identifier). """ self.createfunc = createfunc self.scopefunc = scopefunc self.registry = {} def __call__(self): key = self.scopefunc() try: return self.registry[key] except KeyError: return self.registry.setdefault(key, self.createfunc()) def has(self): """Return True if an object is present in the current scope.""" return self.scopefunc() in self.registry def set(self, obj): """Set the value for the current scope.""" self.registry[self.scopefunc()] = obj def clear(self): """Clear the current scope, if any.""" try: del self.registry[self.scopefunc()] except KeyError: pass class ThreadLocalRegistry(ScopedRegistry): """A :class:`.ScopedRegistry` that uses a ``threading.local()`` variable for storage. 
""" def __init__(self, createfunc): self.createfunc = createfunc self.registry = threading.local() def __call__(self): try: return self.registry.value except AttributeError: val = self.registry.value = self.createfunc() return val def has(self): return hasattr(self.registry, "value") def set(self, obj): self.registry.value = obj def clear(self): try: del self.registry.value except AttributeError: pass def has_dupes(sequence, target): """Given a sequence and search object, return True if there's more than one, False if zero or one of them. """ # compare to .index version below, this version introduces less function # overhead and is usually the same speed. At 15000 items (way bigger than # a relationship-bound collection in memory usually is) it begins to # fall behind the other version only by microseconds. c = 0 for item in sequence: if item is target: c += 1 if c > 1: return True return False # .index version. the two __contains__ calls as well # as .index() and isinstance() slow this down. # def has_dupes(sequence, target): # if target not in sequence: # return False # elif not isinstance(sequence, collections_abc.Sequence): # return False # # idx = sequence.index(target) # return target in sequence[idx + 1:]<|fim▁end|>
)
<|file_name|>updateScopeAcl.js<|end_file_name|><|fim▁begin|>"use strict"; const apisObject = { "type": "object", "required": false, "patternProperties": { "^[_a-z\/][_a-zA-Z0-9\/:]*$": { //pattern to match an api route "type": "object", "required": true, "properties": { "access": {"type": "boolean", "required": false}, } } } }; const aclRoute = { "type": "array", "required": false, "items": { "type": "object", "required": false, "properties": { "access": {"type": "string", "required": false}, "apis": apisObject } } }; const scope = { "type": "object", "patternProperties": { "^[^\W\.]+$": { "type": "object", "required": false, "patternProperties": { ".+": { "type": "object", "required": false, "properties": { "access": {"type": "boolean", "required": false}, "apisPermission": { "type": "string", "enum": ["restricted"], "required": false }, "get": aclRoute, "post": aclRoute, "put": aclRoute, "delete": aclRoute, "head": aclRoute, "options": aclRoute, "other": aclRoute,<|fim▁hole|> }, "additionalProperties": false }, }, "additionalProperties": false }; module.exports = scope;<|fim▁end|>
"additionalProperties": false }, "additionalProperties": false }
<|file_name|>test_matcher.py<|end_file_name|><|fim▁begin|># Copyright (C) 2013-2015 eNovance SAS <[email protected]> # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import unittest from hardware import matcher class TestMatcher(unittest.TestCase): def test_equal(self): lines = [('system', 'product', 'serial', 'CZJ31402CD')] spec = ('system', 'product', 'serial', 'CZJ31402CD') arr = {} self.assertTrue(matcher.match_spec(spec, lines, arr)) def test_not_equal(self): lines = [('system', 'product', 'serial', 'CZJ31402CD')] spec = ('system', 'product', 'serial', 'CZJ31402CE') arr = {} self.assertFalse(matcher.match_spec(spec, lines, arr)) def test_var(self): lines = [('disk', '1I:1:1', 'size', '1000GB')] spec = ('disk', '$disk8', 'size', '1000GB') arr = {} self.assertTrue(matcher.match_spec(spec, lines, arr)) self.assertEqual(arr, {'disk8': '1I:1:1'}) def test_vars(self): lines = [ ('system', 'product', 'serial', 'CZJ31402CD'), ('disk', '1I:1:1', 'size', '1000GB'), ('disk', '1I:1:1', 'type', 'SATA'), ('disk', '1I:1:1', 'control', 'hpa'), ('disk', '1I:1:2', 'size', '1000GB'), ('disk', '1I:1:2', 'type', 'SATA'), ('disk', '1I:1:2', 'control', 'hpa'), ('disk', '1I:1:3', 'size', '1000GB'), ('disk', '1I:1:3', 'type', 'SATA'), ('disk', '1I:1:3', 'control', 'hpa'), ('disk', '1I:1:4', 'size', '1000GB'), ('disk', '1I:1:4', 'type', 'SATA'), ('disk', '1I:1:4', 'control', 'hpa'), ('disk', '2I:1:5', 'size', '1000GB'), ('disk', '2I:1:5', 'type', 'SATA'), ('disk', '2I:1:5', 'control', 'hpa'), ('disk', 
'2I:1:6', 'size', '1000GB'), ('disk', '2I:1:6', 'type', 'SATA'), ('disk', '2I:1:6', 'control', 'hpa'), ('disk', '2I:1:7', 'size', '100GB'), ('disk', '2I:1:7', 'type', 'SSDSATA'), ('disk', '2I:1:7', 'control', 'hpa'), ('disk', '2I:1:8', 'size', '100GB'), ('disk', '2I:1:8', 'type', 'SSDSATA'), ('disk', '2I:1:8', 'control', 'hpa'), ] specs = [('system', 'product', 'serial', 'CZJ31402CD'), ('disk', '$disk1', 'size', '100GB'), ('disk', '$disk2', 'size', '100GB'), ('disk', '$disk3', 'size', '1000GB'), ('disk', '$disk4', 'size', '1000GB'), ('disk', '$disk5', 'size', '1000GB'), ('disk', '$disk6', 'size', '1000GB'), ('disk', '$disk7', 'size', '1000GB'), ('disk', '$disk8', 'size', '1000GB')] arr = {} self.assertTrue(matcher.match_all(lines, specs, arr, {})) self.assertEqual(arr, {'disk1': '2I:1:7', 'disk2': '2I:1:8', 'disk3': '1I:1:1', 'disk4': '1I:1:2', 'disk5': '1I:1:3', 'disk6': '1I:1:4', 'disk7': '2I:1:5', 'disk8': '2I:1:6', } ) def test_already_bound(self): lines = [ ('disk', '1I:1:2', 'size', '100GB'), ('disk', '1I:1:1', 'size', '1000GB'), ('disk', '1I:1:1', 'control', 'hpa'), ('disk', '1I:1:2', 'control', 'hpa'), ] specs = [ ('disk', '$disk1', 'size', '100GB'), ('disk', '$disk1', 'control', 'hpa'), ('disk', '$disk2', 'size', '1000GB'), ] arr = {} self.assertTrue(matcher.match_all(lines, specs, arr, {})) self.assertEqual(arr, {'disk1': '1I:1:2', 'disk2': '1I:1:1', }) def test_order(self): specs = [ ('disk', '$disk1', 'size', '100'), ('disk', '$disk1', 'slot', '$slot1'), ('disk', '$disk2', 'size', '1000'), ('disk', '$disk2', 'slot', '$slot2'), ] lines = [ ('disk', '1I:1:1', 'size', '1000'), ('disk', '1I:1:1', 'control', 'hpa'), ('disk', '1I:1:1', 'slot', '2'), ('disk', '2I:1:8', 'size', '100'), ('disk', '2I:1:8', 'control', 'hpa'), ('disk', '2I:1:8', 'slot', '2'), ] arr = {} self.assertTrue(matcher.match_all(lines, specs, arr, {})) def test_2vars(self): specs = [ ('disk', '$disk', 'size', '$size'), ] lines = [ ('disk', 'vda', 'size', '8'), ] arr = {} 
self.assertTrue(matcher.match_all(lines, specs, arr, {})) self.assertEqual(arr, {'size': '8', 'disk': 'vda', }) def test_2dollars(self): specs = [ ('disk', '$$disk', 'size', '$size'), ] lines = [ ('disk', 'vda', 'size', '8'), ] arr = {} arr2 = {} self.assertTrue(matcher.match_all(lines, specs, arr, arr2)) self.assertEqual(arr, {'size': '8', 'disk': 'vda', }) self.assertEqual(arr2, {'disk': 'vda', }) def test_multiple_vars(self): specs = [ ('disk', 'vda', 'size', '8'), ('disk', 'vdb', 'size', '16'), ] specs2 = [ ('disk', 'vda', 'size', '8'), ('disk', 'vdb', 'size', '8'), ] lines = [ ('disk', 'vda', 'size', '8'), ('disk', 'vdb', 'size', '8'), ] arr = {} self.assertFalse(matcher.match_all(lines, specs, arr, {})) self.assertTrue(matcher.match_all(lines, specs2, arr, {}), lines) def test_multiple(self): spec = ('disk', '$disk', 'size', '8') lines = [ ('disk', 'vda', 'size', '8'), ('disk', 'vdb', 'size', '8'), ] arr = {} self.assertTrue(matcher.match_multiple(lines, spec, arr)) self.assertEqual(arr['disk'], ['vda', 'vdb']) def test_gt(self): specs = [('disk', '$disk', 'size', 'gt(10)')] lines = [ ('disk', 'vda', 'size', '20'), ] arr = {} self.assertTrue(matcher.match_all(lines, specs, arr, {})) self.assertEqual(arr['disk'], 'vda') def test_ge(self): specs = [('disk', '$disk', 'size', 'ge(10.1)')] lines = [ ('disk', 'vda', 'size', '10.5'), ] arr = {} self.assertTrue(matcher.match_all(lines, specs, arr, {})) self.assertEqual(arr['disk'], 'vda') def test_lt(self): specs = [('disk', '$disk', 'size', 'lt(30)')] lines = [ ('disk', 'vda', 'size', '20'), ] arr = {} self.assertTrue(matcher.match_all(lines, specs, arr, {})) self.assertEqual(arr['disk'], 'vda') def test_le(self): specs = [('disk', '$disk', 'size', 'le(20)')] lines = [ ('disk', 'vda', 'size', '20'), ] arr = {} self.assertTrue(matcher.match_all(lines, specs, arr, {})) self.assertEqual(arr['disk'], 'vda') def test_not(self): specs = [('network', '$eth', 'serial', '$mac=not(regexp(^28:d2:))')] lines = [('network', 
'eth0', 'serial', '20:d2:44:1b:0a:8b')] arr = {} self.assertTrue(matcher.match_all(lines, specs, arr, {})) self.assertEqual(arr['eth'], 'eth0') self.assertEqual(arr['mac'], '20:d2:44:1b:0a:8b') def test_and(self): specs = [('disk', '$disk', 'size', 'and(gt(20), lt(50))')] lines = [('disk', 'vda', 'size', '40')] arr = {} self.assertTrue(matcher.match_all(lines, specs, arr, {})) self.assertEqual(arr['disk'], 'vda') def test_or(self): specs = [('disk', '$disk', 'size', 'or(lt(20), gt(30))')] lines = [('disk', 'vda', 'size', '40')] arr = {} self.assertTrue(matcher.match_all(lines, specs, arr, {})) self.assertEqual(arr['disk'], 'vda') def test_network(self): specs = [('network', '$eth', 'ipv4', u'network(192.168.2.0/24)')] lines = [('network', 'eth0', 'ipv4', u'192.168.2.2')] arr = {} self.assertTrue(matcher.match_all(lines, specs, arr, {})) self.assertEqual(arr['eth'], 'eth0') def test_le_var(self): specs = [('disk', '$disk', 'size', '$size=le(20)')] lines = [('disk', 'vda', 'size', '20')] arr = {} self.assertTrue(matcher.match_all(lines, specs, arr, {})) self.assertEqual(arr['disk'], 'vda') self.assertEqual(arr['size'], '20') def test_in(self): specs = [('disk', '$disk', 'size', 'in(10, 20, 30)')] lines = [('disk', 'vda', 'size', '20')] arr = {} self.assertTrue(matcher.match_all(lines, specs, arr, {})) self.assertEqual(arr['disk'], 'vda') def test_in2(self): specs = [('disk', '$disk=in("vda", "vdb")', 'size', '20')] lines = [('disk', 'vda', 'size', '20')] arr = {} self.assertTrue(matcher.match_all(lines, specs, arr, {})) self.assertEqual(arr['disk'], 'vda') def test_rangeint(self): specs = [('disk', '$disk', 'size', 'range(20, 25)')] lines = [('disk', 'vda', 'size', '20')] arr = {} self.assertTrue(matcher.match_all(lines, specs, arr, {})) self.assertEqual(arr['disk'], 'vda') def test_rangefloat(self): specs = [('ipmi', '+12V', 'value', 'range(11.9, 12.2)')] lines = [('ipmi', '+12V', 'value', '12.14')] arr = {} self.assertTrue(matcher.match_all(lines, specs, arr, {})) 
def test_regexp(self): specs = [('network', '$eth', 'serial', 'regexp(^28:d2:)')] lines = [('network', 'eth0', 'serial', '28:d2:44:1b:0a:8b')] arr = {} self.assertTrue(matcher.match_all(lines, specs, arr, {})) def test_backtrack(self): specs = [ ('disk', '$disk', 'size', '8'), ('disk', '$disk', 'type', 'b'), ] lines = [ ('disk', 'vda', 'size', '8'), ('disk', 'vda', 'type', 'a'), ('disk', 'vdb', 'size', '8'), ('disk', 'vdb', 'type', 'b'), ] arr = {} self.assertTrue(matcher.match_all(lines, specs, arr, {})) self.assertEqual(arr['disk'], 'vdb', arr) def test_backtrack2(self): specs = [ ('disk', '$disk', 'size', '8'), ('disk', '$disk', 'type', 'b'), ('disk', '$disk2', 'size', '8'), ] lines = [ ('disk', 'vda', 'size', '8'), ('disk', 'vda', 'type', 'a'), ('disk', 'vdb', 'size', '8'), ('disk', 'vdb', 'type', 'b'), ] arr = {} self.assertTrue(matcher.match_all(lines, specs, arr, {})) self.assertEqual(arr['disk2'], 'vda', arr) self.assertEqual(arr['disk'], 'vdb', arr) def test_backtrack3(self): specs = [ ('disk', '$disk', 'size', '8'), ('disk', '$disk', 'type', 'c'), ('disk', '$disk2', 'size', '8'), ] lines = [ ('disk', 'vda', 'size', '8'), ('disk', 'vda', 'type', 'a'), ('disk', 'vdb', 'size', '8'), ('disk', 'vdb', 'type', 'b'), ] arr = {} self.assertFalse(matcher.match_all(lines, specs, arr, {})) def test_backtracklong(self): specs = [ ('disk', 'logical', 'count', '8'), ('disk', '$disk1', 'size', '1000'), ('disk', '$disk1', 'vendor', 'Hitachi'), ('disk', '$disk1', 'model', 'HUA722010CLA330'), ('disk', '$disk1', 'rev', 'R001'), ('disk', '$disk1', 'optimal_io_size', '0'), ('disk', '$disk1', 'physical_block_size', '512'), ('disk', '$disk1', 'rotational', '1'), ('disk', '$disk1', 'Write Cache Enable', '1'), ('disk', '$disk1', 'Read Cache Disable', '0'), ('disk', '$disk2', 'size', '1000'), ('disk', '$disk2', 'vendor', 'Seagate'), ('disk', '$disk2', 'model', 'ST31000528AS'), ('disk', '$disk2', 'rev', 'R001'), ('disk', '$disk2', 'optimal_io_size', '0'), ('disk', '$disk2', 
'physical_block_size', '512'), ('disk', '$disk2', 'rotational', '1'), ('disk', '$disk2', 'Write Cache Enable', '1'), ('disk', '$disk2', 'Read Cache Disable', '0'), ('disk', '$disk3', 'size', '1000'), ('disk', '$disk3', 'optimal_io_size', '0'), ('disk', '$disk3', 'physical_block_size', '512'), ('disk', '$disk3', 'rotational', '1'), ('disk', '$disk3', 'Write Cache Enable', '1'), ('disk', '$disk3', 'Read Cache Disable', '0'), ('disk', '$disk4', 'size', '1000'), ('disk', '$disk4', 'optimal_io_size', '0'), ('disk', '$disk4', 'physical_block_size', '512'), ('disk', '$disk4', 'rotational', '1'), ('disk', '$disk4', 'Write Cache Enable', '1'), ('disk', '$disk4', 'Read Cache Disable', '0'), ('disk', '$disk5', 'size', '1000'), ('disk', '$disk5', 'optimal_io_size', '0'), ('disk', '$disk5', 'physical_block_size', '512'), ('disk', '$disk5', 'rotational', '1'), ('disk', '$disk5', 'Write Cache Enable', '1'), ('disk', '$disk5', 'Read Cache Disable', '0'), ('disk', '$disk6', 'size', '1000'), ('disk', '$disk6', 'optimal_io_size', '0'), ('disk', '$disk6', 'physical_block_size', '512'), ('disk', '$disk6', 'rotational', '1'), ('disk', '$disk6', 'Write Cache Enable', '1'), ('disk', '$disk6', 'Read Cache Disable', '0'), ('disk', '$disk7', 'size', '1000'), ('disk', '$disk7', 'optimal_io_size', '0'), ('disk', '$disk7', 'physical_block_size', '512'), ('disk', '$disk7', 'rotational', '1'), ('disk', '$disk7', 'Write Cache Enable', '1'), ('disk', '$disk7', 'Read Cache Disable', '0'), ('disk', '$disk8', 'size', '1000'), ('disk', '$disk8', 'optimal_io_size', '0'), ('disk', '$disk8', 'physical_block_size', '512'), ('disk', '$disk8', 'rotational', '1'), ('disk', '$disk8', 'Write Cache Enable', '1'), ('disk', '$disk8', 'Read Cache Disable', '0'), ] arr = {} self.assertTrue(matcher.match_all(X8_HW, specs, arr, {})) def test_generate_filename_and_macs(self): items = [('system', 'product', 'serial', 'Sysname'), ('network', 'eth0', 'serial', 'mac')] 
self.assertEqual(matcher.generate_filename_and_macs(items), {'sysname': 'Sysname-mac', 'sysserial': 'Sysname', 'eth': ['eth0'], 'serial': ['mac'], }) def test_generate_filename_and_macs_no_sysname(self): items = [('network', 'eth0', 'serial', 'aa:bb:cc')] self.assertEqual(matcher.generate_filename_and_macs(items), {'serial': ['aa:bb:cc'], 'eth': ['eth0'], 'sysname': 'aa-bb-cc', }) def test_generate_filename_and_macs_virtualbox(self): items = [('disk', 'sda', 'size', '8'), ('system', 'product', 'serial', '0'), ('system', 'product', 'name', 'VirtualBox ()'), ('system', 'product', 'vendor', 'innotek GmbH'), ('system', 'product', 'version', '1.2'), ('system', 'memory', 'size', '521113600'), ('network', 'eth0', 'serial', '08:00:27:6f:77:22'), ('network', 'eth0', 'vendor', 'Intel Corporation'), ('network', 'eth0', 'product', '82540EM Gigabit Ethernet Controller'), ('network', 'eth0', 'size', '1000000000'), ('network', 'eth0', 'ipv4', '10.0.2.15'), ('network', 'eth0', 'link', 'yes'), ('network', 'eth0', 'driver', 'e1000'), ('system', 'cpu', 'number', '1')] result = matcher.generate_filename_and_macs(items) self.assertEqual(result['sysname'], 'VirtualBox-0-08-00-27-6f-77-22') self.assertEqual(result['serial'], ['08:00:27:6f:77:22']) self.assertEqual(result['eth'], ['eth0']) if __name__ == "__main__": unittest.main() X8_HW = [('disk', 'logical', 'count', '8'), ('disk', 'sdd', 'size', '1000'), ('disk', 'sdd', 'model', 'HUA722010CLA330'), ('disk', 'sdd', 'vendor', 'Hitachi'), ('disk', 'sdd', 'rev', 'R001'), ('disk', 'sdd', 'optimal_io_size', '0'), ('disk', 'sdd', 'physical_block_size', '512'), ('disk', 'sdd', 'rotational', '1'), ('disk', 'sdd', 'Write Cache Enable', '1'), ('disk', 'sdd', 'Read Cache Disable', '0'),<|fim▁hole|> ('disk', 'sde', 'vendor', 'Hitachi'), ('disk', 'sde', 'model', 'HUA722010CLA330'), ('disk', 'sde', 'rev', 'R001'), ('disk', 'sde', 'optimal_io_size', '0'), ('disk', 'sde', 'physical_block_size', '512'), ('disk', 'sde', 'rotational', '1'), ('disk', 
'sde', 'Write Cache Enable', '1'), ('disk', 'sde', 'Read Cache Disable', '0'), ('disk', 'sde', 'scsi-id', 'scsi-2001b4d2001655500'), ('disk', 'sdf', 'size', '1000'), ('disk', 'sdf', 'vendor', 'Hitachi'), ('disk', 'sdf', 'model', 'HDS721010CLA330'), ('disk', 'sdf', 'rev', 'R001'), ('disk', 'sdf', 'optimal_io_size', '0'), ('disk', 'sdf', 'physical_block_size', '512'), ('disk', 'sdf', 'rotational', '1'), ('disk', 'sdf', 'Write Cache Enable', '1'), ('disk', 'sdf', 'Read Cache Disable', '0'), ('disk', 'sdf', 'scsi-id', 'scsi-2001b4d2012776300'), ('disk', 'sdg', 'size', '1000'), ('disk', 'sdg', 'vendor', 'Seagate'), ('disk', 'sdg', 'model', 'ST31000528AS'), ('disk', 'sdg', 'rev', 'R001'), ('disk', 'sdg', 'optimal_io_size', '0'), ('disk', 'sdg', 'physical_block_size', '512'), ('disk', 'sdg', 'rotational', '1'), ('disk', 'sdg', 'Write Cache Enable', '1'), ('disk', 'sdg', 'Read Cache Disable', '0'), ('disk', 'sda', 'size', '1000'), ('disk', 'sda', 'vendor', 'Seagate'), ('disk', 'sda', 'model', 'ST31000528AS'), ('disk', 'sda', 'rev', 'R001'), ('disk', 'sda', 'optimal_io_size', '0'), ('disk', 'sda', 'physical_block_size', '512'), ('disk', 'sda', 'rotational', '1'), ('disk', 'sda', 'Write Cache Enable', '1'), ('disk', 'sda', 'Read Cache Disable', '0'), ('disk', 'sdb', 'size', '1000'), ('disk', 'sdb', 'vendor', 'Seagate'), ('disk', 'sdb', 'model', 'ST31000528AS'), ('disk', 'sdb', 'rev', 'R001'), ('disk', 'sdb', 'optimal_io_size', '0'), ('disk', 'sdb', 'physical_block_size', '512'), ('disk', 'sdb', 'rotational', '1'), ('disk', 'sdb', 'Write Cache Enable', '1'), ('disk', 'sdb', 'Read Cache Disable', '0'), ('disk', 'sdb', 'scsi-id', 'scsi-2001b4d2000000000'), ('disk', 'sdc', 'size', '1000'), ('disk', 'sdc', 'vendor', 'Seagate'), ('disk', 'sdc', 'model', 'ST31000528AS'), ('disk', 'sdc', 'rev', 'R001'), ('disk', 'sdc', 'optimal_io_size', '0'), ('disk', 'sdc', 'physical_block_size', '512'), ('disk', 'sdc', 'rotational', '1'), ('disk', 'sdc', 'Write Cache Enable', '1'), ('disk', 
'sdc', 'Read Cache Disable', '0'), ('disk', 'sdh', 'size', '1000'), ('disk', 'sdh', 'vendor', 'Hitachi'), ('disk', 'sdh', 'model', 'HDS721010CLA330'), ('disk', 'sdh', 'rev', 'R001'), ('disk', 'sdh', 'optimal_io_size', '0'), ('disk', 'sdh', 'physical_block_size', '512'), ('disk', 'sdh', 'rotational', '1'), ('disk', 'sdh', 'Write Cache Enable', '1'), ('disk', 'sdh', 'Read Cache Disable', '0'), ('disk', 'sdh', 'scsi-id', 'scsi-2001b4d2012486900')]<|fim▁end|>
('disk', 'sdd', 'scsi-id', 'scsi-2001b4d2001775100'), ('disk', 'sde', 'size', '1000'),
<|file_name|>xor2.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ Created on Thu Apr 21 22:48:37 2016 @author: burger """ import numpy as np from matplotlib import pyplot as plt def sigma(x, a=1, b=0): return 1/(1+np.exp(-(a*x+b))) x = np.asarray([[0.0, .1], [0, 1], [.9, .05], [.9, .95]]) markers = 'v<>^' a = .5*np.ones((2,)) proj = np.dot(x, a)<|fim▁hole|> def trafo(x, y): return sigma(x, 2, -2), sigma(y, 5, 0) proj_line = np.arange(-50, 50, .02) proj_transformed_x, proj_transformed_y = trafo(proj_line, proj_line) proj_x, proj_y = trafo(proj, proj) a = (x[0] + x[3])/2 b = (x[1] + x[2])/2 c = (a + b)/2 m = (proj_y[3] - proj_y[0])/(proj_x[3] - proj_x[0]) X = np.mean(proj_x) + proj_line Y = np.mean(proj_y) + m*proj_line plt.figure() plt.hold(True) ms = 10 for i in range(len(x)): plt.plot(x[i, 0], x[i, 1], 'g'+markers[i], MarkerSize=ms) plt.plot(proj[i], proj[i], 'b'+markers[i], MarkerSize=ms) plt.plot(proj_x[i], proj_y[i], 'r'+markers[i], MarkerSize=ms) dots = 3 plt.plot(proj_line, proj_line, 'k.', MarkerSize=dots) plt.plot(proj_transformed_x, proj_transformed_y, 'r.', MarkerSize=dots) plt.plot(X, Y, 'k') for x in proj_line[::4]: a, b = trafo(proj_line, x*np.ones_like(proj_line)) plt.plot(a, b, 'k') a, b = trafo(x*np.ones_like(proj_line), proj_line) plt.plot(a, b, 'k') #plot(proj_line, y*np.ones_like(proj_line), 'k') plt.xlim([-.05, 1.05]) plt.ylim([-.05, 1.05]) plt.show()<|fim▁end|>
<|file_name|>LEOcoin_sl_SI.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?> <!DOCTYPE TS> <TS version="2.1" language="sl_SI"> <context> <name>AboutDialog</name> <message> <location filename="../forms/aboutdialog.ui" line="+14"/> <source>About LEOcoin</source> <translation>O LEOcoin</translation> </message> <message> <location line="+39"/> <source>&lt;b&gt;LEOcoin&lt;/b&gt; version</source> <translation>&lt;b&gt;LEOcoin&lt;/b&gt; verzija</translation> </message> <message> <location line="+41"/> <source>Copyright © 2009-2014 The Bitcoin developers Copyright © 2012-2014 The NovaCoin developers Copyright © 2014 The BlackCoin developers Copyright © 2014-%1 The LEOcoin developers</source> <translation type="unfinished"></translation> </message> <message> <location line="+16"/> <source> This is experimental software. Distributed under the MIT/X11 software license, see the accompanying file COPYING or &lt;a href=&quot;http://www.opensource.org/licenses/mit-license.php&quot;&gt;http://www.opensource.org/licenses/mit-license.php&lt;/a&gt;. 
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (&lt;a href=&quot;https://www.openssl.org/&quot;&gt;https://www.openssl.org/&lt;/a&gt;) and cryptographic software written by Eric Young (&lt;a href=&quot;mailto:[email protected]&quot;&gt;[email protected]&lt;/a&gt;) and UPnP software written by Thomas Bernard.</source> <translation type="unfinished"></translation> </message> </context> <context> <name>AddressTableModel</name> <message> <location filename="../addresstablemodel.cpp" line="+218"/> <source>Label</source> <translation>Oznaka</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Naslov</translation> </message> <message> <location line="+0"/> <source>pubkey</source> <translation type="unfinished"></translation> </message> <message> <location line="+0"/> <source>stealth</source> <translation type="unfinished"></translation> </message> <message> <location line="+34"/> <source>(no label)</source> <translation>(ni oznake)</translation> </message> <message> <location line="+4"/> <source>Stealth Address</source> <translation type="unfinished"></translation> </message> <message> <location line="+0"/> <source>n/a</source> <translation type="unfinished"></translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <location filename="../forms/askpassphrasedialog.ui" line="+26"/> <source>Passphrase Dialog</source> <translation>Poziv gesla</translation> </message> <message> <location line="+21"/> <source>Enter passphrase</source> <translation>Vnesite geslo</translation> </message> <message> <location line="+14"/> <source>New passphrase</source> <translation>Novo geslo</translation> </message> <message> <location line="+14"/> <source>Repeat new passphrase</source> <translation>Ponovite novo geslo</translation> </message> <message> <location line="+33"/> <location line="+16"/> <source>Serves to disable the trivial sendmoney when OS account compromised. 
Provides no real security.</source> <translation>Služi kot onemogočenje pošiljanja prostega denarja, v primerih okužbe operacijskega sistema. Ne ponuja prave zaščite.</translation> </message> <message> <location line="-13"/> <source>For staking only</source> <translation>Samo za staking.</translation> </message> <message> <location line="+16"/> <source>Enable messaging</source> <translation type="unfinished"></translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="+39"/> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;10 or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation>Vnesite novo geslo za vstop v denarnico.&lt;br/&gt;Prosimo, da geslo sestavite iz &lt;b&gt; 10 ali več naključnih znakov&lt;/b&gt; oz. &lt;b&gt;osem ali več besed&lt;/b&gt;.</translation> </message> <message> <location line="+1"/> <source>Encrypt wallet</source> <translation>Šifriraj denarnico</translation> </message> <message> <location line="+11"/> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation>To dejanje zahteva geslo za odklepanje vaše denarnice.</translation> </message> <message> <location line="+5"/> <source>Unlock wallet</source> <translation>Odkleni denarnico</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation>To dejanje zahteva geslo za dešifriranje vaše denarnice.</translation> </message> <message> <location line="+5"/> <source>Decrypt wallet</source> <translation>Dešifriraj denarnico</translation> </message> <message> <location line="+3"/> <source>Change passphrase</source> <translation>Zamenjaj geslo</translation> </message> <message> <location line="+1"/> <source>Enter the old and new passphrase to the wallet.</source> <translation>Vnesite staro in novo geslo denarnice.</translation> </message> <message> 
<location line="+45"/> <source>Confirm wallet encryption</source> <translation>Potrdi šifriranje denarnice</translation> </message> <message> <location line="+1"/> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR COINS&lt;/b&gt;!</source> <translation>Opozorilo: Če šifrirate svojo denarnico in izgubite svoje geslo, boste &lt;b&gt; IZGUBILI VSE SVOJE KOVANCE&lt;/b&gt;!</translation> </message> <message> <location line="+0"/> <source>Are you sure you wish to encrypt your wallet?</source> <translation>Ali ste prepričani, da želite šifrirati vašo denarnico?</translation> </message> <message> <location line="+15"/> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation>POMEMBNO: Vsaka predhodna varnostna kopija datoteke denarnice mora biti nadomeščena z novo datoteko šifrirane denarnice. Zaradi varnostnih razlogov bodo namreč prejšnje varnostne kopije datoteke nešifrirane denarnice postale neuporabne takoj ko boste pričeli uporabljati novo, šifrirano denarnico.</translation> </message> <message> <location line="+104"/> <location line="+24"/> <source>Warning: The Caps Lock key is on!</source> <translation>Opozorilo: imate prižgan Cap Lock</translation> </message> <message> <location line="-134"/> <location line="+61"/> <source>Wallet encrypted</source> <translation>Denarnica šifrirana</translation> </message> <message> <location line="-59"/> <source>LEOcoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your coins from being stolen by malware infecting your computer.</source> <translation>LEOcoin se bo sedaj zaprl, da dokonča proces šifriranje. 
Pomnite, da tudi šifriranje vaše denarnice ne more v celoti zaščititi vaših kovancev pred krajo z zlonamernimi programi in računalniškimi virusi, če ti okužijo vaš računalnik.</translation> </message> <message> <location line="+13"/> <location line="+7"/> <location line="+45"/> <location line="+6"/> <source>Wallet encryption failed</source> <translation>Šifriranje denarnice je spodletelo</translation> </message> <message> <location line="-57"/> <source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source> <translation>Šifriranje denarnice spodletelo je zaradi notranje napake. Vaša denarnica ni šifrirana.</translation> </message> <message> <location line="+7"/> <location line="+51"/> <source>The supplied passphrases do not match.</source> <translation>Vnešeno geslo se ne ujema</translation> </message> <message> <location line="-39"/> <source>Wallet unlock failed</source> <translation>Odklep denarnice spodletel</translation> </message> <message> <location line="+1"/> <location line="+13"/> <location line="+19"/> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation>Geslo za dešifriranje denarnice, ki ste ga vnesli, ni pravilno.</translation> </message> <message> <location line="-20"/> <source>Wallet decryption failed</source> <translation>Dešifriranje denarnice je spodletelo</translation> </message> <message> <location line="+14"/> <source>Wallet passphrase was successfully changed.</source> <translation>Geslo denarnice je bilo uspešno spremenjeno.</translation> </message> </context> <context> <name>ClientModel</name> <message> <location filename="../clientmodel.cpp" line="+137"/> <source>Network Alert</source> <translation>Omrežno Opozorilo</translation> </message> </context> <context> <name>CoinControlDialog</name> <message> <location filename="../forms/coincontroldialog.ui" line="+14"/> <source>Coin Control</source> <translation>Kontrola kovancev</translation> </message> <message> 
<location line="+31"/> <source>Quantity:</source> <translation>Količina:</translation> </message> <message> <location line="+32"/> <source>Bytes:</source> <translation>Biti:</translation> </message> <message> <location line="+48"/> <source>Amount:</source> <translation>Količina:</translation> </message> <message> <location line="+32"/> <source>Priority:</source> <translation>Prednostno mesto:</translation> </message> <message> <location line="+48"/> <source>Fee:</source> <translation>Provizija:</translation> </message> <message> <location line="+35"/> <source>Low Output:</source> <translation>Nizek output:</translation> </message> <message> <location filename="../coincontroldialog.cpp" line="+528"/> <location line="+30"/> <source>no</source> <translation>ne</translation> </message> <message> <location filename="../forms/coincontroldialog.ui" line="+51"/> <source>After Fee:</source> <translation>Po proviziji:</translation> </message> <message> <location line="+35"/> <source>Change:</source> <translation>Sprememba:</translation> </message> <message> <location line="+69"/> <source>(un)select all</source> <translation>od/obkljukaj vse</translation> </message> <message> <location line="+13"/> <source>Tree mode</source> <translation>Drevo</translation> </message> <message> <location line="+16"/> <source>List mode</source> <translation>Seznam</translation> </message> <message> <location line="+45"/> <source>Amount</source> <translation>Količina</translation> </message> <message> <location line="+5"/> <source>Label</source> <translation>Oznaka</translation> </message> <message> <location line="+5"/> <source>Address</source> <translation>Naslov</translation> </message> <message> <location line="+5"/> <source>Date</source> <translation>Datum</translation> </message> <message> <location line="+5"/> <source>Confirmations</source> <translation>Potrdila</translation> </message> <message> <location line="+3"/> <source>Confirmed</source> <translation>Potrjeno</translation> 
</message> <message> <location line="+5"/> <source>Priority</source> <translation>Prednostno mesto</translation> </message> <message> <location filename="../coincontroldialog.cpp" line="-520"/> <source>Copy address</source> <translation>Kopiraj naslov</translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation>Kopiraj oznako</translation> </message> <message> <location line="+1"/> <location line="+26"/> <source>Copy amount</source> <translation>Kopiraj količino</translation> </message> <message> <location line="-25"/> <source>Copy transaction ID</source> <translation>Kopiraj ID transakcije</translation> </message> <message> <location line="+24"/> <source>Copy quantity</source> <translation>Kopiraj količino</translation> </message> <message> <location line="+2"/> <source>Copy fee</source> <translation>Kopiraj provizijo</translation> </message> <message> <location line="+1"/> <source>Copy after fee</source> <translation>Kopiraj po proviziji</translation> </message> <message> <location line="+1"/> <source>Copy bytes</source> <translation>Kopiraj bite</translation> </message> <message> <location line="+1"/> <source>Copy priority</source> <translation>Kopiraj prednostno mesto</translation> </message> <message> <location line="+1"/> <source>Copy low output</source> <translation>Kopiraj nizek output:</translation> </message> <message> <location line="+1"/> <source>Copy change</source> <translation>Kopiraj spremembo</translation> </message> <message> <location line="+317"/> <source>highest</source> <translation>najvišja</translation> </message> <message> <location line="+1"/> <source>high</source> <translation>visoka</translation> </message> <message> <location line="+1"/> <source>medium-high</source> <translation>srednje visoka</translation> </message> <message> <location line="+1"/> <source>medium</source> <translation>srednje</translation> </message> <message> <location line="+4"/> <source>low-medium</source> <translation>srednje 
nizka</translation> </message> <message> <location line="+1"/> <source>low</source> <translation>nizka</translation> </message> <message> <location line="+1"/> <source>lowest</source> <translation>najnižja</translation> </message> <message> <location line="+130"/> <location line="+30"/> <source>DUST</source> <translation>PRAH</translation> </message> <message> <location line="-30"/> <location line="+30"/> <source>yes</source> <translation>da</translation> </message> <message> <location line="+10"/> <source>This label turns red, if the transaction size is bigger than 10000 bytes. This means a fee of at least %1 per kb is required. Can vary +/- 1 Byte per input.</source> <translation>Ta oznakla se obarva rdeče, če je transakcija večja od 10000 bajtov. To pomeni, da je zahtevana provizija vsaj %1 na kb. Lahko variira +/- 1 Bajt na vnos.</translation> </message> <message> <location line="+1"/> <source>Transactions with higher priority get more likely into a block. This label turns red, if the priority is smaller than &quot;medium&quot;. This means a fee of at least %1 per kb is required.</source> <translation>Transakcije z višjo prioriteto imajo višjo verjetnost, da so vključene v blok. Ta oznaka se obarva rdeče, če je prioriteta manjša kot &quot;srednja&quot;. To pomeni, da je zahtevana provizija vsaj %1 na kb.</translation> </message> <message> <location line="+1"/> <source>This label turns red, if any recipient receives an amount smaller than %1. This means a fee of at least %2 is required. Amounts below 0.546 times the minimum relay fee are shown as DUST.</source> <translation>Ta oznaka se obarva rdeče, če prejemnik dobi količino manjšo od %1. To pomeni, da je potrebna vsaj %2 provizija. Zneski pod 0.546 krat minimalna transakcijska provizija so prikazani kot PRAH.</translation> </message> <message> <location line="+1"/> <source>This label turns red, if the change is smaller than %1. 
This means a fee of at least %2 is required.</source> <translation>Ta oznaka se obarva rdeče, če je sprememba manjša od %1. To pomeni, da je zahtevana provizija vsaj %2.</translation> </message> <message> <location line="+40"/> <location line="+66"/> <source>(no label)</source> <translation>(ni oznake)</translation> </message> <message> <location line="-9"/> <source>change from %1 (%2)</source> <translation>spremeni iz %1 (%2)</translation> </message> <message> <location line="+1"/> <source>(change)</source> <translation>(spremeni)</translation> </message> </context> <context> <name>EditAddressDialog</name> <message> <location filename="../forms/editaddressdialog.ui" line="+14"/> <source>Edit Address</source> <translation>Uredi naslov</translation> </message> <message> <location line="+11"/> <source>&amp;Label</source> <translation>&amp;Oznaka</translation> </message> <message> <location line="+10"/> <source>The label associated with this address book entry</source> <translation>Oznaka povezana s tem vnosom v imeniku</translation> </message> <message> <location line="+7"/> <source>&amp;Address</source> <translation>&amp;Naslov</translation> </message> <message> <location line="+10"/> <source>The address associated with this address book entry. This can only be modified for sending addresses.</source> <translation>Naslov povezan s tem vnosom v imeniku. 
Spremenite ga lahko le za naslove odlivov.</translation> </message> <message> <location line="+7"/> <source>&amp;Stealth Address</source> <translation type="unfinished"></translation> </message> <message> <location filename="../editaddressdialog.cpp" line="+20"/> <source>New receiving address</source> <translation>Nov naslov za prilive</translation> </message> <message> <location line="+7"/> <source>New sending address</source> <translation>Nov naslov za odlive</translation> </message> <message> <location line="+4"/> <source>Edit receiving address</source> <translation>Uredi naslov za prilive</translation> </message> <message> <location line="+7"/> <source>Edit sending address</source> <translation>Uredi naslov za odlive</translation> </message> <message> <location line="+82"/> <source>The entered address &quot;%1&quot; is already in the address book.</source> <translation>Vneseni naslov &quot;%1&quot; je že v imeniku.</translation> </message> <message> <location line="-5"/> <source>The entered address &quot;%1&quot; is not a valid LEOcoin address.</source> <translation>Vneseni naslov &quot;%1&quot; ni veljaven LEOcoin naslov.</translation> </message> <message> <location line="+10"/> <source>Could not unlock wallet.</source> <translation>Denarnice ni bilo mogoče odkleniti.</translation> </message> <message> <location line="+5"/> <source>New key generation failed.</source> <translation>Ustvarjanje novega ključa je spodletelo.</translation> </message> </context> <context> <name>GUIUtil::HelpMessageBox</name> <message> <location filename="../guiutil.cpp" line="+526"/> <source>version</source> <translation>različica</translation> </message> <message> <location line="+0"/> <location line="+12"/> <source>LEOcoin</source> <translation type="unfinished">LEOcoin</translation> </message> <message> <location line="-10"/> <source>Usage:</source> <translation>Uporaba:</translation> </message> <message> <location line="+1"/> <source>command-line options</source> 
<translation>možnosti ukazne vrstice</translation> </message> <message> <location line="+4"/> <source>UI options</source> <translation>možnosti uporabniškega vmesnika</translation> </message> <message> <location line="+1"/> <source>Set language, for example &quot;de_DE&quot; (default: system locale)</source> <translation>Nastavi jezik, npr. &quot;sl_SI&quot; (privzeto: jezikovna oznaka sistema)</translation> </message> <message> <location line="+1"/> <source>Start minimized</source> <translation>Zaženi pomanjšano</translation> </message> <message> <location line="+1"/> <source>Show splash screen on startup (default: 1)</source> <translation>Prikaži splash screen ob zagonu (default: 1)</translation> </message> </context> <context> <name>LEOcoinBridge</name> <message> <location filename="../LEOcoinbridge.cpp" line="+410"/> <source>Incoming Message</source> <translation type="unfinished"></translation> </message> <message> <location line="+12"/> <source>default</source> <translation type="unfinished"></translation> </message> <message> <location line="+58"/> <source>&lt;b&gt;%1&lt;/b&gt; to %2 (%3)</source> <translation type="unfinished"></translation> </message> <message> <location line="+4"/> <source>&lt;b&gt;%1&lt;/b&gt; to LEO %2 (%3)</source> <translation type="unfinished"></translation> </message> <message> <location line="+5"/> <location line="+5"/> <source>&lt;b&gt;%1&lt;/b&gt; LEO, ring size %2 to LEO %3 (%4)</source> <translation type="unfinished"></translation> </message> <message> <location line="+4"/> <location line="+10"/> <location line="+12"/> <location line="+8"/> <source>Error:</source> <translation type="unfinished"></translation> </message> <message> <location line="-30"/> <source>Unknown txn type detected %1.</source> <translation type="unfinished"></translation> </message> <message> <location line="+10"/> <source>Input types must match for all recipients.</source> <translation type="unfinished"></translation> </message> <message> <location 
line="+12"/> <source>Ring sizes must match for all recipients.</source> <translation type="unfinished"></translation> </message> <message> <location line="+8"/> <source>Ring size outside range [%1, %2].</source> <translation type="unfinished"></translation> </message> <message> <location line="+8"/> <location line="+9"/> <source>Confirm send coins</source> <translation type="unfinished"></translation> </message> <message> <location line="-9"/> <source>Are you sure you want to send? Ring size of one is not anonymous, and harms the network.</source> <translation type="unfinished"></translation> </message> <message> <location line="+0"/> <location line="+9"/> <source> and </source> <translation type="unfinished"></translation> </message> <message> <location line="+0"/> <source>Are you sure you want to send %1?</source> <translation type="unfinished"></translation> </message> <message> <location line="+15"/> <location line="+25"/> <location line="+5"/> <location line="+5"/> <location line="+5"/> <location line="+6"/> <location line="+5"/> <location line="+5"/> <location line="+5"/> <location line="+5"/> <location line="+5"/> <location line="+5"/> <location line="+5"/> <location line="+5"/> <location line="+5"/> <location line="+5"/> <source>Send Coins</source> <translation type="unfinished"></translation> </message> <message> <location line="-95"/> <source>The change address is not valid, please recheck.</source> <translation type="unfinished"></translation> </message> <message> <location line="+25"/> <location line="+376"/> <source>The recipient address is not valid, please recheck.</source> <translation type="unfinished"></translation> </message> <message> <location line="-371"/> <source>The amount to pay must be larger than 0.</source> <translation type="unfinished"></translation> </message> <message> <location line="+5"/> <source>The amount exceeds your balance.</source> <translation type="unfinished"></translation> </message> <message> <location line="+5"/> 
<source>The total exceeds your balance when the %1 transaction fee is included.</source> <translation type="unfinished"></translation> </message> <message> <location line="+6"/> <location line="+365"/> <source>Duplicate address found, can only send to each address once per send operation.</source> <translation type="unfinished"></translation> </message> <message> <location line="-360"/> <source>Error: Transaction creation failed.</source> <translation type="unfinished"></translation> </message> <message> <location line="+5"/> <source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation type="unfinished"></translation> </message> <message> <location line="+5"/> <source>Error: Narration is too long.</source> <translation type="unfinished"></translation> </message> <message> <location line="+5"/> <source>Error: Ring Size Error.</source> <translation type="unfinished"></translation> </message> <message> <location line="+5"/> <source>Error: Input Type Error.</source> <translation type="unfinished"></translation> </message> <message> <location line="+5"/> <source>Error: Must be in full mode to send anon.</source> <translation type="unfinished"></translation> </message> <message> <location line="+5"/> <source>Error: Invalid Stealth Address.</source> <translation type="unfinished"></translation> </message> <message> <location line="+5"/> <source>The total exceeds your LEOcoin balance when the %1 transaction fee is included.</source> <translation type="unfinished"></translation> </message> <message> <location line="+5"/> <source>Error generating transaction.</source> <translation type="unfinished"></translation> </message> <message> <location line="+5"/> <source>Error generating transaction: %1</source> <translation type="unfinished"></translation> </message> <message> <location line="+304"/> 
<location line="+5"/> <location line="+5"/> <location line="+5"/> <location line="+5"/> <source>Send Message</source> <translation type="unfinished"></translation> </message> <message> <location line="-14"/> <source>The message can&apos;t be empty.</source> <translation type="unfinished"></translation> </message> <message> <location line="+10"/> <source>Error: Message creation failed.</source> <translation type="unfinished"></translation> </message> <message> <location line="+5"/> <source>Error: The message was rejected.</source> <translation type="unfinished"></translation> </message> <message> <location line="+98"/> <source>Sanity Error!</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Error: a sanity check prevented the transfer of a non-group private key, please close your wallet and report this error to the development team as soon as possible.</source> <translation type="unfinished"></translation> </message> <message> <location filename="../bridgetranslations.h" line="+8"/> <source>Overview</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Wallet</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Send</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Receive</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Transactions</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Address Book</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Chat</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Notifications</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Options</source> 
<translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Wallet Management</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Add New Wallet</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Import Wallet</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Advanced</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Backup</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Backup Wallet</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Encrypt Wallet</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Change Passphrase</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>(Un)lock Wallet</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Tools</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Chain Data</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Block Explorer</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Sign Message</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Verify Message</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Debug</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>About LEOcoin</source> <translation type="unfinished">O LEOcoin</translation> </message> <message> <location line="+1"/> 
<source>About QT</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>QR code</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Address:</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Label:</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Narration:</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Amount:</source> <translation type="unfinished">Količina:</translation> </message> <message> <location line="+1"/> <source>LEO</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>mLEO</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>µLEO</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Satoshi</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Add new receive address</source> <translation type="unfinished"></translation> </message> <message> <location line="+4"/> <source>Add Address</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Add a new contact</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Address Lookup</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Address Type</source> <translation type="unfinished"></translation> </message> <message> <location line="-6"/> <source>Normal</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Stealth</source> <translation type="unfinished"></translation> </message> <message> <location line="+6"/> 
<source>Group</source> <translation type="unfinished"></translation> </message> <message> <location line="-5"/> <source>BIP32</source> <translation type="unfinished"></translation> </message> <message> <location line="+6"/> <source>Label</source> <translation type="unfinished">Oznaka</translation> </message> <message> <location line="+1"/> <source>Address</source> <translation type="unfinished">Naslov</translation> </message> <message> <location line="+1"/> <source>Public Key</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Transaction Hash</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Recent Transactions</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Market</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Advanced Options</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Coin Control</source> <translation type="unfinished">Kontrola kovancev</translation> </message> <message> <location line="+1"/> <source>Make payment</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Balance transfer</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Select Inputs</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Automatically selected</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Quantity:</source> <translation type="unfinished">Količina:</translation> </message> <message> <location line="+1"/> <source>Fee:</source> <translation type="unfinished">Provizija:</translation> </message> <message> <location line="+1"/> <source>After Fee:</source> <translation type="unfinished">Po 
proviziji:</translation> </message> <message> <location line="+1"/> <source>Bytes:</source> <translation type="unfinished">Biti:</translation> </message> <message> <location line="+1"/> <source>Priority:</source> <translation type="unfinished">Prednostno mesto:</translation> </message> <message> <location line="+1"/> <source>LowOutput:</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Change:</source> <translation type="unfinished">Sprememba:</translation> </message> <message> <location line="+1"/> <source>Custom change address</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>From account</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>PUBLIC</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>PRIVATE</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Balance:</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Ring Size:</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>To account</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Pay to</source> <translation type="unfinished"></translation> </message> <message> <location line="+135"/> <source>Tor connection offline</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>i2p connection offline</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Wallet is encrypted and currently locked</source> <translation type="unfinished"></translation> </message> <message> <location line="+3"/> <source>Wallet is syncing</source> <translation type="unfinished"></translation> </message> 
<message> <location line="+1"/> <source>Open chat list</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Enter a label for this address to add it to your address book</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Enter a address to add it to your address book</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Inputs</source> <translation type="unfinished">Vnosi</translation> </message> <message> <location line="+1"/> <source>Values</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Outputs</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Enter a LEOcoin address to sign the message with (e.g. 8MfTCSnMvix9mVVNb2MGiEw92GpLrvzhVp)</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Enter the message you want to sign</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Click sign message to generate signature</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Copy the signed message signature</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Enter a LEOcoin address to verify the message with (e.g. 
8MfTCSnMvix9mVVNb2MGiEw92GpLrvzhVp)</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Enter the message you want to verify</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Enter a LEOcoin signature</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Paste signature from clipboard</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Your total balance</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Balances overview</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Recent in/out transactions or stakes</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Select inputs to spend</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Optional address to receive transaction change</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Choose from address book</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Paste address from clipboard</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Remove this recipient</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Current spendable send payment balance</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Current spendable balance to account</source> <translation type="unfinished"></translation> </message> <message> <location line="+4"/> <source>The address to transfer the balance to</source> <translation 
type="unfinished"></translation> </message> <message> <location line="+1"/> <source>The label for this address</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Amount to transfer</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Send to multiple recipients at once</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Double click to edit</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Date and time that the transaction was received.</source> <translation type="unfinished">Datum in čas, ko je transakcija bila prejeta.</translation> </message> <message> <location line="+1"/> <source>Transaction status. Hover over this field to show number of confirmations.</source> <translation type="unfinished">Stanje transakcije. Zapeljite z miško čez to polje za prikaz števila potrdil. </translation> </message> <message> <location line="+1"/> <source>Type of transaction.</source> <translation type="unfinished">Vrsta transakcije.</translation> </message> <message> <location line="+1"/> <source>Destination address of transaction.</source> <translation type="unfinished">Naslov prejemnika transakcije.</translation> </message> <message> <location line="+1"/> <source>Short payment note.</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Amount removed from or added to balance.</source> <translation type="unfinished">Količina odlita ali prilita dobroimetju.</translation> </message> <message> <location line="+1"/> <source>The address to send the payment to (e.g. 
SXywGBZBowrppUwwNUo1GCRDTibzJi7g2M)</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Choose address from address book</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Enter a public key for the address above</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Enter a label for this group</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Name for this Wallet</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Enter a password</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Would you like to create a bip44 path?</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Your recovery phrase (Keep this safe!)</source> <translation type="unfinished"></translation> </message> <message> <location line="-80"/> <source>Recovery Phrase</source> <translation type="unfinished"></translation> </message> <message> <location line="+26"/> <source>Make Default</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Activate/Deactivate</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Set as Master</source> <translation type="unfinished"></translation> </message> <message> <location line="+4"/> <source>Not staking because wallet is offline</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>0 active connections to LEOcoin network</source> <translation type="unfinished"></translation> </message> <message> <location line="+26"/> <source>The address to send the payment to</source> <translation type="unfinished"></translation> </message> <message> 
<location line="+1"/> <source>Enter a label for this address</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Enter a short note to send with payment (max 24 characters)</source> <translation type="unfinished"></translation> </message> <message> <location line="+20"/> <source>Wallet Name for recovered account</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Enter the password for the wallet you are trying to recover</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Is this a bip44 path?</source> <translation type="unfinished"></translation> </message> <message> <location line="-66"/> <source>ID</source> <translation type="unfinished"></translation> </message> <message> <location line="-122"/> <source>Narration</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Amount</source> <translation type="unfinished">Količina</translation> </message> <message> <location line="+1"/> <source>Default Stealth Address</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Add Recipient</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Clear All</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Suggest Ring Size</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Send Payment</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>RECEIVE</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Filter by type..</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Type</source> <translation 
type="unfinished">Vrsta</translation> </message> <message> <location line="+1"/> <source>Show QR Code</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>New Address</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Copy Address</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>TRANSACTIONS</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Date</source> <translation type="unfinished">Datum</translation> </message> <message> <location line="+1"/> <source>ADDRESSBOOK</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Delete</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Start Private Conversation</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Name:</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Public Key:</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Start Conversation</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Choose identity</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Identity:</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Start Group Conversation</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Group name:</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Create Group</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> 
<source>Invite others</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Search</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Invite others to group</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Invite to Group</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Invite</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>GROUP</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>BOOK</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Start private conversation</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Start group conversation</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>CHAT</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Leave Group</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>CHAIN DATA</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Coin Value</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Owned (Mature)</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>System (Mature)</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Spends</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Least Depth</source> <translation type="unfinished"></translation> </message> <message> 
<location line="+1"/> <source>BLOCK EXPLORER</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Refresh</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Hash</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Height</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Timestamp</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Value Out</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>OPTIONS</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Main</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Network</source> <translation type="unfinished">Omrežje</translation> </message> <message> <location line="+1"/> <source>Window</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Display</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>I2P</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Tor</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Start LEOcoin on system login</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Detach databases at shutdown</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Pay transaction fee:</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Optional transaction fee per kB that helps make sure your transactions are processed 
quickly.</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Most transactions are 1kB. Fee 0.01 recommended.</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Enable Staking</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Reserve:</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Reserved amount does not participate in staking and is therefore spendable at any time.</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Minimum Stake Interval</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Minimum Ring size:</source> <translation type="unfinished"></translation> </message> <message><|fim▁hole|> <message> <location line="+1"/> <source>Automatically select ring size</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Enable Secure messaging</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Thin Mode (Requires Restart)</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Thin Full Index</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Thin Index Window</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Map port using UPnP</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Connect through SOCKS proxy:</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Details</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> 
<source>Proxy IP:</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Port:</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>SOCKS Version:</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Minimize to the tray instead of the taskbar</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Minimize on close</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>User Interface language:</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Unit to show amounts in:</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Rows per page:</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Display addresses in transaction list</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Notifications:</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Visible Transaction Types:</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>I2P (coming soon)</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>TOR (coming soon)</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Cancel</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Apply</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Ok</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Lets 
create a New Wallet and Account to get you started!</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Wallet Name</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Password</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Add an optional Password to secure the Recovery Phrase (shown on next page)</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Would you like to create a Multi-Account HD Key? (BIP44)</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Language</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>English</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>French</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Japanese</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Spanish</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Chinese (Simplified)</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Chinese (Traditional)</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Next Step</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Write your Wallet Recovery Phrase</source> <translation type="unfinished"></translation> </message> <message> <location line="+2"/> <source>Important!</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>You need the Wallet Recovery Phrase to restore this wallet. 
Write it down and keep them somewhere safe. You will be asked to confirm the Wallet Recovery Phrase in the next screen to ensure you have written it down correctly</source> <translation type="unfinished"></translation> </message> <message> <location line="+2"/> <source>Back</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Please confirm your Wallet Recovery Phrase</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Congratulations! You have successfully created a New Wallet and Account</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>You can now use your Account to send and receive funds :) Remember to keep your Wallet Recovery Phrase and Password (if set) safe in case you ever need to recover your wallet</source> <translation type="unfinished"></translation> </message> <message> <location line="+2"/> <source>Lets import your Wallet</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>The Wallet Recovery Phrase could require a password to be imported</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Is this a Multi-Account HD Key (BIP44)</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Recovery Phrase (Usually 24 words)</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Congratulations! 
You have successfully imported your Wallet from your Recovery Phrase</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>You can now use your Account to send and receive funds :) Remember to keep your Wallet Recovery Phrase and Password safe in case you ever need to recover your wallet again!</source> <translation type="unfinished"></translation> </message> <message> <location line="+2"/> <source>Accounts</source> <translation type="unfinished"></translation> </message> <message> <location line="+2"/> <source>Name</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Created</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Active Account</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Default</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Wallet Keys</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Path</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Active</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Master</source> <translation type="unfinished"></translation> </message> <message> <location line="+61"/> <source>LEOcoin Notification</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>A new version of the LEOcoin wallet is available.</source> <translation type="unfinished"></translation> </message> <message> <location line="+3"/> <source>Please go to</source> <translation type="unfinished"></translation> </message> <message> <location line="+2"/> <source>and upgrade to the latest version.</source> <translation type="unfinished"></translation> </message> </context> 
<context> <name>LEOcoinGUI</name> <message> <location filename="../LEOcoin.cpp" line="+111"/> <source>A fatal error occurred. LEOcoin can no longer continue safely and will quit.</source> <translation type="unfinished"></translation> </message> <message> <location filename="../LEOcoingui.cpp" line="+89"/> <location line="+178"/> <source>LEOcoin</source> <translation type="unfinished">LEOcoin</translation> </message> <message> <location line="-178"/> <source>Client</source> <translation type="unfinished"></translation> </message> <message> <location line="+90"/> <source>E&amp;xit</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Quit application</source> <translation type="unfinished"></translation> </message> <message> <location line="+3"/> <source>&amp;About LEOcoin</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Show information about LEOcoin</source> <translation type="unfinished"></translation> </message> <message> <location line="+2"/> <source>About &amp;Qt</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Show information about Qt</source> <translation type="unfinished"></translation> </message> <message> <location line="+2"/> <source>&amp;Options...</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Modify configuration options for LEOcoin</source> <translation type="unfinished"></translation> </message> <message> <location line="+2"/> <source>&amp;Show / Hide</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>&amp;Encrypt Wallet...</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Encrypt or decrypt wallet</source> <translation type="unfinished"></translation> </message> <message> <location line="+2"/> <source>&amp;Backup 
Wallet...</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Backup wallet to another location</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>&amp;Change Passphrase...</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Change the passphrase used for wallet encryption</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>&amp;Unlock Wallet...</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Unlock wallet</source> <translation type="unfinished">Odkleni denarnico</translation> </message> <message> <location line="+1"/> <source>&amp;Lock Wallet</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Lock wallet</source> <translation type="unfinished"></translation> </message> <message> <location line="+4"/> <source>&amp;Debug window</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Open debugging and diagnostic console</source> <translation type="unfinished"></translation> </message> <message> <location line="+26"/> <source>&amp;File</source> <translation type="unfinished"></translation> </message> <message> <location line="+6"/> <source>&amp;Settings</source> <translation type="unfinished"></translation> </message> <message> <location line="+8"/> <source>&amp;Help</source> <translation type="unfinished"></translation> </message> <message> <location line="+19"/> <source>Wallet</source> <translation type="unfinished"></translation> </message> <message> <location line="+6"/> <location line="+9"/> <source>[testnet]</source> <translation type="unfinished"></translation> </message> <message> <location line="+0"/> <location line="+74"/> <source>LEOcoin client</source> <translation 
type="unfinished"></translation> </message> <message numerus="yes"> <location line="+63"/> <source>%n active connection(s) to LEOcoin network</source> <translation type="unfinished"> <numerusform></numerusform> <numerusform></numerusform> <numerusform></numerusform> <numerusform></numerusform> </translation> </message> <message> <location line="+18"/> <source>block</source> <translation type="unfinished"></translation> </message> <message> <location line="+0"/> <source>header</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>blocks</source> <translation type="unfinished"></translation> </message> <message> <location line="+0"/> <source>headers</source> <translation type="unfinished"></translation> </message> <message> <location line="+8"/> <location line="+22"/> <source>Synchronizing with network...</source> <translation type="unfinished"></translation> </message> <message> <location line="-20"/> <source>Downloading filtered blocks...</source> <translation type="unfinished"></translation> </message> <message> <location line="+6"/> <source>~%1 filtered block(s) remaining (%2% done).</source> <translation type="unfinished"></translation> </message> <message> <location line="+14"/> <source>Importing blocks...</source> <translation type="unfinished"></translation> </message> <message numerus="yes"> <location line="+5"/> <source>~%n block(s) remaining</source> <translation type="unfinished"> <numerusform></numerusform> <numerusform></numerusform> <numerusform></numerusform> <numerusform></numerusform> </translation> </message> <message> <location line="+13"/> <location line="+4"/> <source>Imported</source> <translation type="unfinished"></translation> </message> <message> <location line="-4"/> <location line="+4"/> <source>Downloaded</source> <translation type="unfinished"></translation> </message> <message> <location line="-3"/> <source>%1 of %2 %3 of transaction history (%4% done).</source> <translation 
type="unfinished"></translation> </message> <message> <location line="+3"/> <source>%1 blocks of transaction history.</source> <translation type="unfinished"></translation> </message> <message numerus="yes"> <location line="+23"/> <source>%n second(s) ago</source> <translation type="unfinished"> <numerusform></numerusform> <numerusform></numerusform> <numerusform></numerusform> <numerusform></numerusform> </translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n minute(s) ago</source> <translation type="unfinished"> <numerusform></numerusform> <numerusform></numerusform> <numerusform></numerusform> <numerusform></numerusform> </translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n hour(s) ago</source> <translation type="unfinished"> <numerusform></numerusform> <numerusform></numerusform> <numerusform></numerusform> <numerusform></numerusform> </translation> </message> <message numerus="yes"> <location line="+3"/> <source>%n day(s) ago</source> <translation type="unfinished"> <numerusform></numerusform> <numerusform></numerusform> <numerusform></numerusform> <numerusform></numerusform> </translation> </message> <message> <location line="+7"/> <source>Up to date</source> <translation type="unfinished"></translation> </message> <message> <location line="+12"/> <source>Catching up...</source> <translation type="unfinished"></translation> </message> <message> <location line="+16"/> <source>Last received %1 was generated %2.</source> <translation type="unfinished"></translation> </message> <message> <location line="+59"/> <source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. 
Do you want to pay the fee?</source> <translation type="unfinished"></translation> </message> <message> <location line="+5"/> <source>Confirm transaction fee</source> <translation type="unfinished"></translation> </message> <message> <location line="+28"/> <source>Sent transaction</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Incoming transaction</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Date: %1 Amount: %2 Type: %3 Address: %4 </source> <translation type="unfinished"></translation> </message> <message> <location line="+16"/> <location line="+15"/> <source>Incoming Message</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Date: %1 From Address: %2 To Address: %3 Message: %4 </source> <translation type="unfinished"></translation> </message> <message> <location line="+45"/> <location line="+23"/> <source>URI handling</source> <translation type="unfinished"></translation> </message> <message> <location line="-23"/> <location line="+23"/> <source>URI can not be parsed! 
This can be caused by an invalid LEOcoin address or malformed URI parameters.</source> <translation type="unfinished"></translation> </message> <message> <location line="+42"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt; for staking and messaging only.</source> <translation type="unfinished"></translation> </message> <message> <location line="+3"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt; for messaging only.</source> <translation type="unfinished"></translation> </message> <message> <location line="+3"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt; for staking only.</source> <translation type="unfinished"></translation> </message> <message> <location line="+12"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation type="unfinished"></translation> </message> <message> <location line="+25"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation type="unfinished"></translation> </message> <message> <location line="+33"/> <source>Backup Wallet</source> <translation type="unfinished"></translation> </message> <message> <location line="+0"/> <source>Wallet Data (*.dat)</source> <translation type="unfinished"></translation> </message> <message> <location line="+5"/> <source>Backup Failed</source> <translation type="unfinished"></translation> </message> <message> <location line="+0"/> <source>There was an error trying to save the wallet data to the new location.</source> <translation type="unfinished"></translation> </message> <message> <location line="+48"/> <source>Lock Wallet</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Error: Wallet must first be encrypted to be locked.</source> <translation type="unfinished"></translation> </message> <message numerus="yes"> 
<location line="+69"/> <source>%n second(s)</source> <translation type="unfinished"> <numerusform></numerusform> <numerusform></numerusform> <numerusform></numerusform> <numerusform></numerusform> </translation> </message> <message numerus="yes"> <location line="+1"/> <source>%n minute(s)</source> <translation type="unfinished"> <numerusform></numerusform> <numerusform></numerusform> <numerusform></numerusform> <numerusform></numerusform> </translation> </message> <message numerus="yes"> <location line="+1"/> <source>%n hour(s)</source> <translation type="unfinished"> <numerusform></numerusform> <numerusform></numerusform> <numerusform></numerusform> <numerusform></numerusform> </translation> </message> <message numerus="yes"> <location line="+1"/> <source>%n day(s)</source> <translation type="unfinished"> <numerusform></numerusform> <numerusform></numerusform> <numerusform></numerusform> <numerusform></numerusform> </translation> </message> <message> <location line="+9"/> <source>Staking. 
Your weight is %1 Network weight is %2 Expected time to earn reward is %3</source> <translation type="unfinished"></translation> </message> <message> <location line="+7"/> <source>Not staking because wallet is in thin mode</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Not staking, staking is disabled</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Not staking because wallet is locked</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Not staking because wallet is offline</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Not staking because wallet is syncing</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Not staking because you don&apos;t have mature coins</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Not staking</source> <translation type="unfinished"></translation> </message> <message> <location filename="../transactionrecord.cpp" line="+23"/> <source>Received with</source> <translation type="unfinished"></translation> </message> <message> <location line="+2"/> <source>Received from</source> <translation type="unfinished"></translation> </message> <message> <location line="+3"/> <source>Sent to</source> <translation type="unfinished"></translation> </message> <message> <location line="+2"/> <source>Payment to yourself</source> <translation type="unfinished"></translation> </message> <message> <location line="+2"/> <source>Mined</source> <translation type="unfinished"></translation> </message> <message> <location line="+2"/> <source>Received LEOcoin</source> <translation type="unfinished"></translation> </message> <message> <location line="+2"/> <source>Sent LEOcoin</source> <translation type="unfinished"></translation> 
</message> <message> <location line="+2"/> <source>Other</source> <translation type="unfinished"></translation> </message> </context> <context> <name>MessageModel</name> <message> <location filename="../messagemodel.cpp" line="+376"/> <source>Type</source> <translation type="unfinished">Vrsta</translation> </message> <message> <location line="+0"/> <source>Sent Date Time</source> <translation type="unfinished"></translation> </message> <message> <location line="+0"/> <source>Received Date Time</source> <translation type="unfinished"></translation> </message> <message> <location line="+0"/> <source>Label</source> <translation type="unfinished">Oznaka</translation> </message> <message> <location line="+0"/> <source>To Address</source> <translation type="unfinished"></translation> </message> <message> <location line="+0"/> <source>From Address</source> <translation type="unfinished"></translation> </message> <message> <location line="+0"/> <source>Message</source> <translation type="unfinished">Sporočilo</translation> </message> <message> <location line="+41"/> <source>Send Secure Message</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Send failed: %1.</source> <translation type="unfinished"></translation> </message> <message> <location line="+22"/> <location line="+1"/> <source>(no label)</source> <translation type="unfinished">(ni oznake)</translation> </message> </context> <context> <name>PaymentServer</name> <message> <location filename="../paymentserver.cpp" line="+107"/> <source>Cannot start LEOcoin: click-to-pay handler</source> <translation type="unfinished"></translation> </message> </context> <context> <name>PeerTableModel</name> <message> <location filename="../peertablemodel.cpp" line="+118"/> <source>Address/Hostname</source> <translation type="unfinished"></translation> </message> <message> <location line="+0"/> <source>User Agent</source> <translation type="unfinished"></translation> </message> 
<message> <location line="+0"/> <source>Ping Time</source> <translation type="unfinished"></translation> </message> </context> <context> <name>QObject</name> <message> <location filename="../guiutil.cpp" line="-470"/> <source>%1 d</source> <translation type="unfinished"></translation> </message> <message> <location line="+2"/> <source>%1 h</source> <translation type="unfinished"></translation> </message> <message> <location line="+2"/> <source>%1 m</source> <translation type="unfinished"></translation> </message> <message> <location line="+2"/> <location line="+55"/> <source>%1 s</source> <translation type="unfinished"></translation> </message> <message> <location line="-10"/> <source>None</source> <translation type="unfinished"></translation> </message> <message> <location line="+5"/> <source>N/A</source> <translation type="unfinished">Neznano</translation> </message> <message> <location line="+0"/> <source>%1 ms</source> <translation type="unfinished"></translation> </message> </context> <context> <name>RPCConsole</name> <message> <location filename="../forms/rpcconsole.ui" line="+46"/> <source>Client name</source> <translation>Ime odjemalca</translation> </message> <message> <location line="+10"/> <location line="+23"/> <location line="+26"/> <location line="+26"/> <location line="+23"/> <location line="+23"/> <location line="+36"/> <location line="+23"/> <location line="+36"/> <location line="+23"/> <location line="+23"/> <location line="+491"/> <location line="+23"/> <location line="+23"/> <location line="+23"/> <location line="+23"/> <location line="+23"/> <location line="+23"/> <location line="+23"/> <location line="+23"/> <location line="+23"/> <location line="+23"/> <location line="+23"/> <location line="+23"/> <location line="+23"/> <location line="+23"/> <source>N/A</source> <translation>Neznano</translation> </message> <message> <location line="-1062"/> <source>Client version</source> <translation>Različica odjemalca</translation> </message> <message> 
<location line="-45"/> <source>&amp;Information</source> <translation>&amp;Informacije</translation> </message> <message> <location line="-10"/> <source>LEOcoin - Debug window</source> <translation type="unfinished"></translation> </message> <message> <location line="+25"/> <source>LEOcoin Core</source> <translation type="unfinished"></translation> </message> <message> <location line="+53"/> <source>Using OpenSSL version</source> <translation>OpenSSL različica v rabi</translation> </message> <message> <location line="+26"/> <source>Using BerkeleyDB version</source> <translation type="unfinished"></translation> </message> <message> <location line="+49"/> <source>Startup time</source> <translation>Čas zagona</translation> </message> <message> <location line="+29"/> <source>Network</source> <translation>Omrežje</translation> </message> <message> <location line="+7"/> <source>Name</source> <translation type="unfinished"></translation> </message> <message> <location line="+23"/> <source>Number of connections</source> <translation>Število povezav</translation> </message> <message> <location line="+157"/> <source>Show the LEOcoin help message to get a list with possible LEOcoin command-line options.</source> <translation type="unfinished"></translation> </message> <message> <location line="+99"/> <source>&amp;Network Traffic</source> <translation type="unfinished"></translation> </message> <message> <location line="+52"/> <source>&amp;Clear</source> <translation type="unfinished"></translation> </message> <message> <location line="+16"/> <source>Totals</source> <translation type="unfinished"></translation> </message> <message> <location line="+64"/> <location filename="../rpcconsole.cpp" line="+396"/> <source>In:</source> <translation type="unfinished"></translation> </message> <message> <location line="+80"/> <location filename="../rpcconsole.cpp" line="+1"/> <source>Out:</source> <translation type="unfinished"></translation> </message> <message> <location line="+41"/> 
<source>&amp;Peers</source> <translation type="unfinished"></translation> </message> <message> <location line="+39"/> <location filename="../rpcconsole.cpp" line="-167"/> <location line="+328"/> <source>Select a peer to view detailed information.</source> <translation type="unfinished"></translation> </message> <message> <location line="+25"/> <source>Peer ID</source> <translation type="unfinished"></translation> </message> <message> <location line="+23"/> <source>Direction</source> <translation type="unfinished"></translation> </message> <message> <location line="+23"/> <source>Version</source> <translation type="unfinished"></translation> </message> <message> <location line="+23"/> <source>User Agent</source> <translation type="unfinished"></translation> </message> <message> <location line="+23"/> <source>Services</source> <translation type="unfinished"></translation> </message> <message> <location line="+23"/> <source>Starting Height</source> <translation type="unfinished"></translation> </message> <message> <location line="+23"/> <source>Sync Height</source> <translation type="unfinished"></translation> </message> <message> <location line="+23"/> <source>Ban Score</source> <translation type="unfinished"></translation> </message> <message> <location line="+23"/> <source>Connection Time</source> <translation type="unfinished"></translation> </message> <message> <location line="+23"/> <source>Last Send</source> <translation type="unfinished"></translation> </message> <message> <location line="+23"/> <source>Last Receive</source> <translation type="unfinished"></translation> </message> <message> <location line="+23"/> <source>Bytes Sent</source> <translation type="unfinished"></translation> </message> <message> <location line="+23"/> <source>Bytes Received</source> <translation type="unfinished"></translation> </message> <message> <location line="+23"/> <source>Ping Time</source> <translation type="unfinished"></translation> </message> <message> <location 
line="+23"/> <source>Time Offset</source> <translation type="unfinished"></translation> </message> <message> <location line="-866"/> <source>Block chain</source> <translation>veriga blokov</translation> </message> <message> <location line="+7"/> <source>Current number of blocks</source> <translation>Trenutno število blokov</translation> </message> <message> <location line="+23"/> <source>Estimated total blocks</source> <translation>Ocena vseh blokov</translation> </message> <message> <location line="+23"/> <source>Last block time</source> <translation>Čas zadnjega bloka</translation> </message> <message> <location line="+49"/> <source>Open the LEOcoin debug log file from the current data directory. This can take a few seconds for large log files.</source> <translation type="unfinished"></translation> </message> <message> <location line="+3"/> <source>&amp;Open</source> <translation>&amp;Odpri</translation> </message> <message> <location line="+16"/> <source>Command-line options</source> <translation>Možnosti ukazne vrstice.</translation> </message> <message> <location line="+10"/> <source>&amp;Show</source> <translation>&amp;Prikaži</translation> </message> <message> <location line="+24"/> <source>&amp;Console</source> <translation>&amp;Konzola</translation> </message> <message> <location line="-266"/> <source>Build date</source> <translation>Datum izgradnje</translation> </message> <message> <location line="+206"/> <source>Debug log file</source> <translation>Razhroščevalna dnevniška datoteka</translation> </message> <message> <location line="+109"/> <source>Clear console</source> <translation>Počisti konzolo</translation> </message> <message> <location filename="../rpcconsole.cpp" line="-197"/> <source>Welcome to the LEOcoin Core RPC console.</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Use up and down arrows to navigate history, and &lt;b&gt;Ctrl-L&lt;/b&gt; to clear screen.</source> <translation>Za 
navigiranje po zgodovini uporabite puščici gor in dol, in &lt;b&gt;Ctrl-L&lt;/b&gt; za izpraznjenje zaslona.</translation> </message> <message> <location line="+1"/> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation>Vtipkaj &lt;b&gt;pomoč&lt;/b&gt; za vpogled v razpoložljive ukaze.</translation> </message> <message> <location line="+233"/> <source>via %1</source> <translation type="unfinished"></translation> </message> <message> <location line="+3"/> <location line="+1"/> <source>never</source> <translation type="unfinished"></translation> </message> <message> <location line="+9"/> <source>Inbound</source> <translation type="unfinished"></translation> </message> <message> <location line="+0"/> <source>Outbound</source> <translation type="unfinished"></translation> </message> <message> <location line="+7"/> <source>Unknown</source> <translation type="unfinished"></translation> </message> </context> <context> <name>TrafficGraphWidget</name> <message> <location filename="../trafficgraphwidget.cpp" line="+79"/> <source>KB/s</source> <translation type="unfinished"></translation> </message> </context> <context> <name>TransactionDesc</name> <message> <location filename="../transactiondesc.cpp" line="+20"/> <source>Open until %1</source> <translation>Odprto do %1</translation> </message> <message numerus="yes"> <location line="-2"/> <source>Open for %n block(s)</source> <translation> <numerusform>Odprt za %n blok</numerusform> <numerusform>Odprt za %n bloka</numerusform> <numerusform>Odprt za %n blokov</numerusform> <numerusform>Odprt za %n blokov</numerusform> </translation> </message> <message> <location line="+7"/> <source>conflicted</source> <translation>sporen</translation> </message> <message> <location line="+2"/> <source>%1/offline</source> <translation>%1/offline</translation> </message> <message> <location line="+2"/> <source>%1/unconfirmed</source> <translation>%1/nepotrjeno</translation> </message> <message> 
<location line="+2"/> <source>%1 confirmations</source> <translation>%1 potrdil</translation> </message> <message> <location line="+18"/> <source>Status</source> <translation>Stanje</translation> </message> <message numerus="yes"> <location line="+7"/> <source>, broadcast through %n node(s)</source> <translation> <numerusform>, predvajanje skozi %n vozlišče</numerusform> <numerusform>, predvajanje skozi %n vozlišči</numerusform> <numerusform>, predvajanje skozi %n vozlišč</numerusform> <numerusform>, predvajanje skozi %n vozlišč</numerusform> </translation> </message> <message> <location line="+5"/> <source>Date</source> <translation>Datum</translation> </message> <message> <location line="+7"/> <source>Source</source> <translation>Izvor</translation> </message> <message> <location line="+0"/> <source>Generated</source> <translation>Generirano</translation> </message> <message> <location line="+5"/> <location line="+17"/> <location line="+20"/> <source>From</source> <translation>Pošiljatelj</translation> </message> <message> <location line="-19"/> <location line="+20"/> <location line="+23"/> <location line="+57"/> <source>To</source> <translation>Prejemnik</translation> </message> <message> <location line="-96"/> <location line="+2"/> <location line="+18"/> <location line="+2"/> <source>own address</source> <translation>lasten naslov</translation> </message> <message> <location line="-22"/> <location line="+20"/> <source>label</source> <translation>oznaka</translation> </message> <message> <location line="+37"/> <location line="+12"/> <location line="+44"/> <location line="+20"/> <location line="+40"/> <source>Credit</source> <translation>Kredit</translation> </message> <message numerus="yes"> <location line="-114"/> <source>matures in %n more block(s)</source> <translation> <numerusform>dozori čez %n blok</numerusform> <numerusform>dozori čez %n bloka</numerusform> <numerusform>dozori čez %n blokov</numerusform> <numerusform>dozori čez %n blokov</numerusform> 
</translation> </message> <message> <location line="+2"/> <source>not accepted</source> <translation>ni bilo sprejeto</translation> </message> <message> <location line="+43"/> <location line="+8"/> <location line="+16"/> <location line="+42"/> <source>Debit</source> <translation>Dolg</translation> </message> <message> <location line="-52"/> <source>Transaction fee</source> <translation>Provizija transakcije</translation> </message> <message> <location line="+19"/> <source>Net amount</source> <translation>Neto količina</translation> </message> <message> <location line="+6"/> <source>Message</source> <translation>Sporočilo</translation> </message> <message> <location line="+2"/> <source>Comment</source> <translation>Opomba</translation> </message> <message> <location line="+12"/> <source>Transaction ID</source> <translation>ID transakcije</translation> </message> <message> <location line="+3"/> <source>Generated coins must mature 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to &quot;not accepted&quot; and it won&apos;t be spendable. 
This may occasionally happen if another node generates a block within a few seconds of yours.</source> <translation type="unfinished"></translation> </message> <message> <location line="+7"/> <source>Debug information</source> <translation>Razhroščevalna informacija</translation> </message> <message> <location line="+8"/> <source>Transaction</source> <translation>Transakcija</translation> </message> <message> <location line="+5"/> <source>Inputs</source> <translation>Vnosi</translation> </message> <message> <location line="+44"/> <source>Amount</source> <translation>Količina</translation> </message> <message> <location line="+1"/> <source>true</source> <translation>pravilno</translation> </message> <message> <location line="+0"/> <source>false</source> <translation>nepravilno</translation> </message> <message> <location line="-266"/> <source>, has not been successfully broadcast yet</source> <translation>, še ni bil uspešno predvajan</translation> </message> <message> <location line="+36"/> <location line="+20"/> <source>unknown</source> <translation>neznano</translation> </message> </context> <context> <name>TransactionDescDialog</name> <message> <location filename="../forms/transactiondescdialog.ui" line="+14"/> <source>Transaction details</source> <translation>Podrobnosti transakcije</translation> </message> <message> <location line="+6"/> <source>This pane shows a detailed description of the transaction</source> <translation>To podokno prikazuje podroben opis transakcije</translation> </message> </context> <context> <name>TransactionTableModel</name> <message> <location filename="../transactiontablemodel.cpp" line="+217"/> <source>Date</source> <translation>Datum</translation> </message> <message> <location line="+0"/> <source>Type</source> <translation>Vrsta</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Naslov</translation> </message> <message> <location line="+0"/> <source>Amount</source> 
<translation>Količina</translation> </message> <message> <location line="+54"/> <source>Open until %1</source> <translation>Odpri enoto %1</translation> </message> <message> <location line="+12"/> <source>Confirmed (%1 confirmations)</source> <translation>Potrjeno (%1 potrdil)</translation> </message> <message numerus="yes"> <location line="-15"/> <source>Open for %n more block(s)</source> <translation> <numerusform>Odprt še %n blok</numerusform> <numerusform>Odprt še %n bloka</numerusform> <numerusform>Odprt še %n blokov</numerusform> <numerusform>Odprt še %n blokov</numerusform> </translation> </message> <message> <location line="-51"/> <source>Narration</source> <translation type="unfinished"></translation> </message> <message> <location line="+57"/> <source>Offline</source> <translation>Nepovezan</translation> </message> <message> <location line="+3"/> <source>Unconfirmed</source> <translation>Nepotrjeno</translation> </message> <message> <location line="+3"/> <source>Confirming (%1 of %2 recommended confirmations)</source> <translation>Potrjuje (%1 od %2 priporočenih potrditev)</translation> </message> <message> <location line="+6"/> <source>Conflicted</source> <translation>Sporen</translation> </message> <message> <location line="+3"/> <source>Immature (%1 confirmations, will be available after %2)</source> <translation>Nezrel (%1 potrditev, na voljo bo po %2)</translation> </message> <message> <location line="+3"/> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation>Ta blok ni prejelo še nobeno vozlišče. Najverjetneje ne bo sprejet!</translation> </message> <message> <location line="+3"/> <source>Generated but not accepted</source> <translation>Generirano, toda ne sprejeto</translation> </message> <message> <location line="+49"/> <source>(n/a)</source> <translation>(ni na voljo)</translation> </message> <message> <location line="+202"/> <source>Transaction status. 
Hover over this field to show number of confirmations.</source> <translation>Stanje transakcije. Zapeljite z miško čez to polje za prikaz števila potrdil. </translation> </message> <message> <location line="+2"/> <source>Date and time that the transaction was received.</source> <translation>Datum in čas, ko je transakcija bila prejeta.</translation> </message> <message> <location line="+2"/> <source>Type of transaction.</source> <translation>Vrsta transakcije.</translation> </message> <message> <location line="+2"/> <source>Destination address of transaction.</source> <translation>Naslov prejemnika transakcije.</translation> </message> <message> <location line="+2"/> <source>Amount removed from or added to balance.</source> <translation>Količina odlita ali prilita dobroimetju.</translation> </message> </context> <context> <name>WalletModel</name> <message> <location filename="../walletmodel.cpp" line="+402"/> <location line="+246"/> <source>Sending...</source> <translation>Pošiljanje...</translation> </message> </context> <context> <name>bitcoin-core</name> <message> <location filename="../bitcoinstrings.cpp" line="+33"/> <source>LEOcoin version</source> <translation>LEOcoin različica</translation> </message> <message> <location line="+1"/> <source>Usage:</source> <translation>Uporaba:</translation> </message> <message> <location line="+1"/> <source>Send command to -server or LEOcoind</source> <translation>Pošlji ukaz na -server ali LEOcoind</translation> </message> <message> <location line="+1"/> <source>List commands</source> <translation>Prikaži ukaze</translation> </message> <message> <location line="+1"/> <source>Get help for a command</source> <translation>Prikaži pomoč za ukaz</translation> </message> <message> <location line="+2"/> <source>Options:</source> <translation>Možnosti:</translation> </message> <message> <location line="+2"/> <source>Specify configuration file (default: LEOcoin.conf)</source> <translation>Določi konfiguracijsko datoteko (privzeto: 
LEOcoin.conf)</translation> </message> <message> <location line="+1"/> <source>Specify pid file (default: LEOcoind.pid)</source> <translation>Določi pid datoteko (privzeto: LEOcoind.pid)</translation> </message> <message> <location line="+2"/> <source>Specify wallet file (within data directory)</source> <translation>Določi datoteko denarnice (znotraj imenika s podatki)</translation> </message> <message> <location line="-1"/> <source>Specify data directory</source> <translation>Določi podatkovni imenik</translation> </message> <message> <location line="+2"/> <source>Set database cache size in megabytes (default: 25)</source> <translation>Nastavi pomnilnik podatkovne zbirke v megabajtih (privzeto: 25)</translation> </message> <message> <location line="+1"/> <source>Set database disk log size in megabytes (default: 100)</source> <translation>Nastavi velikost zapisa podatkovne baze na disku v megabajtih (privzeto: 100)</translation> </message> <message> <location line="+6"/> <source>Listen for connections on &lt;port&gt; (default: 51737 or testnet: 51997)</source> <translation>Sprejmi povezave na &lt;port&gt; (privzeta vrata: 51737 ali testnet: 51997) </translation> </message> <message> <location line="+1"/> <source>Maintain at most &lt;n&gt; connections to peers (default: 125)</source> <translation>Obdrži maksimalno število &lt;n&gt; povezav (privzeto: 125)</translation> </message> <message> <location line="+3"/> <source>Connect to a node to retrieve peer addresses, and disconnect</source> <translation>Poveži se na vozlišče da pridobiš naslove soležnikov in prekini povezavo</translation> </message> <message> <location line="+1"/> <source>Specify your own public address</source> <translation>Določite vaš lasten javni naslov</translation> </message> <message> <location line="+5"/> <source>Bind to given address. Use [host]:port notation for IPv6</source> <translation>Naveži na dani naslov. 
Uporabi [host]:port ukaz za IPv6</translation> </message> <message> <location line="+2"/> <source>Stake your coins to support network and gain reward (default: 1)</source> <translation>Deleži svoje kovance za podporo omrežja in pridobi nagrado (default: 1)</translation> </message> <message> <location line="+5"/> <source>Threshold for disconnecting misbehaving peers (default: 100)</source> <translation>Prag za prekinitev povezav s slabimi odjemalci (privzeto: 100)</translation> </message> <message> <location line="+1"/> <source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source> <translation>Število sekund preden se ponovno povežejo neodzivni soležniki (privzeto: 86400)</translation> </message> <message> <location line="-44"/> <source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source> <translation>Prišlo je do napake pri nastavljanju RPC porta %u za vhodne povezave na IPv4: %s</translation> </message> <message> <location line="+51"/> <source>Detach block and address databases. Increases shutdown time (default: 0)</source> <translation>Loči podatkovne baze blokov in naslovov. Podaljša čas zaustavitve (privzeto: 0)</translation> </message> <message> <location line="+109"/> <source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Napaka: Transakcija je bila zavrnjena. 
To se je lahko zgodilo, če so bili kovanci v vaši denarnici že zapravljeni, na primer če ste uporabili kopijo wallet.dat in so bili kovanci zapravljeni v kopiji, a tu še niso bili označeni kot zapravljeni.</translation> </message> <message> <location line="-5"/> <source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds </source> <translation>Napaka: Ta transakcija zahteva transakcijsko provizijo vsaj %s zaradi svoje količine, kompleksnosti ali uporabo sredstev, ki ste jih prejeli pred kratkim. </translation> </message> <message> <location line="-87"/> <source>Listen for JSON-RPC connections on &lt;port&gt; (default: 51736 or testnet: 51996)</source> <translation>Sprejmi povezave na &lt;port&gt; (privzeta vrata: 51736 ali testnet: 51996) </translation> </message> <message> <location line="-11"/> <source>Accept command line and JSON-RPC commands</source> <translation>Sprejmi ukaze iz ukazne vrstice in JSON-RPC</translation> </message> <message> <location line="+101"/> <source>Error: Transaction creation failed </source> <translation>Napaka: Ustvarjanje transakcije spodletelo</translation> </message> <message> <location line="-5"/> <source>Error: Wallet locked, unable to create transaction </source> <translation>Napaka: Zaklenjena denarnica, ni mogoče opraviti transakcije</translation> </message> <message> <location line="-8"/> <source>Importing blockchain data file.</source> <translation>Uvažanje blockchain podatkovne datoteke.</translation> </message> <message> <location line="+1"/> <source>Importing bootstrap blockchain data file.</source> <translation>Uvažanje podatkovne datoteke verige blokov.</translation> </message> <message> <location line="-88"/> <source>Run in the background as a daemon and accept commands</source> <translation>Teci v ozadju in sprejemaj ukaze</translation> </message> <message> <location line="+1"/> <source>Use the test network</source> <translation>Uporabi 
testno omrežje</translation> </message> <message> <location line="-24"/> <source>Accept connections from outside (default: 1 if no -proxy or -connect)</source> <translation>Sprejmi zunanje povezave (privzeto: 1 če ni nastavljen -proxy ali -connect)</translation> </message> <message> <location line="-38"/> <source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source> <translation>Prišlo je do napake pri nastavljanju RPC porta %u za vhodne povezave na IPv6: %s</translation> </message> <message> <location line="+117"/> <source>Error initializing database environment %s! To recover, BACKUP THAT DIRECTORY, then remove everything from it except for wallet.dat.</source> <translation>Napaka pri zagonu podatkovne baze okolja %s! Za popravilo, NAPRAVITE VARNOSTNO KOPIJO IMENIKA, in iz njega odstranite vse razen datoteke wallet.dat</translation> </message> <message> <location line="-20"/> <source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source> <translation>Nastavi maksimalno velikost visoke-prioritete/nizke-provizije transakcij v bajtih (privzeto: 27000)</translation> </message> <message> <location line="+11"/> <source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source> <translation>Opozorilo: -paytxfee je nastavljen zelo visoko! To je transakcijska provizija, ki jo boste plačali ob pošiljanju transakcije.</translation> </message> <message> <location line="+61"/> <source>Warning: Please check that your computer&apos;s date and time are correct! If your clock is wrong LEOcoin will not work properly.</source> <translation>Opozorilo: Prosimo preverite svoj datum in čas svojega računalnika! Če je vaša ura nastavljena napačno LEOcoin ne bo deloval.</translation> </message> <message> <location line="-31"/> <source>Warning: error reading wallet.dat! 
All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source> <translation>Opozorilo: napaka pri branju wallet.dat! Vsi ključi so bili pravilno prebrani, podatki o transakciji ali imenik vnešenih naslovov so morda izgubljeni ali nepravilni.</translation> </message> <message> <location line="-18"/> <source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source> <translation>Opozorilo: wallet.dat je pokvarjena, podatki rešeni! Originalna wallet.dat je bila shranjena kot denarnica. {timestamp}.bak v %s; če imate napačno prikazano stanje na računu ali v transakcijah prenovite datoteko z varnostno kopijo. </translation> </message> <message> <location line="-30"/> <source>Attempt to recover private keys from a corrupt wallet.dat</source> <translation>Poizkusi rešiti zasebni ključ iz pokvarjene wallet.dat </translation> </message> <message> <location line="+4"/> <source>Block creation options:</source> <translation>Možnosti ustvarjanja blokov:</translation> </message> <message> <location line="-62"/> <source>Connect only to the specified node(s)</source> <translation>Poveži se samo na določena vozlišče(a)</translation> </message> <message> <location line="+4"/> <source>Discover own IP address (default: 1 when listening and no -externalip)</source> <translation>Odkrij svoj IP naslov (privzeto: 1 ob poslušanju, ko ni aktiviran -externalip)</translation> </message> <message> <location line="+94"/> <source>Failed to listen on any port. Use -listen=0 if you want this.</source> <translation>Poslušanje za vrata je spodletelo. 
Če želite lahko uporabite ukaz -listen=0.</translation> </message> <message> <location line="-90"/> <source>Find peers using DNS lookup (default: 1)</source> <translation>Najdi soležnike z uporabno DNS vpogleda (privzeto: 1)</translation> </message> <message> <location line="+5"/> <source>Sync checkpoints policy (default: strict)</source> <translation>Sinhronizacija načina točk preverjanja (privzeto: strogo)</translation> </message> <message> <location line="+83"/> <source>Invalid -tor address: &apos;%s&apos;</source> <translation>Neveljaven -tor naslov: &apos;%s&apos;</translation> </message> <message> <location line="+4"/> <source>Invalid amount for -reservebalance=&lt;amount&gt;</source> <translation>Neveljavni znesek za -reservebalance=&lt;amount&gt;</translation> </message> <message> <location line="-82"/> <source>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: 5000)</source> <translation>Največji sprejemni medpomnilnik glede na povezavo, &lt;n&gt;*1000 bytov (privzeto: 5000)</translation> </message> <message> <location line="+1"/> <source>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: 1000)</source> <translation>Največji oddajni medpomnilnik glede na povezavo, &lt;n&gt;*1000 bytov (privzeto: 1000)</translation> </message> <message> <location line="-16"/> <source>Only connect to nodes in network &lt;net&gt; (IPv4, IPv6 or Tor)</source> <translation>Poveži se samo z vozlišči v omrežju &lt;net&gt; (IPv4, IPv6 in Tor)</translation> </message> <message> <location line="+28"/> <source>Output extra debugging information. Implies all other -debug* options</source> <translation>Output dodatnih informacij razhroščevanja. Obsega vse druge -debug* možnosti.</translation> </message> <message> <location line="+1"/> <source>Output extra network debugging information</source> <translation>Output dodatnih informacij razhroščevanja omrežja. 
</translation> </message> <message> <location line="+1"/> <source>Prepend debug output with timestamp</source> <translation>Opremi output rahroščevanja s časovnim žigom. </translation> </message> <message> <location line="+35"/> <source>SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source> <translation>SSL možnosti: (glejte Bitcoin Wiki za navodla, kako nastaviti SSL)</translation> </message> <message> <location line="-74"/> <source>Select the version of socks proxy to use (4-5, default: 5)</source> <translation>Izberi verzijo socks proxya za uporabo (4-5, privzeto: 5)</translation> </message> <message> <location line="+41"/> <source>Send trace/debug info to console instead of debug.log file</source> <translation>Pošlji sledilne/razhroščevalne informacije v konzolo namesto jih shraniti v debug.log datoteko</translation> </message> <message> <location line="+1"/> <source>Send trace/debug info to debugger</source> <translation>Pošlji sledilne/razhroščevalne informacije v razhroščevalnik</translation> </message> <message> <location line="+28"/> <source>Set maximum block size in bytes (default: 250000)</source> <translation>Nastavi največjo velikost bloka v bajtih (privzeto: 250000)</translation> </message> <message> <location line="-1"/> <source>Set minimum block size in bytes (default: 0)</source> <translation>Nastavi najmanjšo velikost bloka v bajtih (privzeto: 0)</translation> </message> <message> <location line="-29"/> <source>Shrink debug.log file on client startup (default: 1 when no -debug)</source> <translation>Skrči debug.log datoteko ob zagonu aplikacije (privzeto: 1 ko ni aktiviran -debug)</translation> </message> <message> <location line="-42"/> <source>Specify connection timeout in milliseconds (default: 5000)</source> <translation>Določi čas pavze povezovanja v milisekundah (privzeto: 5000)</translation> </message> <message> <location line="+109"/> <source>Unable to sign checkpoint, wrong checkpointkey? 
</source> <translation>Ni bilo mogoče vpisati točke preverjanja, napačen ključ za točko preverjanja? </translation> </message> <message> <location line="-80"/> <source>Use UPnP to map the listening port (default: 0)</source> <translation>Uporabi UPnP za mapiranje vrat poslušanja (privzeto: 0)</translation> </message> <message> <location line="-1"/> <source>Use UPnP to map the listening port (default: 1 when listening)</source> <translation>Uporabi UPnP za mapiranje vrat poslušanja (privzeto: 1 med poslušanjem)</translation> </message> <message> <location line="-25"/> <source>Use proxy to reach tor hidden services (default: same as -proxy)</source> <translation>Uporabi proxy za povezavo s skritimi storitvami tora (privzeto: isto kot -proxy) </translation> </message> <message> <location line="+42"/> <source>Username for JSON-RPC connections</source> <translation>Uporabniško ime za JSON-RPC povezave</translation> </message> <message> <location line="+47"/> <source>Verifying database integrity...</source> <translation>Potrdite neoporečnost baze podatkov...</translation> </message> <message> <location line="+57"/> <source>WARNING: syncronized checkpoint violation detected, but skipped!</source> <translation>OPOZORILO: zaznana je bila kršitev s sinhronizirami točkami preverjanja, a je bila izpuščena.</translation> </message> <message> <location line="+1"/> <source>Warning: Disk space is low!</source> <translation>Opozorilo: Malo prostora na disku!</translation> </message> <message> <location line="-2"/> <source>Warning: This version is obsolete, upgrade required!</source> <translation>Opozorilo: ta različica je zastarela, potrebna je nadgradnja!</translation> </message> <message> <location line="-48"/> <source>wallet.dat corrupt, salvage failed</source> <translation>wallet.dat poškodovana, neuspešna obnova</translation> </message> <message> <location line="-54"/> <source>Password for JSON-RPC connections</source> <translation>Geslo za JSON-RPC povezave</translation> 
</message> <message> <location line="-84"/> <source>%s, you must set a rpcpassword in the configuration file: %s It is recommended you use the following random password: rpcuser=LEOcoinrpc rpcpassword=%s (you do not need to remember this password) The username and password MUST NOT be the same. If the file does not exist, create it with owner-readable-only file permissions. It is also recommended to set alertnotify so you are notified of problems; for example: alertnotify=echo %%s | mail -s &quot;LEOcoin Alert&quot; [email protected] </source> <translation>%s, nastaviti morate rpcgeslo v konfiguracijski datoteki: %s Priporočeno je, da uporabite naslednje naključno geslo: rpcuser=LEOcoinrpc rpcpassword=%s (tega gesla si vam ni potrebno zapomniti) Uporabniško ime in geslo NE SMETA biti ista. Če datoteka ne obstaja, jo ustvarite z lastniškimi dovoljenji za datoteke. Prav tako je priporočeno, da nastavite alernotify, tkako da vas opozori na probleme; na primer: alertnotify=echo %%s | mail -s &quot;LEOcoin Alarm&quot; [email protected] </translation> </message> <message> <location line="+51"/> <source>Find peers using internet relay chat (default: 0)</source> <translation>Najdi soležnike prek irca (privzeto: 0)</translation> </message> <message> <location line="+5"/> <source>Sync time with other nodes. Disable if time on your system is precise e.g. syncing with NTP (default: 1)</source> <translation>Sinhroniziraj čas z drugimi vozlišči. Onemogoči, če je čas na vašem sistemu točno nastavljen, npr. 
sinhroniziranje z NTP (privzeto: 1)</translation> </message> <message> <location line="+15"/> <source>When creating transactions, ignore inputs with value less than this (default: 0.01)</source> <translation>Ob ustvarjanju transakcij, prezri vnose z manjšo vrednostjo kot (privzeto: 0.01)</translation> </message> <message> <location line="+16"/> <source>Allow JSON-RPC connections from specified IP address</source> <translation>Dovoli JSON-RPC povezave z določenega IP naslova</translation> </message> <message> <location line="+1"/> <source>Send commands to node running on &lt;ip&gt; (default: 127.0.0.1)</source> <translation>Pošlji ukaze vozlišču na &lt;ip&gt; (privzet: 127.0.0.1)</translation> </message> <message> <location line="+1"/> <source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source> <translation>Izvrši ukaz, ko se najboljši blok spremeni (%s je v cmd programu nadomeščen z zgoščenimi bloki).</translation> </message> <message> <location line="+3"/> <source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source> <translation>Izvedi ukaz, ko bo transakcija denarnice se spremenila (V cmd je bil TxID zamenjan za %s)</translation> </message> <message> <location line="+3"/> <source>Require a confirmations for change (default: 0)</source> <translation>Zahtevaj potrditve za spremembo (default: 0)</translation> </message> <message> <location line="+1"/> <source>Enforce transaction scripts to use canonical PUSH operators (default: 1)</source> <translation>Zahtevaj da transakcijske skripte uporabljajo operatorje canonical PUSH (privzeto: 1)</translation> </message> <message> <location line="+2"/> <source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source> <translation>Izvrši ukaz, ko je prejet relevanten alarm (%s je v cmd programu nadomeščen s sporočilom)</translation> </message> <message> <location line="+3"/> <source>Upgrade wallet to latest 
format</source> <translation>Posodobi denarnico v najnovejši zapis</translation> </message> <message> <location line="+1"/> <source>Set key pool size to &lt;n&gt; (default: 100)</source> <translation>Nastavi velikost ključa bazena na &lt;n&gt; (privzeto: 100)</translation> </message> <message> <location line="+1"/> <source>Rescan the block chain for missing wallet transactions</source> <translation>Ponovno preglej verigo blokov za manjkajoče transakcije denarnice</translation> </message> <message> <location line="+2"/> <source>How many blocks to check at startup (default: 2500, 0 = all)</source> <translation>Koliko blokov naj preveri ob zagonu aplikacije (privzeto: 2500, 0 = vse)</translation> </message> <message> <location line="+1"/> <source>How thorough the block verification is (0-6, default: 1)</source> <translation>Kako temeljito naj bo preverjanje blokov (0-6, privzeto: 1)</translation> </message> <message> <location line="+1"/> <source>Imports blocks from external blk000?.dat file</source> <translation>Uvozi bloke iz zunanje blk000?.dat datoteke</translation> </message> <message> <location line="+8"/> <source>Use OpenSSL (https) for JSON-RPC connections</source> <translation>Uporabi OpenSSL (https) za JSON-RPC povezave</translation> </message> <message> <location line="+1"/> <source>Server certificate file (default: server.cert)</source> <translation>Datoteka potrdila strežnika (privzeta: server.cert)</translation> </message> <message> <location line="+1"/> <source>Server private key (default: server.pem)</source> <translation>Zasebni ključ strežnika (privzet: server.pem)</translation> </message> <message> <location line="+1"/> <source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source> <translation>Dovoljeni kodirniki (privzeti: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</translation> </message> <message> <location line="+53"/> <source>Error: Wallet unlocked for staking only, unable to create 
transaction.</source> <translation type="unfinished"></translation> </message> <message> <location line="+18"/> <source>WARNING: Invalid checkpoint found! Displayed transactions may not be correct! You may need to upgrade, or notify developers.</source> <translation>OPOZORILO: Najdene so bile neveljavne točke preverjanja! Prikazane transakcije so morda napačne! Poiščite novo različico aplikacije ali pa obvestite razvijalce.</translation> </message> <message> <location line="-158"/> <source>This help message</source> <translation>To sporočilo pomoči</translation> </message> <message> <location line="+95"/> <source>Wallet %s resides outside data directory %s.</source> <translation>Denarnica %s se nahaja zunaj datotečnega imenika %s.</translation> </message> <message> <location line="+1"/> <source>Cannot obtain a lock on data directory %s. LEOcoin is probably already running.</source> <translation>Ni bilo mogoče najti podatkovnega imenika %s. Aplikacija LEOcoin je verjetno že zagnana.</translation> </message> <message> <location line="-98"/> <source>LEOcoin</source> <translation>LEOcoin</translation> </message> <message> <location line="+140"/> <source>Unable to bind to %s on this computer (bind returned error %d, %s)</source> <translation>Na tem računalniku je bilo nemogoče vezati na %s (bind returned error %d, %s)</translation> </message> <message> <location line="-130"/> <source>Connect through socks proxy</source> <translation>Poveži se skozi socks proxy</translation> </message> <message> <location line="+3"/> <source>Allow DNS lookups for -addnode, -seednode and -connect</source> <translation>Omogoči DNS povezave za -addnode, -seednode in -connect</translation> </message> <message> <location line="+122"/> <source>Loading addresses...</source> <translation>Nalaganje naslovov ...</translation> </message> <message> <location line="-15"/> <source>Error loading blkindex.dat</source> <translation>Napaka pri nalaganju blkindex.dat</translation> </message> <message> 
<location line="+2"/> <source>Error loading wallet.dat: Wallet corrupted</source> <translation>Napaka pri nalaganju wallet.dat: denarnica pokvarjena</translation> </message> <message> <location line="+4"/> <source>Error loading wallet.dat: Wallet requires newer version of LEOcoin</source> <translation>Napaka pri nalaganju wallet.dat: denarnica zahteva novejšo verzijo LEOcoin</translation> </message> <message> <location line="+1"/> <source>Wallet needed to be rewritten: restart LEOcoin to complete</source> <translation>Denarnica mora biti prepisana: ponovno odprite LEOcoin za dokončanje</translation> </message> <message> <location line="+1"/> <source>Error loading wallet.dat</source> <translation>Napaka pri nalaganju wallet.dat</translation> </message> <message> <location line="-16"/> <source>Invalid -proxy address: &apos;%s&apos;</source> <translation>Neveljaven -proxy naslov: &apos;%s&apos;</translation> </message> <message> <location line="-1"/> <source>Unknown network specified in -onlynet: &apos;%s&apos;</source> <translation>Neznano omrežje določeno v -onlynet: &apos;%s&apos;</translation> </message> <message> <location line="-1"/> <source>Unknown -socks proxy version requested: %i</source> <translation>Zahtevana neznana -socks proxy različica: %i</translation> </message> <message> <location line="+4"/> <source>Cannot resolve -bind address: &apos;%s&apos;</source> <translation>Ni mogoče določiti -bind naslova: &apos;%s&apos;</translation> </message> <message> <location line="+2"/> <source>Cannot resolve -externalip address: &apos;%s&apos;</source> <translation>Ni mogoče določiti -externalip naslova: &apos;%s&apos;</translation> </message> <message> <location line="-24"/> <source>Invalid amount for -paytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Neveljavni znesek za -paytxfee=&lt;amount&gt;: &apos;%s&apos;</translation> </message> <message> <location line="+44"/> <source>Error: could not start node</source> <translation>Napaka: ni mogoče zagnati 
vozlišča</translation> </message> <message> <location line="+11"/> <source>Sending...</source> <translation>Pošiljanje...</translation> </message> <message> <location line="+5"/> <source>Invalid amount</source> <translation>Neveljavna količina</translation> </message> <message> <location line="+1"/> <source>Insufficient funds</source> <translation>Premalo sredstev</translation> </message> <message> <location line="-34"/> <source>Loading block index...</source> <translation>Nalaganje indeksa blokov ...</translation> </message> <message> <location line="-103"/> <source>Add a node to connect to and attempt to keep the connection open</source> <translation>Dodaj vozlišče za povezavo nanj in skušaj le to obdržati odprto</translation> </message> <message> <location line="+122"/> <source>Unable to bind to %s on this computer. LEOcoin is probably already running.</source> <translation>Navezava v %s na tem računalniku ni mogoča LEOcoin aplikacija je verjetno že zagnana.</translation> </message> <message> <location line="-97"/> <source>Fee per KB to add to transactions you send</source> <translation>Provizija na KB ki jo morate dodati transakcijam, ki jih pošiljate</translation> </message> <message> <location line="+55"/> <source>Invalid amount for -mininput=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Neveljavni znesek za -miniput=&lt;amount&gt;: &apos;%s&apos;</translation> </message> <message> <location line="+25"/> <source>Loading wallet...</source> <translation>Nalaganje denarnice ...</translation> </message> <message> <location line="+8"/> <source>Cannot downgrade wallet</source> <translation>Ne morem </translation> </message> <message> <location line="+1"/> <source>Cannot initialize keypool</source> <translation>Ni mogoče zagnati keypoola</translation> </message> <message> <location line="+1"/> <source>Cannot write default address</source> <translation>Ni mogoče zapisati privzetega naslova</translation> </message> <message> <location line="+1"/> 
<source>Rescanning...</source> <translation>Ponovno pregledovanje ...</translation> </message> <message> <location line="+5"/> <source>Done loading</source> <translation>Nalaganje končano</translation> </message> <message> <location line="-167"/> <source>To use the %s option</source> <translation>Za uporabo %s opcije</translation> </message> <message> <location line="+14"/> <source>Error</source> <translation>Napaka</translation> </message> <message> <location line="+6"/> <source>You must set rpcpassword=&lt;password&gt; in the configuration file: %s If the file does not exist, create it with owner-readable-only file permissions.</source> <translation>Nastaviti morate rpcpassword=&lt;password&gt; v konfiguracijski datoteki: %s Če datoteka ne obstaja, jo ustvarite z lastniškimi dovoljenji za datoteke.</translation> </message> </context> </TS><|fim▁end|>
<location line="+1"/> <source>Maximum Ring size:</source> <translation type="unfinished"></translation> </message>
<|file_name|>list_concat.py<|end_file_name|><|fim▁begin|>list2=['tom','jerry','mickey'] list1=['hardy','bob','minnie'] print(list1+list2) print(list2+list1) <|fim▁hole|>print(list1*3) print(list2+['disney','nick','pogo'])<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from infra_libs.infra_types.infra_types import freeze<|fim▁hole|>from infra_libs.infra_types.infra_types import thaw from infra_libs.infra_types.infra_types import FrozenDict<|fim▁end|>
<|file_name|>Sentence.js<|end_file_name|><|fim▁begin|>import React, { useContext, useEffect, Fragment } from "react" import { Typography } from "@material-ui/core" import EventsContext from "../context/EventsContext" import TitleContext from "../context/TitleContext" import Box from "@material-ui/core/Box" import Button from "@material-ui/core/Button" import FacebookIcon from '@material-ui/icons/Facebook'; import TwitterIcon from '@material-ui/icons/Twitter'; import FileCopyIcon from '@material-ui/icons/FileCopy'; import makeStyles from "@material-ui/core/styles/makeStyles" import { graphql, useStaticQuery } from "gatsby" const ucfirst = (string) => string[0].toUpperCase() + string.slice(1) const Sentence = () => { const useStyles = makeStyles(theme => ( { sentence: { fontSize: '1.8em', }, sharing: { fontFamily: 'Raleway,sans-serif', display: 'flex', justifyContent: 'space-between', margin: '0 auto', marginTop: '50px', maxWidth: '300px' }, button: { fontFamily: theme.typography.body1.fontFamily, fontWeight: 'bold', textTransform: 'none' } })) const classes = useStyles() const titleContext = useContext(TitleContext) const eventsContext = useContext(EventsContext) const { site } = useStaticQuery( graphql` query { site { siteMetadata { title description author url app_id } } } ` ) let testo = "" let sharingText = "" let url = site.siteMetadata.url if( typeof window !== 'undefined' ) { url = window.location.href } if (eventsContext.events.length === 0) { testo = "" } if (eventsContext.events.length === 1) { testo = eventsContext.events[0].precisediff + " ago: <br/>" + eventsContext.events[0].name sharingText = eventsContext.events[0].precisediff + " ago: " + eventsContext.events[0].name + " #closerintime" } if (eventsContext.events.length === 2) { let evento1 = eventsContext.events[0] let evento2 = eventsContext.events[1] let verb = parseInt(evento2.plural) ? 
" are " : " is " if (evento1.diff > evento2.diff) { testo = ucfirst(evento2.name) + verb + "closer in time to us than to " + evento1.name + "." sharingText = ucfirst(evento2.name) + verb + "#closerintime to us than to " + evento1.name + "." } else if (evento1.diff < evento2.diff) { testo = ucfirst(evento2.name) + verb + "closer in time to " + evento1.name + " than to us." sharingText = ucfirst(evento2.name) + verb + "#closerintime to " + evento1.name + " than to us." } else { testo = ucfirst(evento2.name) + verb + "exactly halfway between " + evento1.name + " and us." sharingText = ucfirst(evento2.name) + verb + "exactly halfway between " + evento1.name + " and us. #closerintime" } } if (eventsContext.events.length === 3) { let evento1 = eventsContext.events[0] let evento2 = eventsContext.events[1] let evento3 = eventsContext.events[2] if (evento1.diff > evento3.diff) { testo = "More time passed between " + evento1.name + " and " + evento2.name + " than between " + evento3.name + " and us." sharingText = "More time passed between " + evento1.name + " and " + evento2.name + " than between " + evento3.name + " and us. #closerintime" } else if (evento1.diff < evento3.diff) { testo = "More time passed between " + evento3.name + " and us than between " + evento1.name + " and " + evento2.name + "." sharingText = "More time passed between " + evento3.name + " and us than between " + evento1.name + " and " + evento2.name + ". #closerintime" } else { testo = "The same amount time passed between " + evento1.name + " and " + evento2.name + " as it did between " + evento3.name + " and us." sharingText = "The same amount time passed between " + evento1.name + " and " + evento2.name + " as it did between " + evento3.name + " and us. 
#closerintime" } } useEffect(() => { titleContext.setTitle(testo) }, [testo]) const copyToClipboard = (text, href) => { var textArea = document.createElement("textarea"); textArea.style.position = 'fixed'; textArea.style.top = 0; textArea.style.left = 0; textArea.style.width = '2em'; textArea.style.height = '2em'; textArea.style.padding = 0; textArea.style.border = 'none'; textArea.style.outline = 'none'; textArea.style.boxShadow = 'none';<|fim▁hole|> textArea.style.background = 'transparent'; textArea.value = text+' '+href; document.body.appendChild(textArea); textArea.select(); try { var successful = document.execCommand('copy'); var msg = successful ? 'successful' : 'unsuccessful'; console.log('Copying text command was ' + msg); } catch (err) { console.log('Oops, unable to copy'); } document.body.removeChild(textArea); } return (<Fragment> <Typography className={classes.sentence} variant={"h1"} align={"center"} dangerouslySetInnerHTML={{ __html: testo }}></Typography> {(sharingText) && <Box className={classes.sharing} > <Button component={"a"} className={classes.button} target={"_blank"} href={"https://twitter.com/intent/tweet?text=" + encodeURIComponent(sharingText) + "&url=" + encodeURIComponent(url)} startIcon={<TwitterIcon/>}> Tweet </Button> <Button component={"a"} className={classes.button} target={"_blank"} href={"https://www.facebook.com/dialog/share?app_id=" + site.siteMetadata.app_id + "&href=" + encodeURIComponent(url) + "&quote=" + encodeURIComponent(sharingText) + "&hashtag=%23closerintime"} startIcon={<FacebookIcon/>}> Share </Button> <Button component={"a"} className={classes.button} onClick={() => copyToClipboard(sharingText,url)} startIcon={<FileCopyIcon/>}> Copy </Button> </Box>} </Fragment>) } export default Sentence<|fim▁end|>
<|file_name|>database.js<|end_file_name|><|fim▁begin|>/** * description * Author: Oded Sagir * @param Object require for adding dependencies * @return Object Class Object */ define(function(require) { var database = { posts: require("text!api/posts.json") };<|fim▁hole|> return database; });<|fim▁end|>
<|file_name|>lhp.directive.ts<|end_file_name|><|fim▁begin|>/** * Created by akabeera on 2/18/2016. */ /// <reference path="../../typings/browser.d.ts" /> module app.Directives { 'use strict'; export class LhpDirective implements ng.IDirective { public templateUrl:string = 'components/lhp.html'; public restrict:string = 'E'; public controller:string = 'lhpController'; public controllerAs:string = 'lctrl'; public scope = {}; constructor(private $log:ng.ILogService) { this.$log.info('lhp directive'); } public static Factory() { return (log:ng.ILogService) => { return new LhpDirective(log); }; } } <|fim▁hole|><|fim▁end|>
angular.module('reactionGifs').directive('lhp', ['$log', app.Directives.LhpDirective.Factory()]); }
<|file_name|>GS.cpp<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2007-2009 Gabest * http://www.gabest.org * * This Program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2, or (at your option) * any later version. * * This Program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with GNU Make; see the file COPYING. If not, write to * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA USA. * http://www.gnu.org/copyleft/gpl.html * */ #include "stdafx.h" #include "GSdx.h" #include "GSUtil.h" #include "GSRendererSW.h" #include "GSRendererNull.h" #include "GSDeviceNull.h" #include "GSDeviceOGL.h" #include "GSRendererOGL.h" #ifdef _WINDOWS #include "GSRendererDX9.h" #include "GSRendererDX11.h" #include "GSDevice9.h" #include "GSDevice11.h" #include "GSWndDX.h" #include "GSWndWGL.h" #include "GSRendererCS.h" #include "GSSettingsDlg.h" static HRESULT s_hr = E_FAIL; #else #include "GSWndOGL.h" #include "GSWndEGL.h" #include <gtk/gtk.h> #include <gdk/gdkx.h> extern bool RunLinuxDialog(); #endif #define PS2E_LT_GS 0x01 #define PS2E_GS_VERSION 0x0006 #define PS2E_X86 0x01 // 32 bit #define PS2E_X86_64 0x02 // 64 bit static GSRenderer* s_gs = NULL; static void (*s_irq)() = NULL; static uint8* s_basemem = NULL; static int s_renderer = -1; static bool s_framelimit = true; static bool s_vsync = false; static bool s_exclusive = true; #ifdef _WINDOWS static bool s_isgsopen2 = false; // boolean to remove some stuff from the config panel in new PCSX2's/ #endif bool gsopen_done = false; // crash guard for GSgetTitleInfo2 EXPORT_C_(uint32) PS2EgetLibType() { return 
PS2E_LT_GS; } EXPORT_C_(const char*) PS2EgetLibName() { return GSUtil::GetLibName(); } EXPORT_C_(uint32) PS2EgetLibVersion2(uint32 type) { const uint32 revision = 0; const uint32 build = 1; return (build << 0) | (revision << 8) | (PS2E_GS_VERSION << 16) | (PLUGIN_VERSION << 24); } #ifdef _WINDOWS EXPORT_C_(void) PS2EsetEmuVersion(const char* emuId, uint32 version) { s_isgsopen2 = true; } #endif EXPORT_C_(uint32) PS2EgetCpuPlatform() { #ifdef _M_AMD64 return PS2E_X86_64; #else return PS2E_X86; #endif } EXPORT_C GSsetBaseMem(uint8* mem) { s_basemem = mem; if(s_gs) { s_gs->SetRegsMem(s_basemem); } } EXPORT_C GSsetSettingsDir(const char* dir) { theApp.SetConfigDir(dir); } EXPORT_C_(int) GSinit() { if(!GSUtil::CheckSSE()) { return -1; } #ifdef _WINDOWS s_hr = ::CoInitializeEx(NULL, COINIT_MULTITHREADED); if(!GSUtil::CheckDirectX()) { return -1; } #endif return 0; } EXPORT_C GSshutdown() { gsopen_done = false; delete s_gs; s_gs = NULL; s_renderer = -1; #ifdef _WINDOWS if(SUCCEEDED(s_hr)) { ::CoUninitialize(); s_hr = E_FAIL; } #endif } EXPORT_C GSclose() { gsopen_done = false; if(s_gs == NULL) return; s_gs->ResetDevice(); // Opengl requirement: It must be done before the Detach() of // the context delete s_gs->m_dev; s_gs->m_dev = NULL; if (s_gs->m_wnd) { s_gs->m_wnd->Detach(); } } static int _GSopen(void** dsp, char* title, int renderer, int threads = -1) { GSDevice* dev = NULL; if(renderer == -1) { renderer = theApp.GetConfig("renderer", 0); } if(threads == -1) { threads = theApp.GetConfig("extrathreads", 0); } GSWnd* wnd[2]; try { if(s_renderer != renderer) { // Emulator has made a render change request, which requires a completely // new s_gs -- if the emu doesn't save/restore the GS state across this // GSopen call then they'll get corrupted graphics, but that's not my problem. 
delete s_gs; s_gs = NULL; } if(renderer == 15) { #ifdef _WINDOWS dev = new GSDevice11(); if(dev == NULL) { return -1; } delete s_gs; s_gs = new GSRendererCS(); s_renderer = renderer; #endif } else { switch(renderer / 3) { default: #ifdef _WINDOWS case 0: dev = new GSDevice9(); break; case 1: dev = new GSDevice11(); break; #endif case 3: dev = new GSDeviceNull(); break; case 4: dev = new GSDeviceOGL(); break; } if(dev == NULL) { return -1; } if(s_gs == NULL) { switch(renderer % 3) { default: case 0: switch(renderer) { default: #ifdef _WINDOWS case 0: s_gs = (GSRenderer*)new GSRendererDX9(); break; case 3: s_gs = (GSRenderer*)new GSRendererDX11(); break; #endif case 12: s_gs = (GSRenderer*)new GSRendererOGL(); break; } break; case 1: s_gs = new GSRendererSW(threads); break; case 2: s_gs = new GSRendererNull(); break; } s_renderer = renderer; } } if (s_gs->m_wnd == NULL) { #ifdef _WINDOWS if (renderer / 3 == 4) s_gs->m_wnd = new GSWndWGL(); else s_gs->m_wnd = new GSWndDX(); #else #ifdef ENABLE_GLES wnd[0] = NULL; #else wnd[0] = new GSWndOGL(); #endif wnd[1] = new GSWndEGL(); #endif } } catch(std::exception& ex) { // Allowing std exceptions to escape the scope of the plugin callstack could // be problematic, because of differing typeids between DLL and EXE compilations. 
// ('new' could throw std::alloc) printf("GSdx error: Exception caught in GSopen: %s", ex.what()); return -1; } s_gs->SetRegsMem(s_basemem); s_gs->SetIrqCallback(s_irq); s_gs->SetVSync(s_vsync); s_gs->SetFrameLimit(s_framelimit); if(*dsp == NULL) { // old-style API expects us to create and manage our own window: int w = theApp.GetConfig("ModeWidth", 0); int h = theApp.GetConfig("ModeHeight", 0); #ifdef _LINUX for(uint32 i = 0; i < 2; i++) { try { if (wnd[i] == NULL) continue; wnd[i]->Create(title, w, h); s_gs->m_wnd = wnd[i]; if (i == 0) delete wnd[1]; break; } catch (GSDXRecoverableError) { wnd[i]->Detach(); delete wnd[i]; } } if (s_gs->m_wnd == NULL) { GSclose(); return -1; } #endif #ifdef _WINDOWS if(!s_gs->CreateWnd(title, w, h)) { GSclose(); return -1; } #endif s_gs->m_wnd->Show(); *dsp = s_gs->m_wnd->GetDisplay(); } else { s_gs->SetMultithreaded(true); #ifdef _LINUX if (s_gs->m_wnd) { // A window was already attached to s_gs so we also // need to restore the window state (Attach) s_gs->m_wnd->Attach((void*)((uint32*)(dsp)+1), false); } else { // No window found, try to attach a GLX win and retry // with EGL win if failed. for(uint32 i = 0; i < 2; i++) { try { if (wnd[i] == NULL) continue; wnd[i]->Attach((void*)((uint32*)(dsp)+1), false); s_gs->m_wnd = wnd[i]; if (i == 0) delete wnd[1]; break; } catch (GSDXRecoverableError) { wnd[i]->Detach(); delete wnd[i]; } } } if (s_gs->m_wnd == NULL) { return -1; } #endif #ifdef _WINDOWS s_gs->m_wnd->Attach(*dsp, false); #endif } if(!s_gs->CreateDevice(dev)) { // This probably means the user has DX11 configured with a video card that is only DX9 // compliant. Cound mean drivr issues of some sort also, but to be sure, that's the most // common cause of device creation errors. 
:) --air GSclose(); return -1; } return 0; } EXPORT_C_(int) GSopen2(void** dsp, uint32 flags) { #ifdef _LINUX // Use ogl renderer as default otherwise it crash at startup // GSRenderOGL only GSDeviceOGL (not GSDeviceNULL) int renderer = theApp.GetConfig("renderer", 12); #else int renderer = theApp.GetConfig("renderer", 0); #endif if(flags & 4) { #ifdef _WINDOWS int best_sw_renderer = GSUtil::CheckDirect3D11Level() >= D3D_FEATURE_LEVEL_10_0 ? 4 : 1; // dx11 / dx9 sw switch(renderer){ // Use alternative renderer (SW if currently using HW renderer, and vice versa, keeping the same DX level) case 1: renderer = 0; break; // DX9: SW to HW case 0: renderer = 1; break; // DX9: HW to SW case 4: renderer = 3; break; // DX11: SW to HW case 3: renderer = 4; break; // DX11: HW to SW case 13: renderer = 12; break; // OGL: SW to HW case 12: renderer = 13; break; // OGL: HW to SW default: renderer = best_sw_renderer; // If wasn't using DX (e.g. SDL), use best SW renderer. } #endif #ifdef _LINUX switch(renderer) { case 13: renderer = 12; break; // OGL: SW to HW case 12: renderer = 13; break; // OGL: HW to SW } #endif } int retval = _GSopen(dsp, NULL, renderer); if (s_gs != NULL) s_gs->SetAspectRatio(0); // PCSX2 manages the aspect ratios gsopen_done = true; return retval; } EXPORT_C_(int) GSopen(void** dsp, char* title, int mt) { /* if(!XInitThreads()) return -1; Display* display = XOpenDisplay(0); XCloseDisplay(display); */ int renderer = 0; // Legacy GUI expects to acquire vsync from the configuration files. s_vsync = !!theApp.GetConfig("vsync", 0); if(mt == 2) { // pcsx2 sent a switch renderer request #ifdef _WINDOWS renderer = GSUtil::CheckDirect3D11Level() >= D3D_FEATURE_LEVEL_10_0 ? 
4 : 1; // dx11 / dx9 sw #endif mt = 1; } else { // normal init renderer = theApp.GetConfig("renderer", 0); } *dsp = NULL; int retval = _GSopen(dsp, title, renderer); if(retval == 0 && s_gs) { s_gs->SetMultithreaded(!!mt); } gsopen_done = true; return retval; } EXPORT_C GSreset() { try { s_gs->Reset(); } catch (GSDXRecoverableError) { } } EXPORT_C GSgifSoftReset(uint32 mask) { try { s_gs->SoftReset(mask); } catch (GSDXRecoverableError) { } } EXPORT_C GSwriteCSR(uint32 csr) { try { s_gs->WriteCSR(csr); } catch (GSDXRecoverableError) { } } EXPORT_C GSinitReadFIFO(uint8* mem) { try { s_gs->InitReadFIFO(mem, 1); } catch (GSDXRecoverableError) { } } EXPORT_C GSreadFIFO(uint8* mem) { try { s_gs->ReadFIFO(mem, 1); } catch (GSDXRecoverableError) { } } EXPORT_C GSinitReadFIFO2(uint8* mem, uint32 size) { try { s_gs->InitReadFIFO(mem, size); } catch (GSDXRecoverableError) { } } EXPORT_C GSreadFIFO2(uint8* mem, uint32 size) { try { s_gs->ReadFIFO(mem, size); } catch (GSDXRecoverableError) { } } EXPORT_C GSgifTransfer(const uint8* mem, uint32 size) { try { s_gs->Transfer<3>(mem, size); } catch (GSDXRecoverableError) { } } EXPORT_C GSgifTransfer1(uint8* mem, uint32 addr) { try { s_gs->Transfer<0>(const_cast<uint8*>(mem) + addr, (0x4000 - addr) / 16); } catch (GSDXRecoverableError) { } } EXPORT_C GSgifTransfer2(uint8* mem, uint32 size) { try { s_gs->Transfer<1>(const_cast<uint8*>(mem), size); } catch (GSDXRecoverableError) { } } EXPORT_C GSgifTransfer3(uint8* mem, uint32 size) { try { s_gs->Transfer<2>(const_cast<uint8*>(mem), size); } catch (GSDXRecoverableError) { } } EXPORT_C GSvsync(int field) { try { #ifdef _WINDOWS if(s_gs->m_wnd->IsManaged()) { MSG msg; memset(&msg, 0, sizeof(msg)); while(msg.message != WM_QUIT && PeekMessage(&msg, NULL, 0, 0, PM_REMOVE)) { TranslateMessage(&msg); DispatchMessage(&msg); } } #endif s_gs->VSync(field); } catch (GSDXRecoverableError) { } } EXPORT_C_(uint32) GSmakeSnapshot(char* path) { try { string s(path); if(!s.empty() && s[s.length() - 1] 
!= DIRECTORY_SEPARATOR) { s = s + DIRECTORY_SEPARATOR; } return s_gs->MakeSnapshot(s + "gsdx"); } catch (GSDXRecoverableError) { return false; } } EXPORT_C GSkeyEvent(GSKeyEventData* e) { try { if (gsopen_done) s_gs->KeyEvent(e); } catch (GSDXRecoverableError) { } } EXPORT_C_(int) GSfreeze(int mode, GSFreezeData* data) { try { if(mode == FREEZE_SAVE) { return s_gs->Freeze(data, false); } else if(mode == FREEZE_SIZE) { return s_gs->Freeze(data, true); } else if(mode == FREEZE_LOAD) { return s_gs->Defrost(data); } } catch (GSDXRecoverableError) { } return 0; } EXPORT_C GSconfigure() { try { if(!GSUtil::CheckSSE()) return; #ifdef _WINDOWS if(GSSettingsDlg(s_isgsopen2).DoModal() == IDOK) { if(s_gs != NULL && s_gs->m_wnd->IsManaged()) { // Legacy apps like gsdxgui expect this... GSshutdown(); } } #else if (RunLinuxDialog()) { theApp.ReloadConfig(); } #endif } catch (GSDXRecoverableError) { } }<|fim▁hole|> if(!GSUtil::CheckSSE()) { return -1; } #ifdef _WINDOWS s_hr = ::CoInitializeEx(NULL, COINIT_MULTITHREADED); if(!GSUtil::CheckDirectX()) { if(SUCCEEDED(s_hr)) { ::CoUninitialize(); } s_hr = E_FAIL; return -1; } if(SUCCEEDED(s_hr)) { ::CoUninitialize(); } s_hr = E_FAIL; #endif return 0; } EXPORT_C GSabout() { } EXPORT_C GSirqCallback(void (*irq)()) { s_irq = irq; if(s_gs) { s_gs->SetIrqCallback(s_irq); } } void pt(const char* str){ struct tm *current; time_t now; time(&now); current = localtime(&now); printf("%02i:%02i:%02i%s", current->tm_hour, current->tm_min, current->tm_sec, str); } EXPORT_C_(int) GSsetupRecording(int start, void* data) { if (s_gs == NULL) { printf("GSdx: no s_gs for recording\n"); return 0; } if(start & 1) { printf("GSdx: Recording start command\n"); if( s_gs->BeginCapture() ) pt(" - Capture started\n"); } else { printf("GSdx: Recording end command\n"); s_gs->EndCapture(); pt(" - Capture ended\n"); } return 1; } EXPORT_C GSsetGameCRC(uint32 crc, int options) { s_gs->SetGameCRC(crc, options); } EXPORT_C GSgetLastTag(uint32* tag) { 
s_gs->GetLastTag(tag); } EXPORT_C GSgetTitleInfo2(char* dest, size_t length) { if (gsopen_done == false) { //printf("GSdx: GSgetTitleInfo but GSOpen not yet done. Ignoring\n"); return; } string s = "GSdx"; // TODO: this gets called from a different thread concurrently with GSOpen (on linux) if(s_gs == NULL) return; if(s_gs->m_GStitleInfoBuffer[0]) { GSAutoLock lock(&s_gs->m_pGSsetTitle_Crit); s = format("GSdx | %s", s_gs->m_GStitleInfoBuffer); if(s.size() > length - 1) { s = s.substr(0, length - 1); } } strcpy(dest, s.c_str()); } EXPORT_C GSsetFrameSkip(int frameskip) { s_gs->SetFrameSkip(frameskip); } EXPORT_C GSsetVsync(int enabled) { s_vsync = !!enabled; if(s_gs) { s_gs->SetVSync(s_vsync); } } EXPORT_C GSsetExclusive(int enabled) { s_exclusive = !!enabled; if(s_gs) { s_gs->SetVSync(s_vsync); } } EXPORT_C GSsetFrameLimit(int limit) { s_framelimit = !!limit; if(s_gs) { s_gs->SetFrameLimit(s_framelimit); } } #ifdef _WINDOWS #include <io.h> #include <fcntl.h> class Console { HANDLE m_console; string m_title; public: Console::Console(LPCSTR title, bool open) : m_console(NULL) , m_title(title) { if(open) Open(); } Console::~Console() { Close(); } void Console::Open() { if(m_console == NULL) { CONSOLE_SCREEN_BUFFER_INFO csbiInfo; AllocConsole(); SetConsoleTitle(m_title.c_str()); m_console = GetStdHandle(STD_OUTPUT_HANDLE); COORD size; size.X = 100; size.Y = 300; SetConsoleScreenBufferSize(m_console, size); GetConsoleScreenBufferInfo(m_console, &csbiInfo); SMALL_RECT rect; rect = csbiInfo.srWindow; rect.Right = rect.Left + 99; rect.Bottom = rect.Top + 64; SetConsoleWindowInfo(m_console, TRUE, &rect); *stdout = *_fdopen(_open_osfhandle((long)m_console, _O_TEXT), "w"); setvbuf(stdout, NULL, _IONBF, 0); } } void Console::Close() { if(m_console != NULL) { FreeConsole(); m_console = NULL; } } }; // lpszCmdLine: // First parameter is the renderer. // Second parameter is the gs file to load and run. 
EXPORT_C GSReplay(HWND hwnd, HINSTANCE hinst, LPSTR lpszCmdLine, int nCmdShow) { int renderer = -1; { char* start = lpszCmdLine; char* end = NULL; long n = strtol(lpszCmdLine, &end, 10); if(end > start) {renderer = n; lpszCmdLine = end;} } while(*lpszCmdLine == ' ') lpszCmdLine++; ::SetPriorityClass(::GetCurrentProcess(), HIGH_PRIORITY_CLASS); if(FILE* fp = fopen(lpszCmdLine, "rb")) { Console console("GSdx", true); GSinit(); uint8 regs[0x2000]; GSsetBaseMem(regs); s_vsync = !!theApp.GetConfig("vsync", 0); HWND hWnd = NULL; _GSopen((void**)&hWnd, "", renderer); uint32 crc; fread(&crc, 4, 1, fp); GSsetGameCRC(crc, 0); GSFreezeData fd; fread(&fd.size, 4, 1, fp); fd.data = new uint8[fd.size]; fread(fd.data, fd.size, 1, fp); GSfreeze(FREEZE_LOAD, &fd); delete [] fd.data; fread(regs, 0x2000, 1, fp); long start = ftell(fp); GSvsync(1); struct Packet {uint8 type, param; uint32 size, addr; vector<uint8> buff;}; list<Packet*> packets; vector<uint8> buff; int type; while((type = fgetc(fp)) != EOF) { Packet* p = new Packet(); p->type = (uint8)type; switch(type) { case 0: p->param = (uint8)fgetc(fp); fread(&p->size, 4, 1, fp); switch(p->param) { case 0: p->buff.resize(0x4000); p->addr = 0x4000 - p->size; fread(&p->buff[p->addr], p->size, 1, fp); break; case 1: case 2: case 3: p->buff.resize(p->size); fread(&p->buff[0], p->size, 1, fp); break; } break; case 1: p->param = (uint8)fgetc(fp); break; case 2: fread(&p->size, 4, 1, fp); break; case 3: p->buff.resize(0x2000); fread(&p->buff[0], 0x2000, 1, fp); break; } packets.push_back(p); } Sleep(100); while(IsWindowVisible(hWnd)) { for(list<Packet*>::iterator i = packets.begin(); i != packets.end(); i++) { Packet* p = *i; switch(p->type) { case 0: switch(p->param) { case 0: GSgifTransfer1(&p->buff[0], p->addr); break; case 1: GSgifTransfer2(&p->buff[0], p->size / 16); break; case 2: GSgifTransfer3(&p->buff[0], p->size / 16); break; case 3: GSgifTransfer(&p->buff[0], p->size / 16); break; } break; case 1: GSvsync(p->param); break; 
case 2: if(buff.size() < p->size) buff.resize(p->size); GSreadFIFO2(&buff[0], p->size / 16); break; case 3: memcpy(regs, &p->buff[0], 0x2000); break; } } } for(list<Packet*>::iterator i = packets.begin(); i != packets.end(); i++) { delete *i; } packets.clear(); Sleep(100); /* vector<uint8> buff; bool exit = false; int round = 0; while(!exit) { uint32 index; uint32 size; uint32 addr; int pos; switch(fgetc(fp)) { case EOF: fseek(fp, start, 0); exit = !IsWindowVisible(hWnd); //exit = ++round == 60; break; case 0: index = fgetc(fp); fread(&size, 4, 1, fp); switch(index) { case 0: if(buff.size() < 0x4000) buff.resize(0x4000); addr = 0x4000 - size; fread(&buff[addr], size, 1, fp); GSgifTransfer1(&buff[0], addr); break; case 1: if(buff.size() < size) buff.resize(size); fread(&buff[0], size, 1, fp); GSgifTransfer2(&buff[0], size / 16); break; case 2: if(buff.size() < size) buff.resize(size); fread(&buff[0], size, 1, fp); GSgifTransfer3(&buff[0], size / 16); break; case 3: if(buff.size() < size) buff.resize(size); fread(&buff[0], size, 1, fp); GSgifTransfer(&buff[0], size / 16); break; } break; case 1: GSvsync(fgetc(fp)); exit = !IsWindowVisible(hWnd); break; case 2: fread(&size, 4, 1, fp); if(buff.size() < size) buff.resize(size); GSreadFIFO2(&buff[0], size / 16); break; case 3: fread(regs, 0x2000, 1, fp); break; } } */ GSclose(); GSshutdown(); fclose(fp); } } EXPORT_C GSBenchmark(HWND hwnd, HINSTANCE hinst, LPSTR lpszCmdLine, int nCmdShow) { ::SetPriorityClass(::GetCurrentProcess(), HIGH_PRIORITY_CLASS); FILE* file = fopen("c:\\temp1\\log.txt", "a"); fprintf(file, "-------------------------\n\n"); if(1) { GSLocalMemory * pMem = new GSLocalMemory(); GSLocalMemory& mem(*pMem); static struct {int psm; const char* name;} s_format[] = { {PSM_PSMCT32, "32"}, {PSM_PSMCT24, "24"}, {PSM_PSMCT16, "16"}, {PSM_PSMCT16S, "16S"}, {PSM_PSMT8, "8"}, {PSM_PSMT4, "4"}, {PSM_PSMT8H, "8H"}, {PSM_PSMT4HL, "4HL"}, {PSM_PSMT4HH, "4HH"}, {PSM_PSMZ32, "32Z"}, {PSM_PSMZ24, "24Z"}, {PSM_PSMZ16, 
"16Z"}, {PSM_PSMZ16S, "16ZS"}, }; uint8* ptr = (uint8*)_aligned_malloc(1024 * 1024 * 4, 32); for(int i = 0; i < 1024 * 1024 * 4; i++) ptr[i] = (uint8)i; // for(int tbw = 5; tbw <= 10; tbw++) { int n = 256 << ((10 - tbw) * 2); int w = 1 << tbw; int h = 1 << tbw; fprintf(file, "%d x %d\n\n", w, h); for(size_t i = 0; i < countof(s_format); i++) { const GSLocalMemory::psm_t& psm = GSLocalMemory::m_psm[s_format[i].psm]; GSLocalMemory::writeImage wi = psm.wi; GSLocalMemory::readImage ri = psm.ri; GSLocalMemory::readTexture rtx = psm.rtx; GSLocalMemory::readTexture rtxP = psm.rtxP; GIFRegBITBLTBUF BITBLTBUF; BITBLTBUF.SBP = 0; BITBLTBUF.SBW = w / 64; BITBLTBUF.SPSM = s_format[i].psm; BITBLTBUF.DBP = 0; BITBLTBUF.DBW = w / 64; BITBLTBUF.DPSM = s_format[i].psm; GIFRegTRXPOS TRXPOS; TRXPOS.SSAX = 0; TRXPOS.SSAY = 0; TRXPOS.DSAX = 0; TRXPOS.DSAY = 0; GIFRegTRXREG TRXREG; TRXREG.RRW = w; TRXREG.RRH = h; GSVector4i r(0, 0, w, h); GIFRegTEX0 TEX0; TEX0.TBP0 = 0; TEX0.TBW = w / 64; GIFRegTEXA TEXA; TEXA.TA0 = 0; TEXA.TA1 = 0x80; TEXA.AEM = 0; int trlen = w * h * psm.trbpp / 8; int len = w * h * psm.bpp / 8; clock_t start, end; _ftprintf(file, _T("[%4s] "), s_format[i].name); start = clock(); for(int j = 0; j < n; j++) { int x = 0; int y = 0; (mem.*wi)(x, y, ptr, trlen, BITBLTBUF, TRXPOS, TRXREG); } end = clock(); fprintf(file, "%6d %6d | ", (int)((float)trlen * n / (end - start) / 1000), (int)((float)(w * h) * n / (end - start) / 1000)); start = clock(); for(int j = 0; j < n; j++) { int x = 0; int y = 0; (mem.*ri)(x, y, ptr, trlen, BITBLTBUF, TRXPOS, TRXREG); } end = clock(); fprintf(file, "%6d %6d | ", (int)((float)trlen * n / (end - start) / 1000), (int)((float)(w * h) * n / (end - start) / 1000)); const GSOffset* o = mem.GetOffset(TEX0.TBP0, TEX0.TBW, TEX0.PSM); start = clock(); for(int j = 0; j < n; j++) { (mem.*rtx)(o, r, ptr, w * 4, TEXA); } end = clock(); fprintf(file, "%6d %6d ", (int)((float)len * n / (end - start) / 1000), (int)((float)(w * h) * n / (end - start) / 
1000)); if(psm.pal > 0) { start = clock(); for(int j = 0; j < n; j++) { (mem.*rtxP)(o, r, ptr, w, TEXA); } end = clock(); fprintf(file, "| %6d %6d ", (int)((float)len * n / (end - start) / 1000), (int)((float)(w * h) * n / (end - start) / 1000)); } fprintf(file, "\n"); fflush(file); } fprintf(file, "\n"); } _aligned_free(ptr); delete pMem; } // if(0) { GSLocalMemory * pMem2 = new GSLocalMemory(); GSLocalMemory& mem2(*pMem2); uint8* ptr = (uint8*)_aligned_malloc(1024 * 1024 * 4, 32); for(int i = 0; i < 1024 * 1024 * 4; i++) ptr[i] = (uint8)i; const GSLocalMemory::psm_t& psm = GSLocalMemory::m_psm[PSM_PSMCT32]; GSLocalMemory::writeImage wi = psm.wi; GIFRegBITBLTBUF BITBLTBUF; BITBLTBUF.DBP = 0; BITBLTBUF.DBW = 32; BITBLTBUF.DPSM = PSM_PSMCT32; GIFRegTRXPOS TRXPOS; TRXPOS.DSAX = 0; TRXPOS.DSAY = 1; GIFRegTRXREG TRXREG; TRXREG.RRW = 256; TRXREG.RRH = 256; int trlen = 256 * 256 * psm.trbpp / 8; int x = 0; int y = 0; (mem2.*wi)(x, y, ptr, trlen, BITBLTBUF, TRXPOS, TRXREG); delete pMem2; } // fclose(file); PostQuitMessage(0); } #endif #ifdef _LINUX #include <sys/time.h> #include <sys/timeb.h> // ftime(), struct timeb inline unsigned long timeGetTime() { timeb t; ftime(&t); return (unsigned long)(t.time*1000 + t.millitm); } void _fread(void *ptr, size_t size, size_t nmemb, FILE *stream) { static uint32 read_cnt = 0; read_cnt++; size_t result = fread(ptr, size, nmemb, stream); if (result != nmemb) { fprintf(stderr, "Read error\n"); exit(read_cnt); } } // Note EXPORT_C GSReplay(char* lpszCmdLine, int renderer) { GLLoader::in_replayer = true; // lpszCmdLine: // First parameter is the renderer. // Second parameter is the gs file to load and run. 
//EXPORT_C GSReplay(HWND hwnd, HINSTANCE hinst, LPSTR lpszCmdLine, int nCmdShow) #if 0 int renderer = -1; { char* start = lpszCmdLine; char* end = NULL; long n = strtol(lpszCmdLine, &end, 10); if(end > start) {renderer = n; lpszCmdLine = end;} } while(*lpszCmdLine == ' ') lpszCmdLine++; ::SetPriorityClass(::GetCurrentProcess(), HIGH_PRIORITY_CLASS); #endif // Allow to easyly switch between SW/HW renderer renderer = theApp.GetConfig("renderer", 12); if (renderer != 12 && renderer != 13) { fprintf(stderr, "wrong renderer selected %d\n", renderer); return; } vector<float> stats; stats.clear(); if(FILE* fp = fopen(lpszCmdLine, "rb")) { //Console console("GSdx", true); GSinit(); uint8 regs[0x2000]; GSsetBaseMem(regs); s_vsync = !!theApp.GetConfig("vsync", 0); void* hWnd = NULL; int err = _GSopen((void**)&hWnd, "", renderer); if (err != 0) { fprintf(stderr, "Error failed to GSopen\n"); return; } if (s_gs->m_wnd == NULL) return; uint32 crc; _fread(&crc, 4, 1, fp); GSsetGameCRC(crc, 0); GSFreezeData fd; _fread(&fd.size, 4, 1, fp); fd.data = new uint8[fd.size]; _fread(fd.data, fd.size, 1, fp); GSfreeze(FREEZE_LOAD, &fd); delete [] fd.data; _fread(regs, 0x2000, 1, fp); GSvsync(1); struct Packet {uint8 type, param; uint32 size, addr; vector<uint8> buff;}; list<Packet*> packets; vector<uint8> buff; int type; while((type = fgetc(fp)) != EOF) { Packet* p = new Packet(); p->type = (uint8)type; switch(type) { case 0: p->param = (uint8)fgetc(fp); _fread(&p->size, 4, 1, fp); switch(p->param) { case 0: p->buff.resize(0x4000); p->addr = 0x4000 - p->size; _fread(&p->buff[p->addr], p->size, 1, fp); break; case 1: case 2: case 3: p->buff.resize(p->size); _fread(&p->buff[0], p->size, 1, fp); break; } break; case 1: p->param = (uint8)fgetc(fp); break; case 2: _fread(&p->size, 4, 1, fp); break; case 3: p->buff.resize(0x2000); _fread(&p->buff[0], 0x2000, 1, fp); break; } packets.push_back(p); } sleep(1); //while(IsWindowVisible(hWnd)) //FIXME map? 
int finished = theApp.GetConfig("linux_replay", 1); unsigned long frame_number = 0; while(finished > 0) { frame_number = 0; unsigned long start = timeGetTime(); for(auto i = packets.begin(); i != packets.end(); i++) { Packet* p = *i; switch(p->type) { case 0: switch(p->param) { case 0: GSgifTransfer1(&p->buff[0], p->addr); break; case 1: GSgifTransfer2(&p->buff[0], p->size / 16); break; case 2: GSgifTransfer3(&p->buff[0], p->size / 16); break; case 3: GSgifTransfer(&p->buff[0], p->size / 16); break; } break; case 1: GSvsync(p->param); frame_number++; break; case 2: if(buff.size() < p->size) buff.resize(p->size); GSreadFIFO2(&buff[0], p->size / 16); break; case 3: memcpy(regs, &p->buff[0], 0x2000); break; } } unsigned long end = timeGetTime(); fprintf(stderr, "The %ld frames of the scene was render on %ldms\n", frame_number, end - start); fprintf(stderr, "A means of %fms by frame\n", (float)(end - start)/(float)frame_number); stats.push_back((float)(end - start)); sleep(1); finished--; } if (theApp.GetConfig("linux_replay", 1) > 1) { // Print some nice stats // Skip first frame (shader compilation populate the result) // it divides by 10 the standard deviation... float n = (float)theApp.GetConfig("linux_replay", 1) - 1.0f; float mean = 0; float sd = 0; for (auto i = stats.begin()+1; i != stats.end(); i++) { mean += *i; } mean = mean/n; for (auto i = stats.begin()+1; i != stats.end(); i++) { sd += pow((*i)-mean, 2); } sd = sqrt(sd/n); fprintf(stderr, "\n\nMean: %fms\n", mean); fprintf(stderr, "Standard deviation: %fms\n", sd); fprintf(stderr, "Mean by frame: %fms (%ffps)\n", mean/(float)frame_number, 1000.0f*frame_number/mean); fprintf(stderr, "Standard deviatin by frame: %fms\n", sd/(float)frame_number); } #ifdef ENABLE_OGL_DEBUG_MEM_BW fprintf(stderr, "memory bandwith. T: %f. 
V: %f\n", (float)g_texture_upload_byte/(float)frame_number/1024, (float)g_vertex_upload_byte/(float)frame_number/1024); #endif for(auto i = packets.begin(); i != packets.end(); i++) { delete *i; } packets.clear(); sleep(1); GSclose(); GSshutdown(); fclose(fp); } else { fprintf(stderr, "failed to open %s\n", lpszCmdLine); } } #endif<|fim▁end|>
EXPORT_C_(int) GStest() {
<|file_name|>version.py<|end_file_name|><|fim▁begin|>import re from os.path import join from setuptools import find_packages def get(): pkgnames = find_packages() if len(pkgnames) == 0: return "unknown"<|fim▁hole|> c = re.compile(r"__version__ *= *('[^']+'|\"[^\"]+\")") m = c.search(content) if m is None: return "unknown" return m.groups()[0][1:-1]<|fim▁end|>
pkgname = pkgnames[0] content = open(join(pkgname, "__init__.py")).read()
<|file_name|>tests.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from __future__ import absolute_import import mock import six import time from exam import fixture, patcher from sentry.quotas.redis import ( is_rate_limited, RedisQuota, ) from sentry.testutils import TestCase from sentry.utils.redis import clusters def test_is_rate_limited_script(): now = int(time.time()) cluster = clusters.get('default') client = cluster.get_local_client(six.next(iter(cluster.hosts))) # The item should not be rate limited by either key. assert list(map(bool, is_rate_limited(client, ('foo', 'bar'), (1, now + 60, 2, now + 120)))) == \ [False, False] # The item should be rate limited by the first key (1). assert list(map(bool, is_rate_limited(client, ('foo', 'bar'), (1, now + 60, 2, now + 120)))) == \ [True, False] # The item should still be rate limited by the first key (1), but *not* # rate limited by the second key (2) even though this is the third time # we've checked the quotas. This ensures items that are rejected by a lower # quota don't affect unrelated items that share a parent quota. 
assert list(map(bool, is_rate_limited(client, ('foo', 'bar'), (1, now + 60, 2, now + 120)))) == \ [True, False] assert client.get('foo') == '1' assert 59 <= client.ttl('foo') <= 60 assert client.get('bar') == '1' assert 119 <= client.ttl('bar') <= 120 class RedisQuotaTest(TestCase): quota = fixture(RedisQuota) @patcher.object(RedisQuota, 'get_project_quota') def get_project_quota(self): inst = mock.MagicMock() inst.return_value = (0, 60) return inst @patcher.object(RedisQuota, 'get_organization_quota') def get_organization_quota(self):<|fim▁hole|> return inst def test_uses_defined_quotas(self): self.get_project_quota.return_value = (200, 60) self.get_organization_quota.return_value = (300, 60) quotas = self.quota.get_quotas(self.project) assert quotas[0].key == 'p:{}'.format(self.project.id) assert quotas[0].limit == 200 assert quotas[0].window == 60 assert quotas[1].key == 'o:{}'.format(self.project.organization.id) assert quotas[1].limit == 300 assert quotas[1].window == 60 @mock.patch('sentry.quotas.redis.is_rate_limited') @mock.patch.object(RedisQuota, 'get_quotas', return_value=[]) def test_bails_immediately_without_any_quota(self, get_quotas, is_rate_limited): result = self.quota.is_rate_limited(self.project) assert not is_rate_limited.called assert not result.is_limited @mock.patch('sentry.quotas.redis.is_rate_limited', return_value=(False, False)) def test_is_not_limited_without_rejections(self, is_rate_limited): self.get_organization_quota.return_value = (100, 60) self.get_project_quota.return_value = (200, 60) assert not self.quota.is_rate_limited(self.project).is_limited @mock.patch('sentry.quotas.redis.is_rate_limited', return_value=(True, False)) def test_is_limited_on_rejections(self, is_rate_limited): self.get_organization_quota.return_value = (100, 60) self.get_project_quota.return_value = (200, 60) assert self.quota.is_rate_limited(self.project).is_limited<|fim▁end|>
inst = mock.MagicMock() inst.return_value = (0, 60)
<|file_name|>loc.rs<|end_file_name|><|fim▁begin|>//! The rpeek library. //! //! # License //! //! Copyright (c) 2015 by Stacy Prowell. All rights reserved. //! //! Licensed under the BSD 2-Clause license. See the file LICENSE //! that is part of this distribution. This file may not be copied,<|fim▁hole|> use std::fmt; /// Specify the current location for the parser. #[derive(Debug)] pub struct Loc { /// The name of the thing being parsed. Typically a file name. pub name: &'static str, /// The column. pub column: u32, /// The line. pub line: u32, } impl fmt::Display for Loc { fn fmt(&self, form: &mut fmt::Formatter) -> fmt::Result { write!(form, "{}:{}:{}", self.name, self.line, self.column) } }<|fim▁end|>
//! modified, or distributed except according to those terms.
<|file_name|>auth.js<|end_file_name|><|fim▁begin|>'use strict'; memoryApp.controller('AuthCtrl', function ($scope, $location, AuthService) { $scope.register = function () { var username = $scope.registerUsername; var password = $scope.registerPassword; if (username && password) { AuthService.register(username, password).then( function () { $location.path('/dashboard'); }, function (error) { $scope.registerError = error; } ); } else { $scope.registerError = 'Username and password required'; } }; $scope.login = function () { var username = $scope.loginUsername; var password = $scope.loginPassword; <|fim▁hole|> AuthService.login(username, password).then( function () { $location.path('/dashboard'); }, function (error) { $scope.loginError = error; } ); } else { $scope.error = 'Username and password required'; } }; });<|fim▁end|>
if (username && password) {
<|file_name|>Hotel.view.js<|end_file_name|><|fim▁begin|>import React from 'react' import {observer} from 'mobx-react' import {Route, Link} from 'react-router-dom' import {IMAGE_DIR, TOKEN, GET_HOTEL_INFO, RESPONSE_CODE_SUCCESS} from 'macros' import {setParamsToURL, dateToZh} from 'utils' import Cookies from 'js-cookie' import WeiXinShareTips from 'WeiXinShareTips' import './hotel.less' export default observer( (props) => ( <div className="hotel-container"> <div className="banner-container"> <div className="colllect"> {Cookies.get(TOKEN)? props.store.isFavorite == 1? <i className="iconfont btn-colllect icon-hotle_icon_like1" /> : <i className="iconfont btn-colllect icon-hotle_icon_like" /> : ''} <span className="btn-share"><i className="iconfont icon-share1" /></span> </div> <div className="swiper-container"> <div className="swiper-wrapper"> { props.store.hotelImgs.map((item, index) => { return <div className="swiper-slide" key={index} style={{backgroundImage:`url(${item.imgPath})`}}></div> }) } </div> <div className="swiper-pagination"></div> </div> </div> <div className="hotel-name-wrapper"> <div className="hotel-box"> <div className="hotel-name">{props.store.hotelName}</div> <div className="hotel-address">{props.store.address}</div> </div> <Link to={setParamsToURL(`${props.match.url}/map`, {longitude: props.store.longitude, latitude: props.store.latitude})}> <i className="iconfont icon-will_live_icon_loction" /> </Link> </div> <ul className="hotel-funs"> <li><img src={`${IMAGE_DIR}/use-face.jpg`} /><span>刷脸入住</span></li> <li><img src={`${IMAGE_DIR}/use-lock.jpg`} /><span>无卡开锁</span></li> <li><img src={`${IMAGE_DIR}/use-free.jpg`} /><span>面排队/查房</span></li> </ul> <Link to={`${props.match.url}/select-date`} className="checking-in-out"> <div><p>入住</p><span>{dateToZh(props.store.checkInDate)}</span></div> <div><p>退房</p><span>{dateToZh(props.store.checkOutDate)}</span></div> <i className="iconfont icon-hotle_icon_show" /> </Link> <div className="hotel-cards"> <div 
className="swiper-container"> <div className="swiper-wrapper"> { props.store.roomTypes.map((item, index) => { return ( <div className="swiper-slide" key={index} style={{backgroundImage:`url(${item.imgPath})`}}> <div className="slide-adjust"> <p className="price-box">&yen;{`${item.price}`}</p> <Link className="clickable-area" to={`/room/${props.match.params.hotelID}/${item.roomTypeID}`} ></Link> <div className="hotel-type"> <div className="info-wrapper"> <p>{item.roomType}</p> <p>{`${item.roomSize}m², ${item.bedType}`}</p> </div> <Link to={{pathname:`/booking/${props.match.params.hotelID}/${item.roomTypeID}`, search: `?checkInDate=${props.store.checkInDate}&checkOutDate=${props.store.checkOutDate}`}} className="btn-booking">立即预订</Link> </div> </div> </div> ) }) } </div> </div> </div> { props.store.isShowShareTips? ( <WeiXinShareTips onClick={(e) => { props.store.isShowShareTips = false }}/> ) : '' } </div> )<|fim▁hole|><|fim▁end|>
)