Dataset columns (each row below lists these three fields in order):
  max_stars_count : int64, values ranging from 301 to 224k
  text            : string, lengths ranging from 6 to 1.05M characters
  token_count     : int64, values ranging from 3 to 727k
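As a minimal sketch of how records with this schema could be scanned and filtered, assuming they are stored as newline-delimited JSON with exactly these field names (the path "rows.jsonl" and the star/token thresholds are illustrative assumptions, not part of the source):

import json

# Each JSON line is assumed to hold the three fields described above:
#   max_stars_count (int), text (str), token_count (int).
# "rows.jsonl" is a hypothetical path used only for this sketch.
with open("rows.jsonl", "r", encoding="utf-8") as fh:
    for line in fh:
        record = json.loads(line)
        stars = record["max_stars_count"]
        tokens = record["token_count"]
        text = record["text"]
        # Keep only reasonably popular, reasonably short samples.
        if stars >= 300 and tokens <= 10_000:
            print(f"{stars:>8} stars  {tokens:>7} tokens  {len(text):>9} chars")

The sample rows that follow are reproduced verbatim from the dataset.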
3,428
<filename>lib/node_modules/@stdlib/datasets/spam-assassin/data/spam-1/00297.3350c2dbbb0272c27b2c7773d7012356.json {"id":"00297","group":"spam-1","checksum":{"type":"MD5","value":"3350c2dbbb0272c27b2c7773d7012356"},"text":"From /aimcque/zzzzail.rcv/3/[email protected] Sat Sep 14 16:20:56 2002\nReturn-Path: </aimcque/zzzzail.rcv/3/[email protected]>\nDelivered-To: <EMAIL>.<EMAIL>\nReceived: from localhost (jalapeno [127.0.0.1])\n\tby zzzzason.org (Postfix) with ESMTP id 55F0716F03\n\tfor <zzzz@localhost>; Sat, 14 Sep 2002 16:20:53 +0100 (IST)\nReceived: from jalapeno [127.0.0.1]\n\tby localhost with IMAP (fetchmail-5.9.0)\n\tfor zzzz@localhost (single-drop); Sat, 14 Sep 2002 16:20:53 +0100 (IST)\nReceived: from freemail.nx.cninfo.net (freemail.nx.cninfo.net\n [202.100.100.171]) by dogma.slashnull.org (8.11.6/8.11.6) with SMTP id\n g8DMgqC05710 for <<EMAIL>>; Fri, 13 Sep 2002 23:42:53 +0100\nMessage-Id: <<EMAIL>>\nReceived: from 192.168.3.11([217.125.101.38]) by\n freemail.nx.cninfo.net(JetMail 2.5.3.0) with SMTP id zzzza3d8281b2;\n Fri, 13 Sep 2002 22:33:22 -0000\nTo: <C:`<EMAIL>.Ad<EMAIL>huge<EMAIL>>\nFrom: \"mary\" <<EMAIL>>\nSubject: Best product for 2002\nDate: Fri, 13 Sep 2002 18:34:51 -1600\nMIME-Version: 1.0\nContent-Type: text/html; charset=\"iso-8859-1\"\nContent-Transfer-Encoding: quoted-printable\n\n<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 3.2//EN\">\n<HTML>\n\n<HEAD>\n\t<META NAME=3D\"GENERATOR\" Content=3D\"Visual Page 1.0 for Windows\">\n\t<META HTTP-EQUIV=3D\"Content-Type\" CONTENT=3D\"text/html;CHARSET=3Diso-8859=\n-1\">\n\t<TITLE>untitled</TITLE>\n</HEAD>\n\n<BODY onLoad=3D\"(window.open('http://dvd.specialdiscounts4u.com/'))\">\n\n<P ALIGN=3D\"CENTER\"><FONT COLOR=3D\"#0000FF\" face=3D\"Arial\"><B>Copy DVD Mov=\nies?<BR>\n</B></FONT><FONT COLOR=3D\"#000000\" face=3D\"Arial\"><BR>\n</FONT><FONT face=3D\"Arial\"><B>Yes! Copy and burn your own DVD\nmovies and video with a CD-R Drive.<BR>\n</B></FONT><FONT COLOR=3D\"#000000\" face=3D\"Arial\"><BR>\n</FONT><FONT COLOR=3D\"#FF0000\" face=3D\"Arial\"><B>* Order by September 21, =\n2002, and receive the following Free Gifts!\n\n1. \"Free DVD Movie of your choice ($20.00 Value)\n2. Cell Phone Battery Booster ($19.95 Value)\n\n\nOwn all the DVD's you've always wanted\nand start burning today!\n.</B></FONT></P>\n\n<P ALIGN=3D\"CENTER\"><FONT COLOR=3D\"#FF0000\" face=3D\"Arial\"><B><BR>\n</B></FONT><A HREF=3D\"http://dvd.specialdiscounts4u.com/ \"><FONT face=3D\"A=\nrial\"><B>Click Here Now!</B></FONT></A>\n\n\n</BODY>\n\n</HTML>\n\n\n\n\n"}
1,185
1,062
// // Generated by class-dump 3.5b1 (64 bit) (Debug version compiled Dec 3 2019 19:59:57). // // Copyright (C) 1997-2019 <NAME>. // #import <objc/NSObject.h> #import "TerminationHandler-Protocol.h" @class MessageViewer, NSString, NSTimer; @interface MailTimeMachineController : NSObject <TerminationHandler> { id _tableOfContentsSaveLock; // 8 = 0x8 NSTimer *_tableOfContentsSaveTimer; // 16 = 0x10 BOOL _savingTOC; // 24 = 0x18 MessageViewer *_messageViewer; // 32 = 0x20 struct CGRect _originalMailWindowFrame; // 40 = 0x28 } + (id)sharedController; // IMP=0x000000010000314e + (id)allocWithZone:(struct _NSZone *)arg1; // IMP=0x00000001001cd4d1 + (id)log; // IMP=0x00000001001cd474 @property(nonatomic) struct CGRect originalMailWindowFrame; // @synthesize originalMailWindowFrame=_originalMailWindowFrame; @property(nonatomic) BOOL savingTOC; // @synthesize savingTOC=_savingTOC; @property(retain, nonatomic) MessageViewer *messageViewer; // @synthesize messageViewer=_messageViewer; // - (void).cxx_destruct; // IMP=0x00000001001cfd9d - (void)relinquishTimeMachineAccess; // IMP=0x00000001001cfd17 - (void)exitTimeMachine; // IMP=0x00000001001cfa7c - (void)prepareToExitTimeMachineModeWithDisplayState:(id)arg1; // IMP=0x00000001001cf8ac - (void)enterTimeMachineMode; // IMP=0x00000001001cf80c - (unsigned char)_launchTimeMachineHelperApp; // IMP=0x00000001001ce258 - (id)_mailTimeMachineDescriptionForMessage:(id)arg1 withColumns:(id)arg2 selected:(BOOL)arg3; // IMP=0x00000001001cdbd5 - (void)_threadedSaveTableOfContentsApprovedQuit:(BOOL)arg1; // IMP=0x00000001001cda92 - (BOOL)_timingOutSaveTableOfContents; // IMP=0x00000001001cd86f - (void)_threadedSaveTableOfContentsEnded:(id)arg1; // IMP=0x00000001000854bc - (void)_thread_saveTableOfContents:(id)arg1; // IMP=0x000000010008510f - (void)_saveTableOfContents; // IMP=0x0000000100085348 - (void)_approvedQuit; // IMP=0x0000000100094d18 - (void)nowWouldBeAGoodTimeToTerminate:(id)arg1; // IMP=0x0000000100094c3f - (void)unregisterTimeMachineHandlers; // IMP=0x0000000100095d6e - (void)registerTimeMachineHandlers; // IMP=0x00000001000032ac - (void)disableTableOfContentsSaveTimer:(BOOL)arg1; // IMP=0x00000001001cd75c - (void)_setTableOfContentsTimer:(id)arg1; // IMP=0x000000010007d623 - (void)_setTableOfContentsIsValid:(BOOL)arg1; // IMP=0x000000010007d4fb - (void)forceSaveTableOfContentsNow; // IMP=0x00000001001cd729 - (void)_saveTableOfContents:(id)arg1; // IMP=0x00000001001cd6a3 - (void)_invalidateTableOfContents:(id)arg1; // IMP=0x000000010007d486 - (void)_invalidateTableOfContentsAndBackupManager:(id)arg1; // IMP=0x000000010007d400 - (void)registerNotificationHandlers; // IMP=0x0000000100040633 - (void)dealloc; // IMP=0x00000001001cd58b - (id)init; // IMP=0x0000000100003207 // Remaining properties @property(readonly, copy) NSString *debugDescription; @property(readonly, copy) NSString *description; @property(readonly) unsigned long long hash; @property(readonly) Class superclass; @end
1,151
679
<gh_stars>100-1000 /************************************************************** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * *************************************************************/ // MARKER(update_precomp.py): autogen include statement, do not remove #include "precompiled_xmlsecurity.hxx" #include <sal/main.h> #include <vcl/event.hxx> #include <vcl/svapp.hxx> #include <vcl/wrkwin.hxx> #include <vcl/msgbox.hxx> #include <vcl/fixed.hxx> #include <vcl/edit.hxx> #include <vcl/button.hxx> #include <vcl/lstbox.hxx> #include <svtools/filectrl.hxx> #include <tools/urlobj.hxx> #include <osl/file.hxx> #include <svtools/docpasswdrequest.hxx> #include <comphelper/processfactory.hxx> #include <cppuhelper/servicefactory.hxx> #include <cppuhelper/bootstrap.hxx> #include <unotools/streamhelper.hxx> #include <ucbhelper/contentbroker.hxx> #include <ucbhelper/configurationkeys.hxx> // Will be in comphelper if CWS MAV09 is integrated #include <comphelper/storagehelper.hxx> #include <com/sun/star/lang/XMultiServiceFactory.hpp> #include <xmlsecurity/xmlsignaturehelper.hxx> #include <xmlsecurity/digitalsignaturesdialog.hxx> #include <xmlsecurity/certificatechooser.hxx> #include <xmlsecurity/biginteger.hxx> #include <com/sun/star/security/XDocumentDigitalSignatures.hpp> using namespace ::com::sun::star; using namespace ::com::sun::star; void Main(); #define TEXTFIELDWIDTH 80 #define TEXTFIELDSTARTX 10 #define EDITWIDTH 200 #define EDITHEIGHT 20 #define FIXEDLINEHEIGHT 15 #define BUTTONWIDTH 50 #define BUTTONHEIGHT 22 #define BUTTONSPACE 20 #define LISTBOXHEIGHT 120 // #define TEST_IMPLEMENTATION_DIRECTLY // ----------------------------------------------------------------------- SAL_IMPLEMENT_MAIN() { uno::Reference< lang::XMultiServiceFactory > xMSF; try { uno::Reference< uno::XComponentContext > xCtx( cppu::defaultBootstrap_InitialComponentContext() ); if ( !xCtx.is() ) { DBG_ERROR( "Error creating initial component context!" ); return -1; } xMSF = uno::Reference< lang::XMultiServiceFactory >(xCtx->getServiceManager(), uno::UNO_QUERY ); if ( !xMSF.is() ) { DBG_ERROR( "No service manager!" ); return -1; } // Init USB uno::Sequence< uno::Any > aArgs( 2 ); aArgs[ 0 ] <<= rtl::OUString::createFromAscii( UCB_CONFIGURATION_KEY1_LOCAL ); aArgs[ 1 ] <<= rtl::OUString::createFromAscii( UCB_CONFIGURATION_KEY2_OFFICE ); sal_Bool bSuccess = ::ucb::ContentBroker::initialize( xMSF, aArgs ); if ( !bSuccess ) { DBG_ERROR( "Error creating UCB!" ); return -1; } } catch ( uno::Exception const & ) { DBG_ERROR( "Exception during creation of initial component context!" 
); return -1; } comphelper::setProcessServiceFactory( xMSF ); InitVCL( xMSF ); ::Main(); DeInitVCL(); return 0; } // ----------------------------------------------------------------------- class MyWin : public WorkWindow { private: FixedLine maTokenLine; CheckBox maCryptoCheckBox; FixedText maFixedTextTokenName; FileControl maEditTokenName; FixedLine maTest1Line; FixedText maFixedTextXMLFileName; FileControl maEditXMLFileName; FixedText maFixedTextBINFileName; FileControl maEditBINFileName; FixedText maFixedTextSIGFileName; FileControl maEditSIGFileName; PushButton maSignButton; PushButton maVerifyButton; FixedLine maTest2Line; FixedText maFixedTextDOCFileName; FileControl maEditDOCFileName; PushButton maDigitalSignaturesButton; PushButton maVerifyDigitalSignaturesButton; FixedLine maHintLine; FixedText maHintText; DECL_LINK( CryptoCheckBoxHdl, CheckBox* ); DECL_LINK( SignButtonHdl, Button* ); DECL_LINK( VerifyButtonHdl, Button* ); DECL_LINK( DigitalSignaturesWithServiceHdl, Button* ); DECL_LINK( VerifyDigitalSignaturesHdl, Button* ); DECL_LINK( DigitalSignaturesWithTokenHdl, Button* ); DECL_LINK( StartVerifySignatureHdl, void* ); public: MyWin( Window* pParent, WinBits nWinStyle ); }; // ----------------------------------------------------------------------- void Main() { MyWin aMainWin( NULL, WB_APP | WB_STDWORK | WB_3DLOOK); aMainWin.Show(); Application::Execute(); } // ----------------------------------------------------------------------- MyWin::MyWin( Window* pParent, WinBits nWinStyle ) : WorkWindow( pParent, nWinStyle ), maTokenLine( this ), maTest1Line( this ), maTest2Line( this ), maHintLine( this ), maFixedTextXMLFileName( this ), maEditXMLFileName( this, WB_BORDER ), maFixedTextBINFileName( this ), maEditBINFileName( this, WB_BORDER ), maFixedTextSIGFileName( this ), maEditSIGFileName( this, WB_BORDER ), maFixedTextTokenName( this ), maEditTokenName( this, WB_BORDER ), maFixedTextDOCFileName( this ), maEditDOCFileName( this, WB_BORDER ), maSignButton( this ), maVerifyButton( this ), maDigitalSignaturesButton( this ), maVerifyDigitalSignaturesButton( this ), maHintText( this, WB_WORDBREAK ), maCryptoCheckBox( this ) { #ifdef TEST_IMPLEMENTATION_DIRECTLY Size aOutputSize( 400, 600 ); #else Size aOutputSize( 400, 400 ); #endif SetOutputSizePixel( aOutputSize ); SetText( String( RTL_CONSTASCII_USTRINGPARAM( "XML Signature Test" ) ) ); long nY = 15; maTokenLine.SetPosSizePixel( TEXTFIELDSTARTX, nY, aOutputSize.Width()-2*TEXTFIELDSTARTX, FIXEDLINEHEIGHT ); maTokenLine.SetText( String( RTL_CONSTASCII_USTRINGPARAM( "Crypto Settings" ) ) ); maTokenLine.Show(); nY += EDITHEIGHT*3/2; maCryptoCheckBox.SetPosSizePixel( TEXTFIELDSTARTX, nY, aOutputSize.Width()-2*TEXTFIELDSTARTX, FIXEDLINEHEIGHT ); maCryptoCheckBox.SetText( String( RTL_CONSTASCII_USTRINGPARAM( "Use Default Token (NSS option only)" ) ) ); maCryptoCheckBox.Check( sal_True ); maEditTokenName.Disable(); maFixedTextTokenName.Disable(); maCryptoCheckBox.SetClickHdl( LINK( this, MyWin, CryptoCheckBoxHdl ) ); maCryptoCheckBox.Show(); nY += EDITHEIGHT; maFixedTextTokenName.SetPosSizePixel( TEXTFIELDSTARTX, nY, TEXTFIELDWIDTH, EDITHEIGHT ); maFixedTextTokenName.SetText( String( RTL_CONSTASCII_USTRINGPARAM( "Crypto Token:" ) ) ); maFixedTextTokenName.Show(); maEditTokenName.SetPosSizePixel( TEXTFIELDSTARTX+TEXTFIELDWIDTH, nY, EDITWIDTH, EDITHEIGHT ); maEditTokenName.Show(); nY += EDITHEIGHT*3; #ifdef TEST_IMPLEMENTATION_DIRECTLY maTest1Line.SetPosSizePixel( TEXTFIELDSTARTX, nY, aOutputSize.Width()-2*TEXTFIELDSTARTX, FIXEDLINEHEIGHT ); 
maTest1Line.SetText( String( RTL_CONSTASCII_USTRINGPARAM( "Test simple files" ) ) ); maTest1Line.Show(); nY += EDITHEIGHT*3/2; maFixedTextXMLFileName.SetPosSizePixel( TEXTFIELDSTARTX, nY, TEXTFIELDWIDTH, EDITHEIGHT ); maFixedTextXMLFileName.SetText( String( RTL_CONSTASCII_USTRINGPARAM( "XML File:" ) ) ); maFixedTextXMLFileName.Show(); maEditXMLFileName.SetPosSizePixel( TEXTFIELDSTARTX+TEXTFIELDWIDTH, nY, EDITWIDTH, EDITHEIGHT ); maEditXMLFileName.Show(); nY += EDITHEIGHT*3/2; maFixedTextBINFileName.SetPosSizePixel( TEXTFIELDSTARTX, nY, TEXTFIELDWIDTH, EDITHEIGHT ); maFixedTextBINFileName.SetText( String( RTL_CONSTASCII_USTRINGPARAM( "Binary File:" ) ) ); maFixedTextBINFileName.Show(); maEditBINFileName.SetPosSizePixel( TEXTFIELDSTARTX+TEXTFIELDWIDTH, nY, EDITWIDTH, EDITHEIGHT ); maEditBINFileName.Show(); nY += EDITHEIGHT*3/2; maFixedTextSIGFileName.SetPosSizePixel( TEXTFIELDSTARTX, nY, TEXTFIELDWIDTH, EDITHEIGHT ); maFixedTextSIGFileName.SetText( String( RTL_CONSTASCII_USTRINGPARAM( "Signature File:" ) ) ); maFixedTextSIGFileName.Show(); maEditSIGFileName.SetPosSizePixel( TEXTFIELDSTARTX+TEXTFIELDWIDTH, nY, EDITWIDTH, EDITHEIGHT ); maEditSIGFileName.Show(); nY += EDITHEIGHT*2; maSignButton.SetPosSizePixel( TEXTFIELDSTARTX, nY, BUTTONWIDTH, BUTTONHEIGHT ); maSignButton.SetText( String( RTL_CONSTASCII_USTRINGPARAM( "Sign" ) ) ); maSignButton.SetClickHdl( LINK( this, MyWin, SignButtonHdl ) ); maSignButton.Show(); maVerifyButton.SetPosSizePixel( TEXTFIELDSTARTX+BUTTONWIDTH+BUTTONSPACE, nY, BUTTONWIDTH, BUTTONHEIGHT ); maVerifyButton.SetText( String( RTL_CONSTASCII_USTRINGPARAM( "Verify" ) ) ); maVerifyButton.SetClickHdl( LINK( this, MyWin, VerifyButtonHdl ) ); maVerifyButton.Show(); nY += EDITHEIGHT*3; #endif // TEST_IMPLEMENTATION_DIRECTLY maTest2Line.SetPosSizePixel( TEXTFIELDSTARTX, nY, aOutputSize.Width()-2*TEXTFIELDSTARTX, FIXEDLINEHEIGHT ); maTest2Line.SetText( String( RTL_CONSTASCII_USTRINGPARAM( "Test Office Document" ) ) ); maTest2Line.Show(); nY += EDITHEIGHT*3/2; maFixedTextDOCFileName.SetPosSizePixel( TEXTFIELDSTARTX, nY, TEXTFIELDWIDTH, EDITHEIGHT ); maFixedTextDOCFileName.SetText( String( RTL_CONSTASCII_USTRINGPARAM( "Office File:" ) ) ); maFixedTextDOCFileName.Show(); maEditDOCFileName.SetPosSizePixel( TEXTFIELDSTARTX+TEXTFIELDWIDTH, nY, EDITWIDTH, EDITHEIGHT ); maEditDOCFileName.Show(); nY += EDITHEIGHT*2; maDigitalSignaturesButton.SetPosSizePixel( TEXTFIELDSTARTX, nY, BUTTONWIDTH*2, BUTTONHEIGHT ); maDigitalSignaturesButton.SetText( String( RTL_CONSTASCII_USTRINGPARAM( "Digital Signatures..." 
) ) ); maDigitalSignaturesButton.SetClickHdl( LINK( this, MyWin, DigitalSignaturesWithServiceHdl ) ); maDigitalSignaturesButton.Show(); maVerifyDigitalSignaturesButton.SetPosSizePixel( TEXTFIELDSTARTX+BUTTONWIDTH*2+BUTTONSPACE, nY, BUTTONWIDTH*2, BUTTONHEIGHT ); maVerifyDigitalSignaturesButton.SetText( String( RTL_CONSTASCII_USTRINGPARAM( "Verify Signatures" ) ) ); maVerifyDigitalSignaturesButton.SetClickHdl( LINK( this, MyWin, VerifyDigitalSignaturesHdl ) ); maVerifyDigitalSignaturesButton.Show(); nY += EDITHEIGHT*2; maHintLine.SetPosSizePixel( TEXTFIELDSTARTX, nY, aOutputSize.Width()-2*TEXTFIELDSTARTX, FIXEDLINEHEIGHT ); maHintLine.Show(); nY += EDITHEIGHT*2; maHintText.SetPosSizePixel( TEXTFIELDSTARTX, nY, aOutputSize.Width()-2*TEXTFIELDSTARTX, aOutputSize.Height()-nY ); maHintText.SetText( String( RTL_CONSTASCII_USTRINGPARAM( "Hint: Copy crypto files from xmlsecurity/tools/cryptoken/nss and sample files from xmlsecurity/tools/examples to <temp>/nss.\nThis location will be used from the demo as the default location." ) ) ); maHintText.Show(); // Help the user with some default values ::rtl::OUString aTempDirURL; ::osl::File::getTempDirURL( aTempDirURL ); INetURLObject aURLObj( aTempDirURL ); aURLObj.insertName( String( RTL_CONSTASCII_USTRINGPARAM( "nss" ) ), true ); ::rtl::OUString aNSSFolder = aURLObj.getFSysPath( INetURLObject::FSYS_DETECT ); String aDefaultXMLFileName( aNSSFolder ); maEditXMLFileName.SetText( aNSSFolder + String( RTL_CONSTASCII_USTRINGPARAM( "demo-sample.xml" ) ) ); maEditBINFileName.SetText( aNSSFolder + String( RTL_CONSTASCII_USTRINGPARAM( "demo-sample.gif" ) ) ); maEditDOCFileName.SetText( aNSSFolder + String( RTL_CONSTASCII_USTRINGPARAM( "demo-sample.sxw" ) ) ); maEditSIGFileName.SetText( aNSSFolder + String( RTL_CONSTASCII_USTRINGPARAM( "demo-result.xml" ) ) ); maEditTokenName.SetText( aNSSFolder ); #ifdef WNT maEditTokenName.SetText( String() ); maEditTokenName.Disable(); maCryptoCheckBox.Disable(); #endif } IMPL_LINK( MyWin, CryptoCheckBoxHdl, CheckBox*, EMPTYARG ) { if ( maCryptoCheckBox.IsChecked() ) { maEditTokenName.Disable(); maFixedTextTokenName.Disable(); } else { maEditTokenName.Enable(); maFixedTextTokenName.Enable(); } return 1; } IMPL_LINK( MyWin, DigitalSignaturesWithServiceHdl, Button*, EMPTYARG ) { rtl::OUString aDocFileName = maEditDOCFileName.GetText(); uno::Reference < embed::XStorage > xStore = ::comphelper::OStorageHelper::GetStorageFromURL( aDocFileName, embed::ElementModes::READWRITE, comphelper::getProcessServiceFactory() ); uno::Reference< security::XDocumentDigitalSignatures > xD( comphelper::getProcessServiceFactory()->createInstance( rtl::OUString( RTL_CONSTASCII_USTRINGPARAM ( "com.sun.star.security.DocumentDigitalSignatures" ) ) ), uno::UNO_QUERY ); if ( xD.is() ) xD->signDocumentContent( xStore, NULL ); return 0; } IMPL_LINK( MyWin, VerifyDigitalSignaturesHdl, Button*, EMPTYARG ) { rtl::OUString aDocFileName = maEditDOCFileName.GetText(); uno::Reference < embed::XStorage > xStore = ::comphelper::OStorageHelper::GetStorageFromURL( aDocFileName, embed::ElementModes::READWRITE, comphelper::getProcessServiceFactory() ); uno::Reference< security::XDocumentDigitalSignatures > xD( comphelper::getProcessServiceFactory()->createInstance( rtl::OUString( RTL_CONSTASCII_USTRINGPARAM ( "com.sun.star.security.DocumentDigitalSignatures" ) ) ), uno::UNO_QUERY ); if ( xD.is() ) { uno::Sequence< security::DocumentSignatureInformation > aInfos = xD->verifyDocumentContentSignatures( xStore, NULL ); int nInfos = aInfos.getLength(); for ( int n = 0; n < 
nInfos; n++ ) { security::DocumentSignatureInformation& rInf = aInfos[n]; String aText( RTL_CONSTASCII_USTRINGPARAM( "The document is signed by\n\n " ) ); aText += String( rInf.Signer->getSubjectName() ); aText += String( RTL_CONSTASCII_USTRINGPARAM( "\n\n The signature is " ) ); if ( !rInf.SignatureIsValid ) aText += String( RTL_CONSTASCII_USTRINGPARAM( "NOT " ) ); aText += String( RTL_CONSTASCII_USTRINGPARAM( "valid" ) ); InfoBox( this, aText ).Execute(); } } return 0; } #ifdef TEST_IMPLEMENTATION_DIRECTLY IMPL_LINK( MyWin, DigitalSignaturesWithTokenHdl, Button*, EMPTYARG ) { String aDocFileName = maEditDOCFileName.GetText(); String aTokenFileName = maEditTokenName.GetText(); DigitalSignaturesDialog aSignaturesDialog( this, comphelper::getProcessServiceFactory(), SignatureModeDocumentContent, false ); bool bInit = aSignaturesDialog.Init( aTokenFileName ); if ( !bInit ) { ErrorBox( this, WB_OK, String( RTL_CONSTASCII_USTRINGPARAM( "Error initializing security context!" ) ) ).Execute(); return 0; } uno::Reference < embed::XStorage > xStore = ::comphelper::OStorageHelper::GetStorageFromURL( aDocFileName, embed::ElementModes::READWRITE, comphelper::getProcessServiceFactory() ); aSignaturesDialog.SetStorage( xStore ); aSignaturesDialog.Execute(); return 0; } IMPL_LINK( MyWin, SignButtonHdl, Button*, EMPTYARG ) { String aXMLFileName = maEditXMLFileName.GetText(); String aBINFileName = maEditBINFileName.GetText(); String aSIGFileName = maEditSIGFileName.GetText(); String aTokenFileName; if ( !maCryptoCheckBox.IsChecked() ) aTokenFileName = maEditTokenName.GetText(); XMLSignatureHelper aSignatureHelper( comphelper::getProcessServiceFactory() ); bool bInit = aSignatureHelper.Init( aTokenFileName ); if ( !bInit ) { ErrorBox( this, WB_OK, String( RTL_CONSTASCII_USTRINGPARAM( "Error initializing security context!" ) ) ).Execute(); return 0; } uno::Reference< ::com::sun::star::security::XCertificate > xCertToUse; CertificateChooser aChooser( this, aSignatureHelper.GetSecurityEnvironment(), SignatureInformations() ); if ( aChooser.Execute() ) xCertToUse = aChooser.GetSelectedCertificate(); if ( !xCertToUse.is() ) return 0; aSignatureHelper.StartMission(); sal_Int32 nSecurityId = aSignatureHelper.GetNewSecurityId(); aSignatureHelper.SetX509Certificate( nSecurityId, xCertToUse->getIssuerName(), bigIntegerToNumericString( xCertToUse->getSerialNumber() ) ); aSignatureHelper.AddForSigning( nSecurityId, aXMLFileName, aXMLFileName, sal_False ); aSignatureHelper.AddForSigning( nSecurityId, aBINFileName, aBINFileName, sal_True ); SvFileStream* pStream = new SvFileStream( aSIGFileName, STREAM_WRITE ); SvLockBytesRef xLockBytes = new SvLockBytes( pStream, sal_True ); uno::Reference< io::XOutputStream > xOutputStream = new utl::OOutputStreamHelper( xLockBytes ); bool bDone = aSignatureHelper.CreateAndWriteSignature( xOutputStream ); aSignatureHelper.EndMission(); if ( !bDone ) { ErrorBox( this, WB_OK, String( RTL_CONSTASCII_USTRINGPARAM( "Error creating Signature!" ) ) ).Execute(); } else { rtl::OUString aInfo( String( RTL_CONSTASCII_USTRINGPARAM( "Signature successfully created!\n\n" ) ) ); // aInfo += getSignatureInformationmations( aSignatureHelper.getAllSignatureInformation(), aSignatureHelper.GetSecurityEnvironment() ); InfoBox( this, aInfo ).Execute(); } // Check for more detailed results... 
return 0; } IMPL_LINK( MyWin, VerifyButtonHdl, Button*, EMPTYARG ) { String aXMLFileName = maEditXMLFileName.GetText(); String aBINFileName = maEditBINFileName.GetText(); String aSIGFileName = maEditSIGFileName.GetText(); String aTokenFileName; if ( !maCryptoCheckBox.IsChecked() ) aTokenFileName = maEditTokenName.GetText(); XMLSignatureHelper aSignatureHelper( comphelper::getProcessServiceFactory() ); bool bInit = aSignatureHelper.Init( aTokenFileName ); if ( !bInit ) { ErrorBox( this, WB_OK, String( RTL_CONSTASCII_USTRINGPARAM( "Error initializing security context!" ) ) ).Execute(); return 0; } aSignatureHelper.SetStartVerifySignatureHdl( LINK( this, MyWin, StartVerifySignatureHdl ) ); aSignatureHelper.StartMission(); SvFileStream* pStream = new SvFileStream( aSIGFileName, STREAM_READ ); pStream->Seek( STREAM_SEEK_TO_END ); sal_uLong nBytes = pStream->Tell(); pStream->Seek( STREAM_SEEK_TO_BEGIN ); SvLockBytesRef xLockBytes = new SvLockBytes( pStream, sal_True ); uno::Reference< io::XInputStream > xInputStream = new utl::OInputStreamHelper( xLockBytes, nBytes ); bool bDone = aSignatureHelper.ReadAndVerifySignature( xInputStream ); xInputStream->closeInput(); aSignatureHelper.EndMission(); if ( !bDone ) ErrorBox( this, WB_OK, String( RTL_CONSTASCII_USTRINGPARAM( "Error in Signature!" ) ) ).Execute(); else InfoBox( this, String( RTL_CONSTASCII_USTRINGPARAM( "Signatures verified without any problems!" ) ) ).Execute(); return 0; } IMPL_LINK( MyWin, StartVerifySignatureHdl, void*, EMPTYARG ) { QueryBox aQueryBox( this, WB_YES_NO|WB_DEF_YES, String( RTL_CONSTASCII_USTRINGPARAM( "Found Signature - Verify?" ) ) ); return ( aQueryBox.Execute() == RET_YES ) ? 1 : 0; } #endif // #ifdef TEST_IMPLEMENTATION_DIRECTLY
7,485
435
{ "copyright_text": "CC-BY-NC-SA 4.0", "description": "<NAME>\n\nhttps://2020.pycon.org.au/program/DM7UVV\n\nWe've all done it. Setting up a Slack to chat to our colleagues when the proper system doesn't cut it. Forwarding a document from work to our personal email address so we can read it on the device we want to. Building out experimental services on our personal AWS accounts because we couldn't get the permissions we needed on the company's systems.\r\n\r\nEvery organisation's infrastructure has its shadow, the unofficial system of servers, accounts, and hardware that crisscrosses and bypasses the sanctioned pathways. It is every security department's nightmare and every development team's open secret. From the newest graduate to the CEO, we all know at least some of these shortcuts.\r\n\r\nThis talk is a space for both confession and redemption: in it, we will delve into the psychology that leads to the development of shadow IT, the opportunities that can grow out of this corporate underground, and how to get these systems out of the shadows and into the light. Developers and security professionals alike will emerge from this talk with the tools they need to build the systems they actually want.\n\nProduced by NDV: https://youtube.com/channel/UCQ7dFBzZGlBvtU2hCecsBBg?sub_confirmation=1\n\nPython, PyCon, PyConAU, PyConline\n\nFri Sep 4 12:45:00 2020 at Python 2", "duration": 1321, "language": "eng", "recorded": "2020-09-05", "related_urls": [ { "label": "Conference schedule", "url": "https://2020.pycon.org.au/program/" }, { "label": "https://2020.pycon.org.au/program/DM7UVV", "url": "https://2020.pycon.org.au/program/DM7UVV" }, { "label": "https://youtube.com/channel/UCQ7dFBzZGlBvtU2hCecsBBg?sub_confirmation=1", "url": "https://youtube.com/channel/UCQ7dFBzZGlBvtU2hCecsBBg?sub_confirmation=1" } ], "speakers": [ "<NAME>" ], "tags": [ "LillyRyan", "PyCon", "PyConAU", "PyConline", "Python", "pyconau", "pyconau_2020" ], "thumbnail_url": "https://i.ytimg.com/vi/R0-VDnbst0M/hqdefault.jpg?sqp=-oaymwEcCNACELwBSFXyq4qpAw4IARUAAIhCGAFwAcABBg==&rs=AOn4CLCCyZk7oKty-jjoAulkeE_4C9b0Kg", "title": "What We Do in the Shadows", "videos": [ { "type": "youtube", "url": "https://www.youtube.com/watch?v=R0-VDnbst0M" } ] }
876
956
<filename>src/pal/linux/rte_atomic.h<gh_stars>100-1000 #ifndef RTE_ATOMIC_STUB_H #define RTE_ATOMIC_STUB_H /* <NAME> Cisco Systems, Inc. */ /* Copyright (c) 2015-2015 Cisco Systems, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ /* stubs for DPDK function for simulation */ static inline void rte_mb(void){ } static inline void rte_wmb(void){ } static inline void rte_rmb(void){ } static inline void rte_smp_mb(void){ } static inline void rte_smp_wmb(void){ } static inline void rte_smp_rmb(void){ } static inline void rte_io_mb(void){ } static inline void rte_io_wmb(void){ } static inline void rte_io_rmb(void){ } static inline void rte_compiler_barrier(){ } /*------------------------- 32 bit atomic operations -------------------------*/ typedef struct { volatile int32_t cnt; /**< An internal counter value. */ } rte_atomic32_t; #define RTE_ATOMIC32_INIT(val) { (val) } static inline void rte_atomic32_init(rte_atomic32_t *v) { v->cnt = 0; } static inline int32_t rte_atomic32_read(const rte_atomic32_t *v) { return v->cnt; } static inline void rte_atomic32_set(rte_atomic32_t *v, int32_t new_value) { v->cnt = new_value; } static inline void rte_atomic32_add(rte_atomic32_t *v, int32_t inc) { v->cnt+=inc; } static inline void rte_atomic32_sub(rte_atomic32_t *v, int32_t dec) { v->cnt-=dec; } static inline void rte_atomic32_inc(rte_atomic32_t *v){ rte_atomic32_add(v, 1); } static inline void rte_atomic32_dec(rte_atomic32_t *v){ rte_atomic32_sub(v, 1); } static inline int32_t rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc) { int32_t res=v->cnt; v->cnt+=inc; return(res); } #endif
842
647
<filename>include/trick/compat/sim_services/EchoJobs/include/EchoJobs.hh #include "trick/EchoJobs.hh"
43
459
/* * This file is part of choco-solver, http://choco-solver.org/ * * Copyright (c) 2021, IMT Atlantique. All rights reserved. * * Licensed under the BSD 4-clause license. * * See LICENSE file in the project root for full license information. */ package org.chocosolver.solver.constraints.extension.nary; import org.chocosolver.solver.variables.IntVar; /* * Created by IntelliJ IDEA. * User: hcambaza * Date: Jul 31, 2008 * Since : Choco 2.0.0 * */ public final class FastBooleanValidityChecker extends ValidityChecker { public FastBooleanValidityChecker(int arity, IntVar[] vars) { super(arity, vars); } // Is tuple valide ? public final boolean isValid(final int[] tuple) { for (int i = 0; i < arity; i++) { if (vars[sortedidx[i]].isInstantiated()) { if (vars[sortedidx[i]].getValue() != tuple[sortedidx[i]]) return false; } else break; // variable are sorted by domain size so only non instantiated variables remain // and non instantiated variables do not need to be checked in boolean ! } return true; } }
467
5,169
<gh_stars>1000+ { "name": "BLLSudokuImageLayout", "version": "0.1.0", "summary": "A UI component display images with 'sudoku' style like 'nice' app.", "description": "A UI component display images with the style like \"nice\" app. (类似【nice】app 首页列表中图片显示效果的 UI 组件】)", "homepage": "https://github.com/light-bo/BLLSudokuImageLayout", "license": "MIT", "authors": { "light_bo": "<EMAIL>" }, "platforms": { "ios": "7.0" }, "source": { "git": "https://github.com/light-bo/BLLSudokuImageLayout.git", "tag": "0.1.0" }, "source_files": "BLLSudokuImageLayoutDemo/BLLSudokuImageLayout/Layout/*", "exclude_files": "Classes/Exclude", "frameworks": "UIKit", "requires_arc": true }
316
2,958
<reponame>blanexie/blade package com.blade.ioc.annotation; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; /** * Automatic initInjection * * @author <a href="mailto:<EMAIL>" target="_blank">biezhi</a> * @since 1.5 */ @Target(ElementType.FIELD) @Retention(RetentionPolicy.RUNTIME) public @interface Inject { String value() default ""; }
160
1,056
<filename>java/j2ee.persistence/src/org/netbeans/modules/j2ee/persistence/jpqleditor/JPQLEditorController.java /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.netbeans.modules.j2ee.persistence.jpqleditor; import java.net.URL; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; import javax.swing.SwingUtilities; import org.netbeans.api.db.explorer.ConnectionManager; import org.netbeans.api.db.explorer.DatabaseConnection; import org.netbeans.api.progress.ProgressHandle; import org.netbeans.modules.j2ee.persistence.api.PersistenceEnvironment; import org.netbeans.modules.j2ee.persistence.dd.common.PersistenceUnit; import org.netbeans.modules.j2ee.persistence.editor.JPAEditorUtil; import org.netbeans.modules.j2ee.persistence.jpqleditor.ui.JPQLEditorTopComponent; import org.netbeans.modules.j2ee.persistence.provider.Provider; import org.netbeans.modules.j2ee.persistence.provider.ProviderUtil; import org.netbeans.modules.j2ee.persistence.wizard.Util; import org.openide.nodes.Node; import org.openide.util.Exceptions; import org.openide.util.Mutex; import org.openide.util.NbBundle; /** * JPQL Editor controller. Controls overall JPQL query execution. */ public class JPQLEditorController { private static final Logger logger = Logger.getLogger(JPQLEditorController.class.getName()); private JPQLEditorTopComponent editorTopComponent = null; private enum AnnotationAccessType { FIELD_TYPE, METHOD_TYPE; }; public void executeJPQLQuery(final String jpql, final PersistenceUnit pu, final PersistenceEnvironment pe, final int maxRowCount, final ProgressHandle ph) { final List<URL> localResourcesURLList = new ArrayList<URL>(); // final HashMap<String, String> props = new HashMap<String, String>(); final List<String> initialProblems = new ArrayList<String>(); //connection open final DatabaseConnection dbconn = JPAEditorUtil.findDatabaseConnection(pu, pe.getProject()); if (dbconn != null) { if (dbconn.getJDBCConnection() == null) { Mutex.EVENT.readAccess(new Mutex.Action<DatabaseConnection>() { @Override public DatabaseConnection run() { ConnectionManager.getDefault().showConnectionDialog(dbconn); return dbconn; } }); } } // final boolean containerManaged = Util.isSupportedJavaEEVersion(pe.getProject()); final Provider provider = ProviderUtil.getProvider(pu.getProvider(), pe.getProject()); if (containerManaged && provider!=null) { Utils.substitutePersistenceProperties(pe, pu, dbconn, props); } final ClassLoader defClassLoader = Thread.currentThread().getContextClassLoader(); try { ph.progress(10); ph.setDisplayName(NbBundle.getMessage(JPQLEditorTopComponent.class, "queryExecutionPrepare")); // Construct custom classpath here. 
initialProblems.addAll(Utils.collectClassPathURLs(pe, pu, dbconn, localResourcesURLList)); ClassLoader customClassLoader = pe.getProjectClassLoader( localResourcesURLList.toArray(new URL[]{})); Thread.currentThread().setContextClassLoader(customClassLoader); Thread t = new Thread() { @Override public void run() { ClassLoader customClassLoader = Thread.currentThread().getContextClassLoader(); JPQLResult jpqlResult = new JPQLResult(); if (initialProblems.isEmpty()) { JPQLExecutor queryExecutor = new JPQLExecutor(); try { // Parse POJOs from JPQL // Check and if required compile POJO files mentioned in JPQL ph.progress(50); ph.setDisplayName(NbBundle.getMessage(JPQLEditorTopComponent.class, "queryExecutionPassControlToProvider")); jpqlResult = queryExecutor.execute(jpql, pu, pe, props, provider, maxRowCount, ph, true); ph.progress(80); ph.setDisplayName(NbBundle.getMessage(JPQLEditorTopComponent.class, "queryExecutionProcessResults")); } catch (Exception e) { logger.log(Level.INFO, "Problem in executing JPQL", e); jpqlResult.getExceptions().add(e); } } else { StringBuilder sb = new StringBuilder(); for (String txt : initialProblems) { sb.append(txt).append("\n"); } jpqlResult.setQueryProblems(sb.toString()); jpqlResult.getExceptions().add(new Exception(sb.toString())); } final JPQLResult jpqlResult0 = jpqlResult; final ClassLoader customClassLoader0 = customClassLoader; SwingUtilities.invokeLater(new Runnable() { @Override public void run() { editorTopComponent.setResult(jpqlResult0, customClassLoader0); } }); Thread.currentThread().setContextClassLoader(defClassLoader); } }; t.setContextClassLoader(customClassLoader); t.start(); } catch (Exception ex) { Exceptions.printStackTrace(ex); } finally { Thread.currentThread().setContextClassLoader(defClassLoader); } } public void init(Node[] activatedNodes) { editorTopComponent = new JPQLEditorTopComponent(this); editorTopComponent.open(); editorTopComponent.requestActive(); editorTopComponent.setFocusToEditor(); editorTopComponent.fillPersistenceConfigurations(activatedNodes); } }
3,055
302
<reponame>Yalantis/Watchface-Constructor<filename>mobile/src/main/java/com/yalantis/watchface/task/SendToDataLayerThread.java<gh_stars>100-1000 package com.yalantis.watchface.task; import android.graphics.Bitmap; import android.util.Log; import com.google.android.gms.common.api.GoogleApiClient; import com.google.android.gms.wearable.Node; import com.google.android.gms.wearable.NodeApi; import com.google.android.gms.wearable.Wearable; import java.io.ByteArrayOutputStream; /** * @author andrewkhristyan on 10/16/15. */ public class SendToDataLayerThread extends Thread { private static final int MAX_SIZE = 2000000; private String path; private Bitmap bitmap; private GoogleApiClient mGoogleApiClient; private DataLayerListener mDataLayerListener; public SendToDataLayerThread(String path, Bitmap bitmap, GoogleApiClient googleApiClient, DataLayerListener dataLayerListener) { this.path = path; this.bitmap = bitmap; mGoogleApiClient = googleApiClient; mDataLayerListener = dataLayerListener; } public void run() { NodeApi.GetConnectedNodesResult nodes = Wearable.NodeApi.getConnectedNodes(mGoogleApiClient).await(); ByteArrayOutputStream stream = new ByteArrayOutputStream(); bitmap.compress(Bitmap.CompressFormat.PNG, 100, stream); String message = path + " was sent successfully"; if (bitmap.getByteCount() < MAX_SIZE) { for (Node node : nodes.getNodes()) { Wearable.MessageApi.sendMessage(mGoogleApiClient, node.getId(), path, stream.toByteArray()).await(); } } else { message = "Big image file, try to use another"; } mDataLayerListener.onSuccess(message); } public interface DataLayerListener { void onSuccess(String message); } }
696
352
<reponame>Troublor/crawljax package com.crawljax.plugins.testcasegenerator.visualdiff.pageobjects; import org.opencv.core.Mat; public interface IPageObjectFactory { PageObject makePageObject(Mat image, int x, int y, int width, int height); }
83
560
<reponame>kento-forest/heamy # coding:utf-8 import random import numpy as np import pytest from sklearn.model_selection import train_test_split from sklearn.datasets import load_boston from sklearn.linear_model import LinearRegression, LogisticRegression from heamy.dataset import Dataset from heamy.estimator import Regressor, Classifier np.random.seed(1000) random.seed(1111) X_train = np.random.rand(10, 100) X_test = np.random.rand(10, 100) y_train = np.random.rand(10, 1) y_test = np.random.rand(10, 1) class RealDataset(Dataset): @staticmethod def preprocess(): data = load_boston() X, y = data['data'], data['target'] X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.1, random_state=111) return {'X_train': X_train, 'y_train': y_train, 'X_test': X_test, 'y_test': y_test} class CustomDataset(Dataset): @staticmethod def preprocess(): return X_train, y_train, X_test, y_test class CustomEstimator(Classifier): @staticmethod def estimator(X_train, y_train, X_test, y_test=None): return np.zeros((2, X_test.shape[0])) # return np.zeros(X_test.shape[0]) def func_estimator(X_train, y_train, X_test, y_test): return np.zeros(X_test.shape[0]) def random_param(): return random.randint(1, 100) model_func = Regressor(estimator=func_estimator, dataset=CustomDataset) model_cls = CustomEstimator(dataset=CustomDataset()) model_param = Regressor(estimator=LinearRegression, parameters={'random_param': random_param}, dataset=CustomDataset) model_param2 = Classifier(estimator=LogisticRegression, parameters={'colsample_bylevel': 0.9}, dataset=CustomDataset) def test_hashing(): assert str(model_func) == 'func_estimator(54743c7a5484d1bf2a64ac1d7b68f8cc)' assert str(model_cls) == 'CustomEstimator(95738761045c1f666bbe10e6c7eefc6c)' assert str(model_param) == 'LinearRegression(2e789a766f6dc2457fb6a63452ad2859)' assert str(model_param2) == 'LogisticRegression(74efb248db47d168aed2fc37c0016e6f)' assert model_param2.hash == '74efb248db47d168aed2fc37c0016e6f' e_hash = CustomEstimator(dataset=CustomDataset()).hash assert e_hash == CustomEstimator(dataset=CustomDataset()).hash def test_custom_estimators(): def test_estimator(): return class TestEstimator2(Classifier): def estimator(self): return with pytest.raises(ValueError): TestEstimator2(dataset=CustomDataset) with pytest.raises(ValueError): Regressor(estimator=test_estimator, dataset=CustomDataset) def test_validation(): model = Regressor(estimator=LinearRegression, parameters={}, dataset=RealDataset) model.validate(k=10) # Retrieve cached object y_true, y_pred = model.validate(k=10) assert len(y_true) == len(y_pred) model.validate(k=1) # Retrieve cached object y_true, y_pred = model.validate(k=1) assert len(y_true) == len(y_pred) assert len(y_true) == 1 def test_prediction(): model = Regressor(estimator=LinearRegression, parameters={}, dataset=RealDataset) output = model.predict() assert len(output.shape) == 1 assert model.dataset.X_test.shape[0] == output.shape[0] # Retrieve cached object output = model.predict() assert len(output.shape) == 1 assert model.dataset.X_test.shape[0] == output.shape[0] def test_stacking(): model = Regressor(estimator=LinearRegression, parameters={}, dataset=RealDataset) ds = model.stack(10) assert ds.X_train.shape[0] == model.dataset.X_train.shape[0] assert ds.X_test.shape[0] == model.dataset.X_test.shape[0] assert ds.y_train.shape[0] == model.dataset.y_train.shape[0] model = Regressor(estimator=LinearRegression, parameters={}, dataset=RealDataset) ds = model.stack(10, full_test=False) assert np.isnan(ds.X_train).sum() == 0 
assert ds.X_train.shape[0] == model.dataset.X_train.shape[0] assert ds.X_test.shape[0] == model.dataset.X_test.shape[0] assert ds.y_train.shape[0] == model.dataset.y_train.shape[0] model = Regressor(estimator=LinearRegression, parameters={}, dataset=RealDataset) model.dataset.load() ds = model.stack(10, full_test=False) # Check cache assert np.isnan(ds.X_train).sum() == 0 assert ds.X_train.shape[0] == model.dataset.X_train.shape[0] assert ds.X_test.shape[0] == model.dataset.X_test.shape[0] assert ds.y_train.shape[0] == model.dataset.y_train.shape[0] def test_blending(): model = Regressor(estimator=LinearRegression, parameters={}, dataset=RealDataset) _, _, X_t, y_t = model.dataset.split(test_size=0.2) ds = model.blend(proportion=0.2) assert ds.X_test.shape[0] == model.dataset.X_test.shape[0] assert ds.X_train.shape[0] == X_t.shape[0] # Check cache ds = model.blend(proportion=0.2) assert ds.X_test.shape[0] == model.dataset.X_test.shape[0] assert ds.X_train.shape[0] == X_t.shape[0]
2,089
498
<filename>NonBaseClass-MVVM-ReactiveObjc-master/FXXKBaseMVVM/NonBase/ViewModel/FKViewModelIntercepter.h // // FKViewModelIntercepter.h // FXXKBaseMVVM // // Created by 梁宪松 on 2017/12/10. // Copyright © 2017年 madao. All rights reserved. // #import <Foundation/Foundation.h> @interface FKViewModelIntercepter : NSObject @end
135
14,668
<reponame>chromium/chromium // Copyright 2018 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef CHROME_BROWSER_APPS_APP_SERVICE_APP_SHORTCUT_ITEM_H_ #define CHROME_BROWSER_APPS_APP_SERVICE_APP_SHORTCUT_ITEM_H_ #include <string> #include <vector> #include "ash/components/arc/mojom/app.mojom.h" #include "ui/gfx/image/image_skia.h" namespace apps { // Describes app shortcut that is published by Android's ShortcutManager. struct AppShortcutItem { AppShortcutItem(); AppShortcutItem(const AppShortcutItem& item); ~AppShortcutItem(); // The ID of this shortcut. Unique within each publisher app and stable across // devices. std::string shortcut_id; // The short description of this shortcut. std::string short_label; // The icon for this shortcut. gfx::ImageSkia icon; // The category type of this shortcut. arc::mojom::AppShortcutItemType type = arc::mojom::AppShortcutItemType::kStatic; // "Rank" of a shortcut, which is a non-negative, sequential value. int rank = 0; }; using AppShortcutItems = std::vector<AppShortcutItem>; } // namespace apps #endif // CHROME_BROWSER_APPS_APP_SERVICE_APP_SHORTCUT_ITEM_H_
432
743
NAN_INT = 7535805 MIN_EMBEDDING = 4 EMBEDDING_SUFFIX = '_emb'
34
348
{"nom":"Licq-Athérey","circ":"4ème circonscription","dpt":"Pyrénées-Atlantiques","inscrits":193,"abs":72,"votants":121,"blancs":8,"nuls":4,"exp":109,"res":[{"nuance":"REM","nom":"M. <NAME>","voix":56},{"nuance":"DVD","nom":"<NAME>","voix":53}]}
101
1,600
<filename>scripts/addons/RetopoFlow/addon_common/common/ui_linefitter.py ''' Copyright (C) 2021 CG Cookie http://cgcookie.com <EMAIL> Created by <NAME>, <NAME> This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ''' import os import re import sys import math import time import random import asyncio import inspect import traceback import contextlib from math import floor, ceil from inspect import signature from itertools import dropwhile, zip_longest from concurrent.futures import ThreadPoolExecutor from .ui_utilities import UI_Element_Utils from .ui_settings import DEBUG_COLOR_CLEAN, DEBUG_PROPERTY, DEBUG_COLOR, DEBUG_DIRTY, DEBUG_LIST, CACHE_METHOD, ASYNC_IMAGE_LOADING import bpy import bgl import blf import gpu from .blender import tag_redraw_all from .ui_styling import UI_Styling, ui_defaultstylings from .ui_utilities import helper_wraptext, convert_token_to_cursor from .drawing import ScissorStack, FrameBuffer from .fsm import FSM from .useractions import ActionHandler from .boundvar import BoundVar from .debug import debugger, dprint, tprint from .decorators import debug_test_call, blender_version_wrapper, add_cache from .drawing import Drawing from .fontmanager import FontManager from .globals import Globals from .hasher import Hasher from .maths import Vec2D, Color, mid, Box2D, Size1D, Size2D, Point2D, RelPoint2D, Index2D, clamp, NumberUnit from .maths import floor_if_finite, ceil_if_finite from .profiler import profiler, time_it from .utils import iter_head, any_args, join, abspath class LineFitter: def __init__(self, *, left, top, width, height): self.box = Box2D(left=left, top=top, width=width, height=height) self.max_width = 0 self.sum_height = 0 self.lines = [] self.current_line = None self.new_line() def new_line(self): # width: sum of all widths added to current line # height: max of all heights added to current line if not self.is_current_line_empty(): self.max_width = max(self.max_width, self.current_width) self.sum_height = self.sum_height + self.current_height self.lines.append(self.current.elements) self.current_line = [] self.current_width = 0 self.current_height = 0 def is_current_line_empty(self): return not self.current_line @property def remaining_width(self): return self.box.width - self.current_width @property def remaining_height(self): return self.box.height - self.sum_height def get_next_box(self): return Box2D( left = self.box.left + self.current_width, top = -(self.box.top + self.sum_height), width = self.box.width - self.current_width, height = self.box.height - self.sum_height, ) def add_element(self, element, size): # assuming element is placed in correct spot in line if not self.fit(size): self.new_line() pos = Box2D( left = self.box.left + self.current_width, top = -(self.box.top + self.sum_height), width = size.smallest_width(), height = size.smallest_height(), ) self.current_line.append(element) self.current_width += size.smallest_width() self.current_height = max(self.current_height, 
size.smallest_height()) return pos def fit(self, size): if size.smallest_width() > self.remaining_width: return False if size.smallest_height() > self.remaining_height: return False return True class TableFitter: def __init__(self): self._cells = {} # keys are Index2D self._index = Index2D(0, 0) def new_row(self): self._index.update(i=0, j_off=1) def new_col(self): pass
1,671
1,498
<filename>src/main/java/com/spotify/docker/client/messages/LogConfig.java /*- * -\-\- * docker-client * -- * Copyright (C) 2016 Spotify AB * -- * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * -/-/- */ package com.spotify.docker.client.messages; import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.ANY; import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.NONE; import com.fasterxml.jackson.annotation.JsonAutoDetect; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.auto.value.AutoValue; import com.google.common.collect.ImmutableMap; import java.util.Map; import javax.annotation.Nullable; @AutoValue @JsonAutoDetect(fieldVisibility = ANY, getterVisibility = NONE, setterVisibility = NONE) public abstract class LogConfig { @JsonProperty("Type") public abstract String logType(); @Nullable @JsonProperty("Config") public abstract ImmutableMap<String, String> logOptions(); public static LogConfig create(final String logType) { return new AutoValue_LogConfig(logType, ImmutableMap.<String, String>builder().build()); } @JsonCreator public static LogConfig create( @JsonProperty("Type") final String logType, @JsonProperty("Config") final Map<String, String> logOptions) { final ImmutableMap<String, String> logOptionsCopy = logOptions == null ? null : ImmutableMap.copyOf(logOptions); return new AutoValue_LogConfig(logType, logOptionsCopy); } }
678
1,850
package com.alibaba.jvm.sandbox.repeater.plugin.redis; import com.alibaba.jvm.sandbox.api.event.Event; import com.alibaba.jvm.sandbox.repeater.plugin.api.InvocationProcessor; import com.alibaba.jvm.sandbox.repeater.plugin.core.impl.AbstractInvokePluginAdapter; import com.alibaba.jvm.sandbox.repeater.plugin.core.model.EnhanceModel; import com.alibaba.jvm.sandbox.repeater.plugin.domain.InvokeType; import com.alibaba.jvm.sandbox.repeater.plugin.spi.InvokePlugin; import com.google.common.collect.Lists; import org.kohsuke.MetaInfServices; import java.util.*; /** * {@link RedisPlugin} jedis的java插件 * <p> * 拦截{@code redis.clients.jedis.commands}包下面的commands实现类 * * 获取redis常用操作指令,不包括所有命令 * 详见Jedis类、BinaryJedis类的实现接口 * </p> * * @author zhaoyb1990 */ @MetaInfServices(InvokePlugin.class) public class RedisPlugin extends AbstractInvokePluginAdapter { @Override protected List<EnhanceModel> getEnhanceModels() { EnhanceModel jedis = EnhanceModel.builder() .classPattern("redis.clients.jedis.Jedis") .methodPatterns(EnhanceModel.MethodPattern.transform( "zcount", "sunionstore", "zunionstore", "del", "echo", "hscan", "zinterstore", "psubscribe", "type", "sinterstore", "xrevrange", "setex", "xadd", "zlexcount", "brpoplpush", "bitcount", "llen", "zscan", "lpushx", "bitpos", "setnx", "xack", "hvals", "evalsha", "substr", "randomKey", "geodist", "zrangeByLex", "geoadd", "expire", "bitop", "zrangeByScore", "smove", "lset", "decrBy", "pttl", "scan", "zrank", "xtrim", "blpop", "zremrangeByLex", "rpoplpush", "get", "lpop", "persist", "georadius", "scriptExists", "set", "srandmember", "incr", "setbit", "hexists", "expireAt", "pexpire", "zcard", "bitfield", "zrevrangeByLex", "sinter", "srem", "getrange", "rename", "watch", "zrevrank", "exists", "setrange", "zremrangeByRank", "sadd", "sdiff", "zrevrange", "unwatch", "getbit", "scard", "sdiffstore", "zrevrangeByScore", "zincrby", "rpushx", "psetex", "strlen", "zrevrangeWithScores", "hdel", "zremrangeByScore", "geohash", "xgroupDestroy", "brpop", "lrem", "hlen", "decr", "scriptLoad", "lpush", "lindex", "zrange", "incrBy", "getSet", "xlen", "ltrim", "georadiusReadonly", "touch", "incrByFloat", "rpop", "sort", "xdel", "zrevrangeByScoreWithScores", "xreadGroup", "xclaim", "pfadd", "eval", "linsert", "pfcount", "hkeys", "hsetnx", "hincrBy", "xpending", "hgetAll", "xgroupSetID", "georadiusByMemberReadonly", "keys", "restoreReplace", "hset", "spop", "zrangeWithScores", "hincrByFloat", "hmset", "renamenx", "zrem", "dump", "msetnx", "hmget", "sunion", "hget", "xread", "zadd", "move", "restore", "geopos", "subscribe", "mset", "zrangeByScoreWithScores", "zscore", "pexpireAt", "georadiusByMember", "ttl", "lrange", "hstrlen", "smembers", "xgroupCreate", "unlink", "pfmerge", "rpush", "publish", "sscan", "mget", "xrange", "append", "sismember", "xgroupDelConsumer", "sismember" )) .watchTypes(Event.Type.BEFORE, Event.Type.RETURN, Event.Type.THROWS) .build(); EnhanceModel binaryJedis = EnhanceModel.builder() .classPattern("redis.clients.jedis.BinaryJedis") .methodPatterns(EnhanceModel.MethodPattern.transform( "zcount", "sunionstore", "scriptKill", "zunionstore", "del", "echo", "hscan", "zinterstore", "psubscribe", "type", "sinterstore", "xrevrange", "setex", "xadd", "zlexcount", "brpoplpush", "bitcount", "llen", "zscan", "lpushx", "setnx", "xack", "hvals", "evalsha", "substr", "geodist", "zrangeByLex", "geoadd", "expire", "bitop", "zrangeByScore", "smove", "lset", "decrBy", "pttl", "scriptFlush", "zrank", "xtrim", "blpop", "zremrangeByLex", "rpoplpush", "get", "lpop", "persist", 
"georadius", "scriptExists", "set", "srandmember", "incr", "setbit", "hexists", "expireAt", "pexpire", "zcard", "bitfield", "zrevrangeByLex", "sinter", "srem", "getrange", "rename", "watch", "zrevrank", "exists", "setrange", "zremrangeByRank", "sadd", "sdiff", "zrevrange", "unwatch", "getbit", "scard", "sdiffstore", "zrevrangeByScore", "zincrby", "rpushx", "psetex", "strlen", "zrevrangeWithScores", "hdel", "zremrangeByScore", "geohash", "xgroupDestroy", "brpop", "lrem", "hlen", "decr", "scriptLoad", "lpush", "lindex", "zrange", "incrBy", "getSet", "xlen", "ltrim", "georadiusReadonly", "touch", "incrByFloat", "rpop", "sort", "xdel", "zrevrangeByScoreWithScores", "xreadGroup", "xclaim", "pfadd", "eval", "linsert", "pfcount", "hkeys", "hsetnx", "hincrBy", "xpending", "hgetAll", "xgroupSetID", "georadiusByMemberReadonly", "keys", "restoreReplace", "hset", "spop", "randomBinaryKey", "zrangeWithScores", "hincrByFloat", "hmset", "renamenx", "zrem", "dump", "msetnx", "hmget", "sunion", "hget", "xread", "zadd", "move", "restore", "geopos", "subscribe", "mset", "zrangeByScoreWithScores", "zscore", "pexpireAt", "georadiusByMember", "ttl", "lrange", "hstrlen", "smembers", "xgroupCreate", "unlink", "pfmerge", "rpush", "publish", "sscan", "mget", "xrange", "append", "sismember", "xgroupDelConsumer", "sismember" )) .watchTypes(Event.Type.BEFORE, Event.Type.RETURN, Event.Type.THROWS) .build(); return Lists.newArrayList(jedis, binaryJedis); } @Override protected InvocationProcessor getInvocationProcessor() { return new RedisProcessor(getType()); } @Override public InvokeType getType() { return InvokeType.REDIS; } @Override public String identity() { return "redis"; } @Override public boolean isEntrance() { return false; } }
9,865
9,852
{ "symbol-whitelist" : [ "Symfony\\Component\\EventDispatcher\\Event", "Symfony\\Contracts\\EventDispatcher\\Event", "Symfony\\Contracts\\EventDispatcher\\EventDispatcherInterface", "null", "true", "false", "static", "self", "parent", "array", "string", "int", "float", "bool", "iterable", "callable", "void", "T_AMPERSAND_NOT_FOLLOWED_BY_VAR_OR_VARARG", "T_ATTRIBUTE", "T_COALESCE_EQUAL", "T_ENUM", "T_FN", "T_MATCH", "T_NAME_FULLY_QUALIFIED", "T_NAME_QUALIFIED", "T_NAME_RELATIVE", "T_NULLSAFE_OBJECT_OPERATOR", "T_READONLY" ], "php-core-extensions" : [ "dom", "mbstring", "Phar", "Core", "date", "pcre", "Reflection", "SPL", "standard" ] }
430
1,185
<filename>userspace/libraries/libc/src/functions/arch/crti.c typedef void (*func_ptr)(void); extern func_ptr __init_array_start[0], __init_array_end[0]; extern func_ptr __fini_array_start[0], __fini_array_end[0]; void _init(void) { for (func_ptr* func = __init_array_start; func != __init_array_end; func++) (*func)(); } void _fini(void) { for (func_ptr* func = __fini_array_start; func != __fini_array_end; func++) (*func)(); }
196
1,072
[ {"tracker": "https://google-analytics.com/ga.js", "site": "reddit.com", "req": {"type": "script"}, "result": {"action": "redirect", "firstParty": false, "reason": "matched rule - surrogate"}}, {"tracker": "https://google-analytics.com/ga.js", "site": "google.com", "req": {"type": "script"}, "result": {"action": "ignore", "firstParty": true, "reason": "first party"}}, {"tracker": "https://yahoo.com/", "site": "facebook.com", "req": {"type": "image"}, "result": {"action": "block", "firstParty": false, "reason": "default block"}} ]
187
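The JSON row above is a block-list test fixture: each entry pairs a tracker URL, the embedding site, and a request type with the expected action. Below is a minimal, self-contained Python sketch of how such a fixture could be iterated; the three cases are inlined verbatim for illustration, and the printed summary is not part of any real test harness.

import json

# The three cases from the fixture above, inlined verbatim for illustration.
fixture = '''[
  {"tracker": "https://google-analytics.com/ga.js", "site": "reddit.com",
   "req": {"type": "script"},
   "result": {"action": "redirect", "firstParty": false, "reason": "matched rule - surrogate"}},
  {"tracker": "https://google-analytics.com/ga.js", "site": "google.com",
   "req": {"type": "script"},
   "result": {"action": "ignore", "firstParty": true, "reason": "first party"}},
  {"tracker": "https://yahoo.com/", "site": "facebook.com",
   "req": {"type": "image"},
   "result": {"action": "block", "firstParty": false, "reason": "default block"}}
]'''

for case in json.loads(fixture):
    result = case["result"]
    # Each case documents the expected decision for (tracker, site, request type).
    print(f'{case["site"]:<15} {case["req"]["type"]:<7} -> {result["action"]:<9} ({result["reason"]})')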
599
<reponame>laodiu/bk-bcs<gh_stars>100-1000 # -*- coding: utf-8 -*- """ Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community Edition) available. Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://opensource.org/licenses/MIT Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Platform feature toggles """ import logging from django.conf import settings from backend.container_service.projects.models import FunctionController logger = logging.getLogger(__name__) def get_func_controller(func_code): # Toggles that are switched on directly do not need to be configured in the DB if func_code in settings.DIRECT_ON_FUNC_CODE: return True, [] try: ref = FunctionController.objects.filter(func_code=func_code).first() if not ref: return (False, []) if ref.wlist: wlist = [i.strip() for i in ref.wlist.split(';')] else: wlist = [] return (ref.enabled, wlist) except Exception: logger.exception("get_func_controller error") return (False, [])
568
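get_func_controller above returns an (enabled, whitelist) pair for a feature code, short-circuiting for codes listed in settings.DIRECT_ON_FUNC_CODE. The sketch below shows one way it might be exercised; it assumes a configured Django settings module and a migrated FunctionController table, and the feature code "bcs_metrics" plus the project IDs are made-up illustration values.

# Sketch only: assumes Django is configured and the FunctionController table exists.
from backend.container_service.projects.models import FunctionController

# Seed a toggle whose whitelist is stored as a semicolon-separated string.
FunctionController.objects.update_or_create(
    func_code="bcs_metrics",                      # hypothetical feature code
    defaults={"enabled": True, "wlist": "proj-a;proj-b"},
)

enabled, whitelist = get_func_controller("bcs_metrics")
if enabled and (not whitelist or "proj-a" in whitelist):
    print("feature is on for project proj-a")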
4,339
<gh_stars>1000+ /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements.  See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License.  You may obtain a copy of the License at * *      http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.query.h2; import java.util.Collection; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; /** * Database schema object. */ public class H2Schema { /** */ private final String schemaName; /** */ private final ConcurrentMap<String, H2TableDescriptor> tbls = new ConcurrentHashMap<>(); /** */ private final ConcurrentMap<H2TypeKey, H2TableDescriptor> typeToTbl = new ConcurrentHashMap<>(); /** Whether schema is predefined and cannot be dropped. */ private final boolean predefined; /** Usage count. */ private int usageCnt; /** * Constructor. * * @param schemaName Schema name. * @param predefined Predefined flag. */ public H2Schema(String schemaName, boolean predefined) { this.schemaName = schemaName; this.predefined = predefined; } /** * @return Schema name. */ public String schemaName() { return schemaName; } /** * Increments counter for number of caches having this schema. */ public void incrementUsageCount() { if (!predefined) ++usageCnt; } /** * Decrements counter for number of caches having this schema. * * @return If schema is no longer used. */ public boolean decrementUsageCount() { return !predefined && --usageCnt == 0; } /** * @return Tables. */ public Collection<H2TableDescriptor> tables() { return tbls.values(); } /** * @param tblName Table name. * @return Table. */ public H2TableDescriptor tableByName(String tblName) { return tbls.get(tblName); } /** * @param cacheName Cache name. * @param typeName Type name. * @return Table. */ public H2TableDescriptor tableByTypeName(String cacheName, String typeName) { return typeToTbl.get(new H2TypeKey(cacheName, typeName)); } /** * @param tbl Table descriptor. */ public void add(H2TableDescriptor tbl) { if (tbls.putIfAbsent(tbl.tableName(), tbl) != null) throw new IllegalStateException("Table already registered: " + tbl.fullTableName()); if (typeToTbl.putIfAbsent(new H2TypeKey(tbl.cacheName(), tbl.typeName()), tbl) != null) throw new IllegalStateException("Table already registered: " + tbl.fullTableName()); } /** * Drop table. * * @param tbl Table to be removed. */ public void drop(H2TableDescriptor tbl) { tbl.onDrop(); tbls.remove(tbl.tableName()); typeToTbl.remove(new H2TypeKey(tbl.cacheName(), tbl.typeName())); } /** * @return {@code True} if schema is predefined. */ public boolean predefined() { return predefined; } }
1,344
563
# python/example3.py -- running NanoGUI in detached mode # (contributed by <NAME>) # # NanoGUI was developed by <NAME> <<EMAIL>>. # The widget drawing code is based on the NanoVG demo application # by <NAME>. # # All rights reserved. Use of this source code is governed by a # BSD-style license that can be found in the LICENSE.txt file. import nanogui from nanogui import * import time class TestApp(Screen): def __init__(self): super(TestApp, self).__init__((190, 170), "NanoGUI Test") window = Window(self, "Detached mode") window.setPosition((15, 15)) window.setLayout(GroupLayout()) Label(window, "Push buttons", "sans-bold") b = Button(window, "Plain button") def cb(): print("pushed!") b.setCallback(cb) b = Button(window, "Quit") def cb2(): self.setVisible(False) b.setCallback(cb2) self.performLayout() if __name__ == "__main__": nanogui.init() test = TestApp() test.drawAll() test.setVisible(True) print("Launching detached mainloop") h = nanogui.mainloop(detach=test) print("Back in Python context") for i in range(10): print(i) time.sleep(1) if not nanogui.active(): break h.join() nanogui.shutdown()
558
361
<filename>galaxies/cert-eu-govsector.json { "description": "Cert EU GovSector", "icon": "globe", "name": "Cert EU GovSector", "namespace": "misp", "type": "cert-eu-govsector", "uuid": "68858a48-b898-11e7-91ce-bf424ef9b662", "version": 2 }
111
1,806
<filename>apollo-environment/src/test/java/com/spotify/apollo/environment/ConfigUtilTest.java /* * -\-\- * Spotify Apollo API Environment * -- * Copyright (C) 2013 - 2015 Spotify AB * -- * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * -/-/- */ package com.spotify.apollo.environment; import com.google.common.collect.ImmutableMap; import com.typesafe.config.Config; import com.typesafe.config.ConfigFactory; import org.junit.Test; import java.util.Optional; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertThat; public class ConfigUtilTest { @Test public void optionalOrShouldReturnFirstIfPresent() throws Exception { assertThat(ConfigUtil.either(Optional.of("hi"), Optional.of("there")), is(Optional.of("hi"))); } @Test public void optionalOrShouldReturnAlternativeIfFirstMissing() throws Exception { assertThat(ConfigUtil.either(Optional.empty(), Optional.of("there")), is(Optional.of("there"))); } @Test public void shouldReturnValueForAvailableString() throws Exception { final Config config = ConfigFactory.parseMap(ImmutableMap.of("hey", "ho")); assertThat(ConfigUtil.optionalString(config, "hey"), is(Optional.of("ho"))); } @Test public void shouldReturnEmptyForMissingString() throws Exception { final Config config = ConfigFactory.parseMap(ImmutableMap.of("hey", "ho")); assertThat(ConfigUtil.optionalString(config, "ho"), is(Optional.empty())); } @Test public void shouldReturnValueForAvailableBoolean() throws Exception { final Config config = ConfigFactory.parseMap(ImmutableMap.of("hey", true)); assertThat(ConfigUtil.optionalBoolean(config, "hey"), is(Optional.of(true))); } @Test public void shouldReturnEmptyForMissingBoolean() throws Exception { final Config config = ConfigFactory.parseMap(ImmutableMap.of("hey", false)); assertThat(ConfigUtil.optionalBoolean(config, "ho"), is(Optional.empty())); } @Test public void shouldReturnValueForAvailableInt() throws Exception { final Config config = ConfigFactory.parseMap(ImmutableMap.of("hey", 345)); assertThat(ConfigUtil.optionalInt(config, "hey"), is(Optional.of(345))); } @Test public void shouldReturnEmptyForMissingInt() throws Exception { final Config config = ConfigFactory.parseMap(ImmutableMap.of("hey", 99)); assertThat(ConfigUtil.optionalInt(config, "ho"), is(Optional.empty())); } @Test public void shouldReturnValueForAvailableDouble() throws Exception { final Config config = ConfigFactory.parseMap(ImmutableMap.of("hey", 345.1)); assertThat(ConfigUtil.optionalDouble(config, "hey"), is(Optional.of(345.1))); } @Test public void shouldReturnEmptyForMissingDouble() throws Exception { final Config config = ConfigFactory.parseMap(ImmutableMap.of("hey", 99.0)); assertThat(ConfigUtil.optionalDouble(config, "ho"), is(Optional.empty())); } }
1,025
336
<reponame>poolqf/PQFCustomLoaders // // PQFBallDrop.h // PQFCustomLoadersDemo // // Created by <NAME> on 6/3/15. // Copyright (c) 2015 <NAME>. All rights reserved. // #import <UIKit/UIKit.h> #import "PQFLoader.h" IB_DESIGNABLE @interface PQFBallDrop : PQFLoader /** Text label of the Loader. Hidden if text is nil */ @property (nonatomic, strong) IBInspectable UILabel *label; /** Corner radius of the Loader background */ @property (nonatomic, assign) IBInspectable CGFloat cornerRadius; /** Color of the Loader */ @property (nonatomic, strong) IBInspectable UIColor *loaderColor; /** Alpha of the loader */ @property (nonatomic, assign) IBInspectable CGFloat loaderAlpha; /** Duration of each animation */ @property (nonatomic, assign) IBInspectable CGFloat duration; /** Size of the label text */ @property (nonatomic, assign) IBInspectable CGFloat fontSize; /** Maximum diameter of the circles */ @property (nonatomic, assign) IBInspectable CGFloat maxDiam; /** Delay between the animations */ @property (nonatomic, assign) IBInspectable CGFloat delay; /** Ball added size when dropping */ @property (nonatomic, assign) IBInspectable CGFloat amountZoom; /** Alpha of the whole view */ @property (nonatomic, assign) IBInspectable CGFloat alpha; @end
409
318
<reponame>kangzai228/learning-power #!/usr/bin/env python3 # -*- coding:utf-8 -*- # @Author : lisztomania # @Date : 2021/1/28 # @Software : Pycharm # @Version : Python 3.8.5 # @File : Url_Test.py # @Function : URL latency test import time import ssl from socket import timeout from urllib import request from urllib.error import URLError from inside.Template.Meta_Singleton import SINGLETON __all__ = ['URL_TEST', 'Url_Test'] class URL_TEST(metaclass=SINGLETON): """URL latency test class""" ssl._create_default_https_context = ssl._create_unverified_context @classmethod def Url_Test(cls, url: str) -> float: """ Url_Test(url: str) -> float Measure the access latency of a URL :param url: url :return: float """ try: temp = 0 for _ in range(3): s = time.time() try: request.urlopen(url=url, timeout=3) except (timeout, URLError): pass temp += time.time() - s return temp / 3 except (ValueError, AttributeError): raise Exception(f"{url}: is not a valid URL") _inst = URL_TEST Url_Test = _inst.Url_Test
632
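A small usage sketch for the latency helper above. The import path is a guess based on the project's `inside` package layout and may need adjusting; example.com is only a placeholder target.

# Adjust the import to wherever Url_Test.py sits inside the `inside` package.
from inside.Template.Url_Test import Url_Test  # hypothetical module path

avg = Url_Test("https://www.example.com")      # averages three timed requests
print(f"average latency: {avg:.3f}s")

try:
    Url_Test("not-a-url")                      # malformed input raises Exception
except Exception as exc:
    print(exc)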
548
from django.db import models from django.db.models import permalink from django.conf import settings from basic.people.models import Person class Genre(models.Model): """Genre model""" title = models.CharField(max_length=100) slug = models.SlugField(unique=True) class Meta: db_table = 'movie_genres' ordering = ('title',) def __unicode__(self): return '%s' % self.title @permalink def get_absolute_url(self): return ('movie_genre_detail', None, { 'slug': self.slug }) class Studio(models.Model): """Studio model""" title = models.CharField(max_length=100) prefix = models.CharField(max_length=20, blank=True) slug = models.SlugField(unique=True) website = models.URLField(blank=True) class Meta: db_table = 'movie_studios' ordering = ('title',) def __unicode__(self): return '%s' % self.full_title @property def full_title(self): return '%s %s' % (self.prefix, self.title) @permalink def get_absolute_url(self): return ('movie_studio_detail', None, { 'slug': self.slug }) class Movie(models.Model): """Movie model""" title = models.CharField(max_length=255) prefix = models.CharField(max_length=20, blank=True) subtitle = models.CharField(blank=True, max_length=255) slug = models.SlugField(unique=True) directors = models.ManyToManyField(Person, limit_choices_to={'person_types__slug__exact': 'director'}, blank=True) studio = models.ForeignKey(Studio, blank=True, null=True) released = models.DateField(blank=True, null=True) asin = models.CharField(blank=True, max_length=100) cover = models.FileField(upload_to='films', blank=True) review = models.TextField(blank=True) genre = models.ManyToManyField(Genre, blank=True) class Meta: db_table = 'movies' ordering = ('title',) def __unicode__(self): return '%s' % self.full_title @property def full_title(self): return '%s %s' % (self.prefix, self.title) @permalink def get_absolute_url(self): return ('movie_detail', None, { 'slug': self.slug }) @property def amazon_url(self): try: return 'http://www.amazon.com/dp/%s/?%s' % (self.asin, settings.AMAZON_AFFILIATE_EXTENTION) except: return 'http://www.amazon.com/dp/%s/' % self.asin @property def cover_url(self): return '%s%s' % (settings.MEDIA_URL, self.cover)
1,024
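A usage sketch for the movie models above, assuming the app is installed with its migrations applied and the code runs inside a Django shell. The import path, titles, slugs, and ASIN are illustrative assumptions, not part of the original module.

# Run inside `python manage.py shell` for a project with these models migrated.
from basic.movies.models import Genre, Movie, Studio   # assumed app path

studio = Studio.objects.create(title="Example Studio", slug="example-studio")
drama = Genre.objects.create(title="Drama", slug="drama")

movie = Movie.objects.create(
    title="Example Film",
    slug="example-film",
    studio=studio,
    asin="B000000000",          # placeholder ASIN
)
movie.genre.add(drama)

print(movie.full_title)         # prefix + title
print(movie.amazon_url)         # appends AMAZON_AFFILIATE_EXTENTION when defined
# movie.get_absolute_url() resolves once a URL pattern named 'movie_detail' exists.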
561
/// Copyright 2016 Pinterest Inc. /// /// Licensed under the Apache License, Version 2.0 (the "License"); /// you may not use this file except in compliance with the License. /// You may obtain a copy of the License at /// /// http://www.apache.org/licenses/LICENSE-2.0 /// Unless required by applicable law or agreed to in writing, software /// distributed under the License is distributed on an "AS IS" BASIS, /// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. /// See the License for the specific language governing permissions and /// limitations under the License. // // @author bol (<EMAIL>) // #include "examples/counter_service/counter_handler.h" #include <string> #include "examples/counter_service/stats_enum.h" #include "common/stats/stats.h" #include "common/timer.h" namespace counter { std::shared_ptr<::admin::ApplicationDB> CounterHandler::getDB( const std::string& db_name, CounterException* ex) { ::admin::AdminException e; auto db = ::admin::AdminHandler::getDB(db_name, &e); if (ex && db == nullptr) { ex->code = ErrorCode::DB_NOT_FOUND; ex->msg = std::move(e.message); } return db; } void CounterHandler::async_tm_getCounter( std::unique_ptr<apache::thrift::HandlerCallback< std::unique_ptr<::counter::GetResponse>>> callback, std::unique_ptr<::counter::GetRequest> request) { common::Stats::get()->Incr(kApiGetCounter); common::Timer timer(kApiGetCounterMs); CounterException ex; if (request->need_routing) { request->need_routing = false; std::vector<std::shared_ptr<CounterAsyncClient>> clients; router_->GetClientsFor(request->segment, request->counter_name, true /* for_read */, &clients); if (clients.empty()) { ex.code = ErrorCode::SERVER_NOT_FOUND; ex.msg = "Server not found for getting: " + request->counter_name; callback.release()->exceptionInThread(std::move(ex)); return; } clients[0]->future_getCounter(*request).then( [ callback = std::move(callback) ] (folly::Try<::counter::GetResponse>&& t) mutable { if (t.hasException()) { callback.release()->exceptionInThread(t.exception()); } else { callback.release()->resultInThread(std::move(t.value())); } }); return; } auto db_name = router_->GetDBName(request->segment, request->counter_name); auto db = getDB(db_name, &ex); if (db == nullptr) { callback.release()->exceptionInThread(std::move(ex)); return; } std::string value; auto status = db->Get(read_options_, request->counter_name, &value); if (!status.ok()) { ex.code = ErrorCode::ROCKSDB_ERROR; ex.msg = status.ToString(); callback.release()->exceptionInThread(std::move(ex)); return; } if (value.size() != sizeof(int64_t)) { ex.code = ErrorCode::CORRUPTED_DATA; ex.msg = "Corrupted data found"; callback.release()->exceptionInThread(std::move(ex)); return; } GetResponse res; memcpy(&res.counter_value, value.c_str(), value.size()); callback.release()->resultInThread(res); } void CounterHandler::async_tm_setCounter( std::unique_ptr<apache::thrift::HandlerCallback< std::unique_ptr<::counter::SetResponse>>> callback, std::unique_ptr<::counter::SetRequest> request) { common::Stats::get()->Incr(kApiSetCounter); common::Timer timer(kApiSetCounterMs); CounterException ex; if (request->need_routing) { request->need_routing = false; std::vector<std::shared_ptr<CounterAsyncClient>> clients; router_->GetClientsFor(request->segment, request->counter_name, false /* for_read */, &clients); if (clients.empty()) { ex.code = ErrorCode::SERVER_NOT_FOUND; ex.msg = "Server not found for setting: " + request->counter_name; callback.release()->exceptionInThread(std::move(ex)); return; } 
clients[0]->future_setCounter(*request).then( [ callback = std::move(callback) ] (folly::Try<::counter::SetResponse>&& t) mutable { if (t.hasException()) { callback.release()->exceptionInThread(t.exception()); } else { callback.release()->resultInThread(std::move(t.value())); } }); return; } auto db_name = router_->GetDBName(request->segment, request->counter_name); auto db = getDB(db_name, &ex); if (db == nullptr) { callback.release()->exceptionInThread(std::move(ex)); return; } rocksdb::WriteBatch write_batch; write_batch.Put( request->counter_name, rocksdb::Slice(reinterpret_cast<const char*>(&request->counter_value), sizeof(request->counter_value))); auto status = db->Write(write_options_, &write_batch); if (status.ok()) { callback.release()->resultInThread(SetResponse()); return; } ex.code = ErrorCode::ROCKSDB_ERROR; ex.msg = status.ToString(); callback.release()->exceptionInThread(std::move(ex)); } void CounterHandler::async_tm_bumpCounter( std::unique_ptr<apache::thrift::HandlerCallback< std::unique_ptr<::counter::BumpResponse>>> callback, std::unique_ptr<::counter::BumpRequest> request) { common::Stats::get()->Incr(kApiBumpCounter); common::Timer timer(kApiBumpCounterMs); CounterException ex; if (request->need_routing) { request->need_routing = false; std::vector<std::shared_ptr<CounterAsyncClient>> clients; router_->GetClientsFor(request->segment, request->counter_name, false /* for_read */, &clients); if (clients.empty()) { ex.code = ErrorCode::SERVER_NOT_FOUND; ex.msg = "Server not found for bumping: " + request->counter_name; callback.release()->exceptionInThread(std::move(ex)); return; } clients[0]->future_bumpCounter(*request).then( [ callback = std::move(callback) ] (folly::Try<::counter::BumpResponse>&& t) mutable { if (t.hasException()) { callback.release()->exceptionInThread(t.exception()); } else { callback.release()->resultInThread(std::move(t.value())); } }); return; } auto db_name = router_->GetDBName(request->segment, request->counter_name); auto db = getDB(db_name, &ex); if (db == nullptr) { callback.release()->exceptionInThread(std::move(ex)); return; } rocksdb::WriteBatch write_batch; write_batch.Merge( request->counter_name, rocksdb::Slice(reinterpret_cast<const char*>(&request->counter_delta), sizeof(request->counter_delta))); auto status = db->Write(write_options_, &write_batch); if (status.ok()) { callback.release()->resultInThread(BumpResponse()); return; } ex.code = ErrorCode::ROCKSDB_ERROR; ex.msg = status.ToString(); callback.release()->exceptionInThread(std::move(ex)); } } // namespace counter
2,778
3,579
/* * Copyright 2015, The Querydsl Team (http://www.querydsl.com/team) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.querydsl.collections; import java.util.ArrayList; import java.util.List; import org.junit.Ignore; import org.junit.Test; import com.querydsl.core.Tuple; import com.querydsl.core.types.Projections; import com.querydsl.core.types.QTuple; public class SerializationTest extends AbstractQueryTest { // TODO : order // TODO : subqueries private QTuple tuple = Projections.tuple(cat, otherCat); @Test public void oneSource_list() { query().from(cat, cats).select(cat).fetch(); } public List<Cat> oneSource_list(List<Cat> cats) { return cats; } @Test public void twoSources_list() { query().from(cat,cats).from(otherCat, cats).select(cat).fetch(); } public List<Cat> twoSources_list(List<Cat> cats, List<Cat> otherCats) { return cats; } @Test public void oneSource_filteredList() { query().from(cat, cats).where(cat.name.eq("Kitty")).select(cat).fetch(); } public List<Cat> oneSource_filteredList(List<Cat> cats) { List<Cat> rv = new ArrayList<Cat>(); for (Cat cat : cats) { // from if (cat.getName().equals("Kitty")) { // where rv.add(cat); // list } } return rv; } @Test public void oneSource_projectedList() { query().from(cat, cats).select(cat.name).fetch(); } public List<String> oneSource_projectedList(List<Cat> cats) { List<String> rv = new ArrayList<String>(); for (Cat cat : cats) { // from rv.add(cat.getName()); // list } return rv; } @Test public void oneSource_filtered_projectedList() { query().from(cat, cats).where(cat.name.eq("Kitty")).select(cat.name).fetch(); } public List<String> oneSource_filtered_projectedList(List<Cat> cats) { List<String> rv = new ArrayList<String>(); for (Cat cat : cats) { // from if (cat.getName().equals("Kitty")) { // where rv.add(cat.getName()); // list } } return rv; } @Test public void oneSource_filtered_projectedUnique() { query().from(cat, cats).where(cat.name.eq("Kitty")).select(cat.name).fetchOne(); } public String oneSource_filtered_projectedUnique(List<Cat> cats) { for (Cat cat : cats) { // from if (cat.getName().equals("Kitty")) { // where return cat.getName(); // unique } } throw new IllegalArgumentException(); } @Test @Ignore public void join_list() { query().from(cat, cats) .innerJoin(cat.kittens, kitten).where(kitten.name.eq("Kitty")) .select(cat).fetch(); } public List<Cat> join_list(List<Cat> cats) { List<Cat> rv = new ArrayList<Cat>(); for (Cat cat : cats) { // from for (Cat kitten : cat.getKittens()) { // inner join if (kitten.getName().equals("Kitty")) { // where rv.add(cat); // list } } } return rv; } public List<Object[]> pairs(List<Cat> cats, List<Cat> otherCats) { query().from(cat, cats) .from(otherCat, otherCats) .where(cat.name.eq(otherCat.name)) .select(cat, otherCat).fetch(); List<Object[]> rv = new ArrayList<Object[]>(); for (Cat cat : cats) { // from for (Cat otherCat : otherCats) { // from if (cat.getName().equals(otherCat.getName())) { // where rv.add(new Object[]{cat,otherCat}); // list } } } return rv; } public List<Tuple> 
pairsAsTuple(List<Cat> cats, List<Cat> otherCats) { query().from(cat, cats).from(otherCat, cats) .where(cat.name.eq(otherCat.name)) .select(Projections.tuple(cat, otherCat)).fetch(); List<Tuple> rv = new ArrayList<Tuple>(); for (Cat cat : cats) { // from for (Cat otherCat : otherCats) { // from if (cat.getName().equals(otherCat.getName())) { // where rv.add(tuple.newInstance(cat, otherCat)); // list } } } return rv; } }
2,532
323
<reponame>PicoJr/StereoPipeline<filename>src/asp/Camera/PeruSatXML.h // __BEGIN_LICENSE__ // Copyright (c) 2009-2013, United States Government as represented by the // Administrator of the National Aeronautics and Space Administration. All // rights reserved. // // The NGT platform is licensed under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance with the // License. You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // __END_LICENSE__ // These are objects that relate directly to block in XML that we need // to read. They only read and then store the raw values. Other // objects will interpret the results. #ifndef __STEREO_CAMERA_PERUSAT_XML_H__ #define __STEREO_CAMERA_PERUSAT_XML_H__ #include <vw/Core/FundamentalTypes.h> #include <vw/Core/Log.h> #include <vw/Math/Vector.h> #include <vw/Math/Quaternion.h> #include <vw/Math/BBox.h> #include <vw/Math/Geometry.h> #include <vw/Cartography/GeoReference.h> #include <vw/Camera/CameraModel.h> #include <vw/Camera/Extrinsics.h> #include <asp/Core/Common.h> #include <vector> #include <string> #include <boost/smart_ptr/scoped_ptr.hpp> #include <boost/date_time/posix_time/posix_time.hpp> // Special forward declare so we can hide the Xerces headers. #include <xercesc/util/XercesDefs.hpp> // Needed for this XERCES macro XERCES_CPP_NAMESPACE_BEGIN class DOMDocument; class DOMElement; class XercesDOMParser; class ErrorHandler; XERCES_CPP_NAMESPACE_END namespace asp { class PeruSatXML { public: /// Constructor PeruSatXML(): m_start_time_is_set(false){} vw::Vector2i m_image_size; vw::Quaternion<double> m_instrument_biases; vw::Vector2 m_tan_psi_x, m_tan_psi_y; /// Parse an XML file to populate the data void read_xml(std::string const& xml_path); /// Parse an XML tree to populate the data void parse_xml(xercesc::DOMElement* node); // Functions to setup functors which manage the raw input data. vw::camera::LinearTimeInterpolation setup_time_func() const; vw::camera::LagrangianInterpolation setup_position_func (vw::camera::LinearTimeInterpolation const& time_func) const; vw::camera::LagrangianInterpolation setup_velocity_func (vw::camera::LinearTimeInterpolation const& time_func) const; vw::camera::SLERPPoseInterpolation setup_pose_func (vw::camera::LinearTimeInterpolation const& time_func) const; private: // The various XML data reading sections /// Just opens the XML file for reading and returns the root node. xercesc::DOMElement* open_xml_file(std::string const& xml_path); void read_image_size (xercesc::DOMElement* raster_data); void read_times (xercesc::DOMElement* time); void read_ephemeris (xercesc::DOMElement* ephemeris); void read_attitudes (xercesc::DOMElement* attitudes); void read_look_angles (xercesc::DOMElement* look_angles); void read_instr_biases(xercesc::DOMElement* instr_biases); void read_center_data (xercesc::DOMElement* geom_values); /// Converts a time from string to double precision in seconds. /// All times are in seconds relative to the start time. /// When the start time is passed in, use is_start_time = true. 
double convert_time(std::string const& s, bool is_start_time); // Boost does not like a time string such as "2017-12-07 15:36:40.90795Z" // because it expects precisely 6 digits after the dot (hence for the millisecond). // Fix that. static std::string fix_millisecond(std::string const& in_str); // All times represented as doubles will be in seconds relative to m_start_time_stamp boost::posix_time::ptime m_start_time_stamp; bool m_start_time_is_set; double m_start_time; double center_time; double m_line_period; double m_center_col; double m_center_row; std::list<std::pair<double, vw::Vector3>> m_positions; // (time, X/Y/Z) std::list<std::pair<double, vw::Vector3>> m_velocities; // (time, dX/dY/dZ) std::list<std::pair<double, vw::Quaternion<double>>> m_poses; // (time, quaternion) boost::shared_ptr<xercesc::XercesDOMParser> m_parser; boost::shared_ptr<xercesc::ErrorHandler> m_err_handler; }; // End class PeruSatXML } //end namespace asp #endif//__STEREO_CAMERA_PERUSAT_XML_H__
1,736
679
/************************************************************** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * *************************************************************/ #ifndef FORMS_ERRORBROADCASTER_HXX #define FORMS_ERRORBROADCASTER_HXX #include <cppuhelper/implbase1.hxx> #include <com/sun/star/sdb/XSQLErrorBroadcaster.hpp> #include <cppuhelper/interfacecontainer.h> #include <com/sun/star/sdbc/SQLException.hpp> #include <com/sun/star/sdb/SQLErrorEvent.hpp> //......................................................................... namespace frm { //......................................................................... //===================================================================== //= OErrorBroadcaster //===================================================================== typedef ::cppu::ImplHelper1 < ::com::sun::star::sdb::XSQLErrorBroadcaster > OErrorBroadcaster_BASE; class OErrorBroadcaster : public OErrorBroadcaster_BASE { private: ::cppu::OBroadcastHelper& m_rBHelper; ::cppu::OInterfaceContainerHelper m_aErrorListeners; protected: OErrorBroadcaster( ::cppu::OBroadcastHelper& _rBHelper ); virtual ~OErrorBroadcaster( ); void SAL_CALL disposing(); void SAL_CALL onError( const ::com::sun::star::sdbc::SQLException& _rException, const ::rtl::OUString& _rContextDescription ); void SAL_CALL onError( const ::com::sun::star::sdb::SQLErrorEvent& _rException ); protected: // XSQLErrorBroadcaster virtual void SAL_CALL addSQLErrorListener( const ::com::sun::star::uno::Reference< ::com::sun::star::sdb::XSQLErrorListener >& _rListener ) throw (::com::sun::star::uno::RuntimeException); virtual void SAL_CALL removeSQLErrorListener( const ::com::sun::star::uno::Reference< ::com::sun::star::sdb::XSQLErrorListener >& _rListener ) throw (::com::sun::star::uno::RuntimeException); }; //......................................................................... } // namespace frm //......................................................................... #endif // FORMS_ERRORBROADCASTER_HXX
808
341
# -*- coding: utf-8 -*- from __future__ import division import numpy as np from scipy import sparse from pygsp import utils logger = utils.build_logger(__name__) class DifferenceMixIn(object): @property def D(self): r"""Differential operator (for gradient and divergence). Is computed by :func:`compute_differential_operator`. """ if self._D is None: self.logger.warning('The differential operator G.D is not ' 'available, we need to compute it. Explicitly ' 'call G.compute_differential_operator() ' 'once beforehand to suppress the warning.') self.compute_differential_operator() return self._D def compute_differential_operator(self): r"""Compute the graph differential operator (cached). The differential operator is the matrix :math:`D` such that .. math:: L = D D^\top, where :math:`L` is the graph Laplacian (combinatorial or normalized). It is used to compute the gradient and the divergence of graph signals (see :meth:`grad` and :meth:`div`). The differential operator computes the gradient and divergence of signals, and the Laplacian computes the divergence of the gradient, as follows: .. math:: z = L x = D y = D D^\top x, where :math:`y = D^\top x = \nabla_\mathcal{G} x` is the gradient of :math:`x` and :math:`z = D y = \operatorname{div}_\mathcal{G} y` is the divergence of :math:`y`. See :meth:`grad` and :meth:`div` for details. The difference operator is actually an incidence matrix of the graph, defined as .. math:: D[i, k] = \begin{cases} -\sqrt{W[i, j] / 2} & \text{if } e_k = (v_i, v_j) \text{ for some } j, \\ +\sqrt{W[i, j] / 2} & \text{if } e_k = (v_j, v_i) \text{ for some } j, \\ 0 & \text{otherwise} \end{cases} for the combinatorial Laplacian, and .. math:: D[i, k] = \begin{cases} -\sqrt{W[i, j] / 2 / d[i]} & \text{if } e_k = (v_i, v_j) \text{ for some } j, \\ +\sqrt{W[i, j] / 2 / d[i]} & \text{if } e_k = (v_j, v_i) \text{ for some } j, \\ 0 & \text{otherwise} \end{cases} for the normalized Laplacian, where :math:`v_i \in \mathcal{V}` is a vertex, :math:`e_k = (v_i, v_j) \in \mathcal{E}` is an edge from :math:`v_i` to :math:`v_j`, :math:`W[i, j]` is the weight :attr:`W` of the edge :math:`(v_i, v_j)`, :math:`d[i]` is the degree :attr:`dw` of vertex :math:`v_i`. For undirected graphs, only half the edges are kept (the upper triangular part of the adjacency matrix) in the interest of space and time. In that case, the :math:`1/\sqrt{2}` factor disappears from the above equations for :math:`L = D D^\top` to stand at all times. The result is cached and accessible by the :attr:`D` property. See Also -------- grad : compute the gradient div : compute the divergence Examples -------- The difference operator is an incidence matrix. Example with a undirected graph. >>> graph = graphs.Graph([ ... [0, 2, 0], ... [2, 0, 1], ... [0, 1, 0], ... ]) >>> graph.compute_laplacian('combinatorial') >>> graph.compute_differential_operator() >>> graph.D.toarray() array([[-1.41421356, 0. ], [ 1.41421356, -1. ], [ 0. , 1. ]]) >>> graph.compute_laplacian('normalized') >>> graph.compute_differential_operator() >>> graph.D.toarray() array([[-1. , 0. ], [ 0.81649658, -0.57735027], [ 0. , 1. ]]) Example with a directed graph. >>> graph = graphs.Graph([ ... [0, 2, 0], ... [2, 0, 1], ... [0, 0, 0], ... ]) >>> graph.compute_laplacian('combinatorial') >>> graph.compute_differential_operator() >>> graph.D.toarray() array([[-1. , 1. , 0. ], [ 1. , -1. , -0.70710678], [ 0. , 0. 
, 0.70710678]]) >>> graph.compute_laplacian('normalized') >>> graph.compute_differential_operator() >>> graph.D.toarray() array([[-0.70710678, 0.70710678, 0. ], [ 0.63245553, -0.63245553, -0.4472136 ], [ 0. , 0. , 1. ]]) The graph Laplacian acts on a signal as the divergence of the gradient. >>> G = graphs.Logo() >>> G.compute_differential_operator() >>> s = np.random.default_rng().normal(size=G.N) >>> s_grad = G.D.T.dot(s) >>> s_lap = G.D.dot(s_grad) >>> np.linalg.norm(s_lap - G.L.dot(s)) < 1e-10 True """ sources, targets, weights = self.get_edge_list() n = self.n_edges rows = np.concatenate([sources, targets]) columns = np.concatenate([np.arange(n), np.arange(n)]) values = np.empty(2*n) if self.lap_type == 'combinatorial': values[:n] = -np.sqrt(weights) values[n:] = -values[:n] elif self.lap_type == 'normalized': values[:n] = -np.sqrt(weights / self.dw[sources]) values[n:] = +np.sqrt(weights / self.dw[targets]) else: raise ValueError('Unknown lap_type {}'.format(self.lap_type)) if self.is_directed(): values /= np.sqrt(2) self._D = sparse.csc_matrix((values, (rows, columns)), shape=(self.n_vertices, self.n_edges)) self._D.eliminate_zeros() # Self-loops introduce stored zeros. def grad(self, x): r"""Compute the gradient of a signal defined on the vertices. The gradient :math:`y` of a signal :math:`x` is defined as .. math:: y = \nabla_\mathcal{G} x = D^\top x, where :math:`D` is the differential operator :attr:`D`. The value of the gradient on the edge :math:`e_k = (v_i, v_j)` from :math:`v_i` to :math:`v_j` with weight :math:`W[i, j]` is .. math:: y[k] = D[i, k] x[i] + D[j, k] x[j] = \sqrt{\frac{W[i, j]}{2}} (x[j] - x[i]) for the combinatorial Laplacian, and .. math:: y[k] = \sqrt{\frac{W[i, j]}{2}} \left( \frac{x[j]}{\sqrt{d[j]}} - \frac{x[i]}{\sqrt{d[i]}} \right) for the normalized Laplacian. For undirected graphs, only half the edges are kept and the :math:`1/\sqrt{2}` factor disappears from the above equations. See :meth:`compute_differential_operator` for details. Parameters ---------- x : array_like Signal of length :attr:`n_vertices` living on the vertices. Returns ------- y : ndarray Gradient signal of length :attr:`n_edges` living on the edges. See Also -------- compute_differential_operator div : compute the divergence of an edge signal dirichlet_energy : compute the norm of the gradient Examples -------- Non-directed graph and combinatorial Laplacian: >>> graph = graphs.Path(4, directed=False, lap_type='combinatorial') >>> graph.compute_differential_operator() >>> graph.grad([0, 2, 4, 2]) array([ 2., 2., -2.]) Directed graph and combinatorial Laplacian: >>> graph = graphs.Path(4, directed=True, lap_type='combinatorial') >>> graph.compute_differential_operator() >>> graph.grad([0, 2, 4, 2]) array([ 1.41421356, 1.41421356, -1.41421356]) Non-directed graph and normalized Laplacian: >>> graph = graphs.Path(4, directed=False, lap_type='normalized') >>> graph.compute_differential_operator() >>> graph.grad([0, 2, 4, 2]) array([ 1.41421356, 1.41421356, -0.82842712]) Directed graph and normalized Laplacian: >>> graph = graphs.Path(4, directed=True, lap_type='normalized') >>> graph.compute_differential_operator() >>> graph.grad([0, 2, 4, 2]) array([ 1.41421356, 1.41421356, -0.82842712]) """ x = self._check_signal(x) return self.D.T.dot(x) def div(self, y): r"""Compute the divergence of a signal defined on the edges. The divergence :math:`z` of a signal :math:`y` is defined as .. math:: z = \operatorname{div}_\mathcal{G} y = D y, where :math:`D` is the differential operator :attr:`D`. 
The value of the divergence on the vertex :math:`v_i` is .. math:: z[i] = \sum_k D[i, k] y[k] = \sum_{\{k,j | e_k=(v_j, v_i) \in \mathcal{E}\}} \sqrt{\frac{W[j, i]}{2}} y[k] - \sum_{\{k,j | e_k=(v_i, v_j) \in \mathcal{E}\}} \sqrt{\frac{W[i, j]}{2}} y[k] for the combinatorial Laplacian, and .. math:: z[i] = \sum_k D[i, k] y[k] = \sum_{\{k,j | e_k=(v_j, v_i) \in \mathcal{E}\}} \sqrt{\frac{W[j, i]}{2 d[i]}} y[k] - \sum_{\{k,j | e_k=(v_i, v_j) \in \mathcal{E}\}} \sqrt{\frac{W[i, j]}{2 d[i]}} y[k] for the normalized Laplacian. For undirected graphs, only half the edges are kept and the :math:`1/\sqrt{2}` factor disappears from the above equations. See :meth:`compute_differential_operator` for details. Parameters ---------- y : array_like Signal of length :attr:`n_edges` living on the edges. Returns ------- z : ndarray Divergence signal of length :attr:`n_vertices` living on the vertices. See Also -------- compute_differential_operator grad : compute the gradient of a vertex signal Examples -------- Non-directed graph and combinatorial Laplacian: >>> graph = graphs.Path(4, directed=False, lap_type='combinatorial') >>> graph.compute_differential_operator() >>> graph.div([2, -2, 0]) array([-2., 4., -2., 0.]) Directed graph and combinatorial Laplacian: >>> graph = graphs.Path(4, directed=True, lap_type='combinatorial') >>> graph.compute_differential_operator() >>> graph.div([2, -2, 0]) array([-1.41421356, 2.82842712, -1.41421356, 0. ]) Non-directed graph and normalized Laplacian: >>> graph = graphs.Path(4, directed=False, lap_type='normalized') >>> graph.compute_differential_operator() >>> graph.div([2, -2, 0]) array([-2. , 2.82842712, -1.41421356, 0. ]) Directed graph and normalized Laplacian: >>> graph = graphs.Path(4, directed=True, lap_type='normalized') >>> graph.compute_differential_operator() >>> graph.div([2, -2, 0]) array([-2. , 2.82842712, -1.41421356, 0. ]) """ y = np.asanyarray(y) if y.shape[0] != self.Ne: raise ValueError('First dimension must be the number of edges ' 'G.Ne = {}, got {}.'.format(self.Ne, y.shape)) return self.D.dot(y)
5,834
476
from .support import HPyTest class TestTuple(HPyTest): def test_Check(self): mod = self.make_module(""" HPyDef_METH(f, "f", f_impl, HPyFunc_O) static HPy f_impl(HPyContext *ctx, HPy self, HPy arg) { if (HPyTuple_Check(ctx, arg)) return HPy_Dup(ctx, ctx->h_True); return HPy_Dup(ctx, ctx->h_False); } @EXPORT(f) @INIT """) class MyTuple(tuple): pass assert mod.f(()) is True assert mod.f([]) is False assert mod.f(MyTuple()) is True def test_FromArray(self): mod = self.make_module(""" HPyDef_METH(f, "f", f_impl, HPyFunc_O) static HPy f_impl(HPyContext *ctx, HPy self, HPy arg) { HPy x = HPyLong_FromLong(ctx, 42); if (HPy_IsNull(x)) return HPy_NULL; HPy items[] = {self, arg, x}; HPy res = HPyTuple_FromArray(ctx, items, 3); HPy_Close(ctx, x); return res; } @EXPORT(f) @INIT """) assert mod.f('hello') == (mod, 'hello', 42) def test_Pack(self): mod = self.make_module(""" HPyDef_METH(f, "f", f_impl, HPyFunc_O) static HPy f_impl(HPyContext *ctx, HPy self, HPy arg) { HPy x = HPyLong_FromLong(ctx, 42); if (HPy_IsNull(x)) return HPy_NULL; HPy result = HPyTuple_Pack(ctx, 3, self, arg, x); HPy_Close(ctx, x); return result; } @EXPORT(f) @INIT """) assert mod.f('hello') == (mod, 'hello', 42) def test_TupleBuilder(self): mod = self.make_module(""" HPyDef_METH(f, "f", f_impl, HPyFunc_O) static HPy f_impl(HPyContext *ctx, HPy h_self, HPy h_arg) { HPyTupleBuilder builder = HPyTupleBuilder_New(ctx, 3); HPyTupleBuilder_Set(ctx, builder, 0, h_arg); HPyTupleBuilder_Set(ctx, builder, 1, ctx->h_True); HPy h_num = HPyLong_FromLong(ctx, -42); if (HPy_IsNull(h_num)) { HPyTupleBuilder_Cancel(ctx, builder); return HPy_NULL; } HPyTupleBuilder_Set(ctx, builder, 2, h_num); HPy_Close(ctx, h_num); HPy h_tuple = HPyTupleBuilder_Build(ctx, builder); return h_tuple; } @EXPORT(f) @INIT """) assert mod.f("xy") == ("xy", True, -42)
1,669
796
<filename>src/maple_me/src/me_alias_class.cpp /* * Copyright (c) [2019-2020] Huawei Technologies Co.,Ltd.All rights reserved. * * OpenArkCompiler is licensed under the Mulan PSL v1. * You can use this software according to the terms and conditions of the Mulan PSL v1. * You may obtain a copy of Mulan PSL v1 at: * * http://license.coscl.org.cn/MulanPSL * * THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR * FIT FOR A PARTICULAR PURPOSE. * See the Mulan PSL v1 for more details. */ #include "me_alias_class.h" #include "me_option.h" #include "mpl_logging.h" #include "ssa_mir_nodes.h" #include "ssa_tab.h" #include "me_function.h" #include "mpl_timer.h" namespace maple { // This phase performs alias analysis based on Steensgaard's algorithm and // represent the resulting alias relationships in the Maple IR representation bool MeAliasClass::HasWriteToStaticFinal() const { for (auto bIt = func.valid_begin(); bIt != func.valid_end(); ++bIt) { for (const auto &stmt : (*bIt)->GetStmtNodes()) { if (stmt.GetOpCode() == OP_dassign) { const auto &dassignNode = static_cast<const DassignNode&>(stmt); if (dassignNode.GetStIdx().IsGlobal()) { const MIRSymbol *sym = mirModule.CurFunction()->GetLocalOrGlobalSymbol(dassignNode.GetStIdx()); if (sym->IsFinal()) { return true; } } } } } return false; } void MeAliasClass::DoAliasAnalysis() { for (auto bIt = func.valid_begin(); bIt != func.valid_end(); ++bIt) { for (auto &stmt : (*bIt)->GetStmtNodes()) { ApplyUnionForCopies(stmt); } } CreateAssignSets(); if (enabledDebug) { DumpAssignSets(); } ReinitUnionFind(); if (MeOption::noSteensgaard) { UnionAllPointedTos(); } else { ApplyUnionForPointedTos(); UnionForNotAllDefsSeen(); } // TBAA if (!MeOption::noTBAA && mirModule.IsJavaModule()) { ReconstructAliasGroups(); } CreateClassSets(); if (enabledDebug) { DumpClassSets(); } // pass 2 through the program statements if (enabledDebug) { LogInfo::MapleLogger() << "\n============ Alias Classification Pass 2 ============" << '\n'; } for (auto bIt = func.valid_begin(); bIt != func.valid_end(); ++bIt) { auto *bb = *bIt; for (auto &stmt : bb->GetStmtNodes()) { GenericInsertMayDefUse(stmt, bb->GetBBId()); } } } AnalysisResult *MeDoAliasClass::Run(MeFunction *func, MeFuncResultMgr *funcResMgr, ModuleResultMgr *moduleResMgr) { MPLTimer timer; timer.Start(); (void)funcResMgr->GetAnalysisResult(MeFuncPhase_SSATAB, func); MemPool *aliasClassMp = NewMemPool(); auto *kh = static_cast<KlassHierarchy*>(moduleResMgr->GetAnalysisResult( MoPhase_CHA, &func->GetMIRModule())); auto *aliasClass = aliasClassMp->New<MeAliasClass>( *aliasClassMp, func->GetMIRModule(), *func->GetMeSSATab(), *func, MeOption::lessThrowAlias, MeOption::ignoreIPA, DEBUGFUNC(func), MeOption::setCalleeHasSideEffect, kh); // pass 1 through the program statements if (DEBUGFUNC(func)) { LogInfo::MapleLogger() << "\n============ Alias Classification Pass 1 ============" << '\n'; } aliasClass->DoAliasAnalysis(); timer.Stop(); if (DEBUGFUNC(func)) { LogInfo::MapleLogger() << "ssaTab + aliasClass passes consume cumulatively " << timer.Elapsed() << "seconds " << '\n'; } return aliasClass; } } // namespace maple
1,359
2,059
/******************************************************************* * File automatically generated by rebuild_wrappers.py (v2.0.0.10) * *******************************************************************/ #ifndef __wrappedlibxtstUNDEFS_H_ #define __wrappedlibxtstUNDEFS_H_ #endif // __wrappedlibxtstUNDEFS_H_
82
4,335
<filename>components/ai/include/nnacl/int8/deconv.h /** * Copyright 2020 Huawei Technologies Co., Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #ifndef MINDSPORE_LITE_NNACL_INT8_DECONV_H_ #define MINDSPORE_LITE_NNACL_INT8_DECONV_H_ #include <string.h> #include "nnacl/pack.h" #include "nnacl/op_base.h" #include "nnacl/errorcode.h" #include "nnacl/conv_parameter.h" #include "nnacl/common_func.h" #include "nnacl/int8/matmul_int8.h" #ifdef __cplusplus extern "C" { #endif void DeConvPackWeightSum(int8_t *weight, int32_t *weight_sum, int32_t input_zp, int32_t filter_zp, int deep16, int col4, bool suppport_opt); void DeConvPackInputSum(const int8_t *src, int32_t *dst, int32_t filter_zp, size_t row4, size_t col16, bool suppport_opt); void DeConvWeightTransInt8(int8_t *src, int8_t *dst, int input_channel, int output_channel, int plane, bool support_optimize_); int DeConvInt8(const int8_t *input, const int8_t *weight, int32_t *output, int32_t *weight_sum, int32_t *input_sum, size_t act_row, size_t act_col, size_t act_deep, ConvParameter *conv_param, MATMUL_OPT_R4_FUNC matmul_func); int DeConvPostInt8(const int32_t *src, const int32_t *bias, int32_t *tmp, int8_t *out, int output_channel, ConvParameter *conv_param, bool support_optimize); #ifdef __cplusplus } #endif #endif // MINDSPORE_LITE_NNACL_INT8_DECONV_H_
806
1,797
/* Copyright (c) 2015-2016, Apple Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of the copyright holder(s) nor the names of any contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ // LZVN low-level encoder #include "lzvn_encode_base.h" #if defined(_MSC_VER) && !defined(__clang__) # define restrict __restrict #endif // =============================================================== // Coarse/fine copy, non overlapping buffers /*! @abstract Copy at least \p nbytes bytes from \p src to \p dst, by blocks * of 8 bytes (may go beyond range). No overlap. * @return \p dst + \p nbytes. */ static inline unsigned char *lzvn_copy64(unsigned char *restrict dst, const unsigned char *restrict src, size_t nbytes) { for (size_t i = 0; i < nbytes; i += 8) store8(dst + i, load8(src + i)); return dst + nbytes; } /*! @abstract Copy exactly \p nbytes bytes from \p src to \p dst (respects range). * No overlap. * @return \p dst + \p nbytes. */ static inline unsigned char *lzvn_copy8(unsigned char *restrict dst, const unsigned char *restrict src, size_t nbytes) { for (size_t i = 0; i < nbytes; i++) dst[i] = src[i]; return dst + nbytes; } /*! @abstract Emit (L,0,0) instructions (final literal). * We read at most \p L bytes from \p p. * @param p input stream * @param q1 the first byte after the output buffer. * @return pointer to the next output, <= \p q1. * @return \p q1 if output is full. In that case, output will be partially invalid. */ static inline unsigned char *emit_literal(const unsigned char *p, unsigned char *q, unsigned char *q1, size_t L) { size_t x; while (L > 15) { x = L < 271 ? L : 271; if (q + x + 10 >= q1) goto OUT_FULL; store2(q, 0xE0 + ((x - 16) << 8)); q += 2; L -= x; q = lzvn_copy8(q, p, x); p += x; } if (L > 0) { if (q + L + 10 >= q1) goto OUT_FULL; *q++ = 0xE0 + L; // 1110LLLL q = lzvn_copy8(q, p, L); } return q; OUT_FULL: return q1; } /*! @abstract Emit (L,M,D) instructions. M>=3. * @param p input stream pointing to the beginning of the literal. We read at * most \p L+4 bytes from \p p. * @param q1 the first byte after the output buffer. * @return pointer to the next output, <= \p q1. * @return \p q1 if output is full. In that case, output will be partially invalid. 
*/ static inline unsigned char *emit(const unsigned char *p, unsigned char *q, unsigned char *q1, size_t L, size_t M, size_t D, size_t D_prev) { size_t x; while (L > 15) { x = L < 271 ? L : 271; if (q + x + 10 >= q1) goto OUT_FULL; store2(q, 0xE0 + ((x - 16) << 8)); q += 2; L -= x; q = lzvn_copy64(q, p, x); p += x; } if (L > 3) { if (q + L + 10 >= q1) goto OUT_FULL; *q++ = 0xE0 + L; // 1110LLLL q = lzvn_copy64(q, p, L); p += L; L = 0; } x = M <= 10 - 2 * L ? M : 10 - 2 * L; // x = min(10-2*L,M) M -= x; x -= 3; // M = (x+3) + M' max value for x is 7-2*L // Here L<4 literals remaining, we read them here uint32_t literal = load4(p); // P is not accessed after this point // Relaxed capacity test covering all cases if (q + 8 >= q1) goto OUT_FULL; if (D == D_prev) { if (L == 0) { *q++ = 0xF0 + (x + 3); // XM! } else { *q++ = (L << 6) + (x << 3) + 6; // LLxxx110 } store4(q, literal); q += L; } else if (D < 2048 - 2 * 256) { // Short dist D>>8 in 0..5 *q++ = (D >> 8) + (L << 6) + (x << 3); // LLxxxDDD *q++ = D & 0xFF; store4(q, literal); q += L; } else if (D >= (1 << 14) || M == 0 || (x + 3) + M > 34) { // Long dist *q++ = (L << 6) + (x << 3) + 7; store2(q, D); q += 2; store4(q, literal); q += L; } else { // Medium distance x += M; M = 0; *q++ = 0xA0 + (x >> 2) + (L << 3); store2(q, D << 2 | (x & 3)); q += 2; store4(q, literal); q += L; } // Issue remaining match while (M > 15) { if (q + 2 >= q1) goto OUT_FULL; x = M < 271 ? M : 271; store2(q, 0xf0 + ((x - 16) << 8)); q += 2; M -= x; } if (M > 0) { if (q + 1 >= q1) goto OUT_FULL; *q++ = 0xF0 + M; // M = 0..15 } return q; OUT_FULL: return q1; } // =============================================================== // Conversions /*! @abstract Return 32-bit value to store for offset x. */ static inline int32_t offset_to_s32(lzvn_offset x) { return (int32_t)x; } /*! @abstract Get offset from 32-bit stored value x. */ static inline lzvn_offset offset_from_s32(int32_t x) { return (lzvn_offset)x; } // =============================================================== // Hash and Matching /*! @abstract Get hash in range \c [0,LZVN_ENCODE_HASH_VALUES-1] from 3 bytes in i. */ static inline uint32_t hash3i(uint32_t i) { i &= 0xffffff; // truncate to 24-bit input (slightly increases compression ratio) uint32_t h = (i * (1 + (1 << 6) + (1 << 12))) >> 12; return h & (LZVN_ENCODE_HASH_VALUES - 1); } /*! @abstract Return the number [0, 4] of zero bytes in \p x, starting from the * least significant byte. */ static inline lzvn_offset trailing_zero_bytes(uint32_t x) { return (x == 0) ? 4 : (__builtin_ctzl(x) >> 3); } /*! @abstract Return the number [0, 4] of matching chars between values at * \p src+i and \p src+j, starting from the least significant byte. * Assumes we can read 4 chars from each position. */ static inline lzvn_offset nmatch4(const unsigned char *src, lzvn_offset i, lzvn_offset j) { uint32_t vi = load4(src + i); uint32_t vj = load4(src + j); return trailing_zero_bytes(vi ^ vj); } /*! @abstract Check if l_begin, m_begin, m0_begin (m0_begin < m_begin) can be * expanded to a match of length at least 3. * @param m_begin new string to match. * @param m0_begin candidate old string. * @param src source buffer, with valid indices src_begin <= i < src_end. * (src_begin may be <0) * @return If a match can be found, return 1 and set all \p match fields, * otherwise return 0. * @note \p *match should be 0 before the call. 
*/ static inline int lzvn_find_match(const unsigned char *src, lzvn_offset src_begin, lzvn_offset src_end, lzvn_offset l_begin, lzvn_offset m0_begin, lzvn_offset m_begin, lzvn_match_info *match) { lzvn_offset n = nmatch4(src, m_begin, m0_begin); if (n < 3) return 0; // no match lzvn_offset D = m_begin - m0_begin; // actual distance if (D <= 0 || D > LZVN_ENCODE_MAX_DISTANCE) return 0; // distance out of range // Expand forward lzvn_offset m_end = m_begin + n; while (n == 4 && m_end + 4 < src_end) { n = nmatch4(src, m_end, m_end - D); m_end += n; } // Expand backwards over literal while (m0_begin > src_begin && m_begin > l_begin && src[m_begin - 1] == src[m0_begin - 1]) { m0_begin--; m_begin--; } // OK, we keep it, update MATCH lzvn_offset M = m_end - m_begin; // match length match->m_begin = m_begin; match->m_end = m_end; match->K = M - ((D < 0x600) ? 2 : 3); match->M = M; match->D = D; return 1; // OK } /*! @abstract Same as lzvn_find_match, but we already know that N bytes do * match (N<=4). */ static inline int lzvn_find_matchN(const unsigned char *src, lzvn_offset src_begin, lzvn_offset src_end, lzvn_offset l_begin, lzvn_offset m0_begin, lzvn_offset m_begin, lzvn_offset n, lzvn_match_info *match) { // We can skip the first comparison on 4 bytes if (n < 3) return 0; // no match lzvn_offset D = m_begin - m0_begin; // actual distance if (D <= 0 || D > LZVN_ENCODE_MAX_DISTANCE) return 0; // distance out of range // Expand forward lzvn_offset m_end = m_begin + n; while (n == 4 && m_end + 4 < src_end) { n = nmatch4(src, m_end, m_end - D); m_end += n; } // Expand backwards over literal while (m0_begin > src_begin && m_begin > l_begin && src[m_begin - 1] == src[m0_begin - 1]) { m0_begin--; m_begin--; } // OK, we keep it, update MATCH lzvn_offset M = m_end - m_begin; // match length match->m_begin = m_begin; match->m_end = m_end; match->K = M - ((D < 0x600) ? 2 : 3); match->M = M; match->D = D; return 1; // OK } // =============================================================== // Encoder Backend /*! @abstract Emit a match and update state. * @return number of bytes written to \p dst. May be 0 if there is no more space * in \p dst to emit the match. */ static inline lzvn_offset lzvn_emit_match(lzvn_encoder_state *state, lzvn_match_info match) { size_t L = (size_t)(match.m_begin - state->src_literal); // literal count size_t M = (size_t)match.M; // match length size_t D = (size_t)match.D; // match distance size_t D_prev = (size_t)state->d_prev; // previously emitted match distance unsigned char *dst = emit(state->src + state->src_literal, state->dst, state->dst_end, L, M, D, D_prev); // Check if DST is full if (dst >= state->dst_end) { return 0; // FULL } // Update state lzvn_offset dst_used = dst - state->dst; state->d_prev = match.D; state->dst = dst; state->src_literal = match.m_end; return dst_used; } /*! @abstract Emit a n-bytes literal and update state. * @return number of bytes written to \p dst. May be 0 if there is no more space * in \p dst to emit the literal. */ static inline lzvn_offset lzvn_emit_literal(lzvn_encoder_state *state, lzvn_offset n) { size_t L = (size_t)n; unsigned char *dst = emit_literal(state->src + state->src_literal, state->dst, state->dst_end, L); // Check if DST is full if (dst >= state->dst_end) return 0; // FULL // Update state lzvn_offset dst_used = dst - state->dst; state->dst = dst; state->src_literal += n; return dst_used; } /*! @abstract Emit end-of-stream and update state. * @return number of bytes written to \p dst. 
May be 0 if there is no more space * in \p dst to emit the instruction. */ static inline lzvn_offset lzvn_emit_end_of_stream(lzvn_encoder_state *state) { // Do we have 8 byte in dst? if (state->dst_end < state->dst + 8) return 0; // FULL // Insert end marker and update state store8(state->dst, 0x06); // end-of-stream command state->dst += 8; return 8; // dst_used } // =============================================================== // Encoder Functions /*! @abstract Initialize encoder table in \p state, uses current I/O parameters. */ static inline void lzvn_init_table(lzvn_encoder_state *state) { lzvn_offset index = -LZVN_ENCODE_MAX_DISTANCE; // max match distance if (index < state->src_begin) index = state->src_begin; uint32_t value = load4(state->src + index); lzvn_encode_entry_type e; for (int i = 0; i < 4; i++) { e.indices[i] = offset_to_s32(index); e.values[i] = value; } for (int u = 0; u < LZVN_ENCODE_HASH_VALUES; u++) state->table[u] = e; // fill entire table } void lzvn_encode(lzvn_encoder_state *state) { const lzvn_match_info NO_MATCH = {0}; for (; state->src_current < state->src_current_end; state->src_current++) { // Get 4 bytes at src_current uint32_t vi = load4(state->src + state->src_current); // Compute new hash H at position I, and push value into position table int h = hash3i(vi); // index of first entry // Read table entries for H lzvn_encode_entry_type e = state->table[h]; // Update entry with index=current and value=vi lzvn_encode_entry_type updated_e; // rotate values, so we will replace the oldest updated_e.indices[0] = offset_to_s32(state->src_current); updated_e.indices[1] = e.indices[0]; updated_e.indices[2] = e.indices[1]; updated_e.indices[3] = e.indices[2]; updated_e.values[0] = vi; updated_e.values[1] = e.values[0]; updated_e.values[2] = e.values[1]; updated_e.values[3] = e.values[2]; // Do not check matches if still in previously emitted match if (state->src_current < state->src_literal) goto after_emit; // Update best with candidate if better #define UPDATE(best, candidate) \ do { \ if (candidate.K > best.K || \ ((candidate.K == best.K) && (candidate.m_end > best.m_end + 1))) { \ best = candidate; \ } \ } while (0) // Check candidate. Keep if better. #define CHECK_CANDIDATE(ik, nk) \ do { \ lzvn_match_info m1; \ if (lzvn_find_matchN(state->src, state->src_begin, state->src_end, \ state->src_literal, ik, state->src_current, nk, &m1)) { \ UPDATE(incoming, m1); \ } \ } while (0) // Emit match M. Return if we don't have enough space in the destination buffer #define EMIT_MATCH(m) \ do { \ if (lzvn_emit_match(state, m) == 0) \ return; \ } while (0) // Emit literal of length L. 
Return if we don't have enough space in the // destination buffer #define EMIT_LITERAL(l) \ do { \ if (lzvn_emit_literal(state, l) == 0) \ return; \ } while (0) lzvn_match_info incoming = NO_MATCH; // Check candidates in order (closest first) uint32_t diffs[4]; for (int k = 0; k < 4; k++) diffs[k] = e.values[k] ^ vi; // XOR, 0 if equal lzvn_offset ik; // index lzvn_offset nk; // match byte count // The values stored in e.xyzw are 32-bit signed indices, extended to signed // type lzvn_offset ik = offset_from_s32(e.indices[0]); nk = trailing_zero_bytes(diffs[0]); CHECK_CANDIDATE(ik, nk); ik = offset_from_s32(e.indices[1]); nk = trailing_zero_bytes(diffs[1]); CHECK_CANDIDATE(ik, nk); ik = offset_from_s32(e.indices[2]); nk = trailing_zero_bytes(diffs[2]); CHECK_CANDIDATE(ik, nk); ik = offset_from_s32(e.indices[3]); nk = trailing_zero_bytes(diffs[3]); CHECK_CANDIDATE(ik, nk); // Check candidate at previous distance if (state->d_prev != 0) { lzvn_match_info m1; if (lzvn_find_match(state->src, state->src_begin, state->src_end, state->src_literal, state->src_current - state->d_prev, state->src_current, &m1)) { m1.K = m1.M - 1; // fix K for D_prev UPDATE(incoming, m1); } } // Here we have the best candidate in incoming, may be NO_MATCH // If no incoming match, and literal backlog becomes too high, emit pending // match, or literals if there is no pending match if (incoming.M == 0) { if (state->src_current - state->src_literal >= LZVN_ENCODE_MAX_LITERAL_BACKLOG) // at this point, we always have // current >= literal { if (state->pending.M != 0) { EMIT_MATCH(state->pending); state->pending = NO_MATCH; } else { EMIT_LITERAL(271); // emit long literal (271 is the longest literal size we allow) } } goto after_emit; } if (state->pending.M == 0) { // NOTE. Here, we can also emit incoming right away. It will make the // encoder 1.5x faster, at a cost of ~10% lower compression ratio: // EMIT_MATCH(incoming); // state->pending = NO_MATCH; // No pending match, emit nothing, keep incoming state->pending = incoming; } else { // Here we have both incoming and pending if (state->pending.m_end <= incoming.m_begin) { // No overlap: emit pending, keep incoming EMIT_MATCH(state->pending); state->pending = incoming; } else { // If pending is better, emit pending and discard incoming. // Otherwise, emit incoming and discard pending. if (incoming.K > state->pending.K) state->pending = incoming; EMIT_MATCH(state->pending); state->pending = NO_MATCH; } } after_emit: // We commit state changes only after we tried to emit instructions, so we // can restart in the same state in case dst was full and we quit the loop. state->table[h] = updated_e; } // i loop // Do not emit pending match here. We do it only at the end of stream. } // =============================================================== // API entry points size_t lzvn_encode_scratch_size(void) { return LZVN_ENCODE_WORK_SIZE; } static size_t lzvn_encode_partial(void *__restrict dst, size_t dst_size, const void *__restrict src, size_t src_size, size_t *src_used, void *__restrict work) { // Min size checks to avoid accessing memory outside buffers. if (dst_size < LZVN_ENCODE_MIN_DST_SIZE) { *src_used = 0; return 0; } // Max input size check (limit to offsets on uint32_t). 
if (src_size > LZVN_ENCODE_MAX_SRC_SIZE) { src_size = LZVN_ENCODE_MAX_SRC_SIZE; } // Setup encoder state lzvn_encoder_state state; memset(&state, 0, sizeof(state)); state.src = src; state.src_begin = 0; state.src_end = (lzvn_offset)src_size; state.src_literal = 0; state.src_current = 0; state.dst = dst; state.dst_begin = dst; state.dst_end = (unsigned char *)dst + dst_size - 8; // reserve 8 bytes for end-of-stream state.table = work; // Do not encode if the input buffer is too small. We'll emit a literal instead. if (src_size >= LZVN_ENCODE_MIN_SRC_SIZE) { state.src_current_end = (lzvn_offset)src_size - LZVN_ENCODE_MIN_MARGIN; lzvn_init_table(&state); lzvn_encode(&state); } // No need to test the return value: src_literal will not be updated on failure, // and we will fail later. lzvn_emit_literal(&state, state.src_end - state.src_literal); // Restore original size, so end-of-stream always succeeds, and emit it state.dst_end = (unsigned char *)dst + dst_size; lzvn_emit_end_of_stream(&state); *src_used = state.src_literal; return (size_t)(state.dst - state.dst_begin); } size_t lzvn_encode_buffer(void *__restrict dst, size_t dst_size, const void *__restrict src, size_t src_size, void *__restrict work) { size_t src_used = 0; size_t dst_used = lzvn_encode_partial(dst, dst_size, src, src_size, &src_used, work); if (src_used != src_size) return 0; // could not encode entire input stream = fail return dst_used; // return encoded size }
9,793
4,403
package cn.hutool.poi.excel.cell.setters;

import cn.hutool.poi.excel.cell.CellSetter;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.RichTextString;

/**
 * Cell setter for {@link RichTextString} values
 *
 * @author looly
 * @since 5.7.8
 */
public class RichTextCellSetter implements CellSetter {

	private final RichTextString value;

	/**
	 * Constructor
	 *
	 * @param value the value to write into the cell
	 */
	RichTextCellSetter(RichTextString value) {
		this.value = value;
	}

	@Override
	public void setValue(Cell cell) {
		cell.setCellValue(value);
	}
}
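Usage note (sketch): RichTextCellSetter simply delegates to POI's Cell.setCellValue(RichTextString), and its constructor is package-private, so within Hutool it is normally obtained through the cell-setter factory rather than instantiated directly. The plain-POI sketch below shows the equivalent effect; the sheet name, sample text and bold run are illustrative assumptions, not part of the library.

import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.Font;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.xssf.usermodel.XSSFRichTextString;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;

public class RichTextCellDemo {
	public static void main(String[] args) throws Exception {
		try (Workbook wb = new XSSFWorkbook()) {
			Sheet sheet = wb.createSheet("demo");
			Cell cell = sheet.createRow(0).createCell(0);

			// Build a rich-text value with the first word in bold.
			Font bold = wb.createFont();
			bold.setBold(true);
			XSSFRichTextString text = new XSSFRichTextString("Hello world");
			text.applyFont(0, 5, bold);

			// This is what RichTextCellSetter#setValue(Cell) does with its stored value.
			cell.setCellValue(text);
		}
	}
}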
231
852
#ifndef DQM_SiStripCommissioningClients_ApvTimingHistograms_H #define DQM_SiStripCommissioningClients_ApvTimingHistograms_H #include "DQM/SiStripCommissioningClients/interface/CommissioningHistograms.h" #include "DQMServices/Core/interface/DQMStore.h" class ApvTimingHistograms : public virtual CommissioningHistograms { public: ApvTimingHistograms(const edm::ParameterSet& pset, DQMStore*); ~ApvTimingHistograms() override; void histoAnalysis(bool debug) override; }; #endif // DQM_SiStripCommissioningClients_ApvTimingHistograms_H
192
409
<gh_stars>100-1000 package rm.com.audiogram.holder; import android.support.v7.widget.RecyclerView; import android.view.View; import android.widget.ImageView; import butterknife.BindView; import butterknife.ButterKnife; import rm.com.audiogram.R; import rm.com.audiogram.entity.Record; import rm.com.audiowave.AudioWaveView; /** * Created by alex */ public final class RecordHolder extends RecyclerView.ViewHolder { @BindView(R.id.audio_button_play) ImageView play; @BindView(R.id.audio_wave) AudioWaveView wave; public RecordHolder(View itemView) { super(itemView); ButterKnife.bind(this, itemView); } public final void bind(Record item) { wave.setScaledData(new byte[0]); wave.setProgress(0); wave.setRawData(item.raw); } }
274
1,104
{ "Attributes": "Atributos", "Body": "Cuerpo", "Tech": "Tecnología", "Heart": "Corazón", "Mind": "Mente", "Conditions": "Condiciones", "Upset": "Disgustada/o", "Scared": "Asustada/o", "Exhausted": "Exhausta/o", "Injured": "Herido/a", "Broken": "Derrotada/o", "Skills": "Habilidades", "Sneak (Body)": "Sigilo (Cuerpo)", "Force (Body)": "Fuerza (Cuerpo)", "Move (Body)": "Moverse (Cuerpo)", "Tinker (Tech)": "Trastear (Tecnología)", "Program (Tech)": "Programar (Tecnología)", "Calculate (Tech)": "Calcular (Tecnología)", "Contact (Heart)": "Contactos (Corazón)", "Charm (Heart)": "Encanto (Corazón)", "Lead (Heart)": "Liderazgo (Corazón)", "Investigate (Mind)": "Investigar (Mente)", "Comprehend (Mind)": "Comprender (Mente)", "Empathize (Mind)": "Empatía (Mente)", "Experience": "Experiencia", "Name": "Nombre", "Type": "Tipo", "Age": "Edad", "Luck": "Suerte", "Drive": "Motivación", "Problem": "Problema", "Pride": "Orgullo", "Description": "Descripción", "Favourite Song": "Canción favorita", "Anchor": "Apoyo", "Relationships": "Relaciones", "PC": "PJ", "NPC": "PNJ", "Items": "Objetos", "Bonus": "Bonificador", "Iconic Item": "Objeto icónico", "Hideout": "Escondite", "Notes": "Notas", "Reroll": "Tirar otra vez", "Spend a point of luck, or take a condition": "Gasta un punto de suerte, o ponte una condición" }
702
990
# # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from pyspark.ml.util import JavaMLWritable, JavaMLReadable class H2OStageBase(JavaMLReadable, JavaMLWritable): # Set default values directly from Scala so we don't have to duplicate it on PySpark side def _setDefaultValuesFromJava(self): for paramPair in self._java_obj.extractParamMap().toList(): paramName = paramPair.param().name() paramValue = self._java_obj.getDefault(paramPair.param()).get() param = getattr(self, paramName) self._defaultParamMap[param] = param.typeConverter(paramValue) return self # Override of _set method # Spark's _set method skips parameters with None values, but we want to validate them as well def _set(self, **kwargs): """ Sets user-supplied params. """ for param, value in kwargs.items(): p = getattr(self, param) try: value = p.typeConverter(value) except TypeError as e: raise TypeError('Invalid param value given for param "%s". %s' % (p.name, e)) self._paramMap[p] = value return self
667
1,125
<filename>server/src/main/java/org/elasticsearch/index/fielddata/MultiGeoPointValues.java /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.fielddata; import org.elasticsearch.common.geo.GeoPoint; import java.io.IOException; /** * A stateful lightweight per document set of {@link GeoPoint} values. * To iterate over values in a document use the following pattern: * <pre> * GeoPointValues values = ..; * values.setDocId(docId); * final int numValues = values.count(); * for (int i = 0; i &lt; numValues; i++) { * GeoPoint value = values.valueAt(i); * // process value * } * </pre> * The set of values associated with a document might contain duplicates and * comes in a non-specified order. */ public abstract class MultiGeoPointValues { /** * Creates a new {@link MultiGeoPointValues} instance */ protected MultiGeoPointValues() { } /** * Advance this instance to the given document id * @return true if there is a value for this document */ public abstract boolean advanceExact(int doc) throws IOException; /** * Return the number of geo points the current document has. */ public abstract int docValueCount(); /** * Return the next value associated with the current document. This must not be * called more than {@link #docValueCount()} times. * * Note: the returned {@link GeoPoint} might be shared across invocations. * * @return the next value for the current docID set to {@link #advanceExact(int)}. */ public abstract GeoPoint nextValue() throws IOException; }
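Note that the class-level javadoc above still shows the older setDocId/count/valueAt iteration pattern, while the abstract methods declared here are advanceExact, docValueCount and nextValue. A minimal consumption sketch against the declared methods looks like the following; the values instance and the document id are assumed to be supplied by surrounding field-data code.

import java.io.IOException;

import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.index.fielddata.MultiGeoPointValues;

class GeoPointIterationSketch {
	/** values and docId are assumed to come from field data elsewhere. */
	static void collect(MultiGeoPointValues values, int docId) throws IOException {
		if (values.advanceExact(docId)) {
			final int count = values.docValueCount();
			for (int i = 0; i < count; i++) {
				GeoPoint point = values.nextValue(); // may be a shared, reused instance
				// use point.lat() / point.lon() before the next call to nextValue()
			}
		}
	}
}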
715
1,350
<reponame>minaremeli/adversarial-robustness-toolbox # MIT License # # Copyright (C) The Adversarial Robustness Toolbox (ART) Authors 2020 # # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated # documentation files (the "Software"), to deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all copies or substantial portions of the # Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, # TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. from __future__ import absolute_import, division, print_function, unicode_literals import logging import numpy as np import pytest from numpy.testing import assert_array_equal from art.defences.preprocessor import VideoCompression from tests.utils import ARTTestException logger = logging.getLogger(__name__) @pytest.fixture def video_batch(channels_first): """ Video fixture of shape NFHWC and NCFHW. """ test_input = np.stack((np.zeros((3, 25, 4, 6)), np.ones((3, 25, 4, 6)))) if not channels_first: test_input = np.transpose(test_input, (0, 2, 3, 4, 1)) test_output = test_input.copy() return test_input, test_output @pytest.mark.parametrize("channels_first", [True, False]) @pytest.mark.skip_framework("keras", "pytorch", "scikitlearn", "mxnet") def test_video_compresssion(art_warning, video_batch, channels_first): try: test_input, test_output = video_batch video_compression = VideoCompression(video_format="mp4", constant_rate_factor=0, channels_first=channels_first) assert_array_equal(video_compression(test_input)[0], test_output) except ARTTestException as e: art_warning(e) @pytest.mark.skip_framework("keras", "pytorch", "scikitlearn", "mxnet") def test_compress_video_call(art_warning): try: test_input = np.arange(12).reshape((1, 3, 1, 2, 2)) video_compression = VideoCompression(video_format="mp4", constant_rate_factor=50, channels_first=True) assert np.any(np.not_equal(video_compression(test_input)[0], test_input)) except ARTTestException as e: art_warning(e) @pytest.mark.parametrize("constant_rate_factor", [-1, 52]) def test_constant_rate_factor_error(art_warning, constant_rate_factor): try: exc_msg = r"Constant rate factor must be an integer in the range \[0, 51\]." with pytest.raises(ValueError, match=exc_msg): VideoCompression(video_format="", constant_rate_factor=constant_rate_factor) except ARTTestException as e: art_warning(e) def test_non_spatio_temporal_data_error(art_warning, image_batch_small): try: test_input = image_batch_small video_compression = VideoCompression(video_format="") exc_msg = "Video compression can only be applied to spatio-temporal data." with pytest.raises(ValueError, match=exc_msg): video_compression(test_input) except ARTTestException as e: art_warning(e)
1,261
379
// Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #pragma once #include <vector> #include "core/privc/privc_context.h" #include "core/paddlefl_mpc/mpc_protocol/context_holder.h" #include "../common/paddle_tensor.h" #include "core/common/tensor_adapter_factory.h" #include "core/privc/he_triplet.h" #include "core/privc/utils.h" namespace privc { template<typename T> using TensorAdapter = common::TensorAdapter<T>; using TensorAdapterFactory = common::TensorAdapterFactory; template<size_t N> inline void fixed64_tensor_mult(const TensorAdapter<int64_t>* lhs, const TensorAdapter<int64_t>* rhs, TensorAdapter<int64_t>* ret) { std::transform(lhs->data(), lhs->data() + lhs->numel(), rhs->data(), ret->data(), [] (const int64_t& lhs, const int64_t& rhs) -> int64_t { return fixed_mult<int64_t, N>(lhs, rhs); }); } template<typename T, size_t N> class FixedPointTensor { public: explicit FixedPointTensor(TensorAdapter<T>* share_tensor); ~FixedPointTensor() {}; template<typename T_> class Type2Type { typedef T_ type; }; //get mutable shape of tensor TensorAdapter<T>* mutable_share(); const TensorAdapter<T>* share() const; size_t numel() const { return _share->numel(); } // reveal fixedpointtensor to one party void reveal_to_one(size_t party, TensorAdapter<T>* ret) const; // reveal fixedpointtensor to all parties void reveal(TensorAdapter<T>* ret) const; const std::vector<size_t> shape() const; //convert TensorAdapter to shares static void share(const TensorAdapter<T>* input, TensorAdapter<T>* output_shares[2], block seed = common::g_zero_block); // element-wise add with FixedPointTensor void add(const FixedPointTensor* rhs, FixedPointTensor* ret) const; // element-wise add with TensorAdapter void add(const TensorAdapter<T>* rhs, FixedPointTensor* ret) const; // element-wise sub with FixedPointTensor void sub(const FixedPointTensor* rhs, FixedPointTensor* ret) const; // element-wise sub with TensorAdapter void sub(const TensorAdapter<T>* rhs, FixedPointTensor* ret) const; // negative void negative(FixedPointTensor* ret) const; // exp void exp(FixedPointTensor<T, N>* ret, size_t iter = 8) const; // element-wise mul with FixedPointTensor using truncate1 void mul(const FixedPointTensor* rhs, FixedPointTensor* ret) const; // element-wise mul with TensorAdapter void mul(const TensorAdapter<T>* rhs, FixedPointTensor* ret) const; // div by TensorAdapter void div(const TensorAdapter<T>* rhs, FixedPointTensor* ret) const; // AC division for fixedpoint tesnor void long_div(const FixedPointTensor* rhs, FixedPointTensor* ret) const; //sum all element void sum(FixedPointTensor* ret) const; //reduce last dim void reduce(FixedPointTensor* ret) const; // mat_mul with FixedPointTensor void mat_mul(const FixedPointTensor* rhs, FixedPointTensor* ret) const; // mat_mul with TensorAdapter void mat_mul(const TensorAdapter<T>* rhs, FixedPointTensor* ret) const; // element-wise relu void relu(FixedPointTensor* ret) const; // element-wise sigmoid void 
sigmoid(FixedPointTensor* ret) const; // element-wise softmax void softmax(FixedPointTensor* ret, bool use_relu = false) const; // matrix argmax // return max index in one-hot void argmax(FixedPointTensor<T, N>* ret) const; private: static void to_gc_num(const TensorAdapter<int64_t>* input, size_t party_in, TensorBlock* gc_share); static void gc_add(const TensorBlock* lhs, const TensorBlock* rhs, TensorBlock* ret); // GC division for fixedpoint tensor using long division algorithm static void gc_div(const TensorBlock* lhs, const TensorBlock* rhs, TensorBlock* ret); static void abs(const TensorBlock* lhs, TensorBlock* ret); static void relu_bc(const TensorBlock* lhs, TensorAdapter<int64_t>* ret); static void argmax_one_hot(const TensorBlock* op, TensorBlock* ret); static void to_ac_num(const TensorAdapter<int64_t>* input, TensorAdapter<int64_t>* ret); static void logistic(const TensorBlock* lhs, TensorBlock* ret); TensorAdapter<T>* _share; }; } //namespace privc #include "fixedpoint_tensor_imp.h" #include "fixedpoint_tensor_gc_imp.h"
2,066
5,169
<reponame>Gantios/Specs { "name": "AssertRequest", "version": "0.1.0", "summary": "Assert Request allows you to assert that your app is making the correct network request in a given situation, without any setup.", "description": "Assert Request allows you to assert that your app is making the correct network request in a given situation, without any setup.\nThis framework works like a snapshot test. First, you run it on record mode, so every request made during test time is stored on disk. Later, when you disable recording, the framework will assert that matching requests will be made for that same test case.\nDuring test time, no requests will be actually made to the web.", "homepage": "https://github.com/lucas1295santos/AssertRequest", "license": { "type": "MIT", "file": "LICENSE" }, "authors": { "lucas1295santos": "<EMAIL>" }, "source": { "git": "https://github.com/lucas1295santos/AssertRequest.git", "tag": "0.1.0" }, "social_media_url": "https://twitter.com/oliveira__lucas", "platforms": { "ios": "10.0" }, "swift_versions": "5.0", "source_files": "AssertRequest/Classes/**/*", "xcconfig": { "FRAMEWORK_SEARCH_PATHS": "$(inherited) $(PROJECT_DIR) $(PLATFORM_DIR)/Developer/Library/Frameworks" }, "swift_version": "5.0" }
449
1,121
package com.flyco.systembardemo.ui.statusimmersive;

import android.content.Context;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.view.View;
import android.widget.TextView;

import com.flyco.systembar.SystemBarHelper;
import com.flyco.systembardemo.R;
import com.flyco.systembardemo.ui.common.adapter.SingleTypeAdapter;

import java.util.ArrayList;

import butterknife.Bind;
import butterknife.ButterKnife;

public class StatusBarImmersive2Activity extends AppCompatActivity {
    private Context mContext = this;
    @Bind(R.id.toolbar)
    Toolbar mToolbar;
    @Bind(R.id.recycler_view)
    RecyclerView mRecyclerView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_statusbar_immersive_2);
        ButterKnife.bind(this);

        setSupportActionBar(mToolbar);
        getSupportActionBar().setTitle("H07000223");
        getSupportActionBar().setDisplayHomeAsUpEnabled(true);
        mToolbar.setNavigationOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                onBackPressed();
            }
        });

        // method 1
        SystemBarHelper.immersiveStatusBar(this);
        SystemBarHelper.setHeightAndPadding(this, mToolbar);

        // method 2: handle the two cases separately -- on 4.4 call the SystemBarHelper methods,
        // on 5.0 and above use the system API
        // if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
        //     SystemBarHelper.immersiveStatusBar(this);
        //     SystemBarHelper.setHeightAndPadding(this, mToolbar);
        // }

        ArrayList<String> mNames = new ArrayList<>();
        for (int i = 0; i < 20; i++) {
            mNames.add("状态栏沉浸");
        }
        mRecyclerView.setLayoutManager(new LinearLayoutManager(mContext));
        mRecyclerView.setAdapter(new SingleTypeAdapter<String>(mNames, R.layout.item_home_fragment) {
            @Override
            public void bindView(ViewHolder holder, int position, View itemView) {
                TextView mItem = ButterKnife.findById(itemView, R.id.item);
                mItem.setText(getDataList().get(holder.getAdapterPosition()) + "-" + holder.getAdapterPosition());
            }
        });
    }
}
1,018
543
# # Copyright (c) 2021, NVIDIA CORPORATION. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import dask.dataframe as dd import pandas as pd import pytest import nvtabular as nvt from nvtabular import ops try: import dask_cudf _CPU = [True, False] _HAS_GPU = True except ImportError: _CPU = [True] _HAS_GPU = False @pytest.mark.parametrize("cpu", _CPU) @pytest.mark.parametrize("groups", [[["Author", "Engaging-User"]], "Author"]) def test_joingroupby_multi(tmpdir, groups, cpu): df = pd.DataFrame( { "Author": ["User_A", "User_A", "User_A", "User_B"], "Engaging-User": ["User_B", "User_B", "User_C", "User_C"], "Cost": [100.0, 200.0, 300.0, 400.0], "Post": [1, 2, 3, 4], } ) groupby_features = groups >> ops.JoinGroupby( out_path=str(tmpdir), stats=["sum"], cont_cols=["Cost"] ) workflow = nvt.Workflow(groupby_features + "Post") df_out = workflow.fit_transform(nvt.Dataset(df, cpu=cpu)).to_ddf().compute() if isinstance(groups, list): # Join on ["Author", "Engaging-User"] if cpu: check = df_out["Author_Engaging-User_Cost_sum"].to_list() else: check = df_out["Author_Engaging-User_Cost_sum"].to_arrow().to_pylist() assert check == [300.0, 300.0, 300.0, 400.0] else: # Join on ["Author"] if cpu: check = df_out["Author_Cost_sum"].to_list() else: check = df_out["Author_Cost_sum"].to_arrow().to_pylist() assert check == [600.0, 600.0, 600.0, 400.0] @pytest.mark.skipif(not _HAS_GPU, reason="This unittest requires cudf/dask_cudf to run") @pytest.mark.parametrize("engine", ["parquet"]) @pytest.mark.parametrize( "kind_ext", [ "cudf", "pandas", "arrow", "parquet", "parquet-multi", "csv", "dask-dataframe", "dask-cudf", "dataset", ], ) @pytest.mark.parametrize("cache", ["host", "device"]) @pytest.mark.parametrize("how", ["left", "inner"]) @pytest.mark.parametrize("cpu", _CPU) @pytest.mark.parametrize("drop_duplicates", [True, False]) def test_join_external(tmpdir, df, dataset, engine, kind_ext, cache, how, cpu, drop_duplicates): # Define "external" table shift = 100 df_ext = df[["id"]].copy().sort_values("id") df_ext["new_col"] = df_ext["id"] + shift df_ext["new_col_2"] = "keep" df_ext["new_col_3"] = "ignore" df_ext_check = df_ext.copy() if kind_ext == "pandas": df_ext = df_ext.to_pandas() elif kind_ext == "arrow": df_ext = df_ext.to_arrow() elif kind_ext == "parquet": path = tmpdir.join("external.parquet") df_ext.to_parquet(path) df_ext = path elif kind_ext == "parquet-multi": path = tmpdir.join("external-multi.parquet") dask_cudf.from_cudf(df_ext, npartitions=3).to_parquet(path) df_ext = path elif kind_ext == "csv": path = tmpdir.join("external.csv") df_ext.to_csv(path) df_ext = path elif kind_ext == "dask-dataframe": df_ext = dd.from_pandas(df_ext.to_pandas(), npartitions=2) elif kind_ext == "dask-cudf": df_ext = dask_cudf.from_cudf(df_ext, npartitions=2) elif kind_ext == "dataset": df_ext = nvt.Dataset(df_ext) # Define Op on = "id" columns_left = list(df.columns) columns_ext = ["id", "new_col", "new_col_2"] df_ext_check = df_ext_check[columns_ext] if drop_duplicates: df_ext_check.drop_duplicates(ignore_index=True, inplace=True) 
joined = nvt.ColumnSelector(columns_left) >> nvt.ops.JoinExternal( df_ext, on, how=how, columns_ext=columns_ext, cache=cache, drop_duplicates_ext=drop_duplicates, ) gdf = df.reset_index() dataset = nvt.Dataset(gdf, cpu=cpu) processor = nvt.Workflow(joined) processor.fit(dataset) new_gdf = processor.transform(dataset).to_ddf().compute().reset_index() check_gdf = gdf.merge(df_ext_check, how=how, on=on) assert len(check_gdf) == len(new_gdf) assert (new_gdf["id"] + shift).all() == new_gdf["new_col"].all() assert gdf["id"].all() == new_gdf["id"].all() assert "new_col_2" in new_gdf.columns assert "new_col_3" not in new_gdf.columns
2,220
732
//
//  DSCommentCell.h
//  DSLolita
//
//  Created by <NAME> on 15/6/3.
//  Copyright (c) 2015 samDing. All rights reserved.
//

#import <UIKit/UIKit.h>

@class DSCommentCellFrame;
@class DSCommentDetailView;

@interface DSCommentCell : UITableViewCell

+ (instancetype)cellWithTableView:(UITableView *)tableView;

@property (nonatomic, strong) DSCommentCellFrame *commentFrame;
@property (nonatomic, weak) DSCommentDetailView *commentDetailView;
@property (nonatomic, strong) NSIndexPath *indexpath;

@end
176
450
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ //-------------------------------------------------------------------------- // @filename: // CMappingColIdVarQuery.cpp // // @doc: // Implementation of the functions that provide the mapping from CDXLNode to // Var during DXL->Query translation // // @test: // // //--------------------------------------------------------------------------- #include "postgres.h" #include "gpopt/translate/CMappingColIdVarQuery.h" #include "nodes/makefuncs.h" #include "nodes/primnodes.h" #include "naucrates/dxl/operators/CDXLScalarIdent.h" #include "gpopt/gpdbwrappers.h" using namespace gpdxl; using namespace gpos; //--------------------------------------------------------------------------- // @function: // CMappingColIdVarQuery::CMappingColIdVarQuery // // @doc: // Constructor for a query translator context associated to a sub query // at a certain depth (level) in the GPDB query object // //--------------------------------------------------------------------------- CMappingColIdVarQuery::CMappingColIdVarQuery ( IMemoryPool *pmp, TEMap *ptemap, ULONG ulQueryLevel ) : CMappingColIdVar(pmp), m_ptemap(ptemap), m_ulQueryLevel(ulQueryLevel) { } //--------------------------------------------------------------------------- // @function: // CMappingColIdVarQuery::Pte // // @doc: // Lookup TargetEntry associated with a given col id // //--------------------------------------------------------------------------- const TargetEntry * CMappingColIdVarQuery::Pte ( ULONG ulColId ) const { const CMappingElementColIdTE *pmappingelement = m_ptemap->PtLookup(&ulColId); GPOS_ASSERT(NULL != pmappingelement); return pmappingelement->Pte(); } //--------------------------------------------------------------------------- // @function: // CMappingColIdVarQuery::FInsertMapping // // @doc: // Insert // //--------------------------------------------------------------------------- BOOL CMappingColIdVarQuery::FInsertMapping ( ULONG ulColId, TargetEntry *pte ) { // Assert that there are no duplicate entries for a column id GPOS_ASSERT(NULL == m_ptemap->PtLookup(&ulColId)); // create mapping element CMappingElementColIdTE *pmappingelement = GPOS_NEW(m_pmp) CMappingElementColIdTE(ulColId, m_ulQueryLevel, pte); // insert ColId->TE mapping ULONG *pulKey1 = GPOS_NEW(m_pmp) ULONG(ulColId); BOOL fRes1 = m_ptemap->FInsert(pulKey1, pmappingelement); GPOS_ASSERT(fRes1); return fRes1; } //--------------------------------------------------------------------------- // @function: // CMappingColIdVarQuery::UlQueryLevel // // @doc: // Returns the query level counter // //--------------------------------------------------------------------------- ULONG CMappingColIdVarQuery::UlQueryLevel() const { return m_ulQueryLevel; } 
//--------------------------------------------------------------------------- // @function: // CMappingColIdVarQuery::PvarFromDXLNodeScId // // @doc: // Translates a DXL scalar identifier operator into a GPDB Var node // //--------------------------------------------------------------------------- Var * CMappingColIdVarQuery::PvarFromDXLNodeScId ( const CDXLScalarIdent *pdxlop ) { ULONG ulColId = pdxlop->Pdxlcr()->UlID(); const CMappingElementColIdTE *pmappingelement = m_ptemap->PtLookup(&ulColId); GPOS_ASSERT(NULL != pmappingelement); const TargetEntry *pte = pmappingelement->Pte(); GPOS_ASSERT(NULL != pte); GPOS_ASSERT(IsA(pte->expr, Var)); Var *pvar = ((Var*) pte->expr); Var *pvarNew = (Var*) gpdb::PvCopyObject(pvar); // lookup query level const ULONG ulLevel = pmappingelement->UlQueryLevel(); pvarNew->varlevelsup = m_ulQueryLevel - ulLevel; return pvarNew; } // EOF
1,331
315
<filename>include/Tools/Code/Turbo/Post_processing_SISO/Scaling_factor/Scaling_factor.hpp /*! * \file * \brief Class tools::Scaling_factor. */ #ifndef SCALING_FACTOR_HPP #define SCALING_FACTOR_HPP #include "Tools/Code/Turbo/Post_processing_SISO/Post_processing_SISO.hpp" namespace aff3ct { namespace tools { /*! * \class Scaling_factor * \brief Re-scales the extrinsic information (used in the turbo decoding process). */ template <typename B = int, typename R = float> class Scaling_factor : public Post_processing_SISO<B,R> { protected: const int n_ite; /*!< Number of iterations in the turbo decoding process */ public: /*! * \brief Constructor. * * \param n_ite: number of iterations in the turbo decoding process. */ explicit Scaling_factor(const int n_ite); /*! * \brief Destructor. */ virtual ~Scaling_factor() = default; virtual Scaling_factor<B,R>* clone() const; }; } } #ifndef DOXYGEN_SHOULD_SKIP_THIS #include "Tools/Code/Turbo/Post_processing_SISO/Scaling_factor/Scaling_factor.hxx" #endif #endif /* SCALING_FACTOR_HPP */
396
916
package com.pancm.thread.concurrent.liveLock; import java.util.Random; public class Consumer implements Runnable { private Drop drop; public Consumer(Drop drop) { this.drop = drop; } public void run() { Random random = new Random(); // String message=""; // do{ // message= drop.take(); // System.out.format("MESSAGE RECEIVED: %s%n", message); // try { // Thread.sleep(random.nextInt(1000)); // } catch (InterruptedException e) { // e.printStackTrace(); // } // } while(!message.equals("DONE")); for (String message = drop.take(); !message.equals("DONE"); message = drop.take()) { System.out.format("MESSAGE RECEIVED: %s%n", message); try { Thread.sleep(random.nextInt(1000)); } catch (InterruptedException e) { e.printStackTrace(); } } } }
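The Consumer above depends on a Drop class that is not part of this file. The take() loop and the "DONE" sentinel follow the producer-consumer ("guarded blocks") example from the official Java Tutorials, so a compatible Drop would look roughly like the sketch below; this is an assumption, and the actual Drop in this package may differ.

public class Drop {
    // Message passed from producer to consumer.
    private String message;
    // True only when the consumer may take a new message.
    private boolean empty = true;

    public synchronized String take() {
        // Wait until a message is available.
        while (empty) {
            try {
                wait();
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }
        empty = true;
        notifyAll();
        return message;
    }

    public synchronized void put(String message) {
        // Wait until the previous message has been taken.
        while (!empty) {
            try {
                wait();
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }
        empty = false;
        this.message = message;
        notifyAll();
    }
}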
480
1,755
/* Copyright <NAME>. Distributed under the OSI-approved BSD 3-Clause License. See accompanying file Copyright.txt for details. */ #include <stdio.h> /* Test KWIML header inclusion after above system headers. */ #include "test.h" #include "../include/kwiml/abi.h" #include "../include/kwiml/int.h" int test_include_C(void) { return 1; }
123
713
<reponame>XSoyOscar/Algorithms # # My initial solution, which is wrong. # class Solution(object): # def maxProfit(self, prices): # """ # :type prices: List[int] # :rtype: int # """ # days = len(prices) # if days < 2: # return 0 # totalProfit = 0 # isCoolingDown = False # minimumPrice = prices[0] # for today in range(1, days): # if prices[today] < minimumPrice: # minimumPrice = prices[today] # isCoolingDown = False # elif minimumPrice < prices[today]: # if not isCoolingDown: # totalProfit += (prices[today] - minimumPrice) # minimumPrice = prices[today] # isCoolingDown = True # else: # isCoolingDown = False # else: # isCoolingDown = False # return totalProfit # Taken from this (Must read): https://tinyurl.com/wlol23f and https://tinyurl.com/syuepe6 class Solution: def maxProfit(self, prices): """ :type prices: List[int] :rtype: int """ if len(prices) <= 1: return 0 have_1_stock_and_sell = 'have 1 stock and sell' have_1_stock_and_keep = 'have 1 stock and keep' have_0_stock_and_buy = 'have 0 stock and buy' have_0_stock_and_rest = 'have 0 stock and rest' # The keys of this dictionary is the action taken on 'i'th day and the values represent the possible actions on the 'i + 1' day. action_to_next_day_possible_actions = { have_1_stock_and_sell: {have_0_stock_and_rest}, # Cool-down. have_1_stock_and_keep: {have_1_stock_and_keep, have_1_stock_and_sell}, have_0_stock_and_buy: {have_1_stock_and_keep, have_1_stock_and_sell}, have_0_stock_and_rest: {have_0_stock_and_rest, have_0_stock_and_buy}, } # We initialize with the possible actions for the first day. possible_actions = [{have_0_stock_and_buy: 0, have_0_stock_and_rest: 0}] def set_max_action_to_gain(_today_actions_to_total_gain, _today_action, _previous_day_total_gain, _today_gain=0): if _today_action in _today_actions_to_total_gain: different_previous_action_today_gain = _today_actions_to_total_gain[_today_action] _today_actions_to_total_gain.update({_today_action: max(_previous_day_total_gain + _today_gain, different_previous_action_today_gain)}) else: _today_actions_to_total_gain.update({_today_action: _previous_day_total_gain + _today_gain}) # Start with the second day, compare all actions possible on the second day based on the possible actions of # the first day. i = 1 while i < len(prices): today_actions_to_total_gain = dict() today_gain = prices[i] - prices[i - 1] for previous_day_action, previous_day_total_gain in possible_actions[-1].items(): for today_action in action_to_next_day_possible_actions[previous_day_action]: if previous_day_action == have_1_stock_and_sell: # If we sold yesterday, we have to rest there, so no gain for today. set_max_action_to_gain(today_actions_to_total_gain, today_action, previous_day_total_gain) elif previous_day_action == have_1_stock_and_keep: # Whether we keep or sell, the gain is the same: set_max_action_to_gain(today_actions_to_total_gain, today_action, previous_day_total_gain, today_gain) elif previous_day_action == have_0_stock_and_buy: # In both cases, have_1_stock_and_keep and have_1_stock_and_sell would yield to the same gain: set_max_action_to_gain(today_actions_to_total_gain, today_action, previous_day_total_gain, today_gain) elif previous_day_action == have_0_stock_and_rest: # In this case, the gain is 0 because we don't hold any stock, whether we rest or buy on this # day. 
set_max_action_to_gain(today_actions_to_total_gain, today_action, previous_day_total_gain) possible_actions.append(today_actions_to_total_gain) i += 1 last_possible_actions = possible_actions[-1] print('last possible actions', last_possible_actions) return max(last_possible_actions.values()) sol = Solution() prices = [1,2,4] profit = sol.maxProfit(prices) print("Profit: ", profit)
2,449
664
<reponame>adem4ik/LIII<gh_stars>100-1000 /* Copyright (c) 2015, <NAME> All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the author nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ #include "libtorrent/settings_pack.hpp" #include "libtorrent/alert.hpp" #include "settings.hpp" using namespace libtorrent; libtorrent::settings_pack settings() { const int mask = alert::all_categories & ~(alert::progress_notification | alert::performance_warning | alert::stats_notification | alert::picker_log_notification); settings_pack pack; pack.set_bool(settings_pack::enable_lsd, false); pack.set_bool(settings_pack::enable_natpmp, false); pack.set_bool(settings_pack::enable_upnp, false); pack.set_bool(settings_pack::enable_dht, false); pack.set_str(settings_pack::dht_bootstrap_nodes, ""); pack.set_bool(settings_pack::prefer_rc4, false); pack.set_int(settings_pack::in_enc_policy, settings_pack::pe_disabled); pack.set_int(settings_pack::out_enc_policy, settings_pack::pe_disabled); pack.set_int(settings_pack::allowed_enc_level, settings_pack::pe_both); pack.set_int(settings_pack::alert_mask, mask); #ifndef TORRENT_BUILD_SIMULATOR pack.set_bool(settings_pack::allow_multiple_connections_per_ip, true); #else // we use 0 threads (disk I/O operations will be performed in the network // thread) to be simulator friendly. pack.set_int(settings_pack::aio_threads, 0); #endif #ifndef TORRENT_NO_DEPRECATE pack.set_int(settings_pack::half_open_limit, 1); #endif return pack; }
932
826
#pragma once extern "C" { #include <ngx_http.h> } #include <weserv/utils/status.h> namespace weserv { namespace nginx { ngx_int_t ngx_weserv_return_error(ngx_http_request_t *r, api::utils::Status status, ngx_chain_t *out); } // namespace nginx } // namespace weserv
152
582
<reponame>Sphericone/archi<gh_stars>100-1000 /** * This program and the accompanying materials * are made available under the terms of the License * which accompanies this distribution in the file LICENSE.txt */ package com.archimatetool.editor.diagram.figures.connections; import org.eclipse.draw2d.PolylineDecoration; import org.eclipse.draw2d.RotatableDecoration; import org.eclipse.swt.SWT; import com.archimatetool.editor.diagram.figures.FigureUtils; import com.archimatetool.editor.utils.StringUtils; import com.archimatetool.model.IInfluenceRelationship; /** * Influence Connection Figure class * * @author <NAME> */ public class InfluenceConnectionFigure extends AbstractArchimateConnectionFigure { /** * @return Decoration to use on Target Node */ public static RotatableDecoration createFigureTargetDecoration() { return new PolylineDecoration(); } private RotatableDecoration fDecoratorTarget = createFigureTargetDecoration(); public InfluenceConnectionFigure() { } @Override protected void setFigureProperties() { setLineStyle(SWT.LINE_CUSTOM); // We have to explitly set this otherwise dashes/dots don't show setLineDash(getLineDashFloats()); } @Override public void setText() { super.setText(); // Show Strength after Name if we don't show it already by means of the text expression if(getModelConnection().isNameVisible()) { String text = getConnectionLabel().getText(); String strength = ((IInfluenceRelationship)getModelConnection().getArchimateRelationship()).getStrength(); if(StringUtils.isSet(strength) && !text.contains(strength)) { text += " " + strength; //$NON-NLS-1$ getConnectionLabel().setText(text); } } } @Override protected float[] getLineDashFloats() { double scale = Math.min(FigureUtils.getFigureScale(this), 1.0); // only scale below 1.0 return new float[] { (float)(6 * scale), (float)(3 * scale) }; } @Override public void refreshVisuals() { setTargetDecoration(usePlainJunctionTargetDecoration() ? null : fDecoratorTarget); // This last super.refreshVisuals(); } }
931
14,668
<gh_stars>1000+ // Copyright 2016 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "chrome/browser/ui/apps/chrome_app_window_client.h" #include "chrome/browser/ui/views/apps/chrome_native_app_window_views_aura_ash.h" // static extensions::NativeAppWindow* ChromeAppWindowClient::CreateNativeAppWindowImpl( extensions::AppWindow* app_window, const extensions::AppWindow::CreateParams& params) { ChromeNativeAppWindowViewsAuraAsh* window = new ChromeNativeAppWindowViewsAuraAsh; window->Init(app_window, params); return window; }
206
875
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.sqoop.mapreduce; import java.io.File; import java.io.IOException; import java.sql.SQLException; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapreduce.InputFormat; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.OutputFormat; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.StringUtils; import org.apache.sqoop.config.ConfigurationConstants; import org.apache.sqoop.SqoopOptions; import org.apache.sqoop.config.ConfigurationHelper; import org.apache.sqoop.manager.ConnManager; import org.apache.sqoop.tool.SqoopTool; import org.apache.sqoop.util.ClassLoaderStack; import org.apache.sqoop.util.Jars; import org.apache.sqoop.validation.*; /** * Base class for configuring and running a MapReduce job. * Allows dependency injection, etc, for easy customization of import job types. */ public class JobBase { public static final Log LOG = LogFactory.getLog(JobBase.class.getName()); public static final String SERIALIZE_SQOOPOPTIONS = "sqoop.jobbase.serialize.sqoopoptions"; public static final boolean SERIALIZE_SQOOPOPTIONS_DEFAULT = false; public static final String HADOOP_MAP_TASK_MAX_ATTEMTPS = "mapreduce.map.maxattempts"; public static final String HADOOP_REDUCE_TASK_MAX_ATTEMTPS = "mapreduce.reduce.maxattempts"; protected SqoopOptions options; protected Class<? extends Mapper> mapperClass; protected Class<? extends InputFormat> inputFormatClass; protected Class<? extends OutputFormat> outputFormatClass; private Job mrJob; private ClassLoader prevClassLoader = null; protected final boolean isHCatJob; public static final String PROPERTY_VERBOSE = "sqoop.verbose"; public JobBase() { this(null); } public JobBase(final SqoopOptions opts) { this(opts, null, null, null); } public JobBase(final SqoopOptions opts, final Class<? extends Mapper> mapperClass, final Class<? extends InputFormat> inputFormatClass, final Class<? extends OutputFormat> outputFormatClass) { this.options = opts; this.mapperClass = mapperClass; this.inputFormatClass = inputFormatClass; this.outputFormatClass = outputFormatClass; isHCatJob = options.getHCatTableName() != null; } /** * @return the mapper class to use for the job. */ protected Class<? extends Mapper> getMapperClass() throws ClassNotFoundException { return this.mapperClass; } /** * @return the inputformat class to use for the job. */ protected Class<? 
extends InputFormat> getInputFormatClass() throws ClassNotFoundException { return this.inputFormatClass; } /** * @return the outputformat class to use for the job. */ protected Class<? extends OutputFormat> getOutputFormatClass() throws ClassNotFoundException { return this.outputFormatClass; } /** Set the OutputFormat class to use for this job. */ public void setOutputFormatClass(Class<? extends OutputFormat> cls) { this.outputFormatClass = cls; } /** Set the InputFormat class to use for this job. */ public void setInputFormatClass(Class<? extends InputFormat> cls) { this.inputFormatClass = cls; } /** Set the Mapper class to use for this job. */ public void setMapperClass(Class<? extends Mapper> cls) { this.mapperClass = cls; } /** * Set the SqoopOptions configuring this job. */ public void setOptions(SqoopOptions opts) { this.options = opts; } /** * Put jar files required by Sqoop into the DistributedCache. * @param job the Job being submitted. * @param mgr the ConnManager to use. */ protected void cacheJars(Job job, ConnManager mgr) throws IOException { if (options.isSkipDistCache()) { LOG.info("Not adding sqoop jars to distributed cache as requested"); return; } Configuration conf = job.getConfiguration(); FileSystem fs = FileSystem.getLocal(conf); Set<String> localUrls = new HashSet<String>(); addToCache(Jars.getSqoopJarPath(), fs, localUrls); if (null != mgr) { addToCache(Jars.getDriverClassJar(mgr), fs, localUrls); addToCache(Jars.getJarPathForClass(mgr.getClass()), fs, localUrls); } SqoopTool tool = this.options.getActiveSqoopTool(); if (null != tool) { // Make sure the jar for the tool itself is on the classpath. (In case // this is a third-party plugin tool.) addToCache(Jars.getJarPathForClass(tool.getClass()), fs, localUrls); List<String> toolDeps = tool.getDependencyJars(); if (null != toolDeps) { for (String depFile : toolDeps) { addToCache(depFile, fs, localUrls); } } } // If the user specified a particular jar file name, // Add anything in $SQOOP_HOME/lib, if this is set. String sqoopHome = System.getenv("SQOOP_HOME"); if (null != sqoopHome) { File sqoopHomeFile = new File(sqoopHome); File sqoopLibFile = new File(sqoopHomeFile, "lib"); if (sqoopLibFile.exists()) { addDirToCache(sqoopLibFile, fs, localUrls); } } else { LOG.warn("SQOOP_HOME is unset. May not be able to find " + "all job dependencies."); } // If the user run import into hive as Parquet file, // Add anything in $HIVE_HOME/lib. if (options.doHiveImport() && (options.getFileLayout() == SqoopOptions.FileLayout.ParquetFile)) { String hiveHome = options.getHiveHome(); if (null != hiveHome) { File hiveHomeFile = new File(hiveHome); File hiveLibFile = new File(hiveHomeFile, "lib"); if (hiveLibFile.exists()) { addDirToCache(hiveLibFile, fs, localUrls); } } else { LOG.warn("HIVE_HOME is unset. Cannot add hive libs as dependencies."); } } String tmpjars = conf.get(ConfigurationConstants.MAPRED_DISTCACHE_CONF_PARAM); StringBuilder sb = new StringBuilder(); // If we didn't put anything in our set, then there's nothing to cache. 
if (localUrls.isEmpty() && (org.apache.commons.lang.StringUtils.isEmpty(tmpjars))) { return; } if (null != tmpjars) { String[] tmpjarsElements = tmpjars.split(","); for (String jarElement : tmpjarsElements) { if (jarElement.isEmpty()) { warn("Empty input is invalid and was removed from tmpjars."); } else { sb.append(jarElement); sb.append(","); } } } int lastComma = sb.lastIndexOf(","); if (localUrls.isEmpty() && lastComma >= 0) { sb.deleteCharAt(lastComma); } // Add these to the 'tmpjars' array, which the MR JobSubmitter // will upload to HDFS and put in the DistributedCache libjars. sb.append(StringUtils.arrayToString(localUrls.toArray(new String[0]))); conf.set(ConfigurationConstants.MAPRED_DISTCACHE_CONF_PARAM, sb.toString()); } protected void warn(String message) { LOG.warn(message); } private void addToCache(String file, FileSystem fs, Set<String> localUrls) { if (null == file) { return; } Path p = new Path(file); String qualified = p.makeQualified(fs).toString(); LOG.debug("Adding to job classpath: " + qualified); localUrls.add(qualified); } /** * Add the .jar elements of a directory to the DCache classpath, * nonrecursively. */ private void addDirToCache(File dir, FileSystem fs, Set<String> localUrls) { if (null == dir) { return; } for (File libfile : dir.listFiles()) { if (libfile.exists() && !libfile.isDirectory() && libfile.getName().endsWith("jar")) { addToCache(libfile.toString(), fs, localUrls); } } } /** * If jars must be loaded into the local environment, do so here. */ protected void loadJars(Configuration conf, String ormJarFile, String tableClassName) throws IOException { if (ConfigurationHelper.isLocalJobTracker(conf)) { // If we're using the LocalJobRunner, then instead of using the compiled // jar file as the job source, we're running in the current thread. Push // on another classloader that loads from that jar in addition to // everything currently on the classpath. this.prevClassLoader = ClassLoaderStack.addJarFile(ormJarFile, tableClassName); } } /** * If any classloader was invoked by loadJars, free it here. */ protected void unloadJars() { if (null != this.prevClassLoader) { // unload the special classloader for this jar. ClassLoaderStack.setCurrentClassLoader(this.prevClassLoader); } } /** * Configure the inputformat to use for the job. */ protected void configureInputFormat(Job job, String tableName, String tableClassName, String splitByCol) throws ClassNotFoundException, IOException { //TODO: 'splitByCol' is import-job specific; lift it out of this API. Class<? extends InputFormat> ifClass = getInputFormatClass(); LOG.debug("Using InputFormat: " + ifClass); job.setInputFormatClass(ifClass); } /** * Configure the output format to use for the job. */ protected void configureOutputFormat(Job job, String tableName, String tableClassName) throws ClassNotFoundException, IOException { Class<? extends OutputFormat> ofClass = getOutputFormatClass(); LOG.debug("Using OutputFormat: " + ofClass); job.setOutputFormatClass(ofClass); } /** * Set the mapper class implementation to use in the job, * as well as any related configuration (e.g., map output types). */ protected void configureMapper(Job job, String tableName, String tableClassName) throws ClassNotFoundException, IOException { job.setMapperClass(getMapperClass()); } /** * Configure the number of map/reduce tasks to use in the job, * returning the number of map tasks for backward compatibility. 
*/ protected int configureNumTasks(Job job) throws IOException { int numMapTasks = configureNumMapTasks(job); configureNumReduceTasks(job); return numMapTasks; } /** * Configure the number of map tasks to use in the job. */ protected int configureNumMapTasks(Job job) throws IOException { int numMapTasks = options.getNumMappers(); if (numMapTasks < 1) { numMapTasks = SqoopOptions.DEFAULT_NUM_MAPPERS; LOG.warn("Invalid mapper count; using " + numMapTasks + " mappers."); } ConfigurationHelper.setJobNumMaps(job, numMapTasks); return numMapTasks; } /** * Configure the number of reduce tasks to use in the job. */ protected int configureNumReduceTasks(Job job) throws IOException { job.setNumReduceTasks(0); return 0; } /** Set the main job that will be run. */ protected void setJob(Job job) { mrJob = job; } /** * @return the main MapReduce job that is being run, or null if no * job has started. */ public Job getJob() { return mrJob; } /** * Create new Job object in unified way for all types of jobs. * * @param configuration Hadoop configuration that should be used * @return New job object, created object won't be persisted in the instance */ public Job createJob(Configuration configuration) throws IOException { // Put the SqoopOptions into job if requested if(configuration.getBoolean(SERIALIZE_SQOOPOPTIONS, SERIALIZE_SQOOPOPTIONS_DEFAULT)) { putSqoopOptionsToConfiguration(options, configuration); } return new Job(configuration); } /** * Iterates over serialized form of SqoopOptions and put them into Configuration * object. * * @param opts SqoopOptions that should be serialized * @param configuration Target configuration object */ public void putSqoopOptionsToConfiguration(SqoopOptions opts, Configuration configuration) { for(Map.Entry<Object, Object> e : opts.writeProperties().entrySet()) { String key = (String)e.getKey(); String value = (String)e.getValue(); // We don't need to do if(value is empty) because that is already done // for us by the SqoopOptions.writeProperties() method. configuration.set("sqoop.opt." + key, value); } } /** * Actually run the MapReduce job. */ protected boolean runJob(Job job) throws ClassNotFoundException, IOException, InterruptedException { return job.waitForCompletion(true); } /** * Display a notice on the log that the current MapReduce job has * been retired, and thus Counters are unavailable. * @param log the Log to display the info to. */ protected void displayRetiredJobNotice(Log log) { log.info("The MapReduce job has already been retired. Performance"); log.info("counters are unavailable. To get this information, "); log.info("you will need to enable the completed job store on "); log.info("the jobtracker with:"); log.info("mapreduce.jobtracker.persist.jobstatus.active = true"); log.info("mapreduce.jobtracker.persist.jobstatus.hours = 1"); log.info("A jobtracker restart is required for these settings"); log.info("to take effect."); } /** * Save interesting options to constructed job. Goal here is to propagate some * of them to the job itself, so that they can be easily accessed. We're * propagating only interesting global options (like verbose flag). 
* * @param job Destination job to save options */ protected void propagateOptionsToJob(Job job) { Configuration configuration = job.getConfiguration(); // So far, propagate only verbose flag configuration.setBoolean(PROPERTY_VERBOSE, options.getVerbose()); } protected long getRowCountFromDB(ConnManager connManager, String tableName) throws SQLException { return connManager.getTableRowCount(tableName); } protected long getRowCountFromHadoop(Job job) throws IOException, InterruptedException { return ConfigurationHelper.getNumMapOutputRecords(job); } protected void doValidate(SqoopOptions options, Configuration conf, ValidationContext validationContext) throws ValidationException { Validator validator = (Validator) ReflectionUtils.newInstance( options.getValidatorClass(), conf); ValidationThreshold threshold = (ValidationThreshold) ReflectionUtils.newInstance(options.getValidationThresholdClass(), conf); ValidationFailureHandler failureHandler = (ValidationFailureHandler) ReflectionUtils.newInstance(options.getValidationFailureHandlerClass(), conf); StringBuilder sb = new StringBuilder(); sb.append("Validating the integrity of the import using the " + "following configuration\n"); sb.append("\tValidator : ").append(validator.getClass().getName()) .append('\n'); sb.append("\tThreshold Specifier : ") .append(threshold.getClass().getName()).append('\n'); sb.append("\tFailure Handler : ") .append(failureHandler.getClass().getName()).append('\n'); LOG.info(sb.toString()); validator.validate(validationContext, threshold, failureHandler); } }
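/*
 * Hedged illustration, not part of the Sqoop sources above: putSqoopOptionsToConfiguration()
 * stores every serialized SqoopOptions property in the job Configuration under a
 * "sqoop.opt." prefix. A task-side consumer could recover those properties roughly as
 * sketched below. Only the "sqoop.opt." prefix comes from the code above; the class name
 * and the use of java.util.Properties are assumptions made for this example.
 */
import java.util.Map;
import java.util.Properties;

import org.apache.hadoop.conf.Configuration;

public final class SqoopOptionsConfReader {

  /** Prefix used by putSqoopOptionsToConfiguration() in the excerpt above. */
  private static final String PREFIX = "sqoop.opt.";

  private SqoopOptionsConfReader() { }

  /** Collect every "sqoop.opt.*" entry of a job Configuration into a Properties object. */
  public static Properties readSerializedOptions(Configuration conf) {
    Properties props = new Properties();
    // Configuration is Iterable over its key/value pairs.
    for (Map.Entry<String, String> entry : conf) {
      String key = entry.getKey();
      if (key.startsWith(PREFIX)) {
        // Strip the prefix so keys line up with SqoopOptions.writeProperties() output.
        props.setProperty(key.substring(PREFIX.length()), entry.getValue());
      }
    }
    return props;
  }
}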
5,574
498
<reponame>Patrick-Kladek/CocoaDebugKit // // CrossPlatformDefinitions.h // CocoaDebugKit // // Created by <NAME> on 20.05.17. // Copyright (c) 2017 <NAME>. All rights reserved. // #include <TargetConditionals.h> #ifndef CocoaDebugKit_CrossPlatformDefinitions_h #define CocoaDebugKit_CrossPlatformDefinitions_h #if TARGET_OS_IPHONE #import <UIKit/UIKit.h> #import <CocoaDebugKit/CocoaPropertyLine.h> typedef UIView CPView; typedef UIColor CPColor; typedef UIFont CPFont; typedef UIImage CPImage; typedef UILabel CPTextField; typedef UIImageView CPImageView; typedef UIScreen CPScreen; typedef CGPoint CPPoint; typedef CGSize CPSize; typedef CGRect CPRect; // iOS doesn´t have Image scaling typedef NS_ENUM(NSUInteger, CPImageScaling) { CPImageScaleProportionallyDown = 0, CPImageScaleAxesIndependently = 0, CPImageScaleNone = 0, CPImageScaleProportionallyUpOrDown = 0 }; typedef NS_ENUM(NSUInteger, CPTextAlignment) { CPAlignmentLeft = NSTextAlignmentLeft, CPAlignmentCenter = NSTextAlignmentCenter, CPAlignmentRight = NSTextAlignmentRight, CPAlignmentJustified = NSTextAlignmentJustified, CPAlignmentNatural = NSTextAlignmentNatural }; #define CPMakeSize(width, height) CGSizeMake(width, height) #define CPMakeRect(x, y, w, h) CGRectMake(x, y, w, h) #define CPMakePoint(x, y) CGPointMake(x, y) #define CPSizeFromString(string) CGSizeFromString(string) #else #import <Cocoa/Cocoa.h> #import <QuartzCore/QuartzCore.h> #import <CocoaDebugKit/CocoaPropertyLine.h> typedef NSView CPView; typedef NSColor CPColor; typedef NSFont CPFont; typedef NSImage CPImage; typedef NSTextField CPTextField; typedef NSImageView CPImageView; typedef NSScreen CPScreen; typedef NSPoint CPPoint; typedef NSSize CPSize; typedef NSRect CPRect; typedef NS_ENUM(NSUInteger, CPImageScaling) { CPImageScaleProportionallyDown = NSImageScaleProportionallyDown, CPImageScaleAxesIndependently = NSImageScaleAxesIndependently, CPImageScaleNone = NSImageScaleNone, CPImageScaleProportionallyUpOrDown = NSImageScaleProportionallyUpOrDown }; typedef NS_ENUM(NSUInteger, CPTextAlignment) { CPAlignmentCenter = NSTextAlignmentCenter, CPAlignmentLeft = NSTextAlignmentLeft, CPAlignmentRight = NSTextAlignmentRight, CPAlignmentJustified = NSTextAlignmentJustified, CPAlignmentNatural = NSTextAlignmentNatural }; #define CPMakeSize(width, height) NSMakeSize(width, height) #define CPMakeRect(x, y, w, h) NSMakeRect(x, y, w, h) #define CPMakePoint(x, y) NSMakePoint(x, y) #define CPSizeFromString(string) NSSizeFromString(string) #endif #endif
1,116
2,529
<gh_stars>1000+
#!/usr/bin/env python3

import os
import sys


def main(argv):
    # The script reads log, fail_on, cmd and domain, i.e. it needs at least 5 argv entries.
    if len(argv) > 4:
        log = argv[1]
        fail_on = argv[2]
        cmd = argv[3]
        domain = argv[4]
        if 'renewing' != cmd:
            with open(log, 'a+') as f1:
                f1.write(f"{[argv[0], log, cmd, domain]}\n")
        if cmd.startswith(fail_on):
            sys.stderr.write(f"failing on: {cmd}\n")
            sys.exit(1)
        sys.stderr.write("done, all fine.\n")
        sys.exit(0)
    else:
        sys.stderr.write("%s without arguments\n" % (argv[0]))
        sys.exit(7)


if __name__ == "__main__":
    main(sys.argv)
378
881
<reponame>gdubya/java package com.structurizr.view; import org.junit.Before; import org.junit.Test; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; public class FontTests { private Font font; @Before public void setUp() { this.font = new Font(); } @Test public void construction_WithANameOnly() { this.font = new Font("Times New Roman"); assertEquals("Times New Roman", font.getName()); } @Test public void construction_WithANameAndUrl() { this.font = new Font("Open Sans", "https://fonts.googleapis.com/css?family=Open+Sans:400,700"); assertEquals("Open Sans", font.getName()); assertEquals("https://fonts.googleapis.com/css?family=Open+Sans:400,700", font.getUrl()); } @Test public void test_setUrl_WithAUrl() { font.setUrl("https://fonts.googleapis.com/css?family=Open+Sans:400,700"); assertEquals("https://fonts.googleapis.com/css?family=Open+Sans:400,700", font.getUrl()); } @Test(expected = IllegalArgumentException.class) public void test_setUrl_ThrowsAnIllegalArgumentException_WhenAnInvalidUrlIsSpecified() { font.setUrl("htt://blah"); } @Test public void test_setUrl_DoesNothing_WhenANullUrlIsSpecified() { font.setUrl(null); assertNull(font.getUrl()); } @Test public void test_setUrl_DoesNothing_WhenAnEmptyUrlIsSpecified() { font.setUrl(" "); assertNull(font.getUrl()); } }
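/*
 * Hedged usage sketch, not part of the Structurizr test file above: the tests exercise the
 * Font class on its own; in a workspace a Font is typically attached to the view set's
 * branding. The method chain below (getViews().getConfiguration().getBranding().setFont())
 * reflects my understanding of the Structurizr for Java API and should be verified against
 * the version in use; the workspace name and font URL are made up for the example.
 */
import com.structurizr.Workspace;
import com.structurizr.view.Font;

public class BrandingFontExample {

    public static void main(String[] args) {
        Workspace workspace = new Workspace("Example", "Shows where a Font is normally used.");

        // A web font with a name and a stylesheet URL, as in the tests above.
        Font font = new Font("Open Sans", "https://fonts.googleapis.com/css?family=Open+Sans:400,700");

        // Attach the font to the workspace branding (assumed API, see note above).
        workspace.getViews().getConfiguration().getBranding().setFont(font);
    }
}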
624
852
import FWCore.ParameterSet.Config as cms

process = cms.Process("PROD")

process.load("SimGeneral.HepPDTESSource.pythiapdt_cfi")

#Geometry
#
process.load("Configuration.Geometry.GeometryExtendedReco_cff")

#Magnetic Field
#
process.load("Configuration.StandardSequences.MagneticField_38T_cff")

# Output of events, etc...
#
# Explicit note : since some histos/tree might be dumped directly,
#                 better NOT use PoolOutputModule !
# Detector simulation (Geant4-based)
#
process.load("SimG4Core.Application.g4SimHits_cfi")

process.RandomNumberGeneratorService = cms.Service("RandomNumberGeneratorService",
    moduleSeeds = cms.PSet(
        g4SimHits = cms.untracked.uint32(9876)
    )
)

process.load('FWCore.MessageService.MessageLogger_cfi')
if 'MessageLogger' in process.__dict__:
    process.MessageLogger.MaterialBudget=dict()

process.source = cms.Source("PoolSource",
    fileNames = cms.untracked.vstring('file:single_neutrino_random.root')
)

process.maxEvents = cms.untracked.PSet(
    input = cms.untracked.int32(-1)
)

process.p1 = cms.Path(process.g4SimHits)
process.g4SimHits.UseMagneticField = False
process.g4SimHits.StackingAction.TrackNeutrino = True
process.g4SimHits.Physics.type = 'SimG4Core/Physics/DummyPhysics'
process.g4SimHits.Physics.DummyEMPhysics = True
process.g4SimHits.Physics.CutsPerRegion = False
process.g4SimHits.Watchers = cms.VPSet(cms.PSet(
    type = cms.string('MaterialBudgetAction'),
    MaterialBudgetAction = cms.PSet(
        HistosFile = cms.string('matbdg_InFrontOfECAL.root'),
        AllStepsToTree = cms.bool(False),
        HistogramList = cms.string('ECAL'),
        SelectedVolumes = cms.vstring('BEAM', 'Tracker'),
        # string TextFile = "None"   # "None" means this option
        TreeFile = cms.string('None'),
        StopAfterProcess = cms.string('None'),
        TextFile = cms.string('matbdg_InFrontOfECAL.txt')
    )
))
782
6,969
/** * * @author <NAME>(https://github.com/thecoder8890) * Solution of https://www.hackerrank.com/challenges/welcome-to-java/problem * */ public class WelcomeToJava { public static void main(String[] args) { System.out.println("Hello, World."); System.out.println("Hello, Java."); } }
105
310
<gh_stars>100-1000 { "name": "<NAME>", "description": "A content management system for slideshows.", "url": "http://slideshowpro.net/" }
50
839
<reponame>kimjand/cxf<gh_stars>100-1000 /** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.cxf.transport.servlet; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.net.MalformedURLException; import java.net.URISyntaxException; import java.net.URL; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import javax.naming.InitialContext; import javax.servlet.ServletContext; import org.apache.cxf.resource.ResourceResolver; public class ServletContextResourceResolver implements ResourceResolver { ServletContext servletContext; Map<String, URL> urlMap = new ConcurrentHashMap<>(); public ServletContextResourceResolver(ServletContext sc) { servletContext = sc; } public final InputStream getAsStream(final String string) { if (urlMap.containsKey(string)) { try { return urlMap.get(string).openStream(); } catch (IOException e) { //ignore } } return servletContext.getResourceAsStream(string); } public final <T> T resolve(final String entryName, final Class<T> clz) { Object obj = null; try { if (entryName != null) { InitialContext ic = new InitialContext(); try { obj = ic.lookup(entryName); } finally { ic.close(); } } } catch (Throwable e) { //do nothing } if (obj != null && clz.isInstance(obj)) { return clz.cast(obj); } if (clz.isAssignableFrom(URL.class)) { if (urlMap.containsKey(entryName)) { return clz.cast(urlMap.get(entryName)); } try { URL url = servletContext.getResource(entryName); if (url != null && "file".equals(url.getProtocol()) && !(new File(url.toURI()).exists())) { url = null; } if (url != null) { urlMap.put(url.toString(), url); return clz.cast(url); } } catch (MalformedURLException e) { //fallthrough } catch (URISyntaxException e) { //ignore } try { URL url = servletContext.getResource("/" + entryName); if (url != null && "file".equals(url.getProtocol()) && !(new File(url.toURI()).exists())) { url = null; } if (url != null) { urlMap.put(url.toString(), url); return clz.cast(url); } } catch (MalformedURLException | URISyntaxException e1) { //ignore } } else if (clz.isAssignableFrom(InputStream.class)) { return clz.cast(getAsStream(entryName)); } return null; } }
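/*
 * Hedged usage sketch, not part of the CXF sources above: one plausible way to put
 * ServletContextResourceResolver to work is to register it with the Bus's ResourceManager
 * when the web application starts, so later resolveResource(...) calls can be answered from
 * the servlet context. The listener class name is invented for this example, and the
 * BusFactory/ResourceManager calls reflect the standard CXF extension API as I understand
 * it -- verify them against the CXF version in use.
 */
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;

import org.apache.cxf.Bus;
import org.apache.cxf.BusFactory;
import org.apache.cxf.resource.ResourceManager;
import org.apache.cxf.transport.servlet.ServletContextResourceResolver;

public class ResolverRegistrationListener implements ServletContextListener {

    @Override
    public void contextInitialized(ServletContextEvent sce) {
        Bus bus = BusFactory.getDefaultBus();
        ResourceManager manager = bus.getExtension(ResourceManager.class);
        if (manager != null) {
            // Resources such as "/WEB-INF/wsdl/service.wsdl" can now be resolved
            // through the servlet context in addition to the default resolvers.
            manager.addResourceResolver(
                new ServletContextResourceResolver(sce.getServletContext()));
        }
    }

    @Override
    public void contextDestroyed(ServletContextEvent sce) {
        // Nothing to clean up in this sketch.
    }
}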
1,787
8,092
# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # import time import unittest from airflow.providers.amazon.aws.hooks.logs import AwsLogsHook try: from moto import mock_logs except ImportError: mock_logs = None class TestAwsLogsHook(unittest.TestCase): @unittest.skipIf(mock_logs is None, 'mock_logs package not present') @mock_logs def test_get_conn_returns_a_boto3_connection(self): hook = AwsLogsHook(aws_conn_id='aws_default', region_name="us-east-1") assert hook.get_conn() is not None @unittest.skipIf(mock_logs is None, 'mock_logs package not present') # moto.logs does not support proper pagination so we cannot test that yet # https://github.com/spulec/moto/issues/2259 @mock_logs def test_get_log_events(self): log_group_name = 'example-group' log_stream_name = 'example-log-stream' hook = AwsLogsHook(aws_conn_id='aws_default', region_name="us-east-1") # First we create some log events conn = hook.get_conn() conn.create_log_group(logGroupName=log_group_name) conn.create_log_stream(logGroupName=log_group_name, logStreamName=log_stream_name) input_events = [{'timestamp': int(time.time()) * 1000, 'message': 'Test Message 1'}] conn.put_log_events( logGroupName=log_group_name, logStreamName=log_stream_name, logEvents=input_events ) events = hook.get_log_events(log_group=log_group_name, log_stream_name=log_stream_name) # Iterate through entire generator events = list(events) count = len(events) assert count == 1 assert events[0]['timestamp'] == input_events[0]['timestamp'] assert events[0]['message'] == input_events[0]['message']
913
422
import apps.common.func.InitDjango from all_models_for_ui.models import Tb3UIGlobalText from django.db import connection from django.forms.models import model_to_dict from apps.common.func.CommonFunc import * from all_models.models.A0011_version_manage import TbVersionGlobalText class PageObjectService(object): @staticmethod def delText(id): delResult = Tb3UIGlobalText.objects.get(id=id) delResult.state = 0 delResult.save() if __name__ == "__main__": # print((HTTP_test_caseService.getTestCaseForIdToDict("23"))) # print(UserService.getUserByLoginname(UserService.getUsers()[0].loginname)) # HTTP_test_caseService.testCaseAdd("") pass
258
716
/*
 * Copyright (c) 2014, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in
 * the LICENSE file in the root directory of this source tree. An
 * additional grant of patent rights can be found in the PATENTS file
 * in the same directory.
 *
 */
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <ctype.h>
#include "util.h"
#include "autocmd.h"
#include "argv.h"

#if FBADB_MAIN
FORWARD(logwrite);
#elif !defined(__ANDROID__)
int
logwrite_main(const struct cmd_logwrite_info* info)
{
    die(ENOSYS, "Android logcat not supported on this system");
}
#else

static const char* log_levels[] = {
    // We use a prefix match, so make these long
    "verbose",
    "debug",
    "informational",
    "warning",
    "error",
    "fatal",
};

#include <android/log.h>

static void
tolower_inplace(char* s)
{
    for (;*s; ++s) {
        *s = tolower(*s);
    }
}

int
logwrite_main(const struct cmd_logwrite_info* info)
{
    const char* tag = info->logwrite.tag ?: "fb-adb-logwrite";
    int priority = ANDROID_LOG_INFO;
    if (info->logwrite.priority) {
        priority = -1;
        char* xprio = xstrdup(info->logwrite.priority);
        tolower_inplace(xprio);
        size_t xprio_len = strlen(xprio);
        // Compare against every entry, including "fatal".
        for (unsigned i = 0; i < ARRAYSIZE(log_levels); ++i) {
            if (!strncmp(xprio, log_levels[i], xprio_len)) {
                priority = ANDROID_LOG_VERBOSE + i;
                break;
            }
        }
        if (priority == -1)
            usage_error("unknown priority \"%s\"", info->logwrite.priority);
    }

    const char* const* p;
    size_t sz = 1;
    for (p = info->message_parts; *p; ++p)
        if (SATADD(&sz, sz, strlen(*p) + 1))
            die(EINVAL, "argument list too long");

    // Join the message parts with single spaces and NUL-terminate at the end of
    // what was actually written; sz is an upper bound on the space needed.
    char* msg = xalloc(sz);
    char* pos = msg;
    for (p = info->message_parts; *p; ++p) {
        const char* m = *p;
        size_t len = strlen(m);
        if (pos != msg)
            *pos++ = ' ';
        memcpy(pos, m, len);
        pos += len;
    }
    *pos = '\0';

    int ret = __android_log_write(priority, tag, msg);
    if (ret < 0) {
        errno = -ret;
        die_errno("__android_log_write");
    }
    return 0;
}

#endif
1,080
14,668
// Copyright 2017 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.chrome.browser.feedback; import org.chromium.chrome.browser.profiles.Profile; import org.chromium.components.variations.VariationsAssociatedData; import java.util.Map; /** Grabs feedback about the current variations state. */ class VariationsFeedbackSource implements FeedbackSource { private final boolean mIsOffTheRecord; VariationsFeedbackSource(Profile profile) { mIsOffTheRecord = profile.isOffTheRecord(); } @Override public Map<String, String> getFeedback() { if (mIsOffTheRecord) return null; return VariationsAssociatedData.getFeedbackMap(); } }
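/*
 * Hedged illustration, not part of the Chromium file above: VariationsFeedbackSource
 * returns null from getFeedback() for off-the-record profiles, so any consumer has to
 * tolerate null maps. The small collector below shows that contract; the class name and
 * the idea of merging several sources into one map are assumptions made for this example,
 * not Chromium's actual feedback-collection code.
 */
package org.chromium.chrome.browser.feedback;

import java.util.HashMap;
import java.util.Map;

class ExampleFeedbackMerger {
    /** Merge the feedback of several sources, skipping sources that opt out with null. */
    static Map<String, String> merge(Iterable<FeedbackSource> sources) {
        Map<String, String> merged = new HashMap<>();
        for (FeedbackSource source : sources) {
            Map<String, String> feedback = source.getFeedback();
            if (feedback != null) {
                merged.putAll(feedback);
            }
        }
        return merged;
    }
}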
243
1,609
#
#  Copyright (c) 2009, Novartis Institutes for BioMedical Research Inc.
#  All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
#       notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
#       copyright notice, this list of conditions and the following
#       disclaimer in the documentation and/or other materials provided
#       with the distribution.
#     * Neither the name of Novartis Institutes for BioMedical Research Inc.
#       nor the names of its contributors may be used to endorse or promote
#       products derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Created by <NAME> and <NAME>, March 2009
from rdkit.ML.Cluster import Butina
from rdkit import DataStructs
import sys, pickle

# sims is the list of similarity thresholds used to generate clusters
sims = [.9, .8, .7, .6]

smis = []
uniq = []
uFps = []
for fileN in sys.argv[1:]:
  # Open the file named by the loop variable (not always argv[1]) and read the
  # pickled column names and fingerprint rows; pickle requires binary mode.
  inF = open(fileN, 'rb')
  cols = pickle.load(inF)
  fps = pickle.load(inF)
  for row in fps:
    nm, smi, fp = row[:3]
    if smi not in smis:
      try:
        fpIdx = uFps.index(fp)
      except ValueError:
        fpIdx = len(uFps)
        uFps.append(fp)
      uniq.append([fp, nm, smi, 'FP_%d' % fpIdx] + row[3:])
      smis.append(smi)


def distFunc(a, b):
  return 1. - DataStructs.DiceSimilarity(a[0], b[0])


for sim in sims:
  clusters = Butina.ClusterData(uniq, len(uniq), 1. - sim, False, distFunc)
  print('Sim: %.2f, nClusters: %d' % (sim, len(clusters)), file=sys.stderr)
  for i, cluster in enumerate(clusters):
    for pt in cluster:
      uniq[pt].append(str(i + 1))
  cols.append('cluster_thresh_%d' % (int(100 * sim)))

print(' '.join(cols))
for row in uniq:
  print(' '.join(row[1:]))
997
967
#include <sys/types.h> int seteuid(uid_t euid) { return 0; } int initgroups(const char *user, gid_t group) { return 0; }
58
5,169
<gh_stars>1000+ { "name": "SCNRecorder", "version": "2.3.0", "summary": "A lags-free recorder of ARKit and SceneKit for iOS in Swift", "homepage": "https://github.com/gorastudio/SCNRecorder", "license": { "type": "MIT", "file": "LICENSE.md" }, "authors": { "<NAME>": "<EMAIL>" }, "source": { "git": "https://github.com/gorastudio/SCNRecorder.git", "tag": "2.3.0" }, "module_name": "SCNRecorder", "module_map": "SCNRecorder.modulemap", "swift_versions": "5.2", "platforms": { "ios": "12.0" }, "source_files": [ "SCNRecorder.h", "Sources/**/*.{h,m,swift}" ], "public_header_files": [ "SCNRecorder.h", "Sources/**/*.h" ], "private_header_files": "Sources/**/*.h", "pod_target_xcconfig": { "GCC_PREPROCESSOR_DEFINITIONS": "GLES_SILENCE_DEPRECATION CI_SILENCE_GL_DEPRECATION" }, "swift_version": "5.2" }
411
7,018
<filename>android/sdk/src/main/java/com/tencent/mtt/hippy/uimanager/NativeGestureProcessor.java /* Tencent is pleased to support the open source community by making Hippy available. * Copyright (C) 2018 THL A29 Limited, a Tencent company. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.tencent.mtt.hippy.uimanager; import android.os.Handler; import android.os.Looper; import android.os.Message; import android.view.MotionEvent; import android.view.ViewConfiguration; import com.tencent.mtt.hippy.dom.node.NodeProps; @SuppressWarnings({"unused"}) public class NativeGestureProcessor { static final int PRESS_IN = 1; static final int PRESS_OUT = 2; private static final int TAP_TIMEOUT = ViewConfiguration.getTapTimeout(); @SuppressWarnings("deprecation") private static final int TOUCH_SLOP = ViewConfiguration.getTouchSlop(); boolean mNoPressIn = false; final Callback mCallback; private Handler mHandler; private float mLastPressInX = 0; private float mLastPressInY = 0; public NativeGestureProcessor(Callback callback) { this.mCallback = callback; } public Handler getGestureHandler() { if (mHandler == null) { mHandler = new GestureHandler(this); } return mHandler; } private Callback getCallback() { return mCallback; } public boolean onTouchEvent(MotionEvent event) { int action = event.getAction() & MotionEvent.ACTION_MASK; boolean handle = false; switch (action) { case MotionEvent.ACTION_DOWN: { if (mCallback.needHandle(NodeProps.ON_PRESS_IN)) { mNoPressIn = false; mLastPressInX = event.getX(); mLastPressInY = event.getY(); getGestureHandler().sendEmptyMessageDelayed(PRESS_IN, TAP_TIMEOUT); handle = true; } else { mNoPressIn = true; } if (mCallback.needHandle(NodeProps.ON_TOUCH_DOWN)) { mCallback.handle(NodeProps.ON_TOUCH_DOWN, event.getX(), event.getY()); handle = true; } if (!handle && mCallback.needHandle(NodeProps.ON_TOUCH_MOVE)) { handle = true; } if (!handle && mCallback.needHandle(NodeProps.ON_TOUCH_END)) { handle = true; } if (!handle && mCallback.needHandle(NodeProps.ON_TOUCH_CANCEL)) { handle = true; } break; } case MotionEvent.ACTION_MOVE: { if (mCallback.needHandle(NodeProps.ON_TOUCH_MOVE)) { mCallback.handle(NodeProps.ON_TOUCH_MOVE, event.getX(), event.getY()); handle = true; } if (!handle && mCallback.needHandle(NodeProps.ON_TOUCH_END)) { handle = true; } if (!handle && mCallback.needHandle(NodeProps.ON_TOUCH_CANCEL)) { handle = true; } if (!mNoPressIn) { float distX = Math.abs(event.getX() - mLastPressInX); float distY = Math.abs(event.getY() - mLastPressInY); if (distX > TOUCH_SLOP || distY > TOUCH_SLOP) { getGestureHandler().removeMessages(PRESS_IN); mNoPressIn = true; } } break; } case MotionEvent.ACTION_UP: { if (mCallback.needHandle(NodeProps.ON_TOUCH_END)) { mCallback.handle(NodeProps.ON_TOUCH_END, event.getX(), event.getY()); handle = true; } if (mNoPressIn && mCallback.needHandle(NodeProps.ON_PRESS_OUT)) { mCallback.handle(NodeProps.ON_PRESS_OUT, event.getX(), event.getY()); handle = true; } else if (!mNoPressIn && mCallback.needHandle(NodeProps.ON_PRESS_OUT)) 
{ getGestureHandler().sendEmptyMessageDelayed(PRESS_OUT, TAP_TIMEOUT); handle = true; } break; } case MotionEvent.ACTION_CANCEL: case MotionEvent.ACTION_OUTSIDE: { if (mCallback.needHandle(NodeProps.ON_TOUCH_CANCEL)) { mCallback.handle(NodeProps.ON_TOUCH_CANCEL, event.getX(), event.getY()); handle = true; } if (mNoPressIn && mCallback.needHandle(NodeProps.ON_PRESS_OUT)) { mCallback.handle(NodeProps.ON_PRESS_OUT, event.getX(), event.getY()); handle = true; } else if (!mNoPressIn && mCallback.needHandle(NodeProps.ON_PRESS_OUT)) { if (getGestureHandler().hasMessages(PRESS_IN)) { getGestureHandler().removeMessages(PRESS_IN); break; } getGestureHandler().sendEmptyMessageDelayed(PRESS_OUT, TAP_TIMEOUT); handle = true; } break; } } return handle; } public interface Callback { boolean needHandle(String type); void handle(String type, float x, float y); } private static class GestureHandler extends android.os.Handler { private final NativeGestureProcessor mDispatcher; private final NativeGestureProcessor.Callback mCallback; public GestureHandler(NativeGestureProcessor dispatcher) { super(Looper.getMainLooper()); mDispatcher = dispatcher; mCallback = mDispatcher.getCallback(); } @Override public void handleMessage(Message msg) { switch (msg.what) { case PRESS_IN: { mCallback.handle(NodeProps.ON_PRESS_IN, -1, -1); mDispatcher.mNoPressIn = true; break; } case PRESS_OUT: { mCallback.handle(NodeProps.ON_PRESS_OUT, -1, -1); break; } } } } }
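/*
 * Hedged illustration, not part of the Hippy SDK sources above: NativeGestureProcessor is
 * driven by a host view that implements its Callback interface and forwards MotionEvents to
 * onTouchEvent(). The minimal host below shows that wiring; the class name, the choice to
 * handle only press-in/press-out, and the Log-based handler body are assumptions made for
 * this example rather than Hippy's real gesture dispatch code.
 */
import android.content.Context;
import android.util.Log;
import android.view.MotionEvent;
import android.view.View;

import com.tencent.mtt.hippy.dom.node.NodeProps;
import com.tencent.mtt.hippy.uimanager.NativeGestureProcessor;

public class GestureHostView extends View implements NativeGestureProcessor.Callback {

    private final NativeGestureProcessor mGestureProcessor = new NativeGestureProcessor(this);

    public GestureHostView(Context context) {
        super(context);
    }

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        // Let the processor decide whether any registered gesture type consumes this event.
        boolean handled = mGestureProcessor.onTouchEvent(event);
        return handled || super.onTouchEvent(event);
    }

    @Override
    public boolean needHandle(String type) {
        // Only press-in/press-out are of interest in this sketch.
        return NodeProps.ON_PRESS_IN.equals(type) || NodeProps.ON_PRESS_OUT.equals(type);
    }

    @Override
    public void handle(String type, float x, float y) {
        // A real host would forward the gesture to the Hippy event bridge; just log it here.
        Log.d("GestureHostView", "gesture " + type + " at (" + x + ", " + y + ")");
    }
}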
2,527
373
<reponame>ADLINK/edk2-platforms /** @file Header file for Virtual Keyboard driver. Copyright (c) 2012 - 2020, Intel Corporation. All rights reserved.<BR> SPDX-License-Identifier: BSD-2-Clause-Patent **/ #ifndef _VIRTUAL_KEYBOARD_H_ #define _VIRTUAL_KEYBOARD_H_ #include <Uefi.h> #include <Library/BaseMemoryLib.h> #include <Library/DebugLib.h> #include <Library/MemoryAllocationLib.h> #include <Library/UefiBootServicesTableLib.h> #include <Library/UefiDriverEntryPoint.h> #include <Library/UefiLib.h> #include <Library/BaseLib.h> #include <Library/PcdLib.h> #include <Library/HiiLib.h> #include <Protocol/AbsolutePointer.h> #include <Protocol/GraphicsOutput.h> #include <Protocol/SimpleTextIn.h> #include <Protocol/SimpleTextInEx.h> #include <Protocol/SimpleTextOut.h> #include <Protocol/HiiDatabase.h> #include <Protocol/HiiImageEx.h> #include <Protocol/HiiPackageList.h> #include <Guid/ConsoleInDevice.h> #include "ComponentName.h" // // Global Variables // extern EFI_DRIVER_BINDING_PROTOCOL gVirtualKeyboardDriverBinding; extern EFI_COMPONENT_NAME_PROTOCOL gVirtualKeyboardComponentName; extern EFI_COMPONENT_NAME2_PROTOCOL gVirtualKeyboardComponentName2; /// /// Debug raw data points /// #define DEBUG_VK_POINTS DEBUG_INFO /// /// Debug data point scaling /// #define DEBUG_VK_POINT_SCALING DEBUG_INFO /// /// Debug key press /// #define DEBUG_VK_KEYS DEBUG_INFO /// /// Debug routine entry and exit /// #define DEBUG_VK_ROUTINE_ENTRY_EXIT DEBUG_VERBOSE /// /// Display the graphics info /// #define DEBUG_VK_GRAPHICS_INFO DEBUG_INFO /// /// Display the timer entry and exit /// #define DEBUG_VK_TIMER_ENTRY_EXIT DEBUG_VERBOSE /// /// Signature /// #define VK_SIGNATURE SIGNATURE_32 ('V', 'K', 'e', 'y') #define VK_NOTIFY_SIGNATURE SIGNATURE_32 ('V', 'K', 'n', 's') /// /// Poll interval /// #define VK_POLL_INTERVAL (1000 * 1000) /// /// Define the touch timeout in poll intervals /// #define VK_REPEAT_TIMEOUT 5 /// /// TPL used to synchronize add/remove from list /// #define TPL_VK_SYNC TPL_NOTIFY /// /// Dimension of an array ( number of elements ) /// #define DIM(x) ( sizeof ( x ) / sizeof ( x [ 0 ])) /// /// Define Key buffer /// #define MAX_KEY_BUF_SIZE 64 /// /// Define Transparent Weight /// #define TRANSPARENCY_WEIGHT 50 typedef struct _VK_CONTEXT VK_CONTEXT; typedef enum _VK_KEY_TYPE { VkKeyNull = 0x0000 | CHAR_NULL, VkKeyBackspace = 0x0000 | CHAR_BACKSPACE, VkKeyTab = 0x0000 | CHAR_TAB, VkKeyEnter = 0x0000 | CHAR_CARRIAGE_RETURN, VkKeyScanMask = 0x1000, VkKeyEsc = 0x1000 | SCAN_ESC, VkKeyLeft = 0x1000 | SCAN_LEFT, VkKeyRight = 0x1000 | SCAN_RIGHT, VkKeyUp = 0x1000 | SCAN_UP, VkKeyDown = 0x1000 | SCAN_DOWN, VkKeyF1 = 0x1000 | SCAN_F1, VkKeyF2 = 0x1000 | SCAN_F2, VkKeyF3 = 0x1000 | SCAN_F3, VkKeyF4 = 0x1000 | SCAN_F4, VkKeyF5 = 0x1000 | SCAN_F5, VkKeyF6 = 0x1000 | SCAN_F6, VkKeyF7 = 0x1000 | SCAN_F7, VkKeyF8 = 0x1000 | SCAN_F8, VkKeyF9 = 0x1000 | SCAN_F9, VkKeyF10 = 0x1000 | SCAN_F10, VkKeyF11 = 0x1000 | SCAN_F11, VkKeyF12 = 0x1000 | SCAN_F12, VkKeySpecificMask = 0x2000, VkKeyShift = 0x2000 | 0x0000, VkKeyCapslock = 0x2000 | 0x0001, VkKeyTwoPage = 0x2000 | 0x0002, VkKeyTypeMaximum = 0xFFFF } VK_KEY_TYPE; typedef enum _VK_PAGE_TYPE { // // +---+---+---+---+---+---+---+---+---+---+---+ // | q | w | e | r | t | y | u | i | o | p |<X|| Line 0 // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ // | | a | s | d | f | g | h | j | k | l | F2| | Line 1 // +-+---+---+---+---+---+---+---+---+---+---+-+ // |Caps | z | x | c | v | b | n | m |aU |Enter| Line 2 // +-----+---+---+---+---+---+---+---+---+---+-+ // | Esc 
|12#| Space |aL |aD |aR | | Line 3 // +-----+---+-------------------+---+---+---+-+ // VkPage0, // // +---+---+---+---+---+---+---+---+---+---+---+ // | Q | W | E | R | T | Y | U | I | O | P |<X|| Line 0 // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ // | | A | S | D | F | G | H | J | K | L | F2| | Line 1 // +-+---+---+---+---+---+---+---+---+---+---+-+ // |Caps | Z | X | C | V | B | N | M |aU |Enter| Line 2 // +-----+---+---+---+---+---+---+---+---+---+-+ // | Esc |12#| Space |aL |aD |aR | | Line 3 // +-----+---+-------------------+---+---+---+-+ // VkPage1, // // +---+---+---+---+---+---+---+---+---+---+---+ // | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 0 |<X|| Line 0 // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ // | |F1 |F2 |F3 |F4 |F5 |F6 |F7 |F8 |F9 |F10| | Line 1 // +-+---+---+---+---+---+---+---+---+---+---+-+ // |Shift| . | ; | ' | , | . | / |F11|F12|Enter| Line 2 // +-----+---+---+---+---+---+---+---+---+---+-+ // | Esc |12#| Space | \ | - | = | | Line 3 // +-----+---+-------------------+---+---+---+-+ // VkPage2, // // +---+---+---+---+---+---+---+---+---+---+---+ // | ! | @ | # | $ | % | ^ | & | * | ( | ) |<X|| Line 0 // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ // | |F1 |F2 |F3 |F4 |F5 |F6 |F7 |F8 |F9 |F10| | Line 1 // +-+---+---+---+---+---+---+---+---+---+---+-+ // |Shift| ~ | : | " | < | > | ? |F11|F12|Enter| Line 2 // +-----+---+---+---+---+---+---+---+---+---+-+ // | Esc |12#| Space | | | _ | + | | Line 3 // +-----+---+-------------------+---+---+---+-+ // VkPage3, VkPageMaximum } VK_PAGE_TYPE; typedef enum VK_DISPLAY_ATTRIBUTE { VkDisplayAttributeNone, /// No keyboard displayed VkDisplayAttributeFullTop, /// Full keyboard display at top VkDisplayAttributeFullBottom, /// Full keyboard display at bottom VkDisplayAttributeSimpleTop, /// Simple keyboard display at top VkDisplayAttributeSimpleBottom, /// Simple keyboard display at bottom VkDisplayAttributeMaximum } VK_DISPLAY_ATTRIBUTE; typedef struct _VK_STRUCT { UINT16 DisStartX; UINT16 DisStartY; UINT16 DisEndX; UINT16 DisEndY; VK_KEY_TYPE PageFont[VkPageMaximum]; } VK_STRUCT; typedef struct _VK_NOTIFY { UINTN Signature; EFI_KEY_DATA KeyData; EFI_KEY_NOTIFY_FUNCTION KeyNotificationFn; LIST_ENTRY NotifyEntry; } VK_NOTIFY; /// /// Virtual Keyboard context /// struct _VK_CONTEXT { /// /// Structure identification /// UINTN Signature; /// /// Controller Handle /// EFI_HANDLE Controller; /// /// Upper level API /// EFI_SIMPLE_TEXT_INPUT_PROTOCOL SimpleTextIn; /// /// Simple Text In EX /// EFI_SIMPLE_TEXT_INPUT_EX_PROTOCOL SimpleTextInEx; /// /// Lower level APIs /// EFI_ABSOLUTE_POINTER_PROTOCOL *AbsolutePointer; EFI_GRAPHICS_OUTPUT_PROTOCOL *GraphicsOutput; /// /// Flag when the last poll indicated a touch event /// BOOLEAN TouchActive; /// /// Time to poll for touch input /// EFI_EVENT TimerEvent; /// /// HII handle to get image data used /// EFI_HII_HANDLE HiiHandle; EFI_HII_IMAGE_EX_PROTOCOL *HiiImageEx; /// /// Keyboard body background buffer information /// EFI_GRAPHICS_OUTPUT_BLT_PIXEL *VkBodyBackgroundBltBuffer; EFI_GRAPHICS_OUTPUT_BLT_PIXEL *VkBodyCompoundBltBuffer; UINTN VkBodyBltSize; UINTN VkBodyBltStartX; UINTN VkBodyBltStartY; UINTN VkBodyBltHeight; UINTN VkBodyBltWidth; BOOLEAN IsBackgroundChanged; /// /// Icon buffer information /// EFI_GRAPHICS_OUTPUT_BLT_PIXEL *IconBltBuffer; UINTN IconBltSize; UINTN IconBltHeight; UINTN IconBltWidth; /// /// Full icon background buffer information /// EFI_GRAPHICS_OUTPUT_BLT_PIXEL *FullIconBackBuffer; UINTN FullIconBackStartX; UINTN FullIconBackStartY; UINTN 
FullIconBackHeight; UINTN FullIconBackWidth; UINTN FullIconBackSize; BOOLEAN FullIconUpdatedFlag; /// /// Simple icon background buffer information /// EFI_GRAPHICS_OUTPUT_BLT_PIXEL *SimIconBackBuffer; UINTN SimIconBackStartX; UINTN SimIconBackStartY; UINTN SimIconBackHeight; UINTN SimIconBackWidth; UINTN SimIconBackSize; BOOLEAN SimIconUpdatedFlag; /// /// Small Icon /// EFI_IMAGE_INPUT *SmallIcon; /// /// Full Icon /// EFI_IMAGE_INPUT *FullIcon; /// /// Simple Key body /// EFI_IMAGE_INPUT *SimKeyBody; /// /// Digital key body /// EFI_IMAGE_INPUT *DigKeyBody; /// /// Capital Letter Key board /// EFI_IMAGE_INPUT *CapLeKeyBody; /// /// Screen check buffer. /// This is used to check if screen is kept scrolling up. /// EFI_GRAPHICS_OUTPUT_BLT_PIXEL *ScreenCheckBuffer; UINTN ScreenCheckBufferSize; /// /// Key state /// BOOLEAN KeyPressed; /// /// Keyboard display status /// VK_DISPLAY_ATTRIBUTE CurrentKeyboardDisplay; VK_DISPLAY_ATTRIBUTE TargetKeyboardDisplay; /// /// Keyboard icon display status /// BOOLEAN IsIconShowed; UINT8 IconReDrawCheck; /// /// Keyboard body Image address /// Size of KeyboardBodyPtr must larger than mFullKeyboardBody /// UINT32 NumOfKeysInfo; VK_STRUCT KeyboardBodyPtr[50]; /// /// KeyBuffer /// EFI_EVENT KeyNotifyProcessEvent; EFI_KEY_TOGGLE_STATE KeyToggleState; EFI_KEY_DATA Keybuffer[MAX_KEY_BUF_SIZE]; UINT8 KeyStartIndex; UINT8 KeyEndIndex; UINT16 KeyTouchedTimeOut; BOOLEAN IsShiftKeyFlag; BOOLEAN IsCapsLockFlag; BOOLEAN IsSupportPartialKey; BOOLEAN IsRedrawUpdateUI; VK_PAGE_TYPE PageNumber; LIST_ENTRY NotifyList; }; /// /// Locate VK_CONTEXT from protocol /// #define VK_CONTEXT_FROM_PROTOCOL(a) CR (a, VK_CONTEXT, SimpleTextIn, VK_SIGNATURE) #define VK_CONTEXT_FROM_SIMPLETEXTINEX_PROTOCOL(a) CR (a, VK_CONTEXT, SimpleTextInEx, VK_SIGNATURE) #define VK_CONTEXT_FROM_VKBD_PROTOCOL(a) CR (a, VK_CONTEXT, VkbdProtocol, VK_SIGNATURE) /** Start the virtual keyboard driver This routine allocates the necessary resources for the driver. This routine is called by VirtualKeyboardDriverStart to complete the driver initialization. @param[in, out] VkContext Address of an VK_CONTEXT structure @param[in] Controller Handle of device to work with. @retval EFI_SUCCESS Driver API properly initialized **/ EFI_STATUS VkApiStart ( IN OUT VK_CONTEXT *VkContext, IN EFI_HANDLE Controller ); /** Stop the virtual keyboard driver This routine releases the resources allocated by VKApiStart. This routine is called by VirtualKeyboardDriverStop to initiate the driver shutdown. @param[in] VkContext Address of an VK_CONTEXT structure **/ VOID VkApiStop ( IN VK_CONTEXT *VkContext ); /** Resets the input device hardware. The Reset() function resets the input device hardware. As part of initialization process, the firmware/device will make a quick but reasonable attempt to verify that the device is functioning. If the ExtendedVerification flag is TRUE the firmware may take an extended amount of time to verify the device is operating on reset. Otherwise the reset operation is to occur as quickly as possible. The hardware verification process is not defined by this specification and is left up to the platform firmware or driver to implement. @param[in] This A pointer to the EFI_SIMPLE_TEXT_INPUT_EX_PROTOCOL instance. @param[in] ExtendedVerification Indicates that the driver may perform a more exhaustive verification operation of the device during reset. @retval EFI_SUCCESS The device was reset. @retval EFI_DEVICE_ERROR The device is not functioning correctly and could not be reset. 
**/ EFI_STATUS EFIAPI VkKeyboardReset ( IN EFI_SIMPLE_TEXT_INPUT_PROTOCOL *This, IN BOOLEAN ExtendedVerification ); /** Reads the next keystroke from the input device. The WaitForKey Event can be used to test for existence of a keystroke via WaitForEvent () call. @param[in] This Protocol instance pointer. @param[out] Key Driver may perform diagnostics on reset. @retval EFI_SUCCESS The keystroke information was returned. @retval EFI_NOT_READY There was no keystroke data available. @retval EFI_DEVICE_ERROR The keystroke information was not returned due to hardware errors. **/ EFI_STATUS EFIAPI VkKeyboardReadKeyStroke ( IN EFI_SIMPLE_TEXT_INPUT_PROTOCOL *This, OUT EFI_INPUT_KEY *Key ); /** Resets the input device hardware. The Reset() function resets the input device hardware. As part of initialization process, the firmware/device will make a quick but reasonable attempt to verify that the device is functioning. If the ExtendedVerification flag is TRUE the firmware may take an extended amount of time to verify the device is operating on reset. Otherwise the reset operation is to occur as quickly as possible. The hardware verification process is not defined by this specification and is left up to the platform firmware or driver to implement. @param[in] This A pointer to the EFI_SIMPLE_TEXT_INPUT_EX_PROTOCOL instance. @param[in] ExtendedVerification Indicates that the driver may perform a more exhaustive verification operation of the device during reset. @retval EFI_SUCCESS The device was reset. @retval EFI_DEVICE_ERROR The device is not functioning correctly and could not be reset. **/ EFI_STATUS EFIAPI VkKeyboardResetEx ( IN EFI_SIMPLE_TEXT_INPUT_EX_PROTOCOL *This, IN BOOLEAN ExtendedVerification ); /** Reads the next keystroke from the input device. @param[in] This Protocol instance pointer. @param[out] KeyData A pointer to a buffer that is filled in with the keystroke state data for the key that was pressed. @retval EFI_SUCCESS The keystroke information was returned. @retval EFI_NOT_READY There was no keystroke data available. @retval EFI_INVALID_PARAMETER This or KeyData is NULL. **/ EFI_STATUS EFIAPI VkKeyboardReadKeyStrokeEx ( IN EFI_SIMPLE_TEXT_INPUT_EX_PROTOCOL *This, OUT EFI_KEY_DATA *KeyData ); /** Set certain state for the input device. @param[in] This Protocol instance pointer. @param[in] KeyToggleState A pointer to the EFI_KEY_TOGGLE_STATE to set the state for the input device. @retval EFI_SUCCESS The device state was set appropriately. @retval EFI_INVALID_PARAMETER This or KeyToggleState is NULL. **/ EFI_STATUS EFIAPI VkKeyboardSetState ( IN EFI_SIMPLE_TEXT_INPUT_EX_PROTOCOL *This, IN EFI_KEY_TOGGLE_STATE *KeyToggleState ); /** Register a notification function for a particular keystroke for the input device. @param[in] This Protocol instance pointer. @param[in] KeyData A pointer to a buffer that is filled in with the keystroke information data for the key that was pressed. @param[in] KeyNotificationFunction Points to the function to be called when the key sequence is typed specified by KeyData. @param[out] NotifyHandle Points to the unique handle assigned to the registered notification. @retval EFI_SUCCESS The notification function was registered successfully. @retval EFI_OUT_OF_RESOURCES Unable to allocate resources for necessary data structures. @retval EFI_INVALID_PARAMETER KeyData or NotifyHandle or KeyNotificationFunction is NULL. 
**/ EFI_STATUS EFIAPI VkKeyboardRegisterKeyNotify ( IN EFI_SIMPLE_TEXT_INPUT_EX_PROTOCOL *This, IN EFI_KEY_DATA *KeyData, IN EFI_KEY_NOTIFY_FUNCTION KeyNotificationFunction, OUT EFI_HANDLE *NotifyHandle ); /** Remove a registered notification function from a particular keystroke. @param[in] This Protocol instance pointer. @param[in] NotificationHandle The handle of the notification function being unregistered. @retval EFI_SUCCESS The notification function was unregistered successfully. @retval EFI_INVALID_PARAMETER The NotificationHandle is invalid **/ EFI_STATUS EFIAPI VkKeyboardUnregisterKeyNotify ( IN EFI_SIMPLE_TEXT_INPUT_EX_PROTOCOL *This, IN EFI_HANDLE NotificationHandle ); /** Draw key board on the display @param[in] VkContext Graphic Protocol for draw the alphabet. @retval EFI_SUCCESS Draw keyboard was done. @retval EFI_UNSUPPORTED Did not get key mapping table. **/ EFI_STATUS DrawKeyboardLayout ( IN VK_CONTEXT *VkContext ); /** Clear the keyboard body @param[in] VkContext Code context. @retval EFI_SUCCESS Clear rectangle is done. **/ EFI_STATUS HideVkBody ( IN VK_CONTEXT *VkContext ); /** Clear the keyboard icon @param[in] VkContext Code context. @retval EFI_SUCCESS Clear rectangle is done. **/ EFI_STATUS HideVkIcon ( IN VK_CONTEXT *VkContext ); /** Use to draw the keyboard icon. @param[in] VkContext Pointer to virtual keyboard's context @param[in] VkImage Image of keyboard to display on the screen. @param[in] Attribute Attribute of keyboard to display on the screen. @retval EFI_SUCCESS ConsoleControl has been flipped to graphics and keyboard icon displayed. @retval EFI_UNSUPPORTED KeyboardFile not found @retval EFI_INVALID_PARAMETER Attribute is unknown. **/ EFI_STATUS EFIAPI DrawVkIcon ( IN VK_CONTEXT *VkContext, IN EFI_IMAGE_INPUT *VkImage, IN VK_DISPLAY_ATTRIBUTE Attribute ); /** Use to draw the keyboard. @param[in] VkContext Pointer to virtual keyboard's context @param[in] VkImage Image of keyboard to display on the screen. @param[in] Attribute Attribute of keyboard to display on the screen. @retval EFI_SUCCESS ConsoleControl has been flipped to graphics and keyboard displayed. @retval EFI_UNSUPPORTED KeyboardFile not found @retval EFI_INVALID_PARAMETER Attribute is unknown. **/ EFI_STATUS EFIAPI DrawVkBody ( IN VK_CONTEXT *VkContext, IN EFI_IMAGE_INPUT *VkImage, IN VK_DISPLAY_ATTRIBUTE Attribute ); /** Get unicode by VkContext->PageNumber and VkContext->KeyboardBodyPtr. @param[in] VkContext Address of an VK_CONTEXT structure. @param[in] KeyItem Key Item. @param[out] FontPtr Follow VkContext->PageNumber to translate font unicode. @retval EFI_SUCCESS Finish translating FontPtr. @retval EFI_INVALID_PARAMETER VkContext or FontPtr is NULL. **/ EFI_STATUS VkGetMappingFont ( IN VK_CONTEXT *VkContext, IN VK_STRUCT KeyItem, OUT UINT32 *FontPtr ); /** This routine is used to check if icon has been cleared. @param[in] VkContext Pointer to virtual keyboard's context @retval EFI_SUCCESS Function completed. **/ EFI_STATUS CheckIconCleared ( IN VK_CONTEXT *VkContext ); /** ConvertCoordinate - Convert the touch panel's coordinate to display's coordinate. @param[in] VkContext Virtual Keyboard context. @param[in] Point The coordinate reported from touch panel. @param[out] TouchX The coordinate X converted to display panel. @param[out] TouchY The coordinate Y converted to display panel.. @retval EFI_SUCCESS Convert success. 
**/ EFI_STATUS ConvertCoordinate ( IN VK_CONTEXT *VkContext, IN EFI_ABSOLUTE_POINTER_STATE Point, OUT UINT32 *TouchX, OUT UINT32 *TouchY ); /** This routine is used to check if screen has been cleared. @param[in] VkContext Pointer to virtual keyboard's context @retval EFI_SUCCESS Function completed. **/ EFI_STATUS CheckScreenCleared ( IN VK_CONTEXT *VkContext ); /** This routine is used to check if background beneath virtual keyboard has been cleared. @param[in] VkContext Pointer to virtual keyboard's context @retval EFI_SUCCESS Function completed. **/ EFI_STATUS CheckBackgroundChanged ( IN VK_CONTEXT *VkContext ); /** To prevent screen keyboard layout occur scroll up @param[in, out] VkContext Address of an VK_CONTEXT structure. **/ VOID PreventScreenScrollUp ( IN OUT VK_CONTEXT *VkContext ); /** Set the position of character. @param[in] VkContext Address of an VK_CONTEXT structure. @param[in] DestX X position. @param[in] DestY Y position. @retval EFI_SUCCESS Success for the function. @retval Others An unexpected error occurred. **/ EFI_STATUS SetCharacterPosition ( IN VK_CONTEXT *VkContext, IN UINT32 DestX, IN UINT32 DestY ); /** Set the keyboard layout. @param[in] VkContext Graphic Protocol for draw the alphabet. @param[in] Index The layout selected. @retval EFI_SUCCESS Draw keyboard was done. @retval Others An unexpected error occurred. **/ EFI_STATUS KeyboardLayoutHandler ( IN VK_CONTEXT *VkContext, IN UINT32 Index ); /** Save the background blt buffer. @param[in] VkContext Address of an VK_CONTEXT structure. @param[in] BltSize Size of blt. @retval EFI_SUCCESS Success for the function. @retval EFI_OUT_OF_RESOURCES Allocate memory failed. **/ EFI_STATUS EFIAPI SaveVkBodyBackgroundBltBuffer ( IN VK_CONTEXT *VkContext, IN UINTN BltSize ); /** Restore the background blt buffer. @param[in] VkContext Address of an VK_CONTEXT structure. @retval EFI_SUCCESS Success for the function. @retval EFI_UNSUPPORTED Input blt buffer is NULL. @retval Others An unexpected error occurred. **/ EFI_STATUS EFIAPI RestoreVkBodyBackgroundBltBuffer ( IN VK_CONTEXT *VkContext ); /** Save the icon background blt buffer. @param[in] VkContext Address of an VK_CONTEXT structure. @param[in] IconType The icon type. @retval EFI_SUCCESS Success for the function. @retval EFI_OUT_OF_RESOURCES Allocate memory failed. @retval Others An unexpected error occurred. **/ EFI_STATUS EFIAPI SaveVkIconBackgroundBltBuffer ( IN VK_CONTEXT *VkContext, IN VK_DISPLAY_ATTRIBUTE IconType ); #endif
11,709
778
<reponame>clazaro/Kratos<filename>applications/ParticleMechanicsApplication/custom_strategies/strategies/mpm_residual_based_newton_raphson_strategy.hpp // | / | // ' / __| _` | __| _ \ __| // . \ | ( | | ( |\__ \. // _|\_\_| \__,_|\__|\___/ ____/ // Multi-Physics // // License: BSD License // Kratos default license: kratos/license.txt // // Main authors: <NAME>, <NAME> // // #if !defined(KRATOS_MPM_RESIDUAL_BASED_NEWTON_RAPHSON_STRATEGY ) #define KRATOS_MPM_RESIDUAL_BASED_NEWTON_RAPHSON_STRATEGY /* System includes */ /* External includes */ #include "solving_strategies/strategies/residualbased_newton_raphson_strategy.h" // Application includes #include "particle_mechanics_application_variables.h" namespace Kratos { /**@name Kratos Globals */ /*@{ */ /*@} */ /**@name Type Definitions */ /*@{ */ /*@} */ /**@name Enum's */ /*@{ */ /*@} */ /**@name Functions */ /*@{ */ /*@} */ /**@name Kratos Classes */ /*@{ */ /// Short class definition. /** * @class MPMResidualBasedNewtonRaphsonStrategy * @ingroup KratosParticle * @brief Newton Raphson strategy suited for MPM simulations * @details This strategy iterates until the convergence is achieved (or the maximum number of iterations is achieved) using a Newton Raphson algorithm */ template<class TSparseSpace, class TDenseSpace, class TLinearSolver > class MPMResidualBasedNewtonRaphsonStrategy : public ResidualBasedNewtonRaphsonStrategy<TSparseSpace, TDenseSpace, TLinearSolver> { public: /**@name Type Definitions */ /*@{ */ typedef ConvergenceCriteria<TSparseSpace, TDenseSpace> TConvergenceCriteriaType; /** Counted pointer of ClassName */ KRATOS_CLASS_POINTER_DEFINITION(MPMResidualBasedNewtonRaphsonStrategy); typedef ImplicitSolvingStrategy<TSparseSpace, TDenseSpace, TLinearSolver> BaseType; typedef typename BaseType::TBuilderAndSolverType TBuilderAndSolverType; typedef typename BaseType::TDataType TDataType; typedef TSparseSpace SparseSpaceType; typedef typename BaseType::TSchemeType TSchemeType; typedef typename BaseType::DofsArrayType DofsArrayType; typedef typename BaseType::TSystemMatrixType TSystemMatrixType; typedef typename BaseType::TSystemVectorType TSystemVectorType; typedef typename BaseType::LocalSystemVectorType LocalSystemVectorType; typedef typename BaseType::LocalSystemMatrixType LocalSystemMatrixType; typedef typename BaseType::TSystemMatrixPointerType TSystemMatrixPointerType; typedef typename BaseType::TSystemVectorPointerType TSystemVectorPointerType; /*@} */ /**@name Life Cycle */ /*@{ */ /** Constructors. 
*/ MPMResidualBasedNewtonRaphsonStrategy( ModelPart& rModelPart, bool MoveMeshFlag = false ) : ResidualBasedNewtonRaphsonStrategy<TSparseSpace, TDenseSpace, TLinearSolver>( rModelPart, MoveMeshFlag) { } MPMResidualBasedNewtonRaphsonStrategy( ModelPart& rModelPart, typename TSchemeType::Pointer pScheme, typename TLinearSolver::Pointer pNewLinearSolver, typename TConvergenceCriteriaType::Pointer pNewConvergenceCriteria, int MaxIterations = 30, bool CalculateReactions = false, bool ReformDofSetAtEachStep = false, bool MoveMeshFlag = false ) : ResidualBasedNewtonRaphsonStrategy<TSparseSpace, TDenseSpace, TLinearSolver>( rModelPart, pScheme, pNewLinearSolver, pNewConvergenceCriteria, MaxIterations, CalculateReactions, ReformDofSetAtEachStep, MoveMeshFlag) { } MPMResidualBasedNewtonRaphsonStrategy( ModelPart& rModelPart, typename TSchemeType::Pointer pScheme, typename TLinearSolver::Pointer pNewLinearSolver, typename TConvergenceCriteriaType::Pointer pNewConvergenceCriteria, typename TBuilderAndSolverType::Pointer pNewBuilderAndSolver, int MaxIterations = 30, bool CalculateReactions = false, bool ReformDofSetAtEachStep = false, bool MoveMeshFlag = false ) : ResidualBasedNewtonRaphsonStrategy<TSparseSpace, TDenseSpace, TLinearSolver>( rModelPart, pScheme, pNewLinearSolver, pNewConvergenceCriteria, pNewBuilderAndSolver, MaxIterations, CalculateReactions, ReformDofSetAtEachStep, MoveMeshFlag) { } /** Destructor. */ virtual ~MPMResidualBasedNewtonRaphsonStrategy() { } /** * @brief Solves the current step. This function returns true if a solution has been found, false otherwise. */ bool SolveSolutionStep() override { typename TSchemeType::Pointer p_scheme = this->GetScheme(); typename TBuilderAndSolverType::Pointer p_builder_and_solver = this->GetBuilderAndSolver(); TSystemMatrixType& rA = *(this->mpA); TSystemVectorType& rDx = *(this->mpDx); TSystemVectorType& rb = *(this->mpb); DofsArrayType& r_dof_set = p_builder_and_solver->GetDofSet(); // Initializing the parameters of the Newton-Raphson cycle unsigned int iteration_number = 1; BaseType::GetModelPart().GetProcessInfo()[NL_ITERATION_NUMBER] = iteration_number; bool is_converged = false; p_scheme->InitializeNonLinIteration(BaseType::GetModelPart(), rA, rDx, rb); is_converged = this->mpConvergenceCriteria->PreCriteria(BaseType::GetModelPart(), r_dof_set, rA, rDx, rb); KRATOS_INFO_IF("MPMNewtonRaphsonStrategy", this->GetEchoLevel() >= 3) << "PreCriteria:" << "\tIs_converged: " << is_converged << "\tmRebuildLevel: " << BaseType::mRebuildLevel << "\tmStiffnessMatrixIsBuilt: " << BaseType::mStiffnessMatrixIsBuilt << std::endl; if (BaseType::mRebuildLevel > 1 || BaseType::mStiffnessMatrixIsBuilt == false) { KRATOS_INFO_IF("MPMNewtonRaphsonStrategy", this->GetEchoLevel() >= 3) << "SetToZero the matrix and vectors of the system" << std::endl; TSparseSpace::SetToZero(rA); TSparseSpace::SetToZero(rDx); TSparseSpace::SetToZero(rb); KRATOS_INFO_IF("MPMNewtonRaphsonStrategy", this->GetEchoLevel() >= 3) << "Build and Solve" << std::endl; p_builder_and_solver->BuildAndSolve(p_scheme, BaseType::GetModelPart(), rA, rDx, rb); } else { TSparseSpace::SetToZero(rDx); // rDx=0.00; TSparseSpace::SetToZero(rb); p_builder_and_solver->BuildRHSAndSolve(p_scheme, BaseType::GetModelPart(), rA, rDx, rb); KRATOS_INFO_IF("MPMNewtonRaphsonStrategy", this->GetEchoLevel() >= 3) << "BuildRHSAndSolve" << std::endl; } if (this->GetEchoLevel() == 3) // If it is needed to print the debug info { KRATOS_INFO("MPMNewtonRaphsonStrategy") << "SystemMatrix = " << rA << std::endl; 
KRATOS_INFO("MPMNewtonRaphsonStrategy") << "solution obtained = " << rDx << std::endl; KRATOS_INFO("MPMNewtonRaphsonStrategy") << "RHS = " << rb << std::endl; } else if (this->GetEchoLevel() == 4) // Print to matrix market file { std::stringstream matrix_market_name; matrix_market_name << "A_" << BaseType::GetModelPart().GetProcessInfo()[TIME] << "_" << iteration_number << ".mm"; TSparseSpace::WriteMatrixMarketMatrix((char*)(matrix_market_name.str()).c_str(), rA, false); std::stringstream matrix_market_vectname; matrix_market_vectname << "b_" << BaseType::GetModelPart().GetProcessInfo()[TIME] << "_" << iteration_number << ".mm.rhs"; TSparseSpace::WriteMatrixMarketVector((char*)(matrix_market_vectname.str()).c_str(), rb); } // Update results r_dof_set = p_builder_and_solver->GetDofSet(); p_scheme->Update(BaseType::GetModelPart(), r_dof_set, rA, rDx, rb); p_scheme->FinalizeNonLinIteration(BaseType::GetModelPart(), rA, rDx, rb); // Move the mesh if needed if (BaseType::MoveMeshFlag() == true) BaseType::MoveMesh(); if (is_converged == true) { // Initialisation of the convergence criteria this->mpConvergenceCriteria->InitializeSolutionStep(BaseType::GetModelPart(), r_dof_set, rA, rDx, rb); if (this->mpConvergenceCriteria->GetActualizeRHSflag() == true) { TSparseSpace::SetToZero(rb); p_builder_and_solver->BuildRHS(p_scheme, BaseType::GetModelPart(), rb); } is_converged = this->mpConvergenceCriteria->PostCriteria(BaseType::GetModelPart(), r_dof_set, rA, rDx, rb); } KRATOS_INFO_IF("MPMNewtonRaphsonStrategy", this->GetEchoLevel() >= 3 && !is_converged) << "Starting Nonlinear iteration" << std::endl; // Iteration Loop while (is_converged == false && iteration_number++ < this->mMaxIterationNumber) { // Setting the number of iteration BaseType::GetModelPart().GetProcessInfo()[NL_ITERATION_NUMBER] = iteration_number; p_scheme->InitializeNonLinIteration(BaseType::GetModelPart(), rA, rDx, rb); is_converged = this->mpConvergenceCriteria->PreCriteria(BaseType::GetModelPart(), r_dof_set, rA, rDx, rb); // Call the linear system solver to find the correction rDx. It is not called if there is no system to solve if (SparseSpaceType::Size(rDx) != 0) { if (BaseType::mRebuildLevel > 1 || BaseType::mStiffnessMatrixIsBuilt == false) { KRATOS_INFO_IF("MPMNewtonRaphsonStrategy", this->GetEchoLevel() >= 3) << "Iteration Number: " << iteration_number << std::endl; if (this->GetKeepSystemConstantDuringIterations() == false) { TSparseSpace::SetToZero(rA); TSparseSpace::SetToZero(rDx); TSparseSpace::SetToZero(rb); KRATOS_INFO_IF("MPMNewtonRaphsonStrategy", this->GetEchoLevel() >= 3) << "Build and Solve" << std::endl; p_builder_and_solver->BuildAndSolve(p_scheme, BaseType::GetModelPart(), rA, rDx, rb); } else { TSparseSpace::SetToZero(rDx); TSparseSpace::SetToZero(rb); KRATOS_INFO_IF("MPMNewtonRaphsonStrategy", this->GetEchoLevel() >= 3) << "Build RHS and Solve" << std::endl; p_builder_and_solver->BuildRHSAndSolve(p_scheme, BaseType::GetModelPart(), rA, rDx, rb); } } else { TSparseSpace::SetToZero(rDx); TSparseSpace::SetToZero(rb); KRATOS_INFO_IF("MPMNewtonRaphsonStrategy", this->GetEchoLevel() >= 3) << "Build RHS and Solve" << std::endl; p_builder_and_solver->BuildRHSAndSolve(p_scheme, BaseType::GetModelPart(), rA, rDx, rb); } } else { KRATOS_WARNING("MPMNewtonRaphsonStrategy") << "ATTENTION: no free DOFs!! 
" << std::endl; } // Updating the results stored in the database r_dof_set = p_builder_and_solver->GetDofSet(); p_scheme->Update(BaseType::GetModelPart(), r_dof_set, rA, rDx, rb); p_scheme->FinalizeNonLinIteration(BaseType::GetModelPart(), rA, rDx, rb); // Move the mesh if needed if (BaseType::MoveMeshFlag() == true) BaseType::MoveMesh(); // If converged if (is_converged == true) { if (this->mpConvergenceCriteria->GetActualizeRHSflag() == true) { TSparseSpace::SetToZero(rb); p_builder_and_solver->BuildRHS(p_scheme, BaseType::GetModelPart(), rb); } is_converged = this->mpConvergenceCriteria->PostCriteria(BaseType::GetModelPart(), r_dof_set, rA, rDx, rb); } } // Plot a warning if the maximum number of iterations is exceeded if (iteration_number >= this->mMaxIterationNumber && BaseType::GetModelPart().GetCommunicator().MyPID() == 0) { if (this->GetEchoLevel() > 1) this->MaxIterationsExceeded(); } return is_converged; } }; /* Class MPMResidualBasedNewtonRaphsonStrategy */ } /* namespace Kratos.*/ #endif /* KRATOS_MPM_RESIDUAL_BASED_NEWTON_RAPHSON_STRATEGY defined */
5,746
3,372
<gh_stars>1000+ /* * Copyright 2016-2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.sagemaker.model.transform; import java.util.List; import javax.annotation.Generated; import com.amazonaws.SdkClientException; import com.amazonaws.services.sagemaker.model.*; import com.amazonaws.protocol.*; import com.amazonaws.annotation.SdkInternalApi; /** * CreateFeatureGroupRequestMarshaller */ @Generated("com.amazonaws:aws-java-sdk-code-generator") @SdkInternalApi public class CreateFeatureGroupRequestMarshaller { private static final MarshallingInfo<String> FEATUREGROUPNAME_BINDING = MarshallingInfo.builder(MarshallingType.STRING) .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("FeatureGroupName").build(); private static final MarshallingInfo<String> RECORDIDENTIFIERFEATURENAME_BINDING = MarshallingInfo.builder(MarshallingType.STRING) .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("RecordIdentifierFeatureName").build(); private static final MarshallingInfo<String> EVENTTIMEFEATURENAME_BINDING = MarshallingInfo.builder(MarshallingType.STRING) .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("EventTimeFeatureName").build(); private static final MarshallingInfo<List> FEATUREDEFINITIONS_BINDING = MarshallingInfo.builder(MarshallingType.LIST) .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("FeatureDefinitions").build(); private static final MarshallingInfo<StructuredPojo> ONLINESTORECONFIG_BINDING = MarshallingInfo.builder(MarshallingType.STRUCTURED) .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("OnlineStoreConfig").build(); private static final MarshallingInfo<StructuredPojo> OFFLINESTORECONFIG_BINDING = MarshallingInfo.builder(MarshallingType.STRUCTURED) .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("OfflineStoreConfig").build(); private static final MarshallingInfo<String> ROLEARN_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD) .marshallLocationName("RoleArn").build(); private static final MarshallingInfo<String> DESCRIPTION_BINDING = MarshallingInfo.builder(MarshallingType.STRING) .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("Description").build(); private static final MarshallingInfo<List> TAGS_BINDING = MarshallingInfo.builder(MarshallingType.LIST).marshallLocation(MarshallLocation.PAYLOAD) .marshallLocationName("Tags").build(); private static final CreateFeatureGroupRequestMarshaller instance = new CreateFeatureGroupRequestMarshaller(); public static CreateFeatureGroupRequestMarshaller getInstance() { return instance; } /** * Marshall the given parameter object. 
*/ public void marshall(CreateFeatureGroupRequest createFeatureGroupRequest, ProtocolMarshaller protocolMarshaller) { if (createFeatureGroupRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(createFeatureGroupRequest.getFeatureGroupName(), FEATUREGROUPNAME_BINDING); protocolMarshaller.marshall(createFeatureGroupRequest.getRecordIdentifierFeatureName(), RECORDIDENTIFIERFEATURENAME_BINDING); protocolMarshaller.marshall(createFeatureGroupRequest.getEventTimeFeatureName(), EVENTTIMEFEATURENAME_BINDING); protocolMarshaller.marshall(createFeatureGroupRequest.getFeatureDefinitions(), FEATUREDEFINITIONS_BINDING); protocolMarshaller.marshall(createFeatureGroupRequest.getOnlineStoreConfig(), ONLINESTORECONFIG_BINDING); protocolMarshaller.marshall(createFeatureGroupRequest.getOfflineStoreConfig(), OFFLINESTORECONFIG_BINDING); protocolMarshaller.marshall(createFeatureGroupRequest.getRoleArn(), ROLEARN_BINDING); protocolMarshaller.marshall(createFeatureGroupRequest.getDescription(), DESCRIPTION_BINDING); protocolMarshaller.marshall(createFeatureGroupRequest.getTags(), TAGS_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
1,607
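The marshaller above is generated boilerplate: each request field is bound to a payload key (FeatureGroupName, RecordIdentifierFeatureName, and so on) and written only when it is set. A rough sketch of the same idea in Python, using a plain dict in place of the SDK's ProtocolMarshaller (the wire names are taken from the bindings above; the helper itself is hypothetical and not part of the AWS SDK):

import json

# Payload keys copied from the MarshallingInfo bindings above.
_BINDINGS = {
    "feature_group_name": "FeatureGroupName",
    "record_identifier_feature_name": "RecordIdentifierFeatureName",
    "event_time_feature_name": "EventTimeFeatureName",
    "feature_definitions": "FeatureDefinitions",
    "online_store_config": "OnlineStoreConfig",
    "offline_store_config": "OfflineStoreConfig",
    "role_arn": "RoleArn",
    "description": "Description",
    "tags": "Tags",
}

def marshall_create_feature_group(request):
    """Serialize only the fields that are set, keyed by their wire names."""
    if request is None:
        raise ValueError("Invalid argument passed to marshall(...)")
    payload = {
        wire_name: request[attr]
        for attr, wire_name in _BINDINGS.items()
        if request.get(attr) is not None
    }
    return json.dumps(payload)

print(marshall_create_feature_group({
    "feature_group_name": "my-feature-group",
    "record_identifier_feature_name": "record_id",
    "event_time_feature_name": "event_time",
}))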
465
# Copyright 2019 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Converts images to a JSON file.

Converts images to a JSON request file. Models currently support 2 formats,
tensor or jpg. Depending on the model, select the correct model type.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from ast import literal_eval
from PIL import Image
import base64
import codecs
import json
import logging
import requests
import numpy as np

INPUT_FILE = 'image.jpg'
OUTPUT_FILE = '/tmp/out.json'
LOAD_BALANCER = 'localhost:8888'  # Enter your TF Serve IP Address.
URL = 'http://%s/v1/models/default:predict' % LOAD_BALANCER
UPLOAD_FOLDER = '/tmp/'
NUM_REQUESTS = 10
# Wait this long for outgoing HTTP connections to be established.
_CONNECT_TIMEOUT_SECONDS = 90
# Wait this long to read from an HTTP socket.
_READ_TIMEOUT_SECONDS = 120
MODEL_TYPE = 'jpg'  # tensor | jpg
ENABLE_PREDICT = True


def get_classes():
  url = 'https://gist.githubusercontent.com/yrevar/942d3a0ac09ec9e5eb3a/raw' \
        '/238f720ff059c1f82f368259d1ca4ffa5dd8f9f5' \
        '/imagenet1000_clsidx_to_labels.txt'
  response = requests.get(url)
  classes = literal_eval(response.text)
  return classes


def convert_to_json(image_file):
  """Open image, convert it to numpy and create JSON request."""
  img = Image.open(image_file).resize((240, 240))
  img_array = np.array(img)
  predict_request = {"instances": [img_array.tolist()]}
  json.dump(predict_request,
            codecs.open(OUTPUT_FILE, 'w', encoding='utf-8'),
            separators=(',', ':'),
            sort_keys=True,
            indent=4)
  return predict_request


def convert_to_base64(image_file):
  """Open image and convert it to base64."""
  with open(image_file, 'rb') as f:
    jpeg_bytes = base64.b64encode(f.read()).decode('utf-8')
  predict_request = '{"instances" : [{"b64": "%s"}]}' % jpeg_bytes
  # Write JSON to file
  with open(OUTPUT_FILE, 'w') as f:
    f.write(predict_request)
  return predict_request


def model_predict(predict_request):
  """Sends image for prediction."""
  total_time = 0
  session = requests.Session()
  try:
    for _ in range(0, NUM_REQUESTS):
      response = session.post(
          URL,
          data=predict_request,
          timeout=(_CONNECT_TIMEOUT_SECONDS, _READ_TIMEOUT_SECONDS),
          allow_redirects=False)
      response.raise_for_status()
      total_time += response.elapsed.total_seconds()
    print('Num requests: {} Avg latency: {} ms'.format(
        NUM_REQUESTS, (total_time * 1000) / NUM_REQUESTS))
    return response.json()
  except requests.exceptions.HTTPError as err:
    logging.exception(err)
    if err.response.status_code == 400:
      logging.exception('Server error %s', URL)
      return
    if err.response.status_code == 404:
      logging.exception('Page not found %s', URL)
      return


def main():
  if MODEL_TYPE == 'tensor':
    predict_request = convert_to_json(INPUT_FILE)
  elif MODEL_TYPE == 'jpg':
    predict_request = convert_to_base64(INPUT_FILE)
  else:
    logging.error('Invalid Model Type')
    return

  if ENABLE_PREDICT:
    classes = get_classes()
    response = model_predict(predict_request)
    if response:
      prediction_class = response.get('predictions')[0].get('classes') - 1
      prediction_probabilities = response.get('predictions')[0].get('probabilities')
      print('Prediction: [%d] %s Probability [%.2f] ' % (
          prediction_class,
          classes[prediction_class],
          max(prediction_probabilities)))


if __name__ == '__main__':
  main()
1,466
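For reference, the jpg path of the script above boils down to a single REST call: base64-encode the image, wrap it in an "instances" list under a "b64" key, and POST it to the model's :predict endpoint. A trimmed usage sketch (the endpoint and file name are placeholders):

import base64
import json
import requests

URL = 'http://localhost:8888/v1/models/default:predict'  # placeholder endpoint

def predict_jpeg(image_path):
    # Build the same {"instances": [{"b64": ...}]} payload as the script above.
    with open(image_path, 'rb') as f:
        jpeg_b64 = base64.b64encode(f.read()).decode('utf-8')
    payload = json.dumps({"instances": [{"b64": jpeg_b64}]})
    response = requests.post(URL, data=payload, timeout=(90, 120))
    response.raise_for_status()
    prediction = response.json()['predictions'][0]
    return prediction['classes'], max(prediction['probabilities'])

# print(predict_jpeg('image.jpg'))   # requires a running TF Serving instance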
347
<reponame>hbraha/ovirt-engine<filename>backend/manager/modules/bll/src/main/java/org/ovirt/engine/core/bll/network/dc/InternalImportExternalNetworkCommand.java package org.ovirt.engine.core.bll.network.dc; import java.util.Collections; import java.util.List; import java.util.stream.Collectors; import javax.inject.Inject; import org.ovirt.engine.core.bll.CommandBase; import org.ovirt.engine.core.bll.InternalCommandAttribute; import org.ovirt.engine.core.bll.NonTransactiveCommandAttribute; import org.ovirt.engine.core.bll.context.CommandContext; import org.ovirt.engine.core.bll.network.cluster.NetworkHelper; import org.ovirt.engine.core.bll.utils.PermissionSubject; import org.ovirt.engine.core.common.VdcObjectType; import org.ovirt.engine.core.common.action.ActionReturnValue; import org.ovirt.engine.core.common.action.ActionType; import org.ovirt.engine.core.common.action.AddNetworkStoragePoolParameters; import org.ovirt.engine.core.common.action.InternalImportExternalNetworkParameters; import org.ovirt.engine.core.common.businessentities.Cluster; import org.ovirt.engine.core.common.businessentities.network.Network; import org.ovirt.engine.core.common.errors.EngineMessage; import org.ovirt.engine.core.common.queries.IdQueryParameters; import org.ovirt.engine.core.common.queries.QueryReturnValue; import org.ovirt.engine.core.common.queries.QueryType; import org.ovirt.engine.core.compat.Guid; @NonTransactiveCommandAttribute @InternalCommandAttribute public class InternalImportExternalNetworkCommand<P extends InternalImportExternalNetworkParameters> extends CommandBase<P> { @Inject private NetworkHelper networkHelper; public InternalImportExternalNetworkCommand(P parameters, CommandContext cmdContext) { super(parameters, cmdContext); setStoragePoolId(getParameters().getDataCenterId()); } protected Network getNetwork() { return getParameters().getExternalNetwork(); } @Override protected void executeCommand() { final Guid dataCenterId = getStoragePoolId(); final Network network = getNetwork(); network.setDataCenterId(dataCenterId); ActionReturnValue addNetworkReturnValue = addNetwork(dataCenterId, network, getParameters().isAttachToAllClusters()); if (!addNetworkReturnValue.getSucceeded()) { propagateFailure(addNetworkReturnValue); return; } network.setId(addNetworkReturnValue.getActionReturnValue()); getReturnValue().setActionReturnValue(network.getId()); setSucceeded(true); } private ActionReturnValue addNetwork(Guid dataCenterId, Network network, boolean attachToAllClusters) { AddNetworkStoragePoolParameters params = new AddNetworkStoragePoolParameters(dataCenterId, network); params.setVnicProfilePublicUse(getParameters().isPublicUse()); if (attachToAllClusters) { params.setNetworkClusterList(networkHelper.createNetworkClusters( getAllClusterIdsInDataCenter(dataCenterId))); } return runInternalAction(ActionType.AddNetwork, params); } private List<Guid> getAllClusterIdsInDataCenter(Guid dataCenterId) { QueryReturnValue queryReturnValue = runInternalQuery(QueryType.GetClustersByStoragePoolId, new IdQueryParameters(dataCenterId)); List<Cluster> clusters = queryReturnValue.getReturnValue(); return clusters.stream().map(Cluster::getId).collect(Collectors.toList()); } @Override public List<PermissionSubject> getPermissionCheckSubjects() { return Collections.singletonList(new PermissionSubject(getStoragePoolId(), VdcObjectType.StoragePool, getActionType().getActionGroup())); } @Override protected void setActionMessageParameters() { addValidationMessage(EngineMessage.VAR__ACTION__IMPORT); 
addValidationMessage(EngineMessage.VAR__TYPE__NETWORK); } }
1,374
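The command above is a thin orchestration layer: attach the external network to the data center, bail out and propagate the failure if the inner AddNetwork action does not succeed, and optionally pre-compute the network/cluster attachments for every cluster in that data center. A schematic of that flow in Python, where every helper passed in is a hypothetical stand-in for the engine's internal actions and queries:

# Schematic only: add_network_action, get_clusters_by_data_center and
# create_network_clusters are hypothetical stand-ins for engine internals.

def import_external_network(data_center_id, network, attach_to_all_clusters,
                            add_network_action, get_clusters_by_data_center,
                            create_network_clusters):
    network["data_center_id"] = data_center_id

    params = {"data_center_id": data_center_id, "network": network}
    if attach_to_all_clusters:
        cluster_ids = [c["id"] for c in get_clusters_by_data_center(data_center_id)]
        params["network_clusters"] = create_network_clusters(cluster_ids)

    result = add_network_action(params)
    if not result["succeeded"]:
        return {"succeeded": False}          # propagate the inner failure

    network["id"] = result["return_value"]
    return {"succeeded": True, "return_value": network["id"]}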
435
<gh_stars>100-1000
{
  "description": "Keynote by <NAME>\u00e1l\n",
  "duration": 3406,
  "language": "eng",
  "recorded": "2017-02-16",
  "speakers": [
    "<NAME>\u00e1l"
  ],
  "thumbnail_url": "https://i.ytimg.com/vi/LSFG7Y2WM98/hqdefault.jpg",
  "title": "[Keynote] (MY) OSS Life",
  "videos": [
    {
      "type": "youtube",
      "url": "https://www.youtube.com/watch?v=LSFG7Y2WM98"
    }
  ]
}
199
511
<reponame>ziyik/TizenRT-1 /**************************************************************************** * * Copyright 2021 Samsung Electronics All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ****************************************************************************/ /************************************************************************************ * Included Files ************************************************************************************/ #include <tinyara/config.h> #include <sys/types.h> #include <stdio.h> #include <stdint.h> #include <stdbool.h> #include <stddef.h> #include <semaphore.h> #include <errno.h> #include <debug.h> #include <tinyara/irq.h> #include <tinyara/arch.h> #include <tinyara/semaphore.h> #include <tinyara/spi/spi.h> #include <arch/board/board.h> #include "up_internal.h" #include "up_arch.h" #include "chip.h" #include "amebad_spi.h" #include "mbed/targets/hal/rtl8721d/PinNames.h" #include "mbed/hal/spi_api.h" /************************************************************************************ * Pre-processor Definitions ************************************************************************************/ /* Configuration ********************************************************************/ /* SPI interrupts */ #ifdef CONFIG_AMEBAD_SPI_INTERRUPTS #error "Interrupt driven SPI not yet supported" #endif #if defined(CONFIG_AMEBAD_SPI_DMA) #error "DMA mode is not yet supported" #endif /* Can't have both interrupt driven SPI and SPI DMA */ #if defined(CONFIG_AMEBAD_SPI_INTERRUPTS) && defined(CONFIG_AMEBAD_SPI_DMA) #error "Cannot enable both interrupt mode and DMA mode for SPI" #endif #undef spierr #undef spiinfo #if defined(DEBUG_IMX_I2C) #define spiinfo(format, ...) printf(format, ##__VA_ARGS__) #define spierr(format, ...) printf(format, ##__VA_ARGS__) #else #define spiinfo(format, ...) //#define spierr(format, ...) #endif #define spierr(format, ...) printf(format, ##__VA_ARGS__) #define AMEBAD_SPI_MASTER 0 #define AMEBAD_SPI_SLAVE 1 /************************************************************************************ * Private Types ************************************************************************************/ struct amebad_spidev_s { struct spi_dev_s spidev; /* Externally visible part of the SPI interface */ // uint32_t spibase; /* SPIn base address */ //#ifdef CONFIG_AMEBAD_SPI_INTERRUPTS // uint8_t spiirq; /* SPI IRQ number */ //#endif sem_t exclsem; /* Held while chip is selected for mutual exclusion */ uint32_t frequency; /* Requested clock frequency */ uint32_t actual; /* Actual clock frequency */ spi_t spi_object; uint32_t spi_idx; PinName spi_mosi; PinName spi_miso; PinName spi_sclk; PinName spi_cs; int8_t nbits; /* Width of word in bits */ uint8_t mode; /* Mode 0,1,2,3 */ int role; }; enum amebad_delay_e { SPI_PCS_TO_SCK = 1, /* PCS-to-SCK delay. */ SPI_LAST_SCK_TO_PCS, /* Last SCK edge to PCS delay. */ SPI_BETWEEN_TRANSFER /* Delay between transfers. 
*/ }; /************************************************************************************ * Private Function Prototypes ************************************************************************************/ /* Helpers */ static inline uint32_t amebad_spi_getreg32(FAR struct amebad_spidev_s *priv, uint8_t offset); static inline void amebad_spi_putreg32(FAR struct amebad_spidev_s *priv, uint8_t offset, uint32_t value); static inline uint16_t amebad_spi_readword(FAR struct amebad_spidev_s *priv); static inline void amebad_spi_writeword(FAR struct amebad_spidev_s *priv, uint16_t byte); static inline bool amebad_spi_9to16bitmode(FAR struct amebad_spidev_s *priv); static inline void amebad_spi_master_set_delays(FAR struct amebad_spidev_s *priv, uint32_t delay_ns, enum amebad_delay_e type); static inline void amebad_spi_master_set_delay_scaler(FAR struct amebad_spidev_s *priv, uint32_t scaler, enum amebad_delay_e type); /* SPI methods */ static int amebad_spi_lock(FAR struct spi_dev_s *dev, bool lock); static uint32_t amebad_spi_setfrequency(FAR struct spi_dev_s *dev, uint32_t frequency); static void amebad_spi_setmode(FAR struct spi_dev_s *dev, enum spi_mode_e mode); static void amebad_spi_setbits(FAR struct spi_dev_s *dev, int nbits); #ifdef CONFIG_SPI_HWFEATURES static int amebad_spi_hwfeatures(FAR struct spi_dev_s *dev, amebad_spi_hwfeatures_t features); #endif static uint16_t amebad_spi_send(FAR struct spi_dev_s *dev, uint16_t wd); static void amebad_spi_exchange(FAR struct spi_dev_s *dev, FAR const void *txbuffer, FAR void *rxbuffer, size_t nwords); #ifndef CONFIG_SPI_EXCHANGE static void amebad_spi_sndblock(FAR struct spi_dev_s *dev, FAR const void *txbuffer, size_t nwords); static void amebad_spi_recvblock(FAR struct spi_dev_s *dev, FAR void *rxbuffer, size_t nwords); #endif /* Initialization */ static void amebad_spi_bus_initialize(FAR struct amebad_spidev_s *priv); /************************************************************************************ * Private Data ************************************************************************************/ static const struct spi_ops_s g_spi0ops = { .lock = amebad_spi_lock, .select = amebad_spi0select, .setfrequency = amebad_spi_setfrequency, .setmode = amebad_spi_setmode, .setbits = amebad_spi_setbits, #ifdef CONFIG_SPI_HWFEATURES .hwfeatures = amebad_spi_hwfeatures, #endif .status = amebad_spi0status, #ifdef CONFIG_SPI_CMDDATA .cmddata = amebad_spi0cmddata, #endif .send = amebad_spi_send, #ifdef CONFIG_SPI_EXCHANGE .exchange = amebad_spi_exchange, #else .sndblock = amebad_spi_sndblock, .recvblock = amebad_spi_recvblock, #endif #ifdef CONFIG_SPI_CALLBACK .registercallback = amebad_spi0register, /* Provided externally */ #else .registercallback = 0, /* Not implemented */ #endif }; static struct amebad_spidev_s g_spi0dev = { .spidev = { &g_spi0ops }, /* .spibase = AMEBAD_SPI1_BASE, #ifdef CONFIG_AMEBAD_SPI_INTERRUPTS .spiirq = AMEBAD_IRQ_SPI1, #endif #ifdef CONFIG_AMEBAD_SPI_DMA .rxch = DMAMAP_SPI1_RX, .txch = DMAMAP_SPI1_TX, #endif */ .spi_object = {0}, .spi_idx = MBED_SPI0, .spi_mosi = PA_16, .spi_miso = PA_17, .spi_sclk = PA_18, .spi_cs = PA_19, .nbits = 8, .mode = SPIDEV_MODE0, .role = AMEBAD_SPI_SLAVE, }; static const struct spi_ops_s g_spi1ops = { .lock = amebad_spi_lock, .select = amebad_spi1select, .setfrequency = amebad_spi_setfrequency, .setmode = amebad_spi_setmode, .setbits = amebad_spi_setbits, #ifdef CONFIG_SPI_HWFEATURES .hwfeatures = amebad_spi_hwfeatures, #endif .status = amebad_spi1status, #ifdef CONFIG_SPI_CMDDATA .cmddata = 
amebad_spi1cmddata, #endif .send = amebad_spi_send, #ifdef CONFIG_SPI_EXCHANGE .exchange = amebad_spi_exchange, #else .sndblock = amebad_spi_sndblock, .recvblock = amebad_spi_recvblock, #endif #ifdef CONFIG_SPI_CALLBACK .registercallback = amebad_spi1register, /* Provided externally */ #else .registercallback = 0, /* Not implemented */ #endif }; static struct amebad_spidev_s g_spi1dev = { .spidev = { &g_spi1ops }, /* .spibase = AMEBAD_SPI1_BASE, #ifdef CONFIG_AMEBAD_SPI_INTERRUPTS .spiirq = AMEBAD_IRQ_SPI1, #endif #ifdef CONFIG_AMEBAD_SPI_DMA .rxch = DMAMAP_SPI1_RX, .txch = DMAMAP_SPI1_TX, #endif */ .spi_object = {0}, .spi_idx = MBED_SPI1, .spi_mosi = PB_4, .spi_miso = PB_5, .spi_sclk = PB_6, .spi_cs = PB_7, .nbits = 8, .mode = SPIDEV_MODE0, .role = AMEBAD_SPI_MASTER }; /************************************************************************************ * Private Functions ************************************************************************************/ /************************************************************************************ * Name: amebad_spi_getreg8 * * Description: * Get the contents of the SPI register at offset * * Input Parameters: * priv - private SPI device structure * offset - offset to the register of interest * * Returned Value: * The contents of the 8-bit register * ************************************************************************************/ static inline uint8_t amebad_spi_getreg8(FAR struct amebad_spidev_s *priv, uint8_t offset) { return 0; } /************************************************************************************ * Name: amebad_spi_putreg8 * * Description: * Write a 8-bit value to the SPI register at offset * * Input Parameters: * priv - private SPI device structure * offset - offset to the register of interest * value - the 8-bit value to be written * ************************************************************************************/ static inline void amebad_spi_putreg8(FAR struct amebad_spidev_s *priv, uint8_t offset, uint8_t value) { return; } /************************************************************************************ * Name: amebad_spi_getreg * * Description: * Get the contents of the SPI register at offset * * Input Parameters: * priv - private SPI device structure * offset - offset to the register of interest * * Returned Value: * The contents of the 32-bit register * ************************************************************************************/ static inline uint32_t amebad_spi_getreg32(FAR struct amebad_spidev_s *priv, uint8_t offset) { return 0; } /************************************************************************************ * Name: amebad_spi_putreg * * Description: * Write a 16-bit value to the SPI register at offset * * Input Parameters: * priv - private SPI device structure * offset - offset to the register of interest * value - the 32-bit value to be written * * Returned Value: * The contents of the 32-bit register * ************************************************************************************/ static inline void amebad_spi_putreg32(FAR struct amebad_spidev_s *priv, uint8_t offset, uint32_t value) { return; } /************************************************************************************ * Name: amebad_spi_readword * * Description: * Read one word from SPI * * Input Parameters: * priv - Device-specific state data * * Returned Value: * word as read * ************************************************************************************/ static inline uint16_t amebad_spi_readword(FAR 
struct amebad_spidev_s *priv) { return 0; } /************************************************************************************ * Name: amebad_spi_writeword * * Description: * Write one word to SPI * * Input Parameters: * priv - Device-specific state data * word - word to send * * Returned Value: * None * ************************************************************************************/ static inline void amebad_spi_writeword(FAR struct amebad_spidev_s *priv, uint16_t word) { return; } /************************************************************************************ * Name: amebad_spi_readbyte * * Description: * Read one byte from SPI * * Input Parameters: * priv - Device-specific state data * * Returned Value: * Byte as read * ************************************************************************************/ static inline uint8_t amebad_spi_readbyte(FAR struct amebad_spidev_s *priv) { return 0; } /************************************************************************************ * Name: amebad_spi_writebyte * * Description: * Write one 8-bit frame to the SPI FIFO * * Input Parameters: * priv - Device-specific state data * byte - Byte to send * * Returned Value: * None * ************************************************************************************/ static inline void amebad_spi_writebyte(FAR struct amebad_spidev_s *priv, uint8_t byte) { return; } /************************************************************************************ * Name: amebad_spi_9to16bitmode * * Description: * Check if the SPI is operating in more then 8 bit mode * * Input Parameters: * priv - Device-specific state data * * Returned Value: * true: >8 bit mode-bit mode, false: <= 8-bit mode * ************************************************************************************/ static inline bool amebad_spi_9to16bitmode(FAR struct amebad_spidev_s *priv) { if (priv->nbits < 9) return false; else return true; } /************************************************************************************ * Name: amebad_spi_modifyreg * * Description: * Clear and set bits in register * * Input Parameters: * priv - Device-specific state data * offset - Register offset * clrbits - The bits to clear * setbits - The bits to set * * Returned Value: * None * ************************************************************************************/ static void amebad_spi_modifyreg32(FAR struct amebad_spidev_s *priv, uint8_t offset, uint32_t clrbits, uint32_t setbits) { } /************************************************************************************ * Name: amebad_spi_master_set_delays * * Description: * SET SPI Delay times * * Input Parameters: * priv - Device-specific state data * scaler - scaler value * type - delay time type * * Returned Value: * None * ************************************************************************************/ static inline void amebad_spi_master_set_delay_scaler(FAR struct amebad_spidev_s *priv, uint32_t scaler, enum amebad_delay_e type) { /* switch (type) { case SPI_PCS_TO_SCK: amebad_spi_modifyreg32(priv, AMEBAD_SPI_CCR_OFFSET, SPI_CCR_PCSSCK_MASK, 0); amebad_spi_modifyreg32(priv, AMEBAD_SPI_CCR_OFFSET, 0, SPI_CCR_PCSSCK(scaler)); break; case SPI_LAST_SCK_TO_PCS: amebad_spi_modifyreg32(priv, AMEBAD_SPI_CCR_OFFSET, SPI_CCR_SCKPCS_MASK, 0); amebad_spi_modifyreg32(priv, AMEBAD_SPI_CCR_OFFSET, 0, SPI_CCR_SCKPCS(scaler)); break; case SPI_BETWEEN_TRANSFER: amebad_spi_modifyreg32(priv, AMEBAD_SPI_CCR_OFFSET, SPI_CCR_DBT_MASK, 0); amebad_spi_modifyreg32(priv, AMEBAD_SPI_CCR_OFFSET, 0, 
SPI_CCR_DBT(scaler)); break; } */ } /************************************************************************************ * Name: amebad_spi_master_set_delays * * Description: * SET SPI Delay times * * Input Parameters: * priv - Device-specific state data * delay_ns - delay time in nano seconds * type - delay time type * * Returned Value: * None * ************************************************************************************/ static inline void amebad_spi_master_set_delays(FAR struct amebad_spidev_s *priv, uint32_t delay_ns, enum amebad_delay_e type) { } /************************************************************************************ * Name: amebad_spi_lock * * Description: * On SPI busses where there are multiple devices, it will be necessary to * lock SPI to have exclusive access to the busses for a sequence of * transfers. The bus should be locked before the chip is selected. After * locking the SPI bus, the caller should then also call the setfrequency, * setbits, and setmode methods to make sure that the SPI is properly * configured for the device. If the SPI buss is being shared, then it * may have been left in an incompatible state. * * Input Parameters: * dev - Device-specific state data * lock - true: Lock spi bus, false: unlock SPI bus * * Returned Value: * None * ************************************************************************************/ static int amebad_spi_lock(FAR struct spi_dev_s *dev, bool lock) { FAR struct amebad_spidev_s *priv = (FAR struct amebad_spidev_s *)dev; int ret; if (lock) { /* Take the semaphore (perhaps waiting) */ do { ret = sem_wait(&priv->exclsem); /* The only case that an error should occur here is if the wait was * awakened by a signal. */ DEBUGASSERT(ret == OK || errno == EINTR); } while (errno == EINTR); } else { (void)sem_post(&priv->exclsem); ret = OK; } return ret; } /************************************************************************************ * Name: amebad_spi0select * * Description: * Enable/disable the SPI slave select. The implementation of this method * must include handshaking: If a device is selected, it must hold off * all other attempts to select the device until the device is deselecte. * * Input Parameters: * dev - Device-specific state data * devid - Device Id * selected - whether it is selected or not * * Returned Value: None * * ************************************************************************************/ void amebad_spi0select(FAR struct spi_dev_s *dev, enum spi_dev_e devid, bool selected) { return; } /************************************************************************************ * Name: amebad_spi0status * * Description: * Get the spi status * * Input Parameters: * dev - Device-specific state data * devid - Device ID * * Returned Value: * Returns the SPI status * ************************************************************************************/ uint8_t amebad_spi0status(FAR struct spi_dev_s *dev, enum spi_dev_e devid) { return 0; } /************************************************************************************ * Name: amebad_spi0cmddata * * Description: * Send cmd to device according to devid. 
* * Input Parameters: * dev - Device-specific state data * devid - Device ID * cmd - the command that need to be sent * * Returned Value: * TBD * ************************************************************************************/ int amebad_spi0cmddata(FAR struct spi_dev_s *dev, uint32_t devid, bool cmd) { return 0; } /************************************************************************************ * Name: amebad_spi1select * * Description: * Enable/disable the SPI slave select. The implementation of this method * must include handshaking: If a device is selected, it must hold off * all other attempts to select the device until the device is deselecte. * * Input Parameters: * dev - Device-specific state data * devid - Device Id * selected - whether it is selected or not * * Returned Value: None * * ************************************************************************************/ void amebad_spi1select(FAR struct spi_dev_s *dev, enum spi_dev_e devid, bool selected) { return; } /************************************************************************************ * Name: amebad_spi1status * * Description: * Get the spi status * * Input Parameters: * dev - Device-specific state data * devid - Device ID * * Returned Value: * Returns the SPI status * ************************************************************************************/ uint8_t amebad_spi1status(FAR struct spi_dev_s *dev, enum spi_dev_e devid) { return 0; } /************************************************************************************ * Name: amebad_spi1cmddata * * Description: * Send cmd to device according to devid. * * Input Parameters: * dev - Device-specific state data * devid - Device ID * cmd - the command that need to be sent * * Returned Value: * TBD * ************************************************************************************/ int amebad_spi1cmddata(FAR struct spi_dev_s *dev, uint32_t devid, bool cmd) { return 0; } /************************************************************************************ * Name: amebad_spi_setfrequency * * Description: * Set the SPI frequency. * * Input Parameters: * dev - Device-specific state data * frequency - The SPI frequency requested * * Returned Value: * Returns the actual frequency selected * ************************************************************************************/ static uint32_t amebad_spi_setfrequency(FAR struct spi_dev_s *dev, uint32_t frequency) { FAR struct amebad_spidev_s *priv = (FAR struct amebad_spidev_s *)dev; /* Has the SPI bus frequency changed? */ if (frequency != priv->frequency) { /* Disable SPI if it is enabled */ priv->frequency = frequency; if (priv->role == AMEBAD_SPI_MASTER) spi_frequency(&priv->spi_object, priv->frequency); } return priv->frequency; } /************************************************************************************ * Name: amebad_spi_setmode * * Description: * Set the SPI mode. see enum spi_mode_e mode for mode definitions * * Input Parameters: * dev - Device-specific state data * mode - The SPI mode requested * * Returned Value: * Returns the actual frequency selected * ************************************************************************************/ static void amebad_spi_setmode(FAR struct spi_dev_s *dev, enum spi_mode_e mode) { FAR struct amebad_spidev_s *priv = (FAR struct amebad_spidev_s *)dev; spiinfo("mode=%d\n", mode); /* Has the mode changed? 
*/ if (mode != priv->mode) { /* Disable SPI if it is enabled */ priv->mode = mode; spi_format(&priv->spi_object, priv->nbits, priv->mode, priv->role); } } /************************************************************************************ * Name: amebad_spi_setbits * * Description: * Set the number of bits per word. * * Input Parameters: * dev - Device-specific state data * nbits - The number of bits requested * * Returned Value: * None * ************************************************************************************/ static void amebad_spi_setbits(FAR struct spi_dev_s *dev, int nbits) { FAR struct amebad_spidev_s *priv = (FAR struct amebad_spidev_s *)dev; spiinfo("nbits=%d\n", nbits); /* Has the number of bits changed? */ if (nbits != priv->nbits) { if (nbits < 4 || nbits > 16) { return; } /* Save the selection so the subsequence re-configurations will be faster */ priv->nbits = nbits; spi_format(&priv->spi_object, priv->nbits, priv->mode, priv->role); } } /**************************************************************************** * Name: amebad_spi_hwfeatures * * Description: * Set hardware-specific feature flags. * * Input Parameters: * dev - Device-specific state data * features - H/W feature flags * * Returned Value: * Zero (OK) if the selected H/W features are enabled; A negated errno * value if any H/W feature is not supportable. * ****************************************************************************/ #ifdef CONFIG_SPI_HWFEATURES static int amebad_spi_hwfeatures(FAR struct spi_dev_s *dev, amebad_spi_hwfeatures_t features) { #ifdef CONFIG_SPI_BITORDER FAR struct amebad_spidev_s *priv = (FAR struct amebad_spidev_s *)dev; spiinfo("features=%08x\n", features); /* Other H/W features are not supported */ #endif return -ENOSYS; } #endif /************************************************************************************ * Name: amebad_spi_send * * Description: * Exchange one word on SPI * * Input Parameters: * dev - Device-specific state data * wd - The word to send. the size of the data is determined by the * number of bits selected for the SPI interface. * * Returned Value: * response * ************************************************************************************/ static uint16_t amebad_spi_send(FAR struct spi_dev_s *dev, uint16_t wd) { FAR struct amebad_spidev_s *priv = (FAR struct amebad_spidev_s *)dev; uint16_t ret; DEBUGASSERT(priv); if (priv->role == AMEBAD_SPI_MASTER) { ret = spi_master_write(&priv->spi_object, wd); } else if (priv->role == AMEBAD_SPI_SLAVE) { spi_slave_write(&priv->spi_object, wd); ret = wd; } /* Check and clear any error flags (Reading from the SR clears the error * flags). */ spiinfo("Sent: %04x Return: %04x\n", wd, ret); return ret; } /************************************************************************************ * Name: amebad_spi_exchange (no DMA). aka amebad_spi_exchange_nodma * * Description: * Exchange a block of data on SPI without using DMA * * Input Parameters: * dev - Device-specific state data * txbuffer - A pointer to the buffer of data to be sent * rxbuffer - A pointer to a buffer in which to receive data * nwords - the length of data to be exchaned in units of words. * The wordsize is determined by the number of bits-per-word * selected for the SPI interface. 
If nbits <= 8, the data is * packed into uint8_t's; if nbits >8, the data is packed into uint16_t's * * Returned Value: * None * ************************************************************************************/ #if !defined(CONFIG_AMEBAD_SPI_DMA) || defined(CONFIG_AMEBAD_DMACAPABLE) #if !defined(CONFIG_AMEBAD_SPI_DMA) static void amebad_spi_exchange(FAR struct spi_dev_s *dev, FAR const void *txbuffer, FAR void *rxbuffer, size_t nwords) #else static void amebad_spi_exchange_nodma(FAR struct spi_dev_s *dev, FAR const void *txbuffer, FAR void *rxbuffer, size_t nwords) #endif { FAR struct amebad_spidev_s *priv = (FAR struct amebad_spidev_s *)dev; DEBUGASSERT(priv); spiinfo("txbuffer=%p rxbuffer=%p nwords=%d\n", txbuffer, rxbuffer, nwords); /* 8- or 16-bit mode? */ if (amebad_spi_9to16bitmode(priv)) { /* 16-bit mode */ const uint16_t *src = (const uint16_t *)txbuffer; uint16_t *dest = (uint16_t *)rxbuffer; uint16_t word; while (nwords-- > 0) { /* Get the next word to write. Is there a source buffer? */ if (src) { word = *src++; } else { word = 0xffff; } /* Exchange one word */ word = amebad_spi_send(dev, word); /* Is there a buffer to receive the return value? */ if (dest) { *dest++ = word; } } } else { /* 8-bit mode */ const uint8_t *src = (const uint8_t *)txbuffer; uint8_t *dest = (uint8_t *)rxbuffer; uint8_t word; while (nwords-- > 0) { /* Get the next word to write. Is there a source buffer? */ if (src) { word = *src++; } else { word = 0xff; } /* Exchange one word */ word = (uint8_t)amebad_spi_send(dev, (uint16_t) word); /* Is there a buffer to receive the return value? */ if (dest) { *dest++ = word; } } } } #endif /* !CONFIG_AMEBAD_SPI_DMA || CONFIG_AMEBAD_DMACAPABLE */ /**************************************************************************** * Name: amebad_spi_sndblock * * Description: * Send a block of data on SPI * * Input Parameters: * dev - Device-specific state data * txbuffer - A pointer to the buffer of data to be sent * nwords - the length of data to send from the buffer in number of words. * The wordsize is determined by the number of bits-per-word * selected for the SPI interface. If nbits <= 8, the data is * packed into uint8_t's; if nbits >8, the data is packed into uint16_t's * * Returned Value: * None * ************************************************************************************/ #ifndef CONFIG_SPI_EXCHANGE static void amebad_spi_sndblock(FAR struct spi_dev_s *dev, FAR const void *txbuffer, size_t nwords) { spiinfo("txbuffer=%p nwords=%d\n", txbuffer, nwords); return amebad_spi_exchange(dev, txbuffer, NULL, nwords); } #endif /************************************************************************************ * Name: amebad_spi_recvblock * * Description: * Receive a block of data from SPI * * Input Parameters: * dev - Device-specific state data * rxbuffer - A pointer to the buffer in which to recieve data * nwords - the length of data that can be received in the buffer in number * of words. The wordsize is determined by the number of bits-per-word * selected for the SPI interface. 
If nbits <= 8, the data is * packed into uint8_t's; if nbits >8, the data is packed into uint16_t's * * Returned Value: * None * ************************************************************************************/ #ifndef CONFIG_SPI_EXCHANGE static void amebad_spi_recvblock(FAR struct spi_dev_s *dev, FAR void *rxbuffer, size_t nwords) { spiinfo("rxbuffer=%p nwords=%d\n", rxbuffer, nwords); return amebad_spi_exchange(dev, NULL, rxbuffer, nwords); } #endif /************************************************************************************ * Name: amebad_spi_clock_enable * * Description: * Ungate SPI clock * ************************************************************************************/ void amebad_spi_clock_enable(uint32_t base) { return; } /************************************************************************************ * Name: amebad_spi_clock_disable * * Description: * Gate SPI clock * ************************************************************************************/ void amebad_spi_clock_disable(uint32_t base) { return; } /************************************************************************************ * Name: amebad_spi_bus_initialize * * Description: * Initialize the selected SPI bus in its default state (Master, 8-bit, mode 0, etc.) * * Input Parameters: * priv - private SPI device structure * * Returned Value: * None * ************************************************************************************/ static void amebad_spi_bus_initialize(struct amebad_spidev_s *priv) { DEBUGASSERT(priv); DEBUGASSERT(&priv->spi_object); priv->spi_object.spi_idx = priv->spi_idx; spi_init(&priv->spi_object, priv->spi_mosi, priv->spi_miso, priv->spi_sclk, priv->spi_cs); spi_format(&priv->spi_object, priv->nbits, priv->mode, priv->role); } /************************************************************************************ * Public Functions ************************************************************************************/ /************************************************************************************ * Name: amebad_spibus_initialize * * Description: * Initialize the selected SPI bus * * Input Parameters: * Port number (for hardware that has mutiple SPI interfaces) * * Returned Value: * Valid SPI device structure reference on success; a NULL on failure * ************************************************************************************/ FAR struct spi_dev_s *amebad_spibus_initialize(int bus) { FAR struct amebad_spidev_s *priv = NULL; irqstate_t flags = irqsave(); if (bus == 1) { /* Select SPI1 */ priv = &g_spi0dev; /* Only configure if the bus is not already configured */ amebad_spi_bus_initialize(priv); } else if (bus == 2) { /* Select SPI2 */ priv = &g_spi1dev; /* Only configure if the bus is not already configured */ amebad_spi_bus_initialize(priv); } else { spierr("ERROR: Unsupported SPI bus: %d\n", bus); irqrestore(flags); return NULL; } irqrestore(flags); return (FAR struct spi_dev_s *)priv; } /************************************************************************************ * Name: up_spiinitialize * * Description: * Initialize the selected SPI bus * * Input Parameters: * Port number (for hardware that has mutiple SPI interfaces) * * Returned Value: * Valid SPI device structure reference on success; a NULL on failure * ************************************************************************************/ FAR struct spi_dev_s *up_spiinitialize(int port) { FAR struct amebad_spidev_s *priv = NULL; irqstate_t flags = irqsave(); if (port == 0) { /* Select SPI1 */ 
priv = &g_spi0dev; /* Only configure if the bus is not already configured */ amebad_spi_bus_initialize(priv); } else if (port == 1) { /* Select SPI1 */ priv = &g_spi1dev; /* Only configure if the bus is not already configured */ amebad_spi_bus_initialize(priv); } else { spierr("ERROR: Unsupported SPI bus: %d\n", port); irqrestore(flags); return NULL; } irqrestore(flags); return (FAR struct spi_dev_s *)priv; }
11,194
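The heart of the driver above is amebad_spi_exchange: for every word it sends either the next value from the TX buffer or a filler word (0xff, or 0xffff when the word width is above 8 bits), and stores the returned word only if an RX buffer was supplied. The same loop as a small Python sketch, where the send_word callback is a placeholder for the per-word transfer analogous to amebad_spi_send:

def spi_exchange(send_word, txbuffer, rxbuffer, nwords, nbits=8):
    """Full-duplex exchange: always clock nwords words, sending a filler
    word when no source buffer is given (mirrors amebad_spi_exchange)."""
    filler = 0xFFFF if nbits > 8 else 0xFF
    for i in range(nwords):
        word = txbuffer[i] if txbuffer is not None else filler
        received = send_word(word)           # one full-duplex word transfer
        if rxbuffer is not None:
            rxbuffer[i] = received

# Example with a loopback "bus" that echoes whatever it is sent.
rx = [0] * 4
spi_exchange(lambda w: w, [1, 2, 3, 4], rx, 4)
print(rx)   # [1, 2, 3, 4]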
529
<gh_stars>100-1000
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Copyright (c) Microsoft Corporation. All rights reserved.
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

#include <tinyhal.h>

void Listener(unsigned int e, unsigned int param);

struct GestureDriver
{
    static const int c_IgnoreCount = 2;

private:
    static BOOL s_initialized;

    PalEventListener m_gestureListener;
    HAL_COMPLETION m_gestureCompletion;
    UINT32 m_index;
    UINT32 m_currentState;
    UINT16 m_lastx;
    UINT16 m_lasty;
    UINT16 m_startx;
    UINT16 m_starty;
    UINT32 m_stateIgnoreIndex;
    UINT32 m_stateIgnoreHead;
    UINT32 m_stateIgnoreTail;
    UINT32 m_stateIgnoreBuffer[c_IgnoreCount];

public:
    static HRESULT Initialize();
    static HRESULT Uninitialize();
    static BOOL ProcessPoint(UINT32 flags, UINT16 source, UINT16 x, UINT16 y, INT64 time);
    static void ResetRecognition();
    static void EventListener(unsigned int e, unsigned int param);
    static void GestureContinuationRoutine(void *arg);
};

extern GestureDriver g_GestureDriver;
545
392
<gh_stars>100-1000
package io.logz.apollo.database;

public enum OrderDirection {
    DESC,
    ASC
}
43
1,657
<reponame>mens-artis/Auto-PyTorch
import ConfigSpace


class BaseTrainingTechnique():
    def __init__(self, training_components=None):
        """Initialize the training technique. Should be called in a fit method of a Pipeline node.

        Keyword Arguments:
            training_components {dict} -- Maps names to the training components necessary for this
                training technique (default: {None})
        """
        self.training_components = training_components or dict()

    # VIRTUAL
    def set_up(self, trainer, pipeline_config):
        """Set up the training component.

        Arguments:
            trainer {Trainer} -- The trainer object used for training.
            pipeline_config {dict} -- Configuration of the Pipeline.
        """
        pass

    # VIRTUAL
    def on_epoch_start(self, trainer, log, epoch):
        """Function that gets called before the train_batches method of each epoch in training.

        Arguments:
            trainer {Trainer} -- The trainer object used for training.
            log {dict} -- The log of the current epoch.
            epoch {int} -- The current epoch of training.
        """
        pass

    # VIRTUAL
    def on_epoch_end(self, trainer, log, epoch):
        """Function that gets called after the train_batches method of each epoch in training.
        Is able to stop training by returning True.

        Arguments:
            trainer {Trainer} -- The trainer object used for training.
            log {dict} -- The log of the current epoch.
            epoch {int} -- The current epoch of training.

        Returns:
            bool -- If training should be stopped.
        """
        return False

    # VIRTUAL
    def on_batch_start(self, trainer, epoch, step, num_steps):
        """Function that gets called in the train_batches method of training.
        Is able to cancel the current epoch by returning True.

        Arguments:
            trainer {Trainer} -- The trainer object used for training.
            epoch {int} -- The current epoch of training.
            step {int} -- The current step within the epoch.
            num_steps {int} -- The total number of steps of the epoch.

        Returns:
            bool -- If the current epoch should be canceled.
        """
        return False

    # VIRTUAL
    def on_batch_end(self, batch_loss, trainer, epoch, step, num_steps):
        """Function that gets called in the train_batches method of training.
        Is able to cancel the current epoch by returning True.

        Arguments:
            batch_loss {tensor} -- The batch loss of the current batch.
            trainer {Trainer} -- The trainer object used for training.
            epoch {int} -- The current epoch of training.
            step {int} -- The current step within the epoch.
            num_steps {int} -- The total number of steps of the epoch.

        Returns:
            bool -- If the current epoch should be canceled.
        """
        return False

    # VIRTUAL
    def select_log(self, logs, trainer):
        """Select one log from the list of all epoch logs.

        Arguments:
            logs {list} -- A list of logs. For each epoch of training there is one entry.
            trainer {Trainer} -- The trainer object used for training.

        Returns:
            log -- The selected log. Return None if undecided.
        """
        return False

    # VIRTUAL
    def requires_eval_each_epoch(self):
        """Specify if the training technique needs the network to be evaluated on a snapshot after training.

        Returns:
            bool -- If the training technique needs the network to be evaluated on a snapshot after training.
        """
        return False

    # VIRTUAL
    @staticmethod
    def get_pipeline_config_options():
        """Return a list of ConfigOption used for this training technique.

        Returns:
            list -- A list of ConfigOptions.
        """
        return []


class BaseBatchLossComputationTechnique():
    # VIRTUAL
    def set_up(self, pipeline_config, hyperparameter_config, logger):
        """Initialize the batch loss computation technique.

        Arguments:
            pipeline_config {dict} -- The configuration of the pipeline.
            hyperparameter_config {dict} -- The hyperparameter config sampled by BOHB.
            logger {Logger} -- Logger.
        """
        self.logger = logger

    # VIRTUAL
    def prepare_data(self, X_batch, y_batch):
        """Method that gets called before the batch is put into the network.

        Arguments:
            X_batch {tensor} -- The features of the batch.
            y_batch {tensor} -- The targets of the batch.
        """
        return X_batch, {'y_batch': y_batch}

    # VIRTUAL
    def criterion(self, y_batch):
        return lambda criterion, pred: criterion(pred, y_batch)

    # VIRTUAL
    @staticmethod
    def get_hyperparameter_search_space(**pipeline_config):
        """Get the hyperparameter config space for this technique.

        Returns:
            ConfigurationSpace -- The hyperparameter config space for this technique.
        """
        return ConfigSpace.ConfigurationSpace()
2,074
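The base class above is hook-driven: a concrete technique overrides set_up, the epoch/batch callbacks, and select_log, and can stop training by returning True from on_epoch_end. As an illustration only (not Auto-PyTorch's own implementation), an early-stopping technique could plug into those hooks like this, with a minimal stub standing in for the base class defined in the record:

class BaseTrainingTechnique:            # minimal stub of the base class above
    def __init__(self, training_components=None):
        self.training_components = training_components or dict()

    def on_epoch_end(self, trainer, log, epoch):
        return False


class EarlyStopping(BaseTrainingTechnique):
    """Hypothetical technique: stop when the monitored metric stops improving."""

    def __init__(self, patience=5, metric="val_loss"):
        super().__init__()
        self.patience = patience
        self.metric = metric
        self.best = float("inf")
        self.epochs_without_improvement = 0

    def on_epoch_end(self, trainer, log, epoch):
        value = log[self.metric]
        if value < self.best:
            self.best = value
            self.epochs_without_improvement = 0
        else:
            self.epochs_without_improvement += 1
        # Returning True tells the training loop to stop.
        return self.epochs_without_improvement > self.patience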
544
{ "id": 5, "from": 1600925481, "to": 1601301350, "type": 0, "appversion": "7.5.286", "title": "How is your experience using the updated Timer?", "text": "Let us know in a 2-minute survey!", "button": "Give feedback", "url-mac": "https://forms.gle/QKX7datPDVV7axvu8", "url-win": "https://forms.gle/QKX7datPDVV7axvu8", "url-linux": "https://forms.gle/QKX7datPDVV7axvu8" }
197
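A record like the one above is presumably consumed by the desktop clients: show the message only while the current time falls inside the [from, to] window and the app version matches, then open the platform-specific URL. A small sketch of that check (field names taken from the JSON; the selection logic itself is an assumption):

import time

notification = {
    "id": 5, "from": 1600925481, "to": 1601301350,
    "appversion": "7.5.286",
    "button": "Give feedback",
    "url-mac": "https://forms.gle/QKX7datPDVV7axvu8",
    "url-win": "https://forms.gle/QKX7datPDVV7axvu8",
    "url-linux": "https://forms.gle/QKX7datPDVV7axvu8",
}

def active_url(n, platform, app_version, now=None):
    now = now if now is not None else time.time()
    if not (n["from"] <= now <= n["to"]):
        return None                      # outside the display window
    if app_version != n["appversion"]:
        return None                      # targeted at a different build
    return n.get("url-%s" % platform)

print(active_url(notification, "mac", "7.5.286", now=1601000000))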
461
#ifndef __MONITOR_H__
#define __MONITOR_H__

#include "common.h"

enum { NEMU_STOP, NEMU_RUNNING, NEMU_END, NEMU_ABORT };

typedef struct {
  int state;
  vaddr_t halt_pc;
  uint32_t halt_ret;
} NEMUState;

extern NEMUState nemu_state;

#endif
119
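The header above is just shared emulator state: the current run state plus the PC and return code recorded when the guest halts. A toy illustration of how such a state record is typically driven (the transition helpers are assumptions, not part of NEMU):

NEMU_STOP, NEMU_RUNNING, NEMU_END, NEMU_ABORT = range(4)

# Mirrors the NEMUState struct: state, halt_pc, halt_ret.
nemu_state = {"state": NEMU_STOP, "halt_pc": 0, "halt_ret": 0}

def start():
    nemu_state["state"] = NEMU_RUNNING

def halt(pc, ret):
    nemu_state.update(state=NEMU_END, halt_pc=pc, halt_ret=ret)

start()
halt(pc=0x100010, ret=0)
print(nemu_state)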
1,831
/** * Copyright (c) 2004-present, Facebook, Inc. and its affiliates. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. */ #include "logdevice/common/work_model/WorkContext.h" #include <gtest/gtest.h> #include "folly/executors/SerialExecutor.h" using namespace ::testing; using namespace facebook::logdevice; class NOOPExecutor : public folly::Executor { public: NOOPExecutor() {} ~NOOPExecutor() override {} void add(folly::Func /* func */) override {} bool keepAliveAcquire() noexcept override { return true; } void keepAliveRelease() noexcept override {} }; TEST(WorkContextTest, SimpleTest) { auto no_op_executor = std::make_unique<NOOPExecutor>(); { auto serial_keep_alive = folly::SerialExecutor::create( folly::Executor::getKeepAliveToken(no_op_executor.get())); WorkContext ctx(std::move(serial_keep_alive)); EXPECT_TRUE(ctx.anonymous()); } { auto serial_keep_alive = folly::SerialExecutor::create( folly::Executor::getKeepAliveToken(no_op_executor.get())); WorkContext ctx(std::move(serial_keep_alive), 1); EXPECT_FALSE(ctx.anonymous()); EXPECT_EQ(1, ctx.getId()); } }
450
679
<gh_stars>100-1000 /************************************************************** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * *************************************************************/ #ifndef OOX_DRAWINGML_SHAPEPROPERTYMAP_HXX #define OOX_DRAWINGML_SHAPEPROPERTYMAP_HXX #include "oox/helper/propertymap.hxx" namespace oox { class ModelObjectHelper; } namespace oox { namespace drawingml { // ============================================================================ /** Enumeration for various properties related to drawing shape formatting. This is an abstraction for shape formatting properties that have different names in various implementations, e.g. drawing shapes vs. chart objects. */ enum ShapePropertyId { SHAPEPROP_LineStyle, SHAPEPROP_LineWidth, SHAPEPROP_LineColor, SHAPEPROP_LineTransparency, SHAPEPROP_LineDash, /// Explicit line dash or name of a line dash stored in a global container. SHAPEPROP_LineJoint, SHAPEPROP_LineStart, /// Explicit line start marker or name of a line marker stored in a global container. SHAPEPROP_LineStartWidth, SHAPEPROP_LineStartCenter, SHAPEPROP_LineEnd, /// Explicit line end marker or name of a line marker stored in a global container. SHAPEPROP_LineEndWidth, SHAPEPROP_LineEndCenter, SHAPEPROP_FillStyle, SHAPEPROP_FillColor, SHAPEPROP_FillTransparency, SHAPEPROP_FillGradient, /// Explicit fill gradient or name of a fill gradient stored in a global container. SHAPEPROP_FillBitmapUrl, /// Explicit fill bitmap URL or name of a fill bitmap URL stored in a global container. SHAPEPROP_FillBitmapMode, SHAPEPROP_FillBitmapSizeX, SHAPEPROP_FillBitmapSizeY, SHAPEPROP_FillBitmapOffsetX, SHAPEPROP_FillBitmapOffsetY, SHAPEPROP_FillBitmapRectanglePoint, SHAPEPROP_END }; // ============================================================================ struct ShapePropertyInfo { const sal_Int32* mpnPropertyIds; /// Pointer to array of property identifiers for all SHAPEPROP properties. bool mbNamedLineMarker; /// True = use named line marker instead of explicit line marker. bool mbNamedLineDash; /// True = use named line dash instead of explicit line dash. bool mbNamedFillGradient; /// True = use named fill gradient instead of explicit fill gradient. bool mbNamedFillBitmapUrl; /// True = use named fill bitmap URL instead of explicit fill bitmap URL. static ShapePropertyInfo DEFAULT; /// Default property info (used as default parameter of other methods). 
explicit ShapePropertyInfo( const sal_Int32* pnPropertyIds, bool bNamedLineMarker, bool bNamedLineDash, bool bNamedFillGradient, bool bNamedFillBitmapUrl ); inline bool has( ShapePropertyId ePropId ) const { return mpnPropertyIds[ ePropId ] >= 0; } inline sal_Int32 operator[]( ShapePropertyId ePropId ) const { return mpnPropertyIds[ ePropId ]; } }; // ============================================================================ class ShapePropertyMap : public PropertyMap { public: explicit ShapePropertyMap( ModelObjectHelper& rModelObjHelper, const ShapePropertyInfo& rShapePropInfo = ShapePropertyInfo::DEFAULT ); /** Returns true, if the specified property is supported. */ bool supportsProperty( ShapePropertyId ePropId ) const; /** Returns true, if named line markers are supported, and the specified line marker has already been inserted into the marker table. */ bool hasNamedLineMarkerInTable( const ::rtl::OUString& rMarkerName ) const; /** Sets the specified shape property to the passed value. */ bool setAnyProperty( ShapePropertyId ePropId, const ::com::sun::star::uno::Any& rValue ); /** Sets the specified shape property to the passed value. */ template< typename Type > inline bool setProperty( ShapePropertyId ePropId, const Type& rValue ) { return setAnyProperty( ePropId, ::com::sun::star::uno::Any( rValue ) ); } using PropertyMap::setAnyProperty; using PropertyMap::setProperty; using PropertyMap::operator[]; private: /** Sets an explicit line marker, or creates a named line marker. */ bool setLineMarker( sal_Int32 nPropId, const ::com::sun::star::uno::Any& rValue ); /** Sets an explicit line dash, or creates a named line dash. */ bool setLineDash( sal_Int32 nPropId, const ::com::sun::star::uno::Any& rValue ); /** Sets an explicit fill gradient, or creates a named fill gradient. */ bool setFillGradient( sal_Int32 nPropId, const ::com::sun::star::uno::Any& rValue ); /** Sets an explicit fill bitmap URL, or creates a named fill bitmap URL. */ bool setFillBitmapUrl( sal_Int32 nPropId, const ::com::sun::star::uno::Any& rValue ); // not implemented, to prevent implicit conversion from enum to int ::com::sun::star::uno::Any& operator[]( ShapePropertyId ePropId ); const ::com::sun::star::uno::Any& operator[]( ShapePropertyId ePropId ) const; private: ModelObjectHelper& mrModelObjHelper; ShapePropertyInfo maShapePropInfo; }; // ============================================================================ } // namespace drawingml } // namespace oox #endif
2,285
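The key idea in the header above is indirection: the same abstract ShapePropertyId is translated into a different concrete property identifier per consumer via the mpnPropertyIds array, with a negative entry meaning "unsupported". A compact Python analogue of that lookup (the concrete names below are invented for illustration):

from enum import IntEnum

class ShapePropertyId(IntEnum):
    LINE_STYLE = 0
    LINE_WIDTH = 1
    FILL_COLOR = 2

# One mapping per consumer; -1 marks an unsupported property
# (mirrors ShapePropertyInfo::mpnPropertyIds and has()).
DRAWING_SHAPE_IDS = {            # invented example mapping
    ShapePropertyId.LINE_STYLE: "LineStyle",
    ShapePropertyId.LINE_WIDTH: "LineWidth",
    ShapePropertyId.FILL_COLOR: "FillColor",
}
CHART_OBJECT_IDS = {
    ShapePropertyId.LINE_STYLE: "BorderStyle",
    ShapePropertyId.LINE_WIDTH: "BorderWidth",
    ShapePropertyId.FILL_COLOR: -1,          # not supported for this consumer
}

def set_property(target, id_map, prop, value):
    concrete = id_map.get(prop, -1)
    if concrete == -1:
        return False                         # supportsProperty() would say no
    target[concrete] = value
    return True

props = {}
set_property(props, CHART_OBJECT_IDS, ShapePropertyId.LINE_WIDTH, 50)
print(props)   # {'BorderWidth': 50}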
1,350
<gh_stars>1000+ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. // Code generated by Microsoft (R) AutoRest Code Generator. package com.azure.resourcemanager.maintenance.implementation; import com.azure.core.http.rest.PagedIterable; import com.azure.core.http.rest.Response; import com.azure.core.http.rest.SimpleResponse; import com.azure.core.util.Context; import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.maintenance.fluent.PublicMaintenanceConfigurationsClient; import com.azure.resourcemanager.maintenance.fluent.models.MaintenanceConfigurationInner; import com.azure.resourcemanager.maintenance.models.MaintenanceConfiguration; import com.azure.resourcemanager.maintenance.models.PublicMaintenanceConfigurations; import com.fasterxml.jackson.annotation.JsonIgnore; public final class PublicMaintenanceConfigurationsImpl implements PublicMaintenanceConfigurations { @JsonIgnore private final ClientLogger logger = new ClientLogger(PublicMaintenanceConfigurationsImpl.class); private final PublicMaintenanceConfigurationsClient innerClient; private final com.azure.resourcemanager.maintenance.MaintenanceManager serviceManager; public PublicMaintenanceConfigurationsImpl( PublicMaintenanceConfigurationsClient innerClient, com.azure.resourcemanager.maintenance.MaintenanceManager serviceManager) { this.innerClient = innerClient; this.serviceManager = serviceManager; } public PagedIterable<MaintenanceConfiguration> list() { PagedIterable<MaintenanceConfigurationInner> inner = this.serviceClient().list(); return Utils.mapPage(inner, inner1 -> new MaintenanceConfigurationImpl(inner1, this.manager())); } public PagedIterable<MaintenanceConfiguration> list(Context context) { PagedIterable<MaintenanceConfigurationInner> inner = this.serviceClient().list(context); return Utils.mapPage(inner, inner1 -> new MaintenanceConfigurationImpl(inner1, this.manager())); } public MaintenanceConfiguration get(String resourceName) { MaintenanceConfigurationInner inner = this.serviceClient().get(resourceName); if (inner != null) { return new MaintenanceConfigurationImpl(inner, this.manager()); } else { return null; } } public Response<MaintenanceConfiguration> getWithResponse(String resourceName, Context context) { Response<MaintenanceConfigurationInner> inner = this.serviceClient().getWithResponse(resourceName, context); if (inner != null) { return new SimpleResponse<>( inner.getRequest(), inner.getStatusCode(), inner.getHeaders(), new MaintenanceConfigurationImpl(inner.getValue(), this.manager())); } else { return null; } } private PublicMaintenanceConfigurationsClient serviceClient() { return this.innerClient; } private com.azure.resourcemanager.maintenance.MaintenanceManager manager() { return this.serviceManager; } }
1,014
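The wrapper above follows a uniform pattern across the generated SDK: delegate to the inner client, then map each inner model (or each page of results) into the fluent wrapper type, passing null through untouched. The mapping step is essentially the following, sketched in Python with a lazy generator in place of Utils.mapPage (the names are illustrative):

def map_page(inner_iterable, mapper):
    """Lazily wrap every inner model coming out of a paged result
    (the moral equivalent of Utils.mapPage in the code above)."""
    for inner in inner_iterable:
        yield mapper(inner)


class MaintenanceConfiguration:            # illustrative wrapper type
    def __init__(self, inner):
        self.inner = inner

    def name(self):
        return self.inner["name"]


inner_page = [{"name": "default-config"}, {"name": "weekly-window"}]
for config in map_page(inner_page, MaintenanceConfiguration):
    print(config.name())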
1,283
<filename>hikyuu_cpp/unit_test/hikyuu/indicator/test_Indicator.cpp<gh_stars>1000+
/*
 * test_Indicator.cpp
 *
 *  Created on: 2013-4-11
 *      Author: fasiondog
 */

#include "doctest/doctest.h"
#include <hikyuu/indicator/Indicator.h>
#include <hikyuu/indicator/crt/PRICELIST.h>
#include <hikyuu/indicator/crt/KDATA.h>
#include <hikyuu/StockManager.h>

using namespace hku;

/**
 * @defgroup test_indicator_Indicator test_indicator_Indicator
 * @ingroup test_hikyuu_indicator_suite
 * @{
 */

/** @par Check points */
TEST_CASE("test_operator_add") {
    /** @arg Normal addition */
    PriceList d1, d2;
    for (size_t i = 0; i < 10; ++i) {
        d1.push_back(i);
        d2.push_back(i + 1);
    }

    Indicator data1 = PRICELIST(d1);
    Indicator data2 = PRICELIST(d2);
    Indicator result = data1 + data2;
    CHECK_EQ(result.size(), 10);
    CHECK_EQ(result.getResultNumber(), 1);
    CHECK_EQ(result.discard(), 0);
    for (size_t i = 0; i < 10; ++i) {
        CHECK_EQ(result[i], i + i + 1);
    }

    /** @arg The two indicators to add have different sizes, one of them is empty */
    Indicator data3;
    result = data1 + data3;
    CHECK_UNARY(result.empty());
    CHECK_EQ(result.size(), 0);

    /** @arg The two indicators to add have different sizes */
    PriceList d3;
    for (size_t i = 0; i < 20; ++i) {
        d3.push_back(i);
    }
    data3 = PRICELIST(d3);
    result = data1 + data3;
    CHECK_EQ(data1.size(), 10);
    CHECK_EQ(data3.size(), 20);
    CHECK_EQ(result.empty(), false);
    CHECK_EQ(result.size(), 20);
    CHECK_EQ(result.discard(), 10);
    for (size_t i = 0; i < result.discard(); ++i) {
        CHECK_UNARY(std::isnan(result[i]));
    }
    for (size_t i = result.discard(); i < 20; ++i) {
        CHECK_EQ(result[i], i + i - 10);
    }

    /** @arg The two indicators have the same size but different result_number */
    StockManager& sm = StockManager::instance();
    Stock stock = sm.getStock("sh600000");
    KQuery query(0, 10);
    KData kdata = stock.getKData(query);
    Indicator k = KDATA(kdata);
    CHECK_EQ(k.size(), data1.size());
    result = k + data1;
    CHECK_EQ(result.size(), k.size());
    CHECK_EQ(result.getResultNumber(), 1);
    for (size_t i = 0; i < result.size(); ++i) {
        CHECK_EQ(result[i], (k[i] + data1[i]));
    }
}

/** @par Check points */
TEST_CASE("test_operator_reduce") {
    /** @arg Normal subtraction */
    PriceList d1, d2;
    for (size_t i = 0; i < 10; ++i) {
        d1.push_back(i);
        d2.push_back(i + 1);
    }

    Indicator data1 = PRICELIST(d1);
    Indicator data2 = PRICELIST(d2);
    Indicator result = data1 - data2;
    CHECK_EQ(result.size(), 10);
    CHECK_EQ(result.getResultNumber(), 1);
    CHECK_EQ(result.discard(), 0);
    for (size_t i = 0; i < 10; ++i) {
        CHECK_EQ(result[i], data1[i] - data2[i]);
    }

    /** @arg The two indicators to subtract have different sizes */
    Indicator data3;
    result = data1 - data3;
    CHECK_UNARY(result.empty());
    CHECK_EQ(result.size(), 0);

    /** @arg The two indicators have the same size but different result_number */
    StockManager& sm = StockManager::instance();
    Stock stock = sm.getStock("sh600000");
    KQuery query(0, 10);
    KData kdata = stock.getKData(query);
    Indicator k = KDATA(kdata);
    CHECK_EQ(k.size(), data1.size());
    result = k - data1;
    CHECK_EQ(result.size(), k.size());
    for (size_t i = 0; i < result.size(); ++i) {
        CHECK_EQ(result[i], (k[i] - data1[i]));
    }
}

/** @par Check points */
TEST_CASE("test_operator_multi") {
    /** @arg Normal multiplication */
    PriceList d1, d2;
    for (size_t i = 0; i < 10; ++i) {
        d1.push_back(i);
        d2.push_back(i + 1);
    }

    Indicator data1 = PRICELIST(d1);
    Indicator data2 = PRICELIST(d2);
    Indicator result = data1 * data2;
    CHECK_EQ(result.size(), 10);
    CHECK_EQ(result.getResultNumber(), 1);
    CHECK_EQ(result.discard(), 0);
    for (size_t i = 0; i < 10; ++i) {
        CHECK_EQ(result[i], data1[i] * data2[i]);
    }

    /** @arg The two indicators to multiply have different sizes */
    Indicator data3;
    result = data1 * data3;
    CHECK_UNARY(result.empty());
    CHECK_EQ(result.size(), 0);

    /** @arg The two indicators have the same size but different result_number */
    StockManager& sm = StockManager::instance();
    Stock stock = sm.getStock("sh600000");
    KQuery query(0, 10);
    KData kdata = stock.getKData(query);
    Indicator k = KDATA(kdata);
    CHECK_EQ(k.size(), data1.size());
    result = k * data1;
    CHECK_EQ(result.size(), k.size());
    for (size_t i = 0; i < result.size(); ++i) {
        CHECK_EQ(result[i], (k[i] * data1[i]));
    }
}

/** @par Check points */
TEST_CASE("test_operator_division") {
    /** @arg Normal division */
    PriceList d1, d2;
    for (size_t i = 0; i < 10; ++i) {
        d1.push_back(i);
        d2.push_back(i + 1);
    }

    Indicator data1 = PRICELIST(d1);
    Indicator data2 = PRICELIST(d2);
    Indicator result = data2 / data1;
    CHECK_EQ(result.size(), 10);
    CHECK_EQ(result.getResultNumber(), 1);
    CHECK_EQ(result.discard(), 0);
    for (size_t i = 0; i < 10; ++i) {
        if (data1[i] == 0.0) {
            CHECK_UNARY(std::isnan(result[i]));
        } else {
            CHECK_EQ(result[i], data2[i] / data1[i]);
        }
    }

    /** @arg The two indicators to divide have different sizes */
    Indicator data3;
    result = data1 / data3;
    CHECK_UNARY(result.empty());
    CHECK_EQ(result.size(), 0);

    /** @arg The two indicators have the same size but different result_number */
    StockManager& sm = StockManager::instance();
    Stock stock = sm.getStock("sh600000");
    KQuery query(0, 10);
    KData kdata = stock.getKData(query);
    Indicator k = KDATA(kdata);
    CHECK_EQ(k.size(), data1.size());
    result = k / data1;
    CHECK_EQ(result.size(), k.size());
    for (size_t i = 0; i < result.size(); ++i) {
        if (data1[i] == 0.0) {
            CHECK_UNARY(std::isnan(result[i]));
        } else {
            CHECK_EQ(result[i], (k[i] / data1[i]));
        }
    }
}

/** @par Check points */
TEST_CASE("test_operator_mod") {
    /** @arg Normal modulo */
    PriceList d1, d2;
    for (size_t i = 0; i < 10; ++i) {
        d1.push_back(i);
        d2.push_back(i + 2);
    }

    Indicator data1 = PRICELIST(d1);
    Indicator data2 = PRICELIST(d2);
    Indicator result = data2 % data1;
    CHECK_EQ(result.size(), 10);
    CHECK_EQ(result.getResultNumber(), 1);
    CHECK_EQ(result.discard(), 0);
    CHECK_UNARY(std::isnan(result[0]));
    CHECK_EQ(result[1], 0);
    CHECK_EQ(result[2], 0);
    CHECK_EQ(result[3], 2);
    CHECK_EQ(result[4], 2);
    CHECK_EQ(result[5], 2);
    CHECK_EQ(result[6], 2);
    CHECK_EQ(result[7], 2);
    CHECK_EQ(result[8], 2);
    CHECK_EQ(result[9], 2);

    /** @arg The two indicators have different sizes */
    Indicator data3;
    result = data1 % data3;
    CHECK_UNARY(result.empty());
    CHECK_EQ(result.size(), 0);

    /** @arg The two indicators have the same size but different result_number */
    StockManager& sm = StockManager::instance();
    Stock stock = sm.getStock("sh600000");
    KQuery query(0, 10);
    KData kdata = stock.getKData(query);
    Indicator k = KDATA(kdata);
    CHECK_EQ(k.size(), data1.size());
    result = k % data1;
    CHECK_EQ(result.size(), k.size());
    CHECK_UNARY(std::isnan(result[0]));
    CHECK_EQ(result[1], 0);
    CHECK_EQ(result[2], 1);
    CHECK_EQ(result[3], 1);
    CHECK_EQ(result[4], 3);
    CHECK_EQ(result[5], 1);
    CHECK_EQ(result[6], 3);
    CHECK_EQ(result[7], 6);
    CHECK_EQ(result[8], 2);
    CHECK_EQ(result[9], 8);
}

/** @par Check points */
TEST_CASE("test_operator_eq") {
    /** @arg Normal equality */
    PriceList d1, d2;
    for (size_t i = 0; i < 10; ++i) {
        d1.push_back(i);
        d2.push_back(i);
    }

    Indicator data1 = PRICELIST(d1);
    Indicator data2 = PRICELIST(d2);
    Indicator result = (data2 == data1);
    CHECK_EQ(result.size(), 10);
    CHECK_EQ(result.getResultNumber(), 1);
    CHECK_EQ(result.discard(), 0);
    for (size_t i = 0; i < 10; ++i) {
        CHECK_EQ(result[i], true);
    }

    /** @arg The two indicators have different sizes */
    Indicator data3;
    result = (data1 == data3);
    CHECK_UNARY(result.empty());
    CHECK_EQ(result.size(), 0);

    /** @arg The two indicators have the same size but different result_number */
    StockManager& sm = StockManager::instance();
    Stock stock = sm.getStock("sh600000");
    KQuery query(0, 10);
    KData kdata = stock.getKData(query);
    Indicator k = KDATA(kdata);
    CHECK_EQ(k.size(), data1.size());
    result = (k == data1);
    CHECK_EQ(result.size(), k.size());
    for (size_t i = 0; i < result.size(); ++i) {
        CHECK_EQ(result[i], false);
    }
}

/** @par Check points */
TEST_CASE("test_operator_ne") {
    /** @arg Normal inequality */
    PriceList d1, d2;
    for (size_t i = 0; i < 10; ++i) {
        d1.push_back(i);
        d2.push_back(i);
    }

    Indicator data1 = PRICELIST(d1);
    Indicator data2 = PRICELIST(d2);
    Indicator result = (data2 != data1);
    CHECK_EQ(result.size(), 10);
    CHECK_EQ(result.getResultNumber(), 1);
    CHECK_EQ(result.discard(), 0);
    for (size_t i = 0; i < 10; ++i) {
        CHECK_EQ(result[i], false);
    }

    /** @arg The two indicators have different sizes */
    Indicator data3;
    result = (data1 != data3);
    CHECK_UNARY(result.empty());
    CHECK_EQ(result.size(), 0);

    /** @arg The two indicators have the same size but different result_number */
    StockManager& sm = StockManager::instance();
    Stock stock = sm.getStock("sh600000");
    KQuery query(0, 10);
    KData kdata = stock.getKData(query);
    Indicator k = KDATA(kdata);
    CHECK_EQ(k.size(), data1.size());
    result = (k != data1);
    CHECK_EQ(result.size(), k.size());
    for (size_t i = 0; i < result.size(); ++i) {
        CHECK_EQ(result[i], true);
    }
}

/** @par Check points */
TEST_CASE("test_operator_gt") {
    PriceList d1, d2, d3;
    for (size_t i = 0; i < 10; ++i) {
        d1.push_back(i);
        d2.push_back(i);
        d3.push_back(i + 1);
    }

    Indicator data1 = PRICELIST(d1);
    Indicator data2 = PRICELIST(d2);
    Indicator data3 = PRICELIST(d3);

    /** @arg ind1 > ind2 */
    Indicator result = (data3 > data1);
    CHECK_EQ(result.size(), 10);
    CHECK_EQ(result.getResultNumber(), 1);
    CHECK_EQ(result.discard(), 0);
    for (size_t i = 0; i < 10; ++i) {
        CHECK_EQ(result[i], 1.0);
    }

    /** @arg ind1 < ind2 */
    result = (data1 > data3);
    for (size_t i = 0; i < 10; ++i) {
        CHECK_EQ(result[i], 0.0);
    }

    /** @arg ind1 == ind2 */
    result = (data1 > data2);
    for (size_t i = 0; i < 10; ++i) {
        CHECK_EQ(result[i], 0.0);
    }

    /** @arg The two indicators have different sizes */
    Indicator data4;
    result = data1 > data4;
    CHECK_UNARY(result.empty());
    CHECK_EQ(result.size(), 0);

    /** @arg The two indicators have the same size but different result_number */
    StockManager& sm = StockManager::instance();
    Stock stock = sm.getStock("sh600000");
    KQuery query(0, 10);
    KData kdata = stock.getKData(query);
    Indicator k = KDATA(kdata);
    CHECK_EQ(k.size(), data1.size());
    result = (k > data1);
    CHECK_EQ(result.size(), k.size());
    for (size_t i = 0; i < result.size(); ++i) {
        CHECK_EQ(result[i], 1.0);
    }
}

/** @par Check points */
TEST_CASE("test_operator_ge") {
    PriceList d1, d2, d3;
    for (size_t i = 0; i < 10; ++i) {
        d1.push_back(i);
        d2.push_back(i);
        d3.push_back(i + 1);
    }

    Indicator data1 = PRICELIST(d1);
    Indicator data2 = PRICELIST(d2);
    Indicator data3 = PRICELIST(d3);

    /** @arg ind1 > ind2 */
    Indicator result = (data3 >= data1);
    CHECK_EQ(result.size(), 10);
    CHECK_EQ(result.getResultNumber(), 1);
    CHECK_EQ(result.discard(), 0);
    for (size_t i = 0; i < 10; ++i) {
        CHECK_EQ(result[i], 1.0);
    }

    /** @arg ind1 < ind2 */
    result = (data1 >= data3);
    for (size_t i = 0; i < 10; ++i) {
        CHECK_EQ(result[i], 0.0);
    }

    /** @arg ind1 == ind2 */
    result = (data1 >= data2);
    for (size_t i = 0; i < 10; ++i) {
        CHECK_EQ(result[i], 1.0);
    }

    /** @arg The two indicators have different sizes */
    Indicator data4;
    result = data1 >= data4;
    CHECK_UNARY(result.empty());
    CHECK_EQ(result.size(), 0);

    /** @arg The two indicators have the same size but different result_number */
    StockManager& sm = StockManager::instance();
    Stock stock = sm.getStock("sh600000");
    KQuery query(0, 10);
    KData kdata = stock.getKData(query);
    Indicator k = KDATA(kdata);
    CHECK_EQ(k.size(), data1.size());
    result = (k >= data1);
    CHECK_EQ(result.size(), k.size());
    for (size_t i = 0; i < result.size(); ++i) {
        CHECK_EQ(result[i], 1.0);
    }
}

/** @par Check points */
TEST_CASE("test_operator_lt") {
    PriceList d1, d2, d3;
    for (size_t i = 0; i < 10; ++i) {
        d1.push_back(i);
        d2.push_back(i);
        d3.push_back(i + 1);
    }

    Indicator data1 = PRICELIST(d1);
    Indicator data2 = PRICELIST(d2);
    Indicator data3 = PRICELIST(d3);

    /** @arg ind1 > ind2 */
    Indicator result = (data3 < data1);
    CHECK_EQ(result.size(), 10);
    CHECK_EQ(result.getResultNumber(), 1);
    CHECK_EQ(result.discard(), 0);
    for (size_t i = 0; i < 10; ++i) {
        CHECK_EQ(result[i], 0.0);
    }

    /** @arg ind1 < ind2 */
    result = (data1 < data3);
    for (size_t i = 0; i < 10; ++i) {
        CHECK_EQ(result[i], 1.0);
    }

    /** @arg ind1 == ind2 */
    result = (data1 < data2);
    for (size_t i = 0; i < 10; ++i) {
        CHECK_EQ(result[i], 0.0);
    }

    /** @arg The two indicators have different sizes */
    Indicator data4;
    result = data1 < data4;
    CHECK_UNARY(result.empty());
    CHECK_EQ(result.size(), 0);

    /** @arg The two indicators have the same size but different result_number */
    StockManager& sm = StockManager::instance();
    Stock stock = sm.getStock("sh600000");
    KQuery query(0, 10);
    KData kdata = stock.getKData(query);
    Indicator k = KDATA(kdata);
    CHECK_EQ(k.size(), data1.size());
    result = (k < data1);
    CHECK_EQ(result.size(), k.size());
    for (size_t i = 0; i < result.size(); ++i) {
        CHECK_EQ(result[i], 0.0);
    }
}

/** @par Check points */
TEST_CASE("test_operator_le") {
    PriceList d1, d2, d3;
    for (size_t i = 0; i < 10; ++i) {
        d1.push_back(i);
        d2.push_back(i);
        d3.push_back(i + 1);
    }

    Indicator data1 = PRICELIST(d1);
    Indicator data2 = PRICELIST(d2);
    Indicator data3 = PRICELIST(d3);

    /** @arg ind1 > ind2 */
    Indicator result = (data3 <= data1);
    CHECK_EQ(result.size(), 10);
    CHECK_EQ(result.getResultNumber(), 1);
    CHECK_EQ(result.discard(), 0);
    for (size_t i = 0; i < 10; ++i) {
        CHECK_EQ(result[i], 0.0);
    }

    /** @arg ind1 < ind2 */
    result = (data1 <= data3);
    for (size_t i = 0; i < 10; ++i) {
        CHECK_EQ(result[i], 1.0);
    }

    /** @arg ind1 == ind2 */
    result = (data1 <= data2);
    for (size_t i = 0; i < 10; ++i) {
        CHECK_EQ(result[i], 1.0);
    }

    /** @arg The two indicators have different sizes */
    Indicator data4;
    result = data1 <= data4;
    CHECK_UNARY(result.empty());
    CHECK_EQ(result.size(), 0);

    /** @arg The two indicators have the same size but different result_number */
    StockManager& sm = StockManager::instance();
    Stock stock = sm.getStock("sh600000");
    KQuery query(0, 10);
    KData kdata = stock.getKData(query);
    Indicator k = KDATA(kdata);
    CHECK_EQ(k.size(), data1.size());
    result = (k <= data1);
    CHECK_EQ(result.size(), k.size());
    for (size_t i = 0; i < result.size(); ++i) {
        CHECK_EQ(result[i], 0.0);
    }
}

/** @par Check points */
TEST_CASE("test_getResult_getResultAsPriceList") {
    StockManager& sm = StockManager::instance();
    Stock stock = sm.getStock("sh600000");
    KQuery query;
    KData kdata;
    Indicator ikdata, result1;
    PriceList result2;

    /** @arg The source data is empty */
    ikdata = KDATA(kdata);
    result1 = ikdata.getResult(0);
    result2 = ikdata.getResultAsPriceList(0);
    CHECK_EQ(result1.size(), 0);
    CHECK_EQ(result2.size(), 0);

    /** @arg Invalid result_num parameter */
    query = KQuery(0, 10);
    kdata = stock.getKData(query);
    ikdata = KDATA(kdata);
    CHECK_EQ(ikdata.size(), 10);
    result1 = ikdata.getResult(6);
    result2 = ikdata.getResultAsPriceList(6);
    CHECK_EQ(result1.size(), 0);
    CHECK_EQ(result2.size(), 0);

    /** @arg Normal retrieval */
    result1 = ikdata.getResult(0);
    result2 = ikdata.getResultAsPriceList(1);
    CHECK_EQ(result1.size(), 10);
    CHECK_EQ(result2.size(), 10);
    CHECK_EQ(result1[0], 29.5);
    CHECK_LT(std::fabs(result1[1] - 27.58), 0.0001);
    CHECK_EQ(result1[9], 26.45);
    CHECK_EQ(result2[0], 29.8);
    CHECK_EQ(result2[1], 28.38);
    CHECK_EQ(result2[9], 26.55);
}

/** @par Check points */
TEST_CASE("test_LOGIC_AND") {
    PriceList d1, d2, d3;
    for (size_t i = 0; i < 10; ++i) {
        d1.push_back(0);
        d2.push_back(1);
        d3.push_back(i);
    }

    Indicator data1 = PRICELIST(d1);
    Indicator data2 = PRICELIST(d2);
    Indicator data3 = PRICELIST(d3);

    /** @arg ind1 is all 0, ind2 is all 1 */
    Indicator result = data1 & data2;
    CHECK_EQ(result.size(), 10);
    CHECK_EQ(result.getResultNumber(), 1);
    CHECK_EQ(result.discard(), 0);
    for (size_t i = 0; i < 10; ++i) {
        CHECK_EQ(result[i], 0.0);
    }

    /** @arg ind is all 0, val is 1 */
    /*result = IND_AND(data1, 1.0);
    BOOST_CHECK(result.size() == 10);
    BOOST_CHECK(result.getResultNumber() == 1);
    BOOST_CHECK(result.discard() == 0);
    for (size_t i = 0; i < 10; ++i) {
        BOOST_CHECK(result[i] == 0.0);
    }*/

    /** @arg ind1 is all 0, ind2 is the integers starting from 0 */
    result = data1 & data3;
    CHECK_EQ(result.size(), 10);
    CHECK_EQ(result.getResultNumber(), 1);
    CHECK_EQ(result.discard(), 0);
    for (size_t i = 0; i < 10; ++i) {
        CHECK_EQ(result[i], 0.0);
    }

    /** @arg ind1 is all 1, ind2 is the integers starting from 0 */
    result = data2 & data3;
    CHECK_EQ(result.size(), 10);
    CHECK_EQ(result.getResultNumber(), 1);
    CHECK_EQ(result.discard(), 0);
    CHECK_EQ(result[0], 0.0);
    for (size_t i = 1; i < 10; ++i) {
        CHECK_EQ(result[i], 1.0);
    }

    /** @arg The two indicators have different sizes */
    Indicator data4;
    result = data1 & data4;
    CHECK_UNARY(result.empty());
    CHECK_EQ(result.size(), 0);
}

/** @par Check points */
TEST_CASE("test_LOGIC_OR") {
    PriceList d1, d2, d3;
    for (size_t i = 0; i < 10; ++i) {
        d1.push_back(0);
        d2.push_back(1);
        d3.push_back(i);
    }

    Indicator data1 = PRICELIST(d1);
    Indicator data2 = PRICELIST(d2);
    Indicator data3 = PRICELIST(d3);

    /** @arg ind1 is all 0, ind2 is all 1 */
    Indicator result = data1 | data2;
    CHECK_EQ(result.size(), 10);
    CHECK_EQ(result.getResultNumber(), 1);
    CHECK_EQ(result.discard(), 0);
    for (size_t i = 0; i < 10; ++i) {
        CHECK_EQ(result[i], 1.0);
    }

    /** @arg ind1 is all 0, ind2 is the integers starting from 0 */
    result = data1 | data3;
    CHECK_EQ(result.size(), 10);
    CHECK_EQ(result.getResultNumber(), 1);
    CHECK_EQ(result.discard(), 0);
    CHECK_EQ(result[0], 0.0);
    for (size_t i = 1; i < 10; ++i) {
        CHECK_EQ(result[i], 1.0);
    }

    /** @arg ind1 is all 1, ind2 is the integers starting from 0 */
    result = data2 | data3;
    CHECK_EQ(result.size(), 10);
    CHECK_EQ(result.getResultNumber(), 1);
    CHECK_EQ(result.discard(), 0);
    for (size_t i = 0; i < 10; ++i) {
        CHECK_EQ(result[i], 1.0);
    }

    /** @arg The two indicators have different sizes */
    Indicator data4;
    result = data1 | data4;
    CHECK_UNARY(result.empty());
    CHECK_EQ(result.size(), 0);
}

/** @} */
9,796
5,079
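A minimal standalone sketch of the size-alignment rule the operator tests above exercise (assumes the same hikyuu headers; PRICELIST, Indicator and their operators come from the file above, everything else is illustrative):

#include <cmath>
#include <hikyuu/indicator/Indicator.h>
#include <hikyuu/indicator/crt/PRICELIST.h>
using namespace hku;

// When two indicators have different lengths, the result takes the longer
// length and the leading (len_long - len_short) values are discarded and
// read back as NaN, mirroring the expectations in test_operator_add.
static void alignment_sketch() {
    PriceList shorter, longer;
    for (size_t i = 0; i < 10; ++i) shorter.push_back(i);
    for (size_t i = 0; i < 20; ++i) longer.push_back(i);
    Indicator a = PRICELIST(shorter);
    Indicator b = PRICELIST(longer);
    Indicator sum = a + b;               // sum.size() == 20, sum.discard() == 10
    bool nan_head = std::isnan(sum[0]);  // true while the index < sum.discard()
    (void)nan_head;
}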
<gh_stars>1000+
# -*- test-case-name: openid.test.test_xrires -*-
"""XRI resolution.
"""

from urllib import urlencode
from openid import fetchers
from openid.yadis import etxrd
from openid.yadis.xri import toURINormal
from openid.yadis.services import iterServices

DEFAULT_PROXY = 'http://proxy.xri.net/'


class ProxyResolver(object):
    """Python interface to a remote XRI proxy resolver.
    """
    def __init__(self, proxy_url=DEFAULT_PROXY):
        self.proxy_url = proxy_url

    def queryURL(self, xri, service_type=None):
        """Build a URL to query the proxy resolver.

        @param xri: An XRI to resolve.
        @type xri: unicode

        @param service_type: The service type to resolve, if you desire
            service endpoint selection.  A service type is a URI.
        @type service_type: str

        @returns: a URL
        @returntype: str
        """
        # Trim off the xri:// prefix.  The proxy resolver didn't accept it
        # when this code was written, but that may (or may not) change for
        # XRI Resolution 2.0 Working Draft 11.
        qxri = toURINormal(xri)[6:]
        hxri = self.proxy_url + qxri
        args = {
            # XXX: If the proxy resolver will ensure that it doesn't return
            # bogus CanonicalIDs (as per Steve's message of 15 Aug 2006
            # 11:13:42), then we could ask for application/xrd+xml instead,
            # which would give us a bit less to process.
            '_xrd_r': 'application/xrds+xml',
            }

        if service_type:
            args['_xrd_t'] = service_type
        else:
            # Don't perform service endpoint selection.
            args['_xrd_r'] += ';sep=false'

        query = _appendArgs(hxri, args)
        return query

    def query(self, xri, service_types):
        """Resolve some services for an XRI.

        Note: I don't implement any service endpoint selection beyond
        what the resolver I'm querying does, so the Services I return
        may well include Services that were not of the types you asked
        for.

        May raise fetchers.HTTPFetchingError or L{etxrd.XRDSError} if
        the fetching or parsing don't go so well.

        @param xri: An XRI to resolve.
        @type xri: unicode

        @param service_types: A list of services types to query for.  Service
            types are URIs.
        @type service_types: list of str

        @returns: tuple of (CanonicalID, Service elements)
        @returntype: (unicode, list of C{ElementTree.Element}s)
        """
        # FIXME: No test coverage!
        services = []
        # Make a separate request to the proxy resolver for each service
        # type, as, if it is following Refs, it could return a different
        # XRDS for each.

        canonicalID = None

        for service_type in service_types:
            url = self.queryURL(xri, service_type)
            response = fetchers.fetch(url)
            if response.status not in (200, 206):
                # XXX: sucks to fail silently.
                # print "response not OK:", response
                continue
            et = etxrd.parseXRDS(response.body)
            canonicalID = etxrd.getCanonicalID(xri, et)
            some_services = list(iterServices(et))
            services.extend(some_services)
        # TODO:
        #  * If we do get hits for multiple service_types, we're almost
        #    certainly going to have duplicated service entries and
        #    broken priority ordering.
        return canonicalID, services


def _appendArgs(url, args):
    """Append some arguments to an HTTP query.
    """
    # to be merged with oidutil.appendArgs when we combine the projects.
    if hasattr(args, 'items'):
        args = args.items()
        args.sort()

    if len(args) == 0:
        return url

    # According to XRI Resolution section "QXRI query parameters":
    #
    # """If the original QXRI had a null query component (only a leading
    #    question mark), or a query component consisting of only question
    #    marks, one additional leading question mark MUST be added when
    #    adding any XRI resolution parameters."""
    if '?' in url.rstrip('?'):
        sep = '&'
    else:
        sep = '?'

    return '%s%s%s' % (url, sep, urlencode(args))
1,787
2,761
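A small usage sketch for the ProxyResolver above (Python 2, like the module itself; the import path is assumed from the package layout, the XRI and service-type URI are illustrative, and resolution needs network access):

from openid.yadis.xri.xrires import ProxyResolver

resolver = ProxyResolver()  # defaults to http://proxy.xri.net/

# Build the query URL without fetching anything.
url = resolver.queryURL('=example', service_type='xri://$res*auth*($v*2.0)')

# Resolve and collect Service elements (performs one fetch per service type).
canonical_id, services = resolver.query('=example', ['xri://$res*auth*($v*2.0)'])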
<reponame>ami-GS/msquic
#include <clog.h>

#ifdef BUILDING_TRACEPOINT_PROVIDER
#define TRACEPOINT_CREATE_PROBES
#else
#define TRACEPOINT_DEFINE
#endif

#include "quic_gtest.cpp.clog.h"
87
2,151
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/service_worker/web_service_worker_impl.h"

#include <utility>

#include "base/macros.h"
#include "content/common/service_worker/service_worker_messages.h"
#include "content/renderer/service_worker/service_worker_context_client.h"
#include "content/renderer/service_worker/service_worker_provider_context.h"
#include "content/renderer/service_worker/web_service_worker_provider_impl.h"
#include "third_party/blink/public/platform/modules/serviceworker/web_service_worker_proxy.h"
#include "third_party/blink/public/platform/web_runtime_features.h"
#include "third_party/blink/public/platform/web_string.h"

using blink::WebString;

namespace content {

namespace {

class ServiceWorkerHandleImpl : public blink::WebServiceWorker::Handle {
 public:
  explicit ServiceWorkerHandleImpl(scoped_refptr<WebServiceWorkerImpl> worker)
      : worker_(std::move(worker)) {}
  ~ServiceWorkerHandleImpl() override {}

  blink::WebServiceWorker* ServiceWorker() override { return worker_.get(); }

 private:
  scoped_refptr<WebServiceWorkerImpl> worker_;

  DISALLOW_COPY_AND_ASSIGN(ServiceWorkerHandleImpl);
};

void OnTerminated(
    std::unique_ptr<WebServiceWorkerImpl::TerminateForTestingCallback>
        callback) {
  callback->OnSuccess();
}

}  // namespace

// static
scoped_refptr<WebServiceWorkerImpl>
WebServiceWorkerImpl::CreateForServiceWorkerGlobalScope(
    blink::mojom::ServiceWorkerObjectInfoPtr info) {
  scoped_refptr<WebServiceWorkerImpl> impl =
      new WebServiceWorkerImpl(std::move(info), nullptr /* provider_context */);
  return impl;
}

// static
scoped_refptr<WebServiceWorkerImpl>
WebServiceWorkerImpl::CreateForServiceWorkerClient(
    blink::mojom::ServiceWorkerObjectInfoPtr info,
    base::WeakPtr<ServiceWorkerProviderContext> provider_context) {
  DCHECK(provider_context);
  scoped_refptr<WebServiceWorkerImpl> impl =
      new WebServiceWorkerImpl(std::move(info), std::move(provider_context));
  return impl;
}

void WebServiceWorkerImpl::StateChanged(
    blink::mojom::ServiceWorkerState new_state) {
  state_ = new_state;

  // TODO(nhiroki): This is a quick fix for http://crbug.com/507110
  DCHECK(proxy_);
  if (proxy_)
    proxy_->DispatchStateChangeEvent();
}

void WebServiceWorkerImpl::SetProxy(blink::WebServiceWorkerProxy* proxy) {
  proxy_ = proxy;
}

blink::WebServiceWorkerProxy* WebServiceWorkerImpl::Proxy() {
  return proxy_;
}

blink::WebURL WebServiceWorkerImpl::Url() const {
  return info_->url;
}

blink::mojom::ServiceWorkerState WebServiceWorkerImpl::GetState() const {
  return state_;
}

void WebServiceWorkerImpl::PostMessageToServiceWorker(
    blink::TransferableMessage message) {
  host_->PostMessageToServiceWorker(std::move(message));
}

void WebServiceWorkerImpl::TerminateForTesting(
    std::unique_ptr<TerminateForTestingCallback> callback) {
  host_->TerminateForTesting(
      base::BindOnce(&OnTerminated, std::move(callback)));
}

// static
std::unique_ptr<blink::WebServiceWorker::Handle>
WebServiceWorkerImpl::CreateHandle(scoped_refptr<WebServiceWorkerImpl> worker) {
  if (!worker)
    return nullptr;
  return std::make_unique<ServiceWorkerHandleImpl>(std::move(worker));
}

WebServiceWorkerImpl::WebServiceWorkerImpl(
    blink::mojom::ServiceWorkerObjectInfoPtr info,
    base::WeakPtr<ServiceWorkerProviderContext> provider_context)
    : binding_(this),
      info_(std::move(info)),
      state_(info_->state),
      proxy_(nullptr),
      is_for_client_(provider_context),
      context_for_client_(std::move(provider_context)) {
  DCHECK_NE(blink::mojom::kInvalidServiceWorkerVersionId, info_->version_id);
  host_.Bind(std::move(info_->host_ptr_info));
  binding_.Bind(std::move(info_->request));

  if (is_for_client_) {
    context_for_client_->AddServiceWorkerObject(info_->version_id, this);
  } else {
    ServiceWorkerContextClient::ThreadSpecificInstance()
        ->AddServiceWorkerObject(info_->version_id, this);
  }
}

WebServiceWorkerImpl::~WebServiceWorkerImpl() {
  if (is_for_client_) {
    if (context_for_client_) {
      context_for_client_->RemoveServiceWorkerObject(info_->version_id);
    }
  } else {
    if (ServiceWorkerContextClient::ThreadSpecificInstance()) {
      ServiceWorkerContextClient::ThreadSpecificInstance()
          ->RemoveServiceWorkerObject(info_->version_id);
    }
  }
}

}  // namespace content
1,547
5,460
import boto3
import json
import pytest

from botocore.exceptions import ClientError
from moto import mock_iot, mock_cognitoidentity


@mock_iot
def test_attach_policy():
    client = boto3.client("iot", region_name="ap-northeast-1")
    policy_name = "my-policy"
    doc = "{}"

    cert = client.create_keys_and_certificate(setAsActive=True)
    cert_arn = cert["certificateArn"]
    client.create_policy(policyName=policy_name, policyDocument=doc)
    client.attach_policy(policyName=policy_name, target=cert_arn)

    res = client.list_attached_policies(target=cert_arn)
    res.should.have.key("policies").which.should.have.length_of(1)
    res["policies"][0]["policyName"].should.equal("my-policy")


@mock_iot
@mock_cognitoidentity
def test_attach_policy_to_identity():
    region = "ap-northeast-1"

    cognito_identity_client = boto3.client("cognito-identity", region_name=region)
    identity_pool_name = "test_identity_pool"
    identity_pool = cognito_identity_client.create_identity_pool(
        IdentityPoolName=identity_pool_name, AllowUnauthenticatedIdentities=True
    )
    identity = cognito_identity_client.get_id(
        AccountId="test", IdentityPoolId=identity_pool["IdentityPoolId"]
    )

    client = boto3.client("iot", region_name=region)
    policy_name = "my-policy"
    doc = "{}"
    client.create_policy(policyName=policy_name, policyDocument=doc)
    client.attach_policy(policyName=policy_name, target=identity["IdentityId"])

    res = client.list_attached_policies(target=identity["IdentityId"])
    res.should.have.key("policies").which.should.have.length_of(1)
    res["policies"][0]["policyName"].should.equal(policy_name)


@mock_iot
def test_detach_policy():
    client = boto3.client("iot", region_name="ap-northeast-1")
    policy_name = "my-policy"
    doc = "{}"

    cert = client.create_keys_and_certificate(setAsActive=True)
    cert_arn = cert["certificateArn"]
    client.create_policy(policyName=policy_name, policyDocument=doc)
    client.attach_policy(policyName=policy_name, target=cert_arn)

    res = client.list_attached_policies(target=cert_arn)
    res.should.have.key("policies").which.should.have.length_of(1)
    res["policies"][0]["policyName"].should.equal("my-policy")

    client.detach_policy(policyName=policy_name, target=cert_arn)
    res = client.list_attached_policies(target=cert_arn)
    res.should.have.key("policies").which.should.be.empty


@mock_iot
def test_list_attached_policies():
    client = boto3.client("iot", region_name="ap-northeast-1")
    cert = client.create_keys_and_certificate(setAsActive=True)
    policies = client.list_attached_policies(target=cert["certificateArn"])
    policies["policies"].should.be.empty


@mock_iot
def test_policy_versions():
    client = boto3.client("iot", region_name="ap-northeast-1")
    policy_name = "my-policy"
    doc = "{}"

    policy = client.create_policy(policyName=policy_name, policyDocument=doc)
    policy.should.have.key("policyName").which.should.equal(policy_name)
    policy.should.have.key("policyArn").which.should_not.be.none
    policy.should.have.key("policyDocument").which.should.equal(json.dumps({}))
    policy.should.have.key("policyVersionId").which.should.equal("1")

    policy = client.get_policy(policyName=policy_name)
    policy.should.have.key("policyName").which.should.equal(policy_name)
    policy.should.have.key("policyArn").which.should_not.be.none
    policy.should.have.key("policyDocument").which.should.equal(json.dumps({}))
    policy.should.have.key("defaultVersionId").which.should.equal(
        policy["defaultVersionId"]
    )

    policy1 = client.create_policy_version(
        policyName=policy_name,
        policyDocument=json.dumps({"version": "version_1"}),
        setAsDefault=True,
    )
    policy1.should.have.key("policyArn").which.should_not.be.none
    policy1.should.have.key("policyDocument").which.should.equal(
        json.dumps({"version": "version_1"})
    )
    policy1.should.have.key("policyVersionId").which.should.equal("2")
    policy1.should.have.key("isDefaultVersion").which.should.equal(True)

    policy2 = client.create_policy_version(
        policyName=policy_name,
        policyDocument=json.dumps({"version": "version_2"}),
        setAsDefault=False,
    )
    policy2.should.have.key("policyArn").which.should_not.be.none
    policy2.should.have.key("policyDocument").which.should.equal(
        json.dumps({"version": "version_2"})
    )
    policy2.should.have.key("policyVersionId").which.should.equal("3")
    policy2.should.have.key("isDefaultVersion").which.should.equal(False)

    policy = client.get_policy(policyName=policy_name)
    policy.should.have.key("policyName").which.should.equal(policy_name)
    policy.should.have.key("policyArn").which.should_not.be.none
    policy.should.have.key("policyDocument").which.should.equal(
        json.dumps({"version": "version_1"})
    )
    policy.should.have.key("defaultVersionId").which.should.equal(
        policy1["policyVersionId"]
    )

    policy3 = client.create_policy_version(
        policyName=policy_name,
        policyDocument=json.dumps({"version": "version_3"}),
        setAsDefault=False,
    )
    policy3.should.have.key("policyArn").which.should_not.be.none
    policy3.should.have.key("policyDocument").which.should.equal(
        json.dumps({"version": "version_3"})
    )
    policy3.should.have.key("policyVersionId").which.should.equal("4")
    policy3.should.have.key("isDefaultVersion").which.should.equal(False)

    policy4 = client.create_policy_version(
        policyName=policy_name,
        policyDocument=json.dumps({"version": "version_4"}),
        setAsDefault=False,
    )
    policy4.should.have.key("policyArn").which.should_not.be.none
    policy4.should.have.key("policyDocument").which.should.equal(
        json.dumps({"version": "version_4"})
    )
    policy4.should.have.key("policyVersionId").which.should.equal("5")
    policy4.should.have.key("isDefaultVersion").which.should.equal(False)

    policy_versions = client.list_policy_versions(policyName=policy_name)
    policy_versions.should.have.key("policyVersions").which.should.have.length_of(5)
    list(
        map(lambda item: item["isDefaultVersion"], policy_versions["policyVersions"])
    ).count(True).should.equal(1)
    default_policy = list(
        filter(lambda item: item["isDefaultVersion"], policy_versions["policyVersions"])
    )
    default_policy[0].should.have.key("versionId").should.equal(
        policy1["policyVersionId"]
    )

    policy = client.get_policy(policyName=policy_name)
    policy.should.have.key("policyName").which.should.equal(policy_name)
    policy.should.have.key("policyArn").which.should_not.be.none
    policy.should.have.key("policyDocument").which.should.equal(
        json.dumps({"version": "version_1"})
    )
    policy.should.have.key("defaultVersionId").which.should.equal(
        policy1["policyVersionId"]
    )

    client.set_default_policy_version(
        policyName=policy_name, policyVersionId=policy4["policyVersionId"]
    )
    policy_versions = client.list_policy_versions(policyName=policy_name)
    policy_versions.should.have.key("policyVersions").which.should.have.length_of(5)
    list(
        map(lambda item: item["isDefaultVersion"], policy_versions["policyVersions"])
    ).count(True).should.equal(1)
    default_policy = list(
        filter(lambda item: item["isDefaultVersion"], policy_versions["policyVersions"])
    )
    default_policy[0].should.have.key("versionId").should.equal(
        policy4["policyVersionId"]
    )

    policy = client.get_policy(policyName=policy_name)
    policy.should.have.key("policyName").which.should.equal(policy_name)
    policy.should.have.key("policyArn").which.should_not.be.none
    policy.should.have.key("policyDocument").which.should.equal(
        json.dumps({"version": "version_4"})
    )
    policy.should.have.key("defaultVersionId").which.should.equal(
        policy4["policyVersionId"]
    )

    with pytest.raises(ClientError) as exc:
        client.create_policy_version(
            policyName=policy_name,
            policyDocument=json.dumps({"version": "version_5"}),
            setAsDefault=False,
        )
    err = exc.value.response["Error"]
    err["Message"].should.equal(
        "The policy %s already has the maximum number of versions (5)" % policy_name
    )

    client.delete_policy_version(policyName=policy_name, policyVersionId="1")
    policy_versions = client.list_policy_versions(policyName=policy_name)
    policy_versions.should.have.key("policyVersions").which.should.have.length_of(4)

    client.delete_policy_version(
        policyName=policy_name, policyVersionId=policy1["policyVersionId"]
    )
    policy_versions = client.list_policy_versions(policyName=policy_name)
    policy_versions.should.have.key("policyVersions").which.should.have.length_of(3)

    client.delete_policy_version(
        policyName=policy_name, policyVersionId=policy2["policyVersionId"]
    )
    policy_versions = client.list_policy_versions(policyName=policy_name)
    policy_versions.should.have.key("policyVersions").which.should.have.length_of(2)

    client.delete_policy_version(
        policyName=policy_name, policyVersionId=policy3["policyVersionId"]
    )
    policy_versions = client.list_policy_versions(policyName=policy_name)
    policy_versions.should.have.key("policyVersions").which.should.have.length_of(1)

    # should fail as it's the default policy. Should use delete_policy instead
    with pytest.raises(ClientError) as exc:
        client.delete_policy_version(
            policyName=policy_name, policyVersionId=policy4["policyVersionId"]
        )
    err = exc.value.response["Error"]
    err["Message"].should.equal("Cannot delete the default version of a policy")


@mock_iot
def test_delete_policy_validation():
    doc = """{
    "Version": "2012-10-17",
    "Statement":[
        {
            "Effect":"Allow",
            "Action":[
                "iot: *"
            ],
            "Resource":"*"
        }
      ]
    }
    """
    client = boto3.client("iot", region_name="ap-northeast-1")
    cert = client.create_keys_and_certificate(setAsActive=True)
    cert_arn = cert["certificateArn"]
    policy_name = "my-policy"
    client.create_policy(policyName=policy_name, policyDocument=doc)
    client.attach_principal_policy(policyName=policy_name, principal=cert_arn)

    with pytest.raises(ClientError) as e:
        client.delete_policy(policyName=policy_name)
    e.value.response["Error"]["Message"].should.contain(
        "The policy cannot be deleted as the policy is attached to one or more principals (name=%s)"
        % policy_name
    )
    res = client.list_policies()
    res.should.have.key("policies").which.should.have.length_of(1)

    client.detach_principal_policy(policyName=policy_name, principal=cert_arn)
    client.delete_policy(policyName=policy_name)
    res = client.list_policies()
    res.should.have.key("policies").which.should.have.length_of(0)


@mock_iot
def test_policy():
    client = boto3.client("iot", region_name="ap-northeast-1")
    name = "my-policy"
    doc = "{}"
    policy = client.create_policy(policyName=name, policyDocument=doc)
    policy.should.have.key("policyName").which.should.equal(name)
    policy.should.have.key("policyArn").which.should_not.be.none
    policy.should.have.key("policyDocument").which.should.equal(doc)
    policy.should.have.key("policyVersionId").which.should.equal("1")

    policy = client.get_policy(policyName=name)
    policy.should.have.key("policyName").which.should.equal(name)
    policy.should.have.key("policyArn").which.should_not.be.none
    policy.should.have.key("policyDocument").which.should.equal(doc)
    policy.should.have.key("defaultVersionId").which.should.equal("1")

    res = client.list_policies()
    res.should.have.key("policies").which.should.have.length_of(1)
    for policy in res["policies"]:
        policy.should.have.key("policyName").which.should_not.be.none
        policy.should.have.key("policyArn").which.should_not.be.none

    client.delete_policy(policyName=name)
    res = client.list_policies()
    res.should.have.key("policies").which.should.have.length_of(0)
4,694
1,727
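A condensed sketch of the five-version limit that test_policy_versions above walks through, in the same moto + boto3 style (the policy name is illustrative):

import json

import boto3
from moto import mock_iot


@mock_iot
def test_policy_version_limit_sketch():
    client = boto3.client("iot", region_name="ap-northeast-1")
    client.create_policy(policyName="p", policyDocument="{}")
    # An IoT policy may hold at most five versions; creating four more after
    # the initial version fills the quota, as the test above asserts.
    for i in range(4):
        client.create_policy_version(
            policyName="p",
            policyDocument=json.dumps({"version": str(i)}),
            setAsDefault=False,
        )
    versions = client.list_policy_versions(policyName="p")
    assert len(versions["policyVersions"]) == 5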
package strings;

import java.util.Arrays;
import java.util.Random;
import java.util.stream.IntStream;

// DC3 linear time suffix array construction algorithm ("Linear Work Suffix Array Construction")
public class SuffixArrayDC3 {

    static boolean leq(int a1, int a2, int b1, int b2) {
        return a1 < b1 || a1 == b1 && a2 <= b2;
    }

    static boolean leq(int a1, int a2, int a3, int b1, int b2, int b3) {
        return a1 < b1 || a1 == b1 && leq(a2, a3, b2, b3);
    }

    // stably sort a[0..n-1] to b[0..n-1] with keys in 0..K from r
    static void radixPass(int[] a, int[] b, int[] r, int offset, int n, int K) {
        int[] cnt = new int[K + 1];
        for (int i = 0; i < n; i++)
            ++cnt[r[a[i] + offset]];
        for (int i = 1; i < cnt.length; i++)
            cnt[i] += cnt[i - 1];
        for (int i = n - 1; i >= 0; i--)
            b[--cnt[r[a[i] + offset]]] = a[i];
    }

    // find the suffix array SA of T[0..n-1] in {1..K}^n
    // require T[n]=T[n+1]=T[n+2]=0, n>=2
    private static void suffixArray(int[] T, int[] SA, int n, int K) {
        int n0 = (n + 2) / 3;
        int n1 = (n + 1) / 3;
        int n2 = n / 3;
        int n02 = n0 + n2;

        //******* Step 0: Construct sample ********
        // generate positions of mod 1 and mod 2 suffixes
        // the "+(n0-n1)" adds a dummy mod 1 suffix if n%3 == 1
        int[] R = new int[n02 + 3];
        for (int i = 0, j = 0; i < n + (n0 - n1); i++)
            if (i % 3 != 0)
                R[j++] = i;

        //******* Step 1: Sort sample suffixes ********
        // lsb radix sort the mod 1 and mod 2 triples
        int[] SA12 = new int[n02 + 3];
        radixPass(R, SA12, T, 2, n02, K);
        radixPass(SA12, R, T, 1, n02, K);
        radixPass(R, SA12, T, 0, n02, K);

        // find lexicographic names of triples and
        // write them to correct places in R
        int name = 0;
        for (int i = 0; i < n02; i++) {
            if (i == 0 || T[SA12[i]] != T[SA12[i - 1]] || T[SA12[i] + 1] != T[SA12[i - 1] + 1]
                    || T[SA12[i] + 2] != T[SA12[i - 1] + 2]) {
                ++name;
            }
            R[SA12[i] / 3 + (SA12[i] % 3 == 1 ? 0 : n0)] = name;
        }

        if (name < n02) { // recurse if names are not yet unique
            suffixArray(R, SA12, n02, name);
            // store unique names in R using the suffix array
            for (int i = 0; i < n02; i++)
                R[SA12[i]] = i + 1;
        } else { // generate the suffix array of R directly
            for (int i = 0; i < n02; i++)
                SA12[R[i] - 1] = i;
        }

        //******* Step 2: Sort nonsample suffixes ********
        // stably sort the mod 0 suffixes from SA12 by their first character
        int[] R0 = new int[n0];
        for (int i = 0, j = 0; i < n02; i++)
            if (SA12[i] < n0)
                R0[j++] = 3 * SA12[i];
        int[] SA0 = new int[n0];
        radixPass(R0, SA0, T, 0, n0, K);

        //******* Step 3: Merge ********
        // merge sorted SA0 suffixes and sorted SA12 suffixes
        for (int p = 0, t = n0 - n1, k = 0; k < n; k++) {
            int i = SA12[t] < n0 ? SA12[t] * 3 + 1 : (SA12[t] - n0) * 3 + 2; // pos of current offset 12 suffix
            int j = SA0[p]; // pos of current offset 0 suffix
            if (SA12[t] < n0 ? // different compares for mod 1 and mod 2 suffixes
                    leq(T[i], R[SA12[t] + n0], T[j], R[j / 3]) :
                    leq(T[i], T[i + 1], R[SA12[t] - n0 + 1], T[j], T[j + 1], R[j / 3 + n0])) {
                // suffix from SA12 is smaller
                SA[k] = i;
                if (++t == n02) // done --- only SA0 suffixes left
                    for (k++; p < n0; p++, k++)
                        SA[k] = SA0[p];
            } else { // suffix from SA0 is smaller
                SA[k] = j;
                if (++p == n0) // done --- only SA12 suffixes left
                    for (k++; t < n02; t++, k++)
                        SA[k] = SA12[t] < n0 ? SA12[t] * 3 + 1 : (SA12[t] - n0) * 3 + 2;
            }
        }
    }

    public static int[] suffixArray(CharSequence s) {
        int n = s.length();
        if (n <= 1)
            return new int[n];
        int[] S = IntStream.range(0, n + 3).map(i -> i < n ? s.charAt(i) : 0).toArray();
        int[] sa = new int[n];
        suffixArray(S, sa, n, 255);
        return sa;
    }

    // longest common prefixes array in O(n)
    public static int[] lcp(int[] sa, CharSequence s) {
        int n = sa.length;
        int[] rank = new int[n];
        for (int i = 0; i < n; i++)
            rank[sa[i]] = i;
        int[] lcp = new int[n - 1];
        for (int i = 0, h = 0; i < n; i++) {
            if (rank[i] < n - 1) {
                for (int j = sa[rank[i] + 1];
                     Math.max(i, j) + h < s.length() && s.charAt(i + h) == s.charAt(j + h); ++h)
                    ;
                lcp[rank[i]] = h;
                if (h > 0)
                    --h;
            }
        }
        return lcp;
    }

    // Usage example
    public static void main(String[] args) {
        Random rnd1 = new Random(1);
        int n2 = 5_000_000;
        StringBuilder ss = rnd1.ints(n2, 0, 26).collect(
                StringBuilder::new, (sb, i) -> sb.append((char) ('a' + i)), StringBuilder::append);
        long time = System.currentTimeMillis();
        int[] sa2 = suffixArray(ss);
        System.out.println(System.currentTimeMillis() - time);

        String s1 = "abcab";
        int[] sa1 = suffixArray(s1);

        // print suffixes in lexicographic order
        for (int p : sa1)
            System.out.println(s1.substring(p));

        System.out.println("lcp = " + Arrays.toString(lcp(sa1, s1)));

        // random test
        Random rnd = new Random(1);
        for (int step = 0; step < 100000; step++) {
            int n = rnd.nextInt(100) + 1;
            StringBuilder s = rnd.ints(n, 0, 10).collect(
                    StringBuilder::new, (sb, i) -> sb.append((char) ('a' + i)), StringBuilder::append);
            int[] sa = suffixArray(s);
            int[] lcp = lcp(sa, s);
            for (int i = 0; i + 1 < n; i++) {
                String a = s.substring(sa[i]);
                String b = s.substring(sa[i + 1]);
                if (a.compareTo(b) >= 0
                        || !a.substring(0, lcp[i]).equals(b.substring(0, lcp[i]))
                        || (a + " ").charAt(lcp[i]) == (b + " ").charAt(lcp[i]))
                    throw new RuntimeException();
            }
        }
        System.out.println("Test passed");
    }
}
3,446
735
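One classic application of the suffix array plus LCP pair above is counting distinct substrings; a small sketch relying only on the two public methods of the class (the helper name is illustrative):

// Number of distinct substrings of s: each suffix, taken in sorted order,
// contributes (its length - LCP with the previous suffix).
static long countDistinctSubstrings(String s) {
    int n = s.length();
    if (n == 0) return 0;
    int[] sa = SuffixArrayDC3.suffixArray(s);
    int[] lcp = SuffixArrayDC3.lcp(sa, s);
    long total = n - sa[0]; // first suffix contributes its full length
    for (int i = 1; i < n; i++)
        total += (n - sa[i]) - lcp[i - 1];
    return total;
}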
/*
 * Copyright (C) 2006 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.mcxiaoke.next.ui.widget;

import android.content.Context;
import android.widget.BaseAdapter;
import android.widget.Filter;
import android.widget.Filterable;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;

/**
 * Copied from the Android framework source, with some unused methods removed
 * and several practical methods added. Most adapters in the app are based on
 * this class (source: ArrayAdapterCompat). The main additions are:
 * <p/>
 * <p/>
 * public List<T> getAllItems() // get all data items held by the adapter
 * public void add(int index, T object) // insert an item at the given position
 * public void set(int index, T object) // replace the item at the given position
 * public void addAll(int index, Collection<? extends T> collection) // bulk insert at the given position
 * public void addAll(int index, T... items) // bulk insert at the given position
 * public boolean contains(T object) // whether the adapter contains the item
 * public int indexOf(T object) // like the above, but returns the index, or -1 if absent
 * public void removeAt(int index) // remove the item at the given position
 * <p/>
 * Note: the framework ArrayAdapter's addAll method requires API 11+, while
 * ArrayAdapterCompat does not have this limitation.
 *
 * @param <T>
 */

/**
 * User: mcxiaoke
 * Date: 13-10-25
 * Time: 15:56
 */
public abstract class ArrayAdapterCompat<T> extends BaseAdapter implements Filterable {

    /**
     * Lock used to modify the content of {@link #mObjects}. Any write operation
     * performed on the array should be synchronized on this lock. This lock is also
     * used by the filter (see {@link #getFilter()} to make a synchronized copy of
     * the original array of data.
     */
    protected final Object mLock = new Object();

    /**
     * Indicates whether or not {@link #notifyDataSetChanged()} must be called whenever
     * {@link #mObjects} is modified.
     */
    private boolean mNotifyOnChange = true;

    private Context mContext;

    /**
     * Contains the list of objects that represent the data of this ArrayAdapter.
     * The content of this list is referred to as "the array" in the documentation.
     */
    protected List<T> mObjects;

    // A copy of the original mObjects array, initialized from and then used instead as soon as
    // the mFilter ArrayFilter is used. mObjects will then only contain the filtered values.
    protected List<T> mOriginalValues;
    protected Filter mFilter;

    /**
     * Constructor
     *
     * @param context The current context.
     */
    public ArrayAdapterCompat(Context context) {
        init(context, new ArrayList<T>());
    }

    /**
     * Constructor
     *
     * @param context The current context.
     * @param objects The objects to represent in the ListView.
     */
    public ArrayAdapterCompat(Context context, T[] objects) {
        init(context, Arrays.asList(objects));
    }

    /**
     * Constructor
     *
     * @param context The current context.
     * @param objects The objects to represent in the ListView.
     */
    public ArrayAdapterCompat(Context context, List<T> objects) {
        init(context, objects);
    }

    /**
     * Adds the specified object at the end of the array.
     *
     * @param object The object to add at the end of the array.
     */
    public void add(T object) {
        synchronized (mLock) {
            if (mOriginalValues != null) {
                mOriginalValues.add(object);
            } else {
                mObjects.add(object);
            }
        }
        if (mNotifyOnChange) notifyDataSetChanged();
    }

    /**
     * Adds the specified object at the index
     *
     * @param object The object to add at the end of the array.
     */
    public void add(int index, T object) {
        synchronized (mLock) {
            if (mOriginalValues != null) {
                mOriginalValues.add(index, object);
            } else {
                mObjects.add(index, object);
            }
        }
        if (mNotifyOnChange) notifyDataSetChanged();
    }

    /**
     * set the specified object at index
     *
     * @param object The object to add at the end of the array.
     */
    public void set(int index, T object) {
        synchronized (mLock) {
            if (mOriginalValues != null) {
                mOriginalValues.set(index, object);
            } else {
                mObjects.set(index, object);
            }
        }
        if (mNotifyOnChange) notifyDataSetChanged();
    }

    public void setAll(Collection<? extends T> collection) {
        clear();
        addAll(collection);
    }

    public void setAll(T... items) {
        clear();
        addAll(items);
    }

    /**
     * Adds the specified Collection at the end of the array.
     *
     * @param collection The Collection to add at the end of the array.
     */
    public void addAll(Collection<? extends T> collection) {
        synchronized (mLock) {
            if (mOriginalValues != null) {
                mOriginalValues.addAll(collection);
            } else {
                mObjects.addAll(collection);
            }
        }
        if (mNotifyOnChange) notifyDataSetChanged();
    }

    /**
     * Adds the specified items at the end of the array.
     *
     * @param items The items to add at the end of the array.
     */
    public void addAll(T... items) {
        synchronized (mLock) {
            if (mOriginalValues != null) {
                Collections.addAll(mOriginalValues, items);
            } else {
                Collections.addAll(mObjects, items);
            }
        }
        if (mNotifyOnChange) notifyDataSetChanged();
    }

    /**
     * Inserts the specified objects at the specified index in the array.
     *
     * @param collection The objects to insert into the array.
     * @param index      The index at which the object must be inserted.
     */
    public void addAll(int index, Collection<? extends T> collection) {
        synchronized (mLock) {
            if (mOriginalValues != null) {
                mOriginalValues.addAll(index, collection);
            } else {
                mObjects.addAll(index, collection);
            }
        }
        if (mNotifyOnChange) notifyDataSetChanged();
    }

    /**
     * Inserts the specified objects at the specified index in the array.
     *
     * @param items The objects to insert into the array.
     * @param index The index at which the object must be inserted.
     */
    public void addAll(int index, T... items) {
        List<T> collection = Arrays.asList(items);
        synchronized (mLock) {
            if (mOriginalValues != null) {
                mOriginalValues.addAll(index, collection);
            } else {
                mObjects.addAll(index, collection);
            }
        }
        if (mNotifyOnChange) notifyDataSetChanged();
    }

    public List<T> getAllItems() {
        if (mOriginalValues != null) {
            return mOriginalValues;
        } else {
            return mObjects;
        }
    }

    /**
     * Inserts the specified object at the specified index in the array.
     *
     * @param object The object to insert into the array.
     * @param index  The index at which the object must be inserted.
     */
    public void insert(T object, int index) {
        synchronized (mLock) {
            if (mOriginalValues != null) {
                mOriginalValues.add(index, object);
            } else {
                mObjects.add(index, object);
            }
        }
        if (mNotifyOnChange) notifyDataSetChanged();
    }

    /**
     * check contains the object.
     *
     * @param object The object to remove.
     */
    public boolean contains(T object) {
        synchronized (mLock) {
            if (mOriginalValues != null) {
                return mOriginalValues.contains(object);
            } else {
                return mObjects.contains(object);
            }
        }
    }

    /**
     * check contains the object.
     *
     * @param object The object to remove.
     */
    public int indexOf(T object) {
        synchronized (mLock) {
            if (mOriginalValues != null) {
                return mOriginalValues.indexOf(object);
            } else {
                return mObjects.indexOf(object);
            }
        }
    }

    /**
     * Removes the specified object from the array.
     *
     * @param object The object to remove.
     */
    public void remove(T object) {
        synchronized (mLock) {
            if (mOriginalValues != null) {
                mOriginalValues.remove(object);
            } else {
                mObjects.remove(object);
            }
        }
        if (mNotifyOnChange) notifyDataSetChanged();
    }

    /**
     * Removes the specified object in index from the array.
     *
     * @param index The index to remove.
     */
    public void removeAt(int index) {
        synchronized (mLock) {
            if (mOriginalValues != null) {
                mOriginalValues.remove(index);
            } else {
                mObjects.remove(index);
            }
        }
        if (mNotifyOnChange) notifyDataSetChanged();
    }

    /**
     * Removes the specified objects.
     *
     * @param collection The collection to remove.
     */
    public boolean removeAll(Collection<?> collection) {
        boolean result = false;
        synchronized (mLock) {
            Iterator<?> it;
            if (mOriginalValues != null) {
                it = mOriginalValues.iterator();
            } else {
                it = mObjects.iterator();
            }
            while (it.hasNext()) {
                if (collection.contains(it.next())) {
                    it.remove();
                    result = true;
                }
            }
        }
        if (mNotifyOnChange) notifyDataSetChanged();
        return result;
    }

    /**
     * Remove all elements from the list.
     */
    public void clear() {
        synchronized (mLock) {
            if (mOriginalValues != null) {
                mOriginalValues.clear();
            } else {
                mObjects.clear();
            }
        }
        if (mNotifyOnChange) notifyDataSetChanged();
    }

    /**
     * Sorts the content of this adapter using the specified comparator.
     *
     * @param comparator The comparator used to sort the objects contained
     *                   in this adapter.
     */
    public void sort(Comparator<? super T> comparator) {
        synchronized (mLock) {
            if (mOriginalValues != null) {
                Collections.sort(mOriginalValues, comparator);
            } else {
                Collections.sort(mObjects, comparator);
            }
        }
        if (mNotifyOnChange) notifyDataSetChanged();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void notifyDataSetChanged() {
        super.notifyDataSetChanged();
        mNotifyOnChange = true;
    }

    /**
     * Control whether methods that change the list ({@link #add},
     * {@link #insert}, {@link #remove}, {@link #clear}) automatically call
     * {@link #notifyDataSetChanged}.  If set to false, caller must
     * manually call notifyDataSetChanged() to have the changes
     * reflected in the attached view.
     * <p/>
     * The default is true, and calling notifyDataSetChanged()
     * resets the flag to true.
     *
     * @param notifyOnChange if true, modifications to the list will
     *                       automatically call {@link
     *                       #notifyDataSetChanged}
     */
    public void setNotifyOnChange(boolean notifyOnChange) {
        mNotifyOnChange = notifyOnChange;
    }

    private void init(Context context, List<T> objects) {
        mContext = context;
        mObjects = objects;
    }

    /**
     * Returns the context associated with this array adapter. The context is used
     * to create views from the resource passed to the constructor.
     *
     * @return The Context associated with this adapter.
     */
    public Context getContext() {
        return mContext;
    }

    /**
     * {@inheritDoc}
     */
    public int getCount() {
        return mObjects.size();
    }

    /**
     * {@inheritDoc}
     */
    public T getItem(int position) {
        return mObjects.get(position);
    }

    /**
     * Returns the position of the specified item in the array.
     *
     * @param item The item to retrieve the position of.
     * @return The position of the specified item.
     */
    public int getPosition(T item) {
        return mObjects.indexOf(item);
    }

    /**
     * {@inheritDoc}
     */
    public long getItemId(int position) {
        return position;
    }

    /**
     * {@inheritDoc}
     */
    public Filter getFilter() {
        if (mFilter == null) {
            mFilter = new ArrayFilter();
        }
        return mFilter;
    }

    public void setFilter(Filter filter) {
        mFilter = filter;
    }

    /**
     * <p>An array filter constrains the content of the array adapter with
     * a prefix. Each item that does not start with the supplied prefix
     * is removed from the list.</p>
     */
    private class ArrayFilter extends Filter {
        @Override
        protected FilterResults performFiltering(CharSequence prefix) {
            FilterResults results = new FilterResults();

            if (mOriginalValues == null) {
                synchronized (mLock) {
                    mOriginalValues = new ArrayList<T>(mObjects);
                }
            }

            if (prefix == null || prefix.length() == 0) {
                ArrayList<T> list;
                synchronized (mLock) {
                    list = new ArrayList<T>(mOriginalValues);
                }
                results.values = list;
                results.count = list.size();
            } else {
                String prefixString = toLowerCase(prefix.toString());

                ArrayList<T> values;
                synchronized (mLock) {
                    values = new ArrayList<T>(mOriginalValues);
                }

                final ArrayList<T> newValues = new ArrayList<T>();

                for (final T value : values) {
                    final String valueText = toLowerCase(value.toString());

                    // First match against the whole, non-split value
                    if (valueText.startsWith(prefixString)) {
                        newValues.add(value);
                    } else {
                        final String[] words = valueText.split(" ");
                        final int wordCount = words.length;

                        // Start at index 0, in case valueText starts with space(s)
                        for (String word : words) {
                            if (word.startsWith(prefixString)) {
                                newValues.add(value);
                                break;
                            }
                        }
                    }
                }

                results.values = newValues;
                results.count = newValues.size();
            }

            return results;
        }

        @Override
        protected void publishResults(CharSequence constraint, FilterResults results) {
            //noinspection unchecked
            mObjects = (List<T>) results.values;
            if (results.count > 0) {
                notifyDataSetChanged();
            } else {
                notifyDataSetInvalidated();
            }
        }
    }

    static String toLowerCase(String text) {
        return text == null ? null : text.toLowerCase(Locale.US);
    }
}
7,335
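A minimal subclass sketch showing how the adapter above is typically wired up; ArrayAdapterCompat is abstract (it extends BaseAdapter without implementing getView), so a subclass supplies the row rendering. The class name and row widget are illustrative:

import android.content.Context;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;

import com.mcxiaoke.next.ui.widget.ArrayAdapterCompat;

// Hypothetical concrete adapter for a list of strings.
public class StringListAdapter extends ArrayAdapterCompat<String> {

    public StringListAdapter(Context context) {
        super(context);
    }

    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
        // Reuse the recycled row when available, otherwise create a plain TextView.
        TextView view = (TextView) convertView;
        if (view == null) {
            view = new TextView(getContext());
        }
        view.setText(getItem(position));
        return view;
    }
}

// Usage: adapter.addAll("a", "b", "c"); adapter.setAll(newItems); adapter.sort(comparator);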