Dataset columns (flattened header of the viewer table):
  max_stars_count: int64, values from 301 to 224k
  text: string, lengths from 6 to 1.05M characters
  token_count: int64, values from 3 to 727k

max_stars_count: 933
<reponame>ztlevi/perfetto
/*
 * Copyright (C) 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "src/profiling/symbolizer/scoped_read_mmap.h"

#if !PERFETTO_BUILDFLAG(PERFETTO_OS_WIN)

#include "perfetto/base/logging.h"
#include "perfetto/ext/base/file_utils.h"

#include <sys/mman.h>

namespace perfetto {
namespace profiling {

ScopedReadMmap::ScopedReadMmap(const char* fname, size_t length)
    : length_(length), fd_(base::OpenFile(fname, O_RDONLY)) {
  if (!fd_) {
    PERFETTO_PLOG("Failed to open %s", fname);
    return;
  }
  ptr_ = mmap(nullptr, length, PROT_READ, MAP_PRIVATE, *fd_, 0);
}

ScopedReadMmap::~ScopedReadMmap() {
  if (ptr_ != MAP_FAILED)
    munmap(ptr_, length_);
}

bool ScopedReadMmap::IsValid() {
  return ptr_ != MAP_FAILED;
}

}  // namespace profiling
}  // namespace perfetto

#endif  // !PERFETTO_BUILDFLAG(PERFETTO_OS_WIN)
token_count: 512
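For orientation, here is a standalone sketch of the pattern the perfetto row above implements: map a file read-only with mmap and release the mapping when the object goes out of scope. It mirrors the ScopedReadMmap logic but uses plain POSIX calls so it compiles on its own; it is not the perfetto API itself, and the file path in main() is only an example.

```cpp
// Standalone sketch of the same RAII pattern: map a file read-only, unmap on scope exit.
#include <fcntl.h>
#include <sys/mman.h>
#include <sys/stat.h>
#include <unistd.h>
#include <cstdio>

class ReadOnlyMapping {
 public:
  explicit ReadOnlyMapping(const char* path) {
    int fd = open(path, O_RDONLY);
    if (fd < 0) return;
    struct stat st{};
    if (fstat(fd, &st) == 0 && st.st_size > 0) {
      len_ = static_cast<size_t>(st.st_size);
      ptr_ = mmap(nullptr, len_, PROT_READ, MAP_PRIVATE, fd, 0);
    }
    close(fd);  // the mapping stays valid after the descriptor is closed
  }
  ~ReadOnlyMapping() {
    if (ptr_ != MAP_FAILED) munmap(ptr_, len_);
  }
  bool IsValid() const { return ptr_ != MAP_FAILED; }
  const void* data() const { return ptr_; }
  size_t size() const { return len_; }

 private:
  void* ptr_ = MAP_FAILED;
  size_t len_ = 0;
};

int main() {
  ReadOnlyMapping m("/etc/hostname");  // any readable file works here
  if (m.IsValid())
    std::printf("mapped %zu bytes\n", m.size());
  return 0;
}
```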
max_stars_count: 3,428
{"id":"01020","group":"spam-2","checksum":{"type":"MD5","value":"291c3b0685eedf6178cc323bb8e2ce55"},"text":"Received: from hq.pro-ns.net (localhost [127.0.0.1])\n\tby hq.pro-ns.net (8.12.5/8.12.5) with ESMTP id g6O2a1hY010356\n\t(version=TLSv1/SSLv3 cipher=EDH-DSS-DES-CBC3-SHA bits=168 verify=NO)\n\tfor <<EMAIL>>; Tue, 23 Jul 2002 21:36:01 -0500 (CDT)\n\t(envelope-from <EMAIL>)\nReceived: (from cpunks@localhost)\n\tby hq.pro-ns.net (8.12.5/8.12.5/Submit) id g6O2a1Nu010353\n\tfor <EMAIL>; Tue, 23 Jul 2002 21:36:01 -0500 (CDT)\nReceived: from einstein.ssz.com (cpunks@[207.200.56.4])\n\tby hq.pro-ns.net (8.12.5/8.12.5) with ESMTP id g6O2ZxhX010328\n\tfor <<EMAIL>>; Tue, 23 Jul 2002 21:35:59 -0500 (CDT)\n\t(envelope-from <EMAIL>)\nX-Authentication-Warning: hq.pro-ns.net: Host cpunks@[207.200.56.4] claimed to be einstein.ssz.com\nReceived: (from cpunks@localhost)\n\tby einstein.ssz.com (8.8.8/8.8.8) id VAA26412\n\tfor <EMAIL>; Tue, 23 Jul 2002 21:44:37 -0500\nReceived: (from mdom@localhost)\n\tby einstein.ssz.com (8.8.8/8.8.8) id VAA26390\n\tfor cypherpunks-outgoing; Tue, 23 Jul 2002 21:44:28 -0500\nReceived: from yahoo.com (nobody@[210.95.127.129])\n\tby einstein.ssz.com (8.8.8/8.8.8) with SMTP id VAA26382\n\tfor <<EMAIL>>; Tue, 23 Jul 2002 21:44:21 -0500\nFrom: <EMAIL>\nMessage-ID: <026e73b85c7c$1423d3b7$5cd66db8@vqmlfv>\nTo: <EMAIL>.<EMAIL>.com\nSubject: Long distance 1335Pv-6\nDate: Wed, 24 Jul 2002 11:15:29 -0900\nMiME-Version: 1.0\nContent-Type: text/html; charset=\"iso-8859-1\"\nX-Priority: 3 (Normal)\nX-MSMail-Priority: Normal\nX-Mailer: Microsoft Outlook, Build 10.0.2627\nImportance: Normal\nSender: <EMAIL>\nPrecedence: bulk\nReply-To: <EMAIL>\nX-Mailing-List: [email protected]\nX-Unsubscription-Info: http://einstein.ssz.com/cdr\nX-List-Admin: <EMAIL>\nX-Loop: ssz.com\nX-Acceptable-Languages: English, Russian, German, French, Spanish\n\n\n<body bgcolor=\"#FFFFFF\" text=\"#CC3333\">\n<p><font face=\"Arial, Helvetica, sans-serif\"><b><font size=\"3\">Hi: <br>\n </font></b></font></p>\n<p><b><font face=\"Arial, Helvetica, sans-serif\" size=\"3\" color=\"#000000\">Have \n you been paying too much for your home or<br>\n business long distance?</font></b></p>\n<p><font face=\"Arial, Helvetica, sans-serif\" color=\"#000000\"><b><font size=\"3\">Have \n you been looking for an affordable but honest<br>\n long distance alternative?</font></b></font></p>\n<p><font face=\"Arial, Helvetica, sans-serif\"><b><font size=\"3\">We are offering \n Fiber optic Long distance for<br>\n as low as $9.95 per month!</font></b></font></p>\n<p><font face=\"Arial, Helvetica, sans-serif\"><b><font size=\"3\"><a href=\"mailto:<EMAIL>?subject=Phoneoffer\">Email</a> \n us with your phone number and we'll call you<br>\n back so you can hear how great the connection is.<br>\n <br>\n Six plans to choose from including a travel plan. <br>\n <br>\n <font color=\"#000000\">There are no credit checks and because you don't <br>\n need to change your long distance carrier, your <br>\n service can be turned on in just a few hours. <br>\n </font><br>\n Distributors needed! <br>\n <br>\n <font color=\"#000000\">We have distributors now making a few hundred to <br>\n many thousands of dollars per month from the comfort <br>\n of their homes. 
</font><br>\n <br>\n Obtain <a href=\"mailto:<EMAIL>?subject=Phoneoffer\">complete \n details</a> Include your phone number- we'll<br>\n call you back to confirm our crisp clear connection.<br>\n <br>\n To be removed: <a href=\"mailto:<EMAIL>\">click here</a></font></b></font></p>\n\n9954ceKz6-102mjlg8918qBcR4-723lUuX3399cRNs0-338xWys4979yobM4-873gAkQ5719cl69\n\n--DeathToSpamDeathToSpamDeathToSpam--\n\n\n-------------------------------------------------------\nThis sf.net email is sponsored by:ThinkGeek\nWelcome to geek heaven.\nhttp://thinkgeek.com/sf\n_______________________________________________\nSpamassassin-Sightings mailing list\n<EMAIL>\nhttps://lists.sourceforge.net/lists/listinfo/spamassassin-sightings\n\n\n"}
token_count: 1,679
max_stars_count: 1,853
unsigned __attribute__((const)) ctz(unsigned x) { return 0; }
token_count: 24
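The one-line row above is a stub that always returns 0. For contrast, a minimal standalone count-trailing-zeros written without compiler builtins; it is purely illustrative and not part of the dataset row.

```cpp
#include <cstdio>

// Count trailing zero bits of a 32-bit value; returns 32 for x == 0.
unsigned ctz32(unsigned x) {
  if (x == 0) return 32;
  unsigned n = 0;
  while ((x & 1u) == 0) {  // shift right until the lowest set bit reaches position 0
    x >>= 1;
    ++n;
  }
  return n;
}

int main() {
  std::printf("%u %u %u\n", ctz32(1), ctz32(8), ctz32(0x80000000u));  // prints: 0 3 31
  return 0;
}
```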
max_stars_count: 451
/*Header-MicMac-eLiSe-25/06/2007 MicMac : Multi Image Correspondances par Methodes Automatiques de Correlation eLiSe : ELements of an Image Software Environnement www.micmac.ign.fr Copyright : Institut Geographique National Author : <NAME> Contributors : <NAME>, <NAME>. [1] <NAME>, <NAME>. "A multiresolution and optimization-based image matching approach: An application to surface reconstruction from SPOT5-HRS stereo imagery." In IAPRS vol XXXVI-1/W41 in ISPRS Workshop On Topographic Mapping From Space (With Special Emphasis on Small Satellites), Ankara, Turquie, 02-2006. [2] <NAME>, "MicMac, un lociel de mise en correspondance d'images, adapte au contexte geograhique" to appears in Bulletin d'information de l'Institut Geographique National, 2007. Francais : MicMac est un logiciel de mise en correspondance d'image adapte au contexte de recherche en information geographique. Il s'appuie sur la bibliotheque de manipulation d'image eLiSe. Il est distibue sous la licences Cecill-B. Voir en bas de fichier et http://www.cecill.info. English : MicMac is an open source software specialized in image matching for research in geographic information. MicMac is built on the eLiSe image library. MicMac is governed by the "Cecill-B licence". See below and http://www.cecill.info. Header-MicMac-eLiSe-25/06/2007*/ #include "StdAfx.h" //=================================================== //=================================================== //=================================================== //=================================================== class DATA_DXF_WRITER : public RC_Object { public : DATA_DXF_WRITER ( const ElSTDNS string &, Box2di , bool InvY = true ); virtual ~DATA_DXF_WRITER(); void PutPt0(Pt2dr); void PutSeg(Seg2d s,const char * Layer); void PutVertex(Pt2dr,const char * Layer); void PutPolyline(const ElFilo<Pt2dr> &,const char * Layer,bool circ = false); private : void PutPt(Pt2dr,INT DTag,bool corY = true); void PutPt1(Pt2dr); void PutPt0(Pt2dr,bool corY); ElSTDNS string _name; FILE * _fp; bool _inv_Y; REAL _Y1; REAL StdY(REAL y) {return _inv_Y ? 
(_Y1 -y) : y;} Pt2dr StdPt(Pt2dr p){return Pt2dr(p.x,StdY(p.y));} typedef enum { TagStrEntity = 0, TagStrBlock = 2, TagStrLayer = 8, TagStrIdent = 9, TagPtX0 = 10, TagPtY0 = 20, TagEntityFollow = 66, TagCount = 70 } TAG; static const char *_SECTION; static const char *_ENDSEC; static const char *_SEQEND; static const char *_EOF; static const char *_HEADER; static const char *_BLOCKS; static const char *_TABLES; static const char *_EXTMIN; static const char *_EXTMAX; static const char *_LUPREC; static const char *_ENTITIES; static const char *_LINE; static const char *_POLYLINE; static const char *_VERTEX; //========================================== void PutTag(TAG tag) { fprintf(_fp,"%3d\n",tag);} void PutReal(REAL r) {fprintf(_fp,"%.3f\n",r); } void PutInt(INT i) {fprintf(_fp," %d\n",i); } void PutValY(REAL y) {PutReal(StdY(y));} void PutStr(const char * str) {fprintf(_fp,"%s\n",str);} void PutTagX0() {PutTag(TagPtX0);} void PutTagY0() {PutTag(TagPtY0);} void PutTagLayer() {PutTag(TagStrLayer);} void PutTagEntity() {PutTag(TagStrEntity);} void PutTagBlock() {PutTag(TagStrBlock);} void PutTagIdent() {PutTag(TagStrIdent);} void PutCount(INT i) { PutTag(TagCount); PutInt(i); } void EmptySec(const char * str) { PutSection(); PutStrBlock(str), PutEndSec(); } void PutLayer(const char * str) { if (str) { PutTagLayer(); PutStr(str); } } void PutStrEntity(const char * str) { PutTagEntity(); PutStr(str); } void PutSection() {PutStrEntity(_SECTION);} void PutEndSec() {PutStrEntity(_ENDSEC);} void PutEOF() {PutStrEntity(_EOF);} void PutSeqEnd() {PutStrEntity(_SEQEND);} void PutStrBlock(const char * str) { PutTagBlock(); PutStr(str); } void PutStrIdent(const char * str) { PutTagIdent(); PutStr(str); } }; const char * DATA_DXF_WRITER::_SEQEND = "SEQEND" ; const char * DATA_DXF_WRITER::_SECTION = "SECTION" ; const char * DATA_DXF_WRITER::_ENDSEC = "ENDSEC" ; const char * DATA_DXF_WRITER::_HEADER = "HEADER" ; const char * DATA_DXF_WRITER::_BLOCKS = "BLOCKS" ; const char * DATA_DXF_WRITER::_TABLES = "TABLES" ; const char * DATA_DXF_WRITER::_EXTMIN = "$EXTMIN" ; const char * DATA_DXF_WRITER::_EXTMAX = "$EXTMAX" ; const char * DATA_DXF_WRITER::_LUPREC = "$LUPREC" ; const char * DATA_DXF_WRITER::_ENTITIES = "ENTITIES" ; const char * DATA_DXF_WRITER::_LINE = "LINE" ; const char * DATA_DXF_WRITER::_POLYLINE = "POLYLINE" ; const char * DATA_DXF_WRITER::_EOF = "EOF" ; const char * DATA_DXF_WRITER::_VERTEX = "VERTEX" ; void DATA_DXF_WRITER::PutPt(Pt2dr p,INT dtag,bool corY) { PutTag((TAG)(TagPtX0+dtag)); PutReal(p.x); PutTag((TAG)(TagPtY0+dtag)); if (corY) PutValY(p.y); else PutReal(p.x); } void DATA_DXF_WRITER::PutPt0(Pt2dr p) { PutPt(p,0); } void DATA_DXF_WRITER::PutPt0(Pt2dr p,bool corY) { PutPt(p,0,corY); } void DATA_DXF_WRITER::PutPt1(Pt2dr p) { PutPt(p,1); } DATA_DXF_WRITER::DATA_DXF_WRITER ( const ElSTDNS string & Name, Box2di Box, bool InvY ) : _name (Name), _fp (ElFopen(Name.c_str(),"wb")), _inv_Y (InvY), _Y1 (Box._p1.y) { Box2di B( StdPt(Pt2dr(Box._p0)) , StdPt(Pt2dr(Box._p1)) ); ELISE_ASSERT(_fp != 0,"Can't open DXF file"); PutSection(); PutStrBlock(_HEADER); PutStrIdent(_EXTMIN); PutPt0(Pt2dr(B._p0),false); PutStrIdent(_EXTMAX); PutPt0(Pt2dr(B._p1),false); PutStrIdent(_LUPREC); PutCount(14); PutEndSec(); EmptySec(_TABLES); EmptySec(_BLOCKS); PutSection(); PutStrBlock(_ENTITIES); } void DATA_DXF_WRITER::PutSeg(Seg2d seg,const char * Layer) { PutStrEntity(_LINE); PutLayer(Layer); PutPt0(seg.p0()); PutPt1(seg.p1()); } void DATA_DXF_WRITER::PutVertex(Pt2dr pt,const char * Layer) { PutStrEntity(_VERTEX); 
PutLayer(Layer); PutPt0(pt); } DATA_DXF_WRITER::~DATA_DXF_WRITER() { PutEndSec(); PutEOF(); ElFclose(_fp); } void DATA_DXF_WRITER::PutPolyline(const ElFilo<Pt2dr> & pts,const char * Layer,bool circ) { PutStrEntity(_POLYLINE); PutLayer(Layer); PutTag(TagEntityFollow); PutInt(1); INT nb = pts.nb() + (circ ? 1 : 0); for (INT k=0; k<nb ; k++) PutVertex(pts[k%pts.nb()],Layer); PutSeqEnd(); } //=================================================== //=================================================== //=================================================== //=================================================== DXF_Writer::DXF_Writer ( const char * str, Box2di box, bool InvY ) : PRC0(new DATA_DXF_WRITER(str,box,InvY)) { } DATA_DXF_WRITER * DXF_Writer::ddw() { return (DATA_DXF_WRITER *) _ptr; } void DXF_Writer::PutPt0(Pt2dr pt) { ddw()->PutPt0(pt); } void DXF_Writer::PutSeg(Seg2d s,const char * Layer) { ddw()->PutSeg(s,Layer); } void DXF_Writer::PutVertex(Pt2dr p,const char * Layer) { ddw()->PutVertex(p,Layer); } void DXF_Writer::PutPolyline ( const ElFilo<Pt2dr> & pts, const char * Layer, bool circ ) { ddw()->PutPolyline(pts,Layer,circ); } /*Footer-MicMac-eLiSe-25/06/2007 Ce logiciel est un programme informatique servant à la mise en correspondances d'images pour la reconstruction du relief. Ce logiciel est régi par la licence CeCILL-B soumise au droit français et respectant les principes de diffusion des logiciels libres. Vous pouvez utiliser, modifier et/ou redistribuer ce programme sous les conditions de la licence CeCILL-B telle que diffusée par le CEA, le CNRS et l'INRIA sur le site "http://www.cecill.info". En contrepartie de l'accessibilité au code source et des droits de copie, de modification et de redistribution accordés par cette licence, il n'est offert aux utilisateurs qu'une garantie limitée. Pour les mêmes raisons, seule une responsabilité restreinte pèse sur l'auteur du programme, le titulaire des droits patrimoniaux et les concédants successifs. A cet égard l'attention de l'utilisateur est attirée sur les risques associés au chargement, à l'utilisation, à la modification et/ou au développement et à la reproduction du logiciel par l'utilisateur étant donné sa spécificité de logiciel libre, qui peut le rendre complexe à manipuler et qui le réserve donc à des développeurs et des professionnels avertis possédant des connaissances informatiques approfondies. Les utilisateurs sont donc invités à charger et tester l'adéquation du logiciel à leurs besoins dans des conditions permettant d'assurer la sécurité de leurs systèmes et ou de leurs données et, plus généralement, à l'utiliser et l'exploiter dans les mêmes conditions de sécurité. Le fait que vous puissiez accéder à cet en-tête signifie que vous avez pris connaissance de la licence CeCILL-B, et que vous en avez accepté les termes. Footer-MicMac-eLiSe-25/06/2007*/
token_count: 5,019
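To make the writer's output format above concrete: a self-contained sketch that emits one DXF LINE entity using the same group codes DATA_DXF_WRITER uses (0 for the entity name, 8 for the layer, 10/20 and 11/21 for the two points). It avoids the MicMac/eLiSe types; the layer name and coordinates are made up for illustration.

```cpp
// Standalone illustration of the tag/value stream DATA_DXF_WRITER::PutSeg produces:
// a group code on one line, its value on the next, matching PutTag()/PutStr()/PutReal().
#include <cstdio>

static void put_tag(FILE* fp, int tag)       { std::fprintf(fp, "%3d\n", tag); }
static void put_str(FILE* fp, const char* s) { std::fprintf(fp, "%s\n", s); }
static void put_real(FILE* fp, double r)     { std::fprintf(fp, "%.3f\n", r); }

// Emit one LINE entity from (x0,y0) to (x1,y1) on the given layer.
static void put_line(FILE* fp, const char* layer,
                     double x0, double y0, double x1, double y1) {
  put_tag(fp, 0);  put_str(fp, "LINE");   // entity marker (TagStrEntity)
  put_tag(fp, 8);  put_str(fp, layer);    // layer name    (TagStrLayer)
  put_tag(fp, 10); put_real(fp, x0);      // first point   (TagPtX0)
  put_tag(fp, 20); put_real(fp, y0);      //               (TagPtY0)
  put_tag(fp, 11); put_real(fp, x1);      // second point  (TagPtX0 + 1)
  put_tag(fp, 21); put_real(fp, y1);      //               (TagPtY0 + 1)
}

int main() {
  put_line(stdout, "contours", 0.0, 0.0, 100.0, 50.0);
  return 0;
}
```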
max_stars_count: 544
package com.hubspot.jinjava.el.ext.eager; import com.hubspot.jinjava.el.ext.DeferredParsingException; import com.hubspot.jinjava.interpret.DeferredValueException; import com.hubspot.jinjava.util.EagerExpressionResolver; import de.odysseus.el.tree.Bindings; import de.odysseus.el.tree.impl.ast.AstMethod; import de.odysseus.el.tree.impl.ast.AstNode; import de.odysseus.el.tree.impl.ast.AstParameters; import de.odysseus.el.tree.impl.ast.AstProperty; import javax.el.ELContext; import javax.el.ELException; public class EagerAstMethod extends AstMethod implements EvalResultHolder { protected Object evalResult; protected boolean hasEvalResult; // instanceof AstProperty protected final EvalResultHolder property; // instanceof AstParameters protected final EvalResultHolder params; public EagerAstMethod(AstProperty property, AstParameters params) { this( EagerAstNodeDecorator.getAsEvalResultHolder(property), EagerAstNodeDecorator.getAsEvalResultHolder(params) ); } private EagerAstMethod(EvalResultHolder property, EvalResultHolder params) { super((AstProperty) property, (AstParameters) params); this.property = property; this.params = params; } @Override public Object eval(Bindings bindings, ELContext context) { try { evalResult = super.eval(bindings, context); hasEvalResult = true; return evalResult; } catch (DeferredValueException | ELException originalException) { DeferredParsingException e = EvalResultHolder.convertToDeferredParsingException( originalException ); throw new DeferredParsingException( this, getPartiallyResolved(bindings, context, e) ); } finally { property.getAndClearEvalResult(); params.getAndClearEvalResult(); } } @Override public Object getAndClearEvalResult() { Object temp = evalResult; evalResult = null; hasEvalResult = false; return temp; } @Override public boolean hasEvalResult() { return hasEvalResult; } /** * This method is used when we need to reconstruct the method property and params manually. * Neither the property or params could be evaluated so we dive into the property and figure out * where the DeferredParsingException came from. */ private String getPartiallyResolved( Bindings bindings, ELContext context, DeferredParsingException deferredParsingException ) { String stringPrefix; String stringMethod; AstNode prefix; String formatString; if (property instanceof EagerAstDot) { formatString = "%s.%s"; prefix = ((EagerAstDot) property).getPrefix(); stringMethod = ((EagerAstDot) property).getProperty(); } else if (property instanceof EagerAstBracket) { formatString = "%s[%s]"; prefix = ((EagerAstBracket) property).getPrefix(); stringMethod = EvalResultHolder.reconstructNode( bindings, context, (EvalResultHolder) ((EagerAstBracket) property).getMethod(), deferredParsingException, false ); } else { // Should not happen natively throw new DeferredValueException("Cannot resolve property in EagerAstMethod"); } // If prefix is an identifier, then preserve it in case the method should modify it. 
stringPrefix = EvalResultHolder.reconstructNode( bindings, context, (EvalResultHolder) prefix, deferredParsingException, true ); String paramString; if ( deferredParsingException != null && deferredParsingException.getSourceNode() == params ) { paramString = deferredParsingException.getDeferredEvalResult(); } else { try { paramString = EagerExpressionResolver.getValueAsJinjavaStringSafe( params.eval(bindings, context) ); // remove brackets so they can get replaced with parentheses paramString = paramString.substring(1, paramString.length() - 1); } catch (DeferredParsingException e) { paramString = e.getDeferredEvalResult(); } } return ( String.format(formatString, stringPrefix, stringMethod) + String.format("(%s)", paramString) ); } }
token_count: 1,559
max_stars_count: 1,079
package com.dds.skywebrtc; import android.content.Context; import android.os.Handler; import android.os.Looper; import android.text.TextUtils; import android.util.Log; import android.view.View; import com.dds.skywebrtc.engine.EngineCallback; import com.dds.skywebrtc.engine.webrtc.WebRTCEngine; import com.dds.skywebrtc.inter.ISkyEvent; import org.webrtc.IceCandidate; import org.webrtc.SessionDescription; import java.lang.ref.WeakReference; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; /** * 会话层 * Created by dds on 2019/8/19. * */ public class CallSession implements EngineCallback { private static final String TAG = "CallSession"; private WeakReference<CallSessionCallback> sessionCallback; private ExecutorService executor; private Handler handler = new Handler(Looper.getMainLooper()); // session参数 private boolean mIsAudioOnly; // 房间人列表 private List<String> mUserIDList; // 单聊对方Id/群聊邀请人 public String mTargetId; // 房间Id private String mRoomId; // myId public String mMyId; // 房间大小 private int mRoomSize; private boolean mIsComing; private EnumType.CallState _callState = EnumType.CallState.Idle; private long startTime; private AVEngine iEngine; private ISkyEvent mEvent; public CallSession(Context context, String roomId, boolean audioOnly, ISkyEvent event) { executor = Executors.newSingleThreadExecutor(); this.mIsAudioOnly = audioOnly; this.mRoomId = roomId; this.mEvent = event; iEngine = AVEngine.createEngine(new WebRTCEngine(audioOnly, context)); iEngine.init(this); } // ----------------------------------------各种控制-------------------------------------------- // 创建房间 public void createHome(String room, int roomSize) { executor.execute(() -> { if (mEvent != null) { mEvent.createRoom(room, roomSize); } }); } // 加入房间 public void joinHome(String roomId) { executor.execute(() -> { _callState = EnumType.CallState.Connecting; Log.d(TAG, "joinHome mEvent = " + mEvent); setIsComing(true); if (mEvent != null) { mEvent.sendJoin(roomId); } }); } //开始响铃 public void shouldStartRing() { if (mEvent != null) { mEvent.shouldStartRing(true); } } // 关闭响铃 public void shouldStopRing() { Log.d(TAG, "shouldStopRing mEvent != null is " + (mEvent != null)); if (mEvent != null) { mEvent.shouldStopRing(); } } // 发送响铃回复 public void sendRingBack(String targetId, String room) { executor.execute(() -> { if (mEvent != null) { mEvent.sendRingBack(targetId, room); } }); } // 发送拒绝信令 public void sendRefuse() { executor.execute(() -> { if (mEvent != null) { // 取消拨出 mEvent.sendRefuse(mRoomId, mTargetId, EnumType.RefuseType.Hangup.ordinal()); } }); release(EnumType.CallEndReason.Hangup); } // 发送忙时拒绝 void sendBusyRefuse(String room, String targetId) { executor.execute(() -> { if (mEvent != null) { // 取消拨出 mEvent.sendRefuse(room, targetId, EnumType.RefuseType.Busy.ordinal()); } }); release(EnumType.CallEndReason.Hangup); } // 发送取消信令 public void sendCancel() { executor.execute(() -> { if (mEvent != null) { // 取消拨出 List<String> list = new ArrayList<>(); list.add(mTargetId); mEvent.sendCancel(mRoomId, list); } }); release(EnumType.CallEndReason.Hangup); } // 离开房间 public void leave() { executor.execute(() -> { if (mEvent != null) { mEvent.sendLeave(mRoomId, mMyId); } }); // 释放变量 release(EnumType.CallEndReason.Hangup); } // 切换到语音接听 public void sendTransAudio() { executor.execute(() -> { if (mEvent != null) { // 发送到对面,切换到语音 mEvent.sendTransAudio(mTargetId); } }); } // 设置静音 public boolean toggleMuteAudio(boolean enable) { return 
iEngine.muteAudio(enable); } // 设置扬声器 public boolean toggleSpeaker(boolean enable) { return iEngine.toggleSpeaker(enable); } // 设置扬声器 public boolean toggleHeadset(boolean isHeadset) { return iEngine.toggleHeadset(isHeadset); } // 切换到语音通话 public void switchToAudio() { mIsAudioOnly = true; // 告诉远端 sendTransAudio(); // 本地切换 if (sessionCallback != null && sessionCallback.get() != null) { sessionCallback.get().didChangeMode(true); } } // 调整摄像头前置后置 public void switchCamera() { iEngine.switchCamera(); } // 释放资源 private void release(EnumType.CallEndReason reason) { executor.execute(() -> { // 释放内容 iEngine.release(); // 状态设置为Idle _callState = EnumType.CallState.Idle; //界面回调 if (sessionCallback != null && sessionCallback.get() != null) { sessionCallback.get().didCallEndWithReason(reason); } else { //TODO 结束会话 } }); } //------------------------------------receive--------------------------------------------------- // 加入房间成功 public void onJoinHome(String myId, String users, int roomSize) { // 开始计时 mRoomSize = roomSize; startTime = 0; handler.post(() -> executor.execute(() -> { mMyId = myId; List<String> strings; if (!TextUtils.isEmpty(users)) { String[] split = users.split(","); strings = Arrays.asList(split); mUserIDList = strings; } // 发送邀请 if (!mIsComing) { if (roomSize == 2) { List<String> inviteList = new ArrayList<>(); inviteList.add(mTargetId); mEvent.sendInvite(mRoomId, inviteList, mIsAudioOnly); } } else { iEngine.joinRoom(mUserIDList); } if (!isAudioOnly()) { // 画面预览 if (sessionCallback != null && sessionCallback.get() != null) { sessionCallback.get().didCreateLocalVideoTrack(); } } })); } // 新成员进入 public void newPeer(String userId) { handler.post(() -> executor.execute(() -> { // 其他人加入房间 iEngine.userIn(userId); // 关闭响铃 if (mEvent != null) { mEvent.shouldStopRing(); } // 更换界面 _callState = EnumType.CallState.Connected; if (sessionCallback != null && sessionCallback.get() != null) { startTime = System.currentTimeMillis(); sessionCallback.get().didChangeState(_callState); } })); } // 对方已拒绝 public void onRefuse(String userId, int type) { iEngine.userReject(userId, type); } // 对方已响铃 public void onRingBack(String userId) { if (mEvent != null) { mEvent.onRemoteRing(); //mEvent.shouldStartRing(false); } } // 切换到语音 public void onTransAudio(String userId) { mIsAudioOnly = true; // 本地切换 if (sessionCallback != null && sessionCallback.get() != null) { sessionCallback.get().didChangeMode(true); } } // 对方网络断开 public void onDisConnect(String userId, EnumType.CallEndReason reason) { executor.execute(() -> { iEngine.disconnected(userId, reason); }); } // 对方取消拨出 public void onCancel(String userId) { Log.d(TAG, "onCancel userId = " + userId); shouldStopRing(); release(EnumType.CallEndReason.RemoteHangup); } public void onReceiveOffer(String userId, String description) { executor.execute(() -> { iEngine.receiveOffer(userId, description); }); } public void onReceiverAnswer(String userId, String sdp) { executor.execute(() -> { iEngine.receiveAnswer(userId, sdp); }); } public void onRemoteIceCandidate(String userId, String id, int label, String candidate) { executor.execute(() -> { iEngine.receiveIceCandidate(userId, id, label, candidate); }); } // 对方离开房间 public void onLeave(String userId) { if (mRoomSize > 2) { // 返回到界面上 if (sessionCallback != null && sessionCallback.get() != null) { sessionCallback.get().didUserLeave(userId); } } executor.execute(() -> iEngine.leaveRoom(userId)); } // --------------------------------界面显示相关--------------------------------------------/ public long getStartTime() { return startTime; 
} public View setupLocalVideo(boolean isOverlay) { return iEngine.startPreview(isOverlay); } public View setupRemoteVideo(String userId, boolean isOverlay) { return iEngine.setupRemoteVideo(userId, isOverlay); } //------------------------------------各种参数----------------------------------------------/ public void setIsAudioOnly(boolean _isAudioOnly) { this.mIsAudioOnly = _isAudioOnly; } public boolean isAudioOnly() { return mIsAudioOnly; } public void setTargetId(String targetIds) { this.mTargetId = targetIds; } public void setIsComing(boolean isComing) { this.mIsComing = isComing; } public boolean isComing() { return mIsComing; } public void setRoom(String _room) { this.mRoomId = _room; } public String getRoomId() { return mRoomId; } public EnumType.CallState getState() { return _callState; } public void setCallState(EnumType.CallState callState) { this._callState = callState; } public void setSessionCallback(CallSessionCallback sessionCallback) { this.sessionCallback = new WeakReference<>(sessionCallback); } //-----------------------------Engine回调----------------------------------------- @Override public void joinRoomSucc() { // 关闭响铃 if (mEvent != null) { mEvent.shouldStopRing(); } // 更换界面 _callState = EnumType.CallState.Connected; //Log.d(TAG, "joinRoomSucc, sessionCallback.get() = " + sessionCallback.get()); if (sessionCallback != null && sessionCallback.get() != null) { startTime = System.currentTimeMillis(); sessionCallback.get().didChangeState(_callState); } } @Override public void exitRoom() { // 状态设置为Idle if (mRoomSize == 2) { handler.post(() -> { release(EnumType.CallEndReason.RemoteHangup); }); } } @Override public void reject(int type) { shouldStopRing(); Log.d(TAG, "reject type = " + type); // handler.post(() -> { switch (type) { case 0: release(EnumType.CallEndReason.Busy); break; case 1: release(EnumType.CallEndReason.RemoteHangup); break; } // }); } @Override public void disconnected(EnumType.CallEndReason reason) { handler.post(() -> { shouldStopRing(); release(reason); }); } @Override public void onSendIceCandidate(String userId, IceCandidate candidate) { executor.execute(() -> { if (mEvent != null) { try { Thread.sleep(100); } catch (InterruptedException e) { e.printStackTrace(); } Log.d("dds_test", "onSendIceCandidate"); mEvent.sendIceCandidate(userId, candidate.sdpMid, candidate.sdpMLineIndex, candidate.sdp); } }); } @Override public void onSendOffer(String userId, SessionDescription description) { executor.execute(() -> { if (mEvent != null) { Log.d("dds_test", "onSendOffer"); mEvent.sendOffer(userId, description.description); } }); } @Override public void onSendAnswer(String userId, SessionDescription description) { executor.execute(() -> { if (mEvent != null) { Log.d("dds_test", "onSendAnswer"); mEvent.sendAnswer(userId, description.description); } }); } @Override public void onRemoteStream(String userId) { // 画面预览 if (sessionCallback != null && sessionCallback.get() != null) { Log.d(TAG, "onRemoteStream sessionCallback.get() != null "); sessionCallback.get().didReceiveRemoteVideoTrack(userId); } else { Log.d(TAG, "onRemoteStream sessionCallback.get() == null "); } } @Override public void onDisconnected(String userId) { //断线了,需要关闭通话界面 if (sessionCallback != null && sessionCallback.get() != null) { Log.d(TAG, "onDisconnected sessionCallback.get() != null "); sessionCallback.get().didDisconnected(userId); } else { Log.d(TAG, "onDisconnected sessionCallback.get() == null "); } } public interface CallSessionCallback { void didCallEndWithReason(EnumType.CallEndReason 
var1); void didChangeState(EnumType.CallState var1); void didChangeMode(boolean isAudioOnly); void didCreateLocalVideoTrack(); void didReceiveRemoteVideoTrack(String userId); void didUserLeave(String userId); void didError(String error); void didDisconnected(String userId); } }
token_count: 7,151
max_stars_count: 1,212
<reponame>btrask/stronglink<filename>res/experiments/2015-06-07-file-cache.c<gh_stars>1000+
// Caching is done with mmap, so this defines address space, not actual RAM use.
// The kernel is free to page out cached files depending on memory pressure.
#if defined(__LP64__) || defined(__ILP64__)
#define SLN_CACHE_SIZE (1024 * 1024 * 1024 * 1)
#else
#define SLN_CACHE_SIZE (1024 * 1024 * 128)
#endif

#define SLN_CACHE_SMALL_FILE_MAX (1024 * 8)
#define SLN_CACHE_LARGE_FILE_FDS 50

typedef struct SLNFileCache *SLNFileCacheRef;

struct item {
	str_t *path;
	uv_buf_t buf[1];
	unsigned refcount;
	struct item *next;
};
struct SLNFileCache {
	async_mutex_t mutex[1];
	struct item *items;
	struct item *head;
	struct item *tail;
};

// is it even sln?
// or is it just part of the blog system?
// well the server api could use it
// although not as desperately

SLNFileCacheRef SLNFileCacheCreate(void);
int SLNFileCacheGet(SLNFileCacheRef const cache, strarg_t const path, struct item **const out);

// we need reference counting?
// one simplification... we don't need to be thread-safe
// we only need to send files on the main thread
// actually at some point it might be nice to support multiple threads
// one lock for the whole cache (perfect for the single threaded case)
// a linked list to track usage recency?

// our item struct is pretty bloated
// what if buffers were detached while used?
// we wouldnt need a refcount
// but they couldnt be shared either, which is unacceptable

// we want to have one unified interface for reading chunks from small and large files

typedef struct {
	uv_buf_t buf;
	unsigned refcount;
} SLNFileCacheChunk;

int SLNFileCacheRead(SLNFileCacheRef const cache, strarg_t const path, size_t const len, uint64_t const offset, SLNFileCacheChunk **const out);

// i have such a clear picture of how this should work
// but the devil is in the details...

// note that for very small files (less than one page), mmap is wasteful...
// and read(2) has more overhead the larger a file is

// potential approach
// 1. files less than ~2k: read(2), buffer manually
// 2. files 2k-8mb: mmap entire file
// 3. files greater than 8mb: mmap chunks at a time

// note that each chunk should be cachable separately
// thus it is a path and offset that identifies a cached item

// then for very large files we could also cache fds...
// although actually that might not even be worth it

// note that caching in application memory is a terrible idea
// but for very small files, maybe it can't be helped?

// this code should really be reusable, even as a library
// look at sources from nginx or haproxy...
token_count: 800
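A minimal sketch of the three-tier read policy the notes above are weighing: read(2) into a heap buffer for very small files, whole-file mmap for medium ones, per-chunk mmap for large ones. The thresholds, helper names, and the deliberately leaky memory handling are illustrative assumptions, not the stronglink implementation.

```cpp
// Illustrative tiering only: small files are read(2) into a heap buffer,
// medium files are mmap'd whole, large files are mmap'd one aligned chunk at a time.
#include <fcntl.h>
#include <sys/mman.h>
#include <sys/stat.h>
#include <unistd.h>
#include <cstdlib>

enum Tier { TIER_READ, TIER_MMAP_WHOLE, TIER_MMAP_CHUNK };

static const off_t kSmallMax = 2 * 1024;          // assumption: "less than ~2k"
static const off_t kWholeMax = 8 * 1024 * 1024;   // assumption: "2k-8mb: mmap entire file"
static const off_t kChunk    = 1024 * 1024;       // assumption: chunk size for large files

static Tier choose_tier(off_t size) {
  if (size < kSmallMax) return TIER_READ;
  if (size <= kWholeMax) return TIER_MMAP_WHOLE;
  return TIER_MMAP_CHUNK;
}

// Returns a pointer to bytes [offset, offset+len) of the file, or NULL on error.
// For brevity the mmap'd cases leak their mappings and spans that cross a chunk
// boundary are not handled; a real cache would track, share, and munmap them.
static const void* read_span(const char* path, off_t offset, size_t len) {
  int fd = open(path, O_RDONLY);
  if (fd < 0) return nullptr;
  struct stat st{};
  if (fstat(fd, &st) != 0) { close(fd); return nullptr; }

  const void* out = nullptr;
  switch (choose_tier(st.st_size)) {
    case TIER_READ: {                    // tiny file: buffer it in application memory
      char* buf = static_cast<char*>(std::malloc(len));
      if (buf && pread(fd, buf, len, offset) == static_cast<ssize_t>(len)) {
        out = buf;
      } else {
        std::free(buf);                  // free(NULL) is a no-op
      }
      break;
    }
    case TIER_MMAP_WHOLE: {              // medium file: map it all, index into it
      void* m = mmap(nullptr, st.st_size, PROT_READ, MAP_PRIVATE, fd, 0);
      if (m != MAP_FAILED) out = static_cast<char*>(m) + offset;
      break;
    }
    case TIER_MMAP_CHUNK: {              // large file: map only the chunk containing offset
      off_t base = (offset / kChunk) * kChunk;   // chunk-aligned (and page-aligned) start
      void* m = mmap(nullptr, kChunk, PROT_READ, MAP_PRIVATE, fd, base);
      if (m != MAP_FAILED) out = static_cast<char*>(m) + (offset - base);
      break;
    }
  }
  close(fd);                             // mappings remain valid after close
  return out;
}
```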
max_stars_count: 348
<filename>docs/data/leg-t2/002/00205163.json {"nom":"Charly-sur-Marne","circ":"5ème circonscription","dpt":"Aisne","inscrits":1650,"abs":891,"votants":759,"blancs":39,"nuls":9,"exp":711,"res":[{"nuance":"REM","nom":"<NAME>","voix":437},{"nuance":"FN","nom":"<NAME>","voix":274}]}
token_count: 116
max_stars_count: 435
<reponame>Montana/datawave<gh_stars>100-1000 package datawave.util; import org.apache.accumulo.core.iterators.LongCombiner; import org.apache.hadoop.io.Text; import java.nio.ByteBuffer; import java.nio.charset.CharacterCodingException; public class TextUtil { /** * Appends a null byte followed by the UTF-8 bytes of the given string to the given {@link Text} * * @param text * the Text to which to append * @param string * the String to append */ public static void textAppend(Text text, String string) { appendNullByte(text); textAppendNoNull(text, string); } public static void textAppend(Text text, String string, boolean replaceBadChar) { appendNullByte(text); textAppendNoNull(text, string, replaceBadChar); } public static void textAppend(Text t, long s) { t.append(nullByte, 0, 1); t.append(LongCombiner.FIXED_LEN_ENCODER.encode(s), 0, 8); } private static final byte[] nullByte = {0}; /** * Appends a null byte to the given text * * @param text * the text to which to append the null byte */ public static void appendNullByte(Text text) { text.append(nullByte, 0, nullByte.length); } /** * Appends the UTF-8 bytes of the given string to the given {@link Text} * * @param t * the Text to which to append * @param s * the String to append */ public static void textAppendNoNull(Text t, String s) { textAppendNoNull(t, s, false); } /** * Appends the UTF-8 bytes of the given string to the given {@link Text} */ public static void textAppendNoNull(Text t, String s, boolean replaceBadChar) { try { ByteBuffer buffer = Text.encode(s, replaceBadChar); t.append(buffer.array(), 0, buffer.limit()); } catch (CharacterCodingException cce) { throw new IllegalArgumentException(cce); } } /** * Converts the given string its UTF-8 bytes. This uses Hadoop's method for converting string to UTF-8 and is much faster than calling * {@link String#getBytes(String)}. * * @param string * the string to convert * @return the UTF-8 representation of the string */ public static byte[] toUtf8(String string) { ByteBuffer buffer; try { buffer = Text.encode(string, false); } catch (CharacterCodingException cce) { throw new IllegalArgumentException(cce); } byte[] bytes = new byte[buffer.limit()]; System.arraycopy(buffer.array(), 0, bytes, 0, bytes.length); return bytes; } /** * Converts a UTF-8 encoded byte array back into a String. * * @param bytes * @return string */ public static String fromUtf8(byte[] bytes) { try { return Text.decode(bytes); } catch (CharacterCodingException e) { throw new IllegalArgumentException(e); } } }
token_count: 1,343
max_stars_count: 476
/* * Copyright (C) 2018-2020. Huawei Technologies Co., Ltd. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.prestosql.spi.util; import org.testng.annotations.Test; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.util.Random; import static org.testng.Assert.assertFalse; import static org.testng.Assert.assertTrue; public class TestBloomFilter { private final String[] values; private final String[] testValues; private static final int COUNT = 1024 * 1024; public TestBloomFilter() { Random rnd = new Random(); values = new String[COUNT]; testValues = new String[COUNT]; for (int i = 0; i < COUNT; i++) { values[i] = "item " + i; testValues[i] = "item " + rnd.nextInt(1024 * 1024); } } @Test public void testMerge() { BloomFilter pbf1 = new BloomFilter(COUNT, 0.1); BloomFilter pbf2 = new BloomFilter(COUNT, 0.1); BloomFilter pbf3 = new BloomFilter(COUNT, 0.1); for (int i = 0; i < COUNT / 3; i++) { pbf1.add(values[i].getBytes()); } for (int i = (int) Math.ceil(COUNT / 3); i < 2 * COUNT / 3; i++) { pbf2.add(values[i].getBytes()); } for (int i = (int) Math.ceil(2 * COUNT / 3); i < COUNT; i++) { pbf3.add(values[i].getBytes()); } System.out.println("approximate count before merging: " + pbf1.approximateElementCount()); BloomFilter pbf = pbf1; pbf1.merge(pbf2); pbf1.merge(pbf3); long testStart = System.nanoTime(); for (int i = 0; i < COUNT; i++) { if (!pbf.test(values[i].getBytes())) { throw new RuntimeException("error"); } } System.out.println("Time testing 1M values: " + (System.nanoTime() - testStart) / 1000000 + " ms"); int negativeCount = 0; for (int i = 0; i < COUNT; i++) { if (!pbf.test(("abc" + i).getBytes())) { negativeCount++; } } System.out.println("negativeCount: " + negativeCount + ", real fpp: " + (double) (COUNT - negativeCount) / COUNT + ", expected fpp: " + pbf.expectedFpp()); System.out.println("approximate count after merging: " + pbf.approximateElementCount()); } @Test public void testSerDe() throws IOException { BloomFilter bloomFilter = new BloomFilter(COUNT, 0.1); for (String value : values) { bloomFilter.add(value.getBytes()); } ByteArrayOutputStream out = new ByteArrayOutputStream(); long serializationStart = System.nanoTime(); bloomFilter.writeTo(out); System.out.println("Serialization 1M values took: " + (System.nanoTime() - serializationStart) / 1000000 + " ms"); long deserializationStart = System.nanoTime(); BloomFilter deserializedBloomFilter = BloomFilter.readFrom(new ByteArrayInputStream(out.toByteArray())); System.out.println("Deserialization 1M values took: " + (System.nanoTime() - deserializationStart) / 1000000 + " ms"); for (String value : values) { assertTrue(deserializedBloomFilter.test(value.getBytes()), "Value should exist in deserialized BloomFilter"); } BloomFilter bloomFilter1 = new BloomFilter(COUNT, 0.01); assertTrue(bloomFilter1.isEmpty()); bloomFilter1.add(0L); assertFalse(bloomFilter1.isEmpty()); ByteArrayOutputStream out1 = new ByteArrayOutputStream(); 
bloomFilter1.writeTo(out1); BloomFilter deserializedBloomFilter1 = BloomFilter.readFrom(new ByteArrayInputStream(out1.toByteArray())); assertFalse(deserializedBloomFilter1.isEmpty()); } }
token_count: 1,752
max_stars_count: 419
/***************************************************************************/ /* */ /* fttrigon.h */ /* */ /* FreeType trigonometric functions (specification). */ /* */ /* Copyright 2001-2018 by */ /* <NAME>, <NAME>, and <NAME>. */ /* */ /* This file is part of the FreeType project, and may only be used, */ /* modified, and distributed under the terms of the FreeType project */ /* license, LICENSE.TXT. By continuing to use, modify, or distribute */ /* this file you indicate that you have read the license and */ /* understand and accept it fully. */ /* */ /***************************************************************************/ #ifndef FTTRIGON_H_ #define FTTRIGON_H_ #include FT_FREETYPE_H FT_BEGIN_HEADER /*************************************************************************/ /* */ /* <Section> */ /* computations */ /* */ /*************************************************************************/ /************************************************************************* * * @type: * FT_Angle * * @description: * This type is used to model angle values in FreeType. Note that the * angle is a 16.16 fixed-point value expressed in degrees. * */ typedef FT_Fixed FT_Angle; /************************************************************************* * * @macro: * FT_ANGLE_PI * * @description: * The angle pi expressed in @FT_Angle units. * */ #define FT_ANGLE_PI ( 180L << 16 ) /************************************************************************* * * @macro: * FT_ANGLE_2PI * * @description: * The angle 2*pi expressed in @FT_Angle units. * */ #define FT_ANGLE_2PI ( FT_ANGLE_PI * 2 ) /************************************************************************* * * @macro: * FT_ANGLE_PI2 * * @description: * The angle pi/2 expressed in @FT_Angle units. * */ #define FT_ANGLE_PI2 ( FT_ANGLE_PI / 2 ) /************************************************************************* * * @macro: * FT_ANGLE_PI4 * * @description: * The angle pi/4 expressed in @FT_Angle units. * */ #define FT_ANGLE_PI4 ( FT_ANGLE_PI / 4 ) /************************************************************************* * * @function: * FT_Vector_Length * * @description: * Return the length of a given vector. * * @input: * vec :: * The address of target vector. * * @return: * The vector length, expressed in the same units that the original * vector coordinates. * */ FT_EXPORT( FT_Fixed ) FT_Vector_Length( FT_Vector* vec ); /* */ FT_END_HEADER #endif /* FTTRIGON_H_ */ /* END */
token_count: 1,731
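A tiny standalone illustration of the 16.16 fixed-point degree encoding that fttrigon.h documents for FT_Angle, showing how the FT_ANGLE_PI family of values relates to floating-point degrees. It redefines the constants locally instead of including FreeType.

```cpp
// FT_Angle is a 16.16 fixed-point value in degrees: the low 16 bits hold the fraction.
#include <cstdio>

typedef long Angle16_16;                       // stands in for FT_Angle / FT_Fixed

#define ANGLE_PI  ( 180L << 16 )               // mirrors FT_ANGLE_PI
#define ANGLE_2PI ( ANGLE_PI * 2 )             // mirrors FT_ANGLE_2PI
#define ANGLE_PI2 ( ANGLE_PI / 2 )             // mirrors FT_ANGLE_PI2

static Angle16_16 from_degrees(double deg) {   // degrees -> 16.16
  return static_cast<Angle16_16>(deg * 65536.0);
}
static double to_degrees(Angle16_16 a) {       // 16.16 -> degrees
  return static_cast<double>(a) / 65536.0;
}

int main() {
  std::printf("pi   = %ld (= %.1f deg)\n", ANGLE_PI,  to_degrees(ANGLE_PI));   // 11796480, 180.0
  std::printf("pi/2 = %ld (= %.1f deg)\n", ANGLE_PI2, to_degrees(ANGLE_PI2));  // 5898240, 90.0
  std::printf("45.5 deg -> %ld\n", from_degrees(45.5));                        // 2981888
  return 0;
}
```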
max_stars_count: 2,338
// Example source from breakpad's linux tutorial
// https://chromium.googlesource.com/breakpad/breakpad/+/master/docs/linux_starter_guide.md

#include <stdio.h>
#include <sys/types.h>
#include <unistd.h>

#include "client/linux/handler/exception_handler.h"

static bool dumpCallback(const google_breakpad::MinidumpDescriptor &descriptor,
                         void *context, bool succeeded) {
  printf("Dump path: %s\n", descriptor.path());
  return succeeded;
}

void crash() {
  volatile int *a = (int *)(NULL);
  *a = 1;
}

int main(int argc, char *argv[]) {
  google_breakpad::MinidumpDescriptor descriptor("/tmp");
  google_breakpad::ExceptionHandler eh(descriptor, NULL, dumpCallback, NULL, true, -1);
  printf("pid: %d\n", getpid());
  crash();
  return 0;
}
token_count: 324
max_stars_count: 1,031
/* Copyright (c) <2003-2019> <<NAME>, Newton Game Dynamics> * * This software is provided 'as-is', without any express or implied * warranty. In no event will the authors be held liable for any damages * arising from the use of this software. * * Permission is granted to anyone to use this software for any purpose, * including commercial applications, and to alter it and redistribute it * freely, subject to the following restrictions: * * 1. The origin of this software must not be misrepresented; you must not * claim that you wrote the original software. If you use this software * in a product, an acknowledgment in the product documentation would be * appreciated but is not required. * * 2. Altered source versions must be plainly marked as such, and must not be * misrepresented as being the original software. * * 3. This notice may not be removed or altered from any source distribution. */ #ifndef __DGCOLLISION_LUMPED_MASS_PARTICLES_H__ #define __DGCOLLISION_LUMPED_MASS_PARTICLES_H__ #include "dgCollision.h" #include "dgCollisionConvex.h" class dgCollisionLumpedMassParticles: public dgCollisionConvex { public: dgCollisionLumpedMassParticles (const dgCollisionLumpedMassParticles& source); dgCollisionLumpedMassParticles (dgWorld* const world, dgCollisionID collisionID); dgCollisionLumpedMassParticles (dgWorld* const world, dgDeserialize deserialization, void* const userData, dgInt32 revisionNumber); virtual ~dgCollisionLumpedMassParticles(void); dgInt32 GetCount() const; dgInt32 GetStrideInByte() const; const dgVector* GetVelocity() const; const dgVector* GetPositions() const; const dgVector* GetAcceleration() const; dgDynamicBody* GetOwner () const; void SetOwnerAndMassPraperties (dgDynamicBody* const body); virtual void IntegrateForces (dgFloat32 timestep) = 0; protected: virtual void FinalizeBuild(); virtual dgInt32 CalculateSignature() const; virtual void RegisterCollision(const dgBody* const otherBody); virtual void SetCollisionBBox(const dgVector& p0, const dgVector& p1); virtual void Serialize(dgSerialize callback, void* const userData) const; virtual void CalcAABB(const dgMatrix& matrix, dgVector& p0, dgVector& p1) const; virtual dgMatrix CalculateInertiaAndCenterOfMass(const dgMatrix& m_alignMatrix, const dgVector& localScale, const dgMatrix& matrix) const; virtual void DebugCollision (const dgMatrix& matrix, dgCollision::OnDebugCollisionMeshCallback callback, void* const userData) const; dgFloat32 RayCast(const dgVector& localP0, const dgVector& localP1, dgFloat32 maxT, dgContactPoint& contactOut, const dgBody* const body, void* const userData, OnRayPrecastAction preFilter) const; //dgFloat32 CalculaleContactPenetration(const dgVector& point, const dgVector& normal) const; dgVector CalculateContactNormalAndPenetration(const dgVector& worldPosition) const; virtual void HandleCollision (dgFloat32 timestep, dgVector* const normalDir, dgVector* const normalAccel, dgFloat32* const frictionCoefficient); virtual dgInt32 GetMemoryBufferSizeInBytes() const = 0; dgArray<dgVector> m_posit; dgArray<dgVector> m_veloc; dgArray<dgVector> m_accel; dgArray<dgVector> m_externalAccel; dgArray<dgFloat32> m_mass; dgArray<dgFloat32> m_invMass; dgDynamicBody* m_body; dgFloat32 m_totalMass; dgFloat32 m_particleRadius; dgInt32 m_particlesCount; friend class dgBroadPhase; friend class dgDynamicBody; friend class dgWorldDynamicUpdate; }; inline dgDynamicBody* dgCollisionLumpedMassParticles::GetOwner () const { return m_body; } #endif
token_count: 1,134
max_stars_count: 631
<reponame>peteraramaldes/javalite /* Copyright 2009-(CURRENT YEAR) <NAME> Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.javalite.activeweb; import org.javalite.common.Util; import org.junit.jupiter.api.BeforeEach; /** * Super class for controller tests. This class is used by unit tests that test a single controller. Controllers are * tested by simulating a web request to a controller (no physical network is involved, and no container initialized). * <p/> * Subclasses must follow a simple naming convention: subclass name must be * made of two words: controller short class name and word "Spec". Example, of there is a controller: * <pre> * public class GreeterController extends AppController{ * ... * } * </pre> * then the test will look like this: * <pre> * * public class GreeterControllerSpec extends ControllerSpec{ * ... * } * </pre> * * ActiveWeb controller specs allow for true TDD, since they do not have a compiler dependency on controllers. * You can describe full behavior of your controller before a controller class even exists. Simplest example: * <pre> * public GreeterControllerSpec extends ControllerSpec{ * &#064;Test * public void shouldRespondWithGreetingMessage(){ * request().get("index"); * a(responseCode()).shouldBeEqual(200); * a(assigns().get("message")).shouldBeEqual("Hello, earthlings!"); * } * } * </pre> * * In a code snippet above, a request with HTTP GET method is simulated to the GreeterController, index() action. * Controller is expected to assign an object called "message" with value "Hello, earthlings!" to a view. * * This class will not open a connection to a test DB. If you need a connection, * use {@link org.javalite.activeweb.DBControllerSpec}. * * @see {@link org.javalite.activeweb.DBControllerSpec}. * @author <NAME> */ public class ControllerSpec extends RequestSpecHelper { private String controllerPath; public ControllerSpec() { Configuration.resetFilters(); Configuration.setInjector(null); } @Override @BeforeEach public void atStart() { super.atStart(); controllerPath = getControllerPath(); } /** * Use this DSL-ish method to send requests to controllers from specs. * <strong>Attention</strong>: this method always returns a new object, please string methods one after another - fluent interfaces * approach. * * @return instance of <code>RequestBuilder</code> with convenience methods. */ protected RequestBuilder request() { return new RequestBuilder(controllerPath); } /** * Returns a controller path - this includes packages if there are any after "app.controllers". * * @return controller path */ protected final String getControllerPath(){ String controllerClassName = getControllerClassName(); Class<? extends AppController> controllerClass; try{ controllerClass = (Class<? 
extends AppController>) Class.forName(controllerClassName); }catch(Exception e){ throw new SpecException("Failed to find a class for: " + controllerClassName, e); } return Router.getControllerPath(controllerClass); } protected final String getControllerClassName() { String packageName = getClass().getPackage().getName(); if(!packageName.startsWith("app.controllers")){ throw new SpecException("controller specs must be located in package 'app.controllers' or sub-packages"); } if (!getClass().getSimpleName().endsWith("ControllerSpec")) throw new SpecException("Descendant of activeweb.ControllerSpec must be named with: controller name + 'Spec', " + "and because controllers have to have a suffix 'Controller'," + " controller spec classes must have a suffix: 'ControllerSpec' "); String temp = getClass().getName();//full name temp = temp.substring(16); if(temp.contains(".")){ temp = temp.substring(0, temp.lastIndexOf("."));// this is sub-package }else{ temp = ""; } String specClassName = getClass().getSimpleName(); String controllerName = specClassName.substring(0, specClassName.lastIndexOf("Spec")); return "app.controllers." + (Util.blank(temp)? "": temp + ".") + controllerName; } }
token_count: 1,597
max_stars_count: 3,101
<gh_stars>1000+ /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.dtstack.flinkx.connector.phoenix5.source; import com.dtstack.flinkx.connector.jdbc.source.JdbcInputFormat; import com.dtstack.flinkx.connector.phoenix5.util.Phoenix5Util; import org.apache.flink.core.io.InputSplit; import org.apache.commons.lang3.tuple.Pair; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.sql.Connection; import java.util.List; /** * @author wujuan * @version 1.0 * @date 2021/7/9 16:01 星期五 * @email <EMAIL> * @company www.dtstack.com */ public class Phoenix5InputFormat extends JdbcInputFormat { private static final Logger LOG = LoggerFactory.getLogger(Phoenix5InputFormat.class); // phoenix resolve table meta data by retrieving a row of data. @Override protected Pair<List<String>, List<String>> getTableMetaData() { LOG.info("Obtain meta data , table = {}.", jdbcConf.getTable()); return Phoenix5Util.getTableMetaData( jdbcConf.getColumn(), jdbcConf.getTable(), getConnection()); } @Override public void openInternal(InputSplit inputSplit) { super.openInternal(inputSplit); LOG.info(" Open phoenix5 input format internal success !"); } /** * 获取数据库连接,用于子类覆盖 * * @return connection */ @SuppressWarnings("AlibabaRemoveCommentedCode") @Override protected Connection getConnection() { Connection conn = Phoenix5Util.getConnection(jdbcDialect.defaultDriverName().get(), jdbcConf); LOG.info("Obtain a phoenix5 connection success !"); return conn; } }
token_count: 838
max_stars_count: 4,879
<filename>src/third_party/icu4c-57.1/source/i18n/simpletz.cpp /* ******************************************************************************* * Copyright (C) 1997-2013, International Business Machines Corporation and * others. All Rights Reserved. ******************************************************************************* * * File SIMPLETZ.H * * Modification History: * * Date Name Description * 12/05/96 clhuang Creation. * 04/21/97 aliu Fixed miscellaneous bugs found by inspection and * testing. * 07/29/97 aliu Ported source bodies back from Java version with * numerous feature enhancements and bug fixes. * 08/10/98 stephen JDK 1.2 sync. * 09/17/98 stephen Fixed getOffset() for last hour of year and DST * 12/02/99 aliu Added TimeMode and constructor and setStart/EndRule * methods that take TimeMode. Whitespace cleanup. ******************************************************************************** */ #include "utypeinfo.h" // for 'typeid' to work #include "unicode/utypes.h" #if !UCONFIG_NO_FORMATTING #include "unicode/simpletz.h" #include "unicode/gregocal.h" #include "unicode/smpdtfmt.h" #include "gregoimp.h" #include "umutex.h" U_NAMESPACE_BEGIN UOBJECT_DEFINE_RTTI_IMPLEMENTATION(SimpleTimeZone) // Use only for decodeStartRule() and decodeEndRule() where the year is not // available. Set February to 29 days to accomodate rules with that date // and day-of-week-on-or-before-that-date mode (DOW_LE_DOM_MODE). // The compareToRule() method adjusts to February 28 in non-leap years. // // For actual getOffset() calculations, use Grego::monthLength() and // Grego::previousMonthLength() which take leap years into account. // We handle leap years assuming always // Gregorian, since we know they didn't have daylight time when // Gregorian calendar started. 
const int8_t SimpleTimeZone::STATICMONTHLENGTH[] = {31,29,31,30,31,30,31,31,30,31,30,31}; static const UChar DST_STR[] = {0x0028,0x0044,0x0053,0x0054,0x0029,0}; // "(DST)" static const UChar STD_STR[] = {0x0028,0x0053,0x0054,0x0044,0x0029,0}; // "(STD)" // ***************************************************************************** // class SimpleTimeZone // ***************************************************************************** SimpleTimeZone::SimpleTimeZone(int32_t rawOffsetGMT, const UnicodeString& ID) : BasicTimeZone(ID), startMonth(0), startDay(0), startDayOfWeek(0), startTime(0), startTimeMode(WALL_TIME), endTimeMode(WALL_TIME), endMonth(0), endDay(0), endDayOfWeek(0), endTime(0), startYear(0), rawOffset(rawOffsetGMT), useDaylight(FALSE), startMode(DOM_MODE), endMode(DOM_MODE), dstSavings(U_MILLIS_PER_HOUR) { clearTransitionRules(); } // ------------------------------------- SimpleTimeZone::SimpleTimeZone(int32_t rawOffsetGMT, const UnicodeString& ID, int8_t savingsStartMonth, int8_t savingsStartDay, int8_t savingsStartDayOfWeek, int32_t savingsStartTime, int8_t savingsEndMonth, int8_t savingsEndDay, int8_t savingsEndDayOfWeek, int32_t savingsEndTime, UErrorCode& status) : BasicTimeZone(ID) { clearTransitionRules(); construct(rawOffsetGMT, savingsStartMonth, savingsStartDay, savingsStartDayOfWeek, savingsStartTime, WALL_TIME, savingsEndMonth, savingsEndDay, savingsEndDayOfWeek, savingsEndTime, WALL_TIME, U_MILLIS_PER_HOUR, status); } // ------------------------------------- SimpleTimeZone::SimpleTimeZone(int32_t rawOffsetGMT, const UnicodeString& ID, int8_t savingsStartMonth, int8_t savingsStartDay, int8_t savingsStartDayOfWeek, int32_t savingsStartTime, int8_t savingsEndMonth, int8_t savingsEndDay, int8_t savingsEndDayOfWeek, int32_t savingsEndTime, int32_t savingsDST, UErrorCode& status) : BasicTimeZone(ID) { clearTransitionRules(); construct(rawOffsetGMT, savingsStartMonth, savingsStartDay, savingsStartDayOfWeek, savingsStartTime, WALL_TIME, savingsEndMonth, savingsEndDay, savingsEndDayOfWeek, savingsEndTime, WALL_TIME, savingsDST, status); } // ------------------------------------- SimpleTimeZone::SimpleTimeZone(int32_t rawOffsetGMT, const UnicodeString& ID, int8_t savingsStartMonth, int8_t savingsStartDay, int8_t savingsStartDayOfWeek, int32_t savingsStartTime, TimeMode savingsStartTimeMode, int8_t savingsEndMonth, int8_t savingsEndDay, int8_t savingsEndDayOfWeek, int32_t savingsEndTime, TimeMode savingsEndTimeMode, int32_t savingsDST, UErrorCode& status) : BasicTimeZone(ID) { clearTransitionRules(); construct(rawOffsetGMT, savingsStartMonth, savingsStartDay, savingsStartDayOfWeek, savingsStartTime, savingsStartTimeMode, savingsEndMonth, savingsEndDay, savingsEndDayOfWeek, savingsEndTime, savingsEndTimeMode, savingsDST, status); } /** * Internal construction method. 
*/ void SimpleTimeZone::construct(int32_t rawOffsetGMT, int8_t savingsStartMonth, int8_t savingsStartDay, int8_t savingsStartDayOfWeek, int32_t savingsStartTime, TimeMode savingsStartTimeMode, int8_t savingsEndMonth, int8_t savingsEndDay, int8_t savingsEndDayOfWeek, int32_t savingsEndTime, TimeMode savingsEndTimeMode, int32_t savingsDST, UErrorCode& status) { this->rawOffset = rawOffsetGMT; this->startMonth = savingsStartMonth; this->startDay = savingsStartDay; this->startDayOfWeek = savingsStartDayOfWeek; this->startTime = savingsStartTime; this->startTimeMode = savingsStartTimeMode; this->endMonth = savingsEndMonth; this->endDay = savingsEndDay; this->endDayOfWeek = savingsEndDayOfWeek; this->endTime = savingsEndTime; this->endTimeMode = savingsEndTimeMode; this->dstSavings = savingsDST; this->startYear = 0; this->startMode = DOM_MODE; this->endMode = DOM_MODE; decodeRules(status); if (savingsDST <= 0) { status = U_ILLEGAL_ARGUMENT_ERROR; } } // ------------------------------------- SimpleTimeZone::~SimpleTimeZone() { deleteTransitionRules(); } // ------------------------------------- // Called by TimeZone::createDefault(), then clone() inside a Mutex - be careful. SimpleTimeZone::SimpleTimeZone(const SimpleTimeZone &source) : BasicTimeZone(source) { *this = source; } // ------------------------------------- // Called by TimeZone::createDefault(), then clone() inside a Mutex - be careful. SimpleTimeZone & SimpleTimeZone::operator=(const SimpleTimeZone &right) { if (this != &right) { TimeZone::operator=(right); rawOffset = right.rawOffset; startMonth = right.startMonth; startDay = right.startDay; startDayOfWeek = right.startDayOfWeek; startTime = right.startTime; startTimeMode = right.startTimeMode; startMode = right.startMode; endMonth = right.endMonth; endDay = right.endDay; endDayOfWeek = right.endDayOfWeek; endTime = right.endTime; endTimeMode = right.endTimeMode; endMode = right.endMode; startYear = right.startYear; dstSavings = right.dstSavings; useDaylight = right.useDaylight; clearTransitionRules(); } return *this; } // ------------------------------------- UBool SimpleTimeZone::operator==(const TimeZone& that) const { return ((this == &that) || (typeid(*this) == typeid(that) && TimeZone::operator==(that) && hasSameRules(that))); } // ------------------------------------- // Called by TimeZone::createDefault() inside a Mutex - be careful. TimeZone* SimpleTimeZone::clone() const { return new SimpleTimeZone(*this); } // ------------------------------------- /** * Sets the daylight savings starting year, that is, the year this time zone began * observing its specified daylight savings time rules. The time zone is considered * not to observe daylight savings time prior to that year; SimpleTimeZone doesn't * support historical daylight-savings-time rules. * @param year the daylight savings starting year. */ void SimpleTimeZone::setStartYear(int32_t year) { startYear = year; transitionRulesInitialized = FALSE; } // ------------------------------------- /** * Sets the daylight savings starting rule. For example, in the U.S., Daylight Savings * Time starts at the first Sunday in April, at 2 AM in standard time. * Therefore, you can set the start rule by calling: * setStartRule(TimeFields.APRIL, 1, TimeFields.SUNDAY, 2*60*60*1000); * The dayOfWeekInMonth and dayOfWeek parameters together specify how to calculate * the exact starting date. 
Their exact meaning depend on their respective signs, * allowing various types of rules to be constructed, as follows:<ul> * <li>If both dayOfWeekInMonth and dayOfWeek are positive, they specify the * day of week in the month (e.g., (2, WEDNESDAY) is the second Wednesday * of the month). * <li>If dayOfWeek is positive and dayOfWeekInMonth is negative, they specify * the day of week in the month counting backward from the end of the month. * (e.g., (-1, MONDAY) is the last Monday in the month) * <li>If dayOfWeek is zero and dayOfWeekInMonth is positive, dayOfWeekInMonth * specifies the day of the month, regardless of what day of the week it is. * (e.g., (10, 0) is the tenth day of the month) * <li>If dayOfWeek is zero and dayOfWeekInMonth is negative, dayOfWeekInMonth * specifies the day of the month counting backward from the end of the * month, regardless of what day of the week it is (e.g., (-2, 0) is the * next-to-last day of the month). * <li>If dayOfWeek is negative and dayOfWeekInMonth is positive, they specify the * first specified day of the week on or after the specfied day of the month. * (e.g., (15, -SUNDAY) is the first Sunday after the 15th of the month * [or the 15th itself if the 15th is a Sunday].) * <li>If dayOfWeek and DayOfWeekInMonth are both negative, they specify the * last specified day of the week on or before the specified day of the month. * (e.g., (-20, -TUESDAY) is the last Tuesday before the 20th of the month * [or the 20th itself if the 20th is a Tuesday].)</ul> * @param month the daylight savings starting month. Month is 0-based. * eg, 0 for January. * @param dayOfWeekInMonth the daylight savings starting * day-of-week-in-month. Please see the member description for an example. * @param dayOfWeek the daylight savings starting day-of-week. Please see * the member description for an example. * @param time the daylight savings starting time. Please see the member * description for an example. */ void SimpleTimeZone::setStartRule(int32_t month, int32_t dayOfWeekInMonth, int32_t dayOfWeek, int32_t time, TimeMode mode, UErrorCode& status) { startMonth = (int8_t)month; startDay = (int8_t)dayOfWeekInMonth; startDayOfWeek = (int8_t)dayOfWeek; startTime = time; startTimeMode = mode; decodeStartRule(status); transitionRulesInitialized = FALSE; } // ------------------------------------- void SimpleTimeZone::setStartRule(int32_t month, int32_t dayOfMonth, int32_t time, TimeMode mode, UErrorCode& status) { setStartRule(month, dayOfMonth, 0, time, mode, status); } // ------------------------------------- void SimpleTimeZone::setStartRule(int32_t month, int32_t dayOfMonth, int32_t dayOfWeek, int32_t time, TimeMode mode, UBool after, UErrorCode& status) { setStartRule(month, after ? dayOfMonth : -dayOfMonth, -dayOfWeek, time, mode, status); } // ------------------------------------- /** * Sets the daylight savings ending rule. For example, in the U.S., Daylight * Savings Time ends at the last (-1) Sunday in October, at 2 AM in standard time. * Therefore, you can set the end rule by calling: * setEndRule(TimeFields.OCTOBER, -1, TimeFields.SUNDAY, 2*60*60*1000); * Various other types of rules can be specified by manipulating the dayOfWeek * and dayOfWeekInMonth parameters. For complete details, see the documentation * for setStartRule(). * @param month the daylight savings ending month. Month is 0-based. * eg, 0 for January. * @param dayOfWeekInMonth the daylight savings ending * day-of-week-in-month. See setStartRule() for a complete explanation. 
* @param dayOfWeek the daylight savings ending day-of-week. See setStartRule() * for a complete explanation. * @param time the daylight savings ending time. Please see the member * description for an example. */ void SimpleTimeZone::setEndRule(int32_t month, int32_t dayOfWeekInMonth, int32_t dayOfWeek, int32_t time, TimeMode mode, UErrorCode& status) { endMonth = (int8_t)month; endDay = (int8_t)dayOfWeekInMonth; endDayOfWeek = (int8_t)dayOfWeek; endTime = time; endTimeMode = mode; decodeEndRule(status); transitionRulesInitialized = FALSE; } // ------------------------------------- void SimpleTimeZone::setEndRule(int32_t month, int32_t dayOfMonth, int32_t time, TimeMode mode, UErrorCode& status) { setEndRule(month, dayOfMonth, 0, time, mode, status); } // ------------------------------------- void SimpleTimeZone::setEndRule(int32_t month, int32_t dayOfMonth, int32_t dayOfWeek, int32_t time, TimeMode mode, UBool after, UErrorCode& status) { setEndRule(month, after ? dayOfMonth : -dayOfMonth, -dayOfWeek, time, mode, status); } // ------------------------------------- int32_t SimpleTimeZone::getOffset(uint8_t era, int32_t year, int32_t month, int32_t day, uint8_t dayOfWeek, int32_t millis, UErrorCode& status) const { // Check the month before calling Grego::monthLength(). This // duplicates the test that occurs in the 7-argument getOffset(), // however, this is unavoidable. We don't mind because this method, in // fact, should not be called; internal code should always call the // 7-argument getOffset(), and outside code should use Calendar.get(int // field) with fields ZONE_OFFSET and DST_OFFSET. We can't get rid of // this method because it's public API. - liu 8/10/98 if(month < UCAL_JANUARY || month > UCAL_DECEMBER) { status = U_ILLEGAL_ARGUMENT_ERROR; return 0; } return getOffset(era, year, month, day, dayOfWeek, millis, Grego::monthLength(year, month), status); } int32_t SimpleTimeZone::getOffset(uint8_t era, int32_t year, int32_t month, int32_t day, uint8_t dayOfWeek, int32_t millis, int32_t /*monthLength*/, UErrorCode& status) const { // Check the month before calling Grego::monthLength(). This // duplicates a test that occurs in the 9-argument getOffset(), // however, this is unavoidable. We don't mind because this method, in // fact, should not be called; internal code should always call the // 9-argument getOffset(), and outside code should use Calendar.get(int // field) with fields ZONE_OFFSET and DST_OFFSET. We can't get rid of // this method because it's public API. - liu 8/10/98 if (month < UCAL_JANUARY || month > UCAL_DECEMBER) { status = U_ILLEGAL_ARGUMENT_ERROR; return -1; } // We ignore monthLength because it can be derived from year and month. // This is so that February in leap years is calculated correctly. // We keep this argument in this function for backwards compatibility. 
return getOffset(era, year, month, day, dayOfWeek, millis, Grego::monthLength(year, month), Grego::previousMonthLength(year, month), status); } int32_t SimpleTimeZone::getOffset(uint8_t era, int32_t year, int32_t month, int32_t day, uint8_t dayOfWeek, int32_t millis, int32_t monthLength, int32_t prevMonthLength, UErrorCode& status) const { if(U_FAILURE(status)) return 0; if ((era != GregorianCalendar::AD && era != GregorianCalendar::BC) || month < UCAL_JANUARY || month > UCAL_DECEMBER || day < 1 || day > monthLength || dayOfWeek < UCAL_SUNDAY || dayOfWeek > UCAL_SATURDAY || millis < 0 || millis >= U_MILLIS_PER_DAY || monthLength < 28 || monthLength > 31 || prevMonthLength < 28 || prevMonthLength > 31) { status = U_ILLEGAL_ARGUMENT_ERROR; return -1; } int32_t result = rawOffset; // Bail out if we are before the onset of daylight savings time if(!useDaylight || year < startYear || era != GregorianCalendar::AD) return result; // Check for southern hemisphere. We assume that the start and end // month are different. UBool southern = (startMonth > endMonth); // Compare the date to the starting and ending rules.+1 = date>rule, -1 // = date<rule, 0 = date==rule. int32_t startCompare = compareToRule((int8_t)month, (int8_t)monthLength, (int8_t)prevMonthLength, (int8_t)day, (int8_t)dayOfWeek, millis, startTimeMode == UTC_TIME ? -rawOffset : 0, startMode, (int8_t)startMonth, (int8_t)startDayOfWeek, (int8_t)startDay, startTime); int32_t endCompare = 0; /* We don't always have to compute endCompare. For many instances, * startCompare is enough to determine if we are in DST or not. In the * northern hemisphere, if we are before the start rule, we can't have * DST. In the southern hemisphere, if we are after the start rule, we * must have DST. This is reflected in the way the next if statement * (not the one immediately following) short circuits. */ if(southern != (startCompare >= 0)) { endCompare = compareToRule((int8_t)month, (int8_t)monthLength, (int8_t)prevMonthLength, (int8_t)day, (int8_t)dayOfWeek, millis, endTimeMode == WALL_TIME ? dstSavings : (endTimeMode == UTC_TIME ? -rawOffset : 0), endMode, (int8_t)endMonth, (int8_t)endDayOfWeek, (int8_t)endDay, endTime); } // Check for both the northern and southern hemisphere cases. We // assume that in the northern hemisphere, the start rule is before the // end rule within the calendar year, and vice versa for the southern // hemisphere. 
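    // Editor note (illustrative worked example, not in the original source): for a
    // northern-hemisphere rule pair such as April..October, a July date yields
    // startCompare >= 0 and endCompare < 0, so dstSavings is added below, while a
    // February date yields startCompare < 0 and stays on standard time. For a
    // southern-hemisphere pair such as October..April (startMonth > endMonth), a
    // December date has startCompare >= 0 and a January date has endCompare < 0,
    // so either arm of the OR grants DST; a July date matches neither arm and
    // keeps the raw offset only.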
if ((!southern && (startCompare >= 0 && endCompare < 0)) || (southern && (startCompare >= 0 || endCompare < 0))) result += dstSavings; return result; } void SimpleTimeZone::getOffsetFromLocal(UDate date, int32_t nonExistingTimeOpt, int32_t duplicatedTimeOpt, int32_t& rawOffsetGMT, int32_t& savingsDST, UErrorCode& status) const { if (U_FAILURE(status)) { return; } rawOffsetGMT = getRawOffset(); int32_t year, month, dom, dow; double day = uprv_floor(date / U_MILLIS_PER_DAY); int32_t millis = (int32_t) (date - day * U_MILLIS_PER_DAY); Grego::dayToFields(day, year, month, dom, dow); savingsDST = getOffset(GregorianCalendar::AD, year, month, dom, (uint8_t) dow, millis, Grego::monthLength(year, month), status) - rawOffsetGMT; if (U_FAILURE(status)) { return; } UBool recalc = FALSE; // Now we need some adjustment if (savingsDST > 0) { if ((nonExistingTimeOpt & kStdDstMask) == kStandard || ((nonExistingTimeOpt & kStdDstMask) != kDaylight && (nonExistingTimeOpt & kFormerLatterMask) != kLatter)) { date -= getDSTSavings(); recalc = TRUE; } } else { if ((duplicatedTimeOpt & kStdDstMask) == kDaylight || ((duplicatedTimeOpt & kStdDstMask) != kStandard && (duplicatedTimeOpt & kFormerLatterMask) == kFormer)) { date -= getDSTSavings(); recalc = TRUE; } } if (recalc) { day = uprv_floor(date / U_MILLIS_PER_DAY); millis = (int32_t) (date - day * U_MILLIS_PER_DAY); Grego::dayToFields(day, year, month, dom, dow); savingsDST = getOffset(GregorianCalendar::AD, year, month, dom, (uint8_t) dow, millis, Grego::monthLength(year, month), status) - rawOffsetGMT; } } // ------------------------------------- /** * Compare a given date in the year to a rule. Return 1, 0, or -1, depending * on whether the date is after, equal to, or before the rule date. The * millis are compared directly against the ruleMillis, so any * standard-daylight adjustments must be handled by the caller. * * @return 1 if the date is after the rule date, -1 if the date is before * the rule date, or 0 if the date is equal to the rule date. */ int32_t SimpleTimeZone::compareToRule(int8_t month, int8_t monthLen, int8_t prevMonthLen, int8_t dayOfMonth, int8_t dayOfWeek, int32_t millis, int32_t millisDelta, EMode ruleMode, int8_t ruleMonth, int8_t ruleDayOfWeek, int8_t ruleDay, int32_t ruleMillis) { // Make adjustments for startTimeMode and endTimeMode millis += millisDelta; while (millis >= U_MILLIS_PER_DAY) { millis -= U_MILLIS_PER_DAY; ++dayOfMonth; dayOfWeek = (int8_t)(1 + (dayOfWeek % 7)); // dayOfWeek is one-based if (dayOfMonth > monthLen) { dayOfMonth = 1; /* When incrementing the month, it is desirible to overflow * from DECEMBER to DECEMBER+1, since we use the result to * compare against a real month. Wraparound of the value * leads to bug 4173604. */ ++month; } } while (millis < 0) { millis += U_MILLIS_PER_DAY; --dayOfMonth; dayOfWeek = (int8_t)(1 + ((dayOfWeek+5) % 7)); // dayOfWeek is one-based if (dayOfMonth < 1) { dayOfMonth = prevMonthLen; --month; } } // first compare months. If they're different, we don't have to worry about days // and times if (month < ruleMonth) return -1; else if (month > ruleMonth) return 1; // calculate the actual day of month for the rule int32_t ruleDayOfMonth = 0; // Adjust the ruleDay to the monthLen, for non-leap year February 29 rule days. 
if (ruleDay > monthLen) { ruleDay = monthLen; } switch (ruleMode) { // if the mode is day-of-month, the day of month is given case DOM_MODE: ruleDayOfMonth = ruleDay; break; // if the mode is day-of-week-in-month, calculate the day-of-month from it case DOW_IN_MONTH_MODE: // In this case ruleDay is the day-of-week-in-month (this code is using // the dayOfWeek and dayOfMonth parameters to figure out the day-of-week // of the first day of the month, so it's trusting that they're really // consistent with each other) if (ruleDay > 0) ruleDayOfMonth = 1 + (ruleDay - 1) * 7 + (7 + ruleDayOfWeek - (dayOfWeek - dayOfMonth + 1)) % 7; // if ruleDay is negative (we assume it's not zero here), we have to do // the same calculation figuring backward from the last day of the month. else { // (again, this code is trusting that dayOfWeek and dayOfMonth are // consistent with each other here, since we're using them to figure // the day of week of the first of the month) ruleDayOfMonth = monthLen + (ruleDay + 1) * 7 - (7 + (dayOfWeek + monthLen - dayOfMonth) - ruleDayOfWeek) % 7; } break; case DOW_GE_DOM_MODE: ruleDayOfMonth = ruleDay + (49 + ruleDayOfWeek - ruleDay - dayOfWeek + dayOfMonth) % 7; break; case DOW_LE_DOM_MODE: ruleDayOfMonth = ruleDay - (49 - ruleDayOfWeek + ruleDay + dayOfWeek - dayOfMonth) % 7; // Note at this point ruleDayOfMonth may be <1, although it will // be >=1 for well-formed rules. break; } // now that we have a real day-in-month for the rule, we can compare days... if (dayOfMonth < ruleDayOfMonth) return -1; else if (dayOfMonth > ruleDayOfMonth) return 1; // ...and if they're equal, we compare times if (millis < ruleMillis) return -1; else if (millis > ruleMillis) return 1; else return 0; } // ------------------------------------- int32_t SimpleTimeZone::getRawOffset() const { return rawOffset; } // ------------------------------------- void SimpleTimeZone::setRawOffset(int32_t offsetMillis) { rawOffset = offsetMillis; transitionRulesInitialized = FALSE; } // ------------------------------------- void SimpleTimeZone::setDSTSavings(int32_t millisSavedDuringDST, UErrorCode& status) { if (millisSavedDuringDST <= 0) { status = U_ILLEGAL_ARGUMENT_ERROR; } else { dstSavings = millisSavedDuringDST; } transitionRulesInitialized = FALSE; } // ------------------------------------- int32_t SimpleTimeZone::getDSTSavings() const { return dstSavings; } // ------------------------------------- UBool SimpleTimeZone::useDaylightTime() const { return useDaylight; } // ------------------------------------- /** * Overrides TimeZone * Queries if the given date is in Daylight Savings Time. */ UBool SimpleTimeZone::inDaylightTime(UDate date, UErrorCode& status) const { // This method is wasteful since it creates a new GregorianCalendar and // deletes it each time it is called. However, this is a deprecated method // and provided only for Java compatibility as of 8/6/97 [LIU]. if (U_FAILURE(status)) return FALSE; GregorianCalendar *gc = new GregorianCalendar(*this, status); /* test for NULL */ if (gc == 0) { status = U_MEMORY_ALLOCATION_ERROR; return FALSE; } gc->setTime(date, status); UBool result = gc->inDaylightTime(status); delete gc; return result; } // ------------------------------------- /** * Return true if this zone has the same rules and offset as another zone. 
* @param other the TimeZone object to be compared with * @return true if the given zone has the same rules and offset as this one */ UBool SimpleTimeZone::hasSameRules(const TimeZone& other) const { if (this == &other) return TRUE; if (typeid(*this) != typeid(other)) return FALSE; SimpleTimeZone *that = (SimpleTimeZone*)&other; return rawOffset == that->rawOffset && useDaylight == that->useDaylight && (!useDaylight // Only check rules if using DST || (dstSavings == that->dstSavings && startMode == that->startMode && startMonth == that->startMonth && startDay == that->startDay && startDayOfWeek == that->startDayOfWeek && startTime == that->startTime && startTimeMode == that->startTimeMode && endMode == that->endMode && endMonth == that->endMonth && endDay == that->endDay && endDayOfWeek == that->endDayOfWeek && endTime == that->endTime && endTimeMode == that->endTimeMode && startYear == that->startYear)); } // ------------------------------------- //---------------------------------------------------------------------- // Rule representation // // We represent the following flavors of rules: // 5 the fifth of the month // lastSun the last Sunday in the month // lastMon the last Monday in the month // Sun>=8 first Sunday on or after the eighth // Sun<=25 last Sunday on or before the 25th // This is further complicated by the fact that we need to remain // backward compatible with the 1.1 FCS. Finally, we need to minimize // API changes. In order to satisfy these requirements, we support // three representation systems, and we translate between them. // // INTERNAL REPRESENTATION // This is the format SimpleTimeZone objects take after construction or // streaming in is complete. Rules are represented directly, using an // unencoded format. We will discuss the start rule only below; the end // rule is analogous. // startMode Takes on enumerated values DAY_OF_MONTH, // DOW_IN_MONTH, DOW_AFTER_DOM, or DOW_BEFORE_DOM. // startDay The day of the month, or for DOW_IN_MONTH mode, a // value indicating which DOW, such as +1 for first, // +2 for second, -1 for last, etc. // startDayOfWeek The day of the week. Ignored for DAY_OF_MONTH. // // ENCODED REPRESENTATION // This is the format accepted by the constructor and by setStartRule() // and setEndRule(). It uses various combinations of positive, negative, // and zero values to encode the different rules. This representation // allows us to specify all the different rule flavors without altering // the API. // MODE startMonth startDay startDayOfWeek // DOW_IN_MONTH_MODE >=0 !=0 >0 // DOM_MODE >=0 >0 ==0 // DOW_GE_DOM_MODE >=0 >0 <0 // DOW_LE_DOM_MODE >=0 <0 <0 // (no DST) don't care ==0 don't care // // STREAMED REPRESENTATION // We must retain binary compatibility with the 1.1 FCS. The 1.1 code only // handles DOW_IN_MONTH_MODE and non-DST mode, the latter indicated by the // flag useDaylight. When we stream an object out, we translate into an // approximate DOW_IN_MONTH_MODE representation so the object can be parsed // and used by 1.1 code. Following that, we write out the full // representation separately so that contemporary code can recognize and // parse it. The full representation is written in a "packed" format, // consisting of a version number, a length, and an array of bytes. Future // versions of this class may specify different versions. If they wish to // include additional data, they should do so by storing them after the // packed representation below. 
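// Editor note (concrete encodings, illustrative only; the UCAL_* constants come from ucal.h):
//   setStartRule(UCAL_APRIL, 1, UCAL_SUNDAY, 2*60*60*1000, ...)    -> DOW_IN_MONTH_MODE
//       first Sunday in April (day=+1, dayOfWeek>0)
//   setEndRule(UCAL_OCTOBER, -1, UCAL_SUNDAY, 2*60*60*1000, ...)   -> DOW_IN_MONTH_MODE
//       last Sunday in October (day=-1, dayOfWeek>0)
//   setStartRule(UCAL_MARCH, 8, -UCAL_SUNDAY, 2*60*60*1000, ...)   -> DOW_GE_DOM_MODE
//       first Sunday on or after March 8 (day>0, dayOfWeek<0)
//   setStartRule(UCAL_MARCH, -25, -UCAL_SUNDAY, 2*60*60*1000, ...) -> DOW_LE_DOM_MODE
//       last Sunday on or before March 25 (day<0, dayOfWeek<0)
//   setStartRule(UCAL_MARCH, 15, 0, 2*60*60*1000, ...)             -> DOM_MODE
//       the 15th of the month (dayOfWeek==0)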
//---------------------------------------------------------------------- /** * Given a set of encoded rules in startDay and startDayOfMonth, decode * them and set the startMode appropriately. Do the same for endDay and * endDayOfMonth. Upon entry, the day of week variables may be zero or * negative, in order to indicate special modes. The day of month * variables may also be negative. Upon exit, the mode variables will be * set, and the day of week and day of month variables will be positive. * This method also recognizes a startDay or endDay of zero as indicating * no DST. */ void SimpleTimeZone::decodeRules(UErrorCode& status) { decodeStartRule(status); decodeEndRule(status); } /** * Decode the start rule and validate the parameters. The parameters are * expected to be in encoded form, which represents the various rule modes * by negating or zeroing certain values. Representation formats are: * <p> * <pre> * DOW_IN_MONTH DOM DOW>=DOM DOW<=DOM no DST * ------------ ----- -------- -------- ---------- * month 0..11 same same same don't care * day -5..5 1..31 1..31 -1..-31 0 * dayOfWeek 1..7 0 -1..-7 -1..-7 don't care * time 0..ONEDAY same same same don't care * </pre> * The range for month does not include UNDECIMBER since this class is * really specific to GregorianCalendar, which does not use that month. * The range for time includes ONEDAY (vs. ending at ONEDAY-1) because the * end rule is an exclusive limit point. That is, the range of times that * are in DST include those >= the start and < the end. For this reason, * it should be possible to specify an end of ONEDAY in order to include the * entire day. Although this is equivalent to time 0 of the following day, * it's not always possible to specify that, for example, on December 31. * While arguably the start range should still be 0..ONEDAY-1, we keep * the start and end ranges the same for consistency. */ void SimpleTimeZone::decodeStartRule(UErrorCode& status) { if(U_FAILURE(status)) return; useDaylight = (UBool)((startDay != 0) && (endDay != 0) ? TRUE : FALSE); if (useDaylight && dstSavings == 0) { dstSavings = U_MILLIS_PER_HOUR; } if (startDay != 0) { if (startMonth < UCAL_JANUARY || startMonth > UCAL_DECEMBER) { status = U_ILLEGAL_ARGUMENT_ERROR; return; } if (startTime < 0 || startTime > U_MILLIS_PER_DAY || startTimeMode < WALL_TIME || startTimeMode > UTC_TIME) { status = U_ILLEGAL_ARGUMENT_ERROR; return; } if (startDayOfWeek == 0) { startMode = DOM_MODE; } else { if (startDayOfWeek > 0) { startMode = DOW_IN_MONTH_MODE; } else { startDayOfWeek = (int8_t)-startDayOfWeek; if (startDay > 0) { startMode = DOW_GE_DOM_MODE; } else { startDay = (int8_t)-startDay; startMode = DOW_LE_DOM_MODE; } } if (startDayOfWeek > UCAL_SATURDAY) { status = U_ILLEGAL_ARGUMENT_ERROR; return; } } if (startMode == DOW_IN_MONTH_MODE) { if (startDay < -5 || startDay > 5) { status = U_ILLEGAL_ARGUMENT_ERROR; return; } } else if (startDay<1 || startDay > STATICMONTHLENGTH[startMonth]) { status = U_ILLEGAL_ARGUMENT_ERROR; return; } } } /** * Decode the end rule and validate the parameters. This method is exactly * analogous to decodeStartRule(). * @see decodeStartRule */ void SimpleTimeZone::decodeEndRule(UErrorCode& status) { if(U_FAILURE(status)) return; useDaylight = (UBool)((startDay != 0) && (endDay != 0) ? 
TRUE : FALSE); if (useDaylight && dstSavings == 0) { dstSavings = U_MILLIS_PER_HOUR; } if (endDay != 0) { if (endMonth < UCAL_JANUARY || endMonth > UCAL_DECEMBER) { status = U_ILLEGAL_ARGUMENT_ERROR; return; } if (endTime < 0 || endTime > U_MILLIS_PER_DAY || endTimeMode < WALL_TIME || endTimeMode > UTC_TIME) { status = U_ILLEGAL_ARGUMENT_ERROR; return; } if (endDayOfWeek == 0) { endMode = DOM_MODE; } else { if (endDayOfWeek > 0) { endMode = DOW_IN_MONTH_MODE; } else { endDayOfWeek = (int8_t)-endDayOfWeek; if (endDay > 0) { endMode = DOW_GE_DOM_MODE; } else { endDay = (int8_t)-endDay; endMode = DOW_LE_DOM_MODE; } } if (endDayOfWeek > UCAL_SATURDAY) { status = U_ILLEGAL_ARGUMENT_ERROR; return; } } if (endMode == DOW_IN_MONTH_MODE) { if (endDay < -5 || endDay > 5) { status = U_ILLEGAL_ARGUMENT_ERROR; return; } } else if (endDay<1 || endDay > STATICMONTHLENGTH[endMonth]) { status = U_ILLEGAL_ARGUMENT_ERROR; return; } } } UBool SimpleTimeZone::getNextTransition(UDate base, UBool inclusive, TimeZoneTransition& result) const { if (!useDaylight) { return FALSE; } UErrorCode status = U_ZERO_ERROR; checkTransitionRules(status); if (U_FAILURE(status)) { return FALSE; } UDate firstTransitionTime = firstTransition->getTime(); if (base < firstTransitionTime || (inclusive && base == firstTransitionTime)) { result = *firstTransition; } UDate stdDate, dstDate; UBool stdAvail = stdRule->getNextStart(base, dstRule->getRawOffset(), dstRule->getDSTSavings(), inclusive, stdDate); UBool dstAvail = dstRule->getNextStart(base, stdRule->getRawOffset(), stdRule->getDSTSavings(), inclusive, dstDate); if (stdAvail && (!dstAvail || stdDate < dstDate)) { result.setTime(stdDate); result.setFrom((const TimeZoneRule&)*dstRule); result.setTo((const TimeZoneRule&)*stdRule); return TRUE; } if (dstAvail && (!stdAvail || dstDate < stdDate)) { result.setTime(dstDate); result.setFrom((const TimeZoneRule&)*stdRule); result.setTo((const TimeZoneRule&)*dstRule); return TRUE; } return FALSE; } UBool SimpleTimeZone::getPreviousTransition(UDate base, UBool inclusive, TimeZoneTransition& result) const { if (!useDaylight) { return FALSE; } UErrorCode status = U_ZERO_ERROR; checkTransitionRules(status); if (U_FAILURE(status)) { return FALSE; } UDate firstTransitionTime = firstTransition->getTime(); if (base < firstTransitionTime || (!inclusive && base == firstTransitionTime)) { return FALSE; } UDate stdDate, dstDate; UBool stdAvail = stdRule->getPreviousStart(base, dstRule->getRawOffset(), dstRule->getDSTSavings(), inclusive, stdDate); UBool dstAvail = dstRule->getPreviousStart(base, stdRule->getRawOffset(), stdRule->getDSTSavings(), inclusive, dstDate); if (stdAvail && (!dstAvail || stdDate > dstDate)) { result.setTime(stdDate); result.setFrom((const TimeZoneRule&)*dstRule); result.setTo((const TimeZoneRule&)*stdRule); return TRUE; } if (dstAvail && (!stdAvail || dstDate > stdDate)) { result.setTime(dstDate); result.setFrom((const TimeZoneRule&)*stdRule); result.setTo((const TimeZoneRule&)*dstRule); return TRUE; } return FALSE; } void SimpleTimeZone::clearTransitionRules(void) { initialRule = NULL; firstTransition = NULL; stdRule = NULL; dstRule = NULL; transitionRulesInitialized = FALSE; } void SimpleTimeZone::deleteTransitionRules(void) { if (initialRule != NULL) { delete initialRule; } if (firstTransition != NULL) { delete firstTransition; } if (stdRule != NULL) { delete stdRule; } if (dstRule != NULL) { delete dstRule; } clearTransitionRules(); } /* * Lazy transition rules initializer * * Note On the removal of UMTX_CHECK from 
checkTransitionRules(): * * It would be faster to have a UInitOnce as part of a SimpleTimeZone object, * which would avoid needing to lock a mutex to check the initialization state. * But we can't easily because simpletz.h is a public header, and including * a UInitOnce as a member of SimpleTimeZone would publicly expose internal ICU headers. * * Alternatively we could have a pointer to a UInitOnce in the SimpleTimeZone object, * allocate it in the constructors. This would be a more intrusive change, but doable * if performance turns out to be an issue. */ static UMutex gLock = U_MUTEX_INITIALIZER; void SimpleTimeZone::checkTransitionRules(UErrorCode& status) const { if (U_FAILURE(status)) { return; } umtx_lock(&gLock); if (!transitionRulesInitialized) { SimpleTimeZone *ncThis = const_cast<SimpleTimeZone*>(this); ncThis->initTransitionRules(status); } umtx_unlock(&gLock); } void SimpleTimeZone::initTransitionRules(UErrorCode& status) { if (U_FAILURE(status)) { return; } if (transitionRulesInitialized) { return; } deleteTransitionRules(); UnicodeString tzid; getID(tzid); if (useDaylight) { DateTimeRule* dtRule; DateTimeRule::TimeRuleType timeRuleType; UDate firstStdStart, firstDstStart; // Create a TimeZoneRule for daylight saving time timeRuleType = (startTimeMode == STANDARD_TIME) ? DateTimeRule::STANDARD_TIME : ((startTimeMode == UTC_TIME) ? DateTimeRule::UTC_TIME : DateTimeRule::WALL_TIME); switch (startMode) { case DOM_MODE: dtRule = new DateTimeRule(startMonth, startDay, startTime, timeRuleType); break; case DOW_IN_MONTH_MODE: dtRule = new DateTimeRule(startMonth, startDay, startDayOfWeek, startTime, timeRuleType); break; case DOW_GE_DOM_MODE: dtRule = new DateTimeRule(startMonth, startDay, startDayOfWeek, true, startTime, timeRuleType); break; case DOW_LE_DOM_MODE: dtRule = new DateTimeRule(startMonth, startDay, startDayOfWeek, false, startTime, timeRuleType); break; default: status = U_INVALID_STATE_ERROR; return; } // Check for Null pointer if (dtRule == NULL) { status = U_MEMORY_ALLOCATION_ERROR; return; } // For now, use ID + "(DST)" as the name dstRule = new AnnualTimeZoneRule(tzid+UnicodeString(DST_STR), getRawOffset(), getDSTSavings(), dtRule, startYear, AnnualTimeZoneRule::MAX_YEAR); // Check for Null pointer if (dstRule == NULL) { status = U_MEMORY_ALLOCATION_ERROR; deleteTransitionRules(); return; } // Calculate the first DST start time dstRule->getFirstStart(getRawOffset(), 0, firstDstStart); // Create a TimeZoneRule for standard time timeRuleType = (endTimeMode == STANDARD_TIME) ? DateTimeRule::STANDARD_TIME : ((endTimeMode == UTC_TIME) ? 
DateTimeRule::UTC_TIME : DateTimeRule::WALL_TIME); switch (endMode) { case DOM_MODE: dtRule = new DateTimeRule(endMonth, endDay, endTime, timeRuleType); break; case DOW_IN_MONTH_MODE: dtRule = new DateTimeRule(endMonth, endDay, endDayOfWeek, endTime, timeRuleType); break; case DOW_GE_DOM_MODE: dtRule = new DateTimeRule(endMonth, endDay, endDayOfWeek, true, endTime, timeRuleType); break; case DOW_LE_DOM_MODE: dtRule = new DateTimeRule(endMonth, endDay, endDayOfWeek, false, endTime, timeRuleType); break; } // Check for Null pointer if (dtRule == NULL) { status = U_MEMORY_ALLOCATION_ERROR; deleteTransitionRules(); return; } // For now, use ID + "(STD)" as the name stdRule = new AnnualTimeZoneRule(tzid+UnicodeString(STD_STR), getRawOffset(), 0, dtRule, startYear, AnnualTimeZoneRule::MAX_YEAR); //Check for Null pointer if (stdRule == NULL) { status = U_MEMORY_ALLOCATION_ERROR; deleteTransitionRules(); return; } // Calculate the first STD start time stdRule->getFirstStart(getRawOffset(), dstRule->getDSTSavings(), firstStdStart); // Create a TimeZoneRule for initial time if (firstStdStart < firstDstStart) { initialRule = new InitialTimeZoneRule(tzid+UnicodeString(DST_STR), getRawOffset(), dstRule->getDSTSavings()); firstTransition = new TimeZoneTransition(firstStdStart, *initialRule, *stdRule); } else { initialRule = new InitialTimeZoneRule(tzid+UnicodeString(STD_STR), getRawOffset(), 0); firstTransition = new TimeZoneTransition(firstDstStart, *initialRule, *dstRule); } // Check for null pointers. if (initialRule == NULL || firstTransition == NULL) { status = U_MEMORY_ALLOCATION_ERROR; deleteTransitionRules(); return; } } else { // Create a TimeZoneRule for initial time initialRule = new InitialTimeZoneRule(tzid, getRawOffset(), 0); // Check for null pointer. if (initialRule == NULL) { status = U_MEMORY_ALLOCATION_ERROR; deleteTransitionRules(); return; } } transitionRulesInitialized = TRUE; } int32_t SimpleTimeZone::countTransitionRules(UErrorCode& /*status*/) const { return (useDaylight) ? 2 : 0; } void SimpleTimeZone::getTimeZoneRules(const InitialTimeZoneRule*& initial, const TimeZoneRule* trsrules[], int32_t& trscount, UErrorCode& status) const { if (U_FAILURE(status)) { return; } checkTransitionRules(status); if (U_FAILURE(status)) { return; } initial = initialRule; int32_t cnt = 0; if (stdRule != NULL) { if (cnt < trscount) { trsrules[cnt++] = stdRule; } if (cnt < trscount) { trsrules[cnt++] = dstRule; } } trscount = cnt; } U_NAMESPACE_END #endif /* #if !UCONFIG_NO_FORMATTING */ //eof
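Editor's illustrative sketch (not part of simpletz.cpp): a minimal example of how the constructors and accessors implemented above are typically used. It assumes only the public ICU headers; the zone ID "Example/Zone" and the offsets are made up.

#include <unicode/simpletz.h>
#include <unicode/ucal.h>

static void exampleSimpleTimeZoneUsage() {
    UErrorCode status = U_ZERO_ERROR;
    // Raw offset GMT-5; DST starts the first Sunday in April at 02:00 wall time
    // and ends the last Sunday in October at 02:00 wall time (encoded rules).
    icu::SimpleTimeZone tz(-5 * 60 * 60 * 1000, UNICODE_STRING_SIMPLE("Example/Zone"),
                           UCAL_APRIL, 1, UCAL_SUNDAY, 2 * 60 * 60 * 1000,
                           UCAL_OCTOBER, -1, UCAL_SUNDAY, 2 * 60 * 60 * 1000,
                           status);
    if (U_FAILURE(status)) { return; }
    UBool observesDst = tz.useDaylightTime();  // TRUE: both start and end rules are set
    int32_t raw = tz.getRawOffset();           // -18000000 ms
    int32_t savings = tz.getDSTSavings();      // 3600000 ms (the default of one hour)
    (void)observesDst; (void)raw; (void)savings;
}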
19,259
1,273
package org.broadinstitute.hellbender.tools.copynumber; import org.broadinstitute.hellbender.CommandLineProgramTest; import org.broadinstitute.hellbender.testutils.ArgumentsBuilder; import org.broadinstitute.hellbender.tools.copynumber.arguments.CopyNumberArgumentValidationUtils; import org.broadinstitute.hellbender.tools.copynumber.arguments.CopyNumberStandardArgument; import org.broadinstitute.hellbender.tools.copynumber.formats.collections.AllelicCountCollection; import org.broadinstitute.hellbender.tools.copynumber.formats.collections.CopyRatioCollection; import org.broadinstitute.hellbender.tools.copynumber.formats.collections.CopyRatioSegmentCollection; import org.broadinstitute.hellbender.tools.copynumber.formats.collections.ModeledSegmentCollection; import org.broadinstitute.hellbender.tools.copynumber.formats.collections.ParameterDecileCollection; import org.broadinstitute.hellbender.tools.copynumber.formats.metadata.SampleLocatableMetadata; import org.broadinstitute.hellbender.tools.copynumber.models.AlleleFractionParameter; import org.broadinstitute.hellbender.tools.copynumber.models.CopyRatioParameter; import org.broadinstitute.hellbender.tools.copynumber.models.MultidimensionalModellerUnitTest; import org.broadinstitute.hellbender.tools.copynumber.segmentation.MultisampleMultidimensionalKernelSegmenter; import org.testng.Assert; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; import java.io.File; import java.util.Arrays; import java.util.Collections; import java.util.List; /** * Integration tests for {@link ModelSegments}. We test for input validation across various run modes of the tool * and for consistency of metadata in output, but do not test for correctness of output (which is tested elsewhere, * e.g. {@link MultisampleMultidimensionalKernelSegmenter} and {@link MultidimensionalModellerUnitTest}). 
* * @author <NAME> &lt;s<EMAIL>&gt; */ public final class ModelSegmentsIntegrationTest extends CommandLineProgramTest { private static final File TEST_SUB_DIR = new File(toolsTestDir, "copynumber"); private static final File TUMOR_1_DENOISED_COPY_RATIOS_FILE = new File(TEST_SUB_DIR, "model-segments-wes-tumor-1-denoised-copy-ratios-SM-74P4M-v1-chr20-downsampled.deduplicated.denoisedCR.tsv"); private static final File TUMOR_1_DENOISED_COPY_RATIOS_WITH_MISSING_INTERVALS_FILE = new File(TEST_SUB_DIR, "model-segments-wes-tumor-1-denoised-copy-ratios-with-missing-intervals.denoisedCR.tsv"); private static final File TUMOR_1_ALLELIC_COUNTS_FILE = new File(TEST_SUB_DIR, "model-segments-wes-tumor-1-allelic-counts-SM-74P4M-v1-chr20-downsampled.deduplicated.allelicCounts.tsv"); private static final File TUMOR_1_ALLELIC_COUNTS_WITH_MISSING_SITES_FILE = new File(TEST_SUB_DIR, "model-segments-wes-tumor-1-allelic-counts-with-missing-sites.allelicCounts.tsv"); private static final File TUMOR_2_DENOISED_COPY_RATIOS_FILE = new File(TEST_SUB_DIR, "model-segments-wes-tumor-2-denoised-copy-ratios-SM-74P4M-v1-chr20-downsampled.deduplicated.denoisedCR.tsv"); private static final File TUMOR_2_ALLELIC_COUNTS_FILE = new File(TEST_SUB_DIR, "model-segments-wes-tumor-2-allelic-counts-SM-74P4M-v1-chr20-downsampled.deduplicated.allelicCounts.tsv"); private static final File NORMAL_ALLELIC_COUNTS_FILE = new File(TEST_SUB_DIR, "model-segments-wes-normal-allelic-counts-SM-74NEG-v1-chr20-downsampled.deduplicated.allelicCounts.tsv"); private static final File NORMAL_ALLELIC_COUNTS_WITH_MISSING_SITES_FILE = new File(TEST_SUB_DIR, "model-segments-wes-normal-allelic-counts-with-missing-sites.allelicCounts.tsv"); private static final String OUTPUT_PREFIX = "test"; private static final SampleLocatableMetadata TUMOR_1_EXPECTED_METADATA = new CopyRatioCollection(TUMOR_1_DENOISED_COPY_RATIOS_FILE).getMetadata(); private static final SampleLocatableMetadata TUMOR_2_EXPECTED_METADATA = new CopyRatioCollection(TUMOR_2_DENOISED_COPY_RATIOS_FILE).getMetadata(); private static final SampleLocatableMetadata NORMAL_EXPECTED_METADATA = new AllelicCountCollection(NORMAL_ALLELIC_COUNTS_FILE).getMetadata(); @Test public void testMetadata() { Assert.assertEquals( TUMOR_1_EXPECTED_METADATA, CopyNumberArgumentValidationUtils.getValidatedMetadata( new CopyRatioCollection(TUMOR_1_DENOISED_COPY_RATIOS_FILE), new CopyRatioCollection(TUMOR_1_DENOISED_COPY_RATIOS_WITH_MISSING_INTERVALS_FILE), new AllelicCountCollection(TUMOR_1_ALLELIC_COUNTS_FILE), new AllelicCountCollection(TUMOR_1_ALLELIC_COUNTS_WITH_MISSING_SITES_FILE))); Assert.assertEquals( TUMOR_2_EXPECTED_METADATA, CopyNumberArgumentValidationUtils.getValidatedMetadata( new CopyRatioCollection(TUMOR_2_DENOISED_COPY_RATIOS_FILE), new AllelicCountCollection(TUMOR_2_ALLELIC_COUNTS_FILE))); Assert.assertEquals( NORMAL_EXPECTED_METADATA, CopyNumberArgumentValidationUtils.getValidatedMetadata( new AllelicCountCollection(NORMAL_ALLELIC_COUNTS_FILE), new AllelicCountCollection(NORMAL_ALLELIC_COUNTS_WITH_MISSING_SITES_FILE))); } @DataProvider(name = "dataValidDataModesSingleSample") public Object[][] dataValidDataModesSingleSample() { return new Object[][]{ { TUMOR_1_DENOISED_COPY_RATIOS_FILE, TUMOR_1_ALLELIC_COUNTS_FILE, NORMAL_ALLELIC_COUNTS_FILE }, { TUMOR_1_DENOISED_COPY_RATIOS_FILE, TUMOR_1_ALLELIC_COUNTS_FILE, null }, { null, TUMOR_1_ALLELIC_COUNTS_FILE, NORMAL_ALLELIC_COUNTS_FILE }, { TUMOR_1_DENOISED_COPY_RATIOS_FILE, null, null }, { null, TUMOR_1_ALLELIC_COUNTS_FILE, null } }; } 
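    // Editor note (illustrative only): each row above maps to a ModelSegments input mode --
    // {denoisedCR, allelicCounts, normalAllelicCounts} is the matched-normal case,
    // {denoisedCR, allelicCounts, null} is tumor-only with both data types, and
    // {denoisedCR, null, null} / {null, allelicCounts, null} are copy-ratio-only /
    // allele-fraction-only runs. On the command line the first row corresponds roughly to
    // (file names made up):
    //   gatk ModelSegments \
    //       --denoised-copy-ratios tumor.denoisedCR.tsv \
    //       --allelic-counts tumor.allelicCounts.tsv \
    //       --normal-allelic-counts normal.allelicCounts.tsv \
    //       --output out_dir --output-prefix tumor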
@DataProvider(name = "dataInvalidDataModesSingleSample") public Object[][] dataInvalidDataModesSingleSample() { return new Object[][]{ //allele-fraction sites mismatch { TUMOR_1_DENOISED_COPY_RATIOS_FILE, TUMOR_1_ALLELIC_COUNTS_FILE, NORMAL_ALLELIC_COUNTS_WITH_MISSING_SITES_FILE }, { null, TUMOR_1_ALLELIC_COUNTS_FILE, NORMAL_ALLELIC_COUNTS_WITH_MISSING_SITES_FILE }, //sample names mismatch { TUMOR_1_DENOISED_COPY_RATIOS_FILE, TUMOR_2_ALLELIC_COUNTS_FILE, NORMAL_ALLELIC_COUNTS_FILE }, { TUMOR_1_DENOISED_COPY_RATIOS_FILE, TUMOR_2_ALLELIC_COUNTS_FILE, null }, //missing case allelic-counts file { TUMOR_1_DENOISED_COPY_RATIOS_FILE, null, NORMAL_ALLELIC_COUNTS_FILE }, //missing case files { null, null, NORMAL_ALLELIC_COUNTS_FILE } }; } @Test(dataProvider = "dataValidDataModesSingleSample") public void testValidDataModesSingleSample(final File denoisedCopyRatiosFile, final File allelicCountsFile, final File normalAllelicCountsFile) { final File outputDir = createTempDir("testDir"); final ArgumentsBuilder argsBuilder = buildArgsBuilderSingleSample( outputDir, denoisedCopyRatiosFile, allelicCountsFile, normalAllelicCountsFile); runCommandLine(argsBuilder); final boolean isAllelicCountsPresent = allelicCountsFile != null; final boolean isNormalAllelicCountsPresent = normalAllelicCountsFile != null; assertOutputFilesSingleSample(outputDir, isAllelicCountsPresent, isNormalAllelicCountsPresent); } @Test(dataProvider = "dataInvalidDataModesSingleSample", expectedExceptions = IllegalArgumentException.class) public void testInvalidDataModesSingleSample(final File denoisedCopyRatiosFile, final File allelicCountsFile, final File normalAllelicCountsFile) { final File outputDir = createTempDir("testDir"); final ArgumentsBuilder argsBuilder = buildArgsBuilderSingleSample( outputDir, denoisedCopyRatiosFile, allelicCountsFile, normalAllelicCountsFile); runCommandLine(argsBuilder); } private static ArgumentsBuilder buildArgsBuilderSingleSample(final File outputDir, final File denoisedCopyRatiosFile, final File allelicCountsFile, final File normalAllelicCountsFile) { final ArgumentsBuilder argsBuilder = new ArgumentsBuilder() .addOutput(outputDir) .add(CopyNumberStandardArgument.OUTPUT_PREFIX_LONG_NAME, OUTPUT_PREFIX); if (denoisedCopyRatiosFile != null) { argsBuilder.add(CopyNumberStandardArgument.DENOISED_COPY_RATIOS_FILE_LONG_NAME, denoisedCopyRatiosFile); } if (allelicCountsFile != null) { argsBuilder.add(CopyNumberStandardArgument.ALLELIC_COUNTS_FILE_LONG_NAME, allelicCountsFile); } if (normalAllelicCountsFile != null) { argsBuilder.add(CopyNumberStandardArgument.NORMAL_ALLELIC_COUNTS_FILE_LONG_NAME, normalAllelicCountsFile); } return argsBuilder; } private static void assertOutputFilesSingleSample(final File outputDir, final boolean isAllelicCountsPresent, final boolean isNormalAllelicCountsPresent) { Assert.assertFalse(!isAllelicCountsPresent && isNormalAllelicCountsPresent); for (final String fileTag : Arrays.asList(ModelSegments.BEGIN_FIT_FILE_TAG, ModelSegments.FINAL_FIT_FILE_TAG)) { final ModeledSegmentCollection modeledSegments = new ModeledSegmentCollection( new File(outputDir, OUTPUT_PREFIX + fileTag + ModelSegments.SEGMENTS_FILE_SUFFIX)); Assert.assertEquals(TUMOR_1_EXPECTED_METADATA, modeledSegments.getMetadata()); final ParameterDecileCollection<CopyRatioParameter> copyRatioParameters = new ParameterDecileCollection<>( new File(outputDir, OUTPUT_PREFIX + fileTag + ModelSegments.COPY_RATIO_MODEL_PARAMETER_FILE_SUFFIX), CopyRatioParameter.class); 
Assert.assertEquals(TUMOR_1_EXPECTED_METADATA.getSampleName(), copyRatioParameters.getMetadata().getSampleName()); final ParameterDecileCollection<AlleleFractionParameter> alleleFractionParameters = new ParameterDecileCollection<>( new File(outputDir, OUTPUT_PREFIX + fileTag + ModelSegments.ALLELE_FRACTION_MODEL_PARAMETER_FILE_SUFFIX), AlleleFractionParameter.class); Assert.assertEquals(TUMOR_1_EXPECTED_METADATA.getSampleName(), alleleFractionParameters.getMetadata().getSampleName()); } final CopyRatioSegmentCollection copyRatioSegments = new CopyRatioSegmentCollection( new File(outputDir, OUTPUT_PREFIX + ModelSegments.COPY_RATIO_SEGMENTS_FOR_CALLER_FILE_SUFFIX)); Assert.assertEquals(TUMOR_1_EXPECTED_METADATA, copyRatioSegments.getMetadata()); Assert.assertTrue(new File(outputDir, OUTPUT_PREFIX + ModelSegments.COPY_RATIO_LEGACY_SEGMENTS_FILE_SUFFIX).exists()); Assert.assertTrue(new File(outputDir, OUTPUT_PREFIX + ModelSegments.ALLELE_FRACTION_LEGACY_SEGMENTS_FILE_SUFFIX).exists()); AllelicCountCollection hetAllelicCounts = null; if (isAllelicCountsPresent) { hetAllelicCounts = new AllelicCountCollection( new File(outputDir, OUTPUT_PREFIX + ModelSegments.HET_ALLELIC_COUNTS_FILE_SUFFIX)); Assert.assertEquals(TUMOR_1_EXPECTED_METADATA, hetAllelicCounts.getMetadata()); } if (isNormalAllelicCountsPresent) { //if this is true, case sample allelic counts will be present final AllelicCountCollection hetNormalAllelicCounts = new AllelicCountCollection( new File(outputDir, OUTPUT_PREFIX + ModelSegments.NORMAL_HET_ALLELIC_COUNTS_FILE_SUFFIX)); Assert.assertEquals(NORMAL_EXPECTED_METADATA, hetNormalAllelicCounts.getMetadata()); Assert.assertTrue(CopyNumberArgumentValidationUtils.isSameDictionary( //sequence dictionary should be the same TUMOR_1_EXPECTED_METADATA.getSequenceDictionary(), hetNormalAllelicCounts.getMetadata().getSequenceDictionary())); Assert.assertEquals(hetAllelicCounts.getIntervals(), hetNormalAllelicCounts.getIntervals()); } Assert.assertFalse(new File(outputDir, OUTPUT_PREFIX + ModelSegments.PICARD_INTERVAL_LIST_FILE_SUFFIX).exists()); } @DataProvider(name = "dataValidDataModesMultipleSamples") public Object[][] dataValidDataModesMultipleSamples() { return new Object[][]{ { Arrays.asList(TUMOR_1_DENOISED_COPY_RATIOS_FILE, TUMOR_2_DENOISED_COPY_RATIOS_FILE), Arrays.asList(TUMOR_1_ALLELIC_COUNTS_FILE, TUMOR_2_ALLELIC_COUNTS_FILE), NORMAL_ALLELIC_COUNTS_FILE }, { Arrays.asList(TUMOR_1_DENOISED_COPY_RATIOS_FILE, TUMOR_2_DENOISED_COPY_RATIOS_FILE), Arrays.asList(TUMOR_1_ALLELIC_COUNTS_FILE, TUMOR_2_ALLELIC_COUNTS_FILE), null }, { null, Arrays.asList(TUMOR_1_ALLELIC_COUNTS_FILE, TUMOR_2_ALLELIC_COUNTS_FILE), NORMAL_ALLELIC_COUNTS_FILE }, { Arrays.asList(TUMOR_1_DENOISED_COPY_RATIOS_FILE, TUMOR_2_DENOISED_COPY_RATIOS_FILE), null, null }, { null, Arrays.asList(TUMOR_1_ALLELIC_COUNTS_FILE, TUMOR_2_ALLELIC_COUNTS_FILE), null } }; } @DataProvider(name = "dataInvalidDataModesMultipleSamples") public Object[][] dataInvalidDataModesMultipleSamples() { return new Object[][]{ //copy-ratio intervals mismatch { Arrays.asList(TUMOR_1_DENOISED_COPY_RATIOS_WITH_MISSING_INTERVALS_FILE, TUMOR_2_DENOISED_COPY_RATIOS_FILE), Arrays.asList(TUMOR_1_ALLELIC_COUNTS_FILE, TUMOR_2_ALLELIC_COUNTS_FILE), NORMAL_ALLELIC_COUNTS_FILE }, { Arrays.asList(TUMOR_1_DENOISED_COPY_RATIOS_WITH_MISSING_INTERVALS_FILE, TUMOR_2_DENOISED_COPY_RATIOS_FILE), Arrays.asList(TUMOR_1_ALLELIC_COUNTS_FILE, TUMOR_2_ALLELIC_COUNTS_FILE), null }, { Arrays.asList(TUMOR_1_DENOISED_COPY_RATIOS_WITH_MISSING_INTERVALS_FILE, 
TUMOR_2_DENOISED_COPY_RATIOS_FILE), null, null }, //case allele-fraction sites mismatch { Arrays.asList(TUMOR_1_DENOISED_COPY_RATIOS_FILE, TUMOR_2_DENOISED_COPY_RATIOS_FILE), Arrays.asList(TUMOR_1_ALLELIC_COUNTS_WITH_MISSING_SITES_FILE, TUMOR_2_ALLELIC_COUNTS_FILE), NORMAL_ALLELIC_COUNTS_FILE }, { Arrays.asList(TUMOR_1_DENOISED_COPY_RATIOS_FILE, TUMOR_2_DENOISED_COPY_RATIOS_FILE), Arrays.asList(TUMOR_1_ALLELIC_COUNTS_WITH_MISSING_SITES_FILE, TUMOR_2_ALLELIC_COUNTS_FILE), null }, { null, Arrays.asList(TUMOR_1_ALLELIC_COUNTS_WITH_MISSING_SITES_FILE, TUMOR_2_ALLELIC_COUNTS_FILE), null }, //normal allele-fraction sites mismatch { Arrays.asList(TUMOR_1_DENOISED_COPY_RATIOS_FILE, TUMOR_2_DENOISED_COPY_RATIOS_FILE), Arrays.asList(TUMOR_1_ALLELIC_COUNTS_FILE, TUMOR_2_ALLELIC_COUNTS_FILE), NORMAL_ALLELIC_COUNTS_WITH_MISSING_SITES_FILE }, { null, Arrays.asList(TUMOR_1_ALLELIC_COUNTS_FILE, TUMOR_2_ALLELIC_COUNTS_FILE), NORMAL_ALLELIC_COUNTS_WITH_MISSING_SITES_FILE }, //sample order mismatch { Arrays.asList(TUMOR_1_DENOISED_COPY_RATIOS_FILE, TUMOR_2_DENOISED_COPY_RATIOS_FILE), Arrays.asList(TUMOR_2_ALLELIC_COUNTS_FILE, TUMOR_1_ALLELIC_COUNTS_FILE), NORMAL_ALLELIC_COUNTS_FILE }, { Arrays.asList(TUMOR_1_DENOISED_COPY_RATIOS_FILE, TUMOR_2_DENOISED_COPY_RATIOS_FILE), Arrays.asList(TUMOR_2_ALLELIC_COUNTS_FILE, TUMOR_1_ALLELIC_COUNTS_FILE), null }, //sample number mismatch { Arrays.asList(TUMOR_1_DENOISED_COPY_RATIOS_FILE, TUMOR_2_DENOISED_COPY_RATIOS_FILE), Collections.singletonList(TUMOR_1_ALLELIC_COUNTS_FILE), NORMAL_ALLELIC_COUNTS_FILE }, { Arrays.asList(TUMOR_1_DENOISED_COPY_RATIOS_FILE, TUMOR_2_DENOISED_COPY_RATIOS_FILE), Collections.singletonList(TUMOR_1_ALLELIC_COUNTS_FILE), null }, { Collections.singletonList(TUMOR_1_DENOISED_COPY_RATIOS_FILE), Arrays.asList(TUMOR_1_ALLELIC_COUNTS_FILE, TUMOR_2_ALLELIC_COUNTS_FILE), NORMAL_ALLELIC_COUNTS_FILE }, { Collections.singletonList(TUMOR_1_DENOISED_COPY_RATIOS_FILE), Arrays.asList(TUMOR_1_ALLELIC_COUNTS_FILE, TUMOR_2_ALLELIC_COUNTS_FILE), null }, //missing case allelic-counts files { Arrays.asList(TUMOR_1_DENOISED_COPY_RATIOS_FILE, TUMOR_2_DENOISED_COPY_RATIOS_FILE), null, NORMAL_ALLELIC_COUNTS_FILE } }; } @Test(dataProvider = "dataValidDataModesMultipleSamples") public void testValidDataModesMultipleSamples(final List<File> denoisedCopyRatiosFiles, final List<File> allelicCountsFiles, final File normalAllelicCountsFile) { final File outputDir = createTempDir("testDir"); final ArgumentsBuilder argsBuilder = buildArgsBuilderMultipleSamples( outputDir, denoisedCopyRatiosFiles, allelicCountsFiles, normalAllelicCountsFile); runCommandLine(argsBuilder); final boolean isAllelicCountsPresent = allelicCountsFiles != null; final boolean isNormalAllelicCountsPresent = normalAllelicCountsFile != null; assertOutputFilesMultipleSamples(outputDir, isAllelicCountsPresent, isNormalAllelicCountsPresent); } @Test(dataProvider = "dataInvalidDataModesMultipleSamples", expectedExceptions = IllegalArgumentException.class) public void testInvalidDataModesMultipleSamples(final List<File> denoisedCopyRatiosFiles, final List<File> allelicCountsFiles, final File normalAllelicCountsFile) { final File outputDir = createTempDir("testDir"); final ArgumentsBuilder argsBuilder = buildArgsBuilderMultipleSamples( outputDir, denoisedCopyRatiosFiles, allelicCountsFiles, normalAllelicCountsFile); runCommandLine(argsBuilder); } private static ArgumentsBuilder buildArgsBuilderMultipleSamples(final File outputDir, final List<File> denoisedCopyRatiosFiles, final List<File> allelicCountsFiles, final 
File normalAllelicCountsFile) { final ArgumentsBuilder argsBuilder = new ArgumentsBuilder() .addOutput(outputDir) .add(CopyNumberStandardArgument.OUTPUT_PREFIX_LONG_NAME, OUTPUT_PREFIX); if (denoisedCopyRatiosFiles != null) { denoisedCopyRatiosFiles.forEach(f -> argsBuilder.add(CopyNumberStandardArgument.DENOISED_COPY_RATIOS_FILE_LONG_NAME, f)); } if (allelicCountsFiles != null) { allelicCountsFiles.forEach(f -> argsBuilder.add(CopyNumberStandardArgument.ALLELIC_COUNTS_FILE_LONG_NAME, f)); } if (normalAllelicCountsFile != null) { argsBuilder.add(CopyNumberStandardArgument.NORMAL_ALLELIC_COUNTS_FILE_LONG_NAME, normalAllelicCountsFile); } return argsBuilder; } private static void assertOutputFilesMultipleSamples(final File outputDir, final boolean isAllelicCountsPresent, final boolean isNormalAllelicCountsPresent) { Assert.assertFalse(!isAllelicCountsPresent && isNormalAllelicCountsPresent); for (final String fileTag : Arrays.asList(ModelSegments.BEGIN_FIT_FILE_TAG, ModelSegments.FINAL_FIT_FILE_TAG)) { Assert.assertFalse(new File(outputDir, OUTPUT_PREFIX + fileTag + ModelSegments.SEGMENTS_FILE_SUFFIX).exists()); Assert.assertFalse(new File(outputDir, OUTPUT_PREFIX + fileTag + ModelSegments.COPY_RATIO_MODEL_PARAMETER_FILE_SUFFIX).exists()); Assert.assertFalse(new File(outputDir, OUTPUT_PREFIX + fileTag + ModelSegments.ALLELE_FRACTION_MODEL_PARAMETER_FILE_SUFFIX).exists()); } Assert.assertFalse(new File(outputDir, OUTPUT_PREFIX + ModelSegments.COPY_RATIO_SEGMENTS_FOR_CALLER_FILE_SUFFIX).exists()); Assert.assertFalse(new File(outputDir, OUTPUT_PREFIX + ModelSegments.COPY_RATIO_LEGACY_SEGMENTS_FILE_SUFFIX).exists()); Assert.assertFalse(new File(outputDir, OUTPUT_PREFIX + ModelSegments.ALLELE_FRACTION_LEGACY_SEGMENTS_FILE_SUFFIX).exists()); Assert.assertFalse(new File(outputDir, OUTPUT_PREFIX + ModelSegments.HET_ALLELIC_COUNTS_FILE_SUFFIX).exists()); Assert.assertFalse(new File(outputDir, OUTPUT_PREFIX + ModelSegments.NORMAL_HET_ALLELIC_COUNTS_FILE_SUFFIX).exists()); Assert.assertTrue(new File(outputDir, OUTPUT_PREFIX + ModelSegments.PICARD_INTERVAL_LIST_FILE_SUFFIX).exists()); } }
12,810
9,680
# Copyright (c) Microsoft Corporation. # Licensed under the MIT license. import os import sys import numpy import sklearn.gaussian_process as gp sys.path.insert(1, os.path.join(sys.path[0], '..')) def create_model(samples_x, samples_y_aggregation, n_restarts_optimizer=250, is_white_kernel=False): ''' Trains GP regression model ''' kernel = gp.kernels.ConstantKernel(constant_value=1, constant_value_bounds=(1e-12, 1e12)) * \ gp.kernels.Matern(nu=1.5) if is_white_kernel is True: kernel += gp.kernels.WhiteKernel(noise_level=1, noise_level_bounds=(1e-12, 1e12)) regressor = gp.GaussianProcessRegressor(kernel=kernel, n_restarts_optimizer=n_restarts_optimizer, normalize_y=True, alpha=1e-10) regressor.fit(numpy.array(samples_x), numpy.array(samples_y_aggregation)) model = {} model['model'] = regressor model['kernel_prior'] = str(kernel) model['kernel_posterior'] = str(regressor.kernel_) model['model_loglikelihood'] = regressor.log_marginal_likelihood(regressor.kernel_.theta) return model
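A minimal usage sketch (editor addition): it exercises the create_model() helper above on made-up sample data and queries the fitted regressor; only numpy and scikit-learn, which the module already imports, are assumed.

if __name__ == '__main__':
    # Made-up 2-D samples and aggregated objective values, purely for illustration.
    samples_x = [[0.1, 1.0], [0.4, 0.5], [0.9, 0.2], [0.6, 0.8]]
    samples_y_aggregation = [0.30, 0.55, 0.80, 0.62]
    model = create_model(samples_x, samples_y_aggregation,
                         n_restarts_optimizer=10, is_white_kernel=True)
    # The returned dict wraps the fitted GaussianProcessRegressor plus kernel metadata.
    mean, std = model['model'].predict(numpy.array([[0.5, 0.5]]), return_std=True)
    print(model['kernel_posterior'], float(mean[0]), float(std[0]))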
644
431
/* * * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. * * Copyright 1997-2007 Sun Microsystems, Inc. All rights reserved. * * The contents of this file are subject to the terms of either the GNU * General Public License Version 2 only ("GPL") or the Common Development * and Distribution License("CDDL") (collectively, the "License"). You * may not use this file except in compliance with the License. You can obtain * a copy of the License at https://jersey.dev.java.net/CDDL+GPL.html * or jersey/legal/LICENSE.txt. See the License for the specific * language governing permissions and limitations under the License. * * When distributing the software, include this License Header Notice in each * file and include the License file at jersey/legal/LICENSE.txt. * Sun designates this particular file as subject to the "Classpath" exception * as provided by Sun in the GPL Version 2 section of the License file that * accompanied this code. If applicable, add the following below the License * Header, with the fields enclosed by brackets [] replaced by your own * identifying information: "Portions Copyrighted [year] * [name of copyright owner]" * * Contributor(s): * * If you wish your version of this file to be governed by only the CDDL or * only the GPL Version 2, indicate your decision by adding "[Contributor] * elects to include this software in this distribution under the [CDDL or GPL * Version 2] license." If you don't indicate a single choice of license, a * recipient has the option to distribute your version of this file under * either the CDDL, the GPL Version 2 or to extend the choice of license to * its licensees as provided above. However, if you add GPL Version 2 code * and therefore, elected the GPL Version 2 license, then the option applies * only if the new code is made subject to such option by the copyright * holder. */ package com.sun.jersey.samples.storageservice.resources; import com.sun.jersey.samples.storageservice.Container; import com.sun.jersey.samples.storageservice.Item; import com.sun.jersey.samples.storageservice.MemoryStore; import java.math.BigInteger; import java.net.URI; import java.security.MessageDigest; import java.util.Date; import java.util.GregorianCalendar; import javax.ws.rs.DELETE; import javax.ws.rs.GET; import javax.ws.rs.NotFoundException; import javax.ws.rs.PUT; import javax.ws.rs.core.EntityTag; import javax.ws.rs.core.Context; import javax.ws.rs.core.HttpHeaders; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Request; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.ResponseBuilder; import javax.ws.rs.core.UriInfo; /** * An item in the container. * * @author <EMAIL> */ public class ItemResource { UriInfo uriInfo; Request request; String container; String item; public ItemResource(UriInfo uriInfo, Request request, String container, String item) { this.uriInfo = uriInfo; this.request = request; this.container = container; this.item = item; } @GET public Response getItem() { System.out.println("GET ITEM " + container + " " + item); Item i = MemoryStore.MS.getItem(container, item); if (i == null) throw new NotFoundException("Item not found"); Date lastModified = i.getLastModified().getTime(); EntityTag et = new EntityTag(i.getDigest()); ResponseBuilder rb = request.evaluatePreconditions(lastModified, et); if (rb != null) return rb.build(); byte[] b = MemoryStore.MS.getItemData(container, item); return Response.ok(b, i.getMimeType()). 
lastModified(lastModified).tag(et).build(); } @PUT public Response putItem( @Context HttpHeaders headers, byte[] data) { System.out.println("PUT ITEM " + container + " " + item); URI uri = uriInfo.getAbsolutePath(); MediaType mimeType = headers.getMediaType(); GregorianCalendar gc = new GregorianCalendar(); gc.set(GregorianCalendar.MILLISECOND, 0); Item i = new Item(item, uri.toString(), mimeType.toString(), gc); String digest = computeDigest(data); i.setDigest(digest); Response r; if (!MemoryStore.MS.hasItem(container, item)) { r = Response.created(uri).build(); } else { r = Response.noContent().build(); } Item ii = MemoryStore.MS.createOrUpdateItem(container, i, data); if (ii == null) { // Create the container if one has not been created URI containerUri = uriInfo.getAbsolutePathBuilder().path(".."). build().normalize(); Container c = new Container(container, containerUri.toString()); MemoryStore.MS.createContainer(c); i = MemoryStore.MS.createOrUpdateItem(container, i, data); if (i == null) throw new NotFoundException("Container not found"); } return r; } @DELETE public void deleteItem() { System.out.println("DELETE ITEM " + container + " " + item); Item i = MemoryStore.MS.deleteItem(container, item); if (i == null) { throw new NotFoundException("Item not found"); } } private String computeDigest(byte[] content) { try { MessageDigest md = MessageDigest.getInstance("SHA"); byte[] digest = md.digest(content); BigInteger bi = new BigInteger(digest); return bi.toString(16); } catch (Exception e) { return ""; } } }
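A hedged client-side sketch (editor addition) of how the Last-Modified/ETag preconditions evaluated in getItem() are typically exercised. Only standard java.net classes are used; the host, port and item path are made up.

import java.net.HttpURLConnection;
import java.net.URL;

public class ConditionalGetExample {
    public static void main(String[] args) throws Exception {
        URL itemUri = new URL("http://localhost:9998/storage/containers/photos/item1"); // made-up URI
        // First GET: capture the ETag the resource derives from the SHA digest of the item data.
        HttpURLConnection first = (HttpURLConnection) itemUri.openConnection();
        String etag = first.getHeaderField("ETag");
        first.disconnect();
        // Conditional GET: when the item is unchanged, evaluatePreconditions() returns a
        // ResponseBuilder and the resource answers 304 Not Modified without resending the body.
        HttpURLConnection second = (HttpURLConnection) itemUri.openConnection();
        second.setRequestProperty("If-None-Match", etag);
        System.out.println("Status: " + second.getResponseCode()); // expect 304 if unchanged
        second.disconnect();
    }
}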
2,206
807
#!/usr/bin/env trial from absl import flags as gflags import json import mock import sys import urlparse from ct.client import log_client from ct.client import async_log_client from ct.client import log_client_test_util as test_util from ct.client.db import database from twisted.internet import defer from twisted.internet import task from twisted.internet import reactor from twisted.python import failure from twisted.test import proto_helpers from twisted.trial import unittest FLAGS = gflags.FLAGS class ResponseBodyHandlerTest(unittest.TestCase): def test_send(self): finished = defer.Deferred() handler = async_log_client.ResponseBodyHandler(finished) transport = proto_helpers.StringTransportWithDisconnection() handler.makeConnection(transport) transport.protocol = handler handler.dataReceived("test") transport.loseConnection() finished.addCallback(self.assertEqual, "test") return finished def test_send_chunks(self): test_msg = "x"*1024 chunk_size = 100 finished = defer.Deferred() handler = async_log_client.ResponseBodyHandler(finished) transport = proto_helpers.StringTransportWithDisconnection() handler.makeConnection(transport) transport.protocol = handler sent = 0 while sent < len(test_msg): handler.dataReceived(test_msg[sent:sent + chunk_size]) sent += chunk_size transport.loseConnection() finished.addCallback(self.assertEqual, test_msg) return finished def test_buffer_overflow(self): original = FLAGS.response_buffer_size_bytes FLAGS.response_buffer_size_bytes = 10 test_msg = "x"*11 finished = defer.Deferred() handler = async_log_client.ResponseBodyHandler(finished) transport = proto_helpers.StringTransportWithDisconnection() handler.makeConnection(transport) transport.protocol = handler handler.dataReceived(test_msg) transport.loseConnection() # TODO(ekasper): find a more elegant and robust way to save flags. FLAGS.response_buffer_size_bytes = original return self.assertFailure(finished, async_log_client.HTTPResponseSizeExceededError) class AsyncLogClientTest(unittest.TestCase): class FakeHandler(test_util.FakeHandlerBase): # A class that mimics twisted.web.iweb.IResponse. Note: the IResponse # interface is only partially implemented. class FakeResponse(object): def __init__(self, code, reason, json_content=None): self.code = code self.phrase = reason self.headers = AsyncLogClientTest.FakeHandler.FakeHeader() if json_content is not None: self._body = json.dumps(json_content) else: self._body = "" def deliverBody(self, protocol): transport = proto_helpers.StringTransportWithDisconnection() protocol.makeConnection(transport) transport.protocol = protocol protocol.dataReceived(self._body) transport.loseConnection() @classmethod def make_response(cls, code, reason, json_content=None): return cls.FakeResponse(code, reason, json_content=json_content) class FakeHeader(object): def getAllRawHeaders(self): return [] # Twisted doesn't (yet) have an official fake Agent: # https://twistedmatrix.com/trac/ticket/4024 class FakeAgent(object): def __init__(self, responder): self._responder = responder def request(self, method, uri): if method != "GET": return defer.fail(failure.Failure()) # Naive, for testing. path, _, params = uri.partition("?") params = urlparse.parse_qs(params) # Take the first value of each parameter. 
if any([len(params[key]) != 1 for key in params]): return defer.fail(failure.Failure()) params = {key: params[key][0] for key in params} response = self._responder.get_response(path, params=params) return defer.succeed(response) class FakeDB(object): def scan_entries(self, first, last): raise database.KeyError("boom!") def store_entries(self, entries): self.entries = list(entries) def setUp(self): self.clock = task.Clock() def one_shot_client(self, json_content): """Make a one-shot client and give it a mock response.""" mock_handler = mock.Mock() response = self.FakeHandler.make_response(200, "OK", json_content=json_content) mock_handler.get_response.return_value = response return async_log_client.AsyncLogClient(self.FakeAgent(mock_handler), test_util.DEFAULT_URI, reactor=self.clock) def default_client(self, entries_db=None, reactor_=None): # A client whose responder is configured to answer queries for the # correct uri. if reactor_ is None: reactor_ = self.clock return async_log_client.AsyncLogClient(self.FakeAgent( self.FakeHandler(test_util.DEFAULT_URI)), test_util.DEFAULT_URI, entries_db=entries_db, reactor=reactor_) def test_get_sth(self): client = self.default_client() self.assertEqual(test_util.DEFAULT_STH, self.successResultOf(client.get_sth())) def test_get_sth_raises_on_invalid_response(self): json_sth = test_util.sth_to_json(test_util.DEFAULT_STH) json_sth.pop("timestamp") client = self.one_shot_client(json_sth) return self.assertFailure(client.get_sth(), log_client.InvalidResponseError) def test_get_sth_raises_on_invalid_base64(self): json_sth = test_util.sth_to_json(test_util.DEFAULT_STH) json_sth["tree_head_signature"] = "garbagebase64^^^" client = self.one_shot_client(json_sth) return self.assertFailure(client.get_sth(), log_client.InvalidResponseError) class EntryConsumer(object): def __init__(self): self.received = [] self.consumed = defer.Deferred() def done(self, result): self.result = result self.consumed.callback("Done") def consume(self, entries): self.received += entries d = defer.Deferred() d.callback(None) return d # Helper method. def get_entries(self, client, start, end, batch_size=0): producer = client.get_entries(start, end, batch_size=batch_size) consumer = self.EntryConsumer() d = producer.startProducing(consumer) d.addBoth(consumer.done) # Ensure the tasks scheduled in the reactor are invoked. # Since start of get entries is delayed, we have to pump to make up for # that delay. If some test is going to force get_entries to do more than # one fetch, then that test has to take care of additional pumping. 
self.pump_get_entries() return consumer def pump_get_entries(self, delay=None, pumps=1): if not delay: delay = FLAGS.get_entries_retry_delay # Helper method which advances time past get_entries delay for _ in range(0, pumps): self.clock.pump([0, delay]) def test_get_entries(self): client = self.default_client() consumer = self.get_entries(client, 0, 9) self.assertEqual(10, consumer.result) self.assertTrue(test_util.verify_entries(consumer.received, 0, 9)) def test_get_sth_consistency(self): client = self.default_client() self.assertEqual([], self.successResultOf(client.get_sth_consistency(0, 9))) def test_get_entries_raises_on_invalid_response(self): json_entries = test_util.entries_to_json(test_util.make_entries(0, 9)) json_entries["entries"][5]["leaf_input"] = "garbagebase64^^^" client = self.one_shot_client(json_entries) producer = client.get_entries(0, 9) # remove exponential back-off producer._calculate_retry_delay = lambda _: 1 consumer = self.EntryConsumer() d = producer.startProducing(consumer) d.addBoth(consumer.done) # pump through retries (with retries there are 2 delays per request and # and initial delay) self.pump_get_entries(1, FLAGS.get_entries_max_retries * 2 + 1) self.assertTrue(consumer.result.check(log_client.InvalidResponseError)) # The entire response should be discarded upon error. self.assertFalse(consumer.received) def test_get_entries_raises_on_too_large_response(self): large_response = test_util.entries_to_json( test_util.make_entries(4, 5)) client = self.one_shot_client(large_response) producer = client.get_entries(4, 4) # remove exponential back-off producer._calculate_retry_delay = lambda _: 1 consumer = self.EntryConsumer() d = producer.startProducing(consumer) d.addBoth(consumer.done) # pump through retries (with retries there are 2 delays per request and # initial delay) self.pump_get_entries(1, FLAGS.get_entries_max_retries * 2 + 1) self.assertTrue(consumer.result.check(log_client.InvalidResponseError)) def test_get_entries_succedes_after_retry(self): json_entries = test_util.entries_to_json(test_util.make_entries(0, 9)) json_entries["entries"][5]["leaf_input"] = "garbagebase64^^^" client = self.one_shot_client(json_entries) producer = client.get_entries(0, 9) # remove exponential back-off producer._calculate_retry_delay = lambda _: 1 consumer = self.EntryConsumer() d = producer.startProducing(consumer) d.addBoth(consumer.done) # pump retries halfway through (there are actually two delays before # firing requests, so this loop will go only through half of retries) self.pump_get_entries(1, FLAGS.get_entries_max_retries) self.assertFalse(hasattr(consumer, 'result')) json_entries = test_util.entries_to_json(test_util.make_entries(0, 9)) response = self.FakeHandler.make_response(200, "OK", json_content=json_entries) client._handler._agent._responder.get_response.return_value = response self.pump_get_entries(1) self.assertTrue(test_util.verify_entries(consumer.received, 0, 9)) def test_get_entries_raises_if_query_is_larger_than_tree_size(self): client = async_log_client.AsyncLogClient( self.FakeAgent(self.FakeHandler( test_util.DEFAULT_URI, tree_size=3)), test_util.DEFAULT_URI, reactor=self.clock) consumer = self.get_entries(client, 0, 9) # also pump error self.pump_get_entries() self.assertTrue(consumer.result.check(log_client.HTTPClientError)) def test_get_entries_returns_all_in_batches(self): mock_handler = mock.Mock() fake_responder = self.FakeHandler(test_util.DEFAULT_URI) mock_handler.get_response.side_effect = ( fake_responder.get_response) client = 
async_log_client.AsyncLogClient(self.FakeAgent(mock_handler), test_util.DEFAULT_URI, reactor=self.clock) consumer = self.get_entries(client, 0, 9, batch_size=4) self.assertEqual(10, consumer.result) self.assertTrue(test_util.verify_entries(consumer.received, 0, 9)) self.assertEqual(3, len(mock_handler.get_response.call_args_list)) def test_get_entries_returns_all_for_limiting_server(self): client = async_log_client.AsyncLogClient( self.FakeAgent( self.FakeHandler(test_util.DEFAULT_URI, entry_limit=3)), test_util.DEFAULT_URI, reactor=self.clock) consumer = self.get_entries(client, 0, 9) # 1 pump in get_entries and 3 more so we fetch everything self.pump_get_entries(pumps=3) self.assertTrue(test_util.verify_entries(consumer.received, 0, 9)) class PausingConsumer(object): def __init__(self, pause_at): self.received = [] self.pause_at = pause_at self.already_paused = False self.result = None def registerProducer(self, producer): self.producer = producer def done(self, result): self.result = result def consume(self, entries): self.received += entries if (not self.already_paused and len(self.received) >= self.pause_at): self.producer.pauseProducing() self.already_paused = True d = defer.Deferred() d.callback(None) return d def test_get_entries_pause_resume(self): client = self.default_client() producer = client.get_entries(0, 9, batch_size=4) consumer = self.PausingConsumer(4) consumer.registerProducer(producer) d = producer.startProducing(consumer) d.addBoth(consumer.done) # fire all pending callbacks, and then fire request self.pump_get_entries() self.assertTrue(test_util.verify_entries(consumer.received, 0, 3)) self.assertEqual(4, len(consumer.received)) self.assertIsNone(consumer.result) producer.resumeProducing() # pump next 2 batches self.pump_get_entries(pumps=2) self.assertEqual(10, consumer.result) self.assertTrue(test_util.verify_entries(consumer.received, 0, 9)) def test_get_entries_use_stored_entries(self): fake_db = self.FakeDB() # if client tries to fetch entries instead of taking them from db, then # he will get 0 - 9 entries. 
If he uses db then he will get 10 - 19 fake_db.scan_entries = mock.Mock( return_value=test_util.make_entries(10, 19)) client = self.default_client(entries_db=fake_db, reactor_=reactor) consumer = self.get_entries(client, 0, 9) consumer.consumed.addCallback(lambda _: self.assertEqual(len(consumer.received), 10)) consumer.consumed.addCallback(lambda _: [self.assertEqual(test_util.make_entry(i + 10), consumer.received[i]) for i in range(0, 9)]) def test_get_entries_tries_to_fetch_if_not_available_in_db(self): fake_db = self.FakeDB() fake_db.scan_entries = mock.Mock(return_value=None) client = self.default_client(entries_db=fake_db) consumer = self.get_entries(client, 0, 9) test_util.verify_entries(consumer.received, 0, 9) def test_get_entries_stores_entries(self): fake_db = self.FakeDB() client = self.default_client(entries_db=fake_db, reactor_=reactor) consumer = self.get_entries(client, 0, 9) consumer.consumed.addCallback(lambda _: test_util.verify_entries(consumer.received, 0, 9)) consumer.consumed.addCallback(lambda _: test_util.verify_entries(fake_db.entries, 0, 9)) return consumer.consumed class BadEntryConsumer(EntryConsumer): def consume(self, entries): self.received += entries d = defer.Deferred() d.errback(ValueError("Boom!")) return d def test_get_entries_fires_done_if_consumer_raises(self): client = self.default_client() producer = client.get_entries(0, 9) consumer = self.BadEntryConsumer() d = producer.startProducing(consumer) d.addBoth(consumer.done) self.pump_get_entries() self.assertTrue(consumer.result.check(ValueError)) if __name__ == "__main__" or __name__ == "ct.client.async_log_client_test": sys.argv = FLAGS(sys.argv)
7,380
2,643
<reponame>Firm/odyssey #ifndef OD_OPTION_H #define OD_OPTION_H #include <kiwi.h> #include <argp.h> extern void od_usage(od_instance_t *instance, char *path); typedef struct { od_instance_t *instance; int silent; int verbose; int console; int log_stdout; } od_arguments_t; typedef enum { OD_OPT_CONSOLE = 10001, // >= than any utf symbol like -q -l etc OD_OPT_SILENT, OD_OPT_VERBOSE, OD_OPT_LOG_STDOUT, } od_cli_options; static struct argp_option options[] = { { "verbose", OD_OPT_VERBOSE, 0, OPTION_ARG_OPTIONAL, "Log everything", 0 }, { "silent", OD_OPT_SILENT, 0, OPTION_ARG_OPTIONAL, "Do not log anything", 0 }, { "console", OD_OPT_CONSOLE, 0, OPTION_ARG_OPTIONAL, "Do not fork on startup", 0 }, { "log_to_stdout", OD_OPT_LOG_STDOUT, 0, OPTION_ARG_OPTIONAL, "Log to stdout", 0 }, { 0 } }; static inline error_t parse_opt(int key, char *arg, struct argp_state *state) { /* Get the input argument from argp_parse, which we know is a pointer to our arguments structure. */ od_arguments_t *arguments = state->input; od_instance_t *instance = arguments->instance; switch (key) { case 'q': case 's': case OD_OPT_SILENT: arguments->silent = 1; break; case 'v': case OD_OPT_VERBOSE: arguments->verbose = 1; break; case 'h': { od_usage(instance, instance->exec_path); } break; case OD_OPT_CONSOLE: { arguments->console = 1; } break; case OD_OPT_LOG_STDOUT: { arguments->log_stdout = 1; } break; case ARGP_KEY_ARG: { if (state->arg_num >= 1) { /* Too many arguments. */ od_usage(instance, instance->exec_path); return ARGP_KEY_ERROR; } instance->config_file = strdup(arg); } break; case ARGP_KEY_END: if (state->arg_num < 1) { /* Not enough arguments. */ od_usage(instance, instance->exec_path); return ARGP_KEY_ERROR; } break; default: return ARGP_ERR_UNKNOWN; } return 0; } extern od_retcode_t od_apply_validate_cli_args(od_logger_t *logger, od_config_t *conf, od_arguments_t *args, od_rules_t *rules); static inline void od_bind_args(struct argp *argp) { /* Program documentation. */ static char doc[] = "Odyssey - scalable postgresql connection pooler"; /* A description of the arguments we accept. */ static char args_doc[] = "/path/to/odyssey.conf"; memset(argp, 0, sizeof(struct argp)); argp->options = options; argp->parser = parse_opt; argp->args_doc = args_doc; argp->doc = doc; } #endif // OD_OPTION_H
1,054
530
package org.carlspring.strongbox.security;

import org.carlspring.strongbox.security.exceptions.NotSupportedException;

/**
 * @author mtodorov
 */
public interface Group
{

    String getName();

    String getDescription();

    Group getParent() throws NotSupportedException;

}
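The Group contract above is just a name, a description, and an optional parent that may be unsupported. The sketch below is a hypothetical in-memory implementation, assuming it lives in the same package as Group and NotSupportedException; the class name SimpleGroup is invented and is not part of the strongbox codebase.

// Hypothetical in-memory implementation of the Group interface above.
public class SimpleGroup implements Group {

    private final String name;
    private final String description;
    private final Group parent; // null for a root group

    public SimpleGroup(String name, String description, Group parent) {
        this.name = name;
        this.description = description;
        this.parent = parent;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public String getDescription() {
        return description;
    }

    @Override
    public Group getParent() throws NotSupportedException {
        // A flat implementation could throw NotSupportedException here instead.
        return parent;
    }
}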
84
1,146
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.aliyun.oss.integrationtests; import static com.aliyun.oss.integrationtests.TestUtils.genFixedLengthInputStream; import java.io.File; import java.io.InputStream; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import junit.framework.Assert; import org.junit.Test; import com.aliyun.oss.model.AppendObjectRequest; import com.aliyun.oss.model.CompleteMultipartUploadRequest; import com.aliyun.oss.model.CopyObjectRequest; import com.aliyun.oss.model.CreateSymlinkRequest; import com.aliyun.oss.model.InitiateMultipartUploadRequest; import com.aliyun.oss.model.InitiateMultipartUploadResult; import com.aliyun.oss.model.ObjectMetadata; import com.aliyun.oss.model.PartETag; import com.aliyun.oss.model.TagSet; import com.aliyun.oss.model.UploadFileRequest; import com.aliyun.oss.model.UploadPartRequest; import com.aliyun.oss.model.UploadPartResult; import com.aliyun.oss.model.SetObjectTaggingRequest; public class ObjectTaggingTest extends TestBase { @Test public void testNormalSetObjectAcl() { String key = "normal-set-tagging-acl"; try { InputStream instream = genFixedLengthInputStream(1024); ossClient.putObject(bucketName, key, instream); Map<String, String> tags = new HashMap<String, String>(1); tags.put("tag1", "balabala"); tags.put("tag2", "haha"); ossClient.setObjectTagging(bucketName, key, tags); TagSet tagSet = ossClient.getObjectTagging(bucketName, key); Assert.assertEquals(tagSet.getAllTags().size(), 2); ossClient.deleteObjectTagging(bucketName, key); tagSet = ossClient.getObjectTagging(bucketName, key); Assert.assertEquals(tagSet.getAllTags().size(), 0); tagSet = new TagSet(); tagSet.setTag("tag1", "balabala"); SetObjectTaggingRequest request = new SetObjectTaggingRequest(bucketName, key) .withTagSet(tagSet); Assert.assertEquals("balabala", request.getTag("tag1")); ossClient.setObjectTagging(request); tagSet = ossClient.getObjectTagging(bucketName, key); Assert.assertEquals(tagSet.getAllTags().size(), 1); ossClient.deleteObjectTagging(bucketName, key); tagSet = ossClient.getObjectTagging(bucketName, key); Assert.assertEquals(tagSet.getAllTags().size(), 0); tagSet = new TagSet(); tagSet.setTag("tag1", "balabala"); tagSet.setTag("tag2", "ala"); //request = new SetObjectTaggingRequest(bucketName, key, tagSet); ossClient.setObjectTagging(bucketName, key, tagSet); tagSet = ossClient.getObjectTagging(bucketName, key); Assert.assertEquals(tagSet.getAllTags().size(), 2); ossClient.deleteObject(bucketName, key); } catch (Exception e) { e.printStackTrace(); Assert.fail(e.getMessage()); } } @Test public void testNormalPutObject() { String key = "normal-put-object"; try { Map<String, String> tags = new HashMap<String, String>(); tags.put("tag1 ", "balabala +"); 
tags.put("tag2+", "haha -"); ObjectMetadata metadata = new ObjectMetadata(); metadata.setObjectTagging(tags); InputStream instream = genFixedLengthInputStream(1024); ossClient.putObject(bucketName, key, instream, metadata); TagSet tagSet = ossClient.getObjectTagging(bucketName, key); Assert.assertEquals(tagSet.getAllTags().size(), 2); ossClient.deleteObject(bucketName, key); } catch (Exception e) { e.printStackTrace(); Assert.fail(e.getMessage()); } } @Test public void testNormalAppendObject() { String key = "normal-append-object"; try { Map<String, String> tags = new HashMap<String, String>(); tags.put("tag1 ", "balabala +"); tags.put("tag2+", "haha -"); ObjectMetadata metadata = new ObjectMetadata(); metadata.setObjectTagging(tags); InputStream instream = genFixedLengthInputStream(1024); AppendObjectRequest appendObjectRequest = new AppendObjectRequest(bucketName, key, instream, metadata); appendObjectRequest.setPosition(0L); ossClient.appendObject(appendObjectRequest); TagSet tagSet = ossClient.getObjectTagging(bucketName, key); Assert.assertEquals(tagSet.getAllTags().size(), 2); ossClient.deleteObject(bucketName, key); } catch (Exception e) { e.printStackTrace(); Assert.fail(e.getMessage()); } } @Test public void testMutilPartUploadObject() { String key = "normal-mutil-part-upload"; try { Map<String, String> tags = new HashMap<String, String>(1); tags.put("tag1 ", "balabala +"); tags.put("tag2+", "haha -"); ObjectMetadata metadata = new ObjectMetadata(); metadata.setObjectTagging(tags); InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest(bucketName, key, metadata); InitiateMultipartUploadResult initResult = ossClient.initiateMultipartUpload(initRequest); String uploadId = initResult.getUploadId(); List<PartETag> partETags = new ArrayList<PartETag>(); InputStream instream = genFixedLengthInputStream(1024); UploadPartRequest request = new UploadPartRequest(bucketName, key, uploadId, 1, instream, 1024); UploadPartResult uploadPartResult = ossClient.uploadPart(request); partETags.add(uploadPartResult.getPartETag()); CompleteMultipartUploadRequest completeRequest = new CompleteMultipartUploadRequest(bucketName, key, uploadId, partETags); ossClient.completeMultipartUpload(completeRequest); TagSet tagSet = ossClient.getObjectTagging(bucketName, key); Assert.assertEquals(tagSet.getAllTags().size(), 2); ossClient.deleteObject(bucketName, key); } catch (Exception e) { e.printStackTrace(); Assert.fail(e.getMessage()); } } @Test public void testNormalCopyObject() { String key = "normal-copy-object"; try { InputStream instream = genFixedLengthInputStream(1024); ossClient.putObject(bucketName, key, instream); CopyObjectRequest copyObjectRequest = new CopyObjectRequest(bucketName, key, bucketName, key); Map<String, String> tags = new HashMap<String, String>(); tags.put("tag1 ", "balabala +"); tags.put("tag2+", "haha -"); ObjectMetadata metadata = new ObjectMetadata(); metadata.setObjectTagging(tags); copyObjectRequest.setNewObjectMetadata(metadata); ossClient.copyObject(copyObjectRequest); TagSet tagSet = ossClient.getObjectTagging(bucketName, key); Assert.assertEquals(tagSet.getAllTags().size(), 2); ossClient.deleteObject(bucketName, key); } catch (Exception e) { e.printStackTrace(); Assert.fail(e.getMessage()); } } @Test public void testNormalSymlinkObject() { String key = "normal-symlink-object"; try { InputStream instream = genFixedLengthInputStream(1024); ossClient.putObject(bucketName, key, instream); Map<String, String> tags = new HashMap<String, String>(); 
tags.put("tag1 ", "balabala +"); tags.put("tag2+", "haha -"); ObjectMetadata metadata = new ObjectMetadata(); metadata.setObjectTagging(tags); CreateSymlinkRequest createSymlinkRequest = new CreateSymlinkRequest(bucketName, key, key); createSymlinkRequest.setMetadata(metadata); ossClient.createSymlink(createSymlinkRequest); TagSet tagSet = ossClient.getObjectTagging(bucketName, key); Assert.assertEquals(tagSet.getAllTags().size(), 2); ossClient.deleteObject(bucketName, key); } catch (Exception e) { e.printStackTrace(); Assert.fail(e.getMessage()); } } @Test public void testNormalUploadFile() { final String key = "normal-upload-object"; try { File file = createSampleFile(key, 1024 * 500); UploadFileRequest uploadFileRequest = new UploadFileRequest(bucketName, key); uploadFileRequest.setUploadFile(file.getAbsolutePath()); Map<String, String> tags = new HashMap<String, String>(); tags.put("tag1 ", "balabala +"); tags.put("tag2+", "haha -"); ObjectMetadata metadata = new ObjectMetadata(); metadata.setObjectTagging(tags); uploadFileRequest.setObjectMetadata(metadata); ossClient.uploadFile(uploadFileRequest); TagSet tagSet = ossClient.getObjectTagging(bucketName, key); Assert.assertEquals(tagSet.getAllTags().size(), 2); file.delete(); ossClient.deleteObject(bucketName, key); } catch (Throwable e) { e.printStackTrace(); Assert.fail(e.getMessage()); } } }
4,385
348
{"nom":"Montaigu","circ":"1ère circonscription","dpt":"Aisne","inscrits":502,"abs":282,"votants":220,"blancs":14,"nuls":5,"exp":201,"res":[{"nuance":"REM","nom":"<NAME>","voix":120},{"nuance":"FN","nom":"<NAME>","voix":81}]}
89
3,645
/* * Copyright (c) 2012 <NAME> * * This file is part of FFmpeg. * * FFmpeg is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * FFmpeg is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ /** * @file * Raw subtitles decoder */ #include "avcodec.h" #include "ass.h" #include "libavutil/bprint.h" #include "libavutil/opt.h" typedef struct { AVClass *class; const char *linebreaks; int keep_ass_markup; int readorder; } TextContext; #define OFFSET(x) offsetof(TextContext, x) #define SD AV_OPT_FLAG_SUBTITLE_PARAM | AV_OPT_FLAG_DECODING_PARAM static const AVOption options[] = { { "keep_ass_markup", "Set if ASS tags must be escaped", OFFSET(keep_ass_markup), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, .flags=SD }, { NULL } }; static int text_decode_frame(AVCodecContext *avctx, void *data, int *got_sub_ptr, AVPacket *avpkt) { int ret = 0; AVBPrint buf; AVSubtitle *sub = data; const char *ptr = avpkt->data; TextContext *text = avctx->priv_data; av_bprint_init(&buf, 0, AV_BPRINT_SIZE_UNLIMITED); if (ptr && avpkt->size > 0 && *ptr) { ff_ass_bprint_text_event(&buf, ptr, avpkt->size, text->linebreaks, text->keep_ass_markup); ret = ff_ass_add_rect(sub, buf.str, text->readorder++, 0, NULL, NULL); } av_bprint_finalize(&buf, NULL); if (ret < 0) return ret; *got_sub_ptr = sub->num_rects > 0; return avpkt->size; } static void text_flush(AVCodecContext *avctx) { TextContext *text = avctx->priv_data; if (!(avctx->flags2 & AV_CODEC_FLAG2_RO_FLUSH_NOOP)) text->readorder = 0; } #define DECLARE_CLASS(decname) static const AVClass decname ## _decoder_class = { \ .class_name = #decname " decoder", \ .item_name = av_default_item_name, \ .option = decname ## _options, \ .version = LIBAVUTIL_VERSION_INT, \ } #if CONFIG_TEXT_DECODER #define text_options options DECLARE_CLASS(text); AVCodec ff_text_decoder = { .name = "text", .long_name = NULL_IF_CONFIG_SMALL("Raw text subtitle"), .priv_data_size = sizeof(TextContext), .type = AVMEDIA_TYPE_SUBTITLE, .id = AV_CODEC_ID_TEXT, .decode = text_decode_frame, .init = ff_ass_subtitle_header_default, .priv_class = &text_decoder_class, .flush = text_flush, }; #endif #if CONFIG_VPLAYER_DECODER || CONFIG_PJS_DECODER || CONFIG_SUBVIEWER1_DECODER || CONFIG_STL_DECODER static int linebreak_init(AVCodecContext *avctx) { TextContext *text = avctx->priv_data; text->linebreaks = "|"; return ff_ass_subtitle_header_default(avctx); } #if CONFIG_VPLAYER_DECODER #define vplayer_options options DECLARE_CLASS(vplayer); AVCodec ff_vplayer_decoder = { .name = "vplayer", .long_name = NULL_IF_CONFIG_SMALL("VPlayer subtitle"), .priv_data_size = sizeof(TextContext), .type = AVMEDIA_TYPE_SUBTITLE, .id = AV_CODEC_ID_VPLAYER, .decode = text_decode_frame, .init = linebreak_init, .priv_class = &vplayer_decoder_class, .flush = text_flush, }; #endif #if CONFIG_STL_DECODER #define stl_options options DECLARE_CLASS(stl); AVCodec ff_stl_decoder = { .name = "stl", .long_name = NULL_IF_CONFIG_SMALL("Spruce subtitle format"), .priv_data_size = 
sizeof(TextContext), .type = AVMEDIA_TYPE_SUBTITLE, .id = AV_CODEC_ID_STL, .decode = text_decode_frame, .init = linebreak_init, .priv_class = &stl_decoder_class, .flush = text_flush, }; #endif #if CONFIG_PJS_DECODER #define pjs_options options DECLARE_CLASS(pjs); AVCodec ff_pjs_decoder = { .name = "pjs", .long_name = NULL_IF_CONFIG_SMALL("PJS subtitle"), .priv_data_size = sizeof(TextContext), .type = AVMEDIA_TYPE_SUBTITLE, .id = AV_CODEC_ID_PJS, .decode = text_decode_frame, .init = linebreak_init, .priv_class = &pjs_decoder_class, .flush = text_flush, }; #endif #if CONFIG_SUBVIEWER1_DECODER #define subviewer1_options options DECLARE_CLASS(subviewer1); AVCodec ff_subviewer1_decoder = { .name = "subviewer1", .long_name = NULL_IF_CONFIG_SMALL("SubViewer1 subtitle"), .priv_data_size = sizeof(TextContext), .type = AVMEDIA_TYPE_SUBTITLE, .id = AV_CODEC_ID_SUBVIEWER1, .decode = text_decode_frame, .init = linebreak_init, .priv_class = &subviewer1_decoder_class, .flush = text_flush, }; #endif #endif /* text subtitles with '|' line break */
2,425
515
package br.com.caelum.stella.format;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import org.junit.Before;
import org.junit.Test;

public class LeftSideZerosFormatterTest {

    private Formatter formatter;

    @Before
    public void setUp() throws Exception {
        formatter = new LeftSideZerosFormatter(14);
    }

    @Test
    public void testFormat() {
        String actual = formatter.format("1234567890");
        assertEquals("00001234567890", actual);
    }

    @Test
    public void testUnformat() {
        String actual = formatter.unformat("000567890");
        assertEquals("567890", actual);
    }

    @Test
    public void shouldVerifyIfAValueIsAlreadyFormattedOrNot() throws Exception {
        assertTrue(formatter.isFormatted("00001234567890"));
        assertFalse(formatter.isFormatted("00001234"));
        assertFalse(formatter.isFormatted("1234567890"));
        assertFalse(formatter.isFormatted("123456789012345"));
    }

    @Test
    public void shouldVerifyIfAValueCanBeFormattedOrNot() throws Exception {
        assertTrue(formatter.canBeFormatted("00001234567890"));
        assertTrue(formatter.canBeFormatted("00001234"));
        assertTrue(formatter.canBeFormatted("1234567890"));
        assertFalse(formatter.canBeFormatted("123456789012345"));
        assertFalse(formatter.canBeFormatted("abc123"));
    }
}
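The formatter exercised above pads a numeric string with zeros on the left up to a fixed width (14 in the test) and strips them again on unformat. A minimal sketch of that behaviour using only the JDK follows; the class and method names are illustrative and this is not the stella LeftSideZerosFormatter itself.

// Illustrative left-zero padding to a fixed width, mirroring the expectations in the test above.
public class LeftPadSketch {

    static String format(String digits, int width) {
        return String.format("%0" + width + "d", Long.parseLong(digits));
    }

    static String unformat(String padded) {
        return padded.replaceFirst("^0+(?=\\d)", ""); // keep at least one digit
    }

    public static void main(String[] args) {
        System.out.println(format("1234567890", 14)); // 00001234567890
        System.out.println(unformat("000567890"));    // 567890
    }
}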
495
2,151
// Copyright 2014 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "third_party/blink/renderer/core/frame/device_single_window_event_controller.h" #include "third_party/blink/renderer/core/dom/document.h" #include "third_party/blink/renderer/core/dom/events/event.h" #include "third_party/blink/renderer/core/page/page.h" namespace blink { DeviceSingleWindowEventController::DeviceSingleWindowEventController( Document& document) : PlatformEventController(&document), needs_checking_null_events_(true), document_(document) { document.domWindow()->RegisterEventListenerObserver(this); } DeviceSingleWindowEventController::~DeviceSingleWindowEventController() = default; void DeviceSingleWindowEventController::DidUpdateData() { DispatchDeviceEvent(LastEvent()); } void DeviceSingleWindowEventController::DispatchDeviceEvent(Event* event) { if (!GetDocument().domWindow() || GetDocument().IsContextPaused() || GetDocument().IsContextDestroyed()) return; GetDocument().domWindow()->DispatchEvent(event); if (needs_checking_null_events_) { if (IsNullEvent(event)) StopUpdating(); else needs_checking_null_events_ = false; } } void DeviceSingleWindowEventController::DidAddEventListener( LocalDOMWindow* window, const AtomicString& event_type) { if (event_type != EventTypeName()) return; if (GetPage() && GetPage()->IsPageVisible()) StartUpdating(); has_event_listener_ = true; } void DeviceSingleWindowEventController::DidRemoveEventListener( LocalDOMWindow* window, const AtomicString& event_type) { if (event_type != EventTypeName() || window->HasEventListeners(EventTypeName())) return; StopUpdating(); has_event_listener_ = false; } void DeviceSingleWindowEventController::DidRemoveAllEventListeners( LocalDOMWindow*) { StopUpdating(); has_event_listener_ = false; } bool DeviceSingleWindowEventController::IsSameSecurityOriginAsMainFrame() const { if (!GetDocument().GetFrame() || !GetDocument().GetPage()) return false; if (GetDocument().GetFrame()->IsMainFrame()) return true; const SecurityOrigin* main_security_origin = GetDocument() .GetPage() ->MainFrame() ->GetSecurityContext() ->GetSecurityOrigin(); if (main_security_origin && GetDocument().GetSecurityOrigin()->CanAccess(main_security_origin)) return true; return false; } bool DeviceSingleWindowEventController::CheckPolicyFeatures( const Vector<mojom::FeaturePolicyFeature>& features) const { LocalFrame* frame = GetDocument().GetFrame(); if (!frame) return false; return std::all_of(features.begin(), features.end(), [frame](mojom::FeaturePolicyFeature feature) { return frame->IsFeatureEnabled(feature); }); } void DeviceSingleWindowEventController::Trace(blink::Visitor* visitor) { visitor->Trace(document_); PlatformEventController::Trace(visitor); } } // namespace blink
1,212
596
<reponame>zsb534923374/VulkanDemos<gh_stars>100-1000
#pragma once

#include "Common/Common.h"
#include "Common/Log.h"

#include "Application/GenericWindow.h"
#include "Application/GenericApplicationMessageHandler.h"

#include <Cocoa/Cocoa.h>

// ------------------------------ VulkanView ------------------------------

@interface VulkanView : NSView

@end

// ------------------------------ VulkanWindow ------------------------------

@interface VulkanWindow : NSWindow <NSWindowDelegate, NSDraggingDestination>

-(void)SetMessageHandler:(GenericApplicationMessageHandler*)messageHandler;

@end

// ------------------------------ AppDelegate ------------------------------

@interface AppDelegate : NSObject <NSApplicationDelegate>

- (void)setCMDLines:(const std::vector<std::string>&)cmdLines;

@end
222
458
// Copyright 2015-2021 Swim Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package swim.math; import swim.codec.Debug; import swim.codec.Format; import swim.codec.Output; import swim.structure.Kind; import swim.structure.Value; import swim.util.Murmur3; public class R3Vector implements Debug { public final double x; public final double y; public final double z; public R3Vector(double x, double y, double z) { this.x = x; this.y = y; this.z = z; } public final R3Vector plus(R3Vector that) { return new R3Vector(this.x + that.x, this.y + that.y, this.z + that.z); } public final R3Vector opposite() { return new R3Vector(-this.x, -this.y, -this.z); } public final R3Vector minus(R3Vector that) { return new R3Vector(this.x - that.x, this.y - that.y, this.z - that.z); } public final R3Vector times(double scalar) { return new R3Vector(this.x * scalar, this.y * scalar, this.z * scalar); } public Z3Vector transform(R3ToZ3Function f) { return new Z3Vector(f.transformX(this.x, this.y, this.z), f.transformY(this.x, this.y, this.z), f.transformZ(this.x, this.y, this.z)); } public Value toValue() { return R3Vector.form().mold(this).toValue(); } protected boolean canEqual(R3Vector that) { return true; } @Override public boolean equals(Object other) { if (this == other) { return true; } else if (other instanceof R3Vector) { final R3Vector that = (R3Vector) other; return that.canEqual(this) && this.x == that.x && this.y == that.y && this.z == that.z; } return false; } private static int hashSeed; @Override public int hashCode() { if (R3Vector.hashSeed == 0) { R3Vector.hashSeed = Murmur3.seed(R3Vector.class); } return Murmur3.mash(Murmur3.mix(Murmur3.mix(Murmur3.mix(R3Vector.hashSeed, Murmur3.hash(this.x)), Murmur3.hash(this.y)), Murmur3.hash(this.z))); } @Override public <T> Output<T> debug(Output<T> output) { output = output.write("R3Vector").write('.').write("of").write('(') .debug(this.x).write(", ").debug(this.y).write(", ").debug(this.z).write(')'); return output; } @Override public String toString() { return Format.debug(this); } private static R3Vector zero; public static R3Vector zero() { if (R3Vector.zero == null) { R3Vector.zero = new R3Vector(0.0, 0.0, 0.0); } return R3Vector.zero; } public static R3Vector of(double x, double y, double z) { return new R3Vector(x, y, z); } private static TensorForm<R3Vector> form; @Kind public static TensorForm<R3Vector> form() { if (R3Vector.form == null) { R3Vector.form = new R3VectorForm(); } return R3Vector.form; } }
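R3Vector above is a small immutable 3-vector with plus, minus, times, opposite, zero and a structural form. A brief usage sketch follows, relying only on the public API visible in the source; it is a fragment rather than a complete program, and the printed representation is whatever the debug formatter produces.

// Illustrative usage of the R3Vector API defined above (fragment, not a full program).
R3Vector a = R3Vector.of(1.0, 2.0, 3.0);
R3Vector b = R3Vector.of(-1.0, 0.5, 2.0);

R3Vector sum = a.plus(b);        // (0.0, 2.5, 5.0)
R3Vector scaled = a.times(2.0);  // (2.0, 4.0, 6.0)
R3Vector negated = b.opposite(); // (1.0, -0.5, -2.0)

System.out.println(sum);                                    // rendered via Format.debug(...)
System.out.println(scaled + " " + negated);
System.out.println(R3Vector.zero().equals(sum.minus(sum))); // true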
1,286
1,264
<filename>examples/basic/window.cpp
#include "window.h"

Window::Window(int x, int y, const char* title)
{
    GLFWwindow* window = glfwCreateWindow(x, y, title, NULL, NULL);
    this->window = window;
}

Window::~Window()
{
    glfwDestroyWindow(this->window);
}

void Window::Resize()
{
    GLint w, h;
    glfwGetWindowSize(this->window, &w, &h);
    glViewport(0, 0, w, h);
}

int Window::Close()
{
    return glfwWindowShouldClose(this->window);
}
174
973
<reponame>bcaglayan/pcap4j<filename>pcap4j-core/src/test/java/org/pcap4j/core/PcapHandleTest.java package org.pcap4j.core; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.util.List; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.FutureTask; import java.util.concurrent.TimeUnit; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.pcap4j.core.PcapHandle.PcapDirection; import org.pcap4j.core.PcapNetworkInterface.PromiscuousMode; import org.pcap4j.packet.Packet; import org.pcap4j.packet.UnknownPacket; import org.pcap4j.packet.namednumber.DataLinkType; import org.pcap4j.util.ByteArrays; @SuppressWarnings("javadoc") public class PcapHandleTest { private PcapHandle ph; @BeforeClass public static void setUpBeforeClass() throws Exception {} @AfterClass public static void tearDownAfterClass() throws Exception {} @Before public void setUp() throws Exception { ph = Pcaps.openOffline("src/test/resources/org/pcap4j/core/PcapHandleTest.pcap"); } @After public void tearDown() throws Exception { if (ph != null) { ph.close(); } } @Test public void testGetStats() throws Exception { if (ph != null) { ph.close(); } List<PcapNetworkInterface> nifs = Pcaps.findAllDevs(); if (nifs.isEmpty()) { ph = Pcaps.openDead(DataLinkType.EN10MB, 2048); try { ph.getStats(); fail("getStats on a pcap_open_dead pcap_t should throw a PcapNativeException."); } catch (PcapNativeException e) { assertEquals("Statistics aren't available from a pcap_open_dead pcap_t", e.getMessage()); } } else { try { ph = nifs.get(0).openLive(55555, PromiscuousMode.PROMISCUOUS, 100); PcapStat ps = ph.getStats(); assertNotNull(ps); } catch (PcapNativeException e) { assertTrue( "The exception should complain about permission to capture.", e.getMessage().contains("You don't have permission to capture on that device")); } } } @Test public void testListDatalinks() throws Exception { List<DataLinkType> list = ph.listDatalinks(); assertNotNull(list); assertEquals(1, list.size()); assertEquals(DataLinkType.EN10MB, list.get(0)); } @Test public void testSetDlt() throws Exception { ph.setDlt(ph.getDlt()); } @Test public void testGetTimestamp() throws Exception { assertNull(ph.getTimestamp()); ph.getNextPacket(); assertEquals(1434220771517L, ph.getTimestamp().getTime()); } @Test public void testGetTimestampEx() throws Exception { assertNull(ph.getTimestamp()); ph.getNextPacketEx(); assertEquals(1434220771517L, ph.getTimestamp().getTime()); } @Test public void testGetTimestampRaw() throws Exception { assertNull(ph.getTimestamp()); ph.getNextRawPacket(); assertEquals(1434220771517L, ph.getTimestamp().getTime()); } @Test public void testGetTimestampLoop() throws Exception { assertNull(ph.getTimestamp()); ph.loop( 1, new PacketListener() { @Override public void gotPacket(Packet packet) { assertEquals(1434220771517L, ph.getTimestamp().getTime()); } }); } @Test public void testGetTimestampLoopRaw() throws Exception { assertNull(ph.getTimestamp()); ph.loop( 1, new RawPacketListener() { @Override public void gotPacket(byte[] packet) { assertEquals(1434220771517L, ph.getTimestamp().getTime()); } }); } @Test public void testGetTimestampRawEx() throws Exception { assertNull(ph.getTimestamp()); 
ph.getNextRawPacketEx(); assertEquals(1434220771517L, ph.getTimestamp().getTime()); } @Test public void testGetOriginalLength() throws Exception { assertNull(ph.getOriginalLength()); Packet packet = ph.getNextPacket(); assertEquals(new Integer(74), ph.getOriginalLength()); assertEquals(packet.length(), ph.getOriginalLength().intValue()); } @Test public void testGetOriginalLengthEx() throws Exception { assertNull(ph.getOriginalLength()); Packet packet = ph.getNextPacketEx(); assertEquals(new Integer(74), ph.getOriginalLength()); assertEquals(packet.length(), ph.getOriginalLength().intValue()); } @Test public void testGetOriginalLengthRaw() throws Exception { assertNull(ph.getOriginalLength()); byte[] packet = ph.getNextRawPacket(); assertEquals(new Integer(74), ph.getOriginalLength()); assertEquals(packet.length, ph.getOriginalLength().intValue()); } @Test public void testGetOriginalLengthRawEx() throws Exception { assertNull(ph.getOriginalLength()); byte[] packet = ph.getNextRawPacketEx(); assertEquals(new Integer(74), ph.getOriginalLength()); assertEquals(packet.length, ph.getOriginalLength().intValue()); } @Test public void testGetOriginalLengthLoop() throws Exception { assertNull(ph.getOriginalLength()); ph.loop( 1, new PacketListener() { @Override public void gotPacket(Packet packet) { assertEquals(new Integer(74), ph.getOriginalLength()); assertEquals(packet.length(), ph.getOriginalLength().intValue()); } }); } @Test public void testGetOriginalLengthLoopRaw() throws Exception { assertNull(ph.getOriginalLength()); ph.loop( 1, new RawPacketListener() { @Override public void gotPacket(byte[] packet) { assertEquals(new Integer(74), ph.getOriginalLength()); assertEquals(packet.length, ph.getOriginalLength().intValue()); } }); } @Test public void testSetDirection() throws Exception { try { ph.setDirection(PcapDirection.OUT); fail(); } catch (PcapNativeException e) { assertTrue(e.getMessage().startsWith("Failed to set direction:")); } } // moved these tests to pcap4j-packetfactory-static // to remove the dependency on pcap4j-packetfactory-static from pcap4j-core // @Test // public void testSetDirection() throws Exception { // if (System.getenv("TRAVIS") != null) { // // run only on Travis CI // PcapNetworkInterface nif = Pcaps.getDevByName("any"); // PcapHandle handle = nif.openLive(65536, PromiscuousMode.PROMISCUOUS, 10); // handle.setDirection(PcapDirection.OUT); // handle.setFilter("icmp", BpfCompileMode.OPTIMIZE); // // ProcessBuilder pb = new ProcessBuilder("/bin/ping", "www.google.com"); // Process process = pb.start(); // // final List<Packet> packets = new ArrayList<Packet>(); // handle.loop( // 3, // new PacketListener() { // @Override // public void gotPacket(Packet packet) { // packets.add(packet); // } // }); // handle.close(); // process.destroy(); // // assertEquals(3, packets.size()); // assertTrue(packets.get(0).contains(IcmpV4EchoPacket.class)); // assertTrue(packets.get(1).contains(IcmpV4EchoPacket.class)); // assertTrue(packets.get(2).contains(IcmpV4EchoPacket.class)); // } else { // try { // ph.setDirection(PcapDirection.OUT); // fail(); // } catch (PcapNativeException e) { // assertTrue(e.getMessage().startsWith("Failed to set direction:")); // } // } // } // // @Test // public void testDirection() throws Exception { // if (System.getenv("TRAVIS") != null) { // // run only on Travis CI // PcapHandle handle = // new PcapHandle.Builder("any") // .direction(PcapDirection.IN) // .promiscuousMode(PromiscuousMode.PROMISCUOUS) // .snaplen(65536) // .timeoutMillis(10) // 
.build(); // handle.setFilter("icmp", BpfCompileMode.OPTIMIZE); // // ProcessBuilder pb = new ProcessBuilder("/bin/ping", "www.google.com"); // Process process = pb.start(); // // final List<Packet> packets = new ArrayList<Packet>(); // handle.loop( // 3, // new PacketListener() { // @Override // public void gotPacket(Packet packet) { // packets.add(packet); // } // }); // handle.close(); // process.destroy(); // // assertEquals(3, packets.size()); // assertTrue(packets.get(0).contains(IcmpV4EchoReplyPacket.class)); // assertTrue(packets.get(1).contains(IcmpV4EchoReplyPacket.class)); // assertTrue(packets.get(2).contains(IcmpV4EchoReplyPacket.class)); // } // } // // @Test // public void testSetFilterIcmp() throws Exception { // PcapHandle handle = null; // try { // handle = Pcaps.openOffline("src/test/resources/org/pcap4j/core/udp_tcp_icmp.pcap"); // handle.setFilter("icmp", BpfCompileMode.OPTIMIZE); // int count = 0; // try { // while (true) { // Packet p = handle.getNextPacketEx(); // assertNotNull(p.get(IcmpV4CommonPacket.class)); // count++; // } // } catch (EOFException e) { // } // assertEquals(1, count); // } finally { // if (handle != null) { // handle.close(); // } // } // } // // @Test // public void testSetFilterUdp() throws Exception { // PcapHandle handle = null; // BpfProgram prog = null; // try { // handle = Pcaps.openOffline("src/test/resources/org/pcap4j/core/udp_tcp_icmp.pcap"); // prog = handle.compileFilter("udp", BpfCompileMode.OPTIMIZE, // PcapHandle.PCAP_NETMASK_UNKNOWN); // handle.setFilter(prog); // int count = 0; // try { // while (true) { // Packet p = handle.getNextPacketEx(); // assertNotNull(p.get(UdpPacket.class)); // count++; // } // } catch (EOFException e) { // } // assertEquals(1, count); // } finally { // if (handle != null) { // handle.close(); // } // if (prog != null) { // prog.free(); // } // } // } @Test public void testSendPacket() throws Exception { if (System.getenv("TRAVIS") != null) { // run only on Travis CI PcapNetworkInterface nif = Pcaps.getDevByName("lo"); final PcapHandle handle = nif.openLive(65536, PromiscuousMode.PROMISCUOUS, 10); byte[] sendingRawPacket = new byte[50]; sendingRawPacket[0] = 1; sendingRawPacket[1] = 2; sendingRawPacket[2] = 3; sendingRawPacket[3] = 4; sendingRawPacket[4] = 5; Packet sendingPacket = UnknownPacket.newPacket(sendingRawPacket, 0, sendingRawPacket.length); ExecutorService pool = Executors.newSingleThreadExecutor(); final byte[] result = new byte[sendingRawPacket.length]; final FutureTask<byte[]> future = new FutureTask<byte[]>( new Runnable() { @Override public void run() {} }, result); pool.execute( new Runnable() { @Override public void run() { try { handle.loop( -1, new RawPacketListener() { @Override public void gotPacket(byte[] p) { if (p[0] == 1 && p[1] == 2 && p[2] == 3 && p[3] == 4 && p[4] == 5) { assertEquals(result.length, p.length); System.arraycopy(p, 0, result, 0, result.length); future.run(); } } }); } catch (PcapNativeException e) { } catch (InterruptedException e) { } catch (NotOpenException e) { } } }); Thread.sleep(1000); handle.sendPacket(sendingPacket); future.get(5, TimeUnit.SECONDS); handle.breakLoop(); handle.close(); assertArrayEquals(sendingRawPacket, result); } } @Test public void testSendPacketWithLen() throws Exception { if (System.getenv("TRAVIS") != null) { // run only on Travis CI PcapNetworkInterface nif = Pcaps.getDevByName("lo"); final PcapHandle handle = nif.openLive(65536, PromiscuousMode.PROMISCUOUS, 10); byte[] sendingRawPacket = new byte[100]; sendingRawPacket[0] = 1; 
sendingRawPacket[1] = 2; sendingRawPacket[2] = 3; sendingRawPacket[3] = 4; sendingRawPacket[4] = 5; ExecutorService pool = Executors.newSingleThreadExecutor(); final byte[] result = new byte[50]; final FutureTask<byte[]> future = new FutureTask<byte[]>( new Runnable() { @Override public void run() {} }, result); pool.execute( new Runnable() { @Override public void run() { try { handle.loop( -1, new RawPacketListener() { @Override public void gotPacket(byte[] p) { if (p[0] == 1 && p[1] == 2 && p[2] == 3 && p[3] == 4 && p[4] == 5) { assertEquals(result.length, p.length); System.arraycopy(p, 0, result, 0, result.length); future.run(); } } }); } catch (PcapNativeException e) { } catch (InterruptedException e) { } catch (NotOpenException e) { } } }); Thread.sleep(1000); handle.sendPacket(sendingRawPacket, result.length); future.get(5, TimeUnit.SECONDS); handle.breakLoop(); handle.close(); assertArrayEquals(ByteArrays.getSubArray(sendingRawPacket, 0, result.length), result); } } @Test public void testImmediateMode() throws Exception { if (System.getenv("TRAVIS") != null) { // run only on Travis CI PcapHandle handle = new PcapHandle.Builder("any") .immediateMode(true) .promiscuousMode(PromiscuousMode.PROMISCUOUS) .snaplen(65536) .timeoutMillis(Integer.MAX_VALUE) .build(); ProcessBuilder pb = new ProcessBuilder("/bin/ping", "www.google.com"); Process process = pb.start(); handle.loop( 3, new PacketListener() { @Override public void gotPacket(Packet packet) { // Do nothing. } }); handle.close(); process.destroy(); } } }
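The tests above read PcapHandleTest.pcap offline and inspect timestamps, original lengths, and packet sending. A condensed sketch of the offline-read pattern they rely on is below; the capture path is the one from the test resources, every call shown (Pcaps.openOffline, getNextPacket, getTimestamp, getOriginalLength, close) already appears in the test, and handling of the checked exceptions those calls declare is left to the caller.

// Minimal offline read, following the pattern used in setUp() and the timestamp/length tests above.
PcapHandle handle = Pcaps.openOffline("src/test/resources/org/pcap4j/core/PcapHandleTest.pcap");
try {
    Packet packet = handle.getNextPacket();          // first packet in the capture
    System.out.println(handle.getTimestamp());       // capture timestamp of that packet
    System.out.println(handle.getOriginalLength());  // original (on-wire) length
    System.out.println(packet.length());             // length of the decoded packet
} finally {
    handle.close();
}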
6,819
28,056
// Copyright 2018 <NAME> // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.alibaba.fastjson.util; import java.math.BigInteger; /** * An implementation of Ryu for double. */ public final class RyuDouble { private static final int[][] POW5_SPLIT = new int[326][4]; private static final int[][] POW5_INV_SPLIT = new int[291][4]; static { BigInteger mask = BigInteger.ONE.shiftLeft(31).subtract(BigInteger.ONE); BigInteger invMask = BigInteger.ONE.shiftLeft(31).subtract(BigInteger.ONE); for (int i = 0; i < 326; i++) { BigInteger pow = BigInteger.valueOf(5).pow(i); int pow5len = pow.bitLength(); int expectedPow5Bits = i == 0 ? 1 : (int) ((i * 23219280L + 10000000L - 1) / 10000000L); if (expectedPow5Bits != pow5len) { throw new IllegalStateException(pow5len + " != " + expectedPow5Bits); } if (i < POW5_SPLIT.length) { for (int j = 0; j < 4; j++) { POW5_SPLIT[i][j] = pow .shiftRight(pow5len - 121 + (3 - j) * 31) .and(mask) .intValue(); } } if (i < POW5_INV_SPLIT.length) { // We want floor(log_2 5^q) here, which is pow5len - 1. int j = pow5len + 121; BigInteger inv = BigInteger.ONE .shiftLeft(j) .divide(pow) .add(BigInteger.ONE); for (int k = 0; k < 4; k++) { if (k == 0) { POW5_INV_SPLIT[i][k] = inv .shiftRight((3 - k) * 31) .intValue(); } else { POW5_INV_SPLIT[i][k] = inv .shiftRight((3 - k) * 31) .and(invMask) .intValue(); } } } } } public static String toString(double value) { char[] result = new char[24]; int len = toString(value, result, 0); return new String(result, 0, len); } public static int toString(double value, char[] result, int off) { final long DOUBLE_MANTISSA_MASK = 4503599627370495L; // (1L << 52) - 1; final int DOUBLE_EXPONENT_MASK = 2047; // (1 << 11) - 1; final int DOUBLE_EXPONENT_BIAS = 1023; // (1 << (11 - 1)) - 1; final long LOG10_5_NUMERATOR = 6989700L; // (long) (10000000L * Math.log10(5)); final long LOG10_2_NUMERATOR = 3010299L; // (long) (10000000L * Math.log10(2)); // Step 1: Decode the floating point number, and unify normalized and subnormal cases. // First, handle all the trivial cases. 
int index = off; if (Double.isNaN(value)) { result[index++] = 'N'; result[index++] = 'a'; result[index++] = 'N'; return index - off; } if (value == Double.POSITIVE_INFINITY) { result[index++] = 'I'; result[index++] = 'n'; result[index++] = 'f'; result[index++] = 'i'; result[index++] = 'n'; result[index++] = 'i'; result[index++] = 't'; result[index++] = 'y'; return index - off; } if (value == Double.NEGATIVE_INFINITY) { result[index++] = '-'; result[index++] = 'I'; result[index++] = 'n'; result[index++] = 'f'; result[index++] = 'i'; result[index++] = 'n'; result[index++] = 'i'; result[index++] = 't'; result[index++] = 'y'; return index - off; } long bits = Double.doubleToLongBits(value); if (bits == 0) { result[index++] = '0'; result[index++] = '.'; result[index++] = '0'; return index - off; } if (bits == 0x8000000000000000L) { result[index++] = '-'; result[index++] = '0'; result[index++] = '.'; result[index++] = '0'; return index - off; } final int DOUBLE_MANTISSA_BITS = 52; // Otherwise extract the mantissa and exponent bits and run the full algorithm. int ieeeExponent = (int) ((bits >>> DOUBLE_MANTISSA_BITS) & DOUBLE_EXPONENT_MASK); long ieeeMantissa = bits & DOUBLE_MANTISSA_MASK; int e2; long m2; if (ieeeExponent == 0) { // Denormal number - no implicit leading 1, and the exponent is 1, not 0. e2 = 1 - DOUBLE_EXPONENT_BIAS - DOUBLE_MANTISSA_BITS; m2 = ieeeMantissa; } else { // Add implicit leading 1. e2 = ieeeExponent - DOUBLE_EXPONENT_BIAS - DOUBLE_MANTISSA_BITS; m2 = ieeeMantissa | (1L << DOUBLE_MANTISSA_BITS); } boolean sign = bits < 0; // Step 2: Determine the interval of legal decimal representations. boolean even = (m2 & 1) == 0; final long mv = 4 * m2; final long mp = 4 * m2 + 2; final int mmShift = ((m2 != (1L << DOUBLE_MANTISSA_BITS)) || (ieeeExponent <= 1)) ? 1 : 0; final long mm = 4 * m2 - 1 - mmShift; e2 -= 2; // Step 3: Convert to a decimal power base using 128-bit arithmetic. // -1077 = 1 - 1023 - 53 - 2 <= e_2 - 2 <= 2046 - 1023 - 53 - 2 = 968 long dv, dp, dm; final int e10; boolean dmIsTrailingZeros = false, dvIsTrailingZeros = false; if (e2 >= 0) { final int q = Math.max(0, (int) (e2 * LOG10_2_NUMERATOR / 10000000L) - 1); // k = constant + floor(log_2(5^q)) // q == 0 ? 1 : (int) ((q * 23219280L + 10000000L - 1) / 10000000L) final int k = 122 + (q == 0 ? 
1 : (int) ((q * 23219280L + 10000000L - 1) / 10000000L)) - 1; final int i = -e2 + q + k; int actualShift = i - 3 * 31 - 21; if (actualShift < 0) { throw new IllegalArgumentException("" + actualShift); } final int[] ints = POW5_INV_SPLIT[q]; { long mHigh = mv >>> 31; long mLow = mv & 0x7fffffff; long bits13 = mHigh * ints[0]; long bits03 = mLow * ints[0]; long bits12 = mHigh * ints[1]; long bits02 = mLow * ints[1]; long bits11 = mHigh * ints[2]; long bits01 = mLow * ints[2]; long bits10 = mHigh * ints[3]; long bits00 = mLow * ints[3]; dv = (((((( ((bits00 >>> 31) + bits01 + bits10) >>> 31) + bits02 + bits11) >>> 31) + bits03 + bits12) >>> 21) + (bits13 << 10)) >>> actualShift; } { long mHigh = mp >>> 31; long mLow = mp & 0x7fffffff; long bits13 = mHigh * ints[0]; long bits03 = mLow * ints[0]; long bits12 = mHigh * ints[1]; long bits02 = mLow * ints[1]; long bits11 = mHigh * ints[2]; long bits01 = mLow * ints[2]; long bits10 = mHigh * ints[3]; long bits00 = mLow * ints[3]; dp = (((((( ((bits00 >>> 31) + bits01 + bits10) >>> 31) + bits02 + bits11) >>> 31) + bits03 + bits12) >>> 21) + (bits13 << 10)) >>> actualShift; } { long mHigh = mm >>> 31; long mLow = mm & 0x7fffffff; long bits13 = mHigh * ints[0]; long bits03 = mLow * ints[0]; long bits12 = mHigh * ints[1]; long bits02 = mLow * ints[1]; long bits11 = mHigh * ints[2]; long bits01 = mLow * ints[2]; long bits10 = mHigh * ints[3]; long bits00 = mLow * ints[3]; dm = (((((( ((bits00 >>> 31) + bits01 + bits10) >>> 31) + bits02 + bits11) >>> 31) + bits03 + bits12) >>> 21) + (bits13 << 10)) >>> actualShift; } e10 = q; if (q <= 21) { if (mv % 5 == 0) { int pow5Factor_mv; { long v = mv; if ((v % 5) != 0) { pow5Factor_mv = 0; } else if ((v % 25) != 0) { pow5Factor_mv = 1; } else if ((v % 125) != 0) { pow5Factor_mv = 2; } else if ((v % 625) != 0) { pow5Factor_mv = 3; } else { pow5Factor_mv = 4; v /= 625; while (v > 0) { if (v % 5 != 0) { break; } v /= 5; pow5Factor_mv++; } } } dvIsTrailingZeros = pow5Factor_mv >= q; } else if (even) { int pow5Factor_mm; { long v = mm; if ((v % 5) != 0) { pow5Factor_mm = 0; } else if ((v % 25) != 0) { pow5Factor_mm = 1; } else if ((v % 125) != 0) { pow5Factor_mm = 2; } else if ((v % 625) != 0) { pow5Factor_mm = 3; } else { pow5Factor_mm = 4; v /= 625; while (v > 0) { if (v % 5 != 0) { break; } v /= 5; pow5Factor_mm++; } } } dmIsTrailingZeros = pow5Factor_mm >= q; // } else { int pow5Factor_mp; { long v = mp; if ((v % 5) != 0) { pow5Factor_mp = 0; } else if ((v % 25) != 0) { pow5Factor_mp = 1; } else if ((v % 125) != 0) { pow5Factor_mp = 2; } else if ((v % 625) != 0) { pow5Factor_mp = 3; } else { pow5Factor_mp = 4; v /= 625; while (v > 0) { if (v % 5 != 0) { break; } v /= 5; pow5Factor_mp++; } } } if (pow5Factor_mp >= q) { dp--; } } } } else { final int q = Math.max(0, (int) (-e2 * LOG10_5_NUMERATOR / 10000000L) - 1); final int i = -e2 - q; final int k = (i == 0 ? 
1 : (int) ((i * 23219280L + 10000000L - 1) / 10000000L)) - 121; final int j = q - k; int actualShift = j - 3 * 31 - 21; if (actualShift < 0) { throw new IllegalArgumentException("" + actualShift); } int[] ints = POW5_SPLIT[i]; { long mHigh = mv >>> 31; long mLow = mv & 0x7fffffff; long bits13 = mHigh * ints[0]; // 124 long bits03 = mLow * ints[0]; // 93 long bits12 = mHigh * ints[1]; // 93 long bits02 = mLow * ints[1]; // 62 long bits11 = mHigh * ints[2]; // 62 long bits01 = mLow * ints[2]; // 31 long bits10 = mHigh * ints[3]; // 31 long bits00 = mLow * ints[3]; // 0 dv = (((((( ((bits00 >>> 31) + bits01 + bits10) >>> 31) + bits02 + bits11) >>> 31) + bits03 + bits12) >>> 21) + (bits13 << 10)) >>> actualShift; } { long mHigh = mp >>> 31; long mLow = mp & 0x7fffffff; long bits13 = mHigh * ints[0]; // 124 long bits03 = mLow * ints[0]; // 93 long bits12 = mHigh * ints[1]; // 93 long bits02 = mLow * ints[1]; // 62 long bits11 = mHigh * ints[2]; // 62 long bits01 = mLow * ints[2]; // 31 long bits10 = mHigh * ints[3]; // 31 long bits00 = mLow * ints[3]; // 0 dp = (((((( ((bits00 >>> 31) + bits01 + bits10) >>> 31) + bits02 + bits11) >>> 31) + bits03 + bits12) >>> 21) + (bits13 << 10)) >>> actualShift; } { long mHigh = mm >>> 31; long mLow = mm & 0x7fffffff; long bits13 = mHigh * ints[0]; // 124 long bits03 = mLow * ints[0]; // 93 long bits12 = mHigh * ints[1]; // 93 long bits02 = mLow * ints[1]; // 62 long bits11 = mHigh * ints[2]; // 62 long bits01 = mLow * ints[2]; // 31 long bits10 = mHigh * ints[3]; // 31 long bits00 = mLow * ints[3]; // 0 dm = (((((( ((bits00 >>> 31) + bits01 + bits10) >>> 31) + bits02 + bits11) >>> 31) + bits03 + bits12) >>> 21) + (bits13 << 10)) >>> actualShift; } e10 = q + e2; if (q <= 1) { dvIsTrailingZeros = true; if (even) { dmIsTrailingZeros = mmShift == 1; } else { dp--; } } else if (q < 63) { dvIsTrailingZeros = (mv & ((1L << (q - 1)) - 1)) == 0; } } // Step 4: Find the shortest decimal representation in the interval of legal representations. // // We do some extra work here in order to follow Float/Double.toString semantics. In particular, // that requires printing in scientific format if and only if the exponent is between -3 and 7, // and it requires printing at least two decimal digits. // // Above, we moved the decimal dot all the way to the right, so now we need to count digits to // figure out the correct exponent for scientific notation. final int vplength; // = decimalLength(dp); if (dp >= 1000000000000000000L) { vplength= 19; } else if (dp >= 100000000000000000L) { vplength= 18; } else if (dp >= 10000000000000000L) { vplength = 17; } else if (dp >= 1000000000000000L) { vplength = 16; } else if (dp >= 100000000000000L) { vplength = 15; } else if (dp >= 10000000000000L) { vplength = 14; } else if (dp >= 1000000000000L) { vplength = 13; } else if (dp >= 100000000000L) { vplength = 12; } else if (dp >= 10000000000L) { vplength = 11; } else if (dp >= 1000000000L) { vplength = 10; } else if (dp >= 100000000L) { vplength = 9; } else if (dp >= 10000000L) { vplength = 8; } else if (dp >= 1000000L) { vplength = 7; } else if (dp >= 100000L) { vplength = 6; } else if (dp >= 10000L) { vplength = 5; } else if (dp >= 1000L) { vplength = 4; } else if (dp >= 100L) { vplength = 3; } else if (dp >= 10L) { vplength = 2; } else { vplength = 1; } int exp = e10 + vplength - 1; // Double.toString semantics requires using scientific notation if and only if outside this range. 
boolean scientificNotation = !((exp >= -3) && (exp < 7)); int removed = 0; int lastRemovedDigit = 0; long output; if (dmIsTrailingZeros || dvIsTrailingZeros) { while (dp / 10 > dm / 10) { if ((dp < 100) && scientificNotation) { // Double.toString semantics requires printing at least two digits. break; } dmIsTrailingZeros &= dm % 10 == 0; dvIsTrailingZeros &= lastRemovedDigit == 0; lastRemovedDigit = (int) (dv % 10); dp /= 10; dv /= 10; dm /= 10; removed++; } if (dmIsTrailingZeros && even) { while (dm % 10 == 0) { if ((dp < 100) && scientificNotation) { // Double.toString semantics requires printing at least two digits. break; } dvIsTrailingZeros &= lastRemovedDigit == 0; lastRemovedDigit = (int) (dv % 10); dp /= 10; dv /= 10; dm /= 10; removed++; } } if (dvIsTrailingZeros && (lastRemovedDigit == 5) && (dv % 2 == 0)) { // Round even if the exact numbers is .....50..0. lastRemovedDigit = 4; } output = dv + ((dv == dm && !(dmIsTrailingZeros && even)) || (lastRemovedDigit >= 5) ? 1 : 0); } else { while (dp / 10 > dm / 10) { if ((dp < 100) && scientificNotation) { // Double.toString semantics requires printing at least two digits. break; } lastRemovedDigit = (int) (dv % 10); dp /= 10; dv /= 10; dm /= 10; removed++; } output = dv + ((dv == dm || (lastRemovedDigit >= 5)) ? 1 : 0); } int olength = vplength - removed; // Step 5: Print the decimal representation. // We follow Double.toString semantics here. if (sign) { result[index++] = '-'; } // Values in the interval [1E-3, 1E7) are special. if (scientificNotation) { // Print in the format x.xxxxxE-yy. for (int i = 0; i < olength - 1; i++) { int c = (int) (output % 10); output /= 10; result[index + olength - i] = (char) ('0' + c); } result[index] = (char) ('0' + output % 10); result[index + 1] = '.'; index += olength + 1; if (olength == 1) { result[index++] = '0'; } // Print 'E', the exponent sign, and the exponent, which has at most three digits. result[index++] = 'E'; if (exp < 0) { result[index++] = '-'; exp = -exp; } if (exp >= 100) { result[index++] = (char) ('0' + exp / 100); exp %= 100; result[index++] = (char) ('0' + exp / 10); } else if (exp >= 10) { result[index++] = (char) ('0' + exp / 10); } result[index++] = (char) ('0' + exp % 10); return index - off; } else { // Otherwise follow the Java spec for values in the interval [1E-3, 1E7). if (exp < 0) { // Decimal dot is before any of the digits. result[index++] = '0'; result[index++] = '.'; for (int i = -1; i > exp; i--) { result[index++] = '0'; } int current = index; for (int i = 0; i < olength; i++) { result[current + olength - i - 1] = (char) ('0' + output % 10); output /= 10; index++; } } else if (exp + 1 >= olength) { // Decimal dot is after any of the digits. for (int i = 0; i < olength; i++) { result[index + olength - i - 1] = (char) ('0' + output % 10); output /= 10; } index += olength; for (int i = olength; i < exp + 1; i++) { result[index++] = '0'; } result[index++] = '.'; result[index++] = '0'; } else { // Decimal dot is somewhere between the digits. int current = index + 1; for (int i = 0; i < olength; i++) { if (olength - i - 1 == exp) { result[current + olength - i - 1] = '.'; current--; } result[current + olength - i - 1] = (char) ('0' + output % 10); output /= 10; } index += olength + 1; } return index - off; } } }
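// ---------------------------------------------------------------------------------------------
// Editor's note (hypothetical addition, not part of the original source): a minimal sketch
// illustrating the Double.toString notation rules the comments above refer to -- plain decimal
// form is used only while the decimal exponent stays in [-3, 7), and at least two digits are
// always printed. The class name below is invented for illustration; the sketch is kept inside
// a comment so the original file's structure is untouched.
//
// class NotationRuleSketch {
//     public static void main(String[] args) {
//         System.out.println(Double.toString(0.001));     // "0.001"      exponent -3: plain form
//         System.out.println(Double.toString(1.0E-4));    // "1.0E-4"     exponent -4: scientific
//         System.out.println(Double.toString(9999999.0)); // "9999999.0"  exponent  6: plain form
//         System.out.println(Double.toString(1.0E7));     // "1.0E7"      exponent  7: scientific
//         System.out.println(Double.toString(1.0));       // "1.0"        at least two digits
//     }
// }
// ---------------------------------------------------------------------------------------------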
13,508
884
/* * Copyright 2014 - 2021 Blazebit. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.blazebit.persistence.testsuite.entity; import java.io.Serializable; import java.util.HashSet; import java.util.Set; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.FetchType; import javax.persistence.Id; import javax.persistence.JoinColumn; import javax.persistence.JoinTable; import javax.persistence.OneToMany; import javax.persistence.Table; @Entity @Table(name = "emb_tst_ent_cont") public class EmbeddableTestEntityContainer implements Serializable { private static final long serialVersionUID = 1L; private Long id; private Set<EmbeddableTestEntity> embeddableTestEntities = new HashSet<EmbeddableTestEntity>(); @Id @Column(name = "id") public Long getId() { return id; } public void setId(Long id) { this.id = id; } @OneToMany(fetch = FetchType.LAZY) @JoinTable(name = "emb_tst_ent_cont_entities", joinColumns = @JoinColumn(name = "tst_ent_cont_id", referencedColumnName = "id"), inverseJoinColumns = { @JoinColumn(name = "tst_ent_key", referencedColumnName = "test_key"), @JoinColumn(name = "tst_ent_value", referencedColumnName = "test_value") } ) public Set<EmbeddableTestEntity> getEmbeddableTestEntities() { return embeddableTestEntities; } public void setEmbeddableTestEntities(Set<EmbeddableTestEntity> embeddableTestEntities) { this.embeddableTestEntities = embeddableTestEntities; } }
769
2,180
package com.xiaojukeji.kafka.manager.monitor.component.n9e.entry; import java.util.List; /** * @author zengqiao * @date 20/10/18 */ public class N9eStrategyExpression { private String metric; private String func; private String eopt; private Integer threshold; private List<Integer> params; public String getMetric() { return metric; } public void setMetric(String metric) { this.metric = metric; } public String getFunc() { return func; } public void setFunc(String func) { this.func = func; } public String getEopt() { return eopt; } public void setEopt(String eopt) { this.eopt = eopt; } public Integer getThreshold() { return threshold; } public void setThreshold(Integer threshold) { this.threshold = threshold; } public List<Integer> getParams() { return params; } public void setParams(List<Integer> params) { this.params = params; } @Override public String toString() { return "N9eStrategyExpression{" + "metric='" + metric + '\'' + ", func='" + func + '\'' + ", eopt='" + eopt + '\'' + ", threshold=" + threshold + ", params=" + params + '}'; } }
611
1,132
#include "gb-include.h" #include "DataFeed.h" DataFeed::DataFeed() : MetaContainer() { m_customerId = -1; m_id = -1; m_passcodeLen = 0; m_passcode[0] = '\0'; m_isLocked = false; } DataFeed::~DataFeed() { } void DataFeed::setUrl ( char *name, int32_t nameLen ) { if (!name || nameLen == 0) return; if (nameLen < 11 || strncasecmp(name, "datafeed://", 11) != 0) { char tempUrl[MAX_USERNAMELEN+1]; setstr(tempUrl, MAX_USERNAMELEN-12, name, nameLen); m_urlLen = sprintf(m_url, "datafeed://%s/", tempUrl); } else m_urlLen = setstr(m_url, MAX_USERNAMELEN, name, nameLen); // base name int32_t i; for (i = 0; m_url[i+11] != '/'; i++) m_baseName[i] = m_url[i+11]; m_baseName[i] = '\0'; m_baseNameLen = i; } void DataFeed::set ( int32_t creationTime, char *dataFeedUrl, int32_t dataFeedUrlLen, char *passcode, int32_t passcodeLen, bool isActive, bool isLocked ) { setUrl(dataFeedUrl, dataFeedUrlLen); m_passcodeLen = setstr(m_passcode, MAX_PASSCODELEN, passcode, passcodeLen); // flags m_isActive = isActive; m_isLocked = isLocked; // creation time m_creationTime = creationTime; } void DataFeed::parse ( char *dataFeedPage, int32_t dataFeedPageLen ) { // use Xml Class to parse up the page Xml xml; xml.set ( csUTF8, dataFeedPage, dataFeedPageLen, false, 0, false, TITLEREC_CURRENT_VERSION ); // get the nodes int32_t numNodes = xml.getNumNodes(); XmlNode *nodes = xml.getNodes(); // to count the tiers, result levels, and level costs int32_t currTier = 0; int32_t currResultLevel = 0; int32_t currLevelCost = 0; // pull out the keywords for the data feed for (int32_t i = 0; i < numNodes; i++) { // skip if this isn't a meta tag, shouldn't happen if (nodes[i].m_nodeId != 68) continue; // get the meta tag name //int32_t tagLen; //char *tag = xml.getString(i, "name", &tagLen); int32_t ucTagLen; char *ucTag = xml.getString(i, "name", &ucTagLen); char tag[256]; int32_t tagLen = utf16ToLatin1 ( tag, 256, (UChar*)ucTag, ucTagLen>>1 ); // skip if empty if (!tag || tagLen <= 0) continue; // get the content int32_t ucConLen; char *ucCon = xml.getString(i, "content", &ucConLen); char con[1024]; int32_t conLen = utf16ToLatin1 ( con, 1024, (UChar*)ucCon, ucConLen>>1 ); if (!con || conLen <= 0) continue; // match the meta tag to its local var and copy content if (tagLen == 10 && strncasecmp(tag, "customerid", 10) == 0) m_customerId = atoll(con); else if (tagLen == 11 && strncasecmp(tag, "datafeedurl", 11) == 0) setUrl(con, conLen); else if (tagLen == 8 && strncasecmp(tag, "passcode", 8) == 0) m_passcodeLen = setstr(m_passcode, MAX_PASSCODELEN, con, conLen); else if (tagLen == 6 && strncasecmp(tag, "status", 6) == 0) m_isActive = (bool)atoi(con); else if (tagLen == 6 && strncasecmp(tag, "locked", 6) == 0) m_isLocked = (bool)atoi(con); else if (tagLen == 14 && strncasecmp(tag, "dfcreationtime", 14) == 0) m_creationTime = atol(con); else if (tagLen == 8 && strncasecmp(tag, "numtiers", 8) == 0) m_priceTable.m_numTiers = atol(con); else if (tagLen == 15 && strncasecmp(tag, "numresultlevels", 15) == 0) m_priceTable.m_numResultLevels = atol(con); else if (tagLen == 10 && strncasecmp(tag, "monthlyfee", 10) == 0) m_priceTable.m_monthlyFee = atol(con); else if (tagLen == 7 && strncasecmp(tag, "tiermax", 7) == 0) { m_priceTable.m_tierMax[currTier] = (uint32_t)atol(con); currTier++; } else if (tagLen == 11 && strncasecmp(tag, "resultlevel", 11) == 0) { m_priceTable.m_resultLevels[currResultLevel] = (uint32_t)atol(con); currResultLevel++; } else if (tagLen == 9 && strncasecmp(tag, "levelcost", 9) == 0) { m_priceTable.m_levelCosts[currLevelCost] = 
(uint32_t)atol(con); currLevelCost++; } else log(LOG_INFO, "datafeed: Invalid Meta Tag Parsed [%"INT32"]:" " %s", tagLen, tag); } } int32_t DataFeed::buildPage ( char *page ) { // fill the page buffer with the data feed page char *p = page; p += sprintf(p, "<meta name=customerid content=\"%"INT64"\">\n" "<meta name=datafeedurl content=\"%s\">\n" "<meta name=passcode content=\"%s\">\n" "<meta name=status content=\"%d\">\n" "<meta name=locked content=\"%d\">\n" "<meta name=dfcreationtime content=\"%"INT32"\">\n", m_customerId, m_url, m_passcode, m_isActive, m_isLocked, m_creationTime ); // write the pricetable p += sprintf(p, "<meta name=numtiers content=\"%"INT32"\">\n" "<meta name=numresultlevels content=\"%"INT32"\">\n" "<meta name=monthlyfee content=\"%"INT32"\">\n", m_priceTable.m_numTiers, m_priceTable.m_numResultLevels, m_priceTable.m_monthlyFee ); // write the tiers for (int32_t i = 0; i < m_priceTable.m_numTiers; i++) p += sprintf(p, "<meta name=tiermax content=\"%"UINT32"\">\n", m_priceTable.m_tierMax[i] ); // write the result levels for (int32_t i = 0; i < m_priceTable.m_numResultLevels; i++) p += sprintf(p, "<meta name=resultlevel content=\"%"UINT32"\">\n", m_priceTable.m_resultLevels[i] ); // write the costs int32_t numCosts = m_priceTable.m_numTiers * m_priceTable.m_numResultLevels * 2; for (int32_t i = 0; i < numCosts; i++) p += sprintf(p, "<meta name=levelcost content=\"%"UINT32"\">\n", m_priceTable.m_levelCosts[i] ); // return the length return (p - page); } void DataFeed::buildPage ( SafeBuf *sb ) { sb->safePrintf("<meta name=customerid content=\"%"INT64"\">\n" "<meta name=datafeedurl content=\"%s\">\n" "<meta name=passcode content=\"%s\">\n" "<meta name=status content=\"%d\">\n" "<meta name=locked content=\"%d\">\n" "<meta name=dfcreationtime content=\"%"INT32"\">\n", m_customerId, m_url, m_passcode, m_isActive, m_isLocked, m_creationTime ); // write the pricetable sb->safePrintf("<meta name=numtiers content=\"%"INT32"\">\n" "<meta name=numresultlevels content=\"%"INT32"\">\n" "<meta name=monthlyfee content=\"%"INT32"\">\n", m_priceTable.m_numTiers, m_priceTable.m_numResultLevels, m_priceTable.m_monthlyFee ); // write the tiers for (int32_t i = 0; i < m_priceTable.m_numTiers; i++) sb->safePrintf("<meta name=tiermax content=\"%"UINT32"\">\n", m_priceTable.m_tierMax[i] ); // write the result levels for (int32_t i = 0; i < m_priceTable.m_numResultLevels; i++) sb->safePrintf("<meta name=resultlevel content=\"%"UINT32"\">\n", m_priceTable.m_resultLevels[i] ); // write the costs int32_t numCosts = m_priceTable.m_numTiers*m_priceTable.m_numResultLevels*2; for (int32_t i = 0; i < numCosts; i++) sb->safePrintf("<meta name=levelcost content=\"%"UINT32"\">\n", m_priceTable.m_levelCosts[i] ); }
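// ---------------------------------------------------------------------------
// Editor's note (illustrative addition, not part of the original source): the
// page format that buildPage() emits and parse() consumes is a flat list of
// meta tags, one per field. Derived from the sprintf/safePrintf format strings
// above, an example page would look like this (all values are made up):
//
//   <meta name=customerid content="1001">
//   <meta name=datafeedurl content="datafeed://example/">
//   <meta name=passcode content="secret">
//   <meta name=status content="1">
//   <meta name=locked content="0">
//   <meta name=dfcreationtime content="1200000000">
//   <meta name=numtiers content="2">
//   <meta name=numresultlevels content="1">
//   <meta name=monthlyfee content="0">
//   <meta name=tiermax content="1000">
//   <meta name=tiermax content="10000">
//   <meta name=resultlevel content="10">
//   <meta name=levelcost content="5">
//   ...
// ---------------------------------------------------------------------------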
3,072
559
/** * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.seyren.mongo; import com.mongodb.BasicDBObject; public final class NiceDBObject extends BasicDBObject { private static final long serialVersionUID = 1L; private NiceDBObject(String field, Object value) { put(field, value); } public static NiceDBObject forId(Object id) { return object("_id", id); } public static NiceDBObject object(String field, Object value) { return new NiceDBObject(field, value); } public NiceDBObject with(String field, Object value) { put(field, value); return this; } }
388
3,055
<reponame>Linghhh/u8g2 /* Fontname: -FreeType-HabsburgChancery-Medium-R-Normal--16-160-72-72-P-89-ISO10646-1 Copyright: Extant Glyphs: 388/388 BBX Build Mode: 0 */ const uint8_t u8g2_font_habsburgchancery_t_all[8821] U8G2_FONT_SECTION("u8g2_font_habsburgchancery_t_all") = "\204\0\3\2\5\5\3\5\6\20\20\376\374\12\374\14\376\1\366\5^\17\227 \6\0\20\226\0!\14c" "\25\222\211\206CTM\226\4\42\12e\24\233P\206dJ\0#\31\252\325\255\224\304,\315\302,\33n" "EI\314\262\341d\314BI\315B\0$\27\306\325\235K\263iP\224E\311\244dP\222A\31\206h" "KC\0%\36\252\325\255\7$\61J$\251-J\64\245\42)\213\230D\231\22e\221\22i\222\16\345" "\0&\36n\21\272\31\246)\34\42E\34\242p\210v`G\263\71\32FY\324\201p\215\207!\2'" "\10b\24\217\30\24\0(\17\305\325\231\222\246,\354Q\23\225d\213\0)\16\305\315\221\312\226D\324\304" "\276E\223\4*\13\245\364\232HJ\321 U\12+\14\346\24\236\13M\303$\212\31\0,\11\242\264\215" "\30\22%\1-\11\204T\226\32\302!\2.\10c\24\222I\226\4/\21\250\315\235\226c\71\226c\71" "\226cU\7b\0\60\21&\21\232\12\247H\211\24i\210\42%\32\63\0\61\14$\21\222\311\224\311i" "\221\42\0\62\20&\21\232\231\226Z\224hbm\30\222!\1\63\21&\21\232\231\6%\12G)\32\242" "d\310\64\0\64\24g\321\235L\215I\226h\211\224\14\311\60dI\250J\0\65\22f\321\231\21\207I" "I\207p\224*R\62d\32\0\66\22f\21\232\322\206(\332\222D\32\24\247D\212\306\14\67\20\206\261" "\231\213\6eQ\222\315\30:\216\31\0\70\22f\21\232\12\247H\221\242mH\234\6%\32\63\0\71\24" "g\315\231K\267H\212,\322\60)Q\226H\331\20j\0:\13\343\24\222I\226\64Y\22\0;\14#" "\261\215I\226T\221\222(\1<\11\246T\236\224\244u\25=\10et\232\30\324A>\11\246T\236P" "OF\0\77\17e\25\232\222\206e\210\262\232\61\333\42\0@\33j\25\256\33\304l\312\242!\32\222C" "\264$\322\62(\213\62\211\331\220i\203\4A \256\261\271\34\242t\30\324Q\215EY\214\23-\224\264" "l\30\224\312 M\241$\346\4\235\60\3B \315\261\265G\344aV\224\315\242\205C&\16R\252," "\241\246d\211\224\245c*\347\260u\230\0C!\255\361\265\315\321\34\33t@\31\224(\213\266:\240\345" "\200\226\3\322\16Hq\262\204\332p\36R\0D#\260\311\271\310)\303\203\66,\321\20\26\247\64\225\322" "T\12GIL\263-S\207$\331\201C\16\347\224\34E\27\211\361\245\225\207\60\232\62-\12\225h\270" "\351\210\30i\341\20k\0F\42\355\215\261\325\261aH\245!\326rDM\207!\35r@\222S\61\26" "c\61\7\302X\25sd\210\1G\36\214\21\262\207tHG\222u\322\24%\314\6I\33\26M\32\246" "PY\302l\220\342A\3H\42\353\221\255\33\342h\310\322E\36$mH\222%Q\242I\311&UR" "\63\61\207r(Gt@G\0I\23\207\361\235\332\6%S\62)\222\206!R-\331Z\3J\26\347" "\221\235\332\6%S\62%\31\224H[\235U\251\226\210!\0K\33j\21\252\33RiR\244\312\20%" "\266d\310\242!VTE\33\42%\222&\0L\33m\21\266\33r@\223S\61\26c\61V\262\64\21" "\207!G\344l\270\245\23\0M\34P\11\272\331$mP\226D\312$[\344,\231%\263d^\26%" "[\16\251d\2N\42\257\315\271\307\344m\10\7)\321\264i\21\67%\326\242X\222bI\212%e\234" "\246\64\14u\312 \3O\36n\21\272\326\261\35R\206QJ\206)K\7-\36\242y\210\342h\11\265" "\341\20\17+\0P+\360\211\271\311\306,\32\66m\210\244!\207\222H\207D\35\21\345!\15\207A\313" "\264d\311\322h\14\247xHrJN\311\11\71\2Q!\256\321\271\326\261\35R\206Q\33\206(\207\6" "\35\33rp\10\265\350\260\15\322<\14:\272\344\330\2R&\316\261\271\23\325!\32\62E\211\264l\322" "\262%\14\7\61Kli\244\245\212$.C\24JZNPsdP\1S\35m\21\266\35r\340\240" "I\331T\236t`\32\262D\33\66q\220\206T\33\264x\330\0T(\357\215\271\334\261a\220\303aJ" "\262e\30\302,R\262\60\315\302\64\323\322lJ\303aZ\207!\313\11\71E\315\241A\6U$\316\261" "\271\7\323!\14\207)\324\244\60\224\62\65\312\322L\12C)\33\42-\34\222%\225\264\234\260\23d\0" "V\42\217\355\271\322\321A\207\244\35IE%\226\6\35P\22%\7\26%\7\226,\7\326X\7\324\34" 
"\322\1W\37\220\11\272\312\11;\232\354\204I\307\244-^\244uZdi\221\245%G\244\35\262cE" "\0X\35k\21\256\22\245!\32\246)\311\306lJ\222a\310\62\65\32\322$\221\225%\331\26\0Y!" "\353\221\255\321\221!\223\222C\250\14\341\220\250S\272\344\200\222\3;\244C\351\42f\332I\231\206\4Z" "\22\307\321\235J\323\7m\15\305!V\245H\211\264\15[\12\303\325\221\30\24\377\227\1\134\30\250\315\235" "\320\201\34\320\201\34\320\201\34\320\201\34\320\201\34\320\1\71\1]\12\303\321\215X\374_\6\1^\11\205" "\24\233\312\246%\22_\7(\320\241\70\10`\10c\20\217I$\1a\21\7\21\236K\305!R$%" "S\224a\210J\0b\24f\21\232\323\206$R\22eK\244H\211\24)\31\244\11c\14\345\20\226\221" "\206!\63\15\222\4d\20g\15\232I\325\67ER$e\261\215\32\0e\14\5\21\226\253-\211MJ" "&\11f\21\347\221\235L\262!J\264T\35\224\325\65\356\12g\26i\215\241*\16\203bTDE\35" "RE\313$M\322\302!\4h\35\351\221\245\224\207\64\21\225t\36Be\213\226!I\244!\321$\35" "\310\221\34\310\201\20i\17e\15\222\314\352P\64D\232%\331\42\0j\20\345\215\221k\207\244Q\323&" "-\314\302\32\0k\25i\21\246\32\246m\210\244eI\226p\30\244\331\226I\351\12l\20e\21\226\313" "\226d\210\226d\322,\311\226\1m\22\353\14\252JL\303M\261)\66\305\222\14\267\250\4n\22\351\14" "\242J\264aTDET\244d\30\262H\2o\16\6\21\232\13\303)\221\224A\221\66\15p\25h\215" "\235\252\15\242\242)\27%\31\302$\315\201\34\210c\0q\22\206\221\231\12\265!\212\24\227a\220\22\61" "M\305\10r\16\6\15\226\13\245A\324B)\31\62\11s\16\345\20\226\222\226(\31\224d\230$\0t" "\21F\21\232K\303p\30\224H\14\245d\310\64\0u\21\347\20\236\211\42ER$ER\224a\210J" "\0v\17\6\21\232\12\243D\31\26K\244Dc\6w\23\353\14\252*I\303%\261L\212\224I\343\42" "G!\0x\24h\251\231\16\23i\30\242ESB)\32\264D\214\353\0y\23h\211\231\262\14C\246" "\210I\272\312\71\20\207I*\3z\20e\261\225\312\262!\332*\303\264$JI\3{\21\305\325\231\312" "\26Q\63\211\232\226\211J\262E\0|\11\302\325\215\31\376A\1}\20\305\315\221\312\226D\324\62\243\244" "\31\225-\2~\13h\224\246\231\222HJ\242\5\240\6\0\20\226\0\241\15c\265\221I\226\64\212\206C" "\224\0\242\20F\325\235\12\207e\251%\331\64Lc\232\1\243\35m\25\272\33r@\223S\61\213\304l" "\30\224pHt$\307tl\230\262\341\226N\0\244\21\347T\242i\31\246DS$EI\206EI\0" "\245#\353\265\261\321\221!\223\222C\250\14\341\220\250S\272\344\200\222-[E\31%\345\240dj\66)" "\323\220\0\246\12\302\325\215\31\236\206\27\0\247\24f\25\236\313\206DR\246A\221\224aY\262A\211&" "\0\250\12g\14\233\311\222K\226\0\251\15\6\21\232\214\22eq\331\264Q\3\252\15\305\264\232\222\206e" "H\222A\251\0\253\12\205t\232JZ\6\245\2\254\11fT\236\70\212\11\0\256\22\347\221\235K\7%" "\222\222lX\226Hu\215;\3\257\7%\60\227\30\4\260\12\204\364\226Q\42)Q\0\261\14\6\365\235" "\13M\303$\212\331\60\262\12\304\324\226q\211\42e\20\263\14\304\324\226QJJ\62$\211\2\264\10c" "\20\217\211\206\4\265$\15\226\271\316\301\35\314\321\34P\234\224AQ\206(\32\376\377\240\34\222h\220*" "J\242\244I\35\70\344P\14\266(\15\226\271\34\344qH\224\34Rrd\311\21)G\244\34\221rD" "\312\21)G\244\34YrH\311!%\307Vu\330\0\267\10c\224\222I\226\4\270\10\203\220\215\211\226" "\5\271\12\303\324\222I\26%R\4\272\13\304\324\226Q\222iY\22\5\273\12\205t\232\251\14J'\0" "\274\34\252\325\255I\225\315\26f\242\224*Y(\211Q\222)\211\26\15\212\30\351P\216\0\275\34\252\325" "\255I\225\315\26f\242\224*\221\250(Y\224dZ\230)\211\66\354P\216\0\276\34\252\325\255\21\225\232" "\255bS\252J\26Jb\224dJ\242E\203\42F:\224#\0\277\17e\265\231\312\266P\323\232\206d" "\230$\0\300$\356\261\271\7tXG\206(\35\6uTcQ\26\343D\13%-\33\6\245\62HS" "(\211\71A'\314\0\301%\356\261\271\207tPG\206\34\31\206$\334\326X\224\305\70\321BI\313\206" 
"A\251\14\322\24JbN\320\11\63\0\302%\356\261\271Gt\60\321\201\35\32\206$\334\326X\224\305\70" "\321BI\313\206A\251\14\322\24JbN\320\11\63\0\303&\356\261\271\7\224\34Jtd\310\221aH" "\302m\215EY\214\23-\224\264l\30\224\312 M\241$\346\4\235\60\3\304%\356\261\271\7\242\34\32" "f%J\207A\35\325X\224\305\70\321BI\313\206A\251\14\322\24JbN\320\11\63\0\305%\356\261" "\271\207r\64\311\201)\36\206$\334\326X\224\305\70\321BI\313\206A\251\14\322\24JbN\320\11\63" "\0\306)\317\255\271\35\305a\32\42M\211\24qS\322)R\227HM\206%\223\324h\30\304d\220\264" "$\323\206(\207D\235\262#\0\307&\15\222\265\315\321\34\33t@\31\224(\213\266:\240\345\200\226\3" "\322\16Hq\262\204\332p\36rL\307\222\34\323\1\310\30\251\361\245\321\21\363\20FS\246E\241\22\15" "\67\35\21#-\34b\15\311\30\251\361\245S%y\10\243)\323\242P\211\206\233\216\210\221\26\16\261\6" "\312\30\251\361\245\225\23\71\7\206\60\232\62I\221\206\223\216\210\221\26\16\261\6\313\31\251\361\245\214\302a" "T\322!\214\246LR\244\341\244#b\244\205C\254\1\314\25\247\361\235\222ul\210\242%\223\42i\30" "\42U\311\326\32\0\315\25\247\361\235\22ut\210\242%\223\42i\30\42U\311\326\32\0\316\25\247\361\235" "\22\23\35\33\242h\311\244H\32\206HU\262\265\6\317\26\247\361\235\211\242a\252\15Q\264dR$\15" "C\244*\331Z\3\320%\260\311\271\310)\303\203\66,\321\20\26\247\64]\6%\35\246D\224\304\64\333" "\62uH\222\35\70\344pN\311\1\321%\317\315\271\7\224\34Kt\212:dC\246\14\313\42nJ\272" "E\261$\305\222\24K\312\70Mi\30\352\224A\6\322\37\216\21\272\207t\310\274C\312\60J\311\60e" "\351\240\305C\64\17Q\34-\241\66\34\342a\5\323!\216\21\272\207t(\321\221\35R\206QJ\206)" "K\7-\36\242y\210\342h\11\265\341\20\17+\0\324!\216\21\272\326\301D\7sp\207\224a\224\222" "a\321\322AZ\7)\7\222%\216\206\203:\14!\0\325!\216\21\272Vr(\321\301\34\334!e\30" "\245dX\264t\220\326A\312\201d\211\243\341\240\16C\10\326!\216\21\272G\242\34\32\326%\12\225a" "\224\222a\312\322A\213\207h\36\242\70ZBm\70\304\303\12\327\16\306\24\236\311\206$\32\247d\310\22" "\0\330\42\256\321\271U\22Y\33\225A\325\206!J\263AL\207l\35\242\70\32Rm\70\304\303\16\350" "h\16\2\331&\356\261\271\7tXI\207\60\34\246P\223\302P\312\324(K\63)\14\245l\210\264p" "H\226T\322r\302N\220\1\332&\356\261\271\207tP\7\206\60\34\246P\223\302P\312\324(K\63)" "\14\245l\210\264pH\226T\322r\302N\220\1\333&\356\261\271Gt\60\221\207\60\34\246P\223\302P" "\312\324(K\63)\14\245l\210\264pH\226T\322r\302N\220\1\334&\356\261\271\7\242\34\32\326%" "J\7\61\63\205\241\224\251Q\226fR\30J\331\20i\341\220,\251\244\345\204\235 \3\335\42\13\222\255" "\26%q\310\244\344\20*C\70$\352\224.\71\240\344\300\16\351P\272\210\231vR\246!\1\336,\17" "\216\271\10w \32v`Svh\30rD\333\1Q\7D\35P\303a\214\206C\22\211\321\20\245a" "\70$\71!'\344p\216\0\337\32\346\221\231K\242!I\244$\32\24e\220\22%\32\6iI\226\26" ")M\1\340\23g\21\236Je\71\25\207H\221\224LQ\206!*\1\341\24g\21\236\212E\35H\305" "!R$%S\224a\210J\0\342\23G\21\236\22\23\35\24\207H\221\224LQ\206!*\1\343\24G" "\21\236Q\242DG\305!R$%S\224a\210J\0\344\25g\21\236J\262AK\322T\34\42ER" "\62E\31\206\250\4\345\24g\21\236K\223\64G\305!R$%S\224a\210J\0\346\23\11\21\246\316" "$)\31F%\32\66E[\6I\231\0\347\20e\221\225\221\206!\63\15\222\26JI\244\1\350\17e" "\21\226\312D\61\313\226\304&%\223\4\351\17e\21\226\12%\35\312\226\304&%\223\4\352\17E\21\226" "\222\22\35\310\226\304&%\223\4\353\20e\21\226\251\14J\35\311\226\304&%\223\4\354\17e\15\222\312" "D\35\211\206H\263$[\4\355\17e\15\222\13%\35\211\206H\263$[\4\356\17E\15\222\222\22\35" "\210\206H\263$[\4\357\20e\15\222\251\14J\35\211\206H\263$[\4\360\23g\15\232I\7M\211" 
"\206tS$ER\26\333\250\1\361\26I\15\242R\302D\247$\332\60*\242\42*R\62\14Y$\1" "\362\21f\21\232\12U\65\14\247DR\6E\332\64\0\363\21f\21\232J\65\35\14\247DR\6E\332" "\64\0\364\21F\21\232\322\22\35\12\247DR\6E\332\64\0\365\21F\21\232Q*:\26N\211\244\14" "\212\264i\0\366\22f\21\232)\15R\222c\341\224H\312\240H\233\6\367\16&\31\242\13\307\34\31v" "$\34\63\0\370\17\7\355\231K\262-\321\224\305\64d[\14\371\24g\21\236Ke\35\213\42ER$" "ER\224a\210J\0\372\24g\21\236\213E\35\214\42ER$ER\224a\210J\0\373\24G\21\236" "\22\23\35\213\42ER$ER\224a\210J\0\374\25g\21\236\251\15Z\222\203Q\244H\212\244H\212" "\62\14Q\11\375\27\350\211\231\315\1UG%e\30\62EL\322U\316\201\70LR\31\376\30\350\215\235" "\212\325\331\42\15\233\62I\213$\15\232\22\312\71\220\3\61\0\377\31\350\211\231L\302ALrPR\206" "!S\304$]\345\34\210\303$\225\1\0\0\0\4\377\377\1\0$\356\261\271\36r\226!J\207A\35" "\325X\224\305\70\321BI\313\206A\251\14\322\24JbN\320\11\63\0\1\1\24G\21\236\31r\64\25" "\207H\221\224LQ\206!*\1\1\2&\356\261\271\7\242\34\324\221\35\32\206$\334\326X\224\305\70\321" "BI\313\206A\251\14\322\24JbN\320\11\63\0\1\3\24G\21\236\211B\35\25\207H\221\224LQ" "\206!*\1\1\4#\256\261\271\34\242t\30\324Q\215EY\214\23-\224\264l\30\224\312 M\241$" "\346H*'\351\230\0\1\5\24g\261\235K\305!R$%S\224a\210\312\225\64\1\1\6#\255\361" "\265\215t$\321\201!G\224A\211\262h\253\3Z\16h\71 \355\200\24'K\250\15\347!\5\1\7" "\20e\21\226\12%\35\221\206!\63\15\222\4\1\10#\255\361\265\7tH\321\221\34\33\206$\222\222!" "\253\3Z\16h\71 \355\200\24'K\250\15\347!\5\1\11\20E\21\226\222\22Y\32\206\314\64H\22" "\0\1\12\42\255\361\265\215r(\231\207$V\6%\312\242\255\16h\71\240\345\200\264\3R\234,\241\66" "\234\207\24\1\13\20e\21\226\312\266\34\221\206!\63\15\222\4\1\14\42\255\361\265UrL\307rl\30" "\222HJ\206\254\16h\71\240\345\200\264\3R\234,\241\66\234\207\24\1\15\20E\21\226Q\42\35\220\206" "!\63\15\222\4\1\16%\320\311\271G\224\34Vs\312\360\240\15K\66\26\247\64\225\302Q\22\323l\313" "\324!Iv\340\220\303\71%\7\1\17\24g\15\232I\25eH\244uS$ER\26\333\250\1\1\20" "&\260\311\271\310)\303\203\66,\321\20\26\247\64]\6%\35\246D\224\304\64\333\62uH\222\35\70\344" "pN\311\1\1\21\26\207\21\236\331\244\344\260\224\24m\10\305D\212\206E\32B\15\1\22\31\251\361\245" "\33t\212<\204\321\224I\212\64\234tD\214\264p\210\65\0\1\23\20E\21\226\31r$\313\226\304&" "%\223\4\1\24\32\251\361\245\213b\35\311\201!\214\246LR\244\341\244#b\244\205C\254\1\1\25\20" "E\21\226\211\42\35\311\226\304&%\223\4\1\26\33\251\361\245\313\201\35Hr`\10\243)\223\24i\70" "\351\210\30i\341\20k\0\1\27\20e\21\226\312\266\34\312\226\304&%\223\4\1\30\34\351\221\245\225\207" "\60\232\62-\12\225h\270\351\210\30i\341\20\353H\216\324\201\10\1\31\20e\261\225\253-\211MJ&" "\261\222%\0\1\32\32\251\361\245Sb\35\311\201!\214\246LR\244\341\244#b\244\205C\254\1\1\33" "\20E\21\226P\42\65\313\226\304&%\223\4\1\34\37\214\21\262\325\241\304\16\311\313\246HR\66d\332" "\260h\322\60\205\312\22f\203\24\17\32\0\1\35\33\311\215\241\223\23\235\20\205\303\240\30\25QQ\207T" "\321\62I\223\264p\10\1\1\36\37\214\21\262\214\22Y\321\261u\322\24%\314\6I\33\26M\32\246P" "Y\302l\220\342A\3\1\37\32\311\215\241*\353\224(\34\6\305\250\210\212:\244\212\226I\232\244\205C" "\10\1 \37\214\21\262\315\324E\7\242u\322\24%\314\6I\33\26M\32\246PY\302l\220\342A\3" "\1!\34\351\215\241\313\201\35\310I\305aP\214\212\250\250C\252h\231\244IZ\70\204\0\1\42&\14" "\222\261\207tHG\222u\322\24%\314\6I\33\26M\32\246PY\302l\220\342A\207rP\207\222\34" "R\1\1#\33\311\215\241\223\223XG\212\303\240\30\25QQ\207T\321\62I\223\264p\10\1\1$%" 
"\13\222\255\325\221D\7t@\31\262t\221\264!I\226D\211&%\233TI\315\304\34\312\241\34\321\1" "\35\1\1% \11\222\245\224\23\71\7\206P\21\225tU\304h\31\222D\32\22M\322\201\34\311\201\34" "\10\1\1&%\354\215\255\33\304$\33\242A\233\224p\221\64EI\226H\211\66%\333TM\15\305\34" "\313\261\34\322\21\35\1\1'\37\11\222\245\33r\212<\204\212\250\244\253\42F\313\220$\322\220h\222\16" "\344H\16\344@\10\1(\27\247\361\235R\242D\307\206(Z\62)\222\206!R\225l\255\1\1)\17" "E\15\222qG\242!\322,\311\26\1\1*\25\247\361\235\31rp\33\224L\212\244a\210TK\266\326" "\0\1+\20E\15\222\30r\70\32\42\315\222l\21\0\1,\26\247\361\235\211B\35\34\242h\311\244H" "\32\206HU\262\265\6\1-\20E\15\222\211\42\35\211\206H\263$[\4\1.\26\247\361\235\222ul" "\210\242%\223\42i\30\42U\311\326\32\0\1/\22\305\255\221\314\352P\64D\232%\331\302J\226\0\1" "\60\26\247\361\235\222ul\210\242%\223\42i\30\42U\311\326\32\0\1\61\15\345\14\222\212\206H\263$" "[\4\1\62\26\247\361\235\222ul\210\242%\223\42i\30\42U\311\326\32\0\1\63\32\350\215\235L\302" "$Lr\70\221\206M\362\264,Z\224\3q\16\344@\6\1\64\31\7\222\235\22\23\35\33\242h\311\224" "dP\42m\65\253R-\21C\0\1\65\21\305\215\221\222\22Y\32\65m\322\302,\254\1\1\66\42\352" "\221\251\33RiR\244\312\20%\266d\310\242!VTE\33\42%\222v$\207t \311\1\15\1\67" "\34\351\221\245\32\246m\210\244eI\226p\30\244\331\226I\351\16\344\210\234\304\62\0\1\70\25(\21\242" "\333\264\304\222\14\223\242\15\241\42-C\24I\0\1\71\36\215\21\266GtL\7\206\34\320\344T\214\305" "X\311\322D\34\206\34\221\263\341\226N\0\1:\22\205\21\226\262fK\62DK\62iJ\262e\0\1" ";\42\315\261\265\33r@\223S\61\26c\61V\262\64\21\207!G\344l\270E\321\16\350X\222c:" "\0\1<\25\345\221\225\313\226d\210\226d\322,\311\26\206R\22i\0\1=\36\215\21\266VrL\7" "\206\34\320\344T\214\305X\311\322D\34\206\34\221\263\341\226N\0\1>\23\205\21\226P\42\65[\222!" 
"Z\222IS\222-\3\1\77\33m\21\266\33r@\223S\61\26c\61V\262\64\331\244,\226\263\341\226" "N\0\1@\25g\21\236K\267D\222\42%[\42e\221\42m\30\62\21\1A\33m\21\266\33dQ" "\215\265\34\70&C\42\16:\246c:$g\303-\235\0\1B\21f\15\226\14\247$\332\206\313\20Z" "\242\61\3\1C%\317\315\271GtT\221\267!\34\244D\323\246E\334\224X\213bI\212%)\226\224" "q\232\322\60\324)\203\14\1D\27i\15\242\314\21Y\247$\332\60*\242\42*R\62\14Y$\1\1" "E'\357\215\271\307\344m\10\7)\321\264i\21\67%\326\242X\222b\311*)\305i\323\302x\330A" "\35MrTG\0\1F\30i\215\241J\264aTDET\244d\30\262H\7rDNb\15\1G" "%\317\315\271VrT\221\267!\34\244D\323\246E\334\224X\213bI\212%)\226\224q\232\322\60\324" ")\203\14\1H\26I\15\242Rb\235\222h\303\250\210\212\250H\311\60d\221\4\1I\30I\15\242\321" "\1\35\310\241D\33FETDEJ\206!\213$\0\1J(\357\215\271\307\344m\10\7)\321\264i" "\21\67%\326\224XSbM\211\65E\334\244\64\234tD\35\222H\7wX\2\1K\22G\255\231\252" "\274\230\24-Y\244H\325\22Q\1\1L\42\216\21\272G\206\34\311\301\35R\206QJ\206)K\7-" "\36\242y\210\342h\11\265\341\20\17+\0\1M\22F\21\232\31r,\14\247DR\6E\332\64\0\1" "N\42\216\21\272G\242\34\211t`\207\224a\224\222a\312\322A\213\207h\36\242\70ZBm\70\304\303" "\12\1O\22F\21\232\211\62\35\13\247DR\6E\332\64\0\1P\42\216\21\272\207\222\34i\7vH" "\31F)\31\246,\35\264x\210\346!\212\243%\324\206C<\254\0\1Q\22F\21\232J\242$\307\302" ")\221\224A\221\66\15\1R%o\15\272\326\326m\220\224!\211\24)\231\244,R\242-\32\26mP" "\27I^\62-\31\16J\70H\23\0\1S\23\10\21\242\213\262(\33\226\304\233\42%\303\244H\0\1" "T)\356\261\271\7t\304u\210\206LQ\42-\233\264l\11\303A\314\22[\32i\251\42\211\313\20\205" "\222\226\23\324\34\31T\0\1U\22f\15\226L\65\35\222\6Q\13\245d\310$\0\1V*\356\221\271" "\23\325!\32\62E\211\264l\322\262%\14\7\61Kli\244\245\212$.C\24JZ\16\205:\240\16" "J\35\224\0\1W\23\206\215\225\13\245A\324B)\31\62\61\325\222L\3\1X(\356\261\271UrP" "\207Du\210\206LQ\42-[\302p\20\263\304\226FZ\252H\342\62D\241\244\345\4\65G\6\25\1" "Y\22F\15\226Q\62\35\223\6Q\13\245d\310$\0\1Z\37\215\21\266\325\61\35\336\201\203&eS" "y\32\322i\30\224m\30\222!\226\6\61\35\206\10\1[\22e\21\226\12%\35\222\226(\31\224d\230" "$\0\1\134 \215\21\266\325\261Dg\70\204R\62H\345iH\247aP\266aH\206X\32\304t\30" "\42\0\1]\22E\21\226\221\22\35\221\226(\31\224d\230$\0\1^%\355\221\265\35r\340\240I\331" "T\236t`\32\262D\33\66q\220\206T\33\264x\330\261\34\325\261$\307T\0\1_\22e\221\225\222" "\226(\31\224d\230\264PJ\42\15\1` \215\21\266VrL\307v\340\240I\331T\236\206t\32\6" "e\33\206d\210\245AL\207!\2\1a\22E\21\226P\42\35\222\226(\31\224d\230$\0\1b," "\357\215\271\334\261a\220\303aJ\262e\30\302,R\262\60\315\302\64\323\322lJ\303aZ\207!\313\221" "\64GT\65\311\201!\21\1\1c\25\306\221\231K\303p\30\224H\14\245d\310\324TK\62\15\1d" "+\17\216\271\7\225p\325\206A\16\207)\311\226a\10\263H\311\302\64\13\323LK\263)\15\207i\35" "\206,'\344\24\65\207\6\31\1e\22F\21\232Q\62\61\34\6%\22C)\31\62\15\1f,\357\215" "\271\334\261a\220\303aJ\262e\30\302,R\262(\31\242,\32\222(\323\322lJ\303aZ\207!\313" "\11\71E\315\241A\6\1g\22F\21\232\13\65mX\264a\211\22q\220\64\0\1h(\356\261\271\7" "\224\34Jt`\10\303a\12\65)\14\245L\215\262\64\223\302P\312\206H\13\207dI%-'\354\4" "\31\1i\26G\21\236Q\242D\7\243H\221\24I\221\24e\30\242\22\0\1j&\356\261\271\36rJ" ":\204\341\60\205\232\24\206R\246FY\232Ia(eC\244\205C\262\244\222\226\23v\202\14\1k\24" "G\21\236\31rZ\24)\222\42)\222\242\14CT\2\1l(\356\261\271\7\242\34\324\201!\14\207)" "\324\244\60\224\62\65\312\322L\12C)\33\42-\34\222%\225\264\234\260\23d\0\1m\25G\21\236\211" "B\35\214\42ER$ER\224a\210J\0\1n(\356\261\271Gr\64\251\16I\24\16S\250Ia" 
"(ej\224\245\231\24\206R\66DZ\70$K*i\71a'\310\0\1o\26g\21\236K\223\64\7" "\243H\221\24I\221\24e\30\242\22\0\1p(\356\261\271G\352X\222\3C\30\16S\250Ia(e" "j\224\245\231\24\206R\66DZ\70$K*i\71a'\310\0\1q\26G\21\236K\262$\307\242H" "\221\24I\221\24e\30\242\22\0\1r'\316\261\271\7\323!\14\207)\324\244\60\224\62\65\312\322L\12" "C)\33\42-\34\222%\225\264\34I\327$\26\23\0\1s\24G\261\235\211\42ER$ER\224a" "\210\312\225\64\1\1t\42\220\11\272\312!m\7\22%\331\11\223\216I[\274H\353\264\310\322\42KK" "\216H;d\307\212\0\1u\27K\15\252\325\221D'\225\244\341\222X&E\312\244q\221\243\20\1v" "#\13\222\255\324\221D\325\221!\223\222C\70$\352\224.\71\240\344\300\16\351P\272\210\231vR\246!" "\1\1w\30\310\211\231T\23\35\224\224a\310\24\61IW\71\7\342\60Ie\0\1x$\13\222\255L" "r`\20\225\342\220I\311!\34\22uJ\227\34Pr`\207t(]\304L;)\323\220\0\1y\26" "\307\321\235J\244DR\17\323\220\206\342\20\253R\244D\332\6\1z\23\305\261\225\12%\35\311\206h\253" "\14\323\222(%\15\1{\26\307\321\235J\262\344\222\255\203\266\206\342\20\253R\244D\332\6\1|\23\305" "\261\225\312\266\34\311\206h\253\14\323\222(%\15\1}\26\307\321\235I\224IY\17\332\32\212C\254J" "\221\22i\33\0\1~\23\245\261\225P\42\35\311\206h\253\14\323\222(%\15\1\177\20\346\221\231K\242" "!I\244\320\217iW\0\1\315&\356\261\271\7\224\34\324\221\35\32\206$\334\326X\224\305\70\321BI" "\313\206A\251\14\322\24JbN\320\11\63\0\1\316\24G\21\236QB\35\25\207H\221\224LQ\206!" "*\1\1\346\37\214\21\262Tr\310\16\311\313\246HR\66d\332\260h\322\60\205\312\22f\203\24\17\32" "\0\1\347\33\311\215\241Rb\235\22\205\303\240\30\25QQ\207T\321\62I\223\264p\10\1\1\367 \354" "\211\251\323\201d\210\134\206XZ\65US\65U\222%Y\321\201DGt,\7s,\7\1\370%\317" "\315\271\326\11\212\274\15\341 %\232\66-\342\246\304Z\24KR,I\261\244\214\323\224\206\241N\31d" "\0\1\371\26I\15\242\322\21\235\222h\303\250\210\212\250H\311\60d\221\4\2\34\36\351\221\245\35\207)" "TR%M\322(\224\262!I\207x\213\246H\212\266\34\32\62\0\2\35\22F\261\231\322\206$\22C" "m\230\264)\311\64\0\2\36%\13\222\255TrDGt@\31\262t\221\264!I\226D\211&%\233" "TI\315\304\34\312\241\34\321\1\35\1\2\37!\11\222\245Sb\35\310\201!TD%]\25\61Z\206" "$\221\206D\223t Gr \7B\0\3\0\10CH\207\220\4\3\1\11CH\207\31\22\0\3\2" "\11DH\213Q\22\1\3\3\10EH\217q\1\3\4\10$h\213\30\2\3\6\11DH\213\210\22\5" "\3\7\11c(\207I\226\4\3\10\12e(\217\251\14J\5\3\14\11DH\213P\22\5\3'\11\203" "\210\205\211\226\5\3=\12d(\213P\22%\21\36\16$\260\311\271\310)\303\203\66,\321\20\26\247\64" "\225\322T\12GIL\263-S\207$\331\201C\316<\250\0\36\17\23\247\315\231I\325\67ER$e" "\261\215:\70(\0\36\60\34\212\21\252\7t@\35RiR\244\312\20%\246h\210\25U\321\206H\211" "\244\11\36\61\27\211\21\246\225\325a\332\206HZ\226dI\207m\235\62)]\1\36T.\20\212\271\7" "\305l\22\243a\323\206H\32r(\211tH\324\21Q\36\322p\30\264LK\226,\215\306p\212\207$" "\247\344\224\234\220#\0\36U\31\310\215\235Tu\70\312\6Q\321\224\213\222\14a\222\346@\16\304\61\0" "\36n)\357\215\271\334\261a\220\303aJ\262e\30\302,R\262\60\315\302\64\323\322lJ\303aZ\207" "!\313\11\351\240\352\24\35\2\36o\24\206\321\231K\303p\30\224H\14\245d\310thP\0 \23\10" "*\224\256x\10 \24\10-\224\272\370\20 \30\11\202\364\216Q\22\1 \31\11\202\364\216P\22\5 " "\34\12\204\364\226I\224\226! 
\35\13\204\364\226\30\222\26%\1 \21\207\365\241\261\14\203Ti\32" "\224\250\26\367\6 !\24\207\365\241\261\14\203Ti\32\224\250\62(Q-n\3 &\13k\24\262\311" "*/Y\5 \71\12\203t\222J\222%\1 :\12\203t\222I\226\22\0 =\17e\25\232\222\206" "\227H\313\214\331\26\1 B\32k\25\262K\352H\216\14:\222#\355\204\244\247\64\32\224AJ\243\244" "\7 N\14\245t\232HJ\321 U\12 Q\22e\25\232HJ\321 U\252I)\32\244J\1 " "\254\30\211\365\251\225\207\60\232\62-\36\206L\36\206P\214\264p\210\65\0!\220\17\252T\256\312\221\34" "\31\16\211\16\345\0!\221\14E\365\231\312\226d\12\373\4!\222\20\252T\256\7rH\31\16\71\222#" "\21\0!\223\14E\365\231\12\373\264$[\4!\224\20\252T\256\12\263T\31\16\211\232\205\21\0!\225" "\17E\365\231\312\226d\12\233\226d\213\0#\210\42\214\25\266\12s \313\201A\313\224\344\222\14\341 " "\205\321 \16IrI\264l\320\201,\7\302\10&\5\25)\25\252\314\221\34\330\206\313\60d\203\70h" "\313\224&\0&\36\20\253\64\262\310\261\341\207l\30\264h\10\1&%\26\210\25\246\213\7)\31\206t" "X\242A\325\206A\223\315!\0&-\35\254\365\265\33\306A\36rd\15\323\61\35\245!\35\242X\207" "\224\361\262\3\211\16\1&\220%\15\226\271\316\301\35\314\321\34P\234\224AQ\206(\32\376\377\240\34\222" "h\220*J\242\304;\62\14\71\24\3&\221%\15\226\271\270\14\7m\70(\303\313\60DMR\32\205" "\212\316\327,LTE\211\222(\31\266a\320\201\227a&\233#\253\365\261\315\241$G\212\203\62LI" ")I\6%S\22-\31\224RR\32\226ALr\244\16\245\0'&\23)\25\252\314\221\34X\7i" "\70\15\352\16\344H\10''\25)\25\252\314\221\34HRER%EMr GB\0')\26)" "\25\252\314\221\34H\262!\31\222\64+&\265eJ\23\0'/\24)\25\252\314\221\34\321\26)Z\324" "Q\11%-M\0'\64\22)\25\252L\243ZR\335\206\333\232\324\242j\10'\65\35\214\365\265.f" "\231TU\22\71\331\206!\307\206![b%Q\243H+\306\61\0'\66\26)\25\252\314\221PY\224" "a\310\6m\30\22e\21s$\4'\71\37m\365\271\315\301,\7\42\35Htd\210\304aP\242d" "\321\264D\307\42\35\312r(\7':\42\255\325\271\316\261\244\234tM\226Pi\21\223%\33\36\262d" "\11\225\26\61Y\322\244sR\307b\0\60\0\7\0\20\302\0\0";
15,411
8,148
#pragma once

#ifdef __GNUC__
#define likely(x) __builtin_expect((x),1)
#define unlikely(x) __builtin_expect((x),0)
#else
#define likely(x) (x)
#define unlikely(x) (x)
#endif
81
4,324
<reponame>ideas-detoxes/jerryscript<filename>jerry-ext/handler/handler-print.c /* Copyright JS Foundation and other contributors, http://js.foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "jerryscript-debugger.h" #include "jerryscript-port.h" #include "jerryscript-ext/handler.h" /** * Provide a 'print' implementation for scripts. * * The routine converts all of its arguments to strings and outputs them * char-by-char using jerry_port_print_char. * * The NUL character is output as "\u0000", other characters are output * bytewise. * * Note: * This implementation does not use standard C `printf` to print its * output. This allows more flexibility but also extends the core * JerryScript engine port API. Applications that want to use * `jerryx_handler_print` must ensure that their port implementation also * provides `jerry_port_print_char`. * * @return undefined - if all arguments could be converted to strings, * error - otherwise. */ jerry_value_t jerryx_handler_print (const jerry_call_info_t *call_info_p, /**< call information */ const jerry_value_t args_p[], /**< function arguments */ const jerry_length_t args_cnt) /**< number of function arguments */ { (void) call_info_p; /* unused */ jerry_value_t ret_val = jerry_undefined (); for (jerry_length_t arg_index = 0; arg_index < args_cnt; arg_index++) { jerry_value_t str_val; if (jerry_value_is_symbol (args_p[arg_index])) { str_val = jerry_symbol_descriptive_string (args_p[arg_index]); } else { str_val = jerry_value_to_string (args_p[arg_index]); } if (jerry_value_is_exception (str_val)) { /* There is no need to free the undefined value. */ ret_val = str_val; break; } if (arg_index > 0) { jerry_port_print_char (' '); } jerry_string_print (str_val); jerry_value_free (str_val); } jerry_port_print_char ('\n'); return ret_val; } /* jerryx_handler_print */
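/* ---------------------------------------------------------------------------
 * Editor's note (hypothetical sketch, not part of the original source): a
 * typical embedding registers this handler under the global name "print" and
 * then evaluates script source that calls it. The registration helper name
 * below is an assumption based on the included "jerryscript-ext/handler.h"
 * header and may differ between JerryScript versions; the script source is
 * illustrative.
 *
 *   jerry_init (JERRY_INIT_EMPTY);
 *   jerryx_handler_register_global ((const jerry_char_t *) "print",
 *                                   jerryx_handler_print);
 *   const jerry_char_t script[] = "print ('Hello from JerryScript');";
 *   jerry_value_t result = jerry_eval (script, sizeof (script) - 1, JERRY_PARSE_NO_OPTS);
 *   jerry_value_free (result);
 *   jerry_cleanup ();
 * ------------------------------------------------------------------------- */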
916
348
<filename>docs/data/leg-t2/021/02101485.json {"nom":"Plombières-lès-Dijon","circ":"1ère circonscription","dpt":"Côte-d'Or","inscrits":1924,"abs":1149,"votants":775,"blancs":84,"nuls":33,"exp":658,"res":[{"nuance":"REM","nom":"<NAME>","voix":348},{"nuance":"DVD","nom":"<NAME>","voix":310}]}
124
841
<reponame>burgessjp/GanHuoIO<gh_stars>100-1000 package ren.solid.ganhuoio.module.about; import android.support.v7.widget.Toolbar; import android.text.SpannableStringBuilder; import android.view.View; import android.widget.TextView; import ren.solid.ganhuoio.R; import ren.solid.library.activity.base.BaseActivity; import ren.solid.library.utils.SpannableStringUtils; import ren.solid.library.utils.SystemUtils; /** * Created by _SOLID * Date:2016/5/5 * Time:10:30 */ public class AboutActivity extends BaseActivity { private TextView tv_version; private TextView tv_msg; @Override protected void setUpView() { Toolbar toolbar = $(R.id.toolbar); setSupportActionBar(toolbar); getSupportActionBar().setDisplayHomeAsUpEnabled(true); getSupportActionBar().setDisplayShowHomeEnabled(true); toolbar.setNavigationOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { onBackPressed(); } }); tv_msg = $(R.id.tv_msg); tv_version = $(R.id.tv_version); tv_version.setText("v" + SystemUtils.getAppVersion(this)); SpannableStringBuilder builder = new SpannableStringBuilder(); builder.append(getString(R.string.about_msg)); builder.append("\n"); builder.append("\n"); builder.append(SpannableStringUtils.format(this, getString(R.string.about_author), R.style.AboutItemText)); builder.append("\n"); builder.append(SpannableStringUtils.format(this, getString(R.string.about_github), R.style.AboutItemText)); builder.append("\n"); builder.append(SpannableStringUtils.format(this, getString(R.string.about_blog), R.style.AboutItemText)); builder.append("\n"); builder.append(SpannableStringUtils.format(this, getString(R.string.about_weibo), R.style.AboutItemText)); builder.append("\n"); builder.append(SpannableStringUtils.format(this, getString(R.string.about_project), R.style.AboutItemText)); tv_msg.setText(builder.subSequence(0, builder.length())); } @Override protected int setLayoutResourceID() { return R.layout.activity_about; } }
1,053
456
// SPDX-License-Identifier: BSD-3-Clause
// Copyright (c) 2019-2020 <NAME>
// All rights reserved.

namespace djv
{
    namespace Scene3D
    {
        inline std::string PolyLinePrimitive::getClassName() const
        {
            return "PolyLinePrimitive";
        }

        inline bool PolyLinePrimitive::isShaded() const
        {
            return false;
        }

        inline const std::vector<std::shared_ptr<Geom::PointList> >& PolyLinePrimitive::getPolyLines() const
        {
            return _pointLists;
        }

        inline size_t PolyLinePrimitive::getPointCount() const
        {
            return _pointCount;
        }

    } // namespace Scene3D
} // namespace djv
316
1,745
//************************************ bs::framework - Copyright 2018 <NAME> **************************************//
//*********** Licensed under the MIT license. See LICENSE.md for full terms. This notice is not to be removed. ***********//
#include "Components/BsCSkybox.h"
#include "Private/RTTI/BsCSkyboxRTTI.h"
#include "Scene/BsSceneManager.h"
#include "Renderer/BsSkybox.h"

namespace bs
{
    CSkybox::CSkybox()
    {
        setFlag(ComponentFlag::AlwaysRun, true);
        setName("Skybox");
    }

    CSkybox::CSkybox(const HSceneObject& parent)
        : Component(parent)
    {
        setFlag(ComponentFlag::AlwaysRun, true);
        setName("Skybox");
    }

    CSkybox::~CSkybox()
    {
        mInternal->destroy();
    }

    void CSkybox::onInitialized()
    {
        // If mInternal already exists this means this object was deserialized,
        // so all we need to do is initialize it.
        if (mInternal != nullptr)
            mInternal->initialize();
        else
            mInternal = Skybox::create();

        gSceneManager()._bindActor(mInternal, sceneObject());
    }

    void CSkybox::onDestroyed()
    {
        gSceneManager()._unbindActor(mInternal);
    }

    RTTITypeBase* CSkybox::getRTTIStatic()
    {
        return CSkyboxRTTI::instance();
    }

    RTTITypeBase* CSkybox::getRTTI() const
    {
        return CSkybox::getRTTIStatic();
    }
}
438
3,372
<reponame>rbalamohan/aws-sdk-java<filename>aws-java-sdk-fsx/src/main/java/com/amazonaws/services/fsx/model/CreateDataRepositoryTaskRequest.java /* * Copyright 2016-2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.fsx.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.AmazonWebServiceRequest; /** * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/fsx-2018-03-01/CreateDataRepositoryTask" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class CreateDataRepositoryTaskRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable { /** * <p> * Specifies the type of data repository task to create. * </p> */ private String type; /** * <p> * (Optional) The path or paths on the Amazon FSx file system to use when the data repository task is processed. The * default path is the file system root directory. The paths you provide need to be relative to the mount point of * the file system. If the mount point is <code>/mnt/fsx</code> and <code>/mnt/fsx/path1</code> is a directory or * file on the file system you want to export, then the path to provide is <code>path1</code>. If a path that you * provide isn't valid, the task fails. * </p> */ private java.util.List<String> paths; private String fileSystemId; /** * <p> * Defines whether or not Amazon FSx provides a CompletionReport once the task has completed. A CompletionReport * provides a detailed report on the files that Amazon FSx processed that meet the criteria specified by the * <code>Scope</code> parameter. For more information, see <a * href="https://docs.aws.amazon.com/fsx/latest/LustreGuide/task-completion-report.html">Working with Task * Completion Reports</a>. * </p> */ private CompletionReport report; private String clientRequestToken; private java.util.List<Tag> tags; /** * <p> * Specifies the type of data repository task to create. * </p> * * @param type * Specifies the type of data repository task to create. * @see DataRepositoryTaskType */ public void setType(String type) { this.type = type; } /** * <p> * Specifies the type of data repository task to create. * </p> * * @return Specifies the type of data repository task to create. * @see DataRepositoryTaskType */ public String getType() { return this.type; } /** * <p> * Specifies the type of data repository task to create. * </p> * * @param type * Specifies the type of data repository task to create. * @return Returns a reference to this object so that method calls can be chained together. * @see DataRepositoryTaskType */ public CreateDataRepositoryTaskRequest withType(String type) { setType(type); return this; } /** * <p> * Specifies the type of data repository task to create. * </p> * * @param type * Specifies the type of data repository task to create. * @return Returns a reference to this object so that method calls can be chained together. 
* @see DataRepositoryTaskType */ public CreateDataRepositoryTaskRequest withType(DataRepositoryTaskType type) { this.type = type.toString(); return this; } /** * <p> * (Optional) The path or paths on the Amazon FSx file system to use when the data repository task is processed. The * default path is the file system root directory. The paths you provide need to be relative to the mount point of * the file system. If the mount point is <code>/mnt/fsx</code> and <code>/mnt/fsx/path1</code> is a directory or * file on the file system you want to export, then the path to provide is <code>path1</code>. If a path that you * provide isn't valid, the task fails. * </p> * * @return (Optional) The path or paths on the Amazon FSx file system to use when the data repository task is * processed. The default path is the file system root directory. The paths you provide need to be relative * to the mount point of the file system. If the mount point is <code>/mnt/fsx</code> and * <code>/mnt/fsx/path1</code> is a directory or file on the file system you want to export, then the path * to provide is <code>path1</code>. If a path that you provide isn't valid, the task fails. */ public java.util.List<String> getPaths() { return paths; } /** * <p> * (Optional) The path or paths on the Amazon FSx file system to use when the data repository task is processed. The * default path is the file system root directory. The paths you provide need to be relative to the mount point of * the file system. If the mount point is <code>/mnt/fsx</code> and <code>/mnt/fsx/path1</code> is a directory or * file on the file system you want to export, then the path to provide is <code>path1</code>. If a path that you * provide isn't valid, the task fails. * </p> * * @param paths * (Optional) The path or paths on the Amazon FSx file system to use when the data repository task is * processed. The default path is the file system root directory. The paths you provide need to be relative * to the mount point of the file system. If the mount point is <code>/mnt/fsx</code> and * <code>/mnt/fsx/path1</code> is a directory or file on the file system you want to export, then the path to * provide is <code>path1</code>. If a path that you provide isn't valid, the task fails. */ public void setPaths(java.util.Collection<String> paths) { if (paths == null) { this.paths = null; return; } this.paths = new java.util.ArrayList<String>(paths); } /** * <p> * (Optional) The path or paths on the Amazon FSx file system to use when the data repository task is processed. The * default path is the file system root directory. The paths you provide need to be relative to the mount point of * the file system. If the mount point is <code>/mnt/fsx</code> and <code>/mnt/fsx/path1</code> is a directory or * file on the file system you want to export, then the path to provide is <code>path1</code>. If a path that you * provide isn't valid, the task fails. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setPaths(java.util.Collection)} or {@link #withPaths(java.util.Collection)} if you want to override the * existing values. * </p> * * @param paths * (Optional) The path or paths on the Amazon FSx file system to use when the data repository task is * processed. The default path is the file system root directory. The paths you provide need to be relative * to the mount point of the file system. 
If the mount point is <code>/mnt/fsx</code> and * <code>/mnt/fsx/path1</code> is a directory or file on the file system you want to export, then the path to * provide is <code>path1</code>. If a path that you provide isn't valid, the task fails. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateDataRepositoryTaskRequest withPaths(String... paths) { if (this.paths == null) { setPaths(new java.util.ArrayList<String>(paths.length)); } for (String ele : paths) { this.paths.add(ele); } return this; } /** * <p> * (Optional) The path or paths on the Amazon FSx file system to use when the data repository task is processed. The * default path is the file system root directory. The paths you provide need to be relative to the mount point of * the file system. If the mount point is <code>/mnt/fsx</code> and <code>/mnt/fsx/path1</code> is a directory or * file on the file system you want to export, then the path to provide is <code>path1</code>. If a path that you * provide isn't valid, the task fails. * </p> * * @param paths * (Optional) The path or paths on the Amazon FSx file system to use when the data repository task is * processed. The default path is the file system root directory. The paths you provide need to be relative * to the mount point of the file system. If the mount point is <code>/mnt/fsx</code> and * <code>/mnt/fsx/path1</code> is a directory or file on the file system you want to export, then the path to * provide is <code>path1</code>. If a path that you provide isn't valid, the task fails. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateDataRepositoryTaskRequest withPaths(java.util.Collection<String> paths) { setPaths(paths); return this; } /** * @param fileSystemId */ public void setFileSystemId(String fileSystemId) { this.fileSystemId = fileSystemId; } /** * @return */ public String getFileSystemId() { return this.fileSystemId; } /** * @param fileSystemId * @return Returns a reference to this object so that method calls can be chained together. */ public CreateDataRepositoryTaskRequest withFileSystemId(String fileSystemId) { setFileSystemId(fileSystemId); return this; } /** * <p> * Defines whether or not Amazon FSx provides a CompletionReport once the task has completed. A CompletionReport * provides a detailed report on the files that Amazon FSx processed that meet the criteria specified by the * <code>Scope</code> parameter. For more information, see <a * href="https://docs.aws.amazon.com/fsx/latest/LustreGuide/task-completion-report.html">Working with Task * Completion Reports</a>. * </p> * * @param report * Defines whether or not Amazon FSx provides a CompletionReport once the task has completed. A * CompletionReport provides a detailed report on the files that Amazon FSx processed that meet the criteria * specified by the <code>Scope</code> parameter. For more information, see <a * href="https://docs.aws.amazon.com/fsx/latest/LustreGuide/task-completion-report.html">Working with Task * Completion Reports</a>. */ public void setReport(CompletionReport report) { this.report = report; } /** * <p> * Defines whether or not Amazon FSx provides a CompletionReport once the task has completed. A CompletionReport * provides a detailed report on the files that Amazon FSx processed that meet the criteria specified by the * <code>Scope</code> parameter. 
For more information, see <a * href="https://docs.aws.amazon.com/fsx/latest/LustreGuide/task-completion-report.html">Working with Task * Completion Reports</a>. * </p> * * @return Defines whether or not Amazon FSx provides a CompletionReport once the task has completed. A * CompletionReport provides a detailed report on the files that Amazon FSx processed that meet the criteria * specified by the <code>Scope</code> parameter. For more information, see <a * href="https://docs.aws.amazon.com/fsx/latest/LustreGuide/task-completion-report.html">Working with Task * Completion Reports</a>. */ public CompletionReport getReport() { return this.report; } /** * <p> * Defines whether or not Amazon FSx provides a CompletionReport once the task has completed. A CompletionReport * provides a detailed report on the files that Amazon FSx processed that meet the criteria specified by the * <code>Scope</code> parameter. For more information, see <a * href="https://docs.aws.amazon.com/fsx/latest/LustreGuide/task-completion-report.html">Working with Task * Completion Reports</a>. * </p> * * @param report * Defines whether or not Amazon FSx provides a CompletionReport once the task has completed. A * CompletionReport provides a detailed report on the files that Amazon FSx processed that meet the criteria * specified by the <code>Scope</code> parameter. For more information, see <a * href="https://docs.aws.amazon.com/fsx/latest/LustreGuide/task-completion-report.html">Working with Task * Completion Reports</a>. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateDataRepositoryTaskRequest withReport(CompletionReport report) { setReport(report); return this; } /** * @param clientRequestToken */ public void setClientRequestToken(String clientRequestToken) { this.clientRequestToken = clientRequestToken; } /** * @return */ public String getClientRequestToken() { return this.clientRequestToken; } /** * @param clientRequestToken * @return Returns a reference to this object so that method calls can be chained together. */ public CreateDataRepositoryTaskRequest withClientRequestToken(String clientRequestToken) { setClientRequestToken(clientRequestToken); return this; } /** * @return */ public java.util.List<Tag> getTags() { return tags; } /** * @param tags */ public void setTags(java.util.Collection<Tag> tags) { if (tags == null) { this.tags = null; return; } this.tags = new java.util.ArrayList<Tag>(tags); } /** * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setTags(java.util.Collection)} or {@link #withTags(java.util.Collection)} if you want to override the * existing values. * </p> * * @param tags * @return Returns a reference to this object so that method calls can be chained together. */ public CreateDataRepositoryTaskRequest withTags(Tag... tags) { if (this.tags == null) { setTags(new java.util.ArrayList<Tag>(tags.length)); } for (Tag ele : tags) { this.tags.add(ele); } return this; } /** * @param tags * @return Returns a reference to this object so that method calls can be chained together. */ public CreateDataRepositoryTaskRequest withTags(java.util.Collection<Tag> tags) { setTags(tags); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getType() != null) sb.append("Type: ").append(getType()).append(","); if (getPaths() != null) sb.append("Paths: ").append(getPaths()).append(","); if (getFileSystemId() != null) sb.append("FileSystemId: ").append(getFileSystemId()).append(","); if (getReport() != null) sb.append("Report: ").append(getReport()).append(","); if (getClientRequestToken() != null) sb.append("ClientRequestToken: ").append(getClientRequestToken()).append(","); if (getTags() != null) sb.append("Tags: ").append(getTags()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof CreateDataRepositoryTaskRequest == false) return false; CreateDataRepositoryTaskRequest other = (CreateDataRepositoryTaskRequest) obj; if (other.getType() == null ^ this.getType() == null) return false; if (other.getType() != null && other.getType().equals(this.getType()) == false) return false; if (other.getPaths() == null ^ this.getPaths() == null) return false; if (other.getPaths() != null && other.getPaths().equals(this.getPaths()) == false) return false; if (other.getFileSystemId() == null ^ this.getFileSystemId() == null) return false; if (other.getFileSystemId() != null && other.getFileSystemId().equals(this.getFileSystemId()) == false) return false; if (other.getReport() == null ^ this.getReport() == null) return false; if (other.getReport() != null && other.getReport().equals(this.getReport()) == false) return false; if (other.getClientRequestToken() == null ^ this.getClientRequestToken() == null) return false; if (other.getClientRequestToken() != null && other.getClientRequestToken().equals(this.getClientRequestToken()) == false) return false; if (other.getTags() == null ^ this.getTags() == null) return false; if (other.getTags() != null && other.getTags().equals(this.getTags()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getType() == null) ? 0 : getType().hashCode()); hashCode = prime * hashCode + ((getPaths() == null) ? 0 : getPaths().hashCode()); hashCode = prime * hashCode + ((getFileSystemId() == null) ? 0 : getFileSystemId().hashCode()); hashCode = prime * hashCode + ((getReport() == null) ? 0 : getReport().hashCode()); hashCode = prime * hashCode + ((getClientRequestToken() == null) ? 0 : getClientRequestToken().hashCode()); hashCode = prime * hashCode + ((getTags() == null) ? 0 : getTags().hashCode()); return hashCode; } @Override public CreateDataRepositoryTaskRequest clone() { return (CreateDataRepositoryTaskRequest) super.clone(); } }
7,075
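A minimal usage sketch for the request builder above. The fluent setters (withFileSystemId, withType, withPaths, withReport) come from the generated class itself; the client construction, the task-type string, the file-system id, and the CompletionReport settings are assumptions based on the usual AWS SDK v1 pattern, not taken from this file.

// Hypothetical caller for CreateDataRepositoryTaskRequest; literal values are placeholders.
import com.amazonaws.services.fsx.AmazonFSx;
import com.amazonaws.services.fsx.AmazonFSxClientBuilder;
import com.amazonaws.services.fsx.model.CompletionReport;
import com.amazonaws.services.fsx.model.CreateDataRepositoryTaskRequest;

public class CreateTaskExample {
    public static void main(String[] args) {
        AmazonFSx fsx = AmazonFSxClientBuilder.defaultClient();   // assumed client setup
        CreateDataRepositoryTaskRequest request = new CreateDataRepositoryTaskRequest()
                .withFileSystemId("fs-0123456789abcdef0")         // placeholder file system id
                .withType("EXPORT_TO_REPOSITORY")                 // assumed task type value
                .withPaths("path1", "path2")                      // relative to the mount point
                .withReport(new CompletionReport().withEnabled(true)); // assumed report fields
        fsx.createDataRepositoryTask(request);
    }
}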
372
package helpers;

import java.io.File;
import java.util.Random;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Date;

public class Config {

    public static Random rand = new Random();

    public static Random getRandom() {
        return rand;
    }

    private volatile static Config instance;

    public static Config get() {
        if (instance != null) {
            return instance;
        }
        synchronized (Config.class) {
            if (instance != null) {
                return instance;
            }
            instance = new Config();
            return instance;
        }
    }

    public int framesRangeFrom = 0;
    public int framesRangeTo = 10;
    public File stickyPathDir;
    public File eventSpotDir;
    public ThreadLocal<Integer> entryPointIndex = new ThreadLocal<>();

    public Config() {
    }

    public static void setFramesRangeFromCommandLine(int[] framesCountRange) {
        if ((framesCountRange == null) || (framesCountRange.length != 2)) {
            System.out.println("Invalid frames range " + Arrays.toString(framesCountRange));
            return;
        }
        int from = framesCountRange[0];
        int to = framesCountRange[1];
        Config.get().setFramesRange(from, to);
    }

    public void setFramesRange(int from, int to) {
        if (from < 0) {
            from = 0;
        }
        if (to < from) {
            to = from;
        }
        this.framesRangeFrom = from;
        this.framesRangeTo = to;
    }

    public void setStickyPathsDir(String stickyPathDirPath) {
        if (stickyPathDirPath == null) {
            System.out.println("Invalid stickyPathDirPath, null");
            return;
        }
        File stickyPathDir = new File(stickyPathDirPath);
        stickyPathDir.mkdirs();
        if (!stickyPathDir.isDirectory()) {
            System.out.println("Provided sticky path dir is not directory: " + stickyPathDir);
            return;
        }
        this.stickyPathDir = stickyPathDir;
    }

    public File getStickyPathsDir() {
        return stickyPathDir;
    }

    public int getStickyPath(int classId, int methodId, int maxNumber) {
        if (stickyPathDir == null) {
            return rand.nextInt(maxNumber);
        }
        int result = StickyPathHelper.getMethodToCall(stickyPathDir, classId, methodId);
        if (result == -1) {
            int randomNumber = rand.nextInt(maxNumber);
            if (!StickyPathHelper.persistMethodToCall(
                    stickyPathDir, classId, methodId, randomNumber)) {
                System.out.println("Error persisting sticky path");
            }
            result = randomNumber;
        }
        return result;
    }

    public void setEventSpotDir(String eventSpotDirPath) {
        if (eventSpotDirPath == null) {
            System.out.println("Invalid eventSpotDirPath, null");
            return;
        }
        File eventSpotDir = new File(eventSpotDirPath);
        eventSpotDir.mkdirs();
        if (!eventSpotDir.isDirectory()) {
            System.out.println("Provided event spot dir is not directory: " + eventSpotDir);
            return;
        }
        this.eventSpotDir = eventSpotDir;
    }

    public File getEventSpotDir() {
        return eventSpotDir;
    }

    public boolean shouldFireEvent(Context context) {
        if (eventSpotDir == null) {
            return context.framesDepth > framesRangeTo;
        }
        return EventsSpot.shouldFireEvent(eventSpotDir, context);
    }

    public boolean shouldRunAway(Context context) {
        return context.framesDepth > framesRangeTo;
    }

    public void updateContext(Context context, int entryPointId, int classId, int methodId) {
        context.framesDepth++;
        context.entryPointId = entryPointId;
        context.classId = classId;
        context.methodId = methodId;
        context.addPath(classId, methodId);
        Context.incInvCount(context);
    }

    public boolean shouldWriteLogInfo(Context context) {
        return true;
    }

    public boolean shouldWriteLogWarn(Context context) {
        return false;
    }

    public boolean shouldWriteLogError(Context context) {
        return true;
    }

    public boolean shouldSuicide() {
        return false;
    }

    public boolean shouldDoIoCpuIntensiveLogic(Context context) {
        return false;
    }
}
1,363
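A short driver sketch for the Config singleton above. Only methods defined in Config itself are called; the directory paths and the range values are placeholders, and the helper classes it delegates to (StickyPathHelper, EventsSpot, Context) are assumed to come from the same project.

// Hypothetical harness for Config; paths and ids are placeholders.
package helpers;

public class ConfigExample {
    public static void main(String[] args) {
        Config config = Config.get();                          // lazy, double-checked singleton
        Config.setFramesRangeFromCommandLine(new int[] {2, 8});
        config.setStickyPathsDir("/tmp/sticky-paths");         // placeholder directory
        config.setEventSpotDir("/tmp/event-spots");            // placeholder directory

        // Pick one of 5 candidate methods for (classId=1, methodId=3); with a sticky-path
        // dir configured, the first choice is persisted and reused on later calls.
        int callIndex = config.getStickyPath(1, 3, 5);
        System.out.println("sticky path index = " + callIndex);
    }
}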
563
/*
 * Copyright 2019 Oath Holdings Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.yahoo.athenz.zts.cert.impl.crypki;

import java.util.List;

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.yahoo.athenz.common.server.cert.Priority;

@JsonInclude(JsonInclude.Include.ALWAYS)
public class X509CertificateSigningRequest {

    private KeyMeta keyMeta;
    private String csr;

    @JsonInclude(JsonInclude.Include.NON_EMPTY)
    public Integer validity;

    @JsonInclude(JsonInclude.Include.NON_EMPTY)
    public List<Integer> extKeyUsage;

    public X509CertificateSigningRequest() {
    }

    public String getCsr() {
        return csr;
    }

    public void setCsr(String csr) {
        this.csr = csr;
    }

    @JsonProperty("ext_key_usage")
    public List<Integer> getExtKeyUsage() {
        return extKeyUsage;
    }

    public void setExtKeyUsage(List<Integer> extKeyUsage) {
        this.extKeyUsage = extKeyUsage;
    }

    public void setValidity(Integer validity) {
        this.validity = validity;
    }

    public Integer getValidity() {
        return validity;
    }

    @JsonProperty("key_meta")
    public KeyMeta getKeyMeta() {
        return keyMeta;
    }

    public void setKeyMeta(KeyMeta keyMeta) {
        this.keyMeta = keyMeta;
    }

    @JsonInclude(JsonInclude.Include.NON_EMPTY)
    private Priority priority;

    public void setPriority(Priority priority) {
        this.priority = priority;
    }

    public Priority getPriority() {
        return priority;
    }
}
794
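An illustrative sketch of filling in the signing request above and rendering it with Jackson, which shows the key_meta and ext_key_usage property names produced by the annotations. KeyMeta is not shown here, so its no-arg constructor is an assumption; the CSR text, validity value, and extended-key-usage ids are placeholders.

// Hypothetical usage of X509CertificateSigningRequest; values are placeholders.
import com.fasterxml.jackson.databind.ObjectMapper;
import com.yahoo.athenz.zts.cert.impl.crypki.KeyMeta;
import com.yahoo.athenz.zts.cert.impl.crypki.X509CertificateSigningRequest;
import java.util.Arrays;

public class SigningRequestExample {
    public static void main(String[] args) throws Exception {
        X509CertificateSigningRequest request = new X509CertificateSigningRequest();
        request.setCsr("-----BEGIN CERTIFICATE REQUEST-----...");  // placeholder CSR PEM
        request.setValidity(43200);                                // placeholder validity value
        request.setExtKeyUsage(Arrays.asList(1, 2));               // placeholder ext key usage ids
        request.setKeyMeta(new KeyMeta());                         // no-arg constructor assumed

        // Serializes using the @JsonProperty / @JsonInclude rules declared above.
        System.out.println(new ObjectMapper().writeValueAsString(request));
    }
}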
1,006
/**************************************************************************** * arch/arm/src/stm32f0l0g0/stm32g0_pwr.c * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. The * ASF licenses this file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the * License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * ****************************************************************************/ /**************************************************************************** * Included Files ****************************************************************************/ #include <nuttx/config.h> #include <stdint.h> #include <stdbool.h> #include <errno.h> #include <nuttx/arch.h> #include <nuttx/irq.h> #include "arm_arch.h" #include "stm32_pwr.h" #if defined(CONFIG_STM32F0L0G0_PWR) /**************************************************************************** * Private Functions ****************************************************************************/ static inline uint32_t stm32_pwr_getreg32(uint8_t offset) { return getreg32(STM32_PWR_BASE + (uint32_t)offset); } static inline void stm32_pwr_putreg32(uint8_t offset, uint32_t value) { putreg32(value, STM32_PWR_BASE + (uint32_t)offset); } static inline void stm32_pwr_modifyreg32(uint8_t offset, uint32_t clearbits, uint32_t setbits) { modifyreg32(STM32_PWR_BASE + (uint32_t)offset, clearbits, setbits); } /**************************************************************************** * Public Functions ****************************************************************************/ void stm32_pwr_setvos(uint16_t vos) { uint16_t regval; /* The following sequence is required to program the voltage regulator * ranges: * 1. Wait until VOSF flag is cleared in Power Status register 2 (PWR_SR2). * 2. Configure the voltage scaling range by setting the VOS bits in the * PWR_CR1 register. * 3. Wait until VOSF flag is cleared in Power Status register 2 (PWR_SR2). * * No checking is performed to ensure the VOS value to be set is within the * valid range. */ while ((stm32_pwr_getreg32(STM32_PWR_SR2_OFFSET) & PWR_SR2_VOSF) != 0) { } regval = stm32_pwr_getreg32(STM32_PWR_CR1_OFFSET); regval &= ~PWR_CR1_VOS_MASK; regval |= (vos & PWR_CR1_VOS_MASK); stm32_pwr_putreg32(STM32_PWR_CR1_OFFSET, regval); while ((stm32_pwr_getreg32(STM32_PWR_SR2_OFFSET) & PWR_SR2_VOSF) != 0) { } } /* TODO Other stm32_pwr_* functions need to be implemented */ #endif /* CONFIG_STM32F0L0G0_PWR */
1,028
854
<gh_stars>100-1000
@NonNullApi
package dev.morphia.aggregation.expressions;

import com.mongodb.lang.NonNullApi;
44
14,668
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "chromecast/browser/webview/cast_window_embedder.h"

namespace chromecast {

CastWindowEmbedder::EmbedderWindowEvent::EmbedderWindowEvent() = default;

CastWindowEmbedder::EmbedderWindowEvent::~EmbedderWindowEvent() = default;

CastWindowEmbedder::CastWindowProperties::CastWindowProperties() = default;

CastWindowEmbedder::CastWindowProperties::~CastWindowProperties() = default;

CastWindowEmbedder::CastWindowProperties::CastWindowProperties(
    CastWindowProperties&& other) = default;

}  // namespace chromecast
197
380
# -*- coding: utf-8 -*- '''Chemical Engineering Design Library (ChEDL). Utilities for process modeling. Copyright (C) 2020 <NAME> <<EMAIL>> Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.''' import numpy as np import pytest from thermo import * from fluids.constants import R from math import log, exp, sqrt, log10 from fluids.numerics import linspace, derivative, logspace, assert_close, assert_close1d, assert_close2d, assert_close3d # TODO: add unit tests for solid phase identification and sorting def test_vapor_score_Tpc(): score = vapor_score_Tpc(T=300.0, Tcs=[304.2, 507.6], zs=[0.21834418746784942, 0.7816558125321506]) assert_close(score, -163.18879226903942) def test_vapor_score_Vpc(): score = vapor_score_Vpc(V=0.00011316308855449715, Vcs=[9.4e-05, 0.000368], zs=[0.21834418746784942, 0.7816558125321506]) assert_close(score, -0.000195010604079) def test_vapor_score_Tpc_weighted(): score = vapor_score_Tpc_weighted(T=300.0, Tcs=[304.2, 507.6], Vcs=[9.4e-05, 0.000368], zs=[0.9752234962374878, 0.024776503762512052]) assert_close(score, -22.60037521107) def test_vapor_score_Tpc_Vpc(): score = vapor_score_Tpc_Vpc(T=300.0, V=0.00011316308855449715, Tcs=[304.2, 507.6], Vcs=[9.4e-05, 0.000368], zs=[0.21834418746784942, 0.7816558125321506]) assert_close(score, -55.932094761) def test_vapor_score_Wilson(): # 1 component score = vapor_score_Wilson(T=300.0, P=1e6, zs=[1], Tcs=[304.2], Pcs=[7376460.0], omegas=[0.2252]) assert_close(score, -5727363.494462478) score = vapor_score_Wilson(T=206.40935716944634, P=100.0, zs=[0.5, 0.5], Tcs=[304.2, 507.6], Pcs=[7376460.0, 3025000.0], omegas=[0.2252, 0.2975]) assert_close(score, 1.074361930956633) def test_vapor_score_Poling(): assert_close(vapor_score_Poling(1.0054239121594122e-05), 1.0137457789955244) def test_vapor_score_PIP(): score = vapor_score_PIP(0.024809176851423774, 337.0119286073647, -4009021.959558917, 321440573.3615088, -13659.63987996052) assert_close(score, 0.016373735005) def test_vapor_score_Bennett_Schmidt(): assert_close(vapor_score_Bennett_Schmidt(7.558572848883679e-06), -7.558572848883679e-06) def test_vapor_score_traces(): score = vapor_score_traces(zs=[.218, .782], Tcs=[304.2, 507.6], CASs=['124-38-9', '110-54-3']) assert_close(score, 0.218) score = vapor_score_traces(zs=[.975, .025], Tcs=[304.2, 507.6], CASs=['124-38-9', '110-54-3']) assert_close(score, 0.975) def test_identity_phase_states_basic(): constants = ChemicalConstantsPackage(Tcs=[563.0, 647.14, 514.0], Vcs=[0.000274, 5.6e-05, 0.000168], Pcs=[4414000.0, 22048320.0, 6137000.0], omegas=[0.59, 0.344, 0.635], MWs=[74.1216, 
18.01528, 46.06844], CASs=['71-36-3', '7732-18-5', '64-17-5']) properties = PropertyCorrelationsPackage(constants=constants, skip_missing=True, HeatCapacityGases=[HeatCapacityGas(load_data=False, poly_fit=(50.0, 1000.0, [-3.787200194613107e-20, 1.7692887427654656e-16, -3.445247207129205e-13, 3.612771874320634e-10, -2.1953250181084466e-07, 7.707135849197655e-05, -0.014658388538054169, 1.5642629364740657, -7.614560475001724])), HeatCapacityGas(load_data=False, poly_fit=(50.0, 1000.0, [5.543665000518528e-22, -2.403756749600872e-18, 4.2166477594350336e-15, -3.7965208514613565e-12, 1.823547122838406e-09, -4.3747690853614695e-07, 5.437938301211039e-05, -0.003220061088723078, 33.32731489750759])), HeatCapacityGas(load_data=False, poly_fit=(50.0, 1000.0, [-1.162767978165682e-20, 5.4975285700787494e-17, -1.0861242757337942e-13, 1.1582703354362728e-10, -7.160627710867427e-08, 2.5392014654765875e-05, -0.004732593693568646, 0.5072291035198603, 20.037826650765965])),], ) eos_kwargs = dict(Tcs=constants.Tcs, Pcs=constants.Pcs, omegas=constants.omegas) gas = CEOSGas(SRKMIX, eos_kwargs, HeatCapacityGases=properties.HeatCapacityGases) liq = CEOSLiquid(SRKMIX, eos_kwargs, HeatCapacityGases=properties.HeatCapacityGases) # flashN = FlashVLN(constants, properties, liquids=[liq, liq], gas=gas) T, P = 361, 1e5 # flashN.flash(T=361, P=1e5, zs=[.25, 0.7, .05]).phase_count gas = gas.to(T=T, P=P, zs=[0.2384009970908655, 0.5786839935180925, 0.1829150093910419]) liq0 = liq.to(T=T, P=P, zs=[7.619975052238032e-05, 0.9989622883894993, 0.0009615118599781474]) liq1 = liq.to(T=T, P=P, zs=[0.6793120076703771, 0.19699746328631124, 0.12369052904331178]) VLL_methods_here = [VL_ID_VPC, VL_ID_TPC_VPC, VL_ID_POLING, VL_ID_PIP, VL_ID_BS, VL_ID_TRACES] LLL_methods_here = [VL_ID_TPC, VL_ID_TPC_VC_WEIGHTED, VL_ID_WILSON] for skip_solids in (True, False): for m in VLL_methods_here: sln = identity_phase_states(phases=[gas, liq0, liq1], constants=constants, correlations=properties,VL_method=m, skip_solids=skip_solids) assert sln[0] is gas assert sln[1][0] is liq0 assert sln[1][1] is liq1 assert not sln[2] for m in LLL_methods_here: sln = identity_phase_states(phases=[gas, liq0, liq1], constants=constants, correlations=properties,VL_method=m, skip_solids=skip_solids) assert sln[0] is None assert sln[1][0] is gas assert sln[1][1] is liq0 assert sln[1][2] is liq1 assert not sln[2] betas = [0.027939322463018015, 0.6139152961492583, 0.35814538138772367] phases=[gas, liq0, liq1] for skip_solids in (True, False): for m in VLL_methods_here: settings = BulkSettings(VL_ID=m) sln = identify_sort_phases(phases=phases, betas=betas, constants=constants, correlations=properties, settings=settings, skip_solids=skip_solids) assert sln[0] is gas assert sln[1][0] is liq0 assert sln[1][1] is liq1 assert not sln[2] assert_close1d(sln[3], betas) for m in LLL_methods_here: settings = BulkSettings(VL_ID=m) sln = identify_sort_phases(phases=phases, betas=betas, constants=constants, correlations=properties, settings=settings, skip_solids=skip_solids) assert sln[0] is None assert sln[1][2] is gas assert sln[1][0] is liq0 assert sln[1][1] is liq1 assert not sln[2] assert_close1d(sln[3], [0.6139152961492583, 0.35814538138772367, 0.027939322463018015]) def test_sort_phases_liquids(): from thermo.phase_identification import VL_ID_METHODS, PROP_SORT, DENSITY_MASS, DENSITY, ISOTHERMAL_COMPRESSIBILITY, HEAT_CAPACITY constants = ChemicalConstantsPackage(Tcs=[563.0, 647.14, 514.0], Vcs=[0.000274, 5.6e-05, 0.000168], Pcs=[4414000.0, 22048320.0, 6137000.0], omegas=[0.59, 
0.344, 0.635], MWs=[74.1216, 18.01528, 46.06844], CASs=['71-36-3', '7732-18-5', '64-17-5']) properties = PropertyCorrelationsPackage(constants=constants, skip_missing=True, HeatCapacityGases=[HeatCapacityGas(load_data=False, poly_fit=(50.0, 1000.0, [-3.787200194613107e-20, 1.7692887427654656e-16, -3.445247207129205e-13, 3.612771874320634e-10, -2.1953250181084466e-07, 7.707135849197655e-05, -0.014658388538054169, 1.5642629364740657, -7.614560475001724])), HeatCapacityGas(load_data=False, poly_fit=(50.0, 1000.0, [5.543665000518528e-22, -2.403756749600872e-18, 4.2166477594350336e-15, -3.7965208514613565e-12, 1.823547122838406e-09, -4.3747690853614695e-07, 5.437938301211039e-05, -0.003220061088723078, 33.32731489750759])), HeatCapacityGas(load_data=False, poly_fit=(50.0, 1000.0, [-1.162767978165682e-20, 5.4975285700787494e-17, -1.0861242757337942e-13, 1.1582703354362728e-10, -7.160627710867427e-08, 2.5392014654765875e-05, -0.004732593693568646, 0.5072291035198603, 20.037826650765965])),], ) eos_kwargs = dict(Tcs=constants.Tcs, Pcs=constants.Pcs, omegas=constants.omegas) gas = CEOSGas(SRKMIX, eos_kwargs, HeatCapacityGases=properties.HeatCapacityGases) liq = CEOSLiquid(SRKMIX, eos_kwargs, HeatCapacityGases=properties.HeatCapacityGases) # flashN = FlashVLN(constants, properties, liquids=[liq, liq], gas=gas) T, P = 361, 1e5 # flashN.flash(T=361, P=1e5, zs=[.25, 0.7, .05]).phase_count gas = gas.to(T=T, P=P, zs=[0.2384009970908655, 0.5786839935180925, 0.1829150093910419]) liq0 = liq.to(T=T, P=P, zs=[7.619975052238032e-05, 0.9989622883894993, 0.0009615118599781474]) liq1 = liq.to(T=T, P=P, zs=[0.6793120076703771, 0.19699746328631124, 0.12369052904331178]) liq0.constants = liq1.constants = constants settings = BulkSettings(liquid_sort_method=PROP_SORT, liquid_sort_prop=DENSITY_MASS, phase_sort_higher_first=False) liquids, _ = sort_phases(liquids=[liq0, liq1], solids=[], constants=constants, settings=settings) assert_close1d([i.rho_mass() for i in liquids], [650.9479399573546, 717.583719673216]) settings = BulkSettings(liquid_sort_method=PROP_SORT, liquid_sort_prop=DENSITY_MASS, phase_sort_higher_first=True) liquids, _ = sort_phases(liquids=[liq0, liq1], solids=[], constants=constants, settings=settings) assert_close1d([i.rho_mass() for i in liquids], [717.583719673216, 650.9479399573546]) settings = BulkSettings(liquid_sort_method=PROP_SORT, liquid_sort_prop=DENSITY, phase_sort_higher_first=True) liquids, _ = sort_phases(liquids=[liq0, liq1], solids=[], constants=constants, settings=settings) assert_close1d([i.rho_mass() for i in liquids], [717.583719673216, 650.9479399573546]) settings = BulkSettings(liquid_sort_method=PROP_SORT, liquid_sort_prop=DENSITY, phase_sort_higher_first=False) liquids, _ = sort_phases(liquids=[liq0, liq1], solids=[], constants=constants, settings=settings) assert_close1d([i.rho_mass() for i in liquids], [650.9479399573546, 717.583719673216]) settings = BulkSettings(liquid_sort_method=PROP_SORT, liquid_sort_prop=ISOTHERMAL_COMPRESSIBILITY, phase_sort_higher_first=False) liquids, _ = sort_phases(liquids=[liq0, liq1], solids=[], constants=constants, settings=settings) assert_close1d([i.rho_mass() for i in liquids], [717.583719673216, 650.9479399573546]) settings = BulkSettings(liquid_sort_method=PROP_SORT, liquid_sort_prop=ISOTHERMAL_COMPRESSIBILITY, phase_sort_higher_first=True) liquids, _ = sort_phases(liquids=[liq0, liq1], solids=[], constants=constants, settings=settings) assert_close1d([i.rho_mass() for i in liquids], [650.9479399573546, 717.583719673216]) settings = 
BulkSettings(liquid_sort_method=PROP_SORT, liquid_sort_prop=HEAT_CAPACITY, phase_sort_higher_first=False) liquids, _ = sort_phases(liquids=[liq0, liq1], solids=[], constants=constants, settings=settings) assert_close1d([i.rho_mass() for i in liquids], [717.583719673216, 650.9479399573546]) settings = BulkSettings(liquid_sort_method=PROP_SORT, liquid_sort_prop=HEAT_CAPACITY, phase_sort_higher_first=True) liquids, _ = sort_phases(liquids=[liq0, liq1], solids=[], constants=constants, settings=settings) assert_close1d([i.rho_mass() for i in liquids], [650.9479399573546, 717.583719673216]) # Water settings settings = BulkSettings(liquid_sort_method=PROP_SORT, liquid_sort_prop=DENSITY_MASS, phase_sort_higher_first=False, water_sort=WATER_FIRST) liquids, _ = sort_phases(liquids=[liq0, liq1], solids=[], constants=constants, settings=settings) assert_close(liquids[0].zs[1], 0.9989622883894993) settings = BulkSettings(liquid_sort_method=PROP_SORT, liquid_sort_prop=DENSITY_MASS, phase_sort_higher_first=True, water_sort=WATER_LAST) liquids, _ = sort_phases(liquids=[liq0, liq1], solids=[], constants=constants, settings=settings) assert_close(liquids[1].zs[1], 0.9989622883894993) settings = BulkSettings(liquid_sort_method=PROP_SORT, liquid_sort_prop=DENSITY_MASS, phase_sort_higher_first=True, water_sort=WATER_NOT_SPECIAL) liquids, _ = sort_phases(liquids=[liq0, liq1], solids=[], constants=constants, settings=settings) assert_close(liquids[0].zs[1], 0.9989622883894993) settings = BulkSettings(liquid_sort_method=PROP_SORT, liquid_sort_prop=DENSITY_MASS, phase_sort_higher_first=False, water_sort=WATER_NOT_SPECIAL) liquids, _ = sort_phases(liquids=[liq0, liq1], solids=[], constants=constants, settings=settings) assert_close(liquids[1].zs[1], 0.9989622883894993)
6,282
679
<gh_stars>100-1000 /************************************************************** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * *************************************************************/ #include "precompiled_svtools.hxx" #include "dummypanel.hxx" #include "toolpanelcollection.hxx" #include "paneldecklisteners.hxx" #include "toolpaneldeckpeer.hxx" #include "svtools/toolpanel/toolpaneldeck.hxx" #include "svtools/toolpanel/tablayouter.hxx" #include "svtools/toolpanel/drawerlayouter.hxx" /** === begin UNO includes === **/ #include <com/sun/star/accessibility/XAccessible.hpp> #include <com/sun/star/accessibility/AccessibleRole.hpp> /** === end UNO includes === **/ #include <tools/diagnose_ex.h> #include <boost/optional.hpp> //........................................................................ namespace svt { //........................................................................ /** === begin UNO using === **/ using ::com::sun::star::uno::Reference; using ::com::sun::star::accessibility::XAccessible; using ::com::sun::star::awt::XWindowPeer; using ::com::sun::star::uno::UNO_SET_THROW; /** === end UNO using === **/ namespace AccessibleRole = ::com::sun::star::accessibility::AccessibleRole; enum DeckAction { /// activates the first panel ACTION_ACTIVATE_FIRST, // activates the panel after the currently active panel ACTION_ACTIVATE_NEXT, // activates the panel before the currently active panel ACTION_ACTIVATE_PREV, // activates the last panel ACTION_ACTIVATE_LAST, // toggles the focus between the active panel and the panel selector ACTION_TOGGLE_FOCUS, }; //==================================================================== //= ToolPanelDeck_Impl //==================================================================== class ToolPanelDeck_Impl : public IToolPanelDeckListener { public: ToolPanelDeck_Impl( ToolPanelDeck& i_rDeck ) :m_rDeck( i_rDeck ) ,m_aPanelAnchor( &i_rDeck, WB_DIALOGCONTROL | WB_CHILDDLGCTRL ) ,m_aPanels() ,m_pDummyPanel( new DummyPanel ) ,m_pLayouter() ,m_bInDtor( false ) ,m_pAccessibleParent( NULL ) { m_aPanels.AddListener( *this ); m_aPanelAnchor.Show(); m_aPanelAnchor.SetAccessibleRole( AccessibleRole::PANEL ); } ~ToolPanelDeck_Impl() { m_bInDtor = true; } PDeckLayouter GetLayouter() const { return m_pLayouter; } void SetLayouter( const PDeckLayouter& i_pNewLayouter ); Window& GetPanelWindowAnchor() { return m_aPanelAnchor; } const Window& GetPanelWindowAnchor() const { return m_aPanelAnchor; } bool IsDead() const { return m_bInDtor; } /// notifies our listeners that we're going to die. 
Only to be called from with our anti-impl's destructor void NotifyDying() { m_aPanels.RemoveListener( *this ); m_aListeners.Dying(); } // IToolPanelDeck equivalents size_t GetPanelCount() const; PToolPanel GetPanel( const size_t i_nPos ) const; ::boost::optional< size_t > GetActivePanel() const; void ActivatePanel( const ::boost::optional< size_t >& i_rPanel ); size_t InsertPanel( const PToolPanel& i_pPanel, const size_t i_nPosition ); PToolPanel RemovePanel( const size_t i_nPosition ); void AddListener( IToolPanelDeckListener& i_rListener ); void RemoveListener( IToolPanelDeckListener& i_rListener ); /// re-layouts everything void LayoutAll() { ImplDoLayout(); } void DoAction( const DeckAction i_eAction ); bool FocusActivePanel(); void SetAccessibleParentWindow( Window* i_pAccessibleParent ); Window* GetAccessibleParentWindow() const { return m_pAccessibleParent; } protected: // IToolPanelDeckListener virtual void PanelInserted( const PToolPanel& i_pPanel, const size_t i_nPosition ); virtual void PanelRemoved( const size_t i_nPosition ); virtual void ActivePanelChanged( const ::boost::optional< size_t >& i_rOldActive, const ::boost::optional< size_t >& i_rNewActive ); virtual void LayouterChanged( const PDeckLayouter& i_rNewLayouter ); virtual void Dying(); private: void ImplDoLayout(); PToolPanel GetActiveOrDummyPanel_Impl(); private: ToolPanelDeck& m_rDeck; Window m_aPanelAnchor; ToolPanelCollection m_aPanels; PToolPanel m_pDummyPanel; PanelDeckListeners m_aListeners; PDeckLayouter m_pLayouter; bool m_bInDtor; Window* m_pAccessibleParent; }; //-------------------------------------------------------------------- PToolPanel ToolPanelDeck_Impl::GetActiveOrDummyPanel_Impl() { ::boost::optional< size_t > aActivePanel( m_aPanels.GetActivePanel() ); if ( !aActivePanel ) return m_pDummyPanel; return m_aPanels.GetPanel( *aActivePanel ); } //-------------------------------------------------------------------- void ToolPanelDeck_Impl::SetLayouter( const PDeckLayouter& i_pNewLayouter ) { ENSURE_OR_RETURN_VOID( i_pNewLayouter.get(), "invalid layouter" ); if ( m_pLayouter.get() ) m_pLayouter->Destroy(); m_pLayouter = i_pNewLayouter; ImplDoLayout(); m_aListeners.LayouterChanged( m_pLayouter ); } //-------------------------------------------------------------------- size_t ToolPanelDeck_Impl::GetPanelCount() const { return m_aPanels.GetPanelCount(); } //-------------------------------------------------------------------- PToolPanel ToolPanelDeck_Impl::GetPanel( const size_t i_nPos ) const { return m_aPanels.GetPanel( i_nPos ); } //-------------------------------------------------------------------- ::boost::optional< size_t > ToolPanelDeck_Impl::GetActivePanel() const { return m_aPanels.GetActivePanel(); } //-------------------------------------------------------------------- void ToolPanelDeck_Impl::ActivatePanel( const ::boost::optional< size_t >& i_rPanel ) { m_aPanels.ActivatePanel( i_rPanel ); } //-------------------------------------------------------------------- size_t ToolPanelDeck_Impl::InsertPanel( const PToolPanel& i_pPanel, const size_t i_nPosition ) { return m_aPanels.InsertPanel( i_pPanel, i_nPosition ); } //-------------------------------------------------------------------- PToolPanel ToolPanelDeck_Impl::RemovePanel( const size_t i_nPosition ) { return m_aPanels.RemovePanel( i_nPosition ); } //-------------------------------------------------------------------- void ToolPanelDeck_Impl::ImplDoLayout() { const Rectangle aDeckPlayground( Point(), m_rDeck.GetOutputSizePixel() ); // ask the 
layouter what is left for our panel, and position the panel container window appropriately Rectangle aPlaygroundArea( aDeckPlayground ); OSL_ENSURE( m_pLayouter.get(), "ToolPanelDeck_Impl::ImplDoLayout: no layouter!" ); if ( m_pLayouter.get() ) { aPlaygroundArea = m_pLayouter->Layout( aDeckPlayground ); } m_aPanelAnchor.SetPosSizePixel( aPlaygroundArea.TopLeft(), aPlaygroundArea.GetSize() ); // position the active panel const PToolPanel pActive( GetActiveOrDummyPanel_Impl() ); pActive->SetSizePixel( m_aPanelAnchor.GetOutputSizePixel() ); } //-------------------------------------------------------------------- void ToolPanelDeck_Impl::AddListener( IToolPanelDeckListener& i_rListener ) { m_aListeners.AddListener( i_rListener ); } //-------------------------------------------------------------------- void ToolPanelDeck_Impl::RemoveListener( IToolPanelDeckListener& i_rListener ) { m_aListeners.RemoveListener( i_rListener ); } //-------------------------------------------------------------------- void ToolPanelDeck_Impl::DoAction( const DeckAction i_eAction ) { const size_t nPanelCount( m_aPanels.GetPanelCount() ); ::boost::optional< size_t > aActivatePanel; ::boost::optional< size_t > aCurrentPanel( GetActivePanel() ); switch ( i_eAction ) { case ACTION_ACTIVATE_FIRST: if ( nPanelCount > 0 ) aActivatePanel = 0; break; case ACTION_ACTIVATE_PREV: if ( !aCurrentPanel && ( nPanelCount > 0 ) ) aActivatePanel = nPanelCount - 1; else if ( !!aCurrentPanel && ( *aCurrentPanel > 0 ) ) aActivatePanel = *aCurrentPanel - 1; break; case ACTION_ACTIVATE_NEXT: if ( !aCurrentPanel && ( nPanelCount > 0 ) ) aActivatePanel = 0; else if ( !!aCurrentPanel && ( *aCurrentPanel < nPanelCount - 1 ) ) aActivatePanel = *aCurrentPanel + 1; break; case ACTION_ACTIVATE_LAST: if ( nPanelCount > 0 ) aActivatePanel = nPanelCount - 1; break; case ACTION_TOGGLE_FOCUS: { PToolPanel pActivePanel( GetActiveOrDummyPanel_Impl() ); if ( !m_aPanelAnchor.HasChildPathFocus() ) pActivePanel->GrabFocus(); else GetLayouter()->SetFocusToPanelSelector(); } break; } if ( !!aActivatePanel ) { ActivatePanel( aActivatePanel ); } } //-------------------------------------------------------------------- bool ToolPanelDeck_Impl::FocusActivePanel() { ::boost::optional< size_t > aActivePanel( m_aPanels.GetActivePanel() ); if ( !aActivePanel ) return false; PToolPanel pActivePanel( m_aPanels.GetPanel( *aActivePanel ) ); pActivePanel->GrabFocus(); return true; } //-------------------------------------------------------------------- void ToolPanelDeck_Impl::PanelInserted( const PToolPanel& i_pPanel, const size_t i_nPosition ) { // multiplex to our own listeners m_aListeners.PanelInserted( i_pPanel, i_nPosition ); } //-------------------------------------------------------------------- void ToolPanelDeck_Impl::PanelRemoved( const size_t i_nPosition ) { // multiplex to our own listeners m_aListeners.PanelRemoved( i_nPosition ); } //-------------------------------------------------------------------- void ToolPanelDeck_Impl::ActivePanelChanged( const ::boost::optional< size_t >& i_rOldActive, const ::boost::optional< size_t >& i_rNewActive ) { // hide the old panel if ( !!i_rOldActive ) { const PToolPanel pOldActive( m_aPanels.GetPanel( *i_rOldActive ) ); pOldActive->Deactivate(); } // position and show the new panel const PToolPanel pNewActive( !i_rNewActive ? 
m_pDummyPanel : m_aPanels.GetPanel( *i_rNewActive ) ); pNewActive->Activate( m_aPanelAnchor ); pNewActive->GrabFocus(); // resize the panel (cannot guarantee it has ever been resized before pNewActive->SetSizePixel( m_aPanelAnchor.GetOutputSizePixel() ); // multiplex to our own listeners m_aListeners.ActivePanelChanged( i_rOldActive, i_rNewActive ); } //-------------------------------------------------------------------- void ToolPanelDeck_Impl::LayouterChanged( const PDeckLayouter& i_rNewLayouter ) { // not interested in (void)i_rNewLayouter; } //-------------------------------------------------------------------- void ToolPanelDeck_Impl::Dying() { // not interested in. Since the ToolPanelCollection is our member, this just means we ourself // are dying, and we already sent this notification in our dtor. } //-------------------------------------------------------------------- void ToolPanelDeck_Impl::SetAccessibleParentWindow( Window* i_pAccessibleParent ) { m_pAccessibleParent = i_pAccessibleParent; } //==================================================================== //= ToolPanelDeck //==================================================================== //-------------------------------------------------------------------- ToolPanelDeck::ToolPanelDeck( Window& i_rParent, const WinBits i_nStyle ) :Control( &i_rParent, i_nStyle ) ,m_pImpl( new ToolPanelDeck_Impl( *this ) ) { // use a default layouter // SetLayouter( PDeckLayouter( new TabDeckLayouter( *this, *this, TABS_RIGHT, TABITEM_IMAGE_AND_TEXT ) ) ); SetLayouter( PDeckLayouter( new DrawerDeckLayouter( *this, *this ) ) ); } //-------------------------------------------------------------------- ToolPanelDeck::~ToolPanelDeck() { m_pImpl->NotifyDying(); GetLayouter()->Destroy(); Hide(); for ( size_t i=0; i<GetPanelCount(); ++i ) { PToolPanel pPanel( GetPanel( i ) ); pPanel->Dispose(); } } //-------------------------------------------------------------------- size_t ToolPanelDeck::GetPanelCount() const { return m_pImpl->GetPanelCount(); } //-------------------------------------------------------------------- PToolPanel ToolPanelDeck::GetPanel( const size_t i_nPos ) const { return m_pImpl->GetPanel( i_nPos ); } //-------------------------------------------------------------------- ::boost::optional< size_t > ToolPanelDeck::GetActivePanel() const { return m_pImpl->GetActivePanel(); } //-------------------------------------------------------------------- void ToolPanelDeck::ActivatePanel( const ::boost::optional< size_t >& i_rPanel ) { m_pImpl->ActivatePanel( i_rPanel ); } //-------------------------------------------------------------------- size_t ToolPanelDeck::InsertPanel( const PToolPanel& i_pPanel, const size_t i_nPosition ) { return m_pImpl->InsertPanel( i_pPanel, i_nPosition ); } //-------------------------------------------------------------------- PToolPanel ToolPanelDeck::RemovePanel( const size_t i_nPosition ) { return m_pImpl->RemovePanel( i_nPosition ); } //-------------------------------------------------------------------- PDeckLayouter ToolPanelDeck::GetLayouter() const { return m_pImpl->GetLayouter(); } //-------------------------------------------------------------------- void ToolPanelDeck::SetLayouter( const PDeckLayouter& i_pNewLayouter ) { return m_pImpl->SetLayouter( i_pNewLayouter ); } //-------------------------------------------------------------------- void ToolPanelDeck::AddListener( IToolPanelDeckListener& i_rListener ) { m_pImpl->AddListener( i_rListener ); } 
//-------------------------------------------------------------------- void ToolPanelDeck::RemoveListener( IToolPanelDeckListener& i_rListener ) { m_pImpl->RemoveListener( i_rListener ); } //-------------------------------------------------------------------- Window& ToolPanelDeck::GetPanelWindowAnchor() { return m_pImpl->GetPanelWindowAnchor(); } //-------------------------------------------------------------------- const Window& ToolPanelDeck::GetPanelWindowAnchor() const { return m_pImpl->GetPanelWindowAnchor(); } //-------------------------------------------------------------------- void ToolPanelDeck::Resize() { Control::Resize(); m_pImpl->LayoutAll(); } //-------------------------------------------------------------------- long ToolPanelDeck::Notify( NotifyEvent& i_rNotifyEvent ) { bool bHandled = false; if ( i_rNotifyEvent.GetType() == EVENT_KEYINPUT ) { const KeyEvent* pEvent = i_rNotifyEvent.GetKeyEvent(); const KeyCode& rKeyCode = pEvent->GetKeyCode(); if ( rKeyCode.GetModifier() == KEY_MOD1 ) { bHandled = true; switch ( rKeyCode.GetCode() ) { case KEY_HOME: m_pImpl->DoAction( ACTION_ACTIVATE_FIRST ); break; case KEY_PAGEUP: m_pImpl->DoAction( ACTION_ACTIVATE_PREV ); break; case KEY_PAGEDOWN: m_pImpl->DoAction( ACTION_ACTIVATE_NEXT ); break; case KEY_END: m_pImpl->DoAction( ACTION_ACTIVATE_LAST ); break; default: bHandled = false; break; } } else if ( rKeyCode.GetModifier() == ( KEY_MOD1 | KEY_SHIFT ) ) { if ( rKeyCode.GetCode() == KEY_E ) { m_pImpl->DoAction( ACTION_TOGGLE_FOCUS ); bHandled = true; } } } if ( bHandled ) return 1; return Control::Notify( i_rNotifyEvent ); } //-------------------------------------------------------------------- void ToolPanelDeck::GetFocus() { Control::GetFocus(); if ( m_pImpl->IsDead() ) return; if ( !m_pImpl->FocusActivePanel() ) { PDeckLayouter pLayouter( GetLayouter() ); ENSURE_OR_RETURN_VOID( pLayouter.get(), "ToolPanelDeck::GetFocus: no layouter?!" ); pLayouter->SetFocusToPanelSelector(); } } //-------------------------------------------------------------------- void ToolPanelDeck::SetAccessibleParentWindow( Window* i_pAccessibleParent ) { m_pImpl->SetAccessibleParentWindow( i_pAccessibleParent ); } //-------------------------------------------------------------------- Window* ToolPanelDeck::GetAccessibleParentWindow() const { Window* pAccessibleParent( m_pImpl->GetAccessibleParentWindow() ); if ( !pAccessibleParent ) pAccessibleParent = Window::GetAccessibleParentWindow(); return pAccessibleParent; } //-------------------------------------------------------------------- Reference< XWindowPeer > ToolPanelDeck::GetComponentInterface( sal_Bool i_bCreate ) { Reference< XWindowPeer > xWindowPeer( Control::GetComponentInterface( sal_False ) ); if ( !xWindowPeer.is() && i_bCreate ) { xWindowPeer.set( new ToolPanelDeckPeer( *this ) ); SetComponentInterface( xWindowPeer ); } return xWindowPeer; } //........................................................................ } // namespace svt //........................................................................
8,053
1,338
<reponame>Kirishikesan/haiku<filename>src/apps/debugger/user_interface/cli/commands/CliVariablesCommand.cpp
/*
 * Copyright 2012, <NAME>, <EMAIL>.
 * Distributed under the terms of the MIT License.
 */

#include "CliVariablesCommand.h"

#include <stdio.h>

#include <AutoLocker.h>

#include "CliContext.h"
#include "Team.h"
#include "ValueNode.h"
#include "ValueNodeContainer.h"
#include "ValueNodeManager.h"


CliVariablesCommand::CliVariablesCommand()
    :
    CliCommand("show current frame variables",
        "%s\n"
        "Prints the parameters and variables of the current frame, if "
        "available.")
{
}


void
CliVariablesCommand::Execute(int argc, const char* const* argv,
    CliContext& context)
{
    if (argc > 1) {
        PrintUsage(argv[0]);
        return;
    }

    ValueNodeManager* manager = context.GetValueNodeManager();
    ValueNodeContainer* container = manager->GetContainer();
    AutoLocker<ValueNodeContainer> containerLocker(container);

    if (container == NULL || container->CountChildren() == 0) {
        printf("No variables available.\n");
        return;
    }

    printf("Variables:\n");
    for (int32 i = 0; ValueNodeChild* child = container->ChildAt(i); i++) {
        printf(" %s\n", child->Name().String());
    }
}
417
495
<filename>include/tao/json/events/prefer_unsigned.hpp
// Copyright (c) 2017-2021 Dr. <NAME> and <NAME>
// Please see LICENSE for license or visit https://github.com/taocpp/json/

#ifndef TAO_JSON_EVENTS_PREFER_UNSIGNED_HPP
#define TAO_JSON_EVENTS_PREFER_UNSIGNED_HPP

#include <cstdint>

namespace tao::json::events
{
   template< typename Consumer >
   struct prefer_unsigned
      : Consumer
   {
      using Consumer::Consumer;

      using Consumer::number;

      void number( const std::int64_t v )
      {
         if( v >= 0 ) {
            Consumer::number( std::uint64_t( v ) );
         }
         else {
            Consumer::number( v );
         }
      }
   };

}  // namespace tao::json::events

#endif
318
1,073
<reponame>oubotong/Armariris //===--- AttributeList.cpp --------------------------------------*- C++ -*-===// // // The LLVM Compiler Infrastructure // // This file is distributed under the University of Illinois Open Source // License. See LICENSE.TXT for details. // //===----------------------------------------------------------------------===// // // This file defines the AttributeList class implementation // //===----------------------------------------------------------------------===// #include "clang/Sema/AttributeList.h" #include "clang/AST/ASTContext.h" #include "clang/AST/DeclCXX.h" #include "clang/AST/DeclTemplate.h" #include "clang/AST/Expr.h" #include "clang/Basic/IdentifierTable.h" #include "clang/Basic/TargetInfo.h" #include "clang/Sema/SemaInternal.h" #include "llvm/ADT/SmallString.h" #include "llvm/ADT/StringSwitch.h" using namespace clang; IdentifierLoc *IdentifierLoc::create(ASTContext &Ctx, SourceLocation Loc, IdentifierInfo *Ident) { IdentifierLoc *Result = new (Ctx) IdentifierLoc; Result->Loc = Loc; Result->Ident = Ident; return Result; } size_t AttributeList::allocated_size() const { if (IsAvailability) return AttributeFactory::AvailabilityAllocSize; else if (IsTypeTagForDatatype) return AttributeFactory::TypeTagForDatatypeAllocSize; else if (IsProperty) return AttributeFactory::PropertyAllocSize; return (sizeof(AttributeList) + NumArgs * sizeof(ArgsUnion)); } AttributeFactory::AttributeFactory() { // Go ahead and configure all the inline capacity. This is just a memset. FreeLists.resize(InlineFreeListsCapacity); } AttributeFactory::~AttributeFactory() {} static size_t getFreeListIndexForSize(size_t size) { assert(size >= sizeof(AttributeList)); assert((size % sizeof(void*)) == 0); return ((size - sizeof(AttributeList)) / sizeof(void*)); } void *AttributeFactory::allocate(size_t size) { // Check for a previously reclaimed attribute. size_t index = getFreeListIndexForSize(size); if (index < FreeLists.size()) { if (AttributeList *attr = FreeLists[index]) { FreeLists[index] = attr->NextInPool; return attr; } } // Otherwise, allocate something new. return Alloc.Allocate(size, llvm::AlignOf<AttributeFactory>::Alignment); } void AttributeFactory::reclaimPool(AttributeList *cur) { assert(cur && "reclaiming empty pool!"); do { // Read this here, because we're going to overwrite NextInPool // when we toss 'cur' into the appropriate queue. AttributeList *next = cur->NextInPool; size_t size = cur->allocated_size(); size_t freeListIndex = getFreeListIndexForSize(size); // Expand FreeLists to the appropriate size, if required. if (freeListIndex >= FreeLists.size()) FreeLists.resize(freeListIndex+1); // Add 'cur' to the appropriate free-list. cur->NextInPool = FreeLists[freeListIndex]; FreeLists[freeListIndex] = cur; cur = next; } while (cur); } void AttributePool::takePool(AttributeList *pool) { assert(pool); // Fast path: this pool is empty. if (!Head) { Head = pool; return; } // Reverse the pool onto the current head. This optimizes for the // pattern of pulling a lot of pools into a single pool. do { AttributeList *next = pool->NextInPool; pool->NextInPool = Head; Head = pool; pool = next; } while (pool); } #include "clang/Sema/AttrParsedAttrKinds.inc" static StringRef normalizeAttrName(StringRef AttrName, StringRef ScopeName, AttributeList::Syntax SyntaxUsed) { // Normalize the attribute name, __foo__ becomes foo. This is only allowable // for GNU attributes. 
bool IsGNU = SyntaxUsed == AttributeList::AS_GNU || (SyntaxUsed == AttributeList::AS_CXX11 && ScopeName == "gnu"); if (IsGNU && AttrName.size() >= 4 && AttrName.startswith("__") && AttrName.endswith("__")) AttrName = AttrName.slice(2, AttrName.size() - 2); return AttrName; } AttributeList::Kind AttributeList::getKind(const IdentifierInfo *Name, const IdentifierInfo *ScopeName, Syntax SyntaxUsed) { StringRef AttrName = Name->getName(); SmallString<64> FullName; if (ScopeName) FullName += ScopeName->getName(); AttrName = normalizeAttrName(AttrName, FullName, SyntaxUsed); // Ensure that in the case of C++11 attributes, we look for '::foo' if it is // unscoped. if (ScopeName || SyntaxUsed == AS_CXX11) FullName += "::"; FullName += AttrName; return ::getAttrKind(FullName, SyntaxUsed); } unsigned AttributeList::getAttributeSpellingListIndex() const { // Both variables will be used in tablegen generated // attribute spell list index matching code. StringRef Scope = ScopeName ? ScopeName->getName() : ""; StringRef Name = normalizeAttrName(AttrName->getName(), Scope, (AttributeList::Syntax)SyntaxUsed); #include "clang/Sema/AttrSpellingListIndex.inc" } struct ParsedAttrInfo { unsigned NumArgs : 4; unsigned OptArgs : 4; unsigned HasCustomParsing : 1; unsigned IsTargetSpecific : 1; unsigned IsType : 1; unsigned IsStmt : 1; unsigned IsKnownToGCC : 1; bool (*DiagAppertainsToDecl)(Sema &S, const AttributeList &Attr, const Decl *); bool (*DiagLangOpts)(Sema &S, const AttributeList &Attr); bool (*ExistsInTarget)(const TargetInfo &Target); unsigned (*SpellingIndexToSemanticSpelling)(const AttributeList &Attr); }; namespace { #include "clang/Sema/AttrParsedAttrImpl.inc" } static const ParsedAttrInfo &getInfo(const AttributeList &A) { return AttrInfoMap[A.getKind()]; } unsigned AttributeList::getMinArgs() const { return getInfo(*this).NumArgs; } unsigned AttributeList::getMaxArgs() const { return getMinArgs() + getInfo(*this).OptArgs; } bool AttributeList::hasCustomParsing() const { return getInfo(*this).HasCustomParsing; } bool AttributeList::diagnoseAppertainsTo(Sema &S, const Decl *D) const { return getInfo(*this).DiagAppertainsToDecl(S, *this, D); } bool AttributeList::diagnoseLangOpts(Sema &S) const { return getInfo(*this).DiagLangOpts(S, *this); } bool AttributeList::isTargetSpecificAttr() const { return getInfo(*this).IsTargetSpecific; } bool AttributeList::isTypeAttr() const { return getInfo(*this).IsType; } bool AttributeList::isStmtAttr() const { return getInfo(*this).IsStmt; } bool AttributeList::existsInTarget(const TargetInfo &Target) const { return getInfo(*this).ExistsInTarget(Target); } bool AttributeList::isKnownToGCC() const { return getInfo(*this).IsKnownToGCC; } unsigned AttributeList::getSemanticSpelling() const { return getInfo(*this).SpellingIndexToSemanticSpelling(*this); } bool AttributeList::hasVariadicArg() const { // If the attribute has the maximum number of optional arguments, we will // claim that as being variadic. If we someday get an attribute that // legitimately bumps up against that maximum, we can use another bit to track // whether it's truly variadic or not. return getInfo(*this).OptArgs == 15; }
2,541
1,169
<filename>appinventor/appengine/src/com/google/appinventor/server/OdeServlet.java
// -*- mode: java; c-basic-offset: 2; -*-
// Copyright 2009-2011 Google, All Rights reserved
// Copyright 2011-2012 MIT, All rights reserved
// Released under the Apache License, Version 2.0
// http://www.apache.org/licenses/LICENSE-2.0

package com.google.appinventor.server;

import com.google.appinventor.shared.rpc.user.UserInfoProvider;

import javax.servlet.http.HttpServlet;

/**
 * Class supporting ODE servlets, which ODE servlets
 * should subclass instead of {@link javax.servlet.http.HttpServlet}.
 * TODO(user): This class used to handle logging. For now it is left here
 * as a placeholder in case we need to do something beyond what
 * {@link javax.servlet.http.HttpServlet} would do.
 *
 * <p>The class {@link OdeRemoteServiceServlet} serves an analogous purpose for
 * GWT RPCs.
 */
public abstract class OdeServlet extends HttpServlet {
  protected final transient UserInfoProvider userInfoProvider = LocalUser.getInstance();
}
323
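Since OdeServlet is a thin abstract base over HttpServlet, a concrete servlet only needs to extend it and override the usual HttpServlet hooks. The sketch below is illustrative only: the servlet name, the response body, and the getUserId() accessor on UserInfoProvider are assumptions, not part of the file above.

// Hypothetical OdeServlet subclass showing the intended usage pattern.
package com.google.appinventor.server;

import java.io.IOException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

public class PingServlet extends OdeServlet {
  @Override
  protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
    // userInfoProvider is inherited from OdeServlet; getUserId() is assumed here.
    resp.setContentType("text/plain");
    resp.getWriter().println("user id: " + userInfoProvider.getUserId());
  }
}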
364
<filename>bither-android/src/net/bither/ui/base/SyncProgressView.java /* * Copyright 2014 http://Bither.net * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.bither.ui.base; import android.animation.ObjectAnimator; import android.content.Context; import android.util.AttributeSet; import android.view.View; import android.widget.FrameLayout; import android.widget.ImageView; import net.bither.R; import net.bither.util.LogUtil; public class SyncProgressView extends FrameLayout { private static final int AnimationDuration = 500; private ImageView iv; private double progress; private ObjectAnimator animator; public SyncProgressView(Context context) { super(context); initView(); } public SyncProgressView(Context context, AttributeSet attrs) { super(context, attrs); initView(); } public SyncProgressView(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); initView(); } private void initView() { removeAllViews(); iv = new ImageView(getContext()); iv.setBackgroundResource(R.drawable.sync_progress_foreground); addView(iv, 0, LayoutParams.WRAP_CONTENT); } public void setProgress(final double progress) { removeCallbacks(delayedShowProgress); removeCallbacks(delayHide); this.progress = progress; LogUtil.d("progress", "progress:" + progress); if (progress >= 0 && progress <= 1) { if (getWidth() <= 0) { postDelayed(delayedShowProgress, 100); return; } double p = Math.max(Math.min(progress, 1.0f), 0.2f); if(animator != null && animator.isRunning()){ animator.cancel(); } animator = ObjectAnimator.ofInt(new WrapLayoutParamsForAnimator(iv), "width", (int) (p * getWidth())); animator.setDuration(AnimationDuration); animator.start(); setVisibility(View.VISIBLE); } if(progress < 0 || progress >= 1) { if (getVisibility() == VISIBLE) { postDelayed(delayHide, AnimationDuration); } else { setVisibility(View.GONE); } } } private Runnable delayHide = new Runnable() { @Override public void run() { if(progress < 0 || progress >= 1) { setVisibility(View.GONE); } } }; private Runnable delayedShowProgress = new Runnable() { @Override public void run() { setVisibility(View.VISIBLE); setProgress(progress); LogUtil.d("progress", "postDelayed:" + progress); } }; }
1,081
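A small caller sketch for SyncProgressView: the view is created programmatically and driven from a sync callback. The enclosing Activity, its sizing, and the callback name are placeholders; only setProgress and the public constructors from the class above are used.

// Hypothetical Android caller for SyncProgressView; layout values are placeholders.
import android.app.Activity;
import android.os.Bundle;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import net.bither.ui.base.SyncProgressView;

public class SyncActivityExample extends Activity {
    private SyncProgressView progressView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        progressView = new SyncProgressView(this);
        // Pin the bar to the top of the window, 8px tall (placeholder sizing).
        addContentView(progressView,
                new FrameLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, 8));
        progressView.setProgress(0.2);          // values in [0, 1] animate the bar width
    }

    // Called from the wallet's sync listener (assumed callback name).
    void onSyncProgress(double progress) {
        progressView.setProgress(progress);     // >= 1 or < 0 hides the bar
    }
}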
372
/* * Copyright (c) BeyondTrust Software. All rights reserved. */ /* * Copyright (c) 2007, Novell, Inc. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. Neither the name of the Novell, Inc. nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ #include <config.h> #include "dcethread-private.h" #include "dcethread-util.h" #include "dcethread-debug.h" #ifdef API int dcethread_cond_wait(dcethread_cond *cond, dcethread_mutex *mutex) { int ret = 0; int (*interrupt_old)(dcethread*, void*) = NULL; void *data_old = NULL; condwait_info info; info.cond = cond; info.mutex = mutex; if (dcethread__begin_block(dcethread__self(), dcethread__interrupt_condwait, &info, &interrupt_old, &data_old)) { dcethread__dispatchinterrupt(dcethread__self()); return dcethread__set_errno(EINTR); } mutex->owner = (pthread_t) -1; ret = dcethread__set_errno(pthread_cond_wait(cond, (pthread_mutex_t*) &mutex->mutex)); mutex->owner = pthread_self(); if (dcethread__end_block(dcethread__self(), interrupt_old, data_old)) { dcethread__dispatchinterrupt(dcethread__self()); return dcethread__set_errno(EINTR); } return dcethread__set_errno(ret); } int dcethread_cond_wait_throw(dcethread_cond *cond, dcethread_mutex *mutex) { DCETHREAD_WRAP_THROW(dcethread_cond_wait(cond, mutex)); } #endif /* API */ #ifdef TEST #include "dcethread-test.h" static void* basic_thread(void* data) { volatile int interrupt_caught = 0; dcethread_cond cond; dcethread_mutex mutex; MU_TRY_DCETHREAD( dcethread_mutex_init(&mutex, NULL) ); MU_TRY_DCETHREAD( dcethread_cond_init(&cond, NULL) ); DCETHREAD_TRY { MU_ASSERT(!interrupt_caught); MU_TRY_DCETHREAD( dcethread_mutex_lock (&mutex) ); while (1) { MU_TRY_DCETHREAD( dcethread_cond_wait (&cond, &mutex) ); } } DCETHREAD_CATCH(dcethread_interrupt_e) { MU_ASSERT(!interrupt_caught); interrupt_caught = 1; } DCETHREAD_FINALLY { dcethread_mutex_unlock (&mutex); } DCETHREAD_ENDTRY; MU_ASSERT(interrupt_caught); return NULL; } MU_TEST(dcethread_cond_wait, interrupt_pre) { dcethread* thread; MU_TRY_DCETHREAD( dcethread_create(&thread, NULL, basic_thread, NULL) ); MU_TRY_DCETHREAD( dcethread_interrupt(thread) ); MU_TRY_DCETHREAD( dcethread_join(thread, NULL) ); } MU_TEST(dcethread_cond_wait, 
interrupt_post) { dcethread* thread; struct timespec ts; ts.tv_nsec = 100000000; ts.tv_sec = 0; MU_TRY_DCETHREAD( dcethread_create(&thread, NULL, basic_thread, NULL) ); MU_TRY_DCETHREAD( dcethread_delay(&ts) ); MU_TRY_DCETHREAD( dcethread_interrupt(thread) ); MU_TRY_DCETHREAD( dcethread_join(thread, NULL) ); } dcethread_mutex global_mutex = DCETHREAD_MUTEX_INITIALIZER; static void* global_lock_thread(void* data) { volatile int interrupt_caught = 0; dcethread_cond cond; MU_TRY_DCETHREAD( dcethread_cond_init(&cond, NULL) ); DCETHREAD_TRY { MU_ASSERT(!interrupt_caught); MU_TRY_DCETHREAD( dcethread_mutex_lock (&global_mutex) ); while (1) { MU_TRY_DCETHREAD( dcethread_cond_wait (&cond, &global_mutex) ); } } DCETHREAD_CATCH(dcethread_interrupt_e) { MU_ASSERT(!interrupt_caught); interrupt_caught = 1; } DCETHREAD_FINALLY { dcethread_mutex_unlock (&global_mutex); } DCETHREAD_ENDTRY; MU_ASSERT(interrupt_caught); return NULL; } MU_TEST(dcethread_cond_wait, interrupt_global) { dcethread* thread; struct timespec ts; ts.tv_nsec = 100000000; ts.tv_sec = 0; MU_TRY_DCETHREAD( dcethread_create(&thread, NULL, global_lock_thread, NULL) ); MU_TRY_DCETHREAD( dcethread_delay(&ts) ); MU_TRY_DCETHREAD( dcethread_mutex_lock(&global_mutex) ); MU_TRY_DCETHREAD( dcethread_interrupt(thread) ); MU_TRY_DCETHREAD( dcethread_mutex_unlock(&global_mutex) ); MU_TRY_DCETHREAD( dcethread_join(thread, NULL) ); } #endif /* TEST */
2,159
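The dcethread_cond_wait wrapper above brackets pthread_cond_wait with dcethread__begin_block/dcethread__end_block so that a pending interrupt surfaces as EINTR instead of being swallowed. For readers more at home on the JVM, here is a rough Java analog of that interruptible-wait pattern; it is a sketch only, and none of these names belong to the DCE/RPC code.

import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;

// Minimal sketch: Java's Condition.await() already integrates interruption,
// so the begin_block/end_block bookkeeping collapses into InterruptedException handling.
class InterruptibleWait {
    private final ReentrantLock lock = new ReentrantLock();
    private final Condition cond = lock.newCondition();
    private boolean ready = false;

    // Returns true if signalled, false if the waiting thread was interrupted.
    boolean awaitReady() {
        lock.lock();
        try {
            while (!ready) {
                cond.await();                        // analogous to pthread_cond_wait
            }
            return true;
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();      // analogous to reporting EINTR
            return false;
        } finally {
            lock.unlock();                           // mutex ownership restored, as in the C code
        }
    }

    void signalReady() {
        lock.lock();
        try {
            ready = true;
            cond.signalAll();
        } finally {
            lock.unlock();
        }
    }
}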
1,117
package com.pgmmers.radar.dal.bean; import com.pgmmers.radar.dal.bean.PageQuery; public class RuleQuery extends PageQuery { private Long activationId; private String name; private Integer status; public Long getActivationId() { return activationId; } public void setActivationId(Long activationId) { this.activationId = activationId; } public String getName() { return name; } public void setName(String name) { this.name = name; } public Integer getStatus() { return status; } public void setStatus(Integer status) { this.status = status; } }
254
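RuleQuery above is just a paging query bean. A minimal, hypothetical usage sketch (the rule name and status encoding are invented for illustration; only the setters and getters defined in the class are assumed):

public class RuleQueryExample {
    public static void main(String[] args) {
        com.pgmmers.radar.dal.bean.RuleQuery query = new com.pgmmers.radar.dal.bean.RuleQuery();
        query.setActivationId(42L);           // filter rules belonging to one activation
        query.setName("velocity-check");      // hypothetical rule name filter
        query.setStatus(1);                   // status encoding is an assumption, not from the source
        System.out.println(query.getName() + " / status " + query.getStatus());
        // paging fields (page number, page size) come from PageQuery, whose API is not shown here
    }
}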
435
<filename>pydata-la-2019/videos/ivona-tautkute-ai-and-fashion-product-retrieval-with-multi-modally-generated-data-pydata-la-2019.json { "description": "The talk will cover generative modeling for multimodal input (image and\ntext) in the context of product retrieval in fashion/e-commerce.\n\nThe presentation will include examples of applying generative (GAN)\narchitectures for image generation with multimodal query using models\nderived from Conditional GAN, StackGAN, AttnGAN and others.\n\nRetrieving products from large databases and finding items of particular\ninterest for the user is a topic of ongoing research. Moving further\nfrom text search, tag based search and image search, there is still a\nlot of ambiguity when visual and textual features need to be merged.\nText query might compliment an image (\"I want sport shoes like these in\nthe image, produced by XXX, wide fit and comfortable\") or might\nrepresent a difference from image query (\"I want a dress like that in\nthe picture, only with shorter sleeves\").\n\nTalk outline:\n\n- Use cases in e-commerce and fashion\n- Current methods for learning multimodal embedding (VSE, Multimodal\n Siamese Networks)\n- Intro to GAN architectures that take latent representation as an\n input (we can influence what we generate, yeah!)\n- How do you feed multimodal input into GAN\n- Results and comparison\n", "duration": 1494, "language": "eng", "published_at": "2019-12-24T02:59:41.000Z", "recorded": "2019-12-05", "speakers": [ "<NAME>" ], "thumbnail_url": "https://i.ytimg.com/vi/w079bCwSaKQ/hqdefault.jpg", "title": "AI meets Fashion for product retrieval with multi-modally generated data", "videos": [ { "type": "youtube", "url": "https://www.youtube.com/watch?v=w079bCwSaKQ" } ] }
559
461
<filename>framework/base/src/main/java/org/apache/ofbiz/base/crypto/DesCrypt.java /******************************************************************************* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. *******************************************************************************/ package org.apache.ofbiz.base.crypto; import java.security.InvalidAlgorithmParameterException; import java.security.InvalidKeyException; import java.security.Key; import java.security.NoSuchAlgorithmException; import java.security.SecureRandom; import java.security.spec.InvalidKeySpecException; import javax.crypto.BadPaddingException; import javax.crypto.Cipher; import javax.crypto.IllegalBlockSizeException; import javax.crypto.KeyGenerator; import javax.crypto.NoSuchPaddingException; import javax.crypto.SecretKeyFactory; import javax.crypto.spec.DESedeKeySpec; import javax.crypto.spec.IvParameterSpec; import org.apache.commons.codec.binary.Base64; import org.apache.ofbiz.base.util.GeneralException; /** * Utility class for doing DESede (3DES) Two-Way Encryption * */ public class DesCrypt { public static Key generateKey() throws NoSuchAlgorithmException { KeyGenerator keyGen = KeyGenerator.getInstance("DESede"); // generate the DES3 key return keyGen.generateKey(); } public static byte[] encrypt(Key key, byte[] bytes) throws GeneralException { byte[] rawIv = new byte[8]; SecureRandom random = new SecureRandom(); random.nextBytes(rawIv); IvParameterSpec iv = new IvParameterSpec(rawIv); // Create the Cipher - DESede/CBC/PKCS5Padding byte[] encBytes = null; Cipher cipher = DesCrypt.getCipher(key, Cipher.ENCRYPT_MODE, iv); try { encBytes = cipher.doFinal(bytes); } catch (IllegalStateException | IllegalBlockSizeException | BadPaddingException e) { throw new GeneralException(e); } // Prepend iv as a prefix to use it during decryption byte[] combinedPayload = new byte[rawIv.length + encBytes.length]; // populate payload with prefix iv and encrypted data System.arraycopy(rawIv, 0, combinedPayload, 0, rawIv.length); System.arraycopy(encBytes, 0, combinedPayload, rawIv.length, encBytes.length); return combinedPayload; } public static byte[] decrypt(Key key, byte[] bytes) throws GeneralException { // separate prefix with IV from the rest of encrypted data byte[] encryptedPayload = Base64.decodeBase64(bytes); byte[] iv = new byte[8]; byte[] encryptedBytes = new byte[encryptedPayload.length - iv.length]; // populate iv with bytes: System.arraycopy(encryptedPayload, 0, iv, 0, iv.length); // populate encryptedBytes with bytes: System.arraycopy(encryptedPayload, iv.length, encryptedBytes, 0, encryptedBytes.length); byte[] decBytes = null; Cipher cipher = DesCrypt.getCipher(key, Cipher.DECRYPT_MODE, new IvParameterSpec(iv)); try { decBytes = cipher.doFinal(encryptedBytes); } catch (IllegalStateException | IllegalBlockSizeException |
BadPaddingException e) { throw new GeneralException(e); } return decBytes; } public static Key getDesKey(byte[] rawKey) throws GeneralException { SecretKeyFactory skf = null; try { skf = SecretKeyFactory.getInstance("DESede"); } catch (NoSuchAlgorithmException e) { throw new GeneralException(e); } // load the raw key if (rawKey.length > 0) { DESedeKeySpec desedeSpec1 = null; try { desedeSpec1 = new DESedeKeySpec(rawKey); } catch (InvalidKeyException e) { throw new GeneralException(e); } // create the SecretKey Object Key key = null; try { key = skf.generateSecret(desedeSpec1); } catch (InvalidKeySpecException e) { throw new GeneralException(e); } return key; } throw new GeneralException("Not a valid DESede key!"); } // return a cipher for a key - DESede/CBC/PKCS5Padding with random IV protected static Cipher getCipher(Key key, int mode, IvParameterSpec iv) throws GeneralException { // create the Cipher - DESede/CBC/PKCS5Padding Cipher cipher = null; try { cipher = Cipher.getInstance("DESede/CBC/PKCS5Padding"); } catch (NoSuchAlgorithmException | NoSuchPaddingException e) { throw new GeneralException(e); } try { cipher.init(mode, key, iv); } catch (InvalidKeyException | InvalidAlgorithmParameterException e) { throw new GeneralException(e); } return cipher; } }
2,044
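Assuming the corrected encrypt()/decrypt() above, a round trip looks like the sketch below. Note that decrypt() as written Base64-decodes its input, so the ciphertext is Base64-encoded before being handed back; everything outside DesCrypt comes from the JDK and commons-codec.

import java.security.Key;
import org.apache.commons.codec.binary.Base64;
import org.apache.ofbiz.base.crypto.DesCrypt;

public class DesCryptRoundTrip {
    public static void main(String[] args) throws Exception {
        Key key = DesCrypt.generateKey();                                      // fresh 3DES key
        byte[] cipherText = DesCrypt.encrypt(key, "hello".getBytes("UTF-8"));  // IV-prefixed ciphertext
        // decrypt() calls Base64.decodeBase64 internally, so encode before passing it in
        byte[] plain = DesCrypt.decrypt(key, Base64.encodeBase64(cipherText));
        System.out.println(new String(plain, "UTF-8"));                        // prints "hello"
    }
}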
14,668
// Copyright 2015 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "remoting/base/fake_oauth_token_getter.h" #include "base/bind.h" #include "base/location.h" #include "base/task/single_thread_task_runner.h" #include "base/threading/thread_task_runner_handle.h" namespace remoting { FakeOAuthTokenGetter::FakeOAuthTokenGetter(Status status, const std::string& user_email, const std::string& access_token) : status_(status), user_email_(user_email), access_token_(access_token) {} FakeOAuthTokenGetter::~FakeOAuthTokenGetter() = default; void FakeOAuthTokenGetter::CallWithToken(TokenCallback on_access_token) { base::ThreadTaskRunnerHandle::Get()->PostTask( FROM_HERE, base::BindOnce(std::move(on_access_token), status_, user_email_, access_token_)); } void FakeOAuthTokenGetter::InvalidateCache() { NOTIMPLEMENTED(); } } // namespace remoting
441
460
<reponame>dyzmapl/BumpTop #include "../../../src/3rdparty/webkit/WebCore/../WebKit/qt/Api/qwebplugindatabase_p.h"
53
2,880
<reponame>fakeNetflix/square-repo-PonyDebugger<filename>ObjC/DerivedSources/PDDOMDebuggerDomain.h // // PDDOMDebuggerDomain.h // PonyDebuggerDerivedSources // // Generated on 8/23/12 // // Licensed to Square, Inc. under one or more contributor license agreements. // See the LICENSE file distributed with this work for the terms under // which Square, Inc. licenses this file to you. // #import <PonyDebugger/PDObject.h> #import <PonyDebugger/PDDebugger.h> #import <PonyDebugger/PDDynamicDebuggerDomain.h> @protocol PDDOMDebuggerCommandDelegate; // DOM debugging allows setting breakpoints on particular DOM operations and events. JavaScript execution will stop on these operations as if there was a regular breakpoint set. @interface PDDOMDebuggerDomain : PDDynamicDebuggerDomain @property (nonatomic, assign) id <PDDOMDebuggerCommandDelegate, PDCommandDelegate> delegate; @end @protocol PDDOMDebuggerCommandDelegate <PDCommandDelegate> @optional // Sets breakpoint on particular operation with DOM. // Param nodeId: Identifier of the node to set breakpoint on. // Param type: Type of the operation to stop upon. - (void)domain:(PDDOMDebuggerDomain *)domain setDOMBreakpointWithNodeId:(NSNumber *)nodeId type:(NSString *)type callback:(void (^)(id error))callback; // Removes DOM breakpoint that was set using <code>setDOMBreakpoint</code>. // Param nodeId: Identifier of the node to remove breakpoint from. // Param type: Type of the breakpoint to remove. - (void)domain:(PDDOMDebuggerDomain *)domain removeDOMBreakpointWithNodeId:(NSNumber *)nodeId type:(NSString *)type callback:(void (^)(id error))callback; // Sets breakpoint on particular DOM event. // Param eventName: DOM Event name to stop on (any DOM event will do). - (void)domain:(PDDOMDebuggerDomain *)domain setEventListenerBreakpointWithEventName:(NSString *)eventName callback:(void (^)(id error))callback; // Removes breakpoint on particular DOM event. // Param eventName: Event name. - (void)domain:(PDDOMDebuggerDomain *)domain removeEventListenerBreakpointWithEventName:(NSString *)eventName callback:(void (^)(id error))callback; // Sets breakpoint on particular native event. // Param eventName: Instrumentation name to stop on. - (void)domain:(PDDOMDebuggerDomain *)domain setInstrumentationBreakpointWithEventName:(NSString *)eventName callback:(void (^)(id error))callback; // Sets breakpoint on particular native event. // Param eventName: Instrumentation name to stop on. - (void)domain:(PDDOMDebuggerDomain *)domain removeInstrumentationBreakpointWithEventName:(NSString *)eventName callback:(void (^)(id error))callback; // Sets breakpoint on XMLHttpRequest. // Param url: Resource URL substring. All XHRs having this substring in the URL will get stopped upon. - (void)domain:(PDDOMDebuggerDomain *)domain setXHRBreakpointWithUrl:(NSString *)url callback:(void (^)(id error))callback; // Removes breakpoint from XMLHttpRequest. // Param url: Resource URL substring. - (void)domain:(PDDOMDebuggerDomain *)domain removeXHRBreakpointWithUrl:(NSString *)url callback:(void (^)(id error))callback; @end @interface PDDebugger (PDDOMDebuggerDomain) @property (nonatomic, readonly, strong) PDDOMDebuggerDomain *DOMDebuggerDomain; @end
898
1,473
/* * Copyright 2014 NAVER Corp. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.navercorp.pinpoint.plugin.hystrix; import com.navercorp.pinpoint.bootstrap.async.AsyncContextAccessor; import com.navercorp.pinpoint.bootstrap.instrument.ClassFilters; import com.navercorp.pinpoint.bootstrap.instrument.InstrumentClass; import com.navercorp.pinpoint.bootstrap.instrument.InstrumentException; import com.navercorp.pinpoint.bootstrap.instrument.InstrumentMethod; import com.navercorp.pinpoint.bootstrap.instrument.Instrumentor; import com.navercorp.pinpoint.bootstrap.instrument.MethodFilters; import com.navercorp.pinpoint.bootstrap.instrument.transformer.TransformCallback; import com.navercorp.pinpoint.bootstrap.instrument.transformer.TransformTemplate; import com.navercorp.pinpoint.bootstrap.instrument.transformer.TransformTemplateAware; import com.navercorp.pinpoint.bootstrap.interceptor.BasicMethodInterceptor; import com.navercorp.pinpoint.bootstrap.interceptor.scope.ExecutionPolicy; import com.navercorp.pinpoint.bootstrap.logging.PLogger; import com.navercorp.pinpoint.bootstrap.logging.PLoggerFactory; import com.navercorp.pinpoint.bootstrap.plugin.ProfilerPlugin; import com.navercorp.pinpoint.bootstrap.plugin.ProfilerPluginSetupContext; import com.navercorp.pinpoint.bootstrap.plugin.rxjava.transformer.SchedulerWorkerTransformCallback; import com.navercorp.pinpoint.plugin.hystrix.field.EnclosingInstanceAccessor; import com.navercorp.pinpoint.plugin.hystrix.field.HystrixKeyNameAccessor; import com.navercorp.pinpoint.plugin.hystrix.interceptor.HystrixCommandGetFallbackOrThrowExceptionArgs4Interceptor; import com.navercorp.pinpoint.plugin.hystrix.interceptor.HystrixCommandGetFallbackOrThrowExceptionArgs5Interceptor; import com.navercorp.pinpoint.plugin.hystrix.interceptor.HystrixCommandInterceptor; import com.navercorp.pinpoint.plugin.hystrix.interceptor.HystrixObservableTimeoutListenerConstructorInterceptor; import com.navercorp.pinpoint.plugin.hystrix.interceptor.HystrixObservableTimeoutListenerTickInterceptor; import com.navercorp.pinpoint.plugin.hystrix.interceptor.HystrixObservableTimeoutOperatorCallInterceptor; import com.navercorp.pinpoint.plugin.hystrix.interceptor.metrics.HystrixCircuitBreakerConstructInterceptor; import com.navercorp.pinpoint.plugin.hystrix.interceptor.metrics.HystrixCollapserMetricsConstructInterceptor; import com.navercorp.pinpoint.plugin.hystrix.interceptor.metrics.HystrixCommandMetricsConstructInterceptor; import com.navercorp.pinpoint.plugin.hystrix.interceptor.metrics.HystrixKeyConstructInterceptor; import com.navercorp.pinpoint.plugin.hystrix.interceptor.metrics.HystrixThreadPoolMetricsConstructInterceptor; import java.security.ProtectionDomain; import static com.navercorp.pinpoint.common.util.VarArgs.va; /** * Any Pinpoint profiler plugin must implement ProfilerPlugin interface. * ProfilerPlugin declares only one method {@link #setup(ProfilerPluginSetupContext)}. 
* You should implement the method to do whatever you need to setup your plugin with the passed ProfilerPluginSetupContext object. * * @author <NAME> * @author <NAME> */ public class HystrixPlugin implements ProfilerPlugin, TransformTemplateAware { private final PLogger logger = PLoggerFactory.getLogger(this.getClass()); private TransformTemplate transformTemplate; @Override public void setup(ProfilerPluginSetupContext context) { HystrixPluginConfig config = new HystrixPluginConfig(context.getConfig()); if (!config.isTraceHystrix()) { logger.info("{} disabled", this.getClass().getSimpleName()); return; } logger.info("{} config:{}", this.getClass().getSimpleName(), config); addHystrixCommandTransformers(); addHystrixMetricsTransformers(); addTransformersForTimeoutsInObservables(); addHystrixContextSchedulerWorkerScheduleTransformers(); } private void addHystrixCommandTransformers() { transformTemplate.transform("com.netflix.hystrix.HystrixCommand", HystrixCommandTransform.class); transformTemplate.transform("com.netflix.hystrix.HystrixObservableCommand", HystrixObservableCommandTransform.class); transformTemplate.transform("com.netflix.hystrix.AbstractCommand", AbstractCommandTransform.class); } public static class HystrixCommandTransform implements TransformCallback { @Override public byte[] doInTransform(Instrumentor instrumentor, ClassLoader classLoader, String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) throws InstrumentException { InstrumentClass target = instrumentor.getInstrumentClass(classLoader, className, classfileBuffer); // Methods for (InstrumentMethod method : target.getDeclaredMethods(MethodFilters.name("execute", "queue"))) { method.addScopedInterceptor(HystrixCommandInterceptor.class, HystrixPluginConstants.HYSTRIX_COMMAND_EXECUTION_SCOPE, ExecutionPolicy.BOUNDARY); } InstrumentMethod getExecutionObservable = target.getDeclaredMethod("getExecutionObservable"); if (getExecutionObservable != null) { getExecutionObservable.addScopedInterceptor(BasicMethodInterceptor.class, va(HystrixPluginConstants.HYSTRIX_INTERNAL_SERVICE_TYPE), HystrixPluginConstants.HYSTRIX_COMMAND_EXECUTION_SCOPE, ExecutionPolicy.ALWAYS); } InstrumentMethod getFallbackObservable = target.getDeclaredMethod("getFallbackObservable"); if (getFallbackObservable != null) { getFallbackObservable.addScopedInterceptor(BasicMethodInterceptor.class, va(HystrixPluginConstants.HYSTRIX_INTERNAL_SERVICE_TYPE), HystrixPluginConstants.HYSTRIX_COMMAND_EXECUTION_SCOPE, ExecutionPolicy.ALWAYS); } // pre 1.4.0 InstrumentMethod getFallbackOrThrowException = target.getDeclaredMethod( "getFallbackOrThrowException", "com.netflix.hystrix.HystrixEventType", "com.netflix.hystrix.exception.HystrixRuntimeException$FailureType", "java.lang.String", "java.lang.Exception"); if (getFallbackOrThrowException != null) { getFallbackOrThrowException.addScopedInterceptor( HystrixCommandGetFallbackOrThrowExceptionArgs4Interceptor.class, HystrixPluginConstants.HYSTRIX_COMMAND_EXECUTION_SCOPE, ExecutionPolicy.ALWAYS); } return target.toBytecode(); } } public static class HystrixObservableCommandTransform implements TransformCallback { @Override public byte[] doInTransform(Instrumentor instrumentor, ClassLoader classLoader, String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) throws InstrumentException { InstrumentClass target = instrumentor.getInstrumentClass(classLoader, className, classfileBuffer); // Methods InstrumentMethod 
getExecutionObservable = target.getDeclaredMethod("getExecutionObservable"); if (getExecutionObservable != null) { getExecutionObservable.addScopedInterceptor(BasicMethodInterceptor.class, va(HystrixPluginConstants.HYSTRIX_INTERNAL_SERVICE_TYPE), HystrixPluginConstants.HYSTRIX_COMMAND_EXECUTION_SCOPE, ExecutionPolicy.ALWAYS); } InstrumentMethod getFallbackObservable = target.getDeclaredMethod("getFallbackObservable"); if (getFallbackObservable != null) { getFallbackObservable.addScopedInterceptor(BasicMethodInterceptor.class, va(HystrixPluginConstants.HYSTRIX_INTERNAL_SERVICE_TYPE), HystrixPluginConstants.HYSTRIX_COMMAND_EXECUTION_SCOPE, ExecutionPolicy.ALWAYS); } return target.toBytecode(); } } public static class AbstractCommandTransform implements TransformCallback { @Override public byte[] doInTransform(Instrumentor instrumentor, ClassLoader classLoader, String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) throws InstrumentException { InstrumentClass target = instrumentor.getInstrumentClass(classLoader, className, classfileBuffer); for (InstrumentMethod method : target.getDeclaredMethods(MethodFilters.name("observe", "toObservable"))) { method.addScopedInterceptor(HystrixCommandInterceptor.class, HystrixPluginConstants.HYSTRIX_COMMAND_EXECUTION_SCOPE); } // 1.5.3+ InstrumentMethod getFallbackOrThrowException = target.getDeclaredMethod( "getFallbackOrThrowException", "com.netflix.hystrix.AbstractCommand", "com.netflix.hystrix.HystrixEventType", "com.netflix.hystrix.exception.HystrixRuntimeException$FailureType", "java.lang.String", "java.lang.Exception"); if (getFallbackOrThrowException != null) { getFallbackOrThrowException.addScopedInterceptor( HystrixCommandGetFallbackOrThrowExceptionArgs5Interceptor.class, HystrixPluginConstants.HYSTRIX_COMMAND_EXECUTION_SCOPE, ExecutionPolicy.ALWAYS); } else { // pre 1.5.3 getFallbackOrThrowException = target.getDeclaredMethod( "getFallbackOrThrowException", "com.netflix.hystrix.HystrixEventType", "com.netflix.hystrix.exception.HystrixRuntimeException$FailureType", "java.lang.String", "java.lang.Exception"); if (getFallbackOrThrowException != null) { getFallbackOrThrowException.addScopedInterceptor( HystrixCommandGetFallbackOrThrowExceptionArgs4Interceptor.class, HystrixPluginConstants.HYSTRIX_COMMAND_EXECUTION_SCOPE, ExecutionPolicy.ALWAYS); } } return target.toBytecode(); } } private void addHystrixMetricsTransformers() { transformTemplate.transform("com.netflix.hystrix.HystrixCommandMetrics", HystrixCommandMetricsTransformer.class); transformTemplate.transform("com.netflix.hystrix.HystrixCircuitBreaker$HystrixCircuitBreakerImpl", HystrixCircuitBreakerImplTransformer.class); transformTemplate.transform("com.netflix.hystrix.HystrixThreadPoolMetrics", HystrixThreadPoolMetricsTransform.class); transformTemplate.transform("com.netflix.hystrix.HystrixCollapserMetrics", HystrixCollapserMetricsTransformer.class); transformTemplate.transform("com.netflix.hystrix.HystrixCommandKey$Factory$HystrixCommandKeyDefault", HystrixKeyTransform.class); transformTemplate.transform("com.netflix.hystrix.HystrixCommandGroupKey$Factory$HystrixCommandGroupDefault", HystrixKeyTransform.class); transformTemplate.transform("com.netflix.hystrix.HystrixThreadPoolKey$Factory$HystrixThreadPoolKeyDefault", HystrixKeyTransform.class); transformTemplate.transform("com.netflix.hystrix.HystrixCollapserKey$Factory$HystrixCollapserKeyDefault", HystrixKeyTransform.class); } public static class HystrixCommandMetricsTransformer 
implements TransformCallback { @Override public byte[] doInTransform(Instrumentor instrumentor, ClassLoader classLoader, String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) throws InstrumentException { InstrumentClass target = instrumentor.getInstrumentClass(classLoader, className, classfileBuffer); InstrumentMethod constructor = target.getConstructor( "com.netflix.hystrix.HystrixCommandKey", "com.netflix.hystrix.HystrixCommandGroupKey", "com.netflix.hystrix.HystrixThreadPoolKey", "com.netflix.hystrix.HystrixCommandProperties", "com.netflix.hystrix.strategy.eventnotifier.HystrixEventNotifier"); if (constructor == null) { constructor = target.getConstructor( "com.netflix.hystrix.HystrixCommandKey", "com.netflix.hystrix.HystrixCommandGroupKey", "com.netflix.hystrix.HystrixCommandProperties", "com.netflix.hystrix.strategy.eventnotifier.HystrixEventNotifier"); } if (constructor == null) { return null; } constructor.addInterceptor(HystrixCommandMetricsConstructInterceptor.class); return target.toBytecode(); } }; public static class HystrixCircuitBreakerImplTransformer implements TransformCallback { @Override public byte[] doInTransform(Instrumentor instrumentor, ClassLoader classLoader, String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) throws InstrumentException { InstrumentClass target = instrumentor.getInstrumentClass(classLoader, className, classfileBuffer); InstrumentMethod constructor = target.getConstructor( "com.netflix.hystrix.HystrixCommandKey", "com.netflix.hystrix.HystrixCommandGroupKey", "com.netflix.hystrix.HystrixCommandProperties", "com.netflix.hystrix.HystrixCommandMetrics"); if (constructor == null) { return null; } constructor.addInterceptor(HystrixCircuitBreakerConstructInterceptor.class); return target.toBytecode(); } }; public static class HystrixThreadPoolMetricsTransform implements TransformCallback { @Override public byte[] doInTransform(Instrumentor instrumentor, ClassLoader classLoader, String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) throws InstrumentException { InstrumentClass target = instrumentor.getInstrumentClass(classLoader, className, classfileBuffer); InstrumentMethod constructor = target.getConstructor("com.netflix.hystrix.HystrixThreadPoolKey", "java.util.concurrent.ThreadPoolExecutor", "com.netflix.hystrix.HystrixThreadPoolProperties"); if (constructor == null) { return null; } constructor.addInterceptor(HystrixThreadPoolMetricsConstructInterceptor.class); return target.toBytecode(); } }; public static class HystrixCollapserMetricsTransformer implements TransformCallback { @Override public byte[] doInTransform(Instrumentor instrumentor, ClassLoader classLoader, String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) throws InstrumentException { InstrumentClass target = instrumentor.getInstrumentClass(classLoader, className, classfileBuffer); InstrumentMethod constructor = target.getConstructor("com.netflix.hystrix.HystrixCollapserKey", "com.netflix.hystrix.HystrixCollapserProperties"); if (constructor == null) { return null; } constructor.addInterceptor(HystrixCollapserMetricsConstructInterceptor.class); return target.toBytecode(); } }; public static class HystrixKeyTransform implements TransformCallback { @Override public byte[] doInTransform(Instrumentor instrumentor, ClassLoader classLoader, String className, Class<?> 
classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) throws InstrumentException { InstrumentClass target = instrumentor.getInstrumentClass(classLoader, className, classfileBuffer); InstrumentMethod constructor = target.getConstructor("java.lang.String"); if (constructor == null) { return null; } target.addField(HystrixKeyNameAccessor.class); constructor.addInterceptor(HystrixKeyConstructInterceptor.class); return target.toBytecode(); } }; private void addTransformersForTimeoutsInObservables() { transformTemplate.transform("com.netflix.hystrix.AbstractCommand$HystrixObservableTimeoutOperator", HystrixObservableTimeoutOperatorTransformer.class); } public static class HystrixObservableTimeoutOperatorTransformer implements TransformCallback { @Override public byte[] doInTransform(Instrumentor instrumentor, ClassLoader classLoader, String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) throws InstrumentException { InstrumentClass target = instrumentor.getInstrumentClass(classLoader, className, classfileBuffer); InstrumentMethod call = target.getDeclaredMethod("call", "rx.Subscriber"); if (call == null) { return null; } target.addField(AsyncContextAccessor.class); call.addInterceptor(HystrixObservableTimeoutOperatorCallInterceptor.class); for (InstrumentClass nested : target.getNestedClasses(ClassFilters.chain(ClassFilters.interfaze("com.netflix.hystrix.util.HystrixTimer$TimerListener"), ClassFilters.enclosingMethod("call", "rx.Subscriber")))) { instrumentor.transform(classLoader, nested.getName(), TimerListenerTransformer.class); } return target.toBytecode(); } } public static class TimerListenerTransformer implements TransformCallback { @Override public byte[] doInTransform(Instrumentor instrumentor, ClassLoader classLoader, String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) throws InstrumentException { InstrumentClass target = instrumentor.getInstrumentClass(classLoader, className, classfileBuffer); // 1.5.12+ InstrumentMethod constructor = target.getConstructor( // Enclosing instance "com.netflix.hystrix.AbstractCommand$HystrixObservableTimeoutOperator", // References to enclosing method's final objects "rx.subscriptions.CompositeSubscription", "com.netflix.hystrix.strategy.concurrency.HystrixRequestContext", "rx.Subscriber"); if (constructor == null) { // pre 1.5.12 constructor = target.getConstructor( // Enclosing instance "com.netflix.hystrix.AbstractCommand$HystrixObservableTimeoutOperator", // References to enclosing method's final objects "rx.subscriptions.CompositeSubscription", "com.netflix.hystrix.strategy.concurrency.HystrixContextRunnable"); } InstrumentMethod tick = target.getDeclaredMethod("tick"); if (constructor == null || tick == null) { return null; } target.addField(EnclosingInstanceAccessor.class); constructor.addInterceptor(HystrixObservableTimeoutListenerConstructorInterceptor.class); tick.addInterceptor(HystrixObservableTimeoutListenerTickInterceptor.class); return target.toBytecode(); } } private void addHystrixContextSchedulerWorkerScheduleTransformers() { transformTemplate.transform("com.netflix.hystrix.strategy.concurrency.HystrixContextScheduler$HystrixContextSchedulerWorker", HystrixSchedulerWorkerTransformCallback.class); } public static class HystrixSchedulerWorkerTransformCallback extends SchedulerWorkerTransformCallback { public HystrixSchedulerWorkerTransformCallback() { 
super(HystrixPluginConstants.HYSTRIX_INTERNAL_SERVICE_TYPE); } } @Override public void setTransformTemplate(TransformTemplate transformTemplate) { this.transformTemplate = transformTemplate; } }
7,910
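Every transformer in the plugin above follows the same shape: look the class up through the Instrumentor, probe for a method or constructor, attach an interceptor, and return the new bytecode (or null when this library version does not have the expected member). A stripped-down sketch of that shape, meant to sit next to the classes above and reuse their imports; the target class name and the instrumented method are hypothetical:

    // Would be registered with: transformTemplate.transform("com.example.SomeCommand", SampleTransform.class);
    public static class SampleTransform implements TransformCallback {
        @Override
        public byte[] doInTransform(Instrumentor instrumentor, ClassLoader classLoader, String className,
                Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer)
                throws InstrumentException {
            InstrumentClass target = instrumentor.getInstrumentClass(classLoader, className, classfileBuffer);
            InstrumentMethod run = target.getDeclaredMethod("run");   // probe for the method in this version
            if (run == null) {
                return null;                                          // nothing to instrument
            }
            run.addScopedInterceptor(BasicMethodInterceptor.class,
                    va(HystrixPluginConstants.HYSTRIX_INTERNAL_SERVICE_TYPE),
                    HystrixPluginConstants.HYSTRIX_COMMAND_EXECUTION_SCOPE, ExecutionPolicy.ALWAYS);
            return target.toBytecode();
        }
    }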
488
#include <string> #include <iostream> #include "rose.h" #include "annotatePragmas.h" using namespace std; using namespace SageBuilder; using namespace SageInterface; static bool debug = false; static const string pragmaAttrib = "PragmaAttribute"; string ppPresRemUnd (PresRemUnd x) { switch (x) { case pragma_preserve : return "preserve"; case pragma_remove : return "remove"; case pragma_undef : return "undef"; default : assert(0); } } class PRUAttribute : public AstAttribute { public: PRUAttribute(PresRemUnd v) : state(v) {} PRUAttribute *copy() const { return new PRUAttribute(*this); } std::string attribute_class_name() { return pragmaAttrib; } std::string toString() { return pragmaAttrib+" "+ppPresRemUnd(state); } PresRemUnd state; }; PresRemUnd getPresRemUnd (SgStatement *s) { if( s->getAttribute(pragmaAttrib) != NULL ){ const PRUAttribute *attr = dynamic_cast<const PRUAttribute*>(s->getAttribute(pragmaAttrib)); ROSE_ASSERT( attr != NULL ); return attr->state; } return pragma_undef; } PRUAttribute attr_undef (pragma_undef); PRUAttribute attr_preserve (pragma_preserve); PRUAttribute attr_remove (pragma_remove); class Processing_PresRemUnd : public AstTopDownProcessing<PRUAttribute*> { protected: virtual PRUAttribute* evaluateInheritedAttribute(SgNode* astNode, PRUAttribute *a) { SgStatement *s = isSgStatement(astNode); if (!s) return a; if (debug) cout << "A: stmt: " << s->unparseToString() << endl; if (debug) cout << "A: a = " << a->toString() << endl; if (debug) printf ("A: ptr = %p\n", s); SgStatement *prev = getPreviousStatement(s); if (prev) { if (debug) printf ("A: L1\n"); SgPragmaDeclaration *pd = isSgPragmaDeclaration(prev); if (pd) { // Note: because of the way ROSE parses and then outputs pragmas, the // following will work with extra whitespace: before, between, and after // the keywords. string pragmaText = pd->get_pragma()->get_pragma(); if (debug) cout << "A: pragmatext: " << pragmaText << endl; if (pragmaText == "skel remove") a = &attr_remove; else if (pragmaText == "skel preserve") { if (a == &attr_remove) cout << "warning: skel preserve in skel remove scope: ignored" << endl; else a = &attr_preserve; } else if (pragmaText.compare(0,4,"skel") == 0) { // all other skel pragmas imply preserve. if (debug) cout << "A: skel-pragma: "; a = &attr_preserve; } } } if (debug) cout << "A: attribute: " << a->toString() << endl << endl; astNode->setAttribute(pragmaAttrib, a); return a; } }; void annotatePragmas (SgProject *project) { Processing_PresRemUnd p; p.traverseInputFiles(project, &attr_undef); }
1,254
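The traversal above carries a preserve/remove state down the AST: a preceding "#pragma skel remove" or "#pragma skel preserve" flips the inherited attribute, any other "skel" pragma implies preserve, and a preserve inside a remove scope is warned about and ignored. The same decision logic, flattened to a linear scan over plain text lines, in a hedged Java sketch (the pragma spellings come from the file; everything else is illustrative):

import java.util.ArrayList;
import java.util.List;

public class SkelPragmaScan {
    enum State { UNDEF, PRESERVE, REMOVE }

    // Walk statements in order; a "#pragma skel ..." line changes the state applied to
    // the statements that follow, mirroring evaluateInheritedAttribute() in spirit.
    static List<State> annotate(List<String> statements) {
        List<State> out = new ArrayList<>();
        State state = State.UNDEF;
        for (String s : statements) {
            String t = s.trim();
            if (t.equals("#pragma skel remove")) {
                state = State.REMOVE;
            } else if (t.equals("#pragma skel preserve")) {
                if (state == State.REMOVE) {
                    System.out.println("warning: skel preserve in skel remove scope: ignored");
                } else {
                    state = State.PRESERVE;
                }
            } else if (t.startsWith("#pragma skel")) {
                state = State.PRESERVE;   // all other skel pragmas imply preserve
            }
            out.add(state);
        }
        return out;
    }
}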
689
<gh_stars>100-1000 /** * Input a number n and print, in order, every decimal number from 1 up to the largest n-digit number. * For example, for input 3, print 1, 2, 3 ... up to the largest 3-digit number, 999. */ public class No12 { public static void main(String[] args) { printNum(3); } private static void printNum(int n) { if (n < 0) return; int[] array = new int[n]; printArray(array, 0); } private static void printArray(int[] array, int n) { if (n != array.length) { for (int i = 0; i < 10; i++) { array[n] = i; printArray(array, n + 1); } } else { boolean flag = false; for (int j = 0; j < array.length; j++) { if (array[j] != 0) { flag = true; } if (flag) { System.out.print(array[j]); } } // skip the blank line produced by the all-zero array if (flag) { System.out.println(); } } } }
645
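No12 enumerates digits recursively so the approach would still work when the n-digit maximum overflows an int. For small n the same output can be produced with a direct loop, which makes a handy cross-check (sketch; prints 1 through 999 for n = 3):

public class No12Check {
    public static void main(String[] args) {
        int n = 3;
        long max = 1;
        for (int i = 0; i < n; i++) {
            max *= 10;               // max = 10^n
        }
        for (long v = 1; v < max; v++) {
            System.out.println(v);   // 1, 2, ..., 999 for n = 3
        }
    }
}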
1,444
package mage.cards.g; import java.util.UUID; import mage.MageInt; import mage.abilities.common.BeginningOfEndStepTriggeredAbility; import mage.abilities.common.EntersBattlefieldTriggeredAbility; import mage.abilities.effects.common.DestroyAllEffect; import mage.abilities.effects.common.continuous.BoostControlledEffect; import mage.cards.CardImpl; import mage.cards.CardSetInfo; import mage.constants.CardType; import mage.constants.Duration; import mage.constants.SubType; import mage.constants.TargetController; import mage.filter.FilterPermanent; import mage.filter.StaticFilters; /** * * @author emerald000 */ public final class GoblinPyromancer extends CardImpl { private static final FilterPermanent filterPermanent = new FilterPermanent("Goblins"); static { filterPermanent.add(SubType.GOBLIN.getPredicate()); } public GoblinPyromancer(UUID ownerId, CardSetInfo setInfo) { super(ownerId, setInfo, new CardType[]{CardType.CREATURE}, "{3}{R}"); this.subtype.add(SubType.GOBLIN); this.subtype.add(SubType.WIZARD); this.power = new MageInt(2); this.toughness = new MageInt(2); // When Goblin Pyromancer enters the battlefield, Goblin creatures get +3/+0 until end of turn. this.addAbility(new EntersBattlefieldTriggeredAbility(new BoostControlledEffect(3, 0, Duration.EndOfTurn, StaticFilters.FILTER_PERMANENT_CREATURE_GOBLINS))); // At the beginning of the end step, destroy all Goblins. this.addAbility(new BeginningOfEndStepTriggeredAbility(new DestroyAllEffect(filterPermanent, false), TargetController.ANY, false)); } private GoblinPyromancer(final GoblinPyromancer card) { super(card); } @Override public GoblinPyromancer copy() { return new GoblinPyromancer(this); } }
619
852
<gh_stars>100-1000 #ifndef DataFormats_Math_Angle_Units_h #define DataFormats_Math_Angle_Units_h #include <cmath> namespace angle_units { constexpr double piRadians(M_PI); constexpr double degPerRad = 180. / piRadians; // Degrees per radian namespace operators { // Angle constexpr double operator"" _pi(long double x) { return double(x) * M_PI; } constexpr double operator"" _pi(unsigned long long int x) { return double(x) * M_PI; } constexpr double operator"" _deg(long double deg) { return deg / degPerRad; } constexpr double operator"" _deg(unsigned long long int deg) { return deg / degPerRad; } constexpr double operator"" _rad(long double rad) { return rad * 1.; } template <class NumType> inline constexpr NumType convertRadToDeg(NumType radians) // Radians -> degrees { return (radians * degPerRad); } template <class NumType> inline constexpr double convertDegToRad(NumType degrees) // Degrees -> radians { return (degrees / degPerRad); } template <class NumType> typename std::enable_if<!std::numeric_limits<NumType>::is_integer, bool>::type almostEqual(NumType x, NumType y, int ulp) { return std::fabs(x - y) <= std::numeric_limits<NumType>::epsilon() * std::fabs(x + y) * ulp || std::fabs(x - y) < std::numeric_limits<NumType>::min(); } } // namespace operators } // namespace angle_units #endif
715
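The header above reduces to two formulas, degrees = radians * (180 / pi) and radians = degrees / (180 / pi), plus a tolerance check scaled to the operands' magnitude. A small Java transcription of the same formulas for readers who want to poke at the numbers; the class and method names are chosen here, not part of the CMSSW header, and almostEqual uses Math.ulp in the same spirit as the epsilon-scaled comparison.

public final class AngleUnits {
    public static final double DEG_PER_RAD = 180.0 / Math.PI;

    public static double radToDeg(double radians) { return radians * DEG_PER_RAD; }

    public static double degToRad(double degrees) { return degrees / DEG_PER_RAD; }

    // Same spirit as almostEqual(): scale the tolerance by the magnitude of the operands.
    public static boolean almostEqual(double x, double y, int ulp) {
        return Math.abs(x - y) <= Math.ulp(x + y) * ulp
                || Math.abs(x - y) < Double.MIN_NORMAL;
    }

    public static void main(String[] args) {
        System.out.println(radToDeg(Math.PI / 2));          // 90.0
        System.out.println(degToRad(180.0));                // 3.141592653589793
        System.out.println(almostEqual(0.1 + 0.2, 0.3, 2)); // true
    }
}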
362
/* * Copyright (c) 2003, The JUNG Authors * * All rights reserved. * * This software is open-source under the BSD license; see either * "license.txt" or * https://github.com/jrtom/jung/blob/master/LICENSE for a description. */ /* * Created on Dec 26, 2001 * */ package edu.uci.ics.jung.algorithms.filters; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import com.google.common.graph.Graph; import com.google.common.graph.GraphBuilder; import com.google.common.graph.MutableGraph; import java.util.ArrayDeque; import java.util.Queue; import java.util.Set; /** * A filter used to extract the k-neighborhood around a set of root nodes. The k-neighborhood is * defined as the subgraph induced by the set of nodes that are k or fewer hops away from the root * node. * * @author <NAME> * @author <NAME> */ public class KNeighborhoodFilter { // TODO: create ValueGraph/Network versions public static <N> MutableGraph<N> filterGraph(Graph<N> graph, Set<N> rootNodes, int radius) { checkNotNull(graph); checkNotNull(rootNodes); checkArgument(graph.nodes().containsAll(rootNodes), "graph must contain all of rootNodes"); checkArgument(radius > 0, "radius must be > 0"); MutableGraph<N> filtered = GraphBuilder.from(graph).build(); for (N root : rootNodes) { filtered.addNode(root); } Queue<N> currentNodes = new ArrayDeque<>(rootNodes); Queue<N> nextNodes = new ArrayDeque<>(); for (int depth = 1; depth <= radius && !currentNodes.isEmpty(); depth++) { while (!currentNodes.isEmpty()) { N currentNode = currentNodes.remove(); for (N nextNode : graph.successors(currentNode)) { // the addNode needs to happen before putEdge() because we need to know whether // the node was present in the graph // (and putEdge() will always add the node if not present) if (filtered.addNode(nextNode)) { nextNodes.add(nextNode); } filtered.putEdge(currentNode, nextNode); } } Queue<N> emptyQueue = currentNodes; currentNodes = nextNodes; nextNodes = emptyQueue; } // put in in-edges from nodes in the filtered graph for (N node : filtered.nodes()) { for (N predecessor : graph.predecessors(node)) { if (filtered.nodes().contains(predecessor)) { filtered.putEdge(predecessor, node); } } } return filtered; } }
940
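A quick usage sketch of the filter above with Guava's common.graph builders; the graph contents are made up for illustration.

import com.google.common.collect.ImmutableSet;
import com.google.common.graph.GraphBuilder;
import com.google.common.graph.MutableGraph;
import edu.uci.ics.jung.algorithms.filters.KNeighborhoodFilter;

public class KNeighborhoodExample {
    public static void main(String[] args) {
        MutableGraph<String> g = GraphBuilder.directed().build();
        g.putEdge("a", "b");
        g.putEdge("b", "c");
        g.putEdge("c", "d");   // d is 3 hops from a, so it falls outside radius 2
        g.putEdge("x", "a");   // x is never reached by the successor BFS, so this edge is dropped

        // nodes within 2 hops of "a": a, b, c
        MutableGraph<String> twoHop =
                KNeighborhoodFilter.filterGraph(g, ImmutableSet.of("a"), 2);
        System.out.println(twoHop.nodes());   // [a, b, c] in some order
    }
}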
379
<filename>client/oim-client-core/src/main/java/com/oim/core/business/view/NetSettingView.java<gh_stars>100-1000 package com.oim.core.business.view; import com.onlyxiahui.app.base.view.View; /** * @author XiaHui * @date March 16, 2015, 11:48:23 AM */ public interface NetSettingView extends View { public void setAddress(String address); public String getAddress(); }
138
6,181
<filename>qrenderdoc/3rdparty/qt/include/QtCore/qversionnumber.h /**************************************************************************** ** ** Copyright (C) 2016 The Qt Company Ltd. ** Copyright (C) 2016 Intel Corporation. ** Copyright (C) 2014 <NAME> <<EMAIL>> ** Contact: https://www.qt.io/licensing/ ** ** This file is part of the QtCore module of the Qt Toolkit. ** ** $QT_BEGIN_LICENSE:LGPL$ ** Commercial License Usage ** Licensees holding valid commercial Qt licenses may use this file in ** accordance with the commercial license agreement provided with the ** Software or, alternatively, in accordance with the terms contained in ** a written agreement between you and The Qt Company. For licensing terms ** and conditions see https://www.qt.io/terms-conditions. For further ** information use the contact form at https://www.qt.io/contact-us. ** ** GNU Lesser General Public License Usage ** Alternatively, this file may be used under the terms of the GNU Lesser ** General Public License version 3 as published by the Free Software ** Foundation and appearing in the file LICENSE.LGPL3 included in the ** packaging of this file. Please review the following information to ** ensure the GNU Lesser General Public License version 3 requirements ** will be met: https://www.gnu.org/licenses/lgpl-3.0.html. ** ** GNU General Public License Usage ** Alternatively, this file may be used under the terms of the GNU ** General Public License version 2.0 or (at your option) the GNU General ** Public license version 3 or any later version approved by the KDE Free ** Qt Foundation. The licenses are as published by the Free Software ** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3 ** included in the packaging of this file. Please review the following ** information to ensure the GNU General Public License requirements will ** be met: https://www.gnu.org/licenses/gpl-2.0.html and ** https://www.gnu.org/licenses/gpl-3.0.html. ** ** $QT_END_LICENSE$ ** ****************************************************************************/ #ifndef QVERSIONNUMBER_H #define QVERSIONNUMBER_H #include <QtCore/qnamespace.h> #include <QtCore/qstring.h> #include <QtCore/qvector.h> #include <QtCore/qmetatype.h> #include <QtCore/qtypeinfo.h> QT_BEGIN_NAMESPACE class QVersionNumber; Q_CORE_EXPORT uint qHash(const QVersionNumber &key, uint seed = 0); #ifndef QT_NO_DATASTREAM Q_CORE_EXPORT QDataStream& operator<<(QDataStream &out, const QVersionNumber &version); Q_CORE_EXPORT QDataStream& operator>>(QDataStream &in, QVersionNumber &version); #endif class QVersionNumber { /* * QVersionNumber stores small values inline, without memory allocation. * We do that by setting the LSB in the pointer that would otherwise hold * the longer form of the segments. * The constants below help us deal with the permutations for 32- and 64-bit, * little- and big-endian architectures. */ enum { // in little-endian, inline_segments[0] is shared with the pointer's LSB, while // in big-endian, it's inline_segments[7] InlineSegmentMarker = Q_BYTE_ORDER == Q_LITTLE_ENDIAN ? 0 : sizeof(void*) - 1, InlineSegmentStartIdx = !InlineSegmentMarker, // 0 for BE, 1 for LE InlineSegmentCount = sizeof(void*) - 1 }; Q_STATIC_ASSERT(InlineSegmentCount >= 3); // at least major, minor, micro struct SegmentStorage { // Note: we alias the use of dummy and inline_segments in the use of the // union below. This is undefined behavior in C++98, but most compilers implement // the C++11 behavior. The one known exception is older versions of Sun Studio. 
union { quintptr dummy; qint8 inline_segments[sizeof(void*)]; QVector<int> *pointer_segments; }; // set the InlineSegmentMarker and set length to zero SegmentStorage() Q_DECL_NOTHROW : dummy(1) {} SegmentStorage(const QVector<int> &seg) { if (dataFitsInline(seg.begin(), seg.size())) setInlineData(seg.begin(), seg.size()); else pointer_segments = new QVector<int>(seg); } SegmentStorage(const SegmentStorage &other) { if (other.isUsingPointer()) pointer_segments = new QVector<int>(*other.pointer_segments); else dummy = other.dummy; } SegmentStorage &operator=(const SegmentStorage &other) { if (isUsingPointer() && other.isUsingPointer()) { *pointer_segments = *other.pointer_segments; } else if (other.isUsingPointer()) { pointer_segments = new QVector<int>(*other.pointer_segments); } else { if (isUsingPointer()) delete pointer_segments; dummy = other.dummy; } return *this; } #ifdef Q_COMPILER_RVALUE_REFS SegmentStorage(SegmentStorage &&other) Q_DECL_NOTHROW : dummy(other.dummy) { other.dummy = 1; } SegmentStorage &operator=(SegmentStorage &&other) Q_DECL_NOTHROW { qSwap(dummy, other.dummy); return *this; } explicit SegmentStorage(QVector<int> &&seg) { if (dataFitsInline(seg.begin(), seg.size())) setInlineData(seg.begin(), seg.size()); else pointer_segments = new QVector<int>(std::move(seg)); } #endif #ifdef Q_COMPILER_INITIALIZER_LISTS SegmentStorage(std::initializer_list<int> args) { if (dataFitsInline(args.begin(), int(args.size()))) { setInlineData(args.begin(), int(args.size())); } else { pointer_segments = new QVector<int>(args); } } #endif ~SegmentStorage() { if (isUsingPointer()) delete pointer_segments; } bool isUsingPointer() const Q_DECL_NOTHROW { return (inline_segments[InlineSegmentMarker] & 1) == 0; } int size() const Q_DECL_NOTHROW { return isUsingPointer() ? pointer_segments->size() : (inline_segments[InlineSegmentMarker] >> 1); } void setInlineSize(int len) { inline_segments[InlineSegmentMarker] = 1 + 2 * len; } void resize(int len) { if (isUsingPointer()) pointer_segments->resize(len); else setInlineSize(len); } int at(int index) const { return isUsingPointer() ? 
pointer_segments->at(index) : inline_segments[InlineSegmentStartIdx + index]; } void setSegments(int len, int maj, int min = 0, int mic = 0) { if (maj == qint8(maj) && min == qint8(min) && mic == qint8(mic)) { int data[] = { maj, min, mic }; setInlineData(data, len); } else { setVector(len, maj, min, mic); } } private: static bool dataFitsInline(const int *data, int len) { if (len > InlineSegmentCount) return false; for (int i = 0; i < len; ++i) if (data[i] != qint8(data[i])) return false; return true; } void setInlineData(const int *data, int len) { dummy = 1 + len * 2; #if Q_BYTE_ORDER == Q_LITTLE_ENDIAN for (int i = 0; i < len; ++i) dummy |= quintptr(data[i] & 0xFF) << (8 * (i + 1)); #elif Q_BYTE_ORDER == Q_BIG_ENDIAN for (int i = 0; i < len; ++i) dummy |= quintptr(data[i] & 0xFF) << (8 * (sizeof(void *) - i - 1)); #else // the code above is equivalent to: setInlineSize(len); for (int i = 0; i < len; ++i) inline_segments[InlineSegmentStartIdx + i] = data[i] & 0xFF; #endif } Q_CORE_EXPORT void setVector(int len, int maj, int min, int mic); } m_segments; public: inline QVersionNumber() Q_DECL_NOTHROW : m_segments() {} inline explicit QVersionNumber(const QVector<int> &seg) : m_segments(seg) {} // compiler-generated copy/move ctor/assignment operators and the destructor are ok #ifdef Q_COMPILER_RVALUE_REFS explicit QVersionNumber(QVector<int> &&seg) : m_segments(std::move(seg)) {} #endif #ifdef Q_COMPILER_INITIALIZER_LISTS inline QVersionNumber(std::initializer_list<int> args) : m_segments(args) {} #endif inline explicit QVersionNumber(int maj) { m_segments.setSegments(1, maj); } inline explicit QVersionNumber(int maj, int min) { m_segments.setSegments(2, maj, min); } inline explicit QVersionNumber(int maj, int min, int mic) { m_segments.setSegments(3, maj, min, mic); } Q_REQUIRED_RESULT inline bool isNull() const Q_DECL_NOTHROW { return segmentCount() == 0; } Q_REQUIRED_RESULT inline bool isNormalized() const Q_DECL_NOTHROW { return isNull() || segmentAt(segmentCount() - 1) != 0; } Q_REQUIRED_RESULT inline int majorVersion() const Q_DECL_NOTHROW { return segmentAt(0); } Q_REQUIRED_RESULT inline int minorVersion() const Q_DECL_NOTHROW { return segmentAt(1); } Q_REQUIRED_RESULT inline int microVersion() const Q_DECL_NOTHROW { return segmentAt(2); } Q_REQUIRED_RESULT Q_CORE_EXPORT QVersionNumber normalized() const; Q_REQUIRED_RESULT Q_CORE_EXPORT QVector<int> segments() const; Q_REQUIRED_RESULT inline int segmentAt(int index) const Q_DECL_NOTHROW { return (m_segments.size() > index) ? 
m_segments.at(index) : 0; } Q_REQUIRED_RESULT inline int segmentCount() const Q_DECL_NOTHROW { return m_segments.size(); } Q_REQUIRED_RESULT Q_CORE_EXPORT bool isPrefixOf(const QVersionNumber &other) const Q_DECL_NOTHROW; Q_REQUIRED_RESULT Q_CORE_EXPORT static int compare(const QVersionNumber &v1, const QVersionNumber &v2) Q_DECL_NOTHROW; Q_REQUIRED_RESULT Q_CORE_EXPORT static Q_DECL_PURE_FUNCTION QVersionNumber commonPrefix(const QVersionNumber &v1, const QVersionNumber &v2); Q_REQUIRED_RESULT Q_CORE_EXPORT QString toString() const; Q_REQUIRED_RESULT Q_CORE_EXPORT static Q_DECL_PURE_FUNCTION QVersionNumber fromString(const QString &string, int *suffixIndex = Q_NULLPTR); private: #ifndef QT_NO_DATASTREAM friend Q_CORE_EXPORT QDataStream& operator>>(QDataStream &in, QVersionNumber &version); #endif friend Q_CORE_EXPORT uint qHash(const QVersionNumber &key, uint seed); }; Q_DECLARE_TYPEINFO(QVersionNumber, Q_MOVABLE_TYPE); #ifndef QT_NO_DEBUG_STREAM Q_CORE_EXPORT QDebug operator<<(QDebug, const QVersionNumber &version); #endif Q_REQUIRED_RESULT inline bool operator> (const QVersionNumber &lhs, const QVersionNumber &rhs) Q_DECL_NOTHROW { return QVersionNumber::compare(lhs, rhs) > 0; } Q_REQUIRED_RESULT inline bool operator>=(const QVersionNumber &lhs, const QVersionNumber &rhs) Q_DECL_NOTHROW { return QVersionNumber::compare(lhs, rhs) >= 0; } Q_REQUIRED_RESULT inline bool operator< (const QVersionNumber &lhs, const QVersionNumber &rhs) Q_DECL_NOTHROW { return QVersionNumber::compare(lhs, rhs) < 0; } Q_REQUIRED_RESULT inline bool operator<=(const QVersionNumber &lhs, const QVersionNumber &rhs) Q_DECL_NOTHROW { return QVersionNumber::compare(lhs, rhs) <= 0; } Q_REQUIRED_RESULT inline bool operator==(const QVersionNumber &lhs, const QVersionNumber &rhs) Q_DECL_NOTHROW { return QVersionNumber::compare(lhs, rhs) == 0; } Q_REQUIRED_RESULT inline bool operator!=(const QVersionNumber &lhs, const QVersionNumber &rhs) Q_DECL_NOTHROW { return QVersionNumber::compare(lhs, rhs) != 0; } QT_END_NAMESPACE Q_DECLARE_METATYPE(QVersionNumber) #endif //QVERSIONNUMBER_H
4,974
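SegmentStorage above keeps up to sizeof(void*)-1 byte-sized segments inline in the pointer word (tagged via the low bit) and only heap-allocates a QVector<int> when a segment will not fit in a signed byte. The same small-buffer idea can be sketched in Java with a long as the inline store; this is illustrative only and not Qt API.

// Minimal sketch of the "inline small segments, else heap array" trick.
public final class TinyVersion {
    private final long inline;   // segment count in the low byte, then up to 7 byte-sized segments
    private final int[] heap;    // fallback when any segment does not fit in a signed byte

    public TinyVersion(int... segments) {
        boolean fits = segments.length <= 7;
        for (int s : segments) {
            fits &= (s == (byte) s);
        }
        if (fits) {
            long packed = segments.length;                        // low byte: segment count
            for (int i = 0; i < segments.length; i++) {
                packed |= (segments[i] & 0xFFL) << (8 * (i + 1)); // one byte per segment
            }
            this.inline = packed;
            this.heap = null;
        } else {
            this.inline = 0;
            this.heap = segments.clone();
        }
    }

    public int segmentCount() {
        return heap != null ? heap.length : (int) (inline & 0xFF);
    }

    public int segmentAt(int i) {
        if (heap != null) {
            return heap[i];
        }
        return (byte) (inline >>> (8 * (i + 1)));   // sign-extend the stored byte back to int
    }

    public static void main(String[] args) {
        TinyVersion v = new TinyVersion(5, 12, 3);
        System.out.println(v.segmentCount() + " -> " + v.segmentAt(0) + "." + v.segmentAt(1) + "." + v.segmentAt(2));
        // prints: 3 -> 5.12.3
    }
}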
333
package org.reactfx.inhibeans.property; import org.reactfx.inhibeans.value.ObservableValue; import org.reactfx.value.SuspendableVar; /** * @deprecated Superseded by {@link SuspendableVar}. */ @Deprecated public interface Property<T> extends javafx.beans.property.Property<T>, ObservableValue<T> {}
105
2,824
# coding=utf8 import time import threading import pprint import sys import traceback import random import telepot from telepot.namedtuple import ( InlineQuery, ChosenInlineResult, InputTextMessageContent, InlineQueryResultArticle, InlineQueryResultPhoto, InlineQueryResultGame) def equivalent(data, nt): if type(data) is dict: keys = data.keys() # number of dictionary keys == number of non-None values in namedtuple? if len(keys) != len([f for f in nt._fields if getattr(nt, f) is not None]): return False # map `from` to `from_` fields = list(map(lambda k: k+'_' if k in ['from'] else k, keys)) return all(map(equivalent, [data[k] for k in keys], [getattr(nt, f) for f in fields])) elif type(data) is list: return all(map(equivalent, data, nt)) else: return data==nt def examine(result, type): try: print 'Examining %s ......' % type nt = type(**result) assert equivalent(result, nt), 'Not equivalent:::::::::::::::\n%s\n::::::::::::::::\n%s' % (result, nt) pprint.pprint(result) pprint.pprint(nt) print except AssertionError: traceback.print_exc() answer = raw_input('Do you want to continue? [y] ') if answer != 'y': exit(1) def on_inline_query(msg): def compute(): articles = [InlineQueryResultArticle( id='abc', title='HK', input_message_content=InputTextMessageContent(message_text='Hong Kong'), url='https://www.google.com', hide_url=True), {'type': 'article', 'id': 'def', 'title': 'SZ', 'input_message_content': {'message_text': 'Shenzhen'}, 'url': 'https://www.yahoo.com'}] photos = [InlineQueryResultPhoto( id='123', photo_url='https://core.telegram.org/file/811140934/1/tbDSLHSaijc/fdcc7b6d5fb3354adf', thumb_url='https://core.telegram.org/file/811140934/1/tbDSLHSaijc/fdcc7b6d5fb3354adf'), {'type': 'photo', 'id': '345', 'photo_url': 'https://core.telegram.org/file/811140184/1/5YJxx-rostA/ad3f74094485fb97bd', 'thumb_url': 'https://core.telegram.org/file/811140184/1/5YJxx-rostA/ad3f74094485fb97bd', 'caption': 'Caption', 'title': 'Title', 'input_message_content': {'message_text': 'Shenzhen'}}] games = [InlineQueryResultGame( id='abc', game_short_name='sunchaser')] results = random.choice([articles, photos, games]) return results query_id, from_id, query = telepot.glance(msg, flavor='inline_query') if from_id != USER_ID: print 'Unauthorized user:', from_id return examine(msg, InlineQuery) answerer.answer(msg, compute) def on_chosen_inline_result(msg): result_id, from_id, query = telepot.glance(msg, flavor='chosen_inline_result') if from_id != USER_ID: print 'Unauthorized user:', from_id return examine(msg, ChosenInlineResult) print 'Chosen inline query:' pprint.pprint(msg) TOKEN = sys.argv[1] USER_ID = long(sys.argv[2]) bot = telepot.Bot(TOKEN) answerer = telepot.helper.Answerer(bot) bot.sendMessage(USER_ID, 'Please give me an inline query.') bot.message_loop({'inline_query': on_inline_query, 'chosen_inline_result': on_chosen_inline_result}, run_forever=True)
1,490
372
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sysds.runtime.compress.colgroup.dictionary; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import java.math.BigDecimal; import java.math.MathContext; import java.util.Arrays; import org.apache.commons.lang.NotImplementedException; import org.apache.sysds.runtime.compress.DMLCompressionException; import org.apache.sysds.runtime.compress.utils.Util; import org.apache.sysds.runtime.data.DenseBlock; import org.apache.sysds.runtime.data.DenseBlockFP64; import org.apache.sysds.runtime.data.SparseBlock; import org.apache.sysds.runtime.data.SparseBlockCSR; import org.apache.sysds.runtime.data.SparseBlockFactory; import org.apache.sysds.runtime.data.SparseBlockMCSR; import org.apache.sysds.runtime.functionobjects.Builtin; import org.apache.sysds.runtime.functionobjects.Builtin.BuiltinCode; import org.apache.sysds.runtime.functionobjects.Minus; import org.apache.sysds.runtime.functionobjects.Plus; import org.apache.sysds.runtime.functionobjects.ValueFunction; import org.apache.sysds.runtime.instructions.cp.CM_COV_Object; import org.apache.sysds.runtime.matrix.data.LibMatrixReorg; import org.apache.sysds.runtime.matrix.data.MatrixBlock; import org.apache.sysds.runtime.matrix.operators.BinaryOperator; import org.apache.sysds.runtime.matrix.operators.LeftScalarOperator; import org.apache.sysds.runtime.matrix.operators.ScalarOperator; import org.apache.sysds.runtime.matrix.operators.UnaryOperator; public class MatrixBlockDictionary extends ADictionary { private static final long serialVersionUID = 2535887782150955098L; final private MatrixBlock _data; /** * Unsafe private constructor that does not check the data validity. USE WITH CAUTION. * * @param data The matrix block data. 
*/ private MatrixBlockDictionary(MatrixBlock data) { _data = data; } public MatrixBlockDictionary(MatrixBlock data, int nCol) { data.examSparsity(true); if(data.getNumColumns() != nCol) throw new DMLCompressionException("Invalid number of columns in dictionary"); else if(data.isEmpty()) throw new DMLCompressionException("Invalid construction of empty dictionary"); else if(data.isInSparseFormat() && data.getSparseBlock() instanceof SparseBlockMCSR) { SparseBlock csr = SparseBlockFactory.copySparseBlock(SparseBlock.Type.CSR, data.getSparseBlock(), false); data.setSparseBlock(csr); } _data = data; } public static MatrixBlockDictionary createDictionary(double[] values, int nCol) { MatrixBlock nd = Util.matrixBlockFromDenseArray(values, nCol); nd.examSparsity(true); if(nd.isEmpty()) return null; return new MatrixBlockDictionary(nd); } public MatrixBlock getMatrixBlock() { return _data; } @Override public double[] getValues() { if(_data.isInSparseFormat()) { LOG.warn("Inefficient call to getValues for a MatrixBlockDictionary because it was sparse"); throw new DMLCompressionException("Should not call this function"); // _data.sparseToDense(); } return _data.getDenseBlockValues(); } @Override public double getValue(int i) { final int nCol = _data.getNumColumns(); final int row = i / nCol; if(row > _data.getNumRows()) return 0; final int col = i % nCol; return _data.quickGetValue(row, col); } @Override public long getInMemorySize() { // object reference to a matrix block + matrix block size. return 8 + _data.estimateSizeInMemory(); } public static long getInMemorySize(int numberValues, int numberColumns, double sparsity) { // object reference to a matrix block + matrix block size. return 8 + MatrixBlock.estimateSizeInMemory(numberValues, numberColumns, sparsity); } @Override public double aggregate(double init, Builtin fn) { if(fn.getBuiltinCode() == BuiltinCode.MAX) return fn.execute(init, _data.max()); else if(fn.getBuiltinCode() == BuiltinCode.MIN) return fn.execute(init, _data.min()); else throw new NotImplementedException(); } @Override public double aggregateWithReference(double init, Builtin fn, double[] reference, boolean def) { final int nCol = reference.length; final int nRows = _data.getNumRows(); double ret = init; if(def) for(int i = 0; i < nCol; i++) ret = fn.execute(ret, reference[i]); if(!_data.isEmpty() && _data.isInSparseFormat()) { final SparseBlock sb = _data.getSparseBlock(); for(int i = 0; i < nRows; i++) { if(sb.isEmpty(i)) continue; final int apos = sb.pos(i); final int alen = sb.size(i) + apos; final int[] aix = sb.indexes(i); final double[] avals = sb.values(i); for(int k = apos; k < alen; k++) { final double v = avals[k] + reference[aix[k]]; ret = fn.execute(ret, v); } } if(!def) { final int[] nnz = LibMatrixReorg.countNnzPerColumn(_data); for(int i = 0; i < nnz.length; i++) if(nnz[i] < nRows) ret = fn.execute(ret, reference[i]); } } else if(!_data.isEmpty()) { final double[] values = _data.getDenseBlockValues(); int off = 0; for(int k = 0; k < nRows; k++) { for(int j = 0; j < _data.getNumColumns(); j++) { final double v = values[off++] + reference[j]; ret = fn.execute(ret, v); } } } return ret; } @Override public double[] aggregateRows(Builtin fn, int nCol) { double[] ret = new double[_data.getNumRows()]; if(_data.isEmpty()) return ret; else if(_data.isInSparseFormat()) { SparseBlock sb = _data.getSparseBlock(); for(int i = 0; i < _data.getNumRows(); i++) { if(!sb.isEmpty(i)) { final int apos = sb.pos(i); final int alen = sb.size(i) + apos; final double[] avals = 
sb.values(i); ret[i] = avals[apos]; for(int j = apos + 1; j < alen; j++) ret[i] = fn.execute(ret[i], avals[j]); if(sb.size(i) < _data.getNumColumns()) ret[i] = fn.execute(ret[i], 0); } else ret[i] = fn.execute(ret[i], 0); } } else if(nCol == 1) return _data.getDenseBlockValues(); else { double[] values = _data.getDenseBlockValues(); int off = 0; for(int k = 0; k < _data.getNumRows(); k++) { ret[k] = values[off++]; for(int j = 1; j < _data.getNumColumns(); j++) ret[k] = fn.execute(ret[k], values[off++]); } } return ret; } @Override public double[] aggregateRowsWithDefault(Builtin fn, double[] defaultTuple) { throw new NotImplementedException(); } @Override public double[] aggregateRowsWithReference(Builtin fn, double[] reference) { final int nCol = reference.length; final int nRows = _data.getNumRows(); final double[] ret = new double[nRows + 1]; ret[nRows] = reference[0]; for(int i = 1; i < nCol; i++) ret[nRows] = fn.execute(ret[nRows], reference[i]); if(!_data.isEmpty() && _data.isInSparseFormat()) { final SparseBlock sb = _data.getSparseBlock(); for(int i = 0; i < nRows; i++) { if(sb.isEmpty(i)) ret[i] = ret[nRows]; else { final int apos = sb.pos(i); final int alen = sb.size(i) + apos; final int[] aix = sb.indexes(i); final double[] avals = sb.values(i); int k = apos; int j = 1; ret[i] = (aix[k] == 0) ? avals[k++] + reference[0] : reference[0]; for(; j < _data.getNumColumns() && k < alen; j++) { final double v = aix[k] == j ? avals[k++] + reference[j] : reference[j]; ret[i] = fn.execute(ret[i], v); } for(; j < _data.getNumColumns(); j++) ret[i] = fn.execute(ret[i], reference[j]); } } } else if(!_data.isEmpty()) { final double[] values = _data.getDenseBlockValues(); int off = 0; for(int k = 0; k < nRows; k++) { ret[k] = values[off++] + reference[0]; for(int j = 1; j < _data.getNumColumns(); j++) { final double v = values[off++] + reference[j]; ret[k] = fn.execute(ret[k], v); } } } return ret; } @Override public void aggregateCols(double[] c, Builtin fn, int[] colIndexes) { if(_data.isEmpty()) { for(int j = 0; j < colIndexes.length; j++) { final int idx = colIndexes[j]; c[idx] = fn.execute(c[idx], 0); } } else if(_data.isInSparseFormat()) { MatrixBlock t = LibMatrixReorg.transposeInPlace(_data, 1); if(!t.isInSparseFormat()) { throw new NotImplementedException(); } SparseBlock sbt = t.getSparseBlock(); for(int i = 0; i < _data.getNumColumns(); i++) { final int idx = colIndexes[i]; if(!sbt.isEmpty(i)) { final int apos = sbt.pos(i); final int alen = sbt.size(i) + apos; final double[] avals = sbt.values(i); for(int j = apos; j < alen; j++) c[idx] = fn.execute(c[idx], avals[j]); if(alen != _data.getNumRows()) c[idx] = fn.execute(c[idx], 0); } else c[idx] = fn.execute(c[idx], 0); } } else { double[] values = _data.getDenseBlockValues(); int off = 0; for(int k = 0; k < _data.getNumRows(); k++) { for(int j = 0; j < _data.getNumColumns(); j++) { final int idx = colIndexes[j]; c[idx] = fn.execute(c[idx], values[off++]); } } } } @Override public void aggregateColsWithReference(double[] c, Builtin fn, int[] colIndexes, double[] reference, boolean def) { final int nCol = _data.getNumColumns(); final int nRow = _data.getNumRows(); if(def) for(int j = 0; j < colIndexes.length; j++) { final int idx = colIndexes[j]; c[idx] = fn.execute(c[idx], reference[j]); } if(_data.isInSparseFormat()) { final SparseBlock sb = _data.getSparseBlock(); for(int i = 0; i < nRow; i++) { if(sb.isEmpty(i)) continue; final int apos = sb.pos(i); final int alen = sb.size(i) + apos; final double[] avals = sb.values(i); final int[] 
aix = sb.indexes(i); // This is a cool trick but it only works with min / max. for(int k = apos; k < alen; k++) { final int idx = colIndexes[aix[k]]; c[idx] = fn.execute(c[idx], avals[k] + reference[aix[k]]); } } if(!def) { final int[] nnz = LibMatrixReorg.countNnzPerColumn(_data); for(int i = 0; i < nnz.length; i++) if(nnz[i] < nRow) { final int idx = colIndexes[i]; c[idx] = fn.execute(c[idx], reference[i]); } } } else { final double[] values = _data.getDenseBlockValues(); int off = 0; for(int k = 0; k < nRow; k++) { for(int j = 0; j < nCol; j++) { final int idx = colIndexes[j]; c[idx] = fn.execute(c[idx], values[off++] + reference[j]); } } } } @Override public ADictionary applyScalarOp(ScalarOperator op) { MatrixBlock res = _data.scalarOperations(op, new MatrixBlock()); if(res.isEmpty()) return null; else return new MatrixBlockDictionary(res, _data.getNumColumns()); } @Override public ADictionary applyUnaryOp(UnaryOperator op) { MatrixBlock res = _data.unaryOperations(op, new MatrixBlock()); if(res.isEmpty()) return null; else return new MatrixBlockDictionary(res, _data.getNumColumns()); } @Override public ADictionary applyScalarOpWithReference(ScalarOperator op, double[] reference, double[] newReference) { final int nCol = _data.getNumColumns(); final int nRow = _data.getNumRows(); final MatrixBlock ret = new MatrixBlock(nRow, nCol, false); ret.allocateDenseBlock(); final double[] retV = ret.getDenseBlockValues(); int off = 0; if(_data.isInSparseFormat()) { final SparseBlock sb = _data.getSparseBlock(); for(int i = 0; i < nRow; i++) { if(sb.isEmpty(i)) for(int j = 0; j < nCol; j++) retV[off++] = op.executeScalar(reference[j]) - newReference[j]; else { final int apos = sb.pos(i); final int alen = sb.size(i) + apos; final int[] aix = sb.indexes(i); final double[] avals = sb.values(i); int j = 0; for(int k = apos; j < nCol && k < alen; j++) { final double v = aix[k] == j ? avals[k++] + reference[j] : reference[j]; retV[off++] = op.executeScalar(v) - newReference[j]; } for(; j < nCol; j++) retV[off++] = op.executeScalar(reference[j]) - newReference[j]; } } } else { final double[] values = _data.getDenseBlockValues(); for(int i = 0; i < nRow; i++) { for(int j = 0; j < nCol; j++) { retV[off] = op.executeScalar(values[off] + reference[j]) - newReference[j]; off++; } } } ret.recomputeNonZeros(); ret.examSparsity(); if(ret.isEmpty()) return null; else return new MatrixBlockDictionary(ret, nCol); } @Override public ADictionary applyUnaryOpWithReference(UnaryOperator op, double[] reference, double[] newReference) { final int nCol = _data.getNumColumns(); final int nRow = _data.getNumRows(); final MatrixBlock ret = new MatrixBlock(nRow, nCol, false); ret.allocateDenseBlock(); final double[] retV = ret.getDenseBlockValues(); int off = 0; if(_data.isInSparseFormat()) { final SparseBlock sb = _data.getSparseBlock(); for(int i = 0; i < nRow; i++) { if(sb.isEmpty(i)) for(int j = 0; j < nCol; j++) retV[off++] = op.fn.execute(reference[j]) - newReference[j]; else { final int apos = sb.pos(i); final int alen = sb.size(i) + apos; final int[] aix = sb.indexes(i); final double[] avals = sb.values(i); int j = 0; for(int k = apos; j < nCol && k < alen; j++) { final double v = aix[k] == j ? 
avals[k++] + reference[j] : reference[j]; retV[off++] = op.fn.execute(v) - newReference[j]; } for(; j < nCol; j++) retV[off++] = op.fn.execute(reference[j]) - newReference[j]; } } } else { final double[] values = _data.getDenseBlockValues(); for(int i = 0; i < nRow; i++) { for(int j = 0; j < nCol; j++) { retV[off] = op.fn.execute(values[off] + reference[j]) - newReference[j]; off++; } } } ret.recomputeNonZeros(); ret.examSparsity(); if(ret.isEmpty()) return null; else return new MatrixBlockDictionary(ret, nCol); } @Override public ADictionary inplaceScalarOp(ScalarOperator op) { throw new NotImplementedException(); } @Override public ADictionary binOpLeft(BinaryOperator op, double[] v, int[] colIndexes) { throw new NotImplementedException("Binary row op left is not supported for Uncompressed Matrix, " + "Implement support for VMr in MatrixBlock Binary Cell operations"); } @Override public Dictionary binOpLeftWithReference(BinaryOperator op, double[] v, int[] colIndexes, double[] reference, double[] newReference) { throw new NotImplementedException(); } @Override public MatrixBlockDictionary binOpRight(BinaryOperator op, double[] v, int[] colIndexes) { MatrixBlock rowVector = Util.extractValues(v, colIndexes); return new MatrixBlockDictionary(_data.binaryOperations(op, rowVector, null), _data.getNumColumns()); } @Override public MatrixBlockDictionary binOpRight(BinaryOperator op, double[] v) { MatrixBlock rowVector = new MatrixBlock(1, v.length, v); MatrixBlock ret = _data.binaryOperations(op, rowVector, null); ret.examSparsity(true); return new MatrixBlockDictionary(ret, _data.getNumColumns()); } @Override public Dictionary binOpRightWithReference(BinaryOperator op, double[] v, int[] colIndexes, double[] reference, double[] newReference) { throw new NotImplementedException(); } @Override public ADictionary clone() { MatrixBlock ret = new MatrixBlock(); ret.copy(_data); return new MatrixBlockDictionary(ret, _data.getNumColumns()); } @Override public boolean isLossy() { return false; } @Override public int getNumberOfValues(int ncol) { return _data.getNumRows(); } @Override public double[] sumAllRowsToDouble(int nrColumns) { double[] ret = new double[_data.getNumRows()]; if(_data.isEmpty()) return ret; else if(_data.isInSparseFormat()) { SparseBlock sb = _data.getSparseBlock(); for(int i = 0; i < _data.getNumRows(); i++) { if(!sb.isEmpty(i)) { final int apos = sb.pos(i); final int alen = sb.size(i) + apos; final double[] avals = sb.values(i); for(int j = apos; j < alen; j++) { ret[i] += avals[j]; } } } } else { double[] values = _data.getDenseBlockValues(); int off = 0; for(int k = 0; k < _data.getNumRows(); k++) { for(int j = 0; j < _data.getNumColumns(); j++) { final double v = values[off++]; ret[k] += v; } } } return ret; } @Override public double[] sumAllRowsToDoubleWithDefault(double[] defaultTuple) { throw new NotImplementedException(); } @Override public double[] sumAllRowsToDoubleWithReference(double[] reference) { final int nCol = reference.length; final int numVals = _data.getNumRows(); final double[] ret = new double[numVals + 1]; final int finalIndex = numVals; for(int i = 0; i < nCol; i++) ret[finalIndex] += reference[i]; if(!_data.isEmpty() && _data.isInSparseFormat()) { final SparseBlock sb = _data.getSparseBlock(); for(int i = 0; i < numVals; i++) { if(sb.isEmpty(i)) ret[i] = ret[finalIndex]; else { final int apos = sb.pos(i); final int alen = sb.size(i) + apos; final int[] aix = sb.indexes(i); final double[] avals = sb.values(i); int k = apos; int j = 0; for(; j < 
_data.getNumColumns() && k < alen; j++) { final double v = aix[k] == j ? avals[k++] + reference[j] : reference[j]; ret[i] += v; } for(; j < _data.getNumColumns(); j++) ret[i] += reference[j]; } } } else if(!_data.isEmpty()) { double[] values = _data.getDenseBlockValues(); int off = 0; for(int k = 0; k < numVals; k++) { for(int j = 0; j < _data.getNumColumns(); j++) { final double v = values[off++] + reference[j]; ret[k] += v; } } } return ret; } @Override public double[] sumAllRowsToDoubleSq(int nrColumns) { final double[] ret = new double[_data.getNumRows()]; sumAllRowsToDoubleSq(ret); return ret; } private void sumAllRowsToDoubleSq(double[] ret) { if(_data.isEmpty()) return; else if(_data.isInSparseFormat()) { SparseBlock sb = _data.getSparseBlock(); for(int i = 0; i < _data.getNumRows(); i++) { if(!sb.isEmpty(i)) { final int apos = sb.pos(i); final int alen = sb.size(i) + apos; final double[] avals = sb.values(i); for(int j = apos; j < alen; j++) { ret[i] += avals[j] * avals[j]; } } } } else { double[] values = _data.getDenseBlockValues(); int off = 0; for(int k = 0; k < _data.getNumRows(); k++) { for(int j = 0; j < _data.getNumColumns(); j++) { final double v = values[off++]; ret[k] += v * v; } } } } @Override public double[] sumAllRowsToDoubleSqWithDefault(double[] defaultTuple) { final double[] ret = new double[_data.getNumRows() + 1]; sumAllRowsToDoubleSq(ret); int defIdx = ret.length - 1; for(int j = 0; j < _data.getNumColumns(); j++) { final double v = defaultTuple[j]; ret[defIdx] += v * v; } return ret; } @Override public double[] sumAllRowsToDoubleSqWithReference(double[] reference) { final int nCol = reference.length; final int numVals = _data.getNumRows(); final double[] ret = new double[numVals + 1]; final int finalIndex = numVals; for(int i = 0; i < nCol; i++) ret[finalIndex] += reference[i] * reference[i]; if(!_data.isEmpty() && _data.isInSparseFormat()) { final SparseBlock sb = _data.getSparseBlock(); for(int i = 0; i < numVals; i++) { if(sb.isEmpty(i)) ret[i] = ret[finalIndex]; else { final int apos = sb.pos(i); final int alen = sb.size(i) + apos; final int[] aix = sb.indexes(i); final double[] avals = sb.values(i); int k = apos; int j = 0; for(; j < _data.getNumColumns() && k < alen; j++) { final double v = aix[k] == j ? 
avals[k++] + reference[j] : reference[j]; ret[i] += v * v; } for(; j < _data.getNumColumns(); j++) ret[i] += reference[j] * reference[j]; } } } else if(!_data.isEmpty()) { double[] values = _data.getDenseBlockValues(); int off = 0; for(int k = 0; k < numVals; k++) { for(int j = 0; j < _data.getNumColumns(); j++) { final double v = values[off++] + reference[j]; ret[k] += v * v; } } } return ret; } @Override public void colSum(double[] c, int[] counts, int[] colIndexes) { if(_data.isEmpty()) return; if(_data.isInSparseFormat()) { SparseBlock sb = _data.getSparseBlock(); for(int i = 0; i < counts.length; i++) { if(!sb.isEmpty(i)) { // double tmpSum = 0; final int count = counts[i]; final int apos = sb.pos(i); final int alen = sb.size(i) + apos; final int[] aix = sb.indexes(i); final double[] avals = sb.values(i); for(int j = apos; j < alen; j++) { c[colIndexes[aix[j]]] += count * avals[j]; } } } } else { double[] values = _data.getDenseBlockValues(); int off = 0; for(int k = 0; k < counts.length; k++) { final int countK = counts[k]; for(int j = 0; j < _data.getNumColumns(); j++) { final double v = values[off++]; c[colIndexes[j]] += v * countK; } } } } @Override public void colSumSq(double[] c, int[] counts, int[] colIndexes) { if(_data.isEmpty()) return; if(_data.isInSparseFormat()) { SparseBlock sb = _data.getSparseBlock(); for(int i = 0; i < counts.length; i++) { if(!sb.isEmpty(i)) { // double tmpSum = 0; final int count = counts[i]; final int apos = sb.pos(i); final int alen = sb.size(i) + apos; final int[] aix = sb.indexes(i); final double[] avals = sb.values(i); for(int j = apos; j < alen; j++) { c[colIndexes[aix[j]]] += count * avals[j] * avals[j]; } } } } else { double[] values = _data.getDenseBlockValues(); int off = 0; for(int k = 0; k < counts.length; k++) { final int countK = counts[k]; for(int j = 0; j < _data.getNumColumns(); j++) { final double v = values[off++]; c[colIndexes[j]] += v * v * countK; } } } } @Override public void colSumSqWithReference(double[] c, int[] counts, int[] colIndexes, double[] reference) { final int nCol = reference.length; final int nRow = counts.length; if(_data.isEmpty()) return; else if(_data.isInSparseFormat()) { final SparseBlock sb = _data.getSparseBlock(); for(int i = 0; i < nRow; i++) { final int countK = counts[i]; if(sb.isEmpty(i)) for(int j = 0; j < nCol; j++) c[colIndexes[j]] += reference[j] * reference[j] * countK; else { final int apos = sb.pos(i); final int alen = sb.size(i) + apos; final int[] aix = sb.indexes(i); final double[] avals = sb.values(i); int k = apos; int j = 0; for(; j < _data.getNumColumns() && k < alen; j++) { final double v = aix[k] == j ? 
avals[k++] + reference[j] : reference[j]; c[colIndexes[j]] += v * v * countK; } for(; j < _data.getNumColumns(); j++) c[colIndexes[j]] += reference[j] * reference[j] * countK; } } } else { double[] values = _data.getDenseBlockValues(); int off = 0; for(int k = 0; k < nRow; k++) { final int countK = counts[k]; for(int j = 0; j < _data.getNumColumns(); j++) { final double v = values[off++] + reference[j]; c[colIndexes[j]] += v * v * countK; } } } } @Override public double sum(int[] counts, int ncol) { double tmpSum = 0; if(_data.isEmpty()) return tmpSum; if(_data.isInSparseFormat()) { SparseBlock sb = _data.getSparseBlock(); for(int i = 0; i < counts.length; i++) { if(!sb.isEmpty(i)) { final int count = counts[i]; final int apos = sb.pos(i); final int alen = sb.size(i) + apos; final double[] avals = sb.values(i); for(int j = apos; j < alen; j++) { tmpSum += count * avals[j]; } } } } else { double[] values = _data.getDenseBlockValues(); int off = 0; for(int k = 0; k < counts.length; k++) { final int countK = counts[k]; for(int j = 0; j < _data.getNumColumns(); j++) { final double v = values[off++]; tmpSum += v * countK; } } } return tmpSum; } @Override public double sumSq(int[] counts, int ncol) { double tmpSum = 0; if(_data.isEmpty()) return tmpSum; else if(_data.isInSparseFormat()) { SparseBlock sb = _data.getSparseBlock(); for(int i = 0; i < counts.length; i++) { if(!sb.isEmpty(i)) { final int count = counts[i]; final int apos = sb.pos(i); final int alen = sb.size(i) + apos; final double[] avals = sb.values(i); for(int j = apos; j < alen; j++) { tmpSum += count * avals[j] * avals[j]; } } } } else { double[] values = _data.getDenseBlockValues(); int off = 0; for(int k = 0; k < counts.length; k++) { final int countK = counts[k]; for(int j = 0; j < _data.getNumColumns(); j++) { final double v = values[off++]; tmpSum += v * v * countK; } } } return tmpSum; } @Override public double sumSqWithReference(int[] counts, double[] reference) { if(_data.isEmpty()) return 0; final int nCol = reference.length; final int numVals = counts.length; double ret = 0; if(_data.isInSparseFormat()) { double ref = 0; for(int i = 0; i < nCol; i++) ref += reference[i] * reference[i]; final SparseBlock sb = _data.getSparseBlock(); for(int i = 0; i < numVals; i++) { final int countK = counts[i]; if(sb.isEmpty(i)) ret += ref * countK; else { final int apos = sb.pos(i); final int alen = sb.size(i) + apos; final int[] aix = sb.indexes(i); final double[] avals = sb.values(i); int k = apos; int j = 0; for(; j < _data.getNumColumns() && k < alen; j++) { final double v = aix[k] == j ? 
avals[k++] + reference[j] : reference[j]; ret += v * v * countK; } for(; j < _data.getNumColumns(); j++) ret += reference[j] * reference[j] * countK; } } } else { double[] values = _data.getDenseBlockValues(); int off = 0; for(int k = 0; k < numVals; k++) { final int countK = counts[k]; for(int j = 0; j < _data.getNumColumns(); j++) { final double v = values[off++] + reference[j]; ret += v * v * countK; } } } return ret; } @Override public ADictionary sliceOutColumnRange(int idxStart, int idxEnd, int previousNumberOfColumns) { final MatrixBlock retBlock = _data.slice(0, _data.getNumRows() - 1, idxStart, idxEnd - 1); if(retBlock.isEmpty()) return null; return new MatrixBlockDictionary(retBlock, idxEnd - idxStart); } @Override public boolean containsValue(double pattern) { return _data.containsValue(pattern); } @Override public boolean containsValueWithReference(double pattern, double[] reference) { if(_data.isEmpty()) { for(double d : reference) if(pattern == d) return true; return false; } else if(_data.isInSparseFormat()) { final SparseBlock sb = _data.getSparseBlock(); for(int i = 0; i < _data.getNumRows(); i++) { if(sb.isEmpty(i)) continue; final int apos = sb.pos(i); final int alen = sb.size(i) + apos; final int[] aix = sb.indexes(i); final double[] avals = sb.values(i); int k = apos; int j = 0; for(; j < _data.getNumColumns() && k < alen; j++) { if(aix[k] == j) { if(reference[j] + avals[k++] == pattern) return true; } else { if(reference[j] == pattern) return true; } } for(; j < _data.getNumColumns(); j++) if(reference[j] == pattern) return true; } } else { final double[] values = _data.getDenseBlockValues(); final int nCol = reference.length; for(int i = 0; i < values.length; i++) if(values[i] + reference[i % nCol] == pattern) return true; } return false; } @Override public long getNumberNonZeros(int[] counts, int nCol) { if(_data.isEmpty()) return 0; long nnz = 0; if(_data.isInSparseFormat()) { SparseBlock sb = _data.getSparseBlock(); for(int i = 0; i < counts.length; i++) if(!sb.isEmpty(i)) nnz += sb.size(i) * counts[i]; } else { double[] values = _data.getDenseBlockValues(); int off = 0; for(int i = 0; i < counts.length; i++) { int countThisTuple = 0; for(int j = 0; j < _data.getNumColumns(); j++) { double v = values[off++]; if(v != 0) countThisTuple++; } nnz += countThisTuple * counts[i]; } } return nnz; } @Override public long getNumberNonZerosWithReference(int[] counts, double[] reference, int nRows) { long nnz = 0; if(_data.isEmpty()) return nnz; else if(_data.isInSparseFormat()) { SparseBlock sb = _data.getSparseBlock(); long emptyRowNNZ = nnz; for(int i = 0; i < counts.length; i++) { if(sb.isEmpty(i)) nnz += emptyRowNNZ * counts[i]; else { int countThis = 0; final int apos = sb.pos(i); final int alen = sb.size(i) + apos; final int[] aix = sb.indexes(i); final double[] avals = sb.values(i); int k = apos; int j = 0; for(; j < _data.getNumColumns() && k < alen; j++) { if(aix[k] == j) { if(reference[j] + avals[k++] != 0) countThis++; } else { if(reference[j] != 0) countThis++; } } for(; j < _data.getNumColumns(); j++) if(reference[j] != 0) countThis++; nnz += countThis * counts[i]; } } } else { final double[] values = _data.getDenseBlockValues(); int off = 0; for(int i = 0; i < counts.length; i++) { int countThisTuple = 0; for(int j = 0; j < _data.getNumColumns(); j++) if(values[off++] + reference[j] != 0) countThisTuple++; nnz += countThisTuple * counts[i]; } } return nnz; } @Override public void addToEntry(final double[] v, final int fr, final int to, final int nCol) { 
if(_data.isInSparseFormat()) addToEntrySparse(_data.getSparseBlock(), v, fr, to * nCol, nCol); else addToEntryDense(_data.getDenseBlockValues(), v, fr * nCol, to * nCol, nCol); } private static final void addToEntrySparse(final SparseBlock sb, final double[] v, final int fr, final int st, final int nCol) { if(sb.isEmpty(fr)) return; final int apos = sb.pos(fr); final int alen = sb.size(fr) + apos; final int[] aix = sb.indexes(fr); final double[] avals = sb.values(fr); for(int j = apos; j < alen; j++) v[st + aix[j]] += avals[j]; } private static final void addToEntryDense(final double[] thisV, final double[] v, final int sf, final int st, final int nCol) { for(int i = sf, j = st; i < sf + nCol; i++, j++) v[j] += thisV[i]; } @Override public void addToEntry(final double[] v, final int fr, final int to, final int nCol, int rep) { if(_data.isInSparseFormat()) addToEntrySparse(_data.getSparseBlock(), v, fr, to * nCol, nCol, rep); else addToEntryDense(_data.getDenseBlockValues(), v, fr * nCol, to * nCol, nCol, rep); } private static final void addToEntrySparse(final SparseBlock sb, final double[] v, final int fr, final int st, final int nCol, final int rep) { if(sb.isEmpty(fr)) return; final int apos = sb.pos(fr); final int alen = sb.size(fr) + apos; final int[] aix = sb.indexes(fr); final double[] avals = sb.values(fr); for(int j = apos; j < alen; j++) v[st + aix[j]] += avals[j] * rep; } private static final void addToEntrySparseCSR(final SparseBlockCSR sb, final double[] v, final int fr, final int st, final int nCol, final int[] aix, final double[] avals) { final int apos = sb.pos(fr); final int alen = sb.size(fr) + apos; for(int j = apos; j < alen; j++) v[st + aix[j]] += avals[j]; } private static final void addToEntryDense(final double[] thisV, final double[] v, final int sf, final int st, final int nCol, final int rep) { for(int i = sf, j = st; i < sf + nCol; i++, j++) v[j] += thisV[i] * rep; } @Override public void addToEntryVectorized(double[] v, int f1, int f2, int f3, int f4, int f5, int f6, int f7, int f8, int t1, int t2, int t3, int t4, int t5, int t6, int t7, int t8, int nCol) { if(_data.isInSparseFormat()) { final SparseBlock sb = _data.getSparseBlock(); if(sb instanceof SparseBlockCSR) { final SparseBlockCSR csr = (SparseBlockCSR) sb; final int[] aix = csr.indexes(); final double[] avals = csr.values(); addToEntrySparseCSR(csr, v, f1, t1 * nCol, nCol, aix, avals); addToEntrySparseCSR(csr, v, f2, t2 * nCol, nCol, aix, avals); addToEntrySparseCSR(csr, v, f3, t3 * nCol, nCol, aix, avals); addToEntrySparseCSR(csr, v, f4, t4 * nCol, nCol, aix, avals); addToEntrySparseCSR(csr, v, f5, t5 * nCol, nCol, aix, avals); addToEntrySparseCSR(csr, v, f6, t6 * nCol, nCol, aix, avals); addToEntrySparseCSR(csr, v, f7, t7 * nCol, nCol, aix, avals); addToEntrySparseCSR(csr, v, f8, t8 * nCol, nCol, aix, avals); } else { addToEntrySparse(sb, v, f1, t1 * nCol, nCol); addToEntrySparse(sb, v, f2, t2 * nCol, nCol); addToEntrySparse(sb, v, f3, t3 * nCol, nCol); addToEntrySparse(sb, v, f4, t4 * nCol, nCol); addToEntrySparse(sb, v, f5, t5 * nCol, nCol); addToEntrySparse(sb, v, f6, t6 * nCol, nCol); addToEntrySparse(sb, v, f7, t7 * nCol, nCol); addToEntrySparse(sb, v, f8, t8 * nCol, nCol); } } else { final double[] thisV = _data.getDenseBlockValues(); addToEntryDense(thisV, v, f1 * nCol, t1 * nCol, nCol); addToEntryDense(thisV, v, f2 * nCol, t2 * nCol, nCol); addToEntryDense(thisV, v, f3 * nCol, t3 * nCol, nCol); addToEntryDense(thisV, v, f4 * nCol, t4 * nCol, nCol); addToEntryDense(thisV, v, f5 * nCol, t5 * 
nCol, nCol); addToEntryDense(thisV, v, f6 * nCol, t6 * nCol, nCol); addToEntryDense(thisV, v, f7 * nCol, t7 * nCol, nCol); addToEntryDense(thisV, v, f8 * nCol, t8 * nCol, nCol); } } @Override public ADictionary subtractTuple(double[] tuple) { MatrixBlock v = new MatrixBlock(1, tuple.length, tuple); BinaryOperator op = new BinaryOperator(Minus.getMinusFnObject()); MatrixBlock ret = _data.binaryOperations(op, v, null); if(ret.isEmpty()) return null; return new MatrixBlockDictionary(ret, _data.getNumColumns()); } @Override public MatrixBlockDictionary getMBDict(int nCol) { // Simply return this. return this; } @Override public String getString(int colIndexes) { if(_data.isInSparseFormat() || _data.getNumColumns() > 1) return "\n" + _data.toString(); else return Arrays.toString(_data.getDenseBlockValues()); } @Override public String toString() { if(_data.isInSparseFormat() || _data.getNumColumns() > 1) return "MatrixBlock Dictionary :\n" + _data.toString(); else return "MatrixBlock Dictionary : " + Arrays.toString(_data.getDenseBlockValues()); } @Override public ADictionary scaleTuples(int[] scaling, int nCol) { if(_data.isEmpty()) { throw new NotImplementedException("could return null here? or empty DictionaryMatrixBlock..."); } else if(_data.isInSparseFormat()) { MatrixBlock retBlock = new MatrixBlock(_data.getNumRows(), _data.getNumColumns(), true); retBlock.allocateSparseRowsBlock(true); SparseBlock sbRet = retBlock.getSparseBlock(); SparseBlock sbThis = _data.getSparseBlock(); for(int i = 0; i < _data.getNumRows(); i++) { if(!sbThis.isEmpty(i)) { sbRet.set(i, sbThis.get(i), true); final int count = scaling[i]; final int apos = sbRet.pos(i); final int alen = sbRet.size(i) + apos; final double[] avals = sbRet.values(i); for(int j = apos; j < alen; j++) avals[j] = count * avals[j]; } } retBlock.setNonZeros(_data.getNonZeros()); return new MatrixBlockDictionary(retBlock, _data.getNumColumns()); } else { final double[] _values = _data.getDenseBlockValues(); final double[] scaledValues = new double[_values.length]; int off = 0; for(int tuple = 0; tuple < _values.length / nCol; tuple++) { final int scale = scaling[tuple]; for(int v = 0; v < nCol; v++) { scaledValues[off] = _values[off] * scale; off++; } } DenseBlockFP64 db = new DenseBlockFP64(new int[] {_data.getNumRows(), _data.getNumColumns()}, scaledValues); MatrixBlock retBlock = new MatrixBlock(_data.getNumRows(), _data.getNumColumns(), db); retBlock.setNonZeros(_data.getNonZeros()); return new MatrixBlockDictionary(retBlock, _data.getNumColumns()); } } @Override public void write(DataOutput out) throws IOException { out.writeByte(DictionaryFactory.Type.MATRIX_BLOCK_DICT.ordinal()); _data.write(out); } public static MatrixBlockDictionary read(DataInput in) throws IOException { MatrixBlock ret = new MatrixBlock(); ret.readFields(in); return new MatrixBlockDictionary(ret, ret.getNumColumns()); } @Override public long getExactSizeOnDisk() { return 1 + _data.getExactSizeOnDisk(); } @Override public MatrixBlockDictionary preaggValuesFromDense(final int numVals, final int[] colIndexes, final int[] aggregateColumns, final double[] b, final int cut) { double[] ret = new double[numVals * aggregateColumns.length]; if(_data.isEmpty()) return null; else if(_data.isInSparseFormat()) { SparseBlock sb = _data.getSparseBlock(); for(int i = 0; i < _data.getNumRows(); i++) { if(sb.isEmpty(i)) continue; final int off = aggregateColumns.length * i; final int apos = sb.pos(i); final int alen = sb.size(i) + apos; final double[] avals = sb.values(i); final int[] 
aix = sb.indexes(i); for(int j = apos; j < alen; j++) { final int idb = colIndexes[aix[j]] * cut; final double v = avals[j]; for(int h = 0; h < aggregateColumns.length; h++) ret[off + h] += v * b[idb + aggregateColumns[h]]; } } } else { double[] values = _data.getDenseBlockValues(); for(int k = 0, off = 0; k < numVals * colIndexes.length; k += colIndexes.length, off += aggregateColumns.length) { for(int h = 0; h < colIndexes.length; h++) { int idb = colIndexes[h] * cut; double v = values[k + h]; if(v != 0) for(int i = 0; i < aggregateColumns.length; i++) ret[off + i] += v * b[idb + aggregateColumns[i]]; } } } DenseBlock dictV = new DenseBlockFP64(new int[] {numVals, aggregateColumns.length}, ret); MatrixBlock dictM = new MatrixBlock(numVals, aggregateColumns.length, dictV); dictM.recomputeNonZeros(); dictM.examSparsity(); return new MatrixBlockDictionary(dictM, aggregateColumns.length); } @Override public ADictionary replace(double pattern, double replace, int nCol) { final MatrixBlock ret = _data.replaceOperations(new MatrixBlock(), pattern, replace); if(ret.isEmpty()) return null; return new MatrixBlockDictionary(ret, _data.getNumColumns()); } @Override public ADictionary replaceWithReference(double pattern, double replace, double[] reference) { final int nRow = _data.getNumRows(); final int nCol = _data.getNumColumns(); final MatrixBlock ret = new MatrixBlock(nRow, nCol, false); ret.allocateDenseBlock(); final double[] retV = ret.getDenseBlockValues(); int off = 0; if(_data.isInSparseFormat()) { final SparseBlock sb = _data.getSparseBlock(); for(int i = 0; i < nRow; i++) { if(sb.isEmpty(i)) for(int j = 0; j < nCol; j++) retV[off++] = pattern == reference[j] ? replace - reference[j] : 0; else { final int apos = sb.pos(i); final int alen = sb.size(i) + apos; final int[] aix = sb.indexes(i); final double[] avals = sb.values(i); int j = 0; for(int k = apos; j < nCol && k < alen; j++) { final double v = aix[k] == j ? avals[k++] + reference[j] : reference[j]; retV[off++] = pattern == v ? replace - reference[j] : v - reference[j]; } for(; j < nCol; j++) retV[off++] = pattern == reference[j] ? replace - reference[j] : 0; } } } else { final double[] values = _data.getDenseBlockValues(); for(int i = 0; i < nRow; i++) { for(int j = 0; j < nCol; j++) { final double v = values[off]; retV[off++] = pattern == v + reference[j] ? replace - reference[j] : v; } } } ret.recomputeNonZeros(); ret.examSparsity(); if(ret.isEmpty()) return null; else return new MatrixBlockDictionary(ret, _data.getNumColumns()); } @Override public void product(double[] ret, int[] counts, int nCol) { if(_data.isEmpty()) ret[0] = 0; // should not happen but just for safety. else if(_data.isInSparseFormat()) ret[0] = 0; // if we are sparse there is a zero else if(_data.getNonZeros() < _data.getNumColumns() * _data.getNumRows()) ret[0] = 0; // if the number of zeros are not equal number of cells. 
else { final MathContext cont = MathContext.DECIMAL128; final int nRow = _data.getNumRows(); final double[] values = _data.getDenseBlockValues(); BigDecimal tmp = BigDecimal.ONE; int off = 0; for(int i = 0; i < nRow; i++) { for(int j = 0; j < nCol; j++) { final double v = values[off++]; tmp = tmp.multiply(new BigDecimal(v).pow(counts[i], cont), cont); } } if(Math.abs(tmp.doubleValue()) == 0) ret[0] = 0; else if(!Double.isInfinite(ret[0])) ret[0] = new BigDecimal(ret[0]).multiply(tmp, MathContext.DECIMAL128).doubleValue(); } } @Override public void productWithDefault(double[] ret, int[] counts, double[] def, int defCount) { if(_data.isEmpty()) ret[0] = 0; // should not happen but just for safety. else if(_data.isInSparseFormat()) ret[0] = 0; // if we are sparse there is a zero else if(_data.getNonZeros() < _data.getNumColumns() * _data.getNumRows()) ret[0] = 0; // if the number of zeros are not equal number of cells. else { final MathContext cont = MathContext.DECIMAL128; final int nRow = _data.getNumRows(); final int nCol = def.length; final double[] values = _data.getDenseBlockValues(); BigDecimal tmp = BigDecimal.ONE; int off = 0; for(int i = 0; i < nRow; i++) { for(int j = 0; j < nCol; j++) { final double v = values[off++]; tmp = tmp.multiply(new BigDecimal(v).pow(counts[i], cont), cont); } } for(int x = 0; x < def.length; x++) tmp = tmp.multiply(new BigDecimal(def[x]).pow(defCount, cont), cont); if(Math.abs(tmp.doubleValue()) == 0) ret[0] = 0; else if(!Double.isInfinite(ret[0])) ret[0] = new BigDecimal(ret[0]).multiply(tmp, MathContext.DECIMAL128).doubleValue(); } } @Override public void productWithReference(double[] ret, int[] counts, double[] reference, int refCount) { final MathContext cont = MathContext.DECIMAL128; final int nCol = _data.getNumColumns(); final int nRow = _data.getNumRows(); // force dense ... if this ever is a bottleneck i will be surprised _data.sparseToDense(); final double[] values = _data.getDenseBlockValues(); BigDecimal tmp = BigDecimal.ONE; int off = 0; for(int i = 0; i < nRow; i++) { for(int j = 0; j < nCol; j++) { final double v = values[off++] + reference[j]; if(v == 0) { ret[0] = 0; return; } tmp = tmp.multiply(new BigDecimal(v).pow(counts[i], cont), cont); } } for(int x = 0; x < reference.length; x++) tmp = tmp.multiply(new BigDecimal(reference[x]).pow(refCount, cont), cont); if(Math.abs(tmp.doubleValue()) == 0) ret[0] = 0; else if(!Double.isInfinite(ret[0])) ret[0] = new BigDecimal(ret[0]).multiply(tmp, MathContext.DECIMAL128).doubleValue(); } @Override public CM_COV_Object centralMoment(CM_COV_Object ret, ValueFunction fn, int[] counts, int nRows) { // should be guaranteed to only contain one value per tuple in dictionary. if(_data.isInSparseFormat()) throw new DMLCompressionException("The dictionary should not be sparse with one column"); double[] vals = _data.getDenseBlockValues(); for(int i = 0; i < vals.length; i++) fn.execute(ret, vals[i], counts[i]); return ret; } @Override public CM_COV_Object centralMomentWithReference(CM_COV_Object ret, ValueFunction fn, int[] counts, double reference, int nRows) { // should be guaranteed to only contain one value per tuple in dictionary. 
if(_data.isInSparseFormat()) throw new DMLCompressionException("The dictionary should not be sparse with one column"); double[] vals = _data.getDenseBlockValues(); for(int i = 0; i < vals.length; i++) fn.execute(ret, vals[i] + reference, counts[i]); return ret; } @Override public ADictionary rexpandCols(int max, boolean ignore, boolean cast, int nCol) { MatrixBlock ex = LibMatrixReorg.rexpand(_data, new MatrixBlock(), max, false, cast, ignore, 1); if(ex.isEmpty()) return null; else return new MatrixBlockDictionary(ex, max); } @Override public ADictionary rexpandColsWithReference(int max, boolean ignore, boolean cast, double reference) { return applyScalarOp(new LeftScalarOperator(Plus.getPlusFnObject(), reference)).rexpandCols(max, ignore, cast, 1); } @Override public double getSparsity() { return _data.getSparsity(); } @Override public void multiplyScalar(double v, double[] ret, int off, int dictIdx, int[] cols) { if(_data.isInSparseFormat()) multiplyScalarSparse(v, ret, off, dictIdx, cols); else multiplyScalarDense(v, ret, off, dictIdx, cols); } private void multiplyScalarSparse(double v, double[] ret, int off, int dictIdx, int[] cols) { final SparseBlock sb = _data.getSparseBlock(); if(sb.isEmpty(dictIdx)) return; final int apos = sb.pos(dictIdx); final int alen = sb.size(dictIdx) + apos; final int[] aix = sb.indexes(dictIdx); final double[] aval = sb.values(dictIdx); for(int i = apos; i < alen; i++) ret[off + cols[aix[i]]] += v * aval[i]; } private void multiplyScalarDense(double v, double[] ret, int off, int dictIdx, int[] cols) { final double[] dV = _data.getDenseBlockValues(); final int offD = dictIdx * cols.length; for(int i = 0; i < cols.length; i++) ret[off + cols[i]] += v * dV[offD + i]; } @Override protected void TSMMWithScaling(int[] counts, int[] rows, int[] cols, MatrixBlock ret) { if(_data.isInSparseFormat()) DictLibMatrixMult.TSMMDictsSparseWithScaling(_data.getSparseBlock(), rows, cols, counts, ret); else DictLibMatrixMult.TSMMDictsDenseWithScaling(_data.getDenseBlockValues(), rows, cols, counts, ret); } @Override protected void MMDict(ADictionary right, int[] rowsLeft, int[] colsRight, MatrixBlock result) { if(_data.isInSparseFormat()) right.MMDictSparse(_data.getSparseBlock(), rowsLeft, colsRight, result); else right.MMDictDense(_data.getDenseBlockValues(), rowsLeft, colsRight, result); } @Override protected void MMDictDense(double[] left, int[] rowsLeft, int[] colsRight, MatrixBlock result) { if(_data.isInSparseFormat()) DictLibMatrixMult.MMDictsDenseSparse(left, _data.getSparseBlock(), rowsLeft, colsRight, result); else DictLibMatrixMult.MMDictsDenseDense(left, _data.getDenseBlockValues(), rowsLeft, colsRight, result); } @Override protected void MMDictSparse(SparseBlock left, int[] rowsLeft, int[] colsRight, MatrixBlock result) { if(_data.isInSparseFormat()) DictLibMatrixMult.MMDictsSparseSparse(left, _data.getSparseBlock(), rowsLeft, colsRight, result); else DictLibMatrixMult.MMDictsSparseDense(left, _data.getDenseBlockValues(), rowsLeft, colsRight, result); } @Override protected void TSMMToUpperTriangle(ADictionary right, int[] rowsLeft, int[] colsRight, MatrixBlock result) { if(_data.isInSparseFormat()) right.TSMMToUpperTriangleSparse(_data.getSparseBlock(), rowsLeft, colsRight, result); else right.TSMMToUpperTriangleDense(_data.getDenseBlockValues(), rowsLeft, colsRight, result); } @Override protected void TSMMToUpperTriangleDense(double[] left, int[] rowsLeft, int[] colsRight, MatrixBlock result) { if(_data.isInSparseFormat()) 
DictLibMatrixMult.MMToUpperTriangleDenseSparse(left, _data.getSparseBlock(), rowsLeft, colsRight, result); else DictLibMatrixMult.MMToUpperTriangleDenseDense(left, _data.getDenseBlockValues(), rowsLeft, colsRight, result); } @Override protected void TSMMToUpperTriangleSparse(SparseBlock left, int[] rowsLeft, int[] colsRight, MatrixBlock result) { if(_data.isInSparseFormat()) DictLibMatrixMult.MMToUpperTriangleSparseSparse(left, _data.getSparseBlock(), rowsLeft, colsRight, result); else DictLibMatrixMult.MMToUpperTriangleSparseDense(left, _data.getDenseBlockValues(), rowsLeft, colsRight, result); } @Override protected void TSMMToUpperTriangleScaling(ADictionary right, int[] rowsLeft, int[] colsRight, int[] scale, MatrixBlock result) { if(_data.isInSparseFormat()) right.TSMMToUpperTriangleSparseScaling(_data.getSparseBlock(), rowsLeft, colsRight, scale, result); else right.TSMMToUpperTriangleDenseScaling(_data.getDenseBlockValues(), rowsLeft, colsRight, scale, result); } @Override protected void TSMMToUpperTriangleDenseScaling(double[] left, int[] rowsLeft, int[] colsRight, int[] scale, MatrixBlock result) { if(_data.isInSparseFormat()) DictLibMatrixMult.TSMMToUpperTriangleDenseSparseScaling(left, _data.getSparseBlock(), rowsLeft, colsRight, scale, result); else DictLibMatrixMult.TSMMToUpperTriangleDenseDenseScaling(left, _data.getDenseBlockValues(), rowsLeft, colsRight, scale, result); } @Override protected void TSMMToUpperTriangleSparseScaling(SparseBlock left, int[] rowsLeft, int[] colsRight, int[] scale, MatrixBlock result) { if(_data.isInSparseFormat()) DictLibMatrixMult.TSMMToUpperTriangleSparseSparseScaling(left, _data.getSparseBlock(), rowsLeft, colsRight, scale, result); else DictLibMatrixMult.TSMMToUpperTriangleSparseDenseScaling(left, _data.getDenseBlockValues(), rowsLeft, colsRight, scale, result); } }
21,467
2,338
// RUN: %clang_cc1 -std=gnu++2a -emit-pch %s -o %t.pch // RUN: %clang_cc1 -std=gnu++2a %s -DEMIT -ast-merge %t.pch -ast-dump-all | FileCheck %s // XFAIL: * #ifndef EMIT #define EMIT namespace Integer { consteval int fint() { return 6789; } int Unique_Int = fint(); //CHECK: VarDecl {{.*}} Unique_Int //CHECK-NEXT: ConstantExpr {{.*}} 'int' //CHECK-NEXT: value: Int 6789 consteval __uint128_t fint128() { return ((__uint128_t)0x75f17d6b3588f843 << 64) | 0xb13dea7c9c324e51; } constexpr __uint128_t Unique_Int128 = fint128(); //CHECK: VarDecl {{.*}} Unique_Int128 //CHECK-NEXT: value: Int 156773562844924187900898496343692168785 //CHECK-NEXT: ConstantExpr //CHECK-NEXT: value: Int 156773562844924187900898496343692168785 } // namespace Integer namespace FloatingPoint { consteval double fdouble() { return double(567890.67890); } double Unique_Double = fdouble(); //CHECK: VarDecl {{.*}} Unique_Double //CHECK-NEXT: ConstantExpr {{.*}} //CHECK-NEXT: value: Float 5.678907e+05 } // namespace FloatingPoint // FIXME: Add test for FixedPoint, ComplexInt, ComplexFloat, AddrLabelDiff. namespace Struct { struct B { int i; double d; }; consteval B fB() { return B{1, 0.7}; } constexpr B Basic_Struct = fB(); //CHECK: VarDecl {{.*}} Basic_Struct //CHECK-NEXT: value: Struct //CHECK-NEXT: fields: Int 1, Float 7.000000e-01 //CHECK-NEXT: ImplicitCastExpr //CHECK-NEXT: ConstantExpr //CHECK-NEXT: value: Struct //CHECK-NEXT: fields: Int 1, Float 7.000000e-01 struct C { int i = 9; }; struct A : B { constexpr A(B b, int I, double D, C _c) : B(b), i(I), d(D), c(_c) {} int i; double d; C c; }; consteval A fA() { return A(Basic_Struct, 1, 79.789, {}); } A Advanced_Struct = fA(); //CHECK: VarDecl {{.*}} Advanced_Struct //CHECK-NEXT: ConstantExpr {{.*}} //CHECK-NEXT: value: Struct //CHECK-NEXT: base: Struct //CHECK-NEXT: fields: Int 1, Float 7.000000e-01 //CHECK-NEXT: fields: Int 1, Float 7.978900e+01 //CHECK-NEXT: field: Struct //CHECK-NEXT: field: Int 9 } // namespace Struct namespace Vector { using v4si = int __attribute__((__vector_size__(16))); consteval v4si fv4si() { return (v4si){8, 2, 3}; } v4si Vector_Int = fv4si(); //CHECK: VarDecl {{.*}} Vector_Int //CHECK-NEXT: ConstantExpr //CHECK-NEXT: value: Vector length=4 //CHECK-NEXT: elements: Int 8, Int 2, Int 3, Int 0 } // namespace Vector namespace Array { struct B { int arr[6]; }; consteval B fint() { return B{1, 2, 3, 4, 5, 6}; } B Array_Int = fint(); //CHECK: VarDecl {{.*}} Array_Int //CHECK-NEXT: ConstantExpr //CHECK-NEXT: value: Struct //CHECK-NEXT: field: Array size=6 //CHECK-NEXT: elements: Int 1, Int 2, Int 3, Int 4 //CHECK-NEXT: elements: Int 5, Int 6 struct A { int i = 789; double d = 67890.09876; }; struct C { A arr[3]; }; consteval C fA() { return {{A{}, A{-45678, 9.8}, A{9}}}; } C Array2_Struct = fA(); //CHECK: VarDecl {{.*}} Array2_Struct //CHECK-NEXT: ConstantExpr {{.*}} using v4si = int __attribute__((__vector_size__(16))); struct D { v4si arr[2]; }; consteval D fv4si() { return {{{1, 2, 3, 4}, {4, 5, 6, 7}}}; } D Array_Vector = fv4si(); //CHECK: VarDecl {{.*}} Array_Vector //CHECK-NEXT: ConstantExpr {{.*}} //CHECK-NEXT: value: Struct //CHECK-NEXT: field: Array size=2 //CHECK-NEXT: element: Vector length=4 //CHECK-NEXT: elements: Int 1, Int 2, Int 3, Int 4 //CHECK-NEXT: element: Vector length=4 //CHECK-NEXT: elements: Int 4, Int 5, Int 6, Int 7 } // namespace Array namespace Union { struct A { int i = 6789; float f = 987.9876; }; union U { int i; A a{567890, 9876.5678f}; }; consteval U fU1() { return U{0}; } U Unique_Union1 = fU1(); //CHECK: VarDecl {{.*}} 
Unique_Union //CHECK-NEXT: ConstantExpr //CHECK-NEXT: value: Union .i Int 0 consteval U fU() { return U{}; } U Unique_Union2 = fU(); //CHECK: VarDecl {{.*}} Unique_Union //CHECK-NEXT: ConstantExpr //CHECK-NEXT: value: Union .a //CHECK-NEXT: Struct //CHECK-NEXT: fields: Int 567890, Float 9.876567e+03 } // namespace Union namespace MemberPointer { struct A { struct B { struct C { struct D { struct E { struct F { struct G { int i; }; }; }; }; }; }; }; consteval auto fmem_ptr() -> decltype(&A::B::C::D::E::F::G::i) { return &A::B::C::D::E::F::G::i; } auto MemberPointer1 = fmem_ptr(); //CHECK: VarDecl {{.*}} MemberPointer1 //CHECK-NEXT: ConstantExpr //CHECK-NEXT: value: MemberPointer &G::i struct A1 { struct B1 { int f() const { return 0; } }; }; consteval auto fmem_ptr2() { return &A1::B1::f; } auto MemberPointer2 = fmem_ptr2(); //CHECK: VarDecl {{.*}} MemberPointer2 //CHECK-NEXT: ConstantExpr //CHECK-NEXT: value: MemberPointer &B1::f } // namespace MemberPointer namespace std { struct type_info; }; namespace LValue { constexpr int g = 0; consteval const int &fg_ref() { return g; } const int &g_ref = fg_ref(); //CHECK: VarDecl {{.*}} g_ref //CHECK-NEXT: ConstantExpr //CHECK-NEXT: value: LValue &g consteval const int *fint_ptr() { return &g; } const int *g_ptr = fint_ptr(); //CHECK: VarDecl {{.*}} g_ptr //CHECK-NEXT: ConstantExpr //CHECK-NEXT: value: LValue &g consteval const int *fnull_ptr() { return nullptr; } const int *ptr2 = fnull_ptr(); //CHECK: VarDecl {{.*}} ptr2 //CHECK-NEXT: ConstantExpr //CHECK-NEXT: value: LValue nullptr int fconst(); consteval auto ffunc_ptr() { return &fconst; } int (*func_ptr)() = ffunc_ptr(); //CHECK: VarDecl {{.*}} func_ptr //CHECK-NEXT: ConstantExpr {{.*}} //CHECK-NEXT: value: LValue &fconst struct A { int Arr[6] = {0, 1, 3, 4, 5, 9}; int i = 0; }; struct D { A arr[6] = {}; }; consteval D fA() { return {}; } constexpr D Arr = fA(); // CHECK: VarDecl {{.*}} Arr // CHECK-NEXT: value: Struct // CHECK-NEXT: field: Array size=6 // CHECK-NEXT: element: Struct // CHECK-NEXT: field: Array size=6 // CHECK-NEXT: elements: Int 0, Int 1, Int 3, Int 4 // CHECK-NEXT: elements: Int 5, Int 9 // CHECK-NEXT: field: Int 0 // CHECK-NEXT: element: Struct // CHECK-NEXT: field: Array size=6 // CHECK-NEXT: elements: Int 0, Int 1, Int 3, Int 4 // CHECK-NEXT: elements: Int 5, Int 9 // CHECK-NEXT: field: Int 0 // CHECK-NEXT: element: Struct // CHECK-NEXT: field: Array size=6 // CHECK-NEXT: elements: Int 0, Int 1, Int 3, Int 4 // CHECK-NEXT: elements: Int 5, Int 9 // CHECK-NEXT: field: Int 0 // CHECK-NEXT: element: Struct // CHECK-NEXT: field: Array size=6 // CHECK-NEXT: elements: Int 0, Int 1, Int 3, Int 4 // CHECK-NEXT: elements: Int 5, Int 9 // CHECK-NEXT: field: Int 0 // CHECK-NEXT: element: Struct // CHECK-NEXT: field: Array size=6 // CHECK-NEXT: elements: Int 0, Int 1, Int 3, Int 4 // CHECK-NEXT: elements: Int 5, Int 9 // CHECK-NEXT: field: Int 0 // CHECK-NEXT: element: Struct // CHECK-NEXT: field: Array size=6 // CHECK-NEXT: elements: Int 0, Int 1, Int 3, Int 4 // CHECK-NEXT: elements: Int 5, Int 9 // CHECK-NEXT: field: Int 0 // CHECK-NEXT: ImplicitCastExpr // CHECK-NEXT: ConstantExpr // CHECK-NEXT: value: Struct // CHECK-NEXT: field: Array size=6 // CHECK-NEXT: element: Struct // CHECK-NEXT: field: Array size=6 // CHECK-NEXT: elements: Int 0, Int 1, Int 3, Int 4 // CHECK-NEXT: elements: Int 5, Int 9 // CHECK-NEXT: field: Int 0 // CHECK-NEXT: element: Struct // CHECK-NEXT: field: Array size=6 // CHECK-NEXT: elements: Int 0, Int 1, Int 3, Int 4 // CHECK-NEXT: elements: Int 5, Int 9 
// CHECK-NEXT: field: Int 0 // CHECK-NEXT: element: Struct // CHECK-NEXT: field: Array size=6 // CHECK-NEXT: elements: Int 0, Int 1, Int 3, Int 4 // CHECK-NEXT: elements: Int 5, Int 9 // CHECK-NEXT: field: Int 0 // CHECK-NEXT: element: Struct // CHECK-NEXT: field: Array size=6 // CHECK-NEXT: elements: Int 0, Int 1, Int 3, Int 4 // CHECK-NEXT: elements: Int 5, Int 9 // CHECK-NEXT: field: Int 0 // CHECK-NEXT: element: Struct // CHECK-NEXT: field: Array size=6 // CHECK-NEXT: elements: Int 0, Int 1, Int 3, Int 4 // CHECK-NEXT: elements: Int 5, Int 9 // CHECK-NEXT: field: Int 0 // CHECK-NEXT: element: Struct // CHECK-NEXT: field: Array size=6 // CHECK-NEXT: elements: Int 0, Int 1, Int 3, Int 4 // CHECK-NEXT: elements: Int 5, Int 9 // CHECK-NEXT: field: Int 0 consteval const int &fconstintref() { return Arr.arr[0].i; } const int &ArrayStructRef1 = fconstintref(); //CHECK: VarDecl {{.*}} ArrayStructRef1 //CHECK-NEXT: ConstantExpr //CHECK-NEXT: value: LValue &Arr.arr[0].i consteval const int &fconstintref2() { return Arr.arr[1].Arr[5]; } const int &ArrayStructRef2 = fconstintref2(); //CHECK: VarDecl {{.*}} ArrayStructRef2 //CHECK-NEXT: ConstantExpr //CHECK-NEXT: value: LValue &Arr.arr[1].Arr[5] consteval const int *fconststar() { return &ArrayStructRef2; } const int *ArrayStructRef3 = fconststar(); //CHECK: VarDecl {{.*}} ArrayStructRef3 //CHECK-NEXT: ConstantExpr //CHECK-NEXT: value: LValue &Arr.arr[1].Arr[5] struct B : A { }; struct C { B b; }; consteval C fC() { return {}; } C c = fC(); //CHECK: VarDecl {{.*}} c //CHECK-NEXT: ConstantExpr //CHECK-NEXT: value: Struct //CHECK-NEXT: field: Struct //CHECK-NEXT: base: Struct //CHECK-NEXT: field: Array size=6 //CHECK-NEXT: elements: Int 0, Int 1, Int 3, Int 4 //CHECK-NEXT: elements: Int 5, Int 9 //CHECK-NEXT: field: Int 0 consteval const int &f2constintref() { return c.b.i; } const int &StructPathRef = f2constintref(); //CHECK: VarDecl {{.*}} StructPathRef //CHECK-NEXT: ConstantExpr //CHECK-NEXT: value: LValue &c.b.A::i consteval const std::type_info *ftype_info() { return &typeid(c); } const std::type_info *T1 = ftype_info(); //CHECK: VarDecl {{.*}} T1 //CHECK-NEXT: ConstantExpr //CHECK-NEXT:value: LValue &typeid(LValue::C) consteval const std::type_info *ftype_info2() { return &typeid(Arr.arr[1].Arr[2]); } const std::type_info *T2 = ftype_info2(); //CHECK: VarDecl {{.*}} T2 //CHECK-NEXT: ConstantExpr //CHECK-NEXT: value: LValue &typeid(int) consteval const char *fstring() { return "test"; } const char *cptr = fstring(); //CHECK: VarDecl {{.*}} cptr //CHECK-NEXT: ConstantExpr //CHECK-NEXT: value: LValue &"test"[0] } // namespace LValue #endif
4,240
820
{ "ConnectionStrings": { "Default": "Server=localhost; Database=IdentityServervNextDemoDb; Trusted_Connection=True;" } }
48
799
# import demistomock as demisto from CommonServerPython import * # noqa # pylint: disable=unused-wildcard-import from CommonServerUserPython import * # noqa from GSuiteApiModule import * # noqa: E402 ''' IMPORTS ''' import urllib3 from typing import List, Dict, Any # Disable insecure warnings urllib3.disable_warnings() ADMIN_EMAIL = None ''' CONSTANTS ''' OUTPUT_PREFIX: Dict[str, str] = { 'ACTIVITY_LIST': 'GSuite.ActivitySearch', 'ACTIVITY_LIST_PAGE_TOKEN': 'GSuite.PageToken.ActivitySearch', } REQ_URL = 'https://admin.googleapis.com/' URL_SUFFIX = 'admin/reports/v1/activity/users/{}/applications/{}' SCOPE = ['https://www.googleapis.com/auth/admin.reports.audit.readonly'] DATE_FORMAT = '%Y-%m-%dT%H:%M:%SZ' # ISO8601 format with UTC, default in XSOAR DATE_MILISEC_FORMAT = '%Y-%m-%dT%H:%M:%S.%fZ' ''' HELPER FUNCTIONS ''' def is_email_valid(email: str) -> bool: """ Validates provided email is valid or not. :param email: email string. :return: True if email is in valid format. """ return True if re.match(emailRegex, email) else False def prepare_args_for_activities_list(args: Dict[str, str]) -> Dict[str, str]: """ Prepares arguments for gsuite-activity-search command. :param args: Command arguments. :return: Prepared arguments. """ return GSuiteClient.remove_empty_entities({ 'eventName': args.get('event_name'), 'filters': args.get('filters'), 'orgUnitId': args.get('org_unit_id'), 'groupIdFilter': args.get('group_id'), 'actorIpAddress': args.get('actor_ip_address'), 'startTime': args.get('start_time'), 'endTime': args.get('end_time'), 'maxResults': GSuiteClient.validate_get_int(args.get('max_results'), 'The argument max_results must be a positive integer.'), 'pageToken': args.get('page_token') }) def prepare_readable_items(items: List[Dict[str, Any]]) -> List[Dict[str, Any]]: readable_items = [ { 'Time': item.get('id', {}).get('time'), 'Application Name': item.get('id', {}).get('applicationName'), 'Email': item.get('actor', {}).get('email'), 'ProfileId': item.get('actor', {}).get('profileId'), 'IpAddress': item.get('ipAddress', ''), 'Events': item['events'] } for item in items ] return GSuiteClient.remove_empty_entities(readable_items) def prepare_output_for_activities_list(response: Dict[str, Any]) -> Dict[str, Any]: """ prepares context output for gsuite-activity-search. :param response: API response. :return: output dictionary. """ output_items = [{'id': item['id'], 'actor': item['actor'], 'ipAddress': item.get('ipAddress', []), 'events': item['events']} for item in response.get('items', [])] return { 'GSuite.ActivitySearch': GSuiteClient.remove_empty_entities(output_items), 'GSuite.PageToken.ActivitySearch': { 'nextPageToken': response['nextPageToken'] } if response.get('nextPageToken', '') else {} } def prepare_gsuite_client(params: Dict) -> GSuiteClient: user_service_account = params.get('credentials', {}).get('password') service_account_dict = GSuiteClient.safe_load_non_strict_json(user_service_account) verify_certificate = not params.get('insecure', False) proxy = params.get('proxy', False) headers = { 'Content-Type': 'application/json' } # prepare client class object gsuite_client = GSuiteClient(service_account_dict, base_url=REQ_URL, verify=verify_certificate, proxy=proxy, headers=headers) return gsuite_client @logger def test_module(client: GSuiteClient) -> str: """ Performs test connectivity by valid http response :param client: client object which is used to get response from api. 
:return: raise ValueError if any error occurred during connection :raises DemistoException: If there is any other issues while making the http call. """ with GSuiteClient.http_exception_handler(): if ADMIN_EMAIL: suffix = URL_SUFFIX.format('all', 'admin') client.http_request(url_suffix=suffix, method='GET', params={'max_results': '1'}) else: return_results("Please insert Admin Email parameter for the test to run") return 'ok' ''' COMMAND FUNCTIONS ''' def activities_list_command(client: GSuiteClient, args: Dict[str, Any]) -> CommandResults: """ Prints all activities in the G Suite instance. :param client: client object which is used to get response from api :param args: command arguments. :return: CommandResults object with context and human-readable. """ arguments = prepare_args_for_activities_list(args) response = client.http_request( url_suffix=URL_SUFFIX.format(args.get('user_key', 'all'), args.get('application_name')), params=arguments) # Readable Output readable_items = prepare_readable_items(response.get('items', [])) readable_output = f'### Next Page Token: {response["nextPageToken"]}\n' if response.get("nextPageToken") else '' readable_output += tableToMarkdown( 'Total Retrieved {}: {}'.format('Activities', len(response.get('items', []))), readable_items, ['Time', 'Application Name', 'Email', 'ProfileId', 'IpAddress', 'Events']) if response.get( 'items') else 'No activities found for the given ' \ 'argument(s). ' # Context context_outputs = prepare_output_for_activities_list(response) return CommandResults( outputs=context_outputs, readable_output=readable_output, raw_response=response ) ''' MAIN FUNCTION ''' def main() -> None: """main function, parses params and runs command functions :return: :rtype: """ command = demisto.command() demisto.info(f'Command being called is {command}') try: global ADMIN_EMAIL params = demisto.params() gsuite_client = prepare_gsuite_client(params) # Trim the arguments args = GSuiteClient.strip_dict(demisto.args()) ADMIN_EMAIL = args.get('admin_email') if args.get('admin_email') else params.get('credentials', {}).get('identifier') # Validation of ADMIN_EMAIL if ADMIN_EMAIL and not is_email_valid(ADMIN_EMAIL): raise ValueError('Invalid value of argument/parameter Admin Email.') gsuite_client.set_authorized_http(scopes=SCOPE, subject=ADMIN_EMAIL) # This is the call made when pressing the integration Test button. if command == 'test-module': result = test_module(gsuite_client) return_results(result) elif command == 'gsuite-activity-search': return_results(activities_list_command(gsuite_client, args)) # Log exceptions except Exception as e: demisto.error(traceback.format_exc()) return_error(f'Error: {str(e)}') if __name__ in ('__main__', '__builtin__', 'builtins'): main()
2,945
14,668
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "components/drive/drive_pref_names.h"

namespace drive {
namespace prefs {

// A boolean pref to disable Google Drive integration.
// The pref prefix should remain as "gdata" for backward compatibility.
const char kDisableDrive[] = "gdata.disabled";

// A boolean pref to disable Drive over cellular connections.
// The pref prefix should remain as "gdata" for backward compatibility.
const char kDisableDriveOverCellular[] = "gdata.cellular.disabled";

// A boolean pref to enable or disable verbose logging in DriveFS.
const char kDriveFsEnableVerboseLogging[] = "drivefs.enable_verbose_logging";

// A string pref containing a random salt used to obfuscate account IDs
// when passed to drivefs.
const char kDriveFsProfileSalt[] = "drivefs.profile_salt";

// A boolean pref containing whether pinned files have been migrated to DriveFS.
const char kDriveFsPinnedMigrated[] = "drivefs.pinned_migrated";

// A boolean pref containing whether DriveFS was ever successfully launched.
const char kDriveFsWasLaunchedAtLeastOnce[] = "drivefs.was_launched_at_least_once";

}  // namespace prefs
}  // namespace drive
352
327
// Copyright 1996-2021 Cyberbotics Ltd. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #include "WbJoint.hpp" #include "WbBrake.hpp" #include "WbJointParameters.hpp" #include "WbMotor.hpp" #include "WbNodeUtilities.hpp" #include "WbPositionSensor.hpp" #include "WbRobot.hpp" #include "WbSolidReference.hpp" #include "WbWrenRenderingContext.hpp" #include <wren/config.h> #include <wren/node.h> #include <wren/renderable.h> #include <wren/static_mesh.h> #include <wren/transform.h> #include <ode/ode.h> void WbJoint::init() { mDevice = findMFNode("device"); // hidden field mPosition = findSFDouble("position")->value(); mOdePositionOffset = mPosition; mTimeStep = 0.0; mSavedPositions[stateId()] = mPosition; } // Constructors WbJoint::WbJoint(const QString &modelName, WbTokenizer *tokenizer) : WbBasicJoint(modelName, tokenizer) { init(); } WbJoint::WbJoint(const WbJoint &other) : WbBasicJoint(other) { init(); } WbJoint::WbJoint(const WbNode &other) : WbBasicJoint(other) { init(); } WbJoint::~WbJoint() { } void WbJoint::downloadAssets() { WbBasicJoint::downloadAssets(); WbMotor *m = motor(); if (m) m->downloadAssets(); m = motor2(); if (m) m->downloadAssets(); m = motor3(); if (m) m->downloadAssets(); } void WbJoint::preFinalize() { WbBasicJoint::preFinalize(); mSavedPositions[stateId()] = mPosition; for (int i = 0; i < devicesNumber(); ++i) { if (device(i) && !device(i)->isPreFinalizedCalled()) device(i)->preFinalize(); } } void WbJoint::postFinalize() { WbBasicJoint::postFinalize(); for (int i = 0; i < devicesNumber(); ++i) { if (device(i) && !device(i)->isPostFinalizedCalled()) device(i)->postFinalize(); } connect(mDevice, &WbMFNode::itemInserted, this, &WbJoint::addDevice); if (brake()) connect(brake(), &WbBrake::brakingChanged, this, &WbJoint::updateSpringAndDampingConstants, Qt::UniqueConnection); } void WbJoint::reset(const QString &id) { WbBasicJoint::reset(id); for (int i = 0; i < mDevice->size(); ++i) mDevice->item(i)->reset(id); setPosition(mSavedPositions[id]); } void WbJoint::resetPhysics() { updatePosition(); WbMotor *const m = motor(); if (m) m->resetPhysics(); } void WbJoint::save(const QString &id) { WbBasicJoint::save(id); for (int i = 0; i < mDevice->size(); ++i) mDevice->item(i)->save(id); mSavedPositions[id] = mPosition; } void WbJoint::setPosition(double position, int index) { if (index != 1) return; mPosition = position; mOdePositionOffset = position; WbJointParameters *const p = parameters(); if (p) p->setPosition(mPosition); WbMotor *const m = motor(); if (m) m->setTargetPosition(position); } bool WbJoint::resetJointPositions() { mOdePositionOffset = 0.0; return WbBasicJoint::resetJointPositions(); } // Update methods: they check validity and correct if necessary void WbJoint::addDevice(int index) { const WbSolid *const s = upperSolid(); if (s) { WbRobot *const r = s->robot(); assert(r); WbBaseNode *decendant = dynamic_cast<WbBaseNode *>(mDevice->item(index)); r->descendantNodeInserted(decendant); } WbBrake *brake = dynamic_cast<WbBrake *>(mDevice->item(index)); if (brake) 
connect(brake, &WbBrake::brakingChanged, this, &WbJoint::updateSpringAndDampingConstants, Qt::UniqueConnection); } void WbJoint::updateParameters() { const WbJointParameters *const p = parameters(); if (p) { mOdePositionOffset = p->position(); mPosition = mOdePositionOffset; connect(p, SIGNAL(positionChanged()), this, SLOT(updatePosition()), Qt::UniqueConnection); connect(p, &WbJointParameters::minAndMaxStopChanged, this, &WbJoint::updateMinAndMaxStop, Qt::UniqueConnection); connect(p, &WbJointParameters::springAndDampingConstantsChanged, this, &WbJoint::updateSpringAndDampingConstants, Qt::UniqueConnection); connect(p, &WbJointParameters::axisChanged, this, &WbJoint::updateAxis, Qt::UniqueConnection); } } // Utility functions WbJointParameters *WbJoint::parameters() const { return dynamic_cast<WbJointParameters *>(mParameters->value()); } WbJointDevice *WbJoint::device(int index) const { if (index >= 0 && mDevice->size() > index) return dynamic_cast<WbJointDevice *>(mDevice->item(index)); else return NULL; } int WbJoint::devicesNumber() const { return mDevice->size(); } QVector<WbLogicalDevice *> WbJoint::devices() const { QVector<WbLogicalDevice *> devices; for (int i = 0; i < devicesNumber(); ++i) devices.append(device(i)); return devices; } WbMotor *WbJoint::motor() const { WbMotor *motor = NULL; for (int i = 0; i < mDevice->size(); ++i) { motor = dynamic_cast<WbMotor *>(mDevice->item(i)); if (motor) return motor; } return NULL; } WbPositionSensor *WbJoint::positionSensor() const { WbPositionSensor *sensor = NULL; for (int i = 0; i < mDevice->size(); ++i) { sensor = dynamic_cast<WbPositionSensor *>(mDevice->item(i)); if (sensor) return sensor; } return NULL; } WbBrake *WbJoint::brake() const { WbBrake *brake = NULL; for (int i = 0; i < mDevice->size(); ++i) { brake = dynamic_cast<WbBrake *>(mDevice->item(i)); if (brake) return brake; } return NULL; } void WbJoint::updateAxis() { // update the current endPoint pose based on the new axis value // but do not modify the initial endPoint pose updatePosition(); if (mJoint) applyToOdeAxis(); if (WbWrenRenderingContext::instance()->isOptionalRenderingEnabled(WbWrenRenderingContext::VF_JOINT_AXES)) updateJointAxisRepresentation(); } void WbJoint::updateMinAndMaxStop(double min, double max) { if (mJoint) applyToOdeMinAndMaxStop(); } WbVector3 WbJoint::axis() const { static const WbVector3 DEFAULT_AXIS(0.0, 0.0, 1.0); const WbJointParameters *p = parameters(); return p ? 
p->axis() : DEFAULT_AXIS; } void WbJoint::setOdeJoint(dBodyID body, dBodyID parentBody) { WbBasicJoint::setOdeJoint(body, parentBody); // compute and set the orientation of rotation axis applyToOdeAxis(); // place hard stops if defined applyToOdeMinAndMaxStop(); } void WbJoint::updateOdePositionOffset() { double newValue = position(); if (mOdePositionOffset == newValue) return; mOdePositionOffset = newValue; applyToOdeMinAndMaxStop(); } ////////// // WREN // ////////// void WbJoint::createWrenObjects() { WbBasicJoint::createWrenObjects(); if (WbWrenRenderingContext::instance()->isOptionalRenderingEnabled(WbWrenRenderingContext::VF_JOINT_AXES)) wr_node_set_visible(WR_NODE(mTransform), true); connect(WbWrenRenderingContext::instance(), &WbWrenRenderingContext::lineScaleChanged, this, &WbJoint::updateJointAxisRepresentation); updateJointAxisRepresentation(); // create Wren objects for Muscle devices for (int i = 0; i < devicesNumber(); ++i) { if (device(i)) device(i)->createWrenObjects(); } } void WbJoint::updateJointAxisRepresentation() { if (!areWrenObjectsInitialized()) return; wr_static_mesh_delete(mMesh); const double scaling = 0.5f * wr_config_get_line_scale(); const WbVector3 &anchorVector = anchor(); const WbVector3 &axisVector = scaling * axis(); WbVector3 vertex(anchorVector - axisVector); float vertices[6]; vertex.toFloatArray(vertices); vertex = anchorVector + axisVector; vertex.toFloatArray(vertices + 3); mMesh = wr_static_mesh_line_set_new(2, vertices, NULL); wr_renderable_set_mesh(mRenderable, WR_MESH(mMesh)); } const QString WbJoint::urdfName() const { if (motor()) return getUrdfPrefix() + motor()->deviceName(); else if (positionSensor()) return getUrdfPrefix() + positionSensor()->deviceName(); return WbBaseNode::urdfName(); } void WbJoint::writeExport(WbVrmlWriter &writer) const { if (writer.isUrdf() && solidEndPoint()) { if (dynamic_cast<WbSolidReference *>(mEndPoint->value())) { this->warn("Exporting a Joint node with a SolidRefernce endpoint to URDF is not supported."); return; } const WbNode *const parentRoot = findUrdfLinkRoot(); const WbVector3 currentOffset = solidEndPoint()->translation() - anchor(); const WbVector3 translation = solidEndPoint()->translationFrom(parentRoot) - currentOffset + writer.jointOffset(); writer.setJointOffset(solidEndPoint()->rotationMatrixFrom(parentRoot).transposed() * currentOffset); const WbVector3 eulerRotation = solidEndPoint()->rotationMatrixFrom(parentRoot).toEulerAnglesZYX(); const WbVector3 rotationAxis = axis() * solidEndPoint()->rotationMatrixFrom(WbNodeUtilities::findUpperTransform(this)); writer.increaseIndent(); writer.indent(); const WbMotor *m = motor(); if (m && (m->minPosition() != 0.0 || m->maxPosition() != 0.0)) writer << QString("<joint name=\"%1\" type=\"revolute\">\n").arg(urdfName()); else writer << QString("<joint name=\"%1\" type=\"continuous\">\n").arg(urdfName()); writer.increaseIndent(); writer.indent(); writer << QString("<parent link=\"%1\"/>\n").arg(parentRoot->urdfName()); writer.indent(); writer << QString("<child link=\"%1\"/>\n").arg(solidEndPoint()->urdfName()); writer.indent(); writer << QString("<axis xyz=\"%1\"/>\n").arg(rotationAxis.toString(WbPrecision::FLOAT_ROUND_6)); writer.indent(); if (m) { if (m->minPosition() != 0.0 || m->maxPosition() != 0.0) writer << QString("<limit effort=\"%1\" lower=\"%2\" upper=\"%3\" velocity=\"%4\"/>\n") .arg(m->maxForceOrTorque()) .arg(m->minPosition()) .arg(m->maxPosition()) .arg(m->maxVelocity()); else writer << QString("<limit effort=\"%1\" 
velocity=\"%2\"/>\n").arg(m->maxForceOrTorque()).arg(m->maxVelocity()); writer.indent(); } writer << QString("<origin xyz=\"%1\" rpy=\"%2\"/>\n") .arg(translation.toString(WbPrecision::FLOAT_ROUND_6)) .arg(eulerRotation.toString(WbPrecision::FLOAT_ROUND_6)); writer.decreaseIndent(); writer.indent(); writer << QString("</joint>\n"); writer.decreaseIndent(); WbNode::exportNodeSubNodes(writer); return; } WbBasicJoint::writeExport(writer); }
4,147
4,262
<filename>core/camel-core-model/src/generated/resources/org/apache/camel/model/to.json { "model": { "kind": "model", "name": "to", "title": "To", "description": "Sends the message to a static endpoint", "deprecated": false, "label": "eip,routing", "javaType": "org.apache.camel.model.ToDefinition", "input": true, "output": false }, "properties": { "uri": { "kind": "attribute", "displayName": "Uri", "required": true, "type": "string", "javaType": "java.lang.String", "deprecated": false, "autowired": false, "secret": false, "description": "Sets the uri of the endpoint to send to." }, "pattern": { "kind": "attribute", "displayName": "Pattern", "required": false, "type": "enum", "javaType": "org.apache.camel.ExchangePattern", "enum": [ "InOnly", "InOptionalOut", "InOut" ], "deprecated": false, "autowired": false, "secret": false, "description": "Sets the optional ExchangePattern used to invoke this endpoint" }, "id": { "kind": "attribute", "displayName": "Id", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "autowired": false, "secret": false, "description": "Sets the id of this node" }, "description": { "kind": "element", "displayName": "Description", "required": false, "type": "object", "javaType": "org.apache.camel.model.DescriptionDefinition", "deprecated": false, "autowired": false, "secret": false, "description": "Sets the description of this node" } } }
496
836
/* Copyright (C) 2011 Univ. of Massachusetts Amherst, Computer Science Dept. This file is part of "MALLET" (MAchine Learning for LanguagE Toolkit). http://www.cs.umass.edu/~mccallum/mallet This software is provided under the terms of the Common Public License, version 1.0, as published by http://www.opensource.org. For further information, see the file `LICENSE' included with this distribution. */ package cc.mallet.fst.semi_supervised.pr; import cc.mallet.fst.SumLattice; import cc.mallet.fst.Transducer; import cc.mallet.fst.Transducer.State; import cc.mallet.fst.Transducer.TransitionIterator; import cc.mallet.types.LabelVector; import cc.mallet.types.Sequence; /** * Lattice for M-step/M-projection in PR. * * @author <NAME> * @author <NAME> */ public class SumLatticeKL implements SumLattice { // "ip" == "input position", "op" == "output position", "i" == "state index" Transducer t; double totalWeight; int latticeLength; double[][][] xis; Sequence input; protected SumLatticeKL() {} // If outputAlphabet is non-null, this will create a LabelVector // for each position in the output sequence indicating the // probability distribution over possible outputs at that time // index public SumLatticeKL(Transducer trans, Sequence input, double[] initProbs, double[] finalProbs, double[][][] xis, double[][][] cachedDots, Transducer.Incrementor incrementor) { assert (xis != null) : "Need transition probabilities"; // Initialize some structures this.t = trans; this.input = input; latticeLength = input.size() + 1; int numStates = t.numStates(); this.xis = xis; totalWeight = 0; // increment initial states for (int i = 0; i < numStates; i++) { if (t.getState(i).getInitialWeight() == Transducer.IMPOSSIBLE_WEIGHT) continue; if (initProbs != null) { totalWeight += initProbs[i] * t.getState(i).getInitialWeight(); if (incrementor != null) incrementor.incrementInitialState(t.getState(i), initProbs[i]); } } for (int ip = 0; ip < latticeLength - 1; ip++) for (int i = 0; i < numStates; i++) { State s = t.getState(i); TransitionIterator iter = s.transitionIterator(input, ip); while (iter.hasNext()) { State destination = iter.next(); double weight = iter.getWeight(); double p = xis[ip][i][destination.getIndex()]; totalWeight += p * weight; if (cachedDots != null) { cachedDots[ip][i][destination.getIndex()] = weight; } if (incrementor != null) { // this is used to gather "constraints", // so only probabilities under q are used incrementor.incrementTransition(iter, p); } } } for (int i = 0; i < numStates; i++) { if (t.getState(i).getFinalWeight() == Transducer.IMPOSSIBLE_WEIGHT) continue; if (finalProbs != null) { totalWeight += finalProbs[i] * t.getState(i).getFinalWeight(); if (incrementor != null) incrementor.incrementFinalState(t.getState(i), finalProbs[i]); } } assert (totalWeight > Transducer.IMPOSSIBLE_WEIGHT) : "Total weight=" + totalWeight; } public double[][][] getXis() { return xis; } public double[][] getGammas() { throw new UnsupportedOperationException("Not handled!"); } public double getTotalWeight() { assert (!Double.isNaN(totalWeight)); return totalWeight; } public double getGammaWeight(int inputPosition, State s) { throw new UnsupportedOperationException("Not handled!"); } public double getGammaWeight(int inputPosition, int stateIndex) { throw new UnsupportedOperationException("Not handled!"); } public double getGammaProbability(int inputPosition, State s) { throw new UnsupportedOperationException("Not handled!"); } public double getGammaProbability(int inputPosition, int stateIndex) { throw new 
UnsupportedOperationException("Not handled!"); } public double getXiProbability(int ip, State s1, State s2) { throw new UnsupportedOperationException("Not handled!"); } public double getXiWeight(int ip, State s1, State s2) { throw new UnsupportedOperationException("Not handled!"); } public int length() { return latticeLength; } public double getAlpha(int ip, State s) { throw new UnsupportedOperationException("Not handled!"); } public double getBeta(int ip, State s) { throw new UnsupportedOperationException("Not handled!"); } public LabelVector getLabelingAtPosition(int outputPosition) { return null; } public Transducer getTransducer() { return t; } public Sequence getInput() { return input; } }
1,649
1,144
package de.metas.material.dispo.commons.candidate; import static de.metas.material.dispo.commons.candidate.IdConstants.UNSPECIFIED_REPO_ID; import static de.metas.material.event.EventTestHelper.CLIENT_AND_ORG_ID; import static de.metas.material.event.EventTestHelper.createMaterialDescriptor; import static java.math.BigDecimal.TEN; import static org.assertj.core.api.Assertions.assertThat; import org.adempiere.test.AdempiereTestHelper; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import de.metas.material.dispo.commons.candidate.businesscase.DemandDetail; import de.metas.material.event.commons.EventDescriptor; import de.metas.material.event.commons.OrderLineDescriptor; import de.metas.material.event.commons.SubscriptionLineDescriptor; import de.metas.material.event.commons.SupplyRequiredDescriptor; /* * #%L * metasfresh-material-dispo-commons * %% * Copyright (C) 2018 metas GmbH * %% * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as * published by the Free Software Foundation, either version 2 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public * License along with this program. If not, see * <http://www.gnu.org/licenses/gpl-2.0.html>. * #L% */ public class DemandDetailTest { @BeforeEach public void init() { AdempiereTestHelper.get().init(); } @Test public void forDocumentDescriptor_OrderLineDescriptor() { final OrderLineDescriptor orderLineDescriptor = OrderLineDescriptor.builder() .docTypeId(30) .orderBPartnerId(40) .orderId(50) .orderLineId(60) .build(); final DemandDetail demandDetail = DemandDetail.forDocumentLine(20, orderLineDescriptor, TEN); assertThat(demandDetail.getShipmentScheduleId()).isEqualTo(20); assertThat(demandDetail.getForecastId()).isEqualTo(UNSPECIFIED_REPO_ID); assertThat(demandDetail.getForecastLineId()).isEqualTo(UNSPECIFIED_REPO_ID); assertThat(demandDetail.getSubscriptionProgressId()).isEqualTo(UNSPECIFIED_REPO_ID); assertThat(demandDetail.getOrderId()).isEqualTo(50); assertThat(demandDetail.getOrderLineId()).isEqualTo(60); assertThat(demandDetail.getQty()).isEqualByComparingTo(TEN); } @Test public void forDocumentDescriptor_SubscriptionLineDescriptor() { final SubscriptionLineDescriptor subscriptionLineDescriptor = SubscriptionLineDescriptor.builder() .flatrateTermId(10) .subscriptionProgressId(20) .subscriptionBillBPartnerId(30).build(); final DemandDetail demandDetail = DemandDetail.forDocumentLine(20, subscriptionLineDescriptor, TEN); assertThat(demandDetail.getShipmentScheduleId()).isEqualTo(20); assertThat(demandDetail.getForecastId()).isEqualTo(UNSPECIFIED_REPO_ID); assertThat(demandDetail.getForecastLineId()).isEqualTo(UNSPECIFIED_REPO_ID); assertThat(demandDetail.getSubscriptionProgressId()).isEqualTo(20); assertThat(demandDetail.getOrderId()).isEqualTo(UNSPECIFIED_REPO_ID); assertThat(demandDetail.getOrderLineId()).isEqualTo(UNSPECIFIED_REPO_ID); assertThat(demandDetail.getQty()).isEqualByComparingTo(TEN); } @Test public void forForecastLineId() { final DemandDetail demandDetail = DemandDetail.forForecastLineId(30, 20, TEN); assertThat(demandDetail.getShipmentScheduleId()).isLessThanOrEqualTo(0); 
assertThat(demandDetail.getForecastId()).isEqualTo(20); assertThat(demandDetail.getForecastLineId()).isEqualTo(30); assertThat(demandDetail.getSubscriptionProgressId()).isLessThanOrEqualTo(20); assertThat(demandDetail.getOrderId()).isLessThanOrEqualTo(0); assertThat(demandDetail.getOrderLineId()).isLessThanOrEqualTo(0); assertThat(demandDetail.getQty()).isEqualByComparingTo(TEN); } @Test public void forSupplyRequiredDescriptorOrNull() { final SupplyRequiredDescriptor supplyRequiredDescriptor = SupplyRequiredDescriptor.builder() .demandCandidateId(5) .eventDescriptor(EventDescriptor.ofClientAndOrg(CLIENT_AND_ORG_ID)) .forecastId(10) .forecastLineId(20) .orderId(30) .orderLineId(40) .shipmentScheduleId(50) .subscriptionProgressId(60) .materialDescriptor(createMaterialDescriptor()) .build(); final DemandDetail demandDetail = DemandDetail.forSupplyRequiredDescriptorOrNull(supplyRequiredDescriptor); assertThat(demandDetail.getDemandCandidateId()).isEqualTo(5); assertThat(demandDetail.getForecastId()).isEqualTo(10); assertThat(demandDetail.getForecastLineId()).isEqualTo(20); assertThat(demandDetail.getOrderId()).isEqualTo(30); assertThat(demandDetail.getOrderLineId()).isEqualTo(40); assertThat(demandDetail.getShipmentScheduleId()).isEqualTo(50); assertThat(demandDetail.getSubscriptionProgressId()).isEqualTo(60); } @Test public void forSupplyRequiredDescriptorOrNull_when_null_then_null() { assertThat(DemandDetail.forSupplyRequiredDescriptorOrNull(null)).isNull(); } }
1,863
524
<filename>core/profiling/profiler.h #pragma once #include <cstdint> #include <boost/noncopyable.hpp> namespace core { namespace profiler { class auto_stop_watch : boost::noncopyable { public: auto_stop_watch(const char *_process_name); virtual ~auto_stop_watch(); private: int64_t id_; }; void enable(const bool _enable); int64_t process_started(const char *_name); void process_stopped(const int64_t _process_id); void process_started(const char *_name, const int64_t _process_id, const int64_t _ts); void process_stopped(const int64_t _process_id, const int64_t _ts); void flush_logs(); } }
313
1,825
<filename>unidbg-api/src/main/java/com/github/unidbg/file/FileResult.java package com.github.unidbg.file; public class FileResult<T extends NewFileIO> { private static final int FALLBACK_ERRNO = -1; public static <T extends NewFileIO> FileResult<T> success(T io) { if (io == null) { throw new NullPointerException("io is null"); } return new FileResult<>(io, 0); } public static <T extends NewFileIO> FileResult<T> failed(int errno) { if (errno == 0) { throw new IllegalArgumentException("errno=" + errno); } return new FileResult<>(null, errno); } public static <T extends NewFileIO> FileResult<T> fallback(T io) { if (io == null) { throw new NullPointerException("io is null"); } return new FileResult<>(io, FALLBACK_ERRNO); } public final T io; public final int errno; public boolean isSuccess() { return io != null && errno == 0; } public boolean isFallback() { return io != null && errno == FALLBACK_ERRNO; } private FileResult(T io, int errno) { this.io = io; this.errno = errno; } }
516
841
<reponame>brunolmfg/resteasy package org.jboss.resteasy.test.request.resource; import jakarta.ws.rs.Consumes; import jakarta.ws.rs.PUT; import jakarta.ws.rs.Path; @Path("/xml") public class AcceptXmlResource { @Consumes("application/xml;schema=foo") @PUT public void putFoo(String foo) { } @Consumes("application/xml") @PUT public void put(String foo) { } @Consumes("application/xml;schema=bar") @PUT public void putBar(String foo) { } }
193
357
/** * This file is part of Eclipse Steady. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * SPDX-License-Identifier: Apache-2.0 * SPDX-FileCopyrightText: Copyright (c) 2018-2020 SAP SE or an SAP affiliate company and Eclipse Steady contributors */ package org.eclipse.steady.backend.util; import java.util.Collection; import javax.persistence.EntityNotFoundException; import javax.validation.constraints.NotNull; import org.springframework.data.repository.CrudRepository; /** * {@link CrudRepository#findById(Object)} only works for the primary key of the respective entity. * All other "find" methods that can be specified in the extended interface only return {@link Collection}s of objects * that match the search criteria. This class works around this problem... * * @param <T> type of objects handled by the filter. */ public class ResultSetFilter<T> { /** * <p>findOne.</p> * * @param _collection a {@link java.util.Collection} object. * @return the single object contained in the given collection. * @throws EntityNotFoundException if the given * collection is empty or contains multiple elements. */ public T findOne(@NotNull Collection<T> _collection) throws EntityNotFoundException { if (_collection == null || _collection.isEmpty()) { throw new EntityNotFoundException("Object not found"); } else if (_collection.size() > 1) { throw new EntityNotFoundException("Multiple objects found"); } else { return _collection.iterator().next(); } } }
580
663
<reponame>andriyor/agate<filename>agate/aggregations/stdev.py<gh_stars>100-1000 #!/usr/bin/env python from agate.aggregations import Aggregation from agate.aggregations.has_nulls import HasNulls from agate.aggregations.variance import PopulationVariance, Variance from agate.data_types import Number from agate.exceptions import DataTypeError from agate.warns import warn_null_calculation class StDev(Aggregation): """ Calculate the sample standard deviation of a column. For the population standard deviation see :class:`.PopulationStDev`. :param column_name: The name of a column containing :class:`.Number` data. """ def __init__(self, column_name): self._column_name = column_name self._variance = Variance(column_name) def get_aggregate_data_type(self, table): return Number() def validate(self, table): column = table.columns[self._column_name] if not isinstance(column.data_type, Number): raise DataTypeError('StDev can only be applied to columns containing Number data.') has_nulls = HasNulls(self._column_name).run(table) if has_nulls: warn_null_calculation(self, column) def run(self, table): variance = self._variance.run(table) if variance is not None: return variance.sqrt() class PopulationStDev(StDev): """ Calculate the population standard deviation of a column. For the sample standard deviation see :class:`.StDev`. :param column_name: The name of a column containing :class:`.Number` data. """ def __init__(self, column_name): self._column_name = column_name self._population_variance = PopulationVariance(column_name) def get_aggregate_data_type(self, table): return Number() def validate(self, table): column = table.columns[self._column_name] if not isinstance(column.data_type, Number): raise DataTypeError('PopulationStDev can only be applied to columns containing Number data.') has_nulls = HasNulls(self._column_name).run(table) if has_nulls: warn_null_calculation(self, column) def run(self, table): variance = self._population_variance.run(table) if variance is not None: return variance.sqrt()
880
3,102
// NameInDir2.h
8
4,140
<reponame>FANsZL/hive<filename>ql/src/java/org/apache/hadoop/hive/ql/hooks/HookContext.java /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.hooks; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.hive.common.classification.InterfaceAudience; import org.apache.hadoop.hive.common.classification.InterfaceStability; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.HiveDriverRunHook; import org.apache.hadoop.hive.ql.QueryInfo; import org.apache.hadoop.hive.ql.QueryPlan; import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.TaskRunner; import org.apache.hadoop.hive.ql.log.PerfLogger; import org.apache.hadoop.hive.ql.optimizer.lineage.LineageCtx.Index; import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHook; import org.apache.hadoop.hive.shims.Utils; import org.apache.hadoop.security.UserGroupInformation; /** * Hook Context keeps all the necessary information for all the hooks. 
* New implemented hook can get the query plan, job conf and the list of all completed tasks from this hook context */ @InterfaceAudience.Public @InterfaceStability.Stable public class HookContext { static public enum HookType { PRE_EXEC_HOOK(HiveConf.ConfVars.PREEXECHOOKS, ExecuteWithHookContext.class, "Pre-execution hooks to be invoked for each statement"), POST_EXEC_HOOK(HiveConf.ConfVars.POSTEXECHOOKS, ExecuteWithHookContext.class, "Post-execution hooks to be invoked for each statement"), ON_FAILURE_HOOK(HiveConf.ConfVars.ONFAILUREHOOKS, ExecuteWithHookContext.class, "On-failure hooks to be invoked for each statement"), QUERY_LIFETIME_HOOKS(HiveConf.ConfVars.HIVE_QUERY_LIFETIME_HOOKS, QueryLifeTimeHook.class, "Hooks that will be triggered before/after query compilation and before/after query execution"), SEMANTIC_ANALYZER_HOOK(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK, HiveSemanticAnalyzerHook.class, "Hooks that invoked before/after Hive performs its own semantic analysis on a statement"), DRIVER_RUN_HOOKS(HiveConf.ConfVars.HIVE_DRIVER_RUN_HOOKS, HiveDriverRunHook.class, "Hooks that Will be run at the beginning and end of Driver.run"), QUERY_REDACTOR_HOOKS(HiveConf.ConfVars.QUERYREDACTORHOOKS, Redactor.class, "Hooks to be invoked for each query which can tranform the query before it's placed in the job.xml file"), // The HiveSessionHook.class cannot access, use Hook.class instead HIVE_SERVER2_SESSION_HOOK(HiveConf.ConfVars.HIVE_SERVER2_SESSION_HOOK, Hook.class, "Hooks to be executed when session manager starts a new session"); private final HiveConf.ConfVars confVar; // the super class or interface of the corresponding hooks private final Class hookClass; private final String description; HookType(HiveConf.ConfVars confVar, Class hookClass, String description) { this.confVar = confVar; this.description = description; this.hookClass = hookClass; } public Class getHookClass() { return this.hookClass; } public HiveConf.ConfVars getConfVar() { return this.confVar; } public String getDescription() { return this.description; } } private QueryPlan queryPlan; private final QueryState queryState; private HiveConf conf; private List<TaskRunner> completeTaskList; private Set<ReadEntity> inputs; private Set<WriteEntity> outputs; private LineageInfo linfo; private Index depMap; private UserGroupInformation ugi; private HookType hookType; private String errorMessage; private Throwable exception; final private Map<String, ContentSummary> inputPathToContentSummary; private final String ipAddress; private final String hiveInstanceAddress; private final String userName; // unique id set for operation when run from HS2, base64 encoded value of // TExecuteStatementResp.TOperationHandle.THandleIdentifier.guid private final String operationId; private final String sessionId; private final String threadId; private final boolean isHiveServerQuery; private final PerfLogger perfLogger; private final QueryInfo queryInfo; public HookContext(QueryPlan queryPlan, QueryState queryState, Map<String, ContentSummary> inputPathToContentSummary, String userName, String ipAddress, String hiveInstanceAddress, String operationId, String sessionId, String threadId, boolean isHiveServerQuery, PerfLogger perfLogger, QueryInfo queryInfo) throws Exception { this.queryPlan = queryPlan; this.queryState = queryState; this.conf = queryState.getConf(); this.inputPathToContentSummary = inputPathToContentSummary; completeTaskList = new ArrayList<TaskRunner>(); inputs = queryPlan.getInputs(); outputs = queryPlan.getOutputs(); ugi = 
Utils.getUGI(); linfo = queryState.getLineageState().getLineageInfo(); depMap = queryState.getLineageState().getIndex(); this.userName = userName; this.ipAddress = ipAddress; this.hiveInstanceAddress = hiveInstanceAddress; this.operationId = operationId; this.sessionId = sessionId; this.threadId = threadId; this.isHiveServerQuery = isHiveServerQuery; this.perfLogger = perfLogger; this.queryInfo = queryInfo; } public QueryPlan getQueryPlan() { return queryPlan; } public void setQueryPlan(QueryPlan queryPlan) { this.queryPlan = queryPlan; } public HiveConf getConf() { return conf; } public void setConf(HiveConf conf) { this.conf = conf; } public List<TaskRunner> getCompleteTaskList() { return completeTaskList; } public void setCompleteTaskList(List<TaskRunner> completeTaskList) { this.completeTaskList = completeTaskList; } public void addCompleteTask(TaskRunner completeTaskRunner) { completeTaskList.add(completeTaskRunner); } public Set<ReadEntity> getInputs() { return inputs; } public void setInputs(Set<ReadEntity> inputs) { this.inputs = inputs; } public Set<WriteEntity> getOutputs() { return outputs; } public void setOutputs(Set<WriteEntity> outputs) { this.outputs = outputs; } public LineageInfo getLinfo() { return linfo; } public void setLinfo(LineageInfo linfo) { this.linfo = linfo; } public Index getIndex() { return depMap; } public void setIndex(Index depMap) { this.depMap = depMap; } public UserGroupInformation getUgi() { return ugi; } public void setUgi(UserGroupInformation ugi) { this.ugi = ugi; } public Map<String, ContentSummary> getInputPathToContentSummary() { return inputPathToContentSummary; } public HookType getHookType() { return hookType; } public void setHookType(HookType hookType) { this.hookType = hookType; } public String getIpAddress() { return this.ipAddress; } public String getHiveInstanceAddress() { return hiveInstanceAddress; } public void setErrorMessage(String errorMessage) { this.errorMessage = errorMessage; } public String getErrorMessage() { return errorMessage; } public void setException(Throwable exception) { this.exception = exception; } public Throwable getException() { return exception; } public String getOperationName() { return queryPlan.getOperationName(); } public String getUserName() { return this.userName; } public String getOperationId() { return operationId; } public QueryState getQueryState() { return queryState; } public String getSessionId() { return sessionId; } public String getThreadId() { return threadId; } public boolean isHiveServerQuery() { return isHiveServerQuery; } public PerfLogger getPerfLogger() { return perfLogger; } public QueryInfo getQueryInfo() { return queryInfo; } }
2,876
947
import re import warnings import pkg_resources from sqlalchemy.exc import ArgumentError from sqlalchemy.orm import class_mapper, object_mapper from sqlalchemy.orm.exc import UnmappedClassError, UnmappedInstanceError def get_session(context): return context.get("session") def get_query(model, context): query = getattr(model, "query", None) if not query: session = get_session(context) if not session: raise Exception( "A query in the model Base or a session in the schema is required for querying.\n" "Read more http://docs.graphene-python.org/projects/sqlalchemy/en/latest/tips/#querying" ) query = session.query(model) return query def is_mapped_class(cls): try: class_mapper(cls) except (ArgumentError, UnmappedClassError): return False else: return True def is_mapped_instance(cls): try: object_mapper(cls) except (ArgumentError, UnmappedInstanceError): return False else: return True def to_type_name(name): """Convert the given name to a GraphQL type name.""" return "".join(part[:1].upper() + part[1:] for part in name.split("_")) _re_enum_value_name_1 = re.compile("(.)([A-Z][a-z]+)") _re_enum_value_name_2 = re.compile("([a-z0-9])([A-Z])") def to_enum_value_name(name): """Convert the given name to a GraphQL enum value name.""" return _re_enum_value_name_2.sub( r"\1_\2", _re_enum_value_name_1.sub(r"\1_\2", name) ).upper() class EnumValue(str): """String that has an additional value attached. This is used to attach SQLAlchemy model columns to Enum symbols. """ def __new__(cls, s, value): return super(EnumValue, cls).__new__(cls, s) def __init__(self, _s, value): super(EnumValue, self).__init__() self.value = value def _deprecated_default_symbol_name(column_name, sort_asc): return column_name + ("_asc" if sort_asc else "_desc") # unfortunately, we cannot use lru_cache because we still support Python 2 _deprecated_object_type_cache = {} def _deprecated_object_type_for_model(cls, name): try: return _deprecated_object_type_cache[cls, name] except KeyError: from .types import SQLAlchemyObjectType obj_type_name = name or cls.__name__ class ObjType(SQLAlchemyObjectType): class Meta: name = obj_type_name model = cls _deprecated_object_type_cache[cls, name] = ObjType return ObjType def sort_enum_for_model(cls, name=None, symbol_name=None): """Get a Graphene Enum for sorting the given model class. This is deprecated, please use object_type.sort_enum() instead. """ warnings.warn( "sort_enum_for_model() is deprecated; use object_type.sort_enum() instead.", DeprecationWarning, stacklevel=2, ) from .enums import sort_enum_for_object_type return sort_enum_for_object_type( _deprecated_object_type_for_model(cls, name), name, get_symbol_name=symbol_name or _deprecated_default_symbol_name, ) def sort_argument_for_model(cls, has_default=True): """Get a Graphene Argument for sorting the given model class. This is deprecated, please use object_type.sort_argument() instead. """ warnings.warn( "sort_argument_for_model() is deprecated;" " use object_type.sort_argument() instead.", DeprecationWarning, stacklevel=2, ) from graphene import Argument, List from .enums import sort_enum_for_object_type enum = sort_enum_for_object_type( _deprecated_object_type_for_model(cls, None), get_symbol_name=_deprecated_default_symbol_name, ) if not has_default: enum.default = None return Argument(List(enum), default_value=enum.default) def is_sqlalchemy_version_less_than(version_string): """Check the installed SQLAlchemy version""" return pkg_resources.get_distribution('SQLAlchemy').parsed_version < pkg_resources.parse_version(version_string)
1,667
922
/* * Hibernate Validator, declare and validate application constraints * * License: Apache License, Version 2.0 * See the license.txt file in the root directory or <http://www.apache.org/licenses/LICENSE-2.0>. */ package org.hibernate.validator.internal.constraintvalidators.hv.pl; import java.util.List; import org.hibernate.validator.constraints.pl.REGON; /** * Validator for {@link REGON}. Validates both 9- and 14-digit REGON numbers. * * @author <NAME> */ public class REGONValidator extends PolishNumberValidator<REGON> { private static final int[] WEIGHTS_REGON_14 = { 2, 4, 8, 5, 0, 9, 7, 3, 6, 1, 2, 4, 8 }; private static final int[] WEIGHTS_REGON_9 = { 8, 9, 2, 3, 4, 5, 6, 7 }; @Override public void initialize(REGON constraintAnnotation) { super.initialize( 0, Integer.MAX_VALUE, -1, false ); } /** * @param digits a list of digits to be verified. They are used to determine the size of the REGON number - is it a 9- or 14-digit number * * @return an array of weights to be used to calculate a checksum */ @Override protected int[] getWeights(List<Integer> digits) { if ( digits.size() == 8 ) { return WEIGHTS_REGON_9; } else if ( digits.size() == 13 ) { return WEIGHTS_REGON_14; } else { return new int[] { }; } } }
475
384
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Masks one `Series` based on the content of another `Series`.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.contrib.learn.python.learn.dataframe import transform from tensorflow.python.ops import string_ops class HashFast(transform.TensorFlowTransform): """Perform a fast hash of a `Series`.""" def __init__(self, num_buckets): """Initialize `HashFast`. Args: num_buckets: The number of hash buckets to use. """ # TODO(soergel): allow seed? super(HashFast, self).__init__() self._num_buckets = num_buckets @property def name(self): return "HashFast" @property def input_valency(self): return 1 @property def _output_names(self): return "output", def _apply_transform(self, input_tensors, **kwargs): """Applies the transformation to the `transform_input`. Args: input_tensors: a list of Tensors representing the input to the Transform. **kwargs: additional keyword arguments, unused here. Returns: A namedtuple of Tensors representing the transformed output. """ result = string_ops.string_to_hash_bucket_fast(input_tensors[0], self._num_buckets, name=None) # pylint: disable=not-callable return self.return_type(result)
725
9,308
<gh_stars>1000+ # vim:ts=4:sw=4:et: # Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # no unicode literals from __future__ import absolute_import, division, print_function import os import WatchmanEdenTestCase def populate(repo): # We ignore ".hg" here just so some of the tests that list files don't have to # explicitly filter out the contents of this directory. However, in most situations # the .hg directory normally should not be ignored. repo.write_file(".watchmanconfig", '{"ignore_dirs":[".hg"]}') repo.write_file("hello", "hola\n") repo.write_file("adir/file", "foo!\n") repo.write_file("bdir/test.sh", "#!/bin/bash\necho test\n", mode=0o755) repo.write_file("bdir/noexec.sh", "#!/bin/bash\necho test\n") repo.symlink("slink", "hello") repo.commit("initial commit.") class TestEdenSince(WatchmanEdenTestCase.WatchmanEdenTestCase): def test_eden_lazy_eval(self): root = self.makeEdenMount(populate) res = self.watchmanCommand("watch", root) self.assertEqual("eden", res["watcher"]) res = self.watchmanCommand( "query", root, { "expression": ["allof", ["type", "f"], ["match", "*.sh"]], "fields": ["name"], "since": "c:0:0", }, ) self.assertFileListsEqual(res["files"], ["bdir/test.sh", "bdir/noexec.sh"]) def test_eden_empty_relative_root(self): root = self.makeEdenMount(populate) res = self.watchmanCommand("watch", root) self.assertEqual("eden", res["watcher"]) res = self.watchmanCommand( "query", root, { "expression": ["type", "f"], "relative_root": "", "fields": ["name"], "since": "c:0:0", }, ) self.assertFileListsEqual( res["files"], [".watchmanconfig", "hello", "adir/file", "bdir/test.sh", "bdir/noexec.sh"], ) def test_eden_since(self): root = self.makeEdenMount(populate) res = self.watchmanCommand("watch", root) self.assertEqual("eden", res["watcher"]) res = self.watchmanCommand( "query", root, {"expression": ["type", "f"], "fields": ["name"], "since": "c:0:0"}, ) self.assertTrue(res["is_fresh_instance"]) self.assertFileListsEqual( res["files"], ["hello", "adir/file", "bdir/test.sh", "bdir/noexec.sh", ".watchmanconfig"], ) res = self.watchmanCommand( "query", root, { "expression": ["type", "f"], "relative_root": "adir", "fields": ["name"], "since": "c:0:0", }, ) self.assertFileListsEqual( res["files"], ["file"], message="should only return adir/file with no adir prefix", ) clock = res["clock"] self.touchRelative(root, "hello") res = self.watchmanCommand( "query", root, {"expression": ["type", "f"], "fields": ["name"], "since": clock}, ) self.assertFileListsEqual(res["files"], ["hello"]) res = self.watchmanCommand( "query", root, {"expression": ["type", "f"], "fields": ["name", "new"], "since": clock}, ) self.assertEqual([{"name": "hello", "new": False}], res["files"]) self.touchRelative(root, "hello") res = self.watchmanCommand( "query", root, { "expression": ["type", "f"], "fields": ["name", "new"], "since": res["clock"], }, ) self.assertEqual([{"name": "hello", "new": False}], res["files"]) res = self.watchmanCommand( "query", root, {"expression": ["type", "f"], "fields": ["name"], "since": res["clock"]}, ) self.assertFileListsEqual(res["files"], []) res = self.watchmanCommand( "query", root, { "expression": ["type", "f"], "empty_on_fresh_instance": True, "fields": ["name"], "since": "c:0:0", }, ) self.assertTrue(res["is_fresh_instance"]) self.assertFileListsEqual(res["files"], []) os.unlink(os.path.join(root, "hello")) res = self.watchmanCommand( "query", root, 
{"fields": ["name"], "since": res["clock"]} ) self.assertFileListsEqual(res["files"], ["hello"]) res = self.watchmanCommand( "query", root, {"expression": ["type", "f"], "fields": ["name"], "since": res["clock"]}, ) self.assertFileListsEqual(res["files"], []) self.touchRelative(root, "newfile") res = self.watchmanCommand( "query", root, { "expression": ["type", "f"], "fields": ["name", "new"], "since": res["clock"], }, ) self.assertEqual([{"name": "newfile", "new": True}], res["files"]) self.touchRelative(root, "newfile") res = self.watchmanCommand( "query", root, { "expression": ["type", "f"], "fields": ["name", "new"], "since": res["clock"], }, ) self.assertEqual([{"name": "newfile", "new": False}], res["files"]) adir_file = os.path.join(root, "adir/file") os.unlink(adir_file) with open(adir_file, "w") as f: f.write("new contents\n") res = self.watchmanCommand( "query", root, { "expression": ["type", "f"], "fields": ["name", "new"], "since": res["clock"], }, ) self.assertEqual([{"name": "adir/file", "new": False}], res["files"])
3,216
376
<gh_stars>100-1000 import torch.nn as nn # Here we use DistributedDataParallel(DDP) rather than DataParallel(DP) for multi-GPU training def is_multi_gpu(net): return isinstance(net, (MultiGPU, nn.parallel.distributed.DistributedDataParallel)) class MultiGPU(nn.parallel.distributed.DistributedDataParallel): def __getattr__(self, item): try: return super().__getattr__(item) except: pass return getattr(self.module, item)
190
7,482
/* * Copyright (c) 2010-2012, Freescale Semiconductor, Inc. * All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, * are permitted provided that the following conditions are met: * * o Redistributions of source code must retain the above copyright notice, this list * of conditions and the following disclaimer. * * o Redistributions in binary form must reproduce the above copyright notice, this * list of conditions and the following disclaimer in the documentation and/or * other materials provided with the distribution. * * o Neither the name of Freescale Semiconductor, Inc. nor the names of its * contributors may be used to endorse or promote products derived from this * software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ #include "hab_defines.h" #include "soc_memory_map.h" //! @brief dcd data, list of (register, value) pairs to initialize ddr uint8_t input_dcd[] __attribute__ ((section(".dcd_data"))) = { /* * The DDR init commands below are based on MX6_series_boards/SabreSD/RevC_and_RevB/MX6DQ/MX6Q_SabreSD_DDR3_register_programming_aid_v1.6.inc * This file can be found at ddr-script-rel git with commit <PASSWORD>df54d5043b855c8 */ EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x798), EXPAND_UINT32(0x000C0000), EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x758), EXPAND_UINT32(0x00000000), EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x588), EXPAND_UINT32(0x00000030), EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x594), EXPAND_UINT32(0x00000030), EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x56c), EXPAND_UINT32(0x00000030), EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x578), EXPAND_UINT32(0x00000030), EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x74c), EXPAND_UINT32(0x00000030), EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x57c), EXPAND_UINT32(0x00000030), EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x58c), EXPAND_UINT32(0x00000000), EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x59c), EXPAND_UINT32(0x00000030), EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x5a0), EXPAND_UINT32(0x00000030), EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x78c), EXPAND_UINT32(0x00000030), EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x750), EXPAND_UINT32(0x00020000), EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x5a8), EXPAND_UINT32(0x00000030), EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x5b0), EXPAND_UINT32(0x00000030), EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x524), EXPAND_UINT32(0x00000030), EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x51c), EXPAND_UINT32(0x00000030), EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x518), EXPAND_UINT32(0x00000030), EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x50c), EXPAND_UINT32(0x00000030), EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x5b8), EXPAND_UINT32(0x00000030), EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x5c0), EXPAND_UINT32(0x00000030), EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x774), EXPAND_UINT32(0x00020000), EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x784), EXPAND_UINT32(0x00000030), 
EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x788), EXPAND_UINT32(0x00000030), EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x794), EXPAND_UINT32(0x00000030), EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x79c), EXPAND_UINT32(0x00000030), EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x7a0), EXPAND_UINT32(0x00000030), EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x7a4), EXPAND_UINT32(0x00000030), EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x7a8), EXPAND_UINT32(0x00000030), EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x748), EXPAND_UINT32(0x00000030), EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x5ac), EXPAND_UINT32(0x00000030), EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x5b4), EXPAND_UINT32(0x00000030), EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x528), EXPAND_UINT32(0x00000030), EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x520), EXPAND_UINT32(0x00000030), EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x514), EXPAND_UINT32(0x00000030), EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x510), EXPAND_UINT32(0x00000030), EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x5bc), EXPAND_UINT32(0x00000030), EXPAND_UINT32(IOMUXC_BASE_ADDR + 0x5c4), EXPAND_UINT32(0x00000030), EXPAND_UINT32(MMDC_P0_BASE_ADDR + 0x800), EXPAND_UINT32(0xa1390003), EXPAND_UINT32(MMDC_P0_BASE_ADDR + 0x80c), EXPAND_UINT32(0x001F001F), EXPAND_UINT32(MMDC_P0_BASE_ADDR + 0x810), EXPAND_UINT32(0x001F001F), EXPAND_UINT32(MMDC_P1_BASE_ADDR + 0x80c), EXPAND_UINT32(0x001F001F), EXPAND_UINT32(MMDC_P1_BASE_ADDR + 0x810), EXPAND_UINT32(0x001F001F), EXPAND_UINT32(MMDC_P0_BASE_ADDR + 0x83c), EXPAND_UINT32(0x43270338), EXPAND_UINT32(MMDC_P0_BASE_ADDR + 0x840), EXPAND_UINT32(0x03200314), EXPAND_UINT32(MMDC_P1_BASE_ADDR + 0x83c), EXPAND_UINT32(0x431A032F), EXPAND_UINT32(MMDC_P1_BASE_ADDR + 0x840), EXPAND_UINT32(0x03200263), EXPAND_UINT32(MMDC_P0_BASE_ADDR + 0x848), EXPAND_UINT32(0x4B434748), EXPAND_UINT32(MMDC_P1_BASE_ADDR + 0x848), EXPAND_UINT32(0x4445404C), EXPAND_UINT32(MMDC_P0_BASE_ADDR + 0x850), EXPAND_UINT32(0x38444542), EXPAND_UINT32(MMDC_P1_BASE_ADDR + 0x850), EXPAND_UINT32(0x4935493A), EXPAND_UINT32(MMDC_P0_BASE_ADDR + 0x81c), EXPAND_UINT32(0x33333333), EXPAND_UINT32(MMDC_P0_BASE_ADDR + 0x820), EXPAND_UINT32(0x33333333), EXPAND_UINT32(MMDC_P0_BASE_ADDR + 0x824), EXPAND_UINT32(0x33333333), EXPAND_UINT32(MMDC_P0_BASE_ADDR + 0x828), EXPAND_UINT32(0x33333333), EXPAND_UINT32(MMDC_P1_BASE_ADDR + 0x81c), EXPAND_UINT32(0x33333333), EXPAND_UINT32(MMDC_P1_BASE_ADDR + 0x820), EXPAND_UINT32(0x33333333), EXPAND_UINT32(MMDC_P1_BASE_ADDR + 0x824), EXPAND_UINT32(0x33333333), EXPAND_UINT32(MMDC_P1_BASE_ADDR + 0x828), EXPAND_UINT32(0x33333333), EXPAND_UINT32(MMDC_P0_BASE_ADDR + 0x8b8), EXPAND_UINT32(0x00000800), EXPAND_UINT32(MMDC_P1_BASE_ADDR + 0x8b8), EXPAND_UINT32(0x00000800), EXPAND_UINT32(MMDC_P0_BASE_ADDR + 0x004), EXPAND_UINT32(0x00020036), EXPAND_UINT32(MMDC_P0_BASE_ADDR + 0x008), EXPAND_UINT32(0x09444040), EXPAND_UINT32(MMDC_P0_BASE_ADDR + 0x00c), EXPAND_UINT32(0x555A7975), EXPAND_UINT32(MMDC_P0_BASE_ADDR + 0x010), EXPAND_UINT32(0xFF538F64), EXPAND_UINT32(MMDC_P0_BASE_ADDR + 0x014), EXPAND_UINT32(0x01FF00DB), EXPAND_UINT32(MMDC_P0_BASE_ADDR + 0x018), EXPAND_UINT32(0x00001740), EXPAND_UINT32(MMDC_P0_BASE_ADDR + 0x01c), EXPAND_UINT32(0x00008000), EXPAND_UINT32(MMDC_P0_BASE_ADDR + 0x02c), EXPAND_UINT32(0x000026d2), EXPAND_UINT32(MMDC_P0_BASE_ADDR + 0x030), EXPAND_UINT32(0x005A1023), EXPAND_UINT32(MMDC_P0_BASE_ADDR + 0x040), EXPAND_UINT32(0x00000027), EXPAND_UINT32(MMDC_P0_BASE_ADDR + 0x000), EXPAND_UINT32(0x831A0000), EXPAND_UINT32(MMDC_P0_BASE_ADDR + 0x01c), EXPAND_UINT32(0x04088032), EXPAND_UINT32(MMDC_P0_BASE_ADDR + 0x01c), EXPAND_UINT32(0x00008033), EXPAND_UINT32(MMDC_P0_BASE_ADDR + 0x01c), 
EXPAND_UINT32(0x00048031), EXPAND_UINT32(MMDC_P0_BASE_ADDR + 0x01c), EXPAND_UINT32(0x09408030), EXPAND_UINT32(MMDC_P0_BASE_ADDR + 0x01c), EXPAND_UINT32(0x09408038), // EXPAND_UINT32(MMDC_P0_BASE_ADDR + 0x01c), EXPAND_UINT32(0x04008040), EXPAND_UINT32(MMDC_P0_BASE_ADDR + 0x020), EXPAND_UINT32(0x00005000), EXPAND_UINT32(MMDC_P0_BASE_ADDR + 0x818), EXPAND_UINT32(0x00011117), EXPAND_UINT32(MMDC_P1_BASE_ADDR + 0x818), EXPAND_UINT32(0x00011117), EXPAND_UINT32(MMDC_P0_BASE_ADDR + 0x004), EXPAND_UINT32(0x00025576), EXPAND_UINT32(MMDC_P0_BASE_ADDR + 0x404), EXPAND_UINT32(0x00011006), EXPAND_UINT32(MMDC_P0_BASE_ADDR + 0x01c), EXPAND_UINT32(0x00000000), }; //! @brief HAB command write data header, with tag, //! size of dcd data with hdr, //! parameter field (size of register value and flag) uint8_t input_dcd_wrt_cmd[] __attribute__ ((section(".dcd_wrt_cmd"))) = { HAB_CMD_WRT_DAT, EXPAND_UINT16(sizeof(input_dcd) + HDR_BYTES), WRT_DAT_PAR(0, HAB_DATA_WIDTH_WORD) //!< flag 0, width 4 }; //! @brief HAB dcd header with dcd tag, size of entire dcd and version. uint8_t input_dcd_hdr[] __attribute__ ((section(".dcd_hdr"))) = { HAB_TAG_DCD, EXPAND_UINT16(sizeof(input_dcd) + sizeof(input_dcd_wrt_cmd) + HDR_BYTES), HAB_VER(4, 0) };
4,573
1,279
{"uuid":"ea0194cb-f599-402f-8876-84fb8251e8db","parent":"37d58de4-deea-4808-bb77-d27685bd1501","pid":93239,"argv":["/usr/local/bin/node","/Users/isaacs/dev/js/tar/test/00-setup-fixtures.js"],"execArgv":["-r","/usr/local/lib/node_modules/tap/node_modules/esm/esm.js"],"cwd":"/Users/isaacs/dev/js/tar","time":1557878801284,"ppid":93238,"root":"e52f8603-1293-44df-8bfa-ed740bdd2b77","coverageFilename":"/Users/isaacs/dev/js/tar/.nyc_output/ea0194cb-f599-402f-8876-84fb8251e8db.json","externalId":"test/00-setup-fixtures.js","files":[]}
237
17,006
/** * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ // -*- c++ -*- #include <faiss/IVFlib.h> #include <memory> #include <faiss/IndexPreTransform.h> #include <faiss/MetaIndexes.h> #include <faiss/impl/FaissAssert.h> #include <faiss/utils/utils.h> namespace faiss { namespace ivflib { void check_compatible_for_merge(const Index* index0, const Index* index1) { const faiss::IndexPreTransform* pt0 = dynamic_cast<const faiss::IndexPreTransform*>(index0); if (pt0) { const faiss::IndexPreTransform* pt1 = dynamic_cast<const faiss::IndexPreTransform*>(index1); FAISS_THROW_IF_NOT_MSG(pt1, "both indexes should be pretransforms"); FAISS_THROW_IF_NOT(pt0->chain.size() == pt1->chain.size()); for (int i = 0; i < pt0->chain.size(); i++) { FAISS_THROW_IF_NOT(typeid(pt0->chain[i]) == typeid(pt1->chain[i])); } index0 = pt0->index; index1 = pt1->index; } FAISS_THROW_IF_NOT(typeid(index0) == typeid(index1)); FAISS_THROW_IF_NOT( index0->d == index1->d && index0->metric_type == index1->metric_type); const faiss::IndexIVF* ivf0 = dynamic_cast<const faiss::IndexIVF*>(index0); if (ivf0) { const faiss::IndexIVF* ivf1 = dynamic_cast<const faiss::IndexIVF*>(index1); FAISS_THROW_IF_NOT(ivf1); ivf0->check_compatible_for_merge(*ivf1); } // TODO: check as thoroughfully for other index types } const IndexIVF* try_extract_index_ivf(const Index* index) { if (auto* pt = dynamic_cast<const IndexPreTransform*>(index)) { index = pt->index; } if (auto* idmap = dynamic_cast<const IndexIDMap*>(index)) { index = idmap->index; } if (auto* idmap = dynamic_cast<const IndexIDMap2*>(index)) { index = idmap->index; } auto* ivf = dynamic_cast<const IndexIVF*>(index); return ivf; } IndexIVF* try_extract_index_ivf(Index* index) { return const_cast<IndexIVF*>(try_extract_index_ivf((const Index*)(index))); } const IndexIVF* extract_index_ivf(const Index* index) { const IndexIVF* ivf = try_extract_index_ivf(index); FAISS_THROW_IF_NOT(ivf); return ivf; } IndexIVF* extract_index_ivf(Index* index) { return const_cast<IndexIVF*>(extract_index_ivf((const Index*)(index))); } void merge_into(faiss::Index* index0, faiss::Index* index1, bool shift_ids) { check_compatible_for_merge(index0, index1); IndexIVF* ivf0 = extract_index_ivf(index0); IndexIVF* ivf1 = extract_index_ivf(index1); ivf0->merge_from(*ivf1, shift_ids ? 
ivf0->ntotal : 0); // useful for IndexPreTransform index0->ntotal = ivf0->ntotal; index1->ntotal = ivf1->ntotal; } void search_centroid( faiss::Index* index, const float* x, int n, idx_t* centroid_ids) { std::unique_ptr<float[]> del; if (auto index_pre = dynamic_cast<faiss::IndexPreTransform*>(index)) { x = index_pre->apply_chain(n, x); del.reset((float*)x); index = index_pre->index; } faiss::IndexIVF* index_ivf = dynamic_cast<faiss::IndexIVF*>(index); assert(index_ivf); index_ivf->quantizer->assign(n, x, centroid_ids); } void search_and_return_centroids( faiss::Index* index, size_t n, const float* xin, long k, float* distances, idx_t* labels, idx_t* query_centroid_ids, idx_t* result_centroid_ids) { const float* x = xin; std::unique_ptr<float[]> del; if (auto index_pre = dynamic_cast<faiss::IndexPreTransform*>(index)) { x = index_pre->apply_chain(n, x); del.reset((float*)x); index = index_pre->index; } faiss::IndexIVF* index_ivf = dynamic_cast<faiss::IndexIVF*>(index); assert(index_ivf); size_t nprobe = index_ivf->nprobe; std::vector<idx_t> cent_nos(n * nprobe); std::vector<float> cent_dis(n * nprobe); index_ivf->quantizer->search( n, x, nprobe, cent_dis.data(), cent_nos.data()); if (query_centroid_ids) { for (size_t i = 0; i < n; i++) query_centroid_ids[i] = cent_nos[i * nprobe]; } index_ivf->search_preassigned( n, x, k, cent_nos.data(), cent_dis.data(), distances, labels, true); for (size_t i = 0; i < n * k; i++) { idx_t label = labels[i]; if (label < 0) { if (result_centroid_ids) result_centroid_ids[i] = -1; } else { long list_no = lo_listno(label); long list_index = lo_offset(label); if (result_centroid_ids) result_centroid_ids[i] = list_no; labels[i] = index_ivf->invlists->get_single_id(list_no, list_index); } } } SlidingIndexWindow::SlidingIndexWindow(Index* index) : index(index) { n_slice = 0; IndexIVF* index_ivf = const_cast<IndexIVF*>(extract_index_ivf(index)); ils = dynamic_cast<ArrayInvertedLists*>(index_ivf->invlists); FAISS_THROW_IF_NOT_MSG( ils, "only supports indexes with ArrayInvertedLists"); nlist = ils->nlist; sizes.resize(nlist); } template <class T> static void shift_and_add( std::vector<T>& dst, size_t remove, const std::vector<T>& src) { if (remove > 0) memmove(dst.data(), dst.data() + remove, (dst.size() - remove) * sizeof(T)); size_t insert_point = dst.size() - remove; dst.resize(insert_point + src.size()); memcpy(dst.data() + insert_point, src.data(), src.size() * sizeof(T)); } template <class T> static void remove_from_begin(std::vector<T>& v, size_t remove) { if (remove > 0) v.erase(v.begin(), v.begin() + remove); } void SlidingIndexWindow::step(const Index* sub_index, bool remove_oldest) { FAISS_THROW_IF_NOT_MSG( !remove_oldest || n_slice > 0, "cannot remove slice: there is none"); const ArrayInvertedLists* ils2 = nullptr; if (sub_index) { check_compatible_for_merge(index, sub_index); ils2 = dynamic_cast<const ArrayInvertedLists*>( extract_index_ivf(sub_index)->invlists); FAISS_THROW_IF_NOT_MSG(ils2, "supports only ArrayInvertedLists"); } IndexIVF* index_ivf = extract_index_ivf(index); if (remove_oldest && ils2) { for (int i = 0; i < nlist; i++) { std::vector<size_t>& sizesi = sizes[i]; size_t amount_to_remove = sizesi[0]; index_ivf->ntotal += ils2->ids[i].size() - amount_to_remove; shift_and_add(ils->ids[i], amount_to_remove, ils2->ids[i]); shift_and_add( ils->codes[i], amount_to_remove * ils->code_size, ils2->codes[i]); for (int j = 0; j + 1 < n_slice; j++) { sizesi[j] = sizesi[j + 1] - amount_to_remove; } sizesi[n_slice - 1] = ils->ids[i].size(); } } else if 
(ils2) { for (int i = 0; i < nlist; i++) { index_ivf->ntotal += ils2->ids[i].size(); shift_and_add(ils->ids[i], 0, ils2->ids[i]); shift_and_add(ils->codes[i], 0, ils2->codes[i]); sizes[i].push_back(ils->ids[i].size()); } n_slice++; } else if (remove_oldest) { for (int i = 0; i < nlist; i++) { size_t amount_to_remove = sizes[i][0]; index_ivf->ntotal -= amount_to_remove; remove_from_begin(ils->ids[i], amount_to_remove); remove_from_begin(ils->codes[i], amount_to_remove * ils->code_size); for (int j = 0; j + 1 < n_slice; j++) { sizes[i][j] = sizes[i][j + 1] - amount_to_remove; } sizes[i].pop_back(); } n_slice--; } else { FAISS_THROW_MSG("nothing to do???"); } index->ntotal = index_ivf->ntotal; } // Get a subset of inverted lists [i0, i1). Works on IndexIVF's and // IndexIVF's embedded in a IndexPreTransform ArrayInvertedLists* get_invlist_range(const Index* index, long i0, long i1) { const IndexIVF* ivf = extract_index_ivf(index); FAISS_THROW_IF_NOT(0 <= i0 && i0 <= i1 && i1 <= ivf->nlist); const InvertedLists* src = ivf->invlists; ArrayInvertedLists* il = new ArrayInvertedLists(i1 - i0, src->code_size); for (long i = i0; i < i1; i++) { il->add_entries( i - i0, src->list_size(i), InvertedLists::ScopedIds(src, i).get(), InvertedLists::ScopedCodes(src, i).get()); } return il; } void set_invlist_range( Index* index, long i0, long i1, ArrayInvertedLists* src) { IndexIVF* ivf = extract_index_ivf(index); FAISS_THROW_IF_NOT(0 <= i0 && i0 <= i1 && i1 <= ivf->nlist); ArrayInvertedLists* dst = dynamic_cast<ArrayInvertedLists*>(ivf->invlists); FAISS_THROW_IF_NOT_MSG(dst, "only ArrayInvertedLists supported"); FAISS_THROW_IF_NOT( src->nlist == i1 - i0 && dst->code_size == src->code_size); size_t ntotal = index->ntotal; for (long i = i0; i < i1; i++) { ntotal -= dst->list_size(i); ntotal += src->list_size(i - i0); std::swap(src->codes[i - i0], dst->codes[i]); std::swap(src->ids[i - i0], dst->ids[i]); } ivf->ntotal = index->ntotal = ntotal; } static size_t count_ndis( const IndexIVF* index_ivf, size_t n_list_scan, const idx_t* Iq) { size_t nb_dis = 0; const InvertedLists* il = index_ivf->invlists; for (idx_t i = 0; i < n_list_scan; i++) { if (Iq[i] >= 0) { nb_dis += il->list_size(Iq[i]); } } return nb_dis; } void search_with_parameters( const Index* index, idx_t n, const float* x, idx_t k, float* distances, idx_t* labels, const IVFSearchParameters* params, size_t* nb_dis_ptr, double* ms_per_stage) { FAISS_THROW_IF_NOT(params); const float* prev_x = x; ScopeDeleter<float> del; double t0 = getmillisecs(); if (auto ip = dynamic_cast<const IndexPreTransform*>(index)) { x = ip->apply_chain(n, x); if (x != prev_x) { del.set(x); } index = ip->index; } double t1 = getmillisecs(); std::vector<idx_t> Iq(params->nprobe * n); std::vector<float> Dq(params->nprobe * n); const IndexIVF* index_ivf = dynamic_cast<const IndexIVF*>(index); FAISS_THROW_IF_NOT(index_ivf); index_ivf->quantizer->search(n, x, params->nprobe, Dq.data(), Iq.data()); if (nb_dis_ptr) { *nb_dis_ptr = count_ndis(index_ivf, n * params->nprobe, Iq.data()); } double t2 = getmillisecs(); index_ivf->search_preassigned( n, x, k, Iq.data(), Dq.data(), distances, labels, false, params); double t3 = getmillisecs(); if (ms_per_stage) { ms_per_stage[0] = t1 - t0; ms_per_stage[1] = t2 - t1; ms_per_stage[2] = t3 - t2; } } void range_search_with_parameters( const Index* index, idx_t n, const float* x, float radius, RangeSearchResult* result, const IVFSearchParameters* params, size_t* nb_dis_ptr, double* ms_per_stage) { FAISS_THROW_IF_NOT(params); const float* prev_x 
= x; ScopeDeleter<float> del; double t0 = getmillisecs(); if (auto ip = dynamic_cast<const IndexPreTransform*>(index)) { x = ip->apply_chain(n, x); if (x != prev_x) { del.set(x); } index = ip->index; } double t1 = getmillisecs(); std::vector<idx_t> Iq(params->nprobe * n); std::vector<float> Dq(params->nprobe * n); const IndexIVF* index_ivf = dynamic_cast<const IndexIVF*>(index); FAISS_THROW_IF_NOT(index_ivf); index_ivf->quantizer->search(n, x, params->nprobe, Dq.data(), Iq.data()); if (nb_dis_ptr) { *nb_dis_ptr = count_ndis(index_ivf, n * params->nprobe, Iq.data()); } double t2 = getmillisecs(); index_ivf->range_search_preassigned( n, x, radius, Iq.data(), Dq.data(), result, false, params); double t3 = getmillisecs(); if (ms_per_stage) { ms_per_stage[0] = t1 - t0; ms_per_stage[1] = t2 - t1; ms_per_stage[2] = t3 - t2; } } } // namespace ivflib } // namespace faiss
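The helpers in this file operate on IVF-based indexes wrapped in optional pre-transforms. The following is a minimal, self-contained usage sketch for merge_into() — it is not part of the file above, it assumes a standard Faiss installation, and the dimensionality, list count, and shard sizes are arbitrary illustration values.

// Merging two IVF shards with faiss::ivflib::merge_into().
#include <faiss/IVFlib.h>
#include <faiss/IndexFlat.h>
#include <faiss/IndexIVFFlat.h>
#include <faiss/clone_index.h>

#include <memory>
#include <random>
#include <vector>

int main() {
    const int d = 32, nlist = 16, nb = 1000;

    // Train one IVF index, then clone it so both shards share identical
    // coarse centroids (a condition enforced by check_compatible_for_merge).
    faiss::IndexFlatL2 quantizer(d);
    faiss::IndexIVFFlat shard0(&quantizer, d, nlist);

    std::mt19937 rng(123);
    std::uniform_real_distribution<float> dis(0.f, 1.f);
    std::vector<float> xb(nb * d);
    for (float& v : xb) v = dis(rng);

    shard0.train(nb, xb.data());
    std::unique_ptr<faiss::Index> shard1(faiss::clone_index(&shard0));

    // Fill each shard with its own batch of vectors.
    shard0.add(nb, xb.data());
    for (float& v : xb) v = dis(rng);
    shard1->add(nb, xb.data());

    // Move shard1's inverted lists into shard0; shift_ids=true keeps the
    // sequential ids of the second shard distinct from the first.
    faiss::ivflib::merge_into(&shard0, shard1.get(), /*shift_ids=*/true);
    return 0;  // shard0 now holds 2*nb vectors, shard1 is left empty
}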
6,117
309
/* ***** BEGIN LICENSE BLOCK ***** * JTransforms * Copyright (c) 2007 onward, <NAME> * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * * ***** END LICENSE BLOCK ***** */ package org.jtransforms.fft; import java.util.ArrayList; import java.util.Collection; import java.util.Random; import org.jtransforms.utils.CommonUtils; import pl.edu.icm.jlargearrays.ConcurrencyUtils; import org.jtransforms.utils.IOUtils; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; import pl.edu.icm.jlargearrays.FloatLargeArray; import pl.edu.icm.jlargearrays.LargeArray; import static org.apache.commons.math3.util.FastMath.*; /** * * This is a test of the class {@link FloatFFT_2D}. In this test, a very crude * 2d FFT method is implemented (see {@link #complexForward(float[][])}), * assuming that {@link FloatFFT_1D} has been fully tested and validated. This * crude (unoptimized) method is then used to establish <em>expected</em> values * of <em>direct</em> Fourier transforms. * </p> * * For <em>inverse</em> Fourier transforms, the test assumes that the * corresponding <em>direct</em> Fourier transform has been tested and * validated. * </p> * * In all cases, the test consists in creating a random array of data, and * verifying that expected and actual values of its Fourier transform coincide * (L2 norm is zero, within a specified accuracy). * </p> * * @author <NAME> * @author <NAME> */ @RunWith(value = Parameterized.class) public class FloatFFT_2DTest { /** * Base message of all exceptions. */ public static final String DEFAULT_MESSAGE = "%d-threaded FFT of size %dx%d: "; /** * The constant value of the seed of the random generator. 
*/ public static final int SEED = 20110602; private static final double EPS = pow(10, -3); private static final double EPS_UNSCALED = 0.5; @Parameters public static Collection<Object[]> getParameters() { final int[] size = {2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 16, 32, 64, 100, 120, 128, 256, 310, 511, 512, 1024}; final ArrayList<Object[]> parameters = new ArrayList<Object[]>(); for (int i = 0; i < size.length; i++) { for (int j = 0; j < size.length; j++) { parameters.add(new Object[]{size[i], size[j], 1, SEED}); parameters.add(new Object[]{size[i], size[j], 8, SEED}); } } return parameters; } /** * Fourier transform of the columns. */ private final FloatFFT_1D cfft; /** * The object to be tested. */ private final FloatFFT_2D fft; /** * Number of columns of the data arrays to be Fourier transformed. */ private final int numCols; /** * Number of rows of the data arrays to be Fourier transformed. */ private final int numRows; /** * Fourier transform of the rows. */ private final FloatFFT_1D rfft; /** * For the generation of the data arrays. */ private final Random random; /** * The number of threads used. */ private final int numThreads; /** * Creates a new instance of this test. * * @param numRows * number of rows * @param numColumns * number of columns * @param numThreads * the number of threads to be used * @param seed * the seed of the random generator */ public FloatFFT_2DTest(final int numRows, final int numColumns, final int numThreads, final long seed) { this.numRows = numRows; this.numCols = numColumns; LargeArray.setMaxSizeOf32bitArray(1); this.rfft = new FloatFFT_1D(numColumns); this.cfft = new FloatFFT_1D(numRows); this.fft = new FloatFFT_2D(numRows, numColumns); this.random = new Random(seed); ConcurrencyUtils.setNumberOfThreads(numThreads); CommonUtils.setThreadsBeginN_2D(4); this.numThreads = ConcurrencyUtils.getNumberOfThreads(); } /** * A crude implementation of 2d complex FFT. * * @param a * the data to be transformed */ public void complexForward(final float[][] a) { for (int r = 0; r < numRows; r++) { rfft.complexForward(a[r]); } final float[] buffer = new float[2 * numRows]; for (int c = 0; c < numCols; c++) { for (int r = 0; r < numRows; r++) { buffer[2 * r] = a[r][2 * c]; buffer[2 * r + 1] = a[r][2 * c + 1]; } cfft.complexForward(buffer); for (int r = 0; r < numRows; r++) { a[r][2 * c] = buffer[2 * r]; a[r][2 * c + 1] = buffer[2 * r + 1]; } } } /** * A test of {@link FloatFFT_2D#complexForward(float[])}. */ @Test public void testComplexForward1dInput() { final float[] actual = new float[2 * numRows * numCols]; final float[][] expected0 = new float[numRows][2 * numCols]; final float[] expected = new float[2 * numRows * numCols]; for (int r = 0; r < numRows; r++) { for (int c = 0; c < 2 * numCols; c++) { final float rnd = random.nextFloat(); actual[2 * r * numCols + c] = rnd; expected0[r][c] = rnd; } } fft.complexForward(actual); complexForward(expected0); for (int r = 0; r < numRows; r++) { for (int c = 0; c < 2 * numCols; c++) { expected[2 * r * numCols + c] = expected0[r][c]; } } double rmse = IOUtils.computeRMSE(actual, expected); Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS); } /** * A test of {@link FloatFFT_2D#complexForward(FloatLargeArray)}. 
*/ @Test public void testComplexForwardLarge() { final FloatLargeArray actual = new FloatLargeArray(2 * numRows * numCols); final float[][] expected0 = new float[numRows][2 * numCols]; final FloatLargeArray expected = new FloatLargeArray(2 * numRows * numCols); for (int r = 0; r < numRows; r++) { for (int c = 0; c < 2 * numCols; c++) { final float rnd = random.nextFloat(); actual.setDouble(2 * r * numCols + c, rnd); expected0[r][c] = rnd; } } fft.complexForward(actual); complexForward(expected0); for (int r = 0; r < numRows; r++) { for (int c = 0; c < 2 * numCols; c++) { expected.setDouble(2 * r * numCols + c, expected0[r][c]); } } double rmse = IOUtils.computeRMSE(actual, expected); Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS); } /** * A test of {@link FloatFFT_2D#complexForward(float[][])}. */ @Test public void testComplexForward2dInput() { final float[][] actual = new float[numRows][2 * numCols]; final float[][] expected = new float[numRows][2 * numCols]; for (int r = 0; r < numRows; r++) { for (int c = 0; c < 2 * numCols; c++) { final float rnd = random.nextFloat(); actual[r][c] = rnd; expected[r][c] = rnd; } } fft.complexForward(actual); complexForward(expected); double rmse = IOUtils.computeRMSE(actual, expected); Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS); } /** * A test of {@link FloatFFT_2D#complexInverse(float[], boolean)}, with * the second parameter set to <code>true</code>. */ @Test public void testComplexInverseScaled1dInput() { final float[] expected = new float[2 * numRows * numCols]; final float[] actual = new float[2 * numRows * numCols]; for (int i = 0; i < actual.length; i++) { final float rnd = random.nextFloat(); actual[i] = rnd; expected[i] = rnd; } fft.complexForward(actual); fft.complexInverse(actual, true); double rmse = IOUtils.computeRMSE(actual, expected); Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS); } /** * A test of {@link FloatFFT_2D#complexInverse(FloatLargeArray, boolean)}, with * the second parameter set to <code>true</code>. */ @Test public void testComplexInverseScaledLarge() { final FloatLargeArray expected = new FloatLargeArray(2 * numRows * numCols); final FloatLargeArray actual = new FloatLargeArray(2 * numRows * numCols); for (int i = 0; i < actual.length(); i++) { final float rnd = random.nextFloat(); actual.setDouble(i, rnd); expected.setDouble(i, rnd); } fft.complexForward(actual); fft.complexInverse(actual, true); double rmse = IOUtils.computeRMSE(actual, expected); Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS); } /** * A test of {@link FloatFFT_2D#complexInverse(float[][], boolean)}, with * the second parameter set to <code>true</code>. 
*/ @Test public void testComplexInverseScaled2dInput() { final float[][] expected = new float[numRows][2 * numCols]; final float[][] actual = new float[numRows][2 * numCols]; for (int r = 0; r < numRows; r++) { for (int c = 0; c < 2 * numCols; c++) { final float rnd = random.nextFloat(); actual[r][c] = rnd; expected[r][c] = rnd; } } fft.complexForward(actual); fft.complexInverse(actual, true); double rmse = IOUtils.computeRMSE(actual, expected); Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS); } /** * A test of {@link FloatFFT_2D#complexInverse(float[], boolean)}, with * the second parameter set to <code>false</code>. */ @Test public void testComplexInverseUnScaled1dInput() { final float[] expected = new float[2 * numRows * numCols]; final float[] actual = new float[2 * numRows * numCols]; for (int i = 0; i < actual.length; i++) { final float rnd = random.nextFloat(); actual[i] = rnd; expected[i] = rnd; } fft.complexForward(actual); fft.complexInverse(actual, false); final float s = numRows * numCols; for (int i = 0; i < actual.length; i++) { actual[i] = actual[i] / s; } double rmse = IOUtils.computeRMSE(actual, expected); Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS); } /** * A test of {@link FloatFFT_2D#complexInverse(FloatLargeArray, boolean)}, with * the second parameter set to <code>false</code>. */ @Test public void testComplexInverseUnScaledLarge() { final FloatLargeArray expected = new FloatLargeArray(2 * numRows * numCols); final FloatLargeArray actual = new FloatLargeArray(2 * numRows * numCols); for (int i = 0; i < actual.length(); i++) { final float rnd = random.nextFloat(); actual.setDouble(i, rnd); expected.setDouble(i, rnd); } fft.complexForward(actual); fft.complexInverse(actual, false); final float s = numRows * numCols; for (int i = 0; i < actual.length(); i++) { actual.setDouble(i, actual.getDouble(i) / s); } double rmse = IOUtils.computeRMSE(actual, expected); Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS); } /** * A test of {@link FloatFFT_2D#complexInverse(float[][], boolean)}, with * the second parameter set to <code>false</code>. 
*/ @Test public void testComplexInverseUnScaled2dInput() { final float[][] expected = new float[numRows][2 * numCols]; final float[][] actual = new float[numRows][2 * numCols]; for (int r = 0; r < numRows; r++) { for (int c = 0; c < 2 * numCols; c++) { final float rnd = random.nextFloat(); expected[r][c] = rnd; actual[r][c] = rnd; } } fft.complexForward(actual); fft.complexInverse(actual, false); final float s = numRows * numCols; for (int r = 0; r < numRows; r++) { for (int c = 0; c < 2 * numCols; c++) { actual[r][c] = actual[r][c] / s; } } double rmse = IOUtils.computeRMSE(actual, expected); Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS); } private static void fillSymmetric(final float[] a, int rows, int columns) { final int twon2 = 2 * columns; int idx1, idx2, idx3, idx4; int n1d2 = rows / 2; for (int r = (rows - 1); r >= 1; r--) { idx1 = r * columns; idx2 = 2 * idx1; for (int c = 0; c < columns; c += 2) { a[idx2 + c] = a[idx1 + c]; a[idx1 + c] = 0; a[idx2 + c + 1] = a[idx1 + c + 1]; a[idx1 + c + 1] = 0; } } for (int r = 1; r < n1d2; r++) { idx2 = r * twon2; idx3 = (rows - r) * twon2; a[idx2 + columns] = a[idx3 + 1]; a[idx2 + columns + 1] = -a[idx3]; } for (int r = 1; r < n1d2; r++) { idx2 = r * twon2; idx3 = (rows - r + 1) * twon2; for (int c = columns + 2; c < twon2; c += 2) { a[idx2 + c] = a[idx3 - c]; a[idx2 + c + 1] = -a[idx3 - c + 1]; } } for (int r = 0; r <= rows / 2; r++) { idx1 = r * twon2; idx4 = ((rows - r) % rows) * twon2; for (int c = 0; c < twon2; c += 2) { idx2 = idx1 + c; idx3 = idx4 + (twon2 - c) % twon2; a[idx3] = a[idx2]; a[idx3 + 1] = -a[idx2 + 1]; } } a[columns] = -a[1]; a[1] = 0; idx1 = n1d2 * twon2; a[idx1 + columns] = -a[idx1 + 1]; a[idx1 + 1] = 0; a[idx1 + columns + 1] = 0; } /** * A test of {@link FloatFFT_2D#realForward(float[])}. 
*/ @Test public void testRealForward1dInput() { if (!CommonUtils.isPowerOf2(numRows)) { return; } if (!CommonUtils.isPowerOf2(numCols)) { return; } final float[] actual = new float[2 * numRows * numCols]; final float[] expected = new float[numRows * 2 * numCols]; for (int r = 0; r < numRows; r++) { for (int c = 0; c < numCols; c++) { final float rnd = random.nextFloat(); actual[r * numCols + c] = rnd; expected[r * 2 * numCols + 2 * c] = rnd; } } fft.realForward(actual); fft.complexForward(expected); fillSymmetric(actual, numRows, numCols); double rmse = IOUtils.computeRMSE(actual, expected); Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS); } private static void fillSymmetric(final FloatLargeArray a, int rowsl, int columnsl) { final long twon2 = 2 * columnsl; long idx1, idx2, idx3, idx4; long n1d2 = rowsl / 2; for (long r = (rowsl - 1); r >= 1; r--) { idx1 = r * columnsl; idx2 = 2 * idx1; for (long c = 0; c < columnsl; c += 2) { a.setDouble(idx2 + c, a.getDouble(idx1 + c)); a.setDouble(idx1 + c, 0); a.setDouble(idx2 + c + 1, a.getDouble(idx1 + c + 1)); a.setDouble(idx1 + c + 1, 0); } } for (long r = 1; r < n1d2; r++) { idx2 = r * twon2; idx3 = (rowsl - r) * twon2; a.setDouble(idx2 + columnsl, a.getDouble(idx3 + 1)); a.setDouble(idx2 + columnsl + 1, -a.getDouble(idx3)); } for (long r = 1; r < n1d2; r++) { idx2 = r * twon2; idx3 = (rowsl - r + 1) * twon2; for (long c = columnsl + 2; c < twon2; c += 2) { a.setDouble(idx2 + c, a.getDouble(idx3 - c)); a.setDouble(idx2 + c + 1, -a.getDouble(idx3 - c + 1)); } } for (long r = 0; r <= rowsl / 2; r++) { idx1 = r * twon2; idx4 = ((rowsl - r) % rowsl) * twon2; for (long c = 0; c < twon2; c += 2) { idx2 = idx1 + c; idx3 = idx4 + (twon2 - c) % twon2; a.setDouble(idx3, a.getDouble(idx2)); a.setDouble(idx3 + 1, -a.getDouble(idx2 + 1)); } } a.setDouble(columnsl, -a.getDouble(1)); a.setDouble(1, 0); idx1 = n1d2 * twon2; a.setDouble(idx1 + columnsl, -a.getDouble(idx1 + 1)); a.setDouble(idx1 + 1, 0); a.setDouble(idx1 + columnsl + 1, 0); } /** * A test of {@link FloatFFT_2D#realForward(FloatLargeArray)}. 
*/ @Test public void testRealForwardLarge() { if (!CommonUtils.isPowerOf2(numRows)) { return; } if (!CommonUtils.isPowerOf2(numCols)) { return; } final FloatLargeArray actual = new FloatLargeArray(2 * numRows * numCols); final FloatLargeArray expected = new FloatLargeArray(numRows * 2 * numCols); for (int r = 0; r < numRows; r++) { for (int c = 0; c < numCols; c++) { final float rnd = random.nextFloat(); actual.setDouble(r * numCols + c, rnd); expected.setDouble(r * 2 * numCols + 2 * c, rnd); } } fft.realForward(actual); fft.complexForward(expected); fillSymmetric(actual, numRows, numCols); double rmse = IOUtils.computeRMSE(actual, expected); Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS); } private void fillSymmetric(final float[][] a, int rows, int columns) { final int newn2 = 2 * columns; int n1d2 = rows / 2; for (int r = 1; r < n1d2; r++) { int idx1 = rows - r; a[r][columns] = a[idx1][1]; a[r][columns + 1] = -a[idx1][0]; } for (int r = 1; r < n1d2; r++) { int idx1 = rows - r; for (int c = columns + 2; c < newn2; c += 2) { int idx2 = newn2 - c; a[r][c] = a[idx1][idx2]; a[r][c + 1] = -a[idx1][idx2 + 1]; } } for (int r = 0; r <= rows / 2; r++) { int idx1 = (rows - r) % rows; for (int c = 0; c < newn2; c += 2) { int idx2 = (newn2 - c) % newn2; a[idx1][idx2] = a[r][c]; a[idx1][idx2 + 1] = -a[r][c + 1]; } } a[0][columns] = -a[0][1]; a[0][1] = 0; a[n1d2][columns] = -a[n1d2][1]; a[n1d2][1] = 0; a[n1d2][columns + 1] = 0; } /** * A test of {@link FloatFFT_2D#realForward(float[][])}. */ @Test public void testRealForward2dInput() { if (!CommonUtils.isPowerOf2(numRows)) { return; } if (!CommonUtils.isPowerOf2(numCols)) { return; } final float[][] actual = new float[numRows][2 * numCols]; final float[][] expected = new float[numRows][2 * numCols]; for (int r = 0; r < numRows; r++) { for (int c = 0; c < numCols; c++) { final float rnd = random.nextFloat(); actual[r][c] = rnd; expected[r][2 * c] = rnd; } } fft.realForward(actual); complexForward(expected); fillSymmetric(actual, numRows, numCols); double rmse = IOUtils.computeRMSE(actual, expected); Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS); } /** * A test of {@link FloatFFT_2D#realForwardFull(float[])}. */ @Test public void testRealForwardFull1dInput() { final float[] actual = new float[2 * numRows * numCols]; final float[][] expected0 = new float[numRows][2 * numCols]; final float[] expected = new float[numRows * 2 * numCols]; for (int r = 0; r < numRows; r++) { for (int c = 0; c < numCols; c++) { final float rnd = random.nextFloat(); actual[r * numCols + c] = rnd; expected0[r][2 * c] = rnd; } } fft.realForwardFull(actual); complexForward(expected0); for (int r = 0; r < numRows; r++) { for (int c = 0; c < 2 * numCols; c++) { expected[2 * r * numCols + c] = expected0[r][c]; } } double rmse = IOUtils.computeRMSE(actual, expected); Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS); } /** * A test of {@link FloatFFT_2D#realForwardFull(FloatLargeArray)}. 
*/ @Test public void testRealForwardFullLarge() { final FloatLargeArray actual = new FloatLargeArray(2 * numRows * numCols); final float[][] expected0 = new float[numRows][2 * numCols]; final FloatLargeArray expected = new FloatLargeArray(numRows * 2 * numCols); for (int r = 0; r < numRows; r++) { for (int c = 0; c < numCols; c++) { final float rnd = random.nextFloat(); actual.setDouble(r * numCols + c, rnd); expected0[r][2 * c] = rnd; } } fft.realForwardFull(actual); complexForward(expected0); for (int r = 0; r < numRows; r++) { for (int c = 0; c < 2 * numCols; c++) { expected.setDouble(2 * r * numCols + c, expected0[r][c]); } } double rmse = IOUtils.computeRMSE(actual, expected); Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS); } /** * A test of {@link FloatFFT_2D#realForwardFull(float[][])}. */ @Test public void testRealForwardFull2dInput() { final float[][] actual = new float[numRows][2 * numCols]; final float[][] expected = new float[numRows][2 * numCols]; for (int r = 0; r < numRows; r++) { for (int c = 0; c < numCols; c++) { final float rnd = random.nextFloat(); actual[r][c] = rnd; expected[r][2 * c] = rnd; } } fft.realForwardFull(actual); complexForward(expected); double rmse = IOUtils.computeRMSE(actual, expected); Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS); } /** * A test of {@link FloatFFT_2D#realInverseFull(float[], boolean)}, with * the second parameter set to <code>true</code>. */ @Test public void testRealInverseFullScaled1dInput() { final float[] actual = new float[2 * numRows * numCols]; final float[] expected = new float[2 * numRows * numCols]; for (int r = 0; r < numRows; r++) { for (int c = 0; c < numCols; c++) { final float rnd = random.nextFloat(); final int index = r * numCols + c; actual[index] = rnd; expected[2 * index] = rnd; } } // TODO If the two following lines are permuted, this causes an array // index out of bounds exception. fft.complexInverse(expected, true); fft.realInverseFull(actual, true); double rmse = IOUtils.computeRMSE(actual, expected); Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS); } /** * A test of {@link FloatFFT_2D#realInverseFull(FloatLargeArray, boolean)}, with * the second parameter set to <code>true</code>. */ @Test public void testRealInverseFullScaledLarge() { final FloatLargeArray actual = new FloatLargeArray(2 * numRows * numCols); final FloatLargeArray expected = new FloatLargeArray(2 * numRows * numCols); for (int r = 0; r < numRows; r++) { for (int c = 0; c < numCols; c++) { final float rnd = random.nextFloat(); final int index = r * numCols + c; actual.setDouble(index, rnd); expected.setDouble(2 * index, rnd); } } // TODO If the two following lines are permuted, this causes an array // index out of bounds exception. fft.complexInverse(expected, true); fft.realInverseFull(actual, true); double rmse = IOUtils.computeRMSE(actual, expected); Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS); } /** * A test of {@link FloatFFT_2D#realInverseFull(float[][], boolean)}, with * the second parameter set to <code>true</code>. 
*/ @Test public void testRealInverseFullScaled2dInput() { final float[][] actual = new float[numRows][2 * numCols]; final float[][] expected = new float[numRows][2 * numCols]; for (int r = 0; r < numRows; r++) { for (int c = 0; c < numCols; c++) { final float rnd = random.nextFloat(); actual[r][c] = rnd; expected[r][2 * c] = rnd; } } fft.realInverseFull(actual, true); fft.complexInverse(expected, true); double rmse = IOUtils.computeRMSE(actual, expected); Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS); } /** * A test of {@link FloatFFT_2D#realInverseFull(float[], boolean)}, with * the second parameter set to <code>false</code>. */ @Test public void testRealInverseFullUnscaled1dInput() { final float[] actual = new float[2 * numRows * numCols]; final float[] expected = new float[2 * numRows * numCols]; for (int r = 0; r < numRows; r++) { for (int c = 0; c < numCols; c++) { final float rnd = random.nextFloat(); final int index = r * numCols + c; actual[index] = rnd; expected[2 * index] = rnd; } } // TODO If the two following lines are permuted, this causes an array // index out of bounds exception. fft.complexInverse(expected, false); fft.realInverseFull(actual, false); double rmse = IOUtils.computeRMSE(actual, expected); Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS_UNSCALED); } /** * A test of {@link FloatFFT_2D#realInverseFull(FloatLargeArray, boolean)}, with * the second parameter set to <code>false</code>. */ @Test public void testRealInverseFullUnscaledLarge() { final FloatLargeArray actual = new FloatLargeArray(2 * numRows * numCols); final FloatLargeArray expected = new FloatLargeArray(2 * numRows * numCols); for (int r = 0; r < numRows; r++) { for (int c = 0; c < numCols; c++) { final float rnd = random.nextFloat(); final int index = r * numCols + c; actual.set(index, rnd); expected.setDouble(2 * index, rnd); } } // TODO If the two following lines are permuted, this causes an array // index out of bounds exception. fft.complexInverse(expected, false); fft.realInverseFull(actual, false); double rmse = IOUtils.computeRMSE(actual, expected); Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS_UNSCALED); } /** * A test of {@link FloatFFT_2D#realInverseFull(float[][], boolean)}, with * the second parameter set to <code>false</code>. */ @Test public void testRealInverseFullUnscaled2dInput() { final float[][] actual = new float[numRows][2 * numCols]; final float[][] expected = new float[numRows][2 * numCols]; for (int r = 0; r < numRows; r++) { for (int c = 0; c < numCols; c++) { final float rnd = random.nextFloat(); actual[r][c] = rnd; expected[r][2 * c] = rnd; } } fft.realInverseFull(actual, false); fft.complexInverse(expected, false); double rmse = IOUtils.computeRMSE(actual, expected); Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS_UNSCALED); } /** * A test of {@link FloatFFT_2D#realInverse(float[], boolean)}, with the * second parameter set to <code>true</code>. 
*/ @Test public void testRealInverseScaled1dInput() { if (!CommonUtils.isPowerOf2(numRows)) { return; } if (!CommonUtils.isPowerOf2(numCols)) { return; } final float[] actual = new float[numRows * numCols]; final float[] expected = new float[actual.length]; for (int i = 0; i < actual.length; i++) { final float rnd = random.nextFloat(); actual[i] = rnd; expected[i] = rnd; } fft.realForward(actual); fft.realInverse(actual, true); double rmse = IOUtils.computeRMSE(actual, expected); Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS); } /** * A test of {@link FloatFFT_2D#realInverse(FloatLargeArray, boolean)}, with the * second parameter set to <code>true</code>. */ @Test public void testRealInverseScaledLarge() { if (!CommonUtils.isPowerOf2(numRows)) { return; } if (!CommonUtils.isPowerOf2(numCols)) { return; } final FloatLargeArray actual = new FloatLargeArray(numRows * numCols); final FloatLargeArray expected = new FloatLargeArray(actual.length()); for (int i = 0; i < actual.length(); i++) { final float rnd = random.nextFloat(); actual.setDouble(i, rnd); expected.setDouble(i, rnd); } fft.realForward(actual); fft.realInverse(actual, true); double rmse = IOUtils.computeRMSE(actual, expected); Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS); } /** * A test of {@link FloatFFT_2D#realInverse(float[][], boolean)}, with the * second parameter set to <code>true</code>. */ @Test public void testRealInverseScaled2dInput() { if (!CommonUtils.isPowerOf2(numRows)) { return; } if (!CommonUtils.isPowerOf2(numCols)) { return; } final float[][] actual = new float[numRows][numCols]; final float[][] expected = new float[numRows][numCols]; for (int r = 0; r < numRows; r++) { for (int c = 0; c < numCols; c++) { final float rnd = random.nextFloat(); actual[r][c] = rnd; expected[r][c] = rnd; } } fft.realForward(actual); fft.realInverse(actual, true); double rmse = IOUtils.computeRMSE(actual, expected); Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS); } }
17,670
2,151
<reponame>zipated/src
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "third_party/blink/renderer/platform/instrumentation/tracing/trace_event.h"

#include "base/trace_event/trace_event.h"

namespace blink {
namespace TraceEvent {

void EnableTracing(const String& category_filter) {
  base::trace_event::TraceLog::GetInstance()->SetEnabled(
      base::trace_event::TraceConfig(category_filter.Utf8().data(), ""),
      base::trace_event::TraceLog::RECORDING_MODE);
}

void DisableTracing() {
  base::trace_event::TraceLog::GetInstance()->SetDisabled();
}

void AddAsyncEnabledStateObserver(
    base::WeakPtr<AsyncEnabledStateObserver> observer) {
  base::trace_event::TraceLog::GetInstance()->AddAsyncEnabledStateObserver(
      observer);
}

void RemoveAsyncEnabledStateObserver(AsyncEnabledStateObserver* observer) {
  base::trace_event::TraceLog::GetInstance()->RemoveAsyncEnabledStateObserver(
      observer);
}

}  // namespace TraceEvent
}  // namespace blink
353
7,482
<filename>bsp/stm32/stm32f767-fire-challenger-v1/board/ports/sdram_port.h<gh_stars>1000+
/*
 * Copyright (c) 2006-2021, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2018-12-04     zylx         The first version for STM32F4xx
 */

#ifndef __SDRAM_PORT_H__
#define __SDRAM_PORT_H__

/* parameters for sdram peripheral */
/* Bank1 or Bank2 */
#define SDRAM_TARGET_BANK               2
/* stm32f7 Bank1:0XC0000000  Bank2:0XD0000000 */
#define SDRAM_BANK_ADDR                 ((uint32_t)0XD0000000)
/* data width: 8, 16, 32 */
#define SDRAM_DATA_WIDTH                16
/* column bit numbers: 8, 9, 10, 11 */
#define SDRAM_COLUMN_BITS               8
/* row bit numbers: 11, 12, 13 */
#define SDRAM_ROW_BITS                  12
/* cas latency clock number: 1, 2, 3 */
#define SDRAM_CAS_LATENCY               2
/* read pipe delay: 0, 1, 2 */
#define SDRAM_RPIPE_DELAY               0
/* clock divid: 2, 3 */
#define SDCLOCK_PERIOD                  2
/* refresh rate counter */
#define SDRAM_REFRESH_COUNT             ((uint32_t)0x056A)
#define SDRAM_SIZE                      ((uint32_t)0x800000)

/* Timing configuration for IS42S16400J */
/* 108 MHz of SD clock frequency (216MHz/2) */
/* TMRD: 2 Clock cycles */
#define LOADTOACTIVEDELAY               2
/* TXSR: 8x9.25ns */
#define EXITSELFREFRESHDELAY            8
/* TRAS: 5x9.25ns */
#define SELFREFRESHTIME                 5
/* TRC: 7x9.25ns */
#define ROWCYCLEDELAY                   7
/* TWR: 2 Clock cycles */
#define WRITERECOVERYTIME               2
/* TRP: 2x9.25ns */
#define RPDELAY                         2
/* TRCD: 2x9.25ns */
#define RCDDELAY                        3

/* memory mode register */
#define SDRAM_MODEREG_BURST_LENGTH_1             ((uint16_t)0x0000)
#define SDRAM_MODEREG_BURST_LENGTH_2             ((uint16_t)0x0001)
#define SDRAM_MODEREG_BURST_LENGTH_4             ((uint16_t)0x0002)
#define SDRAM_MODEREG_BURST_LENGTH_8             ((uint16_t)0x0004)
#define SDRAM_MODEREG_BURST_TYPE_SEQUENTIAL      ((uint16_t)0x0000)
#define SDRAM_MODEREG_BURST_TYPE_INTERLEAVED     ((uint16_t)0x0008)
#define SDRAM_MODEREG_CAS_LATENCY_2              ((uint16_t)0x0020)
#define SDRAM_MODEREG_CAS_LATENCY_3              ((uint16_t)0x0030)
#define SDRAM_MODEREG_OPERATING_MODE_STANDARD    ((uint16_t)0x0000)
#define SDRAM_MODEREG_WRITEBURST_MODE_PROGRAMMED ((uint16_t)0x0000)
#define SDRAM_MODEREG_WRITEBURST_MODE_SINGLE     ((uint16_t)0x0200)

#endif
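The mode-register defines at the end of this header are normally OR-ed together into the word that the board's SDRAM init routine sends with the FMC "load mode register" command. The snippet below is a minimal sketch, not part of the board package: it only composes that value from the macros in sdram_port.h, and it omits the actual command call because the exact driver API depends on the BSP in use.

/* Sketch: build the SDRAM mode-register word from sdram_port.h. */
#include <stdint.h>
#include "sdram_port.h"

static uint32_t sdram_mode_register_value(void)
{
    /* Burst length 1, sequential bursts, CAS latency matching
       SDRAM_CAS_LATENCY (2), standard operating mode, single-location
       write bursts. */
    return (uint32_t)(SDRAM_MODEREG_BURST_LENGTH_1 |
                      SDRAM_MODEREG_BURST_TYPE_SEQUENTIAL |
                      SDRAM_MODEREG_CAS_LATENCY_2 |
                      SDRAM_MODEREG_OPERATING_MODE_STANDARD |
                      SDRAM_MODEREG_WRITEBURST_MODE_SINGLE);
}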
1,332